diff --git a/4de8d861ed2563988d5f1871647ebc5fe70861b32d24a4b32f9363518653a328/crowd-code-faba6583-b2c9-4b94-9ba6-9f240428520a1750722089894-2025_06_23-22.50.32.930/source.csv b/4de8d861ed2563988d5f1871647ebc5fe70861b32d24a4b32f9363518653a328/crowd-code-faba6583-b2c9-4b94-9ba6-9f240428520a1750722089894-2025_06_23-22.50.32.930/source.csv new file mode 100644 index 0000000000000000000000000000000000000000..b827f70bca775638b80a4563c326689f523563a7 --- /dev/null +++ b/4de8d861ed2563988d5f1871647ebc5fe70861b32d24a4b32f9363518653a328/crowd-code-faba6583-b2c9-4b94-9ba6-9f240428520a1750722089894-2025_06_23-22.50.32.930/source.csv @@ -0,0 +1,182 @@ +Sequence,Time,File,RangeOffset,RangeLength,Text,Language,Type +1,2,"src/recording.ts",0,0,"import * as fs from 'node:fs'\nimport * as path from 'node:path'\nimport * as vscode from 'vscode'\nimport * as readline from 'node:readline'\nimport axios from 'axios'\nimport { hasConsent, showConsentChangeDialog } from './consent'\nimport {\n getEditorFileName,\n escapeString,\n getEditorLanguage,\n notificationWithProgress,\n generateBaseFilePath,\n formatDisplayTime,\n getExportPath,\n logToOutput,\n formatSrtTime,\n getConfig,\n removeDoubleQuotes,\n unescapeString,\n addToGitignore,\n} from './utilities'\nimport { type File, ChangeType, type CSVRowBuilder, type Change, type Recording, type ConsentStatus } from './types'\nimport { extContext, statusBarItem, actionsProvider } from './extension'\n\nexport const commands = {\n openSettings: 'crowd-code.openSettings',\n startRecording: 'crowd-code.startRecording',\n stopRecording: 'crowd-code.stopRecording',\n panicButton: 'crowd-code.panicButton',\n}\n\nexport const recording: Recording = {\n isRecording: false,\n timer: 0,\n startDateTime: null,\n endDateTime: null,\n sequence: 0,\n customFolderName: '',\n activatedFiles: new Set(),\n}\n\nlet intervalId: NodeJS.Timeout\nconst fileQueue: File[] = []\nlet isAppending = false\n\nlet uploadIntervalId: NodeJS.Timeout;\nconst sessionUuid = vscode.env.sessionId;\n\nlet panicStatusBarItem: vscode.StatusBarItem | undefined;\nlet panicButtonPressCount = 0;\nlet panicButtonTimeoutId: NodeJS.Timeout | undefined;\nlet accumulatedRemovedContent: Array<{content: string, sequence: number}> = []; // Store content with sequence numbers\n\nconst CROWD_CODE_API_GATEWAY_URL = process.env.CROWD_CODE_API_GATEWAY_URL;\n\nconst PANIC_BUTTON_TIMEOUT = 3000; // 3 seconds timeout for successive presses\n\n/**\n * Builds a CSV row with the given parameters.\n *\n * @param {CSVRowBuilder} sequence - The sequence number of the change.\n * @param {CSVRowBuilder} rangeOffset - The offset of the changed range.\n * @param {CSVRowBuilder} rangeLength - The length of the changed range.\n * @param {CSVRowBuilder} text - The text of the change.\n * @param {string} type - The type of the change (optional, defaults to 'content').\n * @return {string} A CSV row string with the provided information.\n */\nexport function buildCsvRow({\n sequence,\n rangeOffset,\n rangeLength,\n text,\n type = ChangeType.CONTENT,\n}: CSVRowBuilder): string | undefined {\n if (!recording.startDateTime) {\n return\n }\n\n const time = new Date().getTime() - recording.startDateTime.getTime()\n\n if (type === ChangeType.HEADING) {\n return 'Sequence,Time,File,RangeOffset,RangeLength,Text,Language,Type\n'\n }\n\n if (type === ChangeType.TERMINAL_FOCUS || type === ChangeType.TERMINAL_COMMAND || type === ChangeType.TERMINAL_OUTPUT) {\n return 
`${sequence},${time},""TERMINAL"",${rangeOffset},${rangeLength},""${escapeString(text)}"",,${type}\n`\n }\n\n const editorFileName = getEditorFileName()\n return `${sequence},${time},""${editorFileName}"",${rangeOffset},${rangeLength},""${escapeString(text)}"",${getEditorLanguage()},${type}\n`\n}\n\n/**\n * Checks if the current file being edited is within the configured export path.\n * This is used to determine if the current file should be recorded or not.\n *\n * @returns {boolean} `true` if the current file is within the export path, `false` otherwise.\n */\nexport function isCurrentFileExported(): boolean {\n const editor = vscode.window.activeTextEditor\n const filename = editor?.document.fileName.replaceAll('\\', '/')\n const exportPath = getExportPath()\n if (!editor || !filename || !exportPath) {\n return false\n }\n return filename.startsWith(exportPath)\n}\n\nconst onChangeSubscription = vscode.workspace.onDidChangeTextDocument(event => {\n if (!recording.isRecording) {\n return\n }\n\n if (isCurrentFileExported()) {\n return\n }\n const editor = vscode.window.activeTextEditor\n if (editor && event.document === editor.document) {\n for (const change of event.contentChanges) {\n recording.sequence++\n addToFileQueue(\n buildCsvRow({\n sequence: recording.sequence,\n rangeOffset: change.rangeOffset,\n rangeLength: change.rangeLength,\n text: change.text,\n })\n )\n appendToFile()\n }\n }\n})\n\n/**\n * Creates the recording folder if it doesn't exist.\n * @param folderPath - The path to the recording folder.\n */\nfunction createRecordingFolder(folderPath: string): void {\n if (!fs.existsSync(folderPath)) {\n fs.mkdirSync(folderPath, { recursive: true })\n }\n}\n\n/**\n * Starts the recording process and initializes necessary variables.\n */\nexport async function startRecording(): Promise {\n if (recording.isRecording) {\n notificationWithProgress('Already recording')\n logToOutput('Already recording', 'info')\n return\n }\n const exportPath = getExportPath()\n if (!exportPath) {\n return\n }\n\n // If the setting is enabled and the path is inside the workspace, add it to .gitignore\n if (\n getConfig().get('export.addToGitignore') &&\n getConfig().get('export.exportPath')?.startsWith('${workspaceFolder}')\n ) {\n await addToGitignore()\n }\n\n recording.startDateTime = new Date()\n recording.activatedFiles = new Set()\n\n // Ask for folder name if enabled in settings\n let customFolderName: string | undefined\n if (getConfig().get('recording.askFolderName')) {\n customFolderName = await vscode.window.showInputBox({\n prompt: 'Enter a name for the recording folder',\n placeHolder: 'Enter recording folder name',\n })\n if (!customFolderName) {\n stopRecording(true)\n return\n }\n recording.customFolderName = customFolderName\n }\n\n const baseFilePath = generateBaseFilePath(recording.startDateTime, false, recording.customFolderName, sessionUuid)\n if (!baseFilePath) {\n stopRecording(true)\n return\n }\n\n // Create the recording folder\n const folderPath = path.dirname(path.join(exportPath, baseFilePath))\n createRecordingFolder(folderPath)\n\n recording.isRecording = true\n recording.timer = 0\n recording.endDateTime = null\n recording.sequence = 0\n panicButtonPressCount = 0 // Reset panic button counter for new recording\n accumulatedRemovedContent = [] // Clear accumulated content for new recording\n if (panicButtonTimeoutId) {\n clearTimeout(panicButtonTimeoutId)\n panicButtonTimeoutId = undefined\n }\n intervalId = setInterval(() => {\n recording.timer++\n 
updateStatusBarItem()\n }, 1000)\n notificationWithProgress('Recording started')\n logToOutput('Recording started', 'info')\n\n // Only log initial editor content if there's an active text editor\n const editorText = vscode.window.activeTextEditor?.document.getText()\n const activeEditorUri = vscode.window.activeTextEditor?.document.uri.toString()\n\n if (editorText !== undefined && activeEditorUri) {\n recording.sequence++\n const csvRow = {\n sequence: recording.sequence,\n rangeOffset: 0,\n rangeLength: 0,\n text: editorText,\n type: ChangeType.TAB,\n }\n addToFileQueue(buildCsvRow({ ...csvRow, type: ChangeType.HEADING }))\n addToFileQueue(buildCsvRow(csvRow))\n appendToFile()\n recording.activatedFiles.add(activeEditorUri)\n actionsProvider.setCurrentFile(vscode.window.activeTextEditor?.document.fileName || '')\n } else {\n // If no active editor, just add the header row\n recording.sequence++\n addToFileQueue(buildCsvRow({ \n sequence: recording.sequence,\n rangeOffset: 0,\n rangeLength: 0,\n text: '',\n type: ChangeType.HEADING \n }))\n appendToFile()\n }\n\n extContext.subscriptions.push(onChangeSubscription)\n updateStatusBarItem()\n updatePanicButton()\n actionsProvider.setRecordingState(true)\n\n // Set up a timer to send data to the Lambda endpoint periodically\n uploadIntervalId = setInterval(async () => {\n if (!exportPath) {\n return;\n }\n \n if (typeof CROWD_CODE_API_GATEWAY_URL !== 'string' || !CROWD_CODE_API_GATEWAY_URL.trim()) {\n logToOutput(""CROWD_CODE_API_GATEWAY_URL must be a non-empty string. Please check your build configuration."", 'error');\n return;\n }\n\n // Only upload data if user has given consent\n if (!hasConsent()) {\n return;\n }\n\n const filePath = path.join(exportPath, `${baseFilePath}.csv`);\n const extensionVersion = extContext.extension.packageJSON.version as string;\n const userId = extContext.globalState.get('userId');\n\n try {\n const fileContent = await fs.promises.readFile(filePath, 'utf-8');\n\n if (fileContent) {\n const payload = {\n fileName: `${baseFilePath}.csv`,\n content: fileContent,\n version: extensionVersion,\n userId: userId\n };\n await axios.post(CROWD_CODE_API_GATEWAY_URL, payload);\n console.log(`Successfully sent ${payload.fileName} to Lambda endpoint.`);\n logToOutput(`Successfully sent to Lambda endpoint.`, 'info');\n }\n } catch (error: any) {\n if (error.code === 'ENOENT') {\n console.warn(`File not found at ${filePath}. 
It might be created on first write.`);\n            } else {\n                console.error(`Error sending data to Lambda: ${error.message}`);\n                if (axios.isAxiosError(error) && error.response) {\n                    console.error(""Lambda response status:"", error.response.status);\n                    console.error(""Lambda response data:"", error.response.data);\n                }\n            }\n        }\n    }, 1 * 60 * 1000); // 5 minutes\n}\n\n/**\n * Stops the recording process and finalizes the recording data.\n * @param context - The extension context.\n */\nexport function stopRecording(force = false): Promise<void> | void {\n    if (!recording.isRecording) {\n        notificationWithProgress('Not recording')\n        return\n    }\n\n    recording.isRecording = false\n    clearInterval(intervalId)\n    clearInterval(uploadIntervalId); // Clear the upload timer\n    recording.timer = 0\n    recording.activatedFiles?.clear()\n    panicButtonPressCount = 0 // Reset panic button counter when recording stops\n    accumulatedRemovedContent = [] // Clear accumulated content when recording stops\n    if (panicButtonTimeoutId) {\n        clearTimeout(panicButtonTimeoutId)\n        panicButtonTimeoutId = undefined\n    }\n    const index = extContext.subscriptions.indexOf(onChangeSubscription)\n    if (index !== -1) {\n        extContext.subscriptions.splice(index, 1)\n    }\n    updateStatusBarItem()\n    updatePanicButton()\n    actionsProvider.setRecordingState(false)\n    if (force) {\n        notificationWithProgress('Recording cancelled')\n        logToOutput('Recording cancelled', 'info')\n        recording.customFolderName = undefined\n        return\n    }\n    notificationWithProgress('Recording finished')\n    logToOutput('Recording finished', 'info')\n    recording.endDateTime = new Date()\n    return processCsvFile().then(() => {\n        // Reset customFolderName after processing is complete\n        recording.customFolderName = undefined\n    }).catch(err => {\n        logToOutput(`Error processing CSV file during stop: ${String(err)}`, 'error')\n        recording.customFolderName = undefined\n    });\n}\n\n/**\n * Appends data from the file queue to the appropriate file in the workspace.\n */\nexport async function appendToFile(): Promise<void> {\n    if (isAppending) {\n        return\n    }\n    isAppending = true\n\n    const exportPath = getExportPath()\n    if (!exportPath) {\n        logToOutput('Export path not available in appendToFile, stopping recording.', 'error')\n        stopRecording(true)\n        isAppending = false\n        return\n    }\n\n    while (fileQueue.length > 0) {\n        const itemToAppend = fileQueue.shift()\n        if (!itemToAppend) {\n            continue\n        }\n\n        const filePath = path.join(exportPath, itemToAppend.name)\n\n        try {\n            const directory = path.dirname(filePath)\n            if (!fs.existsSync(directory)) {\n                fs.mkdirSync(directory, { recursive: true })\n            }\n            await fs.promises.appendFile(filePath, itemToAppend.content)\n        } catch (err) {\n            logToOutput(\n                `Failed to append to file ${filePath}: ${err}. Item dropped. 
Content: ${itemToAppend.content.substring(0, 100)}...`,\n                'error'\n            )\n        }\n    }\n    isAppending = false\n}\n\n/**\n * Appends an SRT line to the file queue for the previous change.\n *\n * This function is responsible for generating the SRT format line for the previous change and adding it to the file queue.\n * It checks if the SRT export format is enabled, and if so, it generates the SRT line for the previous change and adds it to the file queue.\n *\n * @param processedChanges - An array of processed changes.\n * @param i - The index of the current change in the processedChanges array.\n * @param exportInSrt - A boolean indicating whether the SRT export format is enabled.\n */\nfunction addToSRTFile(processedChanges: Change[], i: number, exportInSrt: boolean) {\n    if (!exportInSrt) {\n        return\n    }\n    if (i === 0) {\n        return\n    }\n    addToFileQueue(\n        addSrtLine(\n            processedChanges[i - 1].sequence,\n            processedChanges[i - 1].startTime,\n            processedChanges[i - 1].endTime,\n            JSON.stringify({\n                text: processedChanges[i - 1].text,\n                file: processedChanges[i - 1].file,\n                language: processedChanges[i - 1].language,\n            })\n        ),\n        'srt',\n        true\n    )\n}\n\n/**\n * Returns the new text content based on the change type and the previous change.\n * @param type - The type of the change.\n * @param text - The text of the change.\n * @param previousChange - The previous change.\n * @param rangeOffset - The offset of the range.\n * @param rangeLength - The length of the range.\n */\nfunction getNewTextContent(\n    type: string,\n    text: string,\n    previousChange: Change | null,\n    rangeOffset: number,\n    rangeLength: number\n): string {\n    if (type === ChangeType.TAB) {\n        return text\n    }\n    if (!previousChange) {\n        return ''\n    }\n    return getUpdatedText(previousChange.text, rangeOffset, rangeLength, text)\n}\n\n/**\n * Processes a single CSV line and returns the processed change\n */\nasync function processCSVLine(line: string, previousChange: Change | null): Promise<Change | null> {\n    const lineArr = line.split(/,(?=(?:[^""]*""[^""]*"")*[^""]*$)/)\n\n    if (Number.isNaN(Number.parseInt(lineArr[0]))) {\n        return null\n    }\n\n    const time = Number.parseInt(lineArr[1])\n    const file = removeDoubleQuotes(lineArr[2])\n    const rangeOffset = Number.parseInt(lineArr[3])\n    const rangeLength = Number.parseInt(lineArr[4])\n    const text = unescapeString(removeDoubleQuotes(lineArr[5]))\n    const language = lineArr[6]\n    const type = lineArr[7]\n\n    const newText = getNewTextContent(type, text, previousChange, rangeOffset, rangeLength)\n\n    /**\n     * Skip exporting changes with the same values to the previous change.\n     */\n    if (\n        previousChange &&\n        time === previousChange.startTime &&\n        file === previousChange.file &&\n        newText === previousChange.text &&\n        language === previousChange.language\n    ) {\n        return null\n    }\n\n    return {\n        sequence: previousChange ? 
previousChange.sequence + 1 : 1,\n        file,\n        startTime: time,\n        endTime: 0,\n        language,\n        text: newText,\n    }\n}\n\n/**\n * Returns the updated text content based on the previous text, range offset, range length, and new text.\n * @param previousText - The previous text.\n * @param rangeOffset - The offset of the range.\n * @param rangeLength - The length of the range.\n * @param newText - The new text.\n */\nfunction getUpdatedText(\n    previousText: string,\n    rangeOffset: number,\n    rangeLength: number,\n    newText: string\n): string {\n    const textArray = previousText.split('')\n    textArray.splice(rangeOffset, rangeLength, newText)\n    return textArray.join('')\n}\n\n/**\n * Processes the CSV file and generates the necessary output files.\n */\nasync function processCsvFile(): Promise<void> {\n    if (!validateRecordingState()) {\n        return\n    }\n\n    const exportFormats = getConfig().get<string[]>('export.exportFormats', [])\n    if (exportFormats.length === 0) {\n        logToOutput('No export formats specified', 'info')\n        vscode.window.showWarningMessage('No export formats specified')\n        return\n    }\n\n    const exportPath = getExportPath()\n    if (!exportPath) {\n        return\n    }\n\n    if (!recording.startDateTime) {\n        return\n    }\n\n    // Use the same custom folder name for reading the source file\n    const baseFilePathSource = generateBaseFilePath(\n        recording.startDateTime,\n        false,\n        recording.customFolderName,\n        sessionUuid\n    )\n    if (!baseFilePathSource) {\n        return\n    }\n\n    const filePath = path.join(exportPath, `${baseFilePathSource}.csv`)\n\n    try {\n        if (!fs.existsSync(filePath)) {\n            throw new Error(`Source file not found: ${filePath}`)\n        }\n\n        const processedChanges: Change[] = []\n\n        const rl = readline.createInterface({\n            input: fs.createReadStream(filePath),\n            crlfDelay: Number.POSITIVE_INFINITY,\n        })\n\n        for await (const line of rl) {\n            const previousChange = processedChanges[processedChanges.length - 1]\n            const change = await processCSVLine(line, previousChange)\n\n            if (change) {\n                if (previousChange) {\n                    previousChange.endTime = change.startTime\n                    if (exportFormats.includes('SRT')) {\n                        addToSRTFile(processedChanges, processedChanges.length, true)\n                    }\n                }\n                processedChanges.push(change)\n            }\n        }\n\n        rl.close();\n\n        return finalizeRecording(processedChanges, exportFormats);\n\n    } catch (err) {\n        vscode.window.showErrorMessage(`Error processing recording: ${err}`)\n        logToOutput('Error processing CSV file: ' + String(err), 'error')\n        return Promise.resolve(); // Resolve even on error after showing message\n    }\n}\n\nfunction validateRecordingState(): boolean {\n    if (!vscode.workspace.workspaceFolders) {\n        logToOutput(\n            'No workspace folder found. 
To process the recording is needed a workspace folder',\n            'error'\n        )\n        return false\n    }\n    if (!recording.endDateTime || !recording.startDateTime) {\n        logToOutput('Recording date time is not properly set', 'error')\n        return false\n    }\n    return true\n}\n\nfunction finalizeRecording(processedChanges: Change[], exportFormats: string[]): Promise<void> {\n    const lastChange = processedChanges[processedChanges.length - 1]\n    if (lastChange && recording.endDateTime && recording.startDateTime) {\n        lastChange.endTime = recording.endDateTime.getTime() - recording.startDateTime.getTime()\n        if (exportFormats.includes('SRT')) {\n            addToSRTFile(processedChanges, processedChanges.length, true)\n        }\n    }\n    if (exportFormats.includes('JSON')) {\n        addToFileQueue(JSON.stringify(processedChanges), 'json', true)\n    }\n    return appendToFile().then(() => {\n        // Refresh the recordFiles view after export is complete\n        vscode.commands.executeCommand('crowd-code.refreshRecordFiles')\n    })\n}\n\n/**\n * Adds a line to the SRT file format.\n * @param sequence - The sequence number of the change.\n * @param start - The start time of the change.\n * @param end - The end time of the change.\n * @param text - The text of the change.\n * @returns A string representing a line in the SRT file format.\n */\nfunction addSrtLine(sequence: number, start: number, end: number, text: string): string {\n    return `${sequence}\n${formatSrtTime(start)} --> ${formatSrtTime(end)}\n${text}\n\n`\n}\n\n/**\n * Adds content to the file queue.\n * @param content - The content to add.\n * @param fileExtension - The file extension (optional, defaults to 'csv').\n */\nexport function addToFileQueue(\n    content: string | undefined,\n    fileExtension = 'csv',\n    isExport = false\n): void {\n    if (!content) {\n        return\n    }\n    if (!recording.startDateTime) {\n        return\n    }\n    // Use the same custom name throughout the recording session\n    const baseFilePath = generateBaseFilePath(recording.startDateTime, isExport, recording.customFolderName, sessionUuid)\n    if (!baseFilePath) {\n        return\n    }\n    fileQueue.push({\n        name: `${baseFilePath}.${fileExtension}`,\n        content: content,\n    })\n}\n\n/**\n * Updates the status bar item with the current recording status and time.\n */\nexport function updateStatusBarItem(): void {\n    if (recording.isRecording) {\n        if (getConfig().get('appearance.showTimer') === false) {\n            statusBarItem.text = '$(debug-stop)'\n            statusBarItem.tooltip = 'Current time: ' + formatDisplayTime(recording.timer)\n        }\n        if (getConfig().get('appearance.showTimer') === true) {\n            statusBarItem.text = '$(debug-stop) ' + formatDisplayTime(recording.timer)\n            statusBarItem.tooltip = 'Stop Recording'\n        }\n        statusBarItem.command = commands.stopRecording\n        statusBarItem.show()\n    } else {\n        const editor = vscode.window.activeTextEditor\n        if (!editor) {\n            statusBarItem.hide()\n            return\n        }\n        if (getConfig().get('appearance.minimalMode') === true) {\n            statusBarItem.text = '$(circle-large-filled)'\n        } else {\n            statusBarItem.text = '$(circle-large-filled) Start Recording'\n        }\n        statusBarItem.tooltip = 'Start Recording'\n        statusBarItem.command = commands.startRecording\n        statusBarItem.show()\n    }\n}\n\n/**\n * Creates and updates the panic button status bar item.\n */\nexport function updatePanicButton(): void {\n    if (!recording.isRecording) {\n        if (panicStatusBarItem) {\n            panicStatusBarItem.hide()\n        }\n        return\n    }\n\n    // Create panic button if it doesn't exist\n    if (!panicStatusBarItem) {\n        panicStatusBarItem = vscode.window.createStatusBarItem(vscode.StatusBarAlignment.Right, 8999) // 
Position it to the left of the recording button\n        extContext.subscriptions.push(panicStatusBarItem)\n    }\n\n    const secondsToRemove = (panicButtonPressCount + 1) * 10 // Show what the next press will remove\n    panicStatusBarItem.text = '$(refresh)'\n    panicStatusBarItem.tooltip = `Remove last ${secondsToRemove} seconds of recording (click again within 3 seconds to remove more)`\n    panicStatusBarItem.command = commands.panicButton\n    panicStatusBarItem.show()\n}\n\n/**\n * Deletes the last N seconds of recording data from the CSV file.\n * This is a ""panic button"" feature that allows users to quickly remove recent sensitive data.\n * Each successive press within 3 seconds removes more time: 10s, 20s, 30s, etc.\n * After 3 seconds of inactivity, the next press will be treated as a fresh press (10s).\n */\nexport async function panicButton(): Promise<void> {\n    if (!recording.isRecording) {\n        vscode.window.showWarningMessage('No active recording to remove data from')\n        logToOutput('No active recording to remove data from', 'info')\n        return\n    }\n\n    if (!recording.startDateTime) {\n        vscode.window.showErrorMessage('Recording start time not available')\n        logToOutput('Recording start time not available', 'error')\n        return\n    }\n\n    const exportPath = getExportPath()\n    if (!exportPath) {\n        vscode.window.showErrorMessage('Export path not available')\n        logToOutput('Export path not available', 'error')\n        return\n    }\n\n    const baseFilePath = generateBaseFilePath(recording.startDateTime, false, recording.customFolderName, sessionUuid)\n    if (!baseFilePath) {\n        vscode.window.showErrorMessage('Could not generate file path')\n        logToOutput('Could not generate file path', 'error')\n        return\n    }\n\n    const filePath = path.join(exportPath, `${baseFilePath}.csv`)\n\n    try {\n        // Check if file exists\n        if (!fs.existsSync(filePath)) {\n            vscode.window.showWarningMessage('No recording file found to remove data from')\n            logToOutput('No recording file found to remove data from', 'info')\n            return\n        }\n\n        // Read the file\n        const content = fs.readFileSync(filePath, 'utf-8')\n        const lines = content.split('\n')\n        \n        if (lines.length <= 1) {\n            vscode.window.showWarningMessage('Recording file is empty, nothing to remove')\n            logToOutput('Recording file is empty, nothing to remove', 'info')\n            return\n        }\n\n        // Calculate how many lines to remove (10 seconds per press)\n        const linesToRemove = Math.min((panicButtonPressCount + 1) * 10, lines.length - 1)\n        const newLines = lines.slice(0, lines.length - linesToRemove)\n        \n        // Capture the lines that will be removed for display\n        const removedLines = lines.slice(lines.length - linesToRemove)\n\n        // Write back to file\n        fs.writeFileSync(filePath, newLines.join('\n'))\n\n        // Update panic button state\n        panicButtonPressCount++\n        \n        // Set up timeout to reset the counter after 3 seconds of inactivity\n        if (panicButtonTimeoutId) {\n            clearTimeout(panicButtonTimeoutId)\n        }\n        panicButtonTimeoutId = setTimeout(() => {\n            panicButtonPressCount = 0\n            accumulatedRemovedContent = [] // Clear accumulated content\n            updatePanicButton()\n        }, PANIC_BUTTON_TIMEOUT)\n        \n        updatePanicButton()\n\n        const secondsToRemove = panicButtonPressCount * 10\n        const actualLinesRemoved = lines.length - newLines.length\n        \n        // Accumulate removed content and show immediate popup\n        if (removedLines.length > 0) {\n            const nonEmptyLines = removedLines.filter(line => line.trim())\n            if (nonEmptyLines.length > 0) {\n                // Create a simple, readable summary of removed content\n                const contentSummary = nonEmptyLines.map(line => {\n                    // Extract just 
the text content from CSV for cleaner display\n const parts = line.split(',')\n if (parts.length >= 6) {\n const textContent = parts[5].replace(/^""|""$/g, '') // Remove quotes\n // Clean up common escape sequences\n const cleanText = textContent\n .replace(/\\n/g, '\n')\n .replace(/\\t/g, '\t')\n .replace(/\\r/g, '\r')\n return { content: cleanText, sequence: Number.parseInt(parts[0]) }\n }\n return { content: line, sequence: Number.parseInt(line.split(',')[0]) }\n }).filter(item => item.content.trim().length > 0)\n \n // Add to accumulated content\n accumulatedRemovedContent.push(...contentSummary)\n \n // Sort by sequence number to show in original file order\n const sortedContent = accumulatedRemovedContent.sort((a, b) => a.sequence - b.sequence)\n \n // Show immediate popup with accumulated content\n const totalContent = sortedContent.map(item => item.content).join(' ')\n const summaryText = totalContent.length > 100 \n ? totalContent.substring(0, 100) + '...' \n : totalContent\n \n vscode.window.showInformationMessage(\n `Removed content: ""${summaryText}""`,\n 'Dismiss'\n )\n }\n }\n\n } catch (error) {\n const errorMessage = `Error during panic button operation: ${error}`\n vscode.window.showErrorMessage(errorMessage)\n logToOutput(errorMessage, 'error')\n }\n}",typescript,tab +2,78,"extension-output-pdoom-org.crowd-code-#5-crowd-code",0,0,"10:50:32 PM [info] Activating crowd-code\n10:50:32 PM [info] Recording started\n10:50:32 PM [info] Initializing git provider using file system watchers...\n10:50:32 PM [info] Git repository found\n10:50:32 PM [info] Git provider initialized successfully\n10:50:32 PM [info] Initial git state: [object Object]\n",Log,tab +3,2192,"src/recording.ts",0,0,"",typescript,tab +4,2193,"src/recording.ts",10135,0,"",typescript,selection_mouse +5,256675,"src/recording.ts",9085,0,"",typescript,selection_command +6,310049,"src/recording.ts",9119,0,"",typescript,selection_command +7,310303,"src/recording.ts",9172,0,"",typescript,selection_command +8,310329,"src/recording.ts",9214,0,"",typescript,selection_command +9,310363,"src/recording.ts",9261,0,"",typescript,selection_command +10,310399,"src/recording.ts",9291,0,"",typescript,selection_command +11,310431,"src/recording.ts",9315,0,"",typescript,selection_command +12,310681,"src/recording.ts",9386,0,"",typescript,selection_command +13,311616,"src/recording.ts",9476,0,"",typescript,selection_command +14,313315,"src/recording.ts",9454,78,"",typescript,content +15,313342,"src/recording.ts",9466,0,"",typescript,selection_command +16,314505,"src/recording.ts",9466,0," logToOutput(`Successfully sent to Lambda endpoint.`, 'info');\n ",typescript,content +17,314508,"src/recording.ts",9476,0,"",typescript,selection_command +18,316423,"src/recording.ts",9454,78,"",typescript,content +19,316439,"src/recording.ts",9466,0,"",typescript,selection_command +20,317197,"src/recording.ts",9480,0,"",typescript,selection_command +21,317443,"src/recording.ts",9511,0,"",typescript,selection_command +22,317478,"src/recording.ts",9554,0,"",typescript,selection_command +23,317508,"src/recording.ts",9654,0,"",typescript,selection_command +24,317544,"src/recording.ts",9675,0,"",typescript,selection_command +25,317578,"src/recording.ts",9756,0,"",typescript,selection_command +26,317611,"src/recording.ts",9823,0,"",typescript,selection_command +27,317645,"src/recording.ts",9908,0,"",typescript,selection_command +28,317679,"src/recording.ts",9989,0,"",typescript,selection_command 
+29,317794,"src/recording.ts",10007,0,"",typescript,selection_command +30,318048,"src/recording.ts",10017,0,"",typescript,selection_command +31,318077,"src/recording.ts",10031,0,"",typescript,selection_command +32,318441,"src/recording.ts",10030,0,"",typescript,selection_command +33,318590,"src/recording.ts",10028,0,"",typescript,selection_command +34,318767,"src/recording.ts",10026,0,"",typescript,selection_command +35,319213,"src/recording.ts",10026,1,"5",typescript,content +36,320144,"src/recording.ts",10016,0,"",typescript,selection_command +37,320398,"src/recording.ts",10002,0,"",typescript,selection_command +38,320427,"src/recording.ts",9984,0,"",typescript,selection_command +39,320465,"src/recording.ts",9903,0,"",typescript,selection_command +40,320493,"src/recording.ts",9818,0,"",typescript,selection_command +41,320528,"src/recording.ts",9751,0,"",typescript,selection_command +42,320562,"src/recording.ts",9670,0,"",typescript,selection_command +43,320594,"src/recording.ts",9649,0,"",typescript,selection_command +44,320629,"src/recording.ts",9549,0,"",typescript,selection_command +45,320662,"src/recording.ts",9506,0,"",typescript,selection_command +46,320698,"src/recording.ts",9475,0,"",typescript,selection_command +47,321493,"src/recording.ts",9461,0,"",typescript,selection_command +48,321915,"src/recording.ts",9371,0,"",typescript,selection_command +49,322434,"src/recording.ts",9300,0,"",typescript,selection_command +50,329951,"src/recording.ts",9281,0,"",typescript,selection_command +51,330204,"src/recording.ts",9246,0,"",typescript,selection_command +52,330230,"src/recording.ts",9199,0,"",typescript,selection_command +53,330375,"src/recording.ts",9157,0,"",typescript,selection_command +54,330540,"src/recording.ts",9104,0,"",typescript,selection_command +55,330712,"src/recording.ts",9157,0,"",typescript,selection_command +56,330910,"src/recording.ts",9170,0,"",typescript,selection_command +57,330953,"src/recording.ts",9117,0,"",typescript,selection_command +58,331166,"src/recording.ts",9083,0,"",typescript,selection_command +59,331302,"src/recording.ts",9085,0,"",typescript,selection_command +60,331503,"src/recording.ts",9093,0,"",typescript,selection_command +61,331610,"src/recording.ts",9127,0,"",typescript,selection_command +62,331751,"src/recording.ts",9130,0,"",typescript,selection_command +63,350255,"src/recording.ts",9183,0,"",typescript,selection_command +64,350406,"src/recording.ts",9225,0,"",typescript,selection_command +65,350598,"src/recording.ts",9272,0,"",typescript,selection_command +66,352808,"src/recording.ts",9225,0,"",typescript,selection_command +67,354872,"src/recording.ts",9183,0,"",typescript,selection_command +68,359461,"src/recording.ts",9130,0,"",typescript,selection_command +69,359617,"src/recording.ts",9095,0,"",typescript,selection_command +70,359673,"src/recording.ts",9117,0,"",typescript,selection_command +71,359794,"src/recording.ts",9083,0,"",typescript,selection_command +72,359976,"src/recording.ts",9052,0,"",typescript,selection_command +73,360191,"src/recording.ts",9031,0,"",typescript,selection_command +74,360447,"src/recording.ts",8972,0,"",typescript,selection_command +75,360582,"src/recording.ts",8982,0,"",typescript,selection_command +76,360831,"src/recording.ts",8984,0,"",typescript,selection_command +77,360860,"src/recording.ts",8990,0,"",typescript,selection_command +78,360894,"src/recording.ts",8992,0,"",typescript,selection_command +79,360925,"src/recording.ts",8993,0,"",typescript,selection_command 
+80,360961,"src/recording.ts",9001,0,"",typescript,selection_command +81,360991,"src/recording.ts",9002,0,"",typescript,selection_command +82,361027,"src/recording.ts",9010,0,"",typescript,selection_command +83,361058,"src/recording.ts",9011,0,"",typescript,selection_command +84,361092,"src/recording.ts",9019,0,"",typescript,selection_command +85,361127,"src/recording.ts",9021,0,"",typescript,selection_command +86,361386,"src/recording.ts",9019,0,"",typescript,selection_command +87,361536,"src/recording.ts",9011,0,"",typescript,selection_command +88,371148,"src/recording.ts",9031,0,"",typescript,selection_command +89,371398,"src/recording.ts",9061,0,"",typescript,selection_command +90,371425,"src/recording.ts",9095,0,"",typescript,selection_command +91,371594,"src/recording.ts",9148,0,"",typescript,selection_command +92,372005,"src/recording.ts",9147,0,"",typescript,selection_command +93,372143,"src/recording.ts",9144,0,"",typescript,selection_command +94,372278,"src/recording.ts",9142,0,"",typescript,selection_command +95,372403,"src/recording.ts",9130,0,"",typescript,selection_command +96,373188,"src/recording.ts",8762,0,"",typescript,selection_command +97,373475,"src/recording.ts",6202,0,"",typescript,selection_command +98,401363,"src/recording.ts",6203,0,"",typescript,selection_mouse +99,471528,"src/recording.ts",6144,72,"    const folderPath = path.dirname(path.join(exportPath, baseFilePath))",typescript,selection_command +100,472180,"src/recording.ts",6203,0,"",typescript,selection_command +101,474540,"src/recording.ts",6202,0,"",typescript,selection_command +102,474720,"src/recording.ts",6200,0,"",typescript,selection_command +103,474892,"src/recording.ts",6190,0,"",typescript,selection_command +104,475017,"src/recording.ts",6189,0,"",typescript,selection_command +105,475195,"src/recording.ts",6185,0,"",typescript,selection_command +106,475350,"src/recording.ts",6184,0,"",typescript,selection_command +107,475507,"src/recording.ts",6180,0,"",typescript,selection_command +108,475678,"src/recording.ts",6179,0,"",typescript,selection_command +109,475981,"src/recording.ts",6172,0,"",typescript,selection_command +110,476576,"src/recording.ts",6215,0,"",typescript,selection_command +111,563556,"src/recording.ts",6148,0,"",typescript,selection_command +112,563913,"src/recording.ts",6154,0,"",typescript,selection_command +113,565484,"src/recording.ts",6165,0,"",typescript,selection_command +114,565741,"src/recording.ts",6167,0,"",typescript,selection_command +115,565766,"src/recording.ts",6171,0,"",typescript,selection_command +295,2976491,"src/recording.ts",28645,0,"\n",typescript,content +296,2976613,"src/recording.ts",28646,0,"s",typescript,content +297,2976614,"src/recording.ts",28647,0,"",typescript,selection_keyboard +298,2976738,"src/recording.ts",28647,0,"u",typescript,content +299,2976741,"src/recording.ts",28648,0,"",typescript,selection_keyboard +300,2976798,"src/recording.ts",28648,0,"p",typescript,content +301,2976801,"src/recording.ts",28649,0,"",typescript,selection_keyboard +302,2976934,"src/recording.ts",28649,0,"e",typescript,content +303,2976957,"src/recording.ts",28650,0,"",typescript,selection_keyboard +304,2976992,"src/recording.ts",28650,0,"r",typescript,content +305,2976997,"src/recording.ts",28651,0,"",typescript,selection_keyboard +306,2977200,"src/recording.ts",28651,0,"s",typescript,content +307,2977202,"src/recording.ts",28652,0,"",typescript,selection_keyboard +308,2977525,"src/recording.ts",28652,0,"e",typescript,content 
+309,2977530,"src/recording.ts",28653,0,"",typescript,selection_keyboard +310,2977634,"src/recording.ts",28653,0,"c",typescript,content +311,2977635,"src/recording.ts",28654,0,"",typescript,selection_keyboard +312,2977825,"src/recording.ts",28654,0,"r",typescript,content +313,2977826,"src/recording.ts",28655,0,"",typescript,selection_keyboard +314,2977896,"src/recording.ts",28655,0,"e",typescript,content +324,2997670,"src/recording.ts",28659,0,"",typescript,selection_command +325,2998126,"src/recording.ts",28646,14,"",typescript,content +326,3005814,"src/recording.ts",28646,0,"s",typescript,content +327,3005815,"src/recording.ts",28647,0,"",typescript,selection_keyboard +328,3005883,"src/recording.ts",28647,0,"e",typescript,content +329,3005888,"src/recording.ts",28648,0,"",typescript,selection_keyboard +330,3006331,"src/recording.ts",28648,0,"c",typescript,content +331,3006335,"src/recording.ts",28649,0,"",typescript,selection_keyboard +332,3006535,"src/recording.ts",28649,0,"r",typescript,content +333,3006538,"src/recording.ts",28650,0,"",typescript,selection_keyboard +334,3006577,"src/recording.ts",28650,0,"e",typescript,content +335,3006581,"src/recording.ts",28651,0,"",typescript,selection_keyboard +336,3006735,"src/recording.ts",28651,0,"t",typescript,content +337,3006738,"src/recording.ts",28652,0,"",typescript,selection_keyboard +338,3006976,"src/recording.ts",28652,0,"p",typescript,content +339,3006979,"src/recording.ts",28653,0,"",typescript,selection_keyboard +340,3007037,"src/recording.ts",28653,0,"a",typescript,content +380,3137765,"src/recording.ts",28814,0,"",typescript,selection_command +381,3138176,"src/recording.ts",28647,168,"",typescript,content +382,3140498,"src/recording.ts",28647,0,"s",typescript,content +383,3140500,"src/recording.ts",28648,0,"",typescript,selection_keyboard +384,3140565,"src/recording.ts",28648,0,"e",typescript,content +385,3140566,"src/recording.ts",28649,0,"",typescript,selection_keyboard +386,3140680,"src/recording.ts",28649,0,"c",typescript,content +387,3140684,"src/recording.ts",28650,0,"",typescript,selection_keyboard +388,3140885,"src/recording.ts",28650,0,"r",typescript,content +389,3140889,"src/recording.ts",28651,0,"",typescript,selection_keyboard +390,3140958,"src/recording.ts",28651,0,"e",typescript,content +391,3140962,"src/recording.ts",28652,0,"",typescript,selection_keyboard +392,3141094,"src/recording.ts",28652,0,"t",typescript,content +393,3141097,"src/recording.ts",28653,0,"",typescript,selection_keyboard +394,3141268,"src/recording.ts",28653,0,"p",typescript,content +395,3141270,"src/recording.ts",28654,0,"",typescript,selection_keyboard +396,3141340,"src/recording.ts",28654,0,"a",typescript,content +397,3141341,"src/recording.ts",28655,0,"",typescript,selection_keyboard +398,3141375,"src/recording.ts",28655,0,"s",typescript,content +399,3141376,"src/recording.ts",28656,0,"",typescript,selection_keyboard +400,3141534,"src/recording.ts",28656,0,"s",typescript,content +401,3141534,"src/recording.ts",28657,0,"",typescript,selection_keyboard +402,3141721,"src/recording.ts",28657,0,"w",typescript,content +403,3141721,"src/recording.ts",28658,0,"",typescript,selection_keyboard +404,3141921,"src/recording.ts",28658,0,"o",typescript,content +405,3141921,"src/recording.ts",28659,0,"",typescript,selection_keyboard +406,3141976,"src/recording.ts",28659,0,"r",typescript,content +407,3141976,"src/recording.ts",28660,0,"",typescript,selection_keyboard 
+408,3142099,"src/recording.ts",28660,0,"d",typescript,content +418,3167843,"src/recording.ts",28660,0,"",typescript,selection_command +419,3168521,"src/recording.ts",28646,15,"",typescript,content +420,3169730,"src/recording.ts",28645,1,"",typescript,content +421,3169747,"src/recording.ts",28644,0,"",typescript,selection_command diff --git a/927a8af5474e5654810c00ce2e09fd2de87d3e5722f33fa1090d867db114e403/crowd-code-000d5684-56eb-441c-a6df-7ac4df8ff5c71752846982966-2025_07_18-15.57.40.939/source.csv b/927a8af5474e5654810c00ce2e09fd2de87d3e5722f33fa1090d867db114e403/crowd-code-000d5684-56eb-441c-a6df-7ac4df8ff5c71752846982966-2025_07_18-15.57.40.939/source.csv new file mode 100644 index 0000000000000000000000000000000000000000..0396f1d7328ceaa3419d98949b425c2db441f43e --- /dev/null +++ b/927a8af5474e5654810c00ce2e09fd2de87d3e5722f33fa1090d867db114e403/crowd-code-000d5684-56eb-441c-a6df-7ac4df8ff5c71752846982966-2025_07_18-15.57.40.939/source.csv @@ -0,0 +1,1949 @@ +Sequence,Time,File,RangeOffset,RangeLength,Text,Language,Type +2,508,"extension-output-pdoom-org.crowd-code-#1-crowd-code",0,0,"3:57:40 PM [info] Activating crowd-code\n3:57:40 PM [info] Recording started\n3:57:40 PM [info] Initializing git provider using file system watchers...\n3:57:41 PM [info] Git repository found\n3:57:41 PM [info] Git provider initialized successfully\n3:57:41 PM [info] Initial git state: [object Object]\n",Log,tab +3,3437,"TERMINAL",0,0,"/bin/python3 /hkfs/home/project/hk-project-p0023960/tum_cte0515/.cursor-server/extensions/ms-python.python-2024.12.3-linux-x64/python_files/printEnvVariablesToFile.py /hkfs/home/project/hk-project-p0023960/tum_cte0515/.cursor-server/extensions/ms-python.python-2024.12.3-linux-x64/python_files/deactivate/bash/envVars.txt",,terminal_command +4,3487,"TERMINAL",0,0,"]633;E;2025-07-18 15:57:44 /bin/python3 /hkfs/home/project/hk-project-p0023960/tum_cte0515/.cursor-server/extensions/ms-python.python-2024.12.3-linux-x64/python_files/printEnvVariablesToFile.py /hkfs/home/project/hk-project-p0023960/tum_cte0515/.cursor-server/extensions/ms-python.python-2024.12.3-linux-x64/python_files/deactivate/bash/envVars.txt;129dcca6-7a0e-4cb3-bccf-f5dc5a21abf7]633;C",,terminal_output +5,3538,"TERMINAL",0,0,"]0;tum_cte0515@hkn1990:/hkfs/home/project/hk-project-p0023960/tum_cte0515/.cursor-server/extensions/ms-python.python-2024.12.3-linux-x64/python_files/deactivate/bash]633;D;0",,terminal_output +6,25450,"genie.py",0,0,"from typing import Dict, Any\n\nimport optax\nimport jax\nimport jax.numpy as jnp\nimport flax.linen as nn\nfrom flax.training.train_state import TrainState\nimport orbax.checkpoint as ocp\n\nfrom models.dynamics import DynamicsMaskGIT, DynamicsAutoregressive\nfrom models.lam import LatentActionModel\nfrom models.tokenizer import TokenizerVQVAE\n\nimport os\nimport grain\n\n\nclass Genie(nn.Module):\n    """"""Genie model""""""\n\n    # --- Tokenizer ---\n    in_dim: int\n    tokenizer_dim: int\n    latent_patch_dim: int\n    num_patch_latents: int\n    patch_size: int\n    tokenizer_num_blocks: int\n    tokenizer_num_heads: int\n    # --- LAM ---\n    lam_dim: int\n    latent_action_dim: int\n    num_latent_actions: int\n    lam_patch_size: int\n    lam_num_blocks: int\n    lam_num_heads: int\n    lam_co_train: bool\n    # --- Dynamics ---\n    dyna_dim: int\n    dyna_num_blocks: int\n    dyna_num_heads: int\n    use_maskgit: bool\n    param_dtype: jnp.dtype\n    dtype: jnp.dtype\n    dropout: float = 0.0\n    mask_limit: float = 0.0\n\n    def setup(self):\n        self.tokenizer = TokenizerVQVAE(\n            in_dim=self.in_dim,\n            model_dim=self.tokenizer_dim,\n            
latent_dim=self.latent_patch_dim,\n num_latents=self.num_patch_latents,\n patch_size=self.patch_size,\n num_blocks=self.tokenizer_num_blocks,\n num_heads=self.tokenizer_num_heads,\n dropout=0.0,\n codebook_dropout=0.0,\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n )\n self.lam = LatentActionModel(\n in_dim=self.in_dim,\n model_dim=self.lam_dim,\n latent_dim=self.latent_patch_dim,\n num_latents=self.num_latent_actions,\n patch_size=self.lam_patch_size,\n num_blocks=self.lam_num_blocks,\n num_heads=self.lam_num_heads,\n dropout=0.0,\n codebook_dropout=0.0,\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n )\n\n if self.use_maskgit:\n self.dynamics = DynamicsMaskGIT(\n model_dim=self.dyna_dim,\n num_latents=self.num_patch_latents,\n num_blocks=self.dyna_num_blocks,\n num_heads=self.dyna_num_heads,\n dropout=self.dropout,\n mask_limit=self.mask_limit,\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n ) \n else:\n self.dynamics = DynamicsAutoregressive(\n model_dim=self.dyna_dim,\n num_latents=self.num_patch_latents,\n num_blocks=self.dyna_num_blocks,\n num_heads=self.dyna_num_heads,\n dropout=self.dropout,\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n )\n\n def __call__(self, batch: Dict[str, Any], training: bool = True) -> Dict[str, Any]:\n tokenizer_outputs = self.tokenizer.vq_encode(batch[""videos""], training=False)\n lam_outputs = self.lam.vq_encode(batch[""videos""], training=False)\n latent_actions = jax.lax.cond(\n self.lam_co_train,\n lambda: lam_outputs[""z_q""],\n lambda: jax.lax.stop_gradient(lam_outputs[""z_q""])\n )\n outputs = dict(\n video_tokens=jax.lax.stop_gradient(tokenizer_outputs[""indices""]),\n latent_actions=latent_actions,\n )\n outputs[""mask_rng""] = batch[""mask_rng""]\n dyna_outputs = self.dynamics(outputs, training)\n outputs.update(dyna_outputs)\n mle_indices = jnp.argmax(outputs[""token_logits""], axis=-1)\n outputs[""recon""] = self.tokenizer.decode(\n mle_indices, batch[""videos""].shape[2:4]\n )\n outputs[""lam_indices""] = lam_outputs[""indices""]\n return outputs\n\n\n def sample_causal(\n self,\n batch: Dict[str, Any],\n seq_len: int,\n temperature: float = 1,\n sample_argmax: bool = False,\n ):\n """"""\n Autoregressively samples up to `seq_len` future frames using the causal transformer backend.\n\n - Input frames are tokenized once.\n - Future frames are generated one at a time, each conditioned on all previous frames.\n - All frames are detokenized in a single pass at the end.\n\n Args:\n batch: Dict with at least ""videos"" (B, T, H, W, C)\n seq_len: total number of frames to generate (including context)\n temperature: sampling temperature\n sample_argmax: if True, use argmax instead of sampling\n\n Returns:\n Generated video frames (B, seq_len, H, W, C)\n """"""\n # --- Encode context frames ---\n tokenizer_out = self.tokenizer.vq_encode(batch[""videos""], training=False)\n token_idxs = tokenizer_out[""indices""] # (B, T, N)\n B, T, N = token_idxs.shape\n\n # jax.debug.print(""token_idxs shape: {}"", token_idxs.shape)\n # --- Prepare initial token sequence ---\n # Pad with zeros for future frames\n pad_shape = (B, seq_len - T, N)\n token_idxs_full = jnp.concatenate(\n [token_idxs, jnp.zeros(pad_shape, dtype=token_idxs.dtype)], axis=1\n ) # (B, seq_len, N)\n\n # --- Prepare latent actions ---\n action_tokens = self.lam.vq.get_codes(batch[""latent_actions""]) # (B, S-1, )\n # --- Autoregressive generation loop ---\n rng = batch[""rng""]\n for t in range(T, seq_len):\n for n in range(32):\n dyna_inputs = {\n ""video_tokens"": 
token_idxs_full,\n ""latent_actions"": action_tokens\n }\n # jax.debug.print(""token_idxs_full 0: {}"", token_idxs_full[0,:,0])\n dyna_outputs = self.dynamics(dyna_inputs, training=False)\n # # dyna_outputs[""token_logits""]: (B, t, N, vocab_size)\n # # We want the logits for the last time step (frame t-1 predicting t)\n jax.debug.breakpoint()\n next_token_logits = dyna_outputs[""token_logits""][:, t, n, :].astype(jnp.float32) # (B, 1, vocab_size)\n\n # Sample or argmax for each patch\n if sample_argmax:\n next_token = jnp.argmax(next_token_logits, axis=-1) # (B, 1)\n else:\n rng, step_rng = jax.random.split(rng)\n next_token = jax.random.categorical(\n step_rng, next_token_logits / temperature, axis=-1\n ) # (B, 1)\n\n # Insert the generated tokens into the sequence\n token_idxs_full = token_idxs_full.at[:, t, n].set(next_token)\n\n # --- Decode all tokens at once at the end ---\n final_frames = self.tokenizer.decode(\n token_idxs_full, video_hw=batch[""videos""].shape[2:4]\n )\n return final_frames\n\n\n @nn.compact\n def sample_maskgit(\n self,\n batch: Dict[str, Any],\n seq_len: int,\n steps: int = 25,\n temperature: float = 1,\n sample_argmax: bool = False,\n ) -> Any:\n """"""\n Autoregressively samples up to `seq_len` future frames, following Figure 8 of the paper.\n\n - Input frames are tokenized once.\n - Future frames are generated autoregressively in token space.\n - All frames are detokenized in a single pass.\n\n Note:\n - For interactive or step-wise sampling, detokenization should occur after each action.\n - To maintain consistent tensor shapes across timesteps, all current and future frames are decoded at every step.\n - Temporal causal structure is preserved by \n a) reapplying the mask before each decoding step.\n b) a temporal causal mask is applied within each ST-transformer block.\n\n Dimension keys:\n B: batch size \n T: number of input (conditioning) frames \n N: patches per frame \n S: sequence length \n A: action space \n D: model latent dimension\n """"""\n # --- Encode videos and actions ---\n tokenizer_out = self.tokenizer.vq_encode(batch[""videos""], training=False)\n token_idxs = tokenizer_out[""indices""] # (B, T, N)\n B, T, N = token_idxs.shape\n pad_shape = (B, seq_len - T, N)\n pad = jnp.zeros(pad_shape, dtype=token_idxs.dtype)\n token_idxs = jnp.concatenate([token_idxs, pad], axis=1) # (B, S, N)\n action_tokens = self.lam.vq.get_codes(batch[""latent_actions""]) \n\n MaskGITLoop = nn.scan(\n MaskGITStep,\n variable_broadcast=""params"",\n split_rngs={""params"": False},\n in_axes=0,\n out_axes=0,\n length=steps,\n )\n \n loop_fn = MaskGITLoop(\n dynamics=self.dynamics,\n tokenizer=self.tokenizer,\n temperature=temperature,\n sample_argmax=sample_argmax,\n steps=steps,\n )\n\n def generation_step_fn(carry, step_t):\n rng, current_token_idxs = carry\n rng, step_rng = jax.random.split(rng)\n\n # Mask current and future frames (i.e., t >= step_t)\n mask = jnp.arange(seq_len) >= step_t # (S,)\n mask = jnp.broadcast_to(mask[None, :, None], (B, seq_len, N)) # (B, S, N)\n mask = mask.astype(bool)\n masked_token_idxs = current_token_idxs * ~mask\n\n # --- Initialize and run MaskGIT loop ---\n init_carry_maskgit = (\n step_rng,\n masked_token_idxs,\n mask,\n action_tokens,\n )\n final_carry_maskgit, _ = loop_fn(init_carry_maskgit, jnp.arange(steps))\n updated_token_idxs = final_carry_maskgit[1]\n new_carry = (rng, updated_token_idxs)\n return new_carry, None\n\n # --- Run the autoregressive generation using scan ---\n initial_carry = (batch[""rng""], token_idxs)\n 
timesteps_to_scan = jnp.arange(T, seq_len)\n final_carry, _ = jax.lax.scan(\n generation_step_fn,\n initial_carry,\n timesteps_to_scan\n )\n final_token_idxs = final_carry[1]\n\n # --- Decode all tokens at once at the end ---\n final_frames = self.tokenizer.decode(\n final_token_idxs,\n video_hw=batch[""videos""].shape[2:4],\n )\n return final_frames\n\n def vq_encode(self, batch, training) -> Dict[str, Any]:\n # --- Preprocess videos ---\n lam_output = self.lam.vq_encode(batch[""videos""], training=training)\n return lam_output[""indices""]\n\n\nclass MaskGITStep(nn.Module):\n dynamics: nn.Module\n tokenizer: nn.Module\n temperature: float\n sample_argmax: bool\n steps: int\n\n @nn.compact\n def __call__(self, carry, x):\n rng, token_idxs, mask, action_tokens = carry\n step = x\n N = token_idxs.shape[2]\n\n # --- Construct + encode video ---\n vid_embed = self.dynamics.patch_embed(token_idxs) # (B, S, N, D)\n mask_token = self.dynamics.mask_token # (1, 1, 1, D,)\n mask_expanded = mask[..., None] # (B, S, N, 1) \n vid_embed = jnp.where(mask_expanded, mask_token, vid_embed)\n\n # --- Predict transition ---\n act_embed = self.dynamics.action_up(action_tokens)\n vid_embed += jnp.pad(act_embed, ((0, 0), (1, 0), (0, 0), (0, 0)))\n unmasked_ratio = jnp.cos(jnp.pi * (step + 1) / (self.steps * 2))\n step_temp = self.temperature * (1.0 - unmasked_ratio)\n final_logits = self.dynamics.dynamics(vid_embed) / step_temp\n\n # --- Sample new tokens for final frame ---\n if self.sample_argmax:\n sampled_token_idxs = jnp.argmax(final_logits, axis=-1)\n else:\n rng, _rng = jax.random.split(rng)\n sampled_token_idxs = jax.random.categorical(_rng, final_logits)\n gather_fn = jax.vmap(jax.vmap(jax.vmap(lambda x, y: x[y])))\n final_token_probs = gather_fn(jax.nn.softmax(final_logits), sampled_token_idxs)\n final_token_probs += ~mask\n # Update masked tokens only\n token_idxs = jnp.where(mask, sampled_token_idxs, token_idxs)\n\n # --- Update mask ---\n num_unmasked_tokens = jnp.round(N * (1.0 - unmasked_ratio)).astype(int)\n idx_mask = jnp.arange(final_token_probs.shape[-1]) > num_unmasked_tokens\n sorted_idxs = jnp.argsort(final_token_probs, axis=-1, descending=True)\n mask_update_fn = jax.vmap(lambda msk, ids: msk.at[ids].set(idx_mask))\n new_mask = mask_update_fn(mask, sorted_idxs)\n\n new_carry = (rng, token_idxs, new_mask, action_tokens)\n return new_carry, None\n\ndef restore_genie_components(\n train_state: TrainState,\n sharding: jax.sharding.NamedSharding,\n grain_iterator: grain.DataLoaderIterator,\n inputs: Dict[str, jax.Array],\n rng: jax.Array,\n args,\n):\n """"""Restore pre-trained Genie components""""""\n rng, _rng = jax.random.split(rng)\n\n # dummy values since we only use tx to initialize the dummy train states\n dummy_tx = optax.adamw(\n learning_rate=optax.constant_schedule(args.max_lr),\n b1=0.9,\n b2=0.9,\n weight_decay=1e-4,\n )\n handler_registry = ocp.handlers.DefaultCheckpointHandlerRegistry()\n handler_registry.add('model_state', ocp.args.StandardRestore, ocp.handlers.StandardCheckpointHandler)\n handler_registry.add('dataloader_state', grain.checkpoint.CheckpointRestore, grain.checkpoint.CheckpointHandler)\n \n\n checkpoint_options = ocp.CheckpointManagerOptions(\n step_format_fixed_length=6,\n )\n tokenizer_checkpoint_manager = ocp.CheckpointManager(\n directory=args.tokenizer_checkpoint,\n options=checkpoint_options,\n handler_registry=handler_registry,\n )\n dummy_tokenizer = TokenizerVQVAE(\n in_dim=args.image_channels,\n model_dim=args.tokenizer_dim,\n 
latent_dim=args.latent_patch_dim,\n num_latents=args.num_patch_latents,\n patch_size=args.patch_size,\n num_blocks=args.tokenizer_num_blocks,\n num_heads=args.tokenizer_num_heads,\n dropout=args.dropout,\n codebook_dropout=args.dropout,\n param_dtype=args.param_dtype,\n dtype=args.dtype,\n )\n tokenizer_init_params = dummy_tokenizer.init(_rng, inputs)\n dummy_tokenizer_train_state = TrainState.create(\n apply_fn=dummy_tokenizer.apply, params=tokenizer_init_params, tx=dummy_tx\n )\n abstract_sharded_tokenizer_state = _create_abstract_sharded_pytree(\n dummy_tokenizer_train_state, sharding\n )\n restored_tokenizer = tokenizer_checkpoint_manager.restore(\n step=tokenizer_checkpoint_manager.latest_step(),\n args=ocp.args.Composite(\n model_state=ocp.args.StandardRestore(abstract_sharded_tokenizer_state),\n dataloader_state=grain.checkpoint.CheckpointRestore(grain_iterator),\n ),\n )[""model_state""]\n restored_tokenizer_params = restored_tokenizer.params[""params""]\n train_state.params[""params""][""tokenizer""].update(restored_tokenizer_params)\n tokenizer_checkpoint_manager.close()\n\n if args.lam_checkpoint:\n lam_checkpoint_manager = ocp.CheckpointManager(\n directory=args.lam_checkpoint,\n options=checkpoint_options,\n handler_registry=handler_registry,\n )\n dummy_lam = LatentActionModel(\n in_dim=args.image_channels,\n model_dim=args.lam_dim,\n latent_dim=args.latent_patch_dim,\n num_latents=args.num_latent_actions,\n patch_size=args.lam_patch_size,\n num_blocks=args.lam_num_blocks,\n num_heads=args.lam_num_heads,\n dropout=args.dropout,\n codebook_dropout=args.dropout,\n param_dtype=args.param_dtype,\n dtype=args.dtype,\n )\n lam_init_params = dummy_lam.init(_rng, inputs)\n dummy_lam_train_state = TrainState.create(\n apply_fn=dummy_lam.apply, params=lam_init_params, tx=dummy_tx\n )\n abstract_sharded_lam_state = _create_abstract_sharded_pytree(\n dummy_lam_train_state, sharding\n )\n restored_lam = lam_checkpoint_manager.restore(\n step=lam_checkpoint_manager.latest_step(),\n args=ocp.args.Composite(\n model_state=ocp.args.StandardRestore(abstract_sharded_lam_state),\n dataloader_state=grain.checkpoint.CheckpointRestore(grain_iterator),\n ),\n )[""model_state""]\n restored_lam_params = restored_lam.params[""params""]\n # Genie does not initialize all LAM modules, thus we omit those extra modules during restoration\n # (f.srambical) FIXME: Currently, this is a small HBM memory crunch since the LAM's decoder is loaded into HBM and immediately dicarded.\n # A workaround would be to restore to host memory first, and only move the weights to HBM after pruning the decoder\n restored_lam_params = {\n k: v\n for k, v in restored_lam_params.items()\n if k in train_state.params[""params""][""lam""]\n }\n train_state.params[""params""][""lam""].update(restored_lam_params)\n lam_checkpoint_manager.close()\n\n return train_state\n\ndef _create_abstract_sharded_pytree(pytree_template, sharding_spec):\n """"""Replaces arrays in a pytree with ShapeDtypeStructs having the given sharding.""""""\n\n def map_fn(leaf_template):\n if hasattr(leaf_template, ""shape"") and hasattr(leaf_template, ""dtype""):\n return jax.ShapeDtypeStruct(\n leaf_template.shape, leaf_template.dtype, sharding=sharding_spec\n )\n return leaf_template\n\n return jax.tree_util.tree_map(map_fn, pytree_template)",python,tab +7,54568,"genie.py",5404,0,"",python,selection_mouse +8,54573,"genie.py",5403,0,"",python,selection_command +9,56039,"genie.py",5354,0,"",python,selection_command 
+10,56226,"genie.py",5270,0,"",python,selection_command +11,56436,"genie.py",5229,0,"",python,selection_command +12,56982,"genie.py",5203,0,"",python,selection_command +13,57182,"genie.py",5199,0,"",python,selection_command +14,57473,"genie.py",5120,0,"",python,selection_command +15,57667,"genie.py",5077,0,"",python,selection_command +16,57848,"genie.py",5037,0,"",python,selection_command +17,58019,"genie.py",4994,0,"",python,selection_command +18,58217,"genie.py",4945,0,"",python,selection_command +19,58369,"genie.py",4877,0,"",python,selection_command +20,58538,"genie.py",4851,0,"",python,selection_command +21,60412,"genie.py",4877,0,"",python,selection_command +22,93112,"genie.py",5129,0,"",python,selection_mouse +23,213248,"train_dynamics.py",0,0,"from dataclasses import dataclass, field\nimport os\n\nimport einops\nfrom flax.training.train_state import TrainState\nfrom jax.sharding import Mesh, PartitionSpec, NamedSharding\nfrom jax.experimental.mesh_utils import create_device_mesh\nimport optax\nimport orbax.checkpoint as ocp\nimport numpy as np\nimport dm_pix as pix\nimport jax\nimport jax.numpy as jnp\nimport tyro\nimport wandb\nimport grain\n\nfrom genie import Genie, restore_genie_components\nfrom utils.dataloader import get_dataloader\nfrom utils.lr_utils import get_lr_schedule\nfrom utils.parameter_utils import count_parameters_by_component\n\n@dataclass\nclass Args:\n # Experiment\n num_steps: int = 200_000\n seed: int = 0\n seq_len: int = 16\n image_channels: int = 3\n image_height: int = 90\n image_width: int = 160\n data_dir: str = """"\n save_ckpt: bool = False\n restore_ckpt: bool = False\n # Optimization\n batch_size: int = 36\n init_lr: float = 0.0\n max_lr: float = 3e-5\n decay_end: float = 0.0\n wsd_decay_steps: int = 10000 # NOTE: wsd_decay_steps will only be used when using a wsd-schedule\n warmup_steps: int = 5000\n lr_schedule : str = ""wsd"" # supported options: wsd, cos\n # Tokenizer\n tokenizer_dim: int = 512\n latent_patch_dim: int = 32\n num_patch_latents: int = 1024\n patch_size: int = 4\n tokenizer_num_blocks: int = 8\n tokenizer_num_heads: int = 8\n tokenizer_checkpoint: str = """"\n # LAM\n lam_dim: int = 512\n latent_action_dim: int = 32\n num_latent_actions: int = 6\n lam_patch_size: int = 16\n lam_num_blocks: int = 8\n lam_num_heads: int = 8\n lam_checkpoint: str = """"\n # Dynamics\n dyna_dim: int = 512\n dyna_num_blocks: int = 12\n dyna_num_heads: int = 8\n dropout: float = 0.0\n mask_limit: float = 0.5\n use_maskgit: bool = False\n param_dtype: jnp.dtype = jnp.float32\n dtype: jnp.dtype = jnp.bfloat16\n # Logging\n log: bool = False\n entity: str = """"\n project: str = """"\n name: str = ""train_dynamics""\n tags: list[str] = field(default_factory=lambda: [""dynamics""])\n log_interval: int = 5\n log_image_interval: int = 250\n ckpt_dir: str = """"\n log_checkpoint_interval: int = 25000\n log_checkpoint_keep_period: int = 20000\n log_gradients: bool = False\n wandb_id: str = """"\n\n\nargs = tyro.cli(Args)\n\n\ndef dynamics_loss_fn(params, state, inputs):\n """"""Compute masked dynamics loss""""""\n inputs[""videos""] = inputs[""videos""].astype(args.dtype) / 255.0\n outputs = state.apply_fn(\n params,\n inputs,\n training=True,\n rngs={""params"": inputs[""rng""], ""dropout"": inputs[""dropout_rng""]},\n )\n mask = outputs[""mask""]\n outputs[""token_logits""] = outputs[""token_logits""].astype(jnp.float32)\n logits = outputs[""token_logits""]\n targets = outputs[""video_tokens""]\n\n # if not args.use_maskgit:\n # logits = 
outputs[""token_logits""][:, :, :-1]\n # targets = outputs[""video_tokens""][:, :, 1:]\n # mask = outputs[""mask""][:, :, 1:] \n ce_loss = optax.softmax_cross_entropy_with_integer_labels(\n logits, targets\n )\n ce_loss = (mask * ce_loss).sum() / mask.sum()\n acc = logits.argmax(-1) == targets\n acc = (mask * acc).sum() / mask.sum()\n select_probs = jax.nn.softmax(logits)\n gt = inputs[""videos""].clip(0, 1).reshape(-1, *inputs[""videos""].shape[2:])\n recon = outputs[""recon""].clip(0, 1).reshape(-1, *outputs[""recon""].shape[2:])\n psnr = pix.psnr(gt, recon).mean() # type: ignore\n ssim = pix.ssim(gt, recon).mean() # type: ignore\n _, index_counts_lam = jnp.unique_counts(\n jnp.ravel(outputs[""lam_indices""]), size=args.num_latent_actions, fill_value=0\n )\n _, index_counts_tokenizer = jnp.unique_counts(\n jnp.ravel(outputs[""video_tokens""]), size=args.num_patch_latents, fill_value=0\n )\n codebook_usage_lam = (index_counts_lam != 0).mean()\n codebook_usage_tokenizer = (index_counts_tokenizer != 0).mean()\n metrics = dict(\n cross_entropy_loss=ce_loss,\n masked_token_accuracy=acc,\n select_logit=logits.max(-1).mean(),\n select_p=select_probs.max(-1).mean(),\n entropy=jax.scipy.special.entr(select_probs).sum(-1).mean(),\n psnr=psnr,\n ssim=ssim,\n codebook_usage_lam=codebook_usage_lam,\n codebook_usage_tokenizer=codebook_usage_tokenizer,\n )\n return ce_loss, (outputs[""recon""], metrics)\n\n\n@jax.jit\ndef train_step(state, inputs):\n """"""Update state and compute metrics""""""\n grad_fn = jax.value_and_grad(dynamics_loss_fn, has_aux=True, allow_int=True)\n (loss, (recon, metrics)), grads = grad_fn(state.params, state, inputs)\n state = state.apply_gradients(grads=grads)\n if args.log_gradients:\n metrics[""gradients_std/""] = jax.tree.map(\n lambda x: x.std(), grads[""params""][""dynamics""]\n )\n return state, loss, recon, metrics\n\n\nif __name__ == ""__main__"":\n jax.distributed.initialize()\n num_devices = jax.device_count()\n if num_devices == 0:\n raise ValueError(""No JAX devices found."")\n print(f""Running on {num_devices} devices."")\n\n if args.batch_size % num_devices != 0:\n raise ValueError(\n f""Global batch size {args.batch_size} must be divisible by ""\n f""number of devices {num_devices}.""\n )\n\n per_device_batch_size_for_init = args.batch_size // num_devices\n\n rng = jax.random.PRNGKey(args.seed)\n\n # --- Initialize model ---\n genie = Genie(\n # Tokenizer\n in_dim=args.image_channels,\n tokenizer_dim=args.tokenizer_dim,\n latent_patch_dim=args.latent_patch_dim,\n num_patch_latents=args.num_patch_latents,\n patch_size=args.patch_size,\n tokenizer_num_blocks=args.tokenizer_num_blocks,\n tokenizer_num_heads=args.tokenizer_num_heads,\n # LAM\n lam_dim=args.lam_dim,\n latent_action_dim=args.latent_action_dim,\n num_latent_actions=args.num_latent_actions,\n lam_patch_size=args.lam_patch_size,\n lam_num_blocks=args.lam_num_blocks,\n lam_num_heads=args.lam_num_heads,\n lam_co_train=not args.lam_checkpoint,\n # Dynamics\n dyna_dim=args.dyna_dim,\n dyna_num_blocks=args.dyna_num_blocks,\n dyna_num_heads=args.dyna_num_heads,\n dropout=args.dropout,\n mask_limit=args.mask_limit,\n use_maskgit=args.use_maskgit,\n param_dtype=args.param_dtype,\n dtype=args.dtype,\n )\n rng, _rng = jax.random.split(rng)\n image_shape = (args.image_height, args.image_width, args.image_channels)\n dummy_inputs = dict(\n videos=jnp.zeros(\n (per_device_batch_size_for_init, args.seq_len, *image_shape),\n dtype=args.dtype,\n ),\n action=jnp.zeros(\n (per_device_batch_size_for_init, args.seq_len), 
dtype=args.dtype\n ),\n mask_rng=_rng,\n )\n rng, _rng = jax.random.split(rng)\n init_params = genie.init(_rng, dummy_inputs)\n\n param_counts = count_parameters_by_component(init_params)\n\n if args.log and jax.process_index() == 0:\n wandb_init_kwargs = {\n ""entity"": args.entity,\n ""project"": args.project,\n ""name"": args.name,\n ""tags"": args.tags,\n ""group"": ""debug"",\n ""config"": args,\n }\n\n if args.wandb_id:\n wandb_init_kwargs.update(\n {\n ""id"": args.wandb_id,\n ""resume"": ""allow"",\n }\n )\n wandb.init(**wandb_init_kwargs)\n\n wandb.config.update({""model_param_count"": param_counts})\n\n print(""Parameter counts:"")\n print(param_counts)\n\n # --- Initialize optimizer ---\n lr_schedule = get_lr_schedule(args.lr_schedule, \n args.init_lr, \n args.max_lr, \n args.decay_end, \n args.num_steps, \n args.warmup_steps, \n args.wsd_decay_steps)\n tx = optax.adamw(learning_rate=lr_schedule, b1=0.9, b2=0.9, weight_decay=1e-4, mu_dtype=args.dtype)\n train_state = TrainState.create(apply_fn=genie.apply, params=init_params, tx=tx)\n\n device_mesh_arr = create_device_mesh((num_devices,))\n mesh = Mesh(devices=device_mesh_arr, axis_names=(""data"",))\n\n replicated_sharding = NamedSharding(mesh, PartitionSpec())\n videos_sharding = NamedSharding(mesh, PartitionSpec(""data"", None, None, None, None))\n train_state = jax.device_put(train_state, replicated_sharding)\n\n # --- Initialize checkpoint manager ---\n step = 0\n handler_registry = ocp.handlers.DefaultCheckpointHandlerRegistry()\n handler_registry.add(\n ""model_state"", ocp.args.StandardSave, ocp.handlers.StandardCheckpointHandler\n )\n handler_registry.add(\n ""model_state"", ocp.args.StandardRestore, ocp.handlers.StandardCheckpointHandler\n )\n handler_registry.add(""dataloader_state"", grain.checkpoint.CheckpointSave, grain.checkpoint.CheckpointHandler) # type: ignore\n handler_registry.add(""dataloader_state"", grain.checkpoint.CheckpointRestore, grain.checkpoint.CheckpointHandler) # type: ignore\n\n checkpoint_options = ocp.CheckpointManagerOptions(\n save_interval_steps=args.log_checkpoint_interval,\n max_to_keep=3,\n keep_period=args.log_checkpoint_keep_period,\n step_format_fixed_length=6,\n cleanup_tmp_directories=True,\n )\n\n checkpoint_manager = ocp.CheckpointManager(\n args.ckpt_dir,\n options=checkpoint_options,\n handler_registry=handler_registry,\n )\n\n # --- Create DataLoaderIterator from dataloader ---\n array_record_files = [\n os.path.join(args.data_dir, x)\n for x in os.listdir(args.data_dir)\n if x.endswith("".array_record"")\n ]\n grain_dataloader = get_dataloader(\n array_record_files,\n args.seq_len,\n # NOTE: We deliberately pass the global batch size\n # The dataloader shards the dataset across all processes\n args.batch_size,\n *image_shape,\n num_workers=8,\n prefetch_buffer_size=1,\n seed=args.seed,\n )\n initial_state = grain_dataloader._create_initial_state()\n grain_iterator = grain.DataLoaderIterator(grain_dataloader, initial_state)\n\n # --- Restore checkpoint ---\n if args.restore_ckpt:\n # Restore full dynamics model\n abstract_train_state = jax.tree_util.tree_map(\n ocp.utils.to_shape_dtype_struct, train_state\n )\n restored = checkpoint_manager.restore(\n checkpoint_manager.latest_step(),\n args=ocp.args.Composite(\n model_state=ocp.args.StandardRestore(abstract_train_state),\n dataloader_state=grain.checkpoint.CheckpointRestore(grain_iterator),\n ),\n )\n train_state = restored[""model_state""]\n grain_iterator = restored[""dataloader_state""]\n step = 
checkpoint_manager.latest_step() or 0\n print(f""Restored dataloader and model state from step {step}"")\n else:\n # Restore from pre-trained tokenizer (and LAM)\n train_state = restore_genie_components(\n train_state, replicated_sharding, grain_iterator, dummy_inputs, rng, args\n )\n\n # --- TRAIN LOOP ---\n dataloader = (jax.make_array_from_process_local_data(videos_sharding, elem) for elem in grain_iterator) # type: ignore\n while step < args.num_steps:\n # for videos in dataloader:\n videos = np.load(""overfit_dir/corner_8repl.npy"")\n videos = jax.make_array_from_process_local_data(videos_sharding, videos)\n while True:\n # --- Train step ---\n rng, _rng, _rng_dropout, _rng_mask = jax.random.split(rng, 4)\n\n inputs = dict(\n videos=videos,\n rng=_rng,\n dropout_rng=_rng_dropout,\n mask_rng=_rng_mask,\n )\n train_state, loss, recon, metrics = train_step(train_state, inputs)\n metrics[""lr""] = lr_schedule(step)\n print(f""Step {step}, loss: {loss}"")\n step += 1\n\n # --- Logging ---\n if args.log:\n if step % args.log_interval == 0 and jax.process_index() == 0:\n wandb.log(\n {\n ""loss"": loss,\n ""step"": step,\n **metrics,\n }\n )\n if step % args.log_image_interval == 0:\n gt_seq = inputs[""videos""][0].astype(jnp.float32) #/ 255.0\n recon_seq = recon[0].clip(0, 1)\n comparison_seq = jnp.concatenate((gt_seq, recon_seq), axis=1)\n comparison_seq = einops.rearrange(\n comparison_seq * 255, ""t h w c -> h (t w) c""\n )\n if jax.process_index() == 0:\n log_images = dict(\n image=wandb.Image(np.asarray(gt_seq[args.seq_len - 1])),\n recon=wandb.Image(np.asarray(recon_seq[args.seq_len - 1])),\n true_vs_recon=wandb.Image(\n np.asarray(comparison_seq.astype(np.uint8))\n ),\n )\n wandb.log(log_images)\n # --- Checkpointing ---\n if args.save_ckpt and step % args.log_checkpoint_interval == 0:\n checkpoint_manager.save(\n step,\n args=ocp.args.Composite(\n model_state=ocp.args.StandardSave(train_state),\n dataloader_state=grain.checkpoint.CheckpointSave(\n grain_iterator\n ),\n ),\n )\n print(f""Saved checkpoint at step {step}"")\n if step >= args.num_steps:\n break\n\n checkpoint_manager.close()\n",python,tab +24,215461,"models/dynamics.py",0,0,"from typing import Dict, Any\n\nimport jax\nimport jax.numpy as jnp\nimport flax.linen as nn\nimport einops\n\nfrom utils.nn import STTransformer\n\n\nclass DynamicsMaskGIT(nn.Module):\n """"""MaskGIT dynamics model""""""\n\n model_dim: int\n num_latents: int\n num_blocks: int\n num_heads: int\n dropout: float\n mask_limit: float\n param_dtype: jnp.dtype\n dtype: jnp.dtype\n\n def setup(self):\n self.dynamics = STTransformer(\n self.model_dim,\n self.num_latents,\n self.num_blocks,\n self.num_heads,\n self.dropout,\n self.param_dtype,\n self.dtype,\n spacial_bert=True,\n )\n self.patch_embed = nn.Embed(self.num_latents, self.model_dim)\n self.mask_token = self.param(\n ""mask_token"",\n nn.initializers.lecun_uniform(),\n (1, 1, 1, self.model_dim),\n )\n self.action_up = nn.Dense(\n self.model_dim,\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n )\n\n def __call__(self, batch: Dict[str, Any], training: bool = True) -> Dict[str, Any]:\n # --- Mask videos ---\n vid_embed = self.patch_embed(batch[""video_tokens""])\n if training:\n rng1, rng2 = jax.random.split(batch[""mask_rng""])\n mask_prob = jax.random.uniform(rng1, minval=self.mask_limit)\n mask = jax.random.bernoulli(rng2, mask_prob, vid_embed.shape[:-1])\n mask = mask.at[:, 0].set(False)\n vid_embed = jnp.where(jnp.expand_dims(mask, -1), self.mask_token, vid_embed)\n else:\n mask = None\n\n 
# --- Predict transition ---\n act_embed = self.action_up(batch[""latent_actions""])\n vid_embed += jnp.pad(act_embed, ((0, 0), (1, 0), (0, 0), (0, 0)))\n logits = self.dynamics(vid_embed)\n return dict(token_logits=logits, mask=mask)\n\n\nclass DynamicsAutoregressive(nn.Module):\n """"""Autoregressive (causal) dynamics model""""""\n\n model_dim: int\n num_latents: int\n num_blocks: int\n num_heads: int\n dropout: float\n param_dtype: jnp.dtype\n dtype: jnp.dtype\n\n def setup(self):\n self.dynamics = STTransformer(\n self.model_dim,\n self.num_latents,\n self.num_blocks,\n self.num_heads,\n self.dropout,\n self.param_dtype,\n self.dtype,\n spacial_bert=False,\n )\n self.patch_embed = nn.Embed(self.num_latents, self.model_dim)\n self.action_up = nn.Dense(\n self.model_dim,\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n )\n\n def __call__(self, batch: Dict[str, Any], training: bool = True) -> Dict[str, Any]:\n vid_embed = self.patch_embed(batch[""video_tokens""])\n act_embed = self.action_up(batch[""latent_actions""])\n vid_embed += jnp.pad(act_embed, ((0, 0), (1, 0), (0, 0), (0, 0)))\n vid_embed_padded = jnp.pad(vid_embed, ((0, 0), (0, 0), (1, 0), (0, 0)))\n logits = self.dynamics(vid_embed_padded)[:,:,1:]\n mask = jnp.ones(vid_embed.shape[:-1])\n return dict(token_logits=logits, mask=mask)",python,tab +25,298004,"models/dynamics.py",3114,0,"",python,selection_mouse +26,298027,"models/dynamics.py",3113,0,"",python,selection_command +27,298891,"models/dynamics.py",3114,0,"",python,selection_mouse +28,298898,"models/dynamics.py",3113,0,"",python,selection_command +29,299430,"models/dynamics.py",3062,0,"",python,selection_mouse +30,300232,"models/dynamics.py",3063,0,"",python,selection_command +31,300553,"models/dynamics.py",3063,0," ",python,content +32,300557,"models/dynamics.py",3064,0,"",python,selection_keyboard +33,301050,"models/dynamics.py",3063,0,"",python,selection_command +34,301140,"models/dynamics.py",3064,0,"",python,selection_command +35,301441,"models/dynamics.py",3065,0,"",python,selection_command +36,301629,"models/dynamics.py",3066,0,"",python,selection_command +37,301926,"models/dynamics.py",3066,0," ",python,content +38,301927,"models/dynamics.py",3067,0,"",python,selection_keyboard +39,302008,"models/dynamics.py",3066,0,"",python,selection_command +40,302408,"models/dynamics.py",3115,0,"",python,selection_command +41,303213,"models/dynamics.py",3167,0,"",python,selection_command +42,303537,"models/dynamics.py",3115,0,"",python,selection_command +43,303787,"models/dynamics.py",3066,0,"",python,selection_command +44,303967,"models/dynamics.py",2986,0,"",python,selection_command +45,304169,"models/dynamics.py",2912,0,"",python,selection_command +46,304316,"models/dynamics.py",2852,0,"",python,selection_command +47,304513,"models/dynamics.py",2792,0,"",python,selection_command +48,304748,"models/dynamics.py",2704,0,"",python,selection_command +49,305034,"models/dynamics.py",2792,0,"",python,selection_command +50,305234,"models/dynamics.py",2852,0,"",python,selection_command +51,305406,"models/dynamics.py",2912,0,"",python,selection_command +52,305576,"models/dynamics.py",2986,0,"",python,selection_command +53,305733,"models/dynamics.py",3066,0,"",python,selection_command +54,305902,"models/dynamics.py",3115,0,"",python,selection_command +55,306297,"models/dynamics.py",3167,0,"",python,selection_command +56,322877,"models/dynamics.py",2998,0,"",python,selection_mouse +57,323355,"models/dynamics.py",3168,0,"",python,selection_mouse 
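A self-contained sketch of the two techniques the recording touches here: the MaskGIT-style random masking from DynamicsMaskGIT.__call__ and the masked cross-entropy normalization that dynamics_loss_fn in train_dynamics.py applies on top of it. Shapes and the function name are illustrative, not the module's API:

import jax
import jax.numpy as jnp
import optax

def mask_and_loss(rng, vid_embed, mask_token, logits, targets, mask_limit=0.5):
    # vid_embed: (B, T, N, D) patch embeddings; logits: (B, T, N, V); targets: (B, T, N)
    rng1, rng2 = jax.random.split(rng)
    # Sample one masking ratio in [mask_limit, 1) for the whole batch.
    mask_prob = jax.random.uniform(rng1, minval=mask_limit)
    mask = jax.random.bernoulli(rng2, mask_prob, vid_embed.shape[:-1])
    mask = mask.at[:, 0].set(False)  # keep the conditioning frame visible
    masked = jnp.where(jnp.expand_dims(mask, -1), mask_token, vid_embed)
    # Average the cross entropy over masked positions only.
    ce = optax.softmax_cross_entropy_with_integer_labels(logits, targets)
    return masked, (mask * ce).sum() / mask.sum()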
+58,323364,"models/dynamics.py",3167,0,"",python,selection_command +59,324132,"models/dynamics.py",3116,0,"",python,selection_mouse +60,324136,"models/dynamics.py",3115,0,"",python,selection_command +61,324789,"models/dynamics.py",3168,0,"",python,selection_mouse +62,324803,"models/dynamics.py",3167,0,"",python,selection_command +63,332836,"models/dynamics.py",3115,0,"",python,selection_command +64,333045,"models/dynamics.py",3062,0,"",python,selection_command +65,333205,"models/dynamics.py",2982,0,"",python,selection_command +66,333351,"models/dynamics.py",2908,0,"",python,selection_command +67,333663,"models/dynamics.py",2982,0,"",python,selection_command +68,333944,"models/dynamics.py",3011,0,"\n ",python,content +69,334896,"models/dynamics.py",3020,0,"j",python,content +70,334897,"models/dynamics.py",3021,0,"",python,selection_keyboard +71,334950,"models/dynamics.py",3021,0,"a",python,content +72,334952,"models/dynamics.py",3022,0,"",python,selection_keyboard +73,335115,"models/dynamics.py",3022,0,"x",python,content +74,335116,"models/dynamics.py",3023,0,"",python,selection_keyboard +75,335236,"models/dynamics.py",3023,0,".",python,content +76,335238,"models/dynamics.py",3024,0,"",python,selection_keyboard +77,335378,"models/dynamics.py",3024,0,"d",python,content +78,335380,"models/dynamics.py",3025,0,"",python,selection_keyboard +79,335575,"models/dynamics.py",3025,0,"e",python,content +80,335577,"models/dynamics.py",3026,0,"",python,selection_keyboard +81,335679,"models/dynamics.py",3026,0,"b",python,content +82,335681,"models/dynamics.py",3027,0,"",python,selection_keyboard +83,335894,"models/dynamics.py",3027,0,"g",python,content +84,335897,"models/dynamics.py",3028,0,"",python,selection_keyboard +85,336177,"models/dynamics.py",3027,1,"",python,content +86,336415,"models/dynamics.py",3027,0,"u",python,content +87,336417,"models/dynamics.py",3028,0,"",python,selection_keyboard +88,336533,"models/dynamics.py",3028,0,"g",python,content +89,336535,"models/dynamics.py",3029,0,"",python,selection_keyboard +90,336685,"models/dynamics.py",3029,0,".",python,content +91,336686,"models/dynamics.py",3030,0,"",python,selection_keyboard +92,337222,"models/dynamics.py",3030,0,"b",python,content +93,337224,"models/dynamics.py",3031,0,"",python,selection_keyboard +94,337326,"models/dynamics.py",3031,0,"r",python,content +95,337328,"models/dynamics.py",3032,0,"",python,selection_keyboard +96,337847,"models/dynamics.py",3030,2,"breakpoint",python,content +97,338694,"models/dynamics.py",3040,0,"()",python,content +98,338696,"models/dynamics.py",3041,0,"",python,selection_keyboard +99,338735,"models/dynamics.py",3041,1,")",python,content +100,338736,"models/dynamics.py",3042,0,"",python,selection_keyboard +101,339721,"models/dynamics.py",3041,0,"",python,selection_command +102,340687,"models/dynamics.py",2961,0,"",python,selection_command +103,340857,"models/dynamics.py",2887,0,"",python,selection_command +104,341147,"models/dynamics.py",2931,0,"\n jax.debug.breakpoint()",python,content +105,341163,"models/dynamics.py",2940,0,"",python,selection_command +106,341334,"models/dynamics.py",2971,0,"",python,selection_command +107,341497,"models/dynamics.py",2940,0,"",python,selection_command +108,341854,"models/dynamics.py",2866,0,"",python,selection_command +109,342160,"models/dynamics.py",2806,0,"",python,selection_command +110,342953,"models/dynamics.py",2857,0,"\n jax.debug.breakpoint()",python,content +111,342967,"models/dynamics.py",2866,0,"",python,selection_command 
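The edits recorded above drop jax.debug.breakpoint() into the dynamics module. Unlike pdb.set_trace(), this call survives jit: it pauses execution and exposes the concrete runtime values of traced arrays. A tiny standalone illustration (the function f is made up):

import jax
import jax.numpy as jnp

@jax.jit
def f(x):
    y = x * x
    jax.debug.breakpoint()  # pauses here with y's concrete runtime value
    return y + 1

# f(jnp.arange(3.0))  # uncomment to actually hit the breakpoint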
+112,343459,"models/dynamics.py",2897,0,"",python,selection_command +113,343650,"models/dynamics.py",2971,0,"",python,selection_command +114,343824,"models/dynamics.py",3002,0,"",python,selection_command +115,343974,"models/dynamics.py",3082,0,"",python,selection_command +116,344255,"models/dynamics.py",3113,0,"",python,selection_command +117,346251,"TERMINAL",0,0,"bash",,terminal_focus +118,348906,"TERMINAL",0,0,"queue",,terminal_command +119,348992,"TERMINAL",0,0,"]633;E;2025-07-18 16:03:29 queue;2fac4da8-d4f0-4d83-a6ce-f6776ed5ed51]633;C[?1049h(B[?7hEvery 1.0s: squeue --mehkn1990.localdomain: Fri Jul 18 16:03:29 2025JOBID PARTITION NAME USER ST\tTIME NODES NODELIST(REASON)3357147 accelerat interact tum_cte0 R 4:19:39\t 1 hkn0720",,terminal_output +120,350035,"TERMINAL",0,0,"3040",,terminal_output +121,351088,"TERMINAL",0,0,"11",,terminal_output +122,351169,"TERMINAL",0,0,"[?1049l\r[?1l>]0;tum_cte0515@hkn1990:~/Projects/jafar]633;D;0",,terminal_output +123,354224,"TERMINAL",0,0,"scancel 3357147",,terminal_command +124,354271,"TERMINAL",0,0,"]633;E;2025-07-18 16:03:35 scancel 3357147;2fac4da8-d4f0-4d83-a6ce-f6776ed5ed51]633;C]0;tum_cte0515@hkn1990:~/Projects/jafar]633;D;0]633;P;Cwd=/home/hk-project-p0023960/tum_cte0515/Projects/jafar",,terminal_output +125,360667,"TERMINAL",0,0,"salloc --time=10:00:00 --partition=accelerated --nodes=2 --ntasks-per-node=4 --gres=gpu:4 --cpus-per-task=5",,terminal_command +126,360739,"TERMINAL",0,0,"]633;E;2025-07-18 16:03:41 salloc --time=10:00:00 --partition=accelerated --nodes=2 --ntasks-per-node=4 --gres=gpu:4 --cpus-per-task=5;2fac4da8-d4f0-4d83-a6ce-f6776ed5ed51]633;Csalloc: Pending job allocation 3357893\r\nsalloc: job 3357893 queued and waiting for resources\r\n",,terminal_output +127,363412,"TERMINAL",0,0,"",,terminal_focus +128,370721,"TERMINAL",0,0,"salloc --time=10:00:00 --partition=accelerated --nodes=1 --ntasks-per-node=1 --gres=gpu:1 --cpus-per-task=5",,terminal_command +129,370777,"TERMINAL",0,0,"]633;E;2025-07-18 16:03:51 salloc --time=10:00:00 --partition=accelerated --nodes=1 --ntasks-per-node=1 --gres=gpu:1 --cpus-per-task=5;9dd50732-d6d7-4d7e-a22c-51ae92e646cb]633;Csalloc: Pending job allocation 3357894\r\nsalloc: job 3357894 queued and waiting for resources\r\n",,terminal_output +130,373133,"TERMINAL",0,0,"bash",,terminal_focus +131,374167,"TERMINAL",0,0,"undefinedjafar[tum_cte0515@hkn1990 jafar]$ queue",,terminal_command +132,374221,"TERMINAL",0,0,"]633;E;2025-07-18 16:03:55 queue;ec0e1a7c-4669-4536-8bdb-633880f5f144]633;C",,terminal_output +133,374303,"TERMINAL",0,0,"[?1049h(B[?7hEvery 1.0s: squeue --mehkn1990.localdomain: Fri Jul 18 16:03:55 2025JOBID PARTITION NAME USER ST\tTIME NODES NODELIST(REASON)3357147 accelerat interact tum_cte0 CG 4:19:45\t 1 hkn07203357894 accelerat interact tum_cte0 PD\t0:00\t 1 (Priority)3357893 accelerat interact tum_cte0 PD\t0:00\t 2 (Priority)",,terminal_output +134,375322,"TERMINAL",0,0,"6\t ",,terminal_output +135,375917,"TERMINAL",0,0,"[?1049l\r[?1l>]0;tum_cte0515@hkn1990:~/Projects/jafar]633;D;0",,terminal_output +136,377984,"TERMINAL",0,0,"idling",,terminal_command +137,378056,"TERMINAL",0,0,"]633;E;2025-07-18 16:03:58 idling;ec0e1a7c-4669-4536-8bdb-633880f5f144]633;C[?1049h(B[?7hEvery 1.0s: sinfo_t_idlehkn1990.localdomain: Fri Jul 18 16:03:58 2025Partition dev_cpuonly:\t 6 nodes idle\rPartition cpuonly: 165 nodes idle\rPartition dev_accelerated:\t 1 nodes idle\rPartition accelerated: 10 nodes idle\rPartition dev_accelerated-h100 :\t 0 nodes idle\rPartition accelerated-h100:\t 0 nodes 
idle\rPartition large:\t 7 nodes idle",,terminal_output +138,379105,"TERMINAL",0,0,"9\t ",,terminal_output +139,380143,"TERMINAL",0,0,"4:00\t ",,terminal_output +140,380823,"TERMINAL",0,0,"salloc",,terminal_focus +141,381206,"TERMINAL",0,0,"1\t ",,terminal_output +142,382218,"TERMINAL",0,0,"3\t ",,terminal_output +143,382289,"TERMINAL",0,0,"salloc",,terminal_focus +144,383262,"TERMINAL",0,0,"4\t ",,terminal_output +145,383496,"TERMINAL",0,0,"salloc",,terminal_focus +146,384293,"TERMINAL",0,0,"5\t ",,terminal_output +147,385331,"TERMINAL",0,0,"6\t ",,terminal_output +148,386371,"TERMINAL",0,0,"75",,terminal_output +149,387237,"slurm/jobs/mihir/horeka/yolo-runs/sampling.sh",0,0,"\n# Log the sbatch script\ncat $0\n\nmodule unload mpi/openmpi/5.0\nmodule unload devel/cuda/12.4\n# source .venv/bin/activate\n\narray_records_dir=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_new/open_ai_minecraft_arrayrecords_chunked\n\njob_name=$SLURM_JOB_NAME\nslurm_job_id=$SLURM_JOB_ID\n\nCHECKPOINT_DIR=$ws_dir/checkpoints/$job_name/$slurm_job_id\nmkdir -p $CHECKPOINT_DIR\n\ntokenizer_ckpt_dir=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/big-runs/tokenizer-lr-scaling/train_tokenizer_lr_sweep_1e-4\ndynamics_ckpt_dir=$1\necho $dynamics_ckpt_dir\n\nenv | grep SLURM\n\npython sample.py \\n --checkpoint $dynamics_ckpt_dir \\n --dyna_dim=128 \\n --dyna_num_blocks=2 \\n --dyna_num_heads=4 \\n --seq_len=2 \\n --data_dir $array_records_dir\n\n",shellscript,tab +150,387429,"TERMINAL",0,0,"8\t ",,terminal_output +151,388449,"TERMINAL",0,0,"9\t ",,terminal_output +152,389599,"TERMINAL",0,0,"10\t ",,terminal_output +153,390533,"TERMINAL",0,0,"1\t ",,terminal_output +154,391591,"TERMINAL",0,0,"2\t ",,terminal_output +155,392542,"slurm/jobs/mihir/horeka/yolo-runs/sampling.sh",612,0,"",shellscript,selection_mouse +156,392558,"slurm/jobs/mihir/horeka/yolo-runs/sampling.sh",611,0,"",shellscript,selection_command +157,392628,"TERMINAL",0,0,"3\t ",,terminal_output +158,393649,"TERMINAL",0,0,"4\t ",,terminal_output +159,393732,"slurm/jobs/mihir/horeka/yolo-runs/sampling.sh",424,0,"",shellscript,selection_mouse +160,394355,"slurm/jobs/mihir/horeka/yolo-runs/sampling.sh",423,7,"scratch",shellscript,selection_mouse +161,394697,"TERMINAL",0,0,"5\t ",,terminal_output +162,395012,"slurm/jobs/mihir/horeka/yolo-runs/sampling.sh",424,0,"",shellscript,selection_mouse +163,395724,"TERMINAL",0,0,"6\t ",,terminal_output +164,396762,"TERMINAL",0,0,"7\t ",,terminal_output +165,397794,"TERMINAL",0,0,"8\t ",,terminal_output +166,398831,"TERMINAL",0,0,"9\t ",,terminal_output +167,399870,"TERMINAL",0,0,"20\t ",,terminal_output +168,400913,"TERMINAL",0,0,"1\t ",,terminal_output +169,401945,"TERMINAL",0,0,"2\t ",,terminal_output +170,402981,"TERMINAL",0,0,"3\t ",,terminal_output +171,404138,"TERMINAL",0,0,"salloc: job 3357894 has been allocated resources\r\nsalloc: Granted job allocation 3357894\r\n",,terminal_output +172,404152,"TERMINAL",0,0,"42 9",,terminal_output +173,404280,"TERMINAL",0,0,"salloc: Waiting for resource configuration\r\n",,terminal_output +174,405226,"TERMINAL",0,0,"5\t ",,terminal_output +175,406230,"TERMINAL",0,0,"7\t ",,terminal_output +176,407276,"TERMINAL",0,0,"8\t ",,terminal_output +177,408301,"TERMINAL",0,0,"9\t ",,terminal_output +178,409338,"TERMINAL",0,0,"30\t ",,terminal_output +179,410365,"TERMINAL",0,0,"1\t ",,terminal_output +180,411399,"TERMINAL",0,0,"2\t ",,terminal_output +181,412431,"TERMINAL",0,0,"3\t ",,terminal_output +182,413468,"TERMINAL",0,0,"4\t ",,terminal_output 
+183,414504,"TERMINAL",0,0,"5\t ",,terminal_output +184,415536,"TERMINAL",0,0,"6\t ",,terminal_output +185,416576,"TERMINAL",0,0,"771",,terminal_output +186,417606,"TERMINAL",0,0,"86",,terminal_output +187,418640,"TERMINAL",0,0,"9\t ",,terminal_output +188,419676,"TERMINAL",0,0,"40\t ",,terminal_output +189,420711,"TERMINAL",0,0,"1\t ",,terminal_output +190,421786,"TERMINAL",0,0,"2\t ",,terminal_output +191,422785,"TERMINAL",0,0,"3\t ",,terminal_output +192,423820,"TERMINAL",0,0,"4\t ",,terminal_output +193,424865,"TERMINAL",0,0,"5\t ",,terminal_output +194,425911,"TERMINAL",0,0,"6\t ",,terminal_output +195,426932,"TERMINAL",0,0,"7\t ",,terminal_output +196,427074,"TERMINAL",0,0,"salloc",,terminal_focus +197,427968,"TERMINAL",0,0,"8\t ",,terminal_output +198,428224,"TERMINAL",0,0,"salloc",,terminal_focus +199,429010,"TERMINAL",0,0,"9\t ",,terminal_output +200,430044,"TERMINAL",0,0,"50\t ",,terminal_output +201,431154,"TERMINAL",0,0,"1\t ",,terminal_output +202,432120,"TERMINAL",0,0,"2\t ",,terminal_output +203,433156,"TERMINAL",0,0,"3\t ",,terminal_output +204,434228,"TERMINAL",0,0,"4\t ",,terminal_output +205,435225,"TERMINAL",0,0,"6\t ",,terminal_output +206,436260,"TERMINAL",0,0,"7\t ",,terminal_output +207,436811,"TERMINAL",0,0,"salloc: Nodes hkn0715 are ready for job\r\n",,terminal_output +208,437329,"TERMINAL",0,0,"8\t ",,terminal_output +209,437859,"TERMINAL",0,0,"]0;tum_cte0515@hkn0715:~/Projects/jafar[?2004h[tum_cte0515@hkn0715 jafar]$ ",,terminal_output +210,438389,"TERMINAL",0,0,"9\t ",,terminal_output +211,438430,"TERMINAL",0,0,"s",,terminal_output +212,438526,"TERMINAL",0,0,"o",,terminal_output +213,438621,"TERMINAL",0,0,"u",,terminal_output +214,438763,"TERMINAL",0,0,"r",,terminal_output +215,439021,"TERMINAL",0,0,"[?25lc[?25h",,terminal_output +216,439120,"TERMINAL",0,0,"[?25le[?25h",,terminal_output +217,439362,"TERMINAL",0,0,"[?25l.[?25h",,terminal_output +218,439374,"TERMINAL",0,0,"5:00\t ",,terminal_output +219,439791,"TERMINAL",0,0,"[?25l [?25h",,terminal_output +220,439965,"TERMINAL",0,0,"[?25l.[?25h",,terminal_output +221,440225,"TERMINAL",0,0,"[?25lv[?25h",,terminal_output +222,440417,"TERMINAL",0,0,"1\t ",,terminal_output +223,440457,"TERMINAL",0,0,"env/",,terminal_output +224,440676,"TERMINAL",0,0,"[?25lb[?25h",,terminal_output +225,440817,"TERMINAL",0,0,"in/",,terminal_output +226,441068,"TERMINAL",0,0,"[?25la[?25h",,terminal_output +227,441120,"TERMINAL",0,0,"[?25lc[?25h",,terminal_output +228,441282,"TERMINAL",0,0,"tivate",,terminal_output +229,441444,"TERMINAL",0,0,"2\t ",,terminal_output +230,441515,"TERMINAL",0,0,"[?25l[?2004l\r]0;tum_cte0515@hkn0715:~/Projects/jafar[?2004h(jafar) [tum_cte0515@hkn0715 jafar]$ [?25h",,terminal_output +231,442485,"TERMINAL",0,0,"3\t ",,terminal_output +232,443516,"TERMINAL",0,0,"4\t ",,terminal_output +233,444575,"TERMINAL",0,0,"5\t ",,terminal_output +234,445587,"TERMINAL",0,0,"6\t ",,terminal_output +235,446626,"TERMINAL",0,0,"7\t ",,terminal_output +236,447767,"TERMINAL",0,0,"8\t ",,terminal_output +237,448706,"TERMINAL",0,0,"9\t ",,terminal_output +238,449757,"TERMINAL",0,0,"10\t ",,terminal_output +239,450780,"TERMINAL",0,0,"1\t ",,terminal_output +240,451814,"TERMINAL",0,0,"2\t ",,terminal_output +241,452849,"TERMINAL",0,0,"3\t ",,terminal_output +242,453890,"TERMINAL",0,0,"4\t ",,terminal_output +243,454931,"TERMINAL",0,0,"5\t ",,terminal_output +244,455972,"TERMINAL",0,0,"6\t ",,terminal_output +245,457013,"TERMINAL",0,0,"7\t ",,terminal_output +246,458053,"TERMINAL",0,0,"8\t ",,terminal_output 
+247,459088,"TERMINAL",0,0,"9\t ",,terminal_output +248,460130,"TERMINAL",0,0,"20\t ",,terminal_output +249,461165,"TERMINAL",0,0,"1\t ",,terminal_output +250,462277,"TERMINAL",0,0,"3\t ",,terminal_output +251,463247,"TERMINAL",0,0,"4\t ",,terminal_output +252,464282,"TERMINAL",0,0,"5\t ",,terminal_output +253,465320,"TERMINAL",0,0,"6\t ",,terminal_output +254,466357,"TERMINAL",0,0,"7\t ",,terminal_output +255,467400,"TERMINAL",0,0,"8\t ",,terminal_output +256,468433,"TERMINAL",0,0,"9\t ",,terminal_output +257,469476,"TERMINAL",0,0,"30\t ",,terminal_output +258,470516,"TERMINAL",0,0,"1\t ",,terminal_output +259,471553,"TERMINAL",0,0,"2\t ",,terminal_output +260,472596,"TERMINAL",0,0,"3\t ",,terminal_output +261,473628,"TERMINAL",0,0,"4\t ",,terminal_output +262,474665,"TERMINAL",0,0,"5\t ",,terminal_output +263,475707,"TERMINAL",0,0,"6\t ",,terminal_output +264,476738,"TERMINAL",0,0,"7\t ",,terminal_output +265,477780,"TERMINAL",0,0,"8\t ",,terminal_output +266,478825,"TERMINAL",0,0,"9\t ",,terminal_output +267,479856,"TERMINAL",0,0,"40\t ",,terminal_output +268,480892,"TERMINAL",0,0,"1\t ",,terminal_output +269,481931,"TERMINAL",0,0,"2\t ",,terminal_output +270,482994,"TERMINAL",0,0,"3\t ",,terminal_output +271,484016,"TERMINAL",0,0,"4\t ",,terminal_output +272,485050,"TERMINAL",0,0,"5\t ",,terminal_output +273,486104,"TERMINAL",0,0,"6\t ",,terminal_output +274,487118,"TERMINAL",0,0,"7\t ",,terminal_output +275,488156,"TERMINAL",0,0,"8\t ",,terminal_output +276,489239,"TERMINAL",0,0,"9\t ",,terminal_output +277,490242,"TERMINAL",0,0,"51\t ",,terminal_output +278,491275,"TERMINAL",0,0,"2\t ",,terminal_output +279,492337,"TERMINAL",0,0,"3\t ",,terminal_output +280,493366,"TERMINAL",0,0,"4\t ",,terminal_output +281,494386,"TERMINAL",0,0,"51",,terminal_output +282,495422,"TERMINAL",0,0,"6\t ",,terminal_output +283,496464,"TERMINAL",0,0,"7\t ",,terminal_output +284,497500,"TERMINAL",0,0,"8\t ",,terminal_output +285,498539,"TERMINAL",0,0,"9\t ",,terminal_output +286,499360,"TERMINAL",0,0,"salloc",,terminal_focus +287,499571,"TERMINAL",0,0,"6:00\t ",,terminal_output +288,500468,"TERMINAL",0,0,"srun",,terminal_focus +289,500643,"TERMINAL",0,0,"1\t ",,terminal_output +290,501655,"TERMINAL",0,0,"2\t ",,terminal_output +291,502717,"TERMINAL",0,0,"3\t ",,terminal_output +292,503736,"TERMINAL",0,0,"4\t ",,terminal_output +293,504782,"TERMINAL",0,0,"5\t ",,terminal_output +294,505819,"TERMINAL",0,0,"6\t ",,terminal_output +295,506857,"TERMINAL",0,0,"7\t ",,terminal_output +296,507901,"TERMINAL",0,0,"8\t ",,terminal_output +297,508935,"TERMINAL",0,0,"9\t ",,terminal_output +298,509977,"TERMINAL",0,0,"10\t ",,terminal_output +299,511016,"TERMINAL",0,0,"1\t ",,terminal_output +300,512052,"TERMINAL",0,0,"2\t ",,terminal_output +301,513091,"TERMINAL",0,0,"3\t ",,terminal_output +302,514129,"TERMINAL",0,0,"4\t ",,terminal_output +303,515167,"TERMINAL",0,0,"511",,terminal_output +304,516239,"TERMINAL",0,0,"7\t ",,terminal_output +305,517281,"TERMINAL",0,0,"8\t ",,terminal_output +306,518278,"TERMINAL",0,0,"9\t ",,terminal_output +307,519324,"TERMINAL",0,0,"20\t ",,terminal_output +308,520360,"TERMINAL",0,0,"1\t ",,terminal_output +309,521399,"TERMINAL",0,0,"2\t ",,terminal_output +310,522457,"TERMINAL",0,0,"3\t ",,terminal_output +311,523478,"TERMINAL",0,0,"4\t ",,terminal_output +312,525131,"TERMINAL",0,0,"5 36",,terminal_output +313,526165,"TERMINAL",0,0,"6\t ",,terminal_output +314,527239,"TERMINAL",0,0,"7\t ",,terminal_output +315,528235,"TERMINAL",0,0,"9\t ",,terminal_output 
+316,529271,"TERMINAL",0,0,"30\t ",,terminal_output +317,530313,"TERMINAL",0,0,"1\t ",,terminal_output +318,531347,"TERMINAL",0,0,"2\t ",,terminal_output +319,532383,"TERMINAL",0,0,"3\t ",,terminal_output +320,533423,"TERMINAL",0,0,"4\t ",,terminal_output +321,534459,"TERMINAL",0,0,"5\t ",,terminal_output +322,535497,"TERMINAL",0,0,"6\t ",,terminal_output +323,536540,"TERMINAL",0,0,"7\t ",,terminal_output +324,537577,"TERMINAL",0,0,"8\t ",,terminal_output +325,538618,"TERMINAL",0,0,"9\t ",,terminal_output +326,539651,"TERMINAL",0,0,"40\t ",,terminal_output +327,540691,"TERMINAL",0,0,"1\t ",,terminal_output +328,541730,"TERMINAL",0,0,"2\t ",,terminal_output +329,542769,"TERMINAL",0,0,"3\t ",,terminal_output +330,543817,"TERMINAL",0,0,"4\t ",,terminal_output +331,544856,"TERMINAL",0,0,"5\t ",,terminal_output +332,545898,"TERMINAL",0,0,"6\t ",,terminal_output +333,546938,"TERMINAL",0,0,"7\t ",,terminal_output +334,547972,"TERMINAL",0,0,"8\t ",,terminal_output +335,549016,"TERMINAL",0,0,"9\t ",,terminal_output +336,550055,"TERMINAL",0,0,"50\t ",,terminal_output +337,551098,"TERMINAL",0,0,"1\t ",,terminal_output +338,552143,"TERMINAL",0,0,"2\t ",,terminal_output +339,553184,"TERMINAL",0,0,"3\t ",,terminal_output +340,554342,"TERMINAL",0,0,"5\t ",,terminal_output +341,555415,"TERMINAL",0,0,"6\t ",,terminal_output +342,556427,"TERMINAL",0,0,"7\t ",,terminal_output +343,557467,"TERMINAL",0,0,"8\t ",,terminal_output +344,558507,"TERMINAL",0,0,"9\t ",,terminal_output +345,559542,"TERMINAL",0,0,"7:00\t ",,terminal_output +346,560583,"TERMINAL",0,0,"1\t ",,terminal_output +347,561626,"TERMINAL",0,0,"2\t ",,terminal_output +348,562653,"TERMINAL",0,0,"3\t ",,terminal_output +349,563691,"TERMINAL",0,0,"4\t ",,terminal_output +350,564733,"TERMINAL",0,0,"5\t ",,terminal_output +351,565769,"TERMINAL",0,0,"6\t ",,terminal_output +352,566804,"TERMINAL",0,0,"7 9",,terminal_output +353,567840,"TERMINAL",0,0,"8\t ",,terminal_output +354,568880,"TERMINAL",0,0,"9\t ",,terminal_output +355,569923,"TERMINAL",0,0,"10\t ",,terminal_output +356,570964,"TERMINAL",0,0,"1\t ",,terminal_output +357,571998,"TERMINAL",0,0,"2\t ",,terminal_output +358,573039,"TERMINAL",0,0,"3\t ",,terminal_output +359,574085,"TERMINAL",0,0,"4\t ",,terminal_output +360,575121,"TERMINAL",0,0,"5\t ",,terminal_output +361,576160,"TERMINAL",0,0,"6\t ",,terminal_output +362,577193,"TERMINAL",0,0,"7\t ",,terminal_output +363,578278,"TERMINAL",0,0,"9\t ",,terminal_output +364,579264,"TERMINAL",0,0,"20\t ",,terminal_output +365,580304,"TERMINAL",0,0,"1\t ",,terminal_output +366,581341,"TERMINAL",0,0,"2\t ",,terminal_output +367,582386,"TERMINAL",0,0,"3\t ",,terminal_output +368,583419,"TERMINAL",0,0,"4\t ",,terminal_output +369,584460,"TERMINAL",0,0,"5\t ",,terminal_output +370,585495,"TERMINAL",0,0,"6\t ",,terminal_output +371,586537,"TERMINAL",0,0,"7\t ",,terminal_output +372,587574,"TERMINAL",0,0,"8\t ",,terminal_output +373,588622,"TERMINAL",0,0,"9\t ",,terminal_output +374,589646,"TERMINAL",0,0,"30\t ",,terminal_output +375,590683,"TERMINAL",0,0,"1\t ",,terminal_output +376,591720,"TERMINAL",0,0,"2\t ",,terminal_output +377,592759,"TERMINAL",0,0,"3\t ",,terminal_output +378,593795,"TERMINAL",0,0,"4\t ",,terminal_output +379,594834,"TERMINAL",0,0,"5\t ",,terminal_output +380,595868,"TERMINAL",0,0,"6\t ",,terminal_output +381,596906,"TERMINAL",0,0,"7\t ",,terminal_output +382,597945,"TERMINAL",0,0,"812",,terminal_output +383,598985,"TERMINAL",0,0,"9\t ",,terminal_output +384,600058,"TERMINAL",0,0,"40\t ",,terminal_output 
+385,601062,"TERMINAL",0,0,"1\t ",,terminal_output +386,602098,"TERMINAL",0,0,"2\t ",,terminal_output +387,603136,"TERMINAL",0,0,"3\t ",,terminal_output +388,604176,"TERMINAL",0,0,"4\t ",,terminal_output +389,605216,"TERMINAL",0,0,"6\t ",,terminal_output +390,606251,"TERMINAL",0,0,"7\t ",,terminal_output +391,607291,"TERMINAL",0,0,"8\t ",,terminal_output +392,608328,"TERMINAL",0,0,"9\t ",,terminal_output +393,609366,"TERMINAL",0,0,"50\t ",,terminal_output +394,610424,"TERMINAL",0,0,"1\t ",,terminal_output +395,611437,"TERMINAL",0,0,"2\t ",,terminal_output +396,612474,"TERMINAL",0,0,"3\t ",,terminal_output +397,613507,"TERMINAL",0,0,"4\t ",,terminal_output +398,614545,"TERMINAL",0,0,"5\t ",,terminal_output +399,615592,"TERMINAL",0,0,"6\t ",,terminal_output +400,616627,"TERMINAL",0,0,"7\t ",,terminal_output +401,617670,"TERMINAL",0,0,"8\t ",,terminal_output +402,618708,"TERMINAL",0,0,"9\t ",,terminal_output +403,619746,"TERMINAL",0,0,"8:00\t ",,terminal_output +404,620787,"TERMINAL",0,0,"1\t ",,terminal_output +405,621826,"TERMINAL",0,0,"2\t ",,terminal_output +406,622868,"TERMINAL",0,0,"37",,terminal_output +407,623903,"TERMINAL",0,0,"4\t ",,terminal_output +408,624943,"TERMINAL",0,0,"5\t ",,terminal_output +409,625980,"TERMINAL",0,0,"6\t ",,terminal_output +410,627016,"TERMINAL",0,0,"7\t ",,terminal_output +411,627407,"TERMINAL",0,0," ",,terminal_output +412,628055,"TERMINAL",0,0,"8\t ",,terminal_output +413,628862,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/sample.py",0,0,"from dataclasses import dataclass\nfrom typing import Optional\nimport time\nimport os\n\nimport dm_pix as pix\nimport einops\nimport jax\nimport jax.numpy as jnp\nimport flax.linen as nn\nimport numpy as np\nfrom flax.training.train_state import TrainState\nimport grain\nimport orbax.checkpoint as ocp\nimport optax\nfrom PIL import Image, ImageDraw\nimport tyro\n\nfrom genie import Genie\nfrom utils.dataloader import get_dataloader\n\n\n@dataclass\nclass Args:\n # Experiment\n seed: int = 0\n seq_len: int = 16\n image_channels: int = 3\n image_height: int = 90\n image_width: int = 160\n data_dir: str = ""data/coinrun_episodes""\n checkpoint: str = """"\n checkpoint_step: Optional[int] = None\n # Sampling\n batch_size: int = 1\n maskgit_steps: int = 25\n temperature: float = 1.0\n sample_argmax: bool = True\n start_frame: int = 0\n # Tokenizer checkpoint\n tokenizer_dim: int = 512\n latent_patch_dim: int = 32\n num_patch_latents: int = 1024\n patch_size: int = 4\n tokenizer_num_blocks: int = 8\n tokenizer_num_heads: int = 8\n # LAM checkpoint\n lam_dim: int = 512\n latent_action_dim: int = 32\n num_latent_actions: int = 6\n lam_patch_size: int = 16\n lam_num_blocks: int = 8\n lam_num_heads: int = 8\n lam_co_train: bool = True\n # Dynamics checkpoint\n dyna_dim: int = 512\n dyna_num_blocks: int = 12\n dyna_num_heads: int = 8\n param_dtype: jnp.dtype = jnp.float32\n dtype: jnp.dtype = jnp.bfloat16\n\n\nargs = tyro.cli(Args)\nrng = jax.random.PRNGKey(args.seed)\n\n# --- Load Genie checkpoint ---\ngenie = Genie(\n # Tokenizer\n in_dim=args.image_channels,\n tokenizer_dim=args.tokenizer_dim,\n latent_patch_dim=args.latent_patch_dim,\n num_patch_latents=args.num_patch_latents,\n patch_size=args.patch_size,\n tokenizer_num_blocks=args.tokenizer_num_blocks,\n tokenizer_num_heads=args.tokenizer_num_heads,\n # LAM\n lam_dim=args.lam_dim,\n latent_action_dim=args.latent_action_dim,\n num_latent_actions=args.num_latent_actions,\n lam_patch_size=args.lam_patch_size,\n lam_num_blocks=args.lam_num_blocks,\n 
lam_num_heads=args.lam_num_heads,\n lam_co_train=args.lam_co_train,\n # Dynamics\n dyna_dim=args.dyna_dim,\n dyna_num_blocks=args.dyna_num_blocks,\n dyna_num_heads=args.dyna_num_heads,\n use_maskgit=False,\n param_dtype=args.param_dtype,\n dtype=args.dtype,\n)\nrng, _rng = jax.random.split(rng)\nimage_shape = (args.image_height, args.image_width, args.image_channels)\ndummy_inputs = dict(\n videos=jnp.zeros((args.batch_size, args.seq_len, *image_shape), dtype=jnp.float32),\n mask_rng=_rng,\n)\nrng, _rng = jax.random.split(rng)\nparams = genie.init(_rng, dummy_inputs)\n\ndummy_train_state = TrainState.create(\n apply_fn=genie.apply,\n params=params,\n tx=optax.adamw(\n optax.warmup_cosine_decay_schedule(\n 0, 0, 1, 2 # dummy values\n )\n ), \n)\nhandler_registry = ocp.handlers.DefaultCheckpointHandlerRegistry()\nhandler_registry.add('model_state', ocp.args.StandardRestore, ocp.handlers.StandardCheckpointHandler)\ncheckpoint_manager = ocp.CheckpointManager(\n args.checkpoint,\n options=ocp.CheckpointManagerOptions(step_format_fixed_length=6),\n handler_registry=handler_registry\n)\nabstract_train_state = jax.tree_util.tree_map(\n ocp.utils.to_shape_dtype_struct, dummy_train_state\n)\n\nrestored = checkpoint_manager.restore(\n args.checkpoint_step or checkpoint_manager.latest_step(),\n args=ocp.args.Composite(\n model_state=ocp.args.StandardRestore(abstract_train_state),\n ),\n)\nrestored_train_state = restored[""model_state""]\nparams = restored_train_state.params\n\n\ndef _sampling_wrapper(module, batch):\n # return module.sample_maskgit(batch, args.seq_len, args.maskgit_steps, args.temperature, args.sample_argmax)\n return module.sample_causal(batch, args.seq_len, args.temperature, args.sample_argmax)\n\n# --- Define autoregressive sampling loop ---\ndef _autoreg_sample(rng, video_batch, action_batch):\n vid = video_batch[:, : args.start_frame + 1]\n sampling_fn = jax.jit(nn.apply(_sampling_wrapper, genie)) \n rng, _rng = jax.random.split(rng)\n batch = dict(videos=vid, latent_actions=action_batch, rng=_rng)\n generated_vid = sampling_fn(\n params,\n batch\n )\n return generated_vid\n\ndef _get_dataloader_iterator():\n array_record_files = [\n os.path.join(args.data_dir, x)\n for x in os.listdir(args.data_dir)\n if x.endswith("".array_record"")\n ]\n grain_dataloader = get_dataloader(\n array_record_files,\n args.seq_len,\n # NOTE: We deliberately pass the global batch size\n # The dataloader shards the dataset across all processes\n args.batch_size,\n *image_shape,\n num_workers=0,\n prefetch_buffer_size=1,\n seed=args.seed,\n )\n initial_state = grain_dataloader._create_initial_state()\n grain_iterator = grain.DataLoaderIterator(grain_dataloader, initial_state)\n return grain_iterator\n\n# --- Get video + latent actions ---\n# grain_iterator = _get_dataloader_iterator()\n# video_batch = next(grain_iterator)\nvideo_batch = np.load(""overfit_dir/single_sample_corner.npy"")\n\nvideo_batch = video_batch.astype(args.dtype) #/ 255.0\n# Get latent actions for all videos in the batch\nbatch = dict(videos=video_batch[:,:args.seq_len])\naction_batch = genie.apply(params, batch, False, method=Genie.vq_encode)\naction_batch = action_batch.reshape(video_batch.shape[0], args.seq_len - 1, 1)\n\n# --- Sample + evaluate video ---\nprint(""autoreg sampling..."")\nvid = _autoreg_sample(rng, video_batch, action_batch)\nprint(""autoreg sampling done. 
calculating ssim and saving video"")\ngt = video_batch[:, : vid.shape[1]].clip(0, 1).reshape(-1, *video_batch.shape[2:])\nrecon = vid.clip(0, 1).reshape(-1, *vid.shape[2:])\nssim = pix.ssim(gt[:, args.start_frame + 1 :], recon[:, args.start_frame + 1 :]).mean()\nprint(f""SSIM: {ssim}"")\n\n# --- Construct video ---\ntrue_videos = (video_batch * 255).astype(np.uint8)\npred_videos = (vid * 255).astype(np.uint8)\nvideo_comparison = np.zeros((2, *vid.shape), dtype=np.uint8)\nvideo_comparison[0] = true_videos[:, :args.seq_len]\nvideo_comparison[1] = pred_videos\nframes = einops.rearrange(video_comparison, ""n b t h w c -> t (b h) (n w) c"")\n\n# --- Save video --- \nimgs = [Image.fromarray(img) for img in frames]\n# Write actions on each frame, on each row (i.e., for each video in the batch, on the GT row)\nfor t, img in enumerate(imgs[1:]):\n d = ImageDraw.Draw(img)\n for row in range(action_batch.shape[0]):\n action = action_batch[row, t, 0]\n y_offset = row * video_batch.shape[2] + 2\n d.text((2, y_offset), f""{action}"", fill=255)\nimgs[0].save(\n f""generation_{time.time()}.gif"",\n save_all=True,\n append_images=imgs[1:],\n duration=250,\n loop=0,\n)\n",python,tab +414,629106,"TERMINAL",0,0,"9\t ",,terminal_output +415,630132,"TERMINAL",0,0,"10\t ",,terminal_output +416,631178,"TERMINAL",0,0,"1\t ",,terminal_output +417,632286,"TERMINAL",0,0,"3\t ",,terminal_output +418,633287,"TERMINAL",0,0,"411",,terminal_output +419,634329,"TERMINAL",0,0,"5\t ",,terminal_output +420,635336,"TERMINAL",0,0,"6\t ",,terminal_output +421,636380,"TERMINAL",0,0,"7\t ",,terminal_output +422,637423,"TERMINAL",0,0,"8\t ",,terminal_output +423,638476,"TERMINAL",0,0,"9\t ",,terminal_output +424,639503,"TERMINAL",0,0,"20\t ",,terminal_output +425,640546,"TERMINAL",0,0,"1\t ",,terminal_output +426,641348,"TERMINAL",0,0,"salloc",,terminal_focus +427,641588,"TERMINAL",0,0,"2\t ",,terminal_output +428,642582,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/sample.py",0,0,"",python,tab +429,642676,"TERMINAL",0,0,"3\t ",,terminal_output +430,643682,"TERMINAL",0,0,"4\t ",,terminal_output +431,644660,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/sample.py",6796,0,"",python,selection_mouse +432,644696,"TERMINAL",0,0,"5\t ",,terminal_output +433,645833,"TERMINAL",0,0,"6 9",,terminal_output +434,646319,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/sample.py",6794,0,"",python,selection_command +435,646531,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/sample.py",6782,0,"",python,selection_command +436,646866,"TERMINAL",0,0,"7\t ",,terminal_output +437,647039,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/sample.py",6764,0,"",python,selection_command +438,647061,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/sample.py",6736,0,"",python,selection_command +439,647087,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/sample.py",6717,0,"",python,selection_command +440,647135,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/sample.py",6680,0,"",python,selection_command +441,647182,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/sample.py",6666,0,"",python,selection_command +442,647183,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/sample.py",6613,0,"",python,selection_command +443,647221,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/sample.py",6563,0,"",python,selection_command 
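sample.py's _sampling_wrapper shows a useful Flax pattern: nn.apply(fn, module) lifts a function over a module into (variables, *args) form, which can then be jitted once and reused for every autoregressive call. A stripped-down sketch with a toy module (all names hypothetical):

import jax
import jax.numpy as jnp
import flax.linen as nn

class Toy(nn.Module):
    @nn.compact
    def __call__(self, x):
        return nn.Dense(4)(x)

def _wrapper(module, x):
    return module(x)  # stand-in for module.sample_causal(batch, ...)

model = Toy()
x = jnp.ones((1, 4))
params = model.init(jax.random.PRNGKey(0), x)
sampling_fn = jax.jit(nn.apply(_wrapper, model))
out = sampling_fn(params, x)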
+444,647261,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/sample.py",6522,0,"",python,selection_command +445,647276,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/sample.py",6477,0,"",python,selection_command +446,647382,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/sample.py",6449,0,"",python,selection_command +447,647583,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/sample.py",6414,0,"",python,selection_command +448,647747,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/sample.py",6320,0,"",python,selection_command +449,647903,"TERMINAL",0,0,"8\t ",,terminal_output +450,647932,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/sample.py",6272,0,"",python,selection_command +451,648158,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/sample.py",6320,0,"",python,selection_command +452,648296,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/sample.py",6272,0,"",python,selection_command +453,648381,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/sample.py",6320,0,"",python,selection_command +454,648508,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/sample.py",6272,0,"",python,selection_command +455,648598,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/sample.py",6320,0,"",python,selection_command +456,648697,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/sample.py",6272,0,"",python,selection_command +457,648941,"TERMINAL",0,0,"9\t ",,terminal_output +458,649206,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/sample.py",6250,0,"",python,selection_command +459,649219,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/sample.py",6249,0,"",python,selection_command +460,649248,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/sample.py",6171,0,"",python,selection_command +461,649290,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/sample.py",6137,0,"",python,selection_command +462,649333,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/sample.py",6085,0,"",python,selection_command +463,649342,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/sample.py",6024,0,"",python,selection_command +464,649378,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/sample.py",5981,0,"",python,selection_command +465,649418,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/sample.py",5930,0,"",python,selection_command +466,649461,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/sample.py",5904,0,"",python,selection_command +467,649501,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/sample.py",5903,0,"",python,selection_command +468,649501,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/sample.py",5880,0,"",python,selection_command +469,649541,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/sample.py",5792,0,"",python,selection_command +470,649583,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/sample.py",5741,0,"",python,selection_command +471,649621,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/sample.py",5658,0,"",python,selection_command +472,649667,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/sample.py",5592,0,"",python,selection_command 
+473,649712,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/sample.py",5538,0,"",python,selection_command +474,649714,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/sample.py",5509,0,"",python,selection_command +475,649715,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/sample.py",5475,0,"",python,selection_command +476,649755,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/sample.py",5474,0,"",python,selection_command +477,649763,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/sample.py",5395,0,"",python,selection_command +478,649797,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/sample.py",5322,0,"",python,selection_command +479,649841,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/sample.py",5272,0,"",python,selection_command +480,649883,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/sample.py",5223,0,"",python,selection_command +481,649977,"TERMINAL",0,0,"30\t ",,terminal_output +482,650178,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/sample.py",5272,0,"",python,selection_command +483,650666,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/sample.py",5322,0,"",python,selection_command +484,650690,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/sample.py",5395,0,"",python,selection_command +485,650720,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/sample.py",5474,0,"",python,selection_command +486,650764,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/sample.py",5475,0,"",python,selection_command +487,650776,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/sample.py",5509,0,"",python,selection_command +488,651017,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/sample.py",5475,0,"",python,selection_command +489,651020,"TERMINAL",0,0,"1\t ",,terminal_output +490,651517,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/sample.py",5474,0,"",python,selection_command +491,651545,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/sample.py",5395,0,"",python,selection_command +492,651587,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/sample.py",5322,0,"",python,selection_command +493,651630,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/sample.py",5272,0,"",python,selection_command +494,651639,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/sample.py",5223,0,"",python,selection_command +495,651673,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/sample.py",5169,0,"",python,selection_command +496,651716,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/sample.py",5168,0,"",python,selection_command +497,651756,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/sample.py",5106,0,"",python,selection_command +498,651769,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/sample.py",5069,0,"",python,selection_command +499,651799,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/sample.py",5023,0,"",python,selection_command +500,651839,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/sample.py",4986,0,"",python,selection_command +501,651849,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/sample.py",4985,0,"",python,selection_command 
+502,651884,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/sample.py",4959,0,"",python,selection_command +503,651938,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/sample.py",4880,0,"",python,selection_command +504,651938,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/sample.py",4819,0,"",python,selection_command +505,651980,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/sample.py",4813,0,"",python,selection_command +506,652003,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/sample.py",4789,0,"",python,selection_command +507,652046,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/sample.py",4757,0,"",python,selection_command +508,652064,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/sample.py",4734,0,"",python,selection_command +509,652065,"TERMINAL",0,0,"2\t ",,terminal_output +510,652097,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/sample.py",4712,0,"",python,selection_command +511,652139,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/sample.py",4687,0,"",python,selection_command +512,652182,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/sample.py",4622,0,"",python,selection_command +513,652268,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/sample.py",4687,0,"",python,selection_command +514,652746,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/sample.py",4712,0,"",python,selection_command +515,652768,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/sample.py",4734,0,"",python,selection_command +516,652791,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/sample.py",4757,0,"",python,selection_command +517,652833,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/sample.py",4789,0,"",python,selection_command +518,652874,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/sample.py",4813,0,"",python,selection_command +519,652917,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/sample.py",4819,0,"",python,selection_command +520,652918,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/sample.py",4880,0,"",python,selection_command +521,652960,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/sample.py",4959,0,"",python,selection_command +522,653002,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/sample.py",4985,0,"",python,selection_command +523,653012,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/sample.py",4986,0,"",python,selection_command +524,653095,"TERMINAL",0,0,"3\t ",,terminal_output +525,653187,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/sample.py",5023,0,"",python,selection_command +526,653369,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/sample.py",5069,0,"",python,selection_command +527,653638,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/sample.py",5106,0,"",python,selection_command +528,653813,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/sample.py",5168,0,"",python,selection_command +529,654079,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/sample.py",5106,0,"",python,selection_command +530,654163,"TERMINAL",0,0,"4\t ",,terminal_output +531,654361,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/sample.py",5069,0,"",python,selection_command 
+532,654686,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/sample.py",5106,0,"",python,selection_command +533,655213,"TERMINAL",0,0,"5\t ",,terminal_output +534,656212,"TERMINAL",0,0,"6\t ",,terminal_output +535,657278,"TERMINAL",0,0,"8\t ",,terminal_output +536,658277,"TERMINAL",0,0,"9\t ",,terminal_output +537,659321,"TERMINAL",0,0,"40\t ",,terminal_output +538,660350,"TERMINAL",0,0,"1\t ",,terminal_output +539,661394,"TERMINAL",0,0,"2\t ",,terminal_output +540,662445,"TERMINAL",0,0,"3\t ",,terminal_output +541,663491,"TERMINAL",0,0,"4\t ",,terminal_output +542,664504,"TERMINAL",0,0,"5\t ",,terminal_output +543,665531,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/sample.py",5106,0,"video_batch = np.load(""overfit_dir/8_sample.npy"")\n",python,content +544,665537,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/sample.py",5156,62,"",python,content +545,665617,"TERMINAL",0,0,"6\t ",,terminal_output +546,666575,"TERMINAL",0,0,"7\t ",,terminal_output +547,667620,"TERMINAL",0,0,"8\t ",,terminal_output +548,668657,"TERMINAL",0,0,"9\t ",,terminal_output +549,669693,"TERMINAL",0,0,"50\t ",,terminal_output +550,670044,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/sample.py",5106,49,"video_batch = np.load(""overfit_dir/single_sample_corner.npy"")",python,content +551,670775,"TERMINAL",0,0,"1\t ",,terminal_output +552,671783,"TERMINAL",0,0,"2169",,terminal_output +553,672805,"TERMINAL",0,0,"3\t ",,terminal_output +554,673846,"TERMINAL",0,0,"4\t ",,terminal_output +555,674893,"TERMINAL",0,0,"5\t ",,terminal_output +556,675931,"TERMINAL",0,0,"6\t ",,terminal_output +557,676973,"TERMINAL",0,0,"7\t ",,terminal_output +558,678012,"TERMINAL",0,0,"8\t ",,terminal_output +559,679062,"TERMINAL",0,0,"9\t ",,terminal_output +560,680105,"TERMINAL",0,0,"9:00\t ",,terminal_output +561,681142,"TERMINAL",0,0,"1\t ",,terminal_output +562,682227,"TERMINAL",0,0,"2\t ",,terminal_output +563,683221,"TERMINAL",0,0,"4\t ",,terminal_output +564,684325,"TERMINAL",0,0,"5\t ",,terminal_output +565,685309,"TERMINAL",0,0,"6\t ",,terminal_output +566,686350,"TERMINAL",0,0,"7\t ",,terminal_output +567,687394,"TERMINAL",0,0,"8\t ",,terminal_output +568,688442,"TERMINAL",0,0,"9\t ",,terminal_output +569,689468,"TERMINAL",0,0,"10\t ",,terminal_output +570,690513,"TERMINAL",0,0,"1\t ",,terminal_output +571,691555,"TERMINAL",0,0,"2\t ",,terminal_output +572,692591,"TERMINAL",0,0,"3\t ",,terminal_output +573,693627,"TERMINAL",0,0,"4\t ",,terminal_output +574,694674,"TERMINAL",0,0,"5\t ",,terminal_output +575,695713,"TERMINAL",0,0,"6\t ",,terminal_output +576,696753,"TERMINAL",0,0,"7\t ",,terminal_output +577,697835,"TERMINAL",0,0,"8\t ",,terminal_output +578,698843,"TERMINAL",0,0,"9\t ",,terminal_output +579,699894,"TERMINAL",0,0,"20\t ",,terminal_output +580,700928,"TERMINAL",0,0,"1\t ",,terminal_output +581,701970,"TERMINAL",0,0,"2\t ",,terminal_output +582,703015,"TERMINAL",0,0,"3\t ",,terminal_output +583,704054,"TERMINAL",0,0,"42",,terminal_output +584,705095,"TERMINAL",0,0,"5\t ",,terminal_output +585,706143,"TERMINAL",0,0,"6\t ",,terminal_output +586,707186,"TERMINAL",0,0,"7\t ",,terminal_output +587,708236,"TERMINAL",0,0,"9\t ",,terminal_output +588,709269,"TERMINAL",0,0,"30\t ",,terminal_output +589,710308,"TERMINAL",0,0,"1\t ",,terminal_output +590,711347,"TERMINAL",0,0,"2\t ",,terminal_output +591,712392,"TERMINAL",0,0,"3\t ",,terminal_output +592,713433,"TERMINAL",0,0,"4\t ",,terminal_output +593,714474,"TERMINAL",0,0,"5\t ",,terminal_output 
+594,715511,"TERMINAL",0,0,"6\t ",,terminal_output +595,716548,"TERMINAL",0,0,"7\t ",,terminal_output +596,717601,"TERMINAL",0,0,"8\t ",,terminal_output +597,718635,"TERMINAL",0,0,"9\t ",,terminal_output +598,719673,"TERMINAL",0,0,"40\t ",,terminal_output +599,720713,"TERMINAL",0,0,"1\t ",,terminal_output +600,721771,"TERMINAL",0,0,"2\t ",,terminal_output +601,722816,"TERMINAL",0,0,"3\t ",,terminal_output +602,723867,"TERMINAL",0,0,"4\t ",,terminal_output +603,724876,"TERMINAL",0,0,"5\t ",,terminal_output +604,725916,"TERMINAL",0,0,"6\t ",,terminal_output +605,726961,"TERMINAL",0,0,"7\t ",,terminal_output +606,728028,"TERMINAL",0,0,"8\t ",,terminal_output +607,729033,"TERMINAL",0,0,"9\t ",,terminal_output +608,730075,"TERMINAL",0,0,"50\t ",,terminal_output +609,731143,"TERMINAL",0,0,"1\t ",,terminal_output +610,732159,"TERMINAL",0,0,"2\t ",,terminal_output +611,733207,"TERMINAL",0,0,"3\t ",,terminal_output +612,734285,"TERMINAL",0,0,"5\t ",,terminal_output +613,735282,"TERMINAL",0,0,"6\t ",,terminal_output +614,736327,"TERMINAL",0,0,"7\t ",,terminal_output +615,737368,"TERMINAL",0,0,"8\t ",,terminal_output +616,738407,"TERMINAL",0,0,"9\t ",,terminal_output +617,739451,"TERMINAL",0,0,"10:00\t ",,terminal_output +618,740491,"TERMINAL",0,0,"1\t ",,terminal_output +619,741537,"TERMINAL",0,0,"2\t ",,terminal_output +620,742588,"TERMINAL",0,0,"3\t ",,terminal_output +621,743621,"TERMINAL",0,0,"4\t ",,terminal_output +622,744661,"TERMINAL",0,0,"5\t ",,terminal_output +623,745704,"TERMINAL",0,0,"6\t ",,terminal_output +624,746745,"TERMINAL",0,0,"7\t ",,terminal_output +625,747792,"TERMINAL",0,0,"8\t ",,terminal_output +626,748834,"TERMINAL",0,0,"9\t ",,terminal_output +627,749883,"TERMINAL",0,0,"10\t ",,terminal_output +628,750920,"TERMINAL",0,0,"1\t ",,terminal_output +629,751958,"TERMINAL",0,0,"2\t ",,terminal_output +630,753001,"TERMINAL",0,0,"3\t ",,terminal_output +631,754040,"TERMINAL",0,0,"4\t ",,terminal_output +632,755082,"TERMINAL",0,0,"5\t ",,terminal_output +633,756126,"TERMINAL",0,0,"6\t ",,terminal_output +634,757165,"TERMINAL",0,0,"7\t ",,terminal_output +635,758206,"TERMINAL",0,0,"9\t ",,terminal_output +636,759286,"TERMINAL",0,0,"20\t ",,terminal_output +637,760278,"TERMINAL",0,0,"1\t ",,terminal_output +638,761319,"TERMINAL",0,0,"2\t ",,terminal_output +639,762361,"TERMINAL",0,0,"3\t ",,terminal_output +640,763400,"TERMINAL",0,0,"4\t ",,terminal_output +641,764444,"TERMINAL",0,0,"5\t ",,terminal_output +642,765484,"TERMINAL",0,0,"6\t ",,terminal_output +643,766552,"TERMINAL",0,0,"7\t ",,terminal_output +644,767591,"TERMINAL",0,0,"8\t ",,terminal_output +645,768636,"TERMINAL",0,0,"9\t ",,terminal_output +646,769677,"TERMINAL",0,0,"30\t ",,terminal_output +647,770723,"TERMINAL",0,0,"1\t ",,terminal_output +648,771767,"TERMINAL",0,0,"2\t ",,terminal_output +649,772805,"TERMINAL",0,0,"3\t ",,terminal_output +650,773851,"TERMINAL",0,0,"4\t ",,terminal_output +651,774890,"TERMINAL",0,0,"5\t ",,terminal_output +652,775929,"TERMINAL",0,0,"6\t ",,terminal_output +653,776971,"TERMINAL",0,0,"7\t ",,terminal_output +654,778012,"TERMINAL",0,0,"8\t ",,terminal_output +655,779052,"TERMINAL",0,0,"9\t ",,terminal_output +656,780095,"TERMINAL",0,0,"40\t ",,terminal_output +657,781136,"TERMINAL",0,0,"1\t ",,terminal_output +658,782233,"TERMINAL",0,0,"2\t ",,terminal_output +659,783220,"TERMINAL",0,0,"4\t ",,terminal_output +660,784316,"TERMINAL",0,0,"5\t ",,terminal_output +661,785304,"TERMINAL",0,0,"6\t ",,terminal_output +662,786340,"TERMINAL",0,0,"7\t ",,terminal_output 
+663,787379,"TERMINAL",0,0,"8\t ",,terminal_output +664,788418,"TERMINAL",0,0,"9\t ",,terminal_output +665,789456,"TERMINAL",0,0,"50\t ",,terminal_output +666,790493,"TERMINAL",0,0,"1\t ",,terminal_output +667,791536,"TERMINAL",0,0,"2\t ",,terminal_output +668,792576,"TERMINAL",0,0,"3\t ",,terminal_output +669,793610,"TERMINAL",0,0,"4\t ",,terminal_output +670,794652,"TERMINAL",0,0,"5\t ",,terminal_output +671,795687,"TERMINAL",0,0,"670",,terminal_output +672,796723,"TERMINAL",0,0,"7\t ",,terminal_output +673,797761,"TERMINAL",0,0,"8\t ",,terminal_output +674,798801,"TERMINAL",0,0,"9\t ",,terminal_output +675,799836,"TERMINAL",0,0,"1:00\t ",,terminal_output +676,800873,"TERMINAL",0,0,"1\t ",,terminal_output +677,801916,"TERMINAL",0,0,"2\t ",,terminal_output +678,802951,"TERMINAL",0,0,"3\t ",,terminal_output +679,803988,"TERMINAL",0,0,"4\t ",,terminal_output +680,805023,"TERMINAL",0,0,"5\t ",,terminal_output +681,806062,"TERMINAL",0,0,"6\t ",,terminal_output +682,807104,"TERMINAL",0,0,"71",,terminal_output +683,808146,"TERMINAL",0,0,"8\t ",,terminal_output +684,809186,"TERMINAL",0,0,"9\t ",,terminal_output +685,810224,"TERMINAL",0,0,"11\t ",,terminal_output +686,811306,"TERMINAL",0,0,"2\t ",,terminal_output +687,812342,"TERMINAL",0,0,"3\t ",,terminal_output +688,813357,"TERMINAL",0,0,"4\t ",,terminal_output +689,814394,"TERMINAL",0,0,"5\t ",,terminal_output +690,815434,"TERMINAL",0,0,"6\t ",,terminal_output +691,816475,"TERMINAL",0,0,"7\t ",,terminal_output +692,817510,"TERMINAL",0,0,"8\t ",,terminal_output +693,818552,"TERMINAL",0,0,"9\t ",,terminal_output +694,819593,"TERMINAL",0,0,"20\t ",,terminal_output +695,820647,"TERMINAL",0,0,"1\t ",,terminal_output +696,821683,"TERMINAL",0,0,"2\t ",,terminal_output +697,822724,"TERMINAL",0,0,"3\t ",,terminal_output +698,823763,"TERMINAL",0,0,"4\t ",,terminal_output +699,824805,"TERMINAL",0,0,"5\t ",,terminal_output +700,825846,"TERMINAL",0,0,"6\t ",,terminal_output +701,826892,"TERMINAL",0,0,"7\t ",,terminal_output +702,827931,"TERMINAL",0,0,"8\t ",,terminal_output +703,828973,"TERMINAL",0,0,"9\t ",,terminal_output +704,830015,"TERMINAL",0,0,"30\t ",,terminal_output +705,831057,"TERMINAL",0,0,"1\t ",,terminal_output +706,832092,"TERMINAL",0,0,"2\t ",,terminal_output +707,833131,"TERMINAL",0,0,"3\t ",,terminal_output +708,834181,"TERMINAL",0,0,"4\t ",,terminal_output +709,835218,"TERMINAL",0,0,"6\t ",,terminal_output +710,836295,"TERMINAL",0,0,"7\t ",,terminal_output +711,837302,"TERMINAL",0,0,"8\t ",,terminal_output +712,838341,"TERMINAL",0,0,"9\t ",,terminal_output +713,839384,"TERMINAL",0,0,"40\t ",,terminal_output +714,840426,"TERMINAL",0,0,"1\t ",,terminal_output +715,841470,"TERMINAL",0,0,"2\t ",,terminal_output +716,842513,"TERMINAL",0,0,"3\t ",,terminal_output +717,843551,"TERMINAL",0,0,"4\t ",,terminal_output +718,844600,"TERMINAL",0,0,"5\t ",,terminal_output +719,845636,"TERMINAL",0,0,"6\t ",,terminal_output +720,846674,"TERMINAL",0,0,"7\t ",,terminal_output +721,847715,"TERMINAL",0,0,"8\t ",,terminal_output +722,848764,"TERMINAL",0,0,"9\t ",,terminal_output +723,849806,"TERMINAL",0,0,"50\t ",,terminal_output +724,850850,"TERMINAL",0,0,"1\t ",,terminal_output +725,851893,"TERMINAL",0,0,"2\t ",,terminal_output +726,852934,"TERMINAL",0,0,"3\t ",,terminal_output +727,853991,"TERMINAL",0,0,"410",,terminal_output +728,855026,"TERMINAL",0,0,"5\t ",,terminal_output +729,856073,"TERMINAL",0,0,"6\t ",,terminal_output +730,857109,"TERMINAL",0,0,"7\t ",,terminal_output +731,858151,"TERMINAL",0,0,"8\t ",,terminal_output 
+732,859198,"TERMINAL",0,0,"92",,terminal_output +733,860233,"TERMINAL",0,0,"2:01\t ",,terminal_output +734,861739,"TERMINAL",0,0,"28",,terminal_output +735,862782,"TERMINAL",0,0,"3\t ",,terminal_output +736,863818,"TERMINAL",0,0,"4\t ",,terminal_output +737,864861,"TERMINAL",0,0,"5\t ",,terminal_output +738,865906,"TERMINAL",0,0,"6\t ",,terminal_output +739,866940,"TERMINAL",0,0,"7 90",,terminal_output +740,867981,"TERMINAL",0,0,"8\t ",,terminal_output +741,869023,"TERMINAL",0,0,"9\t ",,terminal_output +742,870065,"TERMINAL",0,0,"10\t ",,terminal_output +743,871095,"TERMINAL",0,0,"1\t ",,terminal_output +744,872132,"TERMINAL",0,0,"2\t ",,terminal_output +745,873171,"TERMINAL",0,0,"3\t ",,terminal_output +746,874210,"TERMINAL",0,0,"5\t ",,terminal_output +747,875247,"TERMINAL",0,0,"6\t ",,terminal_output +748,876321,"TERMINAL",0,0,"7\t ",,terminal_output +749,877323,"TERMINAL",0,0,"8\t ",,terminal_output +750,878358,"TERMINAL",0,0,"9\t ",,terminal_output +751,879451,"TERMINAL",0,0,"20\t ",,terminal_output +752,880448,"TERMINAL",0,0,"1\t ",,terminal_output +753,881473,"TERMINAL",0,0,"2\t ",,terminal_output +754,882515,"TERMINAL",0,0,"3\t ",,terminal_output +755,883551,"TERMINAL",0,0,"4\t ",,terminal_output +756,884593,"TERMINAL",0,0,"5\t ",,terminal_output +757,885634,"TERMINAL",0,0,"6\t ",,terminal_output +758,887565,"TERMINAL",0,0,"7 37",,terminal_output +759,888604,"TERMINAL",0,0,"9\t ",,terminal_output +760,889648,"TERMINAL",0,0,"30\t ",,terminal_output +761,890692,"TERMINAL",0,0,"1\t ",,terminal_output +762,891737,"TERMINAL",0,0,"2\t ",,terminal_output +763,892774,"TERMINAL",0,0,"3\t ",,terminal_output +764,893817,"TERMINAL",0,0,"4\t ",,terminal_output +765,894870,"TERMINAL",0,0,"5\t ",,terminal_output +766,895899,"TERMINAL",0,0,"6\t ",,terminal_output +767,896940,"TERMINAL",0,0,"7\t ",,terminal_output +768,897980,"TERMINAL",0,0,"8\t ",,terminal_output +769,899021,"TERMINAL",0,0,"9\t ",,terminal_output +770,900063,"TERMINAL",0,0,"40\t ",,terminal_output +771,901109,"TERMINAL",0,0,"1\t ",,terminal_output +772,902152,"TERMINAL",0,0,"2\t ",,terminal_output +773,903188,"TERMINAL",0,0,"3\t ",,terminal_output +774,904235,"TERMINAL",0,0,"5\t ",,terminal_output +775,905279,"TERMINAL",0,0,"6\t ",,terminal_output +776,906317,"TERMINAL",0,0,"7\t ",,terminal_output +777,907352,"TERMINAL",0,0,"8\t ",,terminal_output +778,908397,"TERMINAL",0,0,"9\t ",,terminal_output +779,909434,"TERMINAL",0,0,"50\t ",,terminal_output +780,910475,"TERMINAL",0,0,"1\t ",,terminal_output +781,911516,"TERMINAL",0,0,"2\t ",,terminal_output +782,912559,"TERMINAL",0,0,"3\t ",,terminal_output +783,913601,"TERMINAL",0,0,"4\t ",,terminal_output +784,914642,"TERMINAL",0,0,"5\t ",,terminal_output +785,915687,"TERMINAL",0,0,"6\t ",,terminal_output +786,916728,"TERMINAL",0,0,"7\t ",,terminal_output +787,917769,"TERMINAL",0,0,"8\t ",,terminal_output +788,918814,"TERMINAL",0,0,"9\t ",,terminal_output +789,919855,"TERMINAL",0,0,"3:00\t ",,terminal_output +790,920889,"TERMINAL",0,0,"1\t ",,terminal_output +791,921930,"TERMINAL",0,0,"2\t ",,terminal_output +792,922973,"TERMINAL",0,0,"3\t ",,terminal_output +793,924014,"TERMINAL",0,0,"4\t ",,terminal_output +794,925053,"TERMINAL",0,0,"5\t ",,terminal_output +795,926095,"TERMINAL",0,0,"6\t ",,terminal_output +796,927141,"TERMINAL",0,0,"71",,terminal_output +797,928443,"TERMINAL",0,0,"8\t ",,terminal_output +798,929235,"TERMINAL",0,0,"10\t ",,terminal_output +799,930357,"TERMINAL",0,0,"1\t ",,terminal_output +800,931305,"TERMINAL",0,0,"2\t ",,terminal_output 
+801,932388,"TERMINAL",0,0,"3\t ",,terminal_output +802,933385,"TERMINAL",0,0,"4\t ",,terminal_output +803,934428,"TERMINAL",0,0,"5\t ",,terminal_output +804,935472,"TERMINAL",0,0,"6\t ",,terminal_output +805,936520,"TERMINAL",0,0,"7\t ",,terminal_output +806,937614,"TERMINAL",0,0,"8\t ",,terminal_output +807,938598,"TERMINAL",0,0,"9\t ",,terminal_output +808,939681,"TERMINAL",0,0,"20\t ",,terminal_output +809,940789,"TERMINAL",0,0,"1\t ",,terminal_output +810,941817,"TERMINAL",0,0,"2\t ",,terminal_output +811,942840,"TERMINAL",0,0,"3\t ",,terminal_output +812,943803,"TERMINAL",0,0,"4\t ",,terminal_output +813,944842,"TERMINAL",0,0,"5\t ",,terminal_output +814,945910,"TERMINAL",0,0,"6\t ",,terminal_output +815,947046,"TERMINAL",0,0,"7\t ",,terminal_output +816,948573,"TERMINAL",0,0,"8\t ",,terminal_output +817,949079,"TERMINAL",0,0,"9\t ",,terminal_output +818,950039,"TERMINAL",0,0,"30\t ",,terminal_output +819,951096,"TERMINAL",0,0,"1\t ",,terminal_output +820,952243,"TERMINAL",0,0,"2\t ",,terminal_output +821,953186,"TERMINAL",0,0,"3\t ",,terminal_output +822,954249,"TERMINAL",0,0,"4\t ",,terminal_output +823,955233,"TERMINAL",0,0,"6\t ",,terminal_output +824,956348,"TERMINAL",0,0,"7\t ",,terminal_output +825,957357,"TERMINAL",0,0,"8\t ",,terminal_output +826,958403,"TERMINAL",0,0,"9\t ",,terminal_output +827,959420,"TERMINAL",0,0,"40\t ",,terminal_output +828,960446,"TERMINAL",0,0,"1\t ",,terminal_output +829,961488,"TERMINAL",0,0,"2\t ",,terminal_output +830,962519,"TERMINAL",0,0,"3\t ",,terminal_output +831,963561,"TERMINAL",0,0,"4\t ",,terminal_output +832,964642,"TERMINAL",0,0,"5\t ",,terminal_output +833,965650,"TERMINAL",0,0,"6\t ",,terminal_output +834,966792,"TERMINAL",0,0,"7\t ",,terminal_output +835,967730,"TERMINAL",0,0,"8\t ",,terminal_output +836,968841,"TERMINAL",0,0,"9\t ",,terminal_output +837,969806,"TERMINAL",0,0,"50\t ",,terminal_output +838,970889,"TERMINAL",0,0,"1\t ",,terminal_output +839,971918,"TERMINAL",0,0,"2\t ",,terminal_output +840,973039,"TERMINAL",0,0,"3\t ",,terminal_output +841,974067,"TERMINAL",0,0,"4\t ",,terminal_output +842,975108,"TERMINAL",0,0,"5\t ",,terminal_output +843,976111,"TERMINAL",0,0,"6\t ",,terminal_output +844,977098,"TERMINAL",0,0,"7\t ",,terminal_output +845,978156,"TERMINAL",0,0,"8\t ",,terminal_output +846,979388,"TERMINAL",0,0,"9\t ",,terminal_output +847,980412,"TERMINAL",0,0,"4:01\t ",,terminal_output +848,981385,"TERMINAL",0,0,"2\t ",,terminal_output +849,982306,"TERMINAL",0,0,"3\t ",,terminal_output +850,983383,"TERMINAL",0,0,"4\t ",,terminal_output +851,984406,"TERMINAL",0,0,"5\t ",,terminal_output +852,985431,"TERMINAL",0,0,"6\t ",,terminal_output +853,986467,"TERMINAL",0,0,"7\t ",,terminal_output +854,987508,"TERMINAL",0,0,"8\t ",,terminal_output +855,988547,"TERMINAL",0,0,"9\t ",,terminal_output +856,989636,"TERMINAL",0,0,"10\t ",,terminal_output +857,990658,"TERMINAL",0,0,"1\t ",,terminal_output +858,991717,"TERMINAL",0,0,"2\t ",,terminal_output +859,992805,"TERMINAL",0,0,"3\t ",,terminal_output +860,993775,"TERMINAL",0,0,"4\t ",,terminal_output +861,994849,"TERMINAL",0,0,"5\t ",,terminal_output +862,995831,"TERMINAL",0,0,"6\t ",,terminal_output +863,996898,"TERMINAL",0,0,"7\t ",,terminal_output +864,997922,"TERMINAL",0,0,"8\t ",,terminal_output +865,998950,"TERMINAL",0,0,"9\t ",,terminal_output +866,1000072,"TERMINAL",0,0,"20\t ",,terminal_output +867,1001040,"TERMINAL",0,0,"1\t ",,terminal_output +868,1002122,"TERMINAL",0,0,"2\t ",,terminal_output +869,1003157,"TERMINAL",0,0,"3\t ",,terminal_output 
+870,1004283,"TERMINAL",0,0,"4\t ",,terminal_output +871,1005299,"TERMINAL",0,0,"5\t ",,terminal_output +872,1006323,"TERMINAL",0,0,"7\t ",,terminal_output +873,1007275,"TERMINAL",0,0,"8\t ",,terminal_output +874,1008417,"TERMINAL",0,0,"9\t ",,terminal_output +875,1009397,"TERMINAL",0,0,"30\t ",,terminal_output +876,1010397,"TERMINAL",0,0,"1\t ",,terminal_output +877,1011438,"TERMINAL",0,0,"2\t ",,terminal_output +878,1012479,"TERMINAL",0,0,"38",,terminal_output +879,1013517,"TERMINAL",0,0,"4\t ",,terminal_output +880,1014563,"TERMINAL",0,0,"5\t ",,terminal_output +881,1015646,"TERMINAL",0,0,"6\t ",,terminal_output +882,1016662,"TERMINAL",0,0,"7\t ",,terminal_output +883,1017788,"TERMINAL",0,0,"8\t ",,terminal_output +884,1018811,"TERMINAL",0,0,"9\t ",,terminal_output +885,1019836,"TERMINAL",0,0,"40\t ",,terminal_output +886,1020860,"TERMINAL",0,0,"1\t ",,terminal_output +887,1021866,"TERMINAL",0,0,"2\t ",,terminal_output +888,1022910,"TERMINAL",0,0,"3\t ",,terminal_output +889,1024033,"TERMINAL",0,0,"4\t ",,terminal_output +890,1025058,"TERMINAL",0,0,"5\t ",,terminal_output +891,1026082,"TERMINAL",0,0,"6\t ",,terminal_output +892,1027126,"TERMINAL",0,0,"7\t ",,terminal_output +893,1028085,"TERMINAL",0,0,"8\t ",,terminal_output +894,1029161,"TERMINAL",0,0,"9\t ",,terminal_output +895,1030282,"TERMINAL",0,0,"50\t ",,terminal_output +896,1031306,"TERMINAL",0,0,"2\t ",,terminal_output +897,1032246,"TERMINAL",0,0,"31",,terminal_output +898,1033291,"TERMINAL",0,0,"4\t ",,terminal_output +899,1034487,"TERMINAL",0,0,"5\t ",,terminal_output +900,1035409,"TERMINAL",0,0,"6\t ",,terminal_output +901,1036440,"TERMINAL",0,0,"7\t ",,terminal_output +902,1037449,"TERMINAL",0,0,"8\t ",,terminal_output +903,1038487,"TERMINAL",0,0,"9\t ",,terminal_output +904,1039538,"TERMINAL",0,0,"5:00\t ",,terminal_output +905,1040563,"TERMINAL",0,0,"1\t ",,terminal_output +906,1041682,"TERMINAL",0,0,"2\t ",,terminal_output +907,1042674,"TERMINAL",0,0,"3\t ",,terminal_output +908,1043697,"TERMINAL",0,0,"4\t ",,terminal_output +909,1044729,"TERMINAL",0,0,"5\t ",,terminal_output +910,1045782,"TERMINAL",0,0,"6\t ",,terminal_output +911,1046872,"TERMINAL",0,0,"7\t ",,terminal_output +912,1047997,"TERMINAL",0,0,"8\t ",,terminal_output +913,1048919,"TERMINAL",0,0,"9\t ",,terminal_output +914,1050048,"TERMINAL",0,0,"10\t ",,terminal_output +915,1051041,"TERMINAL",0,0,"1\t ",,terminal_output +916,1052092,"TERMINAL",0,0,"2\t ",,terminal_output +917,1053117,"TERMINAL",0,0,"3\t ",,terminal_output +918,1054147,"TERMINAL",0,0,"4\t ",,terminal_output +919,1055167,"TERMINAL",0,0,"5\t ",,terminal_output +920,1056290,"TERMINAL",0,0,"6\t ",,terminal_output +921,1057253,"TERMINAL",0,0,"8\t ",,terminal_output +922,1058340,"TERMINAL",0,0,"9\t ",,terminal_output +923,1059412,"TERMINAL",0,0,"20\t ",,terminal_output +924,1060432,"TERMINAL",0,0,"1\t ",,terminal_output +925,1061411,"TERMINAL",0,0,"2\t ",,terminal_output +926,1062480,"TERMINAL",0,0,"3\t ",,terminal_output +927,1063533,"TERMINAL",0,0,"4\t ",,terminal_output +928,1064495,"TERMINAL",0,0,"5\t ",,terminal_output +929,1065552,"TERMINAL",0,0,"6\t ",,terminal_output +930,1066940,"TERMINAL",0,0,"7\t ",,terminal_output +931,1067657,"TERMINAL",0,0,"8\t ",,terminal_output +932,1068642,"TERMINAL",0,0,"9\t ",,terminal_output +933,1069687,"TERMINAL",0,0,"30\t ",,terminal_output +934,1070718,"TERMINAL",0,0,"1\t ",,terminal_output +935,1071880,"TERMINAL",0,0,"2\t ",,terminal_output +936,1072888,"TERMINAL",0,0,"3\t ",,terminal_output +937,1073824,"TERMINAL",0,0,"4\t ",,terminal_output 
+938,1074944,"TERMINAL",0,0,"5\t ",,terminal_output +939,1075956,"TERMINAL",0,0,"6\t ",,terminal_output +940,1077024,"TERMINAL",0,0,"7\t ",,terminal_output +941,1078002,"TERMINAL",0,0,"8\t ",,terminal_output +942,1079129,"TERMINAL",0,0,"9\t ",,terminal_output +943,1080155,"TERMINAL",0,0,"40\t ",,terminal_output +944,1081177,"TERMINAL",0,0,"1\t ",,terminal_output +945,1082199,"TERMINAL",0,0,"2\t ",,terminal_output +946,1083226,"TERMINAL",0,0,"3\t ",,terminal_output +947,1084256,"TERMINAL",0,0,"5\t ",,terminal_output +948,1085285,"TERMINAL",0,0,"6\t ",,terminal_output +949,1086279,"TERMINAL",0,0,"7\t ",,terminal_output +950,1087314,"TERMINAL",0,0,"8\t ",,terminal_output +951,1088354,"TERMINAL",0,0,"9\t ",,terminal_output +952,1089427,"TERMINAL",0,0,"50\t ",,terminal_output +953,1090438,"TERMINAL",0,0,"1\t ",,terminal_output +954,1091472,"TERMINAL",0,0,"2\t ",,terminal_output +955,1092519,"TERMINAL",0,0,"3\t ",,terminal_output +956,1093551,"TERMINAL",0,0,"4\t ",,terminal_output +957,1094591,"TERMINAL",0,0,"5\t ",,terminal_output +958,1095657,"TERMINAL",0,0,"6\t ",,terminal_output +959,1096680,"TERMINAL",0,0,"7\t ",,terminal_output +960,1097712,"TERMINAL",0,0,"87",,terminal_output +961,1098754,"TERMINAL",0,0,"9\t ",,terminal_output +962,1099787,"TERMINAL",0,0,"6:00\t ",,terminal_output +963,1100829,"TERMINAL",0,0,"1\t ",,terminal_output +964,1101875,"TERMINAL",0,0,"2\t ",,terminal_output +965,1102913,"TERMINAL",0,0,"3\t ",,terminal_output +966,1103976,"TERMINAL",0,0,"4\t ",,terminal_output +967,1104988,"TERMINAL",0,0,"5\t ",,terminal_output +968,1106024,"TERMINAL",0,0,"6\t ",,terminal_output +969,1107070,"TERMINAL",0,0,"70",,terminal_output +970,1108142,"TERMINAL",0,0,"8\t ",,terminal_output +971,1109153,"TERMINAL",0,0,"9\t ",,terminal_output +972,1110221,"TERMINAL",0,0,"10\t ",,terminal_output +973,1111274,"TERMINAL",0,0,"2\t ",,terminal_output +974,1112264,"TERMINAL",0,0,"3\t ",,terminal_output +975,1113297,"TERMINAL",0,0,"4\t ",,terminal_output +976,1114346,"TERMINAL",0,0,"5\t ",,terminal_output +977,1115397,"TERMINAL",0,0,"6\t ",,terminal_output +978,1116448,"TERMINAL",0,0,"7\t ",,terminal_output +979,1117477,"TERMINAL",0,0,"8\t ",,terminal_output +980,1118508,"TERMINAL",0,0,"9\t ",,terminal_output +981,1119532,"TERMINAL",0,0,"20\t ",,terminal_output +982,1120577,"TERMINAL",0,0,"1\t ",,terminal_output +983,1121615,"TERMINAL",0,0,"2\t ",,terminal_output +984,1122661,"TERMINAL",0,0,"3\t ",,terminal_output +985,1123703,"TERMINAL",0,0,"4\t ",,terminal_output +986,1124729,"TERMINAL",0,0,"5\t ",,terminal_output +987,1125774,"TERMINAL",0,0,"6\t ",,terminal_output +988,1126824,"TERMINAL",0,0,"7\t ",,terminal_output +989,1127859,"TERMINAL",0,0,"8\t ",,terminal_output +990,1128893,"TERMINAL",0,0,"9\t ",,terminal_output +991,1129932,"TERMINAL",0,0,"30\t ",,terminal_output +992,1130967,"TERMINAL",0,0,"1\t ",,terminal_output +993,1132008,"TERMINAL",0,0,"2\t ",,terminal_output +994,1133051,"TERMINAL",0,0,"3\t ",,terminal_output +995,1134085,"TERMINAL",0,0,"4\t ",,terminal_output +996,1135132,"TERMINAL",0,0,"5\t ",,terminal_output +997,1136170,"TERMINAL",0,0,"6\t ",,terminal_output +998,1137206,"TERMINAL",0,0,"8\t ",,terminal_output +999,1138241,"TERMINAL",0,0,"9\t ",,terminal_output +1000,1139278,"TERMINAL",0,0,"40\t ",,terminal_output +1001,1140310,"TERMINAL",0,0,"1\t ",,terminal_output +1002,1141350,"TERMINAL",0,0,"2\t ",,terminal_output +1003,1142387,"TERMINAL",0,0,"3\t ",,terminal_output +1004,1143461,"TERMINAL",0,0,"4\t ",,terminal_output 
+1005,1143793,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/sample.py",5271,0,"",python,selection_mouse +1006,1143794,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/sample.py",5270,0,"",python,selection_command +1007,1144457,"TERMINAL",0,0,"5\t ",,terminal_output +1008,1145490,"TERMINAL",0,0,"6\t ",,terminal_output +1009,1146527,"TERMINAL",0,0,"7\t ",,terminal_output +1010,1147569,"TERMINAL",0,0,"8\t ",,terminal_output +1011,1148391,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/sample.py",5319,0,"",python,selection_command +1012,1148531,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/sample.py",5369,0,"",python,selection_command +1013,1148613,"TERMINAL",0,0,"9\t ",,terminal_output +1014,1148895,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/sample.py",5319,0,"",python,selection_command +1015,1149067,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/sample.py",5270,0,"",python,selection_command +1016,1149223,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/sample.py",5216,0,"",python,selection_command +1017,1149388,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/sample.py",5168,0,"",python,selection_command +1018,1149641,"TERMINAL",0,0,"50\t ",,terminal_output +1019,1150689,"TERMINAL",0,0,"1\t ",,terminal_output +1020,1151730,"TERMINAL",0,0,"2\t ",,terminal_output +1021,1152768,"TERMINAL",0,0,"3\t ",,terminal_output +1022,1152952,"train_dynamics.py",0,0,"",python,tab +1023,1153796,"TERMINAL",0,0,"4\t ",,terminal_output +1024,1153932,"models/dynamics.py",0,0,"",python,tab +1025,1154834,"TERMINAL",0,0,"5\t ",,terminal_output +1026,1155880,"TERMINAL",0,0,"6\t ",,terminal_output +1027,1156915,"TERMINAL",0,0,"7\t ",,terminal_output +1028,1157715,"train_dynamics.py",0,0,"",python,tab +1029,1157956,"TERMINAL",0,0,"8\t ",,terminal_output +1030,1158994,"TERMINAL",0,0,"9\t ",,terminal_output +1031,1159785,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/sample.py",0,0,"",python,tab +1032,1160040,"TERMINAL",0,0,"7:00\t ",,terminal_output +1033,1161067,"TERMINAL",0,0,"1\t ",,terminal_output +1034,1162109,"TERMINAL",0,0,"2\t ",,terminal_output +1035,1163145,"TERMINAL",0,0,"3\t ",,terminal_output +1036,1164188,"TERMINAL",0,0,"4\t ",,terminal_output +1037,1165228,"TERMINAL",0,0,"6\t ",,terminal_output +1038,1166265,"TERMINAL",0,0,"7\t ",,terminal_output +1039,1167305,"TERMINAL",0,0,"8\t ",,terminal_output +1040,1168345,"TERMINAL",0,0,"9\t ",,terminal_output +1041,1169381,"TERMINAL",0,0,"10\t ",,terminal_output +1042,1169558,"genie.py",0,0,"",python,tab +1043,1170431,"TERMINAL",0,0,"1\t ",,terminal_output +1044,1171458,"TERMINAL",0,0,"2\t ",,terminal_output +1045,1172493,"TERMINAL",0,0,"3\t ",,terminal_output +1046,1173537,"TERMINAL",0,0,"4\t ",,terminal_output +1047,1174576,"TERMINAL",0,0,"5\t ",,terminal_output +1048,1175615,"TERMINAL",0,0,"6\t ",,terminal_output +1049,1176655,"TERMINAL",0,0,"7\t ",,terminal_output +1050,1177693,"TERMINAL",0,0,"8\t ",,terminal_output +1051,1178731,"TERMINAL",0,0,"9\t ",,terminal_output +1052,1179770,"TERMINAL",0,0,"20\t ",,terminal_output +1053,1180804,"TERMINAL",0,0,"1\t ",,terminal_output +1054,1181833,"TERMINAL",0,0,"2\t ",,terminal_output +1055,1182871,"TERMINAL",0,0,"3\t ",,terminal_output +1056,1183910,"TERMINAL",0,0,"4\t ",,terminal_output +1057,1184947,"TERMINAL",0,0,"5\t ",,terminal_output +1058,1185994,"TERMINAL",0,0,"6\t ",,terminal_output +1059,1187038,"TERMINAL",0,0,"7\t 
",,terminal_output +1060,1188079,"TERMINAL",0,0,"8\t ",,terminal_output +1061,1189115,"TERMINAL",0,0,"9\t ",,terminal_output +1062,1190153,"TERMINAL",0,0,"30\t ",,terminal_output +1063,1191184,"TERMINAL",0,0,"1\t ",,terminal_output +1064,1192223,"TERMINAL",0,0,"3\t ",,terminal_output +1065,1193266,"TERMINAL",0,0,"4\t ",,terminal_output +1066,1194317,"TERMINAL",0,0,"5\t ",,terminal_output +1067,1195349,"TERMINAL",0,0,"6\t ",,terminal_output +1068,1196393,"TERMINAL",0,0,"7\t ",,terminal_output +1069,1196413,"genie.py",5788,0,"",python,selection_mouse +1070,1196436,"genie.py",5787,0,"",python,selection_command +1071,1197002,"genie.py",5712,0,"",python,selection_mouse +1072,1197467,"TERMINAL",0,0,"8\t ",,terminal_output +1073,1198463,"TERMINAL",0,0,"9\t ",,terminal_output +1074,1199509,"TERMINAL",0,0,"40\t ",,terminal_output +1075,1200598,"TERMINAL",0,0,"1\t ",,terminal_output +1076,1201584,"TERMINAL",0,0,"2\t ",,terminal_output +1077,1202626,"TERMINAL",0,0,"3\t ",,terminal_output +1078,1203662,"TERMINAL",0,0,"4\t ",,terminal_output +1079,1204704,"TERMINAL",0,0,"5\t ",,terminal_output +1080,1205730,"TERMINAL",0,0,"6\t ",,terminal_output +1081,1206769,"TERMINAL",0,0,"7\t ",,terminal_output +1082,1207808,"TERMINAL",0,0,"8\t ",,terminal_output +1083,1208847,"TERMINAL",0,0,"9\t ",,terminal_output +1084,1209890,"TERMINAL",0,0,"50\t ",,terminal_output +1085,1210931,"TERMINAL",0,0,"1\t ",,terminal_output +1086,1211120,"models/dynamics.py",0,0,"",python,tab +1087,1211976,"TERMINAL",0,0,"2\t ",,terminal_output +1088,1213016,"TERMINAL",0,0,"3\t ",,terminal_output +1089,1214053,"TERMINAL",0,0,"4\t ",,terminal_output +1090,1215092,"TERMINAL",0,0,"5\t ",,terminal_output +1091,1215323,"TERMINAL",0,0,"srun",,terminal_focus +1092,1216137,"TERMINAL",0,0,"6\t ",,terminal_output +1093,1217206,"TERMINAL",0,0,"7\t ",,terminal_output +1094,1218210,"TERMINAL",0,0,"9\t ",,terminal_output +1095,1219251,"TERMINAL",0,0,"8:00\t ",,terminal_output +1096,1220309,"models/dynamics.py",0,0,"",python,tab +1097,1220349,"TERMINAL",0,0,"1\t ",,terminal_output +1098,1221336,"TERMINAL",0,0,"2\t ",,terminal_output +1099,1222371,"TERMINAL",0,0,"3\t ",,terminal_output +1100,1223413,"TERMINAL",0,0,"4\t ",,terminal_output +1101,1224457,"TERMINAL",0,0,"5\t ",,terminal_output +1102,1225497,"TERMINAL",0,0,"6\t ",,terminal_output +1103,1226537,"TERMINAL",0,0,"7\t ",,terminal_output +1104,1227580,"TERMINAL",0,0,"8\t ",,terminal_output +1105,1228622,"TERMINAL",0,0,"9\t ",,terminal_output +1106,1229661,"TERMINAL",0,0,"10\t ",,terminal_output +1107,1230787,"TERMINAL",0,0,"1\t ",,terminal_output +1108,1231860,"TERMINAL",0,0,"2\t ",,terminal_output +1109,1232902,"TERMINAL",0,0,"3\t ",,terminal_output +1110,1233996,"TERMINAL",0,0,"4\t ",,terminal_output +1111,1235010,"TERMINAL",0,0,"5\t ",,terminal_output +1112,1236023,"TERMINAL",0,0,"6\t ",,terminal_output +1113,1237045,"TERMINAL",0,0,"7\t ",,terminal_output +1114,1238159,"TERMINAL",0,0,"8\t ",,terminal_output +1115,1239179,"TERMINAL",0,0,"9\t ",,terminal_output +1116,1240075,"TERMINAL",0,0,"20\t ",,terminal_output +1117,1241120,"TERMINAL",0,0,"1\t ",,terminal_output +1118,1242212,"TERMINAL",0,0,"2\t ",,terminal_output +1119,1243305,"TERMINAL",0,0,"3\t ",,terminal_output +1120,1244241,"TERMINAL",0,0,"5\t ",,terminal_output +1121,1245285,"TERMINAL",0,0,"6\t ",,terminal_output +1122,1246323,"TERMINAL",0,0,"7\t ",,terminal_output +1123,1247361,"TERMINAL",0,0,"8\t ",,terminal_output +1124,1248402,"TERMINAL",0,0,"9\t ",,terminal_output +1125,1249586,"TERMINAL",0,0,"301",,terminal_output 
+1126,1250628,"TERMINAL",0,0,"1\t ",,terminal_output +1127,1251671,"TERMINAL",0,0,"2\t ",,terminal_output +1128,1252718,"TERMINAL",0,0,"3\t ",,terminal_output +1129,1253751,"TERMINAL",0,0,"4\t ",,terminal_output +1130,1254791,"TERMINAL",0,0,"5\t ",,terminal_output +1131,1255828,"TERMINAL",0,0,"6\t ",,terminal_output +1132,1256872,"TERMINAL",0,0,"7\t ",,terminal_output +1133,1257915,"TERMINAL",0,0,"8\t ",,terminal_output +1134,1258951,"TERMINAL",0,0,"9\t ",,terminal_output +1135,1259991,"TERMINAL",0,0,"40\t ",,terminal_output +1136,1261038,"TERMINAL",0,0,"1\t ",,terminal_output +1137,1262075,"TERMINAL",0,0,"2\t ",,terminal_output +1138,1263119,"TERMINAL",0,0,"3\t ",,terminal_output +1139,1264154,"TERMINAL",0,0,"4\t ",,terminal_output +1140,1265197,"TERMINAL",0,0,"5\t ",,terminal_output +1141,1266243,"TERMINAL",0,0,"7\t ",,terminal_output +1142,1267280,"TERMINAL",0,0,"8\t ",,terminal_output +1143,1268325,"TERMINAL",0,0,"9\t ",,terminal_output +1144,1269360,"TERMINAL",0,0,"50\t ",,terminal_output +1145,1270405,"TERMINAL",0,0,"1\t ",,terminal_output +1146,1271481,"TERMINAL",0,0,"2\t ",,terminal_output +1147,1272485,"TERMINAL",0,0,"3\t ",,terminal_output +1148,1273948,"TERMINAL",0,0,"410",,terminal_output +1149,1274986,"TERMINAL",0,0,"5\t ",,terminal_output +1150,1276028,"TERMINAL",0,0,"6\t ",,terminal_output +1151,1277074,"TERMINAL",0,0,"7\t ",,terminal_output +1152,1278122,"TERMINAL",0,0,"8\t ",,terminal_output +1153,1279185,"TERMINAL",0,0,"9\t ",,terminal_output +1154,1280198,"TERMINAL",0,0,"9:00\t ",,terminal_output +1155,1281240,"TERMINAL",0,0,"2\t ",,terminal_output +1156,1282286,"TERMINAL",0,0,"3\t ",,terminal_output +1157,1283326,"TERMINAL",0,0,"4\t ",,terminal_output +1158,1284362,"TERMINAL",0,0,"5\t ",,terminal_output +1159,1285410,"TERMINAL",0,0,"6\t ",,terminal_output +1160,1286480,"TERMINAL",0,0,"7\t ",,terminal_output +1161,1287485,"TERMINAL",0,0,"8\t ",,terminal_output +1162,1288525,"TERMINAL",0,0,"9\t ",,terminal_output +1163,1289565,"TERMINAL",0,0,"10\t ",,terminal_output +1164,1290605,"TERMINAL",0,0,"1\t ",,terminal_output +1165,1291645,"TERMINAL",0,0,"2\t ",,terminal_output +1166,1292691,"TERMINAL",0,0,"3\t ",,terminal_output +1167,1293731,"TERMINAL",0,0,"4\t ",,terminal_output +1168,1294772,"TERMINAL",0,0,"5\t ",,terminal_output +1169,1295815,"TERMINAL",0,0,"6\t ",,terminal_output +1170,1296868,"TERMINAL",0,0,"71",,terminal_output +1171,1297903,"TERMINAL",0,0,"8\t ",,terminal_output +1172,1298942,"TERMINAL",0,0,"9\t ",,terminal_output +1173,1299984,"TERMINAL",0,0,"20\t ",,terminal_output +1174,1301023,"TERMINAL",0,0,"1\t ",,terminal_output +1175,1302064,"TERMINAL",0,0,"2\t ",,terminal_output +1176,1303109,"TERMINAL",0,0,"3\t ",,terminal_output +1177,1304145,"TERMINAL",0,0,"4\t ",,terminal_output +1178,1305186,"TERMINAL",0,0,"5\t ",,terminal_output +1179,1306224,"TERMINAL",0,0,"7\t ",,terminal_output +1180,1307263,"TERMINAL",0,0,"8\t ",,terminal_output +1181,1308302,"TERMINAL",0,0,"9\t ",,terminal_output +1182,1309339,"TERMINAL",0,0,"30\t ",,terminal_output +1183,1310380,"TERMINAL",0,0,"1\t ",,terminal_output +1184,1311421,"TERMINAL",0,0,"2\t ",,terminal_output +1185,1312498,"TERMINAL",0,0,"3\t ",,terminal_output +1186,1313502,"TERMINAL",0,0,"4\t ",,terminal_output +1187,1314548,"TERMINAL",0,0,"5\t ",,terminal_output +1188,1315579,"TERMINAL",0,0,"6\t ",,terminal_output +1189,1316620,"TERMINAL",0,0,"7\t ",,terminal_output +1190,1317660,"TERMINAL",0,0,"8\t ",,terminal_output +1191,1318702,"TERMINAL",0,0,"9\t ",,terminal_output +1192,1319738,"TERMINAL",0,0,"40\t 
",,terminal_output +1193,1320782,"TERMINAL",0,0,"1\t ",,terminal_output +1194,1321820,"TERMINAL",0,0,"2\t ",,terminal_output +1195,1322862,"TERMINAL",0,0,"3\t ",,terminal_output +1196,1323890,"TERMINAL",0,0,"4\t ",,terminal_output +1197,1324933,"TERMINAL",0,0,"5\t ",,terminal_output +1198,1325971,"TERMINAL",0,0,"6\t ",,terminal_output +1199,1327006,"TERMINAL",0,0,"7\t ",,terminal_output +1200,1328046,"TERMINAL",0,0,"8\t ",,terminal_output +1201,1329081,"TERMINAL",0,0,"9\t ",,terminal_output +1202,1330121,"TERMINAL",0,0,"50\t ",,terminal_output +1203,1331154,"TERMINAL",0,0,"1\t ",,terminal_output +1204,1332195,"TERMINAL",0,0,"2\t ",,terminal_output +1205,1333235,"TERMINAL",0,0,"4\t ",,terminal_output +1206,1334271,"TERMINAL",0,0,"5\t ",,terminal_output +1207,1335313,"TERMINAL",0,0,"6\t ",,terminal_output +1208,1336347,"TERMINAL",0,0,"7\t ",,terminal_output +1209,1337381,"TERMINAL",0,0,"8\t ",,terminal_output +1210,1338420,"TERMINAL",0,0,"9\t ",,terminal_output +1211,1339496,"TERMINAL",0,0,"20:00\t ",,terminal_output +1212,1340498,"TERMINAL",0,0,"1\t ",,terminal_output +1213,1341537,"TERMINAL",0,0,"2\t ",,terminal_output +1214,1342577,"TERMINAL",0,0,"3\t ",,terminal_output +1215,1343608,"TERMINAL",0,0,"4\t ",,terminal_output +1216,1344648,"TERMINAL",0,0,"5\t ",,terminal_output +1217,1345688,"TERMINAL",0,0,"6\t ",,terminal_output +1218,1346727,"TERMINAL",0,0,"7\t ",,terminal_output +1219,1348033,"TERMINAL",0,0,"82",,terminal_output +1220,1349071,"TERMINAL",0,0,"9 9",,terminal_output +1221,1350114,"TERMINAL",0,0,"10\t ",,terminal_output +1222,1351147,"TERMINAL",0,0,"1\t ",,terminal_output +1223,1352184,"TERMINAL",0,0,"2\t ",,terminal_output +1224,1353220,"TERMINAL",0,0,"4\t ",,terminal_output +1225,1354258,"TERMINAL",0,0,"5\t ",,terminal_output +1226,1355311,"TERMINAL",0,0,"6\t ",,terminal_output +1227,1356335,"TERMINAL",0,0,"7\t ",,terminal_output +1228,1357372,"TERMINAL",0,0,"8\t ",,terminal_output +1229,1358406,"TERMINAL",0,0,"9\t ",,terminal_output +1230,1359446,"TERMINAL",0,0,"20\t ",,terminal_output +1231,1360486,"TERMINAL",0,0,"1\t ",,terminal_output +1232,1361526,"TERMINAL",0,0,"2\t ",,terminal_output +1233,1362565,"TERMINAL",0,0,"3\t ",,terminal_output +1234,1363619,"TERMINAL",0,0,"4\t ",,terminal_output +1235,1364652,"TERMINAL",0,0,"5\t ",,terminal_output +1236,1365692,"TERMINAL",0,0,"6\t ",,terminal_output +1237,1366726,"TERMINAL",0,0,"7\t ",,terminal_output +1238,1367768,"TERMINAL",0,0,"8\t ",,terminal_output +1239,1368812,"TERMINAL",0,0,"9\t ",,terminal_output +1240,1370351,"TERMINAL",0,0,"30\t ",,terminal_output +1241,1371385,"TERMINAL",0,0,"2\t ",,terminal_output +1242,1372428,"TERMINAL",0,0,"3\t ",,terminal_output +1243,1373506,"TERMINAL",0,0,"4\t ",,terminal_output +1244,1374511,"TERMINAL",0,0,"58",,terminal_output +1245,1375544,"TERMINAL",0,0,"6\t ",,terminal_output +1246,1376580,"TERMINAL",0,0,"7\t ",,terminal_output +1247,1377623,"TERMINAL",0,0,"8\t ",,terminal_output +1248,1378661,"TERMINAL",0,0,"9\t ",,terminal_output +1249,1379704,"TERMINAL",0,0,"40\t ",,terminal_output +1250,1380745,"TERMINAL",0,0,"1\t ",,terminal_output +1251,1381784,"TERMINAL",0,0,"2\t ",,terminal_output +1252,1382824,"TERMINAL",0,0,"310",,terminal_output +1253,1383869,"TERMINAL",0,0,"4\t ",,terminal_output +1254,1384909,"TERMINAL",0,0,"5\t ",,terminal_output +1255,1385941,"TERMINAL",0,0,"6\t ",,terminal_output +1256,1386989,"TERMINAL",0,0,"7\t ",,terminal_output +1257,1388028,"TERMINAL",0,0,"8\t ",,terminal_output +1258,1389068,"TERMINAL",0,0,"9\t ",,terminal_output +1259,1390100,"TERMINAL",0,0,"50\t 
",,terminal_output +1260,1391136,"TERMINAL",0,0,"1\t ",,terminal_output +1261,1392172,"TERMINAL",0,0,"2\t ",,terminal_output +1262,1393210,"TERMINAL",0,0,"4\t ",,terminal_output +1263,1394248,"TERMINAL",0,0,"5\t ",,terminal_output +1264,1395285,"TERMINAL",0,0,"6\t ",,terminal_output +1265,1396320,"TERMINAL",0,0,"7\t ",,terminal_output +1266,1397360,"TERMINAL",0,0,"81",,terminal_output +1267,1398404,"TERMINAL",0,0,"9\t ",,terminal_output +1268,1399438,"TERMINAL",0,0,"1:00\t ",,terminal_output +1269,1400514,"TERMINAL",0,0,"1\t ",,terminal_output +1270,1401515,"TERMINAL",0,0,"2\t ",,terminal_output +1271,1402558,"TERMINAL",0,0,"3\t ",,terminal_output +1272,1403592,"TERMINAL",0,0,"4\t ",,terminal_output +1273,1404631,"TERMINAL",0,0,"5\t ",,terminal_output +1274,1405669,"TERMINAL",0,0,"6\t ",,terminal_output +1275,1406709,"TERMINAL",0,0,"70",,terminal_output +1276,1407744,"TERMINAL",0,0,"8\t ",,terminal_output +1277,1408782,"TERMINAL",0,0,"9\t ",,terminal_output +1278,1409820,"TERMINAL",0,0,"10\t ",,terminal_output +1279,1410858,"TERMINAL",0,0,"1\t ",,terminal_output +1280,1411897,"TERMINAL",0,0,"2\t ",,terminal_output +1281,1412935,"TERMINAL",0,0,"3\t ",,terminal_output +1282,1413973,"TERMINAL",0,0,"4\t ",,terminal_output +1283,1415015,"TERMINAL",0,0,"5\t ",,terminal_output +1284,1416050,"TERMINAL",0,0,"6\t ",,terminal_output +1285,1417094,"TERMINAL",0,0,"7\t ",,terminal_output +1286,1418131,"TERMINAL",0,0,"8\t ",,terminal_output +1287,1419170,"TERMINAL",0,0,"9\t ",,terminal_output +1288,1420212,"TERMINAL",0,0,"21\t ",,terminal_output +1289,1421246,"TERMINAL",0,0,"2\t ",,terminal_output +1290,1422296,"TERMINAL",0,0,"3\t ",,terminal_output +1291,1423331,"TERMINAL",0,0,"4\t ",,terminal_output +1292,1424354,"TERMINAL",0,0,"5\t ",,terminal_output +1293,1425400,"TERMINAL",0,0,"6\t ",,terminal_output +1294,1426443,"TERMINAL",0,0,"7\t ",,terminal_output +1295,1427521,"TERMINAL",0,0,"8\t ",,terminal_output +1296,1428523,"TERMINAL",0,0,"9\t ",,terminal_output +1297,1429564,"TERMINAL",0,0,"30\t ",,terminal_output +1298,1430603,"TERMINAL",0,0,"1\t ",,terminal_output +1299,1431645,"TERMINAL",0,0,"2\t ",,terminal_output +1300,1432682,"TERMINAL",0,0,"3\t ",,terminal_output +1301,1433721,"TERMINAL",0,0,"4\t ",,terminal_output +1302,1434769,"TERMINAL",0,0,"5\t ",,terminal_output +1303,1435810,"TERMINAL",0,0,"6\t ",,terminal_output +1304,1436851,"TERMINAL",0,0,"7\t ",,terminal_output +1305,1437884,"TERMINAL",0,0,"8\t ",,terminal_output +1306,1438929,"TERMINAL",0,0,"9\t ",,terminal_output +1307,1439971,"TERMINAL",0,0,"40\t ",,terminal_output +1308,1441011,"TERMINAL",0,0,"1\t ",,terminal_output +1309,1442047,"TERMINAL",0,0,"2\t ",,terminal_output +1310,1443083,"TERMINAL",0,0,"3\t ",,terminal_output +1311,1444124,"TERMINAL",0,0,"4\t ",,terminal_output +1312,1445166,"TERMINAL",0,0,"5\t ",,terminal_output +1313,1446201,"TERMINAL",0,0,"7\t ",,terminal_output +1314,1447241,"TERMINAL",0,0,"8\t ",,terminal_output +1315,1448281,"TERMINAL",0,0,"9\t ",,terminal_output +1316,1449323,"TERMINAL",0,0,"50\t ",,terminal_output +1317,1450360,"TERMINAL",0,0,"1\t ",,terminal_output +1318,1451401,"TERMINAL",0,0,"2\t ",,terminal_output +1319,1452439,"TERMINAL",0,0,"3\t ",,terminal_output +1320,1453475,"TERMINAL",0,0,"4\t ",,terminal_output +1321,1454553,"TERMINAL",0,0,"5\t ",,terminal_output +1322,1455557,"TERMINAL",0,0,"6\t ",,terminal_output +1323,1456601,"TERMINAL",0,0,"7\t ",,terminal_output +1324,1457636,"TERMINAL",0,0,"8\t ",,terminal_output +1325,1458672,"TERMINAL",0,0,"9\t ",,terminal_output 
+1326,1459715,"TERMINAL",0,0,"2:00\t ",,terminal_output +1327,1460745,"TERMINAL",0,0,"1\t ",,terminal_output +1328,1461782,"TERMINAL",0,0,"2\t ",,terminal_output +1329,1462820,"TERMINAL",0,0,"3\t ",,terminal_output +1330,1463859,"TERMINAL",0,0,"4\t ",,terminal_output +1331,1464904,"TERMINAL",0,0,"5\t ",,terminal_output +1332,1465938,"TERMINAL",0,0,"6\t ",,terminal_output +1333,1466979,"TERMINAL",0,0,"7\t ",,terminal_output +1334,1468018,"TERMINAL",0,0,"8\t ",,terminal_output +1335,1469057,"TERMINAL",0,0,"9\t ",,terminal_output +1336,1470102,"TERMINAL",0,0,"10\t ",,terminal_output +1337,1471140,"TERMINAL",0,0,"1\t ",,terminal_output +1338,1472176,"TERMINAL",0,0,"2\t ",,terminal_output +1339,1473216,"TERMINAL",0,0,"4\t ",,terminal_output +1340,1474256,"TERMINAL",0,0,"5\t ",,terminal_output +1341,1475287,"TERMINAL",0,0,"6\t ",,terminal_output +1342,1476325,"TERMINAL",0,0,"7\t ",,terminal_output +1343,1477365,"TERMINAL",0,0,"8\t ",,terminal_output +1344,1478398,"TERMINAL",0,0,"9\t ",,terminal_output +1345,1479435,"TERMINAL",0,0,"20\t ",,terminal_output +1346,1480474,"TERMINAL",0,0,"1\t ",,terminal_output +1347,1481547,"TERMINAL",0,0,"2\t ",,terminal_output +1348,1482549,"TERMINAL",0,0,"3\t ",,terminal_output +1349,1483584,"TERMINAL",0,0,"4\t ",,terminal_output +1350,1484622,"TERMINAL",0,0,"5\t ",,terminal_output +1351,1485659,"TERMINAL",0,0,"6\t ",,terminal_output +1352,1486707,"TERMINAL",0,0,"7\t ",,terminal_output +1353,1487747,"TERMINAL",0,0,"8\t ",,terminal_output +1354,1488784,"TERMINAL",0,0,"9\t ",,terminal_output +1355,1489822,"TERMINAL",0,0,"301",,terminal_output +1356,1491161,"TERMINAL",0,0,"17",,terminal_output +1357,1492203,"TERMINAL",0,0,"3\t ",,terminal_output +1358,1493239,"TERMINAL",0,0,"4\t ",,terminal_output +1359,1494292,"TERMINAL",0,0,"5\t ",,terminal_output +1360,1495326,"TERMINAL",0,0,"6\t ",,terminal_output +1361,1496369,"TERMINAL",0,0,"7\t ",,terminal_output +1362,1497405,"TERMINAL",0,0,"8\t ",,terminal_output +1363,1498446,"TERMINAL",0,0,"9\t ",,terminal_output +1364,1499487,"TERMINAL",0,0,"402",,terminal_output +1365,1500571,"TERMINAL",0,0,"1\t ",,terminal_output +1366,1501571,"TERMINAL",0,0,"2\t ",,terminal_output +1367,1502614,"TERMINAL",0,0,"3\t ",,terminal_output +1368,1503649,"TERMINAL",0,0,"4\t ",,terminal_output +1369,1504687,"TERMINAL",0,0,"5\t ",,terminal_output +1370,1505725,"TERMINAL",0,0,"6\t ",,terminal_output +1371,1506765,"TERMINAL",0,0,"7\t ",,terminal_output +1372,1507802,"TERMINAL",0,0,"8\t ",,terminal_output +1373,1508850,"TERMINAL",0,0,"9\t ",,terminal_output +1374,1509892,"TERMINAL",0,0,"50\t ",,terminal_output +1375,1510922,"TERMINAL",0,0,"1\t ",,terminal_output +1376,1511959,"TERMINAL",0,0,"2\t ",,terminal_output +1377,1513001,"TERMINAL",0,0,"3\t ",,terminal_output +1378,1514039,"TERMINAL",0,0,"4\t ",,terminal_output +1379,1515080,"TERMINAL",0,0,"5\t ",,terminal_output +1380,1516124,"TERMINAL",0,0,"6\t ",,terminal_output +1381,1517160,"TERMINAL",0,0,"7\t ",,terminal_output +1382,1518202,"TERMINAL",0,0,"8\t ",,terminal_output +1383,1519242,"TERMINAL",0,0,"3:00\t ",,terminal_output +1384,1520285,"TERMINAL",0,0,"1\t ",,terminal_output +1385,1521323,"TERMINAL",0,0,"2\t ",,terminal_output +1386,1522365,"TERMINAL",0,0,"3\t ",,terminal_output +1387,1523408,"TERMINAL",0,0,"4\t ",,terminal_output +1388,1524450,"TERMINAL",0,0,"5\t ",,terminal_output +1389,1525491,"TERMINAL",0,0,"6\t ",,terminal_output +1390,1526566,"TERMINAL",0,0,"7\t ",,terminal_output +1391,1527568,"TERMINAL",0,0,"800",,terminal_output +1392,1528610,"TERMINAL",0,0,"9\t ",,terminal_output 
+1393,1529648,"TERMINAL",0,0,"10\t ",,terminal_output +1394,1530690,"TERMINAL",0,0,"1\t ",,terminal_output +1395,1531731,"TERMINAL",0,0,"2\t ",,terminal_output +1396,1532773,"TERMINAL",0,0,"3\t ",,terminal_output +1397,1533816,"TERMINAL",0,0,"4\t ",,terminal_output +1398,1534856,"TERMINAL",0,0,"5\t ",,terminal_output +1399,1535903,"TERMINAL",0,0,"6\t ",,terminal_output +1400,1536940,"TERMINAL",0,0,"7\t ",,terminal_output +1401,1537978,"TERMINAL",0,0,"83",,terminal_output +1402,1539014,"TERMINAL",0,0,"9\t ",,terminal_output +1403,1540065,"TERMINAL",0,0,"20\t ",,terminal_output +1404,1541104,"TERMINAL",0,0,"1\t ",,terminal_output +1405,1542125,"TERMINAL",0,0,"2\t ",,terminal_output +1406,1543171,"TERMINAL",0,0,"3\t ",,terminal_output +1407,1544212,"TERMINAL",0,0,"5\t ",,terminal_output +1408,1545250,"TERMINAL",0,0,"6\t ",,terminal_output +1409,1546291,"TERMINAL",0,0,"7\t ",,terminal_output +1410,1547322,"TERMINAL",0,0,"8\t ",,terminal_output +1411,1548358,"TERMINAL",0,0,"9\t ",,terminal_output +1412,1549402,"TERMINAL",0,0,"30\t ",,terminal_output +1413,1550466,"TERMINAL",0,0,"1\t ",,terminal_output +1414,1551492,"TERMINAL",0,0,"2\t ",,terminal_output +1415,1552567,"TERMINAL",0,0,"3\t ",,terminal_output +1416,1553569,"TERMINAL",0,0,"4\t ",,terminal_output +1417,1554613,"TERMINAL",0,0,"5\t ",,terminal_output +1418,1555658,"TERMINAL",0,0,"6\t ",,terminal_output +1419,1556690,"TERMINAL",0,0,"7\t ",,terminal_output +1420,1557729,"TERMINAL",0,0,"8\t ",,terminal_output +1421,1558763,"TERMINAL",0,0,"9\t ",,terminal_output +1422,1559803,"TERMINAL",0,0,"40\t ",,terminal_output +1423,1560934,"TERMINAL",0,0,"1\t ",,terminal_output +1424,1561885,"TERMINAL",0,0,"2\t ",,terminal_output +1425,1563025,"TERMINAL",0,0,"3\t ",,terminal_output +1426,1564011,"TERMINAL",0,0,"4\t ",,terminal_output +1427,1565073,"TERMINAL",0,0,"5\t ",,terminal_output +1428,1566156,"TERMINAL",0,0,"6\t ",,terminal_output +1429,1567082,"TERMINAL",0,0,"7\t ",,terminal_output +1430,1568117,"TERMINAL",0,0,"8\t ",,terminal_output +1431,1569164,"TERMINAL",0,0,"9\t ",,terminal_output +1432,1570309,"TERMINAL",0,0,"50\t ",,terminal_output +1433,1571354,"TERMINAL",0,0,"2\t ",,terminal_output +1434,1572346,"TERMINAL",0,0,"3\t ",,terminal_output +1435,1573446,"TERMINAL",0,0,"4\t ",,terminal_output +1436,1574630,"TERMINAL",0,0,"5\t ",,terminal_output +1437,1575497,"TERMINAL",0,0,"6\t ",,terminal_output +1438,1576442,"TERMINAL",0,0,"7\t ",,terminal_output +1439,1577460,"TERMINAL",0,0,"8\t ",,terminal_output +1440,1578505,"TERMINAL",0,0,"9\t ",,terminal_output +1441,1579551,"TERMINAL",0,0,"4:00\t ",,terminal_output +1442,1580611,"TERMINAL",0,0,"1\t ",,terminal_output +1443,1581612,"TERMINAL",0,0,"2\t ",,terminal_output +1444,1582648,"TERMINAL",0,0,"3\t ",,terminal_output +1445,1583686,"TERMINAL",0,0,"4\t ",,terminal_output +1446,1584725,"TERMINAL",0,0,"5\t ",,terminal_output +1447,1585762,"TERMINAL",0,0,"6\t ",,terminal_output +1448,1586815,"TERMINAL",0,0,"72",,terminal_output +1449,1587859,"TERMINAL",0,0,"8\t ",,terminal_output +1450,1588902,"TERMINAL",0,0,"9\t ",,terminal_output +1451,1589914,"TERMINAL",0,0,"10\t ",,terminal_output +1452,1590954,"TERMINAL",0,0,"1\t ",,terminal_output +1453,1591992,"TERMINAL",0,0,"2\t ",,terminal_output +1454,1593041,"TERMINAL",0,0,"3\t ",,terminal_output +1455,1594081,"TERMINAL",0,0,"4\t ",,terminal_output +1456,1595124,"TERMINAL",0,0,"5\t ",,terminal_output +1457,1596163,"TERMINAL",0,0,"6\t ",,terminal_output +1458,1597198,"TERMINAL",0,0,"7\t ",,terminal_output +1459,1598249,"TERMINAL",0,0,"9\t 
",,terminal_output +1460,1599376,"TERMINAL",0,0,"20\t ",,terminal_output +1461,1600335,"TERMINAL",0,0,"1\t ",,terminal_output +1462,1601364,"TERMINAL",0,0,"2\t ",,terminal_output +1463,1602400,"TERMINAL",0,0,"3\t ",,terminal_output +1464,1603435,"TERMINAL",0,0,"4\t ",,terminal_output +1465,1604471,"TERMINAL",0,0,"5\t ",,terminal_output +1466,1605514,"TERMINAL",0,0,"6\t ",,terminal_output +1467,1606600,"TERMINAL",0,0,"7\t ",,terminal_output +1468,1607752,"TERMINAL",0,0,"81",,terminal_output +1469,1608784,"TERMINAL",0,0,"9\t ",,terminal_output +1470,1609824,"TERMINAL",0,0,"30\t ",,terminal_output +1471,1610868,"TERMINAL",0,0,"1\t ",,terminal_output +1472,1611972,"TERMINAL",0,0,"2\t ",,terminal_output +1473,1613013,"TERMINAL",0,0,"3\t ",,terminal_output +1474,1614046,"TERMINAL",0,0,"4\t ",,terminal_output +1475,1615087,"TERMINAL",0,0,"5\t ",,terminal_output +1476,1616126,"TERMINAL",0,0,"6\t ",,terminal_output +1477,1617164,"TERMINAL",0,0,"7\t ",,terminal_output +1478,1618214,"TERMINAL",0,0,"9\t ",,terminal_output +1479,1619252,"TERMINAL",0,0,"40\t ",,terminal_output +1480,1620290,"TERMINAL",0,0,"1\t ",,terminal_output +1481,1621331,"TERMINAL",0,0,"2\t ",,terminal_output +1482,1622375,"TERMINAL",0,0,"3\t ",,terminal_output +1483,1623412,"TERMINAL",0,0,"4\t ",,terminal_output +1484,1624453,"TERMINAL",0,0,"5\t ",,terminal_output +1485,1625497,"TERMINAL",0,0,"6\t ",,terminal_output +1486,1626543,"TERMINAL",0,0,"7\t ",,terminal_output +1487,1627624,"TERMINAL",0,0,"8\t ",,terminal_output +1488,1628623,"TERMINAL",0,0,"9\t ",,terminal_output +1489,1629666,"TERMINAL",0,0,"50\t ",,terminal_output +1490,1630705,"TERMINAL",0,0,"1\t ",,terminal_output +1491,1631742,"TERMINAL",0,0,"2\t ",,terminal_output +1492,1632779,"TERMINAL",0,0,"3\t ",,terminal_output +1493,1633816,"TERMINAL",0,0,"4\t ",,terminal_output +1494,1634854,"TERMINAL",0,0,"5\t ",,terminal_output +1495,1635892,"TERMINAL",0,0,"6\t ",,terminal_output +1496,1636928,"TERMINAL",0,0,"7\t ",,terminal_output +1497,1637968,"TERMINAL",0,0,"8\t ",,terminal_output +1498,1639007,"TERMINAL",0,0,"9\t ",,terminal_output +1499,1640043,"TERMINAL",0,0,"5:00\t ",,terminal_output +1500,1641081,"TERMINAL",0,0,"1\t ",,terminal_output +1501,1642118,"TERMINAL",0,0,"2\t ",,terminal_output +1502,1643159,"TERMINAL",0,0,"3\t ",,terminal_output +1503,1644196,"TERMINAL",0,0,"4\t ",,terminal_output +1504,1645244,"TERMINAL",0,0,"6\t ",,terminal_output +1505,1646279,"TERMINAL",0,0,"72",,terminal_output +1506,1647325,"TERMINAL",0,0,"8\t ",,terminal_output +1507,1648351,"TERMINAL",0,0,"9\t ",,terminal_output +1508,1649393,"TERMINAL",0,0,"10\t ",,terminal_output +1509,1650479,"TERMINAL",0,0,"1\t ",,terminal_output +1510,1651508,"TERMINAL",0,0,"2\t ",,terminal_output +1511,1652527,"TERMINAL",0,0,"3\t ",,terminal_output +1512,1653565,"TERMINAL",0,0,"4\t ",,terminal_output +1513,1654645,"TERMINAL",0,0,"5\t ",,terminal_output +1514,1655646,"TERMINAL",0,0,"6\t ",,terminal_output +1515,1656686,"TERMINAL",0,0,"7\t ",,terminal_output +1516,1657725,"TERMINAL",0,0,"8\t ",,terminal_output +1517,1658767,"TERMINAL",0,0,"9\t ",,terminal_output +1518,1659815,"TERMINAL",0,0,"20\t ",,terminal_output +1519,1660856,"TERMINAL",0,0,"1\t ",,terminal_output +1520,1661893,"TERMINAL",0,0,"2\t ",,terminal_output +1521,1662934,"TERMINAL",0,0,"3\t ",,terminal_output +1522,1663972,"TERMINAL",0,0,"4\t ",,terminal_output +1523,1665018,"TERMINAL",0,0,"5\t ",,terminal_output +1524,1666054,"TERMINAL",0,0,"6\t ",,terminal_output +1525,1667091,"TERMINAL",0,0,"7\t ",,terminal_output 
+1526,1668138,"TERMINAL",0,0,"8\t ",,terminal_output +1527,1669176,"TERMINAL",0,0,"9\t ",,terminal_output +1528,1670219,"TERMINAL",0,0,"31\t ",,terminal_output +1529,1671255,"TERMINAL",0,0,"2\t ",,terminal_output +1530,1672292,"TERMINAL",0,0,"3\t ",,terminal_output +1531,1673330,"TERMINAL",0,0,"4\t ",,terminal_output +1532,1674372,"TERMINAL",0,0,"5\t ",,terminal_output +1533,1675414,"TERMINAL",0,0,"6\t ",,terminal_output +1534,1676457,"TERMINAL",0,0,"7\t ",,terminal_output +1535,1677496,"TERMINAL",0,0,"8\t ",,terminal_output +1536,1678535,"TERMINAL",0,0,"9\t ",,terminal_output +1537,1679576,"TERMINAL",0,0,"40\t ",,terminal_output +1538,1680621,"TERMINAL",0,0,"1\t ",,terminal_output +1539,1681658,"TERMINAL",0,0,"2\t ",,terminal_output +1540,1682693,"TERMINAL",0,0,"3\t ",,terminal_output +1541,1683732,"TERMINAL",0,0,"4\t ",,terminal_output +1542,1684782,"TERMINAL",0,0,"5\t ",,terminal_output +1543,1685813,"TERMINAL",0,0,"6\t ",,terminal_output +1544,1686847,"TERMINAL",0,0,"7\t ",,terminal_output +1545,1687888,"TERMINAL",0,0,"8\t ",,terminal_output +1546,1688930,"TERMINAL",0,0,"93",,terminal_output +1547,1689973,"TERMINAL",0,0,"50\t ",,terminal_output +1548,1691005,"TERMINAL",0,0,"1\t ",,terminal_output +1549,1692043,"TERMINAL",0,0,"2\t ",,terminal_output +1550,1693076,"TERMINAL",0,0,"3\t ",,terminal_output +1551,1694113,"TERMINAL",0,0,"4\t ",,terminal_output +1552,1695152,"TERMINAL",0,0,"5\t ",,terminal_output +1553,1696202,"TERMINAL",0,0,"6\t ",,terminal_output +1554,1697225,"TERMINAL",0,0,"8\t ",,terminal_output +1555,1698266,"TERMINAL",0,0,"9\t ",,terminal_output +1556,1699304,"TERMINAL",0,0,"6:00\t ",,terminal_output +1557,1700343,"TERMINAL",0,0,"1\t ",,terminal_output +1558,1701382,"TERMINAL",0,0,"2\t ",,terminal_output +1559,1702417,"TERMINAL",0,0,"3\t ",,terminal_output +1560,1703457,"TERMINAL",0,0,"4\t ",,terminal_output +1561,1704492,"TERMINAL",0,0,"5\t ",,terminal_output +1562,1705530,"TERMINAL",0,0,"6\t ",,terminal_output +1563,1706573,"TERMINAL",0,0,"7 9",,terminal_output +1564,1707644,"TERMINAL",0,0,"8\t ",,terminal_output +1565,1708713,"TERMINAL",0,0,"9\t ",,terminal_output +1566,1709680,"TERMINAL",0,0,"10\t ",,terminal_output +1567,1710722,"TERMINAL",0,0,"1\t ",,terminal_output +1568,1711759,"TERMINAL",0,0,"2\t ",,terminal_output +1569,1712794,"TERMINAL",0,0,"3\t ",,terminal_output +1570,1713838,"TERMINAL",0,0,"4\t ",,terminal_output +1571,1714878,"TERMINAL",0,0,"5\t ",,terminal_output +1572,1715921,"TERMINAL",0,0,"6\t ",,terminal_output +1573,1716962,"TERMINAL",0,0,"7\t ",,terminal_output +1574,1717998,"TERMINAL",0,0,"8\t ",,terminal_output +1575,1719038,"TERMINAL",0,0,"9\t ",,terminal_output +1576,1720074,"TERMINAL",0,0,"20\t ",,terminal_output +1577,1721113,"TERMINAL",0,0,"1\t ",,terminal_output +1578,1722153,"TERMINAL",0,0,"2\t ",,terminal_output +1579,1723191,"TERMINAL",0,0,"3\t ",,terminal_output +1580,1724236,"TERMINAL",0,0,"5\t ",,terminal_output +1581,1725280,"TERMINAL",0,0,"6\t ",,terminal_output +1582,1726320,"TERMINAL",0,0,"7\t ",,terminal_output +1583,1727372,"TERMINAL",0,0,"8\t ",,terminal_output +1584,1728398,"TERMINAL",0,0,"9\t ",,terminal_output +1585,1729434,"TERMINAL",0,0,"30\t ",,terminal_output +1586,1730482,"TERMINAL",0,0,"1\t ",,terminal_output +1587,1731507,"TERMINAL",0,0,"2\t ",,terminal_output +1588,1732852,"TERMINAL",0,0,"3\t ",,terminal_output +1589,1733879,"TERMINAL",0,0,"4\t ",,terminal_output +1590,1734916,"TERMINAL",0,0,"5\t ",,terminal_output +1591,1735976,"TERMINAL",0,0,"6\t ",,terminal_output +1592,1736992,"TERMINAL",0,0,"7\t 
",,terminal_output +1593,1738030,"TERMINAL",0,0,"8\t ",,terminal_output +1594,1739073,"TERMINAL",0,0,"9\t ",,terminal_output +1595,1740117,"TERMINAL",0,0,"40\t ",,terminal_output +1596,1741156,"TERMINAL",0,0,"1\t ",,terminal_output +1597,1742192,"TERMINAL",0,0,"2\t ",,terminal_output +1598,1743234,"TERMINAL",0,0,"4\t ",,terminal_output +1599,1744276,"TERMINAL",0,0,"5\t ",,terminal_output +1600,1745307,"TERMINAL",0,0,"6\t ",,terminal_output +1601,1746339,"TERMINAL",0,0,"7\t ",,terminal_output +1602,1747382,"TERMINAL",0,0,"8\t ",,terminal_output +1603,1748423,"TERMINAL",0,0,"9\t ",,terminal_output +1604,1749456,"TERMINAL",0,0,"50\t ",,terminal_output +1605,1750487,"TERMINAL",0,0,"1\t ",,terminal_output +1606,1751548,"TERMINAL",0,0,"2\t ",,terminal_output +1607,1752564,"TERMINAL",0,0,"3\t ",,terminal_output +1608,1753604,"TERMINAL",0,0,"4\t ",,terminal_output +1609,1754680,"TERMINAL",0,0,"5\t ",,terminal_output +1610,1755692,"TERMINAL",0,0,"6\t ",,terminal_output +1611,1756729,"TERMINAL",0,0,"7\t ",,terminal_output +1612,1757774,"TERMINAL",0,0,"8\t ",,terminal_output +1613,1758814,"TERMINAL",0,0,"9\t ",,terminal_output +1614,1759853,"TERMINAL",0,0,"7:00\t ",,terminal_output +1615,1760891,"TERMINAL",0,0,"1\t ",,terminal_output +1616,1761927,"TERMINAL",0,0,"2\t ",,terminal_output +1617,1762963,"TERMINAL",0,0,"3\t ",,terminal_output +1618,1764006,"TERMINAL",0,0,"4\t ",,terminal_output +1619,1765052,"TERMINAL",0,0,"5\t ",,terminal_output +1620,1766097,"TERMINAL",0,0,"6\t ",,terminal_output +1621,1767133,"TERMINAL",0,0,"7\t ",,terminal_output +1622,1768170,"TERMINAL",0,0,"8\t ",,terminal_output +1623,1769211,"TERMINAL",0,0,"10\t ",,terminal_output +1624,1770252,"TERMINAL",0,0,"1\t ",,terminal_output +1625,1771293,"TERMINAL",0,0,"2\t ",,terminal_output +1626,1772371,"TERMINAL",0,0,"310",,terminal_output +1627,1773369,"TERMINAL",0,0,"4\t ",,terminal_output +1628,1774408,"TERMINAL",0,0,"5\t ",,terminal_output +1629,1775486,"TERMINAL",0,0,"61",,terminal_output +1630,1776488,"TERMINAL",0,0,"7\t ",,terminal_output +1631,1777529,"TERMINAL",0,0,"8\t ",,terminal_output +1632,1778579,"TERMINAL",0,0,"9\t ",,terminal_output +1633,1779614,"TERMINAL",0,0,"20\t ",,terminal_output +1634,1780694,"TERMINAL",0,0,"13",,terminal_output +1635,1781696,"TERMINAL",0,0,"2\t ",,terminal_output +1636,1782733,"TERMINAL",0,0,"3\t ",,terminal_output +1637,1783773,"TERMINAL",0,0,"4\t ",,terminal_output +1638,1784809,"TERMINAL",0,0,"5\t ",,terminal_output +1639,1785848,"TERMINAL",0,0,"6\t ",,terminal_output +1640,1786882,"TERMINAL",0,0,"7\t ",,terminal_output +1641,1787921,"TERMINAL",0,0,"8\t ",,terminal_output +1642,1788967,"TERMINAL",0,0,"9\t ",,terminal_output +1643,1790002,"TERMINAL",0,0,"30\t ",,terminal_output +1644,1791041,"TERMINAL",0,0,"1\t ",,terminal_output +1645,1792080,"TERMINAL",0,0,"2\t ",,terminal_output +1646,1793116,"TERMINAL",0,0,"3\t ",,terminal_output +1647,1794152,"TERMINAL",0,0,"4\t ",,terminal_output +1648,1795190,"TERMINAL",0,0,"5\t ",,terminal_output +1649,1796229,"TERMINAL",0,0,"7\t ",,terminal_output +1650,1797269,"TERMINAL",0,0,"8\t ",,terminal_output +1651,1798310,"TERMINAL",0,0,"9\t ",,terminal_output +1652,1799348,"TERMINAL",0,0,"40\t ",,terminal_output +1653,1800389,"TERMINAL",0,0,"1\t ",,terminal_output +1654,1801425,"TERMINAL",0,0,"2\t ",,terminal_output +1655,1802464,"TERMINAL",0,0,"3\t ",,terminal_output +1656,1803509,"TERMINAL",0,0,"4\t ",,terminal_output +1657,1804562,"TERMINAL",0,0,"5\t ",,terminal_output +1658,1805597,"TERMINAL",0,0,"6\t ",,terminal_output +1659,1806674,"TERMINAL",0,0,"7\t 
",,terminal_output +1660,1807680,"TERMINAL",0,0,"8\t ",,terminal_output +1661,1808723,"TERMINAL",0,0,"9\t ",,terminal_output +1662,1809763,"TERMINAL",0,0,"50\t ",,terminal_output +1663,1810807,"TERMINAL",0,0,"1\t ",,terminal_output +1664,1811836,"TERMINAL",0,0,"2\t ",,terminal_output +1665,1812876,"TERMINAL",0,0,"3\t ",,terminal_output +1666,1813919,"TERMINAL",0,0,"4\t ",,terminal_output +1667,1814961,"TERMINAL",0,0,"5\t ",,terminal_output +1668,1816000,"TERMINAL",0,0,"6\t ",,terminal_output +1669,1817043,"TERMINAL",0,0,"7\t ",,terminal_output +1670,1818077,"TERMINAL",0,0,"8\t ",,terminal_output +1671,1819129,"TERMINAL",0,0,"9\t ",,terminal_output +1672,1820152,"TERMINAL",0,0,"8:00\t ",,terminal_output +1673,1821190,"TERMINAL",0,0,"1\t ",,terminal_output +1674,1822233,"TERMINAL",0,0,"3\t ",,terminal_output +1675,1823277,"TERMINAL",0,0,"4\t ",,terminal_output +1676,1824315,"TERMINAL",0,0,"5\t ",,terminal_output +1677,1825374,"TERMINAL",0,0,"6\t ",,terminal_output +1678,1826677,"TERMINAL",0,0,"7 5",,terminal_output +1679,1827681,"TERMINAL",0,0,"8\t ",,terminal_output +1680,1828729,"TERMINAL",0,0,"9\t ",,terminal_output +1681,1829767,"TERMINAL",0,0,"10\t ",,terminal_output +1682,1830804,"TERMINAL",0,0,"1\t ",,terminal_output +1683,1831847,"TERMINAL",0,0,"2\t ",,terminal_output +1684,1832883,"TERMINAL",0,0,"3\t ",,terminal_output +1685,1833922,"TERMINAL",0,0,"4\t ",,terminal_output +1686,1834961,"TERMINAL",0,0,"5\t ",,terminal_output +1687,1835992,"TERMINAL",0,0,"6\t ",,terminal_output +1688,1837032,"TERMINAL",0,0,"7\t ",,terminal_output +1689,1838069,"TERMINAL",0,0,"8\t ",,terminal_output +1690,1839105,"TERMINAL",0,0,"9\t ",,terminal_output +1691,1840149,"TERMINAL",0,0,"20\t ",,terminal_output +1692,1841188,"TERMINAL",0,0,"1\t ",,terminal_output +1693,1842227,"TERMINAL",0,0,"3\t ",,terminal_output +1694,1843270,"TERMINAL",0,0,"42",,terminal_output +1695,1844310,"TERMINAL",0,0,"5\t ",,terminal_output +1696,1845355,"TERMINAL",0,0,"6\t ",,terminal_output +1697,1846382,"TERMINAL",0,0,"7\t ",,terminal_output +1698,1847425,"TERMINAL",0,0,"8\t ",,terminal_output +1699,1848462,"TERMINAL",0,0,"9\t ",,terminal_output +1700,1849505,"TERMINAL",0,0,"303",,terminal_output +1701,1850547,"TERMINAL",0,0,"1\t ",,terminal_output +1702,1851595,"TERMINAL",0,0,"2\t ",,terminal_output +1703,1852632,"TERMINAL",0,0,"3\t ",,terminal_output +1704,1853708,"TERMINAL",0,0,"4\t ",,terminal_output +1705,1854707,"TERMINAL",0,0,"5\t ",,terminal_output +1706,1855747,"TERMINAL",0,0,"6\t ",,terminal_output +1707,1856785,"TERMINAL",0,0,"7\t ",,terminal_output +1708,1857826,"TERMINAL",0,0,"8\t ",,terminal_output +1709,1858875,"TERMINAL",0,0,"9\t ",,terminal_output +1710,1859915,"TERMINAL",0,0,"40\t ",,terminal_output +1711,1860955,"TERMINAL",0,0,"1\t ",,terminal_output +1712,1861991,"TERMINAL",0,0,"2\t ",,terminal_output +1713,1863041,"TERMINAL",0,0,"3\t ",,terminal_output +1714,1864083,"TERMINAL",0,0,"4\t ",,terminal_output +1715,1865119,"TERMINAL",0,0,"5\t ",,terminal_output +1716,1866159,"TERMINAL",0,0,"6\t ",,terminal_output +1717,1867201,"TERMINAL",0,0,"7\t ",,terminal_output +1718,1868252,"TERMINAL",0,0,"9\t ",,terminal_output +1719,1869328,"TERMINAL",0,0,"50\t ",,terminal_output +1720,1870362,"TERMINAL",0,0,"1\t ",,terminal_output +1721,1871400,"TERMINAL",0,0,"2\t ",,terminal_output +1722,1872447,"TERMINAL",0,0,"3\t ",,terminal_output +1723,1873482,"TERMINAL",0,0,"4\t ",,terminal_output +1724,1874525,"TERMINAL",0,0,"5\t ",,terminal_output +1725,1875562,"TERMINAL",0,0,"6\t ",,terminal_output +1726,1876597,"TERMINAL",0,0,"7\t 
",,terminal_output +1727,1877635,"TERMINAL",0,0,"8\t ",,terminal_output +1728,1878702,"TERMINAL",0,0,"9\t ",,terminal_output +1729,1879739,"TERMINAL",0,0,"9:00\t ",,terminal_output +1730,1880743,"TERMINAL",0,0,"1\t ",,terminal_output +1731,1881781,"TERMINAL",0,0,"2\t ",,terminal_output +1732,1882826,"TERMINAL",0,0,"3\t ",,terminal_output +1733,1883867,"TERMINAL",0,0,"4\t ",,terminal_output +1734,1884912,"TERMINAL",0,0,"5\t ",,terminal_output +1735,1885960,"TERMINAL",0,0,"6\t ",,terminal_output +1736,1886995,"TERMINAL",0,0,"7\t ",,terminal_output +1737,1888039,"TERMINAL",0,0,"8 9",,terminal_output +1738,1889080,"TERMINAL",0,0,"9\t ",,terminal_output +1739,1890122,"TERMINAL",0,0,"10\t ",,terminal_output +1740,1891165,"TERMINAL",0,0,"1\t ",,terminal_output +1741,1892206,"TERMINAL",0,0,"3\t ",,terminal_output +1742,1893240,"TERMINAL",0,0,"4\t ",,terminal_output +1743,1894282,"TERMINAL",0,0,"5\t ",,terminal_output +1744,1895327,"TERMINAL",0,0,"6\t ",,terminal_output +1745,1896367,"TERMINAL",0,0,"7\t ",,terminal_output +1746,1897408,"TERMINAL",0,0,"8\t ",,terminal_output +1747,1898447,"TERMINAL",0,0,"9\t ",,terminal_output +1748,1899488,"TERMINAL",0,0,"20\t ",,terminal_output +1749,1900534,"TERMINAL",0,0,"1\t ",,terminal_output +1750,1901574,"TERMINAL",0,0,"2\t ",,terminal_output +1751,1902612,"TERMINAL",0,0,"3\t ",,terminal_output +1752,1903647,"TERMINAL",0,0,"4\t ",,terminal_output +1753,1904724,"TERMINAL",0,0,"5\t ",,terminal_output +1754,1905721,"TERMINAL",0,0,"6\t ",,terminal_output +1755,1906765,"TERMINAL",0,0,"7\t ",,terminal_output +1756,1907802,"TERMINAL",0,0,"8\t ",,terminal_output +1757,1908836,"TERMINAL",0,0,"9\t ",,terminal_output +1758,1909867,"TERMINAL",0,0,"30\t ",,terminal_output +1759,1910901,"TERMINAL",0,0,"1\t ",,terminal_output +1760,1911936,"TERMINAL",0,0,"2\t ",,terminal_output +1761,1912978,"TERMINAL",0,0,"3\t ",,terminal_output +1762,1914015,"TERMINAL",0,0,"4\t ",,terminal_output +1763,1915049,"TERMINAL",0,0,"5\t ",,terminal_output +1764,1916093,"TERMINAL",0,0,"6\t ",,terminal_output +1765,1917127,"TERMINAL",0,0,"7\t ",,terminal_output +1766,1918164,"TERMINAL",0,0,"8\t ",,terminal_output +1767,1919201,"TERMINAL",0,0,"40\t ",,terminal_output +1768,1920238,"TERMINAL",0,0,"1\t ",,terminal_output +1769,1921280,"TERMINAL",0,0,"2\t ",,terminal_output +1770,1922340,"TERMINAL",0,0,"3\t ",,terminal_output +1771,1923451,"TERMINAL",0,0,"4\t ",,terminal_output +1772,1924560,"TERMINAL",0,0,"5\t ",,terminal_output +1773,1925436,"TERMINAL",0,0,"6\t ",,terminal_output +1774,1926464,"TERMINAL",0,0,"7\t ",,terminal_output +1775,1927501,"TERMINAL",0,0,"8\t ",,terminal_output +1776,1928546,"TERMINAL",0,0,"9\t ",,terminal_output +1777,1929591,"TERMINAL",0,0,"50\t ",,terminal_output +1778,1930635,"TERMINAL",0,0,"1\t ",,terminal_output +1779,1931667,"TERMINAL",0,0,"2\t ",,terminal_output +1780,1932737,"TERMINAL",0,0,"3\t ",,terminal_output +1781,1933736,"TERMINAL",0,0,"4\t ",,terminal_output +1782,1934773,"TERMINAL",0,0,"5\t ",,terminal_output +1783,1935818,"TERMINAL",0,0,"6\t ",,terminal_output +1784,1936855,"TERMINAL",0,0,"7\t ",,terminal_output +1785,1937895,"TERMINAL",0,0,"8\t ",,terminal_output +1786,1938937,"TERMINAL",0,0,"9\t ",,terminal_output +1787,1939982,"TERMINAL",0,0,"30:00\t ",,terminal_output +1788,1941024,"TERMINAL",0,0,"1\t ",,terminal_output +1789,1942058,"TERMINAL",0,0,"2\t ",,terminal_output +1790,1943105,"TERMINAL",0,0,"3\t ",,terminal_output +1791,1944137,"TERMINAL",0,0,"4\t ",,terminal_output +1792,1945177,"TERMINAL",0,0,"5\t ",,terminal_output 
+1793,1946215,"TERMINAL",0,0,"7\t ",,terminal_output +1794,1947259,"TERMINAL",0,0,"8\t ",,terminal_output +1795,1948290,"TERMINAL",0,0,"9\t ",,terminal_output +1796,1949333,"TERMINAL",0,0,"10\t ",,terminal_output +1797,1950390,"TERMINAL",0,0,"1\t ",,terminal_output +1798,1951446,"TERMINAL",0,0,"2\t ",,terminal_output +1799,1952458,"TERMINAL",0,0,"3\t ",,terminal_output +1800,1953551,"TERMINAL",0,0,"4\t ",,terminal_output +1801,1954540,"TERMINAL",0,0,"5\t ",,terminal_output +1802,1955606,"TERMINAL",0,0,"6\t ",,terminal_output +1803,1956619,"TERMINAL",0,0,"7\t ",,terminal_output +1804,1957654,"TERMINAL",0,0,"8\t ",,terminal_output +1805,1958699,"TERMINAL",0,0,"9\t ",,terminal_output +1806,1959744,"TERMINAL",0,0,"20\t ",,terminal_output +1807,1960787,"TERMINAL",0,0,"1\t ",,terminal_output +1808,1961823,"TERMINAL",0,0,"2\t ",,terminal_output +1809,1962866,"TERMINAL",0,0,"3\t ",,terminal_output +1810,1963900,"TERMINAL",0,0,"4\t ",,terminal_output +1811,1964952,"TERMINAL",0,0,"5\t ",,terminal_output +1812,1966025,"TERMINAL",0,0,"6\t ",,terminal_output +1813,1967041,"TERMINAL",0,0,"7\t ",,terminal_output +1814,1968065,"TERMINAL",0,0,"8\t ",,terminal_output +1815,1969099,"TERMINAL",0,0,"9\t ",,terminal_output +1816,1970145,"TERMINAL",0,0,"30\t ",,terminal_output +1817,1971188,"TERMINAL",0,0,"1\t ",,terminal_output +1818,1972222,"TERMINAL",0,0,"3\t ",,terminal_output +1819,1973261,"TERMINAL",0,0,"4\t ",,terminal_output +1820,1974541,"TERMINAL",0,0,"5\t ",,terminal_output +1821,1975577,"TERMINAL",0,0,"6\t ",,terminal_output +1822,1976621,"TERMINAL",0,0,"7\t ",,terminal_output +1823,1977664,"TERMINAL",0,0,"8\t ",,terminal_output +1824,1978704,"TERMINAL",0,0,"9\t ",,terminal_output +1825,1979784,"TERMINAL",0,0,"40\t ",,terminal_output +1826,1980789,"TERMINAL",0,0,"1\t ",,terminal_output +1827,1981826,"TERMINAL",0,0,"2\t ",,terminal_output +1828,1982862,"TERMINAL",0,0,"3\t ",,terminal_output +1829,1983904,"TERMINAL",0,0,"4\t ",,terminal_output +1830,1984943,"TERMINAL",0,0,"5\t ",,terminal_output +1831,1985985,"TERMINAL",0,0,"6\t ",,terminal_output +1832,1987021,"TERMINAL",0,0,"7\t ",,terminal_output +1833,1988067,"TERMINAL",0,0,"8\t ",,terminal_output +1834,1989096,"TERMINAL",0,0,"9\t ",,terminal_output +1835,1990133,"TERMINAL",0,0,"50\t ",,terminal_output +1836,1991168,"TERMINAL",0,0,"1\t ",,terminal_output +1837,1992203,"TERMINAL",0,0,"3\t ",,terminal_output +1838,1993238,"TERMINAL",0,0,"4\t ",,terminal_output +1839,1994279,"TERMINAL",0,0,"5\t ",,terminal_output +1840,1995324,"TERMINAL",0,0,"6\t ",,terminal_output +1841,1996356,"TERMINAL",0,0,"7\t ",,terminal_output +1842,1997392,"TERMINAL",0,0,"8\t ",,terminal_output +1843,1998428,"TERMINAL",0,0,"98",,terminal_output +1844,1999469,"TERMINAL",0,0,"1:00\t ",,terminal_output +1845,2000509,"TERMINAL",0,0,"1\t ",,terminal_output +1846,2001543,"TERMINAL",0,0,"2\t ",,terminal_output +1847,2002582,"TERMINAL",0,0,"3\t ",,terminal_output +1848,2003619,"TERMINAL",0,0,"4\t ",,terminal_output +1849,2004658,"TERMINAL",0,0,"5\t ",,terminal_output +1850,2005696,"TERMINAL",0,0,"6\t ",,terminal_output +1851,2006768,"TERMINAL",0,0,"7\t ",,terminal_output +1852,2007774,"TERMINAL",0,0,"8\t ",,terminal_output +1853,2008806,"TERMINAL",0,0,"9\t ",,terminal_output +1854,2009841,"TERMINAL",0,0,"10\t ",,terminal_output +1855,2010879,"TERMINAL",0,0,"1\t ",,terminal_output +1856,2011918,"TERMINAL",0,0,"2\t ",,terminal_output +1857,2012959,"TERMINAL",0,0,"3\t ",,terminal_output +1858,2013986,"TERMINAL",0,0,"4\t ",,terminal_output +1859,2015025,"TERMINAL",0,0,"5\t 
",,terminal_output +1860,2016062,"TERMINAL",0,0,"6\t ",,terminal_output +1861,2017104,"TERMINAL",0,0,"7\t ",,terminal_output +1862,2018140,"TERMINAL",0,0,"8\t ",,terminal_output +1863,2019175,"TERMINAL",0,0,"9\t ",,terminal_output +1864,2020213,"TERMINAL",0,0,"21\t ",,terminal_output +1865,2021243,"TERMINAL",0,0,"2\t ",,terminal_output +1866,2022282,"TERMINAL",0,0,"3\t ",,terminal_output +1867,2023316,"TERMINAL",0,0,"4\t ",,terminal_output +1868,2024350,"TERMINAL",0,0,"5\t ",,terminal_output +1869,2025382,"TERMINAL",0,0,"6\t ",,terminal_output +1870,2026421,"TERMINAL",0,0,"7\t ",,terminal_output +1871,2027456,"TERMINAL",0,0,"8\t ",,terminal_output +1872,2028491,"TERMINAL",0,0,"9\t ",,terminal_output +1873,2029528,"TERMINAL",0,0,"30\t ",,terminal_output +1874,2030563,"TERMINAL",0,0,"1\t ",,terminal_output +1875,2031604,"TERMINAL",0,0,"2\t ",,terminal_output +1876,2032633,"TERMINAL",0,0,"3\t ",,terminal_output +1877,2033667,"TERMINAL",0,0,"4\t ",,terminal_output +1878,2034706,"TERMINAL",0,0,"5\t ",,terminal_output +1879,2035747,"TERMINAL",0,0,"6\t ",,terminal_output +1880,2036784,"TERMINAL",0,0,"7\t ",,terminal_output +1881,2037820,"TERMINAL",0,0,"8\t ",,terminal_output +1882,2038851,"TERMINAL",0,0,"9\t ",,terminal_output +1883,2039912,"TERMINAL",0,0,"40\t ",,terminal_output +1884,2040923,"TERMINAL",0,0,"1\t ",,terminal_output +1885,2041958,"TERMINAL",0,0,"2\t ",,terminal_output +1886,2043005,"TERMINAL",0,0,"3\t ",,terminal_output +1887,2044030,"TERMINAL",0,0,"4\t ",,terminal_output +1888,2045123,"TERMINAL",0,0,"5\t ",,terminal_output +1889,2046104,"TERMINAL",0,0,"6\t ",,terminal_output +1890,2047134,"TERMINAL",0,0,"7\t ",,terminal_output +1891,2048175,"TERMINAL",0,0,"8\t ",,terminal_output +1892,2049215,"TERMINAL",0,0,"50\t ",,terminal_output +1893,2050250,"TERMINAL",0,0,"1\t ",,terminal_output +1894,2051285,"TERMINAL",0,0,"2\t ",,terminal_output +1895,2052320,"TERMINAL",0,0,"3\t ",,terminal_output +1896,2053358,"TERMINAL",0,0,"4\t ",,terminal_output +1897,2054391,"TERMINAL",0,0,"5\t ",,terminal_output +1898,2055433,"TERMINAL",0,0,"6\t ",,terminal_output +1899,2056462,"TERMINAL",0,0,"7\t ",,terminal_output +1900,2057495,"TERMINAL",0,0,"8\t ",,terminal_output +1901,2058535,"TERMINAL",0,0,"9\t ",,terminal_output +1902,2059568,"TERMINAL",0,0,"2:00\t ",,terminal_output +1903,2060608,"TERMINAL",0,0,"1\t ",,terminal_output +1904,2061644,"TERMINAL",0,0,"2\t ",,terminal_output +1905,2062685,"TERMINAL",0,0,"3\t ",,terminal_output +1906,2063733,"TERMINAL",0,0,"4\t ",,terminal_output +1907,2064792,"TERMINAL",0,0,"5\t ",,terminal_output +1908,2065826,"TERMINAL",0,0,"6\t ",,terminal_output +1909,2066833,"TERMINAL",0,0,"7\t ",,terminal_output +1910,2067867,"TERMINAL",0,0,"8\t ",,terminal_output +1911,2068901,"TERMINAL",0,0,"9\t ",,terminal_output +1912,2069942,"TERMINAL",0,0,"10\t ",,terminal_output +1913,2070979,"TERMINAL",0,0,"1\t ",,terminal_output +1914,2072011,"TERMINAL",0,0,"210",,terminal_output +1915,2073045,"TERMINAL",0,0,"3\t ",,terminal_output +1916,2074084,"TERMINAL",0,0,"4\t ",,terminal_output +1917,2075131,"TERMINAL",0,0,"5\t ",,terminal_output +1918,2076162,"TERMINAL",0,0,"6\t ",,terminal_output +1919,2077190,"TERMINAL",0,0,"8\t ",,terminal_output +1920,2078229,"TERMINAL",0,0,"9\t ",,terminal_output +1921,2079270,"TERMINAL",0,0,"20\t ",,terminal_output +1922,2080309,"TERMINAL",0,0,"1\t ",,terminal_output +1923,2081345,"TERMINAL",0,0,"2\t ",,terminal_output +1924,2082388,"TERMINAL",0,0,"3\t ",,terminal_output +1925,2083429,"TERMINAL",0,0,"4\t ",,terminal_output 
+1926,2084463,"TERMINAL",0,0,"5\t ",,terminal_output +1927,2085515,"TERMINAL",0,0,"6\t ",,terminal_output +1928,2086547,"TERMINAL",0,0,"7\t ",,terminal_output +1929,2087582,"TERMINAL",0,0,"8\t ",,terminal_output +1930,2088614,"TERMINAL",0,0,"9\t ",,terminal_output +1931,2089654,"TERMINAL",0,0,"30\t ",,terminal_output +1932,2090684,"TERMINAL",0,0,"1\t ",,terminal_output +1933,2091724,"TERMINAL",0,0,"2\t ",,terminal_output +1934,2092759,"TERMINAL",0,0,"3\t ",,terminal_output +1935,2093837,"TERMINAL",0,0,"4\t ",,terminal_output +1936,2095404,"TERMINAL",0,0,"56 9",,terminal_output +1937,2096449,"TERMINAL",0,0,"7\t ",,terminal_output +1938,2097487,"TERMINAL",0,0,"8\t ",,terminal_output +1939,2098529,"TERMINAL",0,0,"91",,terminal_output +1940,2099566,"TERMINAL",0,0,"40\t ",,terminal_output +1941,2100600,"TERMINAL",0,0,"1\t ",,terminal_output +1942,2101636,"TERMINAL",0,0,"2\t ",,terminal_output +1943,2102683,"TERMINAL",0,0,"3\t ",,terminal_output +1944,2103727,"TERMINAL",0,0,"4\t ",,terminal_output +1945,2104809,"TERMINAL",0,0,"5\t ",,terminal_output +1946,2105849,"TERMINAL",0,0,"6\t ",,terminal_output +1947,2106851,"TERMINAL",0,0,"7\t ",,terminal_output +1948,2107886,"TERMINAL",0,0,"8\t ",,terminal_output +1949,2108924,"TERMINAL",0,0,"9\t ",,terminal_output diff --git a/927a8af5474e5654810c00ce2e09fd2de87d3e5722f33fa1090d867db114e403/crowd-code-0556481e-9711-4a16-8295-53ec72ff527b1757423165949-2025_09_09-15.06.24.820/source.csv b/927a8af5474e5654810c00ce2e09fd2de87d3e5722f33fa1090d867db114e403/crowd-code-0556481e-9711-4a16-8295-53ec72ff527b1757423165949-2025_09_09-15.06.24.820/source.csv new file mode 100644 index 0000000000000000000000000000000000000000..e5d53b72d0d90e95601379f813949d2d6c93ab8d --- /dev/null +++ b/927a8af5474e5654810c00ce2e09fd2de87d3e5722f33fa1090d867db114e403/crowd-code-0556481e-9711-4a16-8295-53ec72ff527b1757423165949-2025_09_09-15.06.24.820/source.csv @@ -0,0 +1,303 @@ +Sequence,Time,File,RangeOffset,RangeLength,Text,Language,Type +1,4,"slurm/jobs/mihir/horeka/jafar_og_reproduction/og_coinrun_dynamics_reproduction.sbatch",0,0,"#!/usr/bin/env bash\n\n#SBATCH --nodes=1\n#SBATCH --ntasks-per-node=1\n#SBATCH --time=2-00:00:00\n#SBATCH --cpus-per-task=8\n#SBATCH --gres=gpu:1\n#SBATCH --partition=accelerated\n#SBATCH --output=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir/jafar_og_reproduction/dynamics/%x_%j.log\n#SBATCH --error=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir/jafar_og_reproduction/dynamics/%x_%j.log\n#SBATCH --job-name=train_dynamics_coinrun_og_reproduction\n\n# Log the sbatch script\ncat $0\n\nmodule unload mpi/openmpi/5.0\nmodule unload devel/cuda/12.4\nsource .venv/bin/activate\n\njob_name=$SLURM_JOB_NAME\nslurm_job_id=$SLURM_JOB_ID\n\ntags=""coinrun_og dynanmics 10m_dataset repro_mihir""\n\nnpy_records_dir=""/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_coinrun/coinrun_episodes_10m""\n\n# TODO mihir: update the tokenizer and lam checkpoints\ntokenizer_ckpt_dir=""/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/mihir/jafar_og_reproduction/tokenizer/train_tokenizer_coinrun_og_reproduction/3466286/tokenizer_1757013407_280000""\nlam_ckpt_dir=""/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/mihir/jafar_og_reproduction/lam/train_lam_coinrun_og_reproduction/3466287/lam_1757013407_200000""\n\nCHECKPOINT_DIR=""/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/mihir/jafar_og_reproduction/dynamics/${job_name}/${slurm_job_id}""\nmkdir -p 
$CHECKPOINT_DIR\n\nenv | grep SLURM\n\nsrun python train_dynamics.py \\n --ckpt_dir $CHECKPOINT_DIR \\n --tokenizer_checkpoint=""${tokenizer_ckpt_dir}"" \\n --patch_size=16 \\n --lam_checkpoint=""${lam_ckpt_dir}"" \\n --log_image_interval=1000 \\n --log \\n --name=""${job_name}_${slurm_job_id}"" \\n --tags ${tags} \\n --entity instant-uv \\n --project jafar \\n --data_dir $npy_records_dir \\n --wandb_id $slurm_job_id\n",shellscript,tab +2,1161,"extension-output-pdoom-org.crowd-code-#1-crowd-code",0,0,"3:06:24 PM [info] Activating crowd-code\n3:06:24 PM [info] Recording started\n3:06:24 PM [info] Initializing git provider using file system watchers...\n3:06:25 PM [info] Git repository found\n3:06:25 PM [info] Git provider initialized successfully\n3:06:25 PM [info] Initial git state: [object Object]\n",Log,tab +3,26377,"slurm/jobs/mihir/horeka/jafar_og_reproduction/og_coinrun_dynamics_reproduction.sbatch",0,0,"",shellscript,tab +4,31392,"slurm/jobs/mihir/horeka/jafar_og_reproduction/og_coinrun_dynamics_reproduction.sbatch",1487,0,"",shellscript,selection_mouse +5,31433,"slurm/jobs/mihir/horeka/jafar_og_reproduction/og_coinrun_dynamics_reproduction.sbatch",1486,0,"",shellscript,selection_command +6,31987,"slurm/jobs/mihir/horeka/jafar_og_reproduction/og_coinrun_dynamics_reproduction.sbatch",1744,0,"",shellscript,selection_mouse +7,32002,"slurm/jobs/mihir/horeka/jafar_og_reproduction/og_coinrun_dynamics_reproduction.sbatch",1743,0,"",shellscript,selection_command +8,32439,"slurm/jobs/mihir/horeka/jafar_og_reproduction/og_coinrun_dynamics_reproduction.sbatch",1855,0,"",shellscript,selection_mouse +9,32453,"slurm/jobs/mihir/horeka/jafar_og_reproduction/og_coinrun_dynamics_reproduction.sbatch",1854,0,"",shellscript,selection_command +10,45064,"sample.py",0,0,"from dataclasses import dataclass\nimport time\n\nimport dm_pix as pix\nimport einops\nimport jax\nimport jax.numpy as jnp\nimport numpy as np\nfrom orbax.checkpoint import PyTreeCheckpointer\nfrom PIL import Image, ImageDraw\nimport tyro\n\nfrom genie import Genie\nfrom utils.dataloader import get_dataloader\n\n\n@dataclass\nclass Args:\n # Experiment\n seed: int = 0\n seq_len: int = 16\n image_channels: int = 3\n image_resolution: int = 64\n data_dir: str = ""data/coinrun_episodes""\n checkpoint: str = """"\n # Sampling\n batch_size: int = 1\n maskgit_steps: int = 25\n temperature: float = 1.0\n sample_argmax: bool = True\n start_frame: int = 0\n # Tokenizer checkpoint\n tokenizer_dim: int = 512\n latent_patch_dim: int = 32\n num_patch_latents: int = 1024\n patch_size: int = 4\n tokenizer_num_blocks: int = 8\n tokenizer_num_heads: int = 8\n # LAM checkpoint\n lam_dim: int = 512\n latent_action_dim: int = 32\n num_latent_actions: int = 6\n lam_patch_size: int = 16\n lam_num_blocks: int = 8\n lam_num_heads: int = 8\n # Dynamics checkpoint\n dyna_dim: int = 512\n dyna_num_blocks: int = 12\n dyna_num_heads: int = 8\n\n\nargs = tyro.cli(Args)\nrng = jax.random.PRNGKey(args.seed)\n\n# --- Load Genie checkpoint ---\ngenie = Genie(\n # Tokenizer\n in_dim=args.image_channels,\n tokenizer_dim=args.tokenizer_dim,\n latent_patch_dim=args.latent_patch_dim,\n num_patch_latents=args.num_patch_latents,\n patch_size=args.patch_size,\n tokenizer_num_blocks=args.tokenizer_num_blocks,\n tokenizer_num_heads=args.tokenizer_num_heads,\n # LAM\n lam_dim=args.lam_dim,\n latent_action_dim=args.latent_action_dim,\n num_latent_actions=args.num_latent_actions,\n lam_patch_size=args.lam_patch_size,\n lam_num_blocks=args.lam_num_blocks,\n 
lam_num_heads=args.lam_num_heads,\n # Dynamics\n dyna_dim=args.dyna_dim,\n dyna_num_blocks=args.dyna_num_blocks,\n dyna_num_heads=args.dyna_num_heads,\n)\nrng, _rng = jax.random.split(rng)\nimage_shape = (args.image_resolution, args.image_resolution, args.image_channels)\ndummy_inputs = dict(\n videos=jnp.zeros((args.batch_size, args.seq_len, *image_shape), dtype=jnp.float32),\n mask_rng=_rng,\n)\nrng, _rng = jax.random.split(rng)\nparams = genie.init(_rng, dummy_inputs)\nckpt = PyTreeCheckpointer().restore(args.checkpoint)[""model""][""params""][""params""]\nparams[""params""].update(ckpt)\n\n# --- Define autoregressive sampling loop ---\ndef _autoreg_sample(rng, video_batch, action_batch):\n vid = video_batch[:, : args.start_frame + 1]\n for frame_idx in range(args.start_frame + 1, args.seq_len):\n # --- Sample next frame ---\n print(""Frame"", frame_idx)\n rng, _rng = jax.random.split(rng)\n batch = dict(videos=vid, latent_actions=action_batch[:, :frame_idx], rng=_rng)\n new_frame = genie.apply(\n params,\n batch,\n args.maskgit_steps,\n args.temperature,\n args.sample_argmax,\n method=Genie.sample,\n )\n vid = jnp.concatenate([vid, new_frame], axis=1)\n return vid\n\n\n# --- Get video + latent actions ---\ndataloader = get_dataloader(args.data_dir, args.seq_len, args.batch_size)\nvideo_batch = next(iter(dataloader))\n# Get latent actions from first video only\nfirst_video = video_batch[:1]\nbatch = dict(videos=first_video)\naction_batch = genie.apply(params, batch, False, method=Genie.vq_encode)\naction_batch = action_batch.reshape(1, args.seq_len - 1, 1)\n# Use actions from first video for all videos\naction_batch = jnp.repeat(action_batch, video_batch.shape[0], axis=0)\n\n# --- Sample + evaluate video ---\nvid = _autoreg_sample(rng, video_batch, action_batch)\ngt = video_batch[:, : vid.shape[1]].clip(0, 1).reshape(-1, *video_batch.shape[2:])\nrecon = vid.clip(0, 1).reshape(-1, *vid.shape[2:])\nssim = pix.ssim(gt[:, args.start_frame + 1 :], recon[:, args.start_frame + 1 :]).mean()\nprint(f""SSIM: {ssim}"")\n\n# --- Construct video ---\nfirst_true = (video_batch[0:1] * 255).astype(np.uint8)\nfirst_pred = (vid[0:1] * 255).astype(np.uint8)\nfirst_video_comparison = np.zeros((2, *vid.shape[1:5]), dtype=np.uint8)\nfirst_video_comparison[0] = first_true[:, : vid.shape[1]]\nfirst_video_comparison[1] = first_pred\n# For other videos, only show generated video\nother_preds = (vid[1:] * 255).astype(np.uint8)\nall_frames = np.concatenate([first_video_comparison, other_preds], axis=0)\nflat_vid = einops.rearrange(all_frames, ""n t h w c -> t h (n w) c"")\n\n# --- Save video ---\nimgs = [Image.fromarray(img) for img in flat_vid]\n# Write actions on each frame\nfor img, action in zip(imgs[1:], action_batch[0, :, 0]):\n d = ImageDraw.Draw(img)\n d.text((2, 2), f""{action}"", fill=255)\nimgs[0].save(\n f""generation_{time.time()}.gif"",\n save_all=True,\n append_images=imgs[1:],\n duration=250,\n loop=0,\n)\n",python,tab +11,114104,"slurm/jobs/mihir/horeka/jafar_og_reproduction/og_coinrun_dynamics_reproduction.sbatch",0,0,"",shellscript,tab +12,121068,"slurm/jobs/mihir/horeka/jafar_og_reproduction/og_coinrun_dynamics_reproduction-h100.sbatch",0,0,"#!/usr/bin/env bash\n\n#SBATCH --nodes=1\n#SBATCH --ntasks-per-node=1\n#SBATCH --time=2-00:00:00\n#SBATCH --cpus-per-task=8\n#SBATCH --gres=gpu:1\n#SBATCH --partition=accelerated-h100\n#SBATCH --output=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir/jafar_og_reproduction/dynamics/%x_%j.log\n#SBATCH 
--error=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir/jafar_og_reproduction/dynamics/%x_%j.log\n#SBATCH --job-name=train_dynamics_coinrun_og_reproduction\n\n# Log the sbatch script\ncat $0\n\nmodule unload mpi/openmpi/5.0\nmodule unload devel/cuda/12.4\nsource .venv/bin/activate\n\njob_name=$SLURM_JOB_NAME\nslurm_job_id=$SLURM_JOB_ID\n\ntags=""coinrun_og dynanmics 10m_dataset repro_mihir""\n\nnpy_records_dir=""/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_coinrun/coinrun_episodes_10m""\n\n# TODO mihir: update the tokenizer and lam checkpoints\ntokenizer_ckpt_dir=""/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/mihir/jafar_og_reproduction/tokenizer/train_tokenizer_coinrun_og_reproduction/3466286""\nlam_ckpt_dir=""/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/mihir/jafar_og_reproduction/lam/train_lam_coinrun_og_reproduction/3466287""\n\nCHECKPOINT_DIR=""/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/mihir/jafar_og_reproduction/dynamics/${job_name}/${slurm_job_id}""\nmkdir -p $CHECKPOINT_DIR\n\nenv | grep SLURM\n\nsrun python train_dynamics.py \\n --ckpt_dir $CHECKPOINT_DIR \\n --tokenizer_checkpoint=""${tokenizer_ckpt_dir}"" \\n --patch_size=16 \\n --lam_checkpoint=""${lam_ckpt_dir}"" \\n --log_image_interval=1000 \\n --log \\n --name=""${job_name}_${slurm_job_id}"" \\n --tags ${tags} \\n --entity instant-uv \\n --project jafar \\n --data_dir $npy_records_dir \\n --wandb_id $slurm_job_id\n",shellscript,tab +13,121836,"slurm/jobs/mihir/horeka/jafar_og_reproduction/og_coinrun_dynamics_reproduction-h100 copy.sbatch",0,0,"#!/usr/bin/env bash\n\n#SBATCH --nodes=1\n#SBATCH --ntasks-per-node=1\n#SBATCH --time=2-00:00:00\n#SBATCH --cpus-per-task=8\n#SBATCH --gres=gpu:1\n#SBATCH --partition=accelerated-h100\n#SBATCH --output=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir/jafar_og_reproduction/dynamics/%x_%j.log\n#SBATCH --error=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir/jafar_og_reproduction/dynamics/%x_%j.log\n#SBATCH --job-name=train_dynamics_coinrun_og_reproduction\n\n# Log the sbatch script\ncat $0\n\nmodule unload mpi/openmpi/5.0\nmodule unload devel/cuda/12.4\nsource .venv/bin/activate\n\njob_name=$SLURM_JOB_NAME\nslurm_job_id=$SLURM_JOB_ID\n\ntags=""coinrun_og dynanmics 10m_dataset repro_mihir""\n\nnpy_records_dir=""/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_coinrun/coinrun_episodes_10m""\n\n# TODO mihir: update the tokenizer and lam checkpoints\ntokenizer_ckpt_dir=""/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/mihir/jafar_og_reproduction/tokenizer/train_tokenizer_coinrun_og_reproduction/3466286""\nlam_ckpt_dir=""/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/mihir/jafar_og_reproduction/lam/train_lam_coinrun_og_reproduction/3466287""\n\nCHECKPOINT_DIR=""/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/mihir/jafar_og_reproduction/dynamics/${job_name}/${slurm_job_id}""\nmkdir -p $CHECKPOINT_DIR\n\nenv | grep SLURM\n\nsrun python train_dynamics.py \\n --ckpt_dir $CHECKPOINT_DIR \\n --tokenizer_checkpoint=""${tokenizer_ckpt_dir}"" \\n --patch_size=16 \\n --lam_checkpoint=""${lam_ckpt_dir}"" \\n --log_image_interval=1000 \\n --log \\n --name=""${job_name}_${slurm_job_id}"" \\n --tags ${tags} \\n --entity instant-uv \\n --project jafar \\n --data_dir $npy_records_dir \\n --wandb_id $slurm_job_id\n",shellscript,tab 
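Each recorded source.csv in this diff shares the header Sequence,Time,File,RangeOffset,RangeLength,Text,Language,Type: a `tab` row carries a file's full contents the first time it is opened (and an empty Text on later switches), while a `content` row splices Text over RangeLength characters at RangeOffset. A minimal replay sketch in Python, assuming only that unescaping needs to invert the \n/\t/\r escapes visible above (the extension's unescapeString may handle more cases); `replay` and `unescape` are hypothetical helper names, not part of crowd-code:

```python
import csv
from collections import defaultdict

def unescape(text: str) -> str:
    # Simplified inverse of the recorder's escaping; the real
    # unescapeString may cover additional characters.
    return text.replace("\\r", "\r").replace("\\n", "\n").replace("\\t", "\t")

def replay(csv_path: str) -> dict[str, str]:
    """Rebuild final file contents by splicing each change row into a buffer."""
    buffers: dict[str, str] = defaultdict(str)
    with open(csv_path, newline="") as f:
        for row in csv.DictReader(f):
            if row["Type"] not in ("tab", "content"):
                continue  # selections and terminal events don't alter file text
            buf = buffers[row["File"]]
            off, length = int(row["RangeOffset"]), int(row["RangeLength"])
            # Replace `length` characters at `off` with the unescaped new text
            buffers[row["File"]] = buf[:off] + unescape(row["Text"]) + buf[off + length:]
    return buffers
```

Run over the second recording above, such a replay would, for example, reproduce the sequence of deletions and insertions that turn the copied sbatch into og_coinrun_dynamics_reproduction-sample.sh.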
+14,132472,"slurm/jobs/mihir/horeka/jafar_og_reproduction/og_coinrun_dynamics_reproduction-sample.sh",0,0,"#!/usr/bin/env bash\n\n#SBATCH --nodes=1\n#SBATCH --ntasks-per-node=1\n#SBATCH --time=2-00:00:00\n#SBATCH --cpus-per-task=8\n#SBATCH --gres=gpu:1\n#SBATCH --partition=accelerated-h100\n#SBATCH --output=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir/jafar_og_reproduction/dynamics/%x_%j.log\n#SBATCH --error=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir/jafar_og_reproduction/dynamics/%x_%j.log\n#SBATCH --job-name=train_dynamics_coinrun_og_reproduction\n\n# Log the sbatch script\ncat $0\n\nmodule unload mpi/openmpi/5.0\nmodule unload devel/cuda/12.4\nsource .venv/bin/activate\n\njob_name=$SLURM_JOB_NAME\nslurm_job_id=$SLURM_JOB_ID\n\ntags=""coinrun_og dynanmics 10m_dataset repro_mihir""\n\nnpy_records_dir=""/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_coinrun/coinrun_episodes_10m""\n\n# TODO mihir: update the tokenizer and lam checkpoints\ntokenizer_ckpt_dir=""/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/mihir/jafar_og_reproduction/tokenizer/train_tokenizer_coinrun_og_reproduction/3466286""\nlam_ckpt_dir=""/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/mihir/jafar_og_reproduction/lam/train_lam_coinrun_og_reproduction/3466287""\n\nCHECKPOINT_DIR=""/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/mihir/jafar_og_reproduction/dynamics/${job_name}/${slurm_job_id}""\nmkdir -p $CHECKPOINT_DIR\n\nenv | grep SLURM\n\nsrun python train_dynamics.py \\n --ckpt_dir $CHECKPOINT_DIR \\n --tokenizer_checkpoint=""${tokenizer_ckpt_dir}"" \\n --patch_size=16 \\n --lam_checkpoint=""${lam_ckpt_dir}"" \\n --log_image_interval=1000 \\n --log \\n --name=""${job_name}_${slurm_job_id}"" \\n --tags ${tags} \\n --entity instant-uv \\n --project jafar \\n --data_dir $npy_records_dir \\n --wandb_id $slurm_job_id\n",shellscript,tab +15,134029,"slurm/jobs/mihir/horeka/jafar_og_reproduction/og_coinrun_dynamics_reproduction-sample.sh",526,0,"",shellscript,selection_mouse +16,134182,"slurm/jobs/mihir/horeka/jafar_og_reproduction/og_coinrun_dynamics_reproduction-sample.sh",525,1,"\n",shellscript,selection_mouse +17,134183,"slurm/jobs/mihir/horeka/jafar_og_reproduction/og_coinrun_dynamics_reproduction-sample.sh",494,32,"\n# Log the sbatch script\ncat $0\n",shellscript,selection_mouse +18,134184,"slurm/jobs/mihir/horeka/jafar_og_reproduction/og_coinrun_dynamics_reproduction-sample.sh",177,349,"#SBATCH --output=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir/jafar_og_reproduction/dynamics/%x_%j.log\n#SBATCH --error=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir/jafar_og_reproduction/dynamics/%x_%j.log\n#SBATCH --job-name=train_dynamics_coinrun_og_reproduction\n\n# Log the sbatch script\ncat $0\n",shellscript,selection_mouse +19,134185,"slurm/jobs/mihir/horeka/jafar_og_reproduction/og_coinrun_dynamics_reproduction-sample.sh",525,1,"\n",shellscript,selection_command +20,134263,"slurm/jobs/mihir/horeka/jafar_og_reproduction/og_coinrun_dynamics_reproduction-sample.sh",140,386,"#SBATCH --partition=accelerated-h100\n#SBATCH --output=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir/jafar_og_reproduction/dynamics/%x_%j.log\n#SBATCH --error=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir/jafar_og_reproduction/dynamics/%x_%j.log\n#SBATCH --job-name=train_dynamics_coinrun_og_reproduction\n\n# Log the sbatch 
script\ncat $0\n",shellscript,selection_mouse +21,134266,"slurm/jobs/mihir/horeka/jafar_og_reproduction/og_coinrun_dynamics_reproduction-sample.sh",119,407,"#SBATCH --gres=gpu:1\n#SBATCH --partition=accelerated-h100\n#SBATCH --output=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir/jafar_og_reproduction/dynamics/%x_%j.log\n#SBATCH --error=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir/jafar_og_reproduction/dynamics/%x_%j.log\n#SBATCH --job-name=train_dynamics_coinrun_og_reproduction\n\n# Log the sbatch script\ncat $0\n",shellscript,selection_mouse +22,134328,"slurm/jobs/mihir/horeka/jafar_og_reproduction/og_coinrun_dynamics_reproduction-sample.sh",93,433,"#SBATCH --cpus-per-task=8\n#SBATCH --gres=gpu:1\n#SBATCH --partition=accelerated-h100\n#SBATCH --output=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir/jafar_og_reproduction/dynamics/%x_%j.log\n#SBATCH --error=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir/jafar_og_reproduction/dynamics/%x_%j.log\n#SBATCH --job-name=train_dynamics_coinrun_og_reproduction\n\n# Log the sbatch script\ncat $0\n",shellscript,selection_mouse +23,134367,"slurm/jobs/mihir/horeka/jafar_og_reproduction/og_coinrun_dynamics_reproduction-sample.sh",67,459,"#SBATCH --time=2-00:00:00\n#SBATCH --cpus-per-task=8\n#SBATCH --gres=gpu:1\n#SBATCH --partition=accelerated-h100\n#SBATCH --output=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir/jafar_og_reproduction/dynamics/%x_%j.log\n#SBATCH --error=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir/jafar_og_reproduction/dynamics/%x_%j.log\n#SBATCH --job-name=train_dynamics_coinrun_og_reproduction\n\n# Log the sbatch script\ncat $0\n",shellscript,selection_mouse +24,134418,"slurm/jobs/mihir/horeka/jafar_og_reproduction/og_coinrun_dynamics_reproduction-sample.sh",39,487,"#SBATCH --ntasks-per-node=1\n#SBATCH --time=2-00:00:00\n#SBATCH --cpus-per-task=8\n#SBATCH --gres=gpu:1\n#SBATCH --partition=accelerated-h100\n#SBATCH --output=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir/jafar_og_reproduction/dynamics/%x_%j.log\n#SBATCH --error=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir/jafar_og_reproduction/dynamics/%x_%j.log\n#SBATCH --job-name=train_dynamics_coinrun_og_reproduction\n\n# Log the sbatch script\ncat $0\n",shellscript,selection_mouse +25,134441,"slurm/jobs/mihir/horeka/jafar_og_reproduction/og_coinrun_dynamics_reproduction-sample.sh",21,505,"#SBATCH --nodes=1\n#SBATCH --ntasks-per-node=1\n#SBATCH --time=2-00:00:00\n#SBATCH --cpus-per-task=8\n#SBATCH --gres=gpu:1\n#SBATCH --partition=accelerated-h100\n#SBATCH --output=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir/jafar_og_reproduction/dynamics/%x_%j.log\n#SBATCH --error=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir/jafar_og_reproduction/dynamics/%x_%j.log\n#SBATCH --job-name=train_dynamics_coinrun_og_reproduction\n\n# Log the sbatch script\ncat $0\n",shellscript,selection_mouse +26,134482,"slurm/jobs/mihir/horeka/jafar_og_reproduction/og_coinrun_dynamics_reproduction-sample.sh",20,506,"\n#SBATCH --nodes=1\n#SBATCH --ntasks-per-node=1\n#SBATCH --time=2-00:00:00\n#SBATCH --cpus-per-task=8\n#SBATCH --gres=gpu:1\n#SBATCH --partition=accelerated-h100\n#SBATCH --output=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir/jafar_og_reproduction/dynamics/%x_%j.log\n#SBATCH 
--error=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir/jafar_og_reproduction/dynamics/%x_%j.log\n#SBATCH --job-name=train_dynamics_coinrun_og_reproduction\n\n# Log the sbatch script\ncat $0\n",shellscript,selection_mouse +27,134564,"slurm/jobs/mihir/horeka/jafar_og_reproduction/og_coinrun_dynamics_reproduction-sample.sh",0,526,"#!/usr/bin/env bash\n\n#SBATCH --nodes=1\n#SBATCH --ntasks-per-node=1\n#SBATCH --time=2-00:00:00\n#SBATCH --cpus-per-task=8\n#SBATCH --gres=gpu:1\n#SBATCH --partition=accelerated-h100\n#SBATCH --output=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir/jafar_og_reproduction/dynamics/%x_%j.log\n#SBATCH --error=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir/jafar_og_reproduction/dynamics/%x_%j.log\n#SBATCH --job-name=train_dynamics_coinrun_og_reproduction\n\n# Log the sbatch script\ncat $0\n",shellscript,selection_mouse +28,135908,"slurm/jobs/mihir/horeka/jafar_og_reproduction/og_coinrun_dynamics_reproduction-sample.sh",0,527,"",shellscript,content +29,136537,"slurm/jobs/mihir/horeka/jafar_og_reproduction/og_coinrun_dynamics_reproduction-sample.sh",30,0,"",shellscript,selection_command +30,136723,"slurm/jobs/mihir/horeka/jafar_og_reproduction/og_coinrun_dynamics_reproduction-sample.sh",60,0,"",shellscript,selection_command +31,136866,"slurm/jobs/mihir/horeka/jafar_og_reproduction/og_coinrun_dynamics_reproduction-sample.sh",86,0,"",shellscript,selection_command +32,138675,"slurm/jobs/mihir/horeka/jafar_og_reproduction/og_coinrun_dynamics_reproduction-sample.sh",86,26,"",shellscript,content +33,139297,"slurm/jobs/mihir/horeka/jafar_og_reproduction/og_coinrun_dynamics_reproduction-sample.sh",86,27,"",shellscript,content +34,139687,"slurm/jobs/mihir/horeka/jafar_og_reproduction/og_coinrun_dynamics_reproduction-sample.sh",86,1,"",shellscript,content +35,140062,"slurm/jobs/mihir/horeka/jafar_og_reproduction/og_coinrun_dynamics_reproduction-sample.sh",86,52,"",shellscript,content +36,140378,"slurm/jobs/mihir/horeka/jafar_og_reproduction/og_coinrun_dynamics_reproduction-sample.sh",87,0,"",shellscript,selection_command +37,140861,"slurm/jobs/mihir/horeka/jafar_og_reproduction/og_coinrun_dynamics_reproduction-sample.sh",195,0,"",shellscript,selection_command +38,141070,"slurm/jobs/mihir/horeka/jafar_og_reproduction/og_coinrun_dynamics_reproduction-sample.sh",196,0,"",shellscript,selection_command +39,141206,"slurm/jobs/mihir/horeka/jafar_og_reproduction/og_coinrun_dynamics_reproduction-sample.sh",251,0,"",shellscript,selection_command +40,141391,"slurm/jobs/mihir/horeka/jafar_og_reproduction/og_coinrun_dynamics_reproduction-sample.sh",426,0,"",shellscript,selection_command +41,141527,"slurm/jobs/mihir/horeka/jafar_og_reproduction/og_coinrun_dynamics_reproduction-sample.sh",583,0,"",shellscript,selection_command +42,141750,"slurm/jobs/mihir/horeka/jafar_og_reproduction/og_coinrun_dynamics_reproduction-sample.sh",584,0,"",shellscript,selection_command +43,141946,"slurm/jobs/mihir/horeka/jafar_og_reproduction/og_coinrun_dynamics_reproduction-sample.sh",583,0,"",shellscript,selection_command +44,142105,"slurm/jobs/mihir/horeka/jafar_og_reproduction/og_coinrun_dynamics_reproduction-sample.sh",426,0,"",shellscript,selection_command +45,142298,"slurm/jobs/mihir/horeka/jafar_og_reproduction/og_coinrun_dynamics_reproduction-sample.sh",251,0,"",shellscript,selection_command +46,142912,"slurm/jobs/mihir/horeka/jafar_og_reproduction/og_coinrun_dynamics_reproduction-sample.sh",196,0,"",shellscript,selection_command 
+47,144098,"slurm/jobs/mihir/horeka/jafar_og_reproduction/og_coinrun_dynamics_reproduction-sample.sh",251,0,"",shellscript,selection_command +48,144303,"slurm/jobs/mihir/horeka/jafar_og_reproduction/og_coinrun_dynamics_reproduction-sample.sh",426,0,"",shellscript,selection_command +49,144472,"slurm/jobs/mihir/horeka/jafar_og_reproduction/og_coinrun_dynamics_reproduction-sample.sh",583,0,"",shellscript,selection_command +50,144650,"slurm/jobs/mihir/horeka/jafar_og_reproduction/og_coinrun_dynamics_reproduction-sample.sh",584,0,"",shellscript,selection_command +51,144982,"slurm/jobs/mihir/horeka/jafar_og_reproduction/og_coinrun_dynamics_reproduction-sample.sh",734,0,"",shellscript,selection_command +52,147289,"slurm/jobs/mihir/horeka/jafar_og_reproduction/og_coinrun_dynamics_reproduction-sample.sh",584,0,"",shellscript,selection_command +53,147485,"slurm/jobs/mihir/horeka/jafar_og_reproduction/og_coinrun_dynamics_reproduction-sample.sh",583,0,"",shellscript,selection_command +54,147608,"slurm/jobs/mihir/horeka/jafar_og_reproduction/og_coinrun_dynamics_reproduction-sample.sh",426,0,"",shellscript,selection_command +55,148296,"slurm/jobs/mihir/horeka/jafar_og_reproduction/og_coinrun_dynamics_reproduction-sample.sh",582,0,"\n",shellscript,content +56,150043,"slurm/jobs/mihir/horeka/jafar_og_reproduction/og_coinrun_dynamics_reproduction-sample.sh",583,0,"\n",shellscript,content +57,150288,"slurm/jobs/mihir/horeka/jafar_og_reproduction/og_coinrun_dynamics_reproduction-sample.sh",584,0,"d",shellscript,content +58,150289,"slurm/jobs/mihir/horeka/jafar_og_reproduction/og_coinrun_dynamics_reproduction-sample.sh",585,0,"",shellscript,selection_keyboard +59,150526,"slurm/jobs/mihir/horeka/jafar_og_reproduction/og_coinrun_dynamics_reproduction-sample.sh",585,0,"y",shellscript,content +60,150526,"slurm/jobs/mihir/horeka/jafar_og_reproduction/og_coinrun_dynamics_reproduction-sample.sh",586,0,"",shellscript,selection_keyboard +61,150592,"slurm/jobs/mihir/horeka/jafar_og_reproduction/og_coinrun_dynamics_reproduction-sample.sh",586,0,"n",shellscript,content +62,150593,"slurm/jobs/mihir/horeka/jafar_og_reproduction/og_coinrun_dynamics_reproduction-sample.sh",587,0,"",shellscript,selection_keyboard +63,150939,"slurm/jobs/mihir/horeka/jafar_og_reproduction/og_coinrun_dynamics_reproduction-sample.sh",587,0,"_",shellscript,content +64,150940,"slurm/jobs/mihir/horeka/jafar_og_reproduction/og_coinrun_dynamics_reproduction-sample.sh",588,0,"",shellscript,selection_keyboard +65,151201,"slurm/jobs/mihir/horeka/jafar_og_reproduction/og_coinrun_dynamics_reproduction-sample.sh",588,0,"c",shellscript,content +66,151202,"slurm/jobs/mihir/horeka/jafar_og_reproduction/og_coinrun_dynamics_reproduction-sample.sh",589,0,"",shellscript,selection_keyboard +67,151306,"slurm/jobs/mihir/horeka/jafar_og_reproduction/og_coinrun_dynamics_reproduction-sample.sh",589,0,"k",shellscript,content +68,151307,"slurm/jobs/mihir/horeka/jafar_og_reproduction/og_coinrun_dynamics_reproduction-sample.sh",590,0,"",shellscript,selection_keyboard +69,151515,"slurm/jobs/mihir/horeka/jafar_og_reproduction/og_coinrun_dynamics_reproduction-sample.sh",590,0,"p",shellscript,content +70,151516,"slurm/jobs/mihir/horeka/jafar_og_reproduction/og_coinrun_dynamics_reproduction-sample.sh",591,0,"",shellscript,selection_keyboard +71,151648,"slurm/jobs/mihir/horeka/jafar_og_reproduction/og_coinrun_dynamics_reproduction-sample.sh",591,0,"t",shellscript,content 
+72,151649,"slurm/jobs/mihir/horeka/jafar_og_reproduction/og_coinrun_dynamics_reproduction-sample.sh",592,0,"",shellscript,selection_keyboard +73,151860,"slurm/jobs/mihir/horeka/jafar_og_reproduction/og_coinrun_dynamics_reproduction-sample.sh",592,0,"_",shellscript,content +74,151861,"slurm/jobs/mihir/horeka/jafar_og_reproduction/og_coinrun_dynamics_reproduction-sample.sh",593,0,"",shellscript,selection_keyboard +75,152047,"slurm/jobs/mihir/horeka/jafar_og_reproduction/og_coinrun_dynamics_reproduction-sample.sh",593,0,"d",shellscript,content +76,152048,"slurm/jobs/mihir/horeka/jafar_og_reproduction/og_coinrun_dynamics_reproduction-sample.sh",594,0,"",shellscript,selection_keyboard +77,152275,"slurm/jobs/mihir/horeka/jafar_og_reproduction/og_coinrun_dynamics_reproduction-sample.sh",594,0,"r",shellscript,content +78,152276,"slurm/jobs/mihir/horeka/jafar_og_reproduction/og_coinrun_dynamics_reproduction-sample.sh",595,0,"",shellscript,selection_keyboard +79,152595,"slurm/jobs/mihir/horeka/jafar_og_reproduction/og_coinrun_dynamics_reproduction-sample.sh",594,1,"",shellscript,content +80,152727,"slurm/jobs/mihir/horeka/jafar_og_reproduction/og_coinrun_dynamics_reproduction-sample.sh",594,0,"i",shellscript,content +81,152728,"slurm/jobs/mihir/horeka/jafar_og_reproduction/og_coinrun_dynamics_reproduction-sample.sh",595,0,"",shellscript,selection_keyboard +82,152856,"slurm/jobs/mihir/horeka/jafar_og_reproduction/og_coinrun_dynamics_reproduction-sample.sh",595,0,"r",shellscript,content +83,152857,"slurm/jobs/mihir/horeka/jafar_og_reproduction/og_coinrun_dynamics_reproduction-sample.sh",596,0,"",shellscript,selection_keyboard +84,153300,"slurm/jobs/mihir/horeka/jafar_og_reproduction/og_coinrun_dynamics_reproduction-sample.sh",596,0,"=",shellscript,content +85,153301,"slurm/jobs/mihir/horeka/jafar_og_reproduction/og_coinrun_dynamics_reproduction-sample.sh",597,0,"",shellscript,selection_keyboard +86,153591,"slurm/jobs/mihir/horeka/jafar_og_reproduction/og_coinrun_dynamics_reproduction-sample.sh",597,0,"""",shellscript,content +87,153592,"slurm/jobs/mihir/horeka/jafar_og_reproduction/og_coinrun_dynamics_reproduction-sample.sh",598,0,"",shellscript,selection_keyboard +88,154498,"slurm/jobs/mihir/horeka/jafar_og_reproduction/og_coinrun_dynamics_reproduction-sample.sh",598,0,"""",shellscript,content +89,154499,"slurm/jobs/mihir/horeka/jafar_og_reproduction/og_coinrun_dynamics_reproduction-sample.sh",599,0,"",shellscript,selection_keyboard +90,155082,"slurm/jobs/mihir/horeka/jafar_og_reproduction/og_coinrun_dynamics_reproduction-sample.sh",598,0,"",shellscript,selection_command +91,155269,"slurm/jobs/mihir/horeka/jafar_og_reproduction/og_coinrun_dynamics_reproduction-sample.sh",600,0,"",shellscript,selection_command +92,155678,"slurm/jobs/mihir/horeka/jafar_og_reproduction/og_coinrun_dynamics_reproduction-sample.sh",615,0,"",shellscript,selection_command +93,158939,"slurm/jobs/mihir/horeka/jafar_og_reproduction/og_coinrun_dynamics_reproduction-sample.sh",765,0,"",shellscript,selection_command +94,159460,"slurm/jobs/mihir/horeka/jafar_og_reproduction/og_coinrun_dynamics_reproduction-sample.sh",776,0,"",shellscript,selection_command +95,159508,"slurm/jobs/mihir/horeka/jafar_og_reproduction/og_coinrun_dynamics_reproduction-sample.sh",791,0,"",shellscript,selection_command +96,159541,"slurm/jobs/mihir/horeka/jafar_og_reproduction/og_coinrun_dynamics_reproduction-sample.sh",794,0,"",shellscript,selection_command 
+97,159576,"slurm/jobs/mihir/horeka/jafar_og_reproduction/og_coinrun_dynamics_reproduction-sample.sh",809,0,"",shellscript,selection_command +98,159604,"slurm/jobs/mihir/horeka/jafar_og_reproduction/og_coinrun_dynamics_reproduction-sample.sh",841,0,"",shellscript,selection_command +99,159642,"slurm/jobs/mihir/horeka/jafar_og_reproduction/og_coinrun_dynamics_reproduction-sample.sh",874,0,"",shellscript,selection_command +100,159675,"slurm/jobs/mihir/horeka/jafar_og_reproduction/og_coinrun_dynamics_reproduction-sample.sh",927,0,"",shellscript,selection_command +101,159714,"slurm/jobs/mihir/horeka/jafar_og_reproduction/og_coinrun_dynamics_reproduction-sample.sh",949,0,"",shellscript,selection_command +102,159715,"slurm/jobs/mihir/horeka/jafar_og_reproduction/og_coinrun_dynamics_reproduction-sample.sh",990,0,"",shellscript,selection_command +103,159736,"slurm/jobs/mihir/horeka/jafar_og_reproduction/og_coinrun_dynamics_reproduction-sample.sh",1018,0,"",shellscript,selection_command +104,159759,"slurm/jobs/mihir/horeka/jafar_og_reproduction/og_coinrun_dynamics_reproduction-sample.sh",1034,0,"",shellscript,selection_command +105,159818,"slurm/jobs/mihir/horeka/jafar_og_reproduction/og_coinrun_dynamics_reproduction-sample.sh",1077,0,"",shellscript,selection_command +106,159819,"slurm/jobs/mihir/horeka/jafar_og_reproduction/og_coinrun_dynamics_reproduction-sample.sh",1098,0,"",shellscript,selection_command +107,159858,"slurm/jobs/mihir/horeka/jafar_og_reproduction/og_coinrun_dynamics_reproduction-sample.sh",1124,0,"",shellscript,selection_command +108,160048,"slurm/jobs/mihir/horeka/jafar_og_reproduction/og_coinrun_dynamics_reproduction-sample.sh",1146,0,"",shellscript,selection_command +109,160519,"slurm/jobs/mihir/horeka/jafar_og_reproduction/og_coinrun_dynamics_reproduction-sample.sh",1124,0,"",shellscript,selection_command +110,161053,"slurm/jobs/mihir/horeka/jafar_og_reproduction/og_coinrun_dynamics_reproduction-sample.sh",1098,0,"",shellscript,selection_command +111,161086,"slurm/jobs/mihir/horeka/jafar_og_reproduction/og_coinrun_dynamics_reproduction-sample.sh",1077,0,"",shellscript,selection_command +112,161100,"slurm/jobs/mihir/horeka/jafar_og_reproduction/og_coinrun_dynamics_reproduction-sample.sh",1034,0,"",shellscript,selection_command +113,161145,"slurm/jobs/mihir/horeka/jafar_og_reproduction/og_coinrun_dynamics_reproduction-sample.sh",1018,0,"",shellscript,selection_command +114,161183,"slurm/jobs/mihir/horeka/jafar_og_reproduction/og_coinrun_dynamics_reproduction-sample.sh",990,0,"",shellscript,selection_command +115,161207,"slurm/jobs/mihir/horeka/jafar_og_reproduction/og_coinrun_dynamics_reproduction-sample.sh",949,0,"",shellscript,selection_command +116,161253,"slurm/jobs/mihir/horeka/jafar_og_reproduction/og_coinrun_dynamics_reproduction-sample.sh",927,0,"",shellscript,selection_command +117,161279,"slurm/jobs/mihir/horeka/jafar_og_reproduction/og_coinrun_dynamics_reproduction-sample.sh",874,0,"",shellscript,selection_command +118,161312,"slurm/jobs/mihir/horeka/jafar_og_reproduction/og_coinrun_dynamics_reproduction-sample.sh",841,0,"",shellscript,selection_command +119,161345,"slurm/jobs/mihir/horeka/jafar_og_reproduction/og_coinrun_dynamics_reproduction-sample.sh",809,0,"",shellscript,selection_command +120,161346,"slurm/jobs/mihir/horeka/jafar_og_reproduction/og_coinrun_dynamics_reproduction-sample.sh",794,0,"",shellscript,selection_command 
+121,161894,"slurm/jobs/mihir/horeka/jafar_og_reproduction/og_coinrun_dynamics_reproduction-sample.sh",809,0,"",shellscript,selection_command +122,162235,"slurm/jobs/mihir/horeka/jafar_og_reproduction/og_coinrun_dynamics_reproduction-sample.sh",808,0,"",shellscript,selection_command +123,162461,"slurm/jobs/mihir/horeka/jafar_og_reproduction/og_coinrun_dynamics_reproduction-sample.sh",807,0,"",shellscript,selection_command +124,162986,"slurm/jobs/mihir/horeka/jafar_og_reproduction/og_coinrun_dynamics_reproduction-sample.sh",807,14,"",shellscript,content +125,163491,"slurm/jobs/mihir/horeka/jafar_og_reproduction/og_coinrun_dynamics_reproduction-sample.sh",807,0,"s",shellscript,content +126,163494,"slurm/jobs/mihir/horeka/jafar_og_reproduction/og_coinrun_dynamics_reproduction-sample.sh",808,0,"",shellscript,selection_keyboard +127,163681,"slurm/jobs/mihir/horeka/jafar_og_reproduction/og_coinrun_dynamics_reproduction-sample.sh",808,0,"a",shellscript,content +128,163682,"slurm/jobs/mihir/horeka/jafar_og_reproduction/og_coinrun_dynamics_reproduction-sample.sh",809,0,"",shellscript,selection_keyboard +129,163729,"slurm/jobs/mihir/horeka/jafar_og_reproduction/og_coinrun_dynamics_reproduction-sample.sh",809,0,"m",shellscript,content +130,163730,"slurm/jobs/mihir/horeka/jafar_og_reproduction/og_coinrun_dynamics_reproduction-sample.sh",810,0,"",shellscript,selection_keyboard +131,163921,"slurm/jobs/mihir/horeka/jafar_og_reproduction/og_coinrun_dynamics_reproduction-sample.sh",810,0,"p",shellscript,content +132,163922,"slurm/jobs/mihir/horeka/jafar_og_reproduction/og_coinrun_dynamics_reproduction-sample.sh",811,0,"",shellscript,selection_keyboard +133,163986,"slurm/jobs/mihir/horeka/jafar_og_reproduction/og_coinrun_dynamics_reproduction-sample.sh",811,0,"l",shellscript,content +134,163987,"slurm/jobs/mihir/horeka/jafar_og_reproduction/og_coinrun_dynamics_reproduction-sample.sh",812,0,"",shellscript,selection_keyboard +135,164067,"slurm/jobs/mihir/horeka/jafar_og_reproduction/og_coinrun_dynamics_reproduction-sample.sh",812,0,"e",shellscript,content +136,164068,"slurm/jobs/mihir/horeka/jafar_og_reproduction/og_coinrun_dynamics_reproduction-sample.sh",813,0,"",shellscript,selection_keyboard +137,164613,"slurm/jobs/mihir/horeka/jafar_og_reproduction/og_coinrun_dynamics_reproduction-sample.sh",813,0,"h",shellscript,content +138,164614,"slurm/jobs/mihir/horeka/jafar_og_reproduction/og_coinrun_dynamics_reproduction-sample.sh",814,0,"",shellscript,selection_keyboard +139,165128,"slurm/jobs/mihir/horeka/jafar_og_reproduction/og_coinrun_dynamics_reproduction-sample.sh",813,1,"",shellscript,content +140,165267,"slurm/jobs/mihir/horeka/jafar_og_reproduction/og_coinrun_dynamics_reproduction-sample.sh",812,0,"",shellscript,selection_command +141,165432,"slurm/jobs/mihir/horeka/jafar_og_reproduction/og_coinrun_dynamics_reproduction-sample.sh",811,0,"",shellscript,selection_command +142,165844,"slurm/jobs/mihir/horeka/jafar_og_reproduction/og_coinrun_dynamics_reproduction-sample.sh",810,0,"",shellscript,selection_command +143,165918,"slurm/jobs/mihir/horeka/jafar_og_reproduction/og_coinrun_dynamics_reproduction-sample.sh",809,0,"",shellscript,selection_command +144,165923,"slurm/jobs/mihir/horeka/jafar_og_reproduction/og_coinrun_dynamics_reproduction-sample.sh",808,0,"",shellscript,selection_command +145,165944,"slurm/jobs/mihir/horeka/jafar_og_reproduction/og_coinrun_dynamics_reproduction-sample.sh",807,0,"",shellscript,selection_command 
+146,166026,"slurm/jobs/mihir/horeka/jafar_og_reproduction/og_coinrun_dynamics_reproduction-sample.sh",806,0,"",shellscript,selection_command +147,166027,"slurm/jobs/mihir/horeka/jafar_og_reproduction/og_coinrun_dynamics_reproduction-sample.sh",805,0,"",shellscript,selection_command +148,166065,"slurm/jobs/mihir/horeka/jafar_og_reproduction/og_coinrun_dynamics_reproduction-sample.sh",804,0,"",shellscript,selection_command +149,166100,"slurm/jobs/mihir/horeka/jafar_og_reproduction/og_coinrun_dynamics_reproduction-sample.sh",803,0,"",shellscript,selection_command +150,166121,"slurm/jobs/mihir/horeka/jafar_og_reproduction/og_coinrun_dynamics_reproduction-sample.sh",802,0,"",shellscript,selection_command +151,166143,"slurm/jobs/mihir/horeka/jafar_og_reproduction/og_coinrun_dynamics_reproduction-sample.sh",801,0,"",shellscript,selection_command +152,166179,"slurm/jobs/mihir/horeka/jafar_og_reproduction/og_coinrun_dynamics_reproduction-sample.sh",800,0,"",shellscript,selection_command +153,166191,"slurm/jobs/mihir/horeka/jafar_og_reproduction/og_coinrun_dynamics_reproduction-sample.sh",799,0,"",shellscript,selection_command +154,166229,"slurm/jobs/mihir/horeka/jafar_og_reproduction/og_coinrun_dynamics_reproduction-sample.sh",798,0,"",shellscript,selection_command +155,166341,"slurm/jobs/mihir/horeka/jafar_og_reproduction/og_coinrun_dynamics_reproduction-sample.sh",797,0,"",shellscript,selection_command +156,166466,"slurm/jobs/mihir/horeka/jafar_og_reproduction/og_coinrun_dynamics_reproduction-sample.sh",796,0,"",shellscript,selection_command +157,166647,"slurm/jobs/mihir/horeka/jafar_og_reproduction/og_coinrun_dynamics_reproduction-sample.sh",795,0,"",shellscript,selection_command +158,166888,"slurm/jobs/mihir/horeka/jafar_og_reproduction/og_coinrun_dynamics_reproduction-sample.sh",795,5,"",shellscript,content +159,167113,"slurm/jobs/mihir/horeka/jafar_og_reproduction/og_coinrun_dynamics_reproduction-sample.sh",795,1,"",shellscript,content +160,168517,"slurm/jobs/mihir/horeka/jafar_og_reproduction/og_coinrun_dynamics_reproduction-sample.sh",795,0,"p",shellscript,content +161,168518,"slurm/jobs/mihir/horeka/jafar_og_reproduction/og_coinrun_dynamics_reproduction-sample.sh",796,0,"",shellscript,selection_keyboard +162,168632,"slurm/jobs/mihir/horeka/jafar_og_reproduction/og_coinrun_dynamics_reproduction-sample.sh",795,0,"",shellscript,selection_command +163,169118,"slurm/jobs/mihir/horeka/jafar_og_reproduction/og_coinrun_dynamics_reproduction-sample.sh",814,0,"",shellscript,selection_command +164,169254,"slurm/jobs/mihir/horeka/jafar_og_reproduction/og_coinrun_dynamics_reproduction-sample.sh",847,0,"",shellscript,selection_command +165,169655,"slurm/jobs/mihir/horeka/jafar_og_reproduction/og_coinrun_dynamics_reproduction-sample.sh",814,0,"",shellscript,selection_command +166,172983,"sample.py",0,0,"",python,tab +167,175230,"sample.py",0,0,"",python,tab +168,180098,"slurm/jobs/mihir/horeka/jafar_og_reproduction/og_coinrun_dynamics_reproduction-sample.sh",0,0,"",shellscript,tab +169,180099,"slurm/jobs/mihir/horeka/jafar_og_reproduction/og_coinrun_dynamics_reproduction-sample.sh",1006,0,"",shellscript,selection_mouse +170,180184,"slurm/jobs/mihir/horeka/jafar_og_reproduction/og_coinrun_dynamics_reproduction-sample.sh",1005,0,"",shellscript,selection_command +171,180623,"slurm/jobs/mihir/horeka/jafar_og_reproduction/og_coinrun_dynamics_reproduction-sample.sh",832,0,"",shellscript,selection_mouse 
+172,182730,"slurm/jobs/mihir/horeka/jafar_og_reproduction/og_coinrun_dynamics_reproduction-sample.sh",831,0,"",shellscript,selection_command +173,183050,"slurm/jobs/mihir/horeka/jafar_og_reproduction/og_coinrun_dynamics_reproduction-sample.sh",830,0,"",shellscript,selection_command +174,189074,"slurm/jobs/mihir/horeka/jafar_og_reproduction/og_coinrun_dynamics_reproduction-sample.sh",829,0,"",shellscript,selection_command +175,189301,"slurm/jobs/mihir/horeka/jafar_og_reproduction/og_coinrun_dynamics_reproduction-sample.sh",828,0,"",shellscript,selection_command +176,189498,"slurm/jobs/mihir/horeka/jafar_og_reproduction/og_coinrun_dynamics_reproduction-sample.sh",827,0,"",shellscript,selection_command +177,189639,"slurm/jobs/mihir/horeka/jafar_og_reproduction/og_coinrun_dynamics_reproduction-sample.sh",826,0,"",shellscript,selection_command +178,189779,"slurm/jobs/mihir/horeka/jafar_og_reproduction/og_coinrun_dynamics_reproduction-sample.sh",825,0,"",shellscript,selection_command +179,189928,"slurm/jobs/mihir/horeka/jafar_og_reproduction/og_coinrun_dynamics_reproduction-sample.sh",824,0,"",shellscript,selection_command +180,190112,"slurm/jobs/mihir/horeka/jafar_og_reproduction/og_coinrun_dynamics_reproduction-sample.sh",823,0,"",shellscript,selection_command +181,190242,"slurm/jobs/mihir/horeka/jafar_og_reproduction/og_coinrun_dynamics_reproduction-sample.sh",822,0,"",shellscript,selection_command +182,190401,"slurm/jobs/mihir/horeka/jafar_og_reproduction/og_coinrun_dynamics_reproduction-sample.sh",821,0,"",shellscript,selection_command +183,190622,"slurm/jobs/mihir/horeka/jafar_og_reproduction/og_coinrun_dynamics_reproduction-sample.sh",820,0,"",shellscript,selection_command +184,190833,"slurm/jobs/mihir/horeka/jafar_og_reproduction/og_coinrun_dynamics_reproduction-sample.sh",820,9,"",shellscript,content +185,191214,"slurm/jobs/mihir/horeka/jafar_og_reproduction/og_coinrun_dynamics_reproduction-sample.sh",820,0,"c",shellscript,content +186,191215,"slurm/jobs/mihir/horeka/jafar_og_reproduction/og_coinrun_dynamics_reproduction-sample.sh",821,0,"",shellscript,selection_keyboard +187,191337,"slurm/jobs/mihir/horeka/jafar_og_reproduction/og_coinrun_dynamics_reproduction-sample.sh",821,0,"h",shellscript,content +188,191338,"slurm/jobs/mihir/horeka/jafar_og_reproduction/og_coinrun_dynamics_reproduction-sample.sh",822,0,"",shellscript,selection_keyboard +189,191593,"slurm/jobs/mihir/horeka/jafar_og_reproduction/og_coinrun_dynamics_reproduction-sample.sh",822,0,"e",shellscript,content +190,191594,"slurm/jobs/mihir/horeka/jafar_og_reproduction/og_coinrun_dynamics_reproduction-sample.sh",823,0,"",shellscript,selection_keyboard +191,191704,"slurm/jobs/mihir/horeka/jafar_og_reproduction/og_coinrun_dynamics_reproduction-sample.sh",823,0,"k",shellscript,content +192,191705,"slurm/jobs/mihir/horeka/jafar_og_reproduction/og_coinrun_dynamics_reproduction-sample.sh",824,0,"",shellscript,selection_keyboard +193,192297,"slurm/jobs/mihir/horeka/jafar_og_reproduction/og_coinrun_dynamics_reproduction-sample.sh",823,1,"",shellscript,content +194,192388,"slurm/jobs/mihir/horeka/jafar_og_reproduction/og_coinrun_dynamics_reproduction-sample.sh",823,0,"c",shellscript,content +195,192389,"slurm/jobs/mihir/horeka/jafar_og_reproduction/og_coinrun_dynamics_reproduction-sample.sh",824,0,"",shellscript,selection_keyboard +196,192490,"slurm/jobs/mihir/horeka/jafar_og_reproduction/og_coinrun_dynamics_reproduction-sample.sh",824,0,"k",shellscript,content 
+197,192491,"slurm/jobs/mihir/horeka/jafar_og_reproduction/og_coinrun_dynamics_reproduction-sample.sh",825,0,"",shellscript,selection_keyboard +198,192723,"slurm/jobs/mihir/horeka/jafar_og_reproduction/og_coinrun_dynamics_reproduction-sample.sh",825,0,"p",shellscript,content +199,192724,"slurm/jobs/mihir/horeka/jafar_og_reproduction/og_coinrun_dynamics_reproduction-sample.sh",826,0,"",shellscript,selection_keyboard +200,192919,"slurm/jobs/mihir/horeka/jafar_og_reproduction/og_coinrun_dynamics_reproduction-sample.sh",826,0,"o",shellscript,content +201,192919,"slurm/jobs/mihir/horeka/jafar_og_reproduction/og_coinrun_dynamics_reproduction-sample.sh",827,0,"",shellscript,selection_keyboard +202,193118,"slurm/jobs/mihir/horeka/jafar_og_reproduction/og_coinrun_dynamics_reproduction-sample.sh",827,0,"i",shellscript,content +203,193119,"slurm/jobs/mihir/horeka/jafar_og_reproduction/og_coinrun_dynamics_reproduction-sample.sh",828,0,"",shellscript,selection_keyboard +204,193189,"slurm/jobs/mihir/horeka/jafar_og_reproduction/og_coinrun_dynamics_reproduction-sample.sh",828,0,"n",shellscript,content +205,193190,"slurm/jobs/mihir/horeka/jafar_og_reproduction/og_coinrun_dynamics_reproduction-sample.sh",829,0,"",shellscript,selection_keyboard +206,193272,"slurm/jobs/mihir/horeka/jafar_og_reproduction/og_coinrun_dynamics_reproduction-sample.sh",829,0,"t",shellscript,content +207,193273,"slurm/jobs/mihir/horeka/jafar_og_reproduction/og_coinrun_dynamics_reproduction-sample.sh",830,0,"",shellscript,selection_keyboard +208,193611,"slurm/jobs/mihir/horeka/jafar_og_reproduction/og_coinrun_dynamics_reproduction-sample.sh",830,0," ",shellscript,content +209,193611,"slurm/jobs/mihir/horeka/jafar_og_reproduction/og_coinrun_dynamics_reproduction-sample.sh",831,0,"",shellscript,selection_keyboard +210,196125,"slurm/jobs/mihir/horeka/jafar_og_reproduction/og_coinrun_dynamics_reproduction-sample.sh",831,1,"",shellscript,content +211,196409,"slurm/jobs/mihir/horeka/jafar_og_reproduction/og_coinrun_dynamics_reproduction-sample.sh",831,14,"",shellscript,content +212,197941,"slurm/jobs/mihir/horeka/jafar_og_reproduction/og_coinrun_dynamics_reproduction-sample.sh",831,0,"$",shellscript,content +213,197942,"slurm/jobs/mihir/horeka/jafar_og_reproduction/og_coinrun_dynamics_reproduction-sample.sh",832,0,"",shellscript,selection_keyboard +214,198423,"slurm/jobs/mihir/horeka/jafar_og_reproduction/og_coinrun_dynamics_reproduction-sample.sh",832,0,"d",shellscript,content +215,198424,"slurm/jobs/mihir/horeka/jafar_og_reproduction/og_coinrun_dynamics_reproduction-sample.sh",833,0,"",shellscript,selection_keyboard +216,198658,"slurm/jobs/mihir/horeka/jafar_og_reproduction/og_coinrun_dynamics_reproduction-sample.sh",833,0,"y",shellscript,content +217,198659,"slurm/jobs/mihir/horeka/jafar_og_reproduction/og_coinrun_dynamics_reproduction-sample.sh",834,0,"",shellscript,selection_keyboard +218,198731,"slurm/jobs/mihir/horeka/jafar_og_reproduction/og_coinrun_dynamics_reproduction-sample.sh",834,0,"n",shellscript,content +219,198732,"slurm/jobs/mihir/horeka/jafar_og_reproduction/og_coinrun_dynamics_reproduction-sample.sh",835,0,"",shellscript,selection_keyboard +220,199216,"slurm/jobs/mihir/horeka/jafar_og_reproduction/og_coinrun_dynamics_reproduction-sample.sh",832,3,"dyn_ckpt_dir",shellscript,content +221,199987,"slurm/jobs/mihir/horeka/jafar_og_reproduction/og_coinrun_dynamics_reproduction-sample.sh",843,0,"",shellscript,selection_command 
+222,200118,"slurm/jobs/mihir/horeka/jafar_og_reproduction/og_coinrun_dynamics_reproduction-sample.sh",812,0,"",shellscript,selection_command +223,200307,"slurm/jobs/mihir/horeka/jafar_og_reproduction/og_coinrun_dynamics_reproduction-sample.sh",794,0,"",shellscript,selection_command +224,200464,"slurm/jobs/mihir/horeka/jafar_og_reproduction/og_coinrun_dynamics_reproduction-sample.sh",792,0,"",shellscript,selection_command +225,200583,"slurm/jobs/mihir/horeka/jafar_og_reproduction/og_coinrun_dynamics_reproduction-sample.sh",776,0,"",shellscript,selection_command +226,200723,"slurm/jobs/mihir/horeka/jafar_og_reproduction/og_coinrun_dynamics_reproduction-sample.sh",774,0,"",shellscript,selection_command +227,201141,"slurm/jobs/mihir/horeka/jafar_og_reproduction/og_coinrun_dynamics_reproduction-sample.sh",630,0,"",shellscript,selection_command +228,201401,"slurm/jobs/mihir/horeka/jafar_og_reproduction/og_coinrun_dynamics_reproduction-sample.sh",601,175,"",shellscript,content +229,202345,"slurm/jobs/mihir/horeka/jafar_og_reproduction/og_coinrun_dynamics_reproduction-sample.sh",601,1,"",shellscript,content +230,203851,"slurm/jobs/mihir/horeka/jafar_og_reproduction/og_coinrun_dynamics_reproduction-sample.sh",601,17,"",shellscript,content +231,204314,"slurm/jobs/mihir/horeka/jafar_og_reproduction/og_coinrun_dynamics_reproduction-sample.sh",601,1,"",shellscript,content +232,204656,"slurm/jobs/mihir/horeka/jafar_og_reproduction/og_coinrun_dynamics_reproduction-sample.sh",620,0,"",shellscript,selection_command +233,204835,"slurm/jobs/mihir/horeka/jafar_og_reproduction/og_coinrun_dynamics_reproduction-sample.sh",653,0,"",shellscript,selection_command +234,206551,"slurm/jobs/mihir/horeka/jafar_og_reproduction/og_coinrun_dynamics_reproduction-sample.sh",653,53,"",shellscript,content +235,206599,"slurm/jobs/mihir/horeka/jafar_og_reproduction/og_coinrun_dynamics_reproduction-sample.sh",657,0,"",shellscript,selection_command +236,209296,"slurm/jobs/mihir/horeka/jafar_og_reproduction/og_coinrun_dynamics_reproduction-sample.sh",679,0,"",shellscript,selection_command +237,210151,"slurm/jobs/mihir/horeka/jafar_og_reproduction/og_coinrun_dynamics_reproduction-sample.sh",675,41,"",shellscript,content +238,210200,"slurm/jobs/mihir/horeka/jafar_og_reproduction/og_coinrun_dynamics_reproduction-sample.sh",679,0,"",shellscript,selection_command +239,210689,"slurm/jobs/mihir/horeka/jafar_og_reproduction/og_coinrun_dynamics_reproduction-sample.sh",675,32,"",shellscript,content +240,210745,"slurm/jobs/mihir/horeka/jafar_og_reproduction/og_coinrun_dynamics_reproduction-sample.sh",679,0,"",shellscript,selection_command +241,211152,"slurm/jobs/mihir/horeka/jafar_og_reproduction/og_coinrun_dynamics_reproduction-sample.sh",675,12,"",shellscript,content +242,211204,"slurm/jobs/mihir/horeka/jafar_og_reproduction/og_coinrun_dynamics_reproduction-sample.sh",679,0,"",shellscript,selection_command +243,212244,"slurm/jobs/mihir/horeka/jafar_og_reproduction/og_coinrun_dynamics_reproduction-sample.sh",675,43,"",shellscript,content +244,212293,"slurm/jobs/mihir/horeka/jafar_og_reproduction/og_coinrun_dynamics_reproduction-sample.sh",679,0,"",shellscript,selection_command +245,213392,"slurm/jobs/mihir/horeka/jafar_og_reproduction/og_coinrun_dynamics_reproduction-sample.sh",675,21,"",shellscript,content +246,213453,"slurm/jobs/mihir/horeka/jafar_og_reproduction/og_coinrun_dynamics_reproduction-sample.sh",679,0,"",shellscript,selection_command 
+247,214097,"slurm/jobs/mihir/horeka/jafar_og_reproduction/og_coinrun_dynamics_reproduction-sample.sh",675,26,"",shellscript,content +248,214188,"slurm/jobs/mihir/horeka/jafar_og_reproduction/og_coinrun_dynamics_reproduction-sample.sh",679,0,"",shellscript,selection_command +249,214702,"slurm/jobs/mihir/horeka/jafar_og_reproduction/og_coinrun_dynamics_reproduction-sample.sh",675,22,"",shellscript,content +250,214731,"slurm/jobs/mihir/horeka/jafar_og_reproduction/og_coinrun_dynamics_reproduction-sample.sh",679,0,"",shellscript,selection_command +251,215869,"slurm/jobs/mihir/horeka/jafar_og_reproduction/og_coinrun_dynamics_reproduction-sample.sh",713,0,"",shellscript,selection_command +252,216376,"slurm/jobs/mihir/horeka/jafar_og_reproduction/og_coinrun_dynamics_reproduction-sample.sh",709,29,"",shellscript,content +253,216926,"slurm/jobs/mihir/horeka/jafar_og_reproduction/og_coinrun_dynamics_reproduction-sample.sh",675,0,"",shellscript,selection_command +254,217100,"slurm/jobs/mihir/horeka/jafar_og_reproduction/og_coinrun_dynamics_reproduction-sample.sh",653,0,"",shellscript,selection_command +255,217411,"slurm/jobs/mihir/horeka/jafar_og_reproduction/og_coinrun_dynamics_reproduction-sample.sh",620,0,"",shellscript,selection_command +256,217568,"slurm/jobs/mihir/horeka/jafar_og_reproduction/og_coinrun_dynamics_reproduction-sample.sh",601,0,"",shellscript,selection_command +257,217800,"slurm/jobs/mihir/horeka/jafar_og_reproduction/og_coinrun_dynamics_reproduction-sample.sh",620,0,"",shellscript,selection_command +258,217930,"slurm/jobs/mihir/horeka/jafar_og_reproduction/og_coinrun_dynamics_reproduction-sample.sh",653,0,"",shellscript,selection_command +259,218077,"slurm/jobs/mihir/horeka/jafar_og_reproduction/og_coinrun_dynamics_reproduction-sample.sh",675,0,"",shellscript,selection_command +260,219507,"slurm/jobs/mihir/horeka/jafar_og_reproduction/og_coinrun_dynamics_reproduction-sample.sh",708,0,"",shellscript,selection_command +261,219598,"slurm/jobs/mihir/horeka/jafar_og_reproduction/og_coinrun_dynamics_reproduction-sample.sh",707,1,"",shellscript,content +262,220000,"slurm/jobs/mihir/horeka/jafar_og_reproduction/og_coinrun_dynamics_reproduction-sample.sh",706,1,"",shellscript,content +263,220121,"slurm/jobs/mihir/horeka/jafar_og_reproduction/og_coinrun_dynamics_reproduction-sample.sh",705,0,"",shellscript,selection_command +264,297473,"slurm/jobs/mihir/horeka/jafar_og_reproduction/og_coinrun_dynamics_reproduction-h100.sbatch",0,0,"",shellscript,tab +265,308728,"TERMINAL",0,0,"cd $ws_dir",,terminal_command +266,315700,"TERMINAL",0,0,"cd checkpoints/mihir/jafar_og_reproduction/dynamics/",,terminal_command +267,315988,"TERMINAL",0,0,"ls",,terminal_command +268,317773,"TERMINAL",0,0,"cd train_dynamics_coinrun_og_reproduction/",,terminal_command +269,318295,"TERMINAL",0,0,"l",,terminal_command +270,319555,"TERMINAL",0,0,"ls",,terminal_command +271,335100,"TERMINAL",0,0,"CD 3472617",,terminal_command +272,338412,"TERMINAL",0,0,"cd 3472617",,terminal_command +273,339785,"TERMINAL",0,0,"ls",,terminal_command +274,339826,"TERMINAL",0,0,"]633;C",,terminal_output +275,339964,"TERMINAL",0,0,"genie_1757238544_100000 genie_1757238544_125000 genie_1757238544_150000 genie_1757238544_175000 genie_1757238544_200000 genie_1757238544_25000 genie_1757238544_50000 genie_1757238544_75000\r\n]0;tum_cte0515@hkn1990:/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/mihir/jafar_og_reproduction/dynamics/train_dynamics_coinrun_og_reproduction/3472617",,terminal_output +276,346764,"TERMINAL",0,0,"cd 
genie_1757238544_200000/",,terminal_command +277,347634,"TERMINAL",0,0,"pwd",,terminal_command +278,352599,"slurm/jobs/mihir/horeka/jafar_og_reproduction/og_coinrun_dynamics_reproduction-sample.sh",0,0,"",shellscript,tab +279,355300,"slurm/jobs/mihir/horeka/jafar_og_reproduction/og_coinrun_dynamics_reproduction-sample.sh",598,0,"",shellscript,selection_mouse +280,356455,"slurm/jobs/mihir/horeka/jafar_og_reproduction/og_coinrun_dynamics_reproduction-sample.sh",598,0,"/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/mihir/jafar_og_reproduction/dynamics/train_dynamics_coinrun_og_reproduction/3472617/genie_1757238544_200000",shellscript,content +281,844805,"slurm/jobs/mihir/horeka/jafar_og_reproduction/og_coinrun_dynamics_reproduction-sample.sh",0,0,"",shellscript,tab +282,969620,"slurm/jobs/mihir/horeka/jafar_og_reproduction/og_coinrun_dynamics_reproduction-sample.sh",882,0,"",shellscript,selection_mouse +283,1631269,"train_dynamics.py",0,0,"from dataclasses import dataclass, field\nimport os\nimport time\n\nimport einops\nfrom flax.training import orbax_utils\nfrom flax.training.train_state import TrainState\nimport optax\nimport orbax\nimport numpy as np\nimport jax\nimport jax.numpy as jnp\nimport tyro\nimport wandb\n\nfrom genie import Genie, restore_genie_components\nfrom utils.dataloader import get_dataloader\n\nts = int(time.time())\n\n\n@dataclass\nclass Args:\n # Experiment\n num_steps: int = 200_000\n seed: int = 0\n seq_len: int = 16\n image_channels: int = 3\n image_resolution: int = 64\n data_dir: str = ""data/coinrun_episodes""\n # Optimization\n batch_size: int = 36\n min_lr: float = 3e-6\n max_lr: float = 3e-5\n warmup_steps: int = 5000\n # Tokenizer\n tokenizer_dim: int = 512\n latent_patch_dim: int = 32\n num_patch_latents: int = 1024\n patch_size: int = 4\n tokenizer_num_blocks: int = 8\n tokenizer_num_heads: int = 8\n tokenizer_checkpoint: str = """"\n # LAM\n lam_dim: int = 512\n latent_action_dim: int = 32\n num_latent_actions: int = 6\n lam_patch_size: int = 16\n lam_num_blocks: int = 8\n lam_num_heads: int = 8\n lam_checkpoint: str = """"\n # Dynamics\n dyna_dim: int = 512\n dyna_num_blocks: int = 12\n dyna_num_heads: int = 8\n dropout: float = 0.0\n mask_limit: float = 0.5\n # Logging\n log: bool = False\n entity: str = """"\n project: str = """"\n log_interval: int = 5\n log_image_interval: int = 250\n ckpt_dir: str = """"\n log_checkpoint_interval: int = 25000\n log_gradients: bool = False\n name: str = """"\n tags: list[str] = field(default_factory=lambda: [""dynamics""])\n wandb_id: str = """"\n\nargs = tyro.cli(Args)\n\n\ndef dynamics_loss_fn(params, state, inputs):\n """"""Compute masked dynamics loss""""""\n outputs = state.apply_fn(\n params, inputs, training=True, rngs={""dropout"": inputs[""dropout_rng""]}\n )\n mask = outputs[""mask""]\n ce_loss = optax.softmax_cross_entropy_with_integer_labels(\n outputs[""token_logits""], outputs[""video_tokens""]\n )\n ce_loss = (mask * ce_loss).sum() / mask.sum()\n acc = outputs[""token_logits""].argmax(-1) == outputs[""video_tokens""]\n acc = (mask * acc).sum() / mask.sum()\n select_probs = jax.nn.softmax(outputs[""token_logits""])\n metrics = dict(\n cross_entropy_loss=ce_loss,\n masked_token_accuracy=acc,\n select_logit=outputs[""token_logits""].max(-1).mean(),\n select_p=select_probs.max(-1).mean(),\n entropy=jax.scipy.special.entr(select_probs).sum(-1).mean(),\n )\n return ce_loss, (outputs[""recon""], metrics)\n\n\n@jax.jit\ndef train_step(state, inputs):\n """"""Update state and compute 
metrics""""""\n grad_fn = jax.value_and_grad(dynamics_loss_fn, has_aux=True, allow_int=True)\n (loss, (recon, metrics)), grads = grad_fn(state.params, state, inputs)\n state = state.apply_gradients(grads=grads)\n if args.log_gradients:\n metrics[""gradients_std/""] = jax.tree.map(\n lambda x: x.std(), grads[""params""][""dynamics""]\n )\n return state, loss, recon, metrics\n\n\nif __name__ == ""__main__"":\n rng = jax.random.PRNGKey(args.seed)\n if args.log:\n wandb_init_kwargs = {\n ""entity"": args.entity,\n ""project"": args.project,\n ""name"": args.name,\n ""tags"": args.tags,\n ""group"": ""debug"",\n ""config"": args,\n }\n if args.wandb_id:\n wandb_init_kwargs.update(\n {\n ""id"": args.wandb_id,\n ""resume"": ""allow"",\n }\n )\n wandb.init(**wandb_init_kwargs)\n\n # --- Initialize model ---\n genie = Genie(\n # Tokenizer\n in_dim=args.image_channels,\n tokenizer_dim=args.tokenizer_dim,\n latent_patch_dim=args.latent_patch_dim,\n num_patch_latents=args.num_patch_latents,\n patch_size=args.patch_size,\n tokenizer_num_blocks=args.tokenizer_num_blocks,\n tokenizer_num_heads=args.tokenizer_num_heads,\n # LAM\n lam_dim=args.lam_dim,\n latent_action_dim=args.latent_action_dim,\n num_latent_actions=args.num_latent_actions,\n lam_patch_size=args.lam_patch_size,\n lam_num_blocks=args.lam_num_blocks,\n lam_num_heads=args.lam_num_heads,\n # Dynamics\n dyna_dim=args.dyna_dim,\n dyna_num_blocks=args.dyna_num_blocks,\n dyna_num_heads=args.dyna_num_heads,\n dropout=args.dropout,\n mask_limit=args.mask_limit,\n )\n rng, _rng = jax.random.split(rng)\n image_shape = (args.image_resolution, args.image_resolution, args.image_channels)\n dummy_inputs = dict(\n videos=jnp.zeros(\n (args.batch_size, args.seq_len, *image_shape), dtype=jnp.float32\n ),\n mask_rng=_rng,\n )\n rng, _rng = jax.random.split(rng)\n init_params = genie.init(_rng, dummy_inputs)\n init_params = restore_genie_components(\n init_params, args.tokenizer_checkpoint, args.lam_checkpoint\n )\n\n # --- Initialize optimizer ---\n lr_schedule = optax.warmup_cosine_decay_schedule(\n args.min_lr, args.max_lr, args.warmup_steps, args.num_steps\n )\n tx = optax.adamw(learning_rate=lr_schedule, b1=0.9, b2=0.9, weight_decay=1e-4)\n train_state = TrainState.create(apply_fn=genie.apply, params=init_params, tx=tx)\n\n # --- TRAIN LOOP ---\n dataloader = get_dataloader(args.data_dir, args.seq_len, args.batch_size)\n step = 0\n while step < args.num_steps:\n for videos in dataloader:\n # --- Train step ---\n rng, _rng, _mask_rng = jax.random.split(rng, 3)\n inputs = dict(\n videos=videos,\n action=jnp.zeros((args.batch_size, args.seq_len), dtype=jnp.float32),\n dropout_rng=_rng,\n mask_rng=_mask_rng,\n )\n train_state, loss, recon, metrics = train_step(train_state, inputs)\n print(f""Step {step}, loss: {loss}"")\n step += 1\n\n # --- Logging ---\n if args.log:\n if step % args.log_interval == 0:\n wandb.log({""loss"": loss, ""step"": step, **metrics})\n if step % args.log_image_interval == 0:\n gt_seq = inputs[""videos""][0]\n recon_seq = recon[0].clip(0, 1)\n comparison_seq = jnp.concatenate((gt_seq, recon_seq), axis=1)\n comparison_seq = einops.rearrange(\n comparison_seq * 255, ""t h w c -> h (t w) c""\n )\n log_images = dict(\n image=wandb.Image(np.asarray(gt_seq[15])),\n recon=wandb.Image(np.asarray(recon_seq[15])),\n true_vs_recon=wandb.Image(\n np.asarray(comparison_seq.astype(np.uint8))\n ),\n )\n wandb.log(log_images)\n if step % args.log_checkpoint_interval == 0:\n ckpt = {""model"": train_state}\n orbax_checkpointer = 
orbax.checkpoint.PyTreeCheckpointer()\n save_args = orbax_utils.save_args_from_target(ckpt)\n orbax_checkpointer.save(\n os.path.join(os.getcwd(), args.ckpt_dir, f""genie_{ts}_{step}""),\n ckpt,\n save_args=save_args,\n )\n if step >= args.num_steps:\n break\n",python,tab +284,1635449,"train_tokenizer.py",0,0,"from dataclasses import dataclass, field\n\nimport os\nimport time\n\nimport einops\nfrom flax.training import orbax_utils\nfrom flax.training.train_state import TrainState\nimport optax\nimport orbax\nfrom orbax.checkpoint import PyTreeCheckpointer\nimport numpy as np\nimport dm_pix as pix\nimport jax\nimport jax.numpy as jnp\nimport tyro\nimport wandb\n\nfrom models.tokenizer import TokenizerVQVAE\nfrom utils.dataloader import get_dataloader\n\nts = int(time.time())\n\n\n@dataclass\nclass Args:\n # Experiment\n num_steps: int = 300_000\n seed: int = 0\n seq_len: int = 16\n image_channels: int = 3\n image_resolution: int = 64\n data_dir: str = ""data/coinrun_episodes""\n checkpoint: str = """"\n # Optimization\n vq_beta: float = 0.25\n batch_size: int = 48\n min_lr: float = 3e-4\n max_lr: float = 3e-4\n warmup_steps: int = 10000\n # Tokenizer\n model_dim: int = 512\n latent_dim: int = 32\n num_latents: int = 1024\n patch_size: int = 4\n num_blocks: int = 8\n num_heads: int = 8\n dropout: float = 0.0\n codebook_dropout: float = 0.01\n # Logging\n log: bool = False\n entity: str = """"\n project: str = """"\n log_interval: int = 5\n log_image_interval: int = 250\n ckpt_dir: str = """"\n log_checkpoint_interval: int = 10000\n log_gradients: bool = False\n name: str = """"\n tags: list[str] = field(default_factory=lambda: [""tokenizer""])\n wandb_id: str = """"\n\n\nargs = tyro.cli(Args)\n\n\ndef tokenizer_loss_fn(params, state, inputs):\n # --- Compute loss ---\n outputs = state.apply_fn(\n params, inputs, training=True, rngs={""dropout"": inputs[""rng""]}\n )\n mse = jnp.square(inputs[""videos""] - outputs[""recon""]).mean()\n q_loss = jnp.square(jax.lax.stop_gradient(outputs[""emb""]) - outputs[""z""]).mean()\n commitment_loss = jnp.square(\n outputs[""emb""] - jax.lax.stop_gradient(outputs[""z""])\n ).mean()\n loss = mse + q_loss + args.vq_beta * commitment_loss\n\n # --- Compute validation metrics ---\n gt = inputs[""videos""].clip(0, 1).reshape(-1, *inputs[""videos""].shape[2:])\n recon = outputs[""recon""].clip(0, 1).reshape(-1, *outputs[""recon""].shape[2:])\n psnr = pix.psnr(gt, recon).mean()\n ssim = pix.ssim(gt, recon).mean()\n _, index_counts = jnp.unique_counts(\n jnp.ravel(outputs[""indices""]), size=args.num_latents, fill_value=0\n )\n codebook_usage = (index_counts != 0).mean()\n metrics = dict(\n loss=loss,\n mse=mse,\n q_loss=q_loss,\n commitment_loss=commitment_loss,\n psnr=psnr,\n ssim=ssim,\n codebook_usage=codebook_usage,\n )\n return loss, (outputs[""recon""], metrics)\n\n\n@jax.jit\ndef train_step(state, inputs):\n grad_fn = jax.value_and_grad(tokenizer_loss_fn, has_aux=True, allow_int=True)\n (loss, (recon, metrics)), grads = grad_fn(state.params, state, inputs)\n state = state.apply_gradients(grads=grads)\n if args.log_gradients:\n metrics[""encoder_gradients_std/""] = jax.tree.map(\n lambda x: x.std(), grads[""params""][""encoder""]\n )\n metrics[""vq_gradients_std/""] = jax.tree.map(\n lambda x: x.std(), grads[""params""][""vq""]\n )\n metrics[""decoder_gradients_std/""] = jax.tree.map(\n lambda x: x.std(), grads[""params""][""decoder""]\n )\n return state, loss, recon, metrics\n\n\nif __name__ == ""__main__"":\n rng = jax.random.PRNGKey(args.seed)\n if 
args.log:\n wandb_init_kwargs = {\n ""entity"": args.entity,\n ""project"": args.project,\n ""name"": args.name,\n ""tags"": args.tags,\n ""group"": ""debug"",\n ""config"": args,\n }\n if args.wandb_id:\n wandb_init_kwargs.update(\n {\n ""id"": args.wandb_id,\n ""resume"": ""allow"",\n }\n )\n wandb.init(**wandb_init_kwargs)\n\n # --- Initialize model ---\n tokenizer = TokenizerVQVAE(\n in_dim=args.image_channels,\n model_dim=args.model_dim,\n latent_dim=args.latent_dim,\n num_latents=args.num_latents,\n patch_size=args.patch_size,\n num_blocks=args.num_blocks,\n num_heads=args.num_heads,\n dropout=args.dropout,\n codebook_dropout=args.codebook_dropout,\n )\n rng, _rng = jax.random.split(rng)\n image_shape = (args.image_resolution, args.image_resolution, args.image_channels)\n inputs = dict(\n videos=jnp.zeros(\n (args.batch_size, args.seq_len, *image_shape), dtype=jnp.float32\n ),\n )\n init_params = tokenizer.init(_rng, inputs)\n\n # --- Load checkpoint ---\n step = 0\n if args.checkpoint:\n init_params[""params""].update(\n PyTreeCheckpointer().restore(args.checkpoint)[""model""][""params""][""params""]\n )\n # Assume checkpoint is of the form tokenizer__\n step += int(args.checkpoint.split(""_"")[-1])\n\n # --- Initialize optimizer ---\n lr_schedule = optax.warmup_cosine_decay_schedule(\n args.min_lr, args.max_lr, args.warmup_steps, args.num_steps\n )\n tx = optax.adamw(learning_rate=lr_schedule, b1=0.9, b2=0.9, weight_decay=1e-4)\n train_state = TrainState.create(apply_fn=tokenizer.apply, params=init_params, tx=tx)\n\n # --- TRAIN LOOP ---\n dataloader = get_dataloader(args.data_dir, args.seq_len, args.batch_size)\n while step < args.num_steps:\n for videos in dataloader:\n # --- Train step ---\n rng, _rng = jax.random.split(rng)\n inputs = dict(videos=videos, rng=_rng)\n train_state, loss, recon, metrics = train_step(train_state, inputs)\n print(f""Step {step}, loss: {loss}"")\n step += 1\n\n # --- Logging ---\n if args.log:\n if step % args.log_interval == 0:\n wandb.log({""loss"": loss, ""step"": step, **metrics})\n if step % args.log_image_interval == 0:\n gt_seq = inputs[""videos""][0]\n recon_seq = recon[0].clip(0, 1)\n comparison_seq = jnp.concatenate((gt_seq, recon_seq), axis=1)\n comparison_seq = einops.rearrange(\n comparison_seq * 255, ""t h w c -> h (t w) c""\n )\n log_images = dict(\n image=wandb.Image(np.asarray(gt_seq[0])),\n recon=wandb.Image(np.asarray(recon_seq[0])),\n true_vs_recon=wandb.Image(\n np.asarray(comparison_seq.astype(np.uint8))\n ),\n )\n wandb.log(log_images)\n if step % args.log_checkpoint_interval == 0:\n ckpt = {""model"": train_state}\n orbax_checkpointer = orbax.checkpoint.PyTreeCheckpointer()\n save_args = orbax_utils.save_args_from_target(ckpt)\n orbax_checkpointer.save(\n os.path.join(\n os.getcwd(), args.ckpt_dir, f""tokenizer_{ts}_{step}""\n ),\n ckpt,\n save_args=save_args,\n )\n if step >= args.num_steps:\n break\n",python,tab +285,1645976,"train_tokenizer.py",970,0,"",python,selection_mouse +286,1646085,"train_tokenizer.py",963,10,"num_blocks",python,selection_mouse +287,1659041,"train_tokenizer.py",4566,0,"",python,selection_mouse +288,1659164,"train_tokenizer.py",4566,0,"num_blocks",python,content +289,1659166,"train_tokenizer.py",4576,0,"",python,selection_keyboard +290,1660752,"train_tokenizer.py",4566,10,"",python,content +291,1662024,"train_tokenizer.py",3944,0,"",python,selection_mouse +292,1662117,"train_tokenizer.py",3944,0,"num_blocks",python,content +293,1662119,"train_tokenizer.py",3954,0,"",python,selection_keyboard 
+294,1664011,"train_tokenizer.py",3944,10,"",python,content +295,1670702,"train_tokenizer.py",3915,0,"",python,selection_mouse +296,1671454,"models/tokenizer.py",0,0,"from typing import Dict, Any, Tuple\n\nimport flax.linen as nn\n\nfrom utils.preprocess import patchify, unpatchify\nfrom utils.nn import STTransformer, VectorQuantizer\n\n\nclass TokenizerVQVAE(nn.Module):\n """"""ST-ViVit VQ-VAE""""""\n\n in_dim: int\n model_dim: int\n latent_dim: int\n num_latents: int\n patch_size: int\n num_blocks: int\n num_heads: int\n dropout: float\n codebook_dropout: float\n\n def setup(self):\n self.encoder = STTransformer(\n self.model_dim,\n self.latent_dim,\n self.num_blocks,\n self.num_heads,\n self.dropout,\n )\n self.vq = VectorQuantizer(\n self.latent_dim,\n self.num_latents,\n self.codebook_dropout,\n )\n self.out_dim = self.in_dim * self.patch_size**2\n self.decoder = STTransformer(\n self.model_dim,\n self.out_dim,\n self.num_blocks,\n self.num_heads,\n self.dropout,\n )\n\n def __call__(self, batch: Dict[str, Any], training: bool = True) -> Dict[str, Any]:\n H, W = batch[""videos""].shape[2:4]\n outputs = self.vq_encode(batch[""videos""], training)\n recon = self.decoder(outputs[""z_q""]) # (B, T, H_down * W_down, C)\n recon = nn.sigmoid(recon)\n outputs[""recon""] = unpatchify(recon, self.patch_size, H, W)\n return outputs\n\n def vq_encode(self, videos: Any, training: bool = True) -> Dict[str, Any]:\n # --- Preprocess + encode ---\n B, T = videos.shape[:2]\n x = patchify(videos, self.patch_size)\n N = x.shape[2]\n x = self.encoder(x) # (B, T, N, E)\n\n # --- Vector quantize ---\n x = x.reshape(B * T * N, self.latent_dim)\n z_q, z, emb, indices = self.vq(x, training)\n z_q = z_q.reshape(B, T, N, self.latent_dim)\n indices = indices.reshape(B, T, N)\n return dict(z_q=z_q, z=z, emb=emb, indices=indices)\n\n def decode(self, indices: Any, video_hw: Tuple[int]):\n z = self.vq.codebook[indices]\n recon = self.decoder(z)\n recon = nn.sigmoid(recon)\n return unpatchify(recon, self.patch_size, *video_hw)\n",python,tab +297,1671455,"models/tokenizer.py",172,0,"",python,selection_command +298,1672870,"models/tokenizer.py",330,0,"",python,selection_mouse +299,1673052,"models/tokenizer.py",326,10,"num_blocks",python,selection_mouse +300,1712759,"train_tokenizer.py",0,0,"",python,tab +301,1719310,"models/dynamics.py",0,0,"from typing import Dict, Any\n\nimport jax\nimport jax.numpy as jnp\nimport flax.linen as nn\n\nfrom utils.nn import STTransformer\n\n\nclass DynamicsMaskGIT(nn.Module):\n """"""MaskGIT dynamics model""""""\n\n model_dim: int\n num_latents: int\n num_blocks: int\n num_heads: int\n dropout: float\n mask_limit: float\n\n def setup(self):\n self.dynamics = STTransformer(\n self.model_dim,\n self.num_latents,\n self.num_blocks,\n self.num_heads,\n self.dropout,\n )\n self.patch_embed = nn.Embed(self.num_latents, self.model_dim)\n self.mask_token = self.param(\n ""mask_token"",\n nn.initializers.lecun_uniform(),\n (1, 1, 1, self.model_dim),\n )\n self.action_up = nn.Dense(self.model_dim)\n\n def __call__(self, batch: Dict[str, Any], training: bool = True) -> Dict[str, Any]:\n # --- Mask videos ---\n vid_embed = self.patch_embed(batch[""video_tokens""])\n if training:\n rng1, rng2 = jax.random.split(batch[""mask_rng""])\n mask_prob = jax.random.uniform(rng1, minval=self.mask_limit)\n mask = jax.random.bernoulli(rng2, mask_prob, vid_embed.shape[:-1])\n mask = mask.at[:, 0].set(False)\n vid_embed = jnp.where(jnp.expand_dims(mask, -1), self.mask_token, vid_embed)\n else:\n mask = None\n\n # 
--- Predict transition ---\n act_embed = self.action_up(batch[""latent_actions""])\n vid_embed += jnp.pad(act_embed, ((0, 0), (1, 0), (0, 0), (0, 0)))\n logits = self.dynamics(vid_embed)\n return dict(token_logits=logits, mask=mask)\n",python,tab +302,1730133,"train_dynamics.py",0,0,"",python,tab diff --git a/927a8af5474e5654810c00ce2e09fd2de87d3e5722f33fa1090d867db114e403/crowd-code-062f5530-8023-470c-a429-b23967d943e31758624637167-2025_09_23-12.50.59.446/source.csv b/927a8af5474e5654810c00ce2e09fd2de87d3e5722f33fa1090d867db114e403/crowd-code-062f5530-8023-470c-a429-b23967d943e31758624637167-2025_09_23-12.50.59.446/source.csv new file mode 100644 index 0000000000000000000000000000000000000000..f28ea1bc8746dbe0fe2cbbb91ba994f8cb77f294 --- /dev/null +++ b/927a8af5474e5654810c00ce2e09fd2de87d3e5722f33fa1090d867db114e403/crowd-code-062f5530-8023-470c-a429-b23967d943e31758624637167-2025_09_23-12.50.59.446/source.csv @@ -0,0 +1,2417 @@ +Sequence,Time,File,RangeOffset,RangeLength,Text,Language,Type +1,4,"sample.py",0,0,"from dataclasses import dataclass\nimport time\nimport os\nimport optax\n\nimport dm_pix as pix\nimport einops\nimport jax\nimport jax.numpy as jnp\nimport flax.linen as nn\nimport numpy as np\nimport orbax.checkpoint as ocp\nfrom PIL import Image, ImageDraw\nimport tyro\nfrom flax import nnx\n\nfrom genie import Genie\nfrom utils.dataloader import get_dataloader\n\n\n@dataclass\nclass Args:\n # Experiment\n seed: int = 0\n seq_len: int = 16\n image_channels: int = 3\n image_height: int = 90\n image_width: int = 160\n data_dir: str = ""data/coinrun_episodes""\n checkpoint: str = """"\n # Sampling\n batch_size: int = 1\n maskgit_steps: int = 25\n temperature: float = 1.0\n sample_argmax: bool = True\n start_frame: int = 1\n # Tokenizer checkpoint\n tokenizer_dim: int = 512\n tokenizer_ffn_dim: int = 2048\n latent_patch_dim: int = 32\n num_patch_latents: int = 1024\n patch_size: int = 4\n tokenizer_num_blocks: int = 4\n tokenizer_num_heads: int = 8\n # LAM checkpoint\n lam_dim: int = 512\n lam_ffn_dim: int = 2048\n latent_action_dim: int = 32\n num_actions: int = 6\n lam_patch_size: int = 16\n lam_num_blocks: int = 4\n lam_num_heads: int = 8\n use_gt_actions: bool = False\n # Dynamics checkpoint\n dyna_type: str = ""maskgit""\n dyna_dim: int = 512\n dyna_ffn_dim: int = 2048\n dyna_num_blocks: int = 6\n dyna_num_heads: int = 8\n param_dtype = jnp.float32\n dtype = jnp.bfloat16\n use_flash_attention: bool = True\n\n\nargs = tyro.cli(Args)\n\nif __name__ == ""__main__"":\n """"""\n Dimension keys:\n B: batch size\n T: number of input (conditioning) frames\n N: number of patches per frame\n S: sequence length\n H: height\n W: width\n E: B * (S - 1)\n """"""\n jax.distributed.initialize()\n\n rng = jax.random.key(args.seed)\n\n # --- Load Genie checkpoint ---\n rngs = nnx.Rngs(rng)\n genie = Genie(\n # Tokenizer\n in_dim=args.image_channels,\n tokenizer_dim=args.tokenizer_dim,\n tokenizer_ffn_dim=args.tokenizer_ffn_dim,\n latent_patch_dim=args.latent_patch_dim,\n num_patch_latents=args.num_patch_latents,\n patch_size=args.patch_size,\n tokenizer_num_blocks=args.tokenizer_num_blocks,\n tokenizer_num_heads=args.tokenizer_num_heads,\n # LAM\n lam_dim=args.lam_dim,\n lam_ffn_dim=args.lam_ffn_dim,\n latent_action_dim=args.latent_action_dim,\n num_actions=args.num_actions,\n lam_patch_size=args.lam_patch_size,\n lam_num_blocks=args.lam_num_blocks,\n lam_num_heads=args.lam_num_heads,\n lam_co_train=False,\n use_gt_actions=args.use_gt_actions,\n # Dynamics\n dyna_type=args.dyna_type,\n 
dyna_dim=args.dyna_dim,\n dyna_ffn_dim=args.dyna_ffn_dim,\n dyna_num_blocks=args.dyna_num_blocks,\n dyna_num_heads=args.dyna_num_heads,\n param_dtype=args.param_dtype,\n dtype=args.dtype,\n use_flash_attention=args.use_flash_attention,\n # FIXME (f.srambical): implement spatiotemporal KV caching and set decode=True\n decode=False,\n rngs=rngs,\n )\n\n del genie.tokenizer.vq.drop\n # Need to delete lam decoder for checkpoint loading\n if not args.use_gt_actions:\n assert genie.lam is not None\n del genie.lam.decoder\n\n handler_registry = ocp.handlers.DefaultCheckpointHandlerRegistry()\n handler_registry.add(\n ""model_state"", ocp.args.PyTreeSave, ocp.handlers.PyTreeCheckpointHandler\n )\n handler_registry.add(\n ""model_state"", ocp.args.PyTreeRestore, ocp.handlers.PyTreeCheckpointHandler\n )\n checkpoint_options = ocp.CheckpointManagerOptions(\n step_format_fixed_length=6,\n )\n checkpoint_manager = ocp.CheckpointManager(\n args.checkpoint,\n options=checkpoint_options,\n handler_registry=handler_registry,\n )\n\n dummy_tx = optax.adamw(\n learning_rate=optax.linear_schedule(0.0001, 0.0001, 10000),\n b1=0.9,\n b2=0.9,\n weight_decay=1e-4,\n mu_dtype=args.dtype,\n )\n dummy_optimizer = nnx.Optimizer(genie, dummy_tx)\n\n abstract_optimizer = nnx.eval_shape(lambda: dummy_optimizer)\n abstract_optimizer_state = nnx.state(abstract_optimizer)\n restored = checkpoint_manager.restore(\n checkpoint_manager.latest_step(),\n args=ocp.args.Composite(\n model_state=ocp.args.PyTreeRestore(abstract_optimizer_state), # type: ignore\n ),\n )\n restored_optimizer_state = restored[""model_state""]\n nnx.update(dummy_optimizer, restored_optimizer_state)\n\n # --- Define sampling function ---\n def _sampling_fn(model: Genie, batch: dict) -> jax.Array:\n """"""Runs Genie.sample with pre-defined generation hyper-parameters.""""""\n assert args.dyna_type in [\n ""maskgit"",\n ""causal"",\n ], f""Invalid dynamics type: {args.dyna_type}""\n frames, _ = model.sample(\n batch,\n args.seq_len,\n args.temperature,\n args.sample_argmax,\n args.maskgit_steps,\n )\n return frames\n\n # --- Define autoregressive sampling loop ---\n def _autoreg_sample(genie, rng, batch):\n batch[""videos""] = batch[""videos""][:, : args.start_frame]\n batch[""rng""] = rng\n generated_vid_BSHWC = _sampling_fn(genie, batch)\n return generated_vid_BSHWC\n\n # --- Get video + latent actions ---\n array_record_files = [\n os.path.join(args.data_dir, x)\n for x in os.listdir(args.data_dir)\n if x.endswith("".array_record"")\n ]\n dataloader = get_dataloader(\n array_record_files,\n args.seq_len,\n args.batch_size,\n args.image_height,\n args.image_width,\n args.image_channels,\n # We don't use workers in order to avoid grain shutdown issues (https://github.com/google/grain/issues/398)\n num_workers=0,\n prefetch_buffer_size=1,\n seed=args.seed,\n )\n dataloader = iter(dataloader)\n batch = next(dataloader)\n gt_video = jnp.asarray(batch[""videos""], dtype=jnp.float32) / 255.0\n batch[""videos""] = gt_video.astype(args.dtype)\n # Get latent actions for all videos in the batch\n action_batch_E = None\n if not args.use_gt_actions:\n action_batch_E = genie.vq_encode(batch, training=False)\n batch[""latent_actions""] = action_batch_E\n\n # --- Sample + evaluate video ---\n recon_video_BSHWC = _autoreg_sample(genie, rng, batch)\n recon_video_BSHWC = recon_video_BSHWC.astype(jnp.float32)\n gt = (\n gt_video[:, : recon_video_BSHWC.shape[1]]\n .clip(0, 1)\n .reshape(-1, *gt_video.shape[2:])\n )\n recon = recon_video_BSHWC.clip(0, 1).reshape(-1, 
*recon_video_BSHWC.shape[2:])\n ssim = jnp.asarray(\n pix.ssim(gt[:, args.start_frame :], recon[:, args.start_frame :])\n ).mean()\n print(f""SSIM: {ssim}"")\n\n # --- Construct video ---\n true_videos = (gt_video * 255).astype(np.uint8)\n pred_videos = (recon_video_BSHWC * 255).astype(np.uint8)\n video_comparison = np.zeros((2, *recon_video_BSHWC.shape), dtype=np.uint8)\n video_comparison[0] = true_videos[:, : args.seq_len]\n video_comparison[1] = pred_videos\n frames = einops.rearrange(video_comparison, ""n b t h w c -> t (b h) (n w) c"")\n\n # --- Save video ---\n imgs = [Image.fromarray(img) for img in frames]\n # Write actions on each frame, on each row (i.e., for each video in the batch, on the GT row)\n B, S, _, _, _ = batch[""videos""].shape\n if action_batch_E is not None:\n action_batch_BSm11 = jnp.reshape(action_batch_E, (B, S - 1, 1))\n else:\n action_batch_BSm11 = jnp.expand_dims(batch[""actions""][:, :-1], -1)\n for t, img in enumerate(imgs[1:]):\n d = ImageDraw.Draw(img)\n for row in range(B):\n action = action_batch_BSm11[row, t, 0]\n y_offset = row * batch[""videos""].shape[2] + 2\n d.text((2, y_offset), f""{action}"", fill=255)\n imgs[0].save(\n f""generation_{time.time()}.gif"",\n save_all=True,\n append_images=imgs[1:],\n duration=250,\n loop=0,\n )\n",python,tab +2,403,"extension-output-pdoom-org.crowd-code-#1-crowd-code",0,0,"12:50:59 PM [info] Activating crowd-code\n12:50:59 PM [info] Recording started\n12:50:59 PM [info] Initializing git provider using file system watchers...\n12:50:59 PM [info] Git repository found\n12:50:59 PM [info] Git provider initialized successfully\n",Log,tab +3,624,"extension-output-pdoom-org.crowd-code-#1-crowd-code",250,0,"12:50:59 PM [info] Initial git state: [object Object]\n",Log,content +4,176177,"sample.py",0,0,"",python,tab +5,176180,"sample.py",6395,0,"",python,selection_mouse +6,176197,"sample.py",6394,0,"",python,selection_command +7,176848,"sample.py",6282,0,"",python,selection_mouse +8,176865,"sample.py",6281,0,"",python,selection_command +9,177406,"sample.py",6282,0,"",python,selection_mouse +10,177421,"sample.py",6281,0,"",python,selection_command +11,178024,"sample.py",6250,0,"",python,selection_mouse +12,178038,"sample.py",6249,0,"",python,selection_command +13,178712,"sample.py",6346,0,"",python,selection_mouse +14,178727,"sample.py",6345,0,"",python,selection_command +15,179280,"sample.py",6346,0,"",python,selection_mouse +16,179285,"sample.py",6345,0,"",python,selection_command +17,179887,"sample.py",6395,0,"",python,selection_mouse +18,179904,"sample.py",6394,0,"",python,selection_command +19,180515,"sample.py",6346,0,"",python,selection_mouse +20,180530,"sample.py",6345,0,"",python,selection_command +21,181156,"sample.py",6250,0,"",python,selection_mouse +22,181159,"sample.py",6249,0,"",python,selection_command +23,181816,"sample.py",6282,0,"",python,selection_mouse +24,181819,"sample.py",6281,0,"",python,selection_command +25,183750,"TERMINAL",0,0,"",,terminal_focus +26,185468,"TERMINAL",0,0,"bash",,terminal_focus +27,185836,"TERMINAL",0,0,"source /home/hk-project-p0023960/tum_cte0515/Projects/jasmine/.venv/bin/activate",,terminal_command +28,187319,"TERMINAL",0,0,"queue",,terminal_command +29,187403,"TERMINAL",0,0,"]633;C[?1049h(B[?7hEvery 1.0s: squeue --mehkn1990.localdomain: Tue Sep 23 12:54:06 2025JOBID PARTITION NAME USER ST\tTIME NODES NODELIST(REASON)3511158 accelerat train_dy tum_cte0 PD\t0:00\t 1 (Priority)3511159 accelerat train_dy tum_cte0 PD\t0:00\t 1 (Priority)3512647 dev_accel interact tum_cte0 R52:57\t 
1 hkn04033512651 large preproce tum_cte0 R50:21\t 1 hkn1901",,terminal_output +30,188473,"TERMINAL",0,0,"782",,terminal_output +31,189498,"TERMINAL",0,0,"893",,terminal_output +32,190623,"TERMINAL",0,0,"93:004",,terminal_output +33,191644,"TERMINAL",0,0,"1015",,terminal_output +34,192175,"TERMINAL",0,0,"[?1049l\r[?1l>]0;tum_cte0515@hkn1990:~/Projects/jasmine",,terminal_output +35,206109,"TERMINAL",0,0,"idling",,terminal_command +36,206240,"TERMINAL",0,0,"]633;C[?1049h(B[?7hEvery 1.0s: sinfo_t_idlehkn1990.localdomain: Tue Sep 23 12:54:25 2025Partition dev_cpuonly: 11 nodes idle\rPartition cpuonly: 293 nodes idle\rPartition dev_accelerated:\t 0 nodes idle\rPartition accelerated:\t 0 nodes idle\rPartition dev_accelerated-h100 :\t 0 nodes idle\rPartition accelerated-h100:\t 0 nodes idle\rPartition large:\t 6 nodes idle\rPartition accelerated-h200:\t 3 nodes idle",,terminal_output +37,207214,"TERMINAL",0,0,"6",,terminal_output +38,207964,"TERMINAL",0,0,"[?1049l\r[?1l>]0;tum_cte0515@hkn1990:~/Projects/jasmine",,terminal_output +39,212576,"TERMINAL",0,0,"salloc --time=05:00:00 --partition=accelerated --nodes=1 --gres=gpu:1 --cpus-per-task=8",,terminal_command +40,212651,"TERMINAL",0,0,"]633;Csalloc: Pending job allocation 3512804\r\nsalloc: job 3512804 queued and waiting for resources\r\n",,terminal_output +41,215649,"TERMINAL",0,0,"",,terminal_focus +42,216504,"TERMINAL",0,0,"salloc",,terminal_focus +43,217532,"TERMINAL",0,0,"source /home/hk-project-p0023960/tum_cte0515/Projects/jasmine/.venv/bin/activate",,terminal_command +44,217564,"TERMINAL",0,0,"]633;C]0;tum_cte0515@hkn1990:~/Projects/jasmine",,terminal_output +45,219188,"TERMINAL",0,0,"bash",,terminal_focus +46,378843,"models/dynamics.py",0,0,"from typing import Dict\n\nimport jax\nimport jax.numpy as jnp\nimport flax.nnx as nnx\n\nfrom utils.nn import STTransformer, Transformer\n\n\nclass DynamicsMaskGIT(nnx.Module):\n """"""\n MaskGIT dynamics model\n\n Dimension keys:\n B: batch size\n T: sequence length\n N: number of patches per frame\n L: latent dimension\n V: vocabulary size (number of latents)\n """"""\n\n def __init__(\n self,\n model_dim: int,\n ffn_dim: int,\n num_latents: int,\n latent_action_dim: int,\n num_blocks: int,\n num_heads: int,\n dropout: float,\n mask_limit: float,\n param_dtype: jnp.dtype,\n dtype: jnp.dtype,\n use_flash_attention: bool,\n rngs: nnx.Rngs,\n ):\n self.model_dim = model_dim\n self.ffn_dim = ffn_dim\n self.num_latents = num_latents\n self.latent_action_dim = latent_action_dim\n self.num_blocks = num_blocks\n self.num_heads = num_heads\n self.dropout = dropout\n self.mask_limit = mask_limit\n self.param_dtype = param_dtype\n self.dtype = dtype\n self.use_flash_attention = use_flash_attention\n\n self.transformer = STTransformer(\n self.model_dim,\n self.model_dim,\n self.ffn_dim,\n self.num_latents,\n self.num_blocks,\n self.num_heads,\n self.dropout,\n self.param_dtype,\n self.dtype,\n use_flash_attention=self.use_flash_attention,\n rngs=rngs,\n )\n self.patch_embed = nnx.Embed(self.num_latents, self.model_dim, rngs=rngs)\n self.mask_token = nnx.Param(\n nnx.initializers.lecun_uniform()(rngs.params(), (1, 1, 1, self.model_dim))\n )\n self.action_up = nnx.Linear(\n self.latent_action_dim,\n self.model_dim,\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n rngs=rngs,\n )\n\n def __call__(\n self,\n batch: Dict[str, jax.Array],\n training: bool = True,\n ) -> tuple[jax.Array, jax.Array]:\n # --- Mask videos ---\n video_tokens_BTN = batch[""video_tokens""]\n latent_actions_BTm11L = 
batch[""latent_actions""]\n vid_embed_BTNM = self.patch_embed(video_tokens_BTN)\n if training:\n batch_size = vid_embed_BTNM.shape[0]\n _rng_prob, *_rngs_mask = jax.random.split(batch[""mask_rng""], batch_size + 1)\n mask_prob = jax.random.uniform(\n _rng_prob, shape=(batch_size,), minval=self.mask_limit\n )\n per_sample_shape = vid_embed_BTNM.shape[1:-1]\n mask = jax.vmap(\n lambda rng, prob: jax.random.bernoulli(rng, prob, per_sample_shape),\n in_axes=(0, 0),\n )(jnp.asarray(_rngs_mask), mask_prob)\n mask = mask.at[:, 0].set(False)\n vid_embed_BTNM = jnp.where(\n jnp.expand_dims(mask, -1), self.mask_token.value, vid_embed_BTNM\n )\n else:\n mask = jnp.ones_like(video_tokens_BTN)\n\n # --- Predict transition ---\n act_embed_BTm11M = self.action_up(latent_actions_BTm11L)\n padded_act_embed_BT1M = jnp.pad(\n act_embed_BTm11M, ((0, 0), (1, 0), (0, 0), (0, 0))\n )\n padded_act_embed_BTNM = jnp.broadcast_to(\n padded_act_embed_BT1M, vid_embed_BTNM.shape\n )\n vid_embed_BTNM += padded_act_embed_BTNM\n logits_BTNV = self.transformer(vid_embed_BTNM)\n return logits_BTNV, mask\n\n\nclass DynamicsCausal(nnx.Module):\n """"""Causal dynamics model""""""\n\n def __init__(\n self,\n model_dim: int,\n ffn_dim: int,\n num_latents: int,\n latent_action_dim: int,\n num_blocks: int,\n num_heads: int,\n dropout: float,\n param_dtype: jnp.dtype,\n dtype: jnp.dtype,\n use_flash_attention: bool,\n decode: bool,\n rngs: nnx.Rngs,\n ):\n self.model_dim = model_dim\n self.ffn_dim = ffn_dim\n self.num_latents = num_latents\n self.latent_action_dim = latent_action_dim\n self.num_blocks = num_blocks\n self.num_heads = num_heads\n self.dropout = dropout\n self.param_dtype = param_dtype\n self.dtype = dtype\n self.use_flash_attention = use_flash_attention\n self.decode = decode\n\n self.transformer = Transformer(\n self.model_dim,\n self.model_dim,\n self.ffn_dim,\n self.num_latents,\n self.num_blocks,\n self.num_heads,\n self.dropout,\n self.param_dtype,\n self.dtype,\n use_flash_attention=self.use_flash_attention,\n decode=self.decode,\n rngs=rngs,\n )\n self.patch_embed = nnx.Embed(self.num_latents, self.model_dim, rngs=rngs)\n self.action_up = nnx.Linear(\n self.latent_action_dim,\n self.model_dim,\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n rngs=rngs,\n )\n\n def __call__(\n self,\n batch: Dict[str, jax.Array],\n training: bool = True,\n ) -> tuple[jax.Array, jax.Array]:\n video_tokens_BTN = batch[""video_tokens""]\n latent_actions_BTm11L = batch[""latent_actions""]\n vid_embed_BTNM = self.patch_embed(video_tokens_BTN)\n act_embed_BTm11M = self.action_up(latent_actions_BTm11L)\n padded_act_embed_BT1M = jnp.pad(\n act_embed_BTm11M, ((0, 0), (1, 0), (0, 0), (0, 0))\n )\n vid_embed_BTNp1M = jnp.concatenate(\n [padded_act_embed_BT1M, vid_embed_BTNM], axis=2\n )\n logits_BTNp1V = self.transformer(vid_embed_BTNp1M)\n logits_BTNV = logits_BTNp1V[:, :, :-1]\n return logits_BTNV, jnp.ones_like(video_tokens_BTN)\n",python,tab +47,378845,"models/dynamics.py",2959,11,"expand_dims",python,selection_command +48,384591,"sample.py",0,0,"",python,tab +49,384592,"sample.py",7674,11,"expand_dims",python,selection_command +50,394519,"TERMINAL",0,0,"bash",,terminal_focus +51,396224,"TERMINAL",0,0,"git status",,terminal_command +52,396283,"TERMINAL",0,0,"]633;COn branch generate-minatar-breakout-dataset\r\n",,terminal_output +53,396346,"TERMINAL",0,0,"Your branch is ahead of 'origin/generate-minatar-breakout-dataset' by 10 commits.\r\n (use ""git push"" to publish your local commits)\r\n\r\nLast commands done (2 commands 
done):\r\n pick ba37453 feat: generate coinrun dataset with val split\r\n pick faadd10 feat: implemented validation loss for all three models\r\nNext commands to do (26 remaining commands):\r\n pick 9a17dbb fix: pass val data path to dataloader\r\n pick 6e69cdb fix typo in image logging\r\n (use ""git rebase --edit-todo"" to view and edit)\r\nYou are currently editing a commit while rebasing branch 'gt-actions' on 'c7522f2'.\r\n (use ""git commit --amend"" to amend the current commit)\r\n (use ""git rebase --continue"" once you are satisfied with your changes)\r\n\r\nChanges not staged for commit:\r\n (use ""git add ..."" to update what will be committed)\r\n (use ""git restore ..."" to discard changes in working directory)\r\n\tmodified: genie.py\r\n\tmodified: input_pipeline/generate_breakout_dataset.py\r\n\tmodified: sample.py\r\n\tmodified: train_dynamics.py\r\n\tmodified: train_lam.py\r\n\tmodified: train_tokenizer.py\r\n\r\nUntracked files:\r\n (use ""git add ..."" to include in what will be committed)\r\n\tdiff.diff\r\n\tinput_pipeline/generate_breakout_dataset_agent.py\r\n\tkiller.sh\r\n\tkiller_partition.sh\r\n\tlog.log\r\n\toverfit_dir.zip\r\n\trequirements-franz.txt\r\n\tsamples/\r\n\tscripts_cremers/\r\n\tslurm/\r\n\ttest.py\r\n\tutils/visualizer.py\r\n\r\nno changes added to commit (use ""git add"" and/or ""git commit -a"")\r\n]0;tum_cte0515@hkn1990:~/Projects/jasmine",,terminal_output +54,402971,"TERMINAL",0,0,"git stash",,terminal_command +55,403021,"TERMINAL",0,0,"]633;C",,terminal_output +56,403195,"TERMINAL",0,0,"Saved working directory and index state WIP on generate-minatar-breakout-dataset: 0d69ea3 Merge branch 'gt-actions' into generate-minatar-breakout-dataset\r\n",,terminal_output +57,403307,"TERMINAL",0,0,"]0;tum_cte0515@hkn1990:~/Projects/jasmine",,terminal_output +58,403753,"sample.py",7641,75," action_batch_BSm11 = jnp.expand_dims(batch[""actions""][:, 1:], -1)\n",python,content +59,407066,"TERMINAL",0,0,"git checkout gt-actions",,terminal_command +60,407115,"TERMINAL",0,0,"]633;C",,terminal_output +61,407144,"TERMINAL",0,0,"Switched to branch 'gt-actions'\r\nYour branch is up to date with 'origin/gt-actions'.\r\n]0;tum_cte0515@hkn1990:~/Projects/jasmine",,terminal_output +62,407389,"sample.py",7641,74," action_batch_BSm11 = jnp.expand_dims(batch[""actions""][:, :-1], -1)\n",python,content +63,409250,"sample.py",8117,0,"",python,selection_mouse +64,409266,"sample.py",8116,0,"",python,selection_command +65,410013,"sample.py",7786,0,"",python,selection_mouse +66,410031,"sample.py",7785,0,"",python,selection_command +67,410362,"sample.py",0,0,"Switched from branch 'generate-minatar-breakout-dataset' to 'gt-actions'",python,git_branch_checkout +68,412299,"sample.py",7815,0,"",python,selection_mouse +69,412300,"sample.py",7814,0,"",python,selection_command +70,412850,"sample.py",7751,0,"",python,selection_mouse +71,413525,"sample.py",7680,0,"",python,selection_mouse +72,414090,"sample.py",7669,0,"",python,selection_mouse +73,414388,"sample.py",7667,2," =",python,selection_mouse +74,414472,"sample.py",7668,1,"=",python,selection_mouse +75,414473,"sample.py",7640,29,"\n action_batch_BSm11 =",python,selection_mouse +76,414957,"sample.py",7640,0,"",python,selection_mouse +77,414986,"sample.py",7639,0,"",python,selection_command +78,415662,"sample.py",7754,0,"",python,selection_mouse +79,415663,"sample.py",7753,0,"",python,selection_command +80,415959,"sample.py",7753,1,":",python,selection_mouse +81,415962,"sample.py",7754,0,"",python,selection_command 
+82,416186,"sample.py",7686,68,"batch[""actions""][:, :-1], -1)\n for t, img in enumerate(imgs[1:]):",python,selection_mouse +83,416621,"sample.py",7686,0,"",python,selection_mouse +84,417174,"sample.py",7667,0,"",python,selection_mouse +85,417703,"sample.py",7670,0,"",python,selection_mouse +86,419875,"TERMINAL",0,0,"salloc",,terminal_focus +87,421160,"TERMINAL",0,0,"bash",,terminal_focus +88,422731,"TERMINAL",0,0,"salloc",,terminal_focus +89,424942,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/genie.py",0,0,"from typing import Dict\n\nimport einops\nimport jax\nimport jax.numpy as jnp\nimport flax.nnx as nnx\nimport orbax.checkpoint as ocp\n\nfrom models.dynamics import DynamicsMaskGIT, DynamicsCausal\nfrom models.lam import LatentActionModel\nfrom models.tokenizer import TokenizerVQVAE\n\n\nclass Genie(nnx.Module):\n """"""Genie model""""""\n\n def __init__(\n self,\n in_dim: int,\n tokenizer_dim: int,\n tokenizer_ffn_dim: int,\n latent_patch_dim: int,\n num_patch_latents: int,\n patch_size: int,\n tokenizer_num_blocks: int,\n tokenizer_num_heads: int,\n lam_dim: int,\n lam_ffn_dim: int,\n latent_action_dim: int,\n num_actions: int,\n lam_patch_size: int,\n lam_num_blocks: int,\n lam_num_heads: int,\n lam_co_train: bool,\n use_gt_actions: bool,\n dyna_type: str,\n dyna_dim: int,\n dyna_ffn_dim: int,\n dyna_num_blocks: int,\n dyna_num_heads: int,\n param_dtype: jnp.dtype,\n dtype: jnp.dtype,\n use_flash_attention: bool,\n decode: bool,\n rngs: nnx.Rngs,\n dropout: float = 0.0,\n mask_limit: float = 0.0,\n ):\n # --- Tokenizer ---\n self.in_dim = in_dim\n self.tokenizer_dim = tokenizer_dim\n self.tokenizer_ffn_dim = tokenizer_ffn_dim\n self.latent_patch_dim = latent_patch_dim\n self.num_patch_latents = num_patch_latents\n self.patch_size = patch_size\n self.tokenizer_num_blocks = tokenizer_num_blocks\n self.tokenizer_num_heads = tokenizer_num_heads\n # --- LAM ---\n self.lam_dim = lam_dim\n self.lam_ffn_dim = lam_ffn_dim\n self.latent_action_dim = latent_action_dim\n self.num_actions = num_actions\n self.lam_patch_size = lam_patch_size\n self.lam_num_blocks = lam_num_blocks\n self.lam_num_heads = lam_num_heads\n self.lam_co_train = lam_co_train\n self.use_gt_actions = use_gt_actions\n # --- Dynamics ---\n self.dyna_type = dyna_type\n self.dyna_dim = dyna_dim\n self.dyna_ffn_dim = dyna_ffn_dim\n self.dyna_num_blocks = dyna_num_blocks\n self.dyna_num_heads = dyna_num_heads\n self.param_dtype = param_dtype\n self.dtype = dtype\n self.use_flash_attention = use_flash_attention\n self.dropout = dropout\n self.mask_limit = mask_limit\n self.decode = decode\n\n self.tokenizer = TokenizerVQVAE(\n in_dim=self.in_dim,\n model_dim=self.tokenizer_dim,\n ffn_dim=self.tokenizer_ffn_dim,\n latent_dim=self.latent_patch_dim,\n num_latents=self.num_patch_latents,\n patch_size=self.patch_size,\n num_blocks=self.tokenizer_num_blocks,\n num_heads=self.tokenizer_num_heads,\n dropout=0.0,\n codebook_dropout=0.0,\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n use_flash_attention=self.use_flash_attention,\n rngs=rngs,\n )\n if self.use_gt_actions:\n self.action_embed = nnx.Embed(\n self.num_actions, self.latent_action_dim, rngs=rngs\n )\n self.lam = None\n else:\n self.lam = LatentActionModel(\n in_dim=self.in_dim,\n model_dim=self.lam_dim,\n ffn_dim=self.lam_ffn_dim,\n latent_dim=self.latent_patch_dim,\n num_latents=self.num_actions,\n patch_size=self.lam_patch_size,\n num_blocks=self.lam_num_blocks,\n num_heads=self.lam_num_heads,\n dropout=0.0,\n codebook_dropout=0.0,\n 
param_dtype=self.param_dtype,\n dtype=self.dtype,\n use_flash_attention=self.use_flash_attention,\n rngs=rngs,\n )\n self.action_embed = None\n if self.dyna_type == ""maskgit"":\n self.dynamics = DynamicsMaskGIT(\n model_dim=self.dyna_dim,\n ffn_dim=self.dyna_ffn_dim,\n num_latents=self.num_patch_latents,\n latent_action_dim=self.latent_action_dim,\n num_blocks=self.dyna_num_blocks,\n num_heads=self.dyna_num_heads,\n dropout=self.dropout,\n mask_limit=self.mask_limit,\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n use_flash_attention=self.use_flash_attention,\n rngs=rngs,\n )\n elif self.dyna_type == ""causal"":\n self.dynamics = DynamicsCausal(\n model_dim=self.dyna_dim,\n ffn_dim=self.dyna_ffn_dim,\n num_latents=self.num_patch_latents,\n latent_action_dim=self.latent_action_dim,\n num_blocks=self.dyna_num_blocks,\n num_heads=self.dyna_num_heads,\n dropout=self.dropout,\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n use_flash_attention=self.use_flash_attention,\n decode=decode,\n rngs=rngs,\n )\n else:\n raise ValueError(f""Invalid dynamics type: {self.dyna_type}"")\n\n def __call__(\n self,\n batch: Dict[str, jax.Array],\n training: bool = True,\n ) -> Dict[str, jax.Array]:\n videos_BTHWC = batch[""videos""]\n tokenizer_outputs = self.tokenizer.vq_encode(videos_BTHWC, training=False)\n token_indices_BTN = tokenizer_outputs[""indices""]\n latent_actions_BTm11L = None\n action_embeddings_BTm11L = None\n if self.use_gt_actions:\n assert self.action_embed is not None\n action_indices_E = None\n action_embeddings_BT1L = self.action_embed(batch[""actions""]).reshape(\n *batch[""actions""].shape[:2], 1, self.latent_action_dim\n )\n action_embeddings_BTm11L = action_embeddings_BT1L[:, :-1]\n else:\n assert self.lam is not None\n lam_outputs = self.lam.vq_encode(videos_BTHWC, training=False)\n z_q_BTm11L = lam_outputs[""z_q""]\n action_indices_E = lam_outputs[""indices""]\n latent_actions_BTm11L = jax.lax.cond(\n self.lam_co_train,\n lambda: z_q_BTm11L,\n lambda: jax.lax.stop_gradient(z_q_BTm11L),\n )\n outputs = dict(\n video_tokens=jax.lax.stop_gradient(token_indices_BTN),\n latent_actions=(\n action_embeddings_BTm11L\n if self.use_gt_actions\n else latent_actions_BTm11L\n ),\n )\n outputs[""mask_rng""] = batch[""rng""]\n dyna_logits_BTNV, dyna_mask = self.dynamics(outputs, training)\n outputs[""token_logits""] = dyna_logits_BTNV\n outputs[""mask""] = dyna_mask\n mle_indices_BTN = jnp.argmax(outputs[""token_logits""], axis=-1)\n H, W = batch[""videos""].shape[2:4]\n outputs[""recon""] = self.tokenizer.decode(mle_indices_BTN, (H, W))\n if action_indices_E is not None:\n outputs[""lam_indices""] = action_indices_E\n return outputs\n\n def sample(\n self,\n batch: Dict[str, jax.Array],\n seq_len: int,\n temperature: float = 1,\n sample_argmax: bool = False,\n maskgit_steps: int = 25,\n ) -> tuple[jax.Array, jax.Array]:\n if self.dyna_type == ""maskgit"":\n return self.sample_maskgit(\n batch, seq_len, maskgit_steps, temperature, sample_argmax\n )\n elif self.dyna_type == ""causal"":\n return self.sample_causal(batch, seq_len, temperature, sample_argmax)\n else:\n raise ValueError(f""Dynamics model type unknown: {self.dyna_type}"")\n\n def sample_maskgit(\n self,\n batch: Dict[str, jax.Array],\n seq_len: int,\n steps: int = 25,\n temperature: float = 1,\n sample_argmax: bool = False,\n ) -> tuple[jax.Array, jax.Array]:\n """"""\n Autoregressively samples up to `seq_len` future frames, following Figure 8 of the paper.\n\n - Input frames are tokenized once.\n - Future frames are 
generated autoregressively in token space.\n - All frames are detokenized in a single pass.\n\n Note:\n - For interactive or step-wise sampling, detokenization should occur after each action.\n - To maintain consistent tensor shapes across timesteps, all current and future frames are decoded at every step.\n - Temporal causal structure is preserved by\n a) reapplying the mask before each decoding step.\n b) a temporal causal mask is applied within each ST-transformer block.\n\n Dimension keys:\n B: batch size\n T: number of input (conditioning) frames\n N: number of patches per frame\n M: model dimension\n S: sequence length\n H: height\n W: width\n E: B * (S - 1)\n P: S * N\n """"""\n assert isinstance(self.dynamics, DynamicsMaskGIT)\n # --- Encode videos and actions ---\n videos_BTHWC = batch[""videos""]\n tokenizer_out = self.tokenizer.vq_encode(videos_BTHWC, training=False)\n token_idxs_BTN = tokenizer_out[""indices""]\n B, T, N = token_idxs_BTN.shape\n pad_shape = (B, seq_len - T, N)\n pad = jnp.zeros(pad_shape, dtype=token_idxs_BTN.dtype)\n token_idxs_BSN = jnp.concatenate([token_idxs_BTN, pad], axis=1)\n init_logits_BSNV = jnp.zeros(\n shape=(*token_idxs_BSN.shape, self.num_patch_latents)\n )\n if self.use_gt_actions:\n assert self.action_embed is not None\n latent_actions_BT1L = self.action_embed(batch[""actions""]).reshape(\n *batch[""actions""].shape[:2], 1, self.latent_action_dim\n )\n latent_actions_BTm11L = latent_actions_BT1L[:, :-1]\n action_tokens_EL = latent_actions_BTm11L.reshape(-1, self.latent_action_dim)\n else:\n assert self.lam is not None\n latent_actions_E = batch[""latent_actions""]\n action_tokens_EL = self.lam.vq.get_codes(latent_actions_E)\n\n # --- Extract submodule state ---\n dynamics_state = nnx.state(self.dynamics)\n\n @nnx.scan(in_axes=(nnx.Carry, 0), out_axes=nnx.Carry)\n def maskgit_step_fn(\n carry: tuple[jax.Array, jax.Array, jax.Array, jax.Array, jax.Array],\n step: jax.Array,\n ) -> tuple[jax.Array, jax.Array, jax.Array, jax.Array, jax.Array]:\n rng, token_idxs_BSN, logits_BSNV, mask_BSN, action_tokens_EL = carry\n S, N = token_idxs_BSN.shape[1:]\n L = action_tokens_EL.shape[-1]\n\n # We need to reconstruct the submodule inside scan body to prevent trace context mismatches\n dynamics_maskgit = DynamicsMaskGIT(\n model_dim=self.dyna_dim,\n ffn_dim=self.dyna_ffn_dim,\n num_latents=self.num_patch_latents,\n latent_action_dim=self.latent_action_dim,\n num_blocks=self.dyna_num_blocks,\n num_heads=self.dyna_num_heads,\n dropout=self.dropout,\n mask_limit=self.mask_limit,\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n use_flash_attention=self.use_flash_attention,\n rngs=nnx.Rngs(0),\n )\n nnx.update(dynamics_maskgit, dynamics_state)\n\n # --- Construct + encode video ---\n vid_embed_BSNM = dynamics_maskgit.patch_embed(token_idxs_BSN)\n mask_token_111M = dynamics_maskgit.mask_token.value\n mask_expanded_BSN1 = mask_BSN[..., None]\n vid_embed_BSNM = jnp.where(\n mask_expanded_BSN1, mask_token_111M, vid_embed_BSNM\n )\n\n # --- Predict transition ---\n action_tokens_BSm1L = jnp.reshape(action_tokens_EL, (B, S - 1, L))\n act_embed_BSm1M = dynamics_maskgit.action_up(action_tokens_BSm1L)\n act_embed_BSM = jnp.pad(act_embed_BSm1M, ((0, 0), (1, 0), (0, 0)))\n act_embed_BS1M = jnp.reshape(\n act_embed_BSM, (B, S, 1, act_embed_BSM.shape[-1])\n )\n vid_embed_BSNM += act_embed_BS1M\n unmasked_ratio = jnp.cos(jnp.pi * (step + 1) / (steps * 2))\n step_temp = temperature * (1.0 - unmasked_ratio)\n final_logits_BSNV = dynamics_maskgit.transformer(vid_embed_BSNM) / 
step_temp\n\n # --- Sample new tokens for final frame ---\n if sample_argmax:\n sampled_token_idxs_BSN = jnp.argmax(final_logits_BSNV, axis=-1)\n else:\n rng, _rng = jax.random.split(rng)\n sampled_token_idxs_BSN = jax.random.categorical(_rng, final_logits_BSNV)\n gather_fn = jax.vmap(jax.vmap(jax.vmap(lambda x, y: x[y])))\n final_token_probs_BSN = gather_fn(\n jax.nn.softmax(final_logits_BSNV), sampled_token_idxs_BSN\n )\n final_token_probs_BSN += ~mask_BSN\n # Update masked tokens and logits only\n token_idxs_BSN = jnp.where(mask_BSN, sampled_token_idxs_BSN, token_idxs_BSN)\n logits_BSNV = jnp.where(\n jnp.expand_dims(mask_BSN, -1), final_logits_BSNV, logits_BSNV\n )\n\n # --- Update mask ---\n num_unmasked_tokens = jnp.round(N * (1.0 - unmasked_ratio)).astype(int)\n final_token_probs_flat_BP = einops.rearrange(\n final_token_probs_BSN, ""b s n -> b (s n)""\n )\n idx_mask_P = (\n jnp.arange(final_token_probs_flat_BP.shape[-1])\n <= N - num_unmasked_tokens\n )\n sorted_idxs_BP = jnp.argsort(final_token_probs_flat_BP, axis=-1)\n mask_update_fn = jax.vmap(lambda msk, ids: msk.at[ids].set(idx_mask_P))\n mask_flat_BP = einops.rearrange(mask_BSN, ""b s n -> b (s n)"")\n new_mask_flat_BP = mask_update_fn(mask_flat_BP, sorted_idxs_BP)\n new_mask_BSN = einops.rearrange(new_mask_flat_BP, ""b (s n) -> b s n"", n=N)\n\n new_carry = (\n rng,\n token_idxs_BSN,\n logits_BSNV,\n new_mask_BSN,\n action_tokens_EL,\n )\n return new_carry\n\n @nnx.scan(in_axes=(nnx.Carry, 0), out_axes=nnx.Carry)\n def generation_step_fn(\n carry: tuple[jax.Array, jax.Array, jax.Array], step_t: jax.Array\n ) -> tuple[jax.Array, jax.Array, jax.Array]:\n rng, current_token_idxs_BSN, current_logits_BSNV = carry\n rng, step_rng = jax.random.split(rng)\n\n # Mask current frame (i.e., t == step_t)\n mask_S = jnp.arange(seq_len) == step_t\n mask_BSN = jnp.broadcast_to(mask_S[None, :, None], (B, seq_len, N)).astype(\n bool\n )\n masked_token_idxs_BSN = current_token_idxs_BSN * ~mask_BSN\n masked_logits_BSNV = current_logits_BSNV * jnp.expand_dims(~mask_BSN, -1)\n\n # --- Initialize and run MaskGIT loop ---\n init_carry_maskgit = (\n step_rng,\n masked_token_idxs_BSN,\n masked_logits_BSNV,\n mask_BSN,\n action_tokens_EL,\n )\n final_carry_maskgit = maskgit_step_fn(init_carry_maskgit, jnp.arange(steps))\n updated_token_idxs_BSN = final_carry_maskgit[1]\n updated_logits_BSNV = final_carry_maskgit[2]\n new_carry = (rng, updated_token_idxs_BSN, updated_logits_BSNV)\n return new_carry\n\n # --- Run the autoregressive generation using jax.lax.scan ---\n initial_carry = (batch[""rng""], token_idxs_BSN, init_logits_BSNV)\n timesteps_to_scan = jnp.arange(T, seq_len)\n final_carry = generation_step_fn(initial_carry, timesteps_to_scan)\n final_token_idxs_BSN = final_carry[1]\n final_logits_BSNV = final_carry[2]\n\n # --- Decode all tokens at once at the end ---\n H, W = batch[""videos""].shape[2:4]\n final_frames_BSHWC = self.tokenizer.decode(\n final_token_idxs_BSN,\n video_hw=(H, W),\n )\n return final_frames_BSHWC, final_logits_BSNV\n\n def sample_causal(\n self,\n batch: Dict[str, jax.Array],\n seq_len: int,\n temperature: float = 1,\n sample_argmax: bool = False,\n ) -> tuple[jax.Array, jax.Array]:\n """"""\n Autoregressively samples up to `seq_len` future frames, following Figure 8 of the paper.\n\n - Input frames are tokenized once.\n - Future frames are generated autoregressively in token space.\n - All frames are detokenized in a single pass.\n\n Note:\n - For interactive or step-wise sampling, detokenization should occur after each 
action.\n - To maintain consistent tensor shapes across timesteps, all current and future frames are decoded at every step.\n - Temporal causal structure is preserved by\n a) reapplying the mask before each decoding step.\n b) a temporal causal mask is applied within each ST-transformer block.\n\n Dimension keys:\n B: batch size\n T: number of input (conditioning) frames\n N: number of patches per frame\n M: model dimension\n S: sequence length\n H: height\n W: width\n E: B * (S - 1)\n """"""\n assert isinstance(self.dynamics, DynamicsCausal)\n # --- Encode videos and actions ---\n videos_BTHWC = batch[""videos""]\n tokenizer_out = self.tokenizer.vq_encode(videos_BTHWC, training=False)\n token_idxs_BTN = tokenizer_out[""indices""]\n B, T, N = token_idxs_BTN.shape\n pad_shape = (B, seq_len - T, N)\n pad = jnp.zeros(pad_shape, dtype=token_idxs_BTN.dtype)\n token_idxs_BSN = jnp.concatenate([token_idxs_BTN, pad], axis=1)\n logits_BSNV = jnp.zeros((*token_idxs_BSN.shape, self.num_patch_latents))\n dynamics_state = nnx.state(self.dynamics)\n\n if self.use_gt_actions:\n assert self.action_embed is not None\n latent_actions_BT1L = self.action_embed(batch[""actions""]).reshape(\n *batch[""actions""].shape[:2], 1, self.latent_action_dim\n )\n latent_actions_BTm11L = latent_actions_BT1L[:, :-1]\n action_tokens_EL = latent_actions_BTm11L.reshape(-1, self.latent_action_dim)\n else:\n assert self.lam is not None\n latent_actions_E = batch[""latent_actions""]\n action_tokens_EL = self.lam.vq.get_codes(latent_actions_E)\n\n @nnx.scan(in_axes=(nnx.Carry, 0), out_axes=nnx.Carry)\n def causal_step_fn(\n carry: tuple[jax.Array, jax.Array, jax.Array, jax.Array, jax.Array],\n step_n: jax.Array,\n ) -> tuple[jax.Array, jax.Array, jax.Array, jax.Array, jax.Array]:\n rng, token_idxs_BSN, logits_BSNV, action_tokens_EL, step_t = carry\n S, N = token_idxs_BSN.shape[1:]\n L = action_tokens_EL.shape[-1]\n\n # We need to reconstruct the submodule inside scan body to prevent trace context mismatches\n dynamics_causal = DynamicsCausal(\n model_dim=self.dyna_dim,\n ffn_dim=self.dyna_ffn_dim,\n num_latents=self.num_patch_latents,\n latent_action_dim=self.latent_action_dim,\n num_blocks=self.dyna_num_blocks,\n num_heads=self.dyna_num_heads,\n dropout=self.dropout,\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n use_flash_attention=self.use_flash_attention,\n decode=self.decode,\n rngs=nnx.Rngs(0),\n )\n nnx.update(dynamics_causal, dynamics_state)\n\n # --- Construct + encode video ---\n vid_embed_BSNM = dynamics_causal.patch_embed(token_idxs_BSN)\n\n # --- Predict transition ---\n action_tokens_BSm1L = jnp.reshape(action_tokens_EL, (B, S - 1, L))\n act_embed_BSm1M = dynamics_causal.action_up(action_tokens_BSm1L)\n act_embed_BSM = jnp.pad(act_embed_BSm1M, ((0, 0), (1, 0), (0, 0)))\n act_embed_BS1M = jnp.reshape(\n act_embed_BSM, (B, S, 1, act_embed_BSM.shape[-1])\n )\n vid_embed_BSNp1M = jnp.concatenate([act_embed_BS1M, vid_embed_BSNM], axis=2)\n final_logits_BTNp1V = (\n dynamics_causal.transformer(vid_embed_BSNp1M, (step_t, step_n))\n / temperature\n )\n final_logits_BV = final_logits_BTNp1V[:, step_t, step_n, :]\n\n # --- Sample new tokens for final frame ---\n if sample_argmax:\n sampled_token_idxs_B = jnp.argmax(final_logits_BV, axis=-1)\n else:\n rng, _rng = jax.random.split(rng)\n sampled_token_idxs_B = jax.random.categorical(_rng, final_logits_BV)\n # Update next tokens only\n token_idxs_BSN = token_idxs_BSN.at[:, step_t, step_n].set(\n sampled_token_idxs_B\n )\n logits_BSNV = logits_BSNV.at[:, step_t, 
step_n].set(final_logits_BV)\n\n new_carry = (rng, token_idxs_BSN, logits_BSNV, action_tokens_EL, step_t)\n return new_carry\n\n @nnx.scan(in_axes=(nnx.Carry, 0), out_axes=nnx.Carry)\n def generation_step_fn(\n carry: tuple[jax.Array, jax.Array, jax.Array], step_t: jax.Array\n ) -> tuple[jax.Array, jax.Array, jax.Array]:\n rng, current_token_idxs_BSN, current_logits_BSNV = carry\n rng, step_rng = jax.random.split(rng)\n\n # --- Initialize and run causal loop ---\n init_carry_causal = (\n step_rng,\n current_token_idxs_BSN,\n current_logits_BSNV,\n action_tokens_EL,\n step_t,\n )\n final_carry_causal = causal_step_fn(init_carry_causal, jnp.arange(N))\n updated_token_idxs_BSN = final_carry_causal[1]\n updated_logits_BSNV = final_carry_causal[2]\n new_carry = (rng, updated_token_idxs_BSN, updated_logits_BSNV)\n return new_carry\n\n # --- Run the autoregressive generation using jax.lax.scan ---\n initial_carry = (batch[""rng""], token_idxs_BSN, logits_BSNV)\n timesteps_to_scan = jnp.arange(T, seq_len)\n final_carry = generation_step_fn(initial_carry, timesteps_to_scan)\n final_token_idxs_BSN = final_carry[1]\n final_logits_BSNV = final_carry[2]\n\n # --- Decode all tokens at once at the end ---\n H, W = batch[""videos""].shape[2:4]\n final_frames_BSHWC = self.tokenizer.decode(\n final_token_idxs_BSN,\n video_hw=(H, W),\n )\n return final_frames_BSHWC, final_logits_BSNV\n\n def vq_encode(self, batch: Dict[str, jax.Array], training: bool) -> jax.Array:\n # --- Preprocess videos ---\n assert self.lam is not None\n video_BTHWC = batch[""videos""]\n lam_output: Dict[str, jax.Array] = self.lam.vq_encode(\n video_BTHWC, training=training\n )\n lam_indices_E = lam_output[""indices""]\n return lam_indices_E\n\n\n# FIXME (f.srambical): add conversion script for old checkpoints\ndef restore_genie_components(\n optimizer: nnx.Optimizer,\n sharding: jax.sharding.NamedSharding,\n rng: jax.Array,\n args,\n) -> nnx.Optimizer:\n """"""Restore pre-trained Genie components""""""\n rng_tokenizer, rng_lam = jax.random.split(rng)\n rngs_tokenizer = nnx.Rngs(rng_tokenizer)\n rngs_lam = nnx.Rngs(rng_lam)\n\n tx = optimizer.tx\n model = optimizer.model\n handler_registry = ocp.handlers.DefaultCheckpointHandlerRegistry()\n handler_registry.add(\n ""model_state"", ocp.args.PyTreeRestore, ocp.handlers.PyTreeCheckpointHandler\n )\n\n checkpoint_options = ocp.CheckpointManagerOptions(\n step_format_fixed_length=6,\n )\n tokenizer_checkpoint_manager = ocp.CheckpointManager(\n directory=args.tokenizer_checkpoint,\n options=checkpoint_options,\n handler_registry=handler_registry,\n )\n dummy_tokenizer = TokenizerVQVAE(\n in_dim=args.image_channels,\n model_dim=args.tokenizer_dim,\n ffn_dim=args.tokenizer_ffn_dim,\n latent_dim=args.latent_patch_dim,\n num_latents=args.num_patch_latents,\n patch_size=args.patch_size,\n num_blocks=args.tokenizer_num_blocks,\n num_heads=args.tokenizer_num_heads,\n dropout=args.dropout,\n codebook_dropout=args.dropout,\n param_dtype=args.param_dtype,\n dtype=args.dtype,\n use_flash_attention=args.use_flash_attention,\n rngs=rngs_tokenizer,\n )\n dummy_tokenizer_optimizer = nnx.Optimizer(dummy_tokenizer, tx)\n dummy_tokenizer_optimizer_state = nnx.state(dummy_tokenizer_optimizer)\n abstract_sharded_tokenizer_optimizer_state = _create_abstract_sharded_pytree(\n dummy_tokenizer_optimizer_state, sharding\n )\n restored_tokenizer = tokenizer_checkpoint_manager.restore(\n step=tokenizer_checkpoint_manager.latest_step(),\n args=ocp.args.Composite(\n model_state=ocp.args.PyTreeRestore( # type: ignore\n 
abstract_sharded_tokenizer_optimizer_state # type: ignore\n ),\n ),\n )[""model_state""]\n nnx.update(dummy_tokenizer_optimizer.model, restored_tokenizer.model)\n model.tokenizer = dummy_tokenizer_optimizer.model\n tokenizer_checkpoint_manager.close()\n\n if args.lam_checkpoint:\n lam_checkpoint_manager = ocp.CheckpointManager(\n directory=args.lam_checkpoint,\n options=checkpoint_options,\n handler_registry=handler_registry,\n )\n dummy_lam = LatentActionModel(\n in_dim=args.image_channels,\n model_dim=args.lam_dim,\n ffn_dim=args.lam_ffn_dim,\n latent_dim=args.latent_patch_dim,\n num_latents=args.num_actions,\n patch_size=args.lam_patch_size,\n num_blocks=args.lam_num_blocks,\n num_heads=args.lam_num_heads,\n dropout=args.dropout,\n codebook_dropout=args.dropout,\n param_dtype=args.param_dtype,\n dtype=args.dtype,\n use_flash_attention=args.use_flash_attention,\n rngs=rngs_lam,\n )\n dummy_lam_optimizer = nnx.Optimizer(dummy_lam, tx)\n dummy_lam_optimizer_state = nnx.state(dummy_lam_optimizer)\n abstract_sharded_lam_optimizer_state = _create_abstract_sharded_pytree(\n dummy_lam_optimizer_state, sharding\n )\n restored_lam_optimizer = lam_checkpoint_manager.restore(\n step=lam_checkpoint_manager.latest_step(),\n args=ocp.args.Composite(\n model_state=ocp.args.PyTreeRestore( # type: ignore\n abstract_sharded_lam_optimizer_state # type: ignore\n ),\n ),\n )[""model_state""]\n nnx.update(dummy_lam_optimizer.model, restored_lam_optimizer.model)\n model.lam = dummy_lam_optimizer.model\n # Remove the LAM decoder to save memory and avoid unnecessary computation.\n del model.lam.decoder\n lam_checkpoint_manager.close()\n\n # Reinitialize the optimizer states\n optimizer = nnx.Optimizer(model, tx)\n return optimizer\n\n\ndef _create_abstract_sharded_pytree(\n pytree_template: nnx.GraphState, sharding_spec: jax.sharding.NamedSharding\n) -> jax.Array:\n """"""Replaces arrays in a pytree with ShapeDtypeStructs having the given sharding.""""""\n\n def map_fn(leaf_template):\n if hasattr(leaf_template, ""shape"") and hasattr(leaf_template, ""dtype""):\n return jax.ShapeDtypeStruct(\n leaf_template.shape, leaf_template.dtype, sharding=sharding_spec\n )\n return leaf_template\n\n return jax.tree_util.tree_map(map_fn, pytree_template)\n",python,tab +90,427159,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/genie.py",1969,0,"",python,selection_command +91,430940,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/genie.py",5956,0,"",python,selection_mouse +92,434165,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/genie.py",5984,0,"",python,selection_mouse +93,434341,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/genie.py",5969,24,"action_embeddings_BTm11L",python,selection_mouse +94,435035,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/genie.py",5990,0,"",python,selection_mouse +95,436026,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/genie.py",5983,0,"",python,selection_mouse +96,436509,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/genie.py",5966,0,"",python,selection_mouse +97,436911,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/genie.py",5982,0,"",python,selection_mouse +98,437624,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/genie.py",5993,0,"",python,selection_mouse +99,438318,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/genie.py",5956,0,"",python,selection_mouse 
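The genie.py buffer captured above drives MaskGIT refinement with a cosine unmasking schedule. A minimal standalone sketch of that schedule, with illustrative `steps` and patch-count values rather than ones taken from this session:

import jax.numpy as jnp

def tokens_unmasked_per_step(steps: int, n_patches: int) -> jnp.ndarray:
    # unmasked_ratio = cos(pi * (step + 1) / (2 * steps)) decays toward 0,
    # so the number of token predictions kept grows monotonically per step.
    step = jnp.arange(steps)
    unmasked_ratio = jnp.cos(jnp.pi * (step + 1) / (steps * 2))
    return jnp.round(n_patches * (1.0 - unmasked_ratio)).astype(int)

print(tokens_unmasked_per_step(steps=25, n_patches=360))

Early steps keep only a handful of high-confidence tokens; at the final step cos(pi/2) = 0, so all N patch tokens of the frame are kept.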
+100,438723,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/genie.py",5987,0,"",python,selection_mouse +101,440219,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/genie.py",5863,0,"",python,selection_command +102,441101,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/genie.py",5987,0,"",python,selection_mouse +103,443067,"sample.py",0,0,"from dataclasses import dataclass\nimport time\nimport os\nimport optax\n\nimport dm_pix as pix\nimport einops\nimport jax\nimport jax.numpy as jnp\nimport flax.linen as nn\nimport numpy as np\nimport orbax.checkpoint as ocp\nfrom PIL import Image, ImageDraw\nimport tyro\nfrom flax import nnx\n\nfrom genie import Genie\nfrom utils.dataloader import get_dataloader\n\n\n@dataclass\nclass Args:\n # Experiment\n seed: int = 0\n seq_len: int = 16\n image_channels: int = 3\n image_height: int = 90\n image_width: int = 160\n data_dir: str = ""data/coinrun_episodes""\n checkpoint: str = """"\n # Sampling\n batch_size: int = 1\n maskgit_steps: int = 25\n temperature: float = 1.0\n sample_argmax: bool = True\n start_frame: int = 1\n # Tokenizer checkpoint\n tokenizer_dim: int = 512\n tokenizer_ffn_dim: int = 2048\n latent_patch_dim: int = 32\n num_patch_latents: int = 1024\n patch_size: int = 4\n tokenizer_num_blocks: int = 4\n tokenizer_num_heads: int = 8\n # LAM checkpoint\n lam_dim: int = 512\n lam_ffn_dim: int = 2048\n latent_action_dim: int = 32\n num_actions: int = 6\n lam_patch_size: int = 16\n lam_num_blocks: int = 4\n lam_num_heads: int = 8\n use_gt_actions: bool = False\n # Dynamics checkpoint\n dyna_type: str = ""maskgit""\n dyna_dim: int = 512\n dyna_ffn_dim: int = 2048\n dyna_num_blocks: int = 6\n dyna_num_heads: int = 8\n param_dtype = jnp.float32\n dtype = jnp.bfloat16\n use_flash_attention: bool = True\n\n\nargs = tyro.cli(Args)\n\nif __name__ == ""__main__"":\n """"""\n Dimension keys:\n B: batch size\n T: number of input (conditioning) frames\n N: number of patches per frame\n S: sequence length\n H: height\n W: width\n E: B * (S - 1)\n """"""\n jax.distributed.initialize()\n\n rng = jax.random.key(args.seed)\n\n # --- Load Genie checkpoint ---\n rngs = nnx.Rngs(rng)\n genie = Genie(\n # Tokenizer\n in_dim=args.image_channels,\n tokenizer_dim=args.tokenizer_dim,\n tokenizer_ffn_dim=args.tokenizer_ffn_dim,\n latent_patch_dim=args.latent_patch_dim,\n num_patch_latents=args.num_patch_latents,\n patch_size=args.patch_size,\n tokenizer_num_blocks=args.tokenizer_num_blocks,\n tokenizer_num_heads=args.tokenizer_num_heads,\n # LAM\n lam_dim=args.lam_dim,\n lam_ffn_dim=args.lam_ffn_dim,\n latent_action_dim=args.latent_action_dim,\n num_actions=args.num_actions,\n lam_patch_size=args.lam_patch_size,\n lam_num_blocks=args.lam_num_blocks,\n lam_num_heads=args.lam_num_heads,\n lam_co_train=False,\n use_gt_actions=args.use_gt_actions,\n # Dynamics\n dyna_type=args.dyna_type,\n dyna_dim=args.dyna_dim,\n dyna_ffn_dim=args.dyna_ffn_dim,\n dyna_num_blocks=args.dyna_num_blocks,\n dyna_num_heads=args.dyna_num_heads,\n param_dtype=args.param_dtype,\n dtype=args.dtype,\n use_flash_attention=args.use_flash_attention,\n # FIXME (f.srambical): implement spatiotemporal KV caching and set decode=True\n decode=False,\n rngs=rngs,\n )\n\n del genie.tokenizer.vq.drop\n # Need to delete lam decoder for checkpoint loading\n if not args.use_gt_actions:\n assert genie.lam is not None\n del genie.lam.decoder\n\n handler_registry = ocp.handlers.DefaultCheckpointHandlerRegistry()\n handler_registry.add(\n ""model_state"", 
ocp.args.PyTreeSave, ocp.handlers.PyTreeCheckpointHandler\n )\n handler_registry.add(\n ""model_state"", ocp.args.PyTreeRestore, ocp.handlers.PyTreeCheckpointHandler\n )\n checkpoint_options = ocp.CheckpointManagerOptions(\n step_format_fixed_length=6,\n )\n checkpoint_manager = ocp.CheckpointManager(\n args.checkpoint,\n options=checkpoint_options,\n handler_registry=handler_registry,\n )\n\n dummy_tx = optax.adamw(\n learning_rate=optax.linear_schedule(0.0001, 0.0001, 10000),\n b1=0.9,\n b2=0.9,\n weight_decay=1e-4,\n mu_dtype=args.dtype,\n )\n dummy_optimizer = nnx.Optimizer(genie, dummy_tx)\n\n abstract_optimizer = nnx.eval_shape(lambda: dummy_optimizer)\n abstract_optimizer_state = nnx.state(abstract_optimizer)\n restored = checkpoint_manager.restore(\n checkpoint_manager.latest_step(),\n args=ocp.args.Composite(\n model_state=ocp.args.PyTreeRestore(abstract_optimizer_state), # type: ignore\n ),\n )\n restored_optimizer_state = restored[""model_state""]\n nnx.update(dummy_optimizer, restored_optimizer_state)\n\n # --- Define sampling function ---\n def _sampling_fn(model: Genie, batch: dict) -> jax.Array:\n """"""Runs Genie.sample with pre-defined generation hyper-parameters.""""""\n assert args.dyna_type in [\n ""maskgit"",\n ""causal"",\n ], f""Invalid dynamics type: {args.dyna_type}""\n frames, _ = model.sample(\n batch,\n args.seq_len,\n args.temperature,\n args.sample_argmax,\n args.maskgit_steps,\n )\n return frames\n\n # --- Define autoregressive sampling loop ---\n def _autoreg_sample(genie, rng, batch):\n batch[""videos""] = batch[""videos""][:, : args.start_frame]\n batch[""rng""] = rng\n generated_vid_BSHWC = _sampling_fn(genie, batch)\n return generated_vid_BSHWC\n\n # --- Get video + latent actions ---\n array_record_files = [\n os.path.join(args.data_dir, x)\n for x in os.listdir(args.data_dir)\n if x.endswith("".array_record"")\n ]\n dataloader = get_dataloader(\n array_record_files,\n args.seq_len,\n args.batch_size,\n args.image_height,\n args.image_width,\n args.image_channels,\n # We don't use workers in order to avoid grain shutdown issues (https://github.com/google/grain/issues/398)\n num_workers=0,\n prefetch_buffer_size=1,\n seed=args.seed,\n )\n dataloader = iter(dataloader)\n batch = next(dataloader)\n gt_video = jnp.asarray(batch[""videos""], dtype=jnp.float32) / 255.0\n batch[""videos""] = gt_video.astype(args.dtype)\n # Get latent actions for all videos in the batch\n action_batch_E = None\n if not args.use_gt_actions:\n action_batch_E = genie.vq_encode(batch, training=False)\n batch[""latent_actions""] = action_batch_E\n\n # --- Sample + evaluate video ---\n recon_video_BSHWC = _autoreg_sample(genie, rng, batch)\n recon_video_BSHWC = recon_video_BSHWC.astype(jnp.float32)\n gt = (\n gt_video[:, : recon_video_BSHWC.shape[1]]\n .clip(0, 1)\n .reshape(-1, *gt_video.shape[2:])\n )\n recon = recon_video_BSHWC.clip(0, 1).reshape(-1, *recon_video_BSHWC.shape[2:])\n ssim = jnp.asarray(\n pix.ssim(gt[:, args.start_frame :], recon[:, args.start_frame :])\n ).mean()\n print(f""SSIM: {ssim}"")\n\n # --- Construct video ---\n true_videos = (gt_video * 255).astype(np.uint8)\n pred_videos = (recon_video_BSHWC * 255).astype(np.uint8)\n video_comparison = np.zeros((2, *recon_video_BSHWC.shape), dtype=np.uint8)\n video_comparison[0] = true_videos[:, : args.seq_len]\n video_comparison[1] = pred_videos\n frames = einops.rearrange(video_comparison, ""n b t h w c -> t (b h) (n w) c"")\n\n # --- Save video ---\n imgs = [Image.fromarray(img) for img in frames]\n # Write actions on 
each frame, on each row (i.e., for each video in the batch, on the GT row)\n B, S, _, _, _ = batch[""videos""].shape\n if action_batch_E is not None:\n action_batch_BSm11 = jnp.reshape(action_batch_E, (B, S - 1, 1))\n else:\n action_batch_BSm11 = jnp.expand_dims(batch[""actions""][:, :-1], -1)\n for t, img in enumerate(imgs[1:]):\n d = ImageDraw.Draw(img)\n for row in range(B):\n action = action_batch_BSm11[row, t, 0]\n y_offset = row * batch[""videos""].shape[2] + 2\n d.text((2, y_offset), f""{action}"", fill=255)\n imgs[0].save(\n f""generation_{time.time()}.gif"",\n save_all=True,\n append_images=imgs[1:],\n duration=250,\n loop=0,\n )\n",python,tab +104,445401,"sample.py",0,0,"",python,tab +105,472322,"sample.py",7587,0,"",python,selection_mouse +106,472447,"sample.py",7587,1," ",python,selection_mouse +107,472538,"sample.py",7587,2," j",python,selection_mouse +108,472571,"sample.py",7587,3," jn",python,selection_mouse +109,472572,"sample.py",7587,4," jnp",python,selection_mouse +110,472582,"sample.py",7587,5," jnp.",python,selection_mouse +111,472658,"sample.py",7587,6," jnp.r",python,selection_mouse +112,472720,"sample.py",7587,7," jnp.re",python,selection_mouse +113,472824,"sample.py",7587,8," jnp.res",python,selection_mouse +114,473024,"sample.py",7595,0,"",python,selection_mouse +115,473687,"sample.py",7598,0,"",python,selection_mouse +116,473929,"sample.py",7597,1,"p",python,selection_mouse +117,473930,"sample.py",7596,2,"ap",python,selection_mouse +118,473930,"sample.py",7595,3,"hap",python,selection_mouse +119,473930,"sample.py",7594,4,"shap",python,selection_mouse +120,473977,"sample.py",7593,5,"eshap",python,selection_mouse +121,474067,"sample.py",7592,6,"reshap",python,selection_mouse +122,474104,"sample.py",7591,7,".reshap",python,selection_mouse +123,474140,"sample.py",7590,8,"p.reshap",python,selection_mouse +124,474199,"sample.py",7589,9,"np.reshap",python,selection_mouse +125,474322,"sample.py",7588,10,"jnp.reshap",python,selection_mouse +126,475680,"sample.py",7600,0,"",python,selection_mouse +127,477306,"sample.py",7591,0,"",python,selection_mouse +128,478426,"sample.py",7587,0,"",python,selection_mouse +129,480445,"sample.py",7639,0,"",python,selection_command +130,480692,"sample.py",7669,0,"",python,selection_command +131,481193,"sample.py",7639,0,"",python,selection_command +132,481546,"sample.py",7640,0,"\n action_batch_BSm11 = jnp.reshape(action_batch_E, (B, S - 1, 1))",python,content +133,481580,"sample.py",7649,0,"",python,selection_command +134,482629,"sample.py",7721,0,"",python,selection_command +135,483575,"sample.py",7722,0,"",python,selection_command +136,484055,"sample.py",7723,0,"",python,selection_command +137,484114,"sample.py",7724,0,"",python,selection_command +138,484115,"sample.py",7725,0,"",python,selection_command +139,484134,"sample.py",7726,0,"",python,selection_command +140,484179,"sample.py",7727,0,"",python,selection_command +141,484276,"sample.py",7728,0,"",python,selection_command +142,484276,"sample.py",7729,0,"",python,selection_command +143,484277,"sample.py",7730,0,"",python,selection_command +144,484279,"sample.py",7731,0,"",python,selection_command +145,484322,"sample.py",7732,0,"",python,selection_command +146,484349,"sample.py",7733,0,"",python,selection_command +147,484376,"sample.py",7734,0,"",python,selection_command +148,484444,"sample.py",7735,0,"",python,selection_command +149,484445,"sample.py",7736,0,"",python,selection_command +150,484477,"sample.py",7737,0,"",python,selection_command 
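sample.py above tiles ground-truth and predicted videos into a single comparison strip with einops.rearrange("n b t h w c -> t (b h) (n w) c"). A minimal sketch of that layout on dummy data (shapes are illustrative, not taken from the session):

import numpy as np
import einops

# n=2 video rows (ground truth, prediction), b batch items, t frames of h x w x c.
n, b, t, h, w, c = 2, 4, 16, 64, 64, 3
video_comparison = np.zeros((n, b, t, h, w, c), dtype=np.uint8)
frames = einops.rearrange(video_comparison, "n b t h w c -> t (b h) (n w) c")
print(frames.shape)  # (16, 256, 128, 3): per frame, batch items stack vertically,
                     # ground truth sits in the left column, prediction in the right.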
+151,484534,"sample.py",7738,0,"",python,selection_command +152,484535,"sample.py",7739,0,"",python,selection_command +153,484561,"sample.py",7740,0,"",python,selection_command +154,484598,"sample.py",7741,0,"",python,selection_command +155,484674,"sample.py",7742,0,"",python,selection_command +156,484675,"sample.py",7743,0,"",python,selection_command +157,484682,"sample.py",7744,0,"",python,selection_command +158,484724,"sample.py",7745,0,"",python,selection_command +159,484790,"sample.py",7746,0,"",python,selection_command +160,484790,"sample.py",7747,0,"",python,selection_command +161,484792,"sample.py",7748,0,"",python,selection_command +162,484826,"sample.py",7749,0,"",python,selection_command +163,484894,"sample.py",7750,0,"",python,selection_command +164,484894,"sample.py",7751,0,"",python,selection_command +165,484982,"sample.py",7752,0,"",python,selection_command +166,484983,"sample.py",7753,0,"",python,selection_command +167,484983,"sample.py",7754,0,"",python,selection_command +168,485014,"sample.py",7755,0,"",python,selection_command +169,485043,"sample.py",7756,0,"",python,selection_command +170,485058,"sample.py",7757,0,"",python,selection_command +171,485097,"sample.py",7758,0,"",python,selection_command +172,485343,"sample.py",7759,0,"",python,selection_command +173,485579,"sample.py",7760,0,"",python,selection_command +174,486472,"sample.py",7759,0,"",python,selection_command +175,486594,"sample.py",7758,0,"",python,selection_command +176,487197,"sample.py",7759,0,"",python,selection_command +177,489146,"sample.py",7758,0,"",python,selection_command +178,489404,"sample.py",7758,1,"b",python,selection_command +179,489591,"sample.py",7758,2,"ba",python,selection_command +180,490033,"sample.py",7758,3,"bat",python,selection_command +181,490147,"sample.py",7758,4,"batc",python,selection_command +182,490148,"sample.py",7758,5,"batch",python,selection_command +183,490148,"sample.py",7758,6,"batch[",python,selection_command +184,490241,"sample.py",7758,7,"batch[""",python,selection_command +185,490242,"sample.py",7758,8,"batch[""a",python,selection_command +186,490243,"sample.py",7758,9,"batch[""ac",python,selection_command +187,490322,"sample.py",7758,10,"batch[""act",python,selection_command +188,490323,"sample.py",7758,11,"batch[""acti",python,selection_command +189,490324,"sample.py",7758,12,"batch[""actio",python,selection_command +190,490350,"sample.py",7758,13,"batch[""action",python,selection_command +191,490403,"sample.py",7758,14,"batch[""actions",python,selection_command +192,490416,"sample.py",7758,15,"batch[""actions""",python,selection_command +193,490520,"sample.py",7758,16,"batch[""actions""]",python,selection_command +194,490522,"sample.py",7758,17,"batch[""actions""][",python,selection_command +195,490532,"sample.py",7758,18,"batch[""actions""][:",python,selection_command +196,490533,"sample.py",7758,19,"batch[""actions""][:,",python,selection_command +197,490564,"sample.py",7758,20,"batch[""actions""][:, ",python,selection_command +198,490592,"sample.py",7758,21,"batch[""actions""][:, :",python,selection_command +199,490708,"sample.py",7758,22,"batch[""actions""][:, :-",python,selection_command +200,490895,"sample.py",7758,23,"batch[""actions""][:, :-1",python,selection_command +201,491281,"sample.py",7758,24,"batch[""actions""][:, :-1]",python,selection_command +202,491585,"sample.py",7758,24,"",python,content +203,491677,"sample.py",7686,0,"",python,selection_command +204,492165,"sample.py",7685,0,"",python,selection_command 
+205,492319,"sample.py",7684,0,"",python,selection_command +206,492485,"sample.py",7683,0,"",python,selection_command +207,492632,"sample.py",7682,0,"",python,selection_command +208,492775,"sample.py",7681,0,"",python,selection_command +209,493965,"sample.py",7682,0,"batch[""actions""][:, :-1]",python,content +210,493990,"sample.py",7705,0,"",python,selection_command +211,494824,"sample.py",7706,0,"",python,selection_command +212,495139,"sample.py",7706,14,"",python,content +213,496501,"sample.py",7772,0,"",python,selection_command +214,497863,"sample.py",7723,51,"",python,content +215,497919,"sample.py",7727,0,"",python,selection_command +216,537108,"slurm/jobs/mihir/horeka/coinrun/sample_maskgit.sbatch",0,0,"#!/usr/bin/env bash\n\n#SBATCH --nodes=1\n#SBATCH --ntasks-per-node=4\n#SBATCH --time=01:00:00\n#SBATCH --partition=accelerated\n#SBATCH --cpus-per-task=5\n#SBATCH --gres=gpu:4\n#SBATCH --output=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir/coinrun/dynamics/sampling/maskgit/%x_%j.log\n#SBATCH --error=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir/coinrun/dynamics/sampling/maskgit/%x_%j.log\n#SBATCH --job-name=coinrun_sample_maskgit\n\n# Unload modules that may interfere\nmodule unload mpi/openmpi/5.0\nmodule unload devel/cuda/12.4\n\n# Activate virtual environment\nsource .venv/bin/activate\n\narray_records_dir=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_coinrun/coinrun_episodes_test/val\nCHECKPOINT_PATH=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/big-runs/dyn/interactive/3500285\n\n\necho ""Sampling from checkpoint: $CHECKPOINT_PATH""\n\nsrun python sample.py \\n --checkpoint ""$CHECKPOINT_PATH"" \\n --data_dir=$array_records_dir \\n --seq_len=16 \\n --batch_size=4 \\n --no-use-gt-actions \\n --start_frame=1 \\n --image_height=64 \\n --image_width=64 \\n --dyna_type=maskgit",shellscript,tab +217,541843,"TERMINAL",0,0,"bash",,terminal_focus +218,543654,"TERMINAL",0,0,"queue",,terminal_command +219,543734,"TERMINAL",0,0,"]633;C",,terminal_output +220,543907,"TERMINAL",0,0,"[?1049h(B[?7hEvery 1.0s: squeue --mehkn1990.localdomain: Tue Sep 23 13:00:03 2025JOBID PARTITION NAME USER ST\tTIME NODES NODELIST(REASON)3511158 accelerat train_dy tum_cte0 PD\t0:00\t 1 (Priority)3511159 accelerat train_dy tum_cte0 PD\t0:00\t 1 (Priority)3512804 accelerat interact tum_cte0 PD\t0:00\t 1 (Priority)3512647 dev_accel interact tum_cte0 R58:54\t 1 hkn04033512651 large preproce tum_cte0 R56:18\t 1 hkn1901",,terminal_output +221,545037,"TERMINAL",0,0,"459",,terminal_output +222,545412,"TERMINAL",0,0,"[?1049l\r[?1l>]0;tum_cte0515@hkn1990:~/Projects/jasmine",,terminal_output +223,548124,"TERMINAL",0,0,"salloc --time=01:00:00 --partition=dev_accelerated --nodes=1 --gres=gpu:1 --cpus-per-task=8",,terminal_command +224,548190,"TERMINAL",0,0,"]633;Csalloc: Pending job allocation 3512808\r\nsalloc: job 3512808 queued and waiting for resources\r\n",,terminal_output +225,549694,"TERMINAL",0,0,"bash",,terminal_focus +226,551208,"TERMINAL",0,0,"queue",,terminal_command +227,551279,"TERMINAL",0,0,"]633;C[?1049h(B[?7hEvery 1.0s: squeue --mehkn1990.localdomain: Tue Sep 23 13:00:10 2025JOBID PARTITION NAME USER ST\tTIME NODES NODELIST(REASON)3511158 accelerat train_dy tum_cte0 PD\t0:00\t 1 (Priority)3511159 accelerat train_dy tum_cte0 PD\t0:00\t 1 (Priority)3512804 accelerat interact tum_cte0 PD\t0:00\t 1 (Priority)3512808 dev_accel interact tum_cte0 PD\t0:00\t 1 (QOSMaxJobsPerUserLimit)3512647 dev_accel interact tum_cte0 R59:01\t 
1 hkn04033512651 large preproce tum_cte0 R56:25\t 1 hkn1901",,terminal_output +228,552383,"TERMINAL",0,0,"126",,terminal_output +229,553547,"TERMINAL",0,0,"237",,terminal_output +230,554452,"TERMINAL",0,0,"348",,terminal_output +231,555460,"TERMINAL",0,0,"459",,terminal_output +232,556500,"TERMINAL",0,0,"5630",,terminal_output +233,557627,"TERMINAL",0,0,"671",,terminal_output +234,558652,"TERMINAL",0,0,"782",,terminal_output +235,559298,"TERMINAL",0,0,"[?1049l\r[?1l>]0;tum_cte0515@hkn1990:~/Projects/jasmine",,terminal_output +236,566702,"TERMINAL",0,0,"scancel 3512647",,terminal_command +237,566743,"TERMINAL",0,0,"]633;C]0;tum_cte0515@hkn1990:~/Projects/jasmine",,terminal_output +238,571242,"TERMINAL",0,0,"queue",,terminal_command +239,571305,"TERMINAL",0,0,"]633;C[?1049h(B[?7hEvery 1.0s: squeue --mehkn1990.localdomain: Tue Sep 23 13:00:30 2025JOBID PARTITION NAME USER ST\tTIME NODES NODELIST(REASON)3511158 accelerat train_dy tum_cte0 PD\t0:00\t 1 (Priority)3511159 accelerat train_dy tum_cte0 PD\t0:00\t 1 (Priority)3512804 accelerat interact tum_cte0 PD\t0:00\t 1 (Priority)3512647 dev_accel interact tum_cte0 CG59:17\t 1 hkn04033512808 dev_accel interact tum_cte0 PD\t0:00\t 1 (None)3512651 large preproce tum_cte0 R56:45\t 1 hkn1901",,terminal_output +240,572380,"TERMINAL",0,0,"16",,terminal_output +241,573392,"TERMINAL",0,0,"27",,terminal_output +242,574437,"TERMINAL",0,0,"38",,terminal_output +243,575547,"TERMINAL",0,0,"49",,terminal_output +244,576570,"TERMINAL",0,0,"550",,terminal_output +245,577594,"TERMINAL",0,0,"61",,terminal_output +246,578614,"TERMINAL",0,0,"72",,terminal_output +247,579674,"TERMINAL",0,0,"84",,terminal_output +248,580709,"TERMINAL",0,0,"405",,terminal_output +249,581792,"TERMINAL",0,0,"16",,terminal_output +250,582822,"TERMINAL",0,0,"27",,terminal_output +251,583943,"TERMINAL",0,0,"38",,terminal_output +252,585014,"TERMINAL",0,0,"49",,terminal_output +253,585993,"TERMINAL",0,0,"57:00",,terminal_output +254,587017,"TERMINAL",0,0,"61",,terminal_output +255,588031,"TERMINAL",0,0,"72",,terminal_output +256,589167,"TERMINAL",0,0,"83",,terminal_output +257,590126,"TERMINAL",0,0,"94",,terminal_output +258,590703,"TERMINAL",0,0,"salloc: job 3512808 has been allocated resources\r\nsalloc: Granted job allocation 3512808\r\n",,terminal_output +259,590756,"TERMINAL",0,0,"salloc: Waiting for resource configuration\r\n",,terminal_output +260,591215,"TERMINAL",0,0,"50 Rhkn04015",,terminal_output +261,592241,"TERMINAL",0,0,"116",,terminal_output +262,592786,"TERMINAL",0,0,"salloc",,terminal_focus +263,593266,"TERMINAL",0,0,"227",,terminal_output +264,594039,"TERMINAL",0,0,"so",,terminal_output +265,594136,"TERMINAL",0,0,"u",,terminal_output +266,594235,"TERMINAL",0,0,"r",,terminal_output +267,594312,"TERMINAL",0,0,"338",,terminal_output +268,594439,"TERMINAL",0,0,"c",,terminal_output +269,594505,"TERMINAL",0,0,"e",,terminal_output +270,594571,"TERMINAL",0,0," ",,terminal_output +271,594779,"TERMINAL",0,0,".",,terminal_output +272,594845,"TERMINAL",0,0,"v",,terminal_output +273,595084,"TERMINAL",0,0,"e",,terminal_output +274,595151,"TERMINAL",0,0,"b",,terminal_output +275,595350,"TERMINAL",0,0,"449",,terminal_output +276,595758,"TERMINAL",0,0," ",,terminal_output +277,595924,"TERMINAL",0,0,"n",,terminal_output +278,596133,"TERMINAL",0,0,"e",,terminal_output +279,596409,"TERMINAL",0,0," ",,terminal_output +280,596421,"TERMINAL",0,0,"5510",,terminal_output +281,596476,"TERMINAL",0,0,"v",,terminal_output +282,597146,"TERMINAL",0,0,"/",,terminal_output 
+283,597372,"TERMINAL",0,0,"b",,terminal_output +284,597458,"TERMINAL",0,0,"661",,terminal_output +285,597583,"TERMINAL",0,0,"i",,terminal_output +286,597647,"TERMINAL",0,0,"n",,terminal_output +287,598317,"TERMINAL",0,0,"/",,terminal_output +288,598445,"TERMINAL",0,0,"a",,terminal_output +289,598517,"TERMINAL",0,0,"772",,terminal_output +290,598587,"TERMINAL",0,0,"c",,terminal_output +291,598731,"TERMINAL",0,0,"t",,terminal_output +292,599247,"TERMINAL",0,0,"i",,terminal_output +293,599598,"TERMINAL",0,0,"883",,terminal_output +294,599661,"TERMINAL",0,0,"v",,terminal_output +295,599846,"TERMINAL",0,0,"a",,terminal_output +296,599919,"TERMINAL",0,0,"t",,terminal_output +297,599984,"TERMINAL",0,0,"e",,terminal_output +298,600196,"TERMINAL",0,0,"\r\n",,terminal_output +299,600635,"TERMINAL",0,0,"994",,terminal_output +300,601660,"TERMINAL",0,0,"1:00105",,terminal_output +301,602683,"TERMINAL",0,0,"127",,terminal_output +302,603812,"TERMINAL",0,0,"338",,terminal_output +303,604760,"TERMINAL",0,0,"449",,terminal_output +304,605811,"TERMINAL",0,0,"5520",,terminal_output +305,606881,"TERMINAL",0,0,"661",,terminal_output +306,607896,"TERMINAL",0,0,"\r7808 R 01651 large preproce57:2219",,terminal_output +307,609029,"TERMINAL",0,0,"883",,terminal_output +308,610066,"TERMINAL",0,0,"994",,terminal_output +309,611083,"TERMINAL",0,0,"10205",,terminal_output +310,612076,"TERMINAL",0,0,"116",,terminal_output +311,613232,"TERMINAL",0,0,"227",,terminal_output +312,614233,"TERMINAL",0,0,"338",,terminal_output +313,615295,"TERMINAL",0,0,"449",,terminal_output +314,616260,"TERMINAL",0,0,"5530",,terminal_output +315,617339,"TERMINAL",0,0,"661",,terminal_output +316,617839,"TERMINAL",0,0,"salloc: Nodes hkn0401 are ready for job\r\n",,terminal_output +317,617962,"TERMINAL",0,0,"source .venv/bin/activate\r\n",,terminal_output +318,618358,"TERMINAL",0,0,"772",,terminal_output +319,618682,"TERMINAL",0,0,"]0;tum_cte0515@hkn0401:~/Projects/jasmine[?2004h[tum_cte0515@hkn0401 jasmine]$ source .venv/bin/activate\r\n[?2004l\r]0;tum_cte0515@hkn0401:~/Projects/jasmine[?2004h(jasmine) [tum_cte0515@hkn0401 jasmine]$ ",,terminal_output +320,619404,"TERMINAL",0,0,"883",,terminal_output +321,620452,"TERMINAL",0,0,"994",,terminal_output +322,621527,"TERMINAL",0,0,"20305",,terminal_output +323,622652,"TERMINAL",0,0,"116",,terminal_output +324,623676,"TERMINAL",0,0,"227",,terminal_output +325,624659,"TERMINAL",0,0,"349",,terminal_output +326,625686,"TERMINAL",0,0,"5540",,terminal_output +327,626746,"TERMINAL",0,0,"661",,terminal_output +328,627878,"TERMINAL",0,0,"772",,terminal_output +329,628898,"TERMINAL",0,0,"883",,terminal_output +330,629922,"TERMINAL",0,0,"994",,terminal_output +331,630944,"TERMINAL",0,0,"30405",,terminal_output +332,631969,"TERMINAL",0,0,"116",,terminal_output +333,633055,"TERMINAL",0,0,"227",,terminal_output +334,634121,"TERMINAL",0,0,"338",,terminal_output +335,635148,"TERMINAL",0,0,"449",,terminal_output +336,636147,"TERMINAL",0,0,"5550",,terminal_output +337,637195,"TERMINAL",0,0,"661",,terminal_output +338,638334,"TERMINAL",0,0,"772",,terminal_output +339,639341,"TERMINAL",0,0,"883",,terminal_output +340,640368,"TERMINAL",0,0,"994",,terminal_output +341,641397,"TERMINAL",0,0,"40505",,terminal_output +342,642440,"TERMINAL",0,0,"116",,terminal_output +343,643487,"TERMINAL",0,0,"227",,terminal_output +344,644584,"TERMINAL",0,0,"338",,terminal_output +345,645608,"TERMINAL",0,0,"449",,terminal_output +346,646788,"TERMINAL",0,0,"558:00",,terminal_output +347,647654,"TERMINAL",0,0,"672",,terminal_output 
+348,648763,"TERMINAL",0,0,"883",,terminal_output +349,649741,"TERMINAL",0,0,"994",,terminal_output +350,650810,"TERMINAL",0,0,"501:005",,terminal_output +351,651835,"TERMINAL",0,0,"116",,terminal_output +352,652890,"TERMINAL",0,0,"227",,terminal_output +353,653931,"TERMINAL",0,0,"338",,terminal_output +354,654818,"TERMINAL",0,0,"s",,terminal_output +355,654887,"TERMINAL",0,0,"h",,terminal_output +356,654998,"TERMINAL",0,0," ",,terminal_output +357,654999,"TERMINAL",0,0,"449",,terminal_output +358,655892,"TERMINAL",0,0,"slurm/jobs/mihir/horeka/coinrun/sample_maskgit.sbatch",,terminal_output +359,656023,"TERMINAL",0,0,"5510",,terminal_output +360,656301,"TERMINAL",0,0,"\rslurm/jobs/mihir/horeka/coinrun/sample_maskgit.sbatch\r\n[?2004l\r",,terminal_output +361,656413,"TERMINAL",0,0,"Sampling from checkpoint: /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/big-runs/dyn/interactive/3500285\r\n",,terminal_output +362,656525,"TERMINAL",0,0,"GpuFreq=control_disabled\r\n",,terminal_output +363,657160,"TERMINAL",0,0,"661",,terminal_output +364,658187,"TERMINAL",0,0,"772",,terminal_output +365,659212,"TERMINAL",0,0,"883",,terminal_output +366,660232,"TERMINAL",0,0,"994",,terminal_output +367,661360,"TERMINAL",0,0,"2:00105",,terminal_output +368,662386,"TERMINAL",0,0,"116",,terminal_output +369,663409,"TERMINAL",0,0,"227",,terminal_output +370,664415,"TERMINAL",0,0,"338",,terminal_output +371,665461,"TERMINAL",0,0,"449",,terminal_output +372,666507,"TERMINAL",0,0,"5520",,terminal_output +373,667557,"TERMINAL",0,0,"661",,terminal_output +374,668631,"TERMINAL",0,0,"772",,terminal_output +375,669654,"TERMINAL",0,0,"894",,terminal_output +376,670781,"TERMINAL",0,0,"10205",,terminal_output +377,671803,"TERMINAL",0,0,"116",,terminal_output +378,672879,"TERMINAL",0,0,"227",,terminal_output +379,672893,"TERMINAL",0,0,"Traceback (most recent call last):\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/sample.py"", line 151, in \r\n restored = checkpoint_manager.restore(\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/.venv/lib/python3.10/site-packages/orbax/checkpoint/checkpoint_manager.py"", line 1666, in restore\r\n restored = self._checkpointer.restore(restore_directory, args=args)\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/.venv/lib/python3.10/site-packages/orbax/checkpoint/_src/checkpointers/async_checkpointer.py"", line 571, in restore\r\n return super().restore(directory, *args, **kwargs)\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/.venv/lib/python3.10/site-packages/orbax/checkpoint/_src/checkpointers/checkpointer.py"", line 306, in restore\r\n restored = self._restore(directory, args=ckpt_args)\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/.venv/lib/python3.10/site-packages/orbax/checkpoint/_src/checkpointers/checkpointer.py"", line 328, in _restore\r\n return self._handler.restore(directory, args=args)\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/.venv/lib/python3.10/site-packages/orbax/checkpoint/_src/handlers/composite_checkpoint_handler.py"", line 857, in restore\r\n restored[item_name] = handler.restore(\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/.venv/lib/python3.10/site-packages/orbax/checkpoint/_src/handlers/pytree_checkpoint_handler.py"", line 835, in restore\r\n return self._handler_impl.restore(directory, args=args)\r\n File 
""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/.venv/lib/python3.10/site-packages/orbax/checkpoint/_src/handlers/base_pytree_checkpoint_handler.py"", line 945, in restore\r\n raise ValueError(\r\nValueError: User-provided restore item and on-disk value metadata tree structures do not match: {'model': {'lam': Diff(lhs={'action_in': {'value': ShapeDtypeStruct(shape=(1, 1, 1, 768), dtype=float32)}, 'action_up': {'bias': {'value': ShapeDtypeStruct(shape=(512,), dtype=float32)}, 'kernel': {'value': ShapeDtypeStruct(shape=(32, 512), dtype=float32)}}, 'encoder': {'blocks': {'0': {'ffn_dense1': {'bias': {'value': ShapeDtypeStruct(shape=(2048,), dtype=float32)}, 'kernel': {'value': ShapeDtypeStruct(shape=(512, 2048), dtype=float32)}}, 'ffn_dense2': {'bias': {'value': ShapeDtypeStruct(shape=(512,), dtype=float32)}, 'kernel': {'value': ShapeDtypeStruct(shape=(2048, 512), dtype=float32)}}, 'ffn_norm': {'bias': {'value': ShapeDtypeStruct(shape=(512,), dtype=float32)}, 'scale': {'value': ShapeDtypeStruct(shape=(512,), dtype=float32)}}, 'spatial_attention': {'key': {'bias': {'value': ShapeDtypeStruct(shape=(8, 64), dtype=float32)}, 'kernel': {'value': ShapeDtypeStruct(shape=(512, 8, 64), dtype=float32)}}, 'out': {'bias': {'value': ShapeDtypeStruct(shape=(512,), dtype=float32)}, 'kernel': {'value': ShapeDtypeStruct(shape=(8, 64, 512), dtype=float32)}}, 'query': {'bias': {'value': ShapeDtypeStruct(shape=(8, 64), dtype=float32)}, 'kernel': {'value': ShapeDtypeStruct(shape=(512, 8, 64), dtype=float32)}}, 'value': {'bias': {'value': ShapeDtypeStruct(shape=(8, 64), dtype=float32)}, 'kernel': {'value': ShapeDtypeStruct(shape=(512, 8, 64), dtype=float32)}}}, 'spatial_norm': {'bias': {'value': ShapeDtypeStruct(shape=(512,), dtype=float32)}, 'scale': {'value': ShapeDtypeStruct(shape=(512,), dtype=float32)}}, 'temporal_attention': {'key': {'bias': {'value': ShapeDtypeStruct(shape=(8, 64), dtype=float32)}, 'kernel': {'value': ShapeDtypeStruct(shape=(512, 8, 64), dtype=float32)}}, 'out': {'bias': {'value': ShapeDtypeStruct(shape=(512,), dtype=float32)}, 'kernel': {'value': ShapeDtypeStruct(shape=(8, 64, 512), dtype=float32)}}, 'query': {'bias': {'value': ShapeDtypeStruct(shape=(8, 64), dtype=float32)}, 'kernel': {'value': ShapeDtypeStruct(shape=(512, 8, 64), dtype=float32)}}, 'value': {'bias': {'value': ShapeDtypeStruct(shape=(8, 64), dtype=float32)}, 'kernel': {'value': ShapeDtypeStruct(shape=(512, 8, 64), dtype=float32)}}}, 'temporal_norm': {'bias': {'value': ShapeDtypeStruct(shape=(512,), dtype=float32)}, 'scale': {'value': ShapeDtypeStruct(shape=(512,), dtype=float32)}}}, '1': {'ffn_dense1': {'bias': {'value': ShapeDtypeStruct(shape=(2048,), dtype=float32)}, 'kernel': {'value': ShapeDtypeStruct(shape=(512, 2048), dtype=float32)}}, 'ffn_dense2': {'bias': {'value': ShapeDtypeStruct(shape=(512,), dtype=float32)}, 'kernel': {'value': ShapeDtypeStruct(shape=(2048, 512), dtype=float32)}}, 'ffn_norm': {'bias': {'value': ShapeDtypeStruct(shape=(512,), dtype=float32)}, 'scale': {'value': ShapeDtypeStruct(shape=(512,), dtype=float32)}}, 'spatial_attention': {'key': {'bias': {'value': ShapeDtypeStruct(shape=(8, 64), dtype=float32)}, 'kernel': {'value': ShapeDtypeStruct(shape=(512, 8, 64), dtype=float32)}}, 'out': {'bias': {'value': ShapeDtypeStruct(shape=(512,), dtype=float32)}, 'kernel': {'value': ShapeDtypeStruct(shape=(8, 64, 512), dtype=float32)}}, 'query': {'bias': {'value': ShapeDtypeStruct(shape=(8, 64), dtype=float32)}, 'kernel': {'value': ShapeDtypeStruct(shape=(512, 8, 64), dtype=float32)}}, 
'value': {'bias': {'value': ShapeDtypeStruct(shape=(8, 64), dtype=float32)}, 'kernel': {'value': ShapeDtypeStruct(shape=(512, 8, 64), dtype=float32)}}}, 'spatial_norm': {'bias': {'value': ShapeDtypeStruct(shape=(512,), dtype=float32)}, 'scale': {'value': ShapeDtypeStruct(shape=(512,), dtype=float32)}}, 'temporal_attention': {'key': {'bias': {'value': ShapeDtypeStruct(shape=(8, 64), dtype=float32)}, 'kernel': {'value': ShapeDtypeStruct(shape=(512, 8, 64), dtype=float32)}}, 'out': {'bias': {'value': ShapeDtypeStruct(shape=(512,), dtype=float32)}, 'kernel': {'value': ShapeDtypeStruct(shape=(8, 64, 512), dtype=float32)}}, 'query': {'bias': {'value': ShapeDtypeStruct(shape=(8, 64), dtype=float32)}, 'kernel': {'value': ShapeDtypeStruct(shape=(512, 8, 64), dtype=float32)}}, 'value': {'bias': {'value': ShapeDtypeStruct(shape=(8, 64), dtype=float32)}, 'kernel': {'value': ShapeDtypeStruct(shape=(512, 8, 64), dtype=float32)}}}, 'temporal_norm': {'bias': {'value': ShapeDtypeStruct(shape=(512,), dtype=float32)}, 'scale': {'value': ShapeDtypeStruct(shape=(512,), dtype=float32)}}}, '2': {'ffn_dense1': {'bias': {'value': ShapeDtypeStruct(shape=(2048,), dtype=float32)}, 'kernel': {'value': ShapeDtypeStruct(shape=(512, 2048), dtype=float32)}}, 'ffn_dense2': {'bias': {'value': ShapeDtypeStruct(shape=(512,), dtype=float32)}, 'kernel': {'value': ShapeDtypeStruct(shape=(2048, 512), dtype=float32)}}, 'ffn_norm': {'bias': {'value': ShapeDtypeStruct(shape=(512,), dtype=float32)}, 'scale': {'value': ShapeDtypeStruct(shape=(512,), dtype=float32)}}, 'spatial_attention': {'key': {'bias': {'value': ShapeDtypeStruct(shape=(8, 64), dtype=float32)}, 'kernel': {'value': ShapeDtypeStruct(shape=(512, 8, 64), dtype=float32)}}, 'out': {'bias': {'value': ShapeDtypeStruct(shape=(512,), dtype=float32)}, 'kernel': {'value': ShapeDtypeStruct(shape=(8, 64, 512), dtype=float32)}}, 'query': {'bias': {'value': ShapeDtypeStruct(shape=(8, 64), dtype=float32)}, 'kernel': {'value': ShapeDtypeStruct(shape=(512, 8, 64), dtype=float32)}}, 'value': {'bias': {'value': ShapeDtypeStruct(shape=(8, 64), dtype=float32)}, 'kernel': {'value': ShapeDtypeStruct(shape=(512, 8, 64), dtype=float32)}}}, 'spatial_norm': {'bias': {'value': ShapeDtypeStruct(shape=(512,), dtype=float32)}, 'scale': {'value': ShapeDtypeStruct(shape=(512,), dtype=float32)}}, 'temporal_attention': {'key': {'bias': {'value': ShapeDtypeStruct(shape=(8, 64), dtype=float32)}, 'kernel': {'value': ShapeDtypeStruct(shape=(512, 8, 64), dtype=float32)}}, 'out': {'bias': {'value': ShapeDtypeStruct(shape=(512,), dtype=float32)}, 'kernel': {'value': ShapeDtypeStruct(shape=(8, 64, 512), dtype=float32)}}, 'query': {'bias': {'value': ShapeDtypeStruct(shape=(8, 64), dtype=float32)}, 'kernel': {'value': ShapeDtypeStruct(shape=(512, 8, 64), dtype=float32)}}, 'value': {'bias': {'value': ShapeDtypeStruct(shape=(8, 64), dtype=float32)}, 'kernel': {'value': ShapeDtypeStruct(shape=(512, 8, 64), dtype=float32)}}}, 'temporal_norm': {'bias': {'value': ShapeDtypeStruct(shape=(512,), dtype=float32)}, 'scale': {'value': ShapeDtypeStruct(shape=(512,), dtype=float32)}}}, '3': {'ffn_dense1': {'bias': {'value': ShapeDtypeStruct(shape=(2048,), dtype=float32)}, 'kernel': {'value': ShapeDtypeStruct(shape=(512, 2048), dtype=float32)}}, 'ffn_dense2': {'bias': {'value': ShapeDtypeStruct(shape=(512,), dtype=float32)}, 'kernel': {'value': ShapeDtypeStruct(shape=(2048, 512), dtype=float32)}}, 'ffn_norm': {'bias': {'value': ShapeDtypeStruct(shape=(512,), dtype=float32)}, 'scale': {'value': ShapeDtypeStruct(shape=(512,), 
dtype=float32)}}, 'spatial_attention': {'key': {'bias': {'value': ShapeDtypeStruct(shape=(8, 64), dtype=float32)}, 'kernel': {'value': ShapeDtypeStruct(shape=(512, 8, 64), dtype=float32)}}, 'out': {'bias': {'value': ShapeDtypeStruct(shape=(512,), dtype=float32)}, 'kernel': {'value': ShapeDtypeStruct(shape=(8, 64, 512), dtype=float32)}}, 'query': {'bias': {'value': ShapeDtypeStruct(shape=(8, 64), dtype=float32)}, 'kernel': {'value': ShapeDtypeStruct(shape=(512, 8, 64), dtype=float32)}}, 'value': {'bias': {'value': ShapeDtypeStruct(shape=(8, 64), dtype=float32)}, 'kernel': {'value': ShapeDtypeStruct(shape=(512, 8, 64), dtype=float32)}}}, 'spatial_norm': {'bias': {'value': ShapeDtypeStruct(shape=(512,), dtype=float32)}, 'scale': {'value': ShapeDtypeStruct(shape=(512,), dtype=float32)}}, 'temporal_attention': {'key': {'bias': {'value': ShapeDtypeStruct(shape=(8, 64), dtype=float32)}, 'kernel': {'value': ShapeDtypeStruct(shape=(512, 8, 64), dtype=float32)}}, 'out': {'bias': {'value': ShapeDtypeStruct(shape=(512,), dtype=float32)}, 'kernel': {'value': ShapeDtypeStruct(shape=(8, 64, 512), dtype=float32)}}, 'query': {'bias': {'value': ShapeDtypeStruct(shape=(8, 64), dtype=float32)}, 'kernel': {'value': ShapeDtypeStruct(shape=(512, 8, 64), dtype=float32)}}, 'value': {'bias': {'value': ShapeDtypeStruct(shape=(8, 64), dtype=float32)}, 'kernel': {'value': ShapeDtypeStruct(shape=(512, 8, 64), dtype=float32)}}}, 'temporal_norm': {'bias': {'value': ShapeDtypeStruct(shape=(512,), dtype=float32)}, 'scale': {'value': ShapeDtypeStruct(shape=(512,), dtype=float32)}}}}, 'input_dense': {'bias': {'value': ShapeDtypeStruct(shape=(512,), dtype=float32)}, 'kernel': {'value': ShapeDtypeStruct(shape=(768, 512), dtype=float32)}}, 'input_norm1': {'bias': {'value': ShapeDtypeStruct(shape=(768,), dtype=float32)}, 'scale': {'value': ShapeDtypeStruct(shape=(768,), dtype=float32)}}, 'input_norm2': {'bias': {'value': ShapeDtypeStruct(shape=(512,), dtype=float32)}, 'scale': {'value': ShapeDtypeStruct(shape=(512,), dtype=float32)}}, 'output_dense': {'bias': {'value': ShapeDtypeStruct(shape=(32,), dtype=float32)}, 'kernel': {'value': ShapeDtypeStruct(shape=(512, 32), dtype=float32)}}}, 'patch_up': {'bias': {'value': ShapeDtypeStruct(shape=(512,), dtype=float32)}, 'kernel': {'value': ShapeDtypeStruct(shape=(768, 512), dtype=float32)}}, 'vq': {'codebook': {'value': ShapeDtypeStruct(shape=(6, 32), dtype=float32)}, 'drop': {'rngs': {'default': {'count': {'value': ShapeDtypeStruct(shape=(), dtype=uint32)}, 'key': {'value': ShapeDtypeStruct(shape=(), dtype=key)}}}}}}, rhs=None)}, 'opt_state': {'0': {'mu': {'lam': Diff(lhs={'action_in': {'value': ShapeDtypeStruct(shape=(1, 1, 1, 768), dtype=bfloat16)}, 'action_up': {'bias': {'value': ShapeDtypeStruct(shape=(512,), dtype=bfloat16)}, 'kernel': {'value': ShapeDtypeStruct(shape=(32, 512), dtype=bfloat16)}}, 'encoder': {'blocks': {'0': {'ffn_dense1': {'bias': {'value': ShapeDtypeStruct(shape=(2048,), dtype=bfloat16)}, 'kernel': {'value': ShapeDtypeStruct(shape=(512, 2048), dtype=bfloat16)}}, 'ffn_dense2': {'bias': {'value': ShapeDtypeStruct(shape=(512,), dtype=bfloat16)}, 'kernel': {'value': ShapeDtypeStruct(shape=(2048, 512), dtype=bfloat16)}}, 'ffn_norm': {'bias': {'value': ShapeDtypeStruct(shape=(512,), dtype=bfloat16)}, 'scale': {'value': ShapeDtypeStruct(shape=(512,), dtype=bfloat16)}}, 'spatial_attention': {'key': {'bias': {'value': ShapeDtypeStruct(shape=(8, 64), dtype=bfloat16)}, 'kernel': {'value': ShapeDtypeStruct(shape=(512, 8, 64), dtype=bfloat16)}}, 'out': {'bias': {'value': 
ShapeDtypeStruct(shape=(512,), dtype=bfloat16)}, 'kernel': {'value': ShapeDtypeStruct(shape=(8, 64, 512), dtype=bfloat16)}}, 'query': {'bias': {'value': ShapeDtypeStruct(shape=(8, 64), dtype=bfloat16)}, 'kernel': {'value': ShapeDtypeStruct(shape=(512, 8, 64), dtype=bfloat16)}}, 'value': {'bias': {'value': ShapeDtypeStruct(shape=(8, 64), dtype=bfloat16)}, 'kernel': {'value': ShapeDtypeStruct(shape=(512, 8, 64), dtype=bfloat16)}}}, 'spatial_norm': {'bias': {'value': ShapeDtypeStruct(shape=(512,), dtype=bfloat16)}, 'scale': {'value': ShapeDtypeStruct(shape=(512,), dtype=bfloat16)}}, 'temporal_attention': {'key': {'bias': {'value': ShapeDtypeStruct(shape=(8, 64), dtype=bfloat16)}, 'kernel': {'value': ShapeDtypeStruct(shape=(512, 8, 64), dtype=bfloat16)}}, 'out': {'bias': {'value': ShapeDtypeStruct(shape=(512,), dtype=bfloat16)}, 'kernel': {'value': ShapeDtypeStruct(shape=(8, 64, 512), dtype=bfloat16)}}, 'query': {'bias': {'value': ShapeDtypeStruct(shape=(8, 64), dtype=bfloat16)}, 'kernel': {'value': ShapeDtypeStruct(shape=(512, 8, 64), dtype=bfloat16)}}, 'value': {'bias': {'value': ShapeDtypeStruct(shape=(8, 64), dtype=bfloat16)}, 'kernel': {'value': ShapeDtypeStruct(shape=(512, 8, 64), dtype=bfloat16)}}}, 'temporal_norm': {'bias': {'value': ShapeDtypeStruct(shape=(512,), dtype=bfloat16)}, 'scale': {'value': ShapeDtypeStruct(shape=(512,), dtype=bfloat16)}}}, '1': {'ffn_dense1': {'bias': {'value': ShapeDtypeStruct(shape=(2048,), dtype=bfloat16)}, 'kernel': {'value': ShapeDtypeStruct(shape=(512, 2048), dtype=bfloat16)}}, 'ffn_dense2': {'bias': {'value': ShapeDtypeStruct(shape=(512,), dtype=bfloat16)}, 'kernel': {'value': ShapeDtypeStruct(shape=(2048, 512), dtype=bfloat16)}}, 'ffn_norm': {'bias': {'value': ShapeDtypeStruct(shape=(512,), dtype=bfloat16)}, 'scale': {'value': ShapeDtypeStruct(shape=(512,), dtype=bfloat16)}}, 'spatial_attention': {'key': {'bias': {'value': ShapeDtypeStruct(shape=(8, 64), dtype=bfloat16)}, 'kernel': {'value': ShapeDtypeStruct(shape=(512, 8, 64), dtype=bfloat16)}}, 'out': {'bias': {'value': ShapeDtypeStruct(shape=(512,), dtype=bfloat16)}, 'kernel': {'value': ShapeDtypeStruct(shape=(8, 64, 512), dtype=bfloat16)}}, 'query': {'bias': {'value': ShapeDtypeStruct(shape=(8, 64), dtype=bfloat16)}, 'kernel': {'value': ShapeDtypeStruct(shape=(512, 8, 64), dtype=bfloat16)}}, 'value': {'bias': {'value': ShapeDtypeStruct(shape=(8, 64), dtype=bfloat16)}, 'kernel': {'value': ShapeDtypeStruct(shape=(512, 8, 64), dtype=bfloat16)}}}, 'spatial_norm': {'bias': {'value': ShapeDtypeStruct(shape=(512,), dtype=bfloat16)}, 'scale': {'value': ShapeDtypeStruct(shape=(512,), dtype=bfloat16)}}, 'temporal_attention': {'key': {'bias': {'value': ShapeDtypeStruct(shape=(8, 64), dtype=bfloat16)}, 'kernel': {'value': ShapeDtypeStruct(shape=(512, 8, 64), dtype=bfloat16)}}, 'out': {'bias': {'value': ShapeDtypeStruct(shape=(512,), dtype=bfloat16)}, 'kernel': {'value': ShapeDtypeStruct(shape=(8, 64, 512), dtype=bfloat16)}}, 'query': {'bias': {'value': ShapeDtypeStruct(shape=(8, 64), dtype=bfloat16)}, 'kernel': {'value': ShapeDtypeStruct(shape=(512, 8, 64), dtype=bfloat16)}}, 'value': {'bias': {'value': ShapeDtypeStruct(shape=(8, 64), dtype=bfloat16)}, 'kernel': {'value': ShapeDtypeStruct(shape=(512, 8, 64), dtype=bfloat16)}}}, 'temporal_norm': {'bias': {'value': ShapeDtypeStruct(shape=(512,), dtype=bfloat16)}, 'scale': {'value': ShapeDtypeStruct(shape=(512,), dtype=bfloat16)}}}, '2': {'ffn_dense1': {'bias': {'value': ShapeDtypeStruct(shape=(2048,), dtype=bfloat16)}, 'kernel': {'value': 
ShapeDtypeStruct(shape=(512, 2048), dtype=bfloat16)}}, 'ffn_dense2': {'bias': {'value': ShapeDtypeStruct(shape=(512,), dtype=bfloat16)}, 'kernel': {'value': ShapeDtypeStruct(shape=(2048, 512), dtype=bfloat16)}}, 'ffn_norm': {'bias': {'value': ShapeDtypeStruct(shape=(512,), dtype=bfloat16)}, 'scale': {'value': ShapeDtypeStruct(shape=(512,), dtype=bfloat16)}}, 'spatial_attention': {'key': {'bias': {'value': ShapeDtypeStruct(shape=(8, 64), dtype=bfloat16)}, 'kernel': {'value': ShapeDtypeStruct(shape=(512, 8, 64), dtype=bfloat16)}}, 'out': {'bias': {'value': ShapeDtypeStruct(shape=(512,), dtype=bfloat16)}, 'kernel': {'value': ShapeDtypeStruct(shape=(8, 64, 512), dtype=bfloat16)}}, 'query': {'bias': {'value': ShapeDtypeStruct(shape=(8, 64), dtype=bfloat16)}, 'kernel': {'value': ShapeDtypeStruct(shape=(512, 8, 64), dtype=bfloat16)}}, 'value': {'bias': {'value': ShapeDtypeStruct(shape=(8, 64), dtype=bfloat16)}, 'kernel': {'value': ShapeDtypeStruct(shape=(512, 8, 64), dtype=bfloat16)}}}, 'spatial_norm': {'bias': {'value': ShapeDtypeStruct(shape=(512,), dtype=bfloat16)}, 'scale': {'value': ShapeDtypeStruct(shape=(512,), dtype=bfloat16)}}, 'temporal_attention': {'key': {'bias': {'value': ShapeDtypeStruct(shape=(8, 64), dtype=bfloat16)}, 'kernel': {'value': ShapeDtypeStruct(shape=(512, 8, 64), dtype=bfloat16)}}, 'out': {'bias': {'value': ShapeDtypeStruct(shape=(512,), dtype=bfloat16)}, 'kernel': {'value': ShapeDtypeStruct(shape=(8, 64, 512), dtype=bfloat16)}}, 'query': {'bias': {'value': ShapeDtypeStruct(shape=(8, 64), dtype=bfloat16)}, 'kernel': {'value': ShapeDtypeStruct(shape=(512, 8, 64), dtype=bfloat16)}}, 'value': {'bias': {'value': ShapeDtypeStruct(shape=(8, 64), dtype=bfloat16)}, 'kernel': {'value': ShapeDtypeStruct(shape=(512, 8, 64), dtype=bfloat16)}}}, 'temporal_norm': {'bias': {'value': ShapeDtypeStruct(shape=(512,), dtype=bfloat16)}, 'scale': {'value': ShapeDtypeStruct(shape=(512,), dtype=bfloat16)}}}, '3': {'ffn_dense1': {'bias': {'value': ShapeDtypeStruct(shape=(2048,), dtype=bfloat16)}, 'kernel': {'value': ShapeDtypeStruct(shape=(512, 2048), dtype=bfloat16)}}, 'ffn_dense2': {'bias': {'value': ShapeDtypeStruct(shape=(512,), dtype=bfloat16)}, 'kernel': {'value': ShapeDtypeStruct(shape=(2048, 512), dtype=bfloat16)}}, 'ffn_norm': {'bias': {'value': ShapeDtypeStruct(shape=(512,), dtype=bfloat16)}, 'scale': {'value': ShapeDtypeStruct(shape=(512,), dtype=bfloat16)}}, 'spatial_attention': {'key': {'bias': {'value': ShapeDtypeStruct(shape=(8, 64), dtype=bfloat16)}, 'kernel': {'value': ShapeDtypeStruct(shape=(512, 8, 64), dtype=bfloat16)}}, 'out': {'bias': {'value': ShapeDtypeStruct(shape=(512,), dtype=bfloat16)}, 'kernel': {'value': ShapeDtypeStruct(shape=(8, 64, 512), dtype=bfloat16)}}, 'query': {'bias': {'value': ShapeDtypeStruct(shape=(8, 64), dtype=bfloat16)}, 'kernel': {'value': ShapeDtypeStruct(shape=(512, 8, 64), dtype=bfloat16)}}, 'value': {'bias': {'value': ShapeDtypeStruct(shape=(8, 64), dtype=bfloat16)}, 'kernel': {'value': ShapeDtypeStruct(shape=(512, 8, 64), dtype=bfloat16)}}}, 'spatial_norm': {'bias': {'value': ShapeDtypeStruct(shape=(512,), dtype=bfloat16)}, 'scale': {'value': ShapeDtypeStruct(shape=(512,), dtype=bfloat16)}}, 'temporal_attention': {'key': {'bias': {'value': ShapeDtypeStruct(shape=(8, 64), dtype=bfloat16)}, 'kernel': {'value': ShapeDtypeStruct(shape=(512, 8, 64), dtype=bfloat16)}}, 'out': {'bias': {'value': ShapeDtypeStruct(shape=(512,), dtype=bfloat16)}, 'kernel': {'value': ShapeDtypeStruct(shape=(8, 64, 512), dtype=bfloat16)}}, 'query': {'bias': {'value': 
ShapeDtypeStruct(shape=(8, 64), dtype=bfloat16)}, 'kernel': {'value': ShapeDtypeStruct(shape=(512, 8, 64), dtype=bfloat16)}}, 'value': {'bias': {'value': ShapeDtypeStruct(shape=(8, 64), dtype=bfloat16)}, 'kernel': {'value': ShapeDtypeStruct(shape=(512, 8, 64), dtype=bfloat16)}}}, 'temporal_norm': {'bias': {'value': ShapeDtypeStruct(shape=(512,), dtype=bfloat16)}, 'scale': {'value': ShapeDtypeStruct(shape=(512,), dtype=bfloat16)}}}}, 'input_dense': {'bias': {'value': ShapeDtypeStruct(shape=(512,), dtype=bfloat16)}, 'kernel': {'value': ShapeDtypeStruct(shape=(768, 512), dtype=bfloat16)}}, 'input_norm1': {'bias': {'value': ShapeDtypeStruct(shape=(768,), dtype=bfloat16)}, 'scale': {'value': ShapeDtypeStruct(shape=(768,), dtype=bfloat16)}}, 'input_norm2': {'bias': {'value': ShapeDtypeStruct(shape=(512,), dtype=bfloat16)}, 'scale': {'value': ShapeDtypeStruct(shape=(512,), dtype=bfloat16)}}, 'output_dense': {'bias': {'value': ShapeDtypeStruct(shape=(32,), dtype=bfloat16)}, 'kernel': {'value': ShapeDtypeStruct(shape=(512, 32), dtype=bfloat16)}}}, 'patch_up': {'bias': {'value': ShapeDtypeStruct(shape=(512,), dtype=bfloat16)}, 'kernel': {'value': ShapeDtypeStruct(shape=(768, 512), dtype=bfloat16)}}, 'vq': {'codebook': {'value': ShapeDtypeStruct(shape=(6, 32), dtype=bfloat16)}}}, rhs=None)}, 'nu': {'lam': Diff(lhs={'action_in': {'value': ShapeDtypeStruct(shape=(1, 1, 1, 768), dtype=float32)}, 'action_up': {'bias': {'value': ShapeDtypeStruct(shape=(512,), dtype=float32)}, 'kernel': {'value': ShapeDtypeStruct(shape=(32, 512), dtype=float32)}}, 'encoder': {'blocks': {'0': {'ffn_dense1': {'bias': {'value': ShapeDtypeStruct(shape=(2048,), dtype=float32)}, 'kernel': {'value': ShapeDtypeStruct(shape=(512, 2048), dtype=float32)}}, 'ffn_dense2': {'bias': {'value': ShapeDtypeStruct(shape=(512,), dtype=float32)}, 'kernel': {'value': ShapeDtypeStruct(shape=(2048, 512), dtype=float32)}}, 'ffn_norm': {'bias': {'value': ShapeDtypeStruct(shape=(512,), dtype=float32)}, 'scale': {'value': ShapeDtypeStruct(shape=(512,), dtype=float32)}}, 'spatial_attention': {'key': {'bias': {'value': ShapeDtypeStruct(shape=(8, 64), dtype=float32)}, 'kernel': {'value': ShapeDtypeStruct(shape=(512, 8, 64), dtype=float32)}}, 'out': {'bias': {'value': ShapeDtypeStruct(shape=(512,), dtype=float32)}, 'kernel': {'value': ShapeDtypeStruct(shape=(8, 64, 512), dtype=float32)}}, 'query': {'bias': {'value': ShapeDtypeStruct(shape=(8, 64), dtype=float32)}, 'kernel': {'value': ShapeDtypeStruct(shape=(512, 8, 64), dtype=float32)}}, 'value': {'bias': {'value': ShapeDtypeStruct(shape=(8, 64), dtype=float32)}, 'kernel': {'value': ShapeDtypeStruct(shape=(512, 8, 64), dtype=float32)}}}, 'spatial_norm': {'bias': {'value': ShapeDtypeStruct(shape=(512,), dtype=float32)}, 'scale': {'value': ShapeDtypeStruct(shape=(512,), dtype=float32)}}, 'temporal_attention': {'key': {'bias': {'value': ShapeDtypeStruct(shape=(8, 64), dtype=float32)}, 'kernel': {'value': ShapeDtypeStruct(shape=(512, 8, 64), dtype=float32)}}, 'out': {'bias': {'value': ShapeDtypeStruct(shape=(512,), dtype=float32)}, 'kernel': {'value': ShapeDtypeStruct(shape=(8, 64, 512), dtype=float32)}}, 'query': {'bias': {'value': ShapeDtypeStruct(shape=(8, 64), dtype=float32)}, 'kernel': {'value': ShapeDtypeStruct(shape=(512, 8, 64), dtype=float32)}}, 'value': {'bias': {'value': ShapeDtypeStruct(shape=(8, 64), dtype=float32)}, 'kernel': {'value': ShapeDtypeStruct(shape=(512, 8, 64), dtype=float32)}}}, 'temporal_norm': {'bias': {'value': ShapeDtypeStruct(shape=(512,), dtype=float32)}, 'scale': {'value': 
ShapeDtypeStruct(shape=(512,), dtype=float32)}}}, '1': {'ffn_dense1': {'bias': {'value': ShapeDtypeStruct(shape=(2048,), dtype=float32)}, 'kernel': {'value': ShapeDtypeStruct(shape=(512, 2048), dtype=float32)}}, 'ffn_dense2': {'bias': {'value': ShapeDtypeStruct(shape=(512,), dtype=float32)}, 'kernel': {'value': ShapeDtypeStruct(shape=(2048, 512), dtype=float32)}}, 'ffn_norm': {'bias': {'value': ShapeDtypeStruct(shape=(512,), dtype=float32)}, 'scale': {'value': ShapeDtypeStruct(shape=(512,), dtype=float32)}}, 'spatial_attention': {'key': {'bias': {'value': ShapeDtypeStruct(shape=(8, 64), dtype=float32)}, 'kernel': {'value': ShapeDtypeStruct(shape=(512, 8, 64), dtype=float32)}}, 'out': {'bias': {'value': ShapeDtypeStruct(shape=(512,), dtype=float32)}, 'kernel': {'value': ShapeDtypeStruct(shape=(8, 64, 512), dtype=float32)}}, 'query': {'bias': {'value': ShapeDtypeStruct(shape=(8, 64), dtype=float32)}, 'kernel': {'value': ShapeDtypeStruct(shape=(512, 8, 64), dtype=float32)}}, 'value': {'bias': {'value': ShapeDtypeStruct(shape=(8, 64), dtype=float32)}, 'kernel': {'value': ShapeDtypeStruct(shape=(512, 8, 64), dtype=float32)}}}, 'spatial_norm': {'bias': {'value': ShapeDtypeStruct(shape=(512,), dtype=float32)}, 'scale': {'value': ShapeDtypeStruct(shape=(512,), dtype=float32)}}, 'temporal_attention': {'key': {'bias': {'value': ShapeDtypeStruct(shape=(8, 64), dtype=float32)}, 'kernel': {'value': ShapeDtypeStruct(shape=(512, 8, 64), dtype=float32)}}, 'out': {'bias': {'value': ShapeDtypeStruct(shape=(512,), dtype=float32)}, 'kernel': {'value': ShapeDtypeStruct(shape=(8, 64, 512), dtype=float32)}}, 'query': {'bias': {'value': ShapeDtypeStruct(shape=(8, 64), dtype=float32)}, 'kernel': {'value': ShapeDtypeStruct(shape=(512, 8, 64), dtype=float32)}}, 'value': {'bias': {'value': ShapeDtypeStruct(shape=(8, 64), dtype=float32)}, 'kernel': {'value': ShapeDtypeStruct(shape=(512, 8, 64), dtype=float32)}}}, 'temporal_norm': {'bias': {'value': ShapeDtypeStruct(shape=(512,), dtype=float32)}, 'scale': {'value': ShapeDtypeStruct(shape=(512,), dtype=float32)}}}, '2': {'ffn_dense1': {'bias': {'value': ShapeDtypeStruct(shape=(2048,), dtype=float32)}, 'kernel': {'value': ShapeDtypeStruct(shape=(512, 2048), dtype=float32)}}, 'ffn_dense2': {'bias': {'value': ShapeDtypeStruct(shape=(512,), dtype=float32)}, 'kernel': {'value': ShapeDtypeStruct(shape=(2048, 512), dtype=float32)}}, 'ffn_norm': {'bias': {'value': ShapeDtypeStruct(shape=(512,), dtype=float32)}, 'scale': {'value': ShapeDtypeStruct(shape=(512,), dtype=float32)}}, 'spatial_attention': {'key': {'bias': {'value': ShapeDtypeStruct(shape=(8, 64), dtype=float32)}, 'kernel': {'value': ShapeDtypeStruct(shape=(512, 8, 64), dtype=float32)}}, 'out': {'bias': {'value': ShapeDtypeStruct(shape=(512,), dtype=float32)}, 'kernel': {'value': ShapeDtypeStruct(shape=(8, 64, 512), dtype=float32)}}, 'query': {'bias': {'value': ShapeDtypeStruct(shape=(8, 64), dtype=float32)}, 'kernel': {'value': ShapeDtypeStruct(shape=(512, 8, 64), dtype=float32)}}, 'value': {'bias': {'value': ShapeDtypeStruct(shape=(8, 64), dtype=float32)}, 'kernel': {'value': ShapeDtypeStruct(shape=(512, 8, 64), dtype=float32)}}}, 'spatial_norm': {'bias': {'value': ShapeDtypeStruct(shape=(512,), dtype=float32)}, 'scale': {'value': ShapeDtypeStruct(shape=(512,), dtype=float32)}}, 'temporal_attention': {'key': {'bias': {'value': ShapeDtypeStruct(shape=(8, 64), dtype=float32)}, 'kernel': {'value': ShapeDtypeStruct(shape=(512, 8, 64), dtype=float32)}}, 'out': {'bias': {'value': ShapeDtypeStruct(shape=(512,), 
dtype=float32)}, 'kernel': {'value': ShapeDtypeStruct(shape=(8, 64, 512), dtype=float32)}}, 'query': {'bias': {'value': ShapeDtypeStruct(shape=(8, 64), dtype=float32)}, 'kernel': {'value': ShapeDtypeStruct(shape=(512, 8, 64), dtype=float32)}}, 'value': {'bias': {'value': ShapeDtypeStruct(shape=(8, 64), dtype=float32)}, 'kernel': {'value': ShapeDtypeStruct(shape=(512, 8, 64), dtype=float32)}}}, 'temporal_norm': {'bias': {'value': ShapeDtypeStruct(shape=(512,), dtype=float32)}, 'scale': {'value': ShapeDtypeStruct(shape=(512,), dtype=float32)}}}, '3': {'ffn_dense1': {'bias': {'value': ShapeDtypeStruct(shape=(2048,), dtype=float32)}, 'kernel': {'value': ShapeDtypeStruct(shape=(512, 2048), dtype=float32)}}, 'ffn_dense2': {'bias': {'value': ShapeDtypeStruct(shape=(512,), dtype=float32)}, 'kernel': {'value': ShapeDtypeStruct(shape=(2048, 512), dtype=float32)}}, 'ffn_norm': {'bias': {'value': ShapeDtypeStruct(shape=(512,), dtype=float32)}, 'scale': {'value': ShapeDtypeStruct(shape=(512,), dtype=float32)}}, 'spatial_attention': {'key': {'bias': {'value': ShapeDtypeStruct(shape=(8, 64), dtype=float32)}, 'kernel': {'value': ShapeDtypeStruct(shape=(512, 8, 64), dtype=float32)}}, 'out': {'bias': {'value': ShapeDtypeStruct(shape=(512,), dtype=float32)}, 'kernel': {'value': ShapeDtypeStruct(shape=(8, 64, 512), dtype=float32)}}, 'query': {'bias': {'value': ShapeDtypeStruct(shape=(8, 64), dtype=float32)}, 'kernel': {'value': ShapeDtypeStruct(shape=(512, 8, 64), dtype=float32)}}, 'value': {'bias': {'value': ShapeDtypeStruct(shape=(8, 64), dtype=float32)}, 'kernel': {'value': ShapeDtypeStruct(shape=(512, 8, 64), dtype=float32)}}}, 'spatial_norm': {'bias': {'value': ShapeDtypeStruct(shape=(512,), dtype=float32)}, 'scale': {'value': ShapeDtypeStruct(shape=(512,), dtype=float32)}}, 'temporal_attention': {'key': {'bias': {'value': ShapeDtypeStruct(shape=(8, 64), dtype=float32)}, 'kernel': {'value': ShapeDtypeStruct(shape=(512, 8, 64), dtype=float32)}}, 'out': {'bias': {'value': ShapeDtypeStruct(shape=(512,), dtype=float32)}, 'kernel': {'value': ShapeDtypeStruct(shape=(8, 64, 512), dtype=float32)}}, 'query': {'bias': {'value': ShapeDtypeStruct(shape=(8, 64), dtype=float32)}, 'kernel': {'value': ShapeDtypeStruct(shape=(512, 8, 64), dtype=float32)}}, 'value': {'bias': {'value': ShapeDtypeStruct(shape=(8, 64), dtype=float32)}, 'kernel': {'value': ShapeDtypeStruct(shape=(512, 8, 64), dtype=float32)}}}, 'temporal_norm': {'bias': {'value': ShapeDtypeStruct(shape=(512,), dtype=float32)}, 'scale': {'value': ShapeDtypeStruct(shape=(512,), dtype=float32)}}}}, 'input_dense': {'bias': {'value': ShapeDtypeStruct(shape=(512,), dtype=float32)}, 'kernel': {'value': ShapeDtypeStruct(shape=(768, 512), dtype=float32)}}, 'input_norm1': {'bias': {'value': ShapeDtypeStruct(shape=(768,), dtype=float32)}, 'scale': {'value': ShapeDtypeStruct(shape=(768,), dtype=float32)}}, 'input_norm2': {'bias': {'value': ShapeDtypeStruct(shape=(512,), dtype=float32)}, 'scale': {'value': ShapeDtypeStruct(shape=(512,), dtype=float32)}}, 'output_dense': {'bias': {'value': ShapeDtypeStruct(shape=(32,), dtype=float32)}, 'kernel': {'value': ShapeDtypeStruct(shape=(512, 32), dtype=float32)}}}, 'patch_up': {'bias': {'value': ShapeDtypeStruct(shape=(512,), dtype=float32)}, 'kernel': {'value': ShapeDtypeStruct(shape=(768, 512), dtype=float32)}}, 'vq': {'codebook': {'value': ShapeDtypeStruct(shape=(6, 32), dtype=float32)}}}, rhs=None)}}}}\r\n",,terminal_output +380,673389,"TERMINAL",0,0,"srun: error: hkn0401: task 0: Exited with exit code 
1\r\n]0;tum_cte0515@hkn0401:~/Projects/jasmine[?2004h(jasmine) [tum_cte0515@hkn0401 jasmine]$ ",,terminal_output +381,673851,"TERMINAL",0,0,"338",,terminal_output +382,674978,"TERMINAL",0,0,"449",,terminal_output +383,676002,"TERMINAL",0,0,"5530",,terminal_output +384,677037,"TERMINAL",0,0,"661",,terminal_output +385,678031,"TERMINAL",0,0,"772",,terminal_output +386,679185,"TERMINAL",0,0,"883",,terminal_output +387,680207,"TERMINAL",0,0,"994",,terminal_output +388,681235,"TERMINAL",0,0,"20305",,terminal_output +389,682211,"TERMINAL",0,0,"116",,terminal_output +390,683279,"TERMINAL",0,0,"227",,terminal_output +391,684310,"TERMINAL",0,0,"338",,terminal_output +392,685364,"TERMINAL",0,0,"449",,terminal_output +393,686401,"TERMINAL",0,0,"5540",,terminal_output +394,687444,"TERMINAL",0,0,"661",,terminal_output +395,688500,"TERMINAL",0,0,"772",,terminal_output +396,690749,"TERMINAL",0,0,"8405",,terminal_output +397,691772,"TERMINAL",0,0,"3116",,terminal_output +398,692801,"TERMINAL",0,0,"227",,terminal_output +399,693825,"TERMINAL",0,0,"338",,terminal_output +400,694845,"TERMINAL",0,0,"449",,terminal_output +401,695866,"TERMINAL",0,0,"5550",,terminal_output +402,696994,"TERMINAL",0,0,"661",,terminal_output +403,697947,"TERMINAL",0,0,"772",,terminal_output +404,699042,"TERMINAL",0,0,"883",,terminal_output +405,700038,"TERMINAL",0,0,"994",,terminal_output +406,701030,"slurm/jobs/mihir/horeka/coinrun/sample_maskgit.sbatch",0,0,"",shellscript,tab +407,701032,"slurm/jobs/mihir/horeka/coinrun/sample_maskgit.sbatch",1063,0,"",shellscript,selection_mouse +408,701221,"TERMINAL",0,0,"40505",,terminal_output +409,702187,"TERMINAL",0,0,"116",,terminal_output +410,702932,"slurm/jobs/mihir/horeka/coinrun/sample_maskgit.sbatch",1054,26,"",shellscript,content +411,702966,"slurm/jobs/mihir/horeka/coinrun/sample_maskgit.sbatch",1058,0,"",shellscript,selection_command +412,703194,"TERMINAL",0,0,"227",,terminal_output +413,704239,"TERMINAL",0,0,"338",,terminal_output +414,704528,"TERMINAL",0,0,"sh slurm/jobs/mihir/horeka/coinrun/sample_maskgit.sbatch",,terminal_output +415,704865,"TERMINAL",0,0,"\r\n[?2004l\r",,terminal_output +416,704976,"TERMINAL",0,0,"Sampling from checkpoint: /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/big-runs/dyn/interactive/3500285\r\n",,terminal_output +417,705095,"TERMINAL",0,0,"GpuFreq=control_disabled\r\n",,terminal_output +418,705268,"TERMINAL",0,0,"449",,terminal_output +419,706415,"TERMINAL",0,0,"559:00",,terminal_output +420,707366,"TERMINAL",0,0,"661",,terminal_output +421,708417,"TERMINAL",0,0,"772",,terminal_output +422,709465,"TERMINAL",0,0,"883",,terminal_output +423,710616,"TERMINAL",0,0,"994",,terminal_output +424,711638,"TERMINAL",0,0,"502:005",,terminal_output +425,712689,"TERMINAL",0,0,"116",,terminal_output +426,713713,"TERMINAL",0,0,"238",,terminal_output +427,714695,"TERMINAL",0,0,"449",,terminal_output +428,714903,"TERMINAL",0,0,"Traceback (most recent call last):\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/sample.py"", line 151, in \r\n restored = checkpoint_manager.restore(\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/.venv/lib/python3.10/site-packages/orbax/checkpoint/checkpoint_manager.py"", line 1666, in restore\r\n restored = self._checkpointer.restore(restore_directory, args=args)\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/.venv/lib/python3.10/site-packages/orbax/checkpoint/_src/checkpointers/async_checkpointer.py"", line 571, in 
restore\r\n return super().restore(directory, *args, **kwargs)\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/.venv/lib/python3.10/site-packages/orbax/checkpoint/_src/checkpointers/checkpointer.py"", line 306, in restore\r\n restored = self._restore(directory, args=ckpt_args)\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/.venv/lib/python3.10/site-packages/orbax/checkpoint/_src/checkpointers/checkpointer.py"", line 328, in _restore\r\n return self._handler.restore(directory, args=args)\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/.venv/lib/python3.10/site-packages/orbax/checkpoint/_src/handlers/composite_checkpoint_handler.py"", line 857, in restore\r\n restored[item_name] = handler.restore(\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/.venv/lib/python3.10/site-packages/orbax/checkpoint/_src/handlers/pytree_checkpoint_handler.py"", line 835, in restore\r\n return self._handler_impl.restore(directory, args=args)\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/.venv/lib/python3.10/site-packages/orbax/checkpoint/_src/handlers/base_pytree_checkpoint_handler.py"", line 945, in restore\r\n raise ValueError(\r\nValueError: User-provided restore item and on-disk value metadata tree structures do not match: {'model': {'lam': Diff(lhs={'action_in': {'value': ShapeDtypeStruct(shape=(1, 1, 1, 768), dtype=float32)}, 'action_up': {'bias': {'value': ShapeDtypeStruct(shape=(512,), dtype=float32)}, 'kernel': {'value': ShapeDtypeStruct(shape=(32, 512), dtype=float32)}}, 'encoder': {'blocks': {'0': {'ffn_dense1': {'bias': {'value': ShapeDtypeStruct(shape=(2048,), dtype=float32)}, 'kernel': {'value': ShapeDtypeStruct(shape=(512, 2048), dtype=float32)}}, 'ffn_dense2': {'bias': {'value': ShapeDtypeStruct(shape=(512,), dtype=float32)}, 'kernel': {'value': ShapeDtypeStruct(shape=(2048, 512), dtype=float32)}}, 'ffn_norm': {'bias': {'value': ShapeDtypeStruct(shape=(512,), dtype=float32)}, 'scale': {'value': ShapeDtypeStruct(shape=(512,), dtype=float32)}}, 'spatial_attention': {'key': {'bias': {'value': ShapeDtypeStruct(shape=(8, 64), dtype=float32)}, 'kernel': {'value': ShapeDtypeStruct(shape=(512, 8, 64), dtype=float32)}}, 'out': {'bias': {'value': ShapeDtypeStruct(shape=(512,), dtype=float32)}, 'kernel': {'value': ShapeDtypeStruct(shape=(8, 64, 512), dtype=float32)}}, 'query': {'bias': {'value': ShapeDtypeStruct(shape=(8, 64), dtype=float32)}, 'kernel': {'value': ShapeDtypeStruct(shape=(512, 8, 64), dtype=float32)}}, 'value': {'bias': {'value': ShapeDtypeStruct(shape=(8, 64), dtype=float32)}, 'kernel': {'value': ShapeDtypeStruct(shape=(512, 8, 64), dtype=float32)}}}, 'spatial_norm': {'bias': {'value': ShapeDtypeStruct(shape=(512,), dtype=float32)}, 'scale': {'value': ShapeDtypeStruct(shape=(512,), dtype=float32)}}, 'temporal_attention': {'key': {'bias': {'value': ShapeDtypeStruct(shape=(8, 64), dtype=float32)}, 'kernel': {'value': ShapeDtypeStruct(shape=(512, 8, 64), dtype=float32)}}, 'out': {'bias': {'value': ShapeDtypeStruct(shape=(512,), dtype=float32)}, 'kernel': {'value': ShapeDtypeStruct(shape=(8, 64, 512), dtype=float32)}}, 'query': {'bias': {'value': ShapeDtypeStruct(shape=(8, 64), dtype=float32)}, 'kernel': {'value': ShapeDtypeStruct(shape=(512, 8, 64), dtype=float32)}}, 'value': {'bias': {'value': ShapeDtypeStruct(shape=(8, 64), dtype=float32)}, 'kernel': {'value': ShapeDtypeStruct(shape=(512, 8, 64), dtype=float32)}}}, 'temporal_norm': 
{'bias': {'value': ShapeDtypeStruct(shape=(512,), dtype=float32)}, 'scale': {'value': ShapeDtypeStruct(shape=(512,), dtype=float32)}}}, '1': {'ffn_dense1': {'bias': {'value': ShapeDtypeStruct(shape=(2048,), dtype=float32)}, 'kernel': {'value': ShapeDtypeStruct(shape=(512, 2048), dtype=float32)}}, 'ffn_dense2': {'bias': {'value': ShapeDtypeStruct(shape=(512,), dtype=float32)}, 'kernel': {'value': ShapeDtypeStruct(shape=(2048, 512), dtype=float32)}}, 'ffn_norm': {'bias': {'value': ShapeDtypeStruct(shape=(512,), dtype=float32)}, 'scale': {'value': ShapeDtypeStruct(shape=(512,), dtype=float32)}}, 'spatial_attention': {'key': {'bias': {'value': ShapeDtypeStruct(shape=(8, 64), dtype=float32)}, 'kernel': {'value': ShapeDtypeStruct(shape=(512, 8, 64), dtype=float32)}}, 'out': {'bias': {'value': ShapeDtypeStruct(shape=(512,), dtype=float32)}, 'kernel': {'value': ShapeDtypeStruct(shape=(8, 64, 512), dtype=float32)}}, 'query': {'bias': {'value': ShapeDtypeStruct(shape=(8, 64), dtype=float32)}, 'kernel': {'value': ShapeDtypeStruct(shape=(512, 8, 64), dtype=float32)}}, 'value': {'bias': {'value': ShapeDtypeStruct(shape=(8, 64), dtype=float32)}, 'kernel': {'value': ShapeDtypeStruct(shape=(512, 8, 64), dtype=float32)}}}, 'spatial_norm': {'bias': {'value': ShapeDtypeStruct(shape=(512,), dtype=float32)}, 'scale': {'value': ShapeDtypeStruct(shape=(512,), dtype=float32)}}, 'temporal_attention': {'key': {'bias': {'value': ShapeDtypeStruct(shape=(8, 64), dtype=float32)}, 'kernel': {'value': ShapeDtypeStruct(shape=(512, 8, 64), dtype=float32)}}, 'out': {'bias': {'value': ShapeDtypeStruct(shape=(512,), dtype=float32)}, 'kernel': {'value': ShapeDtypeStruct(shape=(8, 64, 512), dtype=float32)}}, 'query': {'bias': {'value': ShapeDtypeStruct(shape=(8, 64), dtype=float32)}, 'kernel': {'value': ShapeDtypeStruct(shape=(512, 8, 64), dtype=float32)}}, 'value': {'bias': {'value': ShapeDtypeStruct(shape=(8, 64), dtype=float32)}, 'kernel': {'value': ShapeDtypeStruct(shape=(512, 8, 64), dtype=float32)}}}, 'temporal_norm': {'bias': {'value': ShapeDtypeStruct(shape=(512,), dtype=float32)}, 'scale': {'value': ShapeDtypeStruct(shape=(512,), dtype=float32)}}}, '2': {'ffn_dense1': {'bias': {'value': ShapeDtypeStruct(shape=(2048,), dtype=float32)}, 'kernel': {'value': ShapeDtypeStruct(shape=(512, 2048), dtype=float32)}}, 'ffn_dense2': {'bias': {'value': ShapeDtypeStruct(shape=(512,), dtype=float32)}, 'kernel': {'value': ShapeDtypeStruct(shape=(2048, 512), dtype=float32)}}, 'ffn_norm': {'bias': {'value': ShapeDtypeStruct(shape=(512,), dtype=float32)}, 'scale': {'value': ShapeDtypeStruct(shape=(512,), dtype=float32)}}, 'spatial_attention': {'key': {'bias': {'value': ShapeDtypeStruct(shape=(8, 64), dtype=float32)}, 'kernel': {'value': ShapeDtypeStruct(shape=(512, 8, 64), dtype=float32)}}, 'out': {'bias': {'value': ShapeDtypeStruct(shape=(512,), dtype=float32)}, 'kernel': {'value': ShapeDtypeStruct(shape=(8, 64, 512), dtype=float32)}}, 'query': {'bias': {'value': ShapeDtypeStruct(shape=(8, 64), dtype=float32)}, 'kernel': {'value': ShapeDtypeStruct(shape=(512, 8, 64), dtype=float32)}}, 'value': {'bias': {'value': ShapeDtypeStruct(shape=(8, 64), dtype=float32)}, 'kernel': {'value': ShapeDtypeStruct(shape=(512, 8, 64), dtype=float32)}}}, 'spatial_norm': {'bias': {'value': ShapeDtypeStruct(shape=(512,), dtype=float32)}, 'scale': {'value': ShapeDtypeStruct(shape=(512,), dtype=float32)}}, 'temporal_attention': {'key': {'bias': {'value': ShapeDtypeStruct(shape=(8, 64), dtype=float32)}, 'kernel': {'value': ShapeDtypeStruct(shape=(512, 8, 64), 
dtype=float32)}}, 'out': {'bias': {'value': ShapeDtypeStruct(shape=(512,), dtype=float32)}, 'kernel': {'value': ShapeDtypeStruct(shape=(8, 64, 512), dtype=float32)}}, 'query': {'bias': {'value': ShapeDtypeStruct(shape=(8, 64), dtype=float32)}, 'kernel': {'value': ShapeDtypeStruct(shape=(512, 8, 64), dtype=float32)}}, 'value': {'bias': {'value': ShapeDtypeStruct(shape=(8, 64), dtype=float32)}, 'kernel': {'value': ShapeDtypeStruct(shape=(512, 8, 64), dtype=float32)}}}, 'temporal_norm': {'bias': {'value': ShapeDtypeStruct(shape=(512,), dtype=float32)}, 'scale': {'value': ShapeDtypeStruct(shape=(512,), dtype=float32)}}}, '3': {'ffn_dense1': {'bias': {'value': ShapeDtypeStruct(shape=(2048,), dtype=float32)}, 'kernel': {'value': ShapeDtypeStruct(shape=(512, 2048), dtype=float32)}}, 'ffn_dense2': {'bias': {'value': ShapeDtypeStruct(shape=(512,), dtype=float32)}, 'kernel': {'value': ShapeDtypeStruct(shape=(2048, 512), dtype=float32)}}, 'ffn_norm': {'bias': {'value': ShapeDtypeStruct(shape=(512,), dtype=float32)}, 'scale': {'value': ShapeDtypeStruct(shape=(512,), dtype=float32)}}, 'spatial_attention': {'key': {'bias': {'value': ShapeDtypeStruct(shape=(8, 64), dtype=float32)}, 'kernel': {'value': ShapeDtypeStruct(shape=(512, 8, 64), dtype=float32)}}, 'out': {'bias': {'value': ShapeDtypeStruct(shape=(512,), dtype=float32)}, 'kernel': {'value': ShapeDtypeStruct(shape=(8, 64, 512), dtype=float32)}}, 'query': {'bias': {'value': ShapeDtypeStruct(shape=(8, 64), dtype=float32)}, 'kernel': {'value': ShapeDtypeStruct(shape=(512, 8, 64), dtype=float32)}}, 'value': {'bias': {'value': ShapeDtypeStruct(shape=(8, 64), dtype=float32)}, 'kernel': {'value': ShapeDtypeStruct(shape=(512, 8, 64), dtype=float32)}}}, 'spatial_norm': {'bias': {'value': ShapeDtypeStruct(shape=(512,), dtype=float32)}, 'scale': {'value': ShapeDtypeStruct(shape=(512,), dtype=float32)}}, 'temporal_attention': {'key': {'bias': {'value': ShapeDtypeStruct(shape=(8, 64), dtype=float32)}, 'kernel': {'value': ShapeDtypeStruct(shape=(512, 8, 64), dtype=float32)}}, 'out': {'bias': {'value': ShapeDtypeStruct(shape=(512,), dtype=float32)}, 'kernel': {'value': ShapeDtypeStruct(shape=(8, 64, 512), dtype=float32)}}, 'query': {'bias': {'value': ShapeDtypeStruct(shape=(8, 64), dtype=float32)}, 'kernel': {'value': ShapeDtypeStruct(shape=(512, 8, 64), dtype=float32)}}, 'value': {'bias': {'value': ShapeDtypeStruct(shape=(8, 64), dtype=float32)}, 'kernel': {'value': ShapeDtypeStruct(shape=(512, 8, 64), dtype=float32)}}}, 'temporal_norm': {'bias': {'value': ShapeDtypeStruct(shape=(512,), dtype=float32)}, 'scale': {'value': ShapeDtypeStruct(shape=(512,), dtype=float32)}}}}, 'input_dense': {'bias': {'value': ShapeDtypeStruct(shape=(512,), dtype=float32)}, 'kernel': {'value': ShapeDtypeStruct(shape=(768, 512), dtype=float32)}}, 'input_norm1': {'bias': {'value': ShapeDtypeStruct(shape=(768,), dtype=float32)}, 'scale': {'value': ShapeDtypeStruct(shape=(768,), dtype=float32)}}, 'input_norm2': {'bias': {'value': ShapeDtypeStruct(shape=(512,), dtype=float32)}, 'scale': {'value': ShapeDtypeStruct(shape=(512,), dtype=float32)}}, 'output_dense': {'bias': {'value': ShapeDtypeStruct(shape=(32,), dtype=float32)}, 'kernel': {'value': ShapeDtypeStruct(shape=(512, 32), dtype=float32)}}}, 'patch_up': {'bias': {'value': ShapeDtypeStruct(shape=(512,), dtype=float32)}, 'kernel': {'value': ShapeDtypeStruct(shape=(768, 512), dtype=float32)}}, 'vq': {'codebook': {'value': ShapeDtypeStruct(shape=(6, 32), dtype=float32)}, 'drop': {'rngs': {'default': {'count': {'value': 
ShapeDtypeStruct(shape=(), dtype=uint32)}, 'key': {'value': ShapeDtypeStruct(shape=(), dtype=key)}}}}}}, rhs=None)}, 'opt_state': {'0': {'mu': {'lam': Diff(lhs={'action_in': {'value': ShapeDtypeStruct(shape=(1, 1, 1, 768), dtype=bfloat16)}, 'action_up': {'bias': {'value': ShapeDtypeStruct(shape=(512,), dtype=bfloat16)}, 'kernel': {'value': ShapeDtypeStruct(shape=(32, 512), dtype=bfloat16)}}, 'encoder': {'blocks': {'0': {'ffn_dense1': {'bias': {'value': ShapeDtypeStruct(shape=(2048,), dtype=bfloat16)}, 'kernel': {'value': ShapeDtypeStruct(shape=(512, 2048), dtype=bfloat16)}}, 'ffn_dense2': {'bias': {'value': ShapeDtypeStruct(shape=(512,), dtype=bfloat16)}, 'kernel': {'value': ShapeDtypeStruct(shape=(2048, 512), dtype=bfloat16)}}, 'ffn_norm': {'bias': {'value': ShapeDtypeStruct(shape=(512,), dtype=bfloat16)}, 'scale': {'value': ShapeDtypeStruct(shape=(512,), dtype=bfloat16)}}, 'spatial_attention': {'key': {'bias': {'value': ShapeDtypeStruct(shape=(8, 64), dtype=bfloat16)}, 'kernel': {'value': ShapeDtypeStruct(shape=(512, 8, 64), dtype=bfloat16)}}, 'out': {'bias': {'value': ShapeDtypeStruct(shape=(512,), dtype=bfloat16)}, 'kernel': {'value': ShapeDtypeStruct(shape=(8, 64, 512), dtype=bfloat16)}}, 'query': {'bias': {'value': ShapeDtypeStruct(shape=(8, 64), dtype=bfloat16)}, 'kernel': {'value': ShapeDtypeStruct(shape=(512, 8, 64), dtype=bfloat16)}}, 'value': {'bias': {'value': ShapeDtypeStruct(shape=(8, 64), dtype=bfloat16)}, 'kernel': {'value': ShapeDtypeStruct(shape=(512, 8, 64), dtype=bfloat16)}}}, 'spatial_norm': {'bias': {'value': ShapeDtypeStruct(shape=(512,), dtype=bfloat16)}, 'scale': {'value': ShapeDtypeStruct(shape=(512,), dtype=bfloat16)}}, 'temporal_attention': {'key': {'bias': {'value': ShapeDtypeStruct(shape=(8, 64), dtype=bfloat16)}, 'kernel': {'value': ShapeDtypeStruct(shape=(512, 8, 64), dtype=bfloat16)}}, 'out': {'bias': {'value': ShapeDtypeStruct(shape=(512,), dtype=bfloat16)}, 'kernel': {'value': ShapeDtypeStruct(shape=(8, 64, 512), dtype=bfloat16)}}, 'query': {'bias': {'value': ShapeDtypeStruct(shape=(8, 64), dtype=bfloat16)}, 'kernel': {'value': ShapeDtypeStruct(shape=(512, 8, 64), dtype=bfloat16)}}, 'value': {'bias': {'value': ShapeDtypeStruct(shape=(8, 64), dtype=bfloat16)}, 'kernel': {'value': ShapeDtypeStruct(shape=(512, 8, 64), dtype=bfloat16)}}}, 'temporal_norm': {'bias': {'value': ShapeDtypeStruct(shape=(512,), dtype=bfloat16)}, 'scale': {'value': ShapeDtypeStruct(shape=(512,), dtype=bfloat16)}}}, '1': {'ffn_dense1': {'bias': {'value': ShapeDtypeStruct(shape=(2048,), dtype=bfloat16)}, 'kernel': {'value': ShapeDtypeStruct(shape=(512, 2048), dtype=bfloat16)}}, 'ffn_dense2': {'bias': {'value': ShapeDtypeStruct(shape=(512,), dtype=bfloat16)}, 'kernel': {'value': ShapeDtypeStruct(shape=(2048, 512), dtype=bfloat16)}}, 'ffn_norm': {'bias': {'value': ShapeDtypeStruct(shape=(512,), dtype=bfloat16)}, 'scale': {'value': ShapeDtypeStruct(shape=(512,), dtype=bfloat16)}}, 'spatial_attention': {'key': {'bias': {'value': ShapeDtypeStruct(shape=(8, 64), dtype=bfloat16)}, 'kernel': {'value': ShapeDtypeStruct(shape=(512, 8, 64), dtype=bfloat16)}}, 'out': {'bias': {'value': ShapeDtypeStruct(shape=(512,), dtype=bfloat16)}, 'kernel': {'value': ShapeDtypeStruct(shape=(8, 64, 512), dtype=bfloat16)}}, 'query': {'bias': {'value': ShapeDtypeStruct(shape=(8, 64), dtype=bfloat16)}, 'kernel': {'value': ShapeDtypeStruct(shape=(512, 8, 64), dtype=bfloat16)}}, 'value': {'bias': {'value': ShapeDtypeStruct(shape=(8, 64), dtype=bfloat16)}, 'kernel': {'value': ShapeDtypeStruct(shape=(512, 8, 64), 
dtype=bfloat16)}}}, 'spatial_norm': {'bias': {'value': ShapeDtypeStruct(shape=(512,), dtype=bfloat16)}, 'scale': {'value': ShapeDtypeStruct(shape=(512,), dtype=bfloat16)}}, 'temporal_attention': {'key': {'bias': {'value': ShapeDtypeStruct(shape=(8, 64), dtype=bfloat16)}, 'kernel': {'value': ShapeDtypeStruct(shape=(512, 8, 64), dtype=bfloat16)}}, 'out': {'bias': {'value': ShapeDtypeStruct(shape=(512,), dtype=bfloat16)}, 'kernel': {'value': ShapeDtypeStruct(shape=(8, 64, 512), dtype=bfloat16)}}, 'query': {'bias': {'value': ShapeDtypeStruct(shape=(8, 64), dtype=bfloat16)}, 'kernel': {'value': ShapeDtypeStruct(shape=(512, 8, 64), dtype=bfloat16)}}, 'value': {'bias': {'value': ShapeDtypeStruct(shape=(8, 64), dtype=bfloat16)}, 'kernel': {'value': ShapeDtypeStruct(shape=(512, 8, 64), dtype=bfloat16)}}}, 'temporal_norm': {'bias': {'value': ShapeDtypeStruct(shape=(512,), dtype=bfloat16)}, 'scale': {'value': ShapeDtypeStruct(shape=(512,), dtype=bfloat16)}}}, '2': {'ffn_dense1': {'bias': {'value': ShapeDtypeStruct(shape=(2048,), dtype=bfloat16)}, 'kernel': {'value': ShapeDtypeStruct(shape=(512, 2048), dtype=bfloat16)}}, 'ffn_dense2': {'bias': {'value': ShapeDtypeStruct(shape=(512,), dtype=bfloat16)}, 'kernel': {'value': ShapeDtypeStruct(shape=(2048, 512), dtype=bfloat16)}}, 'ffn_norm': {'bias': {'value': ShapeDtypeStruct(shape=(512,), dtype=bfloat16)}, 'scale': {'value': ShapeDtypeStruct(shape=(512,), dtype=bfloat16)}}, 'spatial_attention': {'key': {'bias': {'value': ShapeDtypeStruct(shape=(8, 64), dtype=bfloat16)}, 'kernel': {'value': ShapeDtypeStruct(shape=(512, 8, 64), dtype=bfloat16)}}, 'out': {'bias': {'value': ShapeDtypeStruct(shape=(512,), dtype=bfloat16)}, 'kernel': {'value': ShapeDtypeStruct(shape=(8, 64, 512), dtype=bfloat16)}}, 'query': {'bias': {'value': ShapeDtypeStruct(shape=(8, 64), dtype=bfloat16)}, 'kernel': {'value': ShapeDtypeStruct(shape=(512, 8, 64), dtype=bfloat16)}}, 'value': {'bias': {'value': ShapeDtypeStruct(shape=(8, 64), dtype=bfloat16)}, 'kernel': {'value': ShapeDtypeStruct(shape=(512, 8, 64), dtype=bfloat16)}}}, 'spatial_norm': {'bias': {'value': ShapeDtypeStruct(shape=(512,), dtype=bfloat16)}, 'scale': {'value': ShapeDtypeStruct(shape=(512,), dtype=bfloat16)}}, 'temporal_attention': {'key': {'bias': {'value': ShapeDtypeStruct(shape=(8, 64), dtype=bfloat16)}, 'kernel': {'value': ShapeDtypeStruct(shape=(512, 8, 64), dtype=bfloat16)}}, 'out': {'bias': {'value': ShapeDtypeStruct(shape=(512,), dtype=bfloat16)}, 'kernel': {'value': ShapeDtypeStruct(shape=(8, 64, 512), dtype=bfloat16)}}, 'query': {'bias': {'value': ShapeDtypeStruct(shape=(8, 64), dtype=bfloat16)}, 'kernel': {'value': ShapeDtypeStruct(shape=(512, 8, 64), dtype=bfloat16)}}, 'value': {'bias': {'value': ShapeDtypeStruct(shape=(8, 64), dtype=bfloat16)}, 'kernel': {'value': ShapeDtypeStruct(shape=(512, 8, 64), dtype=bfloat16)}}}, 'temporal_norm': {'bias': {'value': ShapeDtypeStruct(shape=(512,), dtype=bfloat16)}, 'scale': {'value': ShapeDtypeStruct(shape=(512,), dtype=bfloat16)}}}, '3': {'ffn_dense1': {'bias': {'value': ShapeDtypeStruct(shape=(2048,), dtype=bfloat16)}, 'kernel': {'value': ShapeDtypeStruct(shape=(512, 2048), dtype=bfloat16)}}, 'ffn_dense2': {'bias': {'value': ShapeDtypeStruct(shape=(512,), dtype=bfloat16)}, 'kernel': {'value': ShapeDtypeStruct(shape=(2048, 512), dtype=bfloat16)}}, 'ffn_norm': {'bias': {'value': ShapeDtypeStruct(shape=(512,), dtype=bfloat16)}, 'scale': {'value': ShapeDtypeStruct(shape=(512,), dtype=bfloat16)}}, 'spatial_attention': {'key': {'bias': {'value': ShapeDtypeStruct(shape=(8, 
64), dtype=bfloat16)}, 'kernel': {'value': ShapeDtypeStruct(shape=(512, 8, 64), dtype=bfloat16)}}, 'out': {'bias': {'value': ShapeDtypeStruct(shape=(512,), dtype=bfloat16)}, 'kernel': {'value': ShapeDtypeStruct(shape=(8, 64, 512), dtype=bfloat16)}}, 'query': {'bias': {'value': ShapeDtypeStruct(shape=(8, 64), dtype=bfloat16)}, 'kernel': {'value': ShapeDtypeStruct(shape=(512, 8, 64), dtype=bfloat16)}}, 'value': {'bias': {'value': ShapeDtypeStruct(shape=(8, 64), dtype=bfloat16)}, 'kernel': {'value': ShapeDtypeStruct(shape=(512, 8, 64), dtype=bfloat16)}}}, 'spatial_norm': {'bias': {'value': ShapeDtypeStruct(shape=(512,), dtype=bfloat16)}, 'scale': {'value': ShapeDtypeStruct(shape=(512,), dtype=bfloat16)}}, 'temporal_attention': {'key': {'bias': {'value': ShapeDtypeStruct(shape=(8, 64), dtype=bfloat16)}, 'kernel': {'value': ShapeDtypeStruct(shape=(512, 8, 64), dtype=bfloat16)}}, 'out': {'bias': {'value': ShapeDtypeStruct(shape=(512,), dtype=bfloat16)}, 'kernel': {'value': ShapeDtypeStruct(shape=(8, 64, 512), dtype=bfloat16)}}, 'query': {'bias': {'value': ShapeDtypeStruct(shape=(8, 64), dtype=bfloat16)}, 'kernel': {'value': ShapeDtypeStruct(shape=(512, 8, 64), dtype=bfloat16)}}, 'value': {'bias': {'value': ShapeDtypeStruct(shape=(8, 64), dtype=bfloat16)}, 'kernel': {'value': ShapeDtypeStruct(shape=(512, 8, 64), dtype=bfloat16)}}}, 'temporal_norm': {'bias': {'value': ShapeDtypeStruct(shape=(512,), dtype=bfloat16)}, 'scale': {'value': ShapeDtypeStruct(shape=(512,), dtype=bfloat16)}}}}, 'input_dense': {'bias': {'value': ShapeDtypeStruct(shape=(512,), dtype=bfloat16)}, 'kernel': {'value': ShapeDtypeStruct(shape=(768, 512), dtype=bfloat16)}}, 'input_norm1': {'bias': {'value': ShapeDtypeStruct(shape=(768,), dtype=bfloat16)}, 'scale': {'value': ShapeDtypeStruct(shape=(768,), dtype=bfloat16)}}, 'input_norm2': {'bias': {'value': ShapeDtypeStruct(shape=(512,), dtype=bfloat16)}, 'scale': {'value': ShapeDtypeStruct(shape=(512,), dtype=bfloat16)}}, 'output_dense': {'bias': {'value': ShapeDtypeStruct(shape=(32,), dtype=bfloat16)}, 'kernel': {'value': ShapeDtypeStruct(shape=(512, 32), dtype=bfloat16)}}}, 'patch_up': {'bias': {'value': ShapeDtypeStruct(shape=(512,), dtype=bfloat16)}, 'kernel': {'value': ShapeDtypeStruct(shape=(768, 512), dtype=bfloat16)}}, 'vq': {'codebook': {'value': ShapeDtypeStruct(shape=(6, 32), dtype=bfloat16)}}}, rhs=None)}, 'nu': {'lam': Diff(lhs={'action_in': {'value': ShapeDtypeStruct(shape=(1, 1, 1, 768), dtype=float32)}, 'action_up': {'bias': {'value': ShapeDtypeStruct(shape=(512,), dtype=float32)}, 'kernel': {'value': ShapeDtypeStruct(shape=(32, 512), dtype=float32)}}, 'encoder': {'blocks': {'0': {'ffn_dense1': {'bias': {'value': ShapeDtypeStruct(shape=(2048,), dtype=float32)}, 'kernel': {'value': ShapeDtypeStruct(shape=(512, 2048), dtype=float32)}}, 'ffn_dense2': {'bias': {'value': ShapeDtypeStruct(shape=(512,), dtype=float32)}, 'kernel': {'value': ShapeDtypeStruct(shape=(2048, 512), dtype=float32)}}, 'ffn_norm': {'bias': {'value': ShapeDtypeStruct(shape=(512,), dtype=float32)}, 'scale': {'value': ShapeDtypeStruct(shape=(512,), dtype=float32)}}, 'spatial_attention': {'key': {'bias': {'value': ShapeDtypeStruct(shape=(8, 64), dtype=float32)}, 'kernel': {'value': ShapeDtypeStruct(shape=(512, 8, 64), dtype=float32)}}, 'out': {'bias': {'value': ShapeDtypeStruct(shape=(512,), dtype=float32)}, 'kernel': {'value': ShapeDtypeStruct(shape=(8, 64, 512), dtype=float32)}}, 'query': {'bias': {'value': ShapeDtypeStruct(shape=(8, 64), dtype=float32)}, 'kernel': {'value': 
ShapeDtypeStruct(shape=(512, 8, 64), dtype=float32)}}, 'value': {'bias': {'value': ShapeDtypeStruct(shape=(8, 64), dtype=float32)}, 'kernel': {'value': ShapeDtypeStruct(shape=(512, 8, 64), dtype=float32)}}}, 'spatial_norm': {'bias': {'value': ShapeDtypeStruct(shape=(512,), dtype=float32)}, 'scale': {'value': ShapeDtypeStruct(shape=(512,), dtype=float32)}}, 'temporal_attention': {'key': {'bias': {'value': ShapeDtypeStruct(shape=(8, 64), dtype=float32)}, 'kernel': {'value': ShapeDtypeStruct(shape=(512, 8, 64), dtype=float32)}}, 'out': {'bias': {'value': ShapeDtypeStruct(shape=(512,), dtype=float32)}, 'kernel': {'value': ShapeDtypeStruct(shape=(8, 64, 512), dtype=float32)}}, 'query': {'bias': {'value': ShapeDtypeStruct(shape=(8, 64), dtype=float32)}, 'kernel': {'value': ShapeDtypeStruct(shape=(512, 8, 64), dtype=float32)}}, 'value': {'bias': {'value': ShapeDtypeStruct(shape=(8, 64), dtype=float32)}, 'kernel': {'value': ShapeDtypeStruct(shape=(512, 8, 64), dtype=float32)}}}, 'temporal_norm': {'bias': {'value': ShapeDtypeStruct(shape=(512,), dtype=float32)}, 'scale': {'value': ShapeDtypeStruct(shape=(512,), dtype=float32)}}}, '1': {'ffn_dense1': {'bias': {'value': ShapeDtypeStruct(shape=(2048,), dtype=float32)}, 'kernel': {'value': ShapeDtypeStruct(shape=(512, 2048), dtype=float32)}}, 'ffn_dense2': {'bias': {'value': ShapeDtypeStruct(shape=(512,), dtype=float32)}, 'kernel': {'value': ShapeDtypeStruct(shape=(2048, 512), dtype=float32)}}, 'ffn_norm': {'bias': {'value': ShapeDtypeStruct(shape=(512,), dtype=float32)}, 'scale': {'value': ShapeDtypeStruct(shape=(512,), dtype=float32)}}, 'spatial_attention': {'key': {'bias': {'value': ShapeDtypeStruct(shape=(8, 64), dtype=float32)}, 'kernel': {'value': ShapeDtypeStruct(shape=(512, 8, 64), dtype=float32)}}, 'out': {'bias': {'value': ShapeDtypeStruct(shape=(512,), dtype=float32)}, 'kernel': {'value': ShapeDtypeStruct(shape=(8, 64, 512), dtype=float32)}}, 'query': {'bias': {'value': ShapeDtypeStruct(shape=(8, 64), dtype=float32)}, 'kernel': {'value': ShapeDtypeStruct(shape=(512, 8, 64), dtype=float32)}}, 'value': {'bias': {'value': ShapeDtypeStruct(shape=(8, 64), dtype=float32)}, 'kernel': {'value': ShapeDtypeStruct(shape=(512, 8, 64), dtype=float32)}}}, 'spatial_norm': {'bias': {'value': ShapeDtypeStruct(shape=(512,), dtype=float32)}, 'scale': {'value': ShapeDtypeStruct(shape=(512,), dtype=float32)}}, 'temporal_attention': {'key': {'bias': {'value': ShapeDtypeStruct(shape=(8, 64), dtype=float32)}, 'kernel': {'value': ShapeDtypeStruct(shape=(512, 8, 64), dtype=float32)}}, 'out': {'bias': {'value': ShapeDtypeStruct(shape=(512,), dtype=float32)}, 'kernel': {'value': ShapeDtypeStruct(shape=(8, 64, 512), dtype=float32)}}, 'query': {'bias': {'value': ShapeDtypeStruct(shape=(8, 64), dtype=float32)}, 'kernel': {'value': ShapeDtypeStruct(shape=(512, 8, 64), dtype=float32)}}, 'value': {'bias': {'value': ShapeDtypeStruct(shape=(8, 64), dtype=float32)}, 'kernel': {'value': ShapeDtypeStruct(shape=(512, 8, 64), dtype=float32)}}}, 'temporal_norm': {'bias': {'value': ShapeDtypeStruct(shape=(512,), dtype=float32)}, 'scale': {'value': ShapeDtypeStruct(shape=(512,), dtype=float32)}}}, '2': {'ffn_dense1': {'bias': {'value': ShapeDtypeStruct(shape=(2048,), dtype=float32)}, 'kernel': {'value': ShapeDtypeStruct(shape=(512, 2048), dtype=float32)}}, 'ffn_dense2': {'bias': {'value': ShapeDtypeStruct(shape=(512,), dtype=float32)}, 'kernel': {'value': ShapeDtypeStruct(shape=(2048, 512), dtype=float32)}}, 'ffn_norm': {'bias': {'value': ShapeDtypeStruct(shape=(512,), 
dtype=float32)}, 'scale': {'value': ShapeDtypeStruct(shape=(512,), dtype=float32)}}, 'spatial_attention': {'key': {'bias': {'value': ShapeDtypeStruct(shape=(8, 64), dtype=float32)}, 'kernel': {'value': ShapeDtypeStruct(shape=(512, 8, 64), dtype=float32)}}, 'out': {'bias': {'value': ShapeDtypeStruct(shape=(512,), dtype=float32)}, 'kernel': {'value': ShapeDtypeStruct(shape=(8, 64, 512), dtype=float32)}}, 'query': {'bias': {'value': ShapeDtypeStruct(shape=(8, 64), dtype=float32)}, 'kernel': {'value': ShapeDtypeStruct(shape=(512, 8, 64), dtype=float32)}}, 'value': {'bias': {'value': ShapeDtypeStruct(shape=(8, 64), dtype=float32)}, 'kernel': {'value': ShapeDtypeStruct(shape=(512, 8, 64), dtype=float32)}}}, 'spatial_norm': {'bias': {'value': ShapeDtypeStruct(shape=(512,), dtype=float32)}, 'scale': {'value': ShapeDtypeStruct(shape=(512,), dtype=float32)}}, 'temporal_attention': {'key': {'bias': {'value': ShapeDtypeStruct(shape=(8, 64), dtype=float32)}, 'kernel': {'value': ShapeDtypeStruct(shape=(512, 8, 64), dtype=float32)}}, 'out': {'bias': {'value': ShapeDtypeStruct(shape=(512,), dtype=float32)}, 'kernel': {'value': ShapeDtypeStruct(shape=(8, 64, 512), dtype=float32)}}, 'query': {'bias': {'value': ShapeDtypeStruct(shape=(8, 64), dtype=float32)}, 'kernel': {'value': ShapeDtypeStruct(shape=(512, 8, 64), dtype=float32)}}, 'value': {'bias': {'value': ShapeDtypeStruct(shape=(8, 64), dtype=float32)}, 'kernel': {'value': ShapeDtypeStruct(shape=(512, 8, 64), dtype=float32)}}}, 'temporal_norm': {'bias': {'value': ShapeDtypeStruct(shape=(512,), dtype=float32)}, 'scale': {'value': ShapeDtypeStruct(shape=(512,), dtype=float32)}}}, '3': {'ffn_dense1': {'bias': {'value': ShapeDtypeStruct(shape=(2048,), dtype=float32)}, 'kernel': {'value': ShapeDtypeStruct(shape=(512, 2048), dtype=float32)}}, 'ffn_dense2': {'bias': {'value': ShapeDtypeStruct(shape=(512,), dtype=float32)}, 'kernel': {'value': ShapeDtypeStruct(shape=(2048, 512), dtype=float32)}}, 'ffn_norm': {'bias': {'value': ShapeDtypeStruct(shape=(512,), dtype=float32)}, 'scale': {'value': ShapeDtypeStruct(shape=(512,), dtype=float32)}}, 'spatial_attention': {'key': {'bias': {'value': ShapeDtypeStruct(shape=(8, 64), dtype=float32)}, 'kernel': {'value': ShapeDtypeStruct(shape=(512, 8, 64), dtype=float32)}}, 'out': {'bias': {'value': ShapeDtypeStruct(shape=(512,), dtype=float32)}, 'kernel': {'value': ShapeDtypeStruct(shape=(8, 64, 512), dtype=float32)}}, 'query': {'bias': {'value': ShapeDtypeStruct(shape=(8, 64), dtype=float32)}, 'kernel': {'value': ShapeDtypeStruct(shape=(512, 8, 64), dtype=float32)}}, 'value': {'bias': {'value': ShapeDtypeStruct(shape=(8, 64), dtype=float32)}, 'kernel': {'value': ShapeDtypeStruct(shape=(512, 8, 64), dtype=float32)}}}, 'spatial_norm': {'bias': {'value': ShapeDtypeStruct(shape=(512,), dtype=float32)}, 'scale': {'value': ShapeDtypeStruct(shape=(512,), dtype=float32)}}, 'temporal_attention': {'key': {'bias': {'value': ShapeDtypeStruct(shape=(8, 64), dtype=float32)}, 'kernel': {'value': ShapeDtypeStruct(shape=(512, 8, 64), dtype=float32)}}, 'out': {'bias': {'value': ShapeDtypeStruct(shape=(512,), dtype=float32)}, 'kernel': {'value': ShapeDtypeStruct(shape=(8, 64, 512), dtype=float32)}}, 'query': {'bias': {'value': ShapeDtypeStruct(shape=(8, 64), dtype=float32)}, 'kernel': {'value': ShapeDtypeStruct(shape=(512, 8, 64), dtype=float32)}}, 'value': {'bias': {'value': ShapeDtypeStruct(shape=(8, 64), dtype=float32)}, 'kernel': {'value': ShapeDtypeStruct(shape=(512, 8, 64), dtype=float32)}}}, 'temporal_norm': {'bias': {'value': 
ShapeDtypeStruct(shape=(512,), dtype=float32)}, 'scale': {'value': ShapeDtypeStruct(shape=(512,), dtype=float32)}}}}, 'input_dense': {'bias': {'value': ShapeDtypeStruct(shape=(512,), dtype=float32)}, 'kernel': {'value': ShapeDtypeStruct(shape=(768, 512), dtype=float32)}}, 'input_norm1': {'bias': {'value': ShapeDtypeStruct(shape=(768,), dtype=float32)}, 'scale': {'value': ShapeDtypeStruct(shape=(768,), dtype=float32)}}, 'input_norm2': {'bias': {'value': ShapeDtypeStruct(shape=(512,), dtype=float32)}, 'scale': {'value': ShapeDtypeStruct(shape=(512,), dtype=float32)}}, 'output_dense': {'bias': {'value': ShapeDtypeStruct(shape=(32,), dtype=float32)}, 'kernel': {'value': ShapeDtypeStruct(shape=(512, 32), dtype=float32)}}}, 'patch_up': {'bias': {'value': ShapeDtypeStruct(shape=(512,), dtype=float32)}, 'kernel': {'value': ShapeDtypeStruct(shape=(768, 512), dtype=float32)}}, 'vq': {'codebook': {'value': ShapeDtypeStruct(shape=(6, 32), dtype=float32)}}}, rhs=None)}}}}\r\n",,terminal_output +429,715440,"TERMINAL",0,0,"srun: error: hkn0401: task 0: Exited with exit code 1\r\n]0;tum_cte0515@hkn0401:~/Projects/jasmine[?2004h(jasmine) [tum_cte0515@hkn0401 jasmine]$ ",,terminal_output +430,715741,"TERMINAL",0,0,"5510",,terminal_output +431,716860,"TERMINAL",0,0,"661",,terminal_output +432,717880,"TERMINAL",0,0,"772",,terminal_output +433,718911,"TERMINAL",0,0,"883",,terminal_output +434,720034,"TERMINAL",0,0,"994",,terminal_output +435,721062,"TERMINAL",0,0,"3:00105",,terminal_output +436,722086,"TERMINAL",0,0,"116",,terminal_output +437,723088,"TERMINAL",0,0,"227",,terminal_output +438,724183,"TERMINAL",0,0,"338",,terminal_output +439,725257,"TERMINAL",0,0,"449",,terminal_output +440,726281,"TERMINAL",0,0,"5520",,terminal_output +441,727318,"TERMINAL",0,0,"661",,terminal_output +442,728338,"TERMINAL",0,0,"772",,terminal_output +443,729352,"TERMINAL",0,0,"883",,terminal_output +444,730378,"TERMINAL",0,0,"994",,terminal_output +445,731415,"TERMINAL",0,0,"10205",,terminal_output +446,732458,"TERMINAL",0,0,"116",,terminal_output +447,733501,"TERMINAL",0,0,"227",,terminal_output +448,734536,"TERMINAL",0,0,"338",,terminal_output +449,735722,"TERMINAL",0,0,"449",,terminal_output +450,736624,"TERMINAL",0,0,"5530",,terminal_output +451,737315,"TERMINAL",0,0,"sh slurm/jobs/mihir/horeka/coinrun/sample_maskgit.sbatch",,terminal_output +452,737750,"TERMINAL",0,0,"672",,terminal_output +453,738466,"TERMINAL",0,0,"\r",,terminal_output +454,738776,"TERMINAL",0,0,"883",,terminal_output +455,739763,"TERMINAL",0,0,"994",,terminal_output +456,740874,"TERMINAL",0,0,"20305",,terminal_output +457,741948,"TERMINAL",0,0,"116",,terminal_output +458,743083,"TERMINAL",0,0,"227",,terminal_output +459,744104,"TERMINAL",0,0,"338",,terminal_output +460,745027,"TERMINAL",0,0,"449",,terminal_output +461,746047,"TERMINAL",0,0,"5540",,terminal_output +462,746856,"TERMINAL",0,0,"sh slurm/jobs/mihir/horeka/coinrun/sample_maskgit.sbatch",,terminal_output +463,747088,"TERMINAL",0,0,"661",,terminal_output +464,748198,"TERMINAL",0,0,"772",,terminal_output +465,749221,"TERMINAL",0,0,"883",,terminal_output +466,750246,"TERMINAL",0,0,"994",,terminal_output +467,751379,"TERMINAL",0,0,"30405",,terminal_output +468,752434,"TERMINAL",0,0,"116",,terminal_output +469,753419,"TERMINAL",0,0,"227",,terminal_output +470,754430,"TERMINAL",0,0,"338",,terminal_output +471,754487,"TERMINAL",0,0,"Every 1.0s: squeue --mehkn1990.localdomain: Tue Sep 23 13:03:33 2025JOBID PARTITION NAME USER ST\tTIME NODES NODELIST(REASON)3511158 accelerat train_dy tum_cte0 
PD\t0:00\t 1 (Priority)3511159 accelerat train_dy tum_cte0 PD\t0:00\t 1 (Priority)3512804 accelerat interact tum_cte0 PD\t0:00\t 1 (Priority)3512808 dev_accel interact tum_cte0 R\t2:43\t 1 hkn04013512651 large preproce tum_cte0 R59:48\t 1 hkn1901",,terminal_output +472,754542,"TERMINAL",0,0,"\r(jasmine) [tum_cte0515@hkn0401 jasmine]$ sh slurm/jobs/mihir/horeka/coinrun/sample_maskgit.sbatch",,terminal_output +473,755766,"TERMINAL",0,0,"449",,terminal_output +474,756596,"TERMINAL",0,0,"5550",,terminal_output +475,757665,"TERMINAL",0,0,"Every 1.0s: squeue --mehkn1990.localdomain: Tue Sep 23 13:03:36 2025JOBID PARTITION NAME USER ST\tTIME NODES NODELIST(REASON)3511158 accelerat train_dy tum_cte0 PD\t0:00\t 1 (Priority)3511159 accelerat train_dy tum_cte0 PD\t0:00\t 1 (Priority)3512804 accelerat interact tum_cte0 PD\t0:00\t 1 (Priority)3512808 dev_accel interact tum_cte0 R\t2:46\t 1 hkn04013512651 large preproce tum_cte0 R59:51\t 1 hkn1901",,terminal_output +476,757686,"TERMINAL",0,0,"\r(jasmine) [tum_cte0515@hkn0401 jasmine]$ sh slurm/jobs/mihir/horeka/coinrun/sample_maskgit.sbatch",,terminal_output +477,758676,"TERMINAL",0,0,"783",,terminal_output +478,759696,"TERMINAL",0,0,"994",,terminal_output +479,760789,"TERMINAL",0,0,"40505",,terminal_output +480,761814,"TERMINAL",0,0,"116",,terminal_output +481,762842,"TERMINAL",0,0,"227",,terminal_output +482,763963,"TERMINAL",0,0,"338",,terminal_output +483,765606,"TERMINAL",0,0,"449",,terminal_output +484,766583,"TERMINAL",0,0,"551:00:00",,terminal_output +485,767650,"TERMINAL",0,0,"661",,terminal_output +486,768779,"TERMINAL",0,0,"783",,terminal_output +487,769713,"TERMINAL",0,0,"994",,terminal_output +488,770823,"TERMINAL",0,0,"503:005",,terminal_output +489,771849,"TERMINAL",0,0,"116",,terminal_output +490,773031,"TERMINAL",0,0,"227",,terminal_output +491,774078,"TERMINAL",0,0,"338",,terminal_output +492,775229,"TERMINAL",0,0,"449",,terminal_output +493,776168,"TERMINAL",0,0,"5510",,terminal_output +494,777452,"slurm/jobs/mihir/horeka/coinrun/sample_maskgit.sbatch",0,0,"",shellscript,tab +495,777453,"slurm/jobs/mihir/horeka/coinrun/sample_maskgit.sbatch",1032,0,"",shellscript,selection_mouse +496,777498,"slurm/jobs/mihir/horeka/coinrun/sample_maskgit.sbatch",1031,0,"",shellscript,selection_command +497,777499,"TERMINAL",0,0,"661",,terminal_output +498,778296,"TERMINAL",0,0,"772",,terminal_output +499,779325,"TERMINAL",0,0,"883",,terminal_output +500,780353,"TERMINAL",0,0,"994",,terminal_output +501,781396,"TERMINAL",0,0,"4:00105",,terminal_output +502,782488,"TERMINAL",0,0,"116",,terminal_output +503,783490,"TERMINAL",0,0,"227",,terminal_output +504,784091,"slurm/jobs/mihir/horeka/breakout/sample_maskgit.sbatch",0,0,"#!/usr/bin/env bash\n\n#SBATCH --nodes=1\n#SBATCH --ntasks-per-node=4\n#SBATCH --time=01:00:00\n#SBATCH --partition=accelerated\n#SBATCH --cpus-per-task=5\n#SBATCH --gres=gpu:4\n#SBATCH --output=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir/coinrun/dynamics/sampling/maskgit/%x_%j.log\n#SBATCH --error=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir/coinrun/dynamics/sampling/maskgit/%x_%j.log\n#SBATCH --job-name=coinrun_sample_maskgit\n\n# Unload modules that may interfere\nmodule unload mpi/openmpi/5.0\nmodule unload devel/cuda/12.4\n\n# Activate virtual environment\nsource 
.venv/bin/activate\n\narray_records_dir=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_coinrun/coinrun_episodes_test/val\nCHECKPOINT_PATH=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/big-runs/dyn/interactive/3500285\n\n\necho ""Sampling from checkpoint: $CHECKPOINT_PATH""\n\nsrun python sample.py \\n --checkpoint ""$CHECKPOINT_PATH"" \\n --data_dir=$array_records_dir \\n --seq_len=16 \\n --batch_size=4 \\n --start_frame=1 \\n --image_height=64 \\n --image_width=64 \\n --dyna_type=maskgit",shellscript,tab +505,784531,"TERMINAL",0,0,"338",,terminal_output +506,785664,"TERMINAL",0,0,"449",,terminal_output +507,786550,"TERMINAL",0,0,"watch",,terminal_focus +508,786614,"TERMINAL",0,0,"5520",,terminal_output +509,787680,"TERMINAL",0,0,"672",,terminal_output +510,788113,"TERMINAL",0,0,"srun",,terminal_focus +511,788707,"TERMINAL",0,0,"883",,terminal_output +512,788912,"TERMINAL",0,0,"^C[?2004l\r[?2004h[?2004l\r\r\n]0;tum_cte0515@hkn0401:~/Projects/jasmine[?2004h(jasmine) [tum_cte0515@hkn0401 jasmine]$ ",,terminal_output +513,789751,"TERMINAL",0,0,"994",,terminal_output +514,789848,"TERMINAL",0,0,"g",,terminal_output +515,790792,"TERMINAL",0,0,"10205",,terminal_output +516,790805,"TERMINAL",0,0,"it",,terminal_output +517,790969,"TERMINAL",0,0," ",,terminal_output +518,791142,"TERMINAL",0,0,"s",,terminal_output +519,791834,"TERMINAL",0,0,"ta",,terminal_output +520,791860,"TERMINAL",0,0,"116",,terminal_output +521,791981,"TERMINAL",0,0,"t",,terminal_output +522,792127,"TERMINAL",0,0,"u",,terminal_output +523,792283,"TERMINAL",0,0,"s",,terminal_output +524,792359,"TERMINAL",0,0,"\r\n[?2004l\r",,terminal_output +525,792559,"TERMINAL",0,0,"On branch gt-actions\r\nYour branch is up to date with 'origin/gt-actions'.\r\n\r\n",,terminal_output +526,792683,"TERMINAL",0,0,"Last commands done (2 commands done):\r\n pick ba37453 feat: generate coinrun dataset with val split\r\n pick faadd10 feat: implemented validation loss for all three models\r\nNext commands to do (26 remaining commands):\r\n pick 9a17dbb fix: pass val data path to dataloader\r\n pick 6e69cdb fix typo in image logging\r\n (use ""git rebase --edit-todo"" to view and edit)\r\nYou are currently editing a commit while rebasing branch 'gt-actions' on 'c7522f2'.\r\n (use ""git commit --amend"" to amend the current commit)\r\n (use ""git rebase --continue"" once you are satisfied with your changes)\r\n\r\nChanges not staged for commit:\r\n (use ""git add ..."" to update what will be committed)\r\n (use ""git restore ..."" to discard changes in working directory)\r\n\tmodified: sample.py\r\n\r\nUntracked files:\r\n (use ""git add ..."" to include in what will be committed)\r\n\tdiff.diff\r\n\tinput_pipeline/generate_breakout_dataset_agent.py\r\n\tkiller.sh\r\n\tkiller_partition.sh\r\n\tlog.log\r\n\toverfit_dir.zip\r\n\trequirements-franz.txt\r\n\tsamples/\r\n\tscripts_cremers/\r\n\tslurm/\r\n\ttest.py\r\n\tutils/visualizer.py\r\n\r\nno changes added to commit (use ""git add"" and/or ""git commit -a"")\r\n]0;tum_cte0515@hkn0401:~/Projects/jasmine[?2004h(jasmine) [tum_cte0515@hkn0401 jasmine]$ ",,terminal_output +527,792890,"TERMINAL",0,0,"227",,terminal_output +528,793934,"TERMINAL",0,0,"338",,terminal_output +529,793934,"TERMINAL",0,0,"gi",,terminal_output +530,794084,"TERMINAL",0,0,"t",,terminal_output +531,794212,"TERMINAL",0,0," ",,terminal_output +532,794689,"TERMINAL",0,0,"co",,terminal_output +533,794992,"TERMINAL",0,0,"449",,terminal_output +534,795558,"TERMINAL",0,0,"m",,terminal_output 
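The ValueError recorded twice above (rows 379 and 428) is orbax refusing to restore because the user-provided restore target and the on-disk value metadata trees differ in structure. Comparing the two dumps, the second run's on-disk `vq` subtree carries `drop`/`rngs` RNG state (`count`, `key`) that the restore target's `vq` (only `codebook`) lacks. A minimal diagnostic sketch, with hypothetical helper names rather than the repo's API, that lists the offending key-paths:

```python
# Hypothetical helpers (not the repo's API) for the orbax ValueError recorded
# above: list key-paths present in one pytree but not the other, e.g. the
# extra 'vq'/'drop'/'rngs' RNG state on one side.
import jax

def tree_paths(tree):
    """Set of key-path strings for all leaves of a nested pytree."""
    flat, _ = jax.tree_util.tree_flatten_with_path(tree)
    return {jax.tree_util.keystr(path) for path, _ in flat}

def report_mismatch(restore_target, on_disk_metadata):
    target, disk = tree_paths(restore_target), tree_paths(on_disk_metadata)
    for p in sorted(disk - target):
        print("on disk, missing from restore target:", p)
    for p in sorted(target - disk):
        print("in restore target, absent on disk:", p)
```

Running such a comparison before `checkpoint_manager.restore(...)` reduces the wall-of-text `Diff(lhs=..., rhs=None)` dump to a handful of mismatched paths.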
+535,795701,"TERMINAL",0,0,"m",,terminal_output +536,795828,"TERMINAL",0,0,"i",,terminal_output +537,795892,"TERMINAL",0,0,"t",,terminal_output +538,795960,"TERMINAL",0,0," ",,terminal_output +539,796024,"TERMINAL",0,0,"5530",,terminal_output +540,796251,"TERMINAL",0,0,"a",,terminal_output +541,796511,"TERMINAL",0,0,"",,terminal_output +542,796875,"TERMINAL",0,0,"a",,terminal_output +543,797086,"TERMINAL",0,0,"661",,terminal_output +544,797202,"TERMINAL",0,0,"",,terminal_output +545,797565,"TERMINAL",0,0,"-",,terminal_output +546,797690,"TERMINAL",0,0,"a",,terminal_output +547,797802,"TERMINAL",0,0,"m",,terminal_output +548,797867,"TERMINAL",0,0," ",,terminal_output +549,798110,"TERMINAL",0,0,"""",,terminal_output +550,798125,"TERMINAL",0,0,"772",,terminal_output +551,798891,"TERMINAL",0,0,"a",,terminal_output +552,799175,"TERMINAL",0,0,"883",,terminal_output +553,799378,"TERMINAL",0,0,"",,terminal_output +554,799768,"TERMINAL",0,0,"n",,terminal_output +555,799908,"TERMINAL",0,0,"i",,terminal_output +556,799975,"TERMINAL",0,0,"t",,terminal_output +557,800102,"TERMINAL",0,0," ",,terminal_output +558,800215,"TERMINAL",0,0,"994",,terminal_output +559,801099,"TERMINAL",0,0,"",,terminal_output +560,801269,"TERMINAL",0,0,"20305",,terminal_output +561,801682,"TERMINAL",0,0,"",,terminal_output +562,801839,"TERMINAL",0,0,"",,terminal_output +563,802261,"TERMINAL",0,0,"",,terminal_output +564,802393,"TERMINAL",0,0,"116",,terminal_output +565,803393,"TERMINAL",0,0,"227",,terminal_output +566,803752,"TERMINAL",0,0,"r",,terminal_output +567,803919,"TERMINAL",0,0,"e",,terminal_output +568,804073,"TERMINAL",0,0,"s",,terminal_output +569,804184,"TERMINAL",0,0,"h",,terminal_output +570,804250,"TERMINAL",0,0,"a",,terminal_output +571,804385,"TERMINAL",0,0,"338",,terminal_output +572,804519,"TERMINAL",0,0,"p",,terminal_output +573,804680,"TERMINAL",0,0,"e",,terminal_output +574,804787,"TERMINAL",0,0," ",,terminal_output +575,804920,"TERMINAL",0,0,"in",,terminal_output +576,805070,"TERMINAL",0,0,"s",,terminal_output +577,805279,"TERMINAL",0,0,"t",,terminal_output +578,805458,"TERMINAL",0,0,"e",,terminal_output +579,805489,"TERMINAL",0,0,"449",,terminal_output +580,805587,"TERMINAL",0,0,"a",,terminal_output +581,805756,"TERMINAL",0,0,"d ",,terminal_output +582,805809,"TERMINAL",0,0,"o",,terminal_output +583,806012,"TERMINAL",0,0,"f",,terminal_output +584,806241,"TERMINAL",0,0," ",,terminal_output +585,806498,"TERMINAL",0,0,"5540",,terminal_output +586,806562,"TERMINAL",0,0,"e",,terminal_output +587,806825,"TERMINAL",0,0,"x",,terminal_output +588,807160,"TERMINAL",0,0,"p",,terminal_output +589,807226,"TERMINAL",0,0,"a",,terminal_output +590,807440,"TERMINAL",0,0,"n",,terminal_output +591,807565,"TERMINAL",0,0,"661",,terminal_output +592,807592,"TERMINAL",0,0,"d",,terminal_output +593,807908,"TERMINAL",0,0,"_",,terminal_output +594,808113,"TERMINAL",0,0,"d",,terminal_output +595,808220,"TERMINAL",0,0,"i",,terminal_output +596,808274,"TERMINAL",0,0,"m",,terminal_output +597,808381,"TERMINAL",0,0,"s",,terminal_output +598,808615,"TERMINAL",0,0,"772",,terminal_output +599,808679,"TERMINAL",0,0,"""",,terminal_output +600,809014,"TERMINAL",0,0,"\r\n[?2004l\r",,terminal_output +601,809672,"TERMINAL",0,0,"883",,terminal_output +602,809783,"TERMINAL",0,0,"black....................................................................",,terminal_output +603,811173,"TERMINAL",0,0,"Passed\r\n",,terminal_output +604,811394,"TERMINAL",0,0,"[gt-actions 52887b0] reshape instead of expand_dims\r\n 1 file changed, 1 insertion(+), 1 
deletion(-)\r\n]0;tum_cte0515@hkn0401:~/Projects/jasmine[?2004h(jasmine) [tum_cte0515@hkn0401 jasmine]$ ",,terminal_output +605,811884,"TERMINAL",0,0,"gi",,terminal_output +606,812056,"TERMINAL",0,0,"t",,terminal_output +607,812121,"TERMINAL",0,0," ",,terminal_output +608,812250,"TERMINAL",0,0,"p",,terminal_output +609,812424,"TERMINAL",0,0,"30416",,terminal_output +610,812472,"TERMINAL",0,0,"u",,terminal_output +611,812607,"TERMINAL",0,0,"s",,terminal_output +612,812669,"TERMINAL",0,0,"h",,terminal_output +613,812805,"TERMINAL",0,0,"\r\n[?2004l\r",,terminal_output +614,813454,"TERMINAL",0,0,"227",,terminal_output +615,814503,"TERMINAL",0,0,"338",,terminal_output +616,814859,"TERMINAL",0,0,"Enumerating objects: 5, done.\r\nCounting objects: 20% (1/5)\rCounting objects: 40% (2/5)\rCounting objects: 60% (3/5)\rCounting objects: 80% (4/5)\rCounting objects: 100% (5/5)\rCounting objects: 100% (5/5), done.\r\nDelta compression using up to 152 threads\r\nCompressing objects: 33% (1/3)\rCompressing objects: 66% (2/3)\rCompressing objects: 100% (3/3)\rCompressing objects: 100% (3/3), done.\r\nWriting objects: 33% (1/3)\rWriting objects: 66% (2/3)\rWriting objects: 100% (3/3)\rWriting objects: 100% (3/3), 359 bytes | 359.00 KiB/s, done.\r\nTotal 3 (delta 2), reused 0 (delta 0), pack-reused 0\r\n",,terminal_output +617,814923,"TERMINAL",0,0,"remote: Resolving deltas: 0% (0/2)\rremote: Resolving deltas: 50% (1/2)\rremote: Resolving deltas: 100% (2/2)\rremote: Resolving deltas: 100% (2/2), completed with 2 local objects.\r\n",,terminal_output +618,815295,"TERMINAL",0,0,"To github.com:p-doom/jasmine.git\r\n 33b1895..52887b0 gt-actions -> gt-actions\r\n]0;tum_cte0515@hkn0401:~/Projects/jasmine[?2004h(jasmine) [tum_cte0515@hkn0401 jasmine]$ ",,terminal_output +619,815590,"TERMINAL",0,0,"449",,terminal_output +620,816247,"TERMINAL",0,0,"g",,terminal_output +621,816596,"TERMINAL",0,0,"5550",,terminal_output +622,816630,"TERMINAL",0,0,"it",,terminal_output +623,816854,"TERMINAL",0,0," ",,terminal_output +624,816977,"TERMINAL",0,0,"c",,terminal_output +625,817085,"TERMINAL",0,0,"h",,terminal_output +626,817150,"TERMINAL",0,0,"e",,terminal_output +627,817285,"TERMINAL",0,0,"c",,terminal_output +628,817352,"TERMINAL",0,0,"k",,terminal_output +629,817529,"TERMINAL",0,0,"o",,terminal_output +630,817643,"TERMINAL",0,0,"u",,terminal_output +631,817655,"TERMINAL",0,0,"672",,terminal_output +632,817726,"TERMINAL",0,0,"t",,terminal_output +633,817791,"TERMINAL",0,0," ",,terminal_output +634,818683,"TERMINAL",0,0,"883",,terminal_output +635,819728,"TERMINAL",0,0,"994",,terminal_output +636,820803,"TERMINAL",0,0,"40505",,terminal_output +637,821927,"TERMINAL",0,0,"116",,terminal_output +638,822054,"TERMINAL",0,0,"e",,terminal_output +639,822360,"TERMINAL",0,0,"",,terminal_output +640,822878,"TERMINAL",0,0,"227",,terminal_output +641,823431,"TERMINAL",0,0,"g",,terminal_output +642,823566,"TERMINAL",0,0,"e",,terminal_output +643,823769,"TERMINAL",0,0,"e",,terminal_output +644,823915,"TERMINAL",0,0,"338",,terminal_output +645,824112,"TERMINAL",0,0,"",,terminal_output +646,824371,"TERMINAL",0,0,"ne",,terminal_output +647,824680,"TERMINAL",0,0,"r",,terminal_output +648,824918,"TERMINAL",0,0,"a",,terminal_output +649,824996,"TERMINAL",0,0,"449",,terminal_output +650,825063,"TERMINAL",0,0,"t",,terminal_output +651,825158,"TERMINAL",0,0,"e",,terminal_output +652,826020,"TERMINAL",0,0,"551:00",,terminal_output +653,826116,"TERMINAL",0,0,"-",,terminal_output +654,826231,"TERMINAL",0,0,"a",,terminal_output 
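The commit recorded here, "reshape instead of expand_dims" (passing the black pre-commit hook before being pushed to p-doom/jasmine), swaps one singleton-axis idiom for another; the two produce identical arrays. A short sketch with illustrative shapes:

```python
import jax.numpy as jnp

x = jnp.ones((4, 16))
a = jnp.expand_dims(x, axis=1)           # insert singleton axis -> (4, 1, 16)
b = jnp.reshape(x, (x.shape[0], 1, -1))  # same result via reshape
assert a.shape == b.shape == (4, 1, 16)
```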
+655,826521,"TERMINAL",0,0,"t",,terminal_output +656,826741,"TERMINAL",0,0,"a",,terminal_output +657,826876,"TERMINAL",0,0,"r",,terminal_output +658,827000,"TERMINAL",0,0,"i",,terminal_output +659,827067,"TERMINAL",0,0,"661",,terminal_output +660,827225,"TERMINAL",0,0,"-",,terminal_output +661,827524,"TERMINAL",0,0,"b",,terminal_output +662,827590,"TERMINAL",0,0,"r",,terminal_output +663,827754,"TERMINAL",0,0,"e",,terminal_output +664,827941,"TERMINAL",0,0,"a",,terminal_output +665,828003,"TERMINAL",0,0,"k",,terminal_output +666,828114,"TERMINAL",0,0,"772",,terminal_output +667,828245,"TERMINAL",0,0,"p",,terminal_output +668,828861,"TERMINAL",0,0,"",,terminal_output +669,829047,"TERMINAL",0,0,"o",,terminal_output +670,829161,"TERMINAL",0,0,"883",,terminal_output +671,829252,"TERMINAL",0,0,"u",,terminal_output +672,829403,"TERMINAL",0,0,"t",,terminal_output +673,829469,"TERMINAL",0,0,"-",,terminal_output +674,829690,"TERMINAL",0,0,"d",,terminal_output +675,829870,"TERMINAL",0,0,"a",,terminal_output +676,829935,"TERMINAL",0,0,"t",,terminal_output +677,830012,"TERMINAL",0,0,"a",,terminal_output +678,830122,"TERMINAL",0,0,"s",,terminal_output +679,830197,"TERMINAL",0,0,"994",,terminal_output +680,830263,"TERMINAL",0,0,"e",,terminal_output +681,830328,"TERMINAL",0,0,"t",,terminal_output +682,830626,"TERMINAL",0,0,"\r\n[?2004l\rerror: pathspec 'generate-atari-breakout-dataset' did not match any file(s) known to git\r\n]0;tum_cte0515@hkn0401:~/Projects/jasmine[?2004h(jasmine) [tum_cte0515@hkn0401 jasmine]$ ",,terminal_output +683,831222,"TERMINAL",0,0,"504:005",,terminal_output +684,831883,"TERMINAL",0,0,"gi",,terminal_output +685,832011,"TERMINAL",0,0,"t",,terminal_output +686,832065,"TERMINAL",0,0," ",,terminal_output +687,832274,"TERMINAL",0,0,"116",,terminal_output +688,832524,"TERMINAL",0,0,"b",,terminal_output +689,832599,"TERMINAL",0,0,"r",,terminal_output +690,832687,"TERMINAL",0,0,"a",,terminal_output +691,832795,"TERMINAL",0,0,"n",,terminal_output +692,832876,"TERMINAL",0,0,"c",,terminal_output +693,832962,"TERMINAL",0,0,"h",,terminal_output +694,833112,"TERMINAL",0,0,"\r\n[?2004l\r[?1h=\r action-mapper\r\n add-wandb-name-and-tags\r\n before-nnx\r\n causal-mem-reduce\r\n causal-spatiotemporal-kv-cache\r\n causal-st-transformer\r\n causal-transformer-dynamics-model\r\n causal-transformer-nnx-no-kv-cache\r\n coinrun-gt-actions\r\n convert-to-jax-array-in-iter\r\n correct-batched-sampling\r\n dev\r\n dont-let-tf-see-gpu\r\n feat/darkness-filter\r\n feat/explicit-image-dims\r\n fix-action-padding-lam-future-information-access\r\n fix-sampling\r\n fix-transformer-forwardpass\r\n fix/spatiotemporal-pe-once-in-STTransformer\r\n generate-minatar-breakout-dataset\r\n grad-norm-log-and-clip\r\n grain-dataloader\r\n* gt-actions\r\n input_pipeline/add-npy2array_record\r\n logging-variants\r\n lr-schedules\r\n main\r\n maskgit-different-maskprob-per-sample\r\n maskgit-sampling-iterative-unmasking-fix\r\n metrics-logging-for-dynamics-model\r\n monkey-patch\r\n new-arch-sampling\r\n preprocess_video\r\n refactor-full-frame-val-loss\r\n refactor-tmp\r\n remove-restore-branching\r\n revised-dataloader\r\n runner\r\n runner-grain\r\n sample-ali-branch\r\n sample-from-different-topologies\r\n sampling-startframe-indexing-fix\r\n speedup-tfrecord-preprocessing\r\n train_lam_coinrun_ablation_wsd_3e-6_28747\r\n:",,terminal_output +695,833313,"TERMINAL",0,0,"227",,terminal_output +696,834357,"TERMINAL",0,0,"338",,terminal_output +697,835412,"TERMINAL",0,0,"449",,terminal_output 
+698,836573,"TERMINAL",0,0,"5510",,terminal_output +699,837024,"TERMINAL",0,0,"\r[?1l>]0;tum_cte0515@hkn0401:~/Projects/jasmine[?2004h(jasmine) [tum_cte0515@hkn0401 jasmine]$ ",,terminal_output +700,837494,"TERMINAL",0,0,"g",,terminal_output +701,837594,"TERMINAL",0,0,"661",,terminal_output +702,837798,"TERMINAL",0,0,"i",,terminal_output +703,837951,"TERMINAL",0,0,"t",,terminal_output +704,838016,"TERMINAL",0,0," ",,terminal_output +705,838441,"TERMINAL",0,0,"ch",,terminal_output +706,838588,"TERMINAL",0,0,"e",,terminal_output +707,838589,"TERMINAL",0,0,"772",,terminal_output +708,838697,"TERMINAL",0,0,"c",,terminal_output +709,838765,"TERMINAL",0,0,"k",,terminal_output +710,838928,"TERMINAL",0,0,"o",,terminal_output +711,839036,"TERMINAL",0,0,"u",,terminal_output +712,839100,"TERMINAL",0,0,"t",,terminal_output +713,839207,"TERMINAL",0,0," ",,terminal_output +714,839624,"TERMINAL",0,0,"generate-minatar-breakout-dataset",,terminal_output +715,839625,"TERMINAL",0,0,"883",,terminal_output +716,839921,"TERMINAL",0,0,"generate-minatar-breakout-dataset\r\n[?2004l\r",,terminal_output +717,840031,"TERMINAL",0,0,"Switched to branch 'generate-minatar-breakout-dataset'\r\n",,terminal_output +718,840155,"TERMINAL",0,0,"Your branch is ahead of 'origin/generate-minatar-breakout-dataset' by 10 commits.\r\n (use ""git push"" to publish your local commits)\r\n]0;tum_cte0515@hkn0401:~/Projects/jasmine[?2004h(jasmine) [tum_cte0515@hkn0401 jasmine]$ ",,terminal_output +719,840379,"",0,0,"Switched from branch 'gt-actions' to 'generate-minatar-breakout-dataset'",,git_branch_checkout +720,840658,"TERMINAL",0,0,"9105",,terminal_output +721,841717,"TERMINAL",0,0,"5:0116",,terminal_output +722,842902,"TERMINAL",0,0,"227",,terminal_output +723,843771,"TERMINAL",0,0,"338",,terminal_output +724,844862,"TERMINAL",0,0,"449",,terminal_output +725,845889,"TERMINAL",0,0,"5520",,terminal_output +726,846901,"TERMINAL",0,0,"661",,terminal_output +727,847936,"TERMINAL",0,0,"772",,terminal_output +728,849060,"TERMINAL",0,0,"883",,terminal_output +729,850085,"TERMINAL",0,0,"994",,terminal_output +730,851108,"TERMINAL",0,0,"10205",,terminal_output +731,852135,"TERMINAL",0,0,"116",,terminal_output +732,853278,"TERMINAL",0,0,"227",,terminal_output +733,854222,"TERMINAL",0,0,"338",,terminal_output +734,855308,"TERMINAL",0,0,"449",,terminal_output +735,856386,"TERMINAL",0,0,"5530",,terminal_output +736,857606,"TERMINAL",0,0,"661",,terminal_output +737,858410,"TERMINAL",0,0,"772",,terminal_output +738,859516,"TERMINAL",0,0,"883",,terminal_output +739,860496,"TERMINAL",0,0,"994",,terminal_output +740,861539,"TERMINAL",0,0,"20305",,terminal_output +741,862590,"TERMINAL",0,0,"116",,terminal_output +742,863707,"TERMINAL",0,0,"227",,terminal_output +743,864713,"TERMINAL",0,0,"349",,terminal_output +744,865723,"TERMINAL",0,0,"5540",,terminal_output +745,866769,"TERMINAL",0,0,"661",,terminal_output +746,867878,"TERMINAL",0,0,"772",,terminal_output +747,868924,"TERMINAL",0,0,"883",,terminal_output +748,869904,"TERMINAL",0,0,"994",,terminal_output +749,870972,"TERMINAL",0,0,"30405",,terminal_output +750,872100,"TERMINAL",0,0,"116",,terminal_output +751,873037,"TERMINAL",0,0,"227",,terminal_output +752,874085,"TERMINAL",0,0,"338",,terminal_output +753,875174,"TERMINAL",0,0,"449",,terminal_output +754,876298,"TERMINAL",0,0,"5550",,terminal_output +755,877248,"TERMINAL",0,0,"661",,terminal_output +756,878327,"TERMINAL",0,0,"772",,terminal_output +757,879369,"TERMINAL",0,0,"883",,terminal_output +758,880398,"TERMINAL",0,0,"994",,terminal_output 
+759,881442,"TERMINAL",0,0,"40505",,terminal_output +760,882482,"TERMINAL",0,0,"116",,terminal_output +761,883571,"TERMINAL",0,0,"227",,terminal_output +762,884594,"TERMINAL",0,0,"338",,terminal_output +763,885641,"TERMINAL",0,0,"449",,terminal_output +764,886743,"TERMINAL",0,0,"562:01",,terminal_output +765,887768,"TERMINAL",0,0,"772",,terminal_output +766,888759,"TERMINAL",0,0,"883",,terminal_output +767,889829,"TERMINAL",0,0,"994",,terminal_output +768,890948,"TERMINAL",0,0,"505:005",,terminal_output +769,891968,"TERMINAL",0,0,"116",,terminal_output +770,892956,"TERMINAL",0,0,"227",,terminal_output +771,894038,"TERMINAL",0,0,"338",,terminal_output +772,895142,"TERMINAL",0,0,"449",,terminal_output +773,896167,"TERMINAL",0,0,"5510",,terminal_output +774,897190,"TERMINAL",0,0,"661",,terminal_output +775,898213,"TERMINAL",0,0,"772",,terminal_output +776,899257,"TERMINAL",0,0,"883",,terminal_output +777,900267,"TERMINAL",0,0,"994",,terminal_output +778,901388,"TERMINAL",0,0,"6:00105",,terminal_output +779,902415,"TERMINAL",0,0,"116",,terminal_output +780,903434,"TERMINAL",0,0,"227",,terminal_output +781,904459,"TERMINAL",0,0,"338",,terminal_output +782,905584,"TERMINAL",0,0,"449",,terminal_output +783,906557,"TERMINAL",0,0,"5520",,terminal_output +784,907641,"TERMINAL",0,0,"661",,terminal_output +785,908659,"TERMINAL",0,0,"783",,terminal_output +786,909727,"TERMINAL",0,0,"994",,terminal_output +787,910731,"TERMINAL",0,0,"10205",,terminal_output +788,911776,"TERMINAL",0,0,"116",,terminal_output +789,912857,"TERMINAL",0,0,"227",,terminal_output +790,913880,"TERMINAL",0,0,"338",,terminal_output +791,915180,"TERMINAL",0,0,"449",,terminal_output +792,916034,"TERMINAL",0,0,"5530",,terminal_output +793,917053,"TERMINAL",0,0,"661",,terminal_output +794,918043,"TERMINAL",0,0,"772",,terminal_output +795,919102,"TERMINAL",0,0,"883",,terminal_output +796,920230,"TERMINAL",0,0,"994",,terminal_output +797,921318,"TERMINAL",0,0,"20305",,terminal_output +798,922309,"TERMINAL",0,0,"116",,terminal_output +799,923404,"TERMINAL",0,0,"227",,terminal_output +800,924334,"TERMINAL",0,0,"338",,terminal_output +801,925361,"TERMINAL",0,0,"449",,terminal_output +802,926414,"TERMINAL",0,0,"5540",,terminal_output +803,927463,"TERMINAL",0,0,"661",,terminal_output +804,928510,"TERMINAL",0,0,"772",,terminal_output +805,929541,"TERMINAL",0,0,"883",,terminal_output +806,930693,"TERMINAL",0,0,"994",,terminal_output +807,931698,"TERMINAL",0,0,"30405",,terminal_output +808,934260,"TERMINAL",0,0,"238",,terminal_output +809,935282,"TERMINAL",0,0,"449",,terminal_output +810,936307,"TERMINAL",0,0,"5550",,terminal_output +811,937463,"TERMINAL",0,0,"661",,terminal_output +812,938460,"TERMINAL",0,0,"772",,terminal_output +813,939482,"TERMINAL",0,0,"883",,terminal_output +814,940461,"TERMINAL",0,0,"994",,terminal_output +815,941554,"TERMINAL",0,0,"40505",,terminal_output +816,942547,"TERMINAL",0,0,"116",,terminal_output +817,943586,"TERMINAL",0,0,"227",,terminal_output +818,944738,"TERMINAL",0,0,"349",,terminal_output +819,945726,"TERMINAL",0,0,"553:00",,terminal_output +820,946854,"TERMINAL",0,0,"661",,terminal_output +821,947878,"TERMINAL",0,0,"772",,terminal_output +822,948902,"TERMINAL",0,0,"883",,terminal_output +823,949925,"TERMINAL",0,0,"994",,terminal_output +824,950953,"TERMINAL",0,0,"506:005",,terminal_output +825,951972,"TERMINAL",0,0,"116",,terminal_output +826,953000,"TERMINAL",0,0,"227",,terminal_output +827,954049,"TERMINAL",0,0,"338",,terminal_output +828,955149,"TERMINAL",0,0,"449",,terminal_output 
+829,956139,"TERMINAL",0,0,"5510",,terminal_output +830,957177,"TERMINAL",0,0,"661",,terminal_output +831,958430,"TERMINAL",0,0,"772",,terminal_output +832,959362,"TERMINAL",0,0,"883",,terminal_output +833,960415,"TERMINAL",0,0,"994",,terminal_output +834,961398,"TERMINAL",0,0,"7:00105",,terminal_output +835,962427,"TERMINAL",0,0,"116",,terminal_output +836,963099,"TERMINAL",0,0,"g",,terminal_output +837,963166,"TERMINAL",0,0,"i",,terminal_output +838,963231,"TERMINAL",0,0,"t",,terminal_output +839,963362,"TERMINAL",0,0," ",,terminal_output +840,963473,"TERMINAL",0,0,"227",,terminal_output +841,964426,"TERMINAL",0,0,"s",,terminal_output +842,964507,"TERMINAL",0,0,"338",,terminal_output +843,964594,"TERMINAL",0,0,"t",,terminal_output +844,964670,"TERMINAL",0,0,"a",,terminal_output +845,964737,"TERMINAL",0,0,"s",,terminal_output +846,964844,"TERMINAL",0,0,"h",,terminal_output +847,964970,"TERMINAL",0,0," ",,terminal_output +848,965127,"TERMINAL",0,0,"p",,terminal_output +849,965573,"TERMINAL",0,0,"o",,terminal_output +850,965585,"TERMINAL",0,0,"449",,terminal_output +851,965638,"TERMINAL",0,0,"p",,terminal_output +852,965872,"TERMINAL",0,0,"\r\n[?2004l\r",,terminal_output +853,966055,"TERMINAL",0,0,"On branch generate-minatar-breakout-dataset\r\nYour branch is ahead of 'origin/generate-minatar-breakout-dataset' by 10 commits.\r\n (use ""git push"" to publish your local commits)\r\n\r\nLast commands done (2 commands done):\r\n pick ba37453 feat: generate coinrun dataset with val split\r\n pick faadd10 feat: implemented validation loss for all three models\r\nNext commands to do (26 remaining commands):\r\n pick 9a17dbb fix: pass val data path to dataloader\r\n pick 6e69cdb fix typo in image logging\r\n (use ""git rebase --edit-todo"" to view and edit)\r\nYou are currently editing a commit while rebasing branch 'gt-actions' on 'c7522f2'.\r\n (use ""git commit --amend"" to amend the current commit)\r\n (use ""git rebase --continue"" once you are satisfied with your changes)\r\n\r\nChanges not staged for commit:\r\n (use ""git add ..."" to update what will be committed)\r\n (use ""git restore ..."" to discard changes in working directory)\r\n\tmodified: genie.py\r\n\tmodified: input_pipeline/generate_breakout_dataset.py\r\n\tmodified: sample.py\r\n\tmodified: train_dynamics.py\r\n\tmodified: train_lam.py\r\n\tmodified: train_tokenizer.py\r\n\r\nUntracked files:\r\n (use ""git add ..."" to include in what will be committed)\r\n\tdiff.diff\r\n\tinput_pipeline/generate_breakout_dataset_agent.py\r\n\tkiller.sh\r\n\tkiller_partition.sh\r\n\tlog.log\r\n\toverfit_dir.zip\r\n\trequirements-franz.txt\r\n\tsamples/\r\n\tscripts_cremers/\r\n\tslurm/\r\n\ttest.py\r\n\tutils/visualizer.py\r\n\r\nno changes added to commit (use ""git add"" and/or ""git commit -a"")\r\nDropped refs/stash@{0} (a79570b624c9227ee38981ce258752964f963209)\r\n]0;tum_cte0515@hkn0401:~/Projects/jasmine[?2004h(jasmine) [tum_cte0515@hkn0401 jasmine]$ ",,terminal_output +854,966610,"TERMINAL",0,0,"5520",,terminal_output +855,967745,"TERMINAL",0,0,"672",,terminal_output +856,968767,"TERMINAL",0,0,"883",,terminal_output +857,969731,"TERMINAL",0,0,"994",,terminal_output +858,970106,"TERMINAL",0,0,"gi",,terminal_output +859,970220,"TERMINAL",0,0,"t",,terminal_output +860,970297,"TERMINAL",0,0," ",,terminal_output +861,970390,"TERMINAL",0,0,"m",,terminal_output +862,970526,"TERMINAL",0,0,"er",,terminal_output +863,970641,"TERMINAL",0,0,"g",,terminal_output +864,970720,"TERMINAL",0,0,"e",,terminal_output 
+865,970816,"TERMINAL",0,0,"10205",,terminal_output +866,970830,"TERMINAL",0,0," ",,terminal_output +867,971838,"TERMINAL",0,0,"116",,terminal_output +868,972879,"TERMINAL",0,0,"227",,terminal_output +869,972973,"TERMINAL",0,0,"g",,terminal_output +870,973122,"TERMINAL",0,0,"t",,terminal_output +871,973300,"TERMINAL",0,0,"-",,terminal_output +872,973766,"TERMINAL",0,0,"a",,terminal_output +873,973898,"TERMINAL",0,0,"c",,terminal_output +874,973952,"TERMINAL",0,0,"338",,terminal_output +875,974141,"TERMINAL",0,0,"t",,terminal_output +876,974215,"TERMINAL",0,0,"i",,terminal_output +877,974317,"TERMINAL",0,0,"o",,terminal_output +878,974383,"TERMINAL",0,0,"n",,terminal_output +879,974497,"TERMINAL",0,0,"s",,terminal_output +880,975013,"TERMINAL",0,0,"449",,terminal_output +881,976039,"TERMINAL",0,0,"5530",,terminal_output +882,976947,"TERMINAL",0,0,"",,terminal_output +883,977053,"TERMINAL",0,0,"661",,terminal_output +884,977073,"TERMINAL",0,0,"",,terminal_output +885,977296,"TERMINAL",0,0,"",,terminal_output +886,978087,"TERMINAL",0,0,"772",,terminal_output +887,979165,"TERMINAL",0,0,"883",,terminal_output +888,979186,"TERMINAL",0,0,"g",,terminal_output +889,979238,"TERMINAL",0,0,"i",,terminal_output +890,979387,"TERMINAL",0,0,"t",,terminal_output +891,979497,"TERMINAL",0,0," ",,terminal_output +892,980209,"TERMINAL",0,0,"c",,terminal_output +893,980210,"TERMINAL",0,0,"994",,terminal_output +894,980261,"TERMINAL",0,0,"o",,terminal_output +895,980427,"TERMINAL",0,0,"m",,terminal_output +896,980548,"TERMINAL",0,0,"m",,terminal_output +897,980793,"TERMINAL",0,0,"it",,terminal_output +898,980908,"TERMINAL",0,0," ",,terminal_output +899,981268,"TERMINAL",0,0,"20305",,terminal_output +900,981524,"TERMINAL",0,0,"",,terminal_output +901,981761,"TERMINAL",0,0,"",,terminal_output +902,981828,"TERMINAL",0,0,"",,terminal_output +903,982297,"TERMINAL",0,0,"116",,terminal_output +904,982492,"TERMINAL",0,0,"a",,terminal_output +905,982604,"TERMINAL",0,0,"d",,terminal_output +906,982710,"TERMINAL",0,0," d",,terminal_output +907,983251,"TERMINAL",0,0,"g",,terminal_output +908,983316,"TERMINAL",0,0,"227",,terminal_output +909,983345,"TERMINAL",0,0,"e",,terminal_output +910,983572,"TERMINAL",0,0,"n",,terminal_output +911,984123,"TERMINAL",0,0,"",,terminal_output +912,984281,"TERMINAL",0,0,"",,terminal_output +913,984355,"TERMINAL",0,0,"338",,terminal_output +914,984408,"TERMINAL",0,0,"",,terminal_output +915,984565,"TERMINAL",0,0,"",,terminal_output +916,984680,"TERMINAL",0,0,"",,terminal_output +917,984835,"TERMINAL",0,0,"",,terminal_output +918,985410,"TERMINAL",0,0,"449",,terminal_output +919,986484,"TERMINAL",0,0,"",,terminal_output +920,986484,"TERMINAL",0,0,"5540",,terminal_output +921,986596,"TERMINAL",0,0,"",,terminal_output +922,986778,"TERMINAL",0,0,"",,terminal_output +923,986956,"TERMINAL",0,0,"",,terminal_output +924,987270,"TERMINAL",0,0,"g",,terminal_output +925,987338,"TERMINAL",0,0,"i",,terminal_output +926,987412,"TERMINAL",0,0,"t",,terminal_output +927,987560,"TERMINAL",0,0," ",,terminal_output +928,987561,"TERMINAL",0,0,"661",,terminal_output +929,987647,"TERMINAL",0,0,"d",,terminal_output +930,987712,"TERMINAL",0,0,"i",,terminal_output +931,987880,"TERMINAL",0,0,"f",,terminal_output +932,988085,"TERMINAL",0,0,"f\r\n[?2004l\r[?1h=\rdiff --git a/genie.py b/genie.py\r\nindex bcb23e6..7fa3f40 100644\r\n--- a/genie.py\r\n+++ b/genie.py\r\n@@ -263,7 +263,7 @@ class Genie(nnx.Module):\r\n latent_actions_BT1L = self.action_embed(batch[""actions""]).reshape(\r\n *batch[""actions""].shape[:2], 1, 
self.latent_action_dim\r\n )\r\n- latent_actions_BTm11L = latent_actions_BT1L[:, 1:]\r\n+ latent_actions_BTm11L = latent_actions_BT1L[:, :-1]\r\n action_tokens_EL = latent_actions_BTm11L.reshape(-1, self.latent_action_dim)\r\n else:\r\n assert self.lam is not None\r\n@@ -452,7 +452,7 @@ class Genie(nnx.Module):\r\n latent_actions_BT1L = self.action_embed(batch[""actions""]).reshape(\r\n *batch[""actions""].shape[:2], 1, self.latent_action_dim\r\n )\r\n- latent_actions_BTm11L = latent_actions_BT1L[:, 1:]\r\n+ latent_actions_BTm11L = latent_actions_BT1L[:, :-1]\r\n action_tokens_EL = latent_actions_BTm11L.reshape(-1, self.latent_action_dim)\r\n else:\r\n assert self.lam is not None\r\ndiff --git a/input_pipeline/generate_breakout_dataset.py b/input_pipeline/generate_breakout_dataset.py\r\nindex 88928b9..4aa5586 100644\r\n--- a/input_pipeline/generate_breakout_dataset.py\r\n+++ b/input_pipeline/generate_breakout_dataset.py\r\n@@ -117,8 +117,8 @@ def generate_episodes(num_episodes: int, split: str):\r\n obs_chunks.extend(obs_chunks_data)\r\n act_chunks.extend(act_chunks_data)\r\n \r\n- ep_metadata, obs_chunks, file_idx, act_chunks = save_chunks(\r\n- obs_chunks, file_idx, args.chunks_per_file, output_dir_split, act_chunks\r\n+ ep_metadata, file_idx, obs_chunks, act_chunks = save_chunks(\r\n+ file_idx, args.chunks_per_file, output_dir_split, obs_chunks, act_chunks\r\n )\r\n episode_metadata.extend(ep_metadata)\r\n \r\ndiff --git a/sample.py b/sample.py\r\nindex b5d5a22..6cf11b4 100644\r\n--- a/sample.py\r\n+++ b/sample.py\r\n@@ -237,7 +237,7 @@ if __name__ == ""__main__"":\r\n if action_batch_E is not None:\r\n action_batch_BSm11 = jnp.reshape(action_batch_E, (B, S - 1, 1))\r\n:",,terminal_output +933,988633,"TERMINAL",0,0,"772",,terminal_output +934,989588,"TERMINAL",0,0,"883",,terminal_output +935,989663,"TERMINAL",0,0,"\r else:\r\n:",,terminal_output +936,990284,"TERMINAL",0,0,"\r- action_batch_BSm11 = jnp.expand_dims(batch[""actions""][:, 1:], -1)\r\n:\r+ action_batch_BSm11 = jnp.expand_dims(batch[""actions""][:, :-1], -1)\r\n:\r for t, img in enumerate(imgs[1:]):\r\n:\r d = ImageDraw.Draw(img)\r\n:\r for row in range(B):\r\n:\rdiff --git a/train_dynamics.py b/train_dynamics.py\r\n:\rindex b34ba98..ad89aae 100644\r\n:",,terminal_output +937,990494,"TERMINAL",0,0,"\r--- a/train_dynamics.py\r\n:",,terminal_output +938,990629,"TERMINAL",0,0,"994",,terminal_output +939,990692,"TERMINAL",0,0,"\r+++ b/train_dynamics.py\r\n:",,terminal_output +940,990831,"TERMINAL",0,0,"\r@@ -482,12 +482,14 @@ def main(args: Args) -> None:\r\n:",,terminal_output +941,990955,"TERMINAL",0,0,"\r \r\n:",,terminal_output +942,991137,"TERMINAL",0,0,"\r # --- Evaluate full frame prediction (sampling) ---\r\n:",,terminal_output +943,991706,"TERMINAL",0,0,"30416",,terminal_output +944,992131,"TERMINAL",0,0,"\r if args.eval_full_frame:\r\n:",,terminal_output +945,992729,"TERMINAL",0,0,"227",,terminal_output +946,993855,"TERMINAL",0,0,"338",,terminal_output +947,994804,"TERMINAL",0,0,"449",,terminal_output +948,995334,"TERMINAL",0,0,"\r- lam_indices = genie.vq_encode(inputs, training=False)\r\n:",,terminal_output +949,995854,"TERMINAL",0,0,"5550",,terminal_output +950,996163,"TERMINAL",0,0,"\r tokenizer_outputs = genie.tokenizer.vq_encode(\r\n:\r inputs[""videos""], training=False\r\n:\r )\r\n:\r tokens_full_frame = tokenizer_outputs[""indices""]\r\n:\r- inputs[""latent_actions""] = lam_indices\r\n:\r+ lam_indices = None\r\n:\r+ if not args.use_gt_actions:\r\n:\r+ lam_indices = genie.vq_encode(inputs, training=False)\r\n:\r+ 
inputs[""latent_actions""] = lam_indices\r\n:\r gt = jnp.asarray(inputs[""videos""], dtype=jnp.float32) / 255.0\r\n:\r inputs[""videos""] = gt[:, :-1].astype(\r\n:\r args.dtype\r\n:\r@@ -504,8 +506,10 @@ def main(args: Args) -> None:\r\n:\r ""token_logits"": logits_full_frame,\r\n:",,terminal_output +951,996518,"TERMINAL",0,0,"\r ""video_tokens"": tokens_full_frame,\r\n:",,terminal_output +952,996894,"TERMINAL",0,0,"661",,terminal_output +953,997166,"TERMINAL",0,0,"\r ""mask"": jnp.zeros_like(tokens_full_frame).at[:, -1].set(True),\r\n:\r- ""lam_indices"": lam_indices,\r\n:\r }\r\n:\r+ if lam_indices is not None:\r\n:\r+ step_outputs[""lam_indices""] = lam_indices\r\n:\r+\r\n:\r loss_full_frame, metrics_full_frame = _calculate_step_metrics(\r\n:\r step_outputs, gt, args.num_actions, args.num_patch_latents\r\n:",,terminal_output +954,997926,"TERMINAL",0,0,"772",,terminal_output +955,998976,"TERMINAL",0,0,"883",,terminal_output +956,999225,"TERMINAL",0,0,"\r )\r\n:",,terminal_output +957,999561,"TERMINAL",0,0,"\rdiff --git a/train_lam.py b/train_lam.py\r\n:",,terminal_output +958,1000029,"TERMINAL",0,0,"994",,terminal_output +959,1000171,"TERMINAL",0,0,"\rindex 7fe605d..f1913dd 100644\r\n:\r--- a/train_lam.py\r\n:",,terminal_output +960,1000171,"TERMINAL",0,0,"\r+++ b/train_lam.py\r\n:",,terminal_output +961,1001125,"TERMINAL",0,0,"40505",,terminal_output +962,1002149,"TERMINAL",0,0,"116",,terminal_output +963,1003174,"TERMINAL",0,0,"227",,terminal_output +964,1003380,"TERMINAL",0,0,"\r@@ -479,8 +479,8 @@ def main(args: Args) -> None:\r\n:",,terminal_output +965,1004184,"TERMINAL",0,0,"338",,terminal_output +966,1004435,"TERMINAL",0,0,"\r )\r\n:\r if step == first_step:\r\n:\r print_mem_stats(""After params initialized"")\r\n:\r- metrics[""lr""] = lr_schedule(step)\r\n:\r- print(f""Step {step}, loss: {loss}"")\r\n:\r+ # metrics[""lr""] = lr_schedule(step)\r\n:\r+ # print(f""Step {step}, loss: {loss}"")\r\n:\r step += 1\r\n:\r \r\n:\r # --- Validation loss ---\r\n:\rdiff --git a/train_tokenizer.py b/train_tokenizer.py\r\n:\rindex a2adcb6..2ba875d 100644\r\n:\r--- a/train_tokenizer.py\r\n:\r+++ b/train_tokenizer.py\r\n:\r@@ -455,8 +455,8 @@ def main(args: Args) -> None:\r\n:\r loss, recon, metrics = train_step(optimizer, batch)\r\n:\r if step == first_step:\r\n:\r print_mem_stats(""After params initialized"")\r\n:\r- metrics[""lr""] = lr_schedule(step)\r\n:\r- print(f""Step {step}, loss: {loss}"")\r\n:",,terminal_output +967,1004513,"TERMINAL",0,0,"\r+ # metrics[""lr""] = lr_schedule(step)\r\n:\r+ # print(f""Step {step}, loss: {loss}"")\r\n:\r step += 1\r\n:",,terminal_output +968,1005325,"TERMINAL",0,0,"449",,terminal_output +969,1006258,"TERMINAL",0,0,"554:00",,terminal_output +970,1007144,"TERMINAL",0,0,"\rM+ lam_indices = genie.vq_encode(inputs, training=False)\r\n\r:",,terminal_output +971,1007300,"TERMINAL",0,0,"661",,terminal_output +972,1007702,"TERMINAL",0,0,"\rM+ if not args.use_gt_actions:\r\n\r:\rM+ lam_indices = None\r\n\r:\rM- inputs[""latent_actions""] = lam_indices\r\n\r:",,terminal_output +973,1008025,"TERMINAL",0,0,"\rM tokens_full_frame = tokenizer_outputs[""indices""]\r\n\r:\rM )\r\n\r:\rM inputs[""videos""], training=False\r\n\r:\rM tokenizer_outputs = genie.tokenizer.vq_encode(\r\n\r:\rM- lam_indices = genie.vq_encode(inputs, training=False)\r\n\r:\rM if args.eval_full_frame:\r\n\r:\rM # --- Evaluate full frame prediction (sampling) ---\r\n\r:\rM \r\n\r:\rM@@ -482,12 +482,14 @@ def main(args: Args) -> None:\r\n\r:\rM+++ b/train_dynamics.py\r\n\r:\rM--- 
a/train_dynamics.py\r\n\r:",,terminal_output +974,1008247,"TERMINAL",0,0,"\rMindex b34ba98..ad89aae 100644\r\n\r:\rMdiff --git a/train_dynamics.py b/train_dynamics.py\r\n\r:\rM for row in range(B):\r\n\r:\rM d = ImageDraw.Draw(img)\r\n\r:\rM for t, img in enumerate(imgs[1:]):\r\n\r:\rM+ action_batch_BSm11 = jnp.expand_dims(batch[""actions""][:, :-1], -1)\r\n\r:\rM- action_batch_BSm11 = jnp.expand_dims(batch[""actions""][:, 1:], -1)\r\n\r:\rM else:\r\n\r:",,terminal_output +975,1008313,"TERMINAL",0,0,"\rM action_batch_BSm11 = jnp.reshape(action_batch_E, (B, S - 1, 1))\r\n\r:",,terminal_output +976,1008341,"TERMINAL",0,0,"772",,terminal_output +977,1008751,"TERMINAL",0,0,"\rM if action_batch_E is not None:\r\n\r:",,terminal_output +978,1009154,"TERMINAL",0,0,"\rM@@ -237,7 +237,7 @@ if __name__ == ""__main__"":\r\n\r:",,terminal_output +979,1009267,"TERMINAL",0,0,"\rM+++ b/sample.py\r\n\r:",,terminal_output +980,1009366,"TERMINAL",0,0,"883",,terminal_output +981,1009391,"TERMINAL",0,0,"\rM--- a/sample.py\r\n\r:",,terminal_output +982,1010409,"TERMINAL",0,0,"994",,terminal_output +983,1011334,"TERMINAL",0,0,"\rdiff --git a/train_lam.py b/train_lam.py\r\n:",,terminal_output +984,1011539,"TERMINAL",0,0,"507:005",,terminal_output +985,1011665,"TERMINAL",0,0,"\rindex 7fe605d..f1913dd 100644\r\n:",,terminal_output +986,1011731,"TERMINAL",0,0,"\r--- a/train_lam.py\r\n:",,terminal_output +987,1011888,"TERMINAL",0,0,"\r+++ b/train_lam.py\r\n:",,terminal_output +988,1012071,"TERMINAL",0,0,"\r@@ -479,8 +479,8 @@ def main(args: Args) -> None:\r\n:",,terminal_output +989,1012551,"TERMINAL",0,0,"116",,terminal_output +990,1012719,"TERMINAL",0,0,"\r )\r\n:",,terminal_output +991,1012876,"TERMINAL",0,0,"\r if step == first_step:\r\n:",,terminal_output +992,1013273,"TERMINAL",0,0,"\r print_mem_stats(""After params initialized"")\r\n:",,terminal_output +993,1013483,"TERMINAL",0,0,"\r- metrics[""lr""] = lr_schedule(step)\r\n:",,terminal_output +994,1013549,"TERMINAL",0,0,"227",,terminal_output +995,1013578,"TERMINAL",0,0,"\r- print(f""Step {step}, loss: {loss}"")\r\n:",,terminal_output +996,1013858,"TERMINAL",0,0,"\r+ # metrics[""lr""] = lr_schedule(step)\r\n:",,terminal_output +997,1014039,"TERMINAL",0,0,"\r+ # print(f""Step {step}, loss: {loss}"")\r\n:",,terminal_output +998,1014276,"TERMINAL",0,0,"\r step += 1\r\n:",,terminal_output +999,1014577,"TERMINAL",0,0,"338",,terminal_output +1000,1015222,"TERMINAL",0,0,"\r \r\n:\r # --- Validation loss ---\r\n:\rdiff --git a/train_tokenizer.py b/train_tokenizer.py\r\n:\rindex a2adcb6..2ba875d 100644\r\n:\r--- a/train_tokenizer.py\r\n:\r+++ b/train_tokenizer.py\r\n:\r@@ -455,8 +455,8 @@ def main(args: Args) -> None:\r\n:\r loss, recon, metrics = train_step(optimizer, batch)\r\n:\r if step == first_step:\r\n:\r print_mem_stats(""After params initialized"")\r\n:\r- metrics[""lr""] = lr_schedule(step)\r\n:\r- print(f""Step {step}, loss: {loss}"")\r\n:\r+ # metrics[""lr""] = lr_schedule(step)\r\n:\r+ # print(f""Step {step}, loss: {loss}"")\r\n:\r step += 1\r\n:\r \r\n:\r # --- Validation loss ---\r\n:\r\r(END)",,terminal_output +1001,1015381,"TERMINAL",0,0,"\r\r(END)",,terminal_output +1002,1015651,"TERMINAL",0,0,"449",,terminal_output +1003,1015671,"TERMINAL",0,0,"\rM gt = jnp.asarray(inputs[""videos""], dtype=jnp.float32) / 255.0\r\n\r:",,terminal_output +1004,1016429,"TERMINAL",0,0,"\rM+ inputs[""latent_actions""] = lam_indices\r\n\r:\rM+ lam_indices = genie.vq_encode(inputs, training=False)\r\n\r:\rM+ if not args.use_gt_actions:\r\n\r:\rM+ lam_indices = 
None\r\n\r:\rM- inputs[""latent_actions""] = lam_indices\r\n\r:\rM tokens_full_frame = tokenizer_outputs[""indices""]\r\n\r:\rM )\r\n\r:\rM inputs[""videos""], training=False\r\n\r:\rM tokenizer_outputs = genie.tokenizer.vq_encode(\r\n\r:",,terminal_output +1005,1016716,"TERMINAL",0,0,"\rM- lam_indices = genie.vq_encode(inputs, training=False)\r\n\r:\rM if args.eval_full_frame:\r\n\r:\rM # --- Evaluate full frame prediction (sampling) ---\r\n\r:\rM \r\n\r:\rM@@ -482,12 +482,14 @@ def main(args: Args) -> None:\r\n\r:\rM+++ b/train_dynamics.py\r\n\r:\rM--- a/train_dynamics.py\r\n\r:\rMindex b34ba98..ad89aae 100644\r\n\r:",,terminal_output +1006,1016746,"TERMINAL",0,0,"5611",,terminal_output +1007,1016829,"TERMINAL",0,0,"\rMdiff --git a/train_dynamics.py b/train_dynamics.py\r\n\r:\rM for row in range(B):\r\n\r:\rM d = ImageDraw.Draw(img)\r\n\r:",,terminal_output +1008,1016887,"TERMINAL",0,0,"\rM for t, img in enumerate(imgs[1:]):\r\n\r:\rM+ action_batch_BSm11 = jnp.expand_dims(batch[""actions""][:, :-1], -1)\r\n\r:\rM- action_batch_BSm11 = jnp.expand_dims(batch[""actions""][:, 1:], -1)\r\n\r:",,terminal_output +1009,1016952,"TERMINAL",0,0,"\rM else:\r\n\r:\rM action_batch_BSm11 = jnp.reshape(action_batch_E, (B, S - 1, 1))\r\n\r:\rM if action_batch_E is not None:\r\n\r:",,terminal_output +1010,1017717,"TERMINAL",0,0,"772",,terminal_output +1011,1018858,"TERMINAL",0,0,"883",,terminal_output +1012,1019786,"TERMINAL",0,0,"994",,terminal_output +1013,1020891,"TERMINAL",0,0,"8:00105",,terminal_output +1014,1021913,"TERMINAL",0,0,"116",,terminal_output +1015,1022929,"TERMINAL",0,0,"227",,terminal_output +1016,1024064,"TERMINAL",0,0,"338",,terminal_output +1017,1025088,"TERMINAL",0,0,"449",,terminal_output +1018,1026112,"TERMINAL",0,0,"5520",,terminal_output +1019,1026702,"TERMINAL",0,0,"\r[?1l>]0;tum_cte0515@hkn0401:~/Projects/jasmine[?2004h(jasmine) [tum_cte0515@hkn0401 jasmine]$ ",,terminal_output +1020,1027136,"TERMINAL",0,0,"661",,terminal_output +1021,1028163,"TERMINAL",0,0,"772",,terminal_output +1022,1029287,"TERMINAL",0,0,"883",,terminal_output +1023,1030310,"TERMINAL",0,0,"994",,terminal_output +1024,1030364,"TERMINAL",0,0,"g",,terminal_output +1025,1030549,"TERMINAL",0,0,"it",,terminal_output +1026,1030709,"TERMINAL",0,0," ",,terminal_output +1027,1030950,"TERMINAL",0,0,"s",,terminal_output +1028,1031335,"TERMINAL",0,0,"10205",,terminal_output +1029,1031848,"TERMINAL",0,0,"",,terminal_output +1030,1032048,"TERMINAL",0,0,"",,terminal_output +1031,1032124,"TERMINAL",0,0,"",,terminal_output +1032,1032310,"TERMINAL",0,0,"116",,terminal_output +1033,1032404,"TERMINAL",0,0,"",,terminal_output +1034,1032557,"TERMINAL",0,0,"",,terminal_output +1035,1032756,"TERMINAL",0,0,"git diff",,terminal_output +1036,1032966,"TERMINAL",0,0,"stash pop",,terminal_output +1037,1033384,"TERMINAL",0,0,"227",,terminal_output +1038,1033843,"TERMINAL",0,0,"",,terminal_output +1039,1033979,"TERMINAL",0,0,"",,terminal_output +1040,1034105,"TERMINAL",0,0,"",,terminal_output +1041,1034260,"TERMINAL",0,0,"",,terminal_output +1042,1034389,"TERMINAL",0,0,"338",,terminal_output +1043,1034583,"TERMINAL",0,0,"\r\n[?2004l\r",,terminal_output +1044,1034684,"TERMINAL",0,0,"Saved working directory and index state WIP on generate-minatar-breakout-dataset: 0d69ea3 Merge branch 'gt-actions' into generate-minatar-breakout-dataset\r\n]0;tum_cte0515@hkn0401:~/Projects/jasmine[?2004h(jasmine) [tum_cte0515@hkn0401 jasmine]$ ",,terminal_output +1045,1034961,"TERMINAL",0,0,"g",,terminal_output 
+1046,1035104,"TERMINAL",0,0,"i",,terminal_output +1047,1035171,"TERMINAL",0,0,"t",,terminal_output +1048,1035239,"TERMINAL",0,0," ",,terminal_output +1049,1035305,"TERMINAL",0,0,"c",,terminal_output +1050,1035439,"TERMINAL",0,0,"h",,terminal_output +1051,1035450,"TERMINAL",0,0,"449",,terminal_output +1052,1035503,"TERMINAL",0,0,"e",,terminal_output +1053,1035624,"TERMINAL",0,0,"c",,terminal_output +1054,1035696,"TERMINAL",0,0,"k",,terminal_output +1055,1035818,"TERMINAL",0,0,"o",,terminal_output +1056,1035962,"TERMINAL",0,0,"u",,terminal_output +1057,1036033,"TERMINAL",0,0,"t",,terminal_output +1058,1036113,"TERMINAL",0,0," ",,terminal_output +1059,1036178,"TERMINAL",0,0,"g",,terminal_output +1060,1036380,"TERMINAL",0,0,"t",,terminal_output +1061,1036490,"TERMINAL",0,0,"-",,terminal_output +1062,1036505,"TERMINAL",0,0,"5530",,terminal_output +1063,1036955,"TERMINAL",0,0,"a",,terminal_output +1064,1037121,"TERMINAL",0,0,"c",,terminal_output +1065,1037273,"TERMINAL",0,0,"t",,terminal_output +1066,1037349,"TERMINAL",0,0,"i",,terminal_output +1067,1037414,"TERMINAL",0,0,"o",,terminal_output +1068,1037576,"TERMINAL",0,0,"n",,terminal_output +1069,1037576,"TERMINAL",0,0,"661",,terminal_output +1070,1037641,"TERMINAL",0,0,"s",,terminal_output +1071,1037796,"TERMINAL",0,0,"\r\n[?2004l\rSwitched to branch 'gt-actions'\r\nYour branch is up to date with 'origin/gt-actions'.\r\n]0;tum_cte0515@hkn0401:~/Projects/jasmine[?2004h(jasmine) [tum_cte0515@hkn0401 jasmine]$ ",,terminal_output +1072,1038589,"TERMINAL",0,0,"772",,terminal_output +1073,1039664,"TERMINAL",0,0,"883",,terminal_output +1074,1040399,"",0,0,"Switched from branch 'generate-minatar-breakout-dataset' to 'gt-actions'",,git_branch_checkout +1075,1040767,"TERMINAL",0,0,"9305",,terminal_output +1076,1041780,"TERMINAL",0,0,"2116",,terminal_output +1077,1042733,"TERMINAL",0,0,"227",,terminal_output +1078,1043827,"TERMINAL",0,0,"338",,terminal_output +1079,1044812,"TERMINAL",0,0,"449",,terminal_output +1080,1045879,"TERMINAL",0,0,"5540",,terminal_output +1081,1046939,"TERMINAL",0,0,"661",,terminal_output +1082,1047929,"TERMINAL",0,0,"772",,terminal_output +1083,1049050,"TERMINAL",0,0,"883",,terminal_output +1084,1050071,"TERMINAL",0,0,"994",,terminal_output +1085,1051097,"TERMINAL",0,0,"30405",,terminal_output +1086,1052121,"TERMINAL",0,0,"116",,terminal_output +1087,1053167,"TERMINAL",0,0,"227",,terminal_output +1088,1054271,"TERMINAL",0,0,"338",,terminal_output +1089,1056013,"TERMINAL",0,0,"4550",,terminal_output +1090,1057141,"TERMINAL",0,0,"661",,terminal_output +1091,1058086,"TERMINAL",0,0,"772",,terminal_output +1092,1059187,"TERMINAL",0,0,"883",,terminal_output +1093,1060211,"TERMINAL",0,0,"994",,terminal_output +1094,1061073,"train_dynamics.py",0,0,"import os\n\n\nos.environ.setdefault(""XLA_PYTHON_CLIENT_MEM_FRACTION"", ""0.98"")\n\nfrom dataclasses import dataclass, field\nimport itertools\nfrom typing import cast, Optional\n\nimport einops\nfrom jax.sharding import Mesh, PartitionSpec, NamedSharding\nfrom jax.experimental.mesh_utils import create_device_mesh\nimport optax\nimport orbax.checkpoint as ocp\nimport numpy as np\nimport dm_pix as pix\nimport jax\nimport jax.numpy as jnp\nimport tyro\nimport wandb\nimport grain\nimport flax.nnx as nnx\n\nfrom genie import Genie, restore_genie_components\nfrom utils.dataloader import get_dataloader\nfrom utils.train_utils import (\n get_lr_schedule,\n count_parameters_by_component,\n print_mem_stats,\n print_compiled_memory_stats,\n print_compiled_cost_analysis,\n)\n\n\n@dataclass\nclass 
Args:\n # Experiment\n num_steps: int = 200_000\n seed: int = 0\n seq_len: int = 16\n image_channels: int = 3\n image_height: int = 90\n image_width: int = 160\n data_dir: str = """"\n save_ckpt: bool = False\n restore_ckpt: bool = False\n # Optimization\n batch_size: int = 36\n init_lr: float = 0.0\n max_lr: float = 3e-5\n decay_end: float = 0.0\n wsd_decay_steps: int = (\n 10000 # NOTE: wsd_decay_steps will only be used when using a wsd-schedule\n )\n warmup_steps: int = 5000\n lr_schedule: str = ""wsd"" # supported options: wsd, cos\n # Tokenizer\n tokenizer_dim: int = 512\n tokenizer_ffn_dim: int = 2048\n latent_patch_dim: int = 32\n num_patch_latents: int = 1024\n patch_size: int = 4\n tokenizer_num_blocks: int = 4\n tokenizer_num_heads: int = 8\n tokenizer_checkpoint: str = """"\n # LAM\n lam_dim: int = 512\n lam_ffn_dim: int = 2048\n latent_action_dim: int = 32\n num_actions: int = 6\n lam_patch_size: int = 16\n lam_num_blocks: int = 4\n lam_num_heads: int = 8\n lam_checkpoint: str = """"\n # Dynamics\n dyna_type: str = ""maskgit"" # supported options: maskgit, causal\n dyna_dim: int = 512\n dyna_ffn_dim: int = 2048\n dyna_num_blocks: int = 6\n dyna_num_heads: int = 8\n dropout: float = 0.0\n mask_limit: float = 0.5\n param_dtype = jnp.float32\n dtype = jnp.bfloat16\n use_flash_attention: bool = True\n use_gt_actions: bool = False\n # Logging\n log: bool = False\n entity: str = """"\n project: str = """"\n name: str = ""train_dynamics""\n tags: list[str] = field(default_factory=lambda: [""dynamics""])\n log_interval: int = 5\n log_image_interval: int = 250\n ckpt_dir: str = """"\n log_checkpoint_interval: int = 25000\n log_checkpoint_keep_period: int = 20000\n log_gradients: bool = False\n val_data_dir: str = """"\n val_interval: int = 20_000\n val_steps: int = 50\n eval_full_frame: bool = False\n val_maskgit_steps: int = 25\n val_temperature: float = 1\n val_sample_argmax: bool = False\n wandb_id: str = """"\n\n\ndef build_model(args: Args, rng: jax.Array) -> tuple[Genie, jax.Array]:\n rng, _rng = jax.random.split(rng)\n rngs = nnx.Rngs(_rng)\n genie = Genie(\n # Tokenizer\n in_dim=args.image_channels,\n tokenizer_dim=args.tokenizer_dim,\n tokenizer_ffn_dim=args.tokenizer_ffn_dim,\n latent_patch_dim=args.latent_patch_dim,\n num_patch_latents=args.num_patch_latents,\n patch_size=args.patch_size,\n tokenizer_num_blocks=args.tokenizer_num_blocks,\n tokenizer_num_heads=args.tokenizer_num_heads,\n # LAM\n lam_dim=args.lam_dim,\n lam_ffn_dim=args.lam_ffn_dim,\n latent_action_dim=args.latent_action_dim,\n num_actions=args.num_actions,\n lam_patch_size=args.lam_patch_size,\n lam_num_blocks=args.lam_num_blocks,\n lam_num_heads=args.lam_num_heads,\n lam_co_train=not args.lam_checkpoint,\n use_gt_actions=args.use_gt_actions,\n # Dynamics\n dyna_type=args.dyna_type,\n dyna_dim=args.dyna_dim,\n dyna_ffn_dim=args.dyna_ffn_dim,\n dyna_num_blocks=args.dyna_num_blocks,\n dyna_num_heads=args.dyna_num_heads,\n dropout=args.dropout,\n mask_limit=args.mask_limit,\n param_dtype=args.param_dtype,\n dtype=args.dtype,\n use_flash_attention=args.use_flash_attention,\n decode=False,\n rngs=rngs,\n )\n if args.use_gt_actions:\n assert (\n not args.lam_checkpoint\n ), ""Cannot use LAM when using ground-truth actions.""\n else:\n assert genie.lam is not None\n del genie.lam.decoder\n return genie, rng\n\n\ndef build_optimizer(genie: Genie, args: Args) -> tuple[nnx.Optimizer, optax.Schedule]:\n lr_schedule = get_lr_schedule(\n args.lr_schedule,\n args.init_lr,\n args.max_lr,\n args.decay_end,\n args.num_steps,\n 
args.warmup_steps,\n args.wsd_decay_steps,\n )\n tx = optax.adamw(\n learning_rate=lr_schedule,\n b1=0.9,\n b2=0.9,\n weight_decay=1e-4,\n mu_dtype=args.param_dtype, # moments in full precision\n )\n optimizer = nnx.Optimizer(genie, tx)\n return optimizer, lr_schedule\n\n\ndef build_mesh_and_sharding(\n num_devices: int,\n) -> tuple[Mesh, NamedSharding, NamedSharding, NamedSharding]:\n device_mesh_arr = create_device_mesh((num_devices,))\n mesh = Mesh(devices=device_mesh_arr, axis_names=(""data"",))\n replicated_sharding = NamedSharding(mesh, PartitionSpec())\n videos_sharding = NamedSharding(mesh, PartitionSpec(""data"", None, None, None, None))\n actions_sharding = NamedSharding(mesh, PartitionSpec(""data"", None))\n return mesh, replicated_sharding, videos_sharding, actions_sharding\n\n\ndef shard_optimizer_states(\n optimizer: nnx.Optimizer, replicated_sharding: NamedSharding\n) -> None:\n model_state = nnx.state(optimizer.model)\n model_sharded_state = jax.lax.with_sharding_constraint(\n model_state, replicated_sharding\n )\n nnx.update(optimizer.model, model_sharded_state)\n optimizer_state = nnx.state(optimizer, nnx.optimizer.OptState)\n optimizer_sharded_state = jax.lax.with_sharding_constraint(\n optimizer_state, replicated_sharding\n )\n nnx.update(optimizer, optimizer_sharded_state)\n\n\ndef build_dataloader(args: Args, data_dir: str) -> grain.DataLoaderIterator:\n image_shape = (args.image_height, args.image_width, args.image_channels)\n array_record_files = [\n os.path.join(data_dir, x)\n for x in os.listdir(data_dir)\n if x.endswith("".array_record"")\n ]\n grain_dataloader = get_dataloader(\n array_record_files,\n args.seq_len,\n # NOTE: We deliberately pass the global batch size\n # The dataloader shards the dataset across all processes\n args.batch_size,\n *image_shape,\n num_workers=8,\n prefetch_buffer_size=1,\n seed=args.seed,\n )\n initial_state = grain_dataloader._create_initial_state()\n grain_iterator = grain.DataLoaderIterator(grain_dataloader, initial_state)\n return grain_iterator\n\n\ndef build_checkpoint_manager(args: Args) -> ocp.CheckpointManager:\n handler_registry = ocp.handlers.DefaultCheckpointHandlerRegistry()\n handler_registry.add(\n ""model_state"", ocp.args.PyTreeSave, ocp.handlers.PyTreeCheckpointHandler\n )\n handler_registry.add(\n ""model_state"", ocp.args.PyTreeRestore, ocp.handlers.PyTreeCheckpointHandler\n )\n handler_registry.add(\n ""train_dataloader_state"",\n grain.checkpoint.CheckpointSave,\n cast(ocp.handlers.CheckpointHandler, grain.checkpoint.CheckpointHandler),\n )\n handler_registry.add(\n ""train_dataloader_state"",\n grain.checkpoint.CheckpointRestore,\n cast(ocp.handlers.CheckpointHandler, grain.checkpoint.CheckpointHandler),\n )\n if args.val_data_dir:\n handler_registry.add(\n ""val_dataloader_state"",\n grain.checkpoint.CheckpointSave,\n cast(ocp.handlers.CheckpointHandler, grain.checkpoint.CheckpointHandler),\n )\n handler_registry.add(\n ""val_dataloader_state"",\n grain.checkpoint.CheckpointRestore,\n cast(ocp.handlers.CheckpointHandler, grain.checkpoint.CheckpointHandler),\n )\n checkpoint_options = ocp.CheckpointManagerOptions(\n save_interval_steps=args.log_checkpoint_interval,\n max_to_keep=3,\n keep_period=args.log_checkpoint_keep_period,\n step_format_fixed_length=6,\n cleanup_tmp_directories=True,\n )\n checkpoint_manager = ocp.CheckpointManager(\n args.ckpt_dir,\n options=checkpoint_options,\n handler_registry=handler_registry,\n )\n return checkpoint_manager\n\n\ndef restore_or_initialize_components(\n args: Args,\n 
checkpoint_manager: ocp.CheckpointManager,\n optimizer: nnx.Optimizer,\n train_iterator: grain.DataLoaderIterator,\n rng: jax.Array,\n replicated_sharding: NamedSharding,\n val_iterator: Optional[grain.DataLoaderIterator],\n restore_step: Optional[int] = None,\n) -> tuple[\n int, nnx.Optimizer, grain.DataLoaderIterator, grain.DataLoaderIterator, jax.Array\n]:\n step = 0\n if restore_step is None:\n restore_step = checkpoint_manager.latest_step()\n if args.restore_ckpt:\n abstract_optimizer = nnx.eval_shape(lambda: optimizer)\n abstract_optimizer_state = nnx.state(abstract_optimizer)\n if val_iterator:\n restore_args = ocp.args.Composite(\n model_state=ocp.args.PyTreeRestore(abstract_optimizer_state), # type: ignore\n train_dataloader_state=grain.checkpoint.CheckpointRestore(train_iterator), # type: ignore\n val_dataloader_state=grain.checkpoint.CheckpointRestore(val_iterator), # type: ignore\n )\n else:\n restore_args = ocp.args.Composite(\n model_state=ocp.args.PyTreeRestore(abstract_optimizer_state), # type: ignore\n train_dataloader_state=grain.checkpoint.CheckpointRestore(train_iterator), # type: ignore\n )\n restored = checkpoint_manager.restore(\n checkpoint_manager.latest_step(), args=restore_args\n )\n restored_optimizer_state = restored[""model_state""]\n nnx.update(optimizer, restored_optimizer_state)\n train_iterator = restored[""train_dataloader_state""]\n if val_iterator:\n val_iterator = restored[""val_dataloader_state""]\n step = checkpoint_manager.latest_step() or 0\n print(f""Restored dataloader and model state from step {step}"")\n else:\n # Restore from pre-trained tokenizer (and LAM)\n rng, _rng = jax.random.split(rng)\n optimizer = restore_genie_components(optimizer, replicated_sharding, _rng, args)\n # NOTE: We have to remove the (unused) tokenizer vq dropout due flax.nnx lazily initializing modules.\n # Specifically, the first dynamics model checkpoint will contain the vq dropout module,\n # but the first full restore will fail due to nnx not initializing the module when\n # dropout is set to 0.0.\n del optimizer.model.tokenizer.vq.drop\n return step, optimizer, train_iterator, val_iterator, rng\n\n\ndef _calculate_step_metrics(\n outputs: dict[str, jax.Array],\n gt: jax.Array,\n num_actions: int,\n num_patch_latents: int,\n) -> tuple[jax.Array, dict]:\n mask = outputs[""mask""]\n outputs[""token_logits""] = outputs[""token_logits""].astype(jnp.float32)\n ce_loss = optax.softmax_cross_entropy_with_integer_labels(\n outputs[""token_logits""], outputs[""video_tokens""]\n )\n ce_loss = (mask * ce_loss).sum() / mask.sum()\n acc = outputs[""token_logits""].argmax(-1) == outputs[""video_tokens""]\n acc = (mask * acc).sum() / mask.sum()\n select_probs = jax.nn.softmax(outputs[""token_logits""])\n gt_val = gt.clip(0, 1).reshape(-1, *gt.shape[2:])\n recon = outputs[""recon""].clip(0, 1).reshape(-1, *outputs[""recon""].shape[2:])\n psnr = jnp.asarray(pix.psnr(gt_val, recon)).mean()\n ssim = jnp.asarray(pix.ssim(gt_val, recon)).mean()\n _, index_counts_tokenizer = jnp.unique_counts(\n jnp.ravel(outputs[""video_tokens""]),\n size=num_patch_latents,\n fill_value=0,\n )\n codebook_usage_tokenizer = (index_counts_tokenizer != 0).mean()\n metrics = dict(\n cross_entropy_loss=ce_loss,\n masked_token_accuracy=acc,\n select_logit=outputs[""token_logits""].max(-1).mean(),\n select_p=select_probs.max(-1).mean(),\n entropy=jax.scipy.special.entr(select_probs).sum(-1).mean(),\n psnr=psnr,\n ssim=ssim,\n codebook_usage_tokenizer=codebook_usage_tokenizer,\n )\n if ""lam_indices"" in 
outputs.keys():\n _, index_counts_lam = jnp.unique_counts(\n jnp.ravel(outputs[""lam_indices""]),\n size=num_actions,\n fill_value=0,\n )\n codebook_usage_lam = (index_counts_lam != 0).mean()\n metrics[""codebook_usage_lam""] = codebook_usage_lam\n return ce_loss, metrics\n\n\ndef main(args: Args) -> None:\n jax.distributed.initialize()\n num_devices = jax.device_count()\n if num_devices == 0:\n raise ValueError(""No JAX devices found."")\n print(f""Running on {num_devices} devices."")\n\n if args.batch_size % num_devices != 0:\n raise ValueError(\n f""Global batch size {args.batch_size} must be divisible by ""\n f""number of devices {num_devices}.""\n )\n\n rng = jax.random.key(args.seed)\n\n # --- Initialize model ---\n genie, rng = build_model(args, rng)\n _, params, _ = nnx.split(genie, nnx.Param, ...)\n param_counts = count_parameters_by_component(params)\n\n if args.log and jax.process_index() == 0:\n wandb_init_kwargs = {\n ""entity"": args.entity,\n ""project"": args.project,\n ""name"": args.name,\n ""tags"": args.tags,\n ""group"": ""debug"",\n ""config"": args,\n }\n\n if args.wandb_id:\n wandb_init_kwargs.update(\n {\n ""id"": args.wandb_id,\n ""resume"": ""allow"",\n }\n )\n wandb.init(**wandb_init_kwargs)\n\n wandb.config.update({""model_param_count"": param_counts})\n\n print(""Parameter counts:"")\n print(param_counts)\n\n # --- Initialize optimizer ---\n optimizer, lr_schedule = build_optimizer(genie, args)\n del genie\n\n # FIXME: switch to create_hybrid_device_mesh for runs spanning multiple nodes\n _, replicated_sharding, videos_sharding, actions_sharding = build_mesh_and_sharding(\n num_devices\n )\n\n shard_optimizer_states(optimizer, replicated_sharding)\n\n # --- Initialize checkpoint manager ---\n checkpoint_manager = build_checkpoint_manager(args)\n\n # --- Create DataLoaderIterator from dataloader ---\n train_iterator = build_dataloader(args, args.data_dir)\n val_iterator = None\n if args.val_data_dir:\n val_iterator = build_dataloader(args, args.val_data_dir)\n\n # --- Restore checkpoint ---\n step, optimizer, train_iterator, val_iterator, rng = (\n restore_or_initialize_components(\n args,\n checkpoint_manager,\n optimizer,\n train_iterator,\n rng,\n replicated_sharding,\n val_iterator,\n )\n )\n\n # --- Define loss and train step (close over args) ---\n def dynamics_loss_fn(\n model: Genie,\n inputs: dict,\n training: bool = False,\n ) -> tuple[jax.Array, tuple[jax.Array, dict]]:\n gt = jnp.asarray(inputs[""videos""], dtype=jnp.float32) / 255.0\n inputs[""videos""] = gt.astype(args.dtype)\n outputs = model(inputs, training=training)\n ce_loss, metrics = _calculate_step_metrics(\n outputs, gt, args.num_actions, args.num_patch_latents\n )\n return ce_loss, (outputs[""recon""], metrics)\n\n @nnx.jit(donate_argnums=0)\n def train_step(\n optimizer: nnx.Optimizer, inputs: dict\n ) -> tuple[jax.Array, jax.Array, dict]:\n def loss_fn(model: Genie) -> tuple[jax.Array, tuple[jax.Array, dict]]:\n model.train()\n return dynamics_loss_fn(model, inputs, training=True)\n\n (loss, (recon, metrics)), grads = nnx.value_and_grad(loss_fn, has_aux=True)(\n optimizer.model\n )\n optimizer.update(grads)\n if args.log_gradients:\n metrics[""gradients_std/""] = jax.tree.map(\n lambda x: x.std(), grads[""params""][""dynamics""]\n )\n return loss, recon, metrics\n\n @nnx.jit\n def val_step(genie: Genie, inputs: dict) -> dict:\n """"""Evaluate model and compute metrics""""""\n genie.eval()\n (loss, (recon, metrics)) = dynamics_loss_fn(genie, inputs, training=False)\n val_output = 
{""loss"": loss, ""recon"": recon, ""metrics"": metrics}\n\n # --- Evaluate full frame prediction (sampling) ---\n if args.eval_full_frame:\n lam_indices = genie.vq_encode(inputs, training=False)\n tokenizer_outputs = genie.tokenizer.vq_encode(\n inputs[""videos""], training=False\n )\n tokens_full_frame = tokenizer_outputs[""indices""]\n inputs[""latent_actions""] = lam_indices\n gt = jnp.asarray(inputs[""videos""], dtype=jnp.float32) / 255.0\n inputs[""videos""] = gt[:, :-1].astype(\n args.dtype\n ) # remove last frame for generation\n recon_full_frame, logits_full_frame = genie.sample(\n inputs,\n args.seq_len,\n args.val_temperature,\n args.val_sample_argmax,\n args.val_maskgit_steps,\n )\n step_outputs = {\n ""recon"": recon_full_frame,\n ""token_logits"": logits_full_frame,\n ""video_tokens"": tokens_full_frame,\n ""mask"": jnp.zeros_like(tokens_full_frame).at[:, -1].set(True),\n ""lam_indices"": lam_indices,\n }\n loss_full_frame, metrics_full_frame = _calculate_step_metrics(\n step_outputs, gt, args.num_actions, args.num_patch_latents\n )\n val_output.update(\n {\n ""loss_full_frame"": loss_full_frame,\n ""recon_full_frame"": recon_full_frame,\n ""metrics_full_frame"": metrics_full_frame,\n }\n )\n return val_output\n\n def calculate_validation_metrics(val_dataloader, genie, rng):\n step = 0\n loss_per_step = []\n metrics_per_step = []\n loss_full_frame_per_step = []\n metrics_full_frame_per_step = []\n batch = None\n recon = None\n recon_full_frame = None\n for batch in val_dataloader:\n rng, _rng_mask = jax.random.split(rng, 2)\n batch[""rng""] = _rng_mask\n val_outputs = val_step(genie, batch)\n loss_per_step.append(val_outputs[""loss""])\n metrics_per_step.append(val_outputs[""metrics""])\n recon = val_outputs[""recon""]\n if args.eval_full_frame:\n loss_full_frame_per_step.append(val_outputs[""loss_full_frame""])\n metrics_full_frame_per_step.append(val_outputs[""metrics_full_frame""])\n recon_full_frame = val_outputs[""recon_full_frame""]\n step += 1\n if step > args.val_steps:\n break\n\n if step < args.val_steps:\n print(\n f""Warning: Your validation dataset is too small to make val_steps many steps. 
Made {step} steps, expected {args.val_steps}""\n )\n\n val_metrics = {\n f""val_{key}"": np.mean([float(m[key]) for m in metrics_per_step])\n for key in metrics_per_step[0].keys()\n }\n val_metrics[""val_loss""] = np.mean(loss_per_step)\n if args.eval_full_frame:\n val_metrics_full_frame = {\n f""val_full_frame_{key}"": np.mean(\n [float(m[key]) for m in metrics_full_frame_per_step]\n )\n for key in metrics_full_frame_per_step[0].keys()\n }\n val_metrics.update(val_metrics_full_frame)\n val_metrics[""val_loss_full_frame""] = np.mean(loss_full_frame_per_step)\n return val_metrics, batch, recon, recon_full_frame\n\n # --- TRAIN LOOP ---\n dataloader_train = (\n {\n ""videos"": jax.make_array_from_process_local_data(\n videos_sharding, local_data=elem[""videos""]\n ),\n ""actions"": (\n jax.make_array_from_process_local_data(\n actions_sharding, elem[""actions""]\n )\n if args.use_gt_actions\n else None\n ),\n }\n for elem in train_iterator\n )\n dataloader_val = None\n if val_iterator:\n dataloader_val = (\n {\n ""videos"": jax.make_array_from_process_local_data(\n videos_sharding, elem[""videos""]\n ),\n ""actions"": (\n jax.make_array_from_process_local_data(\n actions_sharding, elem[""actions""]\n )\n if args.use_gt_actions\n else None\n ),\n }\n for elem in val_iterator\n )\n if jax.process_index() == 0:\n first_batch = next(dataloader_train)\n first_batch[""rng""] = rng # type: ignore\n compiled = train_step.lower(optimizer, first_batch).compile()\n print_compiled_memory_stats(compiled.memory_analysis())\n print_compiled_cost_analysis(compiled.cost_analysis())\n # Do not skip the first batch during training\n dataloader_train = itertools.chain([first_batch], dataloader_train)\n print(f""Starting training from step {step}..."")\n first_step = step\n while step < args.num_steps:\n for batch in dataloader_train:\n # --- Train step ---\n rng, _rng_mask = jax.random.split(rng, 2)\n batch[""rng""] = _rng_mask\n loss, recon, metrics = train_step(optimizer, batch)\n if step == first_step:\n print_mem_stats(""After params initialized"")\n metrics[""lr""] = lr_schedule(step)\n print(f""Step {step}, loss: {loss}"")\n step += 1\n\n # --- Validation loss ---\n val_results = {}\n if dataloader_val and step % args.val_interval == 0:\n rng, _rng_mask_val = jax.random.split(rng, 2)\n print(""Calculating validation metrics..."")\n val_metrics, val_gt_batch, val_recon, val_recon_full_frame = (\n calculate_validation_metrics(\n dataloader_val, optimizer.model, _rng_mask_val\n )\n )\n print(f""Step {step}, validation loss: {val_metrics['val_loss']}"")\n val_results = {\n ""metrics"": val_metrics,\n ""gt_batch"": val_gt_batch,\n ""recon"": val_recon,\n ""full_frame"": val_recon_full_frame,\n }\n\n # --- Logging ---\n if args.log:\n if step % args.log_interval == 0 and jax.process_index() == 0:\n log_dict = {""loss"": loss, ""step"": step, **metrics}\n if val_results:\n log_dict.update(val_results[""metrics""])\n wandb.log(log_dict)\n if step % args.log_image_interval == 0:\n gt_seq = batch[""videos""][0].astype(jnp.float32) / 255.0\n recon_seq = recon[0].clip(0, 1)\n comparison_seq = jnp.concatenate((gt_seq, recon_seq), axis=1)\n comparison_seq = einops.rearrange(\n comparison_seq * 255, ""t h w c -> h (t w) c""\n )\n if val_results:\n val_results[""gt_seq_val""] = (\n val_results[""gt_batch""][""videos""][0].astype(jnp.float32)\n / 255.0\n )\n val_results[""recon_seq_val""] = val_results[""recon""][0].clip(\n 0, 1\n )\n val_comparison_seq = jnp.concatenate(\n (val_results[""gt_seq_val""], 
val_results[""recon_seq_val""]),\n axis=1,\n )\n val_results[""val_comparison_seq""] = einops.rearrange(\n val_comparison_seq * 255, ""t h w c -> h (t w) c""\n )\n if args.eval_full_frame:\n val_results[""full_frame_seq_val""] = val_results[\n ""full_frame""\n ][0].clip(0, 1)\n val_results[""val_full_frame_comparison_seq""] = (\n jnp.concatenate(\n (\n val_results[""gt_seq_val""],\n val_results[""full_frame_seq_val""],\n ),\n axis=1,\n )\n )\n val_results[""val_full_frame_comparison_seq""] = (\n einops.rearrange(\n val_results[""val_full_frame_comparison_seq""] * 255,\n ""t h w c -> h (t w) c"",\n )\n )\n # NOTE: Process-dependent control flow deliberately happens\n # after indexing operation since it must not contain code\n # sections that lead to cross-accelerator communication.\n if jax.process_index() == 0:\n log_images = dict(\n image=wandb.Image(np.asarray(gt_seq[args.seq_len - 1])),\n recon=wandb.Image(np.asarray(recon_seq[args.seq_len - 1])),\n true_vs_recon=wandb.Image(\n np.asarray(comparison_seq.astype(np.uint8))\n ),\n )\n if val_results:\n log_images.update(\n dict(\n val_image=wandb.Image(\n np.asarray(\n val_results[""gt_seq_val""][args.seq_len - 1]\n )\n ),\n val_recon=wandb.Image(\n np.asarray(\n val_results[""recon_seq_val""][\n args.seq_len - 1\n ]\n )\n ),\n val_true_vs_recon=wandb.Image(\n np.asarray(\n val_results[""val_comparison_seq""].astype(\n np.uint8\n )\n )\n ),\n )\n )\n if args.eval_full_frame:\n log_images.update(\n dict(\n val_full_frame=wandb.Image(\n np.asarray(\n val_results[""full_frame_seq_val""][\n args.seq_len - 1\n ]\n )\n ),\n val_true_vs_full_frame=wandb.Image(\n np.asarray(\n val_results[\n ""val_full_frame_comparison_seq""\n ].astype(np.uint8)\n )\n ),\n )\n )\n wandb.log(log_images)\n # --- Checkpointing ---\n if args.save_ckpt and step % args.log_checkpoint_interval == 0:\n optimizer_state = nnx.state(optimizer)\n if val_iterator:\n ckpt_manager_args = ocp.args.Composite(\n model_state=ocp.args.PyTreeSave(optimizer_state), # type: ignore\n train_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n train_iterator # type: ignore\n ),\n val_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n val_iterator # type: ignore\n ),\n )\n else:\n ckpt_manager_args = ocp.args.Composite(\n model_state=ocp.args.PyTreeSave(optimizer_state), # type: ignore\n train_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n train_iterator # type: ignore\n ),\n )\n checkpoint_manager.save(step, args=ckpt_manager_args)\n print(f""Saved checkpoint at step {step}"")\n if step >= args.num_steps:\n break\n\n checkpoint_manager.close()\n\n\nif __name__ == ""__main__"":\n args = tyro.cli(Args)\n main(args)\n",python,tab +1095,1061432,"TERMINAL",0,0,"40505",,terminal_output +1096,1062270,"TERMINAL",0,0,"116",,terminal_output +1097,1063384,"TERMINAL",0,0,"227",,terminal_output +1098,1063528,"train_dynamics.py",22043,0,"",python,selection_mouse +1099,1063544,"train_dynamics.py",22042,0,"",python,selection_command +1100,1064413,"TERMINAL",0,0,"338",,terminal_output +1101,1065379,"TERMINAL",0,0,"449",,terminal_output +1102,1066402,"TERMINAL",0,0,"555:00",,terminal_output +1103,1067464,"TERMINAL",0,0,"661",,terminal_output +1104,1068481,"TERMINAL",0,0,"772",,terminal_output +1105,1069538,"TERMINAL",0,0,"883",,terminal_output +1106,1070694,"train_dynamics.py",16711,0,"",python,selection_command +1107,1070727,"TERMINAL",0,0,"994",,terminal_output +1108,1071183,"train_dynamics.py",16771,0,"",python,selection_command 
+1109,1071380,"train_dynamics.py",16804,0,"",python,selection_command +1110,1071508,"train_dynamics.py",16870,0,"",python,selection_command +1111,1071663,"train_dynamics.py",16929,0,"",python,selection_command +1112,1071680,"TERMINAL",0,0,"508:005",,terminal_output +1113,1071882,"train_dynamics.py",16870,0,"",python,selection_command +1114,1072061,"train_dynamics.py",16804,0,"",python,selection_command +1115,1072806,"TERMINAL",0,0,"127",,terminal_output +1116,1073728,"TERMINAL",0,0,"338",,terminal_output +1117,1074721,"TERMINAL",0,0,"449",,terminal_output +1118,1075685,"train_dynamics.py",16870,0,"",python,selection_command +1119,1075769,"TERMINAL",0,0,"5510",,terminal_output +1120,1076517,"train_dynamics.py",16804,0,"",python,selection_command +1121,1076903,"TERMINAL",0,0,"661",,terminal_output +1122,1077855,"train_dynamics.py",16790,66,"",python,content +1123,1077931,"train_dynamics.py",16802,0,"",python,selection_command +1124,1078001,"train_dynamics.py",16861,0,"",python,selection_command +1125,1078013,"TERMINAL",0,0,"772",,terminal_output +1126,1078234,"train_dynamics.py",16910,0,"",python,selection_command +1127,1078416,"train_dynamics.py",16924,0,"",python,selection_command +1128,1078725,"train_dynamics.py",16972,0,"\n lam_indices = genie.vq_encode(inputs, training=False)",python,content +1129,1078735,"train_dynamics.py",16985,0,"",python,selection_command +1130,1078904,"TERMINAL",0,0,"883",,terminal_output +1131,1079951,"TERMINAL",0,0,"994",,terminal_output +1132,1080958,"TERMINAL",0,0,"9:00105",,terminal_output +1133,1082007,"TERMINAL",0,0,"116",,terminal_output +1134,1082930,"train_dynamics.py",16972,0,"\n ",python,content +1135,1083085,"TERMINAL",0,0,"227",,terminal_output +1136,1083556,"train_dynamics.py",16985,0,"l",python,content +1137,1083558,"train_dynamics.py",16986,0,"",python,selection_keyboard +1138,1083931,"train_dynamics.py",16986,0,"a",python,content +1139,1083932,"train_dynamics.py",16987,0,"",python,selection_keyboard +1140,1084031,"train_dynamics.py",16987,0,"m",python,content +1141,1084033,"train_dynamics.py",16988,0,"",python,selection_keyboard +1142,1084100,"TERMINAL",0,0,"338",,terminal_output +1143,1084391,"train_dynamics.py",16988,0,"_",python,content +1144,1084393,"train_dynamics.py",16989,0,"",python,selection_keyboard +1145,1084688,"train_dynamics.py",16989,0,"i",python,content +1146,1084690,"train_dynamics.py",16990,0,"",python,selection_keyboard +1147,1084797,"train_dynamics.py",16990,0,"n",python,content +1148,1084799,"train_dynamics.py",16991,0,"",python,selection_keyboard +1149,1084862,"train_dynamics.py",16991,0,"d",python,content +1150,1084864,"train_dynamics.py",16992,0,"",python,selection_keyboard +1151,1084952,"train_dynamics.py",16992,0,"i",python,content +1152,1084954,"train_dynamics.py",16993,0,"",python,selection_keyboard +1153,1085114,"train_dynamics.py",16993,0,"c",python,content +1154,1085116,"train_dynamics.py",16994,0,"",python,selection_keyboard +1155,1085146,"TERMINAL",0,0,"449",,terminal_output +1156,1085280,"train_dynamics.py",16994,0,"e",python,content +1157,1085281,"train_dynamics.py",16995,0,"",python,selection_keyboard +1158,1085464,"train_dynamics.py",16995,0,"s",python,content +1159,1085466,"train_dynamics.py",16996,0,"",python,selection_keyboard +1160,1085624,"train_dynamics.py",16996,0," ",python,content +1161,1085626,"train_dynamics.py",16997,0,"",python,selection_keyboard +1162,1085833,"train_dynamics.py",16997,0,"=",python,content +1163,1085834,"train_dynamics.py",16998,0,"",python,selection_keyboard 
+1164,1085921,"train_dynamics.py",16998,0," ",python,content +1165,1085922,"train_dynamics.py",16999,0,"",python,selection_keyboard +1166,1086211,"TERMINAL",0,0,"5520",,terminal_output +1167,1086306,"train_dynamics.py",16999,0,"N",python,content +1168,1086307,"train_dynamics.py",17000,0,"",python,selection_keyboard +1169,1086437,"train_dynamics.py",17000,0,"o",python,content +1170,1086439,"train_dynamics.py",17001,0,"",python,selection_keyboard +1171,1086580,"train_dynamics.py",17001,0,"n",python,content +1172,1086581,"train_dynamics.py",17002,0,"",python,selection_keyboard +1173,1086599,"train_dynamics.py",17002,0,"e",python,content +1174,1086601,"train_dynamics.py",17003,0,"",python,selection_keyboard +1175,1086803,"train_dynamics.py",17003,0,"\n ",python,content +1176,1087134,"train_dynamics.py",17016,0,"i",python,content +1177,1087135,"train_dynamics.py",17017,0,"",python,selection_keyboard +1178,1087218,"TERMINAL",0,0,"661",,terminal_output +1179,1087277,"train_dynamics.py",17017,0,"f",python,content +1180,1087279,"train_dynamics.py",17018,0,"",python,selection_keyboard +1181,1087403,"train_dynamics.py",17018,0," ",python,content +1182,1087404,"train_dynamics.py",17019,0,"",python,selection_keyboard +1183,1088250,"TERMINAL",0,0,"772",,terminal_output +1184,1089394,"TERMINAL",0,0,"883",,terminal_output +1185,1090346,"TERMINAL",0,0,"994",,terminal_output +1186,1090583,"train_dynamics.py",17019,0,"n",python,content +1187,1090584,"train_dynamics.py",17020,0,"",python,selection_keyboard +1188,1090736,"train_dynamics.py",17020,0,"o",python,content +1189,1090738,"train_dynamics.py",17021,0,"",python,selection_keyboard +1190,1090838,"train_dynamics.py",17021,0,"t",python,content +1191,1090838,"train_dynamics.py",17022,0,"",python,selection_keyboard +1192,1090918,"train_dynamics.py",17022,0," ",python,content +1193,1090919,"train_dynamics.py",17023,0,"",python,selection_keyboard +1194,1091472,"TERMINAL",0,0,"10205",,terminal_output +1195,1091497,"train_dynamics.py",17023,0,"a",python,content +1196,1091499,"train_dynamics.py",17024,0,"",python,selection_keyboard +1197,1091678,"train_dynamics.py",17024,0,"r",python,content +1198,1091680,"train_dynamics.py",17025,0,"",python,selection_keyboard +1199,1091796,"train_dynamics.py",17025,0,"g",python,content +1200,1091798,"train_dynamics.py",17026,0,"",python,selection_keyboard +1201,1091911,"train_dynamics.py",17026,0,"s",python,content +1202,1091913,"train_dynamics.py",17027,0,"",python,selection_keyboard +1203,1092050,"train_dynamics.py",17027,0,".",python,content +1204,1092051,"train_dynamics.py",17028,0,"",python,selection_keyboard +1205,1092338,"train_dynamics.py",17028,0,"u",python,content +1206,1092339,"train_dynamics.py",17029,0,"",python,selection_keyboard +1207,1092415,"train_dynamics.py",17029,0,"s",python,content +1208,1092417,"train_dynamics.py",17030,0,"",python,selection_keyboard +1209,1092443,"TERMINAL",0,0,"116",,terminal_output +1210,1092578,"train_dynamics.py",17030,0,"e",python,content +1211,1092579,"train_dynamics.py",17031,0,"",python,selection_keyboard +1212,1092718,"train_dynamics.py",17031,0,"_",python,content +1213,1092720,"train_dynamics.py",17032,0,"",python,selection_keyboard +1214,1093004,"train_dynamics.py",17032,0,"g",python,content +1215,1093006,"train_dynamics.py",17033,0,"",python,selection_keyboard +1216,1093137,"train_dynamics.py",17033,0,"t",python,content +1217,1093139,"train_dynamics.py",17034,0,"",python,selection_keyboard +1218,1093233,"train_dynamics.py",17034,0,"_",python,content 
+1219,1093234,"train_dynamics.py",17035,0,"",python,selection_keyboard +1220,1093405,"train_dynamics.py",17035,0,"a",python,content +1221,1093406,"train_dynamics.py",17036,0,"",python,selection_keyboard +1222,1093469,"TERMINAL",0,0,"227",,terminal_output +1223,1093511,"train_dynamics.py",17036,0,"c",python,content +1224,1093513,"train_dynamics.py",17037,0,"",python,selection_keyboard +1225,1093716,"train_dynamics.py",17037,0,"t",python,content +1226,1093719,"train_dynamics.py",17038,0,"",python,selection_keyboard +1227,1093801,"train_dynamics.py",17038,0,"i",python,content +1228,1093802,"train_dynamics.py",17039,0,"",python,selection_keyboard +1229,1093864,"train_dynamics.py",17039,0,"o",python,content +1230,1093865,"train_dynamics.py",17040,0,"",python,selection_keyboard +1231,1093990,"train_dynamics.py",17040,0,"n",python,content +1232,1093991,"train_dynamics.py",17041,0,"",python,selection_keyboard +1233,1094071,"train_dynamics.py",17041,0,"s",python,content +1234,1094073,"train_dynamics.py",17042,0,"",python,selection_keyboard +1235,1094290,"train_dynamics.py",17042,0,":",python,content +1236,1094291,"train_dynamics.py",17043,0,"",python,selection_keyboard +1237,1094488,"TERMINAL",0,0,"338",,terminal_output +1238,1094752,"train_dynamics.py",17042,0,"",python,selection_command +1239,1095021,"train_dynamics.py",17082,0,"",python,selection_command +1240,1095547,"TERMINAL",0,0,"449",,terminal_output +1241,1096479,"train_dynamics.py",17044,65," lam_indices = genie.vq_encode(inputs, training=False)",python,selection_command +1242,1096563,"TERMINAL",0,0,"5530",,terminal_output +1243,1096681,"train_dynamics.py",17044,116," lam_indices = genie.vq_encode(inputs, training=False)\n inputs[""latent_actions""] = lam_indices",python,selection_command +1244,1096888,"train_dynamics.py",17056,0,"",python,selection_command +1245,1097264,"train_dynamics.py",17122,0," ",python,content +1246,1097265,"train_dynamics.py",17056,0," ",python,content +1247,1097629,"train_dynamics.py",17059,0,"",python,selection_command +1248,1097644,"TERMINAL",0,0,"661",,terminal_output +1249,1097783,"train_dynamics.py",17019,0,"",python,selection_command +1250,1098045,"train_dynamics.py",16988,0,"",python,selection_command +1251,1098432,"train_dynamics.py",17019,0,"",python,selection_command +1252,1098639,"TERMINAL",0,0,"783",,terminal_output +1253,1098804,"train_dynamics.py",17020,0,"",python,selection_command +1254,1099271,"train_dynamics.py",17021,0,"",python,selection_command +1255,1099331,"train_dynamics.py",17022,0,"",python,selection_command +1256,1099331,"train_dynamics.py",17023,0,"",python,selection_command +1257,1099407,"train_dynamics.py",17024,0,"",python,selection_command +1258,1099511,"train_dynamics.py",17025,0,"",python,selection_command +1259,1099512,"train_dynamics.py",17026,0,"",python,selection_command +1260,1099635,"train_dynamics.py",17027,0,"",python,selection_command +1261,1099636,"train_dynamics.py",17028,0,"",python,selection_command +1262,1099672,"train_dynamics.py",17029,0,"",python,selection_command +1263,1099673,"train_dynamics.py",17030,0,"",python,selection_command +1264,1099673,"train_dynamics.py",17031,0,"",python,selection_command +1265,1099744,"TERMINAL",0,0,"994",,terminal_output +1266,1100498,"train_dynamics.py",17071,0,"",python,selection_command +1267,1100667,"train_dynamics.py",17141,0,"",python,selection_command +1268,1100727,"TERMINAL",0,0,"20305",,terminal_output +1269,1101759,"TERMINAL",0,0,"116",,terminal_output +1270,1102876,"TERMINAL",0,0,"227",,terminal_output 
+1271,1103896,"train_dynamics.py",17196,0,"",python,selection_command +1272,1103987,"TERMINAL",0,0,"338",,terminal_output +1273,1104392,"train_dynamics.py",17270,0,"",python,selection_command +1274,1104465,"train_dynamics.py",17318,0,"",python,selection_command +1275,1104492,"train_dynamics.py",17347,0,"",python,selection_command +1276,1104571,"train_dynamics.py",17397,0,"",python,selection_command +1277,1104591,"train_dynamics.py",17456,0,"",python,selection_command +1278,1104592,"train_dynamics.py",17485,0,"",python,selection_command +1279,1104661,"train_dynamics.py",17515,0,"",python,selection_command +1280,1104661,"train_dynamics.py",17553,0,"",python,selection_command +1281,1104711,"train_dynamics.py",17593,0,"",python,selection_command +1282,1104790,"train_dynamics.py",17618,0,"",python,selection_command +1283,1104791,"train_dynamics.py",17647,0,"",python,selection_command +1284,1104853,"train_dynamics.py",17676,0,"",python,selection_command +1285,1104854,"train_dynamics.py",17719,0,"",python,selection_command +1286,1104854,"train_dynamics.py",17770,0,"",python,selection_command +1287,1104938,"train_dynamics.py",17821,0,"",python,selection_command +1288,1104939,"train_dynamics.py",17900,0,"",python,selection_command +1289,1105030,"train_dynamics.py",17929,0,"",python,selection_command +1290,1105031,"train_dynamics.py",17958,0,"",python,selection_command +1291,1105032,"train_dynamics.py",18033,0,"",python,selection_command +1292,1105079,"train_dynamics.py",18093,0,"",python,selection_command +1293,1105080,"train_dynamics.py",18122,0,"",python,selection_command +1294,1105080,"train_dynamics.py",18142,0,"",python,selection_command +1295,1105117,"TERMINAL",0,0,"449",,terminal_output +1296,1105482,"train_dynamics.py",18122,0,"",python,selection_command +1297,1105622,"train_dynamics.py",18093,0,"",python,selection_command +1298,1105769,"train_dynamics.py",18033,0,"",python,selection_command +1299,1105929,"train_dynamics.py",17958,0,"",python,selection_command +1300,1105944,"TERMINAL",0,0,"5540",,terminal_output +1301,1106255,"train_dynamics.py",17929,0,"",python,selection_command +1302,1106615,"train_dynamics.py",17900,0,"",python,selection_command +1303,1107007,"TERMINAL",0,0,"661",,terminal_output +1304,1108015,"TERMINAL",0,0,"772",,terminal_output +1305,1108441,"train_dynamics.py",17929,0,"",python,selection_command +1306,1108653,"train_dynamics.py",17930,0,"\n ",python,content +1307,1109050,"TERMINAL",0,0,"883",,terminal_output +1308,1110139,"TERMINAL",0,0,"994",,terminal_output +1309,1111146,"TERMINAL",0,0,"30405",,terminal_output +1310,1111826,"train_dynamics.py",17943,0,"i",python,content +1311,1111828,"train_dynamics.py",17944,0,"",python,selection_keyboard +1312,1111913,"train_dynamics.py",17944,0,"f",python,content +1313,1111914,"train_dynamics.py",17945,0,"",python,selection_keyboard +1314,1111999,"train_dynamics.py",17945,0," ",python,content +1315,1112001,"train_dynamics.py",17946,0,"",python,selection_keyboard +1316,1112191,"TERMINAL",0,0,"116",,terminal_output +1317,1112298,"train_dynamics.py",17946,0,"l",python,content +1318,1112299,"train_dynamics.py",17947,0,"",python,selection_keyboard +1319,1112575,"train_dynamics.py",17947,0,"a",python,content +1320,1112577,"train_dynamics.py",17948,0,"",python,selection_keyboard +1321,1112578,"train_dynamics.py",17948,0,"m",python,content +1322,1112578,"train_dynamics.py",17949,0,"",python,selection_keyboard +1323,1112863,"train_dynamics.py",17949,0,"_",python,content 
+1324,1112865,"train_dynamics.py",17950,0,"",python,selection_keyboard +1325,1113183,"train_dynamics.py",17950,0,"i",python,content +1326,1113184,"train_dynamics.py",17951,0,"",python,selection_keyboard +1327,1113327,"TERMINAL",0,0,"227",,terminal_output +1328,1113394,"train_dynamics.py",17951,0,"n",python,content +1329,1113395,"train_dynamics.py",17952,0,"",python,selection_keyboard +1330,1113552,"train_dynamics.py",17952,0,"c",python,content +1331,1113553,"train_dynamics.py",17953,0,"",python,selection_keyboard +1332,1113624,"train_dynamics.py",17953,0,"i",python,content +1333,1113626,"train_dynamics.py",17954,0,"",python,selection_keyboard +1334,1114337,"train_dynamics.py",17953,1,"",python,content +1335,1114437,"TERMINAL",0,0,"338",,terminal_output +1336,1114477,"train_dynamics.py",17952,1,"",python,content +1337,1115335,"TERMINAL",0,0,"449",,terminal_output +1338,1115560,"train_dynamics.py",17946,6,"lam_indices",python,content +1339,1116027,"train_dynamics.py",17957,0," ",python,content +1340,1116029,"train_dynamics.py",17958,0,"",python,selection_keyboard +1341,1116155,"train_dynamics.py",17958,0,"n",python,content +1342,1116156,"train_dynamics.py",17959,0,"",python,selection_keyboard +1343,1116302,"train_dynamics.py",17959,0,"o",python,content +1344,1116304,"train_dynamics.py",17960,0,"",python,selection_keyboard +1345,1116359,"train_dynamics.py",17960,0,"t",python,content +1346,1116361,"train_dynamics.py",17961,0,"",python,selection_keyboard +1347,1116397,"TERMINAL",0,0,"5550",,terminal_output +1348,1116454,"train_dynamics.py",17961,0," ",python,content +1349,1116455,"train_dynamics.py",17962,0,"",python,selection_keyboard +1350,1116904,"train_dynamics.py",17962,0,"N",python,content +1351,1116906,"train_dynamics.py",17963,0,"",python,selection_keyboard +1352,1117038,"train_dynamics.py",17963,0,"o",python,content +1353,1117040,"train_dynamics.py",17964,0,"",python,selection_keyboard +1354,1117142,"train_dynamics.py",17964,0,"n",python,content +1355,1117144,"train_dynamics.py",17965,0,"",python,selection_keyboard +1356,1117227,"train_dynamics.py",17965,0,"e",python,content +1357,1117229,"train_dynamics.py",17966,0,"",python,selection_keyboard +1358,1117411,"TERMINAL",0,0,"661",,terminal_output +1359,1117855,"train_dynamics.py",17966,0,":",python,content +1360,1117856,"train_dynamics.py",17967,0,"",python,selection_keyboard +1361,1118065,"train_dynamics.py",17967,0,"\n ",python,content +1362,1118453,"TERMINAL",0,0,"772",,terminal_output +1363,1118638,"train_dynamics.py",17984,0,"s",python,content +1364,1118639,"train_dynamics.py",17985,0,"",python,selection_keyboard +1365,1118923,"train_dynamics.py",17985,0,"t",python,content +1366,1118924,"train_dynamics.py",17986,0,"",python,selection_keyboard +1367,1119093,"train_dynamics.py",17986,0,"e",python,content +1368,1119094,"train_dynamics.py",17987,0,"",python,selection_keyboard +1369,1119237,"train_dynamics.py",17987,0,"p",python,content +1370,1119239,"train_dynamics.py",17988,0,"",python,selection_keyboard +1371,1119544,"TERMINAL",0,0,"883",,terminal_output +1372,1119552,"train_dynamics.py",17988,0,"_",python,content +1373,1119554,"train_dynamics.py",17989,0,"",python,selection_keyboard +1374,1119898,"train_dynamics.py",17989,0,"o",python,content +1375,1119900,"train_dynamics.py",17990,0,"",python,selection_keyboard +1376,1120038,"train_dynamics.py",17990,0,"u",python,content +1377,1120039,"train_dynamics.py",17991,0,"",python,selection_keyboard +1378,1120159,"train_dynamics.py",17991,0,"t",python,content 
+1379,1120161,"train_dynamics.py",17992,0,"",python,selection_keyboard +1380,1120548,"train_dynamics.py",17992,0,"p",python,content +1381,1120550,"train_dynamics.py",17993,0,"",python,selection_keyboard +1382,1120552,"TERMINAL",0,0,"994",,terminal_output +1383,1120929,"train_dynamics.py",17993,0,"u",python,content +1384,1120930,"train_dynamics.py",17994,0,"",python,selection_keyboard +1385,1120975,"train_dynamics.py",17994,0,"t",python,content +1386,1120977,"train_dynamics.py",17995,0,"",python,selection_keyboard +1387,1121206,"train_dynamics.py",17995,0,"s",python,content +1388,1121208,"train_dynamics.py",17996,0,"",python,selection_keyboard +1389,1121497,"train_dynamics.py",17996,0,"[]",python,content +1390,1121499,"train_dynamics.py",17997,0,"",python,selection_keyboard +1391,1121663,"TERMINAL",0,0,"40505",,terminal_output +1392,1121830,"train_dynamics.py",17997,0,"""""",python,content +1393,1121832,"train_dynamics.py",17998,0,"",python,selection_keyboard +1394,1122312,"train_dynamics.py",17998,0,"l",python,content +1395,1122314,"train_dynamics.py",17999,0,"",python,selection_keyboard +1396,1122565,"train_dynamics.py",17999,0,"a",python,content +1397,1122567,"train_dynamics.py",18000,0,"",python,selection_keyboard +1398,1122567,"train_dynamics.py",18000,0,"m",python,content +1399,1122568,"train_dynamics.py",18001,0,"",python,selection_keyboard +1400,1122780,"TERMINAL",0,0,"116",,terminal_output +1401,1122829,"train_dynamics.py",18001,0,"_",python,content +1402,1122830,"train_dynamics.py",18002,0,"",python,selection_keyboard +1403,1123113,"train_dynamics.py",18002,0,"i",python,content +1404,1123115,"train_dynamics.py",18003,0,"",python,selection_keyboard +1405,1123355,"train_dynamics.py",18003,0,"d",python,content +1406,1123356,"train_dynamics.py",18004,0,"",python,selection_keyboard +1407,1123609,"train_dynamics.py",18003,1,"",python,content +1408,1123735,"TERMINAL",0,0,"238",,terminal_output +1409,1123862,"train_dynamics.py",18002,1,"",python,content +1410,1124194,"train_dynamics.py",18002,0,"i",python,content +1411,1124195,"train_dynamics.py",18003,0,"",python,selection_keyboard +1412,1124437,"train_dynamics.py",18003,0,"n",python,content +1413,1124438,"train_dynamics.py",18004,0,"",python,selection_keyboard +1414,1124531,"train_dynamics.py",18004,0,"d",python,content +1415,1124532,"train_dynamics.py",18005,0,"",python,selection_keyboard +1416,1124612,"train_dynamics.py",18005,0,"i",python,content +1417,1124613,"train_dynamics.py",18006,0,"",python,selection_keyboard +1418,1124705,"TERMINAL",0,0,"449",,terminal_output +1419,1124815,"train_dynamics.py",18006,0,"c",python,content +1420,1124816,"train_dynamics.py",18007,0,"",python,selection_keyboard +1421,1125014,"train_dynamics.py",18007,0,"e",python,content +1422,1125015,"train_dynamics.py",18008,0,"",python,selection_keyboard +1423,1125115,"train_dynamics.py",18008,0,"s",python,content +1424,1125116,"train_dynamics.py",18009,0,"",python,selection_keyboard +1425,1125584,"train_dynamics.py",18010,0,"",python,selection_command +1426,1125746,"TERMINAL",0,0,"556:00",,terminal_output +1427,1126110,"train_dynamics.py",18010,0," ",python,content +1428,1126112,"train_dynamics.py",18011,0,"",python,selection_keyboard +1429,1126562,"train_dynamics.py",18010,1,"",python,content +1430,1126703,"train_dynamics.py",18011,0,"",python,selection_command +1431,1126824,"TERMINAL",0,0,"661",,terminal_output +1432,1126870,"train_dynamics.py",18011,0," ",python,content +1433,1126872,"train_dynamics.py",18012,0,"",python,selection_keyboard 
+1434,1127356,"train_dynamics.py",18012,0,"=",python,content +1435,1127358,"train_dynamics.py",18013,0,"",python,selection_keyboard +1436,1127481,"train_dynamics.py",18013,0," ",python,content +1437,1127482,"train_dynamics.py",18014,0,"",python,selection_keyboard +1438,1127748,"train_dynamics.py",18014,0,"l",python,content +1439,1127751,"train_dynamics.py",18015,0,"",python,selection_keyboard +1440,1127888,"train_dynamics.py",18015,0,"a",python,content +1441,1127889,"train_dynamics.py",18016,0,"",python,selection_keyboard +1442,1127890,"TERMINAL",0,0,"772",,terminal_output +1443,1127939,"train_dynamics.py",18016,0,"m",python,content +1444,1127940,"train_dynamics.py",18017,0,"",python,selection_keyboard +1445,1128202,"train_dynamics.py",18017,0,"_",python,content +1446,1128204,"train_dynamics.py",18018,0,"",python,selection_keyboard +1447,1128414,"train_dynamics.py",18018,0,"i",python,content +1448,1128415,"train_dynamics.py",18019,0,"",python,selection_keyboard +1449,1128523,"train_dynamics.py",18019,0,"n",python,content +1450,1128525,"train_dynamics.py",18020,0,"",python,selection_keyboard +1451,1128664,"train_dynamics.py",18020,0,"d",python,content +1452,1128666,"train_dynamics.py",18021,0,"",python,selection_keyboard +1453,1128761,"train_dynamics.py",18021,0,"i",python,content +1454,1128762,"train_dynamics.py",18022,0,"",python,selection_keyboard +1455,1128887,"TERMINAL",0,0,"883",,terminal_output +1456,1128995,"train_dynamics.py",18022,0,"c",python,content +1457,1128996,"train_dynamics.py",18023,0,"",python,selection_keyboard +1458,1129153,"train_dynamics.py",18023,0,"e",python,content +1459,1129154,"train_dynamics.py",18024,0,"",python,selection_keyboard +1460,1129266,"train_dynamics.py",18024,0,"s",python,content +1461,1129268,"train_dynamics.py",18025,0,"",python,selection_keyboard +1462,1129790,"train_dynamics.py",18024,0,"",python,selection_command +1463,1129945,"train_dynamics.py",17966,0,"",python,selection_command +1464,1130019,"TERMINAL",0,0,"994",,terminal_output +1465,1130101,"train_dynamics.py",17929,0,"",python,selection_command +1466,1130237,"train_dynamics.py",17915,0,"",python,selection_command +1467,1130801,"train_dynamics.py",17794,123,"",python,content +1468,1130839,"train_dynamics.py",17806,0,"",python,selection_command +1469,1130902,"train_dynamics.py",17792,0,"",python,selection_command +1470,1130998,"TERMINAL",0,0,"509:005",,terminal_output +1471,1131687,"train_dynamics.py",17806,0," ""mask"": jnp.zeros_like(tokens_full_frame).at[:, -1].set(True),\n ""lam_indices"": lam_indices,\n ",python,content +1472,1131689,"train_dynamics.py",17915,0,"",python,selection_command +1473,1132262,"TERMINAL",0,0,"116",,terminal_output +1474,1132350,"train_dynamics.py",17873,44,"",python,content +1475,1132354,"train_dynamics.py",17885,0,"",python,selection_command +1476,1132478,"train_dynamics.py",17806,0,"",python,selection_command +1477,1132650,"train_dynamics.py",17755,0,"",python,selection_command +1478,1132861,"train_dynamics.py",17704,0,"",python,selection_command +1479,1133100,"TERMINAL",0,0,"227",,terminal_output +1480,1133229,"train_dynamics.py",17661,0,"",python,selection_command +1481,1133261,"train_dynamics.py",17632,0,"",python,selection_command +1482,1133339,"train_dynamics.py",17618,0,"",python,selection_command +1483,1133410,"train_dynamics.py",17578,0,"",python,selection_command +1484,1133465,"train_dynamics.py",17538,0,"",python,selection_command +1485,1133466,"train_dynamics.py",17500,0,"",python,selection_command 
+1486,1133466,"train_dynamics.py",17470,0,"",python,selection_command +1487,1133481,"train_dynamics.py",17446,0,"",python,selection_command +1488,1133710,"train_dynamics.py",17382,0,"",python,selection_command +1489,1133801,"train_dynamics.py",17332,0,"",python,selection_command +1490,1133940,"train_dynamics.py",17305,0,"",python,selection_command +1491,1134118,"train_dynamics.py",17255,0,"",python,selection_command +1492,1134194,"train_dynamics.py",17181,0,"",python,selection_command +1493,1134196,"TERMINAL",0,0,"338",,terminal_output +1494,1134346,"train_dynamics.py",17126,0,"",python,selection_command +1495,1134465,"train_dynamics.py",17056,0,"",python,selection_command +1496,1134663,"train_dynamics.py",17016,0,"",python,selection_command +1497,1134748,"train_dynamics.py",16985,0,"",python,selection_command +1498,1135174,"TERMINAL",0,0,"449",,terminal_output +1499,1136176,"TERMINAL",0,0,"5510",,terminal_output +1500,1136301,"train_dynamics.py",16929,0,"",python,selection_mouse +1501,1136448,"train_dynamics.py",16924,17,"tokens_full_frame",python,selection_mouse +1502,1136844,"train_dynamics.py",16989,0,"",python,selection_mouse +1503,1136994,"train_dynamics.py",16985,11,"lam_indices",python,selection_mouse +1504,1137223,"TERMINAL",0,0,"661",,terminal_output +1505,1138320,"TERMINAL",0,0,"772",,terminal_output +1506,1139368,"TERMINAL",0,0,"883",,terminal_output +1507,1140458,"TERMINAL",0,0,"994",,terminal_output +1508,1141419,"TERMINAL",0,0,"10:00105",,terminal_output +1509,1142523,"TERMINAL",0,0,"116",,terminal_output +1510,1142566,"train_dynamics.py",17913,0,"",python,selection_mouse +1511,1143514,"train_dynamics.py",17914,0,"",python,selection_command +1512,1143528,"TERMINAL",0,0,"227",,terminal_output +1513,1144151,"train_dynamics.py",17914,0,"i",python,content +1514,1144153,"train_dynamics.py",17915,0,"",python,selection_keyboard +1515,1144251,"train_dynamics.py",17915,0,"s",python,content +1516,1144253,"train_dynamics.py",17916,0,"",python,selection_keyboard +1517,1144331,"train_dynamics.py",17916,0," ",python,content +1518,1144332,"train_dynamics.py",17917,0,"",python,selection_keyboard +1519,1144620,"TERMINAL",0,0,"338",,terminal_output +1520,1144695,"train_dynamics.py",17916,0,"",python,selection_command +1521,1145606,"TERMINAL",0,0,"449",,terminal_output +1522,1145984,"train_dynamics.py",17032,0,"",python,selection_mouse +1523,1146139,"train_dynamics.py",17028,14,"use_gt_actions",python,selection_mouse +1524,1146792,"TERMINAL",0,0,"5621",,terminal_output +1525,1146893,"train_dynamics.py",17063,0,"",python,selection_mouse +1526,1147056,"train_dynamics.py",17060,11,"lam_indices",python,selection_mouse +1527,1147867,"TERMINAL",0,0,"772",,terminal_output +1528,1148726,"TERMINAL",0,0,"883",,terminal_output +1529,1149767,"TERMINAL",0,0,"994",,terminal_output +1530,1150841,"TERMINAL",0,0,"10205",,terminal_output +1531,1151986,"TERMINAL",0,0,"116",,terminal_output +1532,1153017,"TERMINAL",0,0,"227",,terminal_output +1533,1154098,"TERMINAL",0,0,"338",,terminal_output +1534,1155001,"TERMINAL",0,0,"449",,terminal_output +1535,1156125,"TERMINAL",0,0,"5530",,terminal_output +1536,1156550,"TERMINAL",0,0,"watch",,terminal_focus +1537,1157070,"TERMINAL",0,0,"661",,terminal_output +1538,1158212,"TERMINAL",0,0,"772",,terminal_output +1539,1159348,"TERMINAL",0,0,"883",,terminal_output +1540,1160313,"TERMINAL",0,0,"994",,terminal_output +1541,1161394,"TERMINAL",0,0,"20305",,terminal_output +1542,1162333,"TERMINAL",0,0,"116",,terminal_output 
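The terminal ticks above also show the recording format itself: one CSV row per event with columns Sequence, Time, File, RangeOffset, RangeLength, Text, Language, Type. A rough replay sketch under that assumption; it applies only snapshot (tab) and content rows and simplifies the recorder's escaping to \n/\r, so treat it as illustrative only:

    import csv

    def replay(path: str) -> dict[str, str]:
        buffers: dict[str, str] = {}
        with open(path, newline="") as f:
            for row in csv.reader(f):
                if not row or row[0] == "Sequence":
                    continue  # skip the heading row
                _, _, fname, offset, length, text, _, etype = row
                if etype not in ("tab", "content"):
                    continue  # selections, terminal output, etc.
                text = text.replace("\\n", "\n").replace("\\r", "\r")
                buf = buffers.setdefault(fname, "")
                start, dell = int(offset), int(length)
                buffers[fname] = buf[:start] + text + buf[start + dell:]
        return buffers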
+1543,1162965,"TERMINAL",0,0,"[?1049l\r[?1l>]0;tum_cte0515@hkn1990:~/Projects/jasmine",,terminal_output +1544,1165835,"TERMINAL",0,0,"git status",,terminal_command +1545,1165847,"TERMINAL",0,0,"]633;COn branch gt-actions\r\nYour branch is up to date with 'origin/gt-actions'.\r\n\r\nLast commands done (2 commands done):\r\n pick ba37453 feat: generate coinrun dataset with val split\r\n pick faadd10 feat: implemented validation loss for all three models\r\nNext commands to do (26 remaining commands):\r\n pick 9a17dbb fix: pass val data path to dataloader\r\n pick 6e69cdb fix typo in image logging\r\n (use ""git rebase --edit-todo"" to view and edit)\r\nYou are currently editing a commit while rebasing branch 'gt-actions' on 'c7522f2'.\r\n (use ""git commit --amend"" to amend the current commit)\r\n (use ""git rebase --continue"" once you are satisfied with your changes)\r\n\r\nChanges not staged for commit:\r\n (use ""git add ..."" to update what will be committed)\r\n (use ""git restore ..."" to discard changes in working directory)\r\n\tmodified: train_dynamics.py\r\n\r\nUntracked files:\r\n (use ""git add ..."" to include in what will be committed)\r\n\tdiff.diff\r\n\tinput_pipeline/generate_breakout_dataset_agent.py\r\n\tkiller.sh\r\n\tkiller_partition.sh\r\n\tlog.log\r\n\toverfit_dir.zip\r\n\trequirements-franz.txt\r\n\tsamples/\r\n\tscripts_cremers/\r\n\tslurm/\r\n\ttest.py\r\n\tutils/visualizer.py\r\n\r\nno changes added to commit (use ""git add"" and/or ""git commit -a"")\r\n]0;tum_cte0515@hkn1990:~/Projects/jasmine",,terminal_output +1546,1167120,"TERMINAL",0,0,"git diff",,terminal_command +1547,1167185,"TERMINAL",0,0,"]633;C[?1h=\rdiff --git a/train_dynamics.py b/train_dynamics.py\r\nindex 62ed900..c9ad7bf 100644\r\n--- a/train_dynamics.py\r\n+++ b/train_dynamics.py\r\n@@ -482,12 +482,14 @@ def main(args: Args) -> None:\r\n \r\n # --- Evaluate full frame prediction (sampling) ---\r\n if args.eval_full_frame:\r\n- lam_indices = genie.vq_encode(inputs, training=False)\r\n tokenizer_outputs = genie.tokenizer.vq_encode(\r\n inputs[""videos""], training=False\r\n )\r\n tokens_full_frame = tokenizer_outputs[""indices""]\r\n- inputs[""latent_actions""] = lam_indices\r\n+ lam_indices = None\r\n+ if not args.use_gt_actions:\r\n+ lam_indices = genie.vq_encode(inputs, training=False)\r\n:",,terminal_output +1548,1167864,"TERMINAL",0,0,"\r+ inputs[""latent_actions""] = lam_indices\r\n:",,terminal_output +1549,1168005,"TERMINAL",0,0,"\r gt = jnp.asarray(inputs[""videos""], dtype=jnp.float32) / 255.0\r\n:",,terminal_output +1550,1168152,"TERMINAL",0,0,"\r inputs[""videos""] = gt[:, :-1].astype(\r\n:",,terminal_output +1551,1168295,"TERMINAL",0,0,"\r args.dtype\r\n:",,terminal_output +1552,1168435,"TERMINAL",0,0,"\r@@ -504,8 +506,9 @@ def main(args: Args) -> None:\r\n:",,terminal_output +1553,1168520,"TERMINAL",0,0,"\r ""token_logits"": logits_full_frame,\r\n:",,terminal_output +1554,1168696,"TERMINAL",0,0,"\r ""video_tokens"": tokens_full_frame,\r\n:",,terminal_output +1555,1168823,"TERMINAL",0,0,"\r ""mask"": jnp.zeros_like(tokens_full_frame).at[:, -1].set(True),\r\n:",,terminal_output +1556,1168984,"TERMINAL",0,0,"\r- ""lam_indices"": lam_indices,\r\n:",,terminal_output +1557,1169109,"TERMINAL",0,0,"\r }\r\n:",,terminal_output +1558,1169235,"TERMINAL",0,0,"\r+ if lam_indices is not None:\r\n:",,terminal_output +1559,1170088,"TERMINAL",0,0,"\r+ step_outputs[""lam_indices""] = lam_indices\r\n:\r loss_full_frame, metrics_full_frame = _calculate_step_metrics(\r\n:\r step_outputs, gt, 
args.num_actions, args.num_patch_latents\r\n:\r )\r\n:\r\r(END)\r\r(END)\r\r(END)\r\r(END)\r\r(END)\r\r(END)\r\r(END)\r\r(END)\r\r(END)",,terminal_output +1560,1170163,"TERMINAL",0,0,"\r\r(END)\r\r(END)\r[?1l>]0;tum_cte0515@hkn1990:~/Projects/jasmine",,terminal_output +1561,1196388,"TERMINAL",0,0,"git commit -am ""lam indices metrics handling for full frame val loss when using gt actions""",,terminal_command +1562,1196451,"TERMINAL",0,0,"]633;C",,terminal_output +1563,1199592,"TERMINAL",0,0,"black....................................................................",,terminal_output +1564,1199662,"TERMINAL",0,0,"g",,terminal_output +1565,1199771,"TERMINAL",0,0,"i",,terminal_output +1566,1199855,"TERMINAL",0,0,"t",,terminal_output +1567,1200068,"TERMINAL",0,0," ",,terminal_output +1568,1200431,"TERMINAL",0,0," ",,terminal_output +1569,1200558,"TERMINAL",0,0," ",,terminal_output +1570,1200742,"TERMINAL",0,0," ",,terminal_output +1571,1200980,"TERMINAL",0,0," ",,terminal_output +1572,1201045,"TERMINAL",0,0,"Passed\r\n",,terminal_output +1573,1201202,"TERMINAL",0,0,"[gt-actions 60693f0] lam indices metrics handling for full frame val loss when using gt actions\r\n 1 file changed, 6 insertions(+), 3 deletions(-)\r\n]0;tum_cte0515@hkn1990:~/Projects/jasmine",,terminal_output +1574,1202735,"TERMINAL",0,0,"git push",,terminal_command +1575,1202785,"TERMINAL",0,0,"]633;C",,terminal_output +1576,1204204,"TERMINAL",0,0,"Enumerating objects: 5, done.\r\nCounting objects: 20% (1/5)\rCounting objects: 40% (2/5)\rCounting objects: 60% (3/5)\rCounting objects: 80% (4/5)\rCounting objects: 100% (5/5)\rCounting objects: 100% (5/5), done.\r\nDelta compression using up to 152 threads\r\nCompressing objects: 33% (1/3)\rCompressing objects: 66% (2/3)\rCompressing objects: 100% (3/3)\rCompressing objects: 100% (3/3), done.\r\nWriting objects: 33% (1/3)\rWriting objects: 66% (2/3)\rWriting objects: 100% (3/3)\rWriting objects: 100% (3/3), 436 bytes | 436.00 KiB/s, done.\r\nTotal 3 (delta 2), reused 0 (delta 0), pack-reused 0\r\n",,terminal_output +1577,1204257,"TERMINAL",0,0,"remote: Resolving deltas: 0% (0/2)\rremote: Resolving deltas: 50% (1/2)\rremote: Resolving deltas: 100% (2/2)\rremote: Resolving deltas: 100% (2/2), completed with 2 local objects.\r\n",,terminal_output +1578,1204502,"TERMINAL",0,0,"To github.com:p-doom/jasmine.git\r\n 52887b0..60693f0 gt-actions -> gt-actions\r\n",,terminal_output +1579,1204520,"TERMINAL",0,0,"]0;tum_cte0515@hkn1990:~/Projects/jasmine",,terminal_output +1580,1242855,"TERMINAL",0,0,"git branch",,terminal_command +1581,1242903,"TERMINAL",0,0,"]633;C[?1h=\r action-mapper\r\n add-wandb-name-and-tags\r\n before-nnx\r\n causal-mem-reduce\r\n causal-spatiotemporal-kv-cache\r\n causal-st-transformer\r\n causal-transformer-dynamics-model\r\n causal-transformer-nnx-no-kv-cache\r\n coinrun-gt-actions\r\n convert-to-jax-array-in-iter\r\n correct-batched-sampling\r\n dev\r\n dont-let-tf-see-gpu\r\n feat/darkness-filter\r\n feat/explicit-image-dims\r\n fix-action-padding-lam-future-information-access\r\n fix-sampling\r\n:",,terminal_output +1582,1244476,"TERMINAL",0,0,"\r fix-transformer-forwardpass\r\n:",,terminal_output +1583,1244721,"TERMINAL",0,0,"\r fix/spatiotemporal-pe-once-in-STTransformer\r\n:",,terminal_output +1584,1244872,"TERMINAL",0,0,"\r generate-minatar-breakout-dataset\r\n:",,terminal_output +1585,1244964,"TERMINAL",0,0,"\r grad-norm-log-and-clip\r\n:",,terminal_output +1586,1245030,"TERMINAL",0,0,"\r grain-dataloader\r\n:",,terminal_output 
+1587,1245192,"TERMINAL",0,0,"\r* gt-actions\r\n:",,terminal_output +1588,1245322,"TERMINAL",0,0,"\r input_pipeline/add-npy2array_record\r\n:",,terminal_output +1589,1245447,"TERMINAL",0,0,"\r logging-variants\r\n:",,terminal_output +1590,1245619,"TERMINAL",0,0,"\r lr-schedules\r\n:",,terminal_output +1591,1245742,"TERMINAL",0,0,"\r main\r\n:",,terminal_output +1592,1245857,"TERMINAL",0,0,"\r maskgit-different-maskprob-per-sample\r\n:",,terminal_output +1593,1245991,"TERMINAL",0,0,"\r maskgit-sampling-iterative-unmasking-fix\r\n:",,terminal_output +1594,1248864,"TERMINAL",0,0,"\r[?1l>]0;tum_cte0515@hkn1990:~/Projects/jasmine",,terminal_output +1595,1250828,"TERMINAL",0,0,"git checkout generate-minatar-breakout-dataset",,terminal_command +1596,1250868,"TERMINAL",0,0,"]633;C",,terminal_output +1597,1250921,"TERMINAL",0,0,"Switched to branch 'generate-minatar-breakout-dataset'\r\nYour branch is ahead of 'origin/generate-minatar-breakout-dataset' by 10 commits.\r\n (use ""git push"" to publish your local commits)\r\n]0;tum_cte0515@hkn1990:~/Projects/jasmine",,terminal_output +1598,1251216,"train_dynamics.py",16790,5541," lam_indices = genie.vq_encode(inputs, training=False)\n tokenizer_outputs = genie.tokenizer.vq_encode(\n inputs[""videos""], training=False\n )\n tokens_full_frame = tokenizer_outputs[""indices""]\n inputs[""latent_actions""] = lam_indices\n gt = jnp.asarray(inputs[""videos""], dtype=jnp.float32) / 255.0\n inputs[""videos""] = gt[:, :-1].astype(\n args.dtype\n ) # remove last frame for generation\n recon_full_frame, logits_full_frame = genie.sample(\n inputs,\n args.seq_len,\n args.val_temperature,\n args.val_sample_argmax,\n args.val_maskgit_steps,\n )\n step_outputs = {\n ""recon"": recon_full_frame,\n ""token_logits"": logits_full_frame,\n ""video_tokens"": tokens_full_frame,\n ""mask"": jnp.zeros_like(tokens_full_frame).at[:, -1].set(True),\n ""lam_indices"": lam_indices,\n }\n loss_full_frame, metrics_full_frame = _calculate_step_metrics(\n step_outputs, gt, args.num_actions, args.num_patch_latents\n )\n val_output.update(\n {\n ""loss_full_frame"": loss_full_frame,\n ""recon_full_frame"": recon_full_frame,\n ""metrics_full_frame"": metrics_full_frame,\n }\n )\n return val_output\n\n def calculate_validation_metrics(val_dataloader, genie, rng):\n step = 0\n loss_per_step = []\n metrics_per_step = []\n loss_full_frame_per_step = []\n metrics_full_frame_per_step = []\n batch = None\n recon = None\n recon_full_frame = None\n for batch in val_dataloader:\n rng, _rng_mask = jax.random.split(rng, 2)\n batch[""rng""] = _rng_mask\n val_outputs = val_step(genie, batch)\n loss_per_step.append(val_outputs[""loss""])\n metrics_per_step.append(val_outputs[""metrics""])\n recon = val_outputs[""recon""]\n if args.eval_full_frame:\n loss_full_frame_per_step.append(val_outputs[""loss_full_frame""])\n metrics_full_frame_per_step.append(val_outputs[""metrics_full_frame""])\n recon_full_frame = val_outputs[""recon_full_frame""]\n step += 1\n if step > args.val_steps:\n break\n\n if step < args.val_steps:\n print(\n f""Warning: Your validation dataset is too small to make val_steps many steps. 
Made {step} steps, expected {args.val_steps}""\n )\n\n val_metrics = {\n f""val_{key}"": np.mean([float(m[key]) for m in metrics_per_step])\n for key in metrics_per_step[0].keys()\n }\n val_metrics[""val_loss""] = np.mean(loss_per_step)\n if args.eval_full_frame:\n val_metrics_full_frame = {\n f""val_full_frame_{key}"": np.mean(\n [float(m[key]) for m in metrics_full_frame_per_step]\n )\n for key in metrics_full_frame_per_step[0].keys()\n }\n val_metrics.update(val_metrics_full_frame)\n val_metrics[""val_loss_full_frame""] = np.mean(loss_full_frame_per_step)\n return val_metrics, batch, recon, recon_full_frame\n\n # --- TRAIN LOOP ---\n dataloader_train = (\n {\n ""videos"": jax.make_array_from_process_local_data(\n videos_sharding, local_data=elem[""videos""]\n ),\n ""actions"": (\n jax.make_array_from_process_local_data(\n actions_sharding, elem[""actions""]\n )\n if args.use_gt_actions\n else None\n ),\n }\n for elem in train_iterator\n )\n dataloader_val = None\n if val_iterator:\n dataloader_val = (\n {\n ""videos"": jax.make_array_from_process_local_data(\n videos_sharding, elem[""videos""]\n ),\n ""actions"": (\n jax.make_array_from_process_local_data(\n actions_sharding, elem[""actions""]\n )\n if args.use_gt_actions\n else None\n ),\n }\n for elem in val_iterator\n )\n if jax.process_index() == 0:\n first_batch = next(dataloader_train)\n first_batch[""rng""] = rng # type: ignore\n compiled = train_step.lower(optimizer, first_batch).compile()\n print_compiled_memory_stats(compiled.memory_analysis())\n print_compiled_cost_analysis(compiled.cost_analysis())\n # Do not skip the first batch during training\n dataloader_train = itertools.chain([first_batch], dataloader_train)\n print(f""Starting training from step {step}..."")\n first_step = step\n while step < args.num_steps:\n for batch in dataloader_train:\n # --- Train step ---\n rng, _rng_mask = jax.random.split(rng, 2)\n batch[""rng""] = _rng_mask\n loss, recon, metrics = train_step(optimizer, batch)\n if step == first_step:\n print_mem_stats(""After params initialized"")\n # metrics[""lr""] = lr_schedule(step)\n # print(f""Step {step}, loss: {loss}"")\n",python,content +1599,1253674,"TERMINAL",0,0,"git stash pop",,terminal_command +1600,1253725,"TERMINAL",0,0,"]633;C",,terminal_output +1601,1253868,"TERMINAL",0,0,"On branch generate-minatar-breakout-dataset\r\nYour branch is ahead of 'origin/generate-minatar-breakout-dataset' by 10 commits.\r\n (use ""git push"" to publish your local commits)\r\n\r\nLast commands done (2 commands done):\r\n pick ba37453 feat: generate coinrun dataset with val split\r\n pick faadd10 feat: implemented validation loss for all three models\r\nNext commands to do (26 remaining commands):\r\n pick 9a17dbb fix: pass val data path to dataloader\r\n pick 6e69cdb fix typo in image logging\r\n (use ""git rebase --edit-todo"" to view and edit)\r\nYou are currently editing a commit while rebasing branch 'gt-actions' on 'c7522f2'.\r\n (use ""git commit --amend"" to amend the current commit)\r\n (use ""git rebase --continue"" once you are satisfied with your changes)\r\n\r\nChanges not staged for commit:\r\n (use ""git add ..."" to update what will be committed)\r\n (use ""git restore ..."" to discard changes in working directory)\r\n\tmodified: genie.py\r\n\tmodified: input_pipeline/generate_breakout_dataset.py\r\n\tmodified: sample.py\r\n\tmodified: train_dynamics.py\r\n\tmodified: train_lam.py\r\n\tmodified: train_tokenizer.py\r\n\r\nUntracked files:\r\n (use ""git add ..."" to include in what will be 
committed)\r\n\tdiff.diff\r\n\tinput_pipeline/generate_breakout_dataset_agent.py\r\n\tkiller.sh\r\n\tkiller_partition.sh\r\n\tlog.log\r\n\toverfit_dir.zip\r\n\trequirements-franz.txt\r\n\tsamples/\r\n\tscripts_cremers/\r\n\tslurm/\r\n\ttest.py\r\n\tutils/visualizer.py\r\n\r\nno changes added to commit (use ""git add"" and/or ""git commit -a"")\r\nDropped refs/stash@{0} (5d253ef7d6b4e19475050c752ec7884cd530c0ef)\r\n]0;tum_cte0515@hkn1990:~/Projects/jasmine",,terminal_output +1602,1254076,"train_dynamics.py",16790,1062," tokenizer_outputs = genie.tokenizer.vq_encode(\n inputs[""videos""], training=False\n )\n tokens_full_frame = tokenizer_outputs[""indices""]\n lam_indices = None\n if not args.use_gt_actions:\n lam_indices = genie.vq_encode(inputs, training=False)\n inputs[""latent_actions""] = lam_indices\n gt = jnp.asarray(inputs[""videos""], dtype=jnp.float32) / 255.0\n inputs[""videos""] = gt[:, :-1].astype(\n args.dtype\n ) # remove last frame for generation\n recon_full_frame, logits_full_frame = genie.sample(\n inputs,\n args.seq_len,\n args.val_temperature,\n args.val_sample_argmax,\n args.val_maskgit_steps,\n )\n step_outputs = {\n ""recon"": recon_full_frame,\n ""token_logits"": logits_full_frame,\n ""video_tokens"": tokens_full_frame,\n ""mask"": jnp.zeros_like(tokens_full_frame).at[:, -1].set(True),\n }\n if lam_indices is not None:\n step_outputs[""lam_indices""] = lam_indices\n\n",python,content +1603,1255384,"train_dynamics.py",0,0,"Switched from branch 'gt-actions' to 'generate-minatar-breakout-dataset'",python,git_branch_checkout +1604,1258429,"TERMINAL",0,0,"git diff",,terminal_command +1605,1258486,"TERMINAL",0,0,"]633;C[?1h=\rdiff --git a/genie.py b/genie.py\r\nindex bcb23e6..7fa3f40 100644\r\n--- a/genie.py\r\n+++ b/genie.py\r\n@@ -263,7 +263,7 @@ class Genie(nnx.Module):\r\n latent_actions_BT1L = self.action_embed(batch[""actions""]).reshape(\r\n *batch[""actions""].shape[:2], 1, self.latent_action_dim\r\n )\r\n- latent_actions_BTm11L = latent_actions_BT1L[:, 1:]\r\n+ latent_actions_BTm11L = latent_actions_BT1L[:, :-1]\r\n action_tokens_EL = latent_actions_BTm11L.reshape(-1, self.latent_action_dim)\r\n else:\r\n assert self.lam is not None\r\n@@ -452,7 +452,7 @@ class Genie(nnx.Module):\r\n latent_actions_BT1L = self.action_embed(batch[""actions""]).reshape(\r\n *batch[""actions""].shape[:2], 1, self.latent_action_dim\r\n )\r\n:",,terminal_output +1606,1259353,"TERMINAL",0,0,"\r- latent_actions_BTm11L = latent_actions_BT1L[:, 1:]\r\n:",,terminal_output +1607,1259465,"TERMINAL",0,0,"\r+ latent_actions_BTm11L = latent_actions_BT1L[:, :-1]\r\n:",,terminal_output +1608,1259664,"TERMINAL",0,0,"\r action_tokens_EL = latent_actions_BTm11L.reshape(-1, self.latent_action_dim)\r\n:",,terminal_output +1609,1259790,"TERMINAL",0,0,"\r else:\r\n:",,terminal_output +1610,1259939,"TERMINAL",0,0,"\r assert self.lam is not None\r\n:",,terminal_output +1611,1260068,"TERMINAL",0,0,"\rdiff --git a/input_pipeline/generate_breakout_dataset.py b/input_pipeline/generate_breakout_dataset.py\r\n:",,terminal_output +1612,1260241,"TERMINAL",0,0,"\rindex 88928b9..4aa5586 100644\r\n:",,terminal_output +1613,1260389,"TERMINAL",0,0,"\r--- a/input_pipeline/generate_breakout_dataset.py\r\n:",,terminal_output +1614,1260510,"TERMINAL",0,0,"\r+++ b/input_pipeline/generate_breakout_dataset.py\r\n:",,terminal_output +1615,1261120,"TERMINAL",0,0,"\r@@ -117,8 +117,8 @@ def generate_episodes(num_episodes: int, split: str):\r\n:\r obs_chunks.extend(obs_chunks_data)\r\n:\r act_chunks.extend(act_chunks_data)\r\n:\r 
\r\n:",,terminal_output +1616,1261127,"TERMINAL",0,0,"\r- ep_metadata, obs_chunks, file_idx, act_chunks = save_chunks(\r\n:",,terminal_output +1617,1261259,"TERMINAL",0,0,"\r- obs_chunks, file_idx, args.chunks_per_file, output_dir_split, act_chunks\r\n:\r+ ep_metadata, file_idx, obs_chunks, act_chunks = save_chunks(\r\n:\r+ file_idx, args.chunks_per_file, output_dir_split, obs_chunks, act_chunks\r\n:",,terminal_output +1618,1261500,"TERMINAL",0,0,"\r )\r\n:\r episode_metadata.extend(ep_metadata)\r\n:",,terminal_output +1619,1261733,"TERMINAL",0,0,"\r \r\n:",,terminal_output +1620,1261815,"TERMINAL",0,0,"\rdiff --git a/sample.py b/sample.py\r\n:",,terminal_output +1621,1261996,"TERMINAL",0,0,"\rindex b5d5a22..6cf11b4 100644\r\n:",,terminal_output +1622,1262115,"TERMINAL",0,0,"\r--- a/sample.py\r\n:\r+++ b/sample.py\r\n:",,terminal_output +1623,1262361,"TERMINAL",0,0,"\r@@ -237,7 +237,7 @@ if __name__ == ""__main__"":\r\n:",,terminal_output +1624,1262574,"TERMINAL",0,0,"\r if action_batch_E is not None:\r\n:",,terminal_output +1625,1262955,"TERMINAL",0,0,"\rM+++ b/input_pipeline/generate_breakout_dataset.py\r\n\r:",,terminal_output +1626,1263131,"TERMINAL",0,0,"\rM--- a/input_pipeline/generate_breakout_dataset.py\r\n\r:",,terminal_output +1627,1263255,"TERMINAL",0,0,"\rMindex 88928b9..4aa5586 100644\r\n\r:",,terminal_output +1628,1263619,"TERMINAL",0,0,"\rMdiff --git a/input_pipeline/generate_breakout_dataset.py b/input_pipeline/generate_breakout_dataset.py\r\n\r:",,terminal_output +1629,1266852,"TERMINAL",0,0,"\rM )\r\nM *batch[""actions""].shape[:2], 1, self.latent_action_dim\r\nM latent_actions_BT1L = self.action_embed(batch[""actions""]).reshape(\r\nM@@ -452,7 +452,7 @@ class Genie(nnx.Module):\r\nM assert self.lam is not None\r\nM else:\r\nM action_tokens_EL = latent_actions_BTm11L.reshape(-1, self.latent_action_dim)\r\nM+ latent_actions_BTm11L = latent_actions_BT1L[:, :-1]\r\nM- latent_actions_BTm11L = latent_actions_BT1L[:, 1:]\r\nM )\r\nM *batch[""actions""].shape[:2], 1, self.latent_action_dim\r\nM latent_actions_BT1L = self.action_embed(batch[""actions""]).reshape(\r\nM@@ -263,7 +263,7 @@ class Genie(nnx.Module):\r\nM+++ b/genie.py\r\nM--- a/genie.py\r\nMindex bcb23e6..7fa3f40 100644\r\nMdiff --git a/genie.py b/genie.py\r\n\r:",,terminal_output +1630,1266976,"TERMINAL",0,0,"\r:\rNo next tag (press RETURN)",,terminal_output +1631,1267106,"TERMINAL",0,0,"\r:",,terminal_output +1632,1267738,"TERMINAL",0,0,"\r[?1l>]0;tum_cte0515@hkn1990:~/Projects/jasmine",,terminal_output +1633,1269858,"TERMINAL",0,0,"git add input_pipeline/generate_breakout_dataset.py",,terminal_command +1634,1269873,"TERMINAL",0,0,"]633;C]0;tum_cte0515@hkn1990:~/Projects/jasmine",,terminal_output +1635,1271844,"TERMINAL",0,0,"git diff",,terminal_command +1636,1271903,"TERMINAL",0,0,"]633;C[?1h=\rdiff --git a/genie.py b/genie.py\r\nindex bcb23e6..7fa3f40 100644\r\n--- a/genie.py\r\n+++ b/genie.py\r\n@@ -263,7 +263,7 @@ class Genie(nnx.Module):\r\n latent_actions_BT1L = self.action_embed(batch[""actions""]).reshape(\r\n *batch[""actions""].shape[:2], 1, self.latent_action_dim\r\n )\r\n- latent_actions_BTm11L = latent_actions_BT1L[:, 1:]\r\n+ latent_actions_BTm11L = latent_actions_BT1L[:, :-1]\r\n action_tokens_EL = latent_actions_BTm11L.reshape(-1, self.latent_action_dim)\r\n else:\r\n assert self.lam is not None\r\n@@ -452,7 +452,7 @@ class Genie(nnx.Module):\r\n latent_actions_BT1L = self.action_embed(batch[""actions""]).reshape(\r\n *batch[""actions""].shape[:2], 1, self.latent_action_dim\r\n 
)\r\n:",,terminal_output +1637,1273578,"TERMINAL",0,0,"\r- latent_actions_BTm11L = latent_actions_BT1L[:, 1:]\r\n:",,terminal_output +1638,1274289,"TERMINAL",0,0,"\r+ latent_actions_BTm11L = latent_actions_BT1L[:, :-1]\r\n:\r action_tokens_EL = latent_actions_BTm11L.reshape(-1, self.latent_action_dim)\r\n:\r else:\r\n:\r assert self.lam is not None\r\n:\rdiff --git a/sample.py b/sample.py\r\n:\rindex b5d5a22..6cf11b4 100644\r\n:\r--- a/sample.py\r\n:\r+++ b/sample.py\r\n:\r@@ -237,7 +237,7 @@ if __name__ == ""__main__"":\r\n:",,terminal_output +1639,1274360,"TERMINAL",0,0,"\r if action_batch_E is not None:\r\n:",,terminal_output +1640,1274468,"TERMINAL",0,0,"\r action_batch_BSm11 = jnp.reshape(action_batch_E, (B, S - 1, 1))\r\n:",,terminal_output +1641,1274667,"TERMINAL",0,0,"\r else:\r\n:",,terminal_output +1642,1274815,"TERMINAL",0,0,"\r- action_batch_BSm11 = jnp.expand_dims(batch[""actions""][:, 1:], -1)\r\n:",,terminal_output +1643,1275463,"TERMINAL",0,0,"\r+ action_batch_BSm11 = jnp.expand_dims(batch[""actions""][:, :-1], -1)\r\n:\r for t, img in enumerate(imgs[1:]):\r\n:\r d = ImageDraw.Draw(img)\r\n:\r for row in range(B):\r\n:\rdiff --git a/train_dynamics.py b/train_dynamics.py\r\n:\rindex b34ba98..ad89aae 100644\r\n:\r--- a/train_dynamics.py\r\n:",,terminal_output +1644,1276450,"TERMINAL",0,0,"\r+++ b/train_dynamics.py\r\n:",,terminal_output +1645,1276941,"TERMINAL",0,0,"\r@@ -482,12 +482,14 @@ def main(args: Args) -> None:\r\n:",,terminal_output +1646,1277111,"TERMINAL",0,0,"\r \r\n:",,terminal_output +1647,1277362,"TERMINAL",0,0,"\r # --- Evaluate full frame prediction (sampling) ---\r\n:",,terminal_output +1648,1277573,"TERMINAL",0,0,"\r if args.eval_full_frame:\r\n:",,terminal_output +1649,1278023,"TERMINAL",0,0,"\r- lam_indices = genie.vq_encode(inputs, training=False)\r\n:",,terminal_output +1650,1278442,"TERMINAL",0,0,"\r tokenizer_outputs = genie.tokenizer.vq_encode(\r\n:\r inputs[""videos""], training=False\r\n:\r )\r\n:\r tokens_full_frame = tokenizer_outputs[""indices""]\r\n:\r- inputs[""latent_actions""] = lam_indices\r\n:\r+ lam_indices = None\r\n:\r+ if not args.use_gt_actions:\r\n:\r+ lam_indices = genie.vq_encode(inputs, training=False)\r\n:\r+ inputs[""latent_actions""] = lam_indices\r\n:\r gt = jnp.asarray(inputs[""videos""], dtype=jnp.float32) / 255.0\r\n:\r inputs[""videos""] = gt[:, :-1].astype(\r\n:\r args.dtype\r\n:\r@@ -504,8 +506,10 @@ def main(args: Args) -> None:\r\n:\r ""token_logits"": logits_full_frame,\r\n:",,terminal_output +1651,1279247,"TERMINAL",0,0,"\r ""video_tokens"": tokens_full_frame,\r\n:",,terminal_output +1652,1279809,"TERMINAL",0,0,"\r ""mask"": jnp.zeros_like(tokens_full_frame).at[:, -1].set(True),\r\n:\r- ""lam_indices"": lam_indices,\r\n:\r }\r\n:\r+ if lam_indices is not None:\r\n:\r+ step_outputs[""lam_indices""] = lam_indices\r\n:\r+\r\n:\r loss_full_frame, metrics_full_frame = _calculate_step_metrics(\r\n:\r step_outputs, gt, args.num_actions, args.num_patch_latents\r\n:\r )\r\n:",,terminal_output +1653,1280090,"TERMINAL",0,0,"\rdiff --git a/train_lam.py b/train_lam.py\r\n:\rindex 7fe605d..f1913dd 100644\r\n:\r--- a/train_lam.py\r\n:\r+++ b/train_lam.py\r\n:\r@@ -479,8 +479,8 @@ def main(args: Args) -> None:\r\n:\r )\r\n:\r if step == first_step:\r\n:\r print_mem_stats(""After params initialized"")\r\n:\r- metrics[""lr""] = lr_schedule(step)\r\n:",,terminal_output +1654,1280213,"TERMINAL",0,0,"\r- print(f""Step {step}, loss: {loss}"")\r\n:",,terminal_output +1655,1280394,"TERMINAL",0,0,"\r+ # metrics[""lr""] = 
lr_schedule(step)\r\n:",,terminal_output +1656,1280475,"TERMINAL",0,0,"\r+ # print(f""Step {step}, loss: {loss}"")\r\n:",,terminal_output +1657,1281027,"TERMINAL",0,0,"\r step += 1\r\n:",,terminal_output +1658,1281184,"TERMINAL",0,0,"\r \r\n:",,terminal_output +1659,1281489,"TERMINAL",0,0,"\r # --- Validation loss ---\r\n:",,terminal_output +1660,1281913,"TERMINAL",0,0,"\rdiff --git a/train_tokenizer.py b/train_tokenizer.py\r\n:",,terminal_output +1661,1281993,"TERMINAL",0,0,"\rindex a2adcb6..2ba875d 100644\r\n:\r--- a/train_tokenizer.py\r\n:\r+++ b/train_tokenizer.py\r\n:",,terminal_output +1662,1282594,"TERMINAL",0,0,"\r@@ -455,8 +455,8 @@ def main(args: Args) -> None:\r\n:\r loss, recon, metrics = train_step(optimizer, batch)\r\n:\r if step == first_step:\r\n:\r print_mem_stats(""After params initialized"")\r\n:\r- metrics[""lr""] = lr_schedule(step)\r\n:\r- print(f""Step {step}, loss: {loss}"")\r\n:\r+ # metrics[""lr""] = lr_schedule(step)\r\n:\r+ # print(f""Step {step}, loss: {loss}"")\r\n:\r step += 1\r\n:\r \r\n:\r # --- Validation loss ---\r\n:\r\r(END)\r\r(END)\r\r(END)\r\r(END)\r\r(END)",,terminal_output +1663,1282595,"TERMINAL",0,0,"\r\r(END)\r\r(END)\r\r(END)",,terminal_output +1664,1283565,"TERMINAL",0,0,"\r[?1l>]0;tum_cte0515@hkn1990:~/Projects/jasmine",,terminal_output +1665,1294839,"TERMINAL",0,0,"git commit -m ""datagen refactor""",,terminal_command +1666,1294887,"TERMINAL",0,0,"]633;C",,terminal_output +1667,1295434,"TERMINAL",0,0,"[WARNING] Unstaged files detected.\r\n[INFO] Stashing unstaged files to /hkfs/home/project/hk-project-p0023960/tum_cte0515/.cache/pre-commit/patch1758625954-3121105.\r\n",,terminal_output +1668,1295573,"train_dynamics.py",16790,1196," lam_indices = genie.vq_encode(inputs, training=False)\n tokenizer_outputs = genie.tokenizer.vq_encode(\n inputs[""videos""], training=False\n )\n tokens_full_frame = tokenizer_outputs[""indices""]\n inputs[""latent_actions""] = lam_indices\n gt = jnp.asarray(inputs[""videos""], dtype=jnp.float32) / 255.0\n inputs[""videos""] = gt[:, :-1].astype(\n args.dtype\n ) # remove last frame for generation\n recon_full_frame, logits_full_frame = genie.sample(\n inputs,\n args.seq_len,\n args.val_temperature,\n args.val_sample_argmax,\n args.val_maskgit_steps,\n )\n step_outputs = {\n ""recon"": recon_full_frame,\n ""token_logits"": logits_full_frame,\n ""video_tokens"": tokens_full_frame,\n ""mask"": jnp.zeros_like(tokens_full_frame).at[:, -1].set(True),\n ""lam_indices"": lam_indices,\n }\n",python,content +1669,1295579,"TERMINAL",0,0,"black....................................................................",,terminal_output +1670,1295897,"TERMINAL",0,0,"Passed\r\n[INFO] Restored changes from /hkfs/home/project/hk-project-p0023960/tum_cte0515/.cache/pre-commit/patch1758625954-3121105.\r\n",,terminal_output +1671,1295981,"TERMINAL",0,0,"[generate-minatar-breakout-dataset 7d3fc59] datagen refactor\r\n 1 file changed, 2 insertions(+), 2 deletions(-)\r\n]0;tum_cte0515@hkn1990:~/Projects/jasmine",,terminal_output +1672,1296137,"train_dynamics.py",16790,1062," tokenizer_outputs = genie.tokenizer.vq_encode(\n inputs[""videos""], training=False\n )\n tokens_full_frame = tokenizer_outputs[""indices""]\n lam_indices = None\n if not args.use_gt_actions:\n lam_indices = genie.vq_encode(inputs, training=False)\n inputs[""latent_actions""] = lam_indices\n gt = jnp.asarray(inputs[""videos""], dtype=jnp.float32) / 255.0\n inputs[""videos""] = gt[:, :-1].astype(\n args.dtype\n ) # remove last frame for generation\n recon_full_frame, 
logits_full_frame = genie.sample(\n inputs,\n args.seq_len,\n args.val_temperature,\n args.val_sample_argmax,\n args.val_maskgit_steps,\n )\n step_outputs = {\n ""recon"": recon_full_frame,\n ""token_logits"": logits_full_frame,\n ""video_tokens"": tokens_full_frame,\n ""mask"": jnp.zeros_like(tokens_full_frame).at[:, -1].set(True),\n }\n if lam_indices is not None:\n step_outputs[""lam_indices""] = lam_indices\n\n",python,content +1673,1299877,"TERMINAL",0,0,"git push",,terminal_command +1674,1299927,"TERMINAL",0,0,"]633;C",,terminal_output +1675,1300410,"TERMINAL",0,0,"g",,terminal_output +1676,1300477,"TERMINAL",0,0,"i",,terminal_output +1677,1300555,"TERMINAL",0,0,"t",,terminal_output +1678,1300666,"TERMINAL",0,0," ",,terminal_output +1679,1300815,"TERMINAL",0,0,"a",,terminal_output +1680,1300939,"TERMINAL",0,0,"t",,terminal_output +1681,1301098,"TERMINAL",0,0,"s",,terminal_output +1682,1301318,"TERMINAL",0,0,"Enumerating objects: 12, done.\r\nCounting objects: 8% (1/12)\rCounting objects: 16% (2/12)\rCounting objects: 25% (3/12)\rCounting objects: 33% (4/12)\rCounting objects: 41% (5/12)\rCounting objects: 50% (6/12)\rCounting objects: 58% (7/12)\rCounting objects: 66% (8/12)\rCounting objects: 75% (9/12)\rCounting objects: 83% (10/12)\rCounting objects: 91% (11/12)\rCounting objects: 100% (12/12)\rCounting objects: 100% (12/12), done.\r\nDelta compression using up to 152 threads\r\nCompressing objects: 14% (1/7)\rCompressing objects: 28% (2/7)\rCompressing objects: 42% (3/7)\rCompressing objects: 57% (4/7)\rCompressing objects: 71% (5/7)\rCompressing objects: 85% (6/7)\rCompressing objects: 100% (7/7)\rCompressing objects: 100% (7/7), done.\r\nWriting objects: 14% (1/7)\rWriting objects: 28% (2/7)\rWriting objects: 42% (3/7)\rWriting objects: 57% (4/7)\rWriting objects: 71% (5/7)\rWriting objects: 85% (6/7)\rWriting objects: 100% (7/7)\rWriting objects: 100% (7/7), 745 bytes | 745.00 KiB/s, done.\r\nTotal 7 (delta 5), reused 0 (delta 0), pack-reused 0\r\n",,terminal_output +1683,1301429,"TERMINAL",0,0,"remote: Resolving deltas: 0% (0/5)\rremote: Resolving deltas: 20% (1/5)\rremote: Resolving deltas: 40% (2/5)\rremote: Resolving deltas: 60% (3/5)\rremote: Resolving deltas: 80% (4/5)\rremote: Resolving deltas: 100% (5/5)\rremote: Resolving deltas: 100% (5/5), completed with 3 local objects.\r\n",,terminal_output +1684,1301648,"TERMINAL",0,0," To github.com:p-doom/jasmine.git\r\n 590c2b8..7d3fc59 generate-minatar-breakout-dataset -> generate-minatar-breakout-dataset\r\n]0;tum_cte0515@hkn1990:~/Projects/jasmine",,terminal_output +1685,1304622,"TERMINAL",0,0,"git status",,terminal_command +1686,1304642,"TERMINAL",0,0,"]633;COn branch generate-minatar-breakout-dataset\r\nYour branch is up to date with 'origin/generate-minatar-breakout-dataset'.\r\n\r\nLast commands done (2 commands done):\r\n pick ba37453 feat: generate coinrun dataset with val split\r\n pick faadd10 feat: implemented validation loss for all three models\r\nNext commands to do (26 remaining commands):\r\n pick 9a17dbb fix: pass val data path to dataloader\r\n pick 6e69cdb fix typo in image logging\r\n (use ""git rebase --edit-todo"" to view and edit)\r\nYou are currently editing a commit while rebasing branch 'gt-actions' on 'c7522f2'.\r\n (use ""git commit --amend"" to amend the current commit)\r\n (use ""git rebase --continue"" once you are satisfied with your changes)\r\n\r\nChanges not staged for commit:\r\n (use ""git add ..."" to update what will be committed)\r\n (use ""git restore ..."" to discard changes in 
working directory)\r\n\tmodified: genie.py\r\n\tmodified: sample.py\r\n\tmodified: train_dynamics.py\r\n\tmodified: train_lam.py\r\n\tmodified: train_tokenizer.py\r\n\r\nUntracked files:\r\n (use ""git add ..."" to include in what will be committed)\r\n\tdiff.diff\r\n\tinput_pipeline/generate_breakout_dataset_agent.py\r\n\tkiller.sh\r\n\tkiller_partition.sh\r\n\tlog.log\r\n\toverfit_dir.zip\r\n\trequirements-franz.txt\r\n\tsamples/\r\n\tscripts_cremers/\r\n\tslurm/\r\n\ttest.py\r\n\tutils/visualizer.py\r\n\r\nno changes added to commit (use ""git add"" and/or ""git commit -a"")\r\n]0;tum_cte0515@hkn1990:~/Projects/jasmine",,terminal_output +1687,1309198,"TERMINAL",0,0,"git stash",,terminal_command +1688,1309230,"TERMINAL",0,0,"]633;C",,terminal_output +1689,1309342,"TERMINAL",0,0,"Saved working directory and index state WIP on generate-minatar-breakout-dataset: 7d3fc59 datagen refactor\r\n]0;tum_cte0515@hkn1990:~/Projects/jasmine",,terminal_output +1690,1309647,"train_dynamics.py",16790,1196," lam_indices = genie.vq_encode(inputs, training=False)\n tokenizer_outputs = genie.tokenizer.vq_encode(\n inputs[""videos""], training=False\n )\n tokens_full_frame = tokenizer_outputs[""indices""]\n inputs[""latent_actions""] = lam_indices\n gt = jnp.asarray(inputs[""videos""], dtype=jnp.float32) / 255.0\n inputs[""videos""] = gt[:, :-1].astype(\n args.dtype\n ) # remove last frame for generation\n recon_full_frame, logits_full_frame = genie.sample(\n inputs,\n args.seq_len,\n args.val_temperature,\n args.val_sample_argmax,\n args.val_maskgit_steps,\n )\n step_outputs = {\n ""recon"": recon_full_frame,\n ""token_logits"": logits_full_frame,\n ""video_tokens"": tokens_full_frame,\n ""mask"": jnp.zeros_like(tokens_full_frame).at[:, -1].set(True),\n ""lam_indices"": lam_indices,\n }\n",python,content +1691,1313165,"TERMINAL",0,0,"git merge gt-actions",,terminal_command +1692,1313245,"TERMINAL",0,0,"]633;Chint: Waiting for your editor to close the file... 
",,terminal_output +1693,1313445,"TERMINAL",0,0,"[?1049h[>4;2m[?1h=[?2004h[?1004h[?12h[?12l[?25l""~/Projects/jasmine/.git/MERGE_MSG"" 6L, 291B▽ Pzz\[0%m [>c]10;?]11;?Merge branch 'gt-actions' into generate-minatar-breakout-dataset\r\n# Please enter a commit message to explain why this merge is necessary,# especially if it merges an updated upstream into a topic branch.#\r\n# Lines starting with '#' will be ignored, and an empty message aborts\r\n# the commit.\r\n~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ 1,1All[?25hP+q436f\P+q6b75\P+q6b64\P+q6b72\P+q6b6c\P+q2332\P+q2334\P+q2569\P+q2a37\P+q6b31\[?12$p[?25l/3333/3333 [?25h[?25l/f6f6/e3e3 [?25h",,terminal_output +1694,1313504,"train_dynamics.py",16790,5412," tokenizer_outputs = genie.tokenizer.vq_encode(\n inputs[""videos""], training=False\n )\n tokens_full_frame = tokenizer_outputs[""indices""]\n lam_indices = None\n if not args.use_gt_actions:\n lam_indices = genie.vq_encode(inputs, training=False)\n inputs[""latent_actions""] = lam_indices\n gt = jnp.asarray(inputs[""videos""], dtype=jnp.float32) / 255.0\n inputs[""videos""] = gt[:, :-1].astype(\n args.dtype\n ) # remove last frame for generation\n recon_full_frame, logits_full_frame = genie.sample(\n inputs,\n args.seq_len,\n args.val_temperature,\n args.val_sample_argmax,\n args.val_maskgit_steps,\n )\n step_outputs = {\n ""recon"": recon_full_frame,\n ""token_logits"": logits_full_frame,\n ""video_tokens"": tokens_full_frame,\n ""mask"": jnp.zeros_like(tokens_full_frame).at[:, -1].set(True),\n }\n if lam_indices is not None:\n step_outputs[""lam_indices""] = lam_indices\n loss_full_frame, metrics_full_frame = _calculate_step_metrics(\n step_outputs, gt, args.num_actions, args.num_patch_latents\n )\n val_output.update(\n {\n ""loss_full_frame"": loss_full_frame,\n ""recon_full_frame"": recon_full_frame,\n ""metrics_full_frame"": metrics_full_frame,\n }\n )\n return val_output\n\n def calculate_validation_metrics(val_dataloader, genie, rng):\n step = 0\n loss_per_step = []\n metrics_per_step = []\n loss_full_frame_per_step = []\n metrics_full_frame_per_step = []\n batch = None\n recon = None\n recon_full_frame = None\n for batch in val_dataloader:\n rng, _rng_mask = jax.random.split(rng, 2)\n batch[""rng""] = _rng_mask\n val_outputs = val_step(genie, batch)\n loss_per_step.append(val_outputs[""loss""])\n metrics_per_step.append(val_outputs[""metrics""])\n recon = val_outputs[""recon""]\n if args.eval_full_frame:\n loss_full_frame_per_step.append(val_outputs[""loss_full_frame""])\n metrics_full_frame_per_step.append(val_outputs[""metrics_full_frame""])\n recon_full_frame = val_outputs[""recon_full_frame""]\n step += 1\n if step > args.val_steps:\n break\n\n if step < args.val_steps:\n print(\n f""Warning: Your validation dataset is too small to make val_steps many steps. 
Made {step} steps, expected {args.val_steps}""\n )\n\n val_metrics = {\n f""val_{key}"": np.mean([float(m[key]) for m in metrics_per_step])\n for key in metrics_per_step[0].keys()\n }\n val_metrics[""val_loss""] = np.mean(loss_per_step)\n if args.eval_full_frame:\n val_metrics_full_frame = {\n f""val_full_frame_{key}"": np.mean(\n [float(m[key]) for m in metrics_full_frame_per_step]\n )\n for key in metrics_full_frame_per_step[0].keys()\n }\n val_metrics.update(val_metrics_full_frame)\n val_metrics[""val_loss_full_frame""] = np.mean(loss_full_frame_per_step)\n return val_metrics, batch, recon, recon_full_frame\n\n # --- TRAIN LOOP ---\n dataloader_train = (\n {\n ""videos"": jax.make_array_from_process_local_data(\n videos_sharding, local_data=elem[""videos""]\n ),\n ""actions"": (\n jax.make_array_from_process_local_data(\n actions_sharding, elem[""actions""]\n )\n if args.use_gt_actions\n else None\n ),\n }\n for elem in train_iterator\n )\n dataloader_val = None\n if val_iterator:\n dataloader_val = (\n {\n ""videos"": jax.make_array_from_process_local_data(\n videos_sharding, elem[""videos""]\n ),\n ""actions"": (\n jax.make_array_from_process_local_data(\n actions_sharding, elem[""actions""]\n )\n if args.use_gt_actions\n else None\n ),\n }\n for elem in val_iterator\n )\n if jax.process_index() == 0:\n first_batch = next(dataloader_train)\n first_batch[""rng""] = rng # type: ignore\n compiled = train_step.lower(optimizer, first_batch).compile()\n print_compiled_memory_stats(compiled.memory_analysis())\n print_compiled_cost_analysis(compiled.cost_analysis())\n # Do not skip the first batch during training\n dataloader_train = itertools.chain([first_batch], dataloader_train)\n print(f""Starting training from step {step}..."")\n first_step = step\n while step < args.num_steps:\n for batch in dataloader_train:\n # --- Train step ---\n rng, _rng_mask = jax.random.split(rng, 2)\n batch[""rng""] = _rng_mask\n loss, recon, metrics = train_step(optimizer, batch)\n if step == first_step:\n print_mem_stats(""After params initialized"")\n metrics[""lr""] = lr_schedule(step)\n print(f""Step {step}, loss: {loss}"")\n",python,content +1695,1315099,"TERMINAL",0,0,"[?25l::[?25h",,terminal_output +1696,1315199,"TERMINAL",0,0,"w",,terminal_output +1697,1315265,"TERMINAL",0,0,"q",,terminal_output +1698,1315605,"TERMINAL",0,0,"\r[?25l[?2004l[>4;m"".git/MERGE_MSG"" 6L, 291B written\r\r\r\n[?1004l[?2004l[?1l>[?25h[>4;m[?1049l\r",,terminal_output +1699,1315664,"TERMINAL",0,0,"Merge made by the 'ort' strategy.\r\n",,terminal_output +1700,1315678,"TERMINAL",0,0," genie.py | 4 ++--\r\n sample.py | 2 +-\r\n train_dynamics.py | 13 ++++++++-----\r\n 3 files changed, 11 insertions(+), 8 deletions(-)\r\n]0;tum_cte0515@hkn1990:~/Projects/jasmine",,terminal_output +1701,1557072,"TERMINAL",0,0,"python",,terminal_command +1702,1557119,"TERMINAL",0,0,"]633;C",,terminal_output +1703,1557354,"TERMINAL",0,0,"Python 3.10.18 (main, Jun 4 2025, 17:36:27) [Clang 20.1.4 ] on linux\r\nType ""help"", ""copyright"", ""credits"" or ""license"" for more information.\r\n",,terminal_output +1704,1557830,"TERMINAL",0,0,">>> ",,terminal_output +1705,1558201,"TERMINAL",0,0,"1",,terminal_output +1706,1558266,"TERMINAL",0,0,"0",,terminal_output +1707,1558653,"TERMINAL",0,0,"_",,terminal_output +1708,1559127,"TERMINAL",0,0,"0",,terminal_output +1709,1559228,"TERMINAL",0,0,"0",,terminal_output +1710,1559391,"TERMINAL",0,0,"0",,terminal_output +1711,1561013,"TERMINAL",0,0,"\r",,terminal_output +1712,1561774,"TERMINAL",0,0,"0",,terminal_output 
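The merged `train_dynamics.py` shown above only computes `lam_indices` when `not args.use_gt_actions` (attaching it to `step_outputs` conditionally), and `calculate_validation_metrics` averages the per-step metric dicts key by key before prefixing them with `val_`. A minimal, self-contained sketch of that averaging pattern; the metric names here are hypothetical placeholders, not the project's actual values:

```python
import numpy as np

# Per-step metric dicts, as produced by repeated val_step calls.
# "psnr"/"ssim" are placeholder metric names for illustration.
metrics_per_step = [
    {"psnr": 24.1, "ssim": 0.81},
    {"psnr": 25.3, "ssim": 0.83},
]
loss_per_step = [0.42, 0.39]

# Average every metric across steps, prefixing keys with "val_",
# mirroring the dict comprehension in calculate_validation_metrics.
val_metrics = {
    f"val_{key}": np.mean([float(m[key]) for m in metrics_per_step])
    for key in metrics_per_step[0].keys()
}
val_metrics["val_loss"] = float(np.mean(loss_per_step))
print(val_metrics)  # {'val_psnr': 24.7..., 'val_ssim': 0.82..., 'val_loss': 0.405}
```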
+1713,1562100,"TERMINAL",0,0,"_",,terminal_output +1714,1562383,"TERMINAL",0,0,"0",,terminal_output +1715,1562513,"TERMINAL",0,0,"0",,terminal_output +1716,1562579,"TERMINAL",0,0,"0",,terminal_output +1717,1563870,"TERMINAL",0,0," ",,terminal_output +1718,1564031,"TERMINAL",0,0,"*",,terminal_output +1719,1564084,"TERMINAL",0,0," ",,terminal_output +1720,1567657,"TERMINAL",0,0,"\r",,terminal_output +1721,1567799,"TERMINAL",0,0,"\r",,terminal_output +1722,1567956,"TERMINAL",0,0,"\r",,terminal_output +1723,1568756,"TERMINAL",0,0,"\r\n10000000\r\n>>> ",,terminal_output +1724,1572974,"TERMINAL",0,0,"2",,terminal_output +1725,1573039,"TERMINAL",0,0,"0",,terminal_output +1726,1573174,"TERMINAL",0,0,"0",,terminal_output +1727,1574470,"TERMINAL",0,0,"_",,terminal_output +1728,1574762,"TERMINAL",0,0,"0",,terminal_output +1729,1574879,"TERMINAL",0,0,"0",,terminal_output +1730,1574987,"TERMINAL",0,0,"0",,terminal_output +1731,1575617,"TERMINAL",0,0," ",,terminal_output +1732,1575750,"TERMINAL",0,0,"*",,terminal_output +1733,1575815,"TERMINAL",0,0," ",,terminal_output +1734,1576560,"TERMINAL",0,0,"48",,terminal_output +1735,1577677,"TERMINAL",0,0," ",,terminal_output +1736,1577801,"TERMINAL",0,0,"*",,terminal_output +1737,1577976,"TERMINAL",0,0," ",,terminal_output +1738,1578326,"TERMINAL",0,0,"1",,terminal_output +1739,1578440,"TERMINAL",0,0,"6",,terminal_output +1740,1579687,"TERMINAL",0,0,"",,terminal_output +1741,1579832,"TERMINAL",0,0,"",,terminal_output +1742,1579951,"TERMINAL",0,0,"",,terminal_output +1743,1580156,"TERMINAL",0,0,"",,terminal_output +1744,1580296,"TERMINAL",0,0,"",,terminal_output +1745,1580706,"TERMINAL",0,0,"\r ",,terminal_output +1746,1580831,"TERMINAL",0,0,"\r ",,terminal_output +1747,1581626,"TERMINAL",0,0,"\r[1@3 ",,terminal_output +1748,1581973,"TERMINAL",0,0,"\r[1@6 ",,terminal_output +1749,1582478,"TERMINAL",0,0," ",,terminal_output +1750,1582680,"TERMINAL",0,0,"*",,terminal_output +1751,1582833,"TERMINAL",0,0," ",,terminal_output +1752,1582990,"TERMINAL",0,0,"1",,terminal_output +1753,1583149,"TERMINAL",0,0,"6",,terminal_output +1754,1584172,"TERMINAL",0,0,"\r\n115200000\r\n>>> ",,terminal_output +1755,1586474,"TERMINAL",0,0,"\r>>> 200_000 * 36 * 16",,terminal_output +1756,1588559,"TERMINAL",0,0," ",,terminal_output +1757,1589624,"TERMINAL",0,0,"\r",,terminal_output +1758,1590051,"TERMINAL",0,0,")",,terminal_output +1759,1590444,"TERMINAL",0,0,"",,terminal_output +1760,1590607,"TERMINAL",0,0,"",,terminal_output +1761,1590842,"TERMINAL",0,0,"",,terminal_output +1762,1591044,"TERMINAL",0,0,"",,terminal_output +1763,1591216,"TERMINAL",0,0,"",,terminal_output +1764,1591428,"TERMINAL",0,0,"",,terminal_output +1765,1592084,"TERMINAL",0,0,"\r>>> [1@(2",,terminal_output +1766,1592563,"TERMINAL",0,0,"",,terminal_output +1767,1592690,"TERMINAL",0,0," *",,terminal_output +1768,1592884,"TERMINAL",0,0," 36",,terminal_output +1769,1593028,"TERMINAL",0,0," *",,terminal_output +1770,1593247,"TERMINAL",0,0," 16",,terminal_output +1771,1593410,"TERMINAL",0,0,")",,terminal_output +1772,1593830,"TERMINAL",0,0," ",,terminal_output +1773,1594397,"TERMINAL",0,0,"/",,terminal_output +1774,1594624,"TERMINAL",0,0," ",,terminal_output +1775,1595649,"TERMINAL",0,0,"1",,terminal_output +1776,1595715,"TERMINAL",0,0,"0",,terminal_output +1777,1595955,"TERMINAL",0,0,"_",,terminal_output +1778,1596268,"TERMINAL",0,0,"0",,terminal_output +1779,1596345,"TERMINAL",0,0,"0",,terminal_output +1780,1596464,"TERMINAL",0,0,"0",,terminal_output +1781,1596775,"TERMINAL",0,0,"_",,terminal_output 
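The interpreter session recorded above is back-of-the-envelope arithmetic: `200_000 * 36 * 16` evaluates to `115200000`, and dividing by `10_000_000` yields `11.52`, both of which appear verbatim in the terminal output. Reproduced as a sketch; interpreting the factors (e.g., as steps × batch size × sequence length) is an assumption, since only the raw numbers appear in the session:

```python
# Reproduces the REPL arithmetic from the recording.
# What the factors denote (steps, batch, seq_len, ...) is not stated
# in the session itself; only the numbers are recorded.
total = 200_000 * 36 * 16
print(total)               # 115200000
print(total / 10_000_000)  # 11.52
```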
+1782,1596997,"TERMINAL",0,0,"0",,terminal_output +1783,1597111,"TERMINAL",0,0,"0",,terminal_output +1784,1597607,"TERMINAL",0,0,"0",,terminal_output +1785,1599325,"TERMINAL",0,0,"\r\n11.52\r\n>>> ",,terminal_output +1786,2645810,"TERMINAL",0,0,"srun",,terminal_focus +1787,2646195,"TERMINAL",0,0,"q",,terminal_output +1788,2646320,"TERMINAL",0,0,"u",,terminal_output +1789,2646388,"TERMINAL",0,0,"e",,terminal_output +1790,2646471,"TERMINAL",0,0,"u",,terminal_output +1791,2646537,"TERMINAL",0,0,"e",,terminal_output +1792,2646698,"TERMINAL",0,0,"\r\n[?2004l\r[?1049h(B[?7hEvery 1.0s: squeue --mehkn0401.localdomain: Tue Sep 23 13:35:06 2025JOBID PARTITION NAME USER ST\tTIME NODES NODELIST(REASON)3511158 accelerat train_dy tum_cte0 PD\t0:00\t 1 (Priority)3511159 accelerat train_dy tum_cte0 PD\t0:00\t 1 (Priority)3512804 accelerat interact tum_cte0 PD\t0:00\t 1 (Priority)3512808 dev_accel interact tum_cte0 R34:16\t 1 hkn04013512651 large preproce tum_cte0 R 1:31:21\t 1 hkn1901",,terminal_output +1793,2647722,"TERMINAL",0,0,"772",,terminal_output +1794,2647792,"TERMINAL",0,0,"[?1049l\r[?1l>]0;tum_cte0515@hkn0401:~/Projects/jasmine[?2004h(jasmine) [tum_cte0515@hkn0401 jasmine]$ ",,terminal_output +1795,2675535,"slurm/jobs/mihir/horeka/breakout/sample_maskgit.sbatch",0,0,"#!/usr/bin/env bash\n\n#SBATCH --nodes=1\n#SBATCH --ntasks-per-node=4\n#SBATCH --time=01:00:00\n#SBATCH --partition=accelerated\n#SBATCH --cpus-per-task=5\n#SBATCH --gres=gpu:4\n#SBATCH --output=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir/coinrun/dynamics/sampling/maskgit/%x_%j.log\n#SBATCH --error=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir/coinrun/dynamics/sampling/maskgit/%x_%j.log\n#SBATCH --job-name=coinrun_sample_maskgit\n\n# Unload modules that may interfere\nmodule unload mpi/openmpi/5.0\nmodule unload devel/cuda/12.4\n\n# Activate virtual environment\nsource .venv/bin/activate\n\narray_records_dir=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_coinrun/coinrun_episodes_test/val\nCHECKPOINT_PATH=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/big-runs/dyn/interactive/3500285\n\n\necho ""Sampling from checkpoint: $CHECKPOINT_PATH""\n\nsrun python sample.py \\n --checkpoint ""$CHECKPOINT_PATH"" \\n --data_dir=$array_records_dir \\n --seq_len=16 \\n --batch_size=4 \\n --start_frame=1 \\n --image_height=64 \\n --image_width=64 \\n --dyna_type=maskgit",shellscript,tab +1796,2682609,"TERMINAL",0,0,"python",,terminal_focus +1797,2683644,"TERMINAL",0,0,"^D\r\n",,terminal_output +1798,2683662,"TERMINAL",0,0,"]0;tum_cte0515@hkn1990:~/Projects/jasmine",,terminal_output +1799,2698770,"slurm/jobs/mihir/horeka/breakout/sample_maskgit.sbatch",763,0,"",shellscript,selection_mouse +1800,2698854,"slurm/jobs/mihir/horeka/breakout/sample_maskgit.sbatch",763,1,"h",shellscript,selection_mouse +1801,2698855,"slurm/jobs/mihir/horeka/breakout/sample_maskgit.sbatch",763,100,"hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/big-runs/dyn/interactive/3500285\n",shellscript,selection_mouse +1802,2699018,"slurm/jobs/mihir/horeka/breakout/sample_maskgit.sbatch",763,101,"hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/big-runs/dyn/interactive/3500285\n\n",shellscript,selection_mouse +1803,2699213,"slurm/jobs/mihir/horeka/breakout/sample_maskgit.sbatch",763,100,"hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/big-runs/dyn/interactive/3500285\n",shellscript,selection_mouse 
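The selection and content events that follow edit `slurm/jobs/mihir/horeka/breakout/sample_maskgit.sbatch`, swapping the coinrun `big-runs` checkpoint path for the breakout run at `.../checkpoints/breakout/dyn/interactive/3512647`. A small pre-flight sketch of the kind of check one might run before submitting such a job; the helper is illustrative and not part of the repository:

```python
import os
import sys

# Hypothetical pre-flight check: fail fast if the checkpoint directory
# referenced by the sbatch script is missing on the shared filesystem.
checkpoint_path = (
    "/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/"
    "checkpoints/breakout/dyn/interactive/3512647"
)

if not os.path.isdir(checkpoint_path):
    sys.exit(f"Checkpoint directory not found: {checkpoint_path}")
print(f"Sampling from checkpoint: {checkpoint_path}")
```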
+1804,2699679,"slurm/jobs/mihir/horeka/breakout/sample_maskgit.sbatch",763,101,"hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/big-runs/dyn/interactive/3500285\n\n",shellscript,selection_mouse +1805,2699822,"slurm/jobs/mihir/horeka/breakout/sample_maskgit.sbatch",763,135,"hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/big-runs/dyn/interactive/3500285\n\n\necho ""Sampling from checkpoint: $",shellscript,selection_mouse +1806,2699858,"slurm/jobs/mihir/horeka/breakout/sample_maskgit.sbatch",763,131,"hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/big-runs/dyn/interactive/3500285\n\n\necho ""Sampling from checkpoin",shellscript,selection_mouse +1807,2699859,"slurm/jobs/mihir/horeka/breakout/sample_maskgit.sbatch",763,126,"hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/big-runs/dyn/interactive/3500285\n\n\necho ""Sampling from chec",shellscript,selection_mouse +1808,2699886,"slurm/jobs/mihir/horeka/breakout/sample_maskgit.sbatch",763,123,"hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/big-runs/dyn/interactive/3500285\n\n\necho ""Sampling from c",shellscript,selection_mouse +1809,2699887,"slurm/jobs/mihir/horeka/breakout/sample_maskgit.sbatch",763,122,"hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/big-runs/dyn/interactive/3500285\n\n\necho ""Sampling from ",shellscript,selection_mouse +1810,2699912,"slurm/jobs/mihir/horeka/breakout/sample_maskgit.sbatch",763,101,"hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/big-runs/dyn/interactive/3500285\n\n",shellscript,selection_mouse +1811,2699996,"slurm/jobs/mihir/horeka/breakout/sample_maskgit.sbatch",763,100,"hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/big-runs/dyn/interactive/3500285\n",shellscript,selection_mouse +1812,2700070,"slurm/jobs/mihir/horeka/breakout/sample_maskgit.sbatch",763,33,"hkfs/work/workspace/scratch/tum_i",shellscript,selection_mouse +1813,2700071,"slurm/jobs/mihir/horeka/breakout/sample_maskgit.sbatch",632,131,"\narray_records_dir=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_coinrun/coinrun_episodes_test/val\nCHECKPOINT_PATH=/",shellscript,selection_mouse +1814,2700247,"slurm/jobs/mihir/horeka/breakout/sample_maskgit.sbatch",745,18,"\nCHECKPOINT_PATH=/",shellscript,selection_mouse +1815,2700423,"slurm/jobs/mihir/horeka/breakout/sample_maskgit.sbatch",763,99,"hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/big-runs/dyn/interactive/3500285",shellscript,selection_mouse +1816,2701146,"slurm/jobs/mihir/horeka/breakout/sample_maskgit.sbatch",763,99,"",shellscript,content +1817,2701173,"slurm/jobs/mihir/horeka/breakout/sample_maskgit.sbatch",762,0,"",shellscript,selection_command +1818,2701642,"slurm/jobs/mihir/horeka/breakout/sample_maskgit.sbatch",763,0,"\n",shellscript,content +1819,2702390,"slurm/jobs/mihir/horeka/breakout/sample_maskgit.sbatch",763,1,"",shellscript,content +1820,2702549,"slurm/jobs/mihir/horeka/breakout/sample_maskgit.sbatch",762,1,"",shellscript,content +1821,2702928,"slurm/jobs/mihir/horeka/breakout/sample_maskgit.sbatch",762,0,"/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/breakout/dyn/interactive/3512647",shellscript,content +1822,2716915,"slurm/jobs/mihir/horeka/breakout/sample_maskgit.sbatch",1099,0,"",shellscript,selection_mouse +1823,2722388,"TERMINAL",0,0,"srun",,terminal_focus +1824,2723130,"TERMINAL",0,0,"s",,terminal_output +1825,2723196,"TERMINAL",0,0,"h",,terminal_output 
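The `sh .../sample_maskgit.sbatch` run that follows aborts inside `checkpoint_manager.restore`: Orbax reports "User-provided restore item and on-disk value metadata tree structures do not match", with the user-provided template containing a `lam` subtree (`Diff(lhs={...}, rhs=None)`) while the on-disk breakout checkpoint contains `action_embed` (`Diff(lhs=None, rhs={...})`), i.e. the checkpoint was trained with ground-truth actions but `sample.py` built its restore template without them. A generic sketch of this failure mode, not the repository's actual pytrees; restore requires the template's tree structure to match the saved one:

```python
import jax
import jax.numpy as jnp

# Minimal reproduction of the mismatch (shapes and keys are illustrative):
# the saved tree has action_embed, the restore template has lam instead.
saved = {"model": {"action_embed": {"embedding": jnp.zeros((6, 32))}}}
template = {"model": {"lam": {"codebook": jnp.zeros((6, 32))}}}

same = (
    jax.tree_util.tree_structure(saved)
    == jax.tree_util.tree_structure(template)
)
print(same)  # False -> Orbax raises "tree structures do not match"
```

Under that reading, building the template with the same `use_gt_actions` flag the checkpoint was trained with would make the two trees line up.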
+1826,2723262,"TERMINAL",0,0," ",,terminal_output +1827,2723550,"TERMINAL",0,0,"slurm/jobs/mihir/horeka/breakout/sample_maskgit.sbatch",,terminal_output +1828,2723763,"TERMINAL",0,0,"\rslurm/jobs/mihir/horeka/breakout/sample_maskgit.sbatch\r\n[?2004l\r",,terminal_output +1829,2723887,"TERMINAL",0,0,"Sampling from checkpoint: /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/breakout/dyn/interactive/3512647\r\n",,terminal_output +1830,2724038,"TERMINAL",0,0,"GpuFreq=control_disabled\r\n",,terminal_output +1831,2738036,"TERMINAL",0,0,"Traceback (most recent call last):\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/sample.py"", line 151, in \r\n restored = checkpoint_manager.restore(\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/.venv/lib/python3.10/site-packages/orbax/checkpoint/checkpoint_manager.py"", line 1666, in restore\r\n restored = self._checkpointer.restore(restore_directory, args=args)\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/.venv/lib/python3.10/site-packages/orbax/checkpoint/_src/checkpointers/async_checkpointer.py"", line 571, in restore\r\n return super().restore(directory, *args, **kwargs)\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/.venv/lib/python3.10/site-packages/orbax/checkpoint/_src/checkpointers/checkpointer.py"", line 306, in restore\r\n restored = self._restore(directory, args=ckpt_args)\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/.venv/lib/python3.10/site-packages/orbax/checkpoint/_src/checkpointers/checkpointer.py"", line 328, in _restore\r\n return self._handler.restore(directory, args=args)\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/.venv/lib/python3.10/site-packages/orbax/checkpoint/_src/handlers/composite_checkpoint_handler.py"", line 857, in restore\r\n restored[item_name] = handler.restore(\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/.venv/lib/python3.10/site-packages/orbax/checkpoint/_src/handlers/pytree_checkpoint_handler.py"", line 835, in restore\r\n return self._handler_impl.restore(directory, args=args)\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/.venv/lib/python3.10/site-packages/orbax/checkpoint/_src/handlers/base_pytree_checkpoint_handler.py"", line 945, in restore\r\n raise ValueError(\r\nValueError: User-provided restore item and on-disk value metadata tree structures do not match: {'model': {'lam': Diff(lhs={'action_in': {'value': ShapeDtypeStruct(shape=(1, 1, 1, 768), dtype=float32)}, 'action_up': {'bias': {'value': ShapeDtypeStruct(shape=(512,), dtype=float32)}, 'kernel': {'value': ShapeDtypeStruct(shape=(32, 512), dtype=float32)}}, 'encoder': {'blocks': {'0': {'ffn_dense1': {'bias': {'value': ShapeDtypeStruct(shape=(2048,), dtype=float32)}, 'kernel': {'value': ShapeDtypeStruct(shape=(512, 2048), dtype=float32)}}, 'ffn_dense2': {'bias': {'value': ShapeDtypeStruct(shape=(512,), dtype=float32)}, 'kernel': {'value': ShapeDtypeStruct(shape=(2048, 512), dtype=float32)}}, 'ffn_norm': {'bias': {'value': ShapeDtypeStruct(shape=(512,), dtype=float32)}, 'scale': {'value': ShapeDtypeStruct(shape=(512,), dtype=float32)}}, 'spatial_attention': {'key': {'bias': {'value': ShapeDtypeStruct(shape=(8, 64), dtype=float32)}, 'kernel': {'value': ShapeDtypeStruct(shape=(512, 8, 64), dtype=float32)}}, 'out': {'bias': {'value': ShapeDtypeStruct(shape=(512,), dtype=float32)}, 
'kernel': {'value': ShapeDtypeStruct(shape=(8, 64, 512), dtype=float32)}}, 'query': {'bias': {'value': ShapeDtypeStruct(shape=(8, 64), dtype=float32)}, 'kernel': {'value': ShapeDtypeStruct(shape=(512, 8, 64), dtype=float32)}}, 'value': {'bias': {'value': ShapeDtypeStruct(shape=(8, 64), dtype=float32)}, 'kernel': {'value': ShapeDtypeStruct(shape=(512, 8, 64), dtype=float32)}}}, 'spatial_norm': {'bias': {'value': ShapeDtypeStruct(shape=(512,), dtype=float32)}, 'scale': {'value': ShapeDtypeStruct(shape=(512,), dtype=float32)}}, 'temporal_attention': {'key': {'bias': {'value': ShapeDtypeStruct(shape=(8, 64), dtype=float32)}, 'kernel': {'value': ShapeDtypeStruct(shape=(512, 8, 64), dtype=float32)}}, 'out': {'bias': {'value': ShapeDtypeStruct(shape=(512,), dtype=float32)}, 'kernel': {'value': ShapeDtypeStruct(shape=(8, 64, 512), dtype=float32)}}, 'query': {'bias': {'value': ShapeDtypeStruct(shape=(8, 64), dtype=float32)}, 'kernel': {'value': ShapeDtypeStruct(shape=(512, 8, 64), dtype=float32)}}, 'value': {'bias': {'value': ShapeDtypeStruct(shape=(8, 64), dtype=float32)}, 'kernel': {'value': ShapeDtypeStruct(shape=(512, 8, 64), dtype=float32)}}}, 'temporal_norm': {'bias': {'value': ShapeDtypeStruct(shape=(512,), dtype=float32)}, 'scale': {'value': ShapeDtypeStruct(shape=(512,), dtype=float32)}}}, '1': {'ffn_dense1': {'bias': {'value': ShapeDtypeStruct(shape=(2048,), dtype=float32)}, 'kernel': {'value': ShapeDtypeStruct(shape=(512, 2048), dtype=float32)}}, 'ffn_dense2': {'bias': {'value': ShapeDtypeStruct(shape=(512,), dtype=float32)}, 'kernel': {'value': ShapeDtypeStruct(shape=(2048, 512), dtype=float32)}}, 'ffn_norm': {'bias': {'value': ShapeDtypeStruct(shape=(512,), dtype=float32)}, 'scale': {'value': ShapeDtypeStruct(shape=(512,), dtype=float32)}}, 'spatial_attention': {'key': {'bias': {'value': ShapeDtypeStruct(shape=(8, 64), dtype=float32)}, 'kernel': {'value': ShapeDtypeStruct(shape=(512, 8, 64), dtype=float32)}}, 'out': {'bias': {'value': ShapeDtypeStruct(shape=(512,), dtype=float32)}, 'kernel': {'value': ShapeDtypeStruct(shape=(8, 64, 512), dtype=float32)}}, 'query': {'bias': {'value': ShapeDtypeStruct(shape=(8, 64), dtype=float32)}, 'kernel': {'value': ShapeDtypeStruct(shape=(512, 8, 64), dtype=float32)}}, 'value': {'bias': {'value': ShapeDtypeStruct(shape=(8, 64), dtype=float32)}, 'kernel': {'value': ShapeDtypeStruct(shape=(512, 8, 64), dtype=float32)}}}, 'spatial_norm': {'bias': {'value': ShapeDtypeStruct(shape=(512,), dtype=float32)}, 'scale': {'value': ShapeDtypeStruct(shape=(512,), dtype=float32)}}, 'temporal_attention': {'key': {'bias': {'value': ShapeDtypeStruct(shape=(8, 64), dtype=float32)}, 'kernel': {'value': ShapeDtypeStruct(shape=(512, 8, 64), dtype=float32)}}, 'out': {'bias': {'value': ShapeDtypeStruct(shape=(512,), dtype=float32)}, 'kernel': {'value': ShapeDtypeStruct(shape=(8, 64, 512), dtype=float32)}}, 'query': {'bias': {'value': ShapeDtypeStruct(shape=(8, 64), dtype=float32)}, 'kernel': {'value': ShapeDtypeStruct(shape=(512, 8, 64), dtype=float32)}}, 'value': {'bias': {'value': ShapeDtypeStruct(shape=(8, 64), dtype=float32)}, 'kernel': {'value': ShapeDtypeStruct(shape=(512, 8, 64), dtype=float32)}}}, 'temporal_norm': {'bias': {'value': ShapeDtypeStruct(shape=(512,), dtype=float32)}, 'scale': {'value': ShapeDtypeStruct(shape=(512,), dtype=float32)}}}, '2': {'ffn_dense1': {'bias': {'value': ShapeDtypeStruct(shape=(2048,), dtype=float32)}, 'kernel': {'value': ShapeDtypeStruct(shape=(512, 2048), dtype=float32)}}, 'ffn_dense2': {'bias': {'value': 
ShapeDtypeStruct(shape=(512,), dtype=float32)}, 'kernel': {'value': ShapeDtypeStruct(shape=(2048, 512), dtype=float32)}}, 'ffn_norm': {'bias': {'value': ShapeDtypeStruct(shape=(512,), dtype=float32)}, 'scale': {'value': ShapeDtypeStruct(shape=(512,), dtype=float32)}}, 'spatial_attention': {'key': {'bias': {'value': ShapeDtypeStruct(shape=(8, 64), dtype=float32)}, 'kernel': {'value': ShapeDtypeStruct(shape=(512, 8, 64), dtype=float32)}}, 'out': {'bias': {'value': ShapeDtypeStruct(shape=(512,), dtype=float32)}, 'kernel': {'value': ShapeDtypeStruct(shape=(8, 64, 512), dtype=float32)}}, 'query': {'bias': {'value': ShapeDtypeStruct(shape=(8, 64), dtype=float32)}, 'kernel': {'value': ShapeDtypeStruct(shape=(512, 8, 64), dtype=float32)}}, 'value': {'bias': {'value': ShapeDtypeStruct(shape=(8, 64), dtype=float32)}, 'kernel': {'value': ShapeDtypeStruct(shape=(512, 8, 64), dtype=float32)}}}, 'spatial_norm': {'bias': {'value': ShapeDtypeStruct(shape=(512,), dtype=float32)}, 'scale': {'value': ShapeDtypeStruct(shape=(512,), dtype=float32)}}, 'temporal_attention': {'key': {'bias': {'value': ShapeDtypeStruct(shape=(8, 64), dtype=float32)}, 'kernel': {'value': ShapeDtypeStruct(shape=(512, 8, 64), dtype=float32)}}, 'out': {'bias': {'value': ShapeDtypeStruct(shape=(512,), dtype=float32)}, 'kernel': {'value': ShapeDtypeStruct(shape=(8, 64, 512), dtype=float32)}}, 'query': {'bias': {'value': ShapeDtypeStruct(shape=(8, 64), dtype=float32)}, 'kernel': {'value': ShapeDtypeStruct(shape=(512, 8, 64), dtype=float32)}}, 'value': {'bias': {'value': ShapeDtypeStruct(shape=(8, 64), dtype=float32)}, 'kernel': {'value': ShapeDtypeStruct(shape=(512, 8, 64), dtype=float32)}}}, 'temporal_norm': {'bias': {'value': ShapeDtypeStruct(shape=(512,), dtype=float32)}, 'scale': {'value': ShapeDtypeStruct(shape=(512,), dtype=float32)}}}, '3': {'ffn_dense1': {'bias': {'value': ShapeDtypeStruct(shape=(2048,), dtype=float32)}, 'kernel': {'value': ShapeDtypeStruct(shape=(512, 2048), dtype=float32)}}, 'ffn_dense2': {'bias': {'value': ShapeDtypeStruct(shape=(512,), dtype=float32)}, 'kernel': {'value': ShapeDtypeStruct(shape=(2048, 512), dtype=float32)}}, 'ffn_norm': {'bias': {'value': ShapeDtypeStruct(shape=(512,), dtype=float32)}, 'scale': {'value': ShapeDtypeStruct(shape=(512,), dtype=float32)}}, 'spatial_attention': {'key': {'bias': {'value': ShapeDtypeStruct(shape=(8, 64), dtype=float32)}, 'kernel': {'value': ShapeDtypeStruct(shape=(512, 8, 64), dtype=float32)}}, 'out': {'bias': {'value': ShapeDtypeStruct(shape=(512,), dtype=float32)}, 'kernel': {'value': ShapeDtypeStruct(shape=(8, 64, 512), dtype=float32)}}, 'query': {'bias': {'value': ShapeDtypeStruct(shape=(8, 64), dtype=float32)}, 'kernel': {'value': ShapeDtypeStruct(shape=(512, 8, 64), dtype=float32)}}, 'value': {'bias': {'value': ShapeDtypeStruct(shape=(8, 64), dtype=float32)}, 'kernel': {'value': ShapeDtypeStruct(shape=(512, 8, 64), dtype=float32)}}}, 'spatial_norm': {'bias': {'value': ShapeDtypeStruct(shape=(512,), dtype=float32)}, 'scale': {'value': ShapeDtypeStruct(shape=(512,), dtype=float32)}}, 'temporal_attention': {'key': {'bias': {'value': ShapeDtypeStruct(shape=(8, 64), dtype=float32)}, 'kernel': {'value': ShapeDtypeStruct(shape=(512, 8, 64), dtype=float32)}}, 'out': {'bias': {'value': ShapeDtypeStruct(shape=(512,), dtype=float32)}, 'kernel': {'value': ShapeDtypeStruct(shape=(8, 64, 512), dtype=float32)}}, 'query': {'bias': {'value': ShapeDtypeStruct(shape=(8, 64), dtype=float32)}, 'kernel': {'value': ShapeDtypeStruct(shape=(512, 8, 64), dtype=float32)}}, 'value': 
{'bias': {'value': ShapeDtypeStruct(shape=(8, 64), dtype=float32)}, 'kernel': {'value': ShapeDtypeStruct(shape=(512, 8, 64), dtype=float32)}}}, 'temporal_norm': {'bias': {'value': ShapeDtypeStruct(shape=(512,), dtype=float32)}, 'scale': {'value': ShapeDtypeStruct(shape=(512,), dtype=float32)}}}}, 'input_dense': {'bias': {'value': ShapeDtypeStruct(shape=(512,), dtype=float32)}, 'kernel': {'value': ShapeDtypeStruct(shape=(768, 512), dtype=float32)}}, 'input_norm1': {'bias': {'value': ShapeDtypeStruct(shape=(768,), dtype=float32)}, 'scale': {'value': ShapeDtypeStruct(shape=(768,), dtype=float32)}}, 'input_norm2': {'bias': {'value': ShapeDtypeStruct(shape=(512,), dtype=float32)}, 'scale': {'value': ShapeDtypeStruct(shape=(512,), dtype=float32)}}, 'output_dense': {'bias': {'value': ShapeDtypeStruct(shape=(32,), dtype=float32)}, 'kernel': {'value': ShapeDtypeStruct(shape=(512, 32), dtype=float32)}}}, 'patch_up': {'bias': {'value': ShapeDtypeStruct(shape=(512,), dtype=float32)}, 'kernel': {'value': ShapeDtypeStruct(shape=(768, 512), dtype=float32)}}, 'vq': {'codebook': {'value': ShapeDtypeStruct(shape=(6, 32), dtype=float32)}, 'drop': {'rngs': {'default': {'count': {'value': ShapeDtypeStruct(shape=(), dtype=uint32)}, 'key': {'value': ShapeDtypeStruct(shape=(), dtype=key)}}}}}}, rhs=None), 'action_embed': Diff(lhs=None, rhs={'embedding': {'value': ValueMetadataEntry(value_type='jax.Array', skip_deserialize=False, write_shape=(6, 32))}})}, 'opt_state': {'0': {'mu': {'lam': Diff(lhs={'action_in': {'value': ShapeDtypeStruct(shape=(1, 1, 1, 768), dtype=bfloat16)}, 'action_up': {'bias': {'value': ShapeDtypeStruct(shape=(512,), dtype=bfloat16)}, 'kernel': {'value': ShapeDtypeStruct(shape=(32, 512), dtype=bfloat16)}}, 'encoder': {'blocks': {'0': {'ffn_dense1': {'bias': {'value': ShapeDtypeStruct(shape=(2048,), dtype=bfloat16)}, 'kernel': {'value': ShapeDtypeStruct(shape=(512, 2048), dtype=bfloat16)}}, 'ffn_dense2': {'bias': {'value': ShapeDtypeStruct(shape=(512,), dtype=bfloat16)}, 'kernel': {'value': ShapeDtypeStruct(shape=(2048, 512), dtype=bfloat16)}}, 'ffn_norm': {'bias': {'value': ShapeDtypeStruct(shape=(512,), dtype=bfloat16)}, 'scale': {'value': ShapeDtypeStruct(shape=(512,), dtype=bfloat16)}}, 'spatial_attention': {'key': {'bias': {'value': ShapeDtypeStruct(shape=(8, 64), dtype=bfloat16)}, 'kernel': {'value': ShapeDtypeStruct(shape=(512, 8, 64), dtype=bfloat16)}}, 'out': {'bias': {'value': ShapeDtypeStruct(shape=(512,), dtype=bfloat16)}, 'kernel': {'value': ShapeDtypeStruct(shape=(8, 64, 512), dtype=bfloat16)}}, 'query': {'bias': {'value': ShapeDtypeStruct(shape=(8, 64), dtype=bfloat16)}, 'kernel': {'value': ShapeDtypeStruct(shape=(512, 8, 64), dtype=bfloat16)}}, 'value': {'bias': {'value': ShapeDtypeStruct(shape=(8, 64), dtype=bfloat16)}, 'kernel': {'value': ShapeDtypeStruct(shape=(512, 8, 64), dtype=bfloat16)}}}, 'spatial_norm': {'bias': {'value': ShapeDtypeStruct(shape=(512,), dtype=bfloat16)}, 'scale': {'value': ShapeDtypeStruct(shape=(512,), dtype=bfloat16)}}, 'temporal_attention': {'key': {'bias': {'value': ShapeDtypeStruct(shape=(8, 64), dtype=bfloat16)}, 'kernel': {'value': ShapeDtypeStruct(shape=(512, 8, 64), dtype=bfloat16)}}, 'out': {'bias': {'value': ShapeDtypeStruct(shape=(512,), dtype=bfloat16)}, 'kernel': {'value': ShapeDtypeStruct(shape=(8, 64, 512), dtype=bfloat16)}}, 'query': {'bias': {'value': ShapeDtypeStruct(shape=(8, 64), dtype=bfloat16)}, 'kernel': {'value': ShapeDtypeStruct(shape=(512, 8, 64), dtype=bfloat16)}}, 'value': {'bias': {'value': ShapeDtypeStruct(shape=(8, 64), 
dtype=bfloat16)}, 'kernel': {'value': ShapeDtypeStruct(shape=(512, 8, 64), dtype=bfloat16)}}}, 'temporal_norm': {'bias': {'value': ShapeDtypeStruct(shape=(512,), dtype=bfloat16)}, 'scale': {'value': ShapeDtypeStruct(shape=(512,), dtype=bfloat16)}}}, '1': {'ffn_dense1': {'bias': {'value': ShapeDtypeStruct(shape=(2048,), dtype=bfloat16)}, 'kernel': {'value': ShapeDtypeStruct(shape=(512, 2048), dtype=bfloat16)}}, 'ffn_dense2': {'bias': {'value': ShapeDtypeStruct(shape=(512,), dtype=bfloat16)}, 'kernel': {'value': ShapeDtypeStruct(shape=(2048, 512), dtype=bfloat16)}}, 'ffn_norm': {'bias': {'value': ShapeDtypeStruct(shape=(512,), dtype=bfloat16)}, 'scale': {'value': ShapeDtypeStruct(shape=(512,), dtype=bfloat16)}}, 'spatial_attention': {'key': {'bias': {'value': ShapeDtypeStruct(shape=(8, 64), dtype=bfloat16)}, 'kernel': {'value': ShapeDtypeStruct(shape=(512, 8, 64), dtype=bfloat16)}}, 'out': {'bias': {'value': ShapeDtypeStruct(shape=(512,), dtype=bfloat16)}, 'kernel': {'value': ShapeDtypeStruct(shape=(8, 64, 512), dtype=bfloat16)}}, 'query': {'bias': {'value': ShapeDtypeStruct(shape=(8, 64), dtype=bfloat16)}, 'kernel': {'value': ShapeDtypeStruct(shape=(512, 8, 64), dtype=bfloat16)}}, 'value': {'bias': {'value': ShapeDtypeStruct(shape=(8, 64), dtype=bfloat16)}, 'kernel': {'value': ShapeDtypeStruct(shape=(512, 8, 64), dtype=bfloat16)}}}, 'spatial_norm': {'bias': {'value': ShapeDtypeStruct(shape=(512,), dtype=bfloat16)}, 'scale': {'value': ShapeDtypeStruct(shape=(512,), dtype=bfloat16)}}, 'temporal_attention': {'key': {'bias': {'value': ShapeDtypeStruct(shape=(8, 64), dtype=bfloat16)}, 'kernel': {'value': ShapeDtypeStruct(shape=(512, 8, 64), dtype=bfloat16)}}, 'out': {'bias': {'value': ShapeDtypeStruct(shape=(512,), dtype=bfloat16)}, 'kernel': {'value': ShapeDtypeStruct(shape=(8, 64, 512), dtype=bfloat16)}}, 'query': {'bias': {'value': ShapeDtypeStruct(shape=(8, 64), dtype=bfloat16)}, 'kernel': {'value': ShapeDtypeStruct(shape=(512, 8, 64), dtype=bfloat16)}}, 'value': {'bias': {'value': ShapeDtypeStruct(shape=(8, 64), dtype=bfloat16)}, 'kernel': {'value': ShapeDtypeStruct(shape=(512, 8, 64), dtype=bfloat16)}}}, 'temporal_norm': {'bias': {'value': ShapeDtypeStruct(shape=(512,), dtype=bfloat16)}, 'scale': {'value': ShapeDtypeStruct(shape=(512,), dtype=bfloat16)}}}, '2': {'ffn_dense1': {'bias': {'value': ShapeDtypeStruct(shape=(2048,), dtype=bfloat16)}, 'kernel': {'value': ShapeDtypeStruct(shape=(512, 2048), dtype=bfloat16)}}, 'ffn_dense2': {'bias': {'value': ShapeDtypeStruct(shape=(512,), dtype=bfloat16)}, 'kernel': {'value': ShapeDtypeStruct(shape=(2048, 512), dtype=bfloat16)}}, 'ffn_norm': {'bias': {'value': ShapeDtypeStruct(shape=(512,), dtype=bfloat16)}, 'scale': {'value': ShapeDtypeStruct(shape=(512,), dtype=bfloat16)}}, 'spatial_attention': {'key': {'bias': {'value': ShapeDtypeStruct(shape=(8, 64), dtype=bfloat16)}, 'kernel': {'value': ShapeDtypeStruct(shape=(512, 8, 64), dtype=bfloat16)}}, 'out': {'bias': {'value': ShapeDtypeStruct(shape=(512,), dtype=bfloat16)}, 'kernel': {'value': ShapeDtypeStruct(shape=(8, 64, 512), dtype=bfloat16)}}, 'query': {'bias': {'value': ShapeDtypeStruct(shape=(8, 64), dtype=bfloat16)}, 'kernel': {'value': ShapeDtypeStruct(shape=(512, 8, 64), dtype=bfloat16)}}, 'value': {'bias': {'value': ShapeDtypeStruct(shape=(8, 64), dtype=bfloat16)}, 'kernel': {'value': ShapeDtypeStruct(shape=(512, 8, 64), dtype=bfloat16)}}}, 'spatial_norm': {'bias': {'value': ShapeDtypeStruct(shape=(512,), dtype=bfloat16)}, 'scale': {'value': ShapeDtypeStruct(shape=(512,), dtype=bfloat16)}}, 
'temporal_attention': {'key': {'bias': {'value': ShapeDtypeStruct(shape=(8, 64), dtype=bfloat16)}, 'kernel': {'value': ShapeDtypeStruct(shape=(512, 8, 64), dtype=bfloat16)}}, 'out': {'bias': {'value': ShapeDtypeStruct(shape=(512,), dtype=bfloat16)}, 'kernel': {'value': ShapeDtypeStruct(shape=(8, 64, 512), dtype=bfloat16)}}, 'query': {'bias': {'value': ShapeDtypeStruct(shape=(8, 64), dtype=bfloat16)}, 'kernel': {'value': ShapeDtypeStruct(shape=(512, 8, 64), dtype=bfloat16)}}, 'value': {'bias': {'value': ShapeDtypeStruct(shape=(8, 64), dtype=bfloat16)}, 'kernel': {'value': ShapeDtypeStruct(shape=(512, 8, 64), dtype=bfloat16)}}}, 'temporal_norm': {'bias': {'value': ShapeDtypeStruct(shape=(512,), dtype=bfloat16)}, 'scale': {'value': ShapeDtypeStruct(shape=(512,), dtype=bfloat16)}}}, '3': {'ffn_dense1': {'bias': {'value': ShapeDtypeStruct(shape=(2048,), dtype=bfloat16)}, 'kernel': {'value': ShapeDtypeStruct(shape=(512, 2048), dtype=bfloat16)}}, 'ffn_dense2': {'bias': {'value': ShapeDtypeStruct(shape=(512,), dtype=bfloat16)}, 'kernel': {'value': ShapeDtypeStruct(shape=(2048, 512), dtype=bfloat16)}}, 'ffn_norm': {'bias': {'value': ShapeDtypeStruct(shape=(512,), dtype=bfloat16)}, 'scale': {'value': ShapeDtypeStruct(shape=(512,), dtype=bfloat16)}}, 'spatial_attention': {'key': {'bias': {'value': ShapeDtypeStruct(shape=(8, 64), dtype=bfloat16)}, 'kernel': {'value': ShapeDtypeStruct(shape=(512, 8, 64), dtype=bfloat16)}}, 'out': {'bias': {'value': ShapeDtypeStruct(shape=(512,), dtype=bfloat16)}, 'kernel': {'value': ShapeDtypeStruct(shape=(8, 64, 512), dtype=bfloat16)}}, 'query': {'bias': {'value': ShapeDtypeStruct(shape=(8, 64), dtype=bfloat16)}, 'kernel': {'value': ShapeDtypeStruct(shape=(512, 8, 64), dtype=bfloat16)}}, 'value': {'bias': {'value': ShapeDtypeStruct(shape=(8, 64), dtype=bfloat16)}, 'kernel': {'value': ShapeDtypeStruct(shape=(512, 8, 64), dtype=bfloat16)}}}, 'spatial_norm': {'bias': {'value': ShapeDtypeStruct(shape=(512,), dtype=bfloat16)}, 'scale': {'value': ShapeDtypeStruct(shape=(512,), dtype=bfloat16)}}, 'temporal_attention': {'key': {'bias': {'value': ShapeDtypeStruct(shape=(8, 64), dtype=bfloat16)}, 'kernel': {'value': ShapeDtypeStruct(shape=(512, 8, 64), dtype=bfloat16)}}, 'out': {'bias': {'value': ShapeDtypeStruct(shape=(512,), dtype=bfloat16)}, 'kernel': {'value': ShapeDtypeStruct(shape=(8, 64, 512), dtype=bfloat16)}}, 'query': {'bias': {'value': ShapeDtypeStruct(shape=(8, 64), dtype=bfloat16)}, 'kernel': {'value': ShapeDtypeStruct(shape=(512, 8, 64), dtype=bfloat16)}}, 'value': {'bias': {'value': ShapeDtypeStruct(shape=(8, 64), dtype=bfloat16)}, 'kernel': {'value': ShapeDtypeStruct(shape=(512, 8, 64), dtype=bfloat16)}}}, 'temporal_norm': {'bias': {'value': ShapeDtypeStruct(shape=(512,), dtype=bfloat16)}, 'scale': {'value': ShapeDtypeStruct(shape=(512,), dtype=bfloat16)}}}}, 'input_dense': {'bias': {'value': ShapeDtypeStruct(shape=(512,), dtype=bfloat16)}, 'kernel': {'value': ShapeDtypeStruct(shape=(768, 512), dtype=bfloat16)}}, 'input_norm1': {'bias': {'value': ShapeDtypeStruct(shape=(768,), dtype=bfloat16)}, 'scale': {'value': ShapeDtypeStruct(shape=(768,), dtype=bfloat16)}}, 'input_norm2': {'bias': {'value': ShapeDtypeStruct(shape=(512,), dtype=bfloat16)}, 'scale': {'value': ShapeDtypeStruct(shape=(512,), dtype=bfloat16)}}, 'output_dense': {'bias': {'value': ShapeDtypeStruct(shape=(32,), dtype=bfloat16)}, 'kernel': {'value': ShapeDtypeStruct(shape=(512, 32), dtype=bfloat16)}}}, 'patch_up': {'bias': {'value': ShapeDtypeStruct(shape=(512,), dtype=bfloat16)}, 'kernel': 
{'value': ShapeDtypeStruct(shape=(768, 512), dtype=bfloat16)}}, 'vq': {'codebook': {'value': ShapeDtypeStruct(shape=(6, 32), dtype=bfloat16)}}}, rhs=None), 'action_embed': Diff(lhs=None, rhs={'embedding': {'value': ValueMetadataEntry(value_type='jax.Array', skip_deserialize=False, write_shape=(6, 32))}})}, 'nu': {'lam': Diff(lhs={'action_in': {'value': ShapeDtypeStruct(shape=(1, 1, 1, 768), dtype=float32)}, 'action_up': {'bias': {'value': ShapeDtypeStruct(shape=(512,), dtype=float32)}, 'kernel': {'value': ShapeDtypeStruct(shape=(32, 512), dtype=float32)}}, 'encoder': {'blocks': {'0': {'ffn_dense1': {'bias': {'value': ShapeDtypeStruct(shape=(2048,), dtype=float32)}, 'kernel': {'value': ShapeDtypeStruct(shape=(512, 2048), dtype=float32)}}, 'ffn_dense2': {'bias': {'value': ShapeDtypeStruct(shape=(512,), dtype=float32)}, 'kernel': {'value': ShapeDtypeStruct(shape=(2048, 512), dtype=float32)}}, 'ffn_norm': {'bias': {'value': ShapeDtypeStruct(shape=(512,), dtype=float32)}, 'scale': {'value': ShapeDtypeStruct(shape=(512,), dtype=float32)}}, 'spatial_attention': {'key': {'bias': {'value': ShapeDtypeStruct(shape=(8, 64), dtype=float32)}, 'kernel': {'value': ShapeDtypeStruct(shape=(512, 8, 64), dtype=float32)}}, 'out': {'bias': {'value': ShapeDtypeStruct(shape=(512,), dtype=float32)}, 'kernel': {'value': ShapeDtypeStruct(shape=(8, 64, 512), dtype=float32)}}, 'query': {'bias': {'value': ShapeDtypeStruct(shape=(8, 64), dtype=float32)}, 'kernel': {'value': ShapeDtypeStruct(shape=(512, 8, 64), dtype=float32)}}, 'value': {'bias': {'value': ShapeDtypeStruct(shape=(8, 64), dtype=float32)}, 'kernel': {'value': ShapeDtypeStruct(shape=(512, 8, 64), dtype=float32)}}}, 'spatial_norm': {'bias': {'value': ShapeDtypeStruct(shape=(512,), dtype=float32)}, 'scale': {'value': ShapeDtypeStruct(shape=(512,), dtype=float32)}}, 'temporal_attention': {'key': {'bias': {'value': ShapeDtypeStruct(shape=(8, 64), dtype=float32)}, 'kernel': {'value': ShapeDtypeStruct(shape=(512, 8, 64), dtype=float32)}}, 'out': {'bias': {'value': ShapeDtypeStruct(shape=(512,), dtype=float32)}, 'kernel': {'value': ShapeDtypeStruct(shape=(8, 64, 512), dtype=float32)}}, 'query': {'bias': {'value': ShapeDtypeStruct(shape=(8, 64), dtype=float32)}, 'kernel': {'value': ShapeDtypeStruct(shape=(512, 8, 64), dtype=float32)}}, 'value': {'bias': {'value': ShapeDtypeStruct(shape=(8, 64), dtype=float32)}, 'kernel': {'value': ShapeDtypeStruct(shape=(512, 8, 64), dtype=float32)}}}, 'temporal_norm': {'bias': {'value': ShapeDtypeStruct(shape=(512,), dtype=float32)}, 'scale': {'value': ShapeDtypeStruct(shape=(512,), dtype=float32)}}}, '1': {'ffn_dense1': {'bias': {'value': ShapeDtypeStruct(shape=(2048,), dtype=float32)}, 'kernel': {'value': ShapeDtypeStruct(shape=(512, 2048), dtype=float32)}}, 'ffn_dense2': {'bias': {'value': ShapeDtypeStruct(shape=(512,), dtype=float32)}, 'kernel': {'value': ShapeDtypeStruct(shape=(2048, 512), dtype=float32)}}, 'ffn_norm': {'bias': {'value': ShapeDtypeStruct(shape=(512,), dtype=float32)}, 'scale': {'value': ShapeDtypeStruct(shape=(512,), dtype=float32)}}, 'spatial_attention': {'key': {'bias': {'value': ShapeDtypeStruct(shape=(8, 64), dtype=float32)}, 'kernel': {'value': ShapeDtypeStruct(shape=(512, 8, 64), dtype=float32)}}, 'out': {'bias': {'value': ShapeDtypeStruct(shape=(512,), dtype=float32)}, 'kernel': {'value': ShapeDtypeStruct(shape=(8, 64, 512), dtype=float32)}}, 'query': {'bias': {'value': ShapeDtypeStruct(shape=(8, 64), dtype=float32)}, 'kernel': {'value': ShapeDtypeStruct(shape=(512, 8, 64), dtype=float32)}}, 'value': 
{'bias': {'value': ShapeDtypeStruct(shape=(8, 64), dtype=float32)}, 'kernel': {'value': ShapeDtypeStruct(shape=(512, 8, 64), dtype=float32)}}}, 'spatial_norm': {'bias': {'value': ShapeDtypeStruct(shape=(512,), dtype=float32)}, 'scale': {'value': ShapeDtypeStruct(shape=(512,), dtype=float32)}}, 'temporal_attention': {'key': {'bias': {'value': ShapeDtypeStruct(shape=(8, 64), dtype=float32)}, 'kernel': {'value': ShapeDtypeStruct(shape=(512, 8, 64), dtype=float32)}}, 'out': {'bias': {'value': ShapeDtypeStruct(shape=(512,), dtype=float32)}, 'kernel': {'value': ShapeDtypeStruct(shape=(8, 64, 512), dtype=float32)}}, 'query': {'bias': {'value': ShapeDtypeStruct(shape=(8, 64), dtype=float32)}, 'kernel': {'value': ShapeDtypeStruct(shape=(512, 8, 64), dtype=float32)}}, 'value': {'bias': {'value': ShapeDtypeStruct(shape=(8, 64), dtype=float32)}, 'kernel': {'value': ShapeDtypeStruct(shape=(512, 8, 64), dtype=float32)}}}, 'temporal_norm': {'bias': {'value': ShapeDtypeStruct(shape=(512,), dtype=float32)}, 'scale': {'value': ShapeDtypeStruct(shape=(512,), dtype=float32)}}}, '2': {'ffn_dense1': {'bias': {'value': ShapeDtypeStruct(shape=(2048,), dtype=float32)}, 'kernel': {'value': ShapeDtypeStruct(shape=(512, 2048), dtype=float32)}}, 'ffn_dense2': {'bias': {'value': ShapeDtypeStruct(shape=(512,), dtype=float32)}, 'kernel': {'value': ShapeDtypeStruct(shape=(2048, 512), dtype=float32)}}, 'ffn_norm': {'bias': {'value': ShapeDtypeStruct(shape=(512,), dtype=float32)}, 'scale': {'value': ShapeDtypeStruct(shape=(512,), dtype=float32)}}, 'spatial_attention': {'key': {'bias': {'value': ShapeDtypeStruct(shape=(8, 64), dtype=float32)}, 'kernel': {'value': ShapeDtypeStruct(shape=(512, 8, 64), dtype=float32)}}, 'out': {'bias': {'value': ShapeDtypeStruct(shape=(512,), dtype=float32)}, 'kernel': {'value': ShapeDtypeStruct(shape=(8, 64, 512), dtype=float32)}}, 'query': {'bias': {'value': ShapeDtypeStruct(shape=(8, 64), dtype=float32)}, 'kernel': {'value': ShapeDtypeStruct(shape=(512, 8, 64), dtype=float32)}}, 'value': {'bias': {'value': ShapeDtypeStruct(shape=(8, 64), dtype=float32)}, 'kernel': {'value': ShapeDtypeStruct(shape=(512, 8, 64), dtype=float32)}}}, 'spatial_norm': {'bias': {'value': ShapeDtypeStruct(shape=(512,), dtype=float32)}, 'scale': {'value': ShapeDtypeStruct(shape=(512,), dtype=float32)}}, 'temporal_attention': {'key': {'bias': {'value': ShapeDtypeStruct(shape=(8, 64), dtype=float32)}, 'kernel': {'value': ShapeDtypeStruct(shape=(512, 8, 64), dtype=float32)}}, 'out': {'bias': {'value': ShapeDtypeStruct(shape=(512,), dtype=float32)}, 'kernel': {'value': ShapeDtypeStruct(shape=(8, 64, 512), dtype=float32)}}, 'query': {'bias': {'value': ShapeDtypeStruct(shape=(8, 64), dtype=float32)}, 'kernel': {'value': ShapeDtypeStruct(shape=(512, 8, 64), dtype=float32)}}, 'value': {'bias': {'value': ShapeDtypeStruct(shape=(8, 64), dtype=float32)}, 'kernel': {'value': ShapeDtypeStruct(shape=(512, 8, 64), dtype=float32)}}}, 'temporal_norm': {'bias': {'value': ShapeDtypeStruct(shape=(512,), dtype=float32)}, 'scale': {'value': ShapeDtypeStruct(shape=(512,), dtype=float32)}}}, '3': {'ffn_dense1': {'bias': {'value': ShapeDtypeStruct(shape=(2048,), dtype=float32)}, 'kernel': {'value': ShapeDtypeStruct(shape=(512, 2048), dtype=float32)}}, 'ffn_dense2': {'bias': {'value': ShapeDtypeStruct(shape=(512,), dtype=float32)}, 'kernel': {'value': ShapeDtypeStruct(shape=(2048, 512), dtype=float32)}}, 'ffn_norm': {'bias': {'value': ShapeDtypeStruct(shape=(512,), dtype=float32)}, 'scale': {'value': ShapeDtypeStruct(shape=(512,), 
dtype=float32)}}, 'spatial_attention': {'key': {'bias': {'value': ShapeDtypeStruct(shape=(8, 64), dtype=float32)}, 'kernel': {'value': ShapeDtypeStruct(shape=(512, 8, 64), dtype=float32)}}, 'out': {'bias': {'value': ShapeDtypeStruct(shape=(512,), dtype=float32)}, 'kernel': {'value': ShapeDtypeStruct(shape=(8, 64, 512), dtype=float32)}}, 'query': {'bias': {'value': ShapeDtypeStruct(shape=(8, 64), dtype=float32)}, 'kernel': {'value': ShapeDtypeStruct(shape=(512, 8, 64), dtype=float32)}}, 'value': {'bias': {'value': ShapeDtypeStruct(shape=(8, 64), dtype=float32)}, 'kernel': {'value': ShapeDtypeStruct(shape=(512, 8, 64), dtype=float32)}}}, 'spatial_norm': {'bias': {'value': ShapeDtypeStruct(shape=(512,), dtype=float32)}, 'scale': {'value': ShapeDtypeStruct(shape=(512,), dtype=float32)}}, 'temporal_attention': {'key': {'bias': {'value': ShapeDtypeStruct(shape=(8, 64), dtype=float32)}, 'kernel': {'value': ShapeDtypeStruct(shape=(512, 8, 64), dtype=float32)}}, 'out': {'bias': {'value': ShapeDtypeStruct(shape=(512,), dtype=float32)}, 'kernel': {'value': ShapeDtypeStruct(shape=(8, 64, 512), dtype=float32)}}, 'query': {'bias': {'value': ShapeDtypeStruct(shape=(8, 64), dtype=float32)}, 'kernel': {'value': ShapeDtypeStruct(shape=(512, 8, 64), dtype=float32)}}, 'value': {'bias': {'value': ShapeDtypeStruct(shape=(8, 64), dtype=float32)}, 'kernel': {'value': ShapeDtypeStruct(shape=(512, 8, 64), dtype=float32)}}}, 'temporal_norm': {'bias': {'value': ShapeDtypeStruct(shape=(512,), dtype=float32)}, 'scale': {'value': ShapeDtypeStruct(shape=(512,), dtype=float32)}}}}, 'input_dense': {'bias': {'value': ShapeDtypeStruct(shape=(512,), dtype=float32)}, 'kernel': {'value': ShapeDtypeStruct(shape=(768, 512), dtype=float32)}}, 'input_norm1': {'bias': {'value': ShapeDtypeStruct(shape=(768,), dtype=float32)}, 'scale': {'value': ShapeDtypeStruct(shape=(768,), dtype=float32)}}, 'input_norm2': {'bias': {'value': ShapeDtypeStruct(shape=(512,), dtype=float32)}, 'scale': {'value': ShapeDtypeStruct(shape=(512,), dtype=float32)}}, 'output_dense': {'bias': {'value': ShapeDtypeStruct(shape=(32,), dtype=float32)}, 'kernel': {'value': ShapeDtypeStruct(shape=(512, 32), dtype=float32)}}}, 'patch_up': {'bias': {'value': ShapeDtypeStruct(shape=(512,), dtype=float32)}, 'kernel': {'value': ShapeDtypeStruct(shape=(768, 512), dtype=float32)}}, 'vq': {'codebook': {'value': ShapeDtypeStruct(shape=(6, 32), dtype=float32)}}}, rhs=None), 'action_embed': Diff(lhs=None, rhs={'embedding': {'value': ValueMetadataEntry(value_type='jax.Array', skip_deserialize=False, write_shape=(6, 32))}})}}}}\r\n",,terminal_output +1832,2738579,"TERMINAL",0,0,"srun: error: hkn0401: task 0: Exited with exit code 1\r\n]0;tum_cte0515@hkn0401:~/Projects/jasmine[?2004h(jasmine) [tum_cte0515@hkn0401 jasmine]$ ",,terminal_output +1833,2746923,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/genie.py",0,0,"from typing import Dict\n\nimport einops\nimport jax\nimport jax.numpy as jnp\nimport flax.nnx as nnx\nimport orbax.checkpoint as ocp\n\nfrom models.dynamics import DynamicsMaskGIT, DynamicsCausal\nfrom models.lam import LatentActionModel\nfrom models.tokenizer import TokenizerVQVAE\n\n\nclass Genie(nnx.Module):\n """"""Genie model""""""\n\n def __init__(\n self,\n in_dim: int,\n tokenizer_dim: int,\n tokenizer_ffn_dim: int,\n latent_patch_dim: int,\n num_patch_latents: int,\n patch_size: int,\n tokenizer_num_blocks: int,\n tokenizer_num_heads: int,\n lam_dim: int,\n lam_ffn_dim: int,\n latent_action_dim: int,\n num_actions: int,\n lam_patch_size: 
int,\n lam_num_blocks: int,\n lam_num_heads: int,\n lam_co_train: bool,\n use_gt_actions: bool,\n dyna_type: str,\n dyna_dim: int,\n dyna_ffn_dim: int,\n dyna_num_blocks: int,\n dyna_num_heads: int,\n param_dtype: jnp.dtype,\n dtype: jnp.dtype,\n use_flash_attention: bool,\n decode: bool,\n rngs: nnx.Rngs,\n dropout: float = 0.0,\n mask_limit: float = 0.0,\n ):\n # --- Tokenizer ---\n self.in_dim = in_dim\n self.tokenizer_dim = tokenizer_dim\n self.tokenizer_ffn_dim = tokenizer_ffn_dim\n self.latent_patch_dim = latent_patch_dim\n self.num_patch_latents = num_patch_latents\n self.patch_size = patch_size\n self.tokenizer_num_blocks = tokenizer_num_blocks\n self.tokenizer_num_heads = tokenizer_num_heads\n # --- LAM ---\n self.lam_dim = lam_dim\n self.lam_ffn_dim = lam_ffn_dim\n self.latent_action_dim = latent_action_dim\n self.num_actions = num_actions\n self.lam_patch_size = lam_patch_size\n self.lam_num_blocks = lam_num_blocks\n self.lam_num_heads = lam_num_heads\n self.lam_co_train = lam_co_train\n self.use_gt_actions = use_gt_actions\n # --- Dynamics ---\n self.dyna_type = dyna_type\n self.dyna_dim = dyna_dim\n self.dyna_ffn_dim = dyna_ffn_dim\n self.dyna_num_blocks = dyna_num_blocks\n self.dyna_num_heads = dyna_num_heads\n self.param_dtype = param_dtype\n self.dtype = dtype\n self.use_flash_attention = use_flash_attention\n self.dropout = dropout\n self.mask_limit = mask_limit\n self.decode = decode\n\n self.tokenizer = TokenizerVQVAE(\n in_dim=self.in_dim,\n model_dim=self.tokenizer_dim,\n ffn_dim=self.tokenizer_ffn_dim,\n latent_dim=self.latent_patch_dim,\n num_latents=self.num_patch_latents,\n patch_size=self.patch_size,\n num_blocks=self.tokenizer_num_blocks,\n num_heads=self.tokenizer_num_heads,\n dropout=0.0,\n codebook_dropout=0.0,\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n use_flash_attention=self.use_flash_attention,\n rngs=rngs,\n )\n if self.use_gt_actions:\n self.action_embed = nnx.Embed(\n self.num_actions, self.latent_action_dim, rngs=rngs\n )\n self.lam = None\n else:\n self.lam = LatentActionModel(\n in_dim=self.in_dim,\n model_dim=self.lam_dim,\n ffn_dim=self.lam_ffn_dim,\n latent_dim=self.latent_patch_dim,\n num_latents=self.num_actions,\n patch_size=self.lam_patch_size,\n num_blocks=self.lam_num_blocks,\n num_heads=self.lam_num_heads,\n dropout=0.0,\n codebook_dropout=0.0,\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n use_flash_attention=self.use_flash_attention,\n rngs=rngs,\n )\n self.action_embed = None\n if self.dyna_type == ""maskgit"":\n self.dynamics = DynamicsMaskGIT(\n model_dim=self.dyna_dim,\n ffn_dim=self.dyna_ffn_dim,\n num_latents=self.num_patch_latents,\n latent_action_dim=self.latent_action_dim,\n num_blocks=self.dyna_num_blocks,\n num_heads=self.dyna_num_heads,\n dropout=self.dropout,\n mask_limit=self.mask_limit,\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n use_flash_attention=self.use_flash_attention,\n rngs=rngs,\n )\n elif self.dyna_type == ""causal"":\n self.dynamics = DynamicsCausal(\n model_dim=self.dyna_dim,\n ffn_dim=self.dyna_ffn_dim,\n num_latents=self.num_patch_latents,\n latent_action_dim=self.latent_action_dim,\n num_blocks=self.dyna_num_blocks,\n num_heads=self.dyna_num_heads,\n dropout=self.dropout,\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n use_flash_attention=self.use_flash_attention,\n decode=decode,\n rngs=rngs,\n )\n else:\n raise ValueError(f""Invalid dynamics type: {self.dyna_type}"")\n\n def __call__(\n self,\n batch: Dict[str, jax.Array],\n training: bool = True,\n ) -> Dict[str, 
jax.Array]:\n videos_BTHWC = batch[""videos""]\n tokenizer_outputs = self.tokenizer.vq_encode(videos_BTHWC, training=False)\n token_indices_BTN = tokenizer_outputs[""indices""]\n latent_actions_BTm11L = None\n action_embeddings_BTm11L = None\n if self.use_gt_actions:\n assert self.action_embed is not None\n action_indices_E = None\n action_embeddings_BT1L = self.action_embed(batch[""actions""]).reshape(\n *batch[""actions""].shape[:2], 1, self.latent_action_dim\n )\n action_embeddings_BTm11L = action_embeddings_BT1L[:, :-1]\n else:\n assert self.lam is not None\n lam_outputs = self.lam.vq_encode(videos_BTHWC, training=False)\n z_q_BTm11L = lam_outputs[""z_q""]\n action_indices_E = lam_outputs[""indices""]\n latent_actions_BTm11L = jax.lax.cond(\n self.lam_co_train,\n lambda: z_q_BTm11L,\n lambda: jax.lax.stop_gradient(z_q_BTm11L),\n )\n outputs = dict(\n video_tokens=jax.lax.stop_gradient(token_indices_BTN),\n latent_actions=(\n action_embeddings_BTm11L\n if self.use_gt_actions\n else latent_actions_BTm11L\n ),\n )\n outputs[""mask_rng""] = batch[""rng""]\n dyna_logits_BTNV, dyna_mask = self.dynamics(outputs, training)\n outputs[""token_logits""] = dyna_logits_BTNV\n outputs[""mask""] = dyna_mask\n mle_indices_BTN = jnp.argmax(outputs[""token_logits""], axis=-1)\n H, W = batch[""videos""].shape[2:4]\n outputs[""recon""] = self.tokenizer.decode(mle_indices_BTN, (H, W))\n if action_indices_E is not None:\n outputs[""lam_indices""] = action_indices_E\n return outputs\n\n def sample(\n self,\n batch: Dict[str, jax.Array],\n seq_len: int,\n temperature: float = 1,\n sample_argmax: bool = False,\n maskgit_steps: int = 25,\n ) -> tuple[jax.Array, jax.Array]:\n if self.dyna_type == ""maskgit"":\n return self.sample_maskgit(\n batch, seq_len, maskgit_steps, temperature, sample_argmax\n )\n elif self.dyna_type == ""causal"":\n return self.sample_causal(batch, seq_len, temperature, sample_argmax)\n else:\n raise ValueError(f""Dynamics model type unknown: {self.dyna_type}"")\n\n def sample_maskgit(\n self,\n batch: Dict[str, jax.Array],\n seq_len: int,\n steps: int = 25,\n temperature: float = 1,\n sample_argmax: bool = False,\n ) -> tuple[jax.Array, jax.Array]:\n """"""\n Autoregressively samples up to `seq_len` future frames, following Figure 8 of the paper.\n\n - Input frames are tokenized once.\n - Future frames are generated autoregressively in token space.\n - All frames are detokenized in a single pass.\n\n Note:\n - For interactive or step-wise sampling, detokenization should occur after each action.\n - To maintain consistent tensor shapes across timesteps, all current and future frames are decoded at every step.\n - Temporal causal structure is preserved by\n a) reapplying the mask before each decoding step.\n b) a temporal causal mask is applied within each ST-transformer block.\n\n Dimension keys:\n B: batch size\n T: number of input (conditioning) frames\n N: number of patches per frame\n M: model dimension\n S: sequence length\n H: height\n W: width\n E: B * (S - 1)\n P: S * N\n """"""\n assert isinstance(self.dynamics, DynamicsMaskGIT)\n # --- Encode videos and actions ---\n videos_BTHWC = batch[""videos""]\n tokenizer_out = self.tokenizer.vq_encode(videos_BTHWC, training=False)\n token_idxs_BTN = tokenizer_out[""indices""]\n B, T, N = token_idxs_BTN.shape\n pad_shape = (B, seq_len - T, N)\n pad = jnp.zeros(pad_shape, dtype=token_idxs_BTN.dtype)\n token_idxs_BSN = jnp.concatenate([token_idxs_BTN, pad], axis=1)\n init_logits_BSNV = jnp.zeros(\n shape=(*token_idxs_BSN.shape, 
self.num_patch_latents)\n )\n if self.use_gt_actions:\n assert self.action_embed is not None\n latent_actions_BT1L = self.action_embed(batch[""actions""]).reshape(\n *batch[""actions""].shape[:2], 1, self.latent_action_dim\n )\n latent_actions_BTm11L = latent_actions_BT1L[:, :-1]\n action_tokens_EL = latent_actions_BTm11L.reshape(-1, self.latent_action_dim)\n else:\n assert self.lam is not None\n latent_actions_E = batch[""latent_actions""]\n action_tokens_EL = self.lam.vq.get_codes(latent_actions_E)\n\n # --- Extract submodule state ---\n dynamics_state = nnx.state(self.dynamics)\n\n @nnx.scan(in_axes=(nnx.Carry, 0), out_axes=nnx.Carry)\n def maskgit_step_fn(\n carry: tuple[jax.Array, jax.Array, jax.Array, jax.Array, jax.Array],\n step: jax.Array,\n ) -> tuple[jax.Array, jax.Array, jax.Array, jax.Array, jax.Array]:\n rng, token_idxs_BSN, logits_BSNV, mask_BSN, action_tokens_EL = carry\n S, N = token_idxs_BSN.shape[1:]\n L = action_tokens_EL.shape[-1]\n\n # We need to reconstruct the submodule inside scan body to prevent trace context mismatches\n dynamics_maskgit = DynamicsMaskGIT(\n model_dim=self.dyna_dim,\n ffn_dim=self.dyna_ffn_dim,\n num_latents=self.num_patch_latents,\n latent_action_dim=self.latent_action_dim,\n num_blocks=self.dyna_num_blocks,\n num_heads=self.dyna_num_heads,\n dropout=self.dropout,\n mask_limit=self.mask_limit,\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n use_flash_attention=self.use_flash_attention,\n rngs=nnx.Rngs(0),\n )\n nnx.update(dynamics_maskgit, dynamics_state)\n\n # --- Construct + encode video ---\n vid_embed_BSNM = dynamics_maskgit.patch_embed(token_idxs_BSN)\n mask_token_111M = dynamics_maskgit.mask_token.value\n mask_expanded_BSN1 = mask_BSN[..., None]\n vid_embed_BSNM = jnp.where(\n mask_expanded_BSN1, mask_token_111M, vid_embed_BSNM\n )\n\n # --- Predict transition ---\n action_tokens_BSm1L = jnp.reshape(action_tokens_EL, (B, S - 1, L))\n act_embed_BSm1M = dynamics_maskgit.action_up(action_tokens_BSm1L)\n act_embed_BSM = jnp.pad(act_embed_BSm1M, ((0, 0), (1, 0), (0, 0)))\n act_embed_BS1M = jnp.reshape(\n act_embed_BSM, (B, S, 1, act_embed_BSM.shape[-1])\n )\n vid_embed_BSNM += act_embed_BS1M\n unmasked_ratio = jnp.cos(jnp.pi * (step + 1) / (steps * 2))\n step_temp = temperature * (1.0 - unmasked_ratio)\n final_logits_BSNV = dynamics_maskgit.transformer(vid_embed_BSNM) / step_temp\n\n # --- Sample new tokens for final frame ---\n if sample_argmax:\n sampled_token_idxs_BSN = jnp.argmax(final_logits_BSNV, axis=-1)\n else:\n rng, _rng = jax.random.split(rng)\n sampled_token_idxs_BSN = jax.random.categorical(_rng, final_logits_BSNV)\n gather_fn = jax.vmap(jax.vmap(jax.vmap(lambda x, y: x[y])))\n final_token_probs_BSN = gather_fn(\n jax.nn.softmax(final_logits_BSNV), sampled_token_idxs_BSN\n )\n final_token_probs_BSN += ~mask_BSN\n # Update masked tokens and logits only\n token_idxs_BSN = jnp.where(mask_BSN, sampled_token_idxs_BSN, token_idxs_BSN)\n logits_BSNV = jnp.where(\n jnp.expand_dims(mask_BSN, -1), final_logits_BSNV, logits_BSNV\n )\n\n # --- Update mask ---\n num_unmasked_tokens = jnp.round(N * (1.0 - unmasked_ratio)).astype(int)\n final_token_probs_flat_BP = einops.rearrange(\n final_token_probs_BSN, ""b s n -> b (s n)""\n )\n idx_mask_P = (\n jnp.arange(final_token_probs_flat_BP.shape[-1])\n <= N - num_unmasked_tokens\n )\n sorted_idxs_BP = jnp.argsort(final_token_probs_flat_BP, axis=-1)\n mask_update_fn = jax.vmap(lambda msk, ids: msk.at[ids].set(idx_mask_P))\n mask_flat_BP = einops.rearrange(mask_BSN, ""b s n -> b (s n)"")\n 
new_mask_flat_BP = mask_update_fn(mask_flat_BP, sorted_idxs_BP)\n new_mask_BSN = einops.rearrange(new_mask_flat_BP, ""b (s n) -> b s n"", n=N)\n\n new_carry = (\n rng,\n token_idxs_BSN,\n logits_BSNV,\n new_mask_BSN,\n action_tokens_EL,\n )\n return new_carry\n\n @nnx.scan(in_axes=(nnx.Carry, 0), out_axes=nnx.Carry)\n def generation_step_fn(\n carry: tuple[jax.Array, jax.Array, jax.Array], step_t: jax.Array\n ) -> tuple[jax.Array, jax.Array, jax.Array]:\n rng, current_token_idxs_BSN, current_logits_BSNV = carry\n rng, step_rng = jax.random.split(rng)\n\n # Mask current frame (i.e., t == step_t)\n mask_S = jnp.arange(seq_len) == step_t\n mask_BSN = jnp.broadcast_to(mask_S[None, :, None], (B, seq_len, N)).astype(\n bool\n )\n masked_token_idxs_BSN = current_token_idxs_BSN * ~mask_BSN\n masked_logits_BSNV = current_logits_BSNV * jnp.expand_dims(~mask_BSN, -1)\n\n # --- Initialize and run MaskGIT loop ---\n init_carry_maskgit = (\n step_rng,\n masked_token_idxs_BSN,\n masked_logits_BSNV,\n mask_BSN,\n action_tokens_EL,\n )\n final_carry_maskgit = maskgit_step_fn(init_carry_maskgit, jnp.arange(steps))\n updated_token_idxs_BSN = final_carry_maskgit[1]\n updated_logits_BSNV = final_carry_maskgit[2]\n new_carry = (rng, updated_token_idxs_BSN, updated_logits_BSNV)\n return new_carry\n\n # --- Run the autoregressive generation using jax.lax.scan ---\n initial_carry = (batch[""rng""], token_idxs_BSN, init_logits_BSNV)\n timesteps_to_scan = jnp.arange(T, seq_len)\n final_carry = generation_step_fn(initial_carry, timesteps_to_scan)\n final_token_idxs_BSN = final_carry[1]\n final_logits_BSNV = final_carry[2]\n\n # --- Decode all tokens at once at the end ---\n H, W = batch[""videos""].shape[2:4]\n final_frames_BSHWC = self.tokenizer.decode(\n final_token_idxs_BSN,\n video_hw=(H, W),\n )\n return final_frames_BSHWC, final_logits_BSNV\n\n def sample_causal(\n self,\n batch: Dict[str, jax.Array],\n seq_len: int,\n temperature: float = 1,\n sample_argmax: bool = False,\n ) -> tuple[jax.Array, jax.Array]:\n """"""\n Autoregressively samples up to `seq_len` future frames, following Figure 8 of the paper.\n\n - Input frames are tokenized once.\n - Future frames are generated autoregressively in token space.\n - All frames are detokenized in a single pass.\n\n Note:\n - For interactive or step-wise sampling, detokenization should occur after each action.\n - To maintain consistent tensor shapes across timesteps, all current and future frames are decoded at every step.\n - Temporal causal structure is preserved by\n a) reapplying the mask before each decoding step.\n b) a temporal causal mask is applied within each ST-transformer block.\n\n Dimension keys:\n B: batch size\n T: number of input (conditioning) frames\n N: number of patches per frame\n M: model dimension\n S: sequence length\n H: height\n W: width\n E: B * (S - 1)\n """"""\n assert isinstance(self.dynamics, DynamicsCausal)\n # --- Encode videos and actions ---\n videos_BTHWC = batch[""videos""]\n tokenizer_out = self.tokenizer.vq_encode(videos_BTHWC, training=False)\n token_idxs_BTN = tokenizer_out[""indices""]\n B, T, N = token_idxs_BTN.shape\n pad_shape = (B, seq_len - T, N)\n pad = jnp.zeros(pad_shape, dtype=token_idxs_BTN.dtype)\n token_idxs_BSN = jnp.concatenate([token_idxs_BTN, pad], axis=1)\n logits_BSNV = jnp.zeros((*token_idxs_BSN.shape, self.num_patch_latents))\n dynamics_state = nnx.state(self.dynamics)\n\n if self.use_gt_actions:\n assert self.action_embed is not None\n latent_actions_BT1L = 
self.action_embed(batch[""actions""]).reshape(\n *batch[""actions""].shape[:2], 1, self.latent_action_dim\n )\n latent_actions_BTm11L = latent_actions_BT1L[:, :-1]\n action_tokens_EL = latent_actions_BTm11L.reshape(-1, self.latent_action_dim)\n else:\n assert self.lam is not None\n latent_actions_E = batch[""latent_actions""]\n action_tokens_EL = self.lam.vq.get_codes(latent_actions_E)\n\n @nnx.scan(in_axes=(nnx.Carry, 0), out_axes=nnx.Carry)\n def causal_step_fn(\n carry: tuple[jax.Array, jax.Array, jax.Array, jax.Array, jax.Array],\n step_n: jax.Array,\n ) -> tuple[jax.Array, jax.Array, jax.Array, jax.Array, jax.Array]:\n rng, token_idxs_BSN, logits_BSNV, action_tokens_EL, step_t = carry\n S, N = token_idxs_BSN.shape[1:]\n L = action_tokens_EL.shape[-1]\n\n # We need to reconstruct the submodule inside scan body to prevent trace context mismatches\n dynamics_causal = DynamicsCausal(\n model_dim=self.dyna_dim,\n ffn_dim=self.dyna_ffn_dim,\n num_latents=self.num_patch_latents,\n latent_action_dim=self.latent_action_dim,\n num_blocks=self.dyna_num_blocks,\n num_heads=self.dyna_num_heads,\n dropout=self.dropout,\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n use_flash_attention=self.use_flash_attention,\n decode=self.decode,\n rngs=nnx.Rngs(0),\n )\n nnx.update(dynamics_causal, dynamics_state)\n\n # --- Construct + encode video ---\n vid_embed_BSNM = dynamics_causal.patch_embed(token_idxs_BSN)\n\n # --- Predict transition ---\n action_tokens_BSm1L = jnp.reshape(action_tokens_EL, (B, S - 1, L))\n act_embed_BSm1M = dynamics_causal.action_up(action_tokens_BSm1L)\n act_embed_BSM = jnp.pad(act_embed_BSm1M, ((0, 0), (1, 0), (0, 0)))\n act_embed_BS1M = jnp.reshape(\n act_embed_BSM, (B, S, 1, act_embed_BSM.shape[-1])\n )\n vid_embed_BSNp1M = jnp.concatenate([act_embed_BS1M, vid_embed_BSNM], axis=2)\n final_logits_BTNp1V = (\n dynamics_causal.transformer(vid_embed_BSNp1M, (step_t, step_n))\n / temperature\n )\n final_logits_BV = final_logits_BTNp1V[:, step_t, step_n, :]\n\n # --- Sample new tokens for final frame ---\n if sample_argmax:\n sampled_token_idxs_B = jnp.argmax(final_logits_BV, axis=-1)\n else:\n rng, _rng = jax.random.split(rng)\n sampled_token_idxs_B = jax.random.categorical(_rng, final_logits_BV)\n # Update next tokens only\n token_idxs_BSN = token_idxs_BSN.at[:, step_t, step_n].set(\n sampled_token_idxs_B\n )\n logits_BSNV = logits_BSNV.at[:, step_t, step_n].set(final_logits_BV)\n\n new_carry = (rng, token_idxs_BSN, logits_BSNV, action_tokens_EL, step_t)\n return new_carry\n\n @nnx.scan(in_axes=(nnx.Carry, 0), out_axes=nnx.Carry)\n def generation_step_fn(\n carry: tuple[jax.Array, jax.Array, jax.Array], step_t: jax.Array\n ) -> tuple[jax.Array, jax.Array, jax.Array]:\n rng, current_token_idxs_BSN, current_logits_BSNV = carry\n rng, step_rng = jax.random.split(rng)\n\n # --- Initialize and run causal loop ---\n init_carry_causal = (\n step_rng,\n current_token_idxs_BSN,\n current_logits_BSNV,\n action_tokens_EL,\n step_t,\n )\n final_carry_causal = causal_step_fn(init_carry_causal, jnp.arange(N))\n updated_token_idxs_BSN = final_carry_causal[1]\n updated_logits_BSNV = final_carry_causal[2]\n new_carry = (rng, updated_token_idxs_BSN, updated_logits_BSNV)\n return new_carry\n\n # --- Run the autoregressive generation using jax.lax.scan ---\n initial_carry = (batch[""rng""], token_idxs_BSN, logits_BSNV)\n timesteps_to_scan = jnp.arange(T, seq_len)\n final_carry = generation_step_fn(initial_carry, timesteps_to_scan)\n final_token_idxs_BSN = final_carry[1]\n final_logits_BSNV = 
final_carry[2]\n\n # --- Decode all tokens at once at the end ---\n H, W = batch[""videos""].shape[2:4]\n final_frames_BSHWC = self.tokenizer.decode(\n final_token_idxs_BSN,\n video_hw=(H, W),\n )\n return final_frames_BSHWC, final_logits_BSNV\n\n def vq_encode(self, batch: Dict[str, jax.Array], training: bool) -> jax.Array:\n # --- Preprocess videos ---\n assert self.lam is not None\n video_BTHWC = batch[""videos""]\n lam_output: Dict[str, jax.Array] = self.lam.vq_encode(\n video_BTHWC, training=training\n )\n lam_indices_E = lam_output[""indices""]\n return lam_indices_E\n\n\n# FIXME (f.srambical): add conversion script for old checkpoints\ndef restore_genie_components(\n optimizer: nnx.Optimizer,\n sharding: jax.sharding.NamedSharding,\n rng: jax.Array,\n args,\n) -> nnx.Optimizer:\n """"""Restore pre-trained Genie components""""""\n rng_tokenizer, rng_lam = jax.random.split(rng)\n rngs_tokenizer = nnx.Rngs(rng_tokenizer)\n rngs_lam = nnx.Rngs(rng_lam)\n\n tx = optimizer.tx\n model = optimizer.model\n handler_registry = ocp.handlers.DefaultCheckpointHandlerRegistry()\n handler_registry.add(\n ""model_state"", ocp.args.PyTreeRestore, ocp.handlers.PyTreeCheckpointHandler\n )\n\n checkpoint_options = ocp.CheckpointManagerOptions(\n step_format_fixed_length=6,\n )\n tokenizer_checkpoint_manager = ocp.CheckpointManager(\n directory=args.tokenizer_checkpoint,\n options=checkpoint_options,\n handler_registry=handler_registry,\n )\n dummy_tokenizer = TokenizerVQVAE(\n in_dim=args.image_channels,\n model_dim=args.tokenizer_dim,\n ffn_dim=args.tokenizer_ffn_dim,\n latent_dim=args.latent_patch_dim,\n num_latents=args.num_patch_latents,\n patch_size=args.patch_size,\n num_blocks=args.tokenizer_num_blocks,\n num_heads=args.tokenizer_num_heads,\n dropout=args.dropout,\n codebook_dropout=args.dropout,\n param_dtype=args.param_dtype,\n dtype=args.dtype,\n use_flash_attention=args.use_flash_attention,\n rngs=rngs_tokenizer,\n )\n dummy_tokenizer_optimizer = nnx.Optimizer(dummy_tokenizer, tx)\n dummy_tokenizer_optimizer_state = nnx.state(dummy_tokenizer_optimizer)\n abstract_sharded_tokenizer_optimizer_state = _create_abstract_sharded_pytree(\n dummy_tokenizer_optimizer_state, sharding\n )\n restored_tokenizer = tokenizer_checkpoint_manager.restore(\n step=tokenizer_checkpoint_manager.latest_step(),\n args=ocp.args.Composite(\n model_state=ocp.args.PyTreeRestore( # type: ignore\n abstract_sharded_tokenizer_optimizer_state # type: ignore\n ),\n ),\n )[""model_state""]\n nnx.update(dummy_tokenizer_optimizer.model, restored_tokenizer.model)\n model.tokenizer = dummy_tokenizer_optimizer.model\n tokenizer_checkpoint_manager.close()\n\n if args.lam_checkpoint:\n lam_checkpoint_manager = ocp.CheckpointManager(\n directory=args.lam_checkpoint,\n options=checkpoint_options,\n handler_registry=handler_registry,\n )\n dummy_lam = LatentActionModel(\n in_dim=args.image_channels,\n model_dim=args.lam_dim,\n ffn_dim=args.lam_ffn_dim,\n latent_dim=args.latent_patch_dim,\n num_latents=args.num_actions,\n patch_size=args.lam_patch_size,\n num_blocks=args.lam_num_blocks,\n num_heads=args.lam_num_heads,\n dropout=args.dropout,\n codebook_dropout=args.dropout,\n param_dtype=args.param_dtype,\n dtype=args.dtype,\n use_flash_attention=args.use_flash_attention,\n rngs=rngs_lam,\n )\n dummy_lam_optimizer = nnx.Optimizer(dummy_lam, tx)\n dummy_lam_optimizer_state = nnx.state(dummy_lam_optimizer)\n abstract_sharded_lam_optimizer_state = _create_abstract_sharded_pytree(\n dummy_lam_optimizer_state, sharding\n )\n 
restored_lam_optimizer = lam_checkpoint_manager.restore(\n step=lam_checkpoint_manager.latest_step(),\n args=ocp.args.Composite(\n model_state=ocp.args.PyTreeRestore( # type: ignore\n abstract_sharded_lam_optimizer_state # type: ignore\n ),\n ),\n )[""model_state""]\n nnx.update(dummy_lam_optimizer.model, restored_lam_optimizer.model)\n model.lam = dummy_lam_optimizer.model\n # Remove the LAM decoder to save memory and avoid unnecessary computation.\n del model.lam.decoder\n lam_checkpoint_manager.close()\n\n # Reinitialize the optimizer states\n optimizer = nnx.Optimizer(model, tx)\n return optimizer\n\n\ndef _create_abstract_sharded_pytree(\n pytree_template: nnx.GraphState, sharding_spec: jax.sharding.NamedSharding\n) -> jax.Array:\n """"""Replaces arrays in a pytree with ShapeDtypeStructs having the given sharding.""""""\n\n def map_fn(leaf_template):\n if hasattr(leaf_template, ""shape"") and hasattr(leaf_template, ""dtype""):\n return jax.ShapeDtypeStruct(\n leaf_template.shape, leaf_template.dtype, sharding=sharding_spec\n )\n return leaf_template\n\n return jax.tree_util.tree_map(map_fn, pytree_template)\n",python,tab +1834,2748747,"slurm/jobs/mihir/horeka/breakout/sample_maskgit.sbatch",0,0,"",shellscript,tab +1835,2750121,"slurm/jobs/mihir/horeka/breakout/sample_maskgit.sbatch",1122,0,"",shellscript,selection_command +1836,2750352,"slurm/jobs/mihir/horeka/breakout/sample_maskgit.sbatch",1122,0,"\n ",shellscript,content +1837,2750884,"slurm/jobs/mihir/horeka/breakout/sample_maskgit.sbatch",1127,0,"-",shellscript,content +1838,2750885,"slurm/jobs/mihir/horeka/breakout/sample_maskgit.sbatch",1128,0,"",shellscript,selection_keyboard +1839,2751026,"slurm/jobs/mihir/horeka/breakout/sample_maskgit.sbatch",1128,0,"-",shellscript,content +1840,2751027,"slurm/jobs/mihir/horeka/breakout/sample_maskgit.sbatch",1129,0,"",shellscript,selection_keyboard +1841,2751642,"slurm/jobs/mihir/horeka/breakout/sample_maskgit.sbatch",1129,0,"u",shellscript,content +1842,2751643,"slurm/jobs/mihir/horeka/breakout/sample_maskgit.sbatch",1130,0,"",shellscript,selection_keyboard +1843,2751822,"slurm/jobs/mihir/horeka/breakout/sample_maskgit.sbatch",1130,0,"y",shellscript,content +1844,2751823,"slurm/jobs/mihir/horeka/breakout/sample_maskgit.sbatch",1131,0,"",shellscript,selection_keyboard +1845,2752400,"slurm/jobs/mihir/horeka/breakout/sample_maskgit.sbatch",1130,1,"",shellscript,content +1846,2752733,"slurm/jobs/mihir/horeka/breakout/sample_maskgit.sbatch",1130,0,"s",shellscript,content +1847,2752734,"slurm/jobs/mihir/horeka/breakout/sample_maskgit.sbatch",1131,0,"",shellscript,selection_keyboard +1848,2752880,"slurm/jobs/mihir/horeka/breakout/sample_maskgit.sbatch",1131,0,"e",shellscript,content +1849,2752881,"slurm/jobs/mihir/horeka/breakout/sample_maskgit.sbatch",1132,0,"",shellscript,selection_keyboard +1850,2753020,"slurm/jobs/mihir/horeka/breakout/sample_maskgit.sbatch",1132,0,"_",shellscript,content +1851,2753021,"slurm/jobs/mihir/horeka/breakout/sample_maskgit.sbatch",1133,0,"",shellscript,selection_keyboard +1852,2753164,"slurm/jobs/mihir/horeka/breakout/sample_maskgit.sbatch",1133,0,"g",shellscript,content +1853,2753165,"slurm/jobs/mihir/horeka/breakout/sample_maskgit.sbatch",1134,0,"",shellscript,selection_keyboard +1854,2753315,"slurm/jobs/mihir/horeka/breakout/sample_maskgit.sbatch",1134,0,"t",shellscript,content +1855,2753315,"slurm/jobs/mihir/horeka/breakout/sample_maskgit.sbatch",1135,0,"",shellscript,selection_keyboard 
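Aside: the maskgit_step_fn recorded above drives its iterative decoding with a cosine schedule: unmasked_ratio = cos(pi * (step + 1) / (2 * steps)) decays to 0 at the final step, so each iteration commits progressively more tokens. A minimal standalone sketch of that schedule (the helper name and the dummy sizes are illustrative, not part of the recorded file):

import jax.numpy as jnp

def maskgit_keep_counts(num_tokens: int, steps: int) -> jnp.ndarray:
    # Mirrors the recorded step computation: early iterations keep few tokens
    # (most stay masked), later iterations keep nearly all of them.
    step = jnp.arange(steps)
    unmasked_ratio = jnp.cos(jnp.pi * (step + 1) / (steps * 2))
    return jnp.round(num_tokens * (1.0 - unmasked_ratio)).astype(int)

print(maskgit_keep_counts(num_tokens=64, steps=25))
# Monotonically increasing counts; the last entry is 64, i.e. fully unmasked.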
+1856,2753408,"slurm/jobs/mihir/horeka/breakout/sample_maskgit.sbatch",1135,0,"_",shellscript,content +1857,2753409,"slurm/jobs/mihir/horeka/breakout/sample_maskgit.sbatch",1136,0,"",shellscript,selection_keyboard +1858,2753574,"slurm/jobs/mihir/horeka/breakout/sample_maskgit.sbatch",1136,0,"a",shellscript,content +1859,2753575,"slurm/jobs/mihir/horeka/breakout/sample_maskgit.sbatch",1137,0,"",shellscript,selection_keyboard +1860,2753689,"slurm/jobs/mihir/horeka/breakout/sample_maskgit.sbatch",1137,0,"c",shellscript,content +1861,2753690,"slurm/jobs/mihir/horeka/breakout/sample_maskgit.sbatch",1138,0,"",shellscript,selection_keyboard +1862,2753873,"slurm/jobs/mihir/horeka/breakout/sample_maskgit.sbatch",1138,0,"t",shellscript,content +1863,2753874,"slurm/jobs/mihir/horeka/breakout/sample_maskgit.sbatch",1139,0,"",shellscript,selection_keyboard +1864,2753992,"slurm/jobs/mihir/horeka/breakout/sample_maskgit.sbatch",1139,0,"i",shellscript,content +1865,2753993,"slurm/jobs/mihir/horeka/breakout/sample_maskgit.sbatch",1140,0,"",shellscript,selection_keyboard +1866,2754057,"slurm/jobs/mihir/horeka/breakout/sample_maskgit.sbatch",1140,0,"o",shellscript,content +1867,2754058,"slurm/jobs/mihir/horeka/breakout/sample_maskgit.sbatch",1141,0,"",shellscript,selection_keyboard +1868,2754330,"slurm/jobs/mihir/horeka/breakout/sample_maskgit.sbatch",1141,0,"s",shellscript,content +1869,2754331,"slurm/jobs/mihir/horeka/breakout/sample_maskgit.sbatch",1142,0,"",shellscript,selection_keyboard +1870,2754727,"slurm/jobs/mihir/horeka/breakout/sample_maskgit.sbatch",1141,1,"",shellscript,content +1871,2754898,"slurm/jobs/mihir/horeka/breakout/sample_maskgit.sbatch",1141,0,"n",shellscript,content +1872,2754899,"slurm/jobs/mihir/horeka/breakout/sample_maskgit.sbatch",1142,0,"",shellscript,selection_keyboard +1873,2754974,"slurm/jobs/mihir/horeka/breakout/sample_maskgit.sbatch",1142,0,"s",shellscript,content +1874,2754975,"slurm/jobs/mihir/horeka/breakout/sample_maskgit.sbatch",1143,0,"",shellscript,selection_keyboard +1875,2755127,"slurm/jobs/mihir/horeka/breakout/sample_maskgit.sbatch",1143,0," ",shellscript,content +1876,2755128,"slurm/jobs/mihir/horeka/breakout/sample_maskgit.sbatch",1144,0,"",shellscript,selection_keyboard +1877,2755291,"slurm/jobs/mihir/horeka/breakout/sample_maskgit.sbatch",1144,0,"\",shellscript,content +1878,2755292,"slurm/jobs/mihir/horeka/breakout/sample_maskgit.sbatch",1145,0,"",shellscript,selection_keyboard +1879,2755632,"slurm/jobs/mihir/horeka/breakout/sample_maskgit.sbatch",1144,0,"",shellscript,selection_command +1880,2757102,"TERMINAL",0,0,"sh slurm/jobs/mihir/horeka/breakout/sample_maskgit.sbatch",,terminal_output +1881,2757309,"TERMINAL",0,0,"\r\n[?2004l\r",,terminal_output +1882,2757421,"TERMINAL",0,0,"Sampling from checkpoint: /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/breakout/dyn/interactive/3512647\r\n",,terminal_output +1883,2757543,"TERMINAL",0,0,"GpuFreq=control_disabled\r\n",,terminal_output +1884,2765911,"TERMINAL",0,0,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/.venv/lib/python3.10/site-packages/orbax/checkpoint/_src/serialization/type_handlers.py:1269: UserWarning: Sharding info not provided when restoring. Populating sharding info from sharding file. Please note restoration time will be slightly increased due to reading from file. 
Note also that this option is unsafe when restoring on a different topology than the checkpoint was saved with.\r\n warnings.warn(\r\n",,terminal_output +1885,2768291,"TERMINAL",0,0,"2025-09-23 13:37:07.661014: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +1886,2776153,"TERMINAL",0,0,"2025-09-23 13:37:15.496105: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +1887,2778167,"TERMINAL",0,0,"2025-09-23 13:37:17.524558: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +1888,2790189,"TERMINAL",0,0,"2025-09-23 13:37:29.462643: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +1889,2800056,"TERMINAL",0,0,"SSIM: 0.017371613532304764\r\nTraceback (most recent call last):\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/sample.py"", line 240, in \r\n action_batch_BSm11 = jnp.reshape(batch[""actions""][:, :-1], (B, S - 1, 1))\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/.venv/lib/python3.10/site-packages/jax/_src/numpy/lax_numpy.py"", line 2016, in reshape\r\n return a.reshape(shape, order=order) # type: ignore[call-overload,union-attr]\r\nValueError: cannot reshape array of size 60 into shape (4,0,1)\r\n",,terminal_output +1890,2801064,"TERMINAL",0,0,"srun: error: hkn0401: task 0: Exited with exit code 1\r\n]0;tum_cte0515@hkn0401:~/Projects/jasmine[?2004h(jasmine) [tum_cte0515@hkn0401 jasmine]$ ",,terminal_output +1891,3637129,"TERMINAL",0,0,"sh slurm/jobs/mihir/horeka/breakout/sample_maskgit.sbatch",,terminal_output +1892,3646666,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/sample.py",0,0,"from dataclasses import dataclass\nimport time\nimport os\nimport optax\n\nimport dm_pix as pix\nimport einops\nimport jax\nimport jax.numpy as jnp\nimport flax.linen as nn\nimport numpy as np\nimport orbax.checkpoint as ocp\nfrom PIL import Image, ImageDraw\nimport tyro\nfrom flax import nnx\n\nfrom genie import Genie\nfrom utils.dataloader import get_dataloader\n\n\n@dataclass\nclass Args:\n # Experiment\n seed: int = 0\n seq_len: int = 16\n image_channels: int = 3\n image_height: int = 90\n image_width: int = 160\n data_dir: str = ""data/coinrun_episodes""\n checkpoint: str = """"\n # Sampling\n batch_size: int = 1\n maskgit_steps: int = 25\n temperature: float = 1.0\n sample_argmax: bool = True\n start_frame: int = 1\n # Tokenizer checkpoint\n tokenizer_dim: int = 512\n tokenizer_ffn_dim: int = 2048\n latent_patch_dim: int = 32\n num_patch_latents: int = 1024\n patch_size: int = 4\n tokenizer_num_blocks: 
int = 4\n tokenizer_num_heads: int = 8\n # LAM checkpoint\n lam_dim: int = 512\n lam_ffn_dim: int = 2048\n latent_action_dim: int = 32\n num_actions: int = 6\n lam_patch_size: int = 16\n lam_num_blocks: int = 4\n lam_num_heads: int = 8\n use_gt_actions: bool = False\n # Dynamics checkpoint\n dyna_type: str = ""maskgit""\n dyna_dim: int = 512\n dyna_ffn_dim: int = 2048\n dyna_num_blocks: int = 6\n dyna_num_heads: int = 8\n param_dtype = jnp.float32\n dtype = jnp.bfloat16\n use_flash_attention: bool = True\n\n\nargs = tyro.cli(Args)\n\nif __name__ == ""__main__"":\n """"""\n Dimension keys:\n B: batch size\n T: number of input (conditioning) frames\n N: number of patches per frame\n S: sequence length\n H: height\n W: width\n E: B * (S - 1)\n """"""\n jax.distributed.initialize()\n\n rng = jax.random.key(args.seed)\n\n # --- Load Genie checkpoint ---\n rngs = nnx.Rngs(rng)\n genie = Genie(\n # Tokenizer\n in_dim=args.image_channels,\n tokenizer_dim=args.tokenizer_dim,\n tokenizer_ffn_dim=args.tokenizer_ffn_dim,\n latent_patch_dim=args.latent_patch_dim,\n num_patch_latents=args.num_patch_latents,\n patch_size=args.patch_size,\n tokenizer_num_blocks=args.tokenizer_num_blocks,\n tokenizer_num_heads=args.tokenizer_num_heads,\n # LAM\n lam_dim=args.lam_dim,\n lam_ffn_dim=args.lam_ffn_dim,\n latent_action_dim=args.latent_action_dim,\n num_actions=args.num_actions,\n lam_patch_size=args.lam_patch_size,\n lam_num_blocks=args.lam_num_blocks,\n lam_num_heads=args.lam_num_heads,\n lam_co_train=False,\n use_gt_actions=args.use_gt_actions,\n # Dynamics\n dyna_type=args.dyna_type,\n dyna_dim=args.dyna_dim,\n dyna_ffn_dim=args.dyna_ffn_dim,\n dyna_num_blocks=args.dyna_num_blocks,\n dyna_num_heads=args.dyna_num_heads,\n param_dtype=args.param_dtype,\n dtype=args.dtype,\n use_flash_attention=args.use_flash_attention,\n # FIXME (f.srambical): implement spatiotemporal KV caching and set decode=True\n decode=False,\n rngs=rngs,\n )\n\n del genie.tokenizer.vq.drop\n # Need to delete lam decoder for checkpoint loading\n if not args.use_gt_actions:\n assert genie.lam is not None\n del genie.lam.decoder\n\n handler_registry = ocp.handlers.DefaultCheckpointHandlerRegistry()\n handler_registry.add(\n ""model_state"", ocp.args.PyTreeSave, ocp.handlers.PyTreeCheckpointHandler\n )\n handler_registry.add(\n ""model_state"", ocp.args.PyTreeRestore, ocp.handlers.PyTreeCheckpointHandler\n )\n checkpoint_options = ocp.CheckpointManagerOptions(\n step_format_fixed_length=6,\n )\n checkpoint_manager = ocp.CheckpointManager(\n args.checkpoint,\n options=checkpoint_options,\n handler_registry=handler_registry,\n )\n\n dummy_tx = optax.adamw(\n learning_rate=optax.linear_schedule(0.0001, 0.0001, 10000),\n b1=0.9,\n b2=0.9,\n weight_decay=1e-4,\n mu_dtype=args.dtype,\n )\n dummy_optimizer = nnx.Optimizer(genie, dummy_tx)\n\n abstract_optimizer = nnx.eval_shape(lambda: dummy_optimizer)\n abstract_optimizer_state = nnx.state(abstract_optimizer)\n restored = checkpoint_manager.restore(\n checkpoint_manager.latest_step(),\n args=ocp.args.Composite(\n model_state=ocp.args.PyTreeRestore(abstract_optimizer_state), # type: ignore\n ),\n )\n restored_optimizer_state = restored[""model_state""]\n nnx.update(dummy_optimizer, restored_optimizer_state)\n\n # --- Define sampling function ---\n def _sampling_fn(model: Genie, batch: dict) -> jax.Array:\n """"""Runs Genie.sample with pre-defined generation hyper-parameters.""""""\n assert args.dyna_type in [\n ""maskgit"",\n ""causal"",\n ], f""Invalid dynamics type: {args.dyna_type}""\n frames, _ 
= model.sample(\n batch,\n args.seq_len,\n args.temperature,\n args.sample_argmax,\n args.maskgit_steps,\n )\n return frames\n\n # --- Define autoregressive sampling loop ---\n def _autoreg_sample(genie, rng, batch):\n batch[""videos""] = batch[""videos""][:, : args.start_frame]\n batch[""rng""] = rng\n generated_vid_BSHWC = _sampling_fn(genie, batch)\n return generated_vid_BSHWC\n\n # --- Get video + latent actions ---\n array_record_files = [\n os.path.join(args.data_dir, x)\n for x in os.listdir(args.data_dir)\n if x.endswith("".array_record"")\n ]\n dataloader = get_dataloader(\n array_record_files,\n args.seq_len,\n args.batch_size,\n args.image_height,\n args.image_width,\n args.image_channels,\n # We don't use workers in order to avoid grain shutdown issues (https://github.com/google/grain/issues/398)\n num_workers=0,\n prefetch_buffer_size=1,\n seed=args.seed,\n )\n dataloader = iter(dataloader)\n batch = next(dataloader)\n gt_video = jnp.asarray(batch[""videos""], dtype=jnp.float32) / 255.0\n batch[""videos""] = gt_video.astype(args.dtype)\n # Get latent actions for all videos in the batch\n action_batch_E = None\n if not args.use_gt_actions:\n action_batch_E = genie.vq_encode(batch, training=False)\n batch[""latent_actions""] = action_batch_E\n\n # --- Sample + evaluate video ---\n recon_video_BSHWC = _autoreg_sample(genie, rng, batch)\n recon_video_BSHWC = recon_video_BSHWC.astype(jnp.float32)\n gt = (\n gt_video[:, : recon_video_BSHWC.shape[1]]\n .clip(0, 1)\n .reshape(-1, *gt_video.shape[2:])\n )\n recon = recon_video_BSHWC.clip(0, 1).reshape(-1, *recon_video_BSHWC.shape[2:])\n ssim = jnp.asarray(\n pix.ssim(gt[:, args.start_frame :], recon[:, args.start_frame :])\n ).mean()\n print(f""SSIM: {ssim}"")\n\n # --- Construct video ---\n true_videos = (gt_video * 255).astype(np.uint8)\n pred_videos = (recon_video_BSHWC * 255).astype(np.uint8)\n video_comparison = np.zeros((2, *recon_video_BSHWC.shape), dtype=np.uint8)\n video_comparison[0] = true_videos[:, : args.seq_len]\n video_comparison[1] = pred_videos\n frames = einops.rearrange(video_comparison, ""n b t h w c -> t (b h) (n w) c"")\n\n # --- Save video ---\n imgs = [Image.fromarray(img) for img in frames]\n # Write actions on each frame, on each row (i.e., for each video in the batch, on the GT row)\n B, S, _, _, _ = batch[""videos""].shape\n if action_batch_E is not None:\n action_batch_BSm11 = jnp.reshape(action_batch_E, (B, S - 1, 1))\n else:\n action_batch_BSm11 = jnp.reshape(batch[""actions""][:, :-1], (B, S - 1, 1))\n for t, img in enumerate(imgs[1:]):\n d = ImageDraw.Draw(img)\n for row in range(B):\n action = action_batch_BSm11[row, t, 0]\n y_offset = row * batch[""videos""].shape[2] + 2\n d.text((2, y_offset), f""{action}"", fill=255)\n imgs[0].save(\n f""generation_{time.time()}.gif"",\n save_all=True,\n append_images=imgs[1:],\n duration=250,\n loop=0,\n )\n",python,tab +1893,3646670,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/sample.py",7641,0,"",python,selection_command +1894,3649215,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/sample.py",7712,0,"",python,selection_mouse +1895,3649379,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/sample.py",7712,1,"S",python,selection_mouse +1896,3651800,"TERMINAL",0,0,"bash",,terminal_focus +1897,3652521,"TERMINAL",0,0,"queue",,terminal_command +1898,3652601,"TERMINAL",0,0,"]633;C[?1049h(B[?7hEvery 1.0s: squeue --mehkn1990.localdomain: Tue Sep 23 13:51:51 2025JOBID PARTITION NAME USER ST\tTIME NODES 
NODELIST(REASON)3511159 accelerat train_dy tum_cte0 PD\t0:00\t 1 (Priority)3511158 accelerat train_dy tum_cte0 PD\t0:00\t 1 (Priority)3512804 accelerat interact tum_cte0 PD\t0:00\t 1 (Priority)3512808 dev_accel interact tum_cte0 R51:01\t 1 hkn04013512651 large preproce tum_cte0 R 1:48:06\t 1 hkn1901",,terminal_output +1899,3653640,"TERMINAL",0,0,"238",,terminal_output +1900,3653742,"TERMINAL",0,0,"[?1049l\r[?1l>]0;tum_cte0515@hkn1990:~/Projects/jasmine",,terminal_output +1901,3654983,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/sample.py",0,0,"",python,tab +1902,3656149,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/sample.py",7716,0,"",python,selection_mouse +1903,3657924,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/sample.py",7663,0,"",python,selection_mouse +1904,3658205,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/sample.py",7663,1,"S",python,selection_mouse +1905,3658243,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/sample.py",7663,2,"Sm",python,selection_mouse +1906,3658290,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/sample.py",7663,3,"Sm1",python,selection_mouse +1907,3662618,"TERMINAL",0,0,"srun",,terminal_focus +1908,3678018,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/sample.py",0,0,"",python,tab +1909,3679391,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/sample.py",7640,0,"",python,selection_mouse +1910,3679425,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/sample.py",7639,0,"",python,selection_command +1911,3680147,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/sample.py",7640,0,"\n ",python,content +1912,3681626,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/sample.py",7649,0,"b",python,content +1913,3681627,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/sample.py",7650,0,"",python,selection_keyboard +1914,3681721,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/sample.py",7650,0,"r",python,content +1915,3681723,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/sample.py",7651,0,"",python,selection_keyboard +1916,3681907,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/sample.py",7651,0,"e",python,content +1917,3681909,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/sample.py",7652,0,"",python,selection_keyboard +1918,3682088,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/sample.py",7652,0,"a",python,content +1919,3682090,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/sample.py",7653,0,"",python,selection_keyboard +1920,3682160,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/sample.py",7653,0,"k",python,content +1921,3682161,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/sample.py",7654,0,"",python,selection_keyboard +1922,3682523,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/sample.py",7654,0,"p",python,content +1923,3682524,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/sample.py",7655,0,"",python,selection_keyboard +1924,3682813,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/sample.py",7655,0,"o",python,content +1925,3682814,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/sample.py",7656,0,"",python,selection_keyboard 
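The ValueError in the run above follows from where S is read: _autoreg_sample truncates batch["videos"] to args.start_frame (= 1) conditioning frames before sampling, so the later shape unpack yields S = 1 and a reshape target of (B, S - 1, 1) = (4, 0, 1) for the 4 x 15 sliced actions. A minimal reproduction under the shapes the session later confirms in pdb:

import jax.numpy as jnp

B, start_frame, seq_len = 4, 1, 16
videos = jnp.zeros((B, seq_len, 64, 64, 3))[:, :start_frame]  # (4, 1, 64, 64, 3)
actions = jnp.zeros((B, seq_len))                             # (4, 16)

S = videos.shape[1]  # 1: the truncated clip length, not the rollout length
try:
    jnp.reshape(actions[:, :-1], (B, S - 1, 1))  # 60 elements into (4, 0, 1)
except ValueError as e:
    print(e)  # cannot reshape array of size 60 into shape (4,0,1)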
+1926,3683009,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/sample.py",7656,0,"i",python,content +1927,3683011,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/sample.py",7657,0,"",python,selection_keyboard +1928,3683093,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/sample.py",7657,0,"n",python,content +1929,3683094,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/sample.py",7658,0,"",python,selection_keyboard +1930,3683196,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/sample.py",7658,0,"t",python,content +1931,3683198,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/sample.py",7659,0,"",python,selection_keyboard +1932,3683839,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/sample.py",7659,0,"()",python,content +1933,3683840,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/sample.py",7660,0,"",python,selection_keyboard +1934,3683883,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/sample.py",7660,1,")",python,content +1935,3683884,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/sample.py",7661,0,"",python,selection_keyboard +1936,3686423,"TERMINAL",0,0,"\r\n[?2004l\r",,terminal_output +1937,3686584,"TERMINAL",0,0,"Sampling from checkpoint: /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/breakout/dyn/interactive/3512647\r\n",,terminal_output +1938,3686698,"TERMINAL",0,0,"GpuFreq=control_disabled\r\n",,terminal_output +1939,3695196,"TERMINAL",0,0,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/.venv/lib/python3.10/site-packages/orbax/checkpoint/_src/serialization/type_handlers.py:1269: UserWarning: Sharding info not provided when restoring. Populating sharding info from sharding file. Please note restoration time will be slightly increased due to reading from file. Note also that this option is unsafe when restoring on a different topology than the checkpoint was saved with.\r\n warnings.warn(\r\n",,terminal_output +1940,3697464,"TERMINAL",0,0,"2025-09-23 13:52:36.825826: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +1941,3704814,"TERMINAL",0,0,"2025-09-23 13:52:44.152239: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +1942,3706747,"TERMINAL",0,0,"2025-09-23 13:52:46.117865: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +1943,3718604,"TERMINAL",0,0,"2025-09-23 13:52:57.975303: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. 
Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +1944,3728476,"TERMINAL",0,0,"SSIM: 0.017371613532304764\r\n> /hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/sample.py(241)()\r\n-> action_batch_BSm11 = jnp.reshape(batch[""actions""][:, :-1], (B, S - 1, 1))\r\n",,terminal_output +1945,3734812,"TERMINAL",0,0,"batch[""actions""]",,terminal_output +1946,3735141,"TERMINAL",0,0,".",,terminal_output +1947,3735453,"TERMINAL",0,0,"s",,terminal_output +1948,3735620,"TERMINAL",0,0,"h",,terminal_output +1949,3735673,"TERMINAL",0,0,"a",,terminal_output +1950,3735880,"TERMINAL",0,0,"p",,terminal_output +1951,3736004,"TERMINAL",0,0,"e",,terminal_output +1952,3736071,"TERMINAL",0,0,"\r\n(Pdb) (4, 16)\r\n",,terminal_output +1953,3747955,"TERMINAL",0,0,"jnp.reshape(batch[""actions""][:, :-1], (B, S - 1, 1))",,terminal_output +1954,3748279,"TERMINAL",0,0,"\r\n(Pdb) *** ValueError: cannot reshape array of size 60 into shape (4,0,1)\r\n",,terminal_output +1955,3755683,"TERMINAL",0,0,"batch[""actions""][:, :-1]",,terminal_output +1956,3756031,"TERMINAL",0,0,".",,terminal_output +1957,3756231,"TERMINAL",0,0,"s",,terminal_output +1958,3756476,"TERMINAL",0,0,"h",,terminal_output +1959,3756724,"TERMINAL",0,0,"a",,terminal_output +1960,3756880,"TERMINAL",0,0,"p",,terminal_output +1961,3757005,"TERMINAL",0,0,"e",,terminal_output +1962,3757357,"TERMINAL",0,0,"\r\n(Pdb) (4, 15)\r\n",,terminal_output +1963,3762818,"TERMINAL",0,0,"S",,terminal_output +1964,3765462,"TERMINAL",0,0,"\r\n(Pdb) 1\r\n",,terminal_output +1965,3767673,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/genie.py",0,0,"",python,tab +1966,3768552,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/sample.py",0,0,"",python,tab +1967,3770837,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/sample.py",7548,0,"",python,selection_mouse +1968,3771373,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/sample.py",7506,0,"",python,selection_mouse +1969,3771519,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/sample.py",7502,5,"batch",python,selection_mouse +1970,3771684,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/sample.py",7502,6,"batch[",python,selection_mouse +1971,3771684,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/sample.py",7502,13,"batch[""videos",python,selection_mouse +1972,3772015,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/sample.py",7502,14,"batch[""videos""",python,selection_mouse +1973,3772074,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/sample.py",7502,15,"batch[""videos""]",python,selection_mouse +1974,3776582,"TERMINAL",0,0,"batch[""videos""]",,terminal_output +1975,3776991,"TERMINAL",0,0,".",,terminal_output +1976,3777176,"TERMINAL",0,0,"s",,terminal_output +1977,3777287,"TERMINAL",0,0,"h",,terminal_output +1978,3777426,"TERMINAL",0,0,"a",,terminal_output +1979,3777549,"TERMINAL",0,0,"pe",,terminal_output +1980,3777655,"TERMINAL",0,0,"\r\n(Pdb) (4, 1, 64, 64, 3)\r\n",,terminal_output +1981,3789202,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/sample.py",0,0,"",python,tab +1982,3792960,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/sample.py",7505,0,"",python,selection_mouse 
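To get at those shapes, rows 1911-1935 type a breakpoint() call in just before the failing reshape, so the rerun drops the srun task into pdb right after the SSIM print; the probes above are then evaluated interactively. A replay of that inspection with the session's recorded values (dummy zero tensors stand in for the real batch):

import jax.numpy as jnp

batch = {
    "actions": jnp.zeros((4, 16)),           # (Pdb) batch["actions"].shape -> (4, 16)
    "videos": jnp.zeros((4, 1, 64, 64, 3)),  # (Pdb) batch["videos"].shape  -> (4, 1, 64, 64, 3)
}
S = batch["videos"].shape[1]

print(batch["actions"][:, :-1].shape)  # (4, 15): 60 actions to place
print(S)                               # 1: tracks the truncated conditioning clip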
+1983,3793069,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/sample.py",7502,5,"batch",python,selection_mouse +1984,3826637,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/sample.py",7490,0,"",python,selection_mouse +1985,3833500,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/sample.py",6127,0,"",python,selection_mouse +1986,3833652,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/sample.py",6126,5,"batch",python,selection_mouse +1987,3839615,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/sample.py",6187,0,"",python,selection_mouse +1988,3839782,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/sample.py",6182,6,"latent",python,selection_mouse +1989,3840489,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/sample.py",6188,0,"",python,selection_mouse +1990,3840928,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/sample.py",6137,0,"",python,selection_mouse +1991,3841149,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/sample.py",6133,6,"videos",python,selection_mouse +1992,3864554,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/sample.py",7504,0,"",python,selection_mouse +1993,3864719,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/sample.py",7502,5,"batch",python,selection_mouse +1994,3905107,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/sample.py",7531,0,"",python,selection_mouse +1995,3906142,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/sample.py",7488,0,"",python,selection_mouse +1996,3906282,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/sample.py",7488,1," ",python,selection_mouse +1997,3906331,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/sample.py",7488,2," S",python,selection_mouse +1998,3906535,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/sample.py",7488,3," S,",python,selection_mouse +1999,3906536,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/sample.py",7488,4," S, ",python,selection_mouse +2000,3906536,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/sample.py",7488,5," S, _",python,selection_mouse +2001,3906590,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/sample.py",7488,6," S, _,",python,selection_mouse +2002,3906591,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/sample.py",7488,7," S, _, ",python,selection_mouse +2003,3906623,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/sample.py",7488,8," S, _, _",python,selection_mouse +2004,3906704,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/sample.py",7488,9," S, _, _,",python,selection_mouse +2005,3906710,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/sample.py",7488,10," S, _, _, ",python,selection_mouse +2006,3906927,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/sample.py",7488,11," S, _, _, _",python,selection_mouse +2007,3907466,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/sample.py",7488,11,"",python,content +2008,3908238,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/sample.py",7487,1,"",python,content +2009,3910496,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/sample.py",7511,0,"[]",python,content 
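The first half of the fix lands here: rows 1996-2008 delete the extra targets of the five-way shape unpack and rows 2009-2014 index the batch dimension instead, since every other unpacked dimension now describes the one-frame conditioning clip. A runnable before/after reconstruction of that line (a reading of the recorded offsets, not a verbatim diff):

import jax.numpy as jnp

batch = {"videos": jnp.zeros((4, 1, 64, 64, 3))}

# Before: S silently picks up the truncated time axis (1).
B, S, _, _, _ = batch["videos"].shape

# After (rows 1996-2014): read only the batch size off the tensor.
B = batch["videos"].shape[0]
print(B, S)  # 4 1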
+2010,3910497,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/sample.py",7512,0,"",python,selection_keyboard +2011,3910674,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/sample.py",7512,0,"0",python,content +2012,3910675,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/sample.py",7513,0,"",python,selection_keyboard +2013,3910827,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/sample.py",7513,1,"]",python,content +2014,3910828,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/sample.py",7514,0,"",python,selection_keyboard +2015,3916255,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/sample.py",7612,0,"",python,selection_mouse +2016,3916600,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/sample.py",7611,1,"",python,content +2017,3916893,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/sample.py",7611,0,"a",python,content +2018,3916895,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/sample.py",7612,0,"",python,selection_keyboard +2019,3917211,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/sample.py",7612,0,"r",python,content +2020,3917212,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/sample.py",7613,0,"",python,selection_keyboard +2021,3917375,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/sample.py",7613,0,"g",python,content +2022,3917377,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/sample.py",7614,0,"",python,selection_keyboard +2023,3917538,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/sample.py",7614,0,"s",python,content +2024,3917540,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/sample.py",7615,0,"",python,selection_keyboard +2025,3917980,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/sample.py",7615,0,".",python,content +2026,3917982,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/sample.py",7616,0,"",python,selection_keyboard +2027,3918356,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/sample.py",7616,0,"s",python,content +2028,3918358,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/sample.py",7617,0,"",python,selection_keyboard +2029,3918552,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/sample.py",7617,0,"e",python,content +2030,3918553,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/sample.py",7618,0,"",python,selection_keyboard +2031,3918767,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/sample.py",7618,0,"q",python,content +2032,3918768,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/sample.py",7619,0,"",python,selection_keyboard +2033,3919379,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/sample.py",7619,0,"_",python,content +2034,3919381,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/sample.py",7620,0,"",python,selection_keyboard +2035,3919657,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/sample.py",7620,0,"l",python,content +2036,3919658,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/sample.py",7621,0,"",python,selection_keyboard +2037,3919793,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/sample.py",7621,0,"e",python,content 
+2038,3919795,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/sample.py",7622,0,"",python,selection_keyboard +2039,3919932,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/sample.py",7622,0,"n",python,content +2040,3919934,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/sample.py",7623,0,"",python,selection_keyboard +2041,3920419,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/sample.py",7642,0,"",python,selection_command +2042,3920611,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/sample.py",7652,0,"",python,selection_command +2043,3920790,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/sample.py",7673,0,"",python,selection_command +2044,3923651,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/sample.py",7736,0,"a",python,content +2045,3923653,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/sample.py",7737,0,"",python,selection_keyboard +2046,3923988,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/sample.py",7737,0,"r",python,content +2047,3923990,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/sample.py",7738,0,"",python,selection_keyboard +2048,3924151,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/sample.py",7738,0,"g",python,content +2049,3924152,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/sample.py",7739,0,"",python,selection_keyboard +2050,3924295,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/sample.py",7739,0,"s",python,content +2051,3924296,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/sample.py",7740,0,"",python,selection_keyboard +2052,3924402,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/sample.py",7740,0,".",python,content +2053,3924404,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/sample.py",7741,0,"",python,selection_keyboard +2054,3924851,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/sample.py",7740,1,"",python,content +2055,3924993,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/sample.py",7739,1,"",python,content +2056,3925185,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/sample.py",7738,1,"",python,content +2057,3925310,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/sample.py",7737,1,"",python,content +2058,3925408,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/sample.py",7736,1,"",python,content +2059,3925537,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/sample.py",7735,1,"",python,content +2060,3925646,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/sample.py",7735,0,"a",python,content +2061,3925646,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/sample.py",7736,0,"",python,selection_keyboard +2062,3925813,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/sample.py",7736,0,"r",python,content +2063,3925815,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/sample.py",7737,0,"",python,selection_keyboard +2064,3925926,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/sample.py",7737,0,"g",python,content +2065,3925927,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/sample.py",7738,0,"",python,selection_keyboard 
+2066,3926030,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/sample.py",7738,0,"s",python,content +2067,3926031,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/sample.py",7739,0,"",python,selection_keyboard +2068,3926245,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/sample.py",7739,0,".",python,content +2069,3926247,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/sample.py",7740,0,"",python,selection_keyboard +2070,3926403,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/sample.py",7740,0,"s",python,content +2071,3926404,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/sample.py",7741,0,"",python,selection_keyboard +2072,3926571,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/sample.py",7741,0,"e",python,content +2073,3926572,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/sample.py",7742,0,"",python,selection_keyboard +2074,3926728,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/sample.py",7742,0,"q",python,content +2075,3926729,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/sample.py",7743,0,"",python,selection_keyboard +2076,3926874,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/sample.py",7743,0,"_",python,content +2077,3926875,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/sample.py",7744,0,"",python,selection_keyboard +2078,3927147,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/sample.py",7744,0,"l",python,content +2079,3927148,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/sample.py",7745,0,"",python,selection_keyboard +2080,3927330,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/sample.py",7745,0,"e",python,content +2081,3927331,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/sample.py",7746,0,"",python,selection_keyboard +2082,3927408,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/sample.py",7746,0,"n",python,content +2083,3927409,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/sample.py",7747,0,"",python,selection_keyboard +2084,3930781,"TERMINAL",0,0,"^Csrun: interrupt (one more within 1 sec to abort)\r\nsrun: StepId=3512808.4 task 0: running\r\n",,terminal_output +2085,3931380,"TERMINAL",0,0,"(Pdb) \r\nTraceback (most recent call last):\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/sample.py"", line 241, in \r\n action_batch_BSm11 = jnp.reshape(batch[""actions""][:, :-1], (B, args.seq_len - 1, 1))\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/sample.py"", line 241, in \r\n action_batch_BSm11 = jnp.reshape(batch[""actions""][:, :-1], (B, args.seq_len - 1, 1))\r\n File ""/home/hk-project-p0023960/tum_cte0515/.local/share/uv/python/cpython-3.10.18-linux-x86_64-gnu/lib/python3.10/bdb.py"", line 90, in trace_dispatch\r\n return self.dispatch_line(frame)\r\n File ""/home/hk-project-p0023960/tum_cte0515/.local/share/uv/python/cpython-3.10.18-linux-x86_64-gnu/lib/python3.10/bdb.py"", line 115, in dispatch_line\r\n if self.quitting: raise BdbQuit\r\nbdb.BdbQuit\r\n",,terminal_output +2086,3932369,"TERMINAL",0,0,"srun: error: hkn0401: task 0: Exited with exit code 1\r\n]0;tum_cte0515@hkn0401:~/Projects/jasmine[?2004h(jasmine) [tum_cte0515@hkn0401 jasmine]$ ",,terminal_output 
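Rows 2016-2083 complete the fix by swapping S for args.seq_len in both reshape branches (one false start is backspaced and retyped along the way), and the BdbQuit traceback at row 2085 shows the resulting line verbatim: action_batch_BSm11 = jnp.reshape(batch["actions"][:, :-1], (B, args.seq_len - 1, 1)). A standalone check that the corrected target now fits:

import jax.numpy as jnp

B, seq_len = 4, 16
actions = jnp.zeros((B, seq_len))

# The full rollout length, not the truncated S, sizes the per-frame overlay.
action_batch_BSm11 = jnp.reshape(actions[:, :-1], (B, seq_len - 1, 1))
print(action_batch_BSm11.shape)  # (4, 15, 1)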
+2087,3934140,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/sample.py",0,0,"",python,tab +2088,3934966,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/sample.py",7746,0,"",python,selection_command +2089,3935384,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/sample.py",7663,0,"",python,selection_mouse +2090,3935399,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/sample.py",7662,0,"",python,selection_command +2091,3935882,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/sample.py",7643,21,"",python,content +2092,3935955,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/sample.py",7651,0,"",python,selection_command +2093,3937840,"TERMINAL",0,0,"sh slurm/jobs/mihir/horeka/breakout/sample_maskgit.sbatch",,terminal_output +2094,3938189,"TERMINAL",0,0,"\r\n[?2004l\r",,terminal_output +2095,3938243,"TERMINAL",0,0,"Sampling from checkpoint: /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/breakout/dyn/interactive/3512647\r\n",,terminal_output +2096,3938386,"TERMINAL",0,0,"GpuFreq=control_disabled\r\n",,terminal_output +2097,3946869,"TERMINAL",0,0,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/.venv/lib/python3.10/site-packages/orbax/checkpoint/_src/serialization/type_handlers.py:1269: UserWarning: Sharding info not provided when restoring. Populating sharding info from sharding file. Please note restoration time will be slightly increased due to reading from file. Note also that this option is unsafe when restoring on a different topology than the checkpoint was saved with.\r\n warnings.warn(\r\n",,terminal_output +2098,3949098,"TERMINAL",0,0,"2025-09-23 13:56:48.469565: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +2099,3956955,"TERMINAL",0,0,"2025-09-23 13:56:56.256598: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +2100,3958983,"TERMINAL",0,0,"2025-09-23 13:56:58.228641: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +2101,3970789,"TERMINAL",0,0,"2025-09-23 13:57:10.137093: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. 
Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +2102,3980999,"TERMINAL",0,0,"SSIM: 0.017371613532304764\r\n",,terminal_output +2103,3981968,"TERMINAL",0,0,"]0;tum_cte0515@hkn0401:~/Projects/jasmine[?2004h(jasmine) [tum_cte0515@hkn0401 jasmine]$ ",,terminal_output +2104,3997280,"TERMINAL",0,0,"sh slurm/jobs/mihir/horeka/breakout/sample_maskgit.sbatch",,terminal_output +2105,3998901,"TERMINAL",0,0,"\r\n[?2004l\r",,terminal_output +2106,3999012,"TERMINAL",0,0,"Sampling from checkpoint: /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/breakout/dyn/interactive/3512647\r\n",,terminal_output +2107,3999125,"TERMINAL",0,0,"GpuFreq=control_disabled\r\n",,terminal_output +2108,4007521,"TERMINAL",0,0,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/.venv/lib/python3.10/site-packages/orbax/checkpoint/_src/serialization/type_handlers.py:1269: UserWarning: Sharding info not provided when restoring. Populating sharding info from sharding file. Please note restoration time will be slightly increased due to reading from file. Note also that this option is unsafe when restoring on a different topology than the checkpoint was saved with.\r\n warnings.warn(\r\n",,terminal_output +2109,4009467,"TERMINAL",0,0,"2025-09-23 13:57:48.727198: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +2110,4017044,"TERMINAL",0,0,"2025-09-23 13:57:56.351699: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +2111,4018875,"TERMINAL",0,0,"2025-09-23 13:57:58.246100: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +2112,4030759,"TERMINAL",0,0,"2025-09-23 13:58:10.128976: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. 
Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +2113,4040917,"TERMINAL",0,0,"SSIM: 0.017371613532304764\r\n",,terminal_output +2114,4041943,"TERMINAL",0,0,"]0;tum_cte0515@hkn0401:~/Projects/jasmine[?2004h(jasmine) [tum_cte0515@hkn0401 jasmine]$ ",,terminal_output +2115,4065429,"slurm/jobs/mihir/horeka/breakout/default_runs/train_dyn_single_gpu_gt_actions.sh",0,0,"#!/usr/bin/env bash\n\n#SBATCH --nodes=1\n#SBATCH --ntasks-per-node=1\n#SBATCH --time=48:00:00\n#SBATCH --partition=accelerated\n#SBATCH --cpus-per-task=5\n#SBATCH --gres=gpu:1\n#SBATCH --output=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir/breakout/dyn/%x_%j.log\n#SBATCH --error=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir/breakout/dyn/%x_%j.log\n#SBATCH --job-name=train_dyn_default_breakout_longer\n#SBATCH --requeue\n#SBATCH --signal=b:usr1@300 # 5 min before timeout\n\n# Log the sbatch script\ncat $0\n\nmodule unload mpi/openmpi/5.0\nmodule unload devel/cuda/12.4\nsource .venv/bin/activate\n\narray_records_dir_train=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_breakout/breakout_episodes_perfect/train\narray_records_dir_val=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_breakout/breakout_episodes_perfect/val\n\njob_name=$SLURM_JOB_NAME\nslurm_job_id=$SLURM_JOB_ID\n\nCHECKPOINT_DIR=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/breakout/dyn/$job_name/$slurm_job_id\nmkdir -p $CHECKPOINT_DIR\n\ntokenizer_checkpoint=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/breakout/tokenizer/interactive/3512502\n\nenv | grep SLURM\n\nexport PYTHONUNBUFFERED=1\n\nsrun python train_dynamics.py \\n --save_ckpt \\n --image_height=10 \\n --image_width=10 \\n --ckpt_dir $CHECKPOINT_DIR \\n --batch_size=120 \\n --init_lr=0 \\n --max_lr=1e-5 \\n --log_image_interval=250 \\n --log_checkpoint_interval=250 \\n --dyna_type=maskgit \\n --log \\n --name=breakout-dyn-default-gt-actions-$slurm_job_id \\n --tags dyn breakout default \\n --entity instant-uv \\n --project jafar \\n --patch_size 4 \\n --lam_patch_size 4 \\n --warmup_steps 100 \\n --wsd_decay_steps 1000 \\n --num_steps 5000 \\n --use_gt_actions \\n --data_dir $array_records_dir_train \\n --val_data_dir $array_records_dir_val \\n --tokenizer_checkpoint $tokenizer_checkpoint \\n --val_interval 750 \\n --eval_full_frame \\n",shellscript,tab +2116,4067599,"TERMINAL",0,0,"salloc",,terminal_focus +2117,4069779,"TERMINAL",0,0,"srun",,terminal_focus +2118,4070419,"TERMINAL",0,0,"sh slurm/jobs/mihir/horeka/breakout/sample_maskgit.sbatch",,terminal_output +2119,4071647,"TERMINAL",0,0,"bash",,terminal_focus +2120,4072547,"TERMINAL",0,0,"queue",,terminal_command +2121,4072595,"TERMINAL",0,0,"]633;C[?1049h(B[?7hEvery 1.0s: squeue --mehkn1990.localdomain: Tue Sep 23 13:58:51 2025JOBID PARTITION NAME USER ST\tTIME NODES NODELIST(REASON)3511159 accelerat train_dy tum_cte0 PD\t0:00\t 1 (Priority)3512804 accelerat interact tum_cte0 PD\t0:00\t 1 (Priority)3511158 accelerat train_dy tum_cte0 R\t3:29\t 1 hkn04283512808 dev_accel interact tum_cte0 R58:01\t 1 hkn04013512651 large preproce tum_cte0 R 1:55:06\t 1 hkn1901",,terminal_output +2122,4073672,"TERMINAL",0,0,"23027",,terminal_output +2123,4074288,"TERMINAL",0,0,"srun",,terminal_focus +2124,4074482,"slurm/jobs/mihir/horeka/breakout/sample_maskgit.sbatch",0,0,"",shellscript,tab +2125,4074792,"TERMINAL",0,0,"3249",,terminal_output 
+2126,4075700,"TERMINAL",0,0,"53510",,terminal_output +2127,4076826,"TERMINAL",0,0,"6461",,terminal_output +2128,4077949,"TERMINAL",0,0,"7572",,terminal_output +2129,4078947,"TERMINAL",0,0,"8683",,terminal_output +2130,4079668,"slurm/jobs/mihir/horeka/breakout/sample_maskgit.sbatch",741,0,"",shellscript,selection_mouse +2131,4079876,"slurm/jobs/mihir/horeka/breakout/sample_maskgit.sbatch",740,1,"t",shellscript,selection_mouse +2132,4079953,"slurm/jobs/mihir/horeka/breakout/sample_maskgit.sbatch",739,2,"st",shellscript,selection_mouse +2133,4080029,"TERMINAL",0,0,"9794",,terminal_output +2134,4080363,"slurm/jobs/mihir/horeka/breakout/sample_maskgit.sbatch",738,3,"est",shellscript,selection_mouse +2135,4080569,"slurm/jobs/mihir/horeka/breakout/sample_maskgit.sbatch",737,4,"test",shellscript,selection_mouse +2136,4080742,"slurm/jobs/mihir/horeka/breakout/sample_maskgit.sbatch",736,5,"_test",shellscript,selection_mouse +2137,4080777,"slurm/jobs/mihir/horeka/breakout/sample_maskgit.sbatch",735,6,"s_test",shellscript,selection_mouse +2138,4080846,"slurm/jobs/mihir/horeka/breakout/sample_maskgit.sbatch",734,7,"es_test",shellscript,selection_mouse +2139,4080853,"slurm/jobs/mihir/horeka/breakout/sample_maskgit.sbatch",733,8,"des_test",shellscript,selection_mouse +2140,4080877,"slurm/jobs/mihir/horeka/breakout/sample_maskgit.sbatch",732,9,"odes_test",shellscript,selection_mouse +2141,4080901,"slurm/jobs/mihir/horeka/breakout/sample_maskgit.sbatch",730,11,"isodes_test",shellscript,selection_mouse +2142,4080923,"slurm/jobs/mihir/horeka/breakout/sample_maskgit.sbatch",729,12,"pisodes_test",shellscript,selection_mouse +2143,4080945,"slurm/jobs/mihir/horeka/breakout/sample_maskgit.sbatch",728,13,"episodes_test",shellscript,selection_mouse +2144,4080970,"slurm/jobs/mihir/horeka/breakout/sample_maskgit.sbatch",727,14,"_episodes_test",shellscript,selection_mouse +2145,4080994,"slurm/jobs/mihir/horeka/breakout/sample_maskgit.sbatch",725,16,"un_episodes_test",shellscript,selection_mouse +2146,4081031,"slurm/jobs/mihir/horeka/breakout/sample_maskgit.sbatch",724,17,"run_episodes_test",shellscript,selection_mouse +2147,4081070,"slurm/jobs/mihir/horeka/breakout/sample_maskgit.sbatch",723,18,"nrun_episodes_test",shellscript,selection_mouse +2148,4081104,"slurm/jobs/mihir/horeka/breakout/sample_maskgit.sbatch",722,19,"inrun_episodes_test",shellscript,selection_mouse +2149,4081126,"TERMINAL",0,0,"9:008105",,terminal_output +2150,4081197,"slurm/jobs/mihir/horeka/breakout/sample_maskgit.sbatch",721,20,"oinrun_episodes_test",shellscript,selection_mouse +2151,4081295,"slurm/jobs/mihir/horeka/breakout/sample_maskgit.sbatch",720,21,"coinrun_episodes_test",shellscript,selection_mouse +2152,4081368,"slurm/jobs/mihir/horeka/breakout/sample_maskgit.sbatch",719,22,"/coinrun_episodes_test",shellscript,selection_mouse +2153,4081442,"slurm/jobs/mihir/horeka/breakout/sample_maskgit.sbatch",718,23,"n/coinrun_episodes_test",shellscript,selection_mouse +2154,4081506,"slurm/jobs/mihir/horeka/breakout/sample_maskgit.sbatch",717,24,"un/coinrun_episodes_test",shellscript,selection_mouse +2155,4081641,"slurm/jobs/mihir/horeka/breakout/sample_maskgit.sbatch",716,25,"run/coinrun_episodes_test",shellscript,selection_mouse +2156,4081749,"slurm/jobs/mihir/horeka/breakout/sample_maskgit.sbatch",715,26,"nrun/coinrun_episodes_test",shellscript,selection_mouse +2157,4081849,"slurm/jobs/mihir/horeka/breakout/sample_maskgit.sbatch",714,27,"inrun/coinrun_episodes_test",shellscript,selection_mouse 
+2158,4082150,"slurm/jobs/mihir/horeka/breakout/sample_maskgit.sbatch",713,28,"oinrun/coinrun_episodes_test",shellscript,selection_mouse +2159,4082217,"TERMINAL",0,0,"1916",,terminal_output +2160,4082794,"slurm/jobs/mihir/horeka/breakout/sample_maskgit.sbatch",712,29,"coinrun/coinrun_episodes_test",shellscript,selection_mouse +2161,4083129,"TERMINAL",0,0,"24027",,terminal_output +2162,4084115,"slurm/jobs/mihir/horeka/breakout/sample_maskgit.sbatch",712,29,"",shellscript,content +2163,4084283,"TERMINAL",0,0,"3138",,terminal_output +2164,4084449,"slurm/jobs/mihir/horeka/breakout/sample_maskgit.sbatch",712,0,"b",shellscript,content +2165,4084449,"slurm/jobs/mihir/horeka/breakout/sample_maskgit.sbatch",713,0,"",shellscript,selection_keyboard +2166,4084544,"slurm/jobs/mihir/horeka/breakout/sample_maskgit.sbatch",713,0,"r",shellscript,content +2167,4084545,"slurm/jobs/mihir/horeka/breakout/sample_maskgit.sbatch",714,0,"",shellscript,selection_keyboard +2168,4084741,"slurm/jobs/mihir/horeka/breakout/sample_maskgit.sbatch",714,0,"e",shellscript,content +2169,4084742,"slurm/jobs/mihir/horeka/breakout/sample_maskgit.sbatch",715,0,"",shellscript,selection_keyboard +2170,4084987,"slurm/jobs/mihir/horeka/breakout/sample_maskgit.sbatch",715,0,"a",shellscript,content +2171,4084988,"slurm/jobs/mihir/horeka/breakout/sample_maskgit.sbatch",716,0,"",shellscript,selection_keyboard +2172,4085166,"slurm/jobs/mihir/horeka/breakout/sample_maskgit.sbatch",716,0,"k",shellscript,content +2173,4085167,"slurm/jobs/mihir/horeka/breakout/sample_maskgit.sbatch",717,0,"",shellscript,selection_keyboard +2174,4085275,"TERMINAL",0,0,"4249",,terminal_output +2175,4085356,"slurm/jobs/mihir/horeka/breakout/sample_maskgit.sbatch",717,0,"p",shellscript,content +2176,4085357,"slurm/jobs/mihir/horeka/breakout/sample_maskgit.sbatch",718,0,"",shellscript,selection_keyboard +2177,4085699,"slurm/jobs/mihir/horeka/breakout/sample_maskgit.sbatch",718,0,"u",shellscript,content +2178,4085700,"slurm/jobs/mihir/horeka/breakout/sample_maskgit.sbatch",719,0,"",shellscript,selection_keyboard +2179,4085996,"slurm/jobs/mihir/horeka/breakout/sample_maskgit.sbatch",718,1,"",shellscript,content +2180,4086102,"slurm/jobs/mihir/horeka/breakout/sample_maskgit.sbatch",717,1,"",shellscript,content +2181,4086274,"TERMINAL",0,0,"53520",,terminal_output +2182,4087092,"slurm/jobs/mihir/horeka/breakout/sample_maskgit.sbatch",717,0,"o",shellscript,content +2183,4087093,"slurm/jobs/mihir/horeka/breakout/sample_maskgit.sbatch",718,0,"",shellscript,selection_keyboard +2184,4087292,"slurm/jobs/mihir/horeka/breakout/sample_maskgit.sbatch",718,0,"u",shellscript,content +2185,4087293,"slurm/jobs/mihir/horeka/breakout/sample_maskgit.sbatch",719,0,"",shellscript,selection_keyboard +2186,4087388,"slurm/jobs/mihir/horeka/breakout/sample_maskgit.sbatch",719,0,"t",shellscript,content +2187,4087389,"slurm/jobs/mihir/horeka/breakout/sample_maskgit.sbatch",720,0,"",shellscript,selection_keyboard +2188,4087440,"TERMINAL",0,0,"6461",,terminal_output +2189,4087766,"slurm/jobs/mihir/horeka/breakout/sample_maskgit.sbatch",720,0,"/",shellscript,content +2190,4087766,"slurm/jobs/mihir/horeka/breakout/sample_maskgit.sbatch",721,0,"",shellscript,selection_keyboard +2191,4088417,"TERMINAL",0,0,"7572",,terminal_output +2192,4088698,"slurm/jobs/mihir/horeka/breakout/sample_maskgit.sbatch",721,0,"b",shellscript,content +2193,4088699,"slurm/jobs/mihir/horeka/breakout/sample_maskgit.sbatch",722,0,"",shellscript,selection_keyboard 
+2194,4088851,"slurm/jobs/mihir/horeka/breakout/sample_maskgit.sbatch",722,0,"r",shellscript,content +2195,4088852,"slurm/jobs/mihir/horeka/breakout/sample_maskgit.sbatch",723,0,"",shellscript,selection_keyboard +2196,4088983,"slurm/jobs/mihir/horeka/breakout/sample_maskgit.sbatch",723,0,"e",shellscript,content +2197,4088984,"slurm/jobs/mihir/horeka/breakout/sample_maskgit.sbatch",724,0,"",shellscript,selection_keyboard +2198,4089329,"slurm/jobs/mihir/horeka/breakout/sample_maskgit.sbatch",724,0,"a",shellscript,content +2199,4089330,"slurm/jobs/mihir/horeka/breakout/sample_maskgit.sbatch",725,0,"",shellscript,selection_keyboard +2200,4089421,"slurm/jobs/mihir/horeka/breakout/sample_maskgit.sbatch",725,0,"k",shellscript,content +2201,4089422,"slurm/jobs/mihir/horeka/breakout/sample_maskgit.sbatch",726,0,"",shellscript,selection_keyboard +2202,4089436,"TERMINAL",0,0,"8683",,terminal_output +2203,4089636,"slurm/jobs/mihir/horeka/breakout/sample_maskgit.sbatch",726,0,"p",shellscript,content +2204,4089637,"slurm/jobs/mihir/horeka/breakout/sample_maskgit.sbatch",727,0,"",shellscript,selection_keyboard +2205,4089890,"slurm/jobs/mihir/horeka/breakout/sample_maskgit.sbatch",727,0,"u",shellscript,content +2206,4089890,"slurm/jobs/mihir/horeka/breakout/sample_maskgit.sbatch",728,0,"",shellscript,selection_keyboard +2207,4090027,"slurm/jobs/mihir/horeka/breakout/sample_maskgit.sbatch",728,0,"t",shellscript,content +2208,4090028,"slurm/jobs/mihir/horeka/breakout/sample_maskgit.sbatch",729,0,"",shellscript,selection_keyboard +2209,4090473,"slurm/jobs/mihir/horeka/breakout/sample_maskgit.sbatch",728,1,"",shellscript,content +2210,4090491,"TERMINAL",0,0,"9794",,terminal_output +2211,4090605,"slurm/jobs/mihir/horeka/breakout/sample_maskgit.sbatch",727,1,"",shellscript,content +2212,4090795,"slurm/jobs/mihir/horeka/breakout/sample_maskgit.sbatch",726,1,"",shellscript,content +2213,4091229,"slurm/jobs/mihir/horeka/breakout/sample_maskgit.sbatch",726,0,"o",shellscript,content +2214,4091230,"slurm/jobs/mihir/horeka/breakout/sample_maskgit.sbatch",727,0,"",shellscript,selection_keyboard +2215,4091397,"slurm/jobs/mihir/horeka/breakout/sample_maskgit.sbatch",727,0,"u",shellscript,content +2216,4091398,"slurm/jobs/mihir/horeka/breakout/sample_maskgit.sbatch",728,0,"",shellscript,selection_keyboard +2217,4091519,"slurm/jobs/mihir/horeka/breakout/sample_maskgit.sbatch",728,0,"t",shellscript,content +2218,4091520,"slurm/jobs/mihir/horeka/breakout/sample_maskgit.sbatch",729,0,"",shellscript,selection_keyboard +2219,4091539,"TERMINAL",0,0,"108205",,terminal_output +2220,4091768,"slurm/jobs/mihir/horeka/breakout/sample_maskgit.sbatch",729,0,"_",shellscript,content +2221,4091769,"slurm/jobs/mihir/horeka/breakout/sample_maskgit.sbatch",730,0,"",shellscript,selection_keyboard +2222,4091907,"slurm/jobs/mihir/horeka/breakout/sample_maskgit.sbatch",730,0,"e",shellscript,content +2223,4091908,"slurm/jobs/mihir/horeka/breakout/sample_maskgit.sbatch",731,0,"",shellscript,selection_keyboard +2224,4092583,"slurm/jobs/mihir/horeka/breakout/sample_maskgit.sbatch",731,0,"p",shellscript,content +2225,4092584,"slurm/jobs/mihir/horeka/breakout/sample_maskgit.sbatch",732,0,"",shellscript,selection_keyboard +2226,4092636,"TERMINAL",0,0,"1916",,terminal_output +2227,4092747,"slurm/jobs/mihir/horeka/breakout/sample_maskgit.sbatch",732,0,"i",shellscript,content +2228,4092748,"slurm/jobs/mihir/horeka/breakout/sample_maskgit.sbatch",733,0,"",shellscript,selection_keyboard 
+2229,4092874,"slurm/jobs/mihir/horeka/breakout/sample_maskgit.sbatch",733,0,"s",shellscript,content +2230,4092875,"slurm/jobs/mihir/horeka/breakout/sample_maskgit.sbatch",734,0,"",shellscript,selection_keyboard +2231,4092962,"slurm/jobs/mihir/horeka/breakout/sample_maskgit.sbatch",734,0,"o",shellscript,content +2232,4092963,"slurm/jobs/mihir/horeka/breakout/sample_maskgit.sbatch",735,0,"",shellscript,selection_keyboard +2233,4093166,"slurm/jobs/mihir/horeka/breakout/sample_maskgit.sbatch",735,0,"d",shellscript,content +2234,4093167,"slurm/jobs/mihir/horeka/breakout/sample_maskgit.sbatch",736,0,"",shellscript,selection_keyboard +2235,4093230,"slurm/jobs/mihir/horeka/breakout/sample_maskgit.sbatch",736,0,"e",shellscript,content +2236,4093231,"slurm/jobs/mihir/horeka/breakout/sample_maskgit.sbatch",737,0,"",shellscript,selection_keyboard +2237,4093489,"slurm/jobs/mihir/horeka/breakout/sample_maskgit.sbatch",737,0,"s",shellscript,content +2238,4093490,"slurm/jobs/mihir/horeka/breakout/sample_maskgit.sbatch",738,0,"",shellscript,selection_keyboard +2239,4093602,"slurm/jobs/mihir/horeka/breakout/sample_maskgit.sbatch",738,0,"_",shellscript,content +2240,4093603,"slurm/jobs/mihir/horeka/breakout/sample_maskgit.sbatch",739,0,"",shellscript,selection_keyboard +2241,4093644,"TERMINAL",0,0,"25027",,terminal_output +2242,4093969,"slurm/jobs/mihir/horeka/breakout/sample_maskgit.sbatch",739,0,"e",shellscript,content +2243,4093970,"slurm/jobs/mihir/horeka/breakout/sample_maskgit.sbatch",740,0,"",shellscript,selection_keyboard +2244,4094255,"slurm/jobs/mihir/horeka/breakout/sample_maskgit.sbatch",739,1,"",shellscript,content +2245,4094526,"slurm/jobs/mihir/horeka/breakout/sample_maskgit.sbatch",739,0,"p",shellscript,content +2246,4094527,"slurm/jobs/mihir/horeka/breakout/sample_maskgit.sbatch",740,0,"",shellscript,selection_keyboard +2247,4094620,"slurm/jobs/mihir/horeka/breakout/sample_maskgit.sbatch",740,0,"e",shellscript,content +2248,4094621,"slurm/jobs/mihir/horeka/breakout/sample_maskgit.sbatch",741,0,"",shellscript,selection_keyboard +2249,4094674,"TERMINAL",0,0,"3249",,terminal_output +2250,4094706,"slurm/jobs/mihir/horeka/breakout/sample_maskgit.sbatch",741,0,"r",shellscript,content +2251,4094707,"slurm/jobs/mihir/horeka/breakout/sample_maskgit.sbatch",742,0,"",shellscript,selection_keyboard +2252,4094896,"slurm/jobs/mihir/horeka/breakout/sample_maskgit.sbatch",742,0,"f",shellscript,content +2253,4094897,"slurm/jobs/mihir/horeka/breakout/sample_maskgit.sbatch",743,0,"",shellscript,selection_keyboard +2254,4095057,"slurm/jobs/mihir/horeka/breakout/sample_maskgit.sbatch",743,0,"e",shellscript,content +2255,4095058,"slurm/jobs/mihir/horeka/breakout/sample_maskgit.sbatch",744,0,"",shellscript,selection_keyboard +2256,4095187,"slurm/jobs/mihir/horeka/breakout/sample_maskgit.sbatch",744,0,"c",shellscript,content +2257,4095188,"slurm/jobs/mihir/horeka/breakout/sample_maskgit.sbatch",745,0,"",shellscript,selection_keyboard +2258,4095399,"slurm/jobs/mihir/horeka/breakout/sample_maskgit.sbatch",745,0,"t",shellscript,content +2259,4095400,"slurm/jobs/mihir/horeka/breakout/sample_maskgit.sbatch",746,0,"",shellscript,selection_keyboard +2260,4095741,"TERMINAL",0,0,"53530",,terminal_output +2261,4097085,"TERMINAL",0,0,"6461",,terminal_output +2262,4097865,"TERMINAL",0,0,"\r\n[?2004l\r",,terminal_output +2263,4097892,"TERMINAL",0,0,"7572",,terminal_output +2264,4097918,"TERMINAL",0,0,"Sampling from checkpoint: 
/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/breakout/dyn/interactive/3512647\r\n",,terminal_output +2265,4098046,"TERMINAL",0,0,"GpuFreq=control_disabled\r\n",,terminal_output +2266,4098872,"TERMINAL",0,0,"8683",,terminal_output +2267,4099195,"TERMINAL",0,0,"watch",,terminal_focus +2268,4099620,"TERMINAL",0,0,"[?1049l\r[?1l>]0;tum_cte0515@hkn1990:~/Projects/jasmine",,terminal_output +2269,4102570,"TERMINAL",0,0,"cd $ws_dir",,terminal_command +2270,4105920,"TERMINAL",0,0,"cd data_breakout/",,terminal_command +2271,4106455,"TERMINAL",0,0,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/.venv/lib/python3.10/site-packages/orbax/checkpoint/_src/serialization/type_handlers.py:1269: UserWarning: Sharding info not provided when restoring. Populating sharding info from sharding file. Please note restoration time will be slightly increased due to reading from file. Note also that this option is unsafe when restoring on a different topology than the checkpoint was saved with.\r\n warnings.warn(\r\n",,terminal_output +2272,4106472,"TERMINAL",0,0,"ls",,terminal_command +2273,4106522,"TERMINAL",0,0,"]633;C",,terminal_output +2274,4106732,"TERMINAL",0,0,"breakout_episodes_10m_gt_actions_split breakout_episodes_10m_gt_actions_split_old breakout_episodes_10m_gt_actions_split_small breakout_episodes_perfect_big\r\nbreakout_episodes_10m_gt_actions_split_2 breakout_episodes_10m_gt_actions_split_perfect breakout_episodes_perfect\r\n]0;tum_cte0515@hkn1990:/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_breakout",,terminal_output +2275,4107712,"TERMINAL",0,0,"Traceback (most recent call last):\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/sample.py"", line 202, in \r\n batch = next(dataloader)\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/.venv/lib/python3.10/site-packages/grain/_src/python/data_loader.py"", line 497, in __next__\r\n result_record = next(self._iterator)\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/.venv/lib/python3.10/site-packages/grain/_src/python/data_loader.py"", line 604, in _apply_transform\r\n for r in batch_op(input_iterator):\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/.venv/lib/python3.10/site-packages/grain/_src/python/operations.py"", line 152, in __call__\r\n for input_record in input_iterator:\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/.venv/lib/python3.10/site-packages/grain/_src/python/data_loader.py"", line 620, in _apply_transform\r\n raise e\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/.venv/lib/python3.10/site-packages/grain/_src/python/data_loader.py"", line 616, in _apply_transform\r\n output_record, filter_result = fn(input_record)\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/.venv/lib/python3.10/site-packages/grain/_src/python/data_loader.py"", line 586, in \r\n record.Record(r.metadata, transform.random_map(r.data, r.metadata.rng)),\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/utils/dataloader.py"", line 80, in random_map\r\n episode_tensor = episode_tensor.reshape(video_shape)\r\nValueError: cannot reshape array of size 4800 into shape (16,64,64,3)\r\n",,terminal_output +2276,4108281,"TERMINAL",0,0,"srun: error: hkn0401: task 0: Exited with exit code 1\r\n]0;tum_cte0515@hkn0401:~/Projects/jasmine[?2004h(jasmine) [tum_cte0515@hkn0401 jasmine]$ 
",,terminal_output +2277,4116085,"slurm/jobs/mihir/horeka/breakout/sample_maskgit.sbatch",0,0,"",shellscript,tab +2278,4118054,"slurm/jobs/mihir/horeka/breakout/sample_maskgit.sbatch",1101,0,"",shellscript,selection_mouse +2279,4118909,"slurm/jobs/mihir/horeka/breakout/sample_maskgit.sbatch",1102,0,"",shellscript,selection_command +2280,4120157,"slurm/jobs/mihir/horeka/breakout/sample_maskgit.sbatch",1101,1,"",shellscript,content +2281,4120264,"slurm/jobs/mihir/horeka/breakout/sample_maskgit.sbatch",1100,1,"",shellscript,content +2282,4120560,"slurm/jobs/mihir/horeka/breakout/sample_maskgit.sbatch",1100,0,"1",shellscript,content +2283,4120561,"slurm/jobs/mihir/horeka/breakout/sample_maskgit.sbatch",1101,0,"",shellscript,selection_keyboard +2284,4120664,"slurm/jobs/mihir/horeka/breakout/sample_maskgit.sbatch",1101,0,"0",shellscript,content +2285,4120665,"slurm/jobs/mihir/horeka/breakout/sample_maskgit.sbatch",1102,0,"",shellscript,selection_keyboard +2286,4121074,"slurm/jobs/mihir/horeka/breakout/sample_maskgit.sbatch",1101,0,"",shellscript,selection_command +2287,4121265,"slurm/jobs/mihir/horeka/breakout/sample_maskgit.sbatch",1125,0,"",shellscript,selection_command +2288,4121699,"slurm/jobs/mihir/horeka/breakout/sample_maskgit.sbatch",1124,1,"",shellscript,content +2289,4121804,"slurm/jobs/mihir/horeka/breakout/sample_maskgit.sbatch",1123,1,"",shellscript,content +2290,4121979,"slurm/jobs/mihir/horeka/breakout/sample_maskgit.sbatch",1123,0,"1",shellscript,content +2291,4121980,"slurm/jobs/mihir/horeka/breakout/sample_maskgit.sbatch",1124,0,"",shellscript,selection_keyboard +2292,4122059,"slurm/jobs/mihir/horeka/breakout/sample_maskgit.sbatch",1124,0,"0",shellscript,content +2293,4122060,"slurm/jobs/mihir/horeka/breakout/sample_maskgit.sbatch",1125,0,"",shellscript,selection_keyboard +2294,4123399,"TERMINAL",0,0,"srun",,terminal_focus +2295,4124018,"TERMINAL",0,0,"sh slurm/jobs/mihir/horeka/breakout/sample_maskgit.sbatch",,terminal_output +2296,4124220,"TERMINAL",0,0,"\r\n[?2004l\r",,terminal_output +2297,4124356,"TERMINAL",0,0,"Sampling from checkpoint: /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/breakout/dyn/interactive/3512647\r\n",,terminal_output +2298,4124479,"TERMINAL",0,0,"GpuFreq=control_disabled\r\n",,terminal_output +2299,4128431,"slurm/jobs/mihir/horeka/breakout/sample_maskgit.sbatch",0,0,"",shellscript,tab +2300,4132878,"TERMINAL",0,0,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/.venv/lib/python3.10/site-packages/orbax/checkpoint/_src/serialization/type_handlers.py:1269: UserWarning: Sharding info not provided when restoring. Populating sharding info from sharding file. Please note restoration time will be slightly increased due to reading from file. Note also that this option is unsafe when restoring on a different topology than the checkpoint was saved with.\r\n warnings.warn(\r\n",,terminal_output +2301,4135239,"TERMINAL",0,0,"2025-09-23 13:59:54.601387: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +2302,4141244,"TERMINAL",0,0,"2025-09-23 14:00:00.499682: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. 
Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +2303,4142792,"TERMINAL",0,0,"2025-09-23 14:00:02.063777: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +2304,4144697,"TERMINAL",0,0,"2025-09-23 14:00:04.063498: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +2305,4150891,"TERMINAL",0,0,"2025-09-23 14:00:10.255473: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +2306,4159380,"TERMINAL",0,0,"SSIM: 0.691791296005249\r\n",,terminal_output +2307,4160316,"TERMINAL",0,0,"]0;tum_cte0515@hkn0401:~/Projects/jasmine[?2004h(jasmine) [tum_cte0515@hkn0401 jasmine]$ ",,terminal_output +2308,4203737,"TERMINAL",0,0,"salloc: Job 3512808 has exceeded its time limit and its allocation has been revoked.\nsrun: Job step aborted: Waiting up to 32 seconds for job step to finish.\r\nslurmstepd: error: *** STEP 3512808.interactive ON hkn0401 CANCELLED AT 2025-09-23T14:01:03 DUE TO TIME LIMIT ***\r\n",,terminal_output +2309,4233370,"TERMINAL",0,0,"srun: error: hkn0401: task 0: Killed\r\n]0;tum_cte0515@hkn1990:~/Projects/jasmine",,terminal_output +2310,4466324,"TERMINAL",0,0,"salloc --time=01:00:00 --partition=dev_accelerated --nodes=1 --gres=gpu:1 --cpus-per-task=8",,terminal_command +2311,4466379,"TERMINAL",0,0,"]633;Csalloc: Pending job allocation 3512949\r\nsalloc: job 3512949 queued and waiting for resources\r\n",,terminal_output +2312,4469771,"TERMINAL",0,0,"bash",,terminal_focus +2313,4471052,"TERMINAL",0,0,"queue",,terminal_command +2314,4471137,"TERMINAL",0,0,"]633;C[?1049h(B[?7hEvery 1.0s: squeue --mehkn1990.localdomain: Tue Sep 23 14:05:30 2025JOBID PARTITION NAME USER ST\tTIME NODES NODELIST(REASON)3511159 accelerat train_dy tum_cte0 PD\t0:00\t 1 (Priority)3512804 accelerat interact tum_cte0 PD\t0:00\t 1 (Priority)3511158 accelerat train_dy tum_cte0 R10:08\t 1 hkn04283512949 dev_accel interact tum_cte0 PD\t0:00\t 1 (Priority)3512651 large preproce tum_cte0 R 2:01:45\t 1 hkn1901",,terminal_output +2315,4472216,"TERMINAL",0,0,"196",,terminal_output +2316,4473251,"TERMINAL",0,0,"2107",,terminal_output +2317,4474368,"TERMINAL",0,0,"318",,terminal_output +2318,4475390,"TERMINAL",0,0,"429",,terminal_output +2319,4476415,"TERMINAL",0,0,"5350",,terminal_output +2320,4477439,"TERMINAL",0,0,"641",,terminal_output +2321,4478450,"TERMINAL",0,0,"752",,terminal_output +2322,4478863,"TERMINAL",0,0,"[?1049l\r[?1l>]0;tum_cte0515@hkn1990:/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_breakout",,terminal_output +2323,4482051,"TERMINAL",0,0,"salloc: job 3512949 has been allocated resources\r\nsalloc: Granted job allocation 3512949\r\n",,terminal_output +2324,4482118,"TERMINAL",0,0,"salloc: Waiting for resource 
configuration\r\n",,terminal_output +2325,4509168,"TERMINAL",0,0,"salloc: Nodes hkn0402 are ready for job\r\n",,terminal_output +2326,4510060,"TERMINAL",0,0,"]0;tum_cte0515@hkn0402:~/Projects/jasmine[?2004h[tum_cte0515@hkn0402 jasmine]$ ",,terminal_output +2327,4531700,"input_pipeline/generate_breakout_dataset.py",0,0,"""""""\nGenerates a dataset of random-action Breakout episodes using MinAtar.\nEpisodes are saved individually as memory-mapped files for efficient loading.\n""""""\n\nfrom dataclasses import dataclass\nimport numpy as np\nimport os\nimport json\nimport tyro\n\nfrom minatar import Environment\nfrom utils import save_chunks\n\n\n@dataclass\nclass Args:\n num_episodes_train: int = 5000000\n num_episodes_val: int = 25000\n num_episodes_test: int = 25000\n output_dir: str = ""data/breakout_episodes""\n min_episode_length: int = 20\n max_episode_length: int = 500\n chunk_size: int = 50\n chunks_per_file: int = 100\n seed: int = 0\n\n\nargs = tyro.cli(Args)\n\nassert (\n args.max_episode_length >= args.min_episode_length\n), ""Maximum episode length must be >= minimum episode length.""\nif args.min_episode_length < args.chunk_size:\n print(\n ""Warning: Minimum episode length is smaller than chunk size. ""\n ""Episodes shorter than the chunk size will be discarded.""\n )\n\n\ndef _obs_to_rgb(obs):\n # Define a color matrix for each boolean combination\n color_matrix = np.array(\n [\n [0, 0, 0], # Black for all False\n [128, 0, 0], # Maroon\n [0, 128, 0], # Dark Green\n [0, 0, 128], # Navy\n [128, 128, 0], # Olive\n [128, 0, 128], # Purple\n [0, 128, 128], # Teal\n [192, 192, 192], # Silver\n [128, 128, 128], # Gray\n [255, 0, 0], # Red\n [0, 255, 0], # Green\n [0, 0, 255], # Blue\n [255, 255, 0], # Yellow\n [255, 0, 255], # Magenta\n [0, 255, 255], # Cyan\n [255, 255, 255], # White\n ],\n dtype=np.uint8,\n )\n\n # Convert boolean array to integer indices\n indices = obs.dot(1 << np.arange(obs.shape[-1] - 1, -1, -1))\n\n # Map indices to colors using matrix multiplication\n obs = color_matrix[indices]\n return obs\n\n\ndef generate_episodes(num_episodes: int, split: str):\n episode_idx = 0\n episode_metadata = []\n obs_chunks = []\n act_chunks = []\n file_idx = 0\n output_dir_split = os.path.join(args.output_dir, split)\n os.makedirs(output_dir_split, exist_ok=True)\n\n while episode_idx < num_episodes:\n env = Environment(""breakout"", sticky_action_prob=0.0) # typical MinAtar setup\n env.reset()\n obs_seq, act_seq = [], []\n episode_obs_chunks, episode_act_chunks = [], []\n\n step_t = 0\n for step_t in range(args.max_episode_length):\n obs = env.state() # shape: (10,10,num_channels)\n obs = _obs_to_rgb(obs)\n action = np.random.randint(env.num_actions())\n _, done = env.act(action)\n obs_seq.append(obs.astype(np.uint8))\n act_seq.append(action)\n\n if len(obs_seq) == args.chunk_size:\n episode_obs_chunks.append(np.stack(obs_seq))\n episode_act_chunks.append(np.array(act_seq))\n obs_seq, act_seq = [], []\n\n if done:\n break\n\n if step_t + 1 >= args.min_episode_length:\n if obs_seq:\n if len(obs_seq) < args.chunk_size:\n print(\n f""Warning: Inconsistent chunk sizes. 
Episode has {len(obs_seq)} frames ""\n f""(less than chunk_size {args.chunk_size}).""\n )\n episode_obs_chunks.append(np.stack(obs_seq))\n episode_act_chunks.append(np.array(act_seq))\n\n obs_chunks_data = episode_obs_chunks\n act_chunks_data = episode_act_chunks\n obs_chunks.extend(obs_chunks_data)\n act_chunks.extend(act_chunks_data)\n\n ep_metadata, file_idx, obs_chunks, act_chunks = save_chunks(\n file_idx, args.chunks_per_file, output_dir_split, obs_chunks, act_chunks\n )\n episode_metadata.extend(ep_metadata)\n\n print(f""[{split}] Episode {episode_idx} completed, length: {step_t + 1}"")\n episode_idx += 1\n else:\n print(f""Episode too short ({step_t + 1}), resampling..."")\n\n if len(obs_chunks) > 0:\n print(\n f""Warning: Dropping {len(obs_chunks)} leftover chunks. ""\n ""Consider adjusting chunk_size or chunks_per_file.""\n )\n\n print(f""Done generating {split} split."")\n return episode_metadata\n\n\ndef get_action_space() -> int:\n env = Environment(""breakout"")\n return env.num_actions()\n\n\ndef main():\n np.random.seed(args.seed)\n os.makedirs(args.output_dir, exist_ok=True)\n\n train_meta = generate_episodes(args.num_episodes_train, ""train"")\n val_meta = generate_episodes(args.num_episodes_val, ""val"")\n test_meta = generate_episodes(args.num_episodes_test, ""test"")\n\n metadata = {\n ""env"": ""MinAtar-Breakout"",\n ""num_actions"": get_action_space(),\n ""num_episodes_train"": args.num_episodes_train,\n ""num_episodes_val"": args.num_episodes_val,\n ""num_episodes_test"": args.num_episodes_test,\n ""avg_episode_len_train"": float(\n np.mean([ep[""avg_seq_len""] for ep in train_meta])\n ),\n ""avg_episode_len_val"": float(np.mean([ep[""avg_seq_len""] for ep in val_meta])),\n ""avg_episode_len_test"": float(np.mean([ep[""avg_seq_len""] for ep in test_meta])),\n ""episode_metadata_train"": train_meta,\n ""episode_metadata_val"": val_meta,\n ""episode_metadata_test"": test_meta,\n }\n\n with open(os.path.join(args.output_dir, ""metadata.json""), ""w"") as f:\n json.dump(metadata, f)\n\n print(""Done generating dataset."")\n\n\nif __name__ == ""__main__"":\n main()\n",python,tab +2328,4537718,"TERMINAL",0,0,"salloc",,terminal_focus +2329,4546088,"TERMINAL",0,0,"bash",,terminal_focus +2330,4549106,"TERMINAL",0,0,"salloc",,terminal_focus +2331,4552152,"TERMINAL",0,0,"",,terminal_focus +2332,4554227,"TERMINAL",0,0,"source /home/hk-project-p0023960/tum_cte0515/Projects/jasmine/.venv/bin/activate",,terminal_command +2333,4554301,"TERMINAL",0,0,"]633;C]0;tum_cte0515@hkn1990:~/Projects/jasmine",,terminal_output +2334,4555604,"TERMINAL",0,0,"git diff",,terminal_command +2335,4555687,"TERMINAL",0,0,"]633;C[?1h=\rdiff --git a/sample.py b/sample.py\r\nindex c83b5e5..ab05007 100644\r\n--- a/sample.py\r\n+++ b/sample.py\r\n@@ -233,11 +233,11 @@ if __name__ == ""__main__"":\r\n # --- Save video ---\r\n imgs = [Image.fromarray(img) for img in frames]\r\n # Write actions on each frame, on each row (i.e., for each video in the batch, on the GT row)\r\n- B, S, _, _, _ = batch[""videos""].shape\r\n+ B = batch[""videos""].shape[0]\r\n if action_batch_E is not None:\r\n- action_batch_BSm11 = jnp.reshape(action_batch_E, (B, S - 1, 1))\r\n+ action_batch_BSm11 = jnp.reshape(action_batch_E, (B, args.seq_len - 1, 1))\r\n else:\r\n- action_batch_BSm11 = jnp.reshape(batch[""actions""][:, :-1], (B, S - 1, 1))\r\n+ action_batch_BSm11 = jnp.reshape(batch[""actions""][:, :-1], (B, args.seq_len - 1, 1))\r\n for t, img in enumerate(imgs[1:]):\r\n d = ImageDraw.Draw(img)\r\n for row in 
range(B):\r\n\r[?1l>]0;tum_cte0515@hkn1990:~/Projects/jasmine",,terminal_output +2336,4563500,"TERMINAL",0,0,"git diff > diff.diff",,terminal_command +2337,4563540,"TERMINAL",0,0,"]633;C]0;tum_cte0515@hkn1990:~/Projects/jasmine",,terminal_output +2338,4569213,"TERMINAL",0,0,"git stash",,terminal_command +2339,4569272,"TERMINAL",0,0,"]633;C",,terminal_output +2340,4569488,"TERMINAL",0,0,"Saved working directory and index state WIP on generate-minatar-breakout-dataset: 677c691 Merge branch 'gt-actions' into generate-minatar-breakout-dataset\r\n",,terminal_output +2341,4569505,"TERMINAL",0,0,"]0;tum_cte0515@hkn1990:~/Projects/jasmine",,terminal_output +2342,4574024,"TERMINAL",0,0,"git checkout main",,terminal_command +2343,4574098,"TERMINAL",0,0,"]633;C",,terminal_output +2344,4574269,"TERMINAL",0,0,"Switched to branch 'main'\r\nYour branch is up to date with 'origin/main'.\r\n]0;tum_cte0515@hkn1990:~/Projects/jasmine",,terminal_output +2345,4575648,"",0,0,"Switched from branch 'generate-minatar-breakout-dataset' to 'main'",,git_branch_checkout +2346,4597297,"TERMINAL",0,0,"git pull",,terminal_command +2347,4597363,"TERMINAL",0,0,"]633;C",,terminal_output +2348,4599333,"TERMINAL",0,0,"remote: Enumerating objects: 12, done.\r\nremote: Counting objects: 8% (1/12)\rremote: Counting objects: 16% (2/12)\rremote: Counting objects: 25% (3/12)\rremote: Counting objects: 33% (4/12)\rremote: Counting objects: 41% (5/12)\rremote: Counting objects: 50% (6/12)\rremote: Counting objects: 58% (7/12)\rremote: Counting objects: 66% (8/12)\rremote: Counting objects: 75% (9/12)\rremote: Counting objects: 83% (10/12)\rremote: Counting objects: 91% (11/12)\rremote: Counting objects: 100% (12/12)\rremote: Counting objects: 100% (12/12), done.\r\nremote: Compressing objects: 12% (1/8)\rremote: Compressing objects: 25% (2/8)\rremote: Compressing objects: 37% (3/8)\rremote: Compressing objects: 50% (4/8)\rremote: Compressing objects: 62% (5/8)\rremote: Compressing objects: 75% (6/8)\rremote: Compressing objects: 87% (7/8)\rremote: Compressing objects: 100% (8/8)\rremote: Compressing objects: 100% (8/8), done.\r\nUnpacking objects: 8% (1/12)\rUnpacking objects: 16% (2/12)\rUnpacking objects: 25% (3/12)\rUnpacking objects: 33% (4/12)\rUnpacking objects: 41% (5/12)\r",,terminal_output +2349,4599401,"TERMINAL",0,0,"remote: Total 12 (delta 6), reused 7 (delta 4), pack-reused 0 (from 0)\r\nUnpacking objects: 50% (6/12)\rUnpacking objects: 58% (7/12)\r",,terminal_output +2350,4599464,"TERMINAL",0,0,"Unpacking objects: 66% (8/12)\r",,terminal_output +2351,4599574,"TERMINAL",0,0,"Unpacking objects: 75% (9/12)\rUnpacking objects: 83% (10/12)\rUnpacking objects: 91% (11/12)\rUnpacking objects: 100% (12/12)\rUnpacking objects: 100% (12/12), 12.18 KiB | 46.00 KiB/s, done.\r\n",,terminal_output +2352,4599778,"TERMINAL",0,0,"From github.com:p-doom/jasmine\r\n c7522f2..e686951 main -> origin/main\r\n",,terminal_output +2353,4599885,"TERMINAL",0,0,"Updating c7522f2..e686951\r\n",,terminal_output +2354,4600004,"TERMINAL",0,0,"Fast-forward\r\n genie.py | 114 +++++++++++++++++++++++++++++++++++++++++++++++----------------------\r\n input_pipeline/generate_coinrun_dataset.py | 97 +++++++++++++++++++++++++++++++++++++++++------------------\r\n input_pipeline/pngs_to_array_records.py | 82 ++++++++++++++++++++++++++++++++------------------\r\n input_pipeline/utils.py | 33 ++++++++++++++------\r\n input_pipeline/video_to_array_records.py | 80 ++++++++++++++++++++++++++++++-------------------\r\n models/dynamics.py | 4 +--\r\n sample.py | 43 
+++++++++++++++-----------\r\n train_dynamics.py | 115 +++++++++++++++++++++++++++++++++++++++++++++++-----------------------\r\n train_lam.py | 58 +++++++++++++++++------------------\r\n train_tokenizer.py | 56 +++++++++++++++++-----------------\r\n utils/dataloader.py | 8 ++++-\r\n 11 files changed, 440 insertions(+), 250 deletions(-)\r\n]0;tum_cte0515@hkn1990:~/Projects/jasmine",,terminal_output +2355,4603995,"sample.py",0,0,"from dataclasses import dataclass\nimport time\nimport os\nimport optax\n\nimport dm_pix as pix\nimport einops\nimport jax\nimport jax.numpy as jnp\nimport flax.linen as nn\nimport numpy as np\nimport orbax.checkpoint as ocp\nfrom PIL import Image, ImageDraw\nimport tyro\nfrom flax import nnx\n\nfrom genie import Genie\nfrom utils.dataloader import get_dataloader\n\n\n@dataclass\nclass Args:\n # Experiment\n seed: int = 0\n seq_len: int = 16\n image_channels: int = 3\n image_height: int = 90\n image_width: int = 160\n data_dir: str = ""data/coinrun_episodes""\n checkpoint: str = """"\n # Sampling\n batch_size: int = 1\n maskgit_steps: int = 25\n temperature: float = 1.0\n sample_argmax: bool = True\n start_frame: int = 1\n # Tokenizer checkpoint\n tokenizer_dim: int = 512\n tokenizer_ffn_dim: int = 2048\n latent_patch_dim: int = 32\n num_patch_latents: int = 1024\n patch_size: int = 4\n tokenizer_num_blocks: int = 4\n tokenizer_num_heads: int = 8\n # LAM checkpoint\n lam_dim: int = 512\n lam_ffn_dim: int = 2048\n latent_action_dim: int = 32\n num_actions: int = 6\n lam_patch_size: int = 16\n lam_num_blocks: int = 4\n lam_num_heads: int = 8\n use_gt_actions: bool = False\n # Dynamics checkpoint\n dyna_type: str = ""maskgit""\n dyna_dim: int = 512\n dyna_ffn_dim: int = 2048\n dyna_num_blocks: int = 6\n dyna_num_heads: int = 8\n param_dtype = jnp.float32\n dtype = jnp.bfloat16\n use_flash_attention: bool = True\n\n\nargs = tyro.cli(Args)\n\nif __name__ == ""__main__"":\n """"""\n Dimension keys:\n B: batch size\n T: number of input (conditioning) frames\n N: number of patches per frame\n S: sequence length\n H: height\n W: width\n E: B * (S - 1)\n """"""\n jax.distributed.initialize()\n\n rng = jax.random.key(args.seed)\n\n # --- Load Genie checkpoint ---\n rngs = nnx.Rngs(rng)\n genie = Genie(\n # Tokenizer\n in_dim=args.image_channels,\n tokenizer_dim=args.tokenizer_dim,\n tokenizer_ffn_dim=args.tokenizer_ffn_dim,\n latent_patch_dim=args.latent_patch_dim,\n num_patch_latents=args.num_patch_latents,\n patch_size=args.patch_size,\n tokenizer_num_blocks=args.tokenizer_num_blocks,\n tokenizer_num_heads=args.tokenizer_num_heads,\n # LAM\n lam_dim=args.lam_dim,\n lam_ffn_dim=args.lam_ffn_dim,\n latent_action_dim=args.latent_action_dim,\n num_actions=args.num_actions,\n lam_patch_size=args.lam_patch_size,\n lam_num_blocks=args.lam_num_blocks,\n lam_num_heads=args.lam_num_heads,\n lam_co_train=False,\n use_gt_actions=args.use_gt_actions,\n # Dynamics\n dyna_type=args.dyna_type,\n dyna_dim=args.dyna_dim,\n dyna_ffn_dim=args.dyna_ffn_dim,\n dyna_num_blocks=args.dyna_num_blocks,\n dyna_num_heads=args.dyna_num_heads,\n param_dtype=args.param_dtype,\n dtype=args.dtype,\n use_flash_attention=args.use_flash_attention,\n # FIXME (f.srambical): implement spatiotemporal KV caching and set decode=True\n decode=False,\n rngs=rngs,\n )\n\n del genie.tokenizer.vq.drop\n # Need to delete lam decoder for checkpoint loading\n if not args.use_gt_actions:\n assert genie.lam is not None\n del genie.lam.decoder\n\n handler_registry = ocp.handlers.DefaultCheckpointHandlerRegistry()\n 
handler_registry.add(\n ""model_state"", ocp.args.PyTreeSave, ocp.handlers.PyTreeCheckpointHandler\n )\n handler_registry.add(\n ""model_state"", ocp.args.PyTreeRestore, ocp.handlers.PyTreeCheckpointHandler\n )\n checkpoint_options = ocp.CheckpointManagerOptions(\n step_format_fixed_length=6,\n )\n checkpoint_manager = ocp.CheckpointManager(\n args.checkpoint,\n options=checkpoint_options,\n handler_registry=handler_registry,\n )\n\n dummy_tx = optax.adamw(\n learning_rate=optax.linear_schedule(0.0001, 0.0001, 10000),\n b1=0.9,\n b2=0.9,\n weight_decay=1e-4,\n mu_dtype=args.dtype,\n )\n dummy_optimizer = nnx.Optimizer(genie, dummy_tx)\n\n abstract_optimizer = nnx.eval_shape(lambda: dummy_optimizer)\n abstract_optimizer_state = nnx.state(abstract_optimizer)\n restored = checkpoint_manager.restore(\n checkpoint_manager.latest_step(),\n args=ocp.args.Composite(\n model_state=ocp.args.PyTreeRestore(abstract_optimizer_state), # type: ignore\n ),\n )\n restored_optimizer_state = restored[""model_state""]\n nnx.update(dummy_optimizer, restored_optimizer_state)\n\n # --- Define sampling function ---\n def _sampling_fn(model: Genie, batch: dict) -> jax.Array:\n """"""Runs Genie.sample with pre-defined generation hyper-parameters.""""""\n assert args.dyna_type in [\n ""maskgit"",\n ""causal"",\n ], f""Invalid dynamics type: {args.dyna_type}""\n frames, _ = model.sample(\n batch,\n args.seq_len,\n args.temperature,\n args.sample_argmax,\n args.maskgit_steps,\n )\n return frames\n\n # --- Define autoregressive sampling loop ---\n def _autoreg_sample(genie, rng, batch):\n batch[""videos""] = batch[""videos""][:, : args.start_frame]\n batch[""rng""] = rng\n generated_vid_BSHWC = _sampling_fn(genie, batch)\n return generated_vid_BSHWC\n\n # --- Get video + latent actions ---\n array_record_files = [\n os.path.join(args.data_dir, x)\n for x in os.listdir(args.data_dir)\n if x.endswith("".array_record"")\n ]\n dataloader = get_dataloader(\n array_record_files,\n args.seq_len,\n args.batch_size,\n args.image_height,\n args.image_width,\n args.image_channels,\n # We don't use workers in order to avoid grain shutdown issues (https://github.com/google/grain/issues/398)\n num_workers=0,\n prefetch_buffer_size=1,\n seed=args.seed,\n )\n dataloader = iter(dataloader)\n batch = next(dataloader)\n gt_video = jnp.asarray(batch[""videos""], dtype=jnp.float32) / 255.0\n batch[""videos""] = gt_video.astype(args.dtype)\n # Get latent actions for all videos in the batch\n action_batch_E = None\n if not args.use_gt_actions:\n action_batch_E = genie.vq_encode(batch, training=False)\n batch[""latent_actions""] = action_batch_E\n\n # --- Sample + evaluate video ---\n recon_video_BSHWC = _autoreg_sample(genie, rng, batch)\n recon_video_BSHWC = recon_video_BSHWC.astype(jnp.float32)\n gt = (\n gt_video[:, : recon_video_BSHWC.shape[1]]\n .clip(0, 1)\n .reshape(-1, *gt_video.shape[2:])\n )\n recon = recon_video_BSHWC.clip(0, 1).reshape(-1, *recon_video_BSHWC.shape[2:])\n ssim = jnp.asarray(\n pix.ssim(gt[:, args.start_frame :], recon[:, args.start_frame :])\n ).mean()\n print(f""SSIM: {ssim}"")\n\n # --- Construct video ---\n true_videos = (gt_video * 255).astype(np.uint8)\n pred_videos = (recon_video_BSHWC * 255).astype(np.uint8)\n video_comparison = np.zeros((2, *recon_video_BSHWC.shape), dtype=np.uint8)\n video_comparison[0] = true_videos[:, : args.seq_len]\n video_comparison[1] = pred_videos\n frames = einops.rearrange(video_comparison, ""n b t h w c -> t (b h) (n w) c"")\n\n # --- Save video ---\n imgs = 
[Image.fromarray(img) for img in frames]\n # Write actions on each frame, on each row (i.e., for each video in the batch, on the GT row)\n B, S, _, _, _ = batch[""videos""].shape\n if action_batch_E is not None:\n action_batch_BSm11 = jnp.reshape(action_batch_E, (B, S - 1, 1))\n else:\n action_batch_BSm11 = jnp.reshape(batch[""actions""][:, :-1], (B, S - 1, 1))\n for t, img in enumerate(imgs[1:]):\n d = ImageDraw.Draw(img)\n for row in range(B):\n action = action_batch_BSm11[row, t, 0]\n y_offset = row * batch[""videos""].shape[2] + 2\n d.text((2, y_offset), f""{action}"", fill=255)\n imgs[0].save(\n f""generation_{time.time()}.gif"",\n save_all=True,\n append_images=imgs[1:],\n duration=250,\n loop=0,\n )\n",python,tab +2356,4603996,"sample.py",6776,0,"",python,selection_mouse +2357,4660011,"TERMINAL",0,0,"git checkout -b ""hotfix/sampling-shapes-error""",,terminal_command +2358,4660083,"TERMINAL",0,0,"]633;C",,terminal_output +2359,4660160,"TERMINAL",0,0,"Switched to a new branch 'hotfix/sampling-shapes-error'\r\n]0;tum_cte0515@hkn1990:~/Projects/jasmine",,terminal_output +2360,4660660,"",0,0,"Switched from branch 'main' to 'hotfix/sampling-shapes-error'",,git_branch_checkout +2361,4672948,"sample.py",0,0,"from dataclasses import dataclass\nimport time\nimport os\nimport optax\n\nimport dm_pix as pix\nimport einops\nimport jax\nimport jax.numpy as jnp\nimport flax.linen as nn\nimport numpy as np\nimport orbax.checkpoint as ocp\nfrom PIL import Image, ImageDraw\nimport tyro\nfrom flax import nnx\n\nfrom genie import Genie\nfrom utils.dataloader import get_dataloader\n\n\n@dataclass\nclass Args:\n # Experiment\n seed: int = 0\n seq_len: int = 16\n image_channels: int = 3\n image_height: int = 90\n image_width: int = 160\n data_dir: str = ""data/coinrun_episodes""\n checkpoint: str = """"\n # Sampling\n batch_size: int = 1\n maskgit_steps: int = 25\n temperature: float = 1.0\n sample_argmax: bool = True\n start_frame: int = 1\n # Tokenizer checkpoint\n tokenizer_dim: int = 512\n tokenizer_ffn_dim: int = 2048\n latent_patch_dim: int = 32\n num_patch_latents: int = 1024\n patch_size: int = 4\n tokenizer_num_blocks: int = 4\n tokenizer_num_heads: int = 8\n # LAM checkpoint\n lam_dim: int = 512\n lam_ffn_dim: int = 2048\n latent_action_dim: int = 32\n num_actions: int = 6\n lam_patch_size: int = 16\n lam_num_blocks: int = 4\n lam_num_heads: int = 8\n use_gt_actions: bool = False\n # Dynamics checkpoint\n dyna_type: str = ""maskgit""\n dyna_dim: int = 512\n dyna_ffn_dim: int = 2048\n dyna_num_blocks: int = 6\n dyna_num_heads: int = 8\n param_dtype = jnp.float32\n dtype = jnp.bfloat16\n use_flash_attention: bool = True\n\n\nargs = tyro.cli(Args)\n\nif __name__ == ""__main__"":\n """"""\n Dimension keys:\n B: batch size\n T: number of input (conditioning) frames\n N: number of patches per frame\n S: sequence length\n H: height\n W: width\n E: B * (S - 1)\n """"""\n jax.distributed.initialize()\n\n rng = jax.random.key(args.seed)\n\n # --- Load Genie checkpoint ---\n rngs = nnx.Rngs(rng)\n genie = Genie(\n # Tokenizer\n in_dim=args.image_channels,\n tokenizer_dim=args.tokenizer_dim,\n tokenizer_ffn_dim=args.tokenizer_ffn_dim,\n latent_patch_dim=args.latent_patch_dim,\n num_patch_latents=args.num_patch_latents,\n patch_size=args.patch_size,\n tokenizer_num_blocks=args.tokenizer_num_blocks,\n tokenizer_num_heads=args.tokenizer_num_heads,\n # LAM\n lam_dim=args.lam_dim,\n lam_ffn_dim=args.lam_ffn_dim,\n latent_action_dim=args.latent_action_dim,\n num_actions=args.num_actions,\n 
lam_patch_size=args.lam_patch_size,\n lam_num_blocks=args.lam_num_blocks,\n lam_num_heads=args.lam_num_heads,\n lam_co_train=False,\n use_gt_actions=args.use_gt_actions,\n # Dynamics\n dyna_type=args.dyna_type,\n dyna_dim=args.dyna_dim,\n dyna_ffn_dim=args.dyna_ffn_dim,\n dyna_num_blocks=args.dyna_num_blocks,\n dyna_num_heads=args.dyna_num_heads,\n param_dtype=args.param_dtype,\n dtype=args.dtype,\n use_flash_attention=args.use_flash_attention,\n # FIXME (f.srambical): implement spatiotemporal KV caching and set decode=True\n decode=False,\n rngs=rngs,\n )\n\n del genie.tokenizer.vq.drop\n # Need to delete lam decoder for checkpoint loading\n if not args.use_gt_actions:\n assert genie.lam is not None\n del genie.lam.decoder\n\n handler_registry = ocp.handlers.DefaultCheckpointHandlerRegistry()\n handler_registry.add(\n ""model_state"", ocp.args.PyTreeSave, ocp.handlers.PyTreeCheckpointHandler\n )\n handler_registry.add(\n ""model_state"", ocp.args.PyTreeRestore, ocp.handlers.PyTreeCheckpointHandler\n )\n checkpoint_options = ocp.CheckpointManagerOptions(\n step_format_fixed_length=6,\n )\n checkpoint_manager = ocp.CheckpointManager(\n args.checkpoint,\n options=checkpoint_options,\n handler_registry=handler_registry,\n )\n\n dummy_tx = optax.adamw(\n learning_rate=optax.linear_schedule(0.0001, 0.0001, 10000),\n b1=0.9,\n b2=0.9,\n weight_decay=1e-4,\n mu_dtype=args.dtype,\n )\n dummy_optimizer = nnx.Optimizer(genie, dummy_tx)\n\n abstract_optimizer = nnx.eval_shape(lambda: dummy_optimizer)\n abstract_optimizer_state = nnx.state(abstract_optimizer)\n restored = checkpoint_manager.restore(\n checkpoint_manager.latest_step(),\n args=ocp.args.Composite(\n model_state=ocp.args.PyTreeRestore(abstract_optimizer_state), # type: ignore\n ),\n )\n restored_optimizer_state = restored[""model_state""]\n nnx.update(dummy_optimizer, restored_optimizer_state)\n\n # --- Define sampling function ---\n def _sampling_fn(model: Genie, batch: dict) -> jax.Array:\n """"""Runs Genie.sample with pre-defined generation hyper-parameters.""""""\n assert args.dyna_type in [\n ""maskgit"",\n ""causal"",\n ], f""Invalid dynamics type: {args.dyna_type}""\n frames, _ = model.sample(\n batch,\n args.seq_len,\n args.temperature,\n args.sample_argmax,\n args.maskgit_steps,\n )\n return frames\n\n # --- Define autoregressive sampling loop ---\n def _autoreg_sample(genie, rng, batch):\n batch[""videos""] = batch[""videos""][:, : args.start_frame]\n batch[""rng""] = rng\n generated_vid_BSHWC = _sampling_fn(genie, batch)\n return generated_vid_BSHWC\n\n # --- Get video + latent actions ---\n array_record_files = [\n os.path.join(args.data_dir, x)\n for x in os.listdir(args.data_dir)\n if x.endswith("".array_record"")\n ]\n dataloader = get_dataloader(\n array_record_files,\n args.seq_len,\n args.batch_size,\n args.image_height,\n args.image_width,\n args.image_channels,\n # We don't use workers in order to avoid grain shutdown issues (https://github.com/google/grain/issues/398)\n num_workers=0,\n prefetch_buffer_size=1,\n seed=args.seed,\n )\n dataloader = iter(dataloader)\n batch = next(dataloader)\n gt_video = jnp.asarray(batch[""videos""], dtype=jnp.float32) / 255.0\n batch[""videos""] = gt_video.astype(args.dtype)\n # Get latent actions for all videos in the batch\n action_batch_E = None\n if not args.use_gt_actions:\n action_batch_E = genie.vq_encode(batch, training=False)\n batch[""latent_actions""] = action_batch_E\n\n # --- Sample + evaluate video ---\n recon_video_BSHWC = _autoreg_sample(genie, rng, batch)\n 
recon_video_BSHWC = recon_video_BSHWC.astype(jnp.float32)\n gt = (\n gt_video[:, : recon_video_BSHWC.shape[1]]\n .clip(0, 1)\n .reshape(-1, *gt_video.shape[2:])\n )\n recon = recon_video_BSHWC.clip(0, 1).reshape(-1, *recon_video_BSHWC.shape[2:])\n ssim = jnp.asarray(\n pix.ssim(gt[:, args.start_frame :], recon[:, args.start_frame :])\n ).mean()\n print(f""SSIM: {ssim}"")\n\n # --- Construct video ---\n true_videos = (gt_video * 255).astype(np.uint8)\n pred_videos = (recon_video_BSHWC * 255).astype(np.uint8)\n video_comparison = np.zeros((2, *recon_video_BSHWC.shape), dtype=np.uint8)\n video_comparison[0] = true_videos[:, : args.seq_len]\n video_comparison[1] = pred_videos\n frames = einops.rearrange(video_comparison, ""n b t h w c -> t (b h) (n w) c"")\n\n # --- Save video ---\n imgs = [Image.fromarray(img) for img in frames]\n # Write actions on each frame, on each row (i.e., for each video in the batch, on the GT row)\n B, S, _, _, _ = batch[""videos""].shape\n if action_batch_E is not None:\n action_batch_BSm11 = jnp.reshape(action_batch_E, (B, S - 1, 1))\n else:\n action_batch_BSm11 = jnp.reshape(batch[""actions""][:, :-1], (B, S - 1, 1))\n for t, img in enumerate(imgs[1:]):\n d = ImageDraw.Draw(img)\n for row in range(B):\n action = action_batch_BSm11[row, t, 0]\n y_offset = row * batch[""videos""].shape[2] + 2\n d.text((2, y_offset), f""{action}"", fill=255)\n imgs[0].save(\n f""generation_{time.time()}.gif"",\n save_all=True,\n append_images=imgs[1:],\n duration=250,\n loop=0,\n )\n",python,tab +2362,4672951,"sample.py",7596,0,"",python,selection_mouse +2363,4675017,"sample.py",7793,0,"",python,selection_mouse +2364,4675021,"sample.py",7792,0,"",python,selection_command +2365,4675180,"sample.py",7792,1,")",python,selection_mouse +2366,4675180,"sample.py",7790,2,"mg",python,selection_mouse +2367,4675181,"sample.py",7759,33,"):\n d = ImageDraw.Draw(img",python,selection_mouse +2368,4675181,"sample.py",7753,39,"gs[1:]):\n d = ImageDraw.Draw(img",python,selection_mouse +2369,4675244,"sample.py",7793,0,"",python,selection_command +2370,4675245,"sample.py",7667,126," = jnp.reshape(batch[""actions""][:, :-1], (B, S - 1, 1))\n for t, img in enumerate(imgs[1:]):\n d = ImageDraw.Draw(img)",python,selection_mouse +2371,4675332,"sample.py",7575,218,"atch_BSm11 = jnp.reshape(action_batch_E, (B, S - 1, 1))\n else:\n action_batch_BSm11 = jnp.reshape(batch[""actions""][:, :-1], (B, S - 1, 1))\n for t, img in enumerate(imgs[1:]):\n d = ImageDraw.Draw(img)",python,selection_mouse +2372,4675409,"sample.py",7536,257,"n_batch_E is not None:\n action_batch_BSm11 = jnp.reshape(action_batch_E, (B, S - 1, 1))\n else:\n action_batch_BSm11 = jnp.reshape(batch[""actions""][:, :-1], (B, S - 1, 1))\n for t, img in enumerate(imgs[1:]):\n d = ImageDraw.Draw(img)",python,selection_mouse +2373,4675457,"sample.py",7390,403,"Write actions on each frame, on each row (i.e., for each video in the batch, on the GT row)\n B, S, _, _, _ = batch[""videos""].shape\n if action_batch_E is not None:\n action_batch_BSm11 = jnp.reshape(action_batch_E, (B, S - 1, 1))\n else:\n action_batch_BSm11 = jnp.reshape(batch[""actions""][:, :-1], (B, S - 1, 1))\n for t, img in enumerate(imgs[1:]):\n d = ImageDraw.Draw(img)",python,selection_mouse +2374,4675496,"sample.py",7334,459," imgs = [Image.fromarray(img) for img in frames]\n # Write actions on each frame, on each row (i.e., for each video in the batch, on the GT row)\n B, S, _, _, _ = batch[""videos""].shape\n if action_batch_E is not None:\n action_batch_BSm11 = 
jnp.reshape(action_batch_E, (B, S - 1, 1))\n else:\n action_batch_BSm11 = jnp.reshape(batch[""actions""][:, :-1], (B, S - 1, 1))\n for t, img in enumerate(imgs[1:]):\n d = ImageDraw.Draw(img)",python,selection_mouse +2375,4675524,"sample.py",7332,461," imgs = [Image.fromarray(img) for img in frames]\n # Write actions on each frame, on each row (i.e., for each video in the batch, on the GT row)\n B, S, _, _, _ = batch[""videos""].shape\n if action_batch_E is not None:\n action_batch_BSm11 = jnp.reshape(action_batch_E, (B, S - 1, 1))\n else:\n action_batch_BSm11 = jnp.reshape(batch[""actions""][:, :-1], (B, S - 1, 1))\n for t, img in enumerate(imgs[1:]):\n d = ImageDraw.Draw(img)",python,selection_mouse +2376,4675557,"sample.py",7307,486," # --- Save video ---\n imgs = [Image.fromarray(img) for img in frames]\n # Write actions on each frame, on each row (i.e., for each video in the batch, on the GT row)\n B, S, _, _, _ = batch[""videos""].shape\n if action_batch_E is not None:\n action_batch_BSm11 = jnp.reshape(action_batch_E, (B, S - 1, 1))\n else:\n action_batch_BSm11 = jnp.reshape(batch[""actions""][:, :-1], (B, S - 1, 1))\n for t, img in enumerate(imgs[1:]):\n d = ImageDraw.Draw(img)",python,selection_mouse +2377,4676728,"sample.py",7311,0,"",python,selection_command +2378,4685604,"sample.py",7307,0,"",python,selection_command +2379,4691814,"sample.py",7482,0," B = batch[""videos""].shape[0]\n",python,content +2380,4692003,"sample.py",7515,42,"",python,content +2381,4693439,"sample.py",7550,0," action_batch_BSm11 = jnp.reshape(action_batch_E, (B, args.seq_len - 1, 1))\n",python,content +2382,4693533,"sample.py",7633,72,"",python,content +2383,4694298,"sample.py",7643,0," action_batch_BSm11 = jnp.reshape(batch[""actions""][:, :-1], (B, args.seq_len - 1, 1))\n",python,content +2384,4694597,"sample.py",7736,82,"",python,content +2385,4704040,"sample.py",7331,0,"",python,selection_mouse +2386,4704075,"sample.py",7330,0,"",python,selection_command +2387,4704722,"sample.py",7774,0,"",python,selection_mouse +2388,4704734,"sample.py",7773,0,"",python,selection_command +2389,4705372,"sample.py",8019,0,"",python,selection_mouse +2390,4705374,"sample.py",8018,0,"",python,selection_command +2391,4706054,"sample.py",7794,0,"",python,selection_mouse +2392,4707166,"TERMINAL",0,0,"bash",,terminal_focus +2393,4707758,"TERMINAL",0,0,"bash",,terminal_focus +2394,4708435,"TERMINAL",0,0,"batch[""videos""]^C",,terminal_command +2395,4708550,"TERMINAL",0,0,"]633;C]0;tum_cte0515@hkn1990:~/Projects/jasmine",,terminal_output +2396,4713023,"TERMINAL",0,0,"git diff",,terminal_command +2397,4713116,"TERMINAL",0,0,"]633;C[?1h=\rdiff --git a/sample.py b/sample.py\r\nindex c83b5e5..ab05007 100644\r\n--- a/sample.py\r\n+++ b/sample.py\r\n@@ -233,11 +233,11 @@ if __name__ == ""__main__"":\r\n # --- Save video ---\r\n imgs = [Image.fromarray(img) for img in frames]\r\n # Write actions on each frame, on each row (i.e., for each video in the batch, on the GT row)\r\n- B, S, _, _, _ = batch[""videos""].shape\r\n+ B = batch[""videos""].shape[0]\r\n if action_batch_E is not None:\r\n- action_batch_BSm11 = jnp.reshape(action_batch_E, (B, S - 1, 1))\r\n+ action_batch_BSm11 = jnp.reshape(action_batch_E, (B, args.seq_len - 1, 1))\r\n else:\r\n- action_batch_BSm11 = jnp.reshape(batch[""actions""][:, :-1], (B, S - 1, 1))\r\n+ action_batch_BSm11 = jnp.reshape(batch[""actions""][:, :-1], (B, args.seq_len - 1, 1))\r\n for t, img in enumerate(imgs[1:]):\r\n d = ImageDraw.Draw(img)\r\n for row in 
range(B):\r\n\r[?1l>]0;tum_cte0515@hkn1990:~/Projects/jasmine",,terminal_output +2398,4740335,"TERMINAL",0,0,"git commit -am ""fix: sampling seq_len wrong after modification in autoreg_sample""",,terminal_command +2399,4740403,"TERMINAL",0,0,"]633;C",,terminal_output +2400,4742869,"TERMINAL",0,0,"black....................................................................",,terminal_output +2401,4744249,"TERMINAL",0,0,"Failed\r\n- hook id: black\r\n- files were modified by this hook\r\n\r\nreformatted sample.py\r\n\r\nAll done! ✨ 🍰 ✨\r\n1 file reformatted.\r\n\r\n",,terminal_output +2402,4744328,"TERMINAL",0,0,"]0;tum_cte0515@hkn1990:~/Projects/jasmine",,terminal_output +2403,4746301,"TERMINAL",0,0,"git commit -am ""fix: sampling seq_len wrong after modification in autoreg_sample""",,terminal_command +2404,4746382,"TERMINAL",0,0,"]633;C",,terminal_output +2405,4746779,"TERMINAL",0,0,"black....................................................................",,terminal_output +2406,4747064,"TERMINAL",0,0,"Passed\r\n",,terminal_output +2407,4747202,"TERMINAL",0,0,"[hotfix/sampling-shapes-error 8891b84] fix: sampling seq_len wrong after modification in autoreg_sample\r\n 1 file changed, 5 insertions(+), 3 deletions(-)\r\n]0;tum_cte0515@hkn1990:~/Projects/jasmine",,terminal_output +2408,4748475,"TERMINAL",0,0,"git pus",,terminal_command +2409,4748570,"TERMINAL",0,0,"]633;Cgit: 'pus' is not a git command. See 'git --help'.\r\n\r\nThe most similar commands are\r\n\tpush\r\n\tpull\r\n]0;tum_cte0515@hkn1990:~/Projects/jasmine",,terminal_output +2410,4751215,"TERMINAL",0,0,"git push",,terminal_command +2411,4751335,"TERMINAL",0,0,"]633;Cfatal: The current branch hotfix/sampling-shapes-error has no upstream branch.\r\nTo push the current branch and set the remote as upstream, use\r\n\r\n git push --set-upstream origin hotfix/sampling-shapes-error\r\n\r\nTo have this happen automatically for branches without a tracking\r\nupstream, see 'push.autoSetupRemote' in 'git help config'.\r\n\r\n]0;tum_cte0515@hkn1990:~/Projects/jasmine",,terminal_output +2412,4755066,"TERMINAL",0,0,"git push --set-upstream origin hotfix/sampling-shapes-error",,terminal_command +2413,4755163,"TERMINAL",0,0,"]633;C",,terminal_output +2414,4756505,"TERMINAL",0,0,"Enumerating objects: 5, done.\r\nCounting objects: 20% (1/5)\rCounting objects: 40% (2/5)\rCounting objects: 60% (3/5)\rCounting objects: 80% (4/5)\rCounting objects: 100% (5/5)\rCounting objects: 100% (5/5), done.\r\nDelta compression using up to 152 threads\r\nCompressing objects: 33% (1/3)\rCompressing objects: 66% (2/3)\rCompressing objects: 100% (3/3)\rCompressing objects: 100% (3/3), done.\r\nWriting objects: 33% (1/3)\rWriting objects: 66% (2/3)\rWriting objects: 100% (3/3)\rWriting objects: 100% (3/3), 413 bytes | 413.00 KiB/s, done.\r\nTotal 3 (delta 2), reused 0 (delta 0), pack-reused 0\r\nremote: Resolving deltas: 0% (0/2)\rremote: Resolving deltas: 50% (1/2)\rremote: Resolving deltas: 100% (2/2)\rremote: Resolving deltas: 100% (2/2), completed with 2 local objects.\r\n",,terminal_output +2415,4756791,"TERMINAL",0,0,"remote: \r\nremote: Create a pull request for 'hotfix/sampling-shapes-error' on GitHub by visiting:\r\nremote: https://github.com/p-doom/jasmine/pull/new/hotfix/sampling-shapes-error\r\nremote: \r\nTo github.com:p-doom/jasmine.git\r\n * [new branch] hotfix/sampling-shapes-error -> hotfix/sampling-shapes-error\r\n",,terminal_output +2416,4756845,"TERMINAL",0,0,"branch 'hotfix/sampling-shapes-error' set up to track 
'origin/hotfix/sampling-shapes-error'.\r\n]0;tum_cte0515@hkn1990:~/Projects/jasmine",,terminal_output
diff --git a/927a8af5474e5654810c00ce2e09fd2de87d3e5722f33fa1090d867db114e403/crowd-code-12b522dd-8518-4c62-b207-ca1ed4ce90571752782954186-2025_07_17-22.10.14.626/source.csv b/927a8af5474e5654810c00ce2e09fd2de87d3e5722f33fa1090d867db114e403/crowd-code-12b522dd-8518-4c62-b207-ca1ed4ce90571752782954186-2025_07_17-22.10.14.626/source.csv
new file mode 100644
index 0000000000000000000000000000000000000000..eaa16124f808454b35b57168d26bcfd26a3ee3a3
--- /dev/null
+++ b/927a8af5474e5654810c00ce2e09fd2de87d3e5722f33fa1090d867db114e403/crowd-code-12b522dd-8518-4c62-b207-ca1ed4ce90571752782954186-2025_07_17-22.10.14.626/source.csv
@@ -0,0 +1,60 @@
+Sequence,Time,File,RangeOffset,RangeLength,Text,Language,Type
+1,4,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",0,0,"from dataclasses import dataclass, field\nimport os\n\nimport einops\nfrom flax.training.train_state import TrainState\nfrom jax.sharding import Mesh, PartitionSpec, NamedSharding\nfrom jax.experimental.mesh_utils import create_device_mesh\nimport optax\nimport orbax.checkpoint as ocp\nimport numpy as np\nimport dm_pix as pix\nimport jax\nimport jax.numpy as jnp\nimport tyro\nimport wandb\nimport grain\n\nfrom genie import Genie, restore_genie_components\nfrom utils.dataloader import get_dataloader\nfrom utils.lr_utils import get_lr_schedule\nfrom utils.parameter_utils import count_parameters_by_component\n\n@dataclass\nclass Args:\n # Experiment\n num_steps: int = 200_000\n seed: int = 0\n seq_len: int = 16\n image_channels: int = 3\n image_height: int = 90\n image_width: int = 160\n data_dir: str = """"\n save_ckpt: bool = False\n restore_ckpt: bool = False\n # Optimization\n batch_size: int = 36\n init_lr: float = 0.0\n max_lr: float = 3e-5\n decay_end: float = 0.0\n wsd_decay_steps: int = 10000 # NOTE: wsd_decay_steps will only be used when using a wsd-schedule\n warmup_steps: int = 5000\n lr_schedule : str = ""wsd"" # supported options: wsd, cos\n # Tokenizer\n tokenizer_dim: int = 512\n latent_patch_dim: int = 32\n num_patch_latents: int = 1024\n patch_size: int = 4\n tokenizer_num_blocks: int = 8\n tokenizer_num_heads: int = 8\n tokenizer_checkpoint: str = """"\n # LAM\n lam_dim: int = 512\n latent_action_dim: int = 32\n num_latent_actions: int = 6\n lam_patch_size: int = 16\n lam_num_blocks: int = 8\n lam_num_heads: int = 8\n lam_checkpoint: str = """"\n # Dynamics\n dyna_dim: int = 512\n dyna_num_blocks: int = 12\n dyna_num_heads: int = 8\n dropout: float = 0.0\n mask_limit: float = 0.5\n use_maskgit: bool = False\n param_dtype: jnp.dtype = jnp.float32\n dtype: jnp.dtype = jnp.bfloat16\n # Logging\n log: bool = False\n entity: str = """"\n project: str = """"\n name: str = ""train_dynamics""\n tags: list[str] = field(default_factory=lambda: [""dynamics""])\n log_interval: int = 5\n log_image_interval: int = 250\n ckpt_dir: str = """"\n log_checkpoint_interval: int = 25000\n log_checkpoint_keep_period: int = 20000\n log_gradients: bool = False\n wandb_id: str = """"\n\n\nargs = tyro.cli(Args)\n\n\ndef dynamics_loss_fn(params, state, inputs):\n """"""Compute masked dynamics loss""""""\n inputs[""videos""] = inputs[""videos""].astype(args.dtype) / 255.0\n outputs = state.apply_fn(\n params,\n inputs,\n training=True,\n rngs={""params"": inputs[""rng""], ""dropout"": inputs[""dropout_rng""]},\n )\n mask = outputs[""mask""]\n outputs[""token_logits""] = 
outputs[""token_logits""].astype(jnp.float32)\n logits = outputs[""token_logits""]\n targets = outputs[""video_tokens""]\n\n # if not args.use_maskgit:\n # logits = outputs[""token_logits""][:, :, :-1]\n # targets = outputs[""video_tokens""][:, :, 1:]\n # mask = outputs[""mask""][:, :, 1:] \n ce_loss = optax.softmax_cross_entropy_with_integer_labels(\n logits, targets\n )\n ce_loss = (mask * ce_loss).sum() / mask.sum()\n acc = logits.argmax(-1) == targets\n acc = (mask * acc).sum() / mask.sum()\n select_probs = jax.nn.softmax(logits)\n gt = inputs[""videos""].clip(0, 1).reshape(-1, *inputs[""videos""].shape[2:])\n recon = outputs[""recon""].clip(0, 1).reshape(-1, *outputs[""recon""].shape[2:])\n psnr = pix.psnr(gt, recon).mean() # type: ignore\n ssim = pix.ssim(gt, recon).mean() # type: ignore\n _, index_counts_lam = jnp.unique_counts(\n jnp.ravel(outputs[""lam_indices""]), size=args.num_latent_actions, fill_value=0\n )\n _, index_counts_tokenizer = jnp.unique_counts(\n jnp.ravel(outputs[""video_tokens""]), size=args.num_patch_latents, fill_value=0\n )\n codebook_usage_lam = (index_counts_lam != 0).mean()\n codebook_usage_tokenizer = (index_counts_tokenizer != 0).mean()\n metrics = dict(\n cross_entropy_loss=ce_loss,\n masked_token_accuracy=acc,\n select_logit=logits.max(-1).mean(),\n select_p=select_probs.max(-1).mean(),\n entropy=jax.scipy.special.entr(select_probs).sum(-1).mean(),\n psnr=psnr,\n ssim=ssim,\n codebook_usage_lam=codebook_usage_lam,\n codebook_usage_tokenizer=codebook_usage_tokenizer,\n )\n return ce_loss, (outputs[""recon""], metrics)\n\n\n@jax.jit\ndef train_step(state, inputs):\n """"""Update state and compute metrics""""""\n grad_fn = jax.value_and_grad(dynamics_loss_fn, has_aux=True, allow_int=True)\n (loss, (recon, metrics)), grads = grad_fn(state.params, state, inputs)\n state = state.apply_gradients(grads=grads)\n if args.log_gradients:\n metrics[""gradients_std/""] = jax.tree.map(\n lambda x: x.std(), grads[""params""][""dynamics""]\n )\n return state, loss, recon, metrics\n\n\nif __name__ == ""__main__"":\n jax.distributed.initialize()\n num_devices = jax.device_count()\n if num_devices == 0:\n raise ValueError(""No JAX devices found."")\n print(f""Running on {num_devices} devices."")\n\n if args.batch_size % num_devices != 0:\n raise ValueError(\n f""Global batch size {args.batch_size} must be divisible by ""\n f""number of devices {num_devices}.""\n )\n\n per_device_batch_size_for_init = args.batch_size // num_devices\n\n rng = jax.random.PRNGKey(args.seed)\n\n # --- Initialize model ---\n genie = Genie(\n # Tokenizer\n in_dim=args.image_channels,\n tokenizer_dim=args.tokenizer_dim,\n latent_patch_dim=args.latent_patch_dim,\n num_patch_latents=args.num_patch_latents,\n patch_size=args.patch_size,\n tokenizer_num_blocks=args.tokenizer_num_blocks,\n tokenizer_num_heads=args.tokenizer_num_heads,\n # LAM\n lam_dim=args.lam_dim,\n latent_action_dim=args.latent_action_dim,\n num_latent_actions=args.num_latent_actions,\n lam_patch_size=args.lam_patch_size,\n lam_num_blocks=args.lam_num_blocks,\n lam_num_heads=args.lam_num_heads,\n lam_co_train=not args.lam_checkpoint,\n # Dynamics\n dyna_dim=args.dyna_dim,\n dyna_num_blocks=args.dyna_num_blocks,\n dyna_num_heads=args.dyna_num_heads,\n dropout=args.dropout,\n mask_limit=args.mask_limit,\n use_maskgit=args.use_maskgit,\n param_dtype=args.param_dtype,\n dtype=args.dtype,\n )\n rng, _rng = jax.random.split(rng)\n image_shape = (args.image_height, args.image_width, args.image_channels)\n dummy_inputs = dict(\n 
videos=jnp.zeros(\n (per_device_batch_size_for_init, args.seq_len, *image_shape),\n dtype=args.dtype,\n ),\n action=jnp.zeros(\n (per_device_batch_size_for_init, args.seq_len), dtype=args.dtype\n ),\n mask_rng=_rng,\n )\n rng, _rng = jax.random.split(rng)\n init_params = genie.init(_rng, dummy_inputs)\n\n param_counts = count_parameters_by_component(init_params)\n\n if args.log and jax.process_index() == 0:\n wandb_init_kwargs = {\n ""entity"": args.entity,\n ""project"": args.project,\n ""name"": args.name,\n ""tags"": args.tags,\n ""group"": ""debug"",\n ""config"": args,\n }\n\n if args.wandb_id:\n wandb_init_kwargs.update(\n {\n ""id"": args.wandb_id,\n ""resume"": ""allow"",\n }\n )\n wandb.init(**wandb_init_kwargs)\n\n wandb.config.update({""model_param_count"": param_counts})\n\n print(""Parameter counts:"")\n print(param_counts)\n\n # --- Initialize optimizer ---\n lr_schedule = get_lr_schedule(args.lr_schedule, \n args.init_lr, \n args.max_lr, \n args.decay_end, \n args.num_steps, \n args.warmup_steps, \n args.wsd_decay_steps)\n tx = optax.adamw(learning_rate=lr_schedule, b1=0.9, b2=0.9, weight_decay=1e-4, mu_dtype=args.dtype)\n train_state = TrainState.create(apply_fn=genie.apply, params=init_params, tx=tx)\n\n device_mesh_arr = create_device_mesh((num_devices,))\n mesh = Mesh(devices=device_mesh_arr, axis_names=(""data"",))\n\n replicated_sharding = NamedSharding(mesh, PartitionSpec())\n videos_sharding = NamedSharding(mesh, PartitionSpec(""data"", None, None, None, None))\n train_state = jax.device_put(train_state, replicated_sharding)\n\n # --- Initialize checkpoint manager ---\n step = 0\n handler_registry = ocp.handlers.DefaultCheckpointHandlerRegistry()\n handler_registry.add(\n ""model_state"", ocp.args.StandardSave, ocp.handlers.StandardCheckpointHandler\n )\n handler_registry.add(\n ""model_state"", ocp.args.StandardRestore, ocp.handlers.StandardCheckpointHandler\n )\n handler_registry.add(""dataloader_state"", grain.checkpoint.CheckpointSave, grain.checkpoint.CheckpointHandler) # type: ignore\n handler_registry.add(""dataloader_state"", grain.checkpoint.CheckpointRestore, grain.checkpoint.CheckpointHandler) # type: ignore\n\n checkpoint_options = ocp.CheckpointManagerOptions(\n save_interval_steps=args.log_checkpoint_interval,\n max_to_keep=3,\n keep_period=args.log_checkpoint_keep_period,\n step_format_fixed_length=6,\n cleanup_tmp_directories=True,\n )\n\n checkpoint_manager = ocp.CheckpointManager(\n args.ckpt_dir,\n options=checkpoint_options,\n handler_registry=handler_registry,\n )\n\n # --- Create DataLoaderIterator from dataloader ---\n array_record_files = [\n os.path.join(args.data_dir, x)\n for x in os.listdir(args.data_dir)\n if x.endswith("".array_record"")\n ]\n grain_dataloader = get_dataloader(\n array_record_files,\n args.seq_len,\n # NOTE: We deliberately pass the global batch size\n # The dataloader shards the dataset across all processes\n args.batch_size,\n *image_shape,\n num_workers=8,\n prefetch_buffer_size=1,\n seed=args.seed,\n )\n initial_state = grain_dataloader._create_initial_state()\n grain_iterator = grain.DataLoaderIterator(grain_dataloader, initial_state)\n\n # --- Restore checkpoint ---\n if args.restore_ckpt:\n # Restore full dynamics model\n abstract_train_state = jax.tree_util.tree_map(\n ocp.utils.to_shape_dtype_struct, train_state\n )\n restored = checkpoint_manager.restore(\n checkpoint_manager.latest_step(),\n args=ocp.args.Composite(\n model_state=ocp.args.StandardRestore(abstract_train_state),\n 
dataloader_state=grain.checkpoint.CheckpointRestore(grain_iterator),\n ),\n )\n train_state = restored[""model_state""]\n grain_iterator = restored[""dataloader_state""]\n step = checkpoint_manager.latest_step() or 0\n print(f""Restored dataloader and model state from step {step}"")\n else:\n # Restore from pre-trained tokenizer (and LAM)\n train_state = restore_genie_components(\n train_state, replicated_sharding, grain_iterator, dummy_inputs, rng, args\n )\n\n # --- TRAIN LOOP ---\n dataloader = (jax.make_array_from_process_local_data(videos_sharding, elem) for elem in grain_iterator) # type: ignore\n while step < args.num_steps:\n # for videos in dataloader:\n videos = np.load(""overfit_dir/corner_8repl.npy"")\n videos = jax.make_array_from_process_local_data(videos_sharding, videos)\n while True:\n # --- Train step ---\n rng, _rng, _rng_dropout, _rng_mask = jax.random.split(rng, 4)\n\n inputs = dict(\n videos=videos,\n rng=_rng,\n dropout_rng=_rng_dropout,\n mask_rng=_rng_mask,\n )\n train_state, loss, recon, metrics = train_step(train_state, inputs)\n metrics[""lr""] = lr_schedule(step)\n print(f""Step {step}, loss: {loss}"")\n step += 1\n\n # --- Logging ---\n if args.log:\n if step % args.log_interval == 0 and jax.process_index() == 0:\n wandb.log(\n {\n ""loss"": loss,\n ""step"": step,\n **metrics,\n }\n )\n if step % args.log_image_interval == 0:\n gt_seq = inputs[""videos""][0].astype(jnp.float32) #/ 255.0\n recon_seq = recon[0].clip(0, 1)\n comparison_seq = jnp.concatenate((gt_seq, recon_seq), axis=1)\n comparison_seq = einops.rearrange(\n comparison_seq * 255, ""t h w c -> h (t w) c""\n )\n if jax.process_index() == 0:\n log_images = dict(\n image=wandb.Image(np.asarray(gt_seq[args.seq_len - 1])),\n recon=wandb.Image(np.asarray(recon_seq[args.seq_len - 1])),\n true_vs_recon=wandb.Image(\n np.asarray(comparison_seq.astype(np.uint8))\n ),\n )\n wandb.log(log_images)\n # --- Checkpointing ---\n if args.save_ckpt and step % args.log_checkpoint_interval == 0:\n checkpoint_manager.save(\n step,\n args=ocp.args.Composite(\n model_state=ocp.args.StandardSave(train_state),\n dataloader_state=grain.checkpoint.CheckpointSave(\n grain_iterator\n ),\n ),\n )\n print(f""Saved checkpoint at step {step}"")\n if step >= args.num_steps:\n break\n\n checkpoint_manager.close()\n",python,tab +2,590,"extension-output-pdoom-org.crowd-code-#1-crowd-code",0,0,"10:10:14 PM [info] Activating crowd-code\n10:10:14 PM [info] Recording started\n10:10:14 PM [info] Initializing git provider using file system watchers...\n10:10:14 PM [info] Git repository found\n10:10:14 PM [info] Git provider initialized successfully\n10:10:14 PM [info] Initial git state: [object Object]\n",Log,tab +3,3709,"TERMINAL",0,0,"/bin/python3 /hkfs/home/project/hk-project-p0023960/tum_cte0515/.cursor-server/extensions/ms-python.python-2024.12.3-linux-x64/python_files/printEnvVariablesToFile.py /hkfs/home/project/hk-project-p0023960/tum_cte0515/.cursor-server/extensions/ms-python.python-2024.12.3-linux-x64/python_files/deactivate/bash/envVars.txt",,terminal_command +4,3743,"TERMINAL",0,0,"]633;E;2025-07-17 22:10:18 /bin/python3 /hkfs/home/project/hk-project-p0023960/tum_cte0515/.cursor-server/extensions/ms-python.python-2024.12.3-linux-x64/python_files/printEnvVariablesToFile.py /hkfs/home/project/hk-project-p0023960/tum_cte0515/.cursor-server/extensions/ms-python.python-2024.12.3-linux-x64/python_files/deactivate/bash/envVars.txt;541e3580-6a0f-4f0b-bb4c-198841a406f7]633;C",,terminal_output 
+5,3850,"TERMINAL",0,0,"]0;tum_cte0515@hkn1991:/hkfs/home/project/hk-project-p0023960/tum_cte0515/.cursor-server/extensions/ms-python.python-2024.12.3-linux-x64/python_files/deactivate/bash]633;D;0",,terminal_output +6,21978,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",0,0,"",python,tab +7,21982,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",2750,0,"",python,selection_mouse +8,25341,"TERMINAL",0,0,"queue",,terminal_command +9,25354,"TERMINAL",0,0,"]633;E;2025-07-17 22:10:39 queue;8a8d6fe6-a600-49ab-ad7b-e3418768748b]633;C",,terminal_output +10,25432,"TERMINAL",0,0,"[?1049h(B[?7hEvery 1.0s: squeue --mehkn1991.localdomain: Thu Jul 17 22:10:39 2025JOBID PARTITION NAME USER ST\tTIME NODES NODELIST(REASON)3353884 accelerat interact tum_cte0 R 7:01:56\t 2 hkn[0508,0517]",,terminal_output +11,26402,"TERMINAL",0,0,"407",,terminal_output +12,27450,"TERMINAL",0,0,"18",,terminal_output +13,28557,"TERMINAL",0,0,"29",,terminal_output +14,29633,"TERMINAL",0,0,"32:01",,terminal_output +15,29809,"TERMINAL",0,0,"[?1049l\r[?1l>]0;tum_cte0515@hkn1991:~/Projects/jafar]633;D;0",,terminal_output +16,34077,"TERMINAL",0,0,"scancel 3353884",,terminal_command +17,34117,"TERMINAL",0,0,"]633;E;2025-07-17 22:10:48 scancel 3353884;8a8d6fe6-a600-49ab-ad7b-e3418768748b]633;C]0;tum_cte0515@hkn1991:~/Projects/jafar]633;D;0",,terminal_output +18,100445,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",2966,0,"",python,selection_mouse +19,102391,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",2912,0,"",python,selection_command +20,102556,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",2858,0,"",python,selection_command +21,102702,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",2827,0,"",python,selection_command +22,102897,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",2858,0,"",python,selection_command +23,103038,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",2912,0,"",python,selection_command +24,103183,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",2966,0,"",python,selection_command +25,103313,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",2912,0,"",python,selection_command +26,103461,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",2858,0,"",python,selection_command +27,103646,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",2827,0,"",python,selection_command +28,103749,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",2858,0,"",python,selection_command +29,103907,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",2912,0,"",python,selection_command +30,104034,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",2966,0,"",python,selection_command +31,104150,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",3010,0,"",python,selection_command +32,104180,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",2966,0,"",python,selection_command +33,104347,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",2912,0,"",python,selection_command 
+34,104508,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",2858,0,"",python,selection_command +35,104634,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",2827,0,"",python,selection_command +36,104768,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",2800,0,"",python,selection_command +37,105312,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",2801,0,"",python,selection_command +38,105809,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",2802,0,"",python,selection_command +39,106322,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",2803,0,"",python,selection_command +40,106363,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",2804,0,"",python,selection_command +41,106416,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",2805,0,"",python,selection_command +42,106454,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",2806,0,"",python,selection_command +43,106498,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",2807,0,"",python,selection_command +44,106499,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",2808,0,"",python,selection_command +45,106500,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",2809,0,"",python,selection_command +46,106539,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",2810,0,"",python,selection_command +47,106557,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",2811,0,"",python,selection_command +48,106584,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",2812,0,"",python,selection_command +49,106634,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",2843,0,"",python,selection_command +50,107209,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",2897,0,"",python,selection_command +51,107343,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",2951,0,"",python,selection_command +52,109636,"slurm/jobs/mihir/horeka/yolo-runs/sampling.sh",0,0,"\n# Log the sbatch script\ncat $0\n\nmodule unload mpi/openmpi/5.0\nmodule unload devel/cuda/12.4\n# source .venv/bin/activate\n\narray_records_dir=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_new/open_ai_minecraft_arrayrecords_chunked\n\njob_name=$SLURM_JOB_NAME\nslurm_job_id=$SLURM_JOB_ID\n\nCHECKPOINT_DIR=$ws_dir/checkpoints/$job_name/$slurm_job_id\nmkdir -p $CHECKPOINT_DIR\n\ntokenizer_ckpt_dir=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/big-runs/tokenizer-lr-scaling/train_tokenizer_lr_sweep_1e-4\ndynamics_ckpt_dir=$1\necho $dynamics_ckpt_dir\n\nenv | grep SLURM\n\npython sample.py \\n --checkpoint $dynamics_ckpt_dir \\n --dyna_dim=128 \\n --dyna_num_blocks=2 \\n --dyna_num_heads=4 \\n --seq_len=2 \\n --data_dir $array_records_dir\n\n",shellscript,tab +53,114762,"TERMINAL",0,0,"",,terminal_focus +54,121002,"TERMINAL",0,0,"salloc --time=10:00:00 --partition=accelerated --nodes=1 --ntasks-per-node=1 --gres=gpu:1 --cpus-per-task=5",,terminal_command +55,121089,"TERMINAL",0,0,"]633;E;2025-07-17 22:12:15 salloc --time=10:00:00 --partition=accelerated 
--nodes=1 --ntasks-per-node=1 --gres=gpu:1 --cpus-per-task=5;2fac4da8-d4f0-4d83-a6ce-f6776ed5ed51]633;Csalloc: Granted job allocation 3355871\r\n",,terminal_output
+56,121223,"TERMINAL",0,0,"salloc: Waiting for resource configuration\r\n",,terminal_output
+57,148351,"TERMINAL",0,0,"salloc: Nodes hkn0508 are ready for job\r\n",,terminal_output
+58,149414,"TERMINAL",0,0,"]0;tum_cte0515@hkn0508:~/Projects/jafar[?2004h[tum_cte0515@hkn0508 jafar]$ ",,terminal_output
+59,165129,"TERMINAL",0,0,"\r[tum_cte0515@hkn0508 jafar]$ ",,terminal_output
diff --git a/927a8af5474e5654810c00ce2e09fd2de87d3e5722f33fa1090d867db114e403/crowd-code-1dc733b8-f415-4be5-b7dd-dc5953da5bb91753973887840-2025_07_31-16.58.50.401/source.csv b/927a8af5474e5654810c00ce2e09fd2de87d3e5722f33fa1090d867db114e403/crowd-code-1dc733b8-f415-4be5-b7dd-dc5953da5bb91753973887840-2025_07_31-16.58.50.401/source.csv
new file mode 100644
index 0000000000000000000000000000000000000000..5efed80d4a8bffbb3a94a316a1524124c2c428f8
--- /dev/null
+++ b/927a8af5474e5654810c00ce2e09fd2de87d3e5722f33fa1090d867db114e403/crowd-code-1dc733b8-f415-4be5-b7dd-dc5953da5bb91753973887840-2025_07_31-16.58.50.401/source.csv
@@ -0,0 +1,2616 @@
+Sequence,Time,File,RangeOffset,RangeLength,Text,Language,Type
+2,738,"extension-output-pdoom-org.crowd-code-#1-crowd-code",0,0,"4:58:50 PM [info] Activating crowd-code\n4:58:50 PM [info] Recording started\n4:58:50 PM [info] Initializing git provider using file system watchers...\n4:58:50 PM [info] Git repository found\n4:58:50 PM [info] Git provider initialized successfully\n",Log,tab
+3,929,"extension-output-pdoom-org.crowd-code-#1-crowd-code",245,0,"4:58:51 PM [info] Initial git state: [object Object]\n",Log,content
+4,3321,"TERMINAL",0,0,"bash",,terminal_focus
+5,15654,"TERMINAL",0,0,"queue",,terminal_command
+6,15698,"TERMINAL",0,0,"]633;E;2025-07-31 16:59:05 queue;adbf53fe-397b-40d3-9339-94ea79afad56]633;C",,terminal_output
+7,15901,"TERMINAL",0,0,"[?1049h(B[?7hEvery 1.0s: squeue --mehkn1990.localdomain: Thu Jul 31 16:59:06 2025JOBID PARTITION NAME USER ST\tTIME NODES NODELIST(REASON)3386719 accelerat train_to tum_cte0 PD\t0:00\t 2 (Priority)3386718 accelerat train_to tum_cte0 PD\t0:00\t 2 (Priority)3386722 accelerat train_dy tum_cte0 PD\t0:00\t 8 (Priority)3387190 accelerat interact tum_cte0 R 4:26:27\t 1 hkn0602",,terminal_output
+8,16962,"TERMINAL",0,0,"78",,terminal_output
+9,18016,"TERMINAL",0,0,"89",,terminal_output
+10,19096,"TERMINAL",0,0,"930",,terminal_output
+11,19314,"TERMINAL",0,0,"[?1049l\r[?1l>]0;tum_cte0515@hkn1990:~/Projects/jafar]633;D;0",,terminal_output
+12,22875,"TERMINAL",0,0,"queue",,terminal_command
+13,22969,"TERMINAL",0,0,"]633;E;2025-07-31 16:59:13 queue;adbf53fe-397b-40d3-9339-94ea79afad56]633;C[?1049h(B[?7hEvery 1.0s: squeue --mehkn1990.localdomain: Thu Jul 31 16:59:13 2025JOBID PARTITION NAME USER ST\tTIME NODES NODELIST(REASON)3386719 accelerat train_to tum_cte0 PD\t0:00\t 2 (Priority)3386718 accelerat train_to tum_cte0 PD\t0:00\t 2 (Priority)3386722 accelerat train_dy tum_cte0 PD\t0:00\t 8 (Priority)3387190 accelerat interact tum_cte0 R 4:26:34\t 1 hkn0602",,terminal_output
+14,24137,"TERMINAL",0,0,"45",,terminal_output
+15,25203,"TERMINAL",0,0,"56",,terminal_output
+16,25586,"TERMINAL",0,0,"[?1049l\r[?1l>]0;tum_cte0515@hkn1990:~/Projects/jafar]633;D;0",,terminal_output
+17,28187,"TERMINAL",0,0,"scancel 3387190",,terminal_command
+18,28226,"TERMINAL",0,0,"]633;E;2025-07-31 16:59:18 scancel 3387190;adbf53fe-397b-40d3-9339-94ea79afad56]633;C",,terminal_output
+19,28295,"TERMINAL",0,0,"]0;tum_cte0515@hkn1990:~/Projects/jafar]633;D;0]633;P;Cwd=/home/hk-project-p0023960/tum_cte0515/Projects/jafar",,terminal_output +20,33134,"TERMINAL",0,0,"source .venv/bin/activate",,terminal_command +21,97015,"TERMINAL",0,0,"salloc --time=05:00:00 --partition=accelerated --nodes=1 --ntasks-per-node=1 --gres=gpu:1 --cpus-per-task=5",,terminal_command +22,97079,"TERMINAL",0,0,"]633;E;2025-07-31 17:00:27 salloc --time=05:00:00 --partition=accelerated --nodes=1 --ntasks-per-node=1 --gres=gpu:1 --cpus-per-task=5;adbf53fe-397b-40d3-9339-94ea79afad56]633;Csalloc: Pending job allocation 3388128\r\nsalloc: job 3388128 queued and waiting for resources\r\n",,terminal_output +23,169212,"TERMINAL",0,0,"salloc: job 3388128 has been allocated resources\r\nsalloc: Granted job allocation 3388128\r\n",,terminal_output +24,169346,"TERMINAL",0,0,"salloc: Waiting for resource configuration\r\n",,terminal_output +25,177657,"TERMINAL",0,0,"s",,terminal_output +26,186051,"TERMINAL",0,0,"bash",,terminal_focus +27,187760,"TERMINAL",0,0,"idling",,terminal_command +28,187824,"TERMINAL",0,0,"]633;E;2025-07-31 17:01:58 idling;fefc14ad-7c55-4796-a83b-9fc26af4ce88]633;C[?1049h(B[?7hEvery 1.0s: sinfo_t_idlehkn1990.localdomain: Thu Jul 31 17:01:58 2025Partition dev_cpuonly: 10 nodes idle\rPartition cpuonly: 69 nodes idle\rPartition dev_accelerated:\t 1 nodes idle\rPartition accelerated: 103 nodes idle",,terminal_output +29,188950,"TERMINAL",0,0,"9\t ",,terminal_output +30,190020,"TERMINAL",0,0,"2:00\t ",,terminal_output +31,191027,"TERMINAL",0,0,"1\t ",,terminal_output +32,191100,"TERMINAL",0,0,"Every 1.0s: sinfo_t_idlehkn1990.localdomain: Thu Jul 31 17:02:01 2025Partition dev_cpuonly: 10 nodes idle\rPartition cpuonly: 69 nodes idle\rPartition dev_accelerated:\t 1 nodes idle\rPartition accelerated: 103 nodes idle\rPartition dev_accelerated-h100 :\t 0 nodes idle",,terminal_output +33,191162,"TERMINAL",0,0,"Every 1.0s: sinfo_t_idlehkn1990.localdomain: Thu Jul 31 17:02:01 2025Partition dev_cpuonly: 10 nodes idle\rPartition cpuonly: 69 nodes idle\rPartition dev_accelerated:\t 1 nodes idle\rPartition accelerated: 103 nodes idle\rPartition dev_accelerated-h100 :\t 0 nodes idle\rPartition accelerated-h100:\t 3 nodes idle\rPartition large:\t 4 nodes idle",,terminal_output +34,191217,"TERMINAL",0,0,"Every 1.0s: sinfo_t_idlehkn1990.localdomain: Thu Jul 31 17:02:01 2025Partition dev_cpuonly: 10 nodes idle\rPartition cpuonly: 69 nodes idle\rPartition dev_accelerated:\t 1 nodes idle\rPartition accelerated: 103 nodes idle\rPartition dev_accelerated-h100 :\t 0 nodes idle\rPartition accelerated-h100:\t 3 nodes idle\rPartition large:\t 4 nodes idle",,terminal_output +35,192315,"TERMINAL",0,0,"2\t ",,terminal_output +36,193339,"TERMINAL",0,0,"3\t ",,terminal_output +37,194343,"TERMINAL",0,0,"4\t ",,terminal_output +38,195390,"TERMINAL",0,0,"5\t ",,terminal_output +39,196430,"TERMINAL",0,0,"salloc: Nodes hkn0802 are ready for job\r\n",,terminal_output +40,196438,"TERMINAL",0,0,"6\t ",,terminal_output +41,197191,"TERMINAL",0,0,"]0;tum_cte0515@hkn0802:~/Projects/jafar[?2004h[tum_cte0515@hkn0802 jafar]$ ",,terminal_output +42,197502,"TERMINAL",0,0,"7\t ",,terminal_output +43,198517,"TERMINAL",0,0,"8\t ",,terminal_output +44,198784,"TERMINAL",0,0,"srun",,terminal_focus +45,199164,"TERMINAL",0,0,"s",,terminal_output +46,199516,"TERMINAL",0,0,"[?25lo[?25h",,terminal_output +47,199561,"TERMINAL",0,0,"9\t ",,terminal_output +48,199665,"TERMINAL",0,0,"[?25lu[?25h",,terminal_output 
+49,199725,"TERMINAL",0,0,"[?25lr[?25h",,terminal_output +50,199999,"TERMINAL",0,0,"[?25lc[?25h",,terminal_output +51,200155,"TERMINAL",0,0,"[?25le[?25h[?25l [?25h",,terminal_output +52,200257,"TERMINAL",0,0,"[?25l.[?25h",,terminal_output +53,200368,"TERMINAL",0,0,"[?25lv[?25h",,terminal_output +54,200562,"TERMINAL",0,0,"env/",,terminal_output +55,200594,"TERMINAL",0,0,"10\t ",,terminal_output +56,200711,"TERMINAL",0,0,"[?25lb[?25h",,terminal_output +57,200871,"TERMINAL",0,0,"in/",,terminal_output +58,201116,"TERMINAL",0,0,"[?25la[?25h",,terminal_output +59,201216,"TERMINAL",0,0,"[?25lc[?25h",,terminal_output +60,201386,"TERMINAL",0,0,"tivate",,terminal_output +61,201637,"TERMINAL",0,0,"1\t ",,terminal_output +62,201708,"TERMINAL",0,0,"[?25l[?2004l\r]0;tum_cte0515@hkn0802:~/Projects/jafar[?2004h(jafar) [tum_cte0515@hkn0802 jafar]$ [?25h",,terminal_output +63,202903,"TERMINAL",0,0,"2\t ",,terminal_output +64,203740,"TERMINAL",0,0,"r",,terminal_output +65,203769,"TERMINAL",0,0,"3\t ",,terminal_output +66,203832,"TERMINAL",0,0,"[?25lu[?25h",,terminal_output +67,204099,"TERMINAL",0,0,"[?25ln[?25h",,terminal_output +68,204281,"TERMINAL",0,0,"[?25ln[?25h[?25le[?25h",,terminal_output +69,204417,"TERMINAL",0,0,"[?25lr[?25h",,terminal_output +70,204483,"TERMINAL",0,0,"[?25l-[?25h",,terminal_output +71,204818,"TERMINAL",0,0,"5\t ",,terminal_output +72,205377,"TERMINAL",0,0,"[?25l3[?25h",,terminal_output +73,205482,"TERMINAL",0,0,"[?25l[?2004l\r]0;tum_cte0515@hkn0802:~/Projects/jafar_jobs_3[?2004h(jafar) [tum_cte0515@hkn0802 jafar_jobs_3]$ [?25h",,terminal_output +74,205842,"TERMINAL",0,0,"6\t ",,terminal_output +75,206383,"TERMINAL",0,0,"v",,terminal_output +76,206609,"TERMINAL",0,0,"[?25li[?25h",,terminal_output +77,206677,"TERMINAL",0,0,"[?25lm[?25h",,terminal_output +78,206801,"TERMINAL",0,0,"[?25l [?25h",,terminal_output +79,206858,"TERMINAL",0,0,"7\t ",,terminal_output +80,208000,"TERMINAL",0,0,"8\t ",,terminal_output +81,208941,"TERMINAL",0,0,"9\t ",,terminal_output +82,209729,"TERMINAL",0,0,"[?25ls[?25h",,terminal_output +83,209814,"TERMINAL",0,0,"[?25ll[?25h",,terminal_output +84,209940,"TERMINAL",0,0,"urm",,terminal_output +85,209996,"TERMINAL",0,0,"20\t ",,terminal_output +86,211027,"TERMINAL",0,0,"1\t ",,terminal_output +87,212089,"TERMINAL",0,0,"2\t ",,terminal_output +88,212316,"TERMINAL",0,0,"[?25l/[?25h",,terminal_output +89,213161,"TERMINAL",0,0,"3\t ",,terminal_output +90,213585,"TERMINAL",0,0,"[?25ld[?25h",,terminal_output +91,213693,"TERMINAL",0,0,"ev/",,terminal_output +92,214218,"TERMINAL",0,0,"[?25lm[?25h",,terminal_output +93,214218,"TERMINAL",0,0,"4\t ",,terminal_output +94,214284,"TERMINAL",0,0,"ihir/",,terminal_output +95,214852,"TERMINAL",0,0,"",,terminal_output +96,215221,"TERMINAL",0,0,"5\t ",,terminal_output +97,215273,"TERMINAL",0,0,"\r\ncremers/ horeka/ placeholder \r\n(jafar) [tum_cte0515@hkn0802 jafar_jobs_3]$ vim slurm/dev/mihir/",,terminal_output +98,215703,"TERMINAL",0,0,"h",,terminal_output +99,215830,"TERMINAL",0,0,"oreka/",,terminal_output +100,216274,"TERMINAL",0,0,"6\t ",,terminal_output +101,216429,"TERMINAL",0,0,"",,terminal_output +102,216516,"TERMINAL",0,0,"\r\ncausal_fit_modelsizes/ overfit_sample_tiny/ train_lam.sh\r\noverfit_batch/ preprocess_dataset.sbatch train_tokenizer_coinrun.sbatch\r\noverfit_batch_tiny/ sync_runner.sh train_tokenizer.sh\r\noverfit_sample/ train_dynamics.sh yolo-runs/\r\n(jafar) [tum_cte0515@hkn0802 jafar_jobs_3]$ vim slurm/dev/mihir/horeka/",,terminal_output +103,217288,"TERMINAL",0,0,"7 80",,terminal_output 
+104,218334,"TERMINAL",0,0,"8\t ",,terminal_output +105,218950,"TERMINAL",0,0,"",,terminal_output +106,219186,"TERMINAL",0,0,"",,terminal_output +107,219407,"TERMINAL",0,0,"",,terminal_output +108,219457,"TERMINAL",0,0,"9\t ",,terminal_output +109,220043,"TERMINAL",0,0,"[?25lv/mihir/horeka/[?25h",,terminal_output +110,220294,"TERMINAL",0,0,"[?25le/mihir/horeka/[?25h",,terminal_output +111,220358,"TERMINAL",0,0,"[?25ld/mihir/horeka/[?25h",,terminal_output +112,220444,"TERMINAL",0,0,"30\t ",,terminal_output +113,220904,"TERMINAL",0,0,"j/mihir/horeka/",,terminal_output +114,220988,"TERMINAL",0,0,"[?25lo/mihir/horeka/[?25h",,terminal_output +115,221157,"TERMINAL",0,0,"[?25lb/mihir/horeka/[?25h",,terminal_output +116,221242,"TERMINAL",0,0,"[?25ls/mihir/horeka/[?25h",,terminal_output +117,221614,"TERMINAL",0,0,"1\t ",,terminal_output +118,222220,"TERMINAL",0,0,"",,terminal_output +119,222478,"TERMINAL",0,0,"",,terminal_output +120,222504,"TERMINAL",0,0,"2\t ",,terminal_output +121,222753,"TERMINAL",0,0,"\r\naction_space_scaling/ causal_big_runs/ mask_prob_fix/ sbatch_dir.sh \r\nbatchsize_scaling/ lr_tuning/ modelsize_scaling/ \r\n(jafar) [tum_cte0515@hkn0802 jafar_jobs_3]$ vim slurm/jobs/mihir/horeka/",,terminal_output +122,223557,"TERMINAL",0,0,"31",,terminal_output +123,224586,"TERMINAL",0,0,"4\t ",,terminal_output +124,225712,"TERMINAL",0,0,"5\t ",,terminal_output +125,226166,"TERMINAL",0,0,"c",,terminal_output +126,226474,"TERMINAL",0,0,"[?25la[?25h",,terminal_output +127,226648,"TERMINAL",0,0,"usal_big_runs/",,terminal_output +128,226662,"TERMINAL",0,0,"6\t ",,terminal_output +129,227193,"TERMINAL",0,0,"train_dynamics_",,terminal_output +130,227326,"TERMINAL",0,0,"",,terminal_output +131,227777,"TERMINAL",0,0,"7\t ",,terminal_output +132,227848,"TERMINAL",0,0,"\r\ntrain_dynamics_2_nodes.sbatch train_dynamics_8_nodes.sbatch \r\n(jafar) [tum_cte0515@hkn0802 jafar_jobs_3]$ vim slurm/jobs/mihir/horeka/causal_big_runs/train_dynamics_",,terminal_output +133,228782,"TERMINAL",0,0,"9\t ",,terminal_output +134,229803,"TERMINAL",0,0,"40\t ",,terminal_output +135,230858,"TERMINAL",0,0,"1\t ",,terminal_output +136,230998,"TERMINAL",0,0,"2",,terminal_output +137,231317,"TERMINAL",0,0,"_nodes.sbatch ",,terminal_output +138,231870,"TERMINAL",0,0,"2\t ",,terminal_output +139,232201,"TERMINAL",0,0,"\r\n[?2004l\r[?1049h[>4;2m[?1h=[?2004h[?1004h[?12h[?12l[?25l""slurm/jobs/mihir/horeka/causal_big_runs/train_dynamics_2_nodes.sbatch"" 81L, 2407B▽ Pzz\[0%m [>c]10;?]11;?#!/usr/bin/env bash#SBATCH --nodes=3#SBATCH --ntasks-per-node=4\r\n#SBATCH --time=48:00:00\r\n#SBATCH --partition=accelerated\r\n#SBATCH --cpus-per-task=5\r\n#SBATCH --gres=gpu:4\r\n#SBATCH --output=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir/causal/dynamics-cotraining/%x_%j..log\r\n#SBATCH --error=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir/causal/dynamics-cotraining/%x_%j.llog\r\n#SBATCH --job-name=train_dynamics_causal_2_node\r\n#SBATCH --requeue\r\n#SBATCH --signal=b:usr1@300 # 5 min before timeout\r\n\r\n# --- signal trap to requeue job before timeout ---\r\nrequeue_job() {\r\n echo ""[$(date)] caught sigusr1 (timeout warning), requeueing slurm job $SLURM_JOB_ID...""\r\n # optional: trigger checkpoint saving here\r\n # e.g., touch $checkpoint_dir/requeue_trigger\r\n scontrol requeue $SLURM_JOB_ID\r\n exit 0\r\n}1,1Top[?25h",,terminal_output +140,232378,"TERMINAL",0,0,"P+q436f\P+q6b75\P+q6b64\P+q6b72\P+q6b6c\P+q2332\P+q2334\P+q2569\P+q2a37\P+q6b31\[?12$p[?25l/3333/3333 [?25h[?25l/f6f6/e3e3 
[?25h",,terminal_output +141,232977,"TERMINAL",0,0,"3\t ",,terminal_output +142,233036,"TERMINAL",0,0,"[?25lj 2,0-1[?25h",,terminal_output +143,233299,"TERMINAL",0,0,"[?25lj 3,1 [?25h",,terminal_output +144,233444,"TERMINAL",0,0,"[?25lj 4[?25h",,terminal_output +145,233580,"TERMINAL",0,0,"[?25lj 5[?25h",,terminal_output +146,233749,"TERMINAL",0,0,"[?25ll 2[?25h",,terminal_output +147,233963,"TERMINAL",0,0,"4\t ",,terminal_output +148,234564,"TERMINAL",0,0,"[?25ll 3[?25h[?25ll 4[?25h[?25ll 5[?25h[?25ll 6[?25h[?25ll 7[?25h[?25ll 8[?25h[?25ll 9[?25h[?25ll 10[?25h[?25ll 1[?25h[?25ll 2[?25h[?25l3[?25h[?25ll 4[?25h",,terminal_output +149,234604,"TERMINAL",0,0,"[?25ll 5[?25h",,terminal_output +150,234773,"TERMINAL",0,0,"[?25ll 6[?25h",,terminal_output +151,234961,"TERMINAL",0,0,"[?25ll 7[?25h",,terminal_output +152,235000,"TERMINAL",0,0,"5\t ",,terminal_output +153,235365,"TERMINAL",0,0,"[?25lh 6[?25h",,terminal_output +154,236054,"TERMINAL",0,0,"6\t ",,terminal_output +155,236930,"TERMINAL",0,0,"[?25lr[?25h",,terminal_output +156,236993,"TERMINAL",0,0,"[?25l0 0[?25h",,terminal_output +157,237080,"TERMINAL",0,0,"7\t ",,terminal_output +158,237334,"TERMINAL",0,0,"[?25ll 7[?25h",,terminal_output +159,238148,"TERMINAL",0,0,"8\t ",,terminal_output +160,239092,"TERMINAL",0,0,"[?25l^[",,terminal_output +161,239168,"TERMINAL",0,0,"9\t ",,terminal_output +162,239233,"TERMINAL",0,0," ^[ [?25h",,terminal_output +163,240288,"TERMINAL",0,0,"50\t ",,terminal_output +164,241327,"TERMINAL",0,0,"1\t ",,terminal_output +165,241598,"TERMINAL",0,0,"[?25l::[?25h",,terminal_output +166,241762,"TERMINAL",0,0,"w",,terminal_output +167,241835,"TERMINAL",0,0,"q",,terminal_output +168,242366,"TERMINAL",0,0,"2\t ",,terminal_output +169,243401,"TERMINAL",0,0,"3\t ",,terminal_output +170,244448,"TERMINAL",0,0,"4\t ",,terminal_output +171,244736,"TERMINAL",0,0,"\r[?25l[?2004l[>4;m""slurm/jobs/mihir/horeka/causal_big_runs/train_dynamics_2_nodes.sbatch"" 81L, 2407B written\r\r\r\n[?1004l[?2004l[?1l>[?25h[>4;m[?1049l]0;tum_cte0515@hkn0802:~/Projects/jafar_jobs_3[?2004h(jafar) [tum_cte0515@hkn0802 jafar_jobs_3]$ ",,terminal_output +172,245491,"TERMINAL",0,0,"5\t ",,terminal_output +173,245541,"TERMINAL",0,0,"vim slurm/jobs/mihir/horeka/causal_big_runs/train_dynamics_2_nodes.sbatch ",,terminal_output +174,246563,"TERMINAL",0,0,"6\t ",,terminal_output +175,247604,"TERMINAL",0,0,"7\t ",,terminal_output +176,248376,"TERMINAL",0,0,"[?25lm[?25h",,terminal_output +177,248564,"TERMINAL",0,0,"[?25li[?25h",,terminal_output +178,248619,"TERMINAL",0,0,"8\t ",,terminal_output +179,248648,"TERMINAL",0,0,"[?25lv[?25h",,terminal_output +180,248867,"TERMINAL",0,0,"[1@s",,terminal_output +181,249007,"TERMINAL",0,0,"[?25ls [1@b[?25h[1@a",,terminal_output +182,249301,"TERMINAL",0,0,"[?25l [1@t[?25h",,terminal_output +183,249463,"TERMINAL",0,0,"[?25l [1@c[?25h",,terminal_output +184,249553,"TERMINAL",0,0,"[?25l [1@h[?25h",,terminal_output +185,249709,"TERMINAL",0,0,"9\t ",,terminal_output +186,250712,"TERMINAL",0,0,"3:00\t ",,terminal_output +187,250922,"TERMINAL",0,0,"\r\n[?2004l\rSubmitted batch job 3388135\r\n]0;tum_cte0515@hkn0802:~/Projects/jafar_jobs_3[?2004h(jafar) [tum_cte0515@hkn0802 jafar_jobs_3]$ ",,terminal_output +188,251675,"TERMINAL",0,0,"q",,terminal_output +189,251787,"TERMINAL",0,0,"[?25lu[?25h",,terminal_output +190,251787,"TERMINAL",0,0,"2\t ",,terminal_output +191,252005,"TERMINAL",0,0,"[?25leu[?25h",,terminal_output +192,252117,"TERMINAL",0,0,"[?25le[?25h",,terminal_output 
+193,252209,"TERMINAL",0,0,"\r\n[?2004l\r[?1049h(B[?7hEvery 1.0s: squeue --mehkn0802.localdomain: Thu Jul 31 17:03:02 2025JOBID PARTITION NAME USER ST\tTIME NODES NODELIST(REASON)3386722 accelerat train_dy tum_cte0 PD\t0:00\t 8 (Priority)3386719 accelerat train_to tum_cte0 PD\t0:00\t 2 (Priority)3386718 accelerat train_to tum_cte0 PD\t0:00\t 2 (Priority)3388135 accelerat train_dy tum_cte0 PD\t0:00\t 3 (Priority)3388128 accelerat interact tum_cte0 R\t1:23\t 1 hkn0802",,terminal_output +194,252850,"TERMINAL",0,0,"3\t ",,terminal_output +195,253259,"TERMINAL",0,0,"34",,terminal_output +196,253972,"TERMINAL",0,0,"4\t ",,terminal_output +197,254245,"TERMINAL",0,0,"45",,terminal_output +198,254902,"TERMINAL",0,0,"5\t ",,terminal_output +199,255250,"TERMINAL",0,0,"56",,terminal_output +200,256022,"TERMINAL",0,0,"6\t ",,terminal_output +201,256262,"TERMINAL",0,0,"67",,terminal_output +202,257050,"TERMINAL",0,0,"7\t ",,terminal_output +203,257297,"TERMINAL",0,0,"78",,terminal_output +204,258064,"TERMINAL",0,0,"8\t ",,terminal_output +205,258298,"TERMINAL",0,0,"89",,terminal_output +206,259034,"TERMINAL",0,0,"9\t ",,terminal_output +207,259319,"TERMINAL",0,0,"930",,terminal_output +208,260073,"TERMINAL",0,0,"10\t ",,terminal_output +209,260326,"TERMINAL",0,0,"101",,terminal_output +210,261281,"TERMINAL",0,0,"1\t ",,terminal_output +211,261342,"TERMINAL",0,0,"12",,terminal_output +212,262170,"TERMINAL",0,0,"2\t ",,terminal_output +213,262366,"TERMINAL",0,0,"23",,terminal_output +214,263192,"TERMINAL",0,0,"3\t ",,terminal_output +215,263373,"TERMINAL",0,0,"34",,terminal_output +216,264229,"TERMINAL",0,0,"4\t ",,terminal_output +217,264401,"TERMINAL",0,0,"45",,terminal_output +218,265270,"TERMINAL",0,0,"5\t ",,terminal_output +219,265423,"TERMINAL",0,0,"56",,terminal_output +220,266316,"TERMINAL",0,0,"6\t ",,terminal_output +221,266441,"TERMINAL",0,0,"67",,terminal_output +222,267349,"TERMINAL",0,0,"7\t ",,terminal_output +223,267479,"TERMINAL",0,0,"78",,terminal_output +224,268394,"TERMINAL",0,0,"8\t ",,terminal_output +225,268457,"TERMINAL",0,0,"89",,terminal_output +226,269458,"TERMINAL",0,0,"9\t ",,terminal_output +227,269489,"TERMINAL",0,0,"940",,terminal_output +228,270468,"TERMINAL",0,0,"201",,terminal_output +229,270491,"TERMINAL",0,0,"201",,terminal_output +230,271505,"TERMINAL",0,0,"12",,terminal_output +231,271538,"TERMINAL",0,0,"1\t ",,terminal_output +232,272517,"TERMINAL",0,0,"23",,terminal_output +233,272594,"TERMINAL",0,0,"2\t ",,terminal_output +234,273630,"TERMINAL",0,0,"34",,terminal_output +235,273630,"TERMINAL",0,0,"3\t ",,terminal_output +236,274756,"TERMINAL",0,0,"45",,terminal_output +237,274756,"TERMINAL",0,0,"4\t ",,terminal_output +238,275572,"TERMINAL",0,0,"56",,terminal_output +239,275698,"TERMINAL",0,0,"5\t ",,terminal_output +240,276600,"TERMINAL",0,0,"67",,terminal_output +241,276811,"TERMINAL",0,0,"60",,terminal_output +242,277633,"TERMINAL",0,0,"78",,terminal_output +243,277856,"TERMINAL",0,0,"8\t ",,terminal_output +244,278751,"TERMINAL",0,0,"89",,terminal_output +245,278893,"TERMINAL",0,0,"9\t ",,terminal_output +246,279673,"TERMINAL",0,0,"950",,terminal_output +247,279980,"TERMINAL",0,0,"30\t ",,terminal_output +248,280709,"TERMINAL",0,0,"301",,terminal_output +249,281004,"TERMINAL",0,0,"1\t ",,terminal_output +250,281824,"TERMINAL",0,0,"12",,terminal_output +251,282021,"TERMINAL",0,0,"2\t ",,terminal_output +252,282848,"TERMINAL",0,0,"23",,terminal_output +253,283071,"TERMINAL",0,0,"3\t ",,terminal_output +254,283876,"TERMINAL",0,0,"35",,terminal_output 
+255,284098,"TERMINAL",0,0,"4\t ",,terminal_output +256,284894,"TERMINAL",0,0,"56",,terminal_output +257,285209,"TERMINAL",0,0,"5\t ",,terminal_output +258,285741,"TERMINAL",0,0,"67",,terminal_output +259,286226,"TERMINAL",0,0,"6\t ",,terminal_output +260,286842,"TERMINAL",0,0,"78",,terminal_output +261,287258,"TERMINAL",0,0,"7\t ",,terminal_output +262,287875,"TERMINAL",0,0,"89",,terminal_output +263,288274,"TERMINAL",0,0,"8\t ",,terminal_output +264,288889,"TERMINAL",0,0,"92:00",,terminal_output +265,289447,"TERMINAL",0,0,"90 99",,terminal_output +266,289842,"TERMINAL",0,0,"40 R1hkn[0811-0813]1",,terminal_output +267,290527,"TERMINAL",0,0,"40\t ",,terminal_output +268,291070,"TERMINAL",0,0,"122",,terminal_output +269,291549,"TERMINAL",0,0,"1\t ",,terminal_output +270,291857,"TERMINAL",0,0,"233",,terminal_output +271,292674,"TERMINAL",0,0,"2\t ",,terminal_output +272,292868,"TERMINAL",0,0,"344",,terminal_output +273,293626,"TERMINAL",0,0,"3\t ",,terminal_output +274,293906,"TERMINAL",0,0,"455",,terminal_output +275,294707,"TERMINAL",0,0,"4\t ",,terminal_output +276,294895,"TERMINAL",0,0,"566",,terminal_output +277,295749,"TERMINAL",0,0,"5\t ",,terminal_output +278,295911,"TERMINAL",0,0,"677",,terminal_output +279,296878,"TERMINAL",0,0,"6\t ",,terminal_output +280,296941,"TERMINAL",0,0,"788",,terminal_output +281,297903,"TERMINAL",0,0,"8\t ",,terminal_output +282,297964,"TERMINAL",0,0,"899",,terminal_output +283,298814,"TERMINAL",0,0,"9\t ",,terminal_output +284,298972,"TERMINAL",0,0,"91010",,terminal_output +285,299951,"TERMINAL",0,0,"50\t ",,terminal_output +286,299988,"TERMINAL",0,0,"5011",,terminal_output +287,301032,"TERMINAL",0,0,"1\t ",,terminal_output +288,301033,"TERMINAL",0,0,"122",,terminal_output +289,301996,"TERMINAL",0,0,"2\t ",,terminal_output +290,302014,"TERMINAL",0,0,"233",,terminal_output +291,303019,"TERMINAL",0,0,"3\t ",,terminal_output +292,303053,"TERMINAL",0,0,"344",,terminal_output +293,304357,"TERMINAL",0,0,"455",,terminal_output +294,304362,"TERMINAL",0,0,"4\t ",,terminal_output +295,305101,"TERMINAL",0,0,"566",,terminal_output +296,305102,"TERMINAL",0,0,"5\t ",,terminal_output +297,306085,"TERMINAL",0,0,"677",,terminal_output +298,306148,"TERMINAL",0,0,"6\t ",,terminal_output +299,307096,"TERMINAL",0,0,"788",,terminal_output +300,307213,"TERMINAL",0,0,"7\t ",,terminal_output +301,308119,"TERMINAL",0,0,"899",,terminal_output +302,308250,"TERMINAL",0,0,"8\t ",,terminal_output +303,309160,"TERMINAL",0,0,"92020",,terminal_output +304,309266,"TERMINAL",0,0,"9\t ",,terminal_output +305,310180,"TERMINAL",0,0,"4:0011",,terminal_output +306,310302,"TERMINAL",0,0,"4:00\t ",,terminal_output +307,311380,"TERMINAL",0,0,"122",,terminal_output +308,311381,"TERMINAL",0,0,"1\t ",,terminal_output +309,312237,"TERMINAL",0,0,"233",,terminal_output +310,312369,"TERMINAL",0,0,"2\t ",,terminal_output +311,313261,"TERMINAL",0,0,"344",,terminal_output +312,313451,"TERMINAL",0,0,"3\t ",,terminal_output +313,314216,"TERMINAL",0,0,"455",,terminal_output +314,314447,"TERMINAL",0,0,"4\t ",,terminal_output +315,315237,"TERMINAL",0,0,"566",,terminal_output +316,315512,"TERMINAL",0,0,"5\t ",,terminal_output +317,316254,"TERMINAL",0,0,"677",,terminal_output +318,316742,"TERMINAL",0,0,"6\t ",,terminal_output +319,317272,"TERMINAL",0,0,"788",,terminal_output +320,317665,"TERMINAL",0,0,"7\t ",,terminal_output +321,318288,"TERMINAL",0,0,"899",,terminal_output +322,318697,"TERMINAL",0,0,"8\t ",,terminal_output +323,319343,"TERMINAL",0,0,"93030",,terminal_output +324,319660,"TERMINAL",0,0,"9\t 
",,terminal_output +325,320330,"TERMINAL",0,0,"1011",,terminal_output +326,320776,"TERMINAL",0,0,"10\t ",,terminal_output +327,321356,"TERMINAL",0,0,"122",,terminal_output +328,321767,"TERMINAL",0,0,"2\t ",,terminal_output +329,322362,"TERMINAL",0,0,"233",,terminal_output +330,322829,"TERMINAL",0,0,"3\t ",,terminal_output +331,323379,"TERMINAL",0,0,"344",,terminal_output +332,324021,"TERMINAL",0,0,"4\t ",,terminal_output +333,324401,"TERMINAL",0,0,"455",,terminal_output +334,324878,"TERMINAL",0,0,"5\t ",,terminal_output +335,325413,"TERMINAL",0,0,"566",,terminal_output +336,325972,"TERMINAL",0,0,"6\t ",,terminal_output +337,326470,"TERMINAL",0,0,"677",,terminal_output +338,326951,"TERMINAL",0,0,"7\t ",,terminal_output +339,327448,"TERMINAL",0,0,"788",,terminal_output +340,328005,"TERMINAL",0,0,"8\t ",,terminal_output +341,328518,"TERMINAL",0,0,"899",,terminal_output +342,329032,"TERMINAL",0,0,"9\t ",,terminal_output +343,329487,"TERMINAL",0,0,"94040",,terminal_output +344,330266,"TERMINAL",0,0,"20\t ",,terminal_output +345,330563,"TERMINAL",0,0,"2011",,terminal_output +346,331111,"TERMINAL",0,0,"1\t ",,terminal_output +347,331515,"TERMINAL",0,0,"122",,terminal_output +348,332153,"TERMINAL",0,0,"2\t ",,terminal_output +349,332612,"TERMINAL",0,0,"233",,terminal_output +350,333228,"TERMINAL",0,0,"3\t ",,terminal_output +351,333635,"TERMINAL",0,0,"344",,terminal_output +352,334227,"TERMINAL",0,0,"4\t ",,terminal_output +353,334660,"TERMINAL",0,0,"455",,terminal_output +354,335279,"TERMINAL",0,0,"5\t ",,terminal_output +355,335689,"TERMINAL",0,0,"566",,terminal_output +356,336314,"TERMINAL",0,0,"6\t ",,terminal_output +357,336604,"TERMINAL",0,0,"677",,terminal_output +358,337350,"TERMINAL",0,0,"7\t ",,terminal_output +359,337629,"TERMINAL",0,0,"788",,terminal_output +360,338395,"TERMINAL",0,0,"8\t ",,terminal_output +361,338645,"TERMINAL",0,0,"899",,terminal_output +362,339426,"TERMINAL",0,0,"9\t ",,terminal_output +363,339652,"TERMINAL",0,0,"95050",,terminal_output +364,340469,"TERMINAL",0,0,"30\t ",,terminal_output +365,340676,"TERMINAL",0,0,"3011",,terminal_output +366,341525,"TERMINAL",0,0,"1\t ",,terminal_output +367,341685,"TERMINAL",0,0,"122",,terminal_output +368,342562,"TERMINAL",0,0,"2\t ",,terminal_output +369,342702,"TERMINAL",0,0,"244",,terminal_output +370,343597,"TERMINAL",0,0,"3\t ",,terminal_output +371,343726,"TERMINAL",0,0,"455",,terminal_output +372,344694,"TERMINAL",0,0,"4\t ",,terminal_output +373,344746,"TERMINAL",0,0,"566",,terminal_output +374,345776,"TERMINAL",0,0,"5\t ",,terminal_output +375,345782,"TERMINAL",0,0,"677",,terminal_output +376,346718,"TERMINAL",0,0,"6\t ",,terminal_output +377,346808,"TERMINAL",0,0,"788",,terminal_output +378,347755,"TERMINAL",0,0,"8\t ",,terminal_output +379,347818,"TERMINAL",0,0,"899",,terminal_output +380,348818,"TERMINAL",0,0,"9\t ",,terminal_output +381,348858,"TERMINAL",0,0,"91:003:00",,terminal_output +382,350002,"TERMINAL",0,0,"4011",,terminal_output +383,350002,"TERMINAL",0,0,"40\t ",,terminal_output +384,350922,"TERMINAL",0,0,"122",,terminal_output +385,350953,"TERMINAL",0,0,"1\t ",,terminal_output +386,351867,"TERMINAL",0,0,"233",,terminal_output +387,351946,"TERMINAL",0,0,"2\t ",,terminal_output +388,352889,"TERMINAL",0,0,"344",,terminal_output +389,352968,"TERMINAL",0,0,"3\t ",,terminal_output +390,354012,"TERMINAL",0,0,"455",,terminal_output +391,354043,"TERMINAL",0,0,"4\t ",,terminal_output +392,354934,"TERMINAL",0,0,"566",,terminal_output +393,355058,"TERMINAL",0,0,"5\t ",,terminal_output 
+394,355959,"TERMINAL",0,0,"677",,terminal_output +395,356090,"TERMINAL",0,0,"6\t ",,terminal_output +396,356985,"TERMINAL",0,0,"788",,terminal_output +397,357133,"TERMINAL",0,0,"7\t ",,terminal_output +398,357967,"TERMINAL",0,0,"899",,terminal_output +399,358202,"TERMINAL",0,0,"8\t ",,terminal_output +400,359027,"TERMINAL",0,0,"91010",,terminal_output +401,359216,"TERMINAL",0,0,"9\t ",,terminal_output +402,360035,"TERMINAL",0,0,"5011",,terminal_output +403,360256,"TERMINAL",0,0,"50\t ",,terminal_output +404,361185,"TERMINAL",0,0,"122",,terminal_output +405,361312,"TERMINAL",0,0,"1\t ",,terminal_output +406,362048,"TERMINAL",0,0,"233",,terminal_output +407,362345,"TERMINAL",0,0,"2\t ",,terminal_output +408,363233,"TERMINAL",0,0,"344",,terminal_output +409,363392,"TERMINAL",0,0,"31",,terminal_output +410,364090,"TERMINAL",0,0,"455",,terminal_output +411,364433,"TERMINAL",0,0,"4\t ",,terminal_output +412,365101,"TERMINAL",0,0,"566",,terminal_output +413,365481,"TERMINAL",0,0,"5\t ",,terminal_output +414,366305,"TERMINAL",0,0,"677",,terminal_output +415,366535,"TERMINAL",0,0,"6\t ",,terminal_output +416,366577,"TERMINAL",0,0,"[?1049l\r[?1l>]0;tum_cte0515@hkn0802:~/Projects/jafar_jobs_3[?2004h(jafar) [tum_cte0515@hkn0802 jafar_jobs_3]$ ",,terminal_output +417,367453,"TERMINAL",0,0,"c",,terminal_output +418,367607,"TERMINAL",0,0,"[?25ld[?25h",,terminal_output +419,367607,"TERMINAL",0,0,"7\t ",,terminal_output +420,367690,"TERMINAL",0,0,"[?25l [?25h",,terminal_output +421,368607,"TERMINAL",0,0,"8\t ",,terminal_output +422,369689,"TERMINAL",0,0,"9\t ",,terminal_output +423,369990,"TERMINAL",0,0,"[?25ld[?25h",,terminal_output +424,370053,"TERMINAL",0,0,"[?25le[?25h",,terminal_output +425,370281,"TERMINAL",0,0,"[?25lv[?25h",,terminal_output +426,370389,"TERMINAL",0,0,"\r\n[?2004l\r]0;tum_cte0515@hkn0802:~/Projects/jafar[?2004h(jafar) [tum_cte0515@hkn0802 jafar]$ ",,terminal_output +427,370713,"TERMINAL",0,0,"5:00\t ",,terminal_output +428,371200,"TERMINAL",0,0,"watch",,terminal_focus +429,371729,"TERMINAL",0,0,"1\t ",,terminal_output +430,372403,"TERMINAL",0,0,"srun",,terminal_focus +431,372857,"TERMINAL",0,0,"3\t ",,terminal_output +432,373880,"TERMINAL",0,0,"4\t ",,terminal_output +433,374915,"TERMINAL",0,0,"5\t ",,terminal_output +434,375929,"TERMINAL",0,0,"6\t ",,terminal_output +435,376943,"TERMINAL",0,0,"7\t ",,terminal_output +436,378081,"TERMINAL",0,0,"8\t ",,terminal_output +437,379133,"TERMINAL",0,0,"9\t ",,terminal_output +438,380187,"TERMINAL",0,0,"10\t ",,terminal_output +439,381254,"TERMINAL",0,0,"1\t ",,terminal_output +440,382292,"TERMINAL",0,0,"2\t ",,terminal_output +441,383521,"TERMINAL",0,0,"3\t ",,terminal_output +442,384372,"TERMINAL",0,0,"4100",,terminal_output +443,385380,"TERMINAL",0,0,"5\t ",,terminal_output +444,386425,"TERMINAL",0,0,"6\t ",,terminal_output +445,387471,"TERMINAL",0,0,"7\t ",,terminal_output +446,388633,"TERMINAL",0,0,"8\t ",,terminal_output +447,389664,"TERMINAL",0,0,"9\t ",,terminal_output +448,390753,"TERMINAL",0,0,"20\t ",,terminal_output +449,391731,"TERMINAL",0,0,"1\t ",,terminal_output +450,392927,"TERMINAL",0,0,"3\t ",,terminal_output +451,393849,"TERMINAL",0,0,"4\t ",,terminal_output +452,394874,"TERMINAL",0,0,"5\t ",,terminal_output +453,395900,"TERMINAL",0,0,"6\t ",,terminal_output +454,396968,"TERMINAL",0,0,"70",,terminal_output +455,398151,"TERMINAL",0,0,"8\t ",,terminal_output +456,399167,"TERMINAL",0,0,"9\t ",,terminal_output +457,399795,"TERMINAL",0,0,"q",,terminal_output +458,399943,"TERMINAL",0,0,"[?25lu[?25h",,terminal_output 
+459,400008,"TERMINAL",0,0,"[?25le[?25h",,terminal_output +460,400081,"TERMINAL",0,0,"[?25lu[?25h",,terminal_output +461,400113,"TERMINAL",0,0,"30\t ",,terminal_output +462,400180,"TERMINAL",0,0,"[?25le[?25h",,terminal_output +463,400281,"TERMINAL",0,0,"\r\n[?2004l\r[?1049h(B[?7hEvery 1.0s: squeue --mehkn0802.localdomain: Thu Jul 31 17:05:30 2025JOBID PARTITION NAME USER ST\tTIME NODES NODELIST(REASON)3386722 accelerat train_dy tum_cte0 PD\t0:00\t 8 (Priority)3386719 accelerat train_to tum_cte0 PD\t0:00\t 2 (Priority)3386718 accelerat train_to tum_cte0 PD\t0:00\t 2 (Priority)3388135 accelerat train_dy tum_cte0 R\t1:51\t 3 hkn[0811-0813]3388128 accelerat interact tum_cte0 R\t3:51\t 1 hkn0802",,terminal_output +464,401236,"TERMINAL",0,0,"1\t ",,terminal_output +465,401270,"TERMINAL",0,0,"122",,terminal_output +466,402140,"TERMINAL",0,0,"2\t ",,terminal_output +467,402284,"TERMINAL",0,0,"233",,terminal_output +468,403296,"TERMINAL",0,0,"3\t ",,terminal_output +469,403296,"TERMINAL",0,0,"344",,terminal_output +470,404218,"TERMINAL",0,0,"4\t ",,terminal_output +471,404375,"TERMINAL",0,0,"455",,terminal_output +472,405324,"TERMINAL",0,0,"5\t ",,terminal_output +473,405325,"TERMINAL",0,0,"566",,terminal_output +474,406342,"TERMINAL",0,0,"6\t ",,terminal_output +475,406342,"TERMINAL",0,0,"677",,terminal_output +476,407326,"TERMINAL",0,0,"7\t ",,terminal_output +477,407350,"TERMINAL",0,0,"788",,terminal_output +478,408384,"TERMINAL",0,0,"899",,terminal_output +479,408416,"TERMINAL",0,0,"8\t ",,terminal_output +480,409406,"TERMINAL",0,0,"92:004:00",,terminal_output +481,409438,"TERMINAL",0,0,"9\t ",,terminal_output +482,410402,"TERMINAL",0,0,"4011",,terminal_output +483,410490,"TERMINAL",0,0,"40 99",,terminal_output +484,411460,"TERMINAL",0,0,"122",,terminal_output +485,411498,"TERMINAL",0,0,"1\t ",,terminal_output +486,412431,"TERMINAL",0,0,"233",,terminal_output +487,412561,"TERMINAL",0,0,"2\t ",,terminal_output +488,413453,"TERMINAL",0,0,"344",,terminal_output +489,413581,"TERMINAL",0,0,"3\t ",,terminal_output +490,414469,"TERMINAL",0,0,"455",,terminal_output +491,414624,"TERMINAL",0,0,"4\t ",,terminal_output +492,415557,"TERMINAL",0,0,"566",,terminal_output +493,415684,"TERMINAL",0,0,"5\t ",,terminal_output +494,416514,"TERMINAL",0,0,"677",,terminal_output +495,416716,"TERMINAL",0,0,"6100",,terminal_output +496,417531,"TERMINAL",0,0,"788",,terminal_output +497,417770,"TERMINAL",0,0,"8\t ",,terminal_output +498,418548,"TERMINAL",0,0,"899",,terminal_output +499,418795,"TERMINAL",0,0,"9\t ",,terminal_output +500,419660,"TERMINAL",0,0,"91010",,terminal_output +501,419842,"TERMINAL",0,0,"50\t ",,terminal_output +502,420590,"TERMINAL",0,0,"5011",,terminal_output +503,420985,"TERMINAL",0,0,"1\t ",,terminal_output +504,421602,"TERMINAL",0,0,"122",,terminal_output +505,422010,"TERMINAL",0,0,"2\t ",,terminal_output +506,422624,"TERMINAL",0,0,"233",,terminal_output +507,422980,"TERMINAL",0,0,"3\t ",,terminal_output +508,423647,"TERMINAL",0,0,"344",,terminal_output +509,424057,"TERMINAL",0,0,"4\t ",,terminal_output +510,424672,"TERMINAL",0,0,"455",,terminal_output +511,425082,"TERMINAL",0,0,"5\t ",,terminal_output +512,425697,"TERMINAL",0,0,"566",,terminal_output +513,426242,"TERMINAL",0,0,"6\t ",,terminal_output +514,426725,"TERMINAL",0,0,"677",,terminal_output +515,427232,"TERMINAL",0,0,"7\t ",,terminal_output +516,427723,"TERMINAL",0,0,"[?1049l\r[?1l>]0;tum_cte0515@hkn0802:~/Projects/jafar[?2004h(jafar) [tum_cte0515@hkn0802 jafar]$ ",,terminal_output +517,428212,"TERMINAL",0,0,"8\t ",,terminal_output 
+518,429285,"TERMINAL",0,0,"9\t ",,terminal_output +519,430312,"TERMINAL",0,0,"6:00\t ",,terminal_output +520,430475,"TERMINAL",0,0,"[?25lru[?25h",,terminal_output +521,430529,"TERMINAL",0,0,"[?25lu[?25h",,terminal_output +522,430658,"TERMINAL",0,0,"[?25ln[?25h",,terminal_output +523,431026,"TERMINAL",0,0,"[?25ln[?25h",,terminal_output +524,431225,"TERMINAL",0,0,"[?25lre[?25h",,terminal_output +525,431335,"TERMINAL",0,0,"1\t ",,terminal_output +526,431464,"TERMINAL",0,0,"[?25l-[?25h",,terminal_output +527,432362,"TERMINAL",0,0,"2\t ",,terminal_output +528,432529,"TERMINAL",0,0,"[?25le[?25h",,terminal_output +529,432576,"TERMINAL",0,0,"[?25lr[?25h",,terminal_output +530,432841,"TERMINAL",0,0,"[?25l-[?25h",,terminal_output +531,432928,"TERMINAL",0,0,"[?25l3[?25h",,terminal_output +532,433403,"TERMINAL",0,0,"\r\n[?2004l\r]0;tum_cte0515@hkn0802:~/Projects/jafar_jobs_3[?2004h(jafar) [tum_cte0515@hkn0802 jafar_jobs_3]$ ",,terminal_output +533,433422,"TERMINAL",0,0,"3\t ",,terminal_output +534,434321,"TERMINAL",0,0,"[?25lvim[?25h[?25li[?25h[?25lm[?25h",,terminal_output +535,434412,"TERMINAL",0,0,"[?25l [?25h",,terminal_output +536,434449,"TERMINAL",0,0,"4\t ",,terminal_output +537,435478,"TERMINAL",0,0,"51",,terminal_output +538,435914,"TERMINAL",0,0,"runner-3",,terminal_output +539,436199,"TERMINAL",0,0,"queue",,terminal_output +540,436510,"TERMINAL",0,0,"6\t ",,terminal_output +541,436565,"TERMINAL",0,0,"dev",,terminal_output +542,437023,"TERMINAL",0,0,"queue",,terminal_output +543,437434,"TERMINAL",0,0,"sbatch slurm/jobs/mihir/horeka/causal_big_runs/train_dynamics_2_nodes.sbatch ",,terminal_output +544,437560,"TERMINAL",0,0,"7\t ",,terminal_output +545,438452,"TERMINAL",0,0,"\rvim",,terminal_output +546,438614,"TERMINAL",0,0,"8\t ",,terminal_output +547,439677,"TERMINAL",0,0,"9\t ",,terminal_output +548,439805,"TERMINAL",0,0,"[?25l[?25h",,terminal_output +549,439953,"TERMINAL",0,0,"",,terminal_output +550,440167,"TERMINAL",0,0,"",,terminal_output +551,440384,"TERMINAL",0,0,"",,terminal_output +552,440581,"TERMINAL",0,0,"",,terminal_output +553,440745,"TERMINAL",0,0,"",,terminal_output +554,440746,"TERMINAL",0,0,"10\t ",,terminal_output +555,441318,"TERMINAL",0,0,"",,terminal_output +556,441750,"TERMINAL",0,0,"1\t ",,terminal_output +557,441750,"TERMINAL",0,0,"[?25l_[1@8[?25h",,terminal_output +558,442238,"TERMINAL",0,0,"\r\n[?2004l\r[?1049h[>4;2m[?1h=[?2004h[?1004h[?12h[?12l[?25l""slurm/jobs/mihir/horeka/causal_big_runs/train_dynamics_8_nodes.sbatch"" 81L, 2418B▽ Pzz\[0%m [>c]10;?]11;?#!/usr/bin/env bash#SBATCH --nodes=8#SBATCH --ntasks-per-node=4\r\n#SBATCH --time=48:00:00\r\n#SBATCH --partition=accelerated\r\n#SBATCH --cpus-per-task=5\r\n#SBATCH --gres=gpu:4\r\n#SBATCH --output=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir/causal/dynamics-cotraining/%x_%j..log\r\n#SBATCH --error=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir/causal/dynamics-cotraining/%x_%j.llog\r\n#SBATCH --job-name=train_dynamics_causal_8_node\r\n#SBATCH --requeue\r\n#SBATCH --signal=b:usr1@300 # 5 min before timeout\r\n\r\n# --- signal trap to requeue job before timeout ---\r\nrequeue_job() {\r\n echo ""[$(date)] caught sigusr1 (timeout warning), requeueing slurm job $SLURM_JOB_ID...""\r\n # optional: trigger checkpoint saving here\r\n # e.g., touch $checkpoint_dir/requeue_trigger\r\n scontrol requeue $SLURM_JOB_ID\r\n exit 0\r\n}5,17Top[?25h",,terminal_output 
+559,442334,"TERMINAL",0,0,"P+q436f\P+q6b75\P+q6b64\P+q6b72\P+q6b6c\P+q2332\P+q2334\P+q2569\P+q2a37\P+q6b31\[?12$p[?25l/3333/3333 [?25h[?25l/f6f6/e3e3 [?25h",,terminal_output +560,442797,"TERMINAL",0,0,"3\t ",,terminal_output +561,443370,"TERMINAL",0,0,"[?25lh 6[?25h",,terminal_output +562,443923,"TERMINAL",0,0,"4\t ",,terminal_output +563,444846,"TERMINAL",0,0,"5\t ",,terminal_output +564,444940,"TERMINAL",0,0,"[?25lr[?25h",,terminal_output +565,445007,"TERMINAL",0,0,"[?25l0 0[?25h",,terminal_output +566,445450,"TERMINAL",0,0,"[?25ll 7[?25h",,terminal_output +567,445974,"TERMINAL",0,0,"6\t ",,terminal_output +568,446324,"TERMINAL",0,0,"[?25ll 8[?25h",,terminal_output +569,446484,"TERMINAL",0,0,"[?25li -- INSERT --5,18Top[?25h",,terminal_output +570,447015,"TERMINAL",0,0,"7\t ",,terminal_output +571,447130,"TERMINAL",0,0,"[?25l:00:07[?25h",,terminal_output +572,447230,"TERMINAL",0,0,"[?25l:00:06[?25h",,terminal_output +573,447644,"TERMINAL",0,0,"[?25l1:00:007[?25h[?25l0:00:008[?25h",,terminal_output +574,448020,"TERMINAL",0,0,"8\t ",,terminal_output +575,448385,"TERMINAL",0,0,"[?25l:00:07[?25h",,terminal_output +576,448477,"TERMINAL",0,0,"[?25l:00:06[?25h",,terminal_output +577,448753,"TERMINAL",0,0,"[?25l0:00:007[?25h",,terminal_output +578,448998,"TERMINAL",0,0,"9\t ",,terminal_output +579,449066,"TERMINAL",0,0,"[?25l8:00:008[?25h",,terminal_output +580,449302,"TERMINAL",0,0,"[?25l^[",,terminal_output +581,449416,"TERMINAL",0,0," 5,17Top[?25h",,terminal_output +582,449921,"TERMINAL",0,0,"[?25l::[?25h",,terminal_output +583,449964,"TERMINAL",0,0,"w",,terminal_output +584,450061,"TERMINAL",0,0,"20\t ",,terminal_output +585,450411,"TERMINAL",0,0,"q",,terminal_output +586,451166,"TERMINAL",0,0,"1\t ",,terminal_output +587,451231,"TERMINAL",0,0,"\r[?25l[?2004l[>4;m""slurm/jobs/mihir/horeka/causal_big_runs/train_dynamics_8_nodes.sbatch"" 81L, 2418B written",,terminal_output +588,451403,"TERMINAL",0,0,"\r\r\r\n[?1004l[?2004l[?1l>[?25h[>4;m[?1049l]0;tum_cte0515@hkn0802:~/Projects/jafar_jobs_3[?2004h(jafar) [tum_cte0515@hkn0802 jafar_jobs_3]$ ",,terminal_output +589,452144,"TERMINAL",0,0,"2\t ",,terminal_output +590,452197,"TERMINAL",0,0,"vim slurm/jobs/mihir/horeka/causal_big_runs/train_dynamics_8_nodes.sbatch ",,terminal_output +591,452646,"TERMINAL",0,0,"\rrunner-3",,terminal_output +592,453192,"TERMINAL",0,0,"queue",,terminal_output +593,453224,"TERMINAL",0,0,"3\t ",,terminal_output +594,453608,"TERMINAL",0,0,"dev",,terminal_output +595,454004,"TERMINAL",0,0,"queue",,terminal_output +596,454228,"TERMINAL",0,0,"4\t ",,terminal_output +597,454392,"TERMINAL",0,0,"sbatch slurm/jobs/mihir/horeka/causal_big_runs/train_dynamics_2_nodes.sbatch ",,terminal_output +598,455413,"TERMINAL",0,0,"5\t ",,terminal_output +599,456487,"TERMINAL",0,0,"6\t ",,terminal_output +600,457535,"TERMINAL",0,0,"7\t ",,terminal_output +601,457702,"TERMINAL",0,0,"[?25l2[?25h",,terminal_output +602,458312,"TERMINAL",0,0,"[1@8",,terminal_output +603,458773,"TERMINAL",0,0,"8\t ",,terminal_output +604,459712,"TERMINAL",0,0,"9\t ",,terminal_output +605,459803,"TERMINAL",0,0,"\r\n[?2004l\rSubmitted batch job 3388140\r\n]0;tum_cte0515@hkn0802:~/Projects/jafar_jobs_3[?2004h(jafar) [tum_cte0515@hkn0802 jafar_jobs_3]$ ",,terminal_output +606,460567,"TERMINAL",0,0,"q",,terminal_output +607,460673,"TERMINAL",0,0,"[?25lu[?25h",,terminal_output +608,460674,"TERMINAL",0,0,"30\t ",,terminal_output +609,460879,"TERMINAL",0,0,"[?25le[?25h[?25lu[?25h",,terminal_output +610,460974,"TERMINAL",0,0,"[?25le[?25h",,terminal_output 
+611,461092,"TERMINAL",0,0,"\r\n[?2004l\r[?1049h(B[?7hEvery 1.0s: squeue --mehkn0802.localdomain: Thu Jul 31 17:06:31 2025JOBID PARTITION NAME USER ST\tTIME NODES NODELIST(REASON)3386722 accelerat train_dy tum_cte0 PD\t0:00\t 8 (Priority)3386719 accelerat train_to tum_cte0 PD\t0:00\t 2 (Priority)3386718 accelerat train_to tum_cte0 PD\t0:00\t 2 (Priority)3388140 accelerat train_dy tum_cte0 PD\t0:00\t 8 (Priority)3388135 accelerat train_dy tum_cte0 R\t2:52\t 3 hkn[0811-0813]3388128 accelerat interact tum_cte0 R\t4:52\t 1 hkn0802",,terminal_output +612,461743,"TERMINAL",0,0,"1\t ",,terminal_output +613,462150,"TERMINAL",0,0,"233",,terminal_output +614,462772,"TERMINAL",0,0,"3\t ",,terminal_output +615,463183,"TERMINAL",0,0,"344",,terminal_output +616,463798,"TERMINAL",0,0,"4\t ",,terminal_output +617,464095,"TERMINAL",0,0,"455",,terminal_output +618,464814,"TERMINAL",0,0,"5\t ",,terminal_output +619,465120,"TERMINAL",0,0,"566",,terminal_output +620,466045,"TERMINAL",0,0,"6\t ",,terminal_output +621,466120,"TERMINAL",0,0,"677",,terminal_output +622,467065,"TERMINAL",0,0,"7\t ",,terminal_output +623,467139,"TERMINAL",0,0,"788",,terminal_output +624,467634,"TERMINAL",0,0,"[?1049l\r[?1l>]0;tum_cte0515@hkn0802:~/Projects/jafar_jobs_3[?2004h(jafar) [tum_cte0515@hkn0802 jafar_jobs_3]$ ",,terminal_output +625,468020,"TERMINAL",0,0,"8\t ",,terminal_output +626,468104,"TERMINAL",0,0,"f",,terminal_output +627,468353,"TERMINAL",0,0,"[?25lq[?25h",,terminal_output +628,468421,"TERMINAL",0,0,"[?25lu[?25h",,terminal_output +629,468492,"TERMINAL",0,0,"[?25le[?25h",,terminal_output +630,468559,"TERMINAL",0,0,"[?25lu[?25h",,terminal_output +631,468620,"TERMINAL",0,0,"[?25le[?25h",,terminal_output +632,468715,"TERMINAL",0,0,"\r\n[?2004l\r[?1049h(B[?7hEvery 1.0s: squeue -o ""%.10i %.16P %.30j %.8u %.8T %.10M %.9l %.6D %R""hkn0802.localdomain: Thu Jul 31 17:06:38 2025JOBIDPARTITIONNAME USER STATE\t TIME TIME_LIMI NODES NODELIST(REASON)3386722\taccelerated train_dynamics_causal_8_node tum_cte0 PENDING\t 0:00 2-00:00:00\t8 (Priority)3386719\tacceleratedtrain_tokenizer_1e-4 tum_cte0 PENDING\t 0:00 2-00:00:00\t2 (Priority)3386718\tacceleratedtrain_tokenizer_1e-4 tum_cte0 PENDING\t 0:00 2-00:00:00\t2 (Priority)3388140\taccelerated train_dynamics_causal_8_node tum_cte0 PENDING\t 0:00 8:00:008 (Priority)3388135\taccelerated train_dynamics_causal_2_node tum_cte0 RUNNING\t 3:00 8:00:003 hkn[0811-0813]3388128\tacceleratedinteractive tum_cte0 RUNNING\t 5:00 5:00:001 hkn0802\t ",,terminal_output +633,469026,"TERMINAL",0,0,"9\t ",,terminal_output +634,469735,"TERMINAL",0,0,"4011",,terminal_output +635,470037,"TERMINAL",0,0,"40\t ",,terminal_output +636,470768,"TERMINAL",0,0,"122",,terminal_output +637,471068,"TERMINAL",0,0,"1\t ",,terminal_output +638,471747,"TERMINAL",0,0,"233",,terminal_output +639,472104,"TERMINAL",0,0,"2\t ",,terminal_output +640,472802,"TERMINAL",0,0,"344",,terminal_output +641,473135,"TERMINAL",0,0,"3\t ",,terminal_output +642,474032,"TERMINAL",0,0,"455",,terminal_output +643,474186,"TERMINAL",0,0,"4\t ",,terminal_output +644,474953,"TERMINAL",0,0,"566",,terminal_output +645,475330,"TERMINAL",0,0,"5\t ",,terminal_output +646,475872,"TERMINAL",0,0,"677",,terminal_output +647,476385,"TERMINAL",0,0,"6\t ",,terminal_output +648,476999,"TERMINAL",0,0,"788",,terminal_output +649,477413,"TERMINAL",0,0,"7\t ",,terminal_output +650,477819,"TERMINAL",0,0,"899",,terminal_output +651,478351,"TERMINAL",0,0,"8\t ",,terminal_output +652,478954,"TERMINAL",0,0,"91010",,terminal_output +653,479387,"TERMINAL",0,0,"9\t 
",,terminal_output +654,479969,"TERMINAL",0,0,"5011",,terminal_output +655,480433,"TERMINAL",0,0,"50\t ",,terminal_output +656,480994,"TERMINAL",0,0,"122",,terminal_output +657,481476,"TERMINAL",0,0,"1\t ",,terminal_output +658,482017,"TERMINAL",0,0,"233",,terminal_output +659,482634,"TERMINAL",0,0,"2\t ",,terminal_output +660,482938,"TERMINAL",0,0,"344",,terminal_output +661,483656,"TERMINAL",0,0,"3\t ",,terminal_output +662,483974,"TERMINAL",0,0,"455",,terminal_output +663,484598,"TERMINAL",0,0,"4\t ",,terminal_output +664,484986,"TERMINAL",0,0,"566",,terminal_output +665,485706,"TERMINAL",0,0,"5\t ",,terminal_output +666,485926,"TERMINAL",0,0,"677",,terminal_output +667,487144,"TERMINAL",0,0,"6\t ",,terminal_output +668,487145,"TERMINAL",0,0,"788",,terminal_output +669,487956,"TERMINAL",0,0,"8\t ",,terminal_output +670,487962,"TERMINAL",0,0,"899",,terminal_output +671,489086,"TERMINAL",0,0,"9\t ",,terminal_output +672,489115,"TERMINAL",0,0,"92020",,terminal_output +673,490115,"TERMINAL",0,0,"7:0011",,terminal_output +674,490155,"TERMINAL",0,0,"7:00\t ",,terminal_output +675,491029,"TERMINAL",0,0,"122",,terminal_output +676,491030,"TERMINAL",0,0,"1\t ",,terminal_output +677,492053,"TERMINAL",0,0,"233",,terminal_output +678,492084,"TERMINAL",0,0,"2\t ",,terminal_output +679,493183,"TERMINAL",0,0,"344",,terminal_output +680,493183,"TERMINAL",0,0,"3\t ",,terminal_output +681,494126,"TERMINAL",0,0,"455",,terminal_output +682,494158,"TERMINAL",0,0,"4\t ",,terminal_output +683,495126,"TERMINAL",0,0,"M53388135\taccelerated train_dynamics_causal_2_node tum_cte0 COMPLETI\t 3:25 8:00:003 hkn[0811-0813]6",,terminal_output +684,495186,"TERMINAL",0,0,"54",,terminal_output +685,496150,"TERMINAL",0,0,"67",,terminal_output +686,496257,"TERMINAL",0,0,"6\t ",,terminal_output +687,497277,"TERMINAL",0,0,"78",,terminal_output +688,497283,"TERMINAL",0,0,"7\t ",,terminal_output +689,498107,"TERMINAL",0,0,"89",,terminal_output +690,498312,"TERMINAL",0,0,"8\t ",,terminal_output +691,499090,"TERMINAL",0,0,"930",,terminal_output +692,499357,"TERMINAL",0,0,"9\t ",,terminal_output +693,500142,"TERMINAL",0,0,"101",,terminal_output +694,500411,"TERMINAL",0,0,"10\t ",,terminal_output +695,501118,"TERMINAL",0,0,"12",,terminal_output +696,501480,"TERMINAL",0,0,"1\t ",,terminal_output +697,502192,"TERMINAL",0,0,"23",,terminal_output +698,502489,"TERMINAL",0,0,"2\t ",,terminal_output +699,503319,"TERMINAL",0,0,"34",,terminal_output +700,503545,"TERMINAL",0,0,"3\t ",,terminal_output +701,504004,"TERMINAL",0,0,"[?1049l\r[?1l>]0;tum_cte0515@hkn0802:~/Projects/jafar_jobs_3[?2004h(jafar) [tum_cte0515@hkn0802 jafar_jobs_3]$ ",,terminal_output +702,504592,"TERMINAL",0,0,"4\t ",,terminal_output +703,504973,"TERMINAL",0,0,"[?25lcd[?25h",,terminal_output +704,505035,"TERMINAL",0,0,"[?25ld[?25h",,terminal_output +705,505262,"TERMINAL",0,0,"[?25l [?25h",,terminal_output +706,505673,"TERMINAL",0,0,"5\t ",,terminal_output +707,506302,"TERMINAL",0,0,"[?25l$[?25h",,terminal_output +708,506575,"TERMINAL",0,0,"[?25lw[?25h",,terminal_output +709,506680,"TERMINAL",0,0,"[?25ls[?25h",,terminal_output +710,506709,"TERMINAL",0,0,"6\t ",,terminal_output +711,506802,"TERMINAL",0,0,"[?25l_[?25h",,terminal_output +712,507029,"TERMINAL",0,0,"[?25ld[?25h",,terminal_output +713,507113,"TERMINAL",0,0,"[?25li[?25h",,terminal_output +714,507230,"TERMINAL",0,0,"[?25lr[?25h",,terminal_output +715,507469,"TERMINAL",0,0,"\r\n[?2004l\r]0;tum_cte0515@hkn0802:/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared[?2004h(jafar) [tum_cte0515@hkn0802 
tum_ind3695-jafa_ws_shared]$ ",,terminal_output +716,507731,"TERMINAL",0,0,"7\t ",,terminal_output +717,507875,"TERMINAL",0,0,"[?25lls[?25h[?25ls[?25h",,terminal_output +718,507986,"TERMINAL",0,0,"\r\n[?2004l\r",,terminal_output +719,508057,"TERMINAL",0,0,"checkpoints count_items.sh data data_new huggingface logs possibly_corrupt_files_in_this_workspace.txt scripts\r\n]0;tum_cte0515@hkn0802:/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared[?2004h(jafar) [tum_cte0515@hkn0802 tum_ind3695-jafa_ws_shared]$ ",,terminal_output +720,508260,"TERMINAL",0,0,"c",,terminal_output +721,508500,"TERMINAL",0,0,"[?25ld[?25h",,terminal_output +722,508507,"TERMINAL",0,0,"[?25l [?25h",,terminal_output +723,509165,"TERMINAL",0,0,"9\t ",,terminal_output +724,510002,"TERMINAL",0,0,"20\t ",,terminal_output +725,510216,"TERMINAL",0,0,"[?25ll[?25h",,terminal_output +726,510308,"TERMINAL",0,0,"[?25lo[?25h",,terminal_output +727,510406,"TERMINAL",0,0,"gs/",,terminal_output +728,510859,"TERMINAL",0,0,"\r\n[?2004l\r]0;tum_cte0515@hkn0802:/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs[?2004h(jafar) [tum_cte0515@hkn0802 logs]$ ",,terminal_output +729,510896,"TERMINAL",0,0,"1\t ",,terminal_output +730,511457,"TERMINAL",0,0,"c",,terminal_output +731,511547,"TERMINAL",0,0,"[?25ld[?25h",,terminal_output +732,511611,"TERMINAL",0,0,"[?25l [?25h",,terminal_output +733,511772,"TERMINAL",0,0,"[?25ll[?25h",,terminal_output +734,511878,"TERMINAL",0,0,"2\t ",,terminal_output +735,511950,"TERMINAL",0,0,"[?25lo[?25hgs_",,terminal_output +736,512357,"TERMINAL",0,0,"[?25lm[?25h",,terminal_output +737,512444,"TERMINAL",0,0,"ihir/",,terminal_output +738,512785,"TERMINAL",0,0,"\r\n[?2004l\r]0;tum_cte0515@hkn0802:/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir[?2004h(jafar) [tum_cte0515@hkn0802 logs_mihir]$ ",,terminal_output +739,512988,"TERMINAL",0,0,"3\t ",,terminal_output +740,513055,"TERMINAL",0,0,"s\r\n[?2004l\rbash: s: command not found...\r\n]0;tum_cte0515@hkn0802:/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir[?2004h(jafar) [tum_cte0515@hkn0802 logs_mihir]$ ",,terminal_output +741,513530,"TERMINAL",0,0,"[?25lls[?25h",,terminal_output +742,513603,"TERMINAL",0,0,"[?25ls[?25h",,terminal_output +743,513698,"TERMINAL",0,0,"\r\n[?2004l\r",,terminal_output +744,513829,"TERMINAL",0,0,"big_run train_lam_action_space_scaling_6_3318549.log\r\nbig-runs train_lam_action_space_scaling_6_3320178.log\r\ncausal train_lam_action_space_scaling_6_3321528.log\r\nmaskgit-maskprob-fix train_lam_action_space_scaling_6_3329790.log\r\ntrain_dyn_causal_180M_3372931.log train_lam_action_space_scaling_6_3329805.log\r\ntrain_dyn_causal_180M_3372963.log train_lam_action_space_scaling_6_3331287.log\r\ntrain_dyn_causal_180M_3372969.log train_lam_action_space_scaling_8_3318550.log\r\ntrain_dyn_causal_180M_3373107.log train_lam_action_space_scaling_8_3329791.log\r\ntrain_dyn_causal_255M_3372932.log train_lam_action_space_scaling_8_3329806.log\r\ntrain_dyn_causal_255M_3372970.log train_lam_action_space_scaling_8_3331288.log\r\ntrain_dyn_causal_255M_3373108.log train_lam_minecraft_overfit_sample_3309655.log\r\ntrain_dyn_causal_356M_3372934.log train_lam_model_size_scaling_38M_3317098.log\r\ntrain_dyn_causal_356M_3372971.log train_lam_model_size_scaling_38M_3317115.log\r\ntrain_dyn_causal_356M_3373109.log train_lam_model_size_scaling_38M_3317231.log\r\ntrain_dyn_causal_500M_3372936.log train_tokenizer_batch_size_scaling_16_node_3321526.log\r\ntrain_dyn_causal_500M_3372972.log 
train_tokenizer_batch_size_scaling_1_node_3318551.log\r\ntrain_dyn_causal_500M_3373110.log train_tokenizer_batch_size_scaling_2_node_3318552.log\r\ntrain_dyn_new_arch-bugfixed-spatial-shift_3359343.log train_tokenizer_batch_size_scaling_2_node_3330806.log\r\ntrain_dyn_new_arch-bugfixed-temporal-shift_3359349.log train_tokenizer_batch_size_scaling_2_node_3330848.log\r\ntrain_dyn_yolorun_3333026.log train_tokenizer_batch_size_scaling_2_node_3331282.log\r\ntrain_dyn_yolorun_3333448.log train_tokenizer_batch_size_scaling_4_node_3318553.log\r\ntrain_dyn_yolorun_3335345.log train_tokenizer_batch_size_scaling_4_node_3320175.log\r\ntrain_dyn_yolorun_3335362.log train_tokenizer_batch_size_scaling_4_node_3321524.log\r\ntrain_dyn_yolorun_3348592.log train_tokenizer_batch_size_scaling_8_node_3320176.log\r\ntrain_dyn_yolorun_new_arch_3351743.log train_tokenizer_batch_size_scaling_8_node_3321525.log\r\ntrain_dyn_yolorun_new_arch_3352103.log train_tokenizer_minecraft_overfit_sample_3309656.log\r\ntrain_dyn_yolorun_new_arch_3352115.log train_tokenizer_model_size_scaling_127M_3317233.log\r\ntrain_dyn_yolorun_new_arch_3358457.log train_tokenizer_model_size_scaling_127M_3318554.log\r\ntrain_lam_action_space_scaling_10_3320179.log train_tokenizer_model_size_scaling_140M_3313562.log\r\ntrain_lam_action_space_scaling_10_3321529.log train_tokenizer_model_size_scaling_140M_3316019.log\r\ntrain_lam_action_space_scaling_10_3329786.log train_tokenizer_model_size_scaling_200M_3313563.log\r\ntrain_lam_action_space_scaling_10_3329801.log train_tokenizer_model_size_scaling_200M_3316020.log\r\ntrain_lam_action_space_scaling_10_3331283.log train_tokenizer_model_size_scaling_227M_3317234.log\r\ntrain_lam_action_space_scaling_12_3318546.log train_tokenizer_model_size_scaling_227M_3318555.log\r\ntrain_lam_action_space_scaling_12_3320177.log train_tokenizer_model_size_scaling_227M_3320173.log\r\ntrain_lam_action_space_scaling_12_3321527.log train_tokenizer_model_size_scaling_227M_3321523.log\r\ntrain_lam_action_space_scaling_12_3329787.log train_tokenizer_model_size_scaling_37M_3313565.log\r\ntrain_lam_action_space_scaling_12_3329802.log train_tokenizer_model_size_scaling_37M_3316022.log\r\ntrain_lam_action_space_scaling_12_3331284.log train_tokenizer_model_size_scaling_37M_3317232.log\r\ntrain_lam_action_space_scaling_20_3318547.log train_tokenizer_model_size_scaling_37M_3317239.log\r\ntrain_lam_action_space_scaling_20_3329788.log train_tokenizer_model_size_scaling_37M_3318556.log\r\ntrain_lam_action_space_scaling_20_3329803.log train_tokenizer_model_size_scaling_74M_3318557.log\r\ntrain_lam_action_space_scaling_20_3331285.log train_tokenizer_model_size_scaling_74M_3320174.log\r\ntrain_lam_action_space_scaling_50_3320180.log train_tokenizer_model_size_scaling_74M_3321522.log\r\ntrain_lam_action_space_scaling_50_3329789.log train_tokenizer_model_size_scaling_80M_3313564.log\r\ntrain_lam_action_space_scaling_50_3329804.log train_tokenizer_model_size_scaling_80M_3316026.log\r\ntrain_lam_action_space_scaling_50_3331286.log yoloruns\r\n]0;tum_cte0515@hkn0802:/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir[?2004h(jafar) [tum_cte0515@hkn0802 logs_mihir]$ ",,terminal_output +745,513969,"TERMINAL",0,0,"4\t ",,terminal_output +746,515091,"TERMINAL",0,0,"5\t ",,terminal_output +747,516116,"TERMINAL",0,0,"6\t ",,terminal_output +748,516844,"TERMINAL",0,0,"[?25lcd[?25h",,terminal_output +749,516910,"TERMINAL",0,0,"[?25ld[?25h",,terminal_output +750,517005,"TERMINAL",0,0,"[?25l [?25h",,terminal_output 
+751,517105,"TERMINAL",0,0,"7\t ",,terminal_output +752,517372,"TERMINAL",0,0,"[?25lc[?25h",,terminal_output +753,517566,"TERMINAL",0,0,"[?25la[?25h",,terminal_output +754,517697,"TERMINAL",0,0,"[?25lu[?25h",,terminal_output +755,517903,"TERMINAL",0,0,"sal/",,terminal_output +756,518149,"TERMINAL",0,0,"8\t ",,terminal_output +757,518253,"TERMINAL",0,0,"\r\n[?2004l\r]0;tum_cte0515@hkn0802:/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir/causal[?2004h(jafar) [tum_cte0515@hkn0802 causal]$ ",,terminal_output +758,518387,"TERMINAL",0,0,"[?25lls[?25h",,terminal_output +759,518447,"TERMINAL",0,0,"[?25ls[?25h",,terminal_output +760,518543,"TERMINAL",0,0,"\r\n[?2004l\rdynamics-cotraining\r\n]0;tum_cte0515@hkn0802:/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir/causal[?2004h(jafar) [tum_cte0515@hkn0802 causal]$ ",,terminal_output +761,519170,"TERMINAL",0,0,"c",,terminal_output +762,519204,"TERMINAL",0,0,"9\t ",,terminal_output +763,519311,"TERMINAL",0,0,"[?25ld[?25h",,terminal_output +764,519371,"TERMINAL",0,0,"[?25l [?25h",,terminal_output +765,519523,"TERMINAL",0,0,"dynamics-cotraining/",,terminal_output +766,519856,"TERMINAL",0,0,"\r\n[?2004l\r]0;tum_cte0515@hkn0802:/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir/causal/dynamics-cotraining[?2004h(jafar) [tum_cte0515@hkn0802 dynamics-cotraining]$ ",,terminal_output +767,519951,"TERMINAL",0,0,"l",,terminal_output +768,520055,"TERMINAL",0,0,"[?25ls[?25h",,terminal_output +769,520160,"TERMINAL",0,0,"\r\n[?2004l\rtrain_dynamics_causal_2_node_3373407.log train_dynamics_causal_2_node_3388135.log\r\ntrain_dynamics_causal_2_node_3373407.log_bak train_dynamics_causal_8_node_3373408.log\r\n]0;tum_cte0515@hkn0802:/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir/causal/dynamics-cotraining[?2004h(jafar) [tum_cte0515@hkn0802 dynamics-cotraining]$ ",,terminal_output +770,520261,"TERMINAL",0,0,"30\t ",,terminal_output +771,520759,"TERMINAL",0,0,"[?25lcd[?25h",,terminal_output +772,520817,"TERMINAL",0,0,"[?25ld[?25h",,terminal_output +773,520975,"TERMINAL",0,0,"[?25l [?25h",,terminal_output +774,521342,"TERMINAL",0,0,"1\t ",,terminal_output +775,522292,"TERMINAL",0,0,"2\t ",,terminal_output +776,522668,"TERMINAL",0,0,"",,terminal_output +777,523333,"TERMINAL",0,0,"3\t ",,terminal_output +778,524361,"TERMINAL",0,0,"4\t ",,terminal_output +779,524731,"TERMINAL",0,0,"[?25lq[?25h",,terminal_output +780,524801,"TERMINAL",0,0,"[?25lu[?25h",,terminal_output +781,524961,"TERMINAL",0,0,"[?25le[?25h[?25lu[?25h",,terminal_output +782,525101,"TERMINAL",0,0,"[?25le[?25h",,terminal_output +783,525174,"TERMINAL",0,0,"\r\n[?2004l\r[?1049h(B[?7hEvery 1.0s: squeue --mehkn0802.localdomain: Thu Jul 31 17:07:35 2025JOBID PARTITION NAME USER ST\tTIME NODES NODELIST(REASON)3386719 accelerat train_to tum_cte0 PD\t0:00\t 2 (Priority)3386718 accelerat train_to tum_cte0 PD\t0:00\t 2 (Priority)3386722 accelerat train_dy tum_cte0 PD\t0:00\t 8 (Priority)3388140 accelerat train_dy tum_cte0 PD\t0:00\t 8 (Priority)3388128 accelerat interact tum_cte0 R\t5:56\t 1 hkn0802",,terminal_output +784,525393,"TERMINAL",0,0,"5\t ",,terminal_output +785,526356,"TERMINAL",0,0,"67",,terminal_output +786,526472,"TERMINAL",0,0,"6\t ",,terminal_output +787,527199,"TERMINAL",0,0,"78",,terminal_output +788,527316,"TERMINAL",0,0,"[?1049l\r[?1l>]0;tum_cte0515@hkn0802:/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir/causal/dynamics-cotraining[?2004h(jafar) [tum_cte0515@hkn0802 dynamics-cotraining]$ 
",,terminal_output +789,527483,"TERMINAL",0,0,"7\t ",,terminal_output +790,528095,"TERMINAL",0,0,"d",,terminal_output +791,528535,"TERMINAL",0,0,"[?25ls[?25h",,terminal_output +792,528535,"TERMINAL",0,0,"8\t ",,terminal_output +793,528741,"TERMINAL",0,0,"[?25la[?25h",,terminal_output +794,529611,"TERMINAL",0,0,"9\t ",,terminal_output +795,529864,"TERMINAL",0,0,"[?25ld[?25h",,terminal_output +796,530609,"TERMINAL",0,0,"407 95",,terminal_output +797,530658,"TERMINAL",0,0,"[?25lf[?25h",,terminal_output +798,530959,"TERMINAL",0,0,"[?25ls[?25h",,terminal_output +799,531144,"TERMINAL",0,0,"[?25la[?25h",,terminal_output +800,531292,"TERMINAL",0,0,"[?25lc[?25h",,terminal_output +801,531438,"TERMINAL",0,0,"[?25lc[?25h",,terminal_output +802,531683,"TERMINAL",0,0,"[?25lT[?25h",,terminal_output +803,531684,"TERMINAL",0,0,"1\t ",,terminal_output +804,531816,"TERMINAL",0,0,"[?25l_[?25h",,terminal_output +805,532471,"TERMINAL",0,0,"[?25lt[?25h",,terminal_output +806,532696,"TERMINAL",0,0,"[?25l_[?25h",,terminal_output +807,532696,"TERMINAL",0,0,"2\t ",,terminal_output +808,532825,"TERMINAL",0,0,"[?25lw[?25h",,terminal_output +809,533196,"TERMINAL",0,0,"[?25le[?25h",,terminal_output +810,533300,"TERMINAL",0,0,"[?25le[?25h",,terminal_output +811,533371,"TERMINAL",0,0,"[?25lk[?25h",,terminal_output +812,533674,"TERMINAL",0,0,"\r\n[?2004l\r JobID JobName Partition All State Elapsed Timelimit \r\n--------------- ------------------------------ ---------------- --- ------------ ---------- ---------- \r\n 3372629 train_dynamics_maskprob_fix_8+ accelerated 192 COMPLETED 1-02:29:22 2-00:00:00 \r\n 3372631 train_dynamics_maskprob_fix_2+ accelerated 48 COMPLETED 1-01:17:59 2-00:00:00 \r\n 3372931 train_dyn_causal_180M dev_accelerated 6 FAILED 00:00:33 00:10:00 \r\n 3372932 train_dyn_causal_255M dev_accelerated 6 FAILED 00:00:29 00:10:00 \r\n 3372934 train_dyn_causal_356M dev_accelerated 6 FAILED 00:00:29 00:10:00 \r\n 3372936 train_dyn_causal_500M dev_accelerated 6 FAILED 00:00:29 00:10:00 \r\n 3372969 train_dyn_causal_180M dev_accelerated 6 FAILED 00:02:11 00:10:00 \r\n 3372970 train_dyn_causal_255M dev_accelerated 6 FAILED 00:02:24 00:10:00 \r\n 3372971 train_dyn_causal_356M dev_accelerated 6 FAILED 00:02:08 00:10:00 \r\n 3372972 train_dyn_causal_500M dev_accelerated 6 FAILED 00:02:09 00:10:00 \r\n 3373107 train_dyn_causal_180M dev_accelerated 6 COMPLETED 00:06:15 00:10:00 \r\n 3373108 train_dyn_causal_255M dev_accelerated 6 COMPLETED 00:07:14 00:10:00 \r\n 3373109 train_dyn_causal_356M dev_accelerated 6 FAILED 00:04:17 00:10:00 \r\n 3373110 train_dyn_causal_500M dev_accelerated 6 FAILED 00:04:59 00:10:00 \r\n 3373400 wrap accelerated 6 COMPLETED 00:04:34 02:00:00 \r\n 3373404 wrap accelerated 6 COMPLETED 00:04:38 02:00:00 \r\n 3373409 wrap accelerated 6 COMPLETED 00:41:24 02:00:00 \r\n 3373410 wrap accelerated 6 COMPLETED 00:42:56 02:00:00 \r\n 3371237 train_dynamics_maskprob_fix_8+ accelerated 192 FAILED 23:35:23 2-00:00:00 \r\n 3373407 train_dynamics_causal_2_node accelerated 48 FAILED 00:05:15 2-00:00:00 \r\n 3379613 wrap accelerated 6 FAILED 00:01:28 02:00:00 \r\n 3379616 wrap accelerated 6 COMPLETED 04:44:13 10:00:00 \r\n 3386718 train_tokenizer_1e-4 accelerated 0 PENDING 00:00:00 2-00:00:00 \r\n 3386719 train_tokenizer_1e-4 accelerated 0 PENDING 00:00:00 2-00:00:00 \r\n 3386722 train_dynamics_causal_8_node accelerated 0 PENDING 00:00:00 2-00:00:00 \r\n 3388135 train_dynamics_causal_2_node accelerated 72 FAILED 00:03:25 08:00:00 \r\n 3388140 train_dynamics_causal_8_node accelerated 192 RUNNING 00:00:03 
08:00:00 \r\n]0;tum_cte0515@hkn0802:/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir/causal/dynamics-cotraining[?2004h(jafar) [tum_cte0515@hkn0802 dynamics-cotraining]$ ",,terminal_output +813,533757,"TERMINAL",0,0,"3\t ",,terminal_output +814,534852,"TERMINAL",0,0,"5\t ",,terminal_output +815,535804,"TERMINAL",0,0,"6\t ",,terminal_output +816,536976,"TERMINAL",0,0,"7\t ",,terminal_output +817,537892,"TERMINAL",0,0,"8\t ",,terminal_output +818,538948,"TERMINAL",0,0,"9\t ",,terminal_output +819,540074,"TERMINAL",0,0,"50\t ",,terminal_output +820,541104,"TERMINAL",0,0,"1\t ",,terminal_output +821,541924,"TERMINAL",0,0,"fsacct_week",,terminal_output +822,542063,"TERMINAL",0,0,"2\t ",,terminal_output +823,542320,"TERMINAL",0,0,"queue",,terminal_output +824,543108,"TERMINAL",0,0,"3\t ",,terminal_output +825,543166,"TERMINAL",0,0,"fsacct_week",,terminal_output +826,543381,"TERMINAL",0,0,"",,terminal_output +827,543907,"TERMINAL",0,0,"l",,terminal_output +828,543989,"TERMINAL",0,0,"[?25ls[?25h",,terminal_output +829,544156,"TERMINAL",0,0,"4\t ",,terminal_output +830,544226,"TERMINAL",0,0,"\r\n[?2004l\rtrain_dynamics_causal_2_node_3373407.log train_dynamics_causal_2_node_3388135.log\r\ntrain_dynamics_causal_2_node_3373407.log_bak train_dynamics_causal_8_node_3373408.log\r\n]0;tum_cte0515@hkn0802:/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir/causal/dynamics-cotraining[?2004h(jafar) [tum_cte0515@hkn0802 dynamics-cotraining]$ ",,terminal_output +831,545178,"TERMINAL",0,0,"5\t ",,terminal_output +832,546250,"TERMINAL",0,0,"6\t ",,terminal_output +833,547264,"TERMINAL",0,0,"7\t ",,terminal_output +834,548305,"TERMINAL",0,0,"8\t ",,terminal_output +835,549103,"TERMINAL",0,0,"sl",,terminal_output +836,549374,"TERMINAL",0,0,"9\t ",,terminal_output +837,549986,"TERMINAL",0,0,"[?25ll[?25h",,terminal_output +838,550058,"TERMINAL",0,0,"[?25ls[?25h",,terminal_output +839,550170,"TERMINAL",0,0,"[?25l [?25h",,terminal_output +840,550397,"TERMINAL",0,0,"8:00\t ",,terminal_output +841,550706,"TERMINAL",0,0,"[?25lt[?25h",,terminal_output +842,550820,"TERMINAL",0,0,"rain_dynamics_causal_",,terminal_output +843,551468,"TERMINAL",0,0,"1\t ",,terminal_output +844,551715,"TERMINAL",0,0,"[?25l2[?25h",,terminal_output +845,551873,"TERMINAL",0,0,"_node_33",,terminal_output +846,552473,"TERMINAL",0,0,"2\t ",,terminal_output +847,553621,"TERMINAL",0,0,"3\t ",,terminal_output +848,553900,"TERMINAL",0,0,"[?25l8[?25h",,terminal_output +849,553984,"TERMINAL",0,0,"8135.log ",,terminal_output +850,554533,"TERMINAL",0,0,"\r\n[?2004l\rtrain_dynamics_causal_2_node_3388135.log\r\n]0;tum_cte0515@hkn0802:/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir/causal/dynamics-cotraining[?2004h(jafar) [tum_cte0515@hkn0802 dynamics-cotraining]$ ",,terminal_output +851,554570,"TERMINAL",0,0,"4\t ",,terminal_output +852,555656,"TERMINAL",0,0,"5\t ",,terminal_output +853,556652,"TERMINAL",0,0,"p",,terminal_output +854,556653,"TERMINAL",0,0,"6\t ",,terminal_output +855,556748,"TERMINAL",0,0,"[?25lw[?25h",,terminal_output +856,556829,"TERMINAL",0,0,"[?25ld[?25h",,terminal_output +857,557674,"TERMINAL",0,0,"7\t ",,terminal_output +858,558297,"TERMINAL",0,0,"ls train_dynamics_causal_2_node_3388135.log ",,terminal_output +859,558732,"TERMINAL",0,0,"8\t ",,terminal_output +860,559276,"TERMINAL",0,0,"[?25l[?25h",,terminal_output +861,559391,"TERMINAL",0,0,"",,terminal_output +862,559846,"TERMINAL",0,0,"10\t ",,terminal_output +863,560328,"TERMINAL",0,0,"",,terminal_output 
+864,560864,"TERMINAL",0,0,"1\t ",,terminal_output +865,561533,"TERMINAL",0,0,"[?25lt[1@$[?25h",,terminal_output +866,561821,"TERMINAL",0,0,"[1@(",,terminal_output +867,561848,"TERMINAL",0,0,"2\t ",,terminal_output +868,562029,"TERMINAL",0,0,"[1@p",,terminal_output +869,562196,"TERMINAL",0,0,"[1@w",,terminal_output +870,562448,"TERMINAL",0,0,"[1@d",,terminal_output +871,562861,"TERMINAL",0,0,"[?25ld[?25h",,terminal_output +872,562885,"TERMINAL",0,0,"3\t ",,terminal_output +873,563303,"TERMINAL",0,0,"[?25lt[1@s[?25h",,terminal_output +874,563927,"TERMINAL",0,0,"4\t ",,terminal_output +875,564025,"TERMINAL",0,0,"[?25ls[?25h",,terminal_output +876,564119,"TERMINAL",0,0,"[?25lt[1@d[?25h",,terminal_output +877,564605,"TERMINAL",0,0,"[?25lt[1@)[?25h",,terminal_output +878,564913,"TERMINAL",0,0,"[?25lt[1@/[?25h",,terminal_output +879,565024,"TERMINAL",0,0,"5\t ",,terminal_output +880,565125,"TERMINAL",0,0,"\r\n[?2004l\r/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir/causal/dynamics-cotraining/train_dynamics_causal_2_node_3388135.log\r\n]0;tum_cte0515@hkn0802:/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir/causal/dynamics-cotraining[?2004h(jafar) [tum_cte0515@hkn0802 dynamics-cotraining]$ ",,terminal_output +881,566088,"TERMINAL",0,0,"6\t ",,terminal_output +882,566408,"/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir/causal/dynamics-cotraining/train_dynamics_causal_2_node_3388135.log",0,0,"#!/usr/bin/env bash\n\n#SBATCH --nodes=3\n#SBATCH --ntasks-per-node=4\n#SBATCH --time=08:00:00\n#SBATCH --partition=accelerated\n#SBATCH --cpus-per-task=5\n#SBATCH --gres=gpu:4\n#SBATCH --output=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir/causal/dynamics-cotraining/%x_%j.log\n#SBATCH --error=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir/causal/dynamics-cotraining/%x_%j.log\n#SBATCH --job-name=train_dynamics_causal_2_node\n#SBATCH --requeue\n#SBATCH --signal=b:usr1@300 # 5 min before timeout\n\n# --- signal trap to requeue job before timeout ---\nrequeue_job() {\n echo ""[$(date)] caught sigusr1 (timeout warning), requeueing slurm job $SLURM_JOB_ID...""\n # optional: trigger checkpoint saving here\n # e.g., touch $checkpoint_dir/requeue_trigger\n scontrol requeue $SLURM_JOB_ID\n exit 0\n}\n\ntrap requeue_job sigusr1\n\n# set checkpoint flag based on restart count\nrestart_count=$(scontrol show job $SLURM_JOB_ID | grep -o 'Restarts=[0-9]*' | cut -d'=' -f2)\n\nif [ $restart_count -eq 0 ]; then\n restore_ckpt_flag=""--no-restore-ckpt""\nelse\n restore_ckpt_flag=""--restore-ckpt""\nfi\n\n\n\n# Log the sbatch script\ncat $0\n\nmodule unload mpi/openmpi/5.0\nmodule unload devel/cuda/12.4\nsource .venv/bin/activate\n\narray_records_dir=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_new/open_ai_minecraft_arrayrecords_chunked\n\njob_name=$SLURM_JOB_NAME\nslurm_job_id=3373407\n\nCHECKPOINT_DIR=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/causal/$job_name/$slurm_job_id\nmkdir -p $CHECKPOINT_DIR\n\n# tokenizer with the new structure supporting larger ffn_dim\ntokenizer_ckpt_dir=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data/checkpoints/train_tokenizer_lr_sweep_1e-4_larger_ffn/\n\nenv | grep SLURM\n\nsrun python train_dynamics.py \\n --save_ckpt \\n --restore_ckpt \\n --wandb_id $SLURM_JOB_ID \\n --ckpt_dir $CHECKPOINT_DIR \\n --batch_size=96 \\n --init_lr=0 \\n --max_lr=2e-5 \\n --log_image_interval=1000 \\n --log \\n --log_checkpoint_interval=1000 
\\n --name=dynamics-causal-2-node-$slurm_job_id \\n --tags dynamics causal 2-node \\n --entity instant-uv \\n --project jafar \\n --dyna_dim=1024 \\n --dyna_num_blocks=16 \\n --dyna_num_heads=16 \\n --dyna_ffn_dim=4096 \\n --tokenizer_checkpoint=$tokenizer_ckpt_dir \\n --data_dir $array_records_dir &\n\nchild_pid=$!\n\nwait $child_pid\n/var/spool/slurmd/job3388135/slurm_script: line 42: .venv/bin/activate: No such file or directory\nSLURM_STEP_NUM_TASKS=1\nSLURM_JOB_USER=tum_cte0515\nSLURM_TASKS_PER_NODE=4(x3)\nSLURM_JOB_UID=999226\nSLURM_TASK_PID=1740698\nSLURM_JOB_GPUS=0,1,2,3\nSLURM_LOCALID=0\nSLURM_SUBMIT_DIR=/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar_jobs_3\nSLURMD_NODENAME=hkn0811\nSLURM_JOB_START_TIME=1753974219\nSLURM_STEP_NODELIST=hkn0802\nSLURM_CLUSTER_NAME=hk\nSLURM_JOB_END_TIME=1754003019\nSLURM_PMI2_SRUN_PORT=45055\nSLURM_CPUS_ON_NODE=24\nSLURM_JOB_CPUS_PER_NODE=24(x3)\nSLURM_GPUS_ON_NODE=4\nSLURM_GTIDS=0\nSLURM_JOB_PARTITION=accelerated\nSLURM_TRES_PER_TASK=cpu=5\nSLURM_OOM_KILL_STEP=0\nSLURM_JOB_NUM_NODES=3\nSLURM_STEPID=4294967290\nSLURM_JOBID=3388135\nSLURM_PTY_PORT=41681\nSLURM_JOB_QOS=normal\nSLURM_LAUNCH_NODE_IPADDR=10.0.7.198\nSLURM_PTY_WIN_ROW=25\nSLURM_PMI2_PROC_MAPPING=(vector,(0,1,1))\nSLURMD_DEBUG=2\nSLURM_PROCID=0\nSLURM_CPUS_PER_TASK=5\nSLURM_NTASKS=12\nSLURM_TOPOLOGY_ADDR=hkibb.hkibbi1.hkibbi1e27.hkn0811\nSLURM_TOPOLOGY_ADDR_PATTERN=switch.switch.switch.node\nSLURM_SRUN_COMM_HOST=10.0.7.198\nSLURM_SCRIPT_CONTEXT=prolog_task\nSLURM_PTY_WIN_COL=122\nSLURM_NODELIST=hkn[0811-0813]\nSLURM_SRUN_COMM_PORT=40457\nSLURM_STEP_ID=4294967290\nSLURM_JOB_ACCOUNT=hk-project-p0023960\nSLURM_PRIO_PROCESS=0\nSLURM_NPROCS=12\nSLURM_NNODES=3\nSLURM_SUBMIT_HOST=hkn0802.localdomain\nSLURM_JOB_ID=3388135\nSLURM_NODEID=0\nSLURM_STEP_NUM_NODES=1\nSLURM_STEP_TASKS_PER_NODE=1\nSLURM_MPI_TYPE=pmi2\nSLURM_PMI2_STEP_NODES=hkn0802\nSLURM_CONF=/etc/slurm/slurm.conf\nSLURM_JOB_NAME=train_dynamics_causal_2_node\nSLURM_NTASKS_PER_NODE=4\nSLURM_STEP_LAUNCHER_PORT=40457\nSLURM_JOB_GID=502226\nSLURM_JOB_NODELIST=hkn[0811-0813]\nGpuFreq=control_disabled\nGpuFreq=control_disabled\nGpuFreq=control_disabled\n/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/tyro/_parsers.py:347: UserWarning: The field `param-dtype` is annotated with type ``, but the default value `` has type ``. We'll try to handle this gracefully, but it may cause unexpected behavior.\n warnings.warn(message)\n/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/tyro/_parsers.py:347: UserWarning: The field `param-dtype` is annotated with type ``, but the default value `` has type ``. We'll try to handle this gracefully, but it may cause unexpected behavior.\n warnings.warn(message)\n/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/tyro/_parsers.py:347: UserWarning: The field `dtype` is annotated with type ``, but the default value `` has type ``. We'll try to handle this gracefully, but it may cause unexpected behavior.\n warnings.warn(message)\n/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/tyro/_parsers.py:347: UserWarning: The field `param-dtype` is annotated with type ``, but the default value `` has type ``. 
We'll try to handle this gracefully, but it may cause unexpected behavior.\n warnings.warn(message)\n/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/tyro/_parsers.py:347: UserWarning: The field `param-dtype` is annotated with type ``, but the default value `` has type ``. We'll try to handle this gracefully, but it may cause unexpected behavior.\n warnings.warn(message)\n/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/tyro/_parsers.py:347: UserWarning: The field `dtype` is annotated with type ``, but the default value `` has type ``. We'll try to handle this gracefully, but it may cause unexpected behavior.\n warnings.warn(message)\n/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/tyro/_parsers.py:347: UserWarning: The field `dtype` is annotated with type ``, but the default value `` has type ``. We'll try to handle this gracefully, but it may cause unexpected behavior.\n warnings.warn(message)\n/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/tyro/_parsers.py:347: UserWarning: The field `dtype` is annotated with type ``, but the default value `` has type ``. We'll try to handle this gracefully, but it may cause unexpected behavior.\n warnings.warn(message)\n/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/tyro/_parsers.py:347: UserWarning: The field `param-dtype` is annotated with type ``, but the default value `` has type ``. We'll try to handle this gracefully, but it may cause unexpected behavior.\n warnings.warn(message)\n/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/tyro/_parsers.py:347: UserWarning: The field `dtype` is annotated with type ``, but the default value `` has type ``. We'll try to handle this gracefully, but it may cause unexpected behavior.\n warnings.warn(message)\n/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/tyro/_parsers.py:347: UserWarning: The field `param-dtype` is annotated with type ``, but the default value `` has type ``. We'll try to handle this gracefully, but it may cause unexpected behavior.\n warnings.warn(message)\n/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/tyro/_parsers.py:347: UserWarning: The field `param-dtype` is annotated with type ``, but the default value `` has type ``. We'll try to handle this gracefully, but it may cause unexpected behavior.\n warnings.warn(message)\n/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/tyro/_parsers.py:347: UserWarning: The field `dtype` is annotated with type ``, but the default value `` has type ``. We'll try to handle this gracefully, but it may cause unexpected behavior.\n warnings.warn(message)\n/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/tyro/_parsers.py:347: UserWarning: The field `dtype` is annotated with type ``, but the default value `` has type ``. We'll try to handle this gracefully, but it may cause unexpected behavior.\n warnings.warn(message)\n/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/tyro/_parsers.py:347: UserWarning: The field `param-dtype` is annotated with type ``, but the default value `` has type ``. 
We'll try to handle this gracefully, but it may cause unexpected behavior.\n warnings.warn(message)\n/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/tyro/_parsers.py:347: UserWarning: The field `param-dtype` is annotated with type ``, but the default value `` has type ``. We'll try to handle this gracefully, but it may cause unexpected behavior.\n warnings.warn(message)\n/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/tyro/_parsers.py:347: UserWarning: The field `dtype` is annotated with type ``, but the default value `` has type ``. We'll try to handle this gracefully, but it may cause unexpected behavior.\n warnings.warn(message)\n/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/tyro/_parsers.py:347: UserWarning: The field `param-dtype` is annotated with type ``, but the default value `` has type ``. We'll try to handle this gracefully, but it may cause unexpected behavior.\n warnings.warn(message)\n/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/tyro/_parsers.py:347: UserWarning: The field `param-dtype` is annotated with type ``, but the default value `` has type ``. We'll try to handle this gracefully, but it may cause unexpected behavior.\n warnings.warn(message)\n/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/tyro/_parsers.py:347: UserWarning: The field `param-dtype` is annotated with type ``, but the default value `` has type ``. We'll try to handle this gracefully, but it may cause unexpected behavior.\n warnings.warn(message)\n/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/tyro/_parsers.py:347: UserWarning: The field `dtype` is annotated with type ``, but the default value `` has type ``. We'll try to handle this gracefully, but it may cause unexpected behavior.\n warnings.warn(message)\n/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/tyro/_parsers.py:347: UserWarning: The field `dtype` is annotated with type ``, but the default value `` has type ``. We'll try to handle this gracefully, but it may cause unexpected behavior.\n warnings.warn(message)\n/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/tyro/_parsers.py:347: UserWarning: The field `dtype` is annotated with type ``, but the default value `` has type ``. We'll try to handle this gracefully, but it may cause unexpected behavior.\n warnings.warn(message)\n/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/tyro/_parsers.py:347: UserWarning: The field `dtype` is annotated with type ``, but the default value `` has type ``. We'll try to handle this gracefully, but it may cause unexpected behavior.\n warnings.warn(message)\n2025-07-31 17:04:39.842772: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\n2025-07-31 17:04:39.860386: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. 
Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\n2025-07-31 17:04:39.865618: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\n2025-07-31 17:04:39.888429: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\n2025-07-31 17:04:39.908649: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\n2025-07-31 17:04:39.908642: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\n2025-07-31 17:04:39.908647: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\n2025-07-31 17:04:39.908645: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\n2025-07-31 17:04:40.023638: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\n2025-07-31 17:04:40.051915: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\n2025-07-31 17:04:40.136830: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\n2025-07-31 17:04:40.136830: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\n2025-07-31 17:05:09.322056: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. 
Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\n2025-07-31 17:05:09.409087: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\n2025-07-31 17:05:09.780628: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\n2025-07-31 17:05:09.783363: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\n2025-07-31 17:05:09.804234: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\n2025-07-31 17:05:09.900002: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\n2025-07-31 17:05:11.521547: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\n2025-07-31 17:05:11.751082: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\n2025-07-31 17:05:11.769259: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\n2025-07-31 17:05:12.354135: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\n2025-07-31 17:05:12.641217: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\n2025-07-31 17:05:13.326244: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. 
Maybe the hints set does not contain a good representative set of valid configs? Working around this by using the full hints set instead.\n
wandb: Currently logged in as: mihir-mahajan2002 (instant-uv) to https://api.wandb.ai. Use `wandb login --relogin` to force relogin\nwandb: Tracking run with wandb version 0.19.11\n
wandb: Run data is saved locally in /hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar_jobs_3/wandb/run-20250731_170531-3388135\nwandb: Run `wandb offline` to turn off syncing.\n
wandb: Syncing run dynamics-causal-2-node-3373407\nwandb: ⭐️ View project at https://wandb.ai/instant-uv/jafar\nwandb: 🚀 View run at https://wandb.ai/instant-uv/jafar/runs/3388135\n
WARNING:absl:Missing metrics for step 39000\nERROR:absl:File /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/causal/train_dynamics_causal_2_node/3373407/039000/metrics/metrics not found.\n
WARNING:absl:Missing metrics for step 40000\nERROR:absl:File /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/causal/train_dynamics_causal_2_node/3373407/040000/metrics/metrics not found.\n
WARNING:absl:Missing metrics for step 41000\nERROR:absl:File /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/causal/train_dynamics_causal_2_node/3373407/041000/metrics/metrics not found.\n
WARNING:absl:Missing metrics for step 20000\nERROR:absl:File /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/causal/train_dynamics_causal_2_node/3373407/020000/metrics/metrics not found.\n
WARNING:absl:Dropping 6 examples of 89394 examples (shard 12).\n
Running on 12 devices.\nCounting all components: ['tokenizer', 'lam', 'dynamics']\nParameter counts:\n{'tokenizer': 33750256, 'lam': 17229792, 'dynamics': 271932416, 'total': 322912464}\n
Traceback (most recent call last):\n  File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar_jobs_3/train_dynamics.py"", line 328, in <module>\n    restored = checkpoint_manager.restore(\n
  File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/orbax/checkpoint/checkpoint_manager.py"", line 1608, in restore\n    restored = self._checkpointer.restore(restore_directory, args=args)\n
  File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/orbax/checkpoint/_src/checkpointers/async_checkpointer.py"", line 558, in restore\n    return super().restore(directory, *args, **kwargs)\n
  File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/orbax/checkpoint/_src/checkpointers/checkpointer.py"", line 300, in restore\n    restored = self._restore(directory, args=ckpt_args)\n
  File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/orbax/checkpoint/_src/checkpointers/checkpointer.py"", line 319, in _restore\n    return self._handler.restore(directory, args=args)\n
  File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/orbax/checkpoint/_src/handlers/composite_checkpoint_handler.py"", line 837, in restore\n    restored[item_name] = handler.restore(\n
  File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/grain/_src/python/checkpoint_handlers.py"", line 62, in restore\n    raise ValueError(f""File {filename} does not exist."")\n
ValueError: File /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/causal/train_dynamics_causal_2_node/3373407/041000/dataloader_state/process_0-of-12.json does not exist.\n
ValueError: File /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/causal/train_dynamics_causal_2_node/3373407/041000/dataloader_state/process_1-of-12.json does not exist.\n
ValueError: File /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/causal/train_dynamics_causal_2_node/3373407/041000/dataloader_state/process_2-of-12.json does not exist.\n
ValueError: File /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/causal/train_dynamics_causal_2_node/3373407/041000/dataloader_state/process_3-of-12.json does not exist.\n
ValueError: File /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/causal/train_dynamics_causal_2_node/3373407/041000/dataloader_state/process_4-of-12.json does not exist.\n
ValueError: File /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/causal/train_dynamics_causal_2_node/3373407/041000/dataloader_state/process_5-of-12.json does not exist.\n
ValueError: File /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/causal/train_dynamics_causal_2_node/3373407/041000/dataloader_state/process_6-of-12.json does not exist.\n
ValueError: File /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/causal/train_dynamics_causal_2_node/3373407/041000/dataloader_state/process_7-of-12.json does not exist.\n
ValueError: File /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/causal/train_dynamics_causal_2_node/3373407/041000/dataloader_state/process_8-of-12.json does not exist.\n
ValueError: File /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/causal/train_dynamics_causal_2_node/3373407/041000/dataloader_state/process_9-of-12.json does not exist.\n
ValueError: File /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/causal/train_dynamics_causal_2_node/3373407/041000/dataloader_state/process_10-of-12.json does not exist.\n
ValueError: File /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/causal/train_dynamics_causal_2_node/3373407/041000/dataloader_state/process_11-of-12.json does not exist.\n
wandb: \nwandb: 🚀 View run dynamics-causal-2-node-3373407 at: https://wandb.ai/instant-uv/jafar/runs/3388135\nwandb: Find logs at: ../../../../../hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar_jobs_3/wandb/run-20250731_170531-3388135/logs\n
srun: error: hkn0811: task 2: Exited with exit code 1\nsrun: error: hkn0811: task 0: Exited with exit code 1\nsrun: error: hkn0812: tasks 4-6: Exited with exit code 1\nsrun: error: hkn0812: task 7: Exited with exit code 1\nsrun: error: hkn0811: tasks 1,3: Exited with exit code 1\nsrun: error: hkn0813: tasks 8,10-11: Exited with exit code 1\nsrun: error: hkn0813: task 9: Exited with exit code 1\n\n
============================= JOB FEEDBACK =============================\n\nJob ID: 3388135\nCluster: hk\nUser/Group: tum_cte0515/hk-project-p0023960\nAccount: hk-project-p0023960\nState: FAILED (exit code 1)\nPartition: accelerated\nNodes: 3\nCores per node: 24\nNodelist: hkn[0811-0813]\nCPU Utilized: 00:29:42\nCPU Efficiency: 12.07% of 04:06:00 core-walltime\nJob Wall-clock time: 00:03:25\nStarttime: Thu Jul 31 17:03:39 2025\nEndtime: Thu Jul 31 17:07:04 2025\nMemory Utilized: 60.77 GB (estimated maximum)\nMemory Efficiency: 0.00% of 0.00 MB (0.00 MB/node)\nEnergy Consumed: 332493 Joule / 92.3591666666667 Watthours\nAverage node power draw: 1621.91707317073 Watt\n",log,tab
+883,566960,"/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir/causal/dynamics-cotraining/train_dynamics_causal_2_node_3388135.log",871,0,"",log,selection_mouse
+884,567092,"TERMINAL",0,0,"7\t ",,terminal_output
+885,567704,"/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir/causal/dynamics-cotraining/train_dynamics_causal_2_node_3388135.log",62178,0,"",log,selection_command
+886,568105,"TERMINAL",0,0,"8\t ",,terminal_output
+887,569158,"TERMINAL",0,0,"9\t ",,terminal_output
+888,570184,"TERMINAL",0,0,"20\t ",,terminal_output
+889,571438,"TERMINAL",0,0,"1\t ",,terminal_output
+890,572331,"TERMINAL",0,0,"2\t ",,terminal_output
+891,573293,"TERMINAL",0,0,"3\t ",,terminal_output
+892,574381,"TERMINAL",0,0,"4\t ",,terminal_output
+893,575378,"TERMINAL",0,0,"5\t ",,terminal_output
+894,576428,"TERMINAL",0,0,"6\t ",,terminal_output
+895,577462,"TERMINAL",0,0,"7\t ",,terminal_output
+896,578581,"TERMINAL",0,0,"8\t ",,terminal_output
+897,579642,"TERMINAL",0,0,"9\t ",,terminal_output
+898,580934,"TERMINAL",0,0,"30\t ",,terminal_output
+899,581650,"TERMINAL",0,0,"1\t ",,terminal_output
+900,582686,"TERMINAL",0,0,"2\t ",,terminal_output
+901,583707,"TERMINAL",0,0,"3\t ",,terminal_output
+902,584753,"TERMINAL",0,0,"5\t ",,terminal_output
+903,585857,"TERMINAL",0,0,"6\t ",,terminal_output +904,586165,"TERMINAL",0,0,"r",,terminal_output +905,586228,"TERMINAL",0,0,"u",,terminal_output +906,586318,"TERMINAL",0,0,"n",,terminal_output +907,586491,"TERMINAL",0,0,"ne",,terminal_output +908,586548,"TERMINAL",0,0,"r",,terminal_output +909,586750,"TERMINAL",0,0,"[?25l-[?25h",,terminal_output +910,586853,"TERMINAL",0,0,"[?25l3[?25h",,terminal_output +911,586882,"TERMINAL",0,0,"7\t ",,terminal_output +912,587407,"TERMINAL",0,0,"\r\n[?2004l\r]0;tum_cte0515@hkn0802:~/Projects/jafar_jobs_3[?2004h(jafar) [tum_cte0515@hkn0802 jafar_jobs_3]$ ",,terminal_output +913,587912,"TERMINAL",0,0,"8\t ",,terminal_output +914,588919,"TERMINAL",0,0,"9\t ",,terminal_output +915,589962,"TERMINAL",0,0,"40\t ",,terminal_output +916,590186,"/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir/causal/dynamics-cotraining/train_dynamics_causal_2_node_3388135.log",0,0,"",log,tab +917,591027,"TERMINAL",0,0,"1\t ",,terminal_output +918,592059,"TERMINAL",0,0,"2\t ",,terminal_output +919,593082,"TERMINAL",0,0,"3\t ",,terminal_output +920,594288,"TERMINAL",0,0,"4\t ",,terminal_output +921,595162,"TERMINAL",0,0,"5\t ",,terminal_output +922,596208,"TERMINAL",0,0,"6\t ",,terminal_output +923,597256,"TERMINAL",0,0,"7\t ",,terminal_output +924,598300,"TERMINAL",0,0,"8\t ",,terminal_output +925,599331,"TERMINAL",0,0,"9\t ",,terminal_output +926,600381,"TERMINAL",0,0,"50\t ",,terminal_output +927,601396,"TERMINAL",0,0,"1\t ",,terminal_output +928,602435,"TERMINAL",0,0,"2\t ",,terminal_output +929,603488,"TERMINAL",0,0,"3\t ",,terminal_output +930,604513,"TERMINAL",0,0,"4\t ",,terminal_output +931,604721,"TERMINAL",0,0,"\r(reverse-i-search)`': ",,terminal_output +932,605050,"TERMINAL",0,0,"[75@v': vim slurm/jobs/mihir/horeka/causal_big_runs/train_dynamics_8_nodes.sbatch\r[?25lv[1@i': vi[?25h",,terminal_output +933,605106,"TERMINAL",0,0,"[1@m': vim",,terminal_output +934,605614,"TERMINAL",0,0,"5\t ",,terminal_output +935,606502,"TERMINAL",0,0,"2\r",,terminal_output +936,606628,"TERMINAL",0,0,"6\t ",,terminal_output +937,607211,"TERMINAL",0,0,"\r[19@jafar) [tum_cte0515@hkn0802 jafar_jobs_3]$ vim\r\n[?2004l\r[?1049h[>4;2m[?1h=[?2004h[?1004h[?12h[?12l[?25l""slurm/jobs/mihir/horeka/causal_big_runs/train_dynamics_2_nodes.sbatch"" 81L, 2407B▽ Pzz\[0%m [>c]10;?]11;?#!/usr/bin/env bash#SBATCH --nodes=3#SBATCH --ntasks-per-node=4\r\n#SBATCH --time=08:00:00\r\n#SBATCH --partition=accelerated\r\n#SBATCH --cpus-per-task=5\r\n#SBATCH --gres=gpu:4\r\n#SBATCH --output=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir/causal/dynamics-cotraining/%x_%j..log\r\n#SBATCH --error=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir/causal/dynamics-cotraining/%x_%j.llog\r\n#SBATCH --job-name=train_dynamics_causal_2_node\r\n#SBATCH --requeue\r\n#SBATCH --signal=b:usr1@300 # 5 min before timeout\r\n\r\n# --- signal trap to requeue job before timeout ---\r\nrequeue_job() {\r\n echo ""[$(date)] caught sigusr1 (timeout warning), requeueing slurm job $SLURM_JOB_ID...""\r\n # optional: trigger checkpoint saving here\r\n # e.g., touch $checkpoint_dir/requeue_trigger\r\n scontrol requeue $SLURM_JOB_ID\r\n exit 0\r\n}5,17Top[?25h",,terminal_output +938,607301,"TERMINAL",0,0,"P+q436f\P+q6b75\P+q6b64\P+q6b72\P+q6b6c\P+q2332\P+q2334\P+q2569\P+q2a37\P+q6b31\[?12$p[?25l/3333/3333 [?25h[?25l/f6f6/e3e3 [?25h",,terminal_output +939,607667,"TERMINAL",0,0,"7\t ",,terminal_output +940,607832,"TERMINAL",0,0,"[?25lj 6[?25h",,terminal_output 
+941,608700,"TERMINAL",0,0,"8\t ",,terminal_output +942,608799,"TERMINAL",0,0,"[?25lj 7[?25h[?25lj 8[?25h[?25lj 9[?25h[?25lj 10,17[?25h[?25lj 1[?25h[?25lj 2[?25h[?25lj 3[?25h[?25lj 4,0-1[?25h[?25lj 5,17 [?25h[?25lj {}6,15[?25h[?25lj {}7,17[?25h[?25lj \r\n18,171%[?25h[?25lj \r\ntrap requeue_job sigusr119,173%[?25h[?25lj \r\n20,175%[?25h[?25lj \r\n# set checkpoint flag based on restart count21,106%[?25h[?25lj \r\n{}restart_count=$(scontrol show job $SLURM_JOB_ID | grep -o 'Restarts=[0-9]*' | cut -d'=' -f2)22,18%[?25h[?25lj \r\n{}23,0-110%[?25h",,terminal_output +943,609303,"TERMINAL",0,0,"[?25l\r\nif [ $restart_count -eq 0 ]; then24,1711%[?25h[?25lj \r\nrestore_ckpt_flag=""--no-restore-ckpt""25,0-113%[?25h[?25lj else\r\n restore_ckpt_flag=""--restore-ckpt""26,1715%[?25h[?25lj 7[?25h[?25lj fi28,0-117%[?25h[?25lj 9,17 [?25h[?25lj \r\n30,1719%[?25h[?25lj \r\n31,421%[?25h[?25lj \r\n# Log the sbatch script32,1722%[?25h[?25lj \r\ncat $033,224%[?25h[?25lj \r\n34,0-126%[?25h[?25lj \r\nmodule unload mpi/openmpi/5.035,0-128%[?25h[?25lj \r\nmodule unload devel/cuda/12.436,0-129%[?25h[?25lj \r\nsource .venv/bin/activate37,1731%[?25h[?25lj \r\n38,633%[?25h[?25lj \r\narray_records_dir=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_new/open_ai_minecraft_arrayrecords_chunked39,0-135%[?25h[?25lj \r\n40,1736%[?25h",,terminal_output +944,609370,"TERMINAL",0,0,"[?25lj \r\njob_name=$SLURM_JOB_NAME41,1738%[?25h",,terminal_output +945,609721,"TERMINAL",0,0,"9\t ",,terminal_output +946,609781,"TERMINAL",0,0,"[?25lj \r\nslurm_job_id=337340742,1740%[?25h",,terminal_output +947,610741,"TERMINAL",0,0,"[?25l\r\n43,0-142%[?25h[?25lj \r\nCHECKPOINT_DIR=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/causal/$job_name/$slurm_job_id44,1743%[?25h[?25lj \r\nmkdir -p $CHECKPOINT_DIR45,0-145%[?25h[?25lj \r\n46,1747%[?25h[?25lj \r\n# tokenizer with the new structure supporting larger ffn_dim47,1749%[?25h[?25lj tokenizer_ckpt_dir=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data/checkpoints/train_tokenizer_lr_sweep_1e-4__larger_ffn/48,0-151%[?25h[?25lj 9,17 [?25h[?25lj \r\n50,1753%[?25h[?25lj \r\nenv | grep SLURM51,0-155%[?25h[?25lj \r\n52,1756%[?25h[?25lj srun python train_dynamics.py \\r\n --save_ckpt \53,1760%[?25h[?25lj \r\n--restore_ckpt \54,0-162%[?25h[?25lj \r\n--wandb_id $SLURM_JOB_ID \55,1663%[?25h[?25lj \r\n--ckpt_dir $CHECKPOINT_DIR \56,0-165%[?25h[?25lj \r\n--batch_size=96 \57,1767%[?25h[?25lj \r\n--init_lr=0 \58,1768%[?25h[?25lj \r\n--max_lr=2e-5 \59,1770%[?25h",,terminal_output +948,610769,"TERMINAL",0,0,"9:01\t ",,terminal_output +949,611113,"TERMINAL",0,0,"[?25lj \r\n--log_image_interval=1000 \60,1772%[?25h",,terminal_output +950,611301,"TERMINAL",0,0,"[?25lj \r\n--log \61,1774%[?25h",,terminal_output +951,611626,"TERMINAL",0,0,"[?25lk 0[?25h",,terminal_output +952,611717,"TERMINAL",0,0,"[?25lk 59[?25h",,terminal_output +953,611811,"TERMINAL",0,0,"2\t ",,terminal_output +954,612884,"TERMINAL",0,0,"3\t ",,terminal_output +955,613908,"TERMINAL",0,0,"4\t ",,terminal_output +956,614265,"TERMINAL",0,0,"[?25lk 8[?25h",,terminal_output +957,614727,"TERMINAL",0,0,"[?25lj 9[?25h",,terminal_output +958,614924,"TERMINAL",0,0,"5\t ",,terminal_output +959,615080,"TERMINAL",0,0,"[?25lk 8[?25h",,terminal_output +960,615976,"TERMINAL",0,0,"6\t ",,terminal_output +961,616477,"TERMINAL",0,0,"[?25lk 7[?25h[?25lk 6,0-1[?25h[?25lk 5,16 [?25h[?25lk 4,0-1[?25h[?25lk 3,17 [?25h[?25lk 2[?25h[?25lk 1,0-1[?25h[?25lk 0,17 [?25h[?25lk 49[?25h[?25lk 48,0-172%[?25h[?25lk source 
.venv/bin/activate47,1770%[?25h[?25lk module unload devel/cuda/12.446,1768%[?25h[?25lk module unload mpi/openmpi/5.045,0-167%[?25h[?25lk 44,1765%[?25h[?25lk cat $043,0-163%[?25h[?25lk # Log the sbatch script42,1762%[?25h[?25lk 41,1760%[?25h[?25lk 40,1758%[?25h[?25lk 39,0-156%[?25h[?25lk fi38,655%[?25h[?25lk restore_ckpt_flag=""--restore-ckpt""37,1753%[?25h[?25lk else36,0-151%[?25h[?25lk restore_ckpt_flag=""--no-restore-ckpt""@@@ 35,0-150%[?25h[?25lk if [ $restart_count -eq 0 ]; then34,0-149%[?25h[?25lk 33,247%[?25h[?25lk restart_count=$(scontrol show job $SLURM_JOB_ID | grep -o 'Restarts=[0-9]*' | cut -d'=' -f2)32,1745%[?25h[?25lk # set checkpoint flag based on restart count31,443%[?25h[?25lk 30,1742%[?25h[?25lk trap requeue_job sigusr129,1740%[?25h[?25lk 28,0-138%[?25h[?25lk }27,1736%[?25h[?25lk exit 026,1735%[?25h",,terminal_output +962,616592,"TERMINAL",0,0,"[?25lk scontrol requeue $SLURM_JOB_ID25,0-133%[?25h[?25lk # e.g., touch $checkpoint_dir/requeue_trigger24,1731%[?25h[?25lk # optional: trigger checkpoint saving here23,0-129%[?25h[?25lk echo ""[$(date)] caught sigusr1 (timeout warning), requeueing slurm job $SLURM_JOB_ID...""22,128%[?25h",,terminal_output +963,617019,"TERMINAL",0,0,"[?25lj 3,0-1[?25h",,terminal_output +964,617020,"TERMINAL",0,0,"7\t ",,terminal_output +965,617193,"TERMINAL",0,0,"[?25lj 4,17 [?25h",,terminal_output +966,617357,"TERMINAL",0,0,"[?25lj 5,0-1[?25h",,terminal_output +967,617513,"TERMINAL",0,0,"[?25lj 6,17 [?25h",,terminal_output +968,617703,"TERMINAL",0,0,"[?25lj 7[?25h",,terminal_output +969,617813,"TERMINAL",0,0,"[?25lj 8,0-1[?25h",,terminal_output +970,617958,"TERMINAL",0,0,"[?25lj 9,17 [?25h",,terminal_output +971,618081,"TERMINAL",0,0,"[?25lj 30[?25h",,terminal_output +972,618081,"TERMINAL",0,0,"8\t ",,terminal_output +973,618231,"TERMINAL",0,0,"[?25lj 1,4 [?25h",,terminal_output +974,618961,"TERMINAL",0,0,"[?25lk 0,17[?25h",,terminal_output +975,619103,"TERMINAL",0,0,"9\t ",,terminal_output +976,619394,"TERMINAL",0,0,"[?25lh 6[?25h",,terminal_output +977,619782,"TERMINAL",0,0,"[?25lh 5[?25h",,terminal_output +978,619942,"TERMINAL",0,0,"[?25lh 4[?25h[?25lh 3[?25h[?25lh 2[?25h[?25lh 1[?25h[?25lh 0[?25h",,terminal_output +979,620024,"TERMINAL",0,0,"[?25lh 9 [?25h[?25lh 8[?25h[?25lh 7[?25h",,terminal_output +980,620108,"TERMINAL",0,0,"[?25lh 6[?25h",,terminal_output +981,620163,"TERMINAL",0,0,"10\t ",,terminal_output +982,620220,"TERMINAL",0,0,"[?25lh 5[?25h",,terminal_output +983,621050,"TERMINAL",0,0,"[?25lv1-- VISUAL --130,528%[?25h[?25ll2r6[?25h",,terminal_output +984,621181,"TERMINAL",0,0,"1\t ",,terminal_output +985,621638,"TERMINAL",0,0,"[?25ll3e7[?25h[?25ll4s8[?25h[?25ll5t9[?25h",,terminal_output +986,622004,"TERMINAL",0,0,"[?25ll6o10[?25h[?25ll7r1[?25h[?25ll8e2[?25h[?25l _3[?25h[?25ll10c4[?25h[?25ll 11k5[?25h[?25ll 12p6[?25h[?25ll 13t7[?25h[?25ll 14_8[?25h[?25ll 15f9[?25h[?25ll 16l20[?25h[?25ll 17a1[?25h",,terminal_output +987,622215,"TERMINAL",0,0,"2\t ",,terminal_output +988,622326,"TERMINAL",0,0,"[?25ll 18g2[?25h",,terminal_output +989,622514,"TERMINAL",0,0,"[?25ly  restore_ckpt_flag30,528%[?25h",,terminal_output +990,622991,"TERMINAL",0,0,"[?25lj 1,4[?25h",,terminal_output +991,623330,"TERMINAL",0,0,"3\t ",,terminal_output +992,624301,"TERMINAL",0,0,"4\t ",,terminal_output +993,624434,"TERMINAL",0,0,"[?25lj 2,5[?25h[?25lj 3,2[?25h[?25l4,0-1[?25h[?25lj 5[?25h[?25lj \r\nmodule unload devel/cuda/12.436,0-129%[?25h[?25lj \r\nsource .venv/bin/activate37,531%[?25h[?25lj \r\n38,533%[?25h[?25lj 
\r\narray_records_dir=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_new/open_ai_minecraft_arrayrecords_chunked39,0-135%[?25h[?25lj \r\n40,536%[?25h[?25lj \r\njob_name=$SLURM_JOB_NAME41,538%[?25h[?25lj \r\nslurm_job_id=337340742,540%[?25h[?25lj \r\n43,0-142%[?25h[?25lj \r\nCHECKPOINT_DIR=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/causal/$job_name/$slurm_job_id44,543%[?25h[?25lj \r\nmkdir -p $CHECKPOINT_DIR45,0-145%[?25h[?25l\r\n46,547%[?25h[?25lj \r\n# tokenizer with the new structure supporting larger ffn_dim47,549%[?25h[?25lj tokenizer_ckpt_dir=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data/checkpoints/train_tokenizer_lr_sweep_1e-4__larger_ffn/48,0-151%[?25h[?25lj 9,5 [?25h[?25lj \r\n50,553%[?25h[?25lj \r\nenv | grep SLURM51,0-155%[?25h[?25lj \r\n52,556%[?25h[?25lj srun python train_dynamics.py \\r\n --save_ckpt \53,560%[?25h[?25lj \r\n--restore_ckpt \54,0-162%[?25h[?25lj \r\n--wandb_id $SLURM_JOB_ID \55,563%[?25h[?25lj \r\n--ckpt_dir $CHECKPOINT_DIR \56,0-165%[?25h[?25lj \r\n--batch_size=96 \57,567%[?25h[?25lj \r\n--init_lr=0 \58,568%[?25h[?25lj \r\n--max_lr=2e-5 \59,570%[?25h[?25lj \r\n--log_image_interval=1000 \60,572%[?25h[?25lj \r\n--log \61,574%[?25h[?25lj \r\n--log_checkpoint_interval=1000 \62,575%[?25h",,terminal_output +994,624545,"TERMINAL",0,0,"[?25lj \r\n--name=dynamics-causal-2-node-$slurm_job_id \63,577%[?25h[?25lj \r\n--tags dynamics causal 2-node \64,579%[?25h[?25lj \r\n--entity instant-uv \65,581%[?25h",,terminal_output +995,624667,"TERMINAL",0,0,"[?25lj \r\n--project jafar \66,582%[?25h[?25lj \r\n--dyna_dim=1024 \67,584%[?25h[?25lj \r\n--dyna_num_blocks=16 \68,586%[?25h",,terminal_output +996,625079,"TERMINAL",0,0,"[?25lj \r\n--dyna_num_heads=16 \69,587%[?25h[?25lj \r\n--dyna_ffn_dim=4096 \70,589%[?25h[?25lj --tokenizer_checkpoint=$tokenizer_ckpt_dir \\r\n --data_dir $array_records_dir &71,592%[?25h[?25lj 2[?25h[?25lj \r\n73,594%[?25h[?25lj \r\nchild_pid=$!74,596%[?25h[?25lj \r\n75,598%[?25h[?25lj \r\nwait $child_pid76,5Bot[?25h[?25lj 7[?25h[?25lj 8,0-1[?25h[?25lj 9,5 [?25h[?25lj 80,0-1[?25h[?25lj 1,5 [?25h",,terminal_output +997,625134,"TERMINAL",0,0,"[?25lk 0,0-1[?25h",,terminal_output +998,625355,"TERMINAL",0,0,"5\t ",,terminal_output +999,625950,"TERMINAL",0,0,"[?25lk 79,5 [?25h[?25l8,0-1[?25h[?25lk 7,5 [?25h[?25lk 6[?25h[?25lk 5[?25h[?25lk 4[?25h[?25lk 3[?25h[?25lk 2[?25h[?25lk 1[?25h[?25lk 0[?25h[?25lk 69[?25h[?25lk 8[?25h[?25lk 7[?25h",,terminal_output +1000,626208,"TERMINAL",0,0,"[?25lk 6[?25h[?25l5[?25h[?25lk 4[?25h[?25lk 3[?25h[?25lk srun python train_dynamics.py \62,598%[?25h[?25lk 61,596%[?25h[?25lk env | grep SLURM60,594%[?25h[?25lk 59,592%[?25h",,terminal_output +1001,626372,"TERMINAL",0,0,"6\t ",,terminal_output +1002,626521,"TERMINAL",0,0,"[?25lj 60[?25h",,terminal_output +1003,626934,"TERMINAL",0,0,"[?25lk 59[?25h",,terminal_output +1004,627370,"TERMINAL",0,0,"[?25ld[?25h",,terminal_output +1005,627427,"TERMINAL",0,0,"7\t ",,terminal_output +1006,627552,"TERMINAL",0,0,"[?25ld \r\n59,594%[?25h",,terminal_output +1007,627807,"TERMINAL",0,0,"[?25lk tokenizer_ckpt_dir=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data/checkpoints/train_tokenizer_lr_sweep_1e-4__larger_ffn/58,591%[?25h",,terminal_output +1008,628454,"TERMINAL",0,0,"8\t ",,terminal_output +1009,629099,"TERMINAL",0,0,"[?25lp --restore_ckpt \59,589%[?25h",,terminal_output +1010,629496,"TERMINAL",0,0,"9\t ",,terminal_output +1011,630351,"TERMINAL",0,0,"[?25lu1 line less; before #2 1 second 
ago\r\n--tokenizer_checkpoint=$tokenizer_ckpt_dir \58,591%\r1 line less; before #2 1 second ago58,591%[?25h",,terminal_output +1012,630536,"TERMINAL",0,0,"20\t ",,terminal_output +1013,630900,"TERMINAL",0,0,"[?25li -- INSERT --58,591%[?25h",,terminal_output +1014,631623,"TERMINAL",0,0,"1\t ",,terminal_output +1015,632256,"TERMINAL",0,0,"[?25l^[",,terminal_output +1016,632388,"TERMINAL",0,0," 58,491%[?25h",,terminal_output +1017,632616,"TERMINAL",0,0,"2\t ",,terminal_output +1018,633001,"TERMINAL",0,0,"[?25lo -- INSERT --59,589%59,589%[?25h",,terminal_output +1019,633672,"TERMINAL",0,0,"3\t ",,terminal_output +1020,634695,"TERMINAL",0,0,"4\t ",,terminal_output +1021,635820,"TERMINAL",0,0,"6\t ",,terminal_output +1022,636844,"TERMINAL",0,0,"7\t ",,terminal_output +1023,637849,"TERMINAL",0,0,"8\t ",,terminal_output +1024,638910,"TERMINAL",0,0,"9\t ",,terminal_output +1025,640002,"TERMINAL",0,0,"30\t ",,terminal_output +1026,641148,"TERMINAL",0,0,"1\t ",,terminal_output +1027,642170,"TERMINAL",0,0,"2\t ",,terminal_output +1028,643195,"TERMINAL",0,0,"3\t ",,terminal_output +1029,644220,"TERMINAL",0,0,"4\t ",,terminal_output +1030,645243,"TERMINAL",0,0,"5\t ",,terminal_output +1031,646345,"slurm/jobs/mihir/horeka/causal_big_runs/train_dynamics_8_nodes.sbatch",0,0,"#!/usr/bin/env bash\n\n#SBATCH --nodes=8\n#SBATCH --ntasks-per-node=4\n#SBATCH --time=48:00:00\n#SBATCH --partition=accelerated\n#SBATCH --cpus-per-task=5\n#SBATCH --gres=gpu:4\n#SBATCH --output=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir/causal/dynamics-cotraining/%x_%j.log\n#SBATCH --error=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir/causal/dynamics-cotraining/%x_%j.log\n#SBATCH --job-name=train_dynamics_causal_8_node\n#SBATCH --requeue\n#SBATCH --signal=b:usr1@300 # 5 min before timeout\n\n# --- signal trap to requeue job before timeout ---\nrequeue_job() {\n echo ""[$(date)] caught sigusr1 (timeout warning), requeueing slurm job $SLURM_JOB_ID...""\n # optional: trigger checkpoint saving here\n # e.g., touch $checkpoint_dir/requeue_trigger\n scontrol requeue $SLURM_JOB_ID\n exit 0\n}\n\ntrap requeue_job sigusr1\n\n# set checkpoint flag based on restart count\nrestart_count=$(scontrol show job $SLURM_JOB_ID | grep -o 'Restarts=[0-9]*' | cut -d'=' -f2)\n\nif [ $restart_count -eq 0 ]; then\n restore_ckpt_flag=""--no-restore-ckpt""\nelse\n restore_ckpt_flag=""--restore-ckpt""\nfi\n\n\n\n# Log the sbatch script\ncat $0\n\nmodule unload mpi/openmpi/5.0\nmodule unload devel/cuda/12.4\nsource .venv/bin/activate\n\narray_records_dir=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_new/open_ai_minecraft_arrayrecords_chunked\n\njob_name=$SLURM_JOB_NAME\nslurm_job_id=$SLURM_JOB_ID\n\nCHECKPOINT_DIR=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/causal/$job_name/$slurm_job_id\nmkdir -p $CHECKPOINT_DIR\n\n# tokenizer with the new structure supporting larger ffn_dim\ntokenizer_ckpt_dir=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data/checkpoints/train_tokenizer_lr_sweep_1e-4_larger_ffn/\n\nenv | grep SLURM\n\nsrun python train_dynamics.py \\n --save_ckpt \\n $restore_ckpt_flag \\n --wandb_id $SLURM_JOB_ID \\n --ckpt_dir $CHECKPOINT_DIR \\n --batch_size=384 \\n --init_lr=0 \\n --max_lr=8e-5 \\n --log_image_interval=1000 \\n --log \\n --log_checkpoint_interval=1000 \\n --name=dynamics-causal-8-node-$slurm_job_id \\n --tags dynamics causal 8-node \\n --entity instant-uv \\n --project jafar \\n --dyna_dim=1024 \\n --dyna_num_blocks=16 \\n 
--dyna_num_heads=16 \\n --dyna_ffn_dim=4096 \\n --tokenizer_checkpoint=$tokenizer_ckpt_dir \\n --data_dir $array_records_dir &\n\nchild_pid=$!\n\nwait $child_pid\n",shellscript,tab +1032,646382,"TERMINAL",0,0,"6\t ",,terminal_output +1033,647338,"TERMINAL",0,0,"7\t ",,terminal_output +1034,648011,"slurm/jobs/mihir/horeka/causal_big_runs/train_dynamics_2_nodes.sbatch",0,0,"#!/usr/bin/env bash\n\n#SBATCH --nodes=3\n#SBATCH --ntasks-per-node=4\n#SBATCH --time=48:00:00\n#SBATCH --partition=accelerated\n#SBATCH --cpus-per-task=5\n#SBATCH --gres=gpu:4\n#SBATCH --output=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir/causal/dynamics-cotraining/%x_%j.log\n#SBATCH --error=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir/causal/dynamics-cotraining/%x_%j.log\n#SBATCH --job-name=train_dynamics_causal_2_node\n#SBATCH --requeue\n#SBATCH --signal=b:usr1@300 # 5 min before timeout\n\n# --- signal trap to requeue job before timeout ---\nrequeue_job() {\n echo ""[$(date)] caught sigusr1 (timeout warning), requeueing slurm job $SLURM_JOB_ID...""\n # optional: trigger checkpoint saving here\n # e.g., touch $checkpoint_dir/requeue_trigger\n scontrol requeue $SLURM_JOB_ID\n exit 0\n}\n\ntrap requeue_job sigusr1\n\n# set checkpoint flag based on restart count\nrestart_count=$(scontrol show job $SLURM_JOB_ID | grep -o 'Restarts=[0-9]*' | cut -d'=' -f2)\n\nif [ $restart_count -eq 0 ]; then\n restore_ckpt_flag=""--no-restore-ckpt""\nelse\n restore_ckpt_flag=""--restore-ckpt""\nfi\n\n\n\n# Log the sbatch script\ncat $0\n\nmodule unload mpi/openmpi/5.0\nmodule unload devel/cuda/12.4\nsource .venv/bin/activate\n\narray_records_dir=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_new/open_ai_minecraft_arrayrecords_chunked\n\njob_name=$SLURM_JOB_NAME\nslurm_job_id=3373407\n\nCHECKPOINT_DIR=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/causal/$job_name/$slurm_job_id\nmkdir -p $CHECKPOINT_DIR\n\n# tokenizer with the new structure supporting larger ffn_dim\ntokenizer_ckpt_dir=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data/checkpoints/train_tokenizer_lr_sweep_1e-4_larger_ffn/\n\nenv | grep SLURM\n\nsrun python train_dynamics.py \\n --save_ckpt \\n --restore_ckpt \\n --wandb_id $SLURM_JOB_ID \\n --ckpt_dir $CHECKPOINT_DIR \\n --batch_size=96 \\n --init_lr=0 \\n --max_lr=2e-5 \\n --log_image_interval=1000 \\n --log \\n --log_checkpoint_interval=1000 \\n --name=dynamics-causal-2-node-$slurm_job_id \\n --tags dynamics causal 2-node \\n --entity instant-uv \\n --project jafar \\n --dyna_dim=1024 \\n --dyna_num_blocks=16 \\n --dyna_num_heads=16 \\n --dyna_ffn_dim=4096 \\n --tokenizer_checkpoint=$tokenizer_ckpt_dir \\n --data_dir $array_records_dir &\n\nchild_pid=$!\n\nwait $child_pid\n",shellscript,tab +1035,648269,"TERMINAL",0,0,"8\t ",,terminal_output +1036,648999,"slurm/jobs/mihir/horeka/causal_big_runs/train_dynamics_8_nodes.sbatch",0,0,"",shellscript,tab +1037,649342,"TERMINAL",0,0,"9\t ",,terminal_output +1038,650003,"slurm/jobs/mihir/horeka/causal_big_runs/train_dynamics_8_nodes.sbatch",1850,0,"",shellscript,selection_mouse +1039,650126,"slurm/jobs/mihir/horeka/causal_big_runs/train_dynamics_8_nodes.sbatch",1841,17,"restore_ckpt_flag",shellscript,selection_mouse +1040,650263,"slurm/jobs/mihir/horeka/causal_big_runs/train_dynamics_8_nodes.sbatch",1836,25," $restore_ckpt_flag \\n",shellscript,selection_mouse +1041,650322,"TERMINAL",0,0,"404",,terminal_output +1042,651390,"TERMINAL",0,0,"1\t ",,terminal_output 
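Note the key difference between the two sbatch files: the 8-node script derives slurm_job_id from $SLURM_JOB_ID and passes the restart-count-dependent $restore_ckpt_flag, while the 2-node script pins slurm_job_id=3373407 and hardcodes --restore_ckpt; the vim edits recorded in the surrounding terminal output patch the 2-node script to use $restore_ckpt_flag before resubmitting. For reference, the same restart-count gate expressed in Python (a sketch only; in the source this logic lives in the sbatch scripts, not in train_dynamics.py):

    import os
    import re
    import subprocess

    def restore_flag() -> str:
        # Mirrors the sbatch gate: a job that has never been requeued starts
        # fresh, while a requeued job resumes from its checkpoint directory.
        job_id = os.environ["SLURM_JOB_ID"]
        info = subprocess.run(["scontrol", "show", "job", job_id],
                              capture_output=True, text=True, check=True).stdout
        m = re.search(r"Restarts=(\d+)", info)
        return "--restore-ckpt" if m and int(m.group(1)) > 0 else "--no-restore-ckpt"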
+1043,652411,"TERMINAL",0,0,"2\t ",,terminal_output +1044,653434,"TERMINAL",0,0,"3\t ",,terminal_output +1045,653800,"TERMINAL",0,0,"[?25li6[?25h",,terminal_output +1046,654470,"TERMINAL",0,0,"4\t ",,terminal_output +1047,654646,"TERMINAL",0,0,"[?25l5[?25h",,terminal_output +1048,654813,"TERMINAL",0,0,"[?25l4[?25h",,terminal_output +1049,655083,"TERMINAL",0,0,"[?25l3[?25h",,terminal_output +1050,655260,"TERMINAL",0,0,"[?25l2[?25h",,terminal_output +1051,655542,"TERMINAL",0,0,"5\t ",,terminal_output +1052,655573,"TERMINAL",0,0,"[?25l1[?25h",,terminal_output +1053,655940,"TERMINAL",0,0,"[?25l\r\n--save_ckpt \--tokenizer_checkpoint=$tokenizer_ckpt_dir \58,1891%[?25h",,terminal_output +1054,656484,"TERMINAL",0,0,"[?25l59,589%[?25h",,terminal_output +1055,656549,"TERMINAL",0,0,"6\t ",,terminal_output +1056,657006,"TERMINAL",0,0,"[?25l4[?25h",,terminal_output +1057,657138,"TERMINAL",0,0,"[?25l3[?25h",,terminal_output +1058,657455,"TERMINAL",0,0,"[?25l2[?25h",,terminal_output +1059,657597,"TERMINAL",0,0,"7\t ",,terminal_output +1060,657714,"TERMINAL",0,0,"[?25l1[?25h",,terminal_output +1061,658022,"TERMINAL",0,0,"[?25l\r\n--save_ckpt \--tokenizer_checkpoint=$tokenizer_ckpt_dir \58,1891%[?25h",,terminal_output +1062,658658,"TERMINAL",0,0,"8\t ",,terminal_output +1063,658754,"TERMINAL",0,0,"[?25l59,589%[?25h",,terminal_output +1064,659176,"TERMINAL",0,0,"[?25l4[?25h",,terminal_output +1065,659313,"TERMINAL",0,0,"[?25l3[?25h",,terminal_output +1066,659527,"TERMINAL",0,0,"[?25l2[?25h",,terminal_output +1067,659668,"TERMINAL",0,0,"9\t ",,terminal_output +1068,659832,"TERMINAL",0,0,"[?25l1[?25h",,terminal_output +1069,660520,"TERMINAL",0,0,"[?25l$restore_ckpt_flag \(paste) --[?25h[?25l60,188%[?25h",,terminal_output +1070,660695,"TERMINAL",0,0,"50\t ",,terminal_output +1071,661384,"TERMINAL",0,0,"[?25l^[",,terminal_output +1072,661515,"TERMINAL",0,0," 60,0-188%[?25h",,terminal_output +1073,661740,"TERMINAL",0,0,"2\t ",,terminal_output +1074,662055,"TERMINAL",0,0,"[?25ld[?25h",,terminal_output +1075,662142,"TERMINAL",0,0,"[?25ld \r\n--dyna_ffn_dim=4096 \60,589%[?25h",,terminal_output +1076,662853,"TERMINAL",0,0,"3\t ",,terminal_output +1077,663280,"TERMINAL",0,0,"[?25l59[?25h[?25l8[?25h[?25l~@k 7[?25h[?25l# tokenizer with the new structure supporting larger ffn_dim56,0-187%[?25h[?25l~@k 55,586%[?25h[?25lmkdir -p $CHECKPOINT_DIR54,0-184%[?25h[?25l~@k CHECKPOINT_DIR=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/causal/$job_name/$slurm_job_id53,581%[?25h[?25lslurm_job_id=337340752,579%[?25h[?25l~@k job_name=$SLURM_JOB_NAME51,0-177%[?25h[?25l~@k 50,575%[?25h[?25l~@k array_records_dir=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_new/open_ai_minecraft_arrayrecords_chunked49,574%[?25h[?25l~@k 48,0-172%[?25h",,terminal_output +1078,663841,"TERMINAL",0,0,"[?25l9,5 [?25h[?25l50[?25h[?25l~@k 1,0-1[?25h[?25l2,5 [?25h[?25l~@k 3[?25h[?25l~@k 4,0-1[?25h[?25l~@k 5,5 [?25h[?25l6,0-1[?25h[?25l~@k 7,5 [?25h[?25l~@k 8[?25h[?25l~@k 9[?25h",,terminal_output +1079,663841,"TERMINAL",0,0,"4\t ",,terminal_output +1080,664723,"TERMINAL",0,0,"[?25l^[",,terminal_output +1081,664791,"TERMINAL",0,0," ^[ [?25h",,terminal_output +1082,664887,"TERMINAL",0,0,"5\t ",,terminal_output +1083,665345,"TERMINAL",0,0,"[?25l::[?25h",,terminal_output +1084,665549,"TERMINAL",0,0,"q",,terminal_output +1085,665957,"TERMINAL",0,0,"6\t ",,terminal_output +1086,666039,"TERMINAL",0,0,"[?25l[?25h",,terminal_output +1087,666224,"TERMINAL",0,0,"w",,terminal_output +1088,666319,"TERMINAL",0,0,"q",,terminal_output 
+1089,666628,"TERMINAL",0,0,"\r[?25l[?2004l[>4;m""slurm/jobs/mihir/horeka/causal_big_runs/train_dynamics_2_nodes.sbatch"" 81L, 2411B written\r\r\r\n[?1004l[?2004l[?1l>[?25h[>4;m[?1049l]0;tum_cte0515@hkn0802:~/Projects/jafar_jobs_3[?2004h(jafar) [tum_cte0515@hkn0802 jafar_jobs_3]$ ",,terminal_output +1090,667127,"TERMINAL",0,0,"7\t ",,terminal_output +1091,667673,"TERMINAL",0,0,"vim slurm/jobs/mihir/horeka/causal_big_runs/train_dynamics_2_nodes.sbatch ",,terminal_output +1092,667885,"TERMINAL",0,0,"\rrunner-3",,terminal_output +1093,668016,"TERMINAL",0,0,"88",,terminal_output +1094,668335,"TERMINAL",0,0,"vim slurm/jobs/mihir/horeka/causal_big_runs/train_dynamics_2_nodes.sbatch ",,terminal_output +1095,669065,"TERMINAL",0,0,"9\t ",,terminal_output +1096,670127,"TERMINAL",0,0,"10:00\t ",,terminal_output +1097,671095,"TERMINAL",0,0,"[?25lm[?25h",,terminal_output +1098,671121,"TERMINAL",0,0,"1\t ",,terminal_output +1099,671165,"TERMINAL",0,0,"[?25li[?25h",,terminal_output +1100,671346,"TERMINAL",0,0,"[?25lv[?25h",,terminal_output +1101,671565,"TERMINAL",0,0,"[1@s",,terminal_output +1102,671779,"TERMINAL",0,0,"[?25ls [1@b[?25h",,terminal_output +1103,671831,"TERMINAL",0,0,"[1@a",,terminal_output +1104,672028,"TERMINAL",0,0,"[?25l [1@t[?25h",,terminal_output +1105,672094,"TERMINAL",0,0,"[?25ls [1@c[?25h",,terminal_output +1106,672176,"TERMINAL",0,0,"[1@h",,terminal_output +1107,672205,"TERMINAL",0,0,"2\t ",,terminal_output +1108,672485,"TERMINAL",0,0,"\r\n[?2004l\rSubmitted batch job 3388147\r\n]0;tum_cte0515@hkn0802:~/Projects/jafar_jobs_3[?2004h(jafar) [tum_cte0515@hkn0802 jafar_jobs_3]$ ",,terminal_output +1109,673142,"TERMINAL",0,0,"q",,terminal_output +1110,673225,"TERMINAL",0,0,"[?25lu[?25h",,terminal_output +1111,673225,"TERMINAL",0,0,"3\t ",,terminal_output +1112,673467,"TERMINAL",0,0,"[?25le[?25h",,terminal_output +1113,673496,"TERMINAL",0,0,"[?25lu[?25h",,terminal_output +1114,673553,"TERMINAL",0,0,"[?25le[?25h",,terminal_output +1115,673665,"TERMINAL",0,0,"\r\n[?2004l\r[?1049h(B[?7hEvery 1.0s: squeue --mehkn0802.localdomain: Thu Jul 31 17:10:03 2025JOBID PARTITION NAME USER ST\tTIME NODES NODELIST(REASON)3386719 accelerat train_to tum_cte0 PD\t0:00\t 2 (Priority)3386718 accelerat train_to tum_cte0 PD\t0:00\t 2 (Priority)3386722 accelerat train_dy tum_cte0 PD\t0:00\t 8 (Priority)3388147 accelerat train_dy tum_cte0 PD\t0:00\t 3 (Priority)3388140 accelerat train_dy tum_cte0 R\t2:23\t 8 hkn[0703-0710]3388128 accelerat interact tum_cte0 R\t8:24\t 1 hkn0802",,terminal_output +1116,674325,"TERMINAL",0,0,"4\t ",,terminal_output +1117,674725,"TERMINAL",0,0,"445",,terminal_output +1118,675452,"TERMINAL",0,0,"5\t ",,terminal_output +1119,675757,"TERMINAL",0,0,"556",,terminal_output +1120,676367,"TERMINAL",0,0,"6\t ",,terminal_output +1121,676791,"TERMINAL",0,0,"667",,terminal_output +1122,677371,"TERMINAL",0,0,"7\t ",,terminal_output +1123,677711,"TERMINAL",0,0,"778",,terminal_output +1124,678418,"TERMINAL",0,0,"8\t ",,terminal_output +1125,678793,"TERMINAL",0,0,"8930",,terminal_output +1126,679452,"TERMINAL",0,0,"9\t ",,terminal_output +1127,679725,"TERMINAL",0,0,"10301",,terminal_output +1128,680492,"TERMINAL",0,0,"10\t ",,terminal_output +1129,680737,"TERMINAL",0,0,"112",,terminal_output +1130,681528,"TERMINAL",0,0,"1\t ",,terminal_output +1131,681762,"TERMINAL",0,0,"223",,terminal_output +1132,682565,"TERMINAL",0,0,"2\t ",,terminal_output +1133,682776,"TERMINAL",0,0,"334",,terminal_output +1134,683606,"TERMINAL",0,0,"37",,terminal_output +1135,683794,"TERMINAL",0,0,"445",,terminal_output 
+1136,684646,"TERMINAL",0,0,"4\t ",,terminal_output +1137,684811,"TERMINAL",0,0,"556",,terminal_output +1138,685815,"TERMINAL",0,0,"5\t ",,terminal_output +1139,685846,"TERMINAL",0,0,"667",,terminal_output +1140,686730,"TERMINAL",0,0,"6\t ",,terminal_output +1141,686864,"TERMINAL",0,0,"778",,terminal_output +1142,687769,"TERMINAL",0,0,"8\t ",,terminal_output +1143,687899,"TERMINAL",0,0,"889",,terminal_output +1144,688869,"TERMINAL",0,0,"9\t ",,terminal_output +1145,688903,"TERMINAL",0,0,"9940",,terminal_output +1146,689890,"TERMINAL",0,0,"20\t ",,terminal_output +1147,689922,"TERMINAL",0,0,"20401",,terminal_output +1148,691016,"TERMINAL",0,0,"1\t ",,terminal_output +1149,691049,"TERMINAL",0,0,"112",,terminal_output +1150,691943,"TERMINAL",0,0,"2\t ",,terminal_output +1151,691962,"TERMINAL",0,0,"223",,terminal_output +1152,693169,"TERMINAL",0,0,"334",,terminal_output +1153,693199,"TERMINAL",0,0,"3\t ",,terminal_output +1154,693988,"TERMINAL",0,0,"445",,terminal_output +1155,694006,"TERMINAL",0,0,"4\t ",,terminal_output +1156,695112,"TERMINAL",0,0,"556",,terminal_output +1157,695112,"TERMINAL",0,0,"5\t ",,terminal_output +1158,696064,"TERMINAL",0,0,"667",,terminal_output +1159,696096,"TERMINAL",0,0,"6\t ",,terminal_output +1160,697037,"TERMINAL",0,0,"778",,terminal_output +1161,697114,"TERMINAL",0,0,"7\t ",,terminal_output +1162,698185,"TERMINAL",0,0,"889",,terminal_output +1163,698185,"TERMINAL",0,0,"8\t ",,terminal_output +1164,699237,"TERMINAL",0,0,"9950",,terminal_output +1165,699265,"TERMINAL",0,0,"95",,terminal_output +1166,700462,"TERMINAL",0,0,"30501",,terminal_output +1167,700463,"TERMINAL",0,0,"30\t ",,terminal_output +1168,701466,"TERMINAL",0,0,"112",,terminal_output +1169,701471,"TERMINAL",0,0,"1\t ",,terminal_output +1170,702314,"TERMINAL",0,0,"223",,terminal_output +1171,702375,"TERMINAL",0,0,"2\t ",,terminal_output +1172,703410,"TERMINAL",0,0,"334",,terminal_output +1173,703417,"TERMINAL",0,0,"3\t ",,terminal_output +1174,704322,"TERMINAL",0,0,"445",,terminal_output +1175,704434,"TERMINAL",0,0,"4\t ",,terminal_output +1176,705272,"TERMINAL",0,0,"[?1049l\r[?1l>]0;tum_cte0515@hkn0802:~/Projects/jafar_jobs_3[?2004h(jafar) [tum_cte0515@hkn0802 jafar_jobs_3]$ ",,terminal_output +1177,705478,"TERMINAL",0,0,"5\t ",,terminal_output +1178,705827,"TERMINAL",0,0,"s",,terminal_output +1179,705910,"TERMINAL",0,0,"[?25lc[?25h",,terminal_output +1180,706150,"TERMINAL",0,0,"[?25la[?25h",,terminal_output +1181,706230,"TERMINAL",0,0,"[?25ln[?25h",,terminal_output +1182,706322,"TERMINAL",0,0,"[?25lc[?25h",,terminal_output +1183,706426,"TERMINAL",0,0,"[?25le[?25h",,terminal_output +1184,706540,"TERMINAL",0,0,"[?25ll[?25h",,terminal_output +1185,706555,"TERMINAL",0,0,"6\t ",,terminal_output +1186,706632,"TERMINAL",0,0,"[?25l [?25h",,terminal_output +1187,707074,"TERMINAL",0,0,"3386722",,terminal_output +1188,707274,"TERMINAL",0,0,"3386722\r\n[?2004l\r]0;tum_cte0515@hkn0802:~/Projects/jafar_jobs_3[?2004h(jafar) [tum_cte0515@hkn0802 jafar_jobs_3]$ ",,terminal_output +1189,707556,"TERMINAL",0,0,"7\t ",,terminal_output +1190,708003,"TERMINAL",0,0,"qu",,terminal_output +1191,708631,"TERMINAL",0,0,"8\t ",,terminal_output +1192,709063,"TERMINAL",0,0,"[?25lf[?25h",,terminal_output +1193,709242,"TERMINAL",0,0,"[?25lq[?25h",,terminal_output +1194,709327,"TERMINAL",0,0,"[?25lu[?25h",,terminal_output +1195,709383,"TERMINAL",0,0,"[?25le[?25h",,terminal_output +1196,709483,"TERMINAL",0,0,"[?25lu[?25h",,terminal_output +1197,709607,"TERMINAL",0,0,"[?25le[?25h",,terminal_output +1198,709710,"TERMINAL",0,0,"9\t 
",,terminal_output +1199,709710,"TERMINAL",0,0,"\r\n[?2004l\r[?1049h(B[?7hEvery 1.0s: squeue -o ""%.10i %.16P %.30j %.8u %.8T %.10M %.9l %.6D %R""hkn0802.localdomain: Thu Jul 31 17:10:39 2025JOBIDPARTITIONNAME USER STATE\t TIME TIME_LIMI NODES NODELIST(REASON)3386719\tacceleratedtrain_tokenizer_1e-4 tum_cte0 PENDING\t 0:00 2-00:00:00\t2 (Priority)3386718\tacceleratedtrain_tokenizer_1e-4 tum_cte0 PENDING\t 0:00 2-00:00:00\t2 (Priority)3388147\taccelerated train_dynamics_causal_2_node tum_cte0 PENDING\t 0:00 8:00:003 (Priority)3388140\taccelerated train_dynamics_causal_8_node tum_cte0 RUNNING\t 2:59 8:00:008 hkn[0703-0710]3388128\tacceleratedinteractive tum_cte0 RUNNING\t 9:00 5:00:001 hkn0802\t ",,terminal_output +1200,710782,"TERMINAL",0,0,"403:001",,terminal_output +1201,710782,"TERMINAL",0,0,"40\t ",,terminal_output +1202,711810,"TERMINAL",0,0,"112",,terminal_output +1203,711839,"TERMINAL",0,0,"1\t ",,terminal_output +1204,712827,"TERMINAL",0,0,"234",,terminal_output +1205,712828,"TERMINAL",0,0,"3\t ",,terminal_output +1206,713752,"TERMINAL",0,0,"445",,terminal_output +1207,713815,"TERMINAL",0,0,"4\t ",,terminal_output +1208,714784,"TERMINAL",0,0,"556",,terminal_output +1209,714858,"TERMINAL",0,0,"5\t ",,terminal_output +1210,715799,"TERMINAL",0,0,"667",,terminal_output +1211,715928,"TERMINAL",0,0,"6\t ",,terminal_output +1212,716822,"TERMINAL",0,0,"778",,terminal_output +1213,716948,"TERMINAL",0,0,"7\t ",,terminal_output +1214,717764,"TERMINAL",0,0,"889",,terminal_output +1215,717965,"TERMINAL",0,0,"8\t ",,terminal_output +1216,718778,"TERMINAL",0,0,"9910",,terminal_output +1217,719177,"TERMINAL",0,0,"9\t ",,terminal_output +1218,720006,"TERMINAL",0,0,"50101",,terminal_output +1219,720068,"TERMINAL",0,0,"50\t ",,terminal_output +1220,721019,"TERMINAL",0,0,"112",,terminal_output +1221,721094,"TERMINAL",0,0,"1\t ",,terminal_output +1222,721900,"TERMINAL",0,0,"223",,terminal_output +1223,722137,"TERMINAL",0,0,"2\t ",,terminal_output +1224,722890,"TERMINAL",0,0,"334",,terminal_output +1225,723183,"TERMINAL",0,0,"3\t ",,terminal_output +1226,723989,"TERMINAL",0,0,"445",,terminal_output +1227,724405,"TERMINAL",0,0,"4\t ",,terminal_output +1228,724828,"TERMINAL",0,0,"[?1049l\r[?1l>]0;tum_cte0515@hkn0802:~/Projects/jafar_jobs_3[?2004h(jafar) [tum_cte0515@hkn0802 jafar_jobs_3]$ ",,terminal_output +1229,725309,"TERMINAL",0,0,"5\t ",,terminal_output +1230,726377,"TERMINAL",0,0,"6\t ",,terminal_output +1231,727022,"TERMINAL",0,0,"[?25lru[?25h",,terminal_output +1232,727160,"TERMINAL",0,0,"[?25lu[?25h",,terminal_output +1233,727244,"TERMINAL",0,0,"[?25ln[?25h",,terminal_output +1234,727392,"TERMINAL",0,0,"7\t ",,terminal_output +1235,727397,"TERMINAL",0,0,"[?25ln[?25h",,terminal_output +1236,727521,"TERMINAL",0,0,"[?25le[?25h",,terminal_output +1237,727580,"TERMINAL",0,0,"[?25lr[?25h",,terminal_output +1238,727779,"TERMINAL",0,0,"\r\n[?2004l\r]0;tum_cte0515@hkn0802:~/Projects/jafar_jobs[?2004h(jafar) [tum_cte0515@hkn0802 jafar_jobs]$ ",,terminal_output +1239,728381,"TERMINAL",0,0,"8\t ",,terminal_output +1240,728793,"TERMINAL",0,0,"iv",,terminal_output +1241,729491,"TERMINAL",0,0,"9\t ",,terminal_output +1242,729570,"TERMINAL",0,0,"[?25lvi[?25h[?25lm[?25h",,terminal_output +1243,729778,"TERMINAL",0,0,"[?25l [?25h",,terminal_output +1244,730472,"TERMINAL",0,0,"1:00\t ",,terminal_output +1245,731449,"TERMINAL",0,0,"[?25ls[?25h",,terminal_output +1246,731449,"TERMINAL",0,0,"[?25ll[?25h",,terminal_output +1247,731626,"TERMINAL",0,0,"1\t ",,terminal_output 
+1248,731825,"TERMINAL",0,0,"urm",,terminal_output +1249,732592,"TERMINAL",0,0,"2\t ",,terminal_output +1250,732949,"TERMINAL",0,0,"[?25l/[?25h",,terminal_output +1251,733338,"TERMINAL",0,0,"[?25lj[?25h",,terminal_output +1252,733432,"TERMINAL",0,0,"obs/",,terminal_output +1253,733572,"TERMINAL",0,0,"3\t ",,terminal_output +1254,733623,"TERMINAL",0,0,"[?25lb[?25h",,terminal_output +1255,734219,"TERMINAL",0,0,"[?25lm[?25h",,terminal_output +1256,734278,"TERMINAL",0,0,"ihir/",,terminal_output +1257,734537,"TERMINAL",0,0,"",,terminal_output +1258,734628,"TERMINAL",0,0,"4\t ",,terminal_output +1259,734736,"TERMINAL",0,0,"\r\nhoreka/ placeholder \r\n(jafar) [tum_cte0515@hkn0802 jafar_jobs]$ vim slurm/jobs/mihir/",,terminal_output +1260,735766,"TERMINAL",0,0,"5\t ",,terminal_output +1261,735923,"TERMINAL",0,0,"h",,terminal_output +1262,736005,"TERMINAL",0,0,"oreka/",,terminal_output +1263,736326,"TERMINAL",0,0,"",,terminal_output +1264,736486,"TERMINAL",0,0,"\r\naction_space_scaling/ lr_tuning/ overfit_batch/ overfit_sample_tiny/ \r\nbatchsize_scaling/ mask_prob_fix/ overfit_batch_tiny/ sbatch_dir.sh \r\ncausal_big_runs/ modelsize_scaling/ overfit_sample/ yolo-runs/ \r\n(jafar) [tum_cte0515@hkn0802 jafar_jobs]$ vim slurm/jobs/mihir/horeka/",,terminal_output +1265,736700,"TERMINAL",0,0,"6\t ",,terminal_output +1266,737827,"TERMINAL",0,0,"7\t ",,terminal_output +1267,738793,"TERMINAL",0,0,"9\t ",,terminal_output +1268,739810,"TERMINAL",0,0,"1070",,terminal_output +1269,739865,"TERMINAL",0,0,"m",,terminal_output +1270,740046,"TERMINAL",0,0,"[?25la[?25h",,terminal_output +1271,740852,"TERMINAL",0,0,"1\t ",,terminal_output +1272,741010,"TERMINAL",0,0,"[?25ls[?25h",,terminal_output +1273,741905,"TERMINAL",0,0,"2\t ",,terminal_output +1274,743036,"TERMINAL",0,0,"3\t ",,terminal_output +1275,744059,"TERMINAL",0,0,"469",,terminal_output +1276,744267,"TERMINAL",0,0,"[?25ll[?25h",,terminal_output +1277,744363,"TERMINAL",0,0,"r_tuning/",,terminal_output +1278,744631,"TERMINAL",0,0,"",,terminal_output +1279,744784,"TERMINAL",0,0,"\r\ndynamics/ tokenizer/ \r\n(jafar) [tum_cte0515@hkn0802 jafar_jobs]$ vim slurm/jobs/mihir/horeka/lr_tuning/",,terminal_output +1280,745084,"TERMINAL",0,0,"5\t ",,terminal_output +1281,745245,"TERMINAL",0,0,"t",,terminal_output +1282,745439,"TERMINAL",0,0,"[?25lo[?25h",,terminal_output +1283,745521,"TERMINAL",0,0,"kenizer/",,terminal_output +1284,745908,"TERMINAL",0,0,"train_tokenizer_lr_",,terminal_output +1285,746088,"TERMINAL",0,0,"",,terminal_output +1286,746089,"TERMINAL",0,0,"6\t ",,terminal_output +1287,746570,"TERMINAL",0,0,"\r\ntrain_tokenizer_lr_1e-4_8nodes.sbatch train_tokenizer_lr_1e-4.sbatch train_tokenizer_lr_5e-6_8nodes.sbatch\r\ntrain_tokenizer_lr_1e-4_dev.sbatch train_tokenizer_lr_5e-5.sbatch \r\n(jafar) [tum_cte0515@hkn0802 jafar_jobs]$ vim slurm/jobs/mihir/horeka/lr_tuning/tokenizer/train_tokenizer_lr_",,terminal_output +1288,746737,"TERMINAL",0,0,"\r\ntrain_tokenizer_lr_1e-4_8nodes.sbatch train_tokenizer_lr_1e-4.sbatch train_tokenizer_lr_5e-6_8nodes.sbatch\r\ntrain_tokenizer_lr_1e-4_dev.sbatch train_tokenizer_lr_5e-5.sbatch \r\n(jafar) [tum_cte0515@hkn0802 jafar_jobs]$ vim slurm/jobs/mihir/horeka/lr_tuning/tokenizer/train_tokenizer_lr_",,terminal_output +1289,747233,"TERMINAL",0,0,"7\t ",,terminal_output +1290,747824,"TERMINAL",0,0,"1",,terminal_output +1291,748045,"TERMINAL",0,0,"e-4",,terminal_output +1292,748131,"TERMINAL",0,0,"8\t ",,terminal_output +1293,749284,"TERMINAL",0,0,"9\t ",,terminal_output +1294,749713,"TERMINAL",0,0,"[?25l.[?25h",,terminal_output 
+1295,749841,"TERMINAL",0,0,"sbatch ",,terminal_output +1296,750330,"TERMINAL",0,0,"20\t ",,terminal_output +1297,751017,"TERMINAL",0,0,"\r\n[?2004l\r[?1049h[>4;2m[?1h=[?2004h[?1004h[?12h[?12l[?25l""slurm/jobs/mihir/horeka/lr_tuning/tokenizer/train_tokenizer_lr_1e-4.sbatch"" 72L, 2049B▽ Pzz\[0%m [>c]10;?]11;?#!/usr/bin/env bash#SBATCH --nodes=2#SBATCH --ntasks-per-node=4\r\n#SBATCH --time=48:00:00\r\n#SBATCH --partition=accelerated\r\n#SBATCH --cpus-per-task=5\r\n#SBATCH --gres=gpu:4\r\n#SBATCH --output=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir/big_run/tokenizer/%x_%j.log\r\n#SBATCH --error=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir/big_run/tokenizer/%x_%j.log\r\n#SBATCH --job-name=train_tokenizer_1e-4\r\n#SBATCH --requeue\r\n#SBATCH --signal=b:usr1@300 # 5 min before timeout\r\n\r\n# --- signal trap to requeue job before timeout ---\r\nrequeue_job() {\r\n echo ""[$(date)] caught sigusr1 (timeout warning), requeueing slurm job $SLURM_JOB_ID...""\r\n # optional: trigger checkpoint saving here\r\n # e.g., touch $checkpoint_dir/requeue_trigger\r\n scontrol requeue $SLURM_JOB_ID\r\n exit 0\r\n}\r\n\r\ntrap requeue_job sigusr15,17Top[?25h",,terminal_output +1298,751117,"TERMINAL",0,0,"P+q436f\P+q6b75\P+q6b64\P+q6b72\P+q6b6c\P+q2332\P+q2334\P+q2569\P+q2a37\P+q6b31\[?12$p[?25l/3333/3333 [?25h[?25l/f6f6/e3e3 [?25h",,terminal_output +1299,751253,"TERMINAL",0,0,"1\t ",,terminal_output +1300,751702,"TERMINAL",0,0,"[?25lj 6[?25h",,terminal_output +1301,752245,"TERMINAL",0,0,"[?25lj 7[?25h",,terminal_output +1302,752349,"TERMINAL",0,0,"2\t ",,terminal_output +1303,752742,"TERMINAL",0,0,"[?25lk 6[?25h",,terminal_output +1304,752912,"TERMINAL",0,0,"[?25lk 5[?25h",,terminal_output +1305,753483,"TERMINAL",0,0,"3\t ",,terminal_output +1306,754395,"TERMINAL",0,0,"4\t ",,terminal_output +1307,755432,"TERMINAL",0,0,"5\t ",,terminal_output +1308,756056,"TERMINAL",0,0,"[?25ll 8[?25h",,terminal_output +1309,756373,"TERMINAL",0,0,"[?25li -- INSERT --5,18Top[?25h",,terminal_output +1310,756467,"TERMINAL",0,0,"6\t ",,terminal_output +1311,756650,"TERMINAL",0,0,"[?25l:00:07[?25h",,terminal_output +1312,756847,"TERMINAL",0,0,"[?25l:00:06[?25h",,terminal_output +1313,757073,"TERMINAL",0,0,"[?25l1:00:007[?25h",,terminal_output +1314,757149,"TERMINAL",0,0,"[?25l2:00:008[?25h",,terminal_output +1315,757502,"TERMINAL",0,0,"7\t ",,terminal_output +1316,757703,"TERMINAL",0,0,"[?25l^[",,terminal_output +1317,757830,"TERMINAL",0,0," 5,17Top[?25h",,terminal_output +1318,758605,"TERMINAL",0,0,"8\t ",,terminal_output +1319,759981,"TERMINAL",0,0,"9\t ",,terminal_output +1320,760050,"TERMINAL",0,0,"[?25l::[?25h",,terminal_output +1321,760673,"TERMINAL",0,0,"w",,terminal_output +1322,760857,"TERMINAL",0,0,"q",,terminal_output +1323,760857,"TERMINAL",0,0,"31\t ",,terminal_output +1324,761029,"TERMINAL",0,0,"\r[?25l[?2004l[>4;m""slurm/jobs/mihir/horeka/lr_tuning/tokenizer/train_tokenizer_lr_1e-4.sbatch""\r[?25h[>4;m[?1049l]0;tum_cte0515@hkn0802:~/Projects/jafar_jobs[?2004h(jafar) [tum_cte0515@hkn0802 jafar_jobs]$ ",,terminal_output +1325,762005,"TERMINAL",0,0,"2\t ",,terminal_output +1326,762086,"TERMINAL",0,0,"vim slurm/jobs/mihir/horeka/lr_tuning/tokenizer/train_tokenizer_lr_1e-4.sbatch ",,terminal_output +1327,762933,"TERMINAL",0,0,"3\t ",,terminal_output +1328,764058,"TERMINAL",0,0,"4\t ",,terminal_output +1329,764434,"TERMINAL",0,0,"[?25lm[?25h",,terminal_output +1330,764633,"TERMINAL",0,0,"[?25li[?25h",,terminal_output 
+1331,764733,"TERMINAL",0,0,"[?25lv[?25h",,terminal_output +1332,764896,"TERMINAL",0,0,"[1@s",,terminal_output +1333,764996,"TERMINAL",0,0,"5\t ",,terminal_output +1334,765056,"TERMINAL",0,0,"[?25l [1@b[?25h",,terminal_output +1335,765169,"TERMINAL",0,0,"[?25l [1@a[?25h",,terminal_output +1336,765269,"TERMINAL",0,0,"[?25lt slurm/jobs/mihir/horeka/lr_tuning/tokenizer/train_tokenizer_lr_1e-4.sbatch [?25h",,terminal_output +1337,765369,"TERMINAL",0,0,"[?25lc slurm/jobs/mihir/horeka/lr_tuning/tokenizer/train_tokenizer_lr_1e-4.sbatch [?25h",,terminal_output +1338,765440,"TERMINAL",0,0,"[?25lh slurm/jobs/mihir/horeka/lr_tuning/tokenizer/train_tokenizer_lr_1e-4.sbatch [?25h",,terminal_output +1339,766040,"TERMINAL",0,0,"6\t ",,terminal_output +1340,766334,"TERMINAL",0,0,"[?25l\r\r\n[?2004l\r[?25hSubmitted batch job 3388151\r\n]0;tum_cte0515@hkn0802:~/Projects/jafar_jobs[?2004h(jafar) [tum_cte0515@hkn0802 jafar_jobs]$ ",,terminal_output +1341,767100,"TERMINAL",0,0,"7\t ",,terminal_output +1342,767409,"TERMINAL",0,0,"r",,terminal_output +1343,767504,"TERMINAL",0,0,"[?25lu[?25h",,terminal_output +1344,767679,"TERMINAL",0,0,"[?25ln[?25h",,terminal_output +1345,767773,"TERMINAL",0,0,"[?25le[?25h",,terminal_output +1346,767833,"TERMINAL",0,0,"[?25lr[?25h",,terminal_output +1347,768208,"TERMINAL",0,0,"8\t ",,terminal_output +1348,768966,"TERMINAL",0,0,"[?25ln[?25h",,terminal_output +1349,769050,"TERMINAL",0,0,"[?25le[?25h",,terminal_output +1350,769115,"TERMINAL",0,0,"[?25lr[?25h",,terminal_output +1351,769197,"TERMINAL",0,0,"9\t ",,terminal_output +1352,769478,"TERMINAL",0,0,"[?25l-[?25h",,terminal_output +1353,769708,"TERMINAL",0,0,"[?25l2[?25h",,terminal_output +1354,770118,"TERMINAL",0,0,"\r\n[?2004l\r]0;tum_cte0515@hkn0802:~/Projects/jafar_jobs_2[?2004h(jafar) [tum_cte0515@hkn0802 jafar_jobs_2]$ ",,terminal_output +1355,770250,"TERMINAL",0,0,"40\t ",,terminal_output +1356,770601,"TERMINAL",0,0,"runner-2",,terminal_output +1357,770732,"TERMINAL",0,0,"sbatch slurm/jobs/mihir/horeka/lr_tuning/tokenizer/train_tokenizer_lr_1e-4.sbatch ",,terminal_output +1358,771390,"TERMINAL",0,0,"188",,terminal_output +1359,771552,"TERMINAL",0,0,"vim slurm/jobs/mihir/horeka/lr_tuning/tokenizer/train_tokenizer_lr_1e-4.sbatch ",,terminal_output +1360,772044,"TERMINAL",0,0,"\r\n[?2004l\r[?1049h[>4;2m[?1h=[?2004h[?1004h[?12h[?12l[?25l""slurm/jobs/mihir/horeka/lr_tuning/tokenizer/train_tokenizer_lr_1e-4.sbatch"" 72L, 2049B▽ Pzz\[0%m [>c]10;?]11;?#!/usr/bin/env bash#SBATCH --nodes=2#SBATCH --ntasks-per-node=4\r\n#SBATCH --time=48:00:00\r\n#SBATCH --partition=accelerated\r\n#SBATCH --cpus-per-task=5\r\n#SBATCH --gres=gpu:4\r\n#SBATCH --output=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir/big_run/tokenizer/%x_%j.log\r\n#SBATCH --error=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir/big_run/tokenizer/%x_%j.log\r\n#SBATCH --job-name=train_tokenizer_1e-4\r\n#SBATCH --requeue\r\n#SBATCH --signal=b:usr1@300 # 5 min before timeout\r\n\r\n# --- signal trap to requeue job before timeout ---\r\nrequeue_job() {\r\n echo ""[$(date)] caught sigusr1 (timeout warning), requeueing slurm job $SLURM_JOB_ID...""\r\n # optional: trigger checkpoint saving here\r\n # e.g., touch $checkpoint_dir/requeue_trigger\r\n scontrol requeue $SLURM_JOB_ID\r\n exit 0\r\n}\r\n\r\ntrap requeue_job sigusr15,17Top[?25h",,terminal_output +1361,772105,"TERMINAL",0,0,"P+q436f\P+q6b75\P+q6b64\P+q6b72\P+q6b6c\P+q2332\P+q2334\P+q2569\P+q2a37\P+q6b31\[?12$p[?25l/3333/3333 [?25h[?25l/f6f6/e3e3 [?25h",,terminal_output 
+1362,772292,"TERMINAL",0,0,"2\t ",,terminal_output +1363,773378,"TERMINAL",0,0,"[?25ll 8[?25h",,terminal_output +1364,773379,"TERMINAL",0,0,"3\t ",,terminal_output +1365,773634,"TERMINAL",0,0,"[?25li -- INSERT --5,18Top[?25h",,terminal_output +1366,773923,"TERMINAL",0,0,"[?25l:00:07[?25h",,terminal_output +1367,774014,"TERMINAL",0,0,"[?25l:00:06[?25h",,terminal_output +1368,774215,"TERMINAL",0,0,"[?25l1:00:007[?25h[?25l2:00:008[?25h",,terminal_output +1369,774404,"TERMINAL",0,0,"4\t ",,terminal_output +1370,774806,"TERMINAL",0,0,"[?25l^[",,terminal_output +1371,774890,"TERMINAL",0,0," 5,17Top[?25h",,terminal_output +1372,775203,"TERMINAL",0,0,"[?25l::[?25h",,terminal_output +1373,775300,"TERMINAL",0,0,"w",,terminal_output +1374,775384,"TERMINAL",0,0,"q",,terminal_output +1375,775440,"TERMINAL",0,0,"5\t ",,terminal_output +1376,775974,"TERMINAL",0,0,"\r[?25l[?2004l[>4;m""slurm/jobs/mihir/horeka/lr_tuning/tokenizer/train_tokenizer_lr_1e-4.sbatch""\r[?25h[>4;m[?1049l]0;tum_cte0515@hkn0802:~/Projects/jafar_jobs_2[?2004h(jafar) [tum_cte0515@hkn0802 jafar_jobs_2]$ ",,terminal_output +1377,776468,"TERMINAL",0,0,"6\t ",,terminal_output +1378,776660,"TERMINAL",0,0,"vim slurm/jobs/mihir/horeka/lr_tuning/tokenizer/train_tokenizer_lr_1e-4.sbatch ",,terminal_output +1379,776876,"TERMINAL",0,0,"runner-2\r\n\r",,terminal_output +1380,777544,"TERMINAL",0,0,"vim slurm/jobs/mihir/horeka/lr_tuning/tokenizer/train_tokenizer_lr_1e-4.sbatch ",,terminal_output +1381,777544,"TERMINAL",0,0,"7\t ",,terminal_output +1382,777878,"TERMINAL",0,0,"",,terminal_output +1383,778545,"TERMINAL",0,0,"8\t ",,terminal_output +1384,778692,"TERMINAL",0,0,"",,terminal_output +1385,779196,"TERMINAL",0,0,"",,terminal_output +1386,779593,"TERMINAL",0,0,"9\t ",,terminal_output +1387,779654,"TERMINAL",0,0,"[?25lm slurm/jobs/mihir/horeka/lr_tuning/tokenizer/train_tokenizer_lr_1e-4.sbatch \r[?25h",,terminal_output +1388,779862,"TERMINAL",0,0,"[?25li\r\n\r[?25h",,terminal_output +1389,779948,"TERMINAL",0,0,"[?25lv[?25h",,terminal_output +1390,780102,"TERMINAL",0,0,"[1@s",,terminal_output +1391,780321,"TERMINAL",0,0,"[?25lb slurm/jobs/mihir/horeka/lr_tuning/tokenizer/train_tokenizer_lr_1e-4.sbatch [?25h",,terminal_output +1392,780404,"TERMINAL",0,0,"[?25la slurm/jobs/mihir/horeka/lr_tuning/tokenizer/train_tokenizer_lr_1e-4.sbatch [?25h",,terminal_output +1393,780595,"TERMINAL",0,0,"[?25lts slurm/jobs/mihir/horeka/lr_tuning/tokenizer/train_tokenizer_lr_1e-4.sbatch [?25h",,terminal_output +1394,780683,"TERMINAL",0,0,"50\t ",,terminal_output +1395,780683,"TERMINAL",0,0,"c slurm/jobs/mihir/horeka/lr_tuning/tokenizer/train_tokenizer_lr_1e-4.sbatch ",,terminal_output +1396,780778,"TERMINAL",0,0,"[?25lh slurm/jobs/mihir/horeka/lr_tuning/tokenizer/train_tokenizer_lr_1e-4.sbat[1@c[?25h",,terminal_output +1397,781252,"TERMINAL",0,0,"[?25l\r\r\n[?2004l\r[?25hSubmitted batch job 3388153\r\n]0;tum_cte0515@hkn0802:~/Projects/jafar_jobs_2[?2004h(jafar) [tum_cte0515@hkn0802 jafar_jobs_2]$ ",,terminal_output +1398,781736,"TERMINAL",0,0,"1\t ",,terminal_output +1399,781805,"TERMINAL",0,0,"[?25lqu[?25h",,terminal_output +1400,781863,"TERMINAL",0,0,"[?25lu[?25h",,terminal_output +1401,782032,"TERMINAL",0,0,"[?25lue[?25h",,terminal_output +1402,782713,"TERMINAL",0,0,"2\t ",,terminal_output +1403,783176,"TERMINAL",0,0,"[?25le[?25h",,terminal_output +1404,783239,"TERMINAL",0,0,"[?25lu[?25h",,terminal_output +1405,783384,"TERMINAL",0,0,"[?25le[?25h",,terminal_output +1406,783792,"TERMINAL",0,0,"4\t ",,terminal_output 
+1407,783965,"TERMINAL",0,0,"\r\n[?2004l\r[?1049h(B[?7hEvery 1.0s: squeue --mehkn0802.localdomain: Thu Jul 31 17:11:54 2025JOBID PARTITION NAME USER ST\tTIME NODES NODELIST(REASON)3386719 accelerat train_to tum_cte0 PD\t0:00\t 2 (Priority)3386718 accelerat train_to tum_cte0 PD\t0:00\t 2 (Priority)3388153 accelerat train_to tum_cte0 PD\t0:00\t 2 (Priority)3388147 accelerat train_dy tum_cte0 R\t0:14\t 3 hkn[0713-0715]3388151 accelerat train_to tum_cte0 R\t0:14\t 2 hkn[0809,0811]3388140 accelerat train_dy tum_cte0 R\t4:14\t 8 hkn[0703-0710]3388128 accelerat interact tum_cte0 R10:15\t 1 hkn0802",,terminal_output +1408,784831,"TERMINAL",0,0,"5\t ",,terminal_output +1409,784981,"TERMINAL",0,0,"55556",,terminal_output +1410,785944,"TERMINAL",0,0,"6\t ",,terminal_output +1411,786004,"TERMINAL",0,0,"66667",,terminal_output +1412,786967,"TERMINAL",0,0,"7\t ",,terminal_output +1413,787028,"TERMINAL",0,0,"77778",,terminal_output +1414,787989,"TERMINAL",0,0,"8\t ",,terminal_output +1415,788052,"TERMINAL",0,0,"88889",,terminal_output +1416,789023,"TERMINAL",0,0,"9\t ",,terminal_output +1417,789058,"TERMINAL",0,0,"999920",,terminal_output +1418,790044,"TERMINAL",0,0,"2:00\t ",,terminal_output +1419,790095,"TERMINAL",0,0,"2:002020201",,terminal_output +1420,791095,"TERMINAL",0,0,"1\t ",,terminal_output +1421,791096,"TERMINAL",0,0,"11112",,terminal_output +1422,792292,"TERMINAL",0,0,"2\t ",,terminal_output +1423,792313,"TERMINAL",0,0,"22223",,terminal_output +1424,793336,"TERMINAL",0,0,"3\t ",,terminal_output +1425,793337,"TERMINAL",0,0,"33334",,terminal_output +1426,794146,"TERMINAL",0,0,"44445",,terminal_output +1427,794162,"TERMINAL",0,0,"4\t ",,terminal_output +1428,795177,"TERMINAL",0,0,"55556",,terminal_output +1429,795205,"TERMINAL",0,0,"5\t ",,terminal_output +1430,796182,"TERMINAL",0,0,"66667",,terminal_output +1431,796242,"TERMINAL",0,0,"[?1049l\r[?1l>]0;tum_cte0515@hkn0802:~/Projects/jafar_jobs_2[?2004h(jafar) [tum_cte0515@hkn0802 jafar_jobs_2]$ ",,terminal_output +1432,796260,"TERMINAL",0,0,"6\t ",,terminal_output +1433,797332,"TERMINAL",0,0,"7\t ",,terminal_output +1434,797364,"TERMINAL",0,0,"f",,terminal_output +1435,797605,"TERMINAL",0,0,"[?25lq[?25h[?25lu[?25h",,terminal_output +1436,797721,"TERMINAL",0,0,"[?25le[?25h",,terminal_output +1437,797778,"TERMINAL",0,0,"[?25lu[?25h",,terminal_output +1438,797869,"TERMINAL",0,0,"[?25le[?25h",,terminal_output +1439,798013,"TERMINAL",0,0,"\r\n[?2004l\r[?1049h(B[?7hEvery 1.0s: squeue -o ""%.10i %.16P %.30j %.8u %.8T %.10M %.9l %.6D %R""hkn0802.localdomain: Thu Jul 31 17:12:08 2025JOBIDPARTITIONNAME USER STATE\t TIME TIME_LIMI NODES NODELIST(REASON)3386719\tacceleratedtrain_tokenizer_1e-4 tum_cte0 PENDING\t 0:00 2-00:00:00\t2 (Priority)3386718\tacceleratedtrain_tokenizer_1e-4 tum_cte0 PENDING\t 0:00 2-00:00:00\t2 (Priority)3388153\tacceleratedtrain_tokenizer_1e-4 tum_cte0 PENDING\t 0:00 12:00:002 (Priority)3388147\taccelerated train_dynamics_causal_2_node tum_cte0 RUNNING\t 0:28 8:00:003 hkn[0713-0715]3388151\tacceleratedtrain_tokenizer_1e-4 tum_cte0 RUNNING\t 0:28 12:00:002 hkn[0809,0811]3388140\taccelerated train_dynamics_causal_8_node tum_cte0 RUNNING\t 4:28 8:00:008 hkn[0703-0710]3388128\tacceleratedinteractive tum_cte0 RUNNING\t 10:29 5:00:001 hkn0802\t ",,terminal_output +1440,798334,"TERMINAL",0,0,"8\t ",,terminal_output +1441,799063,"TERMINAL",0,0,"999930",,terminal_output +1442,799389,"TERMINAL",0,0,"9\t ",,terminal_output +1443,799970,"TERMINAL",0,0,"103030301",,terminal_output +1444,800486,"TERMINAL",0,0,"10\t ",,terminal_output 
+1445,801198,"TERMINAL",0,0,"11112",,terminal_output +1446,801457,"TERMINAL",0,0,"1\t ",,terminal_output +1447,802037,"TERMINAL",0,0,"22223",,terminal_output +1448,802634,"TERMINAL",0,0,"2\t ",,terminal_output +1449,803044,"TERMINAL",0,0,"33334",,terminal_output +1450,803437,"TERMINAL",0,0,"watch",,terminal_focus +1451,803532,"TERMINAL",0,0,"3\t ",,terminal_output +1452,803797,"TERMINAL",0,0,"[?1049l\r[?1l>]0;tum_cte0515@hkn1990:~/Projects/jafar]633;D;0",,terminal_output +1453,804044,"TERMINAL",0,0,"44445",,terminal_output +1454,805051,"TERMINAL",0,0,"55556",,terminal_output +1455,806118,"TERMINAL",0,0,"66667",,terminal_output +1456,806491,"TERMINAL",0,0,"scancel 3386719",,terminal_command +1457,807066,"TERMINAL",0,0,"\r77778",,terminal_output +1458,808033,"TERMINAL",0,0,"scancel 3386718",,terminal_command +1459,808067,"TERMINAL",0,0,"]633;E;2025-07-31 17:12:18 scancel 3386718;fefc14ad-7c55-4796-a83b-9fc26af4ce88]633;C]0;tum_cte0515@hkn1990:~/Projects/jafar]633;D;0]633;P;Cwd=/home/hk-project-p0023960/tum_cte0515/Projects/jafar",,terminal_output +1460,808098,"TERMINAL",0,0,"\r88889",,terminal_output +1461,809275,"TERMINAL",0,0,"999940",,terminal_output +1462,810314,"TERMINAL",0,0,"204040401",,terminal_output +1463,811136,"TERMINAL",0,0,"11112",,terminal_output +1464,812161,"TERMINAL",0,0,"22223",,terminal_output +1465,813186,"TERMINAL",0,0,"33334",,terminal_output +1466,814154,"TERMINAL",0,0,"44445",,terminal_output +1467,815336,"TERMINAL",0,0,"55556",,terminal_output +1468,816369,"TERMINAL",0,0,"66667",,terminal_output +1469,817385,"TERMINAL",0,0,"77778",,terminal_output +1470,818301,"TERMINAL",0,0,"88889",,terminal_output +1471,819227,"TERMINAL",0,0,"999950",,terminal_output +1472,820247,"TERMINAL",0,0,"305050501",,terminal_output +1473,821275,"TERMINAL",0,0,"11112",,terminal_output +1474,822323,"TERMINAL",0,0,"22223",,terminal_output +1475,823348,"TERMINAL",0,0,"33334",,terminal_output +1476,824388,"TERMINAL",0,0,"44445",,terminal_output +1477,825367,"TERMINAL",0,0,"55556",,terminal_output +1478,826392,"TERMINAL",0,0,"66667",,terminal_output +1479,827415,"TERMINAL",0,0,"77778",,terminal_output +1480,828644,"TERMINAL",0,0,"88889",,terminal_output +1481,829363,"TERMINAL",0,0,"99991:00",,terminal_output +1482,830396,"TERMINAL",0,0,"401:001:005:001",,terminal_output +1483,831382,"TERMINAL",0,0,"11112",,terminal_output +1484,832451,"TERMINAL",0,0,"22223",,terminal_output +1485,833461,"TERMINAL",0,0,"33334",,terminal_output +1486,834367,"TERMINAL",0,0,"srun",,terminal_focus +1487,834436,"TERMINAL",0,0,"44445",,terminal_output +1488,835508,"TERMINAL",0,0,"55556",,terminal_output +1489,835658,"TERMINAL",0,0,"[?1049l\r[?1l>]0;tum_cte0515@hkn0802:~/Projects/jafar_jobs_2[?2004h(jafar) [tum_cte0515@hkn0802 jafar_jobs_2]$ ",,terminal_output +1490,902428,"TERMINAL",0,0,"d",,terminal_output +1491,902556,"TERMINAL",0,0,"[?25le[?25h",,terminal_output +1492,902710,"TERMINAL",0,0,"[?25lv[?25h",,terminal_output +1493,902792,"TERMINAL",0,0,"\r\n[?2004l\r]0;tum_cte0515@hkn0802:~/Projects/jafar[?2004h(jafar) [tum_cte0515@hkn0802 jafar]$ ",,terminal_output +1494,903406,"TERMINAL",0,0,"q",,terminal_output +1495,903508,"TERMINAL",0,0,"[?25lu[?25h",,terminal_output +1496,903603,"TERMINAL",0,0,"[?25le[?25h",,terminal_output +1497,903662,"TERMINAL",0,0,"[?25lu[?25h",,terminal_output +1498,903751,"TERMINAL",0,0,"[?25le[?25h",,terminal_output +1499,903839,"TERMINAL",0,0,"\r\n[?2004l\r[?1049h(B[?7hEvery 1.0s: squeue --mehkn0802.localdomain: Thu Jul 31 17:13:54 2025JOBID PARTITION NAME USER ST\tTIME NODES 
NODELIST(REASON)3388153 accelerat train_to tum_cte0 R\t0:13\t 2 hkn[0812-0813]3388147 accelerat train_dy tum_cte0 R\t2:14\t 3 hkn[0713-0715]3388151 accelerat train_to tum_cte0 R\t2:14\t 2 hkn[0809,0811]3388140 accelerat train_dy tum_cte0 R\t6:14\t 8 hkn[0703-0710]3388128 accelerat interact tum_cte0 R12:15\t 1 hkn0802",,terminal_output +1500,904849,"TERMINAL",0,0,"545556",,terminal_output +1501,905960,"TERMINAL",0,0,"656667",,terminal_output +1502,907083,"TERMINAL",0,0,"767778",,terminal_output +1503,908107,"TERMINAL",0,0,"878889",,terminal_output +1504,909112,"TERMINAL",0,0,"9899920",,terminal_output +1505,909948,"TERMINAL",0,0,"4:0092020201",,terminal_output +1506,911114,"TERMINAL",0,0,"1201112",,terminal_output +1507,911969,"TERMINAL",0,0,"212223",,terminal_output +1508,913011,"TERMINAL",0,0,"323334",,terminal_output +1509,914153,"TERMINAL",0,0,"434445",,terminal_output +1510,915171,"TERMINAL",0,0,"545556",,terminal_output +1511,916195,"TERMINAL",0,0,"656667",,terminal_output +1512,917222,"TERMINAL",0,0,"767778",,terminal_output +1513,918247,"TERMINAL",0,0,"878889",,terminal_output +1514,919148,"TERMINAL",0,0,"9899930",,terminal_output +1515,920194,"TERMINAL",0,0,"1093030301",,terminal_output +1516,921316,"TERMINAL",0,0,"1301112",,terminal_output +1517,922237,"TERMINAL",0,0,"212223",,terminal_output +1518,923268,"TERMINAL",0,0,"323334",,terminal_output +1519,924221,"TERMINAL",0,0,"434445",,terminal_output +1520,925217,"TERMINAL",0,0,"545556",,terminal_output +1521,926281,"TERMINAL",0,0,"656667",,terminal_output +1522,927401,"TERMINAL",0,0,"767778",,terminal_output +1523,928382,"TERMINAL",0,0,"878889",,terminal_output +1524,929323,"TERMINAL",0,0,"9899940",,terminal_output +1525,930329,"TERMINAL",0,0,"2094040401",,terminal_output +1526,931332,"TERMINAL",0,0,"1401112",,terminal_output +1527,932382,"TERMINAL",0,0,"212223",,terminal_output +1528,933446,"TERMINAL",0,0,"323334",,terminal_output +1529,934381,"TERMINAL",0,0,"434445",,terminal_output +1530,935450,"TERMINAL",0,0,"545556",,terminal_output +1531,936410,"TERMINAL",0,0,"656667",,terminal_output +1532,937432,"TERMINAL",0,0,"767778",,terminal_output +1533,938447,"TERMINAL",0,0,"878889",,terminal_output +1534,939463,"TERMINAL",0,0,"9899950",,terminal_output +1535,940478,"TERMINAL",0,0,"3095050501",,terminal_output +1536,941498,"TERMINAL",0,0,"1501112",,terminal_output +1537,942522,"TERMINAL",0,0,"212223",,terminal_output +1538,943541,"TERMINAL",0,0,"323334",,terminal_output +1539,944582,"TERMINAL",0,0,"434445",,terminal_output +1540,945603,"TERMINAL",0,0,"545556",,terminal_output +1541,946588,"TERMINAL",0,0,"656667",,terminal_output +1542,947630,"TERMINAL",0,0,"767778",,terminal_output +1543,948675,"TERMINAL",0,0,"878889",,terminal_output +1544,949645,"TERMINAL",0,0,"989993:00",,terminal_output +1545,950661,"TERMINAL",0,0,"4093:003:007:001",,terminal_output +1546,951851,"TERMINAL",0,0,"11:001112",,terminal_output +1547,952704,"TERMINAL",0,0,"223334",,terminal_output +1548,953795,"TERMINAL",0,0,"434445",,terminal_output +1549,954825,"TERMINAL",0,0,"545556",,terminal_output +1550,955841,"TERMINAL",0,0,"656667",,terminal_output +1551,956783,"TERMINAL",0,0,"767778",,terminal_output +1552,957793,"TERMINAL",0,0,"878889",,terminal_output +1553,958921,"TERMINAL",0,0,"9899910",,terminal_output +1554,959821,"TERMINAL",0,0,"5091010101",,terminal_output +1555,960962,"TERMINAL",0,0,"1101112",,terminal_output +1556,961988,"TERMINAL",0,0,"212223",,terminal_output +1557,962869,"TERMINAL",0,0,"323334",,terminal_output 
+1558,963887,"TERMINAL",0,0,"434445",,terminal_output +1559,964956,"TERMINAL",0,0,"545556",,terminal_output +1560,965931,"TERMINAL",0,0,"656667",,terminal_output +1561,967108,"TERMINAL",0,0,"767778",,terminal_output +1562,968028,"TERMINAL",0,0,"878889",,terminal_output +1563,968990,"TERMINAL",0,0,"9899920",,terminal_output +1564,970016,"TERMINAL",0,0,"5:0092020201",,terminal_output +1565,971034,"TERMINAL",0,0,"1201112",,terminal_output +1566,972103,"TERMINAL",0,0,"212223",,terminal_output +1567,973062,"TERMINAL",0,0,"323334",,terminal_output +1568,974076,"TERMINAL",0,0,"434445",,terminal_output +1569,975137,"TERMINAL",0,0,"545556",,terminal_output +1570,976118,"TERMINAL",0,0,"656667",,terminal_output +1571,977143,"TERMINAL",0,0,"767778",,terminal_output +1572,978172,"TERMINAL",0,0,"878889",,terminal_output +1573,979239,"TERMINAL",0,0,"9899930",,terminal_output +1574,980182,"TERMINAL",0,0,"1093030301",,terminal_output +1575,981202,"TERMINAL",0,0,"1301112",,terminal_output +1576,982265,"TERMINAL",0,0,"212223",,terminal_output +1577,983287,"TERMINAL",0,0,"323334",,terminal_output +1578,984267,"TERMINAL",0,0,"434445",,terminal_output +1579,985270,"TERMINAL",0,0,"545556",,terminal_output +1580,986289,"TERMINAL",0,0,"656667",,terminal_output +1581,987394,"TERMINAL",0,0,"767778",,terminal_output +1582,988507,"TERMINAL",0,0,"878889",,terminal_output +1583,989381,"TERMINAL",0,0,"9899940",,terminal_output +1584,990558,"TERMINAL",0,0,"2094040401",,terminal_output +1585,991394,"TERMINAL",0,0,"1401112",,terminal_output +1586,992391,"TERMINAL",0,0,"212223",,terminal_output +1587,993412,"TERMINAL",0,0,"323334",,terminal_output +1588,994429,"TERMINAL",0,0,"434445",,terminal_output +1589,995476,"TERMINAL",0,0,"545556",,terminal_output +1590,996466,"TERMINAL",0,0,"656667",,terminal_output +1591,997508,"TERMINAL",0,0,"767778",,terminal_output +1592,998505,"TERMINAL",0,0,"878889",,terminal_output +1593,999516,"TERMINAL",0,0,"9899950",,terminal_output +1594,1000541,"TERMINAL",0,0,"3095050501",,terminal_output +1595,1001562,"TERMINAL",0,0,"1501112",,terminal_output +1596,1002640,"TERMINAL",0,0,"212223",,terminal_output +1597,1003595,"TERMINAL",0,0,"323334",,terminal_output +1598,1004610,"TERMINAL",0,0,"434445",,terminal_output +1599,1005822,"TERMINAL",0,0,"545556",,terminal_output +1600,1006699,"TERMINAL",0,0,"656667",,terminal_output +1601,1007763,"TERMINAL",0,0,"767778",,terminal_output +1602,1008785,"TERMINAL",0,0,"878889",,terminal_output +1603,1009810,"TERMINAL",0,0,"994:004:008:004:01",,terminal_output +1604,1010738,"TERMINAL",0,0,"412:001112",,terminal_output +1605,1011741,"TERMINAL",0,0,"212223",,terminal_output +1606,1012882,"TERMINAL",0,0,"323334",,terminal_output +1607,1013830,"TERMINAL",0,0,"434445",,terminal_output +1608,1014825,"TERMINAL",0,0,"545556",,terminal_output +1609,1015852,"TERMINAL",0,0,"656667",,terminal_output +1610,1016879,"TERMINAL",0,0,"767778",,terminal_output +1611,1017853,"TERMINAL",0,0,"878889",,terminal_output +1612,1018869,"TERMINAL",0,0,"9899910",,terminal_output +1613,1019945,"TERMINAL",0,0,"5091010101",,terminal_output +1614,1020970,"TERMINAL",0,0,"1101112",,terminal_output +1615,1021995,"TERMINAL",0,0,"212223",,terminal_output +1616,1023021,"TERMINAL",0,0,"323334",,terminal_output +1617,1024149,"TERMINAL",0,0,"434445",,terminal_output +1618,1025066,"TERMINAL",0,0,"545556",,terminal_output +1619,1026088,"TERMINAL",0,0,"656667",,terminal_output +1620,1026997,"TERMINAL",0,0,"767778",,terminal_output +1621,1028035,"TERMINAL",0,0,"878889",,terminal_output 
+1622,1029165,"TERMINAL",0,0,"9899920",,terminal_output +1623,1030186,"TERMINAL",0,0,"6:0092020201",,terminal_output +1624,1031106,"TERMINAL",0,0,"1201112",,terminal_output +1625,1032234,"TERMINAL",0,0,"212223",,terminal_output +1626,1033155,"TERMINAL",0,0,"323334",,terminal_output +1627,1034222,"TERMINAL",0,0,"434445",,terminal_output +1628,1035207,"TERMINAL",0,0,"545556",,terminal_output +1629,1036229,"TERMINAL",0,0,"656667",,terminal_output +1630,1037252,"TERMINAL",0,0,"767778",,terminal_output +1631,1038274,"TERMINAL",0,0,"878889",,terminal_output +1632,1039212,"TERMINAL",0,0,"9899930",,terminal_output +1633,1040222,"TERMINAL",0,0,"1093030301",,terminal_output +1634,1041246,"TERMINAL",0,0,"1301112",,terminal_output +1635,1042396,"TERMINAL",0,0,"212223",,terminal_output +1636,1043262,"TERMINAL",0,0,"323334",,terminal_output +1637,1044278,"TERMINAL",0,0,"434445",,terminal_output +1638,1045342,"TERMINAL",0,0,"545556",,terminal_output +1639,1046404,"TERMINAL",0,0,"656667",,terminal_output +1640,1047392,"TERMINAL",0,0,"767778",,terminal_output +1641,1048416,"TERMINAL",0,0,"878889",,terminal_output +1642,1049392,"TERMINAL",0,0,"9899940",,terminal_output +1643,1050564,"TERMINAL",0,0,"2094040401",,terminal_output +1644,1051400,"TERMINAL",0,0,"1401112",,terminal_output +1645,1052416,"TERMINAL",0,0,"212223",,terminal_output +1646,1053537,"TERMINAL",0,0,"323334",,terminal_output +1647,1054458,"TERMINAL",0,0,"434445",,terminal_output +1648,1055468,"TERMINAL",0,0,"545556",,terminal_output +1649,1056482,"TERMINAL",0,0,"656667",,terminal_output +1650,1057501,"TERMINAL",0,0,"767778",,terminal_output +1651,1058554,"TERMINAL",0,0,"878889",,terminal_output +1652,1059534,"TERMINAL",0,0,"9899950",,terminal_output +1653,1060546,"TERMINAL",0,0,"3095050501",,terminal_output +1654,1061571,"TERMINAL",0,0,"1501112",,terminal_output +1655,1062582,"TERMINAL",0,0,"212223",,terminal_output +1656,1063597,"TERMINAL",0,0,"323334",,terminal_output +1657,1064784,"TERMINAL",0,0,"434445",,terminal_output +1658,1065727,"TERMINAL",0,0,"545556",,terminal_output +1659,1066652,"TERMINAL",0,0,"656667",,terminal_output +1660,1067769,"TERMINAL",0,0,"767778",,terminal_output +1661,1068795,"TERMINAL",0,0,"878889",,terminal_output +1662,1069714,"TERMINAL",0,0,"995:005:009:005:01",,terminal_output +1663,1070743,"TERMINAL",0,0,"413:001112",,terminal_output +1664,1071758,"TERMINAL",0,0,"212223",,terminal_output +1665,1072787,"TERMINAL",0,0,"323334",,terminal_output +1666,1073770,"TERMINAL",0,0,"434445",,terminal_output +1667,1074835,"TERMINAL",0,0,"545556",,terminal_output +1668,1075858,"TERMINAL",0,0,"656667",,terminal_output +1669,1076882,"TERMINAL",0,0,"767778",,terminal_output +1670,1077846,"TERMINAL",0,0,"878889",,terminal_output +1671,1078856,"TERMINAL",0,0,"9899910",,terminal_output +1672,1080055,"TERMINAL",0,0,"5091010101",,terminal_output +1673,1080345,"TERMINAL",0,0,"[?1049l\r[?1l>]0;tum_cte0515@hkn0802:~/Projects/jafar[?2004h(jafar) [tum_cte0515@hkn0802 jafar]$ ",,terminal_output +1674,1083194,"TERMINAL",0,0,"[?25lgi[?25h",,terminal_output +1675,1083263,"TERMINAL",0,0,"[?25li[?25h",,terminal_output +1676,1083412,"TERMINAL",0,0,"[?25lt[?25h",,terminal_output +1677,1083530,"TERMINAL",0,0,"[?25l [?25h",,terminal_output +1678,1083878,"TERMINAL",0,0,"[?25lb[?25h",,terminal_output +1679,1083967,"TERMINAL",0,0,"[?25lr[?25h",,terminal_output +1680,1084127,"TERMINAL",0,0,"[?25la[?25h",,terminal_output +1681,1084202,"TERMINAL",0,0,"[?25ln[?25h",,terminal_output +1682,1084307,"TERMINAL",0,0,"[?25lc[?25h",,terminal_output 
+1683,1084404,"TERMINAL",0,0,"[?25lh[?25h",,terminal_output +1684,1084608,"TERMINAL",0,0,"\r\n[?2004l\r[?1h=\r",,terminal_output +1685,1084744,"TERMINAL",0,0," add-wandb-name-and-tags\r\n before-nnx\r\n causal-st-transformer\r\n causal-transformer-dynamics-model\r\n convert-to-jax-array-in-iter\r\n correct-batched-sampling\r\n dev\r\n dont-let-tf-see-gpu\r\n feat/explicit-image-dims\r\n fix-action-padding-lam-future-information-access\r\n fix-sampling\r\n fix-transformer-forwardpass\r\n fix/spatiotemporal-pe-once-in-STTransformer\r\n grad-norm-log-and-clip\r\n grain-dataloader\r\n logging-variants\r\n lr-schedules\r\n main\r\n maskgit-different-maskprob-per-sample\r\n metrics-logging-for-dynamics-model\r\n monkey-patch\r\n new-arch-sampling\r\n preprocess_video\r\n refactor-tmp\r\n:",,terminal_output +1686,1088453,"TERMINAL",0,0,"\r revised-dataloader\r\n:",,terminal_output +1687,1089253,"TERMINAL",0,0,"\r runner\r\n:\r runner-grain\r\n:\r* sample-ali-branch\r\n:\r sample-from-different-topologies\r\n:\r speedup-tfrecord-preprocessing\r\n:\r tmp\r\n:\r\r(END)\r\r(END)[?25l\r\r(END)[?25h[?25l\r\r(END)[?25h[?25l\r\r(END)[?25h",,terminal_output +1688,1089531,"TERMINAL",0,0,"[?25l\r\r(END)[?25h[?25l\r\r(END)\r\r(END)[?25h[?25l\r\r(END)[?25h[?25l\r\r(END)[?25h[?25l\r\r(END)[?25h[?25l\r\r(END)[?25h\r\r(END)\r\r(END)",,terminal_output +1689,1090210,"TERMINAL",0,0,"[?25l\r[?1l>]0;tum_cte0515@hkn0802:~/Projects/jafar[?2004h(jafar) [tum_cte0515@hkn0802 jafar]$ [?25h",,terminal_output +1690,1092743,"TERMINAL",0,0,"[?25lg[?25h[?25li[?25h",,terminal_output +1691,1092823,"TERMINAL",0,0,"[?25lt[?25h",,terminal_output +1692,1092885,"TERMINAL",0,0,"[?25l [?25h",,terminal_output +1693,1093163,"TERMINAL",0,0,"[?25ls[?25h",,terminal_output +1694,1093270,"TERMINAL",0,0,"[?25lt[?25h",,terminal_output +1695,1093358,"TERMINAL",0,0,"[?25la[?25h",,terminal_output +1696,1093417,"TERMINAL",0,0,"[?25lt[?25h",,terminal_output +1697,1093538,"TERMINAL",0,0,"[?25lu[?25h",,terminal_output +1698,1093602,"TERMINAL",0,0,"[?25ls[?25h",,terminal_output +1699,1093758,"TERMINAL",0,0,"\r\n[?2004l\r",,terminal_output +1700,1094096,"TERMINAL",0,0,"On branch sample-ali-branch\r\nChanges not staged for commit:\r\n (use ""git add ..."" to update what will be committed)\r\n (use ""git restore ..."" to discard changes in working directory)\r\n\tmodified: genie.py\r\n\r\nUntracked files:\r\n (use ""git add ..."" to include in what will be committed)\r\n\tdiff.diff\r\n\tdiff.log\r\n\tgenie_fixed_maskgit.py\r\n\tlogs/\r\n\toverfit_dir.zip\r\n\tread_tf_record.py\r\n\trequirements-franz.txt\r\n\tscripts_cremers/\r\n\tscripts_horeka/\r\n\tslurm-3373409.out\r\n\tslurm-3373410.out\r\n\tslurm-3379613.out\r\n\tslurm-3379615.out\r\n\tslurm-3379616.out\r\n\tslurm/\r\n\tutils/logger_bak.py\r\n\tutils/nn_bak.py\r\n\tutils/visualizer.py\r\n\tweekend-job-requeuer.sh\r\n\tweekend-job-starter.sh\r\n\r\nno changes added to commit (use ""git add"" and/or ""git commit -a"")\r\n]0;tum_cte0515@hkn0802:~/Projects/jafar[?2004h(jafar) [tum_cte0515@hkn0802 jafar]$ ",,terminal_output +1701,1095911,"TERMINAL",0,0,"[?25lgi[?25h",,terminal_output +1702,1095999,"TERMINAL",0,0,"[?25li[?25h",,terminal_output +1703,1096083,"TERMINAL",0,0,"[?25lt[?25h",,terminal_output +1704,1096143,"TERMINAL",0,0,"[?25l [?25h",,terminal_output +1705,1096314,"TERMINAL",0,0,"[?25lc[?25h",,terminal_output +1706,1096418,"TERMINAL",0,0,"[?25lo[?25h",,terminal_output +1707,1096499,"TERMINAL",0,0,"[?25lm[?25h",,terminal_output +1708,1096672,"TERMINAL",0,0,"[?25lm[?25h",,terminal_output 
+1709,1096814,"TERMINAL",0,0,"[?25li[?25h[?25lt[?25h",,terminal_output +1710,1096937,"TERMINAL",0,0,"[?25l [?25h",,terminal_output +1711,1097008,"TERMINAL",0,0,"[?25l-[?25h",,terminal_output +1712,1097103,"TERMINAL",0,0,"[?25la[?25h",,terminal_output +1713,1097186,"TERMINAL",0,0,"[?25lm[?25h",,terminal_output +1714,1097310,"TERMINAL",0,0,"[?25l [?25h",,terminal_output +1715,1097674,"TERMINAL",0,0,"[?25l""[?25h",,terminal_output +1716,1098505,"TERMINAL",0,0,"[?25lf[?25h",,terminal_output +1717,1098562,"TERMINAL",0,0,"[?25li[?25h",,terminal_output +1718,1098732,"TERMINAL",0,0,"[?25lx[?25h",,terminal_output +1719,1098984,"TERMINAL",0,0,"[?25le[?25h",,terminal_output +1720,1099079,"TERMINAL",0,0,"[?25ld[?25h[?25l [?25h",,terminal_output +1721,1099256,"TERMINAL",0,0,"[?25ls[?25h",,terminal_output +1722,1099424,"TERMINAL",0,0,"[?25la[?25h",,terminal_output +1723,1099497,"TERMINAL",0,0,"[?25lm[?25h",,terminal_output +1724,1099667,"TERMINAL",0,0,"[?25lp[?25h",,terminal_output +1725,1099726,"TERMINAL",0,0,"[?25ll[?25h",,terminal_output +1726,1099940,"TERMINAL",0,0,"[?25li[?25h",,terminal_output +1727,1099984,"TERMINAL",0,0,"[?25ln[?25h",,terminal_output +1728,1100144,"TERMINAL",0,0,"[?25lg[?25h",,terminal_output +1729,1100409,"TERMINAL",0,0,"[?25l""[?25h",,terminal_output +1730,1100704,"TERMINAL",0,0,"\r\n[?2004l\r",,terminal_output +1731,1101161,"TERMINAL",0,0,"[sample-ali-branch c9aa243] fixed sampling\r\n 1 file changed, 12 insertions(+), 5 deletions(-)\r\n]0;tum_cte0515@hkn0802:~/Projects/jafar[?2004h(jafar) [tum_cte0515@hkn0802 jafar]$ ",,terminal_output +1732,1101900,"TERMINAL",0,0,"[?25lgi[?25h",,terminal_output +1733,1102021,"TERMINAL",0,0,"[?25li[?25h",,terminal_output +1734,1102153,"TERMINAL",0,0,"[?25lt[?25h",,terminal_output +1735,1102209,"TERMINAL",0,0,"[?25l [?25h",,terminal_output +1736,1103456,"TERMINAL",0,0,"",,terminal_output +1737,1104807,"genie.py",0,0,"from typing import Dict, Any\n\nimport optax\nimport jax\nimport jax.numpy as jnp\nimport flax.linen as nn\nfrom flax.training.train_state import TrainState\nimport orbax.checkpoint as ocp\nimport einops\n\nfrom models.dynamics import DynamicsMaskGIT\nfrom models.lam import LatentActionModel\nfrom models.tokenizer import TokenizerVQVAE\n\nimport grain\n\n\nclass Genie(nn.Module):\n """"""Genie model""""""\n\n # --- Tokenizer ---\n in_dim: int\n tokenizer_dim: int\n latent_patch_dim: int\n num_patch_latents: int\n patch_size: int\n tokenizer_num_blocks: int\n tokenizer_num_heads: int\n # --- LAM ---\n lam_dim: int\n latent_action_dim: int\n num_latent_actions: int\n lam_patch_size: int\n lam_num_blocks: int\n lam_num_heads: int\n lam_co_train: bool\n # --- Dynamics ---\n dyna_dim: int\n dyna_num_blocks: int\n dyna_num_heads: int\n param_dtype: jnp.dtype\n dtype: jnp.dtype\n use_flash_attention: bool\n dropout: float = 0.0\n mask_limit: float = 0.0\n\n def setup(self):\n self.tokenizer = TokenizerVQVAE(\n in_dim=self.in_dim,\n model_dim=self.tokenizer_dim,\n latent_dim=self.latent_patch_dim,\n num_latents=self.num_patch_latents,\n patch_size=self.patch_size,\n num_blocks=self.tokenizer_num_blocks,\n num_heads=self.tokenizer_num_heads,\n dropout=0.0,\n codebook_dropout=0.0,\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n use_flash_attention=self.use_flash_attention,\n )\n self.lam = LatentActionModel(\n in_dim=self.in_dim,\n model_dim=self.lam_dim,\n latent_dim=self.latent_patch_dim,\n num_latents=self.num_latent_actions,\n patch_size=self.lam_patch_size,\n num_blocks=self.lam_num_blocks,\n num_heads=self.lam_num_heads,\n 
dropout=0.0,\n codebook_dropout=0.0,\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n use_flash_attention=self.use_flash_attention,\n )\n self.dynamics = DynamicsMaskGIT(\n model_dim=self.dyna_dim,\n num_latents=self.num_patch_latents,\n num_blocks=self.dyna_num_blocks,\n num_heads=self.dyna_num_heads,\n dropout=self.dropout,\n mask_limit=self.mask_limit,\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n use_flash_attention=self.use_flash_attention,\n )\n\n def __call__(self, batch: Dict[str, Any], training: bool = True) -> Dict[str, Any]:\n tokenizer_outputs = self.tokenizer.vq_encode(batch[""videos""], training=False)\n lam_outputs = self.lam.vq_encode(batch[""videos""], training=False)\n latent_actions = jax.lax.cond(\n self.lam_co_train,\n lambda: lam_outputs[""z_q""],\n lambda: jax.lax.stop_gradient(lam_outputs[""z_q""])\n )\n outputs = dict(\n video_tokens=jax.lax.stop_gradient(tokenizer_outputs[""indices""]),\n latent_actions=latent_actions,\n )\n outputs[""mask_rng""] = batch[""mask_rng""]\n dyna_outputs = self.dynamics(outputs, training)\n outputs.update(dyna_outputs)\n mle_indices = jnp.argmax(outputs[""token_logits""], axis=-1)\n outputs[""recon""] = self.tokenizer.decode(\n mle_indices, batch[""videos""].shape[2:4]\n )\n outputs[""lam_indices""] = lam_outputs[""indices""]\n return outputs\n\n @nn.compact\n def sample(\n self,\n batch: Dict[str, Any],\n seq_len: int,\n steps: int = 25,\n temperature: float = 1,\n sample_argmax: bool = False,\n ) -> Any:\n """"""\n Autoregressively samples up to `seq_len` future frames, following Figure 8 of the paper.\n\n - Input frames are tokenized once.\n - Future frames are generated autoregressively in token space.\n - All frames are detokenized in a single pass.\n\n Note:\n - For interactive or step-wise sampling, detokenization should occur after each action.\n - To maintain consistent tensor shapes across timesteps, all current and future frames are decoded at every step.\n - Temporal causal structure is preserved by \n a) reapplying the mask before each decoding step.\n b) a temporal causal mask is applied within each ST-transformer block.\n\n Dimension keys:\n B: batch size \n T: number of input (conditioning) frames \n N: patches per frame \n S: sequence length \n A: action space \n D: model latent dimension\n """"""\n # --- Encode videos and actions ---\n tokenizer_out = self.tokenizer.vq_encode(batch[""videos""], training=False)\n token_idxs = tokenizer_out[""indices""] # (B, T, N)\n B, T, N = token_idxs.shape\n pad_shape = (B, seq_len - T, N)\n pad = jnp.zeros(pad_shape, dtype=token_idxs.dtype)\n token_idxs = jnp.concatenate([token_idxs, pad], axis=1) # (B, S, N)\n action_tokens = self.lam.vq.get_codes(batch[""latent_actions""])\n\n MaskGITLoop = nn.scan(\n MaskGITStep,\n variable_broadcast=""params"",\n split_rngs={""params"": False},\n in_axes=0,\n out_axes=0,\n length=steps,\n )\n \n loop_fn = MaskGITLoop(\n dynamics=self.dynamics,\n tokenizer=self.tokenizer,\n temperature=temperature,\n sample_argmax=sample_argmax,\n steps=steps,\n )\n\n def generation_step_fn(carry, step_t):\n rng, current_token_idxs = carry\n rng, step_rng = jax.random.split(rng)\n\n # Mask current frame (i.e., t == step_t)\n mask = jnp.arange(seq_len) == step_t # (S,)\n mask = jnp.broadcast_to(mask[None, :, None], (B, seq_len, N)) # (B, S, N)\n mask = mask.astype(bool)\n masked_token_idxs = current_token_idxs * ~mask\n\n # --- Initialize and run MaskGIT loop ---\n init_carry_maskgit = (\n step_rng,\n masked_token_idxs,\n mask,\n action_tokens,\n )\n 
final_carry_maskgit, _ = loop_fn(init_carry_maskgit, jnp.arange(steps))\n updated_token_idxs = final_carry_maskgit[1]\n new_carry = (rng, updated_token_idxs)\n return new_carry, None\n\n # --- Run the autoregressive generation using scan ---\n initial_carry = (batch[""rng""], token_idxs)\n timesteps_to_scan = jnp.arange(T, seq_len)\n final_carry, _ = jax.lax.scan(\n generation_step_fn,\n initial_carry,\n timesteps_to_scan\n )\n final_token_idxs = final_carry[1]\n\n # --- Decode all tokens at once at the end ---\n final_frames = self.tokenizer.decode(\n final_token_idxs,\n video_hw=batch[""videos""].shape[2:4],\n )\n return final_frames\n\n def vq_encode(self, batch, training) -> Dict[str, Any]:\n # --- Preprocess videos ---\n lam_output = self.lam.vq_encode(batch[""videos""], training=training)\n return lam_output[""indices""]\n\n\nclass MaskGITStep(nn.Module):\n dynamics: nn.Module\n tokenizer: nn.Module\n temperature: float\n sample_argmax: bool\n steps: int\n\n @nn.compact\n def __call__(self, carry, x):\n rng, token_idxs, mask, action_tokens = carry\n step = x\n N = token_idxs.shape[2]\n\n # --- Construct + encode video ---\n vid_embed = self.dynamics.patch_embed(token_idxs) # (B, S, N, D)\n mask_token = self.dynamics.mask_token # (1, 1, 1, D,)\n mask_expanded = mask[..., None] # (B, S, N, 1) \n vid_embed = jnp.where(mask_expanded, mask_token, vid_embed)\n\n # --- Predict transition ---\n act_embed = self.dynamics.action_up(action_tokens)\n vid_embed += jnp.pad(act_embed, ((0, 0), (1, 0), (0, 0), (0, 0)))\n unmasked_ratio = jnp.cos(jnp.pi * (step + 1) / (self.steps * 2))\n step_temp = self.temperature * (1.0 - unmasked_ratio)\n final_logits = self.dynamics.dynamics(vid_embed) / step_temp\n\n # --- Sample new tokens for final frame ---\n if self.sample_argmax:\n sampled_token_idxs = jnp.argmax(final_logits, axis=-1)\n else:\n rng, _rng = jax.random.split(rng)\n sampled_token_idxs = jax.random.categorical(_rng, final_logits)\n gather_fn = jax.vmap(jax.vmap(jax.vmap(lambda x, y: x[y])))\n final_token_probs = gather_fn(jax.nn.softmax(final_logits), sampled_token_idxs)\n final_token_probs += ~mask\n # Update masked tokens only\n token_idxs = jnp.where(mask, sampled_token_idxs, token_idxs)\n\n # --- Update mask ---\n # number of tokens that should be unmasked after this step\n num_unmasked_tokens = jnp.round(N * (1.0 - unmasked_ratio)).astype(int)\n # N-num_unmasked_tokens is how many tokens should be left masked after this step.\n # mask those tokens i.e. 
true; false for rest\n idx_mask = jnp.arange(final_token_probs.shape[1]*N) <= N-num_unmasked_tokens\n final_token_probs_flat = einops.rearrange(final_token_probs, ""b t n -> b (t n)"")\n sorted_idxs = jnp.argsort(final_token_probs_flat, axis=-1)\n mask_update_fn = jax.vmap(lambda msk, ids: msk.at[ids].set(idx_mask))\n mask_flat = einops.rearrange(mask, ""b t n -> b (t n)"")\n new_mask_flat = mask_update_fn(mask_flat, sorted_idxs)\n new_mask = einops.rearrange(new_mask_flat, ""b (t n) -> b t n"", n=N)\n\n new_carry = (rng, token_idxs, new_mask, action_tokens)\n return new_carry, None\n\ndef restore_genie_components(\n train_state: TrainState,\n sharding: jax.sharding.NamedSharding,\n grain_iterator: grain.DataLoaderIterator,\n inputs: Dict[str, jax.Array],\n rng: jax.Array,\n args,\n):\n """"""Restore pre-trained Genie components""""""\n rng, _rng = jax.random.split(rng)\n\n # dummy values since we only use tx to initialize the dummy train states\n dummy_tx = optax.adamw(\n learning_rate=optax.constant_schedule(args.max_lr),\n b1=0.9,\n b2=0.9,\n weight_decay=1e-4,\n )\n handler_registry = ocp.handlers.DefaultCheckpointHandlerRegistry()\n handler_registry.add('model_state', ocp.args.StandardRestore, ocp.handlers.StandardCheckpointHandler)\n \n\n checkpoint_options = ocp.CheckpointManagerOptions(\n step_format_fixed_length=6,\n )\n tokenizer_checkpoint_manager = ocp.CheckpointManager(\n directory=args.tokenizer_checkpoint,\n options=checkpoint_options,\n handler_registry=handler_registry,\n )\n dummy_tokenizer = TokenizerVQVAE(\n in_dim=args.image_channels,\n model_dim=args.tokenizer_dim,\n latent_dim=args.latent_patch_dim,\n num_latents=args.num_patch_latents,\n patch_size=args.patch_size,\n num_blocks=args.tokenizer_num_blocks,\n num_heads=args.tokenizer_num_heads,\n dropout=args.dropout,\n codebook_dropout=args.dropout,\n param_dtype=args.param_dtype,\n dtype=args.dtype,\n use_flash_attention=args.use_flash_attention,\n )\n tokenizer_init_params = dummy_tokenizer.init(_rng, inputs)\n dummy_tokenizer_train_state = TrainState.create(\n apply_fn=dummy_tokenizer.apply, params=tokenizer_init_params, tx=dummy_tx\n )\n abstract_sharded_tokenizer_state = _create_abstract_sharded_pytree(\n dummy_tokenizer_train_state, sharding\n )\n restored_tokenizer = tokenizer_checkpoint_manager.restore(\n step=tokenizer_checkpoint_manager.latest_step(),\n args=ocp.args.Composite(\n model_state=ocp.args.StandardRestore(abstract_sharded_tokenizer_state),\n ),\n )[""model_state""]\n restored_tokenizer_params = restored_tokenizer.params[""params""]\n train_state.params[""params""][""tokenizer""].update(restored_tokenizer_params)\n tokenizer_checkpoint_manager.close()\n\n if args.lam_checkpoint:\n lam_checkpoint_manager = ocp.CheckpointManager(\n directory=args.lam_checkpoint,\n options=checkpoint_options,\n handler_registry=handler_registry,\n )\n dummy_lam = LatentActionModel(\n in_dim=args.image_channels,\n model_dim=args.lam_dim,\n latent_dim=args.latent_patch_dim,\n num_latents=args.num_latent_actions,\n patch_size=args.lam_patch_size,\n num_blocks=args.lam_num_blocks,\n num_heads=args.lam_num_heads,\n dropout=args.dropout,\n codebook_dropout=args.dropout,\n param_dtype=args.param_dtype,\n dtype=args.dtype,\n use_flash_attention=args.use_flash_attention,\n )\n lam_init_params = dummy_lam.init(_rng, inputs)\n dummy_lam_train_state = TrainState.create(\n apply_fn=dummy_lam.apply, params=lam_init_params, tx=dummy_tx\n )\n abstract_sharded_lam_state = _create_abstract_sharded_pytree(\n dummy_lam_train_state, 
sharding\n )\n restored_lam = lam_checkpoint_manager.restore(\n step=lam_checkpoint_manager.latest_step(),\n args=ocp.args.Composite(\n model_state=ocp.args.StandardRestore(abstract_sharded_lam_state),\n ),\n )[""model_state""]\n restored_lam_params = restored_lam.params[""params""]\n # Genie does not initialize all LAM modules, thus we omit those extra modules during restoration\n # (f.srambical) FIXME: Currently, this is a small HBM memory crunch since the LAM's decoder is loaded into HBM and immediately discarded.\n # A workaround would be to restore to host memory first, and only move the weights to HBM after pruning the decoder\n restored_lam_params = {\n k: v\n for k, v in restored_lam_params.items()\n if k in train_state.params[""params""][""lam""]\n }\n train_state.params[""params""][""lam""].update(restored_lam_params)\n lam_checkpoint_manager.close()\n\n return train_state\n\ndef _create_abstract_sharded_pytree(pytree_template, sharding_spec):\n """"""Replaces arrays in a pytree with ShapeDtypeStructs having the given sharding.""""""\n\n def map_fn(leaf_template):\n if hasattr(leaf_template, ""shape"") and hasattr(leaf_template, ""dtype""):\n return jax.ShapeDtypeStruct(\n leaf_template.shape, leaf_template.dtype, sharding=sharding_spec\n )\n return leaf_template\n\n return jax.tree_util.tree_map(map_fn, pytree_template)",python,tab
+1738,1115811,"genie.py",9606,0,"",python,selection_mouse
+1739,1115958,"genie.py",9598,13,"new_mask_flat",python,selection_mouse
+1740,1161972,"TERMINAL",0,0,"\r(jafar) [tum_cte0515@hkn0802 jafar]$ ",,terminal_output
+1741,1165240,"TERMINAL",0,0,"[?25lg[?25h",,terminal_output
+1742,1165302,"TERMINAL",0,0,"[?25li[?25h",,terminal_output
+1743,1165432,"TERMINAL",0,0,"[?25lt[?25h[?25l [?25h",,terminal_output
+1744,1165582,"TERMINAL",0,0,"[?25lc[?25h",,terminal_output
+1745,1165670,"TERMINAL",0,0,"[?25lh[?25h",,terminal_output
+1746,1165780,"TERMINAL",0,0,"[?25le[?25h",,terminal_output
+1747,1165856,"TERMINAL",0,0,"[?25lc[?25h",,terminal_output
+1748,1165960,"TERMINAL",0,0,"[?25lk[?25h",,terminal_output
+1749,1166064,"TERMINAL",0,0,"[?25lo[?25h",,terminal_output
+1750,1166164,"TERMINAL",0,0,"[?25lu[?25h",,terminal_output
+1751,1166257,"TERMINAL",0,0,"[?25lt[?25h",,terminal_output
+1752,1166325,"TERMINAL",0,0,"[?25l [?25h",,terminal_output
+1753,1167024,"TERMINAL",0,0,"[?25lm[?25h",,terminal_output
+1754,1167083,"TERMINAL",0,0,"[?25la[?25h",,terminal_output
+1755,1167207,"TERMINAL",0,0,"[?25ln[?25h",,terminal_output
+1756,1168153,"TERMINAL",0,0,"[?25li[?25h[?25ln[?25h",,terminal_output
+1757,1169020,"TERMINAL",0,0,"\r\n[?2004l\r",,terminal_output
+1758,1169434,"TERMINAL",0,0,"Switched to branch 'main'\r\nYour branch is up to date with 'origin/main'.\r\n]0;tum_cte0515@hkn0802:~/Projects/jafar[?2004h(jafar) [tum_cte0515@hkn0802 jafar]$ ",,terminal_output
+1759,1170308,"",0,0,"Switched from branch 'sample-ali-branch' to 'main'",,git_branch_checkout
+1760,1171013,"TERMINAL",0,0,"[?25lgi[?25h[?25li[?25h",,terminal_output
+1761,1171200,"TERMINAL",0,0,"[?25lt[?25h",,terminal_output
+1762,1171258,"TERMINAL",0,0,"[?25l [?25h",,terminal_output
+1763,1176020,"TERMINAL",0,0,"[?25lp[?25h",,terminal_output
+1764,1176250,"TERMINAL",0,0,"[?25lu[?25h",,terminal_output
+1765,1176410,"TERMINAL",0,0,"[?25ll[?25h",,terminal_output
+1766,1176507,"TERMINAL",0,0,"[?25ll[?25h",,terminal_output
+1767,1176608,"TERMINAL",0,0,"\r\n[?2004l\r",,terminal_output
+1768,1178579,"TERMINAL",0,0,"remote: Enumerating objects: 54, done.\r\nremote: Counting objects: 100% (54/54), done.\r\nremote: Compressing objects: 100% (35/35), done.\r\nremote: Total 54 (delta 28), reused 38 (delta 19), pack-reused 0 (from 0)\r\nUnpacking objects: 100% (54/54), 26.20 KiB | 49.00 KiB/s, done.\r\n",,terminal_output
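
The MaskGITStep recorded above re-masks the lowest-confidence tokens on every iteration using a cosine schedule: after step k of `steps`, a fraction `1 - cos(pi*(k+1)/(2*steps))` of the `N` final-frame tokens is kept unmasked, and the sampling temperature is scaled by that same factor. A minimal standalone sketch of just the schedule (the function name and printed demo are illustrative, not part of genie.py):

```python
import jax.numpy as jnp

def unmask_schedule(steps: int, num_tokens: int, temperature: float = 1.0):
    """(tokens unmasked so far, sampling temperature) after each MaskGIT step."""
    out = []
    for step in range(steps):
        # Same expressions as in MaskGITStep.__call__ above.
        unmasked_ratio = jnp.cos(jnp.pi * (step + 1) / (steps * 2))
        num_unmasked = int(jnp.round(num_tokens * (1.0 - unmasked_ratio)))
        out.append((num_unmasked, float(temperature * (1.0 - unmasked_ratio))))
    return out

# steps=4, num_tokens=256 -> roughly [(20, 0.08), (75, 0.29), (158, 0.62), (256, 1.0)]
print(unmask_schedule(4, 256))
```

Because the logits are divided by this shrinking temperature, early steps sample near-greedily, while later steps, with most tokens already fixed, sample closer to the raw logit distribution.
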
+1772,1179265,"TERMINAL",0,0,"From github.com:p-doom/jafar\r\n e3e7d65..ab43d16 main -> origin/main\r\n * [new branch] fix-reshape-typo-2 -> origin/fix-reshape-typo-2\r\n 5ae0d3e..d96e06c full-precision-before-loss-calc -> origin/full-precision-before-loss-calc\r\n * [new branch] missing-reshape-sample-py -> origin/missing-reshape-sample-py\r\n * [new branch] reinitialize-opt-states-on-component-restore -> origin/reinitialize-opt-states-on-component-restore\r\n * [new branch] shape-suffixes-sttransformer -> origin/shape-suffixes-sttransformer\r\n * [new branch] typo-action-reshape -> origin/typo-action-reshape\r\n 49c9193..0787135 video-jnp-array-before-dtype-cast -> origin/video-jnp-array-before-dtype-cast\r\n",,terminal_output
+1773,1179368,"TERMINAL",0,0,"Updating e3e7d65..ab43d16\r\nFast-forward\r\n",,terminal_output
+1774,1179441,"TERMINAL",0,0," genie.py | 34 ++++++++++++++++------------------\r\n models/dynamics.py | 1 -\r\n sample.py | 29 ++++++++++++++++-------------\r\n train_dynamics.py | 8 +++++---\r\n train_lam.py | 6 ++++--\r\n train_tokenizer.py | 8 +++++---\r\n utils/nn.py | 59 ++++++++++++++++++++++++++++++++++-------------------------\r\n 7 files changed, 80 insertions(+), 65 deletions(-)\r\n]0;tum_cte0515@hkn0802:~/Projects/jafar[?2004h(jafar) [tum_cte0515@hkn0802
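
Back in restore_genie_components above, a full dummy tokenizer and LAM are initialized solely to build a restore target, which is what the FIXME about the decoder briefly occupying HBM refers to. A hedged sketch of a lighter alternative using `jax.eval_shape`, which traces `init` without allocating any parameter memory (helper name is illustrative; `module`, `rng`, `inputs`, and `sharding_spec` stand in for the objects used in that function):

```python
import jax

def abstract_init(module, rng, inputs, sharding_spec):
    """Shape-only init: returns a pytree of ShapeDtypeStructs, allocating nothing."""
    shapes = jax.eval_shape(module.init, rng, inputs)
    return jax.tree_util.tree_map(
        lambda s: jax.ShapeDtypeStruct(s.shape, s.dtype, sharding=sharding_spec),
        shapes,
    )
```

The result can be fed to ocp.args.StandardRestore exactly like the output of _create_abstract_sharded_pytree.
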
jafar]$ ",,terminal_output +1775,1186124,"/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir/causal/dynamics-cotraining/train_dynamics_causal_2_node_3388135.log",0,0,"#!/usr/bin/env bash\n\n#SBATCH --nodes=3\n#SBATCH --ntasks-per-node=4\n#SBATCH --time=08:00:00\n#SBATCH --partition=accelerated\n#SBATCH --cpus-per-task=5\n#SBATCH --gres=gpu:4\n#SBATCH --output=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir/causal/dynamics-cotraining/%x_%j.log\n#SBATCH --error=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir/causal/dynamics-cotraining/%x_%j.log\n#SBATCH --job-name=train_dynamics_causal_2_node\n#SBATCH --requeue\n#SBATCH --signal=b:usr1@300 # 5 min before timeout\n\n# --- signal trap to requeue job before timeout ---\nrequeue_job() {\n echo ""[$(date)] caught sigusr1 (timeout warning), requeueing slurm job $SLURM_JOB_ID...""\n # optional: trigger checkpoint saving here\n # e.g., touch $checkpoint_dir/requeue_trigger\n scontrol requeue $SLURM_JOB_ID\n exit 0\n}\n\ntrap requeue_job sigusr1\n\n# set checkpoint flag based on restart count\nrestart_count=$(scontrol show job $SLURM_JOB_ID | grep -o 'Restarts=[0-9]*' | cut -d'=' -f2)\n\nif [ $restart_count -eq 0 ]; then\n restore_ckpt_flag=""--no-restore-ckpt""\nelse\n restore_ckpt_flag=""--restore-ckpt""\nfi\n\n\n\n# Log the sbatch script\ncat $0\n\nmodule unload mpi/openmpi/5.0\nmodule unload devel/cuda/12.4\nsource .venv/bin/activate\n\narray_records_dir=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_new/open_ai_minecraft_arrayrecords_chunked\n\njob_name=$SLURM_JOB_NAME\nslurm_job_id=3373407\n\nCHECKPOINT_DIR=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/causal/$job_name/$slurm_job_id\nmkdir -p $CHECKPOINT_DIR\n\n# tokenizer with the new structure supporting larger ffn_dim\ntokenizer_ckpt_dir=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data/checkpoints/train_tokenizer_lr_sweep_1e-4_larger_ffn/\n\nenv | grep SLURM\n\nsrun python train_dynamics.py \\n --save_ckpt \\n --restore_ckpt \\n --wandb_id $SLURM_JOB_ID \\n --ckpt_dir $CHECKPOINT_DIR \\n --batch_size=96 \\n --init_lr=0 \\n --max_lr=2e-5 \\n --log_image_interval=1000 \\n --log \\n --log_checkpoint_interval=1000 \\n --name=dynamics-causal-2-node-$slurm_job_id \\n --tags dynamics causal 2-node \\n --entity instant-uv \\n --project jafar \\n --dyna_dim=1024 \\n --dyna_num_blocks=16 \\n --dyna_num_heads=16 \\n --dyna_ffn_dim=4096 \\n --tokenizer_checkpoint=$tokenizer_ckpt_dir \\n --data_dir $array_records_dir &\n\nchild_pid=$!\n\nwait $child_pid\n/var/spool/slurmd/job3388135/slurm_script: line 42: .venv/bin/activate: No such file or 
directory\nSLURM_STEP_NUM_TASKS=1\nSLURM_JOB_USER=tum_cte0515\nSLURM_TASKS_PER_NODE=4(x3)\nSLURM_JOB_UID=999226\nSLURM_TASK_PID=1740698\nSLURM_JOB_GPUS=0,1,2,3\nSLURM_LOCALID=0\nSLURM_SUBMIT_DIR=/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar_jobs_3\nSLURMD_NODENAME=hkn0811\nSLURM_JOB_START_TIME=1753974219\nSLURM_STEP_NODELIST=hkn0802\nSLURM_CLUSTER_NAME=hk\nSLURM_JOB_END_TIME=1754003019\nSLURM_PMI2_SRUN_PORT=45055\nSLURM_CPUS_ON_NODE=24\nSLURM_JOB_CPUS_PER_NODE=24(x3)\nSLURM_GPUS_ON_NODE=4\nSLURM_GTIDS=0\nSLURM_JOB_PARTITION=accelerated\nSLURM_TRES_PER_TASK=cpu=5\nSLURM_OOM_KILL_STEP=0\nSLURM_JOB_NUM_NODES=3\nSLURM_STEPID=4294967290\nSLURM_JOBID=3388135\nSLURM_PTY_PORT=41681\nSLURM_JOB_QOS=normal\nSLURM_LAUNCH_NODE_IPADDR=10.0.7.198\nSLURM_PTY_WIN_ROW=25\nSLURM_PMI2_PROC_MAPPING=(vector,(0,1,1))\nSLURMD_DEBUG=2\nSLURM_PROCID=0\nSLURM_CPUS_PER_TASK=5\nSLURM_NTASKS=12\nSLURM_TOPOLOGY_ADDR=hkibb.hkibbi1.hkibbi1e27.hkn0811\nSLURM_TOPOLOGY_ADDR_PATTERN=switch.switch.switch.node\nSLURM_SRUN_COMM_HOST=10.0.7.198\nSLURM_SCRIPT_CONTEXT=prolog_task\nSLURM_PTY_WIN_COL=122\nSLURM_NODELIST=hkn[0811-0813]\nSLURM_SRUN_COMM_PORT=40457\nSLURM_STEP_ID=4294967290\nSLURM_JOB_ACCOUNT=hk-project-p0023960\nSLURM_PRIO_PROCESS=0\nSLURM_NPROCS=12\nSLURM_NNODES=3\nSLURM_SUBMIT_HOST=hkn0802.localdomain\nSLURM_JOB_ID=3388135\nSLURM_NODEID=0\nSLURM_STEP_NUM_NODES=1\nSLURM_STEP_TASKS_PER_NODE=1\nSLURM_MPI_TYPE=pmi2\nSLURM_PMI2_STEP_NODES=hkn0802\nSLURM_CONF=/etc/slurm/slurm.conf\nSLURM_JOB_NAME=train_dynamics_causal_2_node\nSLURM_NTASKS_PER_NODE=4\nSLURM_STEP_LAUNCHER_PORT=40457\nSLURM_JOB_GID=502226\nSLURM_JOB_NODELIST=hkn[0811-0813]\nGpuFreq=control_disabled\nGpuFreq=control_disabled\nGpuFreq=control_disabled\n/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/tyro/_parsers.py:347: UserWarning: The field `param-dtype` is annotated with type ``, but the default value `` has type ``. We'll try to handle this gracefully, but it may cause unexpected behavior.\n warnings.warn(message)\n/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/tyro/_parsers.py:347: UserWarning: The field `param-dtype` is annotated with type ``, but the default value `` has type ``. We'll try to handle this gracefully, but it may cause unexpected behavior.\n warnings.warn(message)\n/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/tyro/_parsers.py:347: UserWarning: The field `dtype` is annotated with type ``, but the default value `` has type ``. We'll try to handle this gracefully, but it may cause unexpected behavior.\n warnings.warn(message)\n/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/tyro/_parsers.py:347: UserWarning: The field `param-dtype` is annotated with type ``, but the default value `` has type ``. We'll try to handle this gracefully, but it may cause unexpected behavior.\n warnings.warn(message)\n/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/tyro/_parsers.py:347: UserWarning: The field `param-dtype` is annotated with type ``, but the default value `` has type ``. 
We'll try to handle this gracefully, but it may cause unexpected behavior.\n warnings.warn(message)\n/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/tyro/_parsers.py:347: UserWarning: The field `dtype` is annotated with type ``, but the default value `` has type ``. We'll try to handle this gracefully, but it may cause unexpected behavior.\n warnings.warn(message)\n2025-07-31 17:04:39.842772: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\n2025-07-31 17:05:29.717997: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\n
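
The SBATCH script earlier in this log decides between `--restore-ckpt` and `--no-restore-ckpt` by parsing `Restarts=` out of `scontrol show job`, so only requeued jobs resume from a checkpoint. A hedged Python equivalent of that check (function name is illustrative; the only external dependency is the same `scontrol` invocation the script uses):

```python
import re
import subprocess

def restore_ckpt_flag(job_id: str) -> str:
    """Mirror the sbatch logic: a job with Restarts > 0 restores a checkpoint."""
    info = subprocess.run(
        ["scontrol", "show", "job", job_id], capture_output=True, text=True
    ).stdout
    match = re.search(r"Restarts=(\d+)", info)
    restarts = int(match.group(1)) if match else 0
    return "--restore-ckpt" if restarts > 0 else "--no-restore-ckpt"
```
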
wandb: Currently logged in as: mihir-mahajan2002 (instant-uv) to https://api.wandb.ai. Use `wandb login --relogin` to force relogin\nwandb: Tracking run with wandb version 0.19.11\nwandb: Run data is saved locally in /hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar_jobs_3/wandb/run-20250731_170531-3388135\nwandb: Run `wandb offline` to turn off syncing.\nwandb: Syncing run dynamics-causal-2-node-3373407\nwandb: ⭐️ View project at https://wandb.ai/instant-uv/jafar\nwandb: 🚀 View run at https://wandb.ai/instant-uv/jafar/runs/3388135\nWARNING:absl:Missing metrics for step 39000\nERROR:absl:File /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/causal/train_dynamics_causal_2_node/3373407/039000/metrics/metrics not found.\nWARNING:absl:Missing metrics for step 40000\nERROR:absl:File /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/causal/train_dynamics_causal_2_node/3373407/040000/metrics/metrics not found.\nWARNING:absl:Missing metrics for step 41000\nERROR:absl:File /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/causal/train_dynamics_causal_2_node/3373407/041000/metrics/metrics not found.\nWARNING:absl:Missing metrics for step 20000\nERROR:absl:File /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/causal/train_dynamics_causal_2_node/3373407/020000/metrics/metrics not found.\n
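
The run then aborts while restoring the grain dataloader state: the checkpoint at step 041000 holds only some of the twelve per-process `process_*-of-12.json` shards, as the traceback below shows. An assumed pre-flight check one could run before calling restore, written against plain `pathlib` rather than any grain API (assuming grain's 0-based `process_<i>-of-<n>.json` naming, which matches the paths in the errors below):

```python
from pathlib import Path

def dataloader_state_is_complete(step_dir: str, num_processes: int) -> bool:
    """True iff every per-process grain state shard exists under step_dir."""
    state_dir = Path(step_dir) / "dataloader_state"
    expected = [f"process_{i}-of-{num_processes}.json" for i in range(num_processes)]
    missing = [name for name in expected if not (state_dir / name).exists()]
    if missing:
        print("missing dataloader state shards:", missing)
    return not missing
```
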
WARNING:absl:Dropping 6 examples of 89394 examples (shard 12).\nRunning on 12 devices.\nCounting all components: ['tokenizer', 'lam', 'dynamics']\nParameter counts:\n{'tokenizer': 33750256, 'lam': 17229792, 'dynamics': 271932416, 'total': 322912464}\nTraceback (most recent call last):\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar_jobs_3/train_dynamics.py"", line 328, in <module>\n restored = checkpoint_manager.restore(\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/orbax/checkpoint/checkpoint_manager.py"", line 1608, in restore\n restored = self._checkpointer.restore(restore_directory, args=args)\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/orbax/checkpoint/_src/checkpointers/async_checkpointer.py"", line 558, in restore\n return super().restore(directory, *args, **kwargs)\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/orbax/checkpoint/_src/checkpointers/checkpointer.py"", line 300, in restore\n restored = self._restore(directory, args=ckpt_args)\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/orbax/checkpoint/_src/checkpointers/checkpointer.py"", line 319, in _restore\n return self._handler.restore(directory, args=args)\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/orbax/checkpoint/_src/handlers/composite_checkpoint_handler.py"", line 837, in restore\n restored[item_name] = handler.restore(\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/grain/_src/python/checkpoint_handlers.py"", line 62, in restore\n raise ValueError(f""File {filename} does not exist."")\nValueError: File /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/causal/train_dynamics_causal_2_node/3373407/041000/dataloader_state/process_4-of-12.json does not exist.\nValueError: File /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/causal/train_dynamics_causal_2_node/3373407/041000/dataloader_state/process_5-of-12.json does not exist.\nValueError: File /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/causal/train_dynamics_causal_2_node/3373407/041000/dataloader_state/process_6-of-12.json does not exist.\nValueError: File /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/causal/train_dynamics_causal_2_node/3373407/041000/dataloader_state/process_10-of-12.json does not exist.\nValueError: File /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/causal/train_dynamics_causal_2_node/3373407/041000/dataloader_state/process_8-of-12.json does not exist.\nValueError: File /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/causal/train_dynamics_causal_2_node/3373407/041000/dataloader_state/process_9-of-12.json does not exist.\nValueError: File /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/causal/train_dynamics_causal_2_node/3373407/041000/dataloader_state/process_11-of-12.json does not exist.\nValueError: File /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/causal/train_dynamics_causal_2_node/3373407/041000/dataloader_state/process_2-of-12.json does not exist.\nValueError: File /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/causal/train_dynamics_causal_2_node/3373407/041000/dataloader_state/process_1-of-12.json does not exist.\nValueError: File /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/causal/train_dynamics_causal_2_node/3373407/041000/dataloader_state/process_3-of-12.json does not exist.
322912464}\nTraceback (most recent call last):\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar_jobs_3/train_dynamics.py"", line 328, in \n restored = checkpoint_manager.restore(\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/orbax/checkpoint/checkpoint_manager.py"", line 1608, in restore\n restored = self._checkpointer.restore(restore_directory, args=args)\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/orbax/checkpoint/_src/checkpointers/async_checkpointer.py"", line 558, in restore\n return super().restore(directory, *args, **kwargs)\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/orbax/checkpoint/_src/checkpointers/checkpointer.py"", line 300, in restore\n restored = self._restore(directory, args=ckpt_args)\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/orbax/checkpoint/_src/checkpointers/checkpointer.py"", line 319, in _restore\n return self._handler.restore(directory, args=args)\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/orbax/checkpoint/_src/handlers/composite_checkpoint_handler.py"", line 837, in restore\n restored[item_name] = handler.restore(\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/grain/_src/python/checkpoint_handlers.py"", line 62, in restore\n raise ValueError(f""File {filename} does not exist."")\nValueError: File /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/causal/train_dynamics_causal_2_node/3373407/041000/dataloader_state/process_7-of-12.json does not exist.\nRunning on 12 devices.\nCounting all components: ['tokenizer', 'lam', 'dynamics']\nParameter counts:\n{'tokenizer': 33750256, 'lam': 17229792, 'dynamics': 271932416, 'total': 322912464}\nTraceback (most recent call last):\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar_jobs_3/train_dynamics.py"", line 328, in \n restored = checkpoint_manager.restore(\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/orbax/checkpoint/checkpoint_manager.py"", line 1608, in restore\n restored = self._checkpointer.restore(restore_directory, args=args)\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/orbax/checkpoint/_src/checkpointers/async_checkpointer.py"", line 558, in restore\n return super().restore(directory, *args, **kwargs)\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/orbax/checkpoint/_src/checkpointers/checkpointer.py"", line 300, in restore\n restored = self._restore(directory, args=ckpt_args)\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/orbax/checkpoint/_src/checkpointers/checkpointer.py"", line 319, in _restore\n return self._handler.restore(directory, args=args)\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/orbax/checkpoint/_src/handlers/composite_checkpoint_handler.py"", line 837, in restore\n restored[item_name] = handler.restore(\n File 
""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/grain/_src/python/checkpoint_handlers.py"", line 62, in restore\n raise ValueError(f""File {filename} does not exist."")\nValueError: File /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/causal/train_dynamics_causal_2_node/3373407/041000/dataloader_state/process_0-of-12.json does not exist.\nwandb: \nwandb: 🚀 View run dynamics-causal-2-node-3373407 at: https://wandb.ai/instant-uv/jafar/runs/3388135\nwandb: Find logs at: ../../../../../hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar_jobs_3/wandb/run-20250731_170531-3388135/logs\nsrun: error: hkn0811: task 2: Exited with exit code 1\nsrun: error: hkn0811: task 0: Exited with exit code 1\nsrun: error: hkn0812: tasks 4-6: Exited with exit code 1\nsrun: error: hkn0812: task 7: Exited with exit code 1\nsrun: error: hkn0811: tasks 1,3: Exited with exit code 1\nsrun: error: hkn0813: tasks 8,10-11: Exited with exit code 1\nsrun: error: hkn0813: task 9: Exited with exit code 1\n\n============================= JOB FEEDBACK =============================\n\nJob ID: 3388135\nCluster: hk\nUser/Group: tum_cte0515/hk-project-p0023960\nAccount: hk-project-p0023960\nState: FAILED (exit code 1)\nPartition: accelerated\nNodes: 3\nCores per node: 24\nNodelist: hkn[0811-0813]\nCPU Utilized: 00:29:42\nCPU Efficiency: 12.07% of 04:06:00 core-walltime\nJob Wall-clock time: 00:03:25\nStarttime: Thu Jul 31 17:03:39 2025\nEndtime: Thu Jul 31 17:07:04 2025\nMemory Utilized: 60.77 GB (estimated maximum)\nMemory Efficiency: 0.00% of 0.00 MB (0.00 MB/node)\nEnergy Consumed: 332493 Joule / 92.3591666666667 Watthours\nAverage node power draw: 1621.91707317073 Watt\n",log,tab +1776,1190902,"genie_fixed_maskgit.py",0,0,"from typing import Dict, Any\n\nimport optax\nimport jax\nimport jax.numpy as jnp\nimport flax.linen as nn\nfrom flax.training.train_state import TrainState\nimport orbax.checkpoint as ocp\nimport einops\n\nfrom models.dynamics import DynamicsMaskGIT\nfrom models.lam import LatentActionModel\nfrom models.tokenizer import TokenizerVQVAE\n\nimport grain\n\n\nclass Genie(nn.Module):\n """"""Genie model""""""\n\n # --- Tokenizer ---\n in_dim: int\n tokenizer_dim: int\n latent_patch_dim: int\n num_patch_latents: int\n patch_size: int\n tokenizer_num_blocks: int\n tokenizer_num_heads: int\n # --- LAM ---\n lam_dim: int\n latent_action_dim: int\n num_latent_actions: int\n lam_patch_size: int\n lam_num_blocks: int\n lam_num_heads: int\n lam_co_train: bool\n # --- Dynamics ---\n dyna_dim: int\n dyna_num_blocks: int\n dyna_num_heads: int\n param_dtype: jnp.dtype\n dtype: jnp.dtype\n use_flash_attention: bool\n dropout: float = 0.0\n mask_limit: float = 0.0\n\n def setup(self):\n self.tokenizer = TokenizerVQVAE(\n in_dim=self.in_dim,\n model_dim=self.tokenizer_dim,\n latent_dim=self.latent_patch_dim,\n num_latents=self.num_patch_latents,\n patch_size=self.patch_size,\n num_blocks=self.tokenizer_num_blocks,\n num_heads=self.tokenizer_num_heads,\n dropout=0.0,\n codebook_dropout=0.0,\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n use_flash_attention=self.use_flash_attention,\n )\n self.lam = LatentActionModel(\n in_dim=self.in_dim,\n model_dim=self.lam_dim,\n latent_dim=self.latent_patch_dim,\n num_latents=self.num_latent_actions,\n patch_size=self.lam_patch_size,\n num_blocks=self.lam_num_blocks,\n num_heads=self.lam_num_heads,\n dropout=0.0,\n codebook_dropout=0.0,\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n 
use_flash_attention=self.use_flash_attention,\n )\n self.dynamics = DynamicsMaskGIT(\n model_dim=self.dyna_dim,\n num_latents=self.num_patch_latents,\n num_blocks=self.dyna_num_blocks,\n num_heads=self.dyna_num_heads,\n dropout=self.dropout,\n mask_limit=self.mask_limit,\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n use_flash_attention=self.use_flash_attention,\n )\n\n def __call__(self, batch: Dict[str, Any], training: bool = True) -> Dict[str, Any]:\n tokenizer_outputs = self.tokenizer.vq_encode(batch[""videos""], training=False)\n lam_outputs = self.lam.vq_encode(batch[""videos""], training=False)\n latent_actions = jax.lax.cond(\n self.lam_co_train,\n lambda: lam_outputs[""z_q""],\n lambda: jax.lax.stop_gradient(lam_outputs[""z_q""])\n )\n outputs = dict(\n video_tokens=jax.lax.stop_gradient(tokenizer_outputs[""indices""]),\n latent_actions=latent_actions,\n )\n outputs[""mask_rng""] = batch[""mask_rng""]\n dyna_outputs = self.dynamics(outputs, training)\n outputs.update(dyna_outputs)\n mle_indices = jnp.argmax(outputs[""token_logits""], axis=-1)\n outputs[""recon""] = self.tokenizer.decode(\n mle_indices, batch[""videos""].shape[2:4]\n )\n outputs[""lam_indices""] = lam_outputs[""indices""]\n return outputs\n\n @nn.compact\n def sample(\n self,\n batch: Dict[str, Any],\n seq_len: int,\n steps: int = 25,\n temperature: float = 1,\n sample_argmax: bool = False,\n ) -> Any:\n """"""\n Autoregressively samples up to `seq_len` future frames, following Figure 8 of the paper.\n\n - Input frames are tokenized once.\n - Future frames are generated autoregressively in token space.\n - All frames are detokenized in a single pass.\n\n Note:\n - For interactive or step-wise sampling, detokenization should occur after each action.\n - To maintain consistent tensor shapes across timesteps, all current and future frames are decoded at every step.\n - Temporal causal structure is preserved by \n a) reapplying the mask before each decoding step.\n b) a temporal causal mask is applied within each ST-transformer block.\n\n Dimension keys:\n B: batch size \n T: number of input (conditioning) frames \n N: patches per frame \n S: sequence length \n A: action space \n D: model latent dimension\n """"""\n # --- Encode videos and actions ---\n tokenizer_out = self.tokenizer.vq_encode(batch[""videos""], training=False)\n token_idxs = tokenizer_out[""indices""] # (B, T, N)\n B, T, N = token_idxs.shape\n pad_shape = (B, seq_len - T, N)\n pad = jnp.zeros(pad_shape, dtype=token_idxs.dtype)\n token_idxs = jnp.concatenate([token_idxs, pad], axis=1) # (B, S, N)\n action_tokens = self.lam.vq.get_codes(batch[""latent_actions""])\n\n MaskGITLoop = nn.scan(\n MaskGITStep,\n variable_broadcast=""params"",\n split_rngs={""params"": False},\n in_axes=0,\n out_axes=0,\n length=steps,\n )\n \n loop_fn = MaskGITLoop(\n dynamics=self.dynamics,\n tokenizer=self.tokenizer,\n temperature=temperature,\n sample_argmax=sample_argmax,\n steps=steps,\n )\n\n def generation_step_fn(carry, step_t):\n rng, current_token_idxs = carry\n rng, step_rng = jax.random.split(rng)\n\n # Mask current frame (i.e., t == step_t)\n mask = jnp.arange(seq_len) == step_t # (S,)\n mask = jnp.broadcast_to(mask[None, :, None], (B, seq_len, N)) # (B, S, N)\n mask = mask.astype(bool)\n masked_token_idxs = current_token_idxs * ~mask\n\n # --- Initialize and run MaskGIT loop ---\n init_carry_maskgit = (\n step_rng,\n masked_token_idxs,\n mask,\n action_tokens,\n )\n final_carry_maskgit, _ = loop_fn(init_carry_maskgit, jnp.arange(steps))\n 
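# The MaskGIT loop carry is (rng, token_idxs, mask, action_tokens); index 1 is the token grid refined over all steps.\n 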
updated_token_idxs = final_carry_maskgit[1]\n new_carry = (rng, updated_token_idxs)\n return new_carry, None\n\n # --- Run the autoregressive generation using scan ---\n initial_carry = (batch[""rng""], token_idxs)\n timesteps_to_scan = jnp.arange(T, seq_len)\n final_carry, _ = jax.lax.scan(\n generation_step_fn,\n initial_carry,\n timesteps_to_scan\n )\n final_token_idxs = final_carry[1]\n\n # --- Decode all tokens at once at the end ---\n final_frames = self.tokenizer.decode(\n final_token_idxs,\n video_hw=batch[""videos""].shape[2:4],\n )\n return final_frames\n\n def vq_encode(self, batch, training) -> Dict[str, Any]:\n # --- Preprocess videos ---\n lam_output = self.lam.vq_encode(batch[""videos""], training=training)\n return lam_output[""indices""]\n\n\nclass MaskGITStep(nn.Module):\n dynamics: nn.Module\n tokenizer: nn.Module\n temperature: float\n sample_argmax: bool\n steps: int\n\n @nn.compact\n def __call__(self, carry, x):\n rng, token_idxs, mask, action_tokens = carry\n step = x\n N = token_idxs.shape[2]\n\n # --- Construct + encode video ---\n vid_embed = self.dynamics.patch_embed(token_idxs) # (B, S, N, D)\n mask_token = self.dynamics.mask_token # (1, 1, 1, D,)\n mask_expanded = mask[..., None] # (B, S, N, 1) \n vid_embed = jnp.where(mask_expanded, mask_token, vid_embed)\n\n # --- Predict transition ---\n act_embed = self.dynamics.action_up(action_tokens)\n vid_embed += jnp.pad(act_embed, ((0, 0), (1, 0), (0, 0), (0, 0)))\n unmasked_ratio = jnp.cos(jnp.pi * (step + 1) / (self.steps * 2))\n step_temp = self.temperature * (1.0 - unmasked_ratio)\n final_logits = self.dynamics.dynamics(vid_embed) / step_temp\n\n # --- Sample new tokens for final frame ---\n if self.sample_argmax:\n sampled_token_idxs = jnp.argmax(final_logits, axis=-1)\n else:\n rng, _rng = jax.random.split(rng)\n sampled_token_idxs = jax.random.categorical(_rng, final_logits)\n gather_fn = jax.vmap(jax.vmap(jax.vmap(lambda x, y: x[y])))\n final_token_probs = gather_fn(jax.nn.softmax(final_logits), sampled_token_idxs)\n final_token_probs += ~mask\n # Update masked tokens only\n token_idxs = jnp.where(mask, sampled_token_idxs, token_idxs)\n\n # --- Update mask ---\n # number of tokens that should be unmasked after this step\n num_unmasked_tokens = jnp.round(N * (1.0 - unmasked_ratio)).astype(int)\n # N-num_unmasked_tokens is how many tokens should be left masked after this step.\n # mask those tokens i.e. 
true; false for rest\n idx_mask = jnp.arange(final_token_probs.shape[1]*N) <= N-num_unmasked_tokens\n final_token_probs_flat = einops.rearrange(final_token_probs, ""b t n -> b (t n)"")\n sorted_idxs = jnp.argsort(final_token_probs_flat, axis=-1)\n mask_update_fn = jax.vmap(lambda msk, ids: msk.at[ids].set(idx_mask))\n mask_flat = einops.rearrange(mask, ""b t n -> b (t n)"")\n new_mask_flat = mask_update_fn(mask_flat, sorted_idxs)\n new_mask = einops.rearrange(new_mask_flat, ""b (t n) -> b t n"", n=N)\n\n new_carry = (rng, token_idxs, new_mask, action_tokens)\n return new_carry, None\n\ndef restore_genie_components(\n train_state: TrainState,\n sharding: jax.sharding.NamedSharding,\n grain_iterator: grain.DataLoaderIterator,\n inputs: Dict[str, jax.Array],\n rng: jax.Array,\n args,\n):\n """"""Restore pre-trained Genie components""""""\n rng, _rng = jax.random.split(rng)\n\n # dummy values since we only use tx to initialize the dummy train states\n dummy_tx = optax.adamw(\n learning_rate=optax.constant_schedule(args.max_lr),\n b1=0.9,\n b2=0.9,\n weight_decay=1e-4,\n )\n handler_registry = ocp.handlers.DefaultCheckpointHandlerRegistry()\n handler_registry.add('model_state', ocp.args.StandardRestore, ocp.handlers.StandardCheckpointHandler)\n \n\n checkpoint_options = ocp.CheckpointManagerOptions(\n step_format_fixed_length=6,\n )\n tokenizer_checkpoint_manager = ocp.CheckpointManager(\n directory=args.tokenizer_checkpoint,\n options=checkpoint_options,\n handler_registry=handler_registry,\n )\n dummy_tokenizer = TokenizerVQVAE(\n in_dim=args.image_channels,\n model_dim=args.tokenizer_dim,\n latent_dim=args.latent_patch_dim,\n num_latents=args.num_patch_latents,\n patch_size=args.patch_size,\n num_blocks=args.tokenizer_num_blocks,\n num_heads=args.tokenizer_num_heads,\n dropout=args.dropout,\n codebook_dropout=args.dropout,\n param_dtype=args.param_dtype,\n dtype=args.dtype,\n use_flash_attention=args.use_flash_attention,\n )\n tokenizer_init_params = dummy_tokenizer.init(_rng, inputs)\n dummy_tokenizer_train_state = TrainState.create(\n apply_fn=dummy_tokenizer.apply, params=tokenizer_init_params, tx=dummy_tx\n )\n abstract_sharded_tokenizer_state = _create_abstract_sharded_pytree(\n dummy_tokenizer_train_state, sharding\n )\n restored_tokenizer = tokenizer_checkpoint_manager.restore(\n step=tokenizer_checkpoint_manager.latest_step(),\n args=ocp.args.Composite(\n model_state=ocp.args.StandardRestore(abstract_sharded_tokenizer_state),\n ),\n )[""model_state""]\n restored_tokenizer_params = restored_tokenizer.params[""params""]\n train_state.params[""params""][""tokenizer""].update(restored_tokenizer_params)\n tokenizer_checkpoint_manager.close()\n\n if args.lam_checkpoint:\n lam_checkpoint_manager = ocp.CheckpointManager(\n directory=args.lam_checkpoint,\n options=checkpoint_options,\n handler_registry=handler_registry,\n )\n dummy_lam = LatentActionModel(\n in_dim=args.image_channels,\n model_dim=args.lam_dim,\n latent_dim=args.latent_patch_dim,\n num_latents=args.num_latent_actions,\n patch_size=args.lam_patch_size,\n num_blocks=args.lam_num_blocks,\n num_heads=args.lam_num_heads,\n dropout=args.dropout,\n codebook_dropout=args.dropout,\n param_dtype=args.param_dtype,\n dtype=args.dtype,\n use_flash_attention=args.use_flash_attention,\n )\n lam_init_params = dummy_lam.init(_rng, inputs)\n dummy_lam_train_state = TrainState.create(\n apply_fn=dummy_lam.apply, params=lam_init_params, tx=dummy_tx\n )\n abstract_sharded_lam_state = _create_abstract_sharded_pytree(\n dummy_lam_train_state, 
sharding\n )\n restored_lam = lam_checkpoint_manager.restore(\n step=lam_checkpoint_manager.latest_step(),\n args=ocp.args.Composite(\n model_state=ocp.args.StandardRestore(abstract_sharded_lam_state),\n ),\n )[""model_state""]\n restored_lam_params = restored_lam.params[""params""]\n # Genie does not initialize all LAM modules, thus we omit those extra modules during restoration\n # (f.srambical) FIXME: Currently, this is a small HBM memory crunch since the LAM's decoder is loaded into HBM and immediately discarded.\n # A workaround would be to restore to host memory first, and only move the weights to HBM after pruning the decoder\n restored_lam_params = {\n k: v\n for k, v in restored_lam_params.items()\n if k in train_state.params[""params""][""lam""]\n }\n train_state.params[""params""][""lam""].update(restored_lam_params)\n lam_checkpoint_manager.close()\n\n return train_state\n\ndef _create_abstract_sharded_pytree(pytree_template, sharding_spec):\n """"""Replaces arrays in a pytree with ShapeDtypeStructs having the given sharding.""""""\n\n def map_fn(leaf_template):\n if hasattr(leaf_template, ""shape"") and hasattr(leaf_template, ""dtype""):\n return jax.ShapeDtypeStruct(\n leaf_template.shape, leaf_template.dtype, sharding=sharding_spec\n )\n return leaf_template\n\n return jax.tree_util.tree_map(map_fn, pytree_template)",python,tab
+1777,1192256,"genie_fixed_maskgit.py",0,0,"",python,tab
+1778,1192450,"TERMINAL",0,0,"\r(jafar) [tum_cte0515@hkn0802 jafar]$ ",,terminal_output
+1779,1193347,"TERMINAL",0,0,"\r(jafar) [tum_cte0515@hkn0802 jafar]$ ",,terminal_output
+1780,1195760,"TERMINAL",0,0,"\r(jafar) [tum_cte0515@hkn0802 jafar]$ ",,terminal_output
+1781,1199068,"genie.py",0,0,"from typing import Dict, Any\n\nimport optax\nimport jax\nimport jax.numpy as jnp\nimport flax.linen as nn\nfrom flax.training.train_state import TrainState\nimport orbax.checkpoint as ocp\nimport einops\n\nfrom models.dynamics import DynamicsMaskGIT\nfrom models.lam import LatentActionModel\nfrom models.tokenizer import TokenizerVQVAE\n\nimport grain\n\n\nclass Genie(nn.Module):\n """"""Genie model""""""\n\n # --- Tokenizer ---\n in_dim: int\n tokenizer_dim: int\n latent_patch_dim: int\n num_patch_latents: int\n patch_size: int\n tokenizer_num_blocks: int\n tokenizer_num_heads: int\n # --- LAM ---\n lam_dim: int\n latent_action_dim: int\n num_latent_actions: int\n lam_patch_size: int\n lam_num_blocks: int\n lam_num_heads: int\n lam_co_train: bool\n # --- Dynamics ---\n dyna_dim: int\n dyna_num_blocks: int\n dyna_num_heads: int\n param_dtype: jnp.dtype\n dtype: jnp.dtype\n use_flash_attention: bool\n dropout: float = 0.0\n mask_limit: float = 0.0\n\n def setup(self):\n self.tokenizer = TokenizerVQVAE(\n in_dim=self.in_dim,\n model_dim=self.tokenizer_dim,\n latent_dim=self.latent_patch_dim,\n num_latents=self.num_patch_latents,\n patch_size=self.patch_size,\n num_blocks=self.tokenizer_num_blocks,\n num_heads=self.tokenizer_num_heads,\n dropout=0.0,\n codebook_dropout=0.0,\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n use_flash_attention=self.use_flash_attention,\n )\n self.lam = LatentActionModel(\n in_dim=self.in_dim,\n model_dim=self.lam_dim,\n latent_dim=self.latent_patch_dim,\n num_latents=self.num_latent_actions,\n patch_size=self.lam_patch_size,\n num_blocks=self.lam_num_blocks,\n num_heads=self.lam_num_heads,\n dropout=0.0,\n codebook_dropout=0.0,\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n use_flash_attention=self.use_flash_attention,\n )\n self.dynamics = DynamicsMaskGIT(\n 
model_dim=self.dyna_dim,\n num_latents=self.num_patch_latents,\n num_blocks=self.dyna_num_blocks,\n num_heads=self.dyna_num_heads,\n dropout=self.dropout,\n mask_limit=self.mask_limit,\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n use_flash_attention=self.use_flash_attention,\n )\n\n def __call__(self, batch: Dict[str, Any], training: bool = True) -> Dict[str, Any]:\n tokenizer_outputs = self.tokenizer.vq_encode(batch[""videos""], training=False)\n lam_outputs = self.lam.vq_encode(batch[""videos""], training=False)\n latent_actions = jax.lax.cond(\n self.lam_co_train,\n lambda: lam_outputs[""z_q""],\n lambda: jax.lax.stop_gradient(lam_outputs[""z_q""])\n )\n outputs = dict(\n video_tokens=jax.lax.stop_gradient(tokenizer_outputs[""indices""]),\n latent_actions=latent_actions,\n )\n outputs[""mask_rng""] = batch[""mask_rng""]\n dyna_outputs = self.dynamics(outputs, training)\n outputs.update(dyna_outputs)\n mle_indices = jnp.argmax(outputs[""token_logits""], axis=-1)\n outputs[""recon""] = self.tokenizer.decode(\n mle_indices, batch[""videos""].shape[2:4]\n )\n outputs[""lam_indices""] = lam_outputs[""indices""]\n return outputs\n\n @nn.compact\n def sample(\n self,\n batch: Dict[str, Any],\n seq_len: int,\n steps: int = 25,\n temperature: float = 1,\n sample_argmax: bool = False,\n ) -> Any:\n """"""\n Autoregressively samples up to `seq_len` future frames, following Figure 8 of the paper.\n\n - Input frames are tokenized once.\n - Future frames are generated autoregressively in token space.\n - All frames are detokenized in a single pass.\n\n Note:\n - For interactive or step-wise sampling, detokenization should occur after each action.\n - To maintain consistent tensor shapes across timesteps, all current and future frames are decoded at every step.\n - Temporal causal structure is preserved by \n a) reapplying the mask before each decoding step.\n b) a temporal causal mask is applied within each ST-transformer block.\n\n Dimension keys:\n B: batch size \n T: number of input (conditioning) frames \n N: patches per frame \n S: sequence length \n A: action space \n D: model latent dimension\n """"""\n # --- Encode videos and actions ---\n tokenizer_out = self.tokenizer.vq_encode(batch[""videos""], training=False)\n token_idxs = tokenizer_out[""indices""] # (B, T, N)\n B, T, N = token_idxs.shape\n pad_shape = (B, seq_len - T, N)\n pad = jnp.zeros(pad_shape, dtype=token_idxs.dtype)\n token_idxs = jnp.concatenate([token_idxs, pad], axis=1) # (B, S, N)\n action_tokens = self.lam.vq.get_codes(batch[""latent_actions""])\n\n MaskGITLoop = nn.scan(\n MaskGITStep,\n variable_broadcast=""params"",\n split_rngs={""params"": False},\n in_axes=0,\n out_axes=0,\n length=steps,\n )\n \n loop_fn = MaskGITLoop(\n dynamics=self.dynamics,\n tokenizer=self.tokenizer,\n temperature=temperature,\n sample_argmax=sample_argmax,\n steps=steps,\n )\n\n def generation_step_fn(carry, step_t):\n rng, current_token_idxs = carry\n rng, step_rng = jax.random.split(rng)\n\n # Mask current frame (i.e., t == step_t)\n mask = jnp.arange(seq_len) == step_t # (S,)\n mask = jnp.broadcast_to(mask[None, :, None], (B, seq_len, N)) # (B, S, N)\n mask = mask.astype(bool)\n masked_token_idxs = current_token_idxs * ~mask\n\n # --- Initialize and run MaskGIT loop ---\n init_carry_maskgit = (\n step_rng,\n masked_token_idxs,\n mask,\n action_tokens,\n )\n final_carry_maskgit, _ = loop_fn(init_carry_maskgit, jnp.arange(steps))\n updated_token_idxs = final_carry_maskgit[1]\n new_carry = (rng, updated_token_idxs)\n return new_carry, 
None\n\n # --- Run the autoregressive generation using scan ---\n initial_carry = (batch[""rng""], token_idxs)\n timesteps_to_scan = jnp.arange(T, seq_len)\n final_carry, _ = jax.lax.scan(\n generation_step_fn,\n initial_carry,\n timesteps_to_scan\n )\n final_token_idxs = final_carry[1]\n\n # --- Decode all tokens at once at the end ---\n final_frames = self.tokenizer.decode(\n final_token_idxs,\n video_hw=batch[""videos""].shape[2:4],\n )\n return final_frames\n\n def vq_encode(self, batch, training) -> Dict[str, Any]:\n # --- Preprocess videos ---\n lam_output = self.lam.vq_encode(batch[""videos""], training=training)\n return lam_output[""indices""]\n\n\nclass MaskGITStep(nn.Module):\n dynamics: nn.Module\n tokenizer: nn.Module\n temperature: float\n sample_argmax: bool\n steps: int\n\n @nn.compact\n def __call__(self, carry, x):\n rng, token_idxs, mask, action_tokens = carry\n step = x\n N = token_idxs.shape[2]\n\n # --- Construct + encode video ---\n vid_embed = self.dynamics.patch_embed(token_idxs) # (B, S, N, D)\n mask_token = self.dynamics.mask_token # (1, 1, 1, D,)\n mask_expanded = mask[..., None] # (B, S, N, 1) \n vid_embed = jnp.where(mask_expanded, mask_token, vid_embed)\n\n # --- Predict transition ---\n act_embed = self.dynamics.action_up(action_tokens)\n vid_embed += jnp.pad(act_embed, ((0, 0), (1, 0), (0, 0), (0, 0)))\n unmasked_ratio = jnp.cos(jnp.pi * (step + 1) / (self.steps * 2))\n step_temp = self.temperature * (1.0 - unmasked_ratio)\n final_logits = self.dynamics.dynamics(vid_embed) / step_temp\n\n # --- Sample new tokens for final frame ---\n if self.sample_argmax:\n sampled_token_idxs = jnp.argmax(final_logits, axis=-1)\n else:\n rng, _rng = jax.random.split(rng)\n sampled_token_idxs = jax.random.categorical(_rng, final_logits)\n gather_fn = jax.vmap(jax.vmap(jax.vmap(lambda x, y: x[y])))\n final_token_probs = gather_fn(jax.nn.softmax(final_logits), sampled_token_idxs)\n final_token_probs += ~mask\n # Update masked tokens only\n token_idxs = jnp.where(mask, sampled_token_idxs, token_idxs)\n\n # --- Update mask ---\n # number of tokens that should be unmasked after this step\n num_unmasked_tokens = jnp.round(N * (1.0 - unmasked_ratio)).astype(int)\n # N-num_unmasked_tokens is how many tokens should be left masked after this step.\n # mask those tokens i.e. 
true; false for rest\n idx_mask = jnp.arange(final_token_probs.shape[1]*N) <= N-num_unmasked_tokens\n final_token_probs_flat = einops.rearrange(final_token_probs, ""b t n -> b (t n)"")\n sorted_idxs = jnp.argsort(final_token_probs_flat, axis=-1)\n mask_update_fn = jax.vmap(lambda msk, ids: msk.at[ids].set(idx_mask))\n mask_flat = einops.rearrange(mask, ""b t n -> b (t n)"")\n new_mask_flat = mask_update_fn(mask_flat, sorted_idxs)\n new_mask = einops.rearrange(new_mask_flat, ""b (t n) -> b t n"", n=N)\n\n new_carry = (rng, token_idxs, new_mask, action_tokens)\n return new_carry, None\n\ndef restore_genie_components(\n train_state: TrainState,\n sharding: jax.sharding.NamedSharding,\n grain_iterator: grain.DataLoaderIterator,\n inputs: Dict[str, jax.Array],\n rng: jax.Array,\n args,\n):\n """"""Restore pre-trained Genie components""""""\n rng, _rng = jax.random.split(rng)\n\n # dummy values since we only use tx to initialize the dummy train states\n dummy_tx = optax.adamw(\n learning_rate=optax.constant_schedule(args.max_lr),\n b1=0.9,\n b2=0.9,\n weight_decay=1e-4,\n )\n handler_registry = ocp.handlers.DefaultCheckpointHandlerRegistry()\n handler_registry.add('model_state', ocp.args.StandardRestore, ocp.handlers.StandardCheckpointHandler)\n \n\n checkpoint_options = ocp.CheckpointManagerOptions(\n step_format_fixed_length=6,\n )\n tokenizer_checkpoint_manager = ocp.CheckpointManager(\n directory=args.tokenizer_checkpoint,\n options=checkpoint_options,\n handler_registry=handler_registry,\n )\n dummy_tokenizer = TokenizerVQVAE(\n in_dim=args.image_channels,\n model_dim=args.tokenizer_dim,\n latent_dim=args.latent_patch_dim,\n num_latents=args.num_patch_latents,\n patch_size=args.patch_size,\n num_blocks=args.tokenizer_num_blocks,\n num_heads=args.tokenizer_num_heads,\n dropout=args.dropout,\n codebook_dropout=args.dropout,\n param_dtype=args.param_dtype,\n dtype=args.dtype,\n use_flash_attention=args.use_flash_attention,\n )\n tokenizer_init_params = dummy_tokenizer.init(_rng, inputs)\n dummy_tokenizer_train_state = TrainState.create(\n apply_fn=dummy_tokenizer.apply, params=tokenizer_init_params, tx=dummy_tx\n )\n abstract_sharded_tokenizer_state = _create_abstract_sharded_pytree(\n dummy_tokenizer_train_state, sharding\n )\n restored_tokenizer = tokenizer_checkpoint_manager.restore(\n step=tokenizer_checkpoint_manager.latest_step(),\n args=ocp.args.Composite(\n model_state=ocp.args.StandardRestore(abstract_sharded_tokenizer_state),\n ),\n )[""model_state""]\n restored_tokenizer_params = restored_tokenizer.params[""params""]\n train_state.params[""params""][""tokenizer""].update(restored_tokenizer_params)\n tokenizer_checkpoint_manager.close()\n\n if args.lam_checkpoint:\n lam_checkpoint_manager = ocp.CheckpointManager(\n directory=args.lam_checkpoint,\n options=checkpoint_options,\n handler_registry=handler_registry,\n )\n dummy_lam = LatentActionModel(\n in_dim=args.image_channels,\n model_dim=args.lam_dim,\n latent_dim=args.latent_patch_dim,\n num_latents=args.num_latent_actions,\n patch_size=args.lam_patch_size,\n num_blocks=args.lam_num_blocks,\n num_heads=args.lam_num_heads,\n dropout=args.dropout,\n codebook_dropout=args.dropout,\n param_dtype=args.param_dtype,\n dtype=args.dtype,\n use_flash_attention=args.use_flash_attention,\n )\n lam_init_params = dummy_lam.init(_rng, inputs)\n dummy_lam_train_state = TrainState.create(\n apply_fn=dummy_lam.apply, params=lam_init_params, tx=dummy_tx\n )\n abstract_sharded_lam_state = _create_abstract_sharded_pytree(\n dummy_lam_train_state, 
sharding\n )\n restored_lam = lam_checkpoint_manager.restore(\n step=lam_checkpoint_manager.latest_step(),\n args=ocp.args.Composite(\n model_state=ocp.args.StandardRestore(abstract_sharded_lam_state),\n ),\n )[""model_state""]\n restored_lam_params = restored_lam.params[""params""]\n # Genie does not initialize all LAM modules, thus we omit those extra modules during restoration\n # (f.srambical) FIXME: Currently, this is a small HBM memory crunch since the LAM's decoder is loaded into HBM and immediately discarded.\n # A workaround would be to restore to host memory first, and only move the weights to HBM after pruning the decoder\n restored_lam_params = {\n k: v\n for k, v in restored_lam_params.items()\n if k in train_state.params[""params""][""lam""]\n }\n train_state.params[""params""][""lam""].update(restored_lam_params)\n lam_checkpoint_manager.close()\n\n return train_state\n\ndef _create_abstract_sharded_pytree(pytree_template, sharding_spec):\n """"""Replaces arrays in a pytree with ShapeDtypeStructs having the given sharding.""""""\n\n def map_fn(leaf_template):\n if hasattr(leaf_template, ""shape"") and hasattr(leaf_template, ""dtype""):\n return jax.ShapeDtypeStruct(\n leaf_template.shape, leaf_template.dtype, sharding=sharding_spec\n )\n return leaf_template\n\n return jax.tree_util.tree_map(map_fn, pytree_template)",python,tab
+1782,1199316,"genie.py",0,14630,"from typing import Dict\n\nimport optax\nimport jax\nimport jax.numpy as jnp\nimport flax.nnx as nnx\nimport orbax.checkpoint as ocp\n\nfrom models.dynamics import DynamicsMaskGIT\nfrom models.lam import LatentActionModel\nfrom models.tokenizer import TokenizerVQVAE\n\n\nclass Genie(nnx.Module):\n """"""Genie model""""""\n\n def __init__(\n self,\n in_dim: int,\n tokenizer_dim: int,\n tokenizer_ffn_dim: int,\n latent_patch_dim: int,\n num_patch_latents: int,\n patch_size: int,\n tokenizer_num_blocks: int,\n tokenizer_num_heads: int,\n lam_dim: int,\n lam_ffn_dim: int,\n latent_action_dim: int,\n num_latent_actions: int,\n lam_patch_size: int,\n lam_num_blocks: int,\n lam_num_heads: int,\n lam_co_train: bool,\n dyna_dim: int,\n dyna_ffn_dim: int,\n dyna_num_blocks: int,\n dyna_num_heads: int,\n param_dtype: jnp.dtype,\n dtype: jnp.dtype,\n use_flash_attention: bool,\n rngs: nnx.Rngs,\n dropout: float = 0.0,\n mask_limit: float = 0.0,\n ):\n # --- Tokenizer ---\n self.in_dim = in_dim\n self.tokenizer_dim = tokenizer_dim\n self.tokenizer_ffn_dim = tokenizer_ffn_dim\n self.latent_patch_dim = latent_patch_dim\n self.num_patch_latents = num_patch_latents\n self.patch_size = patch_size\n self.tokenizer_num_blocks = tokenizer_num_blocks\n self.tokenizer_num_heads = tokenizer_num_heads\n # --- LAM ---\n self.lam_dim = lam_dim\n self.lam_ffn_dim = lam_ffn_dim\n self.latent_action_dim = latent_action_dim\n self.num_latent_actions = num_latent_actions\n self.lam_patch_size = lam_patch_size\n self.lam_num_blocks = lam_num_blocks\n self.lam_num_heads = lam_num_heads\n self.lam_co_train = lam_co_train\n # --- Dynamics ---\n self.dyna_dim = dyna_dim\n self.dyna_ffn_dim = dyna_ffn_dim\n self.dyna_num_blocks = dyna_num_blocks\n self.dyna_num_heads = dyna_num_heads\n self.param_dtype = param_dtype\n self.dtype = dtype\n self.use_flash_attention = use_flash_attention\n self.dropout = dropout\n self.mask_limit = mask_limit\n\n self.tokenizer = TokenizerVQVAE(\n in_dim=self.in_dim,\n model_dim=self.tokenizer_dim,\n ffn_dim=self.tokenizer_ffn_dim,\n latent_dim=self.latent_patch_dim,\n num_latents=self.num_patch_latents,\n 
patch_size=self.patch_size,\n num_blocks=self.tokenizer_num_blocks,\n num_heads=self.tokenizer_num_heads,\n dropout=0.0,\n codebook_dropout=0.0,\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n use_flash_attention=self.use_flash_attention,\n rngs=rngs,\n )\n self.lam = LatentActionModel(\n in_dim=self.in_dim,\n model_dim=self.lam_dim,\n ffn_dim=self.lam_ffn_dim,\n latent_dim=self.latent_patch_dim,\n num_latents=self.num_latent_actions,\n patch_size=self.lam_patch_size,\n num_blocks=self.lam_num_blocks,\n num_heads=self.lam_num_heads,\n dropout=0.0,\n codebook_dropout=0.0,\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n use_flash_attention=self.use_flash_attention,\n rngs=rngs,\n )\n self.dynamics = DynamicsMaskGIT(\n model_dim=self.dyna_dim,\n ffn_dim=self.dyna_ffn_dim,\n num_latents=self.num_patch_latents,\n latent_action_dim=self.latent_action_dim,\n num_blocks=self.dyna_num_blocks,\n num_heads=self.dyna_num_heads,\n dropout=self.dropout,\n mask_limit=self.mask_limit,\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n use_flash_attention=self.use_flash_attention,\n rngs=rngs,\n )\n\n def __call__(\n self, batch: Dict[str, jax.Array], training: bool = True\n ) -> Dict[str, jax.Array]:\n videos_BTHWC = batch[""videos""]\n tokenizer_outputs = self.tokenizer.vq_encode(videos_BTHWC, training=False)\n token_indices_BTN = tokenizer_outputs[""indices""]\n lam_outputs = self.lam.vq_encode(videos_BTHWC, training=False)\n z_q_BTm11L = lam_outputs[""z_q""]\n action_indices_E = lam_outputs[""indices""]\n latent_actions_BTm11L = jax.lax.cond(\n self.lam_co_train,\n lambda: z_q_BTm11L,\n lambda: jax.lax.stop_gradient(z_q_BTm11L),\n )\n outputs = dict(\n video_tokens=jax.lax.stop_gradient(token_indices_BTN),\n latent_actions=latent_actions_BTm11L,\n )\n outputs[""mask_rng""] = batch[""mask_rng""]\n dyna_logits_BTNV, dyna_mask = self.dynamics(outputs, training)\n outputs[""token_logits""] = dyna_logits_BTNV\n if dyna_mask is not None:\n outputs[""mask""] = dyna_mask\n mle_indices_BTN = jnp.argmax(outputs[""token_logits""], axis=-1)\n H, W = batch[""videos""].shape[2:4]\n outputs[""recon""] = self.tokenizer.decode(mle_indices_BTN, (H, W))\n outputs[""lam_indices""] = action_indices_E\n return outputs\n\n def sample(\n self,\n batch: Dict[str, jax.Array],\n seq_len: int,\n steps: int = 25,\n temperature: float = 1,\n sample_argmax: bool = False,\n ) -> jax.Array:\n """"""\n Autoregressively samples up to `seq_len` future frames, following Figure 8 of the paper.\n\n - Input frames are tokenized once.\n - Future frames are generated autoregressively in token space.\n - All frames are detokenized in a single pass.\n\n Note:\n - For interactive or step-wise sampling, detokenization should occur after each action.\n - To maintain consistent tensor shapes across timesteps, all current and future frames are decoded at every step.\n - Temporal causal structure is preserved by\n a) reapplying the mask before each decoding step.\n b) a temporal causal mask is applied within each ST-transformer block.\n\n Dimension keys:\n B: batch size\n T: number of input (conditioning) frames\n N: number of patches per frame\n M: model dimension\n S: sequence length\n H: height\n W: width\n E: B * (S - 1)\n """"""\n # --- Encode videos and actions ---\n videos_BTHWC = batch[""videos""]\n latent_actions_E = batch[""latent_actions""]\n tokenizer_out = self.tokenizer.vq_encode(videos_BTHWC, training=False)\n token_idxs_BTN = tokenizer_out[""indices""]\n B, T, N = token_idxs_BTN.shape\n pad_shape = (B, seq_len - T, N)\n 
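# Zero-fill the S - T future-frame slots; the generation scan below fills them in.\n 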
pad = jnp.zeros(pad_shape, dtype=token_idxs_BTN.dtype)\n token_idxs_BSN = jnp.concatenate([token_idxs_BTN, pad], axis=1)\n action_tokens_EL = self.lam.vq.get_codes(latent_actions_E)\n\n def maskgit_step_fn(\n carry: tuple[jax.Array, jax.Array, jax.Array, jax.Array], step: jax.Array\n ) -> tuple[tuple[jax.Array, jax.Array, jax.Array, jax.Array], None]:\n rng, token_idxs_BSN, mask_BSN, action_tokens_EL = carry\n S, N = token_idxs_BSN.shape[1:]\n L = action_tokens_EL.shape[-1]\n\n # --- Construct + encode video ---\n vid_embed_BSNM = self.dynamics.patch_embed(token_idxs_BSN)\n mask_token_111M = self.dynamics.mask_token.value\n mask_expanded_BSN1 = mask_BSN[..., None]\n vid_embed_BSNM = jnp.where(mask_expanded_BSN1, mask_token_111M, vid_embed_BSNM)\n\n # --- Predict transition ---\n action_tokens_BSm1L = jnp.reshape(action_tokens_EL, (B, S - 1, L))\n act_embed_BSm1M = self.dynamics.action_up(action_tokens_BSm1L)\n act_embed_BSM = jnp.pad(act_embed_BSm1M, ((0, 0), (1, 0), (0, 0)))\n act_embed_BS1M = jnp.reshape(act_embed_BSM, (B, S, 1, act_embed_BSM.shape[-1]))\n vid_embed_BSNM += act_embed_BS1M\n unmasked_ratio = jnp.cos(jnp.pi * (step + 1) / (steps * 2))\n step_temp = temperature * (1.0 - unmasked_ratio)\n final_logits_BSNV = self.dynamics.transformer(vid_embed_BSNM) / step_temp\n\n # --- Sample new tokens for final frame ---\n if sample_argmax:\n sampled_token_idxs_BSN = jnp.argmax(final_logits_BSNV, axis=-1)\n else:\n rng, _rng = jax.random.split(rng)\n sampled_token_idxs_BSN = jax.random.categorical(_rng, final_logits_BSNV)\n gather_fn = jax.vmap(jax.vmap(jax.vmap(lambda x, y: x[y])))\n final_token_probs_BSN = gather_fn(\n jax.nn.softmax(final_logits_BSNV), sampled_token_idxs_BSN\n )\n final_token_probs_BSN += ~mask_BSN\n # Update masked tokens only\n token_idxs_BSN = jnp.where(mask_BSN, sampled_token_idxs_BSN, token_idxs_BSN)\n\n # --- Update mask ---\n num_unmasked_tokens = jnp.round(N * (1.0 - unmasked_ratio)).astype(int)\n idx_mask_N = jnp.arange(final_token_probs_BSN.shape[-1]) > num_unmasked_tokens\n sorted_idxs_BSN = jnp.argsort(final_token_probs_BSN, axis=-1, descending=True)\n mask_update_fn = jax.vmap(lambda msk, ids: msk.at[ids].set(idx_mask_N))\n new_mask_BSN = mask_update_fn(mask_BSN, sorted_idxs_BSN)\n\n new_carry = (rng, token_idxs_BSN, new_mask_BSN, action_tokens_EL)\n return new_carry, None\n\n def generation_step_fn(\n carry: tuple[jax.Array, jax.Array], step_t: jax.Array\n ) -> tuple[tuple[jax.Array, jax.Array], None]:\n rng, current_token_idxs_BSN = carry\n rng, step_rng = jax.random.split(rng)\n\n # Mask current and future frames (i.e., t >= step_t)\n mask_S = jnp.arange(seq_len) >= step_t\n mask_BSN = jnp.broadcast_to(mask_S[None, :, None], (B, seq_len, N)).astype(\n bool\n )\n masked_token_idxs_BSN = current_token_idxs_BSN * ~mask_BSN\n\n # --- Initialize and run MaskGIT loop ---\n init_carry_maskgit = (\n step_rng,\n masked_token_idxs_BSN,\n mask_BSN,\n action_tokens_EL,\n )\n final_carry_maskgit, _ = jax.lax.scan(\n maskgit_step_fn, init_carry_maskgit, jnp.arange(steps)\n )\n updated_token_idxs = final_carry_maskgit[1]\n new_carry = (rng, updated_token_idxs)\n return new_carry, None\n\n # --- Run the autoregressive generation using jax.lax.scan ---\n initial_carry = (batch[""rng""], token_idxs_BSN)\n timesteps_to_scan = jnp.arange(T, seq_len)\n final_carry, _ = jax.lax.scan(\n generation_step_fn, initial_carry, timesteps_to_scan\n )\n final_token_idxs = final_carry[1]\n\n # --- Decode all tokens at once at the end ---\n H, W = batch[""videos""].shape[2:4]\n 
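# tokenizer.decode maps the (B, S, N) token indices back to pixels at the original (H, W) resolution.\n 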
final_frames = self.tokenizer.decode(\n final_token_idxs,\n video_hw=(H, W),\n )\n return final_frames\n\n def vq_encode(self, batch: Dict[str, jax.Array], training: bool) -> jax.Array:\n # --- Preprocess videos ---\n video_BTHWC = batch[""videos""]\n lam_output = self.lam.vq_encode(video_BTHWC, training=training)\n lam_indices_E = lam_output[""indices""]\n return lam_indices_E\n\n\n# FIXME (f.srambical): add conversion script for old checkpoints\ndef restore_genie_components(\n optimizer: nnx.Optimizer,\n sharding: jax.sharding.NamedSharding,\n rng: jax.Array,\n args,\n) -> nnx.Optimizer:\n """"""Restore pre-trained Genie components""""""\n rngs = nnx.Rngs(rng)\n\n tx = optimizer.tx\n model = optimizer.model\n handler_registry = ocp.handlers.DefaultCheckpointHandlerRegistry()\n handler_registry.add(\n ""model_state"", ocp.args.PyTreeRestore, ocp.handlers.PyTreeCheckpointHandler\n )\n\n checkpoint_options = ocp.CheckpointManagerOptions(\n step_format_fixed_length=6,\n )\n tokenizer_checkpoint_manager = ocp.CheckpointManager(\n directory=args.tokenizer_checkpoint,\n options=checkpoint_options,\n handler_registry=handler_registry,\n )\n dummy_tokenizer = TokenizerVQVAE(\n in_dim=args.image_channels,\n model_dim=args.tokenizer_dim,\n ffn_dim=args.tokenizer_ffn_dim,\n latent_dim=args.latent_patch_dim,\n num_latents=args.num_patch_latents,\n patch_size=args.patch_size,\n num_blocks=args.tokenizer_num_blocks,\n num_heads=args.tokenizer_num_heads,\n dropout=args.dropout,\n codebook_dropout=args.dropout,\n param_dtype=args.param_dtype,\n dtype=args.dtype,\n use_flash_attention=args.use_flash_attention,\n rngs=rngs,\n )\n dummy_tokenizer_optimizer = nnx.Optimizer(dummy_tokenizer, tx)\n dummy_tokenizer_optimizer_state = nnx.state(dummy_tokenizer_optimizer)\n abstract_sharded_tokenizer_optimizer_state = _create_abstract_sharded_pytree(\n dummy_tokenizer_optimizer_state, sharding\n )\n restored_tokenizer = tokenizer_checkpoint_manager.restore(\n step=tokenizer_checkpoint_manager.latest_step(),\n args=ocp.args.Composite(\n model_state=ocp.args.PyTreeRestore( # type: ignore\n abstract_sharded_tokenizer_optimizer_state # type: ignore\n ),\n ),\n )[""model_state""]\n nnx.update(dummy_tokenizer_optimizer.model, restored_tokenizer.model)\n model.tokenizer = dummy_tokenizer_optimizer.model\n tokenizer_checkpoint_manager.close()\n\n if args.lam_checkpoint:\n lam_checkpoint_manager = ocp.CheckpointManager(\n directory=args.lam_checkpoint,\n options=checkpoint_options,\n handler_registry=handler_registry,\n )\n dummy_lam = LatentActionModel(\n in_dim=args.image_channels,\n model_dim=args.lam_dim,\n ffn_dim=args.lam_ffn_dim,\n latent_dim=args.latent_patch_dim,\n num_latents=args.num_latent_actions,\n patch_size=args.lam_patch_size,\n num_blocks=args.lam_num_blocks,\n num_heads=args.lam_num_heads,\n dropout=args.dropout,\n codebook_dropout=args.dropout,\n param_dtype=args.param_dtype,\n dtype=args.dtype,\n use_flash_attention=args.use_flash_attention,\n rngs=rngs,\n )\n dummy_lam_optimizer = nnx.Optimizer(dummy_lam, tx)\n dummy_lam_optimizer_state = nnx.state(dummy_lam_optimizer)\n abstract_sharded_lam_optimizer_state = _create_abstract_sharded_pytree(\n dummy_lam_optimizer_state, sharding\n )\n restored_lam_optimizer = lam_checkpoint_manager.restore(\n step=lam_checkpoint_manager.latest_step(),\n args=ocp.args.Composite(\n model_state=ocp.args.PyTreeRestore( # type: ignore\n abstract_sharded_lam_optimizer_state # type: ignore\n ),\n ),\n )[""model_state""]\n nnx.update(dummy_lam_optimizer.model, 
restored_lam_optimizer.model)\n model.lam = dummy_lam_optimizer.model\n # Remove the LAM decoder to save memory and avoid unnecessary computation.\n del model.lam.decoder\n lam_checkpoint_manager.close()\n \n # Reinitialize the optimizer states\n optimizer = nnx.Optimizer(model, tx)\n return optimizer\n\n\ndef _create_abstract_sharded_pytree(\n pytree_template: nnx.GraphState, sharding_spec: jax.sharding.NamedSharding\n) -> jax.Array:\n """"""Replaces arrays in a pytree with ShapeDtypeStructs having the given sharding.""""""\n\n def map_fn(leaf_template):\n if hasattr(leaf_template, ""shape"") and hasattr(leaf_template, ""dtype""):\n return jax.ShapeDtypeStruct(\n leaf_template.shape, leaf_template.dtype, sharding=sharding_spec\n )\n return leaf_template\n\n return jax.tree_util.tree_map(map_fn, pytree_template)\n",python,content +1783,1199592,"genie.py",10103,0,"",python,selection_mouse +1784,1199616,"genie.py",10102,0,"",python,selection_command +1785,1229080,"genie.py",9299,0,"",python,selection_mouse +1786,1229612,"genie.py",9498,0,"",python,selection_mouse +1787,1230158,"genie.py",9497,0,"",python,selection_mouse +1788,1230159,"genie.py",9496,0,"",python,selection_command +1789,1238353,"genie_fixed_maskgit.py",0,0,"",python,tab +1790,1238354,"genie_fixed_maskgit.py",8912,0,"",python,selection_mouse +1791,1240647,"genie_fixed_maskgit.py",8901,19,"num_unmasked_tokens",python,selection_mouse +1792,1240650,"genie_fixed_maskgit.py",8912,0,"",python,selection_mouse +1793,1240651,"genie_fixed_maskgit.py",9212,0,"",python,selection_mouse +1794,1243113,"genie_fixed_maskgit.py",9189,0,"",python,selection_mouse +1795,1243265,"genie_fixed_maskgit.py",9182,19,"num_unmasked_tokens",python,selection_mouse +1796,1257434,"genie.py",0,0,"",python,tab +1797,1257438,"genie.py",9076,0,"",python,selection_mouse +1798,1257848,"genie.py",9110,0,"",python,selection_mouse +1799,1259005,"genie.py",9078,0,"",python,selection_mouse +1800,1259006,"genie.py",9077,0,"",python,selection_command +1801,1260459,"genie_fixed_maskgit.py",0,0,"",python,tab +1802,1260460,"genie_fixed_maskgit.py",8972,0,"",python,selection_mouse +1803,1260462,"genie_fixed_maskgit.py",8971,0,"",python,selection_command +1804,1260607,"genie_fixed_maskgit.py",8969,2,"nt",python,selection_mouse +1805,1260608,"genie_fixed_maskgit.py",8969,3,"nt)",python,selection_command +1806,1260627,"genie_fixed_maskgit.py",8965,7,"pe(int)",python,selection_mouse +1807,1260641,"genie_fixed_maskgit.py",8972,70,"\n # N-num_unmasked_tokens is how many tokens should be left mas",python,selection_mouse +1808,1260664,"genie_fixed_maskgit.py",8957,15,"o)).astype(int)",python,selection_mouse +1809,1260675,"genie_fixed_maskgit.py",8950,22,"ed_ratio)).astype(int)",python,selection_mouse +1810,1260698,"genie_fixed_maskgit.py",8946,26,"masked_ratio)).astype(int)",python,selection_mouse +1811,1260730,"genie_fixed_maskgit.py",8942,30,"- unmasked_ratio)).astype(int)",python,selection_mouse +1812,1260731,"genie_fixed_maskgit.py",8938,34,"1.0 - unmasked_ratio)).astype(int)",python,selection_mouse +1813,1260763,"genie_fixed_maskgit.py",8935,37,"* (1.0 - unmasked_ratio)).astype(int)",python,selection_mouse +1814,1260763,"genie_fixed_maskgit.py",8932,40,"(N * (1.0 - unmasked_ratio)).astype(int)",python,selection_mouse +1815,1260802,"genie_fixed_maskgit.py",8929,43,"und(N * (1.0 - unmasked_ratio)).astype(int)",python,selection_mouse +1816,1260802,"genie_fixed_maskgit.py",8927,45,"round(N * (1.0 - unmasked_ratio)).astype(int)",python,selection_mouse 
+1817,1260836,"genie_fixed_maskgit.py",8925,47,"p.round(N * (1.0 - unmasked_ratio)).astype(int)",python,selection_mouse +1818,1260837,"genie_fixed_maskgit.py",8922,50," jnp.round(N * (1.0 - unmasked_ratio)).astype(int)",python,selection_mouse +1819,1260869,"genie_fixed_maskgit.py",8921,51,"= jnp.round(N * (1.0 - unmasked_ratio)).astype(int)",python,selection_mouse +1820,1260870,"genie_fixed_maskgit.py",8920,52," = jnp.round(N * (1.0 - unmasked_ratio)).astype(int)",python,selection_mouse +1821,1260902,"genie_fixed_maskgit.py",8918,54,"ns = jnp.round(N * (1.0 - unmasked_ratio)).astype(int)",python,selection_mouse +1822,1260934,"genie_fixed_maskgit.py",8917,55,"ens = jnp.round(N * (1.0 - unmasked_ratio)).astype(int)",python,selection_mouse +1823,1260935,"genie_fixed_maskgit.py",8916,56,"kens = jnp.round(N * (1.0 - unmasked_ratio)).astype(int)",python,selection_mouse +1824,1260966,"genie_fixed_maskgit.py",8915,57,"okens = jnp.round(N * (1.0 - unmasked_ratio)).astype(int)",python,selection_mouse +1825,1260994,"genie_fixed_maskgit.py",8914,58,"tokens = jnp.round(N * (1.0 - unmasked_ratio)).astype(int)",python,selection_mouse +1826,1260995,"genie_fixed_maskgit.py",8912,60,"d_tokens = jnp.round(N * (1.0 - unmasked_ratio)).astype(int)",python,selection_mouse +1827,1261025,"genie_fixed_maskgit.py",8911,61,"ed_tokens = jnp.round(N * (1.0 - unmasked_ratio)).astype(int)",python,selection_mouse +1828,1261026,"genie_fixed_maskgit.py",8910,62,"ked_tokens = jnp.round(N * (1.0 - unmasked_ratio)).astype(int)",python,selection_mouse +1829,1261054,"genie_fixed_maskgit.py",8909,63,"sked_tokens = jnp.round(N * (1.0 - unmasked_ratio)).astype(int)",python,selection_mouse +1830,1261055,"genie_fixed_maskgit.py",8908,64,"asked_tokens = jnp.round(N * (1.0 - unmasked_ratio)).astype(int)",python,selection_mouse +1831,1261082,"genie_fixed_maskgit.py",8907,65,"masked_tokens = jnp.round(N * (1.0 - unmasked_ratio)).astype(int)",python,selection_mouse +1832,1261108,"genie_fixed_maskgit.py",8906,66,"nmasked_tokens = jnp.round(N * (1.0 - unmasked_ratio)).astype(int)",python,selection_mouse +1833,1261436,"genie_fixed_maskgit.py",8906,0,"",python,selection_mouse +1834,1261437,"genie_fixed_maskgit.py",8901,19,"num_unmasked_tokens",python,selection_mouse +1835,1266025,"genie.py",0,0,"",python,tab +1836,1266026,"genie.py",9129,0,"",python,selection_mouse +1837,1266838,"genie.py",9116,0,"",python,selection_mouse +1838,1267356,"genie.py",9288,0,"",python,selection_mouse +1839,1267899,"genie.py",9196,0,"",python,selection_mouse +1840,1268456,"genie.py",9182,0,"",python,selection_mouse +1841,1268581,"genie.py",9175,10,"idx_mask_N",python,selection_mouse +1842,1280081,"genie_fixed_maskgit.py",0,0,"",python,tab +1843,1280082,"genie_fixed_maskgit.py",9126,0,"",python,selection_mouse +1844,1281274,"genie.py",0,0,"",python,tab +1845,1281382,"genie.py",9182,0,"",python,selection_mouse +1846,1281383,"genie.py",9175,10,"idx_mask_N",python,selection_mouse +1847,1284867,"genie.py",9233,0,"",python,selection_mouse +1848,1300972,"TERMINAL",0,0,"bash",,terminal_focus +1849,1335712,"TERMINAL",0,0,"git checkout -b ""maskgit-sampling-iterative-unmasking-fix""",,terminal_command +1850,1335778,"TERMINAL",0,0,"]633;E;2025-07-31 17:21:06 git checkout -b ""maskgit-sampling-iterative-unmasking-fix"";fefc14ad-7c55-4796-a83b-9fc26af4ce88]633;C",,terminal_output +1851,1335891,"TERMINAL",0,0,"Switched to a new branch 'maskgit-sampling-iterative-unmasking-fix'\r\n]0;tum_cte0515@hkn1990:~/Projects/jafar]633;D;0",,terminal_output 
+1852,1336786,"genie.py",8032,0,"",python,selection_mouse +1853,1337346,"genie.py",8117,0,"",python,selection_mouse +1854,1337347,"genie.py",8116,0,"",python,selection_command +1855,1340293,"genie.py",0,0,"Switched from branch 'main' to 'maskgit-sampling-iterative-unmasking-fix'",python,git_branch_checkout +1856,1346143,"genie.py",7608,0,"",python,selection_mouse +1857,1346775,"genie.py",7874,0,"",python,selection_mouse +1858,1357718,"genie.py",7540,0,"",python,selection_mouse +1859,1358260,"genie.py",7381,0,"",python,selection_mouse +1860,1361578,"genie.py",9971,0,"",python,selection_mouse +1861,1362877,"genie.py",9971,1,"=",python,content +1862,1363360,"genie.py",9906,0,"",python,selection_command +1863,1363794,"genie.py",9905,0,"",python,selection_command +1864,1364324,"genie.py",9904,0,"",python,selection_command +1865,1364331,"genie.py",9903,0,"",python,selection_command +1866,1364469,"genie.py",9902,0,"",python,selection_command +1867,1364470,"genie.py",9901,0,"",python,selection_command +1868,1364471,"genie.py",9900,0,"",python,selection_command +1869,1364472,"genie.py",9899,0,"",python,selection_command +1870,1364472,"genie.py",9898,0,"",python,selection_command +1871,1364613,"genie.py",9897,0,"",python,selection_command +1872,1364614,"genie.py",9896,0,"",python,selection_command +1873,1364615,"genie.py",9895,0,"",python,selection_command +1874,1364616,"genie.py",9894,0,"",python,selection_command +1875,1364648,"genie.py",9893,0,"",python,selection_command +1876,1365002,"genie.py",9892,0,"",python,selection_command +1877,1365455,"genie.py",9892,4,"",python,content +1878,1365818,"genie.py",9892,7,"",python,content +1879,1366374,"genie.py",9893,0,"",python,selection_command +1880,1366529,"genie.py",9894,0,"",python,selection_command +1881,1366674,"genie.py",9895,0,"",python,selection_command +1882,1366815,"genie.py",9896,0,"",python,selection_command +1883,1367043,"genie.py",9897,0,"",python,selection_command +1884,1367907,"genie.py",9897,1,"",python,content +1885,1368375,"genie.py",9898,0,"",python,selection_command +1886,1368860,"genie.py",9899,0,"",python,selection_command +1887,1368898,"genie.py",9900,0,"",python,selection_command +1888,1368935,"genie.py",9901,0,"",python,selection_command +1889,1368968,"genie.py",9902,0,"",python,selection_command +1890,1368998,"genie.py",9903,0,"",python,selection_command +1891,1368999,"genie.py",9904,0,"",python,selection_command +1892,1369060,"genie.py",9905,0,"",python,selection_command +1893,1369061,"genie.py",9906,0,"",python,selection_command +1894,1369093,"genie.py",9907,0,"",python,selection_command +1895,1370278,"genie.py",9907,1,"=",python,content +1896,1371693,"genie.py",9960,0,"",python,selection_command +1897,1371960,"genie.py",10011,0,"",python,selection_command +1898,1372118,"genie.py",10076,0,"",python,selection_command +1899,1372250,"genie.py",10090,0,"",python,selection_command +1900,1372394,"genie.py",10134,0,"",python,selection_command +1901,1398929,"genie_fixed_maskgit.py",0,0,"from typing import Dict, Any\n\nimport optax\nimport jax\nimport jax.numpy as jnp\nimport flax.linen as nn\nfrom flax.training.train_state import TrainState\nimport orbax.checkpoint as ocp\nimport einops\n\nfrom models.dynamics import DynamicsMaskGIT\nfrom models.lam import LatentActionModel\nfrom models.tokenizer import TokenizerVQVAE\n\nimport grain\n\n\nclass Genie(nn.Module):\n """"""Genie model""""""\n\n # --- Tokenizer ---\n in_dim: int\n tokenizer_dim: int\n latent_patch_dim: int\n num_patch_latents: int\n patch_size: int\n 
tokenizer_num_blocks: int\n tokenizer_num_heads: int\n # --- LAM ---\n lam_dim: int\n latent_action_dim: int\n num_latent_actions: int\n lam_patch_size: int\n lam_num_blocks: int\n lam_num_heads: int\n lam_co_train: bool\n # --- Dynamics ---\n dyna_dim: int\n dyna_num_blocks: int\n dyna_num_heads: int\n param_dtype: jnp.dtype\n dtype: jnp.dtype\n use_flash_attention: bool\n dropout: float = 0.0\n mask_limit: float = 0.0\n\n def setup(self):\n self.tokenizer = TokenizerVQVAE(\n in_dim=self.in_dim,\n model_dim=self.tokenizer_dim,\n latent_dim=self.latent_patch_dim,\n num_latents=self.num_patch_latents,\n patch_size=self.patch_size,\n num_blocks=self.tokenizer_num_blocks,\n num_heads=self.tokenizer_num_heads,\n dropout=0.0,\n codebook_dropout=0.0,\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n use_flash_attention=self.use_flash_attention,\n )\n self.lam = LatentActionModel(\n in_dim=self.in_dim,\n model_dim=self.lam_dim,\n latent_dim=self.latent_patch_dim,\n num_latents=self.num_latent_actions,\n patch_size=self.lam_patch_size,\n num_blocks=self.lam_num_blocks,\n num_heads=self.lam_num_heads,\n dropout=0.0,\n codebook_dropout=0.0,\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n use_flash_attention=self.use_flash_attention,\n )\n self.dynamics = DynamicsMaskGIT(\n model_dim=self.dyna_dim,\n num_latents=self.num_patch_latents,\n num_blocks=self.dyna_num_blocks,\n num_heads=self.dyna_num_heads,\n dropout=self.dropout,\n mask_limit=self.mask_limit,\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n use_flash_attention=self.use_flash_attention,\n )\n\n def __call__(self, batch: Dict[str, Any], training: bool = True) -> Dict[str, Any]:\n tokenizer_outputs = self.tokenizer.vq_encode(batch[""videos""], training=False)\n lam_outputs = self.lam.vq_encode(batch[""videos""], training=False)\n latent_actions = jax.lax.cond(\n self.lam_co_train,\n lambda: lam_outputs[""z_q""],\n lambda: jax.lax.stop_gradient(lam_outputs[""z_q""])\n )\n outputs = dict(\n video_tokens=jax.lax.stop_gradient(tokenizer_outputs[""indices""]),\n latent_actions=latent_actions,\n )\n outputs[""mask_rng""] = batch[""mask_rng""]\n dyna_outputs = self.dynamics(outputs, training)\n outputs.update(dyna_outputs)\n mle_indices = jnp.argmax(outputs[""token_logits""], axis=-1)\n outputs[""recon""] = self.tokenizer.decode(\n mle_indices, batch[""videos""].shape[2:4]\n )\n outputs[""lam_indices""] = lam_outputs[""indices""]\n return outputs\n\n @nn.compact\n def sample(\n self,\n batch: Dict[str, Any],\n seq_len: int,\n steps: int = 25,\n temperature: float = 1,\n sample_argmax: bool = False,\n ) -> Any:\n """"""\n Autoregressively samples up to `seq_len` future frames, following Figure 8 of the paper.\n\n - Input frames are tokenized once.\n - Future frames are generated autoregressively in token space.\n - All frames are detokenized in a single pass.\n\n Note:\n - For interactive or step-wise sampling, detokenization should occur after each action.\n - To maintain consistent tensor shapes across timesteps, all current and future frames are decoded at every step.\n - Temporal causal structure is preserved by \n a) reapplying the mask before each decoding step.\n b) a temporal causal mask is applied within each ST-transformer block.\n\n Dimension keys:\n B: batch size \n T: number of input (conditioning) frames \n N: patches per frame \n S: sequence length \n A: action space \n D: model latent dimension\n """"""\n # --- Encode videos and actions ---\n tokenizer_out = self.tokenizer.vq_encode(batch[""videos""], 
training=False)\n token_idxs = tokenizer_out[""indices""] # (B, T, N)\n B, T, N = token_idxs.shape\n pad_shape = (B, seq_len - T, N)\n pad = jnp.zeros(pad_shape, dtype=token_idxs.dtype)\n token_idxs = jnp.concatenate([token_idxs, pad], axis=1) # (B, S, N)\n action_tokens = self.lam.vq.get_codes(batch[""latent_actions""])\n\n MaskGITLoop = nn.scan(\n MaskGITStep,\n variable_broadcast=""params"",\n split_rngs={""params"": False},\n in_axes=0,\n out_axes=0,\n length=steps,\n )\n \n loop_fn = MaskGITLoop(\n dynamics=self.dynamics,\n tokenizer=self.tokenizer,\n temperature=temperature,\n sample_argmax=sample_argmax,\n steps=steps,\n )\n\n def generation_step_fn(carry, step_t):\n rng, current_token_idxs = carry\n rng, step_rng = jax.random.split(rng)\n\n # Mask current frame (i.e., t == step_t)\n mask = jnp.arange(seq_len) == step_t # (S,)\n mask = jnp.broadcast_to(mask[None, :, None], (B, seq_len, N)) # (B, S, N)\n mask = mask.astype(bool)\n masked_token_idxs = current_token_idxs * ~mask\n\n # --- Initialize and run MaskGIT loop ---\n init_carry_maskgit = (\n step_rng,\n masked_token_idxs,\n mask,\n action_tokens,\n )\n final_carry_maskgit, _ = loop_fn(init_carry_maskgit, jnp.arange(steps))\n updated_token_idxs = final_carry_maskgit[1]\n new_carry = (rng, updated_token_idxs)\n return new_carry, None\n\n # --- Run the autoregressive generation using scan ---\n initial_carry = (batch[""rng""], token_idxs)\n timesteps_to_scan = jnp.arange(T, seq_len)\n final_carry, _ = jax.lax.scan(\n generation_step_fn,\n initial_carry,\n timesteps_to_scan\n )\n final_token_idxs = final_carry[1]\n\n # --- Decode all tokens at once at the end ---\n final_frames = self.tokenizer.decode(\n final_token_idxs,\n video_hw=batch[""videos""].shape[2:4],\n )\n return final_frames\n\n def vq_encode(self, batch, training) -> Dict[str, Any]:\n # --- Preprocess videos ---\n lam_output = self.lam.vq_encode(batch[""videos""], training=training)\n return lam_output[""indices""]\n\n\nclass MaskGITStep(nn.Module):\n dynamics: nn.Module\n tokenizer: nn.Module\n temperature: float\n sample_argmax: bool\n steps: int\n\n @nn.compact\n def __call__(self, carry, x):\n rng, token_idxs, mask, action_tokens = carry\n step = x\n N = token_idxs.shape[2]\n\n # --- Construct + encode video ---\n vid_embed = self.dynamics.patch_embed(token_idxs) # (B, S, N, D)\n mask_token = self.dynamics.mask_token # (1, 1, 1, D,)\n mask_expanded = mask[..., None] # (B, S, N, 1) \n vid_embed = jnp.where(mask_expanded, mask_token, vid_embed)\n\n # --- Predict transition ---\n act_embed = self.dynamics.action_up(action_tokens)\n vid_embed += jnp.pad(act_embed, ((0, 0), (1, 0), (0, 0), (0, 0)))\n unmasked_ratio = jnp.cos(jnp.pi * (step + 1) / (self.steps * 2))\n step_temp = self.temperature * (1.0 - unmasked_ratio)\n final_logits = self.dynamics.dynamics(vid_embed) / step_temp\n\n # --- Sample new tokens for final frame ---\n if self.sample_argmax:\n sampled_token_idxs = jnp.argmax(final_logits, axis=-1)\n else:\n rng, _rng = jax.random.split(rng)\n sampled_token_idxs = jax.random.categorical(_rng, final_logits)\n gather_fn = jax.vmap(jax.vmap(jax.vmap(lambda x, y: x[y])))\n final_token_probs = gather_fn(jax.nn.softmax(final_logits), sampled_token_idxs)\n final_token_probs += ~mask\n # Update masked tokens only\n token_idxs = jnp.where(mask, sampled_token_idxs, token_idxs)\n\n # --- Update mask ---\n # number of tokens that should be unmasked after this step\n num_unmasked_tokens = jnp.round(N * (1.0 - unmasked_ratio)).astype(int)\n # N-num_unmasked_tokens is how 
many tokens should be left masked after this step.\n # mask those tokens i.e. true; false for rest\n idx_mask = jnp.arange(final_token_probs.shape[1]*N) <= N-num_unmasked_tokens\n final_token_probs_flat = einops.rearrange(final_token_probs, ""b t n -> b (t n)"")\n sorted_idxs = jnp.argsort(final_token_probs_flat, axis=-1)\n mask_update_fn = jax.vmap(lambda msk, ids: msk.at[ids].set(idx_mask))\n mask_flat = einops.rearrange(mask, ""b t n -> b (t n)"")\n new_mask_flat = mask_update_fn(mask_flat, sorted_idxs)\n new_mask = einops.rearrange(new_mask_flat, ""b (t n) -> b t n"", n=N)\n\n new_carry = (rng, token_idxs, new_mask, action_tokens)\n return new_carry, None\n\ndef restore_genie_components(\n train_state: TrainState,\n sharding: jax.sharding.NamedSharding,\n grain_iterator: grain.DataLoaderIterator,\n inputs: Dict[str, jax.Array],\n rng: jax.Array,\n args,\n):\n """"""Restore pre-trained Genie components""""""\n rng, _rng = jax.random.split(rng)\n\n # dummy values since we only use tx to initialize the dummy train states\n dummy_tx = optax.adamw(\n learning_rate=optax.constant_schedule(args.max_lr),\n b1=0.9,\n b2=0.9,\n weight_decay=1e-4,\n )\n handler_registry = ocp.handlers.DefaultCheckpointHandlerRegistry()\n handler_registry.add('model_state', ocp.args.StandardRestore, ocp.handlers.StandardCheckpointHandler)\n \n\n checkpoint_options = ocp.CheckpointManagerOptions(\n step_format_fixed_length=6,\n )\n tokenizer_checkpoint_manager = ocp.CheckpointManager(\n directory=args.tokenizer_checkpoint,\n options=checkpoint_options,\n handler_registry=handler_registry,\n )\n dummy_tokenizer = TokenizerVQVAE(\n in_dim=args.image_channels,\n model_dim=args.tokenizer_dim,\n latent_dim=args.latent_patch_dim,\n num_latents=args.num_patch_latents,\n patch_size=args.patch_size,\n num_blocks=args.tokenizer_num_blocks,\n num_heads=args.tokenizer_num_heads,\n dropout=args.dropout,\n codebook_dropout=args.dropout,\n param_dtype=args.param_dtype,\n dtype=args.dtype,\n use_flash_attention=args.use_flash_attention,\n )\n tokenizer_init_params = dummy_tokenizer.init(_rng, inputs)\n dummy_tokenizer_train_state = TrainState.create(\n apply_fn=dummy_tokenizer.apply, params=tokenizer_init_params, tx=dummy_tx\n )\n abstract_sharded_tokenizer_state = _create_abstract_sharded_pytree(\n dummy_tokenizer_train_state, sharding\n )\n restored_tokenizer = tokenizer_checkpoint_manager.restore(\n step=tokenizer_checkpoint_manager.latest_step(),\n args=ocp.args.Composite(\n model_state=ocp.args.StandardRestore(abstract_sharded_tokenizer_state),\n ),\n )[""model_state""]\n restored_tokenizer_params = restored_tokenizer.params[""params""]\n train_state.params[""params""][""tokenizer""].update(restored_tokenizer_params)\n tokenizer_checkpoint_manager.close()\n\n if args.lam_checkpoint:\n lam_checkpoint_manager = ocp.CheckpointManager(\n directory=args.lam_checkpoint,\n options=checkpoint_options,\n handler_registry=handler_registry,\n )\n dummy_lam = LatentActionModel(\n in_dim=args.image_channels,\n model_dim=args.lam_dim,\n latent_dim=args.latent_patch_dim,\n num_latents=args.num_latent_actions,\n patch_size=args.lam_patch_size,\n num_blocks=args.lam_num_blocks,\n num_heads=args.lam_num_heads,\n dropout=args.dropout,\n codebook_dropout=args.dropout,\n param_dtype=args.param_dtype,\n dtype=args.dtype,\n use_flash_attention=args.use_flash_attention,\n )\n lam_init_params = dummy_lam.init(_rng, inputs)\n dummy_lam_train_state = TrainState.create(\n apply_fn=dummy_lam.apply, params=lam_init_params, tx=dummy_tx\n )\n 
abstract_sharded_lam_state = _create_abstract_sharded_pytree(\n dummy_lam_train_state, sharding\n )\n restored_lam = lam_checkpoint_manager.restore(\n step=lam_checkpoint_manager.latest_step(),\n args=ocp.args.Composite(\n model_state=ocp.args.StandardRestore(abstract_sharded_lam_state),\n ),\n )[""model_state""]\n restored_lam_params = restored_lam.params[""params""]\n # Genie does not initialize all LAM modules, thus we omit those extra modules during restoration\n # (f.srambical) FIXME: Currently, this is a small HBM memory crunch since the LAM's decoder is loaded into HBM and immediately dicarded.\n # A workaround would be to restore to host memory first, and only move the weights to HBM after pruning the decoder\n restored_lam_params = {\n k: v\n for k, v in restored_lam_params.items()\n if k in train_state.params[""params""][""lam""]\n }\n train_state.params[""params""][""lam""].update(restored_lam_params)\n lam_checkpoint_manager.close()\n\n return train_state\n\ndef _create_abstract_sharded_pytree(pytree_template, sharding_spec):\n """"""Replaces arrays in a pytree with ShapeDtypeStructs having the given sharding.""""""\n\n def map_fn(leaf_template):\n if hasattr(leaf_template, ""shape"") and hasattr(leaf_template, ""dtype""):\n return jax.ShapeDtypeStruct(\n leaf_template.shape, leaf_template.dtype, sharding=sharding_spec\n )\n return leaf_template\n\n return jax.tree_util.tree_map(map_fn, pytree_template)",python,tab +1902,1398930,"genie_fixed_maskgit.py",9298,0,"",python,selection_mouse +1903,1399370,"genie_fixed_maskgit.py",9131,0,"",python,selection_mouse +1904,1399523,"genie_fixed_maskgit.py",9125,8,"idx_mask",python,selection_mouse +1905,1401922,"genie.py",0,0,"from typing import Dict\n\nimport optax\nimport jax\nimport jax.numpy as jnp\nimport flax.nnx as nnx\nimport orbax.checkpoint as ocp\n\nfrom models.dynamics import DynamicsMaskGIT\nfrom models.lam import LatentActionModel\nfrom models.tokenizer import TokenizerVQVAE\n\n\nclass Genie(nnx.Module):\n """"""Genie model""""""\n\n def __init__(\n self,\n in_dim: int,\n tokenizer_dim: int,\n tokenizer_ffn_dim: int,\n latent_patch_dim: int,\n num_patch_latents: int,\n patch_size: int,\n tokenizer_num_blocks: int,\n tokenizer_num_heads: int,\n lam_dim: int,\n lam_ffn_dim: int,\n latent_action_dim: int,\n num_latent_actions: int,\n lam_patch_size: int,\n lam_num_blocks: int,\n lam_num_heads: int,\n lam_co_train: bool,\n dyna_dim: int,\n dyna_ffn_dim: int,\n dyna_num_blocks: int,\n dyna_num_heads: int,\n param_dtype: jnp.dtype,\n dtype: jnp.dtype,\n use_flash_attention: bool,\n rngs: nnx.Rngs,\n dropout: float = 0.0,\n mask_limit: float = 0.0,\n ):\n # --- Tokenizer ---\n self.in_dim = in_dim\n self.tokenizer_dim = tokenizer_dim\n self.tokenizer_ffn_dim = tokenizer_ffn_dim\n self.latent_patch_dim = latent_patch_dim\n self.num_patch_latents = num_patch_latents\n self.patch_size = patch_size\n self.tokenizer_num_blocks = tokenizer_num_blocks\n self.tokenizer_num_heads = tokenizer_num_heads\n # --- LAM ---\n self.lam_dim = lam_dim\n self.lam_ffn_dim = lam_ffn_dim\n self.latent_action_dim = latent_action_dim\n self.num_latent_actions = num_latent_actions\n self.lam_patch_size = lam_patch_size\n self.lam_num_blocks = lam_num_blocks\n self.lam_num_heads = lam_num_heads\n self.lam_co_train = lam_co_train\n # --- Dynamics ---\n self.dyna_dim = dyna_dim\n self.dyna_ffn_dim = dyna_ffn_dim\n self.dyna_num_blocks = dyna_num_blocks\n self.dyna_num_heads = dyna_num_heads\n self.param_dtype = param_dtype\n self.dtype = dtype\n 
self.use_flash_attention = use_flash_attention\n self.dropout = dropout\n self.mask_limit = mask_limit\n\n self.tokenizer = TokenizerVQVAE(\n in_dim=self.in_dim,\n model_dim=self.tokenizer_dim,\n ffn_dim=self.tokenizer_ffn_dim,\n latent_dim=self.latent_patch_dim,\n num_latents=self.num_patch_latents,\n patch_size=self.patch_size,\n num_blocks=self.tokenizer_num_blocks,\n num_heads=self.tokenizer_num_heads,\n dropout=0.0,\n codebook_dropout=0.0,\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n use_flash_attention=self.use_flash_attention,\n rngs=rngs,\n )\n self.lam = LatentActionModel(\n in_dim=self.in_dim,\n model_dim=self.lam_dim,\n ffn_dim=self.lam_ffn_dim,\n latent_dim=self.latent_patch_dim,\n num_latents=self.num_latent_actions,\n patch_size=self.lam_patch_size,\n num_blocks=self.lam_num_blocks,\n num_heads=self.lam_num_heads,\n dropout=0.0,\n codebook_dropout=0.0,\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n use_flash_attention=self.use_flash_attention,\n rngs=rngs,\n )\n self.dynamics = DynamicsMaskGIT(\n model_dim=self.dyna_dim,\n ffn_dim=self.dyna_ffn_dim,\n num_latents=self.num_patch_latents,\n latent_action_dim=self.latent_action_dim,\n num_blocks=self.dyna_num_blocks,\n num_heads=self.dyna_num_heads,\n dropout=self.dropout,\n mask_limit=self.mask_limit,\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n use_flash_attention=self.use_flash_attention,\n rngs=rngs,\n )\n\n def __call__(\n self, batch: Dict[str, jax.Array], training: bool = True\n ) -> Dict[str, jax.Array]:\n videos_BTHWC = batch[""videos""]\n tokenizer_outputs = self.tokenizer.vq_encode(videos_BTHWC, training=False)\n token_indices_BTN = tokenizer_outputs[""indices""]\n lam_outputs = self.lam.vq_encode(videos_BTHWC, training=False)\n z_q_BTm11L = lam_outputs[""z_q""]\n action_indices_E = lam_outputs[""indices""]\n latent_actions_BTm11L = jax.lax.cond(\n self.lam_co_train,\n lambda: z_q_BTm11L,\n lambda: jax.lax.stop_gradient(z_q_BTm11L),\n )\n outputs = dict(\n video_tokens=jax.lax.stop_gradient(token_indices_BTN),\n latent_actions=latent_actions_BTm11L,\n )\n outputs[""mask_rng""] = batch[""mask_rng""]\n dyna_logits_BTNV, dyna_mask = self.dynamics(outputs, training)\n outputs[""token_logits""] = dyna_logits_BTNV\n if dyna_mask is not None:\n outputs[""mask""] = dyna_mask\n mle_indices_BTN = jnp.argmax(outputs[""token_logits""], axis=-1)\n H, W = batch[""videos""].shape[2:4]\n outputs[""recon""] = self.tokenizer.decode(mle_indices_BTN, (H, W))\n outputs[""lam_indices""] = action_indices_E\n return outputs\n\n def sample(\n self,\n batch: Dict[str, jax.Array],\n seq_len: int,\n steps: int = 25,\n temperature: float = 1,\n sample_argmax: bool = False,\n ) -> jax.Array:\n """"""\n Autoregressively samples up to `seq_len` future frames, following Figure 8 of the paper.\n\n - Input frames are tokenized once.\n - Future frames are generated autoregressively in token space.\n - All frames are detokenized in a single pass.\n\n Note:\n - For interactive or step-wise sampling, detokenization should occur after each action.\n - To maintain consistent tensor shapes across timesteps, all current and future frames are decoded at every step.\n - Temporal causal structure is preserved by\n a) reapplying the mask before each decoding step.\n b) a temporal causal mask is applied within each ST-transformer block.\n\n Dimension keys:\n B: batch size\n T: number of input (conditioning) frames\n N: number of patches per frame\n M: model dimension\n S: sequence length\n H: height\n W: width\n E: B * (S - 1)\n """"""\n # 
--- Encode videos and actions ---\n videos_BTHWC = batch[""videos""]\n latent_actions_E = batch[""latent_actions""]\n tokenizer_out = self.tokenizer.vq_encode(videos_BTHWC, training=False)\n token_idxs_BTN = tokenizer_out[""indices""]\n B, T, N = token_idxs_BTN.shape\n pad_shape = (B, seq_len - T, N)\n pad = jnp.zeros(pad_shape, dtype=token_idxs_BTN.dtype)\n token_idxs_BSN = jnp.concatenate([token_idxs_BTN, pad], axis=1)\n action_tokens_EL = self.lam.vq.get_codes(latent_actions_E)\n\n def maskgit_step_fn(\n carry: tuple[jax.Array, jax.Array, jax.Array, jax.Array], step: jax.Array\n ) -> tuple[tuple[jax.Array, jax.Array, jax.Array, jax.Array], None]:\n rng, token_idxs_BSN, mask_BSN, action_tokens_EL = carry\n S, N = token_idxs_BSN.shape[1:]\n L = action_tokens_EL.shape[-1]\n\n # --- Construct + encode video ---\n vid_embed_BSNM = self.dynamics.patch_embed(token_idxs_BSN)\n mask_token_111M = self.dynamics.mask_token.value\n mask_expanded_BSN1 = mask_BSN[..., None]\n vid_embed_BSNM = jnp.where(mask_expanded_BSN1, mask_token_111M, vid_embed_BSNM)\n\n # --- Predict transition ---\n action_tokens_BSm1L = jnp.reshape(action_tokens_EL, (B, S - 1, L))\n act_embed_BSm1M = self.dynamics.action_up(action_tokens_BSm1L)\n act_embed_BSM = jnp.pad(act_embed_BSm1M, ((0, 0), (1, 0), (0, 0)))\n act_embed_BS1M = jnp.reshape(act_embed_BSM, (B, S, 1, act_embed_BSM.shape[-1]))\n vid_embed_BSNM += act_embed_BS1M\n unmasked_ratio = jnp.cos(jnp.pi * (step + 1) / (steps * 2))\n step_temp = temperature * (1.0 - unmasked_ratio)\n final_logits_BSNV = self.dynamics.transformer(vid_embed_BSNM) / step_temp\n\n # --- Sample new tokens for final frame ---\n if sample_argmax:\n sampled_token_idxs_BSN = jnp.argmax(final_logits_BSNV, axis=-1)\n else:\n rng, _rng = jax.random.split(rng)\n sampled_token_idxs_BSN = jax.random.categorical(_rng, final_logits_BSNV)\n gather_fn = jax.vmap(jax.vmap(jax.vmap(lambda x, y: x[y])))\n final_token_probs_BSN = gather_fn(\n jax.nn.softmax(final_logits_BSNV), sampled_token_idxs_BSN\n )\n final_token_probs_BSN += ~mask_BSN\n # Update masked tokens only\n token_idxs_BSN = jnp.where(mask_BSN, sampled_token_idxs_BSN, token_idxs_BSN)\n\n # --- Update mask ---\n num_unmasked_tokens = jnp.round(N * (1.0 - unmasked_ratio)).astype(int)\n idx_mask_N = jnp.arange(final_token_probs_BSN.shape[-1]) > num_unmasked_tokens\n sorted_idxs_BSN = jnp.argsort(final_token_probs_BSN, axis=-1, descending=True)\n mask_update_fn = jax.vmap(lambda msk, ids: msk.at[ids].set(idx_mask_N))\n new_mask_BSN = mask_update_fn(mask_BSN, sorted_idxs_BSN)\n\n new_carry = (rng, token_idxs_BSN, new_mask_BSN, action_tokens_EL)\n return new_carry, None\n\n def generation_step_fn(\n carry: tuple[jax.Array, jax.Array], step_t: jax.Array\n ) -> tuple[tuple[jax.Array, jax.Array], None]:\n rng, current_token_idxs_BSN = carry\n rng, step_rng = jax.random.split(rng)\n\n # Mask current frame (i.e., t == step_t)\n mask_S = jnp.arange(seq_len) == step_t\n mask_BSN = jnp.broadcast_to(mask_S[None, :, None], (B, seq_len, N)).astype(\n bool\n )\n masked_token_idxs_BSN = current_token_idxs_BSN * ~mask_BSN\n\n # --- Initialize and run MaskGIT loop ---\n init_carry_maskgit = (\n step_rng,\n masked_token_idxs_BSN,\n mask_BSN,\n action_tokens_EL,\n )\n final_carry_maskgit, _ = jax.lax.scan(\n maskgit_step_fn, init_carry_maskgit, jnp.arange(steps)\n )\n updated_token_idxs = final_carry_maskgit[1]\n new_carry = (rng, updated_token_idxs)\n return new_carry, None\n\n # --- Run the autoregressive generation using jax.lax.scan ---\n initial_carry = 
(batch[""rng""], token_idxs_BSN)\n timesteps_to_scan = jnp.arange(T, seq_len)\n final_carry, _ = jax.lax.scan(\n generation_step_fn, initial_carry, timesteps_to_scan\n )\n final_token_idxs = final_carry[1]\n\n # --- Decode all tokens at once at the end ---\n H, W = batch[""videos""].shape[2:4]\n final_frames = self.tokenizer.decode(\n final_token_idxs,\n video_hw=(H, W),\n )\n return final_frames\n\n def vq_encode(self, batch: Dict[str, jax.Array], training: bool) -> jax.Array:\n # --- Preprocess videos ---\n video_BTHWC = batch[""videos""]\n lam_output = self.lam.vq_encode(video_BTHWC, training=training)\n lam_indices_E = lam_output[""indices""]\n return lam_indices_E\n\n\n# FIXME (f.srambical): add conversion script for old checkpoints\ndef restore_genie_components(\n optimizer: nnx.Optimizer,\n sharding: jax.sharding.NamedSharding,\n rng: jax.Array,\n args,\n) -> nnx.Optimizer:\n """"""Restore pre-trained Genie components""""""\n rngs = nnx.Rngs(rng)\n\n tx = optimizer.tx\n model = optimizer.model\n handler_registry = ocp.handlers.DefaultCheckpointHandlerRegistry()\n handler_registry.add(\n ""model_state"", ocp.args.PyTreeRestore, ocp.handlers.PyTreeCheckpointHandler\n )\n\n checkpoint_options = ocp.CheckpointManagerOptions(\n step_format_fixed_length=6,\n )\n tokenizer_checkpoint_manager = ocp.CheckpointManager(\n directory=args.tokenizer_checkpoint,\n options=checkpoint_options,\n handler_registry=handler_registry,\n )\n dummy_tokenizer = TokenizerVQVAE(\n in_dim=args.image_channels,\n model_dim=args.tokenizer_dim,\n ffn_dim=args.tokenizer_ffn_dim,\n latent_dim=args.latent_patch_dim,\n num_latents=args.num_patch_latents,\n patch_size=args.patch_size,\n num_blocks=args.tokenizer_num_blocks,\n num_heads=args.tokenizer_num_heads,\n dropout=args.dropout,\n codebook_dropout=args.dropout,\n param_dtype=args.param_dtype,\n dtype=args.dtype,\n use_flash_attention=args.use_flash_attention,\n rngs=rngs,\n )\n dummy_tokenizer_optimizer = nnx.Optimizer(dummy_tokenizer, tx)\n dummy_tokenizer_optimizer_state = nnx.state(dummy_tokenizer_optimizer)\n abstract_sharded_tokenizer_optimizer_state = _create_abstract_sharded_pytree(\n dummy_tokenizer_optimizer_state, sharding\n )\n restored_tokenizer = tokenizer_checkpoint_manager.restore(\n step=tokenizer_checkpoint_manager.latest_step(),\n args=ocp.args.Composite(\n model_state=ocp.args.PyTreeRestore( # type: ignore\n abstract_sharded_tokenizer_optimizer_state # type: ignore\n ),\n ),\n )[""model_state""]\n nnx.update(dummy_tokenizer_optimizer.model, restored_tokenizer.model)\n model.tokenizer = dummy_tokenizer_optimizer.model\n tokenizer_checkpoint_manager.close()\n\n if args.lam_checkpoint:\n lam_checkpoint_manager = ocp.CheckpointManager(\n directory=args.lam_checkpoint,\n options=checkpoint_options,\n handler_registry=handler_registry,\n )\n dummy_lam = LatentActionModel(\n in_dim=args.image_channels,\n model_dim=args.lam_dim,\n ffn_dim=args.lam_ffn_dim,\n latent_dim=args.latent_patch_dim,\n num_latents=args.num_latent_actions,\n patch_size=args.lam_patch_size,\n num_blocks=args.lam_num_blocks,\n num_heads=args.lam_num_heads,\n dropout=args.dropout,\n codebook_dropout=args.dropout,\n param_dtype=args.param_dtype,\n dtype=args.dtype,\n use_flash_attention=args.use_flash_attention,\n rngs=rngs,\n )\n dummy_lam_optimizer = nnx.Optimizer(dummy_lam, tx)\n dummy_lam_optimizer_state = nnx.state(dummy_lam_optimizer)\n abstract_sharded_lam_optimizer_state = _create_abstract_sharded_pytree(\n dummy_lam_optimizer_state, sharding\n )\n restored_lam_optimizer = 
lam_checkpoint_manager.restore(\n step=lam_checkpoint_manager.latest_step(),\n args=ocp.args.Composite(\n model_state=ocp.args.PyTreeRestore( # type: ignore\n abstract_sharded_lam_optimizer_state # type: ignore\n ),\n ),\n )[""model_state""]\n nnx.update(dummy_lam_optimizer.model, restored_lam_optimizer.model)\n model.lam = dummy_lam_optimizer.model\n # Remove the LAM decoder to save memory and avoid unnecessary computation.\n del model.lam.decoder\n lam_checkpoint_manager.close()\n \n # Reinitialize the optimizer states\n optimizer = nnx.Optimizer(model, tx)\n return optimizer\n\n\ndef _create_abstract_sharded_pytree(\n pytree_template: nnx.GraphState, sharding_spec: jax.sharding.NamedSharding\n) -> jax.Array:\n """"""Replaces arrays in a pytree with ShapeDtypeStructs having the given sharding.""""""\n\n def map_fn(leaf_template):\n if hasattr(leaf_template, ""shape"") and hasattr(leaf_template, ""dtype""):\n return jax.ShapeDtypeStruct(\n leaf_template.shape, leaf_template.dtype, sharding=sharding_spec\n )\n return leaf_template\n\n return jax.tree_util.tree_map(map_fn, pytree_template)\n",python,tab +1906,1401924,"genie.py",11100,0,"",python,selection_mouse +1907,1401924,"genie.py",11099,0,"",python,selection_command +1908,1404924,"genie.py",9175,0,"",python,selection_command +1909,1407028,"genie.py",9155,0,"",python,selection_mouse +1910,1408361,"genie.py",9239,0,"",python,selection_command +1911,1409368,"genie.py",9238,0,"",python,selection_command +1912,1409875,"genie.py",9237,0,"",python,selection_command +1913,1409892,"genie.py",9236,0,"",python,selection_command +1914,1409928,"genie.py",9235,0,"",python,selection_command +1915,1409999,"genie.py",9234,0,"",python,selection_command +1916,1410086,"genie.py",9233,0,"",python,selection_command +1917,1410617,"genie.py",9232,1,"",python,content +1918,1411763,"genie.py",9232,0,"<",python,content +1919,1411764,"genie.py",9233,0,"",python,selection_keyboard +1920,1411954,"genie.py",9233,0,"=",python,content +1921,1411955,"genie.py",9234,0,"",python,selection_keyboard +1922,1412903,"genie.py",9233,0,"",python,selection_command +1923,1413605,"genie.py",9234,0,"",python,selection_command +1924,1414568,"genie.py",9234,0," ",python,content +1925,1414569,"genie.py",9235,0,"",python,selection_keyboard +1926,1414839,"genie.py",9235,0,"N",python,content +1927,1414840,"genie.py",9236,0,"",python,selection_keyboard +1928,1415477,"genie.py",9236,0," ",python,content +1929,1415478,"genie.py",9237,0,"",python,selection_keyboard +1930,1416098,"genie.py",9237,0,"-",python,content +1931,1416099,"genie.py",9238,0,"",python,selection_keyboard +1932,1416540,"genie.py",9237,0,"",python,selection_command +1933,1420036,"genie.py",9333,0,"",python,selection_command +1934,1421061,"genie_fixed_maskgit.py",0,0,"",python,tab +1935,1421062,"genie_fixed_maskgit.py",9227,0,"",python,selection_mouse +1936,1421254,"genie_fixed_maskgit.py",9210,22,"final_token_probs_flat",python,selection_mouse +1937,1421412,"genie_fixed_maskgit.py",9202,89," final_token_probs_flat = einops.rearrange(final_token_probs, ""b t n -> b (t n)"")\n",python,selection_mouse +1938,1426019,"genie.py",0,0,"",python,tab +1939,1426020,"genie.py",9228,0,"",python,selection_mouse +1940,1427333,"genie.py",9258,0,"\n ",python,content +1941,1427782,"genie.py",9271,0," final_token_probs_flat = einops.rearrange(final_token_probs, ""b t n -> b (t n)"")\n",python,content +1942,1428462,"genie.py",9259,0,"",python,selection_command +1943,1428911,"genie.py",9279,0,"",python,selection_command 
+1944,1429141,"genie.py",9275,4,"",python,content +1945,1429280,"genie.py",9271,4,"",python,content +1946,1429598,"genie.py",9270,0,"",python,selection_command +1947,1431108,"genie.py",9352,0,"",python,selection_mouse +1948,1431612,"genie.py",9322,0,"",python,selection_mouse +1949,1431772,"genie.py",9313,17,"final_token_probs",python,selection_mouse +1950,1433281,"genie.py",9352,0,"",python,selection_mouse +1951,1434415,"genie.py",9259,0,"",python,selection_command +1952,1434701,"genie.py",9260,0,"",python,selection_command +1953,1435197,"genie.py",9261,0,"",python,selection_command +1954,1435226,"genie.py",9262,0,"",python,selection_command +1955,1435260,"genie.py",9263,0,"",python,selection_command +1956,1435296,"genie.py",9264,0,"",python,selection_command +1957,1435336,"genie.py",9265,0,"",python,selection_command +1958,1435368,"genie.py",9266,0,"",python,selection_command +1959,1435369,"genie.py",9267,0,"",python,selection_command +1960,1435440,"genie.py",9268,0,"",python,selection_command +1961,1435440,"genie.py",9269,0,"",python,selection_command +1962,1435475,"genie.py",9270,0,"",python,selection_command +1963,1435496,"genie.py",9271,0,"",python,selection_command +1964,1435524,"genie.py",9272,0,"",python,selection_command +1965,1435558,"genie.py",9273,0,"",python,selection_command +1966,1435587,"genie.py",9274,0,"",python,selection_command +1967,1435619,"genie.py",9275,0,"",python,selection_command +1968,1435667,"genie.py",9276,0,"",python,selection_command +1969,1435686,"genie.py",9277,0,"",python,selection_command +1970,1435714,"genie.py",9278,0,"",python,selection_command +1971,1435745,"genie.py",9279,0,"",python,selection_command +1972,1435790,"genie.py",9280,0,"",python,selection_command +1973,1435823,"genie.py",9281,0,"",python,selection_command +1974,1435854,"genie.py",9282,0,"",python,selection_command +1975,1435882,"genie.py",9283,0,"",python,selection_command +1976,1435911,"genie.py",9284,0,"",python,selection_command +1977,1435970,"genie.py",9285,0,"",python,selection_command +1978,1435971,"genie.py",9286,0,"",python,selection_command +1979,1436006,"genie.py",9287,0,"",python,selection_command +1980,1436066,"genie.py",9288,0,"",python,selection_command +1981,1436067,"genie.py",9289,0,"",python,selection_command +1982,1436098,"genie.py",9290,0,"",python,selection_command +1983,1436130,"genie.py",9291,0,"",python,selection_command +1984,1436163,"genie.py",9292,0,"",python,selection_command +1985,1436191,"genie.py",9293,0,"",python,selection_command +1986,1436219,"genie.py",9294,0,"",python,selection_command +1987,1436250,"genie.py",9295,0,"",python,selection_command +1988,1436281,"genie.py",9296,0,"",python,selection_command +1989,1436310,"genie.py",9297,0,"",python,selection_command +1990,1436340,"genie.py",9298,0,"",python,selection_command +1991,1436374,"genie.py",9299,0,"",python,selection_command +1992,1436404,"genie.py",9300,0,"",python,selection_command +1993,1436430,"genie.py",9301,0,"",python,selection_command +1994,1436458,"genie.py",9302,0,"",python,selection_command +1995,1436485,"genie.py",9303,0,"",python,selection_command +1996,1436514,"genie.py",9304,0,"",python,selection_command +1997,1436541,"genie.py",9305,0,"",python,selection_command +1998,1436570,"genie.py",9306,0,"",python,selection_command +1999,1436626,"genie.py",9307,0,"",python,selection_command +2000,1436627,"genie.py",9308,0,"",python,selection_command +2001,1436691,"genie.py",9309,0,"",python,selection_command +2002,1436719,"genie.py",9310,0,"",python,selection_command 
+2003,1436749,"genie.py",9311,0,"",python,selection_command +2004,1436785,"genie.py",9312,0,"",python,selection_command +2005,1436817,"genie.py",9313,0,"",python,selection_command +2006,1436845,"genie.py",9314,0,"",python,selection_command +2007,1436873,"genie.py",9315,0,"",python,selection_command +2008,1436874,"genie.py",9316,0,"",python,selection_command +2009,1436935,"genie.py",9317,0,"",python,selection_command +2010,1436965,"genie.py",9318,0,"",python,selection_command +2011,1436966,"genie.py",9319,0,"",python,selection_command +2012,1436992,"genie.py",9320,0,"",python,selection_command +2013,1437020,"genie.py",9321,0,"",python,selection_command +2014,1437056,"genie.py",9322,0,"",python,selection_command +2015,1437085,"genie.py",9323,0,"",python,selection_command +2016,1437112,"genie.py",9324,0,"",python,selection_command +2017,1437140,"genie.py",9325,0,"",python,selection_command +2018,1437172,"genie.py",9326,0,"",python,selection_command +2019,1437232,"genie.py",9327,0,"",python,selection_command +2020,1437262,"genie.py",9328,0,"",python,selection_command +2021,1437262,"genie.py",9329,0,"",python,selection_command +2022,1437518,"genie.py",9330,0,"",python,selection_command +2023,1438381,"genie.py",9330,0,"_",python,content +2024,1438383,"genie.py",9331,0,"",python,selection_keyboard +2025,1440846,"genie.py",9313,18,"final_token_probs_BSN",python,content +2026,1442250,"genie.py",9333,0,"",python,selection_command +2027,1443369,"genie_fixed_maskgit.py",0,0,"",python,tab +2028,1443369,"genie_fixed_maskgit.py",8795,0,"",python,selection_mouse +2029,1444616,"genie.py",0,0,"",python,tab +2030,1444618,"genie.py",9334,0,"",python,selection_mouse +2031,1445682,"genie.py",0,0,"",python,selection_command +2032,1446346,"genie.py",24,0,"",python,selection_command +2033,1446502,"genie.py",25,0,"",python,selection_command +2034,1446682,"genie.py",38,0,"",python,selection_command +2035,1447007,"genie.py",25,0,"",python,selection_command +2036,1448261,"genie.py",37,0,"",python,selection_command +2037,1448704,"genie.py",32,5,"",python,content +2038,1449339,"genie.py",32,0,"e",python,content +2039,1449341,"genie.py",33,0,"",python,selection_keyboard +2040,1449746,"genie.py",33,0,"i",python,content +2041,1449748,"genie.py",34,0,"",python,selection_keyboard +2042,1449864,"genie.py",34,0,"n",python,content +2043,1449865,"genie.py",35,0,"",python,selection_keyboard +2044,1450065,"genie.py",35,0,"o",python,content +2045,1450066,"genie.py",36,0,"",python,selection_keyboard +2046,1450156,"genie.py",36,0,"p",python,content +2047,1450157,"genie.py",37,0,"",python,selection_keyboard +2048,1450258,"genie.py",37,0,"s",python,content +2049,1450259,"genie.py",38,0,"",python,selection_keyboard +2050,1450582,"genie.py",37,0,"",python,selection_command +2051,1455348,"genie.py",9290,0,"",python,selection_mouse +2052,1455457,"genie.py",9272,22,"final_token_probs_flat",python,selection_mouse +2053,1457274,"genie.py",9289,0,"",python,selection_mouse +2054,1457668,"genie.py",9294,0,"",python,selection_mouse +2055,1458703,"genie.py",9104,0,"",python,selection_mouse +2056,1459452,"genie.py",8900,0,"",python,selection_mouse +2057,1460438,"genie.py",9293,0,"",python,selection_mouse +2058,1461017,"genie.py",9294,0,"",python,selection_mouse +2059,1464608,"genie.py",9294,0,"_",python,content +2060,1464610,"genie.py",9295,0,"",python,selection_keyboard +2061,1466624,"genie.py",9295,0,"B",python,content +2062,1466626,"genie.py",9296,0,"",python,selection_keyboard +2063,1470428,"genie.py",6489,0,"",python,selection_mouse 
+2064,1470841,"genie.py",6477,0,"",python,selection_mouse +2065,1474179,"genie.py",6477,0,"\n ",python,content +2066,1474468,"genie.py",6490,0,"F",python,content +2067,1474469,"genie.py",6491,0,"",python,selection_keyboard +2068,1475079,"genie.py",6491,0,":",python,content +2069,1475080,"genie.py",6492,0,"",python,selection_keyboard +2070,1475130,"genie.py",6492,0,":",python,content +2071,1475131,"genie.py",6493,0,"",python,selection_keyboard +2072,1475663,"genie.py",6492,1,"",python,content +2073,1475910,"genie.py",6492,0," ",python,content +2074,1475911,"genie.py",6493,0,"",python,selection_keyboard +2075,1477497,"genie.py",6493,0,"S",python,content +2076,1477498,"genie.py",6494,0,"",python,selection_keyboard +2077,1478172,"genie.py",6494,0," ",python,content +2078,1478173,"genie.py",6495,0,"",python,selection_keyboard +2079,1478511,"genie.py",6495,0,"*",python,content +2080,1478512,"genie.py",6496,0,"",python,selection_keyboard +2081,1478599,"genie.py",6496,0," ",python,content +2082,1478600,"genie.py",6497,0,"",python,selection_keyboard +2083,1478986,"genie.py",6497,0,"N",python,content +2084,1478987,"genie.py",6498,0,"",python,selection_keyboard +2085,1479559,"genie.py",6497,0,"",python,selection_command +2086,1481785,"genie.py",9317,0,"",python,selection_mouse +2087,1482769,"genie.py",9317,0,"F",python,content +2088,1482770,"genie.py",9318,0,"",python,selection_keyboard +2089,1487526,"genie.py",9365,0,"",python,selection_mouse +2090,1488430,"genie.py",9364,1,"",python,content +2091,1488492,"genie.py",9364,0,"s",python,content +2092,1488493,"genie.py",9365,0,"",python,selection_keyboard +2093,1490214,"genie.py",9376,0,"",python,selection_command +2094,1490402,"genie.py",9375,0,"",python,selection_command +2095,1490591,"genie.py",9374,1,"",python,content +2096,1491379,"genie.py",9374,0,"s",python,content +2097,1491380,"genie.py",9375,0,"",python,selection_keyboard +2098,1493675,"genie.py",9316,0,"",python,selection_mouse +2099,1494798,"genie.py",9374,0,"",python,selection_mouse +2100,1496465,"genie.py",9431,0,"",python,selection_mouse +2101,1497064,"genie.py",9332,0,"",python,selection_mouse +2102,1497700,"genie.py",9311,0,"",python,selection_mouse +2103,1497805,"genie.py",9293,25,"final_token_probs_flat_BF",python,selection_mouse +2104,1501978,"genie.py",9131,0,"",python,selection_mouse +2105,1502124,"genie.py",9113,19,"num_unmasked_tokens",python,selection_mouse +2106,1503171,"genie.py",9133,0,"",python,selection_mouse +2107,1504643,"genie.py",9132,0,"",python,selection_mouse +2108,1521023,"genie_fixed_maskgit.py",0,0,"",python,tab +2109,1521024,"genie_fixed_maskgit.py",9221,0,"",python,selection_mouse +2110,1521168,"genie_fixed_maskgit.py",9210,22,"final_token_probs_flat",python,selection_mouse +2111,1522139,"genie_fixed_maskgit.py",9307,0,"",python,selection_mouse +2112,1522287,"genie_fixed_maskgit.py",9299,11,"sorted_idxs",python,selection_mouse +2113,1526516,"genie.py",0,0,"",python,tab +2114,1526517,"genie.py",9381,0,"",python,selection_mouse +2115,1527414,"genie.py",9381,0,"d",python,content +2116,1527416,"genie.py",9382,0,"",python,selection_keyboard +2117,1527529,"genie.py",9382,0,"d",python,content +2118,1527530,"genie.py",9383,0,"",python,selection_keyboard +2119,1528317,"genie.py",9382,1,"",python,content +2120,1528450,"genie.py",9381,1,"",python,content +2121,1529259,"genie.py",9381,1,"",python,content +2122,1529260,"genie.py",9393,0,"",python,selection_command +2123,1529862,"genie.py",9471,0,"",python,selection_command 
+2124,1530192,"genie.py",9470,0,"",python,selection_command +2125,1530510,"genie.py",9466,4,"",python,content +2126,1530846,"genie.py",9465,1,"",python,content +2127,1531101,"genie.py",9455,10,"",python,content +2128,1531489,"genie.py",9454,1,"",python,content +2129,1531622,"genie.py",9453,1,"",python,content +2130,1531960,"genie.py",9452,0,"",python,selection_command +2131,1538384,"genie_fixed_maskgit.py",0,0,"",python,tab +2132,1538385,"genie_fixed_maskgit.py",9385,0,"",python,selection_mouse +2133,1538496,"genie_fixed_maskgit.py",9383,3,"jax",python,selection_mouse +2134,1539106,"genie_fixed_maskgit.py",9373,0,"",python,selection_mouse +2135,1539250,"genie_fixed_maskgit.py",9366,14,"mask_update_fn",python,selection_mouse +2136,1540601,"genie.py",0,0,"",python,tab +2137,1540601,"genie.py",9475,0,"",python,selection_mouse +2138,1540700,"genie.py",9467,14,"mask_update_fn",python,selection_mouse +2139,1542945,"genie_fixed_maskgit.py",0,0,"",python,tab +2140,1542945,"genie_fixed_maskgit.py",9452,0,"",python,selection_mouse +2141,1545319,"genie.py",0,0,"",python,tab +2142,1545319,"genie.py",9588,0,"",python,selection_mouse +2143,1546345,"genie_fixed_maskgit.py",0,0,"",python,tab +2144,1546346,"genie_fixed_maskgit.py",9449,0,"",python,selection_mouse +2145,1546458,"genie_fixed_maskgit.py",9444,9,"mask_flat",python,selection_mouse +2146,1546607,"genie_fixed_maskgit.py",9436,63," mask_flat = einops.rearrange(mask, ""b t n -> b (t n)"")\n",python,selection_mouse +2147,1546989,"genie_fixed_maskgit.py",9436,126," mask_flat = einops.rearrange(mask, ""b t n -> b (t n)"")\n new_mask_flat = mask_update_fn(mask_flat, sorted_idxs)\n",python,selection_mouse +2148,1548582,"genie_fixed_maskgit.py",9436,202," mask_flat = einops.rearrange(mask, ""b t n -> b (t n)"")\n new_mask_flat = mask_update_fn(mask_flat, sorted_idxs)\n new_mask = einops.rearrange(new_mask_flat, ""b (t n) -> b t n"", n=N)\n",python,selection_mouse +2149,1551491,"genie.py",0,0,"",python,tab +2150,1551492,"genie.py",9511,0,"",python,selection_mouse +2151,1552038,"genie.py",9538,0,"\n ",python,content +2152,1552763,"genie.py",9551,0," mask_flat = einops.rearrange(mask, ""b t n -> b (t n)"")\n new_mask_flat = mask_update_fn(mask_flat, sorted_idxs)\n new_mask = einops.rearrange(new_mask_flat, ""b (t n) -> b t n"", n=N)\n",python,content +2153,1553662,"genie.py",9677,0,"",python,selection_command +2154,1553985,"genie.py",9614,0,"",python,selection_command +2155,1554802,"genie.py",9614,62," new_mask_flat = mask_update_fn(mask_flat, sorted_idxs)",python,selection_command +2156,1555036,"genie.py",9614,138," new_mask_flat = mask_update_fn(mask_flat, sorted_idxs)\n new_mask = einops.rearrange(new_mask_flat, ""b (t n) -> b t n"", n=N)",python,selection_command +2157,1555303,"genie.py",9622,0,"",python,selection_command +2158,1555673,"genie.py",9685,0," ",python,content +2159,1555674,"genie.py",9622,0," ",python,content +2160,1555986,"genie.py",9625,0,"",python,selection_command +2161,1556172,"genie.py",9550,0,"",python,selection_command +2162,1556495,"genie.py",9559,0,"",python,selection_command +2163,1556729,"genie.py",9555,4,"",python,content +2164,1556911,"genie.py",9551,4,"",python,content +2165,1557109,"genie.py",9550,0,"",python,selection_command +2166,1557316,"genie.py",9617,0,"",python,selection_command +2167,1557521,"genie.py",9618,0,"",python,selection_command +2168,1557941,"genie.py",9551,0,"",python,selection_command +2169,1558135,"genie.py",9467,0,"",python,selection_command +2170,1558420,"genie.py",9551,0,"",python,selection_command 
+2171,1558622,"genie.py",9552,0,"",python,selection_command +2172,1559154,"genie.py",9553,0,"",python,selection_command +2173,1559159,"genie.py",9554,0,"",python,selection_command +2174,1559184,"genie.py",9555,0,"",python,selection_command +2175,1559218,"genie.py",9556,0,"",python,selection_command +2176,1559251,"genie.py",9557,0,"",python,selection_command +2177,1559285,"genie.py",9558,0,"",python,selection_command +2178,1559318,"genie.py",9559,0,"",python,selection_command +2179,1559352,"genie.py",9560,0,"",python,selection_command +2180,1559385,"genie.py",9561,0,"",python,selection_command +2181,1559419,"genie.py",9562,0,"",python,selection_command +2182,1559452,"genie.py",9563,0,"",python,selection_command +2183,1559453,"genie.py",9564,0,"",python,selection_command +2184,1559512,"genie.py",9565,0,"",python,selection_command +2185,1559512,"genie.py",9566,0,"",python,selection_command +2186,1559544,"genie.py",9567,0,"",python,selection_command +2187,1559676,"genie.py",9483,0,"",python,selection_command +2188,1559906,"genie.py",9538,0,"\n ",python,content +2189,1560094,"genie.py",9539,12,"",python,content +2190,1560254,"genie.py",9540,0,"",python,selection_command +2191,1560437,"genie.py",9607,0,"",python,selection_command +2192,1560801,"genie.py",9674,0,"",python,selection_command +2193,1560946,"genie.py",9754,0,"",python,selection_command +2194,1561295,"genie.py",9755,0,"",python,selection_command +2195,1561769,"genie.py",9754,0,"",python,selection_command +2196,1561920,"genie.py",9674,0,"",python,selection_command +2197,1562038,"genie.py",9607,0,"",python,selection_command +2198,1562611,"genie.py",9674,0,"",python,selection_command +2199,1562762,"genie.py",9675,0,"",python,selection_command +2200,1563261,"genie.py",9676,0,"",python,selection_command +2201,1563283,"genie.py",9677,0,"",python,selection_command +2202,1563316,"genie.py",9678,0,"",python,selection_command +2203,1563348,"genie.py",9679,0,"",python,selection_command +2204,1563380,"genie.py",9680,0,"",python,selection_command +2205,1563416,"genie.py",9681,0,"",python,selection_command +2206,1563448,"genie.py",9682,0,"",python,selection_command +2207,1563488,"genie.py",9683,0,"",python,selection_command +2208,1563519,"genie.py",9684,0,"",python,selection_command +2209,1563519,"genie.py",9685,0,"",python,selection_command +2210,1563573,"genie.py",9686,0,"",python,selection_command +2211,1563592,"genie.py",9687,0,"",python,selection_command +2212,1563604,"genie.py",9688,0,"",python,selection_command +2213,1563635,"genie.py",9689,0,"",python,selection_command +2214,1563667,"genie.py",9690,0,"",python,selection_command +2215,1563692,"genie.py",9691,0,"",python,selection_command +2216,1563725,"genie.py",9692,0,"",python,selection_command +2217,1563852,"genie.py",9693,0,"",python,selection_command +2218,1564003,"genie.py",9694,0,"",python,selection_command +2219,1564942,"genie.py",9694,0,"_",python,content +2220,1564944,"genie.py",9695,0,"",python,selection_keyboard +2221,1565381,"genie.py",9695,0,"B",python,content +2222,1565382,"genie.py",9696,0,"",python,selection_keyboard +2223,1565538,"genie.py",9696,0,"S",python,content +2224,1565540,"genie.py",9697,0,"",python,selection_keyboard +2225,1565644,"genie.py",9697,0,"N",python,content +2226,1565645,"genie.py",9698,0,"",python,selection_keyboard +2227,1566019,"genie.py",9697,0,"",python,selection_command +2228,1566262,"genie.py",9630,0,"",python,selection_command +2229,1566624,"genie.py",9631,0,"",python,selection_command +2230,1566781,"genie.py",9632,0,"",python,selection_command 
+2231,1566973,"genie.py",9633,0,"",python,selection_command +2232,1567326,"genie.py",9632,0,"",python,selection_command +2233,1568620,"genie.py",9632,0,"_",python,content +2234,1568621,"genie.py",9633,0,"",python,selection_keyboard +2235,1569012,"genie.py",9633,0,"B",python,content +2236,1569013,"genie.py",9634,0,"",python,selection_keyboard +2237,1569198,"genie.py",9634,0,"F",python,content +2238,1569199,"genie.py",9635,0,"",python,selection_keyboard +2239,1569564,"genie.py",9634,0,"",python,selection_command +2240,1569707,"genie.py",9567,0,"",python,selection_command +2241,1570115,"genie.py",9634,0,"",python,selection_command +2242,1570337,"genie.py",9704,0,"",python,selection_command +2243,1570745,"genie.py",9705,0,"",python,selection_command +2244,1571249,"genie.py",9706,0,"",python,selection_command +2245,1571264,"genie.py",9707,0,"",python,selection_command +2246,1571301,"genie.py",9708,0,"",python,selection_command +2247,1571335,"genie.py",9709,0,"",python,selection_command +2248,1571367,"genie.py",9710,0,"",python,selection_command +2249,1571400,"genie.py",9711,0,"",python,selection_command +2250,1571433,"genie.py",9712,0,"",python,selection_command +2251,1571464,"genie.py",9713,0,"",python,selection_command +2252,1571494,"genie.py",9714,0,"",python,selection_command +2253,1571525,"genie.py",9715,0,"",python,selection_command +2254,1571557,"genie.py",9716,0,"",python,selection_command +2255,1571591,"genie.py",9717,0,"",python,selection_command +2256,1571621,"genie.py",9718,0,"",python,selection_command +2257,1571657,"genie.py",9719,0,"",python,selection_command +2258,1571691,"genie.py",9720,0,"",python,selection_command +2259,1571692,"genie.py",9721,0,"",python,selection_command +2260,1571723,"genie.py",9722,0,"",python,selection_command +2261,1571790,"genie.py",9723,0,"",python,selection_command +2262,1571822,"genie.py",9724,0,"",python,selection_command +2263,1571823,"genie.py",9725,0,"",python,selection_command +2264,1571858,"genie.py",9726,0,"",python,selection_command +2265,1571887,"genie.py",9727,0,"",python,selection_command +2266,1571926,"genie.py",9728,0,"",python,selection_command +2267,1571956,"genie.py",9729,0,"",python,selection_command +2268,1571957,"genie.py",9730,0,"",python,selection_command +2269,1572023,"genie.py",9731,0,"",python,selection_command +2270,1572054,"genie.py",9732,0,"",python,selection_command +2271,1572055,"genie.py",9733,0,"",python,selection_command +2272,1572176,"genie.py",9734,0,"",python,selection_command +2273,1573318,"genie.py",9734,0,"_",python,content +2274,1573319,"genie.py",9735,0,"",python,selection_keyboard +2275,1573921,"genie.py",9735,0,"B",python,content +2276,1573922,"genie.py",9736,0,"",python,selection_keyboard +2277,1574006,"genie.py",9736,0,"F",python,content +2278,1574007,"genie.py",9737,0,"",python,selection_keyboard +2279,1574964,"genie.py",9736,0,"",python,selection_command +2280,1576488,"genie.py",9737,0,"",python,selection_command +2281,1576998,"genie.py",9738,0,"",python,selection_command +2282,1577038,"genie.py",9739,0,"",python,selection_command +2283,1577072,"genie.py",9740,0,"",python,selection_command +2284,1577073,"genie.py",9741,0,"",python,selection_command +2285,1577107,"genie.py",9742,0,"",python,selection_command +2286,1577134,"genie.py",9743,0,"",python,selection_command +2287,1577165,"genie.py",9744,0,"",python,selection_command +2288,1577195,"genie.py",9745,0,"",python,selection_command +2289,1577537,"genie.py",9744,0,"",python,selection_command +2290,1577692,"genie.py",9743,0,"",python,selection_command 
+2291,1578702,"genie.py",9743,1,"s",python,content +2292,1578873,"genie.py",9744,0,"",python,selection_command +2293,1579374,"genie.py",9745,0,"",python,selection_command +2294,1579380,"genie.py",9746,0,"",python,selection_command +2295,1579414,"genie.py",9747,0,"",python,selection_command +2296,1579446,"genie.py",9748,0,"",python,selection_command +2297,1579473,"genie.py",9749,0,"",python,selection_command +2298,1579503,"genie.py",9750,0,"",python,selection_command +2299,1579533,"genie.py",9751,0,"",python,selection_command +2300,1579566,"genie.py",9752,0,"",python,selection_command +2301,1579595,"genie.py",9753,0,"",python,selection_command +2302,1580218,"genie.py",9753,1,"s",python,content +2303,1581128,"genie.py",9675,0,"",python,selection_command +2304,1582649,"genie.py",9674,0,"",python,selection_command +2305,1583028,"genie.py",9673,0,"",python,selection_command +2306,1583508,"genie.py",9672,0,"",python,selection_command +2307,1583528,"genie.py",9671,0,"",python,selection_command +2308,1583556,"genie.py",9670,0,"",python,selection_command +2309,1583596,"genie.py",9669,0,"",python,selection_command +2310,1583632,"genie.py",9668,0,"",python,selection_command +2311,1583664,"genie.py",9667,0,"",python,selection_command +2312,1583698,"genie.py",9666,0,"",python,selection_command +2313,1583729,"genie.py",9665,0,"",python,selection_command +2314,1583764,"genie.py",9664,0,"",python,selection_command +2315,1583796,"genie.py",9663,0,"",python,selection_command +2316,1583797,"genie.py",9662,0,"",python,selection_command +2317,1583837,"genie.py",9661,0,"",python,selection_command +2318,1583870,"genie.py",9660,0,"",python,selection_command +2319,1583896,"genie.py",9659,0,"",python,selection_command +2320,1583928,"genie.py",9658,0,"",python,selection_command +2321,1583956,"genie.py",9657,0,"",python,selection_command +2322,1583985,"genie.py",9656,0,"",python,selection_command +2323,1584403,"genie.py",9655,0,"",python,selection_command +2324,1584574,"genie.py",9654,0,"",python,selection_command +2325,1584720,"genie.py",9653,0,"",python,selection_command +2326,1585302,"genie.py",9586,0,"",python,selection_command +2327,1590064,"genie.py",9561,0,"",python,selection_mouse +2328,1591220,"genie.py",9561,0,"_",python,content +2329,1591221,"genie.py",9562,0,"",python,selection_keyboard +2330,1595112,"genie.py",9562,0,"B",python,content +2331,1595114,"genie.py",9563,0,"",python,selection_keyboard +2332,1595950,"genie.py",9563,0,"F",python,content +2333,1595951,"genie.py",9564,0,"",python,selection_keyboard +2334,1596476,"genie.py",9563,0,"",python,selection_command +2335,1598014,"genie.py",9588,0,"",python,selection_mouse +2336,1599171,"genie.py",9587,1,"",python,content +2337,1599298,"genie.py",9586,1,"",python,content +2338,1600165,"genie.py",9586,0,"a",python,content +2339,1600166,"genie.py",9587,0,"",python,selection_keyboard +2340,1600553,"genie.py",9586,1,"",python,content +2341,1600666,"genie.py",9586,0,"s",python,content +2342,1600668,"genie.py",9587,0,"",python,selection_keyboard +2343,1600783,"genie.py",9587,0,"k",python,content +2344,1600784,"genie.py",9588,0,"",python,selection_keyboard +2345,1601932,"genie.py",9584,4,"mask_BSN",python,content +2346,1603706,"genie.py",9597,1,"",python,content +2347,1603794,"genie.py",9597,0,"s",python,content +2348,1603795,"genie.py",9598,0,"",python,selection_keyboard +2349,1604093,"genie.py",9599,0,"",python,selection_command +2350,1605575,"genie.py",9609,0,"",python,selection_command +2351,1605739,"genie.py",9608,0,"",python,selection_command 
+2352,1605886,"genie.py",9607,1,"",python,content +2353,1606006,"genie.py",9607,0,"s",python,content +2354,1606007,"genie.py",9608,0,"",python,selection_keyboard +2355,1607309,"genie.py",9663,0,"",python,selection_mouse +2356,1607443,"genie.py",9660,9,"mask_flat",python,selection_mouse +2357,1608234,"genie.py",9735,0,"",python,selection_mouse +2358,1608766,"genie.py",9666,0,"",python,selection_mouse +2359,1608879,"genie.py",9660,9,"mask_flat",python,selection_mouse +2360,1610191,"genie.py",9669,0,"",python,selection_mouse +2361,1610919,"genie.py",9669,0,"_",python,content +2362,1610920,"genie.py",9670,0,"",python,selection_keyboard +2363,1611793,"genie.py",9660,10,"mask_flat_BF",python,content +2364,1614238,"genie.py",9685,0,"_",python,content +2365,1614240,"genie.py",9686,0,"",python,selection_keyboard +2366,1616593,"genie.py",9674,12,"sorted_idxs_BSN",python,content +2367,1617679,"genie.py",9407,0,"",python,selection_mouse +2368,1618888,"genie.py",9406,1,"",python,content +2369,1619050,"genie.py",9406,1,"",python,content +2370,1619818,"genie.py",9406,0,"F",python,content +2371,1619819,"genie.py",9407,0,"",python,selection_keyboard +2372,1621305,"genie.py",9688,0,"",python,selection_mouse +2373,1621891,"genie.py",9687,1,"",python,content +2374,1622027,"genie.py",9686,1,"",python,content +2375,1622210,"genie.py",9686,0,"F",python,content +2376,1622211,"genie.py",9687,0,"",python,selection_keyboard +2377,1623135,"genie.py",9681,0,"",python,selection_mouse +2378,1623255,"genie.py",9673,14,"sorted_idxs_BF",python,selection_mouse +2379,1624129,"genie.py",9844,0,"",python,selection_mouse +2380,1625078,"genie.py",9843,1,"",python,content +2381,1625203,"genie.py",9842,1,"",python,content +2382,1625758,"genie.py",9842,0,"F",python,content +2383,1625758,"genie.py",9843,0,"",python,selection_keyboard +2384,1628523,"genie.py",9726,0,"",python,selection_mouse +2385,1629019,"genie.py",9776,0,"",python,selection_mouse +2386,1629519,"genie.py",9814,0,"",python,selection_mouse +2387,1629904,"genie.py",9813,0,"",python,selection_command +2388,1630623,"genie.py",9777,68,"",python,content +2389,1631810,"genie.py",9777,1,"",python,content +2390,1631811,"genie.py",9789,0,"",python,selection_command +2391,1633587,"genie.py",9776,0,"",python,selection_command +2392,1634225,"genie.py",9701,0,"",python,selection_command +2393,1634313,"genie.py",9625,0,"",python,selection_command +2394,1634507,"genie.py",9551,0,"",python,selection_command +2395,1635798,"genie.py",9306,0,"",python,selection_mouse +2396,1635956,"genie.py",9293,25,"final_token_probs_flat_BF",python,selection_mouse +2397,1642799,"genie_fixed_maskgit.py",0,0,"",python,tab +2398,1642799,"genie_fixed_maskgit.py",9330,0,"",python,selection_mouse +2399,1644209,"genie_fixed_maskgit.py",9338,0,"",python,selection_mouse +2400,1644380,"genie_fixed_maskgit.py",9325,22,"final_token_probs_flat",python,selection_mouse +2401,1646869,"genie.py",0,0,"",python,tab +2402,1646870,"genie.py",9443,0,"",python,selection_mouse +2403,1648122,"genie.py",9442,1,"",python,content +2404,1648278,"genie.py",9441,1,"",python,content +2405,1648424,"genie.py",9440,1,"",python,content +2406,1648552,"genie.py",9439,1,"",python,content +2407,1649233,"genie.py",9439,0,"_",python,content +2408,1649234,"genie.py",9440,0,"",python,selection_keyboard +2409,1650407,"genie.py",9422,18,"final_token_probs_flat_BF",python,content +2410,1652133,"genie.py",9444,0,"",python,selection_mouse +2411,1723326,"genie.py",9432,0,"",python,selection_mouse 
+2412,1723507,"genie.py",9422,25,"final_token_probs_flat_BF",python,selection_mouse +2413,1725161,"genie_fixed_maskgit.py",0,0,"",python,tab +2414,1725195,"genie_fixed_maskgit.py",9332,0,"",python,selection_mouse +2415,1725508,"genie_fixed_maskgit.py",9325,22,"final_token_probs_flat",python,selection_mouse +2416,1739207,"genie_fixed_maskgit.py",9466,0,"",python,selection_mouse +2417,1740046,"genie_fixed_maskgit.py",9476,0,"",python,selection_mouse +2418,1744304,"genie.py",0,0,"",python,tab +2419,1744307,"genie.py",9542,0,"",python,selection_mouse +2420,1745167,"genie.py",9541,1,"",python,content +2421,1745797,"genie.py",9615,0,"",python,selection_command +2422,1745995,"genie.py",9689,0,"",python,selection_command +2423,1746233,"genie.py",9765,0,"",python,selection_command +2424,1746324,"genie.py",9779,0,"",python,selection_command +2425,1746691,"genie.py",9780,0,"",python,selection_command +2426,1747064,"genie.py",9858,0,"",python,selection_command +2427,1748637,"genie.py",9779,0,"",python,selection_mouse +2428,1758653,"genie.py",0,0,"",python,tab +2429,1758750,"genie.py",25,0,"",python,selection_command +2430,1819765,"TERMINAL",0,0,"srun",,terminal_focus +2431,1819978,"TERMINAL",0,0,"\r(jafar) [tum_cte0515@hkn0802 jafar]$ ",,terminal_output +2432,1821773,"TERMINAL",0,0,"[?25lgi[?25h",,terminal_output +2433,1821891,"TERMINAL",0,0,"[?25li[?25h",,terminal_output +2434,1822422,"TERMINAL",0,0,"[?25lr[?25h",,terminal_output +2435,1822493,"TERMINAL",0,0,"[?25l [?25h",,terminal_output +2436,1822684,"TERMINAL",0,0,"[?25ld[?25h",,terminal_output +2437,1822787,"TERMINAL",0,0,"[?25lu[?25h",,terminal_output +2438,1822882,"TERMINAL",0,0,"[?25lf[?25h",,terminal_output +2439,1822984,"TERMINAL",0,0,"[?25lf[?25h",,terminal_output +2440,1823056,"TERMINAL",0,0,"\r\n[?2004l\rbash: gir: command not found...\r\n",,terminal_output +2441,1823960,"TERMINAL",0,0,"^C\r\n]0;tum_cte0515@hkn0802:~/Projects/jafar[?2004h(jafar) [tum_cte0515@hkn0802 jafar]$ ",,terminal_output +2442,1825240,"TERMINAL",0,0,"g",,terminal_output +2443,1825326,"TERMINAL",0,0,"[?25li[?25h",,terminal_output +2444,1825525,"TERMINAL",0,0,"[?25lt[?25h",,terminal_output +2445,1825583,"TERMINAL",0,0,"[?25l [?25h",,terminal_output +2446,1825863,"TERMINAL",0,0,"[?25ld[?25h",,terminal_output +2447,1826273,"TERMINAL",0,0,"[?25li[?25h",,terminal_output +2448,1826334,"TERMINAL",0,0,"[?25lf[?25h",,terminal_output +2449,1826534,"TERMINAL",0,0,"[?25lf[?25h",,terminal_output +2450,1826595,"TERMINAL",0,0,"[?25l [?25h",,terminal_output +2451,1827220,"TERMINAL",0,0,"[?25l>[?25h",,terminal_output +2452,1827304,"TERMINAL",0,0,"[?25l [?25h",,terminal_output +2453,1827754,"TERMINAL",0,0,"[?25ld[?25h",,terminal_output +2454,1827806,"TERMINAL",0,0,"[?25li[?25h",,terminal_output +2455,1828055,"TERMINAL",0,0,"[?25lf[?25h",,terminal_output +2456,1828178,"TERMINAL",0,0,"[?25lf[?25h",,terminal_output +2457,1828232,"TERMINAL",0,0,"[?25l-[?25h",,terminal_output +2458,1829380,"TERMINAL",0,0,"[?25l-[?25h",,terminal_output +2459,1829539,"TERMINAL",0,0,"[?25ld[?25h",,terminal_output +2460,1829649,"TERMINAL",0,0,"[?25li[?25h",,terminal_output +2461,1829802,"TERMINAL",0,0,"[?25lf[?25h",,terminal_output +2462,1829905,"TERMINAL",0,0,"[?25lf[?25h",,terminal_output +2463,1830030,"TERMINAL",0,0,"\r\n[?2004l\r",,terminal_output +2464,1830206,"TERMINAL",0,0,"]0;tum_cte0515@hkn0802:~/Projects/jafar[?2004h(jafar) [tum_cte0515@hkn0802 jafar]$ ",,terminal_output +2465,1831944,"TERMINAL",0,0,"git diff > diff-diff",,terminal_output +2466,1833140,"TERMINAL",0,0,"[?25l-diff[?25h",,terminal_output 
+2467,1833618,"TERMINAL",0,0,".diff",,terminal_output +2468,1833721,"TERMINAL",0,0,"\r\n[?2004l\r]0;tum_cte0515@hkn0802:~/Projects/jafar[?2004h(jafar) [tum_cte0515@hkn0802 jafar]$ ",,terminal_output +2469,1837617,"diff.diff",0,0,"",diff,tab +2470,1840155,"diff.log",0,0,"diff --git a/genie.py b/genie.py\nindex 0e66676..8186d03 100644\n--- a/genie.py\n+++ b/genie.py\n@@ -7,7 +7,7 @@ import flax.linen as nn\n from flax.training.train_state import TrainState\n import orbax.checkpoint as ocp\n \n-from models.dynamics import DynamicsMaskGIT\n+from models.dynamics import DynamicsMaskGIT, DynamicsAutoregressive\n from models.lam import LatentActionModel\n from models.tokenizer import TokenizerVQVAE\n \n@@ -38,6 +38,7 @@ class Genie(nn.Module):\n dyna_dim: int\n dyna_num_blocks: int\n dyna_num_heads: int\n+ use_maskgit: bool\n param_dtype: jnp.dtype\n dtype: jnp.dtype\n dropout: float = 0.0\n@@ -70,16 +71,28 @@ class Genie(nn.Module):\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n )\n- self.dynamics = DynamicsMaskGIT(\n- model_dim=self.dyna_dim,\n- num_latents=self.num_patch_latents,\n- num_blocks=self.dyna_num_blocks,\n- num_heads=self.dyna_num_heads,\n- dropout=self.dropout,\n- mask_limit=self.mask_limit,\n- param_dtype=self.param_dtype,\n- dtype=self.dtype,\n- )\n+\n+ if self.use_maskgit:\n+ self.dynamics = DynamicsMaskGIT(\n+ model_dim=self.dyna_dim,\n+ num_latents=self.num_patch_latents,\n+ num_blocks=self.dyna_num_blocks,\n+ num_heads=self.dyna_num_heads,\n+ dropout=self.dropout,\n+ mask_limit=self.mask_limit,\n+ param_dtype=self.param_dtype,\n+ dtype=self.dtype,\n+ ) \n+ else:\n+ self.dynamics = DynamicsAutoregressive(\n+ model_dim=self.dyna_dim,\n+ num_latents=self.num_patch_latents,\n+ num_blocks=self.dyna_num_blocks,\n+ num_heads=self.dyna_num_heads,\n+ dropout=self.dropout,\n+ param_dtype=self.param_dtype,\n+ dtype=self.dtype,\n+ )\n \n def __call__(self, batch: Dict[str, Any], training: bool = True) -> Dict[str, Any]:\n tokenizer_outputs = self.tokenizer.vq_encode(batch[""videos""], training=False)\ndiff --git a/models/dynamics.py b/models/dynamics.py\nindex 8b183dc..76af7a1 100644\n--- a/models/dynamics.py\n+++ b/models/dynamics.py\n@@ -28,6 +28,7 @@ class DynamicsMaskGIT(nn.Module):\n self.dropout,\n self.param_dtype,\n self.dtype,\n+ spacial_bert=True,\n )\n self.patch_embed = nn.Embed(self.num_latents, self.model_dim)\n self.mask_token = self.param(\n@@ -58,3 +59,41 @@ class DynamicsMaskGIT(nn.Module):\n vid_embed += jnp.pad(act_embed, ((0, 0), (1, 0), (0, 0), (0, 0)))\n logits = self.dynamics(vid_embed)\n return dict(token_logits=logits, mask=mask)\n+\n+\n+class DynamicsAutoregressive(nn.Module):\n+ """"""Autoregressive (causal) dynamics model""""""\n+\n+ model_dim: int\n+ num_latents: int\n+ num_blocks: int\n+ num_heads: int\n+ dropout: float\n+ param_dtype: jnp.dtype\n+ dtype: jnp.dtype\n+\n+ def setup(self):\n+ self.dynamics = STTransformer(\n+ self.model_dim,\n+ self.num_latents,\n+ self.num_blocks,\n+ self.num_heads,\n+ self.dropout,\n+ self.param_dtype,\n+ self.dtype,\n+ spacial_bert=False,\n+ )\n+ self.patch_embed = nn.Embed(self.num_latents, self.model_dim)\n+ self.action_up = nn.Dense(\n+ self.model_dim,\n+ param_dtype=self.param_dtype,\n+ dtype=self.dtype,\n+ )\n+\n+ def __call__(self, batch: Dict[str, Any], training: bool = True) -> Dict[str, Any]:\n+ vid_embed = self.patch_embed(batch[""video_tokens""])\n+ act_embed = self.action_up(batch[""latent_actions""])\n+ vid_embed += jnp.pad(act_embed, ((0, 0), (1, 0), (0, 0), (0, 0)))\n+ logits = 
self.dynamics(vid_embed)\n+ mask = jnp.ones(vid_embed.shape[:-1])\n+ return dict(token_logits=logits, mask=mask)\n\ No newline at end of file\ndiff --git a/train_dynamics.py b/train_dynamics.py\ndeleted file mode 100644\nindex a8e6a2a..0000000\n--- a/train_dynamics.py\n+++ /dev/null\n@@ -1,373 +0,0 @@\n-from dataclasses import dataclass, field\n-import os\n-\n-import einops\n-from flax.training.train_state import TrainState\n-from jax.sharding import Mesh, PartitionSpec, NamedSharding\n-from jax.experimental.mesh_utils import create_device_mesh\n-import optax\n-import orbax.checkpoint as ocp\n-import numpy as np\n-import dm_pix as pix\n-import jax\n-import jax.numpy as jnp\n-import tyro\n-import wandb\n-import grain\n-\n-from genie import Genie, restore_genie_components\n-from utils.dataloader import get_dataloader\n-from utils.lr_utils import get_lr_schedule\n-from utils.parameter_utils import count_parameters_by_component\n-\n-@dataclass\n-class Args:\n- # Experiment\n- num_steps: int = 200_000\n- seed: int = 0\n- seq_len: int = 16\n- image_channels: int = 3\n- image_height: int = 90\n- image_width: int = 160\n- data_dir: str = """"\n- save_ckpt: bool = False\n- restore_ckpt: bool = False\n- # Optimization\n- batch_size: int = 36\n- init_lr: float = 0.0\n- max_lr: float = 3e-5\n- decay_end: float = 0.0\n- wsd_decay_steps: int = 10000 # NOTE: wsd_decay_steps will only be used when using a wsd-schedule\n- warmup_steps: int = 5000\n- lr_schedule : str = ""wsd"" # supported options: wsd, cos\n- # Tokenizer\n- tokenizer_dim: int = 512\n- latent_patch_dim: int = 32\n- num_patch_latents: int = 1024\n- patch_size: int = 4\n- tokenizer_num_blocks: int = 8\n- tokenizer_num_heads: int = 8\n- tokenizer_checkpoint: str = """"\n- # LAM\n- lam_dim: int = 512\n- latent_action_dim: int = 32\n- num_latent_actions: int = 6\n- lam_patch_size: int = 16\n- lam_num_blocks: int = 8\n- lam_num_heads: int = 8\n- lam_checkpoint: str = """"\n- # Dynamics\n- dyna_dim: int = 512\n- dyna_num_blocks: int = 12\n- dyna_num_heads: int = 8\n- dropout: float = 0.0\n- mask_limit: float = 0.5\n- param_dtype: jnp.dtype = jnp.float32\n- dtype: jnp.dtype = jnp.bfloat16\n- # Logging\n- log: bool = False\n- entity: str = """"\n- project: str = """"\n- name: str = ""train_dynamics""\n- tags: list[str] = field(default_factory=lambda: [""dynamics""])\n- log_interval: int = 5\n- log_image_interval: int = 250\n- ckpt_dir: str = """"\n- log_checkpoint_interval: int = 25000\n- log_checkpoint_keep_period: int = 20000\n- log_gradients: bool = False\n- wandb_id: str = """"\n-\n-\n-args = tyro.cli(Args)\n-\n-\n-def dynamics_loss_fn(params, state, inputs):\n- """"""Compute masked dynamics loss""""""\n- inputs[""videos""] = inputs[""videos""].astype(args.dtype) / 255.0\n- outputs = state.apply_fn(\n- params,\n- inputs,\n- training=True,\n- rngs={""params"": inputs[""rng""], ""dropout"": inputs[""dropout_rng""]},\n- )\n- mask = outputs[""mask""]\n- outputs[""token_logits""] = outputs[""token_logits""].astype(jnp.float32)\n- ce_loss = optax.softmax_cross_entropy_with_integer_labels(\n- outputs[""token_logits""], outputs[""video_tokens""]\n- )\n- ce_loss = (mask * ce_loss).sum() / mask.sum()\n- acc = outputs[""token_logits""].argmax(-1) == outputs[""video_tokens""]\n- acc = (mask * acc).sum() / mask.sum()\n- select_probs = jax.nn.softmax(outputs[""token_logits""])\n- gt = inputs[""videos""].clip(0, 1).reshape(-1, *inputs[""videos""].shape[2:])\n- recon = outputs[""recon""].clip(0, 1).reshape(-1, *outputs[""recon""].shape[2:])\n- psnr = 
pix.psnr(gt, recon).mean() # type: ignore\n- ssim = pix.ssim(gt, recon).mean() # type: ignore\n- _, index_counts_lam = jnp.unique_counts(\n- jnp.ravel(outputs[""lam_indices""]), size=args.num_latent_actions, fill_value=0\n- )\n- _, index_counts_tokenizer = jnp.unique_counts(\n- jnp.ravel(outputs[""video_tokens""]), size=args.num_patch_latents, fill_value=0\n- )\n- codebook_usage_lam = (index_counts_lam != 0).mean()\n- codebook_usage_tokenizer = (index_counts_tokenizer != 0).mean()\n- metrics = dict(\n- cross_entropy_loss=ce_loss,\n- masked_token_accuracy=acc,\n- select_logit=outputs[""token_logits""].max(-1).mean(),\n- select_p=select_probs.max(-1).mean(),\n- entropy=jax.scipy.special.entr(select_probs).sum(-1).mean(),\n- psnr=psnr,\n- ssim=ssim,\n- codebook_usage_lam=codebook_usage_lam,\n- codebook_usage_tokenizer=codebook_usage_tokenizer,\n- )\n- return ce_loss, (outputs[""recon""], metrics)\n-\n-\n-@jax.jit\n-def train_step(state, inputs):\n- """"""Update state and compute metrics""""""\n- grad_fn = jax.value_and_grad(dynamics_loss_fn, has_aux=True, allow_int=True)\n- (loss, (recon, metrics)), grads = grad_fn(state.params, state, inputs)\n- state = state.apply_gradients(grads=grads)\n- if args.log_gradients:\n- metrics[""gradients_std/""] = jax.tree.map(\n- lambda x: x.std(), grads[""params""][""dynamics""]\n- )\n- return state, loss, recon, metrics\n-\n-\n-if __name__ == ""__main__"":\n- jax.distributed.initialize()\n- num_devices = jax.device_count()\n- if num_devices == 0:\n- raise ValueError(""No JAX devices found."")\n- print(f""Running on {num_devices} devices."")\n-\n- if args.batch_size % num_devices != 0:\n- raise ValueError(\n- f""Global batch size {args.batch_size} must be divisible by ""\n- f""number of devices {num_devices}.""\n- )\n-\n- per_device_batch_size_for_init = args.batch_size // num_devices\n-\n- rng = jax.random.PRNGKey(args.seed)\n-\n- # --- Initialize model ---\n- genie = Genie(\n- # Tokenizer\n- in_dim=args.image_channels,\n- tokenizer_dim=args.tokenizer_dim,\n- latent_patch_dim=args.latent_patch_dim,\n- num_patch_latents=args.num_patch_latents,\n- patch_size=args.patch_size,\n- tokenizer_num_blocks=args.tokenizer_num_blocks,\n- tokenizer_num_heads=args.tokenizer_num_heads,\n- # LAM\n- lam_dim=args.lam_dim,\n- latent_action_dim=args.latent_action_dim,\n- num_latent_actions=args.num_latent_actions,\n- lam_patch_size=args.lam_patch_size,\n- lam_num_blocks=args.lam_num_blocks,\n- lam_num_heads=args.lam_num_heads,\n- lam_co_train=not args.lam_checkpoint,\n- # Dynamics\n- dyna_dim=args.dyna_dim,\n- dyna_num_blocks=args.dyna_num_blocks,\n- dyna_num_heads=args.dyna_num_heads,\n- dropout=args.dropout,\n- mask_limit=args.mask_limit,\n- param_dtype=args.param_dtype,\n- dtype=args.dtype,\n- )\n- rng, _rng = jax.random.split(rng)\n- image_shape = (args.image_height, args.image_width, args.image_channels)\n- dummy_inputs = dict(\n- videos=jnp.zeros(\n- (per_device_batch_size_for_init, args.seq_len, *image_shape),\n- dtype=args.dtype,\n- ),\n- action=jnp.zeros(\n- (per_device_batch_size_for_init, args.seq_len), dtype=args.dtype\n- ),\n- mask_rng=_rng,\n- )\n- rng, _rng = jax.random.split(rng)\n- init_params = genie.init(_rng, dummy_inputs)\n-\n- param_counts = count_parameters_by_component(init_params)\n-\n- if args.log and jax.process_index() == 0:\n- wandb_init_kwargs = {\n- ""entity"": args.entity,\n- ""project"": args.project,\n- ""name"": args.name,\n- ""tags"": args.tags,\n- ""group"": ""debug"",\n- ""config"": args,\n- }\n-\n- if args.wandb_id:\n- 
wandb_init_kwargs.update(\n- {\n- ""id"": args.wandb_id,\n- ""resume"": ""allow"",\n- }\n- )\n- wandb.init(**wandb_init_kwargs)\n-\n- wandb.config.update({""model_param_count"": param_counts})\n-\n- print(""Parameter counts:"")\n- print(param_counts)\n-\n- # --- Initialize optimizer ---\n- lr_schedule = get_lr_schedule(args.lr_schedule, \n- args.init_lr, \n- args.max_lr, \n- args.decay_end, \n- args.num_steps, \n- args.warmup_steps, \n- args.wsd_decay_steps)\n- tx = optax.adamw(learning_rate=lr_schedule, b1=0.9, b2=0.9, weight_decay=1e-4, mu_dtype=args.dtype)\n- train_state = TrainState.create(apply_fn=genie.apply, params=init_params, tx=tx)\n-\n- device_mesh_arr = create_device_mesh((num_devices,))\n- mesh = Mesh(devices=device_mesh_arr, axis_names=(""data"",))\n-\n- replicated_sharding = NamedSharding(mesh, PartitionSpec())\n- videos_sharding = NamedSharding(mesh, PartitionSpec(""data"", None, None, None, None))\n- train_state = jax.device_put(train_state, replicated_sharding)\n-\n- # --- Initialize checkpoint manager ---\n- step = 0\n- handler_registry = ocp.handlers.DefaultCheckpointHandlerRegistry()\n- handler_registry.add(\n- ""model_state"", ocp.args.StandardSave, ocp.handlers.StandardCheckpointHandler\n- )\n- handler_registry.add(\n- ""model_state"", ocp.args.StandardRestore, ocp.handlers.StandardCheckpointHandler\n- )\n- handler_registry.add(""dataloader_state"", grain.checkpoint.CheckpointSave, grain.checkpoint.CheckpointHandler) # type: ignore\n- handler_registry.add(""dataloader_state"", grain.checkpoint.CheckpointRestore, grain.checkpoint.CheckpointHandler) # type: ignore\n-\n- checkpoint_options = ocp.CheckpointManagerOptions(\n- save_interval_steps=args.log_checkpoint_interval,\n- max_to_keep=3,\n- keep_period=args.log_checkpoint_keep_period,\n- step_format_fixed_length=6,\n- cleanup_tmp_directories=True,\n- )\n-\n- checkpoint_manager = ocp.CheckpointManager(\n- args.ckpt_dir,\n- options=checkpoint_options,\n- handler_registry=handler_registry,\n- )\n-\n- # --- Create DataLoaderIterator from dataloader ---\n- array_record_files = [\n- os.path.join(args.data_dir, x)\n- for x in os.listdir(args.data_dir)\n- if x.endswith("".array_record"")\n- ]\n- grain_dataloader = get_dataloader(\n- array_record_files,\n- args.seq_len,\n- # NOTE: We deliberately pass the global batch size\n- # The dataloader shards the dataset across all processes\n- args.batch_size,\n- *image_shape,\n- num_workers=8,\n- prefetch_buffer_size=1,\n- seed=args.seed,\n- )\n- initial_state = grain_dataloader._create_initial_state()\n- grain_iterator = grain.DataLoaderIterator(grain_dataloader, initial_state)\n-\n- # --- Restore checkpoint ---\n- if args.restore_ckpt:\n- # Restore full dynamics model\n- abstract_train_state = jax.tree_util.tree_map(\n- ocp.utils.to_shape_dtype_struct, train_state\n- )\n- restored = checkpoint_manager.restore(\n- checkpoint_manager.latest_step(),\n- args=ocp.args.Composite(\n- model_state=ocp.args.StandardRestore(abstract_train_state),\n- dataloader_state=grain.checkpoint.CheckpointRestore(grain_iterator),\n- ),\n- )\n- train_state = restored[""model_state""]\n- grain_iterator = restored[""dataloader_state""]\n- step = checkpoint_manager.latest_step() or 0\n- print(f""Restored dataloader and model state from step {step}"")\n- else:\n- # Restore from pre-trained tokenizer (and LAM)\n- train_state = restore_genie_components(\n- train_state, replicated_sharding, grain_iterator, dummy_inputs, rng, args\n- )\n-\n- # --- TRAIN LOOP ---\n- dataloader = 
(jax.make_array_from_process_local_data(videos_sharding, elem) for elem in grain_iterator) # type: ignore\n- while step < args.num_steps:\n- for videos in dataloader:\n- # --- Train step ---\n- rng, _rng, _rng_dropout, _rng_mask = jax.random.split(rng, 4)\n-\n- inputs = dict(\n- videos=videos,\n- rng=_rng,\n- dropout_rng=_rng_dropout,\n- mask_rng=_rng_mask,\n- )\n- train_state, loss, recon, metrics = train_step(train_state, inputs)\n- metrics[""lr""] = lr_schedule(step)\n- print(f""Step {step}, loss: {loss}"")\n- step += 1\n-\n- # --- Logging ---\n- if args.log:\n- if step % args.log_interval == 0 and jax.process_index() == 0:\n- wandb.log(\n- {\n- ""loss"": loss,\n- ""step"": step,\n- **metrics,\n- }\n- )\n- if step % args.log_image_interval == 0:\n- gt_seq = inputs[""videos""][0]\n- recon_seq = recon[0].clip(0, 1)\n- comparison_seq = jnp.concatenate((gt_seq, recon_seq), axis=1)\n- comparison_seq = einops.rearrange(\n- comparison_seq * 255, ""t h w c -> h (t w) c""\n- )\n- if jax.process_index() == 0:\n- log_images = dict(\n- image=wandb.Image(np.asarray(gt_seq[args.seq_len - 1])),\n- recon=wandb.Image(np.asarray(recon_seq[args.seq_len - 1])),\n- true_vs_recon=wandb.Image(\n- np.asarray(comparison_seq.astype(np.uint8))\n- ),\n- )\n- wandb.log(log_images)\n- # --- Checkpointing ---\n- if args.save_ckpt and step % args.log_checkpoint_interval == 0:\n- checkpoint_manager.save(\n- step,\n- args=ocp.args.Composite(\n- model_state=ocp.args.StandardSave(train_state),\n- dataloader_state=grain.checkpoint.CheckpointSave(\n- grain_iterator\n- ),\n- ),\n- )\n- print(f""Saved checkpoint at step {step}"")\n- if step >= args.num_steps:\n- break\n-\n- checkpoint_manager.close()\ndiff --git a/utils/nn.py b/utils/nn.py\nindex b7bec9f..3b64fa0 100644\n--- a/utils/nn.py\n+++ b/utils/nn.py\n@@ -26,6 +26,112 @@ class PositionalEncoding(nn.Module):\n x = x + self.pe[: x.shape[2]]\n return x\n \n+# class STBlock2(nn.Module):\n+ # dim: int\n+ # num_heads: int\n+ # dropout: float\n+ # param_dtype: jnp.dtype\n+ # dtype: jnp.dtype\n+\n+ # @nn.remat\n+ # @nn.compact\n+ # def __call__(self, x: jax.Array) -> jax.Array:\n+ # # --- Spatial attention ---\n+ # z = PositionalEncoding(self.dim)(x)\n+ # z = nn.LayerNorm(\n+ # param_dtype=self.param_dtype,\n+ # dtype=self.dtype,\n+ # )(z)\n+ # causal_mask = jnp.tri(z.shape[-2])\n+ # z = nn.MultiHeadAttention(\n+ # num_heads=self.num_heads,\n+ # qkv_features=self.dim,\n+ # dropout_rate=self.dropout,\n+ # param_dtype=self.param_dtype,\n+ # dtype=self.dtype,\n+ # )(z, mask=causal_mask)\n+ # x = x + z\n+\n+ # # --- Temporal attention ---\n+ # x = x.swapaxes(1, 2)\n+ # z = PositionalEncoding(self.dim)(x)\n+ # z = nn.LayerNorm(\n+ # param_dtype=self.param_dtype,\n+ # dtype=self.dtype,\n+ # )(z)\n+ # causal_mask = jnp.tri(z.shape[-2])\n+ # z = nn.MultiHeadAttention(\n+ # num_heads=self.num_heads,\n+ # qkv_features=self.dim,\n+ # dropout_rate=self.dropout,\n+ # param_dtype=self.param_dtype,\n+ # dtype=self.dtype,\n+ # )(z, mask=causal_mask)\n+ # x = x + z\n+ # x = x.swapaxes(1, 2)\n+\n+ # # --- Feedforward ---\n+ # z = nn.LayerNorm(\n+ # param_dtype=self.param_dtype,\n+ # dtype=self.dtype,\n+ # )(x)\n+ # # FIXME (f.srambical): Here, the attention hidden dimension is the same as the FFN's. 
Usually, FFN hidden dimension is 4x model_dim\n+ # z = nn.Dense(\n+ # self.dim,\n+ # param_dtype=self.param_dtype,\n+ # dtype=self.dtype,\n+ # )(z)\n+ # z = nn.gelu(z)\n+ # x = x + z\n+\n+ # return x\n+\n+# class CausalTransformer(nn.Module):\n+ # model_dim: int\n+ # out_dim: int\n+ # num_blocks: int\n+ # num_heads: int\n+ # dropout: float\n+ # param_dtype: jnp.dtype\n+ # dtype: jnp.dtype\n+\n+ # @nn.compact\n+ # def __call__(self, x: jax.Array) -> jax.Array:\n+ # # Input projection and normalization\n+ # x = nn.Sequential(\n+ # [\n+ # nn.LayerNorm(\n+ # param_dtype=self.param_dtype,\n+ # dtype=self.dtype,\n+ # ),\n+ # nn.Dense(self.model_dim,\n+ # param_dtype=self.param_dtype,\n+ # dtype=self.dtype,\n+ # ),\n+ # nn.LayerNorm(\n+ # param_dtype=self.param_dtype,\n+ # dtype=self.dtype,\n+ # ),\n+ # ]\n+ # )(x)\n+ # # Causal transformer blocks\n+ # for _ in range(self.num_blocks):\n+ # x = STBlock2(\n+ # dim=self.model_dim,\n+ # num_heads=self.num_heads,\n+ # dropout=self.dropout,\n+ # param_dtype=self.param_dtype,\n+ # dtype=self.dtype,\n+ # )(x)\n+\n+ # # Output projection\n+ # x = nn.Dense(\n+ # self.out_dim,\n+ # param_dtype=self.param_dtype,\n+ # dtype=self.dtype,\n+ # )(x)\n+ # return x # (B, T, E)\n+\n \n class STBlock(nn.Module):\n dim: int\n@@ -33,6 +139,7 @@ class STBlock(nn.Module):\n dropout: float\n param_dtype: jnp.dtype\n dtype: jnp.dtype\n+ spacial_bert: bool = True\n \n @nn.remat\n @nn.compact\n@@ -43,13 +150,14 @@ class STBlock(nn.Module):\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n )(z)\n+ spacial_mask = None if self.spacial_bert else jnp.tri(z.shape[-2])\n z = nn.MultiHeadAttention(\n num_heads=self.num_heads,\n qkv_features=self.dim,\n dropout_rate=self.dropout,\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n- )(z)\n+ )(z, mask=spacial_mask)\n x = x + z\n \n # --- Temporal attention ---\n@@ -95,6 +203,7 @@ class STTransformer(nn.Module):\n dropout: float\n param_dtype: jnp.dtype\n dtype: jnp.dtype\n+ spacial_bert: bool = True\n \n @nn.compact\n def __call__(self, x: jax.Array) -> jax.Array:\n@@ -121,6 +230,7 @@ class STTransformer(nn.Module):\n dropout=self.dropout,\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n+ spacial_bert=self.spacial_bert,\n )(x)\n x = nn.Dense(\n self.out_dim,\n",log,tab +2471,1845728,"TERMINAL",0,0,"[?25lgi[?25h[?25li[?25h",,terminal_output +2472,1845890,"TERMINAL",0,0,"[?25lt[?25h[?25l [?25h",,terminal_output +2473,1846110,"TERMINAL",0,0,"[?25ls[?25h",,terminal_output +2474,1846265,"TERMINAL",0,0,"[?25lt[?25h",,terminal_output +2475,1846412,"TERMINAL",0,0,"[?25la[?25h",,terminal_output +2476,1846470,"TERMINAL",0,0,"[?25lt[?25h",,terminal_output +2477,1846713,"TERMINAL",0,0,"[?25lu[?25h[?25ls[?25h",,terminal_output +2478,1846874,"TERMINAL",0,0,"\r\n[?2004l\rOn branch maskgit-sampling-iterative-unmasking-fix\r\nChanges to be committed:\r\n (use ""git restore --staged ..."" to unstage)\r\n\tmodified: genie.py\r\n\r\nUntracked files:\r\n (use ""git add ..."" to include in what will be committed)\r\n\tgenie_fixed_maskgit.py\r\n\tlogs/\r\n\toverfit_dir.zip\r\n\tread_tf_record.py\r\n\trequirements-franz.txt\r\n\tscripts_cremers/\r\n\tscripts_horeka/\r\n\tslurm-3373409.out\r\n\tslurm-3373410.out\r\n\tslurm-3379613.out\r\n\tslurm-3379615.out\r\n\tslurm-3379616.out\r\n\tslurm/\r\n\tutils/logger_bak.py\r\n\tutils/nn_bak.py\r\n\tutils/visualizer.py\r\n\tweekend-job-requeuer.sh\r\n\tweekend-job-starter.sh\r\n\r\n]0;tum_cte0515@hkn0802:~/Projects/jafar[?2004h(jafar) [tum_cte0515@hkn0802 jafar]$ ",,terminal_output 
+2479,1848407,"TERMINAL",0,0,"q",,terminal_output +2480,1849767,"TERMINAL",0,0,"[?25lg[?25h[?25li[?25h",,terminal_output +2481,1850318,"TERMINAL",0,0,"[?25lt[?25h",,terminal_output +2482,1850356,"TERMINAL",0,0,"[?25l [?25h",,terminal_output +2483,1850590,"TERMINAL",0,0,"[?25lc[?25h",,terminal_output +2484,1850748,"TERMINAL",0,0,"[?25lo[?25h",,terminal_output +2485,1850932,"TERMINAL",0,0,"[?25lm[?25h",,terminal_output +2486,1851097,"TERMINAL",0,0,"[?25lm[?25h",,terminal_output +2487,1852363,"TERMINAL",0,0,"",,terminal_output +2488,1854136,"TERMINAL",0,0,"[?25lg[?25h",,terminal_output +2489,1854386,"TERMINAL",0,0,"[?25lt[?25h",,terminal_output +2490,1855264,"TERMINAL",0,0,"[?25ld[?25h",,terminal_output +2491,1855362,"TERMINAL",0,0,"[?25li[?25h",,terminal_output +2492,1855450,"TERMINAL",0,0,"[?25lf[?25h",,terminal_output +2493,1855692,"TERMINAL",0,0,"[?25lf[?25h\r\n[?2004l\r[?1h=\r\r[?1l>]0;tum_cte0515@hkn0802:~/Projects/jafar[?2004h(jafar) [tum_cte0515@hkn0802 jafar]$ ",,terminal_output +2494,1859977,"TERMINAL",0,0,"git restore --staged",,terminal_output +2495,1860416,"TERMINAL",0,0,"git restore --staged ",,terminal_output +2496,1861539,"TERMINAL",0,0,"[?25lg[?25h",,terminal_output +2497,1861621,"TERMINAL",0,0,"[?25le[?25h",,terminal_output +2498,1861776,"TERMINAL",0,0,"n",,terminal_output +2499,1862752,"TERMINAL",0,0,"[?25li[?25he",,terminal_output +2500,1863556,"TERMINAL",0,0,"[?25l.[?25h",,terminal_output +2501,1863612,"TERMINAL",0,0,"py ",,terminal_output +2502,1864099,"TERMINAL",0,0,"\r\n[?2004l\r]0;tum_cte0515@hkn0802:~/Projects/jafar[?2004h(jafar) [tum_cte0515@hkn0802 jafar]$ ",,terminal_output +2503,1866437,"TERMINAL",0,0,"g",,terminal_output +2504,1866699,"TERMINAL",0,0,"[?25li[?25h",,terminal_output +2505,1866954,"TERMINAL",0,0,"[?25lt[?25h",,terminal_output +2506,1867036,"TERMINAL",0,0,"[?25l [?25h",,terminal_output +2507,1867218,"TERMINAL",0,0,"[?25ld[?25h",,terminal_output +2508,1867300,"TERMINAL",0,0,"[?25li[?25h",,terminal_output +2509,1867493,"TERMINAL",0,0,"[?25lf[?25h",,terminal_output +2510,1867668,"TERMINAL",0,0,"[?25lf[?25h",,terminal_output +2511,1867760,"TERMINAL",0,0,"[?25l [?25h",,terminal_output +2512,1867969,"TERMINAL",0,0,"[?25l-[?25h",,terminal_output +2513,1868218,"TERMINAL",0,0,"[?25l>[?25h",,terminal_output +2514,1868867,"TERMINAL",0,0,"[?25l>[?25h",,terminal_output +2515,1869084,"TERMINAL",0,0,"[?25l [?25h",,terminal_output +2516,1870363,"TERMINAL",0,0,"[?25ld[?25h",,terminal_output +2517,1870460,"TERMINAL",0,0,"[?25li[?25h",,terminal_output +2518,1870585,"TERMINAL",0,0,"[?25lf[?25h",,terminal_output +2519,1870810,"TERMINAL",0,0,"[?25lf[?25h[?25l-[?25h",,terminal_output +2520,1871993,"TERMINAL",0,0,"[?25l.[?25h",,terminal_output +2521,1872486,"TERMINAL",0,0,"[?25ld[?25h",,terminal_output +2522,1872549,"TERMINAL",0,0,"[?25li[?25h",,terminal_output +2523,1872733,"TERMINAL",0,0,"[?25lf[?25h",,terminal_output +2524,1872834,"TERMINAL",0,0,"[?25lf[?25h",,terminal_output +2525,1872890,"TERMINAL",0,0,"\r\n[?2004l\r",,terminal_output +2526,1873002,"TERMINAL",0,0,"]0;tum_cte0515@hkn0802:~/Projects/jafar[?2004h(jafar) [tum_cte0515@hkn0802 jafar]$ ",,terminal_output +2527,1874124,"diff.diff",0,0,"",diff,tab +2528,1875681,"diff.diff",0,32,"diff --git a/genie.py b/genie.py",diff,selection_command +2529,1875918,"diff.diff",0,2102,"diff --git a/genie.py b/genie.py\nindex 4c86c01..565ea31 100644\n--- a/genie.py\n+++ b/genie.py\n@@ -1,6 +1,6 @@\n from typing import Dict\n \n-import optax\n+import einops\n import jax\n import jax.numpy as jnp\n import flax.nnx as nnx\n@@ -179,6 
+179,7 @@ class Genie(nnx.Module):\n H: height\n W: width\n E: B * (S - 1)\n+ F: S * N\n """"""\n # --- Encode videos and actions ---\n videos_BTHWC = batch[""videos""]\n@@ -230,10 +231,13 @@ class Genie(nnx.Module):\n \n # --- Update mask ---\n num_unmasked_tokens = jnp.round(N * (1.0 - unmasked_ratio)).astype(int)\n- idx_mask_N = jnp.arange(final_token_probs_BSN.shape[-1]) > num_unmasked_tokens\n- sorted_idxs_BSN = jnp.argsort(final_token_probs_BSN, axis=-1, descending=True)\n+ idx_mask_N = jnp.arange(final_token_probs_BSN.shape[-1]) <= N - num_unmasked_tokens\n+ final_token_probs_flat_BF = einops.rearrange(final_token_probs_BSN, ""b s n -> b (s n)"")\n+ sorted_idxs_BF = jnp.argsort(final_token_probs_flat_BF, axis=-1)\n mask_update_fn = jax.vmap(lambda msk, ids: msk.at[ids].set(idx_mask_N))\n- new_mask_BSN = mask_update_fn(mask_BSN, sorted_idxs_BSN)\n+ mask_flat_BF = einops.rearrange(mask_BSN, ""b s n -> b (s n)"")\n+ new_mask_flat_BF = mask_update_fn(mask_flat_BF, sorted_idxs_BF)\n+ new_mask_BSN = einops.rearrange(new_mask_flat_BF, ""b (s n) -> b s n"", n=N)\n \n new_carry = (rng, token_idxs_BSN, new_mask_BSN, action_tokens_EL)\n return new_carry, None\n@@ -244,8 +248,8 @@ class Genie(nnx.Module):\n rng, current_token_idxs_BSN = carry\n rng, step_rng = jax.random.split(rng)\n \n- # Mask current and future frames (i.e., t >= step_t)\n- mask_S = jnp.arange(seq_len) >= step_t\n+ # Mask current frame (i.e., t == step_t)\n+ mask_S = jnp.arange(seq_len) == step_t\n mask_BSN = jnp.broadcast_to(mask_S[None, :, None], (B, seq_len, N)).astype(\n bool\n )\n",diff,selection_command +2530,1934225,"genie.py",0,0,"",python,tab +2531,1942482,"genie.py",0,0,"from typing import Dict\n\nimport optax\nimport jax\nimport jax.numpy as jnp\nimport flax.nnx as nnx\nimport orbax.checkpoint as ocp\n\nfrom models.dynamics import DynamicsMaskGIT\nfrom models.lam import LatentActionModel\nfrom models.tokenizer import TokenizerVQVAE\n\n\nclass Genie(nnx.Module):\n """"""Genie model""""""\n\n def __init__(\n self,\n in_dim: int,\n tokenizer_dim: int,\n tokenizer_ffn_dim: int,\n latent_patch_dim: int,\n num_patch_latents: int,\n patch_size: int,\n tokenizer_num_blocks: int,\n tokenizer_num_heads: int,\n lam_dim: int,\n lam_ffn_dim: int,\n latent_action_dim: int,\n num_latent_actions: int,\n lam_patch_size: int,\n lam_num_blocks: int,\n lam_num_heads: int,\n lam_co_train: bool,\n dyna_dim: int,\n dyna_ffn_dim: int,\n dyna_num_blocks: int,\n dyna_num_heads: int,\n param_dtype: jnp.dtype,\n dtype: jnp.dtype,\n use_flash_attention: bool,\n rngs: nnx.Rngs,\n dropout: float = 0.0,\n mask_limit: float = 0.0,\n ):\n # --- Tokenizer ---\n self.in_dim = in_dim\n self.tokenizer_dim = tokenizer_dim\n self.tokenizer_ffn_dim = tokenizer_ffn_dim\n self.latent_patch_dim = latent_patch_dim\n self.num_patch_latents = num_patch_latents\n self.patch_size = patch_size\n self.tokenizer_num_blocks = tokenizer_num_blocks\n self.tokenizer_num_heads = tokenizer_num_heads\n # --- LAM ---\n self.lam_dim = lam_dim\n self.lam_ffn_dim = lam_ffn_dim\n self.latent_action_dim = latent_action_dim\n self.num_latent_actions = num_latent_actions\n self.lam_patch_size = lam_patch_size\n self.lam_num_blocks = lam_num_blocks\n self.lam_num_heads = lam_num_heads\n self.lam_co_train = lam_co_train\n # --- Dynamics ---\n self.dyna_dim = dyna_dim\n self.dyna_ffn_dim = dyna_ffn_dim\n self.dyna_num_blocks = dyna_num_blocks\n self.dyna_num_heads = dyna_num_heads\n self.param_dtype = param_dtype\n self.dtype = dtype\n self.use_flash_attention = use_flash_attention\n 
self.dropout = dropout\n self.mask_limit = mask_limit\n\n self.tokenizer = TokenizerVQVAE(\n in_dim=self.in_dim,\n model_dim=self.tokenizer_dim,\n ffn_dim=self.tokenizer_ffn_dim,\n latent_dim=self.latent_patch_dim,\n num_latents=self.num_patch_latents,\n patch_size=self.patch_size,\n num_blocks=self.tokenizer_num_blocks,\n num_heads=self.tokenizer_num_heads,\n dropout=0.0,\n codebook_dropout=0.0,\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n use_flash_attention=self.use_flash_attention,\n rngs=rngs,\n )\n self.lam = LatentActionModel(\n in_dim=self.in_dim,\n model_dim=self.lam_dim,\n ffn_dim=self.lam_ffn_dim,\n latent_dim=self.latent_patch_dim,\n num_latents=self.num_latent_actions,\n patch_size=self.lam_patch_size,\n num_blocks=self.lam_num_blocks,\n num_heads=self.lam_num_heads,\n dropout=0.0,\n codebook_dropout=0.0,\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n use_flash_attention=self.use_flash_attention,\n rngs=rngs,\n )\n self.dynamics = DynamicsMaskGIT(\n model_dim=self.dyna_dim,\n ffn_dim=self.dyna_ffn_dim,\n num_latents=self.num_patch_latents,\n latent_action_dim=self.latent_action_dim,\n num_blocks=self.dyna_num_blocks,\n num_heads=self.dyna_num_heads,\n dropout=self.dropout,\n mask_limit=self.mask_limit,\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n use_flash_attention=self.use_flash_attention,\n rngs=rngs,\n )\n\n def __call__(\n self, batch: Dict[str, jax.Array], training: bool = True\n ) -> Dict[str, jax.Array]:\n videos_BTHWC = batch[""videos""]\n tokenizer_outputs = self.tokenizer.vq_encode(videos_BTHWC, training=False)\n token_indices_BTN = tokenizer_outputs[""indices""]\n lam_outputs = self.lam.vq_encode(videos_BTHWC, training=False)\n z_q_BTm11L = lam_outputs[""z_q""]\n action_indices_E = lam_outputs[""indices""]\n latent_actions_BTm11L = jax.lax.cond(\n self.lam_co_train,\n lambda: z_q_BTm11L,\n lambda: jax.lax.stop_gradient(z_q_BTm11L),\n )\n outputs = dict(\n video_tokens=jax.lax.stop_gradient(token_indices_BTN),\n latent_actions=latent_actions_BTm11L,\n )\n outputs[""mask_rng""] = batch[""mask_rng""]\n dyna_logits_BTNV, dyna_mask = self.dynamics(outputs, training)\n outputs[""token_logits""] = dyna_logits_BTNV\n if dyna_mask is not None:\n outputs[""mask""] = dyna_mask\n mle_indices_BTN = jnp.argmax(outputs[""token_logits""], axis=-1)\n H, W = batch[""videos""].shape[2:4]\n outputs[""recon""] = self.tokenizer.decode(mle_indices_BTN, (H, W))\n outputs[""lam_indices""] = action_indices_E\n return outputs\n\n def sample(\n self,\n batch: Dict[str, jax.Array],\n seq_len: int,\n steps: int = 25,\n temperature: float = 1,\n sample_argmax: bool = False,\n ) -> jax.Array:\n """"""\n Autoregressively samples up to `seq_len` future frames, following Figure 8 of the paper.\n\n - Input frames are tokenized once.\n - Future frames are generated autoregressively in token space.\n - All frames are detokenized in a single pass.\n\n Note:\n - For interactive or step-wise sampling, detokenization should occur after each action.\n - To maintain consistent tensor shapes across timesteps, all current and future frames are decoded at every step.\n - Temporal causal structure is preserved by\n a) reapplying the mask before each decoding step.\n b) a temporal causal mask is applied within each ST-transformer block.\n\n Dimension keys:\n B: batch size\n T: number of input (conditioning) frames\n N: number of patches per frame\n M: model dimension\n S: sequence length\n H: height\n W: width\n E: B * (S - 1)\n """"""\n # --- Encode videos and actions ---\n videos_BTHWC 
= batch[""videos""]\n latent_actions_E = batch[""latent_actions""]\n tokenizer_out = self.tokenizer.vq_encode(videos_BTHWC, training=False)\n token_idxs_BTN = tokenizer_out[""indices""]\n B, T, N = token_idxs_BTN.shape\n pad_shape = (B, seq_len - T, N)\n pad = jnp.zeros(pad_shape, dtype=token_idxs_BTN.dtype)\n token_idxs_BSN = jnp.concatenate([token_idxs_BTN, pad], axis=1)\n action_tokens_EL = self.lam.vq.get_codes(latent_actions_E)\n\n def maskgit_step_fn(\n carry: tuple[jax.Array, jax.Array, jax.Array, jax.Array], step: jax.Array\n ) -> tuple[tuple[jax.Array, jax.Array, jax.Array, jax.Array], None]:\n rng, token_idxs_BSN, mask_BSN, action_tokens_EL = carry\n S, N = token_idxs_BSN.shape[1:]\n L = action_tokens_EL.shape[-1]\n\n # --- Construct + encode video ---\n vid_embed_BSNM = self.dynamics.patch_embed(token_idxs_BSN)\n mask_token_111M = self.dynamics.mask_token.value\n mask_expanded_BSN1 = mask_BSN[..., None]\n vid_embed_BSNM = jnp.where(mask_expanded_BSN1, mask_token_111M, vid_embed_BSNM)\n\n # --- Predict transition ---\n action_tokens_BSm1L = jnp.reshape(action_tokens_EL, (B, S - 1, L))\n act_embed_BSm1M = self.dynamics.action_up(action_tokens_BSm1L)\n act_embed_BSM = jnp.pad(act_embed_BSm1M, ((0, 0), (1, 0), (0, 0)))\n act_embed_BS1M = jnp.reshape(act_embed_BSM, (B, S, 1, act_embed_BSM.shape[-1]))\n vid_embed_BSNM += act_embed_BS1M\n unmasked_ratio = jnp.cos(jnp.pi * (step + 1) / (steps * 2))\n step_temp = temperature * (1.0 - unmasked_ratio)\n final_logits_BSNV = self.dynamics.transformer(vid_embed_BSNM) / step_temp\n\n # --- Sample new tokens for final frame ---\n if sample_argmax:\n sampled_token_idxs_BSN = jnp.argmax(final_logits_BSNV, axis=-1)\n else:\n rng, _rng = jax.random.split(rng)\n sampled_token_idxs_BSN = jax.random.categorical(_rng, final_logits_BSNV)\n gather_fn = jax.vmap(jax.vmap(jax.vmap(lambda x, y: x[y])))\n final_token_probs_BSN = gather_fn(\n jax.nn.softmax(final_logits_BSNV), sampled_token_idxs_BSN\n )\n final_token_probs_BSN += ~mask_BSN\n # Update masked tokens only\n token_idxs_BSN = jnp.where(mask_BSN, sampled_token_idxs_BSN, token_idxs_BSN)\n\n # --- Update mask ---\n num_unmasked_tokens = jnp.round(N * (1.0 - unmasked_ratio)).astype(int)\n idx_mask_N = jnp.arange(final_token_probs_BSN.shape[-1]) > num_unmasked_tokens\n sorted_idxs_BSN = jnp.argsort(final_token_probs_BSN, axis=-1, descending=True)\n mask_update_fn = jax.vmap(lambda msk, ids: msk.at[ids].set(idx_mask_N))\n new_mask_BSN = mask_update_fn(mask_BSN, sorted_idxs_BSN)\n\n new_carry = (rng, token_idxs_BSN, new_mask_BSN, action_tokens_EL)\n return new_carry, None\n\n def generation_step_fn(\n carry: tuple[jax.Array, jax.Array], step_t: jax.Array\n ) -> tuple[tuple[jax.Array, jax.Array], None]:\n rng, current_token_idxs_BSN = carry\n rng, step_rng = jax.random.split(rng)\n\n # Mask current and future frames (i.e., t >= step_t)\n mask_S = jnp.arange(seq_len) >= step_t\n mask_BSN = jnp.broadcast_to(mask_S[None, :, None], (B, seq_len, N)).astype(\n bool\n )\n masked_token_idxs_BSN = current_token_idxs_BSN * ~mask_BSN\n\n # --- Initialize and run MaskGIT loop ---\n init_carry_maskgit = (\n step_rng,\n masked_token_idxs_BSN,\n mask_BSN,\n action_tokens_EL,\n )\n final_carry_maskgit, _ = jax.lax.scan(\n maskgit_step_fn, init_carry_maskgit, jnp.arange(steps)\n )\n updated_token_idxs = final_carry_maskgit[1]\n new_carry = (rng, updated_token_idxs)\n return new_carry, None\n\n # --- Run the autoregressive generation using jax.lax.scan ---\n initial_carry = (batch[""rng""], token_idxs_BSN)\n 
timesteps_to_scan = jnp.arange(T, seq_len)\n final_carry, _ = jax.lax.scan(\n generation_step_fn, initial_carry, timesteps_to_scan\n )\n final_token_idxs = final_carry[1]\n\n # --- Decode all tokens at once at the end ---\n H, W = batch[""videos""].shape[2:4]\n final_frames = self.tokenizer.decode(\n final_token_idxs,\n video_hw=(H, W),\n )\n return final_frames\n\n def vq_encode(self, batch: Dict[str, jax.Array], training: bool) -> jax.Array:\n # --- Preprocess videos ---\n video_BTHWC = batch[""videos""]\n lam_output = self.lam.vq_encode(video_BTHWC, training=training)\n lam_indices_E = lam_output[""indices""]\n return lam_indices_E\n\n\n# FIXME (f.srambical): add conversion script for old checkpoints\ndef restore_genie_components(\n optimizer: nnx.Optimizer,\n sharding: jax.sharding.NamedSharding,\n rng: jax.Array,\n args,\n) -> nnx.Optimizer:\n """"""Restore pre-trained Genie components""""""\n rngs = nnx.Rngs(rng)\n\n tx = optimizer.tx\n model = optimizer.model\n handler_registry = ocp.handlers.DefaultCheckpointHandlerRegistry()\n handler_registry.add(\n ""model_state"", ocp.args.PyTreeRestore, ocp.handlers.PyTreeCheckpointHandler\n )\n\n checkpoint_options = ocp.CheckpointManagerOptions(\n step_format_fixed_length=6,\n )\n tokenizer_checkpoint_manager = ocp.CheckpointManager(\n directory=args.tokenizer_checkpoint,\n options=checkpoint_options,\n handler_registry=handler_registry,\n )\n dummy_tokenizer = TokenizerVQVAE(\n in_dim=args.image_channels,\n model_dim=args.tokenizer_dim,\n ffn_dim=args.tokenizer_ffn_dim,\n latent_dim=args.latent_patch_dim,\n num_latents=args.num_patch_latents,\n patch_size=args.patch_size,\n num_blocks=args.tokenizer_num_blocks,\n num_heads=args.tokenizer_num_heads,\n dropout=args.dropout,\n codebook_dropout=args.dropout,\n param_dtype=args.param_dtype,\n dtype=args.dtype,\n use_flash_attention=args.use_flash_attention,\n rngs=rngs,\n )\n dummy_tokenizer_optimizer = nnx.Optimizer(dummy_tokenizer, tx)\n dummy_tokenizer_optimizer_state = nnx.state(dummy_tokenizer_optimizer)\n abstract_sharded_tokenizer_optimizer_state = _create_abstract_sharded_pytree(\n dummy_tokenizer_optimizer_state, sharding\n )\n restored_tokenizer = tokenizer_checkpoint_manager.restore(\n step=tokenizer_checkpoint_manager.latest_step(),\n args=ocp.args.Composite(\n model_state=ocp.args.PyTreeRestore( # type: ignore\n abstract_sharded_tokenizer_optimizer_state # type: ignore\n ),\n ),\n )[""model_state""]\n nnx.update(dummy_tokenizer_optimizer.model, restored_tokenizer.model)\n model.tokenizer = dummy_tokenizer_optimizer.model\n tokenizer_checkpoint_manager.close()\n\n if args.lam_checkpoint:\n lam_checkpoint_manager = ocp.CheckpointManager(\n directory=args.lam_checkpoint,\n options=checkpoint_options,\n handler_registry=handler_registry,\n )\n dummy_lam = LatentActionModel(\n in_dim=args.image_channels,\n model_dim=args.lam_dim,\n ffn_dim=args.lam_ffn_dim,\n latent_dim=args.latent_patch_dim,\n num_latents=args.num_latent_actions,\n patch_size=args.lam_patch_size,\n num_blocks=args.lam_num_blocks,\n num_heads=args.lam_num_heads,\n dropout=args.dropout,\n codebook_dropout=args.dropout,\n param_dtype=args.param_dtype,\n dtype=args.dtype,\n use_flash_attention=args.use_flash_attention,\n rngs=rngs,\n )\n dummy_lam_optimizer = nnx.Optimizer(dummy_lam, tx)\n dummy_lam_optimizer_state = nnx.state(dummy_lam_optimizer)\n abstract_sharded_lam_optimizer_state = _create_abstract_sharded_pytree(\n dummy_lam_optimizer_state, sharding\n )\n restored_lam_optimizer = lam_checkpoint_manager.restore(\n 
step=lam_checkpoint_manager.latest_step(),\n args=ocp.args.Composite(\n model_state=ocp.args.PyTreeRestore( # type: ignore\n abstract_sharded_lam_optimizer_state # type: ignore\n ),\n ),\n )[""model_state""]\n nnx.update(dummy_lam_optimizer.model, restored_lam_optimizer.model)\n model.lam = dummy_lam_optimizer.model\n # Remove the LAM decoder to save memory and avoid unnecessary computation.\n del model.lam.decoder\n lam_checkpoint_manager.close()\n \n # Reinitialize the optimizer states\n optimizer = nnx.Optimizer(model, tx)\n return optimizer\n\n\ndef _create_abstract_sharded_pytree(\n pytree_template: nnx.GraphState, sharding_spec: jax.sharding.NamedSharding\n) -> jax.Array:\n """"""Replaces arrays in a pytree with ShapeDtypeStructs having the given sharding.""""""\n\n def map_fn(leaf_template):\n if hasattr(leaf_template, ""shape"") and hasattr(leaf_template, ""dtype""):\n return jax.ShapeDtypeStruct(\n leaf_template.shape, leaf_template.dtype, sharding=sharding_spec\n )\n return leaf_template\n\n return jax.tree_util.tree_map(map_fn, pytree_template)\n",python,tab +2532,1943884,"genie.py",0,0,"",python,tab +2533,1945208,"TERMINAL",0,0,"\r(jafar) [tum_cte0515@hkn0802 jafar]$ ",,terminal_output +2534,1946248,"TERMINAL",0,0,"g",,terminal_output +2535,1946358,"TERMINAL",0,0,"[?25li[?25h",,terminal_output +2536,1946488,"TERMINAL",0,0,"[?25lt[?25h",,terminal_output +2537,1946652,"TERMINAL",0,0,"[?25l [?25h",,terminal_output +2538,1946845,"TERMINAL",0,0,"[?25la[?25h",,terminal_output +2539,1947010,"TERMINAL",0,0,"[?25ld[?25h",,terminal_output +2540,1947170,"TERMINAL",0,0,"[?25ld[?25h",,terminal_output +2541,1947266,"TERMINAL",0,0,"[?25l [?25h",,terminal_output +2542,1947711,"TERMINAL",0,0,"[?25lg[?25h",,terminal_output +2543,1947805,"TERMINAL",0,0,"[?25le[?25h",,terminal_output +2544,1947905,"TERMINAL",0,0,"[?25ln[?25h",,terminal_output +2545,1947997,"TERMINAL",0,0,"ie",,terminal_output +2546,1948882,"TERMINAL",0,0,"[?25l.[?25h",,terminal_output +2547,1949050,"TERMINAL",0,0,"py ",,terminal_output +2548,1949612,"TERMINAL",0,0,"\r\n[?2004l\r]0;tum_cte0515@hkn0802:~/Projects/jafar[?2004h(jafar) [tum_cte0515@hkn0802 jafar]$ ",,terminal_output +2549,2009372,"TERMINAL",0,0,"[?25lgi[?25h[?25li[?25h",,terminal_output +2550,2009457,"TERMINAL",0,0,"[?25lt[?25h",,terminal_output +2551,2009512,"TERMINAL",0,0,"[?25l [?25h",,terminal_output +2552,2010114,"TERMINAL",0,0,"[?25lb[?25h",,terminal_output +2553,2010197,"TERMINAL",0,0,"[?25lr[?25h",,terminal_output +2554,2010427,"TERMINAL",0,0,"[?25la[?25h[?25ln[?25h",,terminal_output +2555,2010541,"TERMINAL",0,0,"[?25lc[?25h",,terminal_output +2556,2010621,"TERMINAL",0,0,"[?25lh[?25h",,terminal_output +2557,2010792,"TERMINAL",0,0,"\r\n[?2004l\r[?1h=\r add-wandb-name-and-tags\r\n before-nnx\r\n causal-st-transformer\r\n causal-transformer-dynamics-model\r\n convert-to-jax-array-in-iter\r\n correct-batched-sampling\r\n dev\r\n dont-let-tf-see-gpu\r\n feat/explicit-image-dims\r\n fix-action-padding-lam-future-information-access\r\n fix-sampling\r\n fix-transformer-forwardpass\r\n fix/spatiotemporal-pe-once-in-STTransformer\r\n grad-norm-log-and-clip\r\n grain-dataloader\r\n logging-variants\r\n lr-schedules\r\n main\r\n maskgit-different-maskprob-per-sample\r\n* maskgit-sampling-iterative-unmasking-fix\r\n metrics-logging-for-dynamics-model\r\n monkey-patch\r\n new-arch-sampling\r\n preprocess_video\r\n refactor-tmp\r\n revised-dataloader\r\n runner\r\n runner-grain\r\n sample-ali-branch\r\n sample-from-different-topologies\r\n speedup-tfrecord-preprocessing\r\n 
tmp\r\n\r[?1l>]0;tum_cte0515@hkn0802:~/Projects/jafar[?2004h(jafar) [tum_cte0515@hkn0802 jafar]$ ",,terminal_output +2558,2011715,"TERMINAL",0,0,"[?25lgi[?25h",,terminal_output +2559,2011778,"TERMINAL",0,0,"[?25li[?25h",,terminal_output +2560,2011928,"TERMINAL",0,0,"[?25lt[?25h",,terminal_output +2561,2012013,"TERMINAL",0,0,"[?25l [?25h",,terminal_output +2562,2012419,"TERMINAL",0,0,"[?25lp[?25h",,terminal_output +2563,2012632,"TERMINAL",0,0,"[?25lu[?25h",,terminal_output +2564,2012686,"TERMINAL",0,0,"[?25ls[?25h",,terminal_output +2565,2012746,"TERMINAL",0,0,"[?25lh[?25h",,terminal_output +2566,2012994,"TERMINAL",0,0,"\r\n[?2004l\rfatal: The current branch maskgit-sampling-iterative-unmasking-fix has no upstream branch.\r\nTo push the current branch and set the remote as upstream, use\r\n\r\n git push --set-upstream origin maskgit-sampling-iterative-unmasking-fix\r\n\r\nTo have this happen automatically for branches without a tracking\r\nupstream, see 'push.autoSetupRemote' in 'git help config'.\r\n\r\n]0;tum_cte0515@hkn0802:~/Projects/jafar[?2004h(jafar) [tum_cte0515@hkn0802 jafar]$ ",,terminal_output +2567,2015834,"TERMINAL",0,0,"git push --set-upstream origin maskgit-sampling-iterative-unmasking-fix",,terminal_output +2568,2016016,"TERMINAL",0,0,"\rgit push --set-upstream origin maskgit-sampling-iterative-unmasking-fix\r\n[?2004l\r",,terminal_output +2569,2017366,"TERMINAL",0,0,"Enumerating objects: 5, done.\r\nCounting objects: 20% (1/5)\rCounting objects: 40% (2/5)\rCounting objects: 60% (3/5)\rCounting objects: 80% (4/5)\rCounting objects: 100% (5/5)\rCounting objects: 100% (5/5), done.\r\nDelta compression using up to 152 threads\r\nCompressing objects: 33% (1/3)\rCompressing objects: 66% (2/3)\rCompressing objects: 100% (3/3)\rCompressing objects: 100% (3/3), done.\r\nWriting objects: 33% (1/3)\rWriting objects: 66% (2/3)\rWriting objects: 100% (3/3)\rWriting objects: 100% (3/3), 799 bytes | 799.00 KiB/s, done.\r\nTotal 3 (delta 2), reused 0 (delta 0), pack-reused 0\r\nremote: Resolving deltas: 0% (0/2)\rremote: Resolving deltas: 50% (1/2)\rremote: Resolving deltas: 100% (2/2)\rremote: Resolving deltas: 100% (2/2), completed with 2 local objects.\r\n",,terminal_output +2570,2017444,"TERMINAL",0,0,"remote: This repository moved. 
Please use the new location:\r\nremote: git@github.com:p-doom/jasmine.git\r\n",,terminal_output +2571,2017656,"TERMINAL",0,0,"remote: \r\nremote: Create a pull request for 'maskgit-sampling-iterative-unmasking-fix' on GitHub by visiting:\r\nremote: https://github.com/p-doom/jasmine/pull/new/maskgit-sampling-iterative-unmasking-fix\r\nremote: \r\nTo github.com:p-doom/jafar.git\r\n * [new branch] maskgit-sampling-iterative-unmasking-fix -> maskgit-sampling-iterative-unmasking-fix\r\nbranch 'maskgit-sampling-iterative-unmasking-fix' set up to track 'origin/maskgit-sampling-iterative-unmasking-fix'.\r\n",,terminal_output +2572,2017708,"TERMINAL",0,0,"]0;tum_cte0515@hkn0802:~/Projects/jafar[?2004h(jafar) [tum_cte0515@hkn0802 jafar]$ ",,terminal_output +2573,2112200,"TERMINAL",0,0,"[?25lru[?25h",,terminal_output +2574,2112266,"TERMINAL",0,0,"[?25lu[?25h",,terminal_output +2575,2112328,"TERMINAL",0,0,"[?25ln[?25h",,terminal_output +2576,2112529,"TERMINAL",0,0,"[?25ln[?25h[?25le[?25h",,terminal_output +2577,2112595,"TERMINAL",0,0,"[?25lr[?25h",,terminal_output +2578,2113330,"TERMINAL",0,0,"\r\n[?2004l\r]0;tum_cte0515@hkn0802:~/Projects/jafar_jobs[?2004h(jafar) [tum_cte0515@hkn0802 jafar_jobs]$ ",,terminal_output +2579,2114974,"TERMINAL",0,0,"[?25lvi[?25h",,terminal_output +2580,2115061,"TERMINAL",0,0,"[?25li[?25h",,terminal_output +2581,2115148,"TERMINAL",0,0,"[?25lm[?25h",,terminal_output +2582,2115204,"TERMINAL",0,0,"[?25l [?25h",,terminal_output +2583,2116436,"TERMINAL",0,0,"\r(reverse-i-search)`': ",,terminal_output +2584,2116789,"TERMINAL",0,0,"[?25lv': git push --set-upstream origin maskgit-sampling-iterative-unmasking-fix[?25h",,terminal_output +2585,2116918,"TERMINAL",0,0,"[?25lev\ri': vim slurm/jobs/mihir/horeka/lr_tuning/tokenizer/train_tokenizer_lr_1e-4.sbatch \r[?25h[1@m': vim",,terminal_output +2586,2118315,"TERMINAL",0,0,"\r[17@jafar) [tum_cte0515@hkn0802 jafar_jobs]$ vim\r\n[?2004l\r[?1049h[>4;2m[?1h=[?2004h[?1004h[?12h[?12l[?25l""slurm/jobs/mihir/horeka/lr_tuning/tokenizer/train_tokenizer_lr_1e-4.sbatch"" 72L, 2049B▽ Pzz\[0%m [>c]10;?]11;?#!/usr/bin/env bash#SBATCH --nodes=2#SBATCH --ntasks-per-node=4\r\n#SBATCH --time=12:00:00\r\n#SBATCH --partition=accelerated\r\n#SBATCH --cpus-per-task=5\r\n#SBATCH --gres=gpu:4\r\n#SBATCH --output=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir/big_run/tokenizer/%x_%j.log\r\n#SBATCH --error=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir/big_run/tokenizer/%x_%j.log\r\n#SBATCH --job-name=train_tokenizer_1e-4\r\n#SBATCH --requeue\r\n#SBATCH --signal=b:usr1@300 # 5 min before timeout\r\n\r\n# --- signal trap to requeue job before timeout ---\r\nrequeue_job() {\r\n echo ""[$(date)] caught sigusr1 (timeout warning), requeueing slurm job $SLURM_JOB_ID...""\r\n # optional: trigger checkpoint saving here\r\n # e.g., touch $checkpoint_dir/requeue_trigger\r\n scontrol requeue $SLURM_JOB_ID\r\n exit 0\r\n}\r\n\r\ntrap requeue_job sigusr1\r\n\r\n# set checkpoint flag based on restart count\r\nrestart_count=$(scontrol show job $SLURM_JOB_ID | grep -o 'Restarts=[0-9]*' | cut -d'=' -f2)\r\n\r\nif [ $restart_count -eq 0 ]; then\r\n restore_ckpt_flag=""--no-restore-ckpt""\r\nelse\r\n restore_ckpt_flag=""--restore-ckpt""\r\nfi\r\n\r\n# Log the sbatch script\r\ncat $0\r\n\r\nmodule unload mpi/openmpi/5.05,17Top[?25hP+q436f\P+q6b75\P+q6b64\P+q6b72\P+q6b6c\P+q2332\P+q2334\P+q2569\P+q2a37\P+q6b31\[?12$p[?25l/3333/3333 [?25h[?25l/f6f6/e3e3 [?25h",,terminal_output +2587,2118851,"TERMINAL",0,0,"[?25lj 
6[?25h",,terminal_output +2588,2119362,"TERMINAL",0,0,"[?25lj 7[?25h[?25l8[?25h[?25lj 9[?25h",,terminal_output +2589,2120164,"TERMINAL",0,0,"[?25lj 10,17[?25h[?25lj 1[?25h[?25lj 2[?25h[?25lj 3[?25h[?25lj 4,0-1[?25h[?25lj 5,17 [?25h[?25lj {}6,15[?25h[?25lj {}7,17[?25h[?25lj 8[?25h[?25lj 9[?25h[?25lj 20[?25h[?25lj 1,10[?25h[?25lj {}2,1 [?25h[?25lj {}3,0-1[?25h[?25lj 4,17 [?25h[?25lj 5,0-1[?25h[?25l6,17 [?25h[?25lj 7[?25h[?25lj 8,0-1[?25h[?25lj 9,17 [?25h[?25lj 30[?25h[?25lj 1,4 [?25h[?25lj 2,17[?25h[?25lj 3,2 [?25h[?25lj \r\nmodule unload devel/cuda/12.434,0-12%[?25h[?25lj \r\nsource .venv/bin/activate35,175%[?25h",,terminal_output +2590,2120490,"TERMINAL",0,0,"[?25l\r\n36,68%[?25h[?25lj \r\narray_records_dir=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_new/open_ai_minecraft_arrayrecords_chunked37,0-111%[?25h[?25lj \r\n38,1714%[?25h[?25lj \r\njob_name=$SLURM_JOB_NAME39,1717%[?25h[?25lj \r\nslurm_job_id=$SLURM_JOB_ID40,1720%[?25h[?25lj \r\n41,0-123%[?25h[?25lj CHECKPOINT_DIR=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/big-runs/tokenizer/$job_name/$slurm_jobb_id42,1728%[?25h[?25lj 3,0-1[?25h[?25lj \r\nmkdir -p $CHECKPOINT_DIR44,1731%[?25h[?25l\r\n45,1734%[?25h[?25lj \r\nenv | grep SLURM46,0-137%[?25h",,terminal_output +2591,2120924,"TERMINAL",0,0,"[?25lj srun python train_tokenizer.py \47,1742%[?25h[?25lj \r\n--save_ckpt \48,1745%[?25h[?25lj \r\n$restore_ckpt_flag \49,0-148%[?25h[?25lj \r\n--wandb_id $SLURM_JOB_ID \50,1651%[?25h[?25lj \r\n--ckpt_dir $CHECKPOINT_DIR \51,0-154%[?25h[?25lj \r\n--batch_size=96 \52,1757%[?25h[?25lj \r\n--init_lr=0 \53,1760%[?25h[?25lj \r\n--max_lr=1e-4 \54,1762%[?25h[?25lj \r\n--log_image_interval=1000 \55,1765%[?25h[?25lj \r\n--log_checkpoint_interval=1000 \56,1768%[?25h[?25lj \r\n--log \57,1771%[?25h[?25lj \r\n--name=tokenizer-1e-4-$slurm_job_id \58,1774%[?25h[?25lj \r\n--tags tokenizer big-run 1e-4 \59,1777%[?25h[?25lj \r\n--entity instant-uv \60,1780%[?25h",,terminal_output +2592,2121083,"TERMINAL",0,0,"[?25lj \r\n--project jafar \61,1782%[?25h",,terminal_output +2593,2121494,"TERMINAL",0,0,"[?25lj \r\n--data_dir $array_records_dir &62,1185%[?25h",,terminal_output +2594,2121804,"TERMINAL",0,0,"[?25lk 1,17[?25h",,terminal_output +2595,2121986,"TERMINAL",0,0,"[?25lk 0[?25h",,terminal_output +2596,2122541,"TERMINAL",0,0,"[?25lk 59[?25h",,terminal_output +2597,2122668,"TERMINAL",0,0,"[?25lk 8[?25h",,terminal_output +2598,2122875,"TERMINAL",0,0,"[?25lk 7[?25h",,terminal_output +2599,2122999,"TERMINAL",0,0,"[?25lk 6[?25h",,terminal_output +2600,2123068,"TERMINAL",0,0,"[?25lj 7[?25h",,terminal_output +2601,2123644,"TERMINAL",0,0,"[?25l8[?25h[?25lj 9[?25h[?25lj 60[?25h[?25lj 1[?25h",,terminal_output +2602,2124198,"TERMINAL",0,0,"[?25lj 2,11[?25h[?25lj \r\n63,1788%[?25h[?25lj \r\nchild_pid=$!64,1791%[?25h[?25lj \r\n65,1794%[?25h[?25lj \r\nwait $child_pid66,1797%[?25h[?25lj \r\n67,17Bot[?25h[?25lj 8,0-1[?25h[?25lj 9,12 [?25h[?25lj 70,0-1[?25h[?25lj 1,15 [?25h[?25lj 2,0-1[?25h[?25lj [?25h[?25lj [?25h[?25lj [?25h[?25lj [?25h[?25lj [?25h[?25lj [?25h[?25lj [?25h",,terminal_output +2603,2124475,"TERMINAL",0,0,"[?25lk 1,15 [?25h",,terminal_output +2604,2124758,"TERMINAL",0,0,"[?25lk 0,0-1[?25h",,terminal_output +2605,2124853,"TERMINAL",0,0,"[?25lk 69,12 [?25h",,terminal_output +2606,2125524,"TERMINAL",0,0,"[?25l8,0-1[?25h[?25lk 7,17 [?25h[?25lk 6[?25h[?25lk 5[?25h[?25lk 4[?25h[?25lk 3[?25h[?25lk 2,11[?25h",,terminal_output +2607,2125678,"TERMINAL",0,0,"[?25lk 1,17[?25h",,terminal_output +2608,2125884,"TERMINAL",0,0,"[?25lk 
0[?25h",,terminal_output +2609,2126076,"TERMINAL",0,0,"[?25lk 59[?25h",,terminal_output +2610,2126201,"TERMINAL",0,0,"[?25lk 8[?25h",,terminal_output +2611,2126546,"TERMINAL",0,0,"[?25lk 7[?25h",,terminal_output +2612,2126673,"TERMINAL",0,0,"[?25lk 6[?25h",,terminal_output +2613,2127825,"TERMINAL",0,0,"[?25l::[?25h",,terminal_output +2614,2128254,"TERMINAL",0,0,"q",,terminal_output +2615,2128572,"TERMINAL",0,0,"!",,terminal_output +2616,2128951,"TERMINAL",0,0,"\r[?25l[?2004l[>4;m[?1004l[?2004l[?1l>[?25h[>4;m[?1049l]0;tum_cte0515@hkn0802:~/Projects/jafar_jobs[?2004h(jafar) [tum_cte0515@hkn0802 jafar_jobs]$ ",,terminal_output diff --git a/927a8af5474e5654810c00ce2e09fd2de87d3e5722f33fa1090d867db114e403/crowd-code-25569aaa-6e77-4ce2-b9b6-8ae8c33420051753180192494-2025_07_22-12.30.11.399/source.csv b/927a8af5474e5654810c00ce2e09fd2de87d3e5722f33fa1090d867db114e403/crowd-code-25569aaa-6e77-4ce2-b9b6-8ae8c33420051753180192494-2025_07_22-12.30.11.399/source.csv new file mode 100644 index 0000000000000000000000000000000000000000..5fd5140ddf9c20b632c372bfcc3208d7c149b2e0 --- /dev/null +++ b/927a8af5474e5654810c00ce2e09fd2de87d3e5722f33fa1090d867db114e403/crowd-code-25569aaa-6e77-4ce2-b9b6-8ae8c33420051753180192494-2025_07_22-12.30.11.399/source.csv @@ -0,0 +1,16 @@ +Sequence,Time,File,RangeOffset,RangeLength,Text,Language,Type +1,5,"slurm/jobs/mihir/horeka/yolo-runs/sampling.sh",0,0,"\n# Log the sbatch script\ncat $0\n\nmodule unload mpi/openmpi/5.0\nmodule unload devel/cuda/12.4\n# source .venv/bin/activate\n\narray_records_dir=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_new/open_ai_minecraft_arrayrecords_chunked\n\njob_name=$SLURM_JOB_NAME\nslurm_job_id=$SLURM_JOB_ID\n\nCHECKPOINT_DIR=$ws_dir/checkpoints/$job_name/$slurm_job_id\nmkdir -p $CHECKPOINT_DIR\n\ntokenizer_ckpt_dir=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/big-runs/tokenizer-lr-scaling/train_tokenizer_lr_sweep_1e-4\ndynamics_ckpt_dir=$1\necho $dynamics_ckpt_dir\n\nenv | grep SLURM\n\nsrun python sample.py \\n --checkpoint $dynamics_ckpt_dir \\n --dyna_dim=128 \\n --dyna_num_blocks=2 \\n --dyna_num_heads=4 \\n --seq_len=16 \\n --start_frame=10 \\n --data_dir $array_records_dir\n\n# srun python sample.py \\n # --checkpoint $dynamics_ckpt_dir \\n # --start_frame=0 \\n # --batch_size=12 \\n # --seq_len=2 \\n # --data_dir $array_records_dir\n",shellscript,tab +2,446,"extension-output-pdoom-org.crowd-code-#1-crowd-code",0,0,"12:30:11 PM [info] Activating crowd-code\n12:30:11 PM [info] Recording started\n12:30:11 PM [info] Initializing git provider using file system watchers...\n12:30:11 PM [info] Git repository found\n12:30:11 PM [info] Git provider initialized successfully\n12:30:11 PM [info] Initial git state: [object Object]\n",Log,tab +3,1898,"slurm/jobs/mihir/horeka/yolo-runs/sampling.sh",0,0,"",shellscript,tab +4,8030,"TERMINAL",0,0,"cd checkpoints/big-runs/",,terminal_command +5,8072,"TERMINAL",0,0,"]633;E;2025-07-22 12:30:19 cd checkpoints/big-runs/;5fcdc89e-3b1c-4d05-a6d2-6f1ce0ba6ffb]633;C]0;tum_cte0515@hkn1993:/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/big-runs]633;D;0",,terminal_output +6,10557,"TERMINAL",0,0,"cd tokenizer-lr-scaling/",,terminal_command +7,12047,"TERMINAL",0,0,"ls",,terminal_command +8,12130,"TERMINAL",0,0,"]633;E;2025-07-22 12:30:23 ls;5fcdc89e-3b1c-4d05-a6d2-6f1ce0ba6ffb]633;Ctrain_tokenizer_lr_sweep_1e-4 train_tokenizer_lr_sweep_5e-5\r\ntrain_tokenizer_lr_sweep_1e-4_8nodes 
train_tokenizer_lr_sweep_5e-5_8nodes\r\n]0;tum_cte0515@hkn1993:/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/big-runs/tokenizer-lr-scaling]633;D;0",,terminal_output +9,16772,"TERMINAL",0,0,"cd train_tokenizer_lr_sweep_1e-4",,terminal_command +10,16797,"TERMINAL",0,0,"]633;E;2025-07-22 12:30:28 cd train_tokenizer_lr_sweep_1e-4;5fcdc89e-3b1c-4d05-a6d2-6f1ce0ba6ffb]633;C]0;tum_cte0515@hkn1993:/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/big-runs/tokenizer-lr-scaling/train_tokenizer_lr_sweep_1e-4]633;D;0",,terminal_output +11,17848,"TERMINAL",0,0,"ls",,terminal_command +12,17849,"TERMINAL",0,0,"]633;E;2025-07-22 12:30:28 ls;5fcdc89e-3b1c-4d05-a6d2-6f1ce0ba6ffb]633;C",,terminal_output +13,17862,"TERMINAL",0,0,"020000 060000 100000 140000 145000 146000.zip\r\n040000 080000 120000 144000 146000\r\n]0;tum_cte0515@hkn1993:/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/big-runs/tokenizer-lr-scaling/train_tokenizer_lr_sweep_1e-4]633;D;0",,terminal_output +14,21855,"TERMINAL",0,0,"cursor .",,terminal_command +15,21897,"TERMINAL",0,0,"]633;E;2025-07-22 12:30:33 cursor .;5fcdc89e-3b1c-4d05-a6d2-6f1ce0ba6ffb]633;C",,terminal_output diff --git a/927a8af5474e5654810c00ce2e09fd2de87d3e5722f33fa1090d867db114e403/crowd-code-2d6141f6-e173-4058-869e-6db42349a8771759955838997-2025_10_08-22.37.25.627/source.csv b/927a8af5474e5654810c00ce2e09fd2de87d3e5722f33fa1090d867db114e403/crowd-code-2d6141f6-e173-4058-869e-6db42349a8771759955838997-2025_10_08-22.37.25.627/source.csv new file mode 100644 index 0000000000000000000000000000000000000000..d83566945ccf6e472c13d895f461dec5a50503b3 --- /dev/null +++ b/927a8af5474e5654810c00ce2e09fd2de87d3e5722f33fa1090d867db114e403/crowd-code-2d6141f6-e173-4058-869e-6db42349a8771759955838997-2025_10_08-22.37.25.627/source.csv @@ -0,0 +1,6533 @@ +Sequence,Time,File,RangeOffset,RangeLength,Text,Language,Type +1,4,"slurm/jobs/mihir/horeka/doom/resolution60x80/train_dyn_default_500k.sh",0,0,"#!/usr/bin/env bash\n\n#SBATCH --nodes=1\n#SBATCH --ntasks-per-node=1\n#SBATCH --time=48:00:00\n#SBATCH --partition=accelerated\n#SBATCH --cpus-per-task=5\n#SBATCH --gres=gpu:1\n#SBATCH --output=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir/doom/dynamics/maskgit/%x_%j.log\n#SBATCH --error=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir/doom/dynamics/maskgit/%x_%j.log\n#SBATCH --job-name=dynamics_doom_60x80_500k\n#SBATCH --exclude=hkn0735\n#SBATCH --requeue\n#SBATCH --signal=b:usr1@300 # 5 min before timeout\n\n# --- signal trap to requeue job before timeout ---\nrequeue_job() {\n echo ""[$(date)] caught sigusr1 (timeout warning), requeueing slurm job $SLURM_JOB_ID...""\n # optional: trigger checkpoint saving here\n # e.g., touch $checkpoint_dir/requeue_trigger\n scontrol requeue $SLURM_JOB_ID\n exit 0\n}\n\ntrap requeue_job sigusr1\n\n# set checkpoint flag based on restart count\nrestart_count=$(scontrol show job $SLURM_JOB_ID | grep -o 'Restarts=[0-9]*' | cut -d'=' -f2)\n\nif [ $restart_count -eq 0 ]; then\n restore_ckpt_flag=""--no-restore-ckpt""\nelse\n restore_ckpt_flag=""--restore-ckpt""\nfi\n\n\n\n# Log the sbatch script\ncat $0\n\nmodule unload mpi/openmpi/5.0\nmodule unload devel/cuda/12.4\nsource 
.venv/bin/activate\n\narray_records_dir_train=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_doom/doom_episodes_10m_60x80_fixed/train\narray_records_dir_val=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_doom/doom_episodes_10m_60x80_fixed/val\n\njob_name=$SLURM_JOB_NAME\nslurm_job_id=$SLURM_JOB_ID\n\n# CHECKPOINT_DIR=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/doom/maskgit/$job_name/$slurm_job_id\nCHECKPOINT_DIR=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/doom/maskgit/dynamics_doom_60x80_500k/500k-run\nmkdir -p $CHECKPOINT_DIR\n\ntokenizer_ckpt_dir=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/mihir/tokenizer/train_tokenizer_default_single_gpu_60x80/3547697\n\nenv | grep SLURM\n\nsrun python jasmine/train_dynamics.py \\n --save_ckpt \\n --restore_ckpt \\n --wandb_id $SLURM_JOB_ID \\n --num_actions=20 \\n --num_steps=500_000 \\n --wsd_decay_steps=50_000 \\n --ckpt_dir $CHECKPOINT_DIR \\n --name=doom-dynamics-60x80-500k-$slurm_job_id \\n --image_height=60 \\n --image_width=80 \\n --tags doom dynamics maskgit default 60x80 \\n --entity instant-uv \\n --project jafar \\n --tokenizer_checkpoint=$tokenizer_ckpt_dir \\n --data_dir $array_records_dir_train \\n --val_data_dir $array_records_dir_val &\n\nchild_pid=$!\n\nwait $child_pid",shellscript,tab +2,315,"extension-output-pdoom-org.crowd-code-#1-crowd-code",0,0,"10:37:25 PM [info] Activating crowd-code\n10:37:25 PM [info] Recording started\n10:37:25 PM [info] Initializing git provider using file system watchers...\n",Log,tab +3,510,"extension-output-pdoom-org.crowd-code-#1-crowd-code",153,0,"10:37:25 PM [info] Git repository found\n10:37:25 PM [info] Git provider initialized successfully\n10:37:25 PM [info] Initial git state: [object Object]\n",Log,content +4,1985,"slurm/jobs/mihir/horeka/doom/resolution60x80/train_dyn_default_500k.sh",0,0,"",shellscript,tab +5,55380,"slurm/jobs/mihir/horeka/doom/resolution60x80/sample.sh",0,0,"#!/usr/bin/env bash\n\n#SBATCH --nodes=1\n#SBATCH --ntasks-per-node=1\n#SBATCH --time=00:20:00\n#SBATCH --partition=accelerated\n#SBATCH --cpus-per-task=5\n#SBATCH --gres=gpu:1\n#SBATCH --output=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir/doom/dynamics/maskgit/%x_%j.log\n#SBATCH --error=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir/doom/dynamics/maskgit/%x_%j.log\n#SBATCH --job-name=sample_dynamics_doom_60x80\n\n# Unload modules that may interfere\nmodule unload mpi/openmpi/5.0\nmodule unload devel/cuda/12.4\n\n# Activate virtual environment\nsource .venv/bin/activate\n\narray_records_dir=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_doom/doom_episodes_10m_60x80_fixed/test\n\nCHECKPOINT_PATH=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/doom/maskgit/dynamics_doom_60x80/3551397\n\necho ""Sampling from checkpoint: $CHECKPOINT_PATH""\n\nsrun python jasmine/sample.py \\n --checkpoint ""$CHECKPOINT_PATH"" \\n --data_dir=$array_records_dir \\n --seq_len=16 \\n --batch_size=8 \\n --start_frame=4 \\n --image_height=60 \\n --image_width=80 \\n --num_actions=18 \\n --patch_size=16 \\n --output_dir=gifs/doom/action-prepend-branch/60x80/test/ $@",shellscript,tab +6,57906,"slurm/jobs/mihir/horeka/doom/resolution60x80/sample.sh",749,0,"",shellscript,selection_mouse +7,59117,"slurm/jobs/mihir/horeka/doom/resolution60x80/sample.sh",749,108,"",shellscript,content 
+8,59120,"slurm/jobs/mihir/horeka/doom/resolution60x80/sample.sh",748,0,"",shellscript,selection_command +9,59485,"slurm/jobs/mihir/horeka/doom/resolution60x80/sample.sh",749,0,"",shellscript,selection_command +10,60214,"slurm/jobs/mihir/horeka/doom/resolution60x80/sample.sh",749,0,"/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/doom/maskgit/dynamics_doom_60x80_500k/500k-run",shellscript,content +11,60223,"slurm/jobs/mihir/horeka/doom/resolution60x80/sample.sh",1040,0,"",shellscript,selection_mouse +12,61875,"slurm/jobs/mihir/horeka/doom/resolution60x80/sample.sh",1039,0,"",shellscript,selection_command +13,67524,"slurm/jobs/mihir/horeka/doom/resolution60x80/sample.sh",554,0,"",shellscript,selection_mouse +14,69626,"TERMINAL",0,0,"",,terminal_focus +15,72686,"TERMINAL",0,0,"source /home/hk-project-p0023960/tum_cte0515/Projects/jasmine/data/jasmine_data/ViZDoomPPO/.venv/bin/activate",,terminal_command +16,77958,"TERMINAL",0,0,"deactivate",,terminal_command +17,79438,"TERMINAL",0,0,"salloc --time=01:00:00 --partition=dev_accelerated --nodes=1 --gres=gpu:1 --cpus-per-task=8",,terminal_command +18,79491,"TERMINAL",0,0,"]633;Csalloc: Granted job allocation 3554157\r\n",,terminal_output +19,79666,"TERMINAL",0,0,"salloc: Waiting for resource configuration\r\n",,terminal_output +20,106654,"TERMINAL",0,0,"salloc: Nodes hkn0401 are ready for job\r\n",,terminal_output +21,107453,"TERMINAL",0,0,"]0;tum_cte0515@hkn0401:~/Projects/jasmine[?2004h[tum_cte0515@hkn0401 jasmine]$ ",,terminal_output +22,108730,"TERMINAL",0,0,"s",,terminal_output +23,108845,"TERMINAL",0,0,"o",,terminal_output +24,109028,"TERMINAL",0,0,"ur",,terminal_output +25,109210,"TERMINAL",0,0,"c",,terminal_output +26,109347,"TERMINAL",0,0,"e",,terminal_output +27,109483,"TERMINAL",0,0," ",,terminal_output +28,109605,"TERMINAL",0,0,".",,terminal_output +29,110651,"TERMINAL",0,0,"v",,terminal_output +30,110828,"TERMINAL",0,0,"e",,terminal_output +31,111041,"TERMINAL",0,0,"nv/",,terminal_output +32,111265,"TERMINAL",0,0,"b",,terminal_output +33,111466,"TERMINAL",0,0,"in/",,terminal_output +34,112072,"TERMINAL",0,0,"activate",,terminal_output +35,112653,"TERMINAL",0,0,"\r\n[?2004l\r]0;tum_cte0515@hkn0401:~/Projects/jasmine[?2004h(jasmine) [tum_cte0515@hkn0401 jasmine]$ ",,terminal_output +36,114461,"TERMINAL",0,0,"s",,terminal_output +37,114644,"TERMINAL",0,0,"h ",,terminal_output +38,115628,"TERMINAL",0,0,"slurm/jobs/mihir/horeka/doom/resolution60x80/sample.sh",,terminal_output +39,116250,"TERMINAL",0,0,"\rslurm/jobs/mihir/horeka/doom/resolution60x80/sample.sh ",,terminal_output +40,117084,"TERMINAL",0,0,"-",,terminal_output +41,117191,"TERMINAL",0,0,"-",,terminal_output +42,117324,"TERMINAL",0,0,"s",,terminal_output +43,117562,"TERMINAL",0,0,"e",,terminal_output +44,117671,"TERMINAL",0,0,"e",,terminal_output +45,117802,"TERMINAL",0,0,"d",,terminal_output +46,118031,"TERMINAL",0,0," ",,terminal_output +47,118231,"TERMINAL",0,0,"=",,terminal_output +48,118328,"TERMINAL",0,0," ",,terminal_output +49,118494,"TERMINAL",0,0,"0",,terminal_output +50,118715,"TERMINAL",0,0,"",,terminal_output +51,118914,"TERMINAL",0,0,"",,terminal_output +52,119025,"TERMINAL",0,0,"",,terminal_output +53,119183,"TERMINAL",0,0,"",,terminal_output +54,119514,"TERMINAL",0,0,"=",,terminal_output +55,119714,"TERMINAL",0,0,"0",,terminal_output +56,121498,"slurm/jobs/mihir/horeka/doom/resolution60x80/sample.sh",0,0,"",shellscript,tab +57,122885,"slurm/jobs/mihir/horeka/doom/resolution60x80/sample.sh",1235,0,"",shellscript,selection_mouse 
+58,124305,"slurm/jobs/mihir/horeka/doom/resolution60x80/sample.sh",1225,0,"",shellscript,selection_mouse +59,125235,"slurm/jobs/mihir/horeka/doom/resolution60x80/sample.sh",1231,0,"",shellscript,selection_mouse +60,126719,"slurm/jobs/mihir/horeka/doom/resolution60x80/sample.sh",1231,0,"5",shellscript,content +61,126721,"slurm/jobs/mihir/horeka/doom/resolution60x80/sample.sh",1232,0,"",shellscript,selection_keyboard +62,126811,"slurm/jobs/mihir/horeka/doom/resolution60x80/sample.sh",1232,0,"0",shellscript,content +63,126812,"slurm/jobs/mihir/horeka/doom/resolution60x80/sample.sh",1233,0,"",shellscript,selection_keyboard +64,126951,"slurm/jobs/mihir/horeka/doom/resolution60x80/sample.sh",1233,0,"0",shellscript,content +65,126954,"slurm/jobs/mihir/horeka/doom/resolution60x80/sample.sh",1234,0,"",shellscript,selection_keyboard +66,128097,"slurm/jobs/mihir/horeka/doom/resolution60x80/sample.sh",1234,0,"k",shellscript,content +67,128099,"slurm/jobs/mihir/horeka/doom/resolution60x80/sample.sh",1235,0,"",shellscript,selection_keyboard +68,128422,"slurm/jobs/mihir/horeka/doom/resolution60x80/sample.sh",1235,0,"-",shellscript,content +69,128423,"slurm/jobs/mihir/horeka/doom/resolution60x80/sample.sh",1236,0,"",shellscript,selection_keyboard +70,131721,"TERMINAL",0,0,"\r\n[?2004l\r",,terminal_output +71,131892,"TERMINAL",0,0,"Sampling from checkpoint: /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/doom/maskgit/dynamics_doom_60x80_500k/500k-run\r\n",,terminal_output +72,132029,"TERMINAL",0,0,"GpuFreq=control_disabled\r\n",,terminal_output +73,149648,"TERMINAL",0,0,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/.venv/lib64/python3.12/site-packages/orbax/checkpoint/_src/serialization/type_handlers.py:1269: UserWarning: Sharding info not provided when restoring. Populating sharding info from sharding file. Please note restoration time will be slightly increased due to reading from file. 
Note also that this option is unsafe when restoring on a different topology than the checkpoint was saved with.\r\n warnings.warn(\r\n",,terminal_output +74,194514,"TERMINAL",0,0,"Per-frame SSIM:\r\n [0.8223411 0.7938965 0.783105 0.7727617 0.73659605 0.7177268\r\n 0.70336735 0.6322166 0.6711414 0.6476595 0.6301328 0.6190219 ]\r\nPer-frame PSNR:\r\n [31.862307 30.408188 29.973576 29.497492 28.283073 27.945354 28.082792\r\n 25.033325 26.47439 24.897293 25.41381 25.012306]\r\nSSIM: 0.710830569267273\r\nPSNR: 27.740325927734375\r\n",,terminal_output +75,200193,"TERMINAL",0,0,"W1008 22:40:45.716021 1279433 pjrt_client.cc:1469] WatchJobStateAsync failed for task goo.gle/debugproto job_name: ""jax_worker"": UNAVAILABLE: Cancelling all calls\r\nAdditional GRPC error information from remote target coordination_service while calling /tensorflow.CoordinationService/WatchJobState:\r\n:UNKNOWN:Error received from peer {grpc_status:14, grpc_message:""Cancelling all calls""}\r\n",,terminal_output +76,200991,"TERMINAL",0,0,"]0;tum_cte0515@hkn0401:~/Projects/jasmine[?2004h(jasmine) [tum_cte0515@hkn0401 jasmine]$ ",,terminal_output +77,262178,"TERMINAL",0,0,"sh slurm/jobs/mihir/horeka/doom/resolution60x80/sample.sh --seed=0",,terminal_output +78,263633,"TERMINAL",0,0,"",,terminal_output +79,263870,"TERMINAL",0,0,"1",,terminal_output +80,263960,"TERMINAL",0,0,"\r\n[?2004l\r",,terminal_output +81,264099,"TERMINAL",0,0,"Sampling from checkpoint: /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/doom/maskgit/dynamics_doom_60x80_500k/500k-run\r\n",,terminal_output +82,264183,"TERMINAL",0,0,"GpuFreq=control_disabled\r\n",,terminal_output +83,273653,"TERMINAL",0,0,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/.venv/lib64/python3.12/site-packages/orbax/checkpoint/_src/serialization/type_handlers.py:1269: UserWarning: Sharding info not provided when restoring. Populating sharding info from sharding file. Please note restoration time will be slightly increased due to reading from file. 
Note also that this option is unsafe when restoring on a different topology than the checkpoint was saved with.\r\n warnings.warn(\r\n",,terminal_output +84,317302,"TERMINAL",0,0,"Per-frame SSIM:\r\n [0.8105514 0.777755 0.7460115 0.78165984 0.78135467 0.764021\r\n 0.74124724 0.7448625 0.7440955 0.75268126 0.72584796 0.7205828 ]\r\nPer-frame PSNR:\r\n [30.68988 29.873669 28.772396 29.946163 29.971062 29.04021 28.249447\r\n 28.21004 27.875742 27.709225 27.671282 27.532646]\r\nSSIM: 0.7575559020042419\r\nPSNR: 28.795146942138672\r\n",,terminal_output +85,318019,"TERMINAL",0,0,"W1008 22:42:43.251227 1280877 pjrt_client.cc:1469] WatchJobStateAsync failed for task goo.gle/debugonly job_name: ""jax_worker"": UNAVAILABLE: failed to connect to all addresses; last error: UNKNOWN: ipv4:10.0.1.33:64365: Failed to connect to remote host: Connection refused\r\nAdditional GRPC error information from remote target coordination_service while calling /tensorflow.CoordinationService/WatchJobState:\r\n:UNKNOWN:Error received from peer {grpc_message:""failed to connect to all addresses; last error: UNKNOWN: ipv4:10.0.1.33:64365: Failed to connect to remote host: Connection refused"", grpc_status:14}\r\n",,terminal_output +86,318515,"TERMINAL",0,0,"]0;tum_cte0515@hkn0401:~/Projects/jasmine[?2004h(jasmine) [tum_cte0515@hkn0401 jasmine]$ ",,terminal_output +87,421403,"TERMINAL",0,0,"s",,terminal_output +88,421535,"TERMINAL",0,0,"c",,terminal_output +89,421731,"TERMINAL",0,0,"a",,terminal_output +90,421828,"TERMINAL",0,0,"n",,terminal_output +91,421933,"TERMINAL",0,0,"c",,terminal_output +92,422042,"TERMINAL",0,0,"e",,terminal_output +93,422178,"TERMINAL",0,0,"l",,terminal_output +94,422275,"TERMINAL",0,0," ",,terminal_output +95,422551,"TERMINAL",0,0,"3547802",,terminal_output +96,425982,"TERMINAL",0,0,"3547802\r\n[?2004l\r",,terminal_output +97,426313,"TERMINAL",0,0,"]0;tum_cte0515@hkn0401:~/Projects/jasmine[?2004h(jasmine) [tum_cte0515@hkn0401 jasmine]$ ",,terminal_output +98,447305,"TERMINAL",0,0,"q",,terminal_output +99,447401,"TERMINAL",0,0,"u",,terminal_output +100,447523,"TERMINAL",0,0,"e",,terminal_output +101,447641,"TERMINAL",0,0,"ue",,terminal_output +102,448231,"TERMINAL",0,0,"\r\n[?2004l\r[?1049h(B[?7hEvery 1.0s: squeue --mehkn0401.localdomain: Wed Oct 8 22:44:53 2025JOBID PARTITION NAME USER ST\tTIME NODES NODELIST(REASON)3547802 accelerat tokenize tum_cte0 CG 1-23:42:39\t 1 hkn06353549097 accelerat tokenize tum_cte0 R 1-12:01:40\t 1 hkn06323554157 dev_accel interact tum_cte0 R\t6:09\t 1 hkn0401",,terminal_output +103,449175,"TERMINAL",0,0,"4110",,terminal_output +104,450206,"TERMINAL",0,0,"521",,terminal_output +105,451263,"TERMINAL",0,0,"632",,terminal_output +106,452219,"TERMINAL",0,0,"743",,terminal_output +107,453277,"TERMINAL",0,0,"854",,terminal_output +108,454252,"TERMINAL",0,0,"965",,terminal_output +109,455272,"TERMINAL",0,0,"5:0076",,terminal_output +110,456283,"TERMINAL",0,0,"187",,terminal_output +111,457433,"TERMINAL",0,0,"298",,terminal_output +112,458163,"TERMINAL",0,0,"[?1049l\r[?1l>]0;tum_cte0515@hkn0401:~/Projects/jasmine[?2004h(jasmine) [tum_cte0515@hkn0401 jasmine]$ ",,terminal_output +113,459087,"TERMINAL",0,0,"bash",,terminal_focus +114,464768,"TERMINAL",0,0,"undefined[tum_cte0515@hkn1993 jasmine]$ watch branch",,terminal_command +115,464851,"TERMINAL",0,0,"]633;C[?1049h(B[?7hEvery 2.0s: branchhkn1993.localdomain: Wed Oct 8 22:45:10 2025sh: line 1: branch: command not found",,terminal_output 
+116,465903,"TERMINAL",0,0,"[?1049l\r[?1l>]0;tum_cte0515@hkn1993:~/Projects/jasmine",,terminal_output +117,470441,"TERMINAL",0,0,"branch",,terminal_command +118,470463,"TERMINAL",0,0,"]633;Cprepend-action-maskgit\r\n]0;tum_cte0515@hkn1993:~/Projects/jasmine",,terminal_output +119,475736,"TERMINAL",0,0,"watch -n1 ""branch""",,terminal_command +120,475801,"TERMINAL",0,0,"]633;C[?1049h(B[?7hEvery 1.0s: branchhkn1993.localdomain: Wed Oct 8 22:45:21 2025sh: line 1: branch: command not found",,terminal_output +121,476855,"TERMINAL",0,0,"2",,terminal_output +122,477022,"TERMINAL",0,0,"[?1049l\r[?1l>]0;tum_cte0515@hkn1993:~/Projects/jasmine",,terminal_output +123,485571,"TERMINAL",0,0,"alias | branch",,terminal_command +124,488268,"TERMINAL",0,0,"alias | grep branch",,terminal_command +125,496895,"TERMINAL",0,0,"watch -n1 git rev-parse --abbrev-ref HEAD",,terminal_command +126,496971,"TERMINAL",0,0,"]633;C[?1049h(B[?7hEvery 1.0s: git rev-parse --abbrev-ref HEADhkn1993.localdomain: Wed Oct 8 22:45:42 2025prepend-action-maskgit",,terminal_output +127,497994,"TERMINAL",0,0,"3",,terminal_output +128,499015,"TERMINAL",0,0,"4",,terminal_output +129,499971,"TERMINAL",0,0,"srun",,terminal_focus +130,500050,"TERMINAL",0,0,"5",,terminal_output +131,501055,"TERMINAL",0,0,"6",,terminal_output +132,502078,"TERMINAL",0,0,"7",,terminal_output +133,503064,"slurm/jobs/mihir/horeka/doom/resolution60x80/sample.sh",0,0,"",shellscript,tab +134,503212,"TERMINAL",0,0,"8",,terminal_output +135,504157,"TERMINAL",0,0,"9",,terminal_output +136,505192,"TERMINAL",0,0,"50",,terminal_output +137,506190,"TERMINAL",0,0,"1",,terminal_output +138,507184,"TERMINAL",0,0,"2",,terminal_output +139,508258,"TERMINAL",0,0,"3",,terminal_output +140,509269,"TERMINAL",0,0,"4",,terminal_output +141,510268,"TERMINAL",0,0,"5",,terminal_output +142,511266,"TERMINAL",0,0,"6",,terminal_output +143,512303,"TERMINAL",0,0,"7",,terminal_output +144,513348,"TERMINAL",0,0,"8",,terminal_output +145,514368,"TERMINAL",0,0,"9",,terminal_output +146,515404,"TERMINAL",0,0,"6:00",,terminal_output +147,516432,"TERMINAL",0,0,"1",,terminal_output +148,517436,"TERMINAL",0,0,"2",,terminal_output +149,518566,"TERMINAL",0,0,"3",,terminal_output +150,519474,"TERMINAL",0,0,"4",,terminal_output +151,520507,"TERMINAL",0,0,"6",,terminal_output +152,521524,"TERMINAL",0,0,"7",,terminal_output +153,522550,"TERMINAL",0,0,"8",,terminal_output +154,523561,"TERMINAL",0,0,"9",,terminal_output +155,524586,"TERMINAL",0,0,"10",,terminal_output +156,525634,"TERMINAL",0,0,"1",,terminal_output +157,526630,"TERMINAL",0,0,"2",,terminal_output +158,527667,"TERMINAL",0,0,"3",,terminal_output +159,528677,"TERMINAL",0,0,"4",,terminal_output +160,529695,"TERMINAL",0,0,"5",,terminal_output +161,530718,"TERMINAL",0,0,"6",,terminal_output +162,531791,"TERMINAL",0,0,"7",,terminal_output +163,532760,"TERMINAL",0,0,"8",,terminal_output +164,533778,"TERMINAL",0,0,"9",,terminal_output +165,534802,"TERMINAL",0,0,"20",,terminal_output +166,535820,"TERMINAL",0,0,"1",,terminal_output +167,536872,"TERMINAL",0,0,"2",,terminal_output +168,537869,"TERMINAL",0,0,"3",,terminal_output +169,538882,"TERMINAL",0,0,"4",,terminal_output +170,540043,"TERMINAL",0,0,"5",,terminal_output +171,541020,"TERMINAL",0,0,"6",,terminal_output +172,541968,"TERMINAL",0,0,"7",,terminal_output +173,543053,"TERMINAL",0,0,"8",,terminal_output +174,544039,"TERMINAL",0,0,"9",,terminal_output +175,545123,"TERMINAL",0,0,"30",,terminal_output +176,546062,"TERMINAL",0,0,"1",,terminal_output +177,547063,"TERMINAL",0,0,"2",,terminal_output 
+178,548086,"TERMINAL",0,0,"3",,terminal_output +179,549196,"TERMINAL",0,0,"4",,terminal_output +180,550205,"TERMINAL",0,0,"5",,terminal_output +181,551175,"TERMINAL",0,0,"6",,terminal_output +182,552277,"TERMINAL",0,0,"7",,terminal_output +183,553270,"TERMINAL",0,0,"8",,terminal_output +184,554436,"TERMINAL",0,0,"9",,terminal_output +185,555327,"TERMINAL",0,0,"40",,terminal_output +186,556359,"TERMINAL",0,0,"1",,terminal_output +187,557518,"TERMINAL",0,0,"2",,terminal_output +188,558354,"TERMINAL",0,0,"3",,terminal_output +189,559360,"TERMINAL",0,0,"4",,terminal_output +190,560372,"TERMINAL",0,0,"5",,terminal_output +191,561390,"TERMINAL",0,0,"6",,terminal_output +192,561648,"TERMINAL",0,0,"watch",,terminal_focus +193,562357,"slurm/jobs/mihir/horeka/doom/resolution60x80/sample.sh",493,0,"",shellscript,selection_mouse +194,562435,"TERMINAL",0,0,"7",,terminal_output +195,563437,"TERMINAL",0,0,"8",,terminal_output +196,564460,"TERMINAL",0,0,"9",,terminal_output +197,565480,"TERMINAL",0,0,"50",,terminal_output +198,566796,"TERMINAL",0,0,"2",,terminal_output +199,567705,"TERMINAL",0,0,"3",,terminal_output +200,568818,"TERMINAL",0,0,"4",,terminal_output +201,569636,"TERMINAL",0,0,"5",,terminal_output +202,570650,"TERMINAL",0,0,"6",,terminal_output +203,571671,"TERMINAL",0,0,"7",,terminal_output +204,572711,"TERMINAL",0,0,"8",,terminal_output +205,573118,"TERMINAL",0,0,"srun",,terminal_focus +206,573837,"TERMINAL",0,0,"9",,terminal_output +207,574752,"TERMINAL",0,0,"g",,terminal_output +208,574761,"TERMINAL",0,0,"7:00",,terminal_output +209,574804,"TERMINAL",0,0,"i",,terminal_output +210,574924,"TERMINAL",0,0,"t",,terminal_output +211,575004,"TERMINAL",0,0," ",,terminal_output +212,575105,"TERMINAL",0,0,"c",,terminal_output +213,575226,"TERMINAL",0,0,"h",,terminal_output +214,575315,"TERMINAL",0,0,"e",,terminal_output +215,575493,"TERMINAL",0,0,"ck",,terminal_output +216,575594,"TERMINAL",0,0,"o",,terminal_output +217,575693,"TERMINAL",0,0,"u",,terminal_output +218,575767,"TERMINAL",0,0,"t",,terminal_output +219,575768,"TERMINAL",0,0,"1",,terminal_output +220,575868,"TERMINAL",0,0," ",,terminal_output +221,575969,"TERMINAL",0,0,"m",,terminal_output +222,576136,"TERMINAL",0,0,"a",,terminal_output +223,576197,"TERMINAL",0,0,"in",,terminal_output +224,576431,"TERMINAL",0,0,"\r\n[?2004l\r",,terminal_output +225,576826,"TERMINAL",0,0,"2",,terminal_output +226,577348,"TERMINAL",0,0,"Switched to branch 'main'\r\nYour branch is up to date with 'origin/main'.\r\n]0;tum_cte0515@hkn0401:~/Projects/jasmine[?2004h(jasmine) [tum_cte0515@hkn0401 jasmine]$ ",,terminal_output +227,577870,"TERMINAL",0,0,"3\rmain",,terminal_output +228,578882,"TERMINAL",0,0,"4",,terminal_output +229,579915,"TERMINAL",0,0,"5",,terminal_output +230,580318,"",0,0,"Switched from branch 'prepend-action-maskgit' to 'main'",,git_branch_checkout +231,580934,"TERMINAL",0,0,"6",,terminal_output +232,582029,"TERMINAL",0,0,"7",,terminal_output +233,583141,"TERMINAL",0,0,"8",,terminal_output +234,584063,"TERMINAL",0,0,"9",,terminal_output +235,585280,"TERMINAL",0,0,"10",,terminal_output +236,586161,"TERMINAL",0,0,"1",,terminal_output +237,587128,"TERMINAL",0,0,"2",,terminal_output +238,588104,"TERMINAL",0,0,"3",,terminal_output +239,589096,"TERMINAL",0,0,"4",,terminal_output +240,590134,"TERMINAL",0,0,"5",,terminal_output +241,591136,"TERMINAL",0,0,"6",,terminal_output +242,592157,"TERMINAL",0,0,"7",,terminal_output +243,593181,"TERMINAL",0,0,"8",,terminal_output +244,594196,"TERMINAL",0,0,"9",,terminal_output 
+245,595237,"TERMINAL",0,0,"20",,terminal_output +246,596240,"TERMINAL",0,0,"1",,terminal_output +247,597261,"TERMINAL",0,0,"2",,terminal_output +248,598293,"TERMINAL",0,0,"3",,terminal_output +249,599448,"TERMINAL",0,0,"4",,terminal_output +250,600333,"TERMINAL",0,0,"5",,terminal_output +251,601352,"TERMINAL",0,0,"6",,terminal_output +252,602376,"TERMINAL",0,0,"7",,terminal_output +253,603418,"TERMINAL",0,0,"8",,terminal_output +254,604433,"TERMINAL",0,0,"9",,terminal_output +255,605442,"TERMINAL",0,0,"30",,terminal_output +256,606466,"TERMINAL",0,0,"1",,terminal_output +257,607472,"TERMINAL",0,0,"2",,terminal_output +258,608575,"TERMINAL",0,0,"3",,terminal_output +259,609515,"TERMINAL",0,0,"5",,terminal_output +260,610559,"TERMINAL",0,0,"6",,terminal_output +261,611574,"TERMINAL",0,0,"7",,terminal_output +262,612591,"TERMINAL",0,0,"8",,terminal_output +263,613636,"TERMINAL",0,0,"9",,terminal_output +264,614647,"TERMINAL",0,0,"40",,terminal_output +265,615758,"TERMINAL",0,0,"1",,terminal_output +266,616691,"TERMINAL",0,0,"2",,terminal_output +267,617715,"TERMINAL",0,0,"3",,terminal_output +268,618789,"TERMINAL",0,0,"4",,terminal_output +269,619755,"TERMINAL",0,0,"5",,terminal_output +270,620776,"TERMINAL",0,0,"6",,terminal_output +271,621834,"TERMINAL",0,0,"7",,terminal_output +272,622880,"TERMINAL",0,0,"8",,terminal_output +273,623945,"TERMINAL",0,0,"9",,terminal_output +274,624940,"TERMINAL",0,0,"50",,terminal_output +275,625998,"TERMINAL",0,0,"1",,terminal_output +276,627106,"TERMINAL",0,0,"2",,terminal_output +277,628071,"TERMINAL",0,0,"3",,terminal_output +278,629028,"TERMINAL",0,0,"4",,terminal_output +279,630080,"TERMINAL",0,0,"5",,terminal_output +280,631061,"TERMINAL",0,0,"6",,terminal_output +281,632088,"TERMINAL",0,0,"7",,terminal_output +282,633105,"TERMINAL",0,0,"8",,terminal_output +283,634148,"TERMINAL",0,0,"9",,terminal_output +284,635149,"TERMINAL",0,0,"8:00",,terminal_output +285,636170,"TERMINAL",0,0,"1",,terminal_output +286,637194,"TERMINAL",0,0,"2",,terminal_output +287,638279,"TERMINAL",0,0,"3",,terminal_output +288,639278,"TERMINAL",0,0,"4",,terminal_output +289,640284,"TERMINAL",0,0,"5",,terminal_output +290,641281,"TERMINAL",0,0,"6",,terminal_output +291,642304,"TERMINAL",0,0,"7",,terminal_output +292,643329,"TERMINAL",0,0,"8",,terminal_output +293,644367,"TERMINAL",0,0,"9",,terminal_output +294,645393,"TERMINAL",0,0,"10",,terminal_output +295,646413,"TERMINAL",0,0,"1",,terminal_output +296,647429,"TERMINAL",0,0,"2",,terminal_output +297,648499,"TERMINAL",0,0,"3",,terminal_output +298,649503,"TERMINAL",0,0,"4",,terminal_output +299,650685,"TERMINAL",0,0,"5",,terminal_output +300,651595,"TERMINAL",0,0,"7",,terminal_output +301,652538,"TERMINAL",0,0,"8",,terminal_output +302,653634,"TERMINAL",0,0,"9",,terminal_output +303,654597,"TERMINAL",0,0,"20",,terminal_output +304,655623,"TERMINAL",0,0,"1",,terminal_output +305,656642,"TERMINAL",0,0,"2",,terminal_output +306,657661,"TERMINAL",0,0,"3",,terminal_output +307,658801,"TERMINAL",0,0,"4",,terminal_output +308,659707,"TERMINAL",0,0,"5",,terminal_output +309,660764,"TERMINAL",0,0,"6",,terminal_output +310,661752,"TERMINAL",0,0,"7",,terminal_output +311,662764,"TERMINAL",0,0,"8",,terminal_output +312,663808,"TERMINAL",0,0,"9",,terminal_output +313,664820,"TERMINAL",0,0,"30",,terminal_output +314,665841,"TERMINAL",0,0,"1",,terminal_output +315,666895,"TERMINAL",0,0,"2",,terminal_output +316,667915,"TERMINAL",0,0,"3",,terminal_output +317,668899,"TERMINAL",0,0,"4",,terminal_output 
+318,669919,"TERMINAL",0,0,"5",,terminal_output +319,670939,"TERMINAL",0,0,"6",,terminal_output +320,671984,"TERMINAL",0,0,"7",,terminal_output +321,672991,"TERMINAL",0,0,"8",,terminal_output +322,674099,"TERMINAL",0,0,"9",,terminal_output +323,675010,"TERMINAL",0,0,"40",,terminal_output +324,675969,"jasmine/train_tokenizer.py",0,0,"import os\n\nos.environ.setdefault(""XLA_PYTHON_CLIENT_MEM_FRACTION"", ""0.98"")\n\nfrom dataclasses import dataclass, field\nfrom typing import cast, Optional\n\nimport einops\nimport itertools\nfrom jax.sharding import Mesh, PartitionSpec, NamedSharding\nfrom jax.experimental.mesh_utils import create_device_mesh\nimport optax\nimport orbax.checkpoint as ocp\nimport numpy as np\nimport dm_pix as pix\nimport jax\nimport jax.numpy as jnp\nimport tyro\nimport wandb\nimport grain\nimport flax.nnx as nnx\n\nfrom models.tokenizer import TokenizerVQVAE\nfrom utils.dataloader import get_dataloader\nfrom utils.train_utils import (\n get_lr_schedule,\n count_parameters_by_component,\n print_mem_stats,\n print_compiled_memory_stats,\n print_compiled_cost_analysis,\n)\n\n\n@dataclass\nclass Args:\n # Experiment\n num_steps: int = 300_000\n seed: int = 0\n seq_len: int = 16\n image_channels: int = 3\n image_height: int = 64\n image_width: int = 64\n data_dir: str = """"\n save_ckpt: bool = False\n restore_ckpt: bool = False\n # Optimization\n vq_beta: float = 0.25\n batch_size: int = 48\n init_lr: float = 0.0\n max_lr: float = 3e-4\n decay_end: float = 0.0\n wsd_decay_steps: int = (\n 30_000 # NOTE: wsd_decay_steps will only be used when using a wsd-schedule\n )\n lr_schedule: str = ""wsd"" # supported options: wsd, cos\n warmup_steps: int = 10000\n # Tokenizer\n model_dim: int = 512\n ffn_dim: int = 2048\n latent_dim: int = 32\n num_latents: int = 1024\n patch_size: int = 16\n num_blocks: int = 4\n num_heads: int = 8\n dropout: float = 0.0\n codebook_dropout: float = 0.01\n param_dtype = jnp.float32\n dtype = jnp.bfloat16\n use_flash_attention: bool = True\n # Logging\n log: bool = True\n entity: str = """"\n project: str = """"\n name: str = ""train_tokenizer""\n tags: list[str] = field(default_factory=lambda: [""tokenizer""])\n log_interval: int = 50\n log_image_interval: int = 1000\n ckpt_dir: str = """"\n log_checkpoint_interval: int = 1000\n log_checkpoint_keep_period: int = 20_000\n log_gradients: bool = False\n val_data_dir: str = """"\n val_interval: int = 20_000\n val_steps: int = 50\n wandb_id: str = """"\n\n\ndef build_model(args: Args, rng: jax.Array) -> tuple[TokenizerVQVAE, jax.Array]:\n rng, _rng = jax.random.split(rng)\n rngs = nnx.Rngs(_rng)\n return (\n TokenizerVQVAE(\n in_dim=args.image_channels,\n model_dim=args.model_dim,\n ffn_dim=args.ffn_dim,\n latent_dim=args.latent_dim,\n num_latents=args.num_latents,\n patch_size=args.patch_size,\n num_blocks=args.num_blocks,\n num_heads=args.num_heads,\n dropout=args.dropout,\n codebook_dropout=args.codebook_dropout,\n param_dtype=args.param_dtype,\n dtype=args.dtype,\n use_flash_attention=args.use_flash_attention,\n rngs=rngs,\n ),\n rng,\n )\n\n\ndef build_optimizer(model: TokenizerVQVAE, args: Args) -> nnx.ModelAndOptimizer:\n lr_schedule = get_lr_schedule(\n args.lr_schedule,\n args.init_lr,\n args.max_lr,\n args.decay_end,\n args.num_steps,\n args.warmup_steps,\n args.wsd_decay_steps,\n )\n tx = optax.adamw(\n learning_rate=lr_schedule,\n b1=0.9,\n b2=0.9,\n weight_decay=1e-4,\n mu_dtype=args.param_dtype, # moments in full precision\n )\n optimizer = nnx.ModelAndOptimizer(model, tx)\n return 
optimizer\n\n\ndef build_mesh_and_sharding(\n num_devices: int,\n) -> tuple[Mesh, NamedSharding, NamedSharding]:\n device_mesh_arr = create_device_mesh((num_devices,))\n mesh = Mesh(devices=device_mesh_arr, axis_names=(""data"",))\n replicated_sharding = NamedSharding(mesh, PartitionSpec())\n videos_sharding = NamedSharding(mesh, PartitionSpec(""data"", None, None, None, None))\n return mesh, replicated_sharding, videos_sharding\n\n\ndef shard_optimizer_states(\n optimizer: nnx.ModelAndOptimizer, replicated_sharding: NamedSharding\n) -> None:\n model_state = nnx.state(optimizer.model)\n model_sharded_state = jax.lax.with_sharding_constraint(\n model_state, replicated_sharding\n )\n nnx.update(optimizer.model, model_sharded_state)\n optimizer_state = nnx.state(optimizer, nnx.optimizer.OptState)\n optimizer_sharded_state = jax.lax.with_sharding_constraint(\n optimizer_state, replicated_sharding\n )\n nnx.update(optimizer, optimizer_sharded_state)\n\n\ndef build_dataloader(args: Args, data_dir: str) -> grain.DataLoaderIterator:\n image_shape = (args.image_height, args.image_width, args.image_channels)\n array_record_files = [\n os.path.join(data_dir, x)\n for x in os.listdir(data_dir)\n if x.endswith("".array_record"")\n ]\n grain_dataloader = get_dataloader(\n array_record_files,\n args.seq_len,\n # NOTE: We deliberately pass the global batch size\n # The dataloader shards the dataset across all processes\n args.batch_size,\n *image_shape,\n num_workers=8,\n prefetch_buffer_size=1,\n seed=args.seed,\n )\n initial_state = grain_dataloader._create_initial_state()\n grain_iterator = grain.DataLoaderIterator(grain_dataloader, initial_state)\n return grain_iterator\n\n\ndef build_checkpoint_manager(args: Args) -> Optional[ocp.CheckpointManager]:\n if args.restore_ckpt or args.save_ckpt:\n handler_registry = ocp.handlers.DefaultCheckpointHandlerRegistry()\n handler_registry.add(\n ""model_state"", ocp.args.PyTreeSave, ocp.handlers.PyTreeCheckpointHandler\n )\n handler_registry.add(\n ""model_state"", ocp.args.PyTreeRestore, ocp.handlers.PyTreeCheckpointHandler\n )\n handler_registry.add(\n ""train_dataloader_state"",\n grain.checkpoint.CheckpointSave,\n cast(ocp.handlers.CheckpointHandler, grain.checkpoint.CheckpointHandler),\n )\n handler_registry.add(\n ""train_dataloader_state"",\n grain.checkpoint.CheckpointRestore,\n cast(ocp.handlers.CheckpointHandler, grain.checkpoint.CheckpointHandler),\n )\n if args.val_data_dir:\n handler_registry.add(\n ""val_dataloader_state"",\n grain.checkpoint.CheckpointSave,\n cast(\n ocp.handlers.CheckpointHandler, grain.checkpoint.CheckpointHandler\n ),\n )\n handler_registry.add(\n ""val_dataloader_state"",\n grain.checkpoint.CheckpointRestore,\n cast(\n ocp.handlers.CheckpointHandler, grain.checkpoint.CheckpointHandler\n ),\n )\n checkpoint_options = ocp.CheckpointManagerOptions(\n save_interval_steps=args.log_checkpoint_interval,\n max_to_keep=3,\n keep_period=args.log_checkpoint_keep_period,\n step_format_fixed_length=6,\n cleanup_tmp_directories=True,\n )\n checkpoint_manager = ocp.CheckpointManager(\n args.ckpt_dir,\n options=checkpoint_options,\n handler_registry=handler_registry,\n )\n return checkpoint_manager\n else:\n return None\n\n\ndef restore_checkpoint_if_needed(\n args: Args,\n checkpoint_manager: Optional[ocp.CheckpointManager],\n optimizer: nnx.ModelAndOptimizer,\n train_iterator: grain.DataLoaderIterator,\n val_iterator: Optional[grain.DataLoaderIterator],\n restore_step: Optional[int] = None,\n) -> tuple[\n int, nnx.ModelAndOptimizer, 
grain.DataLoaderIterator, grain.DataLoaderIterator\n]:\n step = 0\n if checkpoint_manager and restore_step is None:\n restore_step = checkpoint_manager.latest_step()\n if args.restore_ckpt:\n assert checkpoint_manager is not None\n abstract_optimizer = nnx.eval_shape(lambda: optimizer)\n abstract_optimizer_state = nnx.state(abstract_optimizer)\n if val_iterator:\n restore_args = ocp.args.Composite(\n model_state=ocp.args.PyTreeRestore(abstract_optimizer_state), # type: ignore\n train_dataloader_state=grain.checkpoint.CheckpointRestore(train_iterator), # type: ignore\n val_dataloader_state=grain.checkpoint.CheckpointRestore(val_iterator), # type: ignore\n )\n else:\n restore_args = ocp.args.Composite(\n model_state=ocp.args.PyTreeRestore(abstract_optimizer_state), # type: ignore\n train_dataloader_state=grain.checkpoint.CheckpointRestore(train_iterator), # type: ignore\n )\n restored = checkpoint_manager.restore(restore_step, args=restore_args)\n restored_optimizer_state = restored[""model_state""]\n nnx.update(optimizer, restored_optimizer_state)\n train_iterator = restored[""train_dataloader_state""]\n if val_iterator:\n val_iterator = restored[""val_dataloader_state""]\n step = restore_step or 0\n print(f""Restored dataloader and model state from step {step}"")\n return step, optimizer, train_iterator, val_iterator\n\n\ndef main(args: Args) -> None:\n jax.distributed.initialize()\n num_devices = jax.device_count()\n if num_devices == 0:\n raise ValueError(""No JAX devices found."")\n print(f""Running on {num_devices} devices."")\n\n if args.batch_size % num_devices != 0:\n raise ValueError(\n f""Global batch size {args.batch_size} must be divisible by ""\n f""number of devices {num_devices}.""\n )\n\n rng = jax.random.key(args.seed)\n\n # --- Initialize model ---\n tokenizer, rng = build_model(args, rng)\n\n _, params, _ = nnx.split(tokenizer, nnx.Param, ...)\n param_counts = count_parameters_by_component(params)\n\n if args.log and jax.process_index() == 0:\n wandb_init_kwargs = {\n ""entity"": args.entity,\n ""project"": args.project,\n ""name"": args.name,\n ""tags"": args.tags,\n ""group"": ""debug"",\n ""config"": args,\n }\n\n if args.wandb_id:\n wandb_init_kwargs.update(\n {\n ""id"": args.wandb_id,\n ""resume"": ""allow"",\n }\n )\n wandb.init(**wandb_init_kwargs)\n\n wandb.config.update({""model_param_count"": param_counts})\n\n print(""Parameter counts:"")\n print(param_counts)\n\n # --- Initialize optimizer ---\n optimizer = build_optimizer(tokenizer, args)\n del tokenizer\n\n # FIXME: switch to create_hybrid_device_mesh for runs spanning multiple nodes\n _, replicated_sharding, videos_sharding = build_mesh_and_sharding(num_devices)\n\n shard_optimizer_states(optimizer, replicated_sharding)\n\n # --- Initialize checkpoint manager ---\n checkpoint_manager = build_checkpoint_manager(args)\n\n # --- Create DataLoaderIterator from dataloader ---\n train_iterator = build_dataloader(args, args.data_dir)\n val_iterator = None\n if args.val_data_dir:\n val_iterator = build_dataloader(args, args.val_data_dir)\n\n # --- Restore checkpoint ---\n step, optimizer, train_iterator, val_iterator = restore_checkpoint_if_needed(\n args, checkpoint_manager, optimizer, train_iterator, val_iterator\n )\n\n # --- Define loss and train step (close over args) ---\n def tokenizer_loss_fn(\n model: TokenizerVQVAE, inputs: dict, training: bool = False\n ) -> tuple[jax.Array, tuple[jax.Array, dict]]:\n gt = jnp.asarray(inputs[""videos""], dtype=jnp.float32) / 255.0\n inputs[""videos""] = 
gt.astype(args.dtype)\n outputs = model(inputs, training=training)\n outputs[""recon""] = outputs[""recon""].astype(jnp.float32)\n mse = jnp.square(gt - outputs[""recon""]).mean()\n q_loss = jnp.square(jax.lax.stop_gradient(outputs[""emb""]) - outputs[""z""]).mean()\n commitment_loss = jnp.square(\n outputs[""emb""] - jax.lax.stop_gradient(outputs[""z""])\n ).mean()\n loss = mse + q_loss + args.vq_beta * commitment_loss\n\n gt_clipped = gt.clip(0, 1).reshape(-1, *gt.shape[2:])\n recon = outputs[""recon""].clip(0, 1).reshape(-1, *outputs[""recon""].shape[2:])\n psnr = jnp.asarray(pix.psnr(gt_clipped, recon)).mean()\n ssim = jnp.asarray(pix.ssim(gt_clipped, recon)).mean()\n _, index_counts = jnp.unique_counts(\n jnp.ravel(outputs[""indices""]), size=args.num_latents, fill_value=0\n )\n codebook_usage = (index_counts != 0).mean()\n metrics = dict(\n loss=loss,\n mse=mse,\n q_loss=q_loss,\n commitment_loss=commitment_loss,\n psnr=psnr,\n ssim=ssim,\n codebook_usage=codebook_usage,\n )\n return loss, (outputs[""recon""], metrics)\n\n @nnx.jit(donate_argnums=0)\n def train_step(\n optimizer: nnx.ModelAndOptimizer, inputs: dict\n ) -> tuple[jax.Array, jax.Array, dict]:\n def loss_fn(model: TokenizerVQVAE) -> tuple[jax.Array, tuple[jax.Array, dict]]:\n model.train()\n return tokenizer_loss_fn(model, inputs, training=True)\n\n (loss, (recon, metrics)), grads = nnx.value_and_grad(loss_fn, has_aux=True)(\n optimizer.model\n )\n optimizer.update(grads)\n if args.log_gradients:\n metrics[""encoder_gradients_std/""] = jax.tree.map(\n lambda x: x.std(), grads[""params""][""encoder""]\n )\n metrics[""vq_gradients_std/""] = jax.tree.map(\n lambda x: x.std(), grads[""params""][""vq""]\n )\n metrics[""decoder_gradients_std/""] = jax.tree.map(\n lambda x: x.std(), grads[""params""][""decoder""]\n )\n return loss, recon, metrics\n\n @nnx.jit\n def val_step(\n tokenizer: TokenizerVQVAE, inputs: dict\n ) -> tuple[jax.Array, jax.Array, dict]:\n tokenizer.eval()\n (loss, (recon, metrics)) = tokenizer_loss_fn(tokenizer, inputs, training=False)\n return loss, recon, metrics\n\n def calculate_validation_metrics(val_dataloader, tokenizer):\n step = 0\n loss_per_step = []\n metrics_per_step = []\n batch = None\n recon = None\n for batch in val_dataloader:\n loss, recon, metrics = val_step(tokenizer, batch)\n loss_per_step.append(loss)\n metrics_per_step.append(metrics)\n step += 1\n if step > args.val_steps:\n break\n\n if step < args.val_steps:\n print(\n f""Warning: Your validation dataset is too small to make val_steps many steps. 
Made {step} steps, expected {args.val_steps}""\n )\n\n val_loss = np.mean(loss_per_step)\n val_metrics = {\n f""val_{key}"": np.mean([float(m[key]) for m in metrics_per_step])\n for key in metrics_per_step[0].keys()\n }\n val_metrics[""val_loss""] = val_loss\n return val_metrics, batch, recon\n\n # --- TRAIN LOOP ---\n dataloader_train = (\n {\n ""videos"": jax.make_array_from_process_local_data(\n videos_sharding, elem[""videos""]\n ),\n }\n for elem in train_iterator\n )\n dataloader_val = None\n if val_iterator:\n dataloader_val = (\n {\n ""videos"": jax.make_array_from_process_local_data(\n videos_sharding, elem[""videos""]\n ),\n }\n for elem in val_iterator\n )\n if jax.process_index() == 0:\n first_batch = next(dataloader_train)\n compiled = train_step.lower(optimizer, first_batch).compile()\n print_compiled_memory_stats(compiled.memory_analysis())\n print_compiled_cost_analysis(compiled.cost_analysis())\n # Do not skip the first batch during training\n dataloader_train = itertools.chain([first_batch], dataloader_train)\n print(f""Starting training from step {step}..."")\n first_step = step\n while step < args.num_steps:\n for batch in dataloader_train:\n # --- Train step ---\n loss, recon, metrics = train_step(optimizer, batch)\n if step == first_step:\n print_mem_stats(""After params initialized"")\n step += 1\n\n # --- Validation loss ---\n val_results = {}\n if dataloader_val and step % args.val_interval == 0:\n print(""Calculating validation metrics..."")\n val_metrics, val_gt_batch, val_recon = calculate_validation_metrics(\n dataloader_val, optimizer.model\n )\n print(f""Step {step}, validation loss: {val_metrics['val_loss']}"")\n val_results = {\n ""metrics"": val_metrics,\n ""gt_batch"": val_gt_batch,\n ""recon"": val_recon,\n }\n\n # --- Logging ---\n if args.log:\n if step % args.log_interval == 0 and jax.process_index() == 0:\n log_dict = {""loss"": loss, ""step"": step, **metrics}\n if val_results:\n log_dict.update(val_results[""metrics""])\n wandb.log(log_dict)\n if step % args.log_image_interval == 0:\n gt_seq = batch[""videos""][0].astype(jnp.float32) / 255.0\n recon_seq = recon[0].clip(0, 1)\n comparison_seq = jnp.concatenate((gt_seq, recon_seq), axis=1)\n comparison_seq = einops.rearrange(\n comparison_seq * 255, ""t h w c -> h (t w) c""\n )\n if val_results and step % args.val_interval == 0:\n val_results[""gt_seq_val""] = (\n val_results[""gt_batch""][""videos""][0].astype(jnp.float32)\n / 255.0\n )\n val_results[""recon_seq_val""] = val_results[""recon""][0].clip(\n 0, 1\n )\n val_results[""val_comparison_seq""] = jnp.concatenate(\n (val_results[""gt_seq_val""], val_results[""recon_seq_val""]),\n axis=1,\n )\n val_results[""val_comparison_seq""] = einops.rearrange(\n val_results[""val_comparison_seq""] * 255,\n ""t h w c -> h (t w) c"",\n )\n # NOTE: Process-dependent control flow deliberately happens\n # after indexing operation since it must not contain code\n # sections that lead to cross-accelerator communication.\n if jax.process_index() == 0:\n log_images = dict(\n image=wandb.Image(np.asarray(gt_seq[0])),\n recon=wandb.Image(np.asarray(recon_seq[0])),\n true_vs_recon=wandb.Image(\n np.asarray(comparison_seq.astype(np.uint8))\n ),\n )\n if val_results and step % args.val_interval == 0:\n log_images.update(\n dict(\n val_image=wandb.Image(\n np.asarray(val_results[""gt_seq_val""][0])\n ),\n val_recon=wandb.Image(\n np.asarray(val_results[""recon_seq_val""][0])\n ),\n val_true_vs_recon=wandb.Image(\n np.asarray(\n 
val_results[""val_comparison_seq""].astype(\n np.uint8\n )\n )\n ),\n )\n )\n wandb.log(log_images)\n # --- Checkpointing ---\n if args.save_ckpt and step % args.log_checkpoint_interval == 0:\n assert checkpoint_manager is not None\n optimizer_state = nnx.state(optimizer)\n if val_iterator:\n ckpt_manager_args = ocp.args.Composite(\n model_state=ocp.args.PyTreeSave(optimizer_state), # type: ignore\n train_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n train_iterator # type: ignore\n ),\n val_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n val_iterator # type: ignore\n ),\n )\n else:\n ckpt_manager_args = ocp.args.Composite(\n model_state=ocp.args.PyTreeSave(optimizer_state), # type: ignore\n train_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n train_iterator # type: ignore\n ),\n )\n checkpoint_manager.save(step, args=ckpt_manager_args)\n print(f""Saved checkpoint at step {step}"")\n if step >= args.num_steps:\n break\n\n if checkpoint_manager:\n checkpoint_manager.close()\n\n\nif __name__ == ""__main__"":\n args = tyro.cli(Args)\n main(args)\n",python,tab +325,676139,"TERMINAL",0,0,"1",,terminal_output +326,677135,"TERMINAL",0,0,"2",,terminal_output +327,677955,"jasmine/train_tokenizer copy.py",0,0,"import os\n\nos.environ.setdefault(""XLA_PYTHON_CLIENT_MEM_FRACTION"", ""0.98"")\n\nfrom dataclasses import dataclass, field\nfrom typing import cast, Optional\n\nimport einops\nimport itertools\nfrom jax.sharding import Mesh, PartitionSpec, NamedSharding\nfrom jax.experimental.mesh_utils import create_device_mesh\nimport optax\nimport orbax.checkpoint as ocp\nimport numpy as np\nimport dm_pix as pix\nimport jax\nimport jax.numpy as jnp\nimport tyro\nimport wandb\nimport grain\nimport flax.nnx as nnx\n\nfrom models.tokenizer import TokenizerVQVAE\nfrom utils.dataloader import get_dataloader\nfrom utils.train_utils import (\n get_lr_schedule,\n count_parameters_by_component,\n print_mem_stats,\n print_compiled_memory_stats,\n print_compiled_cost_analysis,\n)\n\n\n@dataclass\nclass Args:\n # Experiment\n num_steps: int = 300_000\n seed: int = 0\n seq_len: int = 16\n image_channels: int = 3\n image_height: int = 64\n image_width: int = 64\n data_dir: str = """"\n save_ckpt: bool = False\n restore_ckpt: bool = False\n # Optimization\n vq_beta: float = 0.25\n batch_size: int = 48\n init_lr: float = 0.0\n max_lr: float = 3e-4\n decay_end: float = 0.0\n wsd_decay_steps: int = (\n 30_000 # NOTE: wsd_decay_steps will only be used when using a wsd-schedule\n )\n lr_schedule: str = ""wsd"" # supported options: wsd, cos\n warmup_steps: int = 10000\n # Tokenizer\n model_dim: int = 512\n ffn_dim: int = 2048\n latent_dim: int = 32\n num_latents: int = 1024\n patch_size: int = 16\n num_blocks: int = 4\n num_heads: int = 8\n dropout: float = 0.0\n codebook_dropout: float = 0.01\n param_dtype = jnp.float32\n dtype = jnp.bfloat16\n use_flash_attention: bool = True\n # Logging\n log: bool = True\n entity: str = """"\n project: str = """"\n name: str = ""train_tokenizer""\n tags: list[str] = field(default_factory=lambda: [""tokenizer""])\n log_interval: int = 50\n log_image_interval: int = 1000\n ckpt_dir: str = """"\n log_checkpoint_interval: int = 1000\n log_checkpoint_keep_period: int = 20_000\n log_gradients: bool = False\n val_data_dir: str = """"\n val_interval: int = 20_000\n val_steps: int = 50\n wandb_id: str = """"\n\n\ndef build_model(args: Args, rng: jax.Array) -> tuple[TokenizerVQVAE, jax.Array]:\n rng, _rng = jax.random.split(rng)\n rngs = 
nnx.Rngs(_rng)\n return (\n TokenizerVQVAE(\n in_dim=args.image_channels,\n model_dim=args.model_dim,\n ffn_dim=args.ffn_dim,\n latent_dim=args.latent_dim,\n num_latents=args.num_latents,\n patch_size=args.patch_size,\n num_blocks=args.num_blocks,\n num_heads=args.num_heads,\n dropout=args.dropout,\n codebook_dropout=args.codebook_dropout,\n param_dtype=args.param_dtype,\n dtype=args.dtype,\n use_flash_attention=args.use_flash_attention,\n rngs=rngs,\n ),\n rng,\n )\n\n\ndef build_optimizer(model: TokenizerVQVAE, args: Args) -> nnx.ModelAndOptimizer:\n lr_schedule = get_lr_schedule(\n args.lr_schedule,\n args.init_lr,\n args.max_lr,\n args.decay_end,\n args.num_steps,\n args.warmup_steps,\n args.wsd_decay_steps,\n )\n tx = optax.adamw(\n learning_rate=lr_schedule,\n b1=0.9,\n b2=0.9,\n weight_decay=1e-4,\n mu_dtype=args.param_dtype, # moments in full precision\n )\n optimizer = nnx.ModelAndOptimizer(model, tx)\n return optimizer\n\n\ndef build_mesh_and_sharding(\n num_devices: int,\n) -> tuple[Mesh, NamedSharding, NamedSharding]:\n device_mesh_arr = create_device_mesh((num_devices,))\n mesh = Mesh(devices=device_mesh_arr, axis_names=(""data"",))\n replicated_sharding = NamedSharding(mesh, PartitionSpec())\n videos_sharding = NamedSharding(mesh, PartitionSpec(""data"", None, None, None, None))\n return mesh, replicated_sharding, videos_sharding\n\n\ndef shard_optimizer_states(\n optimizer: nnx.ModelAndOptimizer, replicated_sharding: NamedSharding\n) -> None:\n model_state = nnx.state(optimizer.model)\n model_sharded_state = jax.lax.with_sharding_constraint(\n model_state, replicated_sharding\n )\n nnx.update(optimizer.model, model_sharded_state)\n optimizer_state = nnx.state(optimizer, nnx.optimizer.OptState)\n optimizer_sharded_state = jax.lax.with_sharding_constraint(\n optimizer_state, replicated_sharding\n )\n nnx.update(optimizer, optimizer_sharded_state)\n\n\ndef build_dataloader(args: Args, data_dir: str) -> grain.DataLoaderIterator:\n image_shape = (args.image_height, args.image_width, args.image_channels)\n array_record_files = [\n os.path.join(data_dir, x)\n for x in os.listdir(data_dir)\n if x.endswith("".array_record"")\n ]\n grain_dataloader = get_dataloader(\n array_record_files,\n args.seq_len,\n # NOTE: We deliberately pass the global batch size\n # The dataloader shards the dataset across all processes\n args.batch_size,\n *image_shape,\n num_workers=8,\n prefetch_buffer_size=1,\n seed=args.seed,\n )\n initial_state = grain_dataloader._create_initial_state()\n grain_iterator = grain.DataLoaderIterator(grain_dataloader, initial_state)\n return grain_iterator\n\n\ndef build_checkpoint_manager(args: Args) -> Optional[ocp.CheckpointManager]:\n if args.restore_ckpt or args.save_ckpt:\n handler_registry = ocp.handlers.DefaultCheckpointHandlerRegistry()\n handler_registry.add(\n ""model_state"", ocp.args.PyTreeSave, ocp.handlers.PyTreeCheckpointHandler\n )\n handler_registry.add(\n ""model_state"", ocp.args.PyTreeRestore, ocp.handlers.PyTreeCheckpointHandler\n )\n handler_registry.add(\n ""train_dataloader_state"",\n grain.checkpoint.CheckpointSave,\n cast(ocp.handlers.CheckpointHandler, grain.checkpoint.CheckpointHandler),\n )\n handler_registry.add(\n ""train_dataloader_state"",\n grain.checkpoint.CheckpointRestore,\n cast(ocp.handlers.CheckpointHandler, grain.checkpoint.CheckpointHandler),\n )\n if args.val_data_dir:\n handler_registry.add(\n ""val_dataloader_state"",\n grain.checkpoint.CheckpointSave,\n cast(\n ocp.handlers.CheckpointHandler, 
grain.checkpoint.CheckpointHandler\n ),\n )\n handler_registry.add(\n ""val_dataloader_state"",\n grain.checkpoint.CheckpointRestore,\n cast(\n ocp.handlers.CheckpointHandler, grain.checkpoint.CheckpointHandler\n ),\n )\n checkpoint_options = ocp.CheckpointManagerOptions(\n save_interval_steps=args.log_checkpoint_interval,\n max_to_keep=3,\n keep_period=args.log_checkpoint_keep_period,\n step_format_fixed_length=6,\n cleanup_tmp_directories=True,\n )\n checkpoint_manager = ocp.CheckpointManager(\n args.ckpt_dir,\n options=checkpoint_options,\n handler_registry=handler_registry,\n )\n return checkpoint_manager\n else:\n return None\n\n\ndef restore_checkpoint_if_needed(\n args: Args,\n checkpoint_manager: Optional[ocp.CheckpointManager],\n optimizer: nnx.ModelAndOptimizer,\n train_iterator: grain.DataLoaderIterator,\n val_iterator: Optional[grain.DataLoaderIterator],\n restore_step: Optional[int] = None,\n) -> tuple[\n int, nnx.ModelAndOptimizer, grain.DataLoaderIterator, grain.DataLoaderIterator\n]:\n step = 0\n if checkpoint_manager and restore_step is None:\n restore_step = checkpoint_manager.latest_step()\n if args.restore_ckpt:\n assert checkpoint_manager is not None\n abstract_optimizer = nnx.eval_shape(lambda: optimizer)\n abstract_optimizer_state = nnx.state(abstract_optimizer)\n if val_iterator:\n restore_args = ocp.args.Composite(\n model_state=ocp.args.PyTreeRestore(abstract_optimizer_state), # type: ignore\n train_dataloader_state=grain.checkpoint.CheckpointRestore(train_iterator), # type: ignore\n val_dataloader_state=grain.checkpoint.CheckpointRestore(val_iterator), # type: ignore\n )\n else:\n restore_args = ocp.args.Composite(\n model_state=ocp.args.PyTreeRestore(abstract_optimizer_state), # type: ignore\n train_dataloader_state=grain.checkpoint.CheckpointRestore(train_iterator), # type: ignore\n )\n restored = checkpoint_manager.restore(restore_step, args=restore_args)\n restored_optimizer_state = restored[""model_state""]\n nnx.update(optimizer, restored_optimizer_state)\n train_iterator = restored[""train_dataloader_state""]\n if val_iterator:\n val_iterator = restored[""val_dataloader_state""]\n step = restore_step or 0\n print(f""Restored dataloader and model state from step {step}"")\n return step, optimizer, train_iterator, val_iterator\n\n\ndef main(args: Args) -> None:\n jax.distributed.initialize()\n num_devices = jax.device_count()\n if num_devices == 0:\n raise ValueError(""No JAX devices found."")\n print(f""Running on {num_devices} devices."")\n\n if args.batch_size % num_devices != 0:\n raise ValueError(\n f""Global batch size {args.batch_size} must be divisible by ""\n f""number of devices {num_devices}.""\n )\n\n rng = jax.random.key(args.seed)\n\n # --- Initialize model ---\n tokenizer, rng = build_model(args, rng)\n\n _, params, _ = nnx.split(tokenizer, nnx.Param, ...)\n param_counts = count_parameters_by_component(params)\n\n if args.log and jax.process_index() == 0:\n wandb_init_kwargs = {\n ""entity"": args.entity,\n ""project"": args.project,\n ""name"": args.name,\n ""tags"": args.tags,\n ""group"": ""debug"",\n ""config"": args,\n }\n\n if args.wandb_id:\n wandb_init_kwargs.update(\n {\n ""id"": args.wandb_id,\n ""resume"": ""allow"",\n }\n )\n wandb.init(**wandb_init_kwargs)\n\n wandb.config.update({""model_param_count"": param_counts})\n\n print(""Parameter counts:"")\n print(param_counts)\n\n # --- Initialize optimizer ---\n optimizer = build_optimizer(tokenizer, args)\n del tokenizer\n\n # FIXME: switch to create_hybrid_device_mesh for runs 
spanning multiple nodes\n _, replicated_sharding, videos_sharding = build_mesh_and_sharding(num_devices)\n\n shard_optimizer_states(optimizer, replicated_sharding)\n\n # --- Initialize checkpoint manager ---\n checkpoint_manager = build_checkpoint_manager(args)\n\n # --- Create DataLoaderIterator from dataloader ---\n train_iterator = build_dataloader(args, args.data_dir)\n val_iterator = None\n if args.val_data_dir:\n val_iterator = build_dataloader(args, args.val_data_dir)\n\n # --- Restore checkpoint ---\n step, optimizer, train_iterator, val_iterator = restore_checkpoint_if_needed(\n args, checkpoint_manager, optimizer, train_iterator, val_iterator\n )\n\n # --- Define loss and train step (close over args) ---\n def tokenizer_loss_fn(\n model: TokenizerVQVAE, inputs: dict, training: bool = False\n ) -> tuple[jax.Array, tuple[jax.Array, dict]]:\n gt = jnp.asarray(inputs[""videos""], dtype=jnp.float32) / 255.0\n inputs[""videos""] = gt.astype(args.dtype)\n outputs = model(inputs, training=training)\n outputs[""recon""] = outputs[""recon""].astype(jnp.float32)\n mse = jnp.square(gt - outputs[""recon""]).mean()\n q_loss = jnp.square(jax.lax.stop_gradient(outputs[""emb""]) - outputs[""z""]).mean()\n commitment_loss = jnp.square(\n outputs[""emb""] - jax.lax.stop_gradient(outputs[""z""])\n ).mean()\n loss = mse + q_loss + args.vq_beta * commitment_loss\n\n gt_clipped = gt.clip(0, 1).reshape(-1, *gt.shape[2:])\n recon = outputs[""recon""].clip(0, 1).reshape(-1, *outputs[""recon""].shape[2:])\n psnr = jnp.asarray(pix.psnr(gt_clipped, recon)).mean()\n ssim = jnp.asarray(pix.ssim(gt_clipped, recon)).mean()\n _, index_counts = jnp.unique_counts(\n jnp.ravel(outputs[""indices""]), size=args.num_latents, fill_value=0\n )\n codebook_usage = (index_counts != 0).mean()\n metrics = dict(\n loss=loss,\n mse=mse,\n q_loss=q_loss,\n commitment_loss=commitment_loss,\n psnr=psnr,\n ssim=ssim,\n codebook_usage=codebook_usage,\n )\n return loss, (outputs[""recon""], metrics)\n\n @nnx.jit(donate_argnums=0)\n def train_step(\n optimizer: nnx.ModelAndOptimizer, inputs: dict\n ) -> tuple[jax.Array, jax.Array, dict]:\n def loss_fn(model: TokenizerVQVAE) -> tuple[jax.Array, tuple[jax.Array, dict]]:\n model.train()\n return tokenizer_loss_fn(model, inputs, training=True)\n\n (loss, (recon, metrics)), grads = nnx.value_and_grad(loss_fn, has_aux=True)(\n optimizer.model\n )\n optimizer.update(grads)\n if args.log_gradients:\n metrics[""encoder_gradients_std/""] = jax.tree.map(\n lambda x: x.std(), grads[""params""][""encoder""]\n )\n metrics[""vq_gradients_std/""] = jax.tree.map(\n lambda x: x.std(), grads[""params""][""vq""]\n )\n metrics[""decoder_gradients_std/""] = jax.tree.map(\n lambda x: x.std(), grads[""params""][""decoder""]\n )\n return loss, recon, metrics\n\n @nnx.jit\n def val_step(\n tokenizer: TokenizerVQVAE, inputs: dict\n ) -> tuple[jax.Array, jax.Array, dict]:\n tokenizer.eval()\n (loss, (recon, metrics)) = tokenizer_loss_fn(tokenizer, inputs, training=False)\n return loss, recon, metrics\n\n def calculate_validation_metrics(val_dataloader, tokenizer):\n step = 0\n loss_per_step = []\n metrics_per_step = []\n batch = None\n recon = None\n for batch in val_dataloader:\n loss, recon, metrics = val_step(tokenizer, batch)\n loss_per_step.append(loss)\n metrics_per_step.append(metrics)\n step += 1\n if step > args.val_steps:\n break\n\n if step < args.val_steps:\n print(\n f""Warning: Your validation dataset is too small to make val_steps many steps. 
Made {step} steps, expected {args.val_steps}""\n )\n\n val_loss = np.mean(loss_per_step)\n val_metrics = {\n f""val_{key}"": np.mean([float(m[key]) for m in metrics_per_step])\n for key in metrics_per_step[0].keys()\n }\n val_metrics[""val_loss""] = val_loss\n return val_metrics, batch, recon\n\n # --- TRAIN LOOP ---\n dataloader_train = (\n {\n ""videos"": jax.make_array_from_process_local_data(\n videos_sharding, elem[""videos""]\n ),\n }\n for elem in train_iterator\n )\n dataloader_val = None\n if val_iterator:\n dataloader_val = (\n {\n ""videos"": jax.make_array_from_process_local_data(\n videos_sharding, elem[""videos""]\n ),\n }\n for elem in val_iterator\n )\n if jax.process_index() == 0:\n first_batch = next(dataloader_train)\n compiled = train_step.lower(optimizer, first_batch).compile()\n print_compiled_memory_stats(compiled.memory_analysis())\n print_compiled_cost_analysis(compiled.cost_analysis())\n # Do not skip the first batch during training\n dataloader_train = itertools.chain([first_batch], dataloader_train)\n print(f""Starting training from step {step}..."")\n first_step = step\n while step < args.num_steps:\n for batch in dataloader_train:\n # --- Train step ---\n loss, recon, metrics = train_step(optimizer, batch)\n if step == first_step:\n print_mem_stats(""After params initialized"")\n step += 1\n\n # --- Validation loss ---\n val_results = {}\n if dataloader_val and step % args.val_interval == 0:\n print(""Calculating validation metrics..."")\n val_metrics, val_gt_batch, val_recon = calculate_validation_metrics(\n dataloader_val, optimizer.model\n )\n print(f""Step {step}, validation loss: {val_metrics['val_loss']}"")\n val_results = {\n ""metrics"": val_metrics,\n ""gt_batch"": val_gt_batch,\n ""recon"": val_recon,\n }\n\n # --- Logging ---\n if args.log:\n if step % args.log_interval == 0 and jax.process_index() == 0:\n log_dict = {""loss"": loss, ""step"": step, **metrics}\n if val_results:\n log_dict.update(val_results[""metrics""])\n wandb.log(log_dict)\n if step % args.log_image_interval == 0:\n gt_seq = batch[""videos""][0].astype(jnp.float32) / 255.0\n recon_seq = recon[0].clip(0, 1)\n comparison_seq = jnp.concatenate((gt_seq, recon_seq), axis=1)\n comparison_seq = einops.rearrange(\n comparison_seq * 255, ""t h w c -> h (t w) c""\n )\n if val_results and step % args.val_interval == 0:\n val_results[""gt_seq_val""] = (\n val_results[""gt_batch""][""videos""][0].astype(jnp.float32)\n / 255.0\n )\n val_results[""recon_seq_val""] = val_results[""recon""][0].clip(\n 0, 1\n )\n val_results[""val_comparison_seq""] = jnp.concatenate(\n (val_results[""gt_seq_val""], val_results[""recon_seq_val""]),\n axis=1,\n )\n val_results[""val_comparison_seq""] = einops.rearrange(\n val_results[""val_comparison_seq""] * 255,\n ""t h w c -> h (t w) c"",\n )\n # NOTE: Process-dependent control flow deliberately happens\n # after indexing operation since it must not contain code\n # sections that lead to cross-accelerator communication.\n if jax.process_index() == 0:\n log_images = dict(\n image=wandb.Image(np.asarray(gt_seq[0])),\n recon=wandb.Image(np.asarray(recon_seq[0])),\n true_vs_recon=wandb.Image(\n np.asarray(comparison_seq.astype(np.uint8))\n ),\n )\n if val_results and step % args.val_interval == 0:\n log_images.update(\n dict(\n val_image=wandb.Image(\n np.asarray(val_results[""gt_seq_val""][0])\n ),\n val_recon=wandb.Image(\n np.asarray(val_results[""recon_seq_val""][0])\n ),\n val_true_vs_recon=wandb.Image(\n np.asarray(\n 
val_results[""val_comparison_seq""].astype(\n np.uint8\n )\n )\n ),\n )\n )\n wandb.log(log_images)\n # --- Checkpointing ---\n if args.save_ckpt and step % args.log_checkpoint_interval == 0:\n assert checkpoint_manager is not None\n optimizer_state = nnx.state(optimizer)\n if val_iterator:\n ckpt_manager_args = ocp.args.Composite(\n model_state=ocp.args.PyTreeSave(optimizer_state), # type: ignore\n train_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n train_iterator # type: ignore\n ),\n val_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n val_iterator # type: ignore\n ),\n )\n else:\n ckpt_manager_args = ocp.args.Composite(\n model_state=ocp.args.PyTreeSave(optimizer_state), # type: ignore\n train_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n train_iterator # type: ignore\n ),\n )\n checkpoint_manager.save(step, args=ckpt_manager_args)\n print(f""Saved checkpoint at step {step}"")\n if step >= args.num_steps:\n break\n\n if checkpoint_manager:\n checkpoint_manager.close()\n\n\nif __name__ == ""__main__"":\n args = tyro.cli(Args)\n main(args)\n",python,tab +328,678370,"TERMINAL",0,0,"3",,terminal_output +329,679142,"TERMINAL",0,0,"4",,terminal_output +330,680176,"TERMINAL",0,0,"5",,terminal_output +331,681138,"TERMINAL",0,0,"6",,terminal_output +332,682169,"TERMINAL",0,0,"7",,terminal_output +333,683188,"TERMINAL",0,0,"8",,terminal_output +334,684209,"TERMINAL",0,0,"9",,terminal_output +335,685233,"TERMINAL",0,0,"50",,terminal_output +336,686279,"TERMINAL",0,0,"1",,terminal_output +337,687397,"jasmine/train_tokenizer_appendix-c.py",0,0,"import os\n\nos.environ.setdefault(""XLA_PYTHON_CLIENT_MEM_FRACTION"", ""0.98"")\n\nfrom dataclasses import dataclass, field\nfrom typing import cast, Optional\n\nimport einops\nimport itertools\nfrom jax.sharding import Mesh, PartitionSpec, NamedSharding\nfrom jax.experimental.mesh_utils import create_device_mesh\nimport optax\nimport orbax.checkpoint as ocp\nimport numpy as np\nimport dm_pix as pix\nimport jax\nimport jax.numpy as jnp\nimport tyro\nimport wandb\nimport grain\nimport flax.nnx as nnx\n\nfrom models.tokenizer import TokenizerVQVAE\nfrom utils.dataloader import get_dataloader\nfrom utils.train_utils import (\n get_lr_schedule,\n count_parameters_by_component,\n print_mem_stats,\n print_compiled_memory_stats,\n print_compiled_cost_analysis,\n)\n\n\n@dataclass\nclass Args:\n # Experiment\n num_steps: int = 300_000\n seed: int = 0\n seq_len: int = 16\n image_channels: int = 3\n image_height: int = 64\n image_width: int = 64\n data_dir: str = """"\n save_ckpt: bool = False\n restore_ckpt: bool = False\n # Optimization\n vq_beta: float = 0.25\n batch_size: int = 48\n init_lr: float = 0.0\n max_lr: float = 3e-4\n decay_end: float = 0.0\n wsd_decay_steps: int = (\n 30_000 # NOTE: wsd_decay_steps will only be used when using a wsd-schedule\n )\n lr_schedule: str = ""wsd"" # supported options: wsd, cos\n warmup_steps: int = 10000\n # Tokenizer\n model_dim: int = 512\n ffn_dim: int = 2048\n latent_dim: int = 32\n num_latents: int = 1024\n patch_size: int = 16\n num_blocks: int = 4\n num_heads: int = 8\n dropout: float = 0.0\n codebook_dropout: float = 0.01\n param_dtype = jnp.float32\n dtype = jnp.bfloat16\n use_flash_attention: bool = True\n # Logging\n log: bool = True\n entity: str = """"\n project: str = """"\n name: str = ""train_tokenizer""\n tags: list[str] = field(default_factory=lambda: [""tokenizer""])\n log_interval: int = 50\n log_image_interval: int = 1000\n ckpt_dir: str = """"\n 
log_checkpoint_interval: int = 1000\n log_checkpoint_keep_period: int = 20_000\n log_gradients: bool = False\n val_data_dir: str = """"\n val_interval: int = 20_000\n val_steps: int = 50\n wandb_id: str = """"\n\n\ndef build_model(args: Args, rng: jax.Array) -> tuple[TokenizerVQVAE, jax.Array]:\n rng, _rng = jax.random.split(rng)\n rngs = nnx.Rngs(_rng)\n return (\n TokenizerVQVAE(\n in_dim=args.image_channels,\n model_dim=args.model_dim,\n ffn_dim=args.ffn_dim,\n latent_dim=args.latent_dim,\n num_latents=args.num_latents,\n patch_size=args.patch_size,\n num_blocks=args.num_blocks,\n num_heads=args.num_heads,\n dropout=args.dropout,\n codebook_dropout=args.codebook_dropout,\n param_dtype=args.param_dtype,\n dtype=args.dtype,\n use_flash_attention=args.use_flash_attention,\n rngs=rngs,\n ),\n rng,\n )\n\n\ndef build_optimizer(model: TokenizerVQVAE, args: Args) -> nnx.ModelAndOptimizer:\n lr_schedule = get_lr_schedule(\n args.lr_schedule,\n args.init_lr,\n args.max_lr,\n args.decay_end,\n args.num_steps,\n args.warmup_steps,\n args.wsd_decay_steps,\n )\n tx = optax.adamw(\n learning_rate=lr_schedule,\n b1=0.9,\n b2=0.9,\n weight_decay=1e-4,\n mu_dtype=args.param_dtype, # moments in full precision\n )\n optimizer = nnx.ModelAndOptimizer(model, tx)\n return optimizer\n\n\ndef build_mesh_and_sharding(\n num_devices: int,\n) -> tuple[Mesh, NamedSharding, NamedSharding]:\n device_mesh_arr = create_device_mesh((num_devices,))\n mesh = Mesh(devices=device_mesh_arr, axis_names=(""data"",))\n replicated_sharding = NamedSharding(mesh, PartitionSpec())\n videos_sharding = NamedSharding(mesh, PartitionSpec(""data"", None, None, None, None))\n return mesh, replicated_sharding, videos_sharding\n\n\ndef shard_optimizer_states(\n optimizer: nnx.ModelAndOptimizer, replicated_sharding: NamedSharding\n) -> None:\n model_state = nnx.state(optimizer.model)\n model_sharded_state = jax.lax.with_sharding_constraint(\n model_state, replicated_sharding\n )\n nnx.update(optimizer.model, model_sharded_state)\n optimizer_state = nnx.state(optimizer, nnx.optimizer.OptState)\n optimizer_sharded_state = jax.lax.with_sharding_constraint(\n optimizer_state, replicated_sharding\n )\n nnx.update(optimizer, optimizer_sharded_state)\n\n\ndef build_dataloader(args: Args, data_dir: str) -> grain.DataLoaderIterator:\n image_shape = (args.image_height, args.image_width, args.image_channels)\n array_record_files = [\n os.path.join(data_dir, x)\n for x in os.listdir(data_dir)\n if x.endswith("".array_record"")\n ]\n grain_dataloader = get_dataloader(\n array_record_files,\n args.seq_len,\n # NOTE: We deliberately pass the global batch size\n # The dataloader shards the dataset across all processes\n args.batch_size,\n *image_shape,\n num_workers=8,\n prefetch_buffer_size=1,\n seed=args.seed,\n )\n initial_state = grain_dataloader._create_initial_state()\n grain_iterator = grain.DataLoaderIterator(grain_dataloader, initial_state)\n return grain_iterator\n\n\ndef build_checkpoint_manager(args: Args) -> Optional[ocp.CheckpointManager]:\n if args.restore_ckpt or args.save_ckpt:\n handler_registry = ocp.handlers.DefaultCheckpointHandlerRegistry()\n handler_registry.add(\n ""model_state"", ocp.args.PyTreeSave, ocp.handlers.PyTreeCheckpointHandler\n )\n handler_registry.add(\n ""model_state"", ocp.args.PyTreeRestore, ocp.handlers.PyTreeCheckpointHandler\n )\n handler_registry.add(\n ""train_dataloader_state"",\n grain.checkpoint.CheckpointSave,\n cast(ocp.handlers.CheckpointHandler, grain.checkpoint.CheckpointHandler),\n )\n 
handler_registry.add(\n ""train_dataloader_state"",\n grain.checkpoint.CheckpointRestore,\n cast(ocp.handlers.CheckpointHandler, grain.checkpoint.CheckpointHandler),\n )\n if args.val_data_dir:\n handler_registry.add(\n ""val_dataloader_state"",\n grain.checkpoint.CheckpointSave,\n cast(\n ocp.handlers.CheckpointHandler, grain.checkpoint.CheckpointHandler\n ),\n )\n handler_registry.add(\n ""val_dataloader_state"",\n grain.checkpoint.CheckpointRestore,\n cast(\n ocp.handlers.CheckpointHandler, grain.checkpoint.CheckpointHandler\n ),\n )\n checkpoint_options = ocp.CheckpointManagerOptions(\n save_interval_steps=args.log_checkpoint_interval,\n max_to_keep=3,\n keep_period=args.log_checkpoint_keep_period,\n step_format_fixed_length=6,\n cleanup_tmp_directories=True,\n )\n checkpoint_manager = ocp.CheckpointManager(\n args.ckpt_dir,\n options=checkpoint_options,\n handler_registry=handler_registry,\n )\n return checkpoint_manager\n else:\n return None\n\n\ndef restore_checkpoint_if_needed(\n args: Args,\n checkpoint_manager: Optional[ocp.CheckpointManager],\n optimizer: nnx.ModelAndOptimizer,\n train_iterator: grain.DataLoaderIterator,\n val_iterator: Optional[grain.DataLoaderIterator],\n restore_step: Optional[int] = None,\n) -> tuple[\n int, nnx.ModelAndOptimizer, grain.DataLoaderIterator, grain.DataLoaderIterator\n]:\n step = 0\n if checkpoint_manager and restore_step is None:\n restore_step = checkpoint_manager.latest_step()\n if args.restore_ckpt:\n assert checkpoint_manager is not None\n abstract_optimizer = nnx.eval_shape(lambda: optimizer)\n abstract_optimizer_state = nnx.state(abstract_optimizer)\n if val_iterator:\n restore_args = ocp.args.Composite(\n model_state=ocp.args.PyTreeRestore(abstract_optimizer_state), # type: ignore\n train_dataloader_state=grain.checkpoint.CheckpointRestore(train_iterator), # type: ignore\n val_dataloader_state=grain.checkpoint.CheckpointRestore(val_iterator), # type: ignore\n )\n else:\n restore_args = ocp.args.Composite(\n model_state=ocp.args.PyTreeRestore(abstract_optimizer_state), # type: ignore\n train_dataloader_state=grain.checkpoint.CheckpointRestore(train_iterator), # type: ignore\n )\n restored = checkpoint_manager.restore(restore_step, args=restore_args)\n restored_optimizer_state = restored[""model_state""]\n nnx.update(optimizer, restored_optimizer_state)\n train_iterator = restored[""train_dataloader_state""]\n if val_iterator:\n val_iterator = restored[""val_dataloader_state""]\n step = restore_step or 0\n print(f""Restored dataloader and model state from step {step}"")\n return step, optimizer, train_iterator, val_iterator\n\n\ndef main(args: Args) -> None:\n jax.distributed.initialize()\n num_devices = jax.device_count()\n if num_devices == 0:\n raise ValueError(""No JAX devices found."")\n print(f""Running on {num_devices} devices."")\n\n if args.batch_size % num_devices != 0:\n raise ValueError(\n f""Global batch size {args.batch_size} must be divisible by ""\n f""number of devices {num_devices}.""\n )\n\n rng = jax.random.key(args.seed)\n\n # --- Initialize model ---\n tokenizer, rng = build_model(args, rng)\n\n _, params, _ = nnx.split(tokenizer, nnx.Param, ...)\n param_counts = count_parameters_by_component(params)\n\n if args.log and jax.process_index() == 0:\n wandb_init_kwargs = {\n ""entity"": args.entity,\n ""project"": args.project,\n ""name"": args.name,\n ""tags"": args.tags,\n ""group"": ""debug"",\n ""config"": args,\n }\n\n if args.wandb_id:\n wandb_init_kwargs.update(\n {\n ""id"": args.wandb_id,\n ""resume"": 
""allow"",\n }\n )\n wandb.init(**wandb_init_kwargs)\n\n wandb.config.update({""model_param_count"": param_counts})\n\n print(""Parameter counts:"")\n print(param_counts)\n\n # --- Initialize optimizer ---\n optimizer = build_optimizer(tokenizer, args)\n del tokenizer\n\n # FIXME: switch to create_hybrid_device_mesh for runs spanning multiple nodes\n _, replicated_sharding, videos_sharding = build_mesh_and_sharding(num_devices)\n\n shard_optimizer_states(optimizer, replicated_sharding)\n\n # --- Initialize checkpoint manager ---\n checkpoint_manager = build_checkpoint_manager(args)\n\n # --- Create DataLoaderIterator from dataloader ---\n train_iterator = build_dataloader(args, args.data_dir)\n val_iterator = None\n if args.val_data_dir:\n val_iterator = build_dataloader(args, args.val_data_dir)\n\n # --- Restore checkpoint ---\n step, optimizer, train_iterator, val_iterator = restore_checkpoint_if_needed(\n args, checkpoint_manager, optimizer, train_iterator, val_iterator\n )\n\n # --- Define loss and train step (close over args) ---\n def tokenizer_loss_fn(\n model: TokenizerVQVAE, inputs: dict, training: bool = False\n ) -> tuple[jax.Array, tuple[jax.Array, dict]]:\n gt = jnp.asarray(inputs[""videos""], dtype=jnp.float32) / 255.0\n inputs[""videos""] = gt.astype(args.dtype)\n outputs = model(inputs, training=training)\n outputs[""recon""] = outputs[""recon""].astype(jnp.float32)\n mse = jnp.square(gt - outputs[""recon""]).mean()\n q_loss = jnp.square(jax.lax.stop_gradient(outputs[""emb""]) - outputs[""z""]).mean()\n commitment_loss = jnp.square(\n outputs[""emb""] - jax.lax.stop_gradient(outputs[""z""])\n ).mean()\n loss = mse + q_loss + args.vq_beta * commitment_loss\n\n gt_clipped = gt.clip(0, 1).reshape(-1, *gt.shape[2:])\n recon = outputs[""recon""].clip(0, 1).reshape(-1, *outputs[""recon""].shape[2:])\n psnr = jnp.asarray(pix.psnr(gt_clipped, recon)).mean()\n ssim = jnp.asarray(pix.ssim(gt_clipped, recon)).mean()\n _, index_counts = jnp.unique_counts(\n jnp.ravel(outputs[""indices""]), size=args.num_latents, fill_value=0\n )\n codebook_usage = (index_counts != 0).mean()\n metrics = dict(\n loss=loss,\n mse=mse,\n q_loss=q_loss,\n commitment_loss=commitment_loss,\n psnr=psnr,\n ssim=ssim,\n codebook_usage=codebook_usage,\n )\n return loss, (outputs[""recon""], metrics)\n\n @nnx.jit(donate_argnums=0)\n def train_step(\n optimizer: nnx.ModelAndOptimizer, inputs: dict\n ) -> tuple[jax.Array, jax.Array, dict]:\n def loss_fn(model: TokenizerVQVAE) -> tuple[jax.Array, tuple[jax.Array, dict]]:\n model.train()\n return tokenizer_loss_fn(model, inputs, training=True)\n\n (loss, (recon, metrics)), grads = nnx.value_and_grad(loss_fn, has_aux=True)(\n optimizer.model\n )\n optimizer.update(grads)\n if args.log_gradients:\n metrics[""encoder_gradients_std/""] = jax.tree.map(\n lambda x: x.std(), grads[""params""][""encoder""]\n )\n metrics[""vq_gradients_std/""] = jax.tree.map(\n lambda x: x.std(), grads[""params""][""vq""]\n )\n metrics[""decoder_gradients_std/""] = jax.tree.map(\n lambda x: x.std(), grads[""params""][""decoder""]\n )\n return loss, recon, metrics\n\n @nnx.jit\n def val_step(\n tokenizer: TokenizerVQVAE, inputs: dict\n ) -> tuple[jax.Array, jax.Array, dict]:\n tokenizer.eval()\n (loss, (recon, metrics)) = tokenizer_loss_fn(tokenizer, inputs, training=False)\n return loss, recon, metrics\n\n def calculate_validation_metrics(val_dataloader, tokenizer):\n step = 0\n loss_per_step = []\n metrics_per_step = []\n batch = None\n recon = None\n for batch in val_dataloader:\n loss, 
recon, metrics = val_step(tokenizer, batch)\n loss_per_step.append(loss)\n metrics_per_step.append(metrics)\n step += 1\n if step > args.val_steps:\n break\n\n if step < args.val_steps:\n print(\n f""Warning: Your validation dataset is too small to make val_steps many steps. Made {step} steps, expected {args.val_steps}""\n )\n\n val_loss = np.mean(loss_per_step)\n val_metrics = {\n f""val_{key}"": np.mean([float(m[key]) for m in metrics_per_step])\n for key in metrics_per_step[0].keys()\n }\n val_metrics[""val_loss""] = val_loss\n return val_metrics, batch, recon\n\n # --- TRAIN LOOP ---\n dataloader_train = (\n {\n ""videos"": jax.make_array_from_process_local_data(\n videos_sharding, elem[""videos""]\n ),\n }\n for elem in train_iterator\n )\n dataloader_val = None\n if val_iterator:\n dataloader_val = (\n {\n ""videos"": jax.make_array_from_process_local_data(\n videos_sharding, elem[""videos""]\n ),\n }\n for elem in val_iterator\n )\n if jax.process_index() == 0:\n first_batch = next(dataloader_train)\n compiled = train_step.lower(optimizer, first_batch).compile()\n print_compiled_memory_stats(compiled.memory_analysis())\n print_compiled_cost_analysis(compiled.cost_analysis())\n # Do not skip the first batch during training\n dataloader_train = itertools.chain([first_batch], dataloader_train)\n print(f""Starting training from step {step}..."")\n first_step = step\n while step < args.num_steps:\n for batch in dataloader_train:\n # --- Train step ---\n loss, recon, metrics = train_step(optimizer, batch)\n if step == first_step:\n print_mem_stats(""After params initialized"")\n step += 1\n\n # --- Validation loss ---\n val_results = {}\n if dataloader_val and step % args.val_interval == 0:\n print(""Calculating validation metrics..."")\n val_metrics, val_gt_batch, val_recon = calculate_validation_metrics(\n dataloader_val, optimizer.model\n )\n print(f""Step {step}, validation loss: {val_metrics['val_loss']}"")\n val_results = {\n ""metrics"": val_metrics,\n ""gt_batch"": val_gt_batch,\n ""recon"": val_recon,\n }\n\n # --- Logging ---\n if args.log:\n if step % args.log_interval == 0 and jax.process_index() == 0:\n log_dict = {""loss"": loss, ""step"": step, **metrics}\n if val_results:\n log_dict.update(val_results[""metrics""])\n wandb.log(log_dict)\n if step % args.log_image_interval == 0:\n gt_seq = batch[""videos""][0].astype(jnp.float32) / 255.0\n recon_seq = recon[0].clip(0, 1)\n comparison_seq = jnp.concatenate((gt_seq, recon_seq), axis=1)\n comparison_seq = einops.rearrange(\n comparison_seq * 255, ""t h w c -> h (t w) c""\n )\n if val_results and step % args.val_interval == 0:\n val_results[""gt_seq_val""] = (\n val_results[""gt_batch""][""videos""][0].astype(jnp.float32)\n / 255.0\n )\n val_results[""recon_seq_val""] = val_results[""recon""][0].clip(\n 0, 1\n )\n val_results[""val_comparison_seq""] = jnp.concatenate(\n (val_results[""gt_seq_val""], val_results[""recon_seq_val""]),\n axis=1,\n )\n val_results[""val_comparison_seq""] = einops.rearrange(\n val_results[""val_comparison_seq""] * 255,\n ""t h w c -> h (t w) c"",\n )\n # NOTE: Process-dependent control flow deliberately happens\n # after indexing operation since it must not contain code\n # sections that lead to cross-accelerator communication.\n if jax.process_index() == 0:\n log_images = dict(\n image=wandb.Image(np.asarray(gt_seq[0])),\n recon=wandb.Image(np.asarray(recon_seq[0])),\n true_vs_recon=wandb.Image(\n np.asarray(comparison_seq.astype(np.uint8))\n ),\n )\n if val_results and step % args.val_interval == 
0:\n log_images.update(\n dict(\n val_image=wandb.Image(\n np.asarray(val_results[""gt_seq_val""][0])\n ),\n val_recon=wandb.Image(\n np.asarray(val_results[""recon_seq_val""][0])\n ),\n val_true_vs_recon=wandb.Image(\n np.asarray(\n val_results[""val_comparison_seq""].astype(\n np.uint8\n )\n )\n ),\n )\n )\n wandb.log(log_images)\n # --- Checkpointing ---\n if args.save_ckpt and step % args.log_checkpoint_interval == 0:\n assert checkpoint_manager is not None\n optimizer_state = nnx.state(optimizer)\n if val_iterator:\n ckpt_manager_args = ocp.args.Composite(\n model_state=ocp.args.PyTreeSave(optimizer_state), # type: ignore\n train_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n train_iterator # type: ignore\n ),\n val_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n val_iterator # type: ignore\n ),\n )\n else:\n ckpt_manager_args = ocp.args.Composite(\n model_state=ocp.args.PyTreeSave(optimizer_state), # type: ignore\n train_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n train_iterator # type: ignore\n ),\n )\n checkpoint_manager.save(step, args=ckpt_manager_args)\n print(f""Saved checkpoint at step {step}"")\n if step >= args.num_steps:\n break\n\n if checkpoint_manager:\n checkpoint_manager.close()\n\n\nif __name__ == ""__main__"":\n args = tyro.cli(Args)\n main(args)\n",python,tab +338,687414,"TERMINAL",0,0,"2",,terminal_output +339,688361,"TERMINAL",0,0,"3",,terminal_output +340,689818,"TERMINAL",0,0,"4",,terminal_output +341,690423,"TERMINAL",0,0,"5",,terminal_output +342,691389,"TERMINAL",0,0,"6",,terminal_output +343,692405,"TERMINAL",0,0,"7",,terminal_output +344,693458,"TERMINAL",0,0,"8",,terminal_output +345,694470,"TERMINAL",0,0,"9",,terminal_output +346,695500,"TERMINAL",0,0,"9:00",,terminal_output +347,696479,"TERMINAL",0,0,"1",,terminal_output +348,697539,"TERMINAL",0,0,"3",,terminal_output +349,698528,"TERMINAL",0,0,"4",,terminal_output +350,699578,"TERMINAL",0,0,"5",,terminal_output +351,700601,"TERMINAL",0,0,"6",,terminal_output +352,701668,"TERMINAL",0,0,"7",,terminal_output +353,702707,"TERMINAL",0,0,"8",,terminal_output +354,703127,"jasmine/train_tokenizer_appendix-c.py",15054,0,"",python,selection_mouse +355,703129,"jasmine/train_tokenizer_appendix-c.py",15053,0,"",python,selection_command +356,703722,"jasmine/train_tokenizer_appendix-c.py",15034,0,"",python,selection_mouse +357,703748,"TERMINAL",0,0,"9",,terminal_output +358,704788,"TERMINAL",0,0,"10",,terminal_output +359,705831,"TERMINAL",0,0,"1",,terminal_output +360,706505,"jasmine/train_tokenizer_appendix-c.py",15566,0,"",python,selection_mouse +361,706752,"TERMINAL",0,0,"2",,terminal_output +362,707061,"jasmine/train_tokenizer_appendix-c.py",15532,0,"",python,selection_mouse +363,707811,"jasmine/train_tokenizer_appendix-c.py",15517,32," if jax.process_index() == 0:",python,selection_command +364,707818,"TERMINAL",0,0,"3",,terminal_output +365,707976,"jasmine/train_tokenizer_appendix-c.py",15517,77," if jax.process_index() == 0:\n first_batch = next(dataloader_train)",python,selection_command +366,708288,"jasmine/train_tokenizer_appendix-c.py",15517,147," if jax.process_index() == 0:\n first_batch = next(dataloader_train)\n compiled = train_step.lower(optimizer, first_batch).compile()",python,selection_command +367,708438,"jasmine/train_tokenizer_appendix-c.py",15517,211," if jax.process_index() == 0:\n first_batch = next(dataloader_train)\n compiled = train_step.lower(optimizer, first_batch).compile()\n 
print_compiled_memory_stats(compiled.memory_analysis())",python,selection_command +368,708571,"jasmine/train_tokenizer_appendix-c.py",15517,274," if jax.process_index() == 0:\n first_batch = next(dataloader_train)\n compiled = train_step.lower(optimizer, first_batch).compile()\n print_compiled_memory_stats(compiled.memory_analysis())\n print_compiled_cost_analysis(compiled.cost_analysis())",python,selection_command +369,708707,"jasmine/train_tokenizer_appendix-c.py",15517,328," if jax.process_index() == 0:\n first_batch = next(dataloader_train)\n compiled = train_step.lower(optimizer, first_batch).compile()\n print_compiled_memory_stats(compiled.memory_analysis())\n print_compiled_cost_analysis(compiled.cost_analysis())\n # Do not skip the first batch during training",python,selection_command +370,708754,"TERMINAL",0,0,"4",,terminal_output +371,708851,"jasmine/train_tokenizer_appendix-c.py",15517,404," if jax.process_index() == 0:\n first_batch = next(dataloader_train)\n compiled = train_step.lower(optimizer, first_batch).compile()\n print_compiled_memory_stats(compiled.memory_analysis())\n print_compiled_cost_analysis(compiled.cost_analysis())\n # Do not skip the first batch during training\n dataloader_train = itertools.chain([first_batch], dataloader_train)",python,selection_command +372,709064,"jasmine/train_tokenizer_appendix-c.py",15517,405,"",python,content +373,709092,"jasmine/train_tokenizer_appendix-c.py",15521,0,"",python,selection_command +374,709208,"jasmine/train_tokenizer_appendix-c.py",15511,0,"",python,selection_command +375,709398,"jasmine/train_tokenizer_appendix-c.py",15474,0,"",python,selection_command +376,709590,"jasmine/train_tokenizer_appendix-c.py",15460,0,"",python,selection_command +377,709953,"TERMINAL",0,0,"5",,terminal_output +378,710103,"jasmine/train_tokenizer_appendix-c.py",15441,0,"",python,selection_command +379,710108,"jasmine/train_tokenizer_appendix-c.py",15389,0,"",python,selection_command +380,710137,"jasmine/train_tokenizer_appendix-c.py",15323,0,"",python,selection_command +381,710176,"jasmine/train_tokenizer_appendix-c.py",15309,0,"",python,selection_command +382,710216,"jasmine/train_tokenizer_appendix-c.py",15282,0,"",python,selection_command +383,710232,"jasmine/train_tokenizer_appendix-c.py",15261,0,"",python,selection_command +384,710328,"jasmine/train_tokenizer_appendix-c.py",15235,0,"",python,selection_command +385,710333,"jasmine/train_tokenizer_appendix-c.py",15229,0,"",python,selection_command +386,710372,"jasmine/train_tokenizer_appendix-c.py",15194,0,"",python,selection_command +387,710373,"jasmine/train_tokenizer_appendix-c.py",15184,0,"",python,selection_command +388,710384,"jasmine/train_tokenizer_appendix-c.py",15169,0,"",python,selection_command +389,710426,"jasmine/train_tokenizer_appendix-c.py",15121,0,"",python,selection_command +390,710474,"jasmine/train_tokenizer_appendix-c.py",15059,0,"",python,selection_command +391,710475,"jasmine/train_tokenizer_appendix-c.py",15049,0,"",python,selection_command +392,710501,"jasmine/train_tokenizer_appendix-c.py",15024,0,"",python,selection_command +393,710586,"jasmine/train_tokenizer_appendix-c.py",14999,0,"",python,selection_command +394,710587,"jasmine/train_tokenizer_appendix-c.py",14994,0,"",python,selection_command +395,710597,"jasmine/train_tokenizer_appendix-c.py",14957,0,"",python,selection_command +396,710626,"jasmine/train_tokenizer_appendix-c.py",14914,0,"",python,selection_command +397,710683,"jasmine/train_tokenizer_appendix-c.py",14904,0,"",python,selection_command 
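# NOTE: minimal sketch of the lower/compile pattern selected in the rows
# above (train_step.lower(optimizer, first_batch).compile()), assuming a toy
# `toy_step` and input in place of train_step and first_batch: compiling a
# jitted function ahead of time to inspect memory and cost estimates.
import jax
import jax.numpy as jnp

@jax.jit
def toy_step(x: jax.Array) -> jax.Array:
    return (x * 2.0).sum()

x = jnp.ones((8, 8))
compiled = toy_step.lower(x).compile()  # compile once, without executing
print(compiled.memory_analysis())       # backend memory estimates (may be None)
print(compiled.cost_analysis())         # FLOP/byte estimates, backend-dependent
print(compiled(x))                      # run the precompiled executable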
+398,710758,"jasmine/train_tokenizer_appendix-c.py",14914,0,"",python,selection_command +399,710866,"TERMINAL",0,0,"6",,terminal_output +400,710952,"jasmine/train_tokenizer_appendix-c.py",14957,0,"",python,selection_command +401,711136,"jasmine/train_tokenizer_appendix-c.py",14994,0,"",python,selection_command +402,711249,"jasmine/train_tokenizer_appendix-c.py",14999,0,"",python,selection_command +403,711434,"jasmine/train_tokenizer_appendix-c.py",15024,0,"",python,selection_command +404,711769,"jasmine/train_tokenizer_appendix-c.py",14999,0,"",python,selection_command +405,711912,"TERMINAL",0,0,"7",,terminal_output +406,712177,"jasmine/train_tokenizer_appendix-c.py",15024,0,"",python,selection_command +407,712676,"jasmine/train_tokenizer_appendix-c.py",15049,0,"",python,selection_command +408,712719,"jasmine/train_tokenizer_appendix-c.py",15059,0,"",python,selection_command +409,712725,"jasmine/train_tokenizer_appendix-c.py",15121,0,"",python,selection_command +410,712765,"jasmine/train_tokenizer_appendix-c.py",15169,0,"",python,selection_command +411,712791,"jasmine/train_tokenizer_appendix-c.py",15184,0,"",python,selection_command +412,712866,"jasmine/train_tokenizer_appendix-c.py",15194,0,"",python,selection_command +413,712887,"jasmine/train_tokenizer_appendix-c.py",15229,0,"",python,selection_command +414,712887,"jasmine/train_tokenizer_appendix-c.py",15235,0,"",python,selection_command +415,712909,"jasmine/train_tokenizer_appendix-c.py",15261,0,"",python,selection_command +416,712937,"jasmine/train_tokenizer_appendix-c.py",15282,0,"",python,selection_command +417,712975,"jasmine/train_tokenizer_appendix-c.py",15309,0,"",python,selection_command +418,712981,"TERMINAL",0,0,"8",,terminal_output +419,713019,"jasmine/train_tokenizer_appendix-c.py",15323,0,"",python,selection_command +420,713030,"jasmine/train_tokenizer_appendix-c.py",15389,0,"",python,selection_command +421,713084,"jasmine/train_tokenizer_appendix-c.py",15441,0,"",python,selection_command +422,713122,"jasmine/train_tokenizer_appendix-c.py",15460,0,"",python,selection_command +423,713152,"jasmine/train_tokenizer_appendix-c.py",15474,0,"",python,selection_command +424,713153,"jasmine/train_tokenizer_appendix-c.py",15511,0,"",python,selection_command +425,713197,"jasmine/train_tokenizer_appendix-c.py",15521,0,"",python,selection_command +426,713240,"jasmine/train_tokenizer_appendix-c.py",15573,0,"",python,selection_command +427,713404,"jasmine/train_tokenizer_appendix-c.py",15595,0,"",python,selection_command +428,713655,"jasmine/train_tokenizer_appendix-c.py",15628,0,"",python,selection_command +429,713905,"jasmine/train_tokenizer_appendix-c.py",15667,0,"",python,selection_command +430,713966,"TERMINAL",0,0,"9",,terminal_output +431,714427,"jasmine/train_tokenizer_appendix-c.py",15700,0,"",python,selection_command +432,714431,"jasmine/train_tokenizer_appendix-c.py",15764,0,"",python,selection_command +433,714464,"jasmine/train_tokenizer_appendix-c.py",15799,0,"",python,selection_command +434,714483,"jasmine/train_tokenizer_appendix-c.py",15859,0,"",python,selection_command +435,714508,"jasmine/train_tokenizer_appendix-c.py",15877,0,"",python,selection_command +436,714551,"jasmine/train_tokenizer_appendix-c.py",15882,0,"",python,selection_command +437,714569,"jasmine/train_tokenizer_appendix-c.py",15920,0,"",python,selection_command +438,714616,"jasmine/train_tokenizer_appendix-c.py",15949,0,"",python,selection_command +439,714634,"jasmine/train_tokenizer_appendix-c.py",16014,0,"",python,selection_command 
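# NOTE: minimal sketch of the bounded validation loop being navigated in the
# rows above, assuming a hypothetical `val_step(model, batch) -> (loss,
# metrics_dict)` and an iterable of batches; unlike the recorded
# calculate_validation_metrics, it stops after exactly `max_steps` batches
# rather than one extra.
import numpy as np

def run_validation(model, batches, val_step, max_steps: int) -> dict:
    losses, rows = [], []
    for i, batch in enumerate(batches):
        if i >= max_steps:
            break
        loss, metrics = val_step(model, batch)
        losses.append(float(loss))
        rows.append(metrics)
    if len(rows) < max_steps:
        print(f"Warning: validation ended after {len(rows)} of {max_steps} steps.")
    out = {f"val_{k}": float(np.mean([float(m[k]) for m in rows])) for k in rows[0]}
    out["val_loss"] = float(np.mean(losses))
    return out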
+440,714675,"jasmine/train_tokenizer_appendix-c.py",16073,0,"",python,selection_command +441,714710,"jasmine/train_tokenizer_appendix-c.py",16158,0,"",python,selection_command +442,714723,"jasmine/train_tokenizer_appendix-c.py",16210,0,"",python,selection_command +443,714766,"jasmine/train_tokenizer_appendix-c.py",16228,0,"",python,selection_command +444,714782,"jasmine/train_tokenizer_appendix-c.py",16310,0,"",python,selection_command +445,714827,"jasmine/train_tokenizer_appendix-c.py",16342,0,"",python,selection_command +446,714869,"jasmine/train_tokenizer_appendix-c.py",16386,0,"",python,selection_command +447,714962,"TERMINAL",0,0,"20",,terminal_output +448,714972,"jasmine/train_tokenizer_appendix-c.py",16342,0,"",python,selection_command +449,715486,"jasmine/train_tokenizer_appendix-c.py",16310,0,"",python,selection_command +450,715505,"jasmine/train_tokenizer_appendix-c.py",16228,0,"",python,selection_command +451,715538,"jasmine/train_tokenizer_appendix-c.py",16210,0,"",python,selection_command +452,715559,"jasmine/train_tokenizer_appendix-c.py",16158,0,"",python,selection_command +453,715589,"jasmine/train_tokenizer_appendix-c.py",16073,0,"",python,selection_command +454,715614,"jasmine/train_tokenizer_appendix-c.py",16014,0,"",python,selection_command +455,715648,"jasmine/train_tokenizer_appendix-c.py",15949,0,"",python,selection_command +456,715680,"jasmine/train_tokenizer_appendix-c.py",15920,0,"",python,selection_command +457,715706,"jasmine/train_tokenizer_appendix-c.py",15882,0,"",python,selection_command +458,715741,"jasmine/train_tokenizer_appendix-c.py",15877,0,"",python,selection_command +459,716150,"TERMINAL",0,0,"1",,terminal_output +460,716215,"jasmine/train_tokenizer_appendix-c.py",15859,0,"",python,selection_command +461,716491,"jasmine/train_tokenizer_appendix-c.py",15799,0,"",python,selection_command +462,716747,"jasmine/train_tokenizer_appendix-c.py",15764,0,"",python,selection_command +463,716899,"TERMINAL",0,0,"2",,terminal_output +464,716956,"jasmine/train_tokenizer_appendix-c.py",15799,0,"",python,selection_command +465,717963,"TERMINAL",0,0,"3",,terminal_output +466,718786,"jasmine/train_tokenizer_appendix-c.py",16191,0,"",python,selection_mouse +467,719000,"TERMINAL",0,0,"4",,terminal_output +468,719402,"jasmine/train_tokenizer_appendix-c.py",16090,0,"",python,selection_mouse +469,719997,"TERMINAL",0,0,"5",,terminal_output +470,720030,"jasmine/train_tokenizer_appendix-c.py",16085,0,"",python,selection_mouse +471,720946,"jasmine/train_tokenizer_appendix-c.py",15957,0,"",python,selection_mouse +472,720975,"TERMINAL",0,0,"6",,terminal_output +473,722030,"TERMINAL",0,0,"7",,terminal_output +474,723045,"TERMINAL",0,0,"8",,terminal_output +475,724042,"TERMINAL",0,0,"9",,terminal_output +476,725182,"TERMINAL",0,0,"30",,terminal_output +477,725651,"jasmine/train_tokenizer_appendix-c.py",15945,64," if dataloader_val and step % args.val_interval == 0:",python,selection_command +478,725910,"jasmine/train_tokenizer_appendix-c.py",15945,123," if dataloader_val and step % args.val_interval == 0:\n print(""Calculating validation metrics..."")",python,selection_command +479,726073,"jasmine/train_tokenizer_appendix-c.py",15945,208," if dataloader_val and step % args.val_interval == 0:\n print(""Calculating validation metrics..."")\n val_metrics, val_gt_batch, val_recon = calculate_validation_metrics(",python,selection_command +480,726114,"TERMINAL",0,0,"1",,terminal_output +481,726240,"jasmine/train_tokenizer_appendix-c.py",15945,260," if dataloader_val and step % 
args.val_interval == 0:\n print(""Calculating validation metrics..."")\n val_metrics, val_gt_batch, val_recon = calculate_validation_metrics(\n dataloader_val, optimizer.model",python,selection_command +482,726372,"jasmine/train_tokenizer_appendix-c.py",15945,278," if dataloader_val and step % args.val_interval == 0:\n print(""Calculating validation metrics..."")\n val_metrics, val_gt_batch, val_recon = calculate_validation_metrics(\n dataloader_val, optimizer.model\n )",python,selection_command +483,726515,"jasmine/train_tokenizer_appendix-c.py",15945,360," if dataloader_val and step % args.val_interval == 0:\n print(""Calculating validation metrics..."")\n val_metrics, val_gt_batch, val_recon = calculate_validation_metrics(\n dataloader_val, optimizer.model\n )\n print(f""Step {step}, validation loss: {val_metrics['val_loss']}"")",python,selection_command +484,726668,"jasmine/train_tokenizer_appendix-c.py",15945,392," if dataloader_val and step % args.val_interval == 0:\n print(""Calculating validation metrics..."")\n val_metrics, val_gt_batch, val_recon = calculate_validation_metrics(\n dataloader_val, optimizer.model\n )\n print(f""Step {step}, validation loss: {val_metrics['val_loss']}"")\n val_results = {",python,selection_command +485,726821,"jasmine/train_tokenizer_appendix-c.py",15945,436," if dataloader_val and step % args.val_interval == 0:\n print(""Calculating validation metrics..."")\n val_metrics, val_gt_batch, val_recon = calculate_validation_metrics(\n dataloader_val, optimizer.model\n )\n print(f""Step {step}, validation loss: {val_metrics['val_loss']}"")\n val_results = {\n ""metrics"": val_metrics,",python,selection_command +486,726959,"jasmine/train_tokenizer_appendix-c.py",15945,482," if dataloader_val and step % args.val_interval == 0:\n print(""Calculating validation metrics..."")\n val_metrics, val_gt_batch, val_recon = calculate_validation_metrics(\n dataloader_val, optimizer.model\n )\n print(f""Step {step}, validation loss: {val_metrics['val_loss']}"")\n val_results = {\n ""metrics"": val_metrics,\n ""gt_batch"": val_gt_batch,",python,selection_command +487,727087,"jasmine/train_tokenizer_appendix-c.py",15945,522," if dataloader_val and step % args.val_interval == 0:\n print(""Calculating validation metrics..."")\n val_metrics, val_gt_batch, val_recon = calculate_validation_metrics(\n dataloader_val, optimizer.model\n )\n print(f""Step {step}, validation loss: {val_metrics['val_loss']}"")\n val_results = {\n ""metrics"": val_metrics,\n ""gt_batch"": val_gt_batch,\n ""recon"": val_recon,",python,selection_command +488,727111,"TERMINAL",0,0,"2",,terminal_output +489,727259,"jasmine/train_tokenizer_appendix-c.py",15945,540," if dataloader_val and step % args.val_interval == 0:\n print(""Calculating validation metrics..."")\n val_metrics, val_gt_batch, val_recon = calculate_validation_metrics(\n dataloader_val, optimizer.model\n )\n print(f""Step {step}, validation loss: {val_metrics['val_loss']}"")\n val_results = {\n ""metrics"": val_metrics,\n ""gt_batch"": val_gt_batch,\n ""recon"": val_recon,\n }",python,selection_command +490,727464,"jasmine/train_tokenizer_appendix-c.py",15945,541,"",python,content +491,727573,"jasmine/train_tokenizer_appendix-c.py",15916,0,"",python,selection_command +492,728054,"jasmine/train_tokenizer_appendix-c.py",15878,0,"",python,selection_command +493,728074,"jasmine/train_tokenizer_appendix-c.py",15877,0,"",python,selection_command +494,728119,"jasmine/train_tokenizer_appendix-c.py",15855,0,"",python,selection_command 
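# NOTE: minimal sketch of the VQ-VAE objective that the recorded
# tokenizer_loss_fn computes: reconstruction MSE plus codebook and commitment
# terms, with jax.lax.stop_gradient routing each term's gradient to one side.
# `z` (pre-quantization latents) and `emb` (quantized embeddings) are stand-ins.
import jax
import jax.numpy as jnp

def vq_loss(gt: jax.Array, recon: jax.Array, z: jax.Array, emb: jax.Array,
            vq_beta: float = 0.25) -> jax.Array:
    mse = jnp.square(gt - recon).mean()
    q_loss = jnp.square(jax.lax.stop_gradient(emb) - z).mean()      # grads reach z only
    commitment = jnp.square(emb - jax.lax.stop_gradient(z)).mean()  # grads reach emb only
    return mse + q_loss + vq_beta * commitment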
+495,728151,"jasmine/train_tokenizer_appendix-c.py",15795,0,"",python,selection_command +496,728151,"TERMINAL",0,0,"3",,terminal_output +497,728165,"jasmine/train_tokenizer_appendix-c.py",15760,0,"",python,selection_command +498,728196,"jasmine/train_tokenizer_appendix-c.py",15696,0,"",python,selection_command +499,728256,"jasmine/train_tokenizer_appendix-c.py",15663,0,"",python,selection_command +500,728263,"jasmine/train_tokenizer_appendix-c.py",15624,0,"",python,selection_command +501,728306,"jasmine/train_tokenizer_appendix-c.py",15591,0,"",python,selection_command +502,728351,"jasmine/train_tokenizer_appendix-c.py",15569,0,"",python,selection_command +503,728352,"jasmine/train_tokenizer_appendix-c.py",15517,0,"",python,selection_command +504,728393,"jasmine/train_tokenizer_appendix-c.py",15507,0,"",python,selection_command +505,728764,"jasmine/train_tokenizer_appendix-c.py",15517,0,"",python,selection_command +506,729022,"jasmine/train_tokenizer_appendix-c.py",15568,0,"\n if dataloader_val and step % args.val_interval == 0:\n print(""Calculating validation metrics..."")\n val_metrics, val_gt_batch, val_recon = calculate_validation_metrics(\n dataloader_val, optimizer.model\n )\n print(f""Step {step}, validation loss: {val_metrics['val_loss']}"")\n val_results = {\n ""metrics"": val_metrics,\n ""gt_batch"": val_gt_batch,\n ""recon"": val_recon,\n }",python,content +507,729043,"jasmine/train_tokenizer_appendix-c.py",15581,0,"",python,selection_command +508,729196,"TERMINAL",0,0,"4",,terminal_output +509,730105,"jasmine/train_tokenizer_appendix-c.py",15569,64," if dataloader_val and step % args.val_interval == 0:",python,selection_command +510,730190,"TERMINAL",0,0,"5",,terminal_output +511,730330,"jasmine/train_tokenizer_appendix-c.py",15569,123," if dataloader_val and step % args.val_interval == 0:\n print(""Calculating validation metrics..."")",python,selection_command +512,730441,"jasmine/train_tokenizer_appendix-c.py",15569,208," if dataloader_val and step % args.val_interval == 0:\n print(""Calculating validation metrics..."")\n val_metrics, val_gt_batch, val_recon = calculate_validation_metrics(",python,selection_command +513,730600,"jasmine/train_tokenizer_appendix-c.py",15569,260," if dataloader_val and step % args.val_interval == 0:\n print(""Calculating validation metrics..."")\n val_metrics, val_gt_batch, val_recon = calculate_validation_metrics(\n dataloader_val, optimizer.model",python,selection_command +514,730743,"jasmine/train_tokenizer_appendix-c.py",15569,278," if dataloader_val and step % args.val_interval == 0:\n print(""Calculating validation metrics..."")\n val_metrics, val_gt_batch, val_recon = calculate_validation_metrics(\n dataloader_val, optimizer.model\n )",python,selection_command +515,731184,"TERMINAL",0,0,"6",,terminal_output +516,731244,"jasmine/train_tokenizer_appendix-c.py",15569,360," if dataloader_val and step % args.val_interval == 0:\n print(""Calculating validation metrics..."")\n val_metrics, val_gt_batch, val_recon = calculate_validation_metrics(\n dataloader_val, optimizer.model\n )\n print(f""Step {step}, validation loss: {val_metrics['val_loss']}"")",python,selection_command +517,731284,"jasmine/train_tokenizer_appendix-c.py",15569,392," if dataloader_val and step % args.val_interval == 0:\n print(""Calculating validation metrics..."")\n val_metrics, val_gt_batch, val_recon = calculate_validation_metrics(\n dataloader_val, optimizer.model\n )\n print(f""Step {step}, validation loss: {val_metrics['val_loss']}"")\n val_results = 
{",python,selection_command +518,731358,"jasmine/train_tokenizer_appendix-c.py",15569,436," if dataloader_val and step % args.val_interval == 0:\n print(""Calculating validation metrics..."")\n val_metrics, val_gt_batch, val_recon = calculate_validation_metrics(\n dataloader_val, optimizer.model\n )\n print(f""Step {step}, validation loss: {val_metrics['val_loss']}"")\n val_results = {\n ""metrics"": val_metrics,",python,selection_command +519,731515,"jasmine/train_tokenizer_appendix-c.py",15569,482," if dataloader_val and step % args.val_interval == 0:\n print(""Calculating validation metrics..."")\n val_metrics, val_gt_batch, val_recon = calculate_validation_metrics(\n dataloader_val, optimizer.model\n )\n print(f""Step {step}, validation loss: {val_metrics['val_loss']}"")\n val_results = {\n ""metrics"": val_metrics,\n ""gt_batch"": val_gt_batch,",python,selection_command +520,731677,"jasmine/train_tokenizer_appendix-c.py",15569,522," if dataloader_val and step % args.val_interval == 0:\n print(""Calculating validation metrics..."")\n val_metrics, val_gt_batch, val_recon = calculate_validation_metrics(\n dataloader_val, optimizer.model\n )\n print(f""Step {step}, validation loss: {val_metrics['val_loss']}"")\n val_results = {\n ""metrics"": val_metrics,\n ""gt_batch"": val_gt_batch,\n ""recon"": val_recon,",python,selection_command +521,731844,"jasmine/train_tokenizer_appendix-c.py",15569,540," if dataloader_val and step % args.val_interval == 0:\n print(""Calculating validation metrics..."")\n val_metrics, val_gt_batch, val_recon = calculate_validation_metrics(\n dataloader_val, optimizer.model\n )\n print(f""Step {step}, validation loss: {val_metrics['val_loss']}"")\n val_results = {\n ""metrics"": val_metrics,\n ""gt_batch"": val_gt_batch,\n ""recon"": val_recon,\n }",python,selection_command +522,732094,"jasmine/train_tokenizer_appendix-c.py",15581,0,"",python,selection_command +523,732211,"TERMINAL",0,0,"7",,terminal_output +524,732352,"jasmine/train_tokenizer_appendix-c.py",16104,4,"",python,content +525,732353,"jasmine/train_tokenizer_appendix-c.py",16068,4,"",python,content +526,732353,"jasmine/train_tokenizer_appendix-c.py",16022,4,"",python,content +527,732353,"jasmine/train_tokenizer_appendix-c.py",15978,4,"",python,content +528,732353,"jasmine/train_tokenizer_appendix-c.py",15942,4,"",python,content +529,732353,"jasmine/train_tokenizer_appendix-c.py",15860,4,"",python,content +530,732353,"jasmine/train_tokenizer_appendix-c.py",15842,4,"",python,content +531,732353,"jasmine/train_tokenizer_appendix-c.py",15794,4,"",python,content +532,732353,"jasmine/train_tokenizer_appendix-c.py",15705,4,"",python,content +533,732353,"jasmine/train_tokenizer_appendix-c.py",15646,4,"",python,content +534,732354,"jasmine/train_tokenizer_appendix-c.py",15577,4,"",python,content +535,732755,"jasmine/train_tokenizer_appendix-c.py",16060,4,"",python,content +536,732755,"jasmine/train_tokenizer_appendix-c.py",16028,4,"",python,content +537,732755,"jasmine/train_tokenizer_appendix-c.py",15986,4,"",python,content +538,732755,"jasmine/train_tokenizer_appendix-c.py",15946,4,"",python,content +539,732755,"jasmine/train_tokenizer_appendix-c.py",15914,4,"",python,content +540,732755,"jasmine/train_tokenizer_appendix-c.py",15836,4,"",python,content +541,732756,"jasmine/train_tokenizer_appendix-c.py",15822,4,"",python,content +542,732756,"jasmine/train_tokenizer_appendix-c.py",15778,4,"",python,content +543,732756,"jasmine/train_tokenizer_appendix-c.py",15693,4,"",python,content 
+544,732756,"jasmine/train_tokenizer_appendix-c.py",15638,4,"",python,content +545,732756,"jasmine/train_tokenizer_appendix-c.py",15573,4,"",python,content +546,733250,"TERMINAL",0,0,"8",,terminal_output +547,733969,"jasmine/train_tokenizer_appendix-c.py",15572,0,"",python,selection_command +548,734254,"TERMINAL",0,0,"9",,terminal_output +549,735307,"TERMINAL",0,0,"40",,terminal_output +550,735418,"jasmine/train_tokenizer_appendix-c.py",15590,0,"",python,selection_mouse +551,735941,"jasmine/train_tokenizer_appendix-c.py",15593,0,"",python,selection_mouse +552,736383,"TERMINAL",0,0,"1",,terminal_output +553,736449,"jasmine/train_tokenizer_appendix-c.py",15595,0,"",python,selection_mouse +554,736649,"jasmine/train_tokenizer_appendix-c.py",15595,1,"s",python,selection_mouse +555,736690,"jasmine/train_tokenizer_appendix-c.py",15595,3,"ste",python,selection_mouse +556,736690,"jasmine/train_tokenizer_appendix-c.py",15595,5,"step ",python,selection_mouse +557,736695,"jasmine/train_tokenizer_appendix-c.py",15595,8,"step % a",python,selection_mouse +558,736718,"jasmine/train_tokenizer_appendix-c.py",15595,13,"step % args.v",python,selection_mouse +559,736734,"jasmine/train_tokenizer_appendix-c.py",15595,15,"step % args.val",python,selection_mouse +560,736750,"jasmine/train_tokenizer_appendix-c.py",15595,17,"step % args.val_i",python,selection_mouse +561,736768,"jasmine/train_tokenizer_appendix-c.py",15595,18,"step % args.val_in",python,selection_mouse +562,736797,"jasmine/train_tokenizer_appendix-c.py",15595,20,"step % args.val_inte",python,selection_mouse +563,736801,"jasmine/train_tokenizer_appendix-c.py",15595,21,"step % args.val_inter",python,selection_mouse +564,736835,"jasmine/train_tokenizer_appendix-c.py",15595,22,"step % args.val_interv",python,selection_mouse +565,736880,"jasmine/train_tokenizer_appendix-c.py",15595,23,"step % args.val_interva",python,selection_mouse +566,736922,"jasmine/train_tokenizer_appendix-c.py",15595,24,"step % args.val_interval",python,selection_mouse +567,736967,"jasmine/train_tokenizer_appendix-c.py",15595,25,"step % args.val_interval ",python,selection_mouse +568,736968,"jasmine/train_tokenizer_appendix-c.py",15595,26,"step % args.val_interval =",python,selection_mouse +569,737042,"jasmine/train_tokenizer_appendix-c.py",15595,27,"step % args.val_interval ==",python,selection_mouse +570,737324,"TERMINAL",0,0,"2",,terminal_output +571,737429,"jasmine/train_tokenizer_appendix-c.py",15595,28,"step % args.val_interval == ",python,selection_mouse +572,737497,"jasmine/train_tokenizer_appendix-c.py",15595,29,"step % args.val_interval == 0",python,selection_mouse +573,737947,"jasmine/train_tokenizer_appendix-c.py",15595,29,"",python,content +574,738394,"TERMINAL",0,0,"3",,terminal_output +575,738617,"jasmine/train_tokenizer_appendix-c.py",15594,1,"",python,content +576,738734,"jasmine/train_tokenizer_appendix-c.py",15593,1,"",python,content +577,738863,"jasmine/train_tokenizer_appendix-c.py",15592,1,"",python,content +578,738999,"jasmine/train_tokenizer_appendix-c.py",15591,1,"",python,content +579,739214,"jasmine/train_tokenizer_appendix-c.py",15590,1,"",python,content +580,739306,"jasmine/train_tokenizer_appendix-c.py",15589,1,"",python,content +581,739387,"TERMINAL",0,0,"4",,terminal_output +582,740433,"jasmine/train_tokenizer_appendix-c.py",15589,0,"l",python,content +583,740434,"jasmine/train_tokenizer_appendix-c.py",15590,0,"",python,selection_keyboard +584,740438,"TERMINAL",0,0,"5",,terminal_output 
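# NOTE: minimal sketch of the data-parallel layout built by
# build_mesh_and_sharding in the recorded script: a 1-D "data" mesh, a
# replicated spec for parameters, and a per-process batch assembled into one
# globally sharded array. The local batch shape is illustrative.
import jax
import jax.numpy as jnp
from jax.sharding import Mesh, NamedSharding, PartitionSpec
from jax.experimental.mesh_utils import create_device_mesh

mesh = Mesh(create_device_mesh((jax.device_count(),)), axis_names=("data",))
replicated_sharding = NamedSharding(mesh, PartitionSpec())  # parameters
videos_sharding = NamedSharding(mesh, PartitionSpec("data", None, None, None, None))

local_videos = jnp.zeros((4, 16, 64, 64, 3), dtype=jnp.uint8)  # this process's slice
global_videos = jax.make_array_from_process_local_data(videos_sharding, local_videos)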
+585,740645,"jasmine/train_tokenizer_appendix-c.py",15589,0,"",python,selection_command +586,741404,"TERMINAL",0,0,"6",,terminal_output +587,742635,"TERMINAL",0,0,"7",,terminal_output +588,743259,"jasmine/train_tokenizer_appendix-c.py",15590,0,"",python,selection_command +589,743462,"TERMINAL",0,0,"8",,terminal_output +590,744463,"TERMINAL",0,0,"9",,terminal_output +591,745475,"jasmine/train_tokenizer_appendix-c.py",15864,0,"",python,selection_mouse +592,745484,"TERMINAL",0,0,"50",,terminal_output +593,746077,"jasmine/train_tokenizer_appendix-c.py",16022,0,"",python,selection_mouse +594,746503,"TERMINAL",0,0,"2",,terminal_output +595,746700,"jasmine/train_tokenizer_appendix-c.py",16020,0,"",python,selection_mouse +596,747234,"jasmine/train_tokenizer_appendix-c.py",15999,0,"",python,selection_mouse +597,747528,"TERMINAL",0,0,"3",,terminal_output +598,748719,"TERMINAL",0,0,"4",,terminal_output +599,749637,"TERMINAL",0,0,"5",,terminal_output +600,750595,"TERMINAL",0,0,"6",,terminal_output +601,751618,"TERMINAL",0,0,"7",,terminal_output +602,752651,"TERMINAL",0,0,"8",,terminal_output +603,753650,"TERMINAL",0,0,"9",,terminal_output +604,754685,"TERMINAL",0,0,"50:00",,terminal_output +605,755715,"TERMINAL",0,0,"1",,terminal_output +606,756739,"TERMINAL",0,0,"2",,terminal_output +607,757851,"TERMINAL",0,0,"3",,terminal_output +608,758779,"TERMINAL",0,0,"4",,terminal_output +609,759794,"TERMINAL",0,0,"5",,terminal_output +610,760811,"TERMINAL",0,0,"6",,terminal_output +611,761866,"TERMINAL",0,0,"7",,terminal_output +612,762853,"TERMINAL",0,0,"8",,terminal_output +613,763878,"TERMINAL",0,0,"9",,terminal_output +614,764895,"TERMINAL",0,0,"10",,terminal_output +615,765917,"TERMINAL",0,0,"1",,terminal_output +616,766939,"TERMINAL",0,0,"2",,terminal_output +617,767951,"TERMINAL",0,0,"3",,terminal_output +618,768927,"jasmine/train_tokenizer_appendix-c.py",16018,0,"",python,selection_mouse +619,768985,"TERMINAL",0,0,"4",,terminal_output +620,769407,"jasmine/train_tokenizer_appendix-c.py",16019,0,"",python,selection_mouse +621,769972,"jasmine/train_tokenizer_appendix-c.py",15999,0,"",python,selection_mouse +622,770012,"TERMINAL",0,0,"5",,terminal_output +623,771018,"TERMINAL",0,0,"6",,terminal_output +624,772040,"TERMINAL",0,0,"7",,terminal_output +625,773098,"TERMINAL",0,0,"8",,terminal_output +626,774120,"TERMINAL",0,0,"9",,terminal_output +627,775104,"TERMINAL",0,0,"20",,terminal_output +628,776151,"TERMINAL",0,0,"1",,terminal_output +629,777150,"TERMINAL",0,0,"2",,terminal_output +630,777189,"jasmine/train_tokenizer_appendix-c.py",15988,21," first_step = step",python,selection_command +631,777418,"jasmine/train_tokenizer_appendix-c.py",15988,54," first_step = step\n while step < args.num_steps:",python,selection_command +632,777915,"jasmine/train_tokenizer_appendix-c.py",15988,93," first_step = step\n while step < args.num_steps:\n for batch in dataloader_train:",python,selection_command +633,777933,"jasmine/train_tokenizer_appendix-c.py",15988,126," first_step = step\n while step < args.num_steps:\n for batch in dataloader_train:\n # --- Train step ---",python,selection_command +634,777962,"jasmine/train_tokenizer_appendix-c.py",15988,190," first_step = step\n while step < args.num_steps:\n for batch in dataloader_train:\n # --- Train step ---\n loss, recon, metrics = train_step(optimizer, batch)",python,selection_command +635,778003,"jasmine/train_tokenizer_appendix-c.py",15988,225," first_step = step\n while step < args.num_steps:\n for batch in dataloader_train:\n # --- Train step ---\n loss, 
recon, metrics = train_step(optimizer, batch)\n if step == first_step:",python,selection_command +636,778044,"jasmine/train_tokenizer_appendix-c.py",15988,285," first_step = step\n while step < args.num_steps:\n for batch in dataloader_train:\n # --- Train step ---\n loss, recon, metrics = train_step(optimizer, batch)\n if step == first_step:\n print_mem_stats(""After params initialized"")",python,selection_command +637,778045,"jasmine/train_tokenizer_appendix-c.py",15988,307," first_step = step\n while step < args.num_steps:\n for batch in dataloader_train:\n # --- Train step ---\n loss, recon, metrics = train_step(optimizer, batch)\n if step == first_step:\n print_mem_stats(""After params initialized"")\n step += 1",python,selection_command +638,778100,"jasmine/train_tokenizer_appendix-c.py",15988,308," first_step = step\n while step < args.num_steps:\n for batch in dataloader_train:\n # --- Train step ---\n loss, recon, metrics = train_step(optimizer, batch)\n if step == first_step:\n print_mem_stats(""After params initialized"")\n step += 1\n",python,selection_command +639,778115,"jasmine/train_tokenizer_appendix-c.py",15988,346," first_step = step\n while step < args.num_steps:\n for batch in dataloader_train:\n # --- Train step ---\n loss, recon, metrics = train_step(optimizer, batch)\n if step == first_step:\n print_mem_stats(""After params initialized"")\n step += 1\n\n # --- Validation loss ---",python,selection_command +640,778153,"jasmine/train_tokenizer_appendix-c.py",15988,375," first_step = step\n while step < args.num_steps:\n for batch in dataloader_train:\n # --- Train step ---\n loss, recon, metrics = train_step(optimizer, batch)\n if step == first_step:\n print_mem_stats(""After params initialized"")\n step += 1\n\n # --- Validation loss ---\n val_results = {}",python,selection_command +641,778173,"jasmine/train_tokenizer_appendix-c.py",15988,376," first_step = step\n while step < args.num_steps:\n for batch in dataloader_train:\n # --- Train step ---\n loss, recon, metrics = train_step(optimizer, batch)\n if step == first_step:\n print_mem_stats(""After params initialized"")\n step += 1\n\n # --- Validation loss ---\n val_results = {}\n",python,selection_command +642,778203,"jasmine/train_tokenizer_appendix-c.py",15988,406," first_step = step\n while step < args.num_steps:\n for batch in dataloader_train:\n # --- Train step ---\n loss, recon, metrics = train_step(optimizer, batch)\n if step == first_step:\n print_mem_stats(""After params initialized"")\n step += 1\n\n # --- Validation loss ---\n val_results = {}\n\n # --- Logging ---",python,selection_command +643,778232,"jasmine/train_tokenizer_appendix-c.py",15988,431," first_step = step\n while step < args.num_steps:\n for batch in dataloader_train:\n # --- Train step ---\n loss, recon, metrics = train_step(optimizer, batch)\n if step == first_step:\n print_mem_stats(""After params initialized"")\n step += 1\n\n # --- Validation loss ---\n val_results = {}\n\n # --- Logging ---\n if args.log:",python,selection_command +644,778233,"TERMINAL",0,0,"3",,terminal_output +645,778280,"jasmine/train_tokenizer_appendix-c.py",15988,510," first_step = step\n while step < args.num_steps:\n for batch in dataloader_train:\n # --- Train step ---\n loss, recon, metrics = train_step(optimizer, batch)\n if step == first_step:\n print_mem_stats(""After params initialized"")\n step += 1\n\n # --- Validation loss ---\n val_results = {}\n\n # --- Logging ---\n if args.log:\n if step % args.log_interval == 0 and jax.process_index() == 
0:",python,selection_command +646,778326,"jasmine/train_tokenizer_appendix-c.py",15988,581," first_step = step\n while step < args.num_steps:\n for batch in dataloader_train:\n # --- Train step ---\n loss, recon, metrics = train_step(optimizer, batch)\n if step == first_step:\n print_mem_stats(""After params initialized"")\n step += 1\n\n # --- Validation loss ---\n val_results = {}\n\n # --- Logging ---\n if args.log:\n if step % args.log_interval == 0 and jax.process_index() == 0:\n log_dict = {""loss"": loss, ""step"": step, **metrics}",python,selection_command +647,778327,"jasmine/train_tokenizer_appendix-c.py",15988,617," first_step = step\n while step < args.num_steps:\n for batch in dataloader_train:\n # --- Train step ---\n loss, recon, metrics = train_step(optimizer, batch)\n if step == first_step:\n print_mem_stats(""After params initialized"")\n step += 1\n\n # --- Validation loss ---\n val_results = {}\n\n # --- Logging ---\n if args.log:\n if step % args.log_interval == 0 and jax.process_index() == 0:\n log_dict = {""loss"": loss, ""step"": step, **metrics}\n if val_results:",python,selection_command +648,778367,"jasmine/train_tokenizer_appendix-c.py",15988,681," first_step = step\n while step < args.num_steps:\n for batch in dataloader_train:\n # --- Train step ---\n loss, recon, metrics = train_step(optimizer, batch)\n if step == first_step:\n print_mem_stats(""After params initialized"")\n step += 1\n\n # --- Validation loss ---\n val_results = {}\n\n # --- Logging ---\n if args.log:\n if step % args.log_interval == 0 and jax.process_index() == 0:\n log_dict = {""loss"": loss, ""step"": step, **metrics}\n if val_results:\n log_dict.update(val_results[""metrics""])",python,selection_command +649,778427,"jasmine/train_tokenizer_appendix-c.py",15988,721," first_step = step\n while step < args.num_steps:\n for batch in dataloader_train:\n # --- Train step ---\n loss, recon, metrics = train_step(optimizer, batch)\n if step == first_step:\n print_mem_stats(""After params initialized"")\n step += 1\n\n # --- Validation loss ---\n val_results = {}\n\n # --- Logging ---\n if args.log:\n if step % args.log_interval == 0 and jax.process_index() == 0:\n log_dict = {""loss"": loss, ""step"": step, **metrics}\n if val_results:\n log_dict.update(val_results[""metrics""])\n wandb.log(log_dict)",python,selection_command +650,778469,"jasmine/train_tokenizer_appendix-c.py",15988,777," first_step = step\n while step < args.num_steps:\n for batch in dataloader_train:\n # --- Train step ---\n loss, recon, metrics = train_step(optimizer, batch)\n if step == first_step:\n print_mem_stats(""After params initialized"")\n step += 1\n\n # --- Validation loss ---\n val_results = {}\n\n # --- Logging ---\n if args.log:\n if step % args.log_interval == 0 and jax.process_index() == 0:\n log_dict = {""loss"": loss, ""step"": step, **metrics}\n if val_results:\n log_dict.update(val_results[""metrics""])\n wandb.log(log_dict)\n if step % args.log_image_interval == 0:",python,selection_command +651,778510,"jasmine/train_tokenizer_appendix-c.py",15988,853," first_step = step\n while step < args.num_steps:\n for batch in dataloader_train:\n # --- Train step ---\n loss, recon, metrics = train_step(optimizer, batch)\n if step == first_step:\n print_mem_stats(""After params initialized"")\n step += 1\n\n # --- Validation loss ---\n val_results = {}\n\n # --- Logging ---\n if args.log:\n if step % args.log_interval == 0 and jax.process_index() == 0:\n log_dict = {""loss"": loss, ""step"": step, **metrics}\n if 
val_results:\n log_dict.update(val_results[""metrics""])\n wandb.log(log_dict)\n if step % args.log_image_interval == 0:\n gt_seq = batch[""videos""][0].astype(jnp.float32) / 255.0",python,selection_command +652,778511,"jasmine/train_tokenizer_appendix-c.py",15988,905," first_step = step\n while step < args.num_steps:\n for batch in dataloader_train:\n # --- Train step ---\n loss, recon, metrics = train_step(optimizer, batch)\n if step == first_step:\n print_mem_stats(""After params initialized"")\n step += 1\n\n # --- Validation loss ---\n val_results = {}\n\n # --- Logging ---\n if args.log:\n if step % args.log_interval == 0 and jax.process_index() == 0:\n log_dict = {""loss"": loss, ""step"": step, **metrics}\n if val_results:\n log_dict.update(val_results[""metrics""])\n wandb.log(log_dict)\n if step % args.log_image_interval == 0:\n gt_seq = batch[""videos""][0].astype(jnp.float32) / 255.0\n recon_seq = recon[0].clip(0, 1)",python,selection_command +653,778512,"jasmine/train_tokenizer_appendix-c.py",15988,987," first_step = step\n while step < args.num_steps:\n for batch in dataloader_train:\n # --- Train step ---\n loss, recon, metrics = train_step(optimizer, batch)\n if step == first_step:\n print_mem_stats(""After params initialized"")\n step += 1\n\n # --- Validation loss ---\n val_results = {}\n\n # --- Logging ---\n if args.log:\n if step % args.log_interval == 0 and jax.process_index() == 0:\n log_dict = {""loss"": loss, ""step"": step, **metrics}\n if val_results:\n log_dict.update(val_results[""metrics""])\n wandb.log(log_dict)\n if step % args.log_image_interval == 0:\n gt_seq = batch[""videos""][0].astype(jnp.float32) / 255.0\n recon_seq = recon[0].clip(0, 1)\n comparison_seq = jnp.concatenate((gt_seq, recon_seq), axis=1)",python,selection_command +654,778555,"jasmine/train_tokenizer_appendix-c.py",15988,1042," first_step = step\n while step < args.num_steps:\n for batch in dataloader_train:\n # --- Train step ---\n loss, recon, metrics = train_step(optimizer, batch)\n if step == first_step:\n print_mem_stats(""After params initialized"")\n step += 1\n\n # --- Validation loss ---\n val_results = {}\n\n # --- Logging ---\n if args.log:\n if step % args.log_interval == 0 and jax.process_index() == 0:\n log_dict = {""loss"": loss, ""step"": step, **metrics}\n if val_results:\n log_dict.update(val_results[""metrics""])\n wandb.log(log_dict)\n if step % args.log_image_interval == 0:\n gt_seq = batch[""videos""][0].astype(jnp.float32) / 255.0\n recon_seq = recon[0].clip(0, 1)\n comparison_seq = jnp.concatenate((gt_seq, recon_seq), axis=1)\n comparison_seq = einops.rearrange(",python,selection_command +655,778593,"jasmine/train_tokenizer_appendix-c.py",15988,1111," first_step = step\n while step < args.num_steps:\n for batch in dataloader_train:\n # --- Train step ---\n loss, recon, metrics = train_step(optimizer, batch)\n if step == first_step:\n print_mem_stats(""After params initialized"")\n step += 1\n\n # --- Validation loss ---\n val_results = {}\n\n # --- Logging ---\n if args.log:\n if step % args.log_interval == 0 and jax.process_index() == 0:\n log_dict = {""loss"": loss, ""step"": step, **metrics}\n if val_results:\n log_dict.update(val_results[""metrics""])\n wandb.log(log_dict)\n if step % args.log_image_interval == 0:\n gt_seq = batch[""videos""][0].astype(jnp.float32) / 255.0\n recon_seq = recon[0].clip(0, 1)\n comparison_seq = jnp.concatenate((gt_seq, recon_seq), axis=1)\n comparison_seq = einops.rearrange(\n comparison_seq * 255, ""t h w c -> h (t w) 
c""",python,selection_command +656,778597,"jasmine/train_tokenizer_appendix-c.py",15988,1133," first_step = step\n while step < args.num_steps:\n for batch in dataloader_train:\n # --- Train step ---\n loss, recon, metrics = train_step(optimizer, batch)\n if step == first_step:\n print_mem_stats(""After params initialized"")\n step += 1\n\n # --- Validation loss ---\n val_results = {}\n\n # --- Logging ---\n if args.log:\n if step % args.log_interval == 0 and jax.process_index() == 0:\n log_dict = {""loss"": loss, ""step"": step, **metrics}\n if val_results:\n log_dict.update(val_results[""metrics""])\n wandb.log(log_dict)\n if step % args.log_image_interval == 0:\n gt_seq = batch[""videos""][0].astype(jnp.float32) / 255.0\n recon_seq = recon[0].clip(0, 1)\n comparison_seq = jnp.concatenate((gt_seq, recon_seq), axis=1)\n comparison_seq = einops.rearrange(\n comparison_seq * 255, ""t h w c -> h (t w) c""\n )",python,selection_command +657,778643,"jasmine/train_tokenizer_appendix-c.py",15988,1203," first_step = step\n while step < args.num_steps:\n for batch in dataloader_train:\n # --- Train step ---\n loss, recon, metrics = train_step(optimizer, batch)\n if step == first_step:\n print_mem_stats(""After params initialized"")\n step += 1\n\n # --- Validation loss ---\n val_results = {}\n\n # --- Logging ---\n if args.log:\n if step % args.log_interval == 0 and jax.process_index() == 0:\n log_dict = {""loss"": loss, ""step"": step, **metrics}\n if val_results:\n log_dict.update(val_results[""metrics""])\n wandb.log(log_dict)\n if step % args.log_image_interval == 0:\n gt_seq = batch[""videos""][0].astype(jnp.float32) / 255.0\n recon_seq = recon[0].clip(0, 1)\n comparison_seq = jnp.concatenate((gt_seq, recon_seq), axis=1)\n comparison_seq = einops.rearrange(\n comparison_seq * 255, ""t h w c -> h (t w) c""\n )\n if val_results and step % args.val_interval == 0:",python,selection_command +658,778649,"jasmine/train_tokenizer_appendix-c.py",15988,1257," first_step = step\n while step < args.num_steps:\n for batch in dataloader_train:\n # --- Train step ---\n loss, recon, metrics = train_step(optimizer, batch)\n if step == first_step:\n print_mem_stats(""After params initialized"")\n step += 1\n\n # --- Validation loss ---\n val_results = {}\n\n # --- Logging ---\n if args.log:\n if step % args.log_interval == 0 and jax.process_index() == 0:\n log_dict = {""loss"": loss, ""step"": step, **metrics}\n if val_results:\n log_dict.update(val_results[""metrics""])\n wandb.log(log_dict)\n if step % args.log_image_interval == 0:\n gt_seq = batch[""videos""][0].astype(jnp.float32) / 255.0\n recon_seq = recon[0].clip(0, 1)\n comparison_seq = jnp.concatenate((gt_seq, recon_seq), axis=1)\n comparison_seq = einops.rearrange(\n comparison_seq * 255, ""t h w c -> h (t w) c""\n )\n if val_results and step % args.val_interval == 0:\n val_results[""gt_seq_val""] = (",python,selection_command +659,778710,"jasmine/train_tokenizer_appendix-c.py",15988,1342," first_step = step\n while step < args.num_steps:\n for batch in dataloader_train:\n # --- Train step ---\n loss, recon, metrics = train_step(optimizer, batch)\n if step == first_step:\n print_mem_stats(""After params initialized"")\n step += 1\n\n # --- Validation loss ---\n val_results = {}\n\n # --- Logging ---\n if args.log:\n if step % args.log_interval == 0 and jax.process_index() == 0:\n log_dict = {""loss"": loss, ""step"": step, **metrics}\n if val_results:\n log_dict.update(val_results[""metrics""])\n wandb.log(log_dict)\n if step % 
args.log_image_interval == 0:\n gt_seq = batch[""videos""][0].astype(jnp.float32) / 255.0\n recon_seq = recon[0].clip(0, 1)\n comparison_seq = jnp.concatenate((gt_seq, recon_seq), axis=1)\n comparison_seq = einops.rearrange(\n comparison_seq * 255, ""t h w c -> h (t w) c""\n )\n if val_results and step % args.val_interval == 0:\n val_results[""gt_seq_val""] = (\n val_results[""gt_batch""][""videos""][0].astype(jnp.float32)",python,selection_command +660,778711,"jasmine/train_tokenizer_appendix-c.py",15988,1378," first_step = step\n while step < args.num_steps:\n for batch in dataloader_train:\n # --- Train step ---\n loss, recon, metrics = train_step(optimizer, batch)\n if step == first_step:\n print_mem_stats(""After params initialized"")\n step += 1\n\n # --- Validation loss ---\n val_results = {}\n\n # --- Logging ---\n if args.log:\n if step % args.log_interval == 0 and jax.process_index() == 0:\n log_dict = {""loss"": loss, ""step"": step, **metrics}\n if val_results:\n log_dict.update(val_results[""metrics""])\n wandb.log(log_dict)\n if step % args.log_image_interval == 0:\n gt_seq = batch[""videos""][0].astype(jnp.float32) / 255.0\n recon_seq = recon[0].clip(0, 1)\n comparison_seq = jnp.concatenate((gt_seq, recon_seq), axis=1)\n comparison_seq = einops.rearrange(\n comparison_seq * 255, ""t h w c -> h (t w) c""\n )\n if val_results and step % args.val_interval == 0:\n val_results[""gt_seq_val""] = (\n val_results[""gt_batch""][""videos""][0].astype(jnp.float32)\n / 255.0",python,selection_command +661,778760,"jasmine/train_tokenizer_appendix-c.py",15988,1404," first_step = step\n while step < args.num_steps:\n for batch in dataloader_train:\n # --- Train step ---\n loss, recon, metrics = train_step(optimizer, batch)\n if step == first_step:\n print_mem_stats(""After params initialized"")\n step += 1\n\n # --- Validation loss ---\n val_results = {}\n\n # --- Logging ---\n if args.log:\n if step % args.log_interval == 0 and jax.process_index() == 0:\n log_dict = {""loss"": loss, ""step"": step, **metrics}\n if val_results:\n log_dict.update(val_results[""metrics""])\n wandb.log(log_dict)\n if step % args.log_image_interval == 0:\n gt_seq = batch[""videos""][0].astype(jnp.float32) / 255.0\n recon_seq = recon[0].clip(0, 1)\n comparison_seq = jnp.concatenate((gt_seq, recon_seq), axis=1)\n comparison_seq = einops.rearrange(\n comparison_seq * 255, ""t h w c -> h (t w) c""\n )\n if val_results and step % args.val_interval == 0:\n val_results[""gt_seq_val""] = (\n val_results[""gt_batch""][""videos""][0].astype(jnp.float32)\n / 255.0\n )",python,selection_command +662,778803,"jasmine/train_tokenizer_appendix-c.py",15988,1489," first_step = step\n while step < args.num_steps:\n for batch in dataloader_train:\n # --- Train step ---\n loss, recon, metrics = train_step(optimizer, batch)\n if step == first_step:\n print_mem_stats(""After params initialized"")\n step += 1\n\n # --- Validation loss ---\n val_results = {}\n\n # --- Logging ---\n if args.log:\n if step % args.log_interval == 0 and jax.process_index() == 0:\n log_dict = {""loss"": loss, ""step"": step, **metrics}\n if val_results:\n log_dict.update(val_results[""metrics""])\n wandb.log(log_dict)\n if step % args.log_image_interval == 0:\n gt_seq = batch[""videos""][0].astype(jnp.float32) / 255.0\n recon_seq = recon[0].clip(0, 1)\n comparison_seq = jnp.concatenate((gt_seq, recon_seq), axis=1)\n comparison_seq = einops.rearrange(\n comparison_seq * 255, ""t h w c -> h (t w) c""\n )\n if val_results and step % args.val_interval == 0:\n 
val_results[""gt_seq_val""] = (\n val_results[""gt_batch""][""videos""][0].astype(jnp.float32)\n / 255.0\n )\n val_results[""recon_seq_val""] = val_results[""recon""][0].clip(",python,selection_command +663,778804,"jasmine/train_tokenizer_appendix-c.py",15988,1522," first_step = step\n while step < args.num_steps:\n for batch in dataloader_train:\n # --- Train step ---\n loss, recon, metrics = train_step(optimizer, batch)\n if step == first_step:\n print_mem_stats(""After params initialized"")\n step += 1\n\n # --- Validation loss ---\n val_results = {}\n\n # --- Logging ---\n if args.log:\n if step % args.log_interval == 0 and jax.process_index() == 0:\n log_dict = {""loss"": loss, ""step"": step, **metrics}\n if val_results:\n log_dict.update(val_results[""metrics""])\n wandb.log(log_dict)\n if step % args.log_image_interval == 0:\n gt_seq = batch[""videos""][0].astype(jnp.float32) / 255.0\n recon_seq = recon[0].clip(0, 1)\n comparison_seq = jnp.concatenate((gt_seq, recon_seq), axis=1)\n comparison_seq = einops.rearrange(\n comparison_seq * 255, ""t h w c -> h (t w) c""\n )\n if val_results and step % args.val_interval == 0:\n val_results[""gt_seq_val""] = (\n val_results[""gt_batch""][""videos""][0].astype(jnp.float32)\n / 255.0\n )\n val_results[""recon_seq_val""] = val_results[""recon""][0].clip(\n 0, 1",python,selection_command +664,778831,"jasmine/train_tokenizer_appendix-c.py",15988,1548," first_step = step\n while step < args.num_steps:\n for batch in dataloader_train:\n # --- Train step ---\n loss, recon, metrics = train_step(optimizer, batch)\n if step == first_step:\n print_mem_stats(""After params initialized"")\n step += 1\n\n # --- Validation loss ---\n val_results = {}\n\n # --- Logging ---\n if args.log:\n if step % args.log_interval == 0 and jax.process_index() == 0:\n log_dict = {""loss"": loss, ""step"": step, **metrics}\n if val_results:\n log_dict.update(val_results[""metrics""])\n wandb.log(log_dict)\n if step % args.log_image_interval == 0:\n gt_seq = batch[""videos""][0].astype(jnp.float32) / 255.0\n recon_seq = recon[0].clip(0, 1)\n comparison_seq = jnp.concatenate((gt_seq, recon_seq), axis=1)\n comparison_seq = einops.rearrange(\n comparison_seq * 255, ""t h w c -> h (t w) c""\n )\n if val_results and step % args.val_interval == 0:\n val_results[""gt_seq_val""] = (\n val_results[""gt_batch""][""videos""][0].astype(jnp.float32)\n / 255.0\n )\n val_results[""recon_seq_val""] = val_results[""recon""][0].clip(\n 0, 1\n )",python,selection_command +665,778876,"jasmine/train_tokenizer_appendix-c.py",15988,1625," first_step = step\n while step < args.num_steps:\n for batch in dataloader_train:\n # --- Train step ---\n loss, recon, metrics = train_step(optimizer, batch)\n if step == first_step:\n print_mem_stats(""After params initialized"")\n step += 1\n\n # --- Validation loss ---\n val_results = {}\n\n # --- Logging ---\n if args.log:\n if step % args.log_interval == 0 and jax.process_index() == 0:\n log_dict = {""loss"": loss, ""step"": step, **metrics}\n if val_results:\n log_dict.update(val_results[""metrics""])\n wandb.log(log_dict)\n if step % args.log_image_interval == 0:\n gt_seq = batch[""videos""][0].astype(jnp.float32) / 255.0\n recon_seq = recon[0].clip(0, 1)\n comparison_seq = jnp.concatenate((gt_seq, recon_seq), axis=1)\n comparison_seq = einops.rearrange(\n comparison_seq * 255, ""t h w c -> h (t w) c""\n )\n if val_results and step % args.val_interval == 0:\n val_results[""gt_seq_val""] = (\n 
val_results[""gt_batch""][""videos""][0].astype(jnp.float32)\n / 255.0\n )\n val_results[""recon_seq_val""] = val_results[""recon""][0].clip(\n 0, 1\n )\n val_results[""val_comparison_seq""] = jnp.concatenate(",python,selection_command +666,778941,"jasmine/train_tokenizer_appendix-c.py",15988,1712," first_step = step\n while step < args.num_steps:\n for batch in dataloader_train:\n # --- Train step ---\n loss, recon, metrics = train_step(optimizer, batch)\n if step == first_step:\n print_mem_stats(""After params initialized"")\n step += 1\n\n # --- Validation loss ---\n val_results = {}\n\n # --- Logging ---\n if args.log:\n if step % args.log_interval == 0 and jax.process_index() == 0:\n log_dict = {""loss"": loss, ""step"": step, **metrics}\n if val_results:\n log_dict.update(val_results[""metrics""])\n wandb.log(log_dict)\n if step % args.log_image_interval == 0:\n gt_seq = batch[""videos""][0].astype(jnp.float32) / 255.0\n recon_seq = recon[0].clip(0, 1)\n comparison_seq = jnp.concatenate((gt_seq, recon_seq), axis=1)\n comparison_seq = einops.rearrange(\n comparison_seq * 255, ""t h w c -> h (t w) c""\n )\n if val_results and step % args.val_interval == 0:\n val_results[""gt_seq_val""] = (\n val_results[""gt_batch""][""videos""][0].astype(jnp.float32)\n / 255.0\n )\n val_results[""recon_seq_val""] = val_results[""recon""][0].clip(\n 0, 1\n )\n val_results[""val_comparison_seq""] = jnp.concatenate(\n (val_results[""gt_seq_val""], val_results[""recon_seq_val""]),",python,selection_command +667,778942,"jasmine/train_tokenizer_appendix-c.py",15988,1748," first_step = step\n while step < args.num_steps:\n for batch in dataloader_train:\n # --- Train step ---\n loss, recon, metrics = train_step(optimizer, batch)\n if step == first_step:\n print_mem_stats(""After params initialized"")\n step += 1\n\n # --- Validation loss ---\n val_results = {}\n\n # --- Logging ---\n if args.log:\n if step % args.log_interval == 0 and jax.process_index() == 0:\n log_dict = {""loss"": loss, ""step"": step, **metrics}\n if val_results:\n log_dict.update(val_results[""metrics""])\n wandb.log(log_dict)\n if step % args.log_image_interval == 0:\n gt_seq = batch[""videos""][0].astype(jnp.float32) / 255.0\n recon_seq = recon[0].clip(0, 1)\n comparison_seq = jnp.concatenate((gt_seq, recon_seq), axis=1)\n comparison_seq = einops.rearrange(\n comparison_seq * 255, ""t h w c -> h (t w) c""\n )\n if val_results and step % args.val_interval == 0:\n val_results[""gt_seq_val""] = (\n val_results[""gt_batch""][""videos""][0].astype(jnp.float32)\n / 255.0\n )\n val_results[""recon_seq_val""] = val_results[""recon""][0].clip(\n 0, 1\n )\n val_results[""val_comparison_seq""] = jnp.concatenate(\n (val_results[""gt_seq_val""], val_results[""recon_seq_val""]),\n axis=1,",python,selection_command +668,778981,"jasmine/train_tokenizer_appendix-c.py",15988,1774," first_step = step\n while step < args.num_steps:\n for batch in dataloader_train:\n # --- Train step ---\n loss, recon, metrics = train_step(optimizer, batch)\n if step == first_step:\n print_mem_stats(""After params initialized"")\n step += 1\n\n # --- Validation loss ---\n val_results = {}\n\n # --- Logging ---\n if args.log:\n if step % args.log_interval == 0 and jax.process_index() == 0:\n log_dict = {""loss"": loss, ""step"": step, **metrics}\n if val_results:\n log_dict.update(val_results[""metrics""])\n wandb.log(log_dict)\n if step % args.log_image_interval == 0:\n gt_seq = batch[""videos""][0].astype(jnp.float32) / 255.0\n recon_seq = recon[0].clip(0, 1)\n 
comparison_seq = jnp.concatenate((gt_seq, recon_seq), axis=1)\n comparison_seq = einops.rearrange(\n comparison_seq * 255, ""t h w c -> h (t w) c""\n )\n if val_results and step % args.val_interval == 0:\n val_results[""gt_seq_val""] = (\n val_results[""gt_batch""][""videos""][0].astype(jnp.float32)\n / 255.0\n )\n val_results[""recon_seq_val""] = val_results[""recon""][0].clip(\n 0, 1\n )\n val_results[""val_comparison_seq""] = jnp.concatenate(\n (val_results[""gt_seq_val""], val_results[""recon_seq_val""]),\n axis=1,\n )",python,selection_command +669,778982,"jasmine/train_tokenizer_appendix-c.py",15988,1852," first_step = step\n while step < args.num_steps:\n for batch in dataloader_train:\n # --- Train step ---\n loss, recon, metrics = train_step(optimizer, batch)\n if step == first_step:\n print_mem_stats(""After params initialized"")\n step += 1\n\n # --- Validation loss ---\n val_results = {}\n\n # --- Logging ---\n if args.log:\n if step % args.log_interval == 0 and jax.process_index() == 0:\n log_dict = {""loss"": loss, ""step"": step, **metrics}\n if val_results:\n log_dict.update(val_results[""metrics""])\n wandb.log(log_dict)\n if step % args.log_image_interval == 0:\n gt_seq = batch[""videos""][0].astype(jnp.float32) / 255.0\n recon_seq = recon[0].clip(0, 1)\n comparison_seq = jnp.concatenate((gt_seq, recon_seq), axis=1)\n comparison_seq = einops.rearrange(\n comparison_seq * 255, ""t h w c -> h (t w) c""\n )\n if val_results and step % args.val_interval == 0:\n val_results[""gt_seq_val""] = (\n val_results[""gt_batch""][""videos""][0].astype(jnp.float32)\n / 255.0\n )\n val_results[""recon_seq_val""] = val_results[""recon""][0].clip(\n 0, 1\n )\n val_results[""val_comparison_seq""] = jnp.concatenate(\n (val_results[""gt_seq_val""], val_results[""recon_seq_val""]),\n axis=1,\n )\n val_results[""val_comparison_seq""] = einops.rearrange(",python,selection_command +670,779023,"jasmine/train_tokenizer_appendix-c.py",15988,1921," first_step = step\n while step < args.num_steps:\n for batch in dataloader_train:\n # --- Train step ---\n loss, recon, metrics = train_step(optimizer, batch)\n if step == first_step:\n print_mem_stats(""After params initialized"")\n step += 1\n\n # --- Validation loss ---\n val_results = {}\n\n # --- Logging ---\n if args.log:\n if step % args.log_interval == 0 and jax.process_index() == 0:\n log_dict = {""loss"": loss, ""step"": step, **metrics}\n if val_results:\n log_dict.update(val_results[""metrics""])\n wandb.log(log_dict)\n if step % args.log_image_interval == 0:\n gt_seq = batch[""videos""][0].astype(jnp.float32) / 255.0\n recon_seq = recon[0].clip(0, 1)\n comparison_seq = jnp.concatenate((gt_seq, recon_seq), axis=1)\n comparison_seq = einops.rearrange(\n comparison_seq * 255, ""t h w c -> h (t w) c""\n )\n if val_results and step % args.val_interval == 0:\n val_results[""gt_seq_val""] = (\n val_results[""gt_batch""][""videos""][0].astype(jnp.float32)\n / 255.0\n )\n val_results[""recon_seq_val""] = val_results[""recon""][0].clip(\n 0, 1\n )\n val_results[""val_comparison_seq""] = jnp.concatenate(\n (val_results[""gt_seq_val""], val_results[""recon_seq_val""]),\n axis=1,\n )\n val_results[""val_comparison_seq""] = einops.rearrange(\n val_results[""val_comparison_seq""] * 255,",python,selection_command +671,779065,"jasmine/train_tokenizer_appendix-c.py",15988,1973," first_step = step\n while step < args.num_steps:\n for batch in dataloader_train:\n # --- Train step ---\n loss, recon, metrics = train_step(optimizer, batch)\n if step == first_step:\n 
print_mem_stats(""After params initialized"")\n step += 1\n\n # --- Validation loss ---\n val_results = {}\n\n # --- Logging ---\n if args.log:\n if step % args.log_interval == 0 and jax.process_index() == 0:\n log_dict = {""loss"": loss, ""step"": step, **metrics}\n if val_results:\n log_dict.update(val_results[""metrics""])\n wandb.log(log_dict)\n if step % args.log_image_interval == 0:\n gt_seq = batch[""videos""][0].astype(jnp.float32) / 255.0\n recon_seq = recon[0].clip(0, 1)\n comparison_seq = jnp.concatenate((gt_seq, recon_seq), axis=1)\n comparison_seq = einops.rearrange(\n comparison_seq * 255, ""t h w c -> h (t w) c""\n )\n if val_results and step % args.val_interval == 0:\n val_results[""gt_seq_val""] = (\n val_results[""gt_batch""][""videos""][0].astype(jnp.float32)\n / 255.0\n )\n val_results[""recon_seq_val""] = val_results[""recon""][0].clip(\n 0, 1\n )\n val_results[""val_comparison_seq""] = jnp.concatenate(\n (val_results[""gt_seq_val""], val_results[""recon_seq_val""]),\n axis=1,\n )\n val_results[""val_comparison_seq""] = einops.rearrange(\n val_results[""val_comparison_seq""] * 255,\n ""t h w c -> h (t w) c"",",python,selection_command +672,779145,"jasmine/train_tokenizer_appendix-c.py",15988,1999," first_step = step\n while step < args.num_steps:\n for batch in dataloader_train:\n # --- Train step ---\n loss, recon, metrics = train_step(optimizer, batch)\n if step == first_step:\n print_mem_stats(""After params initialized"")\n step += 1\n\n # --- Validation loss ---\n val_results = {}\n\n # --- Logging ---\n if args.log:\n if step % args.log_interval == 0 and jax.process_index() == 0:\n log_dict = {""loss"": loss, ""step"": step, **metrics}\n if val_results:\n log_dict.update(val_results[""metrics""])\n wandb.log(log_dict)\n if step % args.log_image_interval == 0:\n gt_seq = batch[""videos""][0].astype(jnp.float32) / 255.0\n recon_seq = recon[0].clip(0, 1)\n comparison_seq = jnp.concatenate((gt_seq, recon_seq), axis=1)\n comparison_seq = einops.rearrange(\n comparison_seq * 255, ""t h w c -> h (t w) c""\n )\n if val_results and step % args.val_interval == 0:\n val_results[""gt_seq_val""] = (\n val_results[""gt_batch""][""videos""][0].astype(jnp.float32)\n / 255.0\n )\n val_results[""recon_seq_val""] = val_results[""recon""][0].clip(\n 0, 1\n )\n val_results[""val_comparison_seq""] = jnp.concatenate(\n (val_results[""gt_seq_val""], val_results[""recon_seq_val""]),\n axis=1,\n )\n val_results[""val_comparison_seq""] = einops.rearrange(\n val_results[""val_comparison_seq""] * 255,\n ""t h w c -> h (t w) c"",\n )",python,selection_command +673,779146,"jasmine/train_tokenizer_appendix-c.py",15988,2079," first_step = step\n while step < args.num_steps:\n for batch in dataloader_train:\n # --- Train step ---\n loss, recon, metrics = train_step(optimizer, batch)\n if step == first_step:\n print_mem_stats(""After params initialized"")\n step += 1\n\n # --- Validation loss ---\n val_results = {}\n\n # --- Logging ---\n if args.log:\n if step % args.log_interval == 0 and jax.process_index() == 0:\n log_dict = {""loss"": loss, ""step"": step, **metrics}\n if val_results:\n log_dict.update(val_results[""metrics""])\n wandb.log(log_dict)\n if step % args.log_image_interval == 0:\n gt_seq = batch[""videos""][0].astype(jnp.float32) / 255.0\n recon_seq = recon[0].clip(0, 1)\n comparison_seq = jnp.concatenate((gt_seq, recon_seq), axis=1)\n comparison_seq = einops.rearrange(\n comparison_seq * 255, ""t h w c -> h (t w) c""\n )\n if val_results and step % args.val_interval == 0:\n 
val_results[""gt_seq_val""] = (\n val_results[""gt_batch""][""videos""][0].astype(jnp.float32)\n / 255.0\n )\n val_results[""recon_seq_val""] = val_results[""recon""][0].clip(\n 0, 1\n )\n val_results[""val_comparison_seq""] = jnp.concatenate(\n (val_results[""gt_seq_val""], val_results[""recon_seq_val""]),\n axis=1,\n )\n val_results[""val_comparison_seq""] = einops.rearrange(\n val_results[""val_comparison_seq""] * 255,\n ""t h w c -> h (t w) c"",\n )\n # NOTE: Process-dependent control flow deliberately happens",python,selection_command +674,779195,"jasmine/train_tokenizer_appendix-c.py",15988,2157," first_step = step\n while step < args.num_steps:\n for batch in dataloader_train:\n # --- Train step ---\n loss, recon, metrics = train_step(optimizer, batch)\n if step == first_step:\n print_mem_stats(""After params initialized"")\n step += 1\n\n # --- Validation loss ---\n val_results = {}\n\n # --- Logging ---\n if args.log:\n if step % args.log_interval == 0 and jax.process_index() == 0:\n log_dict = {""loss"": loss, ""step"": step, **metrics}\n if val_results:\n log_dict.update(val_results[""metrics""])\n wandb.log(log_dict)\n if step % args.log_image_interval == 0:\n gt_seq = batch[""videos""][0].astype(jnp.float32) / 255.0\n recon_seq = recon[0].clip(0, 1)\n comparison_seq = jnp.concatenate((gt_seq, recon_seq), axis=1)\n comparison_seq = einops.rearrange(\n comparison_seq * 255, ""t h w c -> h (t w) c""\n )\n if val_results and step % args.val_interval == 0:\n val_results[""gt_seq_val""] = (\n val_results[""gt_batch""][""videos""][0].astype(jnp.float32)\n / 255.0\n )\n val_results[""recon_seq_val""] = val_results[""recon""][0].clip(\n 0, 1\n )\n val_results[""val_comparison_seq""] = jnp.concatenate(\n (val_results[""gt_seq_val""], val_results[""recon_seq_val""]),\n axis=1,\n )\n val_results[""val_comparison_seq""] = einops.rearrange(\n val_results[""val_comparison_seq""] * 255,\n ""t h w c -> h (t w) c"",\n )\n # NOTE: Process-dependent control flow deliberately happens\n # after indexing operation since it must not contain code",python,selection_command +675,779196,"jasmine/train_tokenizer_appendix-c.py",15988,2234," first_step = step\n while step < args.num_steps:\n for batch in dataloader_train:\n # --- Train step ---\n loss, recon, metrics = train_step(optimizer, batch)\n if step == first_step:\n print_mem_stats(""After params initialized"")\n step += 1\n\n # --- Validation loss ---\n val_results = {}\n\n # --- Logging ---\n if args.log:\n if step % args.log_interval == 0 and jax.process_index() == 0:\n log_dict = {""loss"": loss, ""step"": step, **metrics}\n if val_results:\n log_dict.update(val_results[""metrics""])\n wandb.log(log_dict)\n if step % args.log_image_interval == 0:\n gt_seq = batch[""videos""][0].astype(jnp.float32) / 255.0\n recon_seq = recon[0].clip(0, 1)\n comparison_seq = jnp.concatenate((gt_seq, recon_seq), axis=1)\n comparison_seq = einops.rearrange(\n comparison_seq * 255, ""t h w c -> h (t w) c""\n )\n if val_results and step % args.val_interval == 0:\n val_results[""gt_seq_val""] = (\n val_results[""gt_batch""][""videos""][0].astype(jnp.float32)\n / 255.0\n )\n val_results[""recon_seq_val""] = val_results[""recon""][0].clip(\n 0, 1\n )\n val_results[""val_comparison_seq""] = jnp.concatenate(\n (val_results[""gt_seq_val""], val_results[""recon_seq_val""]),\n axis=1,\n )\n val_results[""val_comparison_seq""] = einops.rearrange(\n val_results[""val_comparison_seq""] * 255,\n ""t h w c -> h (t w) c"",\n )\n # NOTE: Process-dependent control flow deliberately 
happens\n # after indexing operation since it must not contain code\n # sections that lead to cross-accelerator communication.",python,selection_command +676,779197,"jasmine/train_tokenizer_appendix-c.py",15988,2283," first_step = step\n while step < args.num_steps:\n for batch in dataloader_train:\n # --- Train step ---\n loss, recon, metrics = train_step(optimizer, batch)\n if step == first_step:\n print_mem_stats(""After params initialized"")\n step += 1\n\n # --- Validation loss ---\n val_results = {}\n\n # --- Logging ---\n if args.log:\n if step % args.log_interval == 0 and jax.process_index() == 0:\n log_dict = {""loss"": loss, ""step"": step, **metrics}\n if val_results:\n log_dict.update(val_results[""metrics""])\n wandb.log(log_dict)\n if step % args.log_image_interval == 0:\n gt_seq = batch[""videos""][0].astype(jnp.float32) / 255.0\n recon_seq = recon[0].clip(0, 1)\n comparison_seq = jnp.concatenate((gt_seq, recon_seq), axis=1)\n comparison_seq = einops.rearrange(\n comparison_seq * 255, ""t h w c -> h (t w) c""\n )\n if val_results and step % args.val_interval == 0:\n val_results[""gt_seq_val""] = (\n val_results[""gt_batch""][""videos""][0].astype(jnp.float32)\n / 255.0\n )\n val_results[""recon_seq_val""] = val_results[""recon""][0].clip(\n 0, 1\n )\n val_results[""val_comparison_seq""] = jnp.concatenate(\n (val_results[""gt_seq_val""], val_results[""recon_seq_val""]),\n axis=1,\n )\n val_results[""val_comparison_seq""] = einops.rearrange(\n val_results[""val_comparison_seq""] * 255,\n ""t h w c -> h (t w) c"",\n )\n # NOTE: Process-dependent control flow deliberately happens\n # after indexing operation since it must not contain code\n # sections that lead to cross-accelerator communication.\n if jax.process_index() == 0:",python,selection_command +677,779239,"jasmine/train_tokenizer_appendix-c.py",15988,2326," first_step = step\n while step < args.num_steps:\n for batch in dataloader_train:\n # --- Train step ---\n loss, recon, metrics = train_step(optimizer, batch)\n if step == first_step:\n print_mem_stats(""After params initialized"")\n step += 1\n\n # --- Validation loss ---\n val_results = {}\n\n # --- Logging ---\n if args.log:\n if step % args.log_interval == 0 and jax.process_index() == 0:\n log_dict = {""loss"": loss, ""step"": step, **metrics}\n if val_results:\n log_dict.update(val_results[""metrics""])\n wandb.log(log_dict)\n if step % args.log_image_interval == 0:\n gt_seq = batch[""videos""][0].astype(jnp.float32) / 255.0\n recon_seq = recon[0].clip(0, 1)\n comparison_seq = jnp.concatenate((gt_seq, recon_seq), axis=1)\n comparison_seq = einops.rearrange(\n comparison_seq * 255, ""t h w c -> h (t w) c""\n )\n if val_results and step % args.val_interval == 0:\n val_results[""gt_seq_val""] = (\n val_results[""gt_batch""][""videos""][0].astype(jnp.float32)\n / 255.0\n )\n val_results[""recon_seq_val""] = val_results[""recon""][0].clip(\n 0, 1\n )\n val_results[""val_comparison_seq""] = jnp.concatenate(\n (val_results[""gt_seq_val""], val_results[""recon_seq_val""]),\n axis=1,\n )\n val_results[""val_comparison_seq""] = einops.rearrange(\n val_results[""val_comparison_seq""] * 255,\n ""t h w c -> h (t w) c"",\n )\n # NOTE: Process-dependent control flow deliberately happens\n # after indexing operation since it must not contain code\n # sections that lead to cross-accelerator communication.\n if jax.process_index() == 0:\n log_images = dict(",python,selection_command +678,779239,"TERMINAL",0,0,"4",,terminal_output 
+679,779271,"jasmine/train_tokenizer_appendix-c.py",15988,2396," first_step = step\n while step < args.num_steps:\n for batch in dataloader_train:\n # --- Train step ---\n loss, recon, metrics = train_step(optimizer, batch)\n if step == first_step:\n print_mem_stats(""After params initialized"")\n step += 1\n\n # --- Validation loss ---\n val_results = {}\n\n # --- Logging ---\n if args.log:\n if step % args.log_interval == 0 and jax.process_index() == 0:\n log_dict = {""loss"": loss, ""step"": step, **metrics}\n if val_results:\n log_dict.update(val_results[""metrics""])\n wandb.log(log_dict)\n if step % args.log_image_interval == 0:\n gt_seq = batch[""videos""][0].astype(jnp.float32) / 255.0\n recon_seq = recon[0].clip(0, 1)\n comparison_seq = jnp.concatenate((gt_seq, recon_seq), axis=1)\n comparison_seq = einops.rearrange(\n comparison_seq * 255, ""t h w c -> h (t w) c""\n )\n if val_results and step % args.val_interval == 0:\n val_results[""gt_seq_val""] = (\n val_results[""gt_batch""][""videos""][0].astype(jnp.float32)\n / 255.0\n )\n val_results[""recon_seq_val""] = val_results[""recon""][0].clip(\n 0, 1\n )\n val_results[""val_comparison_seq""] = jnp.concatenate(\n (val_results[""gt_seq_val""], val_results[""recon_seq_val""]),\n axis=1,\n )\n val_results[""val_comparison_seq""] = einops.rearrange(\n val_results[""val_comparison_seq""] * 255,\n ""t h w c -> h (t w) c"",\n )\n # NOTE: Process-dependent control flow deliberately happens\n # after indexing operation since it must not contain code\n # sections that lead to cross-accelerator communication.\n if jax.process_index() == 0:\n log_images = dict(\n image=wandb.Image(np.asarray(gt_seq[0])),",python,selection_command +680,779312,"jasmine/train_tokenizer_appendix-c.py",15988,2469," first_step = step\n while step < args.num_steps:\n for batch in dataloader_train:\n # --- Train step ---\n loss, recon, metrics = train_step(optimizer, batch)\n if step == first_step:\n print_mem_stats(""After params initialized"")\n step += 1\n\n # --- Validation loss ---\n val_results = {}\n\n # --- Logging ---\n if args.log:\n if step % args.log_interval == 0 and jax.process_index() == 0:\n log_dict = {""loss"": loss, ""step"": step, **metrics}\n if val_results:\n log_dict.update(val_results[""metrics""])\n wandb.log(log_dict)\n if step % args.log_image_interval == 0:\n gt_seq = batch[""videos""][0].astype(jnp.float32) / 255.0\n recon_seq = recon[0].clip(0, 1)\n comparison_seq = jnp.concatenate((gt_seq, recon_seq), axis=1)\n comparison_seq = einops.rearrange(\n comparison_seq * 255, ""t h w c -> h (t w) c""\n )\n if val_results and step % args.val_interval == 0:\n val_results[""gt_seq_val""] = (\n val_results[""gt_batch""][""videos""][0].astype(jnp.float32)\n / 255.0\n )\n val_results[""recon_seq_val""] = val_results[""recon""][0].clip(\n 0, 1\n )\n val_results[""val_comparison_seq""] = jnp.concatenate(\n (val_results[""gt_seq_val""], val_results[""recon_seq_val""]),\n axis=1,\n )\n val_results[""val_comparison_seq""] = einops.rearrange(\n val_results[""val_comparison_seq""] * 255,\n ""t h w c -> h (t w) c"",\n )\n # NOTE: Process-dependent control flow deliberately happens\n # after indexing operation since it must not contain code\n # sections that lead to cross-accelerator communication.\n if jax.process_index() == 0:\n log_images = dict(\n image=wandb.Image(np.asarray(gt_seq[0])),\n recon=wandb.Image(np.asarray(recon_seq[0])),",python,selection_command +681,779314,"jasmine/train_tokenizer_appendix-c.py",15988,2524," first_step = step\n while step 
< args.num_steps:\n for batch in dataloader_train:\n # --- Train step ---\n loss, recon, metrics = train_step(optimizer, batch)\n if step == first_step:\n print_mem_stats(""After params initialized"")\n step += 1\n\n # --- Validation loss ---\n val_results = {}\n\n # --- Logging ---\n if args.log:\n if step % args.log_interval == 0 and jax.process_index() == 0:\n log_dict = {""loss"": loss, ""step"": step, **metrics}\n if val_results:\n log_dict.update(val_results[""metrics""])\n wandb.log(log_dict)\n if step % args.log_image_interval == 0:\n gt_seq = batch[""videos""][0].astype(jnp.float32) / 255.0\n recon_seq = recon[0].clip(0, 1)\n comparison_seq = jnp.concatenate((gt_seq, recon_seq), axis=1)\n comparison_seq = einops.rearrange(\n comparison_seq * 255, ""t h w c -> h (t w) c""\n )\n if val_results and step % args.val_interval == 0:\n val_results[""gt_seq_val""] = (\n val_results[""gt_batch""][""videos""][0].astype(jnp.float32)\n / 255.0\n )\n val_results[""recon_seq_val""] = val_results[""recon""][0].clip(\n 0, 1\n )\n val_results[""val_comparison_seq""] = jnp.concatenate(\n (val_results[""gt_seq_val""], val_results[""recon_seq_val""]),\n axis=1,\n )\n val_results[""val_comparison_seq""] = einops.rearrange(\n val_results[""val_comparison_seq""] * 255,\n ""t h w c -> h (t w) c"",\n )\n # NOTE: Process-dependent control flow deliberately happens\n # after indexing operation since it must not contain code\n # sections that lead to cross-accelerator communication.\n if jax.process_index() == 0:\n log_images = dict(\n image=wandb.Image(np.asarray(gt_seq[0])),\n recon=wandb.Image(np.asarray(recon_seq[0])),\n true_vs_recon=wandb.Image(",python,selection_command +682,779353,"jasmine/train_tokenizer_appendix-c.py",15988,2600," first_step = step\n while step < args.num_steps:\n for batch in dataloader_train:\n # --- Train step ---\n loss, recon, metrics = train_step(optimizer, batch)\n if step == first_step:\n print_mem_stats(""After params initialized"")\n step += 1\n\n # --- Validation loss ---\n val_results = {}\n\n # --- Logging ---\n if args.log:\n if step % args.log_interval == 0 and jax.process_index() == 0:\n log_dict = {""loss"": loss, ""step"": step, **metrics}\n if val_results:\n log_dict.update(val_results[""metrics""])\n wandb.log(log_dict)\n if step % args.log_image_interval == 0:\n gt_seq = batch[""videos""][0].astype(jnp.float32) / 255.0\n recon_seq = recon[0].clip(0, 1)\n comparison_seq = jnp.concatenate((gt_seq, recon_seq), axis=1)\n comparison_seq = einops.rearrange(\n comparison_seq * 255, ""t h w c -> h (t w) c""\n )\n if val_results and step % args.val_interval == 0:\n val_results[""gt_seq_val""] = (\n val_results[""gt_batch""][""videos""][0].astype(jnp.float32)\n / 255.0\n )\n val_results[""recon_seq_val""] = val_results[""recon""][0].clip(\n 0, 1\n )\n val_results[""val_comparison_seq""] = jnp.concatenate(\n (val_results[""gt_seq_val""], val_results[""recon_seq_val""]),\n axis=1,\n )\n val_results[""val_comparison_seq""] = einops.rearrange(\n val_results[""val_comparison_seq""] * 255,\n ""t h w c -> h (t w) c"",\n )\n # NOTE: Process-dependent control flow deliberately happens\n # after indexing operation since it must not contain code\n # sections that lead to cross-accelerator communication.\n if jax.process_index() == 0:\n log_images = dict(\n image=wandb.Image(np.asarray(gt_seq[0])),\n recon=wandb.Image(np.asarray(recon_seq[0])),\n true_vs_recon=wandb.Image(\n np.asarray(comparison_seq.astype(np.uint8))",python,selection_command 
+683,779399,"jasmine/train_tokenizer_appendix-c.py",15988,2631," first_step = step\n while step < args.num_steps:\n for batch in dataloader_train:\n # --- Train step ---\n loss, recon, metrics = train_step(optimizer, batch)\n if step == first_step:\n print_mem_stats(""After params initialized"")\n step += 1\n\n # --- Validation loss ---\n val_results = {}\n\n # --- Logging ---\n if args.log:\n if step % args.log_interval == 0 and jax.process_index() == 0:\n log_dict = {""loss"": loss, ""step"": step, **metrics}\n if val_results:\n log_dict.update(val_results[""metrics""])\n wandb.log(log_dict)\n if step % args.log_image_interval == 0:\n gt_seq = batch[""videos""][0].astype(jnp.float32) / 255.0\n recon_seq = recon[0].clip(0, 1)\n comparison_seq = jnp.concatenate((gt_seq, recon_seq), axis=1)\n comparison_seq = einops.rearrange(\n comparison_seq * 255, ""t h w c -> h (t w) c""\n )\n if val_results and step % args.val_interval == 0:\n val_results[""gt_seq_val""] = (\n val_results[""gt_batch""][""videos""][0].astype(jnp.float32)\n / 255.0\n )\n val_results[""recon_seq_val""] = val_results[""recon""][0].clip(\n 0, 1\n )\n val_results[""val_comparison_seq""] = jnp.concatenate(\n (val_results[""gt_seq_val""], val_results[""recon_seq_val""]),\n axis=1,\n )\n val_results[""val_comparison_seq""] = einops.rearrange(\n val_results[""val_comparison_seq""] * 255,\n ""t h w c -> h (t w) c"",\n )\n # NOTE: Process-dependent control flow deliberately happens\n # after indexing operation since it must not contain code\n # sections that lead to cross-accelerator communication.\n if jax.process_index() == 0:\n log_images = dict(\n image=wandb.Image(np.asarray(gt_seq[0])),\n recon=wandb.Image(np.asarray(recon_seq[0])),\n true_vs_recon=wandb.Image(\n np.asarray(comparison_seq.astype(np.uint8))\n ),",python,selection_command +684,779447,"jasmine/train_tokenizer_appendix-c.py",15988,2657," first_step = step\n while step < args.num_steps:\n for batch in dataloader_train:\n # --- Train step ---\n loss, recon, metrics = train_step(optimizer, batch)\n if step == first_step:\n print_mem_stats(""After params initialized"")\n step += 1\n\n # --- Validation loss ---\n val_results = {}\n\n # --- Logging ---\n if args.log:\n if step % args.log_interval == 0 and jax.process_index() == 0:\n log_dict = {""loss"": loss, ""step"": step, **metrics}\n if val_results:\n log_dict.update(val_results[""metrics""])\n wandb.log(log_dict)\n if step % args.log_image_interval == 0:\n gt_seq = batch[""videos""][0].astype(jnp.float32) / 255.0\n recon_seq = recon[0].clip(0, 1)\n comparison_seq = jnp.concatenate((gt_seq, recon_seq), axis=1)\n comparison_seq = einops.rearrange(\n comparison_seq * 255, ""t h w c -> h (t w) c""\n )\n if val_results and step % args.val_interval == 0:\n val_results[""gt_seq_val""] = (\n val_results[""gt_batch""][""videos""][0].astype(jnp.float32)\n / 255.0\n )\n val_results[""recon_seq_val""] = val_results[""recon""][0].clip(\n 0, 1\n )\n val_results[""val_comparison_seq""] = jnp.concatenate(\n (val_results[""gt_seq_val""], val_results[""recon_seq_val""]),\n axis=1,\n )\n val_results[""val_comparison_seq""] = einops.rearrange(\n val_results[""val_comparison_seq""] * 255,\n ""t h w c -> h (t w) c"",\n )\n # NOTE: Process-dependent control flow deliberately happens\n # after indexing operation since it must not contain code\n # sections that lead to cross-accelerator communication.\n if jax.process_index() == 0:\n log_images = dict(\n image=wandb.Image(np.asarray(gt_seq[0])),\n 
recon=wandb.Image(np.asarray(recon_seq[0])),\n true_vs_recon=wandb.Image(\n np.asarray(comparison_seq.astype(np.uint8))\n ),\n )",python,selection_command +685,779490,"jasmine/train_tokenizer_appendix-c.py",15988,2731," first_step = step\n while step < args.num_steps:\n for batch in dataloader_train:\n # --- Train step ---\n loss, recon, metrics = train_step(optimizer, batch)\n if step == first_step:\n print_mem_stats(""After params initialized"")\n step += 1\n\n # --- Validation loss ---\n val_results = {}\n\n # --- Logging ---\n if args.log:\n if step % args.log_interval == 0 and jax.process_index() == 0:\n log_dict = {""loss"": loss, ""step"": step, **metrics}\n if val_results:\n log_dict.update(val_results[""metrics""])\n wandb.log(log_dict)\n if step % args.log_image_interval == 0:\n gt_seq = batch[""videos""][0].astype(jnp.float32) / 255.0\n recon_seq = recon[0].clip(0, 1)\n comparison_seq = jnp.concatenate((gt_seq, recon_seq), axis=1)\n comparison_seq = einops.rearrange(\n comparison_seq * 255, ""t h w c -> h (t w) c""\n )\n if val_results and step % args.val_interval == 0:\n val_results[""gt_seq_val""] = (\n val_results[""gt_batch""][""videos""][0].astype(jnp.float32)\n / 255.0\n )\n val_results[""recon_seq_val""] = val_results[""recon""][0].clip(\n 0, 1\n )\n val_results[""val_comparison_seq""] = jnp.concatenate(\n (val_results[""gt_seq_val""], val_results[""recon_seq_val""]),\n axis=1,\n )\n val_results[""val_comparison_seq""] = einops.rearrange(\n val_results[""val_comparison_seq""] * 255,\n ""t h w c -> h (t w) c"",\n )\n # NOTE: Process-dependent control flow deliberately happens\n # after indexing operation since it must not contain code\n # sections that lead to cross-accelerator communication.\n if jax.process_index() == 0:\n log_images = dict(\n image=wandb.Image(np.asarray(gt_seq[0])),\n recon=wandb.Image(np.asarray(recon_seq[0])),\n true_vs_recon=wandb.Image(\n np.asarray(comparison_seq.astype(np.uint8))\n ),\n )\n if val_results and step % args.val_interval == 0:",python,selection_command +686,779491,"jasmine/train_tokenizer_appendix-c.py",15988,2778," first_step = step\n while step < args.num_steps:\n for batch in dataloader_train:\n # --- Train step ---\n loss, recon, metrics = train_step(optimizer, batch)\n if step == first_step:\n print_mem_stats(""After params initialized"")\n step += 1\n\n # --- Validation loss ---\n val_results = {}\n\n # --- Logging ---\n if args.log:\n if step % args.log_interval == 0 and jax.process_index() == 0:\n log_dict = {""loss"": loss, ""step"": step, **metrics}\n if val_results:\n log_dict.update(val_results[""metrics""])\n wandb.log(log_dict)\n if step % args.log_image_interval == 0:\n gt_seq = batch[""videos""][0].astype(jnp.float32) / 255.0\n recon_seq = recon[0].clip(0, 1)\n comparison_seq = jnp.concatenate((gt_seq, recon_seq), axis=1)\n comparison_seq = einops.rearrange(\n comparison_seq * 255, ""t h w c -> h (t w) c""\n )\n if val_results and step % args.val_interval == 0:\n val_results[""gt_seq_val""] = (\n val_results[""gt_batch""][""videos""][0].astype(jnp.float32)\n / 255.0\n )\n val_results[""recon_seq_val""] = val_results[""recon""][0].clip(\n 0, 1\n )\n val_results[""val_comparison_seq""] = jnp.concatenate(\n (val_results[""gt_seq_val""], val_results[""recon_seq_val""]),\n axis=1,\n )\n val_results[""val_comparison_seq""] = einops.rearrange(\n val_results[""val_comparison_seq""] * 255,\n ""t h w c -> h (t w) c"",\n )\n # NOTE: Process-dependent control flow deliberately happens\n # after indexing operation since it must 
not contain code\n # sections that lead to cross-accelerator communication.\n if jax.process_index() == 0:\n log_images = dict(\n image=wandb.Image(np.asarray(gt_seq[0])),\n recon=wandb.Image(np.asarray(recon_seq[0])),\n true_vs_recon=wandb.Image(\n np.asarray(comparison_seq.astype(np.uint8))\n ),\n )\n if val_results and step % args.val_interval == 0:\n log_images.update(",python,selection_command +687,779538,"jasmine/train_tokenizer_appendix-c.py",15988,2816," first_step = step\n while step < args.num_steps:\n for batch in dataloader_train:\n # --- Train step ---\n loss, recon, metrics = train_step(optimizer, batch)\n if step == first_step:\n print_mem_stats(""After params initialized"")\n step += 1\n\n # --- Validation loss ---\n val_results = {}\n\n # --- Logging ---\n if args.log:\n if step % args.log_interval == 0 and jax.process_index() == 0:\n log_dict = {""loss"": loss, ""step"": step, **metrics}\n if val_results:\n log_dict.update(val_results[""metrics""])\n wandb.log(log_dict)\n if step % args.log_image_interval == 0:\n gt_seq = batch[""videos""][0].astype(jnp.float32) / 255.0\n recon_seq = recon[0].clip(0, 1)\n comparison_seq = jnp.concatenate((gt_seq, recon_seq), axis=1)\n comparison_seq = einops.rearrange(\n comparison_seq * 255, ""t h w c -> h (t w) c""\n )\n if val_results and step % args.val_interval == 0:\n val_results[""gt_seq_val""] = (\n val_results[""gt_batch""][""videos""][0].astype(jnp.float32)\n / 255.0\n )\n val_results[""recon_seq_val""] = val_results[""recon""][0].clip(\n 0, 1\n )\n val_results[""val_comparison_seq""] = jnp.concatenate(\n (val_results[""gt_seq_val""], val_results[""recon_seq_val""]),\n axis=1,\n )\n val_results[""val_comparison_seq""] = einops.rearrange(\n val_results[""val_comparison_seq""] * 255,\n ""t h w c -> h (t w) c"",\n )\n # NOTE: Process-dependent control flow deliberately happens\n # after indexing operation since it must not contain code\n # sections that lead to cross-accelerator communication.\n if jax.process_index() == 0:\n log_images = dict(\n image=wandb.Image(np.asarray(gt_seq[0])),\n recon=wandb.Image(np.asarray(recon_seq[0])),\n true_vs_recon=wandb.Image(\n np.asarray(comparison_seq.astype(np.uint8))\n ),\n )\n if val_results and step % args.val_interval == 0:\n log_images.update(\n dict(",python,selection_command +688,779539,"jasmine/train_tokenizer_appendix-c.py",15988,2875," first_step = step\n while step < args.num_steps:\n for batch in dataloader_train:\n # --- Train step ---\n loss, recon, metrics = train_step(optimizer, batch)\n if step == first_step:\n print_mem_stats(""After params initialized"")\n step += 1\n\n # --- Validation loss ---\n val_results = {}\n\n # --- Logging ---\n if args.log:\n if step % args.log_interval == 0 and jax.process_index() == 0:\n log_dict = {""loss"": loss, ""step"": step, **metrics}\n if val_results:\n log_dict.update(val_results[""metrics""])\n wandb.log(log_dict)\n if step % args.log_image_interval == 0:\n gt_seq = batch[""videos""][0].astype(jnp.float32) / 255.0\n recon_seq = recon[0].clip(0, 1)\n comparison_seq = jnp.concatenate((gt_seq, recon_seq), axis=1)\n comparison_seq = einops.rearrange(\n comparison_seq * 255, ""t h w c -> h (t w) c""\n )\n if val_results and step % args.val_interval == 0:\n val_results[""gt_seq_val""] = (\n val_results[""gt_batch""][""videos""][0].astype(jnp.float32)\n / 255.0\n )\n val_results[""recon_seq_val""] = val_results[""recon""][0].clip(\n 0, 1\n )\n val_results[""val_comparison_seq""] = jnp.concatenate(\n (val_results[""gt_seq_val""], 
val_results[""recon_seq_val""]),\n axis=1,\n )\n val_results[""val_comparison_seq""] = einops.rearrange(\n val_results[""val_comparison_seq""] * 255,\n ""t h w c -> h (t w) c"",\n )\n # NOTE: Process-dependent control flow deliberately happens\n # after indexing operation since it must not contain code\n # sections that lead to cross-accelerator communication.\n if jax.process_index() == 0:\n log_images = dict(\n image=wandb.Image(np.asarray(gt_seq[0])),\n recon=wandb.Image(np.asarray(recon_seq[0])),\n true_vs_recon=wandb.Image(\n np.asarray(comparison_seq.astype(np.uint8))\n ),\n )\n if val_results and step % args.val_interval == 0:\n log_images.update(\n dict(\n val_image=wandb.Image(",python,selection_command +689,779576,"jasmine/train_tokenizer_appendix-c.py",15988,2956," first_step = step\n while step < args.num_steps:\n for batch in dataloader_train:\n # --- Train step ---\n loss, recon, metrics = train_step(optimizer, batch)\n if step == first_step:\n print_mem_stats(""After params initialized"")\n step += 1\n\n # --- Validation loss ---\n val_results = {}\n\n # --- Logging ---\n if args.log:\n if step % args.log_interval == 0 and jax.process_index() == 0:\n log_dict = {""loss"": loss, ""step"": step, **metrics}\n if val_results:\n log_dict.update(val_results[""metrics""])\n wandb.log(log_dict)\n if step % args.log_image_interval == 0:\n gt_seq = batch[""videos""][0].astype(jnp.float32) / 255.0\n recon_seq = recon[0].clip(0, 1)\n comparison_seq = jnp.concatenate((gt_seq, recon_seq), axis=1)\n comparison_seq = einops.rearrange(\n comparison_seq * 255, ""t h w c -> h (t w) c""\n )\n if val_results and step % args.val_interval == 0:\n val_results[""gt_seq_val""] = (\n val_results[""gt_batch""][""videos""][0].astype(jnp.float32)\n / 255.0\n )\n val_results[""recon_seq_val""] = val_results[""recon""][0].clip(\n 0, 1\n )\n val_results[""val_comparison_seq""] = jnp.concatenate(\n (val_results[""gt_seq_val""], val_results[""recon_seq_val""]),\n axis=1,\n )\n val_results[""val_comparison_seq""] = einops.rearrange(\n val_results[""val_comparison_seq""] * 255,\n ""t h w c -> h (t w) c"",\n )\n # NOTE: Process-dependent control flow deliberately happens\n # after indexing operation since it must not contain code\n # sections that lead to cross-accelerator communication.\n if jax.process_index() == 0:\n log_images = dict(\n image=wandb.Image(np.asarray(gt_seq[0])),\n recon=wandb.Image(np.asarray(recon_seq[0])),\n true_vs_recon=wandb.Image(\n np.asarray(comparison_seq.astype(np.uint8))\n ),\n )\n if val_results and step % args.val_interval == 0:\n log_images.update(\n dict(\n val_image=wandb.Image(\n np.asarray(val_results[""gt_seq_val""][0])",python,selection_command +690,779578,"jasmine/train_tokenizer_appendix-c.py",15988,2995," first_step = step\n while step < args.num_steps:\n for batch in dataloader_train:\n # --- Train step ---\n loss, recon, metrics = train_step(optimizer, batch)\n if step == first_step:\n print_mem_stats(""After params initialized"")\n step += 1\n\n # --- Validation loss ---\n val_results = {}\n\n # --- Logging ---\n if args.log:\n if step % args.log_interval == 0 and jax.process_index() == 0:\n log_dict = {""loss"": loss, ""step"": step, **metrics}\n if val_results:\n log_dict.update(val_results[""metrics""])\n wandb.log(log_dict)\n if step % args.log_image_interval == 0:\n gt_seq = batch[""videos""][0].astype(jnp.float32) / 255.0\n recon_seq = recon[0].clip(0, 1)\n comparison_seq = jnp.concatenate((gt_seq, recon_seq), axis=1)\n comparison_seq = einops.rearrange(\n 
comparison_seq * 255, ""t h w c -> h (t w) c""\n )\n if val_results and step % args.val_interval == 0:\n val_results[""gt_seq_val""] = (\n val_results[""gt_batch""][""videos""][0].astype(jnp.float32)\n / 255.0\n )\n val_results[""recon_seq_val""] = val_results[""recon""][0].clip(\n 0, 1\n )\n val_results[""val_comparison_seq""] = jnp.concatenate(\n (val_results[""gt_seq_val""], val_results[""recon_seq_val""]),\n axis=1,\n )\n val_results[""val_comparison_seq""] = einops.rearrange(\n val_results[""val_comparison_seq""] * 255,\n ""t h w c -> h (t w) c"",\n )\n # NOTE: Process-dependent control flow deliberately happens\n # after indexing operation since it must not contain code\n # sections that lead to cross-accelerator communication.\n if jax.process_index() == 0:\n log_images = dict(\n image=wandb.Image(np.asarray(gt_seq[0])),\n recon=wandb.Image(np.asarray(recon_seq[0])),\n true_vs_recon=wandb.Image(\n np.asarray(comparison_seq.astype(np.uint8))\n ),\n )\n if val_results and step % args.val_interval == 0:\n log_images.update(\n dict(\n val_image=wandb.Image(\n np.asarray(val_results[""gt_seq_val""][0])\n ),",python,selection_command +691,779620,"jasmine/train_tokenizer_appendix-c.py",15988,3054," first_step = step\n while step < args.num_steps:\n for batch in dataloader_train:\n # --- Train step ---\n loss, recon, metrics = train_step(optimizer, batch)\n if step == first_step:\n print_mem_stats(""After params initialized"")\n step += 1\n\n # --- Validation loss ---\n val_results = {}\n\n # --- Logging ---\n if args.log:\n if step % args.log_interval == 0 and jax.process_index() == 0:\n log_dict = {""loss"": loss, ""step"": step, **metrics}\n if val_results:\n log_dict.update(val_results[""metrics""])\n wandb.log(log_dict)\n if step % args.log_image_interval == 0:\n gt_seq = batch[""videos""][0].astype(jnp.float32) / 255.0\n recon_seq = recon[0].clip(0, 1)\n comparison_seq = jnp.concatenate((gt_seq, recon_seq), axis=1)\n comparison_seq = einops.rearrange(\n comparison_seq * 255, ""t h w c -> h (t w) c""\n )\n if val_results and step % args.val_interval == 0:\n val_results[""gt_seq_val""] = (\n val_results[""gt_batch""][""videos""][0].astype(jnp.float32)\n / 255.0\n )\n val_results[""recon_seq_val""] = val_results[""recon""][0].clip(\n 0, 1\n )\n val_results[""val_comparison_seq""] = jnp.concatenate(\n (val_results[""gt_seq_val""], val_results[""recon_seq_val""]),\n axis=1,\n )\n val_results[""val_comparison_seq""] = einops.rearrange(\n val_results[""val_comparison_seq""] * 255,\n ""t h w c -> h (t w) c"",\n )\n # NOTE: Process-dependent control flow deliberately happens\n # after indexing operation since it must not contain code\n # sections that lead to cross-accelerator communication.\n if jax.process_index() == 0:\n log_images = dict(\n image=wandb.Image(np.asarray(gt_seq[0])),\n recon=wandb.Image(np.asarray(recon_seq[0])),\n true_vs_recon=wandb.Image(\n np.asarray(comparison_seq.astype(np.uint8))\n ),\n )\n if val_results and step % args.val_interval == 0:\n log_images.update(\n dict(\n val_image=wandb.Image(\n np.asarray(val_results[""gt_seq_val""][0])\n ),\n val_recon=wandb.Image(",python,selection_command +692,779663,"jasmine/train_tokenizer_appendix-c.py",15988,3138," first_step = step\n while step < args.num_steps:\n for batch in dataloader_train:\n # --- Train step ---\n loss, recon, metrics = train_step(optimizer, batch)\n if step == first_step:\n print_mem_stats(""After params initialized"")\n step += 1\n\n # --- Validation loss ---\n val_results = {}\n\n # --- Logging ---\n 
if args.log:\n if step % args.log_interval == 0 and jax.process_index() == 0:\n log_dict = {""loss"": loss, ""step"": step, **metrics}\n if val_results:\n log_dict.update(val_results[""metrics""])\n wandb.log(log_dict)\n if step % args.log_image_interval == 0:\n gt_seq = batch[""videos""][0].astype(jnp.float32) / 255.0\n recon_seq = recon[0].clip(0, 1)\n comparison_seq = jnp.concatenate((gt_seq, recon_seq), axis=1)\n comparison_seq = einops.rearrange(\n comparison_seq * 255, ""t h w c -> h (t w) c""\n )\n if val_results and step % args.val_interval == 0:\n val_results[""gt_seq_val""] = (\n val_results[""gt_batch""][""videos""][0].astype(jnp.float32)\n / 255.0\n )\n val_results[""recon_seq_val""] = val_results[""recon""][0].clip(\n 0, 1\n )\n val_results[""val_comparison_seq""] = jnp.concatenate(\n (val_results[""gt_seq_val""], val_results[""recon_seq_val""]),\n axis=1,\n )\n val_results[""val_comparison_seq""] = einops.rearrange(\n val_results[""val_comparison_seq""] * 255,\n ""t h w c -> h (t w) c"",\n )\n # NOTE: Process-dependent control flow deliberately happens\n # after indexing operation since it must not contain code\n # sections that lead to cross-accelerator communication.\n if jax.process_index() == 0:\n log_images = dict(\n image=wandb.Image(np.asarray(gt_seq[0])),\n recon=wandb.Image(np.asarray(recon_seq[0])),\n true_vs_recon=wandb.Image(\n np.asarray(comparison_seq.astype(np.uint8))\n ),\n )\n if val_results and step % args.val_interval == 0:\n log_images.update(\n dict(\n val_image=wandb.Image(\n np.asarray(val_results[""gt_seq_val""][0])\n ),\n val_recon=wandb.Image(\n np.asarray(val_results[""recon_seq_val""][0])",python,selection_command +693,779703,"jasmine/train_tokenizer_appendix-c.py",15988,3177," first_step = step\n while step < args.num_steps:\n for batch in dataloader_train:\n # --- Train step ---\n loss, recon, metrics = train_step(optimizer, batch)\n if step == first_step:\n print_mem_stats(""After params initialized"")\n step += 1\n\n # --- Validation loss ---\n val_results = {}\n\n # --- Logging ---\n if args.log:\n if step % args.log_interval == 0 and jax.process_index() == 0:\n log_dict = {""loss"": loss, ""step"": step, **metrics}\n if val_results:\n log_dict.update(val_results[""metrics""])\n wandb.log(log_dict)\n if step % args.log_image_interval == 0:\n gt_seq = batch[""videos""][0].astype(jnp.float32) / 255.0\n recon_seq = recon[0].clip(0, 1)\n comparison_seq = jnp.concatenate((gt_seq, recon_seq), axis=1)\n comparison_seq = einops.rearrange(\n comparison_seq * 255, ""t h w c -> h (t w) c""\n )\n if val_results and step % args.val_interval == 0:\n val_results[""gt_seq_val""] = (\n val_results[""gt_batch""][""videos""][0].astype(jnp.float32)\n / 255.0\n )\n val_results[""recon_seq_val""] = val_results[""recon""][0].clip(\n 0, 1\n )\n val_results[""val_comparison_seq""] = jnp.concatenate(\n (val_results[""gt_seq_val""], val_results[""recon_seq_val""]),\n axis=1,\n )\n val_results[""val_comparison_seq""] = einops.rearrange(\n val_results[""val_comparison_seq""] * 255,\n ""t h w c -> h (t w) c"",\n )\n # NOTE: Process-dependent control flow deliberately happens\n # after indexing operation since it must not contain code\n # sections that lead to cross-accelerator communication.\n if jax.process_index() == 0:\n log_images = dict(\n image=wandb.Image(np.asarray(gt_seq[0])),\n recon=wandb.Image(np.asarray(recon_seq[0])),\n true_vs_recon=wandb.Image(\n np.asarray(comparison_seq.astype(np.uint8))\n ),\n )\n if val_results and step % args.val_interval == 0:\n 
log_images.update(\n dict(\n val_image=wandb.Image(\n np.asarray(val_results[""gt_seq_val""][0])\n ),\n val_recon=wandb.Image(\n np.asarray(val_results[""recon_seq_val""][0])\n ),",python,selection_command +694,779704,"jasmine/train_tokenizer_appendix-c.py",15988,3244," first_step = step\n while step < args.num_steps:\n for batch in dataloader_train:\n # --- Train step ---\n loss, recon, metrics = train_step(optimizer, batch)\n if step == first_step:\n print_mem_stats(""After params initialized"")\n step += 1\n\n # --- Validation loss ---\n val_results = {}\n\n # --- Logging ---\n if args.log:\n if step % args.log_interval == 0 and jax.process_index() == 0:\n log_dict = {""loss"": loss, ""step"": step, **metrics}\n if val_results:\n log_dict.update(val_results[""metrics""])\n wandb.log(log_dict)\n if step % args.log_image_interval == 0:\n gt_seq = batch[""videos""][0].astype(jnp.float32) / 255.0\n recon_seq = recon[0].clip(0, 1)\n comparison_seq = jnp.concatenate((gt_seq, recon_seq), axis=1)\n comparison_seq = einops.rearrange(\n comparison_seq * 255, ""t h w c -> h (t w) c""\n )\n if val_results and step % args.val_interval == 0:\n val_results[""gt_seq_val""] = (\n val_results[""gt_batch""][""videos""][0].astype(jnp.float32)\n / 255.0\n )\n val_results[""recon_seq_val""] = val_results[""recon""][0].clip(\n 0, 1\n )\n val_results[""val_comparison_seq""] = jnp.concatenate(\n (val_results[""gt_seq_val""], val_results[""recon_seq_val""]),\n axis=1,\n )\n val_results[""val_comparison_seq""] = einops.rearrange(\n val_results[""val_comparison_seq""] * 255,\n ""t h w c -> h (t w) c"",\n )\n # NOTE: Process-dependent control flow deliberately happens\n # after indexing operation since it must not contain code\n # sections that lead to cross-accelerator communication.\n if jax.process_index() == 0:\n log_images = dict(\n image=wandb.Image(np.asarray(gt_seq[0])),\n recon=wandb.Image(np.asarray(recon_seq[0])),\n true_vs_recon=wandb.Image(\n np.asarray(comparison_seq.astype(np.uint8))\n ),\n )\n if val_results and step % args.val_interval == 0:\n log_images.update(\n dict(\n val_image=wandb.Image(\n np.asarray(val_results[""gt_seq_val""][0])\n ),\n val_recon=wandb.Image(\n np.asarray(val_results[""recon_seq_val""][0])\n ),\n val_true_vs_recon=wandb.Image(",python,selection_command +695,779751,"jasmine/train_tokenizer_appendix-c.py",15988,3296," first_step = step\n while step < args.num_steps:\n for batch in dataloader_train:\n # --- Train step ---\n loss, recon, metrics = train_step(optimizer, batch)\n if step == first_step:\n print_mem_stats(""After params initialized"")\n step += 1\n\n # --- Validation loss ---\n val_results = {}\n\n # --- Logging ---\n if args.log:\n if step % args.log_interval == 0 and jax.process_index() == 0:\n log_dict = {""loss"": loss, ""step"": step, **metrics}\n if val_results:\n log_dict.update(val_results[""metrics""])\n wandb.log(log_dict)\n if step % args.log_image_interval == 0:\n gt_seq = batch[""videos""][0].astype(jnp.float32) / 255.0\n recon_seq = recon[0].clip(0, 1)\n comparison_seq = jnp.concatenate((gt_seq, recon_seq), axis=1)\n comparison_seq = einops.rearrange(\n comparison_seq * 255, ""t h w c -> h (t w) c""\n )\n if val_results and step % args.val_interval == 0:\n val_results[""gt_seq_val""] = (\n val_results[""gt_batch""][""videos""][0].astype(jnp.float32)\n / 255.0\n )\n val_results[""recon_seq_val""] = val_results[""recon""][0].clip(\n 0, 1\n )\n val_results[""val_comparison_seq""] = jnp.concatenate(\n (val_results[""gt_seq_val""], 
val_results[""recon_seq_val""]),\n axis=1,\n )\n val_results[""val_comparison_seq""] = einops.rearrange(\n val_results[""val_comparison_seq""] * 255,\n ""t h w c -> h (t w) c"",\n )\n # NOTE: Process-dependent control flow deliberately happens\n # after indexing operation since it must not contain code\n # sections that lead to cross-accelerator communication.\n if jax.process_index() == 0:\n log_images = dict(\n image=wandb.Image(np.asarray(gt_seq[0])),\n recon=wandb.Image(np.asarray(recon_seq[0])),\n true_vs_recon=wandb.Image(\n np.asarray(comparison_seq.astype(np.uint8))\n ),\n )\n if val_results and step % args.val_interval == 0:\n log_images.update(\n dict(\n val_image=wandb.Image(\n np.asarray(val_results[""gt_seq_val""][0])\n ),\n val_recon=wandb.Image(\n np.asarray(val_results[""recon_seq_val""][0])\n ),\n val_true_vs_recon=wandb.Image(\n np.asarray(",python,selection_command +696,779798,"jasmine/train_tokenizer_appendix-c.py",15988,3382," first_step = step\n while step < args.num_steps:\n for batch in dataloader_train:\n # --- Train step ---\n loss, recon, metrics = train_step(optimizer, batch)\n if step == first_step:\n print_mem_stats(""After params initialized"")\n step += 1\n\n # --- Validation loss ---\n val_results = {}\n\n # --- Logging ---\n if args.log:\n if step % args.log_interval == 0 and jax.process_index() == 0:\n log_dict = {""loss"": loss, ""step"": step, **metrics}\n if val_results:\n log_dict.update(val_results[""metrics""])\n wandb.log(log_dict)\n if step % args.log_image_interval == 0:\n gt_seq = batch[""videos""][0].astype(jnp.float32) / 255.0\n recon_seq = recon[0].clip(0, 1)\n comparison_seq = jnp.concatenate((gt_seq, recon_seq), axis=1)\n comparison_seq = einops.rearrange(\n comparison_seq * 255, ""t h w c -> h (t w) c""\n )\n if val_results and step % args.val_interval == 0:\n val_results[""gt_seq_val""] = (\n val_results[""gt_batch""][""videos""][0].astype(jnp.float32)\n / 255.0\n )\n val_results[""recon_seq_val""] = val_results[""recon""][0].clip(\n 0, 1\n )\n val_results[""val_comparison_seq""] = jnp.concatenate(\n (val_results[""gt_seq_val""], val_results[""recon_seq_val""]),\n axis=1,\n )\n val_results[""val_comparison_seq""] = einops.rearrange(\n val_results[""val_comparison_seq""] * 255,\n ""t h w c -> h (t w) c"",\n )\n # NOTE: Process-dependent control flow deliberately happens\n # after indexing operation since it must not contain code\n # sections that lead to cross-accelerator communication.\n if jax.process_index() == 0:\n log_images = dict(\n image=wandb.Image(np.asarray(gt_seq[0])),\n recon=wandb.Image(np.asarray(recon_seq[0])),\n true_vs_recon=wandb.Image(\n np.asarray(comparison_seq.astype(np.uint8))\n ),\n )\n if val_results and step % args.val_interval == 0:\n log_images.update(\n dict(\n val_image=wandb.Image(\n np.asarray(val_results[""gt_seq_val""][0])\n ),\n val_recon=wandb.Image(\n np.asarray(val_results[""recon_seq_val""][0])\n ),\n val_true_vs_recon=wandb.Image(\n np.asarray(\n val_results[""val_comparison_seq""].astype(",python,selection_command +697,779799,"jasmine/train_tokenizer_appendix-c.py",15988,3439," first_step = step\n while step < args.num_steps:\n for batch in dataloader_train:\n # --- Train step ---\n loss, recon, metrics = train_step(optimizer, batch)\n if step == first_step:\n print_mem_stats(""After params initialized"")\n step += 1\n\n # --- Validation loss ---\n val_results = {}\n\n # --- Logging ---\n if args.log:\n if step % args.log_interval == 0 and jax.process_index() == 0:\n log_dict = {""loss"": loss, 
""step"": step, **metrics}\n if val_results:\n log_dict.update(val_results[""metrics""])\n wandb.log(log_dict)\n if step % args.log_image_interval == 0:\n gt_seq = batch[""videos""][0].astype(jnp.float32) / 255.0\n recon_seq = recon[0].clip(0, 1)\n comparison_seq = jnp.concatenate((gt_seq, recon_seq), axis=1)\n comparison_seq = einops.rearrange(\n comparison_seq * 255, ""t h w c -> h (t w) c""\n )\n if val_results and step % args.val_interval == 0:\n val_results[""gt_seq_val""] = (\n val_results[""gt_batch""][""videos""][0].astype(jnp.float32)\n / 255.0\n )\n val_results[""recon_seq_val""] = val_results[""recon""][0].clip(\n 0, 1\n )\n val_results[""val_comparison_seq""] = jnp.concatenate(\n (val_results[""gt_seq_val""], val_results[""recon_seq_val""]),\n axis=1,\n )\n val_results[""val_comparison_seq""] = einops.rearrange(\n val_results[""val_comparison_seq""] * 255,\n ""t h w c -> h (t w) c"",\n )\n # NOTE: Process-dependent control flow deliberately happens\n # after indexing operation since it must not contain code\n # sections that lead to cross-accelerator communication.\n if jax.process_index() == 0:\n log_images = dict(\n image=wandb.Image(np.asarray(gt_seq[0])),\n recon=wandb.Image(np.asarray(recon_seq[0])),\n true_vs_recon=wandb.Image(\n np.asarray(comparison_seq.astype(np.uint8))\n ),\n )\n if val_results and step % args.val_interval == 0:\n log_images.update(\n dict(\n val_image=wandb.Image(\n np.asarray(val_results[""gt_seq_val""][0])\n ),\n val_recon=wandb.Image(\n np.asarray(val_results[""recon_seq_val""][0])\n ),\n val_true_vs_recon=wandb.Image(\n np.asarray(\n val_results[""val_comparison_seq""].astype(\n np.uint8",python,selection_command +698,779857,"jasmine/train_tokenizer_appendix-c.py",15988,3485," first_step = step\n while step < args.num_steps:\n for batch in dataloader_train:\n # --- Train step ---\n loss, recon, metrics = train_step(optimizer, batch)\n if step == first_step:\n print_mem_stats(""After params initialized"")\n step += 1\n\n # --- Validation loss ---\n val_results = {}\n\n # --- Logging ---\n if args.log:\n if step % args.log_interval == 0 and jax.process_index() == 0:\n log_dict = {""loss"": loss, ""step"": step, **metrics}\n if val_results:\n log_dict.update(val_results[""metrics""])\n wandb.log(log_dict)\n if step % args.log_image_interval == 0:\n gt_seq = batch[""videos""][0].astype(jnp.float32) / 255.0\n recon_seq = recon[0].clip(0, 1)\n comparison_seq = jnp.concatenate((gt_seq, recon_seq), axis=1)\n comparison_seq = einops.rearrange(\n comparison_seq * 255, ""t h w c -> h (t w) c""\n )\n if val_results and step % args.val_interval == 0:\n val_results[""gt_seq_val""] = (\n val_results[""gt_batch""][""videos""][0].astype(jnp.float32)\n / 255.0\n )\n val_results[""recon_seq_val""] = val_results[""recon""][0].clip(\n 0, 1\n )\n val_results[""val_comparison_seq""] = jnp.concatenate(\n (val_results[""gt_seq_val""], val_results[""recon_seq_val""]),\n axis=1,\n )\n val_results[""val_comparison_seq""] = einops.rearrange(\n val_results[""val_comparison_seq""] * 255,\n ""t h w c -> h (t w) c"",\n )\n # NOTE: Process-dependent control flow deliberately happens\n # after indexing operation since it must not contain code\n # sections that lead to cross-accelerator communication.\n if jax.process_index() == 0:\n log_images = dict(\n image=wandb.Image(np.asarray(gt_seq[0])),\n recon=wandb.Image(np.asarray(recon_seq[0])),\n true_vs_recon=wandb.Image(\n np.asarray(comparison_seq.astype(np.uint8))\n ),\n )\n if val_results and step % args.val_interval == 0:\n 
log_images.update(\n dict(\n val_image=wandb.Image(\n np.asarray(val_results[""gt_seq_val""][0])\n ),\n val_recon=wandb.Image(\n np.asarray(val_results[""recon_seq_val""][0])\n ),\n val_true_vs_recon=wandb.Image(\n np.asarray(\n val_results[""val_comparison_seq""].astype(\n np.uint8\n )",python,selection_command +699,779858,"jasmine/train_tokenizer_appendix-c.py",15988,3527," first_step = step\n while step < args.num_steps:\n for batch in dataloader_train:\n # --- Train step ---\n loss, recon, metrics = train_step(optimizer, batch)\n if step == first_step:\n print_mem_stats(""After params initialized"")\n step += 1\n\n # --- Validation loss ---\n val_results = {}\n\n # --- Logging ---\n if args.log:\n if step % args.log_interval == 0 and jax.process_index() == 0:\n log_dict = {""loss"": loss, ""step"": step, **metrics}\n if val_results:\n log_dict.update(val_results[""metrics""])\n wandb.log(log_dict)\n if step % args.log_image_interval == 0:\n gt_seq = batch[""videos""][0].astype(jnp.float32) / 255.0\n recon_seq = recon[0].clip(0, 1)\n comparison_seq = jnp.concatenate((gt_seq, recon_seq), axis=1)\n comparison_seq = einops.rearrange(\n comparison_seq * 255, ""t h w c -> h (t w) c""\n )\n if val_results and step % args.val_interval == 0:\n val_results[""gt_seq_val""] = (\n val_results[""gt_batch""][""videos""][0].astype(jnp.float32)\n / 255.0\n )\n val_results[""recon_seq_val""] = val_results[""recon""][0].clip(\n 0, 1\n )\n val_results[""val_comparison_seq""] = jnp.concatenate(\n (val_results[""gt_seq_val""], val_results[""recon_seq_val""]),\n axis=1,\n )\n val_results[""val_comparison_seq""] = einops.rearrange(\n val_results[""val_comparison_seq""] * 255,\n ""t h w c -> h (t w) c"",\n )\n # NOTE: Process-dependent control flow deliberately happens\n # after indexing operation since it must not contain code\n # sections that lead to cross-accelerator communication.\n if jax.process_index() == 0:\n log_images = dict(\n image=wandb.Image(np.asarray(gt_seq[0])),\n recon=wandb.Image(np.asarray(recon_seq[0])),\n true_vs_recon=wandb.Image(\n np.asarray(comparison_seq.astype(np.uint8))\n ),\n )\n if val_results and step % args.val_interval == 0:\n log_images.update(\n dict(\n val_image=wandb.Image(\n np.asarray(val_results[""gt_seq_val""][0])\n ),\n val_recon=wandb.Image(\n np.asarray(val_results[""recon_seq_val""][0])\n ),\n val_true_vs_recon=wandb.Image(\n np.asarray(\n val_results[""val_comparison_seq""].astype(\n np.uint8\n )\n )",python,selection_command +700,779903,"jasmine/train_tokenizer_appendix-c.py",15988,3566," first_step = step\n while step < args.num_steps:\n for batch in dataloader_train:\n # --- Train step ---\n loss, recon, metrics = train_step(optimizer, batch)\n if step == first_step:\n print_mem_stats(""After params initialized"")\n step += 1\n\n # --- Validation loss ---\n val_results = {}\n\n # --- Logging ---\n if args.log:\n if step % args.log_interval == 0 and jax.process_index() == 0:\n log_dict = {""loss"": loss, ""step"": step, **metrics}\n if val_results:\n log_dict.update(val_results[""metrics""])\n wandb.log(log_dict)\n if step % args.log_image_interval == 0:\n gt_seq = batch[""videos""][0].astype(jnp.float32) / 255.0\n recon_seq = recon[0].clip(0, 1)\n comparison_seq = jnp.concatenate((gt_seq, recon_seq), axis=1)\n comparison_seq = einops.rearrange(\n comparison_seq * 255, ""t h w c -> h (t w) c""\n )\n if val_results and step % args.val_interval == 0:\n val_results[""gt_seq_val""] = (\n val_results[""gt_batch""][""videos""][0].astype(jnp.float32)\n / 255.0\n )\n 
val_results[""recon_seq_val""] = val_results[""recon""][0].clip(\n 0, 1\n )\n val_results[""val_comparison_seq""] = jnp.concatenate(\n (val_results[""gt_seq_val""], val_results[""recon_seq_val""]),\n axis=1,\n )\n val_results[""val_comparison_seq""] = einops.rearrange(\n val_results[""val_comparison_seq""] * 255,\n ""t h w c -> h (t w) c"",\n )\n # NOTE: Process-dependent control flow deliberately happens\n # after indexing operation since it must not contain code\n # sections that lead to cross-accelerator communication.\n if jax.process_index() == 0:\n log_images = dict(\n image=wandb.Image(np.asarray(gt_seq[0])),\n recon=wandb.Image(np.asarray(recon_seq[0])),\n true_vs_recon=wandb.Image(\n np.asarray(comparison_seq.astype(np.uint8))\n ),\n )\n if val_results and step % args.val_interval == 0:\n log_images.update(\n dict(\n val_image=wandb.Image(\n np.asarray(val_results[""gt_seq_val""][0])\n ),\n val_recon=wandb.Image(\n np.asarray(val_results[""recon_seq_val""][0])\n ),\n val_true_vs_recon=wandb.Image(\n np.asarray(\n val_results[""val_comparison_seq""].astype(\n np.uint8\n )\n )\n ),",python,selection_command +701,779975,"jasmine/train_tokenizer_appendix-c.py",15988,3600," first_step = step\n while step < args.num_steps:\n for batch in dataloader_train:\n # --- Train step ---\n loss, recon, metrics = train_step(optimizer, batch)\n if step == first_step:\n print_mem_stats(""After params initialized"")\n step += 1\n\n # --- Validation loss ---\n val_results = {}\n\n # --- Logging ---\n if args.log:\n if step % args.log_interval == 0 and jax.process_index() == 0:\n log_dict = {""loss"": loss, ""step"": step, **metrics}\n if val_results:\n log_dict.update(val_results[""metrics""])\n wandb.log(log_dict)\n if step % args.log_image_interval == 0:\n gt_seq = batch[""videos""][0].astype(jnp.float32) / 255.0\n recon_seq = recon[0].clip(0, 1)\n comparison_seq = jnp.concatenate((gt_seq, recon_seq), axis=1)\n comparison_seq = einops.rearrange(\n comparison_seq * 255, ""t h w c -> h (t w) c""\n )\n if val_results and step % args.val_interval == 0:\n val_results[""gt_seq_val""] = (\n val_results[""gt_batch""][""videos""][0].astype(jnp.float32)\n / 255.0\n )\n val_results[""recon_seq_val""] = val_results[""recon""][0].clip(\n 0, 1\n )\n val_results[""val_comparison_seq""] = jnp.concatenate(\n (val_results[""gt_seq_val""], val_results[""recon_seq_val""]),\n axis=1,\n )\n val_results[""val_comparison_seq""] = einops.rearrange(\n val_results[""val_comparison_seq""] * 255,\n ""t h w c -> h (t w) c"",\n )\n # NOTE: Process-dependent control flow deliberately happens\n # after indexing operation since it must not contain code\n # sections that lead to cross-accelerator communication.\n if jax.process_index() == 0:\n log_images = dict(\n image=wandb.Image(np.asarray(gt_seq[0])),\n recon=wandb.Image(np.asarray(recon_seq[0])),\n true_vs_recon=wandb.Image(\n np.asarray(comparison_seq.astype(np.uint8))\n ),\n )\n if val_results and step % args.val_interval == 0:\n log_images.update(\n dict(\n val_image=wandb.Image(\n np.asarray(val_results[""gt_seq_val""][0])\n ),\n val_recon=wandb.Image(\n np.asarray(val_results[""recon_seq_val""][0])\n ),\n val_true_vs_recon=wandb.Image(\n np.asarray(\n val_results[""val_comparison_seq""].astype(\n np.uint8\n )\n )\n ),\n )",python,selection_command +702,779976,"jasmine/train_tokenizer_appendix-c.py",15988,3630," first_step = step\n while step < args.num_steps:\n for batch in dataloader_train:\n # --- Train step ---\n loss, recon, metrics = train_step(optimizer, batch)\n if 
step == first_step:\n print_mem_stats(""After params initialized"")\n step += 1\n\n # --- Validation loss ---\n val_results = {}\n\n # --- Logging ---\n if args.log:\n if step % args.log_interval == 0 and jax.process_index() == 0:\n log_dict = {""loss"": loss, ""step"": step, **metrics}\n if val_results:\n log_dict.update(val_results[""metrics""])\n wandb.log(log_dict)\n if step % args.log_image_interval == 0:\n gt_seq = batch[""videos""][0].astype(jnp.float32) / 255.0\n recon_seq = recon[0].clip(0, 1)\n comparison_seq = jnp.concatenate((gt_seq, recon_seq), axis=1)\n comparison_seq = einops.rearrange(\n comparison_seq * 255, ""t h w c -> h (t w) c""\n )\n if val_results and step % args.val_interval == 0:\n val_results[""gt_seq_val""] = (\n val_results[""gt_batch""][""videos""][0].astype(jnp.float32)\n / 255.0\n )\n val_results[""recon_seq_val""] = val_results[""recon""][0].clip(\n 0, 1\n )\n val_results[""val_comparison_seq""] = jnp.concatenate(\n (val_results[""gt_seq_val""], val_results[""recon_seq_val""]),\n axis=1,\n )\n val_results[""val_comparison_seq""] = einops.rearrange(\n val_results[""val_comparison_seq""] * 255,\n ""t h w c -> h (t w) c"",\n )\n # NOTE: Process-dependent control flow deliberately happens\n # after indexing operation since it must not contain code\n # sections that lead to cross-accelerator communication.\n if jax.process_index() == 0:\n log_images = dict(\n image=wandb.Image(np.asarray(gt_seq[0])),\n recon=wandb.Image(np.asarray(recon_seq[0])),\n true_vs_recon=wandb.Image(\n np.asarray(comparison_seq.astype(np.uint8))\n ),\n )\n if val_results and step % args.val_interval == 0:\n log_images.update(\n dict(\n val_image=wandb.Image(\n np.asarray(val_results[""gt_seq_val""][0])\n ),\n val_recon=wandb.Image(\n np.asarray(val_results[""recon_seq_val""][0])\n ),\n val_true_vs_recon=wandb.Image(\n np.asarray(\n val_results[""val_comparison_seq""].astype(\n np.uint8\n )\n )\n ),\n )\n )",python,selection_command +703,780051,"jasmine/train_tokenizer_appendix-c.py",15988,3676," first_step = step\n while step < args.num_steps:\n for batch in dataloader_train:\n # --- Train step ---\n loss, recon, metrics = train_step(optimizer, batch)\n if step == first_step:\n print_mem_stats(""After params initialized"")\n step += 1\n\n # --- Validation loss ---\n val_results = {}\n\n # --- Logging ---\n if args.log:\n if step % args.log_interval == 0 and jax.process_index() == 0:\n log_dict = {""loss"": loss, ""step"": step, **metrics}\n if val_results:\n log_dict.update(val_results[""metrics""])\n wandb.log(log_dict)\n if step % args.log_image_interval == 0:\n gt_seq = batch[""videos""][0].astype(jnp.float32) / 255.0\n recon_seq = recon[0].clip(0, 1)\n comparison_seq = jnp.concatenate((gt_seq, recon_seq), axis=1)\n comparison_seq = einops.rearrange(\n comparison_seq * 255, ""t h w c -> h (t w) c""\n )\n if val_results and step % args.val_interval == 0:\n val_results[""gt_seq_val""] = (\n val_results[""gt_batch""][""videos""][0].astype(jnp.float32)\n / 255.0\n )\n val_results[""recon_seq_val""] = val_results[""recon""][0].clip(\n 0, 1\n )\n val_results[""val_comparison_seq""] = jnp.concatenate(\n (val_results[""gt_seq_val""], val_results[""recon_seq_val""]),\n axis=1,\n )\n val_results[""val_comparison_seq""] = einops.rearrange(\n val_results[""val_comparison_seq""] * 255,\n ""t h w c -> h (t w) c"",\n )\n # NOTE: Process-dependent control flow deliberately happens\n # after indexing operation since it must not contain code\n # sections that lead to cross-accelerator communication.\n 
if jax.process_index() == 0:\n log_images = dict(\n image=wandb.Image(np.asarray(gt_seq[0])),\n recon=wandb.Image(np.asarray(recon_seq[0])),\n true_vs_recon=wandb.Image(\n np.asarray(comparison_seq.astype(np.uint8))\n ),\n )\n if val_results and step % args.val_interval == 0:\n log_images.update(\n dict(\n val_image=wandb.Image(\n np.asarray(val_results[""gt_seq_val""][0])\n ),\n val_recon=wandb.Image(\n np.asarray(val_results[""recon_seq_val""][0])\n ),\n val_true_vs_recon=wandb.Image(\n np.asarray(\n val_results[""val_comparison_seq""].astype(\n np.uint8\n )\n )\n ),\n )\n )\n wandb.log(log_images)",python,selection_command +704,780052,"jasmine/train_tokenizer_appendix-c.py",15988,3712," first_step = step\n while step < args.num_steps:\n for batch in dataloader_train:\n # --- Train step ---\n loss, recon, metrics = train_step(optimizer, batch)\n if step == first_step:\n print_mem_stats(""After params initialized"")\n step += 1\n\n # --- Validation loss ---\n val_results = {}\n\n # --- Logging ---\n if args.log:\n if step % args.log_interval == 0 and jax.process_index() == 0:\n log_dict = {""loss"": loss, ""step"": step, **metrics}\n if val_results:\n log_dict.update(val_results[""metrics""])\n wandb.log(log_dict)\n if step % args.log_image_interval == 0:\n gt_seq = batch[""videos""][0].astype(jnp.float32) / 255.0\n recon_seq = recon[0].clip(0, 1)\n comparison_seq = jnp.concatenate((gt_seq, recon_seq), axis=1)\n comparison_seq = einops.rearrange(\n comparison_seq * 255, ""t h w c -> h (t w) c""\n )\n if val_results and step % args.val_interval == 0:\n val_results[""gt_seq_val""] = (\n val_results[""gt_batch""][""videos""][0].astype(jnp.float32)\n / 255.0\n )\n val_results[""recon_seq_val""] = val_results[""recon""][0].clip(\n 0, 1\n )\n val_results[""val_comparison_seq""] = jnp.concatenate(\n (val_results[""gt_seq_val""], val_results[""recon_seq_val""]),\n axis=1,\n )\n val_results[""val_comparison_seq""] = einops.rearrange(\n val_results[""val_comparison_seq""] * 255,\n ""t h w c -> h (t w) c"",\n )\n # NOTE: Process-dependent control flow deliberately happens\n # after indexing operation since it must not contain code\n # sections that lead to cross-accelerator communication.\n if jax.process_index() == 0:\n log_images = dict(\n image=wandb.Image(np.asarray(gt_seq[0])),\n recon=wandb.Image(np.asarray(recon_seq[0])),\n true_vs_recon=wandb.Image(\n np.asarray(comparison_seq.astype(np.uint8))\n ),\n )\n if val_results and step % args.val_interval == 0:\n log_images.update(\n dict(\n val_image=wandb.Image(\n np.asarray(val_results[""gt_seq_val""][0])\n ),\n val_recon=wandb.Image(\n np.asarray(val_results[""recon_seq_val""][0])\n ),\n val_true_vs_recon=wandb.Image(\n np.asarray(\n val_results[""val_comparison_seq""].astype(\n np.uint8\n )\n )\n ),\n )\n )\n wandb.log(log_images)\n # --- Checkpointing ---",python,selection_command +705,780093,"jasmine/train_tokenizer_appendix-c.py",15988,3788," first_step = step\n while step < args.num_steps:\n for batch in dataloader_train:\n # --- Train step ---\n loss, recon, metrics = train_step(optimizer, batch)\n if step == first_step:\n print_mem_stats(""After params initialized"")\n step += 1\n\n # --- Validation loss ---\n val_results = {}\n\n # --- Logging ---\n if args.log:\n if step % args.log_interval == 0 and jax.process_index() == 0:\n log_dict = {""loss"": loss, ""step"": step, **metrics}\n if val_results:\n log_dict.update(val_results[""metrics""])\n wandb.log(log_dict)\n if step % args.log_image_interval == 0:\n gt_seq = 
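The NOTE in the recorded code flags a real multi-host JAX constraint: every process must execute the same collective computation, so device-array materialization (which can trigger cross-accelerator gathering) has to happen before any `jax.process_index()` branch. A hedged sketch of the safe ordering (helper name is illustrative):

# Sketch: keep cross-host work outside process-dependent branches.
import jax
import numpy as np

def log_safely(comparison_seq, logger):
    # 1) All processes materialize the data; device-to-host transfer and
    #    any implicit cross-accelerator communication run on every host.
    image = np.asarray(comparison_seq)   # same call on all processes
    # 2) Only then branch on the process index for host-0-only side effects.
    if jax.process_index() == 0:
        logger(image)                    # e.g. wandb.Image + wandb.log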
batch[""videos""][0].astype(jnp.float32) / 255.0\n recon_seq = recon[0].clip(0, 1)\n comparison_seq = jnp.concatenate((gt_seq, recon_seq), axis=1)\n comparison_seq = einops.rearrange(\n comparison_seq * 255, ""t h w c -> h (t w) c""\n )\n if val_results and step % args.val_interval == 0:\n val_results[""gt_seq_val""] = (\n val_results[""gt_batch""][""videos""][0].astype(jnp.float32)\n / 255.0\n )\n val_results[""recon_seq_val""] = val_results[""recon""][0].clip(\n 0, 1\n )\n val_results[""val_comparison_seq""] = jnp.concatenate(\n (val_results[""gt_seq_val""], val_results[""recon_seq_val""]),\n axis=1,\n )\n val_results[""val_comparison_seq""] = einops.rearrange(\n val_results[""val_comparison_seq""] * 255,\n ""t h w c -> h (t w) c"",\n )\n # NOTE: Process-dependent control flow deliberately happens\n # after indexing operation since it must not contain code\n # sections that lead to cross-accelerator communication.\n if jax.process_index() == 0:\n log_images = dict(\n image=wandb.Image(np.asarray(gt_seq[0])),\n recon=wandb.Image(np.asarray(recon_seq[0])),\n true_vs_recon=wandb.Image(\n np.asarray(comparison_seq.astype(np.uint8))\n ),\n )\n if val_results and step % args.val_interval == 0:\n log_images.update(\n dict(\n val_image=wandb.Image(\n np.asarray(val_results[""gt_seq_val""][0])\n ),\n val_recon=wandb.Image(\n np.asarray(val_results[""recon_seq_val""][0])\n ),\n val_true_vs_recon=wandb.Image(\n np.asarray(\n val_results[""val_comparison_seq""].astype(\n np.uint8\n )\n )\n ),\n )\n )\n wandb.log(log_images)\n # --- Checkpointing ---\n if args.save_ckpt and step % args.log_checkpoint_interval == 0:",python,selection_command +706,780095,"jasmine/train_tokenizer_appendix-c.py",15988,3842," first_step = step\n while step < args.num_steps:\n for batch in dataloader_train:\n # --- Train step ---\n loss, recon, metrics = train_step(optimizer, batch)\n if step == first_step:\n print_mem_stats(""After params initialized"")\n step += 1\n\n # --- Validation loss ---\n val_results = {}\n\n # --- Logging ---\n if args.log:\n if step % args.log_interval == 0 and jax.process_index() == 0:\n log_dict = {""loss"": loss, ""step"": step, **metrics}\n if val_results:\n log_dict.update(val_results[""metrics""])\n wandb.log(log_dict)\n if step % args.log_image_interval == 0:\n gt_seq = batch[""videos""][0].astype(jnp.float32) / 255.0\n recon_seq = recon[0].clip(0, 1)\n comparison_seq = jnp.concatenate((gt_seq, recon_seq), axis=1)\n comparison_seq = einops.rearrange(\n comparison_seq * 255, ""t h w c -> h (t w) c""\n )\n if val_results and step % args.val_interval == 0:\n val_results[""gt_seq_val""] = (\n val_results[""gt_batch""][""videos""][0].astype(jnp.float32)\n / 255.0\n )\n val_results[""recon_seq_val""] = val_results[""recon""][0].clip(\n 0, 1\n )\n val_results[""val_comparison_seq""] = jnp.concatenate(\n (val_results[""gt_seq_val""], val_results[""recon_seq_val""]),\n axis=1,\n )\n val_results[""val_comparison_seq""] = einops.rearrange(\n val_results[""val_comparison_seq""] * 255,\n ""t h w c -> h (t w) c"",\n )\n # NOTE: Process-dependent control flow deliberately happens\n # after indexing operation since it must not contain code\n # sections that lead to cross-accelerator communication.\n if jax.process_index() == 0:\n log_images = dict(\n image=wandb.Image(np.asarray(gt_seq[0])),\n recon=wandb.Image(np.asarray(recon_seq[0])),\n true_vs_recon=wandb.Image(\n np.asarray(comparison_seq.astype(np.uint8))\n ),\n )\n if val_results and step % args.val_interval == 0:\n log_images.update(\n dict(\n 
val_image=wandb.Image(\n np.asarray(val_results[""gt_seq_val""][0])\n ),\n val_recon=wandb.Image(\n np.asarray(val_results[""recon_seq_val""][0])\n ),\n val_true_vs_recon=wandb.Image(\n np.asarray(\n val_results[""val_comparison_seq""].astype(\n np.uint8\n )\n )\n ),\n )\n )\n wandb.log(log_images)\n # --- Checkpointing ---\n if args.save_ckpt and step % args.log_checkpoint_interval == 0:\n assert checkpoint_manager is not None",python,selection_command +707,780104,"jasmine/train_tokenizer_appendix-c.py",15988,3897," first_step = step\n while step < args.num_steps:\n for batch in dataloader_train:\n # --- Train step ---\n loss, recon, metrics = train_step(optimizer, batch)\n if step == first_step:\n print_mem_stats(""After params initialized"")\n step += 1\n\n # --- Validation loss ---\n val_results = {}\n\n # --- Logging ---\n if args.log:\n if step % args.log_interval == 0 and jax.process_index() == 0:\n log_dict = {""loss"": loss, ""step"": step, **metrics}\n if val_results:\n log_dict.update(val_results[""metrics""])\n wandb.log(log_dict)\n if step % args.log_image_interval == 0:\n gt_seq = batch[""videos""][0].astype(jnp.float32) / 255.0\n recon_seq = recon[0].clip(0, 1)\n comparison_seq = jnp.concatenate((gt_seq, recon_seq), axis=1)\n comparison_seq = einops.rearrange(\n comparison_seq * 255, ""t h w c -> h (t w) c""\n )\n if val_results and step % args.val_interval == 0:\n val_results[""gt_seq_val""] = (\n val_results[""gt_batch""][""videos""][0].astype(jnp.float32)\n / 255.0\n )\n val_results[""recon_seq_val""] = val_results[""recon""][0].clip(\n 0, 1\n )\n val_results[""val_comparison_seq""] = jnp.concatenate(\n (val_results[""gt_seq_val""], val_results[""recon_seq_val""]),\n axis=1,\n )\n val_results[""val_comparison_seq""] = einops.rearrange(\n val_results[""val_comparison_seq""] * 255,\n ""t h w c -> h (t w) c"",\n )\n # NOTE: Process-dependent control flow deliberately happens\n # after indexing operation since it must not contain code\n # sections that lead to cross-accelerator communication.\n if jax.process_index() == 0:\n log_images = dict(\n image=wandb.Image(np.asarray(gt_seq[0])),\n recon=wandb.Image(np.asarray(recon_seq[0])),\n true_vs_recon=wandb.Image(\n np.asarray(comparison_seq.astype(np.uint8))\n ),\n )\n if val_results and step % args.val_interval == 0:\n log_images.update(\n dict(\n val_image=wandb.Image(\n np.asarray(val_results[""gt_seq_val""][0])\n ),\n val_recon=wandb.Image(\n np.asarray(val_results[""recon_seq_val""][0])\n ),\n val_true_vs_recon=wandb.Image(\n np.asarray(\n val_results[""val_comparison_seq""].astype(\n np.uint8\n )\n )\n ),\n )\n )\n wandb.log(log_images)\n # --- Checkpointing ---\n if args.save_ckpt and step % args.log_checkpoint_interval == 0:\n assert checkpoint_manager is not None\n optimizer_state = nnx.state(optimizer)",python,selection_command +708,780141,"jasmine/train_tokenizer_appendix-c.py",15988,3930," first_step = step\n while step < args.num_steps:\n for batch in dataloader_train:\n # --- Train step ---\n loss, recon, metrics = train_step(optimizer, batch)\n if step == first_step:\n print_mem_stats(""After params initialized"")\n step += 1\n\n # --- Validation loss ---\n val_results = {}\n\n # --- Logging ---\n if args.log:\n if step % args.log_interval == 0 and jax.process_index() == 0:\n log_dict = {""loss"": loss, ""step"": step, **metrics}\n if val_results:\n log_dict.update(val_results[""metrics""])\n wandb.log(log_dict)\n if step % args.log_image_interval == 0:\n gt_seq = batch[""videos""][0].astype(jnp.float32) / 
255.0\n recon_seq = recon[0].clip(0, 1)\n comparison_seq = jnp.concatenate((gt_seq, recon_seq), axis=1)\n comparison_seq = einops.rearrange(\n comparison_seq * 255, ""t h w c -> h (t w) c""\n )\n if val_results and step % args.val_interval == 0:\n val_results[""gt_seq_val""] = (\n val_results[""gt_batch""][""videos""][0].astype(jnp.float32)\n / 255.0\n )\n val_results[""recon_seq_val""] = val_results[""recon""][0].clip(\n 0, 1\n )\n val_results[""val_comparison_seq""] = jnp.concatenate(\n (val_results[""gt_seq_val""], val_results[""recon_seq_val""]),\n axis=1,\n )\n val_results[""val_comparison_seq""] = einops.rearrange(\n val_results[""val_comparison_seq""] * 255,\n ""t h w c -> h (t w) c"",\n )\n # NOTE: Process-dependent control flow deliberately happens\n # after indexing operation since it must not contain code\n # sections that lead to cross-accelerator communication.\n if jax.process_index() == 0:\n log_images = dict(\n image=wandb.Image(np.asarray(gt_seq[0])),\n recon=wandb.Image(np.asarray(recon_seq[0])),\n true_vs_recon=wandb.Image(\n np.asarray(comparison_seq.astype(np.uint8))\n ),\n )\n if val_results and step % args.val_interval == 0:\n log_images.update(\n dict(\n val_image=wandb.Image(\n np.asarray(val_results[""gt_seq_val""][0])\n ),\n val_recon=wandb.Image(\n np.asarray(val_results[""recon_seq_val""][0])\n ),\n val_true_vs_recon=wandb.Image(\n np.asarray(\n val_results[""val_comparison_seq""].astype(\n np.uint8\n )\n )\n ),\n )\n )\n wandb.log(log_images)\n # --- Checkpointing ---\n if args.save_ckpt and step % args.log_checkpoint_interval == 0:\n assert checkpoint_manager is not None\n optimizer_state = nnx.state(optimizer)\n if val_iterator:",python,selection_command +709,780172,"jasmine/train_tokenizer_appendix-c.py",15988,3990," first_step = step\n while step < args.num_steps:\n for batch in dataloader_train:\n # --- Train step ---\n loss, recon, metrics = train_step(optimizer, batch)\n if step == first_step:\n print_mem_stats(""After params initialized"")\n step += 1\n\n # --- Validation loss ---\n val_results = {}\n\n # --- Logging ---\n if args.log:\n if step % args.log_interval == 0 and jax.process_index() == 0:\n log_dict = {""loss"": loss, ""step"": step, **metrics}\n if val_results:\n log_dict.update(val_results[""metrics""])\n wandb.log(log_dict)\n if step % args.log_image_interval == 0:\n gt_seq = batch[""videos""][0].astype(jnp.float32) / 255.0\n recon_seq = recon[0].clip(0, 1)\n comparison_seq = jnp.concatenate((gt_seq, recon_seq), axis=1)\n comparison_seq = einops.rearrange(\n comparison_seq * 255, ""t h w c -> h (t w) c""\n )\n if val_results and step % args.val_interval == 0:\n val_results[""gt_seq_val""] = (\n val_results[""gt_batch""][""videos""][0].astype(jnp.float32)\n / 255.0\n )\n val_results[""recon_seq_val""] = val_results[""recon""][0].clip(\n 0, 1\n )\n val_results[""val_comparison_seq""] = jnp.concatenate(\n (val_results[""gt_seq_val""], val_results[""recon_seq_val""]),\n axis=1,\n )\n val_results[""val_comparison_seq""] = einops.rearrange(\n val_results[""val_comparison_seq""] * 255,\n ""t h w c -> h (t w) c"",\n )\n # NOTE: Process-dependent control flow deliberately happens\n # after indexing operation since it must not contain code\n # sections that lead to cross-accelerator communication.\n if jax.process_index() == 0:\n log_images = dict(\n image=wandb.Image(np.asarray(gt_seq[0])),\n recon=wandb.Image(np.asarray(recon_seq[0])),\n true_vs_recon=wandb.Image(\n np.asarray(comparison_seq.astype(np.uint8))\n ),\n )\n if val_results and step % 
args.val_interval == 0:\n log_images.update(\n dict(\n val_image=wandb.Image(\n np.asarray(val_results[""gt_seq_val""][0])\n ),\n val_recon=wandb.Image(\n np.asarray(val_results[""recon_seq_val""][0])\n ),\n val_true_vs_recon=wandb.Image(\n np.asarray(\n val_results[""val_comparison_seq""].astype(\n np.uint8\n )\n )\n ),\n )\n )\n wandb.log(log_images)\n # --- Checkpointing ---\n if args.save_ckpt and step % args.log_checkpoint_interval == 0:\n assert checkpoint_manager is not None\n optimizer_state = nnx.state(optimizer)\n if val_iterator:\n ckpt_manager_args = ocp.args.Composite(",python,selection_command +710,780186,"jasmine/train_tokenizer_appendix-c.py",15988,4080," first_step = step\n while step < args.num_steps:\n for batch in dataloader_train:\n # --- Train step ---\n loss, recon, metrics = train_step(optimizer, batch)\n if step == first_step:\n print_mem_stats(""After params initialized"")\n step += 1\n\n # --- Validation loss ---\n val_results = {}\n\n # --- Logging ---\n if args.log:\n if step % args.log_interval == 0 and jax.process_index() == 0:\n log_dict = {""loss"": loss, ""step"": step, **metrics}\n if val_results:\n log_dict.update(val_results[""metrics""])\n wandb.log(log_dict)\n if step % args.log_image_interval == 0:\n gt_seq = batch[""videos""][0].astype(jnp.float32) / 255.0\n recon_seq = recon[0].clip(0, 1)\n comparison_seq = jnp.concatenate((gt_seq, recon_seq), axis=1)\n comparison_seq = einops.rearrange(\n comparison_seq * 255, ""t h w c -> h (t w) c""\n )\n if val_results and step % args.val_interval == 0:\n val_results[""gt_seq_val""] = (\n val_results[""gt_batch""][""videos""][0].astype(jnp.float32)\n / 255.0\n )\n val_results[""recon_seq_val""] = val_results[""recon""][0].clip(\n 0, 1\n )\n val_results[""val_comparison_seq""] = jnp.concatenate(\n (val_results[""gt_seq_val""], val_results[""recon_seq_val""]),\n axis=1,\n )\n val_results[""val_comparison_seq""] = einops.rearrange(\n val_results[""val_comparison_seq""] * 255,\n ""t h w c -> h (t w) c"",\n )\n # NOTE: Process-dependent control flow deliberately happens\n # after indexing operation since it must not contain code\n # sections that lead to cross-accelerator communication.\n if jax.process_index() == 0:\n log_images = dict(\n image=wandb.Image(np.asarray(gt_seq[0])),\n recon=wandb.Image(np.asarray(recon_seq[0])),\n true_vs_recon=wandb.Image(\n np.asarray(comparison_seq.astype(np.uint8))\n ),\n )\n if val_results and step % args.val_interval == 0:\n log_images.update(\n dict(\n val_image=wandb.Image(\n np.asarray(val_results[""gt_seq_val""][0])\n ),\n val_recon=wandb.Image(\n np.asarray(val_results[""recon_seq_val""][0])\n ),\n val_true_vs_recon=wandb.Image(\n np.asarray(\n val_results[""val_comparison_seq""].astype(\n np.uint8\n )\n )\n ),\n )\n )\n wandb.log(log_images)\n # --- Checkpointing ---\n if args.save_ckpt and step % args.log_checkpoint_interval == 0:\n assert checkpoint_manager is not None\n optimizer_state = nnx.state(optimizer)\n if val_iterator:\n ckpt_manager_args = ocp.args.Composite(\n model_state=ocp.args.PyTreeSave(optimizer_state), # type: ignore",python,selection_command +711,780270,"jasmine/train_tokenizer_appendix-c.py",15988,4176," first_step = step\n while step < args.num_steps:\n for batch in dataloader_train:\n # --- Train step ---\n loss, recon, metrics = train_step(optimizer, batch)\n if step == first_step:\n print_mem_stats(""After params initialized"")\n step += 1\n\n # --- Validation loss ---\n val_results = {}\n\n # --- Logging ---\n if args.log:\n if step % 
args.log_interval == 0 and jax.process_index() == 0:\n log_dict = {""loss"": loss, ""step"": step, **metrics}\n if val_results:\n log_dict.update(val_results[""metrics""])\n wandb.log(log_dict)\n if step % args.log_image_interval == 0:\n gt_seq = batch[""videos""][0].astype(jnp.float32) / 255.0\n recon_seq = recon[0].clip(0, 1)\n comparison_seq = jnp.concatenate((gt_seq, recon_seq), axis=1)\n comparison_seq = einops.rearrange(\n comparison_seq * 255, ""t h w c -> h (t w) c""\n )\n if val_results and step % args.val_interval == 0:\n val_results[""gt_seq_val""] = (\n val_results[""gt_batch""][""videos""][0].astype(jnp.float32)\n / 255.0\n )\n val_results[""recon_seq_val""] = val_results[""recon""][0].clip(\n 0, 1\n )\n val_results[""val_comparison_seq""] = jnp.concatenate(\n (val_results[""gt_seq_val""], val_results[""recon_seq_val""]),\n axis=1,\n )\n val_results[""val_comparison_seq""] = einops.rearrange(\n val_results[""val_comparison_seq""] * 255,\n ""t h w c -> h (t w) c"",\n )\n # NOTE: Process-dependent control flow deliberately happens\n # after indexing operation since it must not contain code\n # sections that lead to cross-accelerator communication.\n if jax.process_index() == 0:\n log_images = dict(\n image=wandb.Image(np.asarray(gt_seq[0])),\n recon=wandb.Image(np.asarray(recon_seq[0])),\n true_vs_recon=wandb.Image(\n np.asarray(comparison_seq.astype(np.uint8))\n ),\n )\n if val_results and step % args.val_interval == 0:\n log_images.update(\n dict(\n val_image=wandb.Image(\n np.asarray(val_results[""gt_seq_val""][0])\n ),\n val_recon=wandb.Image(\n np.asarray(val_results[""recon_seq_val""][0])\n ),\n val_true_vs_recon=wandb.Image(\n np.asarray(\n val_results[""val_comparison_seq""].astype(\n np.uint8\n )\n )\n ),\n )\n )\n wandb.log(log_images)\n # --- Checkpointing ---\n if args.save_ckpt and step % args.log_checkpoint_interval == 0:\n assert checkpoint_manager is not None\n optimizer_state = nnx.state(optimizer)\n if val_iterator:\n ckpt_manager_args = ocp.args.Composite(\n model_state=ocp.args.PyTreeSave(optimizer_state), # type: ignore\n train_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore",python,selection_command +712,780271,"TERMINAL",0,0,"5",,terminal_output +713,780284,"jasmine/train_tokenizer_appendix-c.py",15988,4235," first_step = step\n while step < args.num_steps:\n for batch in dataloader_train:\n # --- Train step ---\n loss, recon, metrics = train_step(optimizer, batch)\n if step == first_step:\n print_mem_stats(""After params initialized"")\n step += 1\n\n # --- Validation loss ---\n val_results = {}\n\n # --- Logging ---\n if args.log:\n if step % args.log_interval == 0 and jax.process_index() == 0:\n log_dict = {""loss"": loss, ""step"": step, **metrics}\n if val_results:\n log_dict.update(val_results[""metrics""])\n wandb.log(log_dict)\n if step % args.log_image_interval == 0:\n gt_seq = batch[""videos""][0].astype(jnp.float32) / 255.0\n recon_seq = recon[0].clip(0, 1)\n comparison_seq = jnp.concatenate((gt_seq, recon_seq), axis=1)\n comparison_seq = einops.rearrange(\n comparison_seq * 255, ""t h w c -> h (t w) c""\n )\n if val_results and step % args.val_interval == 0:\n val_results[""gt_seq_val""] = (\n val_results[""gt_batch""][""videos""][0].astype(jnp.float32)\n / 255.0\n )\n val_results[""recon_seq_val""] = val_results[""recon""][0].clip(\n 0, 1\n )\n val_results[""val_comparison_seq""] = jnp.concatenate(\n (val_results[""gt_seq_val""], val_results[""recon_seq_val""]),\n axis=1,\n )\n val_results[""val_comparison_seq""] = 
einops.rearrange(\n val_results[""val_comparison_seq""] * 255,\n ""t h w c -> h (t w) c"",\n )\n # NOTE: Process-dependent control flow deliberately happens\n # after indexing operation since it must not contain code\n # sections that lead to cross-accelerator communication.\n if jax.process_index() == 0:\n log_images = dict(\n image=wandb.Image(np.asarray(gt_seq[0])),\n recon=wandb.Image(np.asarray(recon_seq[0])),\n true_vs_recon=wandb.Image(\n np.asarray(comparison_seq.astype(np.uint8))\n ),\n )\n if val_results and step % args.val_interval == 0:\n log_images.update(\n dict(\n val_image=wandb.Image(\n np.asarray(val_results[""gt_seq_val""][0])\n ),\n val_recon=wandb.Image(\n np.asarray(val_results[""recon_seq_val""][0])\n ),\n val_true_vs_recon=wandb.Image(\n np.asarray(\n val_results[""val_comparison_seq""].astype(\n np.uint8\n )\n )\n ),\n )\n )\n wandb.log(log_images)\n # --- Checkpointing ---\n if args.save_ckpt and step % args.log_checkpoint_interval == 0:\n assert checkpoint_manager is not None\n optimizer_state = nnx.state(optimizer)\n if val_iterator:\n ckpt_manager_args = ocp.args.Composite(\n model_state=ocp.args.PyTreeSave(optimizer_state), # type: ignore\n train_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n train_iterator # type: ignore",python,selection_command +714,780313,"jasmine/train_tokenizer_appendix-c.py",15988,4262," first_step = step\n while step < args.num_steps:\n for batch in dataloader_train:\n # --- Train step ---\n loss, recon, metrics = train_step(optimizer, batch)\n if step == first_step:\n print_mem_stats(""After params initialized"")\n step += 1\n\n # --- Validation loss ---\n val_results = {}\n\n # --- Logging ---\n if args.log:\n if step % args.log_interval == 0 and jax.process_index() == 0:\n log_dict = {""loss"": loss, ""step"": step, **metrics}\n if val_results:\n log_dict.update(val_results[""metrics""])\n wandb.log(log_dict)\n if step % args.log_image_interval == 0:\n gt_seq = batch[""videos""][0].astype(jnp.float32) / 255.0\n recon_seq = recon[0].clip(0, 1)\n comparison_seq = jnp.concatenate((gt_seq, recon_seq), axis=1)\n comparison_seq = einops.rearrange(\n comparison_seq * 255, ""t h w c -> h (t w) c""\n )\n if val_results and step % args.val_interval == 0:\n val_results[""gt_seq_val""] = (\n val_results[""gt_batch""][""videos""][0].astype(jnp.float32)\n / 255.0\n )\n val_results[""recon_seq_val""] = val_results[""recon""][0].clip(\n 0, 1\n )\n val_results[""val_comparison_seq""] = jnp.concatenate(\n (val_results[""gt_seq_val""], val_results[""recon_seq_val""]),\n axis=1,\n )\n val_results[""val_comparison_seq""] = einops.rearrange(\n val_results[""val_comparison_seq""] * 255,\n ""t h w c -> h (t w) c"",\n )\n # NOTE: Process-dependent control flow deliberately happens\n # after indexing operation since it must not contain code\n # sections that lead to cross-accelerator communication.\n if jax.process_index() == 0:\n log_images = dict(\n image=wandb.Image(np.asarray(gt_seq[0])),\n recon=wandb.Image(np.asarray(recon_seq[0])),\n true_vs_recon=wandb.Image(\n np.asarray(comparison_seq.astype(np.uint8))\n ),\n )\n if val_results and step % args.val_interval == 0:\n log_images.update(\n dict(\n val_image=wandb.Image(\n np.asarray(val_results[""gt_seq_val""][0])\n ),\n val_recon=wandb.Image(\n np.asarray(val_results[""recon_seq_val""][0])\n ),\n val_true_vs_recon=wandb.Image(\n np.asarray(\n val_results[""val_comparison_seq""].astype(\n np.uint8\n )\n )\n ),\n )\n )\n wandb.log(log_images)\n # --- Checkpointing ---\n if 
args.save_ckpt and step % args.log_checkpoint_interval == 0:\n assert checkpoint_manager is not None\n optimizer_state = nnx.state(optimizer)\n if val_iterator:\n ckpt_manager_args = ocp.args.Composite(\n model_state=ocp.args.PyTreeSave(optimizer_state), # type: ignore\n train_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n train_iterator # type: ignore\n ),",python,selection_command +715,780314,"jasmine/train_tokenizer_appendix-c.py",15988,4356," first_step = step\n while step < args.num_steps:\n for batch in dataloader_train:\n # --- Train step ---\n loss, recon, metrics = train_step(optimizer, batch)\n if step == first_step:\n print_mem_stats(""After params initialized"")\n step += 1\n\n # --- Validation loss ---\n val_results = {}\n\n # --- Logging ---\n if args.log:\n if step % args.log_interval == 0 and jax.process_index() == 0:\n log_dict = {""loss"": loss, ""step"": step, **metrics}\n if val_results:\n log_dict.update(val_results[""metrics""])\n wandb.log(log_dict)\n if step % args.log_image_interval == 0:\n gt_seq = batch[""videos""][0].astype(jnp.float32) / 255.0\n recon_seq = recon[0].clip(0, 1)\n comparison_seq = jnp.concatenate((gt_seq, recon_seq), axis=1)\n comparison_seq = einops.rearrange(\n comparison_seq * 255, ""t h w c -> h (t w) c""\n )\n if val_results and step % args.val_interval == 0:\n val_results[""gt_seq_val""] = (\n val_results[""gt_batch""][""videos""][0].astype(jnp.float32)\n / 255.0\n )\n val_results[""recon_seq_val""] = val_results[""recon""][0].clip(\n 0, 1\n )\n val_results[""val_comparison_seq""] = jnp.concatenate(\n (val_results[""gt_seq_val""], val_results[""recon_seq_val""]),\n axis=1,\n )\n val_results[""val_comparison_seq""] = einops.rearrange(\n val_results[""val_comparison_seq""] * 255,\n ""t h w c -> h (t w) c"",\n )\n # NOTE: Process-dependent control flow deliberately happens\n # after indexing operation since it must not contain code\n # sections that lead to cross-accelerator communication.\n if jax.process_index() == 0:\n log_images = dict(\n image=wandb.Image(np.asarray(gt_seq[0])),\n recon=wandb.Image(np.asarray(recon_seq[0])),\n true_vs_recon=wandb.Image(\n np.asarray(comparison_seq.astype(np.uint8))\n ),\n )\n if val_results and step % args.val_interval == 0:\n log_images.update(\n dict(\n val_image=wandb.Image(\n np.asarray(val_results[""gt_seq_val""][0])\n ),\n val_recon=wandb.Image(\n np.asarray(val_results[""recon_seq_val""][0])\n ),\n val_true_vs_recon=wandb.Image(\n np.asarray(\n val_results[""val_comparison_seq""].astype(\n np.uint8\n )\n )\n ),\n )\n )\n wandb.log(log_images)\n # --- Checkpointing ---\n if args.save_ckpt and step % args.log_checkpoint_interval == 0:\n assert checkpoint_manager is not None\n optimizer_state = nnx.state(optimizer)\n if val_iterator:\n ckpt_manager_args = ocp.args.Composite(\n model_state=ocp.args.PyTreeSave(optimizer_state), # type: ignore\n train_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n train_iterator # type: ignore\n ),\n val_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore",python,selection_command +716,780376,"jasmine/train_tokenizer_appendix-c.py",15988,4413," first_step = step\n while step < args.num_steps:\n for batch in dataloader_train:\n # --- Train step ---\n loss, recon, metrics = train_step(optimizer, batch)\n if step == first_step:\n print_mem_stats(""After params initialized"")\n step += 1\n\n # --- Validation loss ---\n val_results = {}\n\n # --- Logging ---\n if args.log:\n if step % args.log_interval == 0 and 
jax.process_index() == 0:\n log_dict = {""loss"": loss, ""step"": step, **metrics}\n if val_results:\n log_dict.update(val_results[""metrics""])\n wandb.log(log_dict)\n if step % args.log_image_interval == 0:\n gt_seq = batch[""videos""][0].astype(jnp.float32) / 255.0\n recon_seq = recon[0].clip(0, 1)\n comparison_seq = jnp.concatenate((gt_seq, recon_seq), axis=1)\n comparison_seq = einops.rearrange(\n comparison_seq * 255, ""t h w c -> h (t w) c""\n )\n if val_results and step % args.val_interval == 0:\n val_results[""gt_seq_val""] = (\n val_results[""gt_batch""][""videos""][0].astype(jnp.float32)\n / 255.0\n )\n val_results[""recon_seq_val""] = val_results[""recon""][0].clip(\n 0, 1\n )\n val_results[""val_comparison_seq""] = jnp.concatenate(\n (val_results[""gt_seq_val""], val_results[""recon_seq_val""]),\n axis=1,\n )\n val_results[""val_comparison_seq""] = einops.rearrange(\n val_results[""val_comparison_seq""] * 255,\n ""t h w c -> h (t w) c"",\n )\n # NOTE: Process-dependent control flow deliberately happens\n # after indexing operation since it must not contain code\n # sections that lead to cross-accelerator communication.\n if jax.process_index() == 0:\n log_images = dict(\n image=wandb.Image(np.asarray(gt_seq[0])),\n recon=wandb.Image(np.asarray(recon_seq[0])),\n true_vs_recon=wandb.Image(\n np.asarray(comparison_seq.astype(np.uint8))\n ),\n )\n if val_results and step % args.val_interval == 0:\n log_images.update(\n dict(\n val_image=wandb.Image(\n np.asarray(val_results[""gt_seq_val""][0])\n ),\n val_recon=wandb.Image(\n np.asarray(val_results[""recon_seq_val""][0])\n ),\n val_true_vs_recon=wandb.Image(\n np.asarray(\n val_results[""val_comparison_seq""].astype(\n np.uint8\n )\n )\n ),\n )\n )\n wandb.log(log_images)\n # --- Checkpointing ---\n if args.save_ckpt and step % args.log_checkpoint_interval == 0:\n assert checkpoint_manager is not None\n optimizer_state = nnx.state(optimizer)\n if val_iterator:\n ckpt_manager_args = ocp.args.Composite(\n model_state=ocp.args.PyTreeSave(optimizer_state), # type: ignore\n train_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n train_iterator # type: ignore\n ),\n val_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n val_iterator # type: ignore",python,selection_command +717,780378,"jasmine/train_tokenizer_appendix-c.py",15988,4440," first_step = step\n while step < args.num_steps:\n for batch in dataloader_train:\n # --- Train step ---\n loss, recon, metrics = train_step(optimizer, batch)\n if step == first_step:\n print_mem_stats(""After params initialized"")\n step += 1\n\n # --- Validation loss ---\n val_results = {}\n\n # --- Logging ---\n if args.log:\n if step % args.log_interval == 0 and jax.process_index() == 0:\n log_dict = {""loss"": loss, ""step"": step, **metrics}\n if val_results:\n log_dict.update(val_results[""metrics""])\n wandb.log(log_dict)\n if step % args.log_image_interval == 0:\n gt_seq = batch[""videos""][0].astype(jnp.float32) / 255.0\n recon_seq = recon[0].clip(0, 1)\n comparison_seq = jnp.concatenate((gt_seq, recon_seq), axis=1)\n comparison_seq = einops.rearrange(\n comparison_seq * 255, ""t h w c -> h (t w) c""\n )\n if val_results and step % args.val_interval == 0:\n val_results[""gt_seq_val""] = (\n val_results[""gt_batch""][""videos""][0].astype(jnp.float32)\n / 255.0\n )\n val_results[""recon_seq_val""] = val_results[""recon""][0].clip(\n 0, 1\n )\n val_results[""val_comparison_seq""] = jnp.concatenate(\n (val_results[""gt_seq_val""], val_results[""recon_seq_val""]),\n 
axis=1,\n )\n val_results[""val_comparison_seq""] = einops.rearrange(\n val_results[""val_comparison_seq""] * 255,\n ""t h w c -> h (t w) c"",\n )\n # NOTE: Process-dependent control flow deliberately happens\n # after indexing operation since it must not contain code\n # sections that lead to cross-accelerator communication.\n if jax.process_index() == 0:\n log_images = dict(\n image=wandb.Image(np.asarray(gt_seq[0])),\n recon=wandb.Image(np.asarray(recon_seq[0])),\n true_vs_recon=wandb.Image(\n np.asarray(comparison_seq.astype(np.uint8))\n ),\n )\n if val_results and step % args.val_interval == 0:\n log_images.update(\n dict(\n val_image=wandb.Image(\n np.asarray(val_results[""gt_seq_val""][0])\n ),\n val_recon=wandb.Image(\n np.asarray(val_results[""recon_seq_val""][0])\n ),\n val_true_vs_recon=wandb.Image(\n np.asarray(\n val_results[""val_comparison_seq""].astype(\n np.uint8\n )\n )\n ),\n )\n )\n wandb.log(log_images)\n # --- Checkpointing ---\n if args.save_ckpt and step % args.log_checkpoint_interval == 0:\n assert checkpoint_manager is not None\n optimizer_state = nnx.state(optimizer)\n if val_iterator:\n ckpt_manager_args = ocp.args.Composite(\n model_state=ocp.args.PyTreeSave(optimizer_state), # type: ignore\n train_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n train_iterator # type: ignore\n ),\n val_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n val_iterator # type: ignore\n ),",python,selection_command +718,780416,"jasmine/train_tokenizer_appendix-c.py",15988,4462," first_step = step\n while step < args.num_steps:\n for batch in dataloader_train:\n # --- Train step ---\n loss, recon, metrics = train_step(optimizer, batch)\n if step == first_step:\n print_mem_stats(""After params initialized"")\n step += 1\n\n # --- Validation loss ---\n val_results = {}\n\n # --- Logging ---\n if args.log:\n if step % args.log_interval == 0 and jax.process_index() == 0:\n log_dict = {""loss"": loss, ""step"": step, **metrics}\n if val_results:\n log_dict.update(val_results[""metrics""])\n wandb.log(log_dict)\n if step % args.log_image_interval == 0:\n gt_seq = batch[""videos""][0].astype(jnp.float32) / 255.0\n recon_seq = recon[0].clip(0, 1)\n comparison_seq = jnp.concatenate((gt_seq, recon_seq), axis=1)\n comparison_seq = einops.rearrange(\n comparison_seq * 255, ""t h w c -> h (t w) c""\n )\n if val_results and step % args.val_interval == 0:\n val_results[""gt_seq_val""] = (\n val_results[""gt_batch""][""videos""][0].astype(jnp.float32)\n / 255.0\n )\n val_results[""recon_seq_val""] = val_results[""recon""][0].clip(\n 0, 1\n )\n val_results[""val_comparison_seq""] = jnp.concatenate(\n (val_results[""gt_seq_val""], val_results[""recon_seq_val""]),\n axis=1,\n )\n val_results[""val_comparison_seq""] = einops.rearrange(\n val_results[""val_comparison_seq""] * 255,\n ""t h w c -> h (t w) c"",\n )\n # NOTE: Process-dependent control flow deliberately happens\n # after indexing operation since it must not contain code\n # sections that lead to cross-accelerator communication.\n if jax.process_index() == 0:\n log_images = dict(\n image=wandb.Image(np.asarray(gt_seq[0])),\n recon=wandb.Image(np.asarray(recon_seq[0])),\n true_vs_recon=wandb.Image(\n np.asarray(comparison_seq.astype(np.uint8))\n ),\n )\n if val_results and step % args.val_interval == 0:\n log_images.update(\n dict(\n val_image=wandb.Image(\n np.asarray(val_results[""gt_seq_val""][0])\n ),\n val_recon=wandb.Image(\n np.asarray(val_results[""recon_seq_val""][0])\n ),\n 
val_true_vs_recon=wandb.Image(\n np.asarray(\n val_results[""val_comparison_seq""].astype(\n np.uint8\n )\n )\n ),\n )\n )\n wandb.log(log_images)\n # --- Checkpointing ---\n if args.save_ckpt and step % args.log_checkpoint_interval == 0:\n assert checkpoint_manager is not None\n optimizer_state = nnx.state(optimizer)\n if val_iterator:\n ckpt_manager_args = ocp.args.Composite(\n model_state=ocp.args.PyTreeSave(optimizer_state), # type: ignore\n train_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n train_iterator # type: ignore\n ),\n val_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n val_iterator # type: ignore\n ),\n )",python,selection_command +719,780428,"jasmine/train_tokenizer_appendix-c.py",15988,4484," first_step = step\n while step < args.num_steps:\n for batch in dataloader_train:\n # --- Train step ---\n loss, recon, metrics = train_step(optimizer, batch)\n if step == first_step:\n print_mem_stats(""After params initialized"")\n step += 1\n\n # --- Validation loss ---\n val_results = {}\n\n # --- Logging ---\n if args.log:\n if step % args.log_interval == 0 and jax.process_index() == 0:\n log_dict = {""loss"": loss, ""step"": step, **metrics}\n if val_results:\n log_dict.update(val_results[""metrics""])\n wandb.log(log_dict)\n if step % args.log_image_interval == 0:\n gt_seq = batch[""videos""][0].astype(jnp.float32) / 255.0\n recon_seq = recon[0].clip(0, 1)\n comparison_seq = jnp.concatenate((gt_seq, recon_seq), axis=1)\n comparison_seq = einops.rearrange(\n comparison_seq * 255, ""t h w c -> h (t w) c""\n )\n if val_results and step % args.val_interval == 0:\n val_results[""gt_seq_val""] = (\n val_results[""gt_batch""][""videos""][0].astype(jnp.float32)\n / 255.0\n )\n val_results[""recon_seq_val""] = val_results[""recon""][0].clip(\n 0, 1\n )\n val_results[""val_comparison_seq""] = jnp.concatenate(\n (val_results[""gt_seq_val""], val_results[""recon_seq_val""]),\n axis=1,\n )\n val_results[""val_comparison_seq""] = einops.rearrange(\n val_results[""val_comparison_seq""] * 255,\n ""t h w c -> h (t w) c"",\n )\n # NOTE: Process-dependent control flow deliberately happens\n # after indexing operation since it must not contain code\n # sections that lead to cross-accelerator communication.\n if jax.process_index() == 0:\n log_images = dict(\n image=wandb.Image(np.asarray(gt_seq[0])),\n recon=wandb.Image(np.asarray(recon_seq[0])),\n true_vs_recon=wandb.Image(\n np.asarray(comparison_seq.astype(np.uint8))\n ),\n )\n if val_results and step % args.val_interval == 0:\n log_images.update(\n dict(\n val_image=wandb.Image(\n np.asarray(val_results[""gt_seq_val""][0])\n ),\n val_recon=wandb.Image(\n np.asarray(val_results[""recon_seq_val""][0])\n ),\n val_true_vs_recon=wandb.Image(\n np.asarray(\n val_results[""val_comparison_seq""].astype(\n np.uint8\n )\n )\n ),\n )\n )\n wandb.log(log_images)\n # --- Checkpointing ---\n if args.save_ckpt and step % args.log_checkpoint_interval == 0:\n assert checkpoint_manager is not None\n optimizer_state = nnx.state(optimizer)\n if val_iterator:\n ckpt_manager_args = ocp.args.Composite(\n model_state=ocp.args.PyTreeSave(optimizer_state), # type: ignore\n train_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n train_iterator # type: ignore\n ),\n val_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n val_iterator # type: ignore\n ),\n )\n else:",python,selection_command +720,780499,"jasmine/train_tokenizer_appendix-c.py",15988,4544," first_step = step\n while step < 
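The checkpointing branch captured here saves the optimizer state and both dataloader iterators together. A minimal sketch of the Orbax-plus-Grain composite save the recording converges on (manager construction is elided, and the exact import paths and API availability in your installed versions are assumptions):

# Sketch: save model/optimizer state and dataloader iterators atomically,
# following the recorded pattern; assumes orbax-checkpoint (ocp) and grain
# versions that expose these entry points.
import orbax.checkpoint as ocp
import grain  # import path assumed from the recorded snippet

def save_checkpoint(checkpoint_manager, step, optimizer_state,
                    train_iterator, val_iterator=None):
    items = dict(
        model_state=ocp.args.PyTreeSave(optimizer_state),
        train_dataloader_state=grain.checkpoint.CheckpointSave(train_iterator),
    )
    if val_iterator is not None:  # recorded code branches the same way
        items["val_dataloader_state"] = grain.checkpoint.CheckpointSave(val_iterator)
    checkpoint_manager.save(step, args=ocp.args.Composite(**items))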
args.num_steps:\n for batch in dataloader_train:\n # --- Train step ---\n loss, recon, metrics = train_step(optimizer, batch)\n if step == first_step:\n print_mem_stats(""After params initialized"")\n step += 1\n\n # --- Validation loss ---\n val_results = {}\n\n # --- Logging ---\n if args.log:\n if step % args.log_interval == 0 and jax.process_index() == 0:\n log_dict = {""loss"": loss, ""step"": step, **metrics}\n if val_results:\n log_dict.update(val_results[""metrics""])\n wandb.log(log_dict)\n if step % args.log_image_interval == 0:\n gt_seq = batch[""videos""][0].astype(jnp.float32) / 255.0\n recon_seq = recon[0].clip(0, 1)\n comparison_seq = jnp.concatenate((gt_seq, recon_seq), axis=1)\n comparison_seq = einops.rearrange(\n comparison_seq * 255, ""t h w c -> h (t w) c""\n )\n if val_results and step % args.val_interval == 0:\n val_results[""gt_seq_val""] = (\n val_results[""gt_batch""][""videos""][0].astype(jnp.float32)\n / 255.0\n )\n val_results[""recon_seq_val""] = val_results[""recon""][0].clip(\n 0, 1\n )\n val_results[""val_comparison_seq""] = jnp.concatenate(\n (val_results[""gt_seq_val""], val_results[""recon_seq_val""]),\n axis=1,\n )\n val_results[""val_comparison_seq""] = einops.rearrange(\n val_results[""val_comparison_seq""] * 255,\n ""t h w c -> h (t w) c"",\n )\n # NOTE: Process-dependent control flow deliberately happens\n # after indexing operation since it must not contain code\n # sections that lead to cross-accelerator communication.\n if jax.process_index() == 0:\n log_images = dict(\n image=wandb.Image(np.asarray(gt_seq[0])),\n recon=wandb.Image(np.asarray(recon_seq[0])),\n true_vs_recon=wandb.Image(\n np.asarray(comparison_seq.astype(np.uint8))\n ),\n )\n if val_results and step % args.val_interval == 0:\n log_images.update(\n dict(\n val_image=wandb.Image(\n np.asarray(val_results[""gt_seq_val""][0])\n ),\n val_recon=wandb.Image(\n np.asarray(val_results[""recon_seq_val""][0])\n ),\n val_true_vs_recon=wandb.Image(\n np.asarray(\n val_results[""val_comparison_seq""].astype(\n np.uint8\n )\n )\n ),\n )\n )\n wandb.log(log_images)\n # --- Checkpointing ---\n if args.save_ckpt and step % args.log_checkpoint_interval == 0:\n assert checkpoint_manager is not None\n optimizer_state = nnx.state(optimizer)\n if val_iterator:\n ckpt_manager_args = ocp.args.Composite(\n model_state=ocp.args.PyTreeSave(optimizer_state), # type: ignore\n train_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n train_iterator # type: ignore\n ),\n val_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n val_iterator # type: ignore\n ),\n )\n else:\n ckpt_manager_args = ocp.args.Composite(",python,selection_command +721,780500,"jasmine/train_tokenizer_appendix-c.py",15988,4634," first_step = step\n while step < args.num_steps:\n for batch in dataloader_train:\n # --- Train step ---\n loss, recon, metrics = train_step(optimizer, batch)\n if step == first_step:\n print_mem_stats(""After params initialized"")\n step += 1\n\n # --- Validation loss ---\n val_results = {}\n\n # --- Logging ---\n if args.log:\n if step % args.log_interval == 0 and jax.process_index() == 0:\n log_dict = {""loss"": loss, ""step"": step, **metrics}\n if val_results:\n log_dict.update(val_results[""metrics""])\n wandb.log(log_dict)\n if step % args.log_image_interval == 0:\n gt_seq = batch[""videos""][0].astype(jnp.float32) / 255.0\n recon_seq = recon[0].clip(0, 1)\n comparison_seq = jnp.concatenate((gt_seq, recon_seq), axis=1)\n comparison_seq = einops.rearrange(\n comparison_seq * 255, 
""t h w c -> h (t w) c""\n )\n if val_results and step % args.val_interval == 0:\n val_results[""gt_seq_val""] = (\n val_results[""gt_batch""][""videos""][0].astype(jnp.float32)\n / 255.0\n )\n val_results[""recon_seq_val""] = val_results[""recon""][0].clip(\n 0, 1\n )\n val_results[""val_comparison_seq""] = jnp.concatenate(\n (val_results[""gt_seq_val""], val_results[""recon_seq_val""]),\n axis=1,\n )\n val_results[""val_comparison_seq""] = einops.rearrange(\n val_results[""val_comparison_seq""] * 255,\n ""t h w c -> h (t w) c"",\n )\n # NOTE: Process-dependent control flow deliberately happens\n # after indexing operation since it must not contain code\n # sections that lead to cross-accelerator communication.\n if jax.process_index() == 0:\n log_images = dict(\n image=wandb.Image(np.asarray(gt_seq[0])),\n recon=wandb.Image(np.asarray(recon_seq[0])),\n true_vs_recon=wandb.Image(\n np.asarray(comparison_seq.astype(np.uint8))\n ),\n )\n if val_results and step % args.val_interval == 0:\n log_images.update(\n dict(\n val_image=wandb.Image(\n np.asarray(val_results[""gt_seq_val""][0])\n ),\n val_recon=wandb.Image(\n np.asarray(val_results[""recon_seq_val""][0])\n ),\n val_true_vs_recon=wandb.Image(\n np.asarray(\n val_results[""val_comparison_seq""].astype(\n np.uint8\n )\n )\n ),\n )\n )\n wandb.log(log_images)\n # --- Checkpointing ---\n if args.save_ckpt and step % args.log_checkpoint_interval == 0:\n assert checkpoint_manager is not None\n optimizer_state = nnx.state(optimizer)\n if val_iterator:\n ckpt_manager_args = ocp.args.Composite(\n model_state=ocp.args.PyTreeSave(optimizer_state), # type: ignore\n train_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n train_iterator # type: ignore\n ),\n val_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n val_iterator # type: ignore\n ),\n )\n else:\n ckpt_manager_args = ocp.args.Composite(\n model_state=ocp.args.PyTreeSave(optimizer_state), # type: ignore",python,selection_command +722,780553,"jasmine/train_tokenizer_appendix-c.py",15988,4730," first_step = step\n while step < args.num_steps:\n for batch in dataloader_train:\n # --- Train step ---\n loss, recon, metrics = train_step(optimizer, batch)\n if step == first_step:\n print_mem_stats(""After params initialized"")\n step += 1\n\n # --- Validation loss ---\n val_results = {}\n\n # --- Logging ---\n if args.log:\n if step % args.log_interval == 0 and jax.process_index() == 0:\n log_dict = {""loss"": loss, ""step"": step, **metrics}\n if val_results:\n log_dict.update(val_results[""metrics""])\n wandb.log(log_dict)\n if step % args.log_image_interval == 0:\n gt_seq = batch[""videos""][0].astype(jnp.float32) / 255.0\n recon_seq = recon[0].clip(0, 1)\n comparison_seq = jnp.concatenate((gt_seq, recon_seq), axis=1)\n comparison_seq = einops.rearrange(\n comparison_seq * 255, ""t h w c -> h (t w) c""\n )\n if val_results and step % args.val_interval == 0:\n val_results[""gt_seq_val""] = (\n val_results[""gt_batch""][""videos""][0].astype(jnp.float32)\n / 255.0\n )\n val_results[""recon_seq_val""] = val_results[""recon""][0].clip(\n 0, 1\n )\n val_results[""val_comparison_seq""] = jnp.concatenate(\n (val_results[""gt_seq_val""], val_results[""recon_seq_val""]),\n axis=1,\n )\n val_results[""val_comparison_seq""] = einops.rearrange(\n val_results[""val_comparison_seq""] * 255,\n ""t h w c -> h (t w) c"",\n )\n # NOTE: Process-dependent control flow deliberately happens\n # after indexing operation since it must not contain code\n # sections that lead to 
cross-accelerator communication.\n if jax.process_index() == 0:\n log_images = dict(\n image=wandb.Image(np.asarray(gt_seq[0])),\n recon=wandb.Image(np.asarray(recon_seq[0])),\n true_vs_recon=wandb.Image(\n np.asarray(comparison_seq.astype(np.uint8))\n ),\n )\n if val_results and step % args.val_interval == 0:\n log_images.update(\n dict(\n val_image=wandb.Image(\n np.asarray(val_results[""gt_seq_val""][0])\n ),\n val_recon=wandb.Image(\n np.asarray(val_results[""recon_seq_val""][0])\n ),\n val_true_vs_recon=wandb.Image(\n np.asarray(\n val_results[""val_comparison_seq""].astype(\n np.uint8\n )\n )\n ),\n )\n )\n wandb.log(log_images)\n # --- Checkpointing ---\n if args.save_ckpt and step % args.log_checkpoint_interval == 0:\n assert checkpoint_manager is not None\n optimizer_state = nnx.state(optimizer)\n if val_iterator:\n ckpt_manager_args = ocp.args.Composite(\n model_state=ocp.args.PyTreeSave(optimizer_state), # type: ignore\n train_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n train_iterator # type: ignore\n ),\n val_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n val_iterator # type: ignore\n ),\n )\n else:\n ckpt_manager_args = ocp.args.Composite(\n model_state=ocp.args.PyTreeSave(optimizer_state), # type: ignore\n train_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore",python,selection_command +723,780555,"jasmine/train_tokenizer_appendix-c.py",15988,4789," first_step = step\n while step < args.num_steps:\n for batch in dataloader_train:\n # --- Train step ---\n loss, recon, metrics = train_step(optimizer, batch)\n if step == first_step:\n print_mem_stats(""After params initialized"")\n step += 1\n\n # --- Validation loss ---\n val_results = {}\n\n # --- Logging ---\n if args.log:\n if step % args.log_interval == 0 and jax.process_index() == 0:\n log_dict = {""loss"": loss, ""step"": step, **metrics}\n if val_results:\n log_dict.update(val_results[""metrics""])\n wandb.log(log_dict)\n if step % args.log_image_interval == 0:\n gt_seq = batch[""videos""][0].astype(jnp.float32) / 255.0\n recon_seq = recon[0].clip(0, 1)\n comparison_seq = jnp.concatenate((gt_seq, recon_seq), axis=1)\n comparison_seq = einops.rearrange(\n comparison_seq * 255, ""t h w c -> h (t w) c""\n )\n if val_results and step % args.val_interval == 0:\n val_results[""gt_seq_val""] = (\n val_results[""gt_batch""][""videos""][0].astype(jnp.float32)\n / 255.0\n )\n val_results[""recon_seq_val""] = val_results[""recon""][0].clip(\n 0, 1\n )\n val_results[""val_comparison_seq""] = jnp.concatenate(\n (val_results[""gt_seq_val""], val_results[""recon_seq_val""]),\n axis=1,\n )\n val_results[""val_comparison_seq""] = einops.rearrange(\n val_results[""val_comparison_seq""] * 255,\n ""t h w c -> h (t w) c"",\n )\n # NOTE: Process-dependent control flow deliberately happens\n # after indexing operation since it must not contain code\n # sections that lead to cross-accelerator communication.\n if jax.process_index() == 0:\n log_images = dict(\n image=wandb.Image(np.asarray(gt_seq[0])),\n recon=wandb.Image(np.asarray(recon_seq[0])),\n true_vs_recon=wandb.Image(\n np.asarray(comparison_seq.astype(np.uint8))\n ),\n )\n if val_results and step % args.val_interval == 0:\n log_images.update(\n dict(\n val_image=wandb.Image(\n np.asarray(val_results[""gt_seq_val""][0])\n ),\n val_recon=wandb.Image(\n np.asarray(val_results[""recon_seq_val""][0])\n ),\n val_true_vs_recon=wandb.Image(\n np.asarray(\n val_results[""val_comparison_seq""].astype(\n np.uint8\n )\n )\n ),\n )\n )\n 
wandb.log(log_images)\n # --- Checkpointing ---\n if args.save_ckpt and step % args.log_checkpoint_interval == 0:\n assert checkpoint_manager is not None\n optimizer_state = nnx.state(optimizer)\n if val_iterator:\n ckpt_manager_args = ocp.args.Composite(\n model_state=ocp.args.PyTreeSave(optimizer_state), # type: ignore\n train_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n train_iterator # type: ignore\n ),\n val_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n val_iterator # type: ignore\n ),\n )\n else:\n ckpt_manager_args = ocp.args.Composite(\n model_state=ocp.args.PyTreeSave(optimizer_state), # type: ignore\n train_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n train_iterator # type: ignore",python,selection_command +724,780613,"jasmine/train_tokenizer_appendix-c.py",15988,4816," first_step = step\n while step < args.num_steps:\n for batch in dataloader_train:\n # --- Train step ---\n loss, recon, metrics = train_step(optimizer, batch)\n if step == first_step:\n print_mem_stats(""After params initialized"")\n step += 1\n\n # --- Validation loss ---\n val_results = {}\n\n # --- Logging ---\n if args.log:\n if step % args.log_interval == 0 and jax.process_index() == 0:\n log_dict = {""loss"": loss, ""step"": step, **metrics}\n if val_results:\n log_dict.update(val_results[""metrics""])\n wandb.log(log_dict)\n if step % args.log_image_interval == 0:\n gt_seq = batch[""videos""][0].astype(jnp.float32) / 255.0\n recon_seq = recon[0].clip(0, 1)\n comparison_seq = jnp.concatenate((gt_seq, recon_seq), axis=1)\n comparison_seq = einops.rearrange(\n comparison_seq * 255, ""t h w c -> h (t w) c""\n )\n if val_results and step % args.val_interval == 0:\n val_results[""gt_seq_val""] = (\n val_results[""gt_batch""][""videos""][0].astype(jnp.float32)\n / 255.0\n )\n val_results[""recon_seq_val""] = val_results[""recon""][0].clip(\n 0, 1\n )\n val_results[""val_comparison_seq""] = jnp.concatenate(\n (val_results[""gt_seq_val""], val_results[""recon_seq_val""]),\n axis=1,\n )\n val_results[""val_comparison_seq""] = einops.rearrange(\n val_results[""val_comparison_seq""] * 255,\n ""t h w c -> h (t w) c"",\n )\n # NOTE: Process-dependent control flow deliberately happens\n # after indexing operation since it must not contain code\n # sections that lead to cross-accelerator communication.\n if jax.process_index() == 0:\n log_images = dict(\n image=wandb.Image(np.asarray(gt_seq[0])),\n recon=wandb.Image(np.asarray(recon_seq[0])),\n true_vs_recon=wandb.Image(\n np.asarray(comparison_seq.astype(np.uint8))\n ),\n )\n if val_results and step % args.val_interval == 0:\n log_images.update(\n dict(\n val_image=wandb.Image(\n np.asarray(val_results[""gt_seq_val""][0])\n ),\n val_recon=wandb.Image(\n np.asarray(val_results[""recon_seq_val""][0])\n ),\n val_true_vs_recon=wandb.Image(\n np.asarray(\n val_results[""val_comparison_seq""].astype(\n np.uint8\n )\n )\n ),\n )\n )\n wandb.log(log_images)\n # --- Checkpointing ---\n if args.save_ckpt and step % args.log_checkpoint_interval == 0:\n assert checkpoint_manager is not None\n optimizer_state = nnx.state(optimizer)\n if val_iterator:\n ckpt_manager_args = ocp.args.Composite(\n model_state=ocp.args.PyTreeSave(optimizer_state), # type: ignore\n train_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n train_iterator # type: ignore\n ),\n val_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n val_iterator # type: ignore\n ),\n )\n else:\n ckpt_manager_args = 
ocp.args.Composite(\n model_state=ocp.args.PyTreeSave(optimizer_state), # type: ignore\n train_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n train_iterator # type: ignore\n ),",python,selection_command +725,780615,"jasmine/train_tokenizer_appendix-c.py",15988,4838," first_step = step\n while step < args.num_steps:\n for batch in dataloader_train:\n # --- Train step ---\n loss, recon, metrics = train_step(optimizer, batch)\n if step == first_step:\n print_mem_stats(""After params initialized"")\n step += 1\n\n # --- Validation loss ---\n val_results = {}\n\n # --- Logging ---\n if args.log:\n if step % args.log_interval == 0 and jax.process_index() == 0:\n log_dict = {""loss"": loss, ""step"": step, **metrics}\n if val_results:\n log_dict.update(val_results[""metrics""])\n wandb.log(log_dict)\n if step % args.log_image_interval == 0:\n gt_seq = batch[""videos""][0].astype(jnp.float32) / 255.0\n recon_seq = recon[0].clip(0, 1)\n comparison_seq = jnp.concatenate((gt_seq, recon_seq), axis=1)\n comparison_seq = einops.rearrange(\n comparison_seq * 255, ""t h w c -> h (t w) c""\n )\n if val_results and step % args.val_interval == 0:\n val_results[""gt_seq_val""] = (\n val_results[""gt_batch""][""videos""][0].astype(jnp.float32)\n / 255.0\n )\n val_results[""recon_seq_val""] = val_results[""recon""][0].clip(\n 0, 1\n )\n val_results[""val_comparison_seq""] = jnp.concatenate(\n (val_results[""gt_seq_val""], val_results[""recon_seq_val""]),\n axis=1,\n )\n val_results[""val_comparison_seq""] = einops.rearrange(\n val_results[""val_comparison_seq""] * 255,\n ""t h w c -> h (t w) c"",\n )\n # NOTE: Process-dependent control flow deliberately happens\n # after indexing operation since it must not contain code\n # sections that lead to cross-accelerator communication.\n if jax.process_index() == 0:\n log_images = dict(\n image=wandb.Image(np.asarray(gt_seq[0])),\n recon=wandb.Image(np.asarray(recon_seq[0])),\n true_vs_recon=wandb.Image(\n np.asarray(comparison_seq.astype(np.uint8))\n ),\n )\n if val_results and step % args.val_interval == 0:\n log_images.update(\n dict(\n val_image=wandb.Image(\n np.asarray(val_results[""gt_seq_val""][0])\n ),\n val_recon=wandb.Image(\n np.asarray(val_results[""recon_seq_val""][0])\n ),\n val_true_vs_recon=wandb.Image(\n np.asarray(\n val_results[""val_comparison_seq""].astype(\n np.uint8\n )\n )\n ),\n )\n )\n wandb.log(log_images)\n # --- Checkpointing ---\n if args.save_ckpt and step % args.log_checkpoint_interval == 0:\n assert checkpoint_manager is not None\n optimizer_state = nnx.state(optimizer)\n if val_iterator:\n ckpt_manager_args = ocp.args.Composite(\n model_state=ocp.args.PyTreeSave(optimizer_state), # type: ignore\n train_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n train_iterator # type: ignore\n ),\n val_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n val_iterator # type: ignore\n ),\n )\n else:\n ckpt_manager_args = ocp.args.Composite(\n model_state=ocp.args.PyTreeSave(optimizer_state), # type: ignore\n train_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n train_iterator # type: ignore\n ),\n )",python,selection_command +726,780680,"jasmine/train_tokenizer_appendix-c.py",15988,4908," first_step = step\n while step < args.num_steps:\n for batch in dataloader_train:\n # --- Train step ---\n loss, recon, metrics = train_step(optimizer, batch)\n if step == first_step:\n print_mem_stats(""After params initialized"")\n step += 1\n\n # --- Validation loss ---\n val_results = 
{}\n\n # --- Logging ---\n if args.log:\n if step % args.log_interval == 0 and jax.process_index() == 0:\n log_dict = {""loss"": loss, ""step"": step, **metrics}\n if val_results:\n log_dict.update(val_results[""metrics""])\n wandb.log(log_dict)\n if step % args.log_image_interval == 0:\n gt_seq = batch[""videos""][0].astype(jnp.float32) / 255.0\n recon_seq = recon[0].clip(0, 1)\n comparison_seq = jnp.concatenate((gt_seq, recon_seq), axis=1)\n comparison_seq = einops.rearrange(\n comparison_seq * 255, ""t h w c -> h (t w) c""\n )\n if val_results and step % args.val_interval == 0:\n val_results[""gt_seq_val""] = (\n val_results[""gt_batch""][""videos""][0].astype(jnp.float32)\n / 255.0\n )\n val_results[""recon_seq_val""] = val_results[""recon""][0].clip(\n 0, 1\n )\n val_results[""val_comparison_seq""] = jnp.concatenate(\n (val_results[""gt_seq_val""], val_results[""recon_seq_val""]),\n axis=1,\n )\n val_results[""val_comparison_seq""] = einops.rearrange(\n val_results[""val_comparison_seq""] * 255,\n ""t h w c -> h (t w) c"",\n )\n # NOTE: Process-dependent control flow deliberately happens\n # after indexing operation since it must not contain code\n # sections that lead to cross-accelerator communication.\n if jax.process_index() == 0:\n log_images = dict(\n image=wandb.Image(np.asarray(gt_seq[0])),\n recon=wandb.Image(np.asarray(recon_seq[0])),\n true_vs_recon=wandb.Image(\n np.asarray(comparison_seq.astype(np.uint8))\n ),\n )\n if val_results and step % args.val_interval == 0:\n log_images.update(\n dict(\n val_image=wandb.Image(\n np.asarray(val_results[""gt_seq_val""][0])\n ),\n val_recon=wandb.Image(\n np.asarray(val_results[""recon_seq_val""][0])\n ),\n val_true_vs_recon=wandb.Image(\n np.asarray(\n val_results[""val_comparison_seq""].astype(\n np.uint8\n )\n )\n ),\n )\n )\n wandb.log(log_images)\n # --- Checkpointing ---\n if args.save_ckpt and step % args.log_checkpoint_interval == 0:\n assert checkpoint_manager is not None\n optimizer_state = nnx.state(optimizer)\n if val_iterator:\n ckpt_manager_args = ocp.args.Composite(\n model_state=ocp.args.PyTreeSave(optimizer_state), # type: ignore\n train_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n train_iterator # type: ignore\n ),\n val_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n val_iterator # type: ignore\n ),\n )\n else:\n ckpt_manager_args = ocp.args.Composite(\n model_state=ocp.args.PyTreeSave(optimizer_state), # type: ignore\n train_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n train_iterator # type: ignore\n ),\n )\n checkpoint_manager.save(step, args=ckpt_manager_args)",python,selection_command +727,780681,"jasmine/train_tokenizer_appendix-c.py",15988,4966," first_step = step\n while step < args.num_steps:\n for batch in dataloader_train:\n # --- Train step ---\n loss, recon, metrics = train_step(optimizer, batch)\n if step == first_step:\n print_mem_stats(""After params initialized"")\n step += 1\n\n # --- Validation loss ---\n val_results = {}\n\n # --- Logging ---\n if args.log:\n if step % args.log_interval == 0 and jax.process_index() == 0:\n log_dict = {""loss"": loss, ""step"": step, **metrics}\n if val_results:\n log_dict.update(val_results[""metrics""])\n wandb.log(log_dict)\n if step % args.log_image_interval == 0:\n gt_seq = batch[""videos""][0].astype(jnp.float32) / 255.0\n recon_seq = recon[0].clip(0, 1)\n comparison_seq = jnp.concatenate((gt_seq, recon_seq), axis=1)\n comparison_seq = einops.rearrange(\n comparison_seq * 255, ""t h w c -> h (t 
w) c""\n )\n if val_results and step % args.val_interval == 0:\n val_results[""gt_seq_val""] = (\n val_results[""gt_batch""][""videos""][0].astype(jnp.float32)\n / 255.0\n )\n val_results[""recon_seq_val""] = val_results[""recon""][0].clip(\n 0, 1\n )\n val_results[""val_comparison_seq""] = jnp.concatenate(\n (val_results[""gt_seq_val""], val_results[""recon_seq_val""]),\n axis=1,\n )\n val_results[""val_comparison_seq""] = einops.rearrange(\n val_results[""val_comparison_seq""] * 255,\n ""t h w c -> h (t w) c"",\n )\n # NOTE: Process-dependent control flow deliberately happens\n # after indexing operation since it must not contain code\n # sections that lead to cross-accelerator communication.\n if jax.process_index() == 0:\n log_images = dict(\n image=wandb.Image(np.asarray(gt_seq[0])),\n recon=wandb.Image(np.asarray(recon_seq[0])),\n true_vs_recon=wandb.Image(\n np.asarray(comparison_seq.astype(np.uint8))\n ),\n )\n if val_results and step % args.val_interval == 0:\n log_images.update(\n dict(\n val_image=wandb.Image(\n np.asarray(val_results[""gt_seq_val""][0])\n ),\n val_recon=wandb.Image(\n np.asarray(val_results[""recon_seq_val""][0])\n ),\n val_true_vs_recon=wandb.Image(\n np.asarray(\n val_results[""val_comparison_seq""].astype(\n np.uint8\n )\n )\n ),\n )\n )\n wandb.log(log_images)\n # --- Checkpointing ---\n if args.save_ckpt and step % args.log_checkpoint_interval == 0:\n assert checkpoint_manager is not None\n optimizer_state = nnx.state(optimizer)\n if val_iterator:\n ckpt_manager_args = ocp.args.Composite(\n model_state=ocp.args.PyTreeSave(optimizer_state), # type: ignore\n train_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n train_iterator # type: ignore\n ),\n val_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n val_iterator # type: ignore\n ),\n )\n else:\n ckpt_manager_args = ocp.args.Composite(\n model_state=ocp.args.PyTreeSave(optimizer_state), # type: ignore\n train_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n train_iterator # type: ignore\n ),\n )\n checkpoint_manager.save(step, args=ckpt_manager_args)\n print(f""Saved checkpoint at step {step}"")",python,selection_command +728,780699,"jasmine/train_tokenizer_appendix-c.py",15988,5005," first_step = step\n while step < args.num_steps:\n for batch in dataloader_train:\n # --- Train step ---\n loss, recon, metrics = train_step(optimizer, batch)\n if step == first_step:\n print_mem_stats(""After params initialized"")\n step += 1\n\n # --- Validation loss ---\n val_results = {}\n\n # --- Logging ---\n if args.log:\n if step % args.log_interval == 0 and jax.process_index() == 0:\n log_dict = {""loss"": loss, ""step"": step, **metrics}\n if val_results:\n log_dict.update(val_results[""metrics""])\n wandb.log(log_dict)\n if step % args.log_image_interval == 0:\n gt_seq = batch[""videos""][0].astype(jnp.float32) / 255.0\n recon_seq = recon[0].clip(0, 1)\n comparison_seq = jnp.concatenate((gt_seq, recon_seq), axis=1)\n comparison_seq = einops.rearrange(\n comparison_seq * 255, ""t h w c -> h (t w) c""\n )\n if val_results and step % args.val_interval == 0:\n val_results[""gt_seq_val""] = (\n val_results[""gt_batch""][""videos""][0].astype(jnp.float32)\n / 255.0\n )\n val_results[""recon_seq_val""] = val_results[""recon""][0].clip(\n 0, 1\n )\n val_results[""val_comparison_seq""] = jnp.concatenate(\n (val_results[""gt_seq_val""], val_results[""recon_seq_val""]),\n axis=1,\n )\n val_results[""val_comparison_seq""] = einops.rearrange(\n 
val_results[""val_comparison_seq""] * 255,\n ""t h w c -> h (t w) c"",\n )\n # NOTE: Process-dependent control flow deliberately happens\n # after indexing operation since it must not contain code\n # sections that lead to cross-accelerator communication.\n if jax.process_index() == 0:\n log_images = dict(\n image=wandb.Image(np.asarray(gt_seq[0])),\n recon=wandb.Image(np.asarray(recon_seq[0])),\n true_vs_recon=wandb.Image(\n np.asarray(comparison_seq.astype(np.uint8))\n ),\n )\n if val_results and step % args.val_interval == 0:\n log_images.update(\n dict(\n val_image=wandb.Image(\n np.asarray(val_results[""gt_seq_val""][0])\n ),\n val_recon=wandb.Image(\n np.asarray(val_results[""recon_seq_val""][0])\n ),\n val_true_vs_recon=wandb.Image(\n np.asarray(\n val_results[""val_comparison_seq""].astype(\n np.uint8\n )\n )\n ),\n )\n )\n wandb.log(log_images)\n # --- Checkpointing ---\n if args.save_ckpt and step % args.log_checkpoint_interval == 0:\n assert checkpoint_manager is not None\n optimizer_state = nnx.state(optimizer)\n if val_iterator:\n ckpt_manager_args = ocp.args.Composite(\n model_state=ocp.args.PyTreeSave(optimizer_state), # type: ignore\n train_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n train_iterator # type: ignore\n ),\n val_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n val_iterator # type: ignore\n ),\n )\n else:\n ckpt_manager_args = ocp.args.Composite(\n model_state=ocp.args.PyTreeSave(optimizer_state), # type: ignore\n train_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n train_iterator # type: ignore\n ),\n )\n checkpoint_manager.save(step, args=ckpt_manager_args)\n print(f""Saved checkpoint at step {step}"")\n if step >= args.num_steps:",python,selection_command +729,780770,"jasmine/train_tokenizer_appendix-c.py",15988,5027," first_step = step\n while step < args.num_steps:\n for batch in dataloader_train:\n # --- Train step ---\n loss, recon, metrics = train_step(optimizer, batch)\n if step == first_step:\n print_mem_stats(""After params initialized"")\n step += 1\n\n # --- Validation loss ---\n val_results = {}\n\n # --- Logging ---\n if args.log:\n if step % args.log_interval == 0 and jax.process_index() == 0:\n log_dict = {""loss"": loss, ""step"": step, **metrics}\n if val_results:\n log_dict.update(val_results[""metrics""])\n wandb.log(log_dict)\n if step % args.log_image_interval == 0:\n gt_seq = batch[""videos""][0].astype(jnp.float32) / 255.0\n recon_seq = recon[0].clip(0, 1)\n comparison_seq = jnp.concatenate((gt_seq, recon_seq), axis=1)\n comparison_seq = einops.rearrange(\n comparison_seq * 255, ""t h w c -> h (t w) c""\n )\n if val_results and step % args.val_interval == 0:\n val_results[""gt_seq_val""] = (\n val_results[""gt_batch""][""videos""][0].astype(jnp.float32)\n / 255.0\n )\n val_results[""recon_seq_val""] = val_results[""recon""][0].clip(\n 0, 1\n )\n val_results[""val_comparison_seq""] = jnp.concatenate(\n (val_results[""gt_seq_val""], val_results[""recon_seq_val""]),\n axis=1,\n )\n val_results[""val_comparison_seq""] = einops.rearrange(\n val_results[""val_comparison_seq""] * 255,\n ""t h w c -> h (t w) c"",\n )\n # NOTE: Process-dependent control flow deliberately happens\n # after indexing operation since it must not contain code\n # sections that lead to cross-accelerator communication.\n if jax.process_index() == 0:\n log_images = dict(\n image=wandb.Image(np.asarray(gt_seq[0])),\n recon=wandb.Image(np.asarray(recon_seq[0])),\n true_vs_recon=wandb.Image(\n 
np.asarray(comparison_seq.astype(np.uint8))\n ),\n )\n if val_results and step % args.val_interval == 0:\n log_images.update(\n dict(\n val_image=wandb.Image(\n np.asarray(val_results[""gt_seq_val""][0])\n ),\n val_recon=wandb.Image(\n np.asarray(val_results[""recon_seq_val""][0])\n ),\n val_true_vs_recon=wandb.Image(\n np.asarray(\n val_results[""val_comparison_seq""].astype(\n np.uint8\n )\n )\n ),\n )\n )\n wandb.log(log_images)\n # --- Checkpointing ---\n if args.save_ckpt and step % args.log_checkpoint_interval == 0:\n assert checkpoint_manager is not None\n optimizer_state = nnx.state(optimizer)\n if val_iterator:\n ckpt_manager_args = ocp.args.Composite(\n model_state=ocp.args.PyTreeSave(optimizer_state), # type: ignore\n train_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n train_iterator # type: ignore\n ),\n val_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n val_iterator # type: ignore\n ),\n )\n else:\n ckpt_manager_args = ocp.args.Composite(\n model_state=ocp.args.PyTreeSave(optimizer_state), # type: ignore\n train_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n train_iterator # type: ignore\n ),\n )\n checkpoint_manager.save(step, args=ckpt_manager_args)\n print(f""Saved checkpoint at step {step}"")\n if step >= args.num_steps:\n break",python,selection_command +730,780774,"jasmine/train_tokenizer_appendix-c.py",15988,5028," first_step = step\n while step < args.num_steps:\n for batch in dataloader_train:\n # --- Train step ---\n loss, recon, metrics = train_step(optimizer, batch)\n if step == first_step:\n print_mem_stats(""After params initialized"")\n step += 1\n\n # --- Validation loss ---\n val_results = {}\n\n # --- Logging ---\n if args.log:\n if step % args.log_interval == 0 and jax.process_index() == 0:\n log_dict = {""loss"": loss, ""step"": step, **metrics}\n if val_results:\n log_dict.update(val_results[""metrics""])\n wandb.log(log_dict)\n if step % args.log_image_interval == 0:\n gt_seq = batch[""videos""][0].astype(jnp.float32) / 255.0\n recon_seq = recon[0].clip(0, 1)\n comparison_seq = jnp.concatenate((gt_seq, recon_seq), axis=1)\n comparison_seq = einops.rearrange(\n comparison_seq * 255, ""t h w c -> h (t w) c""\n )\n if val_results and step % args.val_interval == 0:\n val_results[""gt_seq_val""] = (\n val_results[""gt_batch""][""videos""][0].astype(jnp.float32)\n / 255.0\n )\n val_results[""recon_seq_val""] = val_results[""recon""][0].clip(\n 0, 1\n )\n val_results[""val_comparison_seq""] = jnp.concatenate(\n (val_results[""gt_seq_val""], val_results[""recon_seq_val""]),\n axis=1,\n )\n val_results[""val_comparison_seq""] = einops.rearrange(\n val_results[""val_comparison_seq""] * 255,\n ""t h w c -> h (t w) c"",\n )\n # NOTE: Process-dependent control flow deliberately happens\n # after indexing operation since it must not contain code\n # sections that lead to cross-accelerator communication.\n if jax.process_index() == 0:\n log_images = dict(\n image=wandb.Image(np.asarray(gt_seq[0])),\n recon=wandb.Image(np.asarray(recon_seq[0])),\n true_vs_recon=wandb.Image(\n np.asarray(comparison_seq.astype(np.uint8))\n ),\n )\n if val_results and step % args.val_interval == 0:\n log_images.update(\n dict(\n val_image=wandb.Image(\n np.asarray(val_results[""gt_seq_val""][0])\n ),\n val_recon=wandb.Image(\n np.asarray(val_results[""recon_seq_val""][0])\n ),\n val_true_vs_recon=wandb.Image(\n np.asarray(\n val_results[""val_comparison_seq""].astype(\n np.uint8\n )\n )\n ),\n )\n )\n wandb.log(log_images)\n # --- 
Checkpointing ---\n if args.save_ckpt and step % args.log_checkpoint_interval == 0:\n assert checkpoint_manager is not None\n optimizer_state = nnx.state(optimizer)\n if val_iterator:\n ckpt_manager_args = ocp.args.Composite(\n model_state=ocp.args.PyTreeSave(optimizer_state), # type: ignore\n train_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n train_iterator # type: ignore\n ),\n val_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n val_iterator # type: ignore\n ),\n )\n else:\n ckpt_manager_args = ocp.args.Composite(\n model_state=ocp.args.PyTreeSave(optimizer_state), # type: ignore\n train_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n train_iterator # type: ignore\n ),\n )\n checkpoint_manager.save(step, args=ckpt_manager_args)\n print(f""Saved checkpoint at step {step}"")\n if step >= args.num_steps:\n break\n",python,selection_command +731,780788,"jasmine/train_tokenizer_appendix-c.py",15988,5055," first_step = step\n while step < args.num_steps:\n for batch in dataloader_train:\n # --- Train step ---\n loss, recon, metrics = train_step(optimizer, batch)\n if step == first_step:\n print_mem_stats(""After params initialized"")\n step += 1\n\n # --- Validation loss ---\n val_results = {}\n\n # --- Logging ---\n if args.log:\n if step % args.log_interval == 0 and jax.process_index() == 0:\n log_dict = {""loss"": loss, ""step"": step, **metrics}\n if val_results:\n log_dict.update(val_results[""metrics""])\n wandb.log(log_dict)\n if step % args.log_image_interval == 0:\n gt_seq = batch[""videos""][0].astype(jnp.float32) / 255.0\n recon_seq = recon[0].clip(0, 1)\n comparison_seq = jnp.concatenate((gt_seq, recon_seq), axis=1)\n comparison_seq = einops.rearrange(\n comparison_seq * 255, ""t h w c -> h (t w) c""\n )\n if val_results and step % args.val_interval == 0:\n val_results[""gt_seq_val""] = (\n val_results[""gt_batch""][""videos""][0].astype(jnp.float32)\n / 255.0\n )\n val_results[""recon_seq_val""] = val_results[""recon""][0].clip(\n 0, 1\n )\n val_results[""val_comparison_seq""] = jnp.concatenate(\n (val_results[""gt_seq_val""], val_results[""recon_seq_val""]),\n axis=1,\n )\n val_results[""val_comparison_seq""] = einops.rearrange(\n val_results[""val_comparison_seq""] * 255,\n ""t h w c -> h (t w) c"",\n )\n # NOTE: Process-dependent control flow deliberately happens\n # after indexing operation since it must not contain code\n # sections that lead to cross-accelerator communication.\n if jax.process_index() == 0:\n log_images = dict(\n image=wandb.Image(np.asarray(gt_seq[0])),\n recon=wandb.Image(np.asarray(recon_seq[0])),\n true_vs_recon=wandb.Image(\n np.asarray(comparison_seq.astype(np.uint8))\n ),\n )\n if val_results and step % args.val_interval == 0:\n log_images.update(\n dict(\n val_image=wandb.Image(\n np.asarray(val_results[""gt_seq_val""][0])\n ),\n val_recon=wandb.Image(\n np.asarray(val_results[""recon_seq_val""][0])\n ),\n val_true_vs_recon=wandb.Image(\n np.asarray(\n val_results[""val_comparison_seq""].astype(\n np.uint8\n )\n )\n ),\n )\n )\n wandb.log(log_images)\n # --- Checkpointing ---\n if args.save_ckpt and step % args.log_checkpoint_interval == 0:\n assert checkpoint_manager is not None\n optimizer_state = nnx.state(optimizer)\n if val_iterator:\n ckpt_manager_args = ocp.args.Composite(\n model_state=ocp.args.PyTreeSave(optimizer_state), # type: ignore\n train_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n train_iterator # type: ignore\n ),\n 
val_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n val_iterator # type: ignore\n ),\n )\n else:\n ckpt_manager_args = ocp.args.Composite(\n model_state=ocp.args.PyTreeSave(optimizer_state), # type: ignore\n train_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n train_iterator # type: ignore\n ),\n )\n checkpoint_manager.save(step, args=ckpt_manager_args)\n print(f""Saved checkpoint at step {step}"")\n if step >= args.num_steps:\n break\n\n if checkpoint_manager:",python,selection_command +732,780839,"jasmine/train_tokenizer_appendix-c.py",15988,5090," first_step = step\n while step < args.num_steps:\n for batch in dataloader_train:\n # --- Train step ---\n loss, recon, metrics = train_step(optimizer, batch)\n if step == first_step:\n print_mem_stats(""After params initialized"")\n step += 1\n\n # --- Validation loss ---\n val_results = {}\n\n # --- Logging ---\n if args.log:\n if step % args.log_interval == 0 and jax.process_index() == 0:\n log_dict = {""loss"": loss, ""step"": step, **metrics}\n if val_results:\n log_dict.update(val_results[""metrics""])\n wandb.log(log_dict)\n if step % args.log_image_interval == 0:\n gt_seq = batch[""videos""][0].astype(jnp.float32) / 255.0\n recon_seq = recon[0].clip(0, 1)\n comparison_seq = jnp.concatenate((gt_seq, recon_seq), axis=1)\n comparison_seq = einops.rearrange(\n comparison_seq * 255, ""t h w c -> h (t w) c""\n )\n if val_results and step % args.val_interval == 0:\n val_results[""gt_seq_val""] = (\n val_results[""gt_batch""][""videos""][0].astype(jnp.float32)\n / 255.0\n )\n val_results[""recon_seq_val""] = val_results[""recon""][0].clip(\n 0, 1\n )\n val_results[""val_comparison_seq""] = jnp.concatenate(\n (val_results[""gt_seq_val""], val_results[""recon_seq_val""]),\n axis=1,\n )\n val_results[""val_comparison_seq""] = einops.rearrange(\n val_results[""val_comparison_seq""] * 255,\n ""t h w c -> h (t w) c"",\n )\n # NOTE: Process-dependent control flow deliberately happens\n # after indexing operation since it must not contain code\n # sections that lead to cross-accelerator communication.\n if jax.process_index() == 0:\n log_images = dict(\n image=wandb.Image(np.asarray(gt_seq[0])),\n recon=wandb.Image(np.asarray(recon_seq[0])),\n true_vs_recon=wandb.Image(\n np.asarray(comparison_seq.astype(np.uint8))\n ),\n )\n if val_results and step % args.val_interval == 0:\n log_images.update(\n dict(\n val_image=wandb.Image(\n np.asarray(val_results[""gt_seq_val""][0])\n ),\n val_recon=wandb.Image(\n np.asarray(val_results[""recon_seq_val""][0])\n ),\n val_true_vs_recon=wandb.Image(\n np.asarray(\n val_results[""val_comparison_seq""].astype(\n np.uint8\n )\n )\n ),\n )\n )\n wandb.log(log_images)\n # --- Checkpointing ---\n if args.save_ckpt and step % args.log_checkpoint_interval == 0:\n assert checkpoint_manager is not None\n optimizer_state = nnx.state(optimizer)\n if val_iterator:\n ckpt_manager_args = ocp.args.Composite(\n model_state=ocp.args.PyTreeSave(optimizer_state), # type: ignore\n train_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n train_iterator # type: ignore\n ),\n val_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n val_iterator # type: ignore\n ),\n )\n else:\n ckpt_manager_args = ocp.args.Composite(\n model_state=ocp.args.PyTreeSave(optimizer_state), # type: ignore\n train_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n train_iterator # type: ignore\n ),\n )\n checkpoint_manager.save(step, args=ckpt_manager_args)\n print(f""Saved 
checkpoint at step {step}"")\n if step >= args.num_steps:\n break\n\n if checkpoint_manager:\n checkpoint_manager.close()",python,selection_command +733,780882,"jasmine/train_tokenizer_appendix-c.py",15988,5091," first_step = step\n while step < args.num_steps:\n for batch in dataloader_train:\n # --- Train step ---\n loss, recon, metrics = train_step(optimizer, batch)\n if step == first_step:\n print_mem_stats(""After params initialized"")\n step += 1\n\n # --- Validation loss ---\n val_results = {}\n\n # --- Logging ---\n if args.log:\n if step % args.log_interval == 0 and jax.process_index() == 0:\n log_dict = {""loss"": loss, ""step"": step, **metrics}\n if val_results:\n log_dict.update(val_results[""metrics""])\n wandb.log(log_dict)\n if step % args.log_image_interval == 0:\n gt_seq = batch[""videos""][0].astype(jnp.float32) / 255.0\n recon_seq = recon[0].clip(0, 1)\n comparison_seq = jnp.concatenate((gt_seq, recon_seq), axis=1)\n comparison_seq = einops.rearrange(\n comparison_seq * 255, ""t h w c -> h (t w) c""\n )\n if val_results and step % args.val_interval == 0:\n val_results[""gt_seq_val""] = (\n val_results[""gt_batch""][""videos""][0].astype(jnp.float32)\n / 255.0\n )\n val_results[""recon_seq_val""] = val_results[""recon""][0].clip(\n 0, 1\n )\n val_results[""val_comparison_seq""] = jnp.concatenate(\n (val_results[""gt_seq_val""], val_results[""recon_seq_val""]),\n axis=1,\n )\n val_results[""val_comparison_seq""] = einops.rearrange(\n val_results[""val_comparison_seq""] * 255,\n ""t h w c -> h (t w) c"",\n )\n # NOTE: Process-dependent control flow deliberately happens\n # after indexing operation since it must not contain code\n # sections that lead to cross-accelerator communication.\n if jax.process_index() == 0:\n log_images = dict(\n image=wandb.Image(np.asarray(gt_seq[0])),\n recon=wandb.Image(np.asarray(recon_seq[0])),\n true_vs_recon=wandb.Image(\n np.asarray(comparison_seq.astype(np.uint8))\n ),\n )\n if val_results and step % args.val_interval == 0:\n log_images.update(\n dict(\n val_image=wandb.Image(\n np.asarray(val_results[""gt_seq_val""][0])\n ),\n val_recon=wandb.Image(\n np.asarray(val_results[""recon_seq_val""][0])\n ),\n val_true_vs_recon=wandb.Image(\n np.asarray(\n val_results[""val_comparison_seq""].astype(\n np.uint8\n )\n )\n ),\n )\n )\n wandb.log(log_images)\n # --- Checkpointing ---\n if args.save_ckpt and step % args.log_checkpoint_interval == 0:\n assert checkpoint_manager is not None\n optimizer_state = nnx.state(optimizer)\n if val_iterator:\n ckpt_manager_args = ocp.args.Composite(\n model_state=ocp.args.PyTreeSave(optimizer_state), # type: ignore\n train_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n train_iterator # type: ignore\n ),\n val_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n val_iterator # type: ignore\n ),\n )\n else:\n ckpt_manager_args = ocp.args.Composite(\n model_state=ocp.args.PyTreeSave(optimizer_state), # type: ignore\n train_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n train_iterator # type: ignore\n ),\n )\n checkpoint_manager.save(step, args=ckpt_manager_args)\n print(f""Saved checkpoint at step {step}"")\n if step >= args.num_steps:\n break\n\n if checkpoint_manager:\n checkpoint_manager.close()\n",python,selection_command +734,780883,"jasmine/train_tokenizer_appendix-c.py",15988,5092," first_step = step\n while step < args.num_steps:\n for batch in dataloader_train:\n # --- Train step ---\n loss, recon, metrics = train_step(optimizer, batch)\n if step == 
first_step:\n print_mem_stats(""After params initialized"")\n step += 1\n\n # --- Validation loss ---\n val_results = {}\n\n # --- Logging ---\n if args.log:\n if step % args.log_interval == 0 and jax.process_index() == 0:\n log_dict = {""loss"": loss, ""step"": step, **metrics}\n if val_results:\n log_dict.update(val_results[""metrics""])\n wandb.log(log_dict)\n if step % args.log_image_interval == 0:\n gt_seq = batch[""videos""][0].astype(jnp.float32) / 255.0\n recon_seq = recon[0].clip(0, 1)\n comparison_seq = jnp.concatenate((gt_seq, recon_seq), axis=1)\n comparison_seq = einops.rearrange(\n comparison_seq * 255, ""t h w c -> h (t w) c""\n )\n if val_results and step % args.val_interval == 0:\n val_results[""gt_seq_val""] = (\n val_results[""gt_batch""][""videos""][0].astype(jnp.float32)\n / 255.0\n )\n val_results[""recon_seq_val""] = val_results[""recon""][0].clip(\n 0, 1\n )\n val_results[""val_comparison_seq""] = jnp.concatenate(\n (val_results[""gt_seq_val""], val_results[""recon_seq_val""]),\n axis=1,\n )\n val_results[""val_comparison_seq""] = einops.rearrange(\n val_results[""val_comparison_seq""] * 255,\n ""t h w c -> h (t w) c"",\n )\n # NOTE: Process-dependent control flow deliberately happens\n # after indexing operation since it must not contain code\n # sections that lead to cross-accelerator communication.\n if jax.process_index() == 0:\n log_images = dict(\n image=wandb.Image(np.asarray(gt_seq[0])),\n recon=wandb.Image(np.asarray(recon_seq[0])),\n true_vs_recon=wandb.Image(\n np.asarray(comparison_seq.astype(np.uint8))\n ),\n )\n if val_results and step % args.val_interval == 0:\n log_images.update(\n dict(\n val_image=wandb.Image(\n np.asarray(val_results[""gt_seq_val""][0])\n ),\n val_recon=wandb.Image(\n np.asarray(val_results[""recon_seq_val""][0])\n ),\n val_true_vs_recon=wandb.Image(\n np.asarray(\n val_results[""val_comparison_seq""].astype(\n np.uint8\n )\n )\n ),\n )\n )\n wandb.log(log_images)\n # --- Checkpointing ---\n if args.save_ckpt and step % args.log_checkpoint_interval == 0:\n assert checkpoint_manager is not None\n optimizer_state = nnx.state(optimizer)\n if val_iterator:\n ckpt_manager_args = ocp.args.Composite(\n model_state=ocp.args.PyTreeSave(optimizer_state), # type: ignore\n train_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n train_iterator # type: ignore\n ),\n val_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n val_iterator # type: ignore\n ),\n )\n else:\n ckpt_manager_args = ocp.args.Composite(\n model_state=ocp.args.PyTreeSave(optimizer_state), # type: ignore\n train_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n train_iterator # type: ignore\n ),\n )\n checkpoint_manager.save(step, args=ckpt_manager_args)\n print(f""Saved checkpoint at step {step}"")\n if step >= args.num_steps:\n break\n\n if checkpoint_manager:\n checkpoint_manager.close()\n\n",python,selection_command +735,780927,"jasmine/train_tokenizer_appendix-c.py",15988,5119," first_step = step\n while step < args.num_steps:\n for batch in dataloader_train:\n # --- Train step ---\n loss, recon, metrics = train_step(optimizer, batch)\n if step == first_step:\n print_mem_stats(""After params initialized"")\n step += 1\n\n # --- Validation loss ---\n val_results = {}\n\n # --- Logging ---\n if args.log:\n if step % args.log_interval == 0 and jax.process_index() == 0:\n log_dict = {""loss"": loss, ""step"": step, **metrics}\n if val_results:\n log_dict.update(val_results[""metrics""])\n wandb.log(log_dict)\n if step % 
args.log_image_interval == 0:\n gt_seq = batch[""videos""][0].astype(jnp.float32) / 255.0\n recon_seq = recon[0].clip(0, 1)\n comparison_seq = jnp.concatenate((gt_seq, recon_seq), axis=1)\n comparison_seq = einops.rearrange(\n comparison_seq * 255, ""t h w c -> h (t w) c""\n )\n if val_results and step % args.val_interval == 0:\n val_results[""gt_seq_val""] = (\n val_results[""gt_batch""][""videos""][0].astype(jnp.float32)\n / 255.0\n )\n val_results[""recon_seq_val""] = val_results[""recon""][0].clip(\n 0, 1\n )\n val_results[""val_comparison_seq""] = jnp.concatenate(\n (val_results[""gt_seq_val""], val_results[""recon_seq_val""]),\n axis=1,\n )\n val_results[""val_comparison_seq""] = einops.rearrange(\n val_results[""val_comparison_seq""] * 255,\n ""t h w c -> h (t w) c"",\n )\n # NOTE: Process-dependent control flow deliberately happens\n # after indexing operation since it must not contain code\n # sections that lead to cross-accelerator communication.\n if jax.process_index() == 0:\n log_images = dict(\n image=wandb.Image(np.asarray(gt_seq[0])),\n recon=wandb.Image(np.asarray(recon_seq[0])),\n true_vs_recon=wandb.Image(\n np.asarray(comparison_seq.astype(np.uint8))\n ),\n )\n if val_results and step % args.val_interval == 0:\n log_images.update(\n dict(\n val_image=wandb.Image(\n np.asarray(val_results[""gt_seq_val""][0])\n ),\n val_recon=wandb.Image(\n np.asarray(val_results[""recon_seq_val""][0])\n ),\n val_true_vs_recon=wandb.Image(\n np.asarray(\n val_results[""val_comparison_seq""].astype(\n np.uint8\n )\n )\n ),\n )\n )\n wandb.log(log_images)\n # --- Checkpointing ---\n if args.save_ckpt and step % args.log_checkpoint_interval == 0:\n assert checkpoint_manager is not None\n optimizer_state = nnx.state(optimizer)\n if val_iterator:\n ckpt_manager_args = ocp.args.Composite(\n model_state=ocp.args.PyTreeSave(optimizer_state), # type: ignore\n train_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n train_iterator # type: ignore\n ),\n val_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n val_iterator # type: ignore\n ),\n )\n else:\n ckpt_manager_args = ocp.args.Composite(\n model_state=ocp.args.PyTreeSave(optimizer_state), # type: ignore\n train_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n train_iterator # type: ignore\n ),\n )\n checkpoint_manager.save(step, args=ckpt_manager_args)\n print(f""Saved checkpoint at step {step}"")\n if step >= args.num_steps:\n break\n\n if checkpoint_manager:\n checkpoint_manager.close()\n\n\nif __name__ == ""__main__"":",python,selection_command +736,780977,"jasmine/train_tokenizer_appendix-c.py",15988,5145," first_step = step\n while step < args.num_steps:\n for batch in dataloader_train:\n # --- Train step ---\n loss, recon, metrics = train_step(optimizer, batch)\n if step == first_step:\n print_mem_stats(""After params initialized"")\n step += 1\n\n # --- Validation loss ---\n val_results = {}\n\n # --- Logging ---\n if args.log:\n if step % args.log_interval == 0 and jax.process_index() == 0:\n log_dict = {""loss"": loss, ""step"": step, **metrics}\n if val_results:\n log_dict.update(val_results[""metrics""])\n wandb.log(log_dict)\n if step % args.log_image_interval == 0:\n gt_seq = batch[""videos""][0].astype(jnp.float32) / 255.0\n recon_seq = recon[0].clip(0, 1)\n comparison_seq = jnp.concatenate((gt_seq, recon_seq), axis=1)\n comparison_seq = einops.rearrange(\n comparison_seq * 255, ""t h w c -> h (t w) c""\n )\n if val_results and step % args.val_interval == 0:\n 
val_results[""gt_seq_val""] = (\n val_results[""gt_batch""][""videos""][0].astype(jnp.float32)\n / 255.0\n )\n val_results[""recon_seq_val""] = val_results[""recon""][0].clip(\n 0, 1\n )\n val_results[""val_comparison_seq""] = jnp.concatenate(\n (val_results[""gt_seq_val""], val_results[""recon_seq_val""]),\n axis=1,\n )\n val_results[""val_comparison_seq""] = einops.rearrange(\n val_results[""val_comparison_seq""] * 255,\n ""t h w c -> h (t w) c"",\n )\n # NOTE: Process-dependent control flow deliberately happens\n # after indexing operation since it must not contain code\n # sections that lead to cross-accelerator communication.\n if jax.process_index() == 0:\n log_images = dict(\n image=wandb.Image(np.asarray(gt_seq[0])),\n recon=wandb.Image(np.asarray(recon_seq[0])),\n true_vs_recon=wandb.Image(\n np.asarray(comparison_seq.astype(np.uint8))\n ),\n )\n if val_results and step % args.val_interval == 0:\n log_images.update(\n dict(\n val_image=wandb.Image(\n np.asarray(val_results[""gt_seq_val""][0])\n ),\n val_recon=wandb.Image(\n np.asarray(val_results[""recon_seq_val""][0])\n ),\n val_true_vs_recon=wandb.Image(\n np.asarray(\n val_results[""val_comparison_seq""].astype(\n np.uint8\n )\n )\n ),\n )\n )\n wandb.log(log_images)\n # --- Checkpointing ---\n if args.save_ckpt and step % args.log_checkpoint_interval == 0:\n assert checkpoint_manager is not None\n optimizer_state = nnx.state(optimizer)\n if val_iterator:\n ckpt_manager_args = ocp.args.Composite(\n model_state=ocp.args.PyTreeSave(optimizer_state), # type: ignore\n train_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n train_iterator # type: ignore\n ),\n val_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n val_iterator # type: ignore\n ),\n )\n else:\n ckpt_manager_args = ocp.args.Composite(\n model_state=ocp.args.PyTreeSave(optimizer_state), # type: ignore\n train_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n train_iterator # type: ignore\n ),\n )\n checkpoint_manager.save(step, args=ckpt_manager_args)\n print(f""Saved checkpoint at step {step}"")\n if step >= args.num_steps:\n break\n\n if checkpoint_manager:\n checkpoint_manager.close()\n\n\nif __name__ == ""__main__"":\n args = tyro.cli(Args)",python,selection_command +737,780985,"jasmine/train_tokenizer_appendix-c.py",15988,5160," first_step = step\n while step < args.num_steps:\n for batch in dataloader_train:\n # --- Train step ---\n loss, recon, metrics = train_step(optimizer, batch)\n if step == first_step:\n print_mem_stats(""After params initialized"")\n step += 1\n\n # --- Validation loss ---\n val_results = {}\n\n # --- Logging ---\n if args.log:\n if step % args.log_interval == 0 and jax.process_index() == 0:\n log_dict = {""loss"": loss, ""step"": step, **metrics}\n if val_results:\n log_dict.update(val_results[""metrics""])\n wandb.log(log_dict)\n if step % args.log_image_interval == 0:\n gt_seq = batch[""videos""][0].astype(jnp.float32) / 255.0\n recon_seq = recon[0].clip(0, 1)\n comparison_seq = jnp.concatenate((gt_seq, recon_seq), axis=1)\n comparison_seq = einops.rearrange(\n comparison_seq * 255, ""t h w c -> h (t w) c""\n )\n if val_results and step % args.val_interval == 0:\n val_results[""gt_seq_val""] = (\n val_results[""gt_batch""][""videos""][0].astype(jnp.float32)\n / 255.0\n )\n val_results[""recon_seq_val""] = val_results[""recon""][0].clip(\n 0, 1\n )\n val_results[""val_comparison_seq""] = jnp.concatenate(\n (val_results[""gt_seq_val""], val_results[""recon_seq_val""]),\n axis=1,\n )\n 
val_results[""val_comparison_seq""] = einops.rearrange(\n val_results[""val_comparison_seq""] * 255,\n ""t h w c -> h (t w) c"",\n )\n # NOTE: Process-dependent control flow deliberately happens\n # after indexing operation since it must not contain code\n # sections that lead to cross-accelerator communication.\n if jax.process_index() == 0:\n log_images = dict(\n image=wandb.Image(np.asarray(gt_seq[0])),\n recon=wandb.Image(np.asarray(recon_seq[0])),\n true_vs_recon=wandb.Image(\n np.asarray(comparison_seq.astype(np.uint8))\n ),\n )\n if val_results and step % args.val_interval == 0:\n log_images.update(\n dict(\n val_image=wandb.Image(\n np.asarray(val_results[""gt_seq_val""][0])\n ),\n val_recon=wandb.Image(\n np.asarray(val_results[""recon_seq_val""][0])\n ),\n val_true_vs_recon=wandb.Image(\n np.asarray(\n val_results[""val_comparison_seq""].astype(\n np.uint8\n )\n )\n ),\n )\n )\n wandb.log(log_images)\n # --- Checkpointing ---\n if args.save_ckpt and step % args.log_checkpoint_interval == 0:\n assert checkpoint_manager is not None\n optimizer_state = nnx.state(optimizer)\n if val_iterator:\n ckpt_manager_args = ocp.args.Composite(\n model_state=ocp.args.PyTreeSave(optimizer_state), # type: ignore\n train_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n train_iterator # type: ignore\n ),\n val_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n val_iterator # type: ignore\n ),\n )\n else:\n ckpt_manager_args = ocp.args.Composite(\n model_state=ocp.args.PyTreeSave(optimizer_state), # type: ignore\n train_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n train_iterator # type: ignore\n ),\n )\n checkpoint_manager.save(step, args=ckpt_manager_args)\n print(f""Saved checkpoint at step {step}"")\n if step >= args.num_steps:\n break\n\n if checkpoint_manager:\n checkpoint_manager.close()\n\n\nif __name__ == ""__main__"":\n args = tyro.cli(Args)\n main(args)",python,selection_command +738,781113,"jasmine/train_tokenizer_appendix-c.py",15988,5145," first_step = step\n while step < args.num_steps:\n for batch in dataloader_train:\n # --- Train step ---\n loss, recon, metrics = train_step(optimizer, batch)\n if step == first_step:\n print_mem_stats(""After params initialized"")\n step += 1\n\n # --- Validation loss ---\n val_results = {}\n\n # --- Logging ---\n if args.log:\n if step % args.log_interval == 0 and jax.process_index() == 0:\n log_dict = {""loss"": loss, ""step"": step, **metrics}\n if val_results:\n log_dict.update(val_results[""metrics""])\n wandb.log(log_dict)\n if step % args.log_image_interval == 0:\n gt_seq = batch[""videos""][0].astype(jnp.float32) / 255.0\n recon_seq = recon[0].clip(0, 1)\n comparison_seq = jnp.concatenate((gt_seq, recon_seq), axis=1)\n comparison_seq = einops.rearrange(\n comparison_seq * 255, ""t h w c -> h (t w) c""\n )\n if val_results and step % args.val_interval == 0:\n val_results[""gt_seq_val""] = (\n val_results[""gt_batch""][""videos""][0].astype(jnp.float32)\n / 255.0\n )\n val_results[""recon_seq_val""] = val_results[""recon""][0].clip(\n 0, 1\n )\n val_results[""val_comparison_seq""] = jnp.concatenate(\n (val_results[""gt_seq_val""], val_results[""recon_seq_val""]),\n axis=1,\n )\n val_results[""val_comparison_seq""] = einops.rearrange(\n val_results[""val_comparison_seq""] * 255,\n ""t h w c -> h (t w) c"",\n )\n # NOTE: Process-dependent control flow deliberately happens\n # after indexing operation since it must not contain code\n # sections that lead to cross-accelerator communication.\n 
if jax.process_index() == 0:\n log_images = dict(\n image=wandb.Image(np.asarray(gt_seq[0])),\n recon=wandb.Image(np.asarray(recon_seq[0])),\n true_vs_recon=wandb.Image(\n np.asarray(comparison_seq.astype(np.uint8))\n ),\n )\n if val_results and step % args.val_interval == 0:\n log_images.update(\n dict(\n val_image=wandb.Image(\n np.asarray(val_results[""gt_seq_val""][0])\n ),\n val_recon=wandb.Image(\n np.asarray(val_results[""recon_seq_val""][0])\n ),\n val_true_vs_recon=wandb.Image(\n np.asarray(\n val_results[""val_comparison_seq""].astype(\n np.uint8\n )\n )\n ),\n )\n )\n wandb.log(log_images)\n # --- Checkpointing ---\n if args.save_ckpt and step % args.log_checkpoint_interval == 0:\n assert checkpoint_manager is not None\n optimizer_state = nnx.state(optimizer)\n if val_iterator:\n ckpt_manager_args = ocp.args.Composite(\n model_state=ocp.args.PyTreeSave(optimizer_state), # type: ignore\n train_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n train_iterator # type: ignore\n ),\n val_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n val_iterator # type: ignore\n ),\n )\n else:\n ckpt_manager_args = ocp.args.Composite(\n model_state=ocp.args.PyTreeSave(optimizer_state), # type: ignore\n train_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n train_iterator # type: ignore\n ),\n )\n checkpoint_manager.save(step, args=ckpt_manager_args)\n print(f""Saved checkpoint at step {step}"")\n if step >= args.num_steps:\n break\n\n if checkpoint_manager:\n checkpoint_manager.close()\n\n\nif __name__ == ""__main__"":\n args = tyro.cli(Args)",python,selection_command +739,781250,"TERMINAL",0,0,"6",,terminal_output +740,781691,"jasmine/train_tokenizer_appendix-c.py",15988,5119," first_step = step\n while step < args.num_steps:\n for batch in dataloader_train:\n # --- Train step ---\n loss, recon, metrics = train_step(optimizer, batch)\n if step == first_step:\n print_mem_stats(""After params initialized"")\n step += 1\n\n # --- Validation loss ---\n val_results = {}\n\n # --- Logging ---\n if args.log:\n if step % args.log_interval == 0 and jax.process_index() == 0:\n log_dict = {""loss"": loss, ""step"": step, **metrics}\n if val_results:\n log_dict.update(val_results[""metrics""])\n wandb.log(log_dict)\n if step % args.log_image_interval == 0:\n gt_seq = batch[""videos""][0].astype(jnp.float32) / 255.0\n recon_seq = recon[0].clip(0, 1)\n comparison_seq = jnp.concatenate((gt_seq, recon_seq), axis=1)\n comparison_seq = einops.rearrange(\n comparison_seq * 255, ""t h w c -> h (t w) c""\n )\n if val_results and step % args.val_interval == 0:\n val_results[""gt_seq_val""] = (\n val_results[""gt_batch""][""videos""][0].astype(jnp.float32)\n / 255.0\n )\n val_results[""recon_seq_val""] = val_results[""recon""][0].clip(\n 0, 1\n )\n val_results[""val_comparison_seq""] = jnp.concatenate(\n (val_results[""gt_seq_val""], val_results[""recon_seq_val""]),\n axis=1,\n )\n val_results[""val_comparison_seq""] = einops.rearrange(\n val_results[""val_comparison_seq""] * 255,\n ""t h w c -> h (t w) c"",\n )\n # NOTE: Process-dependent control flow deliberately happens\n # after indexing operation since it must not contain code\n # sections that lead to cross-accelerator communication.\n if jax.process_index() == 0:\n log_images = dict(\n image=wandb.Image(np.asarray(gt_seq[0])),\n recon=wandb.Image(np.asarray(recon_seq[0])),\n true_vs_recon=wandb.Image(\n np.asarray(comparison_seq.astype(np.uint8))\n ),\n )\n if val_results and step % args.val_interval == 0:\n 
log_images.update(\n dict(\n val_image=wandb.Image(\n np.asarray(val_results[""gt_seq_val""][0])\n ),\n val_recon=wandb.Image(\n np.asarray(val_results[""recon_seq_val""][0])\n ),\n val_true_vs_recon=wandb.Image(\n np.asarray(\n val_results[""val_comparison_seq""].astype(\n np.uint8\n )\n )\n ),\n )\n )\n wandb.log(log_images)\n # --- Checkpointing ---\n if args.save_ckpt and step % args.log_checkpoint_interval == 0:\n assert checkpoint_manager is not None\n optimizer_state = nnx.state(optimizer)\n if val_iterator:\n ckpt_manager_args = ocp.args.Composite(\n model_state=ocp.args.PyTreeSave(optimizer_state), # type: ignore\n train_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n train_iterator # type: ignore\n ),\n val_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n val_iterator # type: ignore\n ),\n )\n else:\n ckpt_manager_args = ocp.args.Composite(\n model_state=ocp.args.PyTreeSave(optimizer_state), # type: ignore\n train_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n train_iterator # type: ignore\n ),\n )\n checkpoint_manager.save(step, args=ckpt_manager_args)\n print(f""Saved checkpoint at step {step}"")\n if step >= args.num_steps:\n break\n\n if checkpoint_manager:\n checkpoint_manager.close()\n\n\nif __name__ == ""__main__"":",python,selection_command +741,781692,"jasmine/train_tokenizer_appendix-c.py",15988,5092," first_step = step\n while step < args.num_steps:\n for batch in dataloader_train:\n # --- Train step ---\n loss, recon, metrics = train_step(optimizer, batch)\n if step == first_step:\n print_mem_stats(""After params initialized"")\n step += 1\n\n # --- Validation loss ---\n val_results = {}\n\n # --- Logging ---\n if args.log:\n if step % args.log_interval == 0 and jax.process_index() == 0:\n log_dict = {""loss"": loss, ""step"": step, **metrics}\n if val_results:\n log_dict.update(val_results[""metrics""])\n wandb.log(log_dict)\n if step % args.log_image_interval == 0:\n gt_seq = batch[""videos""][0].astype(jnp.float32) / 255.0\n recon_seq = recon[0].clip(0, 1)\n comparison_seq = jnp.concatenate((gt_seq, recon_seq), axis=1)\n comparison_seq = einops.rearrange(\n comparison_seq * 255, ""t h w c -> h (t w) c""\n )\n if val_results and step % args.val_interval == 0:\n val_results[""gt_seq_val""] = (\n val_results[""gt_batch""][""videos""][0].astype(jnp.float32)\n / 255.0\n )\n val_results[""recon_seq_val""] = val_results[""recon""][0].clip(\n 0, 1\n )\n val_results[""val_comparison_seq""] = jnp.concatenate(\n (val_results[""gt_seq_val""], val_results[""recon_seq_val""]),\n axis=1,\n )\n val_results[""val_comparison_seq""] = einops.rearrange(\n val_results[""val_comparison_seq""] * 255,\n ""t h w c -> h (t w) c"",\n )\n # NOTE: Process-dependent control flow deliberately happens\n # after indexing operation since it must not contain code\n # sections that lead to cross-accelerator communication.\n if jax.process_index() == 0:\n log_images = dict(\n image=wandb.Image(np.asarray(gt_seq[0])),\n recon=wandb.Image(np.asarray(recon_seq[0])),\n true_vs_recon=wandb.Image(\n np.asarray(comparison_seq.astype(np.uint8))\n ),\n )\n if val_results and step % args.val_interval == 0:\n log_images.update(\n dict(\n val_image=wandb.Image(\n np.asarray(val_results[""gt_seq_val""][0])\n ),\n val_recon=wandb.Image(\n np.asarray(val_results[""recon_seq_val""][0])\n ),\n val_true_vs_recon=wandb.Image(\n np.asarray(\n val_results[""val_comparison_seq""].astype(\n np.uint8\n )\n )\n ),\n )\n )\n wandb.log(log_images)\n # --- Checkpointing 
---\n if args.save_ckpt and step % args.log_checkpoint_interval == 0:\n assert checkpoint_manager is not None\n optimizer_state = nnx.state(optimizer)\n if val_iterator:\n ckpt_manager_args = ocp.args.Composite(\n model_state=ocp.args.PyTreeSave(optimizer_state), # type: ignore\n train_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n train_iterator # type: ignore\n ),\n val_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n val_iterator # type: ignore\n ),\n )\n else:\n ckpt_manager_args = ocp.args.Composite(\n model_state=ocp.args.PyTreeSave(optimizer_state), # type: ignore\n train_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n train_iterator # type: ignore\n ),\n )\n checkpoint_manager.save(step, args=ckpt_manager_args)\n print(f""Saved checkpoint at step {step}"")\n if step >= args.num_steps:\n break\n\n if checkpoint_manager:\n checkpoint_manager.close()\n\n",python,selection_command +742,781693,"jasmine/train_tokenizer_appendix-c.py",15988,5091," first_step = step\n while step < args.num_steps:\n for batch in dataloader_train:\n # --- Train step ---\n loss, recon, metrics = train_step(optimizer, batch)\n if step == first_step:\n print_mem_stats(""After params initialized"")\n step += 1\n\n # --- Validation loss ---\n val_results = {}\n\n # --- Logging ---\n if args.log:\n if step % args.log_interval == 0 and jax.process_index() == 0:\n log_dict = {""loss"": loss, ""step"": step, **metrics}\n if val_results:\n log_dict.update(val_results[""metrics""])\n wandb.log(log_dict)\n if step % args.log_image_interval == 0:\n gt_seq = batch[""videos""][0].astype(jnp.float32) / 255.0\n recon_seq = recon[0].clip(0, 1)\n comparison_seq = jnp.concatenate((gt_seq, recon_seq), axis=1)\n comparison_seq = einops.rearrange(\n comparison_seq * 255, ""t h w c -> h (t w) c""\n )\n if val_results and step % args.val_interval == 0:\n val_results[""gt_seq_val""] = (\n val_results[""gt_batch""][""videos""][0].astype(jnp.float32)\n / 255.0\n )\n val_results[""recon_seq_val""] = val_results[""recon""][0].clip(\n 0, 1\n )\n val_results[""val_comparison_seq""] = jnp.concatenate(\n (val_results[""gt_seq_val""], val_results[""recon_seq_val""]),\n axis=1,\n )\n val_results[""val_comparison_seq""] = einops.rearrange(\n val_results[""val_comparison_seq""] * 255,\n ""t h w c -> h (t w) c"",\n )\n # NOTE: Process-dependent control flow deliberately happens\n # after indexing operation since it must not contain code\n # sections that lead to cross-accelerator communication.\n if jax.process_index() == 0:\n log_images = dict(\n image=wandb.Image(np.asarray(gt_seq[0])),\n recon=wandb.Image(np.asarray(recon_seq[0])),\n true_vs_recon=wandb.Image(\n np.asarray(comparison_seq.astype(np.uint8))\n ),\n )\n if val_results and step % args.val_interval == 0:\n log_images.update(\n dict(\n val_image=wandb.Image(\n np.asarray(val_results[""gt_seq_val""][0])\n ),\n val_recon=wandb.Image(\n np.asarray(val_results[""recon_seq_val""][0])\n ),\n val_true_vs_recon=wandb.Image(\n np.asarray(\n val_results[""val_comparison_seq""].astype(\n np.uint8\n )\n )\n ),\n )\n )\n wandb.log(log_images)\n # --- Checkpointing ---\n if args.save_ckpt and step % args.log_checkpoint_interval == 0:\n assert checkpoint_manager is not None\n optimizer_state = nnx.state(optimizer)\n if val_iterator:\n ckpt_manager_args = ocp.args.Composite(\n model_state=ocp.args.PyTreeSave(optimizer_state), # type: ignore\n train_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n train_iterator # type: 
ignore\n ),\n val_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n val_iterator # type: ignore\n ),\n )\n else:\n ckpt_manager_args = ocp.args.Composite(\n model_state=ocp.args.PyTreeSave(optimizer_state), # type: ignore\n train_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n train_iterator # type: ignore\n ),\n )\n checkpoint_manager.save(step, args=ckpt_manager_args)\n print(f""Saved checkpoint at step {step}"")\n if step >= args.num_steps:\n break\n\n if checkpoint_manager:\n checkpoint_manager.close()\n",python,selection_command +743,781706,"jasmine/train_tokenizer_appendix-c.py",15988,5090," first_step = step\n while step < args.num_steps:\n for batch in dataloader_train:\n # --- Train step ---\n loss, recon, metrics = train_step(optimizer, batch)\n if step == first_step:\n print_mem_stats(""After params initialized"")\n step += 1\n\n # --- Validation loss ---\n val_results = {}\n\n # --- Logging ---\n if args.log:\n if step % args.log_interval == 0 and jax.process_index() == 0:\n log_dict = {""loss"": loss, ""step"": step, **metrics}\n if val_results:\n log_dict.update(val_results[""metrics""])\n wandb.log(log_dict)\n if step % args.log_image_interval == 0:\n gt_seq = batch[""videos""][0].astype(jnp.float32) / 255.0\n recon_seq = recon[0].clip(0, 1)\n comparison_seq = jnp.concatenate((gt_seq, recon_seq), axis=1)\n comparison_seq = einops.rearrange(\n comparison_seq * 255, ""t h w c -> h (t w) c""\n )\n if val_results and step % args.val_interval == 0:\n val_results[""gt_seq_val""] = (\n val_results[""gt_batch""][""videos""][0].astype(jnp.float32)\n / 255.0\n )\n val_results[""recon_seq_val""] = val_results[""recon""][0].clip(\n 0, 1\n )\n val_results[""val_comparison_seq""] = jnp.concatenate(\n (val_results[""gt_seq_val""], val_results[""recon_seq_val""]),\n axis=1,\n )\n val_results[""val_comparison_seq""] = einops.rearrange(\n val_results[""val_comparison_seq""] * 255,\n ""t h w c -> h (t w) c"",\n )\n # NOTE: Process-dependent control flow deliberately happens\n # after indexing operation since it must not contain code\n # sections that lead to cross-accelerator communication.\n if jax.process_index() == 0:\n log_images = dict(\n image=wandb.Image(np.asarray(gt_seq[0])),\n recon=wandb.Image(np.asarray(recon_seq[0])),\n true_vs_recon=wandb.Image(\n np.asarray(comparison_seq.astype(np.uint8))\n ),\n )\n if val_results and step % args.val_interval == 0:\n log_images.update(\n dict(\n val_image=wandb.Image(\n np.asarray(val_results[""gt_seq_val""][0])\n ),\n val_recon=wandb.Image(\n np.asarray(val_results[""recon_seq_val""][0])\n ),\n val_true_vs_recon=wandb.Image(\n np.asarray(\n val_results[""val_comparison_seq""].astype(\n np.uint8\n )\n )\n ),\n )\n )\n wandb.log(log_images)\n # --- Checkpointing ---\n if args.save_ckpt and step % args.log_checkpoint_interval == 0:\n assert checkpoint_manager is not None\n optimizer_state = nnx.state(optimizer)\n if val_iterator:\n ckpt_manager_args = ocp.args.Composite(\n model_state=ocp.args.PyTreeSave(optimizer_state), # type: ignore\n train_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n train_iterator # type: ignore\n ),\n val_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n val_iterator # type: ignore\n ),\n )\n else:\n ckpt_manager_args = ocp.args.Composite(\n model_state=ocp.args.PyTreeSave(optimizer_state), # type: ignore\n train_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n train_iterator # type: ignore\n ),\n )\n checkpoint_manager.save(step, 
args=ckpt_manager_args)\n print(f""Saved checkpoint at step {step}"")\n if step >= args.num_steps:\n break\n\n if checkpoint_manager:\n checkpoint_manager.close()",python,selection_command +744,781721,"jasmine/train_tokenizer_appendix-c.py",15988,5055," first_step = step\n while step < args.num_steps:\n for batch in dataloader_train:\n # --- Train step ---\n loss, recon, metrics = train_step(optimizer, batch)\n if step == first_step:\n print_mem_stats(""After params initialized"")\n step += 1\n\n # --- Validation loss ---\n val_results = {}\n\n # --- Logging ---\n if args.log:\n if step % args.log_interval == 0 and jax.process_index() == 0:\n log_dict = {""loss"": loss, ""step"": step, **metrics}\n if val_results:\n log_dict.update(val_results[""metrics""])\n wandb.log(log_dict)\n if step % args.log_image_interval == 0:\n gt_seq = batch[""videos""][0].astype(jnp.float32) / 255.0\n recon_seq = recon[0].clip(0, 1)\n comparison_seq = jnp.concatenate((gt_seq, recon_seq), axis=1)\n comparison_seq = einops.rearrange(\n comparison_seq * 255, ""t h w c -> h (t w) c""\n )\n if val_results and step % args.val_interval == 0:\n val_results[""gt_seq_val""] = (\n val_results[""gt_batch""][""videos""][0].astype(jnp.float32)\n / 255.0\n )\n val_results[""recon_seq_val""] = val_results[""recon""][0].clip(\n 0, 1\n )\n val_results[""val_comparison_seq""] = jnp.concatenate(\n (val_results[""gt_seq_val""], val_results[""recon_seq_val""]),\n axis=1,\n )\n val_results[""val_comparison_seq""] = einops.rearrange(\n val_results[""val_comparison_seq""] * 255,\n ""t h w c -> h (t w) c"",\n )\n # NOTE: Process-dependent control flow deliberately happens\n # after indexing operation since it must not contain code\n # sections that lead to cross-accelerator communication.\n if jax.process_index() == 0:\n log_images = dict(\n image=wandb.Image(np.asarray(gt_seq[0])),\n recon=wandb.Image(np.asarray(recon_seq[0])),\n true_vs_recon=wandb.Image(\n np.asarray(comparison_seq.astype(np.uint8))\n ),\n )\n if val_results and step % args.val_interval == 0:\n log_images.update(\n dict(\n val_image=wandb.Image(\n np.asarray(val_results[""gt_seq_val""][0])\n ),\n val_recon=wandb.Image(\n np.asarray(val_results[""recon_seq_val""][0])\n ),\n val_true_vs_recon=wandb.Image(\n np.asarray(\n val_results[""val_comparison_seq""].astype(\n np.uint8\n )\n )\n ),\n )\n )\n wandb.log(log_images)\n # --- Checkpointing ---\n if args.save_ckpt and step % args.log_checkpoint_interval == 0:\n assert checkpoint_manager is not None\n optimizer_state = nnx.state(optimizer)\n if val_iterator:\n ckpt_manager_args = ocp.args.Composite(\n model_state=ocp.args.PyTreeSave(optimizer_state), # type: ignore\n train_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n train_iterator # type: ignore\n ),\n val_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n val_iterator # type: ignore\n ),\n )\n else:\n ckpt_manager_args = ocp.args.Composite(\n model_state=ocp.args.PyTreeSave(optimizer_state), # type: ignore\n train_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n train_iterator # type: ignore\n ),\n )\n checkpoint_manager.save(step, args=ckpt_manager_args)\n print(f""Saved checkpoint at step {step}"")\n if step >= args.num_steps:\n break\n\n if checkpoint_manager:",python,selection_command +745,781780,"jasmine/train_tokenizer_appendix-c.py",15988,5028," first_step = step\n while step < args.num_steps:\n for batch in dataloader_train:\n # --- Train step ---\n loss, recon, metrics = train_step(optimizer, batch)\n 
if step == first_step:\n print_mem_stats(""After params initialized"")\n step += 1\n\n # --- Validation loss ---\n val_results = {}\n\n # --- Logging ---\n if args.log:\n if step % args.log_interval == 0 and jax.process_index() == 0:\n log_dict = {""loss"": loss, ""step"": step, **metrics}\n if val_results:\n log_dict.update(val_results[""metrics""])\n wandb.log(log_dict)\n if step % args.log_image_interval == 0:\n gt_seq = batch[""videos""][0].astype(jnp.float32) / 255.0\n recon_seq = recon[0].clip(0, 1)\n comparison_seq = jnp.concatenate((gt_seq, recon_seq), axis=1)\n comparison_seq = einops.rearrange(\n comparison_seq * 255, ""t h w c -> h (t w) c""\n )\n if val_results and step % args.val_interval == 0:\n val_results[""gt_seq_val""] = (\n val_results[""gt_batch""][""videos""][0].astype(jnp.float32)\n / 255.0\n )\n val_results[""recon_seq_val""] = val_results[""recon""][0].clip(\n 0, 1\n )\n val_results[""val_comparison_seq""] = jnp.concatenate(\n (val_results[""gt_seq_val""], val_results[""recon_seq_val""]),\n axis=1,\n )\n val_results[""val_comparison_seq""] = einops.rearrange(\n val_results[""val_comparison_seq""] * 255,\n ""t h w c -> h (t w) c"",\n )\n # NOTE: Process-dependent control flow deliberately happens\n # after indexing operation since it must not contain code\n # sections that lead to cross-accelerator communication.\n if jax.process_index() == 0:\n log_images = dict(\n image=wandb.Image(np.asarray(gt_seq[0])),\n recon=wandb.Image(np.asarray(recon_seq[0])),\n true_vs_recon=wandb.Image(\n np.asarray(comparison_seq.astype(np.uint8))\n ),\n )\n if val_results and step % args.val_interval == 0:\n log_images.update(\n dict(\n val_image=wandb.Image(\n np.asarray(val_results[""gt_seq_val""][0])\n ),\n val_recon=wandb.Image(\n np.asarray(val_results[""recon_seq_val""][0])\n ),\n val_true_vs_recon=wandb.Image(\n np.asarray(\n val_results[""val_comparison_seq""].astype(\n np.uint8\n )\n )\n ),\n )\n )\n wandb.log(log_images)\n # --- Checkpointing ---\n if args.save_ckpt and step % args.log_checkpoint_interval == 0:\n assert checkpoint_manager is not None\n optimizer_state = nnx.state(optimizer)\n if val_iterator:\n ckpt_manager_args = ocp.args.Composite(\n model_state=ocp.args.PyTreeSave(optimizer_state), # type: ignore\n train_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n train_iterator # type: ignore\n ),\n val_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n val_iterator # type: ignore\n ),\n )\n else:\n ckpt_manager_args = ocp.args.Composite(\n model_state=ocp.args.PyTreeSave(optimizer_state), # type: ignore\n train_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n train_iterator # type: ignore\n ),\n )\n checkpoint_manager.save(step, args=ckpt_manager_args)\n print(f""Saved checkpoint at step {step}"")\n if step >= args.num_steps:\n break\n",python,selection_command +746,781902,"jasmine/train_tokenizer_appendix-c.py",15988,5027," first_step = step\n while step < args.num_steps:\n for batch in dataloader_train:\n # --- Train step ---\n loss, recon, metrics = train_step(optimizer, batch)\n if step == first_step:\n print_mem_stats(""After params initialized"")\n step += 1\n\n # --- Validation loss ---\n val_results = {}\n\n # --- Logging ---\n if args.log:\n if step % args.log_interval == 0 and jax.process_index() == 0:\n log_dict = {""loss"": loss, ""step"": step, **metrics}\n if val_results:\n log_dict.update(val_results[""metrics""])\n wandb.log(log_dict)\n if step % args.log_image_interval == 0:\n gt_seq = 
batch[""videos""][0].astype(jnp.float32) / 255.0\n recon_seq = recon[0].clip(0, 1)\n comparison_seq = jnp.concatenate((gt_seq, recon_seq), axis=1)\n comparison_seq = einops.rearrange(\n comparison_seq * 255, ""t h w c -> h (t w) c""\n )\n if val_results and step % args.val_interval == 0:\n val_results[""gt_seq_val""] = (\n val_results[""gt_batch""][""videos""][0].astype(jnp.float32)\n / 255.0\n )\n val_results[""recon_seq_val""] = val_results[""recon""][0].clip(\n 0, 1\n )\n val_results[""val_comparison_seq""] = jnp.concatenate(\n (val_results[""gt_seq_val""], val_results[""recon_seq_val""]),\n axis=1,\n )\n val_results[""val_comparison_seq""] = einops.rearrange(\n val_results[""val_comparison_seq""] * 255,\n ""t h w c -> h (t w) c"",\n )\n # NOTE: Process-dependent control flow deliberately happens\n # after indexing operation since it must not contain code\n # sections that lead to cross-accelerator communication.\n if jax.process_index() == 0:\n log_images = dict(\n image=wandb.Image(np.asarray(gt_seq[0])),\n recon=wandb.Image(np.asarray(recon_seq[0])),\n true_vs_recon=wandb.Image(\n np.asarray(comparison_seq.astype(np.uint8))\n ),\n )\n if val_results and step % args.val_interval == 0:\n log_images.update(\n dict(\n val_image=wandb.Image(\n np.asarray(val_results[""gt_seq_val""][0])\n ),\n val_recon=wandb.Image(\n np.asarray(val_results[""recon_seq_val""][0])\n ),\n val_true_vs_recon=wandb.Image(\n np.asarray(\n val_results[""val_comparison_seq""].astype(\n np.uint8\n )\n )\n ),\n )\n )\n wandb.log(log_images)\n # --- Checkpointing ---\n if args.save_ckpt and step % args.log_checkpoint_interval == 0:\n assert checkpoint_manager is not None\n optimizer_state = nnx.state(optimizer)\n if val_iterator:\n ckpt_manager_args = ocp.args.Composite(\n model_state=ocp.args.PyTreeSave(optimizer_state), # type: ignore\n train_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n train_iterator # type: ignore\n ),\n val_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n val_iterator # type: ignore\n ),\n )\n else:\n ckpt_manager_args = ocp.args.Composite(\n model_state=ocp.args.PyTreeSave(optimizer_state), # type: ignore\n train_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n train_iterator # type: ignore\n ),\n )\n checkpoint_manager.save(step, args=ckpt_manager_args)\n print(f""Saved checkpoint at step {step}"")\n if step >= args.num_steps:\n break",python,selection_command +747,782336,"TERMINAL",0,0,"7",,terminal_output +748,782676,"jasmine/train_tokenizer_appendix-c.py",15988,5028,"",python,content +749,783307,"TERMINAL",0,0,"8",,terminal_output +750,784395,"TERMINAL",0,0,"9",,terminal_output +751,785339,"jasmine/train_tokenizer_appendix-c.py",15865,0,"",python,selection_mouse +752,785377,"TERMINAL",0,0,"30",,terminal_output +753,785954,"jasmine/train_tokenizer_appendix-c.py",15963,0,"",python,selection_mouse +754,786348,"TERMINAL",0,0,"1",,terminal_output +755,786639,"jasmine/train_tokenizer_appendix-c.py",15865,0,"",python,selection_mouse +756,787066,"jasmine/train_tokenizer_appendix-c.py",15795,0,"",python,selection_mouse +757,787378,"TERMINAL",0,0,"2",,terminal_output +758,788380,"jasmine/train_tokenizer_appendix-c.py",15847,0,"\n ",python,content +759,788387,"TERMINAL",0,0,"3",,terminal_output +760,788593,"jasmine/train_tokenizer_appendix-c.py",15856,0,"p",python,content +761,788595,"jasmine/train_tokenizer_appendix-c.py",15857,0,"",python,selection_keyboard +762,788752,"jasmine/train_tokenizer_appendix-c.py",15857,0,"r",python,content 
+763,788754,"jasmine/train_tokenizer_appendix-c.py",15858,0,"",python,selection_keyboard +764,788848,"jasmine/train_tokenizer_appendix-c.py",15858,0,"i",python,content +765,788849,"jasmine/train_tokenizer_appendix-c.py",15859,0,"",python,selection_keyboard +766,788917,"jasmine/train_tokenizer_appendix-c.py",15859,0,"n",python,content +767,788919,"jasmine/train_tokenizer_appendix-c.py",15860,0,"",python,selection_keyboard +768,788989,"jasmine/train_tokenizer_appendix-c.py",15860,0,"t",python,content +769,788990,"jasmine/train_tokenizer_appendix-c.py",15861,0,"",python,selection_keyboard +770,789492,"TERMINAL",0,0,"4",,terminal_output +771,789755,"jasmine/train_tokenizer_appendix-c.py",15861,0,"()",python,content +772,789756,"jasmine/train_tokenizer_appendix-c.py",15862,0,"",python,selection_keyboard +773,790458,"TERMINAL",0,0,"5",,terminal_output +774,791496,"TERMINAL",0,0,"6",,terminal_output +775,791737,"jasmine/train_tokenizer_appendix-c.py",15862,0,"v",python,content +776,791738,"jasmine/train_tokenizer_appendix-c.py",15863,0,"",python,selection_keyboard +777,791860,"jasmine/train_tokenizer_appendix-c.py",15863,0,"a",python,content +778,791861,"jasmine/train_tokenizer_appendix-c.py",15864,0,"",python,selection_keyboard +779,791903,"jasmine/train_tokenizer_appendix-c.py",15864,0,"l",python,content +780,791904,"jasmine/train_tokenizer_appendix-c.py",15865,0,"",python,selection_keyboard +781,792273,"jasmine/train_tokenizer_appendix-c.py",15865,0,"_",python,content +782,792274,"jasmine/train_tokenizer_appendix-c.py",15866,0,"",python,selection_keyboard +783,792567,"TERMINAL",0,0,"7",,terminal_output +784,792572,"jasmine/train_tokenizer_appendix-c.py",15866,0,"m",python,content +785,792573,"jasmine/train_tokenizer_appendix-c.py",15867,0,"",python,selection_keyboard +786,792828,"jasmine/train_tokenizer_appendix-c.py",15867,0,"e",python,content +787,792829,"jasmine/train_tokenizer_appendix-c.py",15868,0,"",python,selection_keyboard +788,793013,"jasmine/train_tokenizer_appendix-c.py",15868,0,"t",python,content +789,793014,"jasmine/train_tokenizer_appendix-c.py",15869,0,"",python,selection_keyboard +790,793115,"jasmine/train_tokenizer_appendix-c.py",15869,0,"r",python,content +791,793116,"jasmine/train_tokenizer_appendix-c.py",15870,0,"",python,selection_keyboard +792,793344,"jasmine/train_tokenizer_appendix-c.py",15870,0,"c",python,content +793,793345,"jasmine/train_tokenizer_appendix-c.py",15871,0,"",python,selection_keyboard +794,793527,"TERMINAL",0,0,"8",,terminal_output +795,793586,"jasmine/train_tokenizer_appendix-c.py",15862,9,"val_metrics",python,content +796,794337,"jasmine/train_tokenizer_appendix-c.py",15897,0,"",python,selection_mouse +797,794512,"TERMINAL",0,0,"40",,terminal_output +798,794900,"jasmine/train_tokenizer_appendix-c.py",15896,0,"",python,selection_command +799,795506,"jasmine/train_tokenizer_appendix-c.py",15875,60,"",python,content +800,795521,"jasmine/train_tokenizer_appendix-c.py",15887,0,"",python,selection_command +801,795524,"TERMINAL",0,0,"1",,terminal_output +802,796026,"jasmine/train_tokenizer_appendix-c.py",15875,38,"",python,content +803,796036,"jasmine/train_tokenizer_appendix-c.py",15887,0,"",python,selection_command +804,796436,"jasmine/train_tokenizer_appendix-c.py",15875,32,"",python,content +805,796440,"jasmine/train_tokenizer_appendix-c.py",15883,0,"",python,selection_command +806,796600,"TERMINAL",0,0,"2",,terminal_output +807,796801,"jasmine/train_tokenizer_appendix-c.py",15875,10,"",python,content +808,797565,"TERMINAL",0,0,"3",,terminal_output 
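Editor's annotation (not part of the recording): the checkpointing block visible in the selections above bundles Orbax state with Grain iterator state so a requeued job resumes both model and dataloader from the same step. A hedged sketch of that save call, assuming a configured ocp.CheckpointManager and the grain package the recorded script imports:

import grain
import orbax.checkpoint as ocp

def save_checkpoint(manager: ocp.CheckpointManager, step: int,
                    optimizer_state, train_iterator) -> None:
    # Model weights via PyTreeSave, dataloader position via Grain's
    # checkpoint handler, keyed per training step.
    composite = ocp.args.Composite(
        model_state=ocp.args.PyTreeSave(optimizer_state),
        train_dataloader_state=grain.checkpoint.CheckpointSave(train_iterator),
    )
    manager.save(step, args=composite)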
+809,798620,"TERMINAL",0,0,"4",,terminal_output +810,799159,"jasmine/train_tokenizer_appendix-c.py",15664,0,"",python,selection_mouse +811,799330,"jasmine/train_tokenizer_appendix-c.py",15664,1,"v",python,selection_mouse +812,799348,"jasmine/train_tokenizer_appendix-c.py",15664,3,"val",python,selection_mouse +813,799367,"jasmine/train_tokenizer_appendix-c.py",15664,4,"val_",python,selection_mouse +814,799385,"jasmine/train_tokenizer_appendix-c.py",15664,5,"val_g",python,selection_mouse +815,799416,"jasmine/train_tokenizer_appendix-c.py",15664,6,"val_gt",python,selection_mouse +816,799426,"jasmine/train_tokenizer_appendix-c.py",15664,7,"val_gt_",python,selection_mouse +817,799460,"jasmine/train_tokenizer_appendix-c.py",15664,9,"val_gt_ba",python,selection_mouse +818,799501,"jasmine/train_tokenizer_appendix-c.py",15664,10,"val_gt_bat",python,selection_mouse +819,799502,"jasmine/train_tokenizer_appendix-c.py",15664,11,"val_gt_batc",python,selection_mouse +820,799543,"jasmine/train_tokenizer_appendix-c.py",15664,12,"val_gt_batch",python,selection_mouse +821,799556,"jasmine/train_tokenizer_appendix-c.py",15664,13,"val_gt_batch,",python,selection_mouse +822,799595,"jasmine/train_tokenizer_appendix-c.py",15664,14,"val_gt_batch, ",python,selection_mouse +823,799630,"jasmine/train_tokenizer_appendix-c.py",15664,15,"val_gt_batch, v",python,selection_mouse +824,799650,"jasmine/train_tokenizer_appendix-c.py",15664,16,"val_gt_batch, va",python,selection_mouse +825,799705,"TERMINAL",0,0,"5",,terminal_output +826,799778,"jasmine/train_tokenizer_appendix-c.py",15664,17,"val_gt_batch, val",python,selection_mouse +827,799819,"jasmine/train_tokenizer_appendix-c.py",15664,18,"val_gt_batch, val_",python,selection_mouse +828,799860,"jasmine/train_tokenizer_appendix-c.py",15664,19,"val_gt_batch, val_r",python,selection_mouse +829,799861,"jasmine/train_tokenizer_appendix-c.py",15664,20,"val_gt_batch, val_re",python,selection_mouse +830,799937,"jasmine/train_tokenizer_appendix-c.py",15664,21,"val_gt_batch, val_rec",python,selection_mouse +831,799946,"jasmine/train_tokenizer_appendix-c.py",15664,22,"val_gt_batch, val_reco",python,selection_mouse +832,800094,"jasmine/train_tokenizer_appendix-c.py",15664,23,"val_gt_batch, val_recon",python,selection_mouse +833,800393,"jasmine/train_tokenizer_appendix-c.py",15664,23,"",python,content +834,800704,"TERMINAL",0,0,"6",,terminal_output +835,801711,"jasmine/train_tokenizer_appendix-c.py",15664,0,"_",python,content +836,801712,"jasmine/train_tokenizer_appendix-c.py",15665,0,"",python,selection_keyboard +837,801782,"TERMINAL",0,0,"7",,terminal_output +838,802086,"jasmine/train_tokenizer_appendix-c.py",15665,0,",",python,content +839,802087,"jasmine/train_tokenizer_appendix-c.py",15666,0,"",python,selection_keyboard +840,802376,"jasmine/train_tokenizer_appendix-c.py",15666,0," ",python,content +841,802377,"jasmine/train_tokenizer_appendix-c.py",15667,0,"",python,selection_keyboard +842,802519,"jasmine/train_tokenizer_appendix-c.py",15667,0,"_",python,content +843,802520,"jasmine/train_tokenizer_appendix-c.py",15668,0,"",python,selection_keyboard +844,802798,"TERMINAL",0,0,"8",,terminal_output +845,802846,"jasmine/train_tokenizer_appendix-c.py",15667,0,"",python,selection_command +846,803779,"TERMINAL",0,0,"9",,terminal_output +847,804799,"TERMINAL",0,0,"50",,terminal_output +848,804949,"jasmine/train_tokenizer_appendix-c.py",15879,0,"",python,selection_mouse +849,805828,"TERMINAL",0,0,"1",,terminal_output +850,806914,"TERMINAL",0,0,"2",,terminal_output 
+851,807254,"jasmine/train_tokenizer_appendix-c.py",15010,0,"",python,selection_mouse +852,807718,"jasmine/train_tokenizer_appendix-c.py",15033,0,"",python,selection_mouse +853,808017,"TERMINAL",0,0,"3",,terminal_output +854,808440,"jasmine/train_tokenizer_appendix-c.py",15008,0,"",python,selection_command +855,808740,"jasmine/train_tokenizer_appendix-c.py",14995,24," # --- TRAIN LOOP ---",python,selection_command +856,808912,"TERMINAL",0,0,"4",,terminal_output +857,808936,"jasmine/train_tokenizer_appendix-c.py",14995,49," # --- TRAIN LOOP ---\n dataloader_train = (",python,selection_command +858,809431,"jasmine/train_tokenizer_appendix-c.py",14995,59," # --- TRAIN LOOP ---\n dataloader_train = (\n {",python,selection_command +859,809484,"jasmine/train_tokenizer_appendix-c.py",14995,121," # --- TRAIN LOOP ---\n dataloader_train = (\n {\n ""videos"": jax.make_array_from_process_local_data(",python,selection_command +860,809514,"jasmine/train_tokenizer_appendix-c.py",14995,169," # --- TRAIN LOOP ---\n dataloader_train = (\n {\n ""videos"": jax.make_array_from_process_local_data(\n videos_sharding, elem[""videos""]",python,selection_command +861,809520,"jasmine/train_tokenizer_appendix-c.py",14995,184," # --- TRAIN LOOP ---\n dataloader_train = (\n {\n ""videos"": jax.make_array_from_process_local_data(\n videos_sharding, elem[""videos""]\n ),",python,selection_command +862,809547,"jasmine/train_tokenizer_appendix-c.py",14995,194," # --- TRAIN LOOP ---\n dataloader_train = (\n {\n ""videos"": jax.make_array_from_process_local_data(\n videos_sharding, elem[""videos""]\n ),\n }",python,selection_command +863,809687,"jasmine/train_tokenizer_appendix-c.py",14995,229," # --- TRAIN LOOP ---\n dataloader_train = (\n {\n ""videos"": jax.make_array_from_process_local_data(\n videos_sharding, elem[""videos""]\n ),\n }\n for elem in train_iterator",python,selection_command +864,809806,"jasmine/train_tokenizer_appendix-c.py",14995,235," # --- TRAIN LOOP ---\n dataloader_train = (\n {\n ""videos"": jax.make_array_from_process_local_data(\n videos_sharding, elem[""videos""]\n ),\n }\n for elem in train_iterator\n )",python,selection_command +865,810002,"TERMINAL",0,0,"5",,terminal_output +866,810117,"jasmine/train_tokenizer_appendix-c.py",14995,236,"",python,content +867,810136,"jasmine/train_tokenizer_appendix-c.py",14999,0,"",python,selection_command +868,810960,"TERMINAL",0,0,"6",,terminal_output +869,812000,"TERMINAL",0,0,"7",,terminal_output +870,813020,"TERMINAL",0,0,"8",,terminal_output +871,814146,"TERMINAL",0,0,"9",,terminal_output +872,815073,"TERMINAL",0,0,"1:00",,terminal_output +873,816050,"TERMINAL",0,0,"1",,terminal_output +874,817311,"TERMINAL",0,0,"2",,terminal_output +875,818143,"TERMINAL",0,0,"3",,terminal_output +876,819160,"TERMINAL",0,0,"4",,terminal_output +877,820126,"TERMINAL",0,0,"5",,terminal_output +878,821225,"TERMINAL",0,0,"6",,terminal_output +879,822233,"TERMINAL",0,0,"7",,terminal_output +880,823203,"TERMINAL",0,0,"8",,terminal_output +881,824357,"TERMINAL",0,0,"9",,terminal_output +882,825241,"TERMINAL",0,0,"10",,terminal_output +883,826400,"TERMINAL",0,0,"1",,terminal_output +884,827279,"TERMINAL",0,0,"2",,terminal_output +885,828337,"TERMINAL",0,0,"3",,terminal_output +886,829321,"TERMINAL",0,0,"4",,terminal_output +887,830365,"TERMINAL",0,0,"5",,terminal_output +888,831361,"TERMINAL",0,0,"6",,terminal_output +889,832531,"TERMINAL",0,0,"7",,terminal_output +890,833406,"TERMINAL",0,0,"8",,terminal_output +891,834466,"TERMINAL",0,0,"9",,terminal_output 
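Editor's annotation (not part of the recording): rows 855-866 above select and delete a generator that turns per-process Grain batches into globally sharded arrays. A hedged sketch of that deleted pattern, assuming videos_sharding is a jax.sharding.NamedSharding spanning all processes:

import jax

def make_sharded_loader(train_iterator, videos_sharding):
    # Each host contributes only its local shard of the batch; JAX
    # assembles the shards into one addressable global array per step.
    return (
        {
            "videos": jax.make_array_from_process_local_data(
                videos_sharding, elem["videos"]
            )
        }
        for elem in train_iterator
    )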
+892,835485,"TERMINAL",0,0,"20",,terminal_output +893,836469,"TERMINAL",0,0,"1",,terminal_output +894,837595,"TERMINAL",0,0,"2",,terminal_output +895,838682,"TERMINAL",0,0,"4",,terminal_output +896,839604,"TERMINAL",0,0,"5",,terminal_output +897,840609,"TERMINAL",0,0,"6",,terminal_output +898,841575,"TERMINAL",0,0,"7",,terminal_output +899,842591,"TERMINAL",0,0,"8",,terminal_output +900,843630,"TERMINAL",0,0,"9",,terminal_output +901,844637,"TERMINAL",0,0,"30",,terminal_output +902,845807,"TERMINAL",0,0,"1",,terminal_output +903,846877,"TERMINAL",0,0,"2",,terminal_output +904,847728,"TERMINAL",0,0,"3",,terminal_output +905,848713,"TERMINAL",0,0,"4",,terminal_output +906,849735,"TERMINAL",0,0,"5",,terminal_output +907,850752,"TERMINAL",0,0,"6",,terminal_output +908,851819,"TERMINAL",0,0,"7",,terminal_output +909,853145,"TERMINAL",0,0,"8",,terminal_output +910,854261,"TERMINAL",0,0,"9",,terminal_output +911,854834,"TERMINAL",0,0,"40",,terminal_output +912,856104,"TERMINAL",0,0,"1",,terminal_output +913,857271,"TERMINAL",0,0,"2",,terminal_output +914,858265,"TERMINAL",0,0,"3",,terminal_output +915,859487,"TERMINAL",0,0,"4",,terminal_output +916,860501,"TERMINAL",0,0,"5",,terminal_output +917,860962,"TERMINAL",0,0,"6",,terminal_output +918,862081,"TERMINAL",0,0,"7",,terminal_output +919,863190,"TERMINAL",0,0,"8",,terminal_output +920,864243,"TERMINAL",0,0,"9",,terminal_output +921,865282,"TERMINAL",0,0,"50",,terminal_output +922,866318,"TERMINAL",0,0,"1",,terminal_output +923,867180,"TERMINAL",0,0,"2",,terminal_output +924,868193,"TERMINAL",0,0,"3",,terminal_output +925,869220,"TERMINAL",0,0,"4",,terminal_output +926,870390,"TERMINAL",0,0,"5",,terminal_output +927,871220,"TERMINAL",0,0,"6",,terminal_output +928,872349,"TERMINAL",0,0,"7",,terminal_output +929,873335,"TERMINAL",0,0,"8",,terminal_output +930,874301,"TERMINAL",0,0,"9",,terminal_output +931,875291,"TERMINAL",0,0,"2:00",,terminal_output +932,876330,"TERMINAL",0,0,"1",,terminal_output +933,877474,"TERMINAL",0,0,"2",,terminal_output +934,878348,"TERMINAL",0,0,"3",,terminal_output +935,879384,"TERMINAL",0,0,"4",,terminal_output +936,880395,"TERMINAL",0,0,"5",,terminal_output +937,881427,"TERMINAL",0,0,"6",,terminal_output +938,882436,"TERMINAL",0,0,"7",,terminal_output +939,883787,"TERMINAL",0,0,"8",,terminal_output +940,885145,"TERMINAL",0,0,"9",,terminal_output +941,886206,"TERMINAL",0,0,"10",,terminal_output +942,886923,"TERMINAL",0,0,"2",,terminal_output +943,887716,"TERMINAL",0,0,"3",,terminal_output +944,889242,"TERMINAL",0,0,"4",,terminal_output +945,889894,"TERMINAL",0,0,"5",,terminal_output +946,890786,"TERMINAL",0,0,"6",,terminal_output +947,892230,"TERMINAL",0,0,"7",,terminal_output +948,892884,"TERMINAL",0,0,"8",,terminal_output +949,893734,"TERMINAL",0,0,"9",,terminal_output +950,894884,"TERMINAL",0,0,"20",,terminal_output +951,895730,"TERMINAL",0,0,"1",,terminal_output +952,896755,"TERMINAL",0,0,"2",,terminal_output +953,897839,"TERMINAL",0,0,"3",,terminal_output +954,898804,"TERMINAL",0,0,"4",,terminal_output +955,899844,"TERMINAL",0,0,"5",,terminal_output +956,900850,"TERMINAL",0,0,"6",,terminal_output +957,901861,"TERMINAL",0,0,"7",,terminal_output +958,902866,"TERMINAL",0,0,"8",,terminal_output +959,903878,"TERMINAL",0,0,"9",,terminal_output +960,904907,"TERMINAL",0,0,"30",,terminal_output +961,905929,"TERMINAL",0,0,"1",,terminal_output +962,906980,"TERMINAL",0,0,"2",,terminal_output +963,907992,"TERMINAL",0,0,"3",,terminal_output +964,908988,"TERMINAL",0,0,"4",,terminal_output 
+965,910051,"TERMINAL",0,0,"5",,terminal_output +966,911033,"TERMINAL",0,0,"6",,terminal_output +967,912062,"TERMINAL",0,0,"7",,terminal_output +968,913081,"TERMINAL",0,0,"8",,terminal_output +969,914104,"TERMINAL",0,0,"9",,terminal_output +970,915179,"TERMINAL",0,0,"40",,terminal_output +971,916128,"TERMINAL",0,0,"1",,terminal_output +972,917148,"TERMINAL",0,0,"2",,terminal_output +973,918176,"TERMINAL",0,0,"3",,terminal_output +974,919197,"TERMINAL",0,0,"4",,terminal_output +975,920211,"TERMINAL",0,0,"5",,terminal_output +976,921233,"TERMINAL",0,0,"6",,terminal_output +977,922255,"TERMINAL",0,0,"7",,terminal_output +978,923359,"TERMINAL",0,0,"8",,terminal_output +979,924439,"TERMINAL",0,0,"9",,terminal_output +980,925377,"TERMINAL",0,0,"50",,terminal_output +981,926363,"TERMINAL",0,0,"1",,terminal_output +982,927411,"TERMINAL",0,0,"2",,terminal_output +983,928587,"TERMINAL",0,0,"3",,terminal_output +984,929434,"TERMINAL",0,0,"4",,terminal_output +985,930444,"TERMINAL",0,0,"5",,terminal_output +986,931470,"TERMINAL",0,0,"6",,terminal_output +987,932486,"TERMINAL",0,0,"7",,terminal_output +988,933509,"TERMINAL",0,0,"9",,terminal_output +989,934580,"TERMINAL",0,0,"3:00",,terminal_output +990,935604,"TERMINAL",0,0,"1",,terminal_output +991,936580,"TERMINAL",0,0,"2",,terminal_output +992,937621,"TERMINAL",0,0,"3",,terminal_output +993,938624,"TERMINAL",0,0,"4",,terminal_output +994,939642,"TERMINAL",0,0,"5",,terminal_output +995,940662,"TERMINAL",0,0,"6",,terminal_output +996,941728,"TERMINAL",0,0,"7",,terminal_output +997,942724,"TERMINAL",0,0,"8",,terminal_output +998,943749,"TERMINAL",0,0,"9",,terminal_output +999,944767,"TERMINAL",0,0,"10",,terminal_output +1000,945792,"TERMINAL",0,0,"1",,terminal_output +1001,946814,"TERMINAL",0,0,"2",,terminal_output +1002,947852,"TERMINAL",0,0,"3",,terminal_output +1003,948885,"TERMINAL",0,0,"4",,terminal_output +1004,949911,"TERMINAL",0,0,"5",,terminal_output +1005,950898,"TERMINAL",0,0,"6",,terminal_output +1006,951960,"TERMINAL",0,0,"7",,terminal_output +1007,952949,"TERMINAL",0,0,"8",,terminal_output +1008,954015,"TERMINAL",0,0,"9",,terminal_output +1009,954982,"TERMINAL",0,0,"20",,terminal_output +1010,956008,"TERMINAL",0,0,"1",,terminal_output +1011,957154,"TERMINAL",0,0,"2",,terminal_output +1012,958032,"TERMINAL",0,0,"3",,terminal_output +1013,959056,"TERMINAL",0,0,"4",,terminal_output +1014,960092,"TERMINAL",0,0,"5",,terminal_output +1015,961116,"TERMINAL",0,0,"6",,terminal_output +1016,962122,"TERMINAL",0,0,"7",,terminal_output +1017,963176,"TERMINAL",0,0,"8",,terminal_output +1018,964164,"TERMINAL",0,0,"9",,terminal_output +1019,965345,"TERMINAL",0,0,"30",,terminal_output +1020,966480,"TERMINAL",0,0,"1",,terminal_output +1021,967367,"TERMINAL",0,0,"2",,terminal_output +1022,968304,"TERMINAL",0,0,"3",,terminal_output +1023,969319,"TERMINAL",0,0,"4",,terminal_output +1024,970500,"TERMINAL",0,0,"5",,terminal_output +1025,971474,"TERMINAL",0,0,"6",,terminal_output +1026,972493,"TERMINAL",0,0,"7",,terminal_output +1027,973417,"TERMINAL",0,0,"8",,terminal_output +1028,974445,"TERMINAL",0,0,"9",,terminal_output +1029,975725,"TERMINAL",0,0,"40",,terminal_output +1030,976548,"TERMINAL",0,0,"1",,terminal_output +1031,977618,"TERMINAL",0,0,"2",,terminal_output +1032,978642,"TERMINAL",0,0,"4",,terminal_output +1033,979834,"TERMINAL",0,0,"5",,terminal_output +1034,981196,"TERMINAL",0,0,"6",,terminal_output +1035,982152,"TERMINAL",0,0,"7",,terminal_output +1036,983479,"TERMINAL",0,0,"8",,terminal_output +1037,984216,"TERMINAL",0,0,"9",,terminal_output 
+1038,985314,"TERMINAL",0,0,"50",,terminal_output +1039,986304,"TERMINAL",0,0,"1",,terminal_output +1040,987499,"TERMINAL",0,0,"2",,terminal_output +1041,988161,"TERMINAL",0,0,"3",,terminal_output +1042,988893,"TERMINAL",0,0,"4",,terminal_output +1043,990152,"TERMINAL",0,0,"5",,terminal_output +1044,990897,"TERMINAL",0,0,"6",,terminal_output +1045,992028,"TERMINAL",0,0,"7",,terminal_output +1046,992824,"TERMINAL",0,0,"8",,terminal_output +1047,993961,"TERMINAL",0,0,"9",,terminal_output +1048,994875,"TERMINAL",0,0,"4:00",,terminal_output +1049,995890,"TERMINAL",0,0,"1",,terminal_output +1050,996925,"TERMINAL",0,0,"2",,terminal_output +1051,997995,"TERMINAL",0,0,"3",,terminal_output +1052,998960,"TERMINAL",0,0,"4",,terminal_output +1053,999993,"TERMINAL",0,0,"5",,terminal_output +1054,1001006,"TERMINAL",0,0,"6",,terminal_output +1055,1002071,"TERMINAL",0,0,"7",,terminal_output +1056,1003121,"TERMINAL",0,0,"8",,terminal_output +1057,1004060,"TERMINAL",0,0,"9",,terminal_output +1058,1005081,"TERMINAL",0,0,"10",,terminal_output +1059,1006113,"TERMINAL",0,0,"1",,terminal_output +1060,1007168,"TERMINAL",0,0,"2",,terminal_output +1061,1008139,"TERMINAL",0,0,"3",,terminal_output +1062,1009164,"TERMINAL",0,0,"4",,terminal_output +1063,1010219,"TERMINAL",0,0,"5",,terminal_output +1064,1011274,"TERMINAL",0,0,"6",,terminal_output +1065,1012225,"TERMINAL",0,0,"7",,terminal_output +1066,1013276,"TERMINAL",0,0,"8",,terminal_output +1067,1014264,"TERMINAL",0,0,"9",,terminal_output +1068,1015417,"TERMINAL",0,0,"20",,terminal_output +1069,1016344,"TERMINAL",0,0,"1",,terminal_output +1070,1017324,"TERMINAL",0,0,"2",,terminal_output +1071,1018349,"TERMINAL",0,0,"3",,terminal_output +1072,1019409,"TERMINAL",0,0,"4",,terminal_output +1073,1020426,"TERMINAL",0,0,"5",,terminal_output +1074,1021410,"TERMINAL",0,0,"6",,terminal_output +1075,1022442,"TERMINAL",0,0,"7",,terminal_output +1076,1023490,"TERMINAL",0,0,"8",,terminal_output +1077,1024475,"TERMINAL",0,0,"9",,terminal_output +1078,1025497,"TERMINAL",0,0,"30",,terminal_output +1079,1026525,"TERMINAL",0,0,"2",,terminal_output +1080,1027547,"TERMINAL",0,0,"3",,terminal_output +1081,1028893,"TERMINAL",0,0,"4",,terminal_output +1082,1029799,"TERMINAL",0,0,"5",,terminal_output +1083,1030758,"TERMINAL",0,0,"6",,terminal_output +1084,1031674,"TERMINAL",0,0,"7",,terminal_output +1085,1032681,"TERMINAL",0,0,"8",,terminal_output +1086,1033733,"TERMINAL",0,0,"9",,terminal_output +1087,1034699,"TERMINAL",0,0,"40",,terminal_output +1088,1035797,"TERMINAL",0,0,"1",,terminal_output +1089,1036774,"TERMINAL",0,0,"2",,terminal_output +1090,1037868,"TERMINAL",0,0,"3",,terminal_output +1091,1038733,"jasmine/train_tokenizer_appendix-c.py",12867,0,"",python,selection_mouse +1092,1038820,"TERMINAL",0,0,"4",,terminal_output +1093,1039171,"jasmine/train_tokenizer_appendix-c.py",12840,0,"",python,selection_mouse +1094,1039832,"TERMINAL",0,0,"5",,terminal_output +1095,1040209,"jasmine/train_tokenizer_appendix-c.py",12820,30," @nnx.jit(donate_argnums=0)",python,selection_command +1096,1040463,"jasmine/train_tokenizer_appendix-c.py",12820,50," @nnx.jit(donate_argnums=0)\n def train_step(",python,selection_command +1097,1040961,"jasmine/train_tokenizer_appendix-c.py",12820,105," @nnx.jit(donate_argnums=0)\n def train_step(\n optimizer: nnx.ModelAndOptimizer, inputs: dict",python,selection_command +1098,1040980,"TERMINAL",0,0,"6",,terminal_output +1099,1040983,"jasmine/train_tokenizer_appendix-c.py",12820,149," @nnx.jit(donate_argnums=0)\n def train_step(\n optimizer: nnx.ModelAndOptimizer, 
inputs: dict\n ) -> tuple[jax.Array, jax.Array, dict]:",python,selection_command +1100,1041014,"jasmine/train_tokenizer_appendix-c.py",12820,237," @nnx.jit(donate_argnums=0)\n def train_step(\n optimizer: nnx.ModelAndOptimizer, inputs: dict\n ) -> tuple[jax.Array, jax.Array, dict]:\n def loss_fn(model: TokenizerVQVAE) -> tuple[jax.Array, tuple[jax.Array, dict]]:",python,selection_command +1101,1041046,"jasmine/train_tokenizer_appendix-c.py",12820,263," @nnx.jit(donate_argnums=0)\n def train_step(\n optimizer: nnx.ModelAndOptimizer, inputs: dict\n ) -> tuple[jax.Array, jax.Array, dict]:\n def loss_fn(model: TokenizerVQVAE) -> tuple[jax.Array, tuple[jax.Array, dict]]:\n model.train()",python,selection_command +1102,1041070,"jasmine/train_tokenizer_appendix-c.py",12820,330," @nnx.jit(donate_argnums=0)\n def train_step(\n optimizer: nnx.ModelAndOptimizer, inputs: dict\n ) -> tuple[jax.Array, jax.Array, dict]:\n def loss_fn(model: TokenizerVQVAE) -> tuple[jax.Array, tuple[jax.Array, dict]]:\n model.train()\n return tokenizer_loss_fn(model, inputs, training=True)",python,selection_command +1103,1041117,"jasmine/train_tokenizer_appendix-c.py",12820,331," @nnx.jit(donate_argnums=0)\n def train_step(\n optimizer: nnx.ModelAndOptimizer, inputs: dict\n ) -> tuple[jax.Array, jax.Array, dict]:\n def loss_fn(model: TokenizerVQVAE) -> tuple[jax.Array, tuple[jax.Array, dict]]:\n model.train()\n return tokenizer_loss_fn(model, inputs, training=True)\n",python,selection_command +1104,1041131,"jasmine/train_tokenizer_appendix-c.py",12820,416," @nnx.jit(donate_argnums=0)\n def train_step(\n optimizer: nnx.ModelAndOptimizer, inputs: dict\n ) -> tuple[jax.Array, jax.Array, dict]:\n def loss_fn(model: TokenizerVQVAE) -> tuple[jax.Array, tuple[jax.Array, dict]]:\n model.train()\n return tokenizer_loss_fn(model, inputs, training=True)\n\n (loss, (recon, metrics)), grads = nnx.value_and_grad(loss_fn, has_aux=True)(",python,selection_command +1105,1041171,"jasmine/train_tokenizer_appendix-c.py",12820,444," @nnx.jit(donate_argnums=0)\n def train_step(\n optimizer: nnx.ModelAndOptimizer, inputs: dict\n ) -> tuple[jax.Array, jax.Array, dict]:\n def loss_fn(model: TokenizerVQVAE) -> tuple[jax.Array, tuple[jax.Array, dict]]:\n model.train()\n return tokenizer_loss_fn(model, inputs, training=True)\n\n (loss, (recon, metrics)), grads = nnx.value_and_grad(loss_fn, has_aux=True)(\n optimizer.model",python,selection_command +1106,1041206,"jasmine/train_tokenizer_appendix-c.py",12820,454," @nnx.jit(donate_argnums=0)\n def train_step(\n optimizer: nnx.ModelAndOptimizer, inputs: dict\n ) -> tuple[jax.Array, jax.Array, dict]:\n def loss_fn(model: TokenizerVQVAE) -> tuple[jax.Array, tuple[jax.Array, dict]]:\n model.train()\n return tokenizer_loss_fn(model, inputs, training=True)\n\n (loss, (recon, metrics)), grads = nnx.value_and_grad(loss_fn, has_aux=True)(\n optimizer.model\n )",python,selection_command +1107,1041248,"jasmine/train_tokenizer_appendix-c.py",12820,486," @nnx.jit(donate_argnums=0)\n def train_step(\n optimizer: nnx.ModelAndOptimizer, inputs: dict\n ) -> tuple[jax.Array, jax.Array, dict]:\n def loss_fn(model: TokenizerVQVAE) -> tuple[jax.Array, tuple[jax.Array, dict]]:\n model.train()\n return tokenizer_loss_fn(model, inputs, training=True)\n\n (loss, (recon, metrics)), grads = nnx.value_and_grad(loss_fn, has_aux=True)(\n optimizer.model\n )\n optimizer.update(grads)",python,selection_command +1108,1041270,"jasmine/train_tokenizer_appendix-c.py",12820,517," @nnx.jit(donate_argnums=0)\n def train_step(\n optimizer: 
nnx.ModelAndOptimizer, inputs: dict\n ) -> tuple[jax.Array, jax.Array, dict]:\n def loss_fn(model: TokenizerVQVAE) -> tuple[jax.Array, tuple[jax.Array, dict]]:\n model.train()\n return tokenizer_loss_fn(model, inputs, training=True)\n\n (loss, (recon, metrics)), grads = nnx.value_and_grad(loss_fn, has_aux=True)(\n optimizer.model\n )\n optimizer.update(grads)\n if args.log_gradients:",python,selection_command +1109,1041293,"jasmine/train_tokenizer_appendix-c.py",12820,579," @nnx.jit(donate_argnums=0)\n def train_step(\n optimizer: nnx.ModelAndOptimizer, inputs: dict\n ) -> tuple[jax.Array, jax.Array, dict]:\n def loss_fn(model: TokenizerVQVAE) -> tuple[jax.Array, tuple[jax.Array, dict]]:\n model.train()\n return tokenizer_loss_fn(model, inputs, training=True)\n\n (loss, (recon, metrics)), grads = nnx.value_and_grad(loss_fn, has_aux=True)(\n optimizer.model\n )\n optimizer.update(grads)\n if args.log_gradients:\n metrics[""encoder_gradients_std/""] = jax.tree.map(",python,selection_command +1110,1041348,"jasmine/train_tokenizer_appendix-c.py",12820,641," @nnx.jit(donate_argnums=0)\n def train_step(\n optimizer: nnx.ModelAndOptimizer, inputs: dict\n ) -> tuple[jax.Array, jax.Array, dict]:\n def loss_fn(model: TokenizerVQVAE) -> tuple[jax.Array, tuple[jax.Array, dict]]:\n model.train()\n return tokenizer_loss_fn(model, inputs, training=True)\n\n (loss, (recon, metrics)), grads = nnx.value_and_grad(loss_fn, has_aux=True)(\n optimizer.model\n )\n optimizer.update(grads)\n if args.log_gradients:\n metrics[""encoder_gradients_std/""] = jax.tree.map(\n lambda x: x.std(), grads[""params""][""encoder""]",python,selection_command +1111,1041360,"jasmine/train_tokenizer_appendix-c.py",12820,655," @nnx.jit(donate_argnums=0)\n def train_step(\n optimizer: nnx.ModelAndOptimizer, inputs: dict\n ) -> tuple[jax.Array, jax.Array, dict]:\n def loss_fn(model: TokenizerVQVAE) -> tuple[jax.Array, tuple[jax.Array, dict]]:\n model.train()\n return tokenizer_loss_fn(model, inputs, training=True)\n\n (loss, (recon, metrics)), grads = nnx.value_and_grad(loss_fn, has_aux=True)(\n optimizer.model\n )\n optimizer.update(grads)\n if args.log_gradients:\n metrics[""encoder_gradients_std/""] = jax.tree.map(\n lambda x: x.std(), grads[""params""][""encoder""]\n )",python,selection_command +1112,1041385,"jasmine/train_tokenizer_appendix-c.py",12820,712," @nnx.jit(donate_argnums=0)\n def train_step(\n optimizer: nnx.ModelAndOptimizer, inputs: dict\n ) -> tuple[jax.Array, jax.Array, dict]:\n def loss_fn(model: TokenizerVQVAE) -> tuple[jax.Array, tuple[jax.Array, dict]]:\n model.train()\n return tokenizer_loss_fn(model, inputs, training=True)\n\n (loss, (recon, metrics)), grads = nnx.value_and_grad(loss_fn, has_aux=True)(\n optimizer.model\n )\n optimizer.update(grads)\n if args.log_gradients:\n metrics[""encoder_gradients_std/""] = jax.tree.map(\n lambda x: x.std(), grads[""params""][""encoder""]\n )\n metrics[""vq_gradients_std/""] = jax.tree.map(",python,selection_command +1113,1041443,"jasmine/train_tokenizer_appendix-c.py",12820,769," @nnx.jit(donate_argnums=0)\n def train_step(\n optimizer: nnx.ModelAndOptimizer, inputs: dict\n ) -> tuple[jax.Array, jax.Array, dict]:\n def loss_fn(model: TokenizerVQVAE) -> tuple[jax.Array, tuple[jax.Array, dict]]:\n model.train()\n return tokenizer_loss_fn(model, inputs, training=True)\n\n (loss, (recon, metrics)), grads = nnx.value_and_grad(loss_fn, has_aux=True)(\n optimizer.model\n )\n optimizer.update(grads)\n if args.log_gradients:\n metrics[""encoder_gradients_std/""] = 
jax.tree.map(\n lambda x: x.std(), grads[""params""][""encoder""]\n )\n metrics[""vq_gradients_std/""] = jax.tree.map(\n lambda x: x.std(), grads[""params""][""vq""]",python,selection_command +1114,1041447,"jasmine/train_tokenizer_appendix-c.py",12820,783," @nnx.jit(donate_argnums=0)\n def train_step(\n optimizer: nnx.ModelAndOptimizer, inputs: dict\n ) -> tuple[jax.Array, jax.Array, dict]:\n def loss_fn(model: TokenizerVQVAE) -> tuple[jax.Array, tuple[jax.Array, dict]]:\n model.train()\n return tokenizer_loss_fn(model, inputs, training=True)\n\n (loss, (recon, metrics)), grads = nnx.value_and_grad(loss_fn, has_aux=True)(\n optimizer.model\n )\n optimizer.update(grads)\n if args.log_gradients:\n metrics[""encoder_gradients_std/""] = jax.tree.map(\n lambda x: x.std(), grads[""params""][""encoder""]\n )\n metrics[""vq_gradients_std/""] = jax.tree.map(\n lambda x: x.std(), grads[""params""][""vq""]\n )",python,selection_command +1115,1041554,"jasmine/train_tokenizer_appendix-c.py",12820,845," @nnx.jit(donate_argnums=0)\n def train_step(\n optimizer: nnx.ModelAndOptimizer, inputs: dict\n ) -> tuple[jax.Array, jax.Array, dict]:\n def loss_fn(model: TokenizerVQVAE) -> tuple[jax.Array, tuple[jax.Array, dict]]:\n model.train()\n return tokenizer_loss_fn(model, inputs, training=True)\n\n (loss, (recon, metrics)), grads = nnx.value_and_grad(loss_fn, has_aux=True)(\n optimizer.model\n )\n optimizer.update(grads)\n if args.log_gradients:\n metrics[""encoder_gradients_std/""] = jax.tree.map(\n lambda x: x.std(), grads[""params""][""encoder""]\n )\n metrics[""vq_gradients_std/""] = jax.tree.map(\n lambda x: x.std(), grads[""params""][""vq""]\n )\n metrics[""decoder_gradients_std/""] = jax.tree.map(",python,selection_command +1116,1041730,"jasmine/train_tokenizer_appendix-c.py",12820,907," @nnx.jit(donate_argnums=0)\n def train_step(\n optimizer: nnx.ModelAndOptimizer, inputs: dict\n ) -> tuple[jax.Array, jax.Array, dict]:\n def loss_fn(model: TokenizerVQVAE) -> tuple[jax.Array, tuple[jax.Array, dict]]:\n model.train()\n return tokenizer_loss_fn(model, inputs, training=True)\n\n (loss, (recon, metrics)), grads = nnx.value_and_grad(loss_fn, has_aux=True)(\n optimizer.model\n )\n optimizer.update(grads)\n if args.log_gradients:\n metrics[""encoder_gradients_std/""] = jax.tree.map(\n lambda x: x.std(), grads[""params""][""encoder""]\n )\n metrics[""vq_gradients_std/""] = jax.tree.map(\n lambda x: x.std(), grads[""params""][""vq""]\n )\n metrics[""decoder_gradients_std/""] = jax.tree.map(\n lambda x: x.std(), grads[""params""][""decoder""]",python,selection_command +1117,1041853,"jasmine/train_tokenizer_appendix-c.py",12820,921," @nnx.jit(donate_argnums=0)\n def train_step(\n optimizer: nnx.ModelAndOptimizer, inputs: dict\n ) -> tuple[jax.Array, jax.Array, dict]:\n def loss_fn(model: TokenizerVQVAE) -> tuple[jax.Array, tuple[jax.Array, dict]]:\n model.train()\n return tokenizer_loss_fn(model, inputs, training=True)\n\n (loss, (recon, metrics)), grads = nnx.value_and_grad(loss_fn, has_aux=True)(\n optimizer.model\n )\n optimizer.update(grads)\n if args.log_gradients:\n metrics[""encoder_gradients_std/""] = jax.tree.map(\n lambda x: x.std(), grads[""params""][""encoder""]\n )\n metrics[""vq_gradients_std/""] = jax.tree.map(\n lambda x: x.std(), grads[""params""][""vq""]\n )\n metrics[""decoder_gradients_std/""] = jax.tree.map(\n lambda x: x.std(), grads[""params""][""decoder""]\n )",python,selection_command +1118,1041875,"TERMINAL",0,0,"7",,terminal_output 
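Editor's annotation (not part of the recording): the block selected in the rows above is the flax.nnx train step with a donated optimizer and auxiliary outputs threaded through value_and_grad. A self-contained hedged sketch of the same pattern on a toy model; TinyAutoencoder is illustrative and not from the recording:

import jax.numpy as jnp
import optax
from flax import nnx

class TinyAutoencoder(nnx.Module):
    def __init__(self, rngs: nnx.Rngs):
        self.proj = nnx.Linear(8, 8, rngs=rngs)

    def __call__(self, x: jnp.ndarray) -> jnp.ndarray:
        return self.proj(x)

@nnx.jit(donate_argnums=0)  # donate the optimizer so params update in place
def train_step(optimizer: nnx.ModelAndOptimizer, batch: jnp.ndarray):
    def loss_fn(model: TinyAutoencoder):
        recon = model(batch)
        loss = jnp.mean((recon - batch) ** 2)
        return loss, (recon, {"mse": loss})

    # has_aux=True carries (recon, metrics) alongside the scalar loss.
    (loss, (recon, metrics)), grads = nnx.value_and_grad(loss_fn, has_aux=True)(
        optimizer.model
    )
    optimizer.update(grads)
    return loss, recon, metrics

# Usage sketch (constructor name follows the recorded script; the exact
# optimizer wrapper class depends on the installed flax version):
# model = TinyAutoencoder(nnx.Rngs(0))
# optimizer = nnx.ModelAndOptimizer(model, optax.adam(3e-4))
# loss, recon, metrics = train_step(optimizer, jnp.ones((2, 8)))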
+1119,1042023,"jasmine/train_tokenizer_appendix-c.py",12820,957," @nnx.jit(donate_argnums=0)\n def train_step(\n optimizer: nnx.ModelAndOptimizer, inputs: dict\n ) -> tuple[jax.Array, jax.Array, dict]:\n def loss_fn(model: TokenizerVQVAE) -> tuple[jax.Array, tuple[jax.Array, dict]]:\n model.train()\n return tokenizer_loss_fn(model, inputs, training=True)\n\n (loss, (recon, metrics)), grads = nnx.value_and_grad(loss_fn, has_aux=True)(\n optimizer.model\n )\n optimizer.update(grads)\n if args.log_gradients:\n metrics[""encoder_gradients_std/""] = jax.tree.map(\n lambda x: x.std(), grads[""params""][""encoder""]\n )\n metrics[""vq_gradients_std/""] = jax.tree.map(\n lambda x: x.std(), grads[""params""][""vq""]\n )\n metrics[""decoder_gradients_std/""] = jax.tree.map(\n lambda x: x.std(), grads[""params""][""decoder""]\n )\n return loss, recon, metrics",python,selection_command +1120,1042200,"jasmine/train_tokenizer_appendix-c.py",12820,958,"",python,content +1121,1042913,"TERMINAL",0,0,"8",,terminal_output +1122,1043083,"jasmine/train_tokenizer_appendix-c.py",12820,1,"",python,content +1123,1043084,"jasmine/train_tokenizer_appendix-c.py",12824,0,"",python,selection_command +1124,1043939,"TERMINAL",0,0,"9",,terminal_output +1125,1044952,"TERMINAL",0,0,"50",,terminal_output +1126,1046043,"TERMINAL",0,0,"1",,terminal_output +1127,1047229,"TERMINAL",0,0,"2",,terminal_output +1128,1048295,"TERMINAL",0,0,"3",,terminal_output +1129,1049091,"TERMINAL",0,0,"4",,terminal_output +1130,1050316,"TERMINAL",0,0,"5",,terminal_output +1131,1051263,"TERMINAL",0,0,"6",,terminal_output +1132,1052475,"TERMINAL",0,0,"7",,terminal_output +1133,1053152,"TERMINAL",0,0,"8",,terminal_output +1134,1054333,"TERMINAL",0,0,"9",,terminal_output +1135,1055606,"TERMINAL",0,0,"5:00",,terminal_output +1136,1056292,"TERMINAL",0,0,"1",,terminal_output +1137,1057181,"TERMINAL",0,0,"2",,terminal_output +1138,1058407,"TERMINAL",0,0,"3",,terminal_output +1139,1059290,"TERMINAL",0,0,"4",,terminal_output +1140,1059934,"slurm/jobs/mihir/horeka/coinrun/ablations/appendix-c.sh",0,0,"",shellscript,tab +1141,1060374,"TERMINAL",0,0,"5",,terminal_output +1142,1061384,"TERMINAL",0,0,"6",,terminal_output +1143,1062580,"TERMINAL",0,0,"7",,terminal_output +1144,1063261,"slurm/jobs/mihir/horeka/coinrun/ablations/train_dyn_default-no-noise.sh",0,0,"#!/usr/bin/env bash\n\n#SBATCH --nodes=1\n#SBATCH --ntasks-per-node=1\n#SBATCH --time=48:00:00\n#SBATCH --partition=accelerated\n#SBATCH --cpus-per-task=5\n#SBATCH --gres=gpu:1\n#SBATCH --output=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir/coinrun/dynamics/maskgit/%x_%j.log\n#SBATCH --error=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir/coinrun/dynamics/maskgit/%x_%j.log\n#SBATCH --job-name=train_dynamics_maskgit\n#SBATCH --requeue\n#SBATCH --signal=b:usr1@300 # 5 min before timeout\n\n# --- signal trap to requeue job before timeout ---\nrequeue_job() {\n echo ""[$(date)] caught sigusr1 (timeout warning), requeueing slurm job $SLURM_JOB_ID...""\n # optional: trigger checkpoint saving here\n # e.g., touch $checkpoint_dir/requeue_trigger\n scontrol requeue $SLURM_JOB_ID\n exit 0\n}\n\ntrap requeue_job sigusr1\n\n# set checkpoint flag based on restart count\nrestart_count=$(scontrol show job $SLURM_JOB_ID | grep -o 'Restarts=[0-9]*' | cut -d'=' -f2)\n\nif [ $restart_count -eq 0 ]; then\n restore_ckpt_flag=""--no-restore-ckpt""\nelse\n restore_ckpt_flag=""--restore-ckpt""\nfi\n\n\n\n# Log the sbatch script\ncat $0\n\nmodule unload mpi/openmpi/5.0\nmodule 
unload devel/cuda/12.4\nsource .venv/bin/activate\n\narray_records_dir_train=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_coinrun/coinrun_episodes_10m_gt_actions_split/train\narray_records_dir_val=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_coinrun/coinrun_episodes_10m_gt_actions_split/val\n\njob_name=$SLURM_JOB_NAME\nslurm_job_id=$SLURM_JOB_ID\n\nCHECKPOINT_DIR=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/coinrun/maskgit/$job_name/$slurm_job_id\nmkdir -p $CHECKPOINT_DIR\n\ntokenizer_ckpt_dir=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/big-runs/tokenizer/train_tokenizer_default/3528955\n\nenv | grep SLURM\n\nsrun python jasmine/train_dynamics.py \\n --save_ckpt \\n $restore_ckpt_flag \\n --wandb_id $SLURM_JOB_ID \\n --ckpt_dir $CHECKPOINT_DIR \\n --batch_size=110 \\n --image_height=64 \\n --image_width=64 \\n --patch_size=16 \\n --max_lr=3e-5 \\n --max_noise_level=0 \\n --log_image_interval=1000 \\n --log \\n --log_checkpoint_interval=1000 \\n --name=coinrun-dynamics-maskgit-no-noise-$slurm_job_id \\n --tags coinrun dynamics maskgit no-noise \\n --entity instant-uv \\n --project jafar \\n --tokenizer_checkpoint=$tokenizer_ckpt_dir \\n --eval_full_frame \\n --data_dir $array_records_dir_train \\n --val_data_dir $array_records_dir_val &\n\nchild_pid=$!\n\nwait $child_pid",shellscript,tab +1145,1063391,"TERMINAL",0,0,"8",,terminal_output +1146,1064420,"TERMINAL",0,0,"9",,terminal_output +1147,1065362,"TERMINAL",0,0,"10",,terminal_output +1148,1065607,"slurm/jobs/mihir/horeka/coinrun/ablations/train_dyn_default-no-noise.sh",2587,0,"",shellscript,selection_mouse +1149,1065626,"slurm/jobs/mihir/horeka/coinrun/ablations/train_dyn_default-no-noise.sh",2586,0,"",shellscript,selection_command +1150,1065787,"slurm/jobs/mihir/horeka/coinrun/ablations/train_dyn_default-no-noise.sh",2586,1,"d",shellscript,selection_mouse +1151,1065788,"slurm/jobs/mihir/horeka/coinrun/ablations/train_dyn_default-no-noise.sh",2587,0,"",shellscript,selection_command +1152,1065830,"slurm/jobs/mihir/horeka/coinrun/ablations/train_dyn_default-no-noise.sh",2571,16,"\nwait $child_pid",shellscript,selection_mouse +1153,1065853,"slurm/jobs/mihir/horeka/coinrun/ablations/train_dyn_default-no-noise.sh",2023,564," --batch_size=110 \\n --image_height=64 \\n --image_width=64 \\n --patch_size=16 \\n --max_lr=3e-5 \\n --max_noise_level=0 \\n --log_image_interval=1000 \\n --log \\n --log_checkpoint_interval=1000 \\n --name=coinrun-dynamics-maskgit-no-noise-$slurm_job_id \\n --tags coinrun dynamics maskgit no-noise \\n --entity instant-uv \\n --project jafar \\n --tokenizer_checkpoint=$tokenizer_ckpt_dir \\n --eval_full_frame \\n --data_dir $array_records_dir_train \\n --val_data_dir $array_records_dir_val &\n\nchild_pid=$!\n\nwait $child_pid",shellscript,selection_mouse +1154,1065869,"slurm/jobs/mihir/horeka/coinrun/ablations/train_dyn_default-no-noise.sh",1934,653," $restore_ckpt_flag \\n --wandb_id $SLURM_JOB_ID \\n --ckpt_dir $CHECKPOINT_DIR \\n --batch_size=110 \\n --image_height=64 \\n --image_width=64 \\n --patch_size=16 \\n --max_lr=3e-5 \\n --max_noise_level=0 \\n --log_image_interval=1000 \\n --log \\n --log_checkpoint_interval=1000 \\n --name=coinrun-dynamics-maskgit-no-noise-$slurm_job_id \\n --tags coinrun dynamics maskgit no-noise \\n --entity instant-uv \\n --project jafar \\n --tokenizer_checkpoint=$tokenizer_ckpt_dir \\n --eval_full_frame \\n --data_dir $array_records_dir_train \\n --val_data_dir $array_records_dir_val 
&\n\nchild_pid=$!\n\nwait $child_pid",shellscript,selection_mouse +1155,1065890,"slurm/jobs/mihir/horeka/coinrun/ablations/train_dyn_default-no-noise.sh",1718,869,"\ntokenizer_ckpt_dir=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/big-runs/tokenizer/train_tokenizer_default/3528955\n\nenv | grep SLURM\n\nsrun python jasmine/train_dynamics.py \\n --save_ckpt \\n $restore_ckpt_flag \\n --wandb_id $SLURM_JOB_ID \\n --ckpt_dir $CHECKPOINT_DIR \\n --batch_size=110 \\n --image_height=64 \\n --image_width=64 \\n --patch_size=16 \\n --max_lr=3e-5 \\n --max_noise_level=0 \\n --log_image_interval=1000 \\n --log \\n --log_checkpoint_interval=1000 \\n --name=coinrun-dynamics-maskgit-no-noise-$slurm_job_id \\n --tags coinrun dynamics maskgit no-noise \\n --entity instant-uv \\n --project jafar \\n --tokenizer_checkpoint=$tokenizer_ckpt_dir \\n --eval_full_frame \\n --data_dir $array_records_dir_train \\n --val_data_dir $array_records_dir_val &\n\nchild_pid=$!\n\nwait $child_pid",shellscript,selection_mouse +1156,1065937,"slurm/jobs/mihir/horeka/coinrun/ablations/train_dyn_default-no-noise.sh",1383,1204,"array_records_dir_val=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_coinrun/coinrun_episodes_10m_gt_actions_split/val\n\njob_name=$SLURM_JOB_NAME\nslurm_job_id=$SLURM_JOB_ID\n\nCHECKPOINT_DIR=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/coinrun/maskgit/$job_name/$slurm_job_id\nmkdir -p $CHECKPOINT_DIR\n\ntokenizer_ckpt_dir=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/big-runs/tokenizer/train_tokenizer_default/3528955\n\nenv | grep SLURM\n\nsrun python jasmine/train_dynamics.py \\n --save_ckpt \\n $restore_ckpt_flag \\n --wandb_id $SLURM_JOB_ID \\n --ckpt_dir $CHECKPOINT_DIR \\n --batch_size=110 \\n --image_height=64 \\n --image_width=64 \\n --patch_size=16 \\n --max_lr=3e-5 \\n --max_noise_level=0 \\n --log_image_interval=1000 \\n --log \\n --log_checkpoint_interval=1000 \\n --name=coinrun-dynamics-maskgit-no-noise-$slurm_job_id \\n --tags coinrun dynamics maskgit no-noise \\n --entity instant-uv \\n --project jafar \\n --tokenizer_checkpoint=$tokenizer_ckpt_dir \\n --eval_full_frame \\n --data_dir $array_records_dir_train \\n --val_data_dir $array_records_dir_val &\n\nchild_pid=$!\n\nwait $child_pid",shellscript,selection_mouse +1157,1065938,"slurm/jobs/mihir/horeka/coinrun/ablations/train_dyn_default-no-noise.sh",1151,1436,"cat $0\n\nmodule unload mpi/openmpi/5.0\nmodule unload devel/cuda/12.4\nsource .venv/bin/activate\n\narray_records_dir_train=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_coinrun/coinrun_episodes_10m_gt_actions_split/train\narray_records_dir_val=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_coinrun/coinrun_episodes_10m_gt_actions_split/val\n\njob_name=$SLURM_JOB_NAME\nslurm_job_id=$SLURM_JOB_ID\n\nCHECKPOINT_DIR=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/coinrun/maskgit/$job_name/$slurm_job_id\nmkdir -p $CHECKPOINT_DIR\n\ntokenizer_ckpt_dir=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/big-runs/tokenizer/train_tokenizer_default/3528955\n\nenv | grep SLURM\n\nsrun python jasmine/train_dynamics.py \\n --save_ckpt \\n $restore_ckpt_flag \\n --wandb_id $SLURM_JOB_ID \\n --ckpt_dir $CHECKPOINT_DIR \\n --batch_size=110 \\n --image_height=64 \\n --image_width=64 \\n --patch_size=16 \\n --max_lr=3e-5 \\n --max_noise_level=0 \\n --log_image_interval=1000 \\n --log \\n --log_checkpoint_interval=1000 \\n 
--name=coinrun-dynamics-maskgit-no-noise-$slurm_job_id \\n --tags coinrun dynamics maskgit no-noise \\n --entity instant-uv \\n --project jafar \\n --tokenizer_checkpoint=$tokenizer_ckpt_dir \\n --eval_full_frame \\n --data_dir $array_records_dir_train \\n --val_data_dir $array_records_dir_val &\n\nchild_pid=$!\n\nwait $child_pid",shellscript,selection_mouse +1158,1065954,"slurm/jobs/mihir/horeka/coinrun/ablations/train_dyn_default-no-noise.sh",1077,1510,"else\n restore_ckpt_flag=""--restore-ckpt""\nfi\n\n\n\n# Log the sbatch script\ncat $0\n\nmodule unload mpi/openmpi/5.0\nmodule unload devel/cuda/12.4\nsource .venv/bin/activate\n\narray_records_dir_train=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_coinrun/coinrun_episodes_10m_gt_actions_split/train\narray_records_dir_val=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_coinrun/coinrun_episodes_10m_gt_actions_split/val\n\njob_name=$SLURM_JOB_NAME\nslurm_job_id=$SLURM_JOB_ID\n\nCHECKPOINT_DIR=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/coinrun/maskgit/$job_name/$slurm_job_id\nmkdir -p $CHECKPOINT_DIR\n\ntokenizer_ckpt_dir=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/big-runs/tokenizer/train_tokenizer_default/3528955\n\nenv | grep SLURM\n\nsrun python jasmine/train_dynamics.py \\n --save_ckpt \\n $restore_ckpt_flag \\n --wandb_id $SLURM_JOB_ID \\n --ckpt_dir $CHECKPOINT_DIR \\n --batch_size=110 \\n --image_height=64 \\n --image_width=64 \\n --patch_size=16 \\n --max_lr=3e-5 \\n --max_noise_level=0 \\n --log_image_interval=1000 \\n --log \\n --log_checkpoint_interval=1000 \\n --name=coinrun-dynamics-maskgit-no-noise-$slurm_job_id \\n --tags coinrun dynamics maskgit no-noise \\n --entity instant-uv \\n --project jafar \\n --tokenizer_checkpoint=$tokenizer_ckpt_dir \\n --eval_full_frame \\n --data_dir $array_records_dir_train \\n --val_data_dir $array_records_dir_val &\n\nchild_pid=$!\n\nwait $child_pid",shellscript,selection_mouse +1159,1065986,"slurm/jobs/mihir/horeka/coinrun/ablations/train_dyn_default-no-noise.sh",835,1752,"\ntrap requeue_job sigusr1\n\n# set checkpoint flag based on restart count\nrestart_count=$(scontrol show job $SLURM_JOB_ID | grep -o 'Restarts=[0-9]*' | cut -d'=' -f2)\n\nif [ $restart_count -eq 0 ]; then\n restore_ckpt_flag=""--no-restore-ckpt""\nelse\n restore_ckpt_flag=""--restore-ckpt""\nfi\n\n\n\n# Log the sbatch script\ncat $0\n\nmodule unload mpi/openmpi/5.0\nmodule unload devel/cuda/12.4\nsource .venv/bin/activate\n\narray_records_dir_train=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_coinrun/coinrun_episodes_10m_gt_actions_split/train\narray_records_dir_val=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_coinrun/coinrun_episodes_10m_gt_actions_split/val\n\njob_name=$SLURM_JOB_NAME\nslurm_job_id=$SLURM_JOB_ID\n\nCHECKPOINT_DIR=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/coinrun/maskgit/$job_name/$slurm_job_id\nmkdir -p $CHECKPOINT_DIR\n\ntokenizer_ckpt_dir=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/big-runs/tokenizer/train_tokenizer_default/3528955\n\nenv | grep SLURM\n\nsrun python jasmine/train_dynamics.py \\n --save_ckpt \\n $restore_ckpt_flag \\n --wandb_id $SLURM_JOB_ID \\n --ckpt_dir $CHECKPOINT_DIR \\n --batch_size=110 \\n --image_height=64 \\n --image_width=64 \\n --patch_size=16 \\n --max_lr=3e-5 \\n --max_noise_level=0 \\n --log_image_interval=1000 \\n --log \\n --log_checkpoint_interval=1000 \\n --name=coinrun-dynamics-maskgit-no-noise-$slurm_job_id 
\\n --tags coinrun dynamics maskgit no-noise \\n --entity instant-uv \\n --project jafar \\n --tokenizer_checkpoint=$tokenizer_ckpt_dir \\n --eval_full_frame \\n --data_dir $array_records_dir_train \\n --val_data_dir $array_records_dir_val &\n\nchild_pid=$!\n\nwait $child_pid",shellscript,selection_mouse +1160,1066026,"slurm/jobs/mihir/horeka/coinrun/ablations/train_dyn_default-no-noise.sh",529,2058,"# --- signal trap to requeue job before timeout ---\nrequeue_job() {\n echo ""[$(date)] caught sigusr1 (timeout warning), requeueing slurm job $SLURM_JOB_ID...""\n # optional: trigger checkpoint saving here\n # e.g., touch $checkpoint_dir/requeue_trigger\n scontrol requeue $SLURM_JOB_ID\n exit 0\n}\n\ntrap requeue_job sigusr1\n\n# set checkpoint flag based on restart count\nrestart_count=$(scontrol show job $SLURM_JOB_ID | grep -o 'Restarts=[0-9]*' | cut -d'=' -f2)\n\nif [ $restart_count -eq 0 ]; then\n restore_ckpt_flag=""--no-restore-ckpt""\nelse\n restore_ckpt_flag=""--restore-ckpt""\nfi\n\n\n\n# Log the sbatch script\ncat $0\n\nmodule unload mpi/openmpi/5.0\nmodule unload devel/cuda/12.4\nsource .venv/bin/activate\n\narray_records_dir_train=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_coinrun/coinrun_episodes_10m_gt_actions_split/train\narray_records_dir_val=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_coinrun/coinrun_episodes_10m_gt_actions_split/val\n\njob_name=$SLURM_JOB_NAME\nslurm_job_id=$SLURM_JOB_ID\n\nCHECKPOINT_DIR=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/coinrun/maskgit/$job_name/$slurm_job_id\nmkdir -p $CHECKPOINT_DIR\n\ntokenizer_ckpt_dir=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/big-runs/tokenizer/train_tokenizer_default/3528955\n\nenv | grep SLURM\n\nsrun python jasmine/train_dynamics.py \\n --save_ckpt \\n $restore_ckpt_flag \\n --wandb_id $SLURM_JOB_ID \\n --ckpt_dir $CHECKPOINT_DIR \\n --batch_size=110 \\n --image_height=64 \\n --image_width=64 \\n --patch_size=16 \\n --max_lr=3e-5 \\n --max_noise_level=0 \\n --log_image_interval=1000 \\n --log \\n --log_checkpoint_interval=1000 \\n --name=coinrun-dynamics-maskgit-no-noise-$slurm_job_id \\n --tags coinrun dynamics maskgit no-noise \\n --entity instant-uv \\n --project jafar \\n --tokenizer_checkpoint=$tokenizer_ckpt_dir \\n --eval_full_frame \\n --data_dir $array_records_dir_train \\n --val_data_dir $array_records_dir_val &\n\nchild_pid=$!\n\nwait $child_pid",shellscript,selection_mouse +1161,1066027,"slurm/jobs/mihir/horeka/coinrun/ablations/train_dyn_default-no-noise.sh",123,2464,"#SBATCH --cpus-per-task=5\n#SBATCH --gres=gpu:1\n#SBATCH --output=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir/coinrun/dynamics/maskgit/%x_%j.log\n#SBATCH --error=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir/coinrun/dynamics/maskgit/%x_%j.log\n#SBATCH --job-name=train_dynamics_maskgit\n#SBATCH --requeue\n#SBATCH --signal=b:usr1@300 # 5 min before timeout\n\n# --- signal trap to requeue job before timeout ---\nrequeue_job() {\n echo ""[$(date)] caught sigusr1 (timeout warning), requeueing slurm job $SLURM_JOB_ID...""\n # optional: trigger checkpoint saving here\n # e.g., touch $checkpoint_dir/requeue_trigger\n scontrol requeue $SLURM_JOB_ID\n exit 0\n}\n\ntrap requeue_job sigusr1\n\n# set checkpoint flag based on restart count\nrestart_count=$(scontrol show job $SLURM_JOB_ID | grep -o 'Restarts=[0-9]*' | cut -d'=' -f2)\n\nif [ $restart_count -eq 0 ]; then\n restore_ckpt_flag=""--no-restore-ckpt""\nelse\n 
restore_ckpt_flag=""--restore-ckpt""\nfi\n\n\n\n# Log the sbatch script\ncat $0\n\nmodule unload mpi/openmpi/5.0\nmodule unload devel/cuda/12.4\nsource .venv/bin/activate\n\narray_records_dir_train=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_coinrun/coinrun_episodes_10m_gt_actions_split/train\narray_records_dir_val=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_coinrun/coinrun_episodes_10m_gt_actions_split/val\n\njob_name=$SLURM_JOB_NAME\nslurm_job_id=$SLURM_JOB_ID\n\nCHECKPOINT_DIR=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/coinrun/maskgit/$job_name/$slurm_job_id\nmkdir -p $CHECKPOINT_DIR\n\ntokenizer_ckpt_dir=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/big-runs/tokenizer/train_tokenizer_default/3528955\n\nenv | grep SLURM\n\nsrun python jasmine/train_dynamics.py \\n --save_ckpt \\n $restore_ckpt_flag \\n --wandb_id $SLURM_JOB_ID \\n --ckpt_dir $CHECKPOINT_DIR \\n --batch_size=110 \\n --image_height=64 \\n --image_width=64 \\n --patch_size=16 \\n --max_lr=3e-5 \\n --max_noise_level=0 \\n --log_image_interval=1000 \\n --log \\n --log_checkpoint_interval=1000 \\n --name=coinrun-dynamics-maskgit-no-noise-$slurm_job_id \\n --tags coinrun dynamics maskgit no-noise \\n --entity instant-uv \\n --project jafar \\n --tokenizer_checkpoint=$tokenizer_ckpt_dir \\n --eval_full_frame \\n --data_dir $array_records_dir_train \\n --val_data_dir $array_records_dir_val &\n\nchild_pid=$!\n\nwait $child_pid",shellscript,selection_mouse +1162,1066028,"slurm/jobs/mihir/horeka/coinrun/ablations/train_dyn_default-no-noise.sh",0,2587,"#!/usr/bin/env bash\n\n#SBATCH --nodes=1\n#SBATCH --ntasks-per-node=1\n#SBATCH --time=48:00:00\n#SBATCH --partition=accelerated\n#SBATCH --cpus-per-task=5\n#SBATCH --gres=gpu:1\n#SBATCH --output=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir/coinrun/dynamics/maskgit/%x_%j.log\n#SBATCH --error=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir/coinrun/dynamics/maskgit/%x_%j.log\n#SBATCH --job-name=train_dynamics_maskgit\n#SBATCH --requeue\n#SBATCH --signal=b:usr1@300 # 5 min before timeout\n\n# --- signal trap to requeue job before timeout ---\nrequeue_job() {\n echo ""[$(date)] caught sigusr1 (timeout warning), requeueing slurm job $SLURM_JOB_ID...""\n # optional: trigger checkpoint saving here\n # e.g., touch $checkpoint_dir/requeue_trigger\n scontrol requeue $SLURM_JOB_ID\n exit 0\n}\n\ntrap requeue_job sigusr1\n\n# set checkpoint flag based on restart count\nrestart_count=$(scontrol show job $SLURM_JOB_ID | grep -o 'Restarts=[0-9]*' | cut -d'=' -f2)\n\nif [ $restart_count -eq 0 ]; then\n restore_ckpt_flag=""--no-restore-ckpt""\nelse\n restore_ckpt_flag=""--restore-ckpt""\nfi\n\n\n\n# Log the sbatch script\ncat $0\n\nmodule unload mpi/openmpi/5.0\nmodule unload devel/cuda/12.4\nsource .venv/bin/activate\n\narray_records_dir_train=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_coinrun/coinrun_episodes_10m_gt_actions_split/train\narray_records_dir_val=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_coinrun/coinrun_episodes_10m_gt_actions_split/val\n\njob_name=$SLURM_JOB_NAME\nslurm_job_id=$SLURM_JOB_ID\n\nCHECKPOINT_DIR=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/coinrun/maskgit/$job_name/$slurm_job_id\nmkdir -p $CHECKPOINT_DIR\n\ntokenizer_ckpt_dir=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/big-runs/tokenizer/train_tokenizer_default/3528955\n\nenv | grep SLURM\n\nsrun python 
jasmine/train_dynamics.py \\n --save_ckpt \\n $restore_ckpt_flag \\n --wandb_id $SLURM_JOB_ID \\n --ckpt_dir $CHECKPOINT_DIR \\n --batch_size=110 \\n --image_height=64 \\n --image_width=64 \\n --patch_size=16 \\n --max_lr=3e-5 \\n --max_noise_level=0 \\n --log_image_interval=1000 \\n --log \\n --log_checkpoint_interval=1000 \\n --name=coinrun-dynamics-maskgit-no-noise-$slurm_job_id \\n --tags coinrun dynamics maskgit no-noise \\n --entity instant-uv \\n --project jafar \\n --tokenizer_checkpoint=$tokenizer_ckpt_dir \\n --eval_full_frame \\n --data_dir $array_records_dir_train \\n --val_data_dir $array_records_dir_val &\n\nchild_pid=$!\n\nwait $child_pid",shellscript,selection_mouse +1163,1066250,"slurm/jobs/mihir/horeka/coinrun/ablations/train_dyn_default-no-noise.sh",170,2417,"#SBATCH --output=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir/coinrun/dynamics/maskgit/%x_%j.log\n#SBATCH --error=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir/coinrun/dynamics/maskgit/%x_%j.log\n#SBATCH --job-name=train_dynamics_maskgit\n#SBATCH --requeue\n#SBATCH --signal=b:usr1@300 # 5 min before timeout\n\n# --- signal trap to requeue job before timeout ---\nrequeue_job() {\n echo ""[$(date)] caught sigusr1 (timeout warning), requeueing slurm job $SLURM_JOB_ID...""\n # optional: trigger checkpoint saving here\n # e.g., touch $checkpoint_dir/requeue_trigger\n scontrol requeue $SLURM_JOB_ID\n exit 0\n}\n\ntrap requeue_job sigusr1\n\n# set checkpoint flag based on restart count\nrestart_count=$(scontrol show job $SLURM_JOB_ID | grep -o 'Restarts=[0-9]*' | cut -d'=' -f2)\n\nif [ $restart_count -eq 0 ]; then\n restore_ckpt_flag=""--no-restore-ckpt""\nelse\n restore_ckpt_flag=""--restore-ckpt""\nfi\n\n\n\n# Log the sbatch script\ncat $0\n\nmodule unload mpi/openmpi/5.0\nmodule unload devel/cuda/12.4\nsource .venv/bin/activate\n\narray_records_dir_train=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_coinrun/coinrun_episodes_10m_gt_actions_split/train\narray_records_dir_val=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_coinrun/coinrun_episodes_10m_gt_actions_split/val\n\njob_name=$SLURM_JOB_NAME\nslurm_job_id=$SLURM_JOB_ID\n\nCHECKPOINT_DIR=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/coinrun/maskgit/$job_name/$slurm_job_id\nmkdir -p $CHECKPOINT_DIR\n\ntokenizer_ckpt_dir=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/big-runs/tokenizer/train_tokenizer_default/3528955\n\nenv | grep SLURM\n\nsrun python jasmine/train_dynamics.py \\n --save_ckpt \\n $restore_ckpt_flag \\n --wandb_id $SLURM_JOB_ID \\n --ckpt_dir $CHECKPOINT_DIR \\n --batch_size=110 \\n --image_height=64 \\n --image_width=64 \\n --patch_size=16 \\n --max_lr=3e-5 \\n --max_noise_level=0 \\n --log_image_interval=1000 \\n --log \\n --log_checkpoint_interval=1000 \\n --name=coinrun-dynamics-maskgit-no-noise-$slurm_job_id \\n --tags coinrun dynamics maskgit no-noise \\n --entity instant-uv \\n --project jafar \\n --tokenizer_checkpoint=$tokenizer_ckpt_dir \\n --eval_full_frame \\n --data_dir $array_records_dir_train \\n --val_data_dir $array_records_dir_val &\n\nchild_pid=$!\n\nwait $child_pid",shellscript,selection_mouse +1164,1066269,"slurm/jobs/mihir/horeka/coinrun/ablations/train_dyn_default-no-noise.sh",833,1754,"}\n\ntrap requeue_job sigusr1\n\n# set checkpoint flag based on restart count\nrestart_count=$(scontrol show job $SLURM_JOB_ID | grep -o 'Restarts=[0-9]*' | cut -d'=' -f2)\n\nif [ $restart_count -eq 0 ]; then\n 
restore_ckpt_flag=""--no-restore-ckpt""\nelse\n restore_ckpt_flag=""--restore-ckpt""\nfi\n\n\n\n# Log the sbatch script\ncat $0\n\nmodule unload mpi/openmpi/5.0\nmodule unload devel/cuda/12.4\nsource .venv/bin/activate\n\narray_records_dir_train=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_coinrun/coinrun_episodes_10m_gt_actions_split/train\narray_records_dir_val=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_coinrun/coinrun_episodes_10m_gt_actions_split/val\n\njob_name=$SLURM_JOB_NAME\nslurm_job_id=$SLURM_JOB_ID\n\nCHECKPOINT_DIR=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/coinrun/maskgit/$job_name/$slurm_job_id\nmkdir -p $CHECKPOINT_DIR\n\ntokenizer_ckpt_dir=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/big-runs/tokenizer/train_tokenizer_default/3528955\n\nenv | grep SLURM\n\nsrun python jasmine/train_dynamics.py \\n --save_ckpt \\n $restore_ckpt_flag \\n --wandb_id $SLURM_JOB_ID \\n --ckpt_dir $CHECKPOINT_DIR \\n --batch_size=110 \\n --image_height=64 \\n --image_width=64 \\n --patch_size=16 \\n --max_lr=3e-5 \\n --max_noise_level=0 \\n --log_image_interval=1000 \\n --log \\n --log_checkpoint_interval=1000 \\n --name=coinrun-dynamics-maskgit-no-noise-$slurm_job_id \\n --tags coinrun dynamics maskgit no-noise \\n --entity instant-uv \\n --project jafar \\n --tokenizer_checkpoint=$tokenizer_ckpt_dir \\n --eval_full_frame \\n --data_dir $array_records_dir_train \\n --val_data_dir $array_records_dir_val &\n\nchild_pid=$!\n\nwait $child_pid",shellscript,selection_mouse +1165,1066300,"slurm/jobs/mihir/horeka/coinrun/ablations/train_dyn_default-no-noise.sh",861,1726,"\n# set checkpoint flag based on restart count\nrestart_count=$(scontrol show job $SLURM_JOB_ID | grep -o 'Restarts=[0-9]*' | cut -d'=' -f2)\n\nif [ $restart_count -eq 0 ]; then\n restore_ckpt_flag=""--no-restore-ckpt""\nelse\n restore_ckpt_flag=""--restore-ckpt""\nfi\n\n\n\n# Log the sbatch script\ncat $0\n\nmodule unload mpi/openmpi/5.0\nmodule unload devel/cuda/12.4\nsource .venv/bin/activate\n\narray_records_dir_train=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_coinrun/coinrun_episodes_10m_gt_actions_split/train\narray_records_dir_val=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_coinrun/coinrun_episodes_10m_gt_actions_split/val\n\njob_name=$SLURM_JOB_NAME\nslurm_job_id=$SLURM_JOB_ID\n\nCHECKPOINT_DIR=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/coinrun/maskgit/$job_name/$slurm_job_id\nmkdir -p $CHECKPOINT_DIR\n\ntokenizer_ckpt_dir=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/big-runs/tokenizer/train_tokenizer_default/3528955\n\nenv | grep SLURM\n\nsrun python jasmine/train_dynamics.py \\n --save_ckpt \\n $restore_ckpt_flag \\n --wandb_id $SLURM_JOB_ID \\n --ckpt_dir $CHECKPOINT_DIR \\n --batch_size=110 \\n --image_height=64 \\n --image_width=64 \\n --patch_size=16 \\n --max_lr=3e-5 \\n --max_noise_level=0 \\n --log_image_interval=1000 \\n --log \\n --log_checkpoint_interval=1000 \\n --name=coinrun-dynamics-maskgit-no-noise-$slurm_job_id \\n --tags coinrun dynamics maskgit no-noise \\n --entity instant-uv \\n --project jafar \\n --tokenizer_checkpoint=$tokenizer_ckpt_dir \\n --eval_full_frame \\n --data_dir $array_records_dir_train \\n --val_data_dir $array_records_dir_val &\n\nchild_pid=$!\n\nwait $child_pid",shellscript,selection_mouse +1166,1066335,"slurm/jobs/mihir/horeka/coinrun/ablations/train_dyn_default-no-noise.sh",1082,1505," 
restore_ckpt_flag=""--restore-ckpt""\nfi\n\n\n\n# Log the sbatch script\ncat $0\n\nmodule unload mpi/openmpi/5.0\nmodule unload devel/cuda/12.4\nsource .venv/bin/activate\n\narray_records_dir_train=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_coinrun/coinrun_episodes_10m_gt_actions_split/train\narray_records_dir_val=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_coinrun/coinrun_episodes_10m_gt_actions_split/val\n\njob_name=$SLURM_JOB_NAME\nslurm_job_id=$SLURM_JOB_ID\n\nCHECKPOINT_DIR=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/coinrun/maskgit/$job_name/$slurm_job_id\nmkdir -p $CHECKPOINT_DIR\n\ntokenizer_ckpt_dir=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/big-runs/tokenizer/train_tokenizer_default/3528955\n\nenv | grep SLURM\n\nsrun python jasmine/train_dynamics.py \\n --save_ckpt \\n $restore_ckpt_flag \\n --wandb_id $SLURM_JOB_ID \\n --ckpt_dir $CHECKPOINT_DIR \\n --batch_size=110 \\n --image_height=64 \\n --image_width=64 \\n --patch_size=16 \\n --max_lr=3e-5 \\n --max_noise_level=0 \\n --log_image_interval=1000 \\n --log \\n --log_checkpoint_interval=1000 \\n --name=coinrun-dynamics-maskgit-no-noise-$slurm_job_id \\n --tags coinrun dynamics maskgit no-noise \\n --entity instant-uv \\n --project jafar \\n --tokenizer_checkpoint=$tokenizer_ckpt_dir \\n --eval_full_frame \\n --data_dir $array_records_dir_train \\n --val_data_dir $array_records_dir_val &\n\nchild_pid=$!\n\nwait $child_pid",shellscript,selection_mouse +1167,1066336,"slurm/jobs/mihir/horeka/coinrun/ablations/train_dyn_default-no-noise.sh",1127,1460,"# Log the sbatch script\ncat $0\n\nmodule unload mpi/openmpi/5.0\nmodule unload devel/cuda/12.4\nsource .venv/bin/activate\n\narray_records_dir_train=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_coinrun/coinrun_episodes_10m_gt_actions_split/train\narray_records_dir_val=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_coinrun/coinrun_episodes_10m_gt_actions_split/val\n\njob_name=$SLURM_JOB_NAME\nslurm_job_id=$SLURM_JOB_ID\n\nCHECKPOINT_DIR=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/coinrun/maskgit/$job_name/$slurm_job_id\nmkdir -p $CHECKPOINT_DIR\n\ntokenizer_ckpt_dir=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/big-runs/tokenizer/train_tokenizer_default/3528955\n\nenv | grep SLURM\n\nsrun python jasmine/train_dynamics.py \\n --save_ckpt \\n $restore_ckpt_flag \\n --wandb_id $SLURM_JOB_ID \\n --ckpt_dir $CHECKPOINT_DIR \\n --batch_size=110 \\n --image_height=64 \\n --image_width=64 \\n --patch_size=16 \\n --max_lr=3e-5 \\n --max_noise_level=0 \\n --log_image_interval=1000 \\n --log \\n --log_checkpoint_interval=1000 \\n --name=coinrun-dynamics-maskgit-no-noise-$slurm_job_id \\n --tags coinrun dynamics maskgit no-noise \\n --entity instant-uv \\n --project jafar \\n --tokenizer_checkpoint=$tokenizer_ckpt_dir \\n --eval_full_frame \\n --data_dir $array_records_dir_train \\n --val_data_dir $array_records_dir_val &\n\nchild_pid=$!\n\nwait $child_pid",shellscript,selection_mouse +1168,1066337,"slurm/jobs/mihir/horeka/coinrun/ablations/train_dyn_default-no-noise.sh",1151,1436,"cat $0\n\nmodule unload mpi/openmpi/5.0\nmodule unload devel/cuda/12.4\nsource 
.venv/bin/activate\n\narray_records_dir_train=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_coinrun/coinrun_episodes_10m_gt_actions_split/train\narray_records_dir_val=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_coinrun/coinrun_episodes_10m_gt_actions_split/val\n\njob_name=$SLURM_JOB_NAME\nslurm_job_id=$SLURM_JOB_ID\n\nCHECKPOINT_DIR=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/coinrun/maskgit/$job_name/$slurm_job_id\nmkdir -p $CHECKPOINT_DIR\n\ntokenizer_ckpt_dir=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/big-runs/tokenizer/train_tokenizer_default/3528955\n\nenv | grep SLURM\n\nsrun python jasmine/train_dynamics.py \\n --save_ckpt \\n $restore_ckpt_flag \\n --wandb_id $SLURM_JOB_ID \\n --ckpt_dir $CHECKPOINT_DIR \\n --batch_size=110 \\n --image_height=64 \\n --image_width=64 \\n --patch_size=16 \\n --max_lr=3e-5 \\n --max_noise_level=0 \\n --log_image_interval=1000 \\n --log \\n --log_checkpoint_interval=1000 \\n --name=coinrun-dynamics-maskgit-no-noise-$slurm_job_id \\n --tags coinrun dynamics maskgit no-noise \\n --entity instant-uv \\n --project jafar \\n --tokenizer_checkpoint=$tokenizer_ckpt_dir \\n --eval_full_frame \\n --data_dir $array_records_dir_train \\n --val_data_dir $array_records_dir_val &\n\nchild_pid=$!\n\nwait $child_pid",shellscript,selection_mouse +1169,1066356,"slurm/jobs/mihir/horeka/coinrun/ablations/train_dyn_default-no-noise.sh",1383,1204,"array_records_dir_val=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_coinrun/coinrun_episodes_10m_gt_actions_split/val\n\njob_name=$SLURM_JOB_NAME\nslurm_job_id=$SLURM_JOB_ID\n\nCHECKPOINT_DIR=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/coinrun/maskgit/$job_name/$slurm_job_id\nmkdir -p $CHECKPOINT_DIR\n\ntokenizer_ckpt_dir=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/big-runs/tokenizer/train_tokenizer_default/3528955\n\nenv | grep SLURM\n\nsrun python jasmine/train_dynamics.py \\n --save_ckpt \\n $restore_ckpt_flag \\n --wandb_id $SLURM_JOB_ID \\n --ckpt_dir $CHECKPOINT_DIR \\n --batch_size=110 \\n --image_height=64 \\n --image_width=64 \\n --patch_size=16 \\n --max_lr=3e-5 \\n --max_noise_level=0 \\n --log_image_interval=1000 \\n --log \\n --log_checkpoint_interval=1000 \\n --name=coinrun-dynamics-maskgit-no-noise-$slurm_job_id \\n --tags coinrun dynamics maskgit no-noise \\n --entity instant-uv \\n --project jafar \\n --tokenizer_checkpoint=$tokenizer_ckpt_dir \\n --eval_full_frame \\n --data_dir $array_records_dir_train \\n --val_data_dir $array_records_dir_val &\n\nchild_pid=$!\n\nwait $child_pid",shellscript,selection_mouse +1170,1066372,"slurm/jobs/mihir/horeka/coinrun/ablations/train_dyn_default-no-noise.sh",1856,731,"\n\nenv | grep SLURM\n\nsrun python jasmine/train_dynamics.py \\n --save_ckpt \\n $restore_ckpt_flag \\n --wandb_id $SLURM_JOB_ID \\n --ckpt_dir $CHECKPOINT_DIR \\n --batch_size=110 \\n --image_height=64 \\n --image_width=64 \\n --patch_size=16 \\n --max_lr=3e-5 \\n --max_noise_level=0 \\n --log_image_interval=1000 \\n --log \\n --log_checkpoint_interval=1000 \\n --name=coinrun-dynamics-maskgit-no-noise-$slurm_job_id \\n --tags coinrun dynamics maskgit no-noise \\n --entity instant-uv \\n --project jafar \\n --tokenizer_checkpoint=$tokenizer_ckpt_dir \\n --eval_full_frame \\n --data_dir $array_records_dir_train \\n --val_data_dir $array_records_dir_val &\n\nchild_pid=$!\n\nwait $child_pid",shellscript,selection_mouse 
+1171,1066403,"slurm/jobs/mihir/horeka/coinrun/ablations/train_dyn_default-no-noise.sh",2022,565,"\n --batch_size=110 \\n --image_height=64 \\n --image_width=64 \\n --patch_size=16 \\n --max_lr=3e-5 \\n --max_noise_level=0 \\n --log_image_interval=1000 \\n --log \\n --log_checkpoint_interval=1000 \\n --name=coinrun-dynamics-maskgit-no-noise-$slurm_job_id \\n --tags coinrun dynamics maskgit no-noise \\n --entity instant-uv \\n --project jafar \\n --tokenizer_checkpoint=$tokenizer_ckpt_dir \\n --eval_full_frame \\n --data_dir $array_records_dir_train \\n --val_data_dir $array_records_dir_val &\n\nchild_pid=$!\n\nwait $child_pid",shellscript,selection_mouse +1172,1066404,"slurm/jobs/mihir/horeka/coinrun/ablations/train_dyn_default-no-noise.sh",2204,383,"\n --log_checkpoint_interval=1000 \\n --name=coinrun-dynamics-maskgit-no-noise-$slurm_job_id \\n --tags coinrun dynamics maskgit no-noise \\n --entity instant-uv \\n --project jafar \\n --tokenizer_checkpoint=$tokenizer_ckpt_dir \\n --eval_full_frame \\n --data_dir $array_records_dir_train \\n --val_data_dir $array_records_dir_val &\n\nchild_pid=$!\n\nwait $child_pid",shellscript,selection_mouse +1173,1066405,"TERMINAL",0,0,"1",,terminal_output +1174,1066452,"slurm/jobs/mihir/horeka/coinrun/ablations/train_dyn_default-no-noise.sh",2471,116," --data_dir $array_records_dir_train \\n --val_data_dir $array_records_dir_val &\n\nchild_pid=$!\n\nwait $child_pid",shellscript,selection_mouse +1175,1066453,"slurm/jobs/mihir/horeka/coinrun/ablations/train_dyn_default-no-noise.sh",2587,0,"",shellscript,selection_mouse +1176,1066453,"slurm/jobs/mihir/horeka/coinrun/ablations/train_dyn_default-no-noise.sh",2586,0,"",shellscript,selection_command +1177,1066453,"slurm/jobs/mihir/horeka/coinrun/ablations/train_dyn_default-no-noise.sh",2586,1,"d",shellscript,selection_mouse +1178,1066455,"slurm/jobs/mihir/horeka/coinrun/ablations/train_dyn_default-no-noise.sh",2587,0,"",shellscript,selection_command +1179,1066673,"slurm/jobs/mihir/horeka/coinrun/ablations/train_dyn_default-no-noise.sh",2572,15,"wait $child_pid",shellscript,selection_mouse +1180,1067070,"slurm/jobs/mihir/horeka/coinrun/ablations/train_dyn_default-no-noise.sh",2587,0,"",shellscript,selection_mouse +1181,1067070,"slurm/jobs/mihir/horeka/coinrun/ablations/train_dyn_default-no-noise.sh",2586,0,"",shellscript,selection_command +1182,1067119,"slurm/jobs/mihir/horeka/coinrun/ablations/train_dyn_default-no-noise.sh",2586,1,"d",shellscript,selection_mouse +1183,1067120,"slurm/jobs/mihir/horeka/coinrun/ablations/train_dyn_default-no-noise.sh",2587,0,"",shellscript,selection_command +1184,1067355,"slurm/jobs/mihir/horeka/coinrun/ablations/train_dyn_default-no-noise.sh",2398,189," --tokenizer_checkpoint=$tokenizer_ckpt_dir \\n --eval_full_frame \\n --data_dir $array_records_dir_train \\n --val_data_dir $array_records_dir_val &\n\nchild_pid=$!\n\nwait $child_pid",shellscript,selection_mouse +1185,1067355,"slurm/jobs/mihir/horeka/coinrun/ablations/train_dyn_default-no-noise.sh",2135,452," --max_noise_level=0 \\n --log_image_interval=1000 \\n --log \\n --log_checkpoint_interval=1000 \\n --name=coinrun-dynamics-maskgit-no-noise-$slurm_job_id \\n --tags coinrun dynamics maskgit no-noise \\n --entity instant-uv \\n --project jafar \\n --tokenizer_checkpoint=$tokenizer_ckpt_dir \\n --eval_full_frame \\n --data_dir $array_records_dir_train \\n --val_data_dir $array_records_dir_val &\n\nchild_pid=$!\n\nwait $child_pid",shellscript,selection_mouse 
+1186,1067378,"slurm/jobs/mihir/horeka/coinrun/ablations/train_dyn_default-no-noise.sh",1934,653," $restore_ckpt_flag \\n --wandb_id $SLURM_JOB_ID \\n --ckpt_dir $CHECKPOINT_DIR \\n --batch_size=110 \\n --image_height=64 \\n --image_width=64 \\n --patch_size=16 \\n --max_lr=3e-5 \\n --max_noise_level=0 \\n --log_image_interval=1000 \\n --log \\n --log_checkpoint_interval=1000 \\n --name=coinrun-dynamics-maskgit-no-noise-$slurm_job_id \\n --tags coinrun dynamics maskgit no-noise \\n --entity instant-uv \\n --project jafar \\n --tokenizer_checkpoint=$tokenizer_ckpt_dir \\n --eval_full_frame \\n --data_dir $array_records_dir_train \\n --val_data_dir $array_records_dir_val &\n\nchild_pid=$!\n\nwait $child_pid",shellscript,selection_mouse +1187,1067379,"slurm/jobs/mihir/horeka/coinrun/ablations/train_dyn_default-no-noise.sh",1718,869,"\ntokenizer_ckpt_dir=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/big-runs/tokenizer/train_tokenizer_default/3528955\n\nenv | grep SLURM\n\nsrun python jasmine/train_dynamics.py \\n --save_ckpt \\n $restore_ckpt_flag \\n --wandb_id $SLURM_JOB_ID \\n --ckpt_dir $CHECKPOINT_DIR \\n --batch_size=110 \\n --image_height=64 \\n --image_width=64 \\n --patch_size=16 \\n --max_lr=3e-5 \\n --max_noise_level=0 \\n --log_image_interval=1000 \\n --log \\n --log_checkpoint_interval=1000 \\n --name=coinrun-dynamics-maskgit-no-noise-$slurm_job_id \\n --tags coinrun dynamics maskgit no-noise \\n --entity instant-uv \\n --project jafar \\n --tokenizer_checkpoint=$tokenizer_ckpt_dir \\n --eval_full_frame \\n --data_dir $array_records_dir_train \\n --val_data_dir $array_records_dir_val &\n\nchild_pid=$!\n\nwait $child_pid",shellscript,selection_mouse +1188,1067381,"slurm/jobs/mihir/horeka/coinrun/ablations/train_dyn_default-no-noise.sh",1246,1341,"array_records_dir_train=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_coinrun/coinrun_episodes_10m_gt_actions_split/train\narray_records_dir_val=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_coinrun/coinrun_episodes_10m_gt_actions_split/val\n\njob_name=$SLURM_JOB_NAME\nslurm_job_id=$SLURM_JOB_ID\n\nCHECKPOINT_DIR=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/coinrun/maskgit/$job_name/$slurm_job_id\nmkdir -p $CHECKPOINT_DIR\n\ntokenizer_ckpt_dir=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/big-runs/tokenizer/train_tokenizer_default/3528955\n\nenv | grep SLURM\n\nsrun python jasmine/train_dynamics.py \\n --save_ckpt \\n $restore_ckpt_flag \\n --wandb_id $SLURM_JOB_ID \\n --ckpt_dir $CHECKPOINT_DIR \\n --batch_size=110 \\n --image_height=64 \\n --image_width=64 \\n --patch_size=16 \\n --max_lr=3e-5 \\n --max_noise_level=0 \\n --log_image_interval=1000 \\n --log \\n --log_checkpoint_interval=1000 \\n --name=coinrun-dynamics-maskgit-no-noise-$slurm_job_id \\n --tags coinrun dynamics maskgit no-noise \\n --entity instant-uv \\n --project jafar \\n --tokenizer_checkpoint=$tokenizer_ckpt_dir \\n --eval_full_frame \\n --data_dir $array_records_dir_train \\n --val_data_dir $array_records_dir_val &\n\nchild_pid=$!\n\nwait $child_pid",shellscript,selection_mouse +1189,1067387,"slurm/jobs/mihir/horeka/coinrun/ablations/train_dyn_default-no-noise.sh",1127,1460,"# Log the sbatch script\ncat $0\n\nmodule unload mpi/openmpi/5.0\nmodule unload devel/cuda/12.4\nsource 
.venv/bin/activate\n\narray_records_dir_train=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_coinrun/coinrun_episodes_10m_gt_actions_split/train\narray_records_dir_val=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_coinrun/coinrun_episodes_10m_gt_actions_split/val\n\njob_name=$SLURM_JOB_NAME\nslurm_job_id=$SLURM_JOB_ID\n\nCHECKPOINT_DIR=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/coinrun/maskgit/$job_name/$slurm_job_id\nmkdir -p $CHECKPOINT_DIR\n\ntokenizer_ckpt_dir=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/big-runs/tokenizer/train_tokenizer_default/3528955\n\nenv | grep SLURM\n\nsrun python jasmine/train_dynamics.py \\n --save_ckpt \\n $restore_ckpt_flag \\n --wandb_id $SLURM_JOB_ID \\n --ckpt_dir $CHECKPOINT_DIR \\n --batch_size=110 \\n --image_height=64 \\n --image_width=64 \\n --patch_size=16 \\n --max_lr=3e-5 \\n --max_noise_level=0 \\n --log_image_interval=1000 \\n --log \\n --log_checkpoint_interval=1000 \\n --name=coinrun-dynamics-maskgit-no-noise-$slurm_job_id \\n --tags coinrun dynamics maskgit no-noise \\n --entity instant-uv \\n --project jafar \\n --tokenizer_checkpoint=$tokenizer_ckpt_dir \\n --eval_full_frame \\n --data_dir $array_records_dir_train \\n --val_data_dir $array_records_dir_val &\n\nchild_pid=$!\n\nwait $child_pid",shellscript,selection_mouse +1190,1067399,"slurm/jobs/mihir/horeka/coinrun/ablations/train_dyn_default-no-noise.sh",1035,1552," restore_ckpt_flag=""--no-restore-ckpt""\nelse\n restore_ckpt_flag=""--restore-ckpt""\nfi\n\n\n\n# Log the sbatch script\ncat $0\n\nmodule unload mpi/openmpi/5.0\nmodule unload devel/cuda/12.4\nsource .venv/bin/activate\n\narray_records_dir_train=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_coinrun/coinrun_episodes_10m_gt_actions_split/train\narray_records_dir_val=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_coinrun/coinrun_episodes_10m_gt_actions_split/val\n\njob_name=$SLURM_JOB_NAME\nslurm_job_id=$SLURM_JOB_ID\n\nCHECKPOINT_DIR=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/coinrun/maskgit/$job_name/$slurm_job_id\nmkdir -p $CHECKPOINT_DIR\n\ntokenizer_ckpt_dir=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/big-runs/tokenizer/train_tokenizer_default/3528955\n\nenv | grep SLURM\n\nsrun python jasmine/train_dynamics.py \\n --save_ckpt \\n $restore_ckpt_flag \\n --wandb_id $SLURM_JOB_ID \\n --ckpt_dir $CHECKPOINT_DIR \\n --batch_size=110 \\n --image_height=64 \\n --image_width=64 \\n --patch_size=16 \\n --max_lr=3e-5 \\n --max_noise_level=0 \\n --log_image_interval=1000 \\n --log \\n --log_checkpoint_interval=1000 \\n --name=coinrun-dynamics-maskgit-no-noise-$slurm_job_id \\n --tags coinrun dynamics maskgit no-noise \\n --entity instant-uv \\n --project jafar \\n --tokenizer_checkpoint=$tokenizer_ckpt_dir \\n --eval_full_frame \\n --data_dir $array_records_dir_train \\n --val_data_dir $array_records_dir_val &\n\nchild_pid=$!\n\nwait $child_pid",shellscript,selection_mouse +1191,1067417,"slurm/jobs/mihir/horeka/coinrun/ablations/train_dyn_default-no-noise.sh",835,1752,"\ntrap requeue_job sigusr1\n\n# set checkpoint flag based on restart count\nrestart_count=$(scontrol show job $SLURM_JOB_ID | grep -o 'Restarts=[0-9]*' | cut -d'=' -f2)\n\nif [ $restart_count -eq 0 ]; then\n restore_ckpt_flag=""--no-restore-ckpt""\nelse\n restore_ckpt_flag=""--restore-ckpt""\nfi\n\n\n\n# Log the sbatch script\ncat $0\n\nmodule unload mpi/openmpi/5.0\nmodule unload devel/cuda/12.4\nsource 
.venv/bin/activate\n\narray_records_dir_train=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_coinrun/coinrun_episodes_10m_gt_actions_split/train\narray_records_dir_val=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_coinrun/coinrun_episodes_10m_gt_actions_split/val\n\njob_name=$SLURM_JOB_NAME\nslurm_job_id=$SLURM_JOB_ID\n\nCHECKPOINT_DIR=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/coinrun/maskgit/$job_name/$slurm_job_id\nmkdir -p $CHECKPOINT_DIR\n\ntokenizer_ckpt_dir=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/big-runs/tokenizer/train_tokenizer_default/3528955\n\nenv | grep SLURM\n\nsrun python jasmine/train_dynamics.py \\n --save_ckpt \\n $restore_ckpt_flag \\n --wandb_id $SLURM_JOB_ID \\n --ckpt_dir $CHECKPOINT_DIR \\n --batch_size=110 \\n --image_height=64 \\n --image_width=64 \\n --patch_size=16 \\n --max_lr=3e-5 \\n --max_noise_level=0 \\n --log_image_interval=1000 \\n --log \\n --log_checkpoint_interval=1000 \\n --name=coinrun-dynamics-maskgit-no-noise-$slurm_job_id \\n --tags coinrun dynamics maskgit no-noise \\n --entity instant-uv \\n --project jafar \\n --tokenizer_checkpoint=$tokenizer_ckpt_dir \\n --eval_full_frame \\n --data_dir $array_records_dir_train \\n --val_data_dir $array_records_dir_val &\n\nchild_pid=$!\n\nwait $child_pid",shellscript,selection_mouse +1192,1067434,"slurm/jobs/mihir/horeka/coinrun/ablations/train_dyn_default-no-noise.sh",581,2006,"requeue_job() {\n echo ""[$(date)] caught sigusr1 (timeout warning), requeueing slurm job $SLURM_JOB_ID...""\n # optional: trigger checkpoint saving here\n # e.g., touch $checkpoint_dir/requeue_trigger\n scontrol requeue $SLURM_JOB_ID\n exit 0\n}\n\ntrap requeue_job sigusr1\n\n# set checkpoint flag based on restart count\nrestart_count=$(scontrol show job $SLURM_JOB_ID | grep -o 'Restarts=[0-9]*' | cut -d'=' -f2)\n\nif [ $restart_count -eq 0 ]; then\n restore_ckpt_flag=""--no-restore-ckpt""\nelse\n restore_ckpt_flag=""--restore-ckpt""\nfi\n\n\n\n# Log the sbatch script\ncat $0\n\nmodule unload mpi/openmpi/5.0\nmodule unload devel/cuda/12.4\nsource .venv/bin/activate\n\narray_records_dir_train=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_coinrun/coinrun_episodes_10m_gt_actions_split/train\narray_records_dir_val=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_coinrun/coinrun_episodes_10m_gt_actions_split/val\n\njob_name=$SLURM_JOB_NAME\nslurm_job_id=$SLURM_JOB_ID\n\nCHECKPOINT_DIR=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/coinrun/maskgit/$job_name/$slurm_job_id\nmkdir -p $CHECKPOINT_DIR\n\ntokenizer_ckpt_dir=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/big-runs/tokenizer/train_tokenizer_default/3528955\n\nenv | grep SLURM\n\nsrun python jasmine/train_dynamics.py \\n --save_ckpt \\n $restore_ckpt_flag \\n --wandb_id $SLURM_JOB_ID \\n --ckpt_dir $CHECKPOINT_DIR \\n --batch_size=110 \\n --image_height=64 \\n --image_width=64 \\n --patch_size=16 \\n --max_lr=3e-5 \\n --max_noise_level=0 \\n --log_image_interval=1000 \\n --log \\n --log_checkpoint_interval=1000 \\n --name=coinrun-dynamics-maskgit-no-noise-$slurm_job_id \\n --tags coinrun dynamics maskgit no-noise \\n --entity instant-uv \\n --project jafar \\n --tokenizer_checkpoint=$tokenizer_ckpt_dir \\n --eval_full_frame \\n --data_dir $array_records_dir_train \\n --val_data_dir $array_records_dir_val &\n\nchild_pid=$!\n\nwait $child_pid",shellscript,selection_mouse 
+1193,1067450,"slurm/jobs/mihir/horeka/coinrun/ablations/train_dyn_default-no-noise.sh",294,2293,"#SBATCH --error=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir/coinrun/dynamics/maskgit/%x_%j.log\n#SBATCH --job-name=train_dynamics_maskgit\n#SBATCH --requeue\n#SBATCH --signal=b:usr1@300 # 5 min before timeout\n\n# --- signal trap to requeue job before timeout ---\nrequeue_job() {\n echo ""[$(date)] caught sigusr1 (timeout warning), requeueing slurm job $SLURM_JOB_ID...""\n # optional: trigger checkpoint saving here\n # e.g., touch $checkpoint_dir/requeue_trigger\n scontrol requeue $SLURM_JOB_ID\n exit 0\n}\n\ntrap requeue_job sigusr1\n\n# set checkpoint flag based on restart count\nrestart_count=$(scontrol show job $SLURM_JOB_ID | grep -o 'Restarts=[0-9]*' | cut -d'=' -f2)\n\nif [ $restart_count -eq 0 ]; then\n restore_ckpt_flag=""--no-restore-ckpt""\nelse\n restore_ckpt_flag=""--restore-ckpt""\nfi\n\n\n\n# Log the sbatch script\ncat $0\n\nmodule unload mpi/openmpi/5.0\nmodule unload devel/cuda/12.4\nsource .venv/bin/activate\n\narray_records_dir_train=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_coinrun/coinrun_episodes_10m_gt_actions_split/train\narray_records_dir_val=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_coinrun/coinrun_episodes_10m_gt_actions_split/val\n\njob_name=$SLURM_JOB_NAME\nslurm_job_id=$SLURM_JOB_ID\n\nCHECKPOINT_DIR=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/coinrun/maskgit/$job_name/$slurm_job_id\nmkdir -p $CHECKPOINT_DIR\n\ntokenizer_ckpt_dir=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/big-runs/tokenizer/train_tokenizer_default/3528955\n\nenv | grep SLURM\n\nsrun python jasmine/train_dynamics.py \\n --save_ckpt \\n $restore_ckpt_flag \\n --wandb_id $SLURM_JOB_ID \\n --ckpt_dir $CHECKPOINT_DIR \\n --batch_size=110 \\n --image_height=64 \\n --image_width=64 \\n --patch_size=16 \\n --max_lr=3e-5 \\n --max_noise_level=0 \\n --log_image_interval=1000 \\n --log \\n --log_checkpoint_interval=1000 \\n --name=coinrun-dynamics-maskgit-no-noise-$slurm_job_id \\n --tags coinrun dynamics maskgit no-noise \\n --entity instant-uv \\n --project jafar \\n --tokenizer_checkpoint=$tokenizer_ckpt_dir \\n --eval_full_frame \\n --data_dir $array_records_dir_train \\n --val_data_dir $array_records_dir_val &\n\nchild_pid=$!\n\nwait $child_pid",shellscript,selection_mouse +1194,1067466,"slurm/jobs/mihir/horeka/coinrun/ablations/train_dyn_default-no-noise.sh",21,2566,"#SBATCH --nodes=1\n#SBATCH --ntasks-per-node=1\n#SBATCH --time=48:00:00\n#SBATCH --partition=accelerated\n#SBATCH --cpus-per-task=5\n#SBATCH --gres=gpu:1\n#SBATCH --output=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir/coinrun/dynamics/maskgit/%x_%j.log\n#SBATCH --error=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir/coinrun/dynamics/maskgit/%x_%j.log\n#SBATCH --job-name=train_dynamics_maskgit\n#SBATCH --requeue\n#SBATCH --signal=b:usr1@300 # 5 min before timeout\n\n# --- signal trap to requeue job before timeout ---\nrequeue_job() {\n echo ""[$(date)] caught sigusr1 (timeout warning), requeueing slurm job $SLURM_JOB_ID...""\n # optional: trigger checkpoint saving here\n # e.g., touch $checkpoint_dir/requeue_trigger\n scontrol requeue $SLURM_JOB_ID\n exit 0\n}\n\ntrap requeue_job sigusr1\n\n# set checkpoint flag based on restart count\nrestart_count=$(scontrol show job $SLURM_JOB_ID | grep -o 'Restarts=[0-9]*' | cut -d'=' -f2)\n\nif [ $restart_count -eq 0 ]; then\n 
restore_ckpt_flag=""--no-restore-ckpt""\nelse\n restore_ckpt_flag=""--restore-ckpt""\nfi\n\n\n\n# Log the sbatch script\ncat $0\n\nmodule unload mpi/openmpi/5.0\nmodule unload devel/cuda/12.4\nsource .venv/bin/activate\n\narray_records_dir_train=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_coinrun/coinrun_episodes_10m_gt_actions_split/train\narray_records_dir_val=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_coinrun/coinrun_episodes_10m_gt_actions_split/val\n\njob_name=$SLURM_JOB_NAME\nslurm_job_id=$SLURM_JOB_ID\n\nCHECKPOINT_DIR=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/coinrun/maskgit/$job_name/$slurm_job_id\nmkdir -p $CHECKPOINT_DIR\n\ntokenizer_ckpt_dir=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/big-runs/tokenizer/train_tokenizer_default/3528955\n\nenv | grep SLURM\n\nsrun python jasmine/train_dynamics.py \\n --save_ckpt \\n $restore_ckpt_flag \\n --wandb_id $SLURM_JOB_ID \\n --ckpt_dir $CHECKPOINT_DIR \\n --batch_size=110 \\n --image_height=64 \\n --image_width=64 \\n --patch_size=16 \\n --max_lr=3e-5 \\n --max_noise_level=0 \\n --log_image_interval=1000 \\n --log \\n --log_checkpoint_interval=1000 \\n --name=coinrun-dynamics-maskgit-no-noise-$slurm_job_id \\n --tags coinrun dynamics maskgit no-noise \\n --entity instant-uv \\n --project jafar \\n --tokenizer_checkpoint=$tokenizer_ckpt_dir \\n --eval_full_frame \\n --data_dir $array_records_dir_train \\n --val_data_dir $array_records_dir_val &\n\nchild_pid=$!\n\nwait $child_pid",shellscript,selection_mouse +1195,1067484,"slurm/jobs/mihir/horeka/coinrun/ablations/train_dyn_default-no-noise.sh",0,2587,"#!/usr/bin/env bash\n\n#SBATCH --nodes=1\n#SBATCH --ntasks-per-node=1\n#SBATCH --time=48:00:00\n#SBATCH --partition=accelerated\n#SBATCH --cpus-per-task=5\n#SBATCH --gres=gpu:1\n#SBATCH --output=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir/coinrun/dynamics/maskgit/%x_%j.log\n#SBATCH --error=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir/coinrun/dynamics/maskgit/%x_%j.log\n#SBATCH --job-name=train_dynamics_maskgit\n#SBATCH --requeue\n#SBATCH --signal=b:usr1@300 # 5 min before timeout\n\n# --- signal trap to requeue job before timeout ---\nrequeue_job() {\n echo ""[$(date)] caught sigusr1 (timeout warning), requeueing slurm job $SLURM_JOB_ID...""\n # optional: trigger checkpoint saving here\n # e.g., touch $checkpoint_dir/requeue_trigger\n scontrol requeue $SLURM_JOB_ID\n exit 0\n}\n\ntrap requeue_job sigusr1\n\n# set checkpoint flag based on restart count\nrestart_count=$(scontrol show job $SLURM_JOB_ID | grep -o 'Restarts=[0-9]*' | cut -d'=' -f2)\n\nif [ $restart_count -eq 0 ]; then\n restore_ckpt_flag=""--no-restore-ckpt""\nelse\n restore_ckpt_flag=""--restore-ckpt""\nfi\n\n\n\n# Log the sbatch script\ncat $0\n\nmodule unload mpi/openmpi/5.0\nmodule unload devel/cuda/12.4\nsource .venv/bin/activate\n\narray_records_dir_train=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_coinrun/coinrun_episodes_10m_gt_actions_split/train\narray_records_dir_val=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_coinrun/coinrun_episodes_10m_gt_actions_split/val\n\njob_name=$SLURM_JOB_NAME\nslurm_job_id=$SLURM_JOB_ID\n\nCHECKPOINT_DIR=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/coinrun/maskgit/$job_name/$slurm_job_id\nmkdir -p 
$CHECKPOINT_DIR\n\ntokenizer_ckpt_dir=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/big-runs/tokenizer/train_tokenizer_default/3528955\n\nenv | grep SLURM\n\nsrun python jasmine/train_dynamics.py \\n --save_ckpt \\n $restore_ckpt_flag \\n --wandb_id $SLURM_JOB_ID \\n --ckpt_dir $CHECKPOINT_DIR \\n --batch_size=110 \\n --image_height=64 \\n --image_width=64 \\n --patch_size=16 \\n --max_lr=3e-5 \\n --max_noise_level=0 \\n --log_image_interval=1000 \\n --log \\n --log_checkpoint_interval=1000 \\n --name=coinrun-dynamics-maskgit-no-noise-$slurm_job_id \\n --tags coinrun dynamics maskgit no-noise \\n --entity instant-uv \\n --project jafar \\n --tokenizer_checkpoint=$tokenizer_ckpt_dir \\n --eval_full_frame \\n --data_dir $array_records_dir_train \\n --val_data_dir $array_records_dir_val &\n\nchild_pid=$!\n\nwait $child_pid",shellscript,selection_mouse +1196,1067661,"slurm/jobs/mihir/horeka/coinrun/ablations/train_dyn_default-no-noise.sh",20,2567,"\n#SBATCH --nodes=1\n#SBATCH --ntasks-per-node=1\n#SBATCH --time=48:00:00\n#SBATCH --partition=accelerated\n#SBATCH --cpus-per-task=5\n#SBATCH --gres=gpu:1\n#SBATCH --output=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir/coinrun/dynamics/maskgit/%x_%j.log\n#SBATCH --error=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir/coinrun/dynamics/maskgit/%x_%j.log\n#SBATCH --job-name=train_dynamics_maskgit\n#SBATCH --requeue\n#SBATCH --signal=b:usr1@300 # 5 min before timeout\n\n# --- signal trap to requeue job before timeout ---\nrequeue_job() {\n echo ""[$(date)] caught sigusr1 (timeout warning), requeueing slurm job $SLURM_JOB_ID...""\n # optional: trigger checkpoint saving here\n # e.g., touch $checkpoint_dir/requeue_trigger\n scontrol requeue $SLURM_JOB_ID\n exit 0\n}\n\ntrap requeue_job sigusr1\n\n# set checkpoint flag based on restart count\nrestart_count=$(scontrol show job $SLURM_JOB_ID | grep -o 'Restarts=[0-9]*' | cut -d'=' -f2)\n\nif [ $restart_count -eq 0 ]; then\n restore_ckpt_flag=""--no-restore-ckpt""\nelse\n restore_ckpt_flag=""--restore-ckpt""\nfi\n\n\n\n# Log the sbatch script\ncat $0\n\nmodule unload mpi/openmpi/5.0\nmodule unload devel/cuda/12.4\nsource .venv/bin/activate\n\narray_records_dir_train=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_coinrun/coinrun_episodes_10m_gt_actions_split/train\narray_records_dir_val=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_coinrun/coinrun_episodes_10m_gt_actions_split/val\n\njob_name=$SLURM_JOB_NAME\nslurm_job_id=$SLURM_JOB_ID\n\nCHECKPOINT_DIR=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/coinrun/maskgit/$job_name/$slurm_job_id\nmkdir -p $CHECKPOINT_DIR\n\ntokenizer_ckpt_dir=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/big-runs/tokenizer/train_tokenizer_default/3528955\n\nenv | grep SLURM\n\nsrun python jasmine/train_dynamics.py \\n --save_ckpt \\n $restore_ckpt_flag \\n --wandb_id $SLURM_JOB_ID \\n --ckpt_dir $CHECKPOINT_DIR \\n --batch_size=110 \\n --image_height=64 \\n --image_width=64 \\n --patch_size=16 \\n --max_lr=3e-5 \\n --max_noise_level=0 \\n --log_image_interval=1000 \\n --log \\n --log_checkpoint_interval=1000 \\n --name=coinrun-dynamics-maskgit-no-noise-$slurm_job_id \\n --tags coinrun dynamics maskgit no-noise \\n --entity instant-uv \\n --project jafar \\n --tokenizer_checkpoint=$tokenizer_ckpt_dir \\n --eval_full_frame \\n --data_dir $array_records_dir_train \\n --val_data_dir $array_records_dir_val &\n\nchild_pid=$!\n\nwait 
$child_pid",shellscript,selection_mouse +1197,1067664,"TERMINAL",0,0,"2",,terminal_output +1198,1067680,"slurm/jobs/mihir/horeka/coinrun/ablations/train_dyn_default-no-noise.sh",170,2417,"#SBATCH --output=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir/coinrun/dynamics/maskgit/%x_%j.log\n#SBATCH --error=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir/coinrun/dynamics/maskgit/%x_%j.log\n#SBATCH --job-name=train_dynamics_maskgit\n#SBATCH --requeue\n#SBATCH --signal=b:usr1@300 # 5 min before timeout\n\n# --- signal trap to requeue job before timeout ---\nrequeue_job() {\n echo ""[$(date)] caught sigusr1 (timeout warning), requeueing slurm job $SLURM_JOB_ID...""\n # optional: trigger checkpoint saving here\n # e.g., touch $checkpoint_dir/requeue_trigger\n scontrol requeue $SLURM_JOB_ID\n exit 0\n}\n\ntrap requeue_job sigusr1\n\n# set checkpoint flag based on restart count\nrestart_count=$(scontrol show job $SLURM_JOB_ID | grep -o 'Restarts=[0-9]*' | cut -d'=' -f2)\n\nif [ $restart_count -eq 0 ]; then\n restore_ckpt_flag=""--no-restore-ckpt""\nelse\n restore_ckpt_flag=""--restore-ckpt""\nfi\n\n\n\n# Log the sbatch script\ncat $0\n\nmodule unload mpi/openmpi/5.0\nmodule unload devel/cuda/12.4\nsource .venv/bin/activate\n\narray_records_dir_train=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_coinrun/coinrun_episodes_10m_gt_actions_split/train\narray_records_dir_val=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_coinrun/coinrun_episodes_10m_gt_actions_split/val\n\njob_name=$SLURM_JOB_NAME\nslurm_job_id=$SLURM_JOB_ID\n\nCHECKPOINT_DIR=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/coinrun/maskgit/$job_name/$slurm_job_id\nmkdir -p $CHECKPOINT_DIR\n\ntokenizer_ckpt_dir=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/big-runs/tokenizer/train_tokenizer_default/3528955\n\nenv | grep SLURM\n\nsrun python jasmine/train_dynamics.py \\n --save_ckpt \\n $restore_ckpt_flag \\n --wandb_id $SLURM_JOB_ID \\n --ckpt_dir $CHECKPOINT_DIR \\n --batch_size=110 \\n --image_height=64 \\n --image_width=64 \\n --patch_size=16 \\n --max_lr=3e-5 \\n --max_noise_level=0 \\n --log_image_interval=1000 \\n --log \\n --log_checkpoint_interval=1000 \\n --name=coinrun-dynamics-maskgit-no-noise-$slurm_job_id \\n --tags coinrun dynamics maskgit no-noise \\n --entity instant-uv \\n --project jafar \\n --tokenizer_checkpoint=$tokenizer_ckpt_dir \\n --eval_full_frame \\n --data_dir $array_records_dir_train \\n --val_data_dir $array_records_dir_val &\n\nchild_pid=$!\n\nwait $child_pid",shellscript,selection_mouse +1199,1067697,"slurm/jobs/mihir/horeka/coinrun/ablations/train_dyn_default-no-noise.sh",477,2110,"#SBATCH --signal=b:usr1@300 # 5 min before timeout\n\n# --- signal trap to requeue job before timeout ---\nrequeue_job() {\n echo ""[$(date)] caught sigusr1 (timeout warning), requeueing slurm job $SLURM_JOB_ID...""\n # optional: trigger checkpoint saving here\n # e.g., touch $checkpoint_dir/requeue_trigger\n scontrol requeue $SLURM_JOB_ID\n exit 0\n}\n\ntrap requeue_job sigusr1\n\n# set checkpoint flag based on restart count\nrestart_count=$(scontrol show job $SLURM_JOB_ID | grep -o 'Restarts=[0-9]*' | cut -d'=' -f2)\n\nif [ $restart_count -eq 0 ]; then\n restore_ckpt_flag=""--no-restore-ckpt""\nelse\n restore_ckpt_flag=""--restore-ckpt""\nfi\n\n\n\n# Log the sbatch script\ncat $0\n\nmodule unload mpi/openmpi/5.0\nmodule unload devel/cuda/12.4\nsource 
.venv/bin/activate\n\narray_records_dir_train=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_coinrun/coinrun_episodes_10m_gt_actions_split/train\narray_records_dir_val=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_coinrun/coinrun_episodes_10m_gt_actions_split/val\n\njob_name=$SLURM_JOB_NAME\nslurm_job_id=$SLURM_JOB_ID\n\nCHECKPOINT_DIR=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/coinrun/maskgit/$job_name/$slurm_job_id\nmkdir -p $CHECKPOINT_DIR\n\ntokenizer_ckpt_dir=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/big-runs/tokenizer/train_tokenizer_default/3528955\n\nenv | grep SLURM\n\nsrun python jasmine/train_dynamics.py \\n --save_ckpt \\n $restore_ckpt_flag \\n --wandb_id $SLURM_JOB_ID \\n --ckpt_dir $CHECKPOINT_DIR \\n --batch_size=110 \\n --image_height=64 \\n --image_width=64 \\n --patch_size=16 \\n --max_lr=3e-5 \\n --max_noise_level=0 \\n --log_image_interval=1000 \\n --log \\n --log_checkpoint_interval=1000 \\n --name=coinrun-dynamics-maskgit-no-noise-$slurm_job_id \\n --tags coinrun dynamics maskgit no-noise \\n --entity instant-uv \\n --project jafar \\n --tokenizer_checkpoint=$tokenizer_ckpt_dir \\n --eval_full_frame \\n --data_dir $array_records_dir_train \\n --val_data_dir $array_records_dir_val &\n\nchild_pid=$!\n\nwait $child_pid",shellscript,selection_mouse +1200,1067713,"slurm/jobs/mihir/horeka/coinrun/ablations/train_dyn_default-no-noise.sh",597,1990," echo ""[$(date)] caught sigusr1 (timeout warning), requeueing slurm job $SLURM_JOB_ID...""\n # optional: trigger checkpoint saving here\n # e.g., touch $checkpoint_dir/requeue_trigger\n scontrol requeue $SLURM_JOB_ID\n exit 0\n}\n\ntrap requeue_job sigusr1\n\n# set checkpoint flag based on restart count\nrestart_count=$(scontrol show job $SLURM_JOB_ID | grep -o 'Restarts=[0-9]*' | cut -d'=' -f2)\n\nif [ $restart_count -eq 0 ]; then\n restore_ckpt_flag=""--no-restore-ckpt""\nelse\n restore_ckpt_flag=""--restore-ckpt""\nfi\n\n\n\n# Log the sbatch script\ncat $0\n\nmodule unload mpi/openmpi/5.0\nmodule unload devel/cuda/12.4\nsource .venv/bin/activate\n\narray_records_dir_train=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_coinrun/coinrun_episodes_10m_gt_actions_split/train\narray_records_dir_val=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_coinrun/coinrun_episodes_10m_gt_actions_split/val\n\njob_name=$SLURM_JOB_NAME\nslurm_job_id=$SLURM_JOB_ID\n\nCHECKPOINT_DIR=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/coinrun/maskgit/$job_name/$slurm_job_id\nmkdir -p $CHECKPOINT_DIR\n\ntokenizer_ckpt_dir=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/big-runs/tokenizer/train_tokenizer_default/3528955\n\nenv | grep SLURM\n\nsrun python jasmine/train_dynamics.py \\n --save_ckpt \\n $restore_ckpt_flag \\n --wandb_id $SLURM_JOB_ID \\n --ckpt_dir $CHECKPOINT_DIR \\n --batch_size=110 \\n --image_height=64 \\n --image_width=64 \\n --patch_size=16 \\n --max_lr=3e-5 \\n --max_noise_level=0 \\n --log_image_interval=1000 \\n --log \\n --log_checkpoint_interval=1000 \\n --name=coinrun-dynamics-maskgit-no-noise-$slurm_job_id \\n --tags coinrun dynamics maskgit no-noise \\n --entity instant-uv \\n --project jafar \\n --tokenizer_checkpoint=$tokenizer_ckpt_dir \\n --eval_full_frame \\n --data_dir $array_records_dir_train \\n --val_data_dir $array_records_dir_val &\n\nchild_pid=$!\n\nwait $child_pid",shellscript,selection_mouse 
+1201,1067730,"slurm/jobs/mihir/horeka/coinrun/ablations/train_dyn_default-no-noise.sh",787,1800," scontrol requeue $SLURM_JOB_ID\n exit 0\n}\n\ntrap requeue_job sigusr1\n\n# set checkpoint flag based on restart count\nrestart_count=$(scontrol show job $SLURM_JOB_ID | grep -o 'Restarts=[0-9]*' | cut -d'=' -f2)\n\nif [ $restart_count -eq 0 ]; then\n restore_ckpt_flag=""--no-restore-ckpt""\nelse\n restore_ckpt_flag=""--restore-ckpt""\nfi\n\n\n\n# Log the sbatch script\ncat $0\n\nmodule unload mpi/openmpi/5.0\nmodule unload devel/cuda/12.4\nsource .venv/bin/activate\n\narray_records_dir_train=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_coinrun/coinrun_episodes_10m_gt_actions_split/train\narray_records_dir_val=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_coinrun/coinrun_episodes_10m_gt_actions_split/val\n\njob_name=$SLURM_JOB_NAME\nslurm_job_id=$SLURM_JOB_ID\n\nCHECKPOINT_DIR=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/coinrun/maskgit/$job_name/$slurm_job_id\nmkdir -p $CHECKPOINT_DIR\n\ntokenizer_ckpt_dir=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/big-runs/tokenizer/train_tokenizer_default/3528955\n\nenv | grep SLURM\n\nsrun python jasmine/train_dynamics.py \\n --save_ckpt \\n $restore_ckpt_flag \\n --wandb_id $SLURM_JOB_ID \\n --ckpt_dir $CHECKPOINT_DIR \\n --batch_size=110 \\n --image_height=64 \\n --image_width=64 \\n --patch_size=16 \\n --max_lr=3e-5 \\n --max_noise_level=0 \\n --log_image_interval=1000 \\n --log \\n --log_checkpoint_interval=1000 \\n --name=coinrun-dynamics-maskgit-no-noise-$slurm_job_id \\n --tags coinrun dynamics maskgit no-noise \\n --entity instant-uv \\n --project jafar \\n --tokenizer_checkpoint=$tokenizer_ckpt_dir \\n --eval_full_frame \\n --data_dir $array_records_dir_train \\n --val_data_dir $array_records_dir_val &\n\nchild_pid=$!\n\nwait $child_pid",shellscript,selection_mouse +1202,1067871,"slurm/jobs/mihir/horeka/coinrun/ablations/train_dyn_default-no-noise.sh",835,1752,"\ntrap requeue_job sigusr1\n\n# set checkpoint flag based on restart count\nrestart_count=$(scontrol show job $SLURM_JOB_ID | grep -o 'Restarts=[0-9]*' | cut -d'=' -f2)\n\nif [ $restart_count -eq 0 ]; then\n restore_ckpt_flag=""--no-restore-ckpt""\nelse\n restore_ckpt_flag=""--restore-ckpt""\nfi\n\n\n\n# Log the sbatch script\ncat $0\n\nmodule unload mpi/openmpi/5.0\nmodule unload devel/cuda/12.4\nsource .venv/bin/activate\n\narray_records_dir_train=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_coinrun/coinrun_episodes_10m_gt_actions_split/train\narray_records_dir_val=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_coinrun/coinrun_episodes_10m_gt_actions_split/val\n\njob_name=$SLURM_JOB_NAME\nslurm_job_id=$SLURM_JOB_ID\n\nCHECKPOINT_DIR=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/coinrun/maskgit/$job_name/$slurm_job_id\nmkdir -p $CHECKPOINT_DIR\n\ntokenizer_ckpt_dir=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/big-runs/tokenizer/train_tokenizer_default/3528955\n\nenv | grep SLURM\n\nsrun python jasmine/train_dynamics.py \\n --save_ckpt \\n $restore_ckpt_flag \\n --wandb_id $SLURM_JOB_ID \\n --ckpt_dir $CHECKPOINT_DIR \\n --batch_size=110 \\n --image_height=64 \\n --image_width=64 \\n --patch_size=16 \\n --max_lr=3e-5 \\n --max_noise_level=0 \\n --log_image_interval=1000 \\n --log \\n --log_checkpoint_interval=1000 \\n --name=coinrun-dynamics-maskgit-no-noise-$slurm_job_id \\n --tags coinrun dynamics maskgit no-noise \\n --entity 
instant-uv \\n --project jafar \\n --tokenizer_checkpoint=$tokenizer_ckpt_dir \\n --eval_full_frame \\n --data_dir $array_records_dir_train \\n --val_data_dir $array_records_dir_val &\n\nchild_pid=$!\n\nwait $child_pid",shellscript,selection_mouse +1203,1067872,"slurm/jobs/mihir/horeka/coinrun/ablations/train_dyn_default-no-noise.sh",861,1726,"\n# set checkpoint flag based on restart count\nrestart_count=$(scontrol show job $SLURM_JOB_ID | grep -o 'Restarts=[0-9]*' | cut -d'=' -f2)\n\nif [ $restart_count -eq 0 ]; then\n restore_ckpt_flag=""--no-restore-ckpt""\nelse\n restore_ckpt_flag=""--restore-ckpt""\nfi\n\n\n\n# Log the sbatch script\ncat $0\n\nmodule unload mpi/openmpi/5.0\nmodule unload devel/cuda/12.4\nsource .venv/bin/activate\n\narray_records_dir_train=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_coinrun/coinrun_episodes_10m_gt_actions_split/train\narray_records_dir_val=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_coinrun/coinrun_episodes_10m_gt_actions_split/val\n\njob_name=$SLURM_JOB_NAME\nslurm_job_id=$SLURM_JOB_ID\n\nCHECKPOINT_DIR=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/coinrun/maskgit/$job_name/$slurm_job_id\nmkdir -p $CHECKPOINT_DIR\n\ntokenizer_ckpt_dir=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/big-runs/tokenizer/train_tokenizer_default/3528955\n\nenv | grep SLURM\n\nsrun python jasmine/train_dynamics.py \\n --save_ckpt \\n $restore_ckpt_flag \\n --wandb_id $SLURM_JOB_ID \\n --ckpt_dir $CHECKPOINT_DIR \\n --batch_size=110 \\n --image_height=64 \\n --image_width=64 \\n --patch_size=16 \\n --max_lr=3e-5 \\n --max_noise_level=0 \\n --log_image_interval=1000 \\n --log \\n --log_checkpoint_interval=1000 \\n --name=coinrun-dynamics-maskgit-no-noise-$slurm_job_id \\n --tags coinrun dynamics maskgit no-noise \\n --entity instant-uv \\n --project jafar \\n --tokenizer_checkpoint=$tokenizer_ckpt_dir \\n --eval_full_frame \\n --data_dir $array_records_dir_train \\n --val_data_dir $array_records_dir_val &\n\nchild_pid=$!\n\nwait $child_pid",shellscript,selection_mouse +1204,1067873,"slurm/jobs/mihir/horeka/coinrun/ablations/train_dyn_default-no-noise.sh",862,1725,"# set checkpoint flag based on restart count\nrestart_count=$(scontrol show job $SLURM_JOB_ID | grep -o 'Restarts=[0-9]*' | cut -d'=' -f2)\n\nif [ $restart_count -eq 0 ]; then\n restore_ckpt_flag=""--no-restore-ckpt""\nelse\n restore_ckpt_flag=""--restore-ckpt""\nfi\n\n\n\n# Log the sbatch script\ncat $0\n\nmodule unload mpi/openmpi/5.0\nmodule unload devel/cuda/12.4\nsource .venv/bin/activate\n\narray_records_dir_train=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_coinrun/coinrun_episodes_10m_gt_actions_split/train\narray_records_dir_val=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_coinrun/coinrun_episodes_10m_gt_actions_split/val\n\njob_name=$SLURM_JOB_NAME\nslurm_job_id=$SLURM_JOB_ID\n\nCHECKPOINT_DIR=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/coinrun/maskgit/$job_name/$slurm_job_id\nmkdir -p $CHECKPOINT_DIR\n\ntokenizer_ckpt_dir=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/big-runs/tokenizer/train_tokenizer_default/3528955\n\nenv | grep SLURM\n\nsrun python jasmine/train_dynamics.py \\n --save_ckpt \\n $restore_ckpt_flag \\n --wandb_id $SLURM_JOB_ID \\n --ckpt_dir $CHECKPOINT_DIR \\n --batch_size=110 \\n --image_height=64 \\n --image_width=64 \\n --patch_size=16 \\n --max_lr=3e-5 \\n --max_noise_level=0 \\n --log_image_interval=1000 \\n --log \\n 
--log_checkpoint_interval=1000 \\n --name=coinrun-dynamics-maskgit-no-noise-$slurm_job_id \\n --tags coinrun dynamics maskgit no-noise \\n --entity instant-uv \\n --project jafar \\n --tokenizer_checkpoint=$tokenizer_ckpt_dir \\n --eval_full_frame \\n --data_dir $array_records_dir_train \\n --val_data_dir $array_records_dir_val &\n\nchild_pid=$!\n\nwait $child_pid",shellscript,selection_mouse +1205,1067873,"slurm/jobs/mihir/horeka/coinrun/ablations/train_dyn_default-no-noise.sh",907,1680,"restart_count=$(scontrol show job $SLURM_JOB_ID | grep -o 'Restarts=[0-9]*' | cut -d'=' -f2)\n\nif [ $restart_count -eq 0 ]; then\n restore_ckpt_flag=""--no-restore-ckpt""\nelse\n restore_ckpt_flag=""--restore-ckpt""\nfi\n\n\n\n# Log the sbatch script\ncat $0\n\nmodule unload mpi/openmpi/5.0\nmodule unload devel/cuda/12.4\nsource .venv/bin/activate\n\narray_records_dir_train=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_coinrun/coinrun_episodes_10m_gt_actions_split/train\narray_records_dir_val=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_coinrun/coinrun_episodes_10m_gt_actions_split/val\n\njob_name=$SLURM_JOB_NAME\nslurm_job_id=$SLURM_JOB_ID\n\nCHECKPOINT_DIR=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/coinrun/maskgit/$job_name/$slurm_job_id\nmkdir -p $CHECKPOINT_DIR\n\ntokenizer_ckpt_dir=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/big-runs/tokenizer/train_tokenizer_default/3528955\n\nenv | grep SLURM\n\nsrun python jasmine/train_dynamics.py \\n --save_ckpt \\n $restore_ckpt_flag \\n --wandb_id $SLURM_JOB_ID \\n --ckpt_dir $CHECKPOINT_DIR \\n --batch_size=110 \\n --image_height=64 \\n --image_width=64 \\n --patch_size=16 \\n --max_lr=3e-5 \\n --max_noise_level=0 \\n --log_image_interval=1000 \\n --log \\n --log_checkpoint_interval=1000 \\n --name=coinrun-dynamics-maskgit-no-noise-$slurm_job_id \\n --tags coinrun dynamics maskgit no-noise \\n --entity instant-uv \\n --project jafar \\n --tokenizer_checkpoint=$tokenizer_ckpt_dir \\n --eval_full_frame \\n --data_dir $array_records_dir_train \\n --val_data_dir $array_records_dir_val &\n\nchild_pid=$!\n\nwait $child_pid",shellscript,selection_mouse +1206,1068018,"slurm/jobs/mihir/horeka/coinrun/ablations/train_dyn_default-no-noise.sh",1000,1587,"\nif [ $restart_count -eq 0 ]; then\n restore_ckpt_flag=""--no-restore-ckpt""\nelse\n restore_ckpt_flag=""--restore-ckpt""\nfi\n\n\n\n# Log the sbatch script\ncat $0\n\nmodule unload mpi/openmpi/5.0\nmodule unload devel/cuda/12.4\nsource .venv/bin/activate\n\narray_records_dir_train=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_coinrun/coinrun_episodes_10m_gt_actions_split/train\narray_records_dir_val=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_coinrun/coinrun_episodes_10m_gt_actions_split/val\n\njob_name=$SLURM_JOB_NAME\nslurm_job_id=$SLURM_JOB_ID\n\nCHECKPOINT_DIR=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/coinrun/maskgit/$job_name/$slurm_job_id\nmkdir -p $CHECKPOINT_DIR\n\ntokenizer_ckpt_dir=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/big-runs/tokenizer/train_tokenizer_default/3528955\n\nenv | grep SLURM\n\nsrun python jasmine/train_dynamics.py \\n --save_ckpt \\n $restore_ckpt_flag \\n --wandb_id $SLURM_JOB_ID \\n --ckpt_dir $CHECKPOINT_DIR \\n --batch_size=110 \\n --image_height=64 \\n --image_width=64 \\n --patch_size=16 \\n --max_lr=3e-5 \\n --max_noise_level=0 \\n --log_image_interval=1000 \\n --log \\n --log_checkpoint_interval=1000 \\n 
--name=coinrun-dynamics-maskgit-no-noise-$slurm_job_id \\n --tags coinrun dynamics maskgit no-noise \\n --entity instant-uv \\n --project jafar \\n --tokenizer_checkpoint=$tokenizer_ckpt_dir \\n --eval_full_frame \\n --data_dir $array_records_dir_train \\n --val_data_dir $array_records_dir_val &\n\nchild_pid=$!\n\nwait $child_pid",shellscript,selection_mouse +1207,1068024,"slurm/jobs/mihir/horeka/coinrun/ablations/train_dyn_default-no-noise.sh",1001,1586,"if [ $restart_count -eq 0 ]; then\n restore_ckpt_flag=""--no-restore-ckpt""\nelse\n restore_ckpt_flag=""--restore-ckpt""\nfi\n\n\n\n# Log the sbatch script\ncat $0\n\nmodule unload mpi/openmpi/5.0\nmodule unload devel/cuda/12.4\nsource .venv/bin/activate\n\narray_records_dir_train=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_coinrun/coinrun_episodes_10m_gt_actions_split/train\narray_records_dir_val=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_coinrun/coinrun_episodes_10m_gt_actions_split/val\n\njob_name=$SLURM_JOB_NAME\nslurm_job_id=$SLURM_JOB_ID\n\nCHECKPOINT_DIR=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/coinrun/maskgit/$job_name/$slurm_job_id\nmkdir -p $CHECKPOINT_DIR\n\ntokenizer_ckpt_dir=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/big-runs/tokenizer/train_tokenizer_default/3528955\n\nenv | grep SLURM\n\nsrun python jasmine/train_dynamics.py \\n --save_ckpt \\n $restore_ckpt_flag \\n --wandb_id $SLURM_JOB_ID \\n --ckpt_dir $CHECKPOINT_DIR \\n --batch_size=110 \\n --image_height=64 \\n --image_width=64 \\n --patch_size=16 \\n --max_lr=3e-5 \\n --max_noise_level=0 \\n --log_image_interval=1000 \\n --log \\n --log_checkpoint_interval=1000 \\n --name=coinrun-dynamics-maskgit-no-noise-$slurm_job_id \\n --tags coinrun dynamics maskgit no-noise \\n --entity instant-uv \\n --project jafar \\n --tokenizer_checkpoint=$tokenizer_ckpt_dir \\n --eval_full_frame \\n --data_dir $array_records_dir_train \\n --val_data_dir $array_records_dir_val &\n\nchild_pid=$!\n\nwait $child_pid",shellscript,selection_mouse +1208,1068039,"slurm/jobs/mihir/horeka/coinrun/ablations/train_dyn_default-no-noise.sh",1035,1552," restore_ckpt_flag=""--no-restore-ckpt""\nelse\n restore_ckpt_flag=""--restore-ckpt""\nfi\n\n\n\n# Log the sbatch script\ncat $0\n\nmodule unload mpi/openmpi/5.0\nmodule unload devel/cuda/12.4\nsource .venv/bin/activate\n\narray_records_dir_train=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_coinrun/coinrun_episodes_10m_gt_actions_split/train\narray_records_dir_val=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_coinrun/coinrun_episodes_10m_gt_actions_split/val\n\njob_name=$SLURM_JOB_NAME\nslurm_job_id=$SLURM_JOB_ID\n\nCHECKPOINT_DIR=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/coinrun/maskgit/$job_name/$slurm_job_id\nmkdir -p $CHECKPOINT_DIR\n\ntokenizer_ckpt_dir=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/big-runs/tokenizer/train_tokenizer_default/3528955\n\nenv | grep SLURM\n\nsrun python jasmine/train_dynamics.py \\n --save_ckpt \\n $restore_ckpt_flag \\n --wandb_id $SLURM_JOB_ID \\n --ckpt_dir $CHECKPOINT_DIR \\n --batch_size=110 \\n --image_height=64 \\n --image_width=64 \\n --patch_size=16 \\n --max_lr=3e-5 \\n --max_noise_level=0 \\n --log_image_interval=1000 \\n --log \\n --log_checkpoint_interval=1000 \\n --name=coinrun-dynamics-maskgit-no-noise-$slurm_job_id \\n --tags coinrun dynamics maskgit no-noise \\n --entity instant-uv \\n --project jafar \\n 
--tokenizer_checkpoint=$tokenizer_ckpt_dir \\n --eval_full_frame \\n --data_dir $array_records_dir_train \\n --val_data_dir $array_records_dir_val &\n\nchild_pid=$!\n\nwait $child_pid",shellscript,selection_mouse +1209,1068056,"slurm/jobs/mihir/horeka/coinrun/ablations/train_dyn_default-no-noise.sh",1077,1510,"else\n restore_ckpt_flag=""--restore-ckpt""\nfi\n\n\n\n# Log the sbatch script\ncat $0\n\nmodule unload mpi/openmpi/5.0\nmodule unload devel/cuda/12.4\nsource .venv/bin/activate\n\narray_records_dir_train=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_coinrun/coinrun_episodes_10m_gt_actions_split/train\narray_records_dir_val=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_coinrun/coinrun_episodes_10m_gt_actions_split/val\n\njob_name=$SLURM_JOB_NAME\nslurm_job_id=$SLURM_JOB_ID\n\nCHECKPOINT_DIR=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/coinrun/maskgit/$job_name/$slurm_job_id\nmkdir -p $CHECKPOINT_DIR\n\ntokenizer_ckpt_dir=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/big-runs/tokenizer/train_tokenizer_default/3528955\n\nenv | grep SLURM\n\nsrun python jasmine/train_dynamics.py \\n --save_ckpt \\n $restore_ckpt_flag \\n --wandb_id $SLURM_JOB_ID \\n --ckpt_dir $CHECKPOINT_DIR \\n --batch_size=110 \\n --image_height=64 \\n --image_width=64 \\n --patch_size=16 \\n --max_lr=3e-5 \\n --max_noise_level=0 \\n --log_image_interval=1000 \\n --log \\n --log_checkpoint_interval=1000 \\n --name=coinrun-dynamics-maskgit-no-noise-$slurm_job_id \\n --tags coinrun dynamics maskgit no-noise \\n --entity instant-uv \\n --project jafar \\n --tokenizer_checkpoint=$tokenizer_ckpt_dir \\n --eval_full_frame \\n --data_dir $array_records_dir_train \\n --val_data_dir $array_records_dir_val &\n\nchild_pid=$!\n\nwait $child_pid",shellscript,selection_mouse +1210,1068113,"slurm/jobs/mihir/horeka/coinrun/ablations/train_dyn_default-no-noise.sh",1082,1505," restore_ckpt_flag=""--restore-ckpt""\nfi\n\n\n\n# Log the sbatch script\ncat $0\n\nmodule unload mpi/openmpi/5.0\nmodule unload devel/cuda/12.4\nsource .venv/bin/activate\n\narray_records_dir_train=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_coinrun/coinrun_episodes_10m_gt_actions_split/train\narray_records_dir_val=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_coinrun/coinrun_episodes_10m_gt_actions_split/val\n\njob_name=$SLURM_JOB_NAME\nslurm_job_id=$SLURM_JOB_ID\n\nCHECKPOINT_DIR=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/coinrun/maskgit/$job_name/$slurm_job_id\nmkdir -p $CHECKPOINT_DIR\n\ntokenizer_ckpt_dir=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/big-runs/tokenizer/train_tokenizer_default/3528955\n\nenv | grep SLURM\n\nsrun python jasmine/train_dynamics.py \\n --save_ckpt \\n $restore_ckpt_flag \\n --wandb_id $SLURM_JOB_ID \\n --ckpt_dir $CHECKPOINT_DIR \\n --batch_size=110 \\n --image_height=64 \\n --image_width=64 \\n --patch_size=16 \\n --max_lr=3e-5 \\n --max_noise_level=0 \\n --log_image_interval=1000 \\n --log \\n --log_checkpoint_interval=1000 \\n --name=coinrun-dynamics-maskgit-no-noise-$slurm_job_id \\n --tags coinrun dynamics maskgit no-noise \\n --entity instant-uv \\n --project jafar \\n --tokenizer_checkpoint=$tokenizer_ckpt_dir \\n --eval_full_frame \\n --data_dir $array_records_dir_train \\n --val_data_dir $array_records_dir_val &\n\nchild_pid=$!\n\nwait $child_pid",shellscript,selection_mouse 
+1211,1068197,"slurm/jobs/mihir/horeka/coinrun/ablations/train_dyn_default-no-noise.sh",1077,1510,"else\n restore_ckpt_flag=""--restore-ckpt""\nfi\n\n\n\n# Log the sbatch script\ncat $0\n\nmodule unload mpi/openmpi/5.0\nmodule unload devel/cuda/12.4\nsource .venv/bin/activate\n\narray_records_dir_train=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_coinrun/coinrun_episodes_10m_gt_actions_split/train\narray_records_dir_val=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_coinrun/coinrun_episodes_10m_gt_actions_split/val\n\njob_name=$SLURM_JOB_NAME\nslurm_job_id=$SLURM_JOB_ID\n\nCHECKPOINT_DIR=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/coinrun/maskgit/$job_name/$slurm_job_id\nmkdir -p $CHECKPOINT_DIR\n\ntokenizer_ckpt_dir=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/big-runs/tokenizer/train_tokenizer_default/3528955\n\nenv | grep SLURM\n\nsrun python jasmine/train_dynamics.py \\n --save_ckpt \\n $restore_ckpt_flag \\n --wandb_id $SLURM_JOB_ID \\n --ckpt_dir $CHECKPOINT_DIR \\n --batch_size=110 \\n --image_height=64 \\n --image_width=64 \\n --patch_size=16 \\n --max_lr=3e-5 \\n --max_noise_level=0 \\n --log_image_interval=1000 \\n --log \\n --log_checkpoint_interval=1000 \\n --name=coinrun-dynamics-maskgit-no-noise-$slurm_job_id \\n --tags coinrun dynamics maskgit no-noise \\n --entity instant-uv \\n --project jafar \\n --tokenizer_checkpoint=$tokenizer_ckpt_dir \\n --eval_full_frame \\n --data_dir $array_records_dir_train \\n --val_data_dir $array_records_dir_val &\n\nchild_pid=$!\n\nwait $child_pid",shellscript,selection_mouse +1212,1068237,"slurm/jobs/mihir/horeka/coinrun/ablations/train_dyn_default-no-noise.sh",1035,1552," restore_ckpt_flag=""--no-restore-ckpt""\nelse\n restore_ckpt_flag=""--restore-ckpt""\nfi\n\n\n\n# Log the sbatch script\ncat $0\n\nmodule unload mpi/openmpi/5.0\nmodule unload devel/cuda/12.4\nsource .venv/bin/activate\n\narray_records_dir_train=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_coinrun/coinrun_episodes_10m_gt_actions_split/train\narray_records_dir_val=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_coinrun/coinrun_episodes_10m_gt_actions_split/val\n\njob_name=$SLURM_JOB_NAME\nslurm_job_id=$SLURM_JOB_ID\n\nCHECKPOINT_DIR=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/coinrun/maskgit/$job_name/$slurm_job_id\nmkdir -p $CHECKPOINT_DIR\n\ntokenizer_ckpt_dir=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/big-runs/tokenizer/train_tokenizer_default/3528955\n\nenv | grep SLURM\n\nsrun python jasmine/train_dynamics.py \\n --save_ckpt \\n $restore_ckpt_flag \\n --wandb_id $SLURM_JOB_ID \\n --ckpt_dir $CHECKPOINT_DIR \\n --batch_size=110 \\n --image_height=64 \\n --image_width=64 \\n --patch_size=16 \\n --max_lr=3e-5 \\n --max_noise_level=0 \\n --log_image_interval=1000 \\n --log \\n --log_checkpoint_interval=1000 \\n --name=coinrun-dynamics-maskgit-no-noise-$slurm_job_id \\n --tags coinrun dynamics maskgit no-noise \\n --entity instant-uv \\n --project jafar \\n --tokenizer_checkpoint=$tokenizer_ckpt_dir \\n --eval_full_frame \\n --data_dir $array_records_dir_train \\n --val_data_dir $array_records_dir_val &\n\nchild_pid=$!\n\nwait $child_pid",shellscript,selection_mouse +1213,1068287,"slurm/jobs/mihir/horeka/coinrun/ablations/train_dyn_default-no-noise.sh",1001,1586,"if [ $restart_count -eq 0 ]; then\n restore_ckpt_flag=""--no-restore-ckpt""\nelse\n restore_ckpt_flag=""--restore-ckpt""\nfi\n\n\n\n# Log the sbatch script\ncat 
$0\n\nmodule unload mpi/openmpi/5.0\nmodule unload devel/cuda/12.4\nsource .venv/bin/activate\n\narray_records_dir_train=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_coinrun/coinrun_episodes_10m_gt_actions_split/train\narray_records_dir_val=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_coinrun/coinrun_episodes_10m_gt_actions_split/val\n\njob_name=$SLURM_JOB_NAME\nslurm_job_id=$SLURM_JOB_ID\n\nCHECKPOINT_DIR=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/coinrun/maskgit/$job_name/$slurm_job_id\nmkdir -p $CHECKPOINT_DIR\n\ntokenizer_ckpt_dir=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/big-runs/tokenizer/train_tokenizer_default/3528955\n\nenv | grep SLURM\n\nsrun python jasmine/train_dynamics.py \\n --save_ckpt \\n $restore_ckpt_flag \\n --wandb_id $SLURM_JOB_ID \\n --ckpt_dir $CHECKPOINT_DIR \\n --batch_size=110 \\n --image_height=64 \\n --image_width=64 \\n --patch_size=16 \\n --max_lr=3e-5 \\n --max_noise_level=0 \\n --log_image_interval=1000 \\n --log \\n --log_checkpoint_interval=1000 \\n --name=coinrun-dynamics-maskgit-no-noise-$slurm_job_id \\n --tags coinrun dynamics maskgit no-noise \\n --entity instant-uv \\n --project jafar \\n --tokenizer_checkpoint=$tokenizer_ckpt_dir \\n --eval_full_frame \\n --data_dir $array_records_dir_train \\n --val_data_dir $array_records_dir_val &\n\nchild_pid=$!\n\nwait $child_pid",shellscript,selection_mouse +1214,1068297,"slurm/jobs/mihir/horeka/coinrun/ablations/train_dyn_default-no-noise.sh",1000,1587,"\nif [ $restart_count -eq 0 ]; then\n restore_ckpt_flag=""--no-restore-ckpt""\nelse\n restore_ckpt_flag=""--restore-ckpt""\nfi\n\n\n\n# Log the sbatch script\ncat $0\n\nmodule unload mpi/openmpi/5.0\nmodule unload devel/cuda/12.4\nsource .venv/bin/activate\n\narray_records_dir_train=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_coinrun/coinrun_episodes_10m_gt_actions_split/train\narray_records_dir_val=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_coinrun/coinrun_episodes_10m_gt_actions_split/val\n\njob_name=$SLURM_JOB_NAME\nslurm_job_id=$SLURM_JOB_ID\n\nCHECKPOINT_DIR=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/coinrun/maskgit/$job_name/$slurm_job_id\nmkdir -p $CHECKPOINT_DIR\n\ntokenizer_ckpt_dir=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/big-runs/tokenizer/train_tokenizer_default/3528955\n\nenv | grep SLURM\n\nsrun python jasmine/train_dynamics.py \\n --save_ckpt \\n $restore_ckpt_flag \\n --wandb_id $SLURM_JOB_ID \\n --ckpt_dir $CHECKPOINT_DIR \\n --batch_size=110 \\n --image_height=64 \\n --image_width=64 \\n --patch_size=16 \\n --max_lr=3e-5 \\n --max_noise_level=0 \\n --log_image_interval=1000 \\n --log \\n --log_checkpoint_interval=1000 \\n --name=coinrun-dynamics-maskgit-no-noise-$slurm_job_id \\n --tags coinrun dynamics maskgit no-noise \\n --entity instant-uv \\n --project jafar \\n --tokenizer_checkpoint=$tokenizer_ckpt_dir \\n --eval_full_frame \\n --data_dir $array_records_dir_train \\n --val_data_dir $array_records_dir_val &\n\nchild_pid=$!\n\nwait $child_pid",shellscript,selection_mouse +1215,1068348,"slurm/jobs/mihir/horeka/coinrun/ablations/train_dyn_default-no-noise.sh",1001,1586,"if [ $restart_count -eq 0 ]; then\n restore_ckpt_flag=""--no-restore-ckpt""\nelse\n restore_ckpt_flag=""--restore-ckpt""\nfi\n\n\n\n# Log the sbatch script\ncat $0\n\nmodule unload mpi/openmpi/5.0\nmodule unload devel/cuda/12.4\nsource 
.venv/bin/activate\n\narray_records_dir_train=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_coinrun/coinrun_episodes_10m_gt_actions_split/train\narray_records_dir_val=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_coinrun/coinrun_episodes_10m_gt_actions_split/val\n\njob_name=$SLURM_JOB_NAME\nslurm_job_id=$SLURM_JOB_ID\n\nCHECKPOINT_DIR=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/coinrun/maskgit/$job_name/$slurm_job_id\nmkdir -p $CHECKPOINT_DIR\n\ntokenizer_ckpt_dir=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/big-runs/tokenizer/train_tokenizer_default/3528955\n\nenv | grep SLURM\n\nsrun python jasmine/train_dynamics.py \\n --save_ckpt \\n $restore_ckpt_flag \\n --wandb_id $SLURM_JOB_ID \\n --ckpt_dir $CHECKPOINT_DIR \\n --batch_size=110 \\n --image_height=64 \\n --image_width=64 \\n --patch_size=16 \\n --max_lr=3e-5 \\n --max_noise_level=0 \\n --log_image_interval=1000 \\n --log \\n --log_checkpoint_interval=1000 \\n --name=coinrun-dynamics-maskgit-no-noise-$slurm_job_id \\n --tags coinrun dynamics maskgit no-noise \\n --entity instant-uv \\n --project jafar \\n --tokenizer_checkpoint=$tokenizer_ckpt_dir \\n --eval_full_frame \\n --data_dir $array_records_dir_train \\n --val_data_dir $array_records_dir_val &\n\nchild_pid=$!\n\nwait $child_pid",shellscript,selection_mouse +1216,1068390,"slurm/jobs/mihir/horeka/coinrun/ablations/train_dyn_default-no-noise.sh",1077,1510,"else\n restore_ckpt_flag=""--restore-ckpt""\nfi\n\n\n\n# Log the sbatch script\ncat $0\n\nmodule unload mpi/openmpi/5.0\nmodule unload devel/cuda/12.4\nsource .venv/bin/activate\n\narray_records_dir_train=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_coinrun/coinrun_episodes_10m_gt_actions_split/train\narray_records_dir_val=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_coinrun/coinrun_episodes_10m_gt_actions_split/val\n\njob_name=$SLURM_JOB_NAME\nslurm_job_id=$SLURM_JOB_ID\n\nCHECKPOINT_DIR=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/coinrun/maskgit/$job_name/$slurm_job_id\nmkdir -p $CHECKPOINT_DIR\n\ntokenizer_ckpt_dir=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/big-runs/tokenizer/train_tokenizer_default/3528955\n\nenv | grep SLURM\n\nsrun python jasmine/train_dynamics.py \\n --save_ckpt \\n $restore_ckpt_flag \\n --wandb_id $SLURM_JOB_ID \\n --ckpt_dir $CHECKPOINT_DIR \\n --batch_size=110 \\n --image_height=64 \\n --image_width=64 \\n --patch_size=16 \\n --max_lr=3e-5 \\n --max_noise_level=0 \\n --log_image_interval=1000 \\n --log \\n --log_checkpoint_interval=1000 \\n --name=coinrun-dynamics-maskgit-no-noise-$slurm_job_id \\n --tags coinrun dynamics maskgit no-noise \\n --entity instant-uv \\n --project jafar \\n --tokenizer_checkpoint=$tokenizer_ckpt_dir \\n --eval_full_frame \\n --data_dir $array_records_dir_train \\n --val_data_dir $array_records_dir_val &\n\nchild_pid=$!\n\nwait $child_pid",shellscript,selection_mouse +1217,1068398,"slurm/jobs/mihir/horeka/coinrun/ablations/train_dyn_default-no-noise.sh",1082,1505," restore_ckpt_flag=""--restore-ckpt""\nfi\n\n\n\n# Log the sbatch script\ncat $0\n\nmodule unload mpi/openmpi/5.0\nmodule unload devel/cuda/12.4\nsource 
.venv/bin/activate\n\narray_records_dir_train=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_coinrun/coinrun_episodes_10m_gt_actions_split/train\narray_records_dir_val=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_coinrun/coinrun_episodes_10m_gt_actions_split/val\n\njob_name=$SLURM_JOB_NAME\nslurm_job_id=$SLURM_JOB_ID\n\nCHECKPOINT_DIR=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/coinrun/maskgit/$job_name/$slurm_job_id\nmkdir -p $CHECKPOINT_DIR\n\ntokenizer_ckpt_dir=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/big-runs/tokenizer/train_tokenizer_default/3528955\n\nenv | grep SLURM\n\nsrun python jasmine/train_dynamics.py \\n --save_ckpt \\n $restore_ckpt_flag \\n --wandb_id $SLURM_JOB_ID \\n --ckpt_dir $CHECKPOINT_DIR \\n --batch_size=110 \\n --image_height=64 \\n --image_width=64 \\n --patch_size=16 \\n --max_lr=3e-5 \\n --max_noise_level=0 \\n --log_image_interval=1000 \\n --log \\n --log_checkpoint_interval=1000 \\n --name=coinrun-dynamics-maskgit-no-noise-$slurm_job_id \\n --tags coinrun dynamics maskgit no-noise \\n --entity instant-uv \\n --project jafar \\n --tokenizer_checkpoint=$tokenizer_ckpt_dir \\n --eval_full_frame \\n --data_dir $array_records_dir_train \\n --val_data_dir $array_records_dir_val &\n\nchild_pid=$!\n\nwait $child_pid",shellscript,selection_mouse +1218,1068467,"slurm/jobs/mihir/horeka/coinrun/ablations/train_dyn_default-no-noise.sh",1121,1466,"fi\n\n\n\n# Log the sbatch script\ncat $0\n\nmodule unload mpi/openmpi/5.0\nmodule unload devel/cuda/12.4\nsource .venv/bin/activate\n\narray_records_dir_train=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_coinrun/coinrun_episodes_10m_gt_actions_split/train\narray_records_dir_val=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_coinrun/coinrun_episodes_10m_gt_actions_split/val\n\njob_name=$SLURM_JOB_NAME\nslurm_job_id=$SLURM_JOB_ID\n\nCHECKPOINT_DIR=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/coinrun/maskgit/$job_name/$slurm_job_id\nmkdir -p $CHECKPOINT_DIR\n\ntokenizer_ckpt_dir=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/big-runs/tokenizer/train_tokenizer_default/3528955\n\nenv | grep SLURM\n\nsrun python jasmine/train_dynamics.py \\n --save_ckpt \\n $restore_ckpt_flag \\n --wandb_id $SLURM_JOB_ID \\n --ckpt_dir $CHECKPOINT_DIR \\n --batch_size=110 \\n --image_height=64 \\n --image_width=64 \\n --patch_size=16 \\n --max_lr=3e-5 \\n --max_noise_level=0 \\n --log_image_interval=1000 \\n --log \\n --log_checkpoint_interval=1000 \\n --name=coinrun-dynamics-maskgit-no-noise-$slurm_job_id \\n --tags coinrun dynamics maskgit no-noise \\n --entity instant-uv \\n --project jafar \\n --tokenizer_checkpoint=$tokenizer_ckpt_dir \\n --eval_full_frame \\n --data_dir $array_records_dir_train \\n --val_data_dir $array_records_dir_val &\n\nchild_pid=$!\n\nwait $child_pid",shellscript,selection_mouse +1219,1068474,"slurm/jobs/mihir/horeka/coinrun/ablations/train_dyn_default-no-noise.sh",1124,1463,"\n\n\n# Log the sbatch script\ncat $0\n\nmodule unload mpi/openmpi/5.0\nmodule unload devel/cuda/12.4\nsource 
.venv/bin/activate\n\narray_records_dir_train=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_coinrun/coinrun_episodes_10m_gt_actions_split/train\narray_records_dir_val=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_coinrun/coinrun_episodes_10m_gt_actions_split/val\n\njob_name=$SLURM_JOB_NAME\nslurm_job_id=$SLURM_JOB_ID\n\nCHECKPOINT_DIR=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/coinrun/maskgit/$job_name/$slurm_job_id\nmkdir -p $CHECKPOINT_DIR\n\ntokenizer_ckpt_dir=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/big-runs/tokenizer/train_tokenizer_default/3528955\n\nenv | grep SLURM\n\nsrun python jasmine/train_dynamics.py \\n --save_ckpt \\n $restore_ckpt_flag \\n --wandb_id $SLURM_JOB_ID \\n --ckpt_dir $CHECKPOINT_DIR \\n --batch_size=110 \\n --image_height=64 \\n --image_width=64 \\n --patch_size=16 \\n --max_lr=3e-5 \\n --max_noise_level=0 \\n --log_image_interval=1000 \\n --log \\n --log_checkpoint_interval=1000 \\n --name=coinrun-dynamics-maskgit-no-noise-$slurm_job_id \\n --tags coinrun dynamics maskgit no-noise \\n --entity instant-uv \\n --project jafar \\n --tokenizer_checkpoint=$tokenizer_ckpt_dir \\n --eval_full_frame \\n --data_dir $array_records_dir_train \\n --val_data_dir $array_records_dir_val &\n\nchild_pid=$!\n\nwait $child_pid",shellscript,selection_mouse +1220,1068559,"slurm/jobs/mihir/horeka/coinrun/ablations/train_dyn_default-no-noise.sh",1125,1462,"\n\n# Log the sbatch script\ncat $0\n\nmodule unload mpi/openmpi/5.0\nmodule unload devel/cuda/12.4\nsource .venv/bin/activate\n\narray_records_dir_train=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_coinrun/coinrun_episodes_10m_gt_actions_split/train\narray_records_dir_val=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_coinrun/coinrun_episodes_10m_gt_actions_split/val\n\njob_name=$SLURM_JOB_NAME\nslurm_job_id=$SLURM_JOB_ID\n\nCHECKPOINT_DIR=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/coinrun/maskgit/$job_name/$slurm_job_id\nmkdir -p $CHECKPOINT_DIR\n\ntokenizer_ckpt_dir=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/big-runs/tokenizer/train_tokenizer_default/3528955\n\nenv | grep SLURM\n\nsrun python jasmine/train_dynamics.py \\n --save_ckpt \\n $restore_ckpt_flag \\n --wandb_id $SLURM_JOB_ID \\n --ckpt_dir $CHECKPOINT_DIR \\n --batch_size=110 \\n --image_height=64 \\n --image_width=64 \\n --patch_size=16 \\n --max_lr=3e-5 \\n --max_noise_level=0 \\n --log_image_interval=1000 \\n --log \\n --log_checkpoint_interval=1000 \\n --name=coinrun-dynamics-maskgit-no-noise-$slurm_job_id \\n --tags coinrun dynamics maskgit no-noise \\n --entity instant-uv \\n --project jafar \\n --tokenizer_checkpoint=$tokenizer_ckpt_dir \\n --eval_full_frame \\n --data_dir $array_records_dir_train \\n --val_data_dir $array_records_dir_val &\n\nchild_pid=$!\n\nwait $child_pid",shellscript,selection_mouse +1221,1068594,"slurm/jobs/mihir/horeka/coinrun/ablations/train_dyn_default-no-noise.sh",1126,1461,"\n# Log the sbatch script\ncat $0\n\nmodule unload mpi/openmpi/5.0\nmodule unload devel/cuda/12.4\nsource 
.venv/bin/activate\n\narray_records_dir_train=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_coinrun/coinrun_episodes_10m_gt_actions_split/train\narray_records_dir_val=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_coinrun/coinrun_episodes_10m_gt_actions_split/val\n\njob_name=$SLURM_JOB_NAME\nslurm_job_id=$SLURM_JOB_ID\n\nCHECKPOINT_DIR=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/coinrun/maskgit/$job_name/$slurm_job_id\nmkdir -p $CHECKPOINT_DIR\n\ntokenizer_ckpt_dir=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/big-runs/tokenizer/train_tokenizer_default/3528955\n\nenv | grep SLURM\n\nsrun python jasmine/train_dynamics.py \\n --save_ckpt \\n $restore_ckpt_flag \\n --wandb_id $SLURM_JOB_ID \\n --ckpt_dir $CHECKPOINT_DIR \\n --batch_size=110 \\n --image_height=64 \\n --image_width=64 \\n --patch_size=16 \\n --max_lr=3e-5 \\n --max_noise_level=0 \\n --log_image_interval=1000 \\n --log \\n --log_checkpoint_interval=1000 \\n --name=coinrun-dynamics-maskgit-no-noise-$slurm_job_id \\n --tags coinrun dynamics maskgit no-noise \\n --entity instant-uv \\n --project jafar \\n --tokenizer_checkpoint=$tokenizer_ckpt_dir \\n --eval_full_frame \\n --data_dir $array_records_dir_train \\n --val_data_dir $array_records_dir_val &\n\nchild_pid=$!\n\nwait $child_pid",shellscript,selection_mouse +1222,1068634,"slurm/jobs/mihir/horeka/coinrun/ablations/train_dyn_default-no-noise.sh",1127,1460,"# Log the sbatch script\ncat $0\n\nmodule unload mpi/openmpi/5.0\nmodule unload devel/cuda/12.4\nsource .venv/bin/activate\n\narray_records_dir_train=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_coinrun/coinrun_episodes_10m_gt_actions_split/train\narray_records_dir_val=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_coinrun/coinrun_episodes_10m_gt_actions_split/val\n\njob_name=$SLURM_JOB_NAME\nslurm_job_id=$SLURM_JOB_ID\n\nCHECKPOINT_DIR=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/coinrun/maskgit/$job_name/$slurm_job_id\nmkdir -p $CHECKPOINT_DIR\n\ntokenizer_ckpt_dir=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/big-runs/tokenizer/train_tokenizer_default/3528955\n\nenv | grep SLURM\n\nsrun python jasmine/train_dynamics.py \\n --save_ckpt \\n $restore_ckpt_flag \\n --wandb_id $SLURM_JOB_ID \\n --ckpt_dir $CHECKPOINT_DIR \\n --batch_size=110 \\n --image_height=64 \\n --image_width=64 \\n --patch_size=16 \\n --max_lr=3e-5 \\n --max_noise_level=0 \\n --log_image_interval=1000 \\n --log \\n --log_checkpoint_interval=1000 \\n --name=coinrun-dynamics-maskgit-no-noise-$slurm_job_id \\n --tags coinrun dynamics maskgit no-noise \\n --entity instant-uv \\n --project jafar \\n --tokenizer_checkpoint=$tokenizer_ckpt_dir \\n --eval_full_frame \\n --data_dir $array_records_dir_train \\n --val_data_dir $array_records_dir_val &\n\nchild_pid=$!\n\nwait $child_pid",shellscript,selection_mouse +1223,1068671,"TERMINAL",0,0,"3",,terminal_output +1224,1069623,"TERMINAL",0,0,"4",,terminal_output +1225,1070424,"slurm/jobs/mihir/horeka/coinrun/ablations/train_dyn_default-no-noise.sh",1569,0,"",shellscript,selection_mouse +1226,1070789,"TERMINAL",0,0,"5",,terminal_output +1227,1071940,"slurm/jobs/mihir/horeka/coinrun/ablations/train_dyn_default-no-noise.sh",1134,0,"",shellscript,selection_mouse +1228,1072072,"TERMINAL",0,0,"6",,terminal_output +1229,1072824,"slurm/jobs/mihir/horeka/coinrun/ablations/train_dyn_default-no-noise.sh",1127,23,"# Log the sbatch script",shellscript,selection_command 
+1230,1073040,"slurm/jobs/mihir/horeka/coinrun/ablations/train_dyn_default-no-noise.sh",1127,1460,"# Log the sbatch script\ncat $0\n\nmodule unload mpi/openmpi/5.0\nmodule unload devel/cuda/12.4\nsource .venv/bin/activate\n\narray_records_dir_train=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_coinrun/coinrun_episodes_10m_gt_actions_split/train\narray_records_dir_val=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_coinrun/coinrun_episodes_10m_gt_actions_split/val\n\njob_name=$SLURM_JOB_NAME\nslurm_job_id=$SLURM_JOB_ID\n\nCHECKPOINT_DIR=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/coinrun/maskgit/$job_name/$slurm_job_id\nmkdir -p $CHECKPOINT_DIR\n\ntokenizer_ckpt_dir=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/big-runs/tokenizer/train_tokenizer_default/3528955\n\nenv | grep SLURM\n\nsrun python jasmine/train_dynamics.py \\n --save_ckpt \\n $restore_ckpt_flag \\n --wandb_id $SLURM_JOB_ID \\n --ckpt_dir $CHECKPOINT_DIR \\n --batch_size=110 \\n --image_height=64 \\n --image_width=64 \\n --patch_size=16 \\n --max_lr=3e-5 \\n --max_noise_level=0 \\n --log_image_interval=1000 \\n --log \\n --log_checkpoint_interval=1000 \\n --name=coinrun-dynamics-maskgit-no-noise-$slurm_job_id \\n --tags coinrun dynamics maskgit no-noise \\n --entity instant-uv \\n --project jafar \\n --tokenizer_checkpoint=$tokenizer_ckpt_dir \\n --eval_full_frame \\n --data_dir $array_records_dir_train \\n --val_data_dir $array_records_dir_val &\n\nchild_pid=$!\n\nwait $child_pid",shellscript,selection_command +1231,1073108,"TERMINAL",0,0,"7",,terminal_output +1232,1073778,"TERMINAL",0,0,"8",,terminal_output +1233,1073795,"slurm/jobs/mihir/horeka/coinrun/ablations/train_dyn_default-no-noise.sh",1127,0,"",shellscript,selection_command +1234,1074538,"TERMINAL",0,0,"20",,terminal_output +1235,1075324,"slurm/jobs/mihir/horeka/coinrun/ablations/appendix-c.sh",0,0,"",shellscript,tab +1236,1075556,"TERMINAL",0,0,"1",,terminal_output +1237,1076252,"slurm/jobs/mihir/horeka/coinrun/ablations/appendix-c.sh",0,0,"\n# Log the sbatch script\ncat $0\n\nmodule unload mpi/openmpi/5.0\nmodule unload devel/cuda/12.4\nsource .venv/bin/activate\n\narray_records_dir_train=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_coinrun/coinrun_episodes_10m_gt_actions_split/train\narray_records_dir_val=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_coinrun/coinrun_episodes_10m_gt_actions_split/val\n\njob_name=$SLURM_JOB_NAME\nslurm_job_id=$SLURM_JOB_ID\n\nCHECKPOINT_DIR=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/coinrun/maskgit/$job_name/$slurm_job_id\nmkdir -p $CHECKPOINT_DIR\n\ntokenizer_ckpt_dir=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/big-runs/tokenizer/train_tokenizer_default/3528955\n\nenv | grep SLURM\n\nsrun python jasmine/train_dynamics.py \\n --save_ckpt \\n $restore_ckpt_flag \\n --wandb_id $SLURM_JOB_ID \\n --ckpt_dir $CHECKPOINT_DIR \\n --batch_size=110 \\n --image_height=64 \\n --image_width=64 \\n --patch_size=16 \\n --max_lr=3e-5 \\n --max_noise_level=0 \\n --log_image_interval=1000 \\n --log \\n --log_checkpoint_interval=1000 \\n --name=coinrun-dynamics-maskgit-no-noise-$slurm_job_id \\n --tags coinrun dynamics maskgit no-noise \\n --entity instant-uv \\n --project jafar \\n --tokenizer_checkpoint=$tokenizer_ckpt_dir \\n --eval_full_frame \\n --data_dir $array_records_dir_train \\n --val_data_dir $array_records_dir_val &\n\nchild_pid=$!\n\nwait $child_pid",shellscript,content 
+1238,1076262,"slurm/jobs/mihir/horeka/coinrun/ablations/appendix-c.sh",1,0,"",shellscript,selection_command +1239,1076595,"TERMINAL",0,0,"2",,terminal_output +1240,1077502,"slurm/jobs/mihir/horeka/coinrun/ablations/appendix-c.sh",1446,0,"",shellscript,selection_command +1241,1077710,"TERMINAL",0,0,"3",,terminal_output +1242,1077866,"slurm/jobs/mihir/horeka/coinrun/ablations/appendix-c.sh",1445,0,"",shellscript,selection_command +1243,1078122,"slurm/jobs/mihir/horeka/coinrun/ablations/appendix-c.sh",1446,0,"",shellscript,selection_command +1244,1078490,"slurm/jobs/mihir/horeka/coinrun/ablations/appendix-c.sh",1444,17,"",shellscript,content +1245,1078504,"slurm/jobs/mihir/horeka/coinrun/ablations/appendix-c.sh",1432,0,"",shellscript,selection_command +1246,1078675,"TERMINAL",0,0,"4",,terminal_output +1247,1079156,"slurm/jobs/mihir/horeka/coinrun/ablations/appendix-c.sh",1431,13,"",shellscript,content +1248,1079242,"slurm/jobs/mihir/horeka/coinrun/ablations/appendix-c.sh",1387,0,"",shellscript,selection_command +1249,1079668,"TERMINAL",0,0,"5",,terminal_output +1250,1079697,"slurm/jobs/mihir/horeka/coinrun/ablations/appendix-c.sh",1430,0,"",shellscript,selection_command +1251,1079909,"slurm/jobs/mihir/horeka/coinrun/ablations/appendix-c.sh",1429,1,"",shellscript,content +1252,1080211,"slurm/jobs/mihir/horeka/coinrun/ablations/appendix-c.sh",1428,0,"",shellscript,selection_command +1253,1080721,"TERMINAL",0,0,"6",,terminal_output +1254,1081335,"slurm/jobs/mihir/horeka/coinrun/ablations/appendix-c.sh",1385,0,"",shellscript,selection_command +1255,1081734,"TERMINAL",0,0,"7",,terminal_output +1256,1082716,"slurm/jobs/mihir/horeka/coinrun/ablations/appendix-c.sh",1343,0,"",shellscript,selection_command +1257,1082747,"TERMINAL",0,0,"8",,terminal_output +1258,1083217,"slurm/jobs/mihir/horeka/coinrun/ablations/appendix-c.sh",1313,0,"",shellscript,selection_command +1259,1083258,"slurm/jobs/mihir/horeka/coinrun/ablations/appendix-c.sh",1270,0,"",shellscript,selection_command +1260,1083305,"slurm/jobs/mihir/horeka/coinrun/ablations/appendix-c.sh",1248,0,"",shellscript,selection_command +1261,1083645,"slurm/jobs/mihir/horeka/coinrun/ablations/appendix-c.sh",1218,0,"",shellscript,selection_command +1262,1083781,"TERMINAL",0,0,"9",,terminal_output +1263,1084005,"slurm/jobs/mihir/horeka/coinrun/ablations/appendix-c.sh",1217,0,"",shellscript,selection_command +1264,1084508,"slurm/jobs/mihir/horeka/coinrun/ablations/appendix-c.sh",1216,0,"",shellscript,selection_command +1265,1084553,"slurm/jobs/mihir/horeka/coinrun/ablations/appendix-c.sh",1215,0,"",shellscript,selection_command +1266,1084594,"slurm/jobs/mihir/horeka/coinrun/ablations/appendix-c.sh",1214,0,"",shellscript,selection_command +1267,1084595,"slurm/jobs/mihir/horeka/coinrun/ablations/appendix-c.sh",1213,0,"",shellscript,selection_command +1268,1084751,"slurm/jobs/mihir/horeka/coinrun/ablations/appendix-c.sh",1212,0,"",shellscript,selection_command +1269,1084797,"TERMINAL",0,0,"30",,terminal_output +1270,1084992,"slurm/jobs/mihir/horeka/coinrun/ablations/appendix-c.sh",1213,0,"",shellscript,selection_command +1271,1085177,"slurm/jobs/mihir/horeka/coinrun/ablations/appendix-c.sh",1214,0,"",shellscript,selection_command +1272,1085447,"slurm/jobs/mihir/horeka/coinrun/ablations/appendix-c.sh",1213,0,"",shellscript,selection_command +1273,1085831,"TERMINAL",0,0,"1",,terminal_output +1274,1086854,"TERMINAL",0,0,"2",,terminal_output +1275,1087085,"jasmine/train_tokenizer_appendix-c.py",0,0,"",python,tab 
+1276,1087475,"jasmine/train_tokenizer_appendix-c.py",13252,0,"",python,selection_mouse +1277,1087478,"jasmine/train_tokenizer_appendix-c.py",13251,0,"",python,selection_command +1278,1087895,"TERMINAL",0,0,"3",,terminal_output +1279,1088927,"TERMINAL",0,0,"4",,terminal_output +1280,1089069,"jasmine/train_tokenizer_appendix-c.py",440,0,"",python,selection_command +1281,1090100,"TERMINAL",0,0,"5",,terminal_output +1282,1090163,"jasmine/train_tokenizer_appendix-c.py",433,13,"",python,content +1283,1091325,"TERMINAL",0,0,"6",,terminal_output +1284,1091594,"jasmine/train_tokenizer_appendix-c.py",2175,0,"",python,selection_command +1285,1092063,"TERMINAL",0,0,"7",,terminal_output +1286,1092502,"jasmine/train_tokenizer_appendix-c.py",2171,23,"",python,content +1287,1092600,"jasmine/train_tokenizer_appendix-c.py",9832,0,"",python,selection_command +1288,1093031,"TERMINAL",0,0,"8",,terminal_output +1289,1093435,"jasmine/train_tokenizer_appendix-c.py",2171,0," wandb_id: str = """"\n",python,content +1290,1093443,"jasmine/train_tokenizer_appendix-c.py",2175,0,"",python,selection_command +1291,1093759,"jasmine/train_tokenizer_appendix-c.py",9855,0,"",python,selection_command +1292,1093958,"jasmine/train_tokenizer_appendix-c.py",10096,0,"",python,selection_command +1293,1094051,"TERMINAL",0,0,"9",,terminal_output +1294,1094995,"jasmine/train_tokenizer_appendix-c.py",9855,0,"",python,selection_command +1295,1095231,"TERMINAL",0,0,"40",,terminal_output +1296,1096320,"TERMINAL",0,0,"1",,terminal_output +1297,1097318,"TERMINAL",0,0,"2",,terminal_output +1298,1098221,"TERMINAL",0,0,"3",,terminal_output +1299,1098557,"jasmine/train_tokenizer_appendix-c.py",440,0,"wandb\nimport ",python,content +1300,1098562,"jasmine/train_tokenizer_appendix-c.py",440,0,"",python,selection_command +1301,1099532,"TERMINAL",0,0,"4",,terminal_output +1302,1100763,"TERMINAL",0,0,"5",,terminal_output +1303,1101396,"TERMINAL",0,0,"6",,terminal_output +1304,1101925,"slurm/jobs/mihir/horeka/coinrun/ablations/appendix-c.sh",0,0,"",shellscript,tab +1305,1102344,"TERMINAL",0,0,"7",,terminal_output +1306,1103294,"TERMINAL",0,0,"8",,terminal_output +1307,1103545,"slurm/jobs/mihir/horeka/coinrun/ablations/appendix-c.sh",1073,0,"",shellscript,selection_mouse +1308,1104446,"TERMINAL",0,0,"9",,terminal_output +1309,1105546,"slurm/jobs/mihir/horeka/coinrun/ablations/appendix-c.sh",1073,0,"n",shellscript,content +1310,1105547,"slurm/jobs/mihir/horeka/coinrun/ablations/appendix-c.sh",1074,0,"",shellscript,selection_keyboard +1311,1105603,"TERMINAL",0,0,"50",,terminal_output +1312,1105710,"slurm/jobs/mihir/horeka/coinrun/ablations/appendix-c.sh",1074,0,"o",shellscript,content +1313,1105711,"slurm/jobs/mihir/horeka/coinrun/ablations/appendix-c.sh",1075,0,"",shellscript,selection_keyboard +1314,1105915,"slurm/jobs/mihir/horeka/coinrun/ablations/appendix-c.sh",1075,0,"-",shellscript,content +1315,1105916,"slurm/jobs/mihir/horeka/coinrun/ablations/appendix-c.sh",1076,0,"",shellscript,selection_keyboard +1316,1106349,"slurm/jobs/mihir/horeka/coinrun/ablations/appendix-c.sh",1075,0,"",shellscript,selection_command +1317,1106517,"slurm/jobs/mihir/horeka/coinrun/ablations/appendix-c.sh",1090,0,"",shellscript,selection_command +1318,1106866,"TERMINAL",0,0,"1",,terminal_output +1319,1106991,"slurm/jobs/mihir/horeka/coinrun/ablations/appendix-c.sh",1082,37,"",shellscript,content +1320,1106999,"slurm/jobs/mihir/horeka/coinrun/ablations/appendix-c.sh",1086,0,"",shellscript,selection_command 
+1321,1107272,"slurm/jobs/mihir/horeka/coinrun/ablations/appendix-c.sh",1147,0,"",shellscript,selection_command +1322,1107378,"TERMINAL",0,0,"2",,terminal_output +1323,1107542,"slurm/jobs/mihir/horeka/coinrun/ablations/appendix-c.sh",1086,0,"",shellscript,selection_command +1324,1108598,"TERMINAL",0,0,"3",,terminal_output +1325,1108971,"slurm/jobs/mihir/horeka/coinrun/ablations/appendix-c.sh",1082,108,"",shellscript,content +1326,1108982,"slurm/jobs/mihir/horeka/coinrun/ablations/appendix-c.sh",1086,0,"",shellscript,selection_command +1327,1109556,"TERMINAL",0,0,"4",,terminal_output +1328,1109846,"slurm/jobs/mihir/horeka/coinrun/ablations/appendix-c.sh",1082,48,"",shellscript,content +1329,1109853,"slurm/jobs/mihir/horeka/coinrun/ablations/appendix-c.sh",1086,0,"",shellscript,selection_command +1330,1110489,"TERMINAL",0,0,"5",,terminal_output +1331,1111533,"TERMINAL",0,0,"6",,terminal_output +1332,1112124,"slurm/jobs/mihir/horeka/coinrun/ablations/appendix-c.sh",1071,0,"",shellscript,selection_command +1333,1112781,"TERMINAL",0,0,"7",,terminal_output +1334,1113253,"slurm/jobs/mihir/horeka/coinrun/ablations/appendix-c.sh",1039,0,"",shellscript,selection_command +1335,1113814,"TERMINAL",0,0,"8",,terminal_output +1336,1114722,"TERMINAL",0,0,"9",,terminal_output +1337,1115773,"TERMINAL",0,0,"6:01",,terminal_output +1338,1116689,"TERMINAL",0,0,"2",,terminal_output +1339,1117855,"TERMINAL",0,0,"3",,terminal_output +1340,1118996,"TERMINAL",0,0,"4",,terminal_output +1341,1119698,"TERMINAL",0,0,"5",,terminal_output +1342,1120681,"TERMINAL",0,0,"6",,terminal_output +1343,1121724,"TERMINAL",0,0,"7",,terminal_output +1344,1122994,"TERMINAL",0,0,"8",,terminal_output +1345,1124133,"TERMINAL",0,0,"9",,terminal_output +1346,1125085,"TERMINAL",0,0,"10",,terminal_output +1347,1126125,"TERMINAL",0,0,"1",,terminal_output +1348,1126838,"TERMINAL",0,0,"2",,terminal_output +1349,1127933,"TERMINAL",0,0,"3",,terminal_output +1350,1127981,"slurm/jobs/mihir/horeka/coinrun/ablations/appendix-c.sh",1009,58,"",shellscript,content +1351,1127987,"slurm/jobs/mihir/horeka/coinrun/ablations/appendix-c.sh",1013,0,"",shellscript,selection_command +1352,1128360,"slurm/jobs/mihir/horeka/coinrun/ablations/appendix-c.sh",993,0,"",shellscript,selection_command +1353,1129052,"slurm/jobs/mihir/horeka/coinrun/ablations/appendix-c.sh",971,0,"",shellscript,selection_command +1354,1129054,"TERMINAL",0,0,"4",,terminal_output +1355,1129261,"slurm/jobs/mihir/horeka/coinrun/ablations/appendix-c.sh",948,0,"",shellscript,selection_command +1356,1129436,"slurm/jobs/mihir/horeka/coinrun/ablations/appendix-c.sh",924,0,"",shellscript,selection_command +1357,1129857,"TERMINAL",0,0,"5",,terminal_output +1358,1130854,"TERMINAL",0,0,"6",,terminal_output +1359,1131185,"slurm/jobs/mihir/horeka/coinrun/ablations/appendix-c.sh",914,0,"",shellscript,selection_mouse +1360,1131874,"TERMINAL",0,0,"7",,terminal_output +1361,1132347,"slurm/jobs/mihir/horeka/coinrun/ablations/appendix-c.sh",881,0,"",shellscript,selection_command +1362,1132503,"slurm/jobs/mihir/horeka/coinrun/ablations/appendix-c.sh",850,0,"",shellscript,selection_command +1363,1132672,"slurm/jobs/mihir/horeka/coinrun/ablations/appendix-c.sh",825,0,"",shellscript,selection_command +1364,1132896,"slurm/jobs/mihir/horeka/coinrun/ablations/appendix-c.sh",850,0,"",shellscript,selection_command +1365,1133010,"TERMINAL",0,0,"8",,terminal_output +1366,1133092,"slurm/jobs/mihir/horeka/coinrun/ablations/appendix-c.sh",881,0,"",shellscript,selection_command 
+1367,1133304,"slurm/jobs/mihir/horeka/coinrun/ablations/appendix-c.sh",850,0,"",shellscript,selection_command +1368,1133483,"slurm/jobs/mihir/horeka/coinrun/ablations/appendix-c.sh",825,0,"",shellscript,selection_command +1369,1133696,"slurm/jobs/mihir/horeka/coinrun/ablations/appendix-c.sh",806,0,"",shellscript,selection_command +1370,1133940,"TERMINAL",0,0,"9",,terminal_output +1371,1134014,"slurm/jobs/mihir/horeka/coinrun/ablations/appendix-c.sh",825,0,"",shellscript,selection_command +1372,1135002,"TERMINAL",0,0,"20",,terminal_output +1373,1135140,"slurm/jobs/mihir/horeka/coinrun/ablations/appendix-c.sh",808,25,"",shellscript,content +1374,1135151,"slurm/jobs/mihir/horeka/coinrun/ablations/appendix-c.sh",812,0,"",shellscript,selection_command +1375,1135345,"slurm/jobs/mihir/horeka/coinrun/ablations/appendix-c.sh",794,0,"",shellscript,selection_command +1376,1135837,"slurm/jobs/mihir/horeka/coinrun/ablations/appendix-c.sh",754,0,"",shellscript,selection_command +1377,1135864,"slurm/jobs/mihir/horeka/coinrun/ablations/appendix-c.sh",749,0,"",shellscript,selection_command +1378,1135898,"slurm/jobs/mihir/horeka/coinrun/ablations/appendix-c.sh",736,0,"",shellscript,selection_command +1379,1136056,"TERMINAL",0,0,"1",,terminal_output +1380,1137148,"TERMINAL",0,0,"2",,terminal_output +1381,1138064,"slurm/jobs/mihir/horeka/coinrun/ablations/appendix-c.sh",629,0,"",shellscript,selection_mouse +1382,1138135,"TERMINAL",0,0,"3",,terminal_output +1383,1138215,"slurm/jobs/mihir/horeka/coinrun/ablations/appendix-c.sh",623,9,"workspace",shellscript,selection_mouse +1384,1138950,"slurm/jobs/mihir/horeka/coinrun/ablations/appendix-c.sh",612,0,"",shellscript,selection_mouse +1385,1139085,"slurm/jobs/mihir/horeka/coinrun/ablations/appendix-c.sh",612,2,"/h",shellscript,selection_mouse +1386,1139123,"slurm/jobs/mihir/horeka/coinrun/ablations/appendix-c.sh",612,3,"/hk",shellscript,selection_mouse +1387,1139124,"slurm/jobs/mihir/horeka/coinrun/ablations/appendix-c.sh",612,10,"/hkfs/work",shellscript,selection_mouse +1388,1139149,"slurm/jobs/mihir/horeka/coinrun/ablations/appendix-c.sh",612,16,"/hkfs/work/works",shellscript,selection_mouse +1389,1139179,"slurm/jobs/mihir/horeka/coinrun/ablations/appendix-c.sh",612,20,"/hkfs/work/workspace",shellscript,selection_mouse +1390,1139180,"slurm/jobs/mihir/horeka/coinrun/ablations/appendix-c.sh",612,24,"/hkfs/work/workspace/scr",shellscript,selection_mouse +1391,1139181,"TERMINAL",0,0,"4",,terminal_output +1392,1139217,"slurm/jobs/mihir/horeka/coinrun/ablations/appendix-c.sh",612,29,"/hkfs/work/workspace/scratch/",shellscript,selection_mouse +1393,1139218,"slurm/jobs/mihir/horeka/coinrun/ablations/appendix-c.sh",612,42,"/hkfs/work/workspace/scratch/tum_ind3695-j",shellscript,selection_mouse +1394,1139244,"slurm/jobs/mihir/horeka/coinrun/ablations/appendix-c.sh",612,50,"/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_s",shellscript,selection_mouse +1395,1139245,"slurm/jobs/mihir/horeka/coinrun/ablations/appendix-c.sh",612,59,"/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/che",shellscript,selection_mouse +1396,1139291,"slurm/jobs/mihir/horeka/coinrun/ablations/appendix-c.sh",612,63,"/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpo",shellscript,selection_mouse +1397,1139291,"slurm/jobs/mihir/horeka/coinrun/ablations/appendix-c.sh",612,66,"/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoint",shellscript,selection_mouse 
+1398,1139311,"slurm/jobs/mihir/horeka/coinrun/ablations/appendix-c.sh",612,70,"/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/bi",shellscript,selection_mouse +1399,1139312,"slurm/jobs/mihir/horeka/coinrun/ablations/appendix-c.sh",592,20,"\ntokenizer_ckpt_dir=",shellscript,selection_mouse +1400,1139695,"slurm/jobs/mihir/horeka/coinrun/ablations/appendix-c.sh",592,0,"",shellscript,selection_mouse +1401,1140184,"TERMINAL",0,0,"5",,terminal_output +1402,1141488,"TERMINAL",0,0,"6",,terminal_output +1403,1142326,"TERMINAL",0,0,"7",,terminal_output +1404,1143249,"TERMINAL",0,0,"8",,terminal_output +1405,1144267,"TERMINAL",0,0,"9",,terminal_output +1406,1145286,"TERMINAL",0,0,"30",,terminal_output +1407,1146278,"TERMINAL",0,0,"1",,terminal_output +1408,1147338,"TERMINAL",0,0,"2",,terminal_output +1409,1148315,"TERMINAL",0,0,"3",,terminal_output +1410,1149373,"TERMINAL",0,0,"4",,terminal_output +1411,1150370,"TERMINAL",0,0,"5",,terminal_output +1412,1151392,"TERMINAL",0,0,"6",,terminal_output +1413,1152399,"TERMINAL",0,0,"7",,terminal_output +1414,1153421,"TERMINAL",0,0,"8",,terminal_output +1415,1154448,"TERMINAL",0,0,"9",,terminal_output +1416,1155480,"TERMINAL",0,0,"40",,terminal_output +1417,1156507,"TERMINAL",0,0,"1",,terminal_output +1418,1157520,"TERMINAL",0,0,"3",,terminal_output +1419,1158558,"TERMINAL",0,0,"4",,terminal_output +1420,1159551,"TERMINAL",0,0,"5",,terminal_output +1421,1160764,"TERMINAL",0,0,"6",,terminal_output +1422,1161632,"TERMINAL",0,0,"7",,terminal_output +1423,1162641,"TERMINAL",0,0,"8",,terminal_output +1424,1163684,"TERMINAL",0,0,"9",,terminal_output +1425,1164859,"TERMINAL",0,0,"50",,terminal_output +1426,1165709,"TERMINAL",0,0,"1",,terminal_output +1427,1166724,"TERMINAL",0,0,"2",,terminal_output +1428,1167730,"TERMINAL",0,0,"3",,terminal_output +1429,1168751,"TERMINAL",0,0,"4",,terminal_output +1430,1169773,"TERMINAL",0,0,"5",,terminal_output +1431,1170798,"TERMINAL",0,0,"6",,terminal_output +1432,1171817,"TERMINAL",0,0,"7",,terminal_output +1433,1172837,"TERMINAL",0,0,"8",,terminal_output +1434,1173854,"TERMINAL",0,0,"9",,terminal_output +1435,1174886,"TERMINAL",0,0,"7:00",,terminal_output +1436,1175898,"TERMINAL",0,0,"1",,terminal_output +1437,1176950,"TERMINAL",0,0,"2",,terminal_output +1438,1177982,"TERMINAL",0,0,"3",,terminal_output +1439,1178978,"TERMINAL",0,0,"4",,terminal_output +1440,1180000,"TERMINAL",0,0,"5",,terminal_output +1441,1181024,"TERMINAL",0,0,"6",,terminal_output +1442,1182042,"TERMINAL",0,0,"7",,terminal_output +1443,1183072,"TERMINAL",0,0,"8",,terminal_output +1444,1184110,"TERMINAL",0,0,"9",,terminal_output +1445,1185112,"TERMINAL",0,0,"10",,terminal_output +1446,1186136,"TERMINAL",0,0,"1",,terminal_output +1447,1187160,"TERMINAL",0,0,"2",,terminal_output +1448,1187958,"slurm/jobs/mihir/horeka/coinrun/ablations/appendix-c.sh",894,0,"",shellscript,selection_mouse +1449,1187968,"slurm/jobs/mihir/horeka/coinrun/ablations/appendix-c.sh",893,0,"",shellscript,selection_command +1450,1188185,"TERMINAL",0,0,"3",,terminal_output +1451,1188590,"slurm/jobs/mihir/horeka/coinrun/ablations/appendix-c.sh",983,0,"",shellscript,selection_mouse +1452,1188591,"slurm/jobs/mihir/horeka/coinrun/ablations/appendix-c.sh",982,0,"",shellscript,selection_command +1453,1189179,"TERMINAL",0,0,"4",,terminal_output +1454,1189288,"slurm/jobs/mihir/horeka/coinrun/ablations/appendix-c.sh",918,0,"",shellscript,selection_mouse +1455,1189289,"slurm/jobs/mihir/horeka/coinrun/ablations/appendix-c.sh",917,0,"",shellscript,selection_command 
+1456,1189772,"slurm/jobs/mihir/horeka/coinrun/ablations/appendix-c.sh",963,0,"",shellscript,selection_mouse +1457,1189773,"slurm/jobs/mihir/horeka/coinrun/ablations/appendix-c.sh",962,0,"",shellscript,selection_command +1458,1190243,"slurm/jobs/mihir/horeka/coinrun/ablations/appendix-c.sh",983,0,"",shellscript,selection_mouse +1459,1190247,"slurm/jobs/mihir/horeka/coinrun/ablations/appendix-c.sh",982,0,"",shellscript,selection_command +1460,1190402,"TERMINAL",0,0,"5",,terminal_output +1461,1191304,"TERMINAL",0,0,"6",,terminal_output +1462,1192346,"TERMINAL",0,0,"7",,terminal_output +1463,1193321,"TERMINAL",0,0,"8",,terminal_output +1464,1194296,"TERMINAL",0,0,"9",,terminal_output +1465,1195324,"TERMINAL",0,0,"20",,terminal_output +1466,1196372,"TERMINAL",0,0,"1",,terminal_output +1467,1197353,"TERMINAL",0,0,"2",,terminal_output +1468,1197983,"slurm/jobs/mihir/horeka/coinrun/ablations/appendix-c.sh",772,0,"",shellscript,selection_mouse +1469,1198368,"TERMINAL",0,0,"3",,terminal_output +1470,1198898,"slurm/jobs/mihir/horeka/coinrun/ablations/appendix-c.sh",770,0,"",shellscript,selection_mouse +1471,1199465,"TERMINAL",0,0,"4",,terminal_output +1472,1199514,"slurm/jobs/mihir/horeka/coinrun/ablations/appendix-c.sh",771,0,"",shellscript,selection_command +1473,1199998,"slurm/jobs/mihir/horeka/coinrun/ablations/appendix-c.sh",772,0,"",shellscript,selection_command +1474,1200069,"slurm/jobs/mihir/horeka/coinrun/ablations/appendix-c.sh",773,0,"",shellscript,selection_command +1475,1200069,"slurm/jobs/mihir/horeka/coinrun/ablations/appendix-c.sh",774,0,"",shellscript,selection_command +1476,1200092,"slurm/jobs/mihir/horeka/coinrun/ablations/appendix-c.sh",775,0,"",shellscript,selection_command +1477,1200115,"slurm/jobs/mihir/horeka/coinrun/ablations/appendix-c.sh",776,0,"",shellscript,selection_command +1478,1200170,"slurm/jobs/mihir/horeka/coinrun/ablations/appendix-c.sh",777,0,"",shellscript,selection_command +1479,1200176,"slurm/jobs/mihir/horeka/coinrun/ablations/appendix-c.sh",778,0,"",shellscript,selection_command +1480,1200215,"slurm/jobs/mihir/horeka/coinrun/ablations/appendix-c.sh",779,0,"",shellscript,selection_command +1481,1200266,"slurm/jobs/mihir/horeka/coinrun/ablations/appendix-c.sh",780,0,"",shellscript,selection_command +1482,1200267,"slurm/jobs/mihir/horeka/coinrun/ablations/appendix-c.sh",781,0,"",shellscript,selection_command +1483,1200300,"slurm/jobs/mihir/horeka/coinrun/ablations/appendix-c.sh",782,0,"",shellscript,selection_command +1484,1200324,"slurm/jobs/mihir/horeka/coinrun/ablations/appendix-c.sh",783,0,"",shellscript,selection_command +1485,1200427,"TERMINAL",0,0,"5",,terminal_output +1486,1200477,"slurm/jobs/mihir/horeka/coinrun/ablations/appendix-c.sh",784,0,"",shellscript,selection_command +1487,1201469,"TERMINAL",0,0,"6",,terminal_output +1488,1201901,"slurm/jobs/mihir/horeka/coinrun/ablations/appendix-c.sh",784,0,"_",shellscript,content +1489,1201902,"slurm/jobs/mihir/horeka/coinrun/ablations/appendix-c.sh",785,0,"",shellscript,selection_keyboard +1490,1202227,"slurm/jobs/mihir/horeka/coinrun/ablations/appendix-c.sh",785,0,"a",shellscript,content +1491,1202228,"slurm/jobs/mihir/horeka/coinrun/ablations/appendix-c.sh",786,0,"",shellscript,selection_keyboard +1492,1202488,"slurm/jobs/mihir/horeka/coinrun/ablations/appendix-c.sh",786,0,"p",shellscript,content +1493,1202489,"slurm/jobs/mihir/horeka/coinrun/ablations/appendix-c.sh",787,0,"",shellscript,selection_keyboard +1494,1202490,"TERMINAL",0,0,"7",,terminal_output 
+1495,1202588,"slurm/jobs/mihir/horeka/coinrun/ablations/appendix-c.sh",787,0,"p",shellscript,content +1496,1202589,"slurm/jobs/mihir/horeka/coinrun/ablations/appendix-c.sh",788,0,"",shellscript,selection_keyboard +1497,1202714,"slurm/jobs/mihir/horeka/coinrun/ablations/appendix-c.sh",788,0,"e",shellscript,content +1498,1202715,"slurm/jobs/mihir/horeka/coinrun/ablations/appendix-c.sh",789,0,"",shellscript,selection_keyboard +1499,1202878,"slurm/jobs/mihir/horeka/coinrun/ablations/appendix-c.sh",789,0,"n",shellscript,content +1500,1202878,"slurm/jobs/mihir/horeka/coinrun/ablations/appendix-c.sh",790,0,"",shellscript,selection_keyboard +1501,1203488,"TERMINAL",0,0,"8",,terminal_output +1502,1203722,"slurm/jobs/mihir/horeka/coinrun/ablations/appendix-c.sh",790,0,"d",shellscript,content +1503,1203723,"slurm/jobs/mihir/horeka/coinrun/ablations/appendix-c.sh",791,0,"",shellscript,selection_keyboard +1504,1203817,"slurm/jobs/mihir/horeka/coinrun/ablations/appendix-c.sh",791,0,"i",shellscript,content +1505,1203818,"slurm/jobs/mihir/horeka/coinrun/ablations/appendix-c.sh",792,0,"",shellscript,selection_keyboard +1506,1203955,"slurm/jobs/mihir/horeka/coinrun/ablations/appendix-c.sh",792,0,"x",shellscript,content +1507,1203956,"slurm/jobs/mihir/horeka/coinrun/ablations/appendix-c.sh",793,0,"",shellscript,selection_keyboard +1508,1204473,"slurm/jobs/mihir/horeka/coinrun/ablations/appendix-c.sh",793,0,"-",shellscript,content +1509,1204474,"slurm/jobs/mihir/horeka/coinrun/ablations/appendix-c.sh",794,0,"",shellscript,selection_keyboard +1510,1204498,"TERMINAL",0,0,"9",,terminal_output +1511,1204615,"slurm/jobs/mihir/horeka/coinrun/ablations/appendix-c.sh",794,0,"c",shellscript,content +1512,1204616,"slurm/jobs/mihir/horeka/coinrun/ablations/appendix-c.sh",795,0,"",shellscript,selection_keyboard +1513,1205515,"TERMINAL",0,0,"31",,terminal_output +1514,1206534,"TERMINAL",0,0,"2",,terminal_output +1515,1207612,"TERMINAL",0,0,"3",,terminal_output +1516,1208586,"TERMINAL",0,0,"4",,terminal_output +1517,1209601,"TERMINAL",0,0,"5",,terminal_output +1518,1210628,"TERMINAL",0,0,"6",,terminal_output +1519,1211645,"TERMINAL",0,0,"7",,terminal_output +1520,1212656,"TERMINAL",0,0,"8",,terminal_output +1521,1213684,"TERMINAL",0,0,"9",,terminal_output +1522,1214443,"TERMINAL",0,0,"s",,terminal_output +1523,1214595,"TERMINAL",0,0,"h",,terminal_output +1524,1214752,"TERMINAL",0,0," ",,terminal_output +1525,1214753,"TERMINAL",0,0,"40",,terminal_output +1526,1215049,"TERMINAL",0,0,"slurm/jobs/mihir/horeka/coinrun/ablations/appendix-c.sh",,terminal_output +1527,1215286,"TERMINAL",0,0,"\rslurm/jobs/mihir/horeka/coinrun/ablations/appendix-c.sh\r\n[?2004l\r\r\n# Log the sbatch script\r\ncat $0\r\n\r\nmodule unload mpi/openmpi/5.0\r\nmodule unload devel/cuda/12.4\r\nsource .venv/bin/activate\r\n\r\narray_records_dir_train=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_coinrun/coinrun_episodes_10m_gt_actions_split/train\r\narray_records_dir_val=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_coinrun/coinrun_episodes_10m_gt_actions_split/val\r\n\r\njob_name=$SLURM_JOB_NAME\r\nslurm_job_id=$SLURM_JOB_ID\r\n\r\nCHECKPOINT_DIR=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/coinrun/maskgit/$job_name/$slurm_job_id\r\nmkdir -p $CHECKPOINT_DIR\r\n\r\ntokenizer_ckpt_dir=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/big-runs/tokenizer/train_tokenizer_default/3528955\r\n\r\nenv | grep SLURM\r\n\r\nsrun python jasmine/train_dynamics_appendix-c.py \\r\n --save_ckpt \\r\n 
--wandb_id $SLURM_JOB_ID \\r\n --ckpt_dir $CHECKPOINT_DIR \\r\n --batch_size=110 \\r\n --image_height=64 \\r\n --image_width=64 \\r\n --patch_size=16 \\r\n --max_lr=3e-5 \\r\n --no-log \\r\n --tokenizer_checkpoint=$tokenizer_ckpt_dir \\r\n --eval_full_frame \\r\n --data_dir $array_records_dir_train \\r\n --val_data_dir $array_records_dir_val \r\n",,terminal_output +1528,1215497,"TERMINAL",0,0,"SLURM_STEP_NUM_TASKS=1\r\nSLURM_JOB_USER=tum_cte0515\r\nSLURM_TASKS_PER_NODE=1\r\nSLURM_JOB_UID=999226\r\nSLURM_TASK_PID=1279162\r\nSLURM_JOB_GPUS=0\r\nSLURM_LOCALID=0\r\nSLURM_SUBMIT_DIR=/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine\r\nSLURMD_NODENAME=hkn0401\r\nSLURM_JOB_START_TIME=1759955924\r\nSLURM_STEP_NODELIST=hkn0401\r\nSLURM_CLUSTER_NAME=hk\r\nSLURM_JOB_END_TIME=1759959524\r\nSLURM_PMI2_SRUN_PORT=34473\r\nSLURM_CPUS_ON_NODE=8\r\nSLURM_JOB_CPUS_PER_NODE=8\r\nSLURM_GPUS_ON_NODE=1\r\nSLURM_GTIDS=0\r\nSLURM_JOB_PARTITION=dev_accelerated\r\nSLURM_TRES_PER_TASK=cpu=8\r\nSLURM_OOM_KILL_STEP=0\r\nSLURM_JOB_NUM_NODES=1\r\nSLURM_STEPID=4294967290\r\nSLURM_JOBID=3554157\r\nSLURM_PTY_PORT=43479\r\nSLURM_JOB_QOS=normal\r\nSLURM_LAUNCH_NODE_IPADDR=10.0.7.201\r\nSLURM_PTY_WIN_ROW=35\r\nSLURM_PMI2_PROC_MAPPING=(vector,(0,1,1))\r\nSLURMD_DEBUG=2\r\nSLURM_PROCID=0\r\nSLURM_CPUS_PER_TASK=8\r\nSLURM_TOPOLOGY_ADDR=hkibb.hkibbi1.hkibbi1e11.hkn0401\r\nSLURM_TOPOLOGY_ADDR_PATTERN=switch.switch.switch.node\r\nSLURM_SRUN_COMM_HOST=10.0.7.201\r\nSLURM_SCRIPT_CONTEXT=prolog_task\r\nSLURM_PTY_WIN_COL=181\r\nSLURM_NODELIST=hkn0401\r\nSLURM_SRUN_COMM_PORT=33897\r\nSLURM_STEP_ID=4294967290\r\nSLURM_JOB_ACCOUNT=hk-project-p0023960\r\nSLURM_PRIO_PROCESS=0\r\nSLURM_NNODES=1\r\nSLURM_SUBMIT_HOST=hkn1993.localdomain\r\nSLURM_JOB_ID=3554157\r\nSLURM_NODEID=0\r\nSLURM_STEP_NUM_NODES=1\r\nSLURM_STEP_TASKS_PER_NODE=1\r\nSLURM_MPI_TYPE=pmi2\r\nSLURM_PMI2_STEP_NODES=hkn0401\r\nSLURM_CONF=/etc/slurm/slurm.conf\r\nSLURM_JOB_NAME=interactive\r\nSLURM_STEP_LAUNCHER_PORT=33897\r\nSLURM_JOB_GID=502226\r\nSLURM_JOB_NODELIST=hkn0401\r\n",,terminal_output +1529,1215722,"TERMINAL",0,0,"1",,terminal_output +1530,1216759,"TERMINAL",0,0,"2",,terminal_output +1531,1216896,"TERMINAL",0,0,"GpuFreq=control_disabled\r\n",,terminal_output +1532,1216999,"TERMINAL",0,0,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/.venv/bin/python: can't open file '/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/jasmine/train_dynamics_appendix-c.py': [Errno 2] No such file or directory\r\nsrun: error: hkn0401: task 0: Exited with exit code 2\r\n]0;tum_cte0515@hkn0401:~/Projects/jasmine[?2004h(jasmine) [tum_cte0515@hkn0401 jasmine]$ ",,terminal_output +1533,1217784,"TERMINAL",0,0,"3",,terminal_output +1534,1218804,"TERMINAL",0,0,"4",,terminal_output +1535,1219855,"TERMINAL",0,0,"5",,terminal_output +1536,1220843,"TERMINAL",0,0,"6",,terminal_output +1537,1221866,"TERMINAL",0,0,"7",,terminal_output +1538,1222886,"TERMINAL",0,0,"8",,terminal_output +1539,1223911,"TERMINAL",0,0,"9",,terminal_output +1540,1224932,"TERMINAL",0,0,"50",,terminal_output +1541,1225952,"TERMINAL",0,0,"1",,terminal_output +1542,1226965,"TERMINAL",0,0,"2",,terminal_output +1543,1227990,"TERMINAL",0,0,"3",,terminal_output +1544,1229017,"TERMINAL",0,0,"4",,terminal_output +1545,1230130,"TERMINAL",0,0,"5",,terminal_output +1546,1231051,"TERMINAL",0,0,"6",,terminal_output +1547,1232086,"TERMINAL",0,0,"7",,terminal_output +1548,1233176,"TERMINAL",0,0,"8",,terminal_output +1549,1234156,"TERMINAL",0,0,"9",,terminal_output 
+1550,1235182,"TERMINAL",0,0,"8:00",,terminal_output +1551,1236268,"jasmine/train_tokenizer_appendix-c.py",0,0,"",python,tab +1552,1236282,"TERMINAL",0,0,"1",,terminal_output +1553,1237233,"TERMINAL",0,0,"2",,terminal_output +1554,1238476,"TERMINAL",0,0,"3",,terminal_output +1555,1239262,"TERMINAL",0,0,"4",,terminal_output +1556,1240370,"TERMINAL",0,0,"5",,terminal_output +1557,1241294,"TERMINAL",0,0,"6",,terminal_output +1558,1242324,"TERMINAL",0,0,"7",,terminal_output +1559,1243450,"TERMINAL",0,0,"8",,terminal_output +1560,1244506,"TERMINAL",0,0,"9",,terminal_output +1561,1245078,"jasmine/train_dynamics.py",0,0,"import os\n\n\nos.environ.setdefault(""XLA_PYTHON_CLIENT_MEM_FRACTION"", ""0.98"")\n\nfrom dataclasses import dataclass, field\nimport itertools\nfrom typing import cast, Optional\n\nimport einops\nfrom jax.sharding import Mesh, PartitionSpec, NamedSharding\nfrom jax.experimental.mesh_utils import create_device_mesh\nimport optax\nimport orbax.checkpoint as ocp\nimport numpy as np\nimport dm_pix as pix\nimport jax\nimport jax.numpy as jnp\nimport tyro\nimport wandb\nimport grain\nimport flax.nnx as nnx\n\nfrom genie import Genie, restore_genie_components\nfrom utils.dataloader import get_dataloader\nfrom utils.train_utils import (\n get_lr_schedule,\n count_parameters_by_component,\n print_mem_stats,\n print_compiled_memory_stats,\n print_compiled_cost_analysis,\n)\n\n\n@dataclass\nclass Args:\n # Experiment\n num_steps: int = 200_000\n seed: int = 0\n seq_len: int = 16\n image_channels: int = 3\n image_height: int = 64\n image_width: int = 64\n data_dir: str = """"\n save_ckpt: bool = False\n restore_ckpt: bool = False\n # Optimization\n batch_size: int = 36\n init_lr: float = 0.0\n max_lr: float = 3e-5\n decay_end: float = 0.0\n wsd_decay_steps: int = (\n 20_000 # NOTE: wsd_decay_steps will only be used when using a wsd-schedule\n )\n warmup_steps: int = 5000\n lr_schedule: str = ""wsd"" # supported options: wsd, cos\n # Tokenizer\n tokenizer_dim: int = 512\n tokenizer_ffn_dim: int = 2048\n latent_patch_dim: int = 32\n num_patch_latents: int = 1024\n patch_size: int = 16\n tokenizer_num_blocks: int = 4\n tokenizer_num_heads: int = 8\n tokenizer_checkpoint: str = """"\n # LAM\n lam_dim: int = 512\n lam_ffn_dim: int = 2048\n latent_action_dim: int = 32\n num_actions: int = 6\n lam_patch_size: int = 16\n lam_num_blocks: int = 4\n lam_num_heads: int = 8\n lam_checkpoint: str = """"\n # Dynamics\n dyna_type: str = ""maskgit"" # supported options: maskgit, causal\n dyna_dim: int = 512\n dyna_ffn_dim: int = 2048\n dyna_num_blocks: int = 6\n dyna_num_heads: int = 8\n dropout: float = 0.0\n mask_limit: float = 0.5\n z_loss_weight: float = 0.0\n param_dtype = jnp.float32\n dtype = jnp.bfloat16\n use_flash_attention: bool = True\n use_gt_actions: bool = False\n # Logging\n log: bool = True\n entity: str = """"\n project: str = """"\n name: str = ""train_dynamics""\n tags: list[str] = field(default_factory=lambda: [""dynamics""])\n log_interval: int = 50\n log_image_interval: int = 1000\n ckpt_dir: str = """"\n log_checkpoint_interval: int = 5000\n log_checkpoint_keep_period: int = 20_000\n log_gradients: bool = False\n val_data_dir: str = """"\n val_interval: int = 20_000\n val_steps: int = 50\n eval_full_frame: bool = True\n val_maskgit_steps: int = 25\n val_temperature: float = 1\n val_sample_argmax: bool = False\n wandb_id: str = """"\n\n\ndef build_model(args: Args, rng: jax.Array) -> tuple[Genie, jax.Array]:\n rng, _rng = jax.random.split(rng)\n rngs = nnx.Rngs(_rng)\n genie = 
Genie(\n # Tokenizer\n in_dim=args.image_channels,\n tokenizer_dim=args.tokenizer_dim,\n tokenizer_ffn_dim=args.tokenizer_ffn_dim,\n latent_patch_dim=args.latent_patch_dim,\n num_patch_latents=args.num_patch_latents,\n patch_size=args.patch_size,\n tokenizer_num_blocks=args.tokenizer_num_blocks,\n tokenizer_num_heads=args.tokenizer_num_heads,\n # LAM\n lam_dim=args.lam_dim,\n lam_ffn_dim=args.lam_ffn_dim,\n latent_action_dim=args.latent_action_dim,\n num_actions=args.num_actions,\n lam_patch_size=args.lam_patch_size,\n lam_num_blocks=args.lam_num_blocks,\n lam_num_heads=args.lam_num_heads,\n lam_co_train=not args.lam_checkpoint,\n use_gt_actions=args.use_gt_actions,\n # Dynamics\n dyna_type=args.dyna_type,\n dyna_dim=args.dyna_dim,\n dyna_ffn_dim=args.dyna_ffn_dim,\n dyna_num_blocks=args.dyna_num_blocks,\n dyna_num_heads=args.dyna_num_heads,\n dropout=args.dropout,\n mask_limit=args.mask_limit,\n param_dtype=args.param_dtype,\n dtype=args.dtype,\n use_flash_attention=args.use_flash_attention,\n decode=False,\n rngs=rngs,\n )\n if args.use_gt_actions:\n assert (\n not args.lam_checkpoint\n ), ""Cannot use LAM when using ground-truth actions.""\n else:\n assert genie.lam is not None\n del genie.lam.decoder\n return genie, rng\n\n\ndef build_optimizer(genie: Genie, args: Args) -> nnx.ModelAndOptimizer:\n lr_schedule = get_lr_schedule(\n args.lr_schedule,\n args.init_lr,\n args.max_lr,\n args.decay_end,\n args.num_steps,\n args.warmup_steps,\n args.wsd_decay_steps,\n )\n tx = optax.adamw(\n learning_rate=lr_schedule,\n b1=0.9,\n b2=0.9,\n weight_decay=1e-4,\n mu_dtype=args.param_dtype, # moments in full precision\n )\n optimizer = nnx.ModelAndOptimizer(genie, tx)\n return optimizer\n\n\ndef build_mesh_and_sharding(\n num_devices: int,\n) -> tuple[Mesh, NamedSharding, NamedSharding, NamedSharding]:\n device_mesh_arr = create_device_mesh((num_devices,))\n mesh = Mesh(devices=device_mesh_arr, axis_names=(""data"",))\n replicated_sharding = NamedSharding(mesh, PartitionSpec())\n videos_sharding = NamedSharding(mesh, PartitionSpec(""data"", None, None, None, None))\n actions_sharding = NamedSharding(mesh, PartitionSpec(""data"", None))\n return mesh, replicated_sharding, videos_sharding, actions_sharding\n\n\ndef shard_optimizer_states(\n optimizer: nnx.ModelAndOptimizer, replicated_sharding: NamedSharding\n) -> None:\n model_state = nnx.state(optimizer.model)\n model_sharded_state = jax.lax.with_sharding_constraint(\n model_state, replicated_sharding\n )\n nnx.update(optimizer.model, model_sharded_state)\n optimizer_state = nnx.state(optimizer, nnx.optimizer.OptState)\n optimizer_sharded_state = jax.lax.with_sharding_constraint(\n optimizer_state, replicated_sharding\n )\n nnx.update(optimizer, optimizer_sharded_state)\n\n\ndef build_dataloader(args: Args, data_dir: str) -> grain.DataLoaderIterator:\n image_shape = (args.image_height, args.image_width, args.image_channels)\n array_record_files = [\n os.path.join(data_dir, x)\n for x in os.listdir(data_dir)\n if x.endswith("".array_record"")\n ]\n grain_dataloader = get_dataloader(\n array_record_files,\n args.seq_len,\n # NOTE: We deliberately pass the global batch size\n # The dataloader shards the dataset across all processes\n args.batch_size,\n *image_shape,\n num_workers=8,\n prefetch_buffer_size=1,\n seed=args.seed,\n )\n initial_state = grain_dataloader._create_initial_state()\n grain_iterator = grain.DataLoaderIterator(grain_dataloader, initial_state)\n return grain_iterator\n\n\ndef build_checkpoint_manager(args: Args) -> 
Optional[ocp.CheckpointManager]:\n if args.restore_ckpt or args.save_ckpt:\n handler_registry = ocp.handlers.DefaultCheckpointHandlerRegistry()\n handler_registry.add(\n ""model_state"", ocp.args.PyTreeSave, ocp.handlers.PyTreeCheckpointHandler\n )\n handler_registry.add(\n ""model_state"", ocp.args.PyTreeRestore, ocp.handlers.PyTreeCheckpointHandler\n )\n handler_registry.add(\n ""train_dataloader_state"",\n grain.checkpoint.CheckpointSave,\n cast(ocp.handlers.CheckpointHandler, grain.checkpoint.CheckpointHandler),\n )\n handler_registry.add(\n ""train_dataloader_state"",\n grain.checkpoint.CheckpointRestore,\n cast(ocp.handlers.CheckpointHandler, grain.checkpoint.CheckpointHandler),\n )\n if args.val_data_dir:\n handler_registry.add(\n ""val_dataloader_state"",\n grain.checkpoint.CheckpointSave,\n cast(\n ocp.handlers.CheckpointHandler, grain.checkpoint.CheckpointHandler\n ),\n )\n handler_registry.add(\n ""val_dataloader_state"",\n grain.checkpoint.CheckpointRestore,\n cast(\n ocp.handlers.CheckpointHandler, grain.checkpoint.CheckpointHandler\n ),\n )\n checkpoint_options = ocp.CheckpointManagerOptions(\n save_interval_steps=args.log_checkpoint_interval,\n max_to_keep=3,\n keep_period=args.log_checkpoint_keep_period,\n step_format_fixed_length=6,\n cleanup_tmp_directories=True,\n )\n checkpoint_manager = ocp.CheckpointManager(\n args.ckpt_dir,\n options=checkpoint_options,\n handler_registry=handler_registry,\n )\n return checkpoint_manager\n else:\n return None\n\n\ndef restore_or_initialize_components(\n args: Args,\n checkpoint_manager: Optional[ocp.CheckpointManager],\n optimizer: nnx.ModelAndOptimizer,\n train_iterator: grain.DataLoaderIterator,\n rng: jax.Array,\n replicated_sharding: NamedSharding,\n val_iterator: Optional[grain.DataLoaderIterator],\n restore_step: Optional[int] = None,\n) -> tuple[\n int,\n nnx.ModelAndOptimizer,\n grain.DataLoaderIterator,\n grain.DataLoaderIterator,\n jax.Array,\n]:\n step = 0\n if checkpoint_manager and restore_step is None:\n restore_step = checkpoint_manager.latest_step()\n if args.restore_ckpt:\n assert checkpoint_manager is not None\n abstract_optimizer = nnx.eval_shape(lambda: optimizer)\n abstract_optimizer_state = nnx.state(abstract_optimizer)\n if val_iterator:\n restore_args = ocp.args.Composite(\n model_state=ocp.args.PyTreeRestore(abstract_optimizer_state), # type: ignore\n train_dataloader_state=grain.checkpoint.CheckpointRestore(train_iterator), # type: ignore\n val_dataloader_state=grain.checkpoint.CheckpointRestore(val_iterator), # type: ignore\n )\n else:\n restore_args = ocp.args.Composite(\n model_state=ocp.args.PyTreeRestore(abstract_optimizer_state), # type: ignore\n train_dataloader_state=grain.checkpoint.CheckpointRestore(train_iterator), # type: ignore\n )\n restored = checkpoint_manager.restore(\n checkpoint_manager.latest_step(), args=restore_args\n )\n restored_optimizer_state = restored[""model_state""]\n nnx.update(optimizer, restored_optimizer_state)\n train_iterator = restored[""train_dataloader_state""]\n if val_iterator:\n val_iterator = restored[""val_dataloader_state""]\n step = checkpoint_manager.latest_step() or 0\n print(f""Restored dataloader and model state from step {step}"")\n else:\n # Restore from pre-trained tokenizer (and LAM)\n rng, _rng = jax.random.split(rng)\n optimizer = restore_genie_components(optimizer, replicated_sharding, _rng, args)\n return step, optimizer, train_iterator, val_iterator, rng\n\n\ndef _calculate_top_k_accuracy(\n token_logits_BTNV: jax.Array,\n video_tokens_BTN: 
jax.Array,\n mask_BTN: jax.Array,\n k: int,\n) -> jax.Array:\n _, topk_indices_BTNK = jax.lax.top_k(token_logits_BTNV, k)\n topk_correct = jnp.any(\n topk_indices_BTNK == video_tokens_BTN[..., jnp.newaxis], axis=-1\n )\n topk_acc = (mask_BTN * topk_correct).sum() / mask_BTN.sum()\n return topk_acc\n\n\ndef _calculate_step_metrics(\n outputs: dict[str, jax.Array],\n gt: jax.Array,\n num_actions: int,\n num_patch_latents: int,\n) -> tuple[jax.Array, dict]:\n mask_BTN = outputs[""mask""]\n outputs[""token_logits""] = outputs[""token_logits""].astype(jnp.float32)\n ce_loss = optax.softmax_cross_entropy_with_integer_labels(\n outputs[""token_logits""], outputs[""video_tokens""]\n )\n ce_loss = (mask_BTN * ce_loss).sum() / mask_BTN.sum()\n z_val = jax.nn.logsumexp(outputs[""token_logits""], axis=-1)\n z_loss_metric = (mask_BTN * (z_val**2)).sum() / mask_BTN.sum()\n\n masked_token_top_1_acc = _calculate_top_k_accuracy(\n outputs[""token_logits""], outputs[""video_tokens""], mask_BTN, 1\n )\n masked_token_top_2_acc = _calculate_top_k_accuracy(\n outputs[""token_logits""], outputs[""video_tokens""], mask_BTN, 2\n )\n masked_token_top_5_acc = _calculate_top_k_accuracy(\n outputs[""token_logits""], outputs[""video_tokens""], mask_BTN, 5\n )\n masked_token_top_16_acc = _calculate_top_k_accuracy(\n outputs[""token_logits""], outputs[""video_tokens""], mask_BTN, 16\n )\n\n select_probs = jax.nn.softmax(outputs[""token_logits""])\n gt_val = gt.clip(0, 1).reshape(-1, *gt.shape[2:])\n recon = outputs[""recon""].clip(0, 1).reshape(-1, *outputs[""recon""].shape[2:])\n psnr = jnp.asarray(pix.psnr(gt_val, recon)).mean()\n ssim = jnp.asarray(pix.ssim(gt_val, recon)).mean()\n _, index_counts_tokenizer = jnp.unique_counts(\n jnp.ravel(outputs[""video_tokens""]),\n size=num_patch_latents,\n fill_value=0,\n )\n codebook_usage_tokenizer = (index_counts_tokenizer != 0).mean()\n metrics = dict(\n cross_entropy_loss=ce_loss,\n masked_token_top1_accuracy=masked_token_top_1_acc,\n masked_token_top2_accuracy=masked_token_top_2_acc,\n masked_token_top5_accuracy=masked_token_top_5_acc,\n masked_token_top16_accuracy=masked_token_top_16_acc,\n select_logit=outputs[""token_logits""].max(-1).mean(),\n select_p=select_probs.max(-1).mean(),\n entropy=jax.scipy.special.entr(select_probs).sum(-1).mean(),\n z_loss=z_loss_metric,\n psnr=psnr,\n ssim=ssim,\n codebook_usage_tokenizer=codebook_usage_tokenizer,\n )\n if ""lam_indices"" in outputs.keys():\n _, index_counts_lam = jnp.unique_counts(\n jnp.ravel(outputs[""lam_indices""]),\n size=num_actions,\n fill_value=0,\n )\n codebook_usage_lam = (index_counts_lam != 0).mean()\n metrics[""codebook_usage_lam""] = codebook_usage_lam\n return ce_loss, metrics\n\n\ndef main(args: Args) -> None:\n jax.distributed.initialize()\n num_devices = jax.device_count()\n if num_devices == 0:\n raise ValueError(""No JAX devices found."")\n print(f""Running on {num_devices} devices."")\n\n if args.batch_size % num_devices != 0:\n raise ValueError(\n f""Global batch size {args.batch_size} must be divisible by ""\n f""number of devices {num_devices}.""\n )\n\n rng = jax.random.key(args.seed)\n\n # --- Initialize model ---\n genie, rng = build_model(args, rng)\n _, params, _ = nnx.split(genie, nnx.Param, ...)\n param_counts = count_parameters_by_component(params)\n\n if args.log and jax.process_index() == 0:\n wandb_init_kwargs = {\n ""entity"": args.entity,\n ""project"": args.project,\n ""name"": args.name,\n ""tags"": args.tags,\n ""group"": ""debug"",\n ""config"": args,\n }\n\n if args.wandb_id:\n 
wandb_init_kwargs.update(\n {\n ""id"": args.wandb_id,\n ""resume"": ""allow"",\n }\n )\n wandb.init(**wandb_init_kwargs)\n\n wandb.config.update({""model_param_count"": param_counts})\n\n print(""Parameter counts:"")\n print(param_counts)\n\n # --- Initialize optimizer ---\n optimizer = build_optimizer(genie, args)\n del genie\n\n # FIXME: switch to create_hybrid_device_mesh for runs spanning multiple nodes\n _, replicated_sharding, videos_sharding, actions_sharding = build_mesh_and_sharding(\n num_devices\n )\n\n shard_optimizer_states(optimizer, replicated_sharding)\n\n # --- Initialize checkpoint manager ---\n checkpoint_manager = build_checkpoint_manager(args)\n\n # --- Create DataLoaderIterator from dataloader ---\n train_iterator = build_dataloader(args, args.data_dir)\n val_iterator = None\n if args.val_data_dir:\n val_iterator = build_dataloader(args, args.val_data_dir)\n\n # --- Restore checkpoint ---\n step, optimizer, train_iterator, val_iterator, rng = (\n restore_or_initialize_components(\n args,\n checkpoint_manager,\n optimizer,\n train_iterator,\n rng,\n replicated_sharding,\n val_iterator,\n )\n )\n\n # --- Define loss and train step (close over args) ---\n def dynamics_loss_fn(\n model: Genie,\n inputs: dict,\n ) -> tuple[jax.Array, tuple[jax.Array, dict]]:\n gt = jnp.asarray(inputs[""videos""], dtype=jnp.float32) / 255.0\n inputs[""videos""] = gt.astype(args.dtype)\n outputs = model(inputs)\n ce_loss, metrics = _calculate_step_metrics(\n outputs, gt, args.num_actions, args.num_patch_latents\n )\n z_loss = metrics[""z_loss""]\n total_loss = ce_loss + args.z_loss_weight * z_loss\n metrics[""total_loss""] = total_loss\n return total_loss, (outputs[""recon""], metrics)\n\n @nnx.jit(donate_argnums=0)\n def train_step(\n optimizer: nnx.ModelAndOptimizer, inputs: dict\n ) -> tuple[jax.Array, jax.Array, dict]:\n def loss_fn(model: Genie) -> tuple[jax.Array, tuple[jax.Array, dict]]:\n model.train()\n return dynamics_loss_fn(model, inputs)\n\n (loss, (recon, metrics)), grads = nnx.value_and_grad(loss_fn, has_aux=True)(\n optimizer.model\n )\n optimizer.update(grads)\n if args.log_gradients:\n metrics[""gradients_std/""] = jax.tree.map(\n lambda x: x.std(), grads[""params""][""dynamics""]\n )\n return loss, recon, metrics\n\n @nnx.jit\n def val_step(genie: Genie, inputs: dict) -> dict:\n """"""Evaluate model and compute metrics""""""\n genie.eval()\n gt = jnp.asarray(inputs[""videos""], dtype=jnp.float32) / 255.0\n (loss, (recon, metrics)) = dynamics_loss_fn(genie, inputs)\n val_output = {""loss"": loss, ""recon"": recon, ""metrics"": metrics}\n\n # --- Evaluate full frame prediction (sampling) ---\n if args.eval_full_frame:\n inputs[""videos""] = gt.astype(args.dtype)\n tokenizer_outputs = genie.tokenizer.vq_encode(\n inputs[""videos""], training=False\n )\n tokens_full_frame = tokenizer_outputs[""indices""]\n lam_indices_E = None\n if not args.use_gt_actions:\n lam_indices_E = genie.vq_encode(inputs, training=False)\n inputs[""latent_actions""] = lam_indices_E\n inputs[""videos""] = inputs[""videos""][\n :, :-1\n ] # remove last frame for generation\n recon_full_frame, logits_full_frame = genie.sample(\n inputs,\n args.seq_len,\n args.val_temperature,\n args.val_sample_argmax,\n args.val_maskgit_steps,\n )\n # Calculate metrics for the last frame only\n step_outputs = {\n ""recon"": recon_full_frame[:, -1],\n ""token_logits"": logits_full_frame[:, -1],\n ""video_tokens"": tokens_full_frame[:, -1],\n ""mask"": jnp.ones_like(tokens_full_frame[:, -1]),\n }\n if lam_indices_E is not 
None:\n lam_indices_B = lam_indices_E.reshape((-1, args.seq_len - 1))[:, -1]\n step_outputs[""lam_indices""] = lam_indices_B\n\n loss_full_frame, metrics_full_frame = _calculate_step_metrics(\n step_outputs, gt[:, -1], args.num_actions, args.num_patch_latents\n )\n val_output.update(\n {\n ""loss_full_frame"": loss_full_frame,\n ""recon_full_frame"": recon_full_frame,\n ""metrics_full_frame"": metrics_full_frame,\n }\n )\n return val_output\n\n def calculate_validation_metrics(val_dataloader, genie, rng):\n step = 0\n loss_per_step = []\n metrics_per_step = []\n loss_full_frame_per_step = []\n metrics_full_frame_per_step = []\n batch = None\n recon = None\n recon_full_frame = None\n for batch in val_dataloader:\n rng, _rng_mask = jax.random.split(rng, 2)\n batch[""rng""] = _rng_mask\n val_outputs = val_step(genie, batch)\n loss_per_step.append(val_outputs[""loss""])\n metrics_per_step.append(val_outputs[""metrics""])\n recon = val_outputs[""recon""]\n if args.eval_full_frame:\n loss_full_frame_per_step.append(val_outputs[""loss_full_frame""])\n metrics_full_frame_per_step.append(val_outputs[""metrics_full_frame""])\n recon_full_frame = val_outputs[""recon_full_frame""]\n step += 1\n if step > args.val_steps:\n break\n\n if step < args.val_steps:\n print(\n f""Warning: Your validation dataset is too small to make val_steps many steps. Made {step} steps, expected {args.val_steps}""\n )\n\n val_metrics = {\n f""val_{key}"": np.mean([float(m[key]) for m in metrics_per_step])\n for key in metrics_per_step[0].keys()\n }\n val_metrics[""val_loss""] = np.mean(loss_per_step)\n if args.eval_full_frame:\n val_metrics_full_frame = {\n f""val_full_frame_{key}"": np.mean(\n [float(m[key]) for m in metrics_full_frame_per_step]\n )\n for key in metrics_full_frame_per_step[0].keys()\n }\n val_metrics.update(val_metrics_full_frame)\n val_metrics[""val_full_frame_loss""] = np.mean(loss_full_frame_per_step)\n return val_metrics, batch, recon, recon_full_frame\n\n # --- TRAIN LOOP ---\n dataloader_train = (\n {\n ""videos"": jax.make_array_from_process_local_data(\n videos_sharding, local_data=elem[""videos""]\n ),\n ""actions"": (\n jax.make_array_from_process_local_data(\n actions_sharding, elem[""actions""]\n )\n if args.use_gt_actions\n else None\n ),\n }\n for elem in train_iterator\n )\n dataloader_val = None\n if val_iterator:\n dataloader_val = (\n {\n ""videos"": jax.make_array_from_process_local_data(\n videos_sharding, elem[""videos""]\n ),\n ""actions"": (\n jax.make_array_from_process_local_data(\n actions_sharding, elem[""actions""]\n )\n if args.use_gt_actions\n else None\n ),\n }\n for elem in val_iterator\n )\n if jax.process_index() == 0:\n first_batch = next(dataloader_train)\n first_batch[""rng""] = rng # type: ignore\n compiled = train_step.lower(optimizer, first_batch).compile()\n print_compiled_memory_stats(compiled.memory_analysis())\n print_compiled_cost_analysis(compiled.cost_analysis())\n # Do not skip the first batch during training\n dataloader_train = itertools.chain([first_batch], dataloader_train)\n print(f""Starting training from step {step}..."")\n first_step = step\n while step < args.num_steps:\n for batch in dataloader_train:\n # --- Train step ---\n rng, _rng_mask = jax.random.split(rng, 2)\n batch[""rng""] = _rng_mask\n loss, recon, metrics = train_step(optimizer, batch)\n if step == first_step:\n print_mem_stats(""After params initialized"")\n step += 1\n\n # --- Validation loss ---\n val_results = {}\n if dataloader_val and step % args.val_interval == 0:\n rng, 
_rng_mask_val = jax.random.split(rng, 2)\n print(""Calculating validation metrics..."")\n val_metrics, val_gt_batch, val_recon, val_recon_full_frame = (\n calculate_validation_metrics(\n dataloader_val, optimizer.model, _rng_mask_val\n )\n )\n print(f""Step {step}, validation loss: {val_metrics['val_loss']}"")\n val_results = {\n ""metrics"": val_metrics,\n ""gt_batch"": val_gt_batch,\n ""recon"": val_recon,\n ""full_frame"": val_recon_full_frame,\n }\n\n # --- Logging ---\n if args.log:\n if step % args.log_interval == 0 and jax.process_index() == 0:\n log_dict = {""loss"": loss, ""step"": step, **metrics}\n if val_results:\n log_dict.update(val_results[""metrics""])\n wandb.log(log_dict)\n if step % args.log_image_interval == 0:\n gt_seq = batch[""videos""][0].astype(jnp.float32) / 255.0\n recon_seq = recon[0].clip(0, 1)\n comparison_seq = jnp.concatenate((gt_seq, recon_seq), axis=1)\n comparison_seq = einops.rearrange(\n comparison_seq * 255, ""t h w c -> h (t w) c""\n )\n if val_results:\n val_results[""gt_seq_val""] = (\n val_results[""gt_batch""][""videos""][0].astype(jnp.float32)\n / 255.0\n )\n val_results[""recon_seq_val""] = val_results[""recon""][0].clip(\n 0, 1\n )\n val_comparison_seq = jnp.concatenate(\n (val_results[""gt_seq_val""], val_results[""recon_seq_val""]),\n axis=1,\n )\n val_results[""val_comparison_seq""] = einops.rearrange(\n val_comparison_seq * 255, ""t h w c -> h (t w) c""\n )\n if args.eval_full_frame:\n val_results[""full_frame_seq_val""] = val_results[\n ""full_frame""\n ][0].clip(0, 1)\n val_results[""val_full_frame_comparison_seq""] = (\n jnp.concatenate(\n (\n val_results[""gt_seq_val""],\n val_results[""full_frame_seq_val""],\n ),\n axis=1,\n )\n )\n val_results[""val_full_frame_comparison_seq""] = (\n einops.rearrange(\n val_results[""val_full_frame_comparison_seq""] * 255,\n ""t h w c -> h (t w) c"",\n )\n )\n # NOTE: Process-dependent control flow deliberately happens\n # after indexing operation since it must not contain code\n # sections that lead to cross-accelerator communication.\n if jax.process_index() == 0:\n log_images = dict(\n image=wandb.Image(np.asarray(gt_seq[args.seq_len - 1])),\n recon=wandb.Image(np.asarray(recon_seq[args.seq_len - 1])),\n true_vs_recon=wandb.Image(\n np.asarray(comparison_seq.astype(np.uint8))\n ),\n )\n if val_results:\n log_images.update(\n dict(\n val_image=wandb.Image(\n np.asarray(\n val_results[""gt_seq_val""][args.seq_len - 1]\n )\n ),\n val_recon=wandb.Image(\n np.asarray(\n val_results[""recon_seq_val""][\n args.seq_len - 1\n ]\n )\n ),\n val_true_vs_recon=wandb.Image(\n np.asarray(\n val_results[""val_comparison_seq""].astype(\n np.uint8\n )\n )\n ),\n )\n )\n if args.eval_full_frame:\n log_images.update(\n dict(\n val_full_frame=wandb.Image(\n np.asarray(\n val_results[""full_frame_seq_val""][\n args.seq_len - 1\n ]\n )\n ),\n val_true_vs_full_frame=wandb.Image(\n np.asarray(\n val_results[\n ""val_full_frame_comparison_seq""\n ].astype(np.uint8)\n )\n ),\n )\n )\n wandb.log(log_images)\n # --- Checkpointing ---\n if args.save_ckpt and step % args.log_checkpoint_interval == 0:\n assert checkpoint_manager is not None\n optimizer_state = nnx.state(optimizer)\n if val_iterator:\n ckpt_manager_args = ocp.args.Composite(\n model_state=ocp.args.PyTreeSave(optimizer_state), # type: ignore\n train_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n train_iterator # type: ignore\n ),\n val_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n val_iterator # type: ignore\n ),\n )\n 
else:\n ckpt_manager_args = ocp.args.Composite(\n model_state=ocp.args.PyTreeSave(optimizer_state), # type: ignore\n train_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n train_iterator # type: ignore\n ),\n )\n checkpoint_manager.save(step, args=ckpt_manager_args)\n print(f""Saved checkpoint at step {step}"")\n if step >= args.num_steps:\n break\n\n if checkpoint_manager:\n checkpoint_manager.close()\n\n\nif __name__ == ""__main__"":\n args = tyro.cli(Args)\n main(args)\n",python,tab +1562,1245461,"TERMINAL",0,0,"10",,terminal_output +1563,1245777,"jasmine/train_dynamics.py",2026,0,"",python,selection_mouse +1564,1245778,"jasmine/train_dynamics.py",2025,0,"",python,selection_command +1565,1246503,"TERMINAL",0,0,"1",,terminal_output +1566,1247708,"TERMINAL",0,0,"2",,terminal_output +1567,1248538,"TERMINAL",0,0,"4",,terminal_output +1568,1249558,"TERMINAL",0,0,"5",,terminal_output +1569,1250552,"TERMINAL",0,0,"6",,terminal_output +1570,1251266,"jasmine/train_dynamics.py",31671,0,"",python,selection_mouse +1571,1251570,"TERMINAL",0,0,"7",,terminal_output +1572,1252138,"jasmine/train_dynamics.py",31537,0,"",python,selection_mouse +1573,1252138,"jasmine/train_dynamics.py",31536,0,"",python,selection_command +1574,1252591,"TERMINAL",0,0,"8",,terminal_output +1575,1252784,"jasmine/train_dynamics.py",31516,21," break",python,selection_command +1576,1253004,"jasmine/train_dynamics.py",31477,60," if step >= args.num_steps:\n break",python,selection_command +1577,1253503,"jasmine/train_dynamics.py",31419,118," print(f""Saved checkpoint at step {step}"")\n if step >= args.num_steps:\n break",python,selection_command +1578,1253532,"jasmine/train_dynamics.py",31349,188," checkpoint_manager.save(step, args=ckpt_manager_args)\n print(f""Saved checkpoint at step {step}"")\n if step >= args.num_steps:\n break",python,selection_command +1579,1253575,"jasmine/train_dynamics.py",31327,210," )\n checkpoint_manager.save(step, args=ckpt_manager_args)\n print(f""Saved checkpoint at step {step}"")\n if step >= args.num_steps:\n break",python,selection_command +1580,1253584,"jasmine/train_dynamics.py",31300,237," ),\n )\n checkpoint_manager.save(step, args=ckpt_manager_args)\n print(f""Saved checkpoint at step {step}"")\n if step >= args.num_steps:\n break",python,selection_command +1581,1253621,"jasmine/train_dynamics.py",31241,296," train_iterator # type: ignore\n ),\n )\n checkpoint_manager.save(step, args=ckpt_manager_args)\n print(f""Saved checkpoint at step {step}"")\n if step >= args.num_steps:\n break",python,selection_command +1582,1253665,"jasmine/train_dynamics.py",31145,392," train_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n train_iterator # type: ignore\n ),\n )\n checkpoint_manager.save(step, args=ckpt_manager_args)\n print(f""Saved checkpoint at step {step}"")\n if step >= args.num_steps:\n break",python,selection_command +1583,1253687,"jasmine/train_dynamics.py",31055,482," model_state=ocp.args.PyTreeSave(optimizer_state), # type: ignore\n train_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n train_iterator # type: ignore\n ),\n )\n checkpoint_manager.save(step, args=ckpt_manager_args)\n print(f""Saved checkpoint at step {step}"")\n if step >= args.num_steps:\n break",python,selection_command +1584,1253687,"TERMINAL",0,0,"9",,terminal_output +1585,1253729,"jasmine/train_dynamics.py",30995,542," ckpt_manager_args = ocp.args.Composite(\n model_state=ocp.args.PyTreeSave(optimizer_state), # type: ignore\n 
train_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n train_iterator # type: ignore\n ),\n )\n checkpoint_manager.save(step, args=ckpt_manager_args)\n print(f""Saved checkpoint at step {step}"")\n if step >= args.num_steps:\n break",python,selection_command +1586,1253760,"jasmine/train_dynamics.py",30973,564," else:\n ckpt_manager_args = ocp.args.Composite(\n model_state=ocp.args.PyTreeSave(optimizer_state), # type: ignore\n train_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n train_iterator # type: ignore\n ),\n )\n checkpoint_manager.save(step, args=ckpt_manager_args)\n print(f""Saved checkpoint at step {step}"")\n if step >= args.num_steps:\n break",python,selection_command +1587,1253792,"jasmine/train_dynamics.py",30951,586," )\n else:\n ckpt_manager_args = ocp.args.Composite(\n model_state=ocp.args.PyTreeSave(optimizer_state), # type: ignore\n train_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n train_iterator # type: ignore\n ),\n )\n checkpoint_manager.save(step, args=ckpt_manager_args)\n print(f""Saved checkpoint at step {step}"")\n if step >= args.num_steps:\n break",python,selection_command +1588,1253822,"jasmine/train_dynamics.py",30924,613," ),\n )\n else:\n ckpt_manager_args = ocp.args.Composite(\n model_state=ocp.args.PyTreeSave(optimizer_state), # type: ignore\n train_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n train_iterator # type: ignore\n ),\n )\n checkpoint_manager.save(step, args=ckpt_manager_args)\n print(f""Saved checkpoint at step {step}"")\n if step >= args.num_steps:\n break",python,selection_command +1589,1253863,"jasmine/train_dynamics.py",30867,670," val_iterator # type: ignore\n ),\n )\n else:\n ckpt_manager_args = ocp.args.Composite(\n model_state=ocp.args.PyTreeSave(optimizer_state), # type: ignore\n train_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n train_iterator # type: ignore\n ),\n )\n checkpoint_manager.save(step, args=ckpt_manager_args)\n print(f""Saved checkpoint at step {step}"")\n if step >= args.num_steps:\n break",python,selection_command +1590,1253863,"jasmine/train_dynamics.py",30773,764," val_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n val_iterator # type: ignore\n ),\n )\n else:\n ckpt_manager_args = ocp.args.Composite(\n model_state=ocp.args.PyTreeSave(optimizer_state), # type: ignore\n train_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n train_iterator # type: ignore\n ),\n )\n checkpoint_manager.save(step, args=ckpt_manager_args)\n print(f""Saved checkpoint at step {step}"")\n if step >= args.num_steps:\n break",python,selection_command +1591,1253884,"jasmine/train_dynamics.py",30746,791," ),\n val_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n val_iterator # type: ignore\n ),\n )\n else:\n ckpt_manager_args = ocp.args.Composite(\n model_state=ocp.args.PyTreeSave(optimizer_state), # type: ignore\n train_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n train_iterator # type: ignore\n ),\n )\n checkpoint_manager.save(step, args=ckpt_manager_args)\n print(f""Saved checkpoint at step {step}"")\n if step >= args.num_steps:\n break",python,selection_command +1592,1253916,"jasmine/train_dynamics.py",30687,850," train_iterator # type: ignore\n ),\n val_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n val_iterator # type: ignore\n ),\n )\n else:\n ckpt_manager_args = ocp.args.Composite(\n model_state=ocp.args.PyTreeSave(optimizer_state), # type: 
ignore\n train_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n train_iterator # type: ignore\n ),\n )\n checkpoint_manager.save(step, args=ckpt_manager_args)\n print(f""Saved checkpoint at step {step}"")\n if step >= args.num_steps:\n break",python,selection_command +1593,1253955,"jasmine/train_dynamics.py",30591,946," train_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n train_iterator # type: ignore\n ),\n val_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n val_iterator # type: ignore\n ),\n )\n else:\n ckpt_manager_args = ocp.args.Composite(\n model_state=ocp.args.PyTreeSave(optimizer_state), # type: ignore\n train_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n train_iterator # type: ignore\n ),\n )\n checkpoint_manager.save(step, args=ckpt_manager_args)\n print(f""Saved checkpoint at step {step}"")\n if step >= args.num_steps:\n break",python,selection_command +1594,1253971,"jasmine/train_dynamics.py",30501,1036," model_state=ocp.args.PyTreeSave(optimizer_state), # type: ignore\n train_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n train_iterator # type: ignore\n ),\n val_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n val_iterator # type: ignore\n ),\n )\n else:\n ckpt_manager_args = ocp.args.Composite(\n model_state=ocp.args.PyTreeSave(optimizer_state), # type: ignore\n train_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n train_iterator # type: ignore\n ),\n )\n checkpoint_manager.save(step, args=ckpt_manager_args)\n print(f""Saved checkpoint at step {step}"")\n if step >= args.num_steps:\n break",python,selection_command +1595,1254014,"jasmine/train_dynamics.py",30441,1096," ckpt_manager_args = ocp.args.Composite(\n model_state=ocp.args.PyTreeSave(optimizer_state), # type: ignore\n train_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n train_iterator # type: ignore\n ),\n val_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n val_iterator # type: ignore\n ),\n )\n else:\n ckpt_manager_args = ocp.args.Composite(\n model_state=ocp.args.PyTreeSave(optimizer_state), # type: ignore\n train_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n train_iterator # type: ignore\n ),\n )\n checkpoint_manager.save(step, args=ckpt_manager_args)\n print(f""Saved checkpoint at step {step}"")\n if step >= args.num_steps:\n break",python,selection_command +1596,1254057,"jasmine/train_dynamics.py",30408,1129," if val_iterator:\n ckpt_manager_args = ocp.args.Composite(\n model_state=ocp.args.PyTreeSave(optimizer_state), # type: ignore\n train_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n train_iterator # type: ignore\n ),\n val_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n val_iterator # type: ignore\n ),\n )\n else:\n ckpt_manager_args = ocp.args.Composite(\n model_state=ocp.args.PyTreeSave(optimizer_state), # type: ignore\n train_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n train_iterator # type: ignore\n ),\n )\n checkpoint_manager.save(step, args=ckpt_manager_args)\n print(f""Saved checkpoint at step {step}"")\n if step >= args.num_steps:\n break",python,selection_command +1597,1254109,"jasmine/train_dynamics.py",30353,1184," optimizer_state = nnx.state(optimizer)\n if val_iterator:\n ckpt_manager_args = ocp.args.Composite(\n model_state=ocp.args.PyTreeSave(optimizer_state), # type: ignore\n train_dataloader_state=grain.checkpoint.CheckpointSave( # type: 
ignore\n train_iterator # type: ignore\n ),\n val_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n val_iterator # type: ignore\n ),\n )\n else:\n ckpt_manager_args = ocp.args.Composite(\n model_state=ocp.args.PyTreeSave(optimizer_state), # type: ignore\n train_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n train_iterator # type: ignore\n ),\n )\n checkpoint_manager.save(step, args=ckpt_manager_args)\n print(f""Saved checkpoint at step {step}"")\n if step >= args.num_steps:\n break",python,selection_command +1598,1254111,"jasmine/train_dynamics.py",30299,1238," assert checkpoint_manager is not None\n optimizer_state = nnx.state(optimizer)\n if val_iterator:\n ckpt_manager_args = ocp.args.Composite(\n model_state=ocp.args.PyTreeSave(optimizer_state), # type: ignore\n train_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n train_iterator # type: ignore\n ),\n val_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n val_iterator # type: ignore\n ),\n )\n else:\n ckpt_manager_args = ocp.args.Composite(\n model_state=ocp.args.PyTreeSave(optimizer_state), # type: ignore\n train_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n train_iterator # type: ignore\n ),\n )\n checkpoint_manager.save(step, args=ckpt_manager_args)\n print(f""Saved checkpoint at step {step}"")\n if step >= args.num_steps:\n break",python,selection_command +1599,1254123,"jasmine/train_dynamics.py",30223,1314," if args.save_ckpt and step % args.log_checkpoint_interval == 0:\n assert checkpoint_manager is not None\n optimizer_state = nnx.state(optimizer)\n if val_iterator:\n ckpt_manager_args = ocp.args.Composite(\n model_state=ocp.args.PyTreeSave(optimizer_state), # type: ignore\n train_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n train_iterator # type: ignore\n ),\n val_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n val_iterator # type: ignore\n ),\n )\n else:\n ckpt_manager_args = ocp.args.Composite(\n model_state=ocp.args.PyTreeSave(optimizer_state), # type: ignore\n train_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n train_iterator # type: ignore\n ),\n )\n checkpoint_manager.save(step, args=ckpt_manager_args)\n print(f""Saved checkpoint at step {step}"")\n if step >= args.num_steps:\n break",python,selection_command +1600,1254164,"jasmine/train_dynamics.py",30187,1350," # --- Checkpointing ---\n if args.save_ckpt and step % args.log_checkpoint_interval == 0:\n assert checkpoint_manager is not None\n optimizer_state = nnx.state(optimizer)\n if val_iterator:\n ckpt_manager_args = ocp.args.Composite(\n model_state=ocp.args.PyTreeSave(optimizer_state), # type: ignore\n train_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n train_iterator # type: ignore\n ),\n val_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n val_iterator # type: ignore\n ),\n )\n else:\n ckpt_manager_args = ocp.args.Composite(\n model_state=ocp.args.PyTreeSave(optimizer_state), # type: ignore\n train_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n train_iterator # type: ignore\n ),\n )\n checkpoint_manager.save(step, args=ckpt_manager_args)\n print(f""Saved checkpoint at step {step}"")\n if step >= args.num_steps:\n break",python,selection_command +1601,1254252,"jasmine/train_dynamics.py",30141,1396," wandb.log(log_images)\n # --- Checkpointing ---\n if args.save_ckpt and step % args.log_checkpoint_interval == 0:\n assert checkpoint_manager is not None\n 
optimizer_state = nnx.state(optimizer)\n if val_iterator:\n ckpt_manager_args = ocp.args.Composite(\n model_state=ocp.args.PyTreeSave(optimizer_state), # type: ignore\n train_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n train_iterator # type: ignore\n ),\n val_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n val_iterator # type: ignore\n ),\n )\n else:\n ckpt_manager_args = ocp.args.Composite(\n model_state=ocp.args.PyTreeSave(optimizer_state), # type: ignore\n train_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n train_iterator # type: ignore\n ),\n )\n checkpoint_manager.save(step, args=ckpt_manager_args)\n print(f""Saved checkpoint at step {step}"")\n if step >= args.num_steps:\n break",python,selection_command +1602,1254259,"jasmine/train_dynamics.py",30107,1430," )\n wandb.log(log_images)\n # --- Checkpointing ---\n if args.save_ckpt and step % args.log_checkpoint_interval == 0:\n assert checkpoint_manager is not None\n optimizer_state = nnx.state(optimizer)\n if val_iterator:\n ckpt_manager_args = ocp.args.Composite(\n model_state=ocp.args.PyTreeSave(optimizer_state), # type: ignore\n train_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n train_iterator # type: ignore\n ),\n val_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n val_iterator # type: ignore\n ),\n )\n else:\n ckpt_manager_args = ocp.args.Composite(\n model_state=ocp.args.PyTreeSave(optimizer_state), # type: ignore\n train_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n train_iterator # type: ignore\n ),\n )\n checkpoint_manager.save(step, args=ckpt_manager_args)\n print(f""Saved checkpoint at step {step}"")\n if step >= args.num_steps:\n break",python,selection_command +1603,1254260,"jasmine/train_dynamics.py",30069,1468," )\n )\n wandb.log(log_images)\n # --- Checkpointing ---\n if args.save_ckpt and step % args.log_checkpoint_interval == 0:\n assert checkpoint_manager is not None\n optimizer_state = nnx.state(optimizer)\n if val_iterator:\n ckpt_manager_args = ocp.args.Composite(\n model_state=ocp.args.PyTreeSave(optimizer_state), # type: ignore\n train_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n train_iterator # type: ignore\n ),\n val_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n val_iterator # type: ignore\n ),\n )\n else:\n ckpt_manager_args = ocp.args.Composite(\n model_state=ocp.args.PyTreeSave(optimizer_state), # type: ignore\n train_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n train_iterator # type: ignore\n ),\n )\n checkpoint_manager.save(step, args=ckpt_manager_args)\n print(f""Saved checkpoint at step {step}"")\n if step >= args.num_steps:\n break",python,selection_command +1604,1254300,"jasmine/train_dynamics.py",30026,1511," ),\n )\n )\n wandb.log(log_images)\n # --- Checkpointing ---\n if args.save_ckpt and step % args.log_checkpoint_interval == 0:\n assert checkpoint_manager is not None\n optimizer_state = nnx.state(optimizer)\n if val_iterator:\n ckpt_manager_args = ocp.args.Composite(\n model_state=ocp.args.PyTreeSave(optimizer_state), # type: ignore\n train_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n train_iterator # type: ignore\n ),\n val_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n val_iterator # type: ignore\n ),\n )\n else:\n ckpt_manager_args = ocp.args.Composite(\n model_state=ocp.args.PyTreeSave(optimizer_state), # type: ignore\n 
train_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n train_iterator # type: ignore\n ),\n )\n checkpoint_manager.save(step, args=ckpt_manager_args)\n print(f""Saved checkpoint at step {step}"")\n if step >= args.num_steps:\n break",python,selection_command +1605,1254305,"jasmine/train_dynamics.py",29980,1557," )\n ),\n )\n )\n wandb.log(log_images)\n # --- Checkpointing ---\n if args.save_ckpt and step % args.log_checkpoint_interval == 0:\n assert checkpoint_manager is not None\n optimizer_state = nnx.state(optimizer)\n if val_iterator:\n ckpt_manager_args = ocp.args.Composite(\n model_state=ocp.args.PyTreeSave(optimizer_state), # type: ignore\n train_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n train_iterator # type: ignore\n ),\n val_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n val_iterator # type: ignore\n ),\n )\n else:\n ckpt_manager_args = ocp.args.Composite(\n model_state=ocp.args.PyTreeSave(optimizer_state), # type: ignore\n train_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n train_iterator # type: ignore\n ),\n )\n checkpoint_manager.save(step, args=ckpt_manager_args)\n print(f""Saved checkpoint at step {step}"")\n if step >= args.num_steps:\n break",python,selection_command +1606,1254349,"jasmine/train_dynamics.py",29913,1624," ].astype(np.uint8)\n )\n ),\n )\n )\n wandb.log(log_images)\n # --- Checkpointing ---\n if args.save_ckpt and step % args.log_checkpoint_interval == 0:\n assert checkpoint_manager is not None\n optimizer_state = nnx.state(optimizer)\n if val_iterator:\n ckpt_manager_args = ocp.args.Composite(\n model_state=ocp.args.PyTreeSave(optimizer_state), # type: ignore\n train_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n train_iterator # type: ignore\n ),\n val_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n val_iterator # type: ignore\n ),\n )\n else:\n ckpt_manager_args = ocp.args.Composite(\n model_state=ocp.args.PyTreeSave(optimizer_state), # type: ignore\n train_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n train_iterator # type: ignore\n ),\n )\n checkpoint_manager.save(step, args=ckpt_manager_args)\n print(f""Saved checkpoint at step {step}"")\n if step >= args.num_steps:\n break",python,selection_command +1607,1254368,"jasmine/train_dynamics.py",29829,1708," ""val_full_frame_comparison_seq""\n ].astype(np.uint8)\n )\n ),\n )\n )\n wandb.log(log_images)\n # --- Checkpointing ---\n if args.save_ckpt and step % args.log_checkpoint_interval == 0:\n assert checkpoint_manager is not None\n optimizer_state = nnx.state(optimizer)\n if val_iterator:\n ckpt_manager_args = ocp.args.Composite(\n model_state=ocp.args.PyTreeSave(optimizer_state), # type: ignore\n train_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n train_iterator # type: ignore\n ),\n val_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n val_iterator # type: ignore\n ),\n )\n else:\n ckpt_manager_args = ocp.args.Composite(\n model_state=ocp.args.PyTreeSave(optimizer_state), # type: ignore\n train_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n train_iterator # type: ignore\n ),\n )\n checkpoint_manager.save(step, args=ckpt_manager_args)\n print(f""Saved checkpoint at step {step}"")\n if step >= args.num_steps:\n break",python,selection_command +1608,1254400,"jasmine/train_dynamics.py",29768,1769," val_results[\n ""val_full_frame_comparison_seq""\n ].astype(np.uint8)\n )\n ),\n )\n )\n 
wandb.log(log_images)\n # --- Checkpointing ---\n if args.save_ckpt and step % args.log_checkpoint_interval == 0:\n assert checkpoint_manager is not None\n optimizer_state = nnx.state(optimizer)\n if val_iterator:\n ckpt_manager_args = ocp.args.Composite(\n model_state=ocp.args.PyTreeSave(optimizer_state), # type: ignore\n train_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n train_iterator # type: ignore\n ),\n val_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n val_iterator # type: ignore\n ),\n )\n else:\n ckpt_manager_args = ocp.args.Composite(\n model_state=ocp.args.PyTreeSave(optimizer_state), # type: ignore\n train_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n train_iterator # type: ignore\n ),\n )\n checkpoint_manager.save(step, args=ckpt_manager_args)\n print(f""Saved checkpoint at step {step}"")\n if step >= args.num_steps:\n break",python,selection_command +1609,1254458,"jasmine/train_dynamics.py",29712,1825," np.asarray(\n val_results[\n ""val_full_frame_comparison_seq""\n ].astype(np.uint8)\n )\n ),\n )\n )\n wandb.log(log_images)\n # --- Checkpointing ---\n if args.save_ckpt and step % args.log_checkpoint_interval == 0:\n assert checkpoint_manager is not None\n optimizer_state = nnx.state(optimizer)\n if val_iterator:\n ckpt_manager_args = ocp.args.Composite(\n model_state=ocp.args.PyTreeSave(optimizer_state), # type: ignore\n train_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n train_iterator # type: ignore\n ),\n val_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n val_iterator # type: ignore\n ),\n )\n else:\n ckpt_manager_args = ocp.args.Composite(\n model_state=ocp.args.PyTreeSave(optimizer_state), # type: ignore\n train_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n train_iterator # type: ignore\n ),\n )\n checkpoint_manager.save(step, args=ckpt_manager_args)\n print(f""Saved checkpoint at step {step}"")\n if step >= args.num_steps:\n break",python,selection_command +1610,1254458,"jasmine/train_dynamics.py",29636,1901," val_true_vs_full_frame=wandb.Image(\n np.asarray(\n val_results[\n ""val_full_frame_comparison_seq""\n ].astype(np.uint8)\n )\n ),\n )\n )\n wandb.log(log_images)\n # --- Checkpointing ---\n if args.save_ckpt and step % args.log_checkpoint_interval == 0:\n assert checkpoint_manager is not None\n optimizer_state = nnx.state(optimizer)\n if val_iterator:\n ckpt_manager_args = ocp.args.Composite(\n model_state=ocp.args.PyTreeSave(optimizer_state), # type: ignore\n train_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n train_iterator # type: ignore\n ),\n val_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n val_iterator # type: ignore\n ),\n )\n else:\n ckpt_manager_args = ocp.args.Composite(\n model_state=ocp.args.PyTreeSave(optimizer_state), # type: ignore\n train_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n train_iterator # type: ignore\n ),\n )\n checkpoint_manager.save(step, args=ckpt_manager_args)\n print(f""Saved checkpoint at step {step}"")\n if step >= args.num_steps:\n break",python,selection_command +1611,1254519,"jasmine/train_dynamics.py",29593,1944," ),\n val_true_vs_full_frame=wandb.Image(\n np.asarray(\n val_results[\n ""val_full_frame_comparison_seq""\n ].astype(np.uint8)\n )\n ),\n )\n )\n wandb.log(log_images)\n # --- Checkpointing ---\n if args.save_ckpt and step % args.log_checkpoint_interval == 0:\n assert checkpoint_manager is not None\n optimizer_state = 
nnx.state(optimizer)\n if val_iterator:\n ckpt_manager_args = ocp.args.Composite(\n model_state=ocp.args.PyTreeSave(optimizer_state), # type: ignore\n train_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n train_iterator # type: ignore\n ),\n val_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n val_iterator # type: ignore\n ),\n )\n else:\n ckpt_manager_args = ocp.args.Composite(\n model_state=ocp.args.PyTreeSave(optimizer_state), # type: ignore\n train_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n train_iterator # type: ignore\n ),\n )\n checkpoint_manager.save(step, args=ckpt_manager_args)\n print(f""Saved checkpoint at step {step}"")\n if step >= args.num_steps:\n break",python,selection_command +1612,1254519,"jasmine/train_dynamics.py",29547,1990," )\n ),\n val_true_vs_full_frame=wandb.Image(\n np.asarray(\n val_results[\n ""val_full_frame_comparison_seq""\n ].astype(np.uint8)\n )\n ),\n )\n )\n wandb.log(log_images)\n # --- Checkpointing ---\n if args.save_ckpt and step % args.log_checkpoint_interval == 0:\n assert checkpoint_manager is not None\n optimizer_state = nnx.state(optimizer)\n if val_iterator:\n ckpt_manager_args = ocp.args.Composite(\n model_state=ocp.args.PyTreeSave(optimizer_state), # type: ignore\n train_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n train_iterator # type: ignore\n ),\n val_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n val_iterator # type: ignore\n ),\n )\n else:\n ckpt_manager_args = ocp.args.Composite(\n model_state=ocp.args.PyTreeSave(optimizer_state), # type: ignore\n train_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n train_iterator # type: ignore\n ),\n )\n checkpoint_manager.save(step, args=ckpt_manager_args)\n print(f""Saved checkpoint at step {step}"")\n if step >= args.num_steps:\n break",python,selection_command +1613,1254581,"jasmine/train_dynamics.py",29497,2040," ]\n )\n ),\n val_true_vs_full_frame=wandb.Image(\n np.asarray(\n val_results[\n ""val_full_frame_comparison_seq""\n ].astype(np.uint8)\n )\n ),\n )\n )\n wandb.log(log_images)\n # --- Checkpointing ---\n if args.save_ckpt and step % args.log_checkpoint_interval == 0:\n assert checkpoint_manager is not None\n optimizer_state = nnx.state(optimizer)\n if val_iterator:\n ckpt_manager_args = ocp.args.Composite(\n model_state=ocp.args.PyTreeSave(optimizer_state), # type: ignore\n train_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n train_iterator # type: ignore\n ),\n val_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n val_iterator # type: ignore\n ),\n )\n else:\n ckpt_manager_args = ocp.args.Composite(\n model_state=ocp.args.PyTreeSave(optimizer_state), # type: ignore\n train_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n train_iterator # type: ignore\n ),\n )\n checkpoint_manager.save(step, args=ckpt_manager_args)\n print(f""Saved checkpoint at step {step}"")\n if step >= args.num_steps:\n break",python,selection_command +1614,1254581,"jasmine/train_dynamics.py",29428,2109," args.seq_len - 1\n ]\n )\n ),\n val_true_vs_full_frame=wandb.Image(\n np.asarray(\n val_results[\n ""val_full_frame_comparison_seq""\n ].astype(np.uint8)\n )\n ),\n )\n )\n wandb.log(log_images)\n # --- Checkpointing ---\n if args.save_ckpt and step % args.log_checkpoint_interval == 0:\n assert checkpoint_manager is not None\n optimizer_state = nnx.state(optimizer)\n if val_iterator:\n ckpt_manager_args = ocp.args.Composite(\n 
model_state=ocp.args.PyTreeSave(optimizer_state), # type: ignore\n train_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n train_iterator # type: ignore\n ),\n val_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n val_iterator # type: ignore\n ),\n )\n else:\n ckpt_manager_args = ocp.args.Composite(\n model_state=ocp.args.PyTreeSave(optimizer_state), # type: ignore\n train_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n train_iterator # type: ignore\n ),\n )\n checkpoint_manager.save(step, args=ckpt_manager_args)\n print(f""Saved checkpoint at step {step}"")\n if step >= args.num_steps:\n break",python,selection_command +1615,1254634,"jasmine/train_dynamics.py",29345,2192," val_results[""full_frame_seq_val""][\n args.seq_len - 1\n ]\n )\n ),\n val_true_vs_full_frame=wandb.Image(\n np.asarray(\n val_results[\n ""val_full_frame_comparison_seq""\n ].astype(np.uint8)\n )\n ),\n )\n )\n wandb.log(log_images)\n # --- Checkpointing ---\n if args.save_ckpt and step % args.log_checkpoint_interval == 0:\n assert checkpoint_manager is not None\n optimizer_state = nnx.state(optimizer)\n if val_iterator:\n ckpt_manager_args = ocp.args.Composite(\n model_state=ocp.args.PyTreeSave(optimizer_state), # type: ignore\n train_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n train_iterator # type: ignore\n ),\n val_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n val_iterator # type: ignore\n ),\n )\n else:\n ckpt_manager_args = ocp.args.Composite(\n model_state=ocp.args.PyTreeSave(optimizer_state), # type: ignore\n train_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n train_iterator # type: ignore\n ),\n )\n checkpoint_manager.save(step, args=ckpt_manager_args)\n print(f""Saved checkpoint at step {step}"")\n if step >= args.num_steps:\n break",python,selection_command +1616,1254637,"jasmine/train_dynamics.py",29289,2248," np.asarray(\n val_results[""full_frame_seq_val""][\n args.seq_len - 1\n ]\n )\n ),\n val_true_vs_full_frame=wandb.Image(\n np.asarray(\n val_results[\n ""val_full_frame_comparison_seq""\n ].astype(np.uint8)\n )\n ),\n )\n )\n wandb.log(log_images)\n # --- Checkpointing ---\n if args.save_ckpt and step % args.log_checkpoint_interval == 0:\n assert checkpoint_manager is not None\n optimizer_state = nnx.state(optimizer)\n if val_iterator:\n ckpt_manager_args = ocp.args.Composite(\n model_state=ocp.args.PyTreeSave(optimizer_state), # type: ignore\n train_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n train_iterator # type: ignore\n ),\n val_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n val_iterator # type: ignore\n ),\n )\n else:\n ckpt_manager_args = ocp.args.Composite(\n model_state=ocp.args.PyTreeSave(optimizer_state), # type: ignore\n train_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n train_iterator # type: ignore\n ),\n )\n checkpoint_manager.save(step, args=ckpt_manager_args)\n print(f""Saved checkpoint at step {step}"")\n if step >= args.num_steps:\n break",python,selection_command +1617,1254697,"jasmine/train_dynamics.py",29221,2316," val_full_frame=wandb.Image(\n np.asarray(\n val_results[""full_frame_seq_val""][\n args.seq_len - 1\n ]\n )\n ),\n val_true_vs_full_frame=wandb.Image(\n np.asarray(\n val_results[\n ""val_full_frame_comparison_seq""\n ].astype(np.uint8)\n )\n ),\n )\n )\n wandb.log(log_images)\n # --- Checkpointing ---\n if args.save_ckpt and step % args.log_checkpoint_interval == 0:\n assert 
checkpoint_manager is not None\n optimizer_state = nnx.state(optimizer)\n if val_iterator:\n ckpt_manager_args = ocp.args.Composite(\n model_state=ocp.args.PyTreeSave(optimizer_state), # type: ignore\n train_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n train_iterator # type: ignore\n ),\n val_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n val_iterator # type: ignore\n ),\n )\n else:\n ckpt_manager_args = ocp.args.Composite(\n model_state=ocp.args.PyTreeSave(optimizer_state), # type: ignore\n train_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n train_iterator # type: ignore\n ),\n )\n checkpoint_manager.save(step, args=ckpt_manager_args)\n print(f""Saved checkpoint at step {step}"")\n if step >= args.num_steps:\n break",python,selection_command +1618,1254697,"jasmine/train_dynamics.py",29179,2358," dict(\n val_full_frame=wandb.Image(\n np.asarray(\n val_results[""full_frame_seq_val""][\n args.seq_len - 1\n ]\n )\n ),\n val_true_vs_full_frame=wandb.Image(\n np.asarray(\n val_results[\n ""val_full_frame_comparison_seq""\n ].astype(np.uint8)\n )\n ),\n )\n )\n wandb.log(log_images)\n # --- Checkpointing ---\n if args.save_ckpt and step % args.log_checkpoint_interval == 0:\n assert checkpoint_manager is not None\n optimizer_state = nnx.state(optimizer)\n if val_iterator:\n ckpt_manager_args = ocp.args.Composite(\n model_state=ocp.args.PyTreeSave(optimizer_state), # type: ignore\n train_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n train_iterator # type: ignore\n ),\n val_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n val_iterator # type: ignore\n ),\n )\n else:\n ckpt_manager_args = ocp.args.Composite(\n model_state=ocp.args.PyTreeSave(optimizer_state), # type: ignore\n train_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n train_iterator # type: ignore\n ),\n )\n checkpoint_manager.save(step, args=ckpt_manager_args)\n print(f""Saved checkpoint at step {step}"")\n if step >= args.num_steps:\n break",python,selection_command +1619,1254698,"TERMINAL",0,0,"20",,terminal_output +1620,1254749,"jasmine/train_dynamics.py",29128,2409," log_images.update(\n dict(\n val_full_frame=wandb.Image(\n np.asarray(\n val_results[""full_frame_seq_val""][\n args.seq_len - 1\n ]\n )\n ),\n val_true_vs_full_frame=wandb.Image(\n np.asarray(\n val_results[\n ""val_full_frame_comparison_seq""\n ].astype(np.uint8)\n )\n ),\n )\n )\n wandb.log(log_images)\n # --- Checkpointing ---\n if args.save_ckpt and step % args.log_checkpoint_interval == 0:\n assert checkpoint_manager is not None\n optimizer_state = nnx.state(optimizer)\n if val_iterator:\n ckpt_manager_args = ocp.args.Composite(\n model_state=ocp.args.PyTreeSave(optimizer_state), # type: ignore\n train_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n train_iterator # type: ignore\n ),\n val_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n val_iterator # type: ignore\n ),\n )\n else:\n ckpt_manager_args = ocp.args.Composite(\n model_state=ocp.args.PyTreeSave(optimizer_state), # type: ignore\n train_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n train_iterator # type: ignore\n ),\n )\n checkpoint_manager.save(step, args=ckpt_manager_args)\n print(f""Saved checkpoint at step {step}"")\n if step >= args.num_steps:\n break",python,selection_command +1621,1254785,"jasmine/train_dynamics.py",29075,2462," if args.eval_full_frame:\n log_images.update(\n dict(\n val_full_frame=wandb.Image(\n 
np.asarray(\n val_results[""full_frame_seq_val""][\n args.seq_len - 1\n ]\n )\n ),\n val_true_vs_full_frame=wandb.Image(\n np.asarray(\n val_results[\n ""val_full_frame_comparison_seq""\n ].astype(np.uint8)\n )\n ),\n )\n )\n wandb.log(log_images)\n # --- Checkpointing ---\n if args.save_ckpt and step % args.log_checkpoint_interval == 0:\n assert checkpoint_manager is not None\n optimizer_state = nnx.state(optimizer)\n if val_iterator:\n ckpt_manager_args = ocp.args.Composite(\n model_state=ocp.args.PyTreeSave(optimizer_state), # type: ignore\n train_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n train_iterator # type: ignore\n ),\n val_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n val_iterator # type: ignore\n ),\n )\n else:\n ckpt_manager_args = ocp.args.Composite(\n model_state=ocp.args.PyTreeSave(optimizer_state), # type: ignore\n train_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n train_iterator # type: ignore\n ),\n )\n checkpoint_manager.save(step, args=ckpt_manager_args)\n print(f""Saved checkpoint at step {step}"")\n if step >= args.num_steps:\n break",python,selection_command +1622,1254814,"jasmine/train_dynamics.py",29045,2492," )\n if args.eval_full_frame:\n log_images.update(\n dict(\n val_full_frame=wandb.Image(\n np.asarray(\n val_results[""full_frame_seq_val""][\n args.seq_len - 1\n ]\n )\n ),\n val_true_vs_full_frame=wandb.Image(\n np.asarray(\n val_results[\n ""val_full_frame_comparison_seq""\n ].astype(np.uint8)\n )\n ),\n )\n )\n wandb.log(log_images)\n # --- Checkpointing ---\n if args.save_ckpt and step % args.log_checkpoint_interval == 0:\n assert checkpoint_manager is not None\n optimizer_state = nnx.state(optimizer)\n if val_iterator:\n ckpt_manager_args = ocp.args.Composite(\n model_state=ocp.args.PyTreeSave(optimizer_state), # type: ignore\n train_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n train_iterator # type: ignore\n ),\n val_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n val_iterator # type: ignore\n ),\n )\n else:\n ckpt_manager_args = ocp.args.Composite(\n model_state=ocp.args.PyTreeSave(optimizer_state), # type: ignore\n train_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n train_iterator # type: ignore\n ),\n )\n checkpoint_manager.save(step, args=ckpt_manager_args)\n print(f""Saved checkpoint at step {step}"")\n if step >= args.num_steps:\n break",python,selection_command +1623,1254874,"jasmine/train_dynamics.py",29011,2526," )\n )\n if args.eval_full_frame:\n log_images.update(\n dict(\n val_full_frame=wandb.Image(\n np.asarray(\n val_results[""full_frame_seq_val""][\n args.seq_len - 1\n ]\n )\n ),\n val_true_vs_full_frame=wandb.Image(\n np.asarray(\n val_results[\n ""val_full_frame_comparison_seq""\n ].astype(np.uint8)\n )\n ),\n )\n )\n wandb.log(log_images)\n # --- Checkpointing ---\n if args.save_ckpt and step % args.log_checkpoint_interval == 0:\n assert checkpoint_manager is not None\n optimizer_state = nnx.state(optimizer)\n if val_iterator:\n ckpt_manager_args = ocp.args.Composite(\n model_state=ocp.args.PyTreeSave(optimizer_state), # type: ignore\n train_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n train_iterator # type: ignore\n ),\n val_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n val_iterator # type: ignore\n ),\n )\n else:\n ckpt_manager_args = ocp.args.Composite(\n model_state=ocp.args.PyTreeSave(optimizer_state), # type: ignore\n 
train_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n train_iterator # type: ignore\n ),\n )\n checkpoint_manager.save(step, args=ckpt_manager_args)\n print(f""Saved checkpoint at step {step}"")\n if step >= args.num_steps:\n break",python,selection_command +1624,1254875,"jasmine/train_dynamics.py",28972,2565," ),\n )\n )\n if args.eval_full_frame:\n log_images.update(\n dict(\n val_full_frame=wandb.Image(\n np.asarray(\n val_results[""full_frame_seq_val""][\n args.seq_len - 1\n ]\n )\n ),\n val_true_vs_full_frame=wandb.Image(\n np.asarray(\n val_results[\n ""val_full_frame_comparison_seq""\n ].astype(np.uint8)\n )\n ),\n )\n )\n wandb.log(log_images)\n # --- Checkpointing ---\n if args.save_ckpt and step % args.log_checkpoint_interval == 0:\n assert checkpoint_manager is not None\n optimizer_state = nnx.state(optimizer)\n if val_iterator:\n ckpt_manager_args = ocp.args.Composite(\n model_state=ocp.args.PyTreeSave(optimizer_state), # type: ignore\n train_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n train_iterator # type: ignore\n ),\n val_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n val_iterator # type: ignore\n ),\n )\n else:\n ckpt_manager_args = ocp.args.Composite(\n model_state=ocp.args.PyTreeSave(optimizer_state), # type: ignore\n train_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n train_iterator # type: ignore\n ),\n )\n checkpoint_manager.save(step, args=ckpt_manager_args)\n print(f""Saved checkpoint at step {step}"")\n if step >= args.num_steps:\n break",python,selection_command +1625,1254882,"jasmine/train_dynamics.py",28930,2607," )\n ),\n )\n )\n if args.eval_full_frame:\n log_images.update(\n dict(\n val_full_frame=wandb.Image(\n np.asarray(\n val_results[""full_frame_seq_val""][\n args.seq_len - 1\n ]\n )\n ),\n val_true_vs_full_frame=wandb.Image(\n np.asarray(\n val_results[\n ""val_full_frame_comparison_seq""\n ].astype(np.uint8)\n )\n ),\n )\n )\n wandb.log(log_images)\n # --- Checkpointing ---\n if args.save_ckpt and step % args.log_checkpoint_interval == 0:\n assert checkpoint_manager is not None\n optimizer_state = nnx.state(optimizer)\n if val_iterator:\n ckpt_manager_args = ocp.args.Composite(\n model_state=ocp.args.PyTreeSave(optimizer_state), # type: ignore\n train_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n train_iterator # type: ignore\n ),\n val_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n val_iterator # type: ignore\n ),\n )\n else:\n ckpt_manager_args = ocp.args.Composite(\n model_state=ocp.args.PyTreeSave(optimizer_state), # type: ignore\n train_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n train_iterator # type: ignore\n ),\n )\n checkpoint_manager.save(step, args=ckpt_manager_args)\n print(f""Saved checkpoint at step {step}"")\n if step >= args.num_steps:\n break",python,selection_command +1626,1254925,"jasmine/train_dynamics.py",28884,2653," )\n )\n ),\n )\n )\n if args.eval_full_frame:\n log_images.update(\n dict(\n val_full_frame=wandb.Image(\n np.asarray(\n val_results[""full_frame_seq_val""][\n args.seq_len - 1\n ]\n )\n ),\n val_true_vs_full_frame=wandb.Image(\n np.asarray(\n val_results[\n ""val_full_frame_comparison_seq""\n ].astype(np.uint8)\n )\n ),\n )\n )\n wandb.log(log_images)\n # --- Checkpointing ---\n if args.save_ckpt and step % args.log_checkpoint_interval == 0:\n assert checkpoint_manager is not None\n optimizer_state = nnx.state(optimizer)\n if val_iterator:\n ckpt_manager_args = 
ocp.args.Composite(\n model_state=ocp.args.PyTreeSave(optimizer_state), # type: ignore\n train_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n train_iterator # type: ignore\n ),\n val_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n val_iterator # type: ignore\n ),\n )\n else:\n ckpt_manager_args = ocp.args.Composite(\n model_state=ocp.args.PyTreeSave(optimizer_state), # type: ignore\n train_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n train_iterator # type: ignore\n ),\n )\n checkpoint_manager.save(step, args=ckpt_manager_args)\n print(f""Saved checkpoint at step {step}"")\n if step >= args.num_steps:\n break",python,selection_command +1627,1254973,"jasmine/train_dynamics.py",28827,2710," np.uint8\n )\n )\n ),\n )\n )\n if args.eval_full_frame:\n log_images.update(\n dict(\n val_full_frame=wandb.Image(\n np.asarray(\n val_results[""full_frame_seq_val""][\n args.seq_len - 1\n ]\n )\n ),\n val_true_vs_full_frame=wandb.Image(\n np.asarray(\n val_results[\n ""val_full_frame_comparison_seq""\n ].astype(np.uint8)\n )\n ),\n )\n )\n wandb.log(log_images)\n # --- Checkpointing ---\n if args.save_ckpt and step % args.log_checkpoint_interval == 0:\n assert checkpoint_manager is not None\n optimizer_state = nnx.state(optimizer)\n if val_iterator:\n ckpt_manager_args = ocp.args.Composite(\n model_state=ocp.args.PyTreeSave(optimizer_state), # type: ignore\n train_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n train_iterator # type: ignore\n ),\n val_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n val_iterator # type: ignore\n ),\n )\n else:\n ckpt_manager_args = ocp.args.Composite(\n model_state=ocp.args.PyTreeSave(optimizer_state), # type: ignore\n train_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n train_iterator # type: ignore\n ),\n )\n checkpoint_manager.save(step, args=ckpt_manager_args)\n print(f""Saved checkpoint at step {step}"")\n if step >= args.num_steps:\n break",python,selection_command +1628,1255019,"jasmine/train_dynamics.py",28741,2796," val_results[""val_comparison_seq""].astype(\n np.uint8\n )\n )\n ),\n )\n )\n if args.eval_full_frame:\n log_images.update(\n dict(\n val_full_frame=wandb.Image(\n np.asarray(\n val_results[""full_frame_seq_val""][\n args.seq_len - 1\n ]\n )\n ),\n val_true_vs_full_frame=wandb.Image(\n np.asarray(\n val_results[\n ""val_full_frame_comparison_seq""\n ].astype(np.uint8)\n )\n ),\n )\n )\n wandb.log(log_images)\n # --- Checkpointing ---\n if args.save_ckpt and step % args.log_checkpoint_interval == 0:\n assert checkpoint_manager is not None\n optimizer_state = nnx.state(optimizer)\n if val_iterator:\n ckpt_manager_args = ocp.args.Composite(\n model_state=ocp.args.PyTreeSave(optimizer_state), # type: ignore\n train_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n train_iterator # type: ignore\n ),\n val_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n val_iterator # type: ignore\n ),\n )\n else:\n ckpt_manager_args = ocp.args.Composite(\n model_state=ocp.args.PyTreeSave(optimizer_state), # type: ignore\n train_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n train_iterator # type: ignore\n ),\n )\n checkpoint_manager.save(step, args=ckpt_manager_args)\n print(f""Saved checkpoint at step {step}"")\n if step >= args.num_steps:\n break",python,selection_command +1629,1255019,"jasmine/train_dynamics.py",28689,2848," np.asarray(\n val_results[""val_comparison_seq""].astype(\n np.uint8\n )\n 
)\n ),\n )\n )\n if args.eval_full_frame:\n log_images.update(\n dict(\n val_full_frame=wandb.Image(\n np.asarray(\n val_results[""full_frame_seq_val""][\n args.seq_len - 1\n ]\n )\n ),\n val_true_vs_full_frame=wandb.Image(\n np.asarray(\n val_results[\n ""val_full_frame_comparison_seq""\n ].astype(np.uint8)\n )\n ),\n )\n )\n wandb.log(log_images)\n # --- Checkpointing ---\n if args.save_ckpt and step % args.log_checkpoint_interval == 0:\n assert checkpoint_manager is not None\n optimizer_state = nnx.state(optimizer)\n if val_iterator:\n ckpt_manager_args = ocp.args.Composite(\n model_state=ocp.args.PyTreeSave(optimizer_state), # type: ignore\n train_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n train_iterator # type: ignore\n ),\n val_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n val_iterator # type: ignore\n ),\n )\n else:\n ckpt_manager_args = ocp.args.Composite(\n model_state=ocp.args.PyTreeSave(optimizer_state), # type: ignore\n train_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n train_iterator # type: ignore\n ),\n )\n checkpoint_manager.save(step, args=ckpt_manager_args)\n print(f""Saved checkpoint at step {step}"")\n if step >= args.num_steps:\n break",python,selection_command +1630,1255066,"jasmine/train_dynamics.py",28622,2915," val_true_vs_recon=wandb.Image(\n np.asarray(\n val_results[""val_comparison_seq""].astype(\n np.uint8\n )\n )\n ),\n )\n )\n if args.eval_full_frame:\n log_images.update(\n dict(\n val_full_frame=wandb.Image(\n np.asarray(\n val_results[""full_frame_seq_val""][\n args.seq_len - 1\n ]\n )\n ),\n val_true_vs_full_frame=wandb.Image(\n np.asarray(\n val_results[\n ""val_full_frame_comparison_seq""\n ].astype(np.uint8)\n )\n ),\n )\n )\n wandb.log(log_images)\n # --- Checkpointing ---\n if args.save_ckpt and step % args.log_checkpoint_interval == 0:\n assert checkpoint_manager is not None\n optimizer_state = nnx.state(optimizer)\n if val_iterator:\n ckpt_manager_args = ocp.args.Composite(\n model_state=ocp.args.PyTreeSave(optimizer_state), # type: ignore\n train_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n train_iterator # type: ignore\n ),\n val_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n val_iterator # type: ignore\n ),\n )\n else:\n ckpt_manager_args = ocp.args.Composite(\n model_state=ocp.args.PyTreeSave(optimizer_state), # type: ignore\n train_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n train_iterator # type: ignore\n ),\n )\n checkpoint_manager.save(step, args=ckpt_manager_args)\n print(f""Saved checkpoint at step {step}"")\n if step >= args.num_steps:\n break",python,selection_command +1631,1255067,"jasmine/train_dynamics.py",28583,2954," ),\n val_true_vs_recon=wandb.Image(\n np.asarray(\n val_results[""val_comparison_seq""].astype(\n np.uint8\n )\n )\n ),\n )\n )\n if args.eval_full_frame:\n log_images.update(\n dict(\n val_full_frame=wandb.Image(\n np.asarray(\n val_results[""full_frame_seq_val""][\n args.seq_len - 1\n ]\n )\n ),\n val_true_vs_full_frame=wandb.Image(\n np.asarray(\n val_results[\n ""val_full_frame_comparison_seq""\n ].astype(np.uint8)\n )\n ),\n )\n )\n wandb.log(log_images)\n # --- Checkpointing ---\n if args.save_ckpt and step % args.log_checkpoint_interval == 0:\n assert checkpoint_manager is not None\n optimizer_state = nnx.state(optimizer)\n if val_iterator:\n ckpt_manager_args = ocp.args.Composite(\n model_state=ocp.args.PyTreeSave(optimizer_state), # type: ignore\n 
train_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n train_iterator # type: ignore\n ),\n val_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n val_iterator # type: ignore\n ),\n )\n else:\n ckpt_manager_args = ocp.args.Composite(\n model_state=ocp.args.PyTreeSave(optimizer_state), # type: ignore\n train_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n train_iterator # type: ignore\n ),\n )\n checkpoint_manager.save(step, args=ckpt_manager_args)\n print(f""Saved checkpoint at step {step}"")\n if step >= args.num_steps:\n break",python,selection_command +1632,1255115,"jasmine/train_dynamics.py",28541,2996," )\n ),\n val_true_vs_recon=wandb.Image(\n np.asarray(\n val_results[""val_comparison_seq""].astype(\n np.uint8\n )\n )\n ),\n )\n )\n if args.eval_full_frame:\n log_images.update(\n dict(\n val_full_frame=wandb.Image(\n np.asarray(\n val_results[""full_frame_seq_val""][\n args.seq_len - 1\n ]\n )\n ),\n val_true_vs_full_frame=wandb.Image(\n np.asarray(\n val_results[\n ""val_full_frame_comparison_seq""\n ].astype(np.uint8)\n )\n ),\n )\n )\n wandb.log(log_images)\n # --- Checkpointing ---\n if args.save_ckpt and step % args.log_checkpoint_interval == 0:\n assert checkpoint_manager is not None\n optimizer_state = nnx.state(optimizer)\n if val_iterator:\n ckpt_manager_args = ocp.args.Composite(\n model_state=ocp.args.PyTreeSave(optimizer_state), # type: ignore\n train_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n train_iterator # type: ignore\n ),\n val_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n val_iterator # type: ignore\n ),\n )\n else:\n ckpt_manager_args = ocp.args.Composite(\n model_state=ocp.args.PyTreeSave(optimizer_state), # type: ignore\n train_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n train_iterator # type: ignore\n ),\n )\n checkpoint_manager.save(step, args=ckpt_manager_args)\n print(f""Saved checkpoint at step {step}"")\n if step >= args.num_steps:\n break",python,selection_command +1633,1255134,"jasmine/train_dynamics.py",28495,3042," ]\n )\n ),\n val_true_vs_recon=wandb.Image(\n np.asarray(\n val_results[""val_comparison_seq""].astype(\n np.uint8\n )\n )\n ),\n )\n )\n if args.eval_full_frame:\n log_images.update(\n dict(\n val_full_frame=wandb.Image(\n np.asarray(\n val_results[""full_frame_seq_val""][\n args.seq_len - 1\n ]\n )\n ),\n val_true_vs_full_frame=wandb.Image(\n np.asarray(\n val_results[\n ""val_full_frame_comparison_seq""\n ].astype(np.uint8)\n )\n ),\n )\n )\n wandb.log(log_images)\n # --- Checkpointing ---\n if args.save_ckpt and step % args.log_checkpoint_interval == 0:\n assert checkpoint_manager is not None\n optimizer_state = nnx.state(optimizer)\n if val_iterator:\n ckpt_manager_args = ocp.args.Composite(\n model_state=ocp.args.PyTreeSave(optimizer_state), # type: ignore\n train_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n train_iterator # type: ignore\n ),\n val_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n val_iterator # type: ignore\n ),\n )\n else:\n ckpt_manager_args = ocp.args.Composite(\n model_state=ocp.args.PyTreeSave(optimizer_state), # type: ignore\n train_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n train_iterator # type: ignore\n ),\n )\n checkpoint_manager.save(step, args=ckpt_manager_args)\n print(f""Saved checkpoint at step {step}"")\n if step >= args.num_steps:\n break",python,selection_command +1634,1255177,"jasmine/train_dynamics.py",28430,3107," 
args.seq_len - 1\n ]\n )\n ),\n val_true_vs_recon=wandb.Image(\n np.asarray(\n val_results[""val_comparison_seq""].astype(\n np.uint8\n )\n )\n ),\n )\n )\n if args.eval_full_frame:\n log_images.update(\n dict(\n val_full_frame=wandb.Image(\n np.asarray(\n val_results[""full_frame_seq_val""][\n args.seq_len - 1\n ]\n )\n ),\n val_true_vs_full_frame=wandb.Image(\n np.asarray(\n val_results[\n ""val_full_frame_comparison_seq""\n ].astype(np.uint8)\n )\n ),\n )\n )\n wandb.log(log_images)\n # --- Checkpointing ---\n if args.save_ckpt and step % args.log_checkpoint_interval == 0:\n assert checkpoint_manager is not None\n optimizer_state = nnx.state(optimizer)\n if val_iterator:\n ckpt_manager_args = ocp.args.Composite(\n model_state=ocp.args.PyTreeSave(optimizer_state), # type: ignore\n train_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n train_iterator # type: ignore\n ),\n val_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n val_iterator # type: ignore\n ),\n )\n else:\n ckpt_manager_args = ocp.args.Composite(\n model_state=ocp.args.PyTreeSave(optimizer_state), # type: ignore\n train_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n train_iterator # type: ignore\n ),\n )\n checkpoint_manager.save(step, args=ckpt_manager_args)\n print(f""Saved checkpoint at step {step}"")\n if step >= args.num_steps:\n break",python,selection_command +1635,1255202,"jasmine/train_dynamics.py",28356,3181," val_results[""recon_seq_val""][\n args.seq_len - 1\n ]\n )\n ),\n val_true_vs_recon=wandb.Image(\n np.asarray(\n val_results[""val_comparison_seq""].astype(\n np.uint8\n )\n )\n ),\n )\n )\n if args.eval_full_frame:\n log_images.update(\n dict(\n val_full_frame=wandb.Image(\n np.asarray(\n val_results[""full_frame_seq_val""][\n args.seq_len - 1\n ]\n )\n ),\n val_true_vs_full_frame=wandb.Image(\n np.asarray(\n val_results[\n ""val_full_frame_comparison_seq""\n ].astype(np.uint8)\n )\n ),\n )\n )\n wandb.log(log_images)\n # --- Checkpointing ---\n if args.save_ckpt and step % args.log_checkpoint_interval == 0:\n assert checkpoint_manager is not None\n optimizer_state = nnx.state(optimizer)\n if val_iterator:\n ckpt_manager_args = ocp.args.Composite(\n model_state=ocp.args.PyTreeSave(optimizer_state), # type: ignore\n train_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n train_iterator # type: ignore\n ),\n val_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n val_iterator # type: ignore\n ),\n )\n else:\n ckpt_manager_args = ocp.args.Composite(\n model_state=ocp.args.PyTreeSave(optimizer_state), # type: ignore\n train_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n train_iterator # type: ignore\n ),\n )\n checkpoint_manager.save(step, args=ckpt_manager_args)\n print(f""Saved checkpoint at step {step}"")\n if step >= args.num_steps:\n break",python,selection_command +1636,1255222,"jasmine/train_dynamics.py",28304,3233," np.asarray(\n val_results[""recon_seq_val""][\n args.seq_len - 1\n ]\n )\n ),\n val_true_vs_recon=wandb.Image(\n np.asarray(\n val_results[""val_comparison_seq""].astype(\n np.uint8\n )\n )\n ),\n )\n )\n if args.eval_full_frame:\n log_images.update(\n dict(\n val_full_frame=wandb.Image(\n np.asarray(\n val_results[""full_frame_seq_val""][\n args.seq_len - 1\n ]\n )\n ),\n val_true_vs_full_frame=wandb.Image(\n np.asarray(\n val_results[\n ""val_full_frame_comparison_seq""\n ].astype(np.uint8)\n )\n ),\n )\n )\n wandb.log(log_images)\n # --- Checkpointing ---\n if args.save_ckpt and step 
% args.log_checkpoint_interval == 0:\n assert checkpoint_manager is not None\n optimizer_state = nnx.state(optimizer)\n if val_iterator:\n ckpt_manager_args = ocp.args.Composite(\n model_state=ocp.args.PyTreeSave(optimizer_state), # type: ignore\n train_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n train_iterator # type: ignore\n ),\n val_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n val_iterator # type: ignore\n ),\n )\n else:\n ckpt_manager_args = ocp.args.Composite(\n model_state=ocp.args.PyTreeSave(optimizer_state), # type: ignore\n train_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n train_iterator # type: ignore\n ),\n )\n checkpoint_manager.save(step, args=ckpt_manager_args)\n print(f""Saved checkpoint at step {step}"")\n if step >= args.num_steps:\n break",python,selection_command +1637,1255279,"jasmine/train_dynamics.py",28245,3292," val_recon=wandb.Image(\n np.asarray(\n val_results[""recon_seq_val""][\n args.seq_len - 1\n ]\n )\n ),\n val_true_vs_recon=wandb.Image(\n np.asarray(\n val_results[""val_comparison_seq""].astype(\n np.uint8\n )\n )\n ),\n )\n )\n if args.eval_full_frame:\n log_images.update(\n dict(\n val_full_frame=wandb.Image(\n np.asarray(\n val_results[""full_frame_seq_val""][\n args.seq_len - 1\n ]\n )\n ),\n val_true_vs_full_frame=wandb.Image(\n np.asarray(\n val_results[\n ""val_full_frame_comparison_seq""\n ].astype(np.uint8)\n )\n ),\n )\n )\n wandb.log(log_images)\n # --- Checkpointing ---\n if args.save_ckpt and step % args.log_checkpoint_interval == 0:\n assert checkpoint_manager is not None\n optimizer_state = nnx.state(optimizer)\n if val_iterator:\n ckpt_manager_args = ocp.args.Composite(\n model_state=ocp.args.PyTreeSave(optimizer_state), # type: ignore\n train_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n train_iterator # type: ignore\n ),\n val_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n val_iterator # type: ignore\n ),\n )\n else:\n ckpt_manager_args = ocp.args.Composite(\n model_state=ocp.args.PyTreeSave(optimizer_state), # type: ignore\n train_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n train_iterator # type: ignore\n ),\n )\n checkpoint_manager.save(step, args=ckpt_manager_args)\n print(f""Saved checkpoint at step {step}"")\n if step >= args.num_steps:\n break",python,selection_command +1638,1255280,"jasmine/train_dynamics.py",28206,3331," ),\n val_recon=wandb.Image(\n np.asarray(\n val_results[""recon_seq_val""][\n args.seq_len - 1\n ]\n )\n ),\n val_true_vs_recon=wandb.Image(\n np.asarray(\n val_results[""val_comparison_seq""].astype(\n np.uint8\n )\n )\n ),\n )\n )\n if args.eval_full_frame:\n log_images.update(\n dict(\n val_full_frame=wandb.Image(\n np.asarray(\n val_results[""full_frame_seq_val""][\n args.seq_len - 1\n ]\n )\n ),\n val_true_vs_full_frame=wandb.Image(\n np.asarray(\n val_results[\n ""val_full_frame_comparison_seq""\n ].astype(np.uint8)\n )\n ),\n )\n )\n wandb.log(log_images)\n # --- Checkpointing ---\n if args.save_ckpt and step % args.log_checkpoint_interval == 0:\n assert checkpoint_manager is not None\n optimizer_state = nnx.state(optimizer)\n if val_iterator:\n ckpt_manager_args = ocp.args.Composite(\n model_state=ocp.args.PyTreeSave(optimizer_state), # type: ignore\n train_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n train_iterator # type: ignore\n ),\n val_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n val_iterator # type: ignore\n ),\n )\n else:\n 
ckpt_manager_args = ocp.args.Composite(\n model_state=ocp.args.PyTreeSave(optimizer_state), # type: ignore\n train_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n train_iterator # type: ignore\n ),\n )\n checkpoint_manager.save(step, args=ckpt_manager_args)\n print(f""Saved checkpoint at step {step}"")\n if step >= args.num_steps:\n break",python,selection_command +1639,1255327,"jasmine/train_dynamics.py",28164,3373," )\n ),\n val_recon=wandb.Image(\n np.asarray(\n val_results[""recon_seq_val""][\n args.seq_len - 1\n ]\n )\n ),\n val_true_vs_recon=wandb.Image(\n np.asarray(\n val_results[""val_comparison_seq""].astype(\n np.uint8\n )\n )\n ),\n )\n )\n if args.eval_full_frame:\n log_images.update(\n dict(\n val_full_frame=wandb.Image(\n np.asarray(\n val_results[""full_frame_seq_val""][\n args.seq_len - 1\n ]\n )\n ),\n val_true_vs_full_frame=wandb.Image(\n np.asarray(\n val_results[\n ""val_full_frame_comparison_seq""\n ].astype(np.uint8)\n )\n ),\n )\n )\n wandb.log(log_images)\n # --- Checkpointing ---\n if args.save_ckpt and step % args.log_checkpoint_interval == 0:\n assert checkpoint_manager is not None\n optimizer_state = nnx.state(optimizer)\n if val_iterator:\n ckpt_manager_args = ocp.args.Composite(\n model_state=ocp.args.PyTreeSave(optimizer_state), # type: ignore\n train_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n train_iterator # type: ignore\n ),\n val_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n val_iterator # type: ignore\n ),\n )\n else:\n ckpt_manager_args = ocp.args.Composite(\n model_state=ocp.args.PyTreeSave(optimizer_state), # type: ignore\n train_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n train_iterator # type: ignore\n ),\n )\n checkpoint_manager.save(step, args=ckpt_manager_args)\n print(f""Saved checkpoint at step {step}"")\n if step >= args.num_steps:\n break",python,selection_command +1640,1255622,"jasmine/train_dynamics.py",28076,3461," val_results[""gt_seq_val""][args.seq_len - 1]\n )\n ),\n val_recon=wandb.Image(\n np.asarray(\n val_results[""recon_seq_val""][\n args.seq_len - 1\n ]\n )\n ),\n val_true_vs_recon=wandb.Image(\n np.asarray(\n val_results[""val_comparison_seq""].astype(\n np.uint8\n )\n )\n ),\n )\n )\n if args.eval_full_frame:\n log_images.update(\n dict(\n val_full_frame=wandb.Image(\n np.asarray(\n val_results[""full_frame_seq_val""][\n args.seq_len - 1\n ]\n )\n ),\n val_true_vs_full_frame=wandb.Image(\n np.asarray(\n val_results[\n ""val_full_frame_comparison_seq""\n ].astype(np.uint8)\n )\n ),\n )\n )\n wandb.log(log_images)\n # --- Checkpointing ---\n if args.save_ckpt and step % args.log_checkpoint_interval == 0:\n assert checkpoint_manager is not None\n optimizer_state = nnx.state(optimizer)\n if val_iterator:\n ckpt_manager_args = ocp.args.Composite(\n model_state=ocp.args.PyTreeSave(optimizer_state), # type: ignore\n train_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n train_iterator # type: ignore\n ),\n val_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n val_iterator # type: ignore\n ),\n )\n else:\n ckpt_manager_args = ocp.args.Composite(\n model_state=ocp.args.PyTreeSave(optimizer_state), # type: ignore\n train_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n train_iterator # type: ignore\n ),\n )\n checkpoint_manager.save(step, args=ckpt_manager_args)\n print(f""Saved checkpoint at step {step}"")\n if step >= args.num_steps:\n break",python,selection_command 
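Note: the rows above repeatedly select the script's checkpointing step, which pairs Orbax's Composite checkpoint args with Grain dataloader iterators so a resumed run continues from the exact batch it stopped on. The following is a minimal, self-contained sketch of that same pattern, assuming the recorded file's `import orbax.checkpoint as ocp` and `import grain` aliases; the helper name `save_train_state` is hypothetical, and folding the recorded if/else duplication into a single dict is a refactor for illustration, not the recorded code itself.

import orbax.checkpoint as ocp
import grain

def save_train_state(checkpoint_manager, step, optimizer_state,
                     train_iterator, val_iterator=None):
    # Bundle the model/optimizer PyTree with the Grain iterator
    # positions so restore() resumes both model and data pipeline.
    items = dict(
        model_state=ocp.args.PyTreeSave(optimizer_state),
        train_dataloader_state=grain.checkpoint.CheckpointSave(train_iterator),
    )
    if val_iterator is not None:  # only present when a val split is configured
        items["val_dataloader_state"] = grain.checkpoint.CheckpointSave(val_iterator)
    checkpoint_manager.save(step, args=ocp.args.Composite(**items))
    print(f"Saved checkpoint at step {step}")

Building the Composite from one dict avoids the two near-identical branches seen in the recorded file, while saving exactly the same set of items.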
+1641,1255770,"TERMINAL",0,0,"1",,terminal_output +1642,1256126,"jasmine/train_dynamics.py",28024,3513," np.asarray(\n val_results[""gt_seq_val""][args.seq_len - 1]\n )\n ),\n val_recon=wandb.Image(\n np.asarray(\n val_results[""recon_seq_val""][\n args.seq_len - 1\n ]\n )\n ),\n val_true_vs_recon=wandb.Image(\n np.asarray(\n val_results[""val_comparison_seq""].astype(\n np.uint8\n )\n )\n ),\n )\n )\n if args.eval_full_frame:\n log_images.update(\n dict(\n val_full_frame=wandb.Image(\n np.asarray(\n val_results[""full_frame_seq_val""][\n args.seq_len - 1\n ]\n )\n ),\n val_true_vs_full_frame=wandb.Image(\n np.asarray(\n val_results[\n ""val_full_frame_comparison_seq""\n ].astype(np.uint8)\n )\n ),\n )\n )\n wandb.log(log_images)\n # --- Checkpointing ---\n if args.save_ckpt and step % args.log_checkpoint_interval == 0:\n assert checkpoint_manager is not None\n optimizer_state = nnx.state(optimizer)\n if val_iterator:\n ckpt_manager_args = ocp.args.Composite(\n model_state=ocp.args.PyTreeSave(optimizer_state), # type: ignore\n train_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n train_iterator # type: ignore\n ),\n val_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n val_iterator # type: ignore\n ),\n )\n else:\n ckpt_manager_args = ocp.args.Composite(\n model_state=ocp.args.PyTreeSave(optimizer_state), # type: ignore\n train_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n train_iterator # type: ignore\n ),\n )\n checkpoint_manager.save(step, args=ckpt_manager_args)\n print(f""Saved checkpoint at step {step}"")\n if step >= args.num_steps:\n break",python,selection_command +1643,1256158,"jasmine/train_dynamics.py",27965,3572," val_image=wandb.Image(\n np.asarray(\n val_results[""gt_seq_val""][args.seq_len - 1]\n )\n ),\n val_recon=wandb.Image(\n np.asarray(\n val_results[""recon_seq_val""][\n args.seq_len - 1\n ]\n )\n ),\n val_true_vs_recon=wandb.Image(\n np.asarray(\n val_results[""val_comparison_seq""].astype(\n np.uint8\n )\n )\n ),\n )\n )\n if args.eval_full_frame:\n log_images.update(\n dict(\n val_full_frame=wandb.Image(\n np.asarray(\n val_results[""full_frame_seq_val""][\n args.seq_len - 1\n ]\n )\n ),\n val_true_vs_full_frame=wandb.Image(\n np.asarray(\n val_results[\n ""val_full_frame_comparison_seq""\n ].astype(np.uint8)\n )\n ),\n )\n )\n wandb.log(log_images)\n # --- Checkpointing ---\n if args.save_ckpt and step % args.log_checkpoint_interval == 0:\n assert checkpoint_manager is not None\n optimizer_state = nnx.state(optimizer)\n if val_iterator:\n ckpt_manager_args = ocp.args.Composite(\n model_state=ocp.args.PyTreeSave(optimizer_state), # type: ignore\n train_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n train_iterator # type: ignore\n ),\n val_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n val_iterator # type: ignore\n ),\n )\n else:\n ckpt_manager_args = ocp.args.Composite(\n model_state=ocp.args.PyTreeSave(optimizer_state), # type: ignore\n train_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n train_iterator # type: ignore\n ),\n )\n checkpoint_manager.save(step, args=ckpt_manager_args)\n print(f""Saved checkpoint at step {step}"")\n if step >= args.num_steps:\n break",python,selection_command +1644,1256207,"jasmine/train_dynamics.py",27927,3610," dict(\n val_image=wandb.Image(\n np.asarray(\n val_results[""gt_seq_val""][args.seq_len - 1]\n )\n ),\n val_recon=wandb.Image(\n np.asarray(\n val_results[""recon_seq_val""][\n args.seq_len - 1\n ]\n )\n ),\n 
val_true_vs_recon=wandb.Image(\n np.asarray(\n val_results[""val_comparison_seq""].astype(\n np.uint8\n )\n )\n ),\n )\n )\n if args.eval_full_frame:\n log_images.update(\n dict(\n val_full_frame=wandb.Image(\n np.asarray(\n val_results[""full_frame_seq_val""][\n args.seq_len - 1\n ]\n )\n ),\n val_true_vs_full_frame=wandb.Image(\n np.asarray(\n val_results[\n ""val_full_frame_comparison_seq""\n ].astype(np.uint8)\n )\n ),\n )\n )\n wandb.log(log_images)\n # --- Checkpointing ---\n if args.save_ckpt and step % args.log_checkpoint_interval == 0:\n assert checkpoint_manager is not None\n optimizer_state = nnx.state(optimizer)\n if val_iterator:\n ckpt_manager_args = ocp.args.Composite(\n model_state=ocp.args.PyTreeSave(optimizer_state), # type: ignore\n train_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n train_iterator # type: ignore\n ),\n val_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n val_iterator # type: ignore\n ),\n )\n else:\n ckpt_manager_args = ocp.args.Composite(\n model_state=ocp.args.PyTreeSave(optimizer_state), # type: ignore\n train_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n train_iterator # type: ignore\n ),\n )\n checkpoint_manager.save(step, args=ckpt_manager_args)\n print(f""Saved checkpoint at step {step}"")\n if step >= args.num_steps:\n break",python,selection_command +1645,1256221,"jasmine/train_dynamics.py",27880,3657," log_images.update(\n dict(\n val_image=wandb.Image(\n np.asarray(\n val_results[""gt_seq_val""][args.seq_len - 1]\n )\n ),\n val_recon=wandb.Image(\n np.asarray(\n val_results[""recon_seq_val""][\n args.seq_len - 1\n ]\n )\n ),\n val_true_vs_recon=wandb.Image(\n np.asarray(\n val_results[""val_comparison_seq""].astype(\n np.uint8\n )\n )\n ),\n )\n )\n if args.eval_full_frame:\n log_images.update(\n dict(\n val_full_frame=wandb.Image(\n np.asarray(\n val_results[""full_frame_seq_val""][\n args.seq_len - 1\n ]\n )\n ),\n val_true_vs_full_frame=wandb.Image(\n np.asarray(\n val_results[\n ""val_full_frame_comparison_seq""\n ].astype(np.uint8)\n )\n ),\n )\n )\n wandb.log(log_images)\n # --- Checkpointing ---\n if args.save_ckpt and step % args.log_checkpoint_interval == 0:\n assert checkpoint_manager is not None\n optimizer_state = nnx.state(optimizer)\n if val_iterator:\n ckpt_manager_args = ocp.args.Composite(\n model_state=ocp.args.PyTreeSave(optimizer_state), # type: ignore\n train_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n train_iterator # type: ignore\n ),\n val_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n val_iterator # type: ignore\n ),\n )\n else:\n ckpt_manager_args = ocp.args.Composite(\n model_state=ocp.args.PyTreeSave(optimizer_state), # type: ignore\n train_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n train_iterator # type: ignore\n ),\n )\n checkpoint_manager.save(step, args=ckpt_manager_args)\n print(f""Saved checkpoint at step {step}"")\n if step >= args.num_steps:\n break",python,selection_command +1646,1256283,"jasmine/train_dynamics.py",27840,3697," if val_results:\n log_images.update(\n dict(\n val_image=wandb.Image(\n np.asarray(\n val_results[""gt_seq_val""][args.seq_len - 1]\n )\n ),\n val_recon=wandb.Image(\n np.asarray(\n val_results[""recon_seq_val""][\n args.seq_len - 1\n ]\n )\n ),\n val_true_vs_recon=wandb.Image(\n np.asarray(\n val_results[""val_comparison_seq""].astype(\n np.uint8\n )\n )\n ),\n )\n )\n if args.eval_full_frame:\n log_images.update(\n dict(\n val_full_frame=wandb.Image(\n 
np.asarray(\n val_results[""full_frame_seq_val""][\n args.seq_len - 1\n ]\n )\n ),\n val_true_vs_full_frame=wandb.Image(\n np.asarray(\n val_results[\n ""val_full_frame_comparison_seq""\n ].astype(np.uint8)\n )\n ),\n )\n )\n wandb.log(log_images)\n # --- Checkpointing ---\n if args.save_ckpt and step % args.log_checkpoint_interval == 0:\n assert checkpoint_manager is not None\n optimizer_state = nnx.state(optimizer)\n if val_iterator:\n ckpt_manager_args = ocp.args.Composite(\n model_state=ocp.args.PyTreeSave(optimizer_state), # type: ignore\n train_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n train_iterator # type: ignore\n ),\n val_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n val_iterator # type: ignore\n ),\n )\n else:\n ckpt_manager_args = ocp.args.Composite(\n model_state=ocp.args.PyTreeSave(optimizer_state), # type: ignore\n train_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n train_iterator # type: ignore\n ),\n )\n checkpoint_manager.save(step, args=ckpt_manager_args)\n print(f""Saved checkpoint at step {step}"")\n if step >= args.num_steps:\n break",python,selection_command +1647,1256304,"jasmine/train_dynamics.py",27814,3723," )\n if val_results:\n log_images.update(\n dict(\n val_image=wandb.Image(\n np.asarray(\n val_results[""gt_seq_val""][args.seq_len - 1]\n )\n ),\n val_recon=wandb.Image(\n np.asarray(\n val_results[""recon_seq_val""][\n args.seq_len - 1\n ]\n )\n ),\n val_true_vs_recon=wandb.Image(\n np.asarray(\n val_results[""val_comparison_seq""].astype(\n np.uint8\n )\n )\n ),\n )\n )\n if args.eval_full_frame:\n log_images.update(\n dict(\n val_full_frame=wandb.Image(\n np.asarray(\n val_results[""full_frame_seq_val""][\n args.seq_len - 1\n ]\n )\n ),\n val_true_vs_full_frame=wandb.Image(\n np.asarray(\n val_results[\n ""val_full_frame_comparison_seq""\n ].astype(np.uint8)\n )\n ),\n )\n )\n wandb.log(log_images)\n # --- Checkpointing ---\n if args.save_ckpt and step % args.log_checkpoint_interval == 0:\n assert checkpoint_manager is not None\n optimizer_state = nnx.state(optimizer)\n if val_iterator:\n ckpt_manager_args = ocp.args.Composite(\n model_state=ocp.args.PyTreeSave(optimizer_state), # type: ignore\n train_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n train_iterator # type: ignore\n ),\n val_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n val_iterator # type: ignore\n ),\n )\n else:\n ckpt_manager_args = ocp.args.Composite(\n model_state=ocp.args.PyTreeSave(optimizer_state), # type: ignore\n train_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n train_iterator # type: ignore\n ),\n )\n checkpoint_manager.save(step, args=ckpt_manager_args)\n print(f""Saved checkpoint at step {step}"")\n if step >= args.num_steps:\n break",python,selection_command +1648,1256305,"jasmine/train_dynamics.py",27783,3754," ),\n )\n if val_results:\n log_images.update(\n dict(\n val_image=wandb.Image(\n np.asarray(\n val_results[""gt_seq_val""][args.seq_len - 1]\n )\n ),\n val_recon=wandb.Image(\n np.asarray(\n val_results[""recon_seq_val""][\n args.seq_len - 1\n ]\n )\n ),\n val_true_vs_recon=wandb.Image(\n np.asarray(\n val_results[""val_comparison_seq""].astype(\n np.uint8\n )\n )\n ),\n )\n )\n if args.eval_full_frame:\n log_images.update(\n dict(\n val_full_frame=wandb.Image(\n np.asarray(\n val_results[""full_frame_seq_val""][\n args.seq_len - 1\n ]\n )\n ),\n val_true_vs_full_frame=wandb.Image(\n np.asarray(\n val_results[\n 
""val_full_frame_comparison_seq""\n ].astype(np.uint8)\n )\n ),\n )\n )\n wandb.log(log_images)\n # --- Checkpointing ---\n if args.save_ckpt and step % args.log_checkpoint_interval == 0:\n assert checkpoint_manager is not None\n optimizer_state = nnx.state(optimizer)\n if val_iterator:\n ckpt_manager_args = ocp.args.Composite(\n model_state=ocp.args.PyTreeSave(optimizer_state), # type: ignore\n train_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n train_iterator # type: ignore\n ),\n val_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n val_iterator # type: ignore\n ),\n )\n else:\n ckpt_manager_args = ocp.args.Composite(\n model_state=ocp.args.PyTreeSave(optimizer_state), # type: ignore\n train_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n train_iterator # type: ignore\n ),\n )\n checkpoint_manager.save(step, args=ckpt_manager_args)\n print(f""Saved checkpoint at step {step}"")\n if step >= args.num_steps:\n break",python,selection_command +1649,1256384,"jasmine/train_dynamics.py",27707,3830," np.asarray(comparison_seq.astype(np.uint8))\n ),\n )\n if val_results:\n log_images.update(\n dict(\n val_image=wandb.Image(\n np.asarray(\n val_results[""gt_seq_val""][args.seq_len - 1]\n )\n ),\n val_recon=wandb.Image(\n np.asarray(\n val_results[""recon_seq_val""][\n args.seq_len - 1\n ]\n )\n ),\n val_true_vs_recon=wandb.Image(\n np.asarray(\n val_results[""val_comparison_seq""].astype(\n np.uint8\n )\n )\n ),\n )\n )\n if args.eval_full_frame:\n log_images.update(\n dict(\n val_full_frame=wandb.Image(\n np.asarray(\n val_results[""full_frame_seq_val""][\n args.seq_len - 1\n ]\n )\n ),\n val_true_vs_full_frame=wandb.Image(\n np.asarray(\n val_results[\n ""val_full_frame_comparison_seq""\n ].astype(np.uint8)\n )\n ),\n )\n )\n wandb.log(log_images)\n # --- Checkpointing ---\n if args.save_ckpt and step % args.log_checkpoint_interval == 0:\n assert checkpoint_manager is not None\n optimizer_state = nnx.state(optimizer)\n if val_iterator:\n ckpt_manager_args = ocp.args.Composite(\n model_state=ocp.args.PyTreeSave(optimizer_state), # type: ignore\n train_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n train_iterator # type: ignore\n ),\n val_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n val_iterator # type: ignore\n ),\n )\n else:\n ckpt_manager_args = ocp.args.Composite(\n model_state=ocp.args.PyTreeSave(optimizer_state), # type: ignore\n train_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n train_iterator # type: ignore\n ),\n )\n checkpoint_manager.save(step, args=ckpt_manager_args)\n print(f""Saved checkpoint at step {step}"")\n if step >= args.num_steps:\n break",python,selection_command +1650,1256385,"jasmine/train_dynamics.py",27652,3885," true_vs_recon=wandb.Image(\n np.asarray(comparison_seq.astype(np.uint8))\n ),\n )\n if val_results:\n log_images.update(\n dict(\n val_image=wandb.Image(\n np.asarray(\n val_results[""gt_seq_val""][args.seq_len - 1]\n )\n ),\n val_recon=wandb.Image(\n np.asarray(\n val_results[""recon_seq_val""][\n args.seq_len - 1\n ]\n )\n ),\n val_true_vs_recon=wandb.Image(\n np.asarray(\n val_results[""val_comparison_seq""].astype(\n np.uint8\n )\n )\n ),\n )\n )\n if args.eval_full_frame:\n log_images.update(\n dict(\n val_full_frame=wandb.Image(\n np.asarray(\n val_results[""full_frame_seq_val""][\n args.seq_len - 1\n ]\n )\n ),\n val_true_vs_full_frame=wandb.Image(\n np.asarray(\n val_results[\n ""val_full_frame_comparison_seq""\n ].astype(np.uint8)\n 
)\n ),\n )\n )\n wandb.log(log_images)\n # --- Checkpointing ---\n if args.save_ckpt and step % args.log_checkpoint_interval == 0:\n assert checkpoint_manager is not None\n optimizer_state = nnx.state(optimizer)\n if val_iterator:\n ckpt_manager_args = ocp.args.Composite(\n model_state=ocp.args.PyTreeSave(optimizer_state), # type: ignore\n train_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n train_iterator # type: ignore\n ),\n val_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n val_iterator # type: ignore\n ),\n )\n else:\n ckpt_manager_args = ocp.args.Composite(\n model_state=ocp.args.PyTreeSave(optimizer_state), # type: ignore\n train_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n train_iterator # type: ignore\n ),\n )\n checkpoint_manager.save(step, args=ckpt_manager_args)\n print(f""Saved checkpoint at step {step}"")\n if step >= args.num_steps:\n break",python,selection_command +1651,1256429,"jasmine/train_dynamics.py",27564,3973," recon=wandb.Image(np.asarray(recon_seq[args.seq_len - 1])),\n true_vs_recon=wandb.Image(\n np.asarray(comparison_seq.astype(np.uint8))\n ),\n )\n if val_results:\n log_images.update(\n dict(\n val_image=wandb.Image(\n np.asarray(\n val_results[""gt_seq_val""][args.seq_len - 1]\n )\n ),\n val_recon=wandb.Image(\n np.asarray(\n val_results[""recon_seq_val""][\n args.seq_len - 1\n ]\n )\n ),\n val_true_vs_recon=wandb.Image(\n np.asarray(\n val_results[""val_comparison_seq""].astype(\n np.uint8\n )\n )\n ),\n )\n )\n if args.eval_full_frame:\n log_images.update(\n dict(\n val_full_frame=wandb.Image(\n np.asarray(\n val_results[""full_frame_seq_val""][\n args.seq_len - 1\n ]\n )\n ),\n val_true_vs_full_frame=wandb.Image(\n np.asarray(\n val_results[\n ""val_full_frame_comparison_seq""\n ].astype(np.uint8)\n )\n ),\n )\n )\n wandb.log(log_images)\n # --- Checkpointing ---\n if args.save_ckpt and step % args.log_checkpoint_interval == 0:\n assert checkpoint_manager is not None\n optimizer_state = nnx.state(optimizer)\n if val_iterator:\n ckpt_manager_args = ocp.args.Composite(\n model_state=ocp.args.PyTreeSave(optimizer_state), # type: ignore\n train_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n train_iterator # type: ignore\n ),\n val_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n val_iterator # type: ignore\n ),\n )\n else:\n ckpt_manager_args = ocp.args.Composite(\n model_state=ocp.args.PyTreeSave(optimizer_state), # type: ignore\n train_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n train_iterator # type: ignore\n ),\n )\n checkpoint_manager.save(step, args=ckpt_manager_args)\n print(f""Saved checkpoint at step {step}"")\n if step >= args.num_steps:\n break",python,selection_command +1652,1256430,"jasmine/train_dynamics.py",27479,4058," image=wandb.Image(np.asarray(gt_seq[args.seq_len - 1])),\n recon=wandb.Image(np.asarray(recon_seq[args.seq_len - 1])),\n true_vs_recon=wandb.Image(\n np.asarray(comparison_seq.astype(np.uint8))\n ),\n )\n if val_results:\n log_images.update(\n dict(\n val_image=wandb.Image(\n np.asarray(\n val_results[""gt_seq_val""][args.seq_len - 1]\n )\n ),\n val_recon=wandb.Image(\n np.asarray(\n val_results[""recon_seq_val""][\n args.seq_len - 1\n ]\n )\n ),\n val_true_vs_recon=wandb.Image(\n np.asarray(\n val_results[""val_comparison_seq""].astype(\n np.uint8\n )\n )\n ),\n )\n )\n if args.eval_full_frame:\n log_images.update(\n dict(\n val_full_frame=wandb.Image(\n np.asarray(\n val_results[""full_frame_seq_val""][\n 
args.seq_len - 1\n ]\n )\n ),\n val_true_vs_full_frame=wandb.Image(\n np.asarray(\n val_results[\n ""val_full_frame_comparison_seq""\n ].astype(np.uint8)\n )\n ),\n )\n )\n wandb.log(log_images)\n # --- Checkpointing ---\n if args.save_ckpt and step % args.log_checkpoint_interval == 0:\n assert checkpoint_manager is not None\n optimizer_state = nnx.state(optimizer)\n if val_iterator:\n ckpt_manager_args = ocp.args.Composite(\n model_state=ocp.args.PyTreeSave(optimizer_state), # type: ignore\n train_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n train_iterator # type: ignore\n ),\n val_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n val_iterator # type: ignore\n ),\n )\n else:\n ckpt_manager_args = ocp.args.Composite(\n model_state=ocp.args.PyTreeSave(optimizer_state), # type: ignore\n train_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n train_iterator # type: ignore\n ),\n )\n checkpoint_manager.save(step, args=ckpt_manager_args)\n print(f""Saved checkpoint at step {step}"")\n if step >= args.num_steps:\n break",python,selection_command +1653,1256470,"jasmine/train_dynamics.py",27436,4101," log_images = dict(\n image=wandb.Image(np.asarray(gt_seq[args.seq_len - 1])),\n recon=wandb.Image(np.asarray(recon_seq[args.seq_len - 1])),\n true_vs_recon=wandb.Image(\n np.asarray(comparison_seq.astype(np.uint8))\n ),\n )\n if val_results:\n log_images.update(\n dict(\n val_image=wandb.Image(\n np.asarray(\n val_results[""gt_seq_val""][args.seq_len - 1]\n )\n ),\n val_recon=wandb.Image(\n np.asarray(\n val_results[""recon_seq_val""][\n args.seq_len - 1\n ]\n )\n ),\n val_true_vs_recon=wandb.Image(\n np.asarray(\n val_results[""val_comparison_seq""].astype(\n np.uint8\n )\n )\n ),\n )\n )\n if args.eval_full_frame:\n log_images.update(\n dict(\n val_full_frame=wandb.Image(\n np.asarray(\n val_results[""full_frame_seq_val""][\n args.seq_len - 1\n ]\n )\n ),\n val_true_vs_full_frame=wandb.Image(\n np.asarray(\n val_results[\n ""val_full_frame_comparison_seq""\n ].astype(np.uint8)\n )\n ),\n )\n )\n wandb.log(log_images)\n # --- Checkpointing ---\n if args.save_ckpt and step % args.log_checkpoint_interval == 0:\n assert checkpoint_manager is not None\n optimizer_state = nnx.state(optimizer)\n if val_iterator:\n ckpt_manager_args = ocp.args.Composite(\n model_state=ocp.args.PyTreeSave(optimizer_state), # type: ignore\n train_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n train_iterator # type: ignore\n ),\n val_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n val_iterator # type: ignore\n ),\n )\n else:\n ckpt_manager_args = ocp.args.Composite(\n model_state=ocp.args.PyTreeSave(optimizer_state), # type: ignore\n train_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n train_iterator # type: ignore\n ),\n )\n checkpoint_manager.save(step, args=ckpt_manager_args)\n print(f""Saved checkpoint at step {step}"")\n if step >= args.num_steps:\n break",python,selection_command +1654,1256475,"jasmine/train_dynamics.py",27387,4150," if jax.process_index() == 0:\n log_images = dict(\n image=wandb.Image(np.asarray(gt_seq[args.seq_len - 1])),\n recon=wandb.Image(np.asarray(recon_seq[args.seq_len - 1])),\n true_vs_recon=wandb.Image(\n np.asarray(comparison_seq.astype(np.uint8))\n ),\n )\n if val_results:\n log_images.update(\n dict(\n val_image=wandb.Image(\n np.asarray(\n val_results[""gt_seq_val""][args.seq_len - 1]\n )\n ),\n val_recon=wandb.Image(\n np.asarray(\n val_results[""recon_seq_val""][\n 
args.seq_len - 1\n ]\n )\n ),\n val_true_vs_recon=wandb.Image(\n np.asarray(\n val_results[""val_comparison_seq""].astype(\n np.uint8\n )\n )\n ),\n )\n )\n if args.eval_full_frame:\n log_images.update(\n dict(\n val_full_frame=wandb.Image(\n np.asarray(\n val_results[""full_frame_seq_val""][\n args.seq_len - 1\n ]\n )\n ),\n val_true_vs_full_frame=wandb.Image(\n np.asarray(\n val_results[\n ""val_full_frame_comparison_seq""\n ].astype(np.uint8)\n )\n ),\n )\n )\n wandb.log(log_images)\n # --- Checkpointing ---\n if args.save_ckpt and step % args.log_checkpoint_interval == 0:\n assert checkpoint_manager is not None\n optimizer_state = nnx.state(optimizer)\n if val_iterator:\n ckpt_manager_args = ocp.args.Composite(\n model_state=ocp.args.PyTreeSave(optimizer_state), # type: ignore\n train_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n train_iterator # type: ignore\n ),\n val_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n val_iterator # type: ignore\n ),\n )\n else:\n ckpt_manager_args = ocp.args.Composite(\n model_state=ocp.args.PyTreeSave(optimizer_state), # type: ignore\n train_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n train_iterator # type: ignore\n ),\n )\n checkpoint_manager.save(step, args=ckpt_manager_args)\n print(f""Saved checkpoint at step {step}"")\n if step >= args.num_steps:\n break",python,selection_command +1655,1256519,"jasmine/train_dynamics.py",27310,4227," # sections that lead to cross-accelerator communication.\n if jax.process_index() == 0:\n log_images = dict(\n image=wandb.Image(np.asarray(gt_seq[args.seq_len - 1])),\n recon=wandb.Image(np.asarray(recon_seq[args.seq_len - 1])),\n true_vs_recon=wandb.Image(\n np.asarray(comparison_seq.astype(np.uint8))\n ),\n )\n if val_results:\n log_images.update(\n dict(\n val_image=wandb.Image(\n np.asarray(\n val_results[""gt_seq_val""][args.seq_len - 1]\n )\n ),\n val_recon=wandb.Image(\n np.asarray(\n val_results[""recon_seq_val""][\n args.seq_len - 1\n ]\n )\n ),\n val_true_vs_recon=wandb.Image(\n np.asarray(\n val_results[""val_comparison_seq""].astype(\n np.uint8\n )\n )\n ),\n )\n )\n if args.eval_full_frame:\n log_images.update(\n dict(\n val_full_frame=wandb.Image(\n np.asarray(\n val_results[""full_frame_seq_val""][\n args.seq_len - 1\n ]\n )\n ),\n val_true_vs_full_frame=wandb.Image(\n np.asarray(\n val_results[\n ""val_full_frame_comparison_seq""\n ].astype(np.uint8)\n )\n ),\n )\n )\n wandb.log(log_images)\n # --- Checkpointing ---\n if args.save_ckpt and step % args.log_checkpoint_interval == 0:\n assert checkpoint_manager is not None\n optimizer_state = nnx.state(optimizer)\n if val_iterator:\n ckpt_manager_args = ocp.args.Composite(\n model_state=ocp.args.PyTreeSave(optimizer_state), # type: ignore\n train_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n train_iterator # type: ignore\n ),\n val_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n val_iterator # type: ignore\n ),\n )\n else:\n ckpt_manager_args = ocp.args.Composite(\n model_state=ocp.args.PyTreeSave(optimizer_state), # type: ignore\n train_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n train_iterator # type: ignore\n ),\n )\n checkpoint_manager.save(step, args=ckpt_manager_args)\n print(f""Saved checkpoint at step {step}"")\n if step >= args.num_steps:\n break",python,selection_command +1656,1256567,"jasmine/train_dynamics.py",27232,4305," # after indexing operation since it must not contain code\n # sections that lead to 
cross-accelerator communication.\n if jax.process_index() == 0:\n log_images = dict(\n image=wandb.Image(np.asarray(gt_seq[args.seq_len - 1])),\n recon=wandb.Image(np.asarray(recon_seq[args.seq_len - 1])),\n true_vs_recon=wandb.Image(\n np.asarray(comparison_seq.astype(np.uint8))\n ),\n )\n if val_results:\n log_images.update(\n dict(\n val_image=wandb.Image(\n np.asarray(\n val_results[""gt_seq_val""][args.seq_len - 1]\n )\n ),\n val_recon=wandb.Image(\n np.asarray(\n val_results[""recon_seq_val""][\n args.seq_len - 1\n ]\n )\n ),\n val_true_vs_recon=wandb.Image(\n np.asarray(\n val_results[""val_comparison_seq""].astype(\n np.uint8\n )\n )\n ),\n )\n )\n if args.eval_full_frame:\n log_images.update(\n dict(\n val_full_frame=wandb.Image(\n np.asarray(\n val_results[""full_frame_seq_val""][\n args.seq_len - 1\n ]\n )\n ),\n val_true_vs_full_frame=wandb.Image(\n np.asarray(\n val_results[\n ""val_full_frame_comparison_seq""\n ].astype(np.uint8)\n )\n ),\n )\n )\n wandb.log(log_images)\n # --- Checkpointing ---\n if args.save_ckpt and step % args.log_checkpoint_interval == 0:\n assert checkpoint_manager is not None\n optimizer_state = nnx.state(optimizer)\n if val_iterator:\n ckpt_manager_args = ocp.args.Composite(\n model_state=ocp.args.PyTreeSave(optimizer_state), # type: ignore\n train_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n train_iterator # type: ignore\n ),\n val_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n val_iterator # type: ignore\n ),\n )\n else:\n ckpt_manager_args = ocp.args.Composite(\n model_state=ocp.args.PyTreeSave(optimizer_state), # type: ignore\n train_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n train_iterator # type: ignore\n ),\n )\n checkpoint_manager.save(step, args=ckpt_manager_args)\n print(f""Saved checkpoint at step {step}"")\n if step >= args.num_steps:\n break",python,selection_command +1657,1256568,"jasmine/train_dynamics.py",27152,4385," # NOTE: Process-dependent control flow deliberately happens\n # after indexing operation since it must not contain code\n # sections that lead to cross-accelerator communication.\n if jax.process_index() == 0:\n log_images = dict(\n image=wandb.Image(np.asarray(gt_seq[args.seq_len - 1])),\n recon=wandb.Image(np.asarray(recon_seq[args.seq_len - 1])),\n true_vs_recon=wandb.Image(\n np.asarray(comparison_seq.astype(np.uint8))\n ),\n )\n if val_results:\n log_images.update(\n dict(\n val_image=wandb.Image(\n np.asarray(\n val_results[""gt_seq_val""][args.seq_len - 1]\n )\n ),\n val_recon=wandb.Image(\n np.asarray(\n val_results[""recon_seq_val""][\n args.seq_len - 1\n ]\n )\n ),\n val_true_vs_recon=wandb.Image(\n np.asarray(\n val_results[""val_comparison_seq""].astype(\n np.uint8\n )\n )\n ),\n )\n )\n if args.eval_full_frame:\n log_images.update(\n dict(\n val_full_frame=wandb.Image(\n np.asarray(\n val_results[""full_frame_seq_val""][\n args.seq_len - 1\n ]\n )\n ),\n val_true_vs_full_frame=wandb.Image(\n np.asarray(\n val_results[\n ""val_full_frame_comparison_seq""\n ].astype(np.uint8)\n )\n ),\n )\n )\n wandb.log(log_images)\n # --- Checkpointing ---\n if args.save_ckpt and step % args.log_checkpoint_interval == 0:\n assert checkpoint_manager is not None\n optimizer_state = nnx.state(optimizer)\n if val_iterator:\n ckpt_manager_args = ocp.args.Composite(\n model_state=ocp.args.PyTreeSave(optimizer_state), # type: ignore\n train_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n train_iterator # type: ignore\n ),\n 
val_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n val_iterator # type: ignore\n ),\n )\n else:\n ckpt_manager_args = ocp.args.Composite(\n model_state=ocp.args.PyTreeSave(optimizer_state), # type: ignore\n train_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n train_iterator # type: ignore\n ),\n )\n checkpoint_manager.save(step, args=ckpt_manager_args)\n print(f""Saved checkpoint at step {step}"")\n if step >= args.num_steps:\n break",python,selection_command +1658,1256592,"jasmine/train_dynamics.py",27122,4415," )\n # NOTE: Process-dependent control flow deliberately happens\n # after indexing operation since it must not contain code\n # sections that lead to cross-accelerator communication.\n if jax.process_index() == 0:\n log_images = dict(\n image=wandb.Image(np.asarray(gt_seq[args.seq_len - 1])),\n recon=wandb.Image(np.asarray(recon_seq[args.seq_len - 1])),\n true_vs_recon=wandb.Image(\n np.asarray(comparison_seq.astype(np.uint8))\n ),\n )\n if val_results:\n log_images.update(\n dict(\n val_image=wandb.Image(\n np.asarray(\n val_results[""gt_seq_val""][args.seq_len - 1]\n )\n ),\n val_recon=wandb.Image(\n np.asarray(\n val_results[""recon_seq_val""][\n args.seq_len - 1\n ]\n )\n ),\n val_true_vs_recon=wandb.Image(\n np.asarray(\n val_results[""val_comparison_seq""].astype(\n np.uint8\n )\n )\n ),\n )\n )\n if args.eval_full_frame:\n log_images.update(\n dict(\n val_full_frame=wandb.Image(\n np.asarray(\n val_results[""full_frame_seq_val""][\n args.seq_len - 1\n ]\n )\n ),\n val_true_vs_full_frame=wandb.Image(\n np.asarray(\n val_results[\n ""val_full_frame_comparison_seq""\n ].astype(np.uint8)\n )\n ),\n )\n )\n wandb.log(log_images)\n # --- Checkpointing ---\n if args.save_ckpt and step % args.log_checkpoint_interval == 0:\n assert checkpoint_manager is not None\n optimizer_state = nnx.state(optimizer)\n if val_iterator:\n ckpt_manager_args = ocp.args.Composite(\n model_state=ocp.args.PyTreeSave(optimizer_state), # type: ignore\n train_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n train_iterator # type: ignore\n ),\n val_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n val_iterator # type: ignore\n ),\n )\n else:\n ckpt_manager_args = ocp.args.Composite(\n model_state=ocp.args.PyTreeSave(optimizer_state), # type: ignore\n train_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n train_iterator # type: ignore\n ),\n )\n checkpoint_manager.save(step, args=ckpt_manager_args)\n print(f""Saved checkpoint at step {step}"")\n if step >= args.num_steps:\n break",python,selection_command +1659,1256656,"jasmine/train_dynamics.py",27088,4449," )\n )\n # NOTE: Process-dependent control flow deliberately happens\n # after indexing operation since it must not contain code\n # sections that lead to cross-accelerator communication.\n if jax.process_index() == 0:\n log_images = dict(\n image=wandb.Image(np.asarray(gt_seq[args.seq_len - 1])),\n recon=wandb.Image(np.asarray(recon_seq[args.seq_len - 1])),\n true_vs_recon=wandb.Image(\n np.asarray(comparison_seq.astype(np.uint8))\n ),\n )\n if val_results:\n log_images.update(\n dict(\n val_image=wandb.Image(\n np.asarray(\n val_results[""gt_seq_val""][args.seq_len - 1]\n )\n ),\n val_recon=wandb.Image(\n np.asarray(\n val_results[""recon_seq_val""][\n args.seq_len - 1\n ]\n )\n ),\n val_true_vs_recon=wandb.Image(\n np.asarray(\n val_results[""val_comparison_seq""].astype(\n np.uint8\n )\n )\n ),\n )\n )\n if args.eval_full_frame:\n log_images.update(\n dict(\n 
val_full_frame=wandb.Image(\n np.asarray(\n val_results[""full_frame_seq_val""][\n args.seq_len - 1\n ]\n )\n ),\n val_true_vs_full_frame=wandb.Image(\n np.asarray(\n val_results[\n ""val_full_frame_comparison_seq""\n ].astype(np.uint8)\n )\n ),\n )\n )\n wandb.log(log_images)\n # --- Checkpointing ---\n if args.save_ckpt and step % args.log_checkpoint_interval == 0:\n assert checkpoint_manager is not None\n optimizer_state = nnx.state(optimizer)\n if val_iterator:\n ckpt_manager_args = ocp.args.Composite(\n model_state=ocp.args.PyTreeSave(optimizer_state), # type: ignore\n train_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n train_iterator # type: ignore\n ),\n val_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n val_iterator # type: ignore\n ),\n )\n else:\n ckpt_manager_args = ocp.args.Composite(\n model_state=ocp.args.PyTreeSave(optimizer_state), # type: ignore\n train_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n train_iterator # type: ignore\n ),\n )\n checkpoint_manager.save(step, args=ckpt_manager_args)\n print(f""Saved checkpoint at step {step}"")\n if step >= args.num_steps:\n break",python,selection_command +1660,1256657,"jasmine/train_dynamics.py",27028,4509," ""t h w c -> h (t w) c"",\n )\n )\n # NOTE: Process-dependent control flow deliberately happens\n # after indexing operation since it must not contain code\n # sections that lead to cross-accelerator communication.\n if jax.process_index() == 0:\n log_images = dict(\n image=wandb.Image(np.asarray(gt_seq[args.seq_len - 1])),\n recon=wandb.Image(np.asarray(recon_seq[args.seq_len - 1])),\n true_vs_recon=wandb.Image(\n np.asarray(comparison_seq.astype(np.uint8))\n ),\n )\n if val_results:\n log_images.update(\n dict(\n val_image=wandb.Image(\n np.asarray(\n val_results[""gt_seq_val""][args.seq_len - 1]\n )\n ),\n val_recon=wandb.Image(\n np.asarray(\n val_results[""recon_seq_val""][\n args.seq_len - 1\n ]\n )\n ),\n val_true_vs_recon=wandb.Image(\n np.asarray(\n val_results[""val_comparison_seq""].astype(\n np.uint8\n )\n )\n ),\n )\n )\n if args.eval_full_frame:\n log_images.update(\n dict(\n val_full_frame=wandb.Image(\n np.asarray(\n val_results[""full_frame_seq_val""][\n args.seq_len - 1\n ]\n )\n ),\n val_true_vs_full_frame=wandb.Image(\n np.asarray(\n val_results[\n ""val_full_frame_comparison_seq""\n ].astype(np.uint8)\n )\n ),\n )\n )\n wandb.log(log_images)\n # --- Checkpointing ---\n if args.save_ckpt and step % args.log_checkpoint_interval == 0:\n assert checkpoint_manager is not None\n optimizer_state = nnx.state(optimizer)\n if val_iterator:\n ckpt_manager_args = ocp.args.Composite(\n model_state=ocp.args.PyTreeSave(optimizer_state), # type: ignore\n train_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n train_iterator # type: ignore\n ),\n val_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n val_iterator # type: ignore\n ),\n )\n else:\n ckpt_manager_args = ocp.args.Composite(\n model_state=ocp.args.PyTreeSave(optimizer_state), # type: ignore\n train_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n train_iterator # type: ignore\n ),\n )\n checkpoint_manager.save(step, args=ckpt_manager_args)\n print(f""Saved checkpoint at step {step}"")\n if step >= args.num_steps:\n break",python,selection_command +1661,1256697,"jasmine/train_dynamics.py",26940,4597," val_results[""val_full_frame_comparison_seq""] * 255,\n ""t h w c -> h (t w) c"",\n )\n )\n # NOTE: Process-dependent control flow deliberately 
happens\n # after indexing operation since it must not contain code\n # sections that lead to cross-accelerator communication.\n if jax.process_index() == 0:\n log_images = dict(\n image=wandb.Image(np.asarray(gt_seq[args.seq_len - 1])),\n recon=wandb.Image(np.asarray(recon_seq[args.seq_len - 1])),\n true_vs_recon=wandb.Image(\n np.asarray(comparison_seq.astype(np.uint8))\n ),\n )\n if val_results:\n log_images.update(\n dict(\n val_image=wandb.Image(\n np.asarray(\n val_results[""gt_seq_val""][args.seq_len - 1]\n )\n ),\n val_recon=wandb.Image(\n np.asarray(\n val_results[""recon_seq_val""][\n args.seq_len - 1\n ]\n )\n ),\n val_true_vs_recon=wandb.Image(\n np.asarray(\n val_results[""val_comparison_seq""].astype(\n np.uint8\n )\n )\n ),\n )\n )\n if args.eval_full_frame:\n log_images.update(\n dict(\n val_full_frame=wandb.Image(\n np.asarray(\n val_results[""full_frame_seq_val""][\n args.seq_len - 1\n ]\n )\n ),\n val_true_vs_full_frame=wandb.Image(\n np.asarray(\n val_results[\n ""val_full_frame_comparison_seq""\n ].astype(np.uint8)\n )\n ),\n )\n )\n wandb.log(log_images)\n # --- Checkpointing ---\n if args.save_ckpt and step % args.log_checkpoint_interval == 0:\n assert checkpoint_manager is not None\n optimizer_state = nnx.state(optimizer)\n if val_iterator:\n ckpt_manager_args = ocp.args.Composite(\n model_state=ocp.args.PyTreeSave(optimizer_state), # type: ignore\n train_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n train_iterator # type: ignore\n ),\n val_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n val_iterator # type: ignore\n ),\n )\n else:\n ckpt_manager_args = ocp.args.Composite(\n model_state=ocp.args.PyTreeSave(optimizer_state), # type: ignore\n train_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n train_iterator # type: ignore\n ),\n )\n checkpoint_manager.save(step, args=ckpt_manager_args)\n print(f""Saved checkpoint at step {step}"")\n if step >= args.num_steps:\n break",python,selection_command +1662,1256736,"jasmine/train_dynamics.py",26890,4647," einops.rearrange(\n val_results[""val_full_frame_comparison_seq""] * 255,\n ""t h w c -> h (t w) c"",\n )\n )\n # NOTE: Process-dependent control flow deliberately happens\n # after indexing operation since it must not contain code\n # sections that lead to cross-accelerator communication.\n if jax.process_index() == 0:\n log_images = dict(\n image=wandb.Image(np.asarray(gt_seq[args.seq_len - 1])),\n recon=wandb.Image(np.asarray(recon_seq[args.seq_len - 1])),\n true_vs_recon=wandb.Image(\n np.asarray(comparison_seq.astype(np.uint8))\n ),\n )\n if val_results:\n log_images.update(\n dict(\n val_image=wandb.Image(\n np.asarray(\n val_results[""gt_seq_val""][args.seq_len - 1]\n )\n ),\n val_recon=wandb.Image(\n np.asarray(\n val_results[""recon_seq_val""][\n args.seq_len - 1\n ]\n )\n ),\n val_true_vs_recon=wandb.Image(\n np.asarray(\n val_results[""val_comparison_seq""].astype(\n np.uint8\n )\n )\n ),\n )\n )\n if args.eval_full_frame:\n log_images.update(\n dict(\n val_full_frame=wandb.Image(\n np.asarray(\n val_results[""full_frame_seq_val""][\n args.seq_len - 1\n ]\n )\n ),\n val_true_vs_full_frame=wandb.Image(\n np.asarray(\n val_results[\n ""val_full_frame_comparison_seq""\n ].astype(np.uint8)\n )\n ),\n )\n )\n wandb.log(log_images)\n # --- Checkpointing ---\n if args.save_ckpt and step % args.log_checkpoint_interval == 0:\n assert checkpoint_manager is not None\n optimizer_state = nnx.state(optimizer)\n if val_iterator:\n ckpt_manager_args = 
ocp.args.Composite(\n model_state=ocp.args.PyTreeSave(optimizer_state), # type: ignore\n train_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n train_iterator # type: ignore\n ),\n val_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n val_iterator # type: ignore\n ),\n )\n else:\n ckpt_manager_args = ocp.args.Composite(\n model_state=ocp.args.PyTreeSave(optimizer_state), # type: ignore\n train_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n train_iterator # type: ignore\n ),\n )\n checkpoint_manager.save(step, args=ckpt_manager_args)\n print(f""Saved checkpoint at step {step}"")\n if step >= args.num_steps:\n break",python,selection_command +1663,1256810,"jasmine/train_dynamics.py",26813,4724," val_results[""val_full_frame_comparison_seq""] = (\n einops.rearrange(\n val_results[""val_full_frame_comparison_seq""] * 255,\n ""t h w c -> h (t w) c"",\n )\n )\n # NOTE: Process-dependent control flow deliberately happens\n # after indexing operation since it must not contain code\n # sections that lead to cross-accelerator communication.\n if jax.process_index() == 0:\n log_images = dict(\n image=wandb.Image(np.asarray(gt_seq[args.seq_len - 1])),\n recon=wandb.Image(np.asarray(recon_seq[args.seq_len - 1])),\n true_vs_recon=wandb.Image(\n np.asarray(comparison_seq.astype(np.uint8))\n ),\n )\n if val_results:\n log_images.update(\n dict(\n val_image=wandb.Image(\n np.asarray(\n val_results[""gt_seq_val""][args.seq_len - 1]\n )\n ),\n val_recon=wandb.Image(\n np.asarray(\n val_results[""recon_seq_val""][\n args.seq_len - 1\n ]\n )\n ),\n val_true_vs_recon=wandb.Image(\n np.asarray(\n val_results[""val_comparison_seq""].astype(\n np.uint8\n )\n )\n ),\n )\n )\n if args.eval_full_frame:\n log_images.update(\n dict(\n val_full_frame=wandb.Image(\n np.asarray(\n val_results[""full_frame_seq_val""][\n args.seq_len - 1\n ]\n )\n ),\n val_true_vs_full_frame=wandb.Image(\n np.asarray(\n val_results[\n ""val_full_frame_comparison_seq""\n ].astype(np.uint8)\n )\n ),\n )\n )\n wandb.log(log_images)\n # --- Checkpointing ---\n if args.save_ckpt and step % args.log_checkpoint_interval == 0:\n assert checkpoint_manager is not None\n optimizer_state = nnx.state(optimizer)\n if val_iterator:\n ckpt_manager_args = ocp.args.Composite(\n model_state=ocp.args.PyTreeSave(optimizer_state), # type: ignore\n train_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n train_iterator # type: ignore\n ),\n val_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n val_iterator # type: ignore\n ),\n )\n else:\n ckpt_manager_args = ocp.args.Composite(\n model_state=ocp.args.PyTreeSave(optimizer_state), # type: ignore\n train_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n train_iterator # type: ignore\n ),\n )\n checkpoint_manager.save(step, args=ckpt_manager_args)\n print(f""Saved checkpoint at step {step}"")\n if step >= args.num_steps:\n break",python,selection_command +1664,1256811,"jasmine/train_dynamics.py",26783,4754," )\n val_results[""val_full_frame_comparison_seq""] = (\n einops.rearrange(\n val_results[""val_full_frame_comparison_seq""] * 255,\n ""t h w c -> h (t w) c"",\n )\n )\n # NOTE: Process-dependent control flow deliberately happens\n # after indexing operation since it must not contain code\n # sections that lead to cross-accelerator communication.\n if jax.process_index() == 0:\n log_images = dict(\n image=wandb.Image(np.asarray(gt_seq[args.seq_len - 1])),\n recon=wandb.Image(np.asarray(recon_seq[args.seq_len - 
1])),\n true_vs_recon=wandb.Image(\n np.asarray(comparison_seq.astype(np.uint8))\n ),\n )\n if val_results:\n log_images.update(\n dict(\n val_image=wandb.Image(\n np.asarray(\n val_results[""gt_seq_val""][args.seq_len - 1]\n )\n ),\n val_recon=wandb.Image(\n np.asarray(\n val_results[""recon_seq_val""][\n args.seq_len - 1\n ]\n )\n ),\n val_true_vs_recon=wandb.Image(\n np.asarray(\n val_results[""val_comparison_seq""].astype(\n np.uint8\n )\n )\n ),\n )\n )\n if args.eval_full_frame:\n log_images.update(\n dict(\n val_full_frame=wandb.Image(\n np.asarray(\n val_results[""full_frame_seq_val""][\n args.seq_len - 1\n ]\n )\n ),\n val_true_vs_full_frame=wandb.Image(\n np.asarray(\n val_results[\n ""val_full_frame_comparison_seq""\n ].astype(np.uint8)\n )\n ),\n )\n )\n wandb.log(log_images)\n # --- Checkpointing ---\n if args.save_ckpt and step % args.log_checkpoint_interval == 0:\n assert checkpoint_manager is not None\n optimizer_state = nnx.state(optimizer)\n if val_iterator:\n ckpt_manager_args = ocp.args.Composite(\n model_state=ocp.args.PyTreeSave(optimizer_state), # type: ignore\n train_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n train_iterator # type: ignore\n ),\n val_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n val_iterator # type: ignore\n ),\n )\n else:\n ckpt_manager_args = ocp.args.Composite(\n model_state=ocp.args.PyTreeSave(optimizer_state), # type: ignore\n train_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n train_iterator # type: ignore\n ),\n )\n checkpoint_manager.save(step, args=ckpt_manager_args)\n print(f""Saved checkpoint at step {step}"")\n if step >= args.num_steps:\n break",python,selection_command +1665,1256813,"jasmine/train_dynamics.py",26749,4788," )\n )\n val_results[""val_full_frame_comparison_seq""] = (\n einops.rearrange(\n val_results[""val_full_frame_comparison_seq""] * 255,\n ""t h w c -> h (t w) c"",\n )\n )\n # NOTE: Process-dependent control flow deliberately happens\n # after indexing operation since it must not contain code\n # sections that lead to cross-accelerator communication.\n if jax.process_index() == 0:\n log_images = dict(\n image=wandb.Image(np.asarray(gt_seq[args.seq_len - 1])),\n recon=wandb.Image(np.asarray(recon_seq[args.seq_len - 1])),\n true_vs_recon=wandb.Image(\n np.asarray(comparison_seq.astype(np.uint8))\n ),\n )\n if val_results:\n log_images.update(\n dict(\n val_image=wandb.Image(\n np.asarray(\n val_results[""gt_seq_val""][args.seq_len - 1]\n )\n ),\n val_recon=wandb.Image(\n np.asarray(\n val_results[""recon_seq_val""][\n args.seq_len - 1\n ]\n )\n ),\n val_true_vs_recon=wandb.Image(\n np.asarray(\n val_results[""val_comparison_seq""].astype(\n np.uint8\n )\n )\n ),\n )\n )\n if args.eval_full_frame:\n log_images.update(\n dict(\n val_full_frame=wandb.Image(\n np.asarray(\n val_results[""full_frame_seq_val""][\n args.seq_len - 1\n ]\n )\n ),\n val_true_vs_full_frame=wandb.Image(\n np.asarray(\n val_results[\n ""val_full_frame_comparison_seq""\n ].astype(np.uint8)\n )\n ),\n )\n )\n wandb.log(log_images)\n # --- Checkpointing ---\n if args.save_ckpt and step % args.log_checkpoint_interval == 0:\n assert checkpoint_manager is not None\n optimizer_state = nnx.state(optimizer)\n if val_iterator:\n ckpt_manager_args = ocp.args.Composite(\n model_state=ocp.args.PyTreeSave(optimizer_state), # type: ignore\n train_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n train_iterator # type: ignore\n ),\n 
val_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n val_iterator # type: ignore\n ),\n )\n else:\n ckpt_manager_args = ocp.args.Composite(\n model_state=ocp.args.PyTreeSave(optimizer_state), # type: ignore\n train_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n train_iterator # type: ignore\n ),\n )\n checkpoint_manager.save(step, args=ckpt_manager_args)\n print(f""Saved checkpoint at step {step}"")\n if step >= args.num_steps:\n break",python,selection_command +1666,1256813,"TERMINAL",0,0,"2",,terminal_output +1667,1256865,"jasmine/train_dynamics.py",26705,4832," axis=1,\n )\n )\n val_results[""val_full_frame_comparison_seq""] = (\n einops.rearrange(\n val_results[""val_full_frame_comparison_seq""] * 255,\n ""t h w c -> h (t w) c"",\n )\n )\n # NOTE: Process-dependent control flow deliberately happens\n # after indexing operation since it must not contain code\n # sections that lead to cross-accelerator communication.\n if jax.process_index() == 0:\n log_images = dict(\n image=wandb.Image(np.asarray(gt_seq[args.seq_len - 1])),\n recon=wandb.Image(np.asarray(recon_seq[args.seq_len - 1])),\n true_vs_recon=wandb.Image(\n np.asarray(comparison_seq.astype(np.uint8))\n ),\n )\n if val_results:\n log_images.update(\n dict(\n val_image=wandb.Image(\n np.asarray(\n val_results[""gt_seq_val""][args.seq_len - 1]\n )\n ),\n val_recon=wandb.Image(\n np.asarray(\n val_results[""recon_seq_val""][\n args.seq_len - 1\n ]\n )\n ),\n val_true_vs_recon=wandb.Image(\n np.asarray(\n val_results[""val_comparison_seq""].astype(\n np.uint8\n )\n )\n ),\n )\n )\n if args.eval_full_frame:\n log_images.update(\n dict(\n val_full_frame=wandb.Image(\n np.asarray(\n val_results[""full_frame_seq_val""][\n args.seq_len - 1\n ]\n )\n ),\n val_true_vs_full_frame=wandb.Image(\n np.asarray(\n val_results[\n ""val_full_frame_comparison_seq""\n ].astype(np.uint8)\n )\n ),\n )\n )\n wandb.log(log_images)\n # --- Checkpointing ---\n if args.save_ckpt and step % args.log_checkpoint_interval == 0:\n assert checkpoint_manager is not None\n optimizer_state = nnx.state(optimizer)\n if val_iterator:\n ckpt_manager_args = ocp.args.Composite(\n model_state=ocp.args.PyTreeSave(optimizer_state), # type: ignore\n train_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n train_iterator # type: ignore\n ),\n val_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n val_iterator # type: ignore\n ),\n )\n else:\n ckpt_manager_args = ocp.args.Composite(\n model_state=ocp.args.PyTreeSave(optimizer_state), # type: ignore\n train_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n train_iterator # type: ignore\n ),\n )\n checkpoint_manager.save(step, args=ckpt_manager_args)\n print(f""Saved checkpoint at step {step}"")\n if step >= args.num_steps:\n break",python,selection_command +1668,1256911,"jasmine/train_dynamics.py",26666,4871," ),\n axis=1,\n )\n )\n val_results[""val_full_frame_comparison_seq""] = (\n einops.rearrange(\n val_results[""val_full_frame_comparison_seq""] * 255,\n ""t h w c -> h (t w) c"",\n )\n )\n # NOTE: Process-dependent control flow deliberately happens\n # after indexing operation since it must not contain code\n # sections that lead to cross-accelerator communication.\n if jax.process_index() == 0:\n log_images = dict(\n image=wandb.Image(np.asarray(gt_seq[args.seq_len - 1])),\n recon=wandb.Image(np.asarray(recon_seq[args.seq_len - 1])),\n true_vs_recon=wandb.Image(\n np.asarray(comparison_seq.astype(np.uint8))\n ),\n )\n if val_results:\n 
log_images.update(\n dict(\n val_image=wandb.Image(\n np.asarray(\n val_results[""gt_seq_val""][args.seq_len - 1]\n )\n ),\n val_recon=wandb.Image(\n np.asarray(\n val_results[""recon_seq_val""][\n args.seq_len - 1\n ]\n )\n ),\n val_true_vs_recon=wandb.Image(\n np.asarray(\n val_results[""val_comparison_seq""].astype(\n np.uint8\n )\n )\n ),\n )\n )\n if args.eval_full_frame:\n log_images.update(\n dict(\n val_full_frame=wandb.Image(\n np.asarray(\n val_results[""full_frame_seq_val""][\n args.seq_len - 1\n ]\n )\n ),\n val_true_vs_full_frame=wandb.Image(\n np.asarray(\n val_results[\n ""val_full_frame_comparison_seq""\n ].astype(np.uint8)\n )\n ),\n )\n )\n wandb.log(log_images)\n # --- Checkpointing ---\n if args.save_ckpt and step % args.log_checkpoint_interval == 0:\n assert checkpoint_manager is not None\n optimizer_state = nnx.state(optimizer)\n if val_iterator:\n ckpt_manager_args = ocp.args.Composite(\n model_state=ocp.args.PyTreeSave(optimizer_state), # type: ignore\n train_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n train_iterator # type: ignore\n ),\n val_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n val_iterator # type: ignore\n ),\n )\n else:\n ckpt_manager_args = ocp.args.Composite(\n model_state=ocp.args.PyTreeSave(optimizer_state), # type: ignore\n train_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n train_iterator # type: ignore\n ),\n )\n checkpoint_manager.save(step, args=ckpt_manager_args)\n print(f""Saved checkpoint at step {step}"")\n if step >= args.num_steps:\n break",python,selection_command +1669,1256912,"jasmine/train_dynamics.py",26591,4946," val_results[""full_frame_seq_val""],\n ),\n axis=1,\n )\n )\n val_results[""val_full_frame_comparison_seq""] = (\n einops.rearrange(\n val_results[""val_full_frame_comparison_seq""] * 255,\n ""t h w c -> h (t w) c"",\n )\n )\n # NOTE: Process-dependent control flow deliberately happens\n # after indexing operation since it must not contain code\n # sections that lead to cross-accelerator communication.\n if jax.process_index() == 0:\n log_images = dict(\n image=wandb.Image(np.asarray(gt_seq[args.seq_len - 1])),\n recon=wandb.Image(np.asarray(recon_seq[args.seq_len - 1])),\n true_vs_recon=wandb.Image(\n np.asarray(comparison_seq.astype(np.uint8))\n ),\n )\n if val_results:\n log_images.update(\n dict(\n val_image=wandb.Image(\n np.asarray(\n val_results[""gt_seq_val""][args.seq_len - 1]\n )\n ),\n val_recon=wandb.Image(\n np.asarray(\n val_results[""recon_seq_val""][\n args.seq_len - 1\n ]\n )\n ),\n val_true_vs_recon=wandb.Image(\n np.asarray(\n val_results[""val_comparison_seq""].astype(\n np.uint8\n )\n )\n ),\n )\n )\n if args.eval_full_frame:\n log_images.update(\n dict(\n val_full_frame=wandb.Image(\n np.asarray(\n val_results[""full_frame_seq_val""][\n args.seq_len - 1\n ]\n )\n ),\n val_true_vs_full_frame=wandb.Image(\n np.asarray(\n val_results[\n ""val_full_frame_comparison_seq""\n ].astype(np.uint8)\n )\n ),\n )\n )\n wandb.log(log_images)\n # --- Checkpointing ---\n if args.save_ckpt and step % args.log_checkpoint_interval == 0:\n assert checkpoint_manager is not None\n optimizer_state = nnx.state(optimizer)\n if val_iterator:\n ckpt_manager_args = ocp.args.Composite(\n model_state=ocp.args.PyTreeSave(optimizer_state), # type: ignore\n train_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n train_iterator # type: ignore\n ),\n val_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n val_iterator # type: ignore\n ),\n 
)\n else:\n ckpt_manager_args = ocp.args.Composite(\n model_state=ocp.args.PyTreeSave(optimizer_state), # type: ignore\n train_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n train_iterator # type: ignore\n ),\n )\n checkpoint_manager.save(step, args=ckpt_manager_args)\n print(f""Saved checkpoint at step {step}"")\n if step >= args.num_steps:\n break",python,selection_command +1670,1256949,"jasmine/train_dynamics.py",26524,5013," val_results[""gt_seq_val""],\n val_results[""full_frame_seq_val""],\n ),\n axis=1,\n )\n )\n val_results[""val_full_frame_comparison_seq""] = (\n einops.rearrange(\n val_results[""val_full_frame_comparison_seq""] * 255,\n ""t h w c -> h (t w) c"",\n )\n )\n # NOTE: Process-dependent control flow deliberately happens\n # after indexing operation since it must not contain code\n # sections that lead to cross-accelerator communication.\n if jax.process_index() == 0:\n log_images = dict(\n image=wandb.Image(np.asarray(gt_seq[args.seq_len - 1])),\n recon=wandb.Image(np.asarray(recon_seq[args.seq_len - 1])),\n true_vs_recon=wandb.Image(\n np.asarray(comparison_seq.astype(np.uint8))\n ),\n )\n if val_results:\n log_images.update(\n dict(\n val_image=wandb.Image(\n np.asarray(\n val_results[""gt_seq_val""][args.seq_len - 1]\n )\n ),\n val_recon=wandb.Image(\n np.asarray(\n val_results[""recon_seq_val""][\n args.seq_len - 1\n ]\n )\n ),\n val_true_vs_recon=wandb.Image(\n np.asarray(\n val_results[""val_comparison_seq""].astype(\n np.uint8\n )\n )\n ),\n )\n )\n if args.eval_full_frame:\n log_images.update(\n dict(\n val_full_frame=wandb.Image(\n np.asarray(\n val_results[""full_frame_seq_val""][\n args.seq_len - 1\n ]\n )\n ),\n val_true_vs_full_frame=wandb.Image(\n np.asarray(\n val_results[\n ""val_full_frame_comparison_seq""\n ].astype(np.uint8)\n )\n ),\n )\n )\n wandb.log(log_images)\n # --- Checkpointing ---\n if args.save_ckpt and step % args.log_checkpoint_interval == 0:\n assert checkpoint_manager is not None\n optimizer_state = nnx.state(optimizer)\n if val_iterator:\n ckpt_manager_args = ocp.args.Composite(\n model_state=ocp.args.PyTreeSave(optimizer_state), # type: ignore\n train_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n train_iterator # type: ignore\n ),\n val_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n val_iterator # type: ignore\n ),\n )\n else:\n ckpt_manager_args = ocp.args.Composite(\n model_state=ocp.args.PyTreeSave(optimizer_state), # type: ignore\n train_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n train_iterator # type: ignore\n ),\n )\n checkpoint_manager.save(step, args=ckpt_manager_args)\n print(f""Saved checkpoint at step {step}"")\n if step >= args.num_steps:\n break",python,selection_command +1671,1256960,"jasmine/train_dynamics.py",26486,5051," (\n val_results[""gt_seq_val""],\n val_results[""full_frame_seq_val""],\n ),\n axis=1,\n )\n )\n val_results[""val_full_frame_comparison_seq""] = (\n einops.rearrange(\n val_results[""val_full_frame_comparison_seq""] * 255,\n ""t h w c -> h (t w) c"",\n )\n )\n # NOTE: Process-dependent control flow deliberately happens\n # after indexing operation since it must not contain code\n # sections that lead to cross-accelerator communication.\n if jax.process_index() == 0:\n log_images = dict(\n image=wandb.Image(np.asarray(gt_seq[args.seq_len - 1])),\n recon=wandb.Image(np.asarray(recon_seq[args.seq_len - 1])),\n true_vs_recon=wandb.Image(\n np.asarray(comparison_seq.astype(np.uint8))\n ),\n )\n if val_results:\n 
log_images.update(\n dict(\n val_image=wandb.Image(\n np.asarray(\n val_results[""gt_seq_val""][args.seq_len - 1]\n )\n ),\n val_recon=wandb.Image(\n np.asarray(\n val_results[""recon_seq_val""][\n args.seq_len - 1\n ]\n )\n ),\n val_true_vs_recon=wandb.Image(\n np.asarray(\n val_results[""val_comparison_seq""].astype(\n np.uint8\n )\n )\n ),\n )\n )\n if args.eval_full_frame:\n log_images.update(\n dict(\n val_full_frame=wandb.Image(\n np.asarray(\n val_results[""full_frame_seq_val""][\n args.seq_len - 1\n ]\n )\n ),\n val_true_vs_full_frame=wandb.Image(\n np.asarray(\n val_results[\n ""val_full_frame_comparison_seq""\n ].astype(np.uint8)\n )\n ),\n )\n )\n wandb.log(log_images)\n # --- Checkpointing ---\n if args.save_ckpt and step % args.log_checkpoint_interval == 0:\n assert checkpoint_manager is not None\n optimizer_state = nnx.state(optimizer)\n if val_iterator:\n ckpt_manager_args = ocp.args.Composite(\n model_state=ocp.args.PyTreeSave(optimizer_state), # type: ignore\n train_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n train_iterator # type: ignore\n ),\n val_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n val_iterator # type: ignore\n ),\n )\n else:\n ckpt_manager_args = ocp.args.Composite(\n model_state=ocp.args.PyTreeSave(optimizer_state), # type: ignore\n train_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n train_iterator # type: ignore\n ),\n )\n checkpoint_manager.save(step, args=ckpt_manager_args)\n print(f""Saved checkpoint at step {step}"")\n if step >= args.num_steps:\n break",python,selection_command +1672,1256991,"jasmine/train_dynamics.py",26437,5100," jnp.concatenate(\n (\n val_results[""gt_seq_val""],\n val_results[""full_frame_seq_val""],\n ),\n axis=1,\n )\n )\n val_results[""val_full_frame_comparison_seq""] = (\n einops.rearrange(\n val_results[""val_full_frame_comparison_seq""] * 255,\n ""t h w c -> h (t w) c"",\n )\n )\n # NOTE: Process-dependent control flow deliberately happens\n # after indexing operation since it must not contain code\n # sections that lead to cross-accelerator communication.\n if jax.process_index() == 0:\n log_images = dict(\n image=wandb.Image(np.asarray(gt_seq[args.seq_len - 1])),\n recon=wandb.Image(np.asarray(recon_seq[args.seq_len - 1])),\n true_vs_recon=wandb.Image(\n np.asarray(comparison_seq.astype(np.uint8))\n ),\n )\n if val_results:\n log_images.update(\n dict(\n val_image=wandb.Image(\n np.asarray(\n val_results[""gt_seq_val""][args.seq_len - 1]\n )\n ),\n val_recon=wandb.Image(\n np.asarray(\n val_results[""recon_seq_val""][\n args.seq_len - 1\n ]\n )\n ),\n val_true_vs_recon=wandb.Image(\n np.asarray(\n val_results[""val_comparison_seq""].astype(\n np.uint8\n )\n )\n ),\n )\n )\n if args.eval_full_frame:\n log_images.update(\n dict(\n val_full_frame=wandb.Image(\n np.asarray(\n val_results[""full_frame_seq_val""][\n args.seq_len - 1\n ]\n )\n ),\n val_true_vs_full_frame=wandb.Image(\n np.asarray(\n val_results[\n ""val_full_frame_comparison_seq""\n ].astype(np.uint8)\n )\n ),\n )\n )\n wandb.log(log_images)\n # --- Checkpointing ---\n if args.save_ckpt and step % args.log_checkpoint_interval == 0:\n assert checkpoint_manager is not None\n optimizer_state = nnx.state(optimizer)\n if val_iterator:\n ckpt_manager_args = ocp.args.Composite(\n model_state=ocp.args.PyTreeSave(optimizer_state), # type: ignore\n train_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n train_iterator # type: ignore\n ),\n val_dataloader_state=grain.checkpoint.CheckpointSave( 
# type: ignore\n val_iterator # type: ignore\n ),\n )\n else:\n ckpt_manager_args = ocp.args.Composite(\n model_state=ocp.args.PyTreeSave(optimizer_state), # type: ignore\n train_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n train_iterator # type: ignore\n ),\n )\n checkpoint_manager.save(step, args=ckpt_manager_args)\n print(f""Saved checkpoint at step {step}"")\n if step >= args.num_steps:\n break",python,selection_command +1673,1257029,"jasmine/train_dynamics.py",26360,5177," val_results[""val_full_frame_comparison_seq""] = (\n jnp.concatenate(\n (\n val_results[""gt_seq_val""],\n val_results[""full_frame_seq_val""],\n ),\n axis=1,\n )\n )\n val_results[""val_full_frame_comparison_seq""] = (\n einops.rearrange(\n val_results[""val_full_frame_comparison_seq""] * 255,\n ""t h w c -> h (t w) c"",\n )\n )\n # NOTE: Process-dependent control flow deliberately happens\n # after indexing operation since it must not contain code\n # sections that lead to cross-accelerator communication.\n if jax.process_index() == 0:\n log_images = dict(\n image=wandb.Image(np.asarray(gt_seq[args.seq_len - 1])),\n recon=wandb.Image(np.asarray(recon_seq[args.seq_len - 1])),\n true_vs_recon=wandb.Image(\n np.asarray(comparison_seq.astype(np.uint8))\n ),\n )\n if val_results:\n log_images.update(\n dict(\n val_image=wandb.Image(\n np.asarray(\n val_results[""gt_seq_val""][args.seq_len - 1]\n )\n ),\n val_recon=wandb.Image(\n np.asarray(\n val_results[""recon_seq_val""][\n args.seq_len - 1\n ]\n )\n ),\n val_true_vs_recon=wandb.Image(\n np.asarray(\n val_results[""val_comparison_seq""].astype(\n np.uint8\n )\n )\n ),\n )\n )\n if args.eval_full_frame:\n log_images.update(\n dict(\n val_full_frame=wandb.Image(\n np.asarray(\n val_results[""full_frame_seq_val""][\n args.seq_len - 1\n ]\n )\n ),\n val_true_vs_full_frame=wandb.Image(\n np.asarray(\n val_results[\n ""val_full_frame_comparison_seq""\n ].astype(np.uint8)\n )\n ),\n )\n )\n wandb.log(log_images)\n # --- Checkpointing ---\n if args.save_ckpt and step % args.log_checkpoint_interval == 0:\n assert checkpoint_manager is not None\n optimizer_state = nnx.state(optimizer)\n if val_iterator:\n ckpt_manager_args = ocp.args.Composite(\n model_state=ocp.args.PyTreeSave(optimizer_state), # type: ignore\n train_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n train_iterator # type: ignore\n ),\n val_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n val_iterator # type: ignore\n ),\n )\n else:\n ckpt_manager_args = ocp.args.Composite(\n model_state=ocp.args.PyTreeSave(optimizer_state), # type: ignore\n train_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n train_iterator # type: ignore\n ),\n )\n checkpoint_manager.save(step, args=ckpt_manager_args)\n print(f""Saved checkpoint at step {step}"")\n if step >= args.num_steps:\n break",python,selection_command +1674,1257071,"jasmine/train_dynamics.py",26316,5221," ][0].clip(0, 1)\n val_results[""val_full_frame_comparison_seq""] = (\n jnp.concatenate(\n (\n val_results[""gt_seq_val""],\n val_results[""full_frame_seq_val""],\n ),\n axis=1,\n )\n )\n val_results[""val_full_frame_comparison_seq""] = (\n einops.rearrange(\n val_results[""val_full_frame_comparison_seq""] * 255,\n ""t h w c -> h (t w) c"",\n )\n )\n # NOTE: Process-dependent control flow deliberately happens\n # after indexing operation since it must not contain code\n # sections that lead to cross-accelerator communication.\n if jax.process_index() == 0:\n log_images = dict(\n 
image=wandb.Image(np.asarray(gt_seq[args.seq_len - 1])),\n recon=wandb.Image(np.asarray(recon_seq[args.seq_len - 1])),\n true_vs_recon=wandb.Image(\n np.asarray(comparison_seq.astype(np.uint8))\n ),\n )\n if val_results:\n log_images.update(\n dict(\n val_image=wandb.Image(\n np.asarray(\n val_results[""gt_seq_val""][args.seq_len - 1]\n )\n ),\n val_recon=wandb.Image(\n np.asarray(\n val_results[""recon_seq_val""][\n args.seq_len - 1\n ]\n )\n ),\n val_true_vs_recon=wandb.Image(\n np.asarray(\n val_results[""val_comparison_seq""].astype(\n np.uint8\n )\n )\n ),\n )\n )\n if args.eval_full_frame:\n log_images.update(\n dict(\n val_full_frame=wandb.Image(\n np.asarray(\n val_results[""full_frame_seq_val""][\n args.seq_len - 1\n ]\n )\n ),\n val_true_vs_full_frame=wandb.Image(\n np.asarray(\n val_results[\n ""val_full_frame_comparison_seq""\n ].astype(np.uint8)\n )\n ),\n )\n )\n wandb.log(log_images)\n # --- Checkpointing ---\n if args.save_ckpt and step % args.log_checkpoint_interval == 0:\n assert checkpoint_manager is not None\n optimizer_state = nnx.state(optimizer)\n if val_iterator:\n ckpt_manager_args = ocp.args.Composite(\n model_state=ocp.args.PyTreeSave(optimizer_state), # type: ignore\n train_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n train_iterator # type: ignore\n ),\n val_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n val_iterator # type: ignore\n ),\n )\n else:\n ckpt_manager_args = ocp.args.Composite(\n model_state=ocp.args.PyTreeSave(optimizer_state), # type: ignore\n train_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n train_iterator # type: ignore\n ),\n )\n checkpoint_manager.save(step, args=ckpt_manager_args)\n print(f""Saved checkpoint at step {step}"")\n if step >= args.num_steps:\n break",python,selection_command +1675,1257086,"jasmine/train_dynamics.py",26271,5266," ""full_frame""\n ][0].clip(0, 1)\n val_results[""val_full_frame_comparison_seq""] = (\n jnp.concatenate(\n (\n val_results[""gt_seq_val""],\n val_results[""full_frame_seq_val""],\n ),\n axis=1,\n )\n )\n val_results[""val_full_frame_comparison_seq""] = (\n einops.rearrange(\n val_results[""val_full_frame_comparison_seq""] * 255,\n ""t h w c -> h (t w) c"",\n )\n )\n # NOTE: Process-dependent control flow deliberately happens\n # after indexing operation since it must not contain code\n # sections that lead to cross-accelerator communication.\n if jax.process_index() == 0:\n log_images = dict(\n image=wandb.Image(np.asarray(gt_seq[args.seq_len - 1])),\n recon=wandb.Image(np.asarray(recon_seq[args.seq_len - 1])),\n true_vs_recon=wandb.Image(\n np.asarray(comparison_seq.astype(np.uint8))\n ),\n )\n if val_results:\n log_images.update(\n dict(\n val_image=wandb.Image(\n np.asarray(\n val_results[""gt_seq_val""][args.seq_len - 1]\n )\n ),\n val_recon=wandb.Image(\n np.asarray(\n val_results[""recon_seq_val""][\n args.seq_len - 1\n ]\n )\n ),\n val_true_vs_recon=wandb.Image(\n np.asarray(\n val_results[""val_comparison_seq""].astype(\n np.uint8\n )\n )\n ),\n )\n )\n if args.eval_full_frame:\n log_images.update(\n dict(\n val_full_frame=wandb.Image(\n np.asarray(\n val_results[""full_frame_seq_val""][\n args.seq_len - 1\n ]\n )\n ),\n val_true_vs_full_frame=wandb.Image(\n np.asarray(\n val_results[\n ""val_full_frame_comparison_seq""\n ].astype(np.uint8)\n )\n ),\n )\n )\n wandb.log(log_images)\n # --- Checkpointing ---\n if args.save_ckpt and step % args.log_checkpoint_interval == 0:\n assert checkpoint_manager is not None\n optimizer_state = 
nnx.state(optimizer)\n if val_iterator:\n ckpt_manager_args = ocp.args.Composite(\n model_state=ocp.args.PyTreeSave(optimizer_state), # type: ignore\n train_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n train_iterator # type: ignore\n ),\n val_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n val_iterator # type: ignore\n ),\n )\n else:\n ckpt_manager_args = ocp.args.Composite(\n model_state=ocp.args.PyTreeSave(optimizer_state), # type: ignore\n train_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n train_iterator # type: ignore\n ),\n )\n checkpoint_manager.save(step, args=ckpt_manager_args)\n print(f""Saved checkpoint at step {step}"")\n if step >= args.num_steps:\n break",python,selection_command +1676,1257114,"jasmine/train_dynamics.py",26194,5343," val_results[""full_frame_seq_val""] = val_results[\n ""full_frame""\n ][0].clip(0, 1)\n val_results[""val_full_frame_comparison_seq""] = (\n jnp.concatenate(\n (\n val_results[""gt_seq_val""],\n val_results[""full_frame_seq_val""],\n ),\n axis=1,\n )\n )\n val_results[""val_full_frame_comparison_seq""] = (\n einops.rearrange(\n val_results[""val_full_frame_comparison_seq""] * 255,\n ""t h w c -> h (t w) c"",\n )\n )\n # NOTE: Process-dependent control flow deliberately happens\n # after indexing operation since it must not contain code\n # sections that lead to cross-accelerator communication.\n if jax.process_index() == 0:\n log_images = dict(\n image=wandb.Image(np.asarray(gt_seq[args.seq_len - 1])),\n recon=wandb.Image(np.asarray(recon_seq[args.seq_len - 1])),\n true_vs_recon=wandb.Image(\n np.asarray(comparison_seq.astype(np.uint8))\n ),\n )\n if val_results:\n log_images.update(\n dict(\n val_image=wandb.Image(\n np.asarray(\n val_results[""gt_seq_val""][args.seq_len - 1]\n )\n ),\n val_recon=wandb.Image(\n np.asarray(\n val_results[""recon_seq_val""][\n args.seq_len - 1\n ]\n )\n ),\n val_true_vs_recon=wandb.Image(\n np.asarray(\n val_results[""val_comparison_seq""].astype(\n np.uint8\n )\n )\n ),\n )\n )\n if args.eval_full_frame:\n log_images.update(\n dict(\n val_full_frame=wandb.Image(\n np.asarray(\n val_results[""full_frame_seq_val""][\n args.seq_len - 1\n ]\n )\n ),\n val_true_vs_full_frame=wandb.Image(\n np.asarray(\n val_results[\n ""val_full_frame_comparison_seq""\n ].astype(np.uint8)\n )\n ),\n )\n )\n wandb.log(log_images)\n # --- Checkpointing ---\n if args.save_ckpt and step % args.log_checkpoint_interval == 0:\n assert checkpoint_manager is not None\n optimizer_state = nnx.state(optimizer)\n if val_iterator:\n ckpt_manager_args = ocp.args.Composite(\n model_state=ocp.args.PyTreeSave(optimizer_state), # type: ignore\n train_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n train_iterator # type: ignore\n ),\n val_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n val_iterator # type: ignore\n ),\n )\n else:\n ckpt_manager_args = ocp.args.Composite(\n model_state=ocp.args.PyTreeSave(optimizer_state), # type: ignore\n train_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n train_iterator # type: ignore\n ),\n )\n checkpoint_manager.save(step, args=ckpt_manager_args)\n print(f""Saved checkpoint at step {step}"")\n if step >= args.num_steps:\n break",python,selection_command +1677,1257163,"jasmine/train_dynamics.py",26145,5392," if args.eval_full_frame:\n val_results[""full_frame_seq_val""] = val_results[\n ""full_frame""\n ][0].clip(0, 1)\n val_results[""val_full_frame_comparison_seq""] = (\n jnp.concatenate(\n (\n 
val_results[""gt_seq_val""],\n val_results[""full_frame_seq_val""],\n ),\n axis=1,\n )\n )\n val_results[""val_full_frame_comparison_seq""] = (\n einops.rearrange(\n val_results[""val_full_frame_comparison_seq""] * 255,\n ""t h w c -> h (t w) c"",\n )\n )\n # NOTE: Process-dependent control flow deliberately happens\n # after indexing operation since it must not contain code\n # sections that lead to cross-accelerator communication.\n if jax.process_index() == 0:\n log_images = dict(\n image=wandb.Image(np.asarray(gt_seq[args.seq_len - 1])),\n recon=wandb.Image(np.asarray(recon_seq[args.seq_len - 1])),\n true_vs_recon=wandb.Image(\n np.asarray(comparison_seq.astype(np.uint8))\n ),\n )\n if val_results:\n log_images.update(\n dict(\n val_image=wandb.Image(\n np.asarray(\n val_results[""gt_seq_val""][args.seq_len - 1]\n )\n ),\n val_recon=wandb.Image(\n np.asarray(\n val_results[""recon_seq_val""][\n args.seq_len - 1\n ]\n )\n ),\n val_true_vs_recon=wandb.Image(\n np.asarray(\n val_results[""val_comparison_seq""].astype(\n np.uint8\n )\n )\n ),\n )\n )\n if args.eval_full_frame:\n log_images.update(\n dict(\n val_full_frame=wandb.Image(\n np.asarray(\n val_results[""full_frame_seq_val""][\n args.seq_len - 1\n ]\n )\n ),\n val_true_vs_full_frame=wandb.Image(\n np.asarray(\n val_results[\n ""val_full_frame_comparison_seq""\n ].astype(np.uint8)\n )\n ),\n )\n )\n wandb.log(log_images)\n # --- Checkpointing ---\n if args.save_ckpt and step % args.log_checkpoint_interval == 0:\n assert checkpoint_manager is not None\n optimizer_state = nnx.state(optimizer)\n if val_iterator:\n ckpt_manager_args = ocp.args.Composite(\n model_state=ocp.args.PyTreeSave(optimizer_state), # type: ignore\n train_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n train_iterator # type: ignore\n ),\n val_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n val_iterator # type: ignore\n ),\n )\n else:\n ckpt_manager_args = ocp.args.Composite(\n model_state=ocp.args.PyTreeSave(optimizer_state), # type: ignore\n train_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n train_iterator # type: ignore\n ),\n )\n checkpoint_manager.save(step, args=ckpt_manager_args)\n print(f""Saved checkpoint at step {step}"")\n if step >= args.num_steps:\n break",python,selection_command +1678,1257204,"jasmine/train_dynamics.py",26119,5418," )\n if args.eval_full_frame:\n val_results[""full_frame_seq_val""] = val_results[\n ""full_frame""\n ][0].clip(0, 1)\n val_results[""val_full_frame_comparison_seq""] = (\n jnp.concatenate(\n (\n val_results[""gt_seq_val""],\n val_results[""full_frame_seq_val""],\n ),\n axis=1,\n )\n )\n val_results[""val_full_frame_comparison_seq""] = (\n einops.rearrange(\n val_results[""val_full_frame_comparison_seq""] * 255,\n ""t h w c -> h (t w) c"",\n )\n )\n # NOTE: Process-dependent control flow deliberately happens\n # after indexing operation since it must not contain code\n # sections that lead to cross-accelerator communication.\n if jax.process_index() == 0:\n log_images = dict(\n image=wandb.Image(np.asarray(gt_seq[args.seq_len - 1])),\n recon=wandb.Image(np.asarray(recon_seq[args.seq_len - 1])),\n true_vs_recon=wandb.Image(\n np.asarray(comparison_seq.astype(np.uint8))\n ),\n )\n if val_results:\n log_images.update(\n dict(\n val_image=wandb.Image(\n np.asarray(\n val_results[""gt_seq_val""][args.seq_len - 1]\n )\n ),\n val_recon=wandb.Image(\n np.asarray(\n val_results[""recon_seq_val""][\n args.seq_len - 1\n ]\n )\n ),\n val_true_vs_recon=wandb.Image(\n 
np.asarray(\n val_results[""val_comparison_seq""].astype(\n np.uint8\n )\n )\n ),\n )\n )\n if args.eval_full_frame:\n log_images.update(\n dict(\n val_full_frame=wandb.Image(\n np.asarray(\n val_results[""full_frame_seq_val""][\n args.seq_len - 1\n ]\n )\n ),\n val_true_vs_full_frame=wandb.Image(\n np.asarray(\n val_results[\n ""val_full_frame_comparison_seq""\n ].astype(np.uint8)\n )\n ),\n )\n )\n wandb.log(log_images)\n # --- Checkpointing ---\n if args.save_ckpt and step % args.log_checkpoint_interval == 0:\n assert checkpoint_manager is not None\n optimizer_state = nnx.state(optimizer)\n if val_iterator:\n ckpt_manager_args = ocp.args.Composite(\n model_state=ocp.args.PyTreeSave(optimizer_state), # type: ignore\n train_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n train_iterator # type: ignore\n ),\n val_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n val_iterator # type: ignore\n ),\n )\n else:\n ckpt_manager_args = ocp.args.Composite(\n model_state=ocp.args.PyTreeSave(optimizer_state), # type: ignore\n train_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n train_iterator # type: ignore\n ),\n )\n checkpoint_manager.save(step, args=ckpt_manager_args)\n print(f""Saved checkpoint at step {step}"")\n if step >= args.num_steps:\n break",python,selection_command +1679,1257205,"jasmine/train_dynamics.py",26042,5495," val_comparison_seq * 255, ""t h w c -> h (t w) c""\n )\n if args.eval_full_frame:\n val_results[""full_frame_seq_val""] = val_results[\n ""full_frame""\n ][0].clip(0, 1)\n val_results[""val_full_frame_comparison_seq""] = (\n jnp.concatenate(\n (\n val_results[""gt_seq_val""],\n val_results[""full_frame_seq_val""],\n ),\n axis=1,\n )\n )\n val_results[""val_full_frame_comparison_seq""] = (\n einops.rearrange(\n val_results[""val_full_frame_comparison_seq""] * 255,\n ""t h w c -> h (t w) c"",\n )\n )\n # NOTE: Process-dependent control flow deliberately happens\n # after indexing operation since it must not contain code\n # sections that lead to cross-accelerator communication.\n if jax.process_index() == 0:\n log_images = dict(\n image=wandb.Image(np.asarray(gt_seq[args.seq_len - 1])),\n recon=wandb.Image(np.asarray(recon_seq[args.seq_len - 1])),\n true_vs_recon=wandb.Image(\n np.asarray(comparison_seq.astype(np.uint8))\n ),\n )\n if val_results:\n log_images.update(\n dict(\n val_image=wandb.Image(\n np.asarray(\n val_results[""gt_seq_val""][args.seq_len - 1]\n )\n ),\n val_recon=wandb.Image(\n np.asarray(\n val_results[""recon_seq_val""][\n args.seq_len - 1\n ]\n )\n ),\n val_true_vs_recon=wandb.Image(\n np.asarray(\n val_results[""val_comparison_seq""].astype(\n np.uint8\n )\n )\n ),\n )\n )\n if args.eval_full_frame:\n log_images.update(\n dict(\n val_full_frame=wandb.Image(\n np.asarray(\n val_results[""full_frame_seq_val""][\n args.seq_len - 1\n ]\n )\n ),\n val_true_vs_full_frame=wandb.Image(\n np.asarray(\n val_results[\n ""val_full_frame_comparison_seq""\n ].astype(np.uint8)\n )\n ),\n )\n )\n wandb.log(log_images)\n # --- Checkpointing ---\n if args.save_ckpt and step % args.log_checkpoint_interval == 0:\n assert checkpoint_manager is not None\n optimizer_state = nnx.state(optimizer)\n if val_iterator:\n ckpt_manager_args = ocp.args.Composite(\n model_state=ocp.args.PyTreeSave(optimizer_state), # type: ignore\n train_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n train_iterator # type: ignore\n ),\n val_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n val_iterator # type: 
ignore\n ),\n )\n else:\n ckpt_manager_args = ocp.args.Composite(\n model_state=ocp.args.PyTreeSave(optimizer_state), # type: ignore\n train_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n train_iterator # type: ignore\n ),\n )\n checkpoint_manager.save(step, args=ckpt_manager_args)\n print(f""Saved checkpoint at step {step}"")\n if step >= args.num_steps:\n break",python,selection_command +1680,1257233,"jasmine/train_dynamics.py",25964,5573," val_results[""val_comparison_seq""] = einops.rearrange(\n val_comparison_seq * 255, ""t h w c -> h (t w) c""\n )\n if args.eval_full_frame:\n val_results[""full_frame_seq_val""] = val_results[\n ""full_frame""\n ][0].clip(0, 1)\n val_results[""val_full_frame_comparison_seq""] = (\n jnp.concatenate(\n (\n val_results[""gt_seq_val""],\n val_results[""full_frame_seq_val""],\n ),\n axis=1,\n )\n )\n val_results[""val_full_frame_comparison_seq""] = (\n einops.rearrange(\n val_results[""val_full_frame_comparison_seq""] * 255,\n ""t h w c -> h (t w) c"",\n )\n )\n # NOTE: Process-dependent control flow deliberately happens\n # after indexing operation since it must not contain code\n # sections that lead to cross-accelerator communication.\n if jax.process_index() == 0:\n log_images = dict(\n image=wandb.Image(np.asarray(gt_seq[args.seq_len - 1])),\n recon=wandb.Image(np.asarray(recon_seq[args.seq_len - 1])),\n true_vs_recon=wandb.Image(\n np.asarray(comparison_seq.astype(np.uint8))\n ),\n )\n if val_results:\n log_images.update(\n dict(\n val_image=wandb.Image(\n np.asarray(\n val_results[""gt_seq_val""][args.seq_len - 1]\n )\n ),\n val_recon=wandb.Image(\n np.asarray(\n val_results[""recon_seq_val""][\n args.seq_len - 1\n ]\n )\n ),\n val_true_vs_recon=wandb.Image(\n np.asarray(\n val_results[""val_comparison_seq""].astype(\n np.uint8\n )\n )\n ),\n )\n )\n if args.eval_full_frame:\n log_images.update(\n dict(\n val_full_frame=wandb.Image(\n np.asarray(\n val_results[""full_frame_seq_val""][\n args.seq_len - 1\n ]\n )\n ),\n val_true_vs_full_frame=wandb.Image(\n np.asarray(\n val_results[\n ""val_full_frame_comparison_seq""\n ].astype(np.uint8)\n )\n ),\n )\n )\n wandb.log(log_images)\n # --- Checkpointing ---\n if args.save_ckpt and step % args.log_checkpoint_interval == 0:\n assert checkpoint_manager is not None\n optimizer_state = nnx.state(optimizer)\n if val_iterator:\n ckpt_manager_args = ocp.args.Composite(\n model_state=ocp.args.PyTreeSave(optimizer_state), # type: ignore\n train_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n train_iterator # type: ignore\n ),\n val_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n val_iterator # type: ignore\n ),\n )\n else:\n ckpt_manager_args = ocp.args.Composite(\n model_state=ocp.args.PyTreeSave(optimizer_state), # type: ignore\n train_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n train_iterator # type: ignore\n ),\n )\n checkpoint_manager.save(step, args=ckpt_manager_args)\n print(f""Saved checkpoint at step {step}"")\n if step >= args.num_steps:\n break",python,selection_command +1681,1257272,"jasmine/train_dynamics.py",25938,5599," )\n val_results[""val_comparison_seq""] = einops.rearrange(\n val_comparison_seq * 255, ""t h w c -> h (t w) c""\n )\n if args.eval_full_frame:\n val_results[""full_frame_seq_val""] = val_results[\n ""full_frame""\n ][0].clip(0, 1)\n val_results[""val_full_frame_comparison_seq""] = (\n jnp.concatenate(\n (\n val_results[""gt_seq_val""],\n val_results[""full_frame_seq_val""],\n ),\n axis=1,\n )\n )\n 
val_results[""val_full_frame_comparison_seq""] = (\n einops.rearrange(\n val_results[""val_full_frame_comparison_seq""] * 255,\n ""t h w c -> h (t w) c"",\n )\n )\n # NOTE: Process-dependent control flow deliberately happens\n # after indexing operation since it must not contain code\n # sections that lead to cross-accelerator communication.\n if jax.process_index() == 0:\n log_images = dict(\n image=wandb.Image(np.asarray(gt_seq[args.seq_len - 1])),\n recon=wandb.Image(np.asarray(recon_seq[args.seq_len - 1])),\n true_vs_recon=wandb.Image(\n np.asarray(comparison_seq.astype(np.uint8))\n ),\n )\n if val_results:\n log_images.update(\n dict(\n val_image=wandb.Image(\n np.asarray(\n val_results[""gt_seq_val""][args.seq_len - 1]\n )\n ),\n val_recon=wandb.Image(\n np.asarray(\n val_results[""recon_seq_val""][\n args.seq_len - 1\n ]\n )\n ),\n val_true_vs_recon=wandb.Image(\n np.asarray(\n val_results[""val_comparison_seq""].astype(\n np.uint8\n )\n )\n ),\n )\n )\n if args.eval_full_frame:\n log_images.update(\n dict(\n val_full_frame=wandb.Image(\n np.asarray(\n val_results[""full_frame_seq_val""][\n args.seq_len - 1\n ]\n )\n ),\n val_true_vs_full_frame=wandb.Image(\n np.asarray(\n val_results[\n ""val_full_frame_comparison_seq""\n ].astype(np.uint8)\n )\n ),\n )\n )\n wandb.log(log_images)\n # --- Checkpointing ---\n if args.save_ckpt and step % args.log_checkpoint_interval == 0:\n assert checkpoint_manager is not None\n optimizer_state = nnx.state(optimizer)\n if val_iterator:\n ckpt_manager_args = ocp.args.Composite(\n model_state=ocp.args.PyTreeSave(optimizer_state), # type: ignore\n train_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n train_iterator # type: ignore\n ),\n val_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n val_iterator # type: ignore\n ),\n )\n else:\n ckpt_manager_args = ocp.args.Composite(\n model_state=ocp.args.PyTreeSave(optimizer_state), # type: ignore\n train_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n train_iterator # type: ignore\n ),\n )\n checkpoint_manager.save(step, args=ckpt_manager_args)\n print(f""Saved checkpoint at step {step}"")\n if step >= args.num_steps:\n break",python,selection_command +1682,1257316,"jasmine/train_dynamics.py",25902,5635," axis=1,\n )\n val_results[""val_comparison_seq""] = einops.rearrange(\n val_comparison_seq * 255, ""t h w c -> h (t w) c""\n )\n if args.eval_full_frame:\n val_results[""full_frame_seq_val""] = val_results[\n ""full_frame""\n ][0].clip(0, 1)\n val_results[""val_full_frame_comparison_seq""] = (\n jnp.concatenate(\n (\n val_results[""gt_seq_val""],\n val_results[""full_frame_seq_val""],\n ),\n axis=1,\n )\n )\n val_results[""val_full_frame_comparison_seq""] = (\n einops.rearrange(\n val_results[""val_full_frame_comparison_seq""] * 255,\n ""t h w c -> h (t w) c"",\n )\n )\n # NOTE: Process-dependent control flow deliberately happens\n # after indexing operation since it must not contain code\n # sections that lead to cross-accelerator communication.\n if jax.process_index() == 0:\n log_images = dict(\n image=wandb.Image(np.asarray(gt_seq[args.seq_len - 1])),\n recon=wandb.Image(np.asarray(recon_seq[args.seq_len - 1])),\n true_vs_recon=wandb.Image(\n np.asarray(comparison_seq.astype(np.uint8))\n ),\n )\n if val_results:\n log_images.update(\n dict(\n val_image=wandb.Image(\n np.asarray(\n val_results[""gt_seq_val""][args.seq_len - 1]\n )\n ),\n val_recon=wandb.Image(\n np.asarray(\n val_results[""recon_seq_val""][\n args.seq_len - 1\n ]\n )\n ),\n 
val_true_vs_recon=wandb.Image(\n np.asarray(\n val_results[""val_comparison_seq""].astype(\n np.uint8\n )\n )\n ),\n )\n )\n if args.eval_full_frame:\n log_images.update(\n dict(\n val_full_frame=wandb.Image(\n np.asarray(\n val_results[""full_frame_seq_val""][\n args.seq_len - 1\n ]\n )\n ),\n val_true_vs_full_frame=wandb.Image(\n np.asarray(\n val_results[\n ""val_full_frame_comparison_seq""\n ].astype(np.uint8)\n )\n ),\n )\n )\n wandb.log(log_images)\n # --- Checkpointing ---\n if args.save_ckpt and step % args.log_checkpoint_interval == 0:\n assert checkpoint_manager is not None\n optimizer_state = nnx.state(optimizer)\n if val_iterator:\n ckpt_manager_args = ocp.args.Composite(\n model_state=ocp.args.PyTreeSave(optimizer_state), # type: ignore\n train_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n train_iterator # type: ignore\n ),\n val_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n val_iterator # type: ignore\n ),\n )\n else:\n ckpt_manager_args = ocp.args.Composite(\n model_state=ocp.args.PyTreeSave(optimizer_state), # type: ignore\n train_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n train_iterator # type: ignore\n ),\n )\n checkpoint_manager.save(step, args=ckpt_manager_args)\n print(f""Saved checkpoint at step {step}"")\n if step >= args.num_steps:\n break",python,selection_command +1683,1257317,"jasmine/train_dynamics.py",25815,5722," (val_results[""gt_seq_val""], val_results[""recon_seq_val""]),\n axis=1,\n )\n val_results[""val_comparison_seq""] = einops.rearrange(\n val_comparison_seq * 255, ""t h w c -> h (t w) c""\n )\n if args.eval_full_frame:\n val_results[""full_frame_seq_val""] = val_results[\n ""full_frame""\n ][0].clip(0, 1)\n val_results[""val_full_frame_comparison_seq""] = (\n jnp.concatenate(\n (\n val_results[""gt_seq_val""],\n val_results[""full_frame_seq_val""],\n ),\n axis=1,\n )\n )\n val_results[""val_full_frame_comparison_seq""] = (\n einops.rearrange(\n val_results[""val_full_frame_comparison_seq""] * 255,\n ""t h w c -> h (t w) c"",\n )\n )\n # NOTE: Process-dependent control flow deliberately happens\n # after indexing operation since it must not contain code\n # sections that lead to cross-accelerator communication.\n if jax.process_index() == 0:\n log_images = dict(\n image=wandb.Image(np.asarray(gt_seq[args.seq_len - 1])),\n recon=wandb.Image(np.asarray(recon_seq[args.seq_len - 1])),\n true_vs_recon=wandb.Image(\n np.asarray(comparison_seq.astype(np.uint8))\n ),\n )\n if val_results:\n log_images.update(\n dict(\n val_image=wandb.Image(\n np.asarray(\n val_results[""gt_seq_val""][args.seq_len - 1]\n )\n ),\n val_recon=wandb.Image(\n np.asarray(\n val_results[""recon_seq_val""][\n args.seq_len - 1\n ]\n )\n ),\n val_true_vs_recon=wandb.Image(\n np.asarray(\n val_results[""val_comparison_seq""].astype(\n np.uint8\n )\n )\n ),\n )\n )\n if args.eval_full_frame:\n log_images.update(\n dict(\n val_full_frame=wandb.Image(\n np.asarray(\n val_results[""full_frame_seq_val""][\n args.seq_len - 1\n ]\n )\n ),\n val_true_vs_full_frame=wandb.Image(\n np.asarray(\n val_results[\n ""val_full_frame_comparison_seq""\n ].astype(np.uint8)\n )\n ),\n )\n )\n wandb.log(log_images)\n # --- Checkpointing ---\n if args.save_ckpt and step % args.log_checkpoint_interval == 0:\n assert checkpoint_manager is not None\n optimizer_state = nnx.state(optimizer)\n if val_iterator:\n ckpt_manager_args = ocp.args.Composite(\n model_state=ocp.args.PyTreeSave(optimizer_state), # type: ignore\n 
train_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n train_iterator # type: ignore\n ),\n val_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n val_iterator # type: ignore\n ),\n )\n else:\n ckpt_manager_args = ocp.args.Composite(\n model_state=ocp.args.PyTreeSave(optimizer_state), # type: ignore\n train_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n train_iterator # type: ignore\n ),\n )\n checkpoint_manager.save(step, args=ckpt_manager_args)\n print(f""Saved checkpoint at step {step}"")\n if step >= args.num_steps:\n break",python,selection_command +1684,1257376,"jasmine/train_dynamics.py",25753,5784," val_comparison_seq = jnp.concatenate(\n (val_results[""gt_seq_val""], val_results[""recon_seq_val""]),\n axis=1,\n )\n val_results[""val_comparison_seq""] = einops.rearrange(\n val_comparison_seq * 255, ""t h w c -> h (t w) c""\n )\n if args.eval_full_frame:\n val_results[""full_frame_seq_val""] = val_results[\n ""full_frame""\n ][0].clip(0, 1)\n val_results[""val_full_frame_comparison_seq""] = (\n jnp.concatenate(\n (\n val_results[""gt_seq_val""],\n val_results[""full_frame_seq_val""],\n ),\n axis=1,\n )\n )\n val_results[""val_full_frame_comparison_seq""] = (\n einops.rearrange(\n val_results[""val_full_frame_comparison_seq""] * 255,\n ""t h w c -> h (t w) c"",\n )\n )\n # NOTE: Process-dependent control flow deliberately happens\n # after indexing operation since it must not contain code\n # sections that lead to cross-accelerator communication.\n if jax.process_index() == 0:\n log_images = dict(\n image=wandb.Image(np.asarray(gt_seq[args.seq_len - 1])),\n recon=wandb.Image(np.asarray(recon_seq[args.seq_len - 1])),\n true_vs_recon=wandb.Image(\n np.asarray(comparison_seq.astype(np.uint8))\n ),\n )\n if val_results:\n log_images.update(\n dict(\n val_image=wandb.Image(\n np.asarray(\n val_results[""gt_seq_val""][args.seq_len - 1]\n )\n ),\n val_recon=wandb.Image(\n np.asarray(\n val_results[""recon_seq_val""][\n args.seq_len - 1\n ]\n )\n ),\n val_true_vs_recon=wandb.Image(\n np.asarray(\n val_results[""val_comparison_seq""].astype(\n np.uint8\n )\n )\n ),\n )\n )\n if args.eval_full_frame:\n log_images.update(\n dict(\n val_full_frame=wandb.Image(\n np.asarray(\n val_results[""full_frame_seq_val""][\n args.seq_len - 1\n ]\n )\n ),\n val_true_vs_full_frame=wandb.Image(\n np.asarray(\n val_results[\n ""val_full_frame_comparison_seq""\n ].astype(np.uint8)\n )\n ),\n )\n )\n wandb.log(log_images)\n # --- Checkpointing ---\n if args.save_ckpt and step % args.log_checkpoint_interval == 0:\n assert checkpoint_manager is not None\n optimizer_state = nnx.state(optimizer)\n if val_iterator:\n ckpt_manager_args = ocp.args.Composite(\n model_state=ocp.args.PyTreeSave(optimizer_state), # type: ignore\n train_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n train_iterator # type: ignore\n ),\n val_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n val_iterator # type: ignore\n ),\n )\n else:\n ckpt_manager_args = ocp.args.Composite(\n model_state=ocp.args.PyTreeSave(optimizer_state), # type: ignore\n train_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n train_iterator # type: ignore\n ),\n )\n checkpoint_manager.save(step, args=ckpt_manager_args)\n print(f""Saved checkpoint at step {step}"")\n if step >= args.num_steps:\n break",python,selection_command +1685,1257421,"jasmine/train_dynamics.py",25727,5810," )\n val_comparison_seq = jnp.concatenate(\n (val_results[""gt_seq_val""], 
val_results[""recon_seq_val""]),\n axis=1,\n )\n val_results[""val_comparison_seq""] = einops.rearrange(\n val_comparison_seq * 255, ""t h w c -> h (t w) c""\n )\n if args.eval_full_frame:\n val_results[""full_frame_seq_val""] = val_results[\n ""full_frame""\n ][0].clip(0, 1)\n val_results[""val_full_frame_comparison_seq""] = (\n jnp.concatenate(\n (\n val_results[""gt_seq_val""],\n val_results[""full_frame_seq_val""],\n ),\n axis=1,\n )\n )\n val_results[""val_full_frame_comparison_seq""] = (\n einops.rearrange(\n val_results[""val_full_frame_comparison_seq""] * 255,\n ""t h w c -> h (t w) c"",\n )\n )\n # NOTE: Process-dependent control flow deliberately happens\n # after indexing operation since it must not contain code\n # sections that lead to cross-accelerator communication.\n if jax.process_index() == 0:\n log_images = dict(\n image=wandb.Image(np.asarray(gt_seq[args.seq_len - 1])),\n recon=wandb.Image(np.asarray(recon_seq[args.seq_len - 1])),\n true_vs_recon=wandb.Image(\n np.asarray(comparison_seq.astype(np.uint8))\n ),\n )\n if val_results:\n log_images.update(\n dict(\n val_image=wandb.Image(\n np.asarray(\n val_results[""gt_seq_val""][args.seq_len - 1]\n )\n ),\n val_recon=wandb.Image(\n np.asarray(\n val_results[""recon_seq_val""][\n args.seq_len - 1\n ]\n )\n ),\n val_true_vs_recon=wandb.Image(\n np.asarray(\n val_results[""val_comparison_seq""].astype(\n np.uint8\n )\n )\n ),\n )\n )\n if args.eval_full_frame:\n log_images.update(\n dict(\n val_full_frame=wandb.Image(\n np.asarray(\n val_results[""full_frame_seq_val""][\n args.seq_len - 1\n ]\n )\n ),\n val_true_vs_full_frame=wandb.Image(\n np.asarray(\n val_results[\n ""val_full_frame_comparison_seq""\n ].astype(np.uint8)\n )\n ),\n )\n )\n wandb.log(log_images)\n # --- Checkpointing ---\n if args.save_ckpt and step % args.log_checkpoint_interval == 0:\n assert checkpoint_manager is not None\n optimizer_state = nnx.state(optimizer)\n if val_iterator:\n ckpt_manager_args = ocp.args.Composite(\n model_state=ocp.args.PyTreeSave(optimizer_state), # type: ignore\n train_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n train_iterator # type: ignore\n ),\n val_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n val_iterator # type: ignore\n ),\n )\n else:\n ckpt_manager_args = ocp.args.Composite(\n model_state=ocp.args.PyTreeSave(optimizer_state), # type: ignore\n train_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n train_iterator # type: ignore\n ),\n )\n checkpoint_manager.save(step, args=ckpt_manager_args)\n print(f""Saved checkpoint at step {step}"")\n if step >= args.num_steps:\n break",python,selection_command +1686,1257445,"jasmine/train_dynamics.py",25694,5843," 0, 1\n )\n val_comparison_seq = jnp.concatenate(\n (val_results[""gt_seq_val""], val_results[""recon_seq_val""]),\n axis=1,\n )\n val_results[""val_comparison_seq""] = einops.rearrange(\n val_comparison_seq * 255, ""t h w c -> h (t w) c""\n )\n if args.eval_full_frame:\n val_results[""full_frame_seq_val""] = val_results[\n ""full_frame""\n ][0].clip(0, 1)\n val_results[""val_full_frame_comparison_seq""] = (\n jnp.concatenate(\n (\n val_results[""gt_seq_val""],\n val_results[""full_frame_seq_val""],\n ),\n axis=1,\n )\n )\n val_results[""val_full_frame_comparison_seq""] = (\n einops.rearrange(\n val_results[""val_full_frame_comparison_seq""] * 255,\n ""t h w c -> h (t w) c"",\n )\n )\n # NOTE: Process-dependent control flow deliberately happens\n # after indexing operation since it must not contain code\n # 
sections that lead to cross-accelerator communication.\n if jax.process_index() == 0:\n log_images = dict(\n image=wandb.Image(np.asarray(gt_seq[args.seq_len - 1])),\n recon=wandb.Image(np.asarray(recon_seq[args.seq_len - 1])),\n true_vs_recon=wandb.Image(\n np.asarray(comparison_seq.astype(np.uint8))\n ),\n )\n if val_results:\n log_images.update(\n dict(\n val_image=wandb.Image(\n np.asarray(\n val_results[""gt_seq_val""][args.seq_len - 1]\n )\n ),\n val_recon=wandb.Image(\n np.asarray(\n val_results[""recon_seq_val""][\n args.seq_len - 1\n ]\n )\n ),\n val_true_vs_recon=wandb.Image(\n np.asarray(\n val_results[""val_comparison_seq""].astype(\n np.uint8\n )\n )\n ),\n )\n )\n if args.eval_full_frame:\n log_images.update(\n dict(\n val_full_frame=wandb.Image(\n np.asarray(\n val_results[""full_frame_seq_val""][\n args.seq_len - 1\n ]\n )\n ),\n val_true_vs_full_frame=wandb.Image(\n np.asarray(\n val_results[\n ""val_full_frame_comparison_seq""\n ].astype(np.uint8)\n )\n ),\n )\n )\n wandb.log(log_images)\n # --- Checkpointing ---\n if args.save_ckpt and step % args.log_checkpoint_interval == 0:\n assert checkpoint_manager is not None\n optimizer_state = nnx.state(optimizer)\n if val_iterator:\n ckpt_manager_args = ocp.args.Composite(\n model_state=ocp.args.PyTreeSave(optimizer_state), # type: ignore\n train_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n train_iterator # type: ignore\n ),\n val_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n val_iterator # type: ignore\n ),\n )\n else:\n ckpt_manager_args = ocp.args.Composite(\n model_state=ocp.args.PyTreeSave(optimizer_state), # type: ignore\n train_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n train_iterator # type: ignore\n ),\n )\n checkpoint_manager.save(step, args=ckpt_manager_args)\n print(f""Saved checkpoint at step {step}"")\n if step >= args.num_steps:\n break",python,selection_command +1687,1257462,"jasmine/train_dynamics.py",25609,5928," val_results[""recon_seq_val""] = val_results[""recon""][0].clip(\n 0, 1\n )\n val_comparison_seq = jnp.concatenate(\n (val_results[""gt_seq_val""], val_results[""recon_seq_val""]),\n axis=1,\n )\n val_results[""val_comparison_seq""] = einops.rearrange(\n val_comparison_seq * 255, ""t h w c -> h (t w) c""\n )\n if args.eval_full_frame:\n val_results[""full_frame_seq_val""] = val_results[\n ""full_frame""\n ][0].clip(0, 1)\n val_results[""val_full_frame_comparison_seq""] = (\n jnp.concatenate(\n (\n val_results[""gt_seq_val""],\n val_results[""full_frame_seq_val""],\n ),\n axis=1,\n )\n )\n val_results[""val_full_frame_comparison_seq""] = (\n einops.rearrange(\n val_results[""val_full_frame_comparison_seq""] * 255,\n ""t h w c -> h (t w) c"",\n )\n )\n # NOTE: Process-dependent control flow deliberately happens\n # after indexing operation since it must not contain code\n # sections that lead to cross-accelerator communication.\n if jax.process_index() == 0:\n log_images = dict(\n image=wandb.Image(np.asarray(gt_seq[args.seq_len - 1])),\n recon=wandb.Image(np.asarray(recon_seq[args.seq_len - 1])),\n true_vs_recon=wandb.Image(\n np.asarray(comparison_seq.astype(np.uint8))\n ),\n )\n if val_results:\n log_images.update(\n dict(\n val_image=wandb.Image(\n np.asarray(\n val_results[""gt_seq_val""][args.seq_len - 1]\n )\n ),\n val_recon=wandb.Image(\n np.asarray(\n val_results[""recon_seq_val""][\n args.seq_len - 1\n ]\n )\n ),\n val_true_vs_recon=wandb.Image(\n np.asarray(\n val_results[""val_comparison_seq""].astype(\n np.uint8\n )\n )\n 
),\n )\n )\n if args.eval_full_frame:\n log_images.update(\n dict(\n val_full_frame=wandb.Image(\n np.asarray(\n val_results[""full_frame_seq_val""][\n args.seq_len - 1\n ]\n )\n ),\n val_true_vs_full_frame=wandb.Image(\n np.asarray(\n val_results[\n ""val_full_frame_comparison_seq""\n ].astype(np.uint8)\n )\n ),\n )\n )\n wandb.log(log_images)\n # --- Checkpointing ---\n if args.save_ckpt and step % args.log_checkpoint_interval == 0:\n assert checkpoint_manager is not None\n optimizer_state = nnx.state(optimizer)\n if val_iterator:\n ckpt_manager_args = ocp.args.Composite(\n model_state=ocp.args.PyTreeSave(optimizer_state), # type: ignore\n train_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n train_iterator # type: ignore\n ),\n val_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n val_iterator # type: ignore\n ),\n )\n else:\n ckpt_manager_args = ocp.args.Composite(\n model_state=ocp.args.PyTreeSave(optimizer_state), # type: ignore\n train_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n train_iterator # type: ignore\n ),\n )\n checkpoint_manager.save(step, args=ckpt_manager_args)\n print(f""Saved checkpoint at step {step}"")\n if step >= args.num_steps:\n break",python,selection_command +1688,1257502,"jasmine/train_dynamics.py",25583,5954," )\n val_results[""recon_seq_val""] = val_results[""recon""][0].clip(\n 0, 1\n )\n val_comparison_seq = jnp.concatenate(\n (val_results[""gt_seq_val""], val_results[""recon_seq_val""]),\n axis=1,\n )\n val_results[""val_comparison_seq""] = einops.rearrange(\n val_comparison_seq * 255, ""t h w c -> h (t w) c""\n )\n if args.eval_full_frame:\n val_results[""full_frame_seq_val""] = val_results[\n ""full_frame""\n ][0].clip(0, 1)\n val_results[""val_full_frame_comparison_seq""] = (\n jnp.concatenate(\n (\n val_results[""gt_seq_val""],\n val_results[""full_frame_seq_val""],\n ),\n axis=1,\n )\n )\n val_results[""val_full_frame_comparison_seq""] = (\n einops.rearrange(\n val_results[""val_full_frame_comparison_seq""] * 255,\n ""t h w c -> h (t w) c"",\n )\n )\n # NOTE: Process-dependent control flow deliberately happens\n # after indexing operation since it must not contain code\n # sections that lead to cross-accelerator communication.\n if jax.process_index() == 0:\n log_images = dict(\n image=wandb.Image(np.asarray(gt_seq[args.seq_len - 1])),\n recon=wandb.Image(np.asarray(recon_seq[args.seq_len - 1])),\n true_vs_recon=wandb.Image(\n np.asarray(comparison_seq.astype(np.uint8))\n ),\n )\n if val_results:\n log_images.update(\n dict(\n val_image=wandb.Image(\n np.asarray(\n val_results[""gt_seq_val""][args.seq_len - 1]\n )\n ),\n val_recon=wandb.Image(\n np.asarray(\n val_results[""recon_seq_val""][\n args.seq_len - 1\n ]\n )\n ),\n val_true_vs_recon=wandb.Image(\n np.asarray(\n val_results[""val_comparison_seq""].astype(\n np.uint8\n )\n )\n ),\n )\n )\n if args.eval_full_frame:\n log_images.update(\n dict(\n val_full_frame=wandb.Image(\n np.asarray(\n val_results[""full_frame_seq_val""][\n args.seq_len - 1\n ]\n )\n ),\n val_true_vs_full_frame=wandb.Image(\n np.asarray(\n val_results[\n ""val_full_frame_comparison_seq""\n ].astype(np.uint8)\n )\n ),\n )\n )\n wandb.log(log_images)\n # --- Checkpointing ---\n if args.save_ckpt and step % args.log_checkpoint_interval == 0:\n assert checkpoint_manager is not None\n optimizer_state = nnx.state(optimizer)\n if val_iterator:\n ckpt_manager_args = ocp.args.Composite(\n model_state=ocp.args.PyTreeSave(optimizer_state), # type: ignore\n 
train_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n train_iterator # type: ignore\n ),\n val_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n val_iterator # type: ignore\n ),\n )\n else:\n ckpt_manager_args = ocp.args.Composite(\n model_state=ocp.args.PyTreeSave(optimizer_state), # type: ignore\n train_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n train_iterator # type: ignore\n ),\n )\n checkpoint_manager.save(step, args=ckpt_manager_args)\n print(f""Saved checkpoint at step {step}"")\n if step >= args.num_steps:\n break",python,selection_command +1689,1257524,"jasmine/train_dynamics.py",25547,5990," / 255.0\n )\n val_results[""recon_seq_val""] = val_results[""recon""][0].clip(\n 0, 1\n )\n val_comparison_seq = jnp.concatenate(\n (val_results[""gt_seq_val""], val_results[""recon_seq_val""]),\n axis=1,\n )\n val_results[""val_comparison_seq""] = einops.rearrange(\n val_comparison_seq * 255, ""t h w c -> h (t w) c""\n )\n if args.eval_full_frame:\n val_results[""full_frame_seq_val""] = val_results[\n ""full_frame""\n ][0].clip(0, 1)\n val_results[""val_full_frame_comparison_seq""] = (\n jnp.concatenate(\n (\n val_results[""gt_seq_val""],\n val_results[""full_frame_seq_val""],\n ),\n axis=1,\n )\n )\n val_results[""val_full_frame_comparison_seq""] = (\n einops.rearrange(\n val_results[""val_full_frame_comparison_seq""] * 255,\n ""t h w c -> h (t w) c"",\n )\n )\n # NOTE: Process-dependent control flow deliberately happens\n # after indexing operation since it must not contain code\n # sections that lead to cross-accelerator communication.\n if jax.process_index() == 0:\n log_images = dict(\n image=wandb.Image(np.asarray(gt_seq[args.seq_len - 1])),\n recon=wandb.Image(np.asarray(recon_seq[args.seq_len - 1])),\n true_vs_recon=wandb.Image(\n np.asarray(comparison_seq.astype(np.uint8))\n ),\n )\n if val_results:\n log_images.update(\n dict(\n val_image=wandb.Image(\n np.asarray(\n val_results[""gt_seq_val""][args.seq_len - 1]\n )\n ),\n val_recon=wandb.Image(\n np.asarray(\n val_results[""recon_seq_val""][\n args.seq_len - 1\n ]\n )\n ),\n val_true_vs_recon=wandb.Image(\n np.asarray(\n val_results[""val_comparison_seq""].astype(\n np.uint8\n )\n )\n ),\n )\n )\n if args.eval_full_frame:\n log_images.update(\n dict(\n val_full_frame=wandb.Image(\n np.asarray(\n val_results[""full_frame_seq_val""][\n args.seq_len - 1\n ]\n )\n ),\n val_true_vs_full_frame=wandb.Image(\n np.asarray(\n val_results[\n ""val_full_frame_comparison_seq""\n ].astype(np.uint8)\n )\n ),\n )\n )\n wandb.log(log_images)\n # --- Checkpointing ---\n if args.save_ckpt and step % args.log_checkpoint_interval == 0:\n assert checkpoint_manager is not None\n optimizer_state = nnx.state(optimizer)\n if val_iterator:\n ckpt_manager_args = ocp.args.Composite(\n model_state=ocp.args.PyTreeSave(optimizer_state), # type: ignore\n train_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n train_iterator # type: ignore\n ),\n val_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n val_iterator # type: ignore\n ),\n )\n else:\n ckpt_manager_args = ocp.args.Composite(\n model_state=ocp.args.PyTreeSave(optimizer_state), # type: ignore\n train_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n train_iterator # type: ignore\n ),\n )\n checkpoint_manager.save(step, args=ckpt_manager_args)\n print(f""Saved checkpoint at step {step}"")\n if step >= args.num_steps:\n break",python,selection_command 
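The selection events above sweep repeatedly over the image-logging block of train_dynamics.py, in which ground-truth and reconstructed frame sequences are normalized to [0, 1], stacked along the height axis, and tiled into a single comparison strip with einops before being logged to wandb. The following is a minimal, self-contained sketch of that pattern, not part of the recording: it substitutes NumPy for the recorded jax.numpy calls, and the shapes, seed data, and function name are illustrative assumptions.

import numpy as np
import einops

def build_comparison_strip(gt_seq: np.ndarray, recon_seq: np.ndarray) -> np.ndarray:
    # gt_seq, recon_seq: (t, h, w, c) floats in [0, 1].
    # Stack ground truth above reconstruction per frame: (t, 2h, w, c).
    comparison = np.concatenate((gt_seq, recon_seq), axis=1)
    # Tile the t frames side by side into one (2h, t*w, c) image,
    # scaling to 0..255 as in the recorded code.
    strip = einops.rearrange(comparison * 255, "t h w c -> h (t w) c")
    return strip.astype(np.uint8)

# Usage with a toy 4-frame sequence of 8x8 RGB frames.
t, h, w, c = 4, 8, 8, 3
gt = np.random.rand(t, h, w, c)
recon = np.clip(gt + 0.1 * np.random.randn(t, h, w, c), 0.0, 1.0)
strip = build_comparison_strip(gt, recon)
assert strip.shape == (2 * h, t * w, c)

The rearrange pattern "t h w c -> h (t w) c" is what makes the logged image a single horizontal strip rather than a batch of frames, which is why the recorded code multiplies by 255 and casts to uint8 only at logging time.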
+1690,1257565,"jasmine/train_dynamics.py",25462,6075," val_results[""gt_batch""][""videos""][0].astype(jnp.float32)\n / 255.0\n )\n val_results[""recon_seq_val""] = val_results[""recon""][0].clip(\n 0, 1\n )\n val_comparison_seq = jnp.concatenate(\n (val_results[""gt_seq_val""], val_results[""recon_seq_val""]),\n axis=1,\n )\n val_results[""val_comparison_seq""] = einops.rearrange(\n val_comparison_seq * 255, ""t h w c -> h (t w) c""\n )\n if args.eval_full_frame:\n val_results[""full_frame_seq_val""] = val_results[\n ""full_frame""\n ][0].clip(0, 1)\n val_results[""val_full_frame_comparison_seq""] = (\n jnp.concatenate(\n (\n val_results[""gt_seq_val""],\n val_results[""full_frame_seq_val""],\n ),\n axis=1,\n )\n )\n val_results[""val_full_frame_comparison_seq""] = (\n einops.rearrange(\n val_results[""val_full_frame_comparison_seq""] * 255,\n ""t h w c -> h (t w) c"",\n )\n )\n # NOTE: Process-dependent control flow deliberately happens\n # after indexing operation since it must not contain code\n # sections that lead to cross-accelerator communication.\n if jax.process_index() == 0:\n log_images = dict(\n image=wandb.Image(np.asarray(gt_seq[args.seq_len - 1])),\n recon=wandb.Image(np.asarray(recon_seq[args.seq_len - 1])),\n true_vs_recon=wandb.Image(\n np.asarray(comparison_seq.astype(np.uint8))\n ),\n )\n if val_results:\n log_images.update(\n dict(\n val_image=wandb.Image(\n np.asarray(\n val_results[""gt_seq_val""][args.seq_len - 1]\n )\n ),\n val_recon=wandb.Image(\n np.asarray(\n val_results[""recon_seq_val""][\n args.seq_len - 1\n ]\n )\n ),\n val_true_vs_recon=wandb.Image(\n np.asarray(\n val_results[""val_comparison_seq""].astype(\n np.uint8\n )\n )\n ),\n )\n )\n if args.eval_full_frame:\n log_images.update(\n dict(\n val_full_frame=wandb.Image(\n np.asarray(\n val_results[""full_frame_seq_val""][\n args.seq_len - 1\n ]\n )\n ),\n val_true_vs_full_frame=wandb.Image(\n np.asarray(\n val_results[\n ""val_full_frame_comparison_seq""\n ].astype(np.uint8)\n )\n ),\n )\n )\n wandb.log(log_images)\n # --- Checkpointing ---\n if args.save_ckpt and step % args.log_checkpoint_interval == 0:\n assert checkpoint_manager is not None\n optimizer_state = nnx.state(optimizer)\n if val_iterator:\n ckpt_manager_args = ocp.args.Composite(\n model_state=ocp.args.PyTreeSave(optimizer_state), # type: ignore\n train_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n train_iterator # type: ignore\n ),\n val_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n val_iterator # type: ignore\n ),\n )\n else:\n ckpt_manager_args = ocp.args.Composite(\n model_state=ocp.args.PyTreeSave(optimizer_state), # type: ignore\n train_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n train_iterator # type: ignore\n ),\n )\n checkpoint_manager.save(step, args=ckpt_manager_args)\n print(f""Saved checkpoint at step {step}"")\n if step >= args.num_steps:\n break",python,selection_command +1691,1257583,"jasmine/train_dynamics.py",25408,6129," val_results[""gt_seq_val""] = (\n val_results[""gt_batch""][""videos""][0].astype(jnp.float32)\n / 255.0\n )\n val_results[""recon_seq_val""] = val_results[""recon""][0].clip(\n 0, 1\n )\n val_comparison_seq = jnp.concatenate(\n (val_results[""gt_seq_val""], val_results[""recon_seq_val""]),\n axis=1,\n )\n val_results[""val_comparison_seq""] = einops.rearrange(\n val_comparison_seq * 255, ""t h w c -> h (t w) c""\n )\n if args.eval_full_frame:\n val_results[""full_frame_seq_val""] = val_results[\n ""full_frame""\n ][0].clip(0, 1)\n 
val_results[""val_full_frame_comparison_seq""] = (\n jnp.concatenate(\n (\n val_results[""gt_seq_val""],\n val_results[""full_frame_seq_val""],\n ),\n axis=1,\n )\n )\n val_results[""val_full_frame_comparison_seq""] = (\n einops.rearrange(\n val_results[""val_full_frame_comparison_seq""] * 255,\n ""t h w c -> h (t w) c"",\n )\n )\n # NOTE: Process-dependent control flow deliberately happens\n # after indexing operation since it must not contain code\n # sections that lead to cross-accelerator communication.\n if jax.process_index() == 0:\n log_images = dict(\n image=wandb.Image(np.asarray(gt_seq[args.seq_len - 1])),\n recon=wandb.Image(np.asarray(recon_seq[args.seq_len - 1])),\n true_vs_recon=wandb.Image(\n np.asarray(comparison_seq.astype(np.uint8))\n ),\n )\n if val_results:\n log_images.update(\n dict(\n val_image=wandb.Image(\n np.asarray(\n val_results[""gt_seq_val""][args.seq_len - 1]\n )\n ),\n val_recon=wandb.Image(\n np.asarray(\n val_results[""recon_seq_val""][\n args.seq_len - 1\n ]\n )\n ),\n val_true_vs_recon=wandb.Image(\n np.asarray(\n val_results[""val_comparison_seq""].astype(\n np.uint8\n )\n )\n ),\n )\n )\n if args.eval_full_frame:\n log_images.update(\n dict(\n val_full_frame=wandb.Image(\n np.asarray(\n val_results[""full_frame_seq_val""][\n args.seq_len - 1\n ]\n )\n ),\n val_true_vs_full_frame=wandb.Image(\n np.asarray(\n val_results[\n ""val_full_frame_comparison_seq""\n ].astype(np.uint8)\n )\n ),\n )\n )\n wandb.log(log_images)\n # --- Checkpointing ---\n if args.save_ckpt and step % args.log_checkpoint_interval == 0:\n assert checkpoint_manager is not None\n optimizer_state = nnx.state(optimizer)\n if val_iterator:\n ckpt_manager_args = ocp.args.Composite(\n model_state=ocp.args.PyTreeSave(optimizer_state), # type: ignore\n train_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n train_iterator # type: ignore\n ),\n val_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n val_iterator # type: ignore\n ),\n )\n else:\n ckpt_manager_args = ocp.args.Composite(\n model_state=ocp.args.PyTreeSave(optimizer_state), # type: ignore\n train_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n train_iterator # type: ignore\n ),\n )\n checkpoint_manager.save(step, args=ckpt_manager_args)\n print(f""Saved checkpoint at step {step}"")\n if step >= args.num_steps:\n break",python,selection_command +1692,1257646,"jasmine/train_dynamics.py",25372,6165," if val_results:\n val_results[""gt_seq_val""] = (\n val_results[""gt_batch""][""videos""][0].astype(jnp.float32)\n / 255.0\n )\n val_results[""recon_seq_val""] = val_results[""recon""][0].clip(\n 0, 1\n )\n val_comparison_seq = jnp.concatenate(\n (val_results[""gt_seq_val""], val_results[""recon_seq_val""]),\n axis=1,\n )\n val_results[""val_comparison_seq""] = einops.rearrange(\n val_comparison_seq * 255, ""t h w c -> h (t w) c""\n )\n if args.eval_full_frame:\n val_results[""full_frame_seq_val""] = val_results[\n ""full_frame""\n ][0].clip(0, 1)\n val_results[""val_full_frame_comparison_seq""] = (\n jnp.concatenate(\n (\n val_results[""gt_seq_val""],\n val_results[""full_frame_seq_val""],\n ),\n axis=1,\n )\n )\n val_results[""val_full_frame_comparison_seq""] = (\n einops.rearrange(\n val_results[""val_full_frame_comparison_seq""] * 255,\n ""t h w c -> h (t w) c"",\n )\n )\n # NOTE: Process-dependent control flow deliberately happens\n # after indexing operation since it must not contain code\n # sections that lead to cross-accelerator communication.\n if jax.process_index() == 0:\n 
log_images = dict(\n image=wandb.Image(np.asarray(gt_seq[args.seq_len - 1])),\n recon=wandb.Image(np.asarray(recon_seq[args.seq_len - 1])),\n true_vs_recon=wandb.Image(\n np.asarray(comparison_seq.astype(np.uint8))\n ),\n )\n if val_results:\n log_images.update(\n dict(\n val_image=wandb.Image(\n np.asarray(\n val_results[""gt_seq_val""][args.seq_len - 1]\n )\n ),\n val_recon=wandb.Image(\n np.asarray(\n val_results[""recon_seq_val""][\n args.seq_len - 1\n ]\n )\n ),\n val_true_vs_recon=wandb.Image(\n np.asarray(\n val_results[""val_comparison_seq""].astype(\n np.uint8\n )\n )\n ),\n )\n )\n if args.eval_full_frame:\n log_images.update(\n dict(\n val_full_frame=wandb.Image(\n np.asarray(\n val_results[""full_frame_seq_val""][\n args.seq_len - 1\n ]\n )\n ),\n val_true_vs_full_frame=wandb.Image(\n np.asarray(\n val_results[\n ""val_full_frame_comparison_seq""\n ].astype(np.uint8)\n )\n ),\n )\n )\n wandb.log(log_images)\n # --- Checkpointing ---\n if args.save_ckpt and step % args.log_checkpoint_interval == 0:\n assert checkpoint_manager is not None\n optimizer_state = nnx.state(optimizer)\n if val_iterator:\n ckpt_manager_args = ocp.args.Composite(\n model_state=ocp.args.PyTreeSave(optimizer_state), # type: ignore\n train_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n train_iterator # type: ignore\n ),\n val_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n val_iterator # type: ignore\n ),\n )\n else:\n ckpt_manager_args = ocp.args.Composite(\n model_state=ocp.args.PyTreeSave(optimizer_state), # type: ignore\n train_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n train_iterator # type: ignore\n ),\n )\n checkpoint_manager.save(step, args=ckpt_manager_args)\n print(f""Saved checkpoint at step {step}"")\n if step >= args.num_steps:\n break",python,selection_command +1693,1257690,"jasmine/train_dynamics.py",25350,6187," )\n if val_results:\n val_results[""gt_seq_val""] = (\n val_results[""gt_batch""][""videos""][0].astype(jnp.float32)\n / 255.0\n )\n val_results[""recon_seq_val""] = val_results[""recon""][0].clip(\n 0, 1\n )\n val_comparison_seq = jnp.concatenate(\n (val_results[""gt_seq_val""], val_results[""recon_seq_val""]),\n axis=1,\n )\n val_results[""val_comparison_seq""] = einops.rearrange(\n val_comparison_seq * 255, ""t h w c -> h (t w) c""\n )\n if args.eval_full_frame:\n val_results[""full_frame_seq_val""] = val_results[\n ""full_frame""\n ][0].clip(0, 1)\n val_results[""val_full_frame_comparison_seq""] = (\n jnp.concatenate(\n (\n val_results[""gt_seq_val""],\n val_results[""full_frame_seq_val""],\n ),\n axis=1,\n )\n )\n val_results[""val_full_frame_comparison_seq""] = (\n einops.rearrange(\n val_results[""val_full_frame_comparison_seq""] * 255,\n ""t h w c -> h (t w) c"",\n )\n )\n # NOTE: Process-dependent control flow deliberately happens\n # after indexing operation since it must not contain code\n # sections that lead to cross-accelerator communication.\n if jax.process_index() == 0:\n log_images = dict(\n image=wandb.Image(np.asarray(gt_seq[args.seq_len - 1])),\n recon=wandb.Image(np.asarray(recon_seq[args.seq_len - 1])),\n true_vs_recon=wandb.Image(\n np.asarray(comparison_seq.astype(np.uint8))\n ),\n )\n if val_results:\n log_images.update(\n dict(\n val_image=wandb.Image(\n np.asarray(\n val_results[""gt_seq_val""][args.seq_len - 1]\n )\n ),\n val_recon=wandb.Image(\n np.asarray(\n val_results[""recon_seq_val""][\n args.seq_len - 1\n ]\n )\n ),\n val_true_vs_recon=wandb.Image(\n np.asarray(\n 
val_results[""val_comparison_seq""].astype(\n np.uint8\n )\n )\n ),\n )\n )\n if args.eval_full_frame:\n log_images.update(\n dict(\n val_full_frame=wandb.Image(\n np.asarray(\n val_results[""full_frame_seq_val""][\n args.seq_len - 1\n ]\n )\n ),\n val_true_vs_full_frame=wandb.Image(\n np.asarray(\n val_results[\n ""val_full_frame_comparison_seq""\n ].astype(np.uint8)\n )\n ),\n )\n )\n wandb.log(log_images)\n # --- Checkpointing ---\n if args.save_ckpt and step % args.log_checkpoint_interval == 0:\n assert checkpoint_manager is not None\n optimizer_state = nnx.state(optimizer)\n if val_iterator:\n ckpt_manager_args = ocp.args.Composite(\n model_state=ocp.args.PyTreeSave(optimizer_state), # type: ignore\n train_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n train_iterator # type: ignore\n ),\n val_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n val_iterator # type: ignore\n ),\n )\n else:\n ckpt_manager_args = ocp.args.Composite(\n model_state=ocp.args.PyTreeSave(optimizer_state), # type: ignore\n train_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n train_iterator # type: ignore\n ),\n )\n checkpoint_manager.save(step, args=ckpt_manager_args)\n print(f""Saved checkpoint at step {step}"")\n if step >= args.num_steps:\n break",python,selection_command +1694,1257729,"jasmine/train_dynamics.py",25281,6256," comparison_seq * 255, ""t h w c -> h (t w) c""\n )\n if val_results:\n val_results[""gt_seq_val""] = (\n val_results[""gt_batch""][""videos""][0].astype(jnp.float32)\n / 255.0\n )\n val_results[""recon_seq_val""] = val_results[""recon""][0].clip(\n 0, 1\n )\n val_comparison_seq = jnp.concatenate(\n (val_results[""gt_seq_val""], val_results[""recon_seq_val""]),\n axis=1,\n )\n val_results[""val_comparison_seq""] = einops.rearrange(\n val_comparison_seq * 255, ""t h w c -> h (t w) c""\n )\n if args.eval_full_frame:\n val_results[""full_frame_seq_val""] = val_results[\n ""full_frame""\n ][0].clip(0, 1)\n val_results[""val_full_frame_comparison_seq""] = (\n jnp.concatenate(\n (\n val_results[""gt_seq_val""],\n val_results[""full_frame_seq_val""],\n ),\n axis=1,\n )\n )\n val_results[""val_full_frame_comparison_seq""] = (\n einops.rearrange(\n val_results[""val_full_frame_comparison_seq""] * 255,\n ""t h w c -> h (t w) c"",\n )\n )\n # NOTE: Process-dependent control flow deliberately happens\n # after indexing operation since it must not contain code\n # sections that lead to cross-accelerator communication.\n if jax.process_index() == 0:\n log_images = dict(\n image=wandb.Image(np.asarray(gt_seq[args.seq_len - 1])),\n recon=wandb.Image(np.asarray(recon_seq[args.seq_len - 1])),\n true_vs_recon=wandb.Image(\n np.asarray(comparison_seq.astype(np.uint8))\n ),\n )\n if val_results:\n log_images.update(\n dict(\n val_image=wandb.Image(\n np.asarray(\n val_results[""gt_seq_val""][args.seq_len - 1]\n )\n ),\n val_recon=wandb.Image(\n np.asarray(\n val_results[""recon_seq_val""][\n args.seq_len - 1\n ]\n )\n ),\n val_true_vs_recon=wandb.Image(\n np.asarray(\n val_results[""val_comparison_seq""].astype(\n np.uint8\n )\n )\n ),\n )\n )\n if args.eval_full_frame:\n log_images.update(\n dict(\n val_full_frame=wandb.Image(\n np.asarray(\n val_results[""full_frame_seq_val""][\n args.seq_len - 1\n ]\n )\n ),\n val_true_vs_full_frame=wandb.Image(\n np.asarray(\n val_results[\n ""val_full_frame_comparison_seq""\n ].astype(np.uint8)\n )\n ),\n )\n )\n wandb.log(log_images)\n # --- Checkpointing ---\n if args.save_ckpt and step % 
args.log_checkpoint_interval == 0:\n assert checkpoint_manager is not None\n optimizer_state = nnx.state(optimizer)\n if val_iterator:\n ckpt_manager_args = ocp.args.Composite(\n model_state=ocp.args.PyTreeSave(optimizer_state), # type: ignore\n train_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n train_iterator # type: ignore\n ),\n val_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n val_iterator # type: ignore\n ),\n )\n else:\n ckpt_manager_args = ocp.args.Composite(\n model_state=ocp.args.PyTreeSave(optimizer_state), # type: ignore\n train_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n train_iterator # type: ignore\n ),\n )\n checkpoint_manager.save(step, args=ckpt_manager_args)\n print(f""Saved checkpoint at step {step}"")\n if step >= args.num_steps:\n break",python,selection_command +1695,1257730,"jasmine/train_dynamics.py",25226,6311," comparison_seq = einops.rearrange(\n comparison_seq * 255, ""t h w c -> h (t w) c""\n )\n if val_results:\n val_results[""gt_seq_val""] = (\n val_results[""gt_batch""][""videos""][0].astype(jnp.float32)\n / 255.0\n )\n val_results[""recon_seq_val""] = val_results[""recon""][0].clip(\n 0, 1\n )\n val_comparison_seq = jnp.concatenate(\n (val_results[""gt_seq_val""], val_results[""recon_seq_val""]),\n axis=1,\n )\n val_results[""val_comparison_seq""] = einops.rearrange(\n val_comparison_seq * 255, ""t h w c -> h (t w) c""\n )\n if args.eval_full_frame:\n val_results[""full_frame_seq_val""] = val_results[\n ""full_frame""\n ][0].clip(0, 1)\n val_results[""val_full_frame_comparison_seq""] = (\n jnp.concatenate(\n (\n val_results[""gt_seq_val""],\n val_results[""full_frame_seq_val""],\n ),\n axis=1,\n )\n )\n val_results[""val_full_frame_comparison_seq""] = (\n einops.rearrange(\n val_results[""val_full_frame_comparison_seq""] * 255,\n ""t h w c -> h (t w) c"",\n )\n )\n # NOTE: Process-dependent control flow deliberately happens\n # after indexing operation since it must not contain code\n # sections that lead to cross-accelerator communication.\n if jax.process_index() == 0:\n log_images = dict(\n image=wandb.Image(np.asarray(gt_seq[args.seq_len - 1])),\n recon=wandb.Image(np.asarray(recon_seq[args.seq_len - 1])),\n true_vs_recon=wandb.Image(\n np.asarray(comparison_seq.astype(np.uint8))\n ),\n )\n if val_results:\n log_images.update(\n dict(\n val_image=wandb.Image(\n np.asarray(\n val_results[""gt_seq_val""][args.seq_len - 1]\n )\n ),\n val_recon=wandb.Image(\n np.asarray(\n val_results[""recon_seq_val""][\n args.seq_len - 1\n ]\n )\n ),\n val_true_vs_recon=wandb.Image(\n np.asarray(\n val_results[""val_comparison_seq""].astype(\n np.uint8\n )\n )\n ),\n )\n )\n if args.eval_full_frame:\n log_images.update(\n dict(\n val_full_frame=wandb.Image(\n np.asarray(\n val_results[""full_frame_seq_val""][\n args.seq_len - 1\n ]\n )\n ),\n val_true_vs_full_frame=wandb.Image(\n np.asarray(\n val_results[\n ""val_full_frame_comparison_seq""\n ].astype(np.uint8)\n )\n ),\n )\n )\n wandb.log(log_images)\n # --- Checkpointing ---\n if args.save_ckpt and step % args.log_checkpoint_interval == 0:\n assert checkpoint_manager is not None\n optimizer_state = nnx.state(optimizer)\n if val_iterator:\n ckpt_manager_args = ocp.args.Composite(\n model_state=ocp.args.PyTreeSave(optimizer_state), # type: ignore\n train_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n train_iterator # type: ignore\n ),\n val_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n val_iterator # type: ignore\n 
),\n )\n else:\n ckpt_manager_args = ocp.args.Composite(\n model_state=ocp.args.PyTreeSave(optimizer_state), # type: ignore\n train_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n train_iterator # type: ignore\n ),\n )\n checkpoint_manager.save(step, args=ckpt_manager_args)\n print(f""Saved checkpoint at step {step}"")\n if step >= args.num_steps:\n break",python,selection_command +1696,1257733,"jasmine/train_dynamics.py",25144,6393," comparison_seq = jnp.concatenate((gt_seq, recon_seq), axis=1)\n comparison_seq = einops.rearrange(\n comparison_seq * 255, ""t h w c -> h (t w) c""\n )\n if val_results:\n val_results[""gt_seq_val""] = (\n val_results[""gt_batch""][""videos""][0].astype(jnp.float32)\n / 255.0\n )\n val_results[""recon_seq_val""] = val_results[""recon""][0].clip(\n 0, 1\n )\n val_comparison_seq = jnp.concatenate(\n (val_results[""gt_seq_val""], val_results[""recon_seq_val""]),\n axis=1,\n )\n val_results[""val_comparison_seq""] = einops.rearrange(\n val_comparison_seq * 255, ""t h w c -> h (t w) c""\n )\n if args.eval_full_frame:\n val_results[""full_frame_seq_val""] = val_results[\n ""full_frame""\n ][0].clip(0, 1)\n val_results[""val_full_frame_comparison_seq""] = (\n jnp.concatenate(\n (\n val_results[""gt_seq_val""],\n val_results[""full_frame_seq_val""],\n ),\n axis=1,\n )\n )\n val_results[""val_full_frame_comparison_seq""] = (\n einops.rearrange(\n val_results[""val_full_frame_comparison_seq""] * 255,\n ""t h w c -> h (t w) c"",\n )\n )\n # NOTE: Process-dependent control flow deliberately happens\n # after indexing operation since it must not contain code\n # sections that lead to cross-accelerator communication.\n if jax.process_index() == 0:\n log_images = dict(\n image=wandb.Image(np.asarray(gt_seq[args.seq_len - 1])),\n recon=wandb.Image(np.asarray(recon_seq[args.seq_len - 1])),\n true_vs_recon=wandb.Image(\n np.asarray(comparison_seq.astype(np.uint8))\n ),\n )\n if val_results:\n log_images.update(\n dict(\n val_image=wandb.Image(\n np.asarray(\n val_results[""gt_seq_val""][args.seq_len - 1]\n )\n ),\n val_recon=wandb.Image(\n np.asarray(\n val_results[""recon_seq_val""][\n args.seq_len - 1\n ]\n )\n ),\n val_true_vs_recon=wandb.Image(\n np.asarray(\n val_results[""val_comparison_seq""].astype(\n np.uint8\n )\n )\n ),\n )\n )\n if args.eval_full_frame:\n log_images.update(\n dict(\n val_full_frame=wandb.Image(\n np.asarray(\n val_results[""full_frame_seq_val""][\n args.seq_len - 1\n ]\n )\n ),\n val_true_vs_full_frame=wandb.Image(\n np.asarray(\n val_results[\n ""val_full_frame_comparison_seq""\n ].astype(np.uint8)\n )\n ),\n )\n )\n wandb.log(log_images)\n # --- Checkpointing ---\n if args.save_ckpt and step % args.log_checkpoint_interval == 0:\n assert checkpoint_manager is not None\n optimizer_state = nnx.state(optimizer)\n if val_iterator:\n ckpt_manager_args = ocp.args.Composite(\n model_state=ocp.args.PyTreeSave(optimizer_state), # type: ignore\n train_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n train_iterator # type: ignore\n ),\n val_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n val_iterator # type: ignore\n ),\n )\n else:\n ckpt_manager_args = ocp.args.Composite(\n model_state=ocp.args.PyTreeSave(optimizer_state), # type: ignore\n train_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n train_iterator # type: ignore\n ),\n )\n checkpoint_manager.save(step, args=ckpt_manager_args)\n print(f""Saved checkpoint at step {step}"")\n if step >= args.num_steps:\n 
break",python,selection_command +1697,1257772,"jasmine/train_dynamics.py",25092,6445," recon_seq = recon[0].clip(0, 1)\n comparison_seq = jnp.concatenate((gt_seq, recon_seq), axis=1)\n comparison_seq = einops.rearrange(\n comparison_seq * 255, ""t h w c -> h (t w) c""\n )\n if val_results:\n val_results[""gt_seq_val""] = (\n val_results[""gt_batch""][""videos""][0].astype(jnp.float32)\n / 255.0\n )\n val_results[""recon_seq_val""] = val_results[""recon""][0].clip(\n 0, 1\n )\n val_comparison_seq = jnp.concatenate(\n (val_results[""gt_seq_val""], val_results[""recon_seq_val""]),\n axis=1,\n )\n val_results[""val_comparison_seq""] = einops.rearrange(\n val_comparison_seq * 255, ""t h w c -> h (t w) c""\n )\n if args.eval_full_frame:\n val_results[""full_frame_seq_val""] = val_results[\n ""full_frame""\n ][0].clip(0, 1)\n val_results[""val_full_frame_comparison_seq""] = (\n jnp.concatenate(\n (\n val_results[""gt_seq_val""],\n val_results[""full_frame_seq_val""],\n ),\n axis=1,\n )\n )\n val_results[""val_full_frame_comparison_seq""] = (\n einops.rearrange(\n val_results[""val_full_frame_comparison_seq""] * 255,\n ""t h w c -> h (t w) c"",\n )\n )\n # NOTE: Process-dependent control flow deliberately happens\n # after indexing operation since it must not contain code\n # sections that lead to cross-accelerator communication.\n if jax.process_index() == 0:\n log_images = dict(\n image=wandb.Image(np.asarray(gt_seq[args.seq_len - 1])),\n recon=wandb.Image(np.asarray(recon_seq[args.seq_len - 1])),\n true_vs_recon=wandb.Image(\n np.asarray(comparison_seq.astype(np.uint8))\n ),\n )\n if val_results:\n log_images.update(\n dict(\n val_image=wandb.Image(\n np.asarray(\n val_results[""gt_seq_val""][args.seq_len - 1]\n )\n ),\n val_recon=wandb.Image(\n np.asarray(\n val_results[""recon_seq_val""][\n args.seq_len - 1\n ]\n )\n ),\n val_true_vs_recon=wandb.Image(\n np.asarray(\n val_results[""val_comparison_seq""].astype(\n np.uint8\n )\n )\n ),\n )\n )\n if args.eval_full_frame:\n log_images.update(\n dict(\n val_full_frame=wandb.Image(\n np.asarray(\n val_results[""full_frame_seq_val""][\n args.seq_len - 1\n ]\n )\n ),\n val_true_vs_full_frame=wandb.Image(\n np.asarray(\n val_results[\n ""val_full_frame_comparison_seq""\n ].astype(np.uint8)\n )\n ),\n )\n )\n wandb.log(log_images)\n # --- Checkpointing ---\n if args.save_ckpt and step % args.log_checkpoint_interval == 0:\n assert checkpoint_manager is not None\n optimizer_state = nnx.state(optimizer)\n if val_iterator:\n ckpt_manager_args = ocp.args.Composite(\n model_state=ocp.args.PyTreeSave(optimizer_state), # type: ignore\n train_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n train_iterator # type: ignore\n ),\n val_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n val_iterator # type: ignore\n ),\n )\n else:\n ckpt_manager_args = ocp.args.Composite(\n model_state=ocp.args.PyTreeSave(optimizer_state), # type: ignore\n train_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n train_iterator # type: ignore\n ),\n )\n checkpoint_manager.save(step, args=ckpt_manager_args)\n print(f""Saved checkpoint at step {step}"")\n if step >= args.num_steps:\n break",python,selection_command +1698,1257814,"jasmine/train_dynamics.py",25016,6521," gt_seq = batch[""videos""][0].astype(jnp.float32) / 255.0\n recon_seq = recon[0].clip(0, 1)\n comparison_seq = jnp.concatenate((gt_seq, recon_seq), axis=1)\n comparison_seq = einops.rearrange(\n comparison_seq * 255, ""t h w c -> h (t w) c""\n )\n if val_results:\n 
val_results[""gt_seq_val""] = (\n val_results[""gt_batch""][""videos""][0].astype(jnp.float32)\n / 255.0\n )\n val_results[""recon_seq_val""] = val_results[""recon""][0].clip(\n 0, 1\n )\n val_comparison_seq = jnp.concatenate(\n (val_results[""gt_seq_val""], val_results[""recon_seq_val""]),\n axis=1,\n )\n val_results[""val_comparison_seq""] = einops.rearrange(\n val_comparison_seq * 255, ""t h w c -> h (t w) c""\n )\n if args.eval_full_frame:\n val_results[""full_frame_seq_val""] = val_results[\n ""full_frame""\n ][0].clip(0, 1)\n val_results[""val_full_frame_comparison_seq""] = (\n jnp.concatenate(\n (\n val_results[""gt_seq_val""],\n val_results[""full_frame_seq_val""],\n ),\n axis=1,\n )\n )\n val_results[""val_full_frame_comparison_seq""] = (\n einops.rearrange(\n val_results[""val_full_frame_comparison_seq""] * 255,\n ""t h w c -> h (t w) c"",\n )\n )\n # NOTE: Process-dependent control flow deliberately happens\n # after indexing operation since it must not contain code\n # sections that lead to cross-accelerator communication.\n if jax.process_index() == 0:\n log_images = dict(\n image=wandb.Image(np.asarray(gt_seq[args.seq_len - 1])),\n recon=wandb.Image(np.asarray(recon_seq[args.seq_len - 1])),\n true_vs_recon=wandb.Image(\n np.asarray(comparison_seq.astype(np.uint8))\n ),\n )\n if val_results:\n log_images.update(\n dict(\n val_image=wandb.Image(\n np.asarray(\n val_results[""gt_seq_val""][args.seq_len - 1]\n )\n ),\n val_recon=wandb.Image(\n np.asarray(\n val_results[""recon_seq_val""][\n args.seq_len - 1\n ]\n )\n ),\n val_true_vs_recon=wandb.Image(\n np.asarray(\n val_results[""val_comparison_seq""].astype(\n np.uint8\n )\n )\n ),\n )\n )\n if args.eval_full_frame:\n log_images.update(\n dict(\n val_full_frame=wandb.Image(\n np.asarray(\n val_results[""full_frame_seq_val""][\n args.seq_len - 1\n ]\n )\n ),\n val_true_vs_full_frame=wandb.Image(\n np.asarray(\n val_results[\n ""val_full_frame_comparison_seq""\n ].astype(np.uint8)\n )\n ),\n )\n )\n wandb.log(log_images)\n # --- Checkpointing ---\n if args.save_ckpt and step % args.log_checkpoint_interval == 0:\n assert checkpoint_manager is not None\n optimizer_state = nnx.state(optimizer)\n if val_iterator:\n ckpt_manager_args = ocp.args.Composite(\n model_state=ocp.args.PyTreeSave(optimizer_state), # type: ignore\n train_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n train_iterator # type: ignore\n ),\n val_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n val_iterator # type: ignore\n ),\n )\n else:\n ckpt_manager_args = ocp.args.Composite(\n model_state=ocp.args.PyTreeSave(optimizer_state), # type: ignore\n train_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n train_iterator # type: ignore\n ),\n )\n checkpoint_manager.save(step, args=ckpt_manager_args)\n print(f""Saved checkpoint at step {step}"")\n if step >= args.num_steps:\n break",python,selection_command +1699,1257816,"jasmine/train_dynamics.py",24960,6577," if step % args.log_image_interval == 0:\n gt_seq = batch[""videos""][0].astype(jnp.float32) / 255.0\n recon_seq = recon[0].clip(0, 1)\n comparison_seq = jnp.concatenate((gt_seq, recon_seq), axis=1)\n comparison_seq = einops.rearrange(\n comparison_seq * 255, ""t h w c -> h (t w) c""\n )\n if val_results:\n val_results[""gt_seq_val""] = (\n val_results[""gt_batch""][""videos""][0].astype(jnp.float32)\n / 255.0\n )\n val_results[""recon_seq_val""] = val_results[""recon""][0].clip(\n 0, 1\n )\n val_comparison_seq = jnp.concatenate(\n 
(val_results[""gt_seq_val""], val_results[""recon_seq_val""]),\n axis=1,\n )\n val_results[""val_comparison_seq""] = einops.rearrange(\n val_comparison_seq * 255, ""t h w c -> h (t w) c""\n )\n if args.eval_full_frame:\n val_results[""full_frame_seq_val""] = val_results[\n ""full_frame""\n ][0].clip(0, 1)\n val_results[""val_full_frame_comparison_seq""] = (\n jnp.concatenate(\n (\n val_results[""gt_seq_val""],\n val_results[""full_frame_seq_val""],\n ),\n axis=1,\n )\n )\n val_results[""val_full_frame_comparison_seq""] = (\n einops.rearrange(\n val_results[""val_full_frame_comparison_seq""] * 255,\n ""t h w c -> h (t w) c"",\n )\n )\n # NOTE: Process-dependent control flow deliberately happens\n # after indexing operation since it must not contain code\n # sections that lead to cross-accelerator communication.\n if jax.process_index() == 0:\n log_images = dict(\n image=wandb.Image(np.asarray(gt_seq[args.seq_len - 1])),\n recon=wandb.Image(np.asarray(recon_seq[args.seq_len - 1])),\n true_vs_recon=wandb.Image(\n np.asarray(comparison_seq.astype(np.uint8))\n ),\n )\n if val_results:\n log_images.update(\n dict(\n val_image=wandb.Image(\n np.asarray(\n val_results[""gt_seq_val""][args.seq_len - 1]\n )\n ),\n val_recon=wandb.Image(\n np.asarray(\n val_results[""recon_seq_val""][\n args.seq_len - 1\n ]\n )\n ),\n val_true_vs_recon=wandb.Image(\n np.asarray(\n val_results[""val_comparison_seq""].astype(\n np.uint8\n )\n )\n ),\n )\n )\n if args.eval_full_frame:\n log_images.update(\n dict(\n val_full_frame=wandb.Image(\n np.asarray(\n val_results[""full_frame_seq_val""][\n args.seq_len - 1\n ]\n )\n ),\n val_true_vs_full_frame=wandb.Image(\n np.asarray(\n val_results[\n ""val_full_frame_comparison_seq""\n ].astype(np.uint8)\n )\n ),\n )\n )\n wandb.log(log_images)\n # --- Checkpointing ---\n if args.save_ckpt and step % args.log_checkpoint_interval == 0:\n assert checkpoint_manager is not None\n optimizer_state = nnx.state(optimizer)\n if val_iterator:\n ckpt_manager_args = ocp.args.Composite(\n model_state=ocp.args.PyTreeSave(optimizer_state), # type: ignore\n train_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n train_iterator # type: ignore\n ),\n val_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n val_iterator # type: ignore\n ),\n )\n else:\n ckpt_manager_args = ocp.args.Composite(\n model_state=ocp.args.PyTreeSave(optimizer_state), # type: ignore\n train_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n train_iterator # type: ignore\n ),\n )\n checkpoint_manager.save(step, args=ckpt_manager_args)\n print(f""Saved checkpoint at step {step}"")\n if step >= args.num_steps:\n break",python,selection_command +1700,1257817,"TERMINAL",0,0,"3",,terminal_output +1701,1257860,"jasmine/train_dynamics.py",24920,6617," wandb.log(log_dict)\n if step % args.log_image_interval == 0:\n gt_seq = batch[""videos""][0].astype(jnp.float32) / 255.0\n recon_seq = recon[0].clip(0, 1)\n comparison_seq = jnp.concatenate((gt_seq, recon_seq), axis=1)\n comparison_seq = einops.rearrange(\n comparison_seq * 255, ""t h w c -> h (t w) c""\n )\n if val_results:\n val_results[""gt_seq_val""] = (\n val_results[""gt_batch""][""videos""][0].astype(jnp.float32)\n / 255.0\n )\n val_results[""recon_seq_val""] = val_results[""recon""][0].clip(\n 0, 1\n )\n val_comparison_seq = jnp.concatenate(\n (val_results[""gt_seq_val""], val_results[""recon_seq_val""]),\n axis=1,\n )\n val_results[""val_comparison_seq""] = einops.rearrange(\n val_comparison_seq * 255, ""t h w c -> h (t w) 
c""\n )\n if args.eval_full_frame:\n val_results[""full_frame_seq_val""] = val_results[\n ""full_frame""\n ][0].clip(0, 1)\n val_results[""val_full_frame_comparison_seq""] = (\n jnp.concatenate(\n (\n val_results[""gt_seq_val""],\n val_results[""full_frame_seq_val""],\n ),\n axis=1,\n )\n )\n val_results[""val_full_frame_comparison_seq""] = (\n einops.rearrange(\n val_results[""val_full_frame_comparison_seq""] * 255,\n ""t h w c -> h (t w) c"",\n )\n )\n # NOTE: Process-dependent control flow deliberately happens\n # after indexing operation since it must not contain code\n # sections that lead to cross-accelerator communication.\n if jax.process_index() == 0:\n log_images = dict(\n image=wandb.Image(np.asarray(gt_seq[args.seq_len - 1])),\n recon=wandb.Image(np.asarray(recon_seq[args.seq_len - 1])),\n true_vs_recon=wandb.Image(\n np.asarray(comparison_seq.astype(np.uint8))\n ),\n )\n if val_results:\n log_images.update(\n dict(\n val_image=wandb.Image(\n np.asarray(\n val_results[""gt_seq_val""][args.seq_len - 1]\n )\n ),\n val_recon=wandb.Image(\n np.asarray(\n val_results[""recon_seq_val""][\n args.seq_len - 1\n ]\n )\n ),\n val_true_vs_recon=wandb.Image(\n np.asarray(\n val_results[""val_comparison_seq""].astype(\n np.uint8\n )\n )\n ),\n )\n )\n if args.eval_full_frame:\n log_images.update(\n dict(\n val_full_frame=wandb.Image(\n np.asarray(\n val_results[""full_frame_seq_val""][\n args.seq_len - 1\n ]\n )\n ),\n val_true_vs_full_frame=wandb.Image(\n np.asarray(\n val_results[\n ""val_full_frame_comparison_seq""\n ].astype(np.uint8)\n )\n ),\n )\n )\n wandb.log(log_images)\n # --- Checkpointing ---\n if args.save_ckpt and step % args.log_checkpoint_interval == 0:\n assert checkpoint_manager is not None\n optimizer_state = nnx.state(optimizer)\n if val_iterator:\n ckpt_manager_args = ocp.args.Composite(\n model_state=ocp.args.PyTreeSave(optimizer_state), # type: ignore\n train_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n train_iterator # type: ignore\n ),\n val_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n val_iterator # type: ignore\n ),\n )\n else:\n ckpt_manager_args = ocp.args.Composite(\n model_state=ocp.args.PyTreeSave(optimizer_state), # type: ignore\n train_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n train_iterator # type: ignore\n ),\n )\n checkpoint_manager.save(step, args=ckpt_manager_args)\n print(f""Saved checkpoint at step {step}"")\n if step >= args.num_steps:\n break",python,selection_command +1702,1257893,"jasmine/train_dynamics.py",24856,6681," log_dict.update(val_results[""metrics""])\n wandb.log(log_dict)\n if step % args.log_image_interval == 0:\n gt_seq = batch[""videos""][0].astype(jnp.float32) / 255.0\n recon_seq = recon[0].clip(0, 1)\n comparison_seq = jnp.concatenate((gt_seq, recon_seq), axis=1)\n comparison_seq = einops.rearrange(\n comparison_seq * 255, ""t h w c -> h (t w) c""\n )\n if val_results:\n val_results[""gt_seq_val""] = (\n val_results[""gt_batch""][""videos""][0].astype(jnp.float32)\n / 255.0\n )\n val_results[""recon_seq_val""] = val_results[""recon""][0].clip(\n 0, 1\n )\n val_comparison_seq = jnp.concatenate(\n (val_results[""gt_seq_val""], val_results[""recon_seq_val""]),\n axis=1,\n )\n val_results[""val_comparison_seq""] = einops.rearrange(\n val_comparison_seq * 255, ""t h w c -> h (t w) c""\n )\n if args.eval_full_frame:\n val_results[""full_frame_seq_val""] = val_results[\n ""full_frame""\n ][0].clip(0, 1)\n val_results[""val_full_frame_comparison_seq""] = (\n 
jnp.concatenate(\n (\n val_results[""gt_seq_val""],\n val_results[""full_frame_seq_val""],\n ),\n axis=1,\n )\n )\n val_results[""val_full_frame_comparison_seq""] = (\n einops.rearrange(\n val_results[""val_full_frame_comparison_seq""] * 255,\n ""t h w c -> h (t w) c"",\n )\n )\n # NOTE: Process-dependent control flow deliberately happens\n # after indexing operation since it must not contain code\n # sections that lead to cross-accelerator communication.\n if jax.process_index() == 0:\n log_images = dict(\n image=wandb.Image(np.asarray(gt_seq[args.seq_len - 1])),\n recon=wandb.Image(np.asarray(recon_seq[args.seq_len - 1])),\n true_vs_recon=wandb.Image(\n np.asarray(comparison_seq.astype(np.uint8))\n ),\n )\n if val_results:\n log_images.update(\n dict(\n val_image=wandb.Image(\n np.asarray(\n val_results[""gt_seq_val""][args.seq_len - 1]\n )\n ),\n val_recon=wandb.Image(\n np.asarray(\n val_results[""recon_seq_val""][\n args.seq_len - 1\n ]\n )\n ),\n val_true_vs_recon=wandb.Image(\n np.asarray(\n val_results[""val_comparison_seq""].astype(\n np.uint8\n )\n )\n ),\n )\n )\n if args.eval_full_frame:\n log_images.update(\n dict(\n val_full_frame=wandb.Image(\n np.asarray(\n val_results[""full_frame_seq_val""][\n args.seq_len - 1\n ]\n )\n ),\n val_true_vs_full_frame=wandb.Image(\n np.asarray(\n val_results[\n ""val_full_frame_comparison_seq""\n ].astype(np.uint8)\n )\n ),\n )\n )\n wandb.log(log_images)\n # --- Checkpointing ---\n if args.save_ckpt and step % args.log_checkpoint_interval == 0:\n assert checkpoint_manager is not None\n optimizer_state = nnx.state(optimizer)\n if val_iterator:\n ckpt_manager_args = ocp.args.Composite(\n model_state=ocp.args.PyTreeSave(optimizer_state), # type: ignore\n train_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n train_iterator # type: ignore\n ),\n val_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n val_iterator # type: ignore\n ),\n )\n else:\n ckpt_manager_args = ocp.args.Composite(\n model_state=ocp.args.PyTreeSave(optimizer_state), # type: ignore\n train_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n train_iterator # type: ignore\n ),\n )\n checkpoint_manager.save(step, args=ckpt_manager_args)\n print(f""Saved checkpoint at step {step}"")\n if step >= args.num_steps:\n break",python,selection_command +1703,1257901,"jasmine/train_dynamics.py",24820,6717," if val_results:\n log_dict.update(val_results[""metrics""])\n wandb.log(log_dict)\n if step % args.log_image_interval == 0:\n gt_seq = batch[""videos""][0].astype(jnp.float32) / 255.0\n recon_seq = recon[0].clip(0, 1)\n comparison_seq = jnp.concatenate((gt_seq, recon_seq), axis=1)\n comparison_seq = einops.rearrange(\n comparison_seq * 255, ""t h w c -> h (t w) c""\n )\n if val_results:\n val_results[""gt_seq_val""] = (\n val_results[""gt_batch""][""videos""][0].astype(jnp.float32)\n / 255.0\n )\n val_results[""recon_seq_val""] = val_results[""recon""][0].clip(\n 0, 1\n )\n val_comparison_seq = jnp.concatenate(\n (val_results[""gt_seq_val""], val_results[""recon_seq_val""]),\n axis=1,\n )\n val_results[""val_comparison_seq""] = einops.rearrange(\n val_comparison_seq * 255, ""t h w c -> h (t w) c""\n )\n if args.eval_full_frame:\n val_results[""full_frame_seq_val""] = val_results[\n ""full_frame""\n ][0].clip(0, 1)\n val_results[""val_full_frame_comparison_seq""] = (\n jnp.concatenate(\n (\n val_results[""gt_seq_val""],\n val_results[""full_frame_seq_val""],\n ),\n axis=1,\n )\n )\n val_results[""val_full_frame_comparison_seq""] = (\n 
einops.rearrange(\n val_results[""val_full_frame_comparison_seq""] * 255,\n ""t h w c -> h (t w) c"",\n )\n )\n # NOTE: Process-dependent control flow deliberately happens\n # after indexing operation since it must not contain code\n # sections that lead to cross-accelerator communication.\n if jax.process_index() == 0:\n log_images = dict(\n image=wandb.Image(np.asarray(gt_seq[args.seq_len - 1])),\n recon=wandb.Image(np.asarray(recon_seq[args.seq_len - 1])),\n true_vs_recon=wandb.Image(\n np.asarray(comparison_seq.astype(np.uint8))\n ),\n )\n if val_results:\n log_images.update(\n dict(\n val_image=wandb.Image(\n np.asarray(\n val_results[""gt_seq_val""][args.seq_len - 1]\n )\n ),\n val_recon=wandb.Image(\n np.asarray(\n val_results[""recon_seq_val""][\n args.seq_len - 1\n ]\n )\n ),\n val_true_vs_recon=wandb.Image(\n np.asarray(\n val_results[""val_comparison_seq""].astype(\n np.uint8\n )\n )\n ),\n )\n )\n if args.eval_full_frame:\n log_images.update(\n dict(\n val_full_frame=wandb.Image(\n np.asarray(\n val_results[""full_frame_seq_val""][\n args.seq_len - 1\n ]\n )\n ),\n val_true_vs_full_frame=wandb.Image(\n np.asarray(\n val_results[\n ""val_full_frame_comparison_seq""\n ].astype(np.uint8)\n )\n ),\n )\n )\n wandb.log(log_images)\n # --- Checkpointing ---\n if args.save_ckpt and step % args.log_checkpoint_interval == 0:\n assert checkpoint_manager is not None\n optimizer_state = nnx.state(optimizer)\n if val_iterator:\n ckpt_manager_args = ocp.args.Composite(\n model_state=ocp.args.PyTreeSave(optimizer_state), # type: ignore\n train_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n train_iterator # type: ignore\n ),\n val_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n val_iterator # type: ignore\n ),\n )\n else:\n ckpt_manager_args = ocp.args.Composite(\n model_state=ocp.args.PyTreeSave(optimizer_state), # type: ignore\n train_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n train_iterator # type: ignore\n ),\n )\n checkpoint_manager.save(step, args=ckpt_manager_args)\n print(f""Saved checkpoint at step {step}"")\n if step >= args.num_steps:\n break",python,selection_command +1704,1257970,"jasmine/train_dynamics.py",24856,6681," log_dict.update(val_results[""metrics""])\n wandb.log(log_dict)\n if step % args.log_image_interval == 0:\n gt_seq = batch[""videos""][0].astype(jnp.float32) / 255.0\n recon_seq = recon[0].clip(0, 1)\n comparison_seq = jnp.concatenate((gt_seq, recon_seq), axis=1)\n comparison_seq = einops.rearrange(\n comparison_seq * 255, ""t h w c -> h (t w) c""\n )\n if val_results:\n val_results[""gt_seq_val""] = (\n val_results[""gt_batch""][""videos""][0].astype(jnp.float32)\n / 255.0\n )\n val_results[""recon_seq_val""] = val_results[""recon""][0].clip(\n 0, 1\n )\n val_comparison_seq = jnp.concatenate(\n (val_results[""gt_seq_val""], val_results[""recon_seq_val""]),\n axis=1,\n )\n val_results[""val_comparison_seq""] = einops.rearrange(\n val_comparison_seq * 255, ""t h w c -> h (t w) c""\n )\n if args.eval_full_frame:\n val_results[""full_frame_seq_val""] = val_results[\n ""full_frame""\n ][0].clip(0, 1)\n val_results[""val_full_frame_comparison_seq""] = (\n jnp.concatenate(\n (\n val_results[""gt_seq_val""],\n val_results[""full_frame_seq_val""],\n ),\n axis=1,\n )\n )\n val_results[""val_full_frame_comparison_seq""] = (\n einops.rearrange(\n val_results[""val_full_frame_comparison_seq""] * 255,\n ""t h w c -> h (t w) c"",\n )\n )\n # NOTE: Process-dependent control flow deliberately happens\n # after indexing 
operation since it must not contain code\n # sections that lead to cross-accelerator communication.\n if jax.process_index() == 0:\n log_images = dict(\n image=wandb.Image(np.asarray(gt_seq[args.seq_len - 1])),\n recon=wandb.Image(np.asarray(recon_seq[args.seq_len - 1])),\n true_vs_recon=wandb.Image(\n np.asarray(comparison_seq.astype(np.uint8))\n ),\n )\n if val_results:\n log_images.update(\n dict(\n val_image=wandb.Image(\n np.asarray(\n val_results[""gt_seq_val""][args.seq_len - 1]\n )\n ),\n val_recon=wandb.Image(\n np.asarray(\n val_results[""recon_seq_val""][\n args.seq_len - 1\n ]\n )\n ),\n val_true_vs_recon=wandb.Image(\n np.asarray(\n val_results[""val_comparison_seq""].astype(\n np.uint8\n )\n )\n ),\n )\n )\n if args.eval_full_frame:\n log_images.update(\n dict(\n val_full_frame=wandb.Image(\n np.asarray(\n val_results[""full_frame_seq_val""][\n args.seq_len - 1\n ]\n )\n ),\n val_true_vs_full_frame=wandb.Image(\n np.asarray(\n val_results[\n ""val_full_frame_comparison_seq""\n ].astype(np.uint8)\n )\n ),\n )\n )\n wandb.log(log_images)\n # --- Checkpointing ---\n if args.save_ckpt and step % args.log_checkpoint_interval == 0:\n assert checkpoint_manager is not None\n optimizer_state = nnx.state(optimizer)\n if val_iterator:\n ckpt_manager_args = ocp.args.Composite(\n model_state=ocp.args.PyTreeSave(optimizer_state), # type: ignore\n train_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n train_iterator # type: ignore\n ),\n val_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n val_iterator # type: ignore\n ),\n )\n else:\n ckpt_manager_args = ocp.args.Composite(\n model_state=ocp.args.PyTreeSave(optimizer_state), # type: ignore\n train_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n train_iterator # type: ignore\n ),\n )\n checkpoint_manager.save(step, args=ckpt_manager_args)\n print(f""Saved checkpoint at step {step}"")\n if step >= args.num_steps:\n break",python,selection_command +1705,1258454,"jasmine/train_dynamics.py",24920,6617," wandb.log(log_dict)\n if step % args.log_image_interval == 0:\n gt_seq = batch[""videos""][0].astype(jnp.float32) / 255.0\n recon_seq = recon[0].clip(0, 1)\n comparison_seq = jnp.concatenate((gt_seq, recon_seq), axis=1)\n comparison_seq = einops.rearrange(\n comparison_seq * 255, ""t h w c -> h (t w) c""\n )\n if val_results:\n val_results[""gt_seq_val""] = (\n val_results[""gt_batch""][""videos""][0].astype(jnp.float32)\n / 255.0\n )\n val_results[""recon_seq_val""] = val_results[""recon""][0].clip(\n 0, 1\n )\n val_comparison_seq = jnp.concatenate(\n (val_results[""gt_seq_val""], val_results[""recon_seq_val""]),\n axis=1,\n )\n val_results[""val_comparison_seq""] = einops.rearrange(\n val_comparison_seq * 255, ""t h w c -> h (t w) c""\n )\n if args.eval_full_frame:\n val_results[""full_frame_seq_val""] = val_results[\n ""full_frame""\n ][0].clip(0, 1)\n val_results[""val_full_frame_comparison_seq""] = (\n jnp.concatenate(\n (\n val_results[""gt_seq_val""],\n val_results[""full_frame_seq_val""],\n ),\n axis=1,\n )\n )\n val_results[""val_full_frame_comparison_seq""] = (\n einops.rearrange(\n val_results[""val_full_frame_comparison_seq""] * 255,\n ""t h w c -> h (t w) c"",\n )\n )\n # NOTE: Process-dependent control flow deliberately happens\n # after indexing operation since it must not contain code\n # sections that lead to cross-accelerator communication.\n if jax.process_index() == 0:\n log_images = dict(\n image=wandb.Image(np.asarray(gt_seq[args.seq_len - 1])),\n 
recon=wandb.Image(np.asarray(recon_seq[args.seq_len - 1])),\n true_vs_recon=wandb.Image(\n np.asarray(comparison_seq.astype(np.uint8))\n ),\n )\n if val_results:\n log_images.update(\n dict(\n val_image=wandb.Image(\n np.asarray(\n val_results[""gt_seq_val""][args.seq_len - 1]\n )\n ),\n val_recon=wandb.Image(\n np.asarray(\n val_results[""recon_seq_val""][\n args.seq_len - 1\n ]\n )\n ),\n val_true_vs_recon=wandb.Image(\n np.asarray(\n val_results[""val_comparison_seq""].astype(\n np.uint8\n )\n )\n ),\n )\n )\n if args.eval_full_frame:\n log_images.update(\n dict(\n val_full_frame=wandb.Image(\n np.asarray(\n val_results[""full_frame_seq_val""][\n args.seq_len - 1\n ]\n )\n ),\n val_true_vs_full_frame=wandb.Image(\n np.asarray(\n val_results[\n ""val_full_frame_comparison_seq""\n ].astype(np.uint8)\n )\n ),\n )\n )\n wandb.log(log_images)\n # --- Checkpointing ---\n if args.save_ckpt and step % args.log_checkpoint_interval == 0:\n assert checkpoint_manager is not None\n optimizer_state = nnx.state(optimizer)\n if val_iterator:\n ckpt_manager_args = ocp.args.Composite(\n model_state=ocp.args.PyTreeSave(optimizer_state), # type: ignore\n train_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n train_iterator # type: ignore\n ),\n val_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n val_iterator # type: ignore\n ),\n )\n else:\n ckpt_manager_args = ocp.args.Composite(\n model_state=ocp.args.PyTreeSave(optimizer_state), # type: ignore\n train_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n train_iterator # type: ignore\n ),\n )\n checkpoint_manager.save(step, args=ckpt_manager_args)\n print(f""Saved checkpoint at step {step}"")\n if step >= args.num_steps:\n break",python,selection_command +1706,1258461,"jasmine/train_dynamics.py",24960,6577," if step % args.log_image_interval == 0:\n gt_seq = batch[""videos""][0].astype(jnp.float32) / 255.0\n recon_seq = recon[0].clip(0, 1)\n comparison_seq = jnp.concatenate((gt_seq, recon_seq), axis=1)\n comparison_seq = einops.rearrange(\n comparison_seq * 255, ""t h w c -> h (t w) c""\n )\n if val_results:\n val_results[""gt_seq_val""] = (\n val_results[""gt_batch""][""videos""][0].astype(jnp.float32)\n / 255.0\n )\n val_results[""recon_seq_val""] = val_results[""recon""][0].clip(\n 0, 1\n )\n val_comparison_seq = jnp.concatenate(\n (val_results[""gt_seq_val""], val_results[""recon_seq_val""]),\n axis=1,\n )\n val_results[""val_comparison_seq""] = einops.rearrange(\n val_comparison_seq * 255, ""t h w c -> h (t w) c""\n )\n if args.eval_full_frame:\n val_results[""full_frame_seq_val""] = val_results[\n ""full_frame""\n ][0].clip(0, 1)\n val_results[""val_full_frame_comparison_seq""] = (\n jnp.concatenate(\n (\n val_results[""gt_seq_val""],\n val_results[""full_frame_seq_val""],\n ),\n axis=1,\n )\n )\n val_results[""val_full_frame_comparison_seq""] = (\n einops.rearrange(\n val_results[""val_full_frame_comparison_seq""] * 255,\n ""t h w c -> h (t w) c"",\n )\n )\n # NOTE: Process-dependent control flow deliberately happens\n # after indexing operation since it must not contain code\n # sections that lead to cross-accelerator communication.\n if jax.process_index() == 0:\n log_images = dict(\n image=wandb.Image(np.asarray(gt_seq[args.seq_len - 1])),\n recon=wandb.Image(np.asarray(recon_seq[args.seq_len - 1])),\n true_vs_recon=wandb.Image(\n np.asarray(comparison_seq.astype(np.uint8))\n ),\n )\n if val_results:\n log_images.update(\n dict(\n val_image=wandb.Image(\n np.asarray(\n 
val_results[""gt_seq_val""][args.seq_len - 1]\n )\n ),\n val_recon=wandb.Image(\n np.asarray(\n val_results[""recon_seq_val""][\n args.seq_len - 1\n ]\n )\n ),\n val_true_vs_recon=wandb.Image(\n np.asarray(\n val_results[""val_comparison_seq""].astype(\n np.uint8\n )\n )\n ),\n )\n )\n if args.eval_full_frame:\n log_images.update(\n dict(\n val_full_frame=wandb.Image(\n np.asarray(\n val_results[""full_frame_seq_val""][\n args.seq_len - 1\n ]\n )\n ),\n val_true_vs_full_frame=wandb.Image(\n np.asarray(\n val_results[\n ""val_full_frame_comparison_seq""\n ].astype(np.uint8)\n )\n ),\n )\n )\n wandb.log(log_images)\n # --- Checkpointing ---\n if args.save_ckpt and step % args.log_checkpoint_interval == 0:\n assert checkpoint_manager is not None\n optimizer_state = nnx.state(optimizer)\n if val_iterator:\n ckpt_manager_args = ocp.args.Composite(\n model_state=ocp.args.PyTreeSave(optimizer_state), # type: ignore\n train_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n train_iterator # type: ignore\n ),\n val_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n val_iterator # type: ignore\n ),\n )\n else:\n ckpt_manager_args = ocp.args.Composite(\n model_state=ocp.args.PyTreeSave(optimizer_state), # type: ignore\n train_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n train_iterator # type: ignore\n ),\n )\n checkpoint_manager.save(step, args=ckpt_manager_args)\n print(f""Saved checkpoint at step {step}"")\n if step >= args.num_steps:\n break",python,selection_command +1707,1258542,"jasmine/train_dynamics.py",25016,6521," gt_seq = batch[""videos""][0].astype(jnp.float32) / 255.0\n recon_seq = recon[0].clip(0, 1)\n comparison_seq = jnp.concatenate((gt_seq, recon_seq), axis=1)\n comparison_seq = einops.rearrange(\n comparison_seq * 255, ""t h w c -> h (t w) c""\n )\n if val_results:\n val_results[""gt_seq_val""] = (\n val_results[""gt_batch""][""videos""][0].astype(jnp.float32)\n / 255.0\n )\n val_results[""recon_seq_val""] = val_results[""recon""][0].clip(\n 0, 1\n )\n val_comparison_seq = jnp.concatenate(\n (val_results[""gt_seq_val""], val_results[""recon_seq_val""]),\n axis=1,\n )\n val_results[""val_comparison_seq""] = einops.rearrange(\n val_comparison_seq * 255, ""t h w c -> h (t w) c""\n )\n if args.eval_full_frame:\n val_results[""full_frame_seq_val""] = val_results[\n ""full_frame""\n ][0].clip(0, 1)\n val_results[""val_full_frame_comparison_seq""] = (\n jnp.concatenate(\n (\n val_results[""gt_seq_val""],\n val_results[""full_frame_seq_val""],\n ),\n axis=1,\n )\n )\n val_results[""val_full_frame_comparison_seq""] = (\n einops.rearrange(\n val_results[""val_full_frame_comparison_seq""] * 255,\n ""t h w c -> h (t w) c"",\n )\n )\n # NOTE: Process-dependent control flow deliberately happens\n # after indexing operation since it must not contain code\n # sections that lead to cross-accelerator communication.\n if jax.process_index() == 0:\n log_images = dict(\n image=wandb.Image(np.asarray(gt_seq[args.seq_len - 1])),\n recon=wandb.Image(np.asarray(recon_seq[args.seq_len - 1])),\n true_vs_recon=wandb.Image(\n np.asarray(comparison_seq.astype(np.uint8))\n ),\n )\n if val_results:\n log_images.update(\n dict(\n val_image=wandb.Image(\n np.asarray(\n val_results[""gt_seq_val""][args.seq_len - 1]\n )\n ),\n val_recon=wandb.Image(\n np.asarray(\n val_results[""recon_seq_val""][\n args.seq_len - 1\n ]\n )\n ),\n val_true_vs_recon=wandb.Image(\n np.asarray(\n val_results[""val_comparison_seq""].astype(\n np.uint8\n )\n )\n ),\n )\n )\n if 
args.eval_full_frame:\n log_images.update(\n dict(\n val_full_frame=wandb.Image(\n np.asarray(\n val_results[""full_frame_seq_val""][\n args.seq_len - 1\n ]\n )\n ),\n val_true_vs_full_frame=wandb.Image(\n np.asarray(\n val_results[\n ""val_full_frame_comparison_seq""\n ].astype(np.uint8)\n )\n ),\n )\n )\n wandb.log(log_images)\n # --- Checkpointing ---\n if args.save_ckpt and step % args.log_checkpoint_interval == 0:\n assert checkpoint_manager is not None\n optimizer_state = nnx.state(optimizer)\n if val_iterator:\n ckpt_manager_args = ocp.args.Composite(\n model_state=ocp.args.PyTreeSave(optimizer_state), # type: ignore\n train_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n train_iterator # type: ignore\n ),\n val_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n val_iterator # type: ignore\n ),\n )\n else:\n ckpt_manager_args = ocp.args.Composite(\n model_state=ocp.args.PyTreeSave(optimizer_state), # type: ignore\n train_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n train_iterator # type: ignore\n ),\n )\n checkpoint_manager.save(step, args=ckpt_manager_args)\n print(f""Saved checkpoint at step {step}"")\n if step >= args.num_steps:\n break",python,selection_command +1708,1258583,"jasmine/train_dynamics.py",25092,6445," recon_seq = recon[0].clip(0, 1)\n comparison_seq = jnp.concatenate((gt_seq, recon_seq), axis=1)\n comparison_seq = einops.rearrange(\n comparison_seq * 255, ""t h w c -> h (t w) c""\n )\n if val_results:\n val_results[""gt_seq_val""] = (\n val_results[""gt_batch""][""videos""][0].astype(jnp.float32)\n / 255.0\n )\n val_results[""recon_seq_val""] = val_results[""recon""][0].clip(\n 0, 1\n )\n val_comparison_seq = jnp.concatenate(\n (val_results[""gt_seq_val""], val_results[""recon_seq_val""]),\n axis=1,\n )\n val_results[""val_comparison_seq""] = einops.rearrange(\n val_comparison_seq * 255, ""t h w c -> h (t w) c""\n )\n if args.eval_full_frame:\n val_results[""full_frame_seq_val""] = val_results[\n ""full_frame""\n ][0].clip(0, 1)\n val_results[""val_full_frame_comparison_seq""] = (\n jnp.concatenate(\n (\n val_results[""gt_seq_val""],\n val_results[""full_frame_seq_val""],\n ),\n axis=1,\n )\n )\n val_results[""val_full_frame_comparison_seq""] = (\n einops.rearrange(\n val_results[""val_full_frame_comparison_seq""] * 255,\n ""t h w c -> h (t w) c"",\n )\n )\n # NOTE: Process-dependent control flow deliberately happens\n # after indexing operation since it must not contain code\n # sections that lead to cross-accelerator communication.\n if jax.process_index() == 0:\n log_images = dict(\n image=wandb.Image(np.asarray(gt_seq[args.seq_len - 1])),\n recon=wandb.Image(np.asarray(recon_seq[args.seq_len - 1])),\n true_vs_recon=wandb.Image(\n np.asarray(comparison_seq.astype(np.uint8))\n ),\n )\n if val_results:\n log_images.update(\n dict(\n val_image=wandb.Image(\n np.asarray(\n val_results[""gt_seq_val""][args.seq_len - 1]\n )\n ),\n val_recon=wandb.Image(\n np.asarray(\n val_results[""recon_seq_val""][\n args.seq_len - 1\n ]\n )\n ),\n val_true_vs_recon=wandb.Image(\n np.asarray(\n val_results[""val_comparison_seq""].astype(\n np.uint8\n )\n )\n ),\n )\n )\n if args.eval_full_frame:\n log_images.update(\n dict(\n val_full_frame=wandb.Image(\n np.asarray(\n val_results[""full_frame_seq_val""][\n args.seq_len - 1\n ]\n )\n ),\n val_true_vs_full_frame=wandb.Image(\n np.asarray(\n val_results[\n ""val_full_frame_comparison_seq""\n ].astype(np.uint8)\n )\n ),\n )\n )\n wandb.log(log_images)\n # --- Checkpointing ---\n 
if args.save_ckpt and step % args.log_checkpoint_interval == 0:\n assert checkpoint_manager is not None\n optimizer_state = nnx.state(optimizer)\n if val_iterator:\n ckpt_manager_args = ocp.args.Composite(\n model_state=ocp.args.PyTreeSave(optimizer_state), # type: ignore\n train_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n train_iterator # type: ignore\n ),\n val_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n val_iterator # type: ignore\n ),\n )\n else:\n ckpt_manager_args = ocp.args.Composite(\n model_state=ocp.args.PyTreeSave(optimizer_state), # type: ignore\n train_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n train_iterator # type: ignore\n ),\n )\n checkpoint_manager.save(step, args=ckpt_manager_args)\n print(f""Saved checkpoint at step {step}"")\n if step >= args.num_steps:\n break",python,selection_command +1709,1258585,"jasmine/train_dynamics.py",25144,6393," comparison_seq = jnp.concatenate((gt_seq, recon_seq), axis=1)\n comparison_seq = einops.rearrange(\n comparison_seq * 255, ""t h w c -> h (t w) c""\n )\n if val_results:\n val_results[""gt_seq_val""] = (\n val_results[""gt_batch""][""videos""][0].astype(jnp.float32)\n / 255.0\n )\n val_results[""recon_seq_val""] = val_results[""recon""][0].clip(\n 0, 1\n )\n val_comparison_seq = jnp.concatenate(\n (val_results[""gt_seq_val""], val_results[""recon_seq_val""]),\n axis=1,\n )\n val_results[""val_comparison_seq""] = einops.rearrange(\n val_comparison_seq * 255, ""t h w c -> h (t w) c""\n )\n if args.eval_full_frame:\n val_results[""full_frame_seq_val""] = val_results[\n ""full_frame""\n ][0].clip(0, 1)\n val_results[""val_full_frame_comparison_seq""] = (\n jnp.concatenate(\n (\n val_results[""gt_seq_val""],\n val_results[""full_frame_seq_val""],\n ),\n axis=1,\n )\n )\n val_results[""val_full_frame_comparison_seq""] = (\n einops.rearrange(\n val_results[""val_full_frame_comparison_seq""] * 255,\n ""t h w c -> h (t w) c"",\n )\n )\n # NOTE: Process-dependent control flow deliberately happens\n # after indexing operation since it must not contain code\n # sections that lead to cross-accelerator communication.\n if jax.process_index() == 0:\n log_images = dict(\n image=wandb.Image(np.asarray(gt_seq[args.seq_len - 1])),\n recon=wandb.Image(np.asarray(recon_seq[args.seq_len - 1])),\n true_vs_recon=wandb.Image(\n np.asarray(comparison_seq.astype(np.uint8))\n ),\n )\n if val_results:\n log_images.update(\n dict(\n val_image=wandb.Image(\n np.asarray(\n val_results[""gt_seq_val""][args.seq_len - 1]\n )\n ),\n val_recon=wandb.Image(\n np.asarray(\n val_results[""recon_seq_val""][\n args.seq_len - 1\n ]\n )\n ),\n val_true_vs_recon=wandb.Image(\n np.asarray(\n val_results[""val_comparison_seq""].astype(\n np.uint8\n )\n )\n ),\n )\n )\n if args.eval_full_frame:\n log_images.update(\n dict(\n val_full_frame=wandb.Image(\n np.asarray(\n val_results[""full_frame_seq_val""][\n args.seq_len - 1\n ]\n )\n ),\n val_true_vs_full_frame=wandb.Image(\n np.asarray(\n val_results[\n ""val_full_frame_comparison_seq""\n ].astype(np.uint8)\n )\n ),\n )\n )\n wandb.log(log_images)\n # --- Checkpointing ---\n if args.save_ckpt and step % args.log_checkpoint_interval == 0:\n assert checkpoint_manager is not None\n optimizer_state = nnx.state(optimizer)\n if val_iterator:\n ckpt_manager_args = ocp.args.Composite(\n model_state=ocp.args.PyTreeSave(optimizer_state), # type: ignore\n train_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n train_iterator # type: ignore\n ),\n 
val_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n val_iterator # type: ignore\n ),\n )\n else:\n ckpt_manager_args = ocp.args.Composite(\n model_state=ocp.args.PyTreeSave(optimizer_state), # type: ignore\n train_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n train_iterator # type: ignore\n ),\n )\n checkpoint_manager.save(step, args=ckpt_manager_args)\n print(f""Saved checkpoint at step {step}"")\n if step >= args.num_steps:\n break",python,selection_command +1710,1258621,"jasmine/train_dynamics.py",25226,6311," comparison_seq = einops.rearrange(\n comparison_seq * 255, ""t h w c -> h (t w) c""\n )\n if val_results:\n val_results[""gt_seq_val""] = (\n val_results[""gt_batch""][""videos""][0].astype(jnp.float32)\n / 255.0\n )\n val_results[""recon_seq_val""] = val_results[""recon""][0].clip(\n 0, 1\n )\n val_comparison_seq = jnp.concatenate(\n (val_results[""gt_seq_val""], val_results[""recon_seq_val""]),\n axis=1,\n )\n val_results[""val_comparison_seq""] = einops.rearrange(\n val_comparison_seq * 255, ""t h w c -> h (t w) c""\n )\n if args.eval_full_frame:\n val_results[""full_frame_seq_val""] = val_results[\n ""full_frame""\n ][0].clip(0, 1)\n val_results[""val_full_frame_comparison_seq""] = (\n jnp.concatenate(\n (\n val_results[""gt_seq_val""],\n val_results[""full_frame_seq_val""],\n ),\n axis=1,\n )\n )\n val_results[""val_full_frame_comparison_seq""] = (\n einops.rearrange(\n val_results[""val_full_frame_comparison_seq""] * 255,\n ""t h w c -> h (t w) c"",\n )\n )\n # NOTE: Process-dependent control flow deliberately happens\n # after indexing operation since it must not contain code\n # sections that lead to cross-accelerator communication.\n if jax.process_index() == 0:\n log_images = dict(\n image=wandb.Image(np.asarray(gt_seq[args.seq_len - 1])),\n recon=wandb.Image(np.asarray(recon_seq[args.seq_len - 1])),\n true_vs_recon=wandb.Image(\n np.asarray(comparison_seq.astype(np.uint8))\n ),\n )\n if val_results:\n log_images.update(\n dict(\n val_image=wandb.Image(\n np.asarray(\n val_results[""gt_seq_val""][args.seq_len - 1]\n )\n ),\n val_recon=wandb.Image(\n np.asarray(\n val_results[""recon_seq_val""][\n args.seq_len - 1\n ]\n )\n ),\n val_true_vs_recon=wandb.Image(\n np.asarray(\n val_results[""val_comparison_seq""].astype(\n np.uint8\n )\n )\n ),\n )\n )\n if args.eval_full_frame:\n log_images.update(\n dict(\n val_full_frame=wandb.Image(\n np.asarray(\n val_results[""full_frame_seq_val""][\n args.seq_len - 1\n ]\n )\n ),\n val_true_vs_full_frame=wandb.Image(\n np.asarray(\n val_results[\n ""val_full_frame_comparison_seq""\n ].astype(np.uint8)\n )\n ),\n )\n )\n wandb.log(log_images)\n # --- Checkpointing ---\n if args.save_ckpt and step % args.log_checkpoint_interval == 0:\n assert checkpoint_manager is not None\n optimizer_state = nnx.state(optimizer)\n if val_iterator:\n ckpt_manager_args = ocp.args.Composite(\n model_state=ocp.args.PyTreeSave(optimizer_state), # type: ignore\n train_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n train_iterator # type: ignore\n ),\n val_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n val_iterator # type: ignore\n ),\n )\n else:\n ckpt_manager_args = ocp.args.Composite(\n model_state=ocp.args.PyTreeSave(optimizer_state), # type: ignore\n train_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n train_iterator # type: ignore\n ),\n )\n checkpoint_manager.save(step, args=ckpt_manager_args)\n print(f""Saved checkpoint at step {step}"")\n if step >= 
args.num_steps:\n break",python,selection_command +1711,1258661,"jasmine/train_dynamics.py",25281,6256," comparison_seq * 255, ""t h w c -> h (t w) c""\n )\n if val_results:\n val_results[""gt_seq_val""] = (\n val_results[""gt_batch""][""videos""][0].astype(jnp.float32)\n / 255.0\n )\n val_results[""recon_seq_val""] = val_results[""recon""][0].clip(\n 0, 1\n )\n val_comparison_seq = jnp.concatenate(\n (val_results[""gt_seq_val""], val_results[""recon_seq_val""]),\n axis=1,\n )\n val_results[""val_comparison_seq""] = einops.rearrange(\n val_comparison_seq * 255, ""t h w c -> h (t w) c""\n )\n if args.eval_full_frame:\n val_results[""full_frame_seq_val""] = val_results[\n ""full_frame""\n ][0].clip(0, 1)\n val_results[""val_full_frame_comparison_seq""] = (\n jnp.concatenate(\n (\n val_results[""gt_seq_val""],\n val_results[""full_frame_seq_val""],\n ),\n axis=1,\n )\n )\n val_results[""val_full_frame_comparison_seq""] = (\n einops.rearrange(\n val_results[""val_full_frame_comparison_seq""] * 255,\n ""t h w c -> h (t w) c"",\n )\n )\n # NOTE: Process-dependent control flow deliberately happens\n # after indexing operation since it must not contain code\n # sections that lead to cross-accelerator communication.\n if jax.process_index() == 0:\n log_images = dict(\n image=wandb.Image(np.asarray(gt_seq[args.seq_len - 1])),\n recon=wandb.Image(np.asarray(recon_seq[args.seq_len - 1])),\n true_vs_recon=wandb.Image(\n np.asarray(comparison_seq.astype(np.uint8))\n ),\n )\n if val_results:\n log_images.update(\n dict(\n val_image=wandb.Image(\n np.asarray(\n val_results[""gt_seq_val""][args.seq_len - 1]\n )\n ),\n val_recon=wandb.Image(\n np.asarray(\n val_results[""recon_seq_val""][\n args.seq_len - 1\n ]\n )\n ),\n val_true_vs_recon=wandb.Image(\n np.asarray(\n val_results[""val_comparison_seq""].astype(\n np.uint8\n )\n )\n ),\n )\n )\n if args.eval_full_frame:\n log_images.update(\n dict(\n val_full_frame=wandb.Image(\n np.asarray(\n val_results[""full_frame_seq_val""][\n args.seq_len - 1\n ]\n )\n ),\n val_true_vs_full_frame=wandb.Image(\n np.asarray(\n val_results[\n ""val_full_frame_comparison_seq""\n ].astype(np.uint8)\n )\n ),\n )\n )\n wandb.log(log_images)\n # --- Checkpointing ---\n if args.save_ckpt and step % args.log_checkpoint_interval == 0:\n assert checkpoint_manager is not None\n optimizer_state = nnx.state(optimizer)\n if val_iterator:\n ckpt_manager_args = ocp.args.Composite(\n model_state=ocp.args.PyTreeSave(optimizer_state), # type: ignore\n train_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n train_iterator # type: ignore\n ),\n val_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n val_iterator # type: ignore\n ),\n )\n else:\n ckpt_manager_args = ocp.args.Composite(\n model_state=ocp.args.PyTreeSave(optimizer_state), # type: ignore\n train_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n train_iterator # type: ignore\n ),\n )\n checkpoint_manager.save(step, args=ckpt_manager_args)\n print(f""Saved checkpoint at step {step}"")\n if step >= args.num_steps:\n break",python,selection_command +1712,1258703,"jasmine/train_dynamics.py",25226,6311," comparison_seq = einops.rearrange(\n comparison_seq * 255, ""t h w c -> h (t w) c""\n )\n if val_results:\n val_results[""gt_seq_val""] = (\n val_results[""gt_batch""][""videos""][0].astype(jnp.float32)\n / 255.0\n )\n val_results[""recon_seq_val""] = val_results[""recon""][0].clip(\n 0, 1\n )\n val_comparison_seq = jnp.concatenate(\n (val_results[""gt_seq_val""], 
val_results[""recon_seq_val""]),\n axis=1,\n )\n val_results[""val_comparison_seq""] = einops.rearrange(\n val_comparison_seq * 255, ""t h w c -> h (t w) c""\n )\n if args.eval_full_frame:\n val_results[""full_frame_seq_val""] = val_results[\n ""full_frame""\n ][0].clip(0, 1)\n val_results[""val_full_frame_comparison_seq""] = (\n jnp.concatenate(\n (\n val_results[""gt_seq_val""],\n val_results[""full_frame_seq_val""],\n ),\n axis=1,\n )\n )\n val_results[""val_full_frame_comparison_seq""] = (\n einops.rearrange(\n val_results[""val_full_frame_comparison_seq""] * 255,\n ""t h w c -> h (t w) c"",\n )\n )\n # NOTE: Process-dependent control flow deliberately happens\n # after indexing operation since it must not contain code\n # sections that lead to cross-accelerator communication.\n if jax.process_index() == 0:\n log_images = dict(\n image=wandb.Image(np.asarray(gt_seq[args.seq_len - 1])),\n recon=wandb.Image(np.asarray(recon_seq[args.seq_len - 1])),\n true_vs_recon=wandb.Image(\n np.asarray(comparison_seq.astype(np.uint8))\n ),\n )\n if val_results:\n log_images.update(\n dict(\n val_image=wandb.Image(\n np.asarray(\n val_results[""gt_seq_val""][args.seq_len - 1]\n )\n ),\n val_recon=wandb.Image(\n np.asarray(\n val_results[""recon_seq_val""][\n args.seq_len - 1\n ]\n )\n ),\n val_true_vs_recon=wandb.Image(\n np.asarray(\n val_results[""val_comparison_seq""].astype(\n np.uint8\n )\n )\n ),\n )\n )\n if args.eval_full_frame:\n log_images.update(\n dict(\n val_full_frame=wandb.Image(\n np.asarray(\n val_results[""full_frame_seq_val""][\n args.seq_len - 1\n ]\n )\n ),\n val_true_vs_full_frame=wandb.Image(\n np.asarray(\n val_results[\n ""val_full_frame_comparison_seq""\n ].astype(np.uint8)\n )\n ),\n )\n )\n wandb.log(log_images)\n # --- Checkpointing ---\n if args.save_ckpt and step % args.log_checkpoint_interval == 0:\n assert checkpoint_manager is not None\n optimizer_state = nnx.state(optimizer)\n if val_iterator:\n ckpt_manager_args = ocp.args.Composite(\n model_state=ocp.args.PyTreeSave(optimizer_state), # type: ignore\n train_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n train_iterator # type: ignore\n ),\n val_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n val_iterator # type: ignore\n ),\n )\n else:\n ckpt_manager_args = ocp.args.Composite(\n model_state=ocp.args.PyTreeSave(optimizer_state), # type: ignore\n train_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n train_iterator # type: ignore\n ),\n )\n checkpoint_manager.save(step, args=ckpt_manager_args)\n print(f""Saved checkpoint at step {step}"")\n if step >= args.num_steps:\n break",python,selection_command +1713,1258846,"TERMINAL",0,0,"4",,terminal_output +1714,1259193,"jasmine/train_dynamics.py",25144,6393," comparison_seq = jnp.concatenate((gt_seq, recon_seq), axis=1)\n comparison_seq = einops.rearrange(\n comparison_seq * 255, ""t h w c -> h (t w) c""\n )\n if val_results:\n val_results[""gt_seq_val""] = (\n val_results[""gt_batch""][""videos""][0].astype(jnp.float32)\n / 255.0\n )\n val_results[""recon_seq_val""] = val_results[""recon""][0].clip(\n 0, 1\n )\n val_comparison_seq = jnp.concatenate(\n (val_results[""gt_seq_val""], val_results[""recon_seq_val""]),\n axis=1,\n )\n val_results[""val_comparison_seq""] = einops.rearrange(\n val_comparison_seq * 255, ""t h w c -> h (t w) c""\n )\n if args.eval_full_frame:\n val_results[""full_frame_seq_val""] = val_results[\n ""full_frame""\n ][0].clip(0, 1)\n val_results[""val_full_frame_comparison_seq""] = (\n 
jnp.concatenate(\n (\n val_results[""gt_seq_val""],\n val_results[""full_frame_seq_val""],\n ),\n axis=1,\n )\n )\n val_results[""val_full_frame_comparison_seq""] = (\n einops.rearrange(\n val_results[""val_full_frame_comparison_seq""] * 255,\n ""t h w c -> h (t w) c"",\n )\n )\n # NOTE: Process-dependent control flow deliberately happens\n # after indexing operation since it must not contain code\n # sections that lead to cross-accelerator communication.\n if jax.process_index() == 0:\n log_images = dict(\n image=wandb.Image(np.asarray(gt_seq[args.seq_len - 1])),\n recon=wandb.Image(np.asarray(recon_seq[args.seq_len - 1])),\n true_vs_recon=wandb.Image(\n np.asarray(comparison_seq.astype(np.uint8))\n ),\n )\n if val_results:\n log_images.update(\n dict(\n val_image=wandb.Image(\n np.asarray(\n val_results[""gt_seq_val""][args.seq_len - 1]\n )\n ),\n val_recon=wandb.Image(\n np.asarray(\n val_results[""recon_seq_val""][\n args.seq_len - 1\n ]\n )\n ),\n val_true_vs_recon=wandb.Image(\n np.asarray(\n val_results[""val_comparison_seq""].astype(\n np.uint8\n )\n )\n ),\n )\n )\n if args.eval_full_frame:\n log_images.update(\n dict(\n val_full_frame=wandb.Image(\n np.asarray(\n val_results[""full_frame_seq_val""][\n args.seq_len - 1\n ]\n )\n ),\n val_true_vs_full_frame=wandb.Image(\n np.asarray(\n val_results[\n ""val_full_frame_comparison_seq""\n ].astype(np.uint8)\n )\n ),\n )\n )\n wandb.log(log_images)\n # --- Checkpointing ---\n if args.save_ckpt and step % args.log_checkpoint_interval == 0:\n assert checkpoint_manager is not None\n optimizer_state = nnx.state(optimizer)\n if val_iterator:\n ckpt_manager_args = ocp.args.Composite(\n model_state=ocp.args.PyTreeSave(optimizer_state), # type: ignore\n train_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n train_iterator # type: ignore\n ),\n val_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n val_iterator # type: ignore\n ),\n )\n else:\n ckpt_manager_args = ocp.args.Composite(\n model_state=ocp.args.PyTreeSave(optimizer_state), # type: ignore\n train_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n train_iterator # type: ignore\n ),\n )\n checkpoint_manager.save(step, args=ckpt_manager_args)\n print(f""Saved checkpoint at step {step}"")\n if step >= args.num_steps:\n break",python,selection_command +1715,1259220,"jasmine/train_dynamics.py",25092,6445," recon_seq = recon[0].clip(0, 1)\n comparison_seq = jnp.concatenate((gt_seq, recon_seq), axis=1)\n comparison_seq = einops.rearrange(\n comparison_seq * 255, ""t h w c -> h (t w) c""\n )\n if val_results:\n val_results[""gt_seq_val""] = (\n val_results[""gt_batch""][""videos""][0].astype(jnp.float32)\n / 255.0\n )\n val_results[""recon_seq_val""] = val_results[""recon""][0].clip(\n 0, 1\n )\n val_comparison_seq = jnp.concatenate(\n (val_results[""gt_seq_val""], val_results[""recon_seq_val""]),\n axis=1,\n )\n val_results[""val_comparison_seq""] = einops.rearrange(\n val_comparison_seq * 255, ""t h w c -> h (t w) c""\n )\n if args.eval_full_frame:\n val_results[""full_frame_seq_val""] = val_results[\n ""full_frame""\n ][0].clip(0, 1)\n val_results[""val_full_frame_comparison_seq""] = (\n jnp.concatenate(\n (\n val_results[""gt_seq_val""],\n val_results[""full_frame_seq_val""],\n ),\n axis=1,\n )\n )\n val_results[""val_full_frame_comparison_seq""] = (\n einops.rearrange(\n val_results[""val_full_frame_comparison_seq""] * 255,\n ""t h w c -> h (t w) c"",\n )\n )\n # NOTE: Process-dependent control flow deliberately happens\n # after indexing 
operation since it must not contain code\n # sections that lead to cross-accelerator communication.\n if jax.process_index() == 0:\n log_images = dict(\n image=wandb.Image(np.asarray(gt_seq[args.seq_len - 1])),\n recon=wandb.Image(np.asarray(recon_seq[args.seq_len - 1])),\n true_vs_recon=wandb.Image(\n np.asarray(comparison_seq.astype(np.uint8))\n ),\n )\n if val_results:\n log_images.update(\n dict(\n val_image=wandb.Image(\n np.asarray(\n val_results[""gt_seq_val""][args.seq_len - 1]\n )\n ),\n val_recon=wandb.Image(\n np.asarray(\n val_results[""recon_seq_val""][\n args.seq_len - 1\n ]\n )\n ),\n val_true_vs_recon=wandb.Image(\n np.asarray(\n val_results[""val_comparison_seq""].astype(\n np.uint8\n )\n )\n ),\n )\n )\n if args.eval_full_frame:\n log_images.update(\n dict(\n val_full_frame=wandb.Image(\n np.asarray(\n val_results[""full_frame_seq_val""][\n args.seq_len - 1\n ]\n )\n ),\n val_true_vs_full_frame=wandb.Image(\n np.asarray(\n val_results[\n ""val_full_frame_comparison_seq""\n ].astype(np.uint8)\n )\n ),\n )\n )\n wandb.log(log_images)\n # --- Checkpointing ---\n if args.save_ckpt and step % args.log_checkpoint_interval == 0:\n assert checkpoint_manager is not None\n optimizer_state = nnx.state(optimizer)\n if val_iterator:\n ckpt_manager_args = ocp.args.Composite(\n model_state=ocp.args.PyTreeSave(optimizer_state), # type: ignore\n train_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n train_iterator # type: ignore\n ),\n val_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n val_iterator # type: ignore\n ),\n )\n else:\n ckpt_manager_args = ocp.args.Composite(\n model_state=ocp.args.PyTreeSave(optimizer_state), # type: ignore\n train_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n train_iterator # type: ignore\n ),\n )\n checkpoint_manager.save(step, args=ckpt_manager_args)\n print(f""Saved checkpoint at step {step}"")\n if step >= args.num_steps:\n break",python,selection_command +1716,1259267,"jasmine/train_dynamics.py",25016,6521," gt_seq = batch[""videos""][0].astype(jnp.float32) / 255.0\n recon_seq = recon[0].clip(0, 1)\n comparison_seq = jnp.concatenate((gt_seq, recon_seq), axis=1)\n comparison_seq = einops.rearrange(\n comparison_seq * 255, ""t h w c -> h (t w) c""\n )\n if val_results:\n val_results[""gt_seq_val""] = (\n val_results[""gt_batch""][""videos""][0].astype(jnp.float32)\n / 255.0\n )\n val_results[""recon_seq_val""] = val_results[""recon""][0].clip(\n 0, 1\n )\n val_comparison_seq = jnp.concatenate(\n (val_results[""gt_seq_val""], val_results[""recon_seq_val""]),\n axis=1,\n )\n val_results[""val_comparison_seq""] = einops.rearrange(\n val_comparison_seq * 255, ""t h w c -> h (t w) c""\n )\n if args.eval_full_frame:\n val_results[""full_frame_seq_val""] = val_results[\n ""full_frame""\n ][0].clip(0, 1)\n val_results[""val_full_frame_comparison_seq""] = (\n jnp.concatenate(\n (\n val_results[""gt_seq_val""],\n val_results[""full_frame_seq_val""],\n ),\n axis=1,\n )\n )\n val_results[""val_full_frame_comparison_seq""] = (\n einops.rearrange(\n val_results[""val_full_frame_comparison_seq""] * 255,\n ""t h w c -> h (t w) c"",\n )\n )\n # NOTE: Process-dependent control flow deliberately happens\n # after indexing operation since it must not contain code\n # sections that lead to cross-accelerator communication.\n if jax.process_index() == 0:\n log_images = dict(\n image=wandb.Image(np.asarray(gt_seq[args.seq_len - 1])),\n recon=wandb.Image(np.asarray(recon_seq[args.seq_len - 1])),\n 
true_vs_recon=wandb.Image(\n np.asarray(comparison_seq.astype(np.uint8))\n ),\n )\n if val_results:\n log_images.update(\n dict(\n val_image=wandb.Image(\n np.asarray(\n val_results[""gt_seq_val""][args.seq_len - 1]\n )\n ),\n val_recon=wandb.Image(\n np.asarray(\n val_results[""recon_seq_val""][\n args.seq_len - 1\n ]\n )\n ),\n val_true_vs_recon=wandb.Image(\n np.asarray(\n val_results[""val_comparison_seq""].astype(\n np.uint8\n )\n )\n ),\n )\n )\n if args.eval_full_frame:\n log_images.update(\n dict(\n val_full_frame=wandb.Image(\n np.asarray(\n val_results[""full_frame_seq_val""][\n args.seq_len - 1\n ]\n )\n ),\n val_true_vs_full_frame=wandb.Image(\n np.asarray(\n val_results[\n ""val_full_frame_comparison_seq""\n ].astype(np.uint8)\n )\n ),\n )\n )\n wandb.log(log_images)\n # --- Checkpointing ---\n if args.save_ckpt and step % args.log_checkpoint_interval == 0:\n assert checkpoint_manager is not None\n optimizer_state = nnx.state(optimizer)\n if val_iterator:\n ckpt_manager_args = ocp.args.Composite(\n model_state=ocp.args.PyTreeSave(optimizer_state), # type: ignore\n train_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n train_iterator # type: ignore\n ),\n val_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n val_iterator # type: ignore\n ),\n )\n else:\n ckpt_manager_args = ocp.args.Composite(\n model_state=ocp.args.PyTreeSave(optimizer_state), # type: ignore\n train_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n train_iterator # type: ignore\n ),\n )\n checkpoint_manager.save(step, args=ckpt_manager_args)\n print(f""Saved checkpoint at step {step}"")\n if step >= args.num_steps:\n break",python,selection_command +1717,1259278,"jasmine/train_dynamics.py",24960,6577," if step % args.log_image_interval == 0:\n gt_seq = batch[""videos""][0].astype(jnp.float32) / 255.0\n recon_seq = recon[0].clip(0, 1)\n comparison_seq = jnp.concatenate((gt_seq, recon_seq), axis=1)\n comparison_seq = einops.rearrange(\n comparison_seq * 255, ""t h w c -> h (t w) c""\n )\n if val_results:\n val_results[""gt_seq_val""] = (\n val_results[""gt_batch""][""videos""][0].astype(jnp.float32)\n / 255.0\n )\n val_results[""recon_seq_val""] = val_results[""recon""][0].clip(\n 0, 1\n )\n val_comparison_seq = jnp.concatenate(\n (val_results[""gt_seq_val""], val_results[""recon_seq_val""]),\n axis=1,\n )\n val_results[""val_comparison_seq""] = einops.rearrange(\n val_comparison_seq * 255, ""t h w c -> h (t w) c""\n )\n if args.eval_full_frame:\n val_results[""full_frame_seq_val""] = val_results[\n ""full_frame""\n ][0].clip(0, 1)\n val_results[""val_full_frame_comparison_seq""] = (\n jnp.concatenate(\n (\n val_results[""gt_seq_val""],\n val_results[""full_frame_seq_val""],\n ),\n axis=1,\n )\n )\n val_results[""val_full_frame_comparison_seq""] = (\n einops.rearrange(\n val_results[""val_full_frame_comparison_seq""] * 255,\n ""t h w c -> h (t w) c"",\n )\n )\n # NOTE: Process-dependent control flow deliberately happens\n # after indexing operation since it must not contain code\n # sections that lead to cross-accelerator communication.\n if jax.process_index() == 0:\n log_images = dict(\n image=wandb.Image(np.asarray(gt_seq[args.seq_len - 1])),\n recon=wandb.Image(np.asarray(recon_seq[args.seq_len - 1])),\n true_vs_recon=wandb.Image(\n np.asarray(comparison_seq.astype(np.uint8))\n ),\n )\n if val_results:\n log_images.update(\n dict(\n val_image=wandb.Image(\n np.asarray(\n val_results[""gt_seq_val""][args.seq_len - 1]\n )\n ),\n val_recon=wandb.Image(\n 
np.asarray(\n val_results[""recon_seq_val""][\n args.seq_len - 1\n ]\n )\n ),\n val_true_vs_recon=wandb.Image(\n np.asarray(\n val_results[""val_comparison_seq""].astype(\n np.uint8\n )\n )\n ),\n )\n )\n if args.eval_full_frame:\n log_images.update(\n dict(\n val_full_frame=wandb.Image(\n np.asarray(\n val_results[""full_frame_seq_val""][\n args.seq_len - 1\n ]\n )\n ),\n val_true_vs_full_frame=wandb.Image(\n np.asarray(\n val_results[\n ""val_full_frame_comparison_seq""\n ].astype(np.uint8)\n )\n ),\n )\n )\n wandb.log(log_images)\n # --- Checkpointing ---\n if args.save_ckpt and step % args.log_checkpoint_interval == 0:\n assert checkpoint_manager is not None\n optimizer_state = nnx.state(optimizer)\n if val_iterator:\n ckpt_manager_args = ocp.args.Composite(\n model_state=ocp.args.PyTreeSave(optimizer_state), # type: ignore\n train_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n train_iterator # type: ignore\n ),\n val_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n val_iterator # type: ignore\n ),\n )\n else:\n ckpt_manager_args = ocp.args.Composite(\n model_state=ocp.args.PyTreeSave(optimizer_state), # type: ignore\n train_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n train_iterator # type: ignore\n ),\n )\n checkpoint_manager.save(step, args=ckpt_manager_args)\n print(f""Saved checkpoint at step {step}"")\n if step >= args.num_steps:\n break",python,selection_command +1718,1259318,"jasmine/train_dynamics.py",24920,6617," wandb.log(log_dict)\n if step % args.log_image_interval == 0:\n gt_seq = batch[""videos""][0].astype(jnp.float32) / 255.0\n recon_seq = recon[0].clip(0, 1)\n comparison_seq = jnp.concatenate((gt_seq, recon_seq), axis=1)\n comparison_seq = einops.rearrange(\n comparison_seq * 255, ""t h w c -> h (t w) c""\n )\n if val_results:\n val_results[""gt_seq_val""] = (\n val_results[""gt_batch""][""videos""][0].astype(jnp.float32)\n / 255.0\n )\n val_results[""recon_seq_val""] = val_results[""recon""][0].clip(\n 0, 1\n )\n val_comparison_seq = jnp.concatenate(\n (val_results[""gt_seq_val""], val_results[""recon_seq_val""]),\n axis=1,\n )\n val_results[""val_comparison_seq""] = einops.rearrange(\n val_comparison_seq * 255, ""t h w c -> h (t w) c""\n )\n if args.eval_full_frame:\n val_results[""full_frame_seq_val""] = val_results[\n ""full_frame""\n ][0].clip(0, 1)\n val_results[""val_full_frame_comparison_seq""] = (\n jnp.concatenate(\n (\n val_results[""gt_seq_val""],\n val_results[""full_frame_seq_val""],\n ),\n axis=1,\n )\n )\n val_results[""val_full_frame_comparison_seq""] = (\n einops.rearrange(\n val_results[""val_full_frame_comparison_seq""] * 255,\n ""t h w c -> h (t w) c"",\n )\n )\n # NOTE: Process-dependent control flow deliberately happens\n # after indexing operation since it must not contain code\n # sections that lead to cross-accelerator communication.\n if jax.process_index() == 0:\n log_images = dict(\n image=wandb.Image(np.asarray(gt_seq[args.seq_len - 1])),\n recon=wandb.Image(np.asarray(recon_seq[args.seq_len - 1])),\n true_vs_recon=wandb.Image(\n np.asarray(comparison_seq.astype(np.uint8))\n ),\n )\n if val_results:\n log_images.update(\n dict(\n val_image=wandb.Image(\n np.asarray(\n val_results[""gt_seq_val""][args.seq_len - 1]\n )\n ),\n val_recon=wandb.Image(\n np.asarray(\n val_results[""recon_seq_val""][\n args.seq_len - 1\n ]\n )\n ),\n val_true_vs_recon=wandb.Image(\n np.asarray(\n val_results[""val_comparison_seq""].astype(\n np.uint8\n )\n )\n ),\n )\n )\n if args.eval_full_frame:\n 
log_images.update(\n dict(\n val_full_frame=wandb.Image(\n np.asarray(\n val_results[""full_frame_seq_val""][\n args.seq_len - 1\n ]\n )\n ),\n val_true_vs_full_frame=wandb.Image(\n np.asarray(\n val_results[\n ""val_full_frame_comparison_seq""\n ].astype(np.uint8)\n )\n ),\n )\n )\n wandb.log(log_images)\n # --- Checkpointing ---\n if args.save_ckpt and step % args.log_checkpoint_interval == 0:\n assert checkpoint_manager is not None\n optimizer_state = nnx.state(optimizer)\n if val_iterator:\n ckpt_manager_args = ocp.args.Composite(\n model_state=ocp.args.PyTreeSave(optimizer_state), # type: ignore\n train_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n train_iterator # type: ignore\n ),\n val_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n val_iterator # type: ignore\n ),\n )\n else:\n ckpt_manager_args = ocp.args.Composite(\n model_state=ocp.args.PyTreeSave(optimizer_state), # type: ignore\n train_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n train_iterator # type: ignore\n ),\n )\n checkpoint_manager.save(step, args=ckpt_manager_args)\n print(f""Saved checkpoint at step {step}"")\n if step >= args.num_steps:\n break",python,selection_command +1719,1259358,"jasmine/train_dynamics.py",24856,6681," log_dict.update(val_results[""metrics""])\n wandb.log(log_dict)\n if step % args.log_image_interval == 0:\n gt_seq = batch[""videos""][0].astype(jnp.float32) / 255.0\n recon_seq = recon[0].clip(0, 1)\n comparison_seq = jnp.concatenate((gt_seq, recon_seq), axis=1)\n comparison_seq = einops.rearrange(\n comparison_seq * 255, ""t h w c -> h (t w) c""\n )\n if val_results:\n val_results[""gt_seq_val""] = (\n val_results[""gt_batch""][""videos""][0].astype(jnp.float32)\n / 255.0\n )\n val_results[""recon_seq_val""] = val_results[""recon""][0].clip(\n 0, 1\n )\n val_comparison_seq = jnp.concatenate(\n (val_results[""gt_seq_val""], val_results[""recon_seq_val""]),\n axis=1,\n )\n val_results[""val_comparison_seq""] = einops.rearrange(\n val_comparison_seq * 255, ""t h w c -> h (t w) c""\n )\n if args.eval_full_frame:\n val_results[""full_frame_seq_val""] = val_results[\n ""full_frame""\n ][0].clip(0, 1)\n val_results[""val_full_frame_comparison_seq""] = (\n jnp.concatenate(\n (\n val_results[""gt_seq_val""],\n val_results[""full_frame_seq_val""],\n ),\n axis=1,\n )\n )\n val_results[""val_full_frame_comparison_seq""] = (\n einops.rearrange(\n val_results[""val_full_frame_comparison_seq""] * 255,\n ""t h w c -> h (t w) c"",\n )\n )\n # NOTE: Process-dependent control flow deliberately happens\n # after indexing operation since it must not contain code\n # sections that lead to cross-accelerator communication.\n if jax.process_index() == 0:\n log_images = dict(\n image=wandb.Image(np.asarray(gt_seq[args.seq_len - 1])),\n recon=wandb.Image(np.asarray(recon_seq[args.seq_len - 1])),\n true_vs_recon=wandb.Image(\n np.asarray(comparison_seq.astype(np.uint8))\n ),\n )\n if val_results:\n log_images.update(\n dict(\n val_image=wandb.Image(\n np.asarray(\n val_results[""gt_seq_val""][args.seq_len - 1]\n )\n ),\n val_recon=wandb.Image(\n np.asarray(\n val_results[""recon_seq_val""][\n args.seq_len - 1\n ]\n )\n ),\n val_true_vs_recon=wandb.Image(\n np.asarray(\n val_results[""val_comparison_seq""].astype(\n np.uint8\n )\n )\n ),\n )\n )\n if args.eval_full_frame:\n log_images.update(\n dict(\n val_full_frame=wandb.Image(\n np.asarray(\n val_results[""full_frame_seq_val""][\n args.seq_len - 1\n ]\n )\n ),\n val_true_vs_full_frame=wandb.Image(\n 
np.asarray(\n val_results[\n ""val_full_frame_comparison_seq""\n ].astype(np.uint8)\n )\n ),\n )\n )\n wandb.log(log_images)\n # --- Checkpointing ---\n if args.save_ckpt and step % args.log_checkpoint_interval == 0:\n assert checkpoint_manager is not None\n optimizer_state = nnx.state(optimizer)\n if val_iterator:\n ckpt_manager_args = ocp.args.Composite(\n model_state=ocp.args.PyTreeSave(optimizer_state), # type: ignore\n train_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n train_iterator # type: ignore\n ),\n val_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n val_iterator # type: ignore\n ),\n )\n else:\n ckpt_manager_args = ocp.args.Composite(\n model_state=ocp.args.PyTreeSave(optimizer_state), # type: ignore\n train_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n train_iterator # type: ignore\n ),\n )\n checkpoint_manager.save(step, args=ckpt_manager_args)\n print(f""Saved checkpoint at step {step}"")\n if step >= args.num_steps:\n break",python,selection_command +1720,1259404,"jasmine/train_dynamics.py",24820,6717," if val_results:\n log_dict.update(val_results[""metrics""])\n wandb.log(log_dict)\n if step % args.log_image_interval == 0:\n gt_seq = batch[""videos""][0].astype(jnp.float32) / 255.0\n recon_seq = recon[0].clip(0, 1)\n comparison_seq = jnp.concatenate((gt_seq, recon_seq), axis=1)\n comparison_seq = einops.rearrange(\n comparison_seq * 255, ""t h w c -> h (t w) c""\n )\n if val_results:\n val_results[""gt_seq_val""] = (\n val_results[""gt_batch""][""videos""][0].astype(jnp.float32)\n / 255.0\n )\n val_results[""recon_seq_val""] = val_results[""recon""][0].clip(\n 0, 1\n )\n val_comparison_seq = jnp.concatenate(\n (val_results[""gt_seq_val""], val_results[""recon_seq_val""]),\n axis=1,\n )\n val_results[""val_comparison_seq""] = einops.rearrange(\n val_comparison_seq * 255, ""t h w c -> h (t w) c""\n )\n if args.eval_full_frame:\n val_results[""full_frame_seq_val""] = val_results[\n ""full_frame""\n ][0].clip(0, 1)\n val_results[""val_full_frame_comparison_seq""] = (\n jnp.concatenate(\n (\n val_results[""gt_seq_val""],\n val_results[""full_frame_seq_val""],\n ),\n axis=1,\n )\n )\n val_results[""val_full_frame_comparison_seq""] = (\n einops.rearrange(\n val_results[""val_full_frame_comparison_seq""] * 255,\n ""t h w c -> h (t w) c"",\n )\n )\n # NOTE: Process-dependent control flow deliberately happens\n # after indexing operation since it must not contain code\n # sections that lead to cross-accelerator communication.\n if jax.process_index() == 0:\n log_images = dict(\n image=wandb.Image(np.asarray(gt_seq[args.seq_len - 1])),\n recon=wandb.Image(np.asarray(recon_seq[args.seq_len - 1])),\n true_vs_recon=wandb.Image(\n np.asarray(comparison_seq.astype(np.uint8))\n ),\n )\n if val_results:\n log_images.update(\n dict(\n val_image=wandb.Image(\n np.asarray(\n val_results[""gt_seq_val""][args.seq_len - 1]\n )\n ),\n val_recon=wandb.Image(\n np.asarray(\n val_results[""recon_seq_val""][\n args.seq_len - 1\n ]\n )\n ),\n val_true_vs_recon=wandb.Image(\n np.asarray(\n val_results[""val_comparison_seq""].astype(\n np.uint8\n )\n )\n ),\n )\n )\n if args.eval_full_frame:\n log_images.update(\n dict(\n val_full_frame=wandb.Image(\n np.asarray(\n val_results[""full_frame_seq_val""][\n args.seq_len - 1\n ]\n )\n ),\n val_true_vs_full_frame=wandb.Image(\n np.asarray(\n val_results[\n ""val_full_frame_comparison_seq""\n ].astype(np.uint8)\n )\n ),\n )\n )\n wandb.log(log_images)\n # --- Checkpointing ---\n if args.save_ckpt 
and step % args.log_checkpoint_interval == 0:\n assert checkpoint_manager is not None\n optimizer_state = nnx.state(optimizer)\n if val_iterator:\n ckpt_manager_args = ocp.args.Composite(\n model_state=ocp.args.PyTreeSave(optimizer_state), # type: ignore\n train_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n train_iterator # type: ignore\n ),\n val_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n val_iterator # type: ignore\n ),\n )\n else:\n ckpt_manager_args = ocp.args.Composite(\n model_state=ocp.args.PyTreeSave(optimizer_state), # type: ignore\n train_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n train_iterator # type: ignore\n ),\n )\n checkpoint_manager.save(step, args=ckpt_manager_args)\n print(f""Saved checkpoint at step {step}"")\n if step >= args.num_steps:\n break",python,selection_command +1721,1259404,"jasmine/train_dynamics.py",24749,6788," log_dict = {""loss"": loss, ""step"": step, **metrics}\n if val_results:\n log_dict.update(val_results[""metrics""])\n wandb.log(log_dict)\n if step % args.log_image_interval == 0:\n gt_seq = batch[""videos""][0].astype(jnp.float32) / 255.0\n recon_seq = recon[0].clip(0, 1)\n comparison_seq = jnp.concatenate((gt_seq, recon_seq), axis=1)\n comparison_seq = einops.rearrange(\n comparison_seq * 255, ""t h w c -> h (t w) c""\n )\n if val_results:\n val_results[""gt_seq_val""] = (\n val_results[""gt_batch""][""videos""][0].astype(jnp.float32)\n / 255.0\n )\n val_results[""recon_seq_val""] = val_results[""recon""][0].clip(\n 0, 1\n )\n val_comparison_seq = jnp.concatenate(\n (val_results[""gt_seq_val""], val_results[""recon_seq_val""]),\n axis=1,\n )\n val_results[""val_comparison_seq""] = einops.rearrange(\n val_comparison_seq * 255, ""t h w c -> h (t w) c""\n )\n if args.eval_full_frame:\n val_results[""full_frame_seq_val""] = val_results[\n ""full_frame""\n ][0].clip(0, 1)\n val_results[""val_full_frame_comparison_seq""] = (\n jnp.concatenate(\n (\n val_results[""gt_seq_val""],\n val_results[""full_frame_seq_val""],\n ),\n axis=1,\n )\n )\n val_results[""val_full_frame_comparison_seq""] = (\n einops.rearrange(\n val_results[""val_full_frame_comparison_seq""] * 255,\n ""t h w c -> h (t w) c"",\n )\n )\n # NOTE: Process-dependent control flow deliberately happens\n # after indexing operation since it must not contain code\n # sections that lead to cross-accelerator communication.\n if jax.process_index() == 0:\n log_images = dict(\n image=wandb.Image(np.asarray(gt_seq[args.seq_len - 1])),\n recon=wandb.Image(np.asarray(recon_seq[args.seq_len - 1])),\n true_vs_recon=wandb.Image(\n np.asarray(comparison_seq.astype(np.uint8))\n ),\n )\n if val_results:\n log_images.update(\n dict(\n val_image=wandb.Image(\n np.asarray(\n val_results[""gt_seq_val""][args.seq_len - 1]\n )\n ),\n val_recon=wandb.Image(\n np.asarray(\n val_results[""recon_seq_val""][\n args.seq_len - 1\n ]\n )\n ),\n val_true_vs_recon=wandb.Image(\n np.asarray(\n val_results[""val_comparison_seq""].astype(\n np.uint8\n )\n )\n ),\n )\n )\n if args.eval_full_frame:\n log_images.update(\n dict(\n val_full_frame=wandb.Image(\n np.asarray(\n val_results[""full_frame_seq_val""][\n args.seq_len - 1\n ]\n )\n ),\n val_true_vs_full_frame=wandb.Image(\n np.asarray(\n val_results[\n ""val_full_frame_comparison_seq""\n ].astype(np.uint8)\n )\n ),\n )\n )\n wandb.log(log_images)\n # --- Checkpointing ---\n if args.save_ckpt and step % args.log_checkpoint_interval == 0:\n assert checkpoint_manager is not None\n optimizer_state = 
nnx.state(optimizer)\n if val_iterator:\n ckpt_manager_args = ocp.args.Composite(\n model_state=ocp.args.PyTreeSave(optimizer_state), # type: ignore\n train_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n train_iterator # type: ignore\n ),\n val_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n val_iterator # type: ignore\n ),\n )\n else:\n ckpt_manager_args = ocp.args.Composite(\n model_state=ocp.args.PyTreeSave(optimizer_state), # type: ignore\n train_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n train_iterator # type: ignore\n ),\n )\n checkpoint_manager.save(step, args=ckpt_manager_args)\n print(f""Saved checkpoint at step {step}"")\n if step >= args.num_steps:\n break",python,selection_command +1722,1259462,"jasmine/train_dynamics.py",24670,6867," if step % args.log_interval == 0 and jax.process_index() == 0:\n log_dict = {""loss"": loss, ""step"": step, **metrics}\n if val_results:\n log_dict.update(val_results[""metrics""])\n wandb.log(log_dict)\n if step % args.log_image_interval == 0:\n gt_seq = batch[""videos""][0].astype(jnp.float32) / 255.0\n recon_seq = recon[0].clip(0, 1)\n comparison_seq = jnp.concatenate((gt_seq, recon_seq), axis=1)\n comparison_seq = einops.rearrange(\n comparison_seq * 255, ""t h w c -> h (t w) c""\n )\n if val_results:\n val_results[""gt_seq_val""] = (\n val_results[""gt_batch""][""videos""][0].astype(jnp.float32)\n / 255.0\n )\n val_results[""recon_seq_val""] = val_results[""recon""][0].clip(\n 0, 1\n )\n val_comparison_seq = jnp.concatenate(\n (val_results[""gt_seq_val""], val_results[""recon_seq_val""]),\n axis=1,\n )\n val_results[""val_comparison_seq""] = einops.rearrange(\n val_comparison_seq * 255, ""t h w c -> h (t w) c""\n )\n if args.eval_full_frame:\n val_results[""full_frame_seq_val""] = val_results[\n ""full_frame""\n ][0].clip(0, 1)\n val_results[""val_full_frame_comparison_seq""] = (\n jnp.concatenate(\n (\n val_results[""gt_seq_val""],\n val_results[""full_frame_seq_val""],\n ),\n axis=1,\n )\n )\n val_results[""val_full_frame_comparison_seq""] = (\n einops.rearrange(\n val_results[""val_full_frame_comparison_seq""] * 255,\n ""t h w c -> h (t w) c"",\n )\n )\n # NOTE: Process-dependent control flow deliberately happens\n # after indexing operation since it must not contain code\n # sections that lead to cross-accelerator communication.\n if jax.process_index() == 0:\n log_images = dict(\n image=wandb.Image(np.asarray(gt_seq[args.seq_len - 1])),\n recon=wandb.Image(np.asarray(recon_seq[args.seq_len - 1])),\n true_vs_recon=wandb.Image(\n np.asarray(comparison_seq.astype(np.uint8))\n ),\n )\n if val_results:\n log_images.update(\n dict(\n val_image=wandb.Image(\n np.asarray(\n val_results[""gt_seq_val""][args.seq_len - 1]\n )\n ),\n val_recon=wandb.Image(\n np.asarray(\n val_results[""recon_seq_val""][\n args.seq_len - 1\n ]\n )\n ),\n val_true_vs_recon=wandb.Image(\n np.asarray(\n val_results[""val_comparison_seq""].astype(\n np.uint8\n )\n )\n ),\n )\n )\n if args.eval_full_frame:\n log_images.update(\n dict(\n val_full_frame=wandb.Image(\n np.asarray(\n val_results[""full_frame_seq_val""][\n args.seq_len - 1\n ]\n )\n ),\n val_true_vs_full_frame=wandb.Image(\n np.asarray(\n val_results[\n ""val_full_frame_comparison_seq""\n ].astype(np.uint8)\n )\n ),\n )\n )\n wandb.log(log_images)\n # --- Checkpointing ---\n if args.save_ckpt and step % args.log_checkpoint_interval == 0:\n assert checkpoint_manager is not None\n optimizer_state = nnx.state(optimizer)\n if val_iterator:\n 
ckpt_manager_args = ocp.args.Composite(\n model_state=ocp.args.PyTreeSave(optimizer_state), # type: ignore\n train_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n train_iterator # type: ignore\n ),\n val_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n val_iterator # type: ignore\n ),\n )\n else:\n ckpt_manager_args = ocp.args.Composite(\n model_state=ocp.args.PyTreeSave(optimizer_state), # type: ignore\n train_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n train_iterator # type: ignore\n ),\n )\n checkpoint_manager.save(step, args=ckpt_manager_args)\n print(f""Saved checkpoint at step {step}"")\n if step >= args.num_steps:\n break",python,selection_command +1723,1259482,"jasmine/train_dynamics.py",24645,6892," if args.log:\n if step % args.log_interval == 0 and jax.process_index() == 0:\n log_dict = {""loss"": loss, ""step"": step, **metrics}\n if val_results:\n log_dict.update(val_results[""metrics""])\n wandb.log(log_dict)\n if step % args.log_image_interval == 0:\n gt_seq = batch[""videos""][0].astype(jnp.float32) / 255.0\n recon_seq = recon[0].clip(0, 1)\n comparison_seq = jnp.concatenate((gt_seq, recon_seq), axis=1)\n comparison_seq = einops.rearrange(\n comparison_seq * 255, ""t h w c -> h (t w) c""\n )\n if val_results:\n val_results[""gt_seq_val""] = (\n val_results[""gt_batch""][""videos""][0].astype(jnp.float32)\n / 255.0\n )\n val_results[""recon_seq_val""] = val_results[""recon""][0].clip(\n 0, 1\n )\n val_comparison_seq = jnp.concatenate(\n (val_results[""gt_seq_val""], val_results[""recon_seq_val""]),\n axis=1,\n )\n val_results[""val_comparison_seq""] = einops.rearrange(\n val_comparison_seq * 255, ""t h w c -> h (t w) c""\n )\n if args.eval_full_frame:\n val_results[""full_frame_seq_val""] = val_results[\n ""full_frame""\n ][0].clip(0, 1)\n val_results[""val_full_frame_comparison_seq""] = (\n jnp.concatenate(\n (\n val_results[""gt_seq_val""],\n val_results[""full_frame_seq_val""],\n ),\n axis=1,\n )\n )\n val_results[""val_full_frame_comparison_seq""] = (\n einops.rearrange(\n val_results[""val_full_frame_comparison_seq""] * 255,\n ""t h w c -> h (t w) c"",\n )\n )\n # NOTE: Process-dependent control flow deliberately happens\n # after indexing operation since it must not contain code\n # sections that lead to cross-accelerator communication.\n if jax.process_index() == 0:\n log_images = dict(\n image=wandb.Image(np.asarray(gt_seq[args.seq_len - 1])),\n recon=wandb.Image(np.asarray(recon_seq[args.seq_len - 1])),\n true_vs_recon=wandb.Image(\n np.asarray(comparison_seq.astype(np.uint8))\n ),\n )\n if val_results:\n log_images.update(\n dict(\n val_image=wandb.Image(\n np.asarray(\n val_results[""gt_seq_val""][args.seq_len - 1]\n )\n ),\n val_recon=wandb.Image(\n np.asarray(\n val_results[""recon_seq_val""][\n args.seq_len - 1\n ]\n )\n ),\n val_true_vs_recon=wandb.Image(\n np.asarray(\n val_results[""val_comparison_seq""].astype(\n np.uint8\n )\n )\n ),\n )\n )\n if args.eval_full_frame:\n log_images.update(\n dict(\n val_full_frame=wandb.Image(\n np.asarray(\n val_results[""full_frame_seq_val""][\n args.seq_len - 1\n ]\n )\n ),\n val_true_vs_full_frame=wandb.Image(\n np.asarray(\n val_results[\n ""val_full_frame_comparison_seq""\n ].astype(np.uint8)\n )\n ),\n )\n )\n wandb.log(log_images)\n # --- Checkpointing ---\n if args.save_ckpt and step % args.log_checkpoint_interval == 0:\n assert checkpoint_manager is not None\n optimizer_state = nnx.state(optimizer)\n if val_iterator:\n ckpt_manager_args = 
ocp.args.Composite(\n model_state=ocp.args.PyTreeSave(optimizer_state), # type: ignore\n train_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n train_iterator # type: ignore\n ),\n val_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n val_iterator # type: ignore\n ),\n )\n else:\n ckpt_manager_args = ocp.args.Composite(\n model_state=ocp.args.PyTreeSave(optimizer_state), # type: ignore\n train_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n train_iterator # type: ignore\n ),\n )\n checkpoint_manager.save(step, args=ckpt_manager_args)\n print(f""Saved checkpoint at step {step}"")\n if step >= args.num_steps:\n break",python,selection_command +1724,1259512,"jasmine/train_dynamics.py",24615,6922," # --- Logging ---\n if args.log:\n if step % args.log_interval == 0 and jax.process_index() == 0:\n log_dict = {""loss"": loss, ""step"": step, **metrics}\n if val_results:\n log_dict.update(val_results[""metrics""])\n wandb.log(log_dict)\n if step % args.log_image_interval == 0:\n gt_seq = batch[""videos""][0].astype(jnp.float32) / 255.0\n recon_seq = recon[0].clip(0, 1)\n comparison_seq = jnp.concatenate((gt_seq, recon_seq), axis=1)\n comparison_seq = einops.rearrange(\n comparison_seq * 255, ""t h w c -> h (t w) c""\n )\n if val_results:\n val_results[""gt_seq_val""] = (\n val_results[""gt_batch""][""videos""][0].astype(jnp.float32)\n / 255.0\n )\n val_results[""recon_seq_val""] = val_results[""recon""][0].clip(\n 0, 1\n )\n val_comparison_seq = jnp.concatenate(\n (val_results[""gt_seq_val""], val_results[""recon_seq_val""]),\n axis=1,\n )\n val_results[""val_comparison_seq""] = einops.rearrange(\n val_comparison_seq * 255, ""t h w c -> h (t w) c""\n )\n if args.eval_full_frame:\n val_results[""full_frame_seq_val""] = val_results[\n ""full_frame""\n ][0].clip(0, 1)\n val_results[""val_full_frame_comparison_seq""] = (\n jnp.concatenate(\n (\n val_results[""gt_seq_val""],\n val_results[""full_frame_seq_val""],\n ),\n axis=1,\n )\n )\n val_results[""val_full_frame_comparison_seq""] = (\n einops.rearrange(\n val_results[""val_full_frame_comparison_seq""] * 255,\n ""t h w c -> h (t w) c"",\n )\n )\n # NOTE: Process-dependent control flow deliberately happens\n # after indexing operation since it must not contain code\n # sections that lead to cross-accelerator communication.\n if jax.process_index() == 0:\n log_images = dict(\n image=wandb.Image(np.asarray(gt_seq[args.seq_len - 1])),\n recon=wandb.Image(np.asarray(recon_seq[args.seq_len - 1])),\n true_vs_recon=wandb.Image(\n np.asarray(comparison_seq.astype(np.uint8))\n ),\n )\n if val_results:\n log_images.update(\n dict(\n val_image=wandb.Image(\n np.asarray(\n val_results[""gt_seq_val""][args.seq_len - 1]\n )\n ),\n val_recon=wandb.Image(\n np.asarray(\n val_results[""recon_seq_val""][\n args.seq_len - 1\n ]\n )\n ),\n val_true_vs_recon=wandb.Image(\n np.asarray(\n val_results[""val_comparison_seq""].astype(\n np.uint8\n )\n )\n ),\n )\n )\n if args.eval_full_frame:\n log_images.update(\n dict(\n val_full_frame=wandb.Image(\n np.asarray(\n val_results[""full_frame_seq_val""][\n args.seq_len - 1\n ]\n )\n ),\n val_true_vs_full_frame=wandb.Image(\n np.asarray(\n val_results[\n ""val_full_frame_comparison_seq""\n ].astype(np.uint8)\n )\n ),\n )\n )\n wandb.log(log_images)\n # --- Checkpointing ---\n if args.save_ckpt and step % args.log_checkpoint_interval == 0:\n assert checkpoint_manager is not None\n optimizer_state = nnx.state(optimizer)\n if val_iterator:\n ckpt_manager_args = 
ocp.args.Composite(\n model_state=ocp.args.PyTreeSave(optimizer_state), # type: ignore\n train_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n train_iterator # type: ignore\n ),\n val_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n val_iterator # type: ignore\n ),\n )\n else:\n ckpt_manager_args = ocp.args.Composite(\n model_state=ocp.args.PyTreeSave(optimizer_state), # type: ignore\n train_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n train_iterator # type: ignore\n ),\n )\n checkpoint_manager.save(step, args=ckpt_manager_args)\n print(f""Saved checkpoint at step {step}"")\n if step >= args.num_steps:\n break",python,selection_command +1725,1259553,"jasmine/train_dynamics.py",24614,6923,"\n # --- Logging ---\n if args.log:\n if step % args.log_interval == 0 and jax.process_index() == 0:\n log_dict = {""loss"": loss, ""step"": step, **metrics}\n if val_results:\n log_dict.update(val_results[""metrics""])\n wandb.log(log_dict)\n if step % args.log_image_interval == 0:\n gt_seq = batch[""videos""][0].astype(jnp.float32) / 255.0\n recon_seq = recon[0].clip(0, 1)\n comparison_seq = jnp.concatenate((gt_seq, recon_seq), axis=1)\n comparison_seq = einops.rearrange(\n comparison_seq * 255, ""t h w c -> h (t w) c""\n )\n if val_results:\n val_results[""gt_seq_val""] = (\n val_results[""gt_batch""][""videos""][0].astype(jnp.float32)\n / 255.0\n )\n val_results[""recon_seq_val""] = val_results[""recon""][0].clip(\n 0, 1\n )\n val_comparison_seq = jnp.concatenate(\n (val_results[""gt_seq_val""], val_results[""recon_seq_val""]),\n axis=1,\n )\n val_results[""val_comparison_seq""] = einops.rearrange(\n val_comparison_seq * 255, ""t h w c -> h (t w) c""\n )\n if args.eval_full_frame:\n val_results[""full_frame_seq_val""] = val_results[\n ""full_frame""\n ][0].clip(0, 1)\n val_results[""val_full_frame_comparison_seq""] = (\n jnp.concatenate(\n (\n val_results[""gt_seq_val""],\n val_results[""full_frame_seq_val""],\n ),\n axis=1,\n )\n )\n val_results[""val_full_frame_comparison_seq""] = (\n einops.rearrange(\n val_results[""val_full_frame_comparison_seq""] * 255,\n ""t h w c -> h (t w) c"",\n )\n )\n # NOTE: Process-dependent control flow deliberately happens\n # after indexing operation since it must not contain code\n # sections that lead to cross-accelerator communication.\n if jax.process_index() == 0:\n log_images = dict(\n image=wandb.Image(np.asarray(gt_seq[args.seq_len - 1])),\n recon=wandb.Image(np.asarray(recon_seq[args.seq_len - 1])),\n true_vs_recon=wandb.Image(\n np.asarray(comparison_seq.astype(np.uint8))\n ),\n )\n if val_results:\n log_images.update(\n dict(\n val_image=wandb.Image(\n np.asarray(\n val_results[""gt_seq_val""][args.seq_len - 1]\n )\n ),\n val_recon=wandb.Image(\n np.asarray(\n val_results[""recon_seq_val""][\n args.seq_len - 1\n ]\n )\n ),\n val_true_vs_recon=wandb.Image(\n np.asarray(\n val_results[""val_comparison_seq""].astype(\n np.uint8\n )\n )\n ),\n )\n )\n if args.eval_full_frame:\n log_images.update(\n dict(\n val_full_frame=wandb.Image(\n np.asarray(\n val_results[""full_frame_seq_val""][\n args.seq_len - 1\n ]\n )\n ),\n val_true_vs_full_frame=wandb.Image(\n np.asarray(\n val_results[\n ""val_full_frame_comparison_seq""\n ].astype(np.uint8)\n )\n ),\n )\n )\n wandb.log(log_images)\n # --- Checkpointing ---\n if args.save_ckpt and step % args.log_checkpoint_interval == 0:\n assert checkpoint_manager is not None\n optimizer_state = nnx.state(optimizer)\n if val_iterator:\n ckpt_manager_args = 
ocp.args.Composite(\n model_state=ocp.args.PyTreeSave(optimizer_state), # type: ignore\n train_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n train_iterator # type: ignore\n ),\n val_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n val_iterator # type: ignore\n ),\n )\n else:\n ckpt_manager_args = ocp.args.Composite(\n model_state=ocp.args.PyTreeSave(optimizer_state), # type: ignore\n train_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n train_iterator # type: ignore\n ),\n )\n checkpoint_manager.save(step, args=ckpt_manager_args)\n print(f""Saved checkpoint at step {step}"")\n if step >= args.num_steps:\n break",python,selection_command +1726,1259554,"jasmine/train_dynamics.py",24596,6941," }\n\n # --- Logging ---\n if args.log:\n if step % args.log_interval == 0 and jax.process_index() == 0:\n log_dict = {""loss"": loss, ""step"": step, **metrics}\n if val_results:\n log_dict.update(val_results[""metrics""])\n wandb.log(log_dict)\n if step % args.log_image_interval == 0:\n gt_seq = batch[""videos""][0].astype(jnp.float32) / 255.0\n recon_seq = recon[0].clip(0, 1)\n comparison_seq = jnp.concatenate((gt_seq, recon_seq), axis=1)\n comparison_seq = einops.rearrange(\n comparison_seq * 255, ""t h w c -> h (t w) c""\n )\n if val_results:\n val_results[""gt_seq_val""] = (\n val_results[""gt_batch""][""videos""][0].astype(jnp.float32)\n / 255.0\n )\n val_results[""recon_seq_val""] = val_results[""recon""][0].clip(\n 0, 1\n )\n val_comparison_seq = jnp.concatenate(\n (val_results[""gt_seq_val""], val_results[""recon_seq_val""]),\n axis=1,\n )\n val_results[""val_comparison_seq""] = einops.rearrange(\n val_comparison_seq * 255, ""t h w c -> h (t w) c""\n )\n if args.eval_full_frame:\n val_results[""full_frame_seq_val""] = val_results[\n ""full_frame""\n ][0].clip(0, 1)\n val_results[""val_full_frame_comparison_seq""] = (\n jnp.concatenate(\n (\n val_results[""gt_seq_val""],\n val_results[""full_frame_seq_val""],\n ),\n axis=1,\n )\n )\n val_results[""val_full_frame_comparison_seq""] = (\n einops.rearrange(\n val_results[""val_full_frame_comparison_seq""] * 255,\n ""t h w c -> h (t w) c"",\n )\n )\n # NOTE: Process-dependent control flow deliberately happens\n # after indexing operation since it must not contain code\n # sections that lead to cross-accelerator communication.\n if jax.process_index() == 0:\n log_images = dict(\n image=wandb.Image(np.asarray(gt_seq[args.seq_len - 1])),\n recon=wandb.Image(np.asarray(recon_seq[args.seq_len - 1])),\n true_vs_recon=wandb.Image(\n np.asarray(comparison_seq.astype(np.uint8))\n ),\n )\n if val_results:\n log_images.update(\n dict(\n val_image=wandb.Image(\n np.asarray(\n val_results[""gt_seq_val""][args.seq_len - 1]\n )\n ),\n val_recon=wandb.Image(\n np.asarray(\n val_results[""recon_seq_val""][\n args.seq_len - 1\n ]\n )\n ),\n val_true_vs_recon=wandb.Image(\n np.asarray(\n val_results[""val_comparison_seq""].astype(\n np.uint8\n )\n )\n ),\n )\n )\n if args.eval_full_frame:\n log_images.update(\n dict(\n val_full_frame=wandb.Image(\n np.asarray(\n val_results[""full_frame_seq_val""][\n args.seq_len - 1\n ]\n )\n ),\n val_true_vs_full_frame=wandb.Image(\n np.asarray(\n val_results[\n ""val_full_frame_comparison_seq""\n ].astype(np.uint8)\n )\n ),\n )\n )\n wandb.log(log_images)\n # --- Checkpointing ---\n if args.save_ckpt and step % args.log_checkpoint_interval == 0:\n assert checkpoint_manager is not None\n optimizer_state = nnx.state(optimizer)\n if val_iterator:\n ckpt_manager_args = 
ocp.args.Composite(\n model_state=ocp.args.PyTreeSave(optimizer_state), # type: ignore\n train_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n train_iterator # type: ignore\n ),\n val_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n val_iterator # type: ignore\n ),\n )\n else:\n ckpt_manager_args = ocp.args.Composite(\n model_state=ocp.args.PyTreeSave(optimizer_state), # type: ignore\n train_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n train_iterator # type: ignore\n ),\n )\n checkpoint_manager.save(step, args=ckpt_manager_args)\n print(f""Saved checkpoint at step {step}"")\n if step >= args.num_steps:\n break",python,selection_command +1727,1259598,"jasmine/train_dynamics.py",24540,6997," ""full_frame"": val_recon_full_frame,\n }\n\n # --- Logging ---\n if args.log:\n if step % args.log_interval == 0 and jax.process_index() == 0:\n log_dict = {""loss"": loss, ""step"": step, **metrics}\n if val_results:\n log_dict.update(val_results[""metrics""])\n wandb.log(log_dict)\n if step % args.log_image_interval == 0:\n gt_seq = batch[""videos""][0].astype(jnp.float32) / 255.0\n recon_seq = recon[0].clip(0, 1)\n comparison_seq = jnp.concatenate((gt_seq, recon_seq), axis=1)\n comparison_seq = einops.rearrange(\n comparison_seq * 255, ""t h w c -> h (t w) c""\n )\n if val_results:\n val_results[""gt_seq_val""] = (\n val_results[""gt_batch""][""videos""][0].astype(jnp.float32)\n / 255.0\n )\n val_results[""recon_seq_val""] = val_results[""recon""][0].clip(\n 0, 1\n )\n val_comparison_seq = jnp.concatenate(\n (val_results[""gt_seq_val""], val_results[""recon_seq_val""]),\n axis=1,\n )\n val_results[""val_comparison_seq""] = einops.rearrange(\n val_comparison_seq * 255, ""t h w c -> h (t w) c""\n )\n if args.eval_full_frame:\n val_results[""full_frame_seq_val""] = val_results[\n ""full_frame""\n ][0].clip(0, 1)\n val_results[""val_full_frame_comparison_seq""] = (\n jnp.concatenate(\n (\n val_results[""gt_seq_val""],\n val_results[""full_frame_seq_val""],\n ),\n axis=1,\n )\n )\n val_results[""val_full_frame_comparison_seq""] = (\n einops.rearrange(\n val_results[""val_full_frame_comparison_seq""] * 255,\n ""t h w c -> h (t w) c"",\n )\n )\n # NOTE: Process-dependent control flow deliberately happens\n # after indexing operation since it must not contain code\n # sections that lead to cross-accelerator communication.\n if jax.process_index() == 0:\n log_images = dict(\n image=wandb.Image(np.asarray(gt_seq[args.seq_len - 1])),\n recon=wandb.Image(np.asarray(recon_seq[args.seq_len - 1])),\n true_vs_recon=wandb.Image(\n np.asarray(comparison_seq.astype(np.uint8))\n ),\n )\n if val_results:\n log_images.update(\n dict(\n val_image=wandb.Image(\n np.asarray(\n val_results[""gt_seq_val""][args.seq_len - 1]\n )\n ),\n val_recon=wandb.Image(\n np.asarray(\n val_results[""recon_seq_val""][\n args.seq_len - 1\n ]\n )\n ),\n val_true_vs_recon=wandb.Image(\n np.asarray(\n val_results[""val_comparison_seq""].astype(\n np.uint8\n )\n )\n ),\n )\n )\n if args.eval_full_frame:\n log_images.update(\n dict(\n val_full_frame=wandb.Image(\n np.asarray(\n val_results[""full_frame_seq_val""][\n args.seq_len - 1\n ]\n )\n ),\n val_true_vs_full_frame=wandb.Image(\n np.asarray(\n val_results[\n ""val_full_frame_comparison_seq""\n ].astype(np.uint8)\n )\n ),\n )\n )\n wandb.log(log_images)\n # --- Checkpointing ---\n if args.save_ckpt and step % args.log_checkpoint_interval == 0:\n assert checkpoint_manager is not None\n optimizer_state = nnx.state(optimizer)\n if 
val_iterator:\n ckpt_manager_args = ocp.args.Composite(\n model_state=ocp.args.PyTreeSave(optimizer_state), # type: ignore\n train_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n train_iterator # type: ignore\n ),\n val_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n val_iterator # type: ignore\n ),\n )\n else:\n ckpt_manager_args = ocp.args.Composite(\n model_state=ocp.args.PyTreeSave(optimizer_state), # type: ignore\n train_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n train_iterator # type: ignore\n ),\n )\n checkpoint_manager.save(step, args=ckpt_manager_args)\n print(f""Saved checkpoint at step {step}"")\n if step >= args.num_steps:\n break",python,selection_command +1728,1259640,"jasmine/train_dynamics.py",24500,7037," ""recon"": val_recon,\n ""full_frame"": val_recon_full_frame,\n }\n\n # --- Logging ---\n if args.log:\n if step % args.log_interval == 0 and jax.process_index() == 0:\n log_dict = {""loss"": loss, ""step"": step, **metrics}\n if val_results:\n log_dict.update(val_results[""metrics""])\n wandb.log(log_dict)\n if step % args.log_image_interval == 0:\n gt_seq = batch[""videos""][0].astype(jnp.float32) / 255.0\n recon_seq = recon[0].clip(0, 1)\n comparison_seq = jnp.concatenate((gt_seq, recon_seq), axis=1)\n comparison_seq = einops.rearrange(\n comparison_seq * 255, ""t h w c -> h (t w) c""\n )\n if val_results:\n val_results[""gt_seq_val""] = (\n val_results[""gt_batch""][""videos""][0].astype(jnp.float32)\n / 255.0\n )\n val_results[""recon_seq_val""] = val_results[""recon""][0].clip(\n 0, 1\n )\n val_comparison_seq = jnp.concatenate(\n (val_results[""gt_seq_val""], val_results[""recon_seq_val""]),\n axis=1,\n )\n val_results[""val_comparison_seq""] = einops.rearrange(\n val_comparison_seq * 255, ""t h w c -> h (t w) c""\n )\n if args.eval_full_frame:\n val_results[""full_frame_seq_val""] = val_results[\n ""full_frame""\n ][0].clip(0, 1)\n val_results[""val_full_frame_comparison_seq""] = (\n jnp.concatenate(\n (\n val_results[""gt_seq_val""],\n val_results[""full_frame_seq_val""],\n ),\n axis=1,\n )\n )\n val_results[""val_full_frame_comparison_seq""] = (\n einops.rearrange(\n val_results[""val_full_frame_comparison_seq""] * 255,\n ""t h w c -> h (t w) c"",\n )\n )\n # NOTE: Process-dependent control flow deliberately happens\n # after indexing operation since it must not contain code\n # sections that lead to cross-accelerator communication.\n if jax.process_index() == 0:\n log_images = dict(\n image=wandb.Image(np.asarray(gt_seq[args.seq_len - 1])),\n recon=wandb.Image(np.asarray(recon_seq[args.seq_len - 1])),\n true_vs_recon=wandb.Image(\n np.asarray(comparison_seq.astype(np.uint8))\n ),\n )\n if val_results:\n log_images.update(\n dict(\n val_image=wandb.Image(\n np.asarray(\n val_results[""gt_seq_val""][args.seq_len - 1]\n )\n ),\n val_recon=wandb.Image(\n np.asarray(\n val_results[""recon_seq_val""][\n args.seq_len - 1\n ]\n )\n ),\n val_true_vs_recon=wandb.Image(\n np.asarray(\n val_results[""val_comparison_seq""].astype(\n np.uint8\n )\n )\n ),\n )\n )\n if args.eval_full_frame:\n log_images.update(\n dict(\n val_full_frame=wandb.Image(\n np.asarray(\n val_results[""full_frame_seq_val""][\n args.seq_len - 1\n ]\n )\n ),\n val_true_vs_full_frame=wandb.Image(\n np.asarray(\n val_results[\n ""val_full_frame_comparison_seq""\n ].astype(np.uint8)\n )\n ),\n )\n )\n wandb.log(log_images)\n # --- Checkpointing ---\n if args.save_ckpt and step % args.log_checkpoint_interval == 0:\n assert checkpoint_manager is 
not None\n optimizer_state = nnx.state(optimizer)\n if val_iterator:\n ckpt_manager_args = ocp.args.Composite(\n model_state=ocp.args.PyTreeSave(optimizer_state), # type: ignore\n train_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n train_iterator # type: ignore\n ),\n val_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n val_iterator # type: ignore\n ),\n )\n else:\n ckpt_manager_args = ocp.args.Composite(\n model_state=ocp.args.PyTreeSave(optimizer_state), # type: ignore\n train_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n train_iterator # type: ignore\n ),\n )\n checkpoint_manager.save(step, args=ckpt_manager_args)\n print(f""Saved checkpoint at step {step}"")\n if step >= args.num_steps:\n break",python,selection_command +1729,1259641,"jasmine/train_dynamics.py",24454,7083," ""gt_batch"": val_gt_batch,\n ""recon"": val_recon,\n ""full_frame"": val_recon_full_frame,\n }\n\n # --- Logging ---\n if args.log:\n if step % args.log_interval == 0 and jax.process_index() == 0:\n log_dict = {""loss"": loss, ""step"": step, **metrics}\n if val_results:\n log_dict.update(val_results[""metrics""])\n wandb.log(log_dict)\n if step % args.log_image_interval == 0:\n gt_seq = batch[""videos""][0].astype(jnp.float32) / 255.0\n recon_seq = recon[0].clip(0, 1)\n comparison_seq = jnp.concatenate((gt_seq, recon_seq), axis=1)\n comparison_seq = einops.rearrange(\n comparison_seq * 255, ""t h w c -> h (t w) c""\n )\n if val_results:\n val_results[""gt_seq_val""] = (\n val_results[""gt_batch""][""videos""][0].astype(jnp.float32)\n / 255.0\n )\n val_results[""recon_seq_val""] = val_results[""recon""][0].clip(\n 0, 1\n )\n val_comparison_seq = jnp.concatenate(\n (val_results[""gt_seq_val""], val_results[""recon_seq_val""]),\n axis=1,\n )\n val_results[""val_comparison_seq""] = einops.rearrange(\n val_comparison_seq * 255, ""t h w c -> h (t w) c""\n )\n if args.eval_full_frame:\n val_results[""full_frame_seq_val""] = val_results[\n ""full_frame""\n ][0].clip(0, 1)\n val_results[""val_full_frame_comparison_seq""] = (\n jnp.concatenate(\n (\n val_results[""gt_seq_val""],\n val_results[""full_frame_seq_val""],\n ),\n axis=1,\n )\n )\n val_results[""val_full_frame_comparison_seq""] = (\n einops.rearrange(\n val_results[""val_full_frame_comparison_seq""] * 255,\n ""t h w c -> h (t w) c"",\n )\n )\n # NOTE: Process-dependent control flow deliberately happens\n # after indexing operation since it must not contain code\n # sections that lead to cross-accelerator communication.\n if jax.process_index() == 0:\n log_images = dict(\n image=wandb.Image(np.asarray(gt_seq[args.seq_len - 1])),\n recon=wandb.Image(np.asarray(recon_seq[args.seq_len - 1])),\n true_vs_recon=wandb.Image(\n np.asarray(comparison_seq.astype(np.uint8))\n ),\n )\n if val_results:\n log_images.update(\n dict(\n val_image=wandb.Image(\n np.asarray(\n val_results[""gt_seq_val""][args.seq_len - 1]\n )\n ),\n val_recon=wandb.Image(\n np.asarray(\n val_results[""recon_seq_val""][\n args.seq_len - 1\n ]\n )\n ),\n val_true_vs_recon=wandb.Image(\n np.asarray(\n val_results[""val_comparison_seq""].astype(\n np.uint8\n )\n )\n ),\n )\n )\n if args.eval_full_frame:\n log_images.update(\n dict(\n val_full_frame=wandb.Image(\n np.asarray(\n val_results[""full_frame_seq_val""][\n args.seq_len - 1\n ]\n )\n ),\n val_true_vs_full_frame=wandb.Image(\n np.asarray(\n val_results[\n ""val_full_frame_comparison_seq""\n ].astype(np.uint8)\n )\n ),\n )\n )\n wandb.log(log_images)\n # --- Checkpointing ---\n if 
args.save_ckpt and step % args.log_checkpoint_interval == 0:\n assert checkpoint_manager is not None\n optimizer_state = nnx.state(optimizer)\n if val_iterator:\n ckpt_manager_args = ocp.args.Composite(\n model_state=ocp.args.PyTreeSave(optimizer_state), # type: ignore\n train_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n train_iterator # type: ignore\n ),\n val_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n val_iterator # type: ignore\n ),\n )\n else:\n ckpt_manager_args = ocp.args.Composite(\n model_state=ocp.args.PyTreeSave(optimizer_state), # type: ignore\n train_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n train_iterator # type: ignore\n ),\n )\n checkpoint_manager.save(step, args=ckpt_manager_args)\n print(f""Saved checkpoint at step {step}"")\n if step >= args.num_steps:\n break",python,selection_command +1730,1259685,"jasmine/train_dynamics.py",24410,7127," ""metrics"": val_metrics,\n ""gt_batch"": val_gt_batch,\n ""recon"": val_recon,\n ""full_frame"": val_recon_full_frame,\n }\n\n # --- Logging ---\n if args.log:\n if step % args.log_interval == 0 and jax.process_index() == 0:\n log_dict = {""loss"": loss, ""step"": step, **metrics}\n if val_results:\n log_dict.update(val_results[""metrics""])\n wandb.log(log_dict)\n if step % args.log_image_interval == 0:\n gt_seq = batch[""videos""][0].astype(jnp.float32) / 255.0\n recon_seq = recon[0].clip(0, 1)\n comparison_seq = jnp.concatenate((gt_seq, recon_seq), axis=1)\n comparison_seq = einops.rearrange(\n comparison_seq * 255, ""t h w c -> h (t w) c""\n )\n if val_results:\n val_results[""gt_seq_val""] = (\n val_results[""gt_batch""][""videos""][0].astype(jnp.float32)\n / 255.0\n )\n val_results[""recon_seq_val""] = val_results[""recon""][0].clip(\n 0, 1\n )\n val_comparison_seq = jnp.concatenate(\n (val_results[""gt_seq_val""], val_results[""recon_seq_val""]),\n axis=1,\n )\n val_results[""val_comparison_seq""] = einops.rearrange(\n val_comparison_seq * 255, ""t h w c -> h (t w) c""\n )\n if args.eval_full_frame:\n val_results[""full_frame_seq_val""] = val_results[\n ""full_frame""\n ][0].clip(0, 1)\n val_results[""val_full_frame_comparison_seq""] = (\n jnp.concatenate(\n (\n val_results[""gt_seq_val""],\n val_results[""full_frame_seq_val""],\n ),\n axis=1,\n )\n )\n val_results[""val_full_frame_comparison_seq""] = (\n einops.rearrange(\n val_results[""val_full_frame_comparison_seq""] * 255,\n ""t h w c -> h (t w) c"",\n )\n )\n # NOTE: Process-dependent control flow deliberately happens\n # after indexing operation since it must not contain code\n # sections that lead to cross-accelerator communication.\n if jax.process_index() == 0:\n log_images = dict(\n image=wandb.Image(np.asarray(gt_seq[args.seq_len - 1])),\n recon=wandb.Image(np.asarray(recon_seq[args.seq_len - 1])),\n true_vs_recon=wandb.Image(\n np.asarray(comparison_seq.astype(np.uint8))\n ),\n )\n if val_results:\n log_images.update(\n dict(\n val_image=wandb.Image(\n np.asarray(\n val_results[""gt_seq_val""][args.seq_len - 1]\n )\n ),\n val_recon=wandb.Image(\n np.asarray(\n val_results[""recon_seq_val""][\n args.seq_len - 1\n ]\n )\n ),\n val_true_vs_recon=wandb.Image(\n np.asarray(\n val_results[""val_comparison_seq""].astype(\n np.uint8\n )\n )\n ),\n )\n )\n if args.eval_full_frame:\n log_images.update(\n dict(\n val_full_frame=wandb.Image(\n np.asarray(\n val_results[""full_frame_seq_val""][\n args.seq_len - 1\n ]\n )\n ),\n val_true_vs_full_frame=wandb.Image(\n np.asarray(\n val_results[\n 
""val_full_frame_comparison_seq""\n ].astype(np.uint8)\n )\n ),\n )\n )\n wandb.log(log_images)\n # --- Checkpointing ---\n if args.save_ckpt and step % args.log_checkpoint_interval == 0:\n assert checkpoint_manager is not None\n optimizer_state = nnx.state(optimizer)\n if val_iterator:\n ckpt_manager_args = ocp.args.Composite(\n model_state=ocp.args.PyTreeSave(optimizer_state), # type: ignore\n train_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n train_iterator # type: ignore\n ),\n val_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n val_iterator # type: ignore\n ),\n )\n else:\n ckpt_manager_args = ocp.args.Composite(\n model_state=ocp.args.PyTreeSave(optimizer_state), # type: ignore\n train_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n train_iterator # type: ignore\n ),\n )\n checkpoint_manager.save(step, args=ckpt_manager_args)\n print(f""Saved checkpoint at step {step}"")\n if step >= args.num_steps:\n break",python,selection_command +1731,1259726,"jasmine/train_dynamics.py",24378,7159," val_results = {\n ""metrics"": val_metrics,\n ""gt_batch"": val_gt_batch,\n ""recon"": val_recon,\n ""full_frame"": val_recon_full_frame,\n }\n\n # --- Logging ---\n if args.log:\n if step % args.log_interval == 0 and jax.process_index() == 0:\n log_dict = {""loss"": loss, ""step"": step, **metrics}\n if val_results:\n log_dict.update(val_results[""metrics""])\n wandb.log(log_dict)\n if step % args.log_image_interval == 0:\n gt_seq = batch[""videos""][0].astype(jnp.float32) / 255.0\n recon_seq = recon[0].clip(0, 1)\n comparison_seq = jnp.concatenate((gt_seq, recon_seq), axis=1)\n comparison_seq = einops.rearrange(\n comparison_seq * 255, ""t h w c -> h (t w) c""\n )\n if val_results:\n val_results[""gt_seq_val""] = (\n val_results[""gt_batch""][""videos""][0].astype(jnp.float32)\n / 255.0\n )\n val_results[""recon_seq_val""] = val_results[""recon""][0].clip(\n 0, 1\n )\n val_comparison_seq = jnp.concatenate(\n (val_results[""gt_seq_val""], val_results[""recon_seq_val""]),\n axis=1,\n )\n val_results[""val_comparison_seq""] = einops.rearrange(\n val_comparison_seq * 255, ""t h w c -> h (t w) c""\n )\n if args.eval_full_frame:\n val_results[""full_frame_seq_val""] = val_results[\n ""full_frame""\n ][0].clip(0, 1)\n val_results[""val_full_frame_comparison_seq""] = (\n jnp.concatenate(\n (\n val_results[""gt_seq_val""],\n val_results[""full_frame_seq_val""],\n ),\n axis=1,\n )\n )\n val_results[""val_full_frame_comparison_seq""] = (\n einops.rearrange(\n val_results[""val_full_frame_comparison_seq""] * 255,\n ""t h w c -> h (t w) c"",\n )\n )\n # NOTE: Process-dependent control flow deliberately happens\n # after indexing operation since it must not contain code\n # sections that lead to cross-accelerator communication.\n if jax.process_index() == 0:\n log_images = dict(\n image=wandb.Image(np.asarray(gt_seq[args.seq_len - 1])),\n recon=wandb.Image(np.asarray(recon_seq[args.seq_len - 1])),\n true_vs_recon=wandb.Image(\n np.asarray(comparison_seq.astype(np.uint8))\n ),\n )\n if val_results:\n log_images.update(\n dict(\n val_image=wandb.Image(\n np.asarray(\n val_results[""gt_seq_val""][args.seq_len - 1]\n )\n ),\n val_recon=wandb.Image(\n np.asarray(\n val_results[""recon_seq_val""][\n args.seq_len - 1\n ]\n )\n ),\n val_true_vs_recon=wandb.Image(\n np.asarray(\n val_results[""val_comparison_seq""].astype(\n np.uint8\n )\n )\n ),\n )\n )\n if args.eval_full_frame:\n log_images.update(\n dict(\n val_full_frame=wandb.Image(\n np.asarray(\n 
val_results[""full_frame_seq_val""][\n args.seq_len - 1\n ]\n )\n ),\n val_true_vs_full_frame=wandb.Image(\n np.asarray(\n val_results[\n ""val_full_frame_comparison_seq""\n ].astype(np.uint8)\n )\n ),\n )\n )\n wandb.log(log_images)\n # --- Checkpointing ---\n if args.save_ckpt and step % args.log_checkpoint_interval == 0:\n assert checkpoint_manager is not None\n optimizer_state = nnx.state(optimizer)\n if val_iterator:\n ckpt_manager_args = ocp.args.Composite(\n model_state=ocp.args.PyTreeSave(optimizer_state), # type: ignore\n train_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n train_iterator # type: ignore\n ),\n val_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n val_iterator # type: ignore\n ),\n )\n else:\n ckpt_manager_args = ocp.args.Composite(\n model_state=ocp.args.PyTreeSave(optimizer_state), # type: ignore\n train_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n train_iterator # type: ignore\n ),\n )\n checkpoint_manager.save(step, args=ckpt_manager_args)\n print(f""Saved checkpoint at step {step}"")\n if step >= args.num_steps:\n break",python,selection_command +1732,1259769,"jasmine/train_dynamics.py",24296,7241," print(f""Step {step}, validation loss: {val_metrics['val_loss']}"")\n val_results = {\n ""metrics"": val_metrics,\n ""gt_batch"": val_gt_batch,\n ""recon"": val_recon,\n ""full_frame"": val_recon_full_frame,\n }\n\n # --- Logging ---\n if args.log:\n if step % args.log_interval == 0 and jax.process_index() == 0:\n log_dict = {""loss"": loss, ""step"": step, **metrics}\n if val_results:\n log_dict.update(val_results[""metrics""])\n wandb.log(log_dict)\n if step % args.log_image_interval == 0:\n gt_seq = batch[""videos""][0].astype(jnp.float32) / 255.0\n recon_seq = recon[0].clip(0, 1)\n comparison_seq = jnp.concatenate((gt_seq, recon_seq), axis=1)\n comparison_seq = einops.rearrange(\n comparison_seq * 255, ""t h w c -> h (t w) c""\n )\n if val_results:\n val_results[""gt_seq_val""] = (\n val_results[""gt_batch""][""videos""][0].astype(jnp.float32)\n / 255.0\n )\n val_results[""recon_seq_val""] = val_results[""recon""][0].clip(\n 0, 1\n )\n val_comparison_seq = jnp.concatenate(\n (val_results[""gt_seq_val""], val_results[""recon_seq_val""]),\n axis=1,\n )\n val_results[""val_comparison_seq""] = einops.rearrange(\n val_comparison_seq * 255, ""t h w c -> h (t w) c""\n )\n if args.eval_full_frame:\n val_results[""full_frame_seq_val""] = val_results[\n ""full_frame""\n ][0].clip(0, 1)\n val_results[""val_full_frame_comparison_seq""] = (\n jnp.concatenate(\n (\n val_results[""gt_seq_val""],\n val_results[""full_frame_seq_val""],\n ),\n axis=1,\n )\n )\n val_results[""val_full_frame_comparison_seq""] = (\n einops.rearrange(\n val_results[""val_full_frame_comparison_seq""] * 255,\n ""t h w c -> h (t w) c"",\n )\n )\n # NOTE: Process-dependent control flow deliberately happens\n # after indexing operation since it must not contain code\n # sections that lead to cross-accelerator communication.\n if jax.process_index() == 0:\n log_images = dict(\n image=wandb.Image(np.asarray(gt_seq[args.seq_len - 1])),\n recon=wandb.Image(np.asarray(recon_seq[args.seq_len - 1])),\n true_vs_recon=wandb.Image(\n np.asarray(comparison_seq.astype(np.uint8))\n ),\n )\n if val_results:\n log_images.update(\n dict(\n val_image=wandb.Image(\n np.asarray(\n val_results[""gt_seq_val""][args.seq_len - 1]\n )\n ),\n val_recon=wandb.Image(\n np.asarray(\n val_results[""recon_seq_val""][\n args.seq_len - 1\n ]\n )\n ),\n 
val_true_vs_recon=wandb.Image(\n np.asarray(\n val_results[""val_comparison_seq""].astype(\n np.uint8\n )\n )\n ),\n )\n )\n if args.eval_full_frame:\n log_images.update(\n dict(\n val_full_frame=wandb.Image(\n np.asarray(\n val_results[""full_frame_seq_val""][\n args.seq_len - 1\n ]\n )\n ),\n val_true_vs_full_frame=wandb.Image(\n np.asarray(\n val_results[\n ""val_full_frame_comparison_seq""\n ].astype(np.uint8)\n )\n ),\n )\n )\n wandb.log(log_images)\n # --- Checkpointing ---\n if args.save_ckpt and step % args.log_checkpoint_interval == 0:\n assert checkpoint_manager is not None\n optimizer_state = nnx.state(optimizer)\n if val_iterator:\n ckpt_manager_args = ocp.args.Composite(\n model_state=ocp.args.PyTreeSave(optimizer_state), # type: ignore\n train_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n train_iterator # type: ignore\n ),\n val_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n val_iterator # type: ignore\n ),\n )\n else:\n ckpt_manager_args = ocp.args.Composite(\n model_state=ocp.args.PyTreeSave(optimizer_state), # type: ignore\n train_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n train_iterator # type: ignore\n ),\n )\n checkpoint_manager.save(step, args=ckpt_manager_args)\n print(f""Saved checkpoint at step {step}"")\n if step >= args.num_steps:\n break",python,selection_command +1733,1259772,"jasmine/train_dynamics.py",24278,7259," )\n print(f""Step {step}, validation loss: {val_metrics['val_loss']}"")\n val_results = {\n ""metrics"": val_metrics,\n ""gt_batch"": val_gt_batch,\n ""recon"": val_recon,\n ""full_frame"": val_recon_full_frame,\n }\n\n # --- Logging ---\n if args.log:\n if step % args.log_interval == 0 and jax.process_index() == 0:\n log_dict = {""loss"": loss, ""step"": step, **metrics}\n if val_results:\n log_dict.update(val_results[""metrics""])\n wandb.log(log_dict)\n if step % args.log_image_interval == 0:\n gt_seq = batch[""videos""][0].astype(jnp.float32) / 255.0\n recon_seq = recon[0].clip(0, 1)\n comparison_seq = jnp.concatenate((gt_seq, recon_seq), axis=1)\n comparison_seq = einops.rearrange(\n comparison_seq * 255, ""t h w c -> h (t w) c""\n )\n if val_results:\n val_results[""gt_seq_val""] = (\n val_results[""gt_batch""][""videos""][0].astype(jnp.float32)\n / 255.0\n )\n val_results[""recon_seq_val""] = val_results[""recon""][0].clip(\n 0, 1\n )\n val_comparison_seq = jnp.concatenate(\n (val_results[""gt_seq_val""], val_results[""recon_seq_val""]),\n axis=1,\n )\n val_results[""val_comparison_seq""] = einops.rearrange(\n val_comparison_seq * 255, ""t h w c -> h (t w) c""\n )\n if args.eval_full_frame:\n val_results[""full_frame_seq_val""] = val_results[\n ""full_frame""\n ][0].clip(0, 1)\n val_results[""val_full_frame_comparison_seq""] = (\n jnp.concatenate(\n (\n val_results[""gt_seq_val""],\n val_results[""full_frame_seq_val""],\n ),\n axis=1,\n )\n )\n val_results[""val_full_frame_comparison_seq""] = (\n einops.rearrange(\n val_results[""val_full_frame_comparison_seq""] * 255,\n ""t h w c -> h (t w) c"",\n )\n )\n # NOTE: Process-dependent control flow deliberately happens\n # after indexing operation since it must not contain code\n # sections that lead to cross-accelerator communication.\n if jax.process_index() == 0:\n log_images = dict(\n image=wandb.Image(np.asarray(gt_seq[args.seq_len - 1])),\n recon=wandb.Image(np.asarray(recon_seq[args.seq_len - 1])),\n true_vs_recon=wandb.Image(\n np.asarray(comparison_seq.astype(np.uint8))\n ),\n )\n if val_results:\n log_images.update(\n 
dict(\n val_image=wandb.Image(\n np.asarray(\n val_results[""gt_seq_val""][args.seq_len - 1]\n )\n ),\n val_recon=wandb.Image(\n np.asarray(\n val_results[""recon_seq_val""][\n args.seq_len - 1\n ]\n )\n ),\n val_true_vs_recon=wandb.Image(\n np.asarray(\n val_results[""val_comparison_seq""].astype(\n np.uint8\n )\n )\n ),\n )\n )\n if args.eval_full_frame:\n log_images.update(\n dict(\n val_full_frame=wandb.Image(\n np.asarray(\n val_results[""full_frame_seq_val""][\n args.seq_len - 1\n ]\n )\n ),\n val_true_vs_full_frame=wandb.Image(\n np.asarray(\n val_results[\n ""val_full_frame_comparison_seq""\n ].astype(np.uint8)\n )\n ),\n )\n )\n wandb.log(log_images)\n # --- Checkpointing ---\n if args.save_ckpt and step % args.log_checkpoint_interval == 0:\n assert checkpoint_manager is not None\n optimizer_state = nnx.state(optimizer)\n if val_iterator:\n ckpt_manager_args = ocp.args.Composite(\n model_state=ocp.args.PyTreeSave(optimizer_state), # type: ignore\n train_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n train_iterator # type: ignore\n ),\n val_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n val_iterator # type: ignore\n ),\n )\n else:\n ckpt_manager_args = ocp.args.Composite(\n model_state=ocp.args.PyTreeSave(optimizer_state), # type: ignore\n train_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n train_iterator # type: ignore\n ),\n )\n checkpoint_manager.save(step, args=ckpt_manager_args)\n print(f""Saved checkpoint at step {step}"")\n if step >= args.num_steps:\n break",python,selection_command +1734,1259826,"jasmine/train_dynamics.py",24256,7281," )\n )\n print(f""Step {step}, validation loss: {val_metrics['val_loss']}"")\n val_results = {\n ""metrics"": val_metrics,\n ""gt_batch"": val_gt_batch,\n ""recon"": val_recon,\n ""full_frame"": val_recon_full_frame,\n }\n\n # --- Logging ---\n if args.log:\n if step % args.log_interval == 0 and jax.process_index() == 0:\n log_dict = {""loss"": loss, ""step"": step, **metrics}\n if val_results:\n log_dict.update(val_results[""metrics""])\n wandb.log(log_dict)\n if step % args.log_image_interval == 0:\n gt_seq = batch[""videos""][0].astype(jnp.float32) / 255.0\n recon_seq = recon[0].clip(0, 1)\n comparison_seq = jnp.concatenate((gt_seq, recon_seq), axis=1)\n comparison_seq = einops.rearrange(\n comparison_seq * 255, ""t h w c -> h (t w) c""\n )\n if val_results:\n val_results[""gt_seq_val""] = (\n val_results[""gt_batch""][""videos""][0].astype(jnp.float32)\n / 255.0\n )\n val_results[""recon_seq_val""] = val_results[""recon""][0].clip(\n 0, 1\n )\n val_comparison_seq = jnp.concatenate(\n (val_results[""gt_seq_val""], val_results[""recon_seq_val""]),\n axis=1,\n )\n val_results[""val_comparison_seq""] = einops.rearrange(\n val_comparison_seq * 255, ""t h w c -> h (t w) c""\n )\n if args.eval_full_frame:\n val_results[""full_frame_seq_val""] = val_results[\n ""full_frame""\n ][0].clip(0, 1)\n val_results[""val_full_frame_comparison_seq""] = (\n jnp.concatenate(\n (\n val_results[""gt_seq_val""],\n val_results[""full_frame_seq_val""],\n ),\n axis=1,\n )\n )\n val_results[""val_full_frame_comparison_seq""] = (\n einops.rearrange(\n val_results[""val_full_frame_comparison_seq""] * 255,\n ""t h w c -> h (t w) c"",\n )\n )\n # NOTE: Process-dependent control flow deliberately happens\n # after indexing operation since it must not contain code\n # sections that lead to cross-accelerator communication.\n if jax.process_index() == 0:\n log_images = dict(\n 
image=wandb.Image(np.asarray(gt_seq[args.seq_len - 1])),\n recon=wandb.Image(np.asarray(recon_seq[args.seq_len - 1])),\n true_vs_recon=wandb.Image(\n np.asarray(comparison_seq.astype(np.uint8))\n ),\n )\n if val_results:\n log_images.update(\n dict(\n val_image=wandb.Image(\n np.asarray(\n val_results[""gt_seq_val""][args.seq_len - 1]\n )\n ),\n val_recon=wandb.Image(\n np.asarray(\n val_results[""recon_seq_val""][\n args.seq_len - 1\n ]\n )\n ),\n val_true_vs_recon=wandb.Image(\n np.asarray(\n val_results[""val_comparison_seq""].astype(\n np.uint8\n )\n )\n ),\n )\n )\n if args.eval_full_frame:\n log_images.update(\n dict(\n val_full_frame=wandb.Image(\n np.asarray(\n val_results[""full_frame_seq_val""][\n args.seq_len - 1\n ]\n )\n ),\n val_true_vs_full_frame=wandb.Image(\n np.asarray(\n val_results[\n ""val_full_frame_comparison_seq""\n ].astype(np.uint8)\n )\n ),\n )\n )\n wandb.log(log_images)\n # --- Checkpointing ---\n if args.save_ckpt and step % args.log_checkpoint_interval == 0:\n assert checkpoint_manager is not None\n optimizer_state = nnx.state(optimizer)\n if val_iterator:\n ckpt_manager_args = ocp.args.Composite(\n model_state=ocp.args.PyTreeSave(optimizer_state), # type: ignore\n train_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n train_iterator # type: ignore\n ),\n val_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n val_iterator # type: ignore\n ),\n )\n else:\n ckpt_manager_args = ocp.args.Composite(\n model_state=ocp.args.PyTreeSave(optimizer_state), # type: ignore\n train_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n train_iterator # type: ignore\n ),\n )\n checkpoint_manager.save(step, args=ckpt_manager_args)\n print(f""Saved checkpoint at step {step}"")\n if step >= args.num_steps:\n break",python,selection_command +1735,1259864,"jasmine/train_dynamics.py",24185,7352," dataloader_val, optimizer.model, _rng_mask_val\n )\n )\n print(f""Step {step}, validation loss: {val_metrics['val_loss']}"")\n val_results = {\n ""metrics"": val_metrics,\n ""gt_batch"": val_gt_batch,\n ""recon"": val_recon,\n ""full_frame"": val_recon_full_frame,\n }\n\n # --- Logging ---\n if args.log:\n if step % args.log_interval == 0 and jax.process_index() == 0:\n log_dict = {""loss"": loss, ""step"": step, **metrics}\n if val_results:\n log_dict.update(val_results[""metrics""])\n wandb.log(log_dict)\n if step % args.log_image_interval == 0:\n gt_seq = batch[""videos""][0].astype(jnp.float32) / 255.0\n recon_seq = recon[0].clip(0, 1)\n comparison_seq = jnp.concatenate((gt_seq, recon_seq), axis=1)\n comparison_seq = einops.rearrange(\n comparison_seq * 255, ""t h w c -> h (t w) c""\n )\n if val_results:\n val_results[""gt_seq_val""] = (\n val_results[""gt_batch""][""videos""][0].astype(jnp.float32)\n / 255.0\n )\n val_results[""recon_seq_val""] = val_results[""recon""][0].clip(\n 0, 1\n )\n val_comparison_seq = jnp.concatenate(\n (val_results[""gt_seq_val""], val_results[""recon_seq_val""]),\n axis=1,\n )\n val_results[""val_comparison_seq""] = einops.rearrange(\n val_comparison_seq * 255, ""t h w c -> h (t w) c""\n )\n if args.eval_full_frame:\n val_results[""full_frame_seq_val""] = val_results[\n ""full_frame""\n ][0].clip(0, 1)\n val_results[""val_full_frame_comparison_seq""] = (\n jnp.concatenate(\n (\n val_results[""gt_seq_val""],\n val_results[""full_frame_seq_val""],\n ),\n axis=1,\n )\n )\n val_results[""val_full_frame_comparison_seq""] = (\n einops.rearrange(\n val_results[""val_full_frame_comparison_seq""] * 255,\n ""t h w c -> 
h (t w) c"",\n )\n )\n # NOTE: Process-dependent control flow deliberately happens\n # after indexing operation since it must not contain code\n # sections that lead to cross-accelerator communication.\n if jax.process_index() == 0:\n log_images = dict(\n image=wandb.Image(np.asarray(gt_seq[args.seq_len - 1])),\n recon=wandb.Image(np.asarray(recon_seq[args.seq_len - 1])),\n true_vs_recon=wandb.Image(\n np.asarray(comparison_seq.astype(np.uint8))\n ),\n )\n if val_results:\n log_images.update(\n dict(\n val_image=wandb.Image(\n np.asarray(\n val_results[""gt_seq_val""][args.seq_len - 1]\n )\n ),\n val_recon=wandb.Image(\n np.asarray(\n val_results[""recon_seq_val""][\n args.seq_len - 1\n ]\n )\n ),\n val_true_vs_recon=wandb.Image(\n np.asarray(\n val_results[""val_comparison_seq""].astype(\n np.uint8\n )\n )\n ),\n )\n )\n if args.eval_full_frame:\n log_images.update(\n dict(\n val_full_frame=wandb.Image(\n np.asarray(\n val_results[""full_frame_seq_val""][\n args.seq_len - 1\n ]\n )\n ),\n val_true_vs_full_frame=wandb.Image(\n np.asarray(\n val_results[\n ""val_full_frame_comparison_seq""\n ].astype(np.uint8)\n )\n ),\n )\n )\n wandb.log(log_images)\n # --- Checkpointing ---\n if args.save_ckpt and step % args.log_checkpoint_interval == 0:\n assert checkpoint_manager is not None\n optimizer_state = nnx.state(optimizer)\n if val_iterator:\n ckpt_manager_args = ocp.args.Composite(\n model_state=ocp.args.PyTreeSave(optimizer_state), # type: ignore\n train_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n train_iterator # type: ignore\n ),\n val_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n val_iterator # type: ignore\n ),\n )\n else:\n ckpt_manager_args = ocp.args.Composite(\n model_state=ocp.args.PyTreeSave(optimizer_state), # type: ignore\n train_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n train_iterator # type: ignore\n ),\n )\n checkpoint_manager.save(step, args=ckpt_manager_args)\n print(f""Saved checkpoint at step {step}"")\n if step >= args.num_steps:\n break",python,selection_command +1736,1259868,"jasmine/train_dynamics.py",24135,7402," calculate_validation_metrics(\n dataloader_val, optimizer.model, _rng_mask_val\n )\n )\n print(f""Step {step}, validation loss: {val_metrics['val_loss']}"")\n val_results = {\n ""metrics"": val_metrics,\n ""gt_batch"": val_gt_batch,\n ""recon"": val_recon,\n ""full_frame"": val_recon_full_frame,\n }\n\n # --- Logging ---\n if args.log:\n if step % args.log_interval == 0 and jax.process_index() == 0:\n log_dict = {""loss"": loss, ""step"": step, **metrics}\n if val_results:\n log_dict.update(val_results[""metrics""])\n wandb.log(log_dict)\n if step % args.log_image_interval == 0:\n gt_seq = batch[""videos""][0].astype(jnp.float32) / 255.0\n recon_seq = recon[0].clip(0, 1)\n comparison_seq = jnp.concatenate((gt_seq, recon_seq), axis=1)\n comparison_seq = einops.rearrange(\n comparison_seq * 255, ""t h w c -> h (t w) c""\n )\n if val_results:\n val_results[""gt_seq_val""] = (\n val_results[""gt_batch""][""videos""][0].astype(jnp.float32)\n / 255.0\n )\n val_results[""recon_seq_val""] = val_results[""recon""][0].clip(\n 0, 1\n )\n val_comparison_seq = jnp.concatenate(\n (val_results[""gt_seq_val""], val_results[""recon_seq_val""]),\n axis=1,\n )\n val_results[""val_comparison_seq""] = einops.rearrange(\n val_comparison_seq * 255, ""t h w c -> h (t w) c""\n )\n if args.eval_full_frame:\n val_results[""full_frame_seq_val""] = val_results[\n ""full_frame""\n ][0].clip(0, 1)\n 
val_results[""val_full_frame_comparison_seq""] = (\n jnp.concatenate(\n (\n val_results[""gt_seq_val""],\n val_results[""full_frame_seq_val""],\n ),\n axis=1,\n )\n )\n val_results[""val_full_frame_comparison_seq""] = (\n einops.rearrange(\n val_results[""val_full_frame_comparison_seq""] * 255,\n ""t h w c -> h (t w) c"",\n )\n )\n # NOTE: Process-dependent control flow deliberately happens\n # after indexing operation since it must not contain code\n # sections that lead to cross-accelerator communication.\n if jax.process_index() == 0:\n log_images = dict(\n image=wandb.Image(np.asarray(gt_seq[args.seq_len - 1])),\n recon=wandb.Image(np.asarray(recon_seq[args.seq_len - 1])),\n true_vs_recon=wandb.Image(\n np.asarray(comparison_seq.astype(np.uint8))\n ),\n )\n if val_results:\n log_images.update(\n dict(\n val_image=wandb.Image(\n np.asarray(\n val_results[""gt_seq_val""][args.seq_len - 1]\n )\n ),\n val_recon=wandb.Image(\n np.asarray(\n val_results[""recon_seq_val""][\n args.seq_len - 1\n ]\n )\n ),\n val_true_vs_recon=wandb.Image(\n np.asarray(\n val_results[""val_comparison_seq""].astype(\n np.uint8\n )\n )\n ),\n )\n )\n if args.eval_full_frame:\n log_images.update(\n dict(\n val_full_frame=wandb.Image(\n np.asarray(\n val_results[""full_frame_seq_val""][\n args.seq_len - 1\n ]\n )\n ),\n val_true_vs_full_frame=wandb.Image(\n np.asarray(\n val_results[\n ""val_full_frame_comparison_seq""\n ].astype(np.uint8)\n )\n ),\n )\n )\n wandb.log(log_images)\n # --- Checkpointing ---\n if args.save_ckpt and step % args.log_checkpoint_interval == 0:\n assert checkpoint_manager is not None\n optimizer_state = nnx.state(optimizer)\n if val_iterator:\n ckpt_manager_args = ocp.args.Composite(\n model_state=ocp.args.PyTreeSave(optimizer_state), # type: ignore\n train_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n train_iterator # type: ignore\n ),\n val_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n val_iterator # type: ignore\n ),\n )\n else:\n ckpt_manager_args = ocp.args.Composite(\n model_state=ocp.args.PyTreeSave(optimizer_state), # type: ignore\n train_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n train_iterator # type: ignore\n ),\n )\n checkpoint_manager.save(step, args=ckpt_manager_args)\n print(f""Saved checkpoint at step {step}"")\n if step >= args.num_steps:\n break",python,selection_command +1737,1259909,"jasmine/train_dynamics.py",24056,7481," val_metrics, val_gt_batch, val_recon, val_recon_full_frame = (\n calculate_validation_metrics(\n dataloader_val, optimizer.model, _rng_mask_val\n )\n )\n print(f""Step {step}, validation loss: {val_metrics['val_loss']}"")\n val_results = {\n ""metrics"": val_metrics,\n ""gt_batch"": val_gt_batch,\n ""recon"": val_recon,\n ""full_frame"": val_recon_full_frame,\n }\n\n # --- Logging ---\n if args.log:\n if step % args.log_interval == 0 and jax.process_index() == 0:\n log_dict = {""loss"": loss, ""step"": step, **metrics}\n if val_results:\n log_dict.update(val_results[""metrics""])\n wandb.log(log_dict)\n if step % args.log_image_interval == 0:\n gt_seq = batch[""videos""][0].astype(jnp.float32) / 255.0\n recon_seq = recon[0].clip(0, 1)\n comparison_seq = jnp.concatenate((gt_seq, recon_seq), axis=1)\n comparison_seq = einops.rearrange(\n comparison_seq * 255, ""t h w c -> h (t w) c""\n )\n if val_results:\n val_results[""gt_seq_val""] = (\n val_results[""gt_batch""][""videos""][0].astype(jnp.float32)\n / 255.0\n )\n val_results[""recon_seq_val""] = val_results[""recon""][0].clip(\n 0, 1\n 
)\n val_comparison_seq = jnp.concatenate(\n (val_results[""gt_seq_val""], val_results[""recon_seq_val""]),\n axis=1,\n )\n val_results[""val_comparison_seq""] = einops.rearrange(\n val_comparison_seq * 255, ""t h w c -> h (t w) c""\n )\n if args.eval_full_frame:\n val_results[""full_frame_seq_val""] = val_results[\n ""full_frame""\n ][0].clip(0, 1)\n val_results[""val_full_frame_comparison_seq""] = (\n jnp.concatenate(\n (\n val_results[""gt_seq_val""],\n val_results[""full_frame_seq_val""],\n ),\n axis=1,\n )\n )\n val_results[""val_full_frame_comparison_seq""] = (\n einops.rearrange(\n val_results[""val_full_frame_comparison_seq""] * 255,\n ""t h w c -> h (t w) c"",\n )\n )\n # NOTE: Process-dependent control flow deliberately happens\n # after indexing operation since it must not contain code\n # sections that lead to cross-accelerator communication.\n if jax.process_index() == 0:\n log_images = dict(\n image=wandb.Image(np.asarray(gt_seq[args.seq_len - 1])),\n recon=wandb.Image(np.asarray(recon_seq[args.seq_len - 1])),\n true_vs_recon=wandb.Image(\n np.asarray(comparison_seq.astype(np.uint8))\n ),\n )\n if val_results:\n log_images.update(\n dict(\n val_image=wandb.Image(\n np.asarray(\n val_results[""gt_seq_val""][args.seq_len - 1]\n )\n ),\n val_recon=wandb.Image(\n np.asarray(\n val_results[""recon_seq_val""][\n args.seq_len - 1\n ]\n )\n ),\n val_true_vs_recon=wandb.Image(\n np.asarray(\n val_results[""val_comparison_seq""].astype(\n np.uint8\n )\n )\n ),\n )\n )\n if args.eval_full_frame:\n log_images.update(\n dict(\n val_full_frame=wandb.Image(\n np.asarray(\n val_results[""full_frame_seq_val""][\n args.seq_len - 1\n ]\n )\n ),\n val_true_vs_full_frame=wandb.Image(\n np.asarray(\n val_results[\n ""val_full_frame_comparison_seq""\n ].astype(np.uint8)\n )\n ),\n )\n )\n wandb.log(log_images)\n # --- Checkpointing ---\n if args.save_ckpt and step % args.log_checkpoint_interval == 0:\n assert checkpoint_manager is not None\n optimizer_state = nnx.state(optimizer)\n if val_iterator:\n ckpt_manager_args = ocp.args.Composite(\n model_state=ocp.args.PyTreeSave(optimizer_state), # type: ignore\n train_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n train_iterator # type: ignore\n ),\n val_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n val_iterator # type: ignore\n ),\n )\n else:\n ckpt_manager_args = ocp.args.Composite(\n model_state=ocp.args.PyTreeSave(optimizer_state), # type: ignore\n train_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n train_iterator # type: ignore\n ),\n )\n checkpoint_manager.save(step, args=ckpt_manager_args)\n print(f""Saved checkpoint at step {step}"")\n if step >= args.num_steps:\n break",python,selection_command +1738,1259909,"jasmine/train_dynamics.py",24135,7402," calculate_validation_metrics(\n dataloader_val, optimizer.model, _rng_mask_val\n )\n )\n print(f""Step {step}, validation loss: {val_metrics['val_loss']}"")\n val_results = {\n ""metrics"": val_metrics,\n ""gt_batch"": val_gt_batch,\n ""recon"": val_recon,\n ""full_frame"": val_recon_full_frame,\n }\n\n # --- Logging ---\n if args.log:\n if step % args.log_interval == 0 and jax.process_index() == 0:\n log_dict = {""loss"": loss, ""step"": step, **metrics}\n if val_results:\n log_dict.update(val_results[""metrics""])\n wandb.log(log_dict)\n if step % args.log_image_interval == 0:\n gt_seq = batch[""videos""][0].astype(jnp.float32) / 255.0\n recon_seq = recon[0].clip(0, 1)\n comparison_seq = jnp.concatenate((gt_seq, recon_seq), axis=1)\n 
comparison_seq = einops.rearrange(\n comparison_seq * 255, ""t h w c -> h (t w) c""\n )\n if val_results:\n val_results[""gt_seq_val""] = (\n val_results[""gt_batch""][""videos""][0].astype(jnp.float32)\n / 255.0\n )\n val_results[""recon_seq_val""] = val_results[""recon""][0].clip(\n 0, 1\n )\n val_comparison_seq = jnp.concatenate(\n (val_results[""gt_seq_val""], val_results[""recon_seq_val""]),\n axis=1,\n )\n val_results[""val_comparison_seq""] = einops.rearrange(\n val_comparison_seq * 255, ""t h w c -> h (t w) c""\n )\n if args.eval_full_frame:\n val_results[""full_frame_seq_val""] = val_results[\n ""full_frame""\n ][0].clip(0, 1)\n val_results[""val_full_frame_comparison_seq""] = (\n jnp.concatenate(\n (\n val_results[""gt_seq_val""],\n val_results[""full_frame_seq_val""],\n ),\n axis=1,\n )\n )\n val_results[""val_full_frame_comparison_seq""] = (\n einops.rearrange(\n val_results[""val_full_frame_comparison_seq""] * 255,\n ""t h w c -> h (t w) c"",\n )\n )\n # NOTE: Process-dependent control flow deliberately happens\n # after indexing operation since it must not contain code\n # sections that lead to cross-accelerator communication.\n if jax.process_index() == 0:\n log_images = dict(\n image=wandb.Image(np.asarray(gt_seq[args.seq_len - 1])),\n recon=wandb.Image(np.asarray(recon_seq[args.seq_len - 1])),\n true_vs_recon=wandb.Image(\n np.asarray(comparison_seq.astype(np.uint8))\n ),\n )\n if val_results:\n log_images.update(\n dict(\n val_image=wandb.Image(\n np.asarray(\n val_results[""gt_seq_val""][args.seq_len - 1]\n )\n ),\n val_recon=wandb.Image(\n np.asarray(\n val_results[""recon_seq_val""][\n args.seq_len - 1\n ]\n )\n ),\n val_true_vs_recon=wandb.Image(\n np.asarray(\n val_results[""val_comparison_seq""].astype(\n np.uint8\n )\n )\n ),\n )\n )\n if args.eval_full_frame:\n log_images.update(\n dict(\n val_full_frame=wandb.Image(\n np.asarray(\n val_results[""full_frame_seq_val""][\n args.seq_len - 1\n ]\n )\n ),\n val_true_vs_full_frame=wandb.Image(\n np.asarray(\n val_results[\n ""val_full_frame_comparison_seq""\n ].astype(np.uint8)\n )\n ),\n )\n )\n wandb.log(log_images)\n # --- Checkpointing ---\n if args.save_ckpt and step % args.log_checkpoint_interval == 0:\n assert checkpoint_manager is not None\n optimizer_state = nnx.state(optimizer)\n if val_iterator:\n ckpt_manager_args = ocp.args.Composite(\n model_state=ocp.args.PyTreeSave(optimizer_state), # type: ignore\n train_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n train_iterator # type: ignore\n ),\n val_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n val_iterator # type: ignore\n ),\n )\n else:\n ckpt_manager_args = ocp.args.Composite(\n model_state=ocp.args.PyTreeSave(optimizer_state), # type: ignore\n train_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n train_iterator # type: ignore\n ),\n )\n checkpoint_manager.save(step, args=ckpt_manager_args)\n print(f""Saved checkpoint at step {step}"")\n if step >= args.num_steps:\n break",python,selection_command +1739,1259910,"TERMINAL",0,0,"5",,terminal_output +1740,1260427,"jasmine/train_dynamics.py",24185,7352," dataloader_val, optimizer.model, _rng_mask_val\n )\n )\n print(f""Step {step}, validation loss: {val_metrics['val_loss']}"")\n val_results = {\n ""metrics"": val_metrics,\n ""gt_batch"": val_gt_batch,\n ""recon"": val_recon,\n ""full_frame"": val_recon_full_frame,\n }\n\n # --- Logging ---\n if args.log:\n if step % args.log_interval == 0 and jax.process_index() == 0:\n log_dict = {""loss"": loss, 
""step"": step, **metrics}\n if val_results:\n log_dict.update(val_results[""metrics""])\n wandb.log(log_dict)\n if step % args.log_image_interval == 0:\n gt_seq = batch[""videos""][0].astype(jnp.float32) / 255.0\n recon_seq = recon[0].clip(0, 1)\n comparison_seq = jnp.concatenate((gt_seq, recon_seq), axis=1)\n comparison_seq = einops.rearrange(\n comparison_seq * 255, ""t h w c -> h (t w) c""\n )\n if val_results:\n val_results[""gt_seq_val""] = (\n val_results[""gt_batch""][""videos""][0].astype(jnp.float32)\n / 255.0\n )\n val_results[""recon_seq_val""] = val_results[""recon""][0].clip(\n 0, 1\n )\n val_comparison_seq = jnp.concatenate(\n (val_results[""gt_seq_val""], val_results[""recon_seq_val""]),\n axis=1,\n )\n val_results[""val_comparison_seq""] = einops.rearrange(\n val_comparison_seq * 255, ""t h w c -> h (t w) c""\n )\n if args.eval_full_frame:\n val_results[""full_frame_seq_val""] = val_results[\n ""full_frame""\n ][0].clip(0, 1)\n val_results[""val_full_frame_comparison_seq""] = (\n jnp.concatenate(\n (\n val_results[""gt_seq_val""],\n val_results[""full_frame_seq_val""],\n ),\n axis=1,\n )\n )\n val_results[""val_full_frame_comparison_seq""] = (\n einops.rearrange(\n val_results[""val_full_frame_comparison_seq""] * 255,\n ""t h w c -> h (t w) c"",\n )\n )\n # NOTE: Process-dependent control flow deliberately happens\n # after indexing operation since it must not contain code\n # sections that lead to cross-accelerator communication.\n if jax.process_index() == 0:\n log_images = dict(\n image=wandb.Image(np.asarray(gt_seq[args.seq_len - 1])),\n recon=wandb.Image(np.asarray(recon_seq[args.seq_len - 1])),\n true_vs_recon=wandb.Image(\n np.asarray(comparison_seq.astype(np.uint8))\n ),\n )\n if val_results:\n log_images.update(\n dict(\n val_image=wandb.Image(\n np.asarray(\n val_results[""gt_seq_val""][args.seq_len - 1]\n )\n ),\n val_recon=wandb.Image(\n np.asarray(\n val_results[""recon_seq_val""][\n args.seq_len - 1\n ]\n )\n ),\n val_true_vs_recon=wandb.Image(\n np.asarray(\n val_results[""val_comparison_seq""].astype(\n np.uint8\n )\n )\n ),\n )\n )\n if args.eval_full_frame:\n log_images.update(\n dict(\n val_full_frame=wandb.Image(\n np.asarray(\n val_results[""full_frame_seq_val""][\n args.seq_len - 1\n ]\n )\n ),\n val_true_vs_full_frame=wandb.Image(\n np.asarray(\n val_results[\n ""val_full_frame_comparison_seq""\n ].astype(np.uint8)\n )\n ),\n )\n )\n wandb.log(log_images)\n # --- Checkpointing ---\n if args.save_ckpt and step % args.log_checkpoint_interval == 0:\n assert checkpoint_manager is not None\n optimizer_state = nnx.state(optimizer)\n if val_iterator:\n ckpt_manager_args = ocp.args.Composite(\n model_state=ocp.args.PyTreeSave(optimizer_state), # type: ignore\n train_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n train_iterator # type: ignore\n ),\n val_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n val_iterator # type: ignore\n ),\n )\n else:\n ckpt_manager_args = ocp.args.Composite(\n model_state=ocp.args.PyTreeSave(optimizer_state), # type: ignore\n train_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n train_iterator # type: ignore\n ),\n )\n checkpoint_manager.save(step, args=ckpt_manager_args)\n print(f""Saved checkpoint at step {step}"")\n if step >= args.num_steps:\n break",python,selection_command +1741,1260440,"jasmine/train_dynamics.py",24256,7281," )\n )\n print(f""Step {step}, validation loss: {val_metrics['val_loss']}"")\n val_results = {\n ""metrics"": val_metrics,\n ""gt_batch"": 
val_gt_batch,\n ""recon"": val_recon,\n ""full_frame"": val_recon_full_frame,\n }\n\n # --- Logging ---\n if args.log:\n if step % args.log_interval == 0 and jax.process_index() == 0:\n log_dict = {""loss"": loss, ""step"": step, **metrics}\n if val_results:\n log_dict.update(val_results[""metrics""])\n wandb.log(log_dict)\n if step % args.log_image_interval == 0:\n gt_seq = batch[""videos""][0].astype(jnp.float32) / 255.0\n recon_seq = recon[0].clip(0, 1)\n comparison_seq = jnp.concatenate((gt_seq, recon_seq), axis=1)\n comparison_seq = einops.rearrange(\n comparison_seq * 255, ""t h w c -> h (t w) c""\n )\n if val_results:\n val_results[""gt_seq_val""] = (\n val_results[""gt_batch""][""videos""][0].astype(jnp.float32)\n / 255.0\n )\n val_results[""recon_seq_val""] = val_results[""recon""][0].clip(\n 0, 1\n )\n val_comparison_seq = jnp.concatenate(\n (val_results[""gt_seq_val""], val_results[""recon_seq_val""]),\n axis=1,\n )\n val_results[""val_comparison_seq""] = einops.rearrange(\n val_comparison_seq * 255, ""t h w c -> h (t w) c""\n )\n if args.eval_full_frame:\n val_results[""full_frame_seq_val""] = val_results[\n ""full_frame""\n ][0].clip(0, 1)\n val_results[""val_full_frame_comparison_seq""] = (\n jnp.concatenate(\n (\n val_results[""gt_seq_val""],\n val_results[""full_frame_seq_val""],\n ),\n axis=1,\n )\n )\n val_results[""val_full_frame_comparison_seq""] = (\n einops.rearrange(\n val_results[""val_full_frame_comparison_seq""] * 255,\n ""t h w c -> h (t w) c"",\n )\n )\n # NOTE: Process-dependent control flow deliberately happens\n # after indexing operation since it must not contain code\n # sections that lead to cross-accelerator communication.\n if jax.process_index() == 0:\n log_images = dict(\n image=wandb.Image(np.asarray(gt_seq[args.seq_len - 1])),\n recon=wandb.Image(np.asarray(recon_seq[args.seq_len - 1])),\n true_vs_recon=wandb.Image(\n np.asarray(comparison_seq.astype(np.uint8))\n ),\n )\n if val_results:\n log_images.update(\n dict(\n val_image=wandb.Image(\n np.asarray(\n val_results[""gt_seq_val""][args.seq_len - 1]\n )\n ),\n val_recon=wandb.Image(\n np.asarray(\n val_results[""recon_seq_val""][\n args.seq_len - 1\n ]\n )\n ),\n val_true_vs_recon=wandb.Image(\n np.asarray(\n val_results[""val_comparison_seq""].astype(\n np.uint8\n )\n )\n ),\n )\n )\n if args.eval_full_frame:\n log_images.update(\n dict(\n val_full_frame=wandb.Image(\n np.asarray(\n val_results[""full_frame_seq_val""][\n args.seq_len - 1\n ]\n )\n ),\n val_true_vs_full_frame=wandb.Image(\n np.asarray(\n val_results[\n ""val_full_frame_comparison_seq""\n ].astype(np.uint8)\n )\n ),\n )\n )\n wandb.log(log_images)\n # --- Checkpointing ---\n if args.save_ckpt and step % args.log_checkpoint_interval == 0:\n assert checkpoint_manager is not None\n optimizer_state = nnx.state(optimizer)\n if val_iterator:\n ckpt_manager_args = ocp.args.Composite(\n model_state=ocp.args.PyTreeSave(optimizer_state), # type: ignore\n train_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n train_iterator # type: ignore\n ),\n val_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n val_iterator # type: ignore\n ),\n )\n else:\n ckpt_manager_args = ocp.args.Composite(\n model_state=ocp.args.PyTreeSave(optimizer_state), # type: ignore\n train_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n train_iterator # type: ignore\n ),\n )\n checkpoint_manager.save(step, args=ckpt_manager_args)\n print(f""Saved checkpoint at step {step}"")\n if step >= args.num_steps:\n 
break",python,selection_command +1742,1260466,"jasmine/train_dynamics.py",24278,7259," )\n print(f""Step {step}, validation loss: {val_metrics['val_loss']}"")\n val_results = {\n ""metrics"": val_metrics,\n ""gt_batch"": val_gt_batch,\n ""recon"": val_recon,\n ""full_frame"": val_recon_full_frame,\n }\n\n # --- Logging ---\n if args.log:\n if step % args.log_interval == 0 and jax.process_index() == 0:\n log_dict = {""loss"": loss, ""step"": step, **metrics}\n if val_results:\n log_dict.update(val_results[""metrics""])\n wandb.log(log_dict)\n if step % args.log_image_interval == 0:\n gt_seq = batch[""videos""][0].astype(jnp.float32) / 255.0\n recon_seq = recon[0].clip(0, 1)\n comparison_seq = jnp.concatenate((gt_seq, recon_seq), axis=1)\n comparison_seq = einops.rearrange(\n comparison_seq * 255, ""t h w c -> h (t w) c""\n )\n if val_results:\n val_results[""gt_seq_val""] = (\n val_results[""gt_batch""][""videos""][0].astype(jnp.float32)\n / 255.0\n )\n val_results[""recon_seq_val""] = val_results[""recon""][0].clip(\n 0, 1\n )\n val_comparison_seq = jnp.concatenate(\n (val_results[""gt_seq_val""], val_results[""recon_seq_val""]),\n axis=1,\n )\n val_results[""val_comparison_seq""] = einops.rearrange(\n val_comparison_seq * 255, ""t h w c -> h (t w) c""\n )\n if args.eval_full_frame:\n val_results[""full_frame_seq_val""] = val_results[\n ""full_frame""\n ][0].clip(0, 1)\n val_results[""val_full_frame_comparison_seq""] = (\n jnp.concatenate(\n (\n val_results[""gt_seq_val""],\n val_results[""full_frame_seq_val""],\n ),\n axis=1,\n )\n )\n val_results[""val_full_frame_comparison_seq""] = (\n einops.rearrange(\n val_results[""val_full_frame_comparison_seq""] * 255,\n ""t h w c -> h (t w) c"",\n )\n )\n # NOTE: Process-dependent control flow deliberately happens\n # after indexing operation since it must not contain code\n # sections that lead to cross-accelerator communication.\n if jax.process_index() == 0:\n log_images = dict(\n image=wandb.Image(np.asarray(gt_seq[args.seq_len - 1])),\n recon=wandb.Image(np.asarray(recon_seq[args.seq_len - 1])),\n true_vs_recon=wandb.Image(\n np.asarray(comparison_seq.astype(np.uint8))\n ),\n )\n if val_results:\n log_images.update(\n dict(\n val_image=wandb.Image(\n np.asarray(\n val_results[""gt_seq_val""][args.seq_len - 1]\n )\n ),\n val_recon=wandb.Image(\n np.asarray(\n val_results[""recon_seq_val""][\n args.seq_len - 1\n ]\n )\n ),\n val_true_vs_recon=wandb.Image(\n np.asarray(\n val_results[""val_comparison_seq""].astype(\n np.uint8\n )\n )\n ),\n )\n )\n if args.eval_full_frame:\n log_images.update(\n dict(\n val_full_frame=wandb.Image(\n np.asarray(\n val_results[""full_frame_seq_val""][\n args.seq_len - 1\n ]\n )\n ),\n val_true_vs_full_frame=wandb.Image(\n np.asarray(\n val_results[\n ""val_full_frame_comparison_seq""\n ].astype(np.uint8)\n )\n ),\n )\n )\n wandb.log(log_images)\n # --- Checkpointing ---\n if args.save_ckpt and step % args.log_checkpoint_interval == 0:\n assert checkpoint_manager is not None\n optimizer_state = nnx.state(optimizer)\n if val_iterator:\n ckpt_manager_args = ocp.args.Composite(\n model_state=ocp.args.PyTreeSave(optimizer_state), # type: ignore\n train_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n train_iterator # type: ignore\n ),\n val_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n val_iterator # type: ignore\n ),\n )\n else:\n ckpt_manager_args = ocp.args.Composite(\n model_state=ocp.args.PyTreeSave(optimizer_state), # type: ignore\n 
train_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n train_iterator # type: ignore\n ),\n )\n checkpoint_manager.save(step, args=ckpt_manager_args)\n print(f""Saved checkpoint at step {step}"")\n if step >= args.num_steps:\n break",python,selection_command +1743,1260496,"jasmine/train_dynamics.py",24296,7241," print(f""Step {step}, validation loss: {val_metrics['val_loss']}"")\n val_results = {\n ""metrics"": val_metrics,\n ""gt_batch"": val_gt_batch,\n ""recon"": val_recon,\n ""full_frame"": val_recon_full_frame,\n }\n\n # --- Logging ---\n if args.log:\n if step % args.log_interval == 0 and jax.process_index() == 0:\n log_dict = {""loss"": loss, ""step"": step, **metrics}\n if val_results:\n log_dict.update(val_results[""metrics""])\n wandb.log(log_dict)\n if step % args.log_image_interval == 0:\n gt_seq = batch[""videos""][0].astype(jnp.float32) / 255.0\n recon_seq = recon[0].clip(0, 1)\n comparison_seq = jnp.concatenate((gt_seq, recon_seq), axis=1)\n comparison_seq = einops.rearrange(\n comparison_seq * 255, ""t h w c -> h (t w) c""\n )\n if val_results:\n val_results[""gt_seq_val""] = (\n val_results[""gt_batch""][""videos""][0].astype(jnp.float32)\n / 255.0\n )\n val_results[""recon_seq_val""] = val_results[""recon""][0].clip(\n 0, 1\n )\n val_comparison_seq = jnp.concatenate(\n (val_results[""gt_seq_val""], val_results[""recon_seq_val""]),\n axis=1,\n )\n val_results[""val_comparison_seq""] = einops.rearrange(\n val_comparison_seq * 255, ""t h w c -> h (t w) c""\n )\n if args.eval_full_frame:\n val_results[""full_frame_seq_val""] = val_results[\n ""full_frame""\n ][0].clip(0, 1)\n val_results[""val_full_frame_comparison_seq""] = (\n jnp.concatenate(\n (\n val_results[""gt_seq_val""],\n val_results[""full_frame_seq_val""],\n ),\n axis=1,\n )\n )\n val_results[""val_full_frame_comparison_seq""] = (\n einops.rearrange(\n val_results[""val_full_frame_comparison_seq""] * 255,\n ""t h w c -> h (t w) c"",\n )\n )\n # NOTE: Process-dependent control flow deliberately happens\n # after indexing operation since it must not contain code\n # sections that lead to cross-accelerator communication.\n if jax.process_index() == 0:\n log_images = dict(\n image=wandb.Image(np.asarray(gt_seq[args.seq_len - 1])),\n recon=wandb.Image(np.asarray(recon_seq[args.seq_len - 1])),\n true_vs_recon=wandb.Image(\n np.asarray(comparison_seq.astype(np.uint8))\n ),\n )\n if val_results:\n log_images.update(\n dict(\n val_image=wandb.Image(\n np.asarray(\n val_results[""gt_seq_val""][args.seq_len - 1]\n )\n ),\n val_recon=wandb.Image(\n np.asarray(\n val_results[""recon_seq_val""][\n args.seq_len - 1\n ]\n )\n ),\n val_true_vs_recon=wandb.Image(\n np.asarray(\n val_results[""val_comparison_seq""].astype(\n np.uint8\n )\n )\n ),\n )\n )\n if args.eval_full_frame:\n log_images.update(\n dict(\n val_full_frame=wandb.Image(\n np.asarray(\n val_results[""full_frame_seq_val""][\n args.seq_len - 1\n ]\n )\n ),\n val_true_vs_full_frame=wandb.Image(\n np.asarray(\n val_results[\n ""val_full_frame_comparison_seq""\n ].astype(np.uint8)\n )\n ),\n )\n )\n wandb.log(log_images)\n # --- Checkpointing ---\n if args.save_ckpt and step % args.log_checkpoint_interval == 0:\n assert checkpoint_manager is not None\n optimizer_state = nnx.state(optimizer)\n if val_iterator:\n ckpt_manager_args = ocp.args.Composite(\n model_state=ocp.args.PyTreeSave(optimizer_state), # type: ignore\n train_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n train_iterator # type: ignore\n ),\n 
val_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n val_iterator # type: ignore\n ),\n )\n else:\n ckpt_manager_args = ocp.args.Composite(\n model_state=ocp.args.PyTreeSave(optimizer_state), # type: ignore\n train_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n train_iterator # type: ignore\n ),\n )\n checkpoint_manager.save(step, args=ckpt_manager_args)\n print(f""Saved checkpoint at step {step}"")\n if step >= args.num_steps:\n break",python,selection_command +1744,1260523,"jasmine/train_dynamics.py",24378,7159," val_results = {\n ""metrics"": val_metrics,\n ""gt_batch"": val_gt_batch,\n ""recon"": val_recon,\n ""full_frame"": val_recon_full_frame,\n }\n\n # --- Logging ---\n if args.log:\n if step % args.log_interval == 0 and jax.process_index() == 0:\n log_dict = {""loss"": loss, ""step"": step, **metrics}\n if val_results:\n log_dict.update(val_results[""metrics""])\n wandb.log(log_dict)\n if step % args.log_image_interval == 0:\n gt_seq = batch[""videos""][0].astype(jnp.float32) / 255.0\n recon_seq = recon[0].clip(0, 1)\n comparison_seq = jnp.concatenate((gt_seq, recon_seq), axis=1)\n comparison_seq = einops.rearrange(\n comparison_seq * 255, ""t h w c -> h (t w) c""\n )\n if val_results:\n val_results[""gt_seq_val""] = (\n val_results[""gt_batch""][""videos""][0].astype(jnp.float32)\n / 255.0\n )\n val_results[""recon_seq_val""] = val_results[""recon""][0].clip(\n 0, 1\n )\n val_comparison_seq = jnp.concatenate(\n (val_results[""gt_seq_val""], val_results[""recon_seq_val""]),\n axis=1,\n )\n val_results[""val_comparison_seq""] = einops.rearrange(\n val_comparison_seq * 255, ""t h w c -> h (t w) c""\n )\n if args.eval_full_frame:\n val_results[""full_frame_seq_val""] = val_results[\n ""full_frame""\n ][0].clip(0, 1)\n val_results[""val_full_frame_comparison_seq""] = (\n jnp.concatenate(\n (\n val_results[""gt_seq_val""],\n val_results[""full_frame_seq_val""],\n ),\n axis=1,\n )\n )\n val_results[""val_full_frame_comparison_seq""] = (\n einops.rearrange(\n val_results[""val_full_frame_comparison_seq""] * 255,\n ""t h w c -> h (t w) c"",\n )\n )\n # NOTE: Process-dependent control flow deliberately happens\n # after indexing operation since it must not contain code\n # sections that lead to cross-accelerator communication.\n if jax.process_index() == 0:\n log_images = dict(\n image=wandb.Image(np.asarray(gt_seq[args.seq_len - 1])),\n recon=wandb.Image(np.asarray(recon_seq[args.seq_len - 1])),\n true_vs_recon=wandb.Image(\n np.asarray(comparison_seq.astype(np.uint8))\n ),\n )\n if val_results:\n log_images.update(\n dict(\n val_image=wandb.Image(\n np.asarray(\n val_results[""gt_seq_val""][args.seq_len - 1]\n )\n ),\n val_recon=wandb.Image(\n np.asarray(\n val_results[""recon_seq_val""][\n args.seq_len - 1\n ]\n )\n ),\n val_true_vs_recon=wandb.Image(\n np.asarray(\n val_results[""val_comparison_seq""].astype(\n np.uint8\n )\n )\n ),\n )\n )\n if args.eval_full_frame:\n log_images.update(\n dict(\n val_full_frame=wandb.Image(\n np.asarray(\n val_results[""full_frame_seq_val""][\n args.seq_len - 1\n ]\n )\n ),\n val_true_vs_full_frame=wandb.Image(\n np.asarray(\n val_results[\n ""val_full_frame_comparison_seq""\n ].astype(np.uint8)\n )\n ),\n )\n )\n wandb.log(log_images)\n # --- Checkpointing ---\n if args.save_ckpt and step % args.log_checkpoint_interval == 0:\n assert checkpoint_manager is not None\n optimizer_state = nnx.state(optimizer)\n if val_iterator:\n ckpt_manager_args = ocp.args.Composite(\n 
model_state=ocp.args.PyTreeSave(optimizer_state), # type: ignore\n train_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n train_iterator # type: ignore\n ),\n val_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n val_iterator # type: ignore\n ),\n )\n else:\n ckpt_manager_args = ocp.args.Composite(\n model_state=ocp.args.PyTreeSave(optimizer_state), # type: ignore\n train_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n train_iterator # type: ignore\n ),\n )\n checkpoint_manager.save(step, args=ckpt_manager_args)\n print(f""Saved checkpoint at step {step}"")\n if step >= args.num_steps:\n break",python,selection_command +1745,1260554,"jasmine/train_dynamics.py",24410,7127," ""metrics"": val_metrics,\n ""gt_batch"": val_gt_batch,\n ""recon"": val_recon,\n ""full_frame"": val_recon_full_frame,\n }\n\n # --- Logging ---\n if args.log:\n if step % args.log_interval == 0 and jax.process_index() == 0:\n log_dict = {""loss"": loss, ""step"": step, **metrics}\n if val_results:\n log_dict.update(val_results[""metrics""])\n wandb.log(log_dict)\n if step % args.log_image_interval == 0:\n gt_seq = batch[""videos""][0].astype(jnp.float32) / 255.0\n recon_seq = recon[0].clip(0, 1)\n comparison_seq = jnp.concatenate((gt_seq, recon_seq), axis=1)\n comparison_seq = einops.rearrange(\n comparison_seq * 255, ""t h w c -> h (t w) c""\n )\n if val_results:\n val_results[""gt_seq_val""] = (\n val_results[""gt_batch""][""videos""][0].astype(jnp.float32)\n / 255.0\n )\n val_results[""recon_seq_val""] = val_results[""recon""][0].clip(\n 0, 1\n )\n val_comparison_seq = jnp.concatenate(\n (val_results[""gt_seq_val""], val_results[""recon_seq_val""]),\n axis=1,\n )\n val_results[""val_comparison_seq""] = einops.rearrange(\n val_comparison_seq * 255, ""t h w c -> h (t w) c""\n )\n if args.eval_full_frame:\n val_results[""full_frame_seq_val""] = val_results[\n ""full_frame""\n ][0].clip(0, 1)\n val_results[""val_full_frame_comparison_seq""] = (\n jnp.concatenate(\n (\n val_results[""gt_seq_val""],\n val_results[""full_frame_seq_val""],\n ),\n axis=1,\n )\n )\n val_results[""val_full_frame_comparison_seq""] = (\n einops.rearrange(\n val_results[""val_full_frame_comparison_seq""] * 255,\n ""t h w c -> h (t w) c"",\n )\n )\n # NOTE: Process-dependent control flow deliberately happens\n # after indexing operation since it must not contain code\n # sections that lead to cross-accelerator communication.\n if jax.process_index() == 0:\n log_images = dict(\n image=wandb.Image(np.asarray(gt_seq[args.seq_len - 1])),\n recon=wandb.Image(np.asarray(recon_seq[args.seq_len - 1])),\n true_vs_recon=wandb.Image(\n np.asarray(comparison_seq.astype(np.uint8))\n ),\n )\n if val_results:\n log_images.update(\n dict(\n val_image=wandb.Image(\n np.asarray(\n val_results[""gt_seq_val""][args.seq_len - 1]\n )\n ),\n val_recon=wandb.Image(\n np.asarray(\n val_results[""recon_seq_val""][\n args.seq_len - 1\n ]\n )\n ),\n val_true_vs_recon=wandb.Image(\n np.asarray(\n val_results[""val_comparison_seq""].astype(\n np.uint8\n )\n )\n ),\n )\n )\n if args.eval_full_frame:\n log_images.update(\n dict(\n val_full_frame=wandb.Image(\n np.asarray(\n val_results[""full_frame_seq_val""][\n args.seq_len - 1\n ]\n )\n ),\n val_true_vs_full_frame=wandb.Image(\n np.asarray(\n val_results[\n ""val_full_frame_comparison_seq""\n ].astype(np.uint8)\n )\n ),\n )\n )\n wandb.log(log_images)\n # --- Checkpointing ---\n if args.save_ckpt and step % args.log_checkpoint_interval == 0:\n assert checkpoint_manager is 
not None\n optimizer_state = nnx.state(optimizer)\n if val_iterator:\n ckpt_manager_args = ocp.args.Composite(\n model_state=ocp.args.PyTreeSave(optimizer_state), # type: ignore\n train_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n train_iterator # type: ignore\n ),\n val_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n val_iterator # type: ignore\n ),\n )\n else:\n ckpt_manager_args = ocp.args.Composite(\n model_state=ocp.args.PyTreeSave(optimizer_state), # type: ignore\n train_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n train_iterator # type: ignore\n ),\n )\n checkpoint_manager.save(step, args=ckpt_manager_args)\n print(f""Saved checkpoint at step {step}"")\n if step >= args.num_steps:\n break",python,selection_command +1746,1260591,"jasmine/train_dynamics.py",24454,7083," ""gt_batch"": val_gt_batch,\n ""recon"": val_recon,\n ""full_frame"": val_recon_full_frame,\n }\n\n # --- Logging ---\n if args.log:\n if step % args.log_interval == 0 and jax.process_index() == 0:\n log_dict = {""loss"": loss, ""step"": step, **metrics}\n if val_results:\n log_dict.update(val_results[""metrics""])\n wandb.log(log_dict)\n if step % args.log_image_interval == 0:\n gt_seq = batch[""videos""][0].astype(jnp.float32) / 255.0\n recon_seq = recon[0].clip(0, 1)\n comparison_seq = jnp.concatenate((gt_seq, recon_seq), axis=1)\n comparison_seq = einops.rearrange(\n comparison_seq * 255, ""t h w c -> h (t w) c""\n )\n if val_results:\n val_results[""gt_seq_val""] = (\n val_results[""gt_batch""][""videos""][0].astype(jnp.float32)\n / 255.0\n )\n val_results[""recon_seq_val""] = val_results[""recon""][0].clip(\n 0, 1\n )\n val_comparison_seq = jnp.concatenate(\n (val_results[""gt_seq_val""], val_results[""recon_seq_val""]),\n axis=1,\n )\n val_results[""val_comparison_seq""] = einops.rearrange(\n val_comparison_seq * 255, ""t h w c -> h (t w) c""\n )\n if args.eval_full_frame:\n val_results[""full_frame_seq_val""] = val_results[\n ""full_frame""\n ][0].clip(0, 1)\n val_results[""val_full_frame_comparison_seq""] = (\n jnp.concatenate(\n (\n val_results[""gt_seq_val""],\n val_results[""full_frame_seq_val""],\n ),\n axis=1,\n )\n )\n val_results[""val_full_frame_comparison_seq""] = (\n einops.rearrange(\n val_results[""val_full_frame_comparison_seq""] * 255,\n ""t h w c -> h (t w) c"",\n )\n )\n # NOTE: Process-dependent control flow deliberately happens\n # after indexing operation since it must not contain code\n # sections that lead to cross-accelerator communication.\n if jax.process_index() == 0:\n log_images = dict(\n image=wandb.Image(np.asarray(gt_seq[args.seq_len - 1])),\n recon=wandb.Image(np.asarray(recon_seq[args.seq_len - 1])),\n true_vs_recon=wandb.Image(\n np.asarray(comparison_seq.astype(np.uint8))\n ),\n )\n if val_results:\n log_images.update(\n dict(\n val_image=wandb.Image(\n np.asarray(\n val_results[""gt_seq_val""][args.seq_len - 1]\n )\n ),\n val_recon=wandb.Image(\n np.asarray(\n val_results[""recon_seq_val""][\n args.seq_len - 1\n ]\n )\n ),\n val_true_vs_recon=wandb.Image(\n np.asarray(\n val_results[""val_comparison_seq""].astype(\n np.uint8\n )\n )\n ),\n )\n )\n if args.eval_full_frame:\n log_images.update(\n dict(\n val_full_frame=wandb.Image(\n np.asarray(\n val_results[""full_frame_seq_val""][\n args.seq_len - 1\n ]\n )\n ),\n val_true_vs_full_frame=wandb.Image(\n np.asarray(\n val_results[\n ""val_full_frame_comparison_seq""\n ].astype(np.uint8)\n )\n ),\n )\n )\n wandb.log(log_images)\n # --- Checkpointing ---\n if 
args.save_ckpt and step % args.log_checkpoint_interval == 0:\n assert checkpoint_manager is not None\n optimizer_state = nnx.state(optimizer)\n if val_iterator:\n ckpt_manager_args = ocp.args.Composite(\n model_state=ocp.args.PyTreeSave(optimizer_state), # type: ignore\n train_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n train_iterator # type: ignore\n ),\n val_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n val_iterator # type: ignore\n ),\n )\n else:\n ckpt_manager_args = ocp.args.Composite(\n model_state=ocp.args.PyTreeSave(optimizer_state), # type: ignore\n train_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n train_iterator # type: ignore\n ),\n )\n checkpoint_manager.save(step, args=ckpt_manager_args)\n print(f""Saved checkpoint at step {step}"")\n if step >= args.num_steps:\n break",python,selection_command +1747,1260623,"jasmine/train_dynamics.py",24500,7037," ""recon"": val_recon,\n ""full_frame"": val_recon_full_frame,\n }\n\n # --- Logging ---\n if args.log:\n if step % args.log_interval == 0 and jax.process_index() == 0:\n log_dict = {""loss"": loss, ""step"": step, **metrics}\n if val_results:\n log_dict.update(val_results[""metrics""])\n wandb.log(log_dict)\n if step % args.log_image_interval == 0:\n gt_seq = batch[""videos""][0].astype(jnp.float32) / 255.0\n recon_seq = recon[0].clip(0, 1)\n comparison_seq = jnp.concatenate((gt_seq, recon_seq), axis=1)\n comparison_seq = einops.rearrange(\n comparison_seq * 255, ""t h w c -> h (t w) c""\n )\n if val_results:\n val_results[""gt_seq_val""] = (\n val_results[""gt_batch""][""videos""][0].astype(jnp.float32)\n / 255.0\n )\n val_results[""recon_seq_val""] = val_results[""recon""][0].clip(\n 0, 1\n )\n val_comparison_seq = jnp.concatenate(\n (val_results[""gt_seq_val""], val_results[""recon_seq_val""]),\n axis=1,\n )\n val_results[""val_comparison_seq""] = einops.rearrange(\n val_comparison_seq * 255, ""t h w c -> h (t w) c""\n )\n if args.eval_full_frame:\n val_results[""full_frame_seq_val""] = val_results[\n ""full_frame""\n ][0].clip(0, 1)\n val_results[""val_full_frame_comparison_seq""] = (\n jnp.concatenate(\n (\n val_results[""gt_seq_val""],\n val_results[""full_frame_seq_val""],\n ),\n axis=1,\n )\n )\n val_results[""val_full_frame_comparison_seq""] = (\n einops.rearrange(\n val_results[""val_full_frame_comparison_seq""] * 255,\n ""t h w c -> h (t w) c"",\n )\n )\n # NOTE: Process-dependent control flow deliberately happens\n # after indexing operation since it must not contain code\n # sections that lead to cross-accelerator communication.\n if jax.process_index() == 0:\n log_images = dict(\n image=wandb.Image(np.asarray(gt_seq[args.seq_len - 1])),\n recon=wandb.Image(np.asarray(recon_seq[args.seq_len - 1])),\n true_vs_recon=wandb.Image(\n np.asarray(comparison_seq.astype(np.uint8))\n ),\n )\n if val_results:\n log_images.update(\n dict(\n val_image=wandb.Image(\n np.asarray(\n val_results[""gt_seq_val""][args.seq_len - 1]\n )\n ),\n val_recon=wandb.Image(\n np.asarray(\n val_results[""recon_seq_val""][\n args.seq_len - 1\n ]\n )\n ),\n val_true_vs_recon=wandb.Image(\n np.asarray(\n val_results[""val_comparison_seq""].astype(\n np.uint8\n )\n )\n ),\n )\n )\n if args.eval_full_frame:\n log_images.update(\n dict(\n val_full_frame=wandb.Image(\n np.asarray(\n val_results[""full_frame_seq_val""][\n args.seq_len - 1\n ]\n )\n ),\n val_true_vs_full_frame=wandb.Image(\n np.asarray(\n val_results[\n ""val_full_frame_comparison_seq""\n ].astype(np.uint8)\n )\n ),\n )\n )\n 
wandb.log(log_images)\n # --- Checkpointing ---\n if args.save_ckpt and step % args.log_checkpoint_interval == 0:\n assert checkpoint_manager is not None\n optimizer_state = nnx.state(optimizer)\n if val_iterator:\n ckpt_manager_args = ocp.args.Composite(\n model_state=ocp.args.PyTreeSave(optimizer_state), # type: ignore\n train_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n train_iterator # type: ignore\n ),\n val_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n val_iterator # type: ignore\n ),\n )\n else:\n ckpt_manager_args = ocp.args.Composite(\n model_state=ocp.args.PyTreeSave(optimizer_state), # type: ignore\n train_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n train_iterator # type: ignore\n ),\n )\n checkpoint_manager.save(step, args=ckpt_manager_args)\n print(f""Saved checkpoint at step {step}"")\n if step >= args.num_steps:\n break",python,selection_command +1748,1260672,"jasmine/train_dynamics.py",24540,6997," ""full_frame"": val_recon_full_frame,\n }\n\n # --- Logging ---\n if args.log:\n if step % args.log_interval == 0 and jax.process_index() == 0:\n log_dict = {""loss"": loss, ""step"": step, **metrics}\n if val_results:\n log_dict.update(val_results[""metrics""])\n wandb.log(log_dict)\n if step % args.log_image_interval == 0:\n gt_seq = batch[""videos""][0].astype(jnp.float32) / 255.0\n recon_seq = recon[0].clip(0, 1)\n comparison_seq = jnp.concatenate((gt_seq, recon_seq), axis=1)\n comparison_seq = einops.rearrange(\n comparison_seq * 255, ""t h w c -> h (t w) c""\n )\n if val_results:\n val_results[""gt_seq_val""] = (\n val_results[""gt_batch""][""videos""][0].astype(jnp.float32)\n / 255.0\n )\n val_results[""recon_seq_val""] = val_results[""recon""][0].clip(\n 0, 1\n )\n val_comparison_seq = jnp.concatenate(\n (val_results[""gt_seq_val""], val_results[""recon_seq_val""]),\n axis=1,\n )\n val_results[""val_comparison_seq""] = einops.rearrange(\n val_comparison_seq * 255, ""t h w c -> h (t w) c""\n )\n if args.eval_full_frame:\n val_results[""full_frame_seq_val""] = val_results[\n ""full_frame""\n ][0].clip(0, 1)\n val_results[""val_full_frame_comparison_seq""] = (\n jnp.concatenate(\n (\n val_results[""gt_seq_val""],\n val_results[""full_frame_seq_val""],\n ),\n axis=1,\n )\n )\n val_results[""val_full_frame_comparison_seq""] = (\n einops.rearrange(\n val_results[""val_full_frame_comparison_seq""] * 255,\n ""t h w c -> h (t w) c"",\n )\n )\n # NOTE: Process-dependent control flow deliberately happens\n # after indexing operation since it must not contain code\n # sections that lead to cross-accelerator communication.\n if jax.process_index() == 0:\n log_images = dict(\n image=wandb.Image(np.asarray(gt_seq[args.seq_len - 1])),\n recon=wandb.Image(np.asarray(recon_seq[args.seq_len - 1])),\n true_vs_recon=wandb.Image(\n np.asarray(comparison_seq.astype(np.uint8))\n ),\n )\n if val_results:\n log_images.update(\n dict(\n val_image=wandb.Image(\n np.asarray(\n val_results[""gt_seq_val""][args.seq_len - 1]\n )\n ),\n val_recon=wandb.Image(\n np.asarray(\n val_results[""recon_seq_val""][\n args.seq_len - 1\n ]\n )\n ),\n val_true_vs_recon=wandb.Image(\n np.asarray(\n val_results[""val_comparison_seq""].astype(\n np.uint8\n )\n )\n ),\n )\n )\n if args.eval_full_frame:\n log_images.update(\n dict(\n val_full_frame=wandb.Image(\n np.asarray(\n val_results[""full_frame_seq_val""][\n args.seq_len - 1\n ]\n )\n ),\n val_true_vs_full_frame=wandb.Image(\n np.asarray(\n val_results[\n ""val_full_frame_comparison_seq""\n 
].astype(np.uint8)\n )\n ),\n )\n )\n wandb.log(log_images)\n # --- Checkpointing ---\n if args.save_ckpt and step % args.log_checkpoint_interval == 0:\n assert checkpoint_manager is not None\n optimizer_state = nnx.state(optimizer)\n if val_iterator:\n ckpt_manager_args = ocp.args.Composite(\n model_state=ocp.args.PyTreeSave(optimizer_state), # type: ignore\n train_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n train_iterator # type: ignore\n ),\n val_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n val_iterator # type: ignore\n ),\n )\n else:\n ckpt_manager_args = ocp.args.Composite(\n model_state=ocp.args.PyTreeSave(optimizer_state), # type: ignore\n train_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n train_iterator # type: ignore\n ),\n )\n checkpoint_manager.save(step, args=ckpt_manager_args)\n print(f""Saved checkpoint at step {step}"")\n if step >= args.num_steps:\n break",python,selection_command +1749,1260676,"jasmine/train_dynamics.py",24596,6941," }\n\n # --- Logging ---\n if args.log:\n if step % args.log_interval == 0 and jax.process_index() == 0:\n log_dict = {""loss"": loss, ""step"": step, **metrics}\n if val_results:\n log_dict.update(val_results[""metrics""])\n wandb.log(log_dict)\n if step % args.log_image_interval == 0:\n gt_seq = batch[""videos""][0].astype(jnp.float32) / 255.0\n recon_seq = recon[0].clip(0, 1)\n comparison_seq = jnp.concatenate((gt_seq, recon_seq), axis=1)\n comparison_seq = einops.rearrange(\n comparison_seq * 255, ""t h w c -> h (t w) c""\n )\n if val_results:\n val_results[""gt_seq_val""] = (\n val_results[""gt_batch""][""videos""][0].astype(jnp.float32)\n / 255.0\n )\n val_results[""recon_seq_val""] = val_results[""recon""][0].clip(\n 0, 1\n )\n val_comparison_seq = jnp.concatenate(\n (val_results[""gt_seq_val""], val_results[""recon_seq_val""]),\n axis=1,\n )\n val_results[""val_comparison_seq""] = einops.rearrange(\n val_comparison_seq * 255, ""t h w c -> h (t w) c""\n )\n if args.eval_full_frame:\n val_results[""full_frame_seq_val""] = val_results[\n ""full_frame""\n ][0].clip(0, 1)\n val_results[""val_full_frame_comparison_seq""] = (\n jnp.concatenate(\n (\n val_results[""gt_seq_val""],\n val_results[""full_frame_seq_val""],\n ),\n axis=1,\n )\n )\n val_results[""val_full_frame_comparison_seq""] = (\n einops.rearrange(\n val_results[""val_full_frame_comparison_seq""] * 255,\n ""t h w c -> h (t w) c"",\n )\n )\n # NOTE: Process-dependent control flow deliberately happens\n # after indexing operation since it must not contain code\n # sections that lead to cross-accelerator communication.\n if jax.process_index() == 0:\n log_images = dict(\n image=wandb.Image(np.asarray(gt_seq[args.seq_len - 1])),\n recon=wandb.Image(np.asarray(recon_seq[args.seq_len - 1])),\n true_vs_recon=wandb.Image(\n np.asarray(comparison_seq.astype(np.uint8))\n ),\n )\n if val_results:\n log_images.update(\n dict(\n val_image=wandb.Image(\n np.asarray(\n val_results[""gt_seq_val""][args.seq_len - 1]\n )\n ),\n val_recon=wandb.Image(\n np.asarray(\n val_results[""recon_seq_val""][\n args.seq_len - 1\n ]\n )\n ),\n val_true_vs_recon=wandb.Image(\n np.asarray(\n val_results[""val_comparison_seq""].astype(\n np.uint8\n )\n )\n ),\n )\n )\n if args.eval_full_frame:\n log_images.update(\n dict(\n val_full_frame=wandb.Image(\n np.asarray(\n val_results[""full_frame_seq_val""][\n args.seq_len - 1\n ]\n )\n ),\n val_true_vs_full_frame=wandb.Image(\n np.asarray(\n val_results[\n ""val_full_frame_comparison_seq""\n 
].astype(np.uint8)\n )\n ),\n )\n )\n wandb.log(log_images)\n # --- Checkpointing ---\n if args.save_ckpt and step % args.log_checkpoint_interval == 0:\n assert checkpoint_manager is not None\n optimizer_state = nnx.state(optimizer)\n if val_iterator:\n ckpt_manager_args = ocp.args.Composite(\n model_state=ocp.args.PyTreeSave(optimizer_state), # type: ignore\n train_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n train_iterator # type: ignore\n ),\n val_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n val_iterator # type: ignore\n ),\n )\n else:\n ckpt_manager_args = ocp.args.Composite(\n model_state=ocp.args.PyTreeSave(optimizer_state), # type: ignore\n train_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n train_iterator # type: ignore\n ),\n )\n checkpoint_manager.save(step, args=ckpt_manager_args)\n print(f""Saved checkpoint at step {step}"")\n if step >= args.num_steps:\n break",python,selection_command +1750,1260882,"jasmine/train_dynamics.py",24614,6923,"\n # --- Logging ---\n if args.log:\n if step % args.log_interval == 0 and jax.process_index() == 0:\n log_dict = {""loss"": loss, ""step"": step, **metrics}\n if val_results:\n log_dict.update(val_results[""metrics""])\n wandb.log(log_dict)\n if step % args.log_image_interval == 0:\n gt_seq = batch[""videos""][0].astype(jnp.float32) / 255.0\n recon_seq = recon[0].clip(0, 1)\n comparison_seq = jnp.concatenate((gt_seq, recon_seq), axis=1)\n comparison_seq = einops.rearrange(\n comparison_seq * 255, ""t h w c -> h (t w) c""\n )\n if val_results:\n val_results[""gt_seq_val""] = (\n val_results[""gt_batch""][""videos""][0].astype(jnp.float32)\n / 255.0\n )\n val_results[""recon_seq_val""] = val_results[""recon""][0].clip(\n 0, 1\n )\n val_comparison_seq = jnp.concatenate(\n (val_results[""gt_seq_val""], val_results[""recon_seq_val""]),\n axis=1,\n )\n val_results[""val_comparison_seq""] = einops.rearrange(\n val_comparison_seq * 255, ""t h w c -> h (t w) c""\n )\n if args.eval_full_frame:\n val_results[""full_frame_seq_val""] = val_results[\n ""full_frame""\n ][0].clip(0, 1)\n val_results[""val_full_frame_comparison_seq""] = (\n jnp.concatenate(\n (\n val_results[""gt_seq_val""],\n val_results[""full_frame_seq_val""],\n ),\n axis=1,\n )\n )\n val_results[""val_full_frame_comparison_seq""] = (\n einops.rearrange(\n val_results[""val_full_frame_comparison_seq""] * 255,\n ""t h w c -> h (t w) c"",\n )\n )\n # NOTE: Process-dependent control flow deliberately happens\n # after indexing operation since it must not contain code\n # sections that lead to cross-accelerator communication.\n if jax.process_index() == 0:\n log_images = dict(\n image=wandb.Image(np.asarray(gt_seq[args.seq_len - 1])),\n recon=wandb.Image(np.asarray(recon_seq[args.seq_len - 1])),\n true_vs_recon=wandb.Image(\n np.asarray(comparison_seq.astype(np.uint8))\n ),\n )\n if val_results:\n log_images.update(\n dict(\n val_image=wandb.Image(\n np.asarray(\n val_results[""gt_seq_val""][args.seq_len - 1]\n )\n ),\n val_recon=wandb.Image(\n np.asarray(\n val_results[""recon_seq_val""][\n args.seq_len - 1\n ]\n )\n ),\n val_true_vs_recon=wandb.Image(\n np.asarray(\n val_results[""val_comparison_seq""].astype(\n np.uint8\n )\n )\n ),\n )\n )\n if args.eval_full_frame:\n log_images.update(\n dict(\n val_full_frame=wandb.Image(\n np.asarray(\n val_results[""full_frame_seq_val""][\n args.seq_len - 1\n ]\n )\n ),\n val_true_vs_full_frame=wandb.Image(\n np.asarray(\n val_results[\n ""val_full_frame_comparison_seq""\n 
].astype(np.uint8)\n )\n ),\n )\n )\n wandb.log(log_images)\n # --- Checkpointing ---\n if args.save_ckpt and step % args.log_checkpoint_interval == 0:\n assert checkpoint_manager is not None\n optimizer_state = nnx.state(optimizer)\n if val_iterator:\n ckpt_manager_args = ocp.args.Composite(\n model_state=ocp.args.PyTreeSave(optimizer_state), # type: ignore\n train_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n train_iterator # type: ignore\n ),\n val_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n val_iterator # type: ignore\n ),\n )\n else:\n ckpt_manager_args = ocp.args.Composite(\n model_state=ocp.args.PyTreeSave(optimizer_state), # type: ignore\n train_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n train_iterator # type: ignore\n ),\n )\n checkpoint_manager.save(step, args=ckpt_manager_args)\n print(f""Saved checkpoint at step {step}"")\n if step >= args.num_steps:\n break",python,selection_command +1751,1260883,"TERMINAL",0,0,"6",,terminal_output +1752,1261239,"jasmine/train_dynamics.py",24614,6924,"",python,content +1753,1261878,"TERMINAL",0,0,"7",,terminal_output +1754,1262203,"jasmine/train_dynamics.py",24596,0,"",python,selection_command +1755,1262929,"TERMINAL",0,0,"8",,terminal_output +1756,1263040,"jasmine/train_dynamics.py",24596,17," }",python,selection_command +1757,1263322,"jasmine/train_dynamics.py",24540,73," ""full_frame"": val_recon_full_frame,\n }",python,selection_command +1758,1263813,"jasmine/train_dynamics.py",24500,113," ""recon"": val_recon,\n ""full_frame"": val_recon_full_frame,\n }",python,selection_command +1759,1263828,"jasmine/train_dynamics.py",24454,159," ""gt_batch"": val_gt_batch,\n ""recon"": val_recon,\n ""full_frame"": val_recon_full_frame,\n }",python,selection_command +1760,1263871,"jasmine/train_dynamics.py",24410,203," ""metrics"": val_metrics,\n ""gt_batch"": val_gt_batch,\n ""recon"": val_recon,\n ""full_frame"": val_recon_full_frame,\n }",python,selection_command +1761,1263913,"jasmine/train_dynamics.py",24378,235," val_results = {\n ""metrics"": val_metrics,\n ""gt_batch"": val_gt_batch,\n ""recon"": val_recon,\n ""full_frame"": val_recon_full_frame,\n }",python,selection_command +1762,1264007,"jasmine/train_dynamics.py",24296,317," print(f""Step {step}, validation loss: {val_metrics['val_loss']}"")\n val_results = {\n ""metrics"": val_metrics,\n ""gt_batch"": val_gt_batch,\n ""recon"": val_recon,\n ""full_frame"": val_recon_full_frame,\n }",python,selection_command +1763,1264007,"jasmine/train_dynamics.py",24278,335," )\n print(f""Step {step}, validation loss: {val_metrics['val_loss']}"")\n val_results = {\n ""metrics"": val_metrics,\n ""gt_batch"": val_gt_batch,\n ""recon"": val_recon,\n ""full_frame"": val_recon_full_frame,\n }",python,selection_command +1764,1264008,"jasmine/train_dynamics.py",24256,357," )\n )\n print(f""Step {step}, validation loss: {val_metrics['val_loss']}"")\n val_results = {\n ""metrics"": val_metrics,\n ""gt_batch"": val_gt_batch,\n ""recon"": val_recon,\n ""full_frame"": val_recon_full_frame,\n }",python,selection_command +1765,1264035,"jasmine/train_dynamics.py",24185,428," dataloader_val, optimizer.model, _rng_mask_val\n )\n )\n print(f""Step {step}, validation loss: {val_metrics['val_loss']}"")\n val_results = {\n ""metrics"": val_metrics,\n ""gt_batch"": val_gt_batch,\n ""recon"": val_recon,\n ""full_frame"": val_recon_full_frame,\n }",python,selection_command +1766,1264036,"TERMINAL",0,0,"9",,terminal_output 
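The block selected and then deleted above (records 1737-1752) carries a NOTE that is easy to miss in the scrolling: on multi-host JAX, any work that can trigger cross-accelerator communication (indexing into sharded arrays, jnp.concatenate, einops.rearrange over device arrays) must execute on every process, and only the host-side side effects may hide behind jax.process_index() == 0; otherwise the run risks deadlocking on a collective that not every process joins. A minimal sketch of that pattern, assuming wandb.init() has already run; log_comparison is a hypothetical helper, not part of the recorded file:

import jax
import jax.numpy as jnp
import numpy as np
import wandb

def log_comparison(gt_seq, recon_seq, step):
    # Runs on ALL processes: concatenating (possibly sharded) device arrays
    # may involve cross-accelerator communication, so it must not sit
    # inside a process-dependent branch.
    comparison = jnp.concatenate((gt_seq, recon_seq), axis=1)
    frame = np.asarray(comparison * 255).astype(np.uint8)
    # Only the host-side side effect is guarded: every process reaches the
    # same collectives above, but only process 0 talks to wandb.
    if jax.process_index() == 0:
        wandb.log({"true_vs_recon": wandb.Image(frame), "step": step})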
+1767,1264087,"jasmine/train_dynamics.py",24135,478," calculate_validation_metrics(\n dataloader_val, optimizer.model, _rng_mask_val\n )\n )\n print(f""Step {step}, validation loss: {val_metrics['val_loss']}"")\n val_results = {\n ""metrics"": val_metrics,\n ""gt_batch"": val_gt_batch,\n ""recon"": val_recon,\n ""full_frame"": val_recon_full_frame,\n }",python,selection_command +1768,1264087,"jasmine/train_dynamics.py",24056,557," val_metrics, val_gt_batch, val_recon, val_recon_full_frame = (\n calculate_validation_metrics(\n dataloader_val, optimizer.model, _rng_mask_val\n )\n )\n print(f""Step {step}, validation loss: {val_metrics['val_loss']}"")\n val_results = {\n ""metrics"": val_metrics,\n ""gt_batch"": val_gt_batch,\n ""recon"": val_recon,\n ""full_frame"": val_recon_full_frame,\n }",python,selection_command +1769,1264100,"jasmine/train_dynamics.py",23997,616," print(""Calculating validation metrics..."")\n val_metrics, val_gt_batch, val_recon, val_recon_full_frame = (\n calculate_validation_metrics(\n dataloader_val, optimizer.model, _rng_mask_val\n )\n )\n print(f""Step {step}, validation loss: {val_metrics['val_loss']}"")\n val_results = {\n ""metrics"": val_metrics,\n ""gt_batch"": val_gt_batch,\n ""recon"": val_recon,\n ""full_frame"": val_recon_full_frame,\n }",python,selection_command +1770,1264170,"jasmine/train_dynamics.py",23935,678," rng, _rng_mask_val = jax.random.split(rng, 2)\n print(""Calculating validation metrics..."")\n val_metrics, val_gt_batch, val_recon, val_recon_full_frame = (\n calculate_validation_metrics(\n dataloader_val, optimizer.model, _rng_mask_val\n )\n )\n print(f""Step {step}, validation loss: {val_metrics['val_loss']}"")\n val_results = {\n ""metrics"": val_metrics,\n ""gt_batch"": val_gt_batch,\n ""recon"": val_recon,\n ""full_frame"": val_recon_full_frame,\n }",python,selection_command +1771,1264171,"jasmine/train_dynamics.py",23870,743," if dataloader_val and step % args.val_interval == 0:\n rng, _rng_mask_val = jax.random.split(rng, 2)\n print(""Calculating validation metrics..."")\n val_metrics, val_gt_batch, val_recon, val_recon_full_frame = (\n calculate_validation_metrics(\n dataloader_val, optimizer.model, _rng_mask_val\n )\n )\n print(f""Step {step}, validation loss: {val_metrics['val_loss']}"")\n val_results = {\n ""metrics"": val_metrics,\n ""gt_batch"": val_gt_batch,\n ""recon"": val_recon,\n ""full_frame"": val_recon_full_frame,\n }",python,selection_command +1772,1264203,"jasmine/train_dynamics.py",23841,772," val_results = {}\n if dataloader_val and step % args.val_interval == 0:\n rng, _rng_mask_val = jax.random.split(rng, 2)\n print(""Calculating validation metrics..."")\n val_metrics, val_gt_batch, val_recon, val_recon_full_frame = (\n calculate_validation_metrics(\n dataloader_val, optimizer.model, _rng_mask_val\n )\n )\n print(f""Step {step}, validation loss: {val_metrics['val_loss']}"")\n val_results = {\n ""metrics"": val_metrics,\n ""gt_batch"": val_gt_batch,\n ""recon"": val_recon,\n ""full_frame"": val_recon_full_frame,\n }",python,selection_command +1773,1264250,"jasmine/train_dynamics.py",23803,810," # --- Validation loss ---\n val_results = {}\n if dataloader_val and step % args.val_interval == 0:\n rng, _rng_mask_val = jax.random.split(rng, 2)\n print(""Calculating validation metrics..."")\n val_metrics, val_gt_batch, val_recon, val_recon_full_frame = (\n calculate_validation_metrics(\n dataloader_val, optimizer.model, _rng_mask_val\n )\n )\n print(f""Step {step}, validation loss: {val_metrics['val_loss']}"")\n 
val_results = {\n ""metrics"": val_metrics,\n ""gt_batch"": val_gt_batch,\n ""recon"": val_recon,\n ""full_frame"": val_recon_full_frame,\n }",python,selection_command +1774,1264284,"jasmine/train_dynamics.py",23802,811,"\n # --- Validation loss ---\n val_results = {}\n if dataloader_val and step % args.val_interval == 0:\n rng, _rng_mask_val = jax.random.split(rng, 2)\n print(""Calculating validation metrics..."")\n val_metrics, val_gt_batch, val_recon, val_recon_full_frame = (\n calculate_validation_metrics(\n dataloader_val, optimizer.model, _rng_mask_val\n )\n )\n print(f""Step {step}, validation loss: {val_metrics['val_loss']}"")\n val_results = {\n ""metrics"": val_metrics,\n ""gt_batch"": val_gt_batch,\n ""recon"": val_recon,\n ""full_frame"": val_recon_full_frame,\n }",python,selection_command +1775,1264300,"jasmine/train_dynamics.py",23780,833," step += 1\n\n # --- Validation loss ---\n val_results = {}\n if dataloader_val and step % args.val_interval == 0:\n rng, _rng_mask_val = jax.random.split(rng, 2)\n print(""Calculating validation metrics..."")\n val_metrics, val_gt_batch, val_recon, val_recon_full_frame = (\n calculate_validation_metrics(\n dataloader_val, optimizer.model, _rng_mask_val\n )\n )\n print(f""Step {step}, validation loss: {val_metrics['val_loss']}"")\n val_results = {\n ""metrics"": val_metrics,\n ""gt_batch"": val_gt_batch,\n ""recon"": val_recon,\n ""full_frame"": val_recon_full_frame,\n }",python,selection_command +1776,1264342,"jasmine/train_dynamics.py",23720,893," print_mem_stats(""After params initialized"")\n step += 1\n\n # --- Validation loss ---\n val_results = {}\n if dataloader_val and step % args.val_interval == 0:\n rng, _rng_mask_val = jax.random.split(rng, 2)\n print(""Calculating validation metrics..."")\n val_metrics, val_gt_batch, val_recon, val_recon_full_frame = (\n calculate_validation_metrics(\n dataloader_val, optimizer.model, _rng_mask_val\n )\n )\n print(f""Step {step}, validation loss: {val_metrics['val_loss']}"")\n val_results = {\n ""metrics"": val_metrics,\n ""gt_batch"": val_gt_batch,\n ""recon"": val_recon,\n ""full_frame"": val_recon_full_frame,\n }",python,selection_command +1777,1264379,"jasmine/train_dynamics.py",23685,928," if step == first_step:\n print_mem_stats(""After params initialized"")\n step += 1\n\n # --- Validation loss ---\n val_results = {}\n if dataloader_val and step % args.val_interval == 0:\n rng, _rng_mask_val = jax.random.split(rng, 2)\n print(""Calculating validation metrics..."")\n val_metrics, val_gt_batch, val_recon, val_recon_full_frame = (\n calculate_validation_metrics(\n dataloader_val, optimizer.model, _rng_mask_val\n )\n )\n print(f""Step {step}, validation loss: {val_metrics['val_loss']}"")\n val_results = {\n ""metrics"": val_metrics,\n ""gt_batch"": val_gt_batch,\n ""recon"": val_recon,\n ""full_frame"": val_recon_full_frame,\n }",python,selection_command +1778,1264398,"jasmine/train_dynamics.py",23621,992," loss, recon, metrics = train_step(optimizer, batch)\n if step == first_step:\n print_mem_stats(""After params initialized"")\n step += 1\n\n # --- Validation loss ---\n val_results = {}\n if dataloader_val and step % args.val_interval == 0:\n rng, _rng_mask_val = jax.random.split(rng, 2)\n print(""Calculating validation metrics..."")\n val_metrics, val_gt_batch, val_recon, val_recon_full_frame = (\n calculate_validation_metrics(\n dataloader_val, optimizer.model, _rng_mask_val\n )\n )\n print(f""Step {step}, validation loss: {val_metrics['val_loss']}"")\n val_results = {\n ""metrics"": 
val_metrics,\n ""gt_batch"": val_gt_batch,\n ""recon"": val_recon,\n ""full_frame"": val_recon_full_frame,\n }",python,selection_command +1779,1264422,"jasmine/train_dynamics.py",23584,1029," batch[""rng""] = _rng_mask\n loss, recon, metrics = train_step(optimizer, batch)\n if step == first_step:\n print_mem_stats(""After params initialized"")\n step += 1\n\n # --- Validation loss ---\n val_results = {}\n if dataloader_val and step % args.val_interval == 0:\n rng, _rng_mask_val = jax.random.split(rng, 2)\n print(""Calculating validation metrics..."")\n val_metrics, val_gt_batch, val_recon, val_recon_full_frame = (\n calculate_validation_metrics(\n dataloader_val, optimizer.model, _rng_mask_val\n )\n )\n print(f""Step {step}, validation loss: {val_metrics['val_loss']}"")\n val_results = {\n ""metrics"": val_metrics,\n ""gt_batch"": val_gt_batch,\n ""recon"": val_recon,\n ""full_frame"": val_recon_full_frame,\n }",python,selection_command +1780,1264472,"jasmine/train_dynamics.py",23530,1083," rng, _rng_mask = jax.random.split(rng, 2)\n batch[""rng""] = _rng_mask\n loss, recon, metrics = train_step(optimizer, batch)\n if step == first_step:\n print_mem_stats(""After params initialized"")\n step += 1\n\n # --- Validation loss ---\n val_results = {}\n if dataloader_val and step % args.val_interval == 0:\n rng, _rng_mask_val = jax.random.split(rng, 2)\n print(""Calculating validation metrics..."")\n val_metrics, val_gt_batch, val_recon, val_recon_full_frame = (\n calculate_validation_metrics(\n dataloader_val, optimizer.model, _rng_mask_val\n )\n )\n print(f""Step {step}, validation loss: {val_metrics['val_loss']}"")\n val_results = {\n ""metrics"": val_metrics,\n ""gt_batch"": val_gt_batch,\n ""recon"": val_recon,\n ""full_frame"": val_recon_full_frame,\n }",python,selection_command +1781,1264473,"jasmine/train_dynamics.py",23497,1116," # --- Train step ---\n rng, _rng_mask = jax.random.split(rng, 2)\n batch[""rng""] = _rng_mask\n loss, recon, metrics = train_step(optimizer, batch)\n if step == first_step:\n print_mem_stats(""After params initialized"")\n step += 1\n\n # --- Validation loss ---\n val_results = {}\n if dataloader_val and step % args.val_interval == 0:\n rng, _rng_mask_val = jax.random.split(rng, 2)\n print(""Calculating validation metrics..."")\n val_metrics, val_gt_batch, val_recon, val_recon_full_frame = (\n calculate_validation_metrics(\n dataloader_val, optimizer.model, _rng_mask_val\n )\n )\n print(f""Step {step}, validation loss: {val_metrics['val_loss']}"")\n val_results = {\n ""metrics"": val_metrics,\n ""gt_batch"": val_gt_batch,\n ""recon"": val_recon,\n ""full_frame"": val_recon_full_frame,\n }",python,selection_command +1782,1264527,"jasmine/train_dynamics.py",23458,1155," for batch in dataloader_train:\n # --- Train step ---\n rng, _rng_mask = jax.random.split(rng, 2)\n batch[""rng""] = _rng_mask\n loss, recon, metrics = train_step(optimizer, batch)\n if step == first_step:\n print_mem_stats(""After params initialized"")\n step += 1\n\n # --- Validation loss ---\n val_results = {}\n if dataloader_val and step % args.val_interval == 0:\n rng, _rng_mask_val = jax.random.split(rng, 2)\n print(""Calculating validation metrics..."")\n val_metrics, val_gt_batch, val_recon, val_recon_full_frame = (\n calculate_validation_metrics(\n dataloader_val, optimizer.model, _rng_mask_val\n )\n )\n print(f""Step {step}, validation loss: {val_metrics['val_loss']}"")\n val_results = {\n ""metrics"": val_metrics,\n ""gt_batch"": val_gt_batch,\n ""recon"": val_recon,\n ""full_frame"": 
val_recon_full_frame,\n }",python,selection_command +1783,1264528,"jasmine/train_dynamics.py",23425,1188," while step < args.num_steps:\n for batch in dataloader_train:\n # --- Train step ---\n rng, _rng_mask = jax.random.split(rng, 2)\n batch[""rng""] = _rng_mask\n loss, recon, metrics = train_step(optimizer, batch)\n if step == first_step:\n print_mem_stats(""After params initialized"")\n step += 1\n\n # --- Validation loss ---\n val_results = {}\n if dataloader_val and step % args.val_interval == 0:\n rng, _rng_mask_val = jax.random.split(rng, 2)\n print(""Calculating validation metrics..."")\n val_metrics, val_gt_batch, val_recon, val_recon_full_frame = (\n calculate_validation_metrics(\n dataloader_val, optimizer.model, _rng_mask_val\n )\n )\n print(f""Step {step}, validation loss: {val_metrics['val_loss']}"")\n val_results = {\n ""metrics"": val_metrics,\n ""gt_batch"": val_gt_batch,\n ""recon"": val_recon,\n ""full_frame"": val_recon_full_frame,\n }",python,selection_command +1784,1264578,"jasmine/train_dynamics.py",23403,1210," first_step = step\n while step < args.num_steps:\n for batch in dataloader_train:\n # --- Train step ---\n rng, _rng_mask = jax.random.split(rng, 2)\n batch[""rng""] = _rng_mask\n loss, recon, metrics = train_step(optimizer, batch)\n if step == first_step:\n print_mem_stats(""After params initialized"")\n step += 1\n\n # --- Validation loss ---\n val_results = {}\n if dataloader_val and step % args.val_interval == 0:\n rng, _rng_mask_val = jax.random.split(rng, 2)\n print(""Calculating validation metrics..."")\n val_metrics, val_gt_batch, val_recon, val_recon_full_frame = (\n calculate_validation_metrics(\n dataloader_val, optimizer.model, _rng_mask_val\n )\n )\n print(f""Step {step}, validation loss: {val_metrics['val_loss']}"")\n val_results = {\n ""metrics"": val_metrics,\n ""gt_batch"": val_gt_batch,\n ""recon"": val_recon,\n ""full_frame"": val_recon_full_frame,\n }",python,selection_command +1785,1264642,"jasmine/train_dynamics.py",23425,1188," while step < args.num_steps:\n for batch in dataloader_train:\n # --- Train step ---\n rng, _rng_mask = jax.random.split(rng, 2)\n batch[""rng""] = _rng_mask\n loss, recon, metrics = train_step(optimizer, batch)\n if step == first_step:\n print_mem_stats(""After params initialized"")\n step += 1\n\n # --- Validation loss ---\n val_results = {}\n if dataloader_val and step % args.val_interval == 0:\n rng, _rng_mask_val = jax.random.split(rng, 2)\n print(""Calculating validation metrics..."")\n val_metrics, val_gt_batch, val_recon, val_recon_full_frame = (\n calculate_validation_metrics(\n dataloader_val, optimizer.model, _rng_mask_val\n )\n )\n print(f""Step {step}, validation loss: {val_metrics['val_loss']}"")\n val_results = {\n ""metrics"": val_metrics,\n ""gt_batch"": val_gt_batch,\n ""recon"": val_recon,\n ""full_frame"": val_recon_full_frame,\n }",python,selection_command +1786,1264984,"TERMINAL",0,0,"30",,terminal_output +1787,1265136,"jasmine/train_dynamics.py",23458,1155," for batch in dataloader_train:\n # --- Train step ---\n rng, _rng_mask = jax.random.split(rng, 2)\n batch[""rng""] = _rng_mask\n loss, recon, metrics = train_step(optimizer, batch)\n if step == first_step:\n print_mem_stats(""After params initialized"")\n step += 1\n\n # --- Validation loss ---\n val_results = {}\n if dataloader_val and step % args.val_interval == 0:\n rng, _rng_mask_val = jax.random.split(rng, 2)\n print(""Calculating validation metrics..."")\n val_metrics, val_gt_batch, val_recon, val_recon_full_frame = (\n 
calculate_validation_metrics(\n dataloader_val, optimizer.model, _rng_mask_val\n )\n )\n print(f""Step {step}, validation loss: {val_metrics['val_loss']}"")\n val_results = {\n ""metrics"": val_metrics,\n ""gt_batch"": val_gt_batch,\n ""recon"": val_recon,\n ""full_frame"": val_recon_full_frame,\n }",python,selection_command +1788,1265166,"jasmine/train_dynamics.py",23497,1116," # --- Train step ---\n rng, _rng_mask = jax.random.split(rng, 2)\n batch[""rng""] = _rng_mask\n loss, recon, metrics = train_step(optimizer, batch)\n if step == first_step:\n print_mem_stats(""After params initialized"")\n step += 1\n\n # --- Validation loss ---\n val_results = {}\n if dataloader_val and step % args.val_interval == 0:\n rng, _rng_mask_val = jax.random.split(rng, 2)\n print(""Calculating validation metrics..."")\n val_metrics, val_gt_batch, val_recon, val_recon_full_frame = (\n calculate_validation_metrics(\n dataloader_val, optimizer.model, _rng_mask_val\n )\n )\n print(f""Step {step}, validation loss: {val_metrics['val_loss']}"")\n val_results = {\n ""metrics"": val_metrics,\n ""gt_batch"": val_gt_batch,\n ""recon"": val_recon,\n ""full_frame"": val_recon_full_frame,\n }",python,selection_command +1789,1265197,"jasmine/train_dynamics.py",23530,1083," rng, _rng_mask = jax.random.split(rng, 2)\n batch[""rng""] = _rng_mask\n loss, recon, metrics = train_step(optimizer, batch)\n if step == first_step:\n print_mem_stats(""After params initialized"")\n step += 1\n\n # --- Validation loss ---\n val_results = {}\n if dataloader_val and step % args.val_interval == 0:\n rng, _rng_mask_val = jax.random.split(rng, 2)\n print(""Calculating validation metrics..."")\n val_metrics, val_gt_batch, val_recon, val_recon_full_frame = (\n calculate_validation_metrics(\n dataloader_val, optimizer.model, _rng_mask_val\n )\n )\n print(f""Step {step}, validation loss: {val_metrics['val_loss']}"")\n val_results = {\n ""metrics"": val_metrics,\n ""gt_batch"": val_gt_batch,\n ""recon"": val_recon,\n ""full_frame"": val_recon_full_frame,\n }",python,selection_command +1790,1265217,"jasmine/train_dynamics.py",23584,1029," batch[""rng""] = _rng_mask\n loss, recon, metrics = train_step(optimizer, batch)\n if step == first_step:\n print_mem_stats(""After params initialized"")\n step += 1\n\n # --- Validation loss ---\n val_results = {}\n if dataloader_val and step % args.val_interval == 0:\n rng, _rng_mask_val = jax.random.split(rng, 2)\n print(""Calculating validation metrics..."")\n val_metrics, val_gt_batch, val_recon, val_recon_full_frame = (\n calculate_validation_metrics(\n dataloader_val, optimizer.model, _rng_mask_val\n )\n )\n print(f""Step {step}, validation loss: {val_metrics['val_loss']}"")\n val_results = {\n ""metrics"": val_metrics,\n ""gt_batch"": val_gt_batch,\n ""recon"": val_recon,\n ""full_frame"": val_recon_full_frame,\n }",python,selection_command +1791,1265252,"jasmine/train_dynamics.py",23621,992," loss, recon, metrics = train_step(optimizer, batch)\n if step == first_step:\n print_mem_stats(""After params initialized"")\n step += 1\n\n # --- Validation loss ---\n val_results = {}\n if dataloader_val and step % args.val_interval == 0:\n rng, _rng_mask_val = jax.random.split(rng, 2)\n print(""Calculating validation metrics..."")\n val_metrics, val_gt_batch, val_recon, val_recon_full_frame = (\n calculate_validation_metrics(\n dataloader_val, optimizer.model, _rng_mask_val\n )\n )\n print(f""Step {step}, validation loss: {val_metrics['val_loss']}"")\n val_results = {\n ""metrics"": val_metrics,\n 
""gt_batch"": val_gt_batch,\n ""recon"": val_recon,\n ""full_frame"": val_recon_full_frame,\n }",python,selection_command +1792,1265275,"jasmine/train_dynamics.py",23685,928," if step == first_step:\n print_mem_stats(""After params initialized"")\n step += 1\n\n # --- Validation loss ---\n val_results = {}\n if dataloader_val and step % args.val_interval == 0:\n rng, _rng_mask_val = jax.random.split(rng, 2)\n print(""Calculating validation metrics..."")\n val_metrics, val_gt_batch, val_recon, val_recon_full_frame = (\n calculate_validation_metrics(\n dataloader_val, optimizer.model, _rng_mask_val\n )\n )\n print(f""Step {step}, validation loss: {val_metrics['val_loss']}"")\n val_results = {\n ""metrics"": val_metrics,\n ""gt_batch"": val_gt_batch,\n ""recon"": val_recon,\n ""full_frame"": val_recon_full_frame,\n }",python,selection_command +1793,1265307,"jasmine/train_dynamics.py",23720,893," print_mem_stats(""After params initialized"")\n step += 1\n\n # --- Validation loss ---\n val_results = {}\n if dataloader_val and step % args.val_interval == 0:\n rng, _rng_mask_val = jax.random.split(rng, 2)\n print(""Calculating validation metrics..."")\n val_metrics, val_gt_batch, val_recon, val_recon_full_frame = (\n calculate_validation_metrics(\n dataloader_val, optimizer.model, _rng_mask_val\n )\n )\n print(f""Step {step}, validation loss: {val_metrics['val_loss']}"")\n val_results = {\n ""metrics"": val_metrics,\n ""gt_batch"": val_gt_batch,\n ""recon"": val_recon,\n ""full_frame"": val_recon_full_frame,\n }",python,selection_command +1794,1265346,"jasmine/train_dynamics.py",23780,833," step += 1\n\n # --- Validation loss ---\n val_results = {}\n if dataloader_val and step % args.val_interval == 0:\n rng, _rng_mask_val = jax.random.split(rng, 2)\n print(""Calculating validation metrics..."")\n val_metrics, val_gt_batch, val_recon, val_recon_full_frame = (\n calculate_validation_metrics(\n dataloader_val, optimizer.model, _rng_mask_val\n )\n )\n print(f""Step {step}, validation loss: {val_metrics['val_loss']}"")\n val_results = {\n ""metrics"": val_metrics,\n ""gt_batch"": val_gt_batch,\n ""recon"": val_recon,\n ""full_frame"": val_recon_full_frame,\n }",python,selection_command +1795,1265370,"jasmine/train_dynamics.py",23802,811,"\n # --- Validation loss ---\n val_results = {}\n if dataloader_val and step % args.val_interval == 0:\n rng, _rng_mask_val = jax.random.split(rng, 2)\n print(""Calculating validation metrics..."")\n val_metrics, val_gt_batch, val_recon, val_recon_full_frame = (\n calculate_validation_metrics(\n dataloader_val, optimizer.model, _rng_mask_val\n )\n )\n print(f""Step {step}, validation loss: {val_metrics['val_loss']}"")\n val_results = {\n ""metrics"": val_metrics,\n ""gt_batch"": val_gt_batch,\n ""recon"": val_recon,\n ""full_frame"": val_recon_full_frame,\n }",python,selection_command +1796,1265409,"jasmine/train_dynamics.py",23803,810," # --- Validation loss ---\n val_results = {}\n if dataloader_val and step % args.val_interval == 0:\n rng, _rng_mask_val = jax.random.split(rng, 2)\n print(""Calculating validation metrics..."")\n val_metrics, val_gt_batch, val_recon, val_recon_full_frame = (\n calculate_validation_metrics(\n dataloader_val, optimizer.model, _rng_mask_val\n )\n )\n print(f""Step {step}, validation loss: {val_metrics['val_loss']}"")\n val_results = {\n ""metrics"": val_metrics,\n ""gt_batch"": val_gt_batch,\n ""recon"": val_recon,\n ""full_frame"": val_recon_full_frame,\n }",python,selection_command 
+1797,1265783,"jasmine/train_dynamics.py",23803,811,"",python,content +1798,1265979,"TERMINAL",0,0,"1",,terminal_output +1799,1266022,"jasmine/train_dynamics.py",23802,0,"",python,selection_command +1800,1266199,"jasmine/train_dynamics.py",23780,0,"",python,selection_command +1801,1266331,"jasmine/train_dynamics.py",23720,0,"",python,selection_command +1802,1266826,"jasmine/train_dynamics.py",23685,0,"",python,selection_command +1803,1266847,"jasmine/train_dynamics.py",23621,0,"",python,selection_command +1804,1266900,"jasmine/train_dynamics.py",23584,0,"",python,selection_command +1805,1266915,"jasmine/train_dynamics.py",23530,0,"",python,selection_command +1806,1266937,"jasmine/train_dynamics.py",23497,0,"",python,selection_command +1807,1266994,"jasmine/train_dynamics.py",23458,0,"",python,selection_command +1808,1267025,"jasmine/train_dynamics.py",23425,0,"",python,selection_command +1809,1267041,"jasmine/train_dynamics.py",23403,0,"",python,selection_command +1810,1267076,"jasmine/train_dynamics.py",23351,0,"",python,selection_command +1811,1267077,"TERMINAL",0,0,"2",,terminal_output +1812,1267101,"jasmine/train_dynamics.py",23275,0,"",python,selection_command +1813,1267126,"jasmine/train_dynamics.py",23221,0,"",python,selection_command +1814,1267156,"jasmine/train_dynamics.py",23158,0,"",python,selection_command +1815,1267207,"jasmine/train_dynamics.py",23094,0,"",python,selection_command +1816,1267212,"jasmine/train_dynamics.py",23024,0,"",python,selection_command +1817,1267247,"jasmine/train_dynamics.py",22975,0,"",python,selection_command +1818,1267299,"jasmine/train_dynamics.py",22930,0,"",python,selection_command +1819,1267304,"jasmine/train_dynamics.py",22897,0,"",python,selection_command +1820,1267350,"jasmine/train_dynamics.py",22887,0,"",python,selection_command +1821,1267379,"jasmine/train_dynamics.py",22850,0,"",python,selection_command +1822,1267413,"jasmine/train_dynamics.py",22887,0,"",python,selection_command +1823,1267914,"jasmine/train_dynamics.py",22897,0,"",python,selection_command +1824,1267957,"jasmine/train_dynamics.py",22930,0,"",python,selection_command +1825,1267981,"jasmine/train_dynamics.py",22975,0,"",python,selection_command +1826,1268024,"jasmine/train_dynamics.py",23024,0,"",python,selection_command +1827,1268024,"jasmine/train_dynamics.py",23094,0,"",python,selection_command +1828,1268025,"TERMINAL",0,0,"3",,terminal_output +1829,1268062,"jasmine/train_dynamics.py",23158,0,"",python,selection_command +1830,1268095,"jasmine/train_dynamics.py",23221,0,"",python,selection_command +1831,1268146,"jasmine/train_dynamics.py",23275,0,"",python,selection_command +1832,1268333,"jasmine/train_dynamics.py",23351,0,"",python,selection_command +1833,1268578,"jasmine/train_dynamics.py",23403,0,"",python,selection_command +1834,1268818,"jasmine/train_dynamics.py",23424,0,"\n # --- Validation loss ---\n val_results = {}\n if dataloader_val and step % args.val_interval == 0:\n rng, _rng_mask_val = jax.random.split(rng, 2)\n print(""Calculating validation metrics..."")\n val_metrics, val_gt_batch, val_recon, val_recon_full_frame = (\n calculate_validation_metrics(\n dataloader_val, optimizer.model, _rng_mask_val\n )\n )\n print(f""Step {step}, validation loss: {val_metrics['val_loss']}"")\n val_results = {\n ""metrics"": val_metrics,\n ""gt_batch"": val_gt_batch,\n ""recon"": val_recon,\n ""full_frame"": val_recon_full_frame,\n }",python,content +1835,1268856,"jasmine/train_dynamics.py",23437,0,"",python,selection_command +1836,1269041,"TERMINAL",0,0,"4",,terminal_output 
+1837,1269719,"jasmine/train_dynamics.py",23425,37," # --- Validation loss ---",python,selection_command +1838,1269923,"jasmine/train_dynamics.py",23425,66," # --- Validation loss ---\n val_results = {}",python,selection_command +1839,1270054,"TERMINAL",0,0,"5",,terminal_output +1840,1270425,"jasmine/train_dynamics.py",23425,131," # --- Validation loss ---\n val_results = {}\n if dataloader_val and step % args.val_interval == 0:",python,selection_command +1841,1270465,"jasmine/train_dynamics.py",23425,193," # --- Validation loss ---\n val_results = {}\n if dataloader_val and step % args.val_interval == 0:\n rng, _rng_mask_val = jax.random.split(rng, 2)",python,selection_command +1842,1270504,"jasmine/train_dynamics.py",23425,252," # --- Validation loss ---\n val_results = {}\n if dataloader_val and step % args.val_interval == 0:\n rng, _rng_mask_val = jax.random.split(rng, 2)\n print(""Calculating validation metrics..."")",python,selection_command +1843,1270505,"jasmine/train_dynamics.py",23425,331," # --- Validation loss ---\n val_results = {}\n if dataloader_val and step % args.val_interval == 0:\n rng, _rng_mask_val = jax.random.split(rng, 2)\n print(""Calculating validation metrics..."")\n val_metrics, val_gt_batch, val_recon, val_recon_full_frame = (",python,selection_command +1844,1270547,"jasmine/train_dynamics.py",23425,381," # --- Validation loss ---\n val_results = {}\n if dataloader_val and step % args.val_interval == 0:\n rng, _rng_mask_val = jax.random.split(rng, 2)\n print(""Calculating validation metrics..."")\n val_metrics, val_gt_batch, val_recon, val_recon_full_frame = (\n calculate_validation_metrics(",python,selection_command +1845,1270589,"jasmine/train_dynamics.py",23425,452," # --- Validation loss ---\n val_results = {}\n if dataloader_val and step % args.val_interval == 0:\n rng, _rng_mask_val = jax.random.split(rng, 2)\n print(""Calculating validation metrics..."")\n val_metrics, val_gt_batch, val_recon, val_recon_full_frame = (\n calculate_validation_metrics(\n dataloader_val, optimizer.model, _rng_mask_val",python,selection_command +1846,1270624,"jasmine/train_dynamics.py",23425,474," # --- Validation loss ---\n val_results = {}\n if dataloader_val and step % args.val_interval == 0:\n rng, _rng_mask_val = jax.random.split(rng, 2)\n print(""Calculating validation metrics..."")\n val_metrics, val_gt_batch, val_recon, val_recon_full_frame = (\n calculate_validation_metrics(\n dataloader_val, optimizer.model, _rng_mask_val\n )",python,selection_command +1847,1270629,"jasmine/train_dynamics.py",23425,492," # --- Validation loss ---\n val_results = {}\n if dataloader_val and step % args.val_interval == 0:\n rng, _rng_mask_val = jax.random.split(rng, 2)\n print(""Calculating validation metrics..."")\n val_metrics, val_gt_batch, val_recon, val_recon_full_frame = (\n calculate_validation_metrics(\n dataloader_val, optimizer.model, _rng_mask_val\n )\n )",python,selection_command +1848,1270665,"jasmine/train_dynamics.py",23425,574," # --- Validation loss ---\n val_results = {}\n if dataloader_val and step % args.val_interval == 0:\n rng, _rng_mask_val = jax.random.split(rng, 2)\n print(""Calculating validation metrics..."")\n val_metrics, val_gt_batch, val_recon, val_recon_full_frame = (\n calculate_validation_metrics(\n dataloader_val, optimizer.model, _rng_mask_val\n )\n )\n print(f""Step {step}, validation loss: {val_metrics['val_loss']}"")",python,selection_command +1849,1270703,"jasmine/train_dynamics.py",23425,606," # --- Validation loss ---\n val_results = {}\n if 
dataloader_val and step % args.val_interval == 0:\n rng, _rng_mask_val = jax.random.split(rng, 2)\n print(""Calculating validation metrics..."")\n val_metrics, val_gt_batch, val_recon, val_recon_full_frame = (\n calculate_validation_metrics(\n dataloader_val, optimizer.model, _rng_mask_val\n )\n )\n print(f""Step {step}, validation loss: {val_metrics['val_loss']}"")\n val_results = {",python,selection_command +1850,1270753,"jasmine/train_dynamics.py",23425,650," # --- Validation loss ---\n val_results = {}\n if dataloader_val and step % args.val_interval == 0:\n rng, _rng_mask_val = jax.random.split(rng, 2)\n print(""Calculating validation metrics..."")\n val_metrics, val_gt_batch, val_recon, val_recon_full_frame = (\n calculate_validation_metrics(\n dataloader_val, optimizer.model, _rng_mask_val\n )\n )\n print(f""Step {step}, validation loss: {val_metrics['val_loss']}"")\n val_results = {\n ""metrics"": val_metrics,",python,selection_command +1851,1270864,"jasmine/train_dynamics.py",23425,696," # --- Validation loss ---\n val_results = {}\n if dataloader_val and step % args.val_interval == 0:\n rng, _rng_mask_val = jax.random.split(rng, 2)\n print(""Calculating validation metrics..."")\n val_metrics, val_gt_batch, val_recon, val_recon_full_frame = (\n calculate_validation_metrics(\n dataloader_val, optimizer.model, _rng_mask_val\n )\n )\n print(f""Step {step}, validation loss: {val_metrics['val_loss']}"")\n val_results = {\n ""metrics"": val_metrics,\n ""gt_batch"": val_gt_batch,",python,selection_command +1852,1270990,"jasmine/train_dynamics.py",23425,736," # --- Validation loss ---\n val_results = {}\n if dataloader_val and step % args.val_interval == 0:\n rng, _rng_mask_val = jax.random.split(rng, 2)\n print(""Calculating validation metrics..."")\n val_metrics, val_gt_batch, val_recon, val_recon_full_frame = (\n calculate_validation_metrics(\n dataloader_val, optimizer.model, _rng_mask_val\n )\n )\n print(f""Step {step}, validation loss: {val_metrics['val_loss']}"")\n val_results = {\n ""metrics"": val_metrics,\n ""gt_batch"": val_gt_batch,\n ""recon"": val_recon,",python,selection_command +1853,1271075,"TERMINAL",0,0,"6",,terminal_output +1854,1271133,"jasmine/train_dynamics.py",23425,792," # --- Validation loss ---\n val_results = {}\n if dataloader_val and step % args.val_interval == 0:\n rng, _rng_mask_val = jax.random.split(rng, 2)\n print(""Calculating validation metrics..."")\n val_metrics, val_gt_batch, val_recon, val_recon_full_frame = (\n calculate_validation_metrics(\n dataloader_val, optimizer.model, _rng_mask_val\n )\n )\n print(f""Step {step}, validation loss: {val_metrics['val_loss']}"")\n val_results = {\n ""metrics"": val_metrics,\n ""gt_batch"": val_gt_batch,\n ""recon"": val_recon,\n ""full_frame"": val_recon_full_frame,",python,selection_command +1855,1271283,"jasmine/train_dynamics.py",23425,810," # --- Validation loss ---\n val_results = {}\n if dataloader_val and step % args.val_interval == 0:\n rng, _rng_mask_val = jax.random.split(rng, 2)\n print(""Calculating validation metrics..."")\n val_metrics, val_gt_batch, val_recon, val_recon_full_frame = (\n calculate_validation_metrics(\n dataloader_val, optimizer.model, _rng_mask_val\n )\n )\n print(f""Step {step}, validation loss: {val_metrics['val_loss']}"")\n val_results = {\n ""metrics"": val_metrics,\n ""gt_batch"": val_gt_batch,\n ""recon"": val_recon,\n ""full_frame"": val_recon_full_frame,\n }",python,selection_command +1856,1271593,"jasmine/train_dynamics.py",23437,0,"",python,selection_command 
+1857,1271846,"jasmine/train_dynamics.py",24230,4,"",python,content +1858,1271846,"jasmine/train_dynamics.py",24178,4,"",python,content +1859,1271846,"jasmine/train_dynamics.py",24138,4,"",python,content +1860,1271846,"jasmine/train_dynamics.py",24092,4,"",python,content +1861,1271846,"jasmine/train_dynamics.py",24048,4,"",python,content +1862,1271846,"jasmine/train_dynamics.py",24012,4,"",python,content +1863,1271847,"jasmine/train_dynamics.py",23930,4,"",python,content +1864,1271847,"jasmine/train_dynamics.py",23912,4,"",python,content +1865,1271847,"jasmine/train_dynamics.py",23894,4,"",python,content +1866,1271847,"jasmine/train_dynamics.py",23827,4,"",python,content +1867,1271847,"jasmine/train_dynamics.py",23773,4,"",python,content +1868,1271847,"jasmine/train_dynamics.py",23690,4,"",python,content +1869,1271847,"jasmine/train_dynamics.py",23631,4,"",python,content +1870,1271847,"jasmine/train_dynamics.py",23569,4,"",python,content +1871,1271847,"jasmine/train_dynamics.py",23500,4,"",python,content +1872,1271847,"jasmine/train_dynamics.py",23471,4,"",python,content +1873,1271847,"jasmine/train_dynamics.py",23433,4,"",python,content +1874,1272108,"TERMINAL",0,0,"7",,terminal_output +1875,1272155,"jasmine/train_dynamics.py",24162,4,"",python,content +1876,1272156,"jasmine/train_dynamics.py",24114,4,"",python,content +1877,1272156,"jasmine/train_dynamics.py",24078,4,"",python,content +1878,1272156,"jasmine/train_dynamics.py",24036,4,"",python,content +1879,1272156,"jasmine/train_dynamics.py",23996,4,"",python,content +1880,1272156,"jasmine/train_dynamics.py",23964,4,"",python,content +1881,1272156,"jasmine/train_dynamics.py",23886,4,"",python,content +1882,1272156,"jasmine/train_dynamics.py",23872,4,"",python,content +1883,1272156,"jasmine/train_dynamics.py",23858,4,"",python,content +1884,1272156,"jasmine/train_dynamics.py",23795,4,"",python,content +1885,1272156,"jasmine/train_dynamics.py",23745,4,"",python,content +1886,1272156,"jasmine/train_dynamics.py",23666,4,"",python,content +1887,1272156,"jasmine/train_dynamics.py",23611,4,"",python,content +1888,1272156,"jasmine/train_dynamics.py",23553,4,"",python,content +1889,1272156,"jasmine/train_dynamics.py",23488,4,"",python,content +1890,1272157,"jasmine/train_dynamics.py",23463,4,"",python,content +1891,1272157,"jasmine/train_dynamics.py",23429,4,"",python,content +1892,1272305,"jasmine/train_dynamics.py",23428,0,"",python,selection_command +1893,1272457,"jasmine/train_dynamics.py",23458,0,"",python,selection_command +1894,1272629,"jasmine/train_dynamics.py",23479,0,"",python,selection_command +1895,1272813,"jasmine/train_dynamics.py",23536,0,"",python,selection_command +1896,1273103,"TERMINAL",0,0,"8",,terminal_output +1897,1273318,"jasmine/train_dynamics.py",23590,0,"",python,selection_command +1898,1273330,"jasmine/train_dynamics.py",23641,0,"",python,selection_command +1899,1273373,"jasmine/train_dynamics.py",23712,0,"",python,selection_command +1900,1273411,"jasmine/train_dynamics.py",23754,0,"",python,selection_command +1901,1273454,"jasmine/train_dynamics.py",23817,0,"",python,selection_command +1902,1273480,"jasmine/train_dynamics.py",23831,0,"",python,selection_command +1903,1273519,"jasmine/train_dynamics.py",23841,0,"",python,selection_command +1904,1273534,"jasmine/train_dynamics.py",23915,0,"",python,selection_command +1905,1273572,"jasmine/train_dynamics.py",23939,0,"",python,selection_command +1906,1273611,"jasmine/train_dynamics.py",23975,0,"",python,selection_command 
+1907,1273643,"jasmine/train_dynamics.py",24013,0,"",python,selection_command +1908,1273697,"jasmine/train_dynamics.py",24045,0,"",python,selection_command +1909,1273704,"jasmine/train_dynamics.py",24093,0,"",python,selection_command +1910,1274121,"TERMINAL",0,0,"9",,terminal_output +1911,1274142,"jasmine/train_dynamics.py",24103,0,"",python,selection_command +1912,1274534,"jasmine/train_dynamics.py",24100,32," while step < args.num_steps:",python,selection_command +1913,1274750,"jasmine/train_dynamics.py",24100,71," while step < args.num_steps:\n for batch in dataloader_train:",python,selection_command +1914,1275137,"TERMINAL",0,0,"40",,terminal_output +1915,1275265,"jasmine/train_dynamics.py",24100,104," while step < args.num_steps:\n for batch in dataloader_train:\n # --- Train step ---",python,selection_command +1916,1275297,"jasmine/train_dynamics.py",24100,158," while step < args.num_steps:\n for batch in dataloader_train:\n # --- Train step ---\n rng, _rng_mask = jax.random.split(rng, 2)",python,selection_command +1917,1275305,"jasmine/train_dynamics.py",24100,195," while step < args.num_steps:\n for batch in dataloader_train:\n # --- Train step ---\n rng, _rng_mask = jax.random.split(rng, 2)\n batch[""rng""] = _rng_mask",python,selection_command +1918,1275431,"jasmine/train_dynamics.py",24100,259," while step < args.num_steps:\n for batch in dataloader_train:\n # --- Train step ---\n rng, _rng_mask = jax.random.split(rng, 2)\n batch[""rng""] = _rng_mask\n loss, recon, metrics = train_step(optimizer, batch)",python,selection_command +1919,1275609,"jasmine/train_dynamics.py",24100,294," while step < args.num_steps:\n for batch in dataloader_train:\n # --- Train step ---\n rng, _rng_mask = jax.random.split(rng, 2)\n batch[""rng""] = _rng_mask\n loss, recon, metrics = train_step(optimizer, batch)\n if step == first_step:",python,selection_command +1920,1275771,"jasmine/train_dynamics.py",24100,354," while step < args.num_steps:\n for batch in dataloader_train:\n # --- Train step ---\n rng, _rng_mask = jax.random.split(rng, 2)\n batch[""rng""] = _rng_mask\n loss, recon, metrics = train_step(optimizer, batch)\n if step == first_step:\n print_mem_stats(""After params initialized"")",python,selection_command +1921,1275929,"jasmine/train_dynamics.py",24100,376," while step < args.num_steps:\n for batch in dataloader_train:\n # --- Train step ---\n rng, _rng_mask = jax.random.split(rng, 2)\n batch[""rng""] = _rng_mask\n loss, recon, metrics = train_step(optimizer, batch)\n if step == first_step:\n print_mem_stats(""After params initialized"")\n step += 1",python,selection_command +1922,1276164,"TERMINAL",0,0,"1",,terminal_output +1923,1276205,"jasmine/train_dynamics.py",24100,377,"",python,content +1924,1276840,"jasmine/train_dynamics.py",24100,1,"",python,content +1925,1277133,"jasmine/train_dynamics.py",24090,0,"",python,selection_command +1926,1277274,"TERMINAL",0,0,"2",,terminal_output +1927,1277630,"jasmine/train_dynamics.py",24042,0,"",python,selection_command +1928,1277672,"jasmine/train_dynamics.py",24010,0,"",python,selection_command +1929,1277716,"jasmine/train_dynamics.py",23972,0,"",python,selection_command +1930,1277717,"jasmine/train_dynamics.py",23936,0,"",python,selection_command +1931,1277760,"jasmine/train_dynamics.py",23912,0,"",python,selection_command +1932,1277809,"jasmine/train_dynamics.py",23838,0,"",python,selection_command +1933,1278018,"jasmine/train_dynamics.py",23912,0,"",python,selection_command 
+1934,1278196,"jasmine/train_dynamics.py",23936,0,"",python,selection_command +1935,1278249,"TERMINAL",0,0,"3",,terminal_output +1936,1278467,"jasmine/train_dynamics.py",23912,0,"",python,selection_command +1937,1278669,"jasmine/train_dynamics.py",23838,0,"",python,selection_command +1938,1279023,"jasmine/train_dynamics.py",23911,0,"\n ",python,content +1939,1279253,"jasmine/train_dynamics.py",23920,0,"p",python,content +1940,1279254,"jasmine/train_dynamics.py",23921,0,"",python,selection_keyboard +1941,1279316,"TERMINAL",0,0,"4",,terminal_output +1942,1279491,"jasmine/train_dynamics.py",23921,0,"r",python,content +1943,1279492,"jasmine/train_dynamics.py",23922,0,"",python,selection_keyboard +1944,1279591,"jasmine/train_dynamics.py",23922,0,"i",python,content +1945,1279592,"jasmine/train_dynamics.py",23923,0,"",python,selection_keyboard +1946,1280085,"jasmine/train_dynamics.py",23923,0,"n",python,content +1947,1280086,"jasmine/train_dynamics.py",23924,0,"",python,selection_keyboard +1948,1280231,"jasmine/train_dynamics.py",23924,0,"t",python,content +1949,1280233,"jasmine/train_dynamics.py",23925,0,"",python,selection_keyboard +1950,1280274,"TERMINAL",0,0,"5",,terminal_output +1951,1281027,"jasmine/train_dynamics.py",23925,0,"()",python,content +1952,1281028,"jasmine/train_dynamics.py",23926,0,"",python,selection_keyboard +1953,1281284,"TERMINAL",0,0,"6",,terminal_output +1954,1282289,"TERMINAL",0,0,"7",,terminal_output +1955,1283328,"TERMINAL",0,0,"8",,terminal_output +1956,1283410,"jasmine/train_dynamics.py",23926,0,"v",python,content +1957,1283411,"jasmine/train_dynamics.py",23927,0,"",python,selection_keyboard +1958,1283570,"jasmine/train_dynamics.py",23927,0,"a",python,content +1959,1283571,"jasmine/train_dynamics.py",23928,0,"",python,selection_keyboard +1960,1283622,"jasmine/train_dynamics.py",23928,0,"l",python,content +1961,1283623,"jasmine/train_dynamics.py",23929,0,"",python,selection_keyboard +1962,1283939,"jasmine/train_dynamics.py",23929,0,"_",python,content +1963,1283940,"jasmine/train_dynamics.py",23930,0,"",python,selection_keyboard +1964,1284224,"jasmine/train_dynamics.py",23930,0,"m",python,content +1965,1284225,"jasmine/train_dynamics.py",23931,0,"",python,selection_keyboard +1966,1284374,"TERMINAL",0,0,"9",,terminal_output +1967,1284388,"jasmine/train_dynamics.py",23931,0,"e",python,content +1968,1284389,"jasmine/train_dynamics.py",23932,0,"",python,selection_keyboard +1969,1284867,"jasmine/train_dynamics.py",23926,6,"val_metrics",python,content +1970,1285267,"jasmine/train_dynamics.py",23936,0,"",python,selection_command +1971,1285432,"jasmine/train_dynamics.py",23862,0,"",python,selection_command +1972,1285520,"TERMINAL",0,0,"50",,terminal_output +1973,1286207,"jasmine/train_dynamics.py",23936,0,"",python,selection_command +1974,1286371,"jasmine/train_dynamics.py",23961,0,"",python,selection_command +1975,1286448,"TERMINAL",0,0,"1",,terminal_output +1976,1287138,"jasmine/train_dynamics.py",23939,23," val_results = {",python,selection_command +1977,1287428,"TERMINAL",0,0,"2",,terminal_output +1978,1287520,"jasmine/train_dynamics.py",23939,59," val_results = {\n ""metrics"": val_metrics,",python,selection_command +1979,1287642,"jasmine/train_dynamics.py",23939,97," val_results = {\n ""metrics"": val_metrics,\n ""gt_batch"": val_gt_batch,",python,selection_command +1980,1287790,"jasmine/train_dynamics.py",23939,129," val_results = {\n ""metrics"": val_metrics,\n ""gt_batch"": val_gt_batch,\n ""recon"": val_recon,",python,selection_command 
+1981,1287939,"jasmine/train_dynamics.py",23939,177," val_results = {\n ""metrics"": val_metrics,\n ""gt_batch"": val_gt_batch,\n ""recon"": val_recon,\n ""full_frame"": val_recon_full_frame,",python,selection_command +1982,1288097,"jasmine/train_dynamics.py",23939,187," val_results = {\n ""metrics"": val_metrics,\n ""gt_batch"": val_gt_batch,\n ""recon"": val_recon,\n ""full_frame"": val_recon_full_frame,\n }",python,selection_command +1983,1288397,"jasmine/train_dynamics.py",23939,188,"",python,content +1984,1288486,"TERMINAL",0,0,"3",,terminal_output +1985,1289482,"TERMINAL",0,0,"4",,terminal_output +1986,1289967,"jasmine/train_dynamics.py",23670,0,"",python,selection_mouse +1987,1290515,"TERMINAL",0,0,"5",,terminal_output +1988,1290932,"jasmine/train_dynamics.py",23658,0,"",python,selection_mouse +1989,1291560,"TERMINAL",0,0,"7",,terminal_output +1990,1291723,"jasmine/train_dynamics.py",23469,0,"",python,selection_mouse +1991,1292677,"TERMINAL",0,0,"8",,terminal_output +1992,1292776,"jasmine/train_dynamics.py",23363,0,"",python,selection_mouse +1993,1293386,"jasmine/train_dynamics.py",23435,0,"",python,selection_mouse +1994,1293554,"TERMINAL",0,0,"9",,terminal_output +1995,1294022,"jasmine/train_dynamics.py",23416,0,"",python,selection_mouse +1996,1294569,"jasmine/train_dynamics.py",23364,0,"",python,selection_command +1997,1294594,"TERMINAL",0,0,"9:00",,terminal_output +1998,1295072,"jasmine/train_dynamics.py",23416,0,"",python,selection_command +1999,1295628,"TERMINAL",0,0,"1",,terminal_output +2000,1295992,"jasmine/train_dynamics.py",23403,22,"",python,content +2001,1296034,"jasmine/train_dynamics.py",23407,0,"",python,selection_command +2002,1296185,"jasmine/train_dynamics.py",23355,0,"",python,selection_command +2003,1296477,"jasmine/train_dynamics.py",23279,0,"",python,selection_command +2004,1296773,"TERMINAL",0,0,"2",,terminal_output +2005,1297682,"TERMINAL",0,0,"3",,terminal_output +2006,1297810,"jasmine/train_dynamics.py",23275,75," dataloader_train = itertools.chain([first_batch], dataloader_train)",python,selection_command +2007,1298068,"jasmine/train_dynamics.py",23221,129," # Do not skip the first batch during training\n dataloader_train = itertools.chain([first_batch], dataloader_train)",python,selection_command +2008,1298561,"jasmine/train_dynamics.py",23158,192," print_compiled_cost_analysis(compiled.cost_analysis())\n # Do not skip the first batch during training\n dataloader_train = itertools.chain([first_batch], dataloader_train)",python,selection_command +2009,1298588,"jasmine/train_dynamics.py",23094,256," print_compiled_memory_stats(compiled.memory_analysis())\n print_compiled_cost_analysis(compiled.cost_analysis())\n # Do not skip the first batch during training\n dataloader_train = itertools.chain([first_batch], dataloader_train)",python,selection_command +2010,1298701,"TERMINAL",0,0,"4",,terminal_output +2011,1298747,"jasmine/train_dynamics.py",23024,326," compiled = train_step.lower(optimizer, first_batch).compile()\n print_compiled_memory_stats(compiled.memory_analysis())\n print_compiled_cost_analysis(compiled.cost_analysis())\n # Do not skip the first batch during training\n dataloader_train = itertools.chain([first_batch], dataloader_train)",python,selection_command +2012,1298870,"jasmine/train_dynamics.py",22975,375," first_batch[""rng""] = rng # type: ignore\n compiled = train_step.lower(optimizer, first_batch).compile()\n print_compiled_memory_stats(compiled.memory_analysis())\n print_compiled_cost_analysis(compiled.cost_analysis())\n # Do not skip the 
first batch during training\n dataloader_train = itertools.chain([first_batch], dataloader_train)",python,selection_command +2013,1299021,"jasmine/train_dynamics.py",22930,420," first_batch = next(dataloader_train)\n first_batch[""rng""] = rng # type: ignore\n compiled = train_step.lower(optimizer, first_batch).compile()\n print_compiled_memory_stats(compiled.memory_analysis())\n print_compiled_cost_analysis(compiled.cost_analysis())\n # Do not skip the first batch during training\n dataloader_train = itertools.chain([first_batch], dataloader_train)",python,selection_command +2014,1299165,"jasmine/train_dynamics.py",22897,453," if jax.process_index() == 0:\n first_batch = next(dataloader_train)\n first_batch[""rng""] = rng # type: ignore\n compiled = train_step.lower(optimizer, first_batch).compile()\n print_compiled_memory_stats(compiled.memory_analysis())\n print_compiled_cost_analysis(compiled.cost_analysis())\n # Do not skip the first batch during training\n dataloader_train = itertools.chain([first_batch], dataloader_train)",python,selection_command +2015,1299337,"jasmine/train_dynamics.py",22897,454,"",python,content +2016,1299346,"jasmine/train_dynamics.py",22901,0,"",python,selection_command +2017,1299575,"jasmine/train_dynamics.py",22953,0,"",python,selection_command +2018,1299728,"TERMINAL",0,0,"5",,terminal_output +2019,1299831,"jasmine/train_dynamics.py",22901,0,"",python,selection_command +2020,1300285,"jasmine/train_dynamics.py",22891,0,"",python,selection_command +2021,1300328,"jasmine/train_dynamics.py",22854,0,"",python,selection_command +2022,1300331,"jasmine/train_dynamics.py",22840,0,"",python,selection_command +2023,1300371,"jasmine/train_dynamics.py",22821,0,"",python,selection_command +2024,1300393,"jasmine/train_dynamics.py",22791,0,"",python,selection_command +2025,1300432,"jasmine/train_dynamics.py",22748,0,"",python,selection_command +2026,1300449,"jasmine/train_dynamics.py",22726,0,"",python,selection_command +2027,1300488,"jasmine/train_dynamics.py",22668,0,"",python,selection_command +2028,1300516,"jasmine/train_dynamics.py",22608,0,"",python,selection_command +2029,1300557,"jasmine/train_dynamics.py",22579,0,"",python,selection_command +2030,1300602,"jasmine/train_dynamics.py",22560,0,"",python,selection_command +2031,1300612,"jasmine/train_dynamics.py",22508,0,"",python,selection_command +2032,1300647,"jasmine/train_dynamics.py",22442,0,"",python,selection_command +2033,1300687,"jasmine/train_dynamics.py",22428,0,"",python,selection_command +2034,1300693,"jasmine/train_dynamics.py",22401,0,"",python,selection_command +2035,1300726,"jasmine/train_dynamics.py",22380,0,"",python,selection_command +2036,1300727,"TERMINAL",0,0,"6",,terminal_output +2037,1300768,"jasmine/train_dynamics.py",22354,0,"",python,selection_command +2038,1300815,"jasmine/train_dynamics.py",22348,0,"",python,selection_command +2039,1300816,"jasmine/train_dynamics.py",22313,0,"",python,selection_command +2040,1300853,"jasmine/train_dynamics.py",22303,0,"",python,selection_command +2041,1300894,"jasmine/train_dynamics.py",22288,0,"",python,selection_command +2042,1300907,"jasmine/train_dynamics.py",22262,0,"",python,selection_command +2043,1301015,"jasmine/train_dynamics.py",22288,0,"",python,selection_command +2044,1301490,"jasmine/train_dynamics.py",22303,0,"",python,selection_command +2045,1301515,"jasmine/train_dynamics.py",22313,0,"",python,selection_command +2046,1301561,"jasmine/train_dynamics.py",22348,0,"",python,selection_command 
+2047,1301582,"jasmine/train_dynamics.py",22354,0,"",python,selection_command +2048,1301751,"jasmine/train_dynamics.py",22380,0,"",python,selection_command +2049,1301752,"TERMINAL",0,0,"7",,terminal_output +2050,1301996,"jasmine/train_dynamics.py",22354,0,"",python,selection_command +2051,1302264,"jasmine/train_dynamics.py",22348,0,"",python,selection_command +2052,1302746,"jasmine/train_dynamics.py",22313,0,"",python,selection_command +2053,1302797,"jasmine/train_dynamics.py",22303,0,"",python,selection_command +2054,1302806,"TERMINAL",0,0,"8",,terminal_output +2055,1302837,"jasmine/train_dynamics.py",22288,0,"",python,selection_command +2056,1303002,"jasmine/train_dynamics.py",22303,0,"",python,selection_command +2057,1303192,"jasmine/train_dynamics.py",22313,0,"",python,selection_command +2058,1303554,"jasmine/train_dynamics.py",22348,0,"",python,selection_command +2059,1303801,"TERMINAL",0,0,"9",,terminal_output +2060,1304402,"jasmine/train_dynamics.py",22344,5," )",python,selection_command +2061,1304618,"jasmine/train_dynamics.py",22309,40," for elem in train_iterator\n )",python,selection_command +2062,1304794,"TERMINAL",0,0,"10",,terminal_output +2063,1305110,"jasmine/train_dynamics.py",22299,50," }\n for elem in train_iterator\n )",python,selection_command +2064,1305135,"jasmine/train_dynamics.py",22284,65," ),\n }\n for elem in train_iterator\n )",python,selection_command +2065,1305176,"jasmine/train_dynamics.py",22258,91," else None\n ),\n }\n for elem in train_iterator\n )",python,selection_command +2066,1305218,"jasmine/train_dynamics.py",22219,130," if args.use_gt_actions\n else None\n ),\n }\n for elem in train_iterator\n )",python,selection_command +2067,1305233,"jasmine/train_dynamics.py",22201,148," )\n if args.use_gt_actions\n else None\n ),\n }\n for elem in train_iterator\n )",python,selection_command +2068,1305272,"jasmine/train_dynamics.py",22147,202," actions_sharding, elem[""actions""]\n )\n if args.use_gt_actions\n else None\n ),\n }\n for elem in train_iterator\n )",python,selection_command +2069,1305300,"jasmine/train_dynamics.py",22091,258," jax.make_array_from_process_local_data(\n actions_sharding, elem[""actions""]\n )\n if args.use_gt_actions\n else None\n ),\n }\n for elem in train_iterator\n )",python,selection_command +2070,1305340,"jasmine/train_dynamics.py",22066,283," ""actions"": (\n jax.make_array_from_process_local_data(\n actions_sharding, elem[""actions""]\n )\n if args.use_gt_actions\n else None\n ),\n }\n for elem in train_iterator\n )",python,selection_command +2071,1305355,"jasmine/train_dynamics.py",22051,298," ),\n ""actions"": (\n jax.make_array_from_process_local_data(\n actions_sharding, elem[""actions""]\n )\n if args.use_gt_actions\n else None\n ),\n }\n for elem in train_iterator\n )",python,selection_command +2072,1305383,"jasmine/train_dynamics.py",21992,357," videos_sharding, local_data=elem[""videos""]\n ),\n ""actions"": (\n jax.make_array_from_process_local_data(\n actions_sharding, elem[""actions""]\n )\n if args.use_gt_actions\n else None\n ),\n }\n for elem in train_iterator\n )",python,selection_command +2073,1305427,"jasmine/train_dynamics.py",21930,419," ""videos"": jax.make_array_from_process_local_data(\n videos_sharding, local_data=elem[""videos""]\n ),\n ""actions"": (\n jax.make_array_from_process_local_data(\n actions_sharding, elem[""actions""]\n )\n if args.use_gt_actions\n else None\n ),\n }\n for elem in train_iterator\n )",python,selection_command +2074,1305438,"jasmine/train_dynamics.py",21920,429," {\n 
""videos"": jax.make_array_from_process_local_data(\n videos_sharding, local_data=elem[""videos""]\n ),\n ""actions"": (\n jax.make_array_from_process_local_data(\n actions_sharding, elem[""actions""]\n )\n if args.use_gt_actions\n else None\n ),\n }\n for elem in train_iterator\n )",python,selection_command +2075,1305485,"jasmine/train_dynamics.py",21895,454," dataloader_train = (\n {\n ""videos"": jax.make_array_from_process_local_data(\n videos_sharding, local_data=elem[""videos""]\n ),\n ""actions"": (\n jax.make_array_from_process_local_data(\n actions_sharding, elem[""actions""]\n )\n if args.use_gt_actions\n else None\n ),\n }\n for elem in train_iterator\n )",python,selection_command +2076,1305543,"jasmine/train_dynamics.py",21870,479," # --- TRAIN LOOP ---\n dataloader_train = (\n {\n ""videos"": jax.make_array_from_process_local_data(\n videos_sharding, local_data=elem[""videos""]\n ),\n ""actions"": (\n jax.make_array_from_process_local_data(\n actions_sharding, elem[""actions""]\n )\n if args.use_gt_actions\n else None\n ),\n }\n for elem in train_iterator\n )",python,selection_command +2077,1305563,"jasmine/train_dynamics.py",21869,480,"\n # --- TRAIN LOOP ---\n dataloader_train = (\n {\n ""videos"": jax.make_array_from_process_local_data(\n videos_sharding, local_data=elem[""videos""]\n ),\n ""actions"": (\n jax.make_array_from_process_local_data(\n actions_sharding, elem[""actions""]\n )\n if args.use_gt_actions\n else None\n ),\n }\n for elem in train_iterator\n )",python,selection_command +2078,1305701,"jasmine/train_dynamics.py",21870,479," # --- TRAIN LOOP ---\n dataloader_train = (\n {\n ""videos"": jax.make_array_from_process_local_data(\n videos_sharding, local_data=elem[""videos""]\n ),\n ""actions"": (\n jax.make_array_from_process_local_data(\n actions_sharding, elem[""actions""]\n )\n if args.use_gt_actions\n else None\n ),\n }\n for elem in train_iterator\n )",python,selection_command +2079,1305848,"TERMINAL",0,0,"1",,terminal_output +2080,1306158,"jasmine/train_dynamics.py",21895,454," dataloader_train = (\n {\n ""videos"": jax.make_array_from_process_local_data(\n videos_sharding, local_data=elem[""videos""]\n ),\n ""actions"": (\n jax.make_array_from_process_local_data(\n actions_sharding, elem[""actions""]\n )\n if args.use_gt_actions\n else None\n ),\n }\n for elem in train_iterator\n )",python,selection_command +2081,1306401,"jasmine/train_dynamics.py",21870,479," # --- TRAIN LOOP ---\n dataloader_train = (\n {\n ""videos"": jax.make_array_from_process_local_data(\n videos_sharding, local_data=elem[""videos""]\n ),\n ""actions"": (\n jax.make_array_from_process_local_data(\n actions_sharding, elem[""actions""]\n )\n if args.use_gt_actions\n else None\n ),\n }\n for elem in train_iterator\n )",python,selection_command +2082,1306566,"jasmine/train_dynamics.py",21870,480,"",python,content +2083,1306575,"jasmine/train_dynamics.py",21874,0,"",python,selection_command +2084,1306890,"TERMINAL",0,0,"2",,terminal_output +2085,1307007,"jasmine/train_dynamics.py",21869,0,"",python,selection_command +2086,1307512,"jasmine/train_dynamics.py",21814,0,"",python,selection_command +2087,1307536,"jasmine/train_dynamics.py",21731,0,"",python,selection_command +2088,1307573,"jasmine/train_dynamics.py",21676,0,"",python,selection_command +2089,1307590,"jasmine/train_dynamics.py",21662,0,"",python,selection_command +2090,1307622,"jasmine/train_dynamics.py",21597,0,"",python,selection_command +2091,1307670,"jasmine/train_dynamics.py",21579,0,"",python,selection_command 
+2092,1307680,"jasmine/train_dynamics.py",21506,0,"",python,selection_command +2093,1307749,"jasmine/train_dynamics.py",21456,0,"",python,selection_command +2094,1307749,"jasmine/train_dynamics.py",21417,0,"",python,selection_command +2095,1307773,"jasmine/train_dynamics.py",21384,0,"",python,selection_command +2096,1307818,"jasmine/train_dynamics.py",21327,0,"",python,selection_command +2097,1307841,"jasmine/train_dynamics.py",21317,0,"",python,selection_command +2098,1307866,"jasmine/train_dynamics.py",21267,0,"",python,selection_command +2099,1307887,"TERMINAL",0,0,"3",,terminal_output +2100,1307920,"jasmine/train_dynamics.py",21190,0,"",python,selection_command +2101,1307921,"jasmine/train_dynamics.py",21166,0,"",python,selection_command +2102,1307958,"jasmine/train_dynamics.py",21161,0,"",python,selection_command +2103,1308000,"jasmine/train_dynamics.py",21151,0,"",python,selection_command +2104,1308011,"jasmine/train_dynamics.py",21011,0,"",python,selection_command +2105,1308044,"jasmine/train_dynamics.py",20992,0,"",python,selection_command +2106,1308083,"jasmine/train_dynamics.py",20958,0,"",python,selection_command +2107,1308126,"jasmine/train_dynamics.py",20953,0,"",python,selection_command +2108,1308133,"jasmine/train_dynamics.py",20935,0,"",python,selection_command +2109,1308173,"jasmine/train_dynamics.py",20897,0,"",python,selection_command +2110,1308918,"TERMINAL",0,0,"4",,terminal_output +2111,1309894,"TERMINAL",0,0,"5",,terminal_output +2112,1310943,"TERMINAL",0,0,"6",,terminal_output +2113,1311933,"TERMINAL",0,0,"7",,terminal_output +2114,1312963,"TERMINAL",0,0,"8",,terminal_output +2115,1313974,"TERMINAL",0,0,"9",,terminal_output +2116,1314083,"jasmine/train_dynamics.py",17104,0,"",python,selection_mouse +2117,1314753,"jasmine/train_dynamics.py",17092,30," @nnx.jit(donate_argnums=0)",python,selection_command +2118,1314972,"jasmine/train_dynamics.py",17092,50," @nnx.jit(donate_argnums=0)\n def train_step(",python,selection_command +2119,1315053,"TERMINAL",0,0,"20",,terminal_output +2120,1315465,"jasmine/train_dynamics.py",17092,105," @nnx.jit(donate_argnums=0)\n def train_step(\n optimizer: nnx.ModelAndOptimizer, inputs: dict",python,selection_command +2121,1315498,"jasmine/train_dynamics.py",17092,149," @nnx.jit(donate_argnums=0)\n def train_step(\n optimizer: nnx.ModelAndOptimizer, inputs: dict\n ) -> tuple[jax.Array, jax.Array, dict]:",python,selection_command +2122,1315540,"jasmine/train_dynamics.py",17092,228," @nnx.jit(donate_argnums=0)\n def train_step(\n optimizer: nnx.ModelAndOptimizer, inputs: dict\n ) -> tuple[jax.Array, jax.Array, dict]:\n def loss_fn(model: Genie) -> tuple[jax.Array, tuple[jax.Array, dict]]:",python,selection_command +2123,1315586,"jasmine/train_dynamics.py",17092,254," @nnx.jit(donate_argnums=0)\n def train_step(\n optimizer: nnx.ModelAndOptimizer, inputs: dict\n ) -> tuple[jax.Array, jax.Array, dict]:\n def loss_fn(model: Genie) -> tuple[jax.Array, tuple[jax.Array, dict]]:\n model.train()",python,selection_command +2124,1315587,"jasmine/train_dynamics.py",17092,305," @nnx.jit(donate_argnums=0)\n def train_step(\n optimizer: nnx.ModelAndOptimizer, inputs: dict\n ) -> tuple[jax.Array, jax.Array, dict]:\n def loss_fn(model: Genie) -> tuple[jax.Array, tuple[jax.Array, dict]]:\n model.train()\n return dynamics_loss_fn(model, inputs)",python,selection_command +2125,1315676,"jasmine/train_dynamics.py",17092,306," @nnx.jit(donate_argnums=0)\n def train_step(\n optimizer: nnx.ModelAndOptimizer, inputs: dict\n ) -> tuple[jax.Array, jax.Array, dict]:\n 
def loss_fn(model: Genie) -> tuple[jax.Array, tuple[jax.Array, dict]]:\n model.train()\n return dynamics_loss_fn(model, inputs)\n",python,selection_command +2126,1315676,"jasmine/train_dynamics.py",17092,391," @nnx.jit(donate_argnums=0)\n def train_step(\n optimizer: nnx.ModelAndOptimizer, inputs: dict\n ) -> tuple[jax.Array, jax.Array, dict]:\n def loss_fn(model: Genie) -> tuple[jax.Array, tuple[jax.Array, dict]]:\n model.train()\n return dynamics_loss_fn(model, inputs)\n\n (loss, (recon, metrics)), grads = nnx.value_and_grad(loss_fn, has_aux=True)(",python,selection_command +2127,1315677,"jasmine/train_dynamics.py",17092,419," @nnx.jit(donate_argnums=0)\n def train_step(\n optimizer: nnx.ModelAndOptimizer, inputs: dict\n ) -> tuple[jax.Array, jax.Array, dict]:\n def loss_fn(model: Genie) -> tuple[jax.Array, tuple[jax.Array, dict]]:\n model.train()\n return dynamics_loss_fn(model, inputs)\n\n (loss, (recon, metrics)), grads = nnx.value_and_grad(loss_fn, has_aux=True)(\n optimizer.model",python,selection_command +2128,1315729,"jasmine/train_dynamics.py",17092,429," @nnx.jit(donate_argnums=0)\n def train_step(\n optimizer: nnx.ModelAndOptimizer, inputs: dict\n ) -> tuple[jax.Array, jax.Array, dict]:\n def loss_fn(model: Genie) -> tuple[jax.Array, tuple[jax.Array, dict]]:\n model.train()\n return dynamics_loss_fn(model, inputs)\n\n (loss, (recon, metrics)), grads = nnx.value_and_grad(loss_fn, has_aux=True)(\n optimizer.model\n )",python,selection_command +2129,1315819,"jasmine/train_dynamics.py",17092,461," @nnx.jit(donate_argnums=0)\n def train_step(\n optimizer: nnx.ModelAndOptimizer, inputs: dict\n ) -> tuple[jax.Array, jax.Array, dict]:\n def loss_fn(model: Genie) -> tuple[jax.Array, tuple[jax.Array, dict]]:\n model.train()\n return dynamics_loss_fn(model, inputs)\n\n (loss, (recon, metrics)), grads = nnx.value_and_grad(loss_fn, has_aux=True)(\n optimizer.model\n )\n optimizer.update(grads)",python,selection_command +2130,1315992,"jasmine/train_dynamics.py",17092,492," @nnx.jit(donate_argnums=0)\n def train_step(\n optimizer: nnx.ModelAndOptimizer, inputs: dict\n ) -> tuple[jax.Array, jax.Array, dict]:\n def loss_fn(model: Genie) -> tuple[jax.Array, tuple[jax.Array, dict]]:\n model.train()\n return dynamics_loss_fn(model, inputs)\n\n (loss, (recon, metrics)), grads = nnx.value_and_grad(loss_fn, has_aux=True)(\n optimizer.model\n )\n optimizer.update(grads)\n if args.log_gradients:",python,selection_command +2131,1316101,"TERMINAL",0,0,"1",,terminal_output +2132,1316134,"jasmine/train_dynamics.py",17092,546," @nnx.jit(donate_argnums=0)\n def train_step(\n optimizer: nnx.ModelAndOptimizer, inputs: dict\n ) -> tuple[jax.Array, jax.Array, dict]:\n def loss_fn(model: Genie) -> tuple[jax.Array, tuple[jax.Array, dict]]:\n model.train()\n return dynamics_loss_fn(model, inputs)\n\n (loss, (recon, metrics)), grads = nnx.value_and_grad(loss_fn, has_aux=True)(\n optimizer.model\n )\n optimizer.update(grads)\n if args.log_gradients:\n metrics[""gradients_std/""] = jax.tree.map(",python,selection_command +2133,1316307,"jasmine/train_dynamics.py",17092,609," @nnx.jit(donate_argnums=0)\n def train_step(\n optimizer: nnx.ModelAndOptimizer, inputs: dict\n ) -> tuple[jax.Array, jax.Array, dict]:\n def loss_fn(model: Genie) -> tuple[jax.Array, tuple[jax.Array, dict]]:\n model.train()\n return dynamics_loss_fn(model, inputs)\n\n (loss, (recon, metrics)), grads = nnx.value_and_grad(loss_fn, has_aux=True)(\n optimizer.model\n )\n optimizer.update(grads)\n if args.log_gradients:\n metrics[""gradients_std/""] = 
jax.tree.map(\n lambda x: x.std(), grads[""params""][""dynamics""]",python,selection_command +2134,1316453,"jasmine/train_dynamics.py",17092,623," @nnx.jit(donate_argnums=0)\n def train_step(\n optimizer: nnx.ModelAndOptimizer, inputs: dict\n ) -> tuple[jax.Array, jax.Array, dict]:\n def loss_fn(model: Genie) -> tuple[jax.Array, tuple[jax.Array, dict]]:\n model.train()\n return dynamics_loss_fn(model, inputs)\n\n (loss, (recon, metrics)), grads = nnx.value_and_grad(loss_fn, has_aux=True)(\n optimizer.model\n )\n optimizer.update(grads)\n if args.log_gradients:\n metrics[""gradients_std/""] = jax.tree.map(\n lambda x: x.std(), grads[""params""][""dynamics""]\n )",python,selection_command +2135,1316575,"jasmine/train_dynamics.py",17092,659," @nnx.jit(donate_argnums=0)\n def train_step(\n optimizer: nnx.ModelAndOptimizer, inputs: dict\n ) -> tuple[jax.Array, jax.Array, dict]:\n def loss_fn(model: Genie) -> tuple[jax.Array, tuple[jax.Array, dict]]:\n model.train()\n return dynamics_loss_fn(model, inputs)\n\n (loss, (recon, metrics)), grads = nnx.value_and_grad(loss_fn, has_aux=True)(\n optimizer.model\n )\n optimizer.update(grads)\n if args.log_gradients:\n metrics[""gradients_std/""] = jax.tree.map(\n lambda x: x.std(), grads[""params""][""dynamics""]\n )\n return loss, recon, metrics",python,selection_command +2136,1316893,"jasmine/train_dynamics.py",17092,660,"",python,content +2137,1317141,"TERMINAL",0,0,"2",,terminal_output +2138,1318018,"jasmine/train_dynamics.py",17092,1,"",python,content +2139,1318039,"jasmine/train_dynamics.py",17096,0,"",python,selection_command +2140,1318155,"TERMINAL",0,0,"3",,terminal_output +2141,1319154,"TERMINAL",0,0,"4",,terminal_output +2142,1320166,"TERMINAL",0,0,"5",,terminal_output +2143,1321258,"TERMINAL",0,0,"6",,terminal_output +2144,1322246,"TERMINAL",0,0,"7",,terminal_output +2145,1323172,"TERMINAL",0,0,"8",,terminal_output +2146,1324203,"TERMINAL",0,0,"9",,terminal_output +2147,1325244,"TERMINAL",0,0,"30",,terminal_output +2148,1326221,"TERMINAL",0,0,"1",,terminal_output +2149,1327407,"slurm/jobs/mihir/horeka/doom/resolution60x80/sample.sh",0,0,"#!/usr/bin/env bash\n\n#SBATCH --nodes=1\n#SBATCH --ntasks-per-node=1\n#SBATCH --time=00:20:00\n#SBATCH --partition=accelerated\n#SBATCH --cpus-per-task=5\n#SBATCH --gres=gpu:1\n#SBATCH --output=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir/doom/dynamics/maskgit/%x_%j.log\n#SBATCH --error=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir/doom/dynamics/maskgit/%x_%j.log\n#SBATCH --job-name=sample_dynamics_doom_60x80\n\n# Unload modules that may interfere\nmodule unload mpi/openmpi/5.0\nmodule unload devel/cuda/12.4\n\n# Activate virtual environment\nsource .venv/bin/activate\n\narray_records_dir=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_doom/doom_episodes_10m_60x80_fixed/test\n\nCHECKPOINT_PATH=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/doom/maskgit/dynamics_doom_60x80_500k/500k-run\n\necho ""Sampling from checkpoint: $CHECKPOINT_PATH""\n\nsrun python jasmine/sample.py \\n --checkpoint ""$CHECKPOINT_PATH"" \\n --data_dir=$array_records_dir \\n --seq_len=16 \\n --batch_size=8 \\n --start_frame=4 \\n --image_height=60 \\n --image_width=80 \\n --num_actions=18 \\n --patch_size=16 \\n --output_dir=gifs/doom/action-prepend-branch/60x80/500k-test/ $@",shellscript,tab +2150,1327572,"TERMINAL",0,0,"2",,terminal_output +2151,1328317,"TERMINAL",0,0,"3",,terminal_output +2152,1329288,"TERMINAL",0,0,"4",,terminal_output 
+2153,1329884,"slurm/jobs/mihir/horeka/coinrun/ablations/appendix-c.sh",0,0,"",shellscript,tab +2154,1330545,"TERMINAL",0,0,"5",,terminal_output +2155,1330637,"slurm/jobs/mihir/horeka/coinrun/ablations/appendix-c.sh",818,0,"",shellscript,selection_mouse +2156,1331181,"slurm/jobs/mihir/horeka/coinrun/ablations/appendix-c.sh",818,0,"\n ",shellscript,content +2157,1331446,"TERMINAL",0,0,"6",,terminal_output +2158,1331978,"slurm/jobs/mihir/horeka/coinrun/ablations/appendix-c.sh",823,0,"-",shellscript,content +2159,1331979,"slurm/jobs/mihir/horeka/coinrun/ablations/appendix-c.sh",824,0,"",shellscript,selection_keyboard +2160,1332143,"slurm/jobs/mihir/horeka/coinrun/ablations/appendix-c.sh",824,0,"-",shellscript,content +2161,1332144,"slurm/jobs/mihir/horeka/coinrun/ablations/appendix-c.sh",825,0,"",shellscript,selection_keyboard +2162,1332282,"slurm/jobs/mihir/horeka/coinrun/ablations/appendix-c.sh",825,0,"r",shellscript,content +2163,1332283,"slurm/jobs/mihir/horeka/coinrun/ablations/appendix-c.sh",826,0,"",shellscript,selection_keyboard +2164,1332451,"TERMINAL",0,0,"7",,terminal_output +2165,1332466,"slurm/jobs/mihir/horeka/coinrun/ablations/appendix-c.sh",826,0,"e",shellscript,content +2166,1332467,"slurm/jobs/mihir/horeka/coinrun/ablations/appendix-c.sh",827,0,"",shellscript,selection_keyboard +2167,1332661,"slurm/jobs/mihir/horeka/coinrun/ablations/appendix-c.sh",827,0,"s",shellscript,content +2168,1332662,"slurm/jobs/mihir/horeka/coinrun/ablations/appendix-c.sh",828,0,"",shellscript,selection_keyboard +2169,1332766,"slurm/jobs/mihir/horeka/coinrun/ablations/appendix-c.sh",828,0,"t",shellscript,content +2170,1332767,"slurm/jobs/mihir/horeka/coinrun/ablations/appendix-c.sh",829,0,"",shellscript,selection_keyboard +2171,1332952,"slurm/jobs/mihir/horeka/coinrun/ablations/appendix-c.sh",829,0,"o",shellscript,content +2172,1332953,"slurm/jobs/mihir/horeka/coinrun/ablations/appendix-c.sh",830,0,"",shellscript,selection_keyboard +2173,1333076,"slurm/jobs/mihir/horeka/coinrun/ablations/appendix-c.sh",830,0,"r",shellscript,content +2174,1333077,"slurm/jobs/mihir/horeka/coinrun/ablations/appendix-c.sh",831,0,"",shellscript,selection_keyboard +2175,1333254,"slurm/jobs/mihir/horeka/coinrun/ablations/appendix-c.sh",831,0,"e",shellscript,content +2176,1333255,"slurm/jobs/mihir/horeka/coinrun/ablations/appendix-c.sh",832,0,"",shellscript,selection_keyboard +2177,1333447,"slurm/jobs/mihir/horeka/coinrun/ablations/appendix-c.sh",832,0,"_",shellscript,content +2178,1333448,"slurm/jobs/mihir/horeka/coinrun/ablations/appendix-c.sh",833,0,"",shellscript,selection_keyboard +2179,1333514,"TERMINAL",0,0,"8",,terminal_output +2180,1334354,"slurm/jobs/mihir/horeka/coinrun/ablations/appendix-c.sh",833,0,"c",shellscript,content +2181,1334355,"slurm/jobs/mihir/horeka/coinrun/ablations/appendix-c.sh",834,0,"",shellscript,selection_keyboard +2182,1334467,"slurm/jobs/mihir/horeka/coinrun/ablations/appendix-c.sh",834,0,"k",shellscript,content +2183,1334468,"slurm/jobs/mihir/horeka/coinrun/ablations/appendix-c.sh",835,0,"",shellscript,selection_keyboard +2184,1334616,"TERMINAL",0,0,"9",,terminal_output +2185,1334650,"slurm/jobs/mihir/horeka/coinrun/ablations/appendix-c.sh",835,0,"p",shellscript,content +2186,1334651,"slurm/jobs/mihir/horeka/coinrun/ablations/appendix-c.sh",836,0,"",shellscript,selection_keyboard +2187,1334765,"slurm/jobs/mihir/horeka/coinrun/ablations/appendix-c.sh",836,0,"t",shellscript,content +2188,1334766,"slurm/jobs/mihir/horeka/coinrun/ablations/appendix-c.sh",837,0,"",shellscript,selection_keyboard 
+2189,1335498,"slurm/jobs/mihir/horeka/coinrun/ablations/appendix-c.sh",837,0," ",shellscript,content +2190,1335499,"slurm/jobs/mihir/horeka/coinrun/ablations/appendix-c.sh",838,0,"",shellscript,selection_keyboard +2191,1335566,"TERMINAL",0,0,"40",,terminal_output +2192,1335777,"slurm/jobs/mihir/horeka/coinrun/ablations/appendix-c.sh",838,0,"\",shellscript,content +2193,1335778,"slurm/jobs/mihir/horeka/coinrun/ablations/appendix-c.sh",839,0,"",shellscript,selection_keyboard +2194,1336064,"slurm/jobs/mihir/horeka/coinrun/ablations/appendix-c.sh",839,0,"\n ",shellscript,content +2195,1336557,"TERMINAL",0,0,"2",,terminal_output +2196,1337545,"TERMINAL",0,0,"3",,terminal_output +2197,1338232,"slurm/jobs/mihir/horeka/coinrun/ablations/appendix-c.sh",840,4,"",shellscript,content +2198,1338413,"slurm/jobs/mihir/horeka/coinrun/ablations/appendix-c.sh",839,1,"",shellscript,content +2199,1338591,"TERMINAL",0,0,"4",,terminal_output +2200,1339568,"TERMINAL",0,0,"5",,terminal_output +2201,1340580,"TERMINAL",0,0,"6",,terminal_output +2202,1341602,"TERMINAL",0,0,"7",,terminal_output +2203,1342661,"TERMINAL",0,0,"8",,terminal_output +2204,1343844,"TERMINAL",0,0,"9",,terminal_output +2205,1344678,"TERMINAL",0,0,"50",,terminal_output +2206,1345712,"TERMINAL",0,0,"1",,terminal_output +2207,1346747,"TERMINAL",0,0,"2",,terminal_output +2208,1347726,"slurm/jobs/mihir/horeka/coinrun/ablations/appendix-c.sh",613,0,"",shellscript,selection_mouse +2209,1347767,"TERMINAL",0,0,"3",,terminal_output +2210,1348354,"slurm/jobs/mihir/horeka/coinrun/ablations/appendix-c.sh",612,0,"",shellscript,selection_mouse +2211,1348745,"TERMINAL",0,0,"4",,terminal_output +2212,1349770,"TERMINAL",0,0,"5",,terminal_output +2213,1350793,"TERMINAL",0,0,"6",,terminal_output +2214,1351133,"slurm/jobs/mihir/horeka/coinrun/ablations/appendix-c.sh",611,0,"",shellscript,selection_command +2215,1351769,"slurm/jobs/mihir/horeka/coinrun/ablations/appendix-c.sh",592,0,"",shellscript,selection_command +2216,1351825,"TERMINAL",0,0,"7",,terminal_output +2217,1351944,"slurm/jobs/mihir/horeka/coinrun/ablations/appendix-c.sh",585,0,"",shellscript,selection_command +2218,1352091,"slurm/jobs/mihir/horeka/coinrun/ablations/appendix-c.sh",462,0,"",shellscript,selection_command +2219,1352608,"slurm/jobs/mihir/horeka/coinrun/ablations/appendix-c.sh",566,0,"\nCHECKPOINT_DIR=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/coinrun/maskgit/$job_name/$slurm_job_id",shellscript,content +2220,1352611,"slurm/jobs/mihir/horeka/coinrun/ablations/appendix-c.sh",567,0,"",shellscript,selection_command +2221,1352887,"TERMINAL",0,0,"8",,terminal_output +2222,1353137,"slurm/jobs/mihir/horeka/coinrun/ablations/appendix-c.sh",444,0,"",shellscript,selection_command +2223,1353909,"TERMINAL",0,0,"9",,terminal_output +2224,1354071,"slurm/jobs/mihir/horeka/coinrun/ablations/appendix-c.sh",566,0,"",shellscript,selection_command +2225,1354418,"slurm/jobs/mihir/horeka/coinrun/ablations/appendix-c.sh",554,12,"",shellscript,content +2226,1354551,"slurm/jobs/mihir/horeka/coinrun/ablations/appendix-c.sh",552,2,"",shellscript,content +2227,1354725,"slurm/jobs/mihir/horeka/coinrun/ablations/appendix-c.sh",544,8,"",shellscript,content +2228,1354850,"slurm/jobs/mihir/horeka/coinrun/ablations/appendix-c.sh",542,2,"",shellscript,content +2229,1354877,"TERMINAL",0,0,"3:00:00",,terminal_output +2230,1355026,"slurm/jobs/mihir/horeka/coinrun/ablations/appendix-c.sh",535,7,"",shellscript,content 
+2231,1355164,"slurm/jobs/mihir/horeka/coinrun/ablations/appendix-c.sh",534,1,"",shellscript,content +2232,1355308,"slurm/jobs/mihir/horeka/coinrun/ablations/appendix-c.sh",527,7,"",shellscript,content +2233,1355471,"slurm/jobs/mihir/horeka/coinrun/ablations/appendix-c.sh",526,1,"",shellscript,content +2234,1355627,"slurm/jobs/mihir/horeka/coinrun/ablations/appendix-c.sh",515,11,"",shellscript,content +2235,1355772,"slurm/jobs/mihir/horeka/coinrun/ablations/appendix-c.sh",514,1,"",shellscript,content +2236,1355888,"TERMINAL",0,0,"1",,terminal_output +2237,1355931,"slurm/jobs/mihir/horeka/coinrun/ablations/appendix-c.sh",500,14,"",shellscript,content +2238,1356076,"slurm/jobs/mihir/horeka/coinrun/ablations/appendix-c.sh",499,1,"",shellscript,content +2239,1356239,"slurm/jobs/mihir/horeka/coinrun/ablations/appendix-c.sh",488,11,"",shellscript,content +2240,1356403,"slurm/jobs/mihir/horeka/coinrun/ablations/appendix-c.sh",487,1,"",shellscript,content +2241,1356557,"slurm/jobs/mihir/horeka/coinrun/ablations/appendix-c.sh",480,7,"",shellscript,content +2242,1356713,"slurm/jobs/mihir/horeka/coinrun/ablations/appendix-c.sh",479,1,"",shellscript,content +2243,1356869,"slurm/jobs/mihir/horeka/coinrun/ablations/appendix-c.sh",470,9,"",shellscript,content +2244,1356945,"TERMINAL",0,0,"2",,terminal_output +2245,1357032,"slurm/jobs/mihir/horeka/coinrun/ablations/appendix-c.sh",469,1,"",shellscript,content +2246,1357205,"slurm/jobs/mihir/horeka/coinrun/ablations/appendix-c.sh",465,4,"",shellscript,content +2247,1357501,"slurm/jobs/mihir/horeka/coinrun/ablations/appendix-c.sh",464,1,"",shellscript,content +2248,1357848,"slurm/jobs/mihir/horeka/coinrun/ablations/appendix-c.sh",463,1,"",shellscript,content +2249,1358024,"slurm/jobs/mihir/horeka/coinrun/ablations/appendix-c.sh",462,1,"",shellscript,content +2250,1358061,"TERMINAL",0,0,"3",,terminal_output +2251,1358185,"slurm/jobs/mihir/horeka/coinrun/ablations/appendix-c.sh",461,1,"",shellscript,content +2252,1358304,"slurm/jobs/mihir/horeka/coinrun/ablations/appendix-c.sh",460,1,"",shellscript,content +2253,1358826,"slurm/jobs/mihir/horeka/coinrun/ablations/appendix-c.sh",459,1,"",shellscript,content +2254,1358999,"TERMINAL",0,0,"4",,terminal_output +2255,1359111,"slurm/jobs/mihir/horeka/coinrun/ablations/appendix-c.sh",459,0,"/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/coinrun/maskgit/train_dynamics_maskgit/3533426",shellscript,content +2256,1359786,"slurm/jobs/mihir/horeka/coinrun/ablations/appendix-c.sh",572,0,"",shellscript,selection_command +2257,1360019,"TERMINAL",0,0,"5",,terminal_output +2258,1360176,"slurm/jobs/mihir/horeka/coinrun/ablations/appendix-c.sh",695,0,"",shellscript,selection_command +2259,1360495,"slurm/jobs/mihir/horeka/coinrun/ablations/appendix-c.sh",574,0,"",shellscript,selection_command +2260,1361068,"TERMINAL",0,0,"6",,terminal_output +2261,1361283,"slurm/jobs/mihir/horeka/coinrun/ablations/appendix-c.sh",574,0,"#",shellscript,content +2262,1361284,"slurm/jobs/mihir/horeka/coinrun/ablations/appendix-c.sh",575,0,"",shellscript,selection_keyboard +2263,1361387,"slurm/jobs/mihir/horeka/coinrun/ablations/appendix-c.sh",575,0," ",shellscript,content +2264,1361388,"slurm/jobs/mihir/horeka/coinrun/ablations/appendix-c.sh",576,0,"",shellscript,selection_keyboard +2265,1361653,"slurm/jobs/mihir/horeka/coinrun/ablations/appendix-c.sh",575,0,"",shellscript,selection_command +2266,1362139,"TERMINAL",0,0,"7",,terminal_output +2267,1363125,"TERMINAL",0,0,"8",,terminal_output 
+2268,1363927,"slurm/jobs/mihir/horeka/coinrun/ablations/appendix-c.sh",574,125,"",shellscript,content +2269,1363972,"slurm/jobs/mihir/horeka/coinrun/ablations/appendix-c.sh",444,0,"",shellscript,selection_command +2270,1364104,"TERMINAL",0,0,"9",,terminal_output +2271,1364120,"slurm/jobs/mihir/horeka/coinrun/ablations/appendix-c.sh",443,0,"",shellscript,selection_command +2272,1364395,"slurm/jobs/mihir/horeka/coinrun/ablations/appendix-c.sh",443,0,"\n# CHECKPOINT_DIR=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/coinrun/maskgit/$job_name/$slurm_job_id",shellscript,content +2273,1364404,"slurm/jobs/mihir/horeka/coinrun/ablations/appendix-c.sh",444,0,"",shellscript,selection_command +2274,1365118,"slurm/jobs/mihir/horeka/coinrun/ablations/appendix-c.sh",569,0,"",shellscript,selection_command +2275,1365118,"TERMINAL",0,0,"10",,terminal_output +2276,1365378,"slurm/jobs/mihir/horeka/coinrun/ablations/appendix-c.sh",444,0,"",shellscript,selection_command +2277,1366140,"TERMINAL",0,0,"1",,terminal_output +2278,1366398,"slurm/jobs/mihir/horeka/coinrun/ablations/appendix-c.sh",568,0,"\n# CHECKPOINT_DIR=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/coinrun/maskgit/$job_name/$slurm_job_id",shellscript,content +2279,1366407,"slurm/jobs/mihir/horeka/coinrun/ablations/appendix-c.sh",569,0,"",shellscript,selection_command +2280,1367240,"TERMINAL",0,0,"2",,terminal_output +2281,1368162,"slurm/jobs/mihir/horeka/coinrun/ablations/appendix-c.sh",569,125,"",shellscript,content +2282,1368189,"slurm/jobs/mihir/horeka/coinrun/ablations/appendix-c.sh",444,0,"",shellscript,selection_command +2283,1368218,"TERMINAL",0,0,"3",,terminal_output +2284,1368399,"slurm/jobs/mihir/horeka/coinrun/ablations/appendix-c.sh",568,0,"\n",shellscript,content +2285,1368780,"slurm/jobs/mihir/horeka/coinrun/ablations/appendix-c.sh",569,0,"\n",shellscript,content +2286,1369203,"TERMINAL",0,0,"4",,terminal_output +2287,1369381,"slurm/jobs/mihir/horeka/coinrun/ablations/appendix-c.sh",570,0,"#",shellscript,content +2288,1369382,"slurm/jobs/mihir/horeka/coinrun/ablations/appendix-c.sh",571,0,"",shellscript,selection_keyboard +2289,1369555,"slurm/jobs/mihir/horeka/coinrun/ablations/appendix-c.sh",571,0," ",shellscript,content +2290,1369556,"slurm/jobs/mihir/horeka/coinrun/ablations/appendix-c.sh",572,0,"",shellscript,selection_keyboard +2291,1369765,"slurm/jobs/mihir/horeka/coinrun/ablations/appendix-c.sh",572,0,"m",shellscript,content +2292,1369766,"slurm/jobs/mihir/horeka/coinrun/ablations/appendix-c.sh",573,0,"",shellscript,selection_keyboard +2293,1369970,"slurm/jobs/mihir/horeka/coinrun/ablations/appendix-c.sh",573,0,"a",shellscript,content +2294,1369970,"slurm/jobs/mihir/horeka/coinrun/ablations/appendix-c.sh",574,0,"",shellscript,selection_keyboard +2295,1370035,"slurm/jobs/mihir/horeka/coinrun/ablations/appendix-c.sh",574,0,"i",shellscript,content +2296,1370036,"slurm/jobs/mihir/horeka/coinrun/ablations/appendix-c.sh",575,0,"",shellscript,selection_keyboard +2297,1370072,"slurm/jobs/mihir/horeka/coinrun/ablations/appendix-c.sh",575,0,"n",shellscript,content +2298,1370073,"slurm/jobs/mihir/horeka/coinrun/ablations/appendix-c.sh",576,0,"",shellscript,selection_keyboard +2299,1370234,"TERMINAL",0,0,"5",,terminal_output +2300,1370499,"slurm/jobs/mihir/horeka/coinrun/ablations/appendix-c.sh",575,0,"",shellscript,selection_command +2301,1371244,"TERMINAL",0,0,"6",,terminal_output +2302,1371283,"slurm/jobs/mihir/horeka/coinrun/ablations/appendix-c.sh",731,0,"",shellscript,selection_mouse 
+2303,1371284,"slurm/jobs/mihir/horeka/coinrun/ablations/appendix-c.sh",730,0,"",shellscript,selection_command +2304,1371765,"slurm/jobs/mihir/horeka/coinrun/ablations/appendix-c.sh",606,0,"",shellscript,selection_mouse +2305,1372279,"TERMINAL",0,0,"7",,terminal_output +2306,1372682,"slurm/jobs/mihir/horeka/coinrun/ablations/appendix-c.sh",706,0,"\n",shellscript,content +2307,1373290,"TERMINAL",0,0,"8",,terminal_output +2308,1373950,"slurm/jobs/mihir/horeka/coinrun/ablations/appendix-c.sh",959,0,"",shellscript,selection_mouse +2309,1373954,"slurm/jobs/mihir/horeka/coinrun/ablations/appendix-c.sh",958,0,"",shellscript,selection_command +2310,1374304,"TERMINAL",0,0,"9",,terminal_output +2311,1375400,"TERMINAL",0,0,"20",,terminal_output +2312,1376022,"TERMINAL",0,0,"sh slurm/jobs/mihir/horeka/coinrun/ablations/appendix-c.sh",,terminal_output +2313,1376349,"TERMINAL",0,0,"1",,terminal_output +2314,1377190,"TERMINAL",0,0,"\r\n[?2004l\r\r\n# Log the sbatch script\r\ncat $0\r\n\r\nmodule unload mpi/openmpi/5.0\r\nmodule unload devel/cuda/12.4\r\nsource .venv/bin/activate\r\n\r\narray_records_dir_train=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_coinrun/coinrun_episodes_10m_gt_actions_split/train\r\narray_records_dir_val=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_coinrun/coinrun_episodes_10m_gt_actions_split/val\r\n\r\njob_name=$SLURM_JOB_NAME\r\nslurm_job_id=$SLURM_JOB_ID\r\n\r\n# CHECKPOINT_DIR=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/coinrun/maskgit/$job_name/$slurm_job_id\r\n\r\n# main\r\nCHECKPOINT_DIR=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/coinrun/maskgit/train_dynamics_maskgit/3533426\r\n\r\nmkdir -p $CHECKPOINT_DIR\r\n\r\ntokenizer_ckpt_dir=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/big-runs/tokenizer/train_tokenizer_default/3528955\r\n\r\nenv | grep SLURM\r\n\r\nsrun python jasmine/train_dynamics_appendix-c.py \\r\n --save_ckpt \\r\n --restore_ckpt \\r\n --wandb_id $SLURM_JOB_ID \\r\n --ckpt_dir $CHECKPOINT_DIR \\r\n --batch_size=110 \\r\n --image_height=64 \\r\n --image_width=64 \\r\n --patch_size=16 \\r\n --max_lr=3e-5 \\r\n --no-log \\r\n --tokenizer_checkpoint=$tokenizer_ckpt_dir \\r\n --eval_full_frame \\r\n --data_dir $array_records_dir_train \\r\n --val_data_dir $array_records_dir_val \r\n",,terminal_output 
+2315,1377328,"TERMINAL",0,0,"SLURM_STEP_NUM_TASKS=1\r\nSLURM_JOB_USER=tum_cte0515\r\nSLURM_TASKS_PER_NODE=1\r\nSLURM_JOB_UID=999226\r\nSLURM_TASK_PID=1279162\r\nSLURM_JOB_GPUS=0\r\nSLURM_LOCALID=0\r\nSLURM_SUBMIT_DIR=/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine\r\nSLURMD_NODENAME=hkn0401\r\nSLURM_JOB_START_TIME=1759955924\r\nSLURM_STEP_NODELIST=hkn0401\r\nSLURM_CLUSTER_NAME=hk\r\nSLURM_JOB_END_TIME=1759959524\r\nSLURM_PMI2_SRUN_PORT=34473\r\nSLURM_CPUS_ON_NODE=8\r\nSLURM_JOB_CPUS_PER_NODE=8\r\nSLURM_GPUS_ON_NODE=1\r\nSLURM_GTIDS=0\r\nSLURM_JOB_PARTITION=dev_accelerated\r\nSLURM_TRES_PER_TASK=cpu=8\r\nSLURM_OOM_KILL_STEP=0\r\nSLURM_JOB_NUM_NODES=1\r\nSLURM_STEPID=4294967290\r\nSLURM_JOBID=3554157\r\nSLURM_PTY_PORT=43479\r\nSLURM_JOB_QOS=normal\r\nSLURM_LAUNCH_NODE_IPADDR=10.0.7.201\r\nSLURM_PTY_WIN_ROW=35\r\nSLURM_PMI2_PROC_MAPPING=(vector,(0,1,1))\r\nSLURMD_DEBUG=2\r\nSLURM_PROCID=0\r\nSLURM_CPUS_PER_TASK=8\r\nSLURM_TOPOLOGY_ADDR=hkibb.hkibbi1.hkibbi1e11.hkn0401\r\nSLURM_TOPOLOGY_ADDR_PATTERN=switch.switch.switch.node\r\nSLURM_SRUN_COMM_HOST=10.0.7.201\r\nSLURM_SCRIPT_CONTEXT=prolog_task\r\nSLURM_PTY_WIN_COL=181\r\nSLURM_NODELIST=hkn0401\r\nSLURM_SRUN_COMM_PORT=33897\r\nSLURM_STEP_ID=4294967290\r\nSLURM_JOB_ACCOUNT=hk-project-p0023960\r\nSLURM_PRIO_PROCESS=0\r\nSLURM_NNODES=1\r\nSLURM_SUBMIT_HOST=hkn1993.localdomain\r\nSLURM_JOB_ID=3554157\r\nSLURM_NODEID=0\r\nSLURM_STEP_NUM_NODES=1\r\nSLURM_STEP_TASKS_PER_NODE=1\r\nSLURM_MPI_TYPE=pmi2\r\nSLURM_PMI2_STEP_NODES=hkn0401\r\nSLURM_CONF=/etc/slurm/slurm.conf\r\nSLURM_JOB_NAME=interactive\r\nSLURM_STEP_LAUNCHER_PORT=33897\r\nSLURM_JOB_GID=502226\r\nSLURM_JOB_NODELIST=hkn0401\r\n",,terminal_output +2316,1377375,"TERMINAL",0,0,"2",,terminal_output +2317,1377447,"TERMINAL",0,0,"GpuFreq=control_disabled\r\n",,terminal_output +2318,1377635,"TERMINAL",0,0,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/.venv/bin/python: can't open file '/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/jasmine/train_dynamics_appendix-c.py': [Errno 2] No such file or directory\r\nsrun: error: hkn0401: task 0: Exited with exit code 2\r\n]0;tum_cte0515@hkn0401:~/Projects/jasmine[?2004h(jasmine) [tum_cte0515@hkn0401 jasmine]$ ",,terminal_output +2319,1378406,"TERMINAL",0,0,"3",,terminal_output +2320,1379407,"TERMINAL",0,0,"4",,terminal_output +2321,1380430,"TERMINAL",0,0,"5",,terminal_output +2322,1381455,"TERMINAL",0,0,"6",,terminal_output +2323,1382537,"TERMINAL",0,0,"7",,terminal_output +2324,1383170,"jasmine/train_dynamics.py",0,0,"",python,tab +2325,1383778,"TERMINAL",0,0,"8",,terminal_output +2326,1384635,"TERMINAL",0,0,"30",,terminal_output +2327,1385562,"TERMINAL",0,0,"1",,terminal_output +2328,1386655,"TERMINAL",0,0,"2",,terminal_output +2329,1387731,"TERMINAL",0,0,"3",,terminal_output +2330,1388681,"TERMINAL",0,0,"4",,terminal_output +2331,1389690,"TERMINAL",0,0,"5",,terminal_output +2332,1390775,"TERMINAL",0,0,"6",,terminal_output +2333,1391740,"TERMINAL",0,0,"7",,terminal_output +2334,1392776,"TERMINAL",0,0,"8",,terminal_output +2335,1393930,"TERMINAL",0,0,"9",,terminal_output +2336,1394803,"TERMINAL",0,0,"40",,terminal_output +2337,1395822,"TERMINAL",0,0,"1",,terminal_output +2338,1396844,"TERMINAL",0,0,"2",,terminal_output +2339,1397859,"TERMINAL",0,0,"3",,terminal_output +2340,1398922,"TERMINAL",0,0,"4",,terminal_output +2341,1399900,"TERMINAL",0,0,"5",,terminal_output +2342,1400945,"TERMINAL",0,0,"6",,terminal_output +2343,1401949,"TERMINAL",0,0,"7",,terminal_output 
+2344,1403001,"TERMINAL",0,0,"8",,terminal_output +2345,1404070,"TERMINAL",0,0,"9",,terminal_output +2346,1405009,"TERMINAL",0,0,"50",,terminal_output +2347,1406036,"TERMINAL",0,0,"1",,terminal_output +2348,1406661,"jasmine/train_dynamics copy.py",0,0,"import os\n\n\nos.environ.setdefault(""XLA_PYTHON_CLIENT_MEM_FRACTION"", ""0.98"")\n\nfrom dataclasses import dataclass, field\nimport itertools\nfrom typing import cast, Optional\n\nimport einops\nfrom jax.sharding import Mesh, PartitionSpec, NamedSharding\nfrom jax.experimental.mesh_utils import create_device_mesh\nimport optax\nimport orbax.checkpoint as ocp\nimport numpy as np\nimport dm_pix as pix\nimport jax\nimport jax.numpy as jnp\nimport tyro\nimport wandb\nimport grain\nimport flax.nnx as nnx\n\nfrom genie import Genie, restore_genie_components\nfrom utils.dataloader import get_dataloader\nfrom utils.train_utils import (\n get_lr_schedule,\n count_parameters_by_component,\n print_mem_stats,\n print_compiled_memory_stats,\n print_compiled_cost_analysis,\n)\n\n\n@dataclass\nclass Args:\n # Experiment\n num_steps: int = 200_000\n seed: int = 0\n seq_len: int = 16\n image_channels: int = 3\n image_height: int = 64\n image_width: int = 64\n data_dir: str = """"\n save_ckpt: bool = False\n restore_ckpt: bool = False\n # Optimization\n batch_size: int = 36\n init_lr: float = 0.0\n max_lr: float = 3e-5\n decay_end: float = 0.0\n wsd_decay_steps: int = (\n 20_000 # NOTE: wsd_decay_steps will only be used when using a wsd-schedule\n )\n warmup_steps: int = 5000\n lr_schedule: str = ""wsd"" # supported options: wsd, cos\n # Tokenizer\n tokenizer_dim: int = 512\n tokenizer_ffn_dim: int = 2048\n latent_patch_dim: int = 32\n num_patch_latents: int = 1024\n patch_size: int = 16\n tokenizer_num_blocks: int = 4\n tokenizer_num_heads: int = 8\n tokenizer_checkpoint: str = """"\n # LAM\n lam_dim: int = 512\n lam_ffn_dim: int = 2048\n latent_action_dim: int = 32\n num_actions: int = 6\n lam_patch_size: int = 16\n lam_num_blocks: int = 4\n lam_num_heads: int = 8\n lam_checkpoint: str = """"\n # Dynamics\n dyna_type: str = ""maskgit"" # supported options: maskgit, causal\n dyna_dim: int = 512\n dyna_ffn_dim: int = 2048\n dyna_num_blocks: int = 6\n dyna_num_heads: int = 8\n dropout: float = 0.0\n mask_limit: float = 0.5\n z_loss_weight: float = 0.0\n param_dtype = jnp.float32\n dtype = jnp.bfloat16\n use_flash_attention: bool = True\n use_gt_actions: bool = False\n # Logging\n log: bool = True\n entity: str = """"\n project: str = """"\n name: str = ""train_dynamics""\n tags: list[str] = field(default_factory=lambda: [""dynamics""])\n log_interval: int = 50\n log_image_interval: int = 1000\n ckpt_dir: str = """"\n log_checkpoint_interval: int = 5000\n log_checkpoint_keep_period: int = 20_000\n log_gradients: bool = False\n val_data_dir: str = """"\n val_interval: int = 20_000\n val_steps: int = 50\n eval_full_frame: bool = True\n val_maskgit_steps: int = 25\n val_temperature: float = 1\n val_sample_argmax: bool = False\n wandb_id: str = """"\n\n\ndef build_model(args: Args, rng: jax.Array) -> tuple[Genie, jax.Array]:\n rng, _rng = jax.random.split(rng)\n rngs = nnx.Rngs(_rng)\n genie = Genie(\n # Tokenizer\n in_dim=args.image_channels,\n tokenizer_dim=args.tokenizer_dim,\n tokenizer_ffn_dim=args.tokenizer_ffn_dim,\n latent_patch_dim=args.latent_patch_dim,\n num_patch_latents=args.num_patch_latents,\n patch_size=args.patch_size,\n tokenizer_num_blocks=args.tokenizer_num_blocks,\n tokenizer_num_heads=args.tokenizer_num_heads,\n # LAM\n 
lam_dim=args.lam_dim,\n lam_ffn_dim=args.lam_ffn_dim,\n latent_action_dim=args.latent_action_dim,\n num_actions=args.num_actions,\n lam_patch_size=args.lam_patch_size,\n lam_num_blocks=args.lam_num_blocks,\n lam_num_heads=args.lam_num_heads,\n lam_co_train=not args.lam_checkpoint,\n use_gt_actions=args.use_gt_actions,\n # Dynamics\n dyna_type=args.dyna_type,\n dyna_dim=args.dyna_dim,\n dyna_ffn_dim=args.dyna_ffn_dim,\n dyna_num_blocks=args.dyna_num_blocks,\n dyna_num_heads=args.dyna_num_heads,\n dropout=args.dropout,\n mask_limit=args.mask_limit,\n param_dtype=args.param_dtype,\n dtype=args.dtype,\n use_flash_attention=args.use_flash_attention,\n decode=False,\n rngs=rngs,\n )\n if args.use_gt_actions:\n assert (\n not args.lam_checkpoint\n ), ""Cannot use LAM when using ground-truth actions.""\n else:\n assert genie.lam is not None\n del genie.lam.decoder\n return genie, rng\n\n\ndef build_optimizer(genie: Genie, args: Args) -> nnx.ModelAndOptimizer:\n lr_schedule = get_lr_schedule(\n args.lr_schedule,\n args.init_lr,\n args.max_lr,\n args.decay_end,\n args.num_steps,\n args.warmup_steps,\n args.wsd_decay_steps,\n )\n tx = optax.adamw(\n learning_rate=lr_schedule,\n b1=0.9,\n b2=0.9,\n weight_decay=1e-4,\n mu_dtype=args.param_dtype, # moments in full precision\n )\n optimizer = nnx.ModelAndOptimizer(genie, tx)\n return optimizer\n\n\ndef build_mesh_and_sharding(\n num_devices: int,\n) -> tuple[Mesh, NamedSharding, NamedSharding, NamedSharding]:\n device_mesh_arr = create_device_mesh((num_devices,))\n mesh = Mesh(devices=device_mesh_arr, axis_names=(""data"",))\n replicated_sharding = NamedSharding(mesh, PartitionSpec())\n videos_sharding = NamedSharding(mesh, PartitionSpec(""data"", None, None, None, None))\n actions_sharding = NamedSharding(mesh, PartitionSpec(""data"", None))\n return mesh, replicated_sharding, videos_sharding, actions_sharding\n\n\ndef shard_optimizer_states(\n optimizer: nnx.ModelAndOptimizer, replicated_sharding: NamedSharding\n) -> None:\n model_state = nnx.state(optimizer.model)\n model_sharded_state = jax.lax.with_sharding_constraint(\n model_state, replicated_sharding\n )\n nnx.update(optimizer.model, model_sharded_state)\n optimizer_state = nnx.state(optimizer, nnx.optimizer.OptState)\n optimizer_sharded_state = jax.lax.with_sharding_constraint(\n optimizer_state, replicated_sharding\n )\n nnx.update(optimizer, optimizer_sharded_state)\n\n\ndef build_dataloader(args: Args, data_dir: str) -> grain.DataLoaderIterator:\n image_shape = (args.image_height, args.image_width, args.image_channels)\n array_record_files = [\n os.path.join(data_dir, x)\n for x in os.listdir(data_dir)\n if x.endswith("".array_record"")\n ]\n grain_dataloader = get_dataloader(\n array_record_files,\n args.seq_len,\n # NOTE: We deliberately pass the global batch size\n # The dataloader shards the dataset across all processes\n args.batch_size,\n *image_shape,\n num_workers=8,\n prefetch_buffer_size=1,\n seed=args.seed,\n )\n initial_state = grain_dataloader._create_initial_state()\n grain_iterator = grain.DataLoaderIterator(grain_dataloader, initial_state)\n return grain_iterator\n\n\ndef build_checkpoint_manager(args: Args) -> Optional[ocp.CheckpointManager]:\n if args.restore_ckpt or args.save_ckpt:\n handler_registry = ocp.handlers.DefaultCheckpointHandlerRegistry()\n handler_registry.add(\n ""model_state"", ocp.args.PyTreeSave, ocp.handlers.PyTreeCheckpointHandler\n )\n handler_registry.add(\n ""model_state"", ocp.args.PyTreeRestore, ocp.handlers.PyTreeCheckpointHandler\n )\n 
handler_registry.add(\n ""train_dataloader_state"",\n grain.checkpoint.CheckpointSave,\n cast(ocp.handlers.CheckpointHandler, grain.checkpoint.CheckpointHandler),\n )\n handler_registry.add(\n ""train_dataloader_state"",\n grain.checkpoint.CheckpointRestore,\n cast(ocp.handlers.CheckpointHandler, grain.checkpoint.CheckpointHandler),\n )\n if args.val_data_dir:\n handler_registry.add(\n ""val_dataloader_state"",\n grain.checkpoint.CheckpointSave,\n cast(\n ocp.handlers.CheckpointHandler, grain.checkpoint.CheckpointHandler\n ),\n )\n handler_registry.add(\n ""val_dataloader_state"",\n grain.checkpoint.CheckpointRestore,\n cast(\n ocp.handlers.CheckpointHandler, grain.checkpoint.CheckpointHandler\n ),\n )\n checkpoint_options = ocp.CheckpointManagerOptions(\n save_interval_steps=args.log_checkpoint_interval,\n max_to_keep=3,\n keep_period=args.log_checkpoint_keep_period,\n step_format_fixed_length=6,\n cleanup_tmp_directories=True,\n )\n checkpoint_manager = ocp.CheckpointManager(\n args.ckpt_dir,\n options=checkpoint_options,\n handler_registry=handler_registry,\n )\n return checkpoint_manager\n else:\n return None\n\n\ndef restore_or_initialize_components(\n args: Args,\n checkpoint_manager: Optional[ocp.CheckpointManager],\n optimizer: nnx.ModelAndOptimizer,\n train_iterator: grain.DataLoaderIterator,\n rng: jax.Array,\n replicated_sharding: NamedSharding,\n val_iterator: Optional[grain.DataLoaderIterator],\n restore_step: Optional[int] = None,\n) -> tuple[\n int,\n nnx.ModelAndOptimizer,\n grain.DataLoaderIterator,\n grain.DataLoaderIterator,\n jax.Array,\n]:\n step = 0\n if checkpoint_manager and restore_step is None:\n restore_step = checkpoint_manager.latest_step()\n if args.restore_ckpt:\n assert checkpoint_manager is not None\n abstract_optimizer = nnx.eval_shape(lambda: optimizer)\n abstract_optimizer_state = nnx.state(abstract_optimizer)\n if val_iterator:\n restore_args = ocp.args.Composite(\n model_state=ocp.args.PyTreeRestore(abstract_optimizer_state), # type: ignore\n train_dataloader_state=grain.checkpoint.CheckpointRestore(train_iterator), # type: ignore\n val_dataloader_state=grain.checkpoint.CheckpointRestore(val_iterator), # type: ignore\n )\n else:\n restore_args = ocp.args.Composite(\n model_state=ocp.args.PyTreeRestore(abstract_optimizer_state), # type: ignore\n train_dataloader_state=grain.checkpoint.CheckpointRestore(train_iterator), # type: ignore\n )\n restored = checkpoint_manager.restore(\n checkpoint_manager.latest_step(), args=restore_args\n )\n restored_optimizer_state = restored[""model_state""]\n nnx.update(optimizer, restored_optimizer_state)\n train_iterator = restored[""train_dataloader_state""]\n if val_iterator:\n val_iterator = restored[""val_dataloader_state""]\n step = checkpoint_manager.latest_step() or 0\n print(f""Restored dataloader and model state from step {step}"")\n else:\n # Restore from pre-trained tokenizer (and LAM)\n rng, _rng = jax.random.split(rng)\n optimizer = restore_genie_components(optimizer, replicated_sharding, _rng, args)\n return step, optimizer, train_iterator, val_iterator, rng\n\n\ndef _calculate_top_k_accuracy(\n token_logits_BTNV: jax.Array,\n video_tokens_BTN: jax.Array,\n mask_BTN: jax.Array,\n k: int,\n) -> jax.Array:\n _, topk_indices_BTNK = jax.lax.top_k(token_logits_BTNV, k)\n topk_correct = jnp.any(\n topk_indices_BTNK == video_tokens_BTN[..., jnp.newaxis], axis=-1\n )\n topk_acc = (mask_BTN * topk_correct).sum() / mask_BTN.sum()\n return topk_acc\n\n\ndef _calculate_step_metrics(\n outputs: dict[str, jax.Array],\n 
gt: jax.Array,\n num_actions: int,\n num_patch_latents: int,\n) -> tuple[jax.Array, dict]:\n mask_BTN = outputs[""mask""]\n outputs[""token_logits""] = outputs[""token_logits""].astype(jnp.float32)\n ce_loss = optax.softmax_cross_entropy_with_integer_labels(\n outputs[""token_logits""], outputs[""video_tokens""]\n )\n ce_loss = (mask_BTN * ce_loss).sum() / mask_BTN.sum()\n z_val = jax.nn.logsumexp(outputs[""token_logits""], axis=-1)\n z_loss_metric = (mask_BTN * (z_val**2)).sum() / mask_BTN.sum()\n\n masked_token_top_1_acc = _calculate_top_k_accuracy(\n outputs[""token_logits""], outputs[""video_tokens""], mask_BTN, 1\n )\n masked_token_top_2_acc = _calculate_top_k_accuracy(\n outputs[""token_logits""], outputs[""video_tokens""], mask_BTN, 2\n )\n masked_token_top_5_acc = _calculate_top_k_accuracy(\n outputs[""token_logits""], outputs[""video_tokens""], mask_BTN, 5\n )\n masked_token_top_16_acc = _calculate_top_k_accuracy(\n outputs[""token_logits""], outputs[""video_tokens""], mask_BTN, 16\n )\n\n select_probs = jax.nn.softmax(outputs[""token_logits""])\n gt_val = gt.clip(0, 1).reshape(-1, *gt.shape[2:])\n recon = outputs[""recon""].clip(0, 1).reshape(-1, *outputs[""recon""].shape[2:])\n psnr = jnp.asarray(pix.psnr(gt_val, recon)).mean()\n ssim = jnp.asarray(pix.ssim(gt_val, recon)).mean()\n _, index_counts_tokenizer = jnp.unique_counts(\n jnp.ravel(outputs[""video_tokens""]),\n size=num_patch_latents,\n fill_value=0,\n )\n codebook_usage_tokenizer = (index_counts_tokenizer != 0).mean()\n metrics = dict(\n cross_entropy_loss=ce_loss,\n masked_token_top1_accuracy=masked_token_top_1_acc,\n masked_token_top2_accuracy=masked_token_top_2_acc,\n masked_token_top5_accuracy=masked_token_top_5_acc,\n masked_token_top16_accuracy=masked_token_top_16_acc,\n select_logit=outputs[""token_logits""].max(-1).mean(),\n select_p=select_probs.max(-1).mean(),\n entropy=jax.scipy.special.entr(select_probs).sum(-1).mean(),\n z_loss=z_loss_metric,\n psnr=psnr,\n ssim=ssim,\n codebook_usage_tokenizer=codebook_usage_tokenizer,\n )\n if ""lam_indices"" in outputs.keys():\n _, index_counts_lam = jnp.unique_counts(\n jnp.ravel(outputs[""lam_indices""]),\n size=num_actions,\n fill_value=0,\n )\n codebook_usage_lam = (index_counts_lam != 0).mean()\n metrics[""codebook_usage_lam""] = codebook_usage_lam\n return ce_loss, metrics\n\n\ndef main(args: Args) -> None:\n jax.distributed.initialize()\n num_devices = jax.device_count()\n if num_devices == 0:\n raise ValueError(""No JAX devices found."")\n print(f""Running on {num_devices} devices."")\n\n if args.batch_size % num_devices != 0:\n raise ValueError(\n f""Global batch size {args.batch_size} must be divisible by ""\n f""number of devices {num_devices}.""\n )\n\n rng = jax.random.key(args.seed)\n\n # --- Initialize model ---\n genie, rng = build_model(args, rng)\n _, params, _ = nnx.split(genie, nnx.Param, ...)\n param_counts = count_parameters_by_component(params)\n\n if args.log and jax.process_index() == 0:\n wandb_init_kwargs = {\n ""entity"": args.entity,\n ""project"": args.project,\n ""name"": args.name,\n ""tags"": args.tags,\n ""group"": ""debug"",\n ""config"": args,\n }\n\n if args.wandb_id:\n wandb_init_kwargs.update(\n {\n ""id"": args.wandb_id,\n ""resume"": ""allow"",\n }\n )\n wandb.init(**wandb_init_kwargs)\n\n wandb.config.update({""model_param_count"": param_counts})\n\n print(""Parameter counts:"")\n print(param_counts)\n\n # --- Initialize optimizer ---\n optimizer = build_optimizer(genie, args)\n del genie\n\n # FIXME: switch to 
create_hybrid_device_mesh for runs spanning multiple nodes\n _, replicated_sharding, videos_sharding, actions_sharding = build_mesh_and_sharding(\n num_devices\n )\n\n shard_optimizer_states(optimizer, replicated_sharding)\n\n # --- Initialize checkpoint manager ---\n checkpoint_manager = build_checkpoint_manager(args)\n\n # --- Create DataLoaderIterator from dataloader ---\n train_iterator = build_dataloader(args, args.data_dir)\n val_iterator = None\n if args.val_data_dir:\n val_iterator = build_dataloader(args, args.val_data_dir)\n\n # --- Restore checkpoint ---\n step, optimizer, train_iterator, val_iterator, rng = (\n restore_or_initialize_components(\n args,\n checkpoint_manager,\n optimizer,\n train_iterator,\n rng,\n replicated_sharding,\n val_iterator,\n )\n )\n\n # --- Define loss and train step (close over args) ---\n def dynamics_loss_fn(\n model: Genie,\n inputs: dict,\n ) -> tuple[jax.Array, tuple[jax.Array, dict]]:\n gt = jnp.asarray(inputs[""videos""], dtype=jnp.float32) / 255.0\n inputs[""videos""] = gt.astype(args.dtype)\n outputs = model(inputs)\n ce_loss, metrics = _calculate_step_metrics(\n outputs, gt, args.num_actions, args.num_patch_latents\n )\n z_loss = metrics[""z_loss""]\n total_loss = ce_loss + args.z_loss_weight * z_loss\n metrics[""total_loss""] = total_loss\n return total_loss, (outputs[""recon""], metrics)\n\n @nnx.jit\n def val_step(genie: Genie, inputs: dict) -> dict:\n """"""Evaluate model and compute metrics""""""\n genie.eval()\n gt = jnp.asarray(inputs[""videos""], dtype=jnp.float32) / 255.0\n (loss, (recon, metrics)) = dynamics_loss_fn(genie, inputs)\n val_output = {""loss"": loss, ""recon"": recon, ""metrics"": metrics}\n\n # --- Evaluate full frame prediction (sampling) ---\n if args.eval_full_frame:\n inputs[""videos""] = gt.astype(args.dtype)\n tokenizer_outputs = genie.tokenizer.vq_encode(\n inputs[""videos""], training=False\n )\n tokens_full_frame = tokenizer_outputs[""indices""]\n lam_indices_E = None\n if not args.use_gt_actions:\n lam_indices_E = genie.vq_encode(inputs, training=False)\n inputs[""latent_actions""] = lam_indices_E\n inputs[""videos""] = inputs[""videos""][\n :, :-1\n ] # remove last frame for generation\n recon_full_frame, logits_full_frame = genie.sample(\n inputs,\n args.seq_len,\n args.val_temperature,\n args.val_sample_argmax,\n args.val_maskgit_steps,\n )\n # Calculate metrics for the last frame only\n step_outputs = {\n ""recon"": recon_full_frame[:, -1],\n ""token_logits"": logits_full_frame[:, -1],\n ""video_tokens"": tokens_full_frame[:, -1],\n ""mask"": jnp.ones_like(tokens_full_frame[:, -1]),\n }\n if lam_indices_E is not None:\n lam_indices_B = lam_indices_E.reshape((-1, args.seq_len - 1))[:, -1]\n step_outputs[""lam_indices""] = lam_indices_B\n\n loss_full_frame, metrics_full_frame = _calculate_step_metrics(\n step_outputs, gt[:, -1], args.num_actions, args.num_patch_latents\n )\n val_output.update(\n {\n ""loss_full_frame"": loss_full_frame,\n ""recon_full_frame"": recon_full_frame,\n ""metrics_full_frame"": metrics_full_frame,\n }\n )\n return val_output\n\n def calculate_validation_metrics(val_dataloader, genie, rng):\n step = 0\n loss_per_step = []\n metrics_per_step = []\n loss_full_frame_per_step = []\n metrics_full_frame_per_step = []\n batch = None\n recon = None\n recon_full_frame = None\n for batch in val_dataloader:\n rng, _rng_mask = jax.random.split(rng, 2)\n batch[""rng""] = _rng_mask\n val_outputs = val_step(genie, batch)\n loss_per_step.append(val_outputs[""loss""])\n 
metrics_per_step.append(val_outputs[""metrics""])\n recon = val_outputs[""recon""]\n if args.eval_full_frame:\n loss_full_frame_per_step.append(val_outputs[""loss_full_frame""])\n metrics_full_frame_per_step.append(val_outputs[""metrics_full_frame""])\n recon_full_frame = val_outputs[""recon_full_frame""]\n step += 1\n if step > args.val_steps:\n break\n\n if step < args.val_steps:\n print(\n f""Warning: Your validation dataset is too small to make val_steps many steps. Made {step} steps, expected {args.val_steps}""\n )\n\n val_metrics = {\n f""val_{key}"": np.mean([float(m[key]) for m in metrics_per_step])\n for key in metrics_per_step[0].keys()\n }\n val_metrics[""val_loss""] = np.mean(loss_per_step)\n if args.eval_full_frame:\n val_metrics_full_frame = {\n f""val_full_frame_{key}"": np.mean(\n [float(m[key]) for m in metrics_full_frame_per_step]\n )\n for key in metrics_full_frame_per_step[0].keys()\n }\n val_metrics.update(val_metrics_full_frame)\n val_metrics[""val_full_frame_loss""] = np.mean(loss_full_frame_per_step)\n return val_metrics, batch, recon, recon_full_frame\n\n dataloader_val = None\n if val_iterator:\n dataloader_val = (\n {\n ""videos"": jax.make_array_from_process_local_data(\n videos_sharding, elem[""videos""]\n ),\n ""actions"": (\n jax.make_array_from_process_local_data(\n actions_sharding, elem[""actions""]\n )\n if args.use_gt_actions\n else None\n ),\n }\n for elem in val_iterator\n )\n print(f""Starting training from step {step}..."")\n # --- Validation loss ---\n val_results = {}\n if dataloader_val and step % args.val_interval == 0:\n rng, _rng_mask_val = jax.random.split(rng, 2)\n print(""Calculating validation metrics..."")\n val_metrics, val_gt_batch, val_recon, val_recon_full_frame = (\n calculate_validation_metrics(\n dataloader_val, optimizer.model, _rng_mask_val\n )\n )\n print(f""Step {step}, validation loss: {val_metrics['val_loss']}"")\n print(val_metrics)\n\n if checkpoint_manager:\n checkpoint_manager.close()\n\n\nif __name__ == ""__main__"":\n args = tyro.cli(Args)\n main(args)\n",python,tab +2349,1407140,"TERMINAL",0,0,"2",,terminal_output +2350,1407680,"jasmine/train_dynamics.py",0,0,"",python,tab +2351,1408172,"TERMINAL",0,0,"3",,terminal_output +2352,1408882,"jasmine/train_dynamics copy.py",0,0,"",python,tab +2353,1409179,"TERMINAL",0,0,"4",,terminal_output +2354,1410179,"TERMINAL",0,0,"5",,terminal_output +2355,1411240,"TERMINAL",0,0,"6",,terminal_output +2356,1412253,"TERMINAL",0,0,"7",,terminal_output +2357,1413505,"TERMINAL",0,0,"8",,terminal_output +2358,1413971,"jasmine/train_dynamics.py",0,0,"",python,tab +2359,1414270,"TERMINAL",0,0,"9",,terminal_output +2360,1414601,"jasmine/train_dynamics.py",686,0,"",python,selection_mouse +2361,1414633,"jasmine/train_dynamics.py",685,0,"",python,selection_command +2362,1415289,"TERMINAL",0,0,"1:00",,terminal_output +2363,1415736,"jasmine/train_dynamics.py",17092,0,"\n",python,content +2364,1416237,"jasmine/train_dynamics.py",17092,0," @nnx.jit(donate_argnums=0)\n def train_step(\n optimizer: nnx.ModelAndOptimizer, inputs: dict\n ) -> tuple[jax.Array, jax.Array, dict]:\n def loss_fn(model: Genie) -> tuple[jax.Array, tuple[jax.Array, dict]]:\n model.train()\n return dynamics_loss_fn(model, inputs)\n\n (loss, (recon, metrics)), grads = nnx.value_and_grad(loss_fn, has_aux=True)(\n optimizer.model\n )\n optimizer.update(grads)\n if args.log_gradients:\n metrics[""gradients_std/""] = jax.tree.map(\n lambda x: x.std(), grads[""params""][""dynamics""]\n )\n return loss, recon, metrics\n",python,content 
+2365,1416279,"jasmine/train_dynamics.py",21870,0," # --- TRAIN LOOP ---\n dataloader_train = (\n {\n ""videos"": jax.make_array_from_process_local_data(\n videos_sharding, local_data=elem[""videos""]\n ),\n ""actions"": (\n jax.make_array_from_process_local_data(\n actions_sharding, elem[""actions""]\n )\n if args.use_gt_actions\n else None\n ),\n }\n for elem in train_iterator\n )\n",python,content +2366,1416296,"jasmine/train_dynamics.py",22897,0," if jax.process_index() == 0:\n first_batch = next(dataloader_train)\n first_batch[""rng""] = rng # type: ignore\n compiled = train_step.lower(optimizer, first_batch).compile()\n print_compiled_memory_stats(compiled.memory_analysis())\n print_compiled_cost_analysis(compiled.cost_analysis())\n # Do not skip the first batch during training\n dataloader_train = itertools.chain([first_batch], dataloader_train)\n",python,content +2367,1416322,"jasmine/train_dynamics.py",23403,0," first_step = step\n",python,content +2368,1416362,"jasmine/train_dynamics.py",23939,0," val_results = {\n ""metrics"": val_metrics,\n ""gt_batch"": val_gt_batch,\n ""recon"": val_recon,\n ""full_frame"": val_recon_full_frame,\n }\n",python,content +2369,1416367,"jasmine/train_dynamics.py",23926,11,"val_me",python,content +2370,1416376,"TERMINAL",0,0,"1",,terminal_output +2371,1416388,"jasmine/train_dynamics.py",23926,6,"",python,content +2372,1416425,"jasmine/train_dynamics.py",23925,2,"",python,content +2373,1416452,"jasmine/train_dynamics.py",23920,5,"",python,content +2374,1416495,"jasmine/train_dynamics.py",23911,9,"",python,content +2375,1416539,"jasmine/train_dynamics.py",24100,0,"\n",python,content +2376,1416592,"jasmine/train_dynamics.py",24100,0," while step < args.num_steps:\n for batch in dataloader_train:\n # --- Train step ---\n rng, _rng_mask = jax.random.split(rng, 2)\n batch[""rng""] = _rng_mask\n loss, recon, metrics = train_step(optimizer, batch)\n if step == first_step:\n print_mem_stats(""After params initialized"")\n step += 1\n",python,content +2377,1416603,"jasmine/train_dynamics.py",24098,0," ",python,content +2378,1416603,"jasmine/train_dynamics.py",24054,0," ",python,content +2379,1416603,"jasmine/train_dynamics.py",24022,0," ",python,content +2380,1416603,"jasmine/train_dynamics.py",23984,0," ",python,content +2381,1416603,"jasmine/train_dynamics.py",23948,0," ",python,content +2382,1416603,"jasmine/train_dynamics.py",23920,0," ",python,content +2383,1416603,"jasmine/train_dynamics.py",23846,0," ",python,content +2384,1416603,"jasmine/train_dynamics.py",23836,0," ",python,content +2385,1416603,"jasmine/train_dynamics.py",23826,0," ",python,content +2386,1416603,"jasmine/train_dynamics.py",23767,0," ",python,content +2387,1416604,"jasmine/train_dynamics.py",23721,0," ",python,content +2388,1416604,"jasmine/train_dynamics.py",23646,0," ",python,content +2389,1416604,"jasmine/train_dynamics.py",23595,0," ",python,content +2390,1416604,"jasmine/train_dynamics.py",23541,0," ",python,content +2391,1416604,"jasmine/train_dynamics.py",23480,0," ",python,content +2392,1416604,"jasmine/train_dynamics.py",23459,0," ",python,content +2393,1416604,"jasmine/train_dynamics.py",23429,0," ",python,content +2394,1416611,"jasmine/train_dynamics.py",23424,811,"",python,content +2395,1416647,"jasmine/train_dynamics.py",23803,0," # --- Validation loss ---\n val_results = {}\n if dataloader_val and step % args.val_interval == 0:\n rng, _rng_mask_val = jax.random.split(rng, 2)\n print(""Calculating validation metrics..."")\n val_metrics, val_gt_batch, val_recon, 
val_recon_full_frame = (\n calculate_validation_metrics(\n dataloader_val, optimizer.model, _rng_mask_val\n )\n )\n print(f""Step {step}, validation loss: {val_metrics['val_loss']}"")\n val_results = {\n ""metrics"": val_metrics,\n ""gt_batch"": val_gt_batch,\n ""recon"": val_recon,\n ""full_frame"": val_recon_full_frame,\n }\n",python,content +2396,1416665,"jasmine/train_dynamics.py",24614,0,"\n # --- Logging ---\n if args.log:\n if step % args.log_interval == 0 and jax.process_index() == 0:\n log_dict = {""loss"": loss, ""step"": step, **metrics}\n if val_results:\n log_dict.update(val_results[""metrics""])\n wandb.log(log_dict)\n if step % args.log_image_interval == 0:\n gt_seq = batch[""videos""][0].astype(jnp.float32) / 255.0\n recon_seq = recon[0].clip(0, 1)\n comparison_seq = jnp.concatenate((gt_seq, recon_seq), axis=1)\n comparison_seq = einops.rearrange(\n comparison_seq * 255, ""t h w c -> h (t w) c""\n )\n if val_results:\n val_results[""gt_seq_val""] = (\n val_results[""gt_batch""][""videos""][0].astype(jnp.float32)\n / 255.0\n )\n val_results[""recon_seq_val""] = val_results[""recon""][0].clip(\n 0, 1\n )\n val_comparison_seq = jnp.concatenate(\n (val_results[""gt_seq_val""], val_results[""recon_seq_val""]),\n axis=1,\n )\n val_results[""val_comparison_seq""] = einops.rearrange(\n val_comparison_seq * 255, ""t h w c -> h (t w) c""\n )\n if args.eval_full_frame:\n val_results[""full_frame_seq_val""] = val_results[\n ""full_frame""\n ][0].clip(0, 1)\n val_results[""val_full_frame_comparison_seq""] = (\n jnp.concatenate(\n (\n val_results[""gt_seq_val""],\n val_results[""full_frame_seq_val""],\n ),\n axis=1,\n )\n )\n val_results[""val_full_frame_comparison_seq""] = (\n einops.rearrange(\n val_results[""val_full_frame_comparison_seq""] * 255,\n ""t h w c -> h (t w) c"",\n )\n )\n # NOTE: Process-dependent control flow deliberately happens\n # after indexing operation since it must not contain code\n # sections that lead to cross-accelerator communication.\n if jax.process_index() == 0:\n log_images = dict(\n image=wandb.Image(np.asarray(gt_seq[args.seq_len - 1])),\n recon=wandb.Image(np.asarray(recon_seq[args.seq_len - 1])),\n true_vs_recon=wandb.Image(\n np.asarray(comparison_seq.astype(np.uint8))\n ),\n )\n if val_results:\n log_images.update(\n dict(\n val_image=wandb.Image(\n np.asarray(\n val_results[""gt_seq_val""][args.seq_len - 1]\n )\n ),\n val_recon=wandb.Image(\n np.asarray(\n val_results[""recon_seq_val""][\n args.seq_len - 1\n ]\n )\n ),\n val_true_vs_recon=wandb.Image(\n np.asarray(\n val_results[""val_comparison_seq""].astype(\n np.uint8\n )\n )\n ),\n )\n )\n if args.eval_full_frame:\n log_images.update(\n dict(\n val_full_frame=wandb.Image(\n np.asarray(\n val_results[""full_frame_seq_val""][\n args.seq_len - 1\n ]\n )\n ),\n val_true_vs_full_frame=wandb.Image(\n np.asarray(\n val_results[\n ""val_full_frame_comparison_seq""\n ].astype(np.uint8)\n )\n ),\n )\n )\n wandb.log(log_images)\n # --- Checkpointing ---\n if args.save_ckpt and step % args.log_checkpoint_interval == 0:\n assert checkpoint_manager is not None\n optimizer_state = nnx.state(optimizer)\n if val_iterator:\n ckpt_manager_args = ocp.args.Composite(\n model_state=ocp.args.PyTreeSave(optimizer_state), # type: ignore\n train_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n train_iterator # type: ignore\n ),\n val_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n val_iterator # type: ignore\n ),\n )\n else:\n ckpt_manager_args = ocp.args.Composite(\n 
model_state=ocp.args.PyTreeSave(optimizer_state), # type: ignore\n train_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n train_iterator # type: ignore\n ),\n )\n checkpoint_manager.save(step, args=ckpt_manager_args)\n print(f""Saved checkpoint at step {step}"")\n if step >= args.num_steps:\n break\n",python,content +2397,1417412,"TERMINAL",0,0,"2",,terminal_output +2398,1418357,"TERMINAL",0,0,"3",,terminal_output +2399,1419435,"TERMINAL",0,0,"4",,terminal_output +2400,1419695,"jasmine/train_dynamics.py",24614,0,"",python,selection_command +2401,1420342,"TERMINAL",0,0,"5",,terminal_output +2402,1420835,"jasmine/train_dynamics copy.py",0,0,"",python,tab +2403,1421404,"TERMINAL",0,0,"6",,terminal_output +2404,1422456,"TERMINAL",0,0,"7",,terminal_output +2405,1423443,"TERMINAL",0,0,"8",,terminal_output +2406,1424954,"TERMINAL",0,0,"9",,terminal_output +2407,1425669,"TERMINAL",0,0,"10",,terminal_output +2408,1426710,"TERMINAL",0,0,"1",,terminal_output +2409,1427557,"TERMINAL",0,0,"2",,terminal_output +2410,1428192,"jasmine/train_dynamics_appendix-c.py",0,0,"import os\n\n\nos.environ.setdefault(""XLA_PYTHON_CLIENT_MEM_FRACTION"", ""0.98"")\n\nfrom dataclasses import dataclass, field\nimport itertools\nfrom typing import cast, Optional\n\nimport einops\nfrom jax.sharding import Mesh, PartitionSpec, NamedSharding\nfrom jax.experimental.mesh_utils import create_device_mesh\nimport optax\nimport orbax.checkpoint as ocp\nimport numpy as np\nimport dm_pix as pix\nimport jax\nimport jax.numpy as jnp\nimport tyro\nimport wandb\nimport grain\nimport flax.nnx as nnx\n\nfrom genie import Genie, restore_genie_components\nfrom utils.dataloader import get_dataloader\nfrom utils.train_utils import (\n get_lr_schedule,\n count_parameters_by_component,\n print_mem_stats,\n print_compiled_memory_stats,\n print_compiled_cost_analysis,\n)\n\n\n@dataclass\nclass Args:\n # Experiment\n num_steps: int = 200_000\n seed: int = 0\n seq_len: int = 16\n image_channels: int = 3\n image_height: int = 64\n image_width: int = 64\n data_dir: str = """"\n save_ckpt: bool = False\n restore_ckpt: bool = False\n # Optimization\n batch_size: int = 36\n init_lr: float = 0.0\n max_lr: float = 3e-5\n decay_end: float = 0.0\n wsd_decay_steps: int = (\n 20_000 # NOTE: wsd_decay_steps will only be used when using a wsd-schedule\n )\n warmup_steps: int = 5000\n lr_schedule: str = ""wsd"" # supported options: wsd, cos\n # Tokenizer\n tokenizer_dim: int = 512\n tokenizer_ffn_dim: int = 2048\n latent_patch_dim: int = 32\n num_patch_latents: int = 1024\n patch_size: int = 16\n tokenizer_num_blocks: int = 4\n tokenizer_num_heads: int = 8\n tokenizer_checkpoint: str = """"\n # LAM\n lam_dim: int = 512\n lam_ffn_dim: int = 2048\n latent_action_dim: int = 32\n num_actions: int = 6\n lam_patch_size: int = 16\n lam_num_blocks: int = 4\n lam_num_heads: int = 8\n lam_checkpoint: str = """"\n # Dynamics\n dyna_type: str = ""maskgit"" # supported options: maskgit, causal\n dyna_dim: int = 512\n dyna_ffn_dim: int = 2048\n dyna_num_blocks: int = 6\n dyna_num_heads: int = 8\n dropout: float = 0.0\n mask_limit: float = 0.5\n z_loss_weight: float = 0.0\n param_dtype = jnp.float32\n dtype = jnp.bfloat16\n use_flash_attention: bool = True\n use_gt_actions: bool = False\n # Logging\n log: bool = True\n entity: str = """"\n project: str = """"\n name: str = ""train_dynamics""\n tags: list[str] = field(default_factory=lambda: [""dynamics""])\n log_interval: int = 50\n log_image_interval: int = 1000\n ckpt_dir: str = """"\n 
log_checkpoint_interval: int = 5000\n log_checkpoint_keep_period: int = 20_000\n log_gradients: bool = False\n val_data_dir: str = """"\n val_interval: int = 20_000\n val_steps: int = 50\n eval_full_frame: bool = True\n val_maskgit_steps: int = 25\n val_temperature: float = 1\n val_sample_argmax: bool = False\n wandb_id: str = """"\n\n\ndef build_model(args: Args, rng: jax.Array) -> tuple[Genie, jax.Array]:\n rng, _rng = jax.random.split(rng)\n rngs = nnx.Rngs(_rng)\n genie = Genie(\n # Tokenizer\n in_dim=args.image_channels,\n tokenizer_dim=args.tokenizer_dim,\n tokenizer_ffn_dim=args.tokenizer_ffn_dim,\n latent_patch_dim=args.latent_patch_dim,\n num_patch_latents=args.num_patch_latents,\n patch_size=args.patch_size,\n tokenizer_num_blocks=args.tokenizer_num_blocks,\n tokenizer_num_heads=args.tokenizer_num_heads,\n # LAM\n lam_dim=args.lam_dim,\n lam_ffn_dim=args.lam_ffn_dim,\n latent_action_dim=args.latent_action_dim,\n num_actions=args.num_actions,\n lam_patch_size=args.lam_patch_size,\n lam_num_blocks=args.lam_num_blocks,\n lam_num_heads=args.lam_num_heads,\n lam_co_train=not args.lam_checkpoint,\n use_gt_actions=args.use_gt_actions,\n # Dynamics\n dyna_type=args.dyna_type,\n dyna_dim=args.dyna_dim,\n dyna_ffn_dim=args.dyna_ffn_dim,\n dyna_num_blocks=args.dyna_num_blocks,\n dyna_num_heads=args.dyna_num_heads,\n dropout=args.dropout,\n mask_limit=args.mask_limit,\n param_dtype=args.param_dtype,\n dtype=args.dtype,\n use_flash_attention=args.use_flash_attention,\n decode=False,\n rngs=rngs,\n )\n if args.use_gt_actions:\n assert (\n not args.lam_checkpoint\n ), ""Cannot use LAM when using ground-truth actions.""\n else:\n assert genie.lam is not None\n del genie.lam.decoder\n return genie, rng\n\n\ndef build_optimizer(genie: Genie, args: Args) -> nnx.ModelAndOptimizer:\n lr_schedule = get_lr_schedule(\n args.lr_schedule,\n args.init_lr,\n args.max_lr,\n args.decay_end,\n args.num_steps,\n args.warmup_steps,\n args.wsd_decay_steps,\n )\n tx = optax.adamw(\n learning_rate=lr_schedule,\n b1=0.9,\n b2=0.9,\n weight_decay=1e-4,\n mu_dtype=args.param_dtype, # moments in full precision\n )\n optimizer = nnx.ModelAndOptimizer(genie, tx)\n return optimizer\n\n\ndef build_mesh_and_sharding(\n num_devices: int,\n) -> tuple[Mesh, NamedSharding, NamedSharding, NamedSharding]:\n device_mesh_arr = create_device_mesh((num_devices,))\n mesh = Mesh(devices=device_mesh_arr, axis_names=(""data"",))\n replicated_sharding = NamedSharding(mesh, PartitionSpec())\n videos_sharding = NamedSharding(mesh, PartitionSpec(""data"", None, None, None, None))\n actions_sharding = NamedSharding(mesh, PartitionSpec(""data"", None))\n return mesh, replicated_sharding, videos_sharding, actions_sharding\n\n\ndef shard_optimizer_states(\n optimizer: nnx.ModelAndOptimizer, replicated_sharding: NamedSharding\n) -> None:\n model_state = nnx.state(optimizer.model)\n model_sharded_state = jax.lax.with_sharding_constraint(\n model_state, replicated_sharding\n )\n nnx.update(optimizer.model, model_sharded_state)\n optimizer_state = nnx.state(optimizer, nnx.optimizer.OptState)\n optimizer_sharded_state = jax.lax.with_sharding_constraint(\n optimizer_state, replicated_sharding\n )\n nnx.update(optimizer, optimizer_sharded_state)\n\n\ndef build_dataloader(args: Args, data_dir: str) -> grain.DataLoaderIterator:\n image_shape = (args.image_height, args.image_width, args.image_channels)\n array_record_files = [\n os.path.join(data_dir, x)\n for x in os.listdir(data_dir)\n if x.endswith("".array_record"")\n ]\n grain_dataloader = 
get_dataloader(\n array_record_files,\n args.seq_len,\n # NOTE: We deliberately pass the global batch size\n # The dataloader shards the dataset across all processes\n args.batch_size,\n *image_shape,\n num_workers=8,\n prefetch_buffer_size=1,\n seed=args.seed,\n )\n initial_state = grain_dataloader._create_initial_state()\n grain_iterator = grain.DataLoaderIterator(grain_dataloader, initial_state)\n return grain_iterator\n\n\ndef build_checkpoint_manager(args: Args) -> Optional[ocp.CheckpointManager]:\n if args.restore_ckpt or args.save_ckpt:\n handler_registry = ocp.handlers.DefaultCheckpointHandlerRegistry()\n handler_registry.add(\n ""model_state"", ocp.args.PyTreeSave, ocp.handlers.PyTreeCheckpointHandler\n )\n handler_registry.add(\n ""model_state"", ocp.args.PyTreeRestore, ocp.handlers.PyTreeCheckpointHandler\n )\n handler_registry.add(\n ""train_dataloader_state"",\n grain.checkpoint.CheckpointSave,\n cast(ocp.handlers.CheckpointHandler, grain.checkpoint.CheckpointHandler),\n )\n handler_registry.add(\n ""train_dataloader_state"",\n grain.checkpoint.CheckpointRestore,\n cast(ocp.handlers.CheckpointHandler, grain.checkpoint.CheckpointHandler),\n )\n if args.val_data_dir:\n handler_registry.add(\n ""val_dataloader_state"",\n grain.checkpoint.CheckpointSave,\n cast(\n ocp.handlers.CheckpointHandler, grain.checkpoint.CheckpointHandler\n ),\n )\n handler_registry.add(\n ""val_dataloader_state"",\n grain.checkpoint.CheckpointRestore,\n cast(\n ocp.handlers.CheckpointHandler, grain.checkpoint.CheckpointHandler\n ),\n )\n checkpoint_options = ocp.CheckpointManagerOptions(\n save_interval_steps=args.log_checkpoint_interval,\n max_to_keep=3,\n keep_period=args.log_checkpoint_keep_period,\n step_format_fixed_length=6,\n cleanup_tmp_directories=True,\n )\n checkpoint_manager = ocp.CheckpointManager(\n args.ckpt_dir,\n options=checkpoint_options,\n handler_registry=handler_registry,\n )\n return checkpoint_manager\n else:\n return None\n\n\ndef restore_or_initialize_components(\n args: Args,\n checkpoint_manager: Optional[ocp.CheckpointManager],\n optimizer: nnx.ModelAndOptimizer,\n train_iterator: grain.DataLoaderIterator,\n rng: jax.Array,\n replicated_sharding: NamedSharding,\n val_iterator: Optional[grain.DataLoaderIterator],\n restore_step: Optional[int] = None,\n) -> tuple[\n int,\n nnx.ModelAndOptimizer,\n grain.DataLoaderIterator,\n grain.DataLoaderIterator,\n jax.Array,\n]:\n step = 0\n if checkpoint_manager and restore_step is None:\n restore_step = checkpoint_manager.latest_step()\n if args.restore_ckpt:\n assert checkpoint_manager is not None\n abstract_optimizer = nnx.eval_shape(lambda: optimizer)\n abstract_optimizer_state = nnx.state(abstract_optimizer)\n if val_iterator:\n restore_args = ocp.args.Composite(\n model_state=ocp.args.PyTreeRestore(abstract_optimizer_state), # type: ignore\n train_dataloader_state=grain.checkpoint.CheckpointRestore(train_iterator), # type: ignore\n val_dataloader_state=grain.checkpoint.CheckpointRestore(val_iterator), # type: ignore\n )\n else:\n restore_args = ocp.args.Composite(\n model_state=ocp.args.PyTreeRestore(abstract_optimizer_state), # type: ignore\n train_dataloader_state=grain.checkpoint.CheckpointRestore(train_iterator), # type: ignore\n )\n restored = checkpoint_manager.restore(\n checkpoint_manager.latest_step(), args=restore_args\n )\n restored_optimizer_state = restored[""model_state""]\n nnx.update(optimizer, restored_optimizer_state)\n train_iterator = restored[""train_dataloader_state""]\n if val_iterator:\n val_iterator = 
restored[""val_dataloader_state""]\n step = checkpoint_manager.latest_step() or 0\n print(f""Restored dataloader and model state from step {step}"")\n else:\n # Restore from pre-trained tokenizer (and LAM)\n rng, _rng = jax.random.split(rng)\n optimizer = restore_genie_components(optimizer, replicated_sharding, _rng, args)\n return step, optimizer, train_iterator, val_iterator, rng\n\n\ndef _calculate_top_k_accuracy(\n token_logits_BTNV: jax.Array,\n video_tokens_BTN: jax.Array,\n mask_BTN: jax.Array,\n k: int,\n) -> jax.Array:\n _, topk_indices_BTNK = jax.lax.top_k(token_logits_BTNV, k)\n topk_correct = jnp.any(\n topk_indices_BTNK == video_tokens_BTN[..., jnp.newaxis], axis=-1\n )\n topk_acc = (mask_BTN * topk_correct).sum() / mask_BTN.sum()\n return topk_acc\n\n\ndef _calculate_step_metrics(\n outputs: dict[str, jax.Array],\n gt: jax.Array,\n num_actions: int,\n num_patch_latents: int,\n) -> tuple[jax.Array, dict]:\n mask_BTN = outputs[""mask""]\n outputs[""token_logits""] = outputs[""token_logits""].astype(jnp.float32)\n ce_loss = optax.softmax_cross_entropy_with_integer_labels(\n outputs[""token_logits""], outputs[""video_tokens""]\n )\n ce_loss = (mask_BTN * ce_loss).sum() / mask_BTN.sum()\n z_val = jax.nn.logsumexp(outputs[""token_logits""], axis=-1)\n z_loss_metric = (mask_BTN * (z_val**2)).sum() / mask_BTN.sum()\n\n masked_token_top_1_acc = _calculate_top_k_accuracy(\n outputs[""token_logits""], outputs[""video_tokens""], mask_BTN, 1\n )\n masked_token_top_2_acc = _calculate_top_k_accuracy(\n outputs[""token_logits""], outputs[""video_tokens""], mask_BTN, 2\n )\n masked_token_top_5_acc = _calculate_top_k_accuracy(\n outputs[""token_logits""], outputs[""video_tokens""], mask_BTN, 5\n )\n masked_token_top_16_acc = _calculate_top_k_accuracy(\n outputs[""token_logits""], outputs[""video_tokens""], mask_BTN, 16\n )\n\n select_probs = jax.nn.softmax(outputs[""token_logits""])\n gt_val = gt.clip(0, 1).reshape(-1, *gt.shape[2:])\n recon = outputs[""recon""].clip(0, 1).reshape(-1, *outputs[""recon""].shape[2:])\n psnr = jnp.asarray(pix.psnr(gt_val, recon)).mean()\n ssim = jnp.asarray(pix.ssim(gt_val, recon)).mean()\n _, index_counts_tokenizer = jnp.unique_counts(\n jnp.ravel(outputs[""video_tokens""]),\n size=num_patch_latents,\n fill_value=0,\n )\n codebook_usage_tokenizer = (index_counts_tokenizer != 0).mean()\n metrics = dict(\n cross_entropy_loss=ce_loss,\n masked_token_top1_accuracy=masked_token_top_1_acc,\n masked_token_top2_accuracy=masked_token_top_2_acc,\n masked_token_top5_accuracy=masked_token_top_5_acc,\n masked_token_top16_accuracy=masked_token_top_16_acc,\n select_logit=outputs[""token_logits""].max(-1).mean(),\n select_p=select_probs.max(-1).mean(),\n entropy=jax.scipy.special.entr(select_probs).sum(-1).mean(),\n z_loss=z_loss_metric,\n psnr=psnr,\n ssim=ssim,\n codebook_usage_tokenizer=codebook_usage_tokenizer,\n )\n if ""lam_indices"" in outputs.keys():\n _, index_counts_lam = jnp.unique_counts(\n jnp.ravel(outputs[""lam_indices""]),\n size=num_actions,\n fill_value=0,\n )\n codebook_usage_lam = (index_counts_lam != 0).mean()\n metrics[""codebook_usage_lam""] = codebook_usage_lam\n return ce_loss, metrics\n\n\ndef main(args: Args) -> None:\n jax.distributed.initialize()\n num_devices = jax.device_count()\n if num_devices == 0:\n raise ValueError(""No JAX devices found."")\n print(f""Running on {num_devices} devices."")\n\n if args.batch_size % num_devices != 0:\n raise ValueError(\n f""Global batch size {args.batch_size} must be divisible by ""\n f""number of devices 
{num_devices}.""\n )\n\n rng = jax.random.key(args.seed)\n\n # --- Initialize model ---\n genie, rng = build_model(args, rng)\n _, params, _ = nnx.split(genie, nnx.Param, ...)\n param_counts = count_parameters_by_component(params)\n\n if args.log and jax.process_index() == 0:\n wandb_init_kwargs = {\n ""entity"": args.entity,\n ""project"": args.project,\n ""name"": args.name,\n ""tags"": args.tags,\n ""group"": ""debug"",\n ""config"": args,\n }\n\n if args.wandb_id:\n wandb_init_kwargs.update(\n {\n ""id"": args.wandb_id,\n ""resume"": ""allow"",\n }\n )\n wandb.init(**wandb_init_kwargs)\n\n wandb.config.update({""model_param_count"": param_counts})\n\n print(""Parameter counts:"")\n print(param_counts)\n\n # --- Initialize optimizer ---\n optimizer = build_optimizer(genie, args)\n del genie\n\n # FIXME: switch to create_hybrid_device_mesh for runs spanning multiple nodes\n _, replicated_sharding, videos_sharding, actions_sharding = build_mesh_and_sharding(\n num_devices\n )\n\n shard_optimizer_states(optimizer, replicated_sharding)\n\n # --- Initialize checkpoint manager ---\n checkpoint_manager = build_checkpoint_manager(args)\n\n # --- Create DataLoaderIterator from dataloader ---\n train_iterator = build_dataloader(args, args.data_dir)\n val_iterator = None\n if args.val_data_dir:\n val_iterator = build_dataloader(args, args.val_data_dir)\n\n # --- Restore checkpoint ---\n step, optimizer, train_iterator, val_iterator, rng = (\n restore_or_initialize_components(\n args,\n checkpoint_manager,\n optimizer,\n train_iterator,\n rng,\n replicated_sharding,\n val_iterator,\n )\n )\n\n # --- Define loss and train step (close over args) ---\n def dynamics_loss_fn(\n model: Genie,\n inputs: dict,\n ) -> tuple[jax.Array, tuple[jax.Array, dict]]:\n gt = jnp.asarray(inputs[""videos""], dtype=jnp.float32) / 255.0\n inputs[""videos""] = gt.astype(args.dtype)\n outputs = model(inputs)\n ce_loss, metrics = _calculate_step_metrics(\n outputs, gt, args.num_actions, args.num_patch_latents\n )\n z_loss = metrics[""z_loss""]\n total_loss = ce_loss + args.z_loss_weight * z_loss\n metrics[""total_loss""] = total_loss\n return total_loss, (outputs[""recon""], metrics)\n\n @nnx.jit\n def val_step(genie: Genie, inputs: dict) -> dict:\n """"""Evaluate model and compute metrics""""""\n genie.eval()\n gt = jnp.asarray(inputs[""videos""], dtype=jnp.float32) / 255.0\n (loss, (recon, metrics)) = dynamics_loss_fn(genie, inputs)\n val_output = {""loss"": loss, ""recon"": recon, ""metrics"": metrics}\n\n # --- Evaluate full frame prediction (sampling) ---\n if args.eval_full_frame:\n inputs[""videos""] = gt.astype(args.dtype)\n tokenizer_outputs = genie.tokenizer.vq_encode(\n inputs[""videos""], training=False\n )\n tokens_full_frame = tokenizer_outputs[""indices""]\n lam_indices_E = None\n if not args.use_gt_actions:\n lam_indices_E = genie.vq_encode(inputs, training=False)\n inputs[""latent_actions""] = lam_indices_E\n inputs[""videos""] = inputs[""videos""][\n :, :-1\n ] # remove last frame for generation\n recon_full_frame, logits_full_frame = genie.sample(\n inputs,\n args.seq_len,\n args.val_temperature,\n args.val_sample_argmax,\n args.val_maskgit_steps,\n )\n # Calculate metrics for the last frame only\n step_outputs = {\n ""recon"": recon_full_frame[:, -1],\n ""token_logits"": logits_full_frame[:, -1],\n ""video_tokens"": tokens_full_frame[:, -1],\n ""mask"": jnp.ones_like(tokens_full_frame[:, -1]),\n }\n if lam_indices_E is not None:\n lam_indices_B = lam_indices_E.reshape((-1, args.seq_len - 1))[:, -1]\n 
step_outputs[""lam_indices""] = lam_indices_B\n\n loss_full_frame, metrics_full_frame = _calculate_step_metrics(\n step_outputs, gt[:, -1], args.num_actions, args.num_patch_latents\n )\n val_output.update(\n {\n ""loss_full_frame"": loss_full_frame,\n ""recon_full_frame"": recon_full_frame,\n ""metrics_full_frame"": metrics_full_frame,\n }\n )\n return val_output\n\n def calculate_validation_metrics(val_dataloader, genie, rng):\n step = 0\n loss_per_step = []\n metrics_per_step = []\n loss_full_frame_per_step = []\n metrics_full_frame_per_step = []\n batch = None\n recon = None\n recon_full_frame = None\n for batch in val_dataloader:\n rng, _rng_mask = jax.random.split(rng, 2)\n batch[""rng""] = _rng_mask\n val_outputs = val_step(genie, batch)\n loss_per_step.append(val_outputs[""loss""])\n metrics_per_step.append(val_outputs[""metrics""])\n recon = val_outputs[""recon""]\n if args.eval_full_frame:\n loss_full_frame_per_step.append(val_outputs[""loss_full_frame""])\n metrics_full_frame_per_step.append(val_outputs[""metrics_full_frame""])\n recon_full_frame = val_outputs[""recon_full_frame""]\n step += 1\n if step > args.val_steps:\n break\n\n if step < args.val_steps:\n print(\n f""Warning: Your validation dataset is too small to make val_steps many steps. Made {step} steps, expected {args.val_steps}""\n )\n\n val_metrics = {\n f""val_{key}"": np.mean([float(m[key]) for m in metrics_per_step])\n for key in metrics_per_step[0].keys()\n }\n val_metrics[""val_loss""] = np.mean(loss_per_step)\n if args.eval_full_frame:\n val_metrics_full_frame = {\n f""val_full_frame_{key}"": np.mean(\n [float(m[key]) for m in metrics_full_frame_per_step]\n )\n for key in metrics_full_frame_per_step[0].keys()\n }\n val_metrics.update(val_metrics_full_frame)\n val_metrics[""val_full_frame_loss""] = np.mean(loss_full_frame_per_step)\n return val_metrics, batch, recon, recon_full_frame\n\n dataloader_val = None\n if val_iterator:\n dataloader_val = (\n {\n ""videos"": jax.make_array_from_process_local_data(\n videos_sharding, elem[""videos""]\n ),\n ""actions"": (\n jax.make_array_from_process_local_data(\n actions_sharding, elem[""actions""]\n )\n if args.use_gt_actions\n else None\n ),\n }\n for elem in val_iterator\n )\n print(f""Starting training from step {step}..."")\n # --- Validation loss ---\n val_results = {}\n if dataloader_val and step % args.val_interval == 0:\n rng, _rng_mask_val = jax.random.split(rng, 2)\n print(""Calculating validation metrics..."")\n val_metrics, val_gt_batch, val_recon, val_recon_full_frame = (\n calculate_validation_metrics(\n dataloader_val, optimizer.model, _rng_mask_val\n )\n )\n print(f""Step {step}, validation loss: {val_metrics['val_loss']}"")\n print(val_metrics)\n\n if checkpoint_manager:\n checkpoint_manager.close()\n\n\nif __name__ == ""__main__"":\n args = tyro.cli(Args)\n main(args)\n",python,tab +2411,1428510,"slurm/jobs/mihir/horeka/doom/resolution60x80/sample.sh",0,0,"",shellscript,tab +2412,1428613,"TERMINAL",0,0,"3",,terminal_output +2413,1429066,"slurm/jobs/mihir/horeka/doom/resolution60x80/sample.sh",457,0,"",shellscript,selection_command +2414,1429647,"TERMINAL",0,0,"4",,terminal_output +2415,1430630,"TERMINAL",0,0,"6",,terminal_output +2416,1431287,"TERMINAL",0,0,"sh slurm/jobs/mihir/horeka/coinrun/ablations/appendix-c.sh",,terminal_output +2417,1431457,"TERMINAL",0,0,"\r\n[?2004l\r\r\n# Log the sbatch script\r\ncat $0\r\n\r\nmodule unload mpi/openmpi/5.0\r\nmodule unload devel/cuda/12.4\r\nsource 
.venv/bin/activate\r\n\r\narray_records_dir_train=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_coinrun/coinrun_episodes_10m_gt_actions_split/train\r\narray_records_dir_val=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_coinrun/coinrun_episodes_10m_gt_actions_split/val\r\n\r\njob_name=$SLURM_JOB_NAME\r\nslurm_job_id=$SLURM_JOB_ID\r\n\r\n# CHECKPOINT_DIR=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/coinrun/maskgit/$job_name/$slurm_job_id\r\n\r\n# main\r\nCHECKPOINT_DIR=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/coinrun/maskgit/train_dynamics_maskgit/3533426\r\n\r\nmkdir -p $CHECKPOINT_DIR\r\n\r\ntokenizer_ckpt_dir=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/big-runs/tokenizer/train_tokenizer_default/3528955\r\n\r\nenv | grep SLURM\r\n\r\nsrun python jasmine/train_dynamics_appendix-c.py \\r\n --save_ckpt \\r\n --restore_ckpt \\r\n --wandb_id $SLURM_JOB_ID \\r\n --ckpt_dir $CHECKPOINT_DIR \\r\n --batch_size=110 \\r\n --image_height=64 \\r\n --image_width=64 \\r\n --patch_size=16 \\r\n --max_lr=3e-5 \\r\n --no-log \\r\n --tokenizer_checkpoint=$tokenizer_ckpt_dir \\r\n --eval_full_frame \\r\n --data_dir $array_records_dir_train \\r\n --val_data_dir $array_records_dir_val \r\n",,terminal_output +2418,1431698,"TERMINAL",0,0,"SLURM_STEP_NUM_TASKS=1\r\nSLURM_JOB_USER=tum_cte0515\r\nSLURM_TASKS_PER_NODE=1\r\nSLURM_JOB_UID=999226\r\nSLURM_TASK_PID=1279162\r\nSLURM_JOB_GPUS=0\r\nSLURM_LOCALID=0\r\nSLURM_SUBMIT_DIR=/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine\r\nSLURMD_NODENAME=hkn0401\r\nSLURM_JOB_START_TIME=1759955924\r\nSLURM_STEP_NODELIST=hkn0401\r\nSLURM_CLUSTER_NAME=hk\r\nSLURM_JOB_END_TIME=1759959524\r\nSLURM_PMI2_SRUN_PORT=34473\r\nSLURM_CPUS_ON_NODE=8\r\nSLURM_JOB_CPUS_PER_NODE=8\r\nSLURM_GPUS_ON_NODE=1\r\nSLURM_GTIDS=0\r\nSLURM_JOB_PARTITION=dev_accelerated\r\nSLURM_TRES_PER_TASK=cpu=8\r\nSLURM_OOM_KILL_STEP=0\r\nSLURM_JOB_NUM_NODES=1\r\nSLURM_STEPID=4294967290\r\nSLURM_JOBID=3554157\r\nSLURM_PTY_PORT=43479\r\nSLURM_JOB_QOS=normal\r\nSLURM_LAUNCH_NODE_IPADDR=10.0.7.201\r\nSLURM_PTY_WIN_ROW=35\r\nSLURM_PMI2_PROC_MAPPING=(vector,(0,1,1))\r\nSLURMD_DEBUG=2\r\nSLURM_PROCID=0\r\nSLURM_CPUS_PER_TASK=8\r\nSLURM_TOPOLOGY_ADDR=hkibb.hkibbi1.hkibbi1e11.hkn0401\r\nSLURM_TOPOLOGY_ADDR_PATTERN=switch.switch.switch.node\r\nSLURM_SRUN_COMM_HOST=10.0.7.201\r\nSLURM_SCRIPT_CONTEXT=prolog_task\r\nSLURM_PTY_WIN_COL=181\r\nSLURM_NODELIST=hkn0401\r\nSLURM_SRUN_COMM_PORT=33897\r\nSLURM_STEP_ID=4294967290\r\nSLURM_JOB_ACCOUNT=hk-project-p0023960\r\nSLURM_PRIO_PROCESS=0\r\nSLURM_NNODES=1\r\nSLURM_SUBMIT_HOST=hkn1993.localdomain\r\nSLURM_JOB_ID=3554157\r\nSLURM_NODEID=0\r\nSLURM_STEP_NUM_NODES=1\r\nSLURM_STEP_TASKS_PER_NODE=1\r\nSLURM_MPI_TYPE=pmi2\r\nSLURM_PMI2_STEP_NODES=hkn0401\r\nSLURM_CONF=/etc/slurm/slurm.conf\r\nSLURM_JOB_NAME=interactive\r\nSLURM_STEP_LAUNCHER_PORT=33897\r\nSLURM_JOB_GID=502226\r\nSLURM_JOB_NODELIST=hkn0401\r\n",,terminal_output +2419,1431699,"TERMINAL",0,0,"7",,terminal_output +2420,1431786,"TERMINAL",0,0,"GpuFreq=control_disabled\r\n",,terminal_output +2421,1432653,"TERMINAL",0,0,"8",,terminal_output +2422,1433759,"TERMINAL",0,0,"9",,terminal_output +2423,1434722,"TERMINAL",0,0,"20",,terminal_output +2424,1435883,"TERMINAL",0,0,"1",,terminal_output +2425,1436774,"TERMINAL",0,0,"2",,terminal_output +2426,1437799,"TERMINAL",0,0,"3",,terminal_output +2427,1438765,"TERMINAL",0,0,"4",,terminal_output +2428,1439869,"TERMINAL",0,0,"5",,terminal_output 
+2429,1440925,"TERMINAL",0,0,"6",,terminal_output +2430,1441903,"TERMINAL",0,0,"7",,terminal_output +2431,1442970,"TERMINAL",0,0,"8",,terminal_output +2432,1443970,"TERMINAL",0,0,"9",,terminal_output +2433,1444954,"TERMINAL",0,0,"30",,terminal_output +2434,1445880,"TERMINAL",0,0,"WARNING:absl:Path /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/coinrun/maskgit/train_dynamics_maskgit/3533426/040000 could not be identified as a temporary checkpoint path using . Got error: Expected AtomicRenameTemporaryPath (/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/coinrun/maskgit/train_dynamics_maskgit/3533426/040000) to end with "".orbax-checkpoint-tmp"".\r\nWARNING:absl:Path /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/coinrun/maskgit/train_dynamics_maskgit/3533426/200000 could not be identified as a temporary checkpoint path using . Got error: Expected AtomicRenameTemporaryPath (/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/coinrun/maskgit/train_dynamics_maskgit/3533426/200000) to end with "".orbax-checkpoint-tmp"".\r\nWARNING:absl:Path /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/coinrun/maskgit/train_dynamics_maskgit/3533426/080000 could not be identified as a temporary checkpoint path using . Got error: Expected AtomicRenameTemporaryPath (/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/coinrun/maskgit/train_dynamics_maskgit/3533426/080000) to end with "".orbax-checkpoint-tmp"".\r\nWARNING:absl:Path /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/coinrun/maskgit/train_dynamics_maskgit/3533426/120000 could not be identified as a temporary checkpoint path using . Got error: Expected AtomicRenameTemporaryPath (/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/coinrun/maskgit/train_dynamics_maskgit/3533426/120000) to end with "".orbax-checkpoint-tmp"".\r\n",,terminal_output +2435,1445958,"TERMINAL",0,0,"WARNING:absl:Path /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/coinrun/maskgit/train_dynamics_maskgit/3533426/140000 could not be identified as a temporary checkpoint path using . Got error: Expected AtomicRenameTemporaryPath (/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/coinrun/maskgit/train_dynamics_maskgit/3533426/140000) to end with "".orbax-checkpoint-tmp"".\r\nWARNING:absl:Path /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/coinrun/maskgit/train_dynamics_maskgit/3533426/180000 could not be identified as a temporary checkpoint path using . Got error: Expected AtomicRenameTemporaryPath (/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/coinrun/maskgit/train_dynamics_maskgit/3533426/180000) to end with "".orbax-checkpoint-tmp"".\r\nWARNING:absl:Path /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/coinrun/maskgit/train_dynamics_maskgit/3533426/060000 could not be identified as a temporary checkpoint path using . Got error: Expected AtomicRenameTemporaryPath (/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/coinrun/maskgit/train_dynamics_maskgit/3533426/060000) to end with "".orbax-checkpoint-tmp"".\r\nWARNING:absl:Path /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/coinrun/maskgit/train_dynamics_maskgit/3533426/198000 could not be identified as a temporary checkpoint path using . 
Got error: Expected AtomicRenameTemporaryPath (/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/coinrun/maskgit/train_dynamics_maskgit/3533426/198000) to end with "".orbax-checkpoint-tmp"".\r\nWARNING:absl:Path /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/coinrun/maskgit/train_dynamics_maskgit/3533426/199000 could not be identified as a temporary checkpoint path using . Got error: Expected AtomicRenameTemporaryPath (/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/coinrun/maskgit/train_dynamics_maskgit/3533426/199000) to end with "".orbax-checkpoint-tmp"".\r\nWARNING:absl:Path /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/coinrun/maskgit/train_dynamics_maskgit/3533426/100000 could not be identified as a temporary checkpoint path using . Got error: Expected AtomicRenameTemporaryPath (/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/coinrun/maskgit/train_dynamics_maskgit/3533426/100000) to end with "".orbax-checkpoint-tmp"".\r\nWARNING:absl:Path /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/coinrun/maskgit/train_dynamics_maskgit/3533426/160000 could not be identified as a temporary checkpoint path using . Got error: Expected AtomicRenameTemporaryPath (/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/coinrun/maskgit/train_dynamics_maskgit/3533426/160000) to end with "".orbax-checkpoint-tmp"".\r\nWARNING:absl:Path /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/coinrun/maskgit/train_dynamics_maskgit/3533426/020000 could not be identified as a temporary checkpoint path using . Got error: Expected AtomicRenameTemporaryPath (/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/coinrun/maskgit/train_dynamics_maskgit/3533426/020000) to end with "".orbax-checkpoint-tmp"".\r\n",,terminal_output +2436,1445968,"TERMINAL",0,0,"1",,terminal_output +2437,1446874,"TERMINAL",0,0,"2",,terminal_output +2438,1447867,"TERMINAL",0,0,"3",,terminal_output +2439,1448814,"TERMINAL",0,0,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/.venv/lib64/python3.12/site-packages/orbax/checkpoint/_src/serialization/type_handlers.py:1269: UserWarning: Sharding info not provided when restoring. Populating sharding info from sharding file. Please note restoration time will be slightly increased due to reading from file. 
Note also that this option is unsafe when restoring on a different topology than the checkpoint was saved with.\r\n warnings.warn(\r\n",,terminal_output +2440,1448913,"TERMINAL",0,0,"4",,terminal_output +2441,1450020,"TERMINAL",0,0,"5",,terminal_output +2442,1451004,"TERMINAL",0,0,"6",,terminal_output +2443,1452031,"TERMINAL",0,0,"7",,terminal_output +2444,1453070,"TERMINAL",0,0,"8",,terminal_output +2445,1454007,"TERMINAL",0,0,"9",,terminal_output +2446,1455041,"TERMINAL",0,0,"40",,terminal_output +2447,1456136,"TERMINAL",0,0,"1",,terminal_output +2448,1456471,"TERMINAL",0,0,"Running on 1 devices.\r\nCounting all components: ['dynamics', 'lam', 'tokenizer']\r\nParameter counts:\r\n{'dynamics': 26555904, 'lam': 17640416, 'tokenizer': 34489696, 'total': 78686016}\r\nRestored dataloader and model state from step 200000\r\nStarting training from step 200000...\r\nCalculating validation metrics...\r\n",,terminal_output +2449,1457081,"TERMINAL",0,0,"2",,terminal_output +2450,1458131,"TERMINAL",0,0,"3",,terminal_output +2451,1459205,"TERMINAL",0,0,"4",,terminal_output +2452,1460207,"TERMINAL",0,0,"5",,terminal_output +2453,1461227,"TERMINAL",0,0,"6",,terminal_output +2454,1462247,"TERMINAL",0,0,"7",,terminal_output +2455,1463373,"TERMINAL",0,0,"8",,terminal_output +2456,1464246,"TERMINAL",0,0,"9",,terminal_output +2457,1465347,"TERMINAL",0,0,"50",,terminal_output +2458,1466308,"TERMINAL",0,0,"1",,terminal_output +2459,1467373,"TERMINAL",0,0,"2",,terminal_output +2460,1468305,"TERMINAL",0,0,"3",,terminal_output +2461,1469324,"TERMINAL",0,0,"4",,terminal_output +2462,1470372,"TERMINAL",0,0,"5",,terminal_output +2463,1471376,"TERMINAL",0,0,"6",,terminal_output +2464,1472412,"TERMINAL",0,0,"7",,terminal_output +2465,1473551,"TERMINAL",0,0,"8",,terminal_output +2466,1474535,"TERMINAL",0,0,"9",,terminal_output +2467,1475505,"TERMINAL",0,0,"2:00",,terminal_output +2468,1476526,"TERMINAL",0,0,"1",,terminal_output +2469,1477679,"TERMINAL",0,0,"2",,terminal_output +2470,1478681,"TERMINAL",0,0,"4",,terminal_output +2471,1479540,"TERMINAL",0,0,"5",,terminal_output +2472,1481000,"TERMINAL",0,0,"6",,terminal_output +2473,1481643,"TERMINAL",0,0,"7",,terminal_output +2474,1482747,"TERMINAL",0,0,"8",,terminal_output +2475,1483637,"TERMINAL",0,0,"9",,terminal_output +2476,1484658,"TERMINAL",0,0,"10",,terminal_output +2477,1485909,"TERMINAL",0,0,"1",,terminal_output +2478,1486692,"TERMINAL",0,0,"2",,terminal_output +2479,1487710,"TERMINAL",0,0,"3",,terminal_output +2480,1488732,"TERMINAL",0,0,"4",,terminal_output +2481,1489786,"TERMINAL",0,0,"5",,terminal_output +2482,1490772,"TERMINAL",0,0,"6",,terminal_output +2483,1491848,"TERMINAL",0,0,"7",,terminal_output +2484,1492816,"TERMINAL",0,0,"8",,terminal_output +2485,1493853,"TERMINAL",0,0,"9",,terminal_output +2486,1494864,"TERMINAL",0,0,"20",,terminal_output +2487,1496124,"TERMINAL",0,0,"1",,terminal_output +2488,1497008,"TERMINAL",0,0,"2",,terminal_output +2489,1498036,"TERMINAL",0,0,"3",,terminal_output +2490,1499113,"TERMINAL",0,0,"4",,terminal_output +2491,1500103,"TERMINAL",0,0,"5",,terminal_output +2492,1500985,"TERMINAL",0,0,"6",,terminal_output +2493,1502042,"TERMINAL",0,0,"7",,terminal_output +2494,1503019,"TERMINAL",0,0,"8",,terminal_output +2495,1504046,"TERMINAL",0,0,"9",,terminal_output +2496,1505061,"TERMINAL",0,0,"30",,terminal_output +2497,1506188,"TERMINAL",0,0,"1",,terminal_output +2498,1507234,"TERMINAL",0,0,"2",,terminal_output +2499,1508137,"TERMINAL",0,0,"3",,terminal_output +2500,1509162,"TERMINAL",0,0,"4",,terminal_output 
+2501,1510169,"TERMINAL",0,0,"5",,terminal_output +2502,1511192,"TERMINAL",0,0,"6",,terminal_output +2503,1512204,"TERMINAL",0,0,"7",,terminal_output +2504,1513223,"TERMINAL",0,0,"8",,terminal_output +2505,1514242,"TERMINAL",0,0,"9",,terminal_output +2506,1515262,"TERMINAL",0,0,"40",,terminal_output +2507,1516284,"TERMINAL",0,0,"1",,terminal_output +2508,1517307,"TERMINAL",0,0,"2",,terminal_output +2509,1518340,"TERMINAL",0,0,"3",,terminal_output +2510,1519360,"TERMINAL",0,0,"4",,terminal_output +2511,1520387,"TERMINAL",0,0,"5",,terminal_output +2512,1521406,"TERMINAL",0,0,"6",,terminal_output +2513,1522429,"TERMINAL",0,0,"7",,terminal_output +2514,1523543,"TERMINAL",0,0,"8",,terminal_output +2515,1524517,"TERMINAL",0,0,"9",,terminal_output +2516,1525513,"TERMINAL",0,0,"51",,terminal_output +2517,1526570,"TERMINAL",0,0,"2",,terminal_output +2518,1527566,"TERMINAL",0,0,"3",,terminal_output +2519,1528776,"TERMINAL",0,0,"4",,terminal_output +2520,1529583,"TERMINAL",0,0,"5",,terminal_output +2521,1530644,"TERMINAL",0,0,"6",,terminal_output +2522,1531664,"TERMINAL",0,0,"7",,terminal_output +2523,1532652,"TERMINAL",0,0,"8",,terminal_output +2524,1533671,"TERMINAL",0,0,"9",,terminal_output +2525,1534692,"TERMINAL",0,0,"3:00",,terminal_output +2526,1535725,"TERMINAL",0,0,"1",,terminal_output +2527,1536754,"TERMINAL",0,0,"2",,terminal_output +2528,1537990,"TERMINAL",0,0,"3",,terminal_output +2529,1538803,"TERMINAL",0,0,"4",,terminal_output +2530,1539842,"TERMINAL",0,0,"5",,terminal_output +2531,1540853,"TERMINAL",0,0,"6",,terminal_output +2532,1541869,"TERMINAL",0,0,"7",,terminal_output +2533,1542892,"TERMINAL",0,0,"8",,terminal_output +2534,1544014,"TERMINAL",0,0,"9",,terminal_output +2535,1544967,"TERMINAL",0,0,"10",,terminal_output +2536,1545989,"TERMINAL",0,0,"1",,terminal_output +2537,1546312,"TERMINAL",0,0,"Step 200000, validation loss: 0.7587440609931946\r\n{'val_codebook_usage_lam': np.float64(1.0), 'val_codebook_usage_tokenizer': np.float64(0.8475988051470589), 'val_cross_entropy_loss': np.float64(0.758744008400861), 'val_entropy': np.float64(0.8734586110302046), 'val_masked_token_top16_accuracy': np.float64(0.9815394200530707), 'val_masked_token_top1_accuracy': np.float64(0.772024557870977), 'val_masked_token_top2_accuracy': np.float64(0.8777434533717585), 'val_masked_token_top5_accuracy': np.float64(0.9463883007273954), 'val_psnr': np.float64(18.16088799869313), 'val_select_logit': np.float64(15.902265399110084), 'val_select_p': np.float64(0.7548150642245424), 'val_ssim': np.float64(0.6326266468739977), 'val_total_loss': np.float64(0.758744008400861), 'val_z_loss': np.float64(310.9372079886642), 'val_loss': np.float32(0.75874406), 'val_full_frame_codebook_usage_lam': np.float64(1.0), 'val_full_frame_codebook_usage_tokenizer': np.float64(0.2629250919117647), 'val_full_frame_cross_entropy_loss': np.float64(20.045630099726658), 'val_full_frame_entropy': np.float64(1.0269374473422181), 'val_full_frame_masked_token_top16_accuracy': np.float64(0.7251559460864347), 'val_full_frame_masked_token_top1_accuracy': np.float64(0.38186273446270064), 'val_full_frame_masked_token_top2_accuracy': np.float64(0.47075533224087135), 'val_full_frame_masked_token_top5_accuracy': np.float64(0.5903074694614784), 'val_full_frame_psnr': np.float64(16.088213939292757), 'val_full_frame_select_logit': np.float64(84.10442965638404), 'val_full_frame_select_p': np.float64(0.7601301389582017), 'val_full_frame_ssim': np.float64(0.5480527725874209), 'val_full_frame_z_loss': np.float64(108920.47901348039), 
'val_full_frame_loss': np.float32(20.045631)}\r\n",,terminal_output +2538,1546688,"TERMINAL",0,0,"W1008 23:03:12.068012 1283551 pjrt_client.cc:1469] WatchJobStateAsync failed for task goo.gle/debugonly job_name: ""jax_worker"": UNAVAILABLE: failed to connect to all addresses; last error: UNKNOWN: ipv4:10.0.1.33:64365: Failed to connect to remote host: Connection refused\r\nAdditional GRPC error information from remote target coordination_service while calling /tensorflow.CoordinationService/WatchJobState:\r\n:UNKNOWN:Error received from peer {grpc_message:""failed to connect to all addresses; last error: UNKNOWN: ipv4:10.0.1.33:64365: Failed to connect to remote host: Connection refused"", grpc_status:14}\r\n",,terminal_output +2539,1547092,"TERMINAL",0,0,"/usr/lib64/python3.12/multiprocessing/resource_tracker.py:254: UserWarning: resource_tracker: There appear to be 20 leaked shared_memory objects to clean up at shutdown\r\n warnings.warn('resource_tracker: There appear to be %d '\r\n",,terminal_output +2540,1547180,"TERMINAL",0,0,"2",,terminal_output +2541,1547335,"TERMINAL",0,0,"]0;tum_cte0515@hkn0401:~/Projects/jasmine[?2004h(jasmine) [tum_cte0515@hkn0401 jasmine]$ ",,terminal_output +2542,1548311,"TERMINAL",0,0,"3",,terminal_output +2543,1549275,"TERMINAL",0,0,"4",,terminal_output +2544,1550164,"TERMINAL",0,0,"5",,terminal_output +2545,1551090,"TERMINAL",0,0,"6",,terminal_output +2546,1552234,"TERMINAL",0,0,"7",,terminal_output +2547,1553417,"TERMINAL",0,0,"8",,terminal_output +2548,1554151,"TERMINAL",0,0,"9",,terminal_output +2549,1555221,"TERMINAL",0,0,"20",,terminal_output +2550,1556256,"TERMINAL",0,0,"1",,terminal_output +2551,1557415,"TERMINAL",0,0,"2",,terminal_output +2552,1558737,"TERMINAL",0,0,"3",,terminal_output +2553,1559399,"TERMINAL",0,0,"4",,terminal_output +2554,1560406,"TERMINAL",0,0,"5",,terminal_output +2555,1561548,"TERMINAL",0,0,"6",,terminal_output +2556,1562732,"TERMINAL",0,0,"7",,terminal_output +2557,1563502,"TERMINAL",0,0,"8",,terminal_output +2558,1564798,"TERMINAL",0,0,"9",,terminal_output +2559,1565769,"TERMINAL",0,0,"30",,terminal_output +2560,1566695,"TERMINAL",0,0,"1",,terminal_output +2561,1567415,"TERMINAL",0,0,"2",,terminal_output +2562,1568604,"TERMINAL",0,0,"3",,terminal_output +2563,1569458,"TERMINAL",0,0,"4",,terminal_output +2564,1570593,"TERMINAL",0,0,"5",,terminal_output +2565,1571502,"TERMINAL",0,0,"7",,terminal_output +2566,1572522,"TERMINAL",0,0,"8",,terminal_output +2567,1573542,"TERMINAL",0,0,"9",,terminal_output +2568,1574569,"TERMINAL",0,0,"40",,terminal_output +2569,1575701,"TERMINAL",0,0,"1",,terminal_output +2570,1576705,"TERMINAL",0,0,"2",,terminal_output +2571,1577677,"TERMINAL",0,0,"3",,terminal_output +2572,1578755,"TERMINAL",0,0,"4",,terminal_output +2573,1579664,"TERMINAL",0,0,"5",,terminal_output +2574,1580686,"TERMINAL",0,0,"6",,terminal_output +2575,1581740,"TERMINAL",0,0,"7",,terminal_output +2576,1582752,"TERMINAL",0,0,"8",,terminal_output +2577,1583760,"TERMINAL",0,0,"9",,terminal_output +2578,1584800,"TERMINAL",0,0,"50",,terminal_output +2579,1585806,"TERMINAL",0,0,"1",,terminal_output +2580,1586837,"TERMINAL",0,0,"2",,terminal_output +2581,1587761,"appendix_c_nodes.md",0,0,"",markdown,tab +2582,1588010,"TERMINAL",0,0,"3",,terminal_output +2583,1588719,"appendix_c_nodes.md",0,0,"\n",markdown,content +2584,1588911,"TERMINAL",0,0,"4",,terminal_output +2585,1589917,"TERMINAL",0,0,"5",,terminal_output +2586,1591008,"TERMINAL",0,0,"6",,terminal_output +2587,1591515,"appendix_c_nodes.md",1,0,"A",markdown,content 
+2588,1591517,"appendix_c_nodes.md",2,0,"",markdown,selection_keyboard +2589,1591638,"appendix_c_nodes.md",2,0,"d",markdown,content +2590,1591640,"appendix_c_nodes.md",3,0,"",markdown,selection_keyboard +2591,1591774,"appendix_c_nodes.md",3,0,"d",markdown,content +2592,1591776,"appendix_c_nodes.md",4,0,"",markdown,selection_keyboard +2593,1591944,"appendix_c_nodes.md",4,0,"i",markdown,content +2594,1591945,"appendix_c_nodes.md",5,0,"",markdown,selection_keyboard +2595,1592102,"TERMINAL",0,0,"7",,terminal_output +2596,1592297,"appendix_c_nodes.md",5,0,"t",markdown,content +2597,1592299,"appendix_c_nodes.md",6,0,"",markdown,selection_keyboard +2598,1592419,"appendix_c_nodes.md",6,0,"i",markdown,content +2599,1592420,"appendix_c_nodes.md",7,0,"",markdown,selection_keyboard +2600,1592559,"appendix_c_nodes.md",7,0,"v",markdown,content +2601,1592560,"appendix_c_nodes.md",8,0,"",markdown,selection_keyboard +2602,1592747,"appendix_c_nodes.md",8,0,"e",markdown,content +2603,1592748,"appendix_c_nodes.md",9,0,"",markdown,selection_keyboard +2604,1592844,"appendix_c_nodes.md",9,0," ",markdown,content +2605,1592845,"appendix_c_nodes.md",10,0,"",markdown,selection_keyboard +2606,1593002,"TERMINAL",0,0,"8",,terminal_output +2607,1593073,"appendix_c_nodes.md",10,0,"e",markdown,content +2608,1593074,"appendix_c_nodes.md",11,0,"",markdown,selection_keyboard +2609,1593101,"appendix_c_nodes.md",11,0,"m",markdown,content +2610,1593103,"appendix_c_nodes.md",12,0,"",markdown,selection_keyboard +2611,1593326,"appendix_c_nodes.md",12,0,"b",markdown,content +2612,1593327,"appendix_c_nodes.md",13,0,"",markdown,selection_keyboard +2613,1593443,"appendix_c_nodes.md",13,0,"e",markdown,content +2614,1593444,"appendix_c_nodes.md",14,0,"",markdown,selection_keyboard +2615,1593662,"appendix_c_nodes.md",14,0,"d",markdown,content +2616,1593664,"appendix_c_nodes.md",15,0,"",markdown,selection_keyboard +2617,1593851,"appendix_c_nodes.md",15,0,"d",markdown,content +2618,1593852,"appendix_c_nodes.md",16,0,"",markdown,selection_keyboard +2619,1593929,"appendix_c_nodes.md",16,0,"i",markdown,content +2620,1593931,"appendix_c_nodes.md",17,0,"",markdown,selection_keyboard +2621,1593993,"appendix_c_nodes.md",17,0,"n",markdown,content +2622,1593994,"appendix_c_nodes.md",18,0,"",markdown,selection_keyboard +2623,1593994,"TERMINAL",0,0,"9",,terminal_output +2624,1594102,"appendix_c_nodes.md",18,0,"g",markdown,content +2625,1594103,"appendix_c_nodes.md",19,0,"",markdown,selection_keyboard +2626,1594265,"appendix_c_nodes.md",19,0,"s",markdown,content +2627,1594266,"appendix_c_nodes.md",20,0,"",markdown,selection_keyboard +2628,1595035,"TERMINAL",0,0,"4:00",,terminal_output +2629,1595470,"appendix_c_nodes.md",1,0,"",markdown,selection_command +2630,1596038,"TERMINAL",0,0,"1",,terminal_output +2631,1596112,"appendix_c_nodes.md",1,0,"#",markdown,content +2632,1596114,"appendix_c_nodes.md",2,0,"",markdown,selection_keyboard +2633,1596278,"appendix_c_nodes.md",2,0,"#",markdown,content +2634,1596280,"appendix_c_nodes.md",3,0,"",markdown,selection_keyboard +2635,1596363,"appendix_c_nodes.md",3,0," ",markdown,content +2636,1596364,"appendix_c_nodes.md",4,0,"",markdown,selection_keyboard +2637,1597074,"TERMINAL",0,0,"2",,terminal_output +2638,1597438,"appendix_c_nodes.md",23,0,"\n",markdown,content +2639,1597885,"appendix_c_nodes.md",24,0,"\n",markdown,content +2640,1598097,"TERMINAL",0,0,"3",,terminal_output +2641,1598725,"appendix_c_nodes.md",25,0,"#",markdown,content +2642,1598726,"appendix_c_nodes.md",26,0,"",markdown,selection_keyboard 
+2643,1598885,"appendix_c_nodes.md",26,0,"#",markdown,content +2644,1598886,"appendix_c_nodes.md",27,0,"",markdown,selection_keyboard +2645,1599165,"TERMINAL",0,0,"4",,terminal_output +2646,1600154,"TERMINAL",0,0,"5",,terminal_output +2647,1600742,"appendix_c_nodes.md",27,0,"#",markdown,content +2648,1600744,"appendix_c_nodes.md",28,0,"",markdown,selection_keyboard +2649,1600930,"appendix_c_nodes.md",28,0," ",markdown,content +2650,1600932,"appendix_c_nodes.md",29,0,"",markdown,selection_keyboard +2651,1601165,"TERMINAL",0,0,"6",,terminal_output +2652,1601621,"appendix_c_nodes.md",29,0,"n",markdown,content +2653,1601623,"appendix_c_nodes.md",30,0,"",markdown,selection_keyboard +2654,1601785,"appendix_c_nodes.md",30,0,"o",markdown,content +2655,1601787,"appendix_c_nodes.md",31,0,"",markdown,selection_keyboard +2656,1601877,"appendix_c_nodes.md",31,0," ",markdown,content +2657,1601878,"appendix_c_nodes.md",32,0,"",markdown,selection_keyboard +2658,1602002,"appendix_c_nodes.md",32,0,"n",markdown,content +2659,1602003,"appendix_c_nodes.md",33,0,"",markdown,selection_keyboard +2660,1602141,"appendix_c_nodes.md",33,0,"o",markdown,content +2661,1602143,"appendix_c_nodes.md",34,0,"",markdown,selection_keyboard +2662,1602226,"TERMINAL",0,0,"7",,terminal_output +2663,1602272,"appendix_c_nodes.md",34,0,"i",markdown,content +2664,1602273,"appendix_c_nodes.md",35,0,"",markdown,selection_keyboard +2665,1602315,"appendix_c_nodes.md",35,0,"s",markdown,content +2666,1602316,"appendix_c_nodes.md",36,0,"",markdown,selection_keyboard +2667,1602429,"appendix_c_nodes.md",36,0,"e",markdown,content +2668,1602430,"appendix_c_nodes.md",37,0,"",markdown,selection_keyboard +2669,1602651,"appendix_c_nodes.md",37,0,"\n",markdown,content +2670,1603046,"appendix_c_nodes.md",38,0,"Step 200000, validation loss: 0.7587440609931946\n{'val_codebook_usage_lam': np.float64(1.0), 'val_codebook_usage_tokenizer': np.float64(0.8475988051470589), 'val_cross_entropy_loss': np.float64(0.758744008400861), 'val_entropy': np.float64(0.8734586110302046), 'val_masked_token_top16_accuracy': np.float64(0.9815394200530707), 'val_masked_token_top1_accuracy': np.float64(0.772024557870977), 'val_masked_token_top2_accuracy': np.float64(0.8777434533717585), 'val_masked_token_top5_accuracy': np.float64(0.9463883007273954), 'val_psnr': np.float64(18.16088799869313), 'val_select_logit': np.float64(15.902265399110084), 'val_select_p': np.float64(0.7548150642245424), 'val_ssim': np.float64(0.6326266468739977), 'val_total_loss': np.float64(0.758744008400861), 'val_z_loss': np.float64(310.9372079886642), 'val_loss': np.float32(0.75874406), 'val_full_frame_codebook_usage_lam': np.float64(1.0), 'val_full_frame_codebook_usage_tokenizer': np.float64(0.2629250919117647), 'val_full_frame_cross_entropy_loss': np.float64(20.045630099726658), 'val_full_frame_entropy': np.float64(1.0269374473422181), 'val_full_frame_masked_token_top16_accuracy': np.float64(0.7251559460864347), 'val_full_frame_masked_token_top1_accuracy': np.float64(0.38186273446270064), 'val_full_frame_masked_token_top2_accuracy': np.float64(0.47075533224087135), 'val_full_frame_masked_token_top5_accuracy': np.float64(0.5903074694614784), 'val_full_frame_psnr': np.float64(16.088213939292757), 'val_full_frame_select_logit': np.float64(84.10442965638404), 'val_full_frame_select_p': np.float64(0.7601301389582017), 'val_full_frame_ssim': np.float64(0.5480527725874209), 'val_full_frame_z_loss': np.float64(108920.47901348039), 'val_full_frame_loss': np.float32(20.045631)}",markdown,content 
+2671,1603349,"TERMINAL",0,0,"8",,terminal_output +2672,1604306,"TERMINAL",0,0,"9",,terminal_output +2673,1605256,"TERMINAL",0,0,"10",,terminal_output +2674,1606274,"TERMINAL",0,0,"1",,terminal_output +2675,1606844,"TERMINAL",0,0,"sh slurm/jobs/mihir/horeka/coinrun/ablations/appendix-c.sh",,terminal_output +2676,1607288,"TERMINAL",0,0,"2",,terminal_output +2677,1608297,"TERMINAL",0,0,"3",,terminal_output +2678,1609319,"TERMINAL",0,0,"4",,terminal_output +2679,1610416,"TERMINAL",0,0,"5",,terminal_output +2680,1611380,"TERMINAL",0,0,"6",,terminal_output +2681,1612401,"TERMINAL",0,0,"7",,terminal_output +2682,1613448,"TERMINAL",0,0,"8",,terminal_output +2683,1614441,"TERMINAL",0,0,"9",,terminal_output +2684,1615471,"TERMINAL",0,0,"20",,terminal_output +2685,1616754,"TERMINAL",0,0,"1",,terminal_output +2686,1617606,"jasmine/train_dynamics_appendix-c.py",0,0,"",python,tab +2687,1617751,"TERMINAL",0,0,"3",,terminal_output +2688,1618646,"TERMINAL",0,0,"4",,terminal_output +2689,1619843,"TERMINAL",0,0,"5",,terminal_output +2690,1620666,"TERMINAL",0,0,"6",,terminal_output +2691,1621626,"TERMINAL",0,0,"7",,terminal_output +2692,1622629,"TERMINAL",0,0,"8",,terminal_output +2693,1623722,"TERMINAL",0,0,"9",,terminal_output +2694,1624721,"TERMINAL",0,0,"30",,terminal_output +2695,1625714,"TERMINAL",0,0,"1",,terminal_output +2696,1626866,"TERMINAL",0,0,"2",,terminal_output +2697,1627735,"TERMINAL",0,0,"3",,terminal_output +2698,1628406,"jasmine/train_dynamics_appendix-c.py",19340,0,"",python,selection_mouse +2699,1628776,"TERMINAL",0,0,"4",,terminal_output +2700,1629920,"TERMINAL",0,0,"5",,terminal_output +2701,1630154,"jasmine/train_dynamics_appendix-c.py",22104,0,"",python,selection_command +2702,1630963,"TERMINAL",0,0,"6",,terminal_output +2703,1631497,"jasmine/train_dynamics_appendix-c.py",22122,0,"",python,selection_mouse +2704,1631699,"jasmine/train_dynamics_appendix-c.py",19323,0,"",python,selection_command +2705,1631912,"TERMINAL",0,0,"7",,terminal_output +2706,1632982,"TERMINAL",0,0,"8",,terminal_output +2707,1634071,"TERMINAL",0,0,"9",,terminal_output +2708,1634960,"TERMINAL",0,0,"40",,terminal_output +2709,1635969,"TERMINAL",0,0,"1",,terminal_output +2710,1636666,"jasmine/train_dynamics_appendix-c.py",17326,0,"",python,selection_mouse +2711,1636990,"TERMINAL",0,0,"2",,terminal_output +2712,1637852,"jasmine/train_dynamics_appendix-c.py",17340,0,"",python,selection_mouse +2713,1638064,"TERMINAL",0,0,"3",,terminal_output +2714,1638106,"jasmine/train_dynamics_appendix-c.py",16507,0,"",python,selection_command +2715,1639069,"TERMINAL",0,0,"4",,terminal_output +2716,1640168,"TERMINAL",0,0,"5",,terminal_output +2717,1641094,"TERMINAL",0,0,"6",,terminal_output +2718,1641739,"jasmine/train_dynamics_appendix-c.py",17356,0,"",python,selection_mouse +2719,1642156,"TERMINAL",0,0,"7",,terminal_output +2720,1642336,"jasmine/train_dynamics_appendix-c.py",17357,0,"",python,selection_mouse +2721,1642946,"jasmine/train_dynamics_appendix-c.py",17362,0,"",python,selection_mouse +2722,1643113,"jasmine/train_dynamics_appendix-c.py",17358,6,"inputs",python,selection_mouse +2723,1643118,"TERMINAL",0,0,"8",,terminal_output +2724,1644188,"TERMINAL",0,0,"9",,terminal_output +2725,1644255,"jasmine/train_dynamics_appendix-c.py",17275,0,"",python,selection_mouse +2726,1645219,"TERMINAL",0,0,"50",,terminal_output +2727,1646188,"TERMINAL",0,0,"1",,terminal_output +2728,1647201,"TERMINAL",0,0,"2",,terminal_output +2729,1648208,"jasmine/train_dynamics_appendix-c.py",16619,0,"",python,selection_mouse 
+2730,1648213,"jasmine/train_dynamics_appendix-c.py",16618,0,"",python,selection_command +2731,1648262,"TERMINAL",0,0,"3",,terminal_output +2732,1649225,"TERMINAL",0,0,"4",,terminal_output +2733,1650324,"TERMINAL",0,0,"5",,terminal_output +2734,1651074,"jasmine/train_dynamics_appendix-c.py",16718,0,"",python,selection_mouse +2735,1651271,"TERMINAL",0,0,"6",,terminal_output +2736,1652292,"TERMINAL",0,0,"7",,terminal_output +2737,1653315,"TERMINAL",0,0,"8",,terminal_output +2738,1654369,"TERMINAL",0,0,"9",,terminal_output +2739,1655359,"TERMINAL",0,0,"5:00",,terminal_output +2740,1656438,"TERMINAL",0,0,"1",,terminal_output +2741,1657514,"TERMINAL",0,0,"2",,terminal_output +2742,1658450,"TERMINAL",0,0,"3",,terminal_output +2743,1659474,"TERMINAL",0,0,"4",,terminal_output +2744,1660496,"TERMINAL",0,0,"5",,terminal_output +2745,1661518,"TERMINAL",0,0,"7",,terminal_output +2746,1662016,"jasmine/models/tokenizer.py",0,0,"from typing import Dict, Tuple\n\nimport flax.nnx as nnx\nimport jax.numpy as jnp\nimport jax\n\nfrom utils.preprocess import patchify, unpatchify\nfrom utils.nn import STTransformer, VectorQuantizer\n\n\nclass TokenizerVQVAE(nnx.Module):\n """"""\n ST-ViVit VQ-VAE\n\n Dimension keys:\n B: batch size\n T: sequence length\n N: number of patches per frame\n L: latent dimension\n D: B * T * N\n H: height\n W: width\n C: number of channels\n P: patch token dimension (patch_size^2 * C)\n """"""\n\n def __init__(\n self,\n in_dim: int,\n model_dim: int,\n ffn_dim: int,\n latent_dim: int,\n num_latents: int,\n patch_size: int,\n num_blocks: int,\n num_heads: int,\n dropout: float,\n codebook_dropout: float,\n param_dtype: jnp.dtype,\n dtype: jnp.dtype,\n use_flash_attention: bool,\n rngs: nnx.Rngs,\n ):\n self.in_dim = in_dim\n self.model_dim = model_dim\n self.ffn_dim = ffn_dim\n self.latent_dim = latent_dim\n self.num_latents = num_latents\n self.patch_size = patch_size\n self.num_blocks = num_blocks\n self.num_heads = num_heads\n self.dropout = dropout\n self.codebook_dropout = codebook_dropout\n self.param_dtype = param_dtype\n self.dtype = dtype\n self.use_flash_attention = use_flash_attention\n\n self.encoder = STTransformer(\n self.in_dim * self.patch_size**2,\n self.model_dim,\n self.ffn_dim,\n self.latent_dim,\n self.num_blocks,\n self.num_heads,\n self.dropout,\n self.param_dtype,\n self.dtype,\n use_flash_attention=self.use_flash_attention,\n rngs=rngs,\n )\n self.vq = VectorQuantizer(\n self.latent_dim,\n self.num_latents,\n self.codebook_dropout,\n self.dtype,\n rngs=rngs,\n )\n self.out_dim = self.in_dim * self.patch_size**2\n self.decoder = STTransformer(\n self.latent_dim,\n self.model_dim,\n self.ffn_dim,\n self.out_dim,\n self.num_blocks,\n self.num_heads,\n self.dropout,\n self.param_dtype,\n self.dtype,\n use_flash_attention=self.use_flash_attention,\n rngs=rngs,\n )\n\n def __call__(\n self, batch: Dict[str, jax.Array], training: bool = True\n ) -> Dict[str, jax.Array]:\n H, W = batch[""videos""].shape[2:4]\n videos_BTHWC = batch[""videos""]\n outputs = self.vq_encode(videos_BTHWC, training)\n z_q_BTNL = outputs[""z_q""]\n recon_BTHWC = self.decoder(z_q_BTNL)\n recon_BTHWC = recon_BTHWC.astype(jnp.float32)\n recon_BTHWC = nnx.sigmoid(recon_BTHWC)\n recon_BTHWC = recon_BTHWC.astype(self.dtype)\n recon_BTHWC = unpatchify(recon_BTHWC, self.patch_size, H, W)\n outputs[""recon""] = recon_BTHWC\n return outputs\n\n def vq_encode(\n self, videos: jax.Array, training: bool = True\n ) -> Dict[str, jax.Array]:\n # --- Preprocess + encode ---\n B, T = videos.shape[:2]\n 
patch_BTNP = patchify(videos, self.patch_size)\n N = patch_BTNP.shape[2]\n x_BTNL = self.encoder(patch_BTNP)\n\n # --- Vector quantize ---\n x_DL = x_BTNL.reshape(B * T * N, self.latent_dim)\n z_q_DL, z_DL, emb_DL, indices_D = self.vq(x_DL, training)\n z_q_BTNL = z_q_DL.reshape(B, T, N, self.latent_dim)\n indices_BTN = indices_D.reshape(B, T, N)\n return dict(z_q=z_q_BTNL, z=z_DL, emb=emb_DL, indices=indices_BTN)\n\n def decode(self, indices_BTN: jax.Array, video_hw: Tuple[int, int]) -> jax.Array:\n z_BTNL = self.vq.codebook[indices_BTN]\n recon_BTNP = self.decoder(z_BTNL)\n recon_BTNP = recon_BTNP.astype(jnp.float32)\n recon_BTNP = nnx.sigmoid(recon_BTNP)\n recon_BTNP = recon_BTNP.astype(self.dtype)\n return unpatchify(recon_BTNP, self.patch_size, *video_hw)\n",python,tab +2747,1662543,"TERMINAL",0,0,"8",,terminal_output +2748,1663003,"jasmine/train_dynamics_appendix-c.py",0,0,"",python,tab +2749,1663555,"TERMINAL",0,0,"9",,terminal_output +2750,1664690,"TERMINAL",0,0,"10",,terminal_output +2751,1665666,"TERMINAL",0,0,"1",,terminal_output +2752,1666176,"jasmine/models/dynamics.py",0,0,"from typing import Dict\n\nimport jax\nimport jax.numpy as jnp\nimport flax.nnx as nnx\n\nfrom utils.nn import STTransformer, Transformer\n\n\nclass DynamicsMaskGIT(nnx.Module):\n """"""\n MaskGIT dynamics model\n\n Dimension keys:\n B: batch size\n T: sequence length\n N: number of patches per frame\n L: latent dimension\n V: vocabulary size (number of latents)\n """"""\n\n def __init__(\n self,\n model_dim: int,\n ffn_dim: int,\n num_latents: int,\n latent_action_dim: int,\n num_blocks: int,\n num_heads: int,\n dropout: float,\n mask_limit: float,\n param_dtype: jnp.dtype,\n dtype: jnp.dtype,\n use_flash_attention: bool,\n rngs: nnx.Rngs,\n ):\n self.model_dim = model_dim\n self.ffn_dim = ffn_dim\n self.num_latents = num_latents\n self.latent_action_dim = latent_action_dim\n self.num_blocks = num_blocks\n self.num_heads = num_heads\n self.dropout = dropout\n self.mask_limit = mask_limit\n self.param_dtype = param_dtype\n self.dtype = dtype\n self.use_flash_attention = use_flash_attention\n\n self.transformer = STTransformer(\n self.model_dim,\n self.model_dim,\n self.ffn_dim,\n self.num_latents,\n self.num_blocks,\n self.num_heads,\n self.dropout,\n self.param_dtype,\n self.dtype,\n use_flash_attention=self.use_flash_attention,\n rngs=rngs,\n )\n self.patch_embed = nnx.Embed(self.num_latents, self.model_dim, rngs=rngs)\n self.mask_token = nnx.Param(\n nnx.initializers.lecun_uniform()(rngs.params(), (1, 1, 1, self.model_dim))\n )\n self.action_up = nnx.Linear(\n self.latent_action_dim,\n self.model_dim,\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n rngs=rngs,\n )\n\n def __call__(\n self,\n batch: Dict[str, jax.Array],\n ) -> tuple[jax.Array, jax.Array]:\n # --- Mask videos ---\n video_tokens_BTN = batch[""video_tokens""]\n latent_actions_BTm11L = batch[""latent_actions""]\n vid_embed_BTNM = self.patch_embed(video_tokens_BTN)\n\n batch_size = vid_embed_BTNM.shape[0]\n _rng_prob, *_rngs_mask = jax.random.split(batch[""mask_rng""], batch_size + 1)\n mask_prob = jax.random.uniform(\n _rng_prob, shape=(batch_size,), minval=self.mask_limit\n )\n per_sample_shape = vid_embed_BTNM.shape[1:-1]\n mask = jax.vmap(\n lambda rng, prob: jax.random.bernoulli(rng, prob, per_sample_shape),\n in_axes=(0, 0),\n )(jnp.asarray(_rngs_mask), mask_prob)\n mask = mask.at[:, 0].set(False)\n vid_embed_BTNM = jnp.where(\n jnp.expand_dims(mask, -1), self.mask_token.value, vid_embed_BTNM\n )\n\n # --- Predict transition ---\n 
act_embed_BTm11M = self.action_up(latent_actions_BTm11L)\n padded_act_embed_BT1M = jnp.pad(\n act_embed_BTm11M, ((0, 0), (1, 0), (0, 0), (0, 0))\n )\n padded_act_embed_BTNM = jnp.broadcast_to(\n padded_act_embed_BT1M, vid_embed_BTNM.shape\n )\n vid_embed_BTNM += padded_act_embed_BTNM\n logits_BTNV = self.transformer(vid_embed_BTNM)\n return logits_BTNV, mask\n\n\nclass DynamicsCausal(nnx.Module):\n """"""Causal dynamics model""""""\n\n def __init__(\n self,\n model_dim: int,\n ffn_dim: int,\n num_latents: int,\n latent_action_dim: int,\n num_blocks: int,\n num_heads: int,\n dropout: float,\n param_dtype: jnp.dtype,\n dtype: jnp.dtype,\n use_flash_attention: bool,\n decode: bool,\n rngs: nnx.Rngs,\n ):\n self.model_dim = model_dim\n self.ffn_dim = ffn_dim\n self.num_latents = num_latents\n self.latent_action_dim = latent_action_dim\n self.num_blocks = num_blocks\n self.num_heads = num_heads\n self.dropout = dropout\n self.param_dtype = param_dtype\n self.dtype = dtype\n self.use_flash_attention = use_flash_attention\n self.decode = decode\n\n self.transformer = Transformer(\n self.model_dim,\n self.model_dim,\n self.ffn_dim,\n self.num_latents,\n self.num_blocks,\n self.num_heads,\n self.dropout,\n self.param_dtype,\n self.dtype,\n use_flash_attention=self.use_flash_attention,\n decode=self.decode,\n rngs=rngs,\n )\n self.patch_embed = nnx.Embed(self.num_latents, self.model_dim, rngs=rngs)\n self.action_up = nnx.Linear(\n self.latent_action_dim,\n self.model_dim,\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n rngs=rngs,\n )\n\n def __call__(\n self,\n batch: Dict[str, jax.Array],\n ) -> tuple[jax.Array, jax.Array]:\n video_tokens_BTN = batch[""video_tokens""]\n latent_actions_BTm11L = batch[""latent_actions""]\n vid_embed_BTNM = self.patch_embed(video_tokens_BTN)\n act_embed_BTm11M = self.action_up(latent_actions_BTm11L)\n padded_act_embed_BT1M = jnp.pad(\n act_embed_BTm11M, ((0, 0), (1, 0), (0, 0), (0, 0))\n )\n vid_embed_BTNp1M = jnp.concatenate(\n [padded_act_embed_BT1M, vid_embed_BTNM], axis=2\n )\n logits_BTNp1V = self.transformer(vid_embed_BTNp1M)\n logits_BTNV = logits_BTNp1V[:, :, :-1]\n return logits_BTNV, jnp.ones_like(video_tokens_BTN)\n",python,tab +2753,1666658,"TERMINAL",0,0,"2",,terminal_output +2754,1667743,"TERMINAL",0,0,"3",,terminal_output +2755,1668704,"TERMINAL",0,0,"4",,terminal_output +2756,1669341,"jasmine/models/dynamics.py",2927,0,"",python,selection_mouse +2757,1669730,"TERMINAL",0,0,"5",,terminal_output +2758,1669946,"jasmine/models/dynamics.py",2926,0,"",python,selection_mouse +2759,1669948,"jasmine/models/dynamics.py",2925,0,"",python,selection_command +2760,1670746,"TERMINAL",0,0,"6",,terminal_output +2761,1671762,"TERMINAL",0,0,"7",,terminal_output +2762,1672400,"jasmine/models/dynamics.py",2133,0,"",python,selection_mouse +2763,1672510,"jasmine/models/dynamics.py",2125,16,"video_tokens_BTN",python,selection_mouse +2764,1673170,"TERMINAL",0,0,"8",,terminal_output +2765,1673993,"TERMINAL",0,0,"9",,terminal_output +2766,1674197,"jasmine/models/dynamics.py",2984,0,"",python,selection_mouse +2767,1674824,"TERMINAL",0,0,"20",,terminal_output +2768,1675070,"jasmine/models/dynamics.py",3008,0,"",python,selection_mouse +2769,1675707,"jasmine/models/dynamics.py",3001,0,"",python,selection_mouse +2770,1675861,"TERMINAL",0,0,"1",,terminal_output +2771,1676965,"jasmine/models/dynamics.py",3188,0,"",python,selection_mouse +2772,1677013,"TERMINAL",0,0,"2",,terminal_output +2773,1677887,"TERMINAL",0,0,"3",,terminal_output 
+2774,1678253,"jasmine/models/dynamics.py",3276,0,"",python,selection_mouse +2775,1678372,"jasmine/models/dynamics.py",3268,14,"vid_embed_BTNM",python,selection_mouse +2776,1678907,"TERMINAL",0,0,"4",,terminal_output +2777,1680191,"TERMINAL",0,0,"5",,terminal_output +2778,1681293,"jasmine/models/dynamics.py",2221,0,"",python,selection_mouse +2779,1681298,"jasmine/models/dynamics.py",2220,0,"",python,selection_command +2780,1681299,"TERMINAL",0,0,"6",,terminal_output +2781,1681567,"jasmine/models/dynamics.py",2220,1,"]",python,selection_mouse +2782,1681568,"jasmine/models/dynamics.py",2221,0,"",python,selection_command +2783,1681568,"jasmine/models/dynamics.py",2221,59,"\n vid_embed_BTNM = self.patch_embed(video_tokens_BTN",python,selection_mouse +2784,1681568,"jasmine/models/dynamics.py",2221,60,"\n vid_embed_BTNM = self.patch_embed(video_tokens_BTN)",python,selection_mouse +2785,1681794,"jasmine/models/dynamics.py",2281,0,"",python,selection_mouse +2786,1681795,"jasmine/models/dynamics.py",2280,0,"",python,selection_command +2787,1682207,"TERMINAL",0,0,"7",,terminal_output +2788,1682848,"jasmine/models/dynamics.py",2281,0,"\n ",python,content +2789,1683115,"TERMINAL",0,0,"8",,terminal_output +2790,1683161,"jasmine/models/dynamics.py",2290,0,"\n ",python,content +2791,1683161,"jasmine/models/dynamics.py",2282,8,"",python,content +2792,1684105,"jasmine/models/dynamics.py",2291,0,"v",python,content +2793,1684106,"jasmine/models/dynamics.py",2292,0,"",python,selection_keyboard +2794,1684200,"jasmine/models/dynamics.py",2292,0,"i",python,content +2795,1684202,"jasmine/models/dynamics.py",2293,0,"",python,selection_keyboard +2796,1684310,"jasmine/models/dynamics.py",2293,0,"d",python,content +2797,1684312,"jasmine/models/dynamics.py",2294,0,"",python,selection_keyboard +2798,1684315,"TERMINAL",0,0,"9",,terminal_output +2799,1684631,"jasmine/models/dynamics.py",2294,0,"_",python,content +2800,1684633,"jasmine/models/dynamics.py",2295,0,"",python,selection_keyboard +2801,1685095,"TERMINAL",0,0,"30",,terminal_output +2802,1685164,"jasmine/models/dynamics.py",2295,0,"e",python,content +2803,1685166,"jasmine/models/dynamics.py",2296,0,"",python,selection_keyboard +2804,1685281,"jasmine/models/dynamics.py",2296,0,"m",python,content +2805,1685283,"jasmine/models/dynamics.py",2297,0,"",python,selection_keyboard +2806,1685567,"jasmine/models/dynamics.py",2297,0,"b",python,content +2807,1685569,"jasmine/models/dynamics.py",2298,0,"",python,selection_keyboard +2808,1686093,"TERMINAL",0,0,"1",,terminal_output +2809,1686838,"jasmine/models/dynamics.py",2291,7,"vid_embed_BTNM",python,content +2810,1687217,"TERMINAL",0,0,"2",,terminal_output +2811,1687924,"jasmine/models/dynamics.py",2291,14,"",python,content +2812,1688220,"TERMINAL",0,0,"3",,terminal_output +2813,1689216,"TERMINAL",0,0,"4",,terminal_output +2814,1690193,"TERMINAL",0,0,"5",,terminal_output +2815,1691221,"TERMINAL",0,0,"6",,terminal_output +2816,1692233,"TERMINAL",0,0,"7",,terminal_output +2817,1693236,"TERMINAL",0,0,"8",,terminal_output +2818,1694259,"TERMINAL",0,0,"9",,terminal_output +2819,1695350,"TERMINAL",0,0,"40",,terminal_output +2820,1696048,"jasmine/models/dynamics.py",2283,0,"",python,selection_command +2821,1696357,"TERMINAL",0,0,"1",,terminal_output +2822,1697485,"TERMINAL",0,0,"2",,terminal_output +2823,1698348,"TERMINAL",0,0,"3",,terminal_output +2824,1699365,"TERMINAL",0,0,"4",,terminal_output +2825,1700390,"TERMINAL",0,0,"5",,terminal_output +2826,1700613,"jasmine/models/dynamics.py",2283,0," noise_stddev = 0.1 # Standard 
deviation for the Gaussian noise\n",python,content +2827,1701467,"TERMINAL",0,0,"6",,terminal_output +2828,1701597,"jasmine/models/dynamics.py",2355,0," noise = jax.random.normal(rngs.params(), vid_embed_BTNM.shape) * noise_stddev\n",python,content +2829,1701758,"jasmine/models/dynamics.py",2441,0," vid_embed_BTNM += noise\n",python,content +2830,1701767,"jasmine/models/dynamics.py",2473,9,"",python,content +2831,1702563,"TERMINAL",0,0,"7",,terminal_output +2832,1703603,"TERMINAL",0,0,"8",,terminal_output +2833,1704696,"TERMINAL",0,0,"9",,terminal_output +2834,1704746,"jasmine/models/dynamics.py",2391,0,"",python,selection_mouse +2835,1705785,"TERMINAL",0,0,"50",,terminal_output +2836,1706789,"TERMINAL",0,0,"2",,terminal_output +2837,1707689,"TERMINAL",0,0,"3",,terminal_output +2838,1708664,"TERMINAL",0,0,"4",,terminal_output +2839,1708728,"jasmine/models/dynamics.py",2571,0,"",python,selection_mouse +2840,1708905,"jasmine/models/dynamics.py",2569,5,"batch",python,selection_mouse +2841,1709066,"jasmine/models/dynamics.py",2569,6,"batch[",python,selection_mouse +2842,1709109,"jasmine/models/dynamics.py",2569,7,"batch[""",python,selection_mouse +2843,1709110,"jasmine/models/dynamics.py",2569,15,"batch[""mask_rng",python,selection_mouse +2844,1709263,"jasmine/models/dynamics.py",2569,16,"batch[""mask_rng""",python,selection_mouse +2845,1709312,"jasmine/models/dynamics.py",2569,17,"batch[""mask_rng""]",python,selection_mouse +2846,1709362,"jasmine/models/dynamics.py",2569,18,"batch[""mask_rng""],",python,selection_mouse +2847,1709585,"TERMINAL",0,0,"5",,terminal_output +2848,1709700,"jasmine/models/dynamics.py",2569,17,"batch[""mask_rng""]",python,selection_mouse +2849,1710710,"TERMINAL",0,0,"6",,terminal_output +2850,1711778,"TERMINAL",0,0,"7",,terminal_output +2851,1712902,"jasmine/models/dynamics.py",2402,0,"",python,selection_mouse +2852,1713054,"TERMINAL",0,0,"8",,terminal_output +2853,1713643,"jasmine/models/dynamics.py",2400,2,"",python,content +2854,1713735,"TERMINAL",0,0,"9",,terminal_output +2855,1713818,"jasmine/models/dynamics.py",2394,6,"",python,content +2856,1713954,"jasmine/models/dynamics.py",2393,1,"",python,content +2857,1714294,"jasmine/models/dynamics.py",2389,4,"",python,content +2858,1714595,"jasmine/models/dynamics.py",2389,0,"batch[""mask_rng""]",python,content +2859,1714720,"TERMINAL",0,0,"6:00",,terminal_output +2860,1715739,"TERMINAL",0,0,"1",,terminal_output +2861,1716726,"TERMINAL",0,0,"2",,terminal_output +2862,1717758,"TERMINAL",0,0,"3",,terminal_output +2863,1718764,"TERMINAL",0,0,"4",,terminal_output +2864,1718900,"jasmine/models/dynamics.py",2354,0,"",python,selection_mouse +2865,1719503,"jasmine/models/dynamics.py",2477,0,"",python,selection_mouse +2866,1719784,"TERMINAL",0,0,"5",,terminal_output +2867,1719995,"jasmine/models/dynamics.py",2476,0,"",python,selection_mouse +2868,1720913,"TERMINAL",0,0,"6",,terminal_output +2869,1721927,"TERMINAL",0,0,"7",,terminal_output +2870,1722853,"TERMINAL",0,0,"8",,terminal_output +2871,1723138,"TERMINAL",0,0,"\r\n[?2004l\r\r\n# Log the sbatch script\r\ncat $0\r\n\r\nmodule unload mpi/openmpi/5.0\r\nmodule unload devel/cuda/12.4\r\nsource .venv/bin/activate\r\n\r\narray_records_dir_train=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_coinrun/coinrun_episodes_10m_gt_actions_split/train\r\narray_records_dir_val=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_coinrun/coinrun_episodes_10m_gt_actions_split/val\r\n\r\njob_name=$SLURM_JOB_NAME\r\nslurm_job_id=$SLURM_JOB_ID\r\n\r\n# 
CHECKPOINT_DIR=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/coinrun/maskgit/$job_name/$slurm_job_id\r\n\r\n# main\r\nCHECKPOINT_DIR=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/coinrun/maskgit/train_dynamics_maskgit/3533426\r\n\r\nmkdir -p $CHECKPOINT_DIR\r\n\r\ntokenizer_ckpt_dir=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/big-runs/tokenizer/train_tokenizer_default/3528955\r\n\r\nenv | grep SLURM\r\n\r\nsrun python jasmine/train_dynamics_appendix-c.py \\r\n --save_ckpt \\r\n --restore_ckpt \\r\n --wandb_id $SLURM_JOB_ID \\r\n --ckpt_dir $CHECKPOINT_DIR \\r\n --batch_size=110 \\r\n --image_height=64 \\r\n --image_width=64 \\r\n --patch_size=16 \\r\n --max_lr=3e-5 \\r\n --no-log \\r\n --tokenizer_checkpoint=$tokenizer_ckpt_dir \\r\n --eval_full_frame \\r\n --data_dir $array_records_dir_train \\r\n --val_data_dir $array_records_dir_val \r\n",,terminal_output +2872,1723257,"TERMINAL",0,0,"SLURM_STEP_NUM_TASKS=1\r\nSLURM_JOB_USER=tum_cte0515\r\nSLURM_TASKS_PER_NODE=1\r\nSLURM_JOB_UID=999226\r\nSLURM_TASK_PID=1279162\r\nSLURM_JOB_GPUS=0\r\nSLURM_LOCALID=0\r\nSLURM_SUBMIT_DIR=/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine\r\nSLURMD_NODENAME=hkn0401\r\nSLURM_JOB_START_TIME=1759955924\r\nSLURM_STEP_NODELIST=hkn0401\r\nSLURM_CLUSTER_NAME=hk\r\nSLURM_JOB_END_TIME=1759959524\r\nSLURM_PMI2_SRUN_PORT=34473\r\nSLURM_CPUS_ON_NODE=8\r\nSLURM_JOB_CPUS_PER_NODE=8\r\nSLURM_GPUS_ON_NODE=1\r\nSLURM_GTIDS=0\r\nSLURM_JOB_PARTITION=dev_accelerated\r\nSLURM_TRES_PER_TASK=cpu=8\r\nSLURM_OOM_KILL_STEP=0\r\nSLURM_JOB_NUM_NODES=1\r\nSLURM_STEPID=4294967290\r\nSLURM_JOBID=3554157\r\nSLURM_PTY_PORT=43479\r\nSLURM_JOB_QOS=normal\r\nSLURM_LAUNCH_NODE_IPADDR=10.0.7.201\r\nSLURM_PTY_WIN_ROW=35\r\nSLURM_PMI2_PROC_MAPPING=(vector,(0,1,1))\r\nSLURMD_DEBUG=2\r\nSLURM_PROCID=0\r\nSLURM_CPUS_PER_TASK=8\r\nSLURM_TOPOLOGY_ADDR=hkibb.hkibbi1.hkibbi1e11.hkn0401\r\nSLURM_TOPOLOGY_ADDR_PATTERN=switch.switch.switch.node\r\nSLURM_SRUN_COMM_HOST=10.0.7.201\r\nSLURM_SCRIPT_CONTEXT=prolog_task\r\nSLURM_PTY_WIN_COL=181\r\nSLURM_NODELIST=hkn0401\r\nSLURM_SRUN_COMM_PORT=33897\r\nSLURM_STEP_ID=4294967290\r\nSLURM_JOB_ACCOUNT=hk-project-p0023960\r\nSLURM_PRIO_PROCESS=0\r\nSLURM_NNODES=1\r\nSLURM_SUBMIT_HOST=hkn1993.localdomain\r\nSLURM_JOB_ID=3554157\r\nSLURM_NODEID=0\r\nSLURM_STEP_NUM_NODES=1\r\nSLURM_STEP_TASKS_PER_NODE=1\r\nSLURM_MPI_TYPE=pmi2\r\nSLURM_PMI2_STEP_NODES=hkn0401\r\nSLURM_CONF=/etc/slurm/slurm.conf\r\nSLURM_JOB_NAME=interactive\r\nSLURM_STEP_LAUNCHER_PORT=33897\r\nSLURM_JOB_GID=502226\r\nSLURM_JOB_NODELIST=hkn0401\r\n",,terminal_output +2873,1723401,"TERMINAL",0,0,"GpuFreq=control_disabled\r\n",,terminal_output +2874,1723881,"TERMINAL",0,0,"9",,terminal_output +2875,1724895,"TERMINAL",0,0,"10",,terminal_output +2876,1725971,"TERMINAL",0,0,"1",,terminal_output +2877,1727031,"TERMINAL",0,0,"2",,terminal_output +2878,1728222,"jasmine/train_dynamics_appendix-c.py",0,0,"",python,tab +2879,1728254,"TERMINAL",0,0,"3",,terminal_output +2880,1729228,"TERMINAL",0,0,"4",,terminal_output +2881,1730111,"TERMINAL",0,0,"5",,terminal_output +2882,1731338,"jasmine/models/dynamics.py",0,0,"",python,tab +2883,1731470,"TERMINAL",0,0,"6",,terminal_output +2884,1732232,"TERMINAL",0,0,"7",,terminal_output +2885,1733065,"TERMINAL",0,0,"8",,terminal_output +2886,1734100,"TERMINAL",0,0,"9",,terminal_output +2887,1735162,"TERMINAL",0,0,"20",,terminal_output +2888,1736245,"TERMINAL",0,0,"1",,terminal_output +2889,1737511,"TERMINAL",0,0,"2",,terminal_output 
+2890,1738558,"TERMINAL",0,0,"3",,terminal_output +2891,1739751,"TERMINAL",0,0,"4",,terminal_output +2892,1740702,"TERMINAL",0,0,"5",,terminal_output +2893,1741004,"TERMINAL",0,0,"WARNING:absl:Path /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/coinrun/maskgit/train_dynamics_maskgit/3533426/040000 could not be identified as a temporary checkpoint path using . Got error: Expected AtomicRenameTemporaryPath (/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/coinrun/maskgit/train_dynamics_maskgit/3533426/040000) to end with "".orbax-checkpoint-tmp"".\r\nWARNING:absl:Path /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/coinrun/maskgit/train_dynamics_maskgit/3533426/200000 could not be identified as a temporary checkpoint path using . Got error: Expected AtomicRenameTemporaryPath (/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/coinrun/maskgit/train_dynamics_maskgit/3533426/200000) to end with "".orbax-checkpoint-tmp"".\r\nWARNING:absl:Path /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/coinrun/maskgit/train_dynamics_maskgit/3533426/080000 could not be identified as a temporary checkpoint path using . Got error: Expected AtomicRenameTemporaryPath (/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/coinrun/maskgit/train_dynamics_maskgit/3533426/080000) to end with "".orbax-checkpoint-tmp"".\r\nWARNING:absl:Path /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/coinrun/maskgit/train_dynamics_maskgit/3533426/120000 could not be identified as a temporary checkpoint path using . Got error: Expected AtomicRenameTemporaryPath (/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/coinrun/maskgit/train_dynamics_maskgit/3533426/120000) to end with "".orbax-checkpoint-tmp"".\r\nWARNING:absl:Path /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/coinrun/maskgit/train_dynamics_maskgit/3533426/140000 could not be identified as a temporary checkpoint path using . Got error: Expected AtomicRenameTemporaryPath (/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/coinrun/maskgit/train_dynamics_maskgit/3533426/140000) to end with "".orbax-checkpoint-tmp"".\r\nWARNING:absl:Path /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/coinrun/maskgit/train_dynamics_maskgit/3533426/180000 could not be identified as a temporary checkpoint path using . Got error: Expected AtomicRenameTemporaryPath (/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/coinrun/maskgit/train_dynamics_maskgit/3533426/180000) to end with "".orbax-checkpoint-tmp"".\r\nWARNING:absl:Path /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/coinrun/maskgit/train_dynamics_maskgit/3533426/060000 could not be identified as a temporary checkpoint path using . Got error: Expected AtomicRenameTemporaryPath (/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/coinrun/maskgit/train_dynamics_maskgit/3533426/060000) to end with "".orbax-checkpoint-tmp"".\r\nWARNING:absl:Path /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/coinrun/maskgit/train_dynamics_maskgit/3533426/198000 could not be identified as a temporary checkpoint path using . 
Got error: Expected AtomicRenameTemporaryPath (/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/coinrun/maskgit/train_dynamics_maskgit/3533426/198000) to end with "".orbax-checkpoint-tmp"".\r\nWARNING:absl:Path /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/coinrun/maskgit/train_dynamics_maskgit/3533426/199000 could not be identified as a temporary checkpoint path using . Got error: Expected AtomicRenameTemporaryPath (/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/coinrun/maskgit/train_dynamics_maskgit/3533426/199000) to end with "".orbax-checkpoint-tmp"".\r\nWARNING:absl:Path /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/coinrun/maskgit/train_dynamics_maskgit/3533426/100000 could not be identified as a temporary checkpoint path using . Got error: Expected AtomicRenameTemporaryPath (/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/coinrun/maskgit/train_dynamics_maskgit/3533426/100000) to end with "".orbax-checkpoint-tmp"".\r\nWARNING:absl:Path /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/coinrun/maskgit/train_dynamics_maskgit/3533426/160000 could not be identified as a temporary checkpoint path using . Got error: Expected AtomicRenameTemporaryPath (/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/coinrun/maskgit/train_dynamics_maskgit/3533426/160000) to end with "".orbax-checkpoint-tmp"".\r\nWARNING:absl:Path /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/coinrun/maskgit/train_dynamics_maskgit/3533426/020000 could not be identified as a temporary checkpoint path using . Got error: Expected AtomicRenameTemporaryPath (/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/coinrun/maskgit/train_dynamics_maskgit/3533426/020000) to end with "".orbax-checkpoint-tmp"".\r\n",,terminal_output +2894,1741423,"TERMINAL",0,0,"6",,terminal_output +2895,1742456,"TERMINAL",0,0,"7",,terminal_output +2896,1743377,"TERMINAL",0,0,"8",,terminal_output +2897,1744085,"TERMINAL",0,0,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/.venv/lib64/python3.12/site-packages/orbax/checkpoint/_src/serialization/type_handlers.py:1269: UserWarning: Sharding info not provided when restoring. Populating sharding info from sharding file. Please note restoration time will be slightly increased due to reading from file. 
Note also that this option is unsafe when restoring on a different topology than the checkpoint was saved with.\r\n warnings.warn(\r\n",,terminal_output +2898,1744376,"TERMINAL",0,0,"9",,terminal_output +2899,1745397,"TERMINAL",0,0,"30",,terminal_output +2900,1746557,"TERMINAL",0,0,"1",,terminal_output +2901,1747624,"TERMINAL",0,0,"2",,terminal_output +2902,1748666,"TERMINAL",0,0,"3",,terminal_output +2903,1749962,"TERMINAL",0,0,"4",,terminal_output +2904,1750624,"TERMINAL",0,0,"5",,terminal_output +2905,1751742,"TERMINAL",0,0,"6",,terminal_output +2906,1751983,"TERMINAL",0,0,"Running on 1 devices.\r\nCounting all components: ['dynamics', 'lam', 'tokenizer']\r\nParameter counts:\r\n{'dynamics': 26555904, 'lam': 17640416, 'tokenizer': 34489696, 'total': 78686016}\r\nRestored dataloader and model state from step 200000\r\nStarting training from step 200000...\r\nCalculating validation metrics...\r\n",,terminal_output +2907,1752614,"TERMINAL",0,0,"7",,terminal_output +2908,1753576,"TERMINAL",0,0,"9",,terminal_output +2909,1754557,"TERMINAL",0,0,"40",,terminal_output +2910,1755578,"TERMINAL",0,0,"1",,terminal_output +2911,1756617,"TERMINAL",0,0,"2",,terminal_output +2912,1757668,"TERMINAL",0,0,"3",,terminal_output +2913,1758803,"TERMINAL",0,0,"4",,terminal_output +2914,1759818,"TERMINAL",0,0,"5",,terminal_output +2915,1760854,"TERMINAL",0,0,"6",,terminal_output +2916,1761759,"TERMINAL",0,0,"7",,terminal_output +2917,1762809,"TERMINAL",0,0,"8",,terminal_output +2918,1763772,"TERMINAL",0,0,"9",,terminal_output +2919,1763901,"slurm/jobs/mihir/horeka/coinrun/ablations/appendix-c.sh",0,0,"",shellscript,tab +2920,1764991,"TERMINAL",0,0,"50",,terminal_output +2921,1765353,"slurm/jobs/mihir/horeka/coinrun/ablations/appendix-c.sh",1171,0,"",shellscript,selection_mouse +2922,1765354,"slurm/jobs/mihir/horeka/coinrun/ablations/appendix-c.sh",1170,0,"",shellscript,selection_command +2923,1766037,"TERMINAL",0,0,"1",,terminal_output +2924,1766207,"slurm/jobs/mihir/horeka/coinrun/ablations/appendix-c.sh",1171,0,"\n ",shellscript,content +2925,1766781,"slurm/jobs/mihir/horeka/coinrun/ablations/appendix-c.sh",1176,0,"-",shellscript,content +2926,1766782,"slurm/jobs/mihir/horeka/coinrun/ablations/appendix-c.sh",1177,0,"",shellscript,selection_keyboard +2927,1766838,"TERMINAL",0,0,"2",,terminal_output +2928,1766928,"slurm/jobs/mihir/horeka/coinrun/ablations/appendix-c.sh",1177,0,"-",shellscript,content +2929,1766928,"slurm/jobs/mihir/horeka/coinrun/ablations/appendix-c.sh",1178,0,"",shellscript,selection_keyboard +2930,1767216,"slurm/jobs/mihir/horeka/coinrun/ablations/appendix-c.sh",1178,0,"n",shellscript,content +2931,1767217,"slurm/jobs/mihir/horeka/coinrun/ablations/appendix-c.sh",1179,0,"",shellscript,selection_keyboard +2932,1767417,"slurm/jobs/mihir/horeka/coinrun/ablations/appendix-c.sh",1179,0,"o",shellscript,content +2933,1767418,"slurm/jobs/mihir/horeka/coinrun/ablations/appendix-c.sh",1180,0,"",shellscript,selection_keyboard +2934,1767697,"slurm/jobs/mihir/horeka/coinrun/ablations/appendix-c.sh",1180,0,"_",shellscript,content +2935,1767697,"slurm/jobs/mihir/horeka/coinrun/ablations/appendix-c.sh",1181,0,"",shellscript,selection_keyboard +2936,1767825,"slurm/jobs/mihir/horeka/coinrun/ablations/appendix-c.sh",1181,0,"e",shellscript,content +2937,1767826,"slurm/jobs/mihir/horeka/coinrun/ablations/appendix-c.sh",1182,0,"",shellscript,selection_keyboard +2938,1767921,"TERMINAL",0,0,"3",,terminal_output +2939,1768024,"slurm/jobs/mihir/horeka/coinrun/ablations/appendix-c.sh",1182,0,"v",shellscript,content 
+2940,1768025,"slurm/jobs/mihir/horeka/coinrun/ablations/appendix-c.sh",1183,0,"",shellscript,selection_keyboard +2941,1768253,"slurm/jobs/mihir/horeka/coinrun/ablations/appendix-c.sh",1183,0,"a",shellscript,content +2942,1768254,"slurm/jobs/mihir/horeka/coinrun/ablations/appendix-c.sh",1184,0,"",shellscript,selection_keyboard +2943,1768254,"slurm/jobs/mihir/horeka/coinrun/ablations/appendix-c.sh",1184,0,"l",shellscript,content +2944,1768254,"slurm/jobs/mihir/horeka/coinrun/ablations/appendix-c.sh",1185,0,"",shellscript,selection_keyboard +2945,1768509,"slurm/jobs/mihir/horeka/coinrun/ablations/appendix-c.sh",1185,0,"_",shellscript,content +2946,1768510,"slurm/jobs/mihir/horeka/coinrun/ablations/appendix-c.sh",1186,0,"",shellscript,selection_keyboard +2947,1768728,"slurm/jobs/mihir/horeka/coinrun/ablations/appendix-c.sh",1186,0,"f",shellscript,content +2948,1768729,"slurm/jobs/mihir/horeka/coinrun/ablations/appendix-c.sh",1187,0,"",shellscript,selection_keyboard +2949,1768874,"TERMINAL",0,0,"4",,terminal_output +2950,1768930,"slurm/jobs/mihir/horeka/coinrun/ablations/appendix-c.sh",1187,0,"u",shellscript,content +2951,1768931,"slurm/jobs/mihir/horeka/coinrun/ablations/appendix-c.sh",1188,0,"",shellscript,selection_keyboard +2952,1769141,"slurm/jobs/mihir/horeka/coinrun/ablations/appendix-c.sh",1188,0,"l",shellscript,content +2953,1769142,"slurm/jobs/mihir/horeka/coinrun/ablations/appendix-c.sh",1189,0,"",shellscript,selection_keyboard +2954,1769258,"slurm/jobs/mihir/horeka/coinrun/ablations/appendix-c.sh",1189,0,"l",shellscript,content +2955,1769259,"slurm/jobs/mihir/horeka/coinrun/ablations/appendix-c.sh",1190,0,"",shellscript,selection_keyboard +2956,1769592,"slurm/jobs/mihir/horeka/coinrun/ablations/appendix-c.sh",1190,0,"_",shellscript,content +2957,1769593,"slurm/jobs/mihir/horeka/coinrun/ablations/appendix-c.sh",1191,0,"",shellscript,selection_keyboard +2958,1770231,"TERMINAL",0,0,"5",,terminal_output +2959,1770462,"slurm/jobs/mihir/horeka/coinrun/ablations/appendix-c.sh",1191,0,"f",shellscript,content +2960,1770464,"slurm/jobs/mihir/horeka/coinrun/ablations/appendix-c.sh",1192,0,"",shellscript,selection_keyboard +2961,1771170,"slurm/jobs/mihir/horeka/coinrun/ablations/appendix-c.sh",1192,0,"r",shellscript,content +2962,1771171,"slurm/jobs/mihir/horeka/coinrun/ablations/appendix-c.sh",1193,0,"",shellscript,selection_keyboard +2963,1771171,"slurm/jobs/mihir/horeka/coinrun/ablations/appendix-c.sh",1193,0,"a",shellscript,content +2964,1771172,"slurm/jobs/mihir/horeka/coinrun/ablations/appendix-c.sh",1194,0,"",shellscript,selection_keyboard +2965,1771172,"slurm/jobs/mihir/horeka/coinrun/ablations/appendix-c.sh",1194,0,"m",shellscript,content +2966,1771173,"slurm/jobs/mihir/horeka/coinrun/ablations/appendix-c.sh",1195,0,"",shellscript,selection_keyboard +2967,1771173,"slurm/jobs/mihir/horeka/coinrun/ablations/appendix-c.sh",1195,0,"e",shellscript,content +2968,1771173,"slurm/jobs/mihir/horeka/coinrun/ablations/appendix-c.sh",1196,0,"",shellscript,selection_keyboard +2969,1771174,"TERMINAL",0,0,"6",,terminal_output +2970,1771246,"slurm/jobs/mihir/horeka/coinrun/ablations/appendix-c.sh",1195,0,"",shellscript,selection_command +2971,1771579,"slurm/jobs/mihir/horeka/coinrun/ablations/appendix-c.sh",1194,0,"",shellscript,selection_command +2972,1772125,"slurm/jobs/mihir/horeka/coinrun/ablations/appendix-c.sh",1193,0,"",shellscript,selection_command +2973,1772145,"slurm/jobs/mihir/horeka/coinrun/ablations/appendix-c.sh",1192,0,"",shellscript,selection_command 
+2974,1772146,"slurm/jobs/mihir/horeka/coinrun/ablations/appendix-c.sh",1191,0,"",shellscript,selection_command +2975,1772146,"TERMINAL",0,0,"7",,terminal_output +2976,1772239,"slurm/jobs/mihir/horeka/coinrun/ablations/appendix-c.sh",1190,1,"",shellscript,content +2977,1772972,"TERMINAL",0,0,"8",,terminal_output +2978,1773123,"slurm/jobs/mihir/horeka/coinrun/ablations/appendix-c.sh",1190,0,"-",shellscript,content +2979,1773124,"slurm/jobs/mihir/horeka/coinrun/ablations/appendix-c.sh",1191,0,"",shellscript,selection_keyboard +2980,1774011,"TERMINAL",0,0,"9",,terminal_output +2981,1774092,"slurm/jobs/mihir/horeka/coinrun/ablations/appendix-c.sh",1179,0,"",shellscript,selection_command +2982,1774311,"slurm/jobs/mihir/horeka/coinrun/ablations/appendix-c.sh",1180,0,"",shellscript,selection_command +2983,1774621,"slurm/jobs/mihir/horeka/coinrun/ablations/appendix-c.sh",1180,1,"",shellscript,content +2984,1775008,"TERMINAL",0,0,"7:00",,terminal_output +2985,1775205,"slurm/jobs/mihir/horeka/coinrun/ablations/appendix-c.sh",1180,0,"-",shellscript,content +2986,1775206,"slurm/jobs/mihir/horeka/coinrun/ablations/appendix-c.sh",1181,0,"",shellscript,selection_keyboard +2987,1775382,"slurm/jobs/mihir/horeka/coinrun/ablations/appendix-c.sh",1182,0,"",shellscript,selection_command +2988,1775560,"slurm/jobs/mihir/horeka/coinrun/ablations/appendix-c.sh",1183,0,"",shellscript,selection_command +2989,1775913,"slurm/jobs/mihir/horeka/coinrun/ablations/appendix-c.sh",1184,0,"",shellscript,selection_command +2990,1776040,"TERMINAL",0,0,"1",,terminal_output +2991,1776116,"slurm/jobs/mihir/horeka/coinrun/ablations/appendix-c.sh",1185,0,"",shellscript,selection_command +2992,1776362,"slurm/jobs/mihir/horeka/coinrun/ablations/appendix-c.sh",1185,1,"",shellscript,content +2993,1776700,"slurm/jobs/mihir/horeka/coinrun/ablations/appendix-c.sh",1185,0,"-",shellscript,content +2994,1776701,"slurm/jobs/mihir/horeka/coinrun/ablations/appendix-c.sh",1186,0,"",shellscript,selection_keyboard +2995,1777123,"TERMINAL",0,0,"2",,terminal_output +2996,1777652,"slurm/jobs/mihir/horeka/coinrun/ablations/appendix-c.sh",1196,0," ",shellscript,content +2997,1777653,"slurm/jobs/mihir/horeka/coinrun/ablations/appendix-c.sh",1197,0,"",shellscript,selection_keyboard +2998,1778036,"slurm/jobs/mihir/horeka/coinrun/ablations/appendix-c.sh",1197,0,"\",shellscript,content +2999,1778037,"slurm/jobs/mihir/horeka/coinrun/ablations/appendix-c.sh",1198,0,"",shellscript,selection_keyboard +3000,1778099,"TERMINAL",0,0,"3",,terminal_output +3001,1778163,"slurm/jobs/mihir/horeka/coinrun/ablations/appendix-c.sh",1197,0,"",shellscript,selection_command +3002,1779114,"TERMINAL",0,0,"4",,terminal_output +3003,1779988,"TERMINAL",0,0,"^Csrun: interrupt (one more within 1 sec to abort)\r\nsrun: StepId=3554157.5 task 0: running\r\n",,terminal_output +3004,1780132,"TERMINAL",0,0,"^Csrun: sending Ctrl-C to StepId=3554157.5\r\nProcess SpawnProcess-3:\r\nProcess SpawnProcess-2:\r\nProcess SpawnProcess-8:\r\nProcess SpawnProcess-4:\r\nProcess SpawnProcess-6:\r\nsrun: forcing job termination\r\nProcess SpawnProcess-5:\r\nProcess SpawnProcess-1:\r\nProcess SpawnProcess-7:\r\nTraceback (most recent call last):\r\nTraceback (most recent call last):\r\n File ""/usr/lib64/python3.12/multiprocessing/process.py"", line 314, in _bootstrap\r\n self.run()\r\n File ""/usr/lib64/python3.12/multiprocessing/process.py"", line 108, in run\r\n self._target(*self._args, **self._kwargs)\r\n File ""/usr/lib64/python3.12/multiprocessing/process.py"", line 314, in _bootstrap\r\n 
self.run()\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/.venv/lib64/python3.12/site-packages/grain/_src/python/grain_pool.py"", line 261, in _worker_loop\r\n if not multiprocessing_common.add_element_to_queue( # pytype: disable=wrong-arg-types\r\n ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/.venv/lib64/python3.12/site-packages/grain/_src/python/multiprocessing_common.py"", line 54, in add_element_to_queue\r\n elements_queue.put(element, timeout=_QUEUE_WAIT_TIMEOUT_SECONDS)\r\n File ""/usr/lib64/python3.12/multiprocessing/process.py"", line 108, in run\r\n self._target(*self._args, **self._kwargs)\r\n File ""/usr/lib64/python3.12/multiprocessing/queues.py"", line 89, in put\r\n if not self._sem.acquire(block, timeout):\r\n ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/.venv/lib64/python3.12/site-packages/grain/_src/python/grain_pool.py"", line 261, in _worker_loop\r\n if not multiprocessing_common.add_element_to_queue( # pytype: disable=wrong-arg-types\r\n ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^\r\nKeyboardInterrupt\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/.venv/lib64/python3.12/site-packages/grain/_src/python/multiprocessing_common.py"", line 54, in add_element_to_queue\r\n elements_queue.put(element, timeout=_QUEUE_WAIT_TIMEOUT_SECONDS)\r\n File ""/usr/lib64/python3.12/multiprocessing/queues.py"", line 89, in put\r\n if not self._sem.acquire(block, timeout):\r\n ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^\r\nKeyboardInterrupt\r\nsrun: Job step aborted: Waiting up to 32 seconds for job step to finish.\r\nTraceback (most recent call last):\r\n File ""/usr/lib64/python3.12/multiprocessing/process.py"", line 314, in _bootstrap\r\n self.run()\r\n File ""/usr/lib64/python3.12/multiprocessing/process.py"", line 108, in run\r\n self._target(*self._args, **self._kwargs)\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/.venv/lib64/python3.12/site-packages/grain/_src/python/grain_pool.py"", line 261, in _worker_loop\r\n if not multiprocessing_common.add_element_to_queue( # pytype: disable=wrong-arg-types\r\n ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^\r\nTraceback (most recent call last):\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/.venv/lib64/python3.12/site-packages/grain/_src/python/multiprocessing_common.py"", line 54, in add_element_to_queue\r\n elements_queue.put(element, timeout=_QUEUE_WAIT_TIMEOUT_SECONDS)\r\n File ""/usr/lib64/python3.12/multiprocessing/queues.py"", line 89, in put\r\n if not self._sem.acquire(block, timeout):\r\n ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^\r\nKeyboardInterrupt\r\nTraceback (most recent call last):\r\nTraceback (most recent call last):\r\n File ""/usr/lib64/python3.12/multiprocessing/process.py"", line 314, in _bootstrap\r\n self.run()\r\n File ""/usr/lib64/python3.12/multiprocessing/process.py"", line 108, in run\r\n self._target(*self._args, **self._kwargs)\r\n File ""/usr/lib64/python3.12/multiprocessing/process.py"", line 314, in _bootstrap\r\n self.run()\r\n File ""/usr/lib64/python3.12/multiprocessing/process.py"", line 108, in run\r\n self._target(*self._args, **self._kwargs)\r\n File 
""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/.venv/lib64/python3.12/site-packages/grain/_src/python/grain_pool.py"", line 261, in _worker_loop\r\n if not multiprocessing_common.add_element_to_queue( # pytype: disable=wrong-arg-types\r\n ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/.venv/lib64/python3.12/site-packages/grain/_src/python/multiprocessing_common.py"", line 54, in add_element_to_queue\r\n elements_queue.put(element, timeout=_QUEUE_WAIT_TIMEOUT_SECONDS)\r\n File ""/usr/lib64/python3.12/multiprocessing/queues.py"", line 89, in put\r\n if not self._sem.acquire(block, timeout):\r\n ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^\r\nKeyboardInterrupt\r\nTraceback (most recent call last):\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/.venv/lib64/python3.12/site-packages/grain/_src/python/grain_pool.py"", line 261, in _worker_loop\r\n if not multiprocessing_common.add_element_to_queue( # pytype: disable=wrong-arg-types\r\n ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/.venv/lib64/python3.12/site-packages/grain/_src/python/multiprocessing_common.py"", line 54, in add_element_to_queue\r\n elements_queue.put(element, timeout=_QUEUE_WAIT_TIMEOUT_SECONDS)\r\n File ""/usr/lib64/python3.12/multiprocessing/queues.py"", line 89, in put\r\n if not self._sem.acquire(block, timeout):\r\n ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^\r\nKeyboardInterrupt\r\nTraceback (most recent call last):\r\n File ""/usr/lib64/python3.12/multiprocessing/process.py"", line 314, in _bootstrap\r\n self.run()\r\n File ""/usr/lib64/python3.12/multiprocessing/process.py"", line 108, in run\r\n self._target(*self._args, **self._kwargs)\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/.venv/lib64/python3.12/site-packages/grain/_src/python/grain_pool.py"", line 261, in _worker_loop\r\n if not multiprocessing_common.add_element_to_queue( # pytype: disable=wrong-arg-types\r\n ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/.venv/lib64/python3.12/site-packages/grain/_src/python/multiprocessing_common.py"", line 54, in add_element_to_queue\r\n elements_queue.put(element, timeout=_QUEUE_WAIT_TIMEOUT_SECONDS)\r\n File ""/usr/lib64/python3.12/multiprocessing/queues.py"", line 89, in put\r\n if not self._sem.acquire(block, timeout):\r\n ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^\r\nKeyboardInterrupt\r\n File ""/usr/lib64/python3.12/multiprocessing/process.py"", line 314, in _bootstrap\r\n self.run()\r\n File ""/usr/lib64/python3.12/multiprocessing/process.py"", line 314, in _bootstrap\r\n self.run()\r\n File ""/usr/lib64/python3.12/multiprocessing/process.py"", line 108, in run\r\n self._target(*self._args, **self._kwargs)\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/.venv/lib64/python3.12/site-packages/grain/_src/python/grain_pool.py"", line 261, in _worker_loop\r\n if not multiprocessing_common.add_element_to_queue( # pytype: disable=wrong-arg-types\r\n ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/.venv/lib64/python3.12/site-packages/grain/_src/python/multiprocessing_common.py"", line 54, in 
add_element_to_queue\r\n elements_queue.put(element, timeout=_QUEUE_WAIT_TIMEOUT_SECONDS)\r\n File ""/usr/lib64/python3.12/multiprocessing/queues.py"", line 89, in put\r\n if not self._sem.acquire(block, timeout):\r\n ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^\r\nKeyboardInterrupt\r\n File ""/usr/lib64/python3.12/multiprocessing/process.py"", line 108, in run\r\n self._target(*self._args, **self._kwargs)\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/.venv/lib64/python3.12/site-packages/grain/_src/python/grain_pool.py"", line 261, in _worker_loop\r\n if not multiprocessing_common.add_element_to_queue( # pytype: disable=wrong-arg-types\r\n ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/.venv/lib64/python3.12/site-packages/grain/_src/python/multiprocessing_common.py"", line 54, in add_element_to_queue\r\n elements_queue.put(element, timeout=_QUEUE_WAIT_TIMEOUT_SECONDS)\r\n File ""/usr/lib64/python3.12/multiprocessing/queues.py"", line 89, in put\r\n if not self._sem.acquire(block, timeout):\r\n ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^\r\nKeyboardInterrupt\r\nslurmstepd: error: *** STEP 3554157.5 ON hkn0401 CANCELLED AT 2025-10-08T23:07:05 ***\r\n",,terminal_output +3005,1780132,"TERMINAL",0,0,"5",,terminal_output +3006,1780323,"TERMINAL",0,0,"^Csrun: sending Ctrl-C to StepId=3554157.5\r\nsrun: job abort in progress\r\n",,terminal_output +3007,1780465,"TERMINAL",0,0,"slurm/jobs/mihir/horeka/coinrun/ablations/appendix-c.sh: line 41: ir: command not found\r\n]0;tum_cte0515@hkn0401:~/Projects/jasmine[?2004h(jasmine) [tum_cte0515@hkn0401 jasmine]$ ",,terminal_output +3008,1780799,"TERMINAL",0,0,"sh slurm/jobs/mihir/horeka/coinrun/ablations/appendix-c.sh",,terminal_output +3009,1780995,"TERMINAL",0,0,"\r\n[?2004l\r\r\n# Log the sbatch script\r\ncat $0\r\n\r\nmodule unload mpi/openmpi/5.0\r\nmodule unload devel/cuda/12.4\r\nsource .venv/bin/activate\r\n\r\narray_records_dir_train=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_coinrun/coinrun_episodes_10m_gt_actions_split/train\r\narray_records_dir_val=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_coinrun/coinrun_episodes_10m_gt_actions_split/val\r\n\r\njob_name=$SLURM_JOB_NAME\r\nslurm_job_id=$SLURM_JOB_ID\r\n\r\n# CHECKPOINT_DIR=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/coinrun/maskgit/$job_name/$slurm_job_id\r\n\r\n# main\r\nCHECKPOINT_DIR=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/coinrun/maskgit/train_dynamics_maskgit/3533426\r\n\r\nmkdir -p $CHECKPOINT_DIR\r\n\r\ntokenizer_ckpt_dir=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/big-runs/tokenizer/train_tokenizer_default/3528955\r\n\r\nenv | grep SLURM\r\n\r\nsrun python jasmine/train_dynamics_appendix-c.py \\r\n --save_ckpt \\r\n --restore_ckpt \\r\n --wandb_id $SLURM_JOB_ID \\r\n --ckpt_dir $CHECKPOINT_DIR \\r\n --batch_size=110 \\r\n --image_height=64 \\r\n --image_width=64 \\r\n --patch_size=16 \\r\n --max_lr=3e-5 \\r\n --no-log \\r\n --no-eval-full-frame \\r\n --tokenizer_checkpoint=$tokenizer_ckpt_dir \\r\n --eval_full_frame \\r\n --data_dir $array_records_dir_train \\r\n --val_data_dir $array_records_dir_val \r\n",,terminal_output 
+3010,1781152,"TERMINAL",0,0,"SLURM_STEP_NUM_TASKS=1\r\nSLURM_JOB_USER=tum_cte0515\r\nSLURM_TASKS_PER_NODE=1\r\nSLURM_JOB_UID=999226\r\nSLURM_TASK_PID=1279162\r\nSLURM_JOB_GPUS=0\r\nSLURM_LOCALID=0\r\nSLURM_SUBMIT_DIR=/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine\r\nSLURMD_NODENAME=hkn0401\r\nSLURM_JOB_START_TIME=1759955924\r\nSLURM_STEP_NODELIST=hkn0401\r\nSLURM_CLUSTER_NAME=hk\r\nSLURM_JOB_END_TIME=1759959524\r\nSLURM_PMI2_SRUN_PORT=34473\r\nSLURM_CPUS_ON_NODE=8\r\nSLURM_JOB_CPUS_PER_NODE=8\r\nSLURM_GPUS_ON_NODE=1\r\nSLURM_GTIDS=0\r\nSLURM_JOB_PARTITION=dev_accelerated\r\nSLURM_TRES_PER_TASK=cpu=8\r\nSLURM_OOM_KILL_STEP=0\r\nSLURM_JOB_NUM_NODES=1\r\nSLURM_STEPID=4294967290\r\nSLURM_JOBID=3554157\r\nSLURM_PTY_PORT=43479\r\nSLURM_JOB_QOS=normal\r\nSLURM_LAUNCH_NODE_IPADDR=10.0.7.201\r\nSLURM_PTY_WIN_ROW=35\r\nSLURM_PMI2_PROC_MAPPING=(vector,(0,1,1))\r\nSLURMD_DEBUG=2\r\nSLURM_PROCID=0\r\nSLURM_CPUS_PER_TASK=8\r\nSLURM_TOPOLOGY_ADDR=hkibb.hkibbi1.hkibbi1e11.hkn0401\r\nSLURM_TOPOLOGY_ADDR_PATTERN=switch.switch.switch.node\r\nSLURM_SRUN_COMM_HOST=10.0.7.201\r\nSLURM_SCRIPT_CONTEXT=prolog_task\r\nSLURM_PTY_WIN_COL=181\r\nSLURM_NODELIST=hkn0401\r\nSLURM_SRUN_COMM_PORT=33897\r\nSLURM_STEP_ID=4294967290\r\nSLURM_JOB_ACCOUNT=hk-project-p0023960\r\nSLURM_PRIO_PROCESS=0\r\nSLURM_NNODES=1\r\nSLURM_SUBMIT_HOST=hkn1993.localdomain\r\nSLURM_JOB_ID=3554157\r\nSLURM_NODEID=0\r\nSLURM_STEP_NUM_NODES=1\r\nSLURM_STEP_TASKS_PER_NODE=1\r\nSLURM_MPI_TYPE=pmi2\r\nSLURM_PMI2_STEP_NODES=hkn0401\r\nSLURM_CONF=/etc/slurm/slurm.conf\r\nSLURM_JOB_NAME=interactive\r\nSLURM_STEP_LAUNCHER_PORT=33897\r\nSLURM_JOB_GID=502226\r\nSLURM_JOB_NODELIST=hkn0401\r\n",,terminal_output +3011,1781153,"TERMINAL",0,0,"6",,terminal_output +3012,1781219,"TERMINAL",0,0,"GpuFreq=control_disabled\r\n",,terminal_output +3013,1782186,"TERMINAL",0,0,"7",,terminal_output +3014,1783147,"TERMINAL",0,0,"8",,terminal_output +3015,1784210,"TERMINAL",0,0,"9",,terminal_output +3016,1785196,"TERMINAL",0,0,"10",,terminal_output +3017,1786299,"TERMINAL",0,0,"1",,terminal_output +3018,1787244,"TERMINAL",0,0,"2",,terminal_output +3019,1788274,"TERMINAL",0,0,"3",,terminal_output +3020,1789298,"TERMINAL",0,0,"4",,terminal_output +3021,1790310,"TERMINAL",0,0,"5",,terminal_output +3022,1791334,"TERMINAL",0,0,"6",,terminal_output +3023,1791721,"TERMINAL",0,0,"WARNING:absl:Path /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/coinrun/maskgit/train_dynamics_maskgit/3533426/040000 could not be identified as a temporary checkpoint path using . Got error: Expected AtomicRenameTemporaryPath (/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/coinrun/maskgit/train_dynamics_maskgit/3533426/040000) to end with "".orbax-checkpoint-tmp"".\r\nWARNING:absl:Path /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/coinrun/maskgit/train_dynamics_maskgit/3533426/200000 could not be identified as a temporary checkpoint path using . Got error: Expected AtomicRenameTemporaryPath (/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/coinrun/maskgit/train_dynamics_maskgit/3533426/200000) to end with "".orbax-checkpoint-tmp"".\r\nWARNING:absl:Path /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/coinrun/maskgit/train_dynamics_maskgit/3533426/080000 could not be identified as a temporary checkpoint path using . 
Got error: Expected AtomicRenameTemporaryPath (/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/coinrun/maskgit/train_dynamics_maskgit/3533426/080000) to end with "".orbax-checkpoint-tmp"".\r\nWARNING:absl:Path /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/coinrun/maskgit/train_dynamics_maskgit/3533426/120000 could not be identified as a temporary checkpoint path using . Got error: Expected AtomicRenameTemporaryPath (/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/coinrun/maskgit/train_dynamics_maskgit/3533426/120000) to end with "".orbax-checkpoint-tmp"".\r\nWARNING:absl:Path /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/coinrun/maskgit/train_dynamics_maskgit/3533426/140000 could not be identified as a temporary checkpoint path using . Got error: Expected AtomicRenameTemporaryPath (/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/coinrun/maskgit/train_dynamics_maskgit/3533426/140000) to end with "".orbax-checkpoint-tmp"".\r\nWARNING:absl:Path /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/coinrun/maskgit/train_dynamics_maskgit/3533426/180000 could not be identified as a temporary checkpoint path using . Got error: Expected AtomicRenameTemporaryPath (/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/coinrun/maskgit/train_dynamics_maskgit/3533426/180000) to end with "".orbax-checkpoint-tmp"".\r\nWARNING:absl:Path /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/coinrun/maskgit/train_dynamics_maskgit/3533426/060000 could not be identified as a temporary checkpoint path using . Got error: Expected AtomicRenameTemporaryPath (/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/coinrun/maskgit/train_dynamics_maskgit/3533426/060000) to end with "".orbax-checkpoint-tmp"".\r\nWARNING:absl:Path /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/coinrun/maskgit/train_dynamics_maskgit/3533426/198000 could not be identified as a temporary checkpoint path using . Got error: Expected AtomicRenameTemporaryPath (/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/coinrun/maskgit/train_dynamics_maskgit/3533426/198000) to end with "".orbax-checkpoint-tmp"".\r\nWARNING:absl:Path /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/coinrun/maskgit/train_dynamics_maskgit/3533426/199000 could not be identified as a temporary checkpoint path using . Got error: Expected AtomicRenameTemporaryPath (/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/coinrun/maskgit/train_dynamics_maskgit/3533426/199000) to end with "".orbax-checkpoint-tmp"".\r\nWARNING:absl:Path /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/coinrun/maskgit/train_dynamics_maskgit/3533426/100000 could not be identified as a temporary checkpoint path using . Got error: Expected AtomicRenameTemporaryPath (/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/coinrun/maskgit/train_dynamics_maskgit/3533426/100000) to end with "".orbax-checkpoint-tmp"".\r\nWARNING:absl:Path /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/coinrun/maskgit/train_dynamics_maskgit/3533426/160000 could not be identified as a temporary checkpoint path using . 
Got error: Expected AtomicRenameTemporaryPath (/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/coinrun/maskgit/train_dynamics_maskgit/3533426/160000) to end with "".orbax-checkpoint-tmp"".\r\nWARNING:absl:Path /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/coinrun/maskgit/train_dynamics_maskgit/3533426/020000 could not be identified as a temporary checkpoint path using . Got error: Expected AtomicRenameTemporaryPath (/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/coinrun/maskgit/train_dynamics_maskgit/3533426/020000) to end with "".orbax-checkpoint-tmp"".\r\n",,terminal_output +3024,1792461,"TERMINAL",0,0,"7",,terminal_output +3025,1793363,"TERMINAL",0,0,"8",,terminal_output +3026,1794444,"TERMINAL",0,0,"9",,terminal_output +3027,1794673,"TERMINAL",0,0,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/.venv/lib64/python3.12/site-packages/orbax/checkpoint/_src/serialization/type_handlers.py:1269: UserWarning: Sharding info not provided when restoring. Populating sharding info from sharding file. Please note restoration time will be slightly increased due to reading from file. Note also that this option is unsafe when restoring on a different topology than the checkpoint was saved with.\r\n warnings.warn(\r\n",,terminal_output +3028,1795418,"TERMINAL",0,0,"20",,terminal_output +3029,1796542,"TERMINAL",0,0,"1",,terminal_output +3030,1797471,"TERMINAL",0,0,"2",,terminal_output +3031,1798497,"TERMINAL",0,0,"3",,terminal_output +3032,1799553,"TERMINAL",0,0,"4",,terminal_output +3033,1800663,"TERMINAL",0,0,"5",,terminal_output +3034,1801693,"TERMINAL",0,0,"7",,terminal_output +3035,1802373,"TERMINAL",0,0,"Running on 1 devices.\r\nCounting all components: ['dynamics', 'lam', 'tokenizer']\r\nParameter counts:\r\n{'dynamics': 26555904, 'lam': 17640416, 'tokenizer': 34489696, 'total': 78686016}\r\nRestored dataloader and model state from step 200000\r\nStarting training from step 200000...\r\nCalculating validation metrics...\r\n",,terminal_output +3036,1802562,"TERMINAL",0,0,"8",,terminal_output +3037,1803603,"TERMINAL",0,0,"9",,terminal_output +3038,1804604,"TERMINAL",0,0,"30",,terminal_output +3039,1805685,"TERMINAL",0,0,"1",,terminal_output +3040,1806648,"TERMINAL",0,0,"2",,terminal_output +3041,1807671,"TERMINAL",0,0,"3",,terminal_output +3042,1808713,"TERMINAL",0,0,"4",,terminal_output +3043,1809789,"TERMINAL",0,0,"5",,terminal_output +3044,1810710,"TERMINAL",0,0,"6",,terminal_output +3045,1811751,"TERMINAL",0,0,"7",,terminal_output +3046,1812779,"TERMINAL",0,0,"8",,terminal_output +3047,1813781,"TERMINAL",0,0,"9",,terminal_output +3048,1814813,"TERMINAL",0,0,"40",,terminal_output +3049,1815828,"TERMINAL",0,0,"1",,terminal_output +3050,1816153,"jasmine/train_dynamics_appendix-c.py",0,0,"",python,tab +3051,1816980,"TERMINAL",0,0,"2",,terminal_output +3052,1818050,"TERMINAL",0,0,"3",,terminal_output +3053,1818944,"TERMINAL",0,0,"4",,terminal_output +3054,1819981,"TERMINAL",0,0,"5",,terminal_output +3055,1820920,"TERMINAL",0,0,"6",,terminal_output +3056,1821960,"TERMINAL",0,0,"7",,terminal_output +3057,1822958,"TERMINAL",0,0,"8",,terminal_output +3058,1824117,"TERMINAL",0,0,"9",,terminal_output +3059,1824471,"jasmine/train_dynamics_appendix-c.py",19970,0,"",python,selection_mouse +3060,1824622,"jasmine/train_dynamics_appendix-c.py",19960,15,"eval_full_frame",python,selection_mouse +3061,1825076,"TERMINAL",0,0,"50",,terminal_output +3062,1826087,"TERMINAL",0,0,"1",,terminal_output 
+3063,1827195,"TERMINAL",0,0,"2",,terminal_output +3064,1828059,"TERMINAL",0,0,"3",,terminal_output +3065,1829085,"TERMINAL",0,0,"4",,terminal_output +3066,1830143,"TERMINAL",0,0,"5",,terminal_output +3067,1831031,"jasmine/train_dynamics_appendix-c.py",0,0,"",python,tab +3068,1831134,"TERMINAL",0,0,"6",,terminal_output +3069,1832249,"TERMINAL",0,0,"7",,terminal_output +3070,1833342,"TERMINAL",0,0,"8",,terminal_output +3071,1834623,"TERMINAL",0,0,"9",,terminal_output +3072,1835480,"TERMINAL",0,0,"8:00",,terminal_output +3073,1836496,"TERMINAL",0,0,"1",,terminal_output +3074,1837258,"TERMINAL",0,0,"2",,terminal_output +3075,1837834,"jasmine/train_dynamics_appendix-c.py",17706,0,"",python,selection_mouse +3076,1837876,"jasmine/train_dynamics_appendix-c.py",17705,0,"",python,selection_command +3077,1838290,"TERMINAL",0,0,"3",,terminal_output +3078,1839298,"TERMINAL",0,0,"4",,terminal_output +3079,1840317,"TERMINAL",0,0,"5",,terminal_output +3080,1841369,"TERMINAL",0,0,"6",,terminal_output +3081,1842444,"TERMINAL",0,0,"7",,terminal_output +3082,1843440,"TERMINAL",0,0,"8",,terminal_output +3083,1844461,"TERMINAL",0,0,"9",,terminal_output +3084,1845524,"TERMINAL",0,0,"10",,terminal_output +3085,1846499,"TERMINAL",0,0,"1",,terminal_output +3086,1847539,"TERMINAL",0,0,"3",,terminal_output +3087,1848605,"TERMINAL",0,0,"4",,terminal_output +3088,1849565,"TERMINAL",0,0,"5",,terminal_output +3089,1850581,"TERMINAL",0,0,"6",,terminal_output +3090,1851612,"TERMINAL",0,0,"7",,terminal_output +3091,1852655,"TERMINAL",0,0,"8",,terminal_output +3092,1853644,"TERMINAL",0,0,"9",,terminal_output +3093,1854677,"TERMINAL",0,0,"20",,terminal_output +3094,1855698,"TERMINAL",0,0,"1",,terminal_output +3095,1856232,"appendix_c_nodes.md",0,0,"",markdown,tab +3096,1856809,"TERMINAL",0,0,"2",,terminal_output +3097,1857394,"appendix_c_nodes.md",73,0,"",markdown,selection_mouse +3098,1857740,"TERMINAL",0,0,"3",,terminal_output +3099,1858026,"appendix_c_nodes.md",69,0,"",markdown,selection_mouse +3100,1858149,"appendix_c_nodes.md",68,1,"0",markdown,selection_mouse +3101,1858317,"appendix_c_nodes.md",68,18,"0.7587440609931946",markdown,selection_mouse +3102,1858781,"TERMINAL",0,0,"4",,terminal_output +3103,1858989,"appendix_c_nodes.md",86,0,"",markdown,selection_mouse +3104,1859773,"TERMINAL",0,0,"5",,terminal_output +3105,1860828,"TERMINAL",0,0,"6",,terminal_output +3106,1861886,"TERMINAL",0,0,"7",,terminal_output +3107,1862842,"TERMINAL",0,0,"8",,terminal_output +3108,1864067,"TERMINAL",0,0,"9",,terminal_output +3109,1865163,"TERMINAL",0,0,"30",,terminal_output +3110,1866432,"TERMINAL",0,0,"1",,terminal_output +3111,1867332,"TERMINAL",0,0,"2",,terminal_output +3112,1868303,"TERMINAL",0,0,"3",,terminal_output +3113,1869076,"TERMINAL",0,0,"4",,terminal_output +3114,1870088,"TERMINAL",0,0,"5",,terminal_output +3115,1871078,"TERMINAL",0,0,"6",,terminal_output +3116,1872093,"TERMINAL",0,0,"7",,terminal_output +3117,1873165,"TERMINAL",0,0,"8",,terminal_output +3118,1874204,"TERMINAL",0,0,"9",,terminal_output +3119,1875195,"TERMINAL",0,0,"40",,terminal_output +3120,1876484,"TERMINAL",0,0,"1",,terminal_output +3121,1877377,"TERMINAL",0,0,"2",,terminal_output +3122,1878318,"TERMINAL",0,0,"3",,terminal_output +3123,1879274,"TERMINAL",0,0,"4",,terminal_output +3124,1880342,"TERMINAL",0,0,"5",,terminal_output +3125,1881405,"TERMINAL",0,0,"6",,terminal_output +3126,1882651,"TERMINAL",0,0,"7",,terminal_output +3127,1883660,"TERMINAL",0,0,"8",,terminal_output +3128,1884638,"TERMINAL",0,0,"9",,terminal_output 
+3129,1885570,"TERMINAL",0,0,"50",,terminal_output +3130,1886415,"TERMINAL",0,0,"1",,terminal_output +3131,1887546,"TERMINAL",0,0,"2",,terminal_output +3132,1888548,"TERMINAL",0,0,"3",,terminal_output +3133,1889619,"TERMINAL",0,0,"4",,terminal_output +3134,1890499,"TERMINAL",0,0,"5",,terminal_output +3135,1891541,"TERMINAL",0,0,"7",,terminal_output +3136,1892294,"TERMINAL",0,0,"Step 200000, validation loss: 1.9375224113464355\r\n{'val_codebook_usage_lam': np.float64(1.0), 'val_codebook_usage_tokenizer': np.float64(0.8475988051470589), 'val_cross_entropy_loss': np.float64(1.9375225586049698), 'val_entropy': np.float64(2.236036305334054), 'val_masked_token_top16_accuracy': np.float64(0.8798762057341781), 'val_masked_token_top1_accuracy': np.float64(0.5657584152969659), 'val_masked_token_top2_accuracy': np.float64(0.689856609877418), 'val_masked_token_top5_accuracy': np.float64(0.798516615933063), 'val_psnr': np.float64(15.530764074886546), 'val_select_logit': np.float64(10.11456164191751), 'val_select_p': np.float64(0.501306425707013), 'val_ssim': np.float64(0.5097568672077328), 'val_total_loss': np.float64(1.9375225586049698), 'val_z_loss': np.float64(149.545166015625), 'val_loss': np.float32(1.9375224), 'val_full_frame_codebook_usage_lam': np.float64(1.0), 'val_full_frame_codebook_usage_tokenizer': np.float64(0.2629250919117647), 'val_full_frame_cross_entropy_loss': np.float64(20.045630099726658), 'val_full_frame_entropy': np.float64(1.0269374473422181), 'val_full_frame_masked_token_top16_accuracy': np.float64(0.7251559460864347), 'val_full_frame_masked_token_top1_accuracy': np.float64(0.38186273446270064), 'val_full_frame_masked_token_top2_accuracy': np.float64(0.47075533224087135), 'val_full_frame_masked_token_top5_accuracy': np.float64(0.5903074694614784), 'val_full_frame_psnr': np.float64(16.088213939292757), 'val_full_frame_select_logit': np.float64(84.10442965638404), 'val_full_frame_select_p': np.float64(0.7601301389582017), 'val_full_frame_ssim': np.float64(0.5480527725874209), 'val_full_frame_z_loss': np.float64(108920.47901348039), 'val_full_frame_loss': np.float32(20.045631)}\r\n",,terminal_output +3137,1892541,"TERMINAL",0,0,"8",,terminal_output +3138,1893577,"TERMINAL",0,0,"9",,terminal_output +3139,1894598,"TERMINAL",0,0,"9:00",,terminal_output +3140,1895715,"TERMINAL",0,0,"1",,terminal_output +3141,1896787,"TERMINAL",0,0,"2",,terminal_output +3142,1897758,"TERMINAL",0,0,"W1008 23:09:03.190005 1287238 pjrt_client.cc:1469] WatchJobStateAsync failed for task goo.gle/debugstr job_name: ""jax_worker"": UNAVAILABLE: Cancelling all calls\r\nAdditional GRPC error information from remote target coordination_service while calling /tensorflow.CoordinationService/WatchJobState:\r\n:UNKNOWN:Error received from peer {grpc_status:14, grpc_message:""Cancelling all calls""}\r\n",,terminal_output +3143,1897867,"TERMINAL",0,0,"3",,terminal_output +3144,1898134,"TERMINAL",0,0,"/usr/lib64/python3.12/multiprocessing/resource_tracker.py:254: UserWarning: resource_tracker: There appear to be 5 leaked shared_memory objects to clean up at shutdown\r\n warnings.warn('resource_tracker: There appear to be %d '\r\n",,terminal_output +3145,1898415,"TERMINAL",0,0,"]0;tum_cte0515@hkn0401:~/Projects/jasmine[?2004h(jasmine) [tum_cte0515@hkn0401 jasmine]$ ",,terminal_output +3146,1898945,"TERMINAL",0,0,"4",,terminal_output +3147,1899845,"TERMINAL",0,0,"5",,terminal_output +3148,1900890,"TERMINAL",0,0,"6",,terminal_output +3149,1902012,"TERMINAL",0,0,"7",,terminal_output 
+3150,1903109,"TERMINAL",0,0,"8",,terminal_output +3151,1904061,"TERMINAL",0,0,"9",,terminal_output +3152,1905079,"TERMINAL",0,0,"10",,terminal_output +3153,1906323,"TERMINAL",0,0,"1",,terminal_output +3154,1907097,"TERMINAL",0,0,"2",,terminal_output +3155,1908464,"TERMINAL",0,0,"3",,terminal_output +3156,1909383,"TERMINAL",0,0,"4",,terminal_output +3157,1910494,"TERMINAL",0,0,"5",,terminal_output +3158,1911727,"TERMINAL",0,0,"6",,terminal_output +3159,1912425,"TERMINAL",0,0,"7",,terminal_output +3160,1913453,"TERMINAL",0,0,"8",,terminal_output +3161,1914380,"TERMINAL",0,0,"9",,terminal_output +3162,1915307,"TERMINAL",0,0,"20",,terminal_output +3163,1916532,"TERMINAL",0,0,"1",,terminal_output +3164,1917420,"slurm/jobs/mihir/horeka/doom/resolution60x80/sample.sh",0,0,"",shellscript,tab +3165,1917533,"TERMINAL",0,0,"2",,terminal_output +3166,1918620,"TERMINAL",0,0,"3",,terminal_output +3167,1919553,"TERMINAL",0,0,"4",,terminal_output +3168,1921288,"TERMINAL",0,0,"5",,terminal_output +3169,1921887,"jasmine/train_dynamics_appendix-c.py",0,0,"",python,tab +3170,1922196,"TERMINAL",0,0,"6",,terminal_output +3171,1922582,"TERMINAL",0,0,"7",,terminal_output +3172,1923468,"TERMINAL",0,0,"8",,terminal_output +3173,1923756,"appendix_c_nodes.md",0,0,"",markdown,tab +3174,1924469,"TERMINAL",0,0,"9",,terminal_output +3175,1925023,"appendix_c_nodes.md",1780,0,"",markdown,selection_mouse +3176,1925508,"TERMINAL",0,0,"30",,terminal_output +3177,1925631,"appendix_c_nodes.md",1780,0,"\n",markdown,content +3178,1925812,"appendix_c_nodes.md",1781,0,"\n",markdown,content +3179,1926841,"TERMINAL",0,0,"1",,terminal_output +3180,1927766,"TERMINAL",0,0,"3",,terminal_output +3181,1928062,"appendix_c_nodes.md",1782,0,"#",markdown,content +3182,1928063,"appendix_c_nodes.md",1783,0,"",markdown,selection_keyboard +3183,1928186,"appendix_c_nodes.md",1783,0,"#",markdown,content +3184,1928187,"appendix_c_nodes.md",1784,0,"",markdown,selection_keyboard +3185,1928353,"appendix_c_nodes.md",1784,0," ",markdown,content +3186,1928354,"appendix_c_nodes.md",1785,0,"",markdown,selection_keyboard +3187,1928572,"appendix_c_nodes.md",1785,0,"n",markdown,content +3188,1928573,"appendix_c_nodes.md",1786,0,"",markdown,selection_keyboard +3189,1928633,"TERMINAL",0,0,"4",,terminal_output +3190,1929109,"appendix_c_nodes.md",1785,1,"",markdown,content +3191,1929413,"appendix_c_nodes.md",1784,1,"",markdown,content +3192,1929622,"TERMINAL",0,0,"5",,terminal_output +3193,1930146,"appendix_c_nodes.md",1784,0,"#",markdown,content +3194,1930147,"appendix_c_nodes.md",1785,0,"",markdown,selection_keyboard +3195,1930478,"appendix_c_nodes.md",1785,0,"n",markdown,content +3196,1930479,"appendix_c_nodes.md",1786,0,"",markdown,selection_keyboard +3197,1930880,"TERMINAL",0,0,"6",,terminal_output +3198,1931037,"appendix_c_nodes.md",1785,1,"",markdown,content +3199,1931305,"appendix_c_nodes.md",1785,0," ",markdown,content +3200,1931306,"appendix_c_nodes.md",1786,0,"",markdown,selection_keyboard +3201,1931454,"appendix_c_nodes.md",1786,0,"n",markdown,content +3202,1931455,"appendix_c_nodes.md",1787,0,"",markdown,selection_keyboard +3203,1931650,"appendix_c_nodes.md",1787,0,"o",markdown,content +3204,1931651,"appendix_c_nodes.md",1788,0,"",markdown,selection_keyboard +3205,1931719,"appendix_c_nodes.md",1788,0,"i",markdown,content +3206,1931720,"appendix_c_nodes.md",1789,0,"",markdown,selection_keyboard +3207,1931833,"appendix_c_nodes.md",1789,0,"s",markdown,content +3208,1931834,"appendix_c_nodes.md",1790,0,"",markdown,selection_keyboard 
+3209,1932015,"TERMINAL",0,0,"7",,terminal_output +3210,1932103,"appendix_c_nodes.md",1790,0,"e",markdown,content +3211,1932104,"appendix_c_nodes.md",1791,0,"",markdown,selection_keyboard +3212,1932198,"appendix_c_nodes.md",1791,0," ",markdown,content +3213,1932199,"appendix_c_nodes.md",1792,0,"",markdown,selection_keyboard +3214,1932756,"TERMINAL",0,0,"8",,terminal_output +3215,1933257,"appendix_c_nodes.md",1792,0,"s",markdown,content +3216,1933258,"appendix_c_nodes.md",1793,0,"",markdown,selection_keyboard +3217,1933707,"appendix_c_nodes.md",1793,0,"t",markdown,content +3218,1933709,"appendix_c_nodes.md",1794,0,"",markdown,selection_keyboard +3219,1933789,"TERMINAL",0,0,"9",,terminal_output +3220,1934276,"appendix_c_nodes.md",1793,1,"",markdown,content +3221,1934359,"appendix_c_nodes.md",1793,0,"d",markdown,content +3222,1934360,"appendix_c_nodes.md",1794,0,"",markdown,selection_keyboard +3223,1934649,"appendix_c_nodes.md",1794,0,"t",markdown,content +3224,1934650,"appendix_c_nodes.md",1795,0,"",markdown,selection_keyboard +3225,1934833,"TERMINAL",0,0,"40",,terminal_output +3226,1935188,"appendix_c_nodes.md",1794,1,"",markdown,content +3227,1935333,"appendix_c_nodes.md",1793,1,"",markdown,content +3228,1936074,"appendix_c_nodes.md",1793,0,"t",markdown,content +3229,1936075,"appendix_c_nodes.md",1794,0,"",markdown,selection_keyboard +3230,1936104,"TERMINAL",0,0,"1",,terminal_output +3231,1936268,"appendix_c_nodes.md",1794,0,"d",markdown,content +3232,1936269,"appendix_c_nodes.md",1795,0,"",markdown,selection_keyboard +3233,1936564,"appendix_c_nodes.md",1795,0,"d",markdown,content +3234,1936565,"appendix_c_nodes.md",1796,0,"",markdown,selection_keyboard +3235,1936772,"appendix_c_nodes.md",1796,0,"e",markdown,content +3236,1936773,"appendix_c_nodes.md",1797,0,"",markdown,selection_keyboard +3237,1936892,"TERMINAL",0,0,"2",,terminal_output +3238,1937207,"appendix_c_nodes.md",1796,1,"",markdown,content +3239,1937364,"appendix_c_nodes.md",1795,1,"",markdown,content +3240,1937973,"TERMINAL",0,0,"3",,terminal_output +3241,1938217,"appendix_c_nodes.md",1795,0,"=",markdown,content +3242,1938218,"appendix_c_nodes.md",1796,0,"",markdown,selection_keyboard +3243,1938494,"appendix_c_nodes.md",1796,0,"0",markdown,content +3244,1938495,"appendix_c_nodes.md",1797,0,"",markdown,selection_keyboard +3245,1938629,"appendix_c_nodes.md",1797,0,".",markdown,content +3246,1938630,"appendix_c_nodes.md",1798,0,"",markdown,selection_keyboard +3247,1938768,"appendix_c_nodes.md",1798,0,"1",markdown,content +3248,1938769,"appendix_c_nodes.md",1799,0,"",markdown,selection_keyboard +3249,1939012,"TERMINAL",0,0,"4",,terminal_output +3250,1939046,"appendix_c_nodes.md",1799,0,"\n",markdown,content +3251,1939506,"appendix_c_nodes.md",1800,0,"Step 200000, validation loss: 1.9375224113464355\n{'val_codebook_usage_lam': np.float64(1.0), 'val_codebook_usage_tokenizer': np.float64(0.8475988051470589), 'val_cross_entropy_loss': np.float64(1.9375225586049698), 'val_entropy': np.float64(2.236036305334054), 'val_masked_token_top16_accuracy': np.float64(0.8798762057341781), 'val_masked_token_top1_accuracy': np.float64(0.5657584152969659), 'val_masked_token_top2_accuracy': np.float64(0.689856609877418), 'val_masked_token_top5_accuracy': np.float64(0.798516615933063), 'val_psnr': np.float64(15.530764074886546), 'val_select_logit': np.float64(10.11456164191751), 'val_select_p': np.float64(0.501306425707013), 'val_ssim': np.float64(0.5097568672077328), 'val_total_loss': np.float64(1.9375225586049698), 'val_z_loss': 
np.float64(149.545166015625), 'val_loss': np.float32(1.9375224), 'val_full_frame_codebook_usage_lam': np.float64(1.0), 'val_full_frame_codebook_usage_tokenizer': np.float64(0.2629250919117647), 'val_full_frame_cross_entropy_loss': np.float64(20.045630099726658), 'val_full_frame_entropy': np.float64(1.0269374473422181), 'val_full_frame_masked_token_top16_accuracy': np.float64(0.7251559460864347), 'val_full_frame_masked_token_top1_accuracy': np.float64(0.38186273446270064), 'val_full_frame_masked_token_top2_accuracy': np.float64(0.47075533224087135), 'val_full_frame_masked_token_top5_accuracy': np.float64(0.5903074694614784), 'val_full_frame_psnr': np.float64(16.088213939292757), 'val_full_frame_select_logit': np.float64(84.10442965638404), 'val_full_frame_select_p': np.float64(0.7601301389582017), 'val_full_frame_ssim': np.float64(0.5480527725874209), 'val_full_frame_z_loss': np.float64(108920.47901348039), 'val_full_frame_loss': np.float32(20.045631)}",markdown,content +3252,1940032,"TERMINAL",0,0,"5",,terminal_output +3253,1940496,"appendix_c_nodes.md",3539,0,"\n",markdown,content +3254,1940904,"appendix_c_nodes.md",3540,0,"\n",markdown,content +3255,1940965,"TERMINAL",0,0,"6",,terminal_output +3256,1941930,"TERMINAL",0,0,"7",,terminal_output +3257,1942267,"appendix_c_nodes.md",1799,0,"",markdown,selection_mouse +3258,1942268,"appendix_c_nodes.md",1798,0,"",markdown,selection_command +3259,1942783,"appendix_c_nodes.md",1816,0,"",markdown,selection_command +3260,1942967,"TERMINAL",0,0,"8",,terminal_output +3261,1943282,"appendix_c_nodes.md",1865,0,"",markdown,selection_command +3262,1943306,"appendix_c_nodes.md",3540,0,"",markdown,selection_command +3263,1943339,"appendix_c_nodes.md",3541,0,"",markdown,selection_command +3264,1944014,"appendix_c_nodes.md",3541,0,"\n### noise std=0.1",markdown,content +3265,1944019,"appendix_c_nodes.md",3542,0,"",markdown,selection_command +3266,1944063,"TERMINAL",0,0,"9",,terminal_output +3267,1944619,"appendix_c_nodes.md",3541,0,"",markdown,selection_command +3268,1945038,"appendix_c_nodes.md",3541,1,"",markdown,content +3269,1945117,"TERMINAL",0,0,"50",,terminal_output +3270,1945510,"appendix_c_nodes.md",3558,0,"",markdown,selection_command +3271,1945686,"appendix_c_nodes.md",3557,1,"",markdown,content +3272,1946258,"TERMINAL",0,0,"1",,terminal_output +3273,1946621,"appendix_c_nodes.md",3557,0,"2",markdown,content +3274,1946622,"appendix_c_nodes.md",3558,0,"",markdown,selection_keyboard +3275,1947072,"TERMINAL",0,0,"2",,terminal_output +3276,1948174,"TERMINAL",0,0,"3",,terminal_output +3277,1949125,"TERMINAL",0,0,"4",,terminal_output +3278,1949719,"jasmine/train_dynamics_appendix-c.py",0,0,"",python,tab +3279,1950152,"TERMINAL",0,0,"5",,terminal_output +3280,1950334,"jasmine/models/dynamics.py",0,0,"",python,tab +3281,1951293,"TERMINAL",0,0,"6",,terminal_output +3282,1951693,"jasmine/models/dynamics.py",2311,0,"",python,selection_mouse +3283,1952140,"TERMINAL",0,0,"7",,terminal_output +3284,1952216,"jasmine/models/dynamics.py",2310,0,"",python,selection_command +3285,1952382,"jasmine/models/dynamics.py",2309,0,"",python,selection_command +3286,1953064,"jasmine/models/dynamics.py",2308,1,"",python,content +3287,1953143,"jasmine/models/dynamics.py",2308,0,"2",python,content +3288,1953144,"jasmine/models/dynamics.py",2309,0,"",python,selection_keyboard +3289,1953238,"TERMINAL",0,0,"8",,terminal_output +3290,1954410,"TERMINAL",0,0,"9",,terminal_output +3291,1955098,"TERMINAL",0,0,"sh slurm/jobs/mihir/horeka/coinrun/ablations/appendix-c.sh",,terminal_output 
+3292,1955256,"TERMINAL",0,0,"10:00",,terminal_output +3293,1956008,"TERMINAL",0,0,"\r\n[?2004l\r\r\n# Log the sbatch script\r\ncat $0\r\n\r\nmodule unload mpi/openmpi/5.0\r\nmodule unload devel/cuda/12.4\r\nsource .venv/bin/activate\r\n\r\narray_records_dir_train=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_coinrun/coinrun_episodes_10m_gt_actions_split/train\r\narray_records_dir_val=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_coinrun/coinrun_episodes_10m_gt_actions_split/val\r\n\r\njob_name=$SLURM_JOB_NAME\r\nslurm_job_id=$SLURM_JOB_ID\r\n\r\n# CHECKPOINT_DIR=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/coinrun/maskgit/$job_name/$slurm_job_id\r\n\r\n# main\r\nCHECKPOINT_DIR=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/coinrun/maskgit/train_dynamics_maskgit/3533426\r\n\r\nmkdir -p $CHECKPOINT_DIR\r\n\r\ntokenizer_ckpt_dir=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/big-runs/tokenizer/train_tokenizer_default/3528955\r\n\r\nenv | grep SLURM\r\n\r\nsrun python jasmine/train_dynamics_appendix-c.py \\r\n --save_ckpt \\r\n --restore_ckpt \\r\n --wandb_id $SLURM_JOB_ID \\r\n --ckpt_dir $CHECKPOINT_DIR \\r\n --batch_size=110 \\r\n --image_height=64 \\r\n --image_width=64 \\r\n --patch_size=16 \\r\n --max_lr=3e-5 \\r\n --no-log \\r\n --no-eval-full-frame \\r\n --tokenizer_checkpoint=$tokenizer_ckpt_dir \\r\n --eval_full_frame \\r\n --data_dir $array_records_dir_train \\r\n --val_data_dir $array_records_dir_val \r\n",,terminal_output +3294,1956146,"TERMINAL",0,0,"SLURM_STEP_NUM_TASKS=1\r\nSLURM_JOB_USER=tum_cte0515\r\nSLURM_TASKS_PER_NODE=1\r\nSLURM_JOB_UID=999226\r\nSLURM_TASK_PID=1279162\r\nSLURM_JOB_GPUS=0\r\nSLURM_LOCALID=0\r\nSLURM_SUBMIT_DIR=/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine\r\nSLURMD_NODENAME=hkn0401\r\nSLURM_JOB_START_TIME=1759955924\r\nSLURM_STEP_NODELIST=hkn0401\r\nSLURM_CLUSTER_NAME=hk\r\nSLURM_JOB_END_TIME=1759959524\r\nSLURM_PMI2_SRUN_PORT=34473\r\nSLURM_CPUS_ON_NODE=8\r\nSLURM_JOB_CPUS_PER_NODE=8\r\nSLURM_GPUS_ON_NODE=1\r\nSLURM_GTIDS=0\r\nSLURM_JOB_PARTITION=dev_accelerated\r\nSLURM_TRES_PER_TASK=cpu=8\r\nSLURM_OOM_KILL_STEP=0\r\nSLURM_JOB_NUM_NODES=1\r\nSLURM_STEPID=4294967290\r\nSLURM_JOBID=3554157\r\nSLURM_PTY_PORT=43479\r\nSLURM_JOB_QOS=normal\r\nSLURM_LAUNCH_NODE_IPADDR=10.0.7.201\r\nSLURM_PTY_WIN_ROW=35\r\nSLURM_PMI2_PROC_MAPPING=(vector,(0,1,1))\r\nSLURMD_DEBUG=2\r\nSLURM_PROCID=0\r\nSLURM_CPUS_PER_TASK=8\r\nSLURM_TOPOLOGY_ADDR=hkibb.hkibbi1.hkibbi1e11.hkn0401\r\nSLURM_TOPOLOGY_ADDR_PATTERN=switch.switch.switch.node\r\nSLURM_SRUN_COMM_HOST=10.0.7.201\r\nSLURM_SCRIPT_CONTEXT=prolog_task\r\nSLURM_PTY_WIN_COL=181\r\nSLURM_NODELIST=hkn0401\r\nSLURM_SRUN_COMM_PORT=33897\r\nSLURM_STEP_ID=4294967290\r\nSLURM_JOB_ACCOUNT=hk-project-p0023960\r\nSLURM_PRIO_PROCESS=0\r\nSLURM_NNODES=1\r\nSLURM_SUBMIT_HOST=hkn1993.localdomain\r\nSLURM_JOB_ID=3554157\r\nSLURM_NODEID=0\r\nSLURM_STEP_NUM_NODES=1\r\nSLURM_STEP_TASKS_PER_NODE=1\r\nSLURM_MPI_TYPE=pmi2\r\nSLURM_PMI2_STEP_NODES=hkn0401\r\nSLURM_CONF=/etc/slurm/slurm.conf\r\nSLURM_JOB_NAME=interactive\r\nSLURM_STEP_LAUNCHER_PORT=33897\r\nSLURM_JOB_GID=502226\r\nSLURM_JOB_NODELIST=hkn0401\r\n",,terminal_output +3295,1956279,"TERMINAL",0,0,"GpuFreq=control_disabled\r\n",,terminal_output +3296,1956331,"TERMINAL",0,0,"1",,terminal_output +3297,1957314,"TERMINAL",0,0,"2",,terminal_output +3298,1958568,"TERMINAL",0,0,"3",,terminal_output +3299,1959441,"TERMINAL",0,0,"4",,terminal_output 
+3300,1960481,"TERMINAL",0,0,"5",,terminal_output +3301,1961429,"TERMINAL",0,0,"6",,terminal_output +3302,1962458,"TERMINAL",0,0,"7",,terminal_output +3303,1963475,"TERMINAL",0,0,"8",,terminal_output +3304,1964493,"TERMINAL",0,0,"9",,terminal_output +3305,1965548,"TERMINAL",0,0,"10",,terminal_output +3306,1966307,"TERMINAL",0,0,"WARNING:absl:Path /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/coinrun/maskgit/train_dynamics_maskgit/3533426/040000 could not be identified as a temporary checkpoint path using . Got error: Expected AtomicRenameTemporaryPath (/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/coinrun/maskgit/train_dynamics_maskgit/3533426/040000) to end with "".orbax-checkpoint-tmp"".\r\nWARNING:absl:Path /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/coinrun/maskgit/train_dynamics_maskgit/3533426/200000 could not be identified as a temporary checkpoint path using . Got error: Expected AtomicRenameTemporaryPath (/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/coinrun/maskgit/train_dynamics_maskgit/3533426/200000) to end with "".orbax-checkpoint-tmp"".\r\nWARNING:absl:Path /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/coinrun/maskgit/train_dynamics_maskgit/3533426/080000 could not be identified as a temporary checkpoint path using . Got error: Expected AtomicRenameTemporaryPath (/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/coinrun/maskgit/train_dynamics_maskgit/3533426/080000) to end with "".orbax-checkpoint-tmp"".\r\nWARNING:absl:Path /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/coinrun/maskgit/train_dynamics_maskgit/3533426/120000 could not be identified as a temporary checkpoint path using . Got error: Expected AtomicRenameTemporaryPath (/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/coinrun/maskgit/train_dynamics_maskgit/3533426/120000) to end with "".orbax-checkpoint-tmp"".\r\nWARNING:absl:Path /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/coinrun/maskgit/train_dynamics_maskgit/3533426/140000 could not be identified as a temporary checkpoint path using . Got error: Expected AtomicRenameTemporaryPath (/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/coinrun/maskgit/train_dynamics_maskgit/3533426/140000) to end with "".orbax-checkpoint-tmp"".\r\nWARNING:absl:Path /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/coinrun/maskgit/train_dynamics_maskgit/3533426/180000 could not be identified as a temporary checkpoint path using . Got error: Expected AtomicRenameTemporaryPath (/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/coinrun/maskgit/train_dynamics_maskgit/3533426/180000) to end with "".orbax-checkpoint-tmp"".\r\nWARNING:absl:Path /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/coinrun/maskgit/train_dynamics_maskgit/3533426/060000 could not be identified as a temporary checkpoint path using . Got error: Expected AtomicRenameTemporaryPath (/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/coinrun/maskgit/train_dynamics_maskgit/3533426/060000) to end with "".orbax-checkpoint-tmp"".\r\nWARNING:absl:Path /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/coinrun/maskgit/train_dynamics_maskgit/3533426/198000 could not be identified as a temporary checkpoint path using . 
Got error: Expected AtomicRenameTemporaryPath (/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/coinrun/maskgit/train_dynamics_maskgit/3533426/198000) to end with "".orbax-checkpoint-tmp"".\r\nWARNING:absl:Path /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/coinrun/maskgit/train_dynamics_maskgit/3533426/199000 could not be identified as a temporary checkpoint path using . Got error: Expected AtomicRenameTemporaryPath (/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/coinrun/maskgit/train_dynamics_maskgit/3533426/199000) to end with "".orbax-checkpoint-tmp"".\r\nWARNING:absl:Path /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/coinrun/maskgit/train_dynamics_maskgit/3533426/100000 could not be identified as a temporary checkpoint path using . Got error: Expected AtomicRenameTemporaryPath (/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/coinrun/maskgit/train_dynamics_maskgit/3533426/100000) to end with "".orbax-checkpoint-tmp"".\r\nWARNING:absl:Path /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/coinrun/maskgit/train_dynamics_maskgit/3533426/160000 could not be identified as a temporary checkpoint path using . Got error: Expected AtomicRenameTemporaryPath (/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/coinrun/maskgit/train_dynamics_maskgit/3533426/160000) to end with "".orbax-checkpoint-tmp"".\r\nWARNING:absl:Path /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/coinrun/maskgit/train_dynamics_maskgit/3533426/020000 could not be identified as a temporary checkpoint path using . Got error: Expected AtomicRenameTemporaryPath (/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/coinrun/maskgit/train_dynamics_maskgit/3533426/020000) to end with "".orbax-checkpoint-tmp"".\r\n",,terminal_output +3307,1966478,"TERMINAL",0,0,"1",,terminal_output +3308,1967591,"TERMINAL",0,0,"2",,terminal_output +3309,1968561,"TERMINAL",0,0,"3",,terminal_output +3310,1969047,"TERMINAL",0,0,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/.venv/lib64/python3.12/site-packages/orbax/checkpoint/_src/serialization/type_handlers.py:1269: UserWarning: Sharding info not provided when restoring. Populating sharding info from sharding file. Please note restoration time will be slightly increased due to reading from file. 
Note also that this option is unsafe when restoring on a different topology than the checkpoint was saved with.\r\n warnings.warn(\r\n",,terminal_output +3311,1969735,"TERMINAL",0,0,"4",,terminal_output +3312,1970841,"TERMINAL",0,0,"6",,terminal_output +3313,1971858,"TERMINAL",0,0,"7",,terminal_output +3314,1972710,"TERMINAL",0,0,"8",,terminal_output +3315,1973826,"TERMINAL",0,0,"9",,terminal_output +3316,1974852,"TERMINAL",0,0,"20",,terminal_output +3317,1975939,"TERMINAL",0,0,"1",,terminal_output +3318,1976813,"TERMINAL",0,0,"2",,terminal_output +3319,1977052,"TERMINAL",0,0,"Running on 1 devices.\r\nCounting all components: ['dynamics', 'lam', 'tokenizer']\r\nParameter counts:\r\n{'dynamics': 26555904, 'lam': 17640416, 'tokenizer': 34489696, 'total': 78686016}\r\nRestored dataloader and model state from step 200000\r\nStarting training from step 200000...\r\nCalculating validation metrics...\r\n",,terminal_output +3320,1978073,"TERMINAL",0,0,"3",,terminal_output +3321,1978905,"TERMINAL",0,0,"4",,terminal_output +3322,1979743,"TERMINAL",0,0,"5",,terminal_output +3323,1980778,"TERMINAL",0,0,"6",,terminal_output +3324,1981829,"TERMINAL",0,0,"7",,terminal_output +3325,1982794,"TERMINAL",0,0,"8",,terminal_output +3326,1983944,"TERMINAL",0,0,"9",,terminal_output +3327,1984949,"TERMINAL",0,0,"30",,terminal_output +3328,1985967,"TERMINAL",0,0,"1",,terminal_output +3329,1987069,"TERMINAL",0,0,"2",,terminal_output +3330,1988000,"TERMINAL",0,0,"3",,terminal_output +3331,1989084,"TERMINAL",0,0,"4",,terminal_output +3332,1990060,"TERMINAL",0,0,"5",,terminal_output +3333,1991064,"TERMINAL",0,0,"6",,terminal_output +3334,1992183,"TERMINAL",0,0,"7",,terminal_output +3335,1993135,"TERMINAL",0,0,"8",,terminal_output +3336,1994126,"TERMINAL",0,0,"9",,terminal_output +3337,1995246,"TERMINAL",0,0,"40",,terminal_output +3338,1996298,"TERMINAL",0,0,"1",,terminal_output +3339,1997213,"TERMINAL",0,0,"2",,terminal_output +3340,1998396,"TERMINAL",0,0,"3",,terminal_output +3341,1999237,"TERMINAL",0,0,"4",,terminal_output +3342,2000340,"TERMINAL",0,0,"5",,terminal_output +3343,2001321,"TERMINAL",0,0,"6",,terminal_output +3344,2002453,"TERMINAL",0,0,"7",,terminal_output +3345,2003490,"TERMINAL",0,0,"8",,terminal_output +3346,2004353,"TERMINAL",0,0,"9",,terminal_output +3347,2005384,"TERMINAL",0,0,"50",,terminal_output +3348,2006388,"TERMINAL",0,0,"1",,terminal_output +3349,2007679,"TERMINAL",0,0,"2",,terminal_output +3350,2008418,"TERMINAL",0,0,"3",,terminal_output +3351,2009548,"TERMINAL",0,0,"4",,terminal_output +3352,2010484,"TERMINAL",0,0,"5",,terminal_output +3353,2011583,"TERMINAL",0,0,"6",,terminal_output +3354,2012637,"TERMINAL",0,0,"8",,terminal_output +3355,2013702,"TERMINAL",0,0,"9",,terminal_output +3356,2014714,"TERMINAL",0,0,"1:00",,terminal_output +3357,2015704,"TERMINAL",0,0,"1",,terminal_output +3358,2016849,"TERMINAL",0,0,"2",,terminal_output +3359,2017757,"TERMINAL",0,0,"3",,terminal_output +3360,2018776,"TERMINAL",0,0,"4",,terminal_output +3361,2019847,"TERMINAL",0,0,"5",,terminal_output +3362,2020679,"TERMINAL",0,0,"6",,terminal_output +3363,2021781,"TERMINAL",0,0,"7",,terminal_output +3364,2022913,"TERMINAL",0,0,"8",,terminal_output +3365,2024120,"TERMINAL",0,0,"9",,terminal_output +3366,2025108,"TERMINAL",0,0,"10",,terminal_output +3367,2026053,"TERMINAL",0,0,"1",,terminal_output +3368,2027057,"TERMINAL",0,0,"2",,terminal_output +3369,2028015,"TERMINAL",0,0,"3",,terminal_output +3370,2028895,"TERMINAL",0,0,"4",,terminal_output +3371,2030206,"TERMINAL",0,0,"5",,terminal_output 
+3372,2031263,"TERMINAL",0,0,"6",,terminal_output +3373,2031951,"TERMINAL",0,0,"7",,terminal_output +3374,2032929,"TERMINAL",0,0,"8",,terminal_output +3375,2033976,"TERMINAL",0,0,"9",,terminal_output +3376,2034971,"TERMINAL",0,0,"20",,terminal_output +3377,2036003,"TERMINAL",0,0,"1",,terminal_output +3378,2036999,"TERMINAL",0,0,"2",,terminal_output +3379,2038031,"TERMINAL",0,0,"3",,terminal_output +3380,2039165,"TERMINAL",0,0,"4",,terminal_output +3381,2040135,"TERMINAL",0,0,"5",,terminal_output +3382,2041121,"TERMINAL",0,0,"6",,terminal_output +3383,2042149,"TERMINAL",0,0,"7",,terminal_output +3384,2043696,"TERMINAL",0,0,"8",,terminal_output +3385,2044679,"TERMINAL",0,0,"9",,terminal_output +3386,2045811,"TERMINAL",0,0,"30",,terminal_output +3387,2046598,"TERMINAL",0,0,"1",,terminal_output +3388,2047501,"TERMINAL",0,0,"2",,terminal_output +3389,2048390,"TERMINAL",0,0,"3",,terminal_output +3390,2049401,"TERMINAL",0,0,"4",,terminal_output +3391,2050389,"TERMINAL",0,0,"5",,terminal_output +3392,2051334,"TERMINAL",0,0,"6",,terminal_output +3393,2052424,"TERMINAL",0,0,"7",,terminal_output +3394,2053342,"TERMINAL",0,0,"8",,terminal_output +3395,2054361,"TERMINAL",0,0,"9",,terminal_output +3396,2055390,"TERMINAL",0,0,"40",,terminal_output +3397,2056434,"TERMINAL",0,0,"1",,terminal_output +3398,2057472,"TERMINAL",0,0,"2",,terminal_output +3399,2058438,"TERMINAL",0,0,"3",,terminal_output +3400,2059470,"TERMINAL",0,0,"4",,terminal_output +3401,2060484,"TERMINAL",0,0,"5",,terminal_output +3402,2061538,"TERMINAL",0,0,"7",,terminal_output +3403,2062557,"TERMINAL",0,0,"8",,terminal_output +3404,2063555,"TERMINAL",0,0,"9",,terminal_output +3405,2064681,"TERMINAL",0,0,"50",,terminal_output +3406,2065678,"TERMINAL",0,0,"1",,terminal_output +3407,2066822,"TERMINAL",0,0,"2",,terminal_output +3408,2066911,"TERMINAL",0,0,"Step 200000, validation loss: 4.311364650726318\r\n{'val_codebook_usage_lam': np.float64(1.0), 'val_codebook_usage_tokenizer': np.float64(0.8475988051470589), 'val_cross_entropy_loss': np.float64(4.3113649312187645), 'val_entropy': np.float64(4.482920328776042), 'val_masked_token_top16_accuracy': np.float64(0.5900948678745943), 'val_masked_token_top1_accuracy': np.float64(0.24632565764819875), 'val_masked_token_top2_accuracy': np.float64(0.33755794225954544), 'val_masked_token_top5_accuracy': np.float64(0.4589417284610225), 'val_psnr': np.float64(12.573247198965035), 'val_select_logit': np.float64(6.313630515453863), 'val_select_p': np.float64(0.16665662007004606), 'val_ssim': np.float64(0.3333607175770928), 'val_total_loss': np.float64(4.3113649312187645), 'val_z_loss': np.float64(77.57380646350337), 'val_loss': np.float32(4.3113647), 'val_full_frame_codebook_usage_lam': np.float64(1.0), 'val_full_frame_codebook_usage_tokenizer': np.float64(0.2629250919117647), 'val_full_frame_cross_entropy_loss': np.float64(20.045630099726658), 'val_full_frame_entropy': np.float64(1.0269374473422181), 'val_full_frame_masked_token_top16_accuracy': np.float64(0.7251559460864347), 'val_full_frame_masked_token_top1_accuracy': np.float64(0.38186273446270064), 'val_full_frame_masked_token_top2_accuracy': np.float64(0.47075533224087135), 'val_full_frame_masked_token_top5_accuracy': np.float64(0.5903074694614784), 'val_full_frame_psnr': np.float64(16.088213939292757), 'val_full_frame_select_logit': np.float64(84.10442965638404), 'val_full_frame_select_p': np.float64(0.7601301389582017), 'val_full_frame_ssim': np.float64(0.5480527725874209), 'val_full_frame_z_loss': np.float64(108920.47901348039), 
'val_full_frame_loss': np.float32(20.045631)}\r\n",,terminal_output +3409,2067292,"TERMINAL",0,0,"W1008 23:11:52.705980 1289107 pjrt_client.cc:1469] WatchJobStateAsync failed for task goo.gle/debugproto job_name: ""jax_worker"": UNAVAILABLE: failed to connect to all addresses; last error: UNKNOWN: ipv4:10.0.1.33:64365: Failed to connect to remote host: Connection refused\r\nAdditional GRPC error information from remote target coordination_service while calling /tensorflow.CoordinationService/WatchJobState:\r\n:UNKNOWN:Error received from peer {grpc_message:""failed to connect to all addresses; last error: UNKNOWN: ipv4:10.0.1.33:64365: Failed to connect to remote host: Connection refused"", grpc_status:14}\r\n",,terminal_output +3410,2067669,"TERMINAL",0,0,"/usr/lib64/python3.12/multiprocessing/resource_tracker.py:254: UserWarning: resource_tracker: There appear to be 3 leaked shared_memory objects to clean up at shutdown\r\n warnings.warn('resource_tracker: There appear to be %d '\r\n",,terminal_output +3411,2067719,"TERMINAL",0,0,"3",,terminal_output +3412,2067903,"TERMINAL",0,0,"]0;tum_cte0515@hkn0401:~/Projects/jasmine[?2004h(jasmine) [tum_cte0515@hkn0401 jasmine]$ ",,terminal_output +3413,2068769,"TERMINAL",0,0,"4",,terminal_output +3414,2069725,"TERMINAL",0,0,"5",,terminal_output +3415,2070775,"TERMINAL",0,0,"6",,terminal_output +3416,2071785,"TERMINAL",0,0,"7",,terminal_output +3417,2072899,"TERMINAL",0,0,"8",,terminal_output +3418,2074058,"TERMINAL",0,0,"9",,terminal_output +3419,2074963,"TERMINAL",0,0,"2:00",,terminal_output +3420,2076027,"TERMINAL",0,0,"1",,terminal_output +3421,2077048,"TERMINAL",0,0,"2",,terminal_output +3422,2077849,"TERMINAL",0,0,"3",,terminal_output +3423,2079243,"TERMINAL",0,0,"4",,terminal_output +3424,2080250,"TERMINAL",0,0,"5",,terminal_output +3425,2081494,"TERMINAL",0,0,"6",,terminal_output +3426,2082461,"TERMINAL",0,0,"7",,terminal_output +3427,2082745,"appendix_c_nodes.md",0,0,"",markdown,tab +3428,2083151,"TERMINAL",0,0,"8",,terminal_output +3429,2083639,"appendix_c_nodes.md",3558,0,"\n",markdown,content +3430,2084000,"appendix_c_nodes.md",3559,0,"Step 200000, validation loss: 4.311364650726318\n{'val_codebook_usage_lam': np.float64(1.0), 'val_codebook_usage_tokenizer': np.float64(0.8475988051470589), 'val_cross_entropy_loss': np.float64(4.3113649312187645), 'val_entropy': np.float64(4.482920328776042), 'val_masked_token_top16_accuracy': np.float64(0.5900948678745943), 'val_masked_token_top1_accuracy': np.float64(0.24632565764819875), 'val_masked_token_top2_accuracy': np.float64(0.33755794225954544), 'val_masked_token_top5_accuracy': np.float64(0.4589417284610225), 'val_psnr': np.float64(12.573247198965035), 'val_select_logit': np.float64(6.313630515453863), 'val_select_p': np.float64(0.16665662007004606), 'val_ssim': np.float64(0.3333607175770928), 'val_total_loss': np.float64(4.3113649312187645), 'val_z_loss': np.float64(77.57380646350337), 'val_loss': np.float32(4.3113647), 'val_full_frame_codebook_usage_lam': np.float64(1.0), 'val_full_frame_codebook_usage_tokenizer': np.float64(0.2629250919117647), 'val_full_frame_cross_entropy_loss': np.float64(20.045630099726658), 'val_full_frame_entropy': np.float64(1.0269374473422181), 'val_full_frame_masked_token_top16_accuracy': np.float64(0.7251559460864347), 'val_full_frame_masked_token_top1_accuracy': np.float64(0.38186273446270064), 'val_full_frame_masked_token_top2_accuracy': np.float64(0.47075533224087135), 'val_full_frame_masked_token_top5_accuracy': np.float64(0.5903074694614784), 
'val_full_frame_psnr': np.float64(16.088213939292757), 'val_full_frame_select_logit': np.float64(84.10442965638404), 'val_full_frame_select_p': np.float64(0.7601301389582017), 'val_full_frame_ssim': np.float64(0.5480527725874209), 'val_full_frame_z_loss': np.float64(108920.47901348039), 'val_full_frame_loss': np.float32(20.045631)}",markdown,content +3431,2084281,"TERMINAL",0,0,"9",,terminal_output +3432,2085191,"TERMINAL",0,0,"10",,terminal_output +3433,2086528,"TERMINAL",0,0,"1",,terminal_output +3434,2087095,"TERMINAL",0,0,"sh slurm/jobs/mihir/horeka/coinrun/ablations/appendix-c.sh",,terminal_output +3435,2087861,"TERMINAL",0,0,"2",,terminal_output +3436,2088051,"jasmine/models/dynamics.py",0,0,"",python,tab +3437,2088828,"TERMINAL",0,0,"3",,terminal_output +3438,2089484,"jasmine/models/dynamics.py",2308,1,"",python,content +3439,2089535,"jasmine/models/dynamics.py",2308,0,"5",python,content +3440,2089536,"jasmine/models/dynamics.py",2309,0,"",python,selection_keyboard +3441,2089854,"TERMINAL",0,0,"4",,terminal_output +3442,2090601,"TERMINAL",0,0,"5",,terminal_output +3443,2091559,"TERMINAL",0,0,"6",,terminal_output +3444,2093749,"TERMINAL",0,0,"7",,terminal_output +3445,2094482,"TERMINAL",0,0,"\r\n[?2004l\r\r\n# Log the sbatch script\r\ncat $0\r\n\r\nmodule unload mpi/openmpi/5.0\r\nmodule unload devel/cuda/12.4\r\nsource .venv/bin/activate\r\n\r\narray_records_dir_train=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_coinrun/coinrun_episodes_10m_gt_actions_split/train\r\narray_records_dir_val=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_coinrun/coinrun_episodes_10m_gt_actions_split/val\r\n\r\njob_name=$SLURM_JOB_NAME\r\nslurm_job_id=$SLURM_JOB_ID\r\n\r\n# CHECKPOINT_DIR=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/coinrun/maskgit/$job_name/$slurm_job_id\r\n\r\n# main\r\nCHECKPOINT_DIR=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/coinrun/maskgit/train_dynamics_maskgit/3533426\r\n\r\nmkdir -p $CHECKPOINT_DIR\r\n\r\ntokenizer_ckpt_dir=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/big-runs/tokenizer/train_tokenizer_default/3528955\r\n\r\nenv | grep SLURM\r\n\r\nsrun python jasmine/train_dynamics_appendix-c.py \\r\n --save_ckpt \\r\n --restore_ckpt \\r\n --wandb_id $SLURM_JOB_ID \\r\n --ckpt_dir $CHECKPOINT_DIR \\r\n --batch_size=110 \\r\n --image_height=64 \\r\n --image_width=64 \\r\n --patch_size=16 \\r\n --max_lr=3e-5 \\r\n --no-log \\r\n --no-eval-full-frame \\r\n --tokenizer_checkpoint=$tokenizer_ckpt_dir \\r\n --eval_full_frame \\r\n --data_dir $array_records_dir_train \\r\n --val_data_dir $array_records_dir_val 
\r\nSLURM_STEP_NUM_TASKS=1\r\nSLURM_JOB_USER=tum_cte0515\r\nSLURM_TASKS_PER_NODE=1\r\nSLURM_JOB_UID=999226\r\nSLURM_TASK_PID=1279162\r\nSLURM_JOB_GPUS=0\r\nSLURM_LOCALID=0\r\nSLURM_SUBMIT_DIR=/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine\r\nSLURMD_NODENAME=hkn0401\r\nSLURM_JOB_START_TIME=1759955924\r\nSLURM_STEP_NODELIST=hkn0401\r\nSLURM_CLUSTER_NAME=hk\r\nSLURM_JOB_END_TIME=1759959524\r\nSLURM_PMI2_SRUN_PORT=34473\r\nSLURM_CPUS_ON_NODE=8\r\nSLURM_JOB_CPUS_PER_NODE=8\r\nSLURM_GPUS_ON_NODE=1\r\nSLURM_GTIDS=0\r\nSLURM_JOB_PARTITION=dev_accelerated\r\nSLURM_TRES_PER_TASK=cpu=8\r\nSLURM_OOM_KILL_STEP=0\r\nSLURM_JOB_NUM_NODES=1\r\nSLURM_STEPID=4294967290\r\nSLURM_JOBID=3554157\r\nSLURM_PTY_PORT=43479\r\nSLURM_JOB_QOS=normal\r\nSLURM_LAUNCH_NODE_IPADDR=10.0.7.201\r\nSLURM_PTY_WIN_ROW=35\r\nSLURM_PMI2_PROC_MAPPING=(vector,(0,1,1))\r\nSLURMD_DEBUG=2\r\nSLURM_PROCID=0\r\nSLURM_CPUS_PER_TASK=8\r\nSLURM_TOPOLOGY_ADDR=hkibb.hkibbi1.hkibbi1e11.hkn0401\r\nSLURM_TOPOLOGY_ADDR_PATTERN=switch.switch.switch.node\r\nSLURM_SRUN_COMM_HOST=10.0.7.201\r\nSLURM_SCRIPT_CONTEXT=prolog_task\r\nSLURM_PTY_WIN_COL=181\r\nSLURM_NODELIST=hkn0401\r\nSLURM_SRUN_COMM_PORT=33897\r\nSLURM_STEP_ID=4294967290\r\nSLURM_JOB_ACCOUNT=hk-project-p0023960\r\nSLURM_PRIO_PROCESS=0\r\nSLURM_NNODES=1\r\nSLURM_SUBMIT_HOST=hkn1993.localdomain\r\nSLURM_JOB_ID=3554157\r\nSLURM_NODEID=0\r\nSLURM_STEP_NUM_NODES=1\r\nSLURM_STEP_TASKS_PER_NODE=1\r\nSLURM_MPI_TYPE=pmi2\r\nSLURM_PMI2_STEP_NODES=hkn0401\r\nSLURM_CONF=/etc/slurm/slurm.conf\r\nSLURM_JOB_NAME=interactive\r\nSLURM_STEP_LAUNCHER_PORT=33897\r\nSLURM_JOB_GID=502226\r\nSLURM_JOB_NODELIST=hkn0401\r\nGpuFreq=control_disabled\r\n",,terminal_output +3446,2094482,"TERMINAL",0,0,"8",,terminal_output +3447,2096375,"TERMINAL",0,0,"9",,terminal_output +3448,2098358,"TERMINAL",0,0,"2012",,terminal_output +3449,2098832,"TERMINAL",0,0,"3",,terminal_output +3450,2099437,"TERMINAL",0,0,"4",,terminal_output +3451,2100671,"TERMINAL",0,0,"5",,terminal_output +3452,2101520,"TERMINAL",0,0,"6",,terminal_output +3453,2102560,"TERMINAL",0,0,"7",,terminal_output +3454,2103740,"TERMINAL",0,0,"WARNING:absl:Path /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/coinrun/maskgit/train_dynamics_maskgit/3533426/040000 could not be identified as a temporary checkpoint path using . Got error: Expected AtomicRenameTemporaryPath (/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/coinrun/maskgit/train_dynamics_maskgit/3533426/040000) to end with "".orbax-checkpoint-tmp"".\r\nWARNING:absl:Path /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/coinrun/maskgit/train_dynamics_maskgit/3533426/200000 could not be identified as a temporary checkpoint path using . Got error: Expected AtomicRenameTemporaryPath (/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/coinrun/maskgit/train_dynamics_maskgit/3533426/200000) to end with "".orbax-checkpoint-tmp"".\r\nWARNING:absl:Path /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/coinrun/maskgit/train_dynamics_maskgit/3533426/080000 could not be identified as a temporary checkpoint path using . 
Got error: Expected AtomicRenameTemporaryPath (/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/coinrun/maskgit/train_dynamics_maskgit/3533426/080000) to end with "".orbax-checkpoint-tmp"".\r\nWARNING:absl:Path /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/coinrun/maskgit/train_dynamics_maskgit/3533426/120000 could not be identified as a temporary checkpoint path using . Got error: Expected AtomicRenameTemporaryPath (/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/coinrun/maskgit/train_dynamics_maskgit/3533426/120000) to end with "".orbax-checkpoint-tmp"".\r\nWARNING:absl:Path /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/coinrun/maskgit/train_dynamics_maskgit/3533426/140000 could not be identified as a temporary checkpoint path using . Got error: Expected AtomicRenameTemporaryPath (/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/coinrun/maskgit/train_dynamics_maskgit/3533426/140000) to end with "".orbax-checkpoint-tmp"".\r\nWARNING:absl:Path /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/coinrun/maskgit/train_dynamics_maskgit/3533426/180000 could not be identified as a temporary checkpoint path using . Got error: Expected AtomicRenameTemporaryPath (/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/coinrun/maskgit/train_dynamics_maskgit/3533426/180000) to end with "".orbax-checkpoint-tmp"".\r\nWARNING:absl:Path /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/coinrun/maskgit/train_dynamics_maskgit/3533426/060000 could not be identified as a temporary checkpoint path using . Got error: Expected AtomicRenameTemporaryPath (/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/coinrun/maskgit/train_dynamics_maskgit/3533426/060000) to end with "".orbax-checkpoint-tmp"".\r\nWARNING:absl:Path /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/coinrun/maskgit/train_dynamics_maskgit/3533426/198000 could not be identified as a temporary checkpoint path using . Got error: Expected AtomicRenameTemporaryPath (/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/coinrun/maskgit/train_dynamics_maskgit/3533426/198000) to end with "".orbax-checkpoint-tmp"".\r\nWARNING:absl:Path /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/coinrun/maskgit/train_dynamics_maskgit/3533426/199000 could not be identified as a temporary checkpoint path using . Got error: Expected AtomicRenameTemporaryPath (/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/coinrun/maskgit/train_dynamics_maskgit/3533426/199000) to end with "".orbax-checkpoint-tmp"".\r\nWARNING:absl:Path /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/coinrun/maskgit/train_dynamics_maskgit/3533426/100000 could not be identified as a temporary checkpoint path using . Got error: Expected AtomicRenameTemporaryPath (/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/coinrun/maskgit/train_dynamics_maskgit/3533426/100000) to end with "".orbax-checkpoint-tmp"".\r\nWARNING:absl:Path /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/coinrun/maskgit/train_dynamics_maskgit/3533426/160000 could not be identified as a temporary checkpoint path using . 
Got error: Expected AtomicRenameTemporaryPath (/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/coinrun/maskgit/train_dynamics_maskgit/3533426/160000) to end with "".orbax-checkpoint-tmp"".\r\nWARNING:absl:Path /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/coinrun/maskgit/train_dynamics_maskgit/3533426/020000 could not be identified as a temporary checkpoint path using . Got error: Expected AtomicRenameTemporaryPath (/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/coinrun/maskgit/train_dynamics_maskgit/3533426/020000) to end with "".orbax-checkpoint-tmp"".\r\n",,terminal_output +3455,2103742,"TERMINAL",0,0,"8",,terminal_output +3456,2104703,"TERMINAL",0,0,"9",,terminal_output +3457,2105765,"TERMINAL",0,0,"30",,terminal_output +3458,2106550,"TERMINAL",0,0,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/.venv/lib64/python3.12/site-packages/orbax/checkpoint/_src/serialization/type_handlers.py:1269: UserWarning: Sharding info not provided when restoring. Populating sharding info from sharding file. Please note restoration time will be slightly increased due to reading from file. Note also that this option is unsafe when restoring on a different topology than the checkpoint was saved with.\r\n warnings.warn(\r\n",,terminal_output +3459,2106866,"TERMINAL",0,0,"1",,terminal_output +3460,2107827,"TERMINAL",0,0,"2",,terminal_output +3461,2108555,"TERMINAL",0,0,"3",,terminal_output +3462,2109658,"TERMINAL",0,0,"4",,terminal_output +3463,2110737,"TERMINAL",0,0,"5",,terminal_output +3464,2111587,"TERMINAL",0,0,"7",,terminal_output +3465,2112582,"TERMINAL",0,0,"8",,terminal_output +3466,2113641,"TERMINAL",0,0,"9",,terminal_output +3467,2113642,"TERMINAL",0,0,"Running on 1 devices.\r\nCounting all components: ['dynamics', 'lam', 'tokenizer']\r\nParameter counts:\r\n{'dynamics': 26555904, 'lam': 17640416, 'tokenizer': 34489696, 'total': 78686016}\r\nRestored dataloader and model state from step 200000\r\nStarting training from step 200000...\r\nCalculating validation metrics...\r\n",,terminal_output +3468,2114600,"TERMINAL",0,0,"40",,terminal_output +3469,2115650,"TERMINAL",0,0,"1",,terminal_output +3470,2116639,"TERMINAL",0,0,"2",,terminal_output +3471,2117667,"TERMINAL",0,0,"3",,terminal_output +3472,2118683,"TERMINAL",0,0,"4",,terminal_output +3473,2119718,"TERMINAL",0,0,"5",,terminal_output +3474,2120739,"TERMINAL",0,0,"6",,terminal_output +3475,2121747,"TERMINAL",0,0,"7",,terminal_output +3476,2122780,"TERMINAL",0,0,"8",,terminal_output +3477,2123868,"TERMINAL",0,0,"9",,terminal_output +3478,2124878,"TERMINAL",0,0,"50",,terminal_output +3479,2125821,"TERMINAL",0,0,"1",,terminal_output +3480,2126865,"TERMINAL",0,0,"2",,terminal_output +3481,2127891,"TERMINAL",0,0,"3",,terminal_output +3482,2128867,"TERMINAL",0,0,"4",,terminal_output +3483,2129890,"TERMINAL",0,0,"5",,terminal_output +3484,2131003,"TERMINAL",0,0,"6",,terminal_output +3485,2131982,"TERMINAL",0,0,"7",,terminal_output +3486,2132949,"TERMINAL",0,0,"8",,terminal_output +3487,2133972,"TERMINAL",0,0,"9",,terminal_output +3488,2135049,"TERMINAL",0,0,"3:00",,terminal_output +3489,2136039,"TERMINAL",0,0,"1",,terminal_output +3490,2137048,"TERMINAL",0,0,"2",,terminal_output +3491,2138064,"TERMINAL",0,0,"3",,terminal_output +3492,2139172,"TERMINAL",0,0,"4",,terminal_output +3493,2140098,"TERMINAL",0,0,"5",,terminal_output +3494,2141184,"TERMINAL",0,0,"6",,terminal_output +3495,2142197,"TERMINAL",0,0,"7",,terminal_output 
+3496,2143221,"TERMINAL",0,0,"8",,terminal_output +3497,2144298,"TERMINAL",0,0,"9",,terminal_output +3498,2145284,"TERMINAL",0,0,"10",,terminal_output +3499,2146224,"TERMINAL",0,0,"1",,terminal_output +3500,2147251,"TERMINAL",0,0,"2",,terminal_output +3501,2148262,"TERMINAL",0,0,"3",,terminal_output +3502,2149296,"TERMINAL",0,0,"4",,terminal_output +3503,2150320,"TERMINAL",0,0,"5",,terminal_output +3504,2151345,"TERMINAL",0,0,"6",,terminal_output +3505,2152372,"TERMINAL",0,0,"7",,terminal_output +3506,2153392,"TERMINAL",0,0,"8",,terminal_output +3507,2154393,"TERMINAL",0,0,"9",,terminal_output +3508,2155416,"TERMINAL",0,0,"20",,terminal_output +3509,2156461,"TERMINAL",0,0,"1",,terminal_output +3510,2157455,"TERMINAL",0,0,"2",,terminal_output +3511,2158476,"TERMINAL",0,0,"3",,terminal_output +3512,2159564,"TERMINAL",0,0,"4",,terminal_output +3513,2160736,"TERMINAL",0,0,"6",,terminal_output +3514,2161631,"TERMINAL",0,0,"7",,terminal_output +3515,2162589,"TERMINAL",0,0,"8",,terminal_output +3516,2163630,"TERMINAL",0,0,"9",,terminal_output +3517,2164612,"TERMINAL",0,0,"30",,terminal_output +3518,2165625,"TERMINAL",0,0,"1",,terminal_output +3519,2166660,"TERMINAL",0,0,"2",,terminal_output +3520,2167678,"TERMINAL",0,0,"3",,terminal_output +3521,2168727,"TERMINAL",0,0,"4",,terminal_output +3522,2169714,"TERMINAL",0,0,"5",,terminal_output +3523,2170737,"TERMINAL",0,0,"6",,terminal_output +3524,2171862,"TERMINAL",0,0,"7",,terminal_output +3525,2172790,"TERMINAL",0,0,"8",,terminal_output +3526,2173834,"TERMINAL",0,0,"9",,terminal_output +3527,2174852,"TERMINAL",0,0,"40",,terminal_output +3528,2175863,"TERMINAL",0,0,"1",,terminal_output +3529,2176889,"TERMINAL",0,0,"2",,terminal_output +3530,2177948,"TERMINAL",0,0,"3",,terminal_output +3531,2179048,"TERMINAL",0,0,"4",,terminal_output +3532,2180039,"TERMINAL",0,0,"5",,terminal_output +3533,2181158,"TERMINAL",0,0,"6",,terminal_output +3534,2182235,"TERMINAL",0,0,"7",,terminal_output +3535,2183098,"TERMINAL",0,0,"8",,terminal_output +3536,2184090,"TERMINAL",0,0,"9",,terminal_output +3537,2185577,"TERMINAL",0,0,"50",,terminal_output +3538,2186099,"TERMINAL",0,0,"1",,terminal_output +3539,2187262,"TERMINAL",0,0,"2",,terminal_output +3540,2188159,"TERMINAL",0,0,"3",,terminal_output +3541,2189383,"TERMINAL",0,0,"4",,terminal_output +3542,2190226,"TERMINAL",0,0,"5",,terminal_output +3543,2191223,"TERMINAL",0,0,"6",,terminal_output +3544,2192547,"TERMINAL",0,0,"7",,terminal_output +3545,2193413,"TERMINAL",0,0,"8",,terminal_output +3546,2194228,"TERMINAL",0,0,"9",,terminal_output +3547,2195272,"TERMINAL",0,0,"4:00",,terminal_output +3548,2196270,"TERMINAL",0,0,"1",,terminal_output +3549,2197310,"TERMINAL",0,0,"2",,terminal_output +3550,2198302,"TERMINAL",0,0,"3",,terminal_output +3551,2199323,"TERMINAL",0,0,"4",,terminal_output +3552,2200345,"TERMINAL",0,0,"5",,terminal_output +3553,2201368,"TERMINAL",0,0,"6",,terminal_output +3554,2202391,"TERMINAL",0,0,"7",,terminal_output +3555,2203205,"TERMINAL",0,0,"Step 200000, validation loss: 6.960869312286377\r\n{'val_codebook_usage_lam': np.float64(1.0), 'val_codebook_usage_tokenizer': np.float64(0.8475988051470589), 'val_cross_entropy_loss': np.float64(6.960869602128571), 'val_entropy': np.float64(5.414477582071342), 'val_masked_token_top16_accuracy': np.float64(0.18385726447198905), 'val_masked_token_top1_accuracy': np.float64(0.0353936998940566), 'val_masked_token_top2_accuracy': np.float64(0.05846153243499644), 'val_masked_token_top5_accuracy': np.float64(0.10274799825514064), 'val_psnr': 
np.float64(10.914788994134641), 'val_select_logit': np.float64(5.507573454987769), 'val_select_p': np.float64(0.07029049901985655), 'val_ssim': np.float64(0.2510850712949154), 'val_total_loss': np.float64(6.960869602128571), 'val_z_loss': np.float64(69.57815611596202), 'val_loss': np.float32(6.9608693), 'val_full_frame_codebook_usage_lam': np.float64(1.0), 'val_full_frame_codebook_usage_tokenizer': np.float64(0.2629250919117647), 'val_full_frame_cross_entropy_loss': np.float64(20.045630099726658), 'val_full_frame_entropy': np.float64(1.0269374473422181), 'val_full_frame_masked_token_top16_accuracy': np.float64(0.7251559460864347), 'val_full_frame_masked_token_top1_accuracy': np.float64(0.38186273446270064), 'val_full_frame_masked_token_top2_accuracy': np.float64(0.47075533224087135), 'val_full_frame_masked_token_top5_accuracy': np.float64(0.5903074694614784), 'val_full_frame_psnr': np.float64(16.088213939292757), 'val_full_frame_select_logit': np.float64(84.10442965638404), 'val_full_frame_select_p': np.float64(0.7601301389582017), 'val_full_frame_ssim': np.float64(0.5480527725874209), 'val_full_frame_z_loss': np.float64(108920.47901348039), 'val_full_frame_loss': np.float32(20.045631)}\r\n",,terminal_output +3556,2203409,"TERMINAL",0,0,"8",,terminal_output +3557,2204432,"TERMINAL",0,0,"9",,terminal_output +3558,2205453,"TERMINAL",0,0,"10",,terminal_output +3559,2206473,"TERMINAL",0,0,"1",,terminal_output +3560,2207491,"TERMINAL",0,0,"2",,terminal_output +3561,2208664,"TERMINAL",0,0,"4",,terminal_output +3562,2208664,"TERMINAL",0,0,"W1008 23:14:14.066723 1290958 pjrt_client.cc:1469] WatchJobStateAsync failed for task goo.gle/debugonly job_name: ""jax_worker"": UNAVAILABLE: Cancelling all calls\r\nAdditional GRPC error information from remote target coordination_service while calling /tensorflow.CoordinationService/WatchJobState:\r\n:UNKNOWN:Error received from peer {grpc_message:""Cancelling all calls"", grpc_status:14}\r\n",,terminal_output +3563,2208964,"TERMINAL",0,0,"/usr/lib64/python3.12/multiprocessing/resource_tracker.py:254: UserWarning: resource_tracker: There appear to be 8 leaked shared_memory objects to clean up at shutdown\r\n warnings.warn('resource_tracker: There appear to be %d '\r\n",,terminal_output +3564,2209248,"TERMINAL",0,0,"]0;tum_cte0515@hkn0401:~/Projects/jasmine[?2004h(jasmine) [tum_cte0515@hkn0401 jasmine]$ ",,terminal_output +3565,2209539,"TERMINAL",0,0,"5",,terminal_output +3566,2210628,"TERMINAL",0,0,"6",,terminal_output +3567,2211691,"TERMINAL",0,0,"7",,terminal_output +3568,2212593,"TERMINAL",0,0,"8",,terminal_output +3569,2213794,"TERMINAL",0,0,"9",,terminal_output +3570,2214834,"TERMINAL",0,0,"20",,terminal_output +3571,2215991,"TERMINAL",0,0,"1",,terminal_output +3572,2216861,"appendix_c_nodes.md",0,0,"",markdown,tab +3573,2216958,"TERMINAL",0,0,"2",,terminal_output +3574,2217847,"TERMINAL",0,0,"3",,terminal_output +3575,2217931,"appendix_c_nodes.md",5304,0,"\n",markdown,content +3576,2218076,"appendix_c_nodes.md",5305,0,"\n",markdown,content +3577,2218439,"appendix_c_nodes.md",5306,0,"\n",markdown,content +3578,2218756,"appendix_c_nodes.md",5307,0,"Step 200000, validation loss: 6.960869312286377\n{'val_codebook_usage_lam': np.float64(1.0), 'val_codebook_usage_tokenizer': np.float64(0.8475988051470589), 'val_cross_entropy_loss': np.float64(6.960869602128571), 'val_entropy': np.float64(5.414477582071342), 'val_masked_token_top16_accuracy': np.float64(0.18385726447198905), 'val_masked_token_top1_accuracy': np.float64(0.0353936998940566), 
'val_masked_token_top2_accuracy': np.float64(0.05846153243499644), 'val_masked_token_top5_accuracy': np.float64(0.10274799825514064), 'val_psnr': np.float64(10.914788994134641), 'val_select_logit': np.float64(5.507573454987769), 'val_select_p': np.float64(0.07029049901985655), 'val_ssim': np.float64(0.2510850712949154), 'val_total_loss': np.float64(6.960869602128571), 'val_z_loss': np.float64(69.57815611596202), 'val_loss': np.float32(6.9608693), 'val_full_frame_codebook_usage_lam': np.float64(1.0), 'val_full_frame_codebook_usage_tokenizer': np.float64(0.2629250919117647), 'val_full_frame_cross_entropy_loss': np.float64(20.045630099726658), 'val_full_frame_entropy': np.float64(1.0269374473422181), 'val_full_frame_masked_token_top16_accuracy': np.float64(0.7251559460864347), 'val_full_frame_masked_token_top1_accuracy': np.float64(0.38186273446270064), 'val_full_frame_masked_token_top2_accuracy': np.float64(0.47075533224087135), 'val_full_frame_masked_token_top5_accuracy': np.float64(0.5903074694614784), 'val_full_frame_psnr': np.float64(16.088213939292757), 'val_full_frame_select_logit': np.float64(84.10442965638404), 'val_full_frame_select_p': np.float64(0.7601301389582017), 'val_full_frame_ssim': np.float64(0.5480527725874209), 'val_full_frame_z_loss': np.float64(108920.47901348039), 'val_full_frame_loss': np.float32(20.045631)}",markdown,content +3579,2218774,"TERMINAL",0,0,"4",,terminal_output +3580,2219537,"appendix_c_nodes.md",7050,0,"",markdown,selection_command +3581,2219782,"TERMINAL",0,0,"5",,terminal_output +3582,2220438,"appendix_c_nodes.md",3547,0,"",markdown,selection_mouse +3583,2220850,"TERMINAL",0,0,"6",,terminal_output +3584,2221359,"appendix_c_nodes.md",3565,0,"",markdown,selection_command +3585,2221536,"appendix_c_nodes.md",3613,0,"",markdown,selection_command +3586,2221690,"appendix_c_nodes.md",5305,0,"",markdown,selection_command +3587,2221888,"TERMINAL",0,0,"7",,terminal_output +3588,2221967,"appendix_c_nodes.md",5306,0,"",markdown,selection_command +3589,2222302,"appendix_c_nodes.md",5306,0,"\n### noise std=0.2",markdown,content +3590,2222309,"appendix_c_nodes.md",5307,0,"",markdown,selection_command +3591,2222824,"TERMINAL",0,0,"8",,terminal_output +3592,2222853,"appendix_c_nodes.md",5306,0,"",markdown,selection_command +3593,2223226,"appendix_c_nodes.md",5306,1,"",markdown,content +3594,2223556,"appendix_c_nodes.md",5323,0,"",markdown,selection_command +3595,2223720,"appendix_c_nodes.md",5322,1,"",markdown,content +3596,2223916,"TERMINAL",0,0,"9",,terminal_output +3597,2224433,"appendix_c_nodes.md",5322,0,"5",markdown,content +3598,2224434,"appendix_c_nodes.md",5323,0,"",markdown,selection_keyboard +3599,2225210,"TERMINAL",0,0,"30",,terminal_output +3600,2225889,"TERMINAL",0,0,"1",,terminal_output +3601,2226906,"TERMINAL",0,0,"2",,terminal_output +3602,2227131,"appendix_c_nodes.md",24,0,"",markdown,selection_mouse +3603,2227939,"TERMINAL",0,0,"3",,terminal_output +3604,2228363,"appendix_c_nodes.md",7068,0,"",markdown,selection_mouse +3605,2228939,"TERMINAL",0,0,"4",,terminal_output +3606,2229236,"appendix_c_nodes.md",7068,0,"\n",markdown,content +3607,2229388,"appendix_c_nodes.md",7069,0,"\n",markdown,content +3608,2229969,"TERMINAL",0,0,"5",,terminal_output +3609,2230286,"appendix_c_nodes.md",7070,0,"#",markdown,content +3610,2230287,"appendix_c_nodes.md",7071,0,"",markdown,selection_keyboard +3611,2230414,"appendix_c_nodes.md",7071,0,"#",markdown,content +3612,2230415,"appendix_c_nodes.md",7072,0,"",markdown,selection_keyboard 
+3613,2230566,"appendix_c_nodes.md",7072,0," ",markdown,content +3614,2230567,"appendix_c_nodes.md",7073,0,"",markdown,selection_keyboard +3615,2231003,"TERMINAL",0,0,"6",,terminal_output +3616,2232148,"TERMINAL",0,0,"7",,terminal_output +3617,2233128,"TERMINAL",0,0,"8",,terminal_output +3618,2233532,"appendix_c_nodes.md",7073,0,"C",markdown,content +3619,2233533,"appendix_c_nodes.md",7074,0,"",markdown,selection_keyboard +3620,2234129,"TERMINAL",0,0,"9",,terminal_output +3621,2234606,"appendix_c_nodes.md",7074,0,"o",markdown,content +3622,2234607,"appendix_c_nodes.md",7075,0,"",markdown,selection_keyboard +3623,2234749,"appendix_c_nodes.md",7075,0,"n",markdown,content +3624,2234750,"appendix_c_nodes.md",7076,0,"",markdown,selection_keyboard +3625,2234840,"appendix_c_nodes.md",7076,0,"c",markdown,content +3626,2234841,"appendix_c_nodes.md",7077,0,"",markdown,selection_keyboard +3627,2235023,"appendix_c_nodes.md",7077,0,"a",markdown,content +3628,2235024,"appendix_c_nodes.md",7078,0,"",markdown,selection_keyboard +3629,2235116,"appendix_c_nodes.md",7078,0,"t",markdown,content +3630,2235117,"appendix_c_nodes.md",7079,0,"",markdown,selection_keyboard +3631,2235117,"TERMINAL",0,0,"40",,terminal_output +3632,2236133,"TERMINAL",0,0,"1",,terminal_output +3633,2237116,"TERMINAL",0,0,"2",,terminal_output +3634,2238205,"TERMINAL",0,0,"3",,terminal_output +3635,2239315,"jasmine/train_dynamics_appendix-c.py",0,0,"",python,tab +3636,2239445,"TERMINAL",0,0,"4",,terminal_output +3637,2240445,"TERMINAL",0,0,"5",,terminal_output +3638,2241337,"TERMINAL",0,0,"6",,terminal_output +3639,2242346,"TERMINAL",0,0,"7",,terminal_output +3640,2243445,"TERMINAL",0,0,"8",,terminal_output +3641,2244598,"TERMINAL",0,0,"9",,terminal_output +3642,2244907,"jasmine/models/dynamics.py",0,0,"",python,tab +3643,2245386,"TERMINAL",0,0,"50",,terminal_output +3644,2246006,"jasmine/models/dynamics.py",2308,0,"",python,selection_command +3645,2246401,"TERMINAL",0,0,"1",,terminal_output +3646,2246638,"jasmine/models/dynamics.py",2283,71," noise_stddev = 0.5 # Standard deviation for the Gaussian noise",python,selection_command +3647,2246863,"jasmine/models/dynamics.py",2283,161," noise_stddev = 0.5 # Standard deviation for the Gaussian noise\n noise = jax.random.normal(batch[""mask_rng""], vid_embed_BTNM.shape) * noise_stddev",python,selection_command +3648,2247034,"jasmine/models/dynamics.py",2283,193," noise_stddev = 0.5 # Standard deviation for the Gaussian noise\n noise = jax.random.normal(batch[""mask_rng""], vid_embed_BTNM.shape) * noise_stddev\n vid_embed_BTNM += noise",python,selection_command +3649,2247367,"TERMINAL",0,0,"2",,terminal_output +3650,2248402,"TERMINAL",0,0,"3",,terminal_output +3651,2248750,"jasmine/models/dynamics.py",2283,194,"",python,content +3652,2249386,"TERMINAL",0,0,"4",,terminal_output +3653,2250376,"TERMINAL",0,0,"5",,terminal_output +3654,2251004,"appendix_c_nodes.md",0,0,"",markdown,tab +3655,2251428,"TERMINAL",0,0,"6",,terminal_output +3656,2252422,"TERMINAL",0,0,"7",,terminal_output +3657,2252550,"appendix_c_nodes.md",17,0,"",markdown,selection_mouse +3658,2253272,"appendix_c_nodes.md",16,0,"",markdown,selection_command +3659,2253458,"TERMINAL",0,0,"8",,terminal_output +3660,2253761,"appendix_c_nodes.md",0,0,"\n",markdown,content +3661,2254040,"appendix_c_nodes.md",1,0,"\n",markdown,content +3662,2254330,"appendix_c_nodes.md",1,0,"",markdown,selection_command +3663,2254482,"TERMINAL",0,0,"9",,terminal_output +3664,2254642,"appendix_c_nodes.md",1,0,"Step 200000, validation loss: 
6.960869312286377\n{'val_codebook_usage_lam': np.float64(1.0), 'val_codebook_usage_tokenizer': np.float64(0.8475988051470589), 'val_cross_entropy_loss': np.float64(6.960869602128571), 'val_entropy': np.float64(5.414477582071342), 'val_masked_token_top16_accuracy': np.float64(0.18385726447198905), 'val_masked_token_top1_accuracy': np.float64(0.0353936998940566), 'val_masked_token_top2_accuracy': np.float64(0.05846153243499644), 'val_masked_token_top5_accuracy': np.float64(0.10274799825514064), 'val_psnr': np.float64(10.914788994134641), 'val_select_logit': np.float64(5.507573454987769), 'val_select_p': np.float64(0.07029049901985655), 'val_ssim': np.float64(0.2510850712949154), 'val_total_loss': np.float64(6.960869602128571), 'val_z_loss': np.float64(69.57815611596202), 'val_loss': np.float32(6.9608693), 'val_full_frame_codebook_usage_lam': np.float64(1.0), 'val_full_frame_codebook_usage_tokenizer': np.float64(0.2629250919117647), 'val_full_frame_cross_entropy_loss': np.float64(20.045630099726658), 'val_full_frame_entropy': np.float64(1.0269374473422181), 'val_full_frame_masked_token_top16_accuracy': np.float64(0.7251559460864347), 'val_full_frame_masked_token_top1_accuracy': np.float64(0.38186273446270064), 'val_full_frame_masked_token_top2_accuracy': np.float64(0.47075533224087135), 'val_full_frame_masked_token_top5_accuracy': np.float64(0.5903074694614784), 'val_full_frame_psnr': np.float64(16.088213939292757), 'val_full_frame_select_logit': np.float64(84.10442965638404), 'val_full_frame_select_p': np.float64(0.7601301389582017), 'val_full_frame_ssim': np.float64(0.5480527725874209), 'val_full_frame_z_loss': np.float64(108920.47901348039), 'val_full_frame_loss': np.float32(20.045631)}",markdown,content +3665,2255477,"TERMINAL",0,0,"5:00",,terminal_output +3666,2255793,"appendix_c_nodes.md",1,1744,"",markdown,content +3667,2256219,"appendix_c_nodes.md",1,0,"\n noise_stddev = 0.5 # Standard deviation for the Gaussian noise\n noise = jax.random.normal(batch[""mask_rng""], vid_embed_BTNM.shape) * noise_stddev\n vid_embed_BTNM += noise",markdown,content +3668,2256254,"appendix_c_nodes.md",10,0,"",markdown,selection_command +3669,2256625,"TERMINAL",0,0,"2",,terminal_output +3670,2257579,"TERMINAL",0,0,"3",,terminal_output +3671,2258046,"jasmine/models/dynamics.py",0,0,"",python,tab +3672,2258606,"TERMINAL",0,0,"4",,terminal_output +3673,2259680,"TERMINAL",0,0,"5",,terminal_output +3674,2260711,"TERMINAL",0,0,"6",,terminal_output +3675,2260926,"TERMINAL",0,0,"t",,terminal_output +3676,2261295,"TERMINAL",0,0,"g",,terminal_output +3677,2261566,"TERMINAL",0,0,"",,terminal_output +3678,2261637,"TERMINAL",0,0,"7",,terminal_output +3679,2261726,"TERMINAL",0,0,"",,terminal_output +3680,2261972,"TERMINAL",0,0,"g",,terminal_output +3681,2262079,"TERMINAL",0,0,"i",,terminal_output +3682,2262194,"TERMINAL",0,0,"t",,terminal_output +3683,2262320,"TERMINAL",0,0," ",,terminal_output +3684,2262475,"TERMINAL",0,0,"d",,terminal_output +3685,2262529,"TERMINAL",0,0,"i",,terminal_output +3686,2262680,"TERMINAL",0,0,"f",,terminal_output +3687,2262681,"TERMINAL",0,0,"8",,terminal_output +3688,2262840,"TERMINAL",0,0,"f",,terminal_output +3689,2262895,"TERMINAL",0,0,"\r\n[?2004l\r[?1h=\r",,terminal_output +3690,2262974,"TERMINAL",0,0,"diff --git a/jasmine/models/dynamics.py b/jasmine/models/dynamics.py\r\nindex 6f0288b..12419d0 100644\r\n--- a/jasmine/models/dynamics.py\r\n+++ b/jasmine/models/dynamics.py\r\n@@ -80,6 +80,7 @@ class DynamicsMaskGIT(nnx.Module):\r\n latent_actions_BTm11L = batch[""latent_actions""]\r\n 
vid_embed_BTNM = self.patch_embed(video_tokens_BTN)\r\n \r\n+\r\n batch_size = vid_embed_BTNM.shape[0]\r\n _rng_prob, *_rngs_mask = jax.random.split(batch[""mask_rng""], batch_size + 1)\r\n mask_prob = jax.random.uniform(\r\n\r[?1l>]0;tum_cte0515@hkn0401:~/Projects/jasmine[?2004h(jasmine) [tum_cte0515@hkn0401 jasmine]$ ",,terminal_output +3691,2263801,"TERMINAL",0,0,"9",,terminal_output +3692,2265007,"TERMINAL",0,0,"10",,terminal_output +3693,2265065,"TERMINAL",0,0,"git ",,terminal_output +3694,2265297,"TERMINAL",0,0,"c",,terminal_output +3695,2265431,"TERMINAL",0,0,"h",,terminal_output +3696,2265493,"TERMINAL",0,0,"e",,terminal_output +3697,2265668,"TERMINAL",0,0,"ck",,terminal_output +3698,2265759,"TERMINAL",0,0,"1",,terminal_output +3699,2265824,"TERMINAL",0,0,"o",,terminal_output +3700,2265923,"TERMINAL",0,0,"u",,terminal_output +3701,2266025,"TERMINAL",0,0,"t",,terminal_output +3702,2266093,"TERMINAL",0,0," ",,terminal_output +3703,2266789,"TERMINAL",0,0,"2",,terminal_output +3704,2267305,"TERMINAL",0,0,"p",,terminal_output +3705,2267394,"TERMINAL",0,0,"r",,terminal_output +3706,2267564,"TERMINAL",0,0,"e",,terminal_output +3707,2267617,"TERMINAL",0,0,"p",,terminal_output +3708,2267787,"TERMINAL",0,0,"3",,terminal_output +3709,2267866,"TERMINAL",0,0,"e",,terminal_output +3710,2267960,"TERMINAL",0,0,"n",,terminal_output +3711,2268093,"TERMINAL",0,0,"d",,terminal_output +3712,2268266,"TERMINAL",0,0,"-",,terminal_output +3713,2268409,"TERMINAL",0,0,"a",,terminal_output +3714,2268576,"TERMINAL",0,0,"c",,terminal_output +3715,2268779,"TERMINAL",0,0,"t",,terminal_output +3716,2268819,"TERMINAL",0,0,"4",,terminal_output +3717,2268831,"TERMINAL",0,0,"i",,terminal_output +3718,2269227,"TERMINAL",0,0,"o",,terminal_output +3719,2269454,"TERMINAL",0,0,"n",,terminal_output +3720,2269646,"TERMINAL",0,0,"-",,terminal_output +3721,2269855,"TERMINAL",0,0,"5",,terminal_output +3722,2269868,"TERMINAL",0,0,"m",,terminal_output +3723,2269937,"TERMINAL",0,0,"a",,terminal_output +3724,2270044,"TERMINAL",0,0,"s",,terminal_output +3725,2270167,"TERMINAL",0,0,"k",,terminal_output +3726,2270336,"TERMINAL",0,0,"g",,terminal_output +3727,2270460,"TERMINAL",0,0,"i",,terminal_output +3728,2270598,"TERMINAL",0,0,"t",,terminal_output +3729,2270844,"TERMINAL",0,0,"6",,terminal_output +3730,2271232,"TERMINAL",0,0,"\r\n[?2004l\rerror: Your local changes to the following files would be overwritten by checkout:\r\n\tjasmine/models/dynamics.py\r\nPlease commit your changes or stash them before you switch branches.\r\nAborting\r\n]0;tum_cte0515@hkn0401:~/Projects/jasmine[?2004h(jasmine) [tum_cte0515@hkn0401 jasmine]$ ",,terminal_output +3731,2271864,"TERMINAL",0,0,"7",,terminal_output +3732,2272939,"TERMINAL",0,0,"8",,terminal_output +3733,2273073,"TERMINAL",0,0,"git checkout prepend-action-maskgit",,terminal_output +3734,2273910,"TERMINAL",0,0,"9",,terminal_output +3735,2274078,"TERMINAL",0,0,"",,terminal_output +3736,2274935,"TERMINAL",0,0,"20",,terminal_output +3737,2275106,"TERMINAL",0,0,"gi",,terminal_output +3738,2275203,"TERMINAL",0,0,"t",,terminal_output +3739,2275258,"TERMINAL",0,0," ",,terminal_output +3740,2275429,"TERMINAL",0,0,"s",,terminal_output +3741,2275534,"TERMINAL",0,0,"t",,terminal_output +3742,2275674,"TERMINAL",0,0,"a",,terminal_output +3743,2275949,"TERMINAL",0,0,"1",,terminal_output +3744,2276205,"TERMINAL",0,0,"",,terminal_output +3745,2276406,"TERMINAL",0,0,"",,terminal_output +3746,2276781,"TERMINAL",0,0,"",,terminal_output +3747,2277168,"TERMINAL",0,0,"2",,terminal_output 
+3748,2277296,"TERMINAL",0,0,"s",,terminal_output +3749,2277516,"TERMINAL",0,0,"ta",,terminal_output +3750,2277661,"TERMINAL",0,0,"s",,terminal_output +3751,2277718,"TERMINAL",0,0,"h",,terminal_output +3752,2278138,"TERMINAL",0,0,"3",,terminal_output +3753,2278226,"TERMINAL",0,0,"\r\n[?2004l\r",,terminal_output +3754,2278652,"TERMINAL",0,0,"Saved working directory and index state WIP on main: b8c92e7 feat: 10% decay on wsd (#219)\r\n",,terminal_output +3755,2278704,"TERMINAL",0,0,"]0;tum_cte0515@hkn0401:~/Projects/jasmine[?2004h(jasmine) [tum_cte0515@hkn0401 jasmine]$ ",,terminal_output +3756,2279171,"TERMINAL",0,0,"4",,terminal_output +3757,2280079,"TERMINAL",0,0,"g",,terminal_output +3758,2280191,"TERMINAL",0,0,"i",,terminal_output +3759,2280239,"TERMINAL",0,0,"5",,terminal_output +3760,2280322,"TERMINAL",0,0,"t",,terminal_output +3761,2280408,"TERMINAL",0,0," ",,terminal_output +3762,2281145,"TERMINAL",0,0,"b",,terminal_output +3763,2281198,"TERMINAL",0,0,"6",,terminal_output +3764,2281239,"TERMINAL",0,0,"r",,terminal_output +3765,2281416,"TERMINAL",0,0,"a",,terminal_output +3766,2281488,"TERMINAL",0,0,"n",,terminal_output +3767,2281610,"TERMINAL",0,0,"c",,terminal_output +3768,2281704,"TERMINAL",0,0,"h",,terminal_output +3769,2281924,"TERMINAL",0,0,"\r\n[?2004l\r[?1h=\r",,terminal_output +3770,2282162,"TERMINAL",0,0," ablation/full-precision-training\r\n ablation/use-pytorch-dataloader\r\n action-mapper\r\n add-noise-to-combat-exposure-bias\r\n add-wandb-name-and-tags\r\n before-nnx\r\n causal-mem-reduce\r\n causal-spatiotemporal-kv-cache\r\n causal-st-transformer\r\n causal-transformer-dynamics-model\r\n causal-transformer-nnx-no-kv-cache\r\n change-default-parameters\r\n change-default-to-wsd\r\n coinrun-gt-actions\r\n convert-to-jax-array-in-iter\r\n correct-batched-sampling\r\n dev\r\n dont-let-tf-see-gpu\r\n dynamics_coinrun_500m_dataset_29519\r\n feat/darkness-filter\r\n feat/explicit-image-dims\r\n fix-action-padding-lam-future-information-access\r\n fix-sampling\r\n fix-transformer-forwardpass\r\n fix/dyn-restore-after-nnx-upgrade\r\n fix/spatiotemporal-pe-once-in-STTransformer\r\n generate-minatar-breakout-dataset\r\n grad-norm-log-and-clip\r\n grain-dataloader\r\n gt-actions\r\n hotfix/eval-full-frame-fix\r\n hotfix/fix-val-loss-maskgit-masking\r\n hotfix/full-frame-eval-only-calculate-last-frame-metrics\r\n hotfix/sampling-shapes-error\r\n:",,terminal_output +3771,2282249,"TERMINAL",0,0,"7",,terminal_output +3772,2283155,"TERMINAL",0,0,"\r/",,terminal_output +3773,2283229,"TERMINAL",0,0,"8",,terminal_output +3774,2283658,"TERMINAL",0,0,"nn",,terminal_output +3775,2283839,"TERMINAL",0,0,"oo",,terminal_output +3776,2283977,"TERMINAL",0,0,"iiss",,terminal_output +3777,2284180,"TERMINAL",0,0,"ee",,terminal_output +3778,2284294,"TERMINAL",0,0,"9",,terminal_output +3779,2284926,"TERMINAL",0,0,"\r ablation/full-precision-training\r\n ablation/use-pytorch-dataloader\r\n action-mapper\r\n add-noise-to-combat-exposure-bias\r\n add-wandb-name-and-tags\r\n before-nnx\r\n causal-mem-reduce\r\n causal-spatiotemporal-kv-cache\r\n causal-st-transformer\r\n causal-transformer-dynamics-model\r\n causal-transformer-nnx-no-kv-cache\r\n change-default-parameters\r\n change-default-to-wsd\r\n coinrun-gt-actions\r\n convert-to-jax-array-in-iter\r\n correct-batched-sampling\r\n dev\r\n dont-let-tf-see-gpu\r\n dynamics_coinrun_500m_dataset_29519\r\n feat/darkness-filter\r\n feat/explicit-image-dims\r\n fix-action-padding-lam-future-information-access\r\n fix-sampling\r\n 
fix-transformer-forwardpass\r\n fix/dyn-restore-after-nnx-upgrade\r\n fix/spatiotemporal-pe-once-in-STTransformer\r\n generate-minatar-breakout-dataset\r\n grad-norm-log-and-clip\r\n grain-dataloader\r\n gt-actions\r\n hotfix/eval-full-frame-fix\r\n hotfix/fix-val-loss-maskgit-masking\r\n hotfix/full-frame-eval-only-calculate-last-frame-metrics\r\n hotfix/sampling-shapes-error\r\n ablation/full-precision-training\r\n ablation/use-pytorch-dataloader\r\n action-mapper\r\n add-noise-to-combat-exposure-bias\r\n add-wandb-name-and-tags\r\n before-nnx\r\n causal-mem-reduce\r\n causal-spatiotemporal-kv-cache\r\n causal-st-transformer\r\n causal-transformer-dynamics-model\r\n causal-transformer-nnx-no-kv-cache\r\n change-default-parameters\r\n change-default-to-wsd\r\n coinrun-gt-actions\r\n convert-to-jax-array-in-iter\r\n correct-batched-sampling\r\n dev\r\n dont-let-tf-see-gpu\r\n dynamics_coinrun_500m_dataset_29519\r\n feat/darkness-filter\r\n feat/explicit-image-dims\r\n fix-action-padding-lam-future-information-access\r\n fix-sampling\r\n fix-transformer-forwardpass\r\n fix/dyn-restore-after-nnx-upgrade\r\n fix/spatiotemporal-pe-once-in-STTransformer\r\n generate-minatar-breakout-dataset\r\n grad-norm-log-and-clip\r\n grain-dataloader\r\n gt-actions\r\n hotfix/eval-full-frame-fix\r\n hotfix/fix-val-loss-maskgit-masking\r\n hotfix/full-frame-eval-only-calculate-last-frame-metrics\r\n hotfix/sampling-shapes-error\r\n input_pipeline/add-npy2array_record\r\n logging-variants\r\n lr-schedules\r\n:",,terminal_output +3780,2285291,"TERMINAL",0,0,"30",,terminal_output +3781,2286323,"TERMINAL",0,0,"1",,terminal_output +3782,2287311,"TERMINAL",0,0,"2",,terminal_output +3783,2288497,"TERMINAL",0,0,"\r[?1l>]0;tum_cte0515@hkn0401:~/Projects/jasmine[?2004h(jasmine) [tum_cte0515@hkn0401 jasmine]$ ",,terminal_output +3784,2288499,"TERMINAL",0,0,"3",,terminal_output +3785,2288840,"TERMINAL",0,0,"gi",,terminal_output +3786,2288915,"TERMINAL",0,0,"t",,terminal_output +3787,2289084,"TERMINAL",0,0," c",,terminal_output +3788,2289429,"TERMINAL",0,0,"4",,terminal_output +3789,2289549,"TERMINAL",0,0,"h",,terminal_output +3790,2289633,"TERMINAL",0,0,"e",,terminal_output +3791,2289819,"TERMINAL",0,0,"c",,terminal_output +3792,2289889,"TERMINAL",0,0,"k",,terminal_output +3793,2289988,"TERMINAL",0,0,"o",,terminal_output +3794,2290066,"TERMINAL",0,0,"u",,terminal_output +3795,2290185,"TERMINAL",0,0,"t",,terminal_output +3796,2290314,"TERMINAL",0,0," ",,terminal_output +3797,2290412,"TERMINAL",0,0,"5",,terminal_output +3798,2290684,"TERMINAL",0,0,"add-noise-to-combat-exposure-bias",,terminal_output +3799,2290927,"TERMINAL",0,0,"add-noise-to-combat-exposure-bias\r\n[?2004l\r",,terminal_output +3800,2291115,"TERMINAL",0,0,"Switched to branch 'add-noise-to-combat-exposure-bias'\r\nYour branch is up to date with 'origin/add-noise-to-combat-exposure-bias'.\r\n]0;tum_cte0515@hkn0401:~/Projects/jasmine[?2004h(jasmine) [tum_cte0515@hkn0401 jasmine]$ ",,terminal_output +3801,2291454,"TERMINAL",0,0,"6\radd-noise-to-combat-exposure-bias",,terminal_output +3802,2292117,"TERMINAL",0,0,"git checkout add-noise-to-combat-exposure-bias",,terminal_output +3803,2292339,"TERMINAL",0,0,"branch",,terminal_output +3804,2292478,"TERMINAL",0,0,"7",,terminal_output +3805,2292872,"TERMINAL",0,0,"stash",,terminal_output +3806,2293287,"TERMINAL",0,0,"checkout prepend-action-maskgit",,terminal_output +3807,2293479,"TERMINAL",0,0,"8",,terminal_output +3808,2294192,"TERMINAL",0,0,"diff",,terminal_output 
+3809,2294507,"TERMINAL",0,0,"40",,terminal_output +3810,2294676,"TERMINAL",0,0,"sh slurm/jobs/mihir/horeka/coinrun/ablations/appendix-c.sh",,terminal_output +3811,2295397,"",0,0,"Switched from branch 'main' to 'add-noise-to-combat-exposure-bias'",,git_branch_checkout +3812,2295534,"TERMINAL",0,0,"1",,terminal_output +3813,2296537,"TERMINAL",0,0,"2",,terminal_output +3814,2297557,"TERMINAL",0,0,"3",,terminal_output +3815,2298606,"TERMINAL",0,0,"4",,terminal_output +3816,2299692,"TERMINAL",0,0,"5",,terminal_output +3817,2300014,"jasmine/models/dynamics.py",0,0,"from typing import Dict\n\nimport jax\nimport jax.numpy as jnp\nimport flax.nnx as nnx\n\nfrom utils.nn import STTransformer, Transformer\n\n\nclass DynamicsMaskGIT(nnx.Module):\n """"""\n MaskGIT dynamics model\n\n Dimension keys:\n B: batch size\n T: sequence length\n N: number of patches per frame\n L: latent dimension\n V: vocabulary size (number of latents)\n """"""\n\n def __init__(\n self,\n model_dim: int,\n ffn_dim: int,\n num_latents: int,\n latent_action_dim: int,\n num_blocks: int,\n num_heads: int,\n dropout: float,\n mask_limit: float,\n param_dtype: jnp.dtype,\n dtype: jnp.dtype,\n use_flash_attention: bool,\n rngs: nnx.Rngs,\n ):\n self.model_dim = model_dim\n self.ffn_dim = ffn_dim\n self.num_latents = num_latents\n self.latent_action_dim = latent_action_dim\n self.num_blocks = num_blocks\n self.num_heads = num_heads\n self.dropout = dropout\n self.mask_limit = mask_limit\n self.param_dtype = param_dtype\n self.dtype = dtype\n self.use_flash_attention = use_flash_attention\n\n self.transformer = STTransformer(\n self.model_dim,\n self.model_dim,\n self.ffn_dim,\n self.num_latents,\n self.num_blocks,\n self.num_heads,\n self.dropout,\n self.param_dtype,\n self.dtype,\n use_flash_attention=self.use_flash_attention,\n rngs=rngs,\n )\n self.patch_embed = nnx.Embed(self.num_latents, self.model_dim, rngs=rngs)\n self.mask_token = nnx.Param(\n nnx.initializers.lecun_uniform()(rngs.params(), (1, 1, 1, self.model_dim))\n )\n self.action_up = nnx.Linear(\n self.latent_action_dim,\n self.model_dim,\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n rngs=rngs,\n )\n\n def __call__(\n self,\n batch: Dict[str, jax.Array],\n ) -> tuple[jax.Array, jax.Array]:\n # --- Mask videos ---\n video_tokens_BTN = batch[""video_tokens""]\n latent_actions_BTm11L = batch[""latent_actions""]\n vid_embed_BTNM = self.patch_embed(video_tokens_BTN)\n\n\n batch_size = vid_embed_BTNM.shape[0]\n _rng_prob, *_rngs_mask = jax.random.split(batch[""mask_rng""], batch_size + 1)\n mask_prob = jax.random.uniform(\n _rng_prob, shape=(batch_size,), minval=self.mask_limit\n )\n per_sample_shape = vid_embed_BTNM.shape[1:-1]\n mask = jax.vmap(\n lambda rng, prob: jax.random.bernoulli(rng, prob, per_sample_shape),\n in_axes=(0, 0),\n )(jnp.asarray(_rngs_mask), mask_prob)\n mask = mask.at[:, 0].set(False)\n vid_embed_BTNM = jnp.where(\n jnp.expand_dims(mask, -1), self.mask_token.value, vid_embed_BTNM\n )\n\n # --- Predict transition ---\n act_embed_BTm11M = self.action_up(latent_actions_BTm11L)\n padded_act_embed_BT1M = jnp.pad(\n act_embed_BTm11M, ((0, 0), (1, 0), (0, 0), (0, 0))\n )\n padded_act_embed_BTNM = jnp.broadcast_to(\n padded_act_embed_BT1M, vid_embed_BTNM.shape\n )\n vid_embed_BTNM += padded_act_embed_BTNM\n logits_BTNV = self.transformer(vid_embed_BTNM)\n return logits_BTNV, mask\n\n\nclass DynamicsCausal(nnx.Module):\n """"""Causal dynamics model""""""\n\n def __init__(\n self,\n model_dim: int,\n ffn_dim: int,\n num_latents: int,\n 
latent_action_dim: int,\n num_blocks: int,\n num_heads: int,\n dropout: float,\n param_dtype: jnp.dtype,\n dtype: jnp.dtype,\n use_flash_attention: bool,\n decode: bool,\n rngs: nnx.Rngs,\n ):\n self.model_dim = model_dim\n self.ffn_dim = ffn_dim\n self.num_latents = num_latents\n self.latent_action_dim = latent_action_dim\n self.num_blocks = num_blocks\n self.num_heads = num_heads\n self.dropout = dropout\n self.param_dtype = param_dtype\n self.dtype = dtype\n self.use_flash_attention = use_flash_attention\n self.decode = decode\n\n self.transformer = Transformer(\n self.model_dim,\n self.model_dim,\n self.ffn_dim,\n self.num_latents,\n self.num_blocks,\n self.num_heads,\n self.dropout,\n self.param_dtype,\n self.dtype,\n use_flash_attention=self.use_flash_attention,\n decode=self.decode,\n rngs=rngs,\n )\n self.patch_embed = nnx.Embed(self.num_latents, self.model_dim, rngs=rngs)\n self.action_up = nnx.Linear(\n self.latent_action_dim,\n self.model_dim,\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n rngs=rngs,\n )\n\n def __call__(\n self,\n batch: Dict[str, jax.Array],\n ) -> tuple[jax.Array, jax.Array]:\n video_tokens_BTN = batch[""video_tokens""]\n latent_actions_BTm11L = batch[""latent_actions""]\n vid_embed_BTNM = self.patch_embed(video_tokens_BTN)\n act_embed_BTm11M = self.action_up(latent_actions_BTm11L)\n padded_act_embed_BT1M = jnp.pad(\n act_embed_BTm11M, ((0, 0), (1, 0), (0, 0), (0, 0))\n )\n vid_embed_BTNp1M = jnp.concatenate(\n [padded_act_embed_BT1M, vid_embed_BTNM], axis=2\n )\n logits_BTNp1V = self.transformer(vid_embed_BTNp1M)\n logits_BTNV = logits_BTNp1V[:, :, :-1]\n return logits_BTNV, jnp.ones_like(video_tokens_BTN)\n",python,tab +3818,2300418,"jasmine/models/dynamics.py",606,4997," max_noise_level: float,\n noise_buckets: int,\n mask_limit: float,\n param_dtype: jnp.dtype,\n dtype: jnp.dtype,\n use_flash_attention: bool,\n rngs: nnx.Rngs,\n ):\n self.model_dim = model_dim\n self.ffn_dim = ffn_dim\n self.num_latents = num_latents\n self.latent_action_dim = latent_action_dim\n self.num_blocks = num_blocks\n self.num_heads = num_heads\n self.dropout = dropout\n self.max_noise_level = max_noise_level\n self.noise_buckets = noise_buckets\n self.mask_limit = mask_limit\n self.param_dtype = param_dtype\n self.dtype = dtype\n self.use_flash_attention = use_flash_attention\n\n self.transformer = STTransformer(\n self.model_dim,\n self.model_dim,\n self.ffn_dim,\n self.num_latents,\n self.num_blocks,\n self.num_heads,\n self.dropout,\n self.param_dtype,\n self.dtype,\n use_flash_attention=self.use_flash_attention,\n rngs=rngs,\n )\n self.patch_embed = nnx.Embed(self.num_latents, self.model_dim, rngs=rngs)\n self.mask_token = nnx.Param(\n nnx.initializers.lecun_uniform()(rngs.params(), (1, 1, 1, self.model_dim))\n )\n self.action_up = nnx.Linear(\n self.latent_action_dim,\n self.model_dim,\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n rngs=rngs,\n )\n self.noise_level_embed = nnx.Embed(\n self.noise_buckets, self.model_dim, rngs=rngs\n )\n\n def apply_noise_augmentation(self, vid_embed_BTNM, rng, noise_level_B=None):\n B, T, N, M = vid_embed_BTNM.shape\n rng, _rng_noise_lvl, _rng_noise = jax.random.split(rng, 3)\n if noise_level_B is None:\n noise_level_B = jax.random.uniform(\n _rng_noise_lvl,\n shape=(B,),\n minval=0.0,\n maxval=self.max_noise_level,\n dtype=self.dtype,\n )\n noise_BTNM = jax.random.normal(_rng_noise, shape=(B, T, N, M), dtype=self.dtype)\n noise_bucket_idx_B = jnp.floor(\n (noise_level_B * self.noise_buckets) / self.max_noise_level\n 
).astype(jnp.int32)\n\n # Clip noise_bucket_idx_B to ensure it stays within valid range to prevent NaNs\n noise_bucket_idx_B = jnp.clip(noise_bucket_idx_B, 0, self.noise_buckets - 1)\n\n noise_bucket_idx_B11 = noise_bucket_idx_B.reshape(B, 1, 1)\n noise_level_embed_B11M = self.noise_level_embed(noise_bucket_idx_B11)\n noise_level_embed_BT1M = jnp.tile(noise_level_embed_B11M, (1, T, 1, 1))\n noise_level_B111 = noise_level_B.reshape(B, 1, 1, 1)\n\n noise_augmented_vid_embed_BTNM = (\n jnp.sqrt(1 - noise_level_B111) * vid_embed_BTNM\n + jnp.sqrt(noise_level_B111) * noise_BTNM\n )\n\n return noise_augmented_vid_embed_BTNM, noise_level_embed_BT1M\n\n def __call__(\n self,\n batch: Dict[str, jax.Array],\n ) -> tuple[jax.Array, jax.Array]:\n # --- Mask videos ---\n video_tokens_BTN = batch[""video_tokens""]\n latent_actions_BTm11L = batch[""latent_actions""]\n vid_embed_BTNM = self.patch_embed(video_tokens_BTN)\n\n B = vid_embed_BTNM.shape[0]\n rng, _rng_prob, *_rngs_mask = jax.random.split(batch[""rng""], B + 2)\n mask_prob = jax.random.uniform(_rng_prob, shape=(B,), minval=self.mask_limit)\n per_sample_shape = vid_embed_BTNM.shape[1:-1]\n mask = jax.vmap(\n lambda rng, prob: jax.random.bernoulli(rng, prob, per_sample_shape),\n in_axes=(0, 0),\n )(jnp.asarray(_rngs_mask), mask_prob)\n mask = mask.at[:, 0].set(False)\n vid_embed_BTNM = jnp.where(\n jnp.expand_dims(mask, -1), self.mask_token.value, vid_embed_BTNM\n )\n\n # --- Apply noise augmentation ---\n vid_embed_BTNM, noise_level_embed_BT1M = self.apply_noise_augmentation(\n vid_embed_BTNM, rng\n )\n\n # --- Predict transition ---\n act_embed_BTm11M = self.action_up(latent_actions_BTm11L)\n padded_act_embed_BT1M = jnp.pad(\n act_embed_BTm11M, ((0, 0), (1, 0), (0, 0), (0, 0))\n )\n vid_embed_BTNp2M = jnp.concatenate(\n [padded_act_embed_BT1M, noise_level_embed_BT1M, vid_embed_BTNM], axis=2\n )\n logits_BTNp2V = self.transformer(vid_embed_BTNp2M)\n logits_BTNV = logits_BTNp2V[:, :, 2:]\n return logits_BTNV, mask\n\n\nclass DynamicsCausal(nnx.Module):\n """"""Causal dynamics model""""""\n\n def __init__(\n self,\n model_dim: int,\n ffn_dim: int,\n num_latents: int,\n latent_action_dim: int,\n num_blocks: int,\n num_heads: int,\n dropout: float,\n max_noise_level: float,\n noise_buckets: int,\n param_dtype: jnp.dtype,\n dtype: jnp.dtype,\n use_flash_attention: bool,\n decode: bool,\n rngs: nnx.Rngs,\n ):\n self.model_dim = model_dim\n self.ffn_dim = ffn_dim\n self.num_latents = num_latents\n self.latent_action_dim = latent_action_dim\n self.num_blocks = num_blocks\n self.num_heads = num_heads\n self.dropout = dropout\n self.max_noise_level = max_noise_level\n self.noise_buckets = noise_buckets\n self.param_dtype = param_dtype\n self.dtype = dtype\n self.use_flash_attention = use_flash_attention\n self.decode = decode\n\n self.transformer = Transformer(\n self.model_dim,\n self.model_dim,\n self.ffn_dim,\n self.num_latents,\n self.num_blocks,\n self.num_heads,\n self.dropout,\n self.param_dtype,\n self.dtype,\n use_flash_attention=self.use_flash_attention,\n decode=self.decode,\n rngs=rngs,\n )\n self.patch_embed = nnx.Embed(self.num_latents, self.model_dim, rngs=rngs)\n self.action_up = nnx.Linear(\n self.latent_action_dim,\n self.model_dim,\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n rngs=rngs,\n )\n self.noise_level_embed = nnx.Embed(\n self.noise_buckets, self.model_dim, rngs=rngs\n )\n\n def apply_noise_augmentation(self, vid_embed_BTNM, rng, noise_level_B=None):\n B, T, N, M = vid_embed_BTNM.shape\n rng, _rng_noise_lvl, _rng_noise 
= jax.random.split(rng, 3)\n if noise_level_B is None:\n noise_level_B = jax.random.uniform(\n _rng_noise_lvl,\n shape=(B,),\n minval=0.0,\n maxval=self.max_noise_level,\n dtype=self.dtype,\n )\n noise_BTNM = jax.random.normal(_rng_noise, shape=(B, T, N, M), dtype=self.dtype)\n noise_bucket_idx_B = jnp.floor(\n (noise_level_B * self.noise_buckets) / self.max_noise_level\n ).astype(jnp.int32)\n\n # Clip noise_bucket_idx_B to ensure it stays within valid range to prevent NaNs\n noise_bucket_idx_B = jnp.clip(noise_bucket_idx_B, 0, self.noise_buckets - 1)\n\n noise_bucket_idx_B11 = noise_bucket_idx_B.reshape(B, 1, 1)\n noise_level_embed_B11M = self.noise_level_embed(noise_bucket_idx_B11)\n noise_level_embed_BT1M = jnp.tile(noise_level_embed_B11M, (1, T, 1, 1))\n noise_level_B111 = noise_level_B.reshape(B, 1, 1, 1)\n\n noise_augmented_vid_embed_BTNM = (\n jnp.sqrt(1 - noise_level_B111) * vid_embed_BTNM\n + jnp.sqrt(noise_level_B111) * noise_BTNM\n )\n\n return noise_augmented_vid_embed_BTNM, noise_level_embed_BT1M\n\n def __call__(\n self,\n batch: Dict[str, jax.Array],\n ) -> tuple[jax.Array, jax.Array]:\n video_tokens_BTN = batch[""video_tokens""]\n latent_actions_BTm11L = batch[""latent_actions""]\n vid_embed_BTNM = self.patch_embed(video_tokens_BTN)\n act_embed_BTm11M = self.action_up(latent_actions_BTm11L)\n padded_act_embed_BT1M = jnp.pad(\n act_embed_BTm11M, ((0, 0), (1, 0), (0, 0), (0, 0))\n )\n vid_embed_BTNM, noise_level_embed_BT1M = self.apply_noise_augmentation(\n vid_embed_BTNM, batch[""rng""]\n )\n vid_embed_BTNp2M = jnp.concatenate(\n [padded_act_embed_BT1M, noise_level_embed_BT1M, vid_embed_BTNM], axis=2\n )\n logits_BTNp2V = self.transformer(vid_embed_BTNp2M)\n logits_BTNV = logits_BTNp2V[:, :, 1:-1]\n",python,content +3819,2300761,"TERMINAL",0,0,"6",,terminal_output +3820,2301677,"TERMINAL",0,0,"7",,terminal_output +3821,2302737,"TERMINAL",0,0,"8",,terminal_output +3822,2303680,"TERMINAL",0,0,"9",,terminal_output +3823,2304715,"TERMINAL",0,0,"50",,terminal_output +3824,2305827,"TERMINAL",0,0,"1",,terminal_output +3825,2306837,"TERMINAL",0,0,"2",,terminal_output +3826,2307848,"TERMINAL",0,0,"3",,terminal_output +3827,2308893,"TERMINAL",0,0,"4",,terminal_output +3828,2309896,"TERMINAL",0,0,"5",,terminal_output +3829,2310928,"TERMINAL",0,0,"6",,terminal_output +3830,2311927,"TERMINAL",0,0,"7",,terminal_output +3831,2312947,"TERMINAL",0,0,"8",,terminal_output +3832,2313970,"TERMINAL",0,0,"9",,terminal_output +3833,2314990,"TERMINAL",0,0,"6:00",,terminal_output +3834,2316071,"TERMINAL",0,0,"1",,terminal_output +3835,2317054,"TERMINAL",0,0,"2",,terminal_output +3836,2318063,"TERMINAL",0,0,"3",,terminal_output +3837,2319143,"TERMINAL",0,0,"4",,terminal_output +3838,2320254,"TERMINAL",0,0,"5",,terminal_output +3839,2321147,"TERMINAL",0,0,"6",,terminal_output +3840,2322205,"TERMINAL",0,0,"7",,terminal_output +3841,2323260,"TERMINAL",0,0,"8",,terminal_output +3842,2324276,"TERMINAL",0,0,"9",,terminal_output +3843,2324318,"jasmine/models/dynamics.py",4678,0,"",python,selection_mouse +3844,2324345,"jasmine/models/dynamics.py",4677,0,"",python,selection_command +3845,2325310,"TERMINAL",0,0,"10",,terminal_output +3846,2325378,"jasmine/models/dynamics.py",4636,0,"",python,selection_mouse +3847,2325579,"jasmine/models/dynamics.py",2250,0,"",python,selection_command +3848,2326282,"TERMINAL",0,0,"1",,terminal_output +3849,2327304,"TERMINAL",0,0,"2",,terminal_output +3850,2328327,"TERMINAL",0,0,"3",,terminal_output +3851,2329349,"TERMINAL",0,0,"4",,terminal_output 
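Row 3818 is the substantive change on this branch: both dynamics models gain a noise-augmentation step. Per the recorded apply_noise_augmentation, a per-sample noise level lambda is drawn uniformly from [0, max_noise_level), the patch embeddings are mixed variance-preservingly as sqrt(1 - lambda) * x + sqrt(lambda) * eps, and lambda is discretized into noise_buckets buckets (clipped to stay in range, per the recorded comment about preventing NaNs) to index an nnx.Embed whose output is prepended to the sequence alongside the action embedding. The following standalone sketch distills just that augmentation step; the default values match the Args defaults recorded later in train_dynamics.py (max_noise_level=0.7, noise_buckets=10), and it is an illustration, not the model code itself.

import jax
import jax.numpy as jnp

def apply_noise_augmentation(vid_embed_BTNM, rng,
                             max_noise_level=0.7, noise_buckets=10):
    # Distilled from the recorded dynamics.py: returns the noised embeddings
    # and the bucket index used to look up the noise-level embedding.
    B, T, N, M = vid_embed_BTNM.shape
    _rng_lvl, _rng_noise = jax.random.split(rng)
    # One noise level per sample: lambda ~ U[0, max_noise_level).
    noise_level_B = jax.random.uniform(_rng_lvl, (B,), maxval=max_noise_level)
    noise_BTNM = jax.random.normal(_rng_noise, (B, T, N, M))
    # Discretize lambda into K buckets; the clip keeps the embedding lookup
    # in range even when lambda hits the upper boundary.
    bucket_idx_B = jnp.clip(
        jnp.floor(noise_level_B * noise_buckets / max_noise_level).astype(jnp.int32),
        0, noise_buckets - 1,
    )
    lvl = noise_level_B.reshape(B, 1, 1, 1)
    # Variance-preserving mix: sqrt(1 - lambda) * x + sqrt(lambda) * eps.
    noised_BTNM = jnp.sqrt(1 - lvl) * vid_embed_BTNM + jnp.sqrt(lvl) * noise_BTNM
    return noised_BTNM, bucket_idx_B

In the recorded models the extra prepended token is why the transformer output is sliced back down afterwards: logits[:, :, 2:] in the MaskGIT variant (action token plus noise-level token) and logits[:, :, 1:-1] in the causal variant.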
+3852,2329725,"jasmine/models/dynamics.py",2636,0,"",python,selection_mouse +3853,2330360,"TERMINAL",0,0,"5",,terminal_output +3854,2330395,"jasmine/models/dynamics.py",2628,0,"",python,selection_mouse +3855,2330518,"jasmine/models/dynamics.py",2625,4,"self",python,selection_mouse +3856,2331043,"jasmine/models/dynamics.py",2635,0,"",python,selection_mouse +3857,2331247,"jasmine/models/dynamics.py",2630,15,"max_noise_level",python,selection_mouse +3858,2331414,"TERMINAL",0,0,"6",,terminal_output +3859,2332408,"TERMINAL",0,0,"7",,terminal_output +3860,2333425,"TERMINAL",0,0,"8",,terminal_output +3861,2334449,"TERMINAL",0,0,"9",,terminal_output +3862,2335466,"TERMINAL",0,0,"20",,terminal_output +3863,2336525,"TERMINAL",0,0,"1",,terminal_output +3864,2337551,"TERMINAL",0,0,"3",,terminal_output +3865,2338576,"TERMINAL",0,0,"4",,terminal_output +3866,2338655,"jasmine/train_dynamics_appendix-c.py",0,0,"import os\n\n\nos.environ.setdefault(""XLA_PYTHON_CLIENT_MEM_FRACTION"", ""0.98"")\n\nfrom dataclasses import dataclass, field\nimport itertools\nfrom typing import cast, Optional\n\nimport einops\nfrom jax.sharding import Mesh, PartitionSpec, NamedSharding\nfrom jax.experimental.mesh_utils import create_device_mesh\nimport optax\nimport orbax.checkpoint as ocp\nimport numpy as np\nimport dm_pix as pix\nimport jax\nimport jax.numpy as jnp\nimport tyro\nimport wandb\nimport grain\nimport flax.nnx as nnx\n\nfrom genie import Genie, restore_genie_components\nfrom utils.dataloader import get_dataloader\nfrom utils.train_utils import (\n get_lr_schedule,\n count_parameters_by_component,\n print_mem_stats,\n print_compiled_memory_stats,\n print_compiled_cost_analysis,\n)\n\n\n@dataclass\nclass Args:\n # Experiment\n num_steps: int = 200_000\n seed: int = 0\n seq_len: int = 16\n image_channels: int = 3\n image_height: int = 64\n image_width: int = 64\n data_dir: str = """"\n save_ckpt: bool = False\n restore_ckpt: bool = False\n # Optimization\n batch_size: int = 36\n init_lr: float = 0.0\n max_lr: float = 3e-5\n decay_end: float = 0.0\n wsd_decay_steps: int = (\n 20_000 # NOTE: wsd_decay_steps will only be used when using a wsd-schedule\n )\n warmup_steps: int = 5000\n lr_schedule: str = ""wsd"" # supported options: wsd, cos\n # Tokenizer\n tokenizer_dim: int = 512\n tokenizer_ffn_dim: int = 2048\n latent_patch_dim: int = 32\n num_patch_latents: int = 1024\n patch_size: int = 16\n tokenizer_num_blocks: int = 4\n tokenizer_num_heads: int = 8\n tokenizer_checkpoint: str = """"\n # LAM\n lam_dim: int = 512\n lam_ffn_dim: int = 2048\n latent_action_dim: int = 32\n num_actions: int = 6\n lam_patch_size: int = 16\n lam_num_blocks: int = 4\n lam_num_heads: int = 8\n lam_checkpoint: str = """"\n # Dynamics\n dyna_type: str = ""maskgit"" # supported options: maskgit, causal\n dyna_dim: int = 512\n dyna_ffn_dim: int = 2048\n dyna_num_blocks: int = 6\n dyna_num_heads: int = 8\n dropout: float = 0.0\n mask_limit: float = 0.5\n z_loss_weight: float = 0.0\n param_dtype = jnp.float32\n dtype = jnp.bfloat16\n use_flash_attention: bool = True\n use_gt_actions: bool = False\n # Logging\n log: bool = True\n entity: str = """"\n project: str = """"\n name: str = ""train_dynamics""\n tags: list[str] = field(default_factory=lambda: [""dynamics""])\n log_interval: int = 50\n log_image_interval: int = 1000\n ckpt_dir: str = """"\n log_checkpoint_interval: int = 5000\n log_checkpoint_keep_period: int = 20_000\n log_gradients: bool = False\n val_data_dir: str = """"\n val_interval: int = 20_000\n val_steps: int = 50\n 
eval_full_frame: bool = True\n val_maskgit_steps: int = 25\n val_temperature: float = 1\n val_sample_argmax: bool = False\n wandb_id: str = """"\n\n\ndef build_model(args: Args, rng: jax.Array) -> tuple[Genie, jax.Array]:\n rng, _rng = jax.random.split(rng)\n rngs = nnx.Rngs(_rng)\n genie = Genie(\n # Tokenizer\n in_dim=args.image_channels,\n tokenizer_dim=args.tokenizer_dim,\n tokenizer_ffn_dim=args.tokenizer_ffn_dim,\n latent_patch_dim=args.latent_patch_dim,\n num_patch_latents=args.num_patch_latents,\n patch_size=args.patch_size,\n tokenizer_num_blocks=args.tokenizer_num_blocks,\n tokenizer_num_heads=args.tokenizer_num_heads,\n # LAM\n lam_dim=args.lam_dim,\n lam_ffn_dim=args.lam_ffn_dim,\n latent_action_dim=args.latent_action_dim,\n num_actions=args.num_actions,\n lam_patch_size=args.lam_patch_size,\n lam_num_blocks=args.lam_num_blocks,\n lam_num_heads=args.lam_num_heads,\n lam_co_train=not args.lam_checkpoint,\n use_gt_actions=args.use_gt_actions,\n # Dynamics\n dyna_type=args.dyna_type,\n dyna_dim=args.dyna_dim,\n dyna_ffn_dim=args.dyna_ffn_dim,\n dyna_num_blocks=args.dyna_num_blocks,\n dyna_num_heads=args.dyna_num_heads,\n dropout=args.dropout,\n mask_limit=args.mask_limit,\n param_dtype=args.param_dtype,\n dtype=args.dtype,\n use_flash_attention=args.use_flash_attention,\n decode=False,\n rngs=rngs,\n )\n if args.use_gt_actions:\n assert (\n not args.lam_checkpoint\n ), ""Cannot use LAM when using ground-truth actions.""\n else:\n assert genie.lam is not None\n del genie.lam.decoder\n return genie, rng\n\n\ndef build_optimizer(genie: Genie, args: Args) -> nnx.ModelAndOptimizer:\n lr_schedule = get_lr_schedule(\n args.lr_schedule,\n args.init_lr,\n args.max_lr,\n args.decay_end,\n args.num_steps,\n args.warmup_steps,\n args.wsd_decay_steps,\n )\n tx = optax.adamw(\n learning_rate=lr_schedule,\n b1=0.9,\n b2=0.9,\n weight_decay=1e-4,\n mu_dtype=args.param_dtype, # moments in full precision\n )\n optimizer = nnx.ModelAndOptimizer(genie, tx)\n return optimizer\n\n\ndef build_mesh_and_sharding(\n num_devices: int,\n) -> tuple[Mesh, NamedSharding, NamedSharding, NamedSharding]:\n device_mesh_arr = create_device_mesh((num_devices,))\n mesh = Mesh(devices=device_mesh_arr, axis_names=(""data"",))\n replicated_sharding = NamedSharding(mesh, PartitionSpec())\n videos_sharding = NamedSharding(mesh, PartitionSpec(""data"", None, None, None, None))\n actions_sharding = NamedSharding(mesh, PartitionSpec(""data"", None))\n return mesh, replicated_sharding, videos_sharding, actions_sharding\n\n\ndef shard_optimizer_states(\n optimizer: nnx.ModelAndOptimizer, replicated_sharding: NamedSharding\n) -> None:\n model_state = nnx.state(optimizer.model)\n model_sharded_state = jax.lax.with_sharding_constraint(\n model_state, replicated_sharding\n )\n nnx.update(optimizer.model, model_sharded_state)\n optimizer_state = nnx.state(optimizer, nnx.optimizer.OptState)\n optimizer_sharded_state = jax.lax.with_sharding_constraint(\n optimizer_state, replicated_sharding\n )\n nnx.update(optimizer, optimizer_sharded_state)\n\n\ndef build_dataloader(args: Args, data_dir: str) -> grain.DataLoaderIterator:\n image_shape = (args.image_height, args.image_width, args.image_channels)\n array_record_files = [\n os.path.join(data_dir, x)\n for x in os.listdir(data_dir)\n if x.endswith("".array_record"")\n ]\n grain_dataloader = get_dataloader(\n array_record_files,\n args.seq_len,\n # NOTE: We deliberately pass the global batch size\n # The dataloader shards the dataset across all processes\n args.batch_size,\n 
*image_shape,\n num_workers=8,\n prefetch_buffer_size=1,\n seed=args.seed,\n )\n initial_state = grain_dataloader._create_initial_state()\n grain_iterator = grain.DataLoaderIterator(grain_dataloader, initial_state)\n return grain_iterator\n\n\ndef build_checkpoint_manager(args: Args) -> Optional[ocp.CheckpointManager]:\n if args.restore_ckpt or args.save_ckpt:\n handler_registry = ocp.handlers.DefaultCheckpointHandlerRegistry()\n handler_registry.add(\n ""model_state"", ocp.args.PyTreeSave, ocp.handlers.PyTreeCheckpointHandler\n )\n handler_registry.add(\n ""model_state"", ocp.args.PyTreeRestore, ocp.handlers.PyTreeCheckpointHandler\n )\n handler_registry.add(\n ""train_dataloader_state"",\n grain.checkpoint.CheckpointSave,\n cast(ocp.handlers.CheckpointHandler, grain.checkpoint.CheckpointHandler),\n )\n handler_registry.add(\n ""train_dataloader_state"",\n grain.checkpoint.CheckpointRestore,\n cast(ocp.handlers.CheckpointHandler, grain.checkpoint.CheckpointHandler),\n )\n if args.val_data_dir:\n handler_registry.add(\n ""val_dataloader_state"",\n grain.checkpoint.CheckpointSave,\n cast(\n ocp.handlers.CheckpointHandler, grain.checkpoint.CheckpointHandler\n ),\n )\n handler_registry.add(\n ""val_dataloader_state"",\n grain.checkpoint.CheckpointRestore,\n cast(\n ocp.handlers.CheckpointHandler, grain.checkpoint.CheckpointHandler\n ),\n )\n checkpoint_options = ocp.CheckpointManagerOptions(\n save_interval_steps=args.log_checkpoint_interval,\n max_to_keep=3,\n keep_period=args.log_checkpoint_keep_period,\n step_format_fixed_length=6,\n cleanup_tmp_directories=True,\n )\n checkpoint_manager = ocp.CheckpointManager(\n args.ckpt_dir,\n options=checkpoint_options,\n handler_registry=handler_registry,\n )\n return checkpoint_manager\n else:\n return None\n\n\ndef restore_or_initialize_components(\n args: Args,\n checkpoint_manager: Optional[ocp.CheckpointManager],\n optimizer: nnx.ModelAndOptimizer,\n train_iterator: grain.DataLoaderIterator,\n rng: jax.Array,\n replicated_sharding: NamedSharding,\n val_iterator: Optional[grain.DataLoaderIterator],\n restore_step: Optional[int] = None,\n) -> tuple[\n int,\n nnx.ModelAndOptimizer,\n grain.DataLoaderIterator,\n grain.DataLoaderIterator,\n jax.Array,\n]:\n step = 0\n if checkpoint_manager and restore_step is None:\n restore_step = checkpoint_manager.latest_step()\n if args.restore_ckpt:\n assert checkpoint_manager is not None\n abstract_optimizer = nnx.eval_shape(lambda: optimizer)\n abstract_optimizer_state = nnx.state(abstract_optimizer)\n if val_iterator:\n restore_args = ocp.args.Composite(\n model_state=ocp.args.PyTreeRestore(abstract_optimizer_state), # type: ignore\n train_dataloader_state=grain.checkpoint.CheckpointRestore(train_iterator), # type: ignore\n val_dataloader_state=grain.checkpoint.CheckpointRestore(val_iterator), # type: ignore\n )\n else:\n restore_args = ocp.args.Composite(\n model_state=ocp.args.PyTreeRestore(abstract_optimizer_state), # type: ignore\n train_dataloader_state=grain.checkpoint.CheckpointRestore(train_iterator), # type: ignore\n )\n restored = checkpoint_manager.restore(\n checkpoint_manager.latest_step(), args=restore_args\n )\n restored_optimizer_state = restored[""model_state""]\n nnx.update(optimizer, restored_optimizer_state)\n train_iterator = restored[""train_dataloader_state""]\n if val_iterator:\n val_iterator = restored[""val_dataloader_state""]\n step = checkpoint_manager.latest_step() or 0\n print(f""Restored dataloader and model state from step {step}"")\n else:\n # Restore from pre-trained 
tokenizer (and LAM)\n rng, _rng = jax.random.split(rng)\n optimizer = restore_genie_components(optimizer, replicated_sharding, _rng, args)\n return step, optimizer, train_iterator, val_iterator, rng\n\n\ndef _calculate_top_k_accuracy(\n token_logits_BTNV: jax.Array,\n video_tokens_BTN: jax.Array,\n mask_BTN: jax.Array,\n k: int,\n) -> jax.Array:\n _, topk_indices_BTNK = jax.lax.top_k(token_logits_BTNV, k)\n topk_correct = jnp.any(\n topk_indices_BTNK == video_tokens_BTN[..., jnp.newaxis], axis=-1\n )\n topk_acc = (mask_BTN * topk_correct).sum() / mask_BTN.sum()\n return topk_acc\n\n\ndef _calculate_step_metrics(\n outputs: dict[str, jax.Array],\n gt: jax.Array,\n num_actions: int,\n num_patch_latents: int,\n) -> tuple[jax.Array, dict]:\n mask_BTN = outputs[""mask""]\n outputs[""token_logits""] = outputs[""token_logits""].astype(jnp.float32)\n ce_loss = optax.softmax_cross_entropy_with_integer_labels(\n outputs[""token_logits""], outputs[""video_tokens""]\n )\n ce_loss = (mask_BTN * ce_loss).sum() / mask_BTN.sum()\n z_val = jax.nn.logsumexp(outputs[""token_logits""], axis=-1)\n z_loss_metric = (mask_BTN * (z_val**2)).sum() / mask_BTN.sum()\n\n masked_token_top_1_acc = _calculate_top_k_accuracy(\n outputs[""token_logits""], outputs[""video_tokens""], mask_BTN, 1\n )\n masked_token_top_2_acc = _calculate_top_k_accuracy(\n outputs[""token_logits""], outputs[""video_tokens""], mask_BTN, 2\n )\n masked_token_top_5_acc = _calculate_top_k_accuracy(\n outputs[""token_logits""], outputs[""video_tokens""], mask_BTN, 5\n )\n masked_token_top_16_acc = _calculate_top_k_accuracy(\n outputs[""token_logits""], outputs[""video_tokens""], mask_BTN, 16\n )\n\n select_probs = jax.nn.softmax(outputs[""token_logits""])\n gt_val = gt.clip(0, 1).reshape(-1, *gt.shape[2:])\n recon = outputs[""recon""].clip(0, 1).reshape(-1, *outputs[""recon""].shape[2:])\n psnr = jnp.asarray(pix.psnr(gt_val, recon)).mean()\n ssim = jnp.asarray(pix.ssim(gt_val, recon)).mean()\n _, index_counts_tokenizer = jnp.unique_counts(\n jnp.ravel(outputs[""video_tokens""]),\n size=num_patch_latents,\n fill_value=0,\n )\n codebook_usage_tokenizer = (index_counts_tokenizer != 0).mean()\n metrics = dict(\n cross_entropy_loss=ce_loss,\n masked_token_top1_accuracy=masked_token_top_1_acc,\n masked_token_top2_accuracy=masked_token_top_2_acc,\n masked_token_top5_accuracy=masked_token_top_5_acc,\n masked_token_top16_accuracy=masked_token_top_16_acc,\n select_logit=outputs[""token_logits""].max(-1).mean(),\n select_p=select_probs.max(-1).mean(),\n entropy=jax.scipy.special.entr(select_probs).sum(-1).mean(),\n z_loss=z_loss_metric,\n psnr=psnr,\n ssim=ssim,\n codebook_usage_tokenizer=codebook_usage_tokenizer,\n )\n if ""lam_indices"" in outputs.keys():\n _, index_counts_lam = jnp.unique_counts(\n jnp.ravel(outputs[""lam_indices""]),\n size=num_actions,\n fill_value=0,\n )\n codebook_usage_lam = (index_counts_lam != 0).mean()\n metrics[""codebook_usage_lam""] = codebook_usage_lam\n return ce_loss, metrics\n\n\ndef main(args: Args) -> None:\n jax.distributed.initialize()\n num_devices = jax.device_count()\n if num_devices == 0:\n raise ValueError(""No JAX devices found."")\n print(f""Running on {num_devices} devices."")\n\n if args.batch_size % num_devices != 0:\n raise ValueError(\n f""Global batch size {args.batch_size} must be divisible by ""\n f""number of devices {num_devices}.""\n )\n\n rng = jax.random.key(args.seed)\n\n # --- Initialize model ---\n genie, rng = build_model(args, rng)\n _, params, _ = nnx.split(genie, nnx.Param, ...)\n param_counts 
= count_parameters_by_component(params)\n\n if args.log and jax.process_index() == 0:\n wandb_init_kwargs = {\n ""entity"": args.entity,\n ""project"": args.project,\n ""name"": args.name,\n ""tags"": args.tags,\n ""group"": ""debug"",\n ""config"": args,\n }\n\n if args.wandb_id:\n wandb_init_kwargs.update(\n {\n ""id"": args.wandb_id,\n ""resume"": ""allow"",\n }\n )\n wandb.init(**wandb_init_kwargs)\n\n wandb.config.update({""model_param_count"": param_counts})\n\n print(""Parameter counts:"")\n print(param_counts)\n\n # --- Initialize optimizer ---\n optimizer = build_optimizer(genie, args)\n del genie\n\n # FIXME: switch to create_hybrid_device_mesh for runs spanning multiple nodes\n _, replicated_sharding, videos_sharding, actions_sharding = build_mesh_and_sharding(\n num_devices\n )\n\n shard_optimizer_states(optimizer, replicated_sharding)\n\n # --- Initialize checkpoint manager ---\n checkpoint_manager = build_checkpoint_manager(args)\n\n # --- Create DataLoaderIterator from dataloader ---\n train_iterator = build_dataloader(args, args.data_dir)\n val_iterator = None\n if args.val_data_dir:\n val_iterator = build_dataloader(args, args.val_data_dir)\n\n # --- Restore checkpoint ---\n step, optimizer, train_iterator, val_iterator, rng = (\n restore_or_initialize_components(\n args,\n checkpoint_manager,\n optimizer,\n train_iterator,\n rng,\n replicated_sharding,\n val_iterator,\n )\n )\n\n # --- Define loss and train step (close over args) ---\n def dynamics_loss_fn(\n model: Genie,\n inputs: dict,\n ) -> tuple[jax.Array, tuple[jax.Array, dict]]:\n gt = jnp.asarray(inputs[""videos""], dtype=jnp.float32) / 255.0\n inputs[""videos""] = gt.astype(args.dtype)\n outputs = model(inputs)\n ce_loss, metrics = _calculate_step_metrics(\n outputs, gt, args.num_actions, args.num_patch_latents\n )\n z_loss = metrics[""z_loss""]\n total_loss = ce_loss + args.z_loss_weight * z_loss\n metrics[""total_loss""] = total_loss\n return total_loss, (outputs[""recon""], metrics)\n\n @nnx.jit\n def val_step(genie: Genie, inputs: dict) -> dict:\n """"""Evaluate model and compute metrics""""""\n genie.eval()\n gt = jnp.asarray(inputs[""videos""], dtype=jnp.float32) / 255.0\n (loss, (recon, metrics)) = dynamics_loss_fn(genie, inputs)\n val_output = {""loss"": loss, ""recon"": recon, ""metrics"": metrics}\n\n # --- Evaluate full frame prediction (sampling) ---\n if args.eval_full_frame:\n inputs[""videos""] = gt.astype(args.dtype)\n tokenizer_outputs = genie.tokenizer.vq_encode(\n inputs[""videos""], training=False\n )\n tokens_full_frame = tokenizer_outputs[""indices""]\n lam_indices_E = None\n if not args.use_gt_actions:\n lam_indices_E = genie.vq_encode(inputs, training=False)\n inputs[""latent_actions""] = lam_indices_E\n inputs[""videos""] = inputs[""videos""][\n :, :-1\n ] # remove last frame for generation\n recon_full_frame, logits_full_frame = genie.sample(\n inputs,\n args.seq_len,\n args.val_temperature,\n args.val_sample_argmax,\n args.val_maskgit_steps,\n )\n # Calculate metrics for the last frame only\n step_outputs = {\n ""recon"": recon_full_frame[:, -1],\n ""token_logits"": logits_full_frame[:, -1],\n ""video_tokens"": tokens_full_frame[:, -1],\n ""mask"": jnp.ones_like(tokens_full_frame[:, -1]),\n }\n if lam_indices_E is not None:\n lam_indices_B = lam_indices_E.reshape((-1, args.seq_len - 1))[:, -1]\n step_outputs[""lam_indices""] = lam_indices_B\n\n loss_full_frame, metrics_full_frame = _calculate_step_metrics(\n step_outputs, gt[:, -1], args.num_actions, args.num_patch_latents\n )\n 
val_output.update(\n {\n ""loss_full_frame"": loss_full_frame,\n ""recon_full_frame"": recon_full_frame,\n ""metrics_full_frame"": metrics_full_frame,\n }\n )\n return val_output\n\n def calculate_validation_metrics(val_dataloader, genie, rng):\n step = 0\n loss_per_step = []\n metrics_per_step = []\n loss_full_frame_per_step = []\n metrics_full_frame_per_step = []\n batch = None\n recon = None\n recon_full_frame = None\n for batch in val_dataloader:\n rng, _rng_mask = jax.random.split(rng, 2)\n batch[""rng""] = _rng_mask\n val_outputs = val_step(genie, batch)\n loss_per_step.append(val_outputs[""loss""])\n metrics_per_step.append(val_outputs[""metrics""])\n recon = val_outputs[""recon""]\n if args.eval_full_frame:\n loss_full_frame_per_step.append(val_outputs[""loss_full_frame""])\n metrics_full_frame_per_step.append(val_outputs[""metrics_full_frame""])\n recon_full_frame = val_outputs[""recon_full_frame""]\n step += 1\n if step > args.val_steps:\n break\n\n if step < args.val_steps:\n print(\n f""Warning: Your validation dataset is too small to make val_steps many steps. Made {step} steps, expected {args.val_steps}""\n )\n\n val_metrics = {\n f""val_{key}"": np.mean([float(m[key]) for m in metrics_per_step])\n for key in metrics_per_step[0].keys()\n }\n val_metrics[""val_loss""] = np.mean(loss_per_step)\n if args.eval_full_frame:\n val_metrics_full_frame = {\n f""val_full_frame_{key}"": np.mean(\n [float(m[key]) for m in metrics_full_frame_per_step]\n )\n for key in metrics_full_frame_per_step[0].keys()\n }\n val_metrics.update(val_metrics_full_frame)\n val_metrics[""val_full_frame_loss""] = np.mean(loss_full_frame_per_step)\n return val_metrics, batch, recon, recon_full_frame\n\n dataloader_val = None\n if val_iterator:\n dataloader_val = (\n {\n ""videos"": jax.make_array_from_process_local_data(\n videos_sharding, elem[""videos""]\n ),\n ""actions"": (\n jax.make_array_from_process_local_data(\n actions_sharding, elem[""actions""]\n )\n if args.use_gt_actions\n else None\n ),\n }\n for elem in val_iterator\n )\n print(f""Starting training from step {step}..."")\n # --- Validation loss ---\n val_results = {}\n if dataloader_val and step % args.val_interval == 0:\n rng, _rng_mask_val = jax.random.split(rng, 2)\n print(""Calculating validation metrics..."")\n val_metrics, val_gt_batch, val_recon, val_recon_full_frame = (\n calculate_validation_metrics(\n dataloader_val, optimizer.model, _rng_mask_val\n )\n )\n print(f""Step {step}, validation loss: {val_metrics['val_loss']}"")\n print(val_metrics)\n\n if checkpoint_manager:\n checkpoint_manager.close()\n\n\nif __name__ == ""__main__"":\n args = tyro.cli(Args)\n main(args)\n",python,tab +3867,2339594,"TERMINAL",0,0,"5",,terminal_output +3868,2340650,"TERMINAL",0,0,"6",,terminal_output +3869,2341634,"TERMINAL",0,0,"7",,terminal_output +3870,2342644,"TERMINAL",0,0,"8",,terminal_output +3871,2343662,"TERMINAL",0,0,"9",,terminal_output +3872,2344740,"TERMINAL",0,0,"30",,terminal_output +3873,2345138,"jasmine/train_dynamics.py",0,0,"import os\n\n\nos.environ.setdefault(""XLA_PYTHON_CLIENT_MEM_FRACTION"", ""0.98"")\n\nfrom dataclasses import dataclass, field\nimport itertools\nfrom typing import cast, Optional\n\nimport einops\nfrom jax.sharding import Mesh, PartitionSpec, NamedSharding\nfrom jax.experimental.mesh_utils import create_device_mesh\nimport optax\nimport orbax.checkpoint as ocp\nimport numpy as np\nimport dm_pix as pix\nimport jax\nimport jax.numpy as jnp\nimport tyro\nimport wandb\nimport grain\nimport flax.nnx as nnx\n\nfrom 
genie import Genie, restore_genie_components\nfrom utils.dataloader import get_dataloader\nfrom utils.train_utils import (\n get_lr_schedule,\n count_parameters_by_component,\n print_mem_stats,\n print_compiled_memory_stats,\n print_compiled_cost_analysis,\n)\n\n\n@dataclass\nclass Args:\n # Experiment\n num_steps: int = 200_000\n seed: int = 0\n seq_len: int = 16\n image_channels: int = 3\n image_height: int = 64\n image_width: int = 64\n data_dir: str = """"\n save_ckpt: bool = False\n restore_ckpt: bool = False\n # Optimization\n batch_size: int = 36\n init_lr: float = 0.0\n max_lr: float = 3e-5\n decay_end: float = 0.0\n wsd_decay_steps: int = (\n 20_000 # NOTE: wsd_decay_steps will only be used when using a wsd-schedule\n )\n warmup_steps: int = 5000\n lr_schedule: str = ""wsd"" # supported options: wsd, cos\n # Tokenizer\n tokenizer_dim: int = 512\n tokenizer_ffn_dim: int = 2048\n latent_patch_dim: int = 32\n num_patch_latents: int = 1024\n patch_size: int = 16\n tokenizer_num_blocks: int = 4\n tokenizer_num_heads: int = 8\n tokenizer_checkpoint: str = """"\n # LAM\n lam_dim: int = 512\n lam_ffn_dim: int = 2048\n latent_action_dim: int = 32\n num_actions: int = 6\n lam_patch_size: int = 16\n lam_num_blocks: int = 4\n lam_num_heads: int = 8\n lam_checkpoint: str = """"\n # Dynamics\n dyna_type: str = ""maskgit"" # supported options: maskgit, causal\n dyna_dim: int = 512\n dyna_ffn_dim: int = 2048\n dyna_num_blocks: int = 6\n dyna_num_heads: int = 8\n max_noise_level: float = 0.7\n noise_buckets: int = 10\n dropout: float = 0.0\n mask_limit: float = 0.5\n z_loss_weight: float = 0.0\n param_dtype = jnp.float32\n dtype = jnp.bfloat16\n use_flash_attention: bool = True\n use_gt_actions: bool = False\n # Logging\n log: bool = True\n entity: str = """"\n project: str = """"\n name: str = ""train_dynamics""\n tags: list[str] = field(default_factory=lambda: [""dynamics""])\n log_interval: int = 50\n log_image_interval: int = 1000\n ckpt_dir: str = """"\n log_checkpoint_interval: int = 5000\n log_checkpoint_keep_period: int = 20_000\n log_gradients: bool = False\n val_data_dir: str = """"\n val_interval: int = 20_000\n val_steps: int = 50\n eval_full_frame: bool = True\n val_maskgit_steps: int = 25\n val_temperature: float = 1\n val_sample_argmax: bool = False\n wandb_id: str = """"\n\n\ndef build_model(args: Args, rng: jax.Array) -> tuple[Genie, jax.Array]:\n rng, _rng = jax.random.split(rng)\n rngs = nnx.Rngs(_rng)\n genie = Genie(\n # Tokenizer\n in_dim=args.image_channels,\n tokenizer_dim=args.tokenizer_dim,\n tokenizer_ffn_dim=args.tokenizer_ffn_dim,\n latent_patch_dim=args.latent_patch_dim,\n num_patch_latents=args.num_patch_latents,\n patch_size=args.patch_size,\n tokenizer_num_blocks=args.tokenizer_num_blocks,\n tokenizer_num_heads=args.tokenizer_num_heads,\n # LAM\n lam_dim=args.lam_dim,\n lam_ffn_dim=args.lam_ffn_dim,\n latent_action_dim=args.latent_action_dim,\n num_actions=args.num_actions,\n lam_patch_size=args.lam_patch_size,\n lam_num_blocks=args.lam_num_blocks,\n lam_num_heads=args.lam_num_heads,\n lam_co_train=not args.lam_checkpoint,\n use_gt_actions=args.use_gt_actions,\n # Dynamics\n dyna_type=args.dyna_type,\n dyna_dim=args.dyna_dim,\n dyna_ffn_dim=args.dyna_ffn_dim,\n dyna_num_blocks=args.dyna_num_blocks,\n dyna_num_heads=args.dyna_num_heads,\n dropout=args.dropout,\n max_noise_level=args.max_noise_level,\n noise_buckets=args.noise_buckets,\n mask_limit=args.mask_limit,\n param_dtype=args.param_dtype,\n dtype=args.dtype,\n use_flash_attention=args.use_flash_attention,\n 
decode=False,\n rngs=rngs,\n )\n if args.use_gt_actions:\n assert (\n not args.lam_checkpoint\n ), ""Cannot use LAM when using ground-truth actions.""\n else:\n assert genie.lam is not None\n del genie.lam.decoder\n return genie, rng\n\n\ndef build_optimizer(genie: Genie, args: Args) -> nnx.ModelAndOptimizer:\n lr_schedule = get_lr_schedule(\n args.lr_schedule,\n args.init_lr,\n args.max_lr,\n args.decay_end,\n args.num_steps,\n args.warmup_steps,\n args.wsd_decay_steps,\n )\n tx = optax.adamw(\n learning_rate=lr_schedule,\n b1=0.9,\n b2=0.9,\n weight_decay=1e-4,\n mu_dtype=args.param_dtype, # moments in full precision\n )\n optimizer = nnx.ModelAndOptimizer(genie, tx)\n return optimizer\n\n\ndef build_mesh_and_sharding(\n num_devices: int,\n) -> tuple[Mesh, NamedSharding, NamedSharding, NamedSharding]:\n device_mesh_arr = create_device_mesh((num_devices,))\n mesh = Mesh(devices=device_mesh_arr, axis_names=(""data"",))\n replicated_sharding = NamedSharding(mesh, PartitionSpec())\n videos_sharding = NamedSharding(mesh, PartitionSpec(""data"", None, None, None, None))\n actions_sharding = NamedSharding(mesh, PartitionSpec(""data"", None))\n return mesh, replicated_sharding, videos_sharding, actions_sharding\n\n\ndef shard_optimizer_states(\n optimizer: nnx.ModelAndOptimizer, replicated_sharding: NamedSharding\n) -> None:\n model_state = nnx.state(optimizer.model)\n model_sharded_state = jax.lax.with_sharding_constraint(\n model_state, replicated_sharding\n )\n nnx.update(optimizer.model, model_sharded_state)\n optimizer_state = nnx.state(optimizer, nnx.optimizer.OptState)\n optimizer_sharded_state = jax.lax.with_sharding_constraint(\n optimizer_state, replicated_sharding\n )\n nnx.update(optimizer, optimizer_sharded_state)\n\n\ndef build_dataloader(args: Args, data_dir: str) -> grain.DataLoaderIterator:\n image_shape = (args.image_height, args.image_width, args.image_channels)\n array_record_files = [\n os.path.join(data_dir, x)\n for x in os.listdir(data_dir)\n if x.endswith("".array_record"")\n ]\n grain_dataloader = get_dataloader(\n array_record_files,\n args.seq_len,\n # NOTE: We deliberately pass the global batch size\n # The dataloader shards the dataset across all processes\n args.batch_size,\n *image_shape,\n num_workers=8,\n prefetch_buffer_size=1,\n seed=args.seed,\n )\n initial_state = grain_dataloader._create_initial_state()\n grain_iterator = grain.DataLoaderIterator(grain_dataloader, initial_state)\n return grain_iterator\n\n\ndef build_checkpoint_manager(args: Args) -> Optional[ocp.CheckpointManager]:\n if args.restore_ckpt or args.save_ckpt:\n handler_registry = ocp.handlers.DefaultCheckpointHandlerRegistry()\n handler_registry.add(\n ""model_state"", ocp.args.PyTreeSave, ocp.handlers.PyTreeCheckpointHandler\n )\n handler_registry.add(\n ""model_state"", ocp.args.PyTreeRestore, ocp.handlers.PyTreeCheckpointHandler\n )\n handler_registry.add(\n ""train_dataloader_state"",\n grain.checkpoint.CheckpointSave,\n cast(ocp.handlers.CheckpointHandler, grain.checkpoint.CheckpointHandler),\n )\n handler_registry.add(\n ""train_dataloader_state"",\n grain.checkpoint.CheckpointRestore,\n cast(ocp.handlers.CheckpointHandler, grain.checkpoint.CheckpointHandler),\n )\n if args.val_data_dir:\n handler_registry.add(\n ""val_dataloader_state"",\n grain.checkpoint.CheckpointSave,\n cast(\n ocp.handlers.CheckpointHandler, grain.checkpoint.CheckpointHandler\n ),\n )\n handler_registry.add(\n ""val_dataloader_state"",\n grain.checkpoint.CheckpointRestore,\n cast(\n ocp.handlers.CheckpointHandler, 
grain.checkpoint.CheckpointHandler\n ),\n )\n checkpoint_options = ocp.CheckpointManagerOptions(\n save_interval_steps=args.log_checkpoint_interval,\n max_to_keep=3,\n keep_period=args.log_checkpoint_keep_period,\n step_format_fixed_length=6,\n cleanup_tmp_directories=True,\n )\n checkpoint_manager = ocp.CheckpointManager(\n args.ckpt_dir,\n options=checkpoint_options,\n handler_registry=handler_registry,\n )\n return checkpoint_manager\n else:\n return None\n\n\ndef restore_or_initialize_components(\n args: Args,\n checkpoint_manager: Optional[ocp.CheckpointManager],\n optimizer: nnx.ModelAndOptimizer,\n train_iterator: grain.DataLoaderIterator,\n rng: jax.Array,\n replicated_sharding: NamedSharding,\n val_iterator: Optional[grain.DataLoaderIterator],\n restore_step: Optional[int] = None,\n) -> tuple[\n int,\n nnx.ModelAndOptimizer,\n grain.DataLoaderIterator,\n grain.DataLoaderIterator,\n jax.Array,\n]:\n step = 0\n if checkpoint_manager and restore_step is None:\n restore_step = checkpoint_manager.latest_step()\n if args.restore_ckpt:\n assert checkpoint_manager is not None\n abstract_optimizer = nnx.eval_shape(lambda: optimizer)\n abstract_optimizer_state = nnx.state(abstract_optimizer)\n if val_iterator:\n restore_args = ocp.args.Composite(\n model_state=ocp.args.PyTreeRestore(abstract_optimizer_state), # type: ignore\n train_dataloader_state=grain.checkpoint.CheckpointRestore(train_iterator), # type: ignore\n val_dataloader_state=grain.checkpoint.CheckpointRestore(val_iterator), # type: ignore\n )\n else:\n restore_args = ocp.args.Composite(\n model_state=ocp.args.PyTreeRestore(abstract_optimizer_state), # type: ignore\n train_dataloader_state=grain.checkpoint.CheckpointRestore(train_iterator), # type: ignore\n )\n restored = checkpoint_manager.restore(\n checkpoint_manager.latest_step(), args=restore_args\n )\n restored_optimizer_state = restored[""model_state""]\n nnx.update(optimizer, restored_optimizer_state)\n train_iterator = restored[""train_dataloader_state""]\n if val_iterator:\n val_iterator = restored[""val_dataloader_state""]\n step = checkpoint_manager.latest_step() or 0\n print(f""Restored dataloader and model state from step {step}"")\n else:\n # Restore from pre-trained tokenizer (and LAM)\n rng, _rng = jax.random.split(rng)\n optimizer = restore_genie_components(optimizer, replicated_sharding, _rng, args)\n return step, optimizer, train_iterator, val_iterator, rng\n\n\ndef _calculate_top_k_accuracy(\n token_logits_BTNV: jax.Array,\n video_tokens_BTN: jax.Array,\n mask_BTN: jax.Array,\n k: int,\n) -> jax.Array:\n _, topk_indices_BTNK = jax.lax.top_k(token_logits_BTNV, k)\n topk_correct = jnp.any(\n topk_indices_BTNK == video_tokens_BTN[..., jnp.newaxis], axis=-1\n )\n topk_acc = (mask_BTN * topk_correct).sum() / mask_BTN.sum()\n return topk_acc\n\n\ndef _calculate_step_metrics(\n outputs: dict[str, jax.Array],\n gt: jax.Array,\n num_actions: int,\n num_patch_latents: int,\n) -> tuple[jax.Array, dict]:\n mask_BTN = outputs[""mask""]\n outputs[""token_logits""] = outputs[""token_logits""].astype(jnp.float32)\n ce_loss = optax.softmax_cross_entropy_with_integer_labels(\n outputs[""token_logits""], outputs[""video_tokens""]\n )\n ce_loss = (mask_BTN * ce_loss).sum() / mask_BTN.sum()\n z_val = jax.nn.logsumexp(outputs[""token_logits""], axis=-1)\n z_loss_metric = (mask_BTN * (z_val**2)).sum() / mask_BTN.sum()\n\n masked_token_top_1_acc = _calculate_top_k_accuracy(\n outputs[""token_logits""], outputs[""video_tokens""], mask_BTN, 1\n )\n masked_token_top_2_acc = 
_calculate_top_k_accuracy(\n outputs[""token_logits""], outputs[""video_tokens""], mask_BTN, 2\n )\n masked_token_top_5_acc = _calculate_top_k_accuracy(\n outputs[""token_logits""], outputs[""video_tokens""], mask_BTN, 5\n )\n masked_token_top_16_acc = _calculate_top_k_accuracy(\n outputs[""token_logits""], outputs[""video_tokens""], mask_BTN, 16\n )\n\n select_probs = jax.nn.softmax(outputs[""token_logits""])\n gt_val = gt.clip(0, 1).reshape(-1, *gt.shape[2:])\n recon = outputs[""recon""].clip(0, 1).reshape(-1, *outputs[""recon""].shape[2:])\n psnr = jnp.asarray(pix.psnr(gt_val, recon)).mean()\n ssim = jnp.asarray(pix.ssim(gt_val, recon)).mean()\n _, index_counts_tokenizer = jnp.unique_counts(\n jnp.ravel(outputs[""video_tokens""]),\n size=num_patch_latents,\n fill_value=0,\n )\n codebook_usage_tokenizer = (index_counts_tokenizer != 0).mean()\n metrics = dict(\n cross_entropy_loss=ce_loss,\n masked_token_top1_accuracy=masked_token_top_1_acc,\n masked_token_top2_accuracy=masked_token_top_2_acc,\n masked_token_top5_accuracy=masked_token_top_5_acc,\n masked_token_top16_accuracy=masked_token_top_16_acc,\n select_logit=outputs[""token_logits""].max(-1).mean(),\n select_p=select_probs.max(-1).mean(),\n entropy=jax.scipy.special.entr(select_probs).sum(-1).mean(),\n z_loss=z_loss_metric,\n psnr=psnr,\n ssim=ssim,\n codebook_usage_tokenizer=codebook_usage_tokenizer,\n )\n if ""lam_indices"" in outputs.keys():\n _, index_counts_lam = jnp.unique_counts(\n jnp.ravel(outputs[""lam_indices""]),\n size=num_actions,\n fill_value=0,\n )\n codebook_usage_lam = (index_counts_lam != 0).mean()\n metrics[""codebook_usage_lam""] = codebook_usage_lam\n return ce_loss, metrics\n\n\ndef main(args: Args) -> None:\n jax.distributed.initialize()\n num_devices = jax.device_count()\n if num_devices == 0:\n raise ValueError(""No JAX devices found."")\n print(f""Running on {num_devices} devices."")\n\n if args.batch_size % num_devices != 0:\n raise ValueError(\n f""Global batch size {args.batch_size} must be divisible by ""\n f""number of devices {num_devices}.""\n )\n\n rng = jax.random.key(args.seed)\n\n # --- Initialize model ---\n genie, rng = build_model(args, rng)\n _, params, _ = nnx.split(genie, nnx.Param, ...)\n param_counts = count_parameters_by_component(params)\n\n if args.log and jax.process_index() == 0:\n wandb_init_kwargs = {\n ""entity"": args.entity,\n ""project"": args.project,\n ""name"": args.name,\n ""tags"": args.tags,\n ""group"": ""debug"",\n ""config"": args,\n }\n\n if args.wandb_id:\n wandb_init_kwargs.update(\n {\n ""id"": args.wandb_id,\n ""resume"": ""allow"",\n }\n )\n wandb.init(**wandb_init_kwargs)\n\n wandb.config.update({""model_param_count"": param_counts})\n\n print(""Parameter counts:"")\n print(param_counts)\n\n # --- Initialize optimizer ---\n optimizer = build_optimizer(genie, args)\n del genie\n\n # FIXME: switch to create_hybrid_device_mesh for runs spanning multiple nodes\n _, replicated_sharding, videos_sharding, actions_sharding = build_mesh_and_sharding(\n num_devices\n )\n\n shard_optimizer_states(optimizer, replicated_sharding)\n\n # --- Initialize checkpoint manager ---\n checkpoint_manager = build_checkpoint_manager(args)\n\n # --- Create DataLoaderIterator from dataloader ---\n train_iterator = build_dataloader(args, args.data_dir)\n val_iterator = None\n if args.val_data_dir:\n val_iterator = build_dataloader(args, args.val_data_dir)\n\n # --- Restore checkpoint ---\n step, optimizer, train_iterator, val_iterator, rng = (\n restore_or_initialize_components(\n args,\n 
checkpoint_manager,\n optimizer,\n train_iterator,\n rng,\n replicated_sharding,\n val_iterator,\n )\n )\n\n # --- Define loss and train step (close over args) ---\n def dynamics_loss_fn(\n model: Genie,\n inputs: dict,\n ) -> tuple[jax.Array, tuple[jax.Array, dict]]:\n gt = jnp.asarray(inputs[""videos""], dtype=jnp.float32) / 255.0\n inputs[""videos""] = gt.astype(args.dtype)\n outputs = model(inputs)\n ce_loss, metrics = _calculate_step_metrics(\n outputs, gt, args.num_actions, args.num_patch_latents\n )\n z_loss = metrics[""z_loss""]\n total_loss = ce_loss + args.z_loss_weight * z_loss\n metrics[""total_loss""] = total_loss\n return total_loss, (outputs[""recon""], metrics)\n\n @nnx.jit(donate_argnums=0)\n def train_step(\n optimizer: nnx.ModelAndOptimizer, inputs: dict\n ) -> tuple[jax.Array, jax.Array, dict]:\n def loss_fn(model: Genie) -> tuple[jax.Array, tuple[jax.Array, dict]]:\n model.train()\n return dynamics_loss_fn(model, inputs)\n\n (loss, (recon, metrics)), grads = nnx.value_and_grad(loss_fn, has_aux=True)(\n optimizer.model\n )\n optimizer.update(grads)\n if args.log_gradients:\n metrics[""gradients_std/""] = jax.tree.map(\n lambda x: x.std(), grads[""params""][""dynamics""]\n )\n return loss, recon, metrics\n\n @nnx.jit\n def val_step(genie: Genie, inputs: dict) -> dict:\n """"""Evaluate model and compute metrics""""""\n genie.eval()\n gt = jnp.asarray(inputs[""videos""], dtype=jnp.float32) / 255.0\n (loss, (recon, metrics)) = dynamics_loss_fn(genie, inputs)\n val_output = {""loss"": loss, ""recon"": recon, ""metrics"": metrics}\n\n # --- Evaluate full frame prediction (sampling) ---\n if args.eval_full_frame:\n inputs[""videos""] = gt.astype(args.dtype)\n tokenizer_outputs = genie.tokenizer.vq_encode(\n inputs[""videos""], training=False\n )\n tokens_full_frame = tokenizer_outputs[""indices""]\n lam_indices_E = None\n if not args.use_gt_actions:\n lam_indices_E = genie.vq_encode(inputs, training=False)\n inputs[""latent_actions""] = lam_indices_E\n inputs[""videos""] = inputs[""videos""][\n :, :-1\n ] # remove last frame for generation\n recon_full_frame, logits_full_frame = genie.sample(\n batch=inputs,\n seq_len=args.seq_len,\n noise_level=0.0,\n temperature=args.val_temperature,\n sample_argmax=args.val_sample_argmax,\n maskgit_steps=args.val_maskgit_steps,\n )\n # Calculate metrics for the last frame only\n step_outputs = {\n ""recon"": recon_full_frame[:, -1],\n ""token_logits"": logits_full_frame[:, -1],\n ""video_tokens"": tokens_full_frame[:, -1],\n ""mask"": jnp.ones_like(tokens_full_frame[:, -1]),\n }\n if lam_indices_E is not None:\n lam_indices_B = lam_indices_E.reshape((-1, args.seq_len - 1))[:, -1]\n step_outputs[""lam_indices""] = lam_indices_B\n\n loss_full_frame, metrics_full_frame = _calculate_step_metrics(\n step_outputs, gt[:, -1], args.num_actions, args.num_patch_latents\n )\n val_output.update(\n {\n ""loss_full_frame"": loss_full_frame,\n ""recon_full_frame"": recon_full_frame,\n ""metrics_full_frame"": metrics_full_frame,\n }\n )\n return val_output\n\n def calculate_validation_metrics(val_dataloader, genie, rng):\n step = 0\n loss_per_step = []\n metrics_per_step = []\n loss_full_frame_per_step = []\n metrics_full_frame_per_step = []\n batch = None\n recon = None\n recon_full_frame = None\n for batch in val_dataloader:\n rng, _rng_mask = jax.random.split(rng, 2)\n batch[""rng""] = _rng_mask\n val_outputs = val_step(genie, batch)\n loss_per_step.append(val_outputs[""loss""])\n metrics_per_step.append(val_outputs[""metrics""])\n recon = 
val_outputs[""recon""]\n if args.eval_full_frame:\n loss_full_frame_per_step.append(val_outputs[""loss_full_frame""])\n metrics_full_frame_per_step.append(val_outputs[""metrics_full_frame""])\n recon_full_frame = val_outputs[""recon_full_frame""]\n step += 1\n if step > args.val_steps:\n break\n\n if step < args.val_steps:\n print(\n f""Warning: Your validation dataset is too small to make val_steps many steps. Made {step} steps, expected {args.val_steps}""\n )\n\n val_metrics = {\n f""val_{key}"": np.mean([float(m[key]) for m in metrics_per_step])\n for key in metrics_per_step[0].keys()\n }\n val_metrics[""val_loss""] = np.mean(loss_per_step)\n if args.eval_full_frame:\n val_metrics_full_frame = {\n f""val_full_frame_{key}"": np.mean(\n [float(m[key]) for m in metrics_full_frame_per_step]\n )\n for key in metrics_full_frame_per_step[0].keys()\n }\n val_metrics.update(val_metrics_full_frame)\n val_metrics[""val_full_frame_loss""] = np.mean(loss_full_frame_per_step)\n return val_metrics, batch, recon, recon_full_frame\n\n # --- TRAIN LOOP ---\n dataloader_train = (\n {\n ""videos"": jax.make_array_from_process_local_data(\n videos_sharding, local_data=elem[""videos""]\n ),\n ""actions"": (\n jax.make_array_from_process_local_data(\n actions_sharding, elem[""actions""]\n )\n if args.use_gt_actions\n else None\n ),\n }\n for elem in train_iterator\n )\n dataloader_val = None\n if val_iterator:\n dataloader_val = (\n {\n ""videos"": jax.make_array_from_process_local_data(\n videos_sharding, elem[""videos""]\n ),\n ""actions"": (\n jax.make_array_from_process_local_data(\n actions_sharding, elem[""actions""]\n )\n if args.use_gt_actions\n else None\n ),\n }\n for elem in val_iterator\n )\n if jax.process_index() == 0:\n first_batch = next(dataloader_train)\n first_batch[""rng""] = rng # type: ignore\n compiled = train_step.lower(optimizer, first_batch).compile()\n print_compiled_memory_stats(compiled.memory_analysis())\n print_compiled_cost_analysis(compiled.cost_analysis())\n # Do not skip the first batch during training\n dataloader_train = itertools.chain([first_batch], dataloader_train)\n print(f""Starting training from step {step}..."")\n first_step = step\n while step < args.num_steps:\n for batch in dataloader_train:\n # --- Train step ---\n rng, _rng_mask = jax.random.split(rng, 2)\n batch[""rng""] = _rng_mask\n loss, recon, metrics = train_step(optimizer, batch)\n if step == first_step:\n print_mem_stats(""After params initialized"")\n step += 1\n\n # --- Validation loss ---\n val_results = {}\n if dataloader_val and step % args.val_interval == 0:\n rng, _rng_mask_val = jax.random.split(rng, 2)\n print(""Calculating validation metrics..."")\n val_metrics, val_gt_batch, val_recon, val_recon_full_frame = (\n calculate_validation_metrics(\n dataloader_val, optimizer.model, _rng_mask_val\n )\n )\n print(f""Step {step}, validation loss: {val_metrics['val_loss']}"")\n val_results = {\n ""metrics"": val_metrics,\n ""gt_batch"": val_gt_batch,\n ""recon"": val_recon,\n ""full_frame"": val_recon_full_frame,\n }\n\n # --- Logging ---\n if args.log:\n if step % args.log_interval == 0 and jax.process_index() == 0:\n log_dict = {""loss"": loss, ""step"": step, **metrics}\n if val_results:\n log_dict.update(val_results[""metrics""])\n wandb.log(log_dict)\n if step % args.log_image_interval == 0:\n gt_seq = batch[""videos""][0].astype(jnp.float32) / 255.0\n recon_seq = recon[0].clip(0, 1)\n comparison_seq = jnp.concatenate((gt_seq, recon_seq), axis=1)\n comparison_seq = einops.rearrange(\n 
comparison_seq * 255, ""t h w c -> h (t w) c""\n )\n if val_results:\n val_results[""gt_seq_val""] = (\n val_results[""gt_batch""][""videos""][0].astype(jnp.float32)\n / 255.0\n )\n val_results[""recon_seq_val""] = val_results[""recon""][0].clip(\n 0, 1\n )\n val_comparison_seq = jnp.concatenate(\n (val_results[""gt_seq_val""], val_results[""recon_seq_val""]),\n axis=1,\n )\n val_results[""val_comparison_seq""] = einops.rearrange(\n val_comparison_seq * 255, ""t h w c -> h (t w) c""\n )\n if args.eval_full_frame:\n val_results[""full_frame_seq_val""] = val_results[\n ""full_frame""\n ][0].clip(0, 1)\n val_results[""val_full_frame_comparison_seq""] = (\n jnp.concatenate(\n (\n val_results[""gt_seq_val""],\n val_results[""full_frame_seq_val""],\n ),\n axis=1,\n )\n )\n val_results[""val_full_frame_comparison_seq""] = (\n einops.rearrange(\n val_results[""val_full_frame_comparison_seq""] * 255,\n ""t h w c -> h (t w) c"",\n )\n )\n # NOTE: Process-dependent control flow deliberately happens\n # after indexing operation since it must not contain code\n # sections that lead to cross-accelerator communication.\n if jax.process_index() == 0:\n log_images = dict(\n image=wandb.Image(np.asarray(gt_seq[args.seq_len - 1])),\n recon=wandb.Image(np.asarray(recon_seq[args.seq_len - 1])),\n true_vs_recon=wandb.Image(\n np.asarray(comparison_seq.astype(np.uint8))\n ),\n )\n if val_results:\n log_images.update(\n dict(\n val_image=wandb.Image(\n np.asarray(\n val_results[""gt_seq_val""][args.seq_len - 1]\n )\n ),\n val_recon=wandb.Image(\n np.asarray(\n val_results[""recon_seq_val""][\n args.seq_len - 1\n ]\n )\n ),\n val_true_vs_recon=wandb.Image(\n np.asarray(\n val_results[""val_comparison_seq""].astype(\n np.uint8\n )\n )\n ),\n )\n )\n if args.eval_full_frame:\n log_images.update(\n dict(\n val_full_frame=wandb.Image(\n np.asarray(\n val_results[""full_frame_seq_val""][\n args.seq_len - 1\n ]\n )\n ),\n val_true_vs_full_frame=wandb.Image(\n np.asarray(\n val_results[\n ""val_full_frame_comparison_seq""\n ].astype(np.uint8)\n )\n ),\n )\n )\n wandb.log(log_images)\n # --- Checkpointing ---\n if args.save_ckpt and step % args.log_checkpoint_interval == 0:\n assert checkpoint_manager is not None\n optimizer_state = nnx.state(optimizer)\n if val_iterator:\n ckpt_manager_args = ocp.args.Composite(\n model_state=ocp.args.PyTreeSave(optimizer_state), # type: ignore\n train_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n train_iterator # type: ignore\n ),\n val_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n val_iterator # type: ignore\n ),\n )\n else:\n ckpt_manager_args = ocp.args.Composite(\n model_state=ocp.args.PyTreeSave(optimizer_state), # type: ignore\n train_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n train_iterator # type: ignore\n ),\n )\n checkpoint_manager.save(step, args=ckpt_manager_args)\n print(f""Saved checkpoint at step {step}"")\n if step >= args.num_steps:\n break\n\n if checkpoint_manager:\n checkpoint_manager.close()\n\n\nif __name__ == ""__main__"":\n args = tyro.cli(Args)\n main(args)\n",python,tab +3874,2345706,"TERMINAL",0,0,"1",,terminal_output +3875,2346723,"TERMINAL",0,0,"2",,terminal_output +3876,2347742,"TERMINAL",0,0,"3",,terminal_output +3877,2348822,"TERMINAL",0,0,"4",,terminal_output +3878,2349772,"TERMINAL",0,0,"\rgit diff",,terminal_output +3879,2349793,"TERMINAL",0,0,"5",,terminal_output +3880,2350488,"TERMINAL",0,0,"checkout prepend-action-maskgitstashbranchcheckout 
add-noise-to-combat-exposure-bias\r",,terminal_output +3881,2350649,"TERMINAL",0,0,"",,terminal_output +3882,2350783,"TERMINAL",0,0,"",,terminal_output +3883,2350834,"TERMINAL",0,0,"",,terminal_output +3884,2350860,"TERMINAL",0,0,"6",,terminal_output +3885,2351137,"TERMINAL",0,0,"g",,terminal_output +3886,2351213,"TERMINAL",0,0,"i",,terminal_output +3887,2351481,"TERMINAL",0,0,"t",,terminal_output +3888,2351752,"TERMINAL",0,0,"",,terminal_output +3889,2351852,"TERMINAL",0,0,"",,terminal_output +3890,2351852,"TERMINAL",0,0,"7",,terminal_output +3891,2352040,"TERMINAL",0,0,"",,terminal_output +3892,2352302,"TERMINAL",0,0,"",,terminal_output +3893,2352466,"TERMINAL",0,0,"d",,terminal_output +3894,2352575,"TERMINAL",0,0,"i",,terminal_output +3895,2352801,"TERMINAL",0,0,"f",,terminal_output +3896,2352860,"TERMINAL",0,0,"8",,terminal_output +3897,2352899,"TERMINAL",0,0,"f",,terminal_output +3898,2352952,"TERMINAL",0,0," ",,terminal_output +3899,2353898,"TERMINAL",0,0,"j",,terminal_output +3900,2353917,"TERMINAL",0,0,"9",,terminal_output +3901,2354136,"TERMINAL",0,0,"a",,terminal_output +3902,2354272,"TERMINAL",0,0,"smine/",,terminal_output +3903,2354719,"TERMINAL",0,0,"d",,terminal_output +3904,2354892,"TERMINAL",0,0,"40",,terminal_output +3905,2354947,"TERMINAL",0,0,"y",,terminal_output +3906,2355251,"TERMINAL",0,0,"",,terminal_output +3907,2355753,"TERMINAL",0,0,"",,terminal_output +3908,2355903,"TERMINAL",0,0,"",,terminal_output +3909,2355929,"TERMINAL",0,0,"1",,terminal_output +3910,2355982,"TERMINAL",0,0,"t",,terminal_output +3911,2356254,"TERMINAL",0,0,"rain_",,terminal_output +3912,2356948,"TERMINAL",0,0,"2",,terminal_output +3913,2357409,"TERMINAL",0,0,"d",,terminal_output +3914,2357826,"TERMINAL",0,0,"y",,terminal_output +3915,2358070,"TERMINAL",0,0,"3",,terminal_output +3916,2358834,"TERMINAL",0,0,"namics",,terminal_output +3917,2358987,"TERMINAL",0,0,"4",,terminal_output +3918,2359159,"TERMINAL",0,0,".",,terminal_output +3919,2359321,"TERMINAL",0,0,"py ",,terminal_output +3920,2360090,"TERMINAL",0,0,"5",,terminal_output +3921,2360587,"TERMINAL",0,0,"j",,terminal_output +3922,2360642,"TERMINAL",0,0,"a",,terminal_output +3923,2360878,"TERMINAL",0,0,"smine/",,terminal_output +3924,2361030,"TERMINAL",0,0,"6",,terminal_output +3925,2361267,"TERMINAL",0,0,"d",,terminal_output +3926,2361778,"TERMINAL",0,0,"",,terminal_output +3927,2362105,"TERMINAL",0,0,"t",,terminal_output +3928,2362105,"TERMINAL",0,0,"7",,terminal_output +3929,2362281,"TERMINAL",0,0,"r",,terminal_output +3930,2362419,"TERMINAL",0,0,"ain_",,terminal_output +3931,2363074,"TERMINAL",0,0,"8",,terminal_output +3932,2363128,"TERMINAL",0,0,"d",,terminal_output +3933,2363352,"TERMINAL",0,0,"y",,terminal_output +3934,2363602,"TERMINAL",0,0,"namics",,terminal_output +3935,2364096,"TERMINAL",0,0,"9",,terminal_output +3936,2364543,"TERMINAL",0,0,"_",,terminal_output +3937,2364712,"TERMINAL",0,0,"appendix-c.py ",,terminal_output +3938,2365157,"TERMINAL",0,0,"50",,terminal_output +3939,2365625,"TERMINAL",0,0,"\r\n[?2004l\r81,82d80\r\n< max_noise_level: float = 0.7\r\n< noise_buckets: int = 10\r\n142,143d139\r\n< max_noise_level=args.max_noise_level,\r\n< noise_buckets=args.noise_buckets,\r\n504,521d499\r\n< @nnx.jit(donate_argnums=0)\r\n< def train_step(\r\n< optimizer: nnx.ModelAndOptimizer, inputs: dict\r\n< ) -> tuple[jax.Array, jax.Array, dict]:\r\n< def loss_fn(model: Genie) -> tuple[jax.Array, tuple[jax.Array, dict]]:\r\n< model.train()\r\n< return dynamics_loss_fn(model, inputs)\r\n< \r\n< (loss, (recon, metrics)), grads = 
nnx.value_and_grad(loss_fn, has_aux=True)(\r\n< optimizer.model\r\n< )\r\n< optimizer.update(grads)\r\n< if args.log_gradients:\r\n< metrics[""gradients_std/""] = jax.tree.map(\r\n< lambda x: x.std(), grads[""params""][""dynamics""]\r\n< )\r\n< return loss, recon, metrics\r\n< \r\n545,550c523,527\r\n< batch=inputs,\r\n< seq_len=args.seq_len,\r\n< noise_level=0.0,\r\n< temperature=args.val_temperature,\r\n< sample_argmax=args.val_sample_argmax,\r\n< maskgit_steps=args.val_maskgit_steps,\r\n---\r\n> inputs,\r\n> args.seq_len,\r\n> args.val_temperature,\r\n> args.val_sample_argmax,\r\n> args.val_maskgit_steps,\r\n620,635d596\r\n< # --- TRAIN LOOP ---\r\n< dataloader_train = (\r\n< {\r\n< ""videos"": jax.make_array_from_process_local_data(\r\n< videos_sharding, local_data=elem[""videos""]\r\n< ),\r\n< ""actions"": (\r\n< jax.make_array_from_process_local_data(\r\n< actions_sharding, elem[""actions""]\r\n< )\r\n< if args.use_gt_actions\r\n< else None\r\n< ),\r\n< }\r\n< for elem in train_iterator\r\n< )\r\n653,660d613\r\n< if jax.process_index() == 0:\r\n< first_batch = next(dataloader_train)\r\n< first_batch[""rng""] = rng # type: ignore\r\n< compiled = train_step.lower(optimizer, first_batch).compile()\r\n< print_compiled_memory_stats(compiled.memory_analysis())\r\n< print_compiled_cost_analysis(compiled.cost_analysis())\r\n< # Do not skip the first batch during training\r\n< dataloader_train = itertools.chain([first_batch], dataloader_train)\r\n662,818c615,626\r\n< first_step = step\r\n< while step < args.num_steps:\r\n< for batch in dataloader_train:\r\n< # --- Train step ---\r\n< rng, _rng_mask = jax.random.split(rng, 2)\r\n< batch[""rng""] = _rng_mask\r\n< loss, recon, metrics = train_step(optimizer, batch)\r\n< if step == first_step:\r\n< print_mem_stats(""After params initialized"")\r\n< step += 1\r\n< \r\n< # --- Validation loss ---\r\n< val_results = {}\r\n< if dataloader_val and step % args.val_interval == 0:\r\n< rng, _rng_mask_val = jax.random.split(rng, 2)\r\n< print(""Calculating validation metrics..."")\r\n< val_metrics, val_gt_batch, val_recon, val_recon_full_frame = (\r\n< calculate_validation_metrics(\r\n< dataloader_val, optimizer.model, _rng_mask_val\r\n< )\r\n< )\r\n< print(f""Step {step}, validation loss: {val_metrics['val_loss']}"")\r\n< val_results = {\r\n< ""metrics"": val_metrics,\r\n< ""gt_batch"": val_gt_batch,\r\n< ""recon"": val_recon,\r\n< ""full_frame"": val_recon_full_frame,\r\n< }\r\n< \r\n< # --- Logging ---\r\n< if args.log:\r\n< if step % args.log_interval == 0 and jax.process_index() == 0:\r\n< log_dict = {""loss"": loss, ""step"": step, **metrics}\r\n< if val_results:\r\n< log_dict.update(val_results[""metrics""])\r\n< wandb.log(log_dict)\r\n< if step % args.log_image_interval == 0:\r\n< gt_seq = batch[""videos""][0].astype(jnp.float32) / 255.0\r\n< recon_seq = recon[0].clip(0, 1)\r\n< comparison_seq = jnp.concatenate((gt_seq, recon_seq), axis=1)\r\n< comparison_seq = einops.rearrange(\r\n< comparison_seq * 255, ""t h w c -> h (t w) c""\r\n< )\r\n< if val_results:\r\n< val_results[""gt_seq_val""] = (\r\n< val_results[""gt_batch""][""videos""][0].astype(jnp.float32)\r\n< / 255.0\r\n< )\r\n< val_results[""recon_seq_val""] = val_results[""recon""][0].clip(\r\n< 0, 1\r\n< )\r\n< val_comparison_seq = jnp.concatenate(\r\n< (val_results[""gt_seq_val""], val_results[""recon_seq_val""]),\r\n< axis=1,\r\n< )\r\n< val_results[""val_comparison_seq""] = einops.rearrange(\r\n< val_comparison_seq * 255, ""t h w c -> h (t w) c""\r\n< )\r\n< if args.eval_full_frame:\r\n< 
val_results[""full_frame_seq_val""] = val_results[\r\n< ""full_frame""\r\n< ][0].clip(0, 1)\r\n< val_results[""val_full_frame_comparison_seq""] = (\r\n< jnp.concatenate(\r\n< (\r\n< val_results[""gt_seq_val""],\r\n< val_results[""full_frame_seq_val""],\r\n< ),\r\n< axis=1,\r\n< )\r\n< )\r\n< val_results[""val_full_frame_comparison_seq""] = (\r\n< einops.rearrange(\r\n< val_results[""val_full_frame_comparison_seq""] * 255,\r\n< ""t h w c -> h (t w) c"",\r\n< )\r\n< )\r\n< # NOTE: Process-dependent control flow deliberately happens\r\n< # after indexing operation since it must not contain code\r\n< # sections that lead to cross-accelerator communication.\r\n< if jax.process_index() == 0:\r\n< log_images = dict(\r\n< image=wandb.Image(np.asarray(gt_seq[args.seq_len - 1])),\r\n< recon=wandb.Image(np.asarray(recon_seq[args.seq_len - 1])),\r\n< true_vs_recon=wandb.Image(\r\n< np.asarray(comparison_seq.astype(np.uint8))\r\n< ),\r\n< )\r\n< if val_results:\r\n< log_images.update(\r\n< dict(\r\n< val_image=wandb.Image(\r\n< np.asarray(\r\n< val_results[""gt_seq_val""][args.seq_len - 1]\r\n< )\r\n< ),\r\n< val_recon=wandb.Image(\r\n< np.asarray(\r\n< val_results[""recon_seq_val""][\r\n< args.seq_len - 1\r\n< ]\r\n< )\r\n< ),\r\n< val_true_vs_recon=wandb.Image(\r\n< np.asarray(\r\n< val_results[""val_comparison_seq""].astype(\r\n< np.uint8\r\n< )\r\n< )\r\n< ),\r\n< )\r\n< )\r\n< if args.eval_full_frame:\r\n< log_images.update(\r\n< dict(\r\n< val_full_frame=wandb.Image(\r\n< np.asarray(\r\n< val_results[""full_frame_seq_val""][\r\n< args.seq_len - 1\r\n< ]\r\n< )\r\n< ),\r\n< val_true_vs_full_frame=wandb.Image(\r\n< np.asarray(\r\n< val_results[\r\n< ""val_full_frame_comparison_seq""\r\n< ].astype(np.uint8)\r\n< )\r\n< ),\r\n< )\r\n< )\r\n< wandb.log(log_images)\r\n< # --- Checkpointing ---\r\n< if args.save_ckpt and step % args.log_checkpoint_interval == 0:\r\n< assert checkpoint_manager is not None\r\n< optimizer_state = nnx.state(optimizer)\r\n< if val_iterator:\r\n< ckpt_manager_args = ocp.args.Composite(\r\n< model_state=ocp.args.PyTreeSave(optimizer_state), # type: ignore\r\n< train_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\r\n< train_iterator # type: ignore\r\n< ),\r\n< val_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\r\n< val_iterator # type: ignore\r\n< ),\r\n< )\r\n< else:\r\n< ckpt_manager_args = ocp.args.Composite(\r\n< model_state=ocp.args.PyTreeSave(optimizer_state), # type: ignore\r\n< train_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\r\n< train_iterator # type: ignore\r\n< ),\r\n< )\r\n< checkpoint_manager.save(step, args=ckpt_manager_args)\r\n< print(f""Saved checkpoint at step {step}"")\r\n< if step >= args.num_steps:\r\n< break\r\n---\r\n> # --- Validation loss ---\r\n> val_results = {}\r\n> if dataloader_val and step % args.val_interval == 0:\r\n> rng, _rng_mask_val = jax.random.split(rng, 2)\r\n> print(""Calculating validation metrics..."")\r\n> val_metrics, val_gt_batch, val_recon, val_recon_full_frame = (\r\n> calculate_validation_metrics(\r\n> dataloader_val, optimizer.model, _rng_mask_val\r\n> )\r\n> )\r\n> print(f""Step {step}, validation loss: {val_metrics['val_loss']}"")\r\n> print(val_metrics)\r\n]0;tum_cte0515@hkn0401:~/Projects/jasmine[?2004h(jasmine) [tum_cte0515@hkn0401 jasmine]$ ",,terminal_output +3940,2366242,"TERMINAL",0,0,"1",,terminal_output +3941,2367699,"TERMINAL",0,0,"2",,terminal_output +3942,2368643,"TERMINAL",0,0,"3",,terminal_output +3943,2369412,"TERMINAL",0,0,"4",,terminal_output 
+3944,2370490,"TERMINAL",0,0,"5",,terminal_output +3945,2372461,"TERMINAL",0,0,"6",,terminal_output +3946,2372652,"TERMINAL",0,0,"7",,terminal_output +3947,2373421,"TERMINAL",0,0,"8",,terminal_output +3948,2374520,"TERMINAL",0,0,"9",,terminal_output +3949,2375693,"TERMINAL",0,0,"7:00",,terminal_output +3950,2377021,"TERMINAL",0,0,"1",,terminal_output +3951,2377712,"TERMINAL",0,0,"2",,terminal_output +3952,2378514,"TERMINAL",0,0,"3",,terminal_output +3953,2379459,"TERMINAL",0,0,"4",,terminal_output +3954,2380687,"TERMINAL",0,0,"5",,terminal_output +3955,2381844,"TERMINAL",0,0,"6",,terminal_output +3956,2382504,"TERMINAL",0,0,"7",,terminal_output +3957,2383708,"TERMINAL",0,0,"9",,terminal_output +3958,2384949,"TERMINAL",0,0,"10",,terminal_output +3959,2385269,"jasmine/train_dynamics_appendix-c.py",0,0,"",python,tab +3960,2385998,"TERMINAL",0,0,"1",,terminal_output +3961,2386745,"TERMINAL",0,0,"2",,terminal_output +3962,2387685,"TERMINAL",0,0,"3",,terminal_output +3963,2388916,"TERMINAL",0,0,"4",,terminal_output +3964,2389879,"TERMINAL",0,0,"5",,terminal_output +3965,2390855,"TERMINAL",0,0,"6",,terminal_output +3966,2391966,"TERMINAL",0,0,"7",,terminal_output +3967,2392755,"jasmine/train_dynamics_appendix-c.py",10875,0,"",python,selection_mouse +3968,2393027,"TERMINAL",0,0,"8",,terminal_output +3969,2393897,"TERMINAL",0,0,"9",,terminal_output +3970,2394812,"TERMINAL",0,0,"20",,terminal_output +3971,2395853,"TERMINAL",0,0,"1",,terminal_output +3972,2396865,"TERMINAL",0,0,"2",,terminal_output +3973,2397905,"TERMINAL",0,0,"3",,terminal_output +3974,2399389,"TERMINAL",0,0,"4",,terminal_output +3975,2400310,"TERMINAL",0,0,"5",,terminal_output +3976,2401519,"TERMINAL",0,0,"6",,terminal_output +3977,2402524,"TERMINAL",0,0,"7",,terminal_output +3978,2403320,"TERMINAL",0,0,"8",,terminal_output +3979,2404080,"TERMINAL",0,0,"9",,terminal_output +3980,2404992,"TERMINAL",0,0,"30",,terminal_output +3981,2405996,"TERMINAL",0,0,"1",,terminal_output +3982,2407382,"TERMINAL",0,0,"2",,terminal_output +3983,2408500,"TERMINAL",0,0,"3",,terminal_output +3984,2409222,"TERMINAL",0,0,"4",,terminal_output +3985,2410445,"TERMINAL",0,0,"5",,terminal_output +3986,2411167,"TERMINAL",0,0,"6",,terminal_output +3987,2412305,"TERMINAL",0,0,"7",,terminal_output +3988,2413158,"TERMINAL",0,0,"8",,terminal_output +3989,2414230,"TERMINAL",0,0,"9",,terminal_output +3990,2415481,"TERMINAL",0,0,"40",,terminal_output +3991,2416621,"TERMINAL",0,0,"1",,terminal_output +3992,2417861,"TERMINAL",0,0,"2",,terminal_output +3993,2418161,"jasmine/train_dynamics_appendix-c.py",18327,0,"",python,selection_mouse +3994,2418162,"jasmine/train_dynamics_appendix-c.py",18326,0,"",python,selection_command +3995,2418337,"jasmine/train_dynamics_appendix-c.py",18325,1,"s",python,selection_mouse +3996,2418338,"jasmine/train_dynamics_appendix-c.py",18325,2,"s,",python,selection_command +3997,2418435,"jasmine/train_dynamics_appendix-c.py",18317,10,"git_steps,",python,selection_mouse +3998,2418435,"jasmine/train_dynamics_appendix-c.py",18273,54,"sample_argmax,\n args.val_maskgit_steps,",python,selection_mouse +3999,2418436,"jasmine/train_dynamics_appendix-c.py",18270,57,"al_sample_argmax,\n args.val_maskgit_steps,",python,selection_mouse +4000,2418436,"jasmine/train_dynamics_appendix-c.py",18266,61,"gs.val_sample_argmax,\n args.val_maskgit_steps,",python,selection_mouse +4001,2418436,"jasmine/train_dynamics_appendix-c.py",18226,101,"args.val_temperature,\n args.val_sample_argmax,\n args.val_maskgit_steps,",python,selection_mouse 
+4002,2418437,"jasmine/train_dynamics_appendix-c.py",18221,106," args.val_temperature,\n args.val_sample_argmax,\n args.val_maskgit_steps,",python,selection_mouse +4003,2418447,"jasmine/train_dynamics_appendix-c.py",18220,107," args.val_temperature,\n args.val_sample_argmax,\n args.val_maskgit_steps,",python,selection_mouse +4004,2418464,"jasmine/train_dynamics_appendix-c.py",18218,109," args.val_temperature,\n args.val_sample_argmax,\n args.val_maskgit_steps,",python,selection_mouse +4005,2418506,"jasmine/train_dynamics_appendix-c.py",18217,110," args.val_temperature,\n args.val_sample_argmax,\n args.val_maskgit_steps,",python,selection_mouse +4006,2418506,"jasmine/train_dynamics_appendix-c.py",18216,111," args.val_temperature,\n args.val_sample_argmax,\n args.val_maskgit_steps,",python,selection_mouse +4007,2418526,"jasmine/train_dynamics_appendix-c.py",18215,112," args.val_temperature,\n args.val_sample_argmax,\n args.val_maskgit_steps,",python,selection_mouse +4008,2418544,"jasmine/train_dynamics_appendix-c.py",18214,113," args.val_temperature,\n args.val_sample_argmax,\n args.val_maskgit_steps,",python,selection_mouse +4009,2418572,"jasmine/train_dynamics_appendix-c.py",18213,114," args.val_temperature,\n args.val_sample_argmax,\n args.val_maskgit_steps,",python,selection_mouse +4010,2418619,"jasmine/train_dynamics_appendix-c.py",18212,115," args.val_temperature,\n args.val_sample_argmax,\n args.val_maskgit_steps,",python,selection_mouse +4011,2418620,"TERMINAL",0,0,"3",,terminal_output +4012,2418675,"jasmine/train_dynamics_appendix-c.py",18211,116," args.val_temperature,\n args.val_sample_argmax,\n args.val_maskgit_steps,",python,selection_mouse +4013,2418743,"jasmine/train_dynamics_appendix-c.py",18210,117," args.val_temperature,\n args.val_sample_argmax,\n args.val_maskgit_steps,",python,selection_mouse +4014,2418786,"jasmine/train_dynamics_appendix-c.py",18180,147," args.seq_len,\n args.val_temperature,\n args.val_sample_argmax,\n args.val_maskgit_steps,",python,selection_mouse +4015,2418906,"jasmine/train_dynamics_appendix-c.py",18156,171," inputs,\n args.seq_len,\n args.val_temperature,\n args.val_sample_argmax,\n args.val_maskgit_steps,",python,selection_mouse +4016,2419414,"TERMINAL",0,0,"4",,terminal_output +4017,2419904,"jasmine/train_dynamics_appendix-c.py",18156,171,"",python,content +4018,2420328,"jasmine/train_dynamics_appendix-c.py",18156,0,"< batch=inputs,\n< seq_len=args.seq_len,\n< noise_level=0.0,\n< temperature=args.val_temperature,\n< sample_argmax=args.val_sample_argmax,\n< maskgit_steps=args.val_maskgit_steps,",python,content +4019,2420655,"TERMINAL",0,0,"5",,terminal_output +4020,2421161,"jasmine/train_dynamics_appendix-c.py",18425,0,"",python,selection_command +4021,2421520,"TERMINAL",0,0,"6",,terminal_output +4022,2422060,"jasmine/train_dynamics_appendix-c.py",18371,55,"< maskgit_steps=args.val_maskgit_steps,",python,selection_command +4023,2422247,"jasmine/train_dynamics_appendix-c.py",18315,111,"< sample_argmax=args.val_sample_argmax,\n< maskgit_steps=args.val_maskgit_steps,",python,selection_command +4024,2422433,"TERMINAL",0,0,"7",,terminal_output +4025,2422434,"jasmine/train_dynamics_appendix-c.py",18263,163,"< temperature=args.val_temperature,\n< sample_argmax=args.val_sample_argmax,\n< maskgit_steps=args.val_maskgit_steps,",python,selection_command +4026,2422567,"jasmine/train_dynamics_appendix-c.py",18228,198,"< noise_level=0.0,\n< temperature=args.val_temperature,\n< sample_argmax=args.val_sample_argmax,\n< 
maskgit_steps=args.val_maskgit_steps,",python,selection_command +4027,2422692,"jasmine/train_dynamics_appendix-c.py",18188,238,"< seq_len=args.seq_len,\n< noise_level=0.0,\n< temperature=args.val_temperature,\n< sample_argmax=args.val_sample_argmax,\n< maskgit_steps=args.val_maskgit_steps,",python,selection_command +4028,2422821,"jasmine/train_dynamics_appendix-c.py",18156,270,"< batch=inputs,\n< seq_len=args.seq_len,\n< noise_level=0.0,\n< temperature=args.val_temperature,\n< sample_argmax=args.val_sample_argmax,\n< maskgit_steps=args.val_maskgit_steps,",python,selection_command +4029,2423092,"jasmine/train_dynamics_appendix-c.py",18156,0,"",python,selection_command +4030,2423363,"TERMINAL",0,0,"8",,terminal_output +4031,2423431,"jasmine/train_dynamics_appendix-c.py",18371,1,"",python,content +4032,2423432,"jasmine/train_dynamics_appendix-c.py",18315,1,"",python,content +4033,2423432,"jasmine/train_dynamics_appendix-c.py",18263,1,"",python,content +4034,2423432,"jasmine/train_dynamics_appendix-c.py",18228,1,"",python,content +4035,2423432,"jasmine/train_dynamics_appendix-c.py",18188,1,"",python,content +4036,2423432,"jasmine/train_dynamics_appendix-c.py",18156,1,"",python,content +4037,2424306,"jasmine/train_dynamics_appendix-c.py",18366,1,"",python,content +4038,2424306,"jasmine/train_dynamics_appendix-c.py",18311,1,"",python,content +4039,2424306,"jasmine/train_dynamics_appendix-c.py",18260,1,"",python,content +4040,2424306,"jasmine/train_dynamics_appendix-c.py",18226,1,"",python,content +4041,2424306,"jasmine/train_dynamics_appendix-c.py",18187,1,"",python,content +4042,2424306,"jasmine/train_dynamics_appendix-c.py",18156,1,"",python,content +4043,2424787,"TERMINAL",0,0,"9",,terminal_output +4044,2424850,"jasmine/train_dynamics_appendix-c.py",18156,0,"",python,selection_command +4045,2425784,"TERMINAL",0,0,"50",,terminal_output +4046,2426869,"TERMINAL",0,0,"1",,terminal_output +4047,2427761,"TERMINAL",0,0,"2",,terminal_output +4048,2428898,"TERMINAL",0,0,"3",,terminal_output +4049,2429816,"TERMINAL",0,0,"4",,terminal_output +4050,2430503,"TERMINAL",0,0,"5",,terminal_output +4051,2431747,"TERMINAL",0,0,"6",,terminal_output +4052,2432662,"TERMINAL",0,0,"8",,terminal_output +4053,2433513,"TERMINAL",0,0,"9",,terminal_output +4054,2434536,"TERMINAL",0,0,"8:00",,terminal_output +4055,2435571,"TERMINAL",0,0,"1",,terminal_output +4056,2436602,"TERMINAL",0,0,"2",,terminal_output +4057,2437607,"TERMINAL",0,0,"3",,terminal_output +4058,2438667,"TERMINAL",0,0,"4",,terminal_output +4059,2439052,"jasmine/train_dynamics_appendix-c.py",0,0,"",python,tab +4060,2439685,"TERMINAL",0,0,"5",,terminal_output +4061,2440704,"TERMINAL",0,0,"6",,terminal_output +4062,2441849,"TERMINAL",0,0,"7",,terminal_output +4063,2442827,"TERMINAL",0,0,"8",,terminal_output +4064,2443719,"jasmine/train_dynamics_appendix-c.py",2262,0,"",python,selection_mouse +4065,2443873,"TERMINAL",0,0,"9",,terminal_output +4066,2444840,"TERMINAL",0,0,"10",,terminal_output +4067,2445295,"jasmine/train_dynamics_appendix-c.py",2879,0,"",python,selection_mouse +4068,2445295,"jasmine/train_dynamics_appendix-c.py",2878,0,"",python,selection_command +4069,2445879,"TERMINAL",0,0,"1",,terminal_output +4070,2446930,"TERMINAL",0,0,"2",,terminal_output +4071,2448008,"TERMINAL",0,0,"3",,terminal_output +4072,2448936,"TERMINAL",0,0,"4",,terminal_output +4073,2449960,"TERMINAL",0,0,"5",,terminal_output +4074,2450986,"TERMINAL",0,0,"6",,terminal_output +4075,2451600,"jasmine/train_dynamics_appendix-c.py",2187,0,"",python,selection_mouse 
+4076,2451999,"TERMINAL",0,0,"7",,terminal_output +4077,2452296,"jasmine/train_dynamics_appendix-c.py",2126,0,"",python,selection_mouse +4078,2453022,"TERMINAL",0,0,"8",,terminal_output +4079,2453429,"jasmine/train_dynamics_appendix-c.py",2138,0,"\n ",python,content +4080,2453805,"jasmine/train_dynamics_appendix-c.py",2139,4,"",python,content +4081,2454100,"TERMINAL",0,0,"9",,terminal_output +4082,2454121,"jasmine/train_dynamics_appendix-c.py",2139,0,"< max_noise_level: float = 0.7\n< noise_buckets: int = 10",python,content +4083,2454662,"jasmine/train_dynamics_appendix-c.py",2202,0,"",python,selection_command +4084,2455068,"TERMINAL",0,0,"20",,terminal_output +4085,2455300,"jasmine/train_dynamics_appendix-c.py",2174,29,"< noise_buckets: int = 10",python,selection_command +4086,2455495,"jasmine/train_dynamics_appendix-c.py",2139,64,"< max_noise_level: float = 0.7\n< noise_buckets: int = 10",python,selection_command +4087,2455637,"jasmine/train_dynamics_appendix-c.py",2108,95," z_loss_weight: float = 0.0\n< max_noise_level: float = 0.7\n< noise_buckets: int = 10",python,selection_command +4088,2456097,"jasmine/train_dynamics_appendix-c.py",2139,64,"< max_noise_level: float = 0.7\n< noise_buckets: int = 10",python,selection_command +4089,2456119,"TERMINAL",0,0,"1",,terminal_output +4090,2456343,"jasmine/train_dynamics_appendix-c.py",2139,0,"",python,selection_command +4091,2456866,"jasmine/train_dynamics_appendix-c.py",2174,1,"",python,content +4092,2456867,"jasmine/train_dynamics_appendix-c.py",2139,1,"",python,content +4093,2457006,"jasmine/train_dynamics_appendix-c.py",2173,1,"",python,content +4094,2457006,"jasmine/train_dynamics_appendix-c.py",2139,1,"",python,content +4095,2457301,"TERMINAL",0,0,"2",,terminal_output +4096,2457678,"jasmine/train_dynamics_appendix-c.py",2139,0,"",python,selection_command +4097,2458140,"TERMINAL",0,0,"3",,terminal_output +4098,2459156,"TERMINAL",0,0,"4",,terminal_output +4099,2459537,"TERMINAL",0,0,"diff jasmine/train_dynamics.py jasmine/train_dynamics_appendix-c.py ",,terminal_output +4100,2460217,"TERMINAL",0,0,"5",,terminal_output +4101,2460548,"TERMINAL",0,0,"\rgit checkout add-noise-to-combat-exposure-bias",,terminal_output +4102,2461289,"TERMINAL",0,0,"6",,terminal_output +4103,2461701,"TERMINAL",0,0,"branch",,terminal_output +4104,2462375,"TERMINAL",0,0,"7",,terminal_output +4105,2462427,"TERMINAL",0,0,"stash",,terminal_output +4106,2462944,"TERMINAL",0,0,"checkout prepend-action-maskgit",,terminal_output +4107,2463239,"TERMINAL",0,0,"8",,terminal_output +4108,2464480,"TERMINAL",0,0,"9",,terminal_output +4109,2464867,"TERMINAL",0,0,"diff",,terminal_output +4110,2465703,"TERMINAL",0,0,"sh slurm/jobs/mihir/horeka/coinrun/ablations/appendix-c.sh",,terminal_output +4111,2465871,"TERMINAL",0,0,"30",,terminal_output +4112,2466563,"TERMINAL",0,0,"1",,terminal_output +4113,2467117,"jasmine/train_dynamics_appendix-c.py",0,0,"",python,tab +4114,2467318,"TERMINAL",0,0,"2",,terminal_output +4115,2468311,"jasmine/train_dynamics_appendix-c.py",2171,0,"",python,selection_command +4116,2468430,"TERMINAL",0,0,"3",,terminal_output +4117,2468469,"jasmine/train_dynamics_appendix-c.py",2170,1,"",python,content +4118,2469151,"jasmine/train_dynamics_appendix-c.py",2170,0,"0",python,content +4119,2469152,"jasmine/train_dynamics_appendix-c.py",2171,0,"",python,selection_keyboard +4120,2469369,"jasmine/train_dynamics_appendix-c.py",2170,0,"",python,selection_command +4121,2469588,"TERMINAL",0,0,"4",,terminal_output +4122,2470587,"TERMINAL",0,0,"5",,terminal_output 
+4123,2471607,"TERMINAL",0,0,"6",,terminal_output +4124,2472372,"TERMINAL",0,0,"\r\n[?2004l\r\r\n# Log the sbatch script\r\ncat $0\r\n\r\nmodule unload mpi/openmpi/5.0\r\nmodule unload devel/cuda/12.4\r\nsource .venv/bin/activate\r\n\r\narray_records_dir_train=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_coinrun/coinrun_episodes_10m_gt_actions_split/train\r\narray_records_dir_val=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_coinrun/coinrun_episodes_10m_gt_actions_split/val\r\n\r\njob_name=$SLURM_JOB_NAME\r\nslurm_job_id=$SLURM_JOB_ID\r\n\r\n# CHECKPOINT_DIR=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/coinrun/maskgit/$job_name/$slurm_job_id\r\n\r\n# main\r\nCHECKPOINT_DIR=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/coinrun/maskgit/train_dynamics_maskgit/3533426\r\n\r\nmkdir -p $CHECKPOINT_DIR\r\n\r\ntokenizer_ckpt_dir=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/big-runs/tokenizer/train_tokenizer_default/3528955\r\n\r\nenv | grep SLURM\r\n\r\nsrun python jasmine/train_dynamics_appendix-c.py \\r\n --save_ckpt \\r\n --restore_ckpt \\r\n --wandb_id $SLURM_JOB_ID \\r\n --ckpt_dir $CHECKPOINT_DIR \\r\n --batch_size=110 \\r\n --image_height=64 \\r\n --image_width=64 \\r\n --patch_size=16 \\r\n --max_lr=3e-5 \\r\n --no-log \\r\n --no-eval-full-frame \\r\n --tokenizer_checkpoint=$tokenizer_ckpt_dir \\r\n --eval_full_frame \\r\n --data_dir $array_records_dir_train \\r\n --val_data_dir $array_records_dir_val \r\n",,terminal_output +4125,2472770,"TERMINAL",0,0,"SLURM_STEP_NUM_TASKS=1\r\nSLURM_JOB_USER=tum_cte0515\r\nSLURM_TASKS_PER_NODE=1\r\nSLURM_JOB_UID=999226\r\nSLURM_TASK_PID=1279162\r\nSLURM_JOB_GPUS=0\r\nSLURM_LOCALID=0\r\nSLURM_SUBMIT_DIR=/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine\r\nSLURMD_NODENAME=hkn0401\r\nSLURM_JOB_START_TIME=1759955924\r\nSLURM_STEP_NODELIST=hkn0401\r\nSLURM_CLUSTER_NAME=hk\r\nSLURM_JOB_END_TIME=1759959524\r\nSLURM_PMI2_SRUN_PORT=34473\r\nSLURM_CPUS_ON_NODE=8\r\nSLURM_JOB_CPUS_PER_NODE=8\r\nSLURM_GPUS_ON_NODE=1\r\nSLURM_GTIDS=0\r\nSLURM_JOB_PARTITION=dev_accelerated\r\nSLURM_TRES_PER_TASK=cpu=8\r\nSLURM_OOM_KILL_STEP=0\r\nSLURM_JOB_NUM_NODES=1\r\nSLURM_STEPID=4294967290\r\nSLURM_JOBID=3554157\r\nSLURM_PTY_PORT=43479\r\nSLURM_JOB_QOS=normal\r\nSLURM_LAUNCH_NODE_IPADDR=10.0.7.201\r\nSLURM_PTY_WIN_ROW=35\r\nSLURM_PMI2_PROC_MAPPING=(vector,(0,1,1))\r\nSLURMD_DEBUG=2\r\nSLURM_PROCID=0\r\nSLURM_CPUS_PER_TASK=8\r\nSLURM_TOPOLOGY_ADDR=hkibb.hkibbi1.hkibbi1e11.hkn0401\r\nSLURM_TOPOLOGY_ADDR_PATTERN=switch.switch.switch.node\r\nSLURM_SRUN_COMM_HOST=10.0.7.201\r\nSLURM_SCRIPT_CONTEXT=prolog_task\r\nSLURM_PTY_WIN_COL=181\r\nSLURM_NODELIST=hkn0401\r\nSLURM_SRUN_COMM_PORT=33897\r\nSLURM_STEP_ID=4294967290\r\nSLURM_JOB_ACCOUNT=hk-project-p0023960\r\nSLURM_PRIO_PROCESS=0\r\nSLURM_NNODES=1\r\nSLURM_SUBMIT_HOST=hkn1993.localdomain\r\nSLURM_JOB_ID=3554157\r\nSLURM_NODEID=0\r\nSLURM_STEP_NUM_NODES=1\r\nSLURM_STEP_TASKS_PER_NODE=1\r\nSLURM_MPI_TYPE=pmi2\r\nSLURM_PMI2_STEP_NODES=hkn0401\r\nSLURM_CONF=/etc/slurm/slurm.conf\r\nSLURM_JOB_NAME=interactive\r\nSLURM_STEP_LAUNCHER_PORT=33897\r\nSLURM_JOB_GID=502226\r\nSLURM_JOB_NODELIST=hkn0401\r\n",,terminal_output +4126,2473027,"TERMINAL",0,0,"GpuFreq=control_disabled\r\n",,terminal_output +4127,2473403,"TERMINAL",0,0,"7",,terminal_output +4128,2474330,"TERMINAL",0,0,"8",,terminal_output +4129,2475033,"TERMINAL",0,0,"9",,terminal_output +4130,2475936,"TERMINAL",0,0,"Running on 1 devices.\r\nTraceback (most recent call 
last):\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/jasmine/train_dynamics_appendix-c.py"", line 637, in <module>\r\n main(args)\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/jasmine/train_dynamics_appendix-c.py"", line 425, in main\r\n genie, rng = build_model(args, rng)\r\n ^^^^^^^^^^^^^^^^^^^^^^\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/jasmine/train_dynamics_appendix-c.py"", line 115, in build_model\r\n genie = Genie(\r\n ^^^^^^\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/.venv/lib64/python3.12/site-packages/flax/nnx/pytreelib.py"", line 289, in __call__\r\n return _graph_node_meta_call(cls, *args, **kwargs)\r\n ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/.venv/lib64/python3.12/site-packages/flax/nnx/pytreelib.py"", line 301, in _graph_node_meta_call\r\n cls._pytree_meta_construct(node, *args, **kwargs)\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/.venv/lib64/python3.12/site-packages/flax/nnx/pytreelib.py"", line 292, in _pytree_meta_construct\r\n self.__init__(*args, **kwargs)\r\nTypeError: Genie.__init__() missing 2 required positional arguments: 'max_noise_level' and 'noise_buckets'\r\nW1008 23:18:41.042049 1293039 pjrt_client.cc:1469] WatchJobStateAsync failed for task goo.gle/debugproto job_name: ""jax_worker"": CANCELLED: CANCELLED\r\nAdditional GRPC error information from remote target coordination_service while calling /tensorflow.CoordinationService/WatchJobState:\r\n:UNKNOWN:Error received from peer {grpc_message:""CANCELLED"", grpc_status:1} [type.googleapis.com/tensorflow.DerivedStatus='']\r\n",,terminal_output +4131,2475943,"TERMINAL",0,0,"41",,terminal_output +4132,2476261,"TERMINAL",0,0,"srun: error: hkn0401: task 0: Exited with exit code 1\r\n]0;tum_cte0515@hkn0401:~/Projects/jasmine[?2004h(jasmine) [tum_cte0515@hkn0401 jasmine]$ ",,terminal_output +4133,2476979,"TERMINAL",0,0,"2",,terminal_output +4134,2477944,"TERMINAL",0,0,"3",,terminal_output +4135,2479003,"TERMINAL",0,0,"4",,terminal_output +4136,2480041,"TERMINAL",0,0,"5",,terminal_output +4137,2481669,"TERMINAL",0,0,"6",,terminal_output +4138,2482591,"TERMINAL",0,0,"7",,terminal_output +4139,2483358,"TERMINAL",0,0,"8",,terminal_output +4140,2484127,"TERMINAL",0,0,"9",,terminal_output +4141,2485191,"TERMINAL",0,0,"50",,terminal_output +4142,2485975,"TERMINAL",0,0,"1",,terminal_output +4143,2486898,"TERMINAL",0,0,"2",,terminal_output +4144,2487327,"jasmine/train_dynamics_appendix-c.py",0,0,"",python,tab +4145,2487992,"TERMINAL",0,0,"3",,terminal_output +4146,2488424,"slurm/jobs/mihir/horeka/doom/resolution60x80/sample.sh",0,0,"#!/usr/bin/env bash\n\n#SBATCH --nodes=1\n#SBATCH --ntasks-per-node=1\n#SBATCH --time=00:20:00\n#SBATCH --partition=accelerated\n#SBATCH --cpus-per-task=5\n#SBATCH --gres=gpu:1\n#SBATCH --output=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir/doom/dynamics/maskgit/%x_%j.log\n#SBATCH --error=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir/doom/dynamics/maskgit/%x_%j.log\n#SBATCH --job-name=sample_dynamics_doom_60x80\n\n# Unload modules that may interfere\nmodule unload mpi/openmpi/5.0\nmodule unload devel/cuda/12.4\n\n# Activate virtual environment\nsource 
.venv/bin/activate\n\narray_records_dir=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_doom/doom_episodes_10m_60x80_fixed/test\n\nCHECKPOINT_PATH=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/doom/maskgit/dynamics_doom_60x80_500k/500k-run\n\necho ""Sampling from checkpoint: $CHECKPOINT_PATH""\n\nsrun python jasmine/sample.py \\n --checkpoint ""$CHECKPOINT_PATH"" \\n --data_dir=$array_records_dir \\n --seq_len=16 \\n --batch_size=8 \\n --start_frame=4 \\n --image_height=60 \\n --image_width=80 \\n --num_actions=18 \\n --patch_size=16 \\n --output_dir=gifs/doom/action-prepend-branch/60x80/500k-test/ $@",shellscript,tab +4147,2489021,"TERMINAL",0,0,"4",,terminal_output +4148,2490124,"TERMINAL",0,0,"5",,terminal_output +4149,2490987,"TERMINAL",0,0,"6",,terminal_output +4150,2492002,"TERMINAL",0,0,"7",,terminal_output +4151,2492838,"jasmine/train_dynamics_appendix-c.py",0,0,"",python,tab +4152,2493053,"TERMINAL",0,0,"8",,terminal_output +4153,2494079,"TERMINAL",0,0,"9",,terminal_output +4154,2495093,"TERMINAL",0,0,"9:00",,terminal_output +4155,2496210,"TERMINAL",0,0,"1",,terminal_output +4156,2496973,"slurm/jobs/mihir/horeka/coinrun/ablations/appendix-c.sh",0,0,"\n# Log the sbatch script\ncat $0\n\nmodule unload mpi/openmpi/5.0\nmodule unload devel/cuda/12.4\nsource .venv/bin/activate\n\narray_records_dir_train=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_coinrun/coinrun_episodes_10m_gt_actions_split/train\narray_records_dir_val=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_coinrun/coinrun_episodes_10m_gt_actions_split/val\n\njob_name=$SLURM_JOB_NAME\nslurm_job_id=$SLURM_JOB_ID\n\n# CHECKPOINT_DIR=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/coinrun/maskgit/$job_name/$slurm_job_id\n\n# main\nCHECKPOINT_DIR=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/coinrun/maskgit/train_dynamics_maskgit/3533426\n\nmkdir -p $CHECKPOINT_DIR\n\ntokenizer_ckpt_dir=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/big-runs/tokenizer/train_tokenizer_default/3528955\n\nenv | grep SLURM\n\nsrun python jasmine/train_dynamics_appendix-c.py \\n --save_ckpt \\n --restore_ckpt \\n --wandb_id $SLURM_JOB_ID \\n --ckpt_dir $CHECKPOINT_DIR \\n --batch_size=110 \\n --image_height=64 \\n --image_width=64 \\n --patch_size=16 \\n --max_lr=3e-5 \\n --no-log \\n --no-eval-full-frame \\n --tokenizer_checkpoint=$tokenizer_ckpt_dir \\n --eval_full_frame \\n --data_dir $array_records_dir_train \\n --val_data_dir $array_records_dir_val \n",shellscript,tab +4157,2497469,"TERMINAL",0,0,"2",,terminal_output +4158,2498594,"TERMINAL",0,0,"3",,terminal_output +4159,2499480,"slurm/jobs/mihir/horeka/coinrun/ablations/appendix-c.sh",592,0,"",shellscript,selection_mouse +4160,2499523,"TERMINAL",0,0,"4",,terminal_output +4161,2500204,"TERMINAL",0,0,"5",,terminal_output +4162,2500691,"slurm/jobs/mihir/horeka/coinrun/ablations/appendix-c.sh",706,0,"\n",shellscript,content +4163,2501021,"slurm/jobs/mihir/horeka/coinrun/ablations/appendix-c.sh",707,0,"\n",shellscript,content +4164,2501197,"TERMINAL",0,0,"6",,terminal_output +4165,2501715,"slurm/jobs/mihir/horeka/coinrun/ablations/appendix-c.sh",708,0,"/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/coinrun/maskgit/train_dynamics_maskgit/3532466",shellscript,content +4166,2502348,"slurm/jobs/mihir/horeka/coinrun/ablations/appendix-c.sh",821,0,"",shellscript,selection_command +4167,2502416,"TERMINAL",0,0,"7",,terminal_output 
+4168,2502491,"slurm/jobs/mihir/horeka/coinrun/ablations/appendix-c.sh",707,0,"",shellscript,selection_command +4169,2503074,"slurm/jobs/mihir/horeka/coinrun/ablations/appendix-c.sh",690,0,"",shellscript,selection_command +4170,2503414,"TERMINAL",0,0,"8",,terminal_output +4171,2504612,"TERMINAL",0,0,"9",,terminal_output +4172,2504717,"slurm/jobs/mihir/horeka/coinrun/ablations/appendix-c.sh",577,0,"",shellscript,selection_command +4173,2505691,"TERMINAL",0,0,"10",,terminal_output +4174,2506758,"TERMINAL",0,0,"1",,terminal_output +4175,2506815,"slurm/jobs/mihir/horeka/coinrun/ablations/appendix-c.sh",590,0,"",shellscript,selection_command +4176,2507742,"TERMINAL",0,0,"2",,terminal_output +4177,2508500,"slurm/jobs/mihir/horeka/coinrun/ablations/appendix-c.sh",577,0,"",shellscript,selection_command +4178,2508740,"TERMINAL",0,0,"3",,terminal_output +4179,2509038,"slurm/jobs/mihir/horeka/coinrun/ablations/appendix-c.sh",707,0,"",shellscript,selection_command +4180,2509422,"slurm/jobs/mihir/horeka/coinrun/ablations/appendix-c.sh",708,0,"",shellscript,selection_command +4181,2509758,"TERMINAL",0,0,"4",,terminal_output +4182,2510718,"slurm/jobs/mihir/horeka/coinrun/ablations/appendix-c.sh",709,0,"CHECKPOINT_DI",shellscript,content +4183,2510720,"slurm/jobs/mihir/horeka/coinrun/ablations/appendix-c.sh",721,0,"",shellscript,selection_command +4184,2510720,"TERMINAL",0,0,"5",,terminal_output +4185,2511784,"TERMINAL",0,0,"6",,terminal_output +4186,2511905,"slurm/jobs/mihir/horeka/coinrun/ablations/appendix-c.sh",709,13,"",shellscript,content +4187,2511916,"slurm/jobs/mihir/horeka/coinrun/ablations/appendix-c.sh",708,0,"",shellscript,selection_command +4188,2512618,"TERMINAL",0,0,"7",,terminal_output +4189,2513733,"TERMINAL",0,0,"8",,terminal_output +4190,2514262,"slurm/jobs/mihir/horeka/coinrun/ablations/appendix-c.sh",708,0,"=",shellscript,content +4191,2514264,"slurm/jobs/mihir/horeka/coinrun/ablations/appendix-c.sh",709,0,"",shellscript,selection_keyboard +4192,2514724,"slurm/jobs/mihir/horeka/coinrun/ablations/appendix-c.sh",708,0,"",shellscript,selection_command +4193,2514781,"TERMINAL",0,0,"9",,terminal_output +4194,2515225,"slurm/jobs/mihir/horeka/coinrun/ablations/appendix-c.sh",708,0," ",shellscript,content +4195,2515226,"slurm/jobs/mihir/horeka/coinrun/ablations/appendix-c.sh",709,0,"",shellscript,selection_keyboard +4196,2515424,"slurm/jobs/mihir/horeka/coinrun/ablations/appendix-c.sh",708,0,"",shellscript,selection_command +4197,2515625,"TERMINAL",0,0,"20",,terminal_output +4198,2516154,"slurm/jobs/mihir/horeka/coinrun/ablations/appendix-c.sh",824,0,"\n ",shellscript,content +4199,2516722,"TERMINAL",0,0,"1",,terminal_output +4200,2517384,"slurm/jobs/mihir/horeka/coinrun/ablations/appendix-c.sh",825,1,"",shellscript,content +4201,2517586,"slurm/jobs/mihir/horeka/coinrun/ablations/appendix-c.sh",708,0,"",shellscript,selection_command +4202,2517988,"TERMINAL",0,0,"2",,terminal_output +4203,2518748,"TERMINAL",0,0,"3",,terminal_output +4204,2518791,"slurm/jobs/mihir/horeka/coinrun/ablations/appendix-c.sh",709,0,"CHECKPOINT_DI",shellscript,content +4205,2518792,"slurm/jobs/mihir/horeka/coinrun/ablations/appendix-c.sh",721,0,"",shellscript,selection_command +4206,2519841,"TERMINAL",0,0,"5",,terminal_output +4207,2520414,"slurm/jobs/mihir/horeka/coinrun/ablations/appendix-c.sh",722,0,"",shellscript,selection_command +4208,2520680,"TERMINAL",0,0,"6",,terminal_output +4209,2521527,"slurm/jobs/mihir/horeka/coinrun/ablations/appendix-c.sh",722,0,"R",shellscript,content 
+4210,2521528,"slurm/jobs/mihir/horeka/coinrun/ablations/appendix-c.sh",723,0,"",shellscript,selection_keyboard +4211,2521696,"TERMINAL",0,0,"7",,terminal_output +4212,2522033,"slurm/jobs/mihir/horeka/coinrun/ablations/appendix-c.sh",839,0,"",shellscript,selection_keyboard +4213,2522736,"slurm/jobs/mihir/horeka/coinrun/ablations/appendix-c.sh",838,1,"",shellscript,content +4214,2522921,"TERMINAL",0,0,"8",,terminal_output +4215,2523859,"TERMINAL",0,0,"9",,terminal_output +4216,2524196,"slurm/jobs/mihir/horeka/coinrun/ablations/appendix-c.sh",837,0,"",shellscript,selection_command +4217,2524612,"slurm/jobs/mihir/horeka/coinrun/ablations/appendix-c.sh",709,0,"",shellscript,selection_command +4218,2524751,"TERMINAL",0,0,"30",,terminal_output +4219,2525204,"slurm/jobs/mihir/horeka/coinrun/ablations/appendix-c.sh",709,1,"",shellscript,content +4220,2525363,"slurm/jobs/mihir/horeka/coinrun/ablations/appendix-c.sh",708,0,"",shellscript,selection_command +4221,2525723,"TERMINAL",0,0,"1",,terminal_output +4222,2525857,"slurm/jobs/mihir/horeka/coinrun/ablations/appendix-c.sh",707,0,"",shellscript,selection_command +4223,2526519,"slurm/jobs/mihir/horeka/coinrun/ablations/appendix-c.sh",577,0,"",shellscript,selection_command +4224,2526702,"TERMINAL",0,0,"2",,terminal_output +4225,2527849,"TERMINAL",0,0,"3",,terminal_output +4226,2528570,"slurm/jobs/mihir/horeka/coinrun/ablations/appendix-c.sh",577,0,"#",shellscript,content +4227,2528571,"slurm/jobs/mihir/horeka/coinrun/ablations/appendix-c.sh",578,0,"",shellscript,selection_keyboard +4228,2528664,"slurm/jobs/mihir/horeka/coinrun/ablations/appendix-c.sh",578,0," ",shellscript,content +4229,2528665,"slurm/jobs/mihir/horeka/coinrun/ablations/appendix-c.sh",579,0,"",shellscript,selection_keyboard +4230,2528811,"TERMINAL",0,0,"4",,terminal_output +4231,2529169,"slurm/jobs/mihir/horeka/coinrun/ablations/appendix-c.sh",578,0,"",shellscript,selection_command +4232,2529281,"slurm/jobs/mihir/horeka/coinrun/ablations/appendix-c.sh",709,0,"",shellscript,selection_command +4233,2529463,"slurm/jobs/mihir/horeka/coinrun/ablations/appendix-c.sh",711,0,"",shellscript,selection_command +4234,2529967,"TERMINAL",0,0,"5",,terminal_output +4235,2530301,"slurm/jobs/mihir/horeka/coinrun/ablations/appendix-c.sh",710,1,"",shellscript,content +4236,2530826,"TERMINAL",0,0,"6",,terminal_output +4237,2531222,"slurm/jobs/mihir/horeka/coinrun/ablations/appendix-c.sh",710,0,"C",shellscript,content +4238,2531223,"slurm/jobs/mihir/horeka/coinrun/ablations/appendix-c.sh",711,0,"",shellscript,selection_keyboard +4239,2531843,"slurm/jobs/mihir/horeka/coinrun/ablations/appendix-c.sh",710,0,"",shellscript,selection_command +4240,2532029,"TERMINAL",0,0,"7",,terminal_output +4241,2533069,"TERMINAL",0,0,"8",,terminal_output +4242,2534005,"slurm/jobs/mihir/horeka/coinrun/ablations/appendix-c.sh",709,0,"",shellscript,selection_mouse +4243,2534154,"TERMINAL",0,0,"9",,terminal_output +4244,2534783,"slurm/jobs/mihir/horeka/coinrun/ablations/appendix-c.sh",709,0,"\n",shellscript,content +4245,2535193,"TERMINAL",0,0,"40",,terminal_output +4246,2535348,"slurm/jobs/mihir/horeka/coinrun/ablations/appendix-c.sh",710,0,"#",shellscript,content +4247,2535349,"slurm/jobs/mihir/horeka/coinrun/ablations/appendix-c.sh",711,0,"",shellscript,selection_keyboard +4248,2536056,"slurm/jobs/mihir/horeka/coinrun/ablations/appendix-c.sh",711,0," ",shellscript,content +4249,2536056,"slurm/jobs/mihir/horeka/coinrun/ablations/appendix-c.sh",712,0,"",shellscript,selection_keyboard 
+4250,2536084,"TERMINAL",0,0,"1",,terminal_output +4251,2536182,"slurm/jobs/mihir/horeka/coinrun/ablations/appendix-c.sh",712,0,"a",shellscript,content +4252,2536183,"slurm/jobs/mihir/horeka/coinrun/ablations/appendix-c.sh",713,0,"",shellscript,selection_keyboard +4253,2536317,"slurm/jobs/mihir/horeka/coinrun/ablations/appendix-c.sh",713,0,"c",shellscript,content +4254,2536318,"slurm/jobs/mihir/horeka/coinrun/ablations/appendix-c.sh",714,0,"",shellscript,selection_keyboard +4255,2536532,"slurm/jobs/mihir/horeka/coinrun/ablations/appendix-c.sh",714,0,"t",shellscript,content +4256,2536533,"slurm/jobs/mihir/horeka/coinrun/ablations/appendix-c.sh",715,0,"",shellscript,selection_keyboard +4257,2536863,"slurm/jobs/mihir/horeka/coinrun/ablations/appendix-c.sh",714,1,"",shellscript,content +4258,2537000,"slurm/jobs/mihir/horeka/coinrun/ablations/appendix-c.sh",713,1,"",shellscript,content +4259,2537140,"slurm/jobs/mihir/horeka/coinrun/ablations/appendix-c.sh",712,1,"",shellscript,content +4260,2537144,"TERMINAL",0,0,"2",,terminal_output +4261,2537363,"slurm/jobs/mihir/horeka/coinrun/ablations/appendix-c.sh",712,0,"n",shellscript,content +4262,2537364,"slurm/jobs/mihir/horeka/coinrun/ablations/appendix-c.sh",713,0,"",shellscript,selection_keyboard +4263,2537514,"slurm/jobs/mihir/horeka/coinrun/ablations/appendix-c.sh",713,0,"o",shellscript,content +4264,2537515,"slurm/jobs/mihir/horeka/coinrun/ablations/appendix-c.sh",714,0,"",shellscript,selection_keyboard +4265,2537596,"slurm/jobs/mihir/horeka/coinrun/ablations/appendix-c.sh",714,0,"s",shellscript,content +4266,2537597,"slurm/jobs/mihir/horeka/coinrun/ablations/appendix-c.sh",715,0,"",shellscript,selection_keyboard +4267,2537604,"slurm/jobs/mihir/horeka/coinrun/ablations/appendix-c.sh",715,0,"i",shellscript,content +4268,2537605,"slurm/jobs/mihir/horeka/coinrun/ablations/appendix-c.sh",716,0,"",shellscript,selection_keyboard +4269,2537734,"slurm/jobs/mihir/horeka/coinrun/ablations/appendix-c.sh",716,0,"e",shellscript,content +4270,2537734,"slurm/jobs/mihir/horeka/coinrun/ablations/appendix-c.sh",717,0,"",shellscript,selection_keyboard +4271,2537844,"slurm/jobs/mihir/horeka/coinrun/ablations/appendix-c.sh",717,0," ",shellscript,content +4272,2537845,"slurm/jobs/mihir/horeka/coinrun/ablations/appendix-c.sh",718,0,"",shellscript,selection_keyboard +4273,2538014,"slurm/jobs/mihir/horeka/coinrun/ablations/appendix-c.sh",718,0,"b",shellscript,content +4274,2538015,"slurm/jobs/mihir/horeka/coinrun/ablations/appendix-c.sh",719,0,"",shellscript,selection_keyboard +4275,2538065,"slurm/jobs/mihir/horeka/coinrun/ablations/appendix-c.sh",719,0,"r",shellscript,content +4276,2538066,"slurm/jobs/mihir/horeka/coinrun/ablations/appendix-c.sh",720,0,"",shellscript,selection_keyboard +4277,2538212,"slurm/jobs/mihir/horeka/coinrun/ablations/appendix-c.sh",720,0,"a",shellscript,content +4278,2538213,"slurm/jobs/mihir/horeka/coinrun/ablations/appendix-c.sh",721,0,"",shellscript,selection_keyboard +4279,2538307,"slurm/jobs/mihir/horeka/coinrun/ablations/appendix-c.sh",721,0,"n",shellscript,content +4280,2538308,"slurm/jobs/mihir/horeka/coinrun/ablations/appendix-c.sh",722,0,"",shellscript,selection_keyboard +4281,2538308,"TERMINAL",0,0,"3",,terminal_output +4282,2538417,"slurm/jobs/mihir/horeka/coinrun/ablations/appendix-c.sh",722,0,"c",shellscript,content +4283,2538418,"slurm/jobs/mihir/horeka/coinrun/ablations/appendix-c.sh",723,0,"",shellscript,selection_keyboard +4284,2538468,"slurm/jobs/mihir/horeka/coinrun/ablations/appendix-c.sh",723,0,"h",shellscript,content 
+4285,2538469,"slurm/jobs/mihir/horeka/coinrun/ablations/appendix-c.sh",724,0,"",shellscript,selection_keyboard +4286,2539165,"TERMINAL",0,0,"4",,terminal_output +4287,2539600,"slurm/jobs/mihir/horeka/coinrun/ablations/appendix-c.sh",714,0,"",shellscript,selection_mouse +4288,2539785,"slurm/jobs/mihir/horeka/coinrun/ablations/appendix-c.sh",712,5,"nosie",shellscript,selection_mouse +4289,2540004,"TERMINAL",0,0,"5",,terminal_output +4290,2540770,"slurm/jobs/mihir/horeka/coinrun/ablations/appendix-c.sh",712,5,"",shellscript,content +4291,2541039,"slurm/jobs/mihir/horeka/coinrun/ablations/appendix-c.sh",712,0,"n",shellscript,content +4292,2541040,"slurm/jobs/mihir/horeka/coinrun/ablations/appendix-c.sh",713,0,"",shellscript,selection_keyboard +4293,2541194,"TERMINAL",0,0,"6",,terminal_output +4294,2541221,"slurm/jobs/mihir/horeka/coinrun/ablations/appendix-c.sh",713,0,"o",shellscript,content +4295,2541222,"slurm/jobs/mihir/horeka/coinrun/ablations/appendix-c.sh",714,0,"",shellscript,selection_keyboard +4296,2541290,"slurm/jobs/mihir/horeka/coinrun/ablations/appendix-c.sh",714,0,"i",shellscript,content +4297,2541291,"slurm/jobs/mihir/horeka/coinrun/ablations/appendix-c.sh",715,0,"",shellscript,selection_keyboard +4298,2541330,"slurm/jobs/mihir/horeka/coinrun/ablations/appendix-c.sh",715,0,"s",shellscript,content +4299,2541330,"slurm/jobs/mihir/horeka/coinrun/ablations/appendix-c.sh",716,0,"",shellscript,selection_keyboard +4300,2541439,"slurm/jobs/mihir/horeka/coinrun/ablations/appendix-c.sh",716,0,"e",shellscript,content +4301,2541440,"slurm/jobs/mihir/horeka/coinrun/ablations/appendix-c.sh",717,0,"",shellscript,selection_keyboard +4302,2542177,"TERMINAL",0,0,"7",,terminal_output +4303,2543300,"TERMINAL",0,0,"8",,terminal_output +4304,2544175,"TERMINAL",0,0,"9",,terminal_output +4305,2545213,"TERMINAL",0,0,"50",,terminal_output +4306,2546098,"TERMINAL",0,0,"1",,terminal_output +4307,2546652,"slurm/jobs/mihir/horeka/doom/resolution120x160/train_tokenizer_default_1node_requeue.sh",0,0,"#!/usr/bin/env bash\n\n#SBATCH --nodes=1\n#SBATCH --ntasks-per-node=4\n#SBATCH --time=48:00:00\n#SBATCH --partition=accelerated\n#SBATCH --cpus-per-task=5\n#SBATCH --gres=gpu:4\n#SBATCH --output=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir/doom/tokenizer/%x_%j.log\n#SBATCH --error=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir/doom/tokenizer/%x_%j.log\n#SBATCH --exclude=hkn0735\n#SBATCH --job-name=train_tokenizer_default_single_node_120x160\n\n# Log the sbatch script\ncat $0\n\nmodule unload mpi/openmpi/5.0\nmodule unload devel/cuda/12.4\nsource .venv/bin/activate\n\narray_records_dir_train=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_doom/doom_episodes_10m_120x160_fixed/train\narray_records_dir_val=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_doom/doom_episodes_10m_120x160_fixed/val\n\n\njob_name=$SLURM_JOB_NAME\nslurm_job_id=3546530\n\nCHECKPOINT_DIR=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/mihir/tokenizer/$job_name/$slurm_job_id\nmkdir -p $CHECKPOINT_DIR\n\nenv | grep SLURM\n\nsrun python jasmine/train_tokenizer_full_precision.py \\n --save_ckpt \\n --restore_ckpt \\n --image_height=120 \\n --image_width=160 \\n --no-use-flash-attention \\n --ckpt_dir $CHECKPOINT_DIR \\n --batch_size=48 \\n --name=doom-tokenizer-default-120x160-$slurm_job_id-requeue \\n --tags tokenizer doom default 120x160 \\n --entity instant-uv \\n --project jafar \\n --data_dir $array_records_dir_train \\n --val_data_dir 
$array_records_dir_val",shellscript,tab +4308,2547168,"TERMINAL",0,0,"2",,terminal_output +4309,2548308,"TERMINAL",0,0,"3",,terminal_output +4310,2549125,"TERMINAL",0,0,"4",,terminal_output +4311,2550171,"TERMINAL",0,0,"5",,terminal_output +4312,2551149,"TERMINAL",0,0,"6",,terminal_output +4313,2552183,"TERMINAL",0,0,"7",,terminal_output +4314,2553208,"TERMINAL",0,0,"8",,terminal_output +4315,2554233,"TERMINAL",0,0,"9",,terminal_output +4316,2554686,"slurm/jobs/mihir/horeka/coinrun/ablations/appendix-c.sh",0,0,"",shellscript,tab +4317,2555228,"TERMINAL",0,0,"20:00",,terminal_output +4318,2556253,"TERMINAL",0,0,"1",,terminal_output +4319,2557439,"TERMINAL",0,0,"2",,terminal_output +4320,2558372,"TERMINAL",0,0,"3",,terminal_output +4321,2559423,"jasmine/train_dynamics_appendix-c.py",0,0,"",python,tab +4322,2559531,"TERMINAL",0,0,"4",,terminal_output +4323,2560490,"TERMINAL",0,0,"5",,terminal_output +4324,2561487,"TERMINAL",0,0,"6",,terminal_output +4325,2562445,"TERMINAL",0,0,"7",,terminal_output +4326,2563478,"TERMINAL",0,0,"8",,terminal_output +4327,2564444,"TERMINAL",0,0,"watch",,terminal_focus +4328,2564521,"TERMINAL",0,0,"9",,terminal_output +4329,2565515,"TERMINAL",0,0,"11",,terminal_output +4330,2566538,"TERMINAL",0,0,"2",,terminal_output +4331,2567559,"TERMINAL",0,0,"3",,terminal_output +4332,2568628,"TERMINAL",0,0,"4",,terminal_output +4333,2569846,"TERMINAL",0,0,"5",,terminal_output +4334,2570934,"TERMINAL",0,0,"6",,terminal_output +4335,2572290,"TERMINAL",0,0,"7",,terminal_output +4336,2573061,"TERMINAL",0,0,"8",,terminal_output +4337,2574078,"TERMINAL",0,0,"9",,terminal_output +4338,2574578,"jasmine/train_dynamics_appendix-c.py",0,0,"",python,tab +4339,2575003,"TERMINAL",0,0,"20",,terminal_output +4340,2577334,"TERMINAL",0,0,"12",,terminal_output +4341,2577573,"jasmine/train_dynamics_appendix-c.py",4149,0,"",python,selection_mouse +4342,2578010,"TERMINAL",0,0,"3",,terminal_output +4343,2579247,"TERMINAL",0,0,"4",,terminal_output +4344,2579587,"jasmine/train_dynamics_appendix-c.py",4160,0,"\n ",python,content +4345,2580042,"TERMINAL",0,0,"5",,terminal_output +4346,2580579,"jasmine/train_dynamics_appendix-c.py",4169,0,"m",python,content +4347,2580580,"jasmine/train_dynamics_appendix-c.py",4170,0,"",python,selection_keyboard +4348,2580653,"jasmine/train_dynamics_appendix-c.py",4170,0,"a",python,content +4349,2580655,"jasmine/train_dynamics_appendix-c.py",4171,0,"",python,selection_keyboard +4350,2580882,"TERMINAL",0,0,"6",,terminal_output +4351,2581851,"TERMINAL",0,0,"7",,terminal_output +4352,2581940,"jasmine/train_dynamics_appendix-c.py",4171,0,"x",python,content +4353,2581942,"jasmine/train_dynamics_appendix-c.py",4172,0,"",python,selection_keyboard +4354,2582879,"jasmine/train_dynamics_appendix-c.py",4172,0,"_",python,content +4355,2582880,"jasmine/train_dynamics_appendix-c.py",4173,0,"",python,selection_keyboard +4356,2583047,"TERMINAL",0,0,"8",,terminal_output +4357,2583282,"jasmine/train_dynamics_appendix-c.py",4173,0,"n",python,content +4358,2583282,"jasmine/train_dynamics_appendix-c.py",4174,0,"",python,selection_keyboard +4359,2583470,"jasmine/train_dynamics_appendix-c.py",4174,0,"o",python,content +4360,2583471,"jasmine/train_dynamics_appendix-c.py",4175,0,"",python,selection_keyboard +4361,2583552,"jasmine/train_dynamics_appendix-c.py",4175,0,"i",python,content +4362,2583552,"jasmine/train_dynamics_appendix-c.py",4176,0,"",python,selection_keyboard +4363,2583619,"jasmine/train_dynamics_appendix-c.py",4176,0,"s",python,content 
+4364,2583620,"jasmine/train_dynamics_appendix-c.py",4177,0,"",python,selection_keyboard +4365,2583876,"jasmine/train_dynamics_appendix-c.py",4177,0,"e",python,content +4366,2583877,"jasmine/train_dynamics_appendix-c.py",4178,0,"",python,selection_keyboard +4367,2584019,"TERMINAL",0,0,"9",,terminal_output +4368,2584065,"jasmine/train_dynamics_appendix-c.py",4178,0,"_",python,content +4369,2584065,"jasmine/train_dynamics_appendix-c.py",4179,0,"",python,selection_keyboard +4370,2584384,"jasmine/train_dynamics_appendix-c.py",4179,0,"l",python,content +4371,2584385,"jasmine/train_dynamics_appendix-c.py",4180,0,"",python,selection_keyboard +4372,2584506,"jasmine/train_dynamics_appendix-c.py",4180,0,"e",python,content +4373,2584507,"jasmine/train_dynamics_appendix-c.py",4181,0,"",python,selection_keyboard +4374,2584664,"jasmine/train_dynamics_appendix-c.py",4181,0,"v",python,content +4375,2584664,"jasmine/train_dynamics_appendix-c.py",4182,0,"",python,selection_keyboard +4376,2584824,"jasmine/train_dynamics_appendix-c.py",4182,0,"e",python,content +4377,2584825,"jasmine/train_dynamics_appendix-c.py",4183,0,"",python,selection_keyboard +4378,2584865,"jasmine/train_dynamics_appendix-c.py",4183,0,"l",python,content +4379,2584866,"jasmine/train_dynamics_appendix-c.py",4184,0,"",python,selection_keyboard +4380,2585045,"TERMINAL",0,0,"30",,terminal_output +4381,2585353,"jasmine/train_dynamics_appendix-c.py",4184,0,"=",python,content +4382,2585353,"jasmine/train_dynamics_appendix-c.py",4185,0,"",python,selection_keyboard +4383,2585665,"jasmine/train_dynamics_appendix-c.py",4185,0,"a",python,content +4384,2585666,"jasmine/train_dynamics_appendix-c.py",4186,0,"",python,selection_keyboard +4385,2585845,"jasmine/train_dynamics_appendix-c.py",4186,0,"r",python,content +4386,2585847,"jasmine/train_dynamics_appendix-c.py",4187,0,"",python,selection_keyboard +4387,2586085,"TERMINAL",0,0,"1",,terminal_output +4388,2587054,"TERMINAL",0,0,"2",,terminal_output +4389,2587347,"jasmine/train_dynamics_appendix-c.py",4185,2,"args",python,content +4390,2587640,"jasmine/train_dynamics_appendix-c.py",4189,0,".",python,content +4391,2587641,"jasmine/train_dynamics_appendix-c.py",4190,0,"",python,selection_keyboard +4392,2587968,"jasmine/train_dynamics_appendix-c.py",4190,0,"m",python,content +4393,2587969,"jasmine/train_dynamics_appendix-c.py",4191,0,"",python,selection_keyboard +4394,2588069,"jasmine/train_dynamics_appendix-c.py",4191,0,"a",python,content +4395,2588070,"jasmine/train_dynamics_appendix-c.py",4192,0,"",python,selection_keyboard +4396,2588071,"TERMINAL",0,0,"3",,terminal_output +4397,2589073,"TERMINAL",0,0,"4",,terminal_output +4398,2589442,"jasmine/train_dynamics_appendix-c.py",4190,2,"max_noise_level",python,content +4399,2590096,"TERMINAL",0,0,"5",,terminal_output +4400,2590383,"jasmine/train_dynamics_appendix-c.py",4205,0,",",python,content +4401,2590384,"jasmine/train_dynamics_appendix-c.py",4206,0,"",python,selection_keyboard +4402,2590533,"jasmine/train_dynamics_appendix-c.py",4206,0,"\n ",python,content +4403,2591133,"jasmine/train_dynamics_appendix-c.py",4215,0,"n",python,content +4404,2591134,"jasmine/train_dynamics_appendix-c.py",4216,0,"",python,selection_keyboard +4405,2591135,"TERMINAL",0,0,"6",,terminal_output +4406,2591301,"jasmine/train_dynamics_appendix-c.py",4216,0,"o",python,content +4407,2591302,"jasmine/train_dynamics_appendix-c.py",4217,0,"",python,selection_keyboard +4408,2591409,"jasmine/train_dynamics_appendix-c.py",4217,0,"i",python,content 
+4409,2591409,"jasmine/train_dynamics_appendix-c.py",4218,0,"",python,selection_keyboard +4410,2591477,"jasmine/train_dynamics_appendix-c.py",4218,0,"s",python,content +4411,2591478,"jasmine/train_dynamics_appendix-c.py",4219,0,"",python,selection_keyboard +4412,2591606,"jasmine/train_dynamics_appendix-c.py",4219,0,"e",python,content +4413,2591608,"jasmine/train_dynamics_appendix-c.py",4220,0,"",python,selection_keyboard +4414,2591779,"jasmine/train_dynamics_appendix-c.py",4220,0,"_",python,content +4415,2591780,"jasmine/train_dynamics_appendix-c.py",4221,0,"",python,selection_keyboard +4416,2592089,"jasmine/train_dynamics_appendix-c.py",4221,0,"b",python,content +4417,2592089,"jasmine/train_dynamics_appendix-c.py",4222,0,"",python,selection_keyboard +4418,2592180,"TERMINAL",0,0,"7",,terminal_output +4419,2592220,"jasmine/train_dynamics_appendix-c.py",4222,0,"u",python,content +4420,2592221,"jasmine/train_dynamics_appendix-c.py",4223,0,"",python,selection_keyboard +4421,2592330,"jasmine/train_dynamics_appendix-c.py",4223,0,"c",python,content +4422,2592331,"jasmine/train_dynamics_appendix-c.py",4224,0,"",python,selection_keyboard +4423,2592463,"jasmine/train_dynamics_appendix-c.py",4224,0,"k",python,content +4424,2592464,"jasmine/train_dynamics_appendix-c.py",4225,0,"",python,selection_keyboard +4425,2592578,"jasmine/train_dynamics_appendix-c.py",4225,0,"e",python,content +4426,2592579,"jasmine/train_dynamics_appendix-c.py",4226,0,"",python,selection_keyboard +4427,2592781,"jasmine/train_dynamics_appendix-c.py",4226,0,"t",python,content +4428,2592782,"jasmine/train_dynamics_appendix-c.py",4227,0,"",python,selection_keyboard +4429,2592952,"jasmine/train_dynamics_appendix-c.py",4227,0,"s",python,content +4430,2592954,"jasmine/train_dynamics_appendix-c.py",4228,0,"",python,selection_keyboard +4431,2593158,"TERMINAL",0,0,"8",,terminal_output +4432,2593187,"jasmine/train_dynamics_appendix-c.py",4228,0,"=",python,content +4433,2593188,"jasmine/train_dynamics_appendix-c.py",4229,0,"",python,selection_keyboard +4434,2593699,"jasmine/train_dynamics_appendix-c.py",4229,0,"a",python,content +4435,2593700,"jasmine/train_dynamics_appendix-c.py",4230,0,"",python,selection_keyboard +4436,2593886,"jasmine/train_dynamics_appendix-c.py",4230,0,"r",python,content +4437,2593887,"jasmine/train_dynamics_appendix-c.py",4231,0,"",python,selection_keyboard +4438,2594053,"jasmine/train_dynamics_appendix-c.py",4231,0,"g",python,content +4439,2594054,"jasmine/train_dynamics_appendix-c.py",4232,0,"",python,selection_keyboard +4440,2594183,"jasmine/train_dynamics_appendix-c.py",4232,0,"s",python,content +4441,2594184,"jasmine/train_dynamics_appendix-c.py",4233,0,"",python,selection_keyboard +4442,2594212,"TERMINAL",0,0,"9",,terminal_output +4443,2594279,"jasmine/train_dynamics_appendix-c.py",4233,0,".",python,content +4444,2594280,"jasmine/train_dynamics_appendix-c.py",4234,0,"",python,selection_keyboard +4445,2594586,"jasmine/train_dynamics_appendix-c.py",4234,0,"n",python,content +4446,2594587,"jasmine/train_dynamics_appendix-c.py",4235,0,"",python,selection_keyboard +4447,2594743,"jasmine/train_dynamics_appendix-c.py",4235,0,"o",python,content +4448,2594744,"jasmine/train_dynamics_appendix-c.py",4236,0,"",python,selection_keyboard +4449,2594811,"jasmine/train_dynamics_appendix-c.py",4236,0,"i",python,content +4450,2594812,"jasmine/train_dynamics_appendix-c.py",4237,0,"",python,selection_keyboard +4451,2594875,"jasmine/train_dynamics_appendix-c.py",4237,0,"s",python,content 
+4452,2594876,"jasmine/train_dynamics_appendix-c.py",4238,0,"",python,selection_keyboard +4453,2595027,"jasmine/train_dynamics_appendix-c.py",4238,0,"e",python,content +4454,2595028,"jasmine/train_dynamics_appendix-c.py",4239,0,"",python,selection_keyboard +4455,2595263,"TERMINAL",0,0,"40",,terminal_output +4456,2595538,"jasmine/train_dynamics_appendix-c.py",4234,5,"noise_buckets",python,content +4457,2596273,"TERMINAL",0,0,"1",,terminal_output +4458,2596354,"jasmine/train_dynamics_appendix-c.py",4247,0,",",python,content +4459,2596355,"jasmine/train_dynamics_appendix-c.py",4248,0,"",python,selection_keyboard +4460,2597334,"TERMINAL",0,0,"2",,terminal_output +4461,2597969,"TERMINAL",0,0,"srun",,terminal_focus +4462,2598326,"TERMINAL",0,0,"3",,terminal_output +4463,2598657,"TERMINAL",0,0,"sh slurm/jobs/mihir/horeka/coinrun/ablations/appendix-c.sh",,terminal_output +4464,2599145,"TERMINAL",0,0,"\r\n[?2004l\r\r\n# Log the sbatch script\r\ncat $0\r\n\r\nmodule unload mpi/openmpi/5.0\r\nmodule unload devel/cuda/12.4\r\nsource .venv/bin/activate\r\n\r\narray_records_dir_train=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_coinrun/coinrun_episodes_10m_gt_actions_split/train\r\narray_records_dir_val=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_coinrun/coinrun_episodes_10m_gt_actions_split/val\r\n\r\njob_name=$SLURM_JOB_NAME\r\nslurm_job_id=$SLURM_JOB_ID\r\n\r\n# CHECKPOINT_DIR=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/coinrun/maskgit/$job_name/$slurm_job_id\r\n\r\n# main\r\n# CHECKPOINT_DIR=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/coinrun/maskgit/train_dynamics_maskgit/3533426\r\n\r\n# noise branch\r\nCHECKPOINT_DIR=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/coinrun/maskgit/train_dynamics_maskgit/3532466\r\n\r\nmkdir -p $CHECKPOINT_DIR\r\n\r\ntokenizer_ckpt_dir=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/big-runs/tokenizer/train_tokenizer_default/3528955\r\n\r\nenv | grep SLURM\r\n\r\nsrun python jasmine/train_dynamics_appendix-c.py \\r\n --save_ckpt \\r\n --restore_ckpt \\r\n --wandb_id $SLURM_JOB_ID \\r\n --ckpt_dir $CHECKPOINT_DIR \\r\n --batch_size=110 \\r\n --image_height=64 \\r\n --image_width=64 \\r\n --patch_size=16 \\r\n --max_lr=3e-5 \\r\n --no-log \\r\n --no-eval-full-frame \\r\n --tokenizer_checkpoint=$tokenizer_ckpt_dir \\r\n --eval_full_frame \\r\n --data_dir $array_records_dir_train \\r\n --val_data_dir $array_records_dir_val \r\n",,terminal_output 
+4465,2599240,"TERMINAL",0,0,"SLURM_STEP_NUM_TASKS=1\r\nSLURM_JOB_USER=tum_cte0515\r\nSLURM_TASKS_PER_NODE=1\r\nSLURM_JOB_UID=999226\r\nSLURM_TASK_PID=1279162\r\nSLURM_JOB_GPUS=0\r\nSLURM_LOCALID=0\r\nSLURM_SUBMIT_DIR=/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine\r\nSLURMD_NODENAME=hkn0401\r\nSLURM_JOB_START_TIME=1759955924\r\nSLURM_STEP_NODELIST=hkn0401\r\nSLURM_CLUSTER_NAME=hk\r\nSLURM_JOB_END_TIME=1759959524\r\nSLURM_PMI2_SRUN_PORT=34473\r\nSLURM_CPUS_ON_NODE=8\r\nSLURM_JOB_CPUS_PER_NODE=8\r\nSLURM_GPUS_ON_NODE=1\r\nSLURM_GTIDS=0\r\nSLURM_JOB_PARTITION=dev_accelerated\r\nSLURM_TRES_PER_TASK=cpu=8\r\nSLURM_OOM_KILL_STEP=0\r\nSLURM_JOB_NUM_NODES=1\r\nSLURM_STEPID=4294967290\r\nSLURM_JOBID=3554157\r\nSLURM_PTY_PORT=43479\r\nSLURM_JOB_QOS=normal\r\nSLURM_LAUNCH_NODE_IPADDR=10.0.7.201\r\nSLURM_PTY_WIN_ROW=35\r\nSLURM_PMI2_PROC_MAPPING=(vector,(0,1,1))\r\nSLURMD_DEBUG=2\r\nSLURM_PROCID=0\r\nSLURM_CPUS_PER_TASK=8\r\nSLURM_TOPOLOGY_ADDR=hkibb.hkibbi1.hkibbi1e11.hkn0401\r\nSLURM_TOPOLOGY_ADDR_PATTERN=switch.switch.switch.node\r\nSLURM_SRUN_COMM_HOST=10.0.7.201\r\nSLURM_SCRIPT_CONTEXT=prolog_task\r\nSLURM_PTY_WIN_COL=181\r\nSLURM_NODELIST=hkn0401\r\nSLURM_SRUN_COMM_PORT=33897\r\nSLURM_STEP_ID=4294967290\r\nSLURM_JOB_ACCOUNT=hk-project-p0023960\r\nSLURM_PRIO_PROCESS=0\r\nSLURM_NNODES=1\r\nSLURM_SUBMIT_HOST=hkn1993.localdomain\r\nSLURM_JOB_ID=3554157\r\nSLURM_NODEID=0\r\nSLURM_STEP_NUM_NODES=1\r\nSLURM_STEP_TASKS_PER_NODE=1\r\nSLURM_MPI_TYPE=pmi2\r\nSLURM_PMI2_STEP_NODES=hkn0401\r\nSLURM_CONF=/etc/slurm/slurm.conf\r\nSLURM_JOB_NAME=interactive\r\nSLURM_STEP_LAUNCHER_PORT=33897\r\nSLURM_JOB_GID=502226\r\nSLURM_JOB_NODELIST=hkn0401\r\n",,terminal_output +4466,2599292,"TERMINAL",0,0,"4",,terminal_output +4467,2599384,"TERMINAL",0,0,"GpuFreq=control_disabled\r\n",,terminal_output +4468,2600349,"TERMINAL",0,0,"5",,terminal_output +4469,2601391,"TERMINAL",0,0,"6",,terminal_output +4470,2602363,"TERMINAL",0,0,"7",,terminal_output +4471,2603389,"TERMINAL",0,0,"8",,terminal_output +4472,2604410,"TERMINAL",0,0,"9",,terminal_output +4473,2605405,"TERMINAL",0,0,"50",,terminal_output +4474,2606518,"TERMINAL",0,0,"1",,terminal_output +4475,2607490,"TERMINAL",0,0,"2",,terminal_output +4476,2608477,"TERMINAL",0,0,"3",,terminal_output +4477,2609488,"TERMINAL",0,0,"4",,terminal_output +4478,2609726,"TERMINAL",0,0,"WARNING:absl:Path /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/coinrun/maskgit/train_dynamics_maskgit/3532466/040000 could not be identified as a temporary checkpoint path using . Got error: Expected AtomicRenameTemporaryPath (/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/coinrun/maskgit/train_dynamics_maskgit/3532466/040000) to end with "".orbax-checkpoint-tmp"".\r\nWARNING:absl:Path /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/coinrun/maskgit/train_dynamics_maskgit/3532466/200000 could not be identified as a temporary checkpoint path using . Got error: Expected AtomicRenameTemporaryPath (/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/coinrun/maskgit/train_dynamics_maskgit/3532466/200000) to end with "".orbax-checkpoint-tmp"".\r\nWARNING:absl:Path /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/coinrun/maskgit/train_dynamics_maskgit/3532466/080000 could not be identified as a temporary checkpoint path using . 
Got error: Expected AtomicRenameTemporaryPath (/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/coinrun/maskgit/train_dynamics_maskgit/3532466/080000) to end with "".orbax-checkpoint-tmp"".\r\nWARNING:absl:Path /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/coinrun/maskgit/train_dynamics_maskgit/3532466/120000 could not be identified as a temporary checkpoint path using . Got error: Expected AtomicRenameTemporaryPath (/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/coinrun/maskgit/train_dynamics_maskgit/3532466/120000) to end with "".orbax-checkpoint-tmp"".\r\n",,terminal_output +4479,2609811,"TERMINAL",0,0,"WARNING:absl:Path /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/coinrun/maskgit/train_dynamics_maskgit/3532466/140000 could not be identified as a temporary checkpoint path using . Got error: Expected AtomicRenameTemporaryPath (/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/coinrun/maskgit/train_dynamics_maskgit/3532466/140000) to end with "".orbax-checkpoint-tmp"".\r\nWARNING:absl:Path /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/coinrun/maskgit/train_dynamics_maskgit/3532466/180000 could not be identified as a temporary checkpoint path using . Got error: Expected AtomicRenameTemporaryPath (/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/coinrun/maskgit/train_dynamics_maskgit/3532466/180000) to end with "".orbax-checkpoint-tmp"".\r\nWARNING:absl:Path /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/coinrun/maskgit/train_dynamics_maskgit/3532466/060000 could not be identified as a temporary checkpoint path using . Got error: Expected AtomicRenameTemporaryPath (/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/coinrun/maskgit/train_dynamics_maskgit/3532466/060000) to end with "".orbax-checkpoint-tmp"".\r\nWARNING:absl:Path /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/coinrun/maskgit/train_dynamics_maskgit/3532466/198000 could not be identified as a temporary checkpoint path using . Got error: Expected AtomicRenameTemporaryPath (/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/coinrun/maskgit/train_dynamics_maskgit/3532466/198000) to end with "".orbax-checkpoint-tmp"".\r\nWARNING:absl:Path /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/coinrun/maskgit/train_dynamics_maskgit/3532466/199000 could not be identified as a temporary checkpoint path using . Got error: Expected AtomicRenameTemporaryPath (/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/coinrun/maskgit/train_dynamics_maskgit/3532466/199000) to end with "".orbax-checkpoint-tmp"".\r\nWARNING:absl:Path /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/coinrun/maskgit/train_dynamics_maskgit/3532466/100000 could not be identified as a temporary checkpoint path using . Got error: Expected AtomicRenameTemporaryPath (/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/coinrun/maskgit/train_dynamics_maskgit/3532466/100000) to end with "".orbax-checkpoint-tmp"".\r\nWARNING:absl:Path /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/coinrun/maskgit/train_dynamics_maskgit/3532466/160000 could not be identified as a temporary checkpoint path using . 
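The wall of `WARNING:absl` lines above is benign: while gathering existing checkpoints, Orbax probes each step directory to see whether it is an unfinished temporary checkpoint, and finalized directories (which lack the `.orbax-checkpoint-tmp` suffix) make that probe raise, producing one warning per directory. A sketch of the shape of that check; the helper is illustrative, not Orbax's implementation:

```python
# Minimal sketch of the benign check behind the WARNING:absl lines above.
# The suffix constant matches the log text; the helper is illustrative.
TMP_SUFFIX = ".orbax-checkpoint-tmp"

def looks_like_tmp_checkpoint(path: str) -> bool:
    if not path.endswith(TMP_SUFFIX):
        raise ValueError(
            f"Expected AtomicRenameTemporaryPath ({path}) to end with {TMP_SUFFIX!r}."
        )
    return True

for step_dir in ("040000", "200000", "080000"):
    try:
        looks_like_tmp_checkpoint(f".../train_dynamics_maskgit/3532466/{step_dir}")
    except ValueError:
        pass  # finalized checkpoint: safe to restore from, warning logged
```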
Got error: Expected AtomicRenameTemporaryPath (/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/coinrun/maskgit/train_dynamics_maskgit/3532466/160000) to end with "".orbax-checkpoint-tmp"".\r\nWARNING:absl:Path /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/coinrun/maskgit/train_dynamics_maskgit/3532466/020000 could not be identified as a temporary checkpoint path using . Got error: Expected AtomicRenameTemporaryPath (/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/coinrun/maskgit/train_dynamics_maskgit/3532466/020000) to end with "".orbax-checkpoint-tmp"".\r\n",,terminal_output +4480,2610514,"TERMINAL",0,0,"6",,terminal_output +4481,2611545,"TERMINAL",0,0,"7",,terminal_output +4482,2612601,"TERMINAL",0,0,"8",,terminal_output +4483,2613058,"TERMINAL",0,0,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/.venv/lib64/python3.12/site-packages/orbax/checkpoint/_src/serialization/type_handlers.py:1269: UserWarning: Sharding info not provided when restoring. Populating sharding info from sharding file. Please note restoration time will be slightly increased due to reading from file. Note also that this option is unsafe when restoring on a different topology than the checkpoint was saved with.\r\n warnings.warn(\r\n",,terminal_output +4484,2613574,"TERMINAL",0,0,"9",,terminal_output +4485,2614775,"TERMINAL",0,0,"1:00",,terminal_output +4486,2615748,"TERMINAL",0,0,"1",,terminal_output +4487,2616934,"TERMINAL",0,0,"2",,terminal_output +4488,2617768,"TERMINAL",0,0,"3",,terminal_output +4489,2618741,"TERMINAL",0,0,"4",,terminal_output +4490,2619785,"TERMINAL",0,0,"5",,terminal_output +4491,2620705,"TERMINAL",0,0,"Running on 1 devices.\r\nCounting all components: ['dynamics', 'lam', 'tokenizer']\r\nParameter counts:\r\n{'dynamics': 26561024, 'lam': 17640416, 'tokenizer': 34489696, 'total': 78691136}\r\nRestored dataloader and model state from step 200000\r\nStarting training from step 200000...\r\nCalculating validation metrics...\r\n",,terminal_output +4492,2620928,"TERMINAL",0,0,"6",,terminal_output +4493,2621843,"TERMINAL",0,0,"7",,terminal_output +4494,2622837,"TERMINAL",0,0,"8",,terminal_output +4495,2623839,"TERMINAL",0,0,"9",,terminal_output +4496,2624881,"TERMINAL",0,0,"10",,terminal_output +4497,2625919,"TERMINAL",0,0,"1",,terminal_output +4498,2626957,"TERMINAL",0,0,"2",,terminal_output +4499,2627927,"TERMINAL",0,0,"3",,terminal_output +4500,2628948,"TERMINAL",0,0,"4",,terminal_output +4501,2629969,"TERMINAL",0,0,"5",,terminal_output +4502,2630997,"TERMINAL",0,0,"6",,terminal_output +4503,2632013,"TERMINAL",0,0,"7",,terminal_output +4504,2633075,"TERMINAL",0,0,"8",,terminal_output +4505,2634103,"TERMINAL",0,0,"9",,terminal_output +4506,2635126,"TERMINAL",0,0,"20",,terminal_output +4507,2636148,"TERMINAL",0,0,"1",,terminal_output +4508,2637192,"TERMINAL",0,0,"2",,terminal_output +4509,2638321,"TERMINAL",0,0,"3",,terminal_output +4510,2639292,"TERMINAL",0,0,"4",,terminal_output +4511,2640605,"TERMINAL",0,0,"5",,terminal_output +4512,2641385,"TERMINAL",0,0,"6",,terminal_output +4513,2642505,"TERMINAL",0,0,"7",,terminal_output +4514,2643460,"TERMINAL",0,0,"8",,terminal_output +4515,2644417,"TERMINAL",0,0,"9",,terminal_output +4516,2645459,"TERMINAL",0,0,"30",,terminal_output +4517,2646383,"TERMINAL",0,0,"1",,terminal_output +4518,2647459,"TERMINAL",0,0,"2",,terminal_output +4519,2648547,"TERMINAL",0,0,"3",,terminal_output +4520,2649621,"TERMINAL",0,0,"4",,terminal_output 
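The `Parameter counts:` line above reports per-component totals plus their sum. A small sketch of the usual way to produce such a dict from JAX parameter PyTrees; dummy trees stand in for the real dynamics/lam/tokenizer modules:

```python
import jax
import jax.numpy as jnp

def count_params(pytree) -> int:
    """Total number of array elements across all leaves of a parameter PyTree."""
    return sum(int(jnp.size(leaf)) for leaf in jax.tree_util.tree_leaves(pytree))

# Dummy stand-ins; in the run above these are the real component parameter trees.
components = {
    "dynamics": {"w": jnp.zeros((4, 4))},
    "lam": {"w": jnp.zeros((2, 2))},
    "tokenizer": {"w": jnp.zeros((3, 3))},
}
counts = {name: count_params(tree) for name, tree in components.items()}
counts["total"] = sum(counts.values())
print(counts)
# The run above prints, analogously:
# {'dynamics': 26561024, 'lam': 17640416, 'tokenizer': 34489696, 'total': 78691136}
```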
+4521,2650755,"TERMINAL",0,0,"6",,terminal_output +4522,2651894,"TERMINAL",0,0,"7",,terminal_output +4523,2652936,"TERMINAL",0,0,"8",,terminal_output +4524,2653958,"TERMINAL",0,0,"9",,terminal_output +4525,2654781,"TERMINAL",0,0,"40",,terminal_output +4526,2655899,"TERMINAL",0,0,"1",,terminal_output +4527,2656985,"TERMINAL",0,0,"2",,terminal_output +4528,2657828,"TERMINAL",0,0,"3",,terminal_output +4529,2658905,"TERMINAL",0,0,"4",,terminal_output +4530,2659818,"TERMINAL",0,0,"5",,terminal_output +4531,2660764,"TERMINAL",0,0,"6",,terminal_output +4532,2661851,"TERMINAL",0,0,"7",,terminal_output +4533,2662924,"TERMINAL",0,0,"8",,terminal_output +4534,2663891,"TERMINAL",0,0,"9",,terminal_output +4535,2664976,"TERMINAL",0,0,"50",,terminal_output +4536,2666108,"TERMINAL",0,0,"1",,terminal_output +4537,2667143,"TERMINAL",0,0,"2",,terminal_output +4538,2668148,"TERMINAL",0,0,"3",,terminal_output +4539,2669182,"TERMINAL",0,0,"4",,terminal_output +4540,2670212,"TERMINAL",0,0,"5",,terminal_output +4541,2671239,"TERMINAL",0,0,"6",,terminal_output +4542,2672200,"TERMINAL",0,0,"7",,terminal_output +4543,2673235,"TERMINAL",0,0,"8",,terminal_output +4544,2674254,"TERMINAL",0,0,"9",,terminal_output +4545,2675287,"TERMINAL",0,0,"2:00",,terminal_output +4546,2676305,"TERMINAL",0,0,"1",,terminal_output +4547,2677286,"TERMINAL",0,0,"2",,terminal_output +4548,2678317,"TERMINAL",0,0,"3",,terminal_output +4549,2679350,"TERMINAL",0,0,"4",,terminal_output +4550,2680385,"TERMINAL",0,0,"5",,terminal_output +4551,2681394,"TERMINAL",0,0,"6",,terminal_output +4552,2682418,"TERMINAL",0,0,"7",,terminal_output +4553,2683432,"TERMINAL",0,0,"8",,terminal_output +4554,2684472,"TERMINAL",0,0,"9",,terminal_output +4555,2685500,"TERMINAL",0,0,"10",,terminal_output +4556,2686508,"TERMINAL",0,0,"1",,terminal_output +4557,2687507,"TERMINAL",0,0,"2",,terminal_output +4558,2688565,"TERMINAL",0,0,"3",,terminal_output +4559,2689603,"TERMINAL",0,0,"4",,terminal_output +4560,2690644,"TERMINAL",0,0,"5",,terminal_output +4561,2691631,"TERMINAL",0,0,"7",,terminal_output +4562,2692667,"TERMINAL",0,0,"8",,terminal_output +4563,2693742,"TERMINAL",0,0,"9",,terminal_output +4564,2694709,"TERMINAL",0,0,"20",,terminal_output +4565,2695756,"TERMINAL",0,0,"1",,terminal_output +4566,2696739,"TERMINAL",0,0,"2",,terminal_output +4567,2697950,"TERMINAL",0,0,"3",,terminal_output +4568,2698793,"TERMINAL",0,0,"4",,terminal_output +4569,2699733,"TERMINAL",0,0,"5",,terminal_output +4570,2700772,"TERMINAL",0,0,"6",,terminal_output +4571,2701868,"TERMINAL",0,0,"7",,terminal_output +4572,2702915,"TERMINAL",0,0,"8",,terminal_output +4573,2703981,"TERMINAL",0,0,"9",,terminal_output +4574,2704905,"TERMINAL",0,0,"30",,terminal_output +4575,2706083,"TERMINAL",0,0,"1",,terminal_output +4576,2707026,"TERMINAL",0,0,"2",,terminal_output +4577,2708083,"TERMINAL",0,0,"3",,terminal_output +4578,2709048,"TERMINAL",0,0,"4",,terminal_output +4579,2710044,"TERMINAL",0,0,"5",,terminal_output +4580,2711060,"TERMINAL",0,0,"6",,terminal_output +4581,2712088,"TERMINAL",0,0,"7",,terminal_output +4582,2713106,"TERMINAL",0,0,"8",,terminal_output +4583,2714127,"TERMINAL",0,0,"9",,terminal_output +4584,2715155,"TERMINAL",0,0,"40",,terminal_output +4585,2716161,"TERMINAL",0,0,"1",,terminal_output +4586,2717178,"TERMINAL",0,0,"2",,terminal_output +4587,2718201,"TERMINAL",0,0,"3",,terminal_output +4588,2719232,"TERMINAL",0,0,"4",,terminal_output +4589,2720294,"TERMINAL",0,0,"5",,terminal_output +4590,2720326,"TERMINAL",0,0,"Step 200000, validation loss: 
0.7374703884124756\r\n{'val_codebook_usage_lam': np.float64(1.0), 'val_codebook_usage_tokenizer': np.float64(0.8475988051470589), 'val_cross_entropy_loss': np.float64(0.7374703171206456), 'val_entropy': np.float64(0.8575162127906201), 'val_masked_token_top16_accuracy': np.float64(0.9825415529456794), 'val_masked_token_top1_accuracy': np.float64(0.776269306154812), 'val_masked_token_top2_accuracy': np.float64(0.8820458533717137), 'val_masked_token_top5_accuracy': np.float64(0.949582414299834), 'val_psnr': np.float64(22.455391640756645), 'val_select_logit': np.float64(15.983110203462488), 'val_select_p': np.float64(0.7558641293469597), 'val_ssim': np.float64(0.6962924330842262), 'val_total_loss': np.float64(0.7374703171206456), 'val_z_loss': np.float64(313.3120548023897), 'val_loss': np.float32(0.7374704), 'val_full_frame_codebook_usage_lam': np.float64(1.0), 'val_full_frame_codebook_usage_tokenizer': np.float64(0.2629250919117647), 'val_full_frame_cross_entropy_loss': np.float64(1.7930428303924262), 'val_full_frame_entropy': np.float64(0.252041938550332), 'val_full_frame_masked_token_top16_accuracy': np.float64(0.9929032676360187), 'val_full_frame_masked_token_top1_accuracy': np.float64(0.8329210959228814), 'val_full_frame_masked_token_top2_accuracy': np.float64(0.9222704674683365), 'val_full_frame_masked_token_top5_accuracy': np.float64(0.9736630788036421), 'val_full_frame_psnr': np.float64(28.751719717885933), 'val_full_frame_select_logit': np.float64(100.21908793729894), 'val_full_frame_select_p': np.float64(0.912218968073527), 'val_full_frame_ssim': np.float64(0.8437704747798396), 'val_full_frame_z_loss': np.float64(51192.01493566176), 'val_full_frame_loss': np.float32(1.7930428)}\r\n",,terminal_output +4591,2720727,"TERMINAL",0,0,"W1008 23:22:46.240735 1293303 pjrt_client.cc:1469] WatchJobStateAsync failed for task goo.gle/debugstr job_name: ""jax_worker"": CANCELLED: CANCELLED\r\nAdditional GRPC error information from remote target coordination_service while calling /tensorflow.CoordinationService/WatchJobState:\r\n:UNKNOWN:Error received from peer {grpc_message:""CANCELLED"", grpc_status:1} [type.googleapis.com/tensorflow.DerivedStatus='']\r\n",,terminal_output +4592,2721095,"TERMINAL",0,0,"/usr/lib64/python3.12/multiprocessing/resource_tracker.py:254: UserWarning: resource_tracker: There appear to be 2 leaked shared_memory objects to clean up at shutdown\r\n warnings.warn('resource_tracker: There appear to be %d '\r\n",,terminal_output +4593,2721312,"TERMINAL",0,0,"6",,terminal_output +4594,2721407,"TERMINAL",0,0,"]0;tum_cte0515@hkn0401:~/Projects/jasmine[?2004h(jasmine) [tum_cte0515@hkn0401 jasmine]$ ",,terminal_output +4595,2722327,"TERMINAL",0,0,"7",,terminal_output +4596,2723359,"TERMINAL",0,0,"8",,terminal_output +4597,2724395,"TERMINAL",0,0,"9",,terminal_output +4598,2725398,"TERMINAL",0,0,"50",,terminal_output +4599,2726414,"TERMINAL",0,0,"1",,terminal_output +4600,2727469,"TERMINAL",0,0,"2",,terminal_output +4601,2728460,"TERMINAL",0,0,"3",,terminal_output +4602,2729484,"TERMINAL",0,0,"4",,terminal_output +4603,2730507,"TERMINAL",0,0,"6",,terminal_output +4604,2731588,"TERMINAL",0,0,"7",,terminal_output +4605,2732701,"TERMINAL",0,0,"8",,terminal_output +4606,2733686,"TERMINAL",0,0,"9",,terminal_output +4607,2734665,"TERMINAL",0,0,"3:00",,terminal_output +4608,2735761,"TERMINAL",0,0,"1",,terminal_output +4609,2736728,"TERMINAL",0,0,"2",,terminal_output +4610,2737835,"TERMINAL",0,0,"3",,terminal_output +4611,2738885,"TERMINAL",0,0,"4",,terminal_output 
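A hedged sketch of how the `val_masked_token_top{1,2,5,16}_accuracy` numbers in the metrics dict above are typically computed for MaskGIT-style models: top-k accuracy of the predicted token logits, averaged over masked positions only. Names and shapes are assumptions; only the metric definition is being illustrated:

```python
import jax.numpy as jnp

def topk_accuracy(logits, targets, mask, k: int):
    """logits: (..., V); targets: (...) int ids; mask: (...) bool for masked tokens."""
    topk = jnp.argsort(logits, axis=-1)[..., -k:]        # indices of the k largest logits
    hit = jnp.any(topk == targets[..., None], axis=-1)   # is the target inside the top-k?
    return jnp.sum(hit * mask) / jnp.maximum(jnp.sum(mask), 1)

logits = jnp.array([[[2.0, 1.0, 0.5], [0.1, 3.0, 0.2]]])  # (1, 2, V=3)
targets = jnp.array([[1, 1]])
mask = jnp.array([[True, True]])
print(topk_accuracy(logits, targets, mask, k=1))  # 0.5: only the second token hits
print(topk_accuracy(logits, targets, mask, k=2))  # 1.0
```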
+4612,2739854,"TERMINAL",0,0,"5",,terminal_output +4613,2740704,"TERMINAL",0,0,"6",,terminal_output +4614,2741740,"TERMINAL",0,0,"7",,terminal_output +4615,2742773,"TERMINAL",0,0,"8",,terminal_output +4616,2743801,"TERMINAL",0,0,"9",,terminal_output +4617,2744788,"TERMINAL",0,0,"10",,terminal_output +4618,2745808,"TERMINAL",0,0,"1",,terminal_output +4619,2746833,"TERMINAL",0,0,"2",,terminal_output +4620,2747849,"TERMINAL",0,0,"3",,terminal_output +4621,2748865,"TERMINAL",0,0,"4",,terminal_output +4622,2749892,"TERMINAL",0,0,"5",,terminal_output +4623,2750912,"TERMINAL",0,0,"6",,terminal_output +4624,2751939,"TERMINAL",0,0,"7",,terminal_output +4625,2752965,"TERMINAL",0,0,"8",,terminal_output +4626,2753993,"TERMINAL",0,0,"9",,terminal_output +4627,2755027,"TERMINAL",0,0,"20",,terminal_output +4628,2756022,"TERMINAL",0,0,"1",,terminal_output +4629,2757094,"TERMINAL",0,0,"2",,terminal_output +4630,2758065,"TERMINAL",0,0,"3",,terminal_output +4631,2759115,"TERMINAL",0,0,"4",,terminal_output +4632,2760103,"TERMINAL",0,0,"5",,terminal_output +4633,2761129,"TERMINAL",0,0,"6",,terminal_output +4634,2762152,"TERMINAL",0,0,"7",,terminal_output +4635,2763182,"TERMINAL",0,0,"8",,terminal_output +4636,2764238,"TERMINAL",0,0,"9",,terminal_output +4637,2764468,"appendix_c_nodes.md",0,0,"\n\n noise_stddev = 0.5 # Standard deviation for the Gaussian noise\n noise = jax.random.normal(batch[""mask_rng""], vid_embed_BTNM.shape) * noise_stddev\n vid_embed_BTNM += noise\n\n## Additive embeddings\n\n### no noise\nStep 200000, validation loss: 0.7587440609931946\n{'val_codebook_usage_lam': np.float64(1.0), 'val_codebook_usage_tokenizer': np.float64(0.8475988051470589), 'val_cross_entropy_loss': np.float64(0.758744008400861), 'val_entropy': np.float64(0.8734586110302046), 'val_masked_token_top16_accuracy': np.float64(0.9815394200530707), 'val_masked_token_top1_accuracy': np.float64(0.772024557870977), 'val_masked_token_top2_accuracy': np.float64(0.8777434533717585), 'val_masked_token_top5_accuracy': np.float64(0.9463883007273954), 'val_psnr': np.float64(18.16088799869313), 'val_select_logit': np.float64(15.902265399110084), 'val_select_p': np.float64(0.7548150642245424), 'val_ssim': np.float64(0.6326266468739977), 'val_total_loss': np.float64(0.758744008400861), 'val_z_loss': np.float64(310.9372079886642), 'val_loss': np.float32(0.75874406), 'val_full_frame_codebook_usage_lam': np.float64(1.0), 'val_full_frame_codebook_usage_tokenizer': np.float64(0.2629250919117647), 'val_full_frame_cross_entropy_loss': np.float64(20.045630099726658), 'val_full_frame_entropy': np.float64(1.0269374473422181), 'val_full_frame_masked_token_top16_accuracy': np.float64(0.7251559460864347), 'val_full_frame_masked_token_top1_accuracy': np.float64(0.38186273446270064), 'val_full_frame_masked_token_top2_accuracy': np.float64(0.47075533224087135), 'val_full_frame_masked_token_top5_accuracy': np.float64(0.5903074694614784), 'val_full_frame_psnr': np.float64(16.088213939292757), 'val_full_frame_select_logit': np.float64(84.10442965638404), 'val_full_frame_select_p': np.float64(0.7601301389582017), 'val_full_frame_ssim': np.float64(0.5480527725874209), 'val_full_frame_z_loss': np.float64(108920.47901348039), 'val_full_frame_loss': np.float32(20.045631)}\n\n### noise std=0.1\nStep 200000, validation loss: 1.9375224113464355\n{'val_codebook_usage_lam': np.float64(1.0), 'val_codebook_usage_tokenizer': np.float64(0.8475988051470589), 'val_cross_entropy_loss': np.float64(1.9375225586049698), 'val_entropy': np.float64(2.236036305334054), 
'val_masked_token_top16_accuracy': np.float64(0.8798762057341781), 'val_masked_token_top1_accuracy': np.float64(0.5657584152969659), 'val_masked_token_top2_accuracy': np.float64(0.689856609877418), 'val_masked_token_top5_accuracy': np.float64(0.798516615933063), 'val_psnr': np.float64(15.530764074886546), 'val_select_logit': np.float64(10.11456164191751), 'val_select_p': np.float64(0.501306425707013), 'val_ssim': np.float64(0.5097568672077328), 'val_total_loss': np.float64(1.9375225586049698), 'val_z_loss': np.float64(149.545166015625), 'val_loss': np.float32(1.9375224), 'val_full_frame_codebook_usage_lam': np.float64(1.0), 'val_full_frame_codebook_usage_tokenizer': np.float64(0.2629250919117647), 'val_full_frame_cross_entropy_loss': np.float64(20.045630099726658), 'val_full_frame_entropy': np.float64(1.0269374473422181), 'val_full_frame_masked_token_top16_accuracy': np.float64(0.7251559460864347), 'val_full_frame_masked_token_top1_accuracy': np.float64(0.38186273446270064), 'val_full_frame_masked_token_top2_accuracy': np.float64(0.47075533224087135), 'val_full_frame_masked_token_top5_accuracy': np.float64(0.5903074694614784), 'val_full_frame_psnr': np.float64(16.088213939292757), 'val_full_frame_select_logit': np.float64(84.10442965638404), 'val_full_frame_select_p': np.float64(0.7601301389582017), 'val_full_frame_ssim': np.float64(0.5480527725874209), 'val_full_frame_z_loss': np.float64(108920.47901348039), 'val_full_frame_loss': np.float32(20.045631)}\n\n### noise std=0.2\nStep 200000, validation loss: 4.311364650726318\n{'val_codebook_usage_lam': np.float64(1.0), 'val_codebook_usage_tokenizer': np.float64(0.8475988051470589), 'val_cross_entropy_loss': np.float64(4.3113649312187645), 'val_entropy': np.float64(4.482920328776042), 'val_masked_token_top16_accuracy': np.float64(0.5900948678745943), 'val_masked_token_top1_accuracy': np.float64(0.24632565764819875), 'val_masked_token_top2_accuracy': np.float64(0.33755794225954544), 'val_masked_token_top5_accuracy': np.float64(0.4589417284610225), 'val_psnr': np.float64(12.573247198965035), 'val_select_logit': np.float64(6.313630515453863), 'val_select_p': np.float64(0.16665662007004606), 'val_ssim': np.float64(0.3333607175770928), 'val_total_loss': np.float64(4.3113649312187645), 'val_z_loss': np.float64(77.57380646350337), 'val_loss': np.float32(4.3113647), 'val_full_frame_codebook_usage_lam': np.float64(1.0), 'val_full_frame_codebook_usage_tokenizer': np.float64(0.2629250919117647), 'val_full_frame_cross_entropy_loss': np.float64(20.045630099726658), 'val_full_frame_entropy': np.float64(1.0269374473422181), 'val_full_frame_masked_token_top16_accuracy': np.float64(0.7251559460864347), 'val_full_frame_masked_token_top1_accuracy': np.float64(0.38186273446270064), 'val_full_frame_masked_token_top2_accuracy': np.float64(0.47075533224087135), 'val_full_frame_masked_token_top5_accuracy': np.float64(0.5903074694614784), 'val_full_frame_psnr': np.float64(16.088213939292757), 'val_full_frame_select_logit': np.float64(84.10442965638404), 'val_full_frame_select_p': np.float64(0.7601301389582017), 'val_full_frame_ssim': np.float64(0.5480527725874209), 'val_full_frame_z_loss': np.float64(108920.47901348039), 'val_full_frame_loss': np.float32(20.045631)}\n\n### noise std=0.5\nStep 200000, validation loss: 6.960869312286377\n{'val_codebook_usage_lam': np.float64(1.0), 'val_codebook_usage_tokenizer': np.float64(0.8475988051470589), 'val_cross_entropy_loss': np.float64(6.960869602128571), 'val_entropy': np.float64(5.414477582071342), 
'val_masked_token_top16_accuracy': np.float64(0.18385726447198905), 'val_masked_token_top1_accuracy': np.float64(0.0353936998940566), 'val_masked_token_top2_accuracy': np.float64(0.05846153243499644), 'val_masked_token_top5_accuracy': np.float64(0.10274799825514064), 'val_psnr': np.float64(10.914788994134641), 'val_select_logit': np.float64(5.507573454987769), 'val_select_p': np.float64(0.07029049901985655), 'val_ssim': np.float64(0.2510850712949154), 'val_total_loss': np.float64(6.960869602128571), 'val_z_loss': np.float64(69.57815611596202), 'val_loss': np.float32(6.9608693), 'val_full_frame_codebook_usage_lam': np.float64(1.0), 'val_full_frame_codebook_usage_tokenizer': np.float64(0.2629250919117647), 'val_full_frame_cross_entropy_loss': np.float64(20.045630099726658), 'val_full_frame_entropy': np.float64(1.0269374473422181), 'val_full_frame_masked_token_top16_accuracy': np.float64(0.7251559460864347), 'val_full_frame_masked_token_top1_accuracy': np.float64(0.38186273446270064), 'val_full_frame_masked_token_top2_accuracy': np.float64(0.47075533224087135), 'val_full_frame_masked_token_top5_accuracy': np.float64(0.5903074694614784), 'val_full_frame_psnr': np.float64(16.088213939292757), 'val_full_frame_select_logit': np.float64(84.10442965638404), 'val_full_frame_select_p': np.float64(0.7601301389582017), 'val_full_frame_ssim': np.float64(0.5480527725874209), 'val_full_frame_z_loss': np.float64(108920.47901348039), 'val_full_frame_loss': np.float32(20.045631)}\n\n## Concat",markdown,tab +4638,2765266,"TERMINAL",0,0,"30",,terminal_output +4639,2766299,"TERMINAL",0,0,"1",,terminal_output +4640,2767142,"appendix_c_nodes.md",7275,0,"",markdown,selection_mouse +4641,2767186,"appendix_c_nodes.md",7274,0,"",markdown,selection_command +4642,2767316,"TERMINAL",0,0,"2",,terminal_output +4643,2767909,"appendix_c_nodes.md",7275,0,"\n",markdown,content +4644,2768095,"appendix_c_nodes.md",7276,0,"\n",markdown,content +4645,2768329,"TERMINAL",0,0,"3",,terminal_output +4646,2768653,"appendix_c_nodes.md",7277,0,"Step 200000, validation loss: 0.7374703884124756\n{'val_codebook_usage_lam': np.float64(1.0), 'val_codebook_usage_tokenizer': np.float64(0.8475988051470589), 'val_cross_entropy_loss': np.float64(0.7374703171206456), 'val_entropy': np.float64(0.8575162127906201), 'val_masked_token_top16_accuracy': np.float64(0.9825415529456794), 'val_masked_token_top1_accuracy': np.float64(0.776269306154812), 'val_masked_token_top2_accuracy': np.float64(0.8820458533717137), 'val_masked_token_top5_accuracy': np.float64(0.949582414299834), 'val_psnr': np.float64(22.455391640756645), 'val_select_logit': np.float64(15.983110203462488), 'val_select_p': np.float64(0.7558641293469597), 'val_ssim': np.float64(0.6962924330842262), 'val_total_loss': np.float64(0.7374703171206456), 'val_z_loss': np.float64(313.3120548023897), 'val_loss': np.float32(0.7374704), 'val_full_frame_codebook_usage_lam': np.float64(1.0), 'val_full_frame_codebook_usage_tokenizer': np.float64(0.2629250919117647), 'val_full_frame_cross_entropy_loss': np.float64(1.7930428303924262), 'val_full_frame_entropy': np.float64(0.252041938550332), 'val_full_frame_masked_token_top16_accuracy': np.float64(0.9929032676360187), 'val_full_frame_masked_token_top1_accuracy': np.float64(0.8329210959228814), 'val_full_frame_masked_token_top2_accuracy': np.float64(0.9222704674683365), 'val_full_frame_masked_token_top5_accuracy': np.float64(0.9736630788036421), 'val_full_frame_psnr': np.float64(28.751719717885933), 'val_full_frame_select_logit': 
np.float64(100.21908793729894), 'val_full_frame_select_p': np.float64(0.912218968073527), 'val_full_frame_ssim': np.float64(0.8437704747798396), 'val_full_frame_z_loss': np.float64(51192.01493566176), 'val_full_frame_loss': np.float32(1.7930428)}",markdown,content +4647,2769389,"TERMINAL",0,0,"4",,terminal_output +4648,2770367,"TERMINAL",0,0,"5",,terminal_output +4649,2770408,"appendix_c_nodes.md",9015,0,"",markdown,selection_command +4650,2771424,"TERMINAL",0,0,"6",,terminal_output +4651,2771645,"appendix_c_nodes.md",195,0,"",markdown,selection_mouse +4652,2771646,"appendix_c_nodes.md",194,0,"",markdown,selection_command +4653,2772449,"appendix_c_nodes.md",164,31," vid_embed_BTNM += noise",markdown,selection_command +4654,2772453,"TERMINAL",0,0,"7",,terminal_output +4655,2772700,"appendix_c_nodes.md",74,121," noise = jax.random.normal(batch[""mask_rng""], vid_embed_BTNM.shape) * noise_stddev\n vid_embed_BTNM += noise",markdown,selection_command +4656,2772985,"appendix_c_nodes.md",2,193," noise_stddev = 0.5 # Standard deviation for the Gaussian noise\n noise = jax.random.normal(batch[""mask_rng""], vid_embed_BTNM.shape) * noise_stddev\n vid_embed_BTNM += noise",markdown,selection_command +4657,2773433,"TERMINAL",0,0,"8",,terminal_output +4658,2774123,"appendix_c_nodes.md",2,194,"",markdown,content +4659,2774484,"TERMINAL",0,0,"9",,terminal_output +4660,2775472,"jasmine/train_dynamics_appendix-c.py",0,0,"",python,tab +4661,2775571,"TERMINAL",0,0,"40",,terminal_output +4662,2776509,"TERMINAL",0,0,"2",,terminal_output +4663,2776905,"jasmine/models/dynamics.py",0,0,"",python,tab +4664,2777561,"TERMINAL",0,0,"3",,terminal_output +4665,2778555,"TERMINAL",0,0,"4",,terminal_output +4666,2779612,"TERMINAL",0,0,"5",,terminal_output +4667,2780586,"TERMINAL",0,0,"6",,terminal_output +4668,2781636,"TERMINAL",0,0,"7",,terminal_output +4669,2782712,"TERMINAL",0,0,"8",,terminal_output +4670,2783690,"TERMINAL",0,0,"9",,terminal_output +4671,2784727,"TERMINAL",0,0,"50",,terminal_output +4672,2785792,"TERMINAL",0,0,"1",,terminal_output +4673,2786428,"jasmine/models/dynamics.py",3691,0,"",python,selection_mouse +4674,2786789,"TERMINAL",0,0,"2",,terminal_output +4675,2787760,"TERMINAL",0,0,"3",,terminal_output +4676,2788014,"jasmine/models/dynamics.py",3927,0,"",python,selection_mouse +4677,2788788,"TERMINAL",0,0,"4",,terminal_output +4678,2788808,"jasmine/models/dynamics.py",3927,0,"\n noise_stddev = 0.5 # Standard deviation for the Gaussian noise\n noise = jax.random.normal(batch[""mask_rng""], vid_embed_BTNM.shape) * noise_stddev\n vid_embed_BTNM += noise",python,content +4679,2788833,"jasmine/models/dynamics.py",3936,0,"",python,selection_command +4680,2789586,"jasmine/models/dynamics.py",4008,0,"",python,selection_command +4681,2789704,"jasmine/models/dynamics.py",4098,0,"",python,selection_command +4682,2789864,"TERMINAL",0,0,"5",,terminal_output +4683,2790718,"jasmine/models/dynamics.py",4121,0,"\n ",python,content +4684,2790833,"TERMINAL",0,0,"6",,terminal_output +4685,2791047,"jasmine/models/dynamics.py",4122,8,"",python,content +4686,2791850,"TERMINAL",0,0,"7",,terminal_output +4687,2792120,"jasmine/models/dynamics.py",3953,0,"",python,selection_mouse +4688,2792877,"TERMINAL",0,0,"8",,terminal_output +4689,2793146,"jasmine/models/dynamics.py",3953,1,"1",python,content +4690,2793934,"TERMINAL",0,0,"9",,terminal_output +4691,2794923,"TERMINAL",0,0,"4:00",,terminal_output +4692,2794978,"TERMINAL",0,0,"sh slurm/jobs/mihir/horeka/coinrun/ablations/appendix-c.sh",,terminal_output 
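Sequences 4640-4689 show the snippet being cut from `appendix_c_nodes.md` and pasted into `jasmine/models/dynamics.py`, then the stddev literal being edited (sequence 4689 replaces one character, consistent with `0.5` becoming `0.1`). Restated as a self-contained sketch, with assumed shapes and setup:

```python
# The code pasted into jasmine/models/dynamics.py at sequence 4678, made
# self-contained. Shapes and the surrounding setup are assumptions.
import jax
import jax.numpy as jnp

batch = {"mask_rng": jax.random.PRNGKey(0)}
vid_embed_BTNM = jnp.zeros((2, 4, 16, 32))  # (batch, time, tokens, model dim)

noise_stddev = 0.1  # Standard deviation for the Gaussian noise (edited from 0.5)
noise = jax.random.normal(batch["mask_rng"], vid_embed_BTNM.shape) * noise_stddev
vid_embed_BTNM += noise
```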
+4693,2795951,"TERMINAL",0,0,"1",,terminal_output +4694,2797011,"TERMINAL",0,0,"\r\n[?2004l\r\r\n# Log the sbatch script\r\ncat $0\r\n\r\nmodule unload mpi/openmpi/5.0\r\nmodule unload devel/cuda/12.4\r\nsource .venv/bin/activate\r\n\r\narray_records_dir_train=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_coinrun/coinrun_episodes_10m_gt_actions_split/train\r\narray_records_dir_val=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_coinrun/coinrun_episodes_10m_gt_actions_split/val\r\n\r\njob_name=$SLURM_JOB_NAME\r\nslurm_job_id=$SLURM_JOB_ID\r\n\r\n# CHECKPOINT_DIR=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/coinrun/maskgit/$job_name/$slurm_job_id\r\n\r\n# main\r\n# CHECKPOINT_DIR=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/coinrun/maskgit/train_dynamics_maskgit/3533426\r\n\r\n# noise branch\r\nCHECKPOINT_DIR=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/coinrun/maskgit/train_dynamics_maskgit/3532466\r\n\r\nmkdir -p $CHECKPOINT_DIR\r\n\r\ntokenizer_ckpt_dir=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/big-runs/tokenizer/train_tokenizer_default/3528955\r\n\r\nenv | grep SLURM\r\n\r\nsrun python jasmine/train_dynamics_appendix-c.py \\r\n --save_ckpt \\r\n --restore_ckpt \\r\n --wandb_id $SLURM_JOB_ID \\r\n --ckpt_dir $CHECKPOINT_DIR \\r\n --batch_size=110 \\r\n --image_height=64 \\r\n --image_width=64 \\r\n --patch_size=16 \\r\n --max_lr=3e-5 \\r\n --no-log \\r\n --no-eval-full-frame \\r\n --tokenizer_checkpoint=$tokenizer_ckpt_dir \\r\n --eval_full_frame \\r\n --data_dir $array_records_dir_train \\r\n --val_data_dir $array_records_dir_val \r\n",,terminal_output +4695,2797012,"TERMINAL",0,0,"2",,terminal_output +4696,2797101,"TERMINAL",0,0,"SLURM_STEP_NUM_TASKS=1\r\nSLURM_JOB_USER=tum_cte0515\r\nSLURM_TASKS_PER_NODE=1\r\nSLURM_JOB_UID=999226\r\nSLURM_TASK_PID=1279162\r\nSLURM_JOB_GPUS=0\r\nSLURM_LOCALID=0\r\nSLURM_SUBMIT_DIR=/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine\r\nSLURMD_NODENAME=hkn0401\r\nSLURM_JOB_START_TIME=1759955924\r\nSLURM_STEP_NODELIST=hkn0401\r\nSLURM_CLUSTER_NAME=hk\r\nSLURM_JOB_END_TIME=1759959524\r\nSLURM_PMI2_SRUN_PORT=34473\r\nSLURM_CPUS_ON_NODE=8\r\nSLURM_JOB_CPUS_PER_NODE=8\r\nSLURM_GPUS_ON_NODE=1\r\nSLURM_GTIDS=0\r\nSLURM_JOB_PARTITION=dev_accelerated\r\nSLURM_TRES_PER_TASK=cpu=8\r\nSLURM_OOM_KILL_STEP=0\r\nSLURM_JOB_NUM_NODES=1\r\nSLURM_STEPID=4294967290\r\nSLURM_JOBID=3554157\r\nSLURM_PTY_PORT=43479\r\nSLURM_JOB_QOS=normal\r\nSLURM_LAUNCH_NODE_IPADDR=10.0.7.201\r\nSLURM_PTY_WIN_ROW=35\r\nSLURM_PMI2_PROC_MAPPING=(vector,(0,1,1))\r\nSLURMD_DEBUG=2\r\nSLURM_PROCID=0\r\nSLURM_CPUS_PER_TASK=8\r\nSLURM_TOPOLOGY_ADDR=hkibb.hkibbi1.hkibbi1e11.hkn0401\r\nSLURM_TOPOLOGY_ADDR_PATTERN=switch.switch.switch.node\r\nSLURM_SRUN_COMM_HOST=10.0.7.201\r\nSLURM_SCRIPT_CONTEXT=prolog_task\r\nSLURM_PTY_WIN_COL=181\r\nSLURM_NODELIST=hkn0401\r\nSLURM_SRUN_COMM_PORT=33897\r\nSLURM_STEP_ID=4294967290\r\nSLURM_JOB_ACCOUNT=hk-project-p0023960\r\nSLURM_PRIO_PROCESS=0\r\nSLURM_NNODES=1\r\nSLURM_SUBMIT_HOST=hkn1993.localdomain\r\nSLURM_JOB_ID=3554157\r\nSLURM_NODEID=0\r\nSLURM_STEP_NUM_NODES=1\r\nSLURM_STEP_TASKS_PER_NODE=1\r\nSLURM_MPI_TYPE=pmi2\r\nSLURM_PMI2_STEP_NODES=hkn0401\r\nSLURM_CONF=/etc/slurm/slurm.conf\r\nSLURM_JOB_NAME=interactive\r\nSLURM_STEP_LAUNCHER_PORT=33897\r\nSLURM_JOB_GID=502226\r\nSLURM_JOB_NODELIST=hkn0401\r\n",,terminal_output +4697,2797244,"TERMINAL",0,0,"GpuFreq=control_disabled\r\n",,terminal_output 
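The job re-echoes its script and dumps `env | grep SLURM` on every launch; inside the training process the same allocation facts are available through `os.environ`. A small illustrative helper (the variable names match the dump above; the helper itself is an assumption):

```python
import os

def slurm_info() -> dict:
    """Collect a few allocation facts Slurm exports into the job environment."""
    keys = ("SLURM_JOB_ID", "SLURM_JOB_NAME", "SLURM_NNODES",
            "SLURM_GPUS_ON_NODE", "SLURM_CPUS_PER_TASK")
    return {k: os.environ.get(k) for k in keys}

# For the run above this would yield e.g.
# {'SLURM_JOB_ID': '3554157', 'SLURM_JOB_NAME': 'interactive', 'SLURM_NNODES': '1', ...}
```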
+4698,2798008,"TERMINAL",0,0,"3",,terminal_output +4699,2798194,"TERMINAL",0,0,"watch",,terminal_focus +4700,2798860,"TERMINAL",0,0,"[?1049l\r[?1l>]0;tum_cte0515@hkn1993:~/Projects/jasmine",,terminal_output +4701,2800867,"TERMINAL",0,0,"queue",,terminal_command +4702,2800952,"TERMINAL",0,0,"]633;C[?1049h(B[?7hEvery 1.0s: squeue --mehkn1993.localdomain: Wed Oct 8 23:24:06 2025JOBID PARTITION NAME USER ST\tTIME NODES NODELIST(REASON)3549097 accelerat tokenize tum_cte0 R 1-12:40:53\t 1 hkn06323554157 dev_accel interact tum_cte0 R45:22\t 1 hkn0401",,terminal_output +4703,2801989,"TERMINAL",0,0,"[?1049l\r[?1l>]0;tum_cte0515@hkn1993:~/Projects/jasmine",,terminal_output +4704,2803218,"TERMINAL",0,0,"watch -n1 git rev-parse --abbrev-ref HEAD",,terminal_command +4705,2803294,"TERMINAL",0,0,"]633;C[?1049h(B[?7hEvery 1.0s: git rev-parse --abbrev-ref HEADhkn1993.localdomain: Wed Oct 8 23:24:08 2025add-noise-to-combat-exposure-bias",,terminal_output +4706,2804320,"TERMINAL",0,0,"9",,terminal_output +4707,2805339,"TERMINAL",0,0,"10",,terminal_output +4708,2806358,"TERMINAL",0,0,"1",,terminal_output +4709,2807412,"TERMINAL",0,0,"2",,terminal_output +4710,2807937,"TERMINAL",0,0,"WARNING:absl:Path /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/coinrun/maskgit/train_dynamics_maskgit/3532466/040000 could not be identified as a temporary checkpoint path using . Got error: Expected AtomicRenameTemporaryPath (/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/coinrun/maskgit/train_dynamics_maskgit/3532466/040000) to end with "".orbax-checkpoint-tmp"".\r\nWARNING:absl:Path /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/coinrun/maskgit/train_dynamics_maskgit/3532466/200000 could not be identified as a temporary checkpoint path using . Got error: Expected AtomicRenameTemporaryPath (/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/coinrun/maskgit/train_dynamics_maskgit/3532466/200000) to end with "".orbax-checkpoint-tmp"".\r\nWARNING:absl:Path /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/coinrun/maskgit/train_dynamics_maskgit/3532466/080000 could not be identified as a temporary checkpoint path using . Got error: Expected AtomicRenameTemporaryPath (/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/coinrun/maskgit/train_dynamics_maskgit/3532466/080000) to end with "".orbax-checkpoint-tmp"".\r\nWARNING:absl:Path /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/coinrun/maskgit/train_dynamics_maskgit/3532466/120000 could not be identified as a temporary checkpoint path using . Got error: Expected AtomicRenameTemporaryPath (/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/coinrun/maskgit/train_dynamics_maskgit/3532466/120000) to end with "".orbax-checkpoint-tmp"".\r\nWARNING:absl:Path /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/coinrun/maskgit/train_dynamics_maskgit/3532466/140000 could not be identified as a temporary checkpoint path using . Got error: Expected AtomicRenameTemporaryPath (/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/coinrun/maskgit/train_dynamics_maskgit/3532466/140000) to end with "".orbax-checkpoint-tmp"".\r\nWARNING:absl:Path /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/coinrun/maskgit/train_dynamics_maskgit/3532466/180000 could not be identified as a temporary checkpoint path using . 
Got error: Expected AtomicRenameTemporaryPath (/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/coinrun/maskgit/train_dynamics_maskgit/3532466/180000) to end with "".orbax-checkpoint-tmp"".\r\nWARNING:absl:Path /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/coinrun/maskgit/train_dynamics_maskgit/3532466/060000 could not be identified as a temporary checkpoint path using . Got error: Expected AtomicRenameTemporaryPath (/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/coinrun/maskgit/train_dynamics_maskgit/3532466/060000) to end with "".orbax-checkpoint-tmp"".\r\nWARNING:absl:Path /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/coinrun/maskgit/train_dynamics_maskgit/3532466/198000 could not be identified as a temporary checkpoint path using . Got error: Expected AtomicRenameTemporaryPath (/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/coinrun/maskgit/train_dynamics_maskgit/3532466/198000) to end with "".orbax-checkpoint-tmp"".\r\nWARNING:absl:Path /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/coinrun/maskgit/train_dynamics_maskgit/3532466/199000 could not be identified as a temporary checkpoint path using . Got error: Expected AtomicRenameTemporaryPath (/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/coinrun/maskgit/train_dynamics_maskgit/3532466/199000) to end with "".orbax-checkpoint-tmp"".\r\nWARNING:absl:Path /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/coinrun/maskgit/train_dynamics_maskgit/3532466/100000 could not be identified as a temporary checkpoint path using . Got error: Expected AtomicRenameTemporaryPath (/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/coinrun/maskgit/train_dynamics_maskgit/3532466/100000) to end with "".orbax-checkpoint-tmp"".\r\nWARNING:absl:Path /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/coinrun/maskgit/train_dynamics_maskgit/3532466/160000 could not be identified as a temporary checkpoint path using . Got error: Expected AtomicRenameTemporaryPath (/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/coinrun/maskgit/train_dynamics_maskgit/3532466/160000) to end with "".orbax-checkpoint-tmp"".\r\nWARNING:absl:Path /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/coinrun/maskgit/train_dynamics_maskgit/3532466/020000 could not be identified as a temporary checkpoint path using . Got error: Expected AtomicRenameTemporaryPath (/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/coinrun/maskgit/train_dynamics_maskgit/3532466/020000) to end with "".orbax-checkpoint-tmp"".\r\n",,terminal_output +4711,2808412,"TERMINAL",0,0,"3",,terminal_output +4712,2809433,"TERMINAL",0,0,"4",,terminal_output +4713,2810486,"TERMINAL",0,0,"5",,terminal_output +4714,2810555,"TERMINAL",0,0,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/.venv/lib64/python3.12/site-packages/orbax/checkpoint/_src/serialization/type_handlers.py:1269: UserWarning: Sharding info not provided when restoring. Populating sharding info from sharding file. Please note restoration time will be slightly increased due to reading from file. 
Note also that this option is unsafe when restoring on a different topology than the checkpoint was saved with.\r\n warnings.warn(\r\n",,terminal_output +4715,2811486,"TERMINAL",0,0,"6",,terminal_output +4716,2812582,"TERMINAL",0,0,"8",,terminal_output +4717,2813521,"TERMINAL",0,0,"9",,terminal_output +4718,2814649,"TERMINAL",0,0,"20",,terminal_output +4719,2815614,"TERMINAL",0,0,"1",,terminal_output +4720,2816606,"TERMINAL",0,0,"2",,terminal_output +4721,2817814,"TERMINAL",0,0,"3",,terminal_output +4722,2818547,"TERMINAL",0,0,"Running on 1 devices.\r\nCounting all components: ['dynamics', 'lam', 'tokenizer']\r\nParameter counts:\r\n{'dynamics': 26561024, 'lam': 17640416, 'tokenizer': 34489696, 'total': 78691136}\r\nRestored dataloader and model state from step 200000\r\nStarting training from step 200000...\r\nCalculating validation metrics...\r\n",,terminal_output +4723,2818696,"TERMINAL",0,0,"4",,terminal_output +4724,2819706,"TERMINAL",0,0,"5",,terminal_output +4725,2820709,"TERMINAL",0,0,"6",,terminal_output +4726,2821711,"TERMINAL",0,0,"7",,terminal_output +4727,2822730,"TERMINAL",0,0,"8",,terminal_output +4728,2823371,"TERMINAL",0,0,"Traceback (most recent call last):\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/jasmine/train_dynamics_appendix-c.py"", line 639, in \r\n main(args)\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/jasmine/train_dynamics_appendix-c.py"", line 626, in main\r\n calculate_validation_metrics(\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/jasmine/train_dynamics_appendix-c.py"", line 569, in calculate_validation_metrics\r\n val_outputs = val_step(genie, batch)\r\n ^^^^^^^^^^^^^^^^^^^^^^\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/.venv/lib64/python3.12/site-packages/flax/nnx/transforms/compilation.py"", line 431, in __call__\r\n pure_args_out, pure_kwargs_out, pure_out = self.jitted_fn(\r\n ^^^^^^^^^^^^^^^\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/.venv/lib64/python3.12/site-packages/flax/nnx/transforms/compilation.py"", line 126, in __call__\r\n out = self.f(*args, **kwargs)\r\n ^^^^^^^^^^^^^^^^^^^^^^^\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/jasmine/train_dynamics_appendix-c.py"", line 509, in val_step\r\n (loss, (recon, metrics)) = dynamics_loss_fn(genie, inputs)\r\n ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/jasmine/train_dynamics_appendix-c.py"", line 495, in dynamics_loss_fn\r\n outputs = model(inputs)\r\n ^^^^^^^^^^^^^\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/jasmine/genie.py"", line 196, in __call__\r\n dyna_logits_BTNV, dyna_mask = self.dynamics(outputs)\r\n ^^^^^^^^^^^^^^^^^^^^^^\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/jasmine/models/dynamics.py"", line 122, in __call__\r\n noise = jax.random.normal(batch[""mask_rng""], vid_embed_BTNM.shape) * noise_stddev\r\n ~~~~~^^^^^^^^^^^^\r\nKeyError: 'mask_rng'\r\n--------------------\r\nFor simplicity, JAX has removed its internal frames from the traceback of the following exception. 
Set JAX_TRACEBACK_FILTERING=off to include these.\r\n",,terminal_output +4729,2823835,"TERMINAL",0,0,"9",,terminal_output +4730,2824901,"TERMINAL",0,0,"30",,terminal_output +4731,2825878,"TERMINAL",0,0,"1",,terminal_output +4732,2826940,"TERMINAL",0,0,"2",,terminal_output +4733,2828034,"TERMINAL",0,0,"3",,terminal_output +4734,2828939,"TERMINAL",0,0,"4",,terminal_output +4735,2829845,"TERMINAL",0,0,"W1008 23:24:35.244876 1295304 pjrt_client.cc:1469] WatchJobStateAsync failed for task goo.gle/debugstr job_name: ""jax_worker"": UNAVAILABLE: Cancelling all calls\r\nAdditional GRPC error information from remote target coordination_service while calling /tensorflow.CoordinationService/WatchJobState:\r\n:UNKNOWN:Error received from peer {grpc_status:14, grpc_message:""Cancelling all calls""}\r\n",,terminal_output +4736,2829979,"TERMINAL",0,0,"5",,terminal_output +4737,2830168,"TERMINAL",0,0,"/usr/lib64/python3.12/multiprocessing/resource_tracker.py:254: UserWarning: resource_tracker: There appear to be 21 leaked shared_memory objects to clean up at shutdown\r\n warnings.warn('resource_tracker: There appear to be %d '\r\n",,terminal_output +4738,2830474,"TERMINAL",0,0,"srun: error: hkn0401: task 0: Exited with exit code 1\r\n]0;tum_cte0515@hkn0401:~/Projects/jasmine[?2004h(jasmine) [tum_cte0515@hkn0401 jasmine]$ ",,terminal_output +4739,2831033,"TERMINAL",0,0,"6",,terminal_output +4740,2832050,"TERMINAL",0,0,"7",,terminal_output +4741,2833090,"TERMINAL",0,0,"8",,terminal_output +4742,2834122,"TERMINAL",0,0,"9",,terminal_output +4743,2835164,"TERMINAL",0,0,"40",,terminal_output +4744,2836122,"TERMINAL",0,0,"1",,terminal_output +4745,2837174,"TERMINAL",0,0,"2",,terminal_output +4746,2838184,"TERMINAL",0,0,"3",,terminal_output +4747,2838597,"jasmine/models/dynamics.py",0,0,"",python,tab +4748,2839217,"TERMINAL",0,0,"4",,terminal_output +4749,2840141,"TERMINAL",0,0,"5",,terminal_output +4750,2841229,"TERMINAL",0,0,"6",,terminal_output +4751,2841544,"jasmine/models/dynamics.py",4045,0,"",python,selection_mouse +4752,2842153,"TERMINAL",0,0,"7",,terminal_output +4753,2843242,"TERMINAL",0,0,"8",,terminal_output +4754,2843522,"jasmine/models/dynamics.py",4046,0,"",python,selection_command +4755,2844088,"jasmine/models/dynamics.py",4045,1,"",python,content +4756,2844266,"jasmine/models/dynamics.py",4044,1,"",python,content +4757,2844269,"TERMINAL",0,0,"9",,terminal_output +4758,2844440,"jasmine/models/dynamics.py",4043,1,"",python,content +4759,2844536,"jasmine/models/dynamics.py",4042,1,"",python,content +4760,2844667,"jasmine/models/dynamics.py",4041,1,"",python,content +4761,2845282,"TERMINAL",0,0,"50",,terminal_output +4762,2845738,"TERMINAL",0,0,"srun",,terminal_focus +4763,2846311,"TERMINAL",0,0,"1",,terminal_output +4764,2846803,"TERMINAL",0,0,"sh slurm/jobs/mihir/horeka/coinrun/ablations/appendix-c.sh",,terminal_output +4765,2847227,"TERMINAL",0,0,"\r\n[?2004l\r\r\n# Log the sbatch script\r\ncat $0\r\n\r\nmodule unload mpi/openmpi/5.0\r\nmodule unload devel/cuda/12.4\r\nsource .venv/bin/activate\r\n\r\narray_records_dir_train=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_coinrun/coinrun_episodes_10m_gt_actions_split/train\r\narray_records_dir_val=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_coinrun/coinrun_episodes_10m_gt_actions_split/val\r\n\r\njob_name=$SLURM_JOB_NAME\r\nslurm_job_id=$SLURM_JOB_ID\r\n\r\n# CHECKPOINT_DIR=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/coinrun/maskgit/$job_name/$slurm_job_id\r\n\r\n# main\r\n# 
CHECKPOINT_DIR=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/coinrun/maskgit/train_dynamics_maskgit/3533426\r\n\r\n# noise branch\r\nCHECKPOINT_DIR=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/coinrun/maskgit/train_dynamics_maskgit/3532466\r\n\r\nmkdir -p $CHECKPOINT_DIR\r\n\r\ntokenizer_ckpt_dir=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/big-runs/tokenizer/train_tokenizer_default/3528955\r\n\r\nenv | grep SLURM\r\n\r\nsrun python jasmine/train_dynamics_appendix-c.py \\r\n --save_ckpt \\r\n --restore_ckpt \\r\n --wandb_id $SLURM_JOB_ID \\r\n --ckpt_dir $CHECKPOINT_DIR \\r\n --batch_size=110 \\r\n --image_height=64 \\r\n --image_width=64 \\r\n --patch_size=16 \\r\n --max_lr=3e-5 \\r\n --no-log \\r\n --no-eval-full-frame \\r\n --tokenizer_checkpoint=$tokenizer_ckpt_dir \\r\n --eval_full_frame \\r\n --data_dir $array_records_dir_train \\r\n --val_data_dir $array_records_dir_val \r\n",,terminal_output +4766,2847321,"TERMINAL",0,0,"2",,terminal_output +4767,2847390,"TERMINAL",0,0,"SLURM_STEP_NUM_TASKS=1\r\nSLURM_JOB_USER=tum_cte0515\r\nSLURM_TASKS_PER_NODE=1\r\nSLURM_JOB_UID=999226\r\nSLURM_TASK_PID=1279162\r\nSLURM_JOB_GPUS=0\r\nSLURM_LOCALID=0\r\nSLURM_SUBMIT_DIR=/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine\r\nSLURMD_NODENAME=hkn0401\r\nSLURM_JOB_START_TIME=1759955924\r\nSLURM_STEP_NODELIST=hkn0401\r\nSLURM_CLUSTER_NAME=hk\r\nSLURM_JOB_END_TIME=1759959524\r\nSLURM_PMI2_SRUN_PORT=34473\r\nSLURM_CPUS_ON_NODE=8\r\nSLURM_JOB_CPUS_PER_NODE=8\r\nSLURM_GPUS_ON_NODE=1\r\nSLURM_GTIDS=0\r\nSLURM_JOB_PARTITION=dev_accelerated\r\nSLURM_TRES_PER_TASK=cpu=8\r\nSLURM_OOM_KILL_STEP=0\r\nSLURM_JOB_NUM_NODES=1\r\nSLURM_STEPID=4294967290\r\nSLURM_JOBID=3554157\r\nSLURM_PTY_PORT=43479\r\nSLURM_JOB_QOS=normal\r\nSLURM_LAUNCH_NODE_IPADDR=10.0.7.201\r\nSLURM_PTY_WIN_ROW=35\r\nSLURM_PMI2_PROC_MAPPING=(vector,(0,1,1))\r\nSLURMD_DEBUG=2\r\nSLURM_PROCID=0\r\nSLURM_CPUS_PER_TASK=8\r\nSLURM_TOPOLOGY_ADDR=hkibb.hkibbi1.hkibbi1e11.hkn0401\r\nSLURM_TOPOLOGY_ADDR_PATTERN=switch.switch.switch.node\r\nSLURM_SRUN_COMM_HOST=10.0.7.201\r\nSLURM_SCRIPT_CONTEXT=prolog_task\r\nSLURM_PTY_WIN_COL=181\r\nSLURM_NODELIST=hkn0401\r\nSLURM_SRUN_COMM_PORT=33897\r\nSLURM_STEP_ID=4294967290\r\nSLURM_JOB_ACCOUNT=hk-project-p0023960\r\nSLURM_PRIO_PROCESS=0\r\nSLURM_NNODES=1\r\nSLURM_SUBMIT_HOST=hkn1993.localdomain\r\nSLURM_JOB_ID=3554157\r\nSLURM_NODEID=0\r\nSLURM_STEP_NUM_NODES=1\r\nSLURM_STEP_TASKS_PER_NODE=1\r\nSLURM_MPI_TYPE=pmi2\r\nSLURM_PMI2_STEP_NODES=hkn0401\r\nSLURM_CONF=/etc/slurm/slurm.conf\r\nSLURM_JOB_NAME=interactive\r\nSLURM_STEP_LAUNCHER_PORT=33897\r\nSLURM_JOB_GID=502226\r\nSLURM_JOB_NODELIST=hkn0401\r\n",,terminal_output +4768,2847496,"TERMINAL",0,0,"GpuFreq=control_disabled\r\n",,terminal_output +4769,2848351,"TERMINAL",0,0,"3",,terminal_output +4770,2849429,"TERMINAL",0,0,"4",,terminal_output +4771,2850486,"TERMINAL",0,0,"5",,terminal_output +4772,2851449,"TERMINAL",0,0,"6",,terminal_output +4773,2852489,"TERMINAL",0,0,"7",,terminal_output +4774,2853525,"TERMINAL",0,0,"8",,terminal_output +4775,2854407,"TERMINAL",0,0,"9",,terminal_output +4776,2855211,"jasmine/models/dynamics.py",0,0,"",python,tab +4777,2855506,"TERMINAL",0,0,"5:00",,terminal_output +4778,2856636,"TERMINAL",0,0,"1",,terminal_output +4779,2857731,"TERMINAL",0,0,"2",,terminal_output +4780,2858043,"TERMINAL",0,0,"WARNING:absl:Path /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/coinrun/maskgit/train_dynamics_maskgit/3532466/040000 could not be 
identified as a temporary checkpoint path using . Got error: Expected AtomicRenameTemporaryPath (/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/coinrun/maskgit/train_dynamics_maskgit/3532466/040000) to end with "".orbax-checkpoint-tmp"".\r\nWARNING:absl:Path /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/coinrun/maskgit/train_dynamics_maskgit/3532466/200000 could not be identified as a temporary checkpoint path using . Got error: Expected AtomicRenameTemporaryPath (/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/coinrun/maskgit/train_dynamics_maskgit/3532466/200000) to end with "".orbax-checkpoint-tmp"".\r\nWARNING:absl:Path /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/coinrun/maskgit/train_dynamics_maskgit/3532466/080000 could not be identified as a temporary checkpoint path using . Got error: Expected AtomicRenameTemporaryPath (/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/coinrun/maskgit/train_dynamics_maskgit/3532466/080000) to end with "".orbax-checkpoint-tmp"".\r\nWARNING:absl:Path /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/coinrun/maskgit/train_dynamics_maskgit/3532466/120000 could not be identified as a temporary checkpoint path using . Got error: Expected AtomicRenameTemporaryPath (/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/coinrun/maskgit/train_dynamics_maskgit/3532466/120000) to end with "".orbax-checkpoint-tmp"".\r\nWARNING:absl:Path /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/coinrun/maskgit/train_dynamics_maskgit/3532466/140000 could not be identified as a temporary checkpoint path using . Got error: Expected AtomicRenameTemporaryPath (/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/coinrun/maskgit/train_dynamics_maskgit/3532466/140000) to end with "".orbax-checkpoint-tmp"".\r\nWARNING:absl:Path /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/coinrun/maskgit/train_dynamics_maskgit/3532466/180000 could not be identified as a temporary checkpoint path using . Got error: Expected AtomicRenameTemporaryPath (/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/coinrun/maskgit/train_dynamics_maskgit/3532466/180000) to end with "".orbax-checkpoint-tmp"".\r\nWARNING:absl:Path /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/coinrun/maskgit/train_dynamics_maskgit/3532466/060000 could not be identified as a temporary checkpoint path using . Got error: Expected AtomicRenameTemporaryPath (/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/coinrun/maskgit/train_dynamics_maskgit/3532466/060000) to end with "".orbax-checkpoint-tmp"".\r\nWARNING:absl:Path /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/coinrun/maskgit/train_dynamics_maskgit/3532466/198000 could not be identified as a temporary checkpoint path using . Got error: Expected AtomicRenameTemporaryPath (/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/coinrun/maskgit/train_dynamics_maskgit/3532466/198000) to end with "".orbax-checkpoint-tmp"".\r\nWARNING:absl:Path /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/coinrun/maskgit/train_dynamics_maskgit/3532466/199000 could not be identified as a temporary checkpoint path using . 
Got error: Expected AtomicRenameTemporaryPath (/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/coinrun/maskgit/train_dynamics_maskgit/3532466/199000) to end with "".orbax-checkpoint-tmp"".\r\nWARNING:absl:Path /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/coinrun/maskgit/train_dynamics_maskgit/3532466/100000 could not be identified as a temporary checkpoint path using . Got error: Expected AtomicRenameTemporaryPath (/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/coinrun/maskgit/train_dynamics_maskgit/3532466/100000) to end with "".orbax-checkpoint-tmp"".\r\nWARNING:absl:Path /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/coinrun/maskgit/train_dynamics_maskgit/3532466/160000 could not be identified as a temporary checkpoint path using . Got error: Expected AtomicRenameTemporaryPath (/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/coinrun/maskgit/train_dynamics_maskgit/3532466/160000) to end with "".orbax-checkpoint-tmp"".\r\nWARNING:absl:Path /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/coinrun/maskgit/train_dynamics_maskgit/3532466/020000 could not be identified as a temporary checkpoint path using . Got error: Expected AtomicRenameTemporaryPath (/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/coinrun/maskgit/train_dynamics_maskgit/3532466/020000) to end with "".orbax-checkpoint-tmp"".\r\n",,terminal_output +4781,2858635,"TERMINAL",0,0,"3",,terminal_output +4782,2859809,"TERMINAL",0,0,"5",,terminal_output +4783,2860736,"TERMINAL",0,0,"6",,terminal_output +4784,2861039,"TERMINAL",0,0,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/.venv/lib64/python3.12/site-packages/orbax/checkpoint/_src/serialization/type_handlers.py:1269: UserWarning: Sharding info not provided when restoring. Populating sharding info from sharding file. Please note restoration time will be slightly increased due to reading from file. 
Note also that this option is unsafe when restoring on a different topology than the checkpoint was saved with.\r\n warnings.warn(\r\n",,terminal_output +4785,2861607,"TERMINAL",0,0,"7",,terminal_output +4786,2862650,"TERMINAL",0,0,"8",,terminal_output +4787,2863585,"TERMINAL",0,0,"9",,terminal_output +4788,2864605,"TERMINAL",0,0,"10",,terminal_output +4789,2865626,"TERMINAL",0,0,"1",,terminal_output +4790,2866650,"TERMINAL",0,0,"2",,terminal_output +4791,2867674,"TERMINAL",0,0,"3",,terminal_output +4792,2868702,"TERMINAL",0,0,"4",,terminal_output +4793,2868888,"TERMINAL",0,0,"Running on 1 devices.\r\nCounting all components: ['dynamics', 'lam', 'tokenizer']\r\nParameter counts:\r\n{'dynamics': 26561024, 'lam': 17640416, 'tokenizer': 34489696, 'total': 78691136}\r\nRestored dataloader and model state from step 200000\r\nStarting training from step 200000...\r\nCalculating validation metrics...\r\n",,terminal_output +4794,2869725,"TERMINAL",0,0,"5",,terminal_output +4795,2870732,"TERMINAL",0,0,"6",,terminal_output +4796,2871760,"TERMINAL",0,0,"7",,terminal_output +4797,2872779,"TERMINAL",0,0,"8",,terminal_output +4798,2873802,"TERMINAL",0,0,"9",,terminal_output +4799,2874857,"TERMINAL",0,0,"20",,terminal_output +4800,2875888,"TERMINAL",0,0,"1",,terminal_output +4801,2876853,"TERMINAL",0,0,"2",,terminal_output +4802,2877872,"TERMINAL",0,0,"3",,terminal_output +4803,2878892,"TERMINAL",0,0,"4",,terminal_output +4804,2879920,"TERMINAL",0,0,"5",,terminal_output +4805,2880942,"TERMINAL",0,0,"6",,terminal_output +4806,2881969,"TERMINAL",0,0,"7",,terminal_output +4807,2882978,"TERMINAL",0,0,"8",,terminal_output +4808,2884009,"TERMINAL",0,0,"9",,terminal_output +4809,2885051,"TERMINAL",0,0,"30",,terminal_output +4810,2886077,"TERMINAL",0,0,"1",,terminal_output +4811,2887109,"TERMINAL",0,0,"2",,terminal_output +4812,2888144,"TERMINAL",0,0,"3",,terminal_output +4813,2889155,"TERMINAL",0,0,"4",,terminal_output +4814,2890188,"TERMINAL",0,0,"5",,terminal_output +4815,2891200,"TERMINAL",0,0,"6",,terminal_output +4816,2892219,"TERMINAL",0,0,"7",,terminal_output +4817,2893245,"TERMINAL",0,0,"8",,terminal_output +4818,2894271,"TERMINAL",0,0,"9",,terminal_output +4819,2895288,"TERMINAL",0,0,"40",,terminal_output +4820,2896304,"TERMINAL",0,0,"1",,terminal_output +4821,2897328,"TERMINAL",0,0,"2",,terminal_output +4822,2898348,"TERMINAL",0,0,"3",,terminal_output +4823,2899382,"TERMINAL",0,0,"4",,terminal_output +4824,2900398,"TERMINAL",0,0,"5",,terminal_output +4825,2901422,"TERMINAL",0,0,"6",,terminal_output +4826,2902489,"TERMINAL",0,0,"7",,terminal_output +4827,2903484,"TERMINAL",0,0,"8",,terminal_output +4828,2904822,"TERMINAL",0,0,"9",,terminal_output +4829,2905622,"TERMINAL",0,0,"50",,terminal_output +4830,2906833,"TERMINAL",0,0,"2",,terminal_output +4831,2907740,"TERMINAL",0,0,"3",,terminal_output +4832,2908680,"TERMINAL",0,0,"4",,terminal_output +4833,2909606,"TERMINAL",0,0,"5",,terminal_output +4834,2910666,"TERMINAL",0,0,"6",,terminal_output +4835,2911657,"TERMINAL",0,0,"7",,terminal_output +4836,2912663,"TERMINAL",0,0,"8",,terminal_output +4837,2913683,"TERMINAL",0,0,"9",,terminal_output +4838,2914702,"TERMINAL",0,0,"6:00",,terminal_output +4839,2915738,"TERMINAL",0,0,"1",,terminal_output +4840,2916747,"TERMINAL",0,0,"2",,terminal_output +4841,2917768,"TERMINAL",0,0,"3",,terminal_output +4842,2918793,"TERMINAL",0,0,"4",,terminal_output +4843,2919811,"TERMINAL",0,0,"5",,terminal_output +4844,2920829,"TERMINAL",0,0,"6",,terminal_output +4845,2921858,"TERMINAL",0,0,"7",,terminal_output 
+4846,2922877,"TERMINAL",0,0,"8",,terminal_output +4847,2923996,"TERMINAL",0,0,"9",,terminal_output +4848,2924925,"TERMINAL",0,0,"10",,terminal_output +4849,2926045,"TERMINAL",0,0,"1",,terminal_output +4850,2926974,"TERMINAL",0,0,"2",,terminal_output +4851,2927991,"TERMINAL",0,0,"3",,terminal_output +4852,2929006,"TERMINAL",0,0,"4",,terminal_output +4853,2930036,"TERMINAL",0,0,"5",,terminal_output +4854,2931060,"TERMINAL",0,0,"6",,terminal_output +4855,2932086,"TERMINAL",0,0,"7",,terminal_output +4856,2933107,"TERMINAL",0,0,"8",,terminal_output +4857,2934135,"TERMINAL",0,0,"9",,terminal_output +4858,2935168,"TERMINAL",0,0,"20",,terminal_output +4859,2936144,"TERMINAL",0,0,"1",,terminal_output +4860,2937171,"TERMINAL",0,0,"2",,terminal_output +4861,2938189,"TERMINAL",0,0,"3",,terminal_output +4862,2939250,"TERMINAL",0,0,"4",,terminal_output +4863,2940291,"TERMINAL",0,0,"5",,terminal_output +4864,2941297,"TERMINAL",0,0,"6",,terminal_output +4865,2942302,"TERMINAL",0,0,"7",,terminal_output +4866,2943341,"TERMINAL",0,0,"8",,terminal_output +4867,2944384,"TERMINAL",0,0,"9",,terminal_output +4868,2945419,"TERMINAL",0,0,"30",,terminal_output +4869,2946413,"TERMINAL",0,0,"1",,terminal_output +4870,2947444,"TERMINAL",0,0,"2",,terminal_output +4871,2948439,"TERMINAL",0,0,"3",,terminal_output +4872,2949462,"TERMINAL",0,0,"4",,terminal_output +4873,2950525,"TERMINAL",0,0,"5",,terminal_output +4874,2951540,"TERMINAL",0,0,"7",,terminal_output +4875,2952538,"TERMINAL",0,0,"8",,terminal_output +4876,2953622,"TERMINAL",0,0,"9",,terminal_output +4877,2954567,"TERMINAL",0,0,"40",,terminal_output +4878,2955583,"TERMINAL",0,0,"1",,terminal_output +4879,2956712,"TERMINAL",0,0,"2",,terminal_output +4880,2957625,"TERMINAL",0,0,"3",,terminal_output +4881,2958644,"TERMINAL",0,0,"4",,terminal_output +4882,2959670,"TERMINAL",0,0,"5",,terminal_output +4883,2960700,"TERMINAL",0,0,"6",,terminal_output +4884,2961718,"TERMINAL",0,0,"7",,terminal_output +4885,2962737,"TERMINAL",0,0,"8",,terminal_output +4886,2963758,"TERMINAL",0,0,"9",,terminal_output +4887,2964823,"TERMINAL",0,0,"50",,terminal_output +4888,2965811,"TERMINAL",0,0,"1",,terminal_output +4889,2966819,"TERMINAL",0,0,"2",,terminal_output +4890,2967839,"TERMINAL",0,0,"3",,terminal_output +4891,2968727,"TERMINAL",0,0,"Step 200000, validation loss: 3.6271512508392334\r\n{'val_codebook_usage_lam': np.float64(1.0), 'val_codebook_usage_tokenizer': np.float64(0.8475988051470589), 'val_cross_entropy_loss': np.float64(3.6271509796965358), 'val_entropy': np.float64(4.294355560751522), 'val_masked_token_top16_accuracy': np.float64(0.7079328228445614), 'val_masked_token_top1_accuracy': np.float64(0.29852578511425093), 'val_masked_token_top2_accuracy': np.float64(0.41297777552230686), 'val_masked_token_top5_accuracy': np.float64(0.5610486105376599), 'val_psnr': np.float64(14.66877135108499), 'val_select_logit': np.float64(5.881039871889002), 'val_select_p': np.float64(0.18452119885706433), 'val_ssim': np.float64(0.4027574781109305), 'val_total_loss': np.float64(3.6271509796965358), 'val_z_loss': np.float64(67.83754580628639), 'val_loss': np.float32(3.6271513), 'val_full_frame_codebook_usage_lam': np.float64(1.0), 'val_full_frame_codebook_usage_tokenizer': np.float64(0.2629250919117647), 'val_full_frame_cross_entropy_loss': np.float64(1.7930428303924262), 'val_full_frame_entropy': np.float64(0.252041938550332), 'val_full_frame_masked_token_top16_accuracy': np.float64(0.9929032676360187), 'val_full_frame_masked_token_top1_accuracy': np.float64(0.8329210959228814), 
'val_full_frame_masked_token_top2_accuracy': np.float64(0.9222704674683365), 'val_full_frame_masked_token_top5_accuracy': np.float64(0.9736630788036421), 'val_full_frame_psnr': np.float64(28.751719717885933), 'val_full_frame_select_logit': np.float64(100.21908793729894), 'val_full_frame_select_p': np.float64(0.912218968073527), 'val_full_frame_ssim': np.float64(0.8437704747798396), 'val_full_frame_z_loss': np.float64(51192.01493566176), 'val_full_frame_loss': np.float32(1.7930428)}\r\n",,terminal_output +4892,2968869,"TERMINAL",0,0,"4",,terminal_output +4893,2969883,"TERMINAL",0,0,"5",,terminal_output +4894,2970905,"TERMINAL",0,0,"6",,terminal_output +4895,2971920,"TERMINAL",0,0,"7",,terminal_output +4896,2972941,"TERMINAL",0,0,"8",,terminal_output +4897,2973418,"appendix_c_nodes.md",0,0,"",markdown,tab +4898,2974011,"TERMINAL",0,0,"9",,terminal_output +4899,2974100,"TERMINAL",0,0,"W1008 23:26:59.626095 1296569 pjrt_client.cc:1469] WatchJobStateAsync failed for task goo.gle/debugonly job_name: ""jax_worker"": UNAVAILABLE: Cancelling all calls\r\nAdditional GRPC error information from remote target coordination_service while calling /tensorflow.CoordinationService/WatchJobState:\r\n:UNKNOWN:Error received from peer {grpc_status:14, grpc_message:""Cancelling all calls""}\r\n",,terminal_output +4900,2974495,"TERMINAL",0,0,"/usr/lib64/python3.12/multiprocessing/resource_tracker.py:254: UserWarning: resource_tracker: There appear to be 6 leaked shared_memory objects to clean up at shutdown\r\n warnings.warn('resource_tracker: There appear to be %d '\r\n",,terminal_output +4901,2974794,"TERMINAL",0,0,"]0;tum_cte0515@hkn0401:~/Projects/jasmine[?2004h(jasmine) [tum_cte0515@hkn0401 jasmine]$ ",,terminal_output +4902,2975023,"TERMINAL",0,0,"7:00",,terminal_output +4903,2976007,"TERMINAL",0,0,"1",,terminal_output +4904,2977083,"TERMINAL",0,0,"2",,terminal_output +4905,2978053,"TERMINAL",0,0,"3",,terminal_output +4906,2979157,"TERMINAL",0,0,"4",,terminal_output +4907,2980091,"TERMINAL",0,0,"5",,terminal_output +4908,2981162,"jasmine/models/dynamics.py",0,0,"",python,tab +4909,2981222,"TERMINAL",0,0,"6",,terminal_output +4910,2982131,"TERMINAL",0,0,"7",,terminal_output +4911,2983179,"TERMINAL",0,0,"8",,terminal_output +4912,2984177,"TERMINAL",0,0,"9",,terminal_output +4913,2985229,"TERMINAL",0,0,"10",,terminal_output +4914,2986271,"TERMINAL",0,0,"1",,terminal_output +4915,2987290,"TERMINAL",0,0,"2",,terminal_output +4916,2988359,"TERMINAL",0,0,"3",,terminal_output +4917,2989437,"TERMINAL",0,0,"4",,terminal_output +4918,2990348,"TERMINAL",0,0,"5",,terminal_output +4919,2991368,"TERMINAL",0,0,"6",,terminal_output +4920,2992389,"TERMINAL",0,0,"7",,terminal_output +4921,2993434,"TERMINAL",0,0,"8",,terminal_output +4922,2994425,"TERMINAL",0,0,"9",,terminal_output +4923,2995491,"TERMINAL",0,0,"20",,terminal_output +4924,2996487,"TERMINAL",0,0,"1",,terminal_output +4925,2997505,"TERMINAL",0,0,"2",,terminal_output +4926,2998514,"TERMINAL",0,0,"4",,terminal_output +4927,2999581,"TERMINAL",0,0,"5",,terminal_output +4928,3000601,"TERMINAL",0,0,"6",,terminal_output +4929,3001582,"TERMINAL",0,0,"7",,terminal_output +4930,3002594,"TERMINAL",0,0,"8",,terminal_output +4931,3003615,"TERMINAL",0,0,"9",,terminal_output +4932,3004696,"TERMINAL",0,0,"30",,terminal_output +4933,3005718,"TERMINAL",0,0,"1",,terminal_output +4934,3006747,"TERMINAL",0,0,"2",,terminal_output +4935,3007776,"TERMINAL",0,0,"3",,terminal_output +4936,3008730,"TERMINAL",0,0,"4",,terminal_output +4937,3009737,"TERMINAL",0,0,"5",,terminal_output 
+4938,3010762,"TERMINAL",0,0,"6",,terminal_output +4939,3012129,"TERMINAL",0,0,"7",,terminal_output +4940,3012902,"TERMINAL",0,0,"8",,terminal_output +4941,3013906,"TERMINAL",0,0,"9",,terminal_output +4942,3014839,"TERMINAL",0,0,"40",,terminal_output +4943,3016074,"TERMINAL",0,0,"1",,terminal_output +4944,3017559,"TERMINAL",0,0,"2",,terminal_output +4945,3018342,"TERMINAL",0,0,"3",,terminal_output +4946,3019242,"TERMINAL",0,0,"4",,terminal_output +4947,3020168,"TERMINAL",0,0,"5",,terminal_output +4948,3021383,"TERMINAL",0,0,"6",,terminal_output +4949,3022160,"TERMINAL",0,0,"7",,terminal_output +4950,3023046,"TERMINAL",0,0,"8",,terminal_output +4951,3024051,"TERMINAL",0,0,"9",,terminal_output +4952,3025048,"TERMINAL",0,0,"50",,terminal_output +4953,3026104,"TERMINAL",0,0,"1",,terminal_output +4954,3027108,"TERMINAL",0,0,"2",,terminal_output +4955,3028194,"TERMINAL",0,0,"3",,terminal_output +4956,3029224,"TERMINAL",0,0,"4",,terminal_output +4957,3030226,"TERMINAL",0,0,"5",,terminal_output +4958,3031199,"TERMINAL",0,0,"6",,terminal_output +4959,3032193,"TERMINAL",0,0,"7",,terminal_output +4960,3033214,"TERMINAL",0,0,"8",,terminal_output +4961,3034238,"TERMINAL",0,0,"9",,terminal_output +4962,3035368,"TERMINAL",0,0,"8:00",,terminal_output +4963,3036338,"TERMINAL",0,0,"1",,terminal_output +4964,3037371,"TERMINAL",0,0,"2",,terminal_output +4965,3038382,"TERMINAL",0,0,"3",,terminal_output +4966,3039484,"TERMINAL",0,0,"4",,terminal_output +4967,3040493,"TERMINAL",0,0,"5",,terminal_output +4968,3041456,"TERMINAL",0,0,"6",,terminal_output +4969,3042525,"TERMINAL",0,0,"7",,terminal_output +4970,3043481,"TERMINAL",0,0,"8",,terminal_output +4971,3044501,"TERMINAL",0,0,"10",,terminal_output +4972,3045555,"TERMINAL",0,0,"1",,terminal_output +4973,3046538,"TERMINAL",0,0,"2",,terminal_output +4974,3047618,"TERMINAL",0,0,"3",,terminal_output +4975,3048602,"TERMINAL",0,0,"4",,terminal_output +4976,3049638,"TERMINAL",0,0,"5",,terminal_output +4977,3050681,"TERMINAL",0,0,"6",,terminal_output +4978,3051579,"appendix_c_nodes.md",0,0,"",markdown,tab +4979,3051684,"TERMINAL",0,0,"7",,terminal_output +4980,3052711,"TERMINAL",0,0,"8",,terminal_output +4981,3053708,"appendix_c_nodes.md",8822,0,"",markdown,selection_mouse +4982,3053710,"appendix_c_nodes.md",8821,0,"",markdown,selection_command +4983,3053732,"TERMINAL",0,0,"9",,terminal_output +4984,3054737,"TERMINAL",0,0,"20",,terminal_output +4985,3054808,"appendix_c_nodes.md",8822,0,"\n",markdown,content +4986,3055005,"appendix_c_nodes.md",8823,0,"\n",markdown,content +4987,3055159,"appendix_c_nodes.md",8824,0,"\n",markdown,content +4988,3055754,"TERMINAL",0,0,"1",,terminal_output +4989,3055795,"appendix_c_nodes.md",8825,0,"#",markdown,content +4990,3055795,"appendix_c_nodes.md",8826,0,"",markdown,selection_keyboard +4991,3055932,"appendix_c_nodes.md",8826,0,"#",markdown,content +4992,3055932,"appendix_c_nodes.md",8827,0,"",markdown,selection_keyboard +4993,3056075,"appendix_c_nodes.md",8827,0," ",markdown,content +4994,3056076,"appendix_c_nodes.md",8828,0,"",markdown,selection_keyboard +4995,3056308,"appendix_c_nodes.md",8828,0,"j",markdown,content +4996,3056308,"appendix_c_nodes.md",8829,0,"",markdown,selection_keyboard +4997,3056667,"appendix_c_nodes.md",8829,0,"o",markdown,content +4998,3056667,"appendix_c_nodes.md",8830,0,"",markdown,selection_keyboard +4999,3056826,"appendix_c_nodes.md",8830,0,"i",markdown,content +5000,3056826,"appendix_c_nodes.md",8831,0,"",markdown,selection_keyboard +5001,3056827,"TERMINAL",0,0,"2",,terminal_output 
+5002,3057080,"appendix_c_nodes.md",8830,1,"",markdown,content +5003,3057247,"appendix_c_nodes.md",8829,1,"",markdown,content +5004,3057387,"appendix_c_nodes.md",8828,1,"",markdown,content +5005,3057795,"TERMINAL",0,0,"3",,terminal_output +5006,3058238,"appendix_c_nodes.md",8828,0,"n",markdown,content +5007,3058238,"appendix_c_nodes.md",8829,0,"",markdown,selection_keyboard +5008,3058447,"appendix_c_nodes.md",8829,0,"o",markdown,content +5009,3058447,"appendix_c_nodes.md",8830,0,"",markdown,selection_keyboard +5010,3058488,"appendix_c_nodes.md",8830,0,"i",markdown,content +5011,3058488,"appendix_c_nodes.md",8831,0,"",markdown,selection_keyboard +5012,3058550,"appendix_c_nodes.md",8831,0,"s",markdown,content +5013,3058550,"appendix_c_nodes.md",8832,0,"",markdown,selection_keyboard +5014,3058750,"appendix_c_nodes.md",8832,0,"e",markdown,content +5015,3058751,"appendix_c_nodes.md",8833,0,"",markdown,selection_keyboard +5016,3058841,"appendix_c_nodes.md",8833,0," ",markdown,content +5017,3058842,"appendix_c_nodes.md",8834,0,"",markdown,selection_keyboard +5018,3058842,"TERMINAL",0,0,"4",,terminal_output +5019,3059031,"appendix_c_nodes.md",8834,0,"s",markdown,content +5020,3059032,"appendix_c_nodes.md",8835,0,"",markdown,selection_keyboard +5021,3059536,"appendix_c_nodes.md",8835,0,"t",markdown,content +5022,3059537,"appendix_c_nodes.md",8836,0,"",markdown,selection_keyboard +5023,3059782,"appendix_c_nodes.md",8836,0,"d",markdown,content +5024,3059783,"appendix_c_nodes.md",8837,0,"",markdown,selection_keyboard +5025,3059902,"appendix_c_nodes.md",8837,0," ",markdown,content +5026,3059903,"appendix_c_nodes.md",8838,0,"",markdown,selection_keyboard +5027,3059903,"TERMINAL",0,0,"5",,terminal_output +5028,3060855,"TERMINAL",0,0,"6",,terminal_output +5029,3061423,"appendix_c_nodes.md",8838,0,"0",markdown,content +5030,3061425,"appendix_c_nodes.md",8839,0,"",markdown,selection_keyboard +5031,3061595,"appendix_c_nodes.md",8839,0,".",markdown,content +5032,3061596,"appendix_c_nodes.md",8840,0,"",markdown,selection_keyboard +5033,3061906,"appendix_c_nodes.md",8840,0,"1",markdown,content +5034,3061906,"appendix_c_nodes.md",8841,0,"",markdown,selection_keyboard +5035,3062009,"TERMINAL",0,0,"7",,terminal_output +5036,3062068,"appendix_c_nodes.md",8841,0,"\n",markdown,content +5037,3062709,"appendix_c_nodes.md",8842,0,"Step 200000, validation loss: 3.6271512508392334\n{'val_codebook_usage_lam': np.float64(1.0), 'val_codebook_usage_tokenizer': np.float64(0.8475988051470589), 'val_cross_entropy_loss': np.float64(3.6271509796965358), 'val_entropy': np.float64(4.294355560751522), 'val_masked_token_top16_accuracy': np.float64(0.7079328228445614), 'val_masked_token_top1_accuracy': np.float64(0.29852578511425093), 'val_masked_token_top2_accuracy': np.float64(0.41297777552230686), 'val_masked_token_top5_accuracy': np.float64(0.5610486105376599), 'val_psnr': np.float64(14.66877135108499), 'val_select_logit': np.float64(5.881039871889002), 'val_select_p': np.float64(0.18452119885706433), 'val_ssim': np.float64(0.4027574781109305), 'val_total_loss': np.float64(3.6271509796965358), 'val_z_loss': np.float64(67.83754580628639), 'val_loss': np.float32(3.6271513), 'val_full_frame_codebook_usage_lam': np.float64(1.0), 'val_full_frame_codebook_usage_tokenizer': np.float64(0.2629250919117647), 'val_full_frame_cross_entropy_loss': np.float64(1.7930428303924262), 'val_full_frame_entropy': np.float64(0.252041938550332), 'val_full_frame_masked_token_top16_accuracy': np.float64(0.9929032676360187), 
'val_full_frame_masked_token_top1_accuracy': np.float64(0.8329210959228814), 'val_full_frame_masked_token_top2_accuracy': np.float64(0.9222704674683365), 'val_full_frame_masked_token_top5_accuracy': np.float64(0.9736630788036421), 'val_full_frame_psnr': np.float64(28.751719717885933), 'val_full_frame_select_logit': np.float64(100.21908793729894), 'val_full_frame_select_p': np.float64(0.912218968073527), 'val_full_frame_ssim': np.float64(0.8437704747798396), 'val_full_frame_z_loss': np.float64(51192.01493566176), 'val_full_frame_loss': np.float32(1.7930428)}",markdown,content +5038,3063054,"TERMINAL",0,0,"8",,terminal_output +5039,3063978,"TERMINAL",0,0,"9",,terminal_output +5040,3064966,"TERMINAL",0,0,"30",,terminal_output +5041,3065990,"TERMINAL",0,0,"1",,terminal_output +5042,3067009,"TERMINAL",0,0,"2",,terminal_output +5043,3068025,"TERMINAL",0,0,"3",,terminal_output +5044,3069033,"TERMINAL",0,0,"4",,terminal_output +5045,3069292,"jasmine/models/dynamics.py",0,0,"",python,tab +5046,3070053,"TERMINAL",0,0,"5",,terminal_output +5047,3071094,"TERMINAL",0,0,"6",,terminal_output +5048,3072112,"TERMINAL",0,0,"7",,terminal_output +5049,3073123,"TERMINAL",0,0,"8",,terminal_output +5050,3074159,"TERMINAL",0,0,"9",,terminal_output +5051,3074998,"jasmine/models/dynamics.py",3954,0,"",python,selection_mouse +5052,3075186,"TERMINAL",0,0,"40",,terminal_output +5053,3075962,"jasmine/models/dynamics.py",3953,1,"",python,content +5054,3076224,"TERMINAL",0,0,"1",,terminal_output +5055,3076439,"jasmine/models/dynamics.py",3953,0,"2",python,content +5056,3076440,"jasmine/models/dynamics.py",3954,0,"",python,selection_keyboard +5057,3077208,"TERMINAL",0,0,"2",,terminal_output +5058,3078233,"TERMINAL",0,0,"3",,terminal_output +5059,3079150,"TERMINAL",0,0,"sh slurm/jobs/mihir/horeka/coinrun/ablations/appendix-c.sh",,terminal_output +5060,3079256,"TERMINAL",0,0,"4",,terminal_output +5061,3080295,"TERMINAL",0,0,"5",,terminal_output +5062,3080417,"TERMINAL",0,0,"\r\n[?2004l\r\r\n# Log the sbatch script\r\ncat $0\r\n\r\nmodule unload mpi/openmpi/5.0\r\nmodule unload devel/cuda/12.4\r\nsource .venv/bin/activate\r\n\r\narray_records_dir_train=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_coinrun/coinrun_episodes_10m_gt_actions_split/train\r\narray_records_dir_val=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_coinrun/coinrun_episodes_10m_gt_actions_split/val\r\n\r\njob_name=$SLURM_JOB_NAME\r\nslurm_job_id=$SLURM_JOB_ID\r\n\r\n# CHECKPOINT_DIR=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/coinrun/maskgit/$job_name/$slurm_job_id\r\n\r\n# main\r\n# CHECKPOINT_DIR=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/coinrun/maskgit/train_dynamics_maskgit/3533426\r\n\r\n# noise branch\r\nCHECKPOINT_DIR=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/coinrun/maskgit/train_dynamics_maskgit/3532466\r\n\r\nmkdir -p $CHECKPOINT_DIR\r\n\r\ntokenizer_ckpt_dir=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/big-runs/tokenizer/train_tokenizer_default/3528955\r\n\r\nenv | grep SLURM\r\n\r\nsrun python jasmine/train_dynamics_appendix-c.py \\r\n --save_ckpt \\r\n --restore_ckpt \\r\n --wandb_id $SLURM_JOB_ID \\r\n --ckpt_dir $CHECKPOINT_DIR \\r\n --batch_size=110 \\r\n --image_height=64 \\r\n --image_width=64 \\r\n --patch_size=16 \\r\n --max_lr=3e-5 \\r\n --no-log \\r\n --no-eval-full-frame \\r\n --tokenizer_checkpoint=$tokenizer_ckpt_dir \\r\n --eval_full_frame \\r\n --data_dir $array_records_dir_train \\r\n --val_data_dir 
$array_records_dir_val \r\n",,terminal_output +5063,3080559,"TERMINAL",0,0,"SLURM_STEP_NUM_TASKS=1\r\nSLURM_JOB_USER=tum_cte0515\r\nSLURM_TASKS_PER_NODE=1\r\nSLURM_JOB_UID=999226\r\nSLURM_TASK_PID=1279162\r\nSLURM_JOB_GPUS=0\r\nSLURM_LOCALID=0\r\nSLURM_SUBMIT_DIR=/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine\r\nSLURMD_NODENAME=hkn0401\r\nSLURM_JOB_START_TIME=1759955924\r\nSLURM_STEP_NODELIST=hkn0401\r\nSLURM_CLUSTER_NAME=hk\r\nSLURM_JOB_END_TIME=1759959524\r\nSLURM_PMI2_SRUN_PORT=34473\r\nSLURM_CPUS_ON_NODE=8\r\nSLURM_JOB_CPUS_PER_NODE=8\r\nSLURM_GPUS_ON_NODE=1\r\nSLURM_GTIDS=0\r\nSLURM_JOB_PARTITION=dev_accelerated\r\nSLURM_TRES_PER_TASK=cpu=8\r\nSLURM_OOM_KILL_STEP=0\r\nSLURM_JOB_NUM_NODES=1\r\nSLURM_STEPID=4294967290\r\nSLURM_JOBID=3554157\r\nSLURM_PTY_PORT=43479\r\nSLURM_JOB_QOS=normal\r\nSLURM_LAUNCH_NODE_IPADDR=10.0.7.201\r\nSLURM_PTY_WIN_ROW=35\r\nSLURM_PMI2_PROC_MAPPING=(vector,(0,1,1))\r\nSLURMD_DEBUG=2\r\nSLURM_PROCID=0\r\nSLURM_CPUS_PER_TASK=8\r\nSLURM_TOPOLOGY_ADDR=hkibb.hkibbi1.hkibbi1e11.hkn0401\r\nSLURM_TOPOLOGY_ADDR_PATTERN=switch.switch.switch.node\r\nSLURM_SRUN_COMM_HOST=10.0.7.201\r\nSLURM_SCRIPT_CONTEXT=prolog_task\r\nSLURM_PTY_WIN_COL=181\r\nSLURM_NODELIST=hkn0401\r\nSLURM_SRUN_COMM_PORT=33897\r\nSLURM_STEP_ID=4294967290\r\nSLURM_JOB_ACCOUNT=hk-project-p0023960\r\nSLURM_PRIO_PROCESS=0\r\nSLURM_NNODES=1\r\nSLURM_SUBMIT_HOST=hkn1993.localdomain\r\nSLURM_JOB_ID=3554157\r\nSLURM_NODEID=0\r\nSLURM_STEP_NUM_NODES=1\r\nSLURM_STEP_TASKS_PER_NODE=1\r\nSLURM_MPI_TYPE=pmi2\r\nSLURM_PMI2_STEP_NODES=hkn0401\r\nSLURM_CONF=/etc/slurm/slurm.conf\r\nSLURM_JOB_NAME=interactive\r\nSLURM_STEP_LAUNCHER_PORT=33897\r\nSLURM_JOB_GID=502226\r\nSLURM_JOB_NODELIST=hkn0401\r\n",,terminal_output +5064,3080671,"TERMINAL",0,0,"GpuFreq=control_disabled\r\n",,terminal_output +5065,3081324,"TERMINAL",0,0,"6",,terminal_output +5066,3082461,"TERMINAL",0,0,"7",,terminal_output +5067,3083342,"TERMINAL",0,0,"8",,terminal_output +5068,3084389,"TERMINAL",0,0,"9",,terminal_output +5069,3085385,"TERMINAL",0,0,"50",,terminal_output +5070,3086407,"TERMINAL",0,0,"1",,terminal_output +5071,3087587,"TERMINAL",0,0,"2",,terminal_output +5072,3088580,"TERMINAL",0,0,"3",,terminal_output +5073,3089509,"TERMINAL",0,0,"4",,terminal_output +5074,3090676,"TERMINAL",0,0,"5",,terminal_output +5075,3091621,"TERMINAL",0,0,"WARNING:absl:Path /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/coinrun/maskgit/train_dynamics_maskgit/3532466/040000 could not be identified as a temporary checkpoint path using . Got error: Expected AtomicRenameTemporaryPath (/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/coinrun/maskgit/train_dynamics_maskgit/3532466/040000) to end with "".orbax-checkpoint-tmp"".\r\nWARNING:absl:Path /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/coinrun/maskgit/train_dynamics_maskgit/3532466/200000 could not be identified as a temporary checkpoint path using . Got error: Expected AtomicRenameTemporaryPath (/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/coinrun/maskgit/train_dynamics_maskgit/3532466/200000) to end with "".orbax-checkpoint-tmp"".\r\nWARNING:absl:Path /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/coinrun/maskgit/train_dynamics_maskgit/3532466/080000 could not be identified as a temporary checkpoint path using . 
Got error: Expected AtomicRenameTemporaryPath (/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/coinrun/maskgit/train_dynamics_maskgit/3532466/080000) to end with "".orbax-checkpoint-tmp"".\r\n[identical WARNING:absl lines repeated for checkpoint steps 120000, 140000, 180000, 060000, 198000, 199000, 100000]\r\nWARNING:absl:Path /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/coinrun/maskgit/train_dynamics_maskgit/3532466/160000 could not be identified as a temporary checkpoint path using . 
Got error: Expected AtomicRenameTemporaryPath (/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/coinrun/maskgit/train_dynamics_maskgit/3532466/160000) to end with "".orbax-checkpoint-tmp"".\r\nWARNING:absl:Path /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/coinrun/maskgit/train_dynamics_maskgit/3532466/020000 could not be identified as a temporary checkpoint path using . Got error: Expected AtomicRenameTemporaryPath (/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/coinrun/maskgit/train_dynamics_maskgit/3532466/020000) to end with "".orbax-checkpoint-tmp"".\r\n",,terminal_output +5076,3091624,"TERMINAL",0,0,"7",,terminal_output +5077,3092563,"TERMINAL",0,0,"8",,terminal_output +5078,3093549,"TERMINAL",0,0,"9",,terminal_output +5079,3094455,"TERMINAL",0,0,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/.venv/lib64/python3.12/site-packages/orbax/checkpoint/_src/serialization/type_handlers.py:1269: UserWarning: Sharding info not provided when restoring. Populating sharding info from sharding file. Please note restoration time will be slightly increased due to reading from file. Note also that this option is unsafe when restoring on a different topology than the checkpoint was saved with.\r\n warnings.warn(\r\n",,terminal_output +5080,3094578,"TERMINAL",0,0,"9:00",,terminal_output +5081,3095839,"TERMINAL",0,0,"1",,terminal_output +5082,3096828,"TERMINAL",0,0,"2",,terminal_output +5083,3097728,"TERMINAL",0,0,"3",,terminal_output +5084,3098660,"TERMINAL",0,0,"4",,terminal_output +5085,3099674,"TERMINAL",0,0,"5",,terminal_output +5086,3100702,"TERMINAL",0,0,"6",,terminal_output +5087,3101718,"TERMINAL",0,0,"7",,terminal_output +5088,3102085,"TERMINAL",0,0,"Running on 1 devices.\r\nCounting all components: ['dynamics', 'lam', 'tokenizer']\r\nParameter counts:\r\n{'dynamics': 26561024, 'lam': 17640416, 'tokenizer': 34489696, 'total': 78691136}\r\nRestored dataloader and model state from step 200000\r\nStarting training from step 200000...\r\nCalculating validation metrics...\r\n",,terminal_output +5089,3102740,"TERMINAL",0,0,"8",,terminal_output +5090,3103797,"TERMINAL",0,0,"9",,terminal_output +5091,3104822,"TERMINAL",0,0,"10",,terminal_output +5092,3105846,"TERMINAL",0,0,"1",,terminal_output +5093,3106874,"TERMINAL",0,0,"2",,terminal_output +5094,3107883,"TERMINAL",0,0,"3",,terminal_output +5095,3108903,"TERMINAL",0,0,"4",,terminal_output +5096,3109916,"TERMINAL",0,0,"5",,terminal_output +5097,3110954,"TERMINAL",0,0,"6",,terminal_output +5098,3112039,"TERMINAL",0,0,"7",,terminal_output +5099,3113019,"TERMINAL",0,0,"8",,terminal_output +5100,3114008,"TERMINAL",0,0,"9",,terminal_output +5101,3115023,"TERMINAL",0,0,"20",,terminal_output +5102,3116045,"TERMINAL",0,0,"1",,terminal_output +5103,3117093,"TERMINAL",0,0,"2",,terminal_output +5104,3118084,"TERMINAL",0,0,"3",,terminal_output +5105,3119108,"TERMINAL",0,0,"4",,terminal_output +5106,3120187,"TERMINAL",0,0,"5",,terminal_output +5107,3121223,"TERMINAL",0,0,"6",,terminal_output +5108,3122177,"TERMINAL",0,0,"7",,terminal_output +5109,3123192,"TERMINAL",0,0,"8",,terminal_output +5110,3124210,"TERMINAL",0,0,"9",,terminal_output +5111,3125232,"TERMINAL",0,0,"30",,terminal_output +5112,3126250,"TERMINAL",0,0,"1",,terminal_output +5113,3127266,"TERMINAL",0,0,"2",,terminal_output +5114,3128301,"TERMINAL",0,0,"3",,terminal_output +5115,3129326,"TERMINAL",0,0,"4",,terminal_output +5116,3130352,"TERMINAL",0,0,"5",,terminal_output 
+5117,3131437,"TERMINAL",0,0,"6",,terminal_output +5118,3132372,"TERMINAL",0,0,"7",,terminal_output +5119,3133394,"TERMINAL",0,0,"8",,terminal_output +5120,3134419,"TERMINAL",0,0,"9",,terminal_output +5121,3135439,"TERMINAL",0,0,"40",,terminal_output +5122,3136461,"TERMINAL",0,0,"1",,terminal_output +5123,3137586,"TERMINAL",0,0,"2",,terminal_output +5124,3138511,"TERMINAL",0,0,"4",,terminal_output +5125,3139557,"TERMINAL",0,0,"5",,terminal_output +5126,3140654,"TERMINAL",0,0,"6",,terminal_output +5127,3141680,"TERMINAL",0,0,"7",,terminal_output +5128,3142726,"TERMINAL",0,0,"8",,terminal_output +5129,3143639,"TERMINAL",0,0,"9",,terminal_output +5130,3144685,"TERMINAL",0,0,"50",,terminal_output +5131,3145655,"TERMINAL",0,0,"1",,terminal_output +5132,3146690,"TERMINAL",0,0,"2",,terminal_output +5133,3147707,"TERMINAL",0,0,"3",,terminal_output +5134,3148726,"TERMINAL",0,0,"4",,terminal_output +5135,3149769,"TERMINAL",0,0,"5",,terminal_output +5136,3150775,"TERMINAL",0,0,"6",,terminal_output +5137,3151800,"TERMINAL",0,0,"7",,terminal_output +5138,3152839,"TERMINAL",0,0,"8",,terminal_output +5139,3153845,"TERMINAL",0,0,"9",,terminal_output +5140,3154900,"TERMINAL",0,0,"30:00",,terminal_output +5141,3155888,"TERMINAL",0,0,"1",,terminal_output +5142,3156916,"TERMINAL",0,0,"2",,terminal_output +5143,3157940,"TERMINAL",0,0,"3",,terminal_output +5144,3158958,"TERMINAL",0,0,"4",,terminal_output +5145,3160019,"TERMINAL",0,0,"5",,terminal_output +5146,3161157,"TERMINAL",0,0,"6",,terminal_output +5147,3162082,"TERMINAL",0,0,"7",,terminal_output +5148,3163137,"TERMINAL",0,0,"8",,terminal_output +5149,3164176,"TERMINAL",0,0,"9",,terminal_output +5150,3165199,"TERMINAL",0,0,"10",,terminal_output +5151,3166185,"TERMINAL",0,0,"1",,terminal_output +5152,3167099,"TERMINAL",0,0,"2",,terminal_output +5153,3168120,"TERMINAL",0,0,"3",,terminal_output +5154,3169175,"TERMINAL",0,0,"4",,terminal_output +5155,3170165,"TERMINAL",0,0,"5",,terminal_output +5156,3171188,"TERMINAL",0,0,"6",,terminal_output +5157,3172209,"TERMINAL",0,0,"7",,terminal_output +5158,3173301,"TERMINAL",0,0,"8",,terminal_output +5159,3174263,"TERMINAL",0,0,"9",,terminal_output +5160,3175351,"TERMINAL",0,0,"20",,terminal_output +5161,3176299,"TERMINAL",0,0,"1",,terminal_output +5162,3177318,"TERMINAL",0,0,"2",,terminal_output +5163,3178358,"TERMINAL",0,0,"3",,terminal_output +5164,3179471,"TERMINAL",0,0,"4",,terminal_output +5165,3180487,"TERMINAL",0,0,"5",,terminal_output +5166,3181407,"TERMINAL",0,0,"6",,terminal_output +5167,3182422,"TERMINAL",0,0,"7",,terminal_output +5168,3183443,"TERMINAL",0,0,"8",,terminal_output +5169,3184461,"TERMINAL",0,0,"9",,terminal_output +5170,3185493,"TERMINAL",0,0,"30",,terminal_output +5171,3186500,"TERMINAL",0,0,"2",,terminal_output +5172,3187599,"TERMINAL",0,0,"3",,terminal_output +5173,3188632,"TERMINAL",0,0,"4",,terminal_output +5174,3189685,"TERMINAL",0,0,"5",,terminal_output +5175,3190722,"TERMINAL",0,0,"6",,terminal_output +5176,3191685,"TERMINAL",0,0,"7",,terminal_output +5177,3192720,"TERMINAL",0,0,"8",,terminal_output +5178,3193723,"TERMINAL",0,0,"9",,terminal_output +5179,3194748,"TERMINAL",0,0,"40",,terminal_output +5180,3195769,"TERMINAL",0,0,"1",,terminal_output +5181,3196786,"TERMINAL",0,0,"2",,terminal_output +5182,3197853,"TERMINAL",0,0,"3",,terminal_output +5183,3198833,"TERMINAL",0,0,"4",,terminal_output +5184,3199957,"TERMINAL",0,0,"5",,terminal_output +5185,3200869,"TERMINAL",0,0,"6",,terminal_output +5186,3201903,"TERMINAL",0,0,"7",,terminal_output +5187,3202298,"TERMINAL",0,0,"Step 200000, 
validation loss: 6.788418769836426\r\n{'val_codebook_usage_lam': np.float64(1.0), 'val_codebook_usage_tokenizer': np.float64(0.8475988051470589), 'val_cross_entropy_loss': np.float64(6.788418115354052), 'val_entropy': np.float64(5.4373855964810245), 'val_masked_token_top16_accuracy': np.float64(0.19955201680753745), 'val_masked_token_top1_accuracy': np.float64(0.032450725634892784), 'val_masked_token_top2_accuracy': np.float64(0.05669430161223692), 'val_masked_token_top5_accuracy': np.float64(0.10574950832946628), 'val_psnr': np.float64(10.75548319723092), 'val_select_logit': np.float64(4.335007929334454), 'val_select_p': np.float64(0.06822160222366744), 'val_ssim': np.float64(0.25764136659164055), 'val_total_loss': np.float64(6.788418115354052), 'val_z_loss': np.float64(54.11139491960114), 'val_loss': np.float32(6.788419), 'val_full_frame_codebook_usage_lam': np.float64(1.0), 'val_full_frame_codebook_usage_tokenizer': np.float64(0.2629250919117647), 'val_full_frame_cross_entropy_loss': np.float64(1.7930428303924262), 'val_full_frame_entropy': np.float64(0.252041938550332), 'val_full_frame_masked_token_top16_accuracy': np.float64(0.9929032676360187), 'val_full_frame_masked_token_top1_accuracy': np.float64(0.8329210959228814), 'val_full_frame_masked_token_top2_accuracy': np.float64(0.9222704674683365), 'val_full_frame_masked_token_top5_accuracy': np.float64(0.9736630788036421), 'val_full_frame_psnr': np.float64(28.751719717885933), 'val_full_frame_select_logit': np.float64(100.21908793729894), 'val_full_frame_select_p': np.float64(0.912218968073527), 'val_full_frame_ssim': np.float64(0.8437704747798396), 'val_full_frame_z_loss': np.float64(51192.01493566176), 'val_full_frame_loss': np.float32(1.7930428)}\r\n",,terminal_output +5188,3202906,"TERMINAL",0,0,"8",,terminal_output +5189,3203928,"TERMINAL",0,0,"9",,terminal_output +5190,3204953,"TERMINAL",0,0,"50",,terminal_output +5191,3205970,"TERMINAL",0,0,"1",,terminal_output +5192,3206996,"TERMINAL",0,0,"2",,terminal_output +5193,3207733,"TERMINAL",0,0,"W1008 23:30:53.224281 1298635 pjrt_client.cc:1469] WatchJobStateAsync failed for task goo.gle/debugproto job_name: ""jax_worker"": UNAVAILABLE: Cancelling all calls\r\nAdditional GRPC error information from remote target coordination_service while calling /tensorflow.CoordinationService/WatchJobState:\r\n:UNKNOWN:Error received from peer {grpc_message:""Cancelling all calls"", grpc_status:14}\r\n",,terminal_output +5194,3208013,"TERMINAL",0,0,"3",,terminal_output +5195,3208420,"TERMINAL",0,0,"]0;tum_cte0515@hkn0401:~/Projects/jasmine[?2004h(jasmine) [tum_cte0515@hkn0401 jasmine]$ ",,terminal_output +5196,3209034,"TERMINAL",0,0,"4",,terminal_output +5197,3210072,"TERMINAL",0,0,"5",,terminal_output +5198,3211074,"TERMINAL",0,0,"6",,terminal_output +5199,3212095,"TERMINAL",0,0,"7",,terminal_output +5200,3213116,"TERMINAL",0,0,"8",,terminal_output +5201,3214157,"TERMINAL",0,0,"9",,terminal_output +5202,3215162,"TERMINAL",0,0,"1:00",,terminal_output +5203,3216227,"TERMINAL",0,0,"1",,terminal_output +5204,3217206,"TERMINAL",0,0,"2",,terminal_output +5205,3218237,"TERMINAL",0,0,"3",,terminal_output +5206,3219258,"TERMINAL",0,0,"4",,terminal_output +5207,3220301,"TERMINAL",0,0,"5",,terminal_output +5208,3221321,"TERMINAL",0,0,"6",,terminal_output +5209,3222338,"TERMINAL",0,0,"7",,terminal_output +5210,3223413,"appendix_c_nodes.md",0,0,"",markdown,tab +5211,3223474,"TERMINAL",0,0,"8",,terminal_output +5212,3224408,"TERMINAL",0,0,"9",,terminal_output 
+5213,3225240,"appendix_c_nodes.md",10583,0,"\n",markdown,content +5214,3225419,"appendix_c_nodes.md",10584,0,"\n",markdown,content +5215,3225454,"TERMINAL",0,0,"10",,terminal_output +5216,3226456,"TERMINAL",0,0,"1",,terminal_output +5217,3226495,"appendix_c_nodes.md",10585,0,"#",markdown,content +5218,3226496,"appendix_c_nodes.md",10586,0,"",markdown,selection_keyboard +5219,3226669,"appendix_c_nodes.md",10586,0,"#",markdown,content +5220,3226670,"appendix_c_nodes.md",10587,0,"",markdown,selection_keyboard +5221,3227474,"TERMINAL",0,0,"2",,terminal_output +5222,3227759,"appendix_c_nodes.md",10587,0," ",markdown,content +5223,3227760,"appendix_c_nodes.md",10588,0,"",markdown,selection_keyboard +5224,3228435,"appendix_c_nodes.md",10588,0,"n",markdown,content +5225,3228436,"appendix_c_nodes.md",10589,0,"",markdown,selection_keyboard +5226,3228503,"TERMINAL",0,0,"4",,terminal_output +5227,3228676,"appendix_c_nodes.md",10589,0,"o",markdown,content +5228,3228677,"appendix_c_nodes.md",10590,0,"",markdown,selection_keyboard +5229,3229446,"appendix_c_nodes.md",10589,1,"",markdown,content +5230,3229538,"TERMINAL",0,0,"5",,terminal_output +5231,3230180,"appendix_c_nodes.md",10589,0,"i",markdown,content +5232,3230181,"appendix_c_nodes.md",10590,0,"",markdown,selection_keyboard +5233,3230184,"appendix_c_nodes.md",10590,0,"o",markdown,content +5234,3230185,"appendix_c_nodes.md",10591,0,"",markdown,selection_keyboard +5235,3230574,"TERMINAL",0,0,"6",,terminal_output +5236,3230614,"appendix_c_nodes.md",10591,0,"s",markdown,content +5237,3230615,"appendix_c_nodes.md",10592,0,"",markdown,selection_keyboard +5238,3231167,"appendix_c_nodes.md",10592,0,"e",markdown,content +5239,3231168,"appendix_c_nodes.md",10593,0,"",markdown,selection_keyboard +5240,3231565,"TERMINAL",0,0,"7",,terminal_output +5241,3231676,"appendix_c_nodes.md",10592,1,"",markdown,content +5242,3231856,"appendix_c_nodes.md",10591,1,"",markdown,content +5243,3232011,"appendix_c_nodes.md",10590,1,"",markdown,content +5244,3232138,"appendix_c_nodes.md",10589,1,"",markdown,content +5245,3232626,"TERMINAL",0,0,"8",,terminal_output +5246,3232814,"appendix_c_nodes.md",10589,0,"o",markdown,content +5247,3232815,"appendix_c_nodes.md",10590,0,"",markdown,selection_keyboard +5248,3232870,"appendix_c_nodes.md",10590,0,"i",markdown,content +5249,3232871,"appendix_c_nodes.md",10591,0,"",markdown,selection_keyboard +5250,3233314,"appendix_c_nodes.md",10591,0,"s",markdown,content +5251,3233315,"appendix_c_nodes.md",10592,0,"",markdown,selection_keyboard +5252,3233404,"appendix_c_nodes.md",10592,0,"e",markdown,content +5253,3233405,"appendix_c_nodes.md",10593,0,"",markdown,selection_keyboard +5254,3233637,"appendix_c_nodes.md",10593,0," ",markdown,content +5255,3233638,"appendix_c_nodes.md",10594,0,"",markdown,selection_keyboard +5256,3233638,"TERMINAL",0,0,"9",,terminal_output +5257,3233853,"appendix_c_nodes.md",10594,0,"s",markdown,content +5258,3233854,"appendix_c_nodes.md",10595,0,"",markdown,selection_keyboard +5259,3234113,"appendix_c_nodes.md",10595,0,"t",markdown,content +5260,3234114,"appendix_c_nodes.md",10596,0,"",markdown,selection_keyboard +5261,3234380,"appendix_c_nodes.md",10596,0,"d",markdown,content +5262,3234381,"appendix_c_nodes.md",10597,0,"",markdown,selection_keyboard +5263,3234621,"TERMINAL",0,0,"20",,terminal_output +5264,3235674,"TERMINAL",0,0,"1",,terminal_output +5265,3236685,"TERMINAL",0,0,"2",,terminal_output +5266,3237170,"appendix_c_nodes.md",10597,0," ",markdown,content 
+5267,3237172,"appendix_c_nodes.md",10598,0,"",markdown,selection_keyboard +5268,3237700,"appendix_c_nodes.md",10598,0,"0",markdown,content +5269,3237701,"appendix_c_nodes.md",10599,0,"",markdown,selection_keyboard +5270,3237702,"TERMINAL",0,0,"3",,terminal_output +5271,3237867,"appendix_c_nodes.md",10599,0,".",markdown,content +5272,3237868,"appendix_c_nodes.md",10600,0,"",markdown,selection_keyboard +5273,3238354,"appendix_c_nodes.md",10600,0,"2",markdown,content +5274,3238355,"appendix_c_nodes.md",10601,0,"",markdown,selection_keyboard +5275,3238837,"TERMINAL",0,0,"4",,terminal_output +5276,3238882,"appendix_c_nodes.md",10601,0,"\n",markdown,content +5277,3239733,"TERMINAL",0,0,"5",,terminal_output +5278,3240233,"appendix_c_nodes.md",10602,0,"Step 200000, validation loss: 6.788418769836426\n{'val_codebook_usage_lam': np.float64(1.0), 'val_codebook_usage_tokenizer': np.float64(0.8475988051470589), 'val_cross_entropy_loss': np.float64(6.788418115354052), 'val_entropy': np.float64(5.4373855964810245), 'val_masked_token_top16_accuracy': np.float64(0.19955201680753745), 'val_masked_token_top1_accuracy': np.float64(0.032450725634892784), 'val_masked_token_top2_accuracy': np.float64(0.05669430161223692), 'val_masked_token_top5_accuracy': np.float64(0.10574950832946628), 'val_psnr': np.float64(10.75548319723092), 'val_select_logit': np.float64(4.335007929334454), 'val_select_p': np.float64(0.06822160222366744), 'val_ssim': np.float64(0.25764136659164055), 'val_total_loss': np.float64(6.788418115354052), 'val_z_loss': np.float64(54.11139491960114), 'val_loss': np.float32(6.788419), 'val_full_frame_codebook_usage_lam': np.float64(1.0), 'val_full_frame_codebook_usage_tokenizer': np.float64(0.2629250919117647), 'val_full_frame_cross_entropy_loss': np.float64(1.7930428303924262), 'val_full_frame_entropy': np.float64(0.252041938550332), 'val_full_frame_masked_token_top16_accuracy': np.float64(0.9929032676360187), 'val_full_frame_masked_token_top1_accuracy': np.float64(0.8329210959228814), 'val_full_frame_masked_token_top2_accuracy': np.float64(0.9222704674683365), 'val_full_frame_masked_token_top5_accuracy': np.float64(0.9736630788036421), 'val_full_frame_psnr': np.float64(28.751719717885933), 'val_full_frame_select_logit': np.float64(100.21908793729894), 'val_full_frame_select_p': np.float64(0.912218968073527), 'val_full_frame_ssim': np.float64(0.8437704747798396), 'val_full_frame_z_loss': np.float64(51192.01493566176), 'val_full_frame_loss': np.float32(1.7930428)}",markdown,content +5279,3240752,"TERMINAL",0,0,"6",,terminal_output +5280,3241778,"TERMINAL",0,0,"7",,terminal_output +5281,3242811,"TERMINAL",0,0,"8",,terminal_output +5282,3243811,"TERMINAL",0,0,"9",,terminal_output +5283,3244830,"TERMINAL",0,0,"30",,terminal_output +5284,3245852,"TERMINAL",0,0,"1",,terminal_output +5285,3246875,"TERMINAL",0,0,"2",,terminal_output +5286,3247936,"TERMINAL",0,0,"3",,terminal_output +5287,3248952,"TERMINAL",0,0,"4",,terminal_output +5288,3249973,"TERMINAL",0,0,"5",,terminal_output +5289,3250994,"TERMINAL",0,0,"6",,terminal_output +5290,3252012,"TERMINAL",0,0,"7",,terminal_output +5291,3253033,"TERMINAL",0,0,"8",,terminal_output +5292,3254055,"TERMINAL",0,0,"9",,terminal_output +5293,3255075,"TERMINAL",0,0,"40",,terminal_output +5294,3256112,"TERMINAL",0,0,"1",,terminal_output +5295,3257127,"TERMINAL",0,0,"2",,terminal_output +5296,3258139,"TERMINAL",0,0,"3",,terminal_output +5297,3259157,"TERMINAL",0,0,"4",,terminal_output +5298,3260179,"TERMINAL",0,0,"5",,terminal_output 
+5299,3261200,"TERMINAL",0,0,"6",,terminal_output +5300,3262244,"TERMINAL",0,0,"7",,terminal_output +5301,3263247,"TERMINAL",0,0,"8",,terminal_output +5302,3264268,"TERMINAL",0,0,"9",,terminal_output +5303,3265316,"TERMINAL",0,0,"50",,terminal_output +5304,3266318,"TERMINAL",0,0,"1",,terminal_output +5305,3267332,"TERMINAL",0,0,"2",,terminal_output +5306,3268353,"TERMINAL",0,0,"3",,terminal_output +5307,3269255,"appendix_c_nodes.md",10649,0,"",markdown,selection_command +5308,3269374,"TERMINAL",0,0,"4",,terminal_output +5309,3270389,"TERMINAL",0,0,"5",,terminal_output +5310,3271410,"TERMINAL",0,0,"6",,terminal_output +5311,3272432,"TERMINAL",0,0,"7",,terminal_output +5312,3273455,"TERMINAL",0,0,"8",,terminal_output +5313,3274026,"TERMINAL",0,0,"g",,terminal_output +5314,3274114,"TERMINAL",0,0,"i",,terminal_output +5315,3274217,"TERMINAL",0,0,"t",,terminal_output +5316,3274271,"TERMINAL",0,0," ",,terminal_output +5317,3274459,"TERMINAL",0,0,"s",,terminal_output +5318,3274487,"TERMINAL",0,0,"9",,terminal_output +5319,3274540,"TERMINAL",0,0,"t",,terminal_output +5320,3274671,"TERMINAL",0,0,"a",,terminal_output +5321,3274751,"TERMINAL",0,0,"s",,terminal_output +5322,3274804,"TERMINAL",0,0,"h",,terminal_output +5323,3275066,"TERMINAL",0,0,"\r\n[?2004l\r",,terminal_output +5324,3275479,"TERMINAL",0,0,"Saved working directory and index state WIP on add-noise-to-combat-exposure-bias: 1ad41e0 Merge branch 'main' into add-noise-to-combat-exposure-bias\r\n",,terminal_output +5325,3275617,"TERMINAL",0,0,"]0;tum_cte0515@hkn0401:~/Projects/jasmine[?2004h(jasmine) [tum_cte0515@hkn0401 jasmine]$ ",,terminal_output +5326,3275656,"TERMINAL",0,0,"2:00",,terminal_output +5327,3276106,"TERMINAL",0,0,"g",,terminal_output +5328,3276267,"TERMINAL",0,0,"it",,terminal_output +5329,3276428,"TERMINAL",0,0," ",,terminal_output +5330,3276480,"TERMINAL",0,0,"c",,terminal_output +5331,3276612,"TERMINAL",0,0,"h",,terminal_output +5332,3276691,"TERMINAL",0,0,"e",,terminal_output +5333,3276703,"TERMINAL",0,0,"2",,terminal_output +5334,3276744,"TERMINAL",0,0,"c",,terminal_output +5335,3276823,"TERMINAL",0,0,"k",,terminal_output +5336,3277026,"TERMINAL",0,0,"o",,terminal_output +5337,3277118,"TERMINAL",0,0,"ut",,terminal_output +5338,3277213,"TERMINAL",0,0," ",,terminal_output +5339,3277456,"TERMINAL",0,0,"a",,terminal_output +5340,3277534,"TERMINAL",0,0,"c",,terminal_output +5341,3277753,"TERMINAL",0,0,"t",,terminal_output +5342,3277760,"TERMINAL",0,0,"3",,terminal_output +5343,3277897,"TERMINAL",0,0,"i",,terminal_output +5344,3277996,"TERMINAL",0,0,"o",,terminal_output +5345,3278177,"TERMINAL",0,0,"n",,terminal_output +5346,3278804,"TERMINAL",0,0,"-",,terminal_output +5347,3278804,"TERMINAL",0,0,"4",,terminal_output +5348,3279122,"TERMINAL",0,0,"pr",,terminal_output +5349,3279344,"TERMINAL",0,0,"ep",,terminal_output +5350,3279527,"TERMINAL",0,0,"e",,terminal_output +5351,3279628,"TERMINAL",0,0,"n",,terminal_output +5352,3279701,"TERMINAL",0,0,"d",,terminal_output +5353,3279842,"TERMINAL",0,0,"5",,terminal_output +5354,3279942,"TERMINAL",0,0,"-",,terminal_output +5355,3280815,"TERMINAL",0,0,"6",,terminal_output +5356,3281842,"TERMINAL",0,0,"7",,terminal_output +5357,3282333,"TERMINAL",0,0,"m",,terminal_output +5358,3282413,"TERMINAL",0,0,"a",,terminal_output +5359,3282493,"TERMINAL",0,0,"s",,terminal_output +5360,3282585,"TERMINAL",0,0,"k",,terminal_output +5361,3282761,"TERMINAL",0,0,"g",,terminal_output +5362,3282840,"TERMINAL",0,0,"i",,terminal_output +5363,3282879,"TERMINAL",0,0,"8",,terminal_output 
+5364,3282953,"TERMINAL",0,0,"t",,terminal_output +5365,3283066,"TERMINAL",0,0,"\r\n[?2004l\rerror: pathspec 'action-prepend-maskgit' did not match any file(s) known to git\r\n]0;tum_cte0515@hkn0401:~/Projects/jasmine[?2004h(jasmine) [tum_cte0515@hkn0401 jasmine]$ ",,terminal_output +5366,3283889,"TERMINAL",0,0,"9",,terminal_output +5367,3284909,"TERMINAL",0,0,"10",,terminal_output +5368,3285360,"TERMINAL",0,0,"git checkout action-prepend-maskgit",,terminal_output +5369,3285909,"TERMINAL",0,0,"",,terminal_output +5370,3285958,"TERMINAL",0,0,"1",,terminal_output +5371,3286389,"TERMINAL",0,0,"",,terminal_output +5372,3286966,"TERMINAL",0,0,"2",,terminal_output +5373,3287348,"TERMINAL",0,0,"-maskgit",,terminal_output +5374,3287796,"TERMINAL",0,0,"p-maskgit",,terminal_output +5375,3287986,"TERMINAL",0,0,"r-maskgit",,terminal_output +5376,3287987,"TERMINAL",0,0,"3",,terminal_output +5377,3288219,"TERMINAL",0,0,"e-maskgitp-maskgit",,terminal_output +5378,3288953,"TERMINAL",0,0,"e-maskgitn-maskgit",,terminal_output +5379,3288975,"TERMINAL",0,0,"4",,terminal_output +5380,3289195,"TERMINAL",0,0,"d-maskgit",,terminal_output +5381,3289291,"TERMINAL",0,0,"-maskgit",,terminal_output +5382,3289520,"TERMINAL",0,0,"a-maskgit",,terminal_output +5383,3289605,"TERMINAL",0,0,"c-maskgit",,terminal_output +5384,3290042,"TERMINAL",0,0,"5",,terminal_output +5385,3290045,"TERMINAL",0,0,"t-maskgit",,terminal_output +5386,3290127,"TERMINAL",0,0,"i-maskgit",,terminal_output +5387,3290576,"TERMINAL",0,0,"o-maskgit",,terminal_output +5388,3290722,"TERMINAL",0,0,"n-maskgit",,terminal_output +5389,3290967,"TERMINAL",0,0,"\r\n[?2004l\rSwitched to branch 'prepend-action-maskgit'\r\nYour branch is up to date with 'origin/prepend-action-maskgit'.\r\n]0;tum_cte0515@hkn0401:~/Projects/jasmine[?2004h(jasmine) [tum_cte0515@hkn0401 jasmine]$ ",,terminal_output +5390,3291062,"TERMINAL",0,0,"6\rprepend-action-maskgit",,terminal_output +5391,3292088,"TERMINAL",0,0,"7",,terminal_output +5392,3293105,"TERMINAL",0,0,"8",,terminal_output +5393,3294138,"TERMINAL",0,0,"9",,terminal_output +5394,3295161,"TERMINAL",0,0,"20",,terminal_output +5395,3295434,"",0,0,"Switched from branch 'add-noise-to-combat-exposure-bias' to 'prepend-action-maskgit'",,git_branch_checkout +5396,3296149,"TERMINAL",0,0,"1",,terminal_output +5397,3297169,"TERMINAL",0,0,"2",,terminal_output +5398,3298189,"TERMINAL",0,0,"3",,terminal_output +5399,3299209,"TERMINAL",0,0,"4",,terminal_output +5400,3300232,"TERMINAL",0,0,"5",,terminal_output +5401,3301250,"TERMINAL",0,0,"6",,terminal_output +5402,3302269,"TERMINAL",0,0,"7",,terminal_output +5403,3303296,"TERMINAL",0,0,"8",,terminal_output +5404,3304337,"TERMINAL",0,0,"9",,terminal_output +5405,3305331,"TERMINAL",0,0,"30",,terminal_output +5406,3306361,"TERMINAL",0,0,"1",,terminal_output +5407,3307379,"TERMINAL",0,0,"2",,terminal_output +5408,3308396,"TERMINAL",0,0,"3",,terminal_output +5409,3309415,"TERMINAL",0,0,"4",,terminal_output +5410,3310440,"TERMINAL",0,0,"5",,terminal_output +5411,3311460,"TERMINAL",0,0,"6",,terminal_output +5412,3312483,"TERMINAL",0,0,"7",,terminal_output +5413,3313499,"TERMINAL",0,0,"9",,terminal_output +5414,3314535,"TERMINAL",0,0,"40",,terminal_output +5415,3315566,"TERMINAL",0,0,"1",,terminal_output +5416,3316582,"TERMINAL",0,0,"2",,terminal_output +5417,3317780,"TERMINAL",0,0,"3",,terminal_output +5418,3318620,"TERMINAL",0,0,"4",,terminal_output +5419,3319771,"TERMINAL",0,0,"5",,terminal_output +5420,3320662,"TERMINAL",0,0,"6",,terminal_output 
+5421,3321702,"TERMINAL",0,0,"7",,terminal_output +5422,3322884,"TERMINAL",0,0,"8",,terminal_output +5423,3323882,"TERMINAL",0,0,"9",,terminal_output +5424,3324964,"TERMINAL",0,0,"50",,terminal_output +5425,3325853,"TERMINAL",0,0,"1",,terminal_output +5426,3326940,"TERMINAL",0,0,"2",,terminal_output +5427,3327885,"TERMINAL",0,0,"3",,terminal_output +5428,3328887,"TERMINAL",0,0,"4",,terminal_output +5429,3329862,"TERMINAL",0,0,"5",,terminal_output +5430,3331008,"TERMINAL",0,0,"6",,terminal_output +5431,3331900,"TERMINAL",0,0,"7",,terminal_output +5432,3332932,"TERMINAL",0,0,"8",,terminal_output +5433,3333937,"TERMINAL",0,0,"9",,terminal_output +5434,3334971,"TERMINAL",0,0,"3:00",,terminal_output +5435,3335981,"TERMINAL",0,0,"1",,terminal_output +5436,3337100,"TERMINAL",0,0,"2",,terminal_output +5437,3338090,"TERMINAL",0,0,"3",,terminal_output +5438,3339113,"TERMINAL",0,0,"4",,terminal_output +5439,3340234,"TERMINAL",0,0,"5",,terminal_output +5440,3341159,"TERMINAL",0,0,"6",,terminal_output +5441,3342227,"TERMINAL",0,0,"7",,terminal_output +5442,3343278,"TERMINAL",0,0,"8",,terminal_output +5443,3344221,"TERMINAL",0,0,"9",,terminal_output +5444,3345254,"TERMINAL",0,0,"10",,terminal_output +5445,3346439,"TERMINAL",0,0,"1",,terminal_output +5446,3347276,"TERMINAL",0,0,"2",,terminal_output +5447,3348296,"TERMINAL",0,0,"3",,terminal_output +5448,3349315,"TERMINAL",0,0,"4",,terminal_output +5449,3350337,"TERMINAL",0,0,"5",,terminal_output +5450,3351401,"TERMINAL",0,0,"6",,terminal_output +5451,3352382,"TERMINAL",0,0,"7",,terminal_output +5452,3353576,"TERMINAL",0,0,"8",,terminal_output +5453,3354555,"TERMINAL",0,0,"9",,terminal_output +5454,3355569,"TERMINAL",0,0,"20",,terminal_output +5455,3356560,"TERMINAL",0,0,"1",,terminal_output +5456,3357573,"TERMINAL",0,0,"3",,terminal_output +5457,3358563,"TERMINAL",0,0,"4",,terminal_output +5458,3359555,"TERMINAL",0,0,"5",,terminal_output +5459,3360576,"TERMINAL",0,0,"6",,terminal_output +5460,3361594,"TERMINAL",0,0,"7",,terminal_output +5461,3362615,"TERMINAL",0,0,"8",,terminal_output +5462,3363630,"TERMINAL",0,0,"9",,terminal_output +5463,3364665,"TERMINAL",0,0,"30",,terminal_output +5464,3365758,"TERMINAL",0,0,"1",,terminal_output +5465,3366784,"TERMINAL",0,0,"2",,terminal_output +5466,3367799,"TERMINAL",0,0,"3",,terminal_output +5467,3368832,"TERMINAL",0,0,"4",,terminal_output +5468,3369851,"TERMINAL",0,0,"5",,terminal_output +5469,3370888,"TERMINAL",0,0,"6",,terminal_output +5470,3371927,"TERMINAL",0,0,"7",,terminal_output +5471,3372932,"TERMINAL",0,0,"8",,terminal_output +5472,3373951,"TERMINAL",0,0,"9",,terminal_output +5473,3374972,"TERMINAL",0,0,"40",,terminal_output +5474,3376001,"TERMINAL",0,0,"1",,terminal_output +5475,3377022,"TERMINAL",0,0,"2",,terminal_output +5476,3378047,"TERMINAL",0,0,"3",,terminal_output +5477,3379091,"TERMINAL",0,0,"4",,terminal_output +5478,3380085,"TERMINAL",0,0,"5",,terminal_output +5479,3381112,"TERMINAL",0,0,"6",,terminal_output +5480,3382132,"TERMINAL",0,0,"7",,terminal_output +5481,3383150,"TERMINAL",0,0,"8",,terminal_output +5482,3384223,"TERMINAL",0,0,"9",,terminal_output +5483,3385271,"TERMINAL",0,0,"50",,terminal_output +5484,3386293,"TERMINAL",0,0,"1",,terminal_output +5485,3387323,"TERMINAL",0,0,"2",,terminal_output +5486,3388339,"TERMINAL",0,0,"3",,terminal_output +5487,3389392,"TERMINAL",0,0,"4",,terminal_output +5488,3390389,"TERMINAL",0,0,"5",,terminal_output +5489,3391389,"TERMINAL",0,0,"6",,terminal_output +5490,3392411,"TERMINAL",0,0,"7",,terminal_output 
+5491,3393430,"TERMINAL",0,0,"8",,terminal_output +5492,3394454,"TERMINAL",0,0,"9",,terminal_output +5493,3395482,"TERMINAL",0,0,"4:00",,terminal_output +5494,3396504,"TERMINAL",0,0,"2",,terminal_output +5495,3397525,"TERMINAL",0,0,"3",,terminal_output +5496,3398545,"TERMINAL",0,0,"4",,terminal_output +5497,3399579,"TERMINAL",0,0,"5",,terminal_output +5498,3400582,"TERMINAL",0,0,"6",,terminal_output +5499,3401648,"TERMINAL",0,0,"7",,terminal_output +5500,3402672,"TERMINAL",0,0,"8",,terminal_output +5501,3403677,"TERMINAL",0,0,"9",,terminal_output +5502,3404722,"TERMINAL",0,0,"10",,terminal_output +5503,3405718,"TERMINAL",0,0,"1",,terminal_output +5504,3406781,"TERMINAL",0,0,"2",,terminal_output +5505,3407796,"TERMINAL",0,0,"3",,terminal_output +5506,3408783,"TERMINAL",0,0,"4",,terminal_output +5507,3409844,"TERMINAL",0,0,"5",,terminal_output +5508,3410823,"TERMINAL",0,0,"6",,terminal_output +5509,3411844,"TERMINAL",0,0,"7",,terminal_output +5510,3412887,"TERMINAL",0,0,"8",,terminal_output +5511,3413959,"TERMINAL",0,0,"9",,terminal_output +5512,3414936,"TERMINAL",0,0,"20",,terminal_output +5513,3415946,"TERMINAL",0,0,"1",,terminal_output +5514,3416985,"TERMINAL",0,0,"2",,terminal_output +5515,3418004,"TERMINAL",0,0,"3",,terminal_output +5516,3419032,"TERMINAL",0,0,"4",,terminal_output +5517,3420195,"TERMINAL",0,0,"5",,terminal_output +5518,3421089,"TERMINAL",0,0,"6",,terminal_output +5519,3422160,"TERMINAL",0,0,"7",,terminal_output +5520,3423119,"TERMINAL",0,0,"8",,terminal_output +5521,3424154,"TERMINAL",0,0,"9",,terminal_output +5522,3425161,"TERMINAL",0,0,"30",,terminal_output +5523,3426176,"TERMINAL",0,0,"1",,terminal_output +5524,3427200,"TERMINAL",0,0,"2",,terminal_output +5525,3428210,"TERMINAL",0,0,"3",,terminal_output +5526,3429233,"TERMINAL",0,0,"4",,terminal_output +5527,3430274,"TERMINAL",0,0,"5",,terminal_output +5528,3431391,"TERMINAL",0,0,"6",,terminal_output +5529,3432321,"TERMINAL",0,0,"7",,terminal_output +5530,3433327,"TERMINAL",0,0,"8",,terminal_output +5531,3434365,"TERMINAL",0,0,"9",,terminal_output +5532,3435367,"TERMINAL",0,0,"40",,terminal_output +5533,3436390,"TERMINAL",0,0,"1",,terminal_output +5534,3437413,"TERMINAL",0,0,"2",,terminal_output +5535,3438437,"TERMINAL",0,0,"3",,terminal_output +5536,3439477,"TERMINAL",0,0,"4",,terminal_output +5537,3440497,"TERMINAL",0,0,"5",,terminal_output +5538,3441515,"TERMINAL",0,0,"7",,terminal_output +5539,3442534,"TERMINAL",0,0,"8",,terminal_output +5540,3443554,"TERMINAL",0,0,"9",,terminal_output +5541,3444606,"TERMINAL",0,0,"50",,terminal_output +5542,3445613,"TERMINAL",0,0,"1",,terminal_output +5543,3446622,"TERMINAL",0,0,"2",,terminal_output +5544,3447640,"TERMINAL",0,0,"3",,terminal_output +5545,3448653,"TERMINAL",0,0,"4",,terminal_output +5546,3449672,"TERMINAL",0,0,"5",,terminal_output +5547,3450693,"TERMINAL",0,0,"6",,terminal_output +5548,3451800,"TERMINAL",0,0,"7",,terminal_output +5549,3452781,"TERMINAL",0,0,"8",,terminal_output +5550,3453802,"TERMINAL",0,0,"9",,terminal_output +5551,3454896,"TERMINAL",0,0,"5:00",,terminal_output +5552,3455916,"TERMINAL",0,0,"1",,terminal_output +5553,3456942,"TERMINAL",0,0,"2",,terminal_output +5554,3457968,"TERMINAL",0,0,"3",,terminal_output +5555,3458996,"TERMINAL",0,0,"4",,terminal_output +5556,3460015,"TERMINAL",0,0,"5",,terminal_output +5557,3461038,"TERMINAL",0,0,"6",,terminal_output +5558,3462058,"TERMINAL",0,0,"7",,terminal_output +5559,3463058,"jasmine/train_dynamics_appendix-c.py",0,0,"import os\n\n\nos.environ.setdefault(""XLA_PYTHON_CLIENT_MEM_FRACTION"", 
""0.98"")\n\nfrom dataclasses import dataclass, field\nimport itertools\nfrom typing import cast, Optional\n\nimport einops\nfrom jax.sharding import Mesh, PartitionSpec, NamedSharding\nfrom jax.experimental.mesh_utils import create_device_mesh\nimport optax\nimport orbax.checkpoint as ocp\nimport numpy as np\nimport dm_pix as pix\nimport jax\nimport jax.numpy as jnp\nimport tyro\nimport wandb\nimport grain\nimport flax.nnx as nnx\n\nfrom genie import Genie, restore_genie_components\nfrom utils.dataloader import get_dataloader\nfrom utils.train_utils import (\n get_lr_schedule,\n count_parameters_by_component,\n print_mem_stats,\n print_compiled_memory_stats,\n print_compiled_cost_analysis,\n)\n\n\n@dataclass\nclass Args:\n # Experiment\n num_steps: int = 200_000\n seed: int = 0\n seq_len: int = 16\n image_channels: int = 3\n image_height: int = 64\n image_width: int = 64\n data_dir: str = """"\n save_ckpt: bool = False\n restore_ckpt: bool = False\n # Optimization\n batch_size: int = 36\n init_lr: float = 0.0\n max_lr: float = 3e-5\n decay_end: float = 0.0\n wsd_decay_steps: int = (\n 20_000 # NOTE: wsd_decay_steps will only be used when using a wsd-schedule\n )\n warmup_steps: int = 5000\n lr_schedule: str = ""wsd"" # supported options: wsd, cos\n # Tokenizer\n tokenizer_dim: int = 512\n tokenizer_ffn_dim: int = 2048\n latent_patch_dim: int = 32\n num_patch_latents: int = 1024\n patch_size: int = 16\n tokenizer_num_blocks: int = 4\n tokenizer_num_heads: int = 8\n tokenizer_checkpoint: str = """"\n # LAM\n lam_dim: int = 512\n lam_ffn_dim: int = 2048\n latent_action_dim: int = 32\n num_actions: int = 6\n lam_patch_size: int = 16\n lam_num_blocks: int = 4\n lam_num_heads: int = 8\n lam_checkpoint: str = """"\n # Dynamics\n dyna_type: str = ""maskgit"" # supported options: maskgit, causal\n dyna_dim: int = 512\n dyna_ffn_dim: int = 2048\n dyna_num_blocks: int = 6\n dyna_num_heads: int = 8\n dropout: float = 0.0\n mask_limit: float = 0.5\n z_loss_weight: float = 0.0\n max_noise_level: float = 0.0\n noise_buckets: int = 10\n param_dtype = jnp.float32\n dtype = jnp.bfloat16\n use_flash_attention: bool = True\n use_gt_actions: bool = False\n # Logging\n log: bool = True\n entity: str = """"\n project: str = """"\n name: str = ""train_dynamics""\n tags: list[str] = field(default_factory=lambda: [""dynamics""])\n log_interval: int = 50\n log_image_interval: int = 1000\n ckpt_dir: str = """"\n log_checkpoint_interval: int = 5000\n log_checkpoint_keep_period: int = 20_000\n log_gradients: bool = False\n val_data_dir: str = """"\n val_interval: int = 20_000\n val_steps: int = 50\n eval_full_frame: bool = True\n val_maskgit_steps: int = 25\n val_temperature: float = 1\n val_sample_argmax: bool = False\n wandb_id: str = """"\n\n\ndef build_model(args: Args, rng: jax.Array) -> tuple[Genie, jax.Array]:\n rng, _rng = jax.random.split(rng)\n rngs = nnx.Rngs(_rng)\n genie = Genie(\n # Tokenizer\n in_dim=args.image_channels,\n tokenizer_dim=args.tokenizer_dim,\n tokenizer_ffn_dim=args.tokenizer_ffn_dim,\n latent_patch_dim=args.latent_patch_dim,\n num_patch_latents=args.num_patch_latents,\n patch_size=args.patch_size,\n tokenizer_num_blocks=args.tokenizer_num_blocks,\n tokenizer_num_heads=args.tokenizer_num_heads,\n # LAM\n lam_dim=args.lam_dim,\n lam_ffn_dim=args.lam_ffn_dim,\n latent_action_dim=args.latent_action_dim,\n num_actions=args.num_actions,\n lam_patch_size=args.lam_patch_size,\n lam_num_blocks=args.lam_num_blocks,\n lam_num_heads=args.lam_num_heads,\n lam_co_train=not args.lam_checkpoint,\n 
use_gt_actions=args.use_gt_actions,\n # Dynamics\n dyna_type=args.dyna_type,\n dyna_dim=args.dyna_dim,\n dyna_ffn_dim=args.dyna_ffn_dim,\n dyna_num_blocks=args.dyna_num_blocks,\n dyna_num_heads=args.dyna_num_heads,\n dropout=args.dropout,\n mask_limit=args.mask_limit,\n max_noise_level=args.max_noise_level,\n noise_buckets=args.noise_buckets,\n param_dtype=args.param_dtype,\n dtype=args.dtype,\n use_flash_attention=args.use_flash_attention,\n decode=False,\n rngs=rngs,\n )\n if args.use_gt_actions:\n assert (\n not args.lam_checkpoint\n ), ""Cannot use LAM when using ground-truth actions.""\n else:\n assert genie.lam is not None\n del genie.lam.decoder\n return genie, rng\n\n\ndef build_optimizer(genie: Genie, args: Args) -> nnx.ModelAndOptimizer:\n lr_schedule = get_lr_schedule(\n args.lr_schedule,\n args.init_lr,\n args.max_lr,\n args.decay_end,\n args.num_steps,\n args.warmup_steps,\n args.wsd_decay_steps,\n )\n tx = optax.adamw(\n learning_rate=lr_schedule,\n b1=0.9,\n b2=0.9,\n weight_decay=1e-4,\n mu_dtype=args.param_dtype, # moments in full precision\n )\n optimizer = nnx.ModelAndOptimizer(genie, tx)\n return optimizer\n\n\ndef build_mesh_and_sharding(\n num_devices: int,\n) -> tuple[Mesh, NamedSharding, NamedSharding, NamedSharding]:\n device_mesh_arr = create_device_mesh((num_devices,))\n mesh = Mesh(devices=device_mesh_arr, axis_names=(""data"",))\n replicated_sharding = NamedSharding(mesh, PartitionSpec())\n videos_sharding = NamedSharding(mesh, PartitionSpec(""data"", None, None, None, None))\n actions_sharding = NamedSharding(mesh, PartitionSpec(""data"", None))\n return mesh, replicated_sharding, videos_sharding, actions_sharding\n\n\ndef shard_optimizer_states(\n optimizer: nnx.ModelAndOptimizer, replicated_sharding: NamedSharding\n) -> None:\n model_state = nnx.state(optimizer.model)\n model_sharded_state = jax.lax.with_sharding_constraint(\n model_state, replicated_sharding\n )\n nnx.update(optimizer.model, model_sharded_state)\n optimizer_state = nnx.state(optimizer, nnx.optimizer.OptState)\n optimizer_sharded_state = jax.lax.with_sharding_constraint(\n optimizer_state, replicated_sharding\n )\n nnx.update(optimizer, optimizer_sharded_state)\n\n\ndef build_dataloader(args: Args, data_dir: str) -> grain.DataLoaderIterator:\n image_shape = (args.image_height, args.image_width, args.image_channels)\n array_record_files = [\n os.path.join(data_dir, x)\n for x in os.listdir(data_dir)\n if x.endswith("".array_record"")\n ]\n grain_dataloader = get_dataloader(\n array_record_files,\n args.seq_len,\n # NOTE: We deliberately pass the global batch size\n # The dataloader shards the dataset across all processes\n args.batch_size,\n *image_shape,\n num_workers=8,\n prefetch_buffer_size=1,\n seed=args.seed,\n )\n initial_state = grain_dataloader._create_initial_state()\n grain_iterator = grain.DataLoaderIterator(grain_dataloader, initial_state)\n return grain_iterator\n\n\ndef build_checkpoint_manager(args: Args) -> Optional[ocp.CheckpointManager]:\n if args.restore_ckpt or args.save_ckpt:\n handler_registry = ocp.handlers.DefaultCheckpointHandlerRegistry()\n handler_registry.add(\n ""model_state"", ocp.args.PyTreeSave, ocp.handlers.PyTreeCheckpointHandler\n )\n handler_registry.add(\n ""model_state"", ocp.args.PyTreeRestore, ocp.handlers.PyTreeCheckpointHandler\n )\n handler_registry.add(\n ""train_dataloader_state"",\n grain.checkpoint.CheckpointSave,\n cast(ocp.handlers.CheckpointHandler, grain.checkpoint.CheckpointHandler),\n )\n handler_registry.add(\n 
""train_dataloader_state"",\n grain.checkpoint.CheckpointRestore,\n cast(ocp.handlers.CheckpointHandler, grain.checkpoint.CheckpointHandler),\n )\n if args.val_data_dir:\n handler_registry.add(\n ""val_dataloader_state"",\n grain.checkpoint.CheckpointSave,\n cast(\n ocp.handlers.CheckpointHandler, grain.checkpoint.CheckpointHandler\n ),\n )\n handler_registry.add(\n ""val_dataloader_state"",\n grain.checkpoint.CheckpointRestore,\n cast(\n ocp.handlers.CheckpointHandler, grain.checkpoint.CheckpointHandler\n ),\n )\n checkpoint_options = ocp.CheckpointManagerOptions(\n save_interval_steps=args.log_checkpoint_interval,\n max_to_keep=3,\n keep_period=args.log_checkpoint_keep_period,\n step_format_fixed_length=6,\n cleanup_tmp_directories=True,\n )\n checkpoint_manager = ocp.CheckpointManager(\n args.ckpt_dir,\n options=checkpoint_options,\n handler_registry=handler_registry,\n )\n return checkpoint_manager\n else:\n return None\n\n\ndef restore_or_initialize_components(\n args: Args,\n checkpoint_manager: Optional[ocp.CheckpointManager],\n optimizer: nnx.ModelAndOptimizer,\n train_iterator: grain.DataLoaderIterator,\n rng: jax.Array,\n replicated_sharding: NamedSharding,\n val_iterator: Optional[grain.DataLoaderIterator],\n restore_step: Optional[int] = None,\n) -> tuple[\n int,\n nnx.ModelAndOptimizer,\n grain.DataLoaderIterator,\n grain.DataLoaderIterator,\n jax.Array,\n]:\n step = 0\n if checkpoint_manager and restore_step is None:\n restore_step = checkpoint_manager.latest_step()\n if args.restore_ckpt:\n assert checkpoint_manager is not None\n abstract_optimizer = nnx.eval_shape(lambda: optimizer)\n abstract_optimizer_state = nnx.state(abstract_optimizer)\n if val_iterator:\n restore_args = ocp.args.Composite(\n model_state=ocp.args.PyTreeRestore(abstract_optimizer_state), # type: ignore\n train_dataloader_state=grain.checkpoint.CheckpointRestore(train_iterator), # type: ignore\n val_dataloader_state=grain.checkpoint.CheckpointRestore(val_iterator), # type: ignore\n )\n else:\n restore_args = ocp.args.Composite(\n model_state=ocp.args.PyTreeRestore(abstract_optimizer_state), # type: ignore\n train_dataloader_state=grain.checkpoint.CheckpointRestore(train_iterator), # type: ignore\n )\n restored = checkpoint_manager.restore(\n checkpoint_manager.latest_step(), args=restore_args\n )\n restored_optimizer_state = restored[""model_state""]\n nnx.update(optimizer, restored_optimizer_state)\n train_iterator = restored[""train_dataloader_state""]\n if val_iterator:\n val_iterator = restored[""val_dataloader_state""]\n step = checkpoint_manager.latest_step() or 0\n print(f""Restored dataloader and model state from step {step}"")\n else:\n # Restore from pre-trained tokenizer (and LAM)\n rng, _rng = jax.random.split(rng)\n optimizer = restore_genie_components(optimizer, replicated_sharding, _rng, args)\n return step, optimizer, train_iterator, val_iterator, rng\n\n\ndef _calculate_top_k_accuracy(\n token_logits_BTNV: jax.Array,\n video_tokens_BTN: jax.Array,\n mask_BTN: jax.Array,\n k: int,\n) -> jax.Array:\n _, topk_indices_BTNK = jax.lax.top_k(token_logits_BTNV, k)\n topk_correct = jnp.any(\n topk_indices_BTNK == video_tokens_BTN[..., jnp.newaxis], axis=-1\n )\n topk_acc = (mask_BTN * topk_correct).sum() / mask_BTN.sum()\n return topk_acc\n\n\ndef _calculate_step_metrics(\n outputs: dict[str, jax.Array],\n gt: jax.Array,\n num_actions: int,\n num_patch_latents: int,\n) -> tuple[jax.Array, dict]:\n mask_BTN = outputs[""mask""]\n outputs[""token_logits""] = 
outputs[""token_logits""].astype(jnp.float32)\n ce_loss = optax.softmax_cross_entropy_with_integer_labels(\n outputs[""token_logits""], outputs[""video_tokens""]\n )\n ce_loss = (mask_BTN * ce_loss).sum() / mask_BTN.sum()\n z_val = jax.nn.logsumexp(outputs[""token_logits""], axis=-1)\n z_loss_metric = (mask_BTN * (z_val**2)).sum() / mask_BTN.sum()\n\n masked_token_top_1_acc = _calculate_top_k_accuracy(\n outputs[""token_logits""], outputs[""video_tokens""], mask_BTN, 1\n )\n masked_token_top_2_acc = _calculate_top_k_accuracy(\n outputs[""token_logits""], outputs[""video_tokens""], mask_BTN, 2\n )\n masked_token_top_5_acc = _calculate_top_k_accuracy(\n outputs[""token_logits""], outputs[""video_tokens""], mask_BTN, 5\n )\n masked_token_top_16_acc = _calculate_top_k_accuracy(\n outputs[""token_logits""], outputs[""video_tokens""], mask_BTN, 16\n )\n\n select_probs = jax.nn.softmax(outputs[""token_logits""])\n gt_val = gt.clip(0, 1).reshape(-1, *gt.shape[2:])\n recon = outputs[""recon""].clip(0, 1).reshape(-1, *outputs[""recon""].shape[2:])\n psnr = jnp.asarray(pix.psnr(gt_val, recon)).mean()\n ssim = jnp.asarray(pix.ssim(gt_val, recon)).mean()\n _, index_counts_tokenizer = jnp.unique_counts(\n jnp.ravel(outputs[""video_tokens""]),\n size=num_patch_latents,\n fill_value=0,\n )\n codebook_usage_tokenizer = (index_counts_tokenizer != 0).mean()\n metrics = dict(\n cross_entropy_loss=ce_loss,\n masked_token_top1_accuracy=masked_token_top_1_acc,\n masked_token_top2_accuracy=masked_token_top_2_acc,\n masked_token_top5_accuracy=masked_token_top_5_acc,\n masked_token_top16_accuracy=masked_token_top_16_acc,\n select_logit=outputs[""token_logits""].max(-1).mean(),\n select_p=select_probs.max(-1).mean(),\n entropy=jax.scipy.special.entr(select_probs).sum(-1).mean(),\n z_loss=z_loss_metric,\n psnr=psnr,\n ssim=ssim,\n codebook_usage_tokenizer=codebook_usage_tokenizer,\n )\n if ""lam_indices"" in outputs.keys():\n _, index_counts_lam = jnp.unique_counts(\n jnp.ravel(outputs[""lam_indices""]),\n size=num_actions,\n fill_value=0,\n )\n codebook_usage_lam = (index_counts_lam != 0).mean()\n metrics[""codebook_usage_lam""] = codebook_usage_lam\n return ce_loss, metrics\n\n\ndef main(args: Args) -> None:\n jax.distributed.initialize()\n num_devices = jax.device_count()\n if num_devices == 0:\n raise ValueError(""No JAX devices found."")\n print(f""Running on {num_devices} devices."")\n\n if args.batch_size % num_devices != 0:\n raise ValueError(\n f""Global batch size {args.batch_size} must be divisible by ""\n f""number of devices {num_devices}.""\n )\n\n rng = jax.random.key(args.seed)\n\n # --- Initialize model ---\n genie, rng = build_model(args, rng)\n _, params, _ = nnx.split(genie, nnx.Param, ...)\n param_counts = count_parameters_by_component(params)\n\n if args.log and jax.process_index() == 0:\n wandb_init_kwargs = {\n ""entity"": args.entity,\n ""project"": args.project,\n ""name"": args.name,\n ""tags"": args.tags,\n ""group"": ""debug"",\n ""config"": args,\n }\n\n if args.wandb_id:\n wandb_init_kwargs.update(\n {\n ""id"": args.wandb_id,\n ""resume"": ""allow"",\n }\n )\n wandb.init(**wandb_init_kwargs)\n\n wandb.config.update({""model_param_count"": param_counts})\n\n print(""Parameter counts:"")\n print(param_counts)\n\n # --- Initialize optimizer ---\n optimizer = build_optimizer(genie, args)\n del genie\n\n # FIXME: switch to create_hybrid_device_mesh for runs spanning multiple nodes\n _, replicated_sharding, videos_sharding, actions_sharding = build_mesh_and_sharding(\n num_devices\n )\n\n 
shard_optimizer_states(optimizer, replicated_sharding)\n\n # --- Initialize checkpoint manager ---\n checkpoint_manager = build_checkpoint_manager(args)\n\n # --- Create DataLoaderIterator from dataloader ---\n train_iterator = build_dataloader(args, args.data_dir)\n val_iterator = None\n if args.val_data_dir:\n val_iterator = build_dataloader(args, args.val_data_dir)\n\n # --- Restore checkpoint ---\n step, optimizer, train_iterator, val_iterator, rng = (\n restore_or_initialize_components(\n args,\n checkpoint_manager,\n optimizer,\n train_iterator,\n rng,\n replicated_sharding,\n val_iterator,\n )\n )\n\n # --- Define loss and train step (close over args) ---\n def dynamics_loss_fn(\n model: Genie,\n inputs: dict,\n ) -> tuple[jax.Array, tuple[jax.Array, dict]]:\n gt = jnp.asarray(inputs[""videos""], dtype=jnp.float32) / 255.0\n inputs[""videos""] = gt.astype(args.dtype)\n outputs = model(inputs)\n ce_loss, metrics = _calculate_step_metrics(\n outputs, gt, args.num_actions, args.num_patch_latents\n )\n z_loss = metrics[""z_loss""]\n total_loss = ce_loss + args.z_loss_weight * z_loss\n metrics[""total_loss""] = total_loss\n return total_loss, (outputs[""recon""], metrics)\n\n @nnx.jit\n def val_step(genie: Genie, inputs: dict) -> dict:\n """"""Evaluate model and compute metrics""""""\n genie.eval()\n gt = jnp.asarray(inputs[""videos""], dtype=jnp.float32) / 255.0\n (loss, (recon, metrics)) = dynamics_loss_fn(genie, inputs)\n val_output = {""loss"": loss, ""recon"": recon, ""metrics"": metrics}\n\n # --- Evaluate full frame prediction (sampling) ---\n if args.eval_full_frame:\n inputs[""videos""] = gt.astype(args.dtype)\n tokenizer_outputs = genie.tokenizer.vq_encode(\n inputs[""videos""], training=False\n )\n tokens_full_frame = tokenizer_outputs[""indices""]\n lam_indices_E = None\n if not args.use_gt_actions:\n lam_indices_E = genie.vq_encode(inputs, training=False)\n inputs[""latent_actions""] = lam_indices_E\n inputs[""videos""] = inputs[""videos""][\n :, :-1\n ] # remove last frame for generation\n recon_full_frame, logits_full_frame = genie.sample(\n batch=inputs,\n seq_len=args.seq_len,\n noise_level=0.0,\n temperature=args.val_temperature,\n sample_argmax=args.val_sample_argmax,\n maskgit_steps=args.val_maskgit_steps,\n )\n # Calculate metrics for the last frame only\n step_outputs = {\n ""recon"": recon_full_frame[:, -1],\n ""token_logits"": logits_full_frame[:, -1],\n ""video_tokens"": tokens_full_frame[:, -1],\n ""mask"": jnp.ones_like(tokens_full_frame[:, -1]),\n }\n if lam_indices_E is not None:\n lam_indices_B = lam_indices_E.reshape((-1, args.seq_len - 1))[:, -1]\n step_outputs[""lam_indices""] = lam_indices_B\n\n loss_full_frame, metrics_full_frame = _calculate_step_metrics(\n step_outputs, gt[:, -1], args.num_actions, args.num_patch_latents\n )\n val_output.update(\n {\n ""loss_full_frame"": loss_full_frame,\n ""recon_full_frame"": recon_full_frame,\n ""metrics_full_frame"": metrics_full_frame,\n }\n )\n return val_output\n\n def calculate_validation_metrics(val_dataloader, genie, rng):\n step = 0\n loss_per_step = []\n metrics_per_step = []\n loss_full_frame_per_step = []\n metrics_full_frame_per_step = []\n batch = None\n recon = None\n recon_full_frame = None\n for batch in val_dataloader:\n rng, _rng_mask = jax.random.split(rng, 2)\n batch[""rng""] = _rng_mask\n val_outputs = val_step(genie, batch)\n loss_per_step.append(val_outputs[""loss""])\n metrics_per_step.append(val_outputs[""metrics""])\n recon = val_outputs[""recon""]\n if args.eval_full_frame:\n 
loss_full_frame_per_step.append(val_outputs[""loss_full_frame""])\n metrics_full_frame_per_step.append(val_outputs[""metrics_full_frame""])\n recon_full_frame = val_outputs[""recon_full_frame""]\n step += 1\n if step > args.val_steps:\n break\n\n if step < args.val_steps:\n print(\n f""Warning: Your validation dataset is too small to make val_steps many steps. Made {step} steps, expected {args.val_steps}""\n )\n\n val_metrics = {\n f""val_{key}"": np.mean([float(m[key]) for m in metrics_per_step])\n for key in metrics_per_step[0].keys()\n }\n val_metrics[""val_loss""] = np.mean(loss_per_step)\n if args.eval_full_frame:\n val_metrics_full_frame = {\n f""val_full_frame_{key}"": np.mean(\n [float(m[key]) for m in metrics_full_frame_per_step]\n )\n for key in metrics_full_frame_per_step[0].keys()\n }\n val_metrics.update(val_metrics_full_frame)\n val_metrics[""val_full_frame_loss""] = np.mean(loss_full_frame_per_step)\n return val_metrics, batch, recon, recon_full_frame\n\n dataloader_val = None\n if val_iterator:\n dataloader_val = (\n {\n ""videos"": jax.make_array_from_process_local_data(\n videos_sharding, elem[""videos""]\n ),\n ""actions"": (\n jax.make_array_from_process_local_data(\n actions_sharding, elem[""actions""]\n )\n if args.use_gt_actions\n else None\n ),\n }\n for elem in val_iterator\n )\n print(f""Starting training from step {step}..."")\n # --- Validation loss ---\n val_results = {}\n if dataloader_val and step % args.val_interval == 0:\n rng, _rng_mask_val = jax.random.split(rng, 2)\n print(""Calculating validation metrics..."")\n val_metrics, val_gt_batch, val_recon, val_recon_full_frame = (\n calculate_validation_metrics(\n dataloader_val, optimizer.model, _rng_mask_val\n )\n )\n print(f""Step {step}, validation loss: {val_metrics['val_loss']}"")\n print(val_metrics)\n\n if checkpoint_manager:\n checkpoint_manager.close()\n\n\nif __name__ == ""__main__"":\n args = tyro.cli(Args)\n main(args)\n",python,tab +5560,3463210,"TERMINAL",0,0,"8",,terminal_output +5561,3464109,"TERMINAL",0,0,"9",,terminal_output +5562,3465119,"TERMINAL",0,0,"10",,terminal_output +5563,3465680,"jasmine/models/dynamics.py",0,0,"from typing import Dict\n\nimport jax\nimport jax.numpy as jnp\nimport flax.nnx as nnx\n\nfrom utils.nn import STTransformer, Transformer\n\n\nclass DynamicsMaskGIT(nnx.Module):\n """"""\n MaskGIT dynamics model\n\n Dimension keys:\n B: batch size\n T: sequence length\n N: number of patches per frame\n L: latent dimension\n V: vocabulary size (number of latents)\n """"""\n\n def __init__(\n self,\n model_dim: int,\n ffn_dim: int,\n num_latents: int,\n latent_action_dim: int,\n num_blocks: int,\n num_heads: int,\n dropout: float,\n max_noise_level: float,\n noise_buckets: int,\n mask_limit: float,\n param_dtype: jnp.dtype,\n dtype: jnp.dtype,\n use_flash_attention: bool,\n rngs: nnx.Rngs,\n ):\n self.model_dim = model_dim\n self.ffn_dim = ffn_dim\n self.num_latents = num_latents\n self.latent_action_dim = latent_action_dim\n self.num_blocks = num_blocks\n self.num_heads = num_heads\n self.dropout = dropout\n self.max_noise_level = max_noise_level\n self.noise_buckets = noise_buckets\n self.mask_limit = mask_limit\n self.param_dtype = param_dtype\n self.dtype = dtype\n self.use_flash_attention = use_flash_attention\n\n self.transformer = STTransformer(\n self.model_dim,\n self.model_dim,\n self.ffn_dim,\n self.num_latents,\n self.num_blocks,\n self.num_heads,\n self.dropout,\n self.param_dtype,\n self.dtype,\n use_flash_attention=self.use_flash_attention,\n rngs=rngs,\n )\n 
self.patch_embed = nnx.Embed(self.num_latents, self.model_dim, rngs=rngs)\n self.mask_token = nnx.Param(\n nnx.initializers.lecun_uniform()(rngs.params(), (1, 1, 1, self.model_dim))\n )\n self.action_up = nnx.Linear(\n self.latent_action_dim,\n self.model_dim,\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n rngs=rngs,\n )\n self.noise_level_embed = nnx.Embed(\n self.noise_buckets, self.model_dim, rngs=rngs\n )\n\n def apply_noise_augmentation(self, vid_embed_BTNM, rng, noise_level_B=None):\n B, T, N, M = vid_embed_BTNM.shape\n rng, _rng_noise_lvl, _rng_noise = jax.random.split(rng, 3)\n if noise_level_B is None:\n noise_level_B = jax.random.uniform(\n _rng_noise_lvl,\n shape=(B,),\n minval=0.0,\n maxval=self.max_noise_level,\n dtype=self.dtype,\n )\n noise_BTNM = jax.random.normal(_rng_noise, shape=(B, T, N, M), dtype=self.dtype)\n noise_bucket_idx_B = jnp.floor(\n (noise_level_B * self.noise_buckets) / self.max_noise_level\n ).astype(jnp.int32)\n\n # Clip noise_bucket_idx_B to ensure it stays within valid range to prevent NaNs\n noise_bucket_idx_B = jnp.clip(noise_bucket_idx_B, 0, self.noise_buckets - 1)\n\n noise_bucket_idx_B11 = noise_bucket_idx_B.reshape(B, 1, 1)\n noise_level_embed_B11M = self.noise_level_embed(noise_bucket_idx_B11)\n noise_level_embed_BT1M = jnp.tile(noise_level_embed_B11M, (1, T, 1, 1))\n noise_level_B111 = noise_level_B.reshape(B, 1, 1, 1)\n\n noise_augmented_vid_embed_BTNM = (\n jnp.sqrt(1 - noise_level_B111) * vid_embed_BTNM\n + jnp.sqrt(noise_level_B111) * noise_BTNM\n )\n\n return noise_augmented_vid_embed_BTNM, noise_level_embed_BT1M\n\n def __call__(\n self,\n batch: Dict[str, jax.Array],\n ) -> tuple[jax.Array, jax.Array]:\n # --- Mask videos ---\n video_tokens_BTN = batch[""video_tokens""]\n latent_actions_BTm11L = batch[""latent_actions""]\n vid_embed_BTNM = self.patch_embed(video_tokens_BTN)\n\n noise_stddev = 0.2 # Standard deviation for the Gaussian noise\n noise = jax.random.normal(batch[""rng""], vid_embed_BTNM.shape) * noise_stddev\n vid_embed_BTNM += noise\n\n B = vid_embed_BTNM.shape[0]\n rng, _rng_prob, *_rngs_mask = jax.random.split(batch[""rng""], B + 2)\n mask_prob = jax.random.uniform(_rng_prob, shape=(B,), minval=self.mask_limit)\n per_sample_shape = vid_embed_BTNM.shape[1:-1]\n mask = jax.vmap(\n lambda rng, prob: jax.random.bernoulli(rng, prob, per_sample_shape),\n in_axes=(0, 0),\n )(jnp.asarray(_rngs_mask), mask_prob)\n mask = mask.at[:, 0].set(False)\n vid_embed_BTNM = jnp.where(\n jnp.expand_dims(mask, -1), self.mask_token.value, vid_embed_BTNM\n )\n\n # --- Apply noise augmentation ---\n vid_embed_BTNM, noise_level_embed_BT1M = self.apply_noise_augmentation(\n vid_embed_BTNM, rng\n )\n\n # --- Predict transition ---\n act_embed_BTm11M = self.action_up(latent_actions_BTm11L)\n padded_act_embed_BT1M = jnp.pad(\n act_embed_BTm11M, ((0, 0), (1, 0), (0, 0), (0, 0))\n )\n vid_embed_BTNp2M = jnp.concatenate(\n [padded_act_embed_BT1M, noise_level_embed_BT1M, vid_embed_BTNM], axis=2\n )\n logits_BTNp2V = self.transformer(vid_embed_BTNp2M)\n logits_BTNV = logits_BTNp2V[:, :, 2:]\n return logits_BTNV, mask\n\n\nclass DynamicsCausal(nnx.Module):\n """"""Causal dynamics model""""""\n\n def __init__(\n self,\n model_dim: int,\n ffn_dim: int,\n num_latents: int,\n latent_action_dim: int,\n num_blocks: int,\n num_heads: int,\n dropout: float,\n max_noise_level: float,\n noise_buckets: int,\n param_dtype: jnp.dtype,\n dtype: jnp.dtype,\n use_flash_attention: bool,\n decode: bool,\n rngs: nnx.Rngs,\n ):\n self.model_dim = model_dim\n self.ffn_dim 
= ffn_dim\n self.num_latents = num_latents\n self.latent_action_dim = latent_action_dim\n self.num_blocks = num_blocks\n self.num_heads = num_heads\n self.dropout = dropout\n self.max_noise_level = max_noise_level\n self.noise_buckets = noise_buckets\n self.param_dtype = param_dtype\n self.dtype = dtype\n self.use_flash_attention = use_flash_attention\n self.decode = decode\n\n self.transformer = Transformer(\n self.model_dim,\n self.model_dim,\n self.ffn_dim,\n self.num_latents,\n self.num_blocks,\n self.num_heads,\n self.dropout,\n self.param_dtype,\n self.dtype,\n use_flash_attention=self.use_flash_attention,\n decode=self.decode,\n rngs=rngs,\n )\n self.patch_embed = nnx.Embed(self.num_latents, self.model_dim, rngs=rngs)\n self.action_up = nnx.Linear(\n self.latent_action_dim,\n self.model_dim,\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n rngs=rngs,\n )\n self.noise_level_embed = nnx.Embed(\n self.noise_buckets, self.model_dim, rngs=rngs\n )\n\n def apply_noise_augmentation(self, vid_embed_BTNM, rng, noise_level_B=None):\n B, T, N, M = vid_embed_BTNM.shape\n rng, _rng_noise_lvl, _rng_noise = jax.random.split(rng, 3)\n if noise_level_B is None:\n noise_level_B = jax.random.uniform(\n _rng_noise_lvl,\n shape=(B,),\n minval=0.0,\n maxval=self.max_noise_level,\n dtype=self.dtype,\n )\n noise_BTNM = jax.random.normal(_rng_noise, shape=(B, T, N, M), dtype=self.dtype)\n noise_bucket_idx_B = jnp.floor(\n (noise_level_B * self.noise_buckets) / self.max_noise_level\n ).astype(jnp.int32)\n\n # Clip noise_bucket_idx_B to ensure it stays within valid range to prevent NaNs\n noise_bucket_idx_B = jnp.clip(noise_bucket_idx_B, 0, self.noise_buckets - 1)\n\n noise_bucket_idx_B11 = noise_bucket_idx_B.reshape(B, 1, 1)\n noise_level_embed_B11M = self.noise_level_embed(noise_bucket_idx_B11)\n noise_level_embed_BT1M = jnp.tile(noise_level_embed_B11M, (1, T, 1, 1))\n noise_level_B111 = noise_level_B.reshape(B, 1, 1, 1)\n\n noise_augmented_vid_embed_BTNM = (\n jnp.sqrt(1 - noise_level_B111) * vid_embed_BTNM\n + jnp.sqrt(noise_level_B111) * noise_BTNM\n )\n\n return noise_augmented_vid_embed_BTNM, noise_level_embed_BT1M\n\n def __call__(\n self,\n batch: Dict[str, jax.Array],\n ) -> tuple[jax.Array, jax.Array]:\n video_tokens_BTN = batch[""video_tokens""]\n latent_actions_BTm11L = batch[""latent_actions""]\n vid_embed_BTNM = self.patch_embed(video_tokens_BTN)\n act_embed_BTm11M = self.action_up(latent_actions_BTm11L)\n padded_act_embed_BT1M = jnp.pad(\n act_embed_BTm11M, ((0, 0), (1, 0), (0, 0), (0, 0))\n )\n vid_embed_BTNM, noise_level_embed_BT1M = self.apply_noise_augmentation(\n vid_embed_BTNM, batch[""rng""]\n )\n vid_embed_BTNp2M = jnp.concatenate(\n [padded_act_embed_BT1M, noise_level_embed_BT1M, vid_embed_BTNM], axis=2\n )\n logits_BTNp2V = self.transformer(vid_embed_BTNp2M)\n logits_BTNV = logits_BTNp2V[:, :, 1:-1]\n return logits_BTNV, jnp.ones_like(video_tokens_BTN)\n",python,tab +5564,3465842,"jasmine/models/dynamics.py",606,8773," mask_limit: float,\n param_dtype: jnp.dtype,\n dtype: jnp.dtype,\n use_flash_attention: bool,\n rngs: nnx.Rngs,\n ):\n self.model_dim = model_dim\n self.ffn_dim = ffn_dim\n self.num_latents = num_latents\n self.latent_action_dim = latent_action_dim\n self.num_blocks = num_blocks\n self.num_heads = num_heads\n self.dropout = dropout\n self.mask_limit = mask_limit\n self.param_dtype = param_dtype\n self.dtype = dtype\n self.use_flash_attention = use_flash_attention\n\n self.transformer = STTransformer(\n self.model_dim,\n self.model_dim,\n self.ffn_dim,\n 
self.num_latents,\n self.num_blocks,\n self.num_heads,\n self.dropout,\n self.param_dtype,\n self.dtype,\n use_flash_attention=self.use_flash_attention,\n rngs=rngs,\n )\n self.patch_embed = nnx.Embed(self.num_latents, self.model_dim, rngs=rngs)\n self.mask_token = nnx.Param(\n nnx.initializers.lecun_uniform()(rngs.params(), (1, 1, 1, self.model_dim))\n )\n self.action_up = nnx.Linear(\n self.latent_action_dim,\n self.model_dim,\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n rngs=rngs,\n )\n\n def __call__(\n self,\n batch: Dict[str, jax.Array],\n ) -> tuple[jax.Array, jax.Array]:\n # --- Mask videos ---\n video_tokens_BTN = batch[""video_tokens""]\n latent_actions_BTm11L = batch[""latent_actions""]\n vid_embed_BTNM = self.patch_embed(video_tokens_BTN)\n\n batch_size = vid_embed_BTNM.shape[0]\n _rng_prob, *_rngs_mask = jax.random.split(batch[""mask_rng""], batch_size + 1)\n mask_prob = jax.random.uniform(\n _rng_prob, shape=(batch_size,), minval=self.mask_limit\n )\n per_sample_shape = vid_embed_BTNM.shape[1:-1]\n mask = jax.vmap(\n lambda rng, prob: jax.random.bernoulli(rng, prob, per_sample_shape),\n in_axes=(0, 0),\n )(jnp.asarray(_rngs_mask), mask_prob)\n mask = mask.at[:, 0].set(False)\n vid_embed_BTNM = jnp.where(\n jnp.expand_dims(mask, -1), self.mask_token.value, vid_embed_BTNM\n )\n\n # --- Predict transition ---\n act_embed_BTm11M = self.action_up(latent_actions_BTm11L)\n padded_act_embed_BT1M = jnp.pad(\n act_embed_BTm11M, ((0, 0), (1, 0), (0, 0), (0, 0))\n )\n vid_embed_BTNp1M = jnp.concatenate(\n [padded_act_embed_BT1M, vid_embed_BTNM], axis=2\n )\n logits_BTNp1V = self.transformer(vid_embed_BTNp1M)\n logits_BTNV = logits_BTNp1V[:, :, 1:]\n return logits_BTNV, mask\n\n\nclass DynamicsCausal(nnx.Module):\n """"""Causal dynamics model""""""\n\n def __init__(\n self,\n model_dim: int,\n ffn_dim: int,\n num_latents: int,\n latent_action_dim: int,\n num_blocks: int,\n num_heads: int,\n dropout: float,\n param_dtype: jnp.dtype,\n dtype: jnp.dtype,\n use_flash_attention: bool,\n decode: bool,\n rngs: nnx.Rngs,\n ):\n self.model_dim = model_dim\n self.ffn_dim = ffn_dim\n self.num_latents = num_latents\n self.latent_action_dim = latent_action_dim\n self.num_blocks = num_blocks\n self.num_heads = num_heads\n self.dropout = dropout\n self.param_dtype = param_dtype\n self.dtype = dtype\n self.use_flash_attention = use_flash_attention\n self.decode = decode\n\n self.transformer = Transformer(\n self.model_dim,\n self.model_dim,\n self.ffn_dim,\n self.num_latents,\n self.num_blocks,\n self.num_heads,\n self.dropout,\n self.param_dtype,\n self.dtype,\n use_flash_attention=self.use_flash_attention,\n decode=self.decode,\n rngs=rngs,\n )\n self.patch_embed = nnx.Embed(self.num_latents, self.model_dim, rngs=rngs)\n self.action_up = nnx.Linear(\n self.latent_action_dim,\n self.model_dim,\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n rngs=rngs,\n )\n\n def __call__(\n self,\n batch: Dict[str, jax.Array],\n ) -> tuple[jax.Array, jax.Array]:\n video_tokens_BTN = batch[""video_tokens""]\n latent_actions_BTm11L = batch[""latent_actions""]\n vid_embed_BTNM = self.patch_embed(video_tokens_BTN)\n act_embed_BTm11M = self.action_up(latent_actions_BTm11L)\n padded_act_embed_BT1M = jnp.pad(\n act_embed_BTm11M, ((0, 0), (1, 0), (0, 0), (0, 0))\n )\n vid_embed_BTNp1M = jnp.concatenate(\n [padded_act_embed_BT1M, vid_embed_BTNM], axis=2\n )\n logits_BTNp1V = self.transformer(vid_embed_BTNp1M)\n logits_BTNV = logits_BTNp1V[:, :, :-1]\n",python,content 
+5565,3466148,"TERMINAL",0,0,"1",,terminal_output +5566,3467150,"TERMINAL",0,0,"2",,terminal_output +5567,3468213,"TERMINAL",0,0,"3",,terminal_output +5568,3469241,"TERMINAL",0,0,"4",,terminal_output +5569,3470261,"TERMINAL",0,0,"5",,terminal_output +5570,3470738,"jasmine/models/dynamics.py",2146,0,"",python,selection_mouse +5571,3471259,"TERMINAL",0,0,"6",,terminal_output +5572,3471432,"jasmine/models/dynamics.py",2133,0,"",python,selection_mouse +5573,3471587,"jasmine/models/dynamics.py",2125,16,"video_tokens_BTN",python,selection_mouse +5574,3472278,"jasmine/models/dynamics.py",2155,0,"",python,selection_mouse +5575,3472284,"TERMINAL",0,0,"7",,terminal_output +5576,3472428,"jasmine/models/dynamics.py",2151,12,"video_tokens",python,selection_mouse +5577,3472970,"jasmine/models/dynamics.py",2146,0,"",python,selection_mouse +5578,3473127,"jasmine/models/dynamics.py",2144,5,"batch",python,selection_mouse +5579,3473308,"TERMINAL",0,0,"8",,terminal_output +5580,3473619,"jasmine/models/dynamics.py",2138,0,"",python,selection_mouse +5581,3473759,"jasmine/models/dynamics.py",2125,16,"video_tokens_BTN",python,selection_mouse +5582,3474325,"TERMINAL",0,0,"9",,terminal_output +5583,3475353,"TERMINAL",0,0,"20",,terminal_output +5584,3475845,"jasmine/models/dynamics.py",2139,0,"",python,selection_mouse +5585,3476383,"TERMINAL",0,0,"1",,terminal_output +5586,3476882,"jasmine/models/dynamics.py",2208,0,"",python,selection_mouse +5587,3477394,"TERMINAL",0,0,"2",,terminal_output +5588,3478415,"TERMINAL",0,0,"3",,terminal_output +5589,3479440,"TERMINAL",0,0,"4",,terminal_output +5590,3480469,"TERMINAL",0,0,"5",,terminal_output +5591,3481479,"TERMINAL",0,0,"6",,terminal_output +5592,3481831,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/jasmine/genie.py",0,0,"from typing import Dict\n\nimport einops\nimport jax\nimport jax.numpy as jnp\nimport flax.nnx as nnx\nimport orbax.checkpoint as ocp\n\nfrom models.dynamics import DynamicsMaskGIT, DynamicsCausal\nfrom models.lam import LatentActionModel\nfrom models.tokenizer import TokenizerVQVAE\n\n\nclass Genie(nnx.Module):\n """"""Genie model""""""\n\n def __init__(\n self,\n in_dim: int,\n tokenizer_dim: int,\n tokenizer_ffn_dim: int,\n latent_patch_dim: int,\n num_patch_latents: int,\n patch_size: int,\n tokenizer_num_blocks: int,\n tokenizer_num_heads: int,\n lam_dim: int,\n lam_ffn_dim: int,\n latent_action_dim: int,\n num_actions: int,\n lam_patch_size: int,\n lam_num_blocks: int,\n lam_num_heads: int,\n lam_co_train: bool,\n use_gt_actions: bool,\n dyna_type: str,\n dyna_dim: int,\n dyna_ffn_dim: int,\n dyna_num_blocks: int,\n dyna_num_heads: int,\n param_dtype: jnp.dtype,\n dtype: jnp.dtype,\n use_flash_attention: bool,\n decode: bool,\n rngs: nnx.Rngs,\n dropout: float = 0.0,\n mask_limit: float = 0.0,\n ):\n # --- Tokenizer ---\n self.in_dim = in_dim\n self.tokenizer_dim = tokenizer_dim\n self.tokenizer_ffn_dim = tokenizer_ffn_dim\n self.latent_patch_dim = latent_patch_dim\n self.num_patch_latents = num_patch_latents\n self.patch_size = patch_size\n self.tokenizer_num_blocks = tokenizer_num_blocks\n self.tokenizer_num_heads = tokenizer_num_heads\n # --- LAM ---\n self.lam_dim = lam_dim\n self.lam_ffn_dim = lam_ffn_dim\n self.latent_action_dim = latent_action_dim\n self.num_actions = num_actions\n self.lam_patch_size = lam_patch_size\n self.lam_num_blocks = lam_num_blocks\n self.lam_num_heads = lam_num_heads\n self.lam_co_train = lam_co_train\n self.use_gt_actions = use_gt_actions\n # --- Dynamics ---\n self.dyna_type = dyna_type\n 
self.dyna_dim = dyna_dim\n self.dyna_ffn_dim = dyna_ffn_dim\n self.dyna_num_blocks = dyna_num_blocks\n self.dyna_num_heads = dyna_num_heads\n self.param_dtype = param_dtype\n self.dtype = dtype\n self.use_flash_attention = use_flash_attention\n self.dropout = dropout\n self.mask_limit = mask_limit\n self.decode = decode\n\n self.tokenizer = TokenizerVQVAE(\n in_dim=self.in_dim,\n model_dim=self.tokenizer_dim,\n ffn_dim=self.tokenizer_ffn_dim,\n latent_dim=self.latent_patch_dim,\n num_latents=self.num_patch_latents,\n patch_size=self.patch_size,\n num_blocks=self.tokenizer_num_blocks,\n num_heads=self.tokenizer_num_heads,\n dropout=0.0,\n codebook_dropout=0.0,\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n use_flash_attention=self.use_flash_attention,\n rngs=rngs,\n )\n if self.use_gt_actions:\n self.action_embed = nnx.Embed(\n self.num_actions, self.latent_action_dim, rngs=rngs\n )\n self.lam = None\n else:\n self.lam = LatentActionModel(\n in_dim=self.in_dim,\n model_dim=self.lam_dim,\n ffn_dim=self.lam_ffn_dim,\n latent_dim=self.latent_patch_dim,\n num_latents=self.num_actions,\n patch_size=self.lam_patch_size,\n num_blocks=self.lam_num_blocks,\n num_heads=self.lam_num_heads,\n dropout=0.0,\n codebook_dropout=0.0,\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n use_flash_attention=self.use_flash_attention,\n rngs=rngs,\n )\n self.action_embed = None\n if self.dyna_type == ""maskgit"":\n self.dynamics = DynamicsMaskGIT(\n model_dim=self.dyna_dim,\n ffn_dim=self.dyna_ffn_dim,\n num_latents=self.num_patch_latents,\n latent_action_dim=self.latent_action_dim,\n num_blocks=self.dyna_num_blocks,\n num_heads=self.dyna_num_heads,\n dropout=self.dropout,\n mask_limit=self.mask_limit,\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n use_flash_attention=self.use_flash_attention,\n rngs=rngs,\n )\n elif self.dyna_type == ""causal"":\n self.dynamics = DynamicsCausal(\n model_dim=self.dyna_dim,\n ffn_dim=self.dyna_ffn_dim,\n num_latents=self.num_patch_latents,\n latent_action_dim=self.latent_action_dim,\n num_blocks=self.dyna_num_blocks,\n num_heads=self.dyna_num_heads,\n dropout=self.dropout,\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n use_flash_attention=self.use_flash_attention,\n decode=decode,\n rngs=rngs,\n )\n else:\n raise ValueError(f""Invalid dynamics type: {self.dyna_type}"")\n\n def __call__(\n self,\n batch: Dict[str, jax.Array],\n ) -> Dict[str, jax.Array]:\n videos_BTHWC = batch[""videos""]\n tokenizer_outputs = self.tokenizer.vq_encode(videos_BTHWC, training=False)\n token_indices_BTN = tokenizer_outputs[""indices""]\n latent_actions_BTm11L = None\n action_embeddings_BTm11L = None\n if self.use_gt_actions:\n assert self.action_embed is not None\n action_indices_E = None\n action_embeddings_BT1L = self.action_embed(batch[""actions""]).reshape(\n *batch[""actions""].shape[:2], 1, self.latent_action_dim\n )\n action_embeddings_BTm11L = action_embeddings_BT1L[:, :-1]\n else:\n assert self.lam is not None\n lam_outputs = self.lam.vq_encode(videos_BTHWC, training=False)\n z_q_BTm11L = lam_outputs[""z_q""]\n action_indices_E = lam_outputs[""indices""]\n latent_actions_BTm11L = jax.lax.cond(\n self.lam_co_train,\n lambda: z_q_BTm11L,\n lambda: jax.lax.stop_gradient(z_q_BTm11L),\n )\n outputs = dict(\n video_tokens=jax.lax.stop_gradient(token_indices_BTN),\n latent_actions=(\n action_embeddings_BTm11L\n if self.use_gt_actions\n else latent_actions_BTm11L\n ),\n )\n outputs[""mask_rng""] = batch[""rng""]\n dyna_logits_BTNV, dyna_mask = self.dynamics(outputs)\n 
outputs[""token_logits""] = dyna_logits_BTNV\n outputs[""mask""] = dyna_mask\n mle_indices_BTN = jnp.argmax(outputs[""token_logits""], axis=-1)\n H, W = batch[""videos""].shape[2:4]\n outputs[""recon""] = self.tokenizer.decode(mle_indices_BTN, (H, W))\n if action_indices_E is not None:\n outputs[""lam_indices""] = action_indices_E\n return outputs\n\n def sample(\n self,\n batch: Dict[str, jax.Array],\n seq_len: int,\n temperature: float = 1,\n sample_argmax: bool = False,\n maskgit_steps: int = 25,\n ) -> tuple[jax.Array, jax.Array]:\n if self.dyna_type == ""maskgit"":\n return self.sample_maskgit(\n batch, seq_len, maskgit_steps, temperature, sample_argmax\n )\n elif self.dyna_type == ""causal"":\n return self.sample_causal(batch, seq_len, temperature, sample_argmax)\n else:\n raise ValueError(f""Dynamics model type unknown: {self.dyna_type}"")\n\n def sample_maskgit(\n self,\n batch: Dict[str, jax.Array],\n seq_len: int,\n steps: int = 25,\n temperature: float = 1,\n sample_argmax: bool = False,\n ) -> tuple[jax.Array, jax.Array]:\n """"""\n Autoregressively samples up to `seq_len` future frames, following Figure 8 of the paper.\n\n - Input frames are tokenized once.\n - Future frames are generated autoregressively in token space.\n - All frames are detokenized in a single pass.\n\n Note:\n - For interactive or step-wise sampling, detokenization should occur after each action.\n - To maintain consistent tensor shapes across timesteps, all current and future frames are decoded at every step.\n - Temporal causal structure is preserved by\n a) reapplying the mask before each decoding step.\n b) a temporal causal mask is applied within each ST-transformer block.\n\n Dimension keys:\n B: batch size\n T: number of input (conditioning) frames\n N: number of patches per frame\n M: model dimension\n S: sequence length\n H: height\n W: width\n E: B * (S - 1)\n P: S * N\n """"""\n assert isinstance(self.dynamics, DynamicsMaskGIT)\n # --- Encode videos and actions ---\n videos_BTHWC = batch[""videos""]\n tokenizer_out = self.tokenizer.vq_encode(videos_BTHWC, training=False)\n token_idxs_BTN = tokenizer_out[""indices""]\n B, T, N = token_idxs_BTN.shape\n pad_shape = (B, seq_len - T, N)\n pad = jnp.zeros(pad_shape, dtype=token_idxs_BTN.dtype)\n token_idxs_BSN = jnp.concatenate([token_idxs_BTN, pad], axis=1)\n init_logits_BSNV = jnp.zeros(\n shape=(*token_idxs_BSN.shape, self.num_patch_latents)\n )\n if self.use_gt_actions:\n assert self.action_embed is not None\n latent_actions_BT1L = self.action_embed(batch[""actions""]).reshape(\n *batch[""actions""].shape[:2], 1, self.latent_action_dim\n )\n latent_actions_BTm11L = latent_actions_BT1L[:, :-1]\n action_tokens_EL = latent_actions_BTm11L.reshape(-1, self.latent_action_dim)\n else:\n assert self.lam is not None\n latent_actions_E = batch[""latent_actions""]\n action_tokens_EL = self.lam.vq.get_codes(latent_actions_E)\n\n # --- Extract submodule state ---\n dynamics_state = nnx.state(self.dynamics)\n\n @nnx.scan(in_axes=(nnx.Carry, 0), out_axes=nnx.Carry)\n def maskgit_step_fn(\n carry: tuple[jax.Array, jax.Array, jax.Array, jax.Array, jax.Array],\n step: jax.Array,\n ) -> tuple[jax.Array, jax.Array, jax.Array, jax.Array, jax.Array]:\n rng, token_idxs_BSN, logits_BSNV, mask_BSN, action_tokens_EL = carry\n S, N = token_idxs_BSN.shape[1:]\n L = action_tokens_EL.shape[-1]\n\n # We need to reconstruct the submodule inside scan body to prevent trace context mismatches\n dynamics_maskgit = DynamicsMaskGIT(\n model_dim=self.dyna_dim,\n 
ffn_dim=self.dyna_ffn_dim,\n num_latents=self.num_patch_latents,\n latent_action_dim=self.latent_action_dim,\n num_blocks=self.dyna_num_blocks,\n num_heads=self.dyna_num_heads,\n dropout=self.dropout,\n mask_limit=self.mask_limit,\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n use_flash_attention=self.use_flash_attention,\n rngs=nnx.Rngs(0),\n )\n nnx.update(dynamics_maskgit, dynamics_state)\n\n # --- Construct + encode video ---\n vid_embed_BSNM = dynamics_maskgit.patch_embed(token_idxs_BSN)\n mask_token_111M = dynamics_maskgit.mask_token.value\n mask_expanded_BSN1 = mask_BSN[..., None]\n vid_embed_BSNM = jnp.where(\n mask_expanded_BSN1, mask_token_111M, vid_embed_BSNM\n )\n\n # --- Predict transition ---\n action_tokens_BSm1L = jnp.reshape(action_tokens_EL, (B, S - 1, L))\n act_embed_BSm1M = dynamics_maskgit.action_up(action_tokens_BSm1L)\n act_embed_BSM = jnp.pad(act_embed_BSm1M, ((0, 0), (1, 0), (0, 0)))\n act_embed_BS1M = jnp.reshape(\n act_embed_BSM, (B, S, 1, act_embed_BSM.shape[-1])\n )\n vid_embed_BSNp1M = jnp.concatenate([act_embed_BS1M, vid_embed_BSNM], axis=2)\n unmasked_ratio = jnp.cos(jnp.pi * (step + 1) / (steps * 2))\n step_temp = temperature * (1.0 - unmasked_ratio)\n final_logits_BSNp1V = (\n dynamics_maskgit.transformer(vid_embed_BSNp1M) / step_temp\n )\n final_logits_BSNV = final_logits_BSNp1V[:, :, 1:]\n\n # --- Sample new tokens for final frame ---\n if sample_argmax:\n sampled_token_idxs_BSN = jnp.argmax(final_logits_BSNV, axis=-1)\n else:\n rng, _rng = jax.random.split(rng)\n sampled_token_idxs_BSN = jax.random.categorical(_rng, final_logits_BSNV)\n gather_fn = jax.vmap(jax.vmap(jax.vmap(lambda x, y: x[y])))\n final_token_probs_BSN = gather_fn(\n jax.nn.softmax(final_logits_BSNV), sampled_token_idxs_BSN\n )\n final_token_probs_BSN += ~mask_BSN\n # Update masked tokens and logits only\n token_idxs_BSN = jnp.where(mask_BSN, sampled_token_idxs_BSN, token_idxs_BSN)\n logits_BSNV = jnp.where(\n jnp.expand_dims(mask_BSN, -1), final_logits_BSNV, logits_BSNV\n )\n\n # --- Update mask ---\n num_unmasked_tokens = jnp.round(N * (1.0 - unmasked_ratio)).astype(int)\n final_token_probs_flat_BP = einops.rearrange(\n final_token_probs_BSN, ""b s n -> b (s n)""\n )\n idx_mask_P = (\n jnp.arange(final_token_probs_flat_BP.shape[-1])\n <= N - num_unmasked_tokens\n )\n sorted_idxs_BP = jnp.argsort(final_token_probs_flat_BP, axis=-1)\n mask_update_fn = jax.vmap(lambda msk, ids: msk.at[ids].set(idx_mask_P))\n mask_flat_BP = einops.rearrange(mask_BSN, ""b s n -> b (s n)"")\n new_mask_flat_BP = mask_update_fn(mask_flat_BP, sorted_idxs_BP)\n new_mask_BSN = einops.rearrange(new_mask_flat_BP, ""b (s n) -> b s n"", n=N)\n\n new_carry = (\n rng,\n token_idxs_BSN,\n logits_BSNV,\n new_mask_BSN,\n action_tokens_EL,\n )\n return new_carry\n\n @nnx.scan(in_axes=(nnx.Carry, 0), out_axes=nnx.Carry)\n def generation_step_fn(\n carry: tuple[jax.Array, jax.Array, jax.Array], step_t: jax.Array\n ) -> tuple[jax.Array, jax.Array, jax.Array]:\n rng, current_token_idxs_BSN, current_logits_BSNV = carry\n rng, step_rng = jax.random.split(rng)\n\n # Mask current frame (i.e., t == step_t)\n mask_S = jnp.arange(seq_len) == step_t\n mask_BSN = jnp.broadcast_to(mask_S[None, :, None], (B, seq_len, N)).astype(\n bool\n )\n masked_token_idxs_BSN = current_token_idxs_BSN * ~mask_BSN\n masked_logits_BSNV = current_logits_BSNV * jnp.expand_dims(~mask_BSN, -1)\n\n # --- Initialize and run MaskGIT loop ---\n init_carry_maskgit = (\n step_rng,\n masked_token_idxs_BSN,\n masked_logits_BSNV,\n mask_BSN,\n 
action_tokens_EL,\n )\n final_carry_maskgit = maskgit_step_fn(init_carry_maskgit, jnp.arange(steps))\n updated_token_idxs_BSN = final_carry_maskgit[1]\n updated_logits_BSNV = final_carry_maskgit[2]\n new_carry = (rng, updated_token_idxs_BSN, updated_logits_BSNV)\n return new_carry\n\n # --- Run the autoregressive generation using jax.lax.scan ---\n initial_carry = (batch[""rng""], token_idxs_BSN, init_logits_BSNV)\n timesteps_to_scan = jnp.arange(T, seq_len)\n final_carry = generation_step_fn(initial_carry, timesteps_to_scan)\n final_token_idxs_BSN = final_carry[1]\n final_logits_BSNV = final_carry[2]\n\n # --- Decode all tokens at once at the end ---\n H, W = batch[""videos""].shape[2:4]\n final_frames_BSHWC = self.tokenizer.decode(\n final_token_idxs_BSN,\n video_hw=(H, W),\n )\n return final_frames_BSHWC, final_logits_BSNV\n\n def sample_causal(\n self,\n batch: Dict[str, jax.Array],\n seq_len: int,\n temperature: float = 1,\n sample_argmax: bool = False,\n ) -> tuple[jax.Array, jax.Array]:\n """"""\n Autoregressively samples up to `seq_len` future frames, following Figure 8 of the paper.\n\n - Input frames are tokenized once.\n - Future frames are generated autoregressively in token space.\n - All frames are detokenized in a single pass.\n\n Note:\n - For interactive or step-wise sampling, detokenization should occur after each action.\n - To maintain consistent tensor shapes across timesteps, all current and future frames are decoded at every step.\n - Temporal causal structure is preserved by\n a) reapplying the mask before each decoding step.\n b) a temporal causal mask is applied within each ST-transformer block.\n\n Dimension keys:\n B: batch size\n T: number of input (conditioning) frames\n N: number of patches per frame\n M: model dimension\n S: sequence length\n H: height\n W: width\n E: B * (S - 1)\n """"""\n assert isinstance(self.dynamics, DynamicsCausal)\n # --- Encode videos and actions ---\n videos_BTHWC = batch[""videos""]\n tokenizer_out = self.tokenizer.vq_encode(videos_BTHWC, training=False)\n token_idxs_BTN = tokenizer_out[""indices""]\n B, T, N = token_idxs_BTN.shape\n pad_shape = (B, seq_len - T, N)\n pad = jnp.zeros(pad_shape, dtype=token_idxs_BTN.dtype)\n token_idxs_BSN = jnp.concatenate([token_idxs_BTN, pad], axis=1)\n logits_BSNV = jnp.zeros((*token_idxs_BSN.shape, self.num_patch_latents))\n dynamics_state = nnx.state(self.dynamics)\n\n if self.use_gt_actions:\n assert self.action_embed is not None\n latent_actions_BT1L = self.action_embed(batch[""actions""]).reshape(\n *batch[""actions""].shape[:2], 1, self.latent_action_dim\n )\n latent_actions_BTm11L = latent_actions_BT1L[:, :-1]\n action_tokens_EL = latent_actions_BTm11L.reshape(-1, self.latent_action_dim)\n else:\n assert self.lam is not None\n latent_actions_E = batch[""latent_actions""]\n action_tokens_EL = self.lam.vq.get_codes(latent_actions_E)\n\n @nnx.scan(in_axes=(nnx.Carry, 0), out_axes=nnx.Carry)\n def causal_step_fn(\n carry: tuple[jax.Array, jax.Array, jax.Array, jax.Array, jax.Array],\n step_n: jax.Array,\n ) -> tuple[jax.Array, jax.Array, jax.Array, jax.Array, jax.Array]:\n rng, token_idxs_BSN, logits_BSNV, action_tokens_EL, step_t = carry\n S, N = token_idxs_BSN.shape[1:]\n L = action_tokens_EL.shape[-1]\n\n # We need to reconstruct the submodule inside scan body to prevent trace context mismatches\n dynamics_causal = DynamicsCausal(\n model_dim=self.dyna_dim,\n ffn_dim=self.dyna_ffn_dim,\n num_latents=self.num_patch_latents,\n latent_action_dim=self.latent_action_dim,\n 
num_blocks=self.dyna_num_blocks,\n num_heads=self.dyna_num_heads,\n dropout=self.dropout,\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n use_flash_attention=self.use_flash_attention,\n decode=self.decode,\n rngs=nnx.Rngs(0),\n )\n nnx.update(dynamics_causal, dynamics_state)\n\n # --- Construct + encode video ---\n vid_embed_BSNM = dynamics_causal.patch_embed(token_idxs_BSN)\n\n # --- Predict transition ---\n action_tokens_BSm1L = jnp.reshape(action_tokens_EL, (B, S - 1, L))\n act_embed_BSm1M = dynamics_causal.action_up(action_tokens_BSm1L)\n act_embed_BSM = jnp.pad(act_embed_BSm1M, ((0, 0), (1, 0), (0, 0)))\n act_embed_BS1M = jnp.reshape(\n act_embed_BSM, (B, S, 1, act_embed_BSM.shape[-1])\n )\n vid_embed_BSNp1M = jnp.concatenate([act_embed_BS1M, vid_embed_BSNM], axis=2)\n final_logits_BTNp1V = (\n dynamics_causal.transformer(vid_embed_BSNp1M, (step_t, step_n))\n / temperature\n )\n final_logits_BV = final_logits_BTNp1V[:, step_t, step_n, :]\n\n # --- Sample new tokens for final frame ---\n if sample_argmax:\n sampled_token_idxs_B = jnp.argmax(final_logits_BV, axis=-1)\n else:\n rng, _rng = jax.random.split(rng)\n sampled_token_idxs_B = jax.random.categorical(_rng, final_logits_BV)\n # Update next tokens only\n token_idxs_BSN = token_idxs_BSN.at[:, step_t, step_n].set(\n sampled_token_idxs_B\n )\n logits_BSNV = logits_BSNV.at[:, step_t, step_n].set(final_logits_BV)\n\n new_carry = (rng, token_idxs_BSN, logits_BSNV, action_tokens_EL, step_t)\n return new_carry\n\n @nnx.scan(in_axes=(nnx.Carry, 0), out_axes=nnx.Carry)\n def generation_step_fn(\n carry: tuple[jax.Array, jax.Array, jax.Array], step_t: jax.Array\n ) -> tuple[jax.Array, jax.Array, jax.Array]:\n rng, current_token_idxs_BSN, current_logits_BSNV = carry\n rng, step_rng = jax.random.split(rng)\n\n # --- Initialize and run causal loop ---\n init_carry_causal = (\n step_rng,\n current_token_idxs_BSN,\n current_logits_BSNV,\n action_tokens_EL,\n step_t,\n )\n final_carry_causal = causal_step_fn(init_carry_causal, jnp.arange(N))\n updated_token_idxs_BSN = final_carry_causal[1]\n updated_logits_BSNV = final_carry_causal[2]\n new_carry = (rng, updated_token_idxs_BSN, updated_logits_BSNV)\n return new_carry\n\n # --- Run the autoregressive generation using jax.lax.scan ---\n initial_carry = (batch[""rng""], token_idxs_BSN, logits_BSNV)\n timesteps_to_scan = jnp.arange(T, seq_len)\n final_carry = generation_step_fn(initial_carry, timesteps_to_scan)\n final_token_idxs_BSN = final_carry[1]\n final_logits_BSNV = final_carry[2]\n\n # --- Decode all tokens at once at the end ---\n H, W = batch[""videos""].shape[2:4]\n final_frames_BSHWC = self.tokenizer.decode(\n final_token_idxs_BSN,\n video_hw=(H, W),\n )\n return final_frames_BSHWC, final_logits_BSNV\n\n def vq_encode(self, batch: Dict[str, jax.Array], training: bool) -> jax.Array:\n # --- Preprocess videos ---\n assert self.lam is not None\n video_BTHWC = batch[""videos""]\n lam_output: Dict[str, jax.Array] = self.lam.vq_encode(\n video_BTHWC, training=training\n )\n lam_indices_E = lam_output[""indices""]\n return lam_indices_E\n\n\n# FIXME (f.srambical): add conversion script for old checkpoints\ndef restore_genie_components(\n optimizer: nnx.ModelAndOptimizer,\n sharding: jax.sharding.NamedSharding,\n rng: jax.Array,\n args,\n) -> nnx.ModelAndOptimizer:\n """"""Restore pre-trained Genie components""""""\n rng_tokenizer, rng_lam = jax.random.split(rng)\n rngs_tokenizer = nnx.Rngs(rng_tokenizer)\n rngs_lam = nnx.Rngs(rng_lam)\n\n tx = optimizer.tx\n model = optimizer.model\n 
handler_registry = ocp.handlers.DefaultCheckpointHandlerRegistry()\n handler_registry.add(\n ""model_state"", ocp.args.PyTreeRestore, ocp.handlers.PyTreeCheckpointHandler\n )\n\n checkpoint_options = ocp.CheckpointManagerOptions(\n step_format_fixed_length=6,\n )\n tokenizer_checkpoint_manager = ocp.CheckpointManager(\n directory=args.tokenizer_checkpoint,\n options=checkpoint_options,\n handler_registry=handler_registry,\n )\n dummy_tokenizer = TokenizerVQVAE(\n in_dim=args.image_channels,\n model_dim=args.tokenizer_dim,\n ffn_dim=args.tokenizer_ffn_dim,\n latent_dim=args.latent_patch_dim,\n num_latents=args.num_patch_latents,\n patch_size=args.patch_size,\n num_blocks=args.tokenizer_num_blocks,\n num_heads=args.tokenizer_num_heads,\n dropout=args.dropout,\n codebook_dropout=args.dropout,\n param_dtype=args.param_dtype,\n dtype=args.dtype,\n use_flash_attention=args.use_flash_attention,\n rngs=rngs_tokenizer,\n )\n dummy_tokenizer_optimizer = nnx.ModelAndOptimizer(dummy_tokenizer, tx)\n dummy_tokenizer_optimizer_state = nnx.state(dummy_tokenizer_optimizer)\n abstract_sharded_tokenizer_optimizer_state = _create_abstract_sharded_pytree(\n dummy_tokenizer_optimizer_state, sharding\n )\n restored_tokenizer = tokenizer_checkpoint_manager.restore(\n step=tokenizer_checkpoint_manager.latest_step(),\n args=ocp.args.Composite(\n model_state=ocp.args.PyTreeRestore( # type: ignore\n abstract_sharded_tokenizer_optimizer_state # type: ignore\n ),\n ),\n )[""model_state""]\n nnx.update(dummy_tokenizer_optimizer.model, restored_tokenizer.model)\n model.tokenizer = dummy_tokenizer_optimizer.model\n tokenizer_checkpoint_manager.close()\n\n if args.lam_checkpoint:\n lam_checkpoint_manager = ocp.CheckpointManager(\n directory=args.lam_checkpoint,\n options=checkpoint_options,\n handler_registry=handler_registry,\n )\n dummy_lam = LatentActionModel(\n in_dim=args.image_channels,\n model_dim=args.lam_dim,\n ffn_dim=args.lam_ffn_dim,\n latent_dim=args.latent_patch_dim,\n num_latents=args.num_actions,\n patch_size=args.lam_patch_size,\n num_blocks=args.lam_num_blocks,\n num_heads=args.lam_num_heads,\n dropout=args.dropout,\n codebook_dropout=args.dropout,\n param_dtype=args.param_dtype,\n dtype=args.dtype,\n use_flash_attention=args.use_flash_attention,\n rngs=rngs_lam,\n )\n dummy_lam_optimizer = nnx.ModelAndOptimizer(dummy_lam, tx)\n dummy_lam_optimizer_state = nnx.state(dummy_lam_optimizer)\n abstract_sharded_lam_optimizer_state = _create_abstract_sharded_pytree(\n dummy_lam_optimizer_state, sharding\n )\n restored_lam_optimizer = lam_checkpoint_manager.restore(\n step=lam_checkpoint_manager.latest_step(),\n args=ocp.args.Composite(\n model_state=ocp.args.PyTreeRestore( # type: ignore\n abstract_sharded_lam_optimizer_state # type: ignore\n ),\n ),\n )[""model_state""]\n nnx.update(dummy_lam_optimizer.model, restored_lam_optimizer.model)\n model.lam = dummy_lam_optimizer.model\n # Remove the LAM decoder to save memory and avoid unnecessary computation.\n del model.lam.decoder\n lam_checkpoint_manager.close()\n\n # Reinitialize the optimizer states\n optimizer = nnx.ModelAndOptimizer(model, tx)\n return optimizer\n\n\ndef _create_abstract_sharded_pytree(\n pytree_template: nnx.GraphState, sharding_spec: jax.sharding.NamedSharding\n) -> jax.Array:\n """"""Replaces arrays in a pytree with ShapeDtypeStructs having the given sharding.""""""\n\n def map_fn(leaf_template):\n if hasattr(leaf_template, ""shape"") and hasattr(leaf_template, ""dtype""):\n return jax.ShapeDtypeStruct(\n leaf_template.shape, 
leaf_template.dtype, sharding=sharding_spec\n            )\n        return leaf_template\n\n    return jax.tree_util.tree_map(map_fn, pytree_template)\n",python,tab
[rows 5593–5601: TERMINAL per-second timer ticks (terminal_output), counting 5:28 → 5:36]
+5602,3490909,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/jasmine/genie.py",5424,0,"",python,selection_mouse
+5603,3490915,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/jasmine/genie.py",5423,0,"",python,selection_command
+5604,3491496,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/jasmine/genie.py",5483,0,"",python,selection_mouse
+5605,3491665,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/jasmine/genie.py",5478,12,"videos_BTHWC",python,selection_mouse
[rows 5606–5653: TERMINAL per-second timer ticks (terminal_output), counting 5:37 → 6:25]
+5654,3540865,"TERMINAL",0,0,"6",,terminal_output +5655,3541822,"TERMINAL",0,0,"7",,terminal_output +5656,3542780,"TERMINAL",0,0,"8",,terminal_output +5657,3544045,"TERMINAL",0,0,"9",,terminal_output +5658,3544856,"TERMINAL",0,0,"30",,terminal_output +5659,3545840,"TERMINAL",0,0,"1",,terminal_output +5660,3546862,"TERMINAL",0,0,"2",,terminal_output +5661,3547884,"TERMINAL",0,0,"3",,terminal_output +5662,3548897,"TERMINAL",0,0,"4",,terminal_output +5663,3549979,"TERMINAL",0,0,"5",,terminal_output +5664,3550973,"TERMINAL",0,0,"6",,terminal_output +5665,3551957,"TERMINAL",0,0,"7",,terminal_output +5666,3553034,"TERMINAL",0,0,"8",,terminal_output +5667,3554000,"TERMINAL",0,0,"9",,terminal_output +5668,3555020,"TERMINAL",0,0,"40",,terminal_output +5669,3556066,"TERMINAL",0,0,"1",,terminal_output +5670,3557066,"TERMINAL",0,0,"2",,terminal_output +5671,3558090,"TERMINAL",0,0,"3",,terminal_output +5672,3559112,"TERMINAL",0,0,"4",,terminal_output +5673,3560130,"TERMINAL",0,0,"5",,terminal_output +5674,3561162,"TERMINAL",0,0,"6",,terminal_output +5675,3562175,"TERMINAL",0,0,"7",,terminal_output +5676,3563207,"TERMINAL",0,0,"8",,terminal_output +5677,3564216,"TERMINAL",0,0,"9",,terminal_output +5678,3565250,"TERMINAL",0,0,"50",,terminal_output +5679,3566265,"TERMINAL",0,0,"1",,terminal_output +5680,3567274,"TERMINAL",0,0,"2",,terminal_output +5681,3568292,"TERMINAL",0,0,"3",,terminal_output +5682,3569314,"TERMINAL",0,0,"4",,terminal_output +5683,3570335,"TERMINAL",0,0,"5",,terminal_output +5684,3571358,"TERMINAL",0,0,"6",,terminal_output +5685,3572440,"TERMINAL",0,0,"7",,terminal_output +5686,3573514,"TERMINAL",0,0,"8",,terminal_output +5687,3574496,"TERMINAL",0,0,"7:00",,terminal_output +5688,3575518,"TERMINAL",0,0,"1",,terminal_output +5689,3576540,"TERMINAL",0,0,"2",,terminal_output +5690,3577558,"TERMINAL",0,0,"3",,terminal_output +5691,3578583,"TERMINAL",0,0,"4",,terminal_output +5692,3579598,"TERMINAL",0,0,"5",,terminal_output +5693,3580616,"TERMINAL",0,0,"6",,terminal_output +5694,3581644,"TERMINAL",0,0,"7",,terminal_output +5695,3582711,"TERMINAL",0,0,"8",,terminal_output +5696,3583687,"TERMINAL",0,0,"9",,terminal_output +5697,3584704,"TERMINAL",0,0,"10",,terminal_output +5698,3585721,"TERMINAL",0,0,"1",,terminal_output +5699,3586748,"TERMINAL",0,0,"2",,terminal_output +5700,3587763,"TERMINAL",0,0,"3",,terminal_output +5701,3588782,"TERMINAL",0,0,"4",,terminal_output +5702,3589841,"TERMINAL",0,0,"5",,terminal_output +5703,3590841,"TERMINAL",0,0,"6",,terminal_output +5704,3591850,"TERMINAL",0,0,"7",,terminal_output +5705,3592867,"TERMINAL",0,0,"8",,terminal_output +5706,3593928,"TERMINAL",0,0,"9",,terminal_output +5707,3594912,"TERMINAL",0,0,"20",,terminal_output +5708,3595928,"TERMINAL",0,0,"1",,terminal_output +5709,3596969,"TERMINAL",0,0,"2",,terminal_output +5710,3597981,"TERMINAL",0,0,"3",,terminal_output +5711,3599005,"TERMINAL",0,0,"4",,terminal_output +5712,3600154,"TERMINAL",0,0,"5",,terminal_output +5713,3601059,"TERMINAL",0,0,"6",,terminal_output +5714,3602102,"TERMINAL",0,0,"7",,terminal_output +5715,3603087,"TERMINAL",0,0,"8",,terminal_output +5716,3604115,"TERMINAL",0,0,"9",,terminal_output +5717,3605174,"TERMINAL",0,0,"30",,terminal_output +5718,3606148,"TERMINAL",0,0,"1",,terminal_output +5719,3607193,"TERMINAL",0,0,"2",,terminal_output +5720,3608196,"TERMINAL",0,0,"3",,terminal_output +5721,3609298,"TERMINAL",0,0,"4",,terminal_output +5722,3610328,"TERMINAL",0,0,"5",,terminal_output +5723,3611259,"TERMINAL",0,0,"6",,terminal_output 
+5724,3612274,"TERMINAL",0,0,"7",,terminal_output +5725,3613303,"TERMINAL",0,0,"8",,terminal_output +5726,3614342,"TERMINAL",0,0,"9",,terminal_output +5727,3615348,"TERMINAL",0,0,"40",,terminal_output +5728,3616369,"TERMINAL",0,0,"1",,terminal_output +5729,3617393,"TERMINAL",0,0,"2",,terminal_output +5730,3618422,"TERMINAL",0,0,"3",,terminal_output +5731,3619475,"TERMINAL",0,0,"4",,terminal_output +5732,3620454,"TERMINAL",0,0,"5",,terminal_output +5733,3621475,"TERMINAL",0,0,"6",,terminal_output +5734,3622508,"TERMINAL",0,0,"7",,terminal_output +5735,3623517,"TERMINAL",0,0,"9",,terminal_output +5736,3624544,"TERMINAL",0,0,"50",,terminal_output +5737,3625573,"TERMINAL",0,0,"1",,terminal_output +5738,3626582,"TERMINAL",0,0,"2",,terminal_output +5739,3627608,"TERMINAL",0,0,"3",,terminal_output +5740,3628623,"TERMINAL",0,0,"4",,terminal_output +5741,3629647,"TERMINAL",0,0,"5",,terminal_output +5742,3630668,"TERMINAL",0,0,"6",,terminal_output +5743,3631730,"TERMINAL",0,0,"7",,terminal_output +5744,3632736,"TERMINAL",0,0,"8",,terminal_output +5745,3633758,"TERMINAL",0,0,"9",,terminal_output +5746,3634780,"TERMINAL",0,0,"8:00",,terminal_output +5747,3635838,"TERMINAL",0,0,"1",,terminal_output +5748,3636836,"TERMINAL",0,0,"2",,terminal_output +5749,3637846,"TERMINAL",0,0,"3",,terminal_output +5750,3638862,"TERMINAL",0,0,"4",,terminal_output +5751,3639884,"TERMINAL",0,0,"5",,terminal_output +5752,3641004,"TERMINAL",0,0,"6",,terminal_output +5753,3641927,"TERMINAL",0,0,"7",,terminal_output +5754,3642941,"TERMINAL",0,0,"8",,terminal_output +5755,3644008,"TERMINAL",0,0,"9",,terminal_output +5756,3645002,"TERMINAL",0,0,"10",,terminal_output +5757,3646032,"TERMINAL",0,0,"1",,terminal_output +5758,3647048,"TERMINAL",0,0,"2",,terminal_output +5759,3648069,"TERMINAL",0,0,"3",,terminal_output +5760,3649093,"TERMINAL",0,0,"4",,terminal_output +5761,3650114,"TERMINAL",0,0,"5",,terminal_output +5762,3651132,"TERMINAL",0,0,"6",,terminal_output +5763,3652155,"TERMINAL",0,0,"7",,terminal_output +5764,3653178,"TERMINAL",0,0,"8",,terminal_output +5765,3654209,"TERMINAL",0,0,"9",,terminal_output +5766,3655225,"TERMINAL",0,0,"20",,terminal_output +5767,3656240,"TERMINAL",0,0,"1",,terminal_output +5768,3657256,"TERMINAL",0,0,"2",,terminal_output +5769,3658284,"TERMINAL",0,0,"3",,terminal_output +5770,3659302,"TERMINAL",0,0,"4",,terminal_output +5771,3660323,"TERMINAL",0,0,"5",,terminal_output +5772,3661367,"TERMINAL",0,0,"6",,terminal_output +5773,3662366,"TERMINAL",0,0,"7",,terminal_output +5774,3663391,"TERMINAL",0,0,"8",,terminal_output +5775,3664408,"TERMINAL",0,0,"9",,terminal_output +5776,3665434,"TERMINAL",0,0,"30",,terminal_output +5777,3666463,"TERMINAL",0,0,"1",,terminal_output +5778,3667483,"TERMINAL",0,0,"2",,terminal_output +5779,3668504,"TERMINAL",0,0,"4",,terminal_output +5780,3669528,"TERMINAL",0,0,"5",,terminal_output +5781,3670565,"TERMINAL",0,0,"6",,terminal_output +5782,3671569,"TERMINAL",0,0,"7",,terminal_output +5783,3672584,"TERMINAL",0,0,"8",,terminal_output +5784,3673602,"TERMINAL",0,0,"9",,terminal_output +5785,3674624,"TERMINAL",0,0,"40",,terminal_output +5786,3675644,"TERMINAL",0,0,"1",,terminal_output +5787,3676669,"TERMINAL",0,0,"2",,terminal_output +5788,3677690,"TERMINAL",0,0,"3",,terminal_output +5789,3678716,"TERMINAL",0,0,"4",,terminal_output +5790,3679745,"TERMINAL",0,0,"5",,terminal_output +5791,3680481,"TERMINAL",0,0,"salloc: Job 3554157 has exceeded its time limit and its allocation has been revoked.\n",,terminal_output +5792,3680542,"TERMINAL",0,0,"srun: Job step aborted: 
Waiting up to 32 seconds for job step to finish.\r\nslurmstepd: error: *** STEP 3554157.interactive ON hkn0401 CANCELLED AT 2025-10-08T23:38:46 DUE TO TIME LIMIT ***\r\n",,terminal_output +5793,3680767,"TERMINAL",0,0,"6",,terminal_output +5794,3681787,"TERMINAL",0,0,"7",,terminal_output +5795,3682807,"TERMINAL",0,0,"8",,terminal_output +5796,3683878,"TERMINAL",0,0,"9",,terminal_output +5797,3684988,"TERMINAL",0,0,"50",,terminal_output +5798,3686089,"TERMINAL",0,0,"1",,terminal_output +5799,3686990,"TERMINAL",0,0,"2",,terminal_output +5800,3688014,"TERMINAL",0,0,"3",,terminal_output +5801,3688941,"TERMINAL",0,0,"4",,terminal_output +5802,3689976,"TERMINAL",0,0,"5",,terminal_output +5803,3690987,"TERMINAL",0,0,"6",,terminal_output +5804,3692012,"TERMINAL",0,0,"7",,terminal_output +5805,3693020,"TERMINAL",0,0,"8",,terminal_output +5806,3694038,"TERMINAL",0,0,"9",,terminal_output +5807,3695060,"TERMINAL",0,0,"9:00",,terminal_output +5808,3696100,"TERMINAL",0,0,"1",,terminal_output +5809,3697130,"TERMINAL",0,0,"2",,terminal_output +5810,3698119,"TERMINAL",0,0,"3",,terminal_output +5811,3699152,"TERMINAL",0,0,"4",,terminal_output +5812,3700158,"TERMINAL",0,0,"5",,terminal_output +5813,3701208,"TERMINAL",0,0,"6",,terminal_output +5814,3702210,"TERMINAL",0,0,"7",,terminal_output +5815,3703315,"TERMINAL",0,0,"8",,terminal_output +5816,3704261,"TERMINAL",0,0,"9",,terminal_output +5817,3705272,"TERMINAL",0,0,"10",,terminal_output +5818,3706291,"TERMINAL",0,0,"1",,terminal_output +5819,3707412,"TERMINAL",0,0,"2",,terminal_output +5820,3708553,"TERMINAL",0,0,"3",,terminal_output +5821,3709409,"TERMINAL",0,0,"4",,terminal_output +5822,3710389,"TERMINAL",0,0,"srun: error: hkn0401: task 0: Killed\r\n]0;tum_cte0515@hkn1993:~/Projects/jasmine",,terminal_output +5823,3710564,"TERMINAL",0,0,"5",,terminal_output +5824,3711609,"TERMINAL",0,0,"6",,terminal_output +5825,3712505,"TERMINAL",0,0,"8",,terminal_output +5826,3713531,"TERMINAL",0,0,"9",,terminal_output +5827,3714594,"TERMINAL",0,0,"20",,terminal_output +5828,3715590,"TERMINAL",0,0,"1",,terminal_output +5829,3716591,"TERMINAL",0,0,"2",,terminal_output +5830,3717661,"TERMINAL",0,0,"3",,terminal_output +5831,3718638,"TERMINAL",0,0,"4",,terminal_output +5832,3719661,"TERMINAL",0,0,"5",,terminal_output +5833,3720678,"TERMINAL",0,0,"6",,terminal_output +5834,3721697,"TERMINAL",0,0,"7",,terminal_output +5835,3722728,"TERMINAL",0,0,"8",,terminal_output +5836,3723761,"TERMINAL",0,0,"9",,terminal_output +5837,3724785,"TERMINAL",0,0,"30",,terminal_output +5838,3725804,"TERMINAL",0,0,"1",,terminal_output +5839,3726967,"TERMINAL",0,0,"2",,terminal_output +5840,3727865,"TERMINAL",0,0,"3",,terminal_output +5841,3728843,"TERMINAL",0,0,"4",,terminal_output +5842,3729862,"TERMINAL",0,0,"5",,terminal_output +5843,3730912,"TERMINAL",0,0,"6",,terminal_output +5844,3731900,"TERMINAL",0,0,"7",,terminal_output +5845,3732939,"TERMINAL",0,0,"8",,terminal_output +5846,3734012,"TERMINAL",0,0,"9",,terminal_output +5847,3734972,"TERMINAL",0,0,"40",,terminal_output +5848,3736110,"TERMINAL",0,0,"1",,terminal_output +5849,3737226,"TERMINAL",0,0,"2",,terminal_output +5850,3738265,"TERMINAL",0,0,"3",,terminal_output +5851,3739215,"TERMINAL",0,0,"4",,terminal_output +5852,3740220,"TERMINAL",0,0,"5",,terminal_output +5853,3741336,"TERMINAL",0,0,"6",,terminal_output +5854,3742363,"TERMINAL",0,0,"7",,terminal_output +5855,3743332,"TERMINAL",0,0,"8",,terminal_output +5856,3744482,"TERMINAL",0,0,"9",,terminal_output +5857,3745425,"TERMINAL",0,0,"50",,terminal_output 
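The salloc/slurmstepd/srun messages interleaved above show the interactive Slurm allocation (job 3554157) hitting its time limit: the step is cancelled, given roughly 30 seconds to finish, and finally killed. Slurm delivers SIGTERM before the hard kill, so a long-running training script can trap it and flush a checkpoint instead of losing progress. A hedged sketch of such a guard, reusing the `checkpoint_manager`/`train_state`/`step` objects from the training script as stand-ins; none of this handler code appears in the recording:

```python
import signal

save_requested = False

def _on_sigterm(signum, frame):
    # Slurm sends SIGTERM, then SIGKILL once the grace period expires;
    # defer the actual save to a safe point in the training loop.
    global save_requested
    save_requested = True

signal.signal(signal.SIGTERM, _on_sigterm)

# Inside the train loop, after a completed step:
#   if save_requested:
#       checkpoint_manager.save(step, args=ocp.args.Composite(
#           model_state=ocp.args.StandardSave(train_state)))
#       checkpoint_manager.wait_until_finished()
#       break
```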
+5858,3746480,"TERMINAL",0,0,"1",,terminal_output +5859,3747462,"TERMINAL",0,0,"2",,terminal_output +5860,3748540,"TERMINAL",0,0,"3",,terminal_output +5861,3749344,"TERMINAL",0,0,"4",,terminal_output +5862,3750345,"TERMINAL",0,0,"5",,terminal_output +5863,3751434,"TERMINAL",0,0,"6",,terminal_output +5864,3752380,"TERMINAL",0,0,"7",,terminal_output +5865,3753399,"TERMINAL",0,0,"8",,terminal_output +5866,3754423,"TERMINAL",0,0,"9",,terminal_output +5867,3755446,"TERMINAL",0,0,"40:00",,terminal_output +5868,3756466,"TERMINAL",0,0,"1",,terminal_output +5869,3757493,"TERMINAL",0,0,"2",,terminal_output +5870,3758577,"TERMINAL",0,0,"3",,terminal_output +5871,3759544,"TERMINAL",0,0,"5",,terminal_output +5872,3760653,"TERMINAL",0,0,"6",,terminal_output +5873,3761566,"TERMINAL",0,0,"7",,terminal_output +5874,3762578,"TERMINAL",0,0,"8",,terminal_output +5875,3763608,"TERMINAL",0,0,"9",,terminal_output +5876,3764703,"TERMINAL",0,0,"10",,terminal_output +5877,3765739,"TERMINAL",0,0,"1",,terminal_output +5878,3767041,"TERMINAL",0,0,"2",,terminal_output +5879,3767744,"TERMINAL",0,0,"3",,terminal_output +5880,3768701,"TERMINAL",0,0,"4",,terminal_output +5881,3769726,"TERMINAL",0,0,"5",,terminal_output +5882,3770738,"TERMINAL",0,0,"6",,terminal_output +5883,3771757,"TERMINAL",0,0,"7",,terminal_output +5884,3772766,"TERMINAL",0,0,"8",,terminal_output +5885,3773790,"TERMINAL",0,0,"9",,terminal_output +5886,3774808,"TERMINAL",0,0,"20",,terminal_output +5887,3775842,"TERMINAL",0,0,"1",,terminal_output +5888,3776852,"TERMINAL",0,0,"2",,terminal_output +5889,3777866,"TERMINAL",0,0,"3",,terminal_output +5890,3778950,"TERMINAL",0,0,"4",,terminal_output +5891,3780843,"TERMINAL",0,0,"5",,terminal_output +5892,3781252,"TERMINAL",0,0,"6",,terminal_output +5893,3782332,"TERMINAL",0,0,"7",,terminal_output +5894,3783156,"TERMINAL",0,0,"8",,terminal_output +5895,3784219,"TERMINAL",0,0,"9",,terminal_output +5896,3785148,"TERMINAL",0,0,"30",,terminal_output +5897,3786254,"TERMINAL",0,0,"1",,terminal_output +5898,3787423,"TERMINAL",0,0,"2",,terminal_output +5899,3788168,"TERMINAL",0,0,"3",,terminal_output +5900,3789199,"TERMINAL",0,0,"4",,terminal_output +5901,3790120,"TERMINAL",0,0,"5",,terminal_output +5902,3791136,"TERMINAL",0,0,"6",,terminal_output +5903,3792159,"TERMINAL",0,0,"7",,terminal_output +5904,3793181,"TERMINAL",0,0,"8",,terminal_output +5905,3794199,"TERMINAL",0,0,"9",,terminal_output +5906,3795219,"TERMINAL",0,0,"40",,terminal_output +5907,3796321,"TERMINAL",0,0,"1",,terminal_output +5908,3797373,"TERMINAL",0,0,"2",,terminal_output +5909,3798715,"TERMINAL",0,0,"3",,terminal_output +5910,3799492,"TERMINAL",0,0,"4",,terminal_output +5911,3800433,"TERMINAL",0,0,"5",,terminal_output +5912,3801426,"TERMINAL",0,0,"6",,terminal_output +5913,3802449,"TERMINAL",0,0,"7",,terminal_output +5914,3803482,"TERMINAL",0,0,"8",,terminal_output +5915,3804490,"TERMINAL",0,0,"9",,terminal_output +5916,3805551,"TERMINAL",0,0,"51",,terminal_output +5917,3806544,"TERMINAL",0,0,"2",,terminal_output +5918,3807555,"TERMINAL",0,0,"3",,terminal_output +5919,3808595,"TERMINAL",0,0,"4",,terminal_output +5920,3809596,"TERMINAL",0,0,"5",,terminal_output +5921,3810623,"TERMINAL",0,0,"6",,terminal_output +5922,3811647,"TERMINAL",0,0,"7",,terminal_output +5923,3812660,"TERMINAL",0,0,"8",,terminal_output +5924,3813680,"TERMINAL",0,0,"9",,terminal_output +5925,3814703,"TERMINAL",0,0,"1:00",,terminal_output +5926,3815731,"TERMINAL",0,0,"1",,terminal_output +5927,3816749,"TERMINAL",0,0,"2",,terminal_output 
+5928,3817774,"TERMINAL",0,0,"3",,terminal_output +5929,3818800,"TERMINAL",0,0,"4",,terminal_output +5930,3819839,"TERMINAL",0,0,"5",,terminal_output +5931,3820832,"TERMINAL",0,0,"6",,terminal_output +5932,3821852,"TERMINAL",0,0,"7",,terminal_output +5933,3822885,"TERMINAL",0,0,"8",,terminal_output +5934,3823893,"TERMINAL",0,0,"9",,terminal_output +5935,3824948,"TERMINAL",0,0,"10",,terminal_output +5936,3825950,"TERMINAL",0,0,"1",,terminal_output +5937,3826973,"TERMINAL",0,0,"2",,terminal_output +5938,3827997,"TERMINAL",0,0,"3",,terminal_output +5939,3829011,"TERMINAL",0,0,"4",,terminal_output +5940,3830036,"TERMINAL",0,0,"5",,terminal_output +5941,3831076,"TERMINAL",0,0,"6",,terminal_output +5942,3832079,"TERMINAL",0,0,"7",,terminal_output +5943,3833107,"TERMINAL",0,0,"8",,terminal_output +5944,3834174,"TERMINAL",0,0,"9",,terminal_output +5945,3835238,"TERMINAL",0,0,"20",,terminal_output +5946,3836220,"TERMINAL",0,0,"1",,terminal_output +5947,3837280,"TERMINAL",0,0,"2",,terminal_output +5948,3838415,"TERMINAL",0,0,"3",,terminal_output +5949,3839424,"TERMINAL",0,0,"4",,terminal_output +5950,3840385,"TERMINAL",0,0,"5",,terminal_output +5951,3841325,"TERMINAL",0,0,"6",,terminal_output +5952,3842513,"TERMINAL",0,0,"7",,terminal_output +5953,3843605,"TERMINAL",0,0,"8",,terminal_output +5954,3844533,"TERMINAL",0,0,"9",,terminal_output +5955,3845477,"TERMINAL",0,0,"30",,terminal_output +5956,3846520,"TERMINAL",0,0,"1",,terminal_output +5957,3847613,"TERMINAL",0,0,"2",,terminal_output +5958,3848722,"TERMINAL",0,0,"3",,terminal_output +5959,3849558,"TERMINAL",0,0,"4",,terminal_output +5960,3850516,"TERMINAL",0,0,"6",,terminal_output +5961,3851535,"TERMINAL",0,0,"7",,terminal_output +5962,3852578,"TERMINAL",0,0,"8",,terminal_output +5963,3853861,"TERMINAL",0,0,"9",,terminal_output +5964,3854828,"TERMINAL",0,0,"40",,terminal_output +5965,3856027,"TERMINAL",0,0,"1",,terminal_output +5966,3856892,"TERMINAL",0,0,"2",,terminal_output +5967,3857949,"TERMINAL",0,0,"3",,terminal_output +5968,3858781,"TERMINAL",0,0,"4",,terminal_output +5969,3859772,"TERMINAL",0,0,"5",,terminal_output +5970,3860791,"TERMINAL",0,0,"6",,terminal_output +5971,3861814,"TERMINAL",0,0,"7",,terminal_output +5972,3862854,"TERMINAL",0,0,"8",,terminal_output +5973,3863908,"TERMINAL",0,0,"9",,terminal_output +5974,3864916,"TERMINAL",0,0,"50",,terminal_output +5975,3865969,"TERMINAL",0,0,"1",,terminal_output +5976,3867170,"TERMINAL",0,0,"2",,terminal_output +5977,3868274,"TERMINAL",0,0,"3",,terminal_output +5978,3869039,"TERMINAL",0,0,"4",,terminal_output +5979,3870089,"TERMINAL",0,0,"5",,terminal_output +5980,3871051,"TERMINAL",0,0,"6",,terminal_output +5981,3872091,"TERMINAL",0,0,"7",,terminal_output +5982,3873112,"TERMINAL",0,0,"8",,terminal_output +5983,3874138,"TERMINAL",0,0,"9",,terminal_output +5984,3875171,"TERMINAL",0,0,"2:00",,terminal_output +5985,3876175,"TERMINAL",0,0,"1",,terminal_output +5986,3877191,"TERMINAL",0,0,"2",,terminal_output +5987,3878215,"TERMINAL",0,0,"3",,terminal_output +5988,3879288,"TERMINAL",0,0,"4",,terminal_output +5989,3880273,"TERMINAL",0,0,"5",,terminal_output +5990,3881291,"TERMINAL",0,0,"6",,terminal_output +5991,3882313,"TERMINAL",0,0,"7",,terminal_output +5992,3883342,"TERMINAL",0,0,"8",,terminal_output +5993,3884351,"TERMINAL",0,0,"9",,terminal_output +5994,3885417,"TERMINAL",0,0,"10",,terminal_output +5995,3886397,"TERMINAL",0,0,"1",,terminal_output +5996,3887417,"TERMINAL",0,0,"2",,terminal_output +5997,3888449,"TERMINAL",0,0,"3",,terminal_output 
+5998,3889457,"TERMINAL",0,0,"4",,terminal_output +5999,3890476,"TERMINAL",0,0,"5",,terminal_output +6000,3891499,"TERMINAL",0,0,"7",,terminal_output +6001,3892525,"TERMINAL",0,0,"8",,terminal_output +6002,3893611,"TERMINAL",0,0,"9",,terminal_output +6003,3894597,"TERMINAL",0,0,"20",,terminal_output +6004,3895627,"TERMINAL",0,0,"1",,terminal_output +6005,3896668,"TERMINAL",0,0,"2",,terminal_output +6006,3897832,"TERMINAL",0,0,"3",,terminal_output +6007,3899060,"TERMINAL",0,0,"4",,terminal_output +6008,3900001,"TERMINAL",0,0,"5",,terminal_output +6009,3901053,"TERMINAL",0,0,"6",,terminal_output +6010,3902019,"TERMINAL",0,0,"7",,terminal_output +6011,3903049,"TERMINAL",0,0,"8",,terminal_output +6012,3903923,"TERMINAL",0,0,"9",,terminal_output +6013,3904911,"TERMINAL",0,0,"30",,terminal_output +6014,3906035,"TERMINAL",0,0,"1",,terminal_output +6015,3907068,"TERMINAL",0,0,"2",,terminal_output +6016,3908019,"TERMINAL",0,0,"3",,terminal_output +6017,3909044,"TERMINAL",0,0,"4",,terminal_output +6018,3909968,"TERMINAL",0,0,"5",,terminal_output +6019,3910969,"TERMINAL",0,0,"6",,terminal_output +6020,3911991,"TERMINAL",0,0,"7",,terminal_output +6021,3913013,"TERMINAL",0,0,"8",,terminal_output +6022,3914034,"TERMINAL",0,0,"9",,terminal_output +6023,3915056,"TERMINAL",0,0,"40",,terminal_output +6024,3916077,"TERMINAL",0,0,"1",,terminal_output +6025,3917095,"TERMINAL",0,0,"2",,terminal_output +6026,3918117,"TERMINAL",0,0,"3",,terminal_output +6027,3919203,"TERMINAL",0,0,"4",,terminal_output +6028,3920168,"TERMINAL",0,0,"5",,terminal_output +6029,3921183,"TERMINAL",0,0,"6",,terminal_output +6030,3922206,"TERMINAL",0,0,"7",,terminal_output +6031,3923223,"TERMINAL",0,0,"8",,terminal_output +6032,3924245,"TERMINAL",0,0,"9",,terminal_output +6033,3925372,"TERMINAL",0,0,"50",,terminal_output +6034,3926323,"TERMINAL",0,0,"1",,terminal_output +6035,3927587,"TERMINAL",0,0,"2",,terminal_output +6036,3928457,"TERMINAL",0,0,"3",,terminal_output +6037,3929553,"TERMINAL",0,0,"4",,terminal_output +6038,3930444,"TERMINAL",0,0,"5",,terminal_output +6039,3931692,"TERMINAL",0,0,"6",,terminal_output +6040,3932640,"TERMINAL",0,0,"7",,terminal_output +6041,3933466,"TERMINAL",0,0,"8",,terminal_output +6042,3934526,"TERMINAL",0,0,"9",,terminal_output +6043,3935582,"TERMINAL",0,0,"3:01",,terminal_output +6044,3936566,"TERMINAL",0,0,"2",,terminal_output +6045,3937651,"TERMINAL",0,0,"3",,terminal_output +6046,3938717,"TERMINAL",0,0,"4",,terminal_output +6047,3939715,"TERMINAL",0,0,"5",,terminal_output +6048,3940797,"TERMINAL",0,0,"6",,terminal_output +6049,3941767,"TERMINAL",0,0,"7",,terminal_output +6050,3942856,"TERMINAL",0,0,"8",,terminal_output +6051,3943858,"TERMINAL",0,0,"9",,terminal_output +6052,3944831,"TERMINAL",0,0,"10",,terminal_output +6053,3945847,"TERMINAL",0,0,"1",,terminal_output +6054,3946871,"TERMINAL",0,0,"2",,terminal_output +6055,3947889,"TERMINAL",0,0,"3",,terminal_output +6056,3948926,"TERMINAL",0,0,"4",,terminal_output +6057,3950022,"TERMINAL",0,0,"5",,terminal_output +6058,3951036,"TERMINAL",0,0,"6",,terminal_output +6059,3952069,"TERMINAL",0,0,"7",,terminal_output +6060,3953082,"TERMINAL",0,0,"8",,terminal_output +6061,3954099,"TERMINAL",0,0,"9",,terminal_output +6062,3955121,"TERMINAL",0,0,"20",,terminal_output +6063,3956152,"TERMINAL",0,0,"1",,terminal_output +6064,3957170,"TERMINAL",0,0,"2",,terminal_output +6065,3958203,"TERMINAL",0,0,"3",,terminal_output +6066,3959251,"TERMINAL",0,0,"4",,terminal_output +6067,3960244,"TERMINAL",0,0,"5",,terminal_output 
+6068,3961453,"TERMINAL",0,0,"6",,terminal_output +6069,3962266,"TERMINAL",0,0,"7",,terminal_output +6070,3963291,"TERMINAL",0,0,"8",,terminal_output +6071,3964345,"TERMINAL",0,0,"9",,terminal_output +6072,3965360,"TERMINAL",0,0,"30",,terminal_output +6073,3966395,"TERMINAL",0,0,"1",,terminal_output +6074,3967407,"TERMINAL",0,0,"2",,terminal_output +6075,3968440,"TERMINAL",0,0,"3",,terminal_output +6076,3969451,"TERMINAL",0,0,"4",,terminal_output +6077,3970484,"TERMINAL",0,0,"5",,terminal_output +6078,3971514,"TERMINAL",0,0,"6",,terminal_output +6079,3972545,"TERMINAL",0,0,"8",,terminal_output +6080,3973554,"TERMINAL",0,0,"9",,terminal_output +6081,3974555,"TERMINAL",0,0,"40",,terminal_output +6082,3975574,"TERMINAL",0,0,"1",,terminal_output +6083,3976594,"TERMINAL",0,0,"2",,terminal_output +6084,3977614,"TERMINAL",0,0,"3",,terminal_output +6085,3978696,"TERMINAL",0,0,"4",,terminal_output +6086,3979718,"TERMINAL",0,0,"5",,terminal_output +6087,3980733,"TERMINAL",0,0,"6",,terminal_output +6088,3981768,"TERMINAL",0,0,"7",,terminal_output +6089,3982776,"TERMINAL",0,0,"8",,terminal_output +6090,3983798,"TERMINAL",0,0,"9",,terminal_output +6091,3984819,"TERMINAL",0,0,"50",,terminal_output +6092,3985846,"TERMINAL",0,0,"1",,terminal_output +6093,3986861,"TERMINAL",0,0,"2",,terminal_output +6094,3987890,"TERMINAL",0,0,"3",,terminal_output +6095,3988914,"TERMINAL",0,0,"4",,terminal_output +6096,3989918,"TERMINAL",0,0,"5",,terminal_output +6097,3990939,"TERMINAL",0,0,"6",,terminal_output +6098,3992000,"TERMINAL",0,0,"7",,terminal_output +6099,3993008,"TERMINAL",0,0,"8",,terminal_output +6100,3994005,"TERMINAL",0,0,"9",,terminal_output +6101,3995071,"TERMINAL",0,0,"4:00",,terminal_output +6102,3996050,"TERMINAL",0,0,"1",,terminal_output +6103,3997073,"TERMINAL",0,0,"2",,terminal_output +6104,3998128,"TERMINAL",0,0,"3",,terminal_output +6105,3999124,"TERMINAL",0,0,"4",,terminal_output +6106,4000135,"TERMINAL",0,0,"5",,terminal_output +6107,4001163,"TERMINAL",0,0,"6",,terminal_output +6108,4002179,"TERMINAL",0,0,"7",,terminal_output +6109,4003234,"TERMINAL",0,0,"8",,terminal_output +6110,4004694,"TERMINAL",0,0,"9",,terminal_output +6111,4005460,"TERMINAL",0,0,"10",,terminal_output +6112,4006384,"TERMINAL",0,0,"1",,terminal_output +6113,4007293,"TERMINAL",0,0,"2",,terminal_output +6114,4008319,"TERMINAL",0,0,"3",,terminal_output +6115,4009328,"TERMINAL",0,0,"4",,terminal_output +6116,4010352,"TERMINAL",0,0,"5",,terminal_output +6117,4011377,"TERMINAL",0,0,"6",,terminal_output +6118,4012417,"TERMINAL",0,0,"7",,terminal_output +6119,4013562,"TERMINAL",0,0,"8",,terminal_output +6120,4014590,"TERMINAL",0,0,"9",,terminal_output +6121,4015478,"TERMINAL",0,0,"20",,terminal_output +6122,4016569,"TERMINAL",0,0,"1",,terminal_output +6123,4017696,"TERMINAL",0,0,"3",,terminal_output +6124,4018549,"TERMINAL",0,0,"4",,terminal_output +6125,4019554,"TERMINAL",0,0,"5",,terminal_output +6126,4020570,"TERMINAL",0,0,"6",,terminal_output +6127,4021596,"TERMINAL",0,0,"7",,terminal_output +6128,4022617,"TERMINAL",0,0,"8",,terminal_output +6129,4023708,"TERMINAL",0,0,"9",,terminal_output +6130,4024701,"TERMINAL",0,0,"30",,terminal_output +6131,4025724,"TERMINAL",0,0,"1",,terminal_output +6132,4026769,"TERMINAL",0,0,"2",,terminal_output +6133,4027780,"TERMINAL",0,0,"3",,terminal_output +6134,4029039,"TERMINAL",0,0,"4",,terminal_output +6135,4030102,"TERMINAL",0,0,"5",,terminal_output +6136,4031009,"TERMINAL",0,0,"6",,terminal_output +6137,4031902,"TERMINAL",0,0,"7",,terminal_output 
+6138,4032864,"TERMINAL",0,0,"8",,terminal_output +6139,4033890,"TERMINAL",0,0,"9",,terminal_output +6140,4034917,"TERMINAL",0,0,"40",,terminal_output +6141,4035939,"TERMINAL",0,0,"1",,terminal_output +6142,4036956,"TERMINAL",0,0,"2",,terminal_output +6143,4038018,"TERMINAL",0,0,"3",,terminal_output +6144,4039037,"TERMINAL",0,0,"4",,terminal_output +6145,4040041,"TERMINAL",0,0,"5",,terminal_output +6146,4041088,"TERMINAL",0,0,"6",,terminal_output +6147,4042109,"TERMINAL",0,0,"7",,terminal_output +6148,4043120,"TERMINAL",0,0,"8",,terminal_output +6149,4044203,"TERMINAL",0,0,"9",,terminal_output +6150,4045235,"TERMINAL",0,0,"50",,terminal_output +6151,4046217,"TERMINAL",0,0,"1",,terminal_output +6152,4047218,"TERMINAL",0,0,"2",,terminal_output +6153,4048240,"TERMINAL",0,0,"3",,terminal_output +6154,4049242,"TERMINAL",0,0,"4",,terminal_output +6155,4050272,"TERMINAL",0,0,"5",,terminal_output +6156,4051570,"TERMINAL",0,0,"6",,terminal_output +6157,4052483,"TERMINAL",0,0,"7",,terminal_output +6158,4053466,"TERMINAL",0,0,"8",,terminal_output +6159,4054521,"TERMINAL",0,0,"9",,terminal_output +6160,4055427,"TERMINAL",0,0,"5:00",,terminal_output +6161,4056447,"TERMINAL",0,0,"1",,terminal_output +6162,4057473,"TERMINAL",0,0,"2",,terminal_output +6163,4058727,"TERMINAL",0,0,"3",,terminal_output +6164,4059699,"TERMINAL",0,0,"5",,terminal_output +6165,4060642,"TERMINAL",0,0,"6",,terminal_output +6166,4061638,"TERMINAL",0,0,"7",,terminal_output +6167,4062645,"TERMINAL",0,0,"8",,terminal_output +6168,4063667,"TERMINAL",0,0,"9",,terminal_output +6169,4064703,"TERMINAL",0,0,"10",,terminal_output +6170,4065713,"TERMINAL",0,0,"1",,terminal_output +6171,4066736,"TERMINAL",0,0,"2",,terminal_output +6172,4067769,"TERMINAL",0,0,"3",,terminal_output +6173,4068791,"TERMINAL",0,0,"4",,terminal_output +6174,4069815,"TERMINAL",0,0,"5",,terminal_output +6175,4070837,"TERMINAL",0,0,"6",,terminal_output +6176,4071855,"TERMINAL",0,0,"7",,terminal_output +6177,4072863,"TERMINAL",0,0,"8",,terminal_output +6178,4073956,"TERMINAL",0,0,"9",,terminal_output +6179,4075102,"TERMINAL",0,0,"20",,terminal_output +6180,4076027,"TERMINAL",0,0,"1",,terminal_output +6181,4077208,"TERMINAL",0,0,"2",,terminal_output +6182,4078364,"TERMINAL",0,0,"3",,terminal_output +6183,4079151,"TERMINAL",0,0,"4",,terminal_output +6184,4080167,"TERMINAL",0,0,"5",,terminal_output +6185,4081336,"TERMINAL",0,0,"6",,terminal_output +6186,4082258,"TERMINAL",0,0,"7",,terminal_output +6187,4083391,"TERMINAL",0,0,"8",,terminal_output +6188,4084561,"TERMINAL",0,0,"9",,terminal_output +6189,4085275,"TERMINAL",0,0,"30",,terminal_output +6190,4086147,"TERMINAL",0,0,"1",,terminal_output +6191,4087167,"TERMINAL",0,0,"2",,terminal_output +6192,4088208,"TERMINAL",0,0,"3",,terminal_output +6193,4089231,"TERMINAL",0,0,"4",,terminal_output +6194,4090257,"TERMINAL",0,0,"5",,terminal_output +6195,4091263,"TERMINAL",0,0,"6",,terminal_output +6196,4092346,"TERMINAL",0,0,"7",,terminal_output +6197,4093414,"TERMINAL",0,0,"8",,terminal_output +6198,4094313,"TERMINAL",0,0,"9",,terminal_output +6199,4095336,"TERMINAL",0,0,"40",,terminal_output +6200,4096357,"TERMINAL",0,0,"1",,terminal_output +6201,4097502,"TERMINAL",0,0,"2",,terminal_output +6202,4098449,"TERMINAL",0,0,"3",,terminal_output +6203,4099466,"TERMINAL",0,0,"4",,terminal_output +6204,4100489,"TERMINAL",0,0,"5",,terminal_output +6205,4101510,"TERMINAL",0,0,"7",,terminal_output +6206,4102527,"TERMINAL",0,0,"8",,terminal_output +6207,4103549,"TERMINAL",0,0,"9",,terminal_output 
+6208,4104638,"TERMINAL",0,0,"50",,terminal_output +6209,4105587,"TERMINAL",0,0,"1",,terminal_output +6210,4106753,"TERMINAL",0,0,"2",,terminal_output +6211,4107631,"TERMINAL",0,0,"3",,terminal_output +6212,4108654,"TERMINAL",0,0,"4",,terminal_output +6213,4109727,"TERMINAL",0,0,"5",,terminal_output +6214,4110691,"TERMINAL",0,0,"6",,terminal_output +6215,4111709,"TERMINAL",0,0,"7",,terminal_output +6216,4112768,"TERMINAL",0,0,"8",,terminal_output +6217,4113753,"TERMINAL",0,0,"9",,terminal_output +6218,4114820,"TERMINAL",0,0,"6:00",,terminal_output +6219,4115799,"TERMINAL",0,0,"1",,terminal_output +6220,4117100,"TERMINAL",0,0,"2",,terminal_output +6221,4117839,"TERMINAL",0,0,"3",,terminal_output +6222,4118864,"TERMINAL",0,0,"4",,terminal_output +6223,4119882,"TERMINAL",0,0,"5",,terminal_output +6224,4120954,"TERMINAL",0,0,"6",,terminal_output +6225,4121929,"TERMINAL",0,0,"7",,terminal_output +6226,4122998,"TERMINAL",0,0,"8",,terminal_output +6227,4124252,"TERMINAL",0,0,"9",,terminal_output +6228,4125194,"TERMINAL",0,0,"10",,terminal_output +6229,4126098,"TERMINAL",0,0,"1",,terminal_output +6230,4127047,"TERMINAL",0,0,"2",,terminal_output +6231,4128100,"TERMINAL",0,0,"3",,terminal_output +6232,4129087,"TERMINAL",0,0,"4",,terminal_output +6233,4130103,"TERMINAL",0,0,"5",,terminal_output +6234,4131122,"TERMINAL",0,0,"6",,terminal_output +6235,4132162,"TERMINAL",0,0,"7",,terminal_output +6236,4133179,"TERMINAL",0,0,"8",,terminal_output +6237,4134186,"TERMINAL",0,0,"9",,terminal_output +6238,4135226,"TERMINAL",0,0,"20",,terminal_output +6239,4136260,"TERMINAL",0,0,"1",,terminal_output +6240,4137323,"TERMINAL",0,0,"2",,terminal_output +6241,4138439,"TERMINAL",0,0,"3",,terminal_output +6242,4139557,"TERMINAL",0,0,"4",,terminal_output +6243,4140355,"TERMINAL",0,0,"5",,terminal_output +6244,4141397,"TERMINAL",0,0,"6",,terminal_output +6245,4142445,"TERMINAL",0,0,"7",,terminal_output +6246,4143446,"TERMINAL",0,0,"8",,terminal_output +6247,4144514,"TERMINAL",0,0,"9",,terminal_output +6248,4145499,"TERMINAL",0,0,"30",,terminal_output +6249,4146521,"TERMINAL",0,0,"2",,terminal_output +6250,4147861,"TERMINAL",0,0,"3",,terminal_output +6251,4148564,"TERMINAL",0,0,"4",,terminal_output +6252,4149584,"TERMINAL",0,0,"5",,terminal_output +6253,4150605,"TERMINAL",0,0,"6",,terminal_output +6254,4151633,"TERMINAL",0,0,"7",,terminal_output +6255,4152650,"TERMINAL",0,0,"8",,terminal_output +6256,4153670,"TERMINAL",0,0,"9",,terminal_output +6257,4154690,"TERMINAL",0,0,"40",,terminal_output +6258,4155712,"TERMINAL",0,0,"1",,terminal_output +6259,4156731,"TERMINAL",0,0,"2",,terminal_output +6260,4157752,"TERMINAL",0,0,"3",,terminal_output +6261,4158773,"TERMINAL",0,0,"4",,terminal_output +6262,4159866,"TERMINAL",0,0,"5",,terminal_output +6263,4160884,"TERMINAL",0,0,"6",,terminal_output +6264,4162189,"TERMINAL",0,0,"7",,terminal_output +6265,4162944,"TERMINAL",0,0,"8",,terminal_output +6266,4163946,"TERMINAL",0,0,"9",,terminal_output +6267,4164966,"TERMINAL",0,0,"50",,terminal_output +6268,4165984,"TERMINAL",0,0,"1",,terminal_output +6269,4167066,"TERMINAL",0,0,"2",,terminal_output +6270,4168032,"TERMINAL",0,0,"3",,terminal_output +6271,4169224,"TERMINAL",0,0,"4",,terminal_output +6272,4170106,"TERMINAL",0,0,"5",,terminal_output +6273,4171256,"TERMINAL",0,0,"6",,terminal_output +6274,4172172,"TERMINAL",0,0,"7",,terminal_output +6275,4173213,"TERMINAL",0,0,"8",,terminal_output +6276,4174168,"TERMINAL",0,0,"9",,terminal_output +6277,4175171,"TERMINAL",0,0,"7:00",,terminal_output 
+6278,4176226,"TERMINAL",0,0,"1",,terminal_output +6279,4177209,"TERMINAL",0,0,"2",,terminal_output +6280,4178228,"TERMINAL",0,0,"3",,terminal_output +6281,4179375,"TERMINAL",0,0,"4",,terminal_output +6282,4180400,"TERMINAL",0,0,"5",,terminal_output +6283,4181431,"TERMINAL",0,0,"6",,terminal_output +6284,4182456,"TERMINAL",0,0,"7",,terminal_output +6285,4183478,"TERMINAL",0,0,"8",,terminal_output +6286,4184518,"TERMINAL",0,0,"9",,terminal_output +6287,4185647,"TERMINAL",0,0,"10",,terminal_output +6288,4186560,"TERMINAL",0,0,"2",,terminal_output +6289,4187536,"TERMINAL",0,0,"3",,terminal_output +6290,4188653,"TERMINAL",0,0,"4",,terminal_output +6291,4189789,"TERMINAL",0,0,"5",,terminal_output +6292,4190710,"TERMINAL",0,0,"6",,terminal_output +6293,4191749,"TERMINAL",0,0,"7",,terminal_output +6294,4192695,"TERMINAL",0,0,"8",,terminal_output +6295,4193740,"TERMINAL",0,0,"9",,terminal_output +6296,4194781,"TERMINAL",0,0,"20",,terminal_output +6297,4195868,"TERMINAL",0,0,"1",,terminal_output +6298,4196829,"TERMINAL",0,0,"2",,terminal_output +6299,4197964,"TERMINAL",0,0,"3",,terminal_output +6300,4198952,"TERMINAL",0,0,"4",,terminal_output +6301,4200094,"TERMINAL",0,0,"5",,terminal_output +6302,4201039,"TERMINAL",0,0,"6",,terminal_output +6303,4202159,"TERMINAL",0,0,"7",,terminal_output +6304,4203083,"TERMINAL",0,0,"8",,terminal_output +6305,4204028,"TERMINAL",0,0,"9",,terminal_output +6306,4204984,"TERMINAL",0,0,"30",,terminal_output +6307,4206011,"TERMINAL",0,0,"1",,terminal_output +6308,4206960,"TERMINAL",0,0,"2",,terminal_output +6309,4207954,"TERMINAL",0,0,"3",,terminal_output +6310,4208986,"TERMINAL",0,0,"4",,terminal_output +6311,4210017,"TERMINAL",0,0,"5",,terminal_output +6312,4211133,"TERMINAL",0,0,"6",,terminal_output +6313,4212152,"TERMINAL",0,0,"7",,terminal_output +6314,4213127,"TERMINAL",0,0,"8",,terminal_output +6315,4214421,"TERMINAL",0,0,"9",,terminal_output +6316,4215364,"TERMINAL",0,0,"40",,terminal_output +6317,4216179,"TERMINAL",0,0,"1",,terminal_output +6318,4217268,"TERMINAL",0,0,"2",,terminal_output +6319,4218167,"TERMINAL",0,0,"3",,terminal_output +6320,4219180,"TERMINAL",0,0,"4",,terminal_output +6321,4220199,"TERMINAL",0,0,"5",,terminal_output +6322,4221218,"TERMINAL",0,0,"6",,terminal_output +6323,4222237,"TERMINAL",0,0,"7",,terminal_output +6324,4223440,"TERMINAL",0,0,"8",,terminal_output +6325,4224281,"TERMINAL",0,0,"9",,terminal_output +6326,4225337,"TERMINAL",0,0,"50",,terminal_output +6327,4226477,"TERMINAL",0,0,"1",,terminal_output +6328,4227422,"TERMINAL",0,0,"2",,terminal_output +6329,4228393,"TERMINAL",0,0,"3",,terminal_output +6330,4229405,"TERMINAL",0,0,"4",,terminal_output +6331,4230406,"TERMINAL",0,0,"5",,terminal_output +6332,4231429,"TERMINAL",0,0,"6",,terminal_output +6333,4232451,"TERMINAL",0,0,"7",,terminal_output +6334,4233540,"TERMINAL",0,0,"8",,terminal_output +6335,4234638,"TERMINAL",0,0,"9",,terminal_output +6336,4235542,"TERMINAL",0,0,"8:01",,terminal_output +6337,4236526,"TERMINAL",0,0,"2",,terminal_output +6338,4237548,"TERMINAL",0,0,"3",,terminal_output +6339,4238607,"TERMINAL",0,0,"4",,terminal_output +6340,4239612,"TERMINAL",0,0,"5",,terminal_output +6341,4240633,"TERMINAL",0,0,"6",,terminal_output +6342,4241658,"TERMINAL",0,0,"7",,terminal_output +6343,4242747,"TERMINAL",0,0,"8",,terminal_output +6344,4243697,"TERMINAL",0,0,"9",,terminal_output +6345,4244720,"TERMINAL",0,0,"10",,terminal_output +6346,4245742,"TERMINAL",0,0,"1",,terminal_output +6347,4246761,"TERMINAL",0,0,"2",,terminal_output 
+6348,4247785,"TERMINAL",0,0,"3",,terminal_output +6349,4248807,"TERMINAL",0,0,"4",,terminal_output +6350,4249964,"TERMINAL",0,0,"5",,terminal_output +6351,4251127,"TERMINAL",0,0,"6",,terminal_output +6352,4251871,"TERMINAL",0,0,"7",,terminal_output +6353,4252888,"TERMINAL",0,0,"8",,terminal_output +6354,4253908,"TERMINAL",0,0,"9",,terminal_output +6355,4254931,"TERMINAL",0,0,"20",,terminal_output +6356,4255948,"TERMINAL",0,0,"1",,terminal_output +6357,4256969,"TERMINAL",0,0,"2",,terminal_output +6358,4258019,"TERMINAL",0,0,"3",,terminal_output +6359,4259038,"TERMINAL",0,0,"4",,terminal_output +6360,4260070,"TERMINAL",0,0,"5",,terminal_output +6361,4261103,"TERMINAL",0,0,"6",,terminal_output +6362,4262147,"TERMINAL",0,0,"7",,terminal_output +6363,4263205,"TERMINAL",0,0,"8",,terminal_output +6364,4264317,"TERMINAL",0,0,"9",,terminal_output +6365,4265373,"TERMINAL",0,0,"30",,terminal_output +6366,4266300,"TERMINAL",0,0,"1",,terminal_output +6367,4267307,"TERMINAL",0,0,"2",,terminal_output +6368,4268360,"TERMINAL",0,0,"3",,terminal_output +6369,4269374,"TERMINAL",0,0,"4",,terminal_output +6370,4270393,"TERMINAL",0,0,"5",,terminal_output +6371,4271416,"TERMINAL",0,0,"6",,terminal_output +6372,4272441,"TERMINAL",0,0,"7",,terminal_output +6373,4273489,"TERMINAL",0,0,"8",,terminal_output +6374,4274494,"TERMINAL",0,0,"9",,terminal_output +6375,4275508,"TERMINAL",0,0,"41",,terminal_output +6376,4276525,"TERMINAL",0,0,"2",,terminal_output +6377,4277552,"TERMINAL",0,0,"3",,terminal_output +6378,4278580,"TERMINAL",0,0,"4",,terminal_output +6379,4279579,"TERMINAL",0,0,"5",,terminal_output +6380,4280643,"TERMINAL",0,0,"6",,terminal_output +6381,4281656,"TERMINAL",0,0,"7",,terminal_output +6382,4282683,"TERMINAL",0,0,"8",,terminal_output +6383,4283695,"TERMINAL",0,0,"9",,terminal_output +6384,4284777,"TERMINAL",0,0,"50",,terminal_output +6385,4285831,"TERMINAL",0,0,"1",,terminal_output +6386,4286815,"TERMINAL",0,0,"2",,terminal_output +6387,4287959,"TERMINAL",0,0,"3",,terminal_output +6388,4288861,"TERMINAL",0,0,"4",,terminal_output +6389,4289946,"TERMINAL",0,0,"5",,terminal_output +6390,4290905,"TERMINAL",0,0,"6",,terminal_output +6391,4291923,"TERMINAL",0,0,"7",,terminal_output +6392,4292939,"TERMINAL",0,0,"8",,terminal_output +6393,4294004,"TERMINAL",0,0,"9",,terminal_output +6394,4295259,"TERMINAL",0,0,"9:00",,terminal_output +6395,4296167,"TERMINAL",0,0,"1",,terminal_output +6396,4297166,"TERMINAL",0,0,"2",,terminal_output +6397,4298172,"TERMINAL",0,0,"3",,terminal_output +6398,4299111,"TERMINAL",0,0,"4",,terminal_output +6399,4300235,"TERMINAL",0,0,"5",,terminal_output +6400,4301141,"TERMINAL",0,0,"6",,terminal_output +6401,4302134,"TERMINAL",0,0,"7",,terminal_output +6402,4303151,"TERMINAL",0,0,"8",,terminal_output +6403,4304171,"TERMINAL",0,0,"9",,terminal_output +6404,4305191,"TERMINAL",0,0,"10",,terminal_output +6405,4306215,"TERMINAL",0,0,"1",,terminal_output +6406,4307235,"TERMINAL",0,0,"2",,terminal_output +6407,4308252,"TERMINAL",0,0,"3",,terminal_output +6408,4309274,"TERMINAL",0,0,"4",,terminal_output +6409,4310339,"TERMINAL",0,0,"5",,terminal_output +6410,4311317,"TERMINAL",0,0,"6",,terminal_output +6411,4312333,"TERMINAL",0,0,"7",,terminal_output +6412,4313365,"TERMINAL",0,0,"8",,terminal_output +6413,4314397,"TERMINAL",0,0,"9",,terminal_output +6414,4315505,"TERMINAL",0,0,"20",,terminal_output +6415,4316416,"TERMINAL",0,0,"1",,terminal_output +6416,4317492,"TERMINAL",0,0,"2",,terminal_output +6417,4318512,"TERMINAL",0,0,"4",,terminal_output 
+6418,4319536,"TERMINAL",0,0,"5",,terminal_output +6419,4320557,"TERMINAL",0,0,"6",,terminal_output +6420,4321673,"TERMINAL",0,0,"7",,terminal_output +6421,4322609,"TERMINAL",0,0,"8",,terminal_output +6422,4323814,"TERMINAL",0,0,"9",,terminal_output +6423,4324877,"TERMINAL",0,0,"30",,terminal_output +6424,4325742,"TERMINAL",0,0,"1",,terminal_output +6425,4326698,"TERMINAL",0,0,"2",,terminal_output +6426,4327717,"TERMINAL",0,0,"3",,terminal_output +6427,4328842,"TERMINAL",0,0,"4",,terminal_output +6428,4329872,"TERMINAL",0,0,"5",,terminal_output +6429,4330812,"TERMINAL",0,0,"6",,terminal_output +6430,4331814,"TERMINAL",0,0,"7",,terminal_output +6431,4332810,"TERMINAL",0,0,"8",,terminal_output +6432,4333832,"TERMINAL",0,0,"9",,terminal_output +6433,4334853,"TERMINAL",0,0,"40",,terminal_output +6434,4335887,"TERMINAL",0,0,"1",,terminal_output +6435,4336907,"TERMINAL",0,0,"2",,terminal_output +6436,4337928,"TERMINAL",0,0,"3",,terminal_output +6437,4338948,"TERMINAL",0,0,"4",,terminal_output +6438,4339969,"TERMINAL",0,0,"5",,terminal_output +6439,4340990,"TERMINAL",0,0,"6",,terminal_output +6440,4342009,"TERMINAL",0,0,"7",,terminal_output +6441,4343055,"TERMINAL",0,0,"8",,terminal_output +6442,4344057,"TERMINAL",0,0,"9",,terminal_output +6443,4345072,"TERMINAL",0,0,"50",,terminal_output +6444,4346144,"TERMINAL",0,0,"1",,terminal_output +6445,4347154,"TERMINAL",0,0,"2",,terminal_output +6446,4348170,"TERMINAL",0,0,"3",,terminal_output +6447,4349160,"TERMINAL",0,0,"4",,terminal_output +6448,4350183,"TERMINAL",0,0,"5",,terminal_output +6449,4351205,"TERMINAL",0,0,"6",,terminal_output +6450,4352225,"TERMINAL",0,0,"7",,terminal_output +6451,4353279,"TERMINAL",0,0,"8",,terminal_output +6452,4354310,"TERMINAL",0,0,"9",,terminal_output +6453,4355359,"TERMINAL",0,0,"50:00",,terminal_output +6454,4356323,"TERMINAL",0,0,"1",,terminal_output +6455,4357361,"TERMINAL",0,0,"2",,terminal_output +6456,4358361,"TERMINAL",0,0,"3",,terminal_output +6457,4359385,"TERMINAL",0,0,"4",,terminal_output +6458,4360402,"TERMINAL",0,0,"5",,terminal_output +6459,4361430,"TERMINAL",0,0,"6",,terminal_output +6460,4362459,"TERMINAL",0,0,"7",,terminal_output +6461,4363464,"TERMINAL",0,0,"8",,terminal_output +6462,4364484,"TERMINAL",0,0,"9",,terminal_output +6463,4365508,"TERMINAL",0,0,"11",,terminal_output +6464,4366529,"TERMINAL",0,0,"2",,terminal_output +6465,4367552,"TERMINAL",0,0,"3",,terminal_output +6466,4368567,"TERMINAL",0,0,"4",,terminal_output +6467,4369622,"TERMINAL",0,0,"5",,terminal_output +6468,4370694,"TERMINAL",0,0,"6",,terminal_output +6469,4371632,"TERMINAL",0,0,"7",,terminal_output +6470,4372661,"TERMINAL",0,0,"8",,terminal_output +6471,4373688,"TERMINAL",0,0,"9",,terminal_output +6472,4374700,"TERMINAL",0,0,"20",,terminal_output +6473,4375730,"TERMINAL",0,0,"1",,terminal_output +6474,4376750,"TERMINAL",0,0,"2",,terminal_output +6475,4377787,"TERMINAL",0,0,"3",,terminal_output +6476,4378797,"TERMINAL",0,0,"4",,terminal_output +6477,4379817,"TERMINAL",0,0,"5",,terminal_output +6478,4380835,"TERMINAL",0,0,"6",,terminal_output +6479,4381857,"TERMINAL",0,0,"7",,terminal_output +6480,4382870,"TERMINAL",0,0,"8",,terminal_output +6481,4383889,"TERMINAL",0,0,"9",,terminal_output +6482,4384941,"TERMINAL",0,0,"30",,terminal_output +6483,4385957,"TERMINAL",0,0,"1",,terminal_output +6484,4386980,"TERMINAL",0,0,"2",,terminal_output +6485,4388002,"TERMINAL",0,0,"3",,terminal_output +6486,4389020,"TERMINAL",0,0,"4",,terminal_output +6487,4390043,"TERMINAL",0,0,"5",,terminal_output 
+6488,4391080,"TERMINAL",0,0,"6",,terminal_output +6489,4392082,"TERMINAL",0,0,"7",,terminal_output +6490,4393108,"TERMINAL",0,0,"8",,terminal_output +6491,4394154,"TERMINAL",0,0,"9",,terminal_output +6492,4395145,"TERMINAL",0,0,"40",,terminal_output +6493,4396164,"TERMINAL",0,0,"1",,terminal_output +6494,4397186,"TERMINAL",0,0,"2",,terminal_output +6495,4398205,"TERMINAL",0,0,"3",,terminal_output +6496,4399229,"TERMINAL",0,0,"4",,terminal_output +6497,4400249,"TERMINAL",0,0,"5",,terminal_output +6498,4401288,"TERMINAL",0,0,"6",,terminal_output +6499,4402409,"TERMINAL",0,0,"7",,terminal_output +6500,4403345,"TERMINAL",0,0,"8",,terminal_output +6501,4404362,"TERMINAL",0,0,"9",,terminal_output +6502,4405436,"TERMINAL",0,0,"50",,terminal_output +6503,4406405,"TERMINAL",0,0,"1",,terminal_output +6504,4407425,"TERMINAL",0,0,"2",,terminal_output +6505,4408452,"TERMINAL",0,0,"3",,terminal_output +6506,4409543,"TERMINAL",0,0,"4",,terminal_output +6507,4410555,"TERMINAL",0,0,"5",,terminal_output +6508,4411511,"TERMINAL",0,0,"7",,terminal_output +6509,4412532,"TERMINAL",0,0,"8",,terminal_output +6510,4413554,"TERMINAL",0,0,"9",,terminal_output +6511,4414674,"TERMINAL",0,0,"1:00",,terminal_output +6512,4415680,"TERMINAL",0,0,"1",,terminal_output +6513,4416614,"TERMINAL",0,0,"2",,terminal_output +6514,4417629,"TERMINAL",0,0,"3",,terminal_output +6515,4418652,"TERMINAL",0,0,"4",,terminal_output +6516,4419711,"TERMINAL",0,0,"5",,terminal_output +6517,4420789,"TERMINAL",0,0,"6",,terminal_output +6518,4421721,"TERMINAL",0,0,"7",,terminal_output +6519,4422742,"TERMINAL",0,0,"8",,terminal_output +6520,4423764,"TERMINAL",0,0,"9",,terminal_output +6521,4424786,"TERMINAL",0,0,"10",,terminal_output +6522,4425805,"TERMINAL",0,0,"1",,terminal_output +6523,4426822,"TERMINAL",0,0,"2",,terminal_output +6524,4427854,"TERMINAL",0,0,"3",,terminal_output +6525,4428876,"TERMINAL",0,0,"4",,terminal_output +6526,4429888,"TERMINAL",0,0,"5",,terminal_output +6527,4430905,"TERMINAL",0,0,"6",,terminal_output +6528,4431922,"TERMINAL",0,0,"7",,terminal_output +6529,4432943,"TERMINAL",0,0,"8",,terminal_output +6530,4433995,"TERMINAL",0,0,"9",,terminal_output +6531,4435017,"TERMINAL",0,0,"20",,terminal_output +6532,4436036,"TERMINAL",0,0,"1",,terminal_output diff --git a/927a8af5474e5654810c00ce2e09fd2de87d3e5722f33fa1090d867db114e403/crowd-code-2f5e552b-d86c-4a34-a644-139d05fcf0731753100718217-2025_07_21-14.25.46.738/source.csv b/927a8af5474e5654810c00ce2e09fd2de87d3e5722f33fa1090d867db114e403/crowd-code-2f5e552b-d86c-4a34-a644-139d05fcf0731753100718217-2025_07_21-14.25.46.738/source.csv new file mode 100644 index 0000000000000000000000000000000000000000..492dcf975e1481bf3c2151c3db954eabaeb2303d --- /dev/null +++ b/927a8af5474e5654810c00ce2e09fd2de87d3e5722f33fa1090d867db114e403/crowd-code-2f5e552b-d86c-4a34-a644-139d05fcf0731753100718217-2025_07_21-14.25.46.738/source.csv @@ -0,0 +1,3662 @@ +Sequence,Time,File,RangeOffset,RangeLength,Text,Language,Type +1,6,"models/dynamics.py",0,0,"from typing import Dict, Any\n\nimport jax\nimport jax.numpy as jnp\nimport flax.linen as nn\nimport einops\n\nfrom utils.nn import STTransformer\n\n\nclass DynamicsMaskGIT(nn.Module):\n """"""MaskGIT dynamics model""""""\n\n model_dim: int\n num_latents: int\n num_blocks: int\n num_heads: int\n dropout: float\n mask_limit: float\n param_dtype: jnp.dtype\n dtype: jnp.dtype\n\n def setup(self):\n self.dynamics = STTransformer(\n self.model_dim,\n self.num_latents,\n self.num_blocks,\n self.num_heads,\n self.dropout,\n self.param_dtype,\n self.dtype,\n 
spacial_bert=True,\n )\n self.patch_embed = nn.Embed(self.num_latents, self.model_dim)\n self.mask_token = self.param(\n ""mask_token"",\n nn.initializers.lecun_uniform(),\n (1, 1, 1, self.model_dim),\n )\n self.action_up = nn.Dense(\n self.model_dim,\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n )\n\n def __call__(self, batch: Dict[str, Any], training: bool = True) -> Dict[str, Any]:\n # --- Mask videos ---\n vid_embed = self.patch_embed(batch[""video_tokens""])\n if training:\n rng1, rng2 = jax.random.split(batch[""mask_rng""])\n mask_prob = jax.random.uniform(rng1, minval=self.mask_limit)\n mask = jax.random.bernoulli(rng2, mask_prob, vid_embed.shape[:-1])\n mask = mask.at[:, 0].set(False)\n vid_embed = jnp.where(jnp.expand_dims(mask, -1), self.mask_token, vid_embed)\n else:\n mask = None\n\n # --- Predict transition ---\n act_embed = self.action_up(batch[""latent_actions""])\n vid_embed += jnp.pad(act_embed, ((0, 0), (1, 0), (0, 0), (0, 0)))\n logits = self.dynamics(vid_embed)\n return dict(token_logits=logits, mask=mask)\n\n\nclass DynamicsAutoregressive(nn.Module):\n """"""Autoregressive (causal) dynamics model""""""\n\n model_dim: int\n num_latents: int\n num_blocks: int\n num_heads: int\n dropout: float\n param_dtype: jnp.dtype\n dtype: jnp.dtype\n\n def setup(self):\n self.dynamics = STTransformer(\n self.model_dim,\n self.num_latents,\n self.num_blocks,\n self.num_heads,\n self.dropout,\n self.param_dtype,\n self.dtype,\n spacial_bert=False,\n )\n self.patch_embed = nn.Embed(self.num_latents, self.model_dim)\n self.action_up = nn.Dense(\n self.model_dim,\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n )\n\n def __call__(self, batch: Dict[str, Any], training: bool = True) -> Dict[str, Any]:\n vid_embed = self.patch_embed(batch[""video_tokens""])\n act_embed = self.action_up(batch[""latent_actions""])\n vid_embed += jnp.pad(act_embed, ((0, 0), (1, 0), (0, 0), (0, 0)))\n vid_embed_padded = jnp.pad(vid_embed, ((0, 0), (1, 0), (1, 0), (0, 0)))\n logits = self.dynamics(vid_embed_padded)[:, :-1, :-1]\n # vid_embed_padded = jnp.pad(vid_embed, ((0, 0), (0, 0), (1, 0), (0, 0)))\n # logits = self.dynamics(vid_embed_padded)[:, :, :-1]\n mask = jnp.ones(vid_embed.shape[:-1])\n # next_tokens = jnp.argmax(logits, axis=-1)\n # print(next_tokens.shape)\n # jax.debug.breakpoint()\n return dict(token_logits=logits, mask=mask)",python,tab +2,182,"models/dynamics.py",0,0,"",python,tab +3,273,"extension-output-pdoom-org.crowd-code-#1-crowd-code",0,0,"2:25:46 PM [info] Activating crowd-code\n2:25:46 PM [info] Recording started\n2:25:46 PM [info] Initializing git provider using file system watchers...\n2:25:46 PM [info] Git repository found\n2:25:46 PM [info] Git provider initialized successfully\n",Log,tab +4,346,"extension-output-pdoom-org.crowd-code-#1-crowd-code",245,0,"2:25:46 PM [info] Initial git state: [object Object]\n",Log,content +5,1074,"models/tokenizer.py",0,0,"from typing import Dict, Any, Tuple\n\nimport flax.linen as nn\nimport jax.numpy as jnp\n\nfrom utils.preprocess import patchify, unpatchify\nfrom utils.nn import STTransformer, VectorQuantizer\n\n\nclass TokenizerVQVAE(nn.Module):\n """"""ST-ViVit VQ-VAE""""""\n\n in_dim: int\n model_dim: int\n latent_dim: int\n num_latents: int\n patch_size: int\n num_blocks: int\n num_heads: int\n dropout: float\n codebook_dropout: float\n param_dtype: jnp.dtype\n dtype: jnp.dtype\n\n def setup(self):\n self.encoder = STTransformer(\n self.model_dim,\n self.latent_dim,\n self.num_blocks,\n self.num_heads,\n self.dropout,\n 
self.param_dtype,\n self.dtype,\n )\n self.vq = VectorQuantizer(\n self.latent_dim,\n self.num_latents,\n self.codebook_dropout,\n )\n self.out_dim = self.in_dim * self.patch_size**2\n self.decoder = STTransformer(\n self.model_dim,\n self.out_dim,\n self.num_blocks,\n self.num_heads,\n self.dropout,\n self.param_dtype,\n self.dtype,\n )\n\n def __call__(self, batch: Dict[str, Any], training: bool = True) -> Dict[str, Any]:\n H, W = batch[""videos""].shape[2:4]\n outputs = self.vq_encode(batch[""videos""], training)\n recon = self.decoder(outputs[""z_q""]) # (B, T, H_down * W_down, C)\n recon = recon.astype(jnp.float32)\n recon = nn.sigmoid(recon)\n recon = recon.astype(self.dtype)\n outputs[""recon""] = unpatchify(recon, self.patch_size, H, W)\n return outputs\n\n def vq_encode(self, videos: Any, training: bool = True) -> Dict[str, Any]:\n # --- Preprocess + encode ---\n B, T = videos.shape[:2]\n x = patchify(videos, self.patch_size)\n N = x.shape[2]\n x = self.encoder(x) # (B, T, N, E)\n\n # --- Vector quantize ---\n x = x.reshape(B * T * N, self.latent_dim)\n z_q, z, emb, indices = self.vq(x, training)\n z_q = z_q.reshape(B, T, N, self.latent_dim)\n indices = indices.reshape(B, T, N)\n return dict(z_q=z_q, z=z, emb=emb, indices=indices)\n\n def decode(self, indices: Any, video_hw: Tuple[int, int]):\n z = self.vq.codebook[indices]\n recon = self.decoder(z)\n recon = recon.astype(jnp.float32)\n recon = nn.sigmoid(recon)\n recon = recon.astype(self.dtype)\n return unpatchify(recon, self.patch_size, *video_hw)\n",python,tab +6,1395,"models/tokenizer.py",0,0,"",python,tab +7,2027,"train_dynamics.py",0,0,"from dataclasses import dataclass, field\nimport os\n\nimport einops\nfrom flax.training.train_state import TrainState\nfrom jax.sharding import Mesh, PartitionSpec, NamedSharding\nfrom jax.experimental.mesh_utils import create_device_mesh\nimport optax\nimport orbax.checkpoint as ocp\nimport numpy as np\nimport dm_pix as pix\nimport jax\nimport jax.numpy as jnp\nimport tyro\nimport wandb\nimport grain\n\nfrom genie import Genie, restore_genie_components\nfrom utils.dataloader import get_dataloader\nfrom utils.lr_utils import get_lr_schedule\nfrom utils.parameter_utils import count_parameters_by_component\n\n@dataclass\nclass Args:\n # Experiment\n num_steps: int = 200_000\n seed: int = 0\n seq_len: int = 16\n image_channels: int = 3\n image_height: int = 90\n image_width: int = 160\n data_dir: str = """"\n save_ckpt: bool = False\n restore_ckpt: bool = False\n # Optimization\n batch_size: int = 36\n init_lr: float = 0.0\n max_lr: float = 3e-5\n decay_end: float = 0.0\n wsd_decay_steps: int = 10000 # NOTE: wsd_decay_steps will only be used when using a wsd-schedule\n warmup_steps: int = 5000\n lr_schedule : str = ""wsd"" # supported options: wsd, cos\n # Tokenizer\n tokenizer_dim: int = 512\n latent_patch_dim: int = 32\n num_patch_latents: int = 1024\n patch_size: int = 4\n tokenizer_num_blocks: int = 8\n tokenizer_num_heads: int = 8\n tokenizer_checkpoint: str = """"\n # LAM\n lam_dim: int = 512\n latent_action_dim: int = 32\n num_latent_actions: int = 6\n lam_patch_size: int = 16\n lam_num_blocks: int = 8\n lam_num_heads: int = 8\n lam_checkpoint: str = """"\n # Dynamics\n dyna_dim: int = 512\n dyna_num_blocks: int = 12\n dyna_num_heads: int = 8\n dropout: float = 0.0\n mask_limit: float = 0.5\n use_maskgit: bool = False\n param_dtype: jnp.dtype = jnp.float32\n dtype: jnp.dtype = jnp.bfloat16\n # Logging\n log: bool = False\n entity: str = """"\n project: str = """"\n name: str = 
""train_dynamics""\n tags: list[str] = field(default_factory=lambda: [""dynamics""])\n log_interval: int = 5\n log_image_interval: int = 250\n ckpt_dir: str = """"\n log_checkpoint_interval: int = 25000\n log_checkpoint_keep_period: int = 20000\n log_gradients: bool = False\n wandb_id: str = """"\n\n\nargs = tyro.cli(Args)\n\n\ndef dynamics_loss_fn(params, state, inputs):\n """"""Compute masked dynamics loss""""""\n inputs[""videos""] = inputs[""videos""].astype(args.dtype) / 255.0\n outputs = state.apply_fn(\n params,\n inputs,\n training=True,\n rngs={""params"": inputs[""rng""], ""dropout"": inputs[""dropout_rng""]},\n )\n mask = outputs[""mask""]\n outputs[""token_logits""] = outputs[""token_logits""].astype(jnp.float32)\n logits = outputs[""token_logits""]\n targets = outputs[""video_tokens""]\n\n # if not args.use_maskgit:\n # logits = outputs[""token_logits""][:, :, :-1]\n # targets = outputs[""video_tokens""][:, :, 1:]\n # mask = outputs[""mask""][:, :, 1:] \n\n ce_loss = optax.softmax_cross_entropy_with_integer_labels(\n logits, targets\n )\n ce_loss = (mask * ce_loss).sum() / mask.sum()\n acc = logits.argmax(-1) == targets\n acc = (mask * acc).sum() / mask.sum()\n select_probs = jax.nn.softmax(logits)\n gt = inputs[""videos""].clip(0, 1).reshape(-1, *inputs[""videos""].shape[2:])\n recon = outputs[""recon""].clip(0, 1).reshape(-1, *outputs[""recon""].shape[2:])\n psnr = pix.psnr(gt, recon).mean() # type: ignore\n ssim = pix.ssim(gt, recon).mean() # type: ignore\n _, index_counts_lam = jnp.unique_counts(\n jnp.ravel(outputs[""lam_indices""]), size=args.num_latent_actions, fill_value=0\n )\n _, index_counts_tokenizer = jnp.unique_counts(\n jnp.ravel(outputs[""video_tokens""]), size=args.num_patch_latents, fill_value=0\n )\n codebook_usage_lam = (index_counts_lam != 0).mean()\n codebook_usage_tokenizer = (index_counts_tokenizer != 0).mean()\n metrics = dict(\n cross_entropy_loss=ce_loss,\n masked_token_accuracy=acc,\n select_logit=logits.max(-1).mean(),\n select_p=select_probs.max(-1).mean(),\n entropy=jax.scipy.special.entr(select_probs).sum(-1).mean(),\n psnr=psnr,\n ssim=ssim,\n codebook_usage_lam=codebook_usage_lam,\n codebook_usage_tokenizer=codebook_usage_tokenizer,\n )\n return ce_loss, (outputs[""recon""], metrics)\n\n\n@jax.jit\ndef train_step(state, inputs):\n """"""Update state and compute metrics""""""\n grad_fn = jax.value_and_grad(dynamics_loss_fn, has_aux=True, allow_int=True)\n (loss, (recon, metrics)), grads = grad_fn(state.params, state, inputs)\n state = state.apply_gradients(grads=grads)\n if args.log_gradients:\n metrics[""gradients_std/""] = jax.tree.map(\n lambda x: x.std(), grads[""params""][""dynamics""]\n )\n return state, loss, recon, metrics\n\n\nif __name__ == ""__main__"":\n jax.distributed.initialize()\n num_devices = jax.device_count()\n if num_devices == 0:\n raise ValueError(""No JAX devices found."")\n print(f""Running on {num_devices} devices."")\n\n if args.batch_size % num_devices != 0:\n raise ValueError(\n f""Global batch size {args.batch_size} must be divisible by ""\n f""number of devices {num_devices}.""\n )\n\n per_device_batch_size_for_init = args.batch_size // num_devices\n\n rng = jax.random.PRNGKey(args.seed)\n\n # --- Initialize model ---\n genie = Genie(\n # Tokenizer\n in_dim=args.image_channels,\n tokenizer_dim=args.tokenizer_dim,\n latent_patch_dim=args.latent_patch_dim,\n num_patch_latents=args.num_patch_latents,\n patch_size=args.patch_size,\n tokenizer_num_blocks=args.tokenizer_num_blocks,\n 
tokenizer_num_heads=args.tokenizer_num_heads,\n # LAM\n lam_dim=args.lam_dim,\n latent_action_dim=args.latent_action_dim,\n num_latent_actions=args.num_latent_actions,\n lam_patch_size=args.lam_patch_size,\n lam_num_blocks=args.lam_num_blocks,\n lam_num_heads=args.lam_num_heads,\n lam_co_train=not args.lam_checkpoint,\n # Dynamics\n dyna_dim=args.dyna_dim,\n dyna_num_blocks=args.dyna_num_blocks,\n dyna_num_heads=args.dyna_num_heads,\n dropout=args.dropout,\n mask_limit=args.mask_limit,\n use_maskgit=args.use_maskgit,\n param_dtype=args.param_dtype,\n dtype=args.dtype,\n )\n rng, _rng = jax.random.split(rng)\n image_shape = (args.image_height, args.image_width, args.image_channels)\n dummy_inputs = dict(\n videos=jnp.zeros(\n (per_device_batch_size_for_init, args.seq_len, *image_shape),\n dtype=args.dtype,\n ),\n action=jnp.zeros(\n (per_device_batch_size_for_init, args.seq_len), dtype=args.dtype\n ),\n mask_rng=_rng,\n )\n rng, _rng = jax.random.split(rng)\n init_params = genie.init(_rng, dummy_inputs)\n\n param_counts = count_parameters_by_component(init_params)\n\n if args.log and jax.process_index() == 0:\n wandb_init_kwargs = {\n ""entity"": args.entity,\n ""project"": args.project,\n ""name"": args.name,\n ""tags"": args.tags,\n ""group"": ""debug"",\n ""config"": args,\n }\n\n if args.wandb_id:\n wandb_init_kwargs.update(\n {\n ""id"": args.wandb_id,\n ""resume"": ""allow"",\n }\n )\n wandb.init(**wandb_init_kwargs)\n\n wandb.config.update({""model_param_count"": param_counts})\n\n print(""Parameter counts:"")\n print(param_counts)\n\n # --- Initialize optimizer ---\n lr_schedule = get_lr_schedule(args.lr_schedule, \n args.init_lr, \n args.max_lr, \n args.decay_end, \n args.num_steps, \n args.warmup_steps, \n args.wsd_decay_steps)\n tx = optax.adamw(learning_rate=lr_schedule, b1=0.9, b2=0.9, weight_decay=1e-4, mu_dtype=args.dtype)\n train_state = TrainState.create(apply_fn=genie.apply, params=init_params, tx=tx)\n\n device_mesh_arr = create_device_mesh((num_devices,))\n mesh = Mesh(devices=device_mesh_arr, axis_names=(""data"",))\n\n replicated_sharding = NamedSharding(mesh, PartitionSpec())\n videos_sharding = NamedSharding(mesh, PartitionSpec(""data"", None, None, None, None))\n train_state = jax.device_put(train_state, replicated_sharding)\n\n # --- Initialize checkpoint manager ---\n step = 0\n handler_registry = ocp.handlers.DefaultCheckpointHandlerRegistry()\n handler_registry.add(\n ""model_state"", ocp.args.StandardSave, ocp.handlers.StandardCheckpointHandler\n )\n handler_registry.add(\n ""model_state"", ocp.args.StandardRestore, ocp.handlers.StandardCheckpointHandler\n )\n handler_registry.add(""dataloader_state"", grain.checkpoint.CheckpointSave, grain.checkpoint.CheckpointHandler) # type: ignore\n handler_registry.add(""dataloader_state"", grain.checkpoint.CheckpointRestore, grain.checkpoint.CheckpointHandler) # type: ignore\n\n checkpoint_options = ocp.CheckpointManagerOptions(\n save_interval_steps=args.log_checkpoint_interval,\n max_to_keep=3,\n keep_period=args.log_checkpoint_keep_period,\n step_format_fixed_length=6,\n cleanup_tmp_directories=True,\n )\n\n checkpoint_manager = ocp.CheckpointManager(\n args.ckpt_dir,\n options=checkpoint_options,\n handler_registry=handler_registry,\n )\n\n # --- Create DataLoaderIterator from dataloader ---\n array_record_files = [\n os.path.join(args.data_dir, x)\n for x in os.listdir(args.data_dir)\n if x.endswith("".array_record"")\n ]\n grain_dataloader = get_dataloader(\n array_record_files,\n args.seq_len,\n # NOTE: We 
deliberately pass the global batch size\n # The dataloader shards the dataset across all processes\n args.batch_size,\n *image_shape,\n num_workers=8,\n prefetch_buffer_size=1,\n seed=args.seed,\n )\n initial_state = grain_dataloader._create_initial_state()\n grain_iterator = grain.DataLoaderIterator(grain_dataloader, initial_state)\n\n # --- Restore checkpoint ---\n if args.restore_ckpt:\n # Restore full dynamics model\n abstract_train_state = jax.tree_util.tree_map(\n ocp.utils.to_shape_dtype_struct, train_state\n )\n restored = checkpoint_manager.restore(\n checkpoint_manager.latest_step(),\n args=ocp.args.Composite(\n model_state=ocp.args.StandardRestore(abstract_train_state),\n dataloader_state=grain.checkpoint.CheckpointRestore(grain_iterator),\n ),\n )\n train_state = restored[""model_state""]\n grain_iterator = restored[""dataloader_state""]\n step = checkpoint_manager.latest_step() or 0\n print(f""Restored dataloader and model state from step {step}"")\n else:\n # Restore from pre-trained tokenizer (and LAM)\n train_state = restore_genie_components(\n train_state, replicated_sharding, grain_iterator, dummy_inputs, rng, args\n )\n\n # --- TRAIN LOOP ---\n dataloader = (jax.make_array_from_process_local_data(videos_sharding, elem) for elem in grain_iterator) # type: ignore\n while step < args.num_steps:\n for videos in dataloader:\n # videos = np.load(""overfit_dir/corner_8repl.npy"")\n # videos = jax.make_array_from_process_local_data(videos_sharding, videos)\n # while True:\n # --- Train step ---\n rng, _rng, _rng_dropout, _rng_mask = jax.random.split(rng, 4)\n\n inputs = dict(\n videos=videos,\n rng=_rng,\n dropout_rng=_rng_dropout,\n mask_rng=_rng_mask,\n )\n train_state, loss, recon, metrics = train_step(train_state, inputs)\n metrics[""lr""] = lr_schedule(step)\n print(f""Step {step}, loss: {loss}"")\n step += 1\n\n # --- Logging ---\n if args.log:\n if step % args.log_interval == 0 and jax.process_index() == 0:\n wandb.log(\n {\n ""loss"": loss,\n ""step"": step,\n **metrics,\n }\n )\n if step % args.log_image_interval == 0:\n gt_seq = inputs[""videos""][0].astype(jnp.float32) / 255.0\n recon_seq = recon[0].clip(0, 1)\n comparison_seq = jnp.concatenate((gt_seq, recon_seq), axis=1)\n comparison_seq = einops.rearrange(\n comparison_seq * 255, ""t h w c -> h (t w) c""\n )\n if jax.process_index() == 0:\n log_images = dict(\n image=wandb.Image(np.asarray(gt_seq[args.seq_len - 1])),\n recon=wandb.Image(np.asarray(recon_seq[args.seq_len - 1])),\n true_vs_recon=wandb.Image(\n np.asarray(comparison_seq.astype(np.uint8))\n ),\n )\n wandb.log(log_images)\n # --- Checkpointing ---\n if args.save_ckpt and step % args.log_checkpoint_interval == 0:\n checkpoint_manager.save(\n step,\n args=ocp.args.Composite(\n model_state=ocp.args.StandardSave(train_state),\n dataloader_state=grain.checkpoint.CheckpointSave(\n grain_iterator\n ),\n ),\n )\n print(f""Saved checkpoint at step {step}"")\n if step >= args.num_steps:\n break\n\n checkpoint_manager.close()\n",python,tab +8,2425,"train_dynamics.py",0,0,"",python,tab +9,9749,"train_dynamics.py",12101,0,"",python,selection_mouse +10,9752,"train_dynamics.py",12100,0,"",python,selection_command +11,14984,"TERMINAL",0,0,"queue",,terminal_command +12,15041,"TERMINAL",0,0,"]633;E;2025-07-21 14:26:01 queue;9dd50732-d6d7-4d7e-a22c-51ae92e646cb]633;C[?1049h(B[?7hEvery 1.0s: squeue --mehkn1990.localdomain: Mon Jul 21 14:26:01 2025JOBID PARTITION NAME USER ST\tTIME NODES NODELIST(REASON)3364311 accelerat interact tum_cte0 R 2:14:02\t 2 hkn[0608,0724]3364312 
accelerat interact tum_cte0 R 3:21:19\t 1 hkn0710",,terminal_output +13,16090,"TERMINAL",0,0,"2320",,terminal_output +14,17140,"TERMINAL",0,0,"341",,terminal_output +15,18190,"TERMINAL",0,0,"452",,terminal_output +16,18825,"TERMINAL",0,0,"bash",,terminal_focus +17,19247,"TERMINAL",0,0,"watch",,terminal_focus +18,19252,"TERMINAL",0,0,"563",,terminal_output +19,20299,"TERMINAL",0,0,"674",,terminal_output +20,21337,"TERMINAL",0,0,"785",,terminal_output +21,22390,"TERMINAL",0,0,"9107",,terminal_output +22,23435,"TERMINAL",0,0,"1018",,terminal_output +23,24473,"TERMINAL",0,0,"129",,terminal_output +24,24922,"TERMINAL",0,0,"[?1049l\r[?1l>]0;tum_cte0515@hkn1990:~/Projects/jafar]633;D;0",,terminal_output +25,26425,"TERMINAL",0,0,"scancel 3364311",,terminal_command +26,26444,"TERMINAL",0,0,"]633;E;2025-07-21 14:26:13 scancel 3364311;9dd50732-d6d7-4d7e-a22c-51ae92e646cb]633;C]0;tum_cte0515@hkn1990:~/Projects/jafar]633;D;0]633;P;Cwd=/home/hk-project-p0023960/tum_cte0515/Projects/jafar",,terminal_output +27,27389,"TERMINAL",0,0,"queue",,terminal_command +28,27440,"TERMINAL",0,0,"]633;E;2025-07-21 14:26:14 queue;9dd50732-d6d7-4d7e-a22c-51ae92e646cb]633;C",,terminal_output +29,27503,"TERMINAL",0,0,"[?1049h(B[?7hEvery 1.0s: squeue --mehkn1990.localdomain: Mon Jul 21 14:26:14 2025JOBID PARTITION NAME USER ST\tTIME NODES NODELIST(REASON)3364311 accelerat interact tum_cte0 CG 2:14:14\t 2 hkn[0608,0724]3364312 accelerat interact tum_cte0 R 3:21:32\t 1 hkn0710",,terminal_output +30,28543,"TERMINAL",0,0,"53",,terminal_output +31,29321,"TERMINAL",0,0,"[?1049l\r[?1l>]0;tum_cte0515@hkn1990:~/Projects/jafar]633;D;0",,terminal_output +32,33023,"TERMINAL",0,0,"bash",,terminal_focus +33,38440,"TERMINAL",0,0,"bash",,terminal_focus +34,40227,"TERMINAL",0,0,"scancel 3364312",,terminal_command +35,40249,"TERMINAL",0,0,"]633;E;2025-07-21 14:26:26 scancel 3364312;9dd50732-d6d7-4d7e-a22c-51ae92e646cb]633;C]0;tum_cte0515@hkn1990:~/Projects/jafar]633;D;0]633;P;Cwd=/home/hk-project-p0023960/tum_cte0515/Projects/jafar",,terminal_output +36,45498,"TERMINAL",0,0,"loc --time=10:00:00 --partition=accelerated --nodes=2 --ntasks-per-node=4 --gres=gpu:4 --cpus-per-task=5",,terminal_command +37,45521,"TERMINAL",0,0,"^C[?2004l\r[?2004h[?2004l\r\r\n]633;E;;9dd50732-d6d7-4d7e-a22c-51ae92e646cb]633;C]0;tum_cte0515@hkn1990:~/Projects/jafar]633;D",,terminal_output +38,48012,"TERMINAL",0,0,"salloc --time=10:00:00 --partition=accelerated --nodes=1 --ntasks-per-node=1 --gres=gpu:1 --cpus-per-task=5",,terminal_command +39,48071,"TERMINAL",0,0,"]633;E;2025-07-21 14:26:34 salloc --time=10:00:00 --partition=accelerated --nodes=1 --ntasks-per-node=1 --gres=gpu:1 --cpus-per-task=5;9dd50732-d6d7-4d7e-a22c-51ae92e646cb]633;Csalloc: Pending job allocation 3365091\r\nsalloc: job 3365091 queued and waiting for resources\r\n",,terminal_output +40,49316,"TERMINAL",0,0,"bash",,terminal_focus +41,52013,"TERMINAL",0,0,"salloc --time=10:00:00 --partition=accelerated --nodes=2 --ntasks-per-node=4 --gres=gpu:4 --cpus-per-task=5",,terminal_command +42,52067,"TERMINAL",0,0,"]633;E;2025-07-21 14:26:38 salloc --time=10:00:00 --partition=accelerated --nodes=2 --ntasks-per-node=4 --gres=gpu:4 --cpus-per-task=5;2fac4da8-d4f0-4d83-a6ce-f6776ed5ed51]633;Csalloc: Pending job allocation 3365092\r\nsalloc: job 3365092 queued and waiting for resources\r\n",,terminal_output +43,52983,"TERMINAL",0,0,"bash",,terminal_focus +44,53966,"TERMINAL",0,0,"queu",,terminal_command +45,54021,"TERMINAL",0,0,"]633;E;2025-07-21 14:26:40 queu;ae54d867-0567-4fb2-95c9-86b7932c267e]633;Cbash: 
queu: command not found...\r\n",,terminal_output +46,54732,"TERMINAL",0,0,"^C\r\n]0;tum_cte0515@hkn1990:~/Projects/jafar]633;D;130",,terminal_output +47,55498,"TERMINAL",0,0,"queue",,terminal_command +48,55509,"TERMINAL",0,0,"]633;E;2025-07-21 14:26:42 queue;ae54d867-0567-4fb2-95c9-86b7932c267e]633;C",,terminal_output +49,55575,"TERMINAL",0,0,"[?1049h(B[?7hEvery 1.0s: squeue --mehkn1990.localdomain: Mon Jul 21 14:26:42 2025JOBID PARTITION NAME USER ST\tTIME NODES NODELIST(REASON)3364311 accelerat interact tum_cte0 CG 2:14:14\t 1 hkn06083364312 accelerat interact tum_cte0 CG 3:21:44\t 1 hkn07103365092 accelerat interact tum_cte0 PD\t0:00\t 2 (Priority)3365091 accelerat interact tum_cte0 PD\t0:00\t 1 (Priority)",,terminal_output +50,56626,"TERMINAL",0,0,"3",,terminal_output +51,57582,"TERMINAL",0,0,"[?1049l\r[?1l>]0;tum_cte0515@hkn1990:~/Projects/jafar]633;D;0",,terminal_output +52,58375,"TERMINAL",0,0,"idling",,terminal_command +53,58442,"TERMINAL",0,0,"]633;E;2025-07-21 14:26:45 idling;ae54d867-0567-4fb2-95c9-86b7932c267e]633;C[?1049h(B[?7hEvery 1.0s: sinfo_t_idlehkn1990.localdomain: Mon Jul 21 14:26:45 2025Partition dev_cpuonly:\t 3 nodes idle\rPartition cpuonly: 217 nodes idle\rPartition dev_accelerated:\t 0 nodes idle\rPartition accelerated:\t 3 nodes idle\rPartition dev_accelerated-h100 :\t 1 nodes idle\rPartition accelerated-h100:\t 0 nodes idle\rPartition large:\t 7 nodes idle",,terminal_output +54,59474,"TERMINAL",0,0,"6",,terminal_output +55,60516,"TERMINAL",0,0,"7",,terminal_output +56,61559,"TERMINAL",0,0,"8",,terminal_output +57,62600,"TERMINAL",0,0,"9",,terminal_output +58,63640,"TERMINAL",0,0,"50",,terminal_output +59,64158,"TERMINAL",0,0,"[?1049l\r[?1l>]0;tum_cte0515@hkn1990:~/Projects/jafar]633;D;0",,terminal_output +60,80065,"TERMINAL",0,0,"salloc --time=10:00:00 --partition=accelerated-h100 --nodes=1 --ntasks-per-node=1 --gres=gpu:1 --cpus-per-task=5",,terminal_command +61,80116,"TERMINAL",0,0,"]633;E;2025-07-21 14:27:06 salloc --time=10:00:00 --partition=accelerated-h100 --nodes=1 --ntasks-per-node=1 --gres=gpu:1 --cpus-per-task=5;ae54d867-0567-4fb2-95c9-86b7932c267e]633;Csalloc: Pending job allocation 3365094\r\nsalloc: job 3365094 queued and waiting for resources\r\n",,terminal_output +62,81767,"TERMINAL",0,0,"^Csalloc: Job allocation 3365094 has been revoked.\r\nsalloc: Job aborted due to signal\r\n]0;tum_cte0515@hkn1990:~/Projects/jafar]633;D;1",,terminal_output +63,85070,"TERMINAL",0,0,"idling",,terminal_command +64,85121,"TERMINAL",0,0,"]633;E;2025-07-21 14:27:11 idling;ae54d867-0567-4fb2-95c9-86b7932c267e]633;C",,terminal_output +65,85196,"TERMINAL",0,0,"[?1049h(B[?7hEvery 1.0s: sinfo_t_idlehkn1990.localdomain: Mon Jul 21 14:27:11 2025Partition dev_cpuonly:\t 3 nodes idle\rPartition cpuonly: 217 nodes idle\rPartition dev_accelerated:\t 0 nodes idle\rPartition accelerated:\t 3 nodes idle\rPartition dev_accelerated-h100 :\t 1 nodes idle\rPartition accelerated-h100:\t 0 nodes idle\rPartition large:\t 7 nodes idle",,terminal_output +66,86283,"TERMINAL",0,0,"2",,terminal_output +67,87252,"TERMINAL",0,0,"3",,terminal_output +68,87783,"TERMINAL",0,0,"[?1049l\r[?1l>]0;tum_cte0515@hkn1990:~/Projects/jafar]633;D;0",,terminal_output +69,97631,"TERMINAL",0,0,"salloc --time=10:00:00 --partition=dev_accelerated-h100 --nodes=1 --ntasks-per-node=1 --gres=gpu:1 --cpus-per-task=5",,terminal_command +70,97681,"TERMINAL",0,0,"]633;E;2025-07-21 14:27:24 salloc --time=10:00:00 --partition=dev_accelerated-h100 --nodes=1 --ntasks-per-node=1 --gres=gpu:1 
--cpus-per-task=5;ae54d867-0567-4fb2-95c9-86b7932c267e]633;C",,terminal_output +71,98577,"TERMINAL",0,0,"salloc: error: Job submit/allocate failed: Requested time limit is invalid (missing or exceeds some limit)\r\n]0;tum_cte0515@hkn1990:~/Projects/jafar]633;D;1",,terminal_output +72,98599,"TERMINAL",0,0,"salloc: job 3365091 has been allocated resources\r\nsalloc: Granted job allocation 3365091\r\n",,terminal_output +73,98725,"TERMINAL",0,0,"salloc: Waiting for resource configuration\r\n",,terminal_output +74,103263,"TERMINAL",0,0,"salloc --time=01:00:00 --partition=dev_accelerated-h100 --nodes=1 --ntasks-per-node=1 --gres=gpu:1 --cpus-per-task=5",,terminal_command +75,103307,"TERMINAL",0,0,"]633;E;2025-07-21 14:27:29 salloc --time=01:00:00 --partition=dev_accelerated-h100 --nodes=1 --ntasks-per-node=1 --gres=gpu:1 --cpus-per-task=5;ae54d867-0567-4fb2-95c9-86b7932c267e]633;Csalloc: Granted job allocation 3365095\r\n",,terminal_output +76,103411,"TERMINAL",0,0,"salloc: Waiting for resource configuration\r\n",,terminal_output +77,105026,"TERMINAL",0,0,"salloc",,terminal_focus +78,106563,"TERMINAL",0,0,"salloc",,terminal_focus +79,107904,"TERMINAL",0,0,"salloc",,terminal_focus +80,110753,"TERMINAL",0,0,"salloc",,terminal_focus +81,112669,"TERMINAL",0,0,"salloc",,terminal_focus +82,114488,"TERMINAL",0,0,"salloc",,terminal_focus +83,119135,"TERMINAL",0,0,"[?25lso[?25h[?25lo[?25h",,terminal_output +84,119283,"TERMINAL",0,0,"[?25lu[?25h",,terminal_output +85,119343,"TERMINAL",0,0,"[?25lr[?25h",,terminal_output +86,119494,"TERMINAL",0,0,"[?25lc[?25h",,terminal_output +87,119663,"TERMINAL",0,0,"[?25le[?25h",,terminal_output +88,119820,"TERMINAL",0,0,"[?25l [?25h",,terminal_output +89,120507,"TERMINAL",0,0,"[?25l.[?25h",,terminal_output +90,120788,"TERMINAL",0,0,"[?25lv[?25h",,terminal_output +91,121173,"TERMINAL",0,0,"[?25le[?25h",,terminal_output +92,121282,"TERMINAL",0,0,"[?25ln[?25h",,terminal_output +93,121965,"TERMINAL",0,0,"[?25lv[?25h",,terminal_output +94,122319,"TERMINAL",0,0,"[?25l/[?25h",,terminal_output +95,122612,"TERMINAL",0,0,"[?25lb[?25h",,terminal_output +96,122760,"TERMINAL",0,0,"[?25li[?25h",,terminal_output +97,122819,"TERMINAL",0,0,"[?25ln[?25h",,terminal_output +98,123480,"TERMINAL",0,0,"[?25l/[?25h",,terminal_output +99,123636,"TERMINAL",0,0,"[?25la[?25h",,terminal_output +100,123768,"TERMINAL",0,0,"[?25lc[?25h",,terminal_output +101,123995,"TERMINAL",0,0,"[?25lt[?25h[?25li[?25h",,terminal_output +102,124130,"TERMINAL",0,0,"[?25lv[?25h",,terminal_output +103,124398,"TERMINAL",0,0,"[?25la[?25h",,terminal_output +104,124461,"TERMINAL",0,0,"[?25lt[?25h",,terminal_output +105,124572,"TERMINAL",0,0,"[?25le[?25h",,terminal_output +106,125810,"TERMINAL",0,0,"salloc: Nodes hkn0707 are ready for job\r\n",,terminal_output +107,126567,"TERMINAL",0,0,"]0;tum_cte0515@hkn0707:~/Projects/jafar[?2004h[tum_cte0515@hkn0707 jafar]$ ",,terminal_output +108,131306,"TERMINAL",0,0,"salloc: Prolog hung on node hkn0901\r\n",,terminal_output +109,141999,"TERMINAL",0,0,"salloc: Nodes hkn0901 are ready for job\r\n",,terminal_output +110,142224,"TERMINAL",0,0,"source .venv/bin/activate",,terminal_output +111,142838,"TERMINAL",0,0,"]0;tum_cte0515@hkn0901:~/Projects/jafar[?2004h[tum_cte0515@hkn0901 jafar]$ source .venv/bin/activate",,terminal_output +112,143973,"TERMINAL",0,0,"[?25l[?2004l\r[?25h]0;tum_cte0515@hkn0901:~/Projects/jafar[?2004h(jafar) [tum_cte0515@hkn0901 jafar]$ ",,terminal_output +113,147479,"models/dynamics.py",0,0,"",python,tab +114,152634,"train_dynamics.py",0,0,"",python,tab 
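The salloc/squeue session recorded above provisions the GPU nodes that the train_dynamics.py shown earlier claims at startup via jax.distributed.initialize(). A minimal sketch of that multi-process handshake, assuming a SLURM-launched job with one process per task (as in the recorded sbatch script; coordinator address and ranks are auto-detected from the SLURM environment):

# Sketch: multi-host JAX startup under SLURM (assumptions noted above).
import jax

jax.distributed.initialize()  # auto-detects coordinator/rank from SLURM env vars

num_devices = jax.device_count()  # global device count across all hosts
print(f"process {jax.process_index()}/{jax.process_count()}: "
      f"{jax.local_device_count()} local / {num_devices} global devices")

# The recorded script then requires the global batch size to split evenly:
global_batch_size = 96  # value used in the recorded sbatch script
if global_batch_size % num_devices != 0:
    raise ValueError("global batch size must be divisible by device count")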
+115,154410,"slurm/jobs/mihir/horeka/yolo-runs/train_dynamics_new_arch.sbatch",0,0,"#!/usr/bin/env bash\n\n#SBATCH --nodes=2\n#SBATCH --ntasks-per-node=4\n#SBATCH --time=48:00:00\n#SBATCH --partition=accelerated\n#SBATCH --cpus-per-task=5\n#SBATCH --gres=gpu:4\n#SBATCH --output=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir/%x_%j.log\n#SBATCH --error=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir/%x_%j.log\n#SBATCH --job-name=train_dyn_new_arch-bugfixed-temporal-shift\n\n# Log the sbatch script\ncat $0\n\nmodule unload mpi/openmpi/5.0\nmodule unload devel/cuda/12.4\nsource .venv/bin/activate\n\narray_records_dir=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_new/open_ai_minecraft_arrayrecords_chunked\n\njob_name=$SLURM_JOB_NAME\nslurm_job_id=$SLURM_JOB_ID\n\nCHECKPOINT_DIR=$ws_dir/checkpoints/$job_name/$slurm_job_id\nmkdir -p $CHECKPOINT_DIR\n\ntokenizer_ckpt_dir=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/big-runs/tokenizer-lr-scaling/train_tokenizer_lr_sweep_1e-4\n\nenv | grep SLURM\n\nsrun python train_dynamics.py \\n --save_ckpt \\n --num_steps=50000 \\n --warmup_steps=2500 \\n --wsd_decay_steps=5000 \\n --ckpt_dir $CHECKPOINT_DIR \\n --batch_size=96 \\n --max_lr=1e-4 \\n --log_image_interval=1000 \\n --log \\n --log_checkpoint_interval=1000 \\n --name=dynamics-new-arch-bugfix-temporal-shift-$slurm_job_id \\n --tags dynamics new-arch bug-fix \\n --entity instant-uv \\n --project jafar \\n --tokenizer_checkpoint=$tokenizer_ckpt_dir \\n --data_dir $array_records_dir \\n ",shellscript,tab +116,154798,"train_dynamics.py",0,0,"",python,tab +117,162974,"slurm/jobs/mihir/horeka/yolo-runs/sampling.sh",0,0,"\n# Log the sbatch script\ncat $0\n\nmodule unload mpi/openmpi/5.0\nmodule unload devel/cuda/12.4\n# source .venv/bin/activate\n\narray_records_dir=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_new/open_ai_minecraft_arrayrecords_chunked\n\njob_name=$SLURM_JOB_NAME\nslurm_job_id=$SLURM_JOB_ID\n\nCHECKPOINT_DIR=$ws_dir/checkpoints/$job_name/$slurm_job_id\nmkdir -p $CHECKPOINT_DIR\n\ntokenizer_ckpt_dir=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/big-runs/tokenizer-lr-scaling/train_tokenizer_lr_sweep_1e-4\ndynamics_ckpt_dir=$1\necho $dynamics_ckpt_dir\n\nenv | grep SLURM\n\npython sample.py \\n --checkpoint $dynamics_ckpt_dir \\n --dyna_dim=128 \\n --dyna_num_blocks=2 \\n --dyna_num_heads=4 \\n --seq_len=16 \\n --data_dir $array_records_dir\n\n",shellscript,tab +118,175853,"sample.py",0,0,"from dataclasses import dataclass\nfrom typing import Optional\nimport time\nimport os\n\nimport dm_pix as pix\nimport einops\nimport jax\nimport jax.numpy as jnp\nimport flax.linen as nn\nimport numpy as np\nfrom flax.training.train_state import TrainState\nimport grain\nimport orbax.checkpoint as ocp\nimport optax\nfrom PIL import Image, ImageDraw\nimport tyro\n\nfrom genie import Genie\nfrom utils.dataloader import get_dataloader\n\n\n@dataclass\nclass Args:\n # Experiment\n seed: int = 0\n seq_len: int = 16\n image_channels: int = 3\n image_height: int = 90\n image_width: int = 160\n data_dir: str = ""data/coinrun_episodes""\n checkpoint: str = """"\n checkpoint_step: Optional[int] = None\n # Sampling\n batch_size: int = 1\n maskgit_steps: int = 25\n temperature: float = 1.0\n sample_argmax: bool = True\n start_frame: int = 0\n # Tokenizer checkpoint\n tokenizer_dim: int = 512\n latent_patch_dim: int = 32\n num_patch_latents: int = 1024\n patch_size: int = 4\n tokenizer_num_blocks: 
int = 8\n tokenizer_num_heads: int = 8\n # LAM checkpoint\n lam_dim: int = 512\n latent_action_dim: int = 32\n num_latent_actions: int = 6\n lam_patch_size: int = 16\n lam_num_blocks: int = 8\n lam_num_heads: int = 8\n lam_co_train: bool = True\n # Dynamics checkpoint\n dyna_dim: int = 512\n dyna_num_blocks: int = 12\n dyna_num_heads: int = 8\n param_dtype: jnp.dtype = jnp.float32\n dtype: jnp.dtype = jnp.bfloat16\n\n\nargs = tyro.cli(Args)\nrng = jax.random.PRNGKey(args.seed)\n\n# --- Load Genie checkpoint ---\ngenie = Genie(\n # Tokenizer\n in_dim=args.image_channels,\n tokenizer_dim=args.tokenizer_dim,\n latent_patch_dim=args.latent_patch_dim,\n num_patch_latents=args.num_patch_latents,\n patch_size=args.patch_size,\n tokenizer_num_blocks=args.tokenizer_num_blocks,\n tokenizer_num_heads=args.tokenizer_num_heads,\n # LAM\n lam_dim=args.lam_dim,\n latent_action_dim=args.latent_action_dim,\n num_latent_actions=args.num_latent_actions,\n lam_patch_size=args.lam_patch_size,\n lam_num_blocks=args.lam_num_blocks,\n lam_num_heads=args.lam_num_heads,\n lam_co_train=args.lam_co_train,\n # Dynamics\n dyna_dim=args.dyna_dim,\n dyna_num_blocks=args.dyna_num_blocks,\n dyna_num_heads=args.dyna_num_heads,\n use_maskgit=False,\n param_dtype=args.param_dtype,\n dtype=args.dtype,\n)\nrng, _rng = jax.random.split(rng)\nimage_shape = (args.image_height, args.image_width, args.image_channels)\ndummy_inputs = dict(\n videos=jnp.zeros((args.batch_size, args.seq_len, *image_shape), dtype=jnp.float32),\n mask_rng=_rng,\n)\nrng, _rng = jax.random.split(rng)\nparams = genie.init(_rng, dummy_inputs)\n\ndummy_train_state = TrainState.create(\n apply_fn=genie.apply,\n params=params,\n tx=optax.adamw(\n optax.warmup_cosine_decay_schedule(\n 0, 0, 1, 2 # dummy values\n )\n ), \n)\nhandler_registry = ocp.handlers.DefaultCheckpointHandlerRegistry()\nhandler_registry.add('model_state', ocp.args.StandardRestore, ocp.handlers.StandardCheckpointHandler)\ncheckpoint_manager = ocp.CheckpointManager(\n args.checkpoint,\n options=ocp.CheckpointManagerOptions(step_format_fixed_length=6),\n handler_registry=handler_registry\n)\nabstract_train_state = jax.tree_util.tree_map(\n ocp.utils.to_shape_dtype_struct, dummy_train_state\n)\n\nrestored = checkpoint_manager.restore(\n args.checkpoint_step or checkpoint_manager.latest_step(),\n args=ocp.args.Composite(\n model_state=ocp.args.StandardRestore(abstract_train_state),\n ),\n)\nrestored_train_state = restored[""model_state""]\nparams = restored_train_state.params\n\n\ndef _sampling_wrapper(module, batch):\n # return module.sample_maskgit(batch, args.seq_len, args.maskgit_steps, args.temperature, args.sample_argmax)\n return module.sample_causal(batch, args.seq_len, args.temperature, args.sample_argmax)\n\n# --- Define autoregressive sampling loop ---\ndef _autoreg_sample(rng, video_batch, action_batch):\n vid = video_batch[:, : args.start_frame + 1]\n sampling_fn = jax.jit(nn.apply(_sampling_wrapper, genie)) \n rng, _rng = jax.random.split(rng)\n batch = dict(videos=vid, latent_actions=action_batch, rng=_rng)\n generated_vid = sampling_fn(\n params,\n batch\n )\n return generated_vid\n\ndef _get_dataloader_iterator():\n array_record_files = [\n os.path.join(args.data_dir, x)\n for x in os.listdir(args.data_dir)\n if x.endswith("".array_record"")\n ]\n grain_dataloader = get_dataloader(\n array_record_files,\n args.seq_len,\n # NOTE: We deliberately pass the global batch size\n # The dataloader shards the dataset across all processes\n args.batch_size,\n *image_shape,\n 
num_workers=0,\n prefetch_buffer_size=1,\n seed=args.seed,\n )\n initial_state = grain_dataloader._create_initial_state()\n grain_iterator = grain.DataLoaderIterator(grain_dataloader, initial_state)\n return grain_iterator\n\n# --- Get video + latent actions ---\n# grain_iterator = _get_dataloader_iterator()\n# video_batch = next(grain_iterator)\nvideo_batch = np.load(""overfit_dir/single_sample_corner.npy"")\n\nvideo_batch = video_batch.astype(args.dtype) #/ 255.0\n# Get latent actions for all videos in the batch\nbatch = dict(videos=video_batch[:,:args.seq_len])\naction_batch = genie.apply(params, batch, False, method=Genie.vq_encode)\naction_batch = action_batch.reshape(video_batch.shape[0], args.seq_len - 1, 1)\n\n# --- Sample + evaluate video ---\nprint(""autoreg sampling..."")\nvid = _autoreg_sample(rng, video_batch, action_batch)\nprint(""autoreg sampling done. calculating ssim and saving video"")\ngt = video_batch[:, : vid.shape[1]].clip(0, 1).reshape(-1, *video_batch.shape[2:])\nrecon = vid.clip(0, 1).reshape(-1, *vid.shape[2:])\nssim = pix.ssim(gt[:, args.start_frame + 1 :], recon[:, args.start_frame + 1 :]).mean()\nprint(f""SSIM: {ssim}"")\n\n# --- Construct video ---\ntrue_videos = (video_batch * 255).astype(np.uint8)\npred_videos = (vid * 255).astype(np.uint8)\nvideo_comparison = np.zeros((2, *vid.shape), dtype=np.uint8)\nvideo_comparison[0] = true_videos[:, :args.seq_len]\nvideo_comparison[1] = pred_videos\nframes = einops.rearrange(video_comparison, ""n b t h w c -> t (b h) (n w) c"")\n\n# --- Save video --- \nimgs = [Image.fromarray(img) for img in frames]\n# Write actions on each frame, on each row (i.e., for each video in the batch, on the GT row)\nfor t, img in enumerate(imgs[1:]):\n d = ImageDraw.Draw(img)\n for row in range(action_batch.shape[0]):\n action = action_batch[row, t, 0]\n y_offset = row * video_batch.shape[2] + 2\n d.text((2, y_offset), f""{action}"", fill=255)\nimgs[0].save(\n f""generation_{time.time()}.gif"",\n save_all=True,\n append_images=imgs[1:],\n duration=250,\n loop=0,\n)\n",python,tab +119,200395,"genie.py",0,0,"from typing import Dict, Any\n\nimport optax\nimport jax\nimport jax.numpy as jnp\nimport flax.linen as nn\nfrom flax.training.train_state import TrainState\nimport orbax.checkpoint as ocp\n\nfrom models.dynamics import DynamicsMaskGIT, DynamicsAutoregressive\nfrom models.lam import LatentActionModel\nfrom models.tokenizer import TokenizerVQVAE\n\nimport os\nimport grain\n\n\nclass Genie(nn.Module):\n """"""Genie model""""""\n\n # --- Tokenizer ---\n in_dim: int\n tokenizer_dim: int\n latent_patch_dim: int\n num_patch_latents: int\n patch_size: int\n tokenizer_num_blocks: int\n tokenizer_num_heads: int\n # --- LAM ---\n lam_dim: int\n latent_action_dim: int\n num_latent_actions: int\n lam_patch_size: int\n lam_num_blocks: int\n lam_num_heads: int\n lam_co_train: bool\n # --- Dynamics ---\n dyna_dim: int\n dyna_num_blocks: int\n dyna_num_heads: int\n use_maskgit: bool\n param_dtype: jnp.dtype\n dtype: jnp.dtype\n dropout: float = 0.0\n mask_limit: float = 0.0\n\n def setup(self):\n self.tokenizer = TokenizerVQVAE(\n in_dim=self.in_dim,\n model_dim=self.tokenizer_dim,\n latent_dim=self.latent_patch_dim,\n num_latents=self.num_patch_latents,\n patch_size=self.patch_size,\n num_blocks=self.tokenizer_num_blocks,\n num_heads=self.tokenizer_num_heads,\n dropout=0.0,\n codebook_dropout=0.0,\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n )\n self.lam = LatentActionModel(\n in_dim=self.in_dim,\n model_dim=self.lam_dim,\n 
latent_dim=self.latent_patch_dim,\n num_latents=self.num_latent_actions,\n patch_size=self.lam_patch_size,\n num_blocks=self.lam_num_blocks,\n num_heads=self.lam_num_heads,\n dropout=0.0,\n codebook_dropout=0.0,\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n )\n\n if self.use_maskgit:\n self.dynamics = DynamicsMaskGIT(\n model_dim=self.dyna_dim,\n num_latents=self.num_patch_latents,\n num_blocks=self.dyna_num_blocks,\n num_heads=self.dyna_num_heads,\n dropout=self.dropout,\n mask_limit=self.mask_limit,\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n ) \n else:\n self.dynamics = DynamicsAutoregressive(\n model_dim=self.dyna_dim,\n num_latents=self.num_patch_latents,\n num_blocks=self.dyna_num_blocks,\n num_heads=self.dyna_num_heads,\n dropout=self.dropout,\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n )\n\n def __call__(self, batch: Dict[str, Any], training: bool = True) -> Dict[str, Any]:\n tokenizer_outputs = self.tokenizer.vq_encode(batch[""videos""], training=False)\n lam_outputs = self.lam.vq_encode(batch[""videos""], training=False)\n latent_actions = jax.lax.cond(\n self.lam_co_train,\n lambda: lam_outputs[""z_q""],\n lambda: jax.lax.stop_gradient(lam_outputs[""z_q""])\n )\n outputs = dict(\n video_tokens=jax.lax.stop_gradient(tokenizer_outputs[""indices""]),\n latent_actions=latent_actions,\n )\n outputs[""mask_rng""] = batch[""mask_rng""]\n dyna_outputs = self.dynamics(outputs, training)\n outputs.update(dyna_outputs)\n mle_indices = jnp.argmax(outputs[""token_logits""], axis=-1)\n outputs[""recon""] = self.tokenizer.decode(\n mle_indices, batch[""videos""].shape[2:4]\n )\n outputs[""lam_indices""] = lam_outputs[""indices""]\n return outputs\n\n\n def sample_causal(\n self,\n batch: Dict[str, Any],\n seq_len: int,\n temperature: float = 1,\n sample_argmax: bool = False,\n ):\n """"""\n Autoregressively samples up to `seq_len` future frames using the causal transformer backend.\n\n - Input frames are tokenized once.\n - Future frames are generated one at a time, each conditioned on all previous frames.\n - All frames are detokenized in a single pass at the end.\n\n Args:\n batch: Dict with at least ""videos"" (B, T, H, W, C)\n seq_len: total number of frames to generate (including context)\n temperature: sampling temperature\n sample_argmax: if True, use argmax instead of sampling\n\n Returns:\n Generated video frames (B, seq_len, H, W, C)\n """"""\n # --- Encode context frames ---\n tokenizer_out = self.tokenizer.vq_encode(batch[""videos""], training=False)\n token_idxs = tokenizer_out[""indices""] # (B, T, N)\n B, T, N = token_idxs.shape\n\n # jax.debug.print(""token_idxs shape: {}"", token_idxs.shape)\n # --- Prepare initial token sequence ---\n # Pad with zeros for future frames\n pad_shape = (B, seq_len - T, N)\n token_idxs_full = jnp.concatenate(\n [token_idxs, jnp.zeros(pad_shape, dtype=token_idxs.dtype)], axis=1\n ) # (B, seq_len, N)\n\n # --- Prepare latent actions ---\n action_tokens = self.lam.vq.get_codes(batch[""latent_actions""]) # (B, S-1, )\n # --- Autoregressive generation loop ---\n rng = batch[""rng""]\n for t in range(T, seq_len):\n for n in range(N):\n dyna_inputs = {\n ""video_tokens"": token_idxs_full,\n ""latent_actions"": action_tokens\n }\n # jax.debug.print(""token_idxs_full 0: {}"", token_idxs_full[0,:,0])\n dyna_outputs = self.dynamics(dyna_inputs, training=False)\n # # dyna_outputs[""token_logits""]: (B, t, N, vocab_size)\n # # We want the logits for the last time step (frame t-1 predicting t)\n # jax.debug.breakpoint()\n 
next_token_logits = dyna_outputs[""token_logits""][:, t, n, :].astype(jnp.float32) # (B, 1, vocab_size)\n\n # Sample or argmax for each patch\n if sample_argmax:\n next_token = jnp.argmax(next_token_logits, axis=-1) # (B, 1)\n else:\n rng, step_rng = jax.random.split(rng)\n next_token = jax.random.categorical(\n step_rng, next_token_logits / temperature, axis=-1\n ) # (B, 1)\n\n # Insert the generated tokens into the sequence\n token_idxs_full = token_idxs_full.at[:, t, n].set(next_token)\n\n # --- Decode all tokens at once at the end ---\n final_frames = self.tokenizer.decode(\n token_idxs_full, video_hw=batch[""videos""].shape[2:4]\n )\n return final_frames\n\n\n @nn.compact\n def sample_maskgit(\n self,\n batch: Dict[str, Any],\n seq_len: int,\n steps: int = 25,\n temperature: float = 1,\n sample_argmax: bool = False,\n ) -> Any:\n """"""\n Autoregressively samples up to `seq_len` future frames, following Figure 8 of the paper.\n\n - Input frames are tokenized once.\n - Future frames are generated autoregressively in token space.\n - All frames are detokenized in a single pass.\n\n Note:\n - For interactive or step-wise sampling, detokenization should occur after each action.\n - To maintain consistent tensor shapes across timesteps, all current and future frames are decoded at every step.\n - Temporal causal structure is preserved by \n a) reapplying the mask before each decoding step.\n b) a temporal causal mask is applied within each ST-transformer block.\n\n Dimension keys:\n B: batch size \n T: number of input (conditioning) frames \n N: patches per frame \n S: sequence length \n A: action space \n D: model latent dimension\n """"""\n # --- Encode videos and actions ---\n tokenizer_out = self.tokenizer.vq_encode(batch[""videos""], training=False)\n token_idxs = tokenizer_out[""indices""] # (B, T, N)\n B, T, N = token_idxs.shape\n pad_shape = (B, seq_len - T, N)\n pad = jnp.zeros(pad_shape, dtype=token_idxs.dtype)\n token_idxs = jnp.concatenate([token_idxs, pad], axis=1) # (B, S, N)\n action_tokens = self.lam.vq.get_codes(batch[""latent_actions""]) \n\n MaskGITLoop = nn.scan(\n MaskGITStep,\n variable_broadcast=""params"",\n split_rngs={""params"": False},\n in_axes=0,\n out_axes=0,\n length=steps,\n )\n \n loop_fn = MaskGITLoop(\n dynamics=self.dynamics,\n tokenizer=self.tokenizer,\n temperature=temperature,\n sample_argmax=sample_argmax,\n steps=steps,\n )\n\n def generation_step_fn(carry, step_t):\n rng, current_token_idxs = carry\n rng, step_rng = jax.random.split(rng)\n\n # Mask current and future frames (i.e., t >= step_t)\n mask = jnp.arange(seq_len) >= step_t # (S,)\n mask = jnp.broadcast_to(mask[None, :, None], (B, seq_len, N)) # (B, S, N)\n mask = mask.astype(bool)\n masked_token_idxs = current_token_idxs * ~mask\n\n # --- Initialize and run MaskGIT loop ---\n init_carry_maskgit = (\n step_rng,\n masked_token_idxs,\n mask,\n action_tokens,\n )\n final_carry_maskgit, _ = loop_fn(init_carry_maskgit, jnp.arange(steps))\n updated_token_idxs = final_carry_maskgit[1]\n new_carry = (rng, updated_token_idxs)\n return new_carry, None\n\n # --- Run the autoregressive generation using scan ---\n initial_carry = (batch[""rng""], token_idxs)\n timesteps_to_scan = jnp.arange(T, seq_len)\n final_carry, _ = jax.lax.scan(\n generation_step_fn,\n initial_carry,\n timesteps_to_scan\n )\n final_token_idxs = final_carry[1]\n\n # --- Decode all tokens at once at the end ---\n final_frames = self.tokenizer.decode(\n final_token_idxs,\n video_hw=batch[""videos""].shape[2:4],\n )\n return 
final_frames\n\n def vq_encode(self, batch, training) -> Dict[str, Any]:\n # --- Preprocess videos ---\n lam_output = self.lam.vq_encode(batch[""videos""], training=training)\n return lam_output[""indices""]\n\n\nclass MaskGITStep(nn.Module):\n dynamics: nn.Module\n tokenizer: nn.Module\n temperature: float\n sample_argmax: bool\n steps: int\n\n @nn.compact\n def __call__(self, carry, x):\n rng, token_idxs, mask, action_tokens = carry\n step = x\n N = token_idxs.shape[2]\n\n # --- Construct + encode video ---\n vid_embed = self.dynamics.patch_embed(token_idxs) # (B, S, N, D)\n mask_token = self.dynamics.mask_token # (1, 1, 1, D,)\n mask_expanded = mask[..., None] # (B, S, N, 1) \n vid_embed = jnp.where(mask_expanded, mask_token, vid_embed)\n\n # --- Predict transition ---\n act_embed = self.dynamics.action_up(action_tokens)\n vid_embed += jnp.pad(act_embed, ((0, 0), (1, 0), (0, 0), (0, 0)))\n unmasked_ratio = jnp.cos(jnp.pi * (step + 1) / (self.steps * 2))\n step_temp = self.temperature * (1.0 - unmasked_ratio)\n final_logits = self.dynamics.dynamics(vid_embed) / step_temp\n\n # --- Sample new tokens for final frame ---\n if self.sample_argmax:\n sampled_token_idxs = jnp.argmax(final_logits, axis=-1)\n else:\n rng, _rng = jax.random.split(rng)\n sampled_token_idxs = jax.random.categorical(_rng, final_logits)\n gather_fn = jax.vmap(jax.vmap(jax.vmap(lambda x, y: x[y])))\n final_token_probs = gather_fn(jax.nn.softmax(final_logits), sampled_token_idxs)\n final_token_probs += ~mask\n # Update masked tokens only\n token_idxs = jnp.where(mask, sampled_token_idxs, token_idxs)\n\n # --- Update mask ---\n num_unmasked_tokens = jnp.round(N * (1.0 - unmasked_ratio)).astype(int)\n idx_mask = jnp.arange(final_token_probs.shape[-1]) > num_unmasked_tokens\n sorted_idxs = jnp.argsort(final_token_probs, axis=-1, descending=True)\n mask_update_fn = jax.vmap(lambda msk, ids: msk.at[ids].set(idx_mask))\n new_mask = mask_update_fn(mask, sorted_idxs)\n\n new_carry = (rng, token_idxs, new_mask, action_tokens)\n return new_carry, None\n\ndef restore_genie_components(\n train_state: TrainState,\n sharding: jax.sharding.NamedSharding,\n grain_iterator: grain.DataLoaderIterator,\n inputs: Dict[str, jax.Array],\n rng: jax.Array,\n args,\n):\n """"""Restore pre-trained Genie components""""""\n rng, _rng = jax.random.split(rng)\n\n # dummy values since we only use tx to initialize the dummy train states\n dummy_tx = optax.adamw(\n learning_rate=optax.constant_schedule(args.max_lr),\n b1=0.9,\n b2=0.9,\n weight_decay=1e-4,\n )\n handler_registry = ocp.handlers.DefaultCheckpointHandlerRegistry()\n handler_registry.add('model_state', ocp.args.StandardRestore, ocp.handlers.StandardCheckpointHandler)\n handler_registry.add('dataloader_state', grain.checkpoint.CheckpointRestore, grain.checkpoint.CheckpointHandler)\n \n\n checkpoint_options = ocp.CheckpointManagerOptions(\n step_format_fixed_length=6,\n )\n tokenizer_checkpoint_manager = ocp.CheckpointManager(\n directory=args.tokenizer_checkpoint,\n options=checkpoint_options,\n handler_registry=handler_registry,\n )\n dummy_tokenizer = TokenizerVQVAE(\n in_dim=args.image_channels,\n model_dim=args.tokenizer_dim,\n latent_dim=args.latent_patch_dim,\n num_latents=args.num_patch_latents,\n patch_size=args.patch_size,\n num_blocks=args.tokenizer_num_blocks,\n num_heads=args.tokenizer_num_heads,\n dropout=args.dropout,\n codebook_dropout=args.dropout,\n param_dtype=args.param_dtype,\n dtype=args.dtype,\n )\n tokenizer_init_params = dummy_tokenizer.init(_rng, inputs)\n 
dummy_tokenizer_train_state = TrainState.create(\n apply_fn=dummy_tokenizer.apply, params=tokenizer_init_params, tx=dummy_tx\n )\n abstract_sharded_tokenizer_state = _create_abstract_sharded_pytree(\n dummy_tokenizer_train_state, sharding\n )\n restored_tokenizer = tokenizer_checkpoint_manager.restore(\n step=tokenizer_checkpoint_manager.latest_step(),\n args=ocp.args.Composite(\n model_state=ocp.args.StandardRestore(abstract_sharded_tokenizer_state),\n dataloader_state=grain.checkpoint.CheckpointRestore(grain_iterator),\n ),\n )[""model_state""]\n restored_tokenizer_params = restored_tokenizer.params[""params""]\n train_state.params[""params""][""tokenizer""].update(restored_tokenizer_params)\n tokenizer_checkpoint_manager.close()\n\n if args.lam_checkpoint:\n lam_checkpoint_manager = ocp.CheckpointManager(\n directory=args.lam_checkpoint,\n options=checkpoint_options,\n handler_registry=handler_registry,\n )\n dummy_lam = LatentActionModel(\n in_dim=args.image_channels,\n model_dim=args.lam_dim,\n latent_dim=args.latent_patch_dim,\n num_latents=args.num_latent_actions,\n patch_size=args.lam_patch_size,\n num_blocks=args.lam_num_blocks,\n num_heads=args.lam_num_heads,\n dropout=args.dropout,\n codebook_dropout=args.dropout,\n param_dtype=args.param_dtype,\n dtype=args.dtype,\n )\n lam_init_params = dummy_lam.init(_rng, inputs)\n dummy_lam_train_state = TrainState.create(\n apply_fn=dummy_lam.apply, params=lam_init_params, tx=dummy_tx\n )\n abstract_sharded_lam_state = _create_abstract_sharded_pytree(\n dummy_lam_train_state, sharding\n )\n restored_lam = lam_checkpoint_manager.restore(\n step=lam_checkpoint_manager.latest_step(),\n args=ocp.args.Composite(\n model_state=ocp.args.StandardRestore(abstract_sharded_lam_state),\n dataloader_state=grain.checkpoint.CheckpointRestore(grain_iterator),\n ),\n )[""model_state""]\n restored_lam_params = restored_lam.params[""params""]\n # Genie does not initialize all LAM modules, thus we omit those extra modules during restoration\n # (f.srambical) FIXME: Currently, this is a small HBM memory crunch since the LAM's decoder is loaded into HBM and immediately dicarded.\n # A workaround would be to restore to host memory first, and only move the weights to HBM after pruning the decoder\n restored_lam_params = {\n k: v\n for k, v in restored_lam_params.items()\n if k in train_state.params[""params""][""lam""]\n }\n train_state.params[""params""][""lam""].update(restored_lam_params)\n lam_checkpoint_manager.close()\n\n return train_state\n\ndef _create_abstract_sharded_pytree(pytree_template, sharding_spec):\n """"""Replaces arrays in a pytree with ShapeDtypeStructs having the given sharding.""""""\n\n def map_fn(leaf_template):\n if hasattr(leaf_template, ""shape"") and hasattr(leaf_template, ""dtype""):\n return jax.ShapeDtypeStruct(\n leaf_template.shape, leaf_template.dtype, sharding=sharding_spec\n )\n return leaf_template\n\n return jax.tree_util.tree_map(map_fn, pytree_template)",python,tab +120,202018,"sample.py",0,0,"",python,tab +121,210962,"sample.py",5145,0,"",python,selection_mouse +122,211649,"sample.py",5134,0,"",python,selection_mouse +123,211772,"sample.py",5129,11,"overfit_dir",python,selection_mouse +124,211988,"sample.py",5129,32,"overfit_dir/single_sample_corner",python,selection_mouse +125,212425,"sample.py",5129,33,"overfit_dir/single_sample_corner.",python,selection_mouse +126,212943,"sample.py",5162,0,"",python,selection_mouse +127,213409,"sample.py",5163,0,"",python,selection_mouse 
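restore_genie_components, recorded above in genie.py, restores each pre-trained component by building a dummy TrainState and mapping it to abstract, sharding-annotated leaves before calling orbax. A condensed sketch of that pattern (the helper mirrors the recorded _create_abstract_sharded_pytree; the usage lines are illustrative):

# Sketch: abstract sharded pytree for a sharding-aware orbax restore.
import jax

def abstract_sharded_pytree(pytree, sharding):
    """Replace array leaves with ShapeDtypeStructs carrying `sharding`."""
    def map_fn(leaf):
        if hasattr(leaf, "shape") and hasattr(leaf, "dtype"):
            return jax.ShapeDtypeStruct(leaf.shape, leaf.dtype, sharding=sharding)
        return leaf
    return jax.tree_util.tree_map(map_fn, pytree)

# Illustrative usage, mirroring the recorded tokenizer restore:
# target = abstract_sharded_pytree(dummy_tokenizer_train_state, sharding)
# restored = manager.restore(step, args=ocp.args.Composite(
#     model_state=ocp.args.StandardRestore(target)))["model_state"]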
+128,213562,"sample.py",5162,3,"npy",python,selection_mouse +129,213703,"sample.py",5141,24,"single_sample_corner.npy",python,selection_mouse +130,213786,"sample.py",5090,75,"grain_iterator)\nvideo_batch = np.load(""overfit_dir/single_sample_corner.npy",python,selection_mouse +131,214026,"sample.py",5129,36,"overfit_dir/single_sample_corner.npy",python,selection_mouse +132,214587,"sample.py",5129,0,"",python,selection_mouse +133,219678,"sample.py",3934,0,"",python,selection_mouse +134,219839,"sample.py",3926,15,"_autoreg_sample",python,selection_mouse +135,224774,"sample.py",4268,0,"",python,selection_mouse +136,225661,"sample.py",4139,0,"",python,selection_mouse +137,226474,"sample.py",3804,0,"",python,selection_mouse +138,226624,"sample.py",3802,13,"sample_causal",python,selection_mouse +139,233893,"sample.py",4279,0,"",python,selection_mouse +140,234017,"sample.py",4273,13,"generated_vid",python,selection_mouse +141,234720,"sample.py",4219,0,"",python,selection_mouse +142,234858,"sample.py",4213,11,"sampling_fn",python,selection_mouse +143,236854,"genie.py",0,0,"",python,tab +144,240302,"genie.py",3834,0,"",python,selection_mouse +145,240304,"genie.py",3833,0,"",python,selection_command +146,240462,"genie.py",3834,0,"",python,selection_mouse +147,240463,"genie.py",3833,0,"",python,selection_command +148,246736,"genie.py",4693,0,"",python,selection_mouse +149,246895,"genie.py",4683,13,"tokenizer_out",python,selection_mouse +150,247883,"genie.py",4720,0,"",python,selection_mouse +151,248034,"genie.py",4714,9,"vq_encode",python,selection_mouse +152,248670,"genie.py",4708,0,"",python,selection_mouse +153,248818,"genie.py",4704,9,"tokenizer",python,selection_mouse +154,249356,"genie.py",4718,0,"",python,selection_mouse +155,250509,"genie.py",4789,0,"",python,selection_mouse +156,251083,"genie.py",4795,0,"",python,selection_mouse +157,251219,"genie.py",4793,7,"indices",python,selection_mouse +158,251936,"genie.py",4772,0,"",python,selection_mouse +159,252106,"genie.py",4765,10,"token_idxs",python,selection_mouse +160,254853,"genie.py",4786,0,"",python,selection_mouse +161,254980,"genie.py",4778,13,"tokenizer_out",python,selection_mouse +162,258729,"genie.py",4810,0,"",python,selection_mouse +163,265042,"genie.py",5028,0,"",python,selection_mouse +164,265198,"genie.py",5020,9,"pad_shape",python,selection_mouse +165,272672,"genie.py",4813,0,"",python,selection_mouse +166,359646,"genie.py",4849,0,"",python,selection_command +167,360822,"genie.py",4813,0,"",python,selection_command +168,361033,"genie.py",4849,0,"",python,selection_command +169,364537,"genie.py",5065,0,"",python,selection_mouse +170,366343,"genie.py",5126,0,"",python,selection_mouse +171,366491,"genie.py",5124,5,"zeros",python,selection_mouse +172,378766,"genie.py",4840,0,"",python,selection_mouse +173,378902,"genie.py",4834,10,"token_idxs",python,selection_mouse +174,380406,"genie.py",5068,0,"",python,selection_mouse +175,380544,"genie.py",5060,15,"token_idxs_full",python,selection_mouse +176,388272,"genie.py",5188,0,"",python,selection_mouse +177,388448,"genie.py",5188,1,"B",python,selection_mouse +178,389193,"genie.py",5189,0,"",python,selection_mouse +179,390436,"genie.py",5195,0,"",python,selection_mouse +180,390713,"genie.py",5191,7,"seq_len",python,selection_mouse +181,391786,"genie.py",5200,0,"",python,selection_mouse +182,392814,"genie.py",4808,0,"",python,selection_mouse +183,392992,"genie.py",4807,1,"B",python,selection_mouse +184,393473,"genie.py",4810,0,"",python,selection_mouse 
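The selections recorded above revisit dynamics_loss_fn from train_dynamics.py, whose core is a cross-entropy averaged only over masked token positions. A self-contained sketch of that masking arithmetic, with toy shapes standing in for the real (batch, time, patches) token grids:

# Sketch: masked cross-entropy and masked accuracy, as in dynamics_loss_fn.
import jax
import jax.numpy as jnp
import optax

B, T, N, V = 2, 4, 6, 16  # toy batch/time/patch/vocab sizes (illustrative)
k1, k2, k3 = jax.random.split(jax.random.PRNGKey(0), 3)
logits = jax.random.normal(k1, (B, T, N, V))
targets = jax.random.randint(k2, (B, T, N), 0, V)
mask = jax.random.bernoulli(k3, 0.5, (B, T, N))  # True where a token was masked

ce = optax.softmax_cross_entropy_with_integer_labels(logits, targets)
ce_loss = (mask * ce).sum() / mask.sum()  # average over masked positions only
acc = (mask * (logits.argmax(-1) == targets)).sum() / mask.sum()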
+185,394714,"genie.py",4813,0,"",python,selection_mouse +186,395669,"genie.py",4810,0,"",python,selection_mouse +187,395870,"genie.py",4810,1,"T",python,selection_mouse +188,397572,"genie.py",5188,0,"",python,selection_mouse +189,397733,"genie.py",5188,3,"B, ",python,selection_mouse +190,397755,"genie.py",5188,5,"B, se",python,selection_mouse +191,397764,"genie.py",5188,7,"B, seq_",python,selection_mouse +192,397818,"genie.py",5188,9,"B, seq_le",python,selection_mouse +193,397818,"genie.py",5188,10,"B, seq_len",python,selection_mouse +194,397819,"genie.py",5188,12,"B, seq_len, ",python,selection_mouse +195,397874,"genie.py",5188,13,"B, seq_len, N",python,selection_mouse +196,397875,"genie.py",5188,14,"B, seq_len, N)",python,selection_mouse +197,398837,"genie.py",5071,0,"",python,selection_mouse +198,398964,"genie.py",5060,15,"token_idxs_full",python,selection_mouse +199,400864,"genie.py",5193,0,"",python,selection_mouse +200,403778,"genie.py",5203,0,"",python,selection_mouse +201,404374,"genie.py",5202,0,"",python,selection_mouse +202,404375,"genie.py",5201,0,"",python,selection_command +203,405286,"genie.py",5188,0,"",python,selection_mouse +204,405788,"genie.py",5190,0,"",python,selection_mouse +205,416891,"genie.py",5258,0,"",python,selection_mouse +206,417059,"genie.py",5253,13,"action_tokens",python,selection_mouse +207,433062,"genie.py",5320,0,"",python,selection_mouse +208,441470,"genie.py",5413,0,"",python,selection_mouse +209,442292,"genie.py",5416,0,"",python,selection_mouse +210,445313,"genie.py",5417,0,"",python,selection_mouse +211,449227,"genie.py",5457,0,"",python,selection_mouse +212,455022,"genie.py",5429,0,"",python,selection_mouse +213,456619,"genie.py",5428,0,"",python,selection_mouse +214,457464,"genie.py",5436,0,"",python,selection_mouse +215,460683,"genie.py",5468,0,"",python,selection_mouse +216,465072,"genie.py",5734,0,"",python,selection_mouse +217,465323,"genie.py",5730,12,"dyna_outputs",python,selection_mouse +218,466307,"genie.py",5755,0,"",python,selection_mouse +219,468124,"genie.py",5770,0,"",python,selection_mouse +220,469544,"genie.py",5749,0,"",python,selection_mouse +221,469767,"genie.py",5749,2,".d",python,selection_mouse +222,469790,"genie.py",5749,3,".dy",python,selection_mouse +223,469801,"genie.py",5749,5,".dyna",python,selection_mouse +224,469812,"genie.py",5749,7,".dynami",python,selection_mouse +225,469865,"genie.py",5749,8,".dynamic",python,selection_mouse +226,469866,"genie.py",5749,9,".dynamics",python,selection_mouse +227,470261,"genie.py",5758,0,"",python,selection_mouse +228,475075,"genie.py",5762,0,"",python,selection_mouse +229,475239,"genie.py",5759,11,"dyna_inputs",python,selection_mouse +230,477514,"genie.py",5549,0,"",python,selection_mouse +231,477637,"genie.py",5540,15,"token_idxs_full",python,selection_mouse +232,478376,"genie.py",5601,0,"",python,selection_mouse +233,478519,"genie.py",5595,13,"action_tokens",python,selection_mouse +234,479943,"genie.py",5549,0,"",python,selection_mouse +235,481836,"genie.py",5540,15,"token_idxs_full",python,selection_mouse +236,482500,"genie.py",5549,0,"",python,selection_mouse +237,485419,"genie.py",5125,0,"",python,selection_mouse +238,485506,"genie.py",5125,1,"e",python,selection_mouse +239,485507,"genie.py",5125,2,"er",python,selection_mouse +240,485522,"genie.py",5125,77,"eros(pad_shape, dtype=token_idxs.dtype)], axis=1\n ) # (B, seq_len, N)",python,selection_mouse +241,486179,"genie.py",5202,0,"",python,selection_mouse +242,486184,"genie.py",5201,0,"",python,selection_command 
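The edit trail above steps through Genie.sample_causal, which fills the zero-padded token grid one patch at a time: for each future frame t and patch n, the dynamics model is run and token (t, n) is sampled from its logits. A stripped-down sketch of that double loop, with a stubbed logits function standing in for the real dynamics stack:

# Sketch: per-token causal sampling as in sample_causal (dynamics stubbed).
import jax
import jax.numpy as jnp

B, T, S, N, V = 1, 2, 4, 3, 16  # toy sizes: T context frames, S total frames

def dynamics_logits(tokens):  # stand-in for self.dynamics(...)["token_logits"]
    return jnp.zeros((*tokens.shape, V))

rng = jax.random.PRNGKey(0)
tokens = jnp.zeros((B, S, N), dtype=jnp.int32)  # context plus zero-padded future
temperature = 1.0

for t in range(T, S):      # generate future frames in order...
    for n in range(N):     # ...one patch token at a time
        logits = dynamics_logits(tokens)[:, t, n, :]
        rng, step_rng = jax.random.split(rng)
        next_token = jax.random.categorical(step_rng, logits / temperature)
        tokens = tokens.at[:, t, n].set(next_token)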
+243,489298,"genie.py",5762,0,"",python,selection_mouse +244,489444,"genie.py",5759,11,"dyna_inputs",python,selection_mouse +245,492224,"genie.py",5754,0,"",python,selection_mouse +246,492407,"genie.py",5750,8,"dynamics",python,selection_mouse +247,494093,"genie.py",6056,0,"",python,selection_mouse +248,496073,"genie.py",6059,0,"",python,selection_mouse +249,506280,"genie.py",6185,0,"",python,selection_mouse +250,510067,"genie.py",6450,0,"",python,selection_mouse +251,510215,"genie.py",6445,17,"next_token_logits",python,selection_mouse +252,511116,"genie.py",6381,0,"",python,selection_mouse +253,511834,"genie.py",6437,0,"",python,selection_mouse +254,511997,"genie.py",6435,8,"step_rng",python,selection_mouse +255,512643,"genie.py",6381,0,"",python,selection_mouse +256,513653,"genie.py",6629,0,"",python,selection_mouse +257,513798,"genie.py",6617,15,"token_idxs_full",python,selection_mouse +258,514785,"genie.py",6642,0,"",python,selection_mouse +259,515387,"genie.py",6639,0,"",python,selection_mouse +260,543294,"genie.py",6054,0,"",python,selection_mouse +261,544180,"genie.py",6055,0,"",python,selection_mouse +262,544407,"genie.py",6055,1," ",python,selection_mouse +263,544471,"genie.py",6055,2," t",python,selection_mouse +264,544958,"genie.py",6055,3," t,",python,selection_mouse +265,545034,"genie.py",6055,4," t, ",python,selection_mouse +266,560953,"genie.py",6661,0,"",python,selection_mouse +267,561229,"genie.py",6639,22,"t, n].set(next_token)\n",python,selection_mouse +268,561284,"genie.py",6640,21,", n].set(next_token)\n",python,selection_mouse +269,561779,"genie.py",6636,0,"",python,selection_mouse +270,561969,"genie.py",6636,25,":, t, n].set(next_token)\n",python,selection_mouse +271,562240,"genie.py",6636,13,":, t, n].set(",python,selection_mouse +272,562258,"genie.py",6636,14,":, t, n].set(n",python,selection_mouse +273,562344,"genie.py",6636,13,":, t, n].set(",python,selection_mouse +274,562420,"genie.py",6636,12,":, t, n].set",python,selection_mouse +275,563017,"genie.py",6648,0,"",python,selection_mouse +276,563191,"genie.py",6645,3,"set",python,selection_mouse +277,563362,"genie.py",6644,4,".set",python,selection_mouse +278,563380,"genie.py",6643,5,"].set",python,selection_mouse +279,563404,"genie.py",6640,8,", n].set",python,selection_mouse +280,563452,"genie.py",6639,9,"t, n].set",python,selection_mouse +281,563453,"genie.py",6637,11,", t, n].set",python,selection_mouse +282,563504,"genie.py",6636,12,":, t, n].set",python,selection_mouse +283,564022,"genie.py",6636,0,"",python,selection_mouse +284,564466,"genie.py",6636,2,":,",python,selection_mouse +285,564482,"genie.py",6636,3,":, ",python,selection_mouse +286,564534,"genie.py",6636,4,":, t",python,selection_mouse +287,564535,"genie.py",6636,5,":, t,",python,selection_mouse +288,564535,"genie.py",6636,6,":, t, ",python,selection_mouse +289,564545,"genie.py",6636,7,":, t, n",python,selection_mouse +290,564610,"genie.py",6636,8,":, t, n]",python,selection_mouse +291,565337,"genie.py",6644,0,"",python,selection_mouse +292,565508,"genie.py",6643,2,"].",python,selection_mouse +293,565655,"genie.py",6643,2,"].",python,selection_mouse +294,565708,"genie.py",6642,3,"n].",python,selection_mouse +295,565709,"genie.py",6641,4," n].",python,selection_mouse +296,565713,"genie.py",6640,5,", n].",python,selection_mouse +297,565730,"genie.py",6639,6,"t, n].",python,selection_mouse +298,565747,"genie.py",6638,7," t, n].",python,selection_mouse +299,565805,"genie.py",6637,8,", t, n].",python,selection_mouse +300,565865,"genie.py",6636,9,":, 
t, n].",python,selection_mouse +301,576743,"genie.py",5418,0,"",python,selection_mouse +302,576896,"genie.py",5417,1,"t",python,selection_mouse +303,578967,"genie.py",5428,0,"",python,selection_mouse +304,579136,"genie.py",5428,1,"T",python,selection_mouse +305,646818,"genie.py",6052,0,"",python,selection_mouse +306,647030,"genie.py",6052,2,"[:",python,selection_mouse +307,647048,"genie.py",6052,3,"[:,",python,selection_mouse +308,647064,"genie.py",6052,4,"[:, ",python,selection_mouse +309,647117,"genie.py",6052,5,"[:, t",python,selection_mouse +310,647118,"genie.py",6052,6,"[:, t,",python,selection_mouse +311,647122,"genie.py",6052,7,"[:, t, ",python,selection_mouse +312,647185,"genie.py",6052,8,"[:, t, n",python,selection_mouse +313,647255,"genie.py",6052,9,"[:, t, n,",python,selection_mouse +314,647317,"genie.py",6052,55,"[:, t, n, :].astype(jnp.float32) # (B, 1, vocab_size)\n",python,selection_mouse +315,647910,"genie.py",6052,12,"[:, t, n, :]",python,selection_mouse +316,652348,"genie.py",6053,0,"",python,selection_mouse +317,655376,"genie.py",6052,0,"",python,selection_mouse +318,683845,"genie.py",6024,0,"",python,selection_mouse +319,684003,"genie.py",6024,12,"dyna_outputs",python,selection_mouse +320,684137,"genie.py",5988,119," next_token_logits = dyna_outputs[""token_logits""][:, t, n, :].astype(jnp.float32) # (B, 1, vocab_size)\n",python,selection_mouse +321,684925,"genie.py",6024,0,"",python,selection_mouse +322,692327,"genie.py",6640,0,"",python,selection_mouse +323,706822,"genie.py",6056,0,"",python,selection_mouse +324,722792,"genie.py",6057,0,"",python,selection_mouse +325,744296,"genie.py",6057,0,"-",python,content +326,744301,"genie.py",6058,0,"",python,selection_keyboard +327,744356,"genie.py",6058,0,"1",python,content +328,744358,"genie.py",6059,0,"",python,selection_keyboard +329,744865,"genie.py",6058,0,"",python,selection_command +330,757168,"models/dynamics.py",0,0,"",python,tab +331,761091,"models/dynamics.py",3066,0,"",python,selection_mouse +332,764544,"models/dynamics.py",3012,61," logits = self.dynamics(vid_embed_padded)[:, :-1, :-1]",python,selection_command +333,765028,"models/dynamics.py",2932,141," vid_embed_padded = jnp.pad(vid_embed, ((0, 0), (1, 0), (1, 0), (0, 0)))\n logits = self.dynamics(vid_embed_padded)[:, :-1, :-1]",python,selection_command +334,765247,"models/dynamics.py",2940,0,"",python,selection_command +335,765899,"models/dynamics.py",3020,0,"ä",python,content +336,765900,"models/dynamics.py",2940,0,"ä",python,content +337,765900,"models/dynamics.py",2941,0,"",python,selection_keyboard +338,765927,"models/dynamics.py",3022,0," ",python,content +339,765928,"models/dynamics.py",2941,0," ",python,content +340,765928,"models/dynamics.py",2942,0,"",python,selection_keyboard +341,766809,"models/dynamics.py",3023,1,"",python,content +342,766809,"models/dynamics.py",2941,1,"",python,content +343,766920,"models/dynamics.py",3021,1,"",python,content +344,766921,"models/dynamics.py",2940,1,"",python,content +345,767382,"models/dynamics.py",3020,0,"#",python,content +346,767382,"models/dynamics.py",2940,0,"#",python,content +347,767383,"models/dynamics.py",2941,0,"",python,selection_keyboard +348,767694,"models/dynamics.py",2940,0,"",python,selection_command +349,767813,"models/dynamics.py",2941,0,"",python,selection_command +350,768352,"models/dynamics.py",3022,0,"",python,selection_command +351,768585,"models/dynamics.py",3085,0,"",python,selection_command +352,768874,"models/dynamics.py",3167,0,"",python,selection_command 
+353,769218,"models/dynamics.py",3158,61," # logits = self.dynamics(vid_embed_padded)[:, :, :-1]",python,selection_command +354,769409,"models/dynamics.py",3076,143," # vid_embed_padded = jnp.pad(vid_embed, ((0, 0), (0, 0), (1, 0), (0, 0)))\n # logits = self.dynamics(vid_embed_padded)[:, :, :-1]",python,selection_command +355,769720,"models/dynamics.py",3084,0,"",python,selection_command +356,770375,"models/dynamics.py",3166,1,"",python,content +357,770375,"models/dynamics.py",3084,1,"",python,content +358,770505,"models/dynamics.py",3165,1,"",python,content +359,770505,"models/dynamics.py",3084,1,"",python,content +360,770621,"models/dynamics.py",3083,0,"",python,selection_command +361,772641,"models/dynamics.py",2949,0,"",python,selection_mouse +362,773458,"models/dynamics.py",2932,80," #vid_embed_padded = jnp.pad(vid_embed, ((0, 0), (1, 0), (1, 0), (0, 0)))",python,selection_command +363,773678,"models/dynamics.py",2932,143," #vid_embed_padded = jnp.pad(vid_embed, ((0, 0), (1, 0), (1, 0), (0, 0)))\n #logits = self.dynamics(vid_embed_padded)[:, :-1, :-1]",python,selection_command +364,774418,"models/dynamics.py",2940,0,"",python,selection_command +365,774843,"models/dynamics.py",2941,0,"",python,selection_command +366,775024,"models/dynamics.py",3022,0," ",python,content +367,775024,"models/dynamics.py",2941,0," ",python,content +368,775025,"models/dynamics.py",2942,0,"",python,selection_keyboard +369,775301,"models/dynamics.py",2941,0,"",python,selection_command +370,789750,"models/dynamics.py",3125,0,"",python,selection_mouse +371,789932,"models/dynamics.py",3125,1,"(",python,selection_mouse +372,789945,"models/dynamics.py",3125,3,"(0,",python,selection_mouse +373,790001,"models/dynamics.py",3125,4,"(0, ",python,selection_mouse +374,790007,"models/dynamics.py",3125,5,"(0, 0",python,selection_mouse +375,790383,"models/dynamics.py",3125,6,"(0, 0)",python,selection_mouse +376,791700,"models/dynamics.py",3133,0,"",python,selection_mouse +377,791946,"models/dynamics.py",3133,1,"(",python,selection_mouse +378,791961,"models/dynamics.py",3133,2,"(0",python,selection_mouse +379,791978,"models/dynamics.py",3133,3,"(0,",python,selection_mouse +380,792079,"models/dynamics.py",3133,4,"(0, ",python,selection_mouse +381,792145,"models/dynamics.py",3133,5,"(0, 0",python,selection_mouse +382,792327,"models/dynamics.py",3133,6,"(0, 0)",python,selection_mouse +383,792385,"models/dynamics.py",3133,7,"(0, 0),",python,selection_mouse +384,793073,"models/dynamics.py",3140,0,"",python,selection_mouse +385,795826,"models/dynamics.py",3141,0,"",python,selection_mouse +386,796014,"models/dynamics.py",3141,1,"(",python,selection_mouse +387,796067,"models/dynamics.py",3141,3,"(1,",python,selection_mouse +388,796081,"models/dynamics.py",3141,4,"(1, ",python,selection_mouse +389,796138,"models/dynamics.py",3141,5,"(1, 0",python,selection_mouse +390,796198,"models/dynamics.py",3141,6,"(1, 0)",python,selection_mouse +391,797499,"models/dynamics.py",3149,0,"",python,selection_mouse +392,797696,"models/dynamics.py",3149,1,"(",python,selection_mouse +393,797724,"models/dynamics.py",3149,2,"(0",python,selection_mouse +394,797743,"models/dynamics.py",3149,3,"(0,",python,selection_mouse +395,797754,"models/dynamics.py",3149,4,"(0, ",python,selection_mouse +396,797807,"models/dynamics.py",3149,5,"(0, 0",python,selection_mouse +397,797883,"models/dynamics.py",3149,6,"(0, 0)",python,selection_mouse +398,801034,"models/dynamics.py",2811,0,"",python,selection_mouse 
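The commenting and uncommenting recorded in models/dynamics.py toggles where the frame-level causal shift lives: either pad the embeddings before the dynamics stack so position t is conditioned only on earlier frames, or shift logits against targets in the loss, as in the commented block of dynamics_loss_fn. A toy sketch of the shape bookkeeping for the pad-and-slice variant being edited (in the recorded file the slice is applied to the dynamics output rather than the padded input):

# Sketch: temporal shift via padding, mirroring the edited dynamics lines.
import jax.numpy as jnp

B, T, N, D = 1, 3, 2, 4
vid_embed = jnp.ones((B, T, N, D))

# Pad one slot at the front of the time and patch axes, then drop the last
# slot, so the output at frame t depends only on inputs up to frame t - 1.
vid_embed_padded = jnp.pad(vid_embed, ((0, 0), (1, 0), (1, 0), (0, 0)))
shifted = vid_embed_padded[:, :-1, :-1]  # back to (B, T, N, D), shifted by one
assert shifted.shape == vid_embed.shape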
+399,801215,"models/dynamics.py",2806,9,"act_embed",python,selection_mouse +400,802210,"models/dynamics.py",3149,0,"",python,selection_mouse +401,802365,"models/dynamics.py",3149,2,"(0",python,selection_mouse +402,802384,"models/dynamics.py",3149,4,"(0, ",python,selection_mouse +403,802400,"models/dynamics.py",3149,5,"(0, 0",python,selection_mouse +404,802823,"models/dynamics.py",3149,6,"(0, 0)",python,selection_mouse +405,807522,"models/dynamics.py",3215,0,"",python,selection_mouse +406,809148,"models/dynamics.py",3207,0,"",python,selection_mouse +407,809604,"models/dynamics.py",3210,0,"",python,selection_mouse +408,810130,"models/dynamics.py",3216,0,"",python,selection_mouse +409,810388,"models/dynamics.py",3215,1,"1",python,selection_mouse +410,810747,"models/dynamics.py",3214,2,"-1",python,selection_mouse +411,811067,"models/dynamics.py",3213,3,":-1",python,selection_mouse +412,819936,"models/dynamics.py",3214,0,"",python,selection_mouse +413,820137,"models/dynamics.py",3213,2,":-",python,selection_mouse +414,820430,"models/dynamics.py",3213,2,":-",python,selection_mouse +415,820492,"models/dynamics.py",3213,3,":-1",python,selection_mouse +416,820960,"models/dynamics.py",3216,0,"",python,selection_mouse +417,821419,"models/dynamics.py",3215,1,"1",python,selection_mouse +418,821436,"models/dynamics.py",3214,2,"-1",python,selection_mouse +419,821464,"models/dynamics.py",3213,3,":-1",python,selection_mouse +420,822140,"models/dynamics.py",3213,0,"",python,selection_mouse +421,822300,"models/dynamics.py",3212,1," ",python,selection_mouse +422,822479,"models/dynamics.py",3212,2," :",python,selection_mouse +423,822497,"models/dynamics.py",3212,3," :-",python,selection_mouse +424,822514,"models/dynamics.py",3212,4," :-1",python,selection_mouse +425,822530,"models/dynamics.py",3212,5," :-1]",python,selection_mouse +426,822940,"models/dynamics.py",3217,0,"",python,selection_mouse +427,822943,"models/dynamics.py",3216,0,"",python,selection_command +428,823816,"models/dynamics.py",3100,0,"",python,selection_mouse +429,823987,"models/dynamics.py",3086,16,"vid_embed_padded",python,selection_mouse +430,825021,"models/dynamics.py",3141,0,"",python,selection_mouse +431,825187,"models/dynamics.py",3141,1,"(",python,selection_mouse +432,825249,"models/dynamics.py",3141,3,"(1,",python,selection_mouse +433,825250,"models/dynamics.py",3141,5,"(1, 0",python,selection_mouse +434,825262,"models/dynamics.py",3141,6,"(1, 0)",python,selection_mouse +435,826335,"models/dynamics.py",3213,0,"",python,selection_mouse +436,826592,"models/dynamics.py",3213,1,":",python,selection_mouse +437,826613,"models/dynamics.py",3213,3,":-1",python,selection_mouse +438,826666,"models/dynamics.py",3213,4,":-1]",python,selection_mouse +439,834858,"models/dynamics.py",3146,0,"",python,selection_mouse +440,835446,"models/dynamics.py",3077,0,"",python,selection_mouse +441,835451,"models/dynamics.py",3076,0,"",python,selection_command +442,835635,"models/dynamics.py",3076,1,"]",python,selection_mouse +443,835646,"models/dynamics.py",3077,0,"",python,selection_command +444,835655,"models/dynamics.py",3076,1,"]",python,selection_mouse +445,835665,"models/dynamics.py",3070,7,"1, :-1]",python,selection_mouse +446,835722,"models/dynamics.py",3058,19,"added)[:, :-1, :-1]",python,selection_mouse +447,835723,"models/dynamics.py",2969,108,"vid_embed, ((0, 0), (1, 0), (1, 0), (0, 0)))\n # logits = self.dynamics(vid_embed_padded)[:, :-1, :-1]",python,selection_mouse +448,835723,"models/dynamics.py",2964,113,".pad(vid_embed, ((0, 0), (1, 0), (1, 
0), (0, 0)))\n # logits = self.dynamics(vid_embed_padded)[:, :-1, :-1]",python,selection_mouse +449,835733,"models/dynamics.py",3042,35,"mics(vid_embed_padded)[:, :-1, :-1]",python,selection_mouse +450,835750,"models/dynamics.py",3040,37,"namics(vid_embed_padded)[:, :-1, :-1]",python,selection_mouse +451,835768,"models/dynamics.py",3038,39,"dynamics(vid_embed_padded)[:, :-1, :-1]",python,selection_mouse +452,835783,"models/dynamics.py",3036,41,"f.dynamics(vid_embed_padded)[:, :-1, :-1]",python,selection_mouse +453,835834,"models/dynamics.py",3035,42,"lf.dynamics(vid_embed_padded)[:, :-1, :-1]",python,selection_mouse +454,835835,"models/dynamics.py",3034,43,"elf.dynamics(vid_embed_padded)[:, :-1, :-1]",python,selection_mouse +455,835836,"models/dynamics.py",3032,45," self.dynamics(vid_embed_padded)[:, :-1, :-1]",python,selection_mouse +456,835854,"models/dynamics.py",3031,46,"= self.dynamics(vid_embed_padded)[:, :-1, :-1]",python,selection_mouse +457,835911,"models/dynamics.py",3030,47," = self.dynamics(vid_embed_padded)[:, :-1, :-1]",python,selection_mouse +458,835917,"models/dynamics.py",3029,48,"s = self.dynamics(vid_embed_padded)[:, :-1, :-1]",python,selection_mouse +459,835971,"models/dynamics.py",3028,49,"ts = self.dynamics(vid_embed_padded)[:, :-1, :-1]",python,selection_mouse +460,835979,"models/dynamics.py",3027,50,"its = self.dynamics(vid_embed_padded)[:, :-1, :-1]",python,selection_mouse +461,836034,"models/dynamics.py",3026,51,"gits = self.dynamics(vid_embed_padded)[:, :-1, :-1]",python,selection_mouse +462,836046,"models/dynamics.py",3025,52,"ogits = self.dynamics(vid_embed_padded)[:, :-1, :-1]",python,selection_mouse +463,836099,"models/dynamics.py",2942,135,"vid_embed_padded = jnp.pad(vid_embed, ((0, 0), (1, 0), (1, 0), (0, 0)))\n # logits = self.dynamics(vid_embed_padded)[:, :-1, :-1]",python,selection_mouse +464,836099,"models/dynamics.py",2940,137,"# vid_embed_padded = jnp.pad(vid_embed, ((0, 0), (1, 0), (1, 0), (0, 0)))\n # logits = self.dynamics(vid_embed_padded)[:, :-1, :-1]",python,selection_mouse +465,836100,"models/dynamics.py",2939,138," # vid_embed_padded = jnp.pad(vid_embed, ((0, 0), (1, 0), (1, 0), (0, 0)))\n # logits = self.dynamics(vid_embed_padded)[:, :-1, :-1]",python,selection_mouse +466,836115,"models/dynamics.py",2938,139," # vid_embed_padded = jnp.pad(vid_embed, ((0, 0), (1, 0), (1, 0), (0, 0)))\n # logits = self.dynamics(vid_embed_padded)[:, :-1, :-1]",python,selection_mouse +467,836132,"models/dynamics.py",2937,140," # vid_embed_padded = jnp.pad(vid_embed, ((0, 0), (1, 0), (1, 0), (0, 0)))\n # logits = self.dynamics(vid_embed_padded)[:, :-1, :-1]",python,selection_mouse +468,836187,"models/dynamics.py",2936,141," # vid_embed_padded = jnp.pad(vid_embed, ((0, 0), (1, 0), (1, 0), (0, 0)))\n # logits = self.dynamics(vid_embed_padded)[:, :-1, :-1]",python,selection_mouse +469,836187,"models/dynamics.py",2935,142," # vid_embed_padded = jnp.pad(vid_embed, ((0, 0), (1, 0), (1, 0), (0, 0)))\n # logits = self.dynamics(vid_embed_padded)[:, :-1, :-1]",python,selection_mouse +470,836188,"models/dynamics.py",2934,143," # vid_embed_padded = jnp.pad(vid_embed, ((0, 0), (1, 0), (1, 0), (0, 0)))\n # logits = self.dynamics(vid_embed_padded)[:, :-1, :-1]",python,selection_mouse +471,836242,"models/dynamics.py",2933,144," # vid_embed_padded = jnp.pad(vid_embed, ((0, 0), (1, 0), (1, 0), (0, 0)))\n # logits = self.dynamics(vid_embed_padded)[:, :-1, :-1]",python,selection_mouse +472,836246,"models/dynamics.py",2932,145," # vid_embed_padded = jnp.pad(vid_embed, ((0, 0), (1, 0), 
(1, 0), (0, 0)))\n # logits = self.dynamics(vid_embed_padded)[:, :-1, :-1]",python,selection_mouse +473,837819,"models/dynamics.py",2933,0,"",python,selection_mouse +474,837820,"models/dynamics.py",2932,8," ",python,selection_mouse +475,838022,"models/dynamics.py",2932,9," #",python,selection_mouse +476,838038,"models/dynamics.py",2932,26," # vid_embed_padded",python,selection_mouse +477,838101,"models/dynamics.py",2932,28," # vid_embed_padded =",python,selection_mouse +478,838109,"models/dynamics.py",2932,32," # vid_embed_padded = jnp",python,selection_mouse +479,838126,"models/dynamics.py",2932,36," # vid_embed_padded = jnp.pad",python,selection_mouse +480,838142,"models/dynamics.py",2932,46," # vid_embed_padded = jnp.pad(vid_embed",python,selection_mouse +481,838256,"models/dynamics.py",2932,47," # vid_embed_padded = jnp.pad(vid_embed,",python,selection_mouse +482,838311,"models/dynamics.py",2932,49," # vid_embed_padded = jnp.pad(vid_embed, (",python,selection_mouse +483,838312,"models/dynamics.py",2932,50," # vid_embed_padded = jnp.pad(vid_embed, ((",python,selection_mouse +484,838325,"models/dynamics.py",2932,51," # vid_embed_padded = jnp.pad(vid_embed, ((0",python,selection_mouse +485,838345,"models/dynamics.py",2932,52," # vid_embed_padded = jnp.pad(vid_embed, ((0,",python,selection_mouse +486,838398,"models/dynamics.py",2932,53," # vid_embed_padded = jnp.pad(vid_embed, ((0, ",python,selection_mouse +487,838399,"models/dynamics.py",2932,136," # vid_embed_padded = jnp.pad(vid_embed, ((0, 0), (1, 0), (1, 0), (0, 0)))\n # logits = self.dynamics(vid_embed_padded)[:, ",python,selection_mouse +488,838399,"models/dynamics.py",2932,137," # vid_embed_padded = jnp.pad(vid_embed, ((0, 0), (1, 0), (1, 0), (0, 0)))\n # logits = self.dynamics(vid_embed_padded)[:, :",python,selection_mouse +489,838454,"models/dynamics.py",2932,138," # vid_embed_padded = jnp.pad(vid_embed, ((0, 0), (1, 0), (1, 0), (0, 0)))\n # logits = self.dynamics(vid_embed_padded)[:, :-",python,selection_mouse +490,838455,"models/dynamics.py",2932,139," # vid_embed_padded = jnp.pad(vid_embed, ((0, 0), (1, 0), (1, 0), (0, 0)))\n # logits = self.dynamics(vid_embed_padded)[:, :-1",python,selection_mouse +491,838472,"models/dynamics.py",2932,140," # vid_embed_padded = jnp.pad(vid_embed, ((0, 0), (1, 0), (1, 0), (0, 0)))\n # logits = self.dynamics(vid_embed_padded)[:, :-1,",python,selection_mouse +492,838528,"models/dynamics.py",2932,141," # vid_embed_padded = jnp.pad(vid_embed, ((0, 0), (1, 0), (1, 0), (0, 0)))\n # logits = self.dynamics(vid_embed_padded)[:, :-1, ",python,selection_mouse +493,838539,"models/dynamics.py",2932,142," # vid_embed_padded = jnp.pad(vid_embed, ((0, 0), (1, 0), (1, 0), (0, 0)))\n # logits = self.dynamics(vid_embed_padded)[:, :-1, :",python,selection_mouse +494,838591,"models/dynamics.py",2932,143," # vid_embed_padded = jnp.pad(vid_embed, ((0, 0), (1, 0), (1, 0), (0, 0)))\n # logits = self.dynamics(vid_embed_padded)[:, :-1, :-",python,selection_mouse +495,838605,"models/dynamics.py",2932,144," # vid_embed_padded = jnp.pad(vid_embed, ((0, 0), (1, 0), (1, 0), (0, 0)))\n # logits = self.dynamics(vid_embed_padded)[:, :-1, :-1",python,selection_mouse +496,838657,"models/dynamics.py",2932,145," # vid_embed_padded = jnp.pad(vid_embed, ((0, 0), (1, 0), (1, 0), (0, 0)))\n # logits = self.dynamics(vid_embed_padded)[:, :-1, :-1]",python,selection_mouse +497,839309,"models/dynamics.py",3077,0,"",python,selection_mouse +498,839311,"models/dynamics.py",3076,0,"",python,selection_command 
+499,839527,"models/dynamics.py",3077,0,"",python,selection_mouse +500,839530,"models/dynamics.py",3076,0,"",python,selection_command +501,839774,"models/dynamics.py",3076,1,"]",python,selection_mouse +502,839774,"models/dynamics.py",3076,68,"]\n vid_embed_padded = jnp.pad(vid_embed, ((0, 0), (0, 0), (1,",python,selection_mouse +503,839775,"models/dynamics.py",3077,0,"",python,selection_command +504,839785,"models/dynamics.py",3077,64,"\n vid_embed_padded = jnp.pad(vid_embed, ((0, 0), (0, 0), ",python,selection_mouse +505,839801,"models/dynamics.py",3073,4,":-1]",python,selection_mouse +506,839855,"models/dynamics.py",3069,8,"-1, :-1]",python,selection_mouse +507,839855,"models/dynamics.py",3067,10," :-1, :-1]",python,selection_mouse +508,839857,"models/dynamics.py",3065,12,":, :-1, :-1]",python,selection_mouse +509,839873,"models/dynamics.py",3063,14,")[:, :-1, :-1]",python,selection_mouse +510,839890,"models/dynamics.py",3047,30,"vid_embed_padded)[:, :-1, :-1]",python,selection_mouse +511,839987,"models/dynamics.py",3046,31,"(vid_embed_padded)[:, :-1, :-1]",python,selection_mouse +512,840041,"models/dynamics.py",3038,39,"dynamics(vid_embed_padded)[:, :-1, :-1]",python,selection_mouse +513,840094,"models/dynamics.py",3037,40,".dynamics(vid_embed_padded)[:, :-1, :-1]",python,selection_mouse +514,840095,"models/dynamics.py",3033,44,"self.dynamics(vid_embed_padded)[:, :-1, :-1]",python,selection_mouse +515,840147,"models/dynamics.py",3032,45," self.dynamics(vid_embed_padded)[:, :-1, :-1]",python,selection_mouse +516,840148,"models/dynamics.py",3030,47," = self.dynamics(vid_embed_padded)[:, :-1, :-1]",python,selection_mouse +517,840155,"models/dynamics.py",3024,53,"logits = self.dynamics(vid_embed_padded)[:, :-1, :-1]",python,selection_mouse +518,840255,"models/dynamics.py",3023,54," logits = self.dynamics(vid_embed_padded)[:, :-1, :-1]",python,selection_mouse +519,840312,"models/dynamics.py",3022,55,"# logits = self.dynamics(vid_embed_padded)[:, :-1, :-1]",python,selection_mouse +520,840375,"models/dynamics.py",3021,56," # logits = self.dynamics(vid_embed_padded)[:, :-1, :-1]",python,selection_mouse +521,840387,"models/dynamics.py",3020,57," # logits = self.dynamics(vid_embed_padded)[:, :-1, :-1]",python,selection_mouse +522,840441,"models/dynamics.py",3019,58," # logits = self.dynamics(vid_embed_padded)[:, :-1, :-1]",python,selection_mouse +523,840503,"models/dynamics.py",3018,59," # logits = self.dynamics(vid_embed_padded)[:, :-1, :-1]",python,selection_mouse +524,840561,"models/dynamics.py",2935,142," # vid_embed_padded = jnp.pad(vid_embed, ((0, 0), (1, 0), (1, 0), (0, 0)))\n # logits = self.dynamics(vid_embed_padded)[:, :-1, :-1]",python,selection_mouse +525,840621,"models/dynamics.py",2934,143," # vid_embed_padded = jnp.pad(vid_embed, ((0, 0), (1, 0), (1, 0), (0, 0)))\n # logits = self.dynamics(vid_embed_padded)[:, :-1, :-1]",python,selection_mouse +526,840687,"models/dynamics.py",2933,144," # vid_embed_padded = jnp.pad(vid_embed, ((0, 0), (1, 0), (1, 0), (0, 0)))\n # logits = self.dynamics(vid_embed_padded)[:, :-1, :-1]",python,selection_mouse +527,840770,"models/dynamics.py",2932,145," # vid_embed_padded = jnp.pad(vid_embed, ((0, 0), (1, 0), (1, 0), (0, 0)))\n # logits = self.dynamics(vid_embed_padded)[:, :-1, :-1]",python,selection_mouse +528,841666,"models/dynamics.py",3242,0,"",python,selection_mouse +529,842613,"models/dynamics.py",3217,0,"",python,selection_mouse +530,842614,"models/dynamics.py",3216,0,"",python,selection_command 
+531,842768,"models/dynamics.py",3217,0,"",python,selection_mouse +532,842781,"models/dynamics.py",3216,0,"",python,selection_command +533,842967,"models/dynamics.py",3216,1,"]",python,selection_mouse +534,842968,"models/dynamics.py",3217,0,"",python,selection_command +535,842986,"models/dynamics.py",3210,7,":, :-1]",python,selection_mouse +536,843045,"models/dynamics.py",3206,11,"[:, :, :-1]",python,selection_mouse +537,843046,"models/dynamics.py",3189,28,"vid_embed_padded)[:, :, :-1]",python,selection_mouse +538,843111,"models/dynamics.py",3180,37,"dynamics(vid_embed_padded)[:, :, :-1]",python,selection_mouse +539,843244,"models/dynamics.py",3179,38,".dynamics(vid_embed_padded)[:, :, :-1]",python,selection_mouse +540,843261,"models/dynamics.py",3175,42,"self.dynamics(vid_embed_padded)[:, :, :-1]",python,selection_mouse +541,843318,"models/dynamics.py",3174,43," self.dynamics(vid_embed_padded)[:, :, :-1]",python,selection_mouse +542,843330,"models/dynamics.py",3173,44,"= self.dynamics(vid_embed_padded)[:, :, :-1]",python,selection_mouse +543,843384,"models/dynamics.py",3172,45," = self.dynamics(vid_embed_padded)[:, :, :-1]",python,selection_mouse +544,843385,"models/dynamics.py",3166,51,"logits = self.dynamics(vid_embed_padded)[:, :, :-1]",python,selection_mouse +545,843441,"models/dynamics.py",3164,53," logits = self.dynamics(vid_embed_padded)[:, :, :-1]",python,selection_mouse +546,843461,"models/dynamics.py",3163,54," logits = self.dynamics(vid_embed_padded)[:, :, :-1]",python,selection_mouse +547,843477,"models/dynamics.py",3162,55," logits = self.dynamics(vid_embed_padded)[:, :, :-1]",python,selection_mouse +548,843535,"models/dynamics.py",3161,56," logits = self.dynamics(vid_embed_padded)[:, :, :-1]",python,selection_mouse +549,843591,"models/dynamics.py",3160,57," logits = self.dynamics(vid_embed_padded)[:, :, :-1]",python,selection_mouse +550,843644,"models/dynamics.py",3159,58," logits = self.dynamics(vid_embed_padded)[:, :, :-1]",python,selection_mouse +551,843760,"models/dynamics.py",3079,138," vid_embed_padded = jnp.pad(vid_embed, ((0, 0), (0, 0), (1, 0), (0, 0)))\n logits = self.dynamics(vid_embed_padded)[:, :, :-1]",python,selection_mouse +552,844110,"models/dynamics.py",3078,139," vid_embed_padded = jnp.pad(vid_embed, ((0, 0), (0, 0), (1, 0), (0, 0)))\n logits = self.dynamics(vid_embed_padded)[:, :, :-1]",python,selection_mouse +553,1154014,"models/dynamics.py",3383,0,"",python,selection_mouse +554,1154020,"models/dynamics.py",3382,0,"",python,selection_command +555,1155092,"models/dynamics.py",3217,0,"",python,selection_mouse +556,1155093,"models/dynamics.py",3216,0,"",python,selection_command +557,1155588,"models/dynamics.py",3217,0,"",python,selection_mouse +558,1155593,"models/dynamics.py",3216,0,"",python,selection_command +559,1155741,"models/dynamics.py",3216,1,"]",python,selection_mouse +560,1155742,"models/dynamics.py",3214,2,"-1",python,selection_mouse +561,1155744,"models/dynamics.py",3217,0,"",python,selection_command +562,1155754,"models/dynamics.py",3209,8," :, :-1]",python,selection_mouse +563,1155814,"models/dynamics.py",3200,17,"added)[:, :, :-1]",python,selection_mouse +564,1155815,"models/dynamics.py",3192,25,"_embed_padded)[:, :, :-1]",python,selection_mouse +565,1155815,"models/dynamics.py",3187,30,"s(vid_embed_padded)[:, :, :-1]",python,selection_mouse +566,1155826,"models/dynamics.py",3182,35,"namics(vid_embed_padded)[:, :, :-1]",python,selection_mouse +567,1155877,"models/dynamics.py",3178,39,"f.dynamics(vid_embed_padded)[:, :, 
:-1]",python,selection_mouse +568,1155878,"models/dynamics.py",3177,40,"lf.dynamics(vid_embed_padded)[:, :, :-1]",python,selection_mouse +569,1155879,"models/dynamics.py",3176,41,"elf.dynamics(vid_embed_padded)[:, :, :-1]",python,selection_mouse +570,1155894,"models/dynamics.py",3175,42,"self.dynamics(vid_embed_padded)[:, :, :-1]",python,selection_mouse +571,1155951,"models/dynamics.py",3174,43," self.dynamics(vid_embed_padded)[:, :, :-1]",python,selection_mouse +572,1155952,"models/dynamics.py",3173,44,"= self.dynamics(vid_embed_padded)[:, :, :-1]",python,selection_mouse +573,1155960,"models/dynamics.py",3172,45," = self.dynamics(vid_embed_padded)[:, :, :-1]",python,selection_mouse +574,1155977,"models/dynamics.py",3171,46,"s = self.dynamics(vid_embed_padded)[:, :, :-1]",python,selection_mouse +575,1155993,"models/dynamics.py",3170,47,"ts = self.dynamics(vid_embed_padded)[:, :, :-1]",python,selection_mouse +576,1156011,"models/dynamics.py",3169,48,"its = self.dynamics(vid_embed_padded)[:, :, :-1]",python,selection_mouse +577,1156027,"models/dynamics.py",3168,49,"gits = self.dynamics(vid_embed_padded)[:, :, :-1]",python,selection_mouse +578,1156043,"models/dynamics.py",3087,130,"id_embed_padded = jnp.pad(vid_embed, ((0, 0), (0, 0), (1, 0), (0, 0)))\n logits = self.dynamics(vid_embed_padded)[:, :, :-1]",python,selection_mouse +579,1156098,"models/dynamics.py",3086,131,"vid_embed_padded = jnp.pad(vid_embed, ((0, 0), (0, 0), (1, 0), (0, 0)))\n logits = self.dynamics(vid_embed_padded)[:, :, :-1]",python,selection_mouse +580,1156099,"models/dynamics.py",3085,132," vid_embed_padded = jnp.pad(vid_embed, ((0, 0), (0, 0), (1, 0), (0, 0)))\n logits = self.dynamics(vid_embed_padded)[:, :, :-1]",python,selection_mouse +581,1156099,"models/dynamics.py",3083,134," vid_embed_padded = jnp.pad(vid_embed, ((0, 0), (0, 0), (1, 0), (0, 0)))\n logits = self.dynamics(vid_embed_padded)[:, :, :-1]",python,selection_mouse +582,1156109,"models/dynamics.py",3082,135," vid_embed_padded = jnp.pad(vid_embed, ((0, 0), (0, 0), (1, 0), (0, 0)))\n logits = self.dynamics(vid_embed_padded)[:, :, :-1]",python,selection_mouse +583,1156165,"models/dynamics.py",3081,136," vid_embed_padded = jnp.pad(vid_embed, ((0, 0), (0, 0), (1, 0), (0, 0)))\n logits = self.dynamics(vid_embed_padded)[:, :, :-1]",python,selection_mouse +584,1156166,"models/dynamics.py",3080,137," vid_embed_padded = jnp.pad(vid_embed, ((0, 0), (0, 0), (1, 0), (0, 0)))\n logits = self.dynamics(vid_embed_padded)[:, :, :-1]",python,selection_mouse +585,1156175,"models/dynamics.py",3079,138," vid_embed_padded = jnp.pad(vid_embed, ((0, 0), (0, 0), (1, 0), (0, 0)))\n logits = self.dynamics(vid_embed_padded)[:, :, :-1]",python,selection_mouse +586,1156249,"models/dynamics.py",3078,139," vid_embed_padded = jnp.pad(vid_embed, ((0, 0), (0, 0), (1, 0), (0, 0)))\n logits = self.dynamics(vid_embed_padded)[:, :, :-1]",python,selection_mouse +587,1359633,"train_dynamics.py",0,0,"",python,tab +588,1364184,"train_dynamics.py",2983,0,"",python,selection_mouse +589,1364185,"train_dynamics.py",2982,0,"",python,selection_command +590,1364321,"train_dynamics.py",2982,1," ",python,selection_mouse +591,1364322,"train_dynamics.py",2983,0,"",python,selection_command +592,1364343,"train_dynamics.py",2983,1,"\n",python,selection_mouse +593,1364406,"train_dynamics.py",2969,14,"k""][:, :, 1:] ",python,selection_mouse +594,1364424,"train_dynamics.py",2965,18,"""mask""][:, :, 1:] ",python,selection_mouse +595,1364441,"train_dynamics.py",2963,20,"s[""mask""][:, :, 1:] ",python,selection_mouse 
+596,1364472,"train_dynamics.py",2962,21,"ts[""mask""][:, :, 1:] ",python,selection_mouse +597,1364483,"train_dynamics.py",2960,23,"puts[""mask""][:, :, 1:] ",python,selection_mouse +598,1364534,"train_dynamics.py",2958,25,"utputs[""mask""][:, :, 1:] ",python,selection_mouse +599,1364537,"train_dynamics.py",2957,26,"outputs[""mask""][:, :, 1:] ",python,selection_mouse +600,1364591,"train_dynamics.py",2956,27," outputs[""mask""][:, :, 1:] ",python,selection_mouse +601,1364592,"train_dynamics.py",2954,29," = outputs[""mask""][:, :, 1:] ",python,selection_mouse +602,1364592,"train_dynamics.py",2953,30,"k = outputs[""mask""][:, :, 1:] ",python,selection_mouse +603,1364647,"train_dynamics.py",2952,31,"sk = outputs[""mask""][:, :, 1:] ",python,selection_mouse +604,1364648,"train_dynamics.py",2897,86,"argets = outputs[""video_tokens""][:, :, 1:]\n # mask = outputs[""mask""][:, :, 1:] ",python,selection_mouse +605,1364701,"train_dynamics.py",2896,87,"targets = outputs[""video_tokens""][:, :, 1:]\n # mask = outputs[""mask""][:, :, 1:] ",python,selection_mouse +606,1364755,"train_dynamics.py",2841,142," logits = outputs[""token_logits""][:, :, :-1]\n # targets = outputs[""video_tokens""][:, :, 1:]\n # mask = outputs[""mask""][:, :, 1:] ",python,selection_mouse +607,1365120,"train_dynamics.py",2841,0,"",python,selection_mouse +608,1366021,"train_dynamics.py",2937,0,"",python,selection_mouse +609,1366160,"train_dynamics.py",2936,1,"1",python,selection_mouse +610,1367047,"train_dynamics.py",2984,0,"",python,selection_mouse +611,1367701,"train_dynamics.py",2882,0,"",python,selection_mouse +612,1368206,"train_dynamics.py",2936,0,"",python,selection_mouse +613,1372399,"train_dynamics.py",2983,0,"",python,selection_mouse +614,1372402,"train_dynamics.py",2982,0,"",python,selection_command +615,1380360,"models/dynamics.py",0,0,"",python,tab +616,1382575,"models/dynamics.py",3167,0,"",python,selection_mouse +617,1382697,"models/dynamics.py",3166,6,"logits",python,selection_mouse +618,1386746,"train_dynamics.py",0,0,"",python,tab +619,1388983,"train_dynamics.py",2881,0,"",python,selection_mouse +620,1389204,"train_dynamics.py",2881,2,":-",python,selection_mouse +621,1389212,"train_dynamics.py",2881,3,":-1",python,selection_mouse +622,1389230,"train_dynamics.py",2881,4,":-1]",python,selection_mouse +623,1389606,"train_dynamics.py",2885,0,"",python,selection_mouse +624,1389610,"train_dynamics.py",2884,0,"",python,selection_command +625,1390871,"train_dynamics.py",2898,0,"",python,selection_mouse +626,1391921,"train_dynamics.py",2938,0,"",python,selection_mouse +627,1637589,"models/dynamics.py",0,0,"",python,tab +628,1654610,"models/dynamics.py",3108,0,"",python,selection_mouse +629,1654750,"models/dynamics.py",3105,3,"jnp",python,selection_mouse +630,1654902,"models/dynamics.py",3078,80," vid_embed_padded = jnp.pad(vid_embed, ((0, 0), (0, 0), (1, 0), (0, 0)))\n",python,selection_mouse +631,1663894,"models/dynamics.py",3190,0,"",python,selection_mouse +632,1664040,"models/dynamics.py",3189,16,"vid_embed_padded",python,selection_mouse +633,1664162,"models/dynamics.py",3158,60," logits = self.dynamics(vid_embed_padded)[:, :, :-1]\n",python,selection_mouse +634,1665420,"models/dynamics.py",3217,0,"",python,selection_mouse +635,1665430,"models/dynamics.py",3216,0,"",python,selection_command +636,1665864,"models/dynamics.py",3215,1,"1",python,selection_mouse +637,1665866,"models/dynamics.py",3215,2,"1]",python,selection_command +638,1665881,"models/dynamics.py",3213,4,":-1]",python,selection_mouse 
+639,1665896,"models/dynamics.py",3212,5," :-1]",python,selection_mouse +640,1665914,"models/dynamics.py",3210,7,":, :-1]",python,selection_mouse +641,1665968,"models/dynamics.py",3209,8," :, :-1]",python,selection_mouse +642,1665968,"models/dynamics.py",3207,10,":, :, :-1]",python,selection_mouse +643,1666025,"models/dynamics.py",3206,11,"[:, :, :-1]",python,selection_mouse +644,1666028,"models/dynamics.py",3205,12,")[:, :, :-1]",python,selection_mouse +645,1666081,"models/dynamics.py",3204,13,"d)[:, :, :-1]",python,selection_mouse +646,1666135,"models/dynamics.py",3203,14,"ed)[:, :, :-1]",python,selection_mouse +647,1666244,"models/dynamics.py",3204,13,"d)[:, :, :-1]",python,selection_mouse +648,1666301,"models/dynamics.py",3205,12,")[:, :, :-1]",python,selection_mouse +649,1666354,"models/dynamics.py",3206,11,"[:, :, :-1]",python,selection_mouse +650,1667656,"models/dynamics.py",3215,0,"",python,selection_mouse +651,1668263,"models/dynamics.py",3217,0,"",python,selection_mouse +652,1668264,"models/dynamics.py",3216,0,"",python,selection_command +653,1668495,"models/dynamics.py",3216,1,"]",python,selection_mouse +654,1668506,"models/dynamics.py",3217,0,"",python,selection_command +655,1668561,"models/dynamics.py",3215,2,"1]",python,selection_mouse +656,1668580,"models/dynamics.py",3214,3,"-1]",python,selection_mouse +657,1668581,"models/dynamics.py",3213,4,":-1]",python,selection_mouse +658,1668631,"models/dynamics.py",3211,6,", :-1]",python,selection_mouse +659,1668684,"models/dynamics.py",3210,7,":, :-1]",python,selection_mouse +660,1668692,"models/dynamics.py",3209,8," :, :-1]",python,selection_mouse +661,1668744,"models/dynamics.py",3208,9,", :, :-1]",python,selection_mouse +662,1668760,"models/dynamics.py",3207,10,":, :, :-1]",python,selection_mouse +663,1668812,"models/dynamics.py",3206,11,"[:, :, :-1]",python,selection_mouse +664,1668892,"models/dynamics.py",3205,12,")[:, :, :-1]",python,selection_mouse +665,1669297,"models/dynamics.py",3206,11,"[:, :, :-1]",python,selection_mouse +666,1670258,"models/dynamics.py",3217,0,"",python,selection_mouse +667,1670266,"models/dynamics.py",3216,0,"",python,selection_command +668,1671181,"models/dynamics.py",3213,0,"",python,selection_mouse +669,1671337,"models/dynamics.py",3212,1," ",python,selection_mouse +670,1671510,"models/dynamics.py",3158,60," logits = self.dynamics(vid_embed_padded)[:, :, :-1]\n",python,selection_mouse +671,1672613,"models/dynamics.py",3213,0,"",python,selection_mouse +672,1673196,"models/dynamics.py",3213,50,":-1]\n mask = jnp.ones(vid_embed.shape[:-1])",python,selection_mouse +673,1673227,"models/dynamics.py",3213,102,":-1]\n mask = jnp.ones(vid_embed.shape[:-1])\n # next_tokens = jnp.argmax(logits, axis=-1)",python,selection_mouse +674,1673281,"models/dynamics.py",3213,137,":-1]\n mask = jnp.ones(vid_embed.shape[:-1])\n # next_tokens = jnp.argmax(logits, axis=-1)\n # print(next_tokens.shape)",python,selection_mouse +675,1673288,"models/dynamics.py",3213,170,":-1]\n mask = jnp.ones(vid_embed.shape[:-1])\n # next_tokens = jnp.argmax(logits, axis=-1)\n # print(next_tokens.shape)\n # jax.debug.breakpoint()",python,selection_mouse +676,1673351,"models/dynamics.py",3213,137,":-1]\n mask = jnp.ones(vid_embed.shape[:-1])\n # next_tokens = jnp.argmax(logits, axis=-1)\n # print(next_tokens.shape)",python,selection_mouse +677,1673505,"models/dynamics.py",3213,90,":-1]\n mask = jnp.ones(vid_embed.shape[:-1])\n # next_tokens = jnp.argmax(logi",python,selection_mouse +678,1673561,"models/dynamics.py",3213,91,":-1]\n mask = 
jnp.ones(vid_embed.shape[:-1])\n # next_tokens = jnp.argmax(logit",python,selection_mouse +679,1673561,"models/dynamics.py",3213,45,":-1]\n mask = jnp.ones(vid_embed.shape[",python,selection_mouse +680,1673614,"models/dynamics.py",3213,46,":-1]\n mask = jnp.ones(vid_embed.shape[:",python,selection_mouse +681,1673621,"models/dynamics.py",3199,14,"padded)[:, :, ",python,selection_mouse +682,1674037,"models/dynamics.py",3199,0,"",python,selection_mouse +683,1674037,"models/dynamics.py",3189,16,"vid_embed_padded",python,selection_mouse +684,1675016,"models/dynamics.py",3187,0,"",python,selection_mouse +685,1675909,"models/dynamics.py",3115,0,"",python,selection_mouse +686,1676040,"models/dynamics.py",3113,9,"vid_embed",python,selection_mouse +687,1676256,"models/dynamics.py",3113,10,"vid_embed,",python,selection_mouse +688,1676272,"models/dynamics.py",3113,12,"vid_embed, (",python,selection_mouse +689,1676289,"models/dynamics.py",3113,14,"vid_embed, ((0",python,selection_mouse +690,1676305,"models/dynamics.py",3113,18,"vid_embed, ((0, 0)",python,selection_mouse +691,1676330,"models/dynamics.py",3113,21,"vid_embed, ((0, 0), (",python,selection_mouse +692,1676346,"models/dynamics.py",3113,23,"vid_embed, ((0, 0), (0,",python,selection_mouse +693,1676362,"models/dynamics.py",3113,24,"vid_embed, ((0, 0), (0, ",python,selection_mouse +694,1676417,"models/dynamics.py",3113,26,"vid_embed, ((0, 0), (0, 0)",python,selection_mouse +695,1676418,"models/dynamics.py",3113,28,"vid_embed, ((0, 0), (0, 0), ",python,selection_mouse +696,1676418,"models/dynamics.py",3113,30,"vid_embed, ((0, 0), (0, 0), (1",python,selection_mouse +697,1676431,"models/dynamics.py",3113,31,"vid_embed, ((0, 0), (0, 0), (1,",python,selection_mouse +698,1676444,"models/dynamics.py",3113,32,"vid_embed, ((0, 0), (0, 0), (1, ",python,selection_mouse +699,1676494,"models/dynamics.py",3113,34,"vid_embed, ((0, 0), (0, 0), (1, 0)",python,selection_mouse +700,1676495,"models/dynamics.py",3113,36,"vid_embed, ((0, 0), (0, 0), (1, 0), ",python,selection_mouse +701,1676512,"models/dynamics.py",3113,37,"vid_embed, ((0, 0), (0, 0), (1, 0), (",python,selection_mouse +702,1676528,"models/dynamics.py",3113,39,"vid_embed, ((0, 0), (0, 0), (1, 0), (0,",python,selection_mouse +703,1676585,"models/dynamics.py",3113,41,"vid_embed, ((0, 0), (0, 0), (1, 0), (0, 0",python,selection_mouse +704,1676586,"models/dynamics.py",3113,42,"vid_embed, ((0, 0), (0, 0), (1, 0), (0, 0)",python,selection_mouse +705,1676595,"models/dynamics.py",3113,43,"vid_embed, ((0, 0), (0, 0), (1, 0), (0, 0))",python,selection_mouse +706,1676612,"models/dynamics.py",3113,44,"vid_embed, ((0, 0), (0, 0), (1, 0), (0, 0)))",python,selection_mouse +707,1677007,"models/dynamics.py",3157,0,"",python,selection_mouse +708,1677008,"models/dynamics.py",3156,0,"",python,selection_command +709,1678107,"models/dynamics.py",3141,0,"",python,selection_mouse +710,1678308,"models/dynamics.py",3141,2,"(1",python,selection_mouse +711,1678341,"models/dynamics.py",3141,3,"(1,",python,selection_mouse +712,1678449,"models/dynamics.py",3141,4,"(1, ",python,selection_mouse +713,1678485,"models/dynamics.py",3141,5,"(1, 0",python,selection_mouse +714,1678541,"models/dynamics.py",3141,6,"(1, 0)",python,selection_mouse +715,1710784,"models/dynamics.py",3125,0,"",python,selection_mouse +716,1710938,"models/dynamics.py",3125,1,"(",python,selection_mouse +717,1710970,"models/dynamics.py",3125,2,"(0",python,selection_mouse +718,1711029,"models/dynamics.py",3125,83,"(0, 0), (0, 0), (1, 0), (0, 0)))\n logits = 
self.dynamics(vid_embed_padded)[:",python,selection_mouse +719,1711030,"models/dynamics.py",3125,84,"(0, 0), (0, 0), (1, 0), (0, 0)))\n logits = self.dynamics(vid_embed_padded)[:,",python,selection_mouse +720,1711081,"models/dynamics.py",3125,85,"(0, 0), (0, 0), (1, 0), (0, 0)))\n logits = self.dynamics(vid_embed_padded)[:, ",python,selection_mouse +721,1711134,"models/dynamics.py",3125,86,"(0, 0), (0, 0), (1, 0), (0, 0)))\n logits = self.dynamics(vid_embed_padded)[:, :",python,selection_mouse +722,1711877,"models/dynamics.py",3065,0,"",python,selection_mouse +723,1712467,"models/dynamics.py",3210,0,"",python,selection_mouse +724,1713046,"models/dynamics.py",3133,0,"",python,selection_mouse +725,1713132,"models/dynamics.py",3133,1,"(",python,selection_mouse +726,1713151,"models/dynamics.py",3133,2,"(0",python,selection_mouse +727,1713174,"models/dynamics.py",3133,4,"(0, ",python,selection_mouse +728,1713191,"models/dynamics.py",3133,5,"(0, 0",python,selection_mouse +729,1713245,"models/dynamics.py",3133,6,"(0, 0)",python,selection_mouse +730,1714055,"models/dynamics.py",3141,0,"",python,selection_mouse +731,1714219,"models/dynamics.py",3141,1,"(",python,selection_mouse +732,1714239,"models/dynamics.py",3141,3,"(1,",python,selection_mouse +733,1714254,"models/dynamics.py",3141,4,"(1, ",python,selection_mouse +734,1714268,"models/dynamics.py",3141,5,"(1, 0",python,selection_mouse +735,1714283,"models/dynamics.py",3141,6,"(1, 0)",python,selection_mouse +736,1715261,"models/dynamics.py",3149,0,"",python,selection_mouse +737,1715391,"models/dynamics.py",3149,1,"(",python,selection_mouse +738,1715411,"models/dynamics.py",3149,3,"(0,",python,selection_mouse +739,1715423,"models/dynamics.py",3149,4,"(0, ",python,selection_mouse +740,1715482,"models/dynamics.py",3149,5,"(0, 0",python,selection_mouse +741,1715482,"models/dynamics.py",3149,6,"(0, 0)",python,selection_mouse +742,1715483,"models/dynamics.py",3149,7,"(0, 0))",python,selection_mouse +743,1721493,"models/dynamics.py",3125,0,"",python,selection_mouse +744,1721605,"models/dynamics.py",3125,1,"(",python,selection_mouse +745,1721626,"models/dynamics.py",3125,3,"(0,",python,selection_mouse +746,1721641,"models/dynamics.py",3125,4,"(0, ",python,selection_mouse +747,1721694,"models/dynamics.py",3125,5,"(0, 0",python,selection_mouse +748,1721704,"models/dynamics.py",3125,6,"(0, 0)",python,selection_mouse +749,1722366,"models/dynamics.py",3134,0,"",python,selection_mouse +750,1722504,"models/dynamics.py",3134,2,"0,",python,selection_mouse +751,1722524,"models/dynamics.py",3134,4,"0, 0",python,selection_mouse +752,1722540,"models/dynamics.py",3134,5,"0, 0)",python,selection_mouse +753,1723393,"models/dynamics.py",3141,0,"",python,selection_mouse +754,1723540,"models/dynamics.py",3141,1,"(",python,selection_mouse +755,1723604,"models/dynamics.py",3141,3,"(1,",python,selection_mouse +756,1723607,"models/dynamics.py",3141,4,"(1, ",python,selection_mouse +757,1723675,"models/dynamics.py",3141,5,"(1, 0",python,selection_mouse +758,1723742,"models/dynamics.py",3141,6,"(1, 0)",python,selection_mouse +759,1724430,"models/dynamics.py",3149,0,"",python,selection_mouse +760,1724552,"models/dynamics.py",3149,2,"(0",python,selection_mouse +761,1724573,"models/dynamics.py",3149,3,"(0,",python,selection_mouse +762,1724587,"models/dynamics.py",3149,4,"(0, ",python,selection_mouse +763,1724638,"models/dynamics.py",3149,5,"(0, 0",python,selection_mouse +764,1724638,"models/dynamics.py",3149,6,"(0, 0)",python,selection_mouse 
+765,1727786,"models/dynamics.py",3125,0,"",python,selection_mouse +766,1727966,"models/dynamics.py",3125,1,"(",python,selection_mouse +767,1727988,"models/dynamics.py",3125,2,"(0",python,selection_mouse +768,1728015,"models/dynamics.py",3125,3,"(0,",python,selection_mouse +769,1728071,"models/dynamics.py",3125,4,"(0, ",python,selection_mouse +770,1728081,"models/dynamics.py",3125,5,"(0, 0",python,selection_mouse +771,1731649,"models/dynamics.py",3133,0,"",python,selection_mouse +772,1731829,"models/dynamics.py",3133,1,"(",python,selection_mouse +773,1731847,"models/dynamics.py",3133,2,"(0",python,selection_mouse +774,1731862,"models/dynamics.py",3133,3,"(0,",python,selection_mouse +775,1731918,"models/dynamics.py",3133,4,"(0, ",python,selection_mouse +776,1731925,"models/dynamics.py",3133,5,"(0, 0",python,selection_mouse +777,1732162,"models/dynamics.py",3133,6,"(0, 0)",python,selection_mouse +778,1733182,"models/dynamics.py",3142,0,"",python,selection_mouse +779,1733375,"models/dynamics.py",3142,1,"1",python,selection_mouse +780,1733404,"models/dynamics.py",3142,2,"1,",python,selection_mouse +781,1733458,"models/dynamics.py",3142,3,"1, ",python,selection_mouse +782,1733553,"models/dynamics.py",3142,4,"1, 0",python,selection_mouse +783,1903399,"models/dynamics.py",3217,0,"",python,selection_mouse +784,1903400,"models/dynamics.py",3216,0,"",python,selection_command +785,1903547,"models/dynamics.py",3215,1,"1",python,selection_mouse +786,1903558,"models/dynamics.py",3215,2,"1]",python,selection_command +787,1903579,"models/dynamics.py",3214,3,"-1]",python,selection_mouse +788,1903638,"models/dynamics.py",3213,4,":-1]",python,selection_mouse +789,1903749,"models/dynamics.py",3212,5," :-1]",python,selection_mouse +790,1903814,"models/dynamics.py",3211,6,", :-1]",python,selection_mouse +791,1937564,"models/dynamics.py",3183,0,"",python,selection_mouse +792,1940529,"models/dynamics.py",2190,0,"",python,selection_mouse +793,1940790,"utils/nn.py",0,0,"import math\nfrom typing import Dict, Tuple\n\nfrom flax import linen as nn\nimport jax\nimport jax.numpy as jnp\n\n\nclass PositionalEncoding(nn.Module):\n """"""https://uvadlc-notebooks.readthedocs.io/en/latest/tutorial_notebooks/JAX/tutorial6/Transformers_and_MHAttention.html""""""\n\n d_model: int # Hidden dimensionality of the input.\n max_len: int = 5000 # Maximum length of a sequence to expect.\n\n def setup(self):\n # Create matrix of [SeqLen, HiddenDim] representing the positional encoding for max_len inputs\n self.pe = jnp.zeros((self.max_len, self.d_model))\n position = jnp.arange(0, self.max_len, dtype=jnp.float32)[:, None]\n div_term = jnp.exp(\n jnp.arange(0, self.d_model, 2) * (-math.log(10000.0) / self.d_model)\n )\n self.pe = self.pe.at[:, 0::2].set(jnp.sin(position * div_term))\n self.pe = self.pe.at[:, 1::2].set(jnp.cos(position * div_term))\n\n def __call__(self, x):\n x = x + self.pe[: x.shape[2]]\n return x\n\n# class STBlock2(nn.Module):\n # dim: int\n # num_heads: int\n # dropout: float\n # param_dtype: jnp.dtype\n # dtype: jnp.dtype\n\n # @nn.remat\n # @nn.compact\n # def __call__(self, x: jax.Array) -> jax.Array:\n # # --- Spatial attention ---\n # z = PositionalEncoding(self.dim)(x)\n # z = nn.LayerNorm(\n # param_dtype=self.param_dtype,\n # dtype=self.dtype,\n # )(z)\n # causal_mask = jnp.tri(z.shape[-2])\n # z = nn.MultiHeadAttention(\n # num_heads=self.num_heads,\n # qkv_features=self.dim,\n # dropout_rate=self.dropout,\n # param_dtype=self.param_dtype,\n # dtype=self.dtype,\n # )(z, mask=causal_mask)\n # x = x + z\n\n 
# # --- Temporal attention ---\n # x = x.swapaxes(1, 2)\n # z = PositionalEncoding(self.dim)(x)\n # z = nn.LayerNorm(\n # param_dtype=self.param_dtype,\n # dtype=self.dtype,\n # )(z)\n # causal_mask = jnp.tri(z.shape[-2])\n # z = nn.MultiHeadAttention(\n # num_heads=self.num_heads,\n # qkv_features=self.dim,\n # dropout_rate=self.dropout,\n # param_dtype=self.param_dtype,\n # dtype=self.dtype,\n # )(z, mask=causal_mask)\n # x = x + z\n # x = x.swapaxes(1, 2)\n\n # # --- Feedforward ---\n # z = nn.LayerNorm(\n # param_dtype=self.param_dtype,\n # dtype=self.dtype,\n # )(x)\n # # FIXME (f.srambical): Here, the attention hidden dimension is the same as the FFN's. Usually, FFN hidden dimension is 4x model_dim\n # z = nn.Dense(\n # self.dim,\n # param_dtype=self.param_dtype,\n # dtype=self.dtype,\n # )(z)\n # z = nn.gelu(z)\n # x = x + z\n\n # return x\n\n# class CausalTransformer(nn.Module):\n # model_dim: int\n # out_dim: int\n # num_blocks: int\n # num_heads: int\n # dropout: float\n # param_dtype: jnp.dtype\n # dtype: jnp.dtype\n\n # @nn.compact\n # def __call__(self, x: jax.Array) -> jax.Array:\n # # Input projection and normalization\n # x = nn.Sequential(\n # [\n # nn.LayerNorm(\n # param_dtype=self.param_dtype,\n # dtype=self.dtype,\n # ),\n # nn.Dense(self.model_dim,\n # param_dtype=self.param_dtype,\n # dtype=self.dtype,\n # ),\n # nn.LayerNorm(\n # param_dtype=self.param_dtype,\n # dtype=self.dtype,\n # ),\n # ]\n # )(x)\n # # Causal transformer blocks\n # for _ in range(self.num_blocks):\n # x = STBlock2(\n # dim=self.model_dim,\n # num_heads=self.num_heads,\n # dropout=self.dropout,\n # param_dtype=self.param_dtype,\n # dtype=self.dtype,\n # )(x)\n\n # # Output projection\n # x = nn.Dense(\n # self.out_dim,\n # param_dtype=self.param_dtype,\n # dtype=self.dtype,\n # )(x)\n # return x # (B, T, E)\n\n\nclass STBlock(nn.Module):\n dim: int\n num_heads: int\n dropout: float\n param_dtype: jnp.dtype\n dtype: jnp.dtype\n spacial_bert: bool = True\n\n @nn.remat\n @nn.compact\n def __call__(self, x: jax.Array) -> jax.Array:\n # --- Spatial attention ---\n z = PositionalEncoding(self.dim)(x)\n z = nn.LayerNorm(\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n )(z)\n spacial_mask = None if self.spacial_bert else jnp.tri(z.shape[-2])\n z = nn.MultiHeadAttention(\n num_heads=self.num_heads,\n qkv_features=self.dim,\n dropout_rate=self.dropout,\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n )(z, mask=spacial_mask)\n x = x + z\n\n # --- Temporal attention ---\n x = x.swapaxes(1, 2)\n z = PositionalEncoding(self.dim)(x)\n z = nn.LayerNorm(\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n )(z)\n causal_mask = jnp.tri(z.shape[-2])\n z = nn.MultiHeadAttention(\n num_heads=self.num_heads,\n qkv_features=self.dim,\n dropout_rate=self.dropout,\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n )(z, mask=causal_mask)\n x = x + z\n x = x.swapaxes(1, 2)\n\n # --- Feedforward ---\n z = nn.LayerNorm(\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n )(x)\n # FIXME (f.srambical): Here, the attention hidden dimension is the same as the FFN's. 
Usually, FFN hidden dimension is 4x model_dim\n z = nn.Dense(\n self.dim,\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n )(z)\n z = nn.gelu(z)\n x = x + z\n\n return x\n\n\nclass STTransformer(nn.Module):\n model_dim: int\n out_dim: int\n num_blocks: int\n num_heads: int\n dropout: float\n param_dtype: jnp.dtype\n dtype: jnp.dtype\n spacial_bert: bool = True\n\n @nn.compact\n def __call__(self, x: jax.Array) -> jax.Array:\n x = nn.Sequential(\n [\n nn.LayerNorm(\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n ),\n nn.Dense(self.model_dim,\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n ),\n nn.LayerNorm(\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n ),\n ]\n )(x)\n for _ in range(self.num_blocks):\n x = STBlock(\n dim=self.model_dim,\n num_heads=self.num_heads,\n dropout=self.dropout,\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n spacial_bert=self.spacial_bert,\n )(x)\n x = nn.Dense(\n self.out_dim,\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n )(x)\n return x # (B, T, E)\n\n\ndef normalize(x):\n return x / (jnp.linalg.norm(x, ord=2, axis=-1, keepdims=True) + 1e-8)\n\n\nclass VectorQuantizer(nn.Module):\n latent_dim: int\n num_latents: int\n dropout: float\n\n def setup(self):\n self.codebook = normalize(\n self.param(\n ""codebook"",\n nn.initializers.lecun_uniform(),\n (self.num_latents, self.latent_dim),\n )\n )\n self.drop = nn.Dropout(self.dropout, deterministic=False)\n\n def __call__(\n self, x: jax.Array, training: bool\n ) -> Tuple[jax.Array, jax.Array, jax.Array, jax.Array]:\n # --- Compute distances ---\n x = normalize(x)\n codebook = normalize(self.codebook)\n distance = -jnp.matmul(x, codebook.T)\n if training:\n dropout_key = self.make_rng(""dropout"")\n distance = self.drop(distance, rng=dropout_key)\n\n # --- Get indices and embeddings ---\n indices = jnp.argmin(distance, axis=-1)\n z = self.codebook[indices]\n\n # --- Straight through estimator ---\n z_q = x + jax.lax.stop_gradient(z - x)\n return z_q, z, x, indices\n\n def get_codes(self, indices: jax.Array):\n return self.codebook[indices]\n",python,tab +794,1952935,"utils/nn.py",6848,0,"",python,selection_mouse +795,1963266,"utils/nn.py",4694,0,"",python,selection_mouse +796,1963471,"utils/nn.py",4689,12,"spacial_bert",python,selection_mouse +797,1965920,"utils/nn.py",4996,0,"",python,selection_mouse +798,1965922,"utils/nn.py",4995,0,"",python,selection_command +799,1967916,"utils/nn.py",4697,0,"",python,selection_mouse +800,1968539,"utils/nn.py",4762,0,"",python,selection_mouse +801,1968544,"utils/nn.py",4761,0,"",python,selection_command +802,2075041,"utils/nn.py",4701,0,"",python,selection_mouse +803,2075257,"utils/nn.py",4701,1," ",python,selection_mouse +804,2075361,"utils/nn.py",4701,4," els",python,selection_mouse +805,2075362,"utils/nn.py",4701,6," else ",python,selection_mouse +806,2075363,"utils/nn.py",4701,61," else jnp.tri(z.shape[-2])\n z = nn.MultiHeadAttention(",python,selection_mouse +807,2075485,"utils/nn.py",4701,99," else jnp.tri(z.shape[-2])\n z = nn.MultiHeadAttention(\n num_heads=self.num_heads,",python,selection_mouse +808,2075885,"utils/nn.py",4701,61," else jnp.tri(z.shape[-2])\n z = nn.MultiHeadAttention(",python,selection_mouse +809,2076019,"utils/nn.py",4701,26," else jnp.tri(z.shape[-2])",python,selection_mouse +810,2076520,"utils/nn.py",4727,0,"",python,selection_mouse +811,2076522,"utils/nn.py",4726,0,"",python,selection_command +812,2076701,"utils/nn.py",4727,0,"",python,selection_mouse 
+813,2076702,"utils/nn.py",4726,0,"",python,selection_command +814,2076880,"utils/nn.py",4726,1,")",python,selection_mouse +815,2076881,"utils/nn.py",4727,0,"",python,selection_command +816,2076941,"utils/nn.py",4725,2,"])",python,selection_mouse +817,2076951,"utils/nn.py",4724,3,"2])",python,selection_mouse +818,2076952,"utils/nn.py",4717,10,"shape[-2])",python,selection_mouse +819,2076956,"utils/nn.py",4652,75,"\n spacial_mask = None if self.spacial_bert else jnp.tri(z.shape[-2])",python,selection_mouse +820,2077011,"utils/nn.py",4639,88,"\n )(z)\n spacial_mask = None if self.spacial_bert else jnp.tri(z.shape[-2])",python,selection_mouse +821,2077019,"utils/nn.py",4652,75,"\n spacial_mask = None if self.spacial_bert else jnp.tri(z.shape[-2])",python,selection_mouse +822,2077405,"utils/nn.py",4684,43,"self.spacial_bert else jnp.tri(z.shape[-2])",python,selection_mouse +823,2077423,"utils/nn.py",4683,44," self.spacial_bert else jnp.tri(z.shape[-2])",python,selection_mouse +824,2077477,"utils/nn.py",4681,46,"if self.spacial_bert else jnp.tri(z.shape[-2])",python,selection_mouse +825,2077478,"utils/nn.py",4680,47," if self.spacial_bert else jnp.tri(z.shape[-2])",python,selection_mouse +826,2077478,"utils/nn.py",4676,51,"None if self.spacial_bert else jnp.tri(z.shape[-2])",python,selection_mouse +827,2077536,"utils/nn.py",4675,52," None if self.spacial_bert else jnp.tri(z.shape[-2])",python,selection_mouse +828,2077589,"utils/nn.py",4674,53,"= None if self.spacial_bert else jnp.tri(z.shape[-2])",python,selection_mouse +829,2077595,"utils/nn.py",4673,54," = None if self.spacial_bert else jnp.tri(z.shape[-2])",python,selection_mouse +830,2077609,"utils/nn.py",4661,66,"spacial_mask = None if self.spacial_bert else jnp.tri(z.shape[-2])",python,selection_mouse +831,2078152,"utils/nn.py",4660,67," spacial_mask = None if self.spacial_bert else jnp.tri(z.shape[-2])",python,selection_mouse +832,2078218,"utils/nn.py",4659,68," spacial_mask = None if self.spacial_bert else jnp.tri(z.shape[-2])",python,selection_mouse +833,2078274,"utils/nn.py",4658,69," spacial_mask = None if self.spacial_bert else jnp.tri(z.shape[-2])",python,selection_mouse +834,2078327,"utils/nn.py",4657,70," spacial_mask = None if self.spacial_bert else jnp.tri(z.shape[-2])",python,selection_mouse +835,2078381,"utils/nn.py",4656,71," spacial_mask = None if self.spacial_bert else jnp.tri(z.shape[-2])",python,selection_mouse +836,2078438,"utils/nn.py",4655,72," spacial_mask = None if self.spacial_bert else jnp.tri(z.shape[-2])",python,selection_mouse +837,2078499,"utils/nn.py",4654,73," spacial_mask = None if self.spacial_bert else jnp.tri(z.shape[-2])",python,selection_mouse +838,2078567,"utils/nn.py",4653,74," spacial_mask = None if self.spacial_bert else jnp.tri(z.shape[-2])",python,selection_mouse +839,2083552,"utils/nn.py",4357,0,"",python,selection_mouse +840,2084206,"utils/nn.py",4354,12,"spacial_bert",python,selection_mouse +841,2092034,"utils/nn.py",4366,0,"",python,selection_command +842,2092046,"utils/nn.py",7066,1,"t",python,content +843,2092047,"utils/nn.py",4692,1,"t",python,content +844,2092047,"utils/nn.py",4357,1,"t",python,content +845,2094509,"utils/nn.py",4668,0,"",python,selection_mouse +846,2094686,"utils/nn.py",4661,12,"spacial_mask",python,selection_mouse +847,2102847,"utils/nn.py",4673,0,"",python,selection_command +848,2102858,"utils/nn.py",4968,1,"t",python,content +849,2102858,"utils/nn.py",4664,1,"t",python,content +850,2104804,"utils/nn.py",4707,0,"",python,selection_mouse 
+851,2104980,"utils/nn.py",4707,3,"jnp",python,selection_mouse +852,2105227,"utils/nn.py",4707,7,"jnp.tri",python,selection_mouse +853,2105228,"utils/nn.py",4707,9,"jnp.tri(z",python,selection_mouse +854,2105243,"utils/nn.py",4707,15,"jnp.tri(z.shape",python,selection_mouse +855,2105311,"utils/nn.py",4707,16,"jnp.tri(z.shape[",python,selection_mouse +856,2105311,"utils/nn.py",4707,17,"jnp.tri(z.shape[-",python,selection_mouse +857,2105312,"utils/nn.py",4707,18,"jnp.tri(z.shape[-2",python,selection_mouse +858,2105328,"utils/nn.py",4707,19,"jnp.tri(z.shape[-2]",python,selection_mouse +859,2105384,"utils/nn.py",4707,20,"jnp.tri(z.shape[-2])",python,selection_mouse +860,2107506,"utils/nn.py",4961,0,"",python,selection_mouse +861,2107646,"utils/nn.py",4960,4,"mask",python,selection_mouse +862,2107865,"utils/nn.py",4960,5,"mask=",python,selection_mouse +863,2107882,"utils/nn.py",4960,17,"mask=spatial_mask",python,selection_mouse +864,2108360,"utils/nn.py",4977,0,"",python,selection_mouse +865,2110679,"utils/nn.py",4736,0,"",python,selection_mouse +866,2110809,"utils/nn.py",4736,1,"z",python,selection_mouse +867,2110977,"utils/nn.py",4736,35,"z = nn.MultiHeadAttention(\n ",python,selection_mouse +868,2110977,"utils/nn.py",4736,75,"z = nn.MultiHeadAttention(\n num_heads=self.num_heads,\n ",python,selection_mouse +869,2110991,"utils/nn.py",4736,112,"z = nn.MultiHeadAttention(\n num_heads=self.num_heads,\n qkv_features=self.dim,\n ",python,selection_mouse +870,2111044,"utils/nn.py",4736,124,"z = nn.MultiHeadAttention(\n num_heads=self.num_heads,\n qkv_features=self.dim,\n dropout_rate",python,selection_mouse +871,2111045,"utils/nn.py",4736,162,"z = nn.MultiHeadAttention(\n num_heads=self.num_heads,\n qkv_features=self.dim,\n dropout_rate=self.dropout,\n param_dtype",python,selection_mouse +872,2111045,"utils/nn.py",4736,203,"z = nn.MultiHeadAttention(\n num_heads=self.num_heads,\n qkv_features=self.dim,\n dropout_rate=self.dropout,\n param_dtype=self.param_dtype,\n dtype=self",python,selection_mouse +873,2111097,"utils/nn.py",4736,241,"z = nn.MultiHeadAttention(\n num_heads=self.num_heads,\n qkv_features=self.dim,\n dropout_rate=self.dropout,\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n )(z, mask=spatial_mask",python,selection_mouse +874,2111174,"utils/nn.py",4736,242,"z = nn.MultiHeadAttention(\n num_heads=self.num_heads,\n qkv_features=self.dim,\n dropout_rate=self.dropout,\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n )(z, mask=spatial_mask)",python,selection_mouse +875,2111175,"utils/nn.py",4736,260,"z = nn.MultiHeadAttention(\n num_heads=self.num_heads,\n qkv_features=self.dim,\n dropout_rate=self.dropout,\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n )(z, mask=spatial_mask)\n x = x + z",python,selection_mouse +876,2112176,"utils/nn.py",4996,0,"",python,selection_mouse +877,2112177,"utils/nn.py",4995,0,"",python,selection_command +878,2112356,"utils/nn.py",4995,1,"z",python,selection_mouse +879,2112358,"utils/nn.py",4996,0,"",python,selection_command +880,2113463,"utils/nn.py",4978,0,"",python,selection_mouse +881,2113465,"utils/nn.py",4977,0,"",python,selection_command +882,2113722,"utils/nn.py",4977,1,")",python,selection_mouse +883,2113724,"utils/nn.py",4978,0,"",python,selection_command +884,2113743,"utils/nn.py",4976,2,"k)",python,selection_mouse +885,2113794,"utils/nn.py",4974,4,"ask)",python,selection_mouse +886,2113795,"utils/nn.py",4941,37,"type,\n )(z, mask=spatial_mask)",python,selection_mouse +887,2113795,"utils/nn.py",4938,40,"f.dtype,\n )(z, 
mask=spatial_mask)",python,selection_mouse +888,2113806,"utils/nn.py",4935,43,"self.dtype,\n )(z, mask=spatial_mask)",python,selection_mouse +889,2113822,"utils/nn.py",4932,46,"pe=self.dtype,\n )(z, mask=spatial_mask)",python,selection_mouse +890,2113840,"utils/nn.py",4930,48,"type=self.dtype,\n )(z, mask=spatial_mask)",python,selection_mouse +891,2113855,"utils/nn.py",4886,92," param_dtype=self.param_dtype,\n dtype=self.dtype,\n )(z, mask=spatial_mask)",python,selection_mouse +892,2113911,"utils/nn.py",4884,94," param_dtype=self.param_dtype,\n dtype=self.dtype,\n )(z, mask=spatial_mask)",python,selection_mouse +893,2113912,"utils/nn.py",4883,95," param_dtype=self.param_dtype,\n dtype=self.dtype,\n )(z, mask=spatial_mask)",python,selection_mouse +894,2113912,"utils/nn.py",4881,97," param_dtype=self.param_dtype,\n dtype=self.dtype,\n )(z, mask=spatial_mask)",python,selection_mouse +895,2113922,"utils/nn.py",4880,98," param_dtype=self.param_dtype,\n dtype=self.dtype,\n )(z, mask=spatial_mask)",python,selection_mouse +896,2113937,"utils/nn.py",4879,99," param_dtype=self.param_dtype,\n dtype=self.dtype,\n )(z, mask=spatial_mask)",python,selection_mouse +897,2113954,"utils/nn.py",4878,100," param_dtype=self.param_dtype,\n dtype=self.dtype,\n )(z, mask=spatial_mask)",python,selection_mouse +898,2113973,"utils/nn.py",4838,140," dropout_rate=self.dropout,\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n )(z, mask=spatial_mask)",python,selection_mouse +899,2114027,"utils/nn.py",4837,141," dropout_rate=self.dropout,\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n )(z, mask=spatial_mask)",python,selection_mouse +900,2114028,"utils/nn.py",4836,142," dropout_rate=self.dropout,\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n )(z, mask=spatial_mask)",python,selection_mouse +901,2114036,"utils/nn.py",4801,177," qkv_features=self.dim,\n dropout_rate=self.dropout,\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n )(z, mask=spatial_mask)",python,selection_mouse +902,2114108,"utils/nn.py",4763,215," num_heads=self.num_heads,\n qkv_features=self.dim,\n dropout_rate=self.dropout,\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n )(z, mask=spatial_mask)",python,selection_mouse +903,2114171,"utils/nn.py",4728,250," z = nn.MultiHeadAttention(\n num_heads=self.num_heads,\n qkv_features=self.dim,\n dropout_rate=self.dropout,\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n )(z, mask=spatial_mask)",python,selection_mouse +904,2117155,"utils/nn.py",4978,0,"",python,selection_mouse +905,2117161,"utils/nn.py",4977,0,"",python,selection_command +906,2128221,"utils/nn.py",5511,0,"",python,selection_mouse +907,2128223,"utils/nn.py",5510,0,"",python,selection_command +908,2128475,"utils/nn.py",5510,19,")\n x = x + z",python,selection_mouse +909,2128476,"utils/nn.py",5511,18,"\n x = x + z",python,selection_command +910,2128522,"utils/nn.py",5509,2,"k)",python,selection_mouse +911,2128544,"utils/nn.py",5506,5,"mask)",python,selection_mouse +912,2128560,"utils/nn.py",5503,8,"al_mask)",python,selection_mouse +913,2128577,"utils/nn.py",5500,11,"ausal_mask)",python,selection_mouse +914,2128593,"utils/nn.py",5469,42,"self.dtype,\n )(z, mask=causal_mask)",python,selection_mouse +915,2128609,"utils/nn.py",5467,44,"e=self.dtype,\n )(z, mask=causal_mask)",python,selection_mouse +916,2128625,"utils/nn.py",5466,45,"pe=self.dtype,\n )(z, mask=causal_mask)",python,selection_mouse +917,2128642,"utils/nn.py",5465,46,"ype=self.dtype,\n )(z, mask=causal_mask)",python,selection_mouse 
+918,2128658,"utils/nn.py",5464,47,"type=self.dtype,\n )(z, mask=causal_mask)",python,selection_mouse +919,2128715,"utils/nn.py",5421,90,"param_dtype=self.param_dtype,\n dtype=self.dtype,\n )(z, mask=causal_mask)",python,selection_mouse +920,2128715,"utils/nn.py",5420,91," param_dtype=self.param_dtype,\n dtype=self.dtype,\n )(z, mask=causal_mask)",python,selection_mouse +921,2128723,"utils/nn.py",5419,92," param_dtype=self.param_dtype,\n dtype=self.dtype,\n )(z, mask=causal_mask)",python,selection_mouse +922,2128774,"utils/nn.py",5418,93," param_dtype=self.param_dtype,\n dtype=self.dtype,\n )(z, mask=causal_mask)",python,selection_mouse +923,2128775,"utils/nn.py",5379,132," dropout_rate=self.dropout,\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n )(z, mask=causal_mask)",python,selection_mouse +924,2128777,"utils/nn.py",5378,133," dropout_rate=self.dropout,\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n )(z, mask=causal_mask)",python,selection_mouse +925,2128814,"utils/nn.py",5343,168," qkv_features=self.dim,\n dropout_rate=self.dropout,\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n )(z, mask=causal_mask)",python,selection_mouse +926,2128865,"utils/nn.py",5342,169," qkv_features=self.dim,\n dropout_rate=self.dropout,\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n )(z, mask=causal_mask)",python,selection_mouse +927,2128921,"utils/nn.py",5341,170," qkv_features=self.dim,\n dropout_rate=self.dropout,\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n )(z, mask=causal_mask)",python,selection_mouse +928,2128924,"utils/nn.py",5303,208," num_heads=self.num_heads,\n qkv_features=self.dim,\n dropout_rate=self.dropout,\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n )(z, mask=causal_mask)",python,selection_mouse +929,2128942,"utils/nn.py",5302,209," num_heads=self.num_heads,\n qkv_features=self.dim,\n dropout_rate=self.dropout,\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n )(z, mask=causal_mask)",python,selection_mouse +930,2128998,"utils/nn.py",5301,210," num_heads=self.num_heads,\n qkv_features=self.dim,\n dropout_rate=self.dropout,\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n )(z, mask=causal_mask)",python,selection_mouse +931,2129052,"utils/nn.py",5300,211," num_heads=self.num_heads,\n qkv_features=self.dim,\n dropout_rate=self.dropout,\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n )(z, mask=causal_mask)",python,selection_mouse +932,2129052,"utils/nn.py",5265,246," z = nn.MultiHeadAttention(\n num_heads=self.num_heads,\n qkv_features=self.dim,\n dropout_rate=self.dropout,\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n )(z, mask=causal_mask)",python,selection_mouse +933,2129105,"utils/nn.py",5264,247," z = nn.MultiHeadAttention(\n num_heads=self.num_heads,\n qkv_features=self.dim,\n dropout_rate=self.dropout,\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n )(z, mask=causal_mask)",python,selection_mouse +934,2169467,"utils/nn.py",5264,0,"",python,selection_mouse +935,2169561,"utils/nn.py",5262,8," ",python,selection_mouse +936,2169712,"utils/nn.py",5262,35," z = nn.MultiHeadAttention(\n",python,selection_mouse +937,2169919,"utils/nn.py",5262,73," z = nn.MultiHeadAttention(\n num_heads=self.num_heads,\n",python,selection_mouse +938,2169981,"utils/nn.py",5262,108," z = nn.MultiHeadAttention(\n num_heads=self.num_heads,\n qkv_features=self.dim,\n",python,selection_mouse +939,2170034,"utils/nn.py",5262,147," z = nn.MultiHeadAttention(\n num_heads=self.num_heads,\n qkv_features=self.dim,\n 
dropout_rate=self.dropout,\n",python,selection_mouse +940,2170103,"utils/nn.py",5262,189," z = nn.MultiHeadAttention(\n num_heads=self.num_heads,\n qkv_features=self.dim,\n dropout_rate=self.dropout,\n param_dtype=self.param_dtype,\n",python,selection_mouse +941,2170267,"utils/nn.py",5262,219," z = nn.MultiHeadAttention(\n num_heads=self.num_heads,\n qkv_features=self.dim,\n dropout_rate=self.dropout,\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n",python,selection_mouse +942,2170387,"utils/nn.py",5262,250," z = nn.MultiHeadAttention(\n num_heads=self.num_heads,\n qkv_features=self.dim,\n dropout_rate=self.dropout,\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n )(z, mask=causal_mask)\n",python,selection_mouse +943,2170517,"utils/nn.py",5262,268," z = nn.MultiHeadAttention(\n num_heads=self.num_heads,\n qkv_features=self.dim,\n dropout_rate=self.dropout,\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n )(z, mask=causal_mask)\n x = x + z\n",python,selection_mouse +944,2171234,"utils/nn.py",5262,297," z = nn.MultiHeadAttention(\n num_heads=self.num_heads,\n qkv_features=self.dim,\n dropout_rate=self.dropout,\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n )(z, mask=causal_mask)\n x = x + z\n x = x.swapaxes(1, 2)\n",python,selection_mouse +945,2173248,"utils/nn.py",5615,0,"",python,selection_mouse +946,2173254,"utils/nn.py",5614,0,"",python,selection_command +947,2173696,"utils/nn.py",5615,0,"",python,selection_mouse +948,2173697,"utils/nn.py",5614,0,"",python,selection_command +949,2173857,"utils/nn.py",5614,1,"(",python,selection_mouse +950,2173859,"utils/nn.py",5615,0,"",python,selection_command +951,2173878,"utils/nn.py",5589,26,"\n z = nn.LayerNorm(",python,selection_mouse +952,2173919,"utils/nn.py",5559,56,"\n # --- Feedforward ---\n z = nn.LayerNorm(",python,selection_mouse +953,2173981,"utils/nn.py",5529,86,"\n x = x.swapaxes(1, 2)\n\n # --- Feedforward ---\n z = nn.LayerNorm(",python,selection_mouse +954,2173988,"utils/nn.py",5511,104,"\n x = x + z\n x = x.swapaxes(1, 2)\n\n # --- Feedforward ---\n z = nn.LayerNorm(",python,selection_mouse +955,2174048,"utils/nn.py",5480,135,"\n )(z, mask=causal_mask)\n x = x + z\n x = x.swapaxes(1, 2)\n\n # --- Feedforward ---\n z = nn.LayerNorm(",python,selection_mouse +956,2174118,"utils/nn.py",5479,136,",\n )(z, mask=causal_mask)\n x = x + z\n x = x.swapaxes(1, 2)\n\n # --- Feedforward ---\n z = nn.LayerNorm(",python,selection_mouse +957,2174166,"utils/nn.py",5435,180,"lf.param_dtype,\n dtype=self.dtype,\n )(z, mask=causal_mask)\n x = x + z\n x = x.swapaxes(1, 2)\n\n # --- Feedforward ---\n z = nn.LayerNorm(",python,selection_mouse +958,2174167,"utils/nn.py",5433,182,"self.param_dtype,\n dtype=self.dtype,\n )(z, mask=causal_mask)\n x = x + z\n x = x.swapaxes(1, 2)\n\n # --- Feedforward ---\n z = nn.LayerNorm(",python,selection_mouse +959,2174179,"utils/nn.py",5432,183,"=self.param_dtype,\n dtype=self.dtype,\n )(z, mask=causal_mask)\n x = x + z\n x = x.swapaxes(1, 2)\n\n # --- Feedforward ---\n z = nn.LayerNorm(",python,selection_mouse +960,2174190,"utils/nn.py",5429,186,"ype=self.param_dtype,\n dtype=self.dtype,\n )(z, mask=causal_mask)\n x = x + z\n x = x.swapaxes(1, 2)\n\n # --- Feedforward ---\n z = nn.LayerNorm(",python,selection_mouse +961,2174244,"utils/nn.py",5389,226,"_rate=self.dropout,\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n )(z, mask=causal_mask)\n x = x + z\n x = x.swapaxes(1, 2)\n\n # --- Feedforward ---\n z = nn.LayerNorm(",python,selection_mouse 
+962,2174245,"utils/nn.py",5388,227,"t_rate=self.dropout,\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n )(z, mask=causal_mask)\n x = x + z\n x = x.swapaxes(1, 2)\n\n # --- Feedforward ---\n z = nn.LayerNorm(",python,selection_mouse +963,2174245,"utils/nn.py",5386,229,"out_rate=self.dropout,\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n )(z, mask=causal_mask)\n x = x + z\n x = x.swapaxes(1, 2)\n\n # --- Feedforward ---\n z = nn.LayerNorm(",python,selection_mouse +964,2174254,"utils/nn.py",5385,230,"pout_rate=self.dropout,\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n )(z, mask=causal_mask)\n x = x + z\n x = x.swapaxes(1, 2)\n\n # --- Feedforward ---\n z = nn.LayerNorm(",python,selection_mouse +965,2174272,"utils/nn.py",5384,231,"opout_rate=self.dropout,\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n )(z, mask=causal_mask)\n x = x + z\n x = x.swapaxes(1, 2)\n\n # --- Feedforward ---\n z = nn.LayerNorm(",python,selection_mouse +966,2174289,"utils/nn.py",5383,232,"ropout_rate=self.dropout,\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n )(z, mask=causal_mask)\n x = x + z\n x = x.swapaxes(1, 2)\n\n # --- Feedforward ---\n z = nn.LayerNorm(",python,selection_mouse +967,2174341,"utils/nn.py",5382,233,"dropout_rate=self.dropout,\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n )(z, mask=causal_mask)\n x = x + z\n x = x.swapaxes(1, 2)\n\n # --- Feedforward ---\n z = nn.LayerNorm(",python,selection_mouse +968,2174342,"utils/nn.py",5346,269," qkv_features=self.dim,\n dropout_rate=self.dropout,\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n )(z, mask=causal_mask)\n x = x + z\n x = x.swapaxes(1, 2)\n\n # --- Feedforward ---\n z = nn.LayerNorm(",python,selection_mouse +969,2174348,"utils/nn.py",5345,270," qkv_features=self.dim,\n dropout_rate=self.dropout,\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n )(z, mask=causal_mask)\n x = x + z\n x = x.swapaxes(1, 2)\n\n # --- Feedforward ---\n z = nn.LayerNorm(",python,selection_mouse +970,2174362,"utils/nn.py",5344,271," qkv_features=self.dim,\n dropout_rate=self.dropout,\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n )(z, mask=causal_mask)\n x = x + z\n x = x.swapaxes(1, 2)\n\n # --- Feedforward ---\n z = nn.LayerNorm(",python,selection_mouse +971,2174417,"utils/nn.py",5343,272," qkv_features=self.dim,\n dropout_rate=self.dropout,\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n )(z, mask=causal_mask)\n x = x + z\n x = x.swapaxes(1, 2)\n\n # --- Feedforward ---\n z = nn.LayerNorm(",python,selection_mouse +972,2174418,"utils/nn.py",5305,310," num_heads=self.num_heads,\n qkv_features=self.dim,\n dropout_rate=self.dropout,\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n )(z, mask=causal_mask)\n x = x + z\n x = x.swapaxes(1, 2)\n\n # --- Feedforward ---\n z = nn.LayerNorm(",python,selection_mouse +973,2174471,"utils/nn.py",5304,311," num_heads=self.num_heads,\n qkv_features=self.dim,\n dropout_rate=self.dropout,\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n )(z, mask=causal_mask)\n x = x + z\n x = x.swapaxes(1, 2)\n\n # --- Feedforward ---\n z = nn.LayerNorm(",python,selection_mouse +974,2174483,"utils/nn.py",5303,312," num_heads=self.num_heads,\n qkv_features=self.dim,\n dropout_rate=self.dropout,\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n )(z, mask=causal_mask)\n x = x + z\n x = x.swapaxes(1, 2)\n\n # --- Feedforward ---\n z = nn.LayerNorm(",python,selection_mouse +975,2174534,"utils/nn.py",5302,313," num_heads=self.num_heads,\n qkv_features=self.dim,\n 
dropout_rate=self.dropout,\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n )(z, mask=causal_mask)\n x = x + z\n x = x.swapaxes(1, 2)\n\n # --- Feedforward ---\n z = nn.LayerNorm(",python,selection_mouse +976,2174536,"utils/nn.py",5301,314," num_heads=self.num_heads,\n qkv_features=self.dim,\n dropout_rate=self.dropout,\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n )(z, mask=causal_mask)\n x = x + z\n x = x.swapaxes(1, 2)\n\n # --- Feedforward ---\n z = nn.LayerNorm(",python,selection_mouse +977,2174595,"utils/nn.py",5265,350," z = nn.MultiHeadAttention(\n num_heads=self.num_heads,\n qkv_features=self.dim,\n dropout_rate=self.dropout,\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n )(z, mask=causal_mask)\n x = x + z\n x = x.swapaxes(1, 2)\n\n # --- Feedforward ---\n z = nn.LayerNorm(",python,selection_mouse +978,2174667,"utils/nn.py",5264,351," z = nn.MultiHeadAttention(\n num_heads=self.num_heads,\n qkv_features=self.dim,\n dropout_rate=self.dropout,\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n )(z, mask=causal_mask)\n x = x + z\n x = x.swapaxes(1, 2)\n\n # --- Feedforward ---\n z = nn.LayerNorm(",python,selection_mouse +979,2175207,"utils/nn.py",5264,0,"",python,selection_mouse +980,2175208,"utils/nn.py",5262,8," ",python,selection_mouse +981,2175367,"utils/nn.py",5262,35," z = nn.MultiHeadAttention(\n",python,selection_mouse +982,2175548,"utils/nn.py",5262,73," z = nn.MultiHeadAttention(\n num_heads=self.num_heads,\n",python,selection_mouse +983,2175562,"utils/nn.py",5262,108," z = nn.MultiHeadAttention(\n num_heads=self.num_heads,\n qkv_features=self.dim,\n",python,selection_mouse +984,2175614,"utils/nn.py",5262,147," z = nn.MultiHeadAttention(\n num_heads=self.num_heads,\n qkv_features=self.dim,\n dropout_rate=self.dropout,\n",python,selection_mouse +985,2175624,"utils/nn.py",5262,189," z = nn.MultiHeadAttention(\n num_heads=self.num_heads,\n qkv_features=self.dim,\n dropout_rate=self.dropout,\n param_dtype=self.param_dtype,\n",python,selection_mouse +986,2175690,"utils/nn.py",5262,219," z = nn.MultiHeadAttention(\n num_heads=self.num_heads,\n qkv_features=self.dim,\n dropout_rate=self.dropout,\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n",python,selection_mouse +987,2175748,"utils/nn.py",5262,250," z = nn.MultiHeadAttention(\n num_heads=self.num_heads,\n qkv_features=self.dim,\n dropout_rate=self.dropout,\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n )(z, mask=causal_mask)\n",python,selection_mouse +988,2175873,"utils/nn.py",5262,268," z = nn.MultiHeadAttention(\n num_heads=self.num_heads,\n qkv_features=self.dim,\n dropout_rate=self.dropout,\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n )(z, mask=causal_mask)\n x = x + z\n",python,selection_mouse +989,2176156,"utils/nn.py",5262,297," z = nn.MultiHeadAttention(\n num_heads=self.num_heads,\n qkv_features=self.dim,\n dropout_rate=self.dropout,\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n )(z, mask=causal_mask)\n x = x + z\n x = x.swapaxes(1, 2)\n",python,selection_mouse +990,2194435,"utils/nn.py",5458,0,"",python,selection_mouse +991,2195224,"utils/nn.py",5266,0,"",python,selection_mouse +992,2195353,"utils/nn.py",5262,8," ",python,selection_mouse +993,2197987,"utils/nn.py",5281,0,"",python,selection_mouse +994,2201937,"utils/nn.py",5242,0,"",python,selection_mouse +995,2202064,"utils/nn.py",5241,3,"jnp",python,selection_mouse +996,2202268,"utils/nn.py",5241,4,"jnp.",python,selection_mouse +997,2202287,"utils/nn.py",5241,7,"jnp.tri",python,selection_mouse 
+998,2202341,"utils/nn.py",5241,9,"jnp.tri(z",python,selection_mouse +999,2202341,"utils/nn.py",5241,15,"jnp.tri(z.shape",python,selection_mouse +1000,2202447,"utils/nn.py",5241,16,"jnp.tri(z.shape[",python,selection_mouse +1001,2202512,"utils/nn.py",5241,17,"jnp.tri(z.shape[-",python,selection_mouse +1002,2202569,"utils/nn.py",5241,18,"jnp.tri(z.shape[-2",python,selection_mouse +1003,2202661,"utils/nn.py",5241,19,"jnp.tri(z.shape[-2]",python,selection_mouse +1004,2202812,"utils/nn.py",5241,20,"jnp.tri(z.shape[-2])",python,selection_mouse +1005,2206021,"utils/nn.py",5275,0,"",python,selection_mouse +1006,2206163,"utils/nn.py",5274,2,"nn",python,selection_mouse +1007,2209067,"utils/nn.py",5225,0,"",python,selection_mouse +1008,2210957,"utils/nn.py",5223,0,"",python,selection_mouse +1009,2211156,"utils/nn.py",5219,8," ",python,selection_mouse +1010,2212179,"utils/nn.py",5277,0,"",python,selection_mouse +1011,2212965,"utils/nn.py",5232,0,"",python,selection_mouse +1012,2377625,"models/dynamics.py",0,0,"",python,tab +1013,2380418,"models/dynamics.py",3096,0,"",python,selection_mouse +1014,2380552,"models/dynamics.py",3086,16,"vid_embed_padded",python,selection_mouse +1015,2383763,"models/dynamics.py",2766,0,"",python,selection_mouse +1016,2383939,"models/dynamics.py",2763,11,"patch_embed",python,selection_mouse +1017,2384624,"models/dynamics.py",2780,0,"",python,selection_mouse +1018,2390281,"models/dynamics.py",3195,0,"",python,selection_mouse +1019,2390891,"models/dynamics.py",3107,0,"",python,selection_mouse +1020,2391200,"models/dynamics.py",3107,1,"p",python,selection_mouse +1021,2391223,"models/dynamics.py",3107,3,"p.p",python,selection_mouse +1022,2391236,"models/dynamics.py",3107,4,"p.pa",python,selection_mouse +1023,2391245,"models/dynamics.py",3107,7,"p.pad(v",python,selection_mouse +1024,2391298,"models/dynamics.py",3107,8,"p.pad(vi",python,selection_mouse +1025,2391298,"models/dynamics.py",3107,9,"p.pad(vid",python,selection_mouse +1026,2391299,"models/dynamics.py",3107,10,"p.pad(vid_",python,selection_mouse +1027,2391842,"models/dynamics.py",3107,10,"",python,content +1028,2391842,"models/dynamics.py",3106,0,"p.pad(vid_",python,content +1029,2392194,"models/dynamics.py",3106,0,"",python,selection_mouse +1030,2392473,"models/dynamics.py",3100,0,"",python,selection_mouse +1031,2395966,"models/dynamics.py",3118,0,"",python,selection_mouse +1032,2397809,"models/dynamics.py",3117,0,"p.pad(vid_",python,content +1033,2397810,"models/dynamics.py",3106,10,"",python,content +1034,2399831,"models/dynamics.py",3217,0,"",python,selection_mouse +1035,2400576,"models/dynamics.py",3216,0,"",python,selection_command +1036,2401801,"models/dynamics.py",3350,0,"",python,selection_mouse +1037,2401806,"models/dynamics.py",3349,0,"",python,selection_command +1038,2407681,"models/dynamics.py",3111,0,"",python,selection_mouse +1039,2411063,"models/dynamics.py",3101,0,"",python,selection_mouse +1040,2411760,"models/dynamics.py",3104,0,"",python,selection_mouse +1041,2413426,"models/dynamics.py",3229,0,"",python,selection_mouse +1042,2413613,"models/dynamics.py",3226,4,"mask",python,selection_mouse +1043,2413786,"models/dynamics.py",3218,46," mask = jnp.ones(vid_embed.shape[:-1])\n",python,selection_mouse +1044,2414899,"models/dynamics.py",3229,0,"",python,selection_mouse +1045,2414900,"models/dynamics.py",3226,4,"mask",python,selection_mouse +1046,2415065,"models/dynamics.py",3218,46," mask = jnp.ones(vid_embed.shape[:-1])\n",python,selection_mouse 
+1047,2416600,"models/dynamics.py",3229,0,"",python,selection_mouse +1048,2416764,"models/dynamics.py",3226,4,"mask",python,selection_mouse +1049,2416957,"models/dynamics.py",3218,46," mask = jnp.ones(vid_embed.shape[:-1])\n",python,selection_mouse +1050,2422267,"models/dynamics.py",3229,0,"",python,selection_mouse +1051,2422391,"models/dynamics.py",3226,4,"mask",python,selection_mouse +1052,2423670,"models/dynamics.py",3240,0,"",python,selection_mouse +1053,2423828,"models/dynamics.py",3237,4,"ones",python,selection_mouse +1054,2424565,"models/dynamics.py",3238,0,"",python,selection_mouse +1055,2429754,"models/dynamics.py",3228,0,"",python,selection_mouse +1056,2429905,"models/dynamics.py",3226,4,"mask",python,selection_mouse +1057,2465032,"models/dynamics.py",3246,0,"",python,selection_mouse +1058,2465191,"models/dynamics.py",3242,9,"vid_embed",python,selection_mouse +1059,2485007,"genie.py",0,0,"",python,tab +1060,2488619,"genie.py",6109,0,"",python,selection_mouse +1061,2489212,"genie.py",6042,0,"",python,selection_mouse +1062,2494884,"genie.py",6055,0,"",python,selection_mouse +1063,2495046,"genie.py",6055,2," t",python,selection_mouse +1064,2495064,"genie.py",6055,4," t-1",python,selection_mouse +1065,2495078,"genie.py",6055,6," t-1, ",python,selection_mouse +1066,2495094,"genie.py",6055,7," t-1, n",python,selection_mouse +1067,2495175,"genie.py",6055,8," t-1, n,",python,selection_mouse +1068,2495228,"genie.py",6055,9," t-1, n, ",python,selection_mouse +1069,2496852,"genie.py",6637,0,"",python,selection_mouse +1070,2496960,"genie.py",6637,3,"[:,",python,selection_mouse +1071,2496980,"genie.py",6637,4,"[:, ",python,selection_mouse +1072,2496995,"genie.py",6637,7,"[:, t, ",python,selection_mouse +1073,2497050,"genie.py",6637,8,"[:, t, n",python,selection_mouse +1074,2497051,"genie.py",6637,9,"[:, t, n]",python,selection_mouse +1075,2497051,"genie.py",6637,10,"[:, t, n].",python,selection_mouse +1076,2497103,"genie.py",6637,11,"[:, t, n].s",python,selection_mouse +1077,2498375,"models/dynamics.py",0,0,"",python,tab +1078,2500274,"models/dynamics.py",0,0,"",python,tab +1079,2516759,"models/dynamics.py",3112,0,"",python,selection_mouse +1080,2517320,"models/dynamics.py",3209,0,"",python,selection_mouse +1081,2517947,"models/dynamics.py",3263,0,"",python,selection_mouse +1082,2517949,"models/dynamics.py",3262,0,"",python,selection_command +1083,2549313,"genie.py",0,0,"",python,tab +1084,2549314,"genie.py",5468,0,"",python,selection_mouse +1085,2564827,"genie.py",6483,0,"",python,selection_mouse +1086,2565416,"genie.py",6660,0,"",python,selection_mouse +1087,2566468,"genie.py",6662,0,"",python,selection_mouse +1088,2566469,"genie.py",6661,0,"",python,selection_command +1089,2623937,"models/dynamics.py",0,0,"",python,tab +1090,2623938,"models/dynamics.py",3217,0,"",python,selection_mouse +1091,2623941,"models/dynamics.py",3216,0,"",python,selection_command +1092,2624191,"models/dynamics.py",3217,0,"",python,selection_mouse +1093,2624205,"models/dynamics.py",3216,0,"",python,selection_command +1094,2624925,"models/dynamics.py",2963,0,"",python,selection_mouse +1095,2626053,"models/dynamics.py",2660,0,"",python,selection_mouse +1096,2626172,"models/dynamics.py",2658,8,"__call__",python,selection_mouse +1097,2626314,"models/dynamics.py",2658,97,"__call__(self, batch: Dict[str, Any], training: bool = True) -> Dict[str, Any]:\n vid_embed",python,selection_mouse +1098,2626330,"models/dynamics.py",2658,300,"__call__(self, batch: Dict[str, Any], training: bool = True) -> Dict[str, Any]:\n vid_embed 
= self.patch_embed(batch[""video_tokens""])\n act_embed = self.action_up(batch[""latent_actions""])\n vid_embed += jnp.pad(act_embed, ((0, 0), (1, 0), (0, 0), (0, 0)))\n # vid_embed_padded",python,selection_mouse +1099,2626387,"models/dynamics.py",2658,388,"__call__(self, batch: Dict[str, Any], training: bool = True) -> Dict[str, Any]:\n vid_embed = self.patch_embed(batch[""video_tokens""])\n act_embed = self.action_up(batch[""latent_actions""])\n vid_embed += jnp.pad(act_embed, ((0, 0), (1, 0), (0, 0), (0, 0)))\n # vid_embed_padded = jnp.pad(vid_embed, ((0, 0), (1, 0), (1, 0), (0, 0)))\n # logits = self.dynamics",python,selection_mouse +1100,2626387,"models/dynamics.py",2658,454,"__call__(self, batch: Dict[str, Any], training: bool = True) -> Dict[str, Any]:\n vid_embed = self.patch_embed(batch[""video_tokens""])\n act_embed = self.action_up(batch[""latent_actions""])\n vid_embed += jnp.pad(act_embed, ((0, 0), (1, 0), (0, 0), (0, 0)))\n # vid_embed_padded = jnp.pad(vid_embed, ((0, 0), (1, 0), (1, 0), (0, 0)))\n # logits = self.dynamics(vid_embed_padded)[:, :-1, :-1]\n vid_embed_padded = jnp.pad",python,selection_mouse +1101,2626388,"models/dynamics.py",2658,547,"__call__(self, batch: Dict[str, Any], training: bool = True) -> Dict[str, Any]:\n vid_embed = self.patch_embed(batch[""video_tokens""])\n act_embed = self.action_up(batch[""latent_actions""])\n vid_embed += jnp.pad(act_embed, ((0, 0), (1, 0), (0, 0), (0, 0)))\n # vid_embed_padded = jnp.pad(vid_embed, ((0, 0), (1, 0), (1, 0), (0, 0)))\n # logits = self.dynamics(vid_embed_padded)[:, :-1, :-1]\n vid_embed_padded = jnp.pad(vid_embed, ((0, 0), (0, 0), (1, 0), (0, 0)))\n logits = self.dynamics(vid_embed_padded",python,selection_mouse +1102,2626397,"models/dynamics.py",2658,603,"__call__(self, batch: Dict[str, Any], training: bool = True) -> Dict[str, Any]:\n vid_embed = self.patch_embed(batch[""video_tokens""])\n act_embed = self.action_up(batch[""latent_actions""])\n vid_embed += jnp.pad(act_embed, ((0, 0), (1, 0), (0, 0), (0, 0)))\n # vid_embed_padded = jnp.pad(vid_embed, ((0, 0), (1, 0), (1, 0), (0, 0)))\n # logits = self.dynamics(vid_embed_padded)[:, :-1, :-1]\n vid_embed_padded = jnp.pad(vid_embed, ((0, 0), (0, 0), (1, 0), (0, 0)))\n logits = self.dynamics(vid_embed_padded)[:, :, :-1]\n mask = jnp.ones(vid_embed.shape[:-1",python,selection_mouse +1103,2626414,"models/dynamics.py",2658,605,"__call__(self, batch: Dict[str, Any], training: bool = True) -> Dict[str, Any]:\n vid_embed = self.patch_embed(batch[""video_tokens""])\n act_embed = self.action_up(batch[""latent_actions""])\n vid_embed += jnp.pad(act_embed, ((0, 0), (1, 0), (0, 0), (0, 0)))\n # vid_embed_padded = jnp.pad(vid_embed, ((0, 0), (1, 0), (1, 0), (0, 0)))\n # logits = self.dynamics(vid_embed_padded)[:, :-1, :-1]\n vid_embed_padded = jnp.pad(vid_embed, ((0, 0), (0, 0), (1, 0), (0, 0)))\n logits = self.dynamics(vid_embed_padded)[:, :, :-1]\n mask = jnp.ones(vid_embed.shape[:-1])",python,selection_mouse +1104,2626431,"models/dynamics.py",2658,656,"__call__(self, batch: Dict[str, Any], training: bool = True) -> Dict[str, Any]:\n vid_embed = self.patch_embed(batch[""video_tokens""])\n act_embed = self.action_up(batch[""latent_actions""])\n vid_embed += jnp.pad(act_embed, ((0, 0), (1, 0), (0, 0), (0, 0)))\n # vid_embed_padded = jnp.pad(vid_embed, ((0, 0), (1, 0), (1, 0), (0, 0)))\n # logits = self.dynamics(vid_embed_padded)[:, :-1, :-1]\n vid_embed_padded = jnp.pad(vid_embed, ((0, 0), (0, 0), (1, 0), (0, 0)))\n logits = self.dynamics(vid_embed_padded)[:, :, :-1]\n mask = 
jnp.ones(vid_embed.shape[:-1])\n # next_tokens = jnp.argmax(logits, axis=-1",python,selection_mouse +1105,2626480,"models/dynamics.py",2658,657,"__call__(self, batch: Dict[str, Any], training: bool = True) -> Dict[str, Any]:\n vid_embed = self.patch_embed(batch[""video_tokens""])\n act_embed = self.action_up(batch[""latent_actions""])\n vid_embed += jnp.pad(act_embed, ((0, 0), (1, 0), (0, 0), (0, 0)))\n # vid_embed_padded = jnp.pad(vid_embed, ((0, 0), (1, 0), (1, 0), (0, 0)))\n # logits = self.dynamics(vid_embed_padded)[:, :-1, :-1]\n vid_embed_padded = jnp.pad(vid_embed, ((0, 0), (0, 0), (1, 0), (0, 0)))\n logits = self.dynamics(vid_embed_padded)[:, :, :-1]\n mask = jnp.ones(vid_embed.shape[:-1])\n # next_tokens = jnp.argmax(logits, axis=-1)",python,selection_mouse +1106,2626481,"models/dynamics.py",2658,692,"__call__(self, batch: Dict[str, Any], training: bool = True) -> Dict[str, Any]:\n vid_embed = self.patch_embed(batch[""video_tokens""])\n act_embed = self.action_up(batch[""latent_actions""])\n vid_embed += jnp.pad(act_embed, ((0, 0), (1, 0), (0, 0), (0, 0)))\n # vid_embed_padded = jnp.pad(vid_embed, ((0, 0), (1, 0), (1, 0), (0, 0)))\n # logits = self.dynamics(vid_embed_padded)[:, :-1, :-1]\n vid_embed_padded = jnp.pad(vid_embed, ((0, 0), (0, 0), (1, 0), (0, 0)))\n logits = self.dynamics(vid_embed_padded)[:, :, :-1]\n mask = jnp.ones(vid_embed.shape[:-1])\n # next_tokens = jnp.argmax(logits, axis=-1)\n # print(next_tokens.shape)",python,selection_mouse +1107,2626540,"models/dynamics.py",2658,725,"__call__(self, batch: Dict[str, Any], training: bool = True) -> Dict[str, Any]:\n vid_embed = self.patch_embed(batch[""video_tokens""])\n act_embed = self.action_up(batch[""latent_actions""])\n vid_embed += jnp.pad(act_embed, ((0, 0), (1, 0), (0, 0), (0, 0)))\n # vid_embed_padded = jnp.pad(vid_embed, ((0, 0), (1, 0), (1, 0), (0, 0)))\n # logits = self.dynamics(vid_embed_padded)[:, :-1, :-1]\n vid_embed_padded = jnp.pad(vid_embed, ((0, 0), (0, 0), (1, 0), (0, 0)))\n logits = self.dynamics(vid_embed_padded)[:, :, :-1]\n mask = jnp.ones(vid_embed.shape[:-1])\n # next_tokens = jnp.argmax(logits, axis=-1)\n # print(next_tokens.shape)\n # jax.debug.breakpoint()",python,selection_mouse +1108,2626541,"models/dynamics.py",2658,777,"__call__(self, batch: Dict[str, Any], training: bool = True) -> Dict[str, Any]:\n vid_embed = self.patch_embed(batch[""video_tokens""])\n act_embed = self.action_up(batch[""latent_actions""])\n vid_embed += jnp.pad(act_embed, ((0, 0), (1, 0), (0, 0), (0, 0)))\n # vid_embed_padded = jnp.pad(vid_embed, ((0, 0), (1, 0), (1, 0), (0, 0)))\n # logits = self.dynamics(vid_embed_padded)[:, :-1, :-1]\n vid_embed_padded = jnp.pad(vid_embed, ((0, 0), (0, 0), (1, 0), (0, 0)))\n logits = self.dynamics(vid_embed_padded)[:, :, :-1]\n mask = jnp.ones(vid_embed.shape[:-1])\n # next_tokens = jnp.argmax(logits, axis=-1)\n # print(next_tokens.shape)\n # jax.debug.breakpoint()\n return dict(token_logits=logits, mask=mask)",python,selection_mouse +1109,2627471,"models/dynamics.py",3435,0,"",python,selection_mouse +1110,2627475,"models/dynamics.py",3434,0,"",python,selection_command +1111,2627879,"models/dynamics.py",3435,0,"",python,selection_mouse +1112,2627880,"models/dynamics.py",3434,0,"",python,selection_command +1113,2628040,"models/dynamics.py",3434,1,")",python,selection_mouse +1114,2628041,"models/dynamics.py",3435,0,"",python,selection_command +1115,2628063,"models/dynamics.py",3383,52,"\n return dict(token_logits=logits, mask=mask)",python,selection_mouse 
+1116,2628095,"models/dynamics.py",3350,85,"\n # jax.debug.breakpoint()\n return dict(token_logits=logits, mask=mask)",python,selection_mouse +1117,2628157,"models/dynamics.py",3299,136,"logits, axis=-1)\n # print(next_tokens.shape)\n # jax.debug.breakpoint()\n return dict(token_logits=logits, mask=mask)",python,selection_mouse +1118,2628157,"models/dynamics.py",3296,139,"ax(logits, axis=-1)\n # print(next_tokens.shape)\n # jax.debug.breakpoint()\n return dict(token_logits=logits, mask=mask)",python,selection_mouse +1119,2628162,"models/dynamics.py",3293,142,"rgmax(logits, axis=-1)\n # print(next_tokens.shape)\n # jax.debug.breakpoint()\n return dict(token_logits=logits, mask=mask)",python,selection_mouse +1120,2628180,"models/dynamics.py",3245,190,"_embed.shape[:-1])\n # next_tokens = jnp.argmax(logits, axis=-1)\n # print(next_tokens.shape)\n # jax.debug.breakpoint()\n return dict(token_logits=logits, mask=mask)",python,selection_mouse +1121,2628196,"models/dynamics.py",3243,192,"id_embed.shape[:-1])\n # next_tokens = jnp.argmax(logits, axis=-1)\n # print(next_tokens.shape)\n # jax.debug.breakpoint()\n return dict(token_logits=logits, mask=mask)",python,selection_mouse +1122,2628213,"models/dynamics.py",3181,254,"ynamics(vid_embed_padded)[:, :, :-1]\n mask = jnp.ones(vid_embed.shape[:-1])\n # next_tokens = jnp.argmax(logits, axis=-1)\n # print(next_tokens.shape)\n # jax.debug.breakpoint()\n return dict(token_logits=logits, mask=mask)",python,selection_mouse +1123,2628232,"models/dynamics.py",3180,255,"dynamics(vid_embed_padded)[:, :, :-1]\n mask = jnp.ones(vid_embed.shape[:-1])\n # next_tokens = jnp.argmax(logits, axis=-1)\n # print(next_tokens.shape)\n # jax.debug.breakpoint()\n return dict(token_logits=logits, mask=mask)",python,selection_mouse +1124,2628246,"models/dynamics.py",3179,256,".dynamics(vid_embed_padded)[:, :, :-1]\n mask = jnp.ones(vid_embed.shape[:-1])\n # next_tokens = jnp.argmax(logits, axis=-1)\n # print(next_tokens.shape)\n # jax.debug.breakpoint()\n return dict(token_logits=logits, mask=mask)",python,selection_mouse +1125,2628298,"models/dynamics.py",3098,337,"dded = jnp.pad(vid_embed, ((0, 0), (0, 0), (1, 0), (0, 0)))\n logits = self.dynamics(vid_embed_padded)[:, :, :-1]\n mask = jnp.ones(vid_embed.shape[:-1])\n # next_tokens = jnp.argmax(logits, axis=-1)\n # print(next_tokens.shape)\n # jax.debug.breakpoint()\n return dict(token_logits=logits, mask=mask)",python,selection_mouse +1126,2628298,"models/dynamics.py",3097,338,"added = jnp.pad(vid_embed, ((0, 0), (0, 0), (1, 0), (0, 0)))\n logits = self.dynamics(vid_embed_padded)[:, :, :-1]\n mask = jnp.ones(vid_embed.shape[:-1])\n # next_tokens = jnp.argmax(logits, axis=-1)\n # print(next_tokens.shape)\n # jax.debug.breakpoint()\n return dict(token_logits=logits, mask=mask)",python,selection_mouse +1127,2628311,"models/dynamics.py",3032,403," self.dynamics(vid_embed_padded)[:, :-1, :-1]\n vid_embed_padded = jnp.pad(vid_embed, ((0, 0), (0, 0), (1, 0), (0, 0)))\n logits = self.dynamics(vid_embed_padded)[:, :, :-1]\n mask = jnp.ones(vid_embed.shape[:-1])\n # next_tokens = jnp.argmax(logits, axis=-1)\n # print(next_tokens.shape)\n # jax.debug.breakpoint()\n return dict(token_logits=logits, mask=mask)",python,selection_mouse +1128,2628364,"models/dynamics.py",2949,486,"ed_padded = jnp.pad(vid_embed, ((0, 0), (1, 0), (1, 0), (0, 0)))\n # logits = self.dynamics(vid_embed_padded)[:, :-1, :-1]\n vid_embed_padded = jnp.pad(vid_embed, ((0, 0), (0, 0), (1, 0), (0, 0)))\n logits = self.dynamics(vid_embed_padded)[:, :, :-1]\n mask = 
jnp.ones(vid_embed.shape[:-1])\n # next_tokens = jnp.argmax(logits, axis=-1)\n # print(next_tokens.shape)\n # jax.debug.breakpoint()\n return dict(token_logits=logits, mask=mask)",python,selection_mouse +1129,2628365,"models/dynamics.py",2948,487,"bed_padded = jnp.pad(vid_embed, ((0, 0), (1, 0), (1, 0), (0, 0)))\n # logits = self.dynamics(vid_embed_padded)[:, :-1, :-1]\n vid_embed_padded = jnp.pad(vid_embed, ((0, 0), (0, 0), (1, 0), (0, 0)))\n logits = self.dynamics(vid_embed_padded)[:, :, :-1]\n mask = jnp.ones(vid_embed.shape[:-1])\n # next_tokens = jnp.argmax(logits, axis=-1)\n # print(next_tokens.shape)\n # jax.debug.breakpoint()\n return dict(token_logits=logits, mask=mask)",python,selection_mouse +1130,2628382,"models/dynamics.py",2947,488,"mbed_padded = jnp.pad(vid_embed, ((0, 0), (1, 0), (1, 0), (0, 0)))\n # logits = self.dynamics(vid_embed_padded)[:, :-1, :-1]\n vid_embed_padded = jnp.pad(vid_embed, ((0, 0), (0, 0), (1, 0), (0, 0)))\n logits = self.dynamics(vid_embed_padded)[:, :, :-1]\n mask = jnp.ones(vid_embed.shape[:-1])\n # next_tokens = jnp.argmax(logits, axis=-1)\n # print(next_tokens.shape)\n # jax.debug.breakpoint()\n return dict(token_logits=logits, mask=mask)",python,selection_mouse +1131,2628438,"models/dynamics.py",2872,563,"bed += jnp.pad(act_embed, ((0, 0), (1, 0), (0, 0), (0, 0)))\n # vid_embed_padded = jnp.pad(vid_embed, ((0, 0), (1, 0), (1, 0), (0, 0)))\n # logits = self.dynamics(vid_embed_padded)[:, :-1, :-1]\n vid_embed_padded = jnp.pad(vid_embed, ((0, 0), (0, 0), (1, 0), (0, 0)))\n logits = self.dynamics(vid_embed_padded)[:, :, :-1]\n mask = jnp.ones(vid_embed.shape[:-1])\n # next_tokens = jnp.argmax(logits, axis=-1)\n # print(next_tokens.shape)\n # jax.debug.breakpoint()\n return dict(token_logits=logits, mask=mask)",python,selection_mouse +1132,2628438,"models/dynamics.py",2871,564,"mbed += jnp.pad(act_embed, ((0, 0), (1, 0), (0, 0), (0, 0)))\n # vid_embed_padded = jnp.pad(vid_embed, ((0, 0), (1, 0), (1, 0), (0, 0)))\n # logits = self.dynamics(vid_embed_padded)[:, :-1, :-1]\n vid_embed_padded = jnp.pad(vid_embed, ((0, 0), (0, 0), (1, 0), (0, 0)))\n logits = self.dynamics(vid_embed_padded)[:, :, :-1]\n mask = jnp.ones(vid_embed.shape[:-1])\n # next_tokens = jnp.argmax(logits, axis=-1)\n # print(next_tokens.shape)\n # jax.debug.breakpoint()\n return dict(token_logits=logits, mask=mask)",python,selection_mouse +1133,2628439,"models/dynamics.py",2870,565,"embed += jnp.pad(act_embed, ((0, 0), (1, 0), (0, 0), (0, 0)))\n # vid_embed_padded = jnp.pad(vid_embed, ((0, 0), (1, 0), (1, 0), (0, 0)))\n # logits = self.dynamics(vid_embed_padded)[:, :-1, :-1]\n vid_embed_padded = jnp.pad(vid_embed, ((0, 0), (0, 0), (1, 0), (0, 0)))\n logits = self.dynamics(vid_embed_padded)[:, :, :-1]\n mask = jnp.ones(vid_embed.shape[:-1])\n # next_tokens = jnp.argmax(logits, axis=-1)\n # print(next_tokens.shape)\n # jax.debug.breakpoint()\n return dict(token_logits=logits, mask=mask)",python,selection_mouse +1134,2628448,"models/dynamics.py",2868,567,"d_embed += jnp.pad(act_embed, ((0, 0), (1, 0), (0, 0), (0, 0)))\n # vid_embed_padded = jnp.pad(vid_embed, ((0, 0), (1, 0), (1, 0), (0, 0)))\n # logits = self.dynamics(vid_embed_padded)[:, :-1, :-1]\n vid_embed_padded = jnp.pad(vid_embed, ((0, 0), (0, 0), (1, 0), (0, 0)))\n logits = self.dynamics(vid_embed_padded)[:, :, :-1]\n mask = jnp.ones(vid_embed.shape[:-1])\n # next_tokens = jnp.argmax(logits, axis=-1)\n # print(next_tokens.shape)\n # jax.debug.breakpoint()\n return dict(token_logits=logits, mask=mask)",python,selection_mouse 
+1135,2628464,"models/dynamics.py",2867,568,"id_embed += jnp.pad(act_embed, ((0, 0), (1, 0), (0, 0), (0, 0)))\n # vid_embed_padded = jnp.pad(vid_embed, ((0, 0), (1, 0), (1, 0), (0, 0)))\n # logits = self.dynamics(vid_embed_padded)[:, :-1, :-1]\n vid_embed_padded = jnp.pad(vid_embed, ((0, 0), (0, 0), (1, 0), (0, 0)))\n logits = self.dynamics(vid_embed_padded)[:, :, :-1]\n mask = jnp.ones(vid_embed.shape[:-1])\n # next_tokens = jnp.argmax(logits, axis=-1)\n # print(next_tokens.shape)\n # jax.debug.breakpoint()\n return dict(token_logits=logits, mask=mask)",python,selection_mouse +1136,2628513,"models/dynamics.py",2806,629,"act_embed = self.action_up(batch[""latent_actions""])\n vid_embed += jnp.pad(act_embed, ((0, 0), (1, 0), (0, 0), (0, 0)))\n # vid_embed_padded = jnp.pad(vid_embed, ((0, 0), (1, 0), (1, 0), (0, 0)))\n # logits = self.dynamics(vid_embed_padded)[:, :-1, :-1]\n vid_embed_padded = jnp.pad(vid_embed, ((0, 0), (0, 0), (1, 0), (0, 0)))\n logits = self.dynamics(vid_embed_padded)[:, :, :-1]\n mask = jnp.ones(vid_embed.shape[:-1])\n # next_tokens = jnp.argmax(logits, axis=-1)\n # print(next_tokens.shape)\n # jax.debug.breakpoint()\n return dict(token_logits=logits, mask=mask)",python,selection_mouse +1137,2628523,"models/dynamics.py",2805,630," act_embed = self.action_up(batch[""latent_actions""])\n vid_embed += jnp.pad(act_embed, ((0, 0), (1, 0), (0, 0), (0, 0)))\n # vid_embed_padded = jnp.pad(vid_embed, ((0, 0), (1, 0), (1, 0), (0, 0)))\n # logits = self.dynamics(vid_embed_padded)[:, :-1, :-1]\n vid_embed_padded = jnp.pad(vid_embed, ((0, 0), (0, 0), (1, 0), (0, 0)))\n logits = self.dynamics(vid_embed_padded)[:, :, :-1]\n mask = jnp.ones(vid_embed.shape[:-1])\n # next_tokens = jnp.argmax(logits, axis=-1)\n # print(next_tokens.shape)\n # jax.debug.breakpoint()\n return dict(token_logits=logits, mask=mask)",python,selection_mouse +1138,2628535,"models/dynamics.py",2804,631," act_embed = self.action_up(batch[""latent_actions""])\n vid_embed += jnp.pad(act_embed, ((0, 0), (1, 0), (0, 0), (0, 0)))\n # vid_embed_padded = jnp.pad(vid_embed, ((0, 0), (1, 0), (1, 0), (0, 0)))\n # logits = self.dynamics(vid_embed_padded)[:, :-1, :-1]\n vid_embed_padded = jnp.pad(vid_embed, ((0, 0), (0, 0), (1, 0), (0, 0)))\n logits = self.dynamics(vid_embed_padded)[:, :, :-1]\n mask = jnp.ones(vid_embed.shape[:-1])\n # next_tokens = jnp.argmax(logits, axis=-1)\n # print(next_tokens.shape)\n # jax.debug.breakpoint()\n return dict(token_logits=logits, mask=mask)",python,selection_mouse +1139,2628591,"models/dynamics.py",2803,632," act_embed = self.action_up(batch[""latent_actions""])\n vid_embed += jnp.pad(act_embed, ((0, 0), (1, 0), (0, 0), (0, 0)))\n # vid_embed_padded = jnp.pad(vid_embed, ((0, 0), (1, 0), (1, 0), (0, 0)))\n # logits = self.dynamics(vid_embed_padded)[:, :-1, :-1]\n vid_embed_padded = jnp.pad(vid_embed, ((0, 0), (0, 0), (1, 0), (0, 0)))\n logits = self.dynamics(vid_embed_padded)[:, :, :-1]\n mask = jnp.ones(vid_embed.shape[:-1])\n # next_tokens = jnp.argmax(logits, axis=-1)\n # print(next_tokens.shape)\n # jax.debug.breakpoint()\n return dict(token_logits=logits, mask=mask)",python,selection_mouse +1140,2628592,"models/dynamics.py",2802,633," act_embed = self.action_up(batch[""latent_actions""])\n vid_embed += jnp.pad(act_embed, ((0, 0), (1, 0), (0, 0), (0, 0)))\n # vid_embed_padded = jnp.pad(vid_embed, ((0, 0), (1, 0), (1, 0), (0, 0)))\n # logits = self.dynamics(vid_embed_padded)[:, :-1, :-1]\n vid_embed_padded = jnp.pad(vid_embed, ((0, 0), (0, 0), (1, 0), (0, 0)))\n logits = 
self.dynamics(vid_embed_padded)[:, :, :-1]\n mask = jnp.ones(vid_embed.shape[:-1])\n # next_tokens = jnp.argmax(logits, axis=-1)\n # print(next_tokens.shape)\n # jax.debug.breakpoint()\n return dict(token_logits=logits, mask=mask)",python,selection_mouse +1141,2628592,"models/dynamics.py",2741,694," vid_embed = self.patch_embed(batch[""video_tokens""])\n act_embed = self.action_up(batch[""latent_actions""])\n vid_embed += jnp.pad(act_embed, ((0, 0), (1, 0), (0, 0), (0, 0)))\n # vid_embed_padded = jnp.pad(vid_embed, ((0, 0), (1, 0), (1, 0), (0, 0)))\n # logits = self.dynamics(vid_embed_padded)[:, :-1, :-1]\n vid_embed_padded = jnp.pad(vid_embed, ((0, 0), (0, 0), (1, 0), (0, 0)))\n logits = self.dynamics(vid_embed_padded)[:, :, :-1]\n mask = jnp.ones(vid_embed.shape[:-1])\n # next_tokens = jnp.argmax(logits, axis=-1)\n # print(next_tokens.shape)\n # jax.debug.breakpoint()\n return dict(token_logits=logits, mask=mask)",python,selection_mouse +1142,2628596,"models/dynamics.py",2740,695," vid_embed = self.patch_embed(batch[""video_tokens""])\n act_embed = self.action_up(batch[""latent_actions""])\n vid_embed += jnp.pad(act_embed, ((0, 0), (1, 0), (0, 0), (0, 0)))\n # vid_embed_padded = jnp.pad(vid_embed, ((0, 0), (1, 0), (1, 0), (0, 0)))\n # logits = self.dynamics(vid_embed_padded)[:, :-1, :-1]\n vid_embed_padded = jnp.pad(vid_embed, ((0, 0), (0, 0), (1, 0), (0, 0)))\n logits = self.dynamics(vid_embed_padded)[:, :, :-1]\n mask = jnp.ones(vid_embed.shape[:-1])\n # next_tokens = jnp.argmax(logits, axis=-1)\n # print(next_tokens.shape)\n # jax.debug.breakpoint()\n return dict(token_logits=logits, mask=mask)",python,selection_mouse +1143,2628653,"models/dynamics.py",2739,696," vid_embed = self.patch_embed(batch[""video_tokens""])\n act_embed = self.action_up(batch[""latent_actions""])\n vid_embed += jnp.pad(act_embed, ((0, 0), (1, 0), (0, 0), (0, 0)))\n # vid_embed_padded = jnp.pad(vid_embed, ((0, 0), (1, 0), (1, 0), (0, 0)))\n # logits = self.dynamics(vid_embed_padded)[:, :-1, :-1]\n vid_embed_padded = jnp.pad(vid_embed, ((0, 0), (0, 0), (1, 0), (0, 0)))\n logits = self.dynamics(vid_embed_padded)[:, :, :-1]\n mask = jnp.ones(vid_embed.shape[:-1])\n # next_tokens = jnp.argmax(logits, axis=-1)\n # print(next_tokens.shape)\n # jax.debug.breakpoint()\n return dict(token_logits=logits, mask=mask)",python,selection_mouse +1144,2628654,"models/dynamics.py",2738,697," vid_embed = self.patch_embed(batch[""video_tokens""])\n act_embed = self.action_up(batch[""latent_actions""])\n vid_embed += jnp.pad(act_embed, ((0, 0), (1, 0), (0, 0), (0, 0)))\n # vid_embed_padded = jnp.pad(vid_embed, ((0, 0), (1, 0), (1, 0), (0, 0)))\n # logits = self.dynamics(vid_embed_padded)[:, :-1, :-1]\n vid_embed_padded = jnp.pad(vid_embed, ((0, 0), (0, 0), (1, 0), (0, 0)))\n logits = self.dynamics(vid_embed_padded)[:, :, :-1]\n mask = jnp.ones(vid_embed.shape[:-1])\n # next_tokens = jnp.argmax(logits, axis=-1)\n # print(next_tokens.shape)\n # jax.debug.breakpoint()\n return dict(token_logits=logits, mask=mask)",python,selection_mouse +1145,2628707,"models/dynamics.py",2650,785," def __call__(self, batch: Dict[str, Any], training: bool = True) -> Dict[str, Any]:\n vid_embed = self.patch_embed(batch[""video_tokens""])\n act_embed = self.action_up(batch[""latent_actions""])\n vid_embed += jnp.pad(act_embed, ((0, 0), (1, 0), (0, 0), (0, 0)))\n # vid_embed_padded = jnp.pad(vid_embed, ((0, 0), (1, 0), (1, 0), (0, 0)))\n # logits = self.dynamics(vid_embed_padded)[:, :-1, :-1]\n vid_embed_padded = jnp.pad(vid_embed, ((0, 0), (0, 0), (1, 
0), (0, 0)))\n logits = self.dynamics(vid_embed_padded)[:, :, :-1]\n mask = jnp.ones(vid_embed.shape[:-1])\n # next_tokens = jnp.argmax(logits, axis=-1)\n # print(next_tokens.shape)\n # jax.debug.breakpoint()\n return dict(token_logits=logits, mask=mask)",python,selection_mouse +1146,2631100,"models/dynamics.py",3109,0,"",python,selection_mouse +1147,2632271,"genie.py",0,0,"",python,tab +1148,2632272,"genie.py",6456,0,"",python,selection_mouse +1149,2634085,"genie.py",5754,0,"",python,selection_mouse +1150,2637329,"genie.py",5743,0,"",python,selection_mouse +1151,2637507,"genie.py",5742,1," ",python,selection_mouse +1152,2637596,"genie.py",5742,2," =",python,selection_mouse +1153,2637615,"genie.py",5742,3," = ",python,selection_mouse +1154,2637631,"genie.py",5742,7," = self",python,selection_mouse +1155,2637683,"genie.py",5742,16," = self.dynamics",python,selection_mouse +1156,2637811,"genie.py",5742,17," = self.dynamics(",python,selection_mouse +1157,2638472,"genie.py",5753,0,"",python,selection_mouse +1158,2638831,"genie.py",5750,8,"dynamics",python,selection_mouse +1159,2640310,"models/dynamics.py",0,0,"",python,tab +1160,2640311,"models/dynamics.py",2662,0,"",python,selection_mouse +1161,2640449,"models/dynamics.py",2658,8,"__call__",python,selection_mouse +1162,2643928,"models/dynamics.py",2663,0,"",python,selection_mouse +1163,2645181,"models/dynamics.py",2797,0,"",python,selection_mouse +1164,2645192,"models/dynamics.py",2796,0,"",python,selection_command +1165,2732505,"genie.py",0,0,"",python,tab +1166,2732506,"genie.py",5741,0,"",python,selection_mouse +1167,2732628,"genie.py",5730,12,"dyna_outputs",python,selection_mouse +1168,2740978,"genie.py",6344,0,"",python,selection_mouse +1169,2741948,"genie.py",6520,0,"",python,selection_mouse +1170,2742602,"genie.py",6519,0,"",python,selection_mouse +1171,2742603,"genie.py",6518,0,"",python,selection_command +1172,2752639,"genie.py",6058,0,"",python,selection_mouse +1173,2767083,"genie.py",5418,0,"",python,selection_mouse +1174,2768176,"genie.py",5417,0,"",python,selection_mouse +1175,2931537,"genie.py",6057,0,"",python,selection_mouse +1176,3008598,"genie.py",6057,1,"",python,content +1177,3008763,"genie.py",6057,1,"",python,content +1178,3020454,"genie.py",6059,0,"",python,selection_mouse +1179,3049485,"genie.py",5469,0,"",python,selection_mouse +1180,3050655,"genie.py",5468,0,"",python,selection_mouse +1181,3053183,"genie.py",5456,0,"",python,selection_mouse +1182,3054101,"genie.py",5457,0,"",python,selection_mouse +1183,3056159,"genie.py",6059,0,"",python,selection_mouse +1184,3059483,"genie.py",6643,0,"",python,selection_mouse +1185,3201983,"TERMINAL",0,0,"srun",,terminal_focus +1186,3202096,"TERMINAL",0,0,"\r[tum_cte0515@hkn0707 jafar]$ ",,terminal_output +1187,3202830,"TERMINAL",0,0,"srun",,terminal_focus +1188,3202918,"TERMINAL",0,0,"\r(jafar) [tum_cte0515@hkn0901 jafar]$ ",,terminal_output +1189,3204239,"TERMINAL",0,0,"q",,terminal_output +1190,3204300,"TERMINAL",0,0,"[?25lu[?25h",,terminal_output +1191,3204407,"TERMINAL",0,0,"[?25le[?25h",,terminal_output +1192,3204465,"TERMINAL",0,0,"[?25lu[?25h",,terminal_output +1193,3204526,"TERMINAL",0,0,"[?25le[?25h",,terminal_output +1194,3204714,"TERMINAL",0,0,"[?25l[?2004l\r[?25h[?1049h(B[?7hEvery 1.0s: squeue --mehkn0901.localdomain: Mon Jul 21 15:19:11 2025JOBID PARTITION NAME USER ST\tTIME NODES NODELIST(REASON)3365092 accelerat interact tum_cte0 PD\t0:00\t 2 (Priority)3365091 accelerat interact tum_cte0 R51:46\t 1 hkn07073365095 dev_accel interact tum_cte0 R51:42\t 1 
hkn0901",,terminal_output +1195,3205735,"TERMINAL",0,0,"273",,terminal_output +1196,3206243,"TERMINAL",0,0,"[?1049l\r[?1l>]0;tum_cte0515@hkn0901:~/Projects/jafar[?2004h(jafar) [tum_cte0515@hkn0901 jafar]$ ",,terminal_output +1197,3213688,"sample.py",0,0,"",python,tab +1198,3215415,"sample.py",0,0,"",python,tab +1199,3216200,"sample.py",4241,0,"",python,selection_mouse +1200,3216204,"sample.py",4240,0,"",python,selection_command +1201,3217335,"sample.py",4207,0,"",python,selection_command +1202,3217479,"sample.py",4139,0,"",python,selection_command +1203,3217622,"sample.py",4207,0,"",python,selection_command +1204,3220479,"genie.py",0,0,"",python,tab +1205,3221270,"genie.py",0,0,"",python,tab +1206,3221809,"genie.py",6107,0,"",python,selection_mouse +1207,3222856,"genie.py",5468,0,"",python,selection_mouse +1208,3223474,"genie.py",5463,0,"",python,selection_mouse +1209,3223966,"genie.py",5429,0,"",python,selection_mouse +1210,3226767,"genie.py",0,0,"",python,tab +1211,3226767,"genie.py",5467,0,"",python,selection_mouse +1212,3227432,"genie.py",5468,0,"",python,selection_command +1213,3227899,"genie.py",5469,0,"",python,selection_command +1214,3228368,"genie.py",5468,1,"",python,content +1215,3229251,"genie.py",5468,0,"3",python,content +1216,3229252,"genie.py",5469,0,"",python,selection_keyboard +1217,3229263,"genie.py",5469,0,"2",python,content +1218,3229264,"genie.py",5470,0,"",python,selection_keyboard +1219,3230062,"genie.py",5469,0,"",python,selection_command +1220,3235261,"genie.py",0,0,"",python,tab +1221,3235262,"genie.py",5433,0,"",python,selection_mouse +1222,3236190,"genie.py",5432,0,"",python,selection_mouse +1223,3237557,"slurm/jobs/mihir/horeka/yolo-runs/sampling.sh",0,0,"",shellscript,tab +1224,3238975,"slurm/jobs/mihir/horeka/yolo-runs/sampling.sh",739,0,"",shellscript,selection_command +1225,3239703,"slurm/jobs/mihir/horeka/yolo-runs/sampling.sh",738,1,"",shellscript,content +1226,3240232,"slurm/jobs/mihir/horeka/yolo-runs/sampling.sh",737,1,"",shellscript,content +1227,3240355,"slurm/jobs/mihir/horeka/yolo-runs/sampling.sh",737,0,"2",shellscript,content +1228,3240355,"slurm/jobs/mihir/horeka/yolo-runs/sampling.sh",738,0,"",shellscript,selection_keyboard +1229,3241046,"slurm/jobs/mihir/horeka/yolo-runs/sampling.sh",737,0,"",shellscript,selection_command +1230,3262973,"TERMINAL",0,0,"#",,terminal_output +1231,3263115,"TERMINAL",0,0,"[?25l [?25h",,terminal_output +1232,3263490,"TERMINAL",0,0,"/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared//checkpoints/interactive/shift-spatial/3359231",,terminal_output +1233,3264078,"TERMINAL",0,0,"[?25l/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared//checkpoints/interactive/shift-spatial/3359231\r\n[?2004l\r]0;tum_cte0515@hkn0901:~/Projects/jafar[?2004h(jafar) [tum_cte0515@hkn0901 jafar]$ [?25h",,terminal_output +1234,3266103,"slurm/jobs/mihir/horeka/yolo-runs/sampling.sh",0,0,"",shellscript,tab +1235,3273004,"TERMINAL",0,0,"[?25lsh[?25h",,terminal_output +1236,3273068,"TERMINAL",0,0,"[?25lh[?25h",,terminal_output +1237,3273172,"TERMINAL",0,0,"[?25l [?25h",,terminal_output +1238,3273485,"TERMINAL",0,0,"slurm/jobs/mihir/horeka/yolo-runs/sampling.sh",,terminal_output +1239,3275890,"TERMINAL",0,0,"\rslurm/jobs/mihir/horeka/yolo-runs/sampling.sh/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared//checkpoints/interactive/shift-spatial/3359231",,terminal_output +1240,3278310,"TERMINAL",0,0,"/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared//checkpoints/interactive/shift-spatial/3359231",,terminal_output 
+1241,3279165,"TERMINAL",0,0,"\r",,terminal_output +1242,3279223,"TERMINAL",0,0,"",,terminal_output +1243,3280010,"TERMINAL",0,0,"",,terminal_output +1244,3280426,"TERMINAL",0,0," /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared//checkpoints/interactive/shift-spatial/3359231",,terminal_output +1245,3281201,"TERMINAL",0,0,"[?25l\r\r\n[?2004l\r[?25h\r\n# Log the sbatch script\r\ncat $0\r\n\r\nmodule unload mpi/openmpi/5.0\r\nmodule unload devel/cuda/12.4\r\n# source .venv/bin/activate\r\n\r\narray_records_dir=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_new/open_ai_minecraft_arrayrecords_chunked\r\n\r\njob_name=$SLURM_JOB_NAME\r\nslurm_job_id=$SLURM_JOB_ID\r\n\r\nCHECKPOINT_DIR=$ws_dir/checkpoints/$job_name/$slurm_job_id\r\nmkdir -p $CHECKPOINT_DIR\r\n\r\ntokenizer_ckpt_dir=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/big-runs/tokenizer-lr-scaling/train_tokenizer_lr_sweep_1e-4\r\ndynamics_ckpt_dir=$1\r\necho $dynamics_ckpt_dir\r\n\r\nenv | grep SLURM\r\n\r\npython sample.py \\r\n --checkpoint $dynamics_ckpt_dir \\r\n --dyna_dim=128 \\r\n --dyna_num_blocks=2 \\r\n --dyna_num_heads=4 \\r\n --seq_len=2 \\r\n --data_dir $array_records_dir\r\n\r\n",,terminal_output +1246,3281391,"TERMINAL",0,0,"/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared//checkpoints/interactive/shift-spatial/3359231\r\nSLURM_STEP_NUM_TASKS=1\r\nSLURM_JOB_USER=tum_cte0515\r\nSLURM_TASKS_PER_NODE=1\r\nSLURM_JOB_UID=999226\r\nSLURM_TASK_PID=2919560\r\nSLURM_JOB_GPUS=0\r\nSLURM_LOCALID=0\r\nSLURM_SUBMIT_DIR=/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar\r\nSLURMD_NODENAME=hkn0901\r\nSLURM_JOB_START_TIME=1753100849\r\nSLURM_STEP_NODELIST=hkn0901\r\nSLURM_CLUSTER_NAME=hk\r\nSLURM_JOB_END_TIME=1753104449\r\nSLURM_PMI2_SRUN_PORT=34047\r\nSLURM_CPUS_ON_NODE=6\r\nSLURM_JOB_CPUS_PER_NODE=6\r\nSLURM_GPUS_ON_NODE=1\r\nSLURM_GTIDS=0\r\nSLURM_JOB_PARTITION=dev_accelerated-h100\r\nSLURM_TRES_PER_TASK=cpu=5\r\nSLURM_OOM_KILL_STEP=0\r\nSLURM_JOB_NUM_NODES=1\r\nSLURM_STEPID=4294967290\r\nSLURM_JOBID=3365095\r\nSLURM_PTY_PORT=38245\r\nSLURM_JOB_QOS=normal\r\nSLURM_LAUNCH_NODE_IPADDR=10.0.7.198\r\nSLURM_PTY_WIN_ROW=38\r\nSLURM_PMI2_PROC_MAPPING=(vector,(0,1,1))\r\nSLURMD_DEBUG=2\r\nSLURM_PROCID=0\r\nSLURM_CPUS_PER_TASK=5\r\nSLURM_NTASKS=1\r\nSLURM_TOPOLOGY_ADDR=hkibb.hkibbi4.hkibbi4e1.hkn0901\r\nSLURM_TOPOLOGY_ADDR_PATTERN=switch.switch.switch.node\r\nSLURM_SRUN_COMM_HOST=10.0.7.198\r\nSLURM_SCRIPT_CONTEXT=prolog_task\r\nSLURM_PTY_WIN_COL=197\r\nSLURM_NODELIST=hkn0901\r\nSLURM_SRUN_COMM_PORT=35279\r\nSLURM_STEP_ID=4294967290\r\nSLURM_JOB_ACCOUNT=hk-project-p0023960\r\nSLURM_PRIO_PROCESS=0\r\nSLURM_NPROCS=1\r\nSLURM_NNODES=1\r\nSLURM_SUBMIT_HOST=hkn1990.localdomain\r\nSLURM_JOB_ID=3365095\r\nSLURM_NODEID=0\r\nSLURM_STEP_NUM_NODES=1\r\nSLURM_STEP_TASKS_PER_NODE=1\r\nSLURM_MPI_TYPE=pmi2\r\nSLURM_PMI2_STEP_NODES=hkn0901\r\nSLURM_CONF=/etc/slurm/slurm.conf\r\nSLURM_JOB_NAME=interactive\r\nSLURM_NTASKS_PER_NODE=1\r\nSLURM_STEP_LAUNCHER_PORT=35279\r\nSLURM_JOB_GID=502226\r\nSLURM_JOB_NODELIST=hkn0901\r\n",,terminal_output +1247,3284887,"slurm/jobs/mihir/horeka/yolo-runs/sampling.sh",0,0,"",shellscript,tab +1248,3288254,"slurm/jobs/mihir/horeka/yolo-runs/sampling.sh",756,0,"",shellscript,selection_mouse +1249,3290235,"slurm/jobs/mihir/horeka/yolo-runs/sampling.sh",754,0,"",shellscript,selection_mouse +1250,3290890,"slurm/jobs/mihir/horeka/yolo-runs/sampling.sh",738,0,"",shellscript,selection_mouse 
+1251,3291221,"TERMINAL",0,0,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/tyro/_parsers.py:347: UserWarning: The field `param-dtype` is annotated with type ``, but the default value `` has type ``. We'll try to handle this gracefully, but it may cause unexpected behavior.\r\n warnings.warn(message)\r\n/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/tyro/_parsers.py:347: UserWarning: The field `dtype` is annotated with type ``, but the default value `` has type ``. We'll try to handle this gracefully, but it may cause unexpected behavior.\r\n warnings.warn(message)\r\n",,terminal_output +1252,3294823,"TERMINAL",0,0,"2025-07-21 15:20:41.381106: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +1253,3305169,"TERMINAL",0,0,"2025-07-21 15:20:51.834548: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +1254,3309463,"TERMINAL",0,0,"2025-07-21 15:20:56.025407: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +1255,3311001,"TERMINAL",0,0,"2025-07-21 15:20:57.652265: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +1256,3315614,"TERMINAL",0,0,"2025-07-21 15:21:02.211029: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +1257,3319202,"TERMINAL",0,0,"2025-07-21 15:21:05.868774: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +1258,3321913,"TERMINAL",0,0,"2025-07-21 15:21:08.579770: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +1259,3322828,"TERMINAL",0,0,"bash",,terminal_focus +1260,3322896,"TERMINAL",0,0,"2025-07-21 15:21:09.460264: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. 
Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +1261,3324324,"TERMINAL",0,0,"ls",,terminal_command +1262,3324374,"TERMINAL",0,0,"]633;E;2025-07-21 15:21:10 ls;a9849893-e54a-428b-8c10-ef7068f4e31e]633;C",,terminal_output +1263,3324474,"TERMINAL",0,0,"data frame-knoms.png generation_1752857557.2350962.gif gifs models read_tf_record.py scripts_cremers slurm-3359334.out train_lam.py weekend-job-requeuer.sh\r\ndebug frame.png generation_1752857706.7504938.gif input_pipeline overfit_dir requirements-franz.txt scripts_horeka slurm-3359338.out train_tokenizer.py weekend-job-starter.sh\r\ndiff.diff frames generation_1752918064.411735.gif LICENSE __pycache__ requirements.txt slurm tests utils\r\ndiff.log generate_dataset.py genie.py logs README.md sample.py slurm-3359333.out train_dynamics.py wandb\r\n]0;tum_cte0515@hkn1990:~/Projects/jafar]633;D;0",,terminal_output +1264,3325546,"TERMINAL",0,0,"2025-07-21 15:21:12.210384: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +1265,3327181,"TERMINAL",0,0,"2025-07-21 15:21:13.798142: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +1266,3328625,"TERMINAL",0,0,"2025-07-21 15:21:15.289754: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +1267,3331719,"TERMINAL",0,0,"2025-07-21 15:21:18.386666: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. 
Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +1268,3332577,"TERMINAL",0,0,"mv *.gif gifs/",,terminal_command +1269,3332618,"TERMINAL",0,0,"]633;E;2025-07-21 15:21:19 mv *.gif gifs/;a9849893-e54a-428b-8c10-ef7068f4e31e]633;C",,terminal_output +1270,3332651,"TERMINAL",0,0,"]0;tum_cte0515@hkn1990:~/Projects/jafar]633;D;0]633;P;Cwd=/home/hk-project-p0023960/tum_cte0515/Projects/jafar",,terminal_output +1271,3333843,"TERMINAL",0,0,"ls",,terminal_command +1272,3333859,"TERMINAL",0,0,"]633;E;2025-07-21 15:21:20 ls;a9849893-e54a-428b-8c10-ef7068f4e31e]633;Cdata diff.log frames gifs logs __pycache__ requirements-franz.txt scripts_cremers slurm-3359333.out tests train_tokenizer.py weekend-job-requeuer.sh\r\ndebug frame-knoms.png generate_dataset.py input_pipeline models README.md requirements.txt scripts_horeka slurm-3359334.out train_dynamics.py utils weekend-job-starter.sh\r\ndiff.diff frame.png genie.py LICENSE overfit_dir read_tf_record.py sample.py slurm slurm-3359338.out train_lam.py wandb\r\n]0;tum_cte0515@hkn1990:~/Projects/jafar]633;D;0",,terminal_output +1273,3334759,"TERMINAL",0,0,"Traceback (most recent call last):\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/sample.py"", line 121, in \r\n restored = checkpoint_manager.restore(\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/orbax/checkpoint/checkpoint_manager.py"", line 1563, in restore\r\n raise FileNotFoundError(f'No steps found in {self.directory}.')\r\nFileNotFoundError: No steps found in /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/interactive/shift-spatial/3359231.\r\n",,terminal_output +1274,3335884,"TERMINAL",0,0,"]0;tum_cte0515@hkn0901:~/Projects/jafar[?2004h(jafar) [tum_cte0515@hkn0901 jafar]$ ",,terminal_output +1275,3338095,"TERMINAL",0,0,"srun",,terminal_focus +1276,3353818,"TERMINAL",0,0,"bash",,terminal_focus +1277,3356726,"TERMINAL",0,0,"cd $ws_dir",,terminal_command +1278,3357073,"TERMINAL",0,0,"ls",,terminal_command +1279,3357123,"TERMINAL",0,0,"]633;E;2025-07-21 15:21:43 ls;a9849893-e54a-428b-8c10-ef7068f4e31e]633;C",,terminal_output +1280,3357200,"TERMINAL",0,0,"checkpoints count_items.sh data data_new huggingface logs possibly_corrupt_files_in_this_workspace.txt scripts\r\n]0;tum_cte0515@hkn1990:/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared]633;D;0",,terminal_output +1281,3360246,"TERMINAL",0,0,"cd checkpoints/",,terminal_command +1282,3360483,"TERMINAL",0,0,"ls",,terminal_command +1283,3360529,"TERMINAL",0,0,"]633;E;2025-07-21 15:21:47 ls;a9849893-e54a-428b-8c10-ef7068f4e31e]633;C",,terminal_output +1284,3360620,"TERMINAL",0,0,"0000 3290391 3292258 3292334 3294601 3296574 3297582 3299016 3299258 3300663 3301031 3310436 3313565 coinrun lam_ckpt_dir train_dyn_new_arch-bugfixed-spatial-shift\r\n3290283 3290392 3292328 3292335 3294602 3296575 3297586 3299062 3299259 3300672 3306801 3310437 3313570 debug lam_main_test train_dyn_new_arch-bugfixed-temporal-shift\r\n3290284 3290439 3292329 3292336 3294603 3297569 3297606 3299063 3299272 3301025 3307618 3311671 3313571 dyn tokenizer train_dyn_yolorun_new_arch\r\n3290295 3290440 3292330 3292337 3296502 3297575 3297671 3299065 3299579 3301026 3307619 3311672 3313572 dynamics_ckpt_dir tokenizer_ckpt_dir train_lam_minecraft_overfit_sample\r\n3290296 3291405 3292331 3292338 3296540 3297576 3297693 3299066 3300233 3301027 3309662 3313562 
3316022 interactive train_dynamics_lr_schedule_const train_tokenizer_batch_size_scaling_16_node\r\n3290366 3292213 3292332 3292339 3296571 3297577 3297706 3299068 3300290 3301029 3309663 3313563 big-runs lam train_dynamics_lr_schedule_cos train_tokenizer_minecraft_overfit_sample\r\n3290367 3292221 3292333 3294600 3296573 3297578 3297727 3299069 3300658 3301030 3309699 3313564 checkpoints_alfred lam-1-action train_dynamics_lr_schedule_wsd wrap\r\n]0;tum_cte0515@hkn1990:/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints]633;D;0",,terminal_output +1285,3362640,"TERMINAL",0,0,"\r(jafar) [tum_cte0515@hkn0901 jafar]$ \r(jafar) [tum_cte0515@hkn0901 jafar]$ \r(jafar) [tum_cte0515@hkn0901 jafar]$ ",,terminal_output +1286,3363978,"TERMINAL",0,0,"cd interactive/",,terminal_command +1287,3364263,"TERMINAL",0,0,"ls",,terminal_command +1288,3364314,"TERMINAL",0,0,"]633;E;2025-07-21 15:21:50 ls;a9849893-e54a-428b-8c10-ef7068f4e31e]633;C",,terminal_output +1289,3364392,"TERMINAL",0,0,"3347289 3350418 3352994 3352996 3353884 3353924 3355596 3357147 3357893 3357894 3359231 3359232 3359275 3365095 shift-spatial shift-temporal\r\n]0;tum_cte0515@hkn1990:/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/interactive]633;D;0",,terminal_output +1290,3367593,"TERMINAL",0,0,"cd shift-spatial/",,terminal_command +1291,3368374,"TERMINAL",0,0,"pwd",,terminal_command +1292,3369925,"TERMINAL",0,0,"ls",,terminal_command +1293,3369935,"TERMINAL",0,0,"]633;E;2025-07-21 15:21:56 ls;a9849893-e54a-428b-8c10-ef7068f4e31e]633;C000800 000900 001000 3359231\r\n]0;tum_cte0515@hkn1990:/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/interactive/shift-spatial]633;D;0",,terminal_output +1294,3373097,"TERMINAL",0,0,"srun",,terminal_focus +1295,3373610,"TERMINAL",0,0,"bash",,terminal_focus +1296,3375229,"TERMINAL",0,0,"cd 3359231/",,terminal_command +1297,3375525,"TERMINAL",0,0,"ls",,terminal_command +1298,3376794,"TERMINAL",0,0,"cd ..",,terminal_command +1299,3377023,"TERMINAL",0,0,"ls",,terminal_command +1300,3377044,"TERMINAL",0,0,"]633;E;2025-07-21 15:22:03 ls;a9849893-e54a-428b-8c10-ef7068f4e31e]633;C000800 000900 001000 3359231\r\n]0;tum_cte0515@hkn1990:/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/interactive/shift-spatial]633;D;0",,terminal_output +1301,3381709,"TERMINAL",0,0,"rm -rf 3359231/",,terminal_command +1302,3382452,"TERMINAL",0,0,"ls",,terminal_command +1303,3382463,"TERMINAL",0,0,"]633;E;2025-07-21 15:22:09 ls;a9849893-e54a-428b-8c10-ef7068f4e31e]633;C000800 000900 001000\r\n]0;tum_cte0515@hkn1990:/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/interactive/shift-spatial]633;D;0",,terminal_output +1304,3384228,"TERMINAL",0,0,"pwd",,terminal_command +1305,3386105,"TERMINAL",0,0,"srun",,terminal_focus +1306,3386662,"TERMINAL",0,0,"sh slurm/jobs/mihir/horeka/yolo-runs/sampling.sh /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared//checkpoints/interactive/shift-spatial/3359231",,terminal_output +1307,3387774,"TERMINAL",0,0,"\r\n\r",,terminal_output +1308,3388327,"TERMINAL",0,0,"/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/interactive/shift-spatial",,terminal_output +1309,3388735,"TERMINAL",0,0,"/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/interactive/shift-spatial\r\n[?2004l\r\r\n# Log the sbatch script\r\ncat $0\r\n\r\nmodule unload mpi/openmpi/5.0\r\nmodule unload devel/cuda/12.4\r\n# source 
.venv/bin/activate\r\n\r\narray_records_dir=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_new/open_ai_minecraft_arrayrecords_chunked\r\n\r\njob_name=$SLURM_JOB_NAME\r\nslurm_job_id=$SLURM_JOB_ID\r\n\r\nCHECKPOINT_DIR=$ws_dir/checkpoints/$job_name/$slurm_job_id\r\nmkdir -p $CHECKPOINT_DIR\r\n\r\ntokenizer_ckpt_dir=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/big-runs/tokenizer-lr-scaling/train_tokenizer_lr_sweep_1e-4\r\ndynamics_ckpt_dir=$1\r\necho $dynamics_ckpt_dir\r\n\r\nenv | grep SLURM\r\n\r\npython sample.py \\r\n --checkpoint $dynamics_ckpt_dir \\r\n --dyna_dim=128 \\r\n --dyna_num_blocks=2 \\r\n --dyna_num_heads=4 \\r\n --seq_len=2 \\r\n --data_dir $array_records_dir\r\n\r\n",,terminal_output +1310,3388916,"TERMINAL",0,0,"/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/interactive/shift-spatial\r\nSLURM_STEP_NUM_TASKS=1\r\nSLURM_JOB_USER=tum_cte0515\r\nSLURM_TASKS_PER_NODE=1\r\nSLURM_JOB_UID=999226\r\nSLURM_TASK_PID=2919560\r\nSLURM_JOB_GPUS=0\r\nSLURM_LOCALID=0\r\nSLURM_SUBMIT_DIR=/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar\r\nSLURMD_NODENAME=hkn0901\r\nSLURM_JOB_START_TIME=1753100849\r\nSLURM_STEP_NODELIST=hkn0901\r\nSLURM_CLUSTER_NAME=hk\r\nSLURM_JOB_END_TIME=1753104449\r\nSLURM_PMI2_SRUN_PORT=34047\r\nSLURM_CPUS_ON_NODE=6\r\nSLURM_JOB_CPUS_PER_NODE=6\r\nSLURM_GPUS_ON_NODE=1\r\nSLURM_GTIDS=0\r\nSLURM_JOB_PARTITION=dev_accelerated-h100\r\nSLURM_TRES_PER_TASK=cpu=5\r\nSLURM_OOM_KILL_STEP=0\r\nSLURM_JOB_NUM_NODES=1\r\nSLURM_STEPID=4294967290\r\nSLURM_JOBID=3365095\r\nSLURM_PTY_PORT=38245\r\nSLURM_JOB_QOS=normal\r\nSLURM_LAUNCH_NODE_IPADDR=10.0.7.198\r\nSLURM_PTY_WIN_ROW=38\r\nSLURM_PMI2_PROC_MAPPING=(vector,(0,1,1))\r\nSLURMD_DEBUG=2\r\nSLURM_PROCID=0\r\nSLURM_CPUS_PER_TASK=5\r\nSLURM_NTASKS=1\r\nSLURM_TOPOLOGY_ADDR=hkibb.hkibbi4.hkibbi4e1.hkn0901\r\nSLURM_TOPOLOGY_ADDR_PATTERN=switch.switch.switch.node\r\nSLURM_SRUN_COMM_HOST=10.0.7.198\r\nSLURM_SCRIPT_CONTEXT=prolog_task\r\nSLURM_PTY_WIN_COL=197\r\nSLURM_NODELIST=hkn0901\r\nSLURM_SRUN_COMM_PORT=35279\r\nSLURM_STEP_ID=4294967290\r\nSLURM_JOB_ACCOUNT=hk-project-p0023960\r\nSLURM_PRIO_PROCESS=0\r\nSLURM_NPROCS=1\r\nSLURM_NNODES=1\r\nSLURM_SUBMIT_HOST=hkn1990.localdomain\r\nSLURM_JOB_ID=3365095\r\nSLURM_NODEID=0\r\nSLURM_STEP_NUM_NODES=1\r\nSLURM_STEP_TASKS_PER_NODE=1\r\nSLURM_MPI_TYPE=pmi2\r\nSLURM_PMI2_STEP_NODES=hkn0901\r\nSLURM_CONF=/etc/slurm/slurm.conf\r\nSLURM_JOB_NAME=interactive\r\nSLURM_NTASKS_PER_NODE=1\r\nSLURM_STEP_LAUNCHER_PORT=35279\r\nSLURM_JOB_GID=502226\r\nSLURM_JOB_NODELIST=hkn0901\r\n",,terminal_output +1311,3390466,"TERMINAL",0,0,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/tyro/_parsers.py:347: UserWarning: The field `param-dtype` is annotated with type ``, but the default value `` has type ``. We'll try to handle this gracefully, but it may cause unexpected behavior.\r\n warnings.warn(message)\r\n/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/tyro/_parsers.py:347: UserWarning: The field `dtype` is annotated with type ``, but the default value `` has type ``. 
We'll try to handle this gracefully, but it may cause unexpected behavior.\r\n warnings.warn(message)\r\n",,terminal_output +1312,3391683,"TERMINAL",0,0,"bash",,terminal_focus +1313,3393091,"TERMINAL",0,0,"2025-07-21 15:22:19.722455: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +1314,3395034,"TERMINAL",0,0,"queue",,terminal_command +1315,3395084,"TERMINAL",0,0,"]633;E;2025-07-21 15:22:21 queue;a9849893-e54a-428b-8c10-ef7068f4e31e]633;C",,terminal_output +1316,3395150,"TERMINAL",0,0,"[?1049h(B[?7hEvery 1.0s: squeue --mehkn1990.localdomain: Mon Jul 21 15:22:21 2025JOBID PARTITION NAME USER ST\tTIME NODES NODELIST(REASON)3365092 accelerat interact tum_cte0 PD\t0:00\t 2 (Priority)3365091 accelerat interact tum_cte0 R54:56\t 1 hkn07073365095 dev_accel interact tum_cte0 R54:52\t 1 hkn0901",,terminal_output +1317,3396199,"TERMINAL",0,0,"273",,terminal_output +1318,3397325,"TERMINAL",0,0,"384",,terminal_output +1319,3397468,"TERMINAL",0,0,"[?1049l\r[?1l>]0;tum_cte0515@hkn1990:/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/interactive/shift-spatial]633;D;0",,terminal_output +1320,3400364,"TERMINAL",0,0,"cd ..",,terminal_command +1321,3400685,"TERMINAL",0,0,"ls",,terminal_command +1322,3400700,"TERMINAL",0,0,"]633;E;2025-07-21 15:22:27 ls;a9849893-e54a-428b-8c10-ef7068f4e31e]633;C3347289 3350418 3352994 3352996 3353884 3353924 3355596 3357147 3357893 3357894 3359231 3359232 3359275 3365095 shift-spatial shift-temporal\r\n]0;tum_cte0515@hkn1990:/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/interactive]633;D;0",,terminal_output +1323,3403769,"TERMINAL",0,0,"2025-07-21 15:22:30.439659: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +1324,3405208,"TERMINAL",0,0,"cd shift-",,terminal_command +1325,3408536,"TERMINAL",0,0,"2025-07-21 15:22:35.190211: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +1326,3410024,"TERMINAL",0,0,"2025-07-21 15:22:36.691238: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +1327,3411005,"TERMINAL",0,0,"cd shift-spatial/",,terminal_command +1328,3412269,"TERMINAL",0,0,"ls",,terminal_command +1329,3415146,"TERMINAL",0,0,"2025-07-21 15:22:41.802983: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. 
Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +1330,3416961,"TERMINAL",0,0,"srun",,terminal_focus +1331,3419126,"TERMINAL",0,0,"2025-07-21 15:22:45.794301: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +1332,3422207,"TERMINAL",0,0,"2025-07-21 15:22:48.807581: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +1333,3423027,"TERMINAL",0,0,"2025-07-21 15:22:49.661881: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +1334,3425689,"TERMINAL",0,0,"2025-07-21 15:22:52.345292: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +1335,3427376,"TERMINAL",0,0,"2025-07-21 15:22:54.041897: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +1336,3428966,"TERMINAL",0,0,"2025-07-21 15:22:55.622274: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +1337,3432384,"TERMINAL",0,0,"2025-07-21 15:22:59.053239: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. 
Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +1338,3435407,"TERMINAL",0,0,"WARNING:absl:Missing metrics for step 1000\r\nERROR:absl:File /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/interactive/shift-spatial/001000/metrics/metrics not found.\r\nWARNING:absl:Missing metrics for step 800\r\nERROR:absl:File /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/interactive/shift-spatial/000800/metrics/metrics not found.\r\nWARNING:absl:Missing metrics for step 900\r\nERROR:absl:File /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/interactive/shift-spatial/000900/metrics/metrics not found.\r\n",,terminal_output +1339,3444739,"TERMINAL",0,0,"autoreg sampling...\r\n",,terminal_output +1340,3448385,"genie.py",0,0,"",python,tab +1341,3448386,"genie.py",6108,0,"",python,selection_mouse +1342,3449975,"genie.py",5555,0,"",python,selection_mouse +1343,3450123,"genie.py",5541,15,"token_idxs_full",python,selection_mouse +1344,3450234,"genie.py",5541,68,"token_idxs_full,\n ""latent_actions"": action_tokens",python,selection_mouse +1345,3450253,"genie.py",5541,90,"token_idxs_full,\n ""latent_actions"": action_tokens\n }",python,selection_mouse +1346,3450317,"genie.py",5541,230,"token_idxs_full,\n ""latent_actions"": action_tokens\n }\n # jax.debug.print(""token_idxs_full 0: {}"", token_idxs_full[0,:,0])\n dyna_outputs = self.dynamics(dyna_inputs",python,selection_mouse +1347,3450317,"genie.py",5541,372,"token_idxs_full,\n ""latent_actions"": action_tokens\n }\n # jax.debug.print(""token_idxs_full 0: {}"", token_idxs_full[0,:,0])\n dyna_outputs = self.dynamics(dyna_inputs, training=False)\n # # dyna_outputs[""token_logits""]: (B, t, N, vocab_size)\n # # We want the logits for the last ",python,selection_mouse +1348,3450318,"genie.py",5541,510,"token_idxs_full,\n ""latent_actions"": action_tokens\n }\n # jax.debug.print(""token_idxs_full 0: {}"", token_idxs_full[0,:,0])\n dyna_outputs = self.dynamics(dyna_inputs, training=False)\n # # dyna_outputs[""token_logits""]: (B, t, N, vocab_size)\n # # We want the logits for the last time step (frame t-1 predicting t)\n # jax.debug.breakpoint()\n next_token_logits = dyna_outputs[""token_logits",python,selection_mouse +1349,3450335,"genie.py",5541,617,"token_idxs_full,\n ""latent_actions"": action_tokens\n }\n # jax.debug.print(""token_idxs_full 0: {}"", token_idxs_full[0,:,0])\n dyna_outputs = self.dynamics(dyna_inputs, training=False)\n # # dyna_outputs[""token_logits""]: (B, t, N, vocab_size)\n # # We want the logits for the last time step (frame t-1 predicting t)\n # jax.debug.breakpoint()\n next_token_logits = dyna_outputs[""token_logits""][:, t, n, :].astype(jnp.float32) # (B, 1, vocab_size)\n\n # Sample or argmax for each patch",python,selection_mouse +1350,3450387,"genie.py",5541,651,"token_idxs_full,\n ""latent_actions"": action_tokens\n }\n # jax.debug.print(""token_idxs_full 0: {}"", token_idxs_full[0,:,0])\n dyna_outputs = self.dynamics(dyna_inputs, training=False)\n # # dyna_outputs[""token_logits""]: (B, t, N, vocab_size)\n # # We want the logits for the last time step (frame t-1 predicting t)\n # jax.debug.breakpoint()\n next_token_logits = dyna_outputs[""token_logits""][:, t, n, :].astype(jnp.float32) # (B, 1, vocab_size)\n\n # Sample or argmax for each patch\n if sample_argmax:",python,selection_mouse +1351,3450388,"genie.py",5541,713,"token_idxs_full,\n ""latent_actions"": 
action_tokens\n }\n # jax.debug.print(""token_idxs_full 0: {}"", token_idxs_full[0,:,0])\n dyna_outputs = self.dynamics(dyna_inputs, training=False)\n # # dyna_outputs[""token_logits""]: (B, t, N, vocab_size)\n # # We want the logits for the last time step (frame t-1 predicting t)\n # jax.debug.breakpoint()\n next_token_logits = dyna_outputs[""token_logits""][:, t, n, :].astype(jnp.float32) # (B, 1, vocab_size)\n\n # Sample or argmax for each patch\n if sample_argmax:\n next_token = jnp.argmax(next_token_logits",python,selection_mouse +1352,3450405,"genie.py",5541,755,"token_idxs_full,\n ""latent_actions"": action_tokens\n }\n # jax.debug.print(""token_idxs_full 0: {}"", token_idxs_full[0,:,0])\n dyna_outputs = self.dynamics(dyna_inputs, training=False)\n # # dyna_outputs[""token_logits""]: (B, t, N, vocab_size)\n # # We want the logits for the last time step (frame t-1 predicting t)\n # jax.debug.breakpoint()\n next_token_logits = dyna_outputs[""token_logits""][:, t, n, :].astype(jnp.float32) # (B, 1, vocab_size)\n\n # Sample or argmax for each patch\n if sample_argmax:\n next_token = jnp.argmax(next_token_logits, axis=-1) # (B, 1)\n else:",python,selection_mouse +1353,3450466,"genie.py",5541,812,"token_idxs_full,\n ""latent_actions"": action_tokens\n }\n # jax.debug.print(""token_idxs_full 0: {}"", token_idxs_full[0,:,0])\n dyna_outputs = self.dynamics(dyna_inputs, training=False)\n # # dyna_outputs[""token_logits""]: (B, t, N, vocab_size)\n # # We want the logits for the last time step (frame t-1 predicting t)\n # jax.debug.breakpoint()\n next_token_logits = dyna_outputs[""token_logits""][:, t, n, :].astype(jnp.float32) # (B, 1, vocab_size)\n\n # Sample or argmax for each patch\n if sample_argmax:\n next_token = jnp.argmax(next_token_logits, axis=-1) # (B, 1)\n else:\n rng, step_rng = jax.random.split(rng",python,selection_mouse +1354,3450900,"genie.py",6351,0,"",python,selection_mouse +1355,3450901,"genie.py",6350,3,"rng",python,selection_mouse +1356,3451091,"genie.py",6296,57,"\n rng, step_rng = jax.random.split(rng",python,selection_mouse +1357,3451108,"genie.py",6237,116,"next_token_logits, axis=-1) # (B, 1)\n else:\n rng, step_rng = jax.random.split(rng",python,selection_mouse +1358,3451126,"genie.py",6158,195,"\n if sample_argmax:\n next_token = jnp.argmax(next_token_logits, axis=-1) # (B, 1)\n else:\n rng, step_rng = jax.random.split(rng",python,selection_mouse +1359,3451140,"genie.py",6108,245,"\n # Sample or argmax for each patch\n if sample_argmax:\n next_token = jnp.argmax(next_token_logits, axis=-1) # (B, 1)\n else:\n rng, step_rng = jax.random.split(rng",python,selection_mouse +1360,3451194,"genie.py",6039,314,"token_logits""][:, t, n, :].astype(jnp.float32) # (B, 1, vocab_size)\n\n # Sample or argmax for each patch\n if sample_argmax:\n next_token = jnp.argmax(next_token_logits, axis=-1) # (B, 1)\n else:\n rng, step_rng = jax.random.split(rng",python,selection_mouse +1361,3451194,"genie.py",5988,365,"\n next_token_logits = dyna_outputs[""token_logits""][:, t, n, :].astype(jnp.float32) # (B, 1, vocab_size)\n\n # Sample or argmax for each patch\n if sample_argmax:\n next_token = jnp.argmax(next_token_logits, axis=-1) # (B, 1)\n else:\n rng, step_rng = jax.random.split(rng",python,selection_mouse +1362,3451195,"genie.py",5840,513,"B, t, N, vocab_size)\n # # We want the logits for the last time step (frame t-1 predicting t)\n # jax.debug.breakpoint()\n next_token_logits = dyna_outputs[""token_logits""][:, t, n, :].astype(jnp.float32) # (B, 1, vocab_size)\n\n # Sample or argmax 
for each patch\n if sample_argmax:\n next_token = jnp.argmax(next_token_logits, axis=-1) # (B, 1)\n else:\n rng, step_rng = jax.random.split(rng",python,selection_mouse +1363,3451206,"genie.py",5760,593,"dyna_inputs, training=False)\n # # dyna_outputs[""token_logits""]: (B, t, N, vocab_size)\n # # We want the logits for the last time step (frame t-1 predicting t)\n # jax.debug.breakpoint()\n next_token_logits = dyna_outputs[""token_logits""][:, t, n, :].astype(jnp.float32) # (B, 1, vocab_size)\n\n # Sample or argmax for each patch\n if sample_argmax:\n next_token = jnp.argmax(next_token_logits, axis=-1) # (B, 1)\n else:\n rng, step_rng = jax.random.split(rng",python,selection_mouse +1364,3451267,"genie.py",5684,669,": {}"", token_idxs_full[0,:,0])\n dyna_outputs = self.dynamics(dyna_inputs, training=False)\n # # dyna_outputs[""token_logits""]: (B, t, N, vocab_size)\n # # We want the logits for the last time step (frame t-1 predicting t)\n # jax.debug.breakpoint()\n next_token_logits = dyna_outputs[""token_logits""][:, t, n, :].astype(jnp.float32) # (B, 1, vocab_size)\n\n # Sample or argmax for each patch\n if sample_argmax:\n next_token = jnp.argmax(next_token_logits, axis=-1) # (B, 1)\n else:\n rng, step_rng = jax.random.split(rng",python,selection_mouse +1365,3451268,"genie.py",5631,722,"\n # jax.debug.print(""token_idxs_full 0: {}"", token_idxs_full[0,:,0])\n dyna_outputs = self.dynamics(dyna_inputs, training=False)\n # # dyna_outputs[""token_logits""]: (B, t, N, vocab_size)\n # # We want the logits for the last time step (frame t-1 predicting t)\n # jax.debug.breakpoint()\n next_token_logits = dyna_outputs[""token_logits""][:, t, n, :].astype(jnp.float32) # (B, 1, vocab_size)\n\n # Sample or argmax for each patch\n if sample_argmax:\n next_token = jnp.argmax(next_token_logits, axis=-1) # (B, 1)\n else:\n rng, step_rng = jax.random.split(rng",python,selection_mouse +1366,3451273,"genie.py",5609,744,"\n }\n # jax.debug.print(""token_idxs_full 0: {}"", token_idxs_full[0,:,0])\n dyna_outputs = self.dynamics(dyna_inputs, training=False)\n # # dyna_outputs[""token_logits""]: (B, t, N, vocab_size)\n # # We want the logits for the last time step (frame t-1 predicting t)\n # jax.debug.breakpoint()\n next_token_logits = dyna_outputs[""token_logits""][:, t, n, :].astype(jnp.float32) # (B, 1, vocab_size)\n\n # Sample or argmax for each patch\n if sample_argmax:\n next_token = jnp.argmax(next_token_logits, axis=-1) # (B, 1)\n else:\n rng, step_rng = jax.random.split(rng",python,selection_mouse +1367,3451289,"genie.py",5557,796,"\n ""latent_actions"": action_tokens\n }\n # jax.debug.print(""token_idxs_full 0: {}"", token_idxs_full[0,:,0])\n dyna_outputs = self.dynamics(dyna_inputs, training=False)\n # # dyna_outputs[""token_logits""]: (B, t, N, vocab_size)\n # # We want the logits for the last time step (frame t-1 predicting t)\n # jax.debug.breakpoint()\n next_token_logits = dyna_outputs[""token_logits""][:, t, n, :].astype(jnp.float32) # (B, 1, vocab_size)\n\n # Sample or argmax for each patch\n if sample_argmax:\n next_token = jnp.argmax(next_token_logits, axis=-1) # (B, 1)\n else:\n rng, step_rng = jax.random.split(rng",python,selection_mouse +1368,3451353,"genie.py",5504,849,"\n ""video_tokens"": token_idxs_full,\n ""latent_actions"": action_tokens\n }\n # jax.debug.print(""token_idxs_full 0: {}"", token_idxs_full[0,:,0])\n dyna_outputs = self.dynamics(dyna_inputs, training=False)\n # # dyna_outputs[""token_logits""]: (B, t, N, vocab_size)\n # # We want the logits for the last time step (frame 
t-1 predicting t)\n # jax.debug.breakpoint()\n next_token_logits = dyna_outputs[""token_logits""][:, t, n, :].astype(jnp.float32) # (B, 1, vocab_size)\n\n # Sample or argmax for each patch\n if sample_argmax:\n next_token = jnp.argmax(next_token_logits, axis=-1) # (B, 1)\n else:\n rng, step_rng = jax.random.split(rng",python,selection_mouse +1369,3451360,"genie.py",5472,881,"\n dyna_inputs = {\n ""video_tokens"": token_idxs_full,\n ""latent_actions"": action_tokens\n }\n # jax.debug.print(""token_idxs_full 0: {}"", token_idxs_full[0,:,0])\n dyna_outputs = self.dynamics(dyna_inputs, training=False)\n # # dyna_outputs[""token_logits""]: (B, t, N, vocab_size)\n # # We want the logits for the last time step (frame t-1 predicting t)\n # jax.debug.breakpoint()\n next_token_logits = dyna_outputs[""token_logits""][:, t, n, :].astype(jnp.float32) # (B, 1, vocab_size)\n\n # Sample or argmax for each patch\n if sample_argmax:\n next_token = jnp.argmax(next_token_logits, axis=-1) # (B, 1)\n else:\n rng, step_rng = jax.random.split(rng",python,selection_mouse +1370,3451560,"genie.py",5472,0,"",python,selection_mouse +1371,3451562,"genie.py",5471,0,"",python,selection_command +1372,3452728,"TERMINAL",0,0,"2025-07-21 15:23:19.326706: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n2025-07-21 15:23:19.327055: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n2025-07-21 15:23:19.327085: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n2025-07-21 15:23:19.327130: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n2025-07-21 15:23:19.327205: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n2025-07-21 15:23:19.327254: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n2025-07-21 15:23:19.327300: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. 
Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n2025-07-21 15:23:19.327320: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n2025-07-21 15:23:19.327334: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +1373,3483046,"TERMINAL",0,0,"autoreg sampling done. calculating ssim and saving video\r\n",,terminal_output +1374,3485798,"TERMINAL",0,0,"SSIM: 0.4741170406341553\r\n",,terminal_output +1375,3487781,"TERMINAL",0,0,"]0;tum_cte0515@hkn0901:~/Projects/jafar[?2004h(jafar) [tum_cte0515@hkn0901 jafar]$ ",,terminal_output +1376,3493476,"genie.py",5469,0,"",python,selection_mouse +1377,3493595,"genie.py",5468,2,"32",python,selection_mouse +1378,3494473,"genie.py",5630,0,"",python,selection_mouse +1379,3498113,"TERMINAL",0,0,"\r(jafar) [tum_cte0515@hkn0901 jafar]$ ",,terminal_output +1380,3521533,"genie.py",0,0,"",python,tab +1381,3521534,"genie.py",5748,0,"",python,selection_mouse +1382,3523398,"genie.py",5502,0,"",python,selection_mouse +1383,3524373,"genie.py",5470,0,"",python,selection_mouse +1384,3529926,"genie.py",5469,1,"",python,content +1385,3530057,"genie.py",5468,1,"",python,content +1386,3532312,"genie.py",5468,0,"3",python,content +1387,3532314,"genie.py",5469,0,"",python,selection_keyboard +1388,3532390,"genie.py",5469,0,"0",python,content +1389,3532390,"genie.py",5470,0,"",python,selection_keyboard +1390,3532493,"genie.py",5470,0,"0",python,content +1391,3532493,"genie.py",5471,0,"",python,selection_keyboard +1392,3533274,"genie.py",5748,0,"",python,selection_mouse +1393,3534039,"genie.py",5466,0,"",python,selection_mouse +1394,3538197,"TERMINAL",0,0,"sh slurm/jobs/mihir/horeka/yolo-runs/sampling.sh /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/interactive/shift-spatial",,terminal_output +1395,3538901,"TERMINAL",0,0,"\r\n[?2004l\r\r\n# Log the sbatch script\r\ncat $0\r\n\r\nmodule unload mpi/openmpi/5.0\r\nmodule unload devel/cuda/12.4\r\n# source .venv/bin/activate\r\n\r\narray_records_dir=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_new/open_ai_minecraft_arrayrecords_chunked\r\n\r\njob_name=$SLURM_JOB_NAME\r\nslurm_job_id=$SLURM_JOB_ID\r\n\r\nCHECKPOINT_DIR=$ws_dir/checkpoints/$job_name/$slurm_job_id\r\nmkdir -p $CHECKPOINT_DIR\r\n\r\ntokenizer_ckpt_dir=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/big-runs/tokenizer-lr-scaling/train_tokenizer_lr_sweep_1e-4\r\ndynamics_ckpt_dir=$1\r\necho $dynamics_ckpt_dir\r\n\r\nenv | grep SLURM\r\n\r\npython sample.py \\r\n --checkpoint $dynamics_ckpt_dir \\r\n --dyna_dim=128 \\r\n --dyna_num_blocks=2 \\r\n --dyna_num_heads=4 \\r\n --seq_len=2 \\r\n --data_dir $array_records_dir\r\n\r\n",,terminal_output 
+1396,3539040,"TERMINAL",0,0,"/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/interactive/shift-spatial\r\nSLURM_STEP_NUM_TASKS=1\r\nSLURM_JOB_USER=tum_cte0515\r\nSLURM_TASKS_PER_NODE=1\r\nSLURM_JOB_UID=999226\r\nSLURM_TASK_PID=2919560\r\nSLURM_JOB_GPUS=0\r\nSLURM_LOCALID=0\r\nSLURM_SUBMIT_DIR=/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar\r\nSLURMD_NODENAME=hkn0901\r\nSLURM_JOB_START_TIME=1753100849\r\nSLURM_STEP_NODELIST=hkn0901\r\nSLURM_CLUSTER_NAME=hk\r\nSLURM_JOB_END_TIME=1753104449\r\nSLURM_PMI2_SRUN_PORT=34047\r\nSLURM_CPUS_ON_NODE=6\r\nSLURM_JOB_CPUS_PER_NODE=6\r\nSLURM_GPUS_ON_NODE=1\r\nSLURM_GTIDS=0\r\nSLURM_JOB_PARTITION=dev_accelerated-h100\r\nSLURM_TRES_PER_TASK=cpu=5\r\nSLURM_OOM_KILL_STEP=0\r\nSLURM_JOB_NUM_NODES=1\r\nSLURM_STEPID=4294967290\r\nSLURM_JOBID=3365095\r\nSLURM_PTY_PORT=38245\r\nSLURM_JOB_QOS=normal\r\nSLURM_LAUNCH_NODE_IPADDR=10.0.7.198\r\nSLURM_PTY_WIN_ROW=38\r\nSLURM_PMI2_PROC_MAPPING=(vector,(0,1,1))\r\nSLURMD_DEBUG=2\r\nSLURM_PROCID=0\r\nSLURM_CPUS_PER_TASK=5\r\nSLURM_NTASKS=1\r\nSLURM_TOPOLOGY_ADDR=hkibb.hkibbi4.hkibbi4e1.hkn0901\r\nSLURM_TOPOLOGY_ADDR_PATTERN=switch.switch.switch.node\r\nSLURM_SRUN_COMM_HOST=10.0.7.198\r\nSLURM_SCRIPT_CONTEXT=prolog_task\r\nSLURM_PTY_WIN_COL=197\r\nSLURM_NODELIST=hkn0901\r\nSLURM_SRUN_COMM_PORT=35279\r\nSLURM_STEP_ID=4294967290\r\nSLURM_JOB_ACCOUNT=hk-project-p0023960\r\nSLURM_PRIO_PROCESS=0\r\nSLURM_NPROCS=1\r\nSLURM_NNODES=1\r\nSLURM_SUBMIT_HOST=hkn1990.localdomain\r\nSLURM_JOB_ID=3365095\r\nSLURM_NODEID=0\r\nSLURM_STEP_NUM_NODES=1\r\nSLURM_STEP_TASKS_PER_NODE=1\r\nSLURM_MPI_TYPE=pmi2\r\nSLURM_PMI2_STEP_NODES=hkn0901\r\nSLURM_CONF=/etc/slurm/slurm.conf\r\nSLURM_JOB_NAME=interactive\r\nSLURM_NTASKS_PER_NODE=1\r\nSLURM_STEP_LAUNCHER_PORT=35279\r\nSLURM_JOB_GID=502226\r\nSLURM_JOB_NODELIST=hkn0901\r\n",,terminal_output +1397,3539869,"TERMINAL",0,0,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/tyro/_parsers.py:347: UserWarning: The field `param-dtype` is annotated with type ``, but the default value `` has type ``. We'll try to handle this gracefully, but it may cause unexpected behavior.\r\n warnings.warn(message)\r\n/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/tyro/_parsers.py:347: UserWarning: The field `dtype` is annotated with type ``, but the default value `` has type ``. We'll try to handle this gracefully, but it may cause unexpected behavior.\r\n warnings.warn(message)\r\n",,terminal_output +1398,3542419,"TERMINAL",0,0,"2025-07-21 15:24:49.085768: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. 
Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +1399,3542983,"TERMINAL",0,0,"bash",,terminal_focus +1400,3543813,"TERMINAL",0,0,"queue",,terminal_command +1401,3543887,"TERMINAL",0,0,"]633;E;2025-07-21 15:24:50 queue;a9849893-e54a-428b-8c10-ef7068f4e31e]633;C[?1049h(B[?7hEvery 1.0s: squeue --mehkn1990.localdomain: Mon Jul 21 15:24:50 2025JOBID PARTITION NAME USER ST\tTIME NODES NODELIST(REASON)3365092 accelerat interact tum_cte0 PD\t0:00\t 2 (Priority)3365091 accelerat interact tum_cte0 R57:25\t 1 hkn07073365095 dev_accel interact tum_cte0 R57:21\t 1 hkn0901",,terminal_output +1402,3544996,"TERMINAL",0,0,"162",,terminal_output +1403,3545892,"TERMINAL",0,0,"[?1049l\r[?1l>]0;tum_cte0515@hkn1990:/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/interactive/shift-spatial]633;D;0",,terminal_output +1404,3549487,"TERMINAL",0,0,"srun",,terminal_focus +1405,3553387,"TERMINAL",0,0,"2025-07-21 15:24:59.964457: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +1406,3554425,"models/dynamics.py",0,0,"",python,tab +1407,3557924,"models/dynamics.py",332,0,"",python,selection_mouse +1408,3558031,"TERMINAL",0,0,"2025-07-21 15:25:04.659013: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +1409,3558081,"models/dynamics.py",332,1," ",python,selection_mouse +1410,3558146,"models/dynamics.py",332,4," par",python,selection_mouse +1411,3558147,"models/dynamics.py",332,6," param",python,selection_mouse +1412,3558147,"models/dynamics.py",332,35," param_dtype: jnp.dtype\n dtype: ",python,selection_mouse +1413,3558149,"models/dynamics.py",332,37," param_dtype: jnp.dtype\n dtype: jn",python,selection_mouse +1414,3558166,"models/dynamics.py",332,39," param_dtype: jnp.dtype\n dtype: jnp.",python,selection_mouse +1415,3558218,"models/dynamics.py",332,42," param_dtype: jnp.dtype\n dtype: jnp.dty",python,selection_mouse +1416,3558218,"models/dynamics.py",332,43," param_dtype: jnp.dtype\n dtype: jnp.dtyp",python,selection_mouse +1417,3558220,"models/dynamics.py",332,44," param_dtype: jnp.dtype\n dtype: jnp.dtype",python,selection_mouse +1418,3558549,"models/dynamics.py",376,0,"",python,selection_mouse +1419,3558552,"models/dynamics.py",375,0,"",python,selection_command +1420,3558698,"models/dynamics.py",371,5,"dtype",python,selection_mouse +1421,3558699,"models/dynamics.py",372,4,"type",python,selection_command +1422,3558898,"models/dynamics.py",372,0,"",python,selection_mouse +1423,3558918,"models/dynamics.py",367,5,"jnp.d",python,selection_mouse +1424,3558936,"models/dynamics.py",360,12,"dtype: jnp.d",python,selection_mouse +1425,3558997,"models/dynamics.py",359,13," dtype: jnp.d",python,selection_mouse +1426,3558998,"models/dynamics.py",358,14," dtype: jnp.d",python,selection_mouse +1427,3558999,"models/dynamics.py",357,15," dtype: jnp.d",python,selection_mouse +1428,3559065,"models/dynamics.py",356,16," dtype: jnp.d",python,selection_mouse +1429,3559380,"models/dynamics.py",329,43," 
param_dtype: jnp.dtype\n dtype: jnp.d",python,selection_mouse +1430,3559546,"TERMINAL",0,0,"2025-07-21 15:25:06.215039: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +1431,3560247,"sample.py",0,0,"",python,tab +1432,3564552,"TERMINAL",0,0,"2025-07-21 15:25:11.202453: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +1433,3566764,"TERMINAL",0,0,"srun",,terminal_focus +1434,3566847,"TERMINAL",0,0,"\r[tum_cte0515@hkn0707 jafar]$ ",,terminal_output +1435,3567833,"TERMINAL",0,0,"#",,terminal_output +1436,3568006,"TERMINAL",0,0," ",,terminal_output +1437,3568488,"TERMINAL",0,0,"sh slurm/jobs/mihir/horeka/yolo-runs/sampling.sh /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/interactive/shift-spatial",,terminal_output +1438,3568502,"TERMINAL",0,0,"2025-07-21 15:25:15.170951: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +1439,3569533,"TERMINAL",0,0,"\rsh slurm/jobs/mihir/horeka/yolo-runs/sampling.sh /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/interactive/shift-spatial\r\n[?2004l\r]0;tum_cte0515@hkn0707:~/Projects/jafar[?2004h[tum_cte0515@hkn0707 jafar]$ ",,terminal_output +1440,3571257,"TERMINAL",0,0,"srun",,terminal_focus +1441,3571565,"TERMINAL",0,0,"2025-07-21 15:25:18.196693: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +1442,3572389,"TERMINAL",0,0,"2025-07-21 15:25:19.058133: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +1443,3575180,"TERMINAL",0,0,"2025-07-21 15:25:21.841694: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +1444,3576883,"TERMINAL",0,0,"2025-07-21 15:25:23.549880: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. 
Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +1445,3578504,"TERMINAL",0,0,"2025-07-21 15:25:25.125511: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +1446,3581675,"TERMINAL",0,0,"2025-07-21 15:25:28.338856: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +1447,3584722,"TERMINAL",0,0,"WARNING:absl:Missing metrics for step 1000\r\nERROR:absl:File /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/interactive/shift-spatial/001000/metrics/metrics not found.\r\nWARNING:absl:Missing metrics for step 900\r\nERROR:absl:File /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/interactive/shift-spatial/000900/metrics/metrics not found.\r\nWARNING:absl:Missing metrics for step 800\r\nERROR:absl:File /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/interactive/shift-spatial/000800/metrics/metrics not found.\r\n",,terminal_output +1448,3594039,"TERMINAL",0,0,"autoreg sampling...\r\n",,terminal_output +1449,3651820,"TERMINAL",0,0,"bash",,terminal_focus +1450,3653587,"TERMINAL",0,0,"git status",,terminal_command +1451,3656914,"TERMINAL",0,0,"srun",,terminal_focus +1452,3656983,"TERMINAL",0,0,"\r[tum_cte0515@hkn0707 jafar]$ ",,terminal_output +1453,3657882,"TERMINAL",0,0,"gi",,terminal_output +1454,3657947,"TERMINAL",0,0,"t",,terminal_output +1455,3658073,"TERMINAL",0,0," ",,terminal_output +1456,3658182,"TERMINAL",0,0,"s",,terminal_output +1457,3658308,"TERMINAL",0,0,"t",,terminal_output +1458,3658372,"TERMINAL",0,0,"a",,terminal_output +1459,3658480,"TERMINAL",0,0,"[?25lt[?25h",,terminal_output +1460,3658586,"TERMINAL",0,0,"[?25lu[?25h",,terminal_output +1461,3658647,"TERMINAL",0,0,"[?25ls[?25h",,terminal_output +1462,3658795,"TERMINAL",0,0,"[?25l[?2004l\r[?25h",,terminal_output +1463,3659155,"TERMINAL",0,0,"On branch new-arch-sampling\r\nChanges not staged for commit:\r\n (use ""git add ..."" to update what will be committed)\r\n (use ""git restore ..."" to discard changes in working directory)\r\n\tmodified: genie.py\r\n\tmodified: models/dynamics.py\r\n\tmodified: models/lam.py\r\n\tmodified: train_dynamics.py\r\n\tmodified: utils/nn.py\r\n\r\nUntracked files:\r\n (use ""git add ..."" to include in what will be committed)\r\n\tdiff.diff\r\n\tdiff.log\r\n\tlogs/\r\n\tread_tf_record.py\r\n\trequirements-franz.txt\r\n\tscripts_cremers/\r\n\tscripts_horeka/\r\n\tslurm-3359333.out\r\n\tslurm-3359334.out\r\n\tslurm-3359338.out\r\n\tslurm/\r\n\tutils/logger_bak.py\r\n\tutils/visualizer.py\r\n\tweekend-job-requeuer.sh\r\n\tweekend-job-starter.sh\r\n\r\nno changes added to commit (use ""git add"" and/or ""git commit -a"")\r\n]0;tum_cte0515@hkn0707:~/Projects/jafar[?2004h[tum_cte0515@hkn0707 jafar]$ ",,terminal_output +1464,3659829,"TERMINAL",0,0,"[?25lgi[?25h[?25li[?25h",,terminal_output +1465,3659938,"TERMINAL",0,0,"[?25lt[?25h",,terminal_output +1466,3660004,"TERMINAL",0,0,"[?25l [?25h",,terminal_output 
+1467,3660158,"TERMINAL",0,0,"[?25ld[?25h",,terminal_output +1468,3660285,"TERMINAL",0,0,"[?25li[?25h",,terminal_output +1469,3660345,"TERMINAL",0,0,"[?25lf[?25h",,terminal_output +1470,3660718,"TERMINAL",0,0,"[?25lf[?25h",,terminal_output +1471,3660856,"TERMINAL",0,0,"[?25lf[?25h",,terminal_output +1472,3661158,"TERMINAL",0,0,"[?25l [?25h",,terminal_output +1473,3661677,"TERMINAL",0,0,"[?25l [?25h",,terminal_output +1474,3661806,"TERMINAL",0,0,"[?25lm[?25h",,terminal_output +1475,3661867,"TERMINAL",0,0,"[?25la[?25h",,terminal_output +1476,3661934,"TERMINAL",0,0,"[?25li[?25h",,terminal_output +1477,3661995,"TERMINAL",0,0,"[?25ln[?25h",,terminal_output +1478,3662152,"TERMINAL",0,0,"\r\n[?2004l\r[?1h=\r",,terminal_output +1479,3662302,"TERMINAL",0,0,"diff --git a/genie.py b/genie.py\r\nindex 0e66676..384c84c 100644\r\n--- a/genie.py\r\n+++ b/genie.py\r\n@@ -7,7 +7,7 @@ import flax.linen as nn\r\n from flax.training.train_state import TrainState\r\n import orbax.checkpoint as ocp\r\n \r\n-from models.dynamics import DynamicsMaskGIT\r\n+from models.dynamics import DynamicsMaskGIT, DynamicsAutoregressive\r\n from models.lam import LatentActionModel\r\n from models.tokenizer import TokenizerVQVAE\r\n \r\n@@ -38,6 +38,7 @@ class Genie(nn.Module):\r\n dyna_dim: int\r\n dyna_num_blocks: int\r\n dyna_num_heads: int\r\n+ use_maskgit: bool\r\n param_dtype: jnp.dtype\r\n dtype: jnp.dtype\r\n dropout: float = 0.0\r\n@@ -70,16 +71,28 @@ class Genie(nn.Module):\r\n param_dtype=self.param_dtype,\r\n dtype=self.dtype,\r\n )\r\n- self.dynamics = DynamicsMaskGIT(\r\n- model_dim=self.dyna_dim,\r\n- num_latents=self.num_patch_latents,\r\n- num_blocks=self.dyna_num_blocks,\r\n- num_heads=self.dyna_num_heads,\r\n- dropout=self.dropout,\r\n- mask_limit=self.mask_limit,\r\n- param_dtype=self.param_dtype,\r\n- dtype=self.dtype,\r\n:",,terminal_output +1480,3663405,"TERMINAL",0,0,"\r- )\r\n:",,terminal_output +1481,3663583,"TERMINAL",0,0,"\r+\r\n:",,terminal_output +1482,3663822,"TERMINAL",0,0,"\rMindex 0e66676..384c84c 100644\r\n\r:",,terminal_output +1483,3664815,"TERMINAL",0,0,"\r[?1l>]0;tum_cte0515@hkn0707:~/Projects/jafar[?2004h[tum_cte0515@hkn0707 jafar]$ ",,terminal_output +1484,3665377,"TERMINAL",0,0,"g",,terminal_output +1485,3665437,"TERMINAL",0,0,"[?25li[?25h",,terminal_output +1486,3665493,"TERMINAL",0,0,"[?25lt[?25h",,terminal_output +1487,3665676,"TERMINAL",0,0,"[?25l [?25h",,terminal_output +1488,3665927,"TERMINAL",0,0,"[?25ld[?25h",,terminal_output +1489,3666002,"TERMINAL",0,0,"[?25li[?25h",,terminal_output +1490,3666115,"TERMINAL",0,0,"[?25lf[?25h",,terminal_output +1491,3666259,"TERMINAL",0,0,"[?25lf[?25h",,terminal_output +1492,3666390,"TERMINAL",0,0,"[?25l [?25h",,terminal_output +1493,3666596,"TERMINAL",0,0,"[?25lm[?25h[?25la[?25h",,terminal_output +1494,3666673,"TERMINAL",0,0,"[?25li[?25h[?25ln[?25h",,terminal_output +1495,3666769,"TERMINAL",0,0,"[?25l [?25h",,terminal_output +1496,3666949,"TERMINAL",0,0,"[?25l>[?25h",,terminal_output +1497,3667073,"TERMINAL",0,0,"[?25l [?25h",,terminal_output +1498,3667416,"TERMINAL",0,0,"[?25ld[?25h",,terminal_output +1499,3667471,"TERMINAL",0,0,"[?25li[?25h",,terminal_output +1500,3667685,"TERMINAL",0,0,"[?25lf[?25h",,terminal_output +1501,3667760,"TERMINAL",0,0,"[?25lf[?25h",,terminal_output +1502,3667960,"TERMINAL",0,0,"[?25l-[?25h",,terminal_output +1503,3668016,"TERMINAL",0,0,"[?25ld[?25h",,terminal_output +1504,3668867,"TERMINAL",0,0,"[?25l.[?25h",,terminal_output +1505,3669082,"TERMINAL",0,0,"[?25ld[?25h",,terminal_output 
+1506,3669324,"TERMINAL",0,0,"[?25li[?25h",,terminal_output +1507,3669431,"TERMINAL",0,0,"[?25lf[?25h",,terminal_output +1508,3669600,"TERMINAL",0,0,"[?25lf[?25h",,terminal_output +1509,3669680,"TERMINAL",0,0,"\r\n[?2004l\r]0;tum_cte0515@hkn0707:~/Projects/jafar[?2004h[tum_cte0515@hkn0707 jafar]$ ",,terminal_output +1510,3671302,"diff.diff",0,0,"diff --git a/genie.py b/genie.py\nindex 0e66676..384c84c 100644\n--- a/genie.py\n+++ b/genie.py\n@@ -7,7 +7,7 @@ import flax.linen as nn\n from flax.training.train_state import TrainState\n import orbax.checkpoint as ocp\n \n-from models.dynamics import DynamicsMaskGIT\n+from models.dynamics import DynamicsMaskGIT, DynamicsAutoregressive\n from models.lam import LatentActionModel\n from models.tokenizer import TokenizerVQVAE\n \n@@ -38,6 +38,7 @@ class Genie(nn.Module):\n dyna_dim: int\n dyna_num_blocks: int\n dyna_num_heads: int\n+ use_maskgit: bool\n param_dtype: jnp.dtype\n dtype: jnp.dtype\n dropout: float = 0.0\n@@ -70,16 +71,28 @@ class Genie(nn.Module):\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n )\n- self.dynamics = DynamicsMaskGIT(\n- model_dim=self.dyna_dim,\n- num_latents=self.num_patch_latents,\n- num_blocks=self.dyna_num_blocks,\n- num_heads=self.dyna_num_heads,\n- dropout=self.dropout,\n- mask_limit=self.mask_limit,\n- param_dtype=self.param_dtype,\n- dtype=self.dtype,\n- )\n+\n+ if self.use_maskgit:\n+ self.dynamics = DynamicsMaskGIT(\n+ model_dim=self.dyna_dim,\n+ num_latents=self.num_patch_latents,\n+ num_blocks=self.dyna_num_blocks,\n+ num_heads=self.dyna_num_heads,\n+ dropout=self.dropout,\n+ mask_limit=self.mask_limit,\n+ param_dtype=self.param_dtype,\n+ dtype=self.dtype,\n+ ) \n+ else:\n+ self.dynamics = DynamicsAutoregressive(\n+ model_dim=self.dyna_dim,\n+ num_latents=self.num_patch_latents,\n+ num_blocks=self.dyna_num_blocks,\n+ num_heads=self.dyna_num_heads,\n+ dropout=self.dropout,\n+ param_dtype=self.param_dtype,\n+ dtype=self.dtype,\n+ )\n \n def __call__(self, batch: Dict[str, Any], training: bool = True) -> Dict[str, Any]:\n tokenizer_outputs = self.tokenizer.vq_encode(batch[""videos""], training=False)\n@@ -103,8 +116,81 @@ class Genie(nn.Module):\n outputs[""lam_indices""] = lam_outputs[""indices""]\n return outputs\n \n+\n+ def sample_causal(\n+ self,\n+ batch: Dict[str, Any],\n+ seq_len: int,\n+ temperature: float = 1,\n+ sample_argmax: bool = False,\n+ ):\n+ """"""\n+ Autoregressively samples up to `seq_len` future frames using the causal transformer backend.\n+\n+ - Input frames are tokenized once.\n+ - Future frames are generated one at a time, each conditioned on all previous frames.\n+ - All frames are detokenized in a single pass at the end.\n+\n+ Args:\n+ batch: Dict with at least ""videos"" (B, T, H, W, C)\n+ seq_len: total number of frames to generate (including context)\n+ temperature: sampling temperature\n+ sample_argmax: if True, use argmax instead of sampling\n+\n+ Returns:\n+ Generated video frames (B, seq_len, H, W, C)\n+ """"""\n+ # --- Encode context frames ---\n+ tokenizer_out = self.tokenizer.vq_encode(batch[""videos""], training=False)\n+ token_idxs = tokenizer_out[""indices""] # (B, T, N)\n+ B, T, N = token_idxs.shape\n+\n+ # jax.debug.print(""token_idxs shape: {}"", token_idxs.shape)\n+ # --- Prepare initial token sequence ---\n+ # Pad with zeros for future frames\n+ pad_shape = (B, seq_len - T, N)\n+ token_idxs_full = jnp.concatenate(\n+ [token_idxs, jnp.zeros(pad_shape, dtype=token_idxs.dtype)], axis=1\n+ ) # (B, seq_len, N)\n+\n+ # --- Prepare latent actions ---\n+ action_tokens 
= self.lam.vq.get_codes(batch[""latent_actions""]) # (B, S-1, )\n+ # --- Autoregressive generation loop ---\n+ rng = batch[""rng""]\n+ for t in range(T, seq_len):\n+ for n in range(300):\n+ dyna_inputs = {\n+ ""video_tokens"": token_idxs_full,\n+ ""latent_actions"": action_tokens\n+ }\n+ # jax.debug.print(""token_idxs_full 0: {}"", token_idxs_full[0,:,0])\n+ dyna_outputs = self.dynamics(dyna_inputs, training=False)\n+ # # dyna_outputs[""token_logits""]: (B, t, N, vocab_size)\n+ # # We want the logits for the last time step (frame t-1 predicting t)\n+ # jax.debug.breakpoint()\n+ next_token_logits = dyna_outputs[""token_logits""][:, t, n, :].astype(jnp.float32) # (B, 1, vocab_size)\n+\n+ # Sample or argmax for each patch\n+ if sample_argmax:\n+ next_token = jnp.argmax(next_token_logits, axis=-1) # (B, 1)\n+ else:\n+ rng, step_rng = jax.random.split(rng)\n+ next_token = jax.random.categorical(\n+ step_rng, next_token_logits / temperature, axis=-1\n+ ) # (B, 1)\n+\n+ # Insert the generated tokens into the sequence\n+ token_idxs_full = token_idxs_full.at[:, t, n].set(next_token)\n+\n+ # --- Decode all tokens at once at the end ---\n+ final_frames = self.tokenizer.decode(\n+ token_idxs_full, video_hw=batch[""videos""].shape[2:4]\n+ )\n+ return final_frames\n+\n+\n @nn.compact\n- def sample(\n+ def sample_maskgit(\n self,\n batch: Dict[str, Any],\n seq_len: int,\n@@ -141,7 +227,7 @@ class Genie(nn.Module):\n pad_shape = (B, seq_len - T, N)\n pad = jnp.zeros(pad_shape, dtype=token_idxs.dtype)\n token_idxs = jnp.concatenate([token_idxs, pad], axis=1) # (B, S, N)\n- action_tokens = self.lam.vq.get_codes(batch[""latent_actions""])\n+ action_tokens = self.lam.vq.get_codes(batch[""latent_actions""]) \n \n MaskGITLoop = nn.scan(\n MaskGITStep,\ndiff --git a/models/dynamics.py b/models/dynamics.py\nindex 8b183dc..7d184d8 100644\n--- a/models/dynamics.py\n+++ b/models/dynamics.py\n@@ -3,6 +3,7 @@ from typing import Dict, Any\n import jax\n import jax.numpy as jnp\n import flax.linen as nn\n+import einops\n \n from utils.nn import STTransformer\n \n@@ -28,6 +29,7 @@ class DynamicsMaskGIT(nn.Module):\n self.dropout,\n self.param_dtype,\n self.dtype,\n+ spacial_bert=True,\n )\n self.patch_embed = nn.Embed(self.num_latents, self.model_dim)\n self.mask_token = self.param(\n@@ -58,3 +60,47 @@ class DynamicsMaskGIT(nn.Module):\n vid_embed += jnp.pad(act_embed, ((0, 0), (1, 0), (0, 0), (0, 0)))\n logits = self.dynamics(vid_embed)\n return dict(token_logits=logits, mask=mask)\n+\n+\n+class DynamicsAutoregressive(nn.Module):\n+ """"""Autoregressive (causal) dynamics model""""""\n+\n+ model_dim: int\n+ num_latents: int\n+ num_blocks: int\n+ num_heads: int\n+ dropout: float\n+ param_dtype: jnp.dtype\n+ dtype: jnp.dtype\n+\n+ def setup(self):\n+ self.dynamics = STTransformer(\n+ self.model_dim,\n+ self.num_latents,\n+ self.num_blocks,\n+ self.num_heads,\n+ self.dropout,\n+ self.param_dtype,\n+ self.dtype,\n+ spacial_bert=False,\n+ )\n+ self.patch_embed = nn.Embed(self.num_latents, self.model_dim)\n+ self.action_up = nn.Dense(\n+ self.model_dim,\n+ param_dtype=self.param_dtype,\n+ dtype=self.dtype,\n+ )\n+\n+ def __call__(self, batch: Dict[str, Any], training: bool = True) -> Dict[str, Any]:\n+ vid_embed = self.patch_embed(batch[""video_tokens""])\n+ act_embed = self.action_up(batch[""latent_actions""])\n+ vid_embed += jnp.pad(act_embed, ((0, 0), (1, 0), (0, 0), (0, 0)))\n+ # vid_embed_padded = jnp.pad(vid_embed, ((0, 0), (1, 0), (1, 0), (0, 0)))\n+ # logits = self.dynamics(vid_embed_padded)[:, :-1, :-1]\n+ 
vid_embed_padded = jnp.pad(vid_embed, ((0, 0), (0, 0), (1, 0), (0, 0)))\n+ logits = self.dynamics(vid_embed_padded)[:, :, :-1]\n+ mask = jnp.ones(vid_embed.shape[:-1])\n+ # next_tokens = jnp.argmax(logits, axis=-1)\n+ # print(next_tokens.shape)\n+ # jax.debug.breakpoint()\n+ return dict(token_logits=logits, mask=mask)\n\ No newline at end of file\ndiff --git a/models/lam.py b/models/lam.py\nindex cb3cc00..86a1b53 100644\n--- a/models/lam.py\n+++ b/models/lam.py\n@@ -85,6 +85,7 @@ class LatentActionModel(nn.Module):\n B, T = videos.shape[:2]\n patches = patchify(videos, self.patch_size)\n action_pad = jnp.broadcast_to(self.action_in, (B, T, 1, self.patch_token_dim))\n+ # FIXME mihir do this the other way around\n padded_patches = jnp.concatenate((action_pad, patches), axis=2)\n \n # --- Encode ---\ndiff --git a/sample.py b/sample.py\nindex 4ef592d..3048b3a 100644\n--- a/sample.py\n+++ b/sample.py\n@@ -1,4 +1,5 @@\n from dataclasses import dataclass\n+from typing import Optional\n import time\n import os\n \n@@ -8,7 +9,10 @@ import jax\n import jax.numpy as jnp\n import flax.linen as nn\n import numpy as np\n-from orbax.checkpoint import PyTreeCheckpointer\n+from flax.training.train_state import TrainState\n+import grain\n+import orbax.checkpoint as ocp\n+import optax\n from PIL import Image, ImageDraw\n import tyro\n \n@@ -26,6 +30,7 @@ class Args:\n image_width: int = 160\n data_dir: str = ""data/coinrun_episodes""\n checkpoint: str = """"\n+ checkpoint_step: Optional[int] = None\n # Sampling\n batch_size: int = 1\n maskgit_steps: int = 25\n@@ -46,6 +51,7 @@ class Args:\n lam_patch_size: int = 16\n lam_num_blocks: int = 8\n lam_num_heads: int = 8\n+ lam_co_train: bool = True\n # Dynamics checkpoint\n dyna_dim: int = 512\n dyna_num_blocks: int = 12\n@@ -74,11 +80,12 @@ genie = Genie(\n lam_patch_size=args.lam_patch_size,\n lam_num_blocks=args.lam_num_blocks,\n lam_num_heads=args.lam_num_heads,\n- lam_co_train=False,\n+ lam_co_train=args.lam_co_train,\n # Dynamics\n dyna_dim=args.dyna_dim,\n dyna_num_blocks=args.dyna_num_blocks,\n dyna_num_heads=args.dyna_num_heads,\n+ use_maskgit=False,\n param_dtype=args.param_dtype,\n dtype=args.dtype,\n )\n@@ -90,12 +97,40 @@ dummy_inputs = dict(\n )\n rng, _rng = jax.random.split(rng)\n params = genie.init(_rng, dummy_inputs)\n-ckpt = PyTreeCheckpointer().restore(args.checkpoint)[""model""][""params""][""params""]\n-params[""params""].update(ckpt)\n+\n+dummy_train_state = TrainState.create(\n+ apply_fn=genie.apply,\n+ params=params,\n+ tx=optax.adamw(\n+ optax.warmup_cosine_decay_schedule(\n+ 0, 0, 1, 2 # dummy values\n+ )\n+ ), \n+)\n+handler_registry = ocp.handlers.DefaultCheckpointHandlerRegistry()\n+handler_registry.add('model_state', ocp.args.StandardRestore, ocp.handlers.StandardCheckpointHandler)\n+checkpoint_manager = ocp.CheckpointManager(\n+ args.checkpoint,\n+ options=ocp.CheckpointManagerOptions(step_format_fixed_length=6),\n+ handler_registry=handler_registry\n+)\n+abstract_train_state = jax.tree_util.tree_map(\n+ ocp.utils.to_shape_dtype_struct, dummy_train_state\n+)\n+\n+restored = checkpoint_manager.restore(\n+ args.checkpoint_step or checkpoint_manager.latest_step(),\n+ args=ocp.args.Composite(\n+ model_state=ocp.args.StandardRestore(abstract_train_state),\n+ ),\n+)\n+restored_train_state = restored[""model_state""]\n+params = restored_train_state.params\n \n \n def _sampling_wrapper(module, batch):\n- return module.sample(batch, args.seq_len, args.maskgit_steps, args.temperature, args.sample_argmax)\n+ # return 
module.sample_maskgit(batch, args.seq_len, args.maskgit_steps, args.temperature, args.sample_argmax)\n+ return module.sample_causal(batch, args.seq_len, args.temperature, args.sample_argmax)\n \n # --- Define autoregressive sampling loop ---\n def _autoreg_sample(rng, video_batch, action_batch):\n@@ -109,31 +144,42 @@ def _autoreg_sample(rng, video_batch, action_batch):\n )\n return generated_vid\n \n+def _get_dataloader_iterator():\n+ array_record_files = [\n+ os.path.join(args.data_dir, x)\n+ for x in os.listdir(args.data_dir)\n+ if x.endswith("".array_record"")\n+ ]\n+ grain_dataloader = get_dataloader(\n+ array_record_files,\n+ args.seq_len,\n+ # NOTE: We deliberately pass the global batch size\n+ # The dataloader shards the dataset across all processes\n+ args.batch_size,\n+ *image_shape,\n+ num_workers=0,\n+ prefetch_buffer_size=1,\n+ seed=args.seed,\n+ )\n+ initial_state = grain_dataloader._create_initial_state()\n+ grain_iterator = grain.DataLoaderIterator(grain_dataloader, initial_state)\n+ return grain_iterator\n+\n # --- Get video + latent actions ---\n-array_record_files = [\n- os.path.join(args.data_dir, x)\n- for x in os.listdir(args.data_dir)\n- if x.endswith("".array_record"")\n-]\n-dataloader = get_dataloader(\n- array_record_files,\n- args.seq_len,\n- args.batch_size,\n- args.image_height,\n- args.image_width,\n- args.image_channels,\n- num_workers=8,\n- prefetch_buffer_size=1,\n- seed=args.seed,\n-)\n-video_batch = next(iter(dataloader))\n+# grain_iterator = _get_dataloader_iterator()\n+# video_batch = next(grain_iterator)\n+video_batch = np.load(""overfit_dir/single_sample_corner.npy"")\n+\n+video_batch = video_batch.astype(args.dtype) #/ 255.0\n # Get latent actions for all videos in the batch\n-batch = dict(videos=video_batch)\n+batch = dict(videos=video_batch[:,:args.seq_len])\n action_batch = genie.apply(params, batch, False, method=Genie.vq_encode)\n action_batch = action_batch.reshape(video_batch.shape[0], args.seq_len - 1, 1)\n \n # --- Sample + evaluate video ---\n+print(""autoreg sampling..."")\n vid = _autoreg_sample(rng, video_batch, action_batch)\n+print(""autoreg sampling done. 
calculating ssim and saving video"")\n gt = video_batch[:, : vid.shape[1]].clip(0, 1).reshape(-1, *video_batch.shape[2:])\n recon = vid.clip(0, 1).reshape(-1, *vid.shape[2:])\n ssim = pix.ssim(gt[:, args.start_frame + 1 :], recon[:, args.start_frame + 1 :]).mean()\ndiff --git a/train_dynamics.py b/train_dynamics.py\nindex 3865cb3..4dd49a3 100644\n--- a/train_dynamics.py\n+++ b/train_dynamics.py\n@@ -62,6 +62,7 @@ class Args:\n dyna_num_heads: int = 8\n dropout: float = 0.0\n mask_limit: float = 0.5\n+ use_maskgit: bool = False\n param_dtype: jnp.dtype = jnp.float32\n dtype: jnp.dtype = jnp.bfloat16\n # Logging\n@@ -93,13 +94,21 @@ def dynamics_loss_fn(params, state, inputs):\n )\n mask = outputs[""mask""]\n outputs[""token_logits""] = outputs[""token_logits""].astype(jnp.float32)\n+ logits = outputs[""token_logits""]\n+ targets = outputs[""video_tokens""]\n+\n+ # if not args.use_maskgit:\n+ # logits = outputs[""token_logits""][:, :, :-1]\n+ # targets = outputs[""video_tokens""][:, :, 1:]\n+ # mask = outputs[""mask""][:, :, 1:] \n+\n ce_loss = optax.softmax_cross_entropy_with_integer_labels(\n- outputs[""token_logits""], outputs[""video_tokens""]\n+ logits, targets\n )\n ce_loss = (mask * ce_loss).sum() / mask.sum()\n- acc = outputs[""token_logits""].argmax(-1) == outputs[""video_tokens""]\n+ acc = logits.argmax(-1) == targets\n acc = (mask * acc).sum() / mask.sum()\n- select_probs = jax.nn.softmax(outputs[""token_logits""])\n+ select_probs = jax.nn.softmax(logits)\n gt = inputs[""videos""].clip(0, 1).reshape(-1, *inputs[""videos""].shape[2:])\n recon = outputs[""recon""].clip(0, 1).reshape(-1, *outputs[""recon""].shape[2:])\n psnr = pix.psnr(gt, recon).mean() # type: ignore\n@@ -115,7 +124,7 @@ def dynamics_loss_fn(params, state, inputs):\n metrics = dict(\n cross_entropy_loss=ce_loss,\n masked_token_accuracy=acc,\n- select_logit=outputs[""token_logits""].max(-1).mean(),\n+ select_logit=logits.max(-1).mean(),\n select_p=select_probs.max(-1).mean(),\n entropy=jax.scipy.special.entr(select_probs).sum(-1).mean(),\n psnr=psnr,\n@@ -180,6 +189,7 @@ if __name__ == ""__main__"":\n dyna_num_heads=args.dyna_num_heads,\n dropout=args.dropout,\n mask_limit=args.mask_limit,\n+ use_maskgit=args.use_maskgit,\n param_dtype=args.param_dtype,\n dtype=args.dtype,\n )\n@@ -315,6 +325,9 @@ if __name__ == ""__main__"":\n dataloader = (jax.make_array_from_process_local_data(videos_sharding, elem) for elem in grain_iterator) # type: ignore\n while step < args.num_steps:\n for videos in dataloader:\n+ # videos = np.load(""overfit_dir/corner_8repl.npy"")\n+ # videos = jax.make_array_from_process_local_data(videos_sharding, videos)\n+ # while True:\n # --- Train step ---\n rng, _rng, _rng_dropout, _rng_mask = jax.random.split(rng, 4)\n \ndiff --git a/utils/nn.py b/utils/nn.py\nindex b7bec9f..cf53092 100644\n--- a/utils/nn.py\n+++ b/utils/nn.py\n@@ -26,6 +26,112 @@ class PositionalEncoding(nn.Module):\n x = x + self.pe[: x.shape[2]]\n return x\n \n+# class STBlock2(nn.Module):\n+ # dim: int\n+ # num_heads: int\n+ # dropout: float\n+ # param_dtype: jnp.dtype\n+ # dtype: jnp.dtype\n+\n+ # @nn.remat\n+ # @nn.compact\n+ # def __call__(self, x: jax.Array) -> jax.Array:\n+ # # --- Spatial attention ---\n+ # z = PositionalEncoding(self.dim)(x)\n+ # z = nn.LayerNorm(\n+ # param_dtype=self.param_dtype,\n+ # dtype=self.dtype,\n+ # )(z)\n+ # causal_mask = jnp.tri(z.shape[-2])\n+ # z = nn.MultiHeadAttention(\n+ # num_heads=self.num_heads,\n+ # qkv_features=self.dim,\n+ # dropout_rate=self.dropout,\n+ # 
param_dtype=self.param_dtype,\n+ # dtype=self.dtype,\n+ # )(z, mask=causal_mask)\n+ # x = x + z\n+\n+ # # --- Temporal attention ---\n+ # x = x.swapaxes(1, 2)\n+ # z = PositionalEncoding(self.dim)(x)\n+ # z = nn.LayerNorm(\n+ # param_dtype=self.param_dtype,\n+ # dtype=self.dtype,\n+ # )(z)\n+ # causal_mask = jnp.tri(z.shape[-2])\n+ # z = nn.MultiHeadAttention(\n+ # num_heads=self.num_heads,\n+ # qkv_features=self.dim,\n+ # dropout_rate=self.dropout,\n+ # param_dtype=self.param_dtype,\n+ # dtype=self.dtype,\n+ # )(z, mask=causal_mask)\n+ # x = x + z\n+ # x = x.swapaxes(1, 2)\n+\n+ # # --- Feedforward ---\n+ # z = nn.LayerNorm(\n+ # param_dtype=self.param_dtype,\n+ # dtype=self.dtype,\n+ # )(x)\n+ # # FIXME (f.srambical): Here, the attention hidden dimension is the same as the FFN's. Usually, FFN hidden dimension is 4x model_dim\n+ # z = nn.Dense(\n+ # self.dim,\n+ # param_dtype=self.param_dtype,\n+ # dtype=self.dtype,\n+ # )(z)\n+ # z = nn.gelu(z)\n+ # x = x + z\n+\n+ # return x\n+\n+# class CausalTransformer(nn.Module):\n+ # model_dim: int\n+ # out_dim: int\n+ # num_blocks: int\n+ # num_heads: int\n+ # dropout: float\n+ # param_dtype: jnp.dtype\n+ # dtype: jnp.dtype\n+\n+ # @nn.compact\n+ # def __call__(self, x: jax.Array) -> jax.Array:\n+ # # Input projection and normalization\n+ # x = nn.Sequential(\n+ # [\n+ # nn.LayerNorm(\n+ # param_dtype=self.param_dtype,\n+ # dtype=self.dtype,\n+ # ),\n+ # nn.Dense(self.model_dim,\n+ # param_dtype=self.param_dtype,\n+ # dtype=self.dtype,\n+ # ),\n+ # nn.LayerNorm(\n+ # param_dtype=self.param_dtype,\n+ # dtype=self.dtype,\n+ # ),\n+ # ]\n+ # )(x)\n+ # # Causal transformer blocks\n+ # for _ in range(self.num_blocks):\n+ # x = STBlock2(\n+ # dim=self.model_dim,\n+ # num_heads=self.num_heads,\n+ # dropout=self.dropout,\n+ # param_dtype=self.param_dtype,\n+ # dtype=self.dtype,\n+ # )(x)\n+\n+ # # Output projection\n+ # x = nn.Dense(\n+ # self.out_dim,\n+ # param_dtype=self.param_dtype,\n+ # dtype=self.dtype,\n+ # )(x)\n+ # return x # (B, T, E)\n+\n \n class STBlock(nn.Module):\n dim: int\n@@ -33,6 +139,7 @@ class STBlock(nn.Module):\n dropout: float\n param_dtype: jnp.dtype\n dtype: jnp.dtype\n+ spatial_bert: bool = True\n \n @nn.remat\n @nn.compact\n@@ -43,13 +150,14 @@ class STBlock(nn.Module):\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n )(z)\n+ spatial_mask = None if self.spatial_bert else jnp.tri(z.shape[-2])\n z = nn.MultiHeadAttention(\n num_heads=self.num_heads,\n qkv_features=self.dim,\n dropout_rate=self.dropout,\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n- )(z)\n+ )(z, mask=spatial_mask)\n x = x + z\n \n # --- Temporal attention ---\n@@ -95,6 +203,7 @@ class STTransformer(nn.Module):\n dropout: float\n param_dtype: jnp.dtype\n dtype: jnp.dtype\n+ spacial_bert: bool = True\n \n @nn.compact\n def __call__(self, x: jax.Array) -> jax.Array:\n@@ -121,6 +230,7 @@ class STTransformer(nn.Module):\n dropout=self.dropout,\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n+ spatial_bert=self.spacial_bert,\n )(x)\n x = nn.Dense(\n self.out_dim,\n",diff,tab +1511,3685832,"TERMINAL",0,0,"2025-07-21 15:27:12.488819: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. 
Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +1512,3699037,"diff.diff",16591,0,"",diff,selection_mouse +1513,3699039,"diff.diff",16590,0,"",diff,selection_command +1514,3702668,"diff.diff",13,0,"",diff,selection_command +1515,3727602,"TERMINAL",0,0,"srun",,terminal_focus +1516,3729146,"TERMINAL",0,0,"bash",,terminal_focus +1517,3730177,"TERMINAL",0,0,"queue",,terminal_command +1518,3730227,"TERMINAL",0,0,"]633;E;2025-07-21 15:27:56 queue;a9849893-e54a-428b-8c10-ef7068f4e31e]633;C",,terminal_output +1519,3730282,"TERMINAL",0,0,"[?1049h(B[?7hEvery 1.0s: squeue --mehkn1990.localdomain: Mon Jul 21 15:27:56 2025JOBID PARTITION NAME USER ST\tTIME NODES NODELIST(REASON)3365092 accelerat interact tum_cte0 PD\t0:00\t 2 (Priority)3365091 accelerat interact tum_cte0 R 1:00:31\t 1 hkn07073365095 dev_accel interact tum_cte0 R 1:00:27\t 1 hkn0901",,terminal_output +1520,3731300,"TERMINAL",0,0,"728",,terminal_output +1521,3732397,"TERMINAL",0,0,"salloc: Job 3365095 has exceeded its time limit and its allocation has been revoked.\nslurmstepd: error: *** STEP 3365095.interactive ON hkn0901 CANCELLED AT 2025-07-21T15:27:59 DUE TO TIME LIMIT ***\r\nTerminated\r\n]0;tum_cte0515@hkn0901:~/Projects/jafar[?2004h(jafar) [tum_cte0515@hkn0901 jafar]$ ",,terminal_output +1522,3732643,"TERMINAL",0,0,"srun",,terminal_focus +1523,3732699,"TERMINAL",0,0,"84CG30",,terminal_output +1524,3732699,"TERMINAL",0,0,"srun: Job step aborted: Waiting up to 32 seconds for job step to finish.\r\n",,terminal_output +1525,3733675,"TERMINAL",0,0,"8:005",,terminal_output +1526,3734717,"TERMINAL",0,0,"16",,terminal_output +1527,3735501,"TERMINAL",0,0,"srun",,terminal_focus +1528,3735580,"TERMINAL",0,0,"\r[tum_cte0515@hkn0707 jafar]$ ",,terminal_output +1529,3735757,"TERMINAL",0,0,"27",,terminal_output +1530,3736689,"TERMINAL",0,0,"git diff main > diff.diff",,terminal_output +1531,3736802,"TERMINAL",0,0,"38",,terminal_output +1532,3736865,"TERMINAL",0,0,"",,terminal_output +1533,3737243,"TERMINAL",0,0,"status",,terminal_output +1534,3737616,"TERMINAL",0,0,"# sh slurm/jobs/mihir/horeka/yolo-runs/sampling.sh /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/interactive/shift-spatial",,terminal_output +1535,3737912,"TERMINAL",0,0,"49",,terminal_output +1536,3738902,"TERMINAL",0,0,"540",,terminal_output +1537,3739010,"TERMINAL",0,0,"",,terminal_output +1538,3739960,"TERMINAL",0,0,"61",,terminal_output +1539,3740118,"TERMINAL",0,0,"\r",,terminal_output +1540,3740922,"TERMINAL",0,0," sh slurm/jobs/mihir/horeka/yolo-runs/sampling.sh /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/interactive/shift-spatial\r",,terminal_output +1541,3741004,"TERMINAL",0,0,"72",,terminal_output +1542,3741064,"TERMINAL",0,0,"sh slurm/jobs/mihir/horeka/yolo-runs/sampling.sh /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/interactive/shift-spatial\r",,terminal_output +1543,3742035,"TERMINAL",0,0,"83",,terminal_output +1544,3742515,"TERMINAL",0,0,"#sh slurm/jobs/mihir/horeka/yolo-runs/sampling.sh /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/interactive/shift-spatial\r",,terminal_output +1545,3742730,"TERMINAL",0,0,"[?25l sh slurm/jobs/mihir/horeka/yolo-runs/sampling.sh /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/interactive/shift-spatial\r[?25h",,terminal_output +1546,3743139,"TERMINAL",0,0,"94",,terminal_output 
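Annotation — the utils/nn.py diff captured above makes spatial attention switchable between bidirectional (BERT-style) and causal via a lower-triangular mask. A minimal, self-contained sketch of that masking logic, reusing the names from the diff (illustrative only, not the repository's exact module):

import jax.numpy as jnp
import flax.linen as nn

class SpatialAttentionSketch(nn.Module):
    dim: int
    num_heads: int
    spatial_bert: bool = True  # True: bidirectional (BERT-style); False: causal

    @nn.compact
    def __call__(self, z: jnp.ndarray) -> jnp.ndarray:
        # jnp.tri(L) is a lower-triangular (L, L) matrix, i.e. a causal mask
        # over the last-but-one axis; mask=None keeps full bidirectional
        # attention, exactly as the recorded diff does in STBlock.
        spatial_mask = None if self.spatial_bert else jnp.tri(z.shape[-2])
        return nn.MultiHeadAttention(
            num_heads=self.num_heads,
            qkv_features=self.dim,
        )(z, mask=spatial_mask)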
+1547,3743342,"TERMINAL",0,0,"[?25l\r\r\n[?2004l\r]0;tum_cte0515@hkn0707:~/Projects/jafar[?2004h[tum_cte0515@hkn0707 jafar]$ [?25h",,terminal_output +1548,3743854,"TERMINAL",0,0,"s",,terminal_output +1549,3743960,"TERMINAL",0,0,"[?25lo[?25h",,terminal_output +1550,3744025,"TERMINAL",0,0,"[?25lu[?25h",,terminal_output +1551,3744126,"TERMINAL",0,0,"[?25lr[?25h",,terminal_output +1552,3744137,"TERMINAL",0,0,"105",,terminal_output +1553,3744299,"TERMINAL",0,0,"[?25lc[?25h",,terminal_output +1554,3744421,"TERMINAL",0,0,"[?25le[?25h",,terminal_output +1555,3744529,"TERMINAL",0,0,"[?25l [?25h",,terminal_output +1556,3744591,"TERMINAL",0,0,"[?25l.[?25h",,terminal_output +1557,3744691,"TERMINAL",0,0,"[?25lv[?25h",,terminal_output +1558,3744922,"TERMINAL",0,0,"env/",,terminal_output +1559,3745169,"TERMINAL",0,0,"16",,terminal_output +1560,3745685,"TERMINAL",0,0,"[?25lb[?25h",,terminal_output +1561,3745748,"TERMINAL",0,0,"in/",,terminal_output +1562,3745987,"TERMINAL",0,0,"[?25la[?25h",,terminal_output +1563,3746220,"TERMINAL",0,0,"27",,terminal_output +1564,3746420,"TERMINAL",0,0,"[?25lc[?25h",,terminal_output +1565,3746747,"TERMINAL",0,0,"tivate",,terminal_output +1566,3747245,"TERMINAL",0,0,"\r\n[?2004l\r]0;tum_cte0515@hkn0707:~/Projects/jafar[?2004h(jafar) [tum_cte0515@hkn0707 jafar]$ ",,terminal_output +1567,3747297,"TERMINAL",0,0,"38",,terminal_output +1568,3747577,"TERMINAL",0,0,"source .venv/bin/activate",,terminal_output +1569,3747759,"TERMINAL",0,0,"# sh slurm/jobs/mihir/horeka/yolo-runs/sampling.sh /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/interactive/shift-spatial",,terminal_output +1570,3748181,"TERMINAL",0,0,"",,terminal_output +1571,3748314,"TERMINAL",0,0,"49",,terminal_output +1572,3749304,"TERMINAL",0,0,"\r",,terminal_output +1573,3749366,"TERMINAL",0,0,"551",,terminal_output +1574,3749663,"TERMINAL",0,0," sh slurm/jobs/mihir/horeka/yolo-runs/sampling.sh /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/interactive/shift-spatial\r",,terminal_output +1575,3749785,"TERMINAL",0,0,"sh slurm/jobs/mihir/horeka/yolo-runs/sampling.sh /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/interactive/shift-spatial\r",,terminal_output +1576,3750177,"TERMINAL",0,0,"[?25l\r\r\n[?2004l\r[?25h\r\n# Log the sbatch script\r\ncat $0\r\n\r\nmodule unload mpi/openmpi/5.0\r\nmodule unload devel/cuda/12.4\r\n# source .venv/bin/activate\r\n\r\narray_records_dir=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_new/open_ai_minecraft_arrayrecords_chunked\r\n\r\njob_name=$SLURM_JOB_NAME\r\nslurm_job_id=$SLURM_JOB_ID\r\n\r\nCHECKPOINT_DIR=$ws_dir/checkpoints/$job_name/$slurm_job_id\r\nmkdir -p $CHECKPOINT_DIR\r\n\r\ntokenizer_ckpt_dir=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/big-runs/tokenizer-lr-scaling/train_tokenizer_lr_sweep_1e-4\r\ndynamics_ckpt_dir=$1\r\necho $dynamics_ckpt_dir\r\n\r\nenv | grep SLURM\r\n\r\npython sample.py \\r\n --checkpoint $dynamics_ckpt_dir \\r\n --dyna_dim=128 \\r\n --dyna_num_blocks=2 \\r\n --dyna_num_heads=4 \\r\n --seq_len=2 \\r\n --data_dir $array_records_dir\r\n\r\n",,terminal_output 
+1577,3750325,"TERMINAL",0,0,"/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/interactive/shift-spatial\r\nSLURM_STEP_NUM_TASKS=1\r\nSLURM_JOB_USER=tum_cte0515\r\nSLURM_TASKS_PER_NODE=1\r\nSLURM_JOB_UID=999226\r\nSLURM_TASK_PID=1123584\r\nSLURM_JOB_GPUS=3\r\nSLURM_LOCALID=0\r\nSLURM_SUBMIT_DIR=/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar\r\nSLURMD_NODENAME=hkn0707\r\nSLURM_JOB_START_TIME=1753100845\r\nSLURM_STEP_NODELIST=hkn0707\r\nSLURM_CLUSTER_NAME=hk\r\nSLURM_JOB_END_TIME=1753136845\r\nSLURM_PMI2_SRUN_PORT=33481\r\nSLURM_CPUS_ON_NODE=6\r\nSLURM_JOB_CPUS_PER_NODE=6\r\nSLURM_GPUS_ON_NODE=1\r\nSLURM_GTIDS=0\r\nSLURM_JOB_PARTITION=accelerated\r\nSLURM_TRES_PER_TASK=cpu=5\r\nSLURM_OOM_KILL_STEP=0\r\nSLURM_JOB_NUM_NODES=1\r\nSLURM_STEPID=4294967290\r\nSLURM_JOBID=3365091\r\nSLURM_PTY_PORT=35923\r\nSLURM_JOB_QOS=normal\r\nSLURM_LAUNCH_NODE_IPADDR=10.0.7.198\r\nSLURM_PTY_WIN_ROW=38\r\nSLURM_PMI2_PROC_MAPPING=(vector,(0,1,1))\r\nSLURMD_DEBUG=2\r\nSLURM_PROCID=0\r\nSLURM_CPUS_PER_TASK=5\r\nSLURM_NTASKS=1\r\nSLURM_TOPOLOGY_ADDR=hkibb.hkibbi1.hkibbi1e8.hkn0707\r\nSLURM_TOPOLOGY_ADDR_PATTERN=switch.switch.switch.node\r\nSLURM_SRUN_COMM_HOST=10.0.7.198\r\nSLURM_SCRIPT_CONTEXT=prolog_task\r\nSLURM_PTY_WIN_COL=197\r\nSLURM_NODELIST=hkn0707\r\nSLURM_SRUN_COMM_PORT=32915\r\nSLURM_STEP_ID=4294967290\r\nSLURM_JOB_ACCOUNT=hk-project-p0023960\r\nSLURM_PRIO_PROCESS=0\r\nSLURM_NPROCS=1\r\nSLURM_NNODES=1\r\nSLURM_SUBMIT_HOST=hkn1990.localdomain\r\nSLURM_JOB_ID=3365091\r\nSLURM_NODEID=0\r\nSLURM_STEP_NUM_NODES=1\r\nSLURM_STEP_TASKS_PER_NODE=1\r\nSLURM_MPI_TYPE=pmi2\r\nSLURM_PMI2_STEP_NODES=hkn0707\r\nSLURM_CONF=/etc/slurm/slurm.conf\r\nSLURM_JOB_NAME=interactive\r\nSLURM_NTASKS_PER_NODE=1\r\nSLURM_STEP_LAUNCHER_PORT=32915\r\nSLURM_JOB_GID=502226\r\nSLURM_JOB_NODELIST=hkn0707\r\n",,terminal_output +1578,3750430,"TERMINAL",0,0,"72",,terminal_output +1579,3751464,"TERMINAL",0,0,"83",,terminal_output +1580,3751722,"TERMINAL",0,0,"watch",,terminal_focus +1581,3752512,"TERMINAL",0,0,"94",,terminal_output +1582,3752766,"TERMINAL",0,0,"[?1049l\r[?1l>]0;tum_cte0515@hkn1990:/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/interactive/shift-spatial]633;D;0",,terminal_output +1583,3753544,"TERMINAL",0,0,"srun",,terminal_focus +1584,3759875,"TERMINAL",0,0,"srun",,terminal_focus +1585,3761983,"TERMINAL",0,0,"^C[?2004l\r[?2004h[?2004l\r\r\n]0;tum_cte0515@hkn0901:~/Projects/jafar[?2004h(jafar) [tum_cte0515@hkn0901 jafar]$ ",,terminal_output +1586,3762018,"TERMINAL",0,0,"srun: error: hkn0901: task 0: Killed\r\n]0;tum_cte0515@hkn1990:~/Projects/jafar]633;D;137",,terminal_output +1587,3762070,"TERMINAL",0,0,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/tyro/_parsers.py:347: UserWarning: The field `param-dtype` is annotated with type ``, but the default value `` has type ``. We'll try to handle this gracefully, but it may cause unexpected behavior.\r\n warnings.warn(message)\r\n/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/tyro/_parsers.py:347: UserWarning: The field `dtype` is annotated with type ``, but the default value `` has type ``. 
We'll try to handle this gracefully, but it may cause unexpected behavior.\r\n warnings.warn(message)\r\n",,terminal_output +1588,3763610,"TERMINAL",0,0,"idling",,terminal_command +1589,3763684,"TERMINAL",0,0,"]633;E;2025-07-21 15:28:30 idling;ae54d867-0567-4fb2-95c9-86b7932c267e]633;C[?1049h(B[?7hEvery 1.0s: sinfo_t_idlehkn1990.localdomain: Mon Jul 21 15:28:30 2025Partition dev_cpuonly:\t 7 nodes idle\rPartition cpuonly: 65 nodes idle\rPartition dev_accelerated:\t 1 nodes idle\rPartition accelerated:\t 2 nodes idle\rPartition dev_accelerated-h100 :\t 0 nodes idle\rPartition accelerated-h100:\t 0 nodes idle\rPartition large:\t 7 nodes idle",,terminal_output +1590,3764742,"TERMINAL",0,0,"2025-07-21 15:28:31.383504: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +1591,3764743,"TERMINAL",0,0,"11",,terminal_output +1592,3765766,"TERMINAL",0,0,"2",,terminal_output +1593,3766808,"TERMINAL",0,0,"3",,terminal_output +1594,3767844,"TERMINAL",0,0,"4",,terminal_output +1595,3768939,"TERMINAL",0,0,"5",,terminal_output +1596,3769942,"TERMINAL",0,0,"6",,terminal_output +1597,3770954,"TERMINAL",0,0,"7",,terminal_output +1598,3771996,"TERMINAL",0,0,"8",,terminal_output +1599,3773141,"TERMINAL",0,0,"9",,terminal_output +1600,3774165,"TERMINAL",0,0,"40",,terminal_output +1601,3775151,"TERMINAL",0,0,"1",,terminal_output +1602,3775553,"TERMINAL",0,0,"salloc",,terminal_focus +1603,3776164,"TERMINAL",0,0,"2",,terminal_output +1604,3776375,"TERMINAL",0,0,"2025-07-21 15:28:43.041745: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +1605,3776808,"TERMINAL",0,0,"srun",,terminal_focus +1606,3777201,"TERMINAL",0,0,"3",,terminal_output +1607,3777528,"diff.diff",0,0,"",diff,tab +1608,3778245,"TERMINAL",0,0,"4",,terminal_output +1609,3779283,"TERMINAL",0,0,"5",,terminal_output +1610,3779952,"TERMINAL",0,0,"watch",,terminal_focus +1611,3780375,"TERMINAL",0,0,"6",,terminal_output +1612,3780698,"TERMINAL",0,0,"2025-07-21 15:28:47.367099: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +1613,3781431,"TERMINAL",0,0,"7",,terminal_output +1614,3782356,"TERMINAL",0,0,"2025-07-21 15:28:49.009225: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. 
Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +1615,3782415,"TERMINAL",0,0,"9",,terminal_output +1616,3782793,"TERMINAL",0,0,"[?1049l\r[?1l>]0;tum_cte0515@hkn1990:~/Projects/jafar]633;D;0",,terminal_output +1617,3785432,"TERMINAL",0,0,"git status",,terminal_command +1618,3785480,"TERMINAL",0,0,"]633;E;2025-07-21 15:28:52 git status;ae54d867-0567-4fb2-95c9-86b7932c267e]633;C",,terminal_output +1619,3785507,"TERMINAL",0,0,"On branch new-arch-sampling\r\nChanges not staged for commit:\r\n (use ""git add ..."" to update what will be committed)\r\n (use ""git restore ..."" to discard changes in working directory)\r\n\tmodified: genie.py\r\n\tmodified: models/dynamics.py\r\n\tmodified: models/lam.py\r\n\tmodified: train_dynamics.py\r\n\tmodified: utils/nn.py\r\n\r\nUntracked files:\r\n (use ""git add ..."" to include in what will be committed)\r\n\tdiff.diff\r\n\tdiff.log\r\n\tlogs/\r\n\tread_tf_record.py\r\n\trequirements-franz.txt\r\n\tscripts_cremers/\r\n\tscripts_horeka/\r\n\tslurm-3359333.out\r\n\tslurm-3359334.out\r\n\tslurm-3359338.out\r\n\tslurm/\r\n\tutils/logger_bak.py\r\n\tutils/visualizer.py\r\n\tweekend-job-requeuer.sh\r\n\tweekend-job-starter.sh\r\n\r\nno changes added to commit (use ""git add"" and/or ""git commit -a"")\r\n]0;tum_cte0515@hkn1990:~/Projects/jafar]633;D;0",,terminal_output +1620,3786872,"TERMINAL",0,0,"2025-07-21 15:28:53.520973: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +1621,3787289,"TERMINAL",0,0,"git branch",,terminal_command +1622,3787340,"TERMINAL",0,0,"]633;E;2025-07-21 15:28:53 git branch;ae54d867-0567-4fb2-95c9-86b7932c267e]633;C[?1h=\r",,terminal_output +1623,3787494,"TERMINAL",0,0," add-wandb-name-and-tags\r\n causal-transformer-dynamics-model\r\n convert-to-jax-array-in-iter\r\n correct-batched-sampling\r\n dev\r\n dont-let-tf-see-gpu\r\n feat/explicit-image-dims\r\n fix-sampling\r\n grad-norm-log-and-clip\r\n grain-dataloader\r\n logging-variants\r\n lr-schedules\r\n main\r\n metrics-logging-for-dynamics-model\r\n monkey-patch\r\n* new-arch-sampling\r\n preprocess_video\r\n refactor-tmp\r\n revised-dataloader\r\n runner\r\n runner-grain\r\n sample-from-different-topologies\r\n speedup-tfrecord-preprocessing\r\n tmp\r\n",,terminal_output +1624,3793063,"TERMINAL",0,0,"2025-07-21 15:28:59.597006: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +1625,3800174,"TERMINAL",0,0,"2025-07-21 15:29:06.836584: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. 
Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +1626,3803345,"TERMINAL",0,0,"WARNING:absl:Missing metrics for step 900\r\nERROR:absl:File /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/interactive/shift-spatial/000900/metrics/metrics not found.\r\nWARNING:absl:Missing metrics for step 1000\r\nERROR:absl:File /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/interactive/shift-spatial/001000/metrics/metrics not found.\r\nWARNING:absl:Missing metrics for step 800\r\nERROR:absl:File /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/interactive/shift-spatial/000800/metrics/metrics not found.\r\n",,terminal_output +1627,3810807,"TERMINAL",0,0,"autoreg sampling...\r\n",,terminal_output +1628,3820260,"TERMINAL",0,0,"git status",,terminal_command +1629,3820268,"TERMINAL",0,0,"]633;E;2025-07-21 15:29:26 git status;ae54d867-0567-4fb2-95c9-86b7932c267e]633;COn branch new-arch-sampling\r\nChanges not staged for commit:\r\n (use ""git add ..."" to update what will be committed)\r\n (use ""git restore ..."" to discard changes in working directory)\r\n\tmodified: genie.py\r\n\tmodified: models/dynamics.py\r\n\tmodified: models/lam.py\r\n\tmodified: train_dynamics.py\r\n\tmodified: utils/nn.py\r\n\r\nUntracked files:\r\n (use ""git add ..."" to include in what will be committed)\r\n\tdiff.diff\r\n\tdiff.log\r\n\tlogs/\r\n\tread_tf_record.py\r\n\trequirements-franz.txt\r\n\tscripts_cremers/\r\n\tscripts_horeka/\r\n\tslurm-3359333.out\r\n\tslurm-3359334.out\r\n\tslurm-3359338.out\r\n\tslurm/\r\n\tutils/logger_bak.py\r\n\tutils/visualizer.py\r\n\tweekend-job-requeuer.sh\r\n\tweekend-job-starter.sh\r\n\r\nno changes added to commit (use ""git add"" and/or ""git commit -a"")\r\n]0;tum_cte0515@hkn1990:~/Projects/jafar]633;D;0",,terminal_output +1630,3825788,"TERMINAL",0,0,"git stash",,terminal_command +1631,3825829,"TERMINAL",0,0,"]633;E;2025-07-21 15:29:32 git stash;ae54d867-0567-4fb2-95c9-86b7932c267e]633;C",,terminal_output +1632,3826436,"TERMINAL",0,0,"Saved working directory and index state WIP on new-arch-sampling: c7cd6b2 trying to sample\r\n",,terminal_output +1633,3826607,"TERMINAL",0,0,"]0;tum_cte0515@hkn1990:~/Projects/jafar]633;D;0",,terminal_output +1634,3832507,"TERMINAL",0,0,"git checkout main",,terminal_command +1635,3832558,"TERMINAL",0,0,"]633;E;2025-07-21 15:29:39 git checkout main;ae54d867-0567-4fb2-95c9-86b7932c267e]633;C",,terminal_output +1636,3832634,"TERMINAL",0,0,"Switched to branch 'main'\r\nYour branch is up to date with 'origin/main'.\r\n]0;tum_cte0515@hkn1990:~/Projects/jafar]633;D;0",,terminal_output +1637,3833998,"TERMINAL",0,0,"git pull",,terminal_command +1638,3834044,"TERMINAL",0,0,"]633;E;2025-07-21 15:29:40 git pull;ae54d867-0567-4fb2-95c9-86b7932c267e]633;C",,terminal_output +1639,3835546,"",0,0,"Switched from branch 'new-arch-sampling' to 'main'",,git_branch_checkout +1640,3835888,"TERMINAL",0,0,"remote: Enumerating objects: 104, done.\r\nremote: Counting objects: 0% (1/101)\rremote: Counting objects: 1% (2/101)\rremote: Counting objects: 2% (3/101)\rremote: Counting objects: 3% (4/101)\rremote: Counting objects: 4% (5/101)\rremote: Counting objects: 5% (6/101)\rremote: Counting objects: 6% (7/101)\rremote: Counting objects: 7% (8/101)\rremote: Counting objects: 8% (9/101)\rremote: Counting objects: 9% (10/101)\rremote: Counting objects: 10% (11/101)\rremote: Counting objects: 11% (12/101)\rremote: Counting 
objects: 100% (101/101)\rremote: 
Counting objects: 100% (101/101), done.\r\n",,terminal_output +1641,3836063,"TERMINAL",0,0,"remote: Compressing objects: 100% (41/41), done.\r\nremote: Total 74 (delta 49), reused 53 (delta 33), pack-reused 0 (from 0)\r\n",,terminal_output +1645,3836778,"TERMINAL",0,0,"Unpacking objects: 100% (74/74), 14.49 KiB | 18.00 KiB/s, done.\r\n",,terminal_output +1646,3836936,"TERMINAL",0,0,"From github.com:p-doom/jafar\r\n 3391a39..c68e03e main -> origin/main\r\n",,terminal_output +1647,3837038,"TERMINAL",0,0," * [new branch] action-mapper -> origin/action-mapper\r\n * [new branch] activation-checkpointing-dots-with-no-batch-dim -> origin/activation-checkpointing-dots-with-no-batch-dim\r\n 6cb379a..f08eeb4 cudnn-flash-attn-mixed-precision-4 -> origin/cudnn-flash-attn-mixed-precision-4\r\n * [new branch] distinct-ffn-dim -> origin/distinct-ffn-dim\r\n * [new branch] dynamics-gt-actions -> origin/dynamics-gt-actions\r\n * [new branch] fix-dtype-hint-warning -> origin/fix-dtype-hint-warning\r\n * [new branch] input-pipeline-download-action-files -> origin/input-pipeline-download-action-files\r\n * [new branch] omit-dataloader-restore -> origin/omit-dataloader-restore\r\n * [new branch] revert-partial-remat -> origin/revert-partial-remat\r\n",,terminal_output +1648,3837151,"TERMINAL",0,0,"Updating 3391a39..c68e03e\r\n",,terminal_output +1649,3837421,"TERMINAL",0,0,"Fast-forward\r\n",,terminal_output +1650,3837528,"TERMINAL",0,0," genie.py | 10 ++--\r\n input_pipeline/download/openai/download_actions_files.py | 90 ++++++++++++++++++++++++++++++++\r\n models/dynamics.py | 2 +\r\n models/lam.py | 3 ++\r\n models/tokenizer.py | 3 ++\r\n requirements.txt | 2 +-\r\n sample.py | 2 +\r\n train_dynamics.py | 2 +\r\n train_lam.py | 2 +\r\n train_tokenizer.py | 2 +\r\n utils/nn.py | 70 ++++++++++++++++++++++++-\r\n 11 files changed, 182 insertions(+), 6 deletions(-)\r\n create mode 100644 input_pipeline/download/openai/download_actions_files.py\r\n]0;tum_cte0515@hkn1990:~/Projects/jafar]633;D;0",,terminal_output +1651,3839724,"TERMINAL",0,0,"git status",,terminal_command +1652,3839757,"TERMINAL",0,0,"]633;E;2025-07-21 15:29:46 git status;ae54d867-0567-4fb2-95c9-86b7932c267e]633;COn branch main\r\nYour branch is up to date with 'origin/main'.\r\n\r\nUntracked 
files:\r\n (use ""git add ..."" to include in what will be committed)\r\n\tdiff.diff\r\n\tdiff.log\r\n\tlogs/\r\n\tread_tf_record.py\r\n\trequirements-franz.txt\r\n\tscripts_cremers/\r\n\tscripts_horeka/\r\n\tslurm-3359333.out\r\n\tslurm-3359334.out\r\n\tslurm-3359338.out\r\n\tslurm/\r\n\tutils/logger_bak.py\r\n\tutils/visualizer.py\r\n\tweekend-job-requeuer.sh\r\n\tweekend-job-starter.sh\r\n\r\nnothing added to commit but untracked files present (use ""git add"" to track)\r\n]0;tum_cte0515@hkn1990:~/Projects/jafar]633;D;0",,terminal_output +1653,3841820,"TERMINAL",0,0,"git branch",,terminal_command +1654,3841830,"TERMINAL",0,0,"]633;E;2025-07-21 15:29:48 git branch;ae54d867-0567-4fb2-95c9-86b7932c267e]633;C[?1h=\r add-wandb-name-and-tags\r\n causal-transformer-dynamics-model\r\n convert-to-jax-array-in-iter\r\n correct-batched-sampling\r\n dev\r\n dont-let-tf-see-gpu\r\n feat/explicit-image-dims\r\n fix-sampling\r\n grad-norm-log-and-clip\r\n grain-dataloader\r\n logging-variants\r\n lr-schedules\r\n* main\r\n metrics-logging-for-dynamics-model\r\n monkey-patch\r\n new-arch-sampling\r\n preprocess_video\r\n refactor-tmp\r\n revised-dataloader\r\n runner\r\n runner-grain\r\n sample-from-different-topologies\r\n speedup-tfrecord-preprocessing\r\n tmp\r\n\r[?1l>]0;tum_cte0515@hkn1990:~/Projects/jafar]633;D;0]633;P;Cwd=/home/hk-project-p0023960/tum_cte0515/Projects/jafar[?2004h",,terminal_output +1655,3854612,"TERMINAL",0,0,"git checkout new-arch-sampling",,terminal_command +1656,3854657,"TERMINAL",0,0,"]633;E;2025-07-21 15:30:01 git checkout new-arch-sampling;ae54d867-0567-4fb2-95c9-86b7932c267e]633;C",,terminal_output +1657,3854730,"TERMINAL",0,0,"Switched to branch 'new-arch-sampling'\r\n]0;tum_cte0515@hkn1990:~/Projects/jafar]633;D;0",,terminal_output +1658,3855548,"",0,0,"Switched from branch 'main' to 'new-arch-sampling'",,git_branch_checkout +1659,3864824,"TERMINAL",0,0,"git branch",,terminal_command +1660,3864860,"TERMINAL",0,0,"]633;E;2025-07-21 15:30:11 git branch;ae54d867-0567-4fb2-95c9-86b7932c267e]633;C[?1h=\r add-wandb-name-and-tags\r\n causal-transformer-dynamics-model\r\n convert-to-jax-array-in-iter\r\n correct-batched-sampling\r\n dev\r\n dont-let-tf-see-gpu\r\n feat/explicit-image-dims\r\n fix-sampling\r\n grad-norm-log-and-clip\r\n grain-dataloader\r\n logging-variants\r\n lr-schedules\r\n main\r\n metrics-logging-for-dynamics-model\r\n monkey-patch\r\n* new-arch-sampling\r\n preprocess_video\r\n refactor-tmp\r\n revised-dataloader\r\n runner\r\n runner-grain\r\n sample-from-different-topologies\r\n speedup-tfrecord-preprocessing\r\n tmp\r\n",,terminal_output +1661,3864883,"TERMINAL",0,0,"\r[?1l>]0;tum_cte0515@hkn1990:~/Projects/jafar]633;D;0",,terminal_output +1662,3877833,"TERMINAL",0,0,"git branch",,terminal_command +1663,3877846,"TERMINAL",0,0,"]633;E;2025-07-21 15:30:24 git branch;ae54d867-0567-4fb2-95c9-86b7932c267e]633;C[?1h=\r add-wandb-name-and-tags\r\n causal-transformer-dynamics-model\r\n convert-to-jax-array-in-iter\r\n correct-batched-sampling\r\n dev\r\n dont-let-tf-see-gpu\r\n feat/explicit-image-dims\r\n fix-sampling\r\n grad-norm-log-and-clip\r\n grain-dataloader\r\n logging-variants\r\n lr-schedules\r\n main\r\n metrics-logging-for-dynamics-model\r\n monkey-patch\r\n* new-arch-sampling\r\n preprocess_video\r\n refactor-tmp\r\n revised-dataloader\r\n runner\r\n runner-grain\r\n sample-from-different-topologies\r\n speedup-tfrecord-preprocessing\r\n tmp\r\n\r[?1l>]0;tum_cte0515@hkn1990:~/Projects/jafar]633;D;0",,terminal_output 
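Annotation — the git merge main that follows conflicts in genie.py, models/dynamics.py, and utils/nn.py. The resolutions recorded below keep the working branch's use_maskgit switch while threading main's use_flash_attention flag through. Roughly, the resolved Genie.setup selects the dynamics model as sketched here; this assumes both dynamics constructors end up accepting the flag, which the recording is still in the middle of wiring up:

# Sketch of the resolved Genie.setup (method fragment, not standalone code).
def setup(self):
    common = dict(
        model_dim=self.dyna_dim,
        num_latents=self.num_patch_latents,
        num_blocks=self.dyna_num_blocks,
        num_heads=self.dyna_num_heads,
        dropout=self.dropout,
        param_dtype=self.param_dtype,
        dtype=self.dtype,
        use_flash_attention=self.use_flash_attention,
    )
    if self.use_maskgit:
        # The MaskGIT variant additionally needs the masking-schedule limit.
        self.dynamics = DynamicsMaskGIT(mask_limit=self.mask_limit, **common)
    else:
        self.dynamics = DynamicsAutoregressive(**common)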
+1664,3887505,"TERMINAL",0,0,"git merge main",,terminal_command +1665,3887556,"TERMINAL",0,0,"]633;E;2025-07-21 15:30:34 git merge main;ae54d867-0567-4fb2-95c9-86b7932c267e]633;C",,terminal_output +1666,3887694,"TERMINAL",0,0,"Auto-merging genie.py\r\nCONFLICT (content): Merge conflict in genie.py\r\nAuto-merging models/dynamics.py\r\nCONFLICT (content): Merge conflict in models/dynamics.py\r\nAuto-merging sample.py\r\nAuto-merging train_dynamics.py\r\nAuto-merging utils/nn.py\r\nCONFLICT (content): Merge conflict in utils/nn.py\r\nAutomatic merge failed; fix conflicts and then commit the result.\r\n]0;tum_cte0515@hkn1990:~/Projects/jafar]633;D;1]633;P;Cwd=/home/hk-project-p0023960/tum_cte0515/Projects/jafar",,terminal_output +1667,3895654,"genie.py",0,0,"from typing import Dict, Any\n\nimport optax\nimport jax\nimport jax.numpy as jnp\nimport flax.linen as nn\nfrom flax.training.train_state import TrainState\nimport orbax.checkpoint as ocp\n\nfrom models.dynamics import DynamicsMaskGIT, DynamicsAutoregressive\nfrom models.lam import LatentActionModel\nfrom models.tokenizer import TokenizerVQVAE\n\nimport grain\n\n\nclass Genie(nn.Module):\n """"""Genie model""""""\n\n # --- Tokenizer ---\n in_dim: int\n tokenizer_dim: int\n latent_patch_dim: int\n num_patch_latents: int\n patch_size: int\n tokenizer_num_blocks: int\n tokenizer_num_heads: int\n # --- LAM ---\n lam_dim: int\n latent_action_dim: int\n num_latent_actions: int\n lam_patch_size: int\n lam_num_blocks: int\n lam_num_heads: int\n lam_co_train: bool\n # --- Dynamics ---\n dyna_dim: int\n dyna_num_blocks: int\n dyna_num_heads: int\n use_maskgit: bool\n param_dtype: jnp.dtype\n dtype: jnp.dtype\n use_flash_attention: bool\n dropout: float = 0.0\n mask_limit: float = 0.0\n\n def setup(self):\n self.tokenizer = TokenizerVQVAE(\n in_dim=self.in_dim,\n model_dim=self.tokenizer_dim,\n latent_dim=self.latent_patch_dim,\n num_latents=self.num_patch_latents,\n patch_size=self.patch_size,\n num_blocks=self.tokenizer_num_blocks,\n num_heads=self.tokenizer_num_heads,\n dropout=0.0,\n codebook_dropout=0.0,\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n use_flash_attention=self.use_flash_attention,\n )\n self.lam = LatentActionModel(\n in_dim=self.in_dim,\n model_dim=self.lam_dim,\n latent_dim=self.latent_patch_dim,\n num_latents=self.num_latent_actions,\n patch_size=self.lam_patch_size,\n num_blocks=self.lam_num_blocks,\n num_heads=self.lam_num_heads,\n dropout=0.0,\n codebook_dropout=0.0,\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n use_flash_attention=self.use_flash_attention,\n )\n<<<<<<< HEAD\n\n if self.use_maskgit:\n self.dynamics = DynamicsMaskGIT(\n model_dim=self.dyna_dim,\n num_latents=self.num_patch_latents,\n num_blocks=self.dyna_num_blocks,\n num_heads=self.dyna_num_heads,\n dropout=self.dropout,\n mask_limit=self.mask_limit,\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n ) \n else:\n self.dynamics = DynamicsAutoregressive(\n model_dim=self.dyna_dim,\n num_latents=self.num_patch_latents,\n num_blocks=self.dyna_num_blocks,\n num_heads=self.dyna_num_heads,\n dropout=self.dropout,\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n )\n=======\n self.dynamics = DynamicsMaskGIT(\n model_dim=self.dyna_dim,\n num_latents=self.num_patch_latents,\n num_blocks=self.dyna_num_blocks,\n num_heads=self.dyna_num_heads,\n dropout=self.dropout,\n mask_limit=self.mask_limit,\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n use_flash_attention=self.use_flash_attention,\n )\n>>>>>>> main\n\n def __call__(self, batch: 
Dict[str, Any], training: bool = True) -> Dict[str, Any]:\n tokenizer_outputs = self.tokenizer.vq_encode(batch[""videos""], training=False)\n lam_outputs = self.lam.vq_encode(batch[""videos""], training=False)\n latent_actions = jax.lax.cond(\n self.lam_co_train,\n lambda: lam_outputs[""z_q""],\n lambda: jax.lax.stop_gradient(lam_outputs[""z_q""])\n )\n outputs = dict(\n video_tokens=jax.lax.stop_gradient(tokenizer_outputs[""indices""]),\n latent_actions=latent_actions,\n )\n outputs[""mask_rng""] = batch[""mask_rng""]\n dyna_outputs = self.dynamics(outputs, training)\n outputs.update(dyna_outputs)\n mle_indices = jnp.argmax(outputs[""token_logits""], axis=-1)\n outputs[""recon""] = self.tokenizer.decode(\n mle_indices, batch[""videos""].shape[2:4]\n )\n outputs[""lam_indices""] = lam_outputs[""indices""]\n return outputs\n\n\n def sample_causal(\n self,\n batch: Dict[str, Any],\n seq_len: int,\n temperature: float = 1,\n sample_argmax: bool = False,\n ):\n """"""\n Autoregressively samples up to `seq_len` future frames using the causal transformer backend.\n\n - Input frames are tokenized once.\n - Future frames are generated one at a time, each conditioned on all previous frames.\n - All frames are detokenized in a single pass at the end.\n\n Args:\n batch: Dict with at least ""videos"" (B, T, H, W, C)\n seq_len: total number of frames to generate (including context)\n temperature: sampling temperature\n sample_argmax: if True, use argmax instead of sampling\n\n Returns:\n Generated video frames (B, seq_len, H, W, C)\n """"""\n # --- Encode context frames ---\n tokenizer_out = self.tokenizer.vq_encode(batch[""videos""], training=False)\n token_idxs = tokenizer_out[""indices""] # (B, T, N)\n B, T, N = token_idxs.shape\n\n # jax.debug.print(""token_idxs shape: {}"", token_idxs.shape)\n # --- Prepare initial token sequence ---\n # Pad with zeros for future frames\n pad_shape = (B, seq_len - T, N)\n token_idxs_full = jnp.concatenate(\n [token_idxs, jnp.zeros(pad_shape, dtype=token_idxs.dtype)], axis=1\n ) # (B, seq_len, N)\n\n # --- Prepare latent actions ---\n action_tokens = self.lam.vq.get_codes(batch[""latent_actions""]) # (B, S-1, )\n # --- Autoregressive generation loop ---\n rng = batch[""rng""]\n for t in range(T, seq_len):\n for n in range(30):\n dyna_inputs = {\n ""video_tokens"": token_idxs_full,\n ""latent_actions"": action_tokens\n }\n # jax.debug.print(""token_idxs_full 0: {}"", token_idxs_full[0,:,0])\n dyna_outputs = self.dynamics(dyna_inputs, training=False)\n # # dyna_outputs[""token_logits""]: (B, t, N, vocab_size)\n # # We want the logits for the last time step (frame t-1 predicting t)\n next_token_logits = dyna_outputs[""token_logits""][:, t-1, n, :].astype(jnp.float32) # (B, 1, vocab_size)\n\n # Sample or argmax for each patch\n if sample_argmax:\n next_token = jnp.argmax(next_token_logits, axis=-1) # (B, 1)\n else:\n rng, step_rng = jax.random.split(rng)\n next_token = jax.random.categorical(\n step_rng, next_token_logits / temperature, axis=-1\n ) # (B, 1)\n\n # Insert the generated tokens into the sequence\n token_idxs_full = token_idxs_full.at[:, t, n].set(next_token)\n\n # --- Decode all tokens at once at the end ---\n final_frames = self.tokenizer.decode(\n token_idxs_full, video_hw=batch[""videos""].shape[2:4]\n )\n return final_frames\n\n\n @nn.compact\n def sample_maskgit(\n self,\n batch: Dict[str, Any],\n seq_len: int,\n steps: int = 25,\n temperature: float = 1,\n sample_argmax: bool = False,\n ) -> Any:\n """"""\n Autoregressively samples up to `seq_len` 
future frames, following Figure 8 of the paper.\n\n - Input frames are tokenized once.\n - Future frames are generated autoregressively in token space.\n - All frames are detokenized in a single pass.\n\n Note:\n - For interactive or step-wise sampling, detokenization should occur after each action.\n - To maintain consistent tensor shapes across timesteps, all current and future frames are decoded at every step.\n - Temporal causal structure is preserved by \n a) reapplying the mask before each decoding step.\n b) a temporal causal mask is applied within each ST-transformer block.\n\n Dimension keys:\n B: batch size \n T: number of input (conditioning) frames \n N: patches per frame \n S: sequence length \n A: action space \n D: model latent dimension\n """"""\n # --- Encode videos and actions ---\n tokenizer_out = self.tokenizer.vq_encode(batch[""videos""], training=False)\n token_idxs = tokenizer_out[""indices""] # (B, T, N)\n B, T, N = token_idxs.shape\n pad_shape = (B, seq_len - T, N)\n pad = jnp.zeros(pad_shape, dtype=token_idxs.dtype)\n token_idxs = jnp.concatenate([token_idxs, pad], axis=1) # (B, S, N)\n action_tokens = self.lam.vq.get_codes(batch[""latent_actions""]) \n\n MaskGITLoop = nn.scan(\n MaskGITStep,\n variable_broadcast=""params"",\n split_rngs={""params"": False},\n in_axes=0,\n out_axes=0,\n length=steps,\n )\n \n loop_fn = MaskGITLoop(\n dynamics=self.dynamics,\n tokenizer=self.tokenizer,\n temperature=temperature,\n sample_argmax=sample_argmax,\n steps=steps,\n )\n\n def generation_step_fn(carry, step_t):\n rng, current_token_idxs = carry\n rng, step_rng = jax.random.split(rng)\n\n # Mask current and future frames (i.e., t >= step_t)\n mask = jnp.arange(seq_len) >= step_t # (S,)\n mask = jnp.broadcast_to(mask[None, :, None], (B, seq_len, N)) # (B, S, N)\n mask = mask.astype(bool)\n masked_token_idxs = current_token_idxs * ~mask\n\n # --- Initialize and run MaskGIT loop ---\n init_carry_maskgit = (\n step_rng,\n masked_token_idxs,\n mask,\n action_tokens,\n )\n final_carry_maskgit, _ = loop_fn(init_carry_maskgit, jnp.arange(steps))\n updated_token_idxs = final_carry_maskgit[1]\n new_carry = (rng, updated_token_idxs)\n return new_carry, None\n\n # --- Run the autoregressive generation using scan ---\n initial_carry = (batch[""rng""], token_idxs)\n timesteps_to_scan = jnp.arange(T, seq_len)\n final_carry, _ = jax.lax.scan(\n generation_step_fn,\n initial_carry,\n timesteps_to_scan\n )\n final_token_idxs = final_carry[1]\n\n # --- Decode all tokens at once at the end ---\n final_frames = self.tokenizer.decode(\n final_token_idxs,\n video_hw=batch[""videos""].shape[2:4],\n )\n return final_frames\n\n def vq_encode(self, batch, training) -> Dict[str, Any]:\n # --- Preprocess videos ---\n lam_output = self.lam.vq_encode(batch[""videos""], training=training)\n return lam_output[""indices""]\n\n\nclass MaskGITStep(nn.Module):\n dynamics: nn.Module\n tokenizer: nn.Module\n temperature: float\n sample_argmax: bool\n steps: int\n\n @nn.compact\n def __call__(self, carry, x):\n rng, token_idxs, mask, action_tokens = carry\n step = x\n N = token_idxs.shape[2]\n\n # --- Construct + encode video ---\n vid_embed = self.dynamics.patch_embed(token_idxs) # (B, S, N, D)\n mask_token = self.dynamics.mask_token # (1, 1, 1, D,)\n mask_expanded = mask[..., None] # (B, S, N, 1) \n vid_embed = jnp.where(mask_expanded, mask_token, vid_embed)\n\n # --- Predict transition ---\n act_embed = self.dynamics.action_up(action_tokens)\n vid_embed += jnp.pad(act_embed, ((0, 0), (1, 0), (0, 0), (0, 0)))\n 
unmasked_ratio = jnp.cos(jnp.pi * (step + 1) / (self.steps * 2))\n step_temp = self.temperature * (1.0 - unmasked_ratio)\n final_logits = self.dynamics.dynamics(vid_embed) / step_temp\n\n # --- Sample new tokens for final frame ---\n if self.sample_argmax:\n sampled_token_idxs = jnp.argmax(final_logits, axis=-1)\n else:\n rng, _rng = jax.random.split(rng)\n sampled_token_idxs = jax.random.categorical(_rng, final_logits)\n gather_fn = jax.vmap(jax.vmap(jax.vmap(lambda x, y: x[y])))\n final_token_probs = gather_fn(jax.nn.softmax(final_logits), sampled_token_idxs)\n final_token_probs += ~mask\n # Update masked tokens only\n token_idxs = jnp.where(mask, sampled_token_idxs, token_idxs)\n\n # --- Update mask ---\n num_unmasked_tokens = jnp.round(N * (1.0 - unmasked_ratio)).astype(int)\n idx_mask = jnp.arange(final_token_probs.shape[-1]) > num_unmasked_tokens\n sorted_idxs = jnp.argsort(final_token_probs, axis=-1, descending=True)\n mask_update_fn = jax.vmap(lambda msk, ids: msk.at[ids].set(idx_mask))\n new_mask = mask_update_fn(mask, sorted_idxs)\n\n new_carry = (rng, token_idxs, new_mask, action_tokens)\n return new_carry, None\n\ndef restore_genie_components(\n train_state: TrainState,\n sharding: jax.sharding.NamedSharding,\n grain_iterator: grain.DataLoaderIterator,\n inputs: Dict[str, jax.Array],\n rng: jax.Array,\n args,\n):\n """"""Restore pre-trained Genie components""""""\n rng, _rng = jax.random.split(rng)\n\n # dummy values since we only use tx to initialize the dummy train states\n dummy_tx = optax.adamw(\n learning_rate=optax.constant_schedule(args.max_lr),\n b1=0.9,\n b2=0.9,\n weight_decay=1e-4,\n )\n handler_registry = ocp.handlers.DefaultCheckpointHandlerRegistry()\n handler_registry.add('model_state', ocp.args.StandardRestore, ocp.handlers.StandardCheckpointHandler)\n \n\n checkpoint_options = ocp.CheckpointManagerOptions(\n step_format_fixed_length=6,\n )\n tokenizer_checkpoint_manager = ocp.CheckpointManager(\n directory=args.tokenizer_checkpoint,\n options=checkpoint_options,\n handler_registry=handler_registry,\n )\n dummy_tokenizer = TokenizerVQVAE(\n in_dim=args.image_channels,\n model_dim=args.tokenizer_dim,\n latent_dim=args.latent_patch_dim,\n num_latents=args.num_patch_latents,\n patch_size=args.patch_size,\n num_blocks=args.tokenizer_num_blocks,\n num_heads=args.tokenizer_num_heads,\n dropout=args.dropout,\n codebook_dropout=args.dropout,\n param_dtype=args.param_dtype,\n dtype=args.dtype,\n use_flash_attention=args.use_flash_attention,\n )\n tokenizer_init_params = dummy_tokenizer.init(_rng, inputs)\n dummy_tokenizer_train_state = TrainState.create(\n apply_fn=dummy_tokenizer.apply, params=tokenizer_init_params, tx=dummy_tx\n )\n abstract_sharded_tokenizer_state = _create_abstract_sharded_pytree(\n dummy_tokenizer_train_state, sharding\n )\n restored_tokenizer = tokenizer_checkpoint_manager.restore(\n step=tokenizer_checkpoint_manager.latest_step(),\n args=ocp.args.Composite(\n model_state=ocp.args.StandardRestore(abstract_sharded_tokenizer_state),\n ),\n )[""model_state""]\n restored_tokenizer_params = restored_tokenizer.params[""params""]\n train_state.params[""params""][""tokenizer""].update(restored_tokenizer_params)\n tokenizer_checkpoint_manager.close()\n\n if args.lam_checkpoint:\n lam_checkpoint_manager = ocp.CheckpointManager(\n directory=args.lam_checkpoint,\n options=checkpoint_options,\n handler_registry=handler_registry,\n )\n dummy_lam = LatentActionModel(\n in_dim=args.image_channels,\n model_dim=args.lam_dim,\n latent_dim=args.latent_patch_dim,\n 
num_latents=args.num_latent_actions,\n patch_size=args.lam_patch_size,\n num_blocks=args.lam_num_blocks,\n num_heads=args.lam_num_heads,\n dropout=args.dropout,\n codebook_dropout=args.dropout,\n param_dtype=args.param_dtype,\n dtype=args.dtype,\n use_flash_attention=args.use_flash_attention,\n )\n lam_init_params = dummy_lam.init(_rng, inputs)\n dummy_lam_train_state = TrainState.create(\n apply_fn=dummy_lam.apply, params=lam_init_params, tx=dummy_tx\n )\n abstract_sharded_lam_state = _create_abstract_sharded_pytree(\n dummy_lam_train_state, sharding\n )\n restored_lam = lam_checkpoint_manager.restore(\n step=lam_checkpoint_manager.latest_step(),\n args=ocp.args.Composite(\n model_state=ocp.args.StandardRestore(abstract_sharded_lam_state),\n ),\n )[""model_state""]\n restored_lam_params = restored_lam.params[""params""]\n # Genie does not initialize all LAM modules, thus we omit those extra modules during restoration\n # (f.srambical) FIXME: Currently, this is a small HBM memory crunch since the LAM's decoder is loaded into HBM and immediately dicarded.\n # A workaround would be to restore to host memory first, and only move the weights to HBM after pruning the decoder\n restored_lam_params = {\n k: v\n for k, v in restored_lam_params.items()\n if k in train_state.params[""params""][""lam""]\n }\n train_state.params[""params""][""lam""].update(restored_lam_params)\n lam_checkpoint_manager.close()\n\n return train_state\n\ndef _create_abstract_sharded_pytree(pytree_template, sharding_spec):\n """"""Replaces arrays in a pytree with ShapeDtypeStructs having the given sharding.""""""\n\n def map_fn(leaf_template):\n if hasattr(leaf_template, ""shape"") and hasattr(leaf_template, ""dtype""):\n return jax.ShapeDtypeStruct(\n leaf_template.shape, leaf_template.dtype, sharding=sharding_spec\n )\n return leaf_template\n\n return jax.tree_util.tree_map(map_fn, pytree_template)",python,tab +1668,3898748,"genie.py",0,0,"",python,tab +1669,3924690,"genie.py",2119,370,"\n if self.use_maskgit:\n self.dynamics = DynamicsMaskGIT(\n model_dim=self.dyna_dim,\n num_latents=self.num_patch_latents,\n num_blocks=self.dyna_num_blocks,\n num_heads=self.dyna_num_heads,\n dropout=self.dropout,\n mask_limit=self.mask_limit,\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n use_flash_attention=self.use_flash_attention,\n ) \n else:\n self.dynamics = DynamicsAutoregressive(\n model_dim=self.dyna_dim,\n num_latents=self.num_patch_latents,\n num_blocks=self.dyna_num_blocks,\n num_heads=self.dyna_num_heads,\n dropout=self.dropout,\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n )\n",python,content +1670,3927123,"TERMINAL",0,0,"2025-07-21 15:31:13.787784: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n2025-07-21 15:31:13.788374: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n2025-07-21 15:31:13.788485: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. 
Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n2025-07-21 15:31:13.788836: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +1671,3933070,"genie.py",2568,0,"",python,selection_mouse +1672,3934000,"genie.py",2567,0,"",python,selection_command +1673,3934662,"genie.py",2557,0,"",python,selection_command +1674,3934847,"genie.py",2557,0," ",python,content +1675,3935399,"genie.py",2560,0,"",python,selection_command +1676,3936655,"genie.py",2620,0,"",python,selection_command +1677,3937154,"genie.py",2634,0,"",python,selection_command +1678,3937177,"genie.py",2651,0,"",python,selection_command +1679,3937208,"genie.py",2703,0,"",python,selection_command +1680,3937264,"genie.py",2744,0,"",python,selection_command +1681,3937273,"genie.py",2796,0,"",python,selection_command +1682,3937326,"genie.py",2845,0,"",python,selection_command +1683,3937331,"genie.py",2892,0,"",python,selection_command +1684,3937384,"genie.py",2930,0,"",python,selection_command +1685,3937480,"genie.py",2976,0,"",python,selection_command +1686,3937764,"genie.py",2994,0,"\n use_flash_attention=self.use_flash_attention,",python,content +1687,3937782,"genie.py",3011,0,"",python,selection_command +1688,3944314,"genie.py",0,0,"",python,tab +1689,3961474,"models/dynamics.py",0,0,"from typing import Dict, Any\n\nimport jax\nimport jax.numpy as jnp\nimport flax.linen as nn\nimport einops\n\nfrom utils.nn import STTransformer\n\n\nclass DynamicsMaskGIT(nn.Module):\n """"""MaskGIT dynamics model""""""\n\n model_dim: int\n num_latents: int\n num_blocks: int\n num_heads: int\n dropout: float\n mask_limit: float\n param_dtype: jnp.dtype\n dtype: jnp.dtype\n use_flash_attention: bool\n\n def setup(self):\n self.dynamics = STTransformer(\n self.model_dim,\n self.num_latents,\n self.num_blocks,\n self.num_heads,\n self.dropout,\n self.param_dtype,\n self.dtype,\n<<<<<<< HEAD\n spacial_bert=True,\n=======\n use_flash_attention=self.use_flash_attention,\n>>>>>>> main\n )\n self.patch_embed = nn.Embed(self.num_latents, self.model_dim)\n self.mask_token = self.param(\n ""mask_token"",\n nn.initializers.lecun_uniform(),\n (1, 1, 1, self.model_dim),\n )\n self.action_up = nn.Dense(\n self.model_dim,\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n )\n\n def __call__(self, batch: Dict[str, Any], training: bool = True) -> Dict[str, Any]:\n # --- Mask videos ---\n vid_embed = self.patch_embed(batch[""video_tokens""])\n if training:\n rng1, rng2 = jax.random.split(batch[""mask_rng""])\n mask_prob = jax.random.uniform(rng1, minval=self.mask_limit)\n mask = jax.random.bernoulli(rng2, mask_prob, vid_embed.shape[:-1])\n mask = mask.at[:, 0].set(False)\n vid_embed = jnp.where(jnp.expand_dims(mask, -1), self.mask_token, vid_embed)\n else:\n mask = None\n\n # --- Predict transition ---\n act_embed = self.action_up(batch[""latent_actions""])\n vid_embed += jnp.pad(act_embed, ((0, 0), (1, 0), (0, 0), (0, 0)))\n logits = self.dynamics(vid_embed)\n return dict(token_logits=logits, mask=mask)\n\n\nclass DynamicsAutoregressive(nn.Module):\n """"""Autoregressive (causal) dynamics model""""""\n\n model_dim: int\n num_latents: int\n num_blocks: int\n num_heads: int\n dropout: float\n param_dtype: jnp.dtype\n dtype: 
jnp.dtype\n\n def setup(self):\n self.dynamics = STTransformer(\n self.model_dim,\n self.num_latents,\n self.num_blocks,\n self.num_heads,\n self.dropout,\n self.param_dtype,\n self.dtype,\n spacial_bert=False,\n )\n self.patch_embed = nn.Embed(self.num_latents, self.model_dim)\n self.action_up = nn.Dense(\n self.model_dim,\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n )\n\n def __call__(self, batch: Dict[str, Any], training: bool = True) -> Dict[str, Any]:\n vid_embed = self.patch_embed(batch[""video_tokens""])\n act_embed = self.action_up(batch[""latent_actions""])\n vid_embed += jnp.pad(act_embed, ((0, 0), (1, 0), (0, 0), (0, 0)))\n vid_embed_padded = jnp.pad(vid_embed, ((0, 0), (0, 0), (1, 0), (0, 0)))\n logits = self.dynamics(vid_embed_padded)[:,:,1:]\n mask = jnp.ones(vid_embed.shape[:-1])\n return dict(token_logits=logits, mask=mask)",python,tab +1690,3965037,"models/dynamics.py",0,0,"",python,tab +1691,3969582,"models/dynamics.py",663,0," spacial_bert=True,\n use_flash_attention=self.use_flash_attention,\n",python,content +1692,3979522,"models/dynamics.py",0,0,"",python,tab +1693,3982549,"utils/nn.py",0,0,"import math\nfrom typing import Tuple\nfrom functools import partial\n\nfrom flax import linen as nn\nimport jax\nimport jax.numpy as jnp\nimport einops\n\n\nclass PositionalEncoding(nn.Module):\n """"""https://uvadlc-notebooks.readthedocs.io/en/latest/tutorial_notebooks/JAX/tutorial6/Transformers_and_MHAttention.html""""""\n\n d_model: int # Hidden dimensionality of the input.\n max_len: int = 5000 # Maximum length of a sequence to expect.\n\n def setup(self):\n # Create matrix of [SeqLen, HiddenDim] representing the positional encoding for max_len inputs\n self.pe = jnp.zeros((self.max_len, self.d_model))\n position = jnp.arange(0, self.max_len, dtype=jnp.float32)[:, None]\n div_term = jnp.exp(\n jnp.arange(0, self.d_model, 2) * (-math.log(10000.0) / self.d_model)\n )\n self.pe = self.pe.at[:, 0::2].set(jnp.sin(position * div_term))\n self.pe = self.pe.at[:, 1::2].set(jnp.cos(position * div_term))\n\n def __call__(self, x):\n x = x + self.pe[: x.shape[2]]\n return x\n\n# class STBlock2(nn.Module):\n # dim: int\n # num_heads: int\n # dropout: float\n # param_dtype: jnp.dtype\n # dtype: jnp.dtype\n\n # @nn.remat\n # @nn.compact\n # def __call__(self, x: jax.Array) -> jax.Array:\n # # --- Spatial attention ---\n # z = PositionalEncoding(self.dim)(x)\n # z = nn.LayerNorm(\n # param_dtype=self.param_dtype,\n # dtype=self.dtype,\n # )(z)\n # causal_mask = jnp.tri(z.shape[-2])\n # z = nn.MultiHeadAttention(\n # num_heads=self.num_heads,\n # qkv_features=self.dim,\n # dropout_rate=self.dropout,\n # param_dtype=self.param_dtype,\n # dtype=self.dtype,\n # )(z, mask=causal_mask)\n # x = x + z\n\n # # --- Temporal attention ---\n # x = x.swapaxes(1, 2)\n # z = PositionalEncoding(self.dim)(x)\n # z = nn.LayerNorm(\n # param_dtype=self.param_dtype,\n # dtype=self.dtype,\n # )(z)\n # causal_mask = jnp.tri(z.shape[-2])\n # z = nn.MultiHeadAttention(\n # num_heads=self.num_heads,\n # qkv_features=self.dim,\n # dropout_rate=self.dropout,\n # param_dtype=self.param_dtype,\n # dtype=self.dtype,\n # )(z, mask=causal_mask)\n # x = x + z\n # x = x.swapaxes(1, 2)\n\n # # --- Feedforward ---\n # z = nn.LayerNorm(\n # param_dtype=self.param_dtype,\n # dtype=self.dtype,\n # )(x)\n # # FIXME (f.srambical): Here, the attention hidden dimension is the same as the FFN's. 
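DynamicsAutoregressive above pads one extra slot onto the spatial token axis before the transformer and slices the output back to the original length, shifting predictions by one position. A shape-only sketch of that realignment, with hypothetical dimensions (B, T, N, E); the exact slice convention follows the recorded code:

import jax.numpy as jnp

B, T, N, E = 1, 2, 3, 4
vid_embed = jnp.ones((B, T, N, E))
# One extra slot in front of the spatial token axis...
padded = jnp.pad(vid_embed, ((0, 0), (0, 0), (1, 0), (0, 0)))
assert padded.shape == (B, T, N + 1, E)
# ...and slicing the transformer output back to N tokens realigns positions
# so each output is conditioned only on earlier tokens under the causal mask.
logits_like = padded[:, :, 1:]
assert logits_like.shape == vid_embed.shape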
Usually, FFN hidden dimension is 4x model_dim\n # z = nn.Dense(\n # self.dim,\n # param_dtype=self.param_dtype,\n # dtype=self.dtype,\n # )(z)\n # z = nn.gelu(z)\n # x = x + z\n\n # return x\n\n# class CausalTransformer(nn.Module):\n # model_dim: int\n # out_dim: int\n # num_blocks: int\n # num_heads: int\n # dropout: float\n # param_dtype: jnp.dtype\n # dtype: jnp.dtype\n\n # @nn.compact\n # def __call__(self, x: jax.Array) -> jax.Array:\n # # Input projection and normalization\n # x = nn.Sequential(\n # [\n # nn.LayerNorm(\n # param_dtype=self.param_dtype,\n # dtype=self.dtype,\n # ),\n # nn.Dense(self.model_dim,\n # param_dtype=self.param_dtype,\n # dtype=self.dtype,\n # ),\n # nn.LayerNorm(\n # param_dtype=self.param_dtype,\n # dtype=self.dtype,\n # ),\n # ]\n # )(x)\n # # Causal transformer blocks\n # for _ in range(self.num_blocks):\n # x = STBlock2(\n # dim=self.model_dim,\n # num_heads=self.num_heads,\n # dropout=self.dropout,\n # param_dtype=self.param_dtype,\n # dtype=self.dtype,\n # )(x)\n\n # # Output projection\n # x = nn.Dense(\n # self.out_dim,\n # param_dtype=self.param_dtype,\n # dtype=self.dtype,\n # )(x)\n # return x # (B, T, E)\n\n\nclass STBlock(nn.Module):\n dim: int\n num_heads: int\n dropout: float\n param_dtype: jnp.dtype\n dtype: jnp.dtype\n<<<<<<< HEAD\n spacial_bert: bool = True\n=======\n use_flash_attention: bool\n>>>>>>> main\n\n @nn.remat\n @nn.compact\n def __call__(self, x: jax.Array) -> jax.Array:\n # --- Spatial attention ---\n z = PositionalEncoding(self.dim)(x)\n z = nn.LayerNorm(\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n )(z)\n spacial_mask = None if self.spacial_bert else jnp.tri(z.shape[-2])\n z = nn.MultiHeadAttention(\n num_heads=self.num_heads,\n qkv_features=self.dim,\n dropout_rate=self.dropout,\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n<<<<<<< HEAD\n )(z, mask=spacial_mask)\n=======\n attention_fn=_create_flash_attention_fn(self.use_flash_attention, is_causal=False),\n )(z)\n>>>>>>> main\n x = x + z\n\n # --- Temporal attention ---\n x = x.swapaxes(1, 2)\n z = PositionalEncoding(self.dim)(x)\n z = nn.LayerNorm(\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n )(z)\n causal_mask = jnp.tri(z.shape[-2])\n z = nn.MultiHeadAttention(\n num_heads=self.num_heads,\n qkv_features=self.dim,\n dropout_rate=self.dropout,\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n attention_fn=_create_flash_attention_fn(self.use_flash_attention, is_causal=True),\n # FIXME (f.srambical): check whether we should still pass the mask if we set is_causal=True\n )(z, mask=causal_mask)\n x = x + z\n x = x.swapaxes(1, 2)\n\n # --- Feedforward ---\n z = nn.LayerNorm(\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n )(x)\n # FIXME (f.srambical): Here, the attention hidden dimension is the same as the FFN's. 
Usually, FFN hidden dimension is 4x model_dim\n z = nn.Dense(\n self.dim,\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n )(z)\n z = nn.gelu(z)\n x = x + z\n\n return x\n\n\nclass STTransformer(nn.Module):\n model_dim: int\n out_dim: int\n num_blocks: int\n num_heads: int\n dropout: float\n param_dtype: jnp.dtype\n dtype: jnp.dtype\n<<<<<<< HEAD\n spacial_bert: bool = True\n=======\n use_flash_attention: bool\n>>>>>>> main\n\n @nn.compact\n def __call__(self, x: jax.Array) -> jax.Array:\n x = nn.Sequential(\n [\n nn.LayerNorm(\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n ),\n nn.Dense(self.model_dim,\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n ),\n nn.LayerNorm(\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n ),\n ]\n )(x)\n for _ in range(self.num_blocks):\n x = STBlock(\n dim=self.model_dim,\n num_heads=self.num_heads,\n dropout=self.dropout,\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n<<<<<<< HEAD\n spacial_bert=self.spacial_bert,\n=======\n use_flash_attention=self.use_flash_attention,\n>>>>>>> main\n )(x)\n x = nn.Dense(\n self.out_dim,\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n )(x)\n return x # (B, T, E)\n\n\ndef normalize(x):\n return x / (jnp.linalg.norm(x, ord=2, axis=-1, keepdims=True) + 1e-8)\n\n\nclass VectorQuantizer(nn.Module):\n latent_dim: int\n num_latents: int\n dropout: float\n\n def setup(self):\n self.codebook = normalize(\n self.param(\n ""codebook"",\n nn.initializers.lecun_uniform(),\n (self.num_latents, self.latent_dim),\n )\n )\n self.drop = nn.Dropout(self.dropout, deterministic=False)\n\n def __call__(\n self, x: jax.Array, training: bool\n ) -> Tuple[jax.Array, jax.Array, jax.Array, jax.Array]:\n # --- Compute distances ---\n x = normalize(x)\n codebook = normalize(self.codebook)\n distance = -jnp.matmul(x, codebook.T)\n if training:\n dropout_key = self.make_rng(""dropout"")\n distance = self.drop(distance, rng=dropout_key)\n\n # --- Get indices and embeddings ---\n indices = jnp.argmin(distance, axis=-1)\n z = self.codebook[indices]\n\n # --- Straight through estimator ---\n z_q = x + jax.lax.stop_gradient(z - x)\n return z_q, z, x, indices\n\n def get_codes(self, indices: jax.Array):\n return self.codebook[indices]\n\n\ndef _create_flash_attention_fn(use_flash_attention: bool, is_causal: bool):\n """"""\n Create an attention function that uses flash attention if enabled.\n\n Flax MultiHeadAttention provides tensors with shape (batch..., length, num_heads, head_dim)\n jax.nn.dot_product_attention expects (batch, length, num_heads, head_dim).\n\n We need to reshape to ensure compatibility. cuDNN's flash attention additionally\n requires a sequence length that is a multiple of 4. We pad the sequence length to the nearest\n multiple of 4 and mask accordingly.\n """"""\n \n def attention_fn(query, key, value, bias=None, mask=None, **kwargs):\n implementation = 'cudnn' if use_flash_attention else None\n\n def _rearrange(x):\n return einops.rearrange(x, '... l h d -> (...) 
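The VectorQuantizer above relies on the straight-through estimator: the forward value is the quantised code, while gradients pass through to the encoder output as if quantisation were the identity. A minimal sketch, with jnp.round standing in for the nearest-codebook lookup:

import jax
import jax.numpy as jnp

def straight_through(x, z):
    # Forward value is z; gradient w.r.t. x is the identity.
    return x + jax.lax.stop_gradient(z - x)

x = jnp.array([0.3, 1.7])
z = jnp.round(x)  # stand-in for the nearest codebook entry
print(straight_through(x, z))                                          # [0. 2.]
print(jax.grad(lambda v: straight_through(v, jnp.round(v)).sum())(x))  # [1. 1.]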
l h d')\n def _pad(x):\n return jnp.pad(x, ((0, 0), (0, pad_size), (0, 0), (0, 0)))\n def _fuse_masks(mask: jax.Array, attention_mask: jax.Array) -> jax.Array:\n mask_bool = mask.astype(jnp.bool_)\n expanded_mask = jnp.pad(mask_bool, ((0, pad_size), (0, pad_size)), constant_values=False)\n return jnp.logical_and(attention_mask, expanded_mask)\n \n original_shape = query.shape\n original_seq_len = query.shape[-3]\n \n # Pad to nearest multiple of 4\n target_seq_len = ((original_seq_len + 3) // 4) * 4\n pad_size = target_seq_len - original_seq_len\n\n query_4d = _pad(_rearrange(query))\n key_4d = _pad(_rearrange(key))\n value_4d = _pad(_rearrange(value))\n \n attention_mask = jnp.ones((target_seq_len, target_seq_len), dtype=jnp.bool_)\n attention_mask = attention_mask.at[original_seq_len:, :].set(False)\n attention_mask = attention_mask.at[:, original_seq_len:].set(False)\n\n mask_4d = _fuse_masks(mask, attention_mask) if mask is not None else attention_mask\n mask_4d = mask_4d[jnp.newaxis, jnp.newaxis, :, :] # (1, 1, seq_len, seq_len)\n \n bias_4d = _pad(_rearrange(bias)) if bias is not None else None\n \n output_4d = jax.nn.dot_product_attention(\n query=query_4d,\n key=key_4d,\n value=value_4d,\n bias=bias_4d,\n mask=mask_4d,\n implementation=implementation,\n is_causal=is_causal,\n **kwargs\n )\n return output_4d[..., :original_seq_len, :, :].reshape(original_shape)\n \n return attention_fn\n\n",python,tab +1694,3984791,"utils/nn.py",0,0,"",python,tab +1695,3988583,"utils/nn.py",4388,0," use_flash_attention: bool\n spacial_bert: bool = True\n",python,content +1696,3992500,"utils/nn.py",5023,0,"",python,selection_command +1697,4026478,"utils/nn.py",0,0,"import math\nfrom typing import Dict, Tuple\n\nfrom flax import linen as nn\nimport jax\nimport jax.numpy as jnp\n\n\nclass PositionalEncoding(nn.Module):\n """"""https://uvadlc-notebooks.readthedocs.io/en/latest/tutorial_notebooks/JAX/tutorial6/Transformers_and_MHAttention.html""""""\n\n d_model: int # Hidden dimensionality of the input.\n max_len: int = 5000 # Maximum length of a sequence to expect.\n\n def setup(self):\n # Create matrix of [SeqLen, HiddenDim] representing the positional encoding for max_len inputs\n self.pe = jnp.zeros((self.max_len, self.d_model))\n position = jnp.arange(0, self.max_len, dtype=jnp.float32)[:, None]\n div_term = jnp.exp(\n jnp.arange(0, self.d_model, 2) * (-math.log(10000.0) / self.d_model)\n )\n self.pe = self.pe.at[:, 0::2].set(jnp.sin(position * div_term))\n self.pe = self.pe.at[:, 1::2].set(jnp.cos(position * div_term))\n\n def __call__(self, x):\n x = x + self.pe[: x.shape[2]]\n return x\n\n# class STBlock2(nn.Module):\n # dim: int\n # num_heads: int\n # dropout: float\n # param_dtype: jnp.dtype\n # dtype: jnp.dtype\n\n # @nn.remat\n # @nn.compact\n # def __call__(self, x: jax.Array) -> jax.Array:\n # # --- Spatial attention ---\n # z = PositionalEncoding(self.dim)(x)\n # z = nn.LayerNorm(\n # param_dtype=self.param_dtype,\n # dtype=self.dtype,\n # )(z)\n # causal_mask = jnp.tri(z.shape[-2])\n # z = nn.MultiHeadAttention(\n # num_heads=self.num_heads,\n # qkv_features=self.dim,\n # dropout_rate=self.dropout,\n # param_dtype=self.param_dtype,\n # dtype=self.dtype,\n # )(z, mask=causal_mask)\n # x = x + z\n\n # # --- Temporal attention ---\n # x = x.swapaxes(1, 2)\n # z = PositionalEncoding(self.dim)(x)\n # z = nn.LayerNorm(\n # param_dtype=self.param_dtype,\n # dtype=self.dtype,\n # )(z)\n # causal_mask = jnp.tri(z.shape[-2])\n # z = nn.MultiHeadAttention(\n # num_heads=self.num_heads,\n # 
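The wrapper above exists because cuDNN's flash attention requires a sequence length that is a multiple of 4, so it pads queries, keys, and values and masks out the padded positions. A small standalone check of that pad-and-mask arithmetic, with hypothetical shapes and the default XLA backend (implementation='cudnn' would need a GPU):

import jax
import jax.numpy as jnp

seq_len, heads, dim = 6, 2, 8
target = ((seq_len + 3) // 4) * 4            # 8: next multiple of 4
pad = target - seq_len                       # 2 padded positions
q = jnp.ones((1, seq_len, heads, dim))
q_pad = jnp.pad(q, ((0, 0), (0, pad), (0, 0), (0, 0)))
mask = jnp.ones((target, target), dtype=bool)
mask = mask.at[seq_len:, :].set(False).at[:, seq_len:].set(False)
out = jax.nn.dot_product_attention(q_pad, q_pad, q_pad, mask=mask[None, None])
print(out[:, :seq_len].shape)                # (1, 6, 2, 8): padding sliced off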
qkv_features=self.dim,\n # dropout_rate=self.dropout,\n # param_dtype=self.param_dtype,\n # dtype=self.dtype,\n # )(z, mask=causal_mask)\n # x = x + z\n # x = x.swapaxes(1, 2)\n\n # # --- Feedforward ---\n # z = nn.LayerNorm(\n # param_dtype=self.param_dtype,\n # dtype=self.dtype,\n # )(x)\n # # FIXME (f.srambical): Here, the attention hidden dimension is the same as the FFN's. Usually, FFN hidden dimension is 4x model_dim\n # z = nn.Dense(\n # self.dim,\n # param_dtype=self.param_dtype,\n # dtype=self.dtype,\n # )(z)\n # z = nn.gelu(z)\n # x = x + z\n\n # return x\n\n# class CausalTransformer(nn.Module):\n # model_dim: int\n # out_dim: int\n # num_blocks: int\n # num_heads: int\n # dropout: float\n # param_dtype: jnp.dtype\n # dtype: jnp.dtype\n\n # @nn.compact\n # def __call__(self, x: jax.Array) -> jax.Array:\n # # Input projection and normalization\n # x = nn.Sequential(\n # [\n # nn.LayerNorm(\n # param_dtype=self.param_dtype,\n # dtype=self.dtype,\n # ),\n # nn.Dense(self.model_dim,\n # param_dtype=self.param_dtype,\n # dtype=self.dtype,\n # ),\n # nn.LayerNorm(\n # param_dtype=self.param_dtype,\n # dtype=self.dtype,\n # ),\n # ]\n # )(x)\n # # Causal transformer blocks\n # for _ in range(self.num_blocks):\n # x = STBlock2(\n # dim=self.model_dim,\n # num_heads=self.num_heads,\n # dropout=self.dropout,\n # param_dtype=self.param_dtype,\n # dtype=self.dtype,\n # )(x)\n\n # # Output projection\n # x = nn.Dense(\n # self.out_dim,\n # param_dtype=self.param_dtype,\n # dtype=self.dtype,\n # )(x)\n # return x # (B, T, E)\n\n\nclass STBlock(nn.Module):\n dim: int\n num_heads: int\n dropout: float\n param_dtype: jnp.dtype\n dtype: jnp.dtype\n spacial_bert: bool = True\n\n @nn.remat\n @nn.compact\n def __call__(self, x: jax.Array) -> jax.Array:\n # --- Spatial attention ---\n z = PositionalEncoding(self.dim)(x)\n z = nn.LayerNorm(\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n )(z)\n spacial_mask = None if self.spacial_bert else jnp.tri(z.shape[-2])\n z = nn.MultiHeadAttention(\n num_heads=self.num_heads,\n qkv_features=self.dim,\n dropout_rate=self.dropout,\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n )(z, mask=spacial_mask)\n x = x + z\n\n # --- Temporal attention ---\n x = x.swapaxes(1, 2)\n z = PositionalEncoding(self.dim)(x)\n z = nn.LayerNorm(\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n )(z)\n causal_mask = jnp.tri(z.shape[-2])\n z = nn.MultiHeadAttention(\n num_heads=self.num_heads,\n qkv_features=self.dim,\n dropout_rate=self.dropout,\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n )(z, mask=causal_mask)\n x = x + z\n x = x.swapaxes(1, 2)\n\n # --- Feedforward ---\n z = nn.LayerNorm(\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n )(x)\n # FIXME (f.srambical): Here, the attention hidden dimension is the same as the FFN's. 
Usually, FFN hidden dimension is 4x model_dim\n z = nn.Dense(\n self.dim,\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n )(z)\n z = nn.gelu(z)\n x = x + z\n\n return x\n\n\nclass STTransformer(nn.Module):\n model_dim: int\n out_dim: int\n num_blocks: int\n num_heads: int\n dropout: float\n param_dtype: jnp.dtype\n dtype: jnp.dtype\n spacial_bert: bool = True\n\n @nn.compact\n def __call__(self, x: jax.Array) -> jax.Array:\n x = nn.Sequential(\n [\n nn.LayerNorm(\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n ),\n nn.Dense(self.model_dim,\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n ),\n nn.LayerNorm(\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n ),\n ]\n )(x)\n for _ in range(self.num_blocks):\n x = STBlock(\n dim=self.model_dim,\n num_heads=self.num_heads,\n dropout=self.dropout,\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n spacial_bert=self.spacial_bert,\n )(x)\n x = nn.Dense(\n self.out_dim,\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n )(x)\n return x # (B, T, E)\n\n\ndef normalize(x):\n return x / (jnp.linalg.norm(x, ord=2, axis=-1, keepdims=True) + 1e-8)\n\n\nclass VectorQuantizer(nn.Module):\n latent_dim: int\n num_latents: int\n dropout: float\n\n def setup(self):\n self.codebook = normalize(\n self.param(\n ""codebook"",\n nn.initializers.lecun_uniform(),\n (self.num_latents, self.latent_dim),\n )\n )\n self.drop = nn.Dropout(self.dropout, deterministic=False)\n\n def __call__(\n self, x: jax.Array, training: bool\n ) -> Tuple[jax.Array, jax.Array, jax.Array, jax.Array]:\n # --- Compute distances ---\n x = normalize(x)\n codebook = normalize(self.codebook)\n distance = -jnp.matmul(x, codebook.T)\n if training:\n dropout_key = self.make_rng(""dropout"")\n distance = self.drop(distance, rng=dropout_key)\n\n # --- Get indices and embeddings ---\n indices = jnp.argmin(distance, axis=-1)\n z = self.codebook[indices]\n\n # --- Straight through estimator ---\n z_q = x + jax.lax.stop_gradient(z - x)\n return z_q, z, x, indices\n\n def get_codes(self, indices: jax.Array):\n return self.codebook[indices]\n",python,tab +1698,4026479,"utils/nn.py",4671,0,"",python,selection_mouse +1699,4028673,"utils/nn.py",4963,0,"",python,selection_mouse +1700,4028787,"utils/nn.py",4960,4,"mask",python,selection_mouse +1701,4029808,"utils/nn.py",4960,0,"",python,selection_mouse +1702,4029926,"utils/nn.py",4960,4,"mask",python,selection_mouse +1703,4030994,"utils/nn.py",4964,0,"",python,selection_mouse +1704,4033830,"utils/nn.py",4960,4,"mask",python,selection_mouse +1705,4034681,"utils/nn.py",4961,0,"",python,selection_mouse +1706,4035017,"utils/nn.py",4960,4,"mask",python,selection_mouse +1707,4061682,"utils/nn.py",0,0,"",python,tab +1708,4061683,"utils/nn.py",5102,0,"",python,selection_mouse +1709,4067033,"utils/nn.py",4753,0,"",python,selection_mouse +1710,4067176,"utils/nn.py",4752,4,"self",python,selection_mouse +1711,4067354,"utils/nn.py",4752,17,"self.spacial_bert",python,selection_mouse +1712,4070781,"utils/nn.py",5103,0,"",python,selection_mouse +1713,4072812,"utils/nn.py",5103,5,"",python,content +1714,4074698,"utils/nn.py",4760,0,"",python,selection_mouse +1715,4074816,"utils/nn.py",4757,12,"spacial_bert",python,selection_mouse +1716,4076162,"utils/nn.py",5103,0,"",python,selection_mouse +1717,4076721,"utils/nn.py",5103,0,"n",python,content +1718,4076723,"utils/nn.py",5104,0,"",python,selection_keyboard +1719,4076873,"utils/nn.py",5104,0,"o",python,content +1720,4076875,"utils/nn.py",5105,0,"",python,selection_keyboard 
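After the merge resolution above, STBlock keeps both the spacial_bert flag from HEAD and use_flash_attention from main; the spatial mask choice reduces to the one-liner below (None gives full bidirectional, BERT-style spatial attention; jnp.tri gives a causal mask):

import jax.numpy as jnp

def spatial_mask(seq_len: int, spacial_bert: bool):
    # None -> unmasked spatial attention; lower-triangular -> causal.
    return None if spacial_bert else jnp.tri(seq_len)

print(spatial_mask(3, True))    # None
print(spatial_mask(3, False))   # 3x3 lower-triangular matrix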
+1721,4076974,"utils/nn.py",5105,0,"t",python,content +1722,4076976,"utils/nn.py",5106,0,"",python,selection_keyboard +1723,4077039,"utils/nn.py",5106,0," ",python,content +1724,4077041,"utils/nn.py",5107,0,"",python,selection_keyboard +1725,4077552,"utils/nn.py",5107,0,"self.spacial_bert",python,content +1726,4080131,"utils/nn.py",5123,0,"",python,selection_command +1727,4084287,"utils/nn.py",4773,0,"",python,selection_mouse +1728,4085838,"utils/nn.py",4721,75,"",python,content +1729,4085867,"utils/nn.py",4729,0,"",python,selection_command +1730,4096684,"utils/nn.py",6486,0,"",python,selection_command +1731,4104020,"utils/nn.py",6486,0," spacial_bert: bool = True\n use_flash_attention: bool\n",python,content +1732,4106535,"utils/nn.py",7370,0,"",python,selection_command +1733,4119297,"utils/nn.py",7358,0," spacial_bert=self.spacial_bert,\n use_flash_attention=self.use_flash_attention,\n",python,content +1734,4128786,"models/dynamics.py",0,0,"",python,tab +1735,4132879,"utils/nn.py",0,0,"",python,tab +1736,4140135,"utils/nn.py",5082,0,"",python,selection_mouse +1737,4140137,"utils/nn.py",5081,0,"",python,selection_command +1738,4172581,"utils/nn.py",5374,0,"",python,selection_mouse +1739,4172895,".venv/lib/python3.10/site-packages/flax/linen/attention.py",0,0,"# Copyright 2024 The Flax Authors.\n#\n# Licensed under the Apache License, Version 2.0 (the ""License"");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n# http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an ""AS IS"" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n\n""""""Attention core modules for Flax.""""""\nfrom __future__ import annotations\n\nimport functools\nimport inspect\nimport warnings\nfrom typing import Any, overload\nfrom collections.abc import Callable\n\nimport jax\nimport jax.numpy as jnp\nfrom jax import lax, random\n\nfrom flax.linen import initializers\nfrom flax.linen.dtypes import promote_dtype\nfrom flax.linen.linear import (\n DenseGeneral,\n default_kernel_init,\n)\nfrom flax.linen.module import Module, compact, merge_param\nfrom flax.linen.normalization import LayerNorm\nfrom flax.typing import (\n Array,\n PRNGKey,\n Dtype,\n Shape as Shape,\n Initializer,\n PrecisionLike,\n DotGeneralT,\n)\n\n\ndef dot_product_attention_weights(\n query: Array,\n key: Array,\n bias: Array | None = None,\n mask: Array | None = None,\n broadcast_dropout: bool = True,\n dropout_rng: PRNGKey | None = None,\n dropout_rate: float = 0.0,\n deterministic: bool = False,\n dtype: Dtype | None = None,\n precision: PrecisionLike = None,\n module: Module | None = None,\n force_fp32_for_softmax: bool = False,\n einsum_dot_general: Callable[..., Array] | None = None,\n einsum: Callable[..., Array] | None = None,\n):\n """"""Computes dot-product attention weights given query and key.\n\n Used by :func:`dot_product_attention`, which is what you'll most likely use.\n But if you want access to the attention weights for introspection, then\n you can directly call this function and call einsum yourself.\n\n Args:\n query: queries for calculating attention with shape of ``[batch...,\n q_length, num_heads, qk_depth_per_head]``.\n key: keys for calculating attention with shape of ``[batch..., kv_length,\n num_heads, 
qk_depth_per_head]``.\n bias: bias for the attention weights. This should be broadcastable to the\n shape ``[batch..., num_heads, q_length, kv_length]``. This can be used for\n incorporating causal masks, padding masks, proximity bias, etc.\n mask: mask for the attention weights. This should be broadcastable to the\n shape ``[batch..., num_heads, q_length, kv_length]``. This can be used for\n incorporating causal masks. Attention weights are masked out if their\n corresponding mask value is ``False``.\n broadcast_dropout: bool: use a broadcasted dropout along batch dims.\n dropout_rng: JAX PRNGKey: to be used for dropout\n dropout_rate: dropout rate\n deterministic: bool, deterministic or not (to apply dropout)\n dtype: the dtype of the computation (default: infer from inputs and params)\n precision: numerical precision of the computation see ``jax.lax.Precision``\n for details.\n module: the Module that will sow the attention weights into the\n 'intermediates' collection. Remember to mark 'intermediates' as mutable\n via ``mutable=['intermediates']`` in order to have that collection\n returned. If ``module`` is None, the attention weights will not be sowed.\n force_fp32_for_softmax: bool, whether to force the softmax to be computed in\n fp32. This is useful for mixed-precision training where higher precision\n is desired for numerical stability.\n einsum_dot_general: the dot_general to use in einsum.\n einsum: If unspecified, default `jnp.einsum` will be used. This argument is\n mutually exclusive with `precision` and `einsum_dot_general`.\n\n Raises:\n ValueError: if both `precision`/`einsum_dot_general` and `einsum` are\n specified.\n\n Returns:\n Output of shape ``[batch..., num_heads, q_length, kv_length]``.\n """"""\n if (precision or einsum_dot_general) and einsum:\n raise ValueError(\n 'precision/einsum_dot_general and einsum are mutually exclusive. 
Please'\n ' specify only one of them.'\n )\n if not einsum:\n einsum = functools.partial(\n jnp.einsum,\n precision=precision,\n _dot_general=einsum_dot_general\n if einsum_dot_general\n else jax.lax.dot_general,\n )\n\n query, key = promote_dtype(query, key, dtype=dtype)\n dtype = query.dtype\n\n assert query.ndim == key.ndim, 'q, k must have same rank.'\n assert query.shape[:-3] == key.shape[:-3], 'q, k batch dims must match.'\n assert query.shape[-2] == key.shape[-2], 'q, k num_heads must match.'\n assert query.shape[-1] == key.shape[-1], 'q, k depths must match.'\n\n # calculate attention matrix\n depth = query.shape[-1]\n query = query / jnp.sqrt(depth).astype(dtype)\n # attn weight shape is (batch..., num_heads, q_length, kv_length)\n attn_weights = einsum('...qhd,...khd->...hqk', query, key)\n\n # apply attention bias: masking, dropout, proximity bias, etc.\n if bias is not None:\n attn_weights = attn_weights + bias\n # apply attention mask\n if mask is not None:\n big_neg = jnp.finfo(dtype).min\n attn_weights = jnp.where(mask, attn_weights, big_neg)\n\n # normalize the attention weights\n if force_fp32_for_softmax and dtype != jnp.float32:\n attn_weights = jax.nn.softmax(attn_weights.astype(jnp.float32))\n else:\n attn_weights = jax.nn.softmax(attn_weights).astype(dtype)\n\n if module:\n module.sow('intermediates', 'attention_weights', attn_weights)\n\n # apply attention dropout\n if not deterministic and dropout_rate > 0.0:\n keep_prob = 1.0 - dropout_rate\n if broadcast_dropout:\n # dropout is broadcast across the batch + head dimensions\n dropout_shape = tuple([1] * (key.ndim - 2)) + attn_weights.shape[-2:]\n keep = random.bernoulli(dropout_rng, keep_prob, dropout_shape) # type: ignore\n else:\n keep = random.bernoulli(dropout_rng, keep_prob, attn_weights.shape) # type: ignore\n multiplier = keep.astype(dtype) / jnp.asarray(keep_prob, dtype=dtype)\n attn_weights = attn_weights * multiplier\n\n return attn_weights\n\n\ndef dot_product_attention(\n query: Array,\n key: Array,\n value: Array,\n bias: Array | None = None,\n mask: Array | None = None,\n broadcast_dropout: bool = True,\n dropout_rng: PRNGKey | None = None,\n dropout_rate: float = 0.0,\n deterministic: bool = False,\n dtype: Dtype | None = None,\n precision: PrecisionLike = None,\n module: Module | None = None,\n force_fp32_for_softmax: bool = False,\n einsum_dot_general: Callable[..., Array] | None = None,\n qk_attn_weights_einsum: Callable[..., Array] | None = None,\n attn_weights_value_einsum: Callable[..., Array] | None = None,\n):\n """"""Computes dot-product attention given query, key, and value.\n\n This is the core function for applying attention based on\n https://arxiv.org/abs/1706.03762. It calculates the attention weights given\n query and key and combines the values using the attention weights.\n\n .. note::\n ``query``, ``key``, ``value`` needn't have any batch dimensions.\n\n Args:\n query: queries for calculating attention with shape of ``[batch...,\n q_length, num_heads, qk_depth_per_head]``.\n key: keys for calculating attention with shape of ``[batch..., kv_length,\n num_heads, qk_depth_per_head]``.\n value: values to be used in attention with shape of ``[batch..., kv_length,\n num_heads, v_depth_per_head]``.\n bias: bias for the attention weights. This should be broadcastable to the\n shape ``[batch..., num_heads, q_length, kv_length]``. This can be used for\n incorporating causal masks, padding masks, proximity bias, etc.\n mask: mask for the attention weights. 
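dot_product_attention_weights above masks logits by writing the dtype's minimum into disallowed positions before the softmax; a tiny standalone illustration:

import jax
import jax.numpy as jnp

attn_weights = jnp.array([[1.0, 2.0], [3.0, 4.0]])
mask = jnp.array([[True, False], [True, True]])
big_neg = jnp.finfo(attn_weights.dtype).min
masked = jnp.where(mask, attn_weights, big_neg)
print(jax.nn.softmax(masked))   # masked-out logits receive ~0 probability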
This should be broadcastable to the\n shape ``[batch..., num_heads, q_length, kv_length]``. This can be used for\n incorporating causal masks. Attention weights are masked out if their\n corresponding mask value is ``False``.\n broadcast_dropout: bool: use a broadcasted dropout along batch dims.\n dropout_rng: JAX PRNGKey: to be used for dropout\n dropout_rate: dropout rate\n deterministic: bool, deterministic or not (to apply dropout)\n dtype: the dtype of the computation (default: infer from inputs)\n precision: numerical precision of the computation see ``jax.lax.Precision`\n for details.\n module: the Module that will sow the attention weights into the\n 'intermediates' collection. Remember to mark 'intermediates' as mutable\n via ``mutable=['intermediates']`` in order to have that collection\n returned. If ``module`` is None, the attention weights will not be sowed.\n force_fp32_for_softmax: bool, whether to force the softmax to be computed in\n fp32. This is useful for mixed-precision training where higher precision\n is desired for numerical stability.\n einsum_dot_general: the dot_general to use in `jnp.einsum`.\n qk_attn_weights_einsum: the einsum for computing the attention weights. When\n unspecified, the default `jnp.einsum` will be used. This argument is\n mutually exclusive with `precision` and `einsum_dot_general`.\n attn_weights_value_einsum: the einsum for computing the product of the\n attention weights and the values. When unspecified, the default\n `jnp.einsum` will be used. This argument is mutually exclusive with\n `precision` and `einsum_dot_general`.\n\n Returns:\n Output of shape ``[batch..., q_length, num_heads, v_depth_per_head]``.\n\n Raises:\n ValueError: if both `precision`/`einsum_dot_general` and\n `qk_attn_weights_einsum`/`attn_weights_value_einsum` are\n specified.\n """"""\n if (qk_attn_weights_einsum and not attn_weights_value_einsum) or (\n not qk_attn_weights_einsum and attn_weights_value_einsum\n ):\n raise ValueError(\n 'qk_attn_weights_einsum and attn_weights_value_einsum must be specified'\n ' together.'\n )\n if (precision or einsum_dot_general) and (\n qk_attn_weights_einsum or attn_weights_value_einsum\n ):\n raise ValueError(\n 'precision/einsum_dot_general and'\n ' qk_attn_weights_einsum/attn_weights_value_einsum are mutually'\n ' exclusive. 
Please specify only one of them.'\n )\n\n query, key, value = promote_dtype(query, key, value, dtype=dtype)\n dtype = query.dtype\n assert key.ndim == query.ndim == value.ndim, 'q, k, v must have same rank.'\n assert (\n query.shape[:-3] == key.shape[:-3] == value.shape[:-3]\n ), 'q, k, v batch dims must match.'\n assert (\n query.shape[-2] == key.shape[-2] == value.shape[-2]\n ), 'q, k, v num_heads must match.'\n assert key.shape[-3] == value.shape[-3], 'k, v lengths must match.'\n\n # compute attention weights\n attn_weights = dot_product_attention_weights(\n query,\n key,\n bias,\n mask,\n broadcast_dropout,\n dropout_rng,\n dropout_rate,\n deterministic,\n dtype,\n precision,\n module,\n force_fp32_for_softmax,\n einsum_dot_general=einsum_dot_general,\n einsum=qk_attn_weights_einsum,\n )\n if not attn_weights_value_einsum:\n attn_weights_value_einsum = functools.partial(\n jnp.einsum,\n precision=precision,\n _dot_general=einsum_dot_general\n if einsum_dot_general\n else jax.lax.dot_general,\n )\n # return weighted sum over values for each query position\n return attn_weights_value_einsum(\n '...hqk,...khd->...qhd',\n attn_weights,\n value,\n )\n\n\nclass MultiHeadDotProductAttention(Module):\n """"""Multi-head dot-product attention.\n\n Example usage::\n\n >>> import flax.linen as nn\n >>> import jax\n\n >>> layer = nn.MultiHeadDotProductAttention(num_heads=8, qkv_features=16)\n >>> key1, key2, key3, key4, key5, key6 = jax.random.split(jax.random.key(0), 6)\n >>> shape = (4, 3, 2, 5)\n >>> q, k, v = jax.random.uniform(key1, shape), jax.random.uniform(key2, shape), jax.random.uniform(key3, shape)\n >>> variables = layer.init(jax.random.key(0), q)\n\n >>> # different inputs for inputs_q, inputs_k and inputs_v\n >>> out = layer.apply(variables, q, k, v)\n >>> # equivalent to layer.apply(variables, inputs_q=q, inputs_k=k, inputs_v=k)\n >>> out = layer.apply(variables, q, k)\n >>> # equivalent to layer.apply(variables, inputs_q=q, inputs_k=q) and layer.apply(variables, inputs_q=q, inputs_k=q, inputs_v=q)\n >>> out = layer.apply(variables, q)\n\n >>> attention_kwargs = dict(\n ... num_heads=8,\n ... qkv_features=16,\n ... kernel_init=nn.initializers.ones,\n ... bias_init=nn.initializers.zeros,\n ... dropout_rate=0.5,\n ... deterministic=False,\n ... )\n >>> class Module(nn.Module):\n ... attention_kwargs: dict\n ...\n ... @nn.compact\n ... def __call__(self, x, dropout_rng=None):\n ... out1 = nn.MultiHeadDotProductAttention(**self.attention_kwargs)(x, dropout_rng=dropout_rng)\n ... out2 = nn.MultiHeadDotProductAttention(**self.attention_kwargs)(x, dropout_rng=dropout_rng)\n ... return out1, out2\n >>> module = Module(attention_kwargs)\n >>> variables = module.init({'params': key1, 'dropout': key2}, q)\n\n >>> # out1 and out2 are different.\n >>> out1, out2 = module.apply(variables, q, rngs={'dropout': key3})\n >>> # out3 and out4 are different.\n >>> # out1 and out3 are different. out2 and out4 are different.\n >>> out3, out4 = module.apply(variables, q, rngs={'dropout': key4})\n >>> # out1 and out2 are the same.\n >>> out1, out2 = module.apply(variables, q, dropout_rng=key5)\n >>> # out1 and out2 are the same as out3 and out4.\n >>> # providing a `dropout_rng` arg will take precedence over the `rngs` arg in `.apply`\n >>> out3, out4 = module.apply(variables, q, rngs={'dropout': key6}, dropout_rng=key5)\n\n Attributes:\n num_heads: Number of attention heads. Features (i.e. 
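A minimal usage example of the dot_product_attention function defined above, with hypothetical shapes (batch, length, num_heads, head_dim):

import jax
import jax.numpy as jnp
import flax.linen as nn

q = jax.random.normal(jax.random.PRNGKey(0), (2, 5, 4, 8))
out = nn.dot_product_attention(q, q, q)   # self-attention, no mask or dropout
print(out.shape)                          # (2, 5, 4, 8)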
inputs_q.shape[-1])\n should be divisible by the number of heads.\n dtype: The dtype of the computation (default: infer from inputs and params)\n param_dtype: The dtype passed to parameter initializers (default: float32)\n qkv_features: Dimension of the key, query, and value.\n out_features: Dimension of the last projection\n broadcast_dropout: Use a broadcasted dropout along batch dims.\n dropout_rate: Dropout rate.\n deterministic: If False, the attention weight is masked randomly using\n dropout, whereas if True, the attention weights are deterministic.\n precision: Numerical precision of the computation see ``jax.lax.Precision``\n for details.\n kernel_init: Initializer for the kernel of the Dense layers.\n out_kernel_init: Optional Initializer for the kernel of the output Dense layer,\n if None, ``kernel_init`` will be used.\n bias_init: Initializer for the bias of the Dense layers.\n out_bias_init: Optional Initializer for the bias of the output Dense layer,\n if None, ``bias_init`` will be used.\n use_bias: Whether pointwise QKVO dense transforms use bias.\n attention_fn: dot_product_attention or compatible function. Accepts query,\n key, value, and returns output of shape ``[bs, dim1, dim2, ..., dimN,,\n num_heads, value_channels]``\n decode: Whether to prepare and use an autoregressive cache.\n normalize_qk: Should QK normalization be applied (arxiv.org/abs/2302.05442).\n qk_attn_weights_einsum_cls: factory function to create the einsum for\n computing the attention weights.\n attn_weights_value_einsum_cls: factory function to create the einsum for\n computing the product of the attention weights and the values.\n """"""\n\n num_heads: int\n dtype: Dtype | None = None\n param_dtype: Dtype = jnp.float32\n qkv_features: int | None = None\n out_features: int | None = None\n broadcast_dropout: bool = True\n dropout_rate: float = 0.0\n deterministic: bool | None = None\n precision: PrecisionLike = None\n kernel_init: Initializer = default_kernel_init\n out_kernel_init: Initializer | None = None\n bias_init: Initializer = initializers.zeros_init()\n out_bias_init: Initializer | None = None\n use_bias: bool = True\n attention_fn: Callable[..., Array] = dot_product_attention\n decode: bool = False\n normalize_qk: bool = False\n force_fp32_for_softmax: bool = False\n # Deprecated, will be removed.\n qkv_dot_general: DotGeneralT | None = None\n out_dot_general: DotGeneralT | None = None\n qkv_dot_general_cls: Any = None\n out_dot_general_cls: Any = None\n qk_attn_weights_einsum_cls: Callable[..., Callable[..., Array]] | None = None\n attn_weights_value_einsum_cls: Callable[..., Callable[..., Array]] | None = (\n None\n )\n\n @overload\n def __call__(\n self,\n inputs_q: Array,\n inputs_k: Array | None = None,\n inputs_v: Array | None = None,\n *,\n mask: Array | None = None,\n deterministic: bool | None = None,\n dropout_rng: PRNGKey | None = None,\n sow_weights: bool = False,\n ):\n ...\n\n @overload\n def __call__(\n self,\n inputs_q: Array,\n *,\n inputs_kv: Array | None = None,\n mask: Array | None = None,\n deterministic: bool | None = None,\n dropout_rng: PRNGKey | None = None,\n sow_weights: bool = False,\n ):\n ...\n\n @compact\n def __call__(\n self,\n inputs_q: Array,\n inputs_k: Array | None = None,\n inputs_v: Array | None = None,\n *,\n inputs_kv: Array | None = None,\n mask: Array | None = None,\n deterministic: bool | None = None,\n dropout_rng: PRNGKey | None = None,\n sow_weights: bool = False,\n ):\n """"""Applies multi-head dot product attention on the input data.\n\n 
Projects the inputs into multi-headed query, key, and value vectors,\n applies dot-product attention and project the results to an output vector.\n\n If both inputs_k and inputs_v are None, they will both copy the value of\n inputs_q (self attention).\n If only inputs_v is None, it will copy the value of inputs_k.\n\n Args:\n inputs_q: input queries of shape ``[batch_sizes..., length, features]``.\n inputs_k: key of shape ``[batch_sizes..., length, features]``. If None,\n inputs_k will copy the value of inputs_q.\n inputs_v: values of shape ``[batch_sizes..., length, features]``. If None,\n inputs_v will copy the value of inputs_k.\n inputs_kv: key/values of shape ``[batch_sizes..., length, features]``. If\n None, inputs_kv will copy the value of inputs_q. This arg will be\n deprecated soon. Use inputs_k and inputs_v instead.\n mask: attention mask of shape ``[batch_sizes..., num_heads, query_length,\n key/value_length]``. Attention weights are masked out if their\n corresponding mask value is ``False``.\n deterministic: if false, the attention weight is masked randomly using\n dropout, whereas if true, the attention weights are deterministic.\n dropout_rng: optional rng key to pass to the attention layer's dropout\n mask. Otherwise, self.make_rng('dropout') is used instead.\n sow_weights: if ``True``, the attention weights are sowed into the\n 'intermediates' collection. Remember to mark 'intermediates' as\n mutable via ``mutable=['intermediates']`` in order to have that\n collection returned.\n\n Returns:\n output of shape ``[batch_sizes..., length, features]``.\n """"""\n if inputs_kv is not None:\n if inputs_k is not None or inputs_v is not None:\n raise ValueError(\n 'If either `inputs_k` or `inputs_v` is not None, '\n '`inputs_kv` must be None. If `inputs_kv` is not None, both `inputs_k` '\n 'and `inputs_v` must be None. We recommend using `inputs_k` and '\n '`inputs_v` args, since `inputs_kv` will be deprecated soon. See '\n 'https://github.com/google/flax/discussions/3389 for more '\n 'information.'\n )\n inputs_k = inputs_v = inputs_kv\n warnings.warn(\n 'The inputs_kv arg will be deprecated soon. '\n 'Use inputs_k and inputs_v instead. See '\n 'https://github.com/google/flax/discussions/3389 '\n 'for more information.',\n DeprecationWarning,\n )\n else:\n if inputs_k is None:\n if inputs_v is not None:\n raise ValueError(\n '`inputs_k` cannot be None if `inputs_v` is not None. '\n 'To have both `inputs_k` and `inputs_v` be the same value, pass in the '\n 'value to `inputs_k` and leave `inputs_v` as None.'\n )\n inputs_k = inputs_q\n if inputs_v is None:\n inputs_v = inputs_k\n elif inputs_v.shape[-1] == inputs_v.shape[-2]:\n warnings.warn(\n f'You are passing an array of shape {inputs_v.shape} '\n 'to the `inputs_v` arg, when you may have intended '\n 'to pass it to the `mask` arg. As of Flax version '\n '0.7.4, the function signature of '\n ""MultiHeadDotProductAttention's `__call__` method ""\n 'has changed to `__call__(inputs_q, inputs_k=None, '\n 'inputs_v=None, *, inputs_kv=None, mask=None, '\n 'deterministic=None)`. Use the kwarg `mask` instead. 
'\n 'See https://github.com/google/flax/discussions/3389 '\n 'and read the docstring for more information.',\n DeprecationWarning,\n )\n\n features = self.out_features or inputs_q.shape[-1]\n qkv_features = self.qkv_features or inputs_q.shape[-1]\n assert qkv_features % self.num_heads == 0, (\n f'Memory dimension ({qkv_features}) must be divisible by number of'\n f' heads ({self.num_heads}).'\n )\n head_dim = qkv_features // self.num_heads\n\n dense = functools.partial(\n DenseGeneral,\n axis=-1,\n dtype=self.dtype,\n param_dtype=self.param_dtype,\n features=(self.num_heads, head_dim),\n kernel_init=self.kernel_init,\n bias_init=self.bias_init,\n use_bias=self.use_bias,\n precision=self.precision,\n dot_general=self.qkv_dot_general,\n dot_general_cls=self.qkv_dot_general_cls,\n )\n # project inputs_q to multi-headed q/k/v\n # dimensions are then [batch..., length, n_heads, n_features_per_head]\n query, key, value = (\n dense(name='query')(inputs_q),\n dense(name='key')(inputs_k),\n dense(name='value')(inputs_v),\n )\n\n if self.normalize_qk:\n # Normalizing query and key projections stabilizes training with higher\n # LR. See ViT-22B paper http://arxiv.org/abs/2302.05442 for analysis.\n query = LayerNorm(\n name='query_ln',\n use_bias=False,\n dtype=self.dtype,\n param_dtype=self.param_dtype,\n )(query) # type: ignore[call-arg]\n key = LayerNorm(\n name='key_ln',\n use_bias=False,\n dtype=self.dtype,\n param_dtype=self.param_dtype,\n )(key) # type: ignore[call-arg]\n\n # During fast autoregressive decoding, we feed one position at a time,\n # and cache the keys and values step by step.\n if self.decode:\n # detect if we're initializing by absence of existing cache data.\n is_initialized = self.has_variable('cache', 'cached_key')\n cached_key = self.variable(\n 'cache', 'cached_key', jnp.zeros, key.shape, key.dtype\n )\n cached_value = self.variable(\n 'cache', 'cached_value', jnp.zeros, value.shape, value.dtype\n )\n cache_index = self.variable(\n 'cache', 'cache_index', lambda: jnp.array(0, dtype=jnp.int32)\n )\n if is_initialized:\n (\n *batch_dims,\n max_length,\n num_heads,\n depth_per_head,\n ) = cached_key.value.shape\n # shape check of cached keys against query input\n expected_shape = tuple(batch_dims) + (1, num_heads, depth_per_head)\n if expected_shape != query.shape:\n raise ValueError(\n 'Autoregressive cache shape error, '\n 'expected query shape %s instead got %s.'\n % (expected_shape, query.shape)\n )\n # update key, value caches with our new 1d spatial slices\n cur_index = cache_index.value\n zero = jnp.array(0, dtype=lax.dtype(cur_index.dtype))\n indices: tuple[int | jax.Array, ...] 
= (zero,) * len(\n batch_dims\n ) + (\n cur_index,\n zero,\n zero,\n )\n key = lax.dynamic_update_slice(cached_key.value, key, indices)\n value = lax.dynamic_update_slice(cached_value.value, value, indices)\n cached_key.value = key\n cached_value.value = value\n cache_index.value = cache_index.value + 1\n # causal mask for cached decoder self-attention:\n # our single query position should only attend to those key\n # positions that have already been generated and cached,\n # not the remaining zero elements.\n mask = combine_masks(\n mask,\n jnp.broadcast_to(\n jnp.arange(max_length) <= cur_index,\n tuple(batch_dims) + (1, 1, max_length),\n ),\n )\n\n if (\n self.dropout_rate > 0.0\n ): # Require `deterministic` only if using dropout.\n m_deterministic = merge_param(\n 'deterministic', self.deterministic, deterministic\n )\n if not m_deterministic and dropout_rng is None:\n dropout_rng = self.make_rng('dropout')\n else:\n m_deterministic = True\n\n # `qk_attn_weights_einsum` and `attn_weights_value_einsum` are optional\n # arguments that can be used to override the default `jnp.einsum`. They\n # exist for quantized einsum support in AQT.\n qk_attn_weights_einsum = (\n self.qk_attn_weights_einsum_cls()\n if self.qk_attn_weights_einsum_cls\n else None\n )\n attn_weights_value_einsum = (\n self.attn_weights_value_einsum_cls()\n if self.attn_weights_value_einsum_cls\n else None\n )\n # apply attention\n attn_args = (query, key, value)\n # This kwargs list match the default nn.dot_product_attention.\n # For custom `attention_fn`s, invalid kwargs will be filtered.\n attn_kwargs = dict(\n mask=mask,\n dropout_rng=dropout_rng,\n dropout_rate=self.dropout_rate,\n broadcast_dropout=self.broadcast_dropout,\n deterministic=m_deterministic,\n dtype=self.dtype,\n precision=self.precision,\n force_fp32_for_softmax=self.force_fp32_for_softmax,\n qk_attn_weights_einsum=qk_attn_weights_einsum,\n attn_weights_value_einsum=attn_weights_value_einsum,\n )\n attn_kwargs = {\n k: v\n for k, v in attn_kwargs.items()\n if k in inspect.signature(self.attention_fn).parameters\n }\n if sow_weights:\n x = self.attention_fn(*attn_args, **attn_kwargs, module=self)\n else:\n x = self.attention_fn(*attn_args, **attn_kwargs)\n # back to the original inputs dimensions\n out = DenseGeneral(\n features=features,\n axis=(-2, -1),\n kernel_init=self.out_kernel_init or self.kernel_init,\n bias_init=self.out_bias_init or self.bias_init,\n use_bias=self.use_bias,\n dtype=self.dtype,\n param_dtype=self.param_dtype,\n precision=self.precision,\n dot_general=self.out_dot_general,\n dot_general_cls=self.out_dot_general_cls,\n name='out', # type: ignore[call-arg]\n )(x)\n return out\n\n\nclass MultiHeadAttention(MultiHeadDotProductAttention):\n """"""Multi-head dot-product attention.\n Alias for ``MultiHeadDotProductAttention``.\n\n **NOTE**: ``MultiHeadAttention`` is a wrapper of ``MultiHeadDotProductAttention``,\n and so their implementations are identical. However ``MultiHeadAttention`` layers\n will, by default, be named ``MultiHeadAttention_{index}``, whereas ``MultiHeadDotProductAttention``\n will be named ``MultiHeadDotProductAttention_{index}``. 
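The decode branch above preallocates key/value buffers of max_length and writes one timestep at a time with lax.dynamic_update_slice; a minimal standalone version with hypothetical sizes (batch dimensions dropped):

import jax.numpy as jnp
from jax import lax

max_length, heads, dim = 4, 2, 3
cached_key = jnp.zeros((max_length, heads, dim))   # preallocated buffer
new_key = jnp.ones((1, heads, dim))                # this step's projection
cur_index = 2
cached_key = lax.dynamic_update_slice(cached_key, new_key, (cur_index, 0, 0))
print(cached_key[:, 0, 0])                         # [0. 0. 1. 0.]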
Therefore, this could affect\n checkpointing, param collection names and RNG threading (since the layer name is\n used when generating new RNG's) within the module.\n\n Example usage::\n\n >>> import flax.linen as nn\n >>> import jax\n\n >>> layer = nn.MultiHeadAttention(num_heads=8, qkv_features=16)\n >>> key1, key2, key3, key4, key5, key6 = jax.random.split(jax.random.key(0), 6)\n >>> shape = (4, 3, 2, 5)\n >>> q, k, v = jax.random.uniform(key1, shape), jax.random.uniform(key2, shape), jax.random.uniform(key3, shape)\n >>> variables = layer.init(jax.random.key(0), q)\n\n >>> # different inputs for inputs_q, inputs_k and inputs_v\n >>> out = layer.apply(variables, q, k, v)\n >>> # equivalent to layer.apply(variables, inputs_q=q, inputs_k=k, inputs_v=k)\n >>> out = layer.apply(variables, q, k)\n >>> # equivalent to layer.apply(variables, inputs_q=q, inputs_k=q) and layer.apply(variables, inputs_q=q, inputs_k=q, inputs_v=q)\n >>> out = layer.apply(variables, q)\n\n >>> attention_kwargs = dict(\n ... num_heads=8,\n ... qkv_features=16,\n ... kernel_init=nn.initializers.ones,\n ... bias_init=nn.initializers.zeros,\n ... dropout_rate=0.5,\n ... deterministic=False,\n ... )\n >>> class Module(nn.Module):\n ... attention_kwargs: dict\n ...\n ... @nn.compact\n ... def __call__(self, x, dropout_rng=None):\n ... out1 = nn.MultiHeadAttention(**self.attention_kwargs)(x, dropout_rng=dropout_rng)\n ... out2 = nn.MultiHeadAttention(**self.attention_kwargs)(x, dropout_rng=dropout_rng)\n ... return out1, out2\n >>> module = Module(attention_kwargs)\n >>> variables = module.init({'params': key1, 'dropout': key2}, q)\n\n >>> # out1 and out2 are different.\n >>> out1, out2 = module.apply(variables, q, rngs={'dropout': key3})\n >>> # out3 and out4 are different.\n >>> # out1 and out3 are different. out2 and out4 are different.\n >>> out3, out4 = module.apply(variables, q, rngs={'dropout': key4})\n >>> # out1 and out2 are the same.\n >>> out1, out2 = module.apply(variables, q, dropout_rng=key5)\n >>> # out1 and out2 are the same as out3 and out4.\n >>> # providing a `dropout_rng` arg will take precedence over the `rngs` arg in `.apply`\n >>> out3, out4 = module.apply(variables, q, rngs={'dropout': key6}, dropout_rng=key5)\n\n Attributes:\n num_heads: number of attention heads. Features (i.e. inputs_q.shape[-1])\n should be divisible by the number of heads.\n dtype: the dtype of the computation (default: infer from inputs and params)\n param_dtype: the dtype passed to parameter initializers (default: float32)\n qkv_features: dimension of the key, query, and value.\n out_features: dimension of the last projection\n broadcast_dropout: bool: use a broadcasted dropout along batch dims.\n dropout_rate: dropout rate\n deterministic: if false, the attention weight is masked randomly using\n dropout, whereas if true, the attention weights are deterministic.\n precision: numerical precision of the computation see ``jax.lax.Precision``\n for details.\n kernel_init: initializer for the kernel of the Dense layers.\n bias_init: initializer for the bias of the Dense layers.\n use_bias: bool: whether pointwise QKVO dense transforms use bias.\n attention_fn: dot_product_attention or compatible function. 
Accepts query,\n key, value, and returns output of shape ``[bs, dim1, dim2, ..., dimN,,\n num_heads, value_channels]``\n decode: whether to prepare and use an autoregressive cache.\n normalize_qk: should QK normalization be applied (arxiv.org/abs/2302.05442).\n """"""\n\n\nclass SelfAttention(MultiHeadDotProductAttention):\n """"""Self-attention special case of multi-head dot-product attention.\n This layer is deprecated in favor of ``MultiHeadDotProductAttention``.\n\n Example usage::\n >>> import flax.linen as nn\n >>> import jax, jax.numpy as jnp\n >>> layer = nn.MultiHeadDotProductAttention(num_heads=8, qkv_features=16)\n >>> variables = layer.init(jax.random.key(0), jnp.ones((4, 3, 2, 5)))\n """"""\n\n @compact\n def __call__( # type: ignore\n self,\n inputs_q: Array,\n mask: Array | None = None,\n deterministic: bool | None = None,\n dropout_rng: PRNGKey | None = None,\n sow_weights: bool = False,\n ):\n """"""Applies multi-head dot product self-attention on the input data.\n\n Projects the inputs into multi-headed query, key, and value vectors,\n applies dot-product attention and project the results to an output vector.\n\n Args:\n inputs_q: input queries of shape ``[batch_sizes..., length, features]``.\n mask: attention mask of shape ``[batch_sizes..., num_heads, query_length,\n key/value_length]``. Attention weights are masked out if their\n corresponding mask value is ``False``.\n deterministic: if false, the attention weight is masked randomly using\n dropout, whereas if true, the attention weights are deterministic.\n\n Returns:\n output of shape ``[batch_sizes..., length, features]``.\n """"""\n warnings.warn(\n 'SelfAttention will be deprecated soon. Use '\n '`MultiHeadDotProductAttention.__call__(inputs_q)` instead. '\n 'See https://github.com/google/flax/discussions/3389 '\n 'for more information.',\n DeprecationWarning,\n )\n return super().__call__(\n inputs_q,\n mask=mask,\n deterministic=deterministic,\n dropout_rng=dropout_rng,\n sow_weights=sow_weights,\n )\n\n\n# mask-making utility functions\n\n\ndef make_attention_mask(\n query_input: Array,\n key_input: Array,\n pairwise_fn: Callable[..., Any] = jnp.multiply,\n extra_batch_dims: int = 0,\n dtype: Dtype = jnp.float32,\n):\n """"""Mask-making helper for attention weights.\n\n In case of 1d inputs (i.e., ``[batch..., len_q]``, ``[batch..., len_kv]``, the\n attention weights will be ``[batch..., heads, len_q, len_kv]`` and this\n function will produce ``[batch..., 1, len_q, len_kv]``.\n\n Args:\n query_input: a batched, flat input of query_length size\n key_input: a batched, flat input of key_length size\n pairwise_fn: broadcasting elementwise comparison function\n extra_batch_dims: number of extra batch dims to add singleton axes for, none\n by default\n dtype: mask return dtype\n\n Returns:\n A ``[batch..., 1, len_q, len_kv]`` shaped mask for 1d attention.\n """"""\n mask = pairwise_fn(\n jnp.expand_dims(query_input, axis=-1), jnp.expand_dims(key_input, axis=-2)\n )\n mask = jnp.expand_dims(mask, axis=-3)\n mask = jnp.expand_dims(mask, axis=tuple(range(extra_batch_dims)))\n return mask.astype(dtype)\n\n\ndef make_causal_mask(\n x: Array, extra_batch_dims: int = 0, dtype: Dtype = jnp.float32\n) -> Array:\n """"""Make a causal mask for self-attention.\n\n In case of 1d inputs (i.e., ``[batch..., len]``, the self-attention weights\n will be ``[batch..., heads, len, len]`` and this function will produce a\n causal mask of shape ``[batch..., 1, len, len]``.\n\n Args:\n x: input array of shape ``[batch..., len]``\n 
extra_batch_dims: number of batch dims to add singleton axes for, none by\n default\n dtype: mask return dtype\n\n Returns:\n A ``[batch..., 1, len, len]`` shaped causal mask for 1d attention.\n """"""\n idxs = jnp.broadcast_to(jnp.arange(x.shape[-1], dtype=jnp.int32), x.shape)\n return make_attention_mask(\n idxs,\n idxs,\n jnp.greater_equal,\n extra_batch_dims=extra_batch_dims,\n dtype=dtype,\n )\n\n\ndef combine_masks(\n *masks: Array | None, dtype: Dtype = jnp.float32\n) -> Array | None:\n """"""Combine attention masks.\n\n Args:\n *masks: set of attention mask arguments to combine, some can be None.\n dtype: dtype for the returned mask.\n\n Returns:\n Combined mask, reduced by logical and, returns None if no masks given.\n """"""\n masks_list = [m for m in masks if m is not None]\n if not masks_list:\n return None\n assert all(\n map(lambda x: x.ndim == masks_list[0].ndim, masks_list)\n ), f'masks must have same rank: {tuple(map(lambda x: x.ndim, masks_list))}'\n mask, *other_masks = masks_list\n for other_mask in other_masks:\n mask = jnp.logical_and(mask, other_mask)\n return mask.astype(dtype)\n",python,tab +1740,4180286,".venv/lib/python3.10/site-packages/flax/linen/attention.py",27282,0,"",python,selection_mouse +1741,4192747,".venv/lib/python3.10/site-packages/flax/linen/attention.py",15126,0,"",python,selection_mouse +1742,4192878,".venv/lib/python3.10/site-packages/flax/linen/attention.py",15120,12,"attention_fn",python,selection_mouse +1743,4193217,".venv/lib/python3.10/site-packages/flax/linen/attention.py",15120,14,"attention_fn: ",python,selection_mouse +1744,4193251,".venv/lib/python3.10/site-packages/flax/linen/attention.py",15120,35,"attention_fn: dot_product_attention",python,selection_mouse +1745,4193623,".venv/lib/python3.10/site-packages/flax/linen/attention.py",15120,36,"attention_fn: dot_product_attention ",python,selection_mouse +1746,4193667,".venv/lib/python3.10/site-packages/flax/linen/attention.py",15120,38,"attention_fn: dot_product_attention or",python,selection_mouse +1747,4193670,".venv/lib/python3.10/site-packages/flax/linen/attention.py",15120,39,"attention_fn: dot_product_attention or ",python,selection_mouse +1748,4193729,".venv/lib/python3.10/site-packages/flax/linen/attention.py",15120,49,"attention_fn: dot_product_attention or compatible",python,selection_mouse +1749,4193986,".venv/lib/python3.10/site-packages/flax/linen/attention.py",15120,50,"attention_fn: dot_product_attention or compatible ",python,selection_mouse +1750,4194019,".venv/lib/python3.10/site-packages/flax/linen/attention.py",15120,58,"attention_fn: dot_product_attention or compatible function",python,selection_mouse +1751,4194303,".venv/lib/python3.10/site-packages/flax/linen/attention.py",15120,59,"attention_fn: dot_product_attention or compatible function.",python,selection_mouse +1752,4194354,".venv/lib/python3.10/site-packages/flax/linen/attention.py",15120,60,"attention_fn: dot_product_attention or compatible function. ",python,selection_mouse +1753,4194407,".venv/lib/python3.10/site-packages/flax/linen/attention.py",15120,67,"attention_fn: dot_product_attention or compatible function. 
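A short usage sketch for the mask helpers defined above (make_causal_mask, make_attention_mask, combine_masks), using toy shapes:

import jax.numpy as jnp
import flax.linen as nn

x = jnp.ones((1, 4))                      # (batch, len)
causal = nn.make_causal_mask(x)           # (1, 1, 4, 4), lower-triangular
pad = nn.make_attention_mask(x, x)        # (1, 1, 4, 4), all ones for this x
combined = nn.combine_masks(causal, pad)  # logical AND, cast to float
print(combined.shape)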
Accepts",python,selection_mouse +1754,4194872,".venv/lib/python3.10/site-packages/flax/linen/attention.py",15120,59,"attention_fn: dot_product_attention or compatible function.",python,selection_mouse +1755,4194889,".venv/lib/python3.10/site-packages/flax/linen/attention.py",15120,58,"attention_fn: dot_product_attention or compatible function",python,selection_mouse +1756,4194933,".venv/lib/python3.10/site-packages/flax/linen/attention.py",15106,26,"use bias.\n attention_fn",python,selection_mouse +1757,4194940,".venv/lib/python3.10/site-packages/flax/linen/attention.py",15095,37,"transforms use bias.\n attention_fn",python,selection_mouse +1758,4195007,".venv/lib/python3.10/site-packages/flax/linen/attention.py",15120,49,"attention_fn: dot_product_attention or compatible",python,selection_mouse +1759,4195007,".venv/lib/python3.10/site-packages/flax/linen/attention.py",15120,39,"attention_fn: dot_product_attention or ",python,selection_mouse +1760,4195022,".venv/lib/python3.10/site-packages/flax/linen/attention.py",15120,38,"attention_fn: dot_product_attention or",python,selection_mouse +1761,4195040,".venv/lib/python3.10/site-packages/flax/linen/attention.py",15120,35,"attention_fn: dot_product_attention",python,selection_mouse +1762,4195232,".venv/lib/python3.10/site-packages/flax/linen/attention.py",15120,104,"attention_fn: dot_product_attention or compatible function. Accepts query,\n key, value, and returns",python,selection_mouse +1763,4195251,".venv/lib/python3.10/site-packages/flax/linen/attention.py",15120,97,"attention_fn: dot_product_attention or compatible function. Accepts query,\n key, value, and ",python,selection_mouse +1764,4195265,".venv/lib/python3.10/site-packages/flax/linen/attention.py",15120,96,"attention_fn: dot_product_attention or compatible function. Accepts query,\n key, value, and",python,selection_mouse +1765,4195347,".venv/lib/python3.10/site-packages/flax/linen/attention.py",15120,93,"attention_fn: dot_product_attention or compatible function. Accepts query,\n key, value, ",python,selection_mouse +1766,4195407,".venv/lib/python3.10/site-packages/flax/linen/attention.py",15120,92,"attention_fn: dot_product_attention or compatible function. 
Accepts query,\n key, value,",python,selection_mouse +1767,4195970,".venv/lib/python3.10/site-packages/flax/linen/attention.py",15212,0,"",python,selection_mouse +1768,4198432,"utils/nn.py",0,0,"",python,tab +1769,4199832,".venv/lib/python3.10/site-packages/flax/linen/attention.py",0,0,"",python,tab +1770,4200234,".venv/lib/python3.10/site-packages/flax/linen/attention.py",14612,0,"",python,selection_mouse +1771,4201513,"utils/nn.py",0,0,"",python,tab +1772,4212465,"utils/nn.py",5603,0,"",python,selection_mouse +1773,4229247,".venv/lib/python3.10/site-packages/flax/linen/attention.py",0,0,"",python,tab +1774,4230723,"utils/nn.py",0,0,"",python,tab +1775,4236926,"utils/nn.py",5082,0,"",python,selection_mouse +1776,4236932,"utils/nn.py",5081,0,"",python,selection_command +1777,4239316,"utils/nn.py",5773,0,"",python,selection_mouse +1778,4239473,"utils/nn.py",5773,1,",",python,selection_mouse +1779,4239493,"utils/nn.py",5773,2,", ",python,selection_mouse +1780,4239547,"utils/nn.py",5773,4,", ma",python,selection_mouse +1781,4239547,"utils/nn.py",5773,5,", mas",python,selection_mouse +1782,4239547,"utils/nn.py",5773,6,", mask",python,selection_mouse +1783,4239557,"utils/nn.py",5773,7,", mask=",python,selection_mouse +1784,4239610,"utils/nn.py",5773,8,", mask=c",python,selection_mouse +1785,4239610,"utils/nn.py",5773,9,", mask=ca",python,selection_mouse +1786,4239611,"utils/nn.py",5773,10,", mask=cau",python,selection_mouse +1787,4239623,"utils/nn.py",5773,11,", mask=caus",python,selection_mouse +1788,4239680,"utils/nn.py",5773,12,", mask=causa",python,selection_mouse +1789,4239680,"utils/nn.py",5773,37,", mask=causal_mask)\n x = x + z",python,selection_mouse +1790,4240446,"utils/nn.py",5773,17,", mask=causal_mas",python,selection_mouse +1791,4240800,"utils/nn.py",5773,18,", mask=causal_mask",python,selection_mouse +1792,4241580,"utils/nn.py",5773,18,"",python,content +1793,4255584,"TERMINAL",0,0,"bash",,terminal_focus +1794,4257345,"TERMINAL",0,0,"bash",,terminal_focus +1795,4261268,"TERMINAL",0,0,"git status",,terminal_command +1796,4261289,"TERMINAL",0,0,"]633;E;2025-07-21 15:36:47 git status;ae54d867-0567-4fb2-95c9-86b7932c267e]633;COn branch new-arch-sampling\r\nAll conflicts fixed but you are still merging.\r\n (use ""git commit"" to conclude merge)\r\n\r\nChanges to be committed:\r\n\tmodified: genie.py\r\n\tnew file: input_pipeline/download/openai/download_actions_files.py\r\n\tmodified: models/dynamics.py\r\n\tmodified: models/lam.py\r\n\tmodified: models/tokenizer.py\r\n\tmodified: requirements.txt\r\n\tmodified: sample.py\r\n\tmodified: train_dynamics.py\r\n\tmodified: train_lam.py\r\n\tmodified: train_tokenizer.py\r\n\tmodified: utils/nn.py\r\n\r\nChanges not staged for commit:\r\n (use ""git add ..."" to update what will be committed)\r\n (use ""git restore ..."" to discard changes in working directory)\r\n\tmodified: utils/nn.py\r\n\r\nUntracked files:\r\n (use ""git add ..."" to include in what will be committed)\r\n\tdiff.diff\r\n\tdiff.log\r\n\tlogs/\r\n\tread_tf_record.py\r\n\trequirements-franz.txt\r\n\tscripts_cremers/\r\n\tscripts_horeka/\r\n\tslurm-3359333.out\r\n\tslurm-3359334.out\r\n\tslurm-3359338.out\r\n\tslurm/\r\n\tutils/logger_bak.py\r\n\tutils/visualizer.py\r\n\tweekend-job-requeuer.sh\r\n\tweekend-job-starter.sh\r\n\r\n]0;tum_cte0515@hkn1990:~/Projects/jafar]633;D;0",,terminal_output +1797,4270381,"TERMINAL",0,0,"git stash pop",,terminal_command +1798,4270422,"TERMINAL",0,0,"]633;E;2025-07-21 15:36:56 git stash pop;ae54d867-0567-4fb2-95c9-86b7932c267e]633;Cerror: 
Your local changes to the following files would be overwritten by merge:\r\n\tutils/nn.py\r\nPlease commit your changes or stash them before you merge.\r\nAborting\r\nOn branch new-arch-sampling\r\nAll conflicts fixed but you are still merging.\r\n (use ""git commit"" to conclude merge)\r\n\r\nChanges to be committed:\r\n\tmodified: genie.py\r\n\tnew file: input_pipeline/download/openai/download_actions_files.py\r\n\tmodified: models/dynamics.py\r\n\tmodified: models/lam.py\r\n\tmodified: models/tokenizer.py\r\n\tmodified: requirements.txt\r\n\tmodified: sample.py\r\n\tmodified: train_dynamics.py\r\n\tmodified: train_lam.py\r\n\tmodified: train_tokenizer.py\r\n\tmodified: utils/nn.py\r\n\r\nChanges not staged for commit:\r\n (use ""git add ..."" to update what will be committed)\r\n (use ""git restore ..."" to discard changes in working directory)\r\n\tmodified: utils/nn.py\r\n\r\nUntracked files:\r\n (use ""git add ..."" to include in what will be committed)\r\n\tdiff.diff\r\n\tdiff.log\r\n\tlogs/\r\n\tread_tf_record.py\r\n\trequirements-franz.txt\r\n\tscripts_cremers/\r\n\tscripts_horeka/\r\n\tslurm-3359333.out\r\n\tslurm-3359334.out\r\n\tslurm-3359338.out\r\n\tslurm/\r\n\tutils/logger_bak.py\r\n\tutils/visualizer.py\r\n\tweekend-job-requeuer.sh\r\n\tweekend-job-starter.sh\r\n\r\nThe stash entry is kept in case you need it again.\r\n]0;tum_cte0515@hkn1990:~/Projects/jafar]633;D;1",,terminal_output +1799,4298010,"utils/nn.py",0,0,"",python,tab +1800,4298083,"utils/nn.py",5762,0,"",python,selection_command +1801,4312987,"utils/nn.py",6017,0,"",python,selection_mouse +1802,4316212,"TERMINAL",0,0,"git stash pop",,terminal_command +1803,4316259,"TERMINAL",0,0,"]633;E;2025-07-21 15:37:42 git stash pop;ae54d867-0567-4fb2-95c9-86b7932c267e]633;C",,terminal_output +1804,4316313,"TERMINAL",0,0,"Auto-merging genie.py\r\nAuto-merging models/dynamics.py\r\nAuto-merging models/lam.py\r\nAuto-merging train_dynamics.py\r\nAuto-merging utils/nn.py\r\nCONFLICT (content): Merge conflict in utils/nn.py\r\n",,terminal_output +1805,4316397,"TERMINAL",0,0,"On branch new-arch-sampling\r\nChanges to be committed:\r\n (use ""git restore --staged ..."" to unstage)\r\n\tmodified: genie.py\r\n\tmodified: models/dynamics.py\r\n\tmodified: models/lam.py\r\n\tmodified: train_dynamics.py\r\n\r\nUnmerged paths:\r\n (use ""git restore --staged ..."" to unstage)\r\n (use ""git add ..."" to mark resolution)\r\n\tboth modified: utils/nn.py\r\n\r\nUntracked files:\r\n (use ""git add ..."" to include in what will be committed)\r\n\tdiff.diff\r\n\tdiff.log\r\n\tlogs/\r\n\tread_tf_record.py\r\n\trequirements-franz.txt\r\n\tscripts_cremers/\r\n\tscripts_horeka/\r\n\tslurm-3359333.out\r\n\tslurm-3359334.out\r\n\tslurm-3359338.out\r\n\tslurm/\r\n\tutils/logger_bak.py\r\n\tutils/visualizer.py\r\n\tweekend-job-requeuer.sh\r\n\tweekend-job-starter.sh\r\n\r\nThe stash entry is kept in case you need it again.\r\n]0;tum_cte0515@hkn1990:~/Projects/jafar]633;D;1",,terminal_output +1806,4322948,"utils/nn.py",0,0,"",python,tab +1807,4325189,"utils/nn.py",0,0,"",python,tab +1808,4333006,"utils/nn.py",4388,30," use_flash_attention: bool\n spatial_bert: bool = True\n",python,content +1809,4338371,"utils/nn.py",4729,0,"",python,selection_command +1810,4350806,"utils/nn.py",5023,0,"",python,selection_command +1811,4352187,"utils/nn.py",7338,0,"",python,selection_command +1812,4352837,"utils/nn.py",4729,0,"",python,selection_command +1813,4376719,"utils/nn.py",0,0,"import math\nfrom typing import Dict, Tuple\n\nfrom flax import linen as nn\nimport 
jax\nimport jax.numpy as jnp\n\n\nclass PositionalEncoding(nn.Module):\n """"""https://uvadlc-notebooks.readthedocs.io/en/latest/tutorial_notebooks/JAX/tutorial6/Transformers_and_MHAttention.html""""""\n\n d_model: int # Hidden dimensionality of the input.\n max_len: int = 5000 # Maximum length of a sequence to expect.\n\n def setup(self):\n # Create matrix of [SeqLen, HiddenDim] representing the positional encoding for max_len inputs\n self.pe = jnp.zeros((self.max_len, self.d_model))\n position = jnp.arange(0, self.max_len, dtype=jnp.float32)[:, None]\n div_term = jnp.exp(\n jnp.arange(0, self.d_model, 2) * (-math.log(10000.0) / self.d_model)\n )\n self.pe = self.pe.at[:, 0::2].set(jnp.sin(position * div_term))\n self.pe = self.pe.at[:, 1::2].set(jnp.cos(position * div_term))\n\n def __call__(self, x):\n x = x + self.pe[: x.shape[2]]\n return x\n\n# class STBlock2(nn.Module):\n # dim: int\n # num_heads: int\n # dropout: float\n # param_dtype: jnp.dtype\n # dtype: jnp.dtype\n\n # @nn.remat\n # @nn.compact\n # def __call__(self, x: jax.Array) -> jax.Array:\n # # --- Spatial attention ---\n # z = PositionalEncoding(self.dim)(x)\n # z = nn.LayerNorm(\n # param_dtype=self.param_dtype,\n # dtype=self.dtype,\n # )(z)\n # causal_mask = jnp.tri(z.shape[-2])\n # z = nn.MultiHeadAttention(\n # num_heads=self.num_heads,\n # qkv_features=self.dim,\n # dropout_rate=self.dropout,\n # param_dtype=self.param_dtype,\n # dtype=self.dtype,\n # )(z, mask=causal_mask)\n # x = x + z\n\n # # --- Temporal attention ---\n # x = x.swapaxes(1, 2)\n # z = PositionalEncoding(self.dim)(x)\n # z = nn.LayerNorm(\n # param_dtype=self.param_dtype,\n # dtype=self.dtype,\n # )(z)\n # causal_mask = jnp.tri(z.shape[-2])\n # z = nn.MultiHeadAttention(\n # num_heads=self.num_heads,\n # qkv_features=self.dim,\n # dropout_rate=self.dropout,\n # param_dtype=self.param_dtype,\n # dtype=self.dtype,\n # )(z, mask=causal_mask)\n # x = x + z\n # x = x.swapaxes(1, 2)\n\n # # --- Feedforward ---\n # z = nn.LayerNorm(\n # param_dtype=self.param_dtype,\n # dtype=self.dtype,\n # )(x)\n # # FIXME (f.srambical): Here, the attention hidden dimension is the same as the FFN's. 
Usually, FFN hidden dimension is 4x model_dim\n # z = nn.Dense(\n # self.dim,\n # param_dtype=self.param_dtype,\n # dtype=self.dtype,\n # )(z)\n # z = nn.gelu(z)\n # x = x + z\n\n # return x\n\n# class CausalTransformer(nn.Module):\n # model_dim: int\n # out_dim: int\n # num_blocks: int\n # num_heads: int\n # dropout: float\n # param_dtype: jnp.dtype\n # dtype: jnp.dtype\n\n # @nn.compact\n # def __call__(self, x: jax.Array) -> jax.Array:\n # # Input projection and normalization\n # x = nn.Sequential(\n # [\n # nn.LayerNorm(\n # param_dtype=self.param_dtype,\n # dtype=self.dtype,\n # ),\n # nn.Dense(self.model_dim,\n # param_dtype=self.param_dtype,\n # dtype=self.dtype,\n # ),\n # nn.LayerNorm(\n # param_dtype=self.param_dtype,\n # dtype=self.dtype,\n # ),\n # ]\n # )(x)\n # # Causal transformer blocks\n # for _ in range(self.num_blocks):\n # x = STBlock2(\n # dim=self.model_dim,\n # num_heads=self.num_heads,\n # dropout=self.dropout,\n # param_dtype=self.param_dtype,\n # dtype=self.dtype,\n # )(x)\n\n # # Output projection\n # x = nn.Dense(\n # self.out_dim,\n # param_dtype=self.param_dtype,\n # dtype=self.dtype,\n # )(x)\n # return x # (B, T, E)\n\n\nclass STBlock(nn.Module):\n dim: int\n num_heads: int\n dropout: float\n param_dtype: jnp.dtype\n dtype: jnp.dtype\n spatial_bert: bool = True\n\n @nn.remat\n @nn.compact\n def __call__(self, x: jax.Array) -> jax.Array:\n # --- Spatial attention ---\n z = PositionalEncoding(self.dim)(x)\n z = nn.LayerNorm(\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n )(z)\n spatial_mask = None if self.spatial_bert else jnp.tri(z.shape[-2])\n z = nn.MultiHeadAttention(\n num_heads=self.num_heads,\n qkv_features=self.dim,\n dropout_rate=self.dropout,\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n )(z, mask=spatial_mask)\n x = x + z\n\n # --- Temporal attention ---\n x = x.swapaxes(1, 2)\n z = PositionalEncoding(self.dim)(x)\n z = nn.LayerNorm(\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n )(z)\n causal_mask = jnp.tri(z.shape[-2])\n z = nn.MultiHeadAttention(\n num_heads=self.num_heads,\n qkv_features=self.dim,\n dropout_rate=self.dropout,\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n )(z, mask=causal_mask)\n x = x + z\n x = x.swapaxes(1, 2)\n\n # --- Feedforward ---\n z = nn.LayerNorm(\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n )(x)\n # FIXME (f.srambical): Here, the attention hidden dimension is the same as the FFN's. 
Usually, FFN hidden dimension is 4x model_dim\n z = nn.Dense(\n self.dim,\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n )(z)\n z = nn.gelu(z)\n x = x + z\n\n return x\n\n\nclass STTransformer(nn.Module):\n model_dim: int\n out_dim: int\n num_blocks: int\n num_heads: int\n dropout: float\n param_dtype: jnp.dtype\n dtype: jnp.dtype\n spacial_bert: bool = True\n\n @nn.compact\n def __call__(self, x: jax.Array) -> jax.Array:\n x = nn.Sequential(\n [\n nn.LayerNorm(\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n ),\n nn.Dense(self.model_dim,\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n ),\n nn.LayerNorm(\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n ),\n ]\n )(x)\n for _ in range(self.num_blocks):\n x = STBlock(\n dim=self.model_dim,\n num_heads=self.num_heads,\n dropout=self.dropout,\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n spatial_bert=self.spacial_bert,\n )(x)\n x = nn.Dense(\n self.out_dim,\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n )(x)\n return x # (B, T, E)\n\n\ndef normalize(x):\n return x / (jnp.linalg.norm(x, ord=2, axis=-1, keepdims=True) + 1e-8)\n\n\nclass VectorQuantizer(nn.Module):\n latent_dim: int\n num_latents: int\n dropout: float\n\n def setup(self):\n self.codebook = normalize(\n self.param(\n ""codebook"",\n nn.initializers.lecun_uniform(),\n (self.num_latents, self.latent_dim),\n )\n )\n self.drop = nn.Dropout(self.dropout, deterministic=False)\n\n def __call__(\n self, x: jax.Array, training: bool\n ) -> Tuple[jax.Array, jax.Array, jax.Array, jax.Array]:\n # --- Compute distances ---\n x = normalize(x)\n codebook = normalize(self.codebook)\n distance = -jnp.matmul(x, codebook.T)\n if training:\n dropout_key = self.make_rng(""dropout"")\n distance = self.drop(distance, rng=dropout_key)\n\n # --- Get indices and embeddings ---\n indices = jnp.argmin(distance, axis=-1)\n z = self.codebook[indices]\n\n # --- Straight through estimator ---\n z_q = x + jax.lax.stop_gradient(z - x)\n return z_q, z, x, indices\n\n def get_codes(self, indices: jax.Array):\n return self.codebook[indices]\n",python,tab +1814,4376720,"utils/nn.py",4668,0,"",python,selection_mouse +1815,4376862,"utils/nn.py",4661,12,"spatial_mask",python,selection_mouse +1816,4389369,"utils/nn.py",0,0,"",python,tab +1817,4389370,"utils/nn.py",5040,0,"",python,selection_mouse +1818,4390299,"utils/nn.py",5040,1,"t",python,content +1819,4393539,"utils/nn.py",7356,0,"",python,selection_command +1820,4398793,"utils/nn.py",7340,48," spatial_bert=self.spacial_bert,\n use_flash_attention=self.use_flash_attention,\n",python,content +1821,4402604,"utils/nn.py",0,0,"",python,tab +1822,4403654,"utils/nn.py",4635,0,"",python,selection_mouse +1823,4403655,"utils/nn.py",4634,0,"",python,selection_command +1824,4405838,"utils/nn.py",4660,1,"s",python,selection_command +1825,4405919,"utils/nn.py",5037,2,"sp",python,selection_command +1826,4406044,"utils/nn.py",5037,3,"spa",python,selection_command +1827,4406263,"utils/nn.py",6472,4,"spac",python,selection_command +1828,4406348,"utils/nn.py",6472,5,"spaci",python,selection_command +1829,4406453,"utils/nn.py",6472,6,"spacia",python,selection_command +1830,4406538,"utils/nn.py",6472,7,"spacial",python,selection_command +1831,4411616,"utils/nn.py",6472,7,"spatial",python,content +1832,4411620,"utils/nn.py",7374,7,"spacial",python,selection_command +1833,4412357,"utils/nn.py",7374,7,"spatial",python,content +1834,4430457,"utils/nn.py",0,0,"",python,tab +1835,4430458,"utils/nn.py",6468,0,"",python,selection_command 
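The next record (sequence 1836) snapshots utils/nn.py after the explicit mask arguments were replaced by an attention_fn built with _create_flash_attention_fn, whose docstring notes that cuDNN's flash attention requires the sequence length to be a multiple of 4, so the wrapper pads query/key/value, masks the padded positions, and slices the output back. A minimal, self-contained sketch of that padding-and-masking idea, assuming jax.nn.dot_product_attention is available; the helper name padded_attention is illustrative and not part of the recording:

import jax
import jax.numpy as jnp

def padded_attention(q, k, v, use_flash: bool = False):
    # q, k, v: (batch, seq_len, num_heads, head_dim)
    seq_len = q.shape[1]
    target = ((seq_len + 3) // 4) * 4  # round up to a multiple of 4
    pad = target - seq_len
    pad_widths = ((0, 0), (0, pad), (0, 0), (0, 0))
    q_p, k_p, v_p = (jnp.pad(x, pad_widths) for x in (q, k, v))
    # Boolean mask, True where both query and key positions are real tokens,
    # broadcastable to (batch, num_heads, target, target).
    valid = jnp.arange(target) < seq_len
    mask = (valid[:, None] & valid[None, :])[None, None]
    out = jax.nn.dot_product_attention(
        q_p, k_p, v_p,
        mask=mask,
        implementation='cudnn' if use_flash else None,
    )
    return out[:, :seq_len]  # drop the padded query rows

# e.g. with q = k = v = jnp.ones((2, 7, 8, 16)),
# padded_attention(q, k, v).shape == (2, 7, 8, 16)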
+1836,4446360,"utils/nn.py",0,0,"import math\nfrom typing import Tuple\nfrom functools import partial\n\nfrom flax import linen as nn\nimport jax\nimport jax.numpy as jnp\nimport einops\n\n\nclass PositionalEncoding(nn.Module):\n """"""https://uvadlc-notebooks.readthedocs.io/en/latest/tutorial_notebooks/JAX/tutorial6/Transformers_and_MHAttention.html""""""\n\n d_model: int # Hidden dimensionality of the input.\n max_len: int = 5000 # Maximum length of a sequence to expect.\n\n def setup(self):\n # Create matrix of [SeqLen, HiddenDim] representing the positional encoding for max_len inputs\n self.pe = jnp.zeros((self.max_len, self.d_model))\n position = jnp.arange(0, self.max_len, dtype=jnp.float32)[:, None]\n div_term = jnp.exp(\n jnp.arange(0, self.d_model, 2) * (-math.log(10000.0) / self.d_model)\n )\n self.pe = self.pe.at[:, 0::2].set(jnp.sin(position * div_term))\n self.pe = self.pe.at[:, 1::2].set(jnp.cos(position * div_term))\n\n def __call__(self, x):\n x = x + self.pe[: x.shape[2]]\n return x\n\n# class STBlock2(nn.Module):\n # dim: int\n # num_heads: int\n # dropout: float\n # param_dtype: jnp.dtype\n # dtype: jnp.dtype\n\n # @nn.remat\n # @nn.compact\n # def __call__(self, x: jax.Array) -> jax.Array:\n # # --- Spatial attention ---\n # z = PositionalEncoding(self.dim)(x)\n # z = nn.LayerNorm(\n # param_dtype=self.param_dtype,\n # dtype=self.dtype,\n # )(z)\n # causal_mask = jnp.tri(z.shape[-2])\n # z = nn.MultiHeadAttention(\n # num_heads=self.num_heads,\n # qkv_features=self.dim,\n # dropout_rate=self.dropout,\n # param_dtype=self.param_dtype,\n # dtype=self.dtype,\n # )(z, mask=causal_mask)\n # x = x + z\n\n # # --- Temporal attention ---\n # x = x.swapaxes(1, 2)\n # z = PositionalEncoding(self.dim)(x)\n # z = nn.LayerNorm(\n # param_dtype=self.param_dtype,\n # dtype=self.dtype,\n # )(z)\n # causal_mask = jnp.tri(z.shape[-2])\n # z = nn.MultiHeadAttention(\n # num_heads=self.num_heads,\n # qkv_features=self.dim,\n # dropout_rate=self.dropout,\n # param_dtype=self.param_dtype,\n # dtype=self.dtype,\n # )(z, mask=causal_mask)\n # x = x + z\n # x = x.swapaxes(1, 2)\n\n # # --- Feedforward ---\n # z = nn.LayerNorm(\n # param_dtype=self.param_dtype,\n # dtype=self.dtype,\n # )(x)\n # # FIXME (f.srambical): Here, the attention hidden dimension is the same as the FFN's. 
Usually, FFN hidden dimension is 4x model_dim\n # z = nn.Dense(\n # self.dim,\n # param_dtype=self.param_dtype,\n # dtype=self.dtype,\n # )(z)\n # z = nn.gelu(z)\n # x = x + z\n\n # return x\n\n# class CausalTransformer(nn.Module):\n # model_dim: int\n # out_dim: int\n # num_blocks: int\n # num_heads: int\n # dropout: float\n # param_dtype: jnp.dtype\n # dtype: jnp.dtype\n\n # @nn.compact\n # def __call__(self, x: jax.Array) -> jax.Array:\n # # Input projection and normalization\n # x = nn.Sequential(\n # [\n # nn.LayerNorm(\n # param_dtype=self.param_dtype,\n # dtype=self.dtype,\n # ),\n # nn.Dense(self.model_dim,\n # param_dtype=self.param_dtype,\n # dtype=self.dtype,\n # ),\n # nn.LayerNorm(\n # param_dtype=self.param_dtype,\n # dtype=self.dtype,\n # ),\n # ]\n # )(x)\n # # Causal transformer blocks\n # for _ in range(self.num_blocks):\n # x = STBlock2(\n # dim=self.model_dim,\n # num_heads=self.num_heads,\n # dropout=self.dropout,\n # param_dtype=self.param_dtype,\n # dtype=self.dtype,\n # )(x)\n\n # # Output projection\n # x = nn.Dense(\n # self.out_dim,\n # param_dtype=self.param_dtype,\n # dtype=self.dtype,\n # )(x)\n # return x # (B, T, E)\n\n\nclass STBlock(nn.Module):\n dim: int\n num_heads: int\n dropout: float\n param_dtype: jnp.dtype\n dtype: jnp.dtype\n use_flash_attention: bool\n spatial_bert: bool = True\n\n @nn.remat\n @nn.compact\n def __call__(self, x: jax.Array) -> jax.Array:\n # --- Spatial attention ---\n z = PositionalEncoding(self.dim)(x)\n z = nn.LayerNorm(\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n )(z)\n z = nn.MultiHeadAttention(\n num_heads=self.num_heads,\n qkv_features=self.dim,\n dropout_rate=self.dropout,\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n attention_fn=_create_flash_attention_fn(self.use_flash_attention, is_causal=not self.spatial_bert),\n )(z)\n x = x + z\n\n # --- Temporal attention ---\n x = x.swapaxes(1, 2)\n z = PositionalEncoding(self.dim)(x)\n z = nn.LayerNorm(\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n )(z)\n causal_mask = jnp.tri(z.shape[-2])\n z = nn.MultiHeadAttention(\n num_heads=self.num_heads,\n qkv_features=self.dim,\n dropout_rate=self.dropout,\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n attention_fn=_create_flash_attention_fn(self.use_flash_attention, is_causal=True),\n # FIXME (f.srambical): check whether we should still pass the mask if we set is_causal=True\n )(z)\n x = x + z\n x = x.swapaxes(1, 2)\n\n # --- Feedforward ---\n z = nn.LayerNorm(\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n )(x)\n # FIXME (f.srambical): Here, the attention hidden dimension is the same as the FFN's. 
Usually, FFN hidden dimension is 4x model_dim\n z = nn.Dense(\n self.dim,\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n )(z)\n z = nn.gelu(z)\n x = x + z\n\n return x\n\n\nclass STTransformer(nn.Module):\n model_dim: int\n out_dim: int\n num_blocks: int\n num_heads: int\n dropout: float\n param_dtype: jnp.dtype\n dtype: jnp.dtype\n spatial_bert: bool = True\n use_flash_attention: bool\n\n @nn.compact\n def __call__(self, x: jax.Array) -> jax.Array:\n x = nn.Sequential(\n [\n nn.LayerNorm(\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n ),\n nn.Dense(self.model_dim,\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n ),\n nn.LayerNorm(\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n ),\n ]\n )(x)\n for _ in range(self.num_blocks):\n x = STBlock(\n dim=self.model_dim,\n num_heads=self.num_heads,\n dropout=self.dropout,\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n spatial_bert=self.spatial_bert,\n use_flash_attention=self.use_flash_attention,\n )(x)\n x = nn.Dense(\n self.out_dim,\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n )(x)\n return x # (B, T, E)\n\n\ndef normalize(x):\n return x / (jnp.linalg.norm(x, ord=2, axis=-1, keepdims=True) + 1e-8)\n\n\nclass VectorQuantizer(nn.Module):\n latent_dim: int\n num_latents: int\n dropout: float\n\n def setup(self):\n self.codebook = normalize(\n self.param(\n ""codebook"",\n nn.initializers.lecun_uniform(),\n (self.num_latents, self.latent_dim),\n )\n )\n self.drop = nn.Dropout(self.dropout, deterministic=False)\n\n def __call__(\n self, x: jax.Array, training: bool\n ) -> Tuple[jax.Array, jax.Array, jax.Array, jax.Array]:\n # --- Compute distances ---\n x = normalize(x)\n codebook = normalize(self.codebook)\n distance = -jnp.matmul(x, codebook.T)\n if training:\n dropout_key = self.make_rng(""dropout"")\n distance = self.drop(distance, rng=dropout_key)\n\n # --- Get indices and embeddings ---\n indices = jnp.argmin(distance, axis=-1)\n z = self.codebook[indices]\n\n # --- Straight through estimator ---\n z_q = x + jax.lax.stop_gradient(z - x)\n return z_q, z, x, indices\n\n def get_codes(self, indices: jax.Array):\n return self.codebook[indices]\n\n\ndef _create_flash_attention_fn(use_flash_attention: bool, is_causal: bool):\n """"""\n Create an attention function that uses flash attention if enabled.\n\n Flax MultiHeadAttention provides tensors with shape (batch..., length, num_heads, head_dim)\n jax.nn.dot_product_attention expects (batch, length, num_heads, head_dim).\n\n We need to reshape to ensure compatibility. cuDNN's flash attention additionally\n requires a sequence length that is a multiple of 4. We pad the sequence length to the nearest\n multiple of 4 and mask accordingly.\n """"""\n \n def attention_fn(query, key, value, bias=None, mask=None, **kwargs):\n implementation = 'cudnn' if use_flash_attention else None\n\n def _rearrange(x):\n return einops.rearrange(x, '... l h d -> (...) 
l h d')\n def _pad(x):\n return jnp.pad(x, ((0, 0), (0, pad_size), (0, 0), (0, 0)))\n def _fuse_masks(mask: jax.Array, attention_mask: jax.Array) -> jax.Array:\n mask_bool = mask.astype(jnp.bool_)\n expanded_mask = jnp.pad(mask_bool, ((0, pad_size), (0, pad_size)), constant_values=False)\n return jnp.logical_and(attention_mask, expanded_mask)\n \n original_shape = query.shape\n original_seq_len = query.shape[-3]\n \n # Pad to nearest multiple of 4\n target_seq_len = ((original_seq_len + 3) // 4) * 4\n pad_size = target_seq_len - original_seq_len\n\n query_4d = _pad(_rearrange(query))\n key_4d = _pad(_rearrange(key))\n value_4d = _pad(_rearrange(value))\n \n attention_mask = jnp.ones((target_seq_len, target_seq_len), dtype=jnp.bool_)\n attention_mask = attention_mask.at[original_seq_len:, :].set(False)\n attention_mask = attention_mask.at[:, original_seq_len:].set(False)\n\n mask_4d = _fuse_masks(mask, attention_mask) if mask is not None else attention_mask\n mask_4d = mask_4d[jnp.newaxis, jnp.newaxis, :, :] # (1, 1, seq_len, seq_len)\n \n bias_4d = _pad(_rearrange(bias)) if bias is not None else None\n \n output_4d = jax.nn.dot_product_attention(\n query=query_4d,\n key=key_4d,\n value=value_4d,\n bias=bias_4d,\n mask=mask_4d,\n implementation=implementation,\n is_causal=is_causal,\n **kwargs\n )\n return output_4d[..., :original_seq_len, :, :].reshape(original_shape)\n \n return attention_fn\n\n",python,tab +1837,4446361,"utils/nn.py",4418,0,"",python,selection_command +1838,4467260,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.git/COMMIT_EDITMSG",0,0,"\n# Please enter the commit message for your changes. Lines starting\n# with '#' will be ignored, and an empty message aborts the commit.\n#\n# On branch new-arch-sampling\n# Changes to be committed:\n#\tmodified: genie.py\n#\tmodified: models/dynamics.py\n#\tmodified: models/lam.py\n#\tmodified: train_dynamics.py\n#\tmodified: utils/nn.py\n#\n# Untracked files:\n#\tdiff.diff\n#\tdiff.log\n#\tlogs/\n#\tread_tf_record.py\n#\trequirements-franz.txt\n#\tscripts_cremers/\n#\tscripts_horeka/\n#\tslurm-3359333.out\n#\tslurm-3359334.out\n#\tslurm-3359338.out\n#\tslurm/\n#\tutils/logger_bak.py\n#\tutils/visualizer.py\n#\tweekend-job-requeuer.sh\n#\tweekend-job-starter.sh\n#\n",git-commit,tab +1839,4468364,"TERMINAL",0,0,"srun",,terminal_focus +1840,4485740,"TERMINAL",0,0,"salloc",,terminal_focus +1841,4487537,"TERMINAL",0,0,"bash",,terminal_focus +1842,4489619,"TERMINAL",0,0,"git status",,terminal_command +1843,4489658,"TERMINAL",0,0,"]633;E;2025-07-21 15:40:36 git status;ae54d867-0567-4fb2-95c9-86b7932c267e]633;COn branch new-arch-sampling\r\nChanges to be committed:\r\n (use ""git restore --staged ..."" to unstage)\r\n\tmodified: genie.py\r\n\tmodified: models/dynamics.py\r\n\tmodified: models/lam.py\r\n\tmodified: train_dynamics.py\r\n\tmodified: utils/nn.py\r\n\r\nUntracked files:\r\n (use ""git add ..."" to include in what will be committed)\r\n\tdiff.diff\r\n\tdiff.log\r\n\tlogs/\r\n\tread_tf_record.py\r\n\trequirements-franz.txt\r\n\tscripts_cremers/\r\n\tscripts_horeka/\r\n\tslurm-3359333.out\r\n\tslurm-3359334.out\r\n\tslurm-3359338.out\r\n\tslurm/\r\n\tutils/logger_bak.py\r\n\tutils/visualizer.py\r\n\tweekend-job-requeuer.sh\r\n\tweekend-job-starter.sh\r\n\r\n]0;tum_cte0515@hkn1990:~/Projects/jafar]633;D;0",,terminal_output +1844,4508202,"vscode.git.Git",0,0,"2025-07-21 14:25:46.798 [info] [main] Log level: Info\n2025-07-21 14:25:46.798 [info] [main] Validating found git in: ""git""\n2025-07-21 14:25:46.799 [info] 
[main] Using git ""2.43.5"" from ""git""\n2025-07-21 14:25:46.799 [info] [Model][doInitialScan] Initial repository scan started\n2025-07-21 14:25:46.799 [info] > git rev-parse --show-toplevel [202ms]\n2025-07-21 14:25:46.799 [info] > git rev-parse --path-format=relative --show-toplevel [11ms]\n2025-07-21 14:25:46.823 [info] > git rev-parse --git-dir --git-common-dir [12ms]\n2025-07-21 14:25:46.848 [info] [Model][openRepository] Opened repository: /home/hk-project-p0023960/tum_cte0515/Projects/jafar\n2025-07-21 14:25:46.931 [info] > git rev-parse --show-toplevel [51ms]\n2025-07-21 14:25:46.941 [info] > git config --get commit.template [76ms]\n2025-07-21 14:25:46.983 [info] > git rev-parse --path-format=relative --show-toplevel [43ms]\n2025-07-21 14:25:47.015 [info] > git rev-parse --show-toplevel [14ms]\n2025-07-21 14:25:47.036 [info] > git rev-parse --path-format=relative --show-toplevel [11ms]\n2025-07-21 14:25:47.059 [info] > git rev-parse --show-toplevel [9ms]\n2025-07-21 14:25:47.086 [info] > git rev-parse --path-format=relative --show-toplevel [12ms]\n2025-07-21 14:25:47.102 [info] > git for-each-ref --format=%(refname)%00%(upstream:short)%00%(objectname)%00%(upstream:track)%00%(upstream:remotename)%00%(upstream:remoteref) refs/heads/new-arch-sampling refs/remotes/new-arch-sampling [138ms]\n2025-07-21 14:25:47.150 [info] > git rev-parse --show-toplevel [49ms]\n2025-07-21 14:25:47.172 [info] > git rev-parse --path-format=relative --show-toplevel [10ms]\n2025-07-21 14:25:47.194 [info] > git rev-parse --show-toplevel [10ms]\n2025-07-21 14:25:47.207 [info] > git status -z -uall [89ms]\n2025-07-21 14:25:47.216 [info] > git rev-parse --path-format=relative --show-toplevel [11ms]\n2025-07-21 14:25:47.238 [info] > git rev-parse --show-toplevel [11ms]\n2025-07-21 14:25:47.263 [info] > git rev-parse --path-format=relative --show-toplevel [11ms]\n2025-07-21 14:25:47.289 [info] > git rev-parse --show-toplevel [9ms]\n2025-07-21 14:25:47.315 [info] > git rev-parse --path-format=relative --show-toplevel [10ms]\n2025-07-21 14:25:47.345 [info] > git rev-parse --show-toplevel [18ms]\n2025-07-21 14:25:47.368 [info] > git rev-parse --path-format=relative --show-toplevel [10ms]\n2025-07-21 14:25:47.387 [info] > git rev-parse --show-toplevel [8ms]\n2025-07-21 14:25:47.405 [info] > git rev-parse --path-format=relative --show-toplevel [8ms]\n2025-07-21 14:25:47.427 [info] > git rev-parse --show-toplevel [10ms]\n2025-07-21 14:25:47.501 [info] > git check-ignore -v -z --stdin [34ms]\n2025-07-21 14:25:47.503 [info] > git rev-parse --path-format=relative --show-toplevel [61ms]\n2025-07-21 14:25:47.525 [info] > git rev-parse --show-toplevel [8ms]\n2025-07-21 14:25:47.543 [info] > git for-each-ref --sort -committerdate --format %(refname) %(objectname) %(*objectname) [406ms]\n2025-07-21 14:25:47.652 [info] > git for-each-ref --format=%(refname)%00%(upstream:short)%00%(objectname)%00%(upstream:track)%00%(upstream:remotename)%00%(upstream:remoteref) refs/heads/new-arch-sampling refs/remotes/new-arch-sampling [77ms]\n2025-07-21 14:25:47.666 [info] > git rev-parse --path-format=relative --show-toplevel [126ms]\n2025-07-21 14:25:47.701 [info] > git config --local branch.new-arch-sampling.vscode-merge-base [35ms]\n2025-07-21 14:25:47.714 [info] > git config --get commit.template [65ms]\n2025-07-21 14:25:47.715 [info] > git rev-parse --show-toplevel [25ms]\n2025-07-21 14:25:47.742 [info] > git for-each-ref 
--format=%(refname)%00%(upstream:short)%00%(objectname)%00%(upstream:track)%00%(upstream:remotename)%00%(upstream:remoteref) refs/heads/origin/main refs/remotes/origin/main [28ms]\n2025-07-21 14:25:47.746 [info] > git rev-parse --path-format=relative --show-toplevel [18ms]\n2025-07-21 14:25:47.760 [info] > git for-each-ref --format=%(refname)%00%(upstream:short)%00%(objectname)%00%(upstream:track)%00%(upstream:remotename)%00%(upstream:remoteref) refs/heads/new-arch-sampling refs/remotes/new-arch-sampling [20ms]\n2025-07-21 14:25:47.789 [info] > git rev-parse --show-toplevel [29ms]\n2025-07-21 14:25:47.821 [info] > git status -z -uall [48ms]\n2025-07-21 14:25:47.822 [info] > git for-each-ref --sort -committerdate --format %(refname) %(objectname) %(*objectname) [37ms]\n2025-07-21 14:25:47.824 [info] > git rev-parse --path-format=relative --show-toplevel [21ms]\n2025-07-21 14:25:47.860 [info] > git rev-parse --show-toplevel [17ms]\n2025-07-21 14:25:47.879 [info] > git rev-parse --path-format=relative --show-toplevel [9ms]\n2025-07-21 14:25:47.899 [info] > git rev-parse --git-dir --git-common-dir [10ms]\n2025-07-21 14:25:47.903 [info] [Model][openRepository] Opened repository: /home/hk-project-p0023960/tum_cte0515/Projects/jafar/slurm\n2025-07-21 14:25:47.949 [info] > git rev-parse --show-toplevel [22ms]\n2025-07-21 14:25:47.961 [info] > git config --get commit.template [47ms]\n2025-07-21 14:25:47.973 [info] > git rev-parse --path-format=relative --show-toplevel [12ms]\n2025-07-21 14:25:47.992 [info] > git rev-parse --show-toplevel [9ms]\n2025-07-21 14:25:48.007 [info] > git for-each-ref --format=%(refname)%00%(upstream:short)%00%(objectname)%00%(upstream:track)%00%(upstream:remotename)%00%(upstream:remoteref) refs/heads/main refs/remotes/main [35ms]\n2025-07-21 14:25:48.049 [info] > git rev-parse --path-format=relative --show-toplevel [46ms]\n2025-07-21 14:25:48.062 [info] > git for-each-ref --sort -committerdate --format %(refname) %(objectname) %(*objectname) [27ms]\n2025-07-21 14:25:48.069 [info] > git status -z -uall [46ms]\n2025-07-21 14:25:48.139 [info] > git rev-parse --show-toplevel [78ms]\n2025-07-21 14:25:48.154 [info] > git config --get commit.template [45ms]\n2025-07-21 14:25:48.154 [info] > git for-each-ref --format=%(refname)%00%(upstream:short)%00%(objectname)%00%(upstream:track)%00%(upstream:remotename)%00%(upstream:remoteref) refs/heads/main refs/remotes/main [63ms]\n2025-07-21 14:25:48.170 [info] > git rev-parse --path-format=relative --show-toplevel [19ms]\n2025-07-21 14:25:48.175 [info] [Model][doInitialScan] Initial repository scan completed - repositories (2), closed repositories (0), parent repositories (0), unsafe repositories (0)\n2025-07-21 14:25:48.256 [info] > git show --textconv :models/dynamics.py [69ms]\n2025-07-21 14:25:48.256 [info] > git show --textconv :train_dynamics.py [58ms]\n2025-07-21 14:25:48.285 [info] > git ls-files --stage -- /home/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py [62ms]\n2025-07-21 14:25:48.295 [info] > git ls-files --stage -- /home/hk-project-p0023960/tum_cte0515/Projects/jafar/models/dynamics.py [84ms]\n2025-07-21 14:25:48.306 [info] > git config --local branch.main.vscode-merge-base [139ms]\n2025-07-21 14:25:48.329 [info] > git cat-file -s c271af31adc0ad308c2c820458856224a05c2b1f [33ms]\n2025-07-21 14:25:48.329 [info] > git for-each-ref --format=%(refname)%00%(upstream:short)%00%(objectname)%00%(upstream:track)%00%(upstream:remotename)%00%(upstream:remoteref) refs/heads/main refs/remotes/main [83ms]\n2025-07-21 
14:25:48.331 [info] > git for-each-ref --format=%(refname)%00%(upstream:short)%00%(objectname)%00%(upstream:track)%00%(upstream:remotename)%00%(upstream:remoteref) refs/heads/origin/main refs/remotes/origin/main [15ms]\n2025-07-21 14:25:48.341 [info] > git cat-file -s b20672fe6b445b78c9db8b5380128261f4db5f09 [36ms]\n2025-07-21 14:25:48.382 [info] > git merge-base refs/heads/main refs/remotes/origin/main [41ms]\n2025-07-21 14:25:48.392 [info] > git for-each-ref --sort -committerdate --format %(refname) %(objectname) %(*objectname) [23ms]\n2025-07-21 14:25:48.397 [info] > git status -z -uall [38ms]\n2025-07-21 14:25:48.409 [info] > git diff --name-status -z --diff-filter=ADMR 34811024ba5b1f400b8a0e4c5a1d27256172ba52...refs/remotes/origin/main [19ms]\n2025-07-21 14:25:48.420 [info] > git merge-base refs/heads/main refs/remotes/origin/main [13ms]\n2025-07-21 14:25:48.449 [info] > git diff --name-status -z --diff-filter=ADMR 34811024ba5b1f400b8a0e4c5a1d27256172ba52...refs/remotes/origin/main [15ms]\n2025-07-21 14:25:48.594 [info] > git check-ignore -v -z --stdin [92ms]\n2025-07-21 14:25:48.594 [info] > git check-ignore -v -z --stdin [75ms]\n2025-07-21 14:25:48.595 [info] > git show --textconv :models/tokenizer.py [59ms]\n2025-07-21 14:25:48.595 [info] > git show --textconv :train_dynamics.py [44ms]\n2025-07-21 14:25:48.595 [info] > git ls-files --stage -- /home/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py [19ms]\n2025-07-21 14:25:48.616 [info] > git ls-files --stage -- /home/hk-project-p0023960/tum_cte0515/Projects/jafar/models/tokenizer.py [52ms]\n2025-07-21 14:25:48.628 [info] > git cat-file -s c271af31adc0ad308c2c820458856224a05c2b1f [18ms]\n2025-07-21 14:25:48.700 [info] > git cat-file -s 7333438b8396f00c40b77e48b06d6f860746bf83 [74ms]\n2025-07-21 14:25:49.250 [info] > git blame --root --incremental c7cd6b2eea65bd1dea457231f2f68e9e76da3c21 -- /home/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py [483ms]\n2025-07-21 14:25:49.300 [info] > git check-ignore -v -z --stdin [16ms]\n2025-07-21 14:25:49.371 [info] > git show --textconv :train_dynamics.py [18ms]\n2025-07-21 14:25:49.378 [info] > git ls-files --stage -- /home/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py [10ms]\n2025-07-21 14:25:49.408 [info] > git cat-file -s c271af31adc0ad308c2c820458856224a05c2b1f [16ms]\n2025-07-21 14:25:55.544 [info] > git config --global user.name [9ms]\n2025-07-21 14:25:55.564 [info] > git config --global user.email [8ms]\n2025-07-21 14:25:55.564 [info] [main] Stored git author name in global state: Mihir Mahajan \n2025-07-21 14:28:14.416 [info] > git show --textconv :models/dynamics.py [19ms]\n2025-07-21 14:28:14.421 [info] > git ls-files --stage -- /home/hk-project-p0023960/tum_cte0515/Projects/jafar/models/dynamics.py [10ms]\n2025-07-21 14:28:14.446 [info] > git cat-file -s b20672fe6b445b78c9db8b5380128261f4db5f09 [12ms]\n2025-07-21 14:28:14.571 [info] > git blame --root --incremental c7cd6b2eea65bd1dea457231f2f68e9e76da3c21 -- /home/hk-project-p0023960/tum_cte0515/Projects/jafar/models/dynamics.py [88ms]\n2025-07-21 14:28:19.589 [info] > git show --textconv :train_dynamics.py [15ms]\n2025-07-21 14:28:19.598 [info] > git ls-files --stage -- /home/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py [10ms]\n2025-07-21 14:28:19.622 [info] > git cat-file -s c271af31adc0ad308c2c820458856224a05c2b1f [11ms]\n2025-07-21 14:28:21.375 [info] > git show --textconv :jobs/mihir/horeka/yolo-runs/train_dynamics_new_arch.sbatch [17ms]\n2025-07-21 
14:28:21.384 [info] > git ls-files --stage -- /home/hk-project-p0023960/tum_cte0515/Projects/jafar/slurm/jobs/mihir/horeka/yolo-runs/train_dynamics_new_arch.sbatch [10ms]\n2025-07-21 14:28:21.494 [info] > git blame --root --incremental 34811024ba5b1f400b8a0e4c5a1d27256172ba52 -- /home/hk-project-p0023960/tum_cte0515/Projects/jafar/slurm/jobs/mihir/horeka/yolo-runs/train_dynamics_new_arch.sbatch [31ms]\n2025-07-21 14:28:21.494 [info] fatal: no such path jobs/mihir/horeka/yolo-runs/train_dynamics_new_arch.sbatch in 34811024ba5b1f400b8a0e4c5a1d27256172ba52\n2025-07-21 14:28:21.741 [info] > git show --textconv :train_dynamics.py [16ms]\n2025-07-21 14:28:21.749 [info] > git ls-files --stage -- /home/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py [10ms]\n2025-07-21 14:28:21.772 [info] > git cat-file -s c271af31adc0ad308c2c820458856224a05c2b1f [11ms]\n2025-07-21 14:28:21.842 [info] > git check-ignore -v -z --stdin [16ms]\n2025-07-21 14:28:29.890 [info] > git show --textconv :jobs/mihir/horeka/yolo-runs/sampling.sh [17ms]\n2025-07-21 14:28:29.898 [info] > git ls-files --stage -- /home/hk-project-p0023960/tum_cte0515/Projects/jafar/slurm/jobs/mihir/horeka/yolo-runs/sampling.sh [9ms]\n2025-07-21 14:28:29.989 [info] > git blame --root --incremental 34811024ba5b1f400b8a0e4c5a1d27256172ba52 -- /home/hk-project-p0023960/tum_cte0515/Projects/jafar/slurm/jobs/mihir/horeka/yolo-runs/sampling.sh [18ms]\n2025-07-21 14:28:29.989 [info] fatal: no such path jobs/mihir/horeka/yolo-runs/sampling.sh in 34811024ba5b1f400b8a0e4c5a1d27256172ba52\n2025-07-21 14:28:42.759 [info] > git show --textconv :sample.py [20ms]\n2025-07-21 14:28:42.764 [info] > git ls-files --stage -- /home/hk-project-p0023960/tum_cte0515/Projects/jafar/sample.py [10ms]\n2025-07-21 14:28:42.789 [info] > git cat-file -s 3048b3abf8c696778471909dacf177503e89f79e [11ms]\n2025-07-21 14:28:43.098 [info] > git blame --root --incremental c7cd6b2eea65bd1dea457231f2f68e9e76da3c21 -- /home/hk-project-p0023960/tum_cte0515/Projects/jafar/sample.py [269ms]\n2025-07-21 14:29:07.318 [info] > git show --textconv :genie.py [20ms]\n2025-07-21 14:29:07.322 [info] > git ls-files --stage -- /home/hk-project-p0023960/tum_cte0515/Projects/jafar/genie.py [11ms]\n2025-07-21 14:29:07.345 [info] > git cat-file -s 5494c905b511741e0bdac7e107aacc88bdbb0849 [10ms]\n2025-07-21 14:29:07.567 [info] > git blame --root --incremental c7cd6b2eea65bd1dea457231f2f68e9e76da3c21 -- /home/hk-project-p0023960/tum_cte0515/Projects/jafar/genie.py [175ms]\n2025-07-21 14:29:08.973 [info] > git show --textconv :sample.py [13ms]\n2025-07-21 14:29:08.983 [info] > git ls-files --stage -- /home/hk-project-p0023960/tum_cte0515/Projects/jafar/sample.py [11ms]\n2025-07-21 14:29:09.009 [info] > git cat-file -s 3048b3abf8c696778471909dacf177503e89f79e [12ms]\n2025-07-21 14:29:43.803 [info] > git show --textconv :genie.py [15ms]\n2025-07-21 14:29:43.813 [info] > git ls-files --stage -- /home/hk-project-p0023960/tum_cte0515/Projects/jafar/genie.py [10ms]\n2025-07-21 14:29:43.836 [info] > git cat-file -s 5494c905b511741e0bdac7e107aacc88bdbb0849 [12ms]\n2025-07-21 14:38:13.413 [info] > git config --get commit.template [14ms]\n2025-07-21 14:38:13.572 [info] > git for-each-ref --format=%(refname)%00%(upstream:short)%00%(objectname)%00%(upstream:track)%00%(upstream:remotename)%00%(upstream:remoteref) refs/heads/new-arch-sampling refs/remotes/new-arch-sampling [161ms]\n2025-07-21 14:38:13.741 [info] > git status -z -uall [154ms]\n2025-07-21 14:38:13.972 [info] > git for-each-ref --sort 
-committerdate --format %(refname) %(objectname) %(*objectname) [372ms]\n2025-07-21 14:38:24.114 [info] > git show --textconv :models/dynamics.py [17ms]\n2025-07-21 14:38:24.125 [info] > git ls-files --stage -- /home/hk-project-p0023960/tum_cte0515/Projects/jafar/models/dynamics.py [12ms]\n2025-07-21 14:38:24.152 [info] > git cat-file -s b20672fe6b445b78c9db8b5380128261f4db5f09 [13ms]\n2025-07-21 14:38:36.316 [info] > git config --get commit.template [17ms]\n2025-07-21 14:38:36.326 [info] > git for-each-ref --format=%(refname)%00%(upstream:short)%00%(objectname)%00%(upstream:track)%00%(upstream:remotename)%00%(upstream:remoteref) refs/heads/new-arch-sampling refs/remotes/new-arch-sampling [10ms]\n2025-07-21 14:38:36.375 [info] > git status -z -uall [33ms]\n2025-07-21 14:38:36.394 [info] > git for-each-ref --sort -committerdate --format %(refname) %(objectname) %(*objectname) [38ms]\n2025-07-21 14:38:41.430 [info] > git config --get commit.template [14ms]\n2025-07-21 14:38:41.441 [info] > git for-each-ref --format=%(refname)%00%(upstream:short)%00%(objectname)%00%(upstream:track)%00%(upstream:remotename)%00%(upstream:remoteref) refs/heads/new-arch-sampling refs/remotes/new-arch-sampling [13ms]\n2025-07-21 14:38:41.482 [info] > git status -z -uall [29ms]\n2025-07-21 14:38:41.510 [info] > git for-each-ref --sort -committerdate --format %(refname) %(objectname) %(*objectname) [42ms]\n2025-07-21 14:38:46.551 [info] > git config --get commit.template [15ms]\n2025-07-21 14:38:46.565 [info] > git for-each-ref --format=%(refname)%00%(upstream:short)%00%(objectname)%00%(upstream:track)%00%(upstream:remotename)%00%(upstream:remoteref) refs/heads/new-arch-sampling refs/remotes/new-arch-sampling [16ms]\n2025-07-21 14:38:46.614 [info] > git status -z -uall [36ms]\n2025-07-21 14:38:46.618 [info] > git for-each-ref --sort -committerdate --format %(refname) %(objectname) %(*objectname) [27ms]\n2025-07-21 14:48:26.579 [info] > git ls-files --stage -- /home/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py [12ms]\n2025-07-21 14:48:26.603 [info] > git show --textconv :train_dynamics.py [49ms]\n2025-07-21 14:48:26.603 [info] > git cat-file -s c271af31adc0ad308c2c820458856224a05c2b1f [13ms]\n2025-07-21 14:48:47.358 [info] > git ls-files --stage -- /home/hk-project-p0023960/tum_cte0515/Projects/jafar/models/dynamics.py [11ms]\n2025-07-21 14:48:47.372 [info] > git show --textconv :models/dynamics.py [38ms]\n2025-07-21 14:48:47.384 [info] > git cat-file -s b20672fe6b445b78c9db8b5380128261f4db5f09 [12ms]\n2025-07-21 14:48:53.733 [info] > git show --textconv :train_dynamics.py [15ms]\n2025-07-21 14:48:53.745 [info] > git ls-files --stage -- /home/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py [13ms]\n2025-07-21 14:48:53.779 [info] > git cat-file -s c271af31adc0ad308c2c820458856224a05c2b1f [17ms]\n2025-07-21 14:53:04.594 [info] > git ls-files --stage -- /home/hk-project-p0023960/tum_cte0515/Projects/jafar/models/dynamics.py [13ms]\n2025-07-21 14:53:04.629 [info] > git show --textconv :models/dynamics.py [63ms]\n2025-07-21 14:53:04.630 [info] > git cat-file -s b20672fe6b445b78c9db8b5380128261f4db5f09 [21ms]\n2025-07-21 14:58:07.758 [info] > git ls-files --stage -- /home/hk-project-p0023960/tum_cte0515/Projects/jafar/utils/nn.py [10ms]\n2025-07-21 14:58:07.785 [info] > git show --textconv :utils/nn.py [51ms]\n2025-07-21 14:58:07.785 [info] > git cat-file -s 3b64fa0c9e4388d1a3f7a2a4f127e182a6a1a27a [16ms]\n2025-07-21 14:58:08.091 [info] > git blame --root --incremental 
c7cd6b2eea65bd1dea457231f2f68e9e76da3c21 -- /home/hk-project-p0023960/tum_cte0515/Projects/jafar/utils/nn.py [257ms]\n2025-07-21 14:58:08.182 [info] > git check-ignore -v -z --stdin [20ms]\n2025-07-21 15:00:41.000 [info] > git config --get commit.template [13ms]\n2025-07-21 15:00:41.217 [info] > git for-each-ref --format=%(refname)%00%(upstream:short)%00%(objectname)%00%(upstream:track)%00%(upstream:remotename)%00%(upstream:remoteref) refs/heads/new-arch-sampling refs/remotes/new-arch-sampling [218ms]\n2025-07-21 15:00:41.337 [info] > git status -z -uall [107ms]\n2025-07-21 15:00:41.605 [info] > git for-each-ref --sort -committerdate --format %(refname) %(objectname) %(*objectname) [358ms]\n2025-07-21 15:00:51.848 [info] > git config --get commit.template [13ms]\n2025-07-21 15:00:51.866 [info] > git for-each-ref --format=%(refname)%00%(upstream:short)%00%(objectname)%00%(upstream:track)%00%(upstream:remotename)%00%(upstream:remoteref) refs/heads/new-arch-sampling refs/remotes/new-arch-sampling [19ms]\n2025-07-21 15:00:51.913 [info] > git status -z -uall [35ms]\n2025-07-21 15:00:51.967 [info] > git for-each-ref --sort -committerdate --format %(refname) %(objectname) %(*objectname) [74ms]\n2025-07-21 15:05:24.569 [info] > git ls-files --stage -- /home/hk-project-p0023960/tum_cte0515/Projects/jafar/models/dynamics.py [10ms]\n2025-07-21 15:05:24.597 [info] > git show --textconv :models/dynamics.py [50ms]\n2025-07-21 15:05:24.597 [info] > git cat-file -s b20672fe6b445b78c9db8b5380128261f4db5f09 [17ms]\n2025-07-21 15:05:40.750 [info] > git config --get commit.template [16ms]\n2025-07-21 15:05:40.939 [info] > git for-each-ref --format=%(refname)%00%(upstream:short)%00%(objectname)%00%(upstream:track)%00%(upstream:remotename)%00%(upstream:remoteref) refs/heads/new-arch-sampling refs/remotes/new-arch-sampling [190ms]\n2025-07-21 15:05:41.067 [info] > git status -z -uall [116ms]\n2025-07-21 15:05:41.208 [info] > git for-each-ref --sort -committerdate --format %(refname) %(objectname) %(*objectname) [244ms]\n2025-07-21 15:05:46.772 [info] > git config --get commit.template [17ms]\n2025-07-21 15:05:46.820 [info] > git for-each-ref --format=%(refname)%00%(upstream:short)%00%(objectname)%00%(upstream:track)%00%(upstream:remotename)%00%(upstream:remoteref) refs/heads/new-arch-sampling refs/remotes/new-arch-sampling [51ms]\n2025-07-21 15:05:46.860 [info] > git status -z -uall [30ms]\n2025-07-21 15:05:46.892 [info] > git for-each-ref --sort -committerdate --format %(refname) %(objectname) %(*objectname) [52ms]\n2025-07-21 15:07:11.982 [info] > git show --textconv :genie.py [17ms]\n2025-07-21 15:07:11.988 [info] > git ls-files --stage -- /home/hk-project-p0023960/tum_cte0515/Projects/jafar/genie.py [11ms]\n2025-07-21 15:07:12.012 [info] > git cat-file -s 5494c905b511741e0bdac7e107aacc88bdbb0849 [11ms]\n2025-07-21 15:07:25.359 [info] > git show --textconv :models/dynamics.py [18ms]\n2025-07-21 15:07:25.365 [info] > git ls-files --stage -- /home/hk-project-p0023960/tum_cte0515/Projects/jafar/models/dynamics.py [9ms]\n2025-07-21 15:07:25.389 [info] > git cat-file -s b20672fe6b445b78c9db8b5380128261f4db5f09 [11ms]\n2025-07-21 15:07:27.230 [info] > git show --textconv :genie.py [15ms]\n2025-07-21 15:07:27.238 [info] > git ls-files --stage -- /home/hk-project-p0023960/tum_cte0515/Projects/jafar/genie.py [9ms]\n2025-07-21 15:07:27.261 [info] > git cat-file -s 5494c905b511741e0bdac7e107aacc88bdbb0849 [11ms]\n2025-07-21 15:15:57.684 [info] > git config --get commit.template [15ms]\n2025-07-21 15:15:57.825 [info] > 
git for-each-ref --format=%(refname)%00%(upstream:short)%00%(objectname)%00%(upstream:track)%00%(upstream:remotename)%00%(upstream:remoteref) refs/heads/new-arch-sampling refs/remotes/new-arch-sampling [142ms]\n2025-07-21 15:15:57.982 [info] > git status -z -uall [143ms]\n2025-07-21 15:15:58.204 [info] > git for-each-ref --sort -committerdate --format %(refname) %(objectname) %(*objectname) [350ms]\n2025-07-21 15:19:20.645 [info] > git ls-files --stage -- /home/hk-project-p0023960/tum_cte0515/Projects/jafar/sample.py [10ms]\n2025-07-21 15:19:20.659 [info] > git show --textconv :sample.py [37ms]\n2025-07-21 15:19:20.669 [info] > git cat-file -s 3048b3abf8c696778471909dacf177503e89f79e [11ms]\n2025-07-21 15:19:22.346 [info] > git show --textconv :sample.py [15ms]\n2025-07-21 15:19:22.353 [info] > git ls-files --stage -- /home/hk-project-p0023960/tum_cte0515/Projects/jafar/sample.py [10ms]\n2025-07-21 15:19:22.379 [info] > git cat-file -s 3048b3abf8c696778471909dacf177503e89f79e [11ms]\n2025-07-21 15:19:27.404 [info] > git show --textconv :genie.py [14ms]\n2025-07-21 15:19:27.414 [info] > git ls-files --stage -- /home/hk-project-p0023960/tum_cte0515/Projects/jafar/genie.py [11ms]\n2025-07-21 15:19:27.436 [info] > git cat-file -s 5494c905b511741e0bdac7e107aacc88bdbb0849 [11ms]\n2025-07-21 15:19:28.260 [info] > git show --textconv :genie.py [16ms]\n2025-07-21 15:19:28.268 [info] > git ls-files --stage -- /home/hk-project-p0023960/tum_cte0515/Projects/jafar/genie.py [10ms]\n2025-07-21 15:19:28.291 [info] > git cat-file -s 5494c905b511741e0bdac7e107aacc88bdbb0849 [12ms]\n2025-07-21 15:19:38.195 [info] > git config --get commit.template [14ms]\n2025-07-21 15:19:38.222 [info] > git for-each-ref --format=%(refname)%00%(upstream:short)%00%(objectname)%00%(upstream:track)%00%(upstream:remotename)%00%(upstream:remoteref) refs/heads/new-arch-sampling refs/remotes/new-arch-sampling [27ms]\n2025-07-21 15:19:38.293 [info] > git status -z -uall [58ms]\n2025-07-21 15:19:38.339 [info] > git for-each-ref --sort -committerdate --format %(refname) %(objectname) %(*objectname) [91ms]\n2025-07-21 15:19:44.505 [info] > git show --textconv :jobs/mihir/horeka/yolo-runs/sampling.sh [15ms]\n2025-07-21 15:19:44.512 [info] > git ls-files --stage -- /home/hk-project-p0023960/tum_cte0515/Projects/jafar/slurm/jobs/mihir/horeka/yolo-runs/sampling.sh [10ms]\n2025-07-21 15:19:49.282 [info] > git config --get commit.template [13ms]\n2025-07-21 15:19:49.308 [info] > git config --get commit.template [26ms]\n2025-07-21 15:19:49.312 [info] > git for-each-ref --format=%(refname)%00%(upstream:short)%00%(objectname)%00%(upstream:track)%00%(upstream:remotename)%00%(upstream:remoteref) refs/heads/new-arch-sampling refs/remotes/new-arch-sampling [18ms]\n2025-07-21 15:19:49.348 [info] > git for-each-ref --format=%(refname)%00%(upstream:short)%00%(objectname)%00%(upstream:track)%00%(upstream:remotename)%00%(upstream:remoteref) refs/heads/main refs/remotes/main [42ms]\n2025-07-21 15:19:49.379 [info] > git status -z -uall [53ms]\n2025-07-21 15:19:49.386 [info] > git for-each-ref --sort -committerdate --format %(refname) %(objectname) %(*objectname) [40ms]\n2025-07-21 15:19:49.407 [info] > git for-each-ref --sort -committerdate --format %(refname) %(objectname) %(*objectname) [29ms]\n2025-07-21 15:19:49.912 [info] > git status -z -uall [547ms]\n2025-07-21 15:20:13.074 [info] > git show --textconv :jobs/mihir/horeka/yolo-runs/sampling.sh [14ms]\n2025-07-21 15:20:13.082 [info] > git ls-files --stage -- 
/home/hk-project-p0023960/tum_cte0515/Projects/jafar/slurm/jobs/mihir/horeka/yolo-runs/sampling.sh [9ms]\n2025-07-21 15:20:31.785 [info] > git show --textconv :jobs/mihir/horeka/yolo-runs/sampling.sh [14ms]\n2025-07-21 15:20:31.794 [info] > git ls-files --stage -- /home/hk-project-p0023960/tum_cte0515/Projects/jafar/slurm/jobs/mihir/horeka/yolo-runs/sampling.sh [10ms]\n2025-07-21 15:21:20.490 [info] > git config --get commit.template [16ms]\n2025-07-21 15:21:20.511 [info] > git for-each-ref --format=%(refname)%00%(upstream:short)%00%(objectname)%00%(upstream:track)%00%(upstream:remotename)%00%(upstream:remoteref) refs/heads/new-arch-sampling refs/remotes/new-arch-sampling [22ms]\n2025-07-21 15:21:20.574 [info] > git status -z -uall [51ms]\n2025-07-21 15:21:20.607 [info] > git for-each-ref --sort -committerdate --format %(refname) %(objectname) %(*objectname) [70ms]\n2025-07-21 15:24:05.323 [info] > git check-ignore -v -z --stdin [16ms]\n2025-07-21 15:24:39.055 [info] > git config --get commit.template [9ms]\n2025-07-21 15:24:39.099 [info] > git for-each-ref --format=%(refname)%00%(upstream:short)%00%(objectname)%00%(upstream:track)%00%(upstream:remotename)%00%(upstream:remoteref) refs/heads/new-arch-sampling refs/remotes/new-arch-sampling [28ms]\n2025-07-21 15:24:39.195 [info] > git status -z -uall [82ms]\n2025-07-21 15:24:39.209 [info] > git for-each-ref --sort -committerdate --format %(refname) %(objectname) %(*objectname) [83ms]\n2025-07-21 15:24:44.245 [info] > git config --get commit.template [15ms]\n2025-07-21 15:24:44.262 [info] > git for-each-ref --format=%(refname)%00%(upstream:short)%00%(objectname)%00%(upstream:track)%00%(upstream:remotename)%00%(upstream:remoteref) refs/heads/new-arch-sampling refs/remotes/new-arch-sampling [19ms]\n2025-07-21 15:24:44.309 [info] > git status -z -uall [34ms]\n2025-07-21 15:24:44.325 [info] > git for-each-ref --sort -committerdate --format %(refname) %(objectname) %(*objectname) [36ms]\n2025-07-21 15:25:01.361 [info] > git show --textconv :models/dynamics.py [18ms]\n2025-07-21 15:25:01.369 [info] > git ls-files --stage -- /home/hk-project-p0023960/tum_cte0515/Projects/jafar/models/dynamics.py [10ms]\n2025-07-21 15:25:01.396 [info] > git cat-file -s b20672fe6b445b78c9db8b5380128261f4db5f09 [11ms]\n2025-07-21 15:25:07.250 [info] > git show --textconv :sample.py [31ms]\n2025-07-21 15:25:07.250 [info] > git ls-files --stage -- /home/hk-project-p0023960/tum_cte0515/Projects/jafar/sample.py [15ms]\n2025-07-21 15:25:07.276 [info] > git cat-file -s 3048b3abf8c696778471909dacf177503e89f79e [12ms]\n2025-07-21 15:26:58.216 [info] > git show --textconv :diff.diff [22ms]\n2025-07-21 15:26:58.218 [info] > git ls-files --stage -- /home/hk-project-p0023960/tum_cte0515/Projects/jafar/diff.diff [10ms]\n2025-07-21 15:26:58.323 [info] > git blame --root --incremental c7cd6b2eea65bd1dea457231f2f68e9e76da3c21 -- /home/hk-project-p0023960/tum_cte0515/Projects/jafar/diff.diff [29ms]\n2025-07-21 15:26:58.323 [info] fatal: no such path diff.diff in c7cd6b2eea65bd1dea457231f2f68e9e76da3c21\n2025-07-21 15:28:44.482 [info] > git show --textconv :diff.diff [16ms]\n2025-07-21 15:28:44.490 [info] > git ls-files --stage -- /home/hk-project-p0023960/tum_cte0515/Projects/jafar/diff.diff [11ms]\n2025-07-21 15:28:54.315 [info] > git config --get commit.template [20ms]\n2025-07-21 15:28:54.333 [info] > git for-each-ref --format=%(refname)%00%(upstream:short)%00%(objectname)%00%(upstream:track)%00%(upstream:remotename)%00%(upstream:remoteref) refs/heads/new-arch-sampling 
refs/remotes/new-arch-sampling [20ms]\n2025-07-21 15:28:54.382 [info] > git status -z -uall [33ms]\n2025-07-21 15:28:54.756 [info] > git for-each-ref --sort -committerdate --format %(refname) %(objectname) %(*objectname) [386ms]\n2025-07-21 15:29:33.401 [info] > git config --get commit.template [14ms]\n2025-07-21 15:29:33.416 [info] > git for-each-ref --format=%(refname)%00%(upstream:short)%00%(objectname)%00%(upstream:track)%00%(upstream:remotename)%00%(upstream:remoteref) refs/heads/new-arch-sampling refs/remotes/new-arch-sampling [15ms]\n2025-07-21 15:29:33.469 [info] > git status -z -uall [41ms]\n2025-07-21 15:29:33.491 [info] > git for-each-ref --sort -committerdate --format %(refname) %(objectname) %(*objectname) [49ms]\n2025-07-21 15:29:34.284 [info] > git config --get commit.template [15ms]\n2025-07-21 15:29:34.301 [info] > git for-each-ref --format=%(refname)%00%(upstream:short)%00%(objectname)%00%(upstream:track)%00%(upstream:remotename)%00%(upstream:remoteref) refs/heads/new-arch-sampling refs/remotes/new-arch-sampling [18ms]\n2025-07-21 15:29:34.347 [info] > git status -z -uall [33ms]\n2025-07-21 15:29:34.381 [info] > git for-each-ref --sort -committerdate --format %(refname) %(objectname) %(*objectname) [50ms]\n2025-07-21 15:29:34.599 [info] > git ls-files --stage -- /home/hk-project-p0023960/tum_cte0515/Projects/jafar/sample.py [11ms]\n2025-07-21 15:29:34.622 [info] > git cat-file -s 3048b3abf8c696778471909dacf177503e89f79e [11ms]\n2025-07-21 15:29:34.680 [info] > git show --textconv :sample.py [15ms]\n2025-07-21 15:29:39.407 [info] > git config --get commit.template [16ms]\n2025-07-21 15:29:39.423 [info] > git for-each-ref --format=%(refname)%00%(upstream:short)%00%(objectname)%00%(upstream:track)%00%(upstream:remotename)%00%(upstream:remoteref) refs/heads/main refs/remotes/main [19ms]\n2025-07-21 15:29:39.468 [info] > git status -z -uall [31ms]\n2025-07-21 15:29:39.510 [info] > git for-each-ref --sort -committerdate --format %(refname) %(objectname) %(*objectname) [58ms]\n2025-07-21 15:29:39.549 [info] > git for-each-ref --format=%(refname)%00%(upstream:short)%00%(objectname)%00%(upstream:track)%00%(upstream:remotename)%00%(upstream:remoteref) refs/heads/main refs/remotes/main [15ms]\n2025-07-21 15:29:39.570 [info] > git config --local branch.main.vscode-merge-base [7ms]\n2025-07-21 15:29:39.632 [info] > git for-each-ref --format=%(refname)%00%(upstream:short)%00%(objectname)%00%(upstream:track)%00%(upstream:remotename)%00%(upstream:remoteref) refs/heads/origin/main refs/remotes/origin/main [48ms]\n2025-07-21 15:29:39.656 [info] > git merge-base refs/heads/main refs/remotes/origin/main [13ms]\n2025-07-21 15:29:39.679 [info] > git diff --name-status -z --diff-filter=ADMR 3391a39cc24fa3eb97c5b50d68c0f6314bc67bda...refs/remotes/origin/main [14ms]\n2025-07-21 15:29:40.644 [info] > git ls-files --stage -- /home/hk-project-p0023960/tum_cte0515/Projects/jafar/sample.py [9ms]\n2025-07-21 15:29:40.667 [info] > git cat-file -s 4ef592d38ac456b35ffd83b0c3a0fd56acb30aba [11ms]\n2025-07-21 15:29:40.715 [info] > git show --textconv :sample.py [12ms]\n2025-07-21 15:29:41.933 [info] > git config --get commit.template [15ms]\n2025-07-21 15:29:41.954 [info] > git for-each-ref --format=%(refname)%00%(upstream:short)%00%(objectname)%00%(upstream:track)%00%(upstream:remotename)%00%(upstream:remoteref) refs/heads/main refs/remotes/main [22ms]\n2025-07-21 15:29:41.998 [info] > git status -z -uall [29ms]\n2025-07-21 15:29:42.038 [info] > git for-each-ref --sort -committerdate --format %(refname) 
%(objectname) %(*objectname) [57ms]\n2025-07-21 15:29:42.046 [info] > git ls-files --stage -- /home/hk-project-p0023960/tum_cte0515/Projects/jafar/sample.py [10ms]\n2025-07-21 15:29:42.067 [info] > git cat-file -s 4ef592d38ac456b35ffd83b0c3a0fd56acb30aba [10ms]\n2025-07-21 15:29:42.135 [info] > git show --textconv :sample.py [30ms]\n2025-07-21 15:29:44.303 [info] > git config --get commit.template [17ms]\n2025-07-21 15:29:44.318 [info] > git for-each-ref --format=%(refname)%00%(upstream:short)%00%(objectname)%00%(upstream:track)%00%(upstream:remotename)%00%(upstream:remoteref) refs/heads/main refs/remotes/main [17ms]\n2025-07-21 15:29:44.371 [info] > git status -z -uall [36ms]\n2025-07-21 15:29:44.444 [info] > git for-each-ref --sort -committerdate --format %(refname) %(objectname) %(*objectname) [96ms]\n2025-07-21 15:29:44.479 [info] > git merge-base refs/heads/main refs/remotes/origin/main [14ms]\n2025-07-21 15:29:44.505 [info] > git diff --name-status -z --diff-filter=ADMR c68e03e245819070bc7cd071c3e94cbd938a1f33...refs/remotes/origin/main [13ms]\n2025-07-21 15:29:45.434 [info] > git ls-files --stage -- /home/hk-project-p0023960/tum_cte0515/Projects/jafar/sample.py [9ms]\n2025-07-21 15:29:45.458 [info] > git cat-file -s e36b0c355527e4273e484fd3d2cc550ecb14712d [11ms]\n2025-07-21 15:29:45.504 [info] > git show --textconv :sample.py [13ms]\n2025-07-21 15:29:47.073 [info] > git config --get commit.template [15ms]\n2025-07-21 15:29:47.089 [info] > git for-each-ref --format=%(refname)%00%(upstream:short)%00%(objectname)%00%(upstream:track)%00%(upstream:remotename)%00%(upstream:remoteref) refs/heads/main refs/remotes/main [17ms]\n2025-07-21 15:29:47.131 [info] > git status -z -uall [28ms]\n2025-07-21 15:29:47.163 [info] > git for-each-ref --sort -committerdate --format %(refname) %(objectname) %(*objectname) [47ms]\n2025-07-21 15:29:47.690 [info] > git ls-files --stage -- /home/hk-project-p0023960/tum_cte0515/Projects/jafar/sample.py [11ms]\n2025-07-21 15:29:47.714 [info] > git cat-file -s e36b0c355527e4273e484fd3d2cc550ecb14712d [12ms]\n2025-07-21 15:29:47.777 [info] > git show --textconv :sample.py [14ms]\n2025-07-21 15:29:48.601 [info] > git config --get commit.template [17ms]\n2025-07-21 15:29:48.616 [info] > git for-each-ref --format=%(refname)%00%(upstream:short)%00%(objectname)%00%(upstream:track)%00%(upstream:remotename)%00%(upstream:remoteref) refs/heads/main refs/remotes/main [16ms]\n2025-07-21 15:29:48.663 [info] > git status -z -uall [31ms]\n2025-07-21 15:29:48.694 [info] > git for-each-ref --sort -committerdate --format %(refname) %(objectname) %(*objectname) [47ms]\n2025-07-21 15:29:52.195 [info] > git config --get commit.template [12ms]\n2025-07-21 15:29:52.203 [info] > git for-each-ref --format=%(refname)%00%(upstream:short)%00%(objectname)%00%(upstream:track)%00%(upstream:remotename)%00%(upstream:remoteref) refs/heads/main refs/remotes/main [9ms]\n2025-07-21 15:29:52.245 [info] > git status -z -uall [29ms]\n2025-07-21 15:29:52.268 [info] > git for-each-ref --sort -committerdate --format %(refname) %(objectname) %(*objectname) [39ms]\n2025-07-21 15:30:01.511 [info] > git config --get commit.template [25ms]\n2025-07-21 15:30:01.526 [info] > git for-each-ref --format=%(refname)%00%(upstream:short)%00%(objectname)%00%(upstream:track)%00%(upstream:remotename)%00%(upstream:remoteref) refs/heads/new-arch-sampling refs/remotes/new-arch-sampling [16ms]\n2025-07-21 15:30:01.582 [info] > git status -z -uall [39ms]\n2025-07-21 15:30:01.603 [info] > git for-each-ref --sort -committerdate 
--format %(refname) %(objectname) %(*objectname) [44ms]\n2025-07-21 15:30:01.639 [info] > git for-each-ref --format=%(refname)%00%(upstream:short)%00%(objectname)%00%(upstream:track)%00%(upstream:remotename)%00%(upstream:remoteref) refs/heads/new-arch-sampling refs/remotes/new-arch-sampling [12ms]\n2025-07-21 15:30:01.662 [info] > git config --local branch.new-arch-sampling.vscode-merge-base [7ms]\n2025-07-21 15:30:01.694 [info] > git for-each-ref --format=%(refname)%00%(upstream:short)%00%(objectname)%00%(upstream:track)%00%(upstream:remotename)%00%(upstream:remoteref) refs/heads/origin/main refs/remotes/origin/main [17ms]\n2025-07-21 15:30:02.706 [info] > git ls-files --stage -- /home/hk-project-p0023960/tum_cte0515/Projects/jafar/sample.py [7ms]\n2025-07-21 15:30:02.734 [info] > git cat-file -s 3048b3abf8c696778471909dacf177503e89f79e [13ms]\n2025-07-21 15:30:02.776 [info] > git show --textconv :sample.py [15ms]\n2025-07-21 15:30:11.660 [info] > git config --get commit.template [14ms]\n2025-07-21 15:30:11.675 [info] > git for-each-ref --format=%(refname)%00%(upstream:short)%00%(objectname)%00%(upstream:track)%00%(upstream:remotename)%00%(upstream:remoteref) refs/heads/new-arch-sampling refs/remotes/new-arch-sampling [16ms]\n2025-07-21 15:30:11.722 [info] > git status -z -uall [35ms]\n2025-07-21 15:30:11.745 [info] > git for-each-ref --sort -committerdate --format %(refname) %(objectname) %(*objectname) [42ms]\n2025-07-21 15:30:24.618 [info] > git config --get commit.template [15ms]\n2025-07-21 15:30:24.635 [info] > git for-each-ref --format=%(refname)%00%(upstream:short)%00%(objectname)%00%(upstream:track)%00%(upstream:remotename)%00%(upstream:remoteref) refs/heads/new-arch-sampling refs/remotes/new-arch-sampling [19ms]\n2025-07-21 15:30:24.687 [info] > git status -z -uall [37ms]\n2025-07-21 15:30:24.750 [info] > git for-each-ref --sort -committerdate --format %(refname) %(objectname) %(*objectname) [85ms]\n2025-07-21 15:30:35.574 [info] > git for-each-ref --format=%(refname)%00%(upstream:short)%00%(objectname)%00%(upstream:track)%00%(upstream:remotename)%00%(upstream:remoteref) refs/heads/new-arch-sampling refs/remotes/new-arch-sampling [19ms]\n2025-07-21 15:30:35.627 [info] > git status -z -uall [40ms]\n2025-07-21 15:30:35.676 [info] > git for-each-ref --sort -committerdate --format %(refname) %(objectname) %(*objectname) [72ms]\n2025-07-21 15:30:35.689 [info] > git ls-files --stage -- /home/hk-project-p0023960/tum_cte0515/Projects/jafar/sample.py [16ms]\n2025-07-21 15:30:35.716 [info] > git cat-file -s de74ec04fe18d5e59743bdac9b6cd8727c3ae5a3 [13ms]\n2025-07-21 15:30:35.820 [info] > git show --textconv :sample.py [30ms]\n2025-07-21 15:30:36.116 [info] > git show --textconv HEAD:sample.py [15ms]\n2025-07-21 15:30:36.127 [info] > git ls-tree -l HEAD -- /home/hk-project-p0023960/tum_cte0515/Projects/jafar/sample.py [13ms]\n2025-07-21 15:30:45.152 [info] > git diff --name-status -z --diff-filter=ADMR MERGE_HEAD...HEAD [14ms]\n2025-07-21 15:30:45.177 [info] > git show -s --decorate=full --shortstat --format=%H%n%aN%n%aE%n%at%n%ct%n%P%n%D%n%B -z MERGE_HEAD [55ms]\n2025-07-21 15:30:45.213 [info] > git show -s --decorate=full --shortstat --format=%H%n%aN%n%aE%n%at%n%ct%n%P%n%D%n%B -z HEAD [103ms]\n2025-07-21 15:30:45.353 [info] > git show --textconv :1:genie.py [74ms]\n2025-07-21 15:30:45.353 [info] > git show --textconv c68e03e245819070bc7cd071c3e94cbd938a1f33:genie.py [62ms]\n2025-07-21 15:30:45.353 [info] > git show --textconv c7cd6b2eea65bd1dea457231f2f68e9e76da3c21:genie.py 
[45ms]\n2025-07-21 15:30:45.354 [info] > git ls-tree -l c68e03e245819070bc7cd071c3e94cbd938a1f33 -- /home/hk-project-p0023960/tum_cte0515/Projects/jafar/genie.py [20ms]\n2025-07-21 15:30:45.356 [info] > git ls-tree -l :1 -- /home/hk-project-p0023960/tum_cte0515/Projects/jafar/genie.py [34ms]\n2025-07-21 15:30:45.356 [info] fatal: Not a valid object name :1\n2025-07-21 15:30:45.358 [info] > git ls-tree -l c7cd6b2eea65bd1dea457231f2f68e9e76da3c21 -- /home/hk-project-p0023960/tum_cte0515/Projects/jafar/genie.py [11ms]\n2025-07-21 15:30:47.606 [info] > git for-each-ref --format=%(refname)%00%(upstream:short)%00%(objectname)%00%(upstream:track)%00%(upstream:remotename)%00%(upstream:remoteref) refs/heads/new-arch-sampling refs/remotes/new-arch-sampling [18ms]\n2025-07-21 15:30:47.654 [info] > git status -z -uall [37ms]\n2025-07-21 15:30:47.678 [info] > git for-each-ref --sort -committerdate --format %(refname) %(objectname) %(*objectname) [49ms]\n2025-07-21 15:31:13.617 [info] > git for-each-ref --format=%(refname)%00%(upstream:short)%00%(objectname)%00%(upstream:track)%00%(upstream:remotename)%00%(upstream:remoteref) refs/heads/new-arch-sampling refs/remotes/new-arch-sampling [13ms]\n2025-07-21 15:31:13.669 [info] > git status -z -uall [36ms]\n2025-07-21 15:31:13.678 [info] > git for-each-ref --sort -committerdate --format %(refname) %(objectname) %(*objectname) [27ms]\n2025-07-21 15:31:23.787 [info] > git for-each-ref --format=%(refname)%00%(upstream:short)%00%(objectname)%00%(upstream:track)%00%(upstream:remotename)%00%(upstream:remoteref) refs/heads/new-arch-sampling refs/remotes/new-arch-sampling [18ms]\n2025-07-21 15:31:23.831 [info] > git status -z -uall [28ms]\n2025-07-21 15:31:23.867 [info] > git for-each-ref --sort -committerdate --format %(refname) %(objectname) %(*objectname) [50ms]\n2025-07-21 15:31:28.908 [info] > git for-each-ref --format=%(refname)%00%(upstream:short)%00%(objectname)%00%(upstream:track)%00%(upstream:remotename)%00%(upstream:remoteref) refs/heads/new-arch-sampling refs/remotes/new-arch-sampling [16ms]\n2025-07-21 15:31:28.953 [info] > git status -z -uall [33ms]\n2025-07-21 15:31:28.976 [info] > git for-each-ref --sort -committerdate --format %(refname) %(objectname) %(*objectname) [43ms]\n2025-07-21 15:31:31.093 [info] > git add -A -- /home/hk-project-p0023960/tum_cte0515/Projects/jafar/genie.py [20ms]\n2025-07-21 15:31:31.125 [info] > git for-each-ref --format=%(refname)%00%(upstream:short)%00%(objectname)%00%(upstream:track)%00%(upstream:remotename)%00%(upstream:remoteref) refs/heads/new-arch-sampling refs/remotes/new-arch-sampling [16ms]\n2025-07-21 15:31:31.167 [info] > git status -z -uall [31ms]\n2025-07-21 15:31:31.203 [info] > git for-each-ref --sort -committerdate --format %(refname) %(objectname) %(*objectname) [52ms]\n2025-07-21 15:31:31.690 [info] > git show --textconv :genie.py [42ms]\n2025-07-21 15:31:31.690 [info] > git show --textconv HEAD:genie.py [28ms]\n2025-07-21 15:31:31.690 [info] > git ls-files --stage -- /home/hk-project-p0023960/tum_cte0515/Projects/jafar/genie.py [15ms]\n2025-07-21 15:31:31.705 [info] > git ls-tree -l HEAD -- /home/hk-project-p0023960/tum_cte0515/Projects/jafar/genie.py [16ms]\n2025-07-21 15:31:31.716 [info] > git cat-file -s 12100a85ba6fba1cb0a3b2b27b7a01ed40a4f2a7 [12ms]\n2025-07-21 15:31:31.965 [info] > git blame --root --incremental c7cd6b2eea65bd1dea457231f2f68e9e76da3c21 -- /home/hk-project-p0023960/tum_cte0515/Projects/jafar/genie.py [205ms]\n2025-07-21 15:31:31.991 [info] > git check-ignore -v -z --stdin 
[29ms]\n2025-07-21 15:31:32.328 [info] > git ls-files --stage -- /home/hk-project-p0023960/tum_cte0515/Projects/jafar/genie.py [15ms]\n2025-07-21 15:31:32.341 [info] > git log --format=%H%n%aN%n%aE%n%at%n%ct%n%P%n%D%n%B -z --shortstat --diff-merges=first-parent -n50 --skip=0 --topo-order --decorate=full --stdin [947ms]\n2025-07-21 15:31:32.346 [info] > git ls-tree -l HEAD -- /home/hk-project-p0023960/tum_cte0515/Projects/jafar/genie.py [19ms]\n2025-07-21 15:31:32.351 [info] > git cat-file -s 12100a85ba6fba1cb0a3b2b27b7a01ed40a4f2a7 [11ms]\n2025-07-21 15:31:32.412 [info] > git show --textconv HEAD:genie.py [25ms]\n2025-07-21 15:31:32.416 [info] > git show --textconv :genie.py [16ms]\n2025-07-21 15:31:34.012 [info] > git for-each-ref --format=%(refname)%00%(upstream:short)%00%(objectname)%00%(upstream:track)%00%(upstream:remotename)%00%(upstream:remoteref) refs/heads/new-arch-sampling refs/remotes/new-arch-sampling [14ms]\n2025-07-21 15:31:34.053 [info] > git status -z -uall [29ms]\n2025-07-21 15:31:34.079 [info] > git for-each-ref --sort -committerdate --format %(refname) %(objectname) %(*objectname) [42ms]\n2025-07-21 15:31:51.502 [info] > git diff --name-status -z --diff-filter=ADMR MERGE_HEAD...HEAD [21ms]\n2025-07-21 15:31:51.516 [info] > git show -s --decorate=full --shortstat --format=%H%n%aN%n%aE%n%at%n%ct%n%P%n%D%n%B -z HEAD [61ms]\n2025-07-21 15:31:51.549 [info] > git show -s --decorate=full --shortstat --format=%H%n%aN%n%aE%n%at%n%ct%n%P%n%D%n%B -z MERGE_HEAD [81ms]\n2025-07-21 15:31:51.679 [info] > git show --textconv :1:models/dynamics.py [72ms]\n2025-07-21 15:31:51.679 [info] > git show --textconv c68e03e245819070bc7cd071c3e94cbd938a1f33:models/dynamics.py [59ms]\n2025-07-21 15:31:51.679 [info] > git show --textconv c7cd6b2eea65bd1dea457231f2f68e9e76da3c21:models/dynamics.py [47ms]\n2025-07-21 15:31:51.680 [info] > git ls-tree -l :1 -- /home/hk-project-p0023960/tum_cte0515/Projects/jafar/models/dynamics.py [29ms]\n2025-07-21 15:31:51.680 [info] fatal: Not a valid object name :1\n2025-07-21 15:31:51.682 [info] > git ls-tree -l c68e03e245819070bc7cd071c3e94cbd938a1f33 -- /home/hk-project-p0023960/tum_cte0515/Projects/jafar/models/dynamics.py [17ms]\n2025-07-21 15:31:51.690 [info] > git ls-tree -l c7cd6b2eea65bd1dea457231f2f68e9e76da3c21 -- /home/hk-project-p0023960/tum_cte0515/Projects/jafar/models/dynamics.py [13ms]\n2025-07-21 15:31:53.867 [info] > git for-each-ref --format=%(refname)%00%(upstream:short)%00%(objectname)%00%(upstream:track)%00%(upstream:remotename)%00%(upstream:remoteref) refs/heads/new-arch-sampling refs/remotes/new-arch-sampling [15ms]\n2025-07-21 15:31:53.916 [info] > git status -z -uall [33ms]\n2025-07-21 15:31:53.942 [info] > git for-each-ref --sort -committerdate --format %(refname) %(objectname) %(*objectname) [44ms]\n2025-07-21 15:31:58.982 [info] > git for-each-ref --format=%(refname)%00%(upstream:short)%00%(objectname)%00%(upstream:track)%00%(upstream:remotename)%00%(upstream:remoteref) refs/heads/new-arch-sampling refs/remotes/new-arch-sampling [18ms]\n2025-07-21 15:31:59.030 [info] > git status -z -uall [34ms]\n2025-07-21 15:31:59.063 [info] > git for-each-ref --sort -committerdate --format %(refname) %(objectname) %(*objectname) [54ms]\n2025-07-21 15:32:06.292 [info] > git add -A -- /home/hk-project-p0023960/tum_cte0515/Projects/jafar/models/dynamics.py [13ms]\n2025-07-21 15:32:06.322 [info] > git for-each-ref --format=%(refname)%00%(upstream:short)%00%(objectname)%00%(upstream:track)%00%(upstream:remotename)%00%(upstream:remoteref) 
refs/heads/new-arch-sampling refs/remotes/new-arch-sampling [13ms]\n2025-07-21 15:32:06.373 [info] > git status -z -uall [39ms]\n2025-07-21 15:32:06.409 [info] > git for-each-ref --sort -committerdate --format %(refname) %(objectname) %(*objectname) [62ms]\n2025-07-21 15:32:06.824 [info] > git show --textconv :models/dynamics.py [42ms]\n2025-07-21 15:32:06.824 [info] > git show --textconv HEAD:models/dynamics.py [27ms]\n2025-07-21 15:32:06.824 [info] > git ls-files --stage -- /home/hk-project-p0023960/tum_cte0515/Projects/jafar/models/dynamics.py [14ms]\n2025-07-21 15:32:06.839 [info] > git ls-tree -l HEAD -- /home/hk-project-p0023960/tum_cte0515/Projects/jafar/models/dynamics.py [17ms]\n2025-07-21 15:32:06.847 [info] > git cat-file -s 2f308d4205bf623974a1422521d5c21578ab9e33 [10ms]\n2025-07-21 15:32:07.534 [info] > git ls-files --stage -- /home/hk-project-p0023960/tum_cte0515/Projects/jafar/models/dynamics.py [14ms]\n2025-07-21 15:32:07.548 [info] > git ls-tree -l HEAD -- /home/hk-project-p0023960/tum_cte0515/Projects/jafar/models/dynamics.py [15ms]\n2025-07-21 15:32:07.557 [info] > git cat-file -s 2f308d4205bf623974a1422521d5c21578ab9e33 [12ms]\n2025-07-21 15:32:07.592 [info] > git show --textconv HEAD:models/dynamics.py [15ms]\n2025-07-21 15:32:07.604 [info] > git show --textconv :models/dynamics.py [14ms]\n2025-07-21 15:32:08.534 [info] > git for-each-ref --format=%(refname)%00%(upstream:short)%00%(objectname)%00%(upstream:track)%00%(upstream:remotename)%00%(upstream:remoteref) refs/heads/new-arch-sampling refs/remotes/new-arch-sampling [12ms]\n2025-07-21 15:32:08.579 [info] > git status -z -uall [32ms]\n2025-07-21 15:32:08.600 [info] > git for-each-ref --sort -committerdate --format %(refname) %(objectname) %(*objectname) [40ms]\n2025-07-21 15:32:11.145 [info] > git diff --name-status -z --diff-filter=ADMR MERGE_HEAD...HEAD [24ms]\n2025-07-21 15:32:11.165 [info] > git show -s --decorate=full --shortstat --format=%H%n%aN%n%aE%n%at%n%ct%n%P%n%D%n%B -z MERGE_HEAD [57ms]\n2025-07-21 15:32:11.252 [info] > git show -s --decorate=full --shortstat --format=%H%n%aN%n%aE%n%at%n%ct%n%P%n%D%n%B -z HEAD [157ms]\n2025-07-21 15:32:11.430 [info] > git show --textconv :1:utils/nn.py [130ms]\n2025-07-21 15:32:11.430 [info] > git show --textconv c68e03e245819070bc7cd071c3e94cbd938a1f33:utils/nn.py [117ms]\n2025-07-21 15:32:11.430 [info] > git show --textconv c7cd6b2eea65bd1dea457231f2f68e9e76da3c21:utils/nn.py [105ms]\n2025-07-21 15:32:11.430 [info] > git ls-tree -l c7cd6b2eea65bd1dea457231f2f68e9e76da3c21 -- /home/hk-project-p0023960/tum_cte0515/Projects/jafar/utils/nn.py [64ms]\n2025-07-21 15:32:11.431 [info] > git ls-tree -l c68e03e245819070bc7cd071c3e94cbd938a1f33 -- /home/hk-project-p0023960/tum_cte0515/Projects/jafar/utils/nn.py [80ms]\n2025-07-21 15:32:11.431 [info] > git ls-tree -l :1 -- /home/hk-project-p0023960/tum_cte0515/Projects/jafar/utils/nn.py [93ms]\n2025-07-21 15:32:11.431 [info] fatal: Not a valid object name :1\n2025-07-21 15:32:13.636 [info] > git for-each-ref --format=%(refname)%00%(upstream:short)%00%(objectname)%00%(upstream:track)%00%(upstream:remotename)%00%(upstream:remoteref) refs/heads/new-arch-sampling refs/remotes/new-arch-sampling [14ms]\n2025-07-21 15:32:13.674 [info] > git status -z -uall [28ms]\n2025-07-21 15:32:13.718 [info] > git for-each-ref --sort -committerdate --format %(refname) %(objectname) %(*objectname) [57ms]\n2025-07-21 15:32:18.802 [info] > git for-each-ref 
--format=%(refname)%00%(upstream:short)%00%(objectname)%00%(upstream:track)%00%(upstream:remotename)%00%(upstream:remoteref) refs/heads/new-arch-sampling refs/remotes/new-arch-sampling [55ms]\n2025-07-21 15:32:18.843 [info] > git status -z -uall [29ms]\n2025-07-21 15:32:18.870 [info] > git for-each-ref --sort -committerdate --format %(refname) %(objectname) %(*objectname) [43ms]\n2025-07-21 15:32:53.280 [info] > git blame --root --incremental c7cd6b2eea65bd1dea457231f2f68e9e76da3c21 -- /home/hk-project-p0023960/tum_cte0515/Projects/jafar/utils/nn.py [64ms]\n2025-07-21 15:33:29.138 [info] > git for-each-ref --format=%(refname)%00%(upstream:short)%00%(objectname)%00%(upstream:track)%00%(upstream:remotename)%00%(upstream:remoteref) refs/heads/new-arch-sampling refs/remotes/new-arch-sampling [13ms]\n2025-07-21 15:33:29.183 [info] > git status -z -uall [32ms]\n2025-07-21 15:33:29.194 [info] > git for-each-ref --sort -committerdate --format %(refname) %(objectname) %(*objectname) [28ms]\n2025-07-21 15:33:41.703 [info] > git for-each-ref --format=%(refname)%00%(upstream:short)%00%(objectname)%00%(upstream:track)%00%(upstream:remotename)%00%(upstream:remoteref) refs/heads/new-arch-sampling refs/remotes/new-arch-sampling [14ms]\n2025-07-21 15:33:41.743 [info] > git status -z -uall [28ms]\n2025-07-21 15:33:41.773 [info] > git for-each-ref --sort -committerdate --format %(refname) %(objectname) %(*objectname) [45ms]\n2025-07-21 15:33:46.812 [info] > git for-each-ref --format=%(refname)%00%(upstream:short)%00%(objectname)%00%(upstream:track)%00%(upstream:remotename)%00%(upstream:remoteref) refs/heads/new-arch-sampling refs/remotes/new-arch-sampling [15ms]\n2025-07-21 15:33:46.862 [info] > git status -z -uall [37ms]\n2025-07-21 15:33:46.894 [info] > git for-each-ref --sort -committerdate --format %(refname) %(objectname) %(*objectname) [54ms]\n2025-07-21 15:33:54.748 [info] > git for-each-ref --format=%(refname)%00%(upstream:short)%00%(objectname)%00%(upstream:track)%00%(upstream:remotename)%00%(upstream:remoteref) refs/heads/new-arch-sampling refs/remotes/new-arch-sampling [13ms]\n2025-07-21 15:33:54.796 [info] > git status -z -uall [35ms]\n2025-07-21 15:33:54.837 [info] > git for-each-ref --sort -committerdate --format %(refname) %(objectname) %(*objectname) [62ms]\n2025-07-21 15:34:13.098 [info] > git for-each-ref --format=%(refname)%00%(upstream:short)%00%(objectname)%00%(upstream:track)%00%(upstream:remotename)%00%(upstream:remoteref) refs/heads/new-arch-sampling refs/remotes/new-arch-sampling [15ms]\n2025-07-21 15:34:13.137 [info] > git status -z -uall [27ms]\n2025-07-21 15:34:13.178 [info] > git for-each-ref --sort -committerdate --format %(refname) %(objectname) %(*objectname) [54ms]\n2025-07-21 15:34:28.218 [info] > git for-each-ref --format=%(refname)%00%(upstream:short)%00%(objectname)%00%(upstream:track)%00%(upstream:remotename)%00%(upstream:remoteref) refs/heads/new-arch-sampling refs/remotes/new-arch-sampling [14ms]\n2025-07-21 15:34:28.260 [info] > git status -z -uall [29ms]\n2025-07-21 15:34:28.288 [info] > git for-each-ref --sort -committerdate --format %(refname) %(objectname) %(*objectname) [43ms]\n2025-07-21 15:34:35.588 [info] > git add -A -- /home/hk-project-p0023960/tum_cte0515/Projects/jafar/utils/nn.py [48ms]\n2025-07-21 15:34:35.620 [info] > git for-each-ref --format=%(refname)%00%(upstream:short)%00%(objectname)%00%(upstream:track)%00%(upstream:remotename)%00%(upstream:remoteref) refs/heads/new-arch-sampling refs/remotes/new-arch-sampling [15ms]\n2025-07-21 15:34:35.666 
[info] > git status -z -uall [31ms]\n2025-07-21 15:34:35.704 [info] > git for-each-ref --sort -committerdate --format %(refname) %(objectname) %(*objectname) [53ms]\n2025-07-21 15:34:35.857 [info] > git show --textconv :models/dynamics.py [62ms]\n2025-07-21 15:34:35.858 [info] > git show --textconv HEAD:models/dynamics.py [51ms]\n2025-07-21 15:34:35.858 [info] > git ls-tree -l HEAD -- /home/hk-project-p0023960/tum_cte0515/Projects/jafar/models/dynamics.py [27ms]\n2025-07-21 15:34:35.858 [info] > git ls-files --stage -- /home/hk-project-p0023960/tum_cte0515/Projects/jafar/models/dynamics.py [38ms]\n2025-07-21 15:34:35.880 [info] > git cat-file -s 2f308d4205bf623974a1422521d5c21578ab9e33 [11ms]\n2025-07-21 15:34:36.287 [info] > git check-ignore -v -z --stdin [12ms]\n2025-07-21 15:34:36.817 [info] > git ls-files --stage -- /home/hk-project-p0023960/tum_cte0515/Projects/jafar/models/dynamics.py [13ms]\n2025-07-21 15:34:36.831 [info] > git ls-tree -l HEAD -- /home/hk-project-p0023960/tum_cte0515/Projects/jafar/models/dynamics.py [15ms]\n2025-07-21 15:34:36.842 [info] > git cat-file -s 2f308d4205bf623974a1422521d5c21578ab9e33 [12ms]\n2025-07-21 15:34:36.870 [info] > git show --textconv HEAD:models/dynamics.py [15ms]\n2025-07-21 15:34:36.898 [info] > git show --textconv :models/dynamics.py [12ms]\n2025-07-21 15:34:37.821 [info] > git for-each-ref --format=%(refname)%00%(upstream:short)%00%(objectname)%00%(upstream:track)%00%(upstream:remotename)%00%(upstream:remoteref) refs/heads/new-arch-sampling refs/remotes/new-arch-sampling [15ms]\n2025-07-21 15:34:37.858 [info] > git status -z -uall [25ms]\n2025-07-21 15:34:37.928 [info] > git for-each-ref --sort -committerdate --format %(refname) %(objectname) %(*objectname) [79ms]\n2025-07-21 15:34:39.852 [info] > git show --textconv :utils/nn.py [39ms]\n2025-07-21 15:34:39.852 [info] > git show --textconv HEAD:utils/nn.py [26ms]\n2025-07-21 15:34:39.852 [info] > git ls-files --stage -- /home/hk-project-p0023960/tum_cte0515/Projects/jafar/utils/nn.py [14ms]\n2025-07-21 15:34:39.867 [info] > git ls-tree -l HEAD -- /home/hk-project-p0023960/tum_cte0515/Projects/jafar/utils/nn.py [16ms]\n2025-07-21 15:34:39.874 [info] > git cat-file -s 5364cc158733e0116840aeb9108f9d09544e78d6 [11ms]\n2025-07-21 15:34:40.027 [info] > git blame --root --incremental c7cd6b2eea65bd1dea457231f2f68e9e76da3c21 -- /home/hk-project-p0023960/tum_cte0515/Projects/jafar/utils/nn.py [108ms]\n2025-07-21 15:35:19.922 [info] > git show --textconv :.venv/lib/python3.10/site-packages/flax/linen/attention.py [15ms]\n2025-07-21 15:35:19.929 [info] > git ls-files --stage -- /home/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/flax/linen/attention.py [9ms]\n2025-07-21 15:35:20.046 [info] > git blame --root --incremental c7cd6b2eea65bd1dea457231f2f68e9e76da3c21 -- /home/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/flax/linen/attention.py [18ms]\n2025-07-21 15:35:20.046 [info] fatal: no such path .venv/lib/python3.10/site-packages/flax/linen/attention.py in c7cd6b2eea65bd1dea457231f2f68e9e76da3c21\n2025-07-21 15:35:20.058 [info] > git check-ignore -v -z --stdin [15ms]\n2025-07-21 15:35:45.469 [info] > git show --textconv :utils/nn.py [32ms]\n2025-07-21 15:35:45.469 [info] > git show --textconv HEAD:utils/nn.py [24ms]\n2025-07-21 15:35:45.469 [info] > git ls-files --stage -- /home/hk-project-p0023960/tum_cte0515/Projects/jafar/utils/nn.py [13ms]\n2025-07-21 15:35:45.482 [info] > git ls-tree -l HEAD -- 
/home/hk-project-p0023960/tum_cte0515/Projects/jafar/utils/nn.py [15ms]\n2025-07-21 15:35:45.492 [info] > git cat-file -s 5364cc158733e0116840aeb9108f9d09544e78d6 [12ms]\n2025-07-21 15:35:46.860 [info] > git show --textconv :.venv/lib/python3.10/site-packages/flax/linen/attention.py [80ms]\n2025-07-21 15:35:46.862 [info] > git ls-files --stage -- /home/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/flax/linen/attention.py [68ms]\n2025-07-21 15:35:48.542 [info] > git show --textconv :utils/nn.py [46ms]\n2025-07-21 15:35:48.542 [info] > git show --textconv HEAD:utils/nn.py [33ms]\n2025-07-21 15:35:48.543 [info] > git ls-files --stage -- /home/hk-project-p0023960/tum_cte0515/Projects/jafar/utils/nn.py [19ms]\n2025-07-21 15:35:48.562 [info] > git ls-tree -l HEAD -- /home/hk-project-p0023960/tum_cte0515/Projects/jafar/utils/nn.py [22ms]\n2025-07-21 15:35:48.567 [info] > git cat-file -s 5364cc158733e0116840aeb9108f9d09544e78d6 [9ms]\n2025-07-21 15:36:16.215 [info] > git show --textconv :.venv/lib/python3.10/site-packages/flax/linen/attention.py [16ms]\n2025-07-21 15:36:16.224 [info] > git ls-files --stage -- /home/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/flax/linen/attention.py [9ms]\n2025-07-21 15:36:17.740 [info] > git show --textconv :utils/nn.py [39ms]\n2025-07-21 15:36:17.740 [info] > git show --textconv HEAD:utils/nn.py [27ms]\n2025-07-21 15:36:17.741 [info] > git ls-files --stage -- /home/hk-project-p0023960/tum_cte0515/Projects/jafar/utils/nn.py [15ms]\n2025-07-21 15:36:17.759 [info] > git ls-tree -l HEAD -- /home/hk-project-p0023960/tum_cte0515/Projects/jafar/utils/nn.py [20ms]\n2025-07-21 15:36:17.766 [info] > git cat-file -s 5364cc158733e0116840aeb9108f9d09544e78d6 [11ms]\n2025-07-21 15:36:30.570 [info] > git for-each-ref --format=%(refname)%00%(upstream:short)%00%(objectname)%00%(upstream:track)%00%(upstream:remotename)%00%(upstream:remoteref) refs/heads/new-arch-sampling refs/remotes/new-arch-sampling [26ms]\n2025-07-21 15:36:30.630 [info] > git status -z -uall [46ms]\n2025-07-21 15:36:30.826 [info] > git for-each-ref --sort -committerdate --format %(refname) %(objectname) %(*objectname) [227ms]\n2025-07-21 15:36:58.191 [info] > git for-each-ref --format=%(refname)%00%(upstream:short)%00%(objectname)%00%(upstream:track)%00%(upstream:remotename)%00%(upstream:remoteref) refs/heads/new-arch-sampling refs/remotes/new-arch-sampling [15ms]\n2025-07-21 15:36:58.248 [info] > git status -z -uall [40ms]\n2025-07-21 15:36:58.270 [info] > git for-each-ref --sort -committerdate --format %(refname) %(objectname) %(*objectname) [47ms]\n2025-07-21 15:36:58.365 [info] > git ls-files --stage -- /home/hk-project-p0023960/tum_cte0515/Projects/jafar/models/dynamics.py [19ms]\n2025-07-21 15:36:58.400 [info] > git cat-file -s 2f308d4205bf623974a1422521d5c21578ab9e33 [22ms]\n2025-07-21 15:36:58.400 [info] > git ls-tree -l HEAD -- /home/hk-project-p0023960/tum_cte0515/Projects/jafar/models/dynamics.py [38ms]\n2025-07-21 15:36:58.488 [info] > git show --textconv :models/dynamics.py [13ms]\n2025-07-21 15:36:58.501 [info] > git show --textconv HEAD:models/dynamics.py [14ms]\n2025-07-21 15:37:03.313 [info] > git for-each-ref --format=%(refname)%00%(upstream:short)%00%(objectname)%00%(upstream:track)%00%(upstream:remotename)%00%(upstream:remoteref) refs/heads/new-arch-sampling refs/remotes/new-arch-sampling [14ms]\n2025-07-21 15:37:03.353 [info] > git status -z -uall [29ms]\n2025-07-21 15:37:03.383 [info] > git for-each-ref --sort 
-committerdate --format %(refname) %(objectname) %(*objectname) [41ms]\n2025-07-21 15:37:21.168 [info] > git check-ignore -v -z --stdin [74ms]\n2025-07-21 15:37:22.665 [info] > git check-ignore -v -z --stdin [19ms]\n2025-07-21 15:37:22.684 [info] > git check-ignore -v -z --stdin [22ms]\n2025-07-21 15:37:24.636 [info] > git show --textconv :utils/nn.py [14ms]\n2025-07-21 15:37:24.646 [info] > git ls-files --stage -- /home/hk-project-p0023960/tum_cte0515/Projects/jafar/utils/nn.py [10ms]\n2025-07-21 15:37:24.669 [info] > git cat-file -s 5364cc158733e0116840aeb9108f9d09544e78d6 [11ms]\n2025-07-21 15:37:24.960 [info] > git show --textconv HEAD:utils/nn.py [15ms]\n2025-07-21 15:37:24.971 [info] > git ls-tree -l HEAD -- /home/hk-project-p0023960/tum_cte0515/Projects/jafar/utils/nn.py [12ms]\n2025-07-21 15:37:27.096 [info] > git add -A -- /home/hk-project-p0023960/tum_cte0515/Projects/jafar/utils/nn.py [17ms]\n2025-07-21 15:37:27.131 [info] > git for-each-ref --format=%(refname)%00%(upstream:short)%00%(objectname)%00%(upstream:track)%00%(upstream:remotename)%00%(upstream:remoteref) refs/heads/new-arch-sampling refs/remotes/new-arch-sampling [18ms]\n2025-07-21 15:37:27.178 [info] > git status -z -uall [35ms]\n2025-07-21 15:37:27.204 [info] > git for-each-ref --sort -committerdate --format %(refname) %(objectname) %(*objectname) [48ms]\n2025-07-21 15:37:27.813 [info] > git check-ignore -v -z --stdin [11ms]\n2025-07-21 15:37:28.366 [info] > git ls-files --stage -- /home/hk-project-p0023960/tum_cte0515/Projects/jafar/utils/nn.py [14ms]\n2025-07-21 15:37:28.379 [info] > git ls-tree -l HEAD -- /home/hk-project-p0023960/tum_cte0515/Projects/jafar/models/dynamics.py [42ms]\n2025-07-21 15:37:28.380 [info] > git ls-files --stage -- /home/hk-project-p0023960/tum_cte0515/Projects/jafar/models/dynamics.py [55ms]\n2025-07-21 15:37:28.394 [info] > git cat-file -s 5be4bf9c217c0d7d75a6e787d4cbcb3381b05ff0 [16ms]\n2025-07-21 15:37:28.395 [info] > git ls-tree -l HEAD -- /home/hk-project-p0023960/tum_cte0515/Projects/jafar/utils/nn.py [29ms]\n2025-07-21 15:37:28.403 [info] > git cat-file -s 2f308d4205bf623974a1422521d5c21578ab9e33 [10ms]\n2025-07-21 15:37:28.470 [info] > git show --textconv HEAD:models/dynamics.py [47ms]\n2025-07-21 15:37:28.471 [info] > git show --textconv :utils/nn.py [32ms]\n2025-07-21 15:37:28.471 [info] > git show --textconv HEAD:utils/nn.py [17ms]\n2025-07-21 15:37:28.483 [info] > git show --textconv :models/dynamics.py [13ms]\n2025-07-21 15:37:29.347 [info] > git for-each-ref --format=%(refname)%00%(upstream:short)%00%(objectname)%00%(upstream:track)%00%(upstream:remotename)%00%(upstream:remoteref) refs/heads/new-arch-sampling refs/remotes/new-arch-sampling [19ms]\n2025-07-21 15:37:29.389 [info] > git status -z -uall [30ms]\n2025-07-21 15:37:29.404 [info] > git for-each-ref --sort -committerdate --format %(refname) %(objectname) %(*objectname) [29ms]\n2025-07-21 15:37:38.501 [info] > git -c user.useConfigOnly=true commit --quiet --allow-empty-message --file - [151ms]\n2025-07-21 15:37:38.522 [info] > git config --get commit.template [7ms]\n2025-07-21 15:37:38.553 [info] > git config --get commit.template [15ms]\n2025-07-21 15:37:38.562 [info] > git for-each-ref --format=%(refname)%00%(upstream:short)%00%(objectname)%00%(upstream:track)%00%(upstream:remotename)%00%(upstream:remoteref) refs/heads/new-arch-sampling refs/remotes/new-arch-sampling [10ms]\n2025-07-21 15:37:38.605 [info] > git status -z -uall [30ms]\n2025-07-21 15:37:38.643 [info] > git for-each-ref --sort -committerdate --format 
%(refname) %(objectname) %(*objectname) [54ms]\n2025-07-21 15:37:38.774 [info] > git for-each-ref --format=%(refname)%00%(upstream:short)%00%(objectname)%00%(upstream:track)%00%(upstream:remotename)%00%(upstream:remoteref) refs/heads/new-arch-sampling refs/remotes/new-arch-sampling [22ms]\n2025-07-21 15:37:38.774 [info] > git config --get commit.template [38ms]\n2025-07-21 15:37:38.813 [info] > git status -z -uall [28ms]\n2025-07-21 15:37:38.838 [info] > git for-each-ref --sort -committerdate --format %(refname) %(objectname) %(*objectname) [42ms]\n2025-07-21 15:37:39.001 [info] > git blame --root --incremental 02cdf9cfd8949692c5d95a7e4f864329228e4c50 -- /home/hk-project-p0023960/tum_cte0515/Projects/jafar/utils/nn.py [70ms]\n2025-07-21 15:37:39.348 [info] > git log --format=%H%n%aN%n%aE%n%at%n%ct%n%P%n%D%n%B -z --shortstat --diff-merges=first-parent -n50 --skip=0 --topo-order --decorate=full --stdin [628ms]\n2025-07-21 15:37:39.847 [info] > git ls-tree -l HEAD -- /home/hk-project-p0023960/tum_cte0515/Projects/jafar/utils/nn.py [13ms]\n2025-07-21 15:37:39.848 [info] > git ls-files --stage -- /home/hk-project-p0023960/tum_cte0515/Projects/jafar/models/dynamics.py [25ms]\n2025-07-21 15:37:39.860 [info] > git ls-tree -l HEAD -- /home/hk-project-p0023960/tum_cte0515/Projects/jafar/utils/nn.py [13ms]\n2025-07-21 15:37:39.882 [info] > git cat-file -s 2f308d4205bf623974a1422521d5c21578ab9e33 [23ms]\n2025-07-21 15:37:39.896 [info] > git show --textconv HEAD:utils/nn.py [14ms]\n2025-07-21 15:37:39.909 [info] > git show --textconv HEAD:utils/nn.py [14ms]\n2025-07-21 15:37:39.918 [info] > git show --textconv :models/dynamics.py [9ms]\n2025-07-21 15:37:40.838 [info] > git config --get commit.template [16ms]\n2025-07-21 15:37:40.846 [info] > git for-each-ref --format=%(refname)%00%(upstream:short)%00%(objectname)%00%(upstream:track)%00%(upstream:remotename)%00%(upstream:remoteref) refs/heads/new-arch-sampling refs/remotes/new-arch-sampling [10ms]\n2025-07-21 15:37:40.886 [info] > git status -z -uall [27ms]\n2025-07-21 15:37:40.930 [info] > git for-each-ref --sort -committerdate --format %(refname) %(objectname) %(*objectname) [57ms]\n2025-07-21 15:37:44.406 [info] > git ls-files --stage -- /home/hk-project-p0023960/tum_cte0515/Projects/jafar/models/dynamics.py [13ms]\n2025-07-21 15:37:44.419 [info] > git ls-tree -l HEAD -- /home/hk-project-p0023960/tum_cte0515/Projects/jafar/utils/nn.py [14ms]\n2025-07-21 15:37:44.429 [info] > git cat-file -s 06bdeecdc51e91f229101ac3f172615b480a1b51 [11ms]\n2025-07-21 15:37:44.481 [info] > git show --textconv HEAD:utils/nn.py [23ms]\n2025-07-21 15:37:44.492 [info] > git show --textconv :models/dynamics.py [11ms]\n2025-07-21 15:37:45.969 [info] > git config --get commit.template [15ms]\n2025-07-21 15:37:45.979 [info] > git for-each-ref --format=%(refname)%00%(upstream:short)%00%(objectname)%00%(upstream:track)%00%(upstream:remotename)%00%(upstream:remoteref) refs/heads/new-arch-sampling refs/remotes/new-arch-sampling [11ms]\n2025-07-21 15:37:46.016 [info] > git status -z -uall [25ms]\n2025-07-21 15:37:46.051 [info] > git for-each-ref --sort -committerdate --format %(refname) %(objectname) %(*objectname) [48ms]\n2025-07-21 15:37:46.461 [info] > git show --textconv HEAD:models/dynamics.py [16ms]\n2025-07-21 15:37:46.472 [info] > git ls-tree -l HEAD -- /home/hk-project-p0023960/tum_cte0515/Projects/jafar/models/dynamics.py [13ms]\n2025-07-21 15:37:51.663 [info] > git show -s --decorate=full --shortstat --format=%H%n%aN%n%aE%n%at%n%ct%n%P%n%D%n%B -z MERGE_HEAD 
[16ms]\n2025-07-21 15:37:51.663 [info] fatal: ambiguous argument 'MERGE_HEAD': unknown revision or path not in the working tree.\nUse '--' to separate paths from revisions, like this:\n'git [...] -- [...]'\n2025-07-21 15:37:51.673 [info] > git diff --name-status -z --diff-filter=ADMR MERGE_HEAD...HEAD [10ms]\n2025-07-21 15:37:51.673 [info] fatal: ambiguous argument 'MERGE_HEAD...HEAD': unknown revision or path not in the working tree.\nUse '--' to separate paths from revisions, like this:\n'git [...] -- [...]'\n2025-07-21 15:37:51.698 [info] > git show -s --decorate=full --shortstat --format=%H%n%aN%n%aE%n%at%n%ct%n%P%n%D%n%B -z HEAD [67ms]\n2025-07-21 15:37:51.844 [info] > git show --textconv :1:utils/nn.py [84ms]\n2025-07-21 15:37:51.845 [info] > git show --textconv :3:utils/nn.py [69ms]\n2025-07-21 15:37:51.845 [info] > git show --textconv :2:utils/nn.py [54ms]\n2025-07-21 15:37:51.845 [info] > git ls-tree -l :3 -- /home/hk-project-p0023960/tum_cte0515/Projects/jafar/utils/nn.py [20ms]\n2025-07-21 15:37:51.845 [info] fatal: Not a valid object name :3\n2025-07-21 15:37:51.846 [info] > git ls-tree -l :1 -- /home/hk-project-p0023960/tum_cte0515/Projects/jafar/utils/nn.py [38ms]\n2025-07-21 15:37:51.846 [info] fatal: Not a valid object name :1\n2025-07-21 15:37:51.848 [info] > git ls-tree -l :2 -- /home/hk-project-p0023960/tum_cte0515/Projects/jafar/utils/nn.py [11ms]\n2025-07-21 15:37:51.848 [info] fatal: Not a valid object name :2\n2025-07-21 15:37:54.034 [info] > git config --get commit.template [14ms]\n2025-07-21 15:37:54.058 [info] > git for-each-ref --format=%(refname)%00%(upstream:short)%00%(objectname)%00%(upstream:track)%00%(upstream:remotename)%00%(upstream:remoteref) refs/heads/new-arch-sampling refs/remotes/new-arch-sampling [13ms]\n2025-07-21 15:37:54.097 [info] > git status -z -uall [27ms]\n2025-07-21 15:37:54.134 [info] > git for-each-ref --sort -committerdate --format %(refname) %(objectname) %(*objectname) [49ms]\n2025-07-21 15:38:01.917 [info] > git config --get commit.template [16ms]\n2025-07-21 15:38:01.929 [info] > git for-each-ref --format=%(refname)%00%(upstream:short)%00%(objectname)%00%(upstream:track)%00%(upstream:remotename)%00%(upstream:remoteref) refs/heads/new-arch-sampling refs/remotes/new-arch-sampling [15ms]\n2025-07-21 15:38:01.972 [info] > git status -z -uall [30ms]\n2025-07-21 15:38:02.021 [info] > git for-each-ref --sort -committerdate --format %(refname) %(objectname) %(*objectname) [63ms]\n2025-07-21 15:38:49.033 [info] > git config --get commit.template [20ms]\n2025-07-21 15:38:49.049 [info] > git for-each-ref --format=%(refname)%00%(upstream:short)%00%(objectname)%00%(upstream:track)%00%(upstream:remotename)%00%(upstream:remoteref) refs/heads/new-arch-sampling refs/remotes/new-arch-sampling [17ms]\n2025-07-21 15:38:49.096 [info] > git status -z -uall [29ms]\n2025-07-21 15:38:49.127 [info] > git for-each-ref --sort -committerdate --format %(refname) %(objectname) %(*objectname) [41ms]\n2025-07-21 15:38:56.196 [info] > git config --get commit.template [21ms]\n2025-07-21 15:38:56.200 [info] > git for-each-ref --format=%(refname)%00%(upstream:short)%00%(objectname)%00%(upstream:track)%00%(upstream:remotename)%00%(upstream:remoteref) refs/heads/new-arch-sampling refs/remotes/new-arch-sampling [11ms]\n2025-07-21 15:38:56.244 [info] > git status -z -uall [31ms]\n2025-07-21 15:38:56.274 [info] > git for-each-ref --sort -committerdate --format %(refname) %(objectname) %(*objectname) [47ms]\n2025-07-21 15:39:01.315 [info] > git config --get commit.template 
[15ms]\n2025-07-21 15:39:01.328 [info] > git for-each-ref --format=%(refname)%00%(upstream:short)%00%(objectname)%00%(upstream:track)%00%(upstream:remotename)%00%(upstream:remoteref) refs/heads/new-arch-sampling refs/remotes/new-arch-sampling [15ms]\n2025-07-21 15:39:01.372 [info] > git status -z -uall [31ms]\n2025-07-21 15:39:01.405 [info] > git for-each-ref --sort -committerdate --format %(refname) %(objectname) %(*objectname) [50ms]\n2025-07-21 15:39:07.693 [info] > git config --get commit.template [17ms]\n2025-07-21 15:39:07.708 [info] > git for-each-ref --format=%(refname)%00%(upstream:short)%00%(objectname)%00%(upstream:track)%00%(upstream:remotename)%00%(upstream:remoteref) refs/heads/new-arch-sampling refs/remotes/new-arch-sampling [16ms]\n2025-07-21 15:39:07.752 [info] > git status -z -uall [31ms]\n2025-07-21 15:39:07.787 [info] > git for-each-ref --sort -committerdate --format %(refname) %(objectname) %(*objectname) [52ms]\n2025-07-21 15:39:09.393 [info] > git add -A -- /home/hk-project-p0023960/tum_cte0515/Projects/jafar/utils/nn.py [30ms]\n2025-07-21 15:39:09.421 [info] > git config --get commit.template [12ms]\n2025-07-21 15:39:09.434 [info] > git for-each-ref --format=%(refname)%00%(upstream:short)%00%(objectname)%00%(upstream:track)%00%(upstream:remotename)%00%(upstream:remoteref) refs/heads/new-arch-sampling refs/remotes/new-arch-sampling [14ms]\n2025-07-21 15:39:09.477 [info] > git status -z -uall [28ms]\n2025-07-21 15:39:09.505 [info] > git for-each-ref --sort -committerdate --format %(refname) %(objectname) %(*objectname) [43ms]\n2025-07-21 15:39:09.917 [info] > git show --textconv :utils/nn.py [43ms]\n2025-07-21 15:39:09.917 [info] > git show --textconv HEAD:utils/nn.py [30ms]\n2025-07-21 15:39:09.917 [info] > git ls-files --stage -- /home/hk-project-p0023960/tum_cte0515/Projects/jafar/utils/nn.py [17ms]\n2025-07-21 15:39:09.934 [info] > git ls-tree -l HEAD -- /home/hk-project-p0023960/tum_cte0515/Projects/jafar/utils/nn.py [19ms]\n2025-07-21 15:39:09.944 [info] > git cat-file -s f820a522b81e3512aa3e021f09f975c3c56a7ce3 [12ms]\n2025-07-21 15:39:10.701 [info] > git ls-files --stage -- /home/hk-project-p0023960/tum_cte0515/Projects/jafar/utils/nn.py [18ms]\n2025-07-21 15:39:10.715 [info] > git ls-tree -l HEAD -- /home/hk-project-p0023960/tum_cte0515/Projects/jafar/models/dynamics.py [53ms]\n2025-07-21 15:39:10.716 [info] > git ls-files --stage -- /home/hk-project-p0023960/tum_cte0515/Projects/jafar/models/dynamics.py [69ms]\n2025-07-21 15:39:10.732 [info] > git cat-file -s f820a522b81e3512aa3e021f09f975c3c56a7ce3 [17ms]\n2025-07-21 15:39:10.733 [info] > git ls-tree -l HEAD -- /home/hk-project-p0023960/tum_cte0515/Projects/jafar/utils/nn.py [35ms]\n2025-07-21 15:39:10.741 [info] > git cat-file -s 06bdeecdc51e91f229101ac3f172615b480a1b51 [11ms]\n2025-07-21 15:39:10.799 [info] > git show --textconv HEAD:models/dynamics.py [45ms]\n2025-07-21 15:39:10.799 [info] > git show --textconv :utils/nn.py [32ms]\n2025-07-21 15:39:10.799 [info] > git show --textconv HEAD:utils/nn.py [19ms]\n2025-07-21 15:39:10.807 [info] > git show --textconv :models/dynamics.py [12ms]\n2025-07-21 15:39:12.825 [info] > git config --get commit.template [15ms]\n2025-07-21 15:39:12.838 [info] > git for-each-ref --format=%(refname)%00%(upstream:short)%00%(objectname)%00%(upstream:track)%00%(upstream:remotename)%00%(upstream:remoteref) refs/heads/new-arch-sampling refs/remotes/new-arch-sampling [14ms]\n2025-07-21 15:39:12.880 [info] > git status -z -uall [30ms]\n2025-07-21 15:39:12.912 [info] > git 
for-each-ref --sort -committerdate --format %(refname) %(objectname) %(*objectname) [48ms]\n2025-07-21 15:39:21.246 [info] > git config --get commit.template [15ms]\n2025-07-21 15:39:21.259 [info] > git for-each-ref --format=%(refname)%00%(upstream:short)%00%(objectname)%00%(upstream:track)%00%(upstream:remotename)%00%(upstream:remoteref) refs/heads/new-arch-sampling refs/remotes/new-arch-sampling [14ms]\n2025-07-21 15:39:21.301 [info] > git status -z -uall [30ms]\n2025-07-21 15:39:21.339 [info] > git for-each-ref --sort -committerdate --format %(refname) %(objectname) %(*objectname) [52ms]\n2025-07-21 15:39:30.686 [info] > git check-ignore -v -z --stdin [19ms]\n2025-07-21 15:39:37.090 [info] > git show --textconv :utils/nn.py [18ms]\n2025-07-21 15:39:37.096 [info] > git ls-files --stage -- /home/hk-project-p0023960/tum_cte0515/Projects/jafar/utils/nn.py [9ms]\n2025-07-21 15:39:37.120 [info] > git cat-file -s f820a522b81e3512aa3e021f09f975c3c56a7ce3 [11ms]\n2025-07-21 15:39:37.401 [info] > git show --textconv HEAD:utils/nn.py [18ms]\n2025-07-21 15:39:37.416 [info] > git ls-tree -l HEAD -- /home/hk-project-p0023960/tum_cte0515/Projects/jafar/utils/nn.py [16ms]\n2025-07-21 15:39:44.553 [info] > git hash-object --stdin -w --path utils/nn.py [14ms]\n2025-07-21 15:39:44.647 [info] > git show -s --decorate=full --shortstat --format=%H%n%aN%n%aE%n%at%n%ct%n%P%n%D%n%B -z HEAD [77ms]\n2025-07-21 15:39:44.673 [info] > git ls-tree -l HEAD -- utils/nn.py [12ms]\n2025-07-21 15:39:44.697 [info] > git update-index --cacheinfo 100644 5f601738b438d65f4ac4a20303329aef00d56c99\n utils/nn.py [12ms]\n2025-07-21 15:39:44.697 [info] Ignoring path \n2025-07-21 15:39:44.728 [info] > git config --get commit.template [15ms]\n2025-07-21 15:39:44.744 [info] > git for-each-ref --format=%(refname)%00%(upstream:short)%00%(objectname)%00%(upstream:track)%00%(upstream:remotename)%00%(upstream:remoteref) refs/heads/new-arch-sampling refs/remotes/new-arch-sampling [17ms]\n2025-07-21 15:39:44.786 [info] > git status -z -uall [29ms]\n2025-07-21 15:39:44.821 [info] > git for-each-ref --sort -committerdate --format %(refname) %(objectname) %(*objectname) [47ms]\n2025-07-21 15:39:44.854 [info] > git ls-files --stage -- /home/hk-project-p0023960/tum_cte0515/Projects/jafar/utils/nn.py [9ms]\n2025-07-21 15:39:44.880 [info] > git cat-file -s 5f601738b438d65f4ac4a20303329aef00d56c99 [10ms]\n2025-07-21 15:39:44.945 [info] > git show --textconv :utils/nn.py [24ms]\n2025-07-21 15:39:45.860 [info] > git config --get commit.template [15ms]\n2025-07-21 15:39:45.874 [info] > git for-each-ref --format=%(refname)%00%(upstream:short)%00%(objectname)%00%(upstream:track)%00%(upstream:remotename)%00%(upstream:remoteref) refs/heads/new-arch-sampling refs/remotes/new-arch-sampling [16ms]\n2025-07-21 15:39:45.916 [info] > git status -z -uall [29ms]\n2025-07-21 15:39:45.947 [info] > git for-each-ref --sort -committerdate --format %(refname) %(objectname) %(*objectname) [46ms]\n2025-07-21 15:39:46.027 [info] > git ls-files --stage -- /home/hk-project-p0023960/tum_cte0515/Projects/jafar/utils/nn.py [16ms]\n2025-07-21 15:39:46.039 [info] > git ls-tree -l HEAD -- /home/hk-project-p0023960/tum_cte0515/Projects/jafar/models/dynamics.py [46ms]\n2025-07-21 15:39:46.040 [info] > git ls-files --stage -- /home/hk-project-p0023960/tum_cte0515/Projects/jafar/models/dynamics.py [59ms]\n2025-07-21 15:39:46.081 [info] > git cat-file -s 06bdeecdc51e91f229101ac3f172615b480a1b51 [30ms]\n2025-07-21 15:39:46.082 [info] > git cat-file -s 
5f601738b438d65f4ac4a20303329aef00d56c99 [43ms]\n2025-07-21 15:39:46.082 [info] > git ls-tree -l HEAD -- /home/hk-project-p0023960/tum_cte0515/Projects/jafar/utils/nn.py [59ms]\n2025-07-21 15:39:46.091 [info] > git show --textconv HEAD:models/dynamics.py [10ms]\n2025-07-21 15:39:46.140 [info] > git show --textconv :models/dynamics.py [28ms]\n2025-07-21 15:39:46.140 [info] > git show --textconv :utils/nn.py [16ms]\n2025-07-21 15:39:46.152 [info] > git show --textconv HEAD:utils/nn.py [13ms]\n2025-07-21 15:39:46.622 [info] > git hash-object --stdin -w --path utils/nn.py [18ms]\n2025-07-21 15:39:46.684 [info] > git show -s --decorate=full --shortstat --format=%H%n%aN%n%aE%n%at%n%ct%n%P%n%D%n%B -z HEAD [51ms]\n2025-07-21 15:39:46.708 [info] > git ls-tree -l HEAD -- utils/nn.py [12ms]\n2025-07-21 15:39:46.730 [info] > git update-index --cacheinfo 100644 77fb135e5f05d81b3475e8593702978430968de3\n utils/nn.py [12ms]\n2025-07-21 15:39:46.730 [info] Ignoring path \n2025-07-21 15:39:46.758 [info] > git config --get commit.template [13ms]\n2025-07-21 15:39:46.779 [info] > git for-each-ref --format=%(refname)%00%(upstream:short)%00%(objectname)%00%(upstream:track)%00%(upstream:remotename)%00%(upstream:remoteref) refs/heads/new-arch-sampling refs/remotes/new-arch-sampling [22ms]\n2025-07-21 15:39:46.809 [info] > git ls-files --stage -- /home/hk-project-p0023960/tum_cte0515/Projects/jafar/utils/nn.py [31ms]\n2025-07-21 15:39:46.826 [info] > git status -z -uall [35ms]\n2025-07-21 15:39:46.832 [info] > git cat-file -s 77fb135e5f05d81b3475e8593702978430968de3 [8ms]\n2025-07-21 15:39:46.862 [info] > git for-each-ref --sort -committerdate --format %(refname) %(objectname) %(*objectname) [55ms]\n2025-07-21 15:39:46.873 [info] > git show --textconv :utils/nn.py [12ms]\n2025-07-21 15:39:47.997 [info] > git ls-files --stage -- /home/hk-project-p0023960/tum_cte0515/Projects/jafar/utils/nn.py [14ms]\n2025-07-21 15:39:48.012 [info] > git ls-tree -l HEAD -- /home/hk-project-p0023960/tum_cte0515/Projects/jafar/models/dynamics.py [41ms]\n2025-07-21 15:39:48.012 [info] > git ls-files --stage -- /home/hk-project-p0023960/tum_cte0515/Projects/jafar/models/dynamics.py [54ms]\n2025-07-21 15:39:48.030 [info] > git cat-file -s 77fb135e5f05d81b3475e8593702978430968de3 [19ms]\n2025-07-21 15:39:48.031 [info] > git ls-tree -l HEAD -- /home/hk-project-p0023960/tum_cte0515/Projects/jafar/utils/nn.py [36ms]\n2025-07-21 15:39:48.037 [info] > git cat-file -s 06bdeecdc51e91f229101ac3f172615b480a1b51 [10ms]\n2025-07-21 15:39:48.097 [info] > git show --textconv HEAD:models/dynamics.py [40ms]\n2025-07-21 15:39:48.097 [info] > git show --textconv :utils/nn.py [30ms]\n2025-07-21 15:39:48.097 [info] > git show --textconv HEAD:utils/nn.py [18ms]\n2025-07-21 15:39:48.108 [info] > git show --textconv :models/dynamics.py [12ms]\n2025-07-21 15:39:50.984 [info] > git config --get commit.template [15ms]\n2025-07-21 15:39:51.000 [info] > git for-each-ref --format=%(refname)%00%(upstream:short)%00%(objectname)%00%(upstream:track)%00%(upstream:remotename)%00%(upstream:remoteref) refs/heads/new-arch-sampling refs/remotes/new-arch-sampling [16ms]\n2025-07-21 15:39:51.042 [info] > git status -z -uall [29ms]\n2025-07-21 15:39:51.058 [info] > git for-each-ref --sort -committerdate --format %(refname) %(objectname) %(*objectname) [30ms]\n2025-07-21 15:39:52.982 [info] > git show --textconv HEAD:utils/nn.py [48ms]\n2025-07-21 15:39:52.982 [info] > git show --textconv :utils/nn.py [32ms]\n2025-07-21 15:39:52.983 [info] > git ls-tree -l HEAD -- 
/home/hk-project-p0023960/tum_cte0515/Projects/jafar/utils/nn.py [17ms]\n2025-07-21 15:39:52.990 [info] > git ls-files --stage -- /home/hk-project-p0023960/tum_cte0515/Projects/jafar/utils/nn.py [11ms]\n2025-07-21 15:39:53.012 [info] > git cat-file -s 77fb135e5f05d81b3475e8593702978430968de3 [10ms]\n2025-07-21 15:39:53.508 [info] > git check-ignore -v -z --stdin [10ms]\n2025-07-21 15:40:14.025 [info] > git rev-parse --show-toplevel [9ms]\n2025-07-21 15:40:14.025 [info] fatal: this operation must be run in a work tree\n2025-07-21 15:40:15.002 [info] > git ls-files --stage -- /home/hk-project-p0023960/tum_cte0515/Projects/jafar/models/dynamics.py [16ms]\n2025-07-21 15:40:15.042 [info] > git ls-tree -l HEAD -- /home/hk-project-p0023960/tum_cte0515/Projects/jafar/models/dynamics.py [40ms]\n2025-07-21 15:40:15.043 [info] > git ls-tree -l HEAD -- /home/hk-project-p0023960/tum_cte0515/Projects/jafar/utils/nn.py [15ms]\n2025-07-21 15:40:15.043 [info] > git cat-file -s 06bdeecdc51e91f229101ac3f172615b480a1b51 [28ms]\n2025-07-21 15:40:15.051 [info] > git ls-files --stage -- /home/hk-project-p0023960/tum_cte0515/Projects/jafar/utils/nn.py [9ms]\n2025-07-21 15:40:15.079 [info] > git cat-file -s 77fb135e5f05d81b3475e8593702978430968de3 [15ms]\n2025-07-21 15:40:15.091 [info] > git show --textconv HEAD:models/dynamics.py [13ms]\n2025-07-21 15:40:15.153 [info] > git show --textconv HEAD:utils/nn.py [27ms]\n2025-07-21 15:40:15.153 [info] > git show --textconv :models/dynamics.py [16ms]\n2025-07-21 15:40:15.165 [info] > git show --textconv :utils/nn.py [14ms]\n",log,tab +1845,4589057,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.git/COMMIT_EDITMSG",0,0,"",git-commit,tab +1846,4589687,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.git/COMMIT_EDITMSG",470,0,"",git-commit,selection_mouse +1847,4589699,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.git/COMMIT_EDITMSG",469,0,"",git-commit,selection_command +1848,4595278,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.git/COMMIT_EDITMSG",0,0,"",git-commit,selection_mouse +1849,4596887,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.git/COMMIT_EDITMSG",0,0,"m",git-commit,content +1850,4596888,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.git/COMMIT_EDITMSG",1,0,"",git-commit,selection_keyboard +1851,4596969,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.git/COMMIT_EDITMSG",1,0,"e",git-commit,content +1852,4596970,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.git/COMMIT_EDITMSG",2,0,"",git-commit,selection_keyboard +1853,4597082,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.git/COMMIT_EDITMSG",2,0,"r",git-commit,content +1854,4597083,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.git/COMMIT_EDITMSG",3,0,"",git-commit,selection_keyboard +1855,4597137,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.git/COMMIT_EDITMSG",3,0,"g",git-commit,content +1856,4597138,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.git/COMMIT_EDITMSG",4,0,"",git-commit,selection_keyboard +1857,4597250,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.git/COMMIT_EDITMSG",4,0,"e",git-commit,content +1858,4597251,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.git/COMMIT_EDITMSG",5,0,"",git-commit,selection_keyboard 
+1859,4597337,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.git/COMMIT_EDITMSG",5,0,"d",git-commit,content +1860,4597338,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.git/COMMIT_EDITMSG",6,0,"",git-commit,selection_keyboard +1861,4597440,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.git/COMMIT_EDITMSG",6,0," ",git-commit,content +1862,4597440,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.git/COMMIT_EDITMSG",7,0,"",git-commit,selection_keyboard +1863,4598191,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.git/COMMIT_EDITMSG",7,0,"m",git-commit,content +1864,4598192,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.git/COMMIT_EDITMSG",8,0,"",git-commit,selection_keyboard +1865,4598285,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.git/COMMIT_EDITMSG",8,0,"a",git-commit,content +1866,4598286,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.git/COMMIT_EDITMSG",9,0,"",git-commit,selection_keyboard +1867,4598417,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.git/COMMIT_EDITMSG",9,0,"i",git-commit,content +1868,4598418,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.git/COMMIT_EDITMSG",10,0,"",git-commit,selection_keyboard +1869,4598420,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.git/COMMIT_EDITMSG",10,0,"n",git-commit,content +1870,4598420,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.git/COMMIT_EDITMSG",11,0,"",git-commit,selection_keyboard +1871,4598525,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.git/COMMIT_EDITMSG",11,0," ",git-commit,content +1872,4598526,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.git/COMMIT_EDITMSG",12,0,"",git-commit,selection_keyboard +1873,4598738,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.git/COMMIT_EDITMSG",12,0,"i",git-commit,content +1874,4598739,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.git/COMMIT_EDITMSG",13,0,"",git-commit,selection_keyboard +1875,4598803,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.git/COMMIT_EDITMSG",13,0,"n",git-commit,content +1876,4598803,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.git/COMMIT_EDITMSG",14,0,"",git-commit,selection_keyboard +1877,4599042,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.git/COMMIT_EDITMSG",14,0,"o",git-commit,content +1878,4599043,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.git/COMMIT_EDITMSG",15,0,"",git-commit,selection_keyboard +1879,4599353,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.git/COMMIT_EDITMSG",14,1,"",git-commit,content +1880,4599467,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.git/COMMIT_EDITMSG",14,0,"t",git-commit,content +1881,4599468,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.git/COMMIT_EDITMSG",15,0,"",git-commit,selection_keyboard +1882,4599525,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.git/COMMIT_EDITMSG",15,0,"o",git-commit,content +1883,4599525,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.git/COMMIT_EDITMSG",16,0,"",git-commit,selection_keyboard +1884,4599615,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.git/COMMIT_EDITMSG",16,0," ",git-commit,content 
+1885,4599616,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.git/COMMIT_EDITMSG",17,0,"",git-commit,selection_keyboard +1886,4599793,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.git/COMMIT_EDITMSG",17,0,"i",git-commit,content +1887,4599794,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.git/COMMIT_EDITMSG",18,0,"",git-commit,selection_keyboard +1888,4600252,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.git/COMMIT_EDITMSG",17,1,"",git-commit,content +1889,4600970,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.git/COMMIT_EDITMSG",17,0,"s",git-commit,content +1890,4600971,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.git/COMMIT_EDITMSG",18,0,"",git-commit,selection_keyboard +1891,4601140,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.git/COMMIT_EDITMSG",18,0,"a",git-commit,content +1892,4601140,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.git/COMMIT_EDITMSG",19,0,"",git-commit,selection_keyboard +1893,4601209,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.git/COMMIT_EDITMSG",19,0,"m",git-commit,content +1894,4601209,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.git/COMMIT_EDITMSG",20,0,"",git-commit,selection_keyboard +1895,4601403,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.git/COMMIT_EDITMSG",20,0,"p",git-commit,content +1896,4601404,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.git/COMMIT_EDITMSG",21,0,"",git-commit,selection_keyboard +1897,4601457,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.git/COMMIT_EDITMSG",21,0,"l",git-commit,content +1898,4601458,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.git/COMMIT_EDITMSG",22,0,"",git-commit,selection_keyboard +1899,4601690,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.git/COMMIT_EDITMSG",22,0,"i",git-commit,content +1900,4601690,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.git/COMMIT_EDITMSG",23,0,"",git-commit,selection_keyboard +1901,4601792,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.git/COMMIT_EDITMSG",23,0,"n",git-commit,content +1902,4601793,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.git/COMMIT_EDITMSG",24,0,"",git-commit,selection_keyboard +1903,4601991,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.git/COMMIT_EDITMSG",24,0,"g",git-commit,content +1904,4601992,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.git/COMMIT_EDITMSG",25,0,"",git-commit,selection_keyboard +1905,4602243,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.git/COMMIT_EDITMSG",25,0," ",git-commit,content +1906,4602243,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.git/COMMIT_EDITMSG",26,0,"",git-commit,selection_keyboard +1907,4603485,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.git/COMMIT_EDITMSG",25,1,"",git-commit,content +1908,4609182,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.git/COMMIT_EDITMSG",555,0,"",git-commit,selection_mouse +1909,4623223,"TERMINAL",0,0,"srun",,terminal_focus +1910,4630139,"TERMINAL",0,0,"bash",,terminal_focus +1911,4630629,"TERMINAL",0,0,"srun",,terminal_focus +1912,4672524,"TERMINAL",0,0,"bash",,terminal_focus +1913,4675394,"TERMINAL",0,0,"cd $ws_dir",,terminal_command 
+1914,4675905,"TERMINAL",0,0,"ls",,terminal_command +1915,4675931,"TERMINAL",0,0,"]633;E;2025-07-21 15:43:42 ls;a9849893-e54a-428b-8c10-ef7068f4e31e]633;Ccheckpoints count_items.sh data data_new huggingface logs possibly_corrupt_files_in_this_workspace.txt scripts\r\n]0;tum_cte0515@hkn1990:/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared]633;D;0",,terminal_output +1916,4680376,"train_dynamics.py",0,0,"from dataclasses import dataclass, field\nimport os\n\nimport einops\nfrom flax.training.train_state import TrainState\nfrom jax.sharding import Mesh, PartitionSpec, NamedSharding\nfrom jax.experimental.mesh_utils import create_device_mesh\nimport optax\nimport orbax.checkpoint as ocp\nimport numpy as np\nimport dm_pix as pix\nimport jax\nimport jax.numpy as jnp\nimport tyro\nimport wandb\nimport grain\n\nfrom genie import Genie, restore_genie_components\nfrom utils.dataloader import get_dataloader\nfrom utils.lr_utils import get_lr_schedule\nfrom utils.parameter_utils import count_parameters_by_component\n\n@dataclass\nclass Args:\n # Experiment\n num_steps: int = 200_000\n seed: int = 0\n seq_len: int = 16\n image_channels: int = 3\n image_height: int = 90\n image_width: int = 160\n data_dir: str = """"\n save_ckpt: bool = False\n restore_ckpt: bool = False\n # Optimization\n batch_size: int = 36\n init_lr: float = 0.0\n max_lr: float = 3e-5\n decay_end: float = 0.0\n wsd_decay_steps: int = 10000 # NOTE: wsd_decay_steps will only be used when using a wsd-schedule\n warmup_steps: int = 5000\n lr_schedule : str = ""wsd"" # supported options: wsd, cos\n # Tokenizer\n tokenizer_dim: int = 512\n latent_patch_dim: int = 32\n num_patch_latents: int = 1024\n patch_size: int = 4\n tokenizer_num_blocks: int = 8\n tokenizer_num_heads: int = 8\n tokenizer_checkpoint: str = """"\n # LAM\n lam_dim: int = 512\n latent_action_dim: int = 32\n num_latent_actions: int = 6\n lam_patch_size: int = 16\n lam_num_blocks: int = 8\n lam_num_heads: int = 8\n lam_checkpoint: str = """"\n # Dynamics\n dyna_dim: int = 512\n dyna_num_blocks: int = 12\n dyna_num_heads: int = 8\n dropout: float = 0.0\n mask_limit: float = 0.5\n use_maskgit: bool = False\n param_dtype: jnp.dtype = jnp.float32\n dtype: jnp.dtype = jnp.bfloat16\n use_flash_attention: bool = True\n # Logging\n log: bool = False\n entity: str = """"\n project: str = """"\n name: str = ""train_dynamics""\n tags: list[str] = field(default_factory=lambda: [""dynamics""])\n log_interval: int = 5\n log_image_interval: int = 250\n ckpt_dir: str = """"\n log_checkpoint_interval: int = 25000\n log_checkpoint_keep_period: int = 20000\n log_gradients: bool = False\n wandb_id: str = """"\n\n\nargs = tyro.cli(Args)\n\n\ndef dynamics_loss_fn(params, state, inputs):\n """"""Compute masked dynamics loss""""""\n inputs[""videos""] = inputs[""videos""].astype(args.dtype) / 255.0\n outputs = state.apply_fn(\n params,\n inputs,\n training=True,\n rngs={""params"": inputs[""rng""], ""dropout"": inputs[""dropout_rng""]},\n )\n mask = outputs[""mask""]\n outputs[""token_logits""] = outputs[""token_logits""].astype(jnp.float32)\n logits = outputs[""token_logits""]\n targets = outputs[""video_tokens""]\n\n # if not args.use_maskgit:\n # logits = outputs[""token_logits""][:, :, :-1]\n # targets = outputs[""video_tokens""][:, :, 1:]\n # mask = outputs[""mask""][:, :, 1:] \n\n ce_loss = optax.softmax_cross_entropy_with_integer_labels(\n logits, targets\n )\n ce_loss = (mask * ce_loss).sum() / mask.sum()\n acc = logits.argmax(-1) == targets\n acc = (mask * acc).sum() / mask.sum()\n 
select_probs = jax.nn.softmax(logits)\n gt = inputs[""videos""].clip(0, 1).reshape(-1, *inputs[""videos""].shape[2:])\n recon = outputs[""recon""].clip(0, 1).reshape(-1, *outputs[""recon""].shape[2:])\n psnr = pix.psnr(gt, recon).mean() # type: ignore\n ssim = pix.ssim(gt, recon).mean() # type: ignore\n _, index_counts_lam = jnp.unique_counts(\n jnp.ravel(outputs[""lam_indices""]), size=args.num_latent_actions, fill_value=0\n )\n _, index_counts_tokenizer = jnp.unique_counts(\n jnp.ravel(outputs[""video_tokens""]), size=args.num_patch_latents, fill_value=0\n )\n codebook_usage_lam = (index_counts_lam != 0).mean()\n codebook_usage_tokenizer = (index_counts_tokenizer != 0).mean()\n metrics = dict(\n cross_entropy_loss=ce_loss,\n masked_token_accuracy=acc,\n select_logit=logits.max(-1).mean(),\n select_p=select_probs.max(-1).mean(),\n entropy=jax.scipy.special.entr(select_probs).sum(-1).mean(),\n psnr=psnr,\n ssim=ssim,\n codebook_usage_lam=codebook_usage_lam,\n codebook_usage_tokenizer=codebook_usage_tokenizer,\n )\n return ce_loss, (outputs[""recon""], metrics)\n\n\n@jax.jit\ndef train_step(state, inputs):\n """"""Update state and compute metrics""""""\n grad_fn = jax.value_and_grad(dynamics_loss_fn, has_aux=True, allow_int=True)\n (loss, (recon, metrics)), grads = grad_fn(state.params, state, inputs)\n state = state.apply_gradients(grads=grads)\n if args.log_gradients:\n metrics[""gradients_std/""] = jax.tree.map(\n lambda x: x.std(), grads[""params""][""dynamics""]\n )\n return state, loss, recon, metrics\n\n\nif __name__ == ""__main__"":\n jax.distributed.initialize()\n num_devices = jax.device_count()\n if num_devices == 0:\n raise ValueError(""No JAX devices found."")\n print(f""Running on {num_devices} devices."")\n\n if args.batch_size % num_devices != 0:\n raise ValueError(\n f""Global batch size {args.batch_size} must be divisible by ""\n f""number of devices {num_devices}.""\n )\n\n per_device_batch_size_for_init = args.batch_size // num_devices\n\n rng = jax.random.PRNGKey(args.seed)\n\n # --- Initialize model ---\n genie = Genie(\n # Tokenizer\n in_dim=args.image_channels,\n tokenizer_dim=args.tokenizer_dim,\n latent_patch_dim=args.latent_patch_dim,\n num_patch_latents=args.num_patch_latents,\n patch_size=args.patch_size,\n tokenizer_num_blocks=args.tokenizer_num_blocks,\n tokenizer_num_heads=args.tokenizer_num_heads,\n # LAM\n lam_dim=args.lam_dim,\n latent_action_dim=args.latent_action_dim,\n num_latent_actions=args.num_latent_actions,\n lam_patch_size=args.lam_patch_size,\n lam_num_blocks=args.lam_num_blocks,\n lam_num_heads=args.lam_num_heads,\n lam_co_train=not args.lam_checkpoint,\n # Dynamics\n dyna_dim=args.dyna_dim,\n dyna_num_blocks=args.dyna_num_blocks,\n dyna_num_heads=args.dyna_num_heads,\n dropout=args.dropout,\n mask_limit=args.mask_limit,\n use_maskgit=args.use_maskgit,\n param_dtype=args.param_dtype,\n dtype=args.dtype,\n use_flash_attention=args.use_flash_attention,\n )\n rng, _rng = jax.random.split(rng)\n image_shape = (args.image_height, args.image_width, args.image_channels)\n dummy_inputs = dict(\n videos=jnp.zeros(\n (per_device_batch_size_for_init, args.seq_len, *image_shape),\n dtype=args.dtype,\n ),\n action=jnp.zeros(\n (per_device_batch_size_for_init, args.seq_len), dtype=args.dtype\n ),\n mask_rng=_rng,\n )\n rng, _rng = jax.random.split(rng)\n init_params = genie.init(_rng, dummy_inputs)\n\n param_counts = count_parameters_by_component(init_params)\n\n if args.log and jax.process_index() == 0:\n wandb_init_kwargs = {\n ""entity"": args.entity,\n 
""project"": args.project,\n ""name"": args.name,\n ""tags"": args.tags,\n ""group"": ""debug"",\n ""config"": args,\n }\n\n if args.wandb_id:\n wandb_init_kwargs.update(\n {\n ""id"": args.wandb_id,\n ""resume"": ""allow"",\n }\n )\n wandb.init(**wandb_init_kwargs)\n\n wandb.config.update({""model_param_count"": param_counts})\n\n print(""Parameter counts:"")\n print(param_counts)\n\n # --- Initialize optimizer ---\n lr_schedule = get_lr_schedule(args.lr_schedule, \n args.init_lr, \n args.max_lr, \n args.decay_end, \n args.num_steps, \n args.warmup_steps, \n args.wsd_decay_steps)\n tx = optax.adamw(learning_rate=lr_schedule, b1=0.9, b2=0.9, weight_decay=1e-4, mu_dtype=args.dtype)\n train_state = TrainState.create(apply_fn=genie.apply, params=init_params, tx=tx)\n\n device_mesh_arr = create_device_mesh((num_devices,))\n mesh = Mesh(devices=device_mesh_arr, axis_names=(""data"",))\n\n replicated_sharding = NamedSharding(mesh, PartitionSpec())\n videos_sharding = NamedSharding(mesh, PartitionSpec(""data"", None, None, None, None))\n train_state = jax.device_put(train_state, replicated_sharding)\n\n # --- Initialize checkpoint manager ---\n step = 0\n handler_registry = ocp.handlers.DefaultCheckpointHandlerRegistry()\n handler_registry.add(\n ""model_state"", ocp.args.StandardSave, ocp.handlers.StandardCheckpointHandler\n )\n handler_registry.add(\n ""model_state"", ocp.args.StandardRestore, ocp.handlers.StandardCheckpointHandler\n )\n handler_registry.add(""dataloader_state"", grain.checkpoint.CheckpointSave, grain.checkpoint.CheckpointHandler) # type: ignore\n handler_registry.add(""dataloader_state"", grain.checkpoint.CheckpointRestore, grain.checkpoint.CheckpointHandler) # type: ignore\n\n checkpoint_options = ocp.CheckpointManagerOptions(\n save_interval_steps=args.log_checkpoint_interval,\n max_to_keep=3,\n keep_period=args.log_checkpoint_keep_period,\n step_format_fixed_length=6,\n cleanup_tmp_directories=True,\n )\n\n checkpoint_manager = ocp.CheckpointManager(\n args.ckpt_dir,\n options=checkpoint_options,\n handler_registry=handler_registry,\n )\n\n # --- Create DataLoaderIterator from dataloader ---\n array_record_files = [\n os.path.join(args.data_dir, x)\n for x in os.listdir(args.data_dir)\n if x.endswith("".array_record"")\n ]\n grain_dataloader = get_dataloader(\n array_record_files,\n args.seq_len,\n # NOTE: We deliberately pass the global batch size\n # The dataloader shards the dataset across all processes\n args.batch_size,\n *image_shape,\n num_workers=8,\n prefetch_buffer_size=1,\n seed=args.seed,\n )\n initial_state = grain_dataloader._create_initial_state()\n grain_iterator = grain.DataLoaderIterator(grain_dataloader, initial_state)\n\n # --- Restore checkpoint ---\n if args.restore_ckpt:\n # Restore full dynamics model\n abstract_train_state = jax.tree_util.tree_map(\n ocp.utils.to_shape_dtype_struct, train_state\n )\n restored = checkpoint_manager.restore(\n checkpoint_manager.latest_step(),\n args=ocp.args.Composite(\n model_state=ocp.args.StandardRestore(abstract_train_state),\n dataloader_state=grain.checkpoint.CheckpointRestore(grain_iterator),\n ),\n )\n train_state = restored[""model_state""]\n grain_iterator = restored[""dataloader_state""]\n step = checkpoint_manager.latest_step() or 0\n print(f""Restored dataloader and model state from step {step}"")\n else:\n # Restore from pre-trained tokenizer (and LAM)\n train_state = restore_genie_components(\n train_state, replicated_sharding, grain_iterator, dummy_inputs, rng, args\n )\n\n # --- TRAIN LOOP ---\n 
dataloader = (jax.make_array_from_process_local_data(videos_sharding, elem) for elem in grain_iterator) # type: ignore\n while step < args.num_steps:\n for videos in dataloader:\n # videos = np.load(""overfit_dir/corner_8repl.npy"")\n # videos = jax.make_array_from_process_local_data(videos_sharding, videos)\n # while True:\n # --- Train step ---\n rng, _rng, _rng_dropout, _rng_mask = jax.random.split(rng, 4)\n\n inputs = dict(\n videos=videos,\n rng=_rng,\n dropout_rng=_rng_dropout,\n mask_rng=_rng_mask,\n )\n train_state, loss, recon, metrics = train_step(train_state, inputs)\n metrics[""lr""] = lr_schedule(step)\n print(f""Step {step}, loss: {loss}"")\n step += 1\n\n # --- Logging ---\n if args.log:\n if step % args.log_interval == 0 and jax.process_index() == 0:\n wandb.log(\n {\n ""loss"": loss,\n ""step"": step,\n **metrics,\n }\n )\n if step % args.log_image_interval == 0:\n gt_seq = inputs[""videos""][0].astype(jnp.float32) / 255.0\n recon_seq = recon[0].clip(0, 1)\n comparison_seq = jnp.concatenate((gt_seq, recon_seq), axis=1)\n comparison_seq = einops.rearrange(\n comparison_seq * 255, ""t h w c -> h (t w) c""\n )\n if jax.process_index() == 0:\n log_images = dict(\n image=wandb.Image(np.asarray(gt_seq[args.seq_len - 1])),\n recon=wandb.Image(np.asarray(recon_seq[args.seq_len - 1])),\n true_vs_recon=wandb.Image(\n np.asarray(comparison_seq.astype(np.uint8))\n ),\n )\n wandb.log(log_images)\n # --- Checkpointing ---\n if args.save_ckpt and step % args.log_checkpoint_interval == 0:\n checkpoint_manager.save(\n step,\n args=ocp.args.Composite(\n model_state=ocp.args.StandardSave(train_state),\n dataloader_state=grain.checkpoint.CheckpointSave(\n grain_iterator\n ),\n ),\n )\n print(f""Saved checkpoint at step {step}"")\n if step >= args.num_steps:\n break\n\n checkpoint_manager.close()\n",python,tab +1917,4684822,"models/dynamics.py",0,0,"",python,tab +1918,4685029,"Untitled-1",0,0,"",plaintext,tab +1919,4686446,"models/dynamics.py",0,0,"",python,tab +1920,4690438,"models/dynamics.py",0,0,"",python,tab +1921,4692127,"genie.py",0,0,"",python,tab +1922,4694492,"genie.py",0,0,"",python,tab +1923,4695696,"sample.py",0,0,"from dataclasses import dataclass\nfrom typing import Optional\nimport time\nimport os\n\nimport dm_pix as pix\nimport einops\nimport jax\nimport jax.numpy as jnp\nimport flax.linen as nn\nimport numpy as np\nfrom flax.training.train_state import TrainState\nimport grain\nimport orbax.checkpoint as ocp\nimport optax\nfrom PIL import Image, ImageDraw\nimport tyro\n\nfrom genie import Genie\nfrom utils.dataloader import get_dataloader\n\n\n@dataclass\nclass Args:\n # Experiment\n seed: int = 0\n seq_len: int = 16\n image_channels: int = 3\n image_height: int = 90\n image_width: int = 160\n data_dir: str = ""data/coinrun_episodes""\n checkpoint: str = """"\n checkpoint_step: Optional[int] = None\n # Sampling\n batch_size: int = 1\n maskgit_steps: int = 25\n temperature: float = 1.0\n sample_argmax: bool = True\n start_frame: int = 0\n # Tokenizer checkpoint\n tokenizer_dim: int = 512\n latent_patch_dim: int = 32\n num_patch_latents: int = 1024\n patch_size: int = 4\n tokenizer_num_blocks: int = 8\n tokenizer_num_heads: int = 8\n # LAM checkpoint\n lam_dim: int = 512\n latent_action_dim: int = 32\n num_latent_actions: int = 6\n lam_patch_size: int = 16\n lam_num_blocks: int = 8\n lam_num_heads: int = 8\n lam_co_train: bool = True\n # Dynamics checkpoint\n dyna_dim: int = 512\n dyna_num_blocks: int = 12\n dyna_num_heads: int = 8\n param_dtype: jnp.dtype = jnp.float32\n 
dtype: jnp.dtype = jnp.bfloat16\n use_flash_attention: bool = True\n\n\nargs = tyro.cli(Args)\nrng = jax.random.PRNGKey(args.seed)\n\n# --- Load Genie checkpoint ---\ngenie = Genie(\n # Tokenizer\n in_dim=args.image_channels,\n tokenizer_dim=args.tokenizer_dim,\n latent_patch_dim=args.latent_patch_dim,\n num_patch_latents=args.num_patch_latents,\n patch_size=args.patch_size,\n tokenizer_num_blocks=args.tokenizer_num_blocks,\n tokenizer_num_heads=args.tokenizer_num_heads,\n # LAM\n lam_dim=args.lam_dim,\n latent_action_dim=args.latent_action_dim,\n num_latent_actions=args.num_latent_actions,\n lam_patch_size=args.lam_patch_size,\n lam_num_blocks=args.lam_num_blocks,\n lam_num_heads=args.lam_num_heads,\n lam_co_train=args.lam_co_train,\n # Dynamics\n dyna_dim=args.dyna_dim,\n dyna_num_blocks=args.dyna_num_blocks,\n dyna_num_heads=args.dyna_num_heads,\n use_maskgit=False,\n param_dtype=args.param_dtype,\n dtype=args.dtype,\n use_flash_attention=args.use_flash_attention,\n)\nrng, _rng = jax.random.split(rng)\nimage_shape = (args.image_height, args.image_width, args.image_channels)\ndummy_inputs = dict(\n videos=jnp.zeros((args.batch_size, args.seq_len, *image_shape), dtype=jnp.float32),\n mask_rng=_rng,\n)\nrng, _rng = jax.random.split(rng)\nparams = genie.init(_rng, dummy_inputs)\n\ndummy_train_state = TrainState.create(\n apply_fn=genie.apply,\n params=params,\n tx=optax.adamw(\n optax.warmup_cosine_decay_schedule(\n 0, 0, 1, 2 # dummy values\n )\n ), \n)\nhandler_registry = ocp.handlers.DefaultCheckpointHandlerRegistry()\nhandler_registry.add('model_state', ocp.args.StandardRestore, ocp.handlers.StandardCheckpointHandler)\ncheckpoint_manager = ocp.CheckpointManager(\n args.checkpoint,\n options=ocp.CheckpointManagerOptions(step_format_fixed_length=6),\n handler_registry=handler_registry\n)\nabstract_train_state = jax.tree_util.tree_map(\n ocp.utils.to_shape_dtype_struct, dummy_train_state\n)\n\nrestored = checkpoint_manager.restore(\n args.checkpoint_step or checkpoint_manager.latest_step(),\n args=ocp.args.Composite(\n model_state=ocp.args.StandardRestore(abstract_train_state),\n ),\n)\nrestored_train_state = restored[""model_state""]\nparams = restored_train_state.params\n\n\ndef _sampling_wrapper(module, batch):\n # return module.sample_maskgit(batch, args.seq_len, args.maskgit_steps, args.temperature, args.sample_argmax)\n return module.sample_causal(batch, args.seq_len, args.temperature, args.sample_argmax)\n\n# --- Define autoregressive sampling loop ---\ndef _autoreg_sample(rng, video_batch, action_batch):\n vid = video_batch[:, : args.start_frame + 1]\n sampling_fn = jax.jit(nn.apply(_sampling_wrapper, genie)) \n rng, _rng = jax.random.split(rng)\n batch = dict(videos=vid, latent_actions=action_batch, rng=_rng)\n generated_vid = sampling_fn(\n params,\n batch\n )\n return generated_vid\n\ndef _get_dataloader_iterator():\n array_record_files = [\n os.path.join(args.data_dir, x)\n for x in os.listdir(args.data_dir)\n if x.endswith("".array_record"")\n ]\n grain_dataloader = get_dataloader(\n array_record_files,\n args.seq_len,\n # NOTE: We deliberately pass the global batch size\n # The dataloader shards the dataset across all processes\n args.batch_size,\n *image_shape,\n num_workers=0,\n prefetch_buffer_size=1,\n seed=args.seed,\n )\n initial_state = grain_dataloader._create_initial_state()\n grain_iterator = grain.DataLoaderIterator(grain_dataloader, initial_state)\n return grain_iterator\n\n# --- Get video + latent actions ---\n# grain_iterator = _get_dataloader_iterator()\n# 
video_batch = next(grain_iterator)\nvideo_batch = np.load(""overfit_dir/single_sample_corner.npy"")\n\nvideo_batch = video_batch.astype(args.dtype) #/ 255.0\n# Get latent actions for all videos in the batch\nbatch = dict(videos=video_batch[:,:args.seq_len])\naction_batch = genie.apply(params, batch, False, method=Genie.vq_encode)\naction_batch = action_batch.reshape(video_batch.shape[0], args.seq_len - 1, 1)\n\n# --- Sample + evaluate video ---\nprint(""autoreg sampling..."")\nvid = _autoreg_sample(rng, video_batch, action_batch)\nprint(""autoreg sampling done. calculating ssim and saving video"")\ngt = video_batch[:, : vid.shape[1]].clip(0, 1).reshape(-1, *video_batch.shape[2:])\nrecon = vid.clip(0, 1).reshape(-1, *vid.shape[2:])\nssim = pix.ssim(gt[:, args.start_frame + 1 :], recon[:, args.start_frame + 1 :]).mean()\nprint(f""SSIM: {ssim}"")\n\n# --- Construct video ---\ntrue_videos = (video_batch * 255).astype(np.uint8)\npred_videos = (vid * 255).astype(np.uint8)\nvideo_comparison = np.zeros((2, *vid.shape), dtype=np.uint8)\nvideo_comparison[0] = true_videos[:, :args.seq_len]\nvideo_comparison[1] = pred_videos\nframes = einops.rearrange(video_comparison, ""n b t h w c -> t (b h) (n w) c"")\n\n# --- Save video --- \nimgs = [Image.fromarray(img) for img in frames]\n# Write actions on each frame, on each row (i.e., for each video in the batch, on the GT row)\nfor t, img in enumerate(imgs[1:]):\n d = ImageDraw.Draw(img)\n for row in range(action_batch.shape[0]):\n action = action_batch[row, t, 0]\n y_offset = row * video_batch.shape[2] + 2\n d.text((2, y_offset), f""{action}"", fill=255)\nimgs[0].save(\n f""generation_{time.time()}.gif"",\n save_all=True,\n append_images=imgs[1:],\n duration=250,\n loop=0,\n)\n",python,tab +1924,4699054,"sample.py",0,0,"",python,tab +1925,4699735,"sample.py",4515,0,"",python,selection_mouse +1926,4699736,"sample.py",4514,0,"",python,selection_command +1927,4700486,"sample.py",4649,0,"",python,selection_mouse +1928,4700497,"sample.py",4648,0,"",python,selection_command +1929,4705464,"TERMINAL",0,0,"srun",,terminal_focus +1930,4708709,"TERMINAL",0,0,"bash",,terminal_focus +1931,4711608,"TERMINAL",0,0,"queue",,terminal_command +1932,4711657,"TERMINAL",0,0,"]633;E;2025-07-21 15:44:18 queue;ae54d867-0567-4fb2-95c9-86b7932c267e]633;C",,terminal_output +1933,4711720,"TERMINAL",0,0,"[?1049h(B[?7hEvery 1.0s: squeue --mehkn1990.localdomain: Mon Jul 21 15:44:18 2025JOBID PARTITION NAME USER ST\tTIME NODES NODELIST(REASON)3365092 accelerat interact tum_cte0 PD\t0:00\t 2 (Priority)3365091 accelerat interact tum_cte0 R 1:16:53\t 1 hkn0707",,terminal_output +1934,4712722,"TERMINAL",0,0,"94",,terminal_output +1935,4713485,"TERMINAL",0,0,"[?1049l\r[?1l>]0;tum_cte0515@hkn1990:~/Projects/jafar]633;D;0",,terminal_output +1936,4715141,"TERMINAL",0,0,"idling",,terminal_command +1937,4715206,"TERMINAL",0,0,"]633;E;2025-07-21 15:44:21 idling;ae54d867-0567-4fb2-95c9-86b7932c267e]633;C[?1049h(B[?7hEvery 1.0s: sinfo_t_idlehkn1990.localdomain: Mon Jul 21 15:44:21 2025Partition dev_cpuonly:\t 8 nodes idle\rPartition cpuonly: 25 nodes idle\rPartition dev_accelerated:\t 0 nodes idle\rPartition accelerated:\t 1 nodes idle\rPartition dev_accelerated-h100 :\t 0 nodes idle\rPartition accelerated-h100:\t 0 nodes idle\rPartition large:\t 7 nodes idle",,terminal_output +1938,4716256,"TERMINAL",0,0,"2",,terminal_output +1939,4717280,"TERMINAL",0,0,"3",,terminal_output +1940,4717415,"TERMINAL",0,0,"[?1049l\r[?1l>]0;tum_cte0515@hkn1990:~/Projects/jafar]633;D;0",,terminal_output 
+1941,4720689,"TERMINAL",0,0,"n --overlap --jobid=3333584 --pty /bin/bash",,terminal_command +1942,4720705,"TERMINAL",0,0,"^C[?2004l\r[?2004h[?2004l\r\r\n]633;E;;ae54d867-0567-4fb2-95c9-86b7932c267e]633;C]0;tum_cte0515@hkn1990:~/Projects/jafar]633;D",,terminal_output +1943,4735598,"TERMINAL",0,0,"salloc --time=10:00:00 --partition=accelerated --nodes=1 --ntasks-per-node=1 --gres=gpu:1 --cpus-per-task=5",,terminal_command +1944,4735656,"TERMINAL",0,0,"]633;E;2025-07-21 15:44:42 salloc --time=10:00:00 --partition=accelerated --nodes=1 --ntasks-per-node=1 --gres=gpu:1 --cpus-per-task=5;ae54d867-0567-4fb2-95c9-86b7932c267e]633;Csalloc: Pending job allocation 3365333\r\nsalloc: job 3365333 queued and waiting for resources\r\n",,terminal_output +1945,4743424,"TERMINAL",0,0,"srun",,terminal_focus +1946,4745334,"TERMINAL",0,0,"salloc",,terminal_focus +1947,4759071,"TERMINAL",0,0,"",,terminal_focus +1948,4761173,"TERMINAL",0,0,"source /home/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/bin/activate",,terminal_command +1949,4761180,"TERMINAL",0,0,"]633;E;2025-07-21 15:45:07 source /home/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/bin/activate;2d5a2855-c93a-4fd7-924c-9ba734144a47]633;C]0;tum_cte0515@hkn1990:~/Projects/jafar]633;D;0]633;P;Cwd=/home/hk-project-p0023960/tum_cte0515/Projects/jafar",,terminal_output +1950,4762451,"TERMINAL",0,0,"idling",,terminal_command +1951,4762536,"TERMINAL",0,0,"]633;E;2025-07-21 15:45:09 idling;2d5a2855-c93a-4fd7-924c-9ba734144a47]633;C[?1049h(B[?7hEvery 1.0s: sinfo_t_idlehkn1990.localdomain: Mon Jul 21 15:45:09 2025Partition dev_cpuonly:\t 8 nodes idle\rPartition cpuonly: 26 nodes idle\rPartition dev_accelerated:\t 1 nodes idle\rPartition accelerated:\t 1 nodes idle\rPartition dev_accelerated-h100 :\t 0 nodes idle\rPartition accelerated-h100:\t 0 nodes idle\rPartition large:\t 7 nodes idle",,terminal_output +1952,4763559,"TERMINAL",0,0,"10",,terminal_output +1953,4764690,"TERMINAL",0,0,"1",,terminal_output +1954,4765655,"TERMINAL",0,0,"2",,terminal_output +1955,4766283,"TERMINAL",0,0,"[?1049l\r[?1l>]0;tum_cte0515@hkn1990:~/Projects/jafar]633;D;0",,terminal_output +1956,4773844,"TERMINAL",0,0,"salloc --time=10:00:00 --partition=dev_accelerated --nodes=1 --ntasks-per-node=1 --gres=gpu:1 --cpus-per-task=5",,terminal_command +1957,4773866,"TERMINAL",0,0,"]633;E;2025-07-21 15:45:20 salloc --time=10:00:00 --partition=dev_accelerated --nodes=1 --ntasks-per-node=1 --gres=gpu:1 --cpus-per-task=5;2d5a2855-c93a-4fd7-924c-9ba734144a47]633;Csalloc: error: Job submit/allocate failed: Requested time limit is invalid (missing or exceeds some limit)\r\n]0;tum_cte0515@hkn1990:~/Projects/jafar]633;D;1",,terminal_output +1958,4778310,"TERMINAL",0,0,"salloc --time=01:00:00 --partition=dev_accelerated --nodes=1 --ntasks-per-node=1 --gres=gpu:1 --cpus-per-task=5",,terminal_command +1959,4778369,"TERMINAL",0,0,"]633;E;2025-07-21 15:45:24 salloc --time=01:00:00 --partition=dev_accelerated --nodes=1 --ntasks-per-node=1 --gres=gpu:1 --cpus-per-task=5;2d5a2855-c93a-4fd7-924c-9ba734144a47]633;Csalloc: Pending job allocation 3365334\r\nsalloc: job 3365334 queued and waiting for resources\r\n",,terminal_output +1960,4781135,"TERMINAL",0,0,"bash",,terminal_focus +1961,4782120,"TERMINAL",0,0,"idling",,terminal_command +1962,4782170,"TERMINAL",0,0,"]633;E;2025-07-21 15:45:28 idling;a9849893-e54a-428b-8c10-ef7068f4e31e]633;C",,terminal_output +1963,4782233,"TERMINAL",0,0,"[?1049h(B[?7hEvery 1.0s: sinfo_t_idlehkn1990.localdomain: Mon Jul 21 15:45:28 2025Partition dev_cpuonly:\t 8 
nodes idle\rPartition cpuonly: 26 nodes idle\rPartition dev_accelerated:\t 1 nodes idle\rPartition accelerated:\t 1 nodes idle\rPartition dev_accelerated-h100 :\t 0 nodes idle\rPartition accelerated-h100:\t 0 nodes idle\rPartition large:\t 7 nodes idle",,terminal_output +1964,4783329,"TERMINAL",0,0,"9",,terminal_output +1965,4784296,"TERMINAL",0,0,"30",,terminal_output +1966,4785072,"TERMINAL",0,0,"Every 1.0s: sinfo_t_idlehkn1990.localdomain: Mon Jul 21 15:45:31 2025Partition dev_cpuonly:\t 8 nodes idle\rPartition cpuonly: 26 nodes idle\rPartition dev_accelerated:\t 1 nodes idle\rPartition accelerated:\t 1 nodes idle\rPartition dev_accelerated-h100 :\t 0 nodes idle\rPartition accelerated-h100:\t 0 nodes idle\rPartition large:\t 7 nodes idle",,terminal_output +1967,4785125,"TERMINAL",0,0,"Every 1.0s: sinfo_t_idlehkn1990.localdomain: Mon Jul 21 15:45:31 2025Partition dev_cpuonly:\t 8 nodes idle\rPartition cpuonly: 26 nodes idle\rPartition dev_accelerated:\t 1 nodes idle\rPartition accelerated:\t 1 nodes idle\rPartition dev_accelerated-h100 :\t 0 nodes idle\rPartition accelerated-h100:\t 0 nodes idle\rPartition large:\t 7 nodes idle",,terminal_output +1968,4786196,"TERMINAL",0,0,"Every 1.0s: sinfo_t_idlehkn1990.localdomain: Mon Jul 21 15:45:32 2025Partition dev_cpuonly:\t 8 nodes idle\rPartition cpuonly: 26 nodes idle\rPartition dev_accelerated:\t 1 nodes idle\rPartition accelerated:\t 1 nodes idle\rPartition dev_accelerated-h100 :\t 0 nodes idle\rPartition accelerated-h100:\t 0 nodes idle\rPartition large:\t 7 nodes idle",,terminal_output +1969,4786354,"TERMINAL",0,0,"salloc: job 3365334 has been allocated resources\r\nsalloc: Granted job allocation 3365334\r\n",,terminal_output +1970,4786479,"TERMINAL",0,0,"salloc: Waiting for resource configuration\r\n",,terminal_output +1971,4787158,"TERMINAL",0,0,"3",,terminal_output +1972,4788245,"TERMINAL",0,0,"47",,terminal_output +1973,4789223,"TERMINAL",0,0,"5",,terminal_output +1974,4790294,"TERMINAL",0,0,"6",,terminal_output +1975,4790460,"TERMINAL",0,0,"[?1049l\r[?1l>]0;tum_cte0515@hkn1990:/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared]633;D;0",,terminal_output +1976,4792031,"TERMINAL",0,0,"srun",,terminal_focus +1977,4794415,"TERMINAL",0,0,"salloc",,terminal_focus +1978,4801918,"TERMINAL",0,0,"srun",,terminal_focus +1979,4806261,"TERMINAL",0,0,"^C",,terminal_output +1980,4807415,"TERMINAL",0,0,"^C",,terminal_output +1981,4807611,"TERMINAL",0,0,"^C",,terminal_output +1982,4807761,"TERMINAL",0,0,"^C",,terminal_output +1983,4808526,"TERMINAL",0,0,"^C^C^C^C^C^C^C^C^C^C^C",,terminal_output +1984,4808613,"TERMINAL",0,0,"^C^C^C",,terminal_output +1985,4808668,"TERMINAL",0,0,"^C",,terminal_output +1986,4809359,"TERMINAL",0,0,"^C",,terminal_output +1987,4809561,"TERMINAL",0,0,"^C",,terminal_output +1988,4809759,"TERMINAL",0,0,"^C",,terminal_output +1989,4809867,"TERMINAL",0,0,"^C",,terminal_output +1990,4810048,"TERMINAL",0,0,"^C",,terminal_output +1991,4813527,"TERMINAL",0,0,"salloc: Nodes hkn0401 are ready for job\r\n",,terminal_output +1992,4813611,"TERMINAL",0,0,"bash",,terminal_focus +1993,4814352,"TERMINAL",0,0,"]0;tum_cte0515@hkn0401:~/Projects/jafar[?2004h[tum_cte0515@hkn0401 jafar]$ ",,terminal_output +1994,4814629,"TERMINAL",0,0,"queue",,terminal_command +1995,4814678,"TERMINAL",0,0,"]633;E;2025-07-21 15:46:01 queue;a9849893-e54a-428b-8c10-ef7068f4e31e]633;C",,terminal_output +1996,4814736,"TERMINAL",0,0,"[?1049h(B[?7hEvery 1.0s: squeue --mehkn1990.localdomain: Mon Jul 21 15:46:01 2025JOBID PARTITION NAME USER ST\tTIME NODES 
NODELIST(REASON)3365092 accelerat interact tum_cte0 PD\t0:00\t 2 (Priority)3365333 accelerat interact tum_cte0 PD\t0:00\t 1 (Priority)3365091 accelerat interact tum_cte0 R 1:18:36\t 1 hkn07073365334 dev_accel interact tum_cte0 R\t0:28\t 1 hkn0401",,terminal_output +1997,4815751,"TERMINAL",0,0,"279",,terminal_output +1998,4816792,"TERMINAL",0,0,"[?1049l\r[?1l>]0;tum_cte0515@hkn1990:/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared]633;D;0",,terminal_output +1999,4820681,"TERMINAL",0,0,"pkill python",,terminal_command +2000,4820727,"TERMINAL",0,0,"]633;E;2025-07-21 15:46:07 pkill python;a9849893-e54a-428b-8c10-ef7068f4e31e]633;C",,terminal_output +2001,4820802,"TERMINAL",0,0,"salloc: job 3365333 has been allocated resources\r\nsalloc: Granted job allocation 3365333\r\n",,terminal_output +2002,4820835,"TERMINAL",0,0,"pkill: killing pid 3772 failed: Operation not permitted\r\npkill: killing pid 2527842 failed: Operation not permitted\r\npkill: killing pid 2527939 failed: Operation not permitted\r\npkill: killing pid 2755770 failed: Operation not permitted\r\npkill: killing pid 2756101 failed: Operation not permitted\r\npkill: killing pid 2835162 failed: Operation not permitted\r\npkill: killing pid 3108349 failed: Operation not permitted\r\npkill: killing pid 3150321 failed: Operation not permitted\r\npkill: killing pid 3195858 failed: Operation not permitted\r\npkill: killing pid 3195984 failed: Operation not permitted\r\npkill: killing pid 3196126 failed: Operation not permitted\r\npkill: killing pid 3536484 failed: Operation not permitted\r\npkill: killing pid 3550436 failed: Operation not permitted\r\n",,terminal_output +2003,4820949,"TERMINAL",0,0,"salloc: Waiting for resource configuration\r\n",,terminal_output +2004,4840766,"TERMINAL",0,0,"srun",,terminal_focus +2005,4842297,"slurm/jobs/mihir/horeka/yolo-runs/sampling.sh",0,0,"\n# Log the sbatch script\ncat $0\n\nmodule unload mpi/openmpi/5.0\nmodule unload devel/cuda/12.4\n# source .venv/bin/activate\n\narray_records_dir=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_new/open_ai_minecraft_arrayrecords_chunked\n\njob_name=$SLURM_JOB_NAME\nslurm_job_id=$SLURM_JOB_ID\n\nCHECKPOINT_DIR=$ws_dir/checkpoints/$job_name/$slurm_job_id\nmkdir -p $CHECKPOINT_DIR\n\ntokenizer_ckpt_dir=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/big-runs/tokenizer-lr-scaling/train_tokenizer_lr_sweep_1e-4\ndynamics_ckpt_dir=$1\necho $dynamics_ckpt_dir\n\nenv | grep SLURM\n\npython sample.py \\n --checkpoint $dynamics_ckpt_dir \\n --dyna_dim=128 \\n --dyna_num_blocks=2 \\n --dyna_num_heads=4 \\n --seq_len=2 \\n --data_dir $array_records_dir\n\n",shellscript,tab +2006,4843928,"slurm/jobs/mihir/horeka/yolo-runs/sampling.sh",594,0,"",shellscript,selection_mouse +2007,4844072,"slurm/jobs/mihir/horeka/yolo-runs/sampling.sh",594,6,"python",shellscript,selection_mouse +2008,4844251,"slurm/jobs/mihir/horeka/yolo-runs/sampling.sh",594,21,"python sample.py \\n ",shellscript,selection_mouse +2009,4844268,"slurm/jobs/mihir/horeka/yolo-runs/sampling.sh",594,24,"python sample.py \\n -",shellscript,selection_mouse +2010,4844283,"slurm/jobs/mihir/horeka/yolo-runs/sampling.sh",594,35,"python sample.py \\n --checkpoint",shellscript,selection_mouse +2011,4844338,"slurm/jobs/mihir/horeka/yolo-runs/sampling.sh",594,71,"python sample.py \\n --checkpoint $dynamics_ckpt_dir \\n --dyna_dim",shellscript,selection_mouse +2012,4844339,"slurm/jobs/mihir/horeka/yolo-runs/sampling.sh",594,72,"python sample.py \\n --checkpoint $dynamics_ckpt_dir \\n 
--dyna_dim=",shellscript,selection_mouse +2013,4844339,"slurm/jobs/mihir/horeka/yolo-runs/sampling.sh",594,75,"python sample.py \\n --checkpoint $dynamics_ckpt_dir \\n --dyna_dim=128",shellscript,selection_mouse +2014,4844342,"slurm/jobs/mihir/horeka/yolo-runs/sampling.sh",594,77,"python sample.py \\n --checkpoint $dynamics_ckpt_dir \\n --dyna_dim=128 \",shellscript,selection_mouse +2015,4844398,"slurm/jobs/mihir/horeka/yolo-runs/sampling.sh",594,103,"python sample.py \\n --checkpoint $dynamics_ckpt_dir \\n --dyna_dim=128 \\n --dyna_num_blocks=2 \",shellscript,selection_mouse +2016,4844458,"slurm/jobs/mihir/horeka/yolo-runs/sampling.sh",594,128,"python sample.py \\n --checkpoint $dynamics_ckpt_dir \\n --dyna_dim=128 \\n --dyna_num_blocks=2 \\n --dyna_num_heads=4 \",shellscript,selection_mouse +2017,4844675,"slurm/jobs/mihir/horeka/yolo-runs/sampling.sh",594,146,"python sample.py \\n --checkpoint $dynamics_ckpt_dir \\n --dyna_dim=128 \\n --dyna_num_blocks=2 \\n --dyna_num_heads=4 \\n --seq_len=2 \",shellscript,selection_mouse +2018,4844774,"slurm/jobs/mihir/horeka/yolo-runs/sampling.sh",594,180,"python sample.py \\n --checkpoint $dynamics_ckpt_dir \\n --dyna_dim=128 \\n --dyna_num_blocks=2 \\n --dyna_num_heads=4 \\n --seq_len=2 \\n --data_dir $array_records_dir",shellscript,selection_mouse +2019,4845061,"slurm/jobs/mihir/horeka/yolo-runs/sampling.sh",774,0,"",shellscript,selection_mouse +2020,4845062,"slurm/jobs/mihir/horeka/yolo-runs/sampling.sh",773,0,"",shellscript,selection_command +2021,4845427,"slurm/jobs/mihir/horeka/yolo-runs/sampling.sh",757,17,"array_records_dir",shellscript,selection_mouse +2022,4845430,"slurm/jobs/mihir/horeka/yolo-runs/sampling.sh",758,16,"rray_records_dir",shellscript,selection_command +2023,4845629,"slurm/jobs/mihir/horeka/yolo-runs/sampling.sh",740,18,"\n --data_dir $a",shellscript,selection_mouse +2024,4845706,"slurm/jobs/mihir/horeka/yolo-runs/sampling.sh",720,38," \\n --seq_len=2 \\n --data_dir $a",shellscript,selection_mouse +2025,4845729,"slurm/jobs/mihir/horeka/yolo-runs/sampling.sh",704,54,"dyna_num_heads=4 \\n --seq_len=2 \\n --data_dir $a",shellscript,selection_mouse +2026,4845783,"slurm/jobs/mihir/horeka/yolo-runs/sampling.sh",678,80,"dyna_num_blocks=2 \\n --dyna_num_heads=4 \\n --seq_len=2 \\n --data_dir $a",shellscript,selection_mouse +2027,4845873,"slurm/jobs/mihir/horeka/yolo-runs/sampling.sh",657,101,"dyna_dim=128 \\n --dyna_num_blocks=2 \\n --dyna_num_heads=4 \\n --seq_len=2 \\n --data_dir $a",shellscript,selection_mouse +2028,4845931,"slurm/jobs/mihir/horeka/yolo-runs/sampling.sh",656,102,"-dyna_dim=128 \\n --dyna_num_blocks=2 \\n --dyna_num_heads=4 \\n --seq_len=2 \\n --data_dir $a",shellscript,selection_mouse +2029,4845938,"slurm/jobs/mihir/horeka/yolo-runs/sampling.sh",655,103,"--dyna_dim=128 \\n --dyna_num_blocks=2 \\n --dyna_num_heads=4 \\n --seq_len=2 \\n --data_dir $a",shellscript,selection_mouse +2030,4845992,"slurm/jobs/mihir/horeka/yolo-runs/sampling.sh",654,104," --dyna_dim=128 \\n --dyna_num_blocks=2 \\n --dyna_num_heads=4 \\n --seq_len=2 \\n --data_dir $a",shellscript,selection_mouse +2031,4845993,"slurm/jobs/mihir/horeka/yolo-runs/sampling.sh",616,142," --checkpoint $dynamics_ckpt_dir \\n --dyna_dim=128 \\n --dyna_num_blocks=2 \\n --dyna_num_heads=4 \\n --seq_len=2 \\n --data_dir $a",shellscript,selection_mouse +2032,4846080,"slurm/jobs/mihir/horeka/yolo-runs/sampling.sh",615,143," --checkpoint $dynamics_ckpt_dir \\n --dyna_dim=128 \\n --dyna_num_blocks=2 \\n --dyna_num_heads=4 \\n --seq_len=2 \\n --data_dir 
$a",shellscript,selection_mouse +2033,4846155,"slurm/jobs/mihir/horeka/yolo-runs/sampling.sh",614,144," --checkpoint $dynamics_ckpt_dir \\n --dyna_dim=128 \\n --dyna_num_blocks=2 \\n --dyna_num_heads=4 \\n --seq_len=2 \\n --data_dir $a",shellscript,selection_mouse +2034,4846209,"slurm/jobs/mihir/horeka/yolo-runs/sampling.sh",613,145," --checkpoint $dynamics_ckpt_dir \\n --dyna_dim=128 \\n --dyna_num_blocks=2 \\n --dyna_num_heads=4 \\n --seq_len=2 \\n --data_dir $a",shellscript,selection_mouse +2035,4846262,"slurm/jobs/mihir/horeka/yolo-runs/sampling.sh",594,164,"python sample.py \\n --checkpoint $dynamics_ckpt_dir \\n --dyna_dim=128 \\n --dyna_num_blocks=2 \\n --dyna_num_heads=4 \\n --seq_len=2 \\n --data_dir $a",shellscript,selection_mouse +2036,4846702,"slurm/jobs/mihir/horeka/yolo-runs/sampling.sh",594,18,"python sample.py \",shellscript,selection_command +2037,4849248,"slurm/jobs/mihir/horeka/yolo-runs/sampling.sh",596,0,"",shellscript,selection_mouse +2038,4849837,"slurm/jobs/mihir/horeka/yolo-runs/sampling.sh",594,0,"",shellscript,selection_mouse +2039,4853486,"TERMINAL",0,0,"salloc: Nodes hkn0509 are ready for job\r\n",,terminal_output +2040,4854327,"TERMINAL",0,0,"]0;tum_cte0515@hkn0509:~/Projects/jafar[?2004h[tum_cte0515@hkn0509 jafar]$ ",,terminal_output +2041,4868425,"TERMINAL",0,0,"srun",,terminal_focus +2042,4868503,"TERMINAL",0,0,"\r[tum_cte0515@hkn0509 jafar]$ ",,terminal_output +2043,4871175,"TERMINAL",0,0,"srun",,terminal_focus +2044,4874273,"TERMINAL",0,0,"srun",,terminal_focus +2045,4883435,"TERMINAL",0,0,"srun",,terminal_focus +2046,4884658,"TERMINAL",0,0,"so",,terminal_output +2047,4884769,"TERMINAL",0,0,"u",,terminal_output +2048,4884823,"TERMINAL",0,0,"r",,terminal_output +2049,4884998,"TERMINAL",0,0,"c",,terminal_output +2050,4885157,"TERMINAL",0,0,"e",,terminal_output +2051,4885218,"TERMINAL",0,0," ",,terminal_output +2052,4885280,"TERMINAL",0,0,".",,terminal_output +2053,4885406,"TERMINAL",0,0,"[?25lv[?25h",,terminal_output +2054,4885515,"TERMINAL",0,0,"env/",,terminal_output +2055,4885699,"TERMINAL",0,0,"[?25lb[?25h",,terminal_output +2056,4885809,"TERMINAL",0,0,"in/",,terminal_output +2057,4886008,"TERMINAL",0,0,"[?25lac[?25h",,terminal_output +2058,4886255,"TERMINAL",0,0,"tivate",,terminal_output +2059,4886716,"TERMINAL",0,0,"\r\n[?2004l\r]0;tum_cte0515@hkn0509:~/Projects/jafar[?2004h(jafar) [tum_cte0515@hkn0509 jafar]$ ",,terminal_output +2060,4887455,"TERMINAL",0,0,"[?25lqu[?25h[?25lu[?25h",,terminal_output +2061,4887635,"TERMINAL",0,0,"[?25leu[?25h",,terminal_output +2062,4887750,"TERMINAL",0,0,"[?25le[?25h",,terminal_output +2063,4887811,"TERMINAL",0,0,"\r\n[?2004l\r[?1049h(B[?7hEvery 1.0s: squeue --mehkn0509.localdomain: Mon Jul 21 15:47:14 2025JOBID PARTITION NAME USER ST\tTIME NODES NODELIST(REASON)3365092 accelerat interact tum_cte0 PD\t0:00\t 2 (Priority)3365333 accelerat interact tum_cte0 R\t1:07\t 1 hkn05093365091 accelerat interact tum_cte0 R 1:19:49\t 1 hkn07073365334 dev_accel interact tum_cte0 R\t1:41\t 1 hkn0401",,terminal_output +2064,4888810,"TERMINAL",0,0,"58502",,terminal_output +2065,4889826,"TERMINAL",0,0,"6913",,terminal_output +2066,4890847,"TERMINAL",0,0,"71024",,terminal_output +2067,4891872,"TERMINAL",0,0,"8135",,terminal_output +2068,4891991,"TERMINAL",0,0,"[?1049l\r[?1l>]0;tum_cte0515@hkn0509:~/Projects/jafar[?2004h(jafar) [tum_cte0515@hkn0509 jafar]$ ",,terminal_output +2069,4893905,"TERMINAL",0,0,"queue",,terminal_output +2070,4894647,"TERMINAL",0,0,"\r\n[?2004l\r[?1049h(B[?7hEvery 1.0s: squeue --mehkn0509.localdomain: Mon Jul 21 
15:47:21 2025JOBID PARTITION NAME USER ST\tTIME NODES NODELIST(REASON)3365092 accelerat interact tum_cte0 PD\t0:00\t 2 (Priority)3365333 accelerat interact tum_cte0 R\t1:14\t 1 hkn05093365091 accelerat interact tum_cte0 R 1:19:56\t 1 hkn07073365334 dev_accel interact tum_cte0 R\t1:48\t 1 hkn0401",,terminal_output +2071,4895659,"TERMINAL",0,0,"2579",,terminal_output +2072,4896683,"TERMINAL",0,0,"36850",,terminal_output +2073,4897707,"TERMINAL",0,0,"4791",,terminal_output +2074,4898733,"TERMINAL",0,0,"5820:002",,terminal_output +2075,4899550,"TERMINAL",0,0,"[?1049l\r[?1l>]0;tum_cte0515@hkn0509:~/Projects/jafar[?2004h(jafar) [tum_cte0515@hkn0509 jafar]$ ",,terminal_output +2076,4900712,"TERMINAL",0,0,"srun",,terminal_focus +2077,4902045,"TERMINAL",0,0,"q",,terminal_output +2078,4902109,"TERMINAL",0,0,"[?25lu[?25h",,terminal_output +2079,4902241,"TERMINAL",0,0,"[?25le[?25h[?25lu[?25h",,terminal_output +2080,4902396,"TERMINAL",0,0,"[?25le[?25h",,terminal_output +2081,4902485,"TERMINAL",0,0,"[?25l[?2004l\r[?25h[?1049h(B[?7hEvery 1.0s: squeue --mehkn0401.localdomain: Mon Jul 21 15:47:29 2025JOBID PARTITION NAME USER ST\tTIME NODES NODELIST(REASON)3365092 accelerat interact tum_cte0 PD\t0:00\t 2 (Priority)3365333 accelerat interact tum_cte0 R\t1:22\t 1 hkn05093365091 accelerat interact tum_cte0 R 1:20:04\t 1 hkn07073365334 dev_accel interact tum_cte0 R\t1:56\t 1 hkn0401",,terminal_output +2082,4903481,"TERMINAL",0,0,"30357",,terminal_output +2083,4904500,"TERMINAL",0,0,"1468",,terminal_output +2084,4905606,"TERMINAL",0,0,"2579",,terminal_output +2085,4906531,"TERMINAL",0,0,"3682:00",,terminal_output +2086,4907640,"TERMINAL",0,0,"4791",,terminal_output +2087,4908010,"TERMINAL",0,0,"[?1049l\r[?1l>]0;tum_cte0515@hkn0401:~/Projects/jafar[?2004h[tum_cte0515@hkn0401 jafar]$ ",,terminal_output +2088,4909575,"TERMINAL",0,0,"[?25lso[?25h",,terminal_output +2089,4909636,"TERMINAL",0,0,"[?25lo[?25h",,terminal_output +2090,4909701,"TERMINAL",0,0,"[?25lu[?25h",,terminal_output +2091,4909805,"TERMINAL",0,0,"[?25lr[?25h",,terminal_output +2092,4909911,"TERMINAL",0,0,"[?25lc[?25h",,terminal_output +2093,4910065,"TERMINAL",0,0,"[?25le[?25h",,terminal_output +2094,4910127,"TERMINAL",0,0,"[?25l [?25h",,terminal_output +2095,4910299,"TERMINAL",0,0,"[?25lv[?25h",,terminal_output +2096,4910864,"TERMINAL",0,0,"[?25l.[?25h",,terminal_output +2097,4910926,"TERMINAL",0,0,"[?25lv[?25h",,terminal_output +2098,4911061,"TERMINAL",0,0,"env/",,terminal_output +2099,4911211,"TERMINAL",0,0,"[?25lb[?25h",,terminal_output +2100,4911485,"TERMINAL",0,0,"[?25la[?25h",,terminal_output +2101,4912801,"TERMINAL",0,0,"[?25lb[?25h",,terminal_output +2102,4912903,"TERMINAL",0,0,"[?25li[?25h",,terminal_output +2103,4912965,"TERMINAL",0,0,"n/",,terminal_output +2104,4913232,"TERMINAL",0,0,"",,terminal_output +2105,4913745,"TERMINAL",0,0,"[?25la[?25h",,terminal_output +2106,4913908,"TERMINAL",0,0,"[?25lc[?25h",,terminal_output +2107,4914025,"TERMINAL",0,0,"tivate",,terminal_output +2108,4914399,"TERMINAL",0,0,"[?25l[?2004l\r[?25h]0;tum_cte0515@hkn0401:~/Projects/jafar[?2004h(jafar) [tum_cte0515@hkn0401 jafar]$ ",,terminal_output +2109,4915764,"TERMINAL",0,0,"salloc",,terminal_focus +2110,4919083,"genie.py",0,0,"",python,tab +2111,4921808,"genie.py",5844,0,"",python,selection_mouse +2112,4922494,"genie.py",5733,0,"",python,selection_mouse +2113,4922495,"genie.py",5732,0,"",python,selection_command +2114,4923354,"genie.py",5731,0,"",python,selection_command +2115,4923476,"genie.py",5730,0,"",python,selection_command 
+2116,4923605,"genie.py",5729,0,"",python,selection_command +2117,4923749,"genie.py",5728,0,"",python,selection_command +2118,4924064,"genie.py",5730,0,"",python,selection_command +2119,4925465,"genie.py",5730,1,"",python,content +2120,4926658,"genie.py",5729,0,"",python,selection_command +2121,4926777,"genie.py",5728,0,"",python,selection_command +2122,4927259,"genie.py",5728,2,"",python,content +2123,4929792,"genie.py",5728,0,"5",python,content +2124,4929793,"genie.py",5729,0,"",python,selection_keyboard +2125,4929803,"genie.py",5729,0,"0",python,content +2126,4929804,"genie.py",5730,0,"",python,selection_keyboard +2127,4930199,"genie.py",5729,0,"",python,selection_command +2128,4930379,"genie.py",5761,0,"",python,selection_command +2129,4930489,"genie.py",5793,0,"",python,selection_command +2130,4930855,"genie.py",5761,0,"",python,selection_command +2131,4931039,"genie.py",5729,0,"",python,selection_command +2132,4931573,"genie.py",5732,0,"\n ",python,content +2133,4932118,"genie.py",5749,0,"j",python,content +2134,4932120,"genie.py",5750,0,"",python,selection_keyboard +2135,4932175,"genie.py",5750,0,"a",python,content +2136,4932178,"genie.py",5751,0,"",python,selection_keyboard +2137,4932371,"genie.py",5751,0,"x",python,content +2138,4932373,"genie.py",5752,0,"",python,selection_keyboard +2139,4932446,"genie.py",5752,0,".",python,content +2140,4932448,"genie.py",5753,0,"",python,selection_keyboard +2141,4932657,"genie.py",5753,0,"d",python,content +2142,4932658,"genie.py",5754,0,"",python,selection_keyboard +2143,4932920,"genie.py",5754,0,"e",python,content +2144,4932921,"genie.py",5755,0,"",python,selection_keyboard +2145,4933091,"genie.py",5755,0,"b",python,content +2146,4933091,"genie.py",5756,0,"",python,selection_keyboard +2147,4933361,"genie.py",5753,3,"debug",python,content +2148,4933613,"genie.py",5758,0,".",python,content +2149,4933615,"genie.py",5759,0,"",python,selection_keyboard +2150,4933911,"genie.py",5759,0,"b",python,content +2151,4933912,"genie.py",5760,0,"",python,selection_keyboard +2152,4934011,"genie.py",5760,0,"r",python,content +2153,4934013,"genie.py",5761,0,"",python,selection_keyboard +2154,4934174,"genie.py",5761,0,"e",python,content +2155,4934176,"genie.py",5762,0,"",python,selection_keyboard +2156,4935292,"genie.py",5759,3,"breakpoint",python,content +2157,4935872,"genie.py",5769,0,"()",python,content +2158,4935873,"genie.py",5770,0,"",python,selection_keyboard +2159,4936026,"genie.py",5770,0,"""""",python,content +2160,4936027,"genie.py",5771,0,"",python,selection_keyboard +2161,4938795,"genie.py",5771,0,"S",python,content +2162,4938796,"genie.py",5772,0,"",python,selection_keyboard +2163,4938957,"genie.py",5772,0,"a",python,content +2164,4938958,"genie.py",5773,0,"",python,selection_keyboard +2165,4939063,"genie.py",5773,0,"m",python,content +2166,4939064,"genie.py",5774,0,"",python,selection_keyboard +2167,4939236,"genie.py",5774,0,"p",python,content +2168,4939237,"genie.py",5775,0,"",python,selection_keyboard +2169,4939306,"genie.py",5775,0,"l",python,content +2170,4939308,"genie.py",5776,0,"",python,selection_keyboard +2171,4939528,"genie.py",5776,0,"i",python,content +2172,4939530,"genie.py",5777,0,"",python,selection_keyboard +2173,4939601,"genie.py",5777,0,"n",python,content +2174,4939603,"genie.py",5778,0,"",python,selection_keyboard +2175,4939661,"genie.py",5778,0,"g",python,content +2176,4939662,"genie.py",5779,0,"",python,selection_keyboard +2177,4939787,"genie.py",5779,0," ",python,content 
+2178,4939788,"genie.py",5780,0,"",python,selection_keyboard +2179,4942917,"genie.py",5759,10,"",python,content +2180,4943686,"genie.py",5759,0,"p",python,content +2181,4943687,"genie.py",5760,0,"",python,selection_keyboard +2182,4943756,"genie.py",5760,0,"r",python,content +2183,4943757,"genie.py",5761,0,"",python,selection_keyboard +2184,4943865,"genie.py",5761,0,"i",python,content +2185,4943866,"genie.py",5762,0,"",python,selection_keyboard +2186,4943918,"genie.py",5762,0,"n",python,content +2187,4943919,"genie.py",5763,0,"",python,selection_keyboard +2188,4943956,"genie.py",5763,0,"t",python,content +2189,4943957,"genie.py",5764,0,"",python,selection_keyboard +2190,4945218,"genie.py",5776,0,"",python,selection_command +2191,4945384,"genie.py",5775,0,"",python,selection_command +2192,4947275,"genie.py",5775,0,"t",python,content +2193,4947276,"genie.py",5776,0,"",python,selection_keyboard +2194,4947384,"genie.py",5776,0,"o",python,content +2195,4947385,"genie.py",5777,0,"",python,selection_keyboard +2196,4947458,"genie.py",5777,0,"k",python,content +2197,4947459,"genie.py",5778,0,"",python,selection_keyboard +2198,4947558,"genie.py",5778,0,"e",python,content +2199,4947559,"genie.py",5779,0,"",python,selection_keyboard +2200,4947669,"genie.py",5779,0,"n",python,content +2201,4947670,"genie.py",5780,0,"",python,selection_keyboard +2202,4948960,"genie.py",5780,0," ",python,content +2203,4948961,"genie.py",5781,0,"",python,selection_keyboard +2204,4949275,"genie.py",5781,0,"{}",python,content +2205,4949276,"genie.py",5782,0,"",python,selection_keyboard +2206,4951267,"genie.py",5784,0,"",python,selection_command +2207,4951819,"genie.py",5784,0,",",python,content +2208,4951821,"genie.py",5785,0,"",python,selection_keyboard +2209,4951883,"genie.py",5785,0," ",python,content +2210,4951884,"genie.py",5786,0,"",python,selection_keyboard +2211,4953093,"genie.py",5786,0,"n",python,content +2212,4953094,"genie.py",5787,0,"",python,selection_keyboard +2213,4954120,"genie.py",5861,0,"",python,selection_mouse +2214,4954795,"genie.py",5870,0,"",python,selection_mouse +2215,4955352,"genie.py",5873,0,"",python,selection_mouse +2216,4955908,"genie.py",5788,0,"",python,selection_mouse +2217,4957254,"genie.py",5786,0,"",python,selection_mouse +2218,4957819,"genie.py",5787,0,"",python,selection_mouse +2219,4958729,"genie.py",5781,0,"",python,selection_mouse +2220,4960745,"genie.py",5784,0,"",python,selection_mouse +2221,4961398,"genie.py",5783,0,"",python,selection_mouse +2222,4962884,"genie.py",5781,4,"{}"",",python,selection_mouse +2223,4963012,"genie.py",5733,56," jax.debug.print(""Sampling token {}"", n)\n",python,selection_mouse +2224,4963811,"genie.py",5749,0,"",python,selection_command +2225,4970593,"genie.py",5733,0,"",python,selection_command +2226,4973573,"genie.py",5733,0," jax.debug.print(""Sampling token {} from frame {}"", n, t)\n",python,content +2227,4973578,"genie.py",5806,56,"",python,content +2228,4982013,"TERMINAL",0,0,"srun",,terminal_focus +2229,4983340,"TERMINAL",0,0,"source .venv/bin/activate",,terminal_output +2230,4983810,"TERMINAL",0,0,"queue",,terminal_output +2231,4984206,"TERMINAL",0,0,"source .venv/bin/activate",,terminal_output +2232,4984490,"TERMINAL",0,0,"",,terminal_output +2233,4994268,"TERMINAL",0,0,"srun",,terminal_focus +2234,4997149,"TERMINAL",0,0,"srun",,terminal_focus +2235,4998281,"TERMINAL",0,0,"i",,terminal_output +2236,4998503,"TERMINAL",0,0,"[?25ld[?25h[?25ll[?25h",,terminal_output +2237,4998726,"TERMINAL",0,0,"[?25li[?25h",,terminal_output 
+2238,4998778,"TERMINAL",0,0,"[?25ln[?25h",,terminal_output +2239,4998837,"TERMINAL",0,0,"[?25lg[?25h",,terminal_output +2240,4999672,"TERMINAL",0,0,"",,terminal_output +2241,5001019,"TERMINAL",0,0,"[?25lq[?25h[?25lu[?25h",,terminal_output +2242,5001199,"TERMINAL",0,0,"[?25le[?25h[?25lu[?25h",,terminal_output +2243,5001336,"TERMINAL",0,0,"[?25le[?25h",,terminal_output +2244,5001397,"TERMINAL",0,0,"[?25l[?2004l\r[?25h",,terminal_output +2245,5001456,"TERMINAL",0,0,"[?1049h(B[?7hEvery 1.0s: squeue --mehkn0401.localdomain: Mon Jul 21 15:49:08 2025JOBID PARTITION NAME USER ST\tTIME NODES NODELIST(REASON)3365092 accelerat interact tum_cte0 PD\t0:00\t 2 (Priority)3365333 accelerat interact tum_cte0 R\t3:01\t 1 hkn05093365091 accelerat interact tum_cte0 R 1:21:43\t 1 hkn07073365334 dev_accel interact tum_cte0 R\t3:35\t 1 hkn0401",,terminal_output +2246,5002480,"TERMINAL",0,0,"9246",,terminal_output +2247,5003486,"TERMINAL",0,0,"10357",,terminal_output +2248,5004515,"TERMINAL",0,0,"1468",,terminal_output +2249,5005539,"TERMINAL",0,0,"2579",,terminal_output +2250,5006563,"TERMINAL",0,0,"36840",,terminal_output +2251,5007587,"TERMINAL",0,0,"4791",,terminal_output +2252,5008536,"TERMINAL",0,0,"58502",,terminal_output +2253,5009635,"TERMINAL",0,0,"6913",,terminal_output +2254,5010659,"TERMINAL",0,0,"71024",,terminal_output +2255,5011685,"TERMINAL",0,0,"8135",,terminal_output +2256,5012605,"TERMINAL",0,0,"9246",,terminal_output +2257,5013618,"TERMINAL",0,0,"20357",,terminal_output +2258,5014652,"TERMINAL",0,0,"1468",,terminal_output +2259,5015676,"TERMINAL",0,0,"2579",,terminal_output +2260,5016701,"TERMINAL",0,0,"36850",,terminal_output +2261,5017725,"TERMINAL",0,0,"4791",,terminal_output +2262,5017952,"TERMINAL",0,0,"[?1049l\r[?1l>]0;tum_cte0515@hkn0401:~/Projects/jafar[?2004h(jafar) [tum_cte0515@hkn0401 jafar]$ ",,terminal_output +2263,5022303,"slurm/jobs/mihir/horeka/yolo-runs/sampling.sh",0,0,"",shellscript,tab +2264,5023131,"slurm/jobs/mihir/horeka/yolo-runs/sampling.sh",550,0,"",shellscript,selection_mouse +2265,5023134,"slurm/jobs/mihir/horeka/yolo-runs/sampling.sh",549,0,"",shellscript,selection_command +2266,5023803,"slurm/jobs/mihir/horeka/yolo-runs/sampling.sh",592,0,"",shellscript,selection_mouse +2267,5023805,"slurm/jobs/mihir/horeka/yolo-runs/sampling.sh",591,0,"",shellscript,selection_command +2268,5024365,"slurm/jobs/mihir/horeka/yolo-runs/sampling.sh",671,0,"",shellscript,selection_mouse +2269,5024366,"slurm/jobs/mihir/horeka/yolo-runs/sampling.sh",670,0,"",shellscript,selection_command +2270,5024998,"slurm/jobs/mihir/horeka/yolo-runs/sampling.sh",740,0,"",shellscript,selection_mouse +2271,5024999,"slurm/jobs/mihir/horeka/yolo-runs/sampling.sh",739,0,"",shellscript,selection_command +2272,5025566,"slurm/jobs/mihir/horeka/yolo-runs/sampling.sh",740,0,"",shellscript,selection_mouse +2273,5025568,"slurm/jobs/mihir/horeka/yolo-runs/sampling.sh",739,0,"",shellscript,selection_command +2274,5026069,"slurm/jobs/mihir/horeka/yolo-runs/sampling.sh",740,0,"",shellscript,selection_mouse +2275,5026072,"slurm/jobs/mihir/horeka/yolo-runs/sampling.sh",739,0,"",shellscript,selection_command +2276,5026610,"slurm/jobs/mihir/horeka/yolo-runs/sampling.sh",774,0,"",shellscript,selection_mouse +2277,5026612,"slurm/jobs/mihir/horeka/yolo-runs/sampling.sh",773,0,"",shellscript,selection_command +2278,5027372,"slurm/jobs/mihir/horeka/yolo-runs/sampling.sh",592,0,"",shellscript,selection_mouse +2279,5027373,"slurm/jobs/mihir/horeka/yolo-runs/sampling.sh",591,0,"",shellscript,selection_command 
+2280,5027977,"slurm/jobs/mihir/horeka/yolo-runs/sampling.sh",612,0,"",shellscript,selection_mouse +2281,5027978,"slurm/jobs/mihir/horeka/yolo-runs/sampling.sh",611,0,"",shellscript,selection_command +2282,5028524,"slurm/jobs/mihir/horeka/yolo-runs/sampling.sh",593,0,"",shellscript,selection_mouse +2283,5029334,"slurm/jobs/mihir/horeka/yolo-runs/sampling.sh",550,0,"",shellscript,selection_mouse +2284,5029335,"slurm/jobs/mihir/horeka/yolo-runs/sampling.sh",549,0,"",shellscript,selection_command +2285,5029872,"slurm/jobs/mihir/horeka/yolo-runs/sampling.sh",550,0,"",shellscript,selection_mouse +2286,5029873,"slurm/jobs/mihir/horeka/yolo-runs/sampling.sh",549,0,"",shellscript,selection_command +2287,5030507,"slurm/jobs/mihir/horeka/yolo-runs/sampling.sh",574,0,"",shellscript,selection_mouse +2288,5030508,"slurm/jobs/mihir/horeka/yolo-runs/sampling.sh",573,0,"",shellscript,selection_command +2289,5031194,"slurm/jobs/mihir/horeka/yolo-runs/sampling.sh",574,0,"",shellscript,selection_mouse +2290,5031195,"slurm/jobs/mihir/horeka/yolo-runs/sampling.sh",573,0,"",shellscript,selection_command +2291,5031911,"slurm/jobs/mihir/horeka/yolo-runs/sampling.sh",575,0,"",shellscript,selection_mouse +2292,5084962,"TERMINAL",0,0,"\r(reverse-i-search)`': ",,terminal_output +2293,5086663,"TERMINAL",0,0,"s': source .venv/bin/activate\r",,terminal_output +2294,5086866,"TERMINAL",0,0,"[?25lsa': salloc --time=10:00:00 --partition=accelerated --nodes=2 --ntasks-per-node=4 --gres=gpu:4 --cpus-per-task=5\r[?25h",,terminal_output +2295,5087143,"TERMINAL",0,0,"[?25lsm': git add sample.py[?25h",,terminal_output +2296,5087389,"TERMINAL",0,0,"[?25lsp': git add sample.py[?25h",,terminal_output +2297,5087789,"TERMINAL",0,0,"[?25lsl': git add sample.py[?25h",,terminal_output +2298,5088024,"TERMINAL",0,0,"[?25ls\rfailed reverse-i-search)`sampli': git add sample.py[?25h",,terminal_output +2299,5088086,"TERMINAL",0,0,"[?25ls[1@n[?25h",,terminal_output +2300,5088310,"TERMINAL",0,0,"[?25ls[1@g[?25h",,terminal_output +2301,5088543,"TERMINAL",0,0,"[?25ls[1@.[?25h",,terminal_output +2302,5088931,"TERMINAL",0,0,"[?25ls[1@s[?25h",,terminal_output +2303,5089154,"TERMINAL",0,0,"[?25ls[1@h[?25h",,terminal_output +2304,5090018,"TERMINAL",0,0,"^C[?2004l\r[?2004h[?2004l\r\r\n]0;tum_cte0515@hkn0401:~/Projects/jafar[?2004h(jafar) [tum_cte0515@hkn0401 jafar]$ ",,terminal_output +2305,5098427,"TERMINAL",0,0,"srun",,terminal_focus +2306,5099815,"TERMINAL",0,0,"srun",,terminal_focus +2307,5104493,"TERMINAL",0,0,"bash",,terminal_focus +2308,5105125,"TERMINAL",0,0,"ls",,terminal_command +2309,5105136,"TERMINAL",0,0,"]633;E;2025-07-21 15:50:51 ls;a9849893-e54a-428b-8c10-ef7068f4e31e]633;Ccheckpoints count_items.sh data data_new huggingface logs possibly_corrupt_files_in_this_workspace.txt scripts\r\n]0;tum_cte0515@hkn1990:/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared]633;D;0",,terminal_output +2310,5106768,"TERMINAL",0,0,"cd checkpoints/",,terminal_command +2311,5107006,"TERMINAL",0,0,"ls",,terminal_command +2312,5107056,"TERMINAL",0,0,"]633;E;2025-07-21 15:50:53 ls;a9849893-e54a-428b-8c10-ef7068f4e31e]633;C",,terminal_output +2313,5107213,"TERMINAL",0,0,"0000 3290392 3292329 3292337 3296540 3297577 3297727 3299258 3300672 3307618 3311672 3316022 lam train_dynamics_lr_schedule_wsd\r\n3290283 3290439 3292330 3292338 3296571 3297578 3299016 3299259 3301025 3307619 3313562 big-runs lam-1-action train_dyn_new_arch-bugfixed-spatial-shift\r\n3290284 3290440 3292331 3292339 3296573 3297582 3299062 3299272 3301026 3309662 3313563 checkpoints_alfred 
lam_ckpt_dir train_dyn_new_arch-bugfixed-temporal-shift\r\n3290295 3291405 3292332 3294600 3296574 3297586 3299063 3299579 3301027 3309663 3313564 coinrun lam_main_test train_dyn_yolorun_new_arch\r\n3290296 3292213 3292333 3294601 3296575 3297606 3299065 3300233 3301029 3309699 3313565 debug tokenizer train_lam_minecraft_overfit_sample\r\n3290366 3292221 3292334 3294602 3297569 3297671 3299066 3300290 3301030 3310436 3313570 dyn tokenizer_ckpt_dir train_tokenizer_batch_size_scaling_16_node\r\n3290367 3292258 3292335 3294603 3297575 3297693 3299068 3300658 3301031 3310437 3313571 dynamics_ckpt_dir train_dynamics_lr_schedule_const train_tokenizer_minecraft_overfit_sample\r\n3290391 3292328 3292336 3296502 3297576 3297706 3299069 3300663 3306801 3311671 3313572 interactive train_dynamics_lr_schedule_cos wrap\r\n]0;tum_cte0515@hkn1990:/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints]633;D;0]633;P;Cwd=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints",,terminal_output +2314,5108932,"TERMINAL",0,0,"cd interactive/",,terminal_command +2315,5109269,"TERMINAL",0,0,"ls",,terminal_command +2316,5109318,"TERMINAL",0,0,"]633;E;2025-07-21 15:50:55 ls;a9849893-e54a-428b-8c10-ef7068f4e31e]633;C",,terminal_output +2317,5109408,"TERMINAL",0,0,"3347289 3350418 3352994 3352996 3353884 3353924 3355596 3357147 3357893 3357894 3359231 3359232 3359275 3365091 3365095 shift-spatial shift-temporal\r\n]0;tum_cte0515@hkn1990:/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/interactive]633;D;0",,terminal_output +2318,5113214,"TERMINAL",0,0,"cd shift-spatial/",,terminal_command +2319,5115553,"TERMINAL",0,0,"pwd",,terminal_command +2320,5117783,"TERMINAL",0,0,"srun",,terminal_focus +2321,5118621,"TERMINAL",0,0,"s",,terminal_output +2322,5118683,"TERMINAL",0,0,"[?25lh[?25h",,terminal_output +2323,5118789,"TERMINAL",0,0,"[?25l [?25h",,terminal_output +2324,5120279,"slurm/jobs/mihir/horeka/yolo-runs/sampling.sh",0,0,"",shellscript,tab +2325,5122725,"TERMINAL",0,0,"srun",,terminal_focus +2326,5123552,"TERMINAL",0,0,"srun",,terminal_focus +2327,5124668,"TERMINAL",0,0,"slurm/jobs/mihir/horeka/yolo-runs/sampling.sh",,terminal_output +2328,5125514,"TERMINAL",0,0,"[?25l\rslurm/jobs/mihir/horeka/yolo-runs/sampling.sh [?25h",,terminal_output +2329,5126590,"TERMINAL",0,0,"bash",,terminal_focus +2330,5128279,"TERMINAL",0,0,"srun",,terminal_focus +2331,5129209,"TERMINAL",0,0,"/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/interactive/shift-spatial",,terminal_output +2332,5129917,"TERMINAL",0,0,"[?25l\r/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/interactive/shift-spatial\r\n[?2004l\r[?25h\r\n# Log the sbatch script\r\ncat $0\r\n\r\nmodule unload mpi/openmpi/5.0\r\nmodule unload devel/cuda/12.4\r\n# source .venv/bin/activate\r\n\r\narray_records_dir=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_new/open_ai_minecraft_arrayrecords_chunked\r\n\r\njob_name=$SLURM_JOB_NAME\r\nslurm_job_id=$SLURM_JOB_ID\r\n\r\nCHECKPOINT_DIR=$ws_dir/checkpoints/$job_name/$slurm_job_id\r\nmkdir -p $CHECKPOINT_DIR\r\n\r\ntokenizer_ckpt_dir=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/big-runs/tokenizer-lr-scaling/train_tokenizer_lr_sweep_1e-4\r\ndynamics_ckpt_dir=$1\r\necho $dynamics_ckpt_dir\r\n\r\nenv | grep SLURM\r\n\r\npython sample.py \\r\n --checkpoint $dynamics_ckpt_dir \\r\n --dyna_dim=128 \\r\n --dyna_num_blocks=2 \\r\n --dyna_num_heads=4 \\r\n --seq_len=2 \\r\n --data_dir 
$array_records_dir\r\n\r\n",,terminal_output +2333,5130080,"TERMINAL",0,0,"/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/interactive/shift-spatial\r\nSLURM_STEP_NUM_TASKS=1\r\nSLURM_JOB_USER=tum_cte0515\r\nSLURM_TASKS_PER_NODE=1\r\nSLURM_JOB_UID=999226\r\nSLURM_TASK_PID=1431282\r\nSLURM_JOB_GPUS=1\r\nSLURM_LOCALID=0\r\nSLURM_SUBMIT_DIR=/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar\r\nSLURMD_NODENAME=hkn0401\r\nSLURM_JOB_START_TIME=1753105533\r\nSLURM_STEP_NODELIST=hkn0401\r\nSLURM_CLUSTER_NAME=hk\r\nSLURM_JOB_END_TIME=1753109133\r\nSLURM_PMI2_SRUN_PORT=40535\r\nSLURM_CPUS_ON_NODE=6\r\nSLURM_JOB_CPUS_PER_NODE=6\r\nSLURM_GPUS_ON_NODE=1\r\nSLURM_GTIDS=0\r\nSLURM_JOB_PARTITION=dev_accelerated\r\nSLURM_TRES_PER_TASK=cpu=5\r\nSLURM_OOM_KILL_STEP=0\r\nSLURM_JOB_NUM_NODES=1\r\nSLURM_STEPID=4294967290\r\nSLURM_JOBID=3365334\r\nSLURM_PTY_PORT=39479\r\nSLURM_JOB_QOS=normal\r\nSLURM_LAUNCH_NODE_IPADDR=10.0.7.198\r\nSLURM_PTY_WIN_ROW=29\r\nSLURM_PMI2_PROC_MAPPING=(vector,(0,1,1))\r\nSLURMD_DEBUG=2\r\nSLURM_PROCID=0\r\nSLURM_CPUS_PER_TASK=5\r\nSLURM_NTASKS=1\r\nSLURM_TOPOLOGY_ADDR=hkibb.hkibbi1.hkibbi1e11.hkn0401\r\nSLURM_TOPOLOGY_ADDR_PATTERN=switch.switch.switch.node\r\nSLURM_SRUN_COMM_HOST=10.0.7.198\r\nSLURM_SCRIPT_CONTEXT=prolog_task\r\nSLURM_PTY_WIN_COL=197\r\nSLURM_NODELIST=hkn0401\r\nSLURM_SRUN_COMM_PORT=38425\r\nSLURM_STEP_ID=4294967290\r\nSLURM_JOB_ACCOUNT=hk-project-p0023960\r\nSLURM_PRIO_PROCESS=0\r\nSLURM_NPROCS=1\r\nSLURM_NNODES=1\r\nSLURM_SUBMIT_HOST=hkn1990.localdomain\r\nSLURM_JOB_ID=3365334\r\nSLURM_NODEID=0\r\nSLURM_STEP_NUM_NODES=1\r\nSLURM_STEP_TASKS_PER_NODE=1\r\nSLURM_MPI_TYPE=pmi2\r\nSLURM_PMI2_STEP_NODES=hkn0401\r\nSLURM_CONF=/etc/slurm/slurm.conf\r\nSLURM_JOB_NAME=interactive\r\nSLURM_NTASKS_PER_NODE=1\r\nSLURM_STEP_LAUNCHER_PORT=38425\r\nSLURM_JOB_GID=502226\r\nSLURM_JOB_NODELIST=hkn0401\r\n",,terminal_output +2334,5143910,"TERMINAL",0,0,"Traceback (most recent call last):\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/sample.py"", line 19, in \r\n from genie import Genie\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/genie.py"", line 10, in \r\n from models.dynamics import DynamicsMaskGIT, DynamicsAutoregressive\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/models/dynamics.py"", line 8, in \r\n from utils.nn import STTransformer\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/utils/nn.py"", line 203, in \r\n class STTransformer(nn.Module):\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/flax/linen/module.py"", line 1042, in __init_subclass__\r\n cls._customized_dataclass_transform(kw_only)\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/flax/linen/module.py"", line 1114, in _customized_dataclass_transform\r\n kw_only_dataclasses.dataclass(\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/flax/linen/kw_only_dataclasses.py"", line 125, in dataclass\r\n",,terminal_output +2335,5143973,"TERMINAL",0,0," return wrap if cls is None else wrap(cls)\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/flax/linen/kw_only_dataclasses.py"", line 123, in wrap\r\n return _process_class(cls, extra_fields=extra_fields, **kwargs)\r\n File 
""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/flax/linen/kw_only_dataclasses.py"", line 210, in _process_class\r\n transformed_cls: type[M] = dataclasses.dataclass(cls, **kwargs)\r\n File ""/home/hk-project-p0023960/tum_cte0515/.local/share/uv/python/cpython-3.10.18-linux-x86_64-gnu/lib/python3.10/dataclasses.py"", line 1184, in dataclass\r\n return wrap(cls)\r\n File ""/home/hk-project-p0023960/tum_cte0515/.local/share/uv/python/cpython-3.10.18-linux-x86_64-gnu/lib/python3.10/dataclasses.py"", line 1175, in wrap\r\n return _process_class(cls, init, repr, eq, order, unsafe_hash,\r\n File ""/home/hk-project-p0023960/tum_cte0515/.local/share/uv/python/cpython-3.10.18-linux-x86_64-gnu/lib/python3.10/dataclasses.py"", line 1024, in _process_class\r\n _init_fn(all_init_fields,\r\n File ""/home/hk-project-p0023960/tum_cte0515/.local/share/uv/python/cpython-3.10.18-linux-x86_64-gnu/lib/python3.10/dataclasses.py"", line 544, in _init_fn\r\n raise TypeError(f'non-default argument {f.name!r} '\r\nTypeError: non-default argument 'use_flash_attention' follows default argument\r\n",,terminal_output +2336,5144091,"TERMINAL",0,0,"]0;tum_cte0515@hkn0401:~/Projects/jafar[?2004h(jafar) [tum_cte0515@hkn0401 jafar]$ ",,terminal_output +2337,5155932,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/genie.py",0,0,"from typing import Dict, Any\n\nimport optax\nimport jax\nimport jax.numpy as jnp\nimport flax.linen as nn\nfrom flax.training.train_state import TrainState\nimport orbax.checkpoint as ocp\n\nfrom models.dynamics import DynamicsMaskGIT, DynamicsAutoregressive\nfrom models.lam import LatentActionModel\nfrom models.tokenizer import TokenizerVQVAE\n\nimport grain\n\n\nclass Genie(nn.Module):\n """"""Genie model""""""\n\n # --- Tokenizer ---\n in_dim: int\n tokenizer_dim: int\n latent_patch_dim: int\n num_patch_latents: int\n patch_size: int\n tokenizer_num_blocks: int\n tokenizer_num_heads: int\n # --- LAM ---\n lam_dim: int\n latent_action_dim: int\n num_latent_actions: int\n lam_patch_size: int\n lam_num_blocks: int\n lam_num_heads: int\n lam_co_train: bool\n # --- Dynamics ---\n dyna_dim: int\n dyna_num_blocks: int\n dyna_num_heads: int\n use_maskgit: bool\n param_dtype: jnp.dtype\n dtype: jnp.dtype\n use_flash_attention: bool\n dropout: float = 0.0\n mask_limit: float = 0.0\n\n def setup(self):\n self.tokenizer = TokenizerVQVAE(\n in_dim=self.in_dim,\n model_dim=self.tokenizer_dim,\n latent_dim=self.latent_patch_dim,\n num_latents=self.num_patch_latents,\n patch_size=self.patch_size,\n num_blocks=self.tokenizer_num_blocks,\n num_heads=self.tokenizer_num_heads,\n dropout=0.0,\n codebook_dropout=0.0,\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n use_flash_attention=self.use_flash_attention,\n )\n self.lam = LatentActionModel(\n in_dim=self.in_dim,\n model_dim=self.lam_dim,\n latent_dim=self.latent_patch_dim,\n num_latents=self.num_latent_actions,\n patch_size=self.lam_patch_size,\n num_blocks=self.lam_num_blocks,\n num_heads=self.lam_num_heads,\n dropout=0.0,\n codebook_dropout=0.0,\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n use_flash_attention=self.use_flash_attention,\n )\n\n if self.use_maskgit:\n self.dynamics = DynamicsMaskGIT(\n model_dim=self.dyna_dim,\n num_latents=self.num_patch_latents,\n num_blocks=self.dyna_num_blocks,\n num_heads=self.dyna_num_heads,\n dropout=self.dropout,\n mask_limit=self.mask_limit,\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n 
use_flash_attention=self.use_flash_attention,\n ) \n else:\n self.dynamics = DynamicsAutoregressive(\n model_dim=self.dyna_dim,\n num_latents=self.num_patch_latents,\n num_blocks=self.dyna_num_blocks,\n num_heads=self.dyna_num_heads,\n dropout=self.dropout,\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n use_flash_attention=self.use_flash_attention,\n )\n\n def __call__(self, batch: Dict[str, Any], training: bool = True) -> Dict[str, Any]:\n tokenizer_outputs = self.tokenizer.vq_encode(batch[""videos""], training=False)\n lam_outputs = self.lam.vq_encode(batch[""videos""], training=False)\n latent_actions = jax.lax.cond(\n self.lam_co_train,\n lambda: lam_outputs[""z_q""],\n lambda: jax.lax.stop_gradient(lam_outputs[""z_q""])\n )\n outputs = dict(\n video_tokens=jax.lax.stop_gradient(tokenizer_outputs[""indices""]),\n latent_actions=latent_actions,\n )\n outputs[""mask_rng""] = batch[""mask_rng""]\n dyna_outputs = self.dynamics(outputs, training)\n outputs.update(dyna_outputs)\n mle_indices = jnp.argmax(outputs[""token_logits""], axis=-1)\n outputs[""recon""] = self.tokenizer.decode(\n mle_indices, batch[""videos""].shape[2:4]\n )\n outputs[""lam_indices""] = lam_outputs[""indices""]\n return outputs\n\n\n def sample_causal(\n self,\n batch: Dict[str, Any],\n seq_len: int,\n temperature: float = 1,\n sample_argmax: bool = False,\n ):\n """"""\n Autoregressively samples up to `seq_len` future frames using the causal transformer backend.\n\n - Input frames are tokenized once.\n - Future frames are generated one at a time, each conditioned on all previous frames.\n - All frames are detokenized in a single pass at the end.\n\n Args:\n batch: Dict with at least ""videos"" (B, T, H, W, C)\n seq_len: total number of frames to generate (including context)\n temperature: sampling temperature\n sample_argmax: if True, use argmax instead of sampling\n\n Returns:\n Generated video frames (B, seq_len, H, W, C)\n """"""\n # --- Encode context frames ---\n tokenizer_out = self.tokenizer.vq_encode(batch[""videos""], training=False)\n token_idxs = tokenizer_out[""indices""] # (B, T, N)\n B, T, N = token_idxs.shape\n\n # jax.debug.print(""token_idxs shape: {}"", token_idxs.shape)\n # --- Prepare initial token sequence ---\n # Pad with zeros for future frames\n pad_shape = (B, seq_len - T, N)\n token_idxs_full = jnp.concatenate(\n [token_idxs, jnp.zeros(pad_shape, dtype=token_idxs.dtype)], axis=1\n ) # (B, seq_len, N)\n\n # --- Prepare latent actions ---\n action_tokens = self.lam.vq.get_codes(batch[""latent_actions""]) # (B, S-1, )\n # --- Autoregressive generation loop ---\n rng = batch[""rng""]\n for t in range(T, seq_len):\n for n in range(50):\n jax.debug.print(""Sampling token {} from frame {}"", n, t)\n dyna_inputs = {\n ""video_tokens"": token_idxs_full,\n ""latent_actions"": action_tokens\n }\n # jax.debug.print(""token_idxs_full 0: {}"", token_idxs_full[0,:,0])\n dyna_outputs = self.dynamics(dyna_inputs, training=False)\n # # dyna_outputs[""token_logits""]: (B, t, N, vocab_size)\n # # We want the logits for the last time step (frame t-1 predicting t)\n # jax.debug.breakpoint()\n next_token_logits = dyna_outputs[""token_logits""][:, t, n, :].astype(jnp.float32) # (B, 1, vocab_size)\n\n # Sample or argmax for each patch\n if sample_argmax:\n next_token = jnp.argmax(next_token_logits, axis=-1) # (B, 1)\n else:\n rng, step_rng = jax.random.split(rng)\n next_token = jax.random.categorical(\n step_rng, next_token_logits / temperature, axis=-1\n ) # (B, 1)\n\n # Insert the generated tokens into the 
sequence\n token_idxs_full = token_idxs_full.at[:, t, n].set(next_token)\n\n # --- Decode all tokens at once at the end ---\n final_frames = self.tokenizer.decode(\n token_idxs_full, video_hw=batch[""videos""].shape[2:4]\n )\n return final_frames\n\n\n @nn.compact\n def sample_maskgit(\n self,\n batch: Dict[str, Any],\n seq_len: int,\n steps: int = 25,\n temperature: float = 1,\n sample_argmax: bool = False,\n ) -> Any:\n """"""\n Autoregressively samples up to `seq_len` future frames, following Figure 8 of the paper.\n\n - Input frames are tokenized once.\n - Future frames are generated autoregressively in token space.\n - All frames are detokenized in a single pass.\n\n Note:\n - For interactive or step-wise sampling, detokenization should occur after each action.\n - To maintain consistent tensor shapes across timesteps, all current and future frames are decoded at every step.\n - Temporal causal structure is preserved by \n a) reapplying the mask before each decoding step.\n b) a temporal causal mask is applied within each ST-transformer block.\n\n Dimension keys:\n B: batch size \n T: number of input (conditioning) frames \n N: patches per frame \n S: sequence length \n A: action space \n D: model latent dimension\n """"""\n # --- Encode videos and actions ---\n tokenizer_out = self.tokenizer.vq_encode(batch[""videos""], training=False)\n token_idxs = tokenizer_out[""indices""] # (B, T, N)\n B, T, N = token_idxs.shape\n pad_shape = (B, seq_len - T, N)\n pad = jnp.zeros(pad_shape, dtype=token_idxs.dtype)\n token_idxs = jnp.concatenate([token_idxs, pad], axis=1) # (B, S, N)\n action_tokens = self.lam.vq.get_codes(batch[""latent_actions""]) \n\n MaskGITLoop = nn.scan(\n MaskGITStep,\n variable_broadcast=""params"",\n split_rngs={""params"": False},\n in_axes=0,\n out_axes=0,\n length=steps,\n )\n \n loop_fn = MaskGITLoop(\n dynamics=self.dynamics,\n tokenizer=self.tokenizer,\n temperature=temperature,\n sample_argmax=sample_argmax,\n steps=steps,\n )\n\n def generation_step_fn(carry, step_t):\n rng, current_token_idxs = carry\n rng, step_rng = jax.random.split(rng)\n\n # Mask current and future frames (i.e., t >= step_t)\n mask = jnp.arange(seq_len) >= step_t # (S,)\n mask = jnp.broadcast_to(mask[None, :, None], (B, seq_len, N)) # (B, S, N)\n mask = mask.astype(bool)\n masked_token_idxs = current_token_idxs * ~mask\n\n # --- Initialize and run MaskGIT loop ---\n init_carry_maskgit = (\n step_rng,\n masked_token_idxs,\n mask,\n action_tokens,\n )\n final_carry_maskgit, _ = loop_fn(init_carry_maskgit, jnp.arange(steps))\n updated_token_idxs = final_carry_maskgit[1]\n new_carry = (rng, updated_token_idxs)\n return new_carry, None\n\n # --- Run the autoregressive generation using scan ---\n initial_carry = (batch[""rng""], token_idxs)\n timesteps_to_scan = jnp.arange(T, seq_len)\n final_carry, _ = jax.lax.scan(\n generation_step_fn,\n initial_carry,\n timesteps_to_scan\n )\n final_token_idxs = final_carry[1]\n\n # --- Decode all tokens at once at the end ---\n final_frames = self.tokenizer.decode(\n final_token_idxs,\n video_hw=batch[""videos""].shape[2:4],\n )\n return final_frames\n\n def vq_encode(self, batch, training) -> Dict[str, Any]:\n # --- Preprocess videos ---\n lam_output = self.lam.vq_encode(batch[""videos""], training=training)\n return lam_output[""indices""]\n\n\nclass MaskGITStep(nn.Module):\n dynamics: nn.Module\n tokenizer: nn.Module\n temperature: float\n sample_argmax: bool\n steps: int\n\n @nn.compact\n def __call__(self, carry, x):\n rng, token_idxs, mask, 
action_tokens = carry\n step = x\n N = token_idxs.shape[2]\n\n # --- Construct + encode video ---\n vid_embed = self.dynamics.patch_embed(token_idxs) # (B, S, N, D)\n mask_token = self.dynamics.mask_token # (1, 1, 1, D,)\n mask_expanded = mask[..., None] # (B, S, N, 1) \n vid_embed = jnp.where(mask_expanded, mask_token, vid_embed)\n\n # --- Predict transition ---\n act_embed = self.dynamics.action_up(action_tokens)\n vid_embed += jnp.pad(act_embed, ((0, 0), (1, 0), (0, 0), (0, 0)))\n unmasked_ratio = jnp.cos(jnp.pi * (step + 1) / (self.steps * 2))\n step_temp = self.temperature * (1.0 - unmasked_ratio)\n final_logits = self.dynamics.dynamics(vid_embed) / step_temp\n\n # --- Sample new tokens for final frame ---\n if self.sample_argmax:\n sampled_token_idxs = jnp.argmax(final_logits, axis=-1)\n else:\n rng, _rng = jax.random.split(rng)\n sampled_token_idxs = jax.random.categorical(_rng, final_logits)\n gather_fn = jax.vmap(jax.vmap(jax.vmap(lambda x, y: x[y])))\n final_token_probs = gather_fn(jax.nn.softmax(final_logits), sampled_token_idxs)\n final_token_probs += ~mask\n # Update masked tokens only\n token_idxs = jnp.where(mask, sampled_token_idxs, token_idxs)\n\n # --- Update mask ---\n num_unmasked_tokens = jnp.round(N * (1.0 - unmasked_ratio)).astype(int)\n idx_mask = jnp.arange(final_token_probs.shape[-1]) > num_unmasked_tokens\n sorted_idxs = jnp.argsort(final_token_probs, axis=-1, descending=True)\n mask_update_fn = jax.vmap(lambda msk, ids: msk.at[ids].set(idx_mask))\n new_mask = mask_update_fn(mask, sorted_idxs)\n\n new_carry = (rng, token_idxs, new_mask, action_tokens)\n return new_carry, None\n\ndef restore_genie_components(\n train_state: TrainState,\n sharding: jax.sharding.NamedSharding,\n grain_iterator: grain.DataLoaderIterator,\n inputs: Dict[str, jax.Array],\n rng: jax.Array,\n args,\n):\n """"""Restore pre-trained Genie components""""""\n rng, _rng = jax.random.split(rng)\n\n # dummy values since we only use tx to initialize the dummy train states\n dummy_tx = optax.adamw(\n learning_rate=optax.constant_schedule(args.max_lr),\n b1=0.9,\n b2=0.9,\n weight_decay=1e-4,\n )\n handler_registry = ocp.handlers.DefaultCheckpointHandlerRegistry()\n handler_registry.add('model_state', ocp.args.StandardRestore, ocp.handlers.StandardCheckpointHandler)\n \n\n checkpoint_options = ocp.CheckpointManagerOptions(\n step_format_fixed_length=6,\n )\n tokenizer_checkpoint_manager = ocp.CheckpointManager(\n directory=args.tokenizer_checkpoint,\n options=checkpoint_options,\n handler_registry=handler_registry,\n )\n dummy_tokenizer = TokenizerVQVAE(\n in_dim=args.image_channels,\n model_dim=args.tokenizer_dim,\n latent_dim=args.latent_patch_dim,\n num_latents=args.num_patch_latents,\n patch_size=args.patch_size,\n num_blocks=args.tokenizer_num_blocks,\n num_heads=args.tokenizer_num_heads,\n dropout=args.dropout,\n codebook_dropout=args.dropout,\n param_dtype=args.param_dtype,\n dtype=args.dtype,\n use_flash_attention=args.use_flash_attention,\n )\n tokenizer_init_params = dummy_tokenizer.init(_rng, inputs)\n dummy_tokenizer_train_state = TrainState.create(\n apply_fn=dummy_tokenizer.apply, params=tokenizer_init_params, tx=dummy_tx\n )\n abstract_sharded_tokenizer_state = _create_abstract_sharded_pytree(\n dummy_tokenizer_train_state, sharding\n )\n restored_tokenizer = tokenizer_checkpoint_manager.restore(\n step=tokenizer_checkpoint_manager.latest_step(),\n args=ocp.args.Composite(\n model_state=ocp.args.StandardRestore(abstract_sharded_tokenizer_state),\n ),\n )[""model_state""]\n 
restored_tokenizer_params = restored_tokenizer.params[""params""]\n train_state.params[""params""][""tokenizer""].update(restored_tokenizer_params)\n tokenizer_checkpoint_manager.close()\n\n if args.lam_checkpoint:\n lam_checkpoint_manager = ocp.CheckpointManager(\n directory=args.lam_checkpoint,\n options=checkpoint_options,\n handler_registry=handler_registry,\n )\n dummy_lam = LatentActionModel(\n in_dim=args.image_channels,\n model_dim=args.lam_dim,\n latent_dim=args.latent_patch_dim,\n num_latents=args.num_latent_actions,\n patch_size=args.lam_patch_size,\n num_blocks=args.lam_num_blocks,\n num_heads=args.lam_num_heads,\n dropout=args.dropout,\n codebook_dropout=args.dropout,\n param_dtype=args.param_dtype,\n dtype=args.dtype,\n use_flash_attention=args.use_flash_attention,\n )\n lam_init_params = dummy_lam.init(_rng, inputs)\n dummy_lam_train_state = TrainState.create(\n apply_fn=dummy_lam.apply, params=lam_init_params, tx=dummy_tx\n )\n abstract_sharded_lam_state = _create_abstract_sharded_pytree(\n dummy_lam_train_state, sharding\n )\n restored_lam = lam_checkpoint_manager.restore(\n step=lam_checkpoint_manager.latest_step(),\n args=ocp.args.Composite(\n model_state=ocp.args.StandardRestore(abstract_sharded_lam_state),\n ),\n )[""model_state""]\n restored_lam_params = restored_lam.params[""params""]\n # Genie does not initialize all LAM modules, thus we omit those extra modules during restoration\n # (f.srambical) FIXME: Currently, this is a small HBM memory crunch since the LAM's decoder is loaded into HBM and immediately dicarded.\n # A workaround would be to restore to host memory first, and only move the weights to HBM after pruning the decoder\n restored_lam_params = {\n k: v\n for k, v in restored_lam_params.items()\n if k in train_state.params[""params""][""lam""]\n }\n train_state.params[""params""][""lam""].update(restored_lam_params)\n lam_checkpoint_manager.close()\n\n return train_state\n\ndef _create_abstract_sharded_pytree(pytree_template, sharding_spec):\n """"""Replaces arrays in a pytree with ShapeDtypeStructs having the given sharding.""""""\n\n def map_fn(leaf_template):\n if hasattr(leaf_template, ""shape"") and hasattr(leaf_template, ""dtype""):\n return jax.ShapeDtypeStruct(\n leaf_template.shape, leaf_template.dtype, sharding=sharding_spec\n )\n return leaf_template\n\n return jax.tree_util.tree_map(map_fn, pytree_template)",python,tab +2338,5158915,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/genie.py",2118,0,"",python,selection_mouse +2339,5158916,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/genie.py",2117,0,"",python,selection_command +2340,5159043,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/genie.py",2118,0,"",python,selection_mouse +2341,5159053,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/genie.py",2117,0,"",python,selection_command +2342,5159191,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/genie.py",2109,10," )\n",python,selection_mouse +2343,5159197,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/genie.py",2110,9," )\n",python,selection_command +2344,5159739,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/genie.py",2095,0,"",python,selection_mouse +2345,5159885,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/genie.py",2088,19,"use_flash_attention",python,selection_mouse +2346,5160024,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/genie.py",2051,58," 
use_flash_attention=self.use_flash_attention,\n",python,selection_mouse +2347,5163889,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/genie.py",2994,0,"",python,selection_mouse +2348,5163890,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/genie.py",2993,0,"",python,selection_command +2349,5164724,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/genie.py",2676,0,"",python,selection_mouse +2350,5164863,"models/dynamics.py",0,0,"",python,tab +2351,5168406,"models/dynamics.py",2521,0,"",python,selection_mouse +2352,5168408,"models/dynamics.py",2520,0,"",python,selection_command +2353,5168571,"models/dynamics.py",2521,0,"",python,selection_mouse +2354,5168572,"models/dynamics.py",2520,0,"",python,selection_command +2355,5169478,"models/dynamics.py",2498,0,"",python,selection_mouse +2356,5169640,"models/dynamics.py",2492,12,"spacial_bert",python,selection_mouse +2357,5172060,"models/dynamics.py",388,0,"",python,selection_mouse +2358,5176752,"models/dynamics.py",2224,0,"",python,selection_mouse +2359,5177247,"models/dynamics.py",2215,0,"",python,selection_mouse +2360,5177630,"models/dynamics.py",2223,0,"\n use_flash_attention: bool",python,content +2361,5177631,"models/dynamics.py",2228,0,"",python,selection_command +2362,5180335,"models/dynamics.py",2530,0,"",python,selection_mouse +2363,5181174,"models/dynamics.py",2308,0,"",python,selection_mouse +2364,5181368,"utils/nn.py",0,0,"",python,tab +2365,5182809,"utils/nn.py",6478,0,"",python,selection_mouse +2366,5182954,"utils/nn.py",6472,12,"spatial_bert",python,selection_mouse +2367,5184241,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/genie.py",0,0,"",python,tab +2368,5186792,"utils/nn.py",0,0,"",python,tab +2369,5192075,"utils/nn.py",6310,0,"",python,selection_mouse +2370,5198827,"models/dynamics.py",0,0,"",python,tab +2371,5200602,"models/dynamics.py",678,0,"",python,selection_mouse +2372,5216905,"models/dynamics.py",678,1,"t",python,content +2373,5217146,"models/dynamics.py",0,0,"from typing import Dict, Any\n\nimport jax\nimport jax.numpy as jnp\nimport flax.linen as nn\nimport einops\n\nfrom utils.nn import STTransformer\n\n\nclass DynamicsMaskGIT(nn.Module):\n """"""MaskGIT dynamics model""""""\n\n model_dim: int\n num_latents: int\n num_blocks: int\n num_heads: int\n dropout: float\n mask_limit: float\n param_dtype: jnp.dtype\n dtype: jnp.dtype\n use_flash_attention: bool\n\n def setup(self):\n self.dynamics = STTransformer(\n self.model_dim,\n self.num_latents,\n self.num_blocks,\n self.num_heads,\n self.dropout,\n self.param_dtype,\n self.dtype,\n spatial_bert=True,\n use_flash_attention=self.use_flash_attention,\n )\n self.patch_embed = nn.Embed(self.num_latents, self.model_dim)\n self.mask_token = self.param(\n ""mask_token"",\n nn.initializers.lecun_uniform(),\n (1, 1, 1, self.model_dim),\n )\n self.action_up = nn.Dense(\n self.model_dim,\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n )\n\n def __call__(self, batch: Dict[str, Any], training: bool = True) -> Dict[str, Any]:\n # --- Mask videos ---\n vid_embed = self.patch_embed(batch[""video_tokens""])\n if training:\n rng1, rng2 = jax.random.split(batch[""mask_rng""])\n mask_prob = jax.random.uniform(rng1, minval=self.mask_limit)\n mask = jax.random.bernoulli(rng2, mask_prob, vid_embed.shape[:-1])\n mask = mask.at[:, 0].set(False)\n vid_embed = jnp.where(jnp.expand_dims(mask, -1), self.mask_token, vid_embed)\n else:\n mask = None\n\n # --- Predict transition ---\n act_embed = 
self.action_up(batch[""latent_actions""])\n vid_embed += jnp.pad(act_embed, ((0, 0), (1, 0), (0, 0), (0, 0)))\n logits = self.dynamics(vid_embed)\n return dict(token_logits=logits, mask=mask)\n\n\nclass DynamicsAutoregressive(nn.Module):\n """"""Autoregressive (causal) dynamics model""""""\n\n model_dim: int\n num_latents: int\n num_blocks: int\n num_heads: int\n dropout: float\n param_dtype: jnp.dtype\n dtype: jnp.dtype\n use_flash_attention: bool\n\n def setup(self):\n self.dynamics = STTransformer(\n self.model_dim,\n self.num_latents,\n self.num_blocks,\n self.num_heads,\n self.dropout,\n self.param_dtype,\n self.dtype,\n spacial_bert=False,\n )\n self.patch_embed = nn.Embed(self.num_latents, self.model_dim)\n self.action_up = nn.Dense(\n self.model_dim,\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n )\n\n def __call__(self, batch: Dict[str, Any], training: bool = True) -> Dict[str, Any]:\n vid_embed = self.patch_embed(batch[""video_tokens""])\n act_embed = self.action_up(batch[""latent_actions""])\n vid_embed += jnp.pad(act_embed, ((0, 0), (1, 0), (0, 0), (0, 0)))\n # vid_embed_padded = jnp.pad(vid_embed, ((0, 0), (1, 0), (1, 0), (0, 0)))\n # logits = self.dynamics(vid_embed_padded)[:, :-1, :-1]\n vid_embed_padded = jnp.pad(vid_embed, ((0, 0), (0, 0), (1, 0), (0, 0)))\n logits = self.dynamics(vid_embed_padded)[:, :, :-1]\n mask = jnp.ones(vid_embed.shape[:-1])\n # next_tokens = jnp.argmax(logits, axis=-1)\n # print(next_tokens.shape)\n # jax.debug.breakpoint()\n return dict(token_logits=logits, mask=mask)",python,tab +2374,5217148,"models/dynamics.py",2522,7,"spatial",python,content +2375,5217182,"models/dynamics.py",0,3553,"from typing import Dict, Any\n\nimport jax\nimport jax.numpy as jnp\nimport flax.linen as nn\nimport einops\n\nfrom utils.nn import STTransformer\n\n\nclass DynamicsMaskGIT(nn.Module):\n """"""MaskGIT dynamics model""""""\n\n model_dim: int\n num_latents: int\n num_blocks: int\n num_heads: int\n dropout: float\n mask_limit: float\n param_dtype: jnp.dtype\n dtype: jnp.dtype\n use_flash_attention: bool\n\n def setup(self):\n self.dynamics = STTransformer(\n self.model_dim,\n self.num_latents,\n self.num_blocks,\n self.num_heads,\n self.dropout,\n self.param_dtype,\n self.dtype,\n spatial_bert=True,\n use_flash_attention=self.use_flash_attention,\n )\n self.patch_embed = nn.Embed(self.num_latents, self.model_dim)\n self.mask_token = self.param(\n ""mask_token"",\n nn.initializers.lecun_uniform(),\n (1, 1, 1, self.model_dim),\n )\n self.action_up = nn.Dense(\n self.model_dim,\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n )\n\n def __call__(self, batch: Dict[str, Any], training: bool = True) -> Dict[str, Any]:\n # --- Mask videos ---\n vid_embed = self.patch_embed(batch[""video_tokens""])\n if training:\n rng1, rng2 = jax.random.split(batch[""mask_rng""])\n mask_prob = jax.random.uniform(rng1, minval=self.mask_limit)\n mask = jax.random.bernoulli(rng2, mask_prob, vid_embed.shape[:-1])\n mask = mask.at[:, 0].set(False)\n vid_embed = jnp.where(jnp.expand_dims(mask, -1), self.mask_token, vid_embed)\n else:\n mask = None\n\n # --- Predict transition ---\n act_embed = self.action_up(batch[""latent_actions""])\n vid_embed += jnp.pad(act_embed, ((0, 0), (1, 0), (0, 0), (0, 0)))\n logits = self.dynamics(vid_embed)\n return dict(token_logits=logits, mask=mask)\n\n\nclass DynamicsAutoregressive(nn.Module):\n """"""Autoregressive (causal) dynamics model""""""\n\n model_dim: int\n num_latents: int\n num_blocks: int\n num_heads: int\n dropout: float\n param_dtype: 
jnp.dtype\n dtype: jnp.dtype\n use_flash_attention: bool\n\n def setup(self):\n self.dynamics = STTransformer(\n self.model_dim,\n self.num_latents,\n self.num_blocks,\n self.num_heads,\n self.dropout,\n self.param_dtype,\n self.dtype,\n spacial_bert=False,\n )\n self.patch_embed = nn.Embed(self.num_latents, self.model_dim)\n self.action_up = nn.Dense(\n self.model_dim,\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n )\n\n def __call__(self, batch: Dict[str, Any], training: bool = True) -> Dict[str, Any]:\n vid_embed = self.patch_embed(batch[""video_tokens""])\n act_embed = self.action_up(batch[""latent_actions""])\n vid_embed += jnp.pad(act_embed, ((0, 0), (1, 0), (0, 0), (0, 0)))\n # vid_embed_padded = jnp.pad(vid_embed, ((0, 0), (1, 0), (1, 0), (0, 0)))\n # logits = self.dynamics(vid_embed_padded)[:, :-1, :-1]\n vid_embed_padded = jnp.pad(vid_embed, ((0, 0), (0, 0), (1, 0), (0, 0)))\n logits = self.dynamics(vid_embed_padded)[:, :, :-1]\n mask = jnp.ones(vid_embed.shape[:-1])\n # next_tokens = jnp.argmax(logits, axis=-1)\n # print(next_tokens.shape)\n # jax.debug.breakpoint()\n return dict(token_logits=logits, mask=mask)",python,content +2376,5217183,"models/dynamics.py",2522,7,"spatial",python,content +2377,5218052,"diff.log",0,0,"diff --git a/genie.py b/genie.py\nindex 0e66676..8186d03 100644\n--- a/genie.py\n+++ b/genie.py\n@@ -7,7 +7,7 @@ import flax.linen as nn\n from flax.training.train_state import TrainState\n import orbax.checkpoint as ocp\n \n-from models.dynamics import DynamicsMaskGIT\n+from models.dynamics import DynamicsMaskGIT, DynamicsAutoregressive\n from models.lam import LatentActionModel\n from models.tokenizer import TokenizerVQVAE\n \n@@ -38,6 +38,7 @@ class Genie(nn.Module):\n dyna_dim: int\n dyna_num_blocks: int\n dyna_num_heads: int\n+ use_maskgit: bool\n param_dtype: jnp.dtype\n dtype: jnp.dtype\n dropout: float = 0.0\n@@ -70,16 +71,28 @@ class Genie(nn.Module):\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n )\n- self.dynamics = DynamicsMaskGIT(\n- model_dim=self.dyna_dim,\n- num_latents=self.num_patch_latents,\n- num_blocks=self.dyna_num_blocks,\n- num_heads=self.dyna_num_heads,\n- dropout=self.dropout,\n- mask_limit=self.mask_limit,\n- param_dtype=self.param_dtype,\n- dtype=self.dtype,\n- )\n+\n+ if self.use_maskgit:\n+ self.dynamics = DynamicsMaskGIT(\n+ model_dim=self.dyna_dim,\n+ num_latents=self.num_patch_latents,\n+ num_blocks=self.dyna_num_blocks,\n+ num_heads=self.dyna_num_heads,\n+ dropout=self.dropout,\n+ mask_limit=self.mask_limit,\n+ param_dtype=self.param_dtype,\n+ dtype=self.dtype,\n+ ) \n+ else:\n+ self.dynamics = DynamicsAutoregressive(\n+ model_dim=self.dyna_dim,\n+ num_latents=self.num_patch_latents,\n+ num_blocks=self.dyna_num_blocks,\n+ num_heads=self.dyna_num_heads,\n+ dropout=self.dropout,\n+ param_dtype=self.param_dtype,\n+ dtype=self.dtype,\n+ )\n \n def __call__(self, batch: Dict[str, Any], training: bool = True) -> Dict[str, Any]:\n tokenizer_outputs = self.tokenizer.vq_encode(batch[""videos""], training=False)\ndiff --git a/models/dynamics.py b/models/dynamics.py\nindex 8b183dc..76af7a1 100644\n--- a/models/dynamics.py\n+++ b/models/dynamics.py\n@@ -28,6 +28,7 @@ class DynamicsMaskGIT(nn.Module):\n self.dropout,\n self.param_dtype,\n self.dtype,\n+ spacial_bert=True,\n )\n self.patch_embed = nn.Embed(self.num_latents, self.model_dim)\n self.mask_token = self.param(\n@@ -58,3 +59,41 @@ class DynamicsMaskGIT(nn.Module):\n vid_embed += jnp.pad(act_embed, ((0, 0), (1, 0), (0, 0), (0, 0)))\n logits = self.dynamics(vid_embed)\n 
return dict(token_logits=logits, mask=mask)\n+\n+\n+class DynamicsAutoregressive(nn.Module):\n+ """"""Autoregressive (causal) dynamics model""""""\n+\n+ model_dim: int\n+ num_latents: int\n+ num_blocks: int\n+ num_heads: int\n+ dropout: float\n+ param_dtype: jnp.dtype\n+ dtype: jnp.dtype\n+\n+ def setup(self):\n+ self.dynamics = STTransformer(\n+ self.model_dim,\n+ self.num_latents,\n+ self.num_blocks,\n+ self.num_heads,\n+ self.dropout,\n+ self.param_dtype,\n+ self.dtype,\n+ spacial_bert=False,\n+ )\n+ self.patch_embed = nn.Embed(self.num_latents, self.model_dim)\n+ self.action_up = nn.Dense(\n+ self.model_dim,\n+ param_dtype=self.param_dtype,\n+ dtype=self.dtype,\n+ )\n+\n+ def __call__(self, batch: Dict[str, Any], training: bool = True) -> Dict[str, Any]:\n+ vid_embed = self.patch_embed(batch[""video_tokens""])\n+ act_embed = self.action_up(batch[""latent_actions""])\n+ vid_embed += jnp.pad(act_embed, ((0, 0), (1, 0), (0, 0), (0, 0)))\n+ logits = self.dynamics(vid_embed)\n+ mask = jnp.ones(vid_embed.shape[:-1])\n+ return dict(token_logits=logits, mask=mask)\n\ No newline at end of file\ndiff --git a/train_dynamics.py b/train_dynamics.py\ndeleted file mode 100644\nindex a8e6a2a..0000000\n--- a/train_dynamics.py\n+++ /dev/null\n@@ -1,373 +0,0 @@\n-from dataclasses import dataclass, field\n-import os\n-\n-import einops\n-from flax.training.train_state import TrainState\n-from jax.sharding import Mesh, PartitionSpec, NamedSharding\n-from jax.experimental.mesh_utils import create_device_mesh\n-import optax\n-import orbax.checkpoint as ocp\n-import numpy as np\n-import dm_pix as pix\n-import jax\n-import jax.numpy as jnp\n-import tyro\n-import wandb\n-import grain\n-\n-from genie import Genie, restore_genie_components\n-from utils.dataloader import get_dataloader\n-from utils.lr_utils import get_lr_schedule\n-from utils.parameter_utils import count_parameters_by_component\n-\n-@dataclass\n-class Args:\n- # Experiment\n- num_steps: int = 200_000\n- seed: int = 0\n- seq_len: int = 16\n- image_channels: int = 3\n- image_height: int = 90\n- image_width: int = 160\n- data_dir: str = """"\n- save_ckpt: bool = False\n- restore_ckpt: bool = False\n- # Optimization\n- batch_size: int = 36\n- init_lr: float = 0.0\n- max_lr: float = 3e-5\n- decay_end: float = 0.0\n- wsd_decay_steps: int = 10000 # NOTE: wsd_decay_steps will only be used when using a wsd-schedule\n- warmup_steps: int = 5000\n- lr_schedule : str = ""wsd"" # supported options: wsd, cos\n- # Tokenizer\n- tokenizer_dim: int = 512\n- latent_patch_dim: int = 32\n- num_patch_latents: int = 1024\n- patch_size: int = 4\n- tokenizer_num_blocks: int = 8\n- tokenizer_num_heads: int = 8\n- tokenizer_checkpoint: str = """"\n- # LAM\n- lam_dim: int = 512\n- latent_action_dim: int = 32\n- num_latent_actions: int = 6\n- lam_patch_size: int = 16\n- lam_num_blocks: int = 8\n- lam_num_heads: int = 8\n- lam_checkpoint: str = """"\n- # Dynamics\n- dyna_dim: int = 512\n- dyna_num_blocks: int = 12\n- dyna_num_heads: int = 8\n- dropout: float = 0.0\n- mask_limit: float = 0.5\n- param_dtype: jnp.dtype = jnp.float32\n- dtype: jnp.dtype = jnp.bfloat16\n- # Logging\n- log: bool = False\n- entity: str = """"\n- project: str = """"\n- name: str = ""train_dynamics""\n- tags: list[str] = field(default_factory=lambda: [""dynamics""])\n- log_interval: int = 5\n- log_image_interval: int = 250\n- ckpt_dir: str = """"\n- log_checkpoint_interval: int = 25000\n- log_checkpoint_keep_period: int = 20000\n- log_gradients: bool = False\n- wandb_id: str = """"\n-\n-\n-args = 
tyro.cli(Args)\n-\n-\n-def dynamics_loss_fn(params, state, inputs):\n- """"""Compute masked dynamics loss""""""\n- inputs[""videos""] = inputs[""videos""].astype(args.dtype) / 255.0\n- outputs = state.apply_fn(\n- params,\n- inputs,\n- training=True,\n- rngs={""params"": inputs[""rng""], ""dropout"": inputs[""dropout_rng""]},\n- )\n- mask = outputs[""mask""]\n- outputs[""token_logits""] = outputs[""token_logits""].astype(jnp.float32)\n- ce_loss = optax.softmax_cross_entropy_with_integer_labels(\n- outputs[""token_logits""], outputs[""video_tokens""]\n- )\n- ce_loss = (mask * ce_loss).sum() / mask.sum()\n- acc = outputs[""token_logits""].argmax(-1) == outputs[""video_tokens""]\n- acc = (mask * acc).sum() / mask.sum()\n- select_probs = jax.nn.softmax(outputs[""token_logits""])\n- gt = inputs[""videos""].clip(0, 1).reshape(-1, *inputs[""videos""].shape[2:])\n- recon = outputs[""recon""].clip(0, 1).reshape(-1, *outputs[""recon""].shape[2:])\n- psnr = pix.psnr(gt, recon).mean() # type: ignore\n- ssim = pix.ssim(gt, recon).mean() # type: ignore\n- _, index_counts_lam = jnp.unique_counts(\n- jnp.ravel(outputs[""lam_indices""]), size=args.num_latent_actions, fill_value=0\n- )\n- _, index_counts_tokenizer = jnp.unique_counts(\n- jnp.ravel(outputs[""video_tokens""]), size=args.num_patch_latents, fill_value=0\n- )\n- codebook_usage_lam = (index_counts_lam != 0).mean()\n- codebook_usage_tokenizer = (index_counts_tokenizer != 0).mean()\n- metrics = dict(\n- cross_entropy_loss=ce_loss,\n- masked_token_accuracy=acc,\n- select_logit=outputs[""token_logits""].max(-1).mean(),\n- select_p=select_probs.max(-1).mean(),\n- entropy=jax.scipy.special.entr(select_probs).sum(-1).mean(),\n- psnr=psnr,\n- ssim=ssim,\n- codebook_usage_lam=codebook_usage_lam,\n- codebook_usage_tokenizer=codebook_usage_tokenizer,\n- )\n- return ce_loss, (outputs[""recon""], metrics)\n-\n-\n-@jax.jit\n-def train_step(state, inputs):\n- """"""Update state and compute metrics""""""\n- grad_fn = jax.value_and_grad(dynamics_loss_fn, has_aux=True, allow_int=True)\n- (loss, (recon, metrics)), grads = grad_fn(state.params, state, inputs)\n- state = state.apply_gradients(grads=grads)\n- if args.log_gradients:\n- metrics[""gradients_std/""] = jax.tree.map(\n- lambda x: x.std(), grads[""params""][""dynamics""]\n- )\n- return state, loss, recon, metrics\n-\n-\n-if __name__ == ""__main__"":\n- jax.distributed.initialize()\n- num_devices = jax.device_count()\n- if num_devices == 0:\n- raise ValueError(""No JAX devices found."")\n- print(f""Running on {num_devices} devices."")\n-\n- if args.batch_size % num_devices != 0:\n- raise ValueError(\n- f""Global batch size {args.batch_size} must be divisible by ""\n- f""number of devices {num_devices}.""\n- )\n-\n- per_device_batch_size_for_init = args.batch_size // num_devices\n-\n- rng = jax.random.PRNGKey(args.seed)\n-\n- # --- Initialize model ---\n- genie = Genie(\n- # Tokenizer\n- in_dim=args.image_channels,\n- tokenizer_dim=args.tokenizer_dim,\n- latent_patch_dim=args.latent_patch_dim,\n- num_patch_latents=args.num_patch_latents,\n- patch_size=args.patch_size,\n- tokenizer_num_blocks=args.tokenizer_num_blocks,\n- tokenizer_num_heads=args.tokenizer_num_heads,\n- # LAM\n- lam_dim=args.lam_dim,\n- latent_action_dim=args.latent_action_dim,\n- num_latent_actions=args.num_latent_actions,\n- lam_patch_size=args.lam_patch_size,\n- lam_num_blocks=args.lam_num_blocks,\n- lam_num_heads=args.lam_num_heads,\n- lam_co_train=not args.lam_checkpoint,\n- # Dynamics\n- dyna_dim=args.dyna_dim,\n- 
dyna_num_blocks=args.dyna_num_blocks,\n- dyna_num_heads=args.dyna_num_heads,\n- dropout=args.dropout,\n- mask_limit=args.mask_limit,\n- param_dtype=args.param_dtype,\n- dtype=args.dtype,\n- )\n- rng, _rng = jax.random.split(rng)\n- image_shape = (args.image_height, args.image_width, args.image_channels)\n- dummy_inputs = dict(\n- videos=jnp.zeros(\n- (per_device_batch_size_for_init, args.seq_len, *image_shape),\n- dtype=args.dtype,\n- ),\n- action=jnp.zeros(\n- (per_device_batch_size_for_init, args.seq_len), dtype=args.dtype\n- ),\n- mask_rng=_rng,\n- )\n- rng, _rng = jax.random.split(rng)\n- init_params = genie.init(_rng, dummy_inputs)\n-\n- param_counts = count_parameters_by_component(init_params)\n-\n- if args.log and jax.process_index() == 0:\n- wandb_init_kwargs = {\n- ""entity"": args.entity,\n- ""project"": args.project,\n- ""name"": args.name,\n- ""tags"": args.tags,\n- ""group"": ""debug"",\n- ""config"": args,\n- }\n-\n- if args.wandb_id:\n- wandb_init_kwargs.update(\n- {\n- ""id"": args.wandb_id,\n- ""resume"": ""allow"",\n- }\n- )\n- wandb.init(**wandb_init_kwargs)\n-\n- wandb.config.update({""model_param_count"": param_counts})\n-\n- print(""Parameter counts:"")\n- print(param_counts)\n-\n- # --- Initialize optimizer ---\n- lr_schedule = get_lr_schedule(args.lr_schedule, \n- args.init_lr, \n- args.max_lr, \n- args.decay_end, \n- args.num_steps, \n- args.warmup_steps, \n- args.wsd_decay_steps)\n- tx = optax.adamw(learning_rate=lr_schedule, b1=0.9, b2=0.9, weight_decay=1e-4, mu_dtype=args.dtype)\n- train_state = TrainState.create(apply_fn=genie.apply, params=init_params, tx=tx)\n-\n- device_mesh_arr = create_device_mesh((num_devices,))\n- mesh = Mesh(devices=device_mesh_arr, axis_names=(""data"",))\n-\n- replicated_sharding = NamedSharding(mesh, PartitionSpec())\n- videos_sharding = NamedSharding(mesh, PartitionSpec(""data"", None, None, None, None))\n- train_state = jax.device_put(train_state, replicated_sharding)\n-\n- # --- Initialize checkpoint manager ---\n- step = 0\n- handler_registry = ocp.handlers.DefaultCheckpointHandlerRegistry()\n- handler_registry.add(\n- ""model_state"", ocp.args.StandardSave, ocp.handlers.StandardCheckpointHandler\n- )\n- handler_registry.add(\n- ""model_state"", ocp.args.StandardRestore, ocp.handlers.StandardCheckpointHandler\n- )\n- handler_registry.add(""dataloader_state"", grain.checkpoint.CheckpointSave, grain.checkpoint.CheckpointHandler) # type: ignore\n- handler_registry.add(""dataloader_state"", grain.checkpoint.CheckpointRestore, grain.checkpoint.CheckpointHandler) # type: ignore\n-\n- checkpoint_options = ocp.CheckpointManagerOptions(\n- save_interval_steps=args.log_checkpoint_interval,\n- max_to_keep=3,\n- keep_period=args.log_checkpoint_keep_period,\n- step_format_fixed_length=6,\n- cleanup_tmp_directories=True,\n- )\n-\n- checkpoint_manager = ocp.CheckpointManager(\n- args.ckpt_dir,\n- options=checkpoint_options,\n- handler_registry=handler_registry,\n- )\n-\n- # --- Create DataLoaderIterator from dataloader ---\n- array_record_files = [\n- os.path.join(args.data_dir, x)\n- for x in os.listdir(args.data_dir)\n- if x.endswith("".array_record"")\n- ]\n- grain_dataloader = get_dataloader(\n- array_record_files,\n- args.seq_len,\n- # NOTE: We deliberately pass the global batch size\n- # The dataloader shards the dataset across all processes\n- args.batch_size,\n- *image_shape,\n- num_workers=8,\n- prefetch_buffer_size=1,\n- seed=args.seed,\n- )\n- initial_state = grain_dataloader._create_initial_state()\n- grain_iterator = 
grain.DataLoaderIterator(grain_dataloader, initial_state)\n-\n- # --- Restore checkpoint ---\n- if args.restore_ckpt:\n- # Restore full dynamics model\n- abstract_train_state = jax.tree_util.tree_map(\n- ocp.utils.to_shape_dtype_struct, train_state\n- )\n- restored = checkpoint_manager.restore(\n- checkpoint_manager.latest_step(),\n- args=ocp.args.Composite(\n- model_state=ocp.args.StandardRestore(abstract_train_state),\n- dataloader_state=grain.checkpoint.CheckpointRestore(grain_iterator),\n- ),\n- )\n- train_state = restored[""model_state""]\n- grain_iterator = restored[""dataloader_state""]\n- step = checkpoint_manager.latest_step() or 0\n- print(f""Restored dataloader and model state from step {step}"")\n- else:\n- # Restore from pre-trained tokenizer (and LAM)\n- train_state = restore_genie_components(\n- train_state, replicated_sharding, grain_iterator, dummy_inputs, rng, args\n- )\n-\n- # --- TRAIN LOOP ---\n- dataloader = (jax.make_array_from_process_local_data(videos_sharding, elem) for elem in grain_iterator) # type: ignore\n- while step < args.num_steps:\n- for videos in dataloader:\n- # --- Train step ---\n- rng, _rng, _rng_dropout, _rng_mask = jax.random.split(rng, 4)\n-\n- inputs = dict(\n- videos=videos,\n- rng=_rng,\n- dropout_rng=_rng_dropout,\n- mask_rng=_rng_mask,\n- )\n- train_state, loss, recon, metrics = train_step(train_state, inputs)\n- metrics[""lr""] = lr_schedule(step)\n- print(f""Step {step}, loss: {loss}"")\n- step += 1\n-\n- # --- Logging ---\n- if args.log:\n- if step % args.log_interval == 0 and jax.process_index() == 0:\n- wandb.log(\n- {\n- ""loss"": loss,\n- ""step"": step,\n- **metrics,\n- }\n- )\n- if step % args.log_image_interval == 0:\n- gt_seq = inputs[""videos""][0]\n- recon_seq = recon[0].clip(0, 1)\n- comparison_seq = jnp.concatenate((gt_seq, recon_seq), axis=1)\n- comparison_seq = einops.rearrange(\n- comparison_seq * 255, ""t h w c -> h (t w) c""\n- )\n- if jax.process_index() == 0:\n- log_images = dict(\n- image=wandb.Image(np.asarray(gt_seq[args.seq_len - 1])),\n- recon=wandb.Image(np.asarray(recon_seq[args.seq_len - 1])),\n- true_vs_recon=wandb.Image(\n- np.asarray(comparison_seq.astype(np.uint8))\n- ),\n- )\n- wandb.log(log_images)\n- # --- Checkpointing ---\n- if args.save_ckpt and step % args.log_checkpoint_interval == 0:\n- checkpoint_manager.save(\n- step,\n- args=ocp.args.Composite(\n- model_state=ocp.args.StandardSave(train_state),\n- dataloader_state=grain.checkpoint.CheckpointSave(\n- grain_iterator\n- ),\n- ),\n- )\n- print(f""Saved checkpoint at step {step}"")\n- if step >= args.num_steps:\n- break\n-\n- checkpoint_manager.close()\ndiff --git a/utils/nn.py b/utils/nn.py\nindex b7bec9f..3b64fa0 100644\n--- a/utils/nn.py\n+++ b/utils/nn.py\n@@ -26,6 +26,112 @@ class PositionalEncoding(nn.Module):\n x = x + self.pe[: x.shape[2]]\n return x\n \n+# class STBlock2(nn.Module):\n+ # dim: int\n+ # num_heads: int\n+ # dropout: float\n+ # param_dtype: jnp.dtype\n+ # dtype: jnp.dtype\n+\n+ # @nn.remat\n+ # @nn.compact\n+ # def __call__(self, x: jax.Array) -> jax.Array:\n+ # # --- Spatial attention ---\n+ # z = PositionalEncoding(self.dim)(x)\n+ # z = nn.LayerNorm(\n+ # param_dtype=self.param_dtype,\n+ # dtype=self.dtype,\n+ # )(z)\n+ # causal_mask = jnp.tri(z.shape[-2])\n+ # z = nn.MultiHeadAttention(\n+ # num_heads=self.num_heads,\n+ # qkv_features=self.dim,\n+ # dropout_rate=self.dropout,\n+ # param_dtype=self.param_dtype,\n+ # dtype=self.dtype,\n+ # )(z, mask=causal_mask)\n+ # x = x + z\n+\n+ # # --- Temporal attention ---\n+ # x = 
x.swapaxes(1, 2)\n+ # z = PositionalEncoding(self.dim)(x)\n+ # z = nn.LayerNorm(\n+ # param_dtype=self.param_dtype,\n+ # dtype=self.dtype,\n+ # )(z)\n+ # causal_mask = jnp.tri(z.shape[-2])\n+ # z = nn.MultiHeadAttention(\n+ # num_heads=self.num_heads,\n+ # qkv_features=self.dim,\n+ # dropout_rate=self.dropout,\n+ # param_dtype=self.param_dtype,\n+ # dtype=self.dtype,\n+ # )(z, mask=causal_mask)\n+ # x = x + z\n+ # x = x.swapaxes(1, 2)\n+\n+ # # --- Feedforward ---\n+ # z = nn.LayerNorm(\n+ # param_dtype=self.param_dtype,\n+ # dtype=self.dtype,\n+ # )(x)\n+ # # FIXME (f.srambical): Here, the attention hidden dimension is the same as the FFN's. Usually, FFN hidden dimension is 4x model_dim\n+ # z = nn.Dense(\n+ # self.dim,\n+ # param_dtype=self.param_dtype,\n+ # dtype=self.dtype,\n+ # )(z)\n+ # z = nn.gelu(z)\n+ # x = x + z\n+\n+ # return x\n+\n+# class CausalTransformer(nn.Module):\n+ # model_dim: int\n+ # out_dim: int\n+ # num_blocks: int\n+ # num_heads: int\n+ # dropout: float\n+ # param_dtype: jnp.dtype\n+ # dtype: jnp.dtype\n+\n+ # @nn.compact\n+ # def __call__(self, x: jax.Array) -> jax.Array:\n+ # # Input projection and normalization\n+ # x = nn.Sequential(\n+ # [\n+ # nn.LayerNorm(\n+ # param_dtype=self.param_dtype,\n+ # dtype=self.dtype,\n+ # ),\n+ # nn.Dense(self.model_dim,\n+ # param_dtype=self.param_dtype,\n+ # dtype=self.dtype,\n+ # ),\n+ # nn.LayerNorm(\n+ # param_dtype=self.param_dtype,\n+ # dtype=self.dtype,\n+ # ),\n+ # ]\n+ # )(x)\n+ # # Causal transformer blocks\n+ # for _ in range(self.num_blocks):\n+ # x = STBlock2(\n+ # dim=self.model_dim,\n+ # num_heads=self.num_heads,\n+ # dropout=self.dropout,\n+ # param_dtype=self.param_dtype,\n+ # dtype=self.dtype,\n+ # )(x)\n+\n+ # # Output projection\n+ # x = nn.Dense(\n+ # self.out_dim,\n+ # param_dtype=self.param_dtype,\n+ # dtype=self.dtype,\n+ # )(x)\n+ # return x # (B, T, E)\n+\n \n class STBlock(nn.Module):\n dim: int\n@@ -33,6 +139,7 @@ class STBlock(nn.Module):\n dropout: float\n param_dtype: jnp.dtype\n dtype: jnp.dtype\n+ spacial_bert: bool = True\n \n @nn.remat\n @nn.compact\n@@ -43,13 +150,14 @@ class STBlock(nn.Module):\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n )(z)\n+ spacial_mask = None if self.spacial_bert else jnp.tri(z.shape[-2])\n z = nn.MultiHeadAttention(\n num_heads=self.num_heads,\n qkv_features=self.dim,\n dropout_rate=self.dropout,\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n- )(z)\n+ )(z, mask=spacial_mask)\n x = x + z\n \n # --- Temporal attention ---\n@@ -95,6 +203,7 @@ class STTransformer(nn.Module):\n dropout: float\n param_dtype: jnp.dtype\n dtype: jnp.dtype\n+ spacial_bert: bool = True\n \n @nn.compact\n def __call__(self, x: jax.Array) -> jax.Array:\n@@ -121,6 +230,7 @@ class STTransformer(nn.Module):\n dropout=self.dropout,\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n+ spacial_bert=self.spacial_bert,\n )(x)\n x = nn.Dense(\n self.out_dim,\n",log,tab +2378,5218055,"diff.log",22937,7,"spatial",log,content +2379,5218056,"diff.log",22919,7,"spatial",log,content +2380,5218056,"diff.log",22632,7,"spatial",log,content +2381,5218056,"diff.log",22433,7,"spatial",log,content +2382,5218057,"diff.log",22136,7,"spatial",log,content +2383,5218057,"diff.log",22108,7,"spatial",log,content +2384,5218057,"diff.log",21904,7,"spatial",log,content +2385,5218057,"diff.log",3343,7,"spatial",log,content +2386,5218057,"diff.log",2449,7,"spatial",log,content +2387,5218123,"diff.log",2436,0,"",log,selection_command +2388,5220420,"models/dynamics.py",0,0,"",python,tab 
+2389,5232252,"models/dynamics.py",495,0,"",python,selection_mouse +2390,5232253,"models/dynamics.py",494,0,"",python,selection_command +2391,5232400,"models/dynamics.py",495,0,"",python,selection_mouse +2392,5232411,"models/dynamics.py",494,0,"",python,selection_command +2393,5232926,"models/dynamics.py",462,0,"",python,selection_mouse +2394,5233087,"models/dynamics.py",453,13,"STTransformer",python,selection_mouse +2395,5245589,"models/dynamics.py",690,0,"",python,selection_mouse +2396,5245754,"models/dynamics.py",688,4,"True",python,selection_mouse +2397,5251989,"models/dynamics.py",718,0,"",python,selection_mouse +2398,5252856,"models/dynamics.py",690,0,"",python,selection_mouse +2399,5258954,"models/dynamics.py",2540,0,"",python,selection_mouse +2400,5259495,"models/dynamics.py",2538,0,"",python,selection_mouse +2401,5259641,"models/dynamics.py",2535,5,"False",python,selection_mouse +2402,5265053,"models/dynamics.py",2535,0,"",python,selection_mouse +2403,5265054,"models/dynamics.py",2535,5,"False",python,selection_mouse +2404,5265674,"models/dynamics.py",2535,0,"",python,selection_mouse +2405,5266047,"models/dynamics.py",2525,0,"",python,selection_mouse +2406,5266196,"models/dynamics.py",2522,12,"spatial_bert",python,selection_mouse +2407,5266445,"models/dynamics.py",2522,18,"spatial_bert=False",python,selection_mouse +2408,5267155,"models/dynamics.py",2540,0,"",python,selection_mouse +2409,5267617,"models/dynamics.py",2539,1,"e",python,selection_mouse +2410,5267631,"models/dynamics.py",2538,2,"se",python,selection_mouse +2411,5267685,"models/dynamics.py",2537,3,"lse",python,selection_mouse +2412,5267691,"models/dynamics.py",2535,5,"False",python,selection_mouse +2413,5267692,"models/dynamics.py",2532,8,"rt=False",python,selection_mouse +2414,5267708,"models/dynamics.py",2530,10,"bert=False",python,selection_mouse +2415,5267722,"models/dynamics.py",2529,11,"_bert=False",python,selection_mouse +2416,5267775,"models/dynamics.py",2527,13,"al_bert=False",python,selection_mouse +2417,5267776,"models/dynamics.py",2525,15,"tial_bert=False",python,selection_mouse +2418,5267776,"models/dynamics.py",2524,16,"atial_bert=False",python,selection_mouse +2419,5267780,"models/dynamics.py",2523,17,"patial_bert=False",python,selection_mouse +2420,5267796,"models/dynamics.py",2522,18,"spatial_bert=False",python,selection_mouse +2421,5267811,"models/dynamics.py",2521,19," spatial_bert=False",python,selection_mouse +2422,5267829,"models/dynamics.py",2520,20," spatial_bert=False",python,selection_mouse +2423,5267890,"models/dynamics.py",2519,21," spatial_bert=False",python,selection_mouse +2424,5268200,"models/dynamics.py",2519,0,"",python,selection_mouse +2425,5270254,"models/dynamics.py",2307,0,"",python,selection_mouse +2426,5270406,"utils/nn.py",0,0,"",python,tab +2427,5276914,"utils/nn.py",7415,0,"",python,selection_mouse +2428,5277034,"utils/nn.py",7404,19,"use_flash_attention",python,selection_mouse +2429,5278759,"utils/nn.py",6514,0,"",python,selection_mouse +2430,5278906,"utils/nn.py",6502,19,"use_flash_attention",python,selection_mouse +2431,5280840,"utils/nn.py",7413,0,"",python,selection_mouse +2432,5295041,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/utils/nn.py",0,0,"import math\nfrom typing import Tuple\nfrom functools import partial\n\nfrom flax import linen as nn\nimport jax\nimport jax.numpy as jnp\nimport einops\n\n\nclass PositionalEncoding(nn.Module):\n 
""""""https://uvadlc-notebooks.readthedocs.io/en/latest/tutorial_notebooks/JAX/tutorial6/Transformers_and_MHAttention.html""""""\n\n d_model: int # Hidden dimensionality of the input.\n max_len: int = 5000 # Maximum length of a sequence to expect.\n\n def setup(self):\n # Create matrix of [SeqLen, HiddenDim] representing the positional encoding for max_len inputs\n self.pe = jnp.zeros((self.max_len, self.d_model))\n position = jnp.arange(0, self.max_len, dtype=jnp.float32)[:, None]\n div_term = jnp.exp(\n jnp.arange(0, self.d_model, 2) * (-math.log(10000.0) / self.d_model)\n )\n self.pe = self.pe.at[:, 0::2].set(jnp.sin(position * div_term))\n self.pe = self.pe.at[:, 1::2].set(jnp.cos(position * div_term))\n\n def __call__(self, x):\n x = x + self.pe[: x.shape[2]]\n return x\n\n# class STBlock2(nn.Module):\n # dim: int\n # num_heads: int\n # dropout: float\n # param_dtype: jnp.dtype\n # dtype: jnp.dtype\n\n # @nn.remat\n # @nn.compact\n # def __call__(self, x: jax.Array) -> jax.Array:\n # # --- Spatial attention ---\n # z = PositionalEncoding(self.dim)(x)\n # z = nn.LayerNorm(\n # param_dtype=self.param_dtype,\n # dtype=self.dtype,\n # )(z)\n # causal_mask = jnp.tri(z.shape[-2])\n # z = nn.MultiHeadAttention(\n # num_heads=self.num_heads,\n # qkv_features=self.dim,\n # dropout_rate=self.dropout,\n # param_dtype=self.param_dtype,\n # dtype=self.dtype,\n # )(z, mask=causal_mask)\n # x = x + z\n\n # # --- Temporal attention ---\n # x = x.swapaxes(1, 2)\n # z = PositionalEncoding(self.dim)(x)\n # z = nn.LayerNorm(\n # param_dtype=self.param_dtype,\n # dtype=self.dtype,\n # )(z)\n # causal_mask = jnp.tri(z.shape[-2])\n # z = nn.MultiHeadAttention(\n # num_heads=self.num_heads,\n # qkv_features=self.dim,\n # dropout_rate=self.dropout,\n # param_dtype=self.param_dtype,\n # dtype=self.dtype,\n # )(z, mask=causal_mask)\n # x = x + z\n # x = x.swapaxes(1, 2)\n\n # # --- Feedforward ---\n # z = nn.LayerNorm(\n # param_dtype=self.param_dtype,\n # dtype=self.dtype,\n # )(x)\n # # FIXME (f.srambical): Here, the attention hidden dimension is the same as the FFN's. 
Usually, FFN hidden dimension is 4x model_dim\n # z = nn.Dense(\n # self.dim,\n # param_dtype=self.param_dtype,\n # dtype=self.dtype,\n # )(z)\n # z = nn.gelu(z)\n # x = x + z\n\n # return x\n\n# class CausalTransformer(nn.Module):\n # model_dim: int\n # out_dim: int\n # num_blocks: int\n # num_heads: int\n # dropout: float\n # param_dtype: jnp.dtype\n # dtype: jnp.dtype\n\n # @nn.compact\n # def __call__(self, x: jax.Array) -> jax.Array:\n # # Input projection and normalization\n # x = nn.Sequential(\n # [\n # nn.LayerNorm(\n # param_dtype=self.param_dtype,\n # dtype=self.dtype,\n # ),\n # nn.Dense(self.model_dim,\n # param_dtype=self.param_dtype,\n # dtype=self.dtype,\n # ),\n # nn.LayerNorm(\n # param_dtype=self.param_dtype,\n # dtype=self.dtype,\n # ),\n # ]\n # )(x)\n # # Causal transformer blocks\n # for _ in range(self.num_blocks):\n # x = STBlock2(\n # dim=self.model_dim,\n # num_heads=self.num_heads,\n # dropout=self.dropout,\n # param_dtype=self.param_dtype,\n # dtype=self.dtype,\n # )(x)\n\n # # Output projection\n # x = nn.Dense(\n # self.out_dim,\n # param_dtype=self.param_dtype,\n # dtype=self.dtype,\n # )(x)\n # return x # (B, T, E)\n\n\nclass STBlock(nn.Module):\n dim: int\n num_heads: int\n dropout: float\n param_dtype: jnp.dtype\n dtype: jnp.dtype\n use_flash_attention: bool\n spatial_bert: bool = True\n\n @nn.remat\n @nn.compact\n def __call__(self, x: jax.Array) -> jax.Array:\n # --- Spatial attention ---\n z = PositionalEncoding(self.dim)(x)\n z = nn.LayerNorm(\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n )(z)\n z = nn.MultiHeadAttention(\n num_heads=self.num_heads,\n qkv_features=self.dim,\n dropout_rate=self.dropout,\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n attention_fn=_create_flash_attention_fn(self.use_flash_attention, is_causal=not self.spatial_bert),\n )(z)\n x = x + z\n\n # --- Temporal attention ---\n x = x.swapaxes(1, 2)\n z = PositionalEncoding(self.dim)(x)\n z = nn.LayerNorm(\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n )(z)\n causal_mask = jnp.tri(z.shape[-2])\n z = nn.MultiHeadAttention(\n num_heads=self.num_heads,\n qkv_features=self.dim,\n dropout_rate=self.dropout,\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n attention_fn=_create_flash_attention_fn(self.use_flash_attention, is_causal=True),\n # FIXME (f.srambical): check whether we should still pass the mask if we set is_causal=True\n )(z)\n x = x + z\n x = x.swapaxes(1, 2)\n\n # --- Feedforward ---\n z = nn.LayerNorm(\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n )(x)\n # FIXME (f.srambical): Here, the attention hidden dimension is the same as the FFN's. 
Usually, FFN hidden dimension is 4x model_dim\n z = nn.Dense(\n self.dim,\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n )(z)\n z = nn.gelu(z)\n x = x + z\n\n return x\n\n\nclass STTransformer(nn.Module):\n model_dim: int\n out_dim: int\n num_blocks: int\n num_heads: int\n dropout: float\n param_dtype: jnp.dtype\n dtype: jnp.dtype\n spatial_bert: bool = True\n use_flash_attention: bool\n\n @nn.compact\n def __call__(self, x: jax.Array) -> jax.Array:\n x = nn.Sequential(\n [\n nn.LayerNorm(\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n ),\n nn.Dense(self.model_dim,\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n ),\n nn.LayerNorm(\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n ),\n ]\n )(x)\n for _ in range(self.num_blocks):\n x = STBlock(\n dim=self.model_dim,\n num_heads=self.num_heads,\n dropout=self.dropout,\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n spatial_bert=self.spatial_bert,\n use_flash_attention=self.use_flash_attention,\n )(x)\n x = nn.Dense(\n self.out_dim,\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n )(x)\n return x # (B, T, E)\n\n\ndef normalize(x):\n return x / (jnp.linalg.norm(x, ord=2, axis=-1, keepdims=True) + 1e-8)\n\n\nclass VectorQuantizer(nn.Module):\n latent_dim: int\n num_latents: int\n dropout: float\n\n def setup(self):\n self.codebook = normalize(\n self.param(\n ""codebook"",\n nn.initializers.lecun_uniform(),\n (self.num_latents, self.latent_dim),\n )\n )\n self.drop = nn.Dropout(self.dropout, deterministic=False)\n\n def __call__(\n self, x: jax.Array, training: bool\n ) -> Tuple[jax.Array, jax.Array, jax.Array, jax.Array]:\n # --- Compute distances ---\n x = normalize(x)\n codebook = normalize(self.codebook)\n distance = -jnp.matmul(x, codebook.T)\n if training:\n dropout_key = self.make_rng(""dropout"")\n distance = self.drop(distance, rng=dropout_key)\n\n # --- Get indices and embeddings ---\n indices = jnp.argmin(distance, axis=-1)\n z = self.codebook[indices]\n\n # --- Straight through estimator ---\n z_q = x + jax.lax.stop_gradient(z - x)\n return z_q, z, x, indices\n\n def get_codes(self, indices: jax.Array):\n return self.codebook[indices]\n\n\ndef _create_flash_attention_fn(use_flash_attention: bool, is_causal: bool):\n """"""\n Create an attention function that uses flash attention if enabled.\n\n Flax MultiHeadAttention provides tensors with shape (batch..., length, num_heads, head_dim)\n jax.nn.dot_product_attention expects (batch, length, num_heads, head_dim).\n\n We need to reshape to ensure compatibility. cuDNN's flash attention additionally\n requires a sequence length that is a multiple of 4. We pad the sequence length to the nearest\n multiple of 4 and mask accordingly.\n """"""\n \n def attention_fn(query, key, value, bias=None, mask=None, **kwargs):\n implementation = 'cudnn' if use_flash_attention else None\n\n def _rearrange(x):\n return einops.rearrange(x, '... l h d -> (...) 
l h d')\n def _pad(x):\n return jnp.pad(x, ((0, 0), (0, pad_size), (0, 0), (0, 0)))\n def _fuse_masks(mask: jax.Array, attention_mask: jax.Array) -> jax.Array:\n mask_bool = mask.astype(jnp.bool_)\n expanded_mask = jnp.pad(mask_bool, ((0, pad_size), (0, pad_size)), constant_values=False)\n return jnp.logical_and(attention_mask, expanded_mask)\n \n original_shape = query.shape\n original_seq_len = query.shape[-3]\n \n # Pad to nearest multiple of 4\n target_seq_len = ((original_seq_len + 3) // 4) * 4\n pad_size = target_seq_len - original_seq_len\n\n query_4d = _pad(_rearrange(query))\n key_4d = _pad(_rearrange(key))\n value_4d = _pad(_rearrange(value))\n \n attention_mask = jnp.ones((target_seq_len, target_seq_len), dtype=jnp.bool_)\n attention_mask = attention_mask.at[original_seq_len:, :].set(False)\n attention_mask = attention_mask.at[:, original_seq_len:].set(False)\n\n mask_4d = _fuse_masks(mask, attention_mask) if mask is not None else attention_mask\n mask_4d = mask_4d[jnp.newaxis, jnp.newaxis, :, :] # (1, 1, seq_len, seq_len)\n \n bias_4d = _pad(_rearrange(bias)) if bias is not None else None\n \n output_4d = jax.nn.dot_product_attention(\n query=query_4d,\n key=key_4d,\n value=value_4d,\n bias=bias_4d,\n mask=mask_4d,\n implementation=implementation,\n is_causal=is_causal,\n **kwargs\n )\n return output_4d[..., :original_seq_len, :, :].reshape(original_shape)\n \n return attention_fn\n\n",python,tab +2433,5296244,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/utils/nn.py",6528,0,"",python,selection_mouse +2434,5296781,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/utils/nn.py",6516,0,"",python,selection_mouse +2435,5297001,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/utils/nn.py",6502,19,"use_flash_attention",python,selection_mouse +2436,5297614,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/utils/nn.py",6274,0,"",python,selection_mouse +2437,5298455,"TERMINAL",0,0,"srun",,terminal_focus +2438,5299785,"TERMINAL",0,0,"srun",,terminal_focus +2439,5301769,"TERMINAL",0,0,"sh slurm/jobs/mihir/horeka/yolo-runs/sampling.sh /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/interactive/shift-spatial",,terminal_output +2440,5302905,"TERMINAL",0,0,"\r\n[?2004l\r\r\n# Log the sbatch script\r\ncat $0\r\n\r\nmodule unload mpi/openmpi/5.0\r\nmodule unload devel/cuda/12.4\r\n# source .venv/bin/activate\r\n\r\narray_records_dir=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_new/open_ai_minecraft_arrayrecords_chunked\r\n\r\njob_name=$SLURM_JOB_NAME\r\nslurm_job_id=$SLURM_JOB_ID\r\n\r\nCHECKPOINT_DIR=$ws_dir/checkpoints/$job_name/$slurm_job_id\r\nmkdir -p $CHECKPOINT_DIR\r\n\r\ntokenizer_ckpt_dir=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/big-runs/tokenizer-lr-scaling/train_tokenizer_lr_sweep_1e-4\r\ndynamics_ckpt_dir=$1\r\necho $dynamics_ckpt_dir\r\n\r\nenv | grep SLURM\r\n\r\npython sample.py \\r\n --checkpoint $dynamics_ckpt_dir \\r\n --dyna_dim=128 \\r\n --dyna_num_blocks=2 \\r\n --dyna_num_heads=4 \\r\n --seq_len=2 \\r\n --data_dir $array_records_dir\r\n\r\n",,terminal_output 
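The utils/nn.py dump in row 2432 above ends with `_create_flash_attention_fn`, which pads query/key/value to a sequence length that is a multiple of 4 because cuDNN's flash attention requires it, then masks the padded slots out. A minimal standalone sketch of that pad-and-mask logic, with a hypothetical function name and `implementation=None` so it runs anywhere:

import jax
import jax.numpy as jnp

def padded_attention(q, k, v):
    # q, k, v: (batch, seq_len, num_heads, head_dim), as flax MultiHeadAttention provides
    orig_len = q.shape[1]
    target_len = ((orig_len + 3) // 4) * 4          # round seq_len up to a multiple of 4
    pad = target_len - orig_len
    pad_fn = lambda x: jnp.pad(x, ((0, 0), (0, pad), (0, 0), (0, 0)))
    q, k, v = pad_fn(q), pad_fn(k), pad_fn(v)
    # Padded positions may neither attend nor be attended to.
    mask = jnp.ones((target_len, target_len), dtype=jnp.bool_)
    mask = mask.at[orig_len:, :].set(False).at[:, orig_len:].set(False)
    out = jax.nn.dot_product_attention(q, k, v, mask=mask[None, None])
    return out[:, :orig_len]                        # strip the padding again

q = k = v = jnp.ones((2, 5, 4, 8))                  # seq_len 5 is padded to 8 internally
print(padded_attention(q, k, v).shape)              # (2, 5, 4, 8)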
+2441,5303021,"TERMINAL",0,0,"/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/interactive/shift-spatial\r\nSLURM_STEP_NUM_TASKS=1\r\nSLURM_JOB_USER=tum_cte0515\r\nSLURM_TASKS_PER_NODE=1\r\nSLURM_JOB_UID=999226\r\nSLURM_TASK_PID=1431282\r\nSLURM_JOB_GPUS=1\r\nSLURM_LOCALID=0\r\nSLURM_SUBMIT_DIR=/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar\r\nSLURMD_NODENAME=hkn0401\r\nSLURM_JOB_START_TIME=1753105533\r\nSLURM_STEP_NODELIST=hkn0401\r\nSLURM_CLUSTER_NAME=hk\r\nSLURM_JOB_END_TIME=1753109133\r\nSLURM_PMI2_SRUN_PORT=40535\r\nSLURM_CPUS_ON_NODE=6\r\nSLURM_JOB_CPUS_PER_NODE=6\r\nSLURM_GPUS_ON_NODE=1\r\nSLURM_GTIDS=0\r\nSLURM_JOB_PARTITION=dev_accelerated\r\nSLURM_TRES_PER_TASK=cpu=5\r\nSLURM_OOM_KILL_STEP=0\r\nSLURM_JOB_NUM_NODES=1\r\nSLURM_STEPID=4294967290\r\nSLURM_JOBID=3365334\r\nSLURM_PTY_PORT=39479\r\nSLURM_JOB_QOS=normal\r\nSLURM_LAUNCH_NODE_IPADDR=10.0.7.198\r\nSLURM_PTY_WIN_ROW=29\r\nSLURM_PMI2_PROC_MAPPING=(vector,(0,1,1))\r\nSLURMD_DEBUG=2\r\nSLURM_PROCID=0\r\nSLURM_CPUS_PER_TASK=5\r\nSLURM_NTASKS=1\r\nSLURM_TOPOLOGY_ADDR=hkibb.hkibbi1.hkibbi1e11.hkn0401\r\nSLURM_TOPOLOGY_ADDR_PATTERN=switch.switch.switch.node\r\nSLURM_SRUN_COMM_HOST=10.0.7.198\r\nSLURM_SCRIPT_CONTEXT=prolog_task\r\nSLURM_PTY_WIN_COL=197\r\nSLURM_NODELIST=hkn0401\r\nSLURM_SRUN_COMM_PORT=38425\r\nSLURM_STEP_ID=4294967290\r\nSLURM_JOB_ACCOUNT=hk-project-p0023960\r\nSLURM_PRIO_PROCESS=0\r\nSLURM_NPROCS=1\r\nSLURM_NNODES=1\r\nSLURM_SUBMIT_HOST=hkn1990.localdomain\r\nSLURM_JOB_ID=3365334\r\nSLURM_NODEID=0\r\nSLURM_STEP_NUM_NODES=1\r\nSLURM_STEP_TASKS_PER_NODE=1\r\nSLURM_MPI_TYPE=pmi2\r\nSLURM_PMI2_STEP_NODES=hkn0401\r\nSLURM_CONF=/etc/slurm/slurm.conf\r\nSLURM_JOB_NAME=interactive\r\nSLURM_NTASKS_PER_NODE=1\r\nSLURM_STEP_LAUNCHER_PORT=38425\r\nSLURM_JOB_GID=502226\r\nSLURM_JOB_NODELIST=hkn0401\r\n",,terminal_output +2442,5303938,"TERMINAL",0,0,"Traceback (most recent call last):\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/sample.py"", line 19, in \r\n from genie import Genie\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/genie.py"", line 10, in \r\n from models.dynamics import DynamicsMaskGIT, DynamicsAutoregressive\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/models/dynamics.py"", line 8, in \r\n from utils.nn import STTransformer\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/utils/nn.py"", line 203, in \r\n class STTransformer(nn.Module):\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/flax/linen/module.py"", line 1042, in __init_subclass__\r\n cls._customized_dataclass_transform(kw_only)\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/flax/linen/module.py"", line 1114, in _customized_dataclass_transform\r\n kw_only_dataclasses.dataclass(\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/flax/linen/kw_only_dataclasses.py"", line 125, in dataclass\r\n return wrap if cls is None else wrap(cls)\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/flax/linen/kw_only_dataclasses.py"", line 123, in wrap\r\n return _process_class(cls, extra_fields=extra_fields, **kwargs)\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/flax/linen/kw_only_dataclasses.py"", line 
210, in _process_class\r\n transformed_cls: type[M] = dataclasses.dataclass(cls, **kwargs)\r\n File ""/home/hk-project-p0023960/tum_cte0515/.local/share/uv/python/cpython-3.10.18-linux-x86_64-gnu/lib/python3.10/dataclasses.py"", line 1184, in dataclass\r\n return wrap(cls)\r\n File ""/home/hk-project-p0023960/tum_cte0515/.local/share/uv/python/cpython-3.10.18-linux-x86_64-gnu/lib/python3.10/dataclasses.py"", line 1175, in wrap\r\n return _process_class(cls, init, repr, eq, order, unsafe_hash,\r\n File ""/home/hk-project-p0023960/tum_cte0515/.local/share/uv/python/cpython-3.10.18-linux-x86_64-gnu/lib/python3.10/dataclasses.py"", line 1024, in _process_class\r\n _init_fn(all_init_fields,\r\n File ""/home/hk-project-p0023960/tum_cte0515/.local/share/uv/python/cpython-3.10.18-linux-x86_64-gnu/lib/python3.10/dataclasses.py"", line 544, in _init_fn\r\n raise TypeError(f'non-default argument {f.name!r} '\r\nTypeError: non-default argument 'use_flash_attention' follows default argument\r\n",,terminal_output +2443,5304050,"TERMINAL",0,0,"]0;tum_cte0515@hkn0401:~/Projects/jafar[?2004h(jafar) [tum_cte0515@hkn0401 jafar]$ ",,terminal_output +2444,5313820,"slurm/jobs/mihir/horeka/yolo-runs/sampling.sh",0,0,"",shellscript,tab +2445,5314990,"slurm/jobs/mihir/horeka/yolo-runs/sampling.sh",576,0,"",shellscript,selection_command +2446,5315174,"slurm/jobs/mihir/horeka/yolo-runs/sampling.sh",593,0,"",shellscript,selection_command +2447,5315308,"slurm/jobs/mihir/horeka/yolo-runs/sampling.sh",594,0,"",shellscript,selection_command +2448,5316516,"slurm/jobs/mihir/horeka/yolo-runs/sampling.sh",594,0,"s",shellscript,content +2449,5316517,"slurm/jobs/mihir/horeka/yolo-runs/sampling.sh",595,0,"",shellscript,selection_keyboard +2450,5316728,"slurm/jobs/mihir/horeka/yolo-runs/sampling.sh",595,0,"r",shellscript,content +2451,5316729,"slurm/jobs/mihir/horeka/yolo-runs/sampling.sh",596,0,"",shellscript,selection_keyboard +2452,5316875,"slurm/jobs/mihir/horeka/yolo-runs/sampling.sh",596,0,"u",shellscript,content +2453,5316876,"slurm/jobs/mihir/horeka/yolo-runs/sampling.sh",597,0,"",shellscript,selection_keyboard +2454,5316943,"slurm/jobs/mihir/horeka/yolo-runs/sampling.sh",597,0,"n",shellscript,content +2455,5316944,"slurm/jobs/mihir/horeka/yolo-runs/sampling.sh",598,0,"",shellscript,selection_keyboard +2456,5317039,"slurm/jobs/mihir/horeka/yolo-runs/sampling.sh",598,0," ",shellscript,content +2457,5317040,"slurm/jobs/mihir/horeka/yolo-runs/sampling.sh",599,0,"",shellscript,selection_keyboard +2458,5317530,"slurm/jobs/mihir/horeka/yolo-runs/sampling.sh",598,0,"",shellscript,selection_command +2459,5333689,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/utils/nn.py",0,0,"",python,tab +2460,5335119,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/utils/nn.py",6527,0,"",python,selection_mouse +2461,5335120,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/utils/nn.py",6526,0,"",python,selection_command +2462,5336049,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/utils/nn.py",6498,30,"",python,content +2463,5336170,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/utils/nn.py",6468,0,"",python,selection_command +2464,5336471,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/utils/nn.py",6447,0,"",python,selection_command +2465,5336654,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/utils/nn.py",6467,0,"\n use_flash_attention: bool",python,content 
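The TypeError above is the standard Python dataclass field-ordering rule surfacing through flax: linen Modules are dataclasses, so the non-default field `use_flash_attention` may not follow the defaulted `spatial_bert: bool = True`. The edits at sequences 2462-2465 fix this by moving the field above the defaulted one in utils/nn.py. A minimal reproduction of the rule with plain dataclasses and a hypothetical class name:

import dataclasses

# Broken ordering, as in the traceback above (raises at class-creation time):
#     spatial_bert: bool = True
#     use_flash_attention: bool   -> TypeError: non-default argument
#                                    'use_flash_attention' follows default argument

@dataclasses.dataclass
class STTransformerFields:          # hypothetical stand-in for the nn.Module
    use_flash_attention: bool       # non-default fields must come first
    spatial_bert: bool = True       # defaulted fields follow

STTransformerFields(use_flash_attention=True)   # constructs fine once reordered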
+2466,5336662,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/utils/nn.py",6472,0,"",python,selection_command +2467,5351193,"models/dynamics.py",0,0,"",python,tab +2468,5352528,"models/dynamics.py",662,0,"",python,selection_mouse +2469,5352528,"models/dynamics.py",661,0,"",python,selection_command +2470,5353105,"models/dynamics.py",688,0,"",python,selection_mouse +2471,5359386,"utils/nn.py",0,0,"",python,tab +2472,5371762,"models/dynamics.py",0,0,"",python,tab +2473,5374532,"TERMINAL",0,0,"sh slurm/jobs/mihir/horeka/yolo-runs/sampling.sh /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/interactive/shift-spatial",,terminal_output +2474,5374851,"TERMINAL",0,0,"\r\n[?2004l\r\r\n# Log the sbatch script\r\ncat $0\r\n\r\nmodule unload mpi/openmpi/5.0\r\nmodule unload devel/cuda/12.4\r\n# source .venv/bin/activate\r\n\r\narray_records_dir=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_new/open_ai_minecraft_arrayrecords_chunked\r\n\r\njob_name=$SLURM_JOB_NAME\r\nslurm_job_id=$SLURM_JOB_ID\r\n\r\nCHECKPOINT_DIR=$ws_dir/checkpoints/$job_name/$slurm_job_id\r\nmkdir -p $CHECKPOINT_DIR\r\n\r\ntokenizer_ckpt_dir=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/big-runs/tokenizer-lr-scaling/train_tokenizer_lr_sweep_1e-4\r\ndynamics_ckpt_dir=$1\r\necho $dynamics_ckpt_dir\r\n\r\nenv | grep SLURM\r\n\r\nsrun python sample.py \\r\n --checkpoint $dynamics_ckpt_dir \\r\n --dyna_dim=128 \\r\n --dyna_num_blocks=2 \\r\n --dyna_num_heads=4 \\r\n --seq_len=2 \\r\n --data_dir $array_records_dir\r\n\r\n",,terminal_output +2475,5374999,"TERMINAL",0,0,"/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/interactive/shift-spatial\r\nSLURM_STEP_NUM_TASKS=1\r\nSLURM_JOB_USER=tum_cte0515\r\nSLURM_TASKS_PER_NODE=1\r\nSLURM_JOB_UID=999226\r\nSLURM_TASK_PID=1431282\r\nSLURM_JOB_GPUS=1\r\nSLURM_LOCALID=0\r\nSLURM_SUBMIT_DIR=/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar\r\nSLURMD_NODENAME=hkn0401\r\nSLURM_JOB_START_TIME=1753105533\r\nSLURM_STEP_NODELIST=hkn0401\r\nSLURM_CLUSTER_NAME=hk\r\nSLURM_JOB_END_TIME=1753109133\r\nSLURM_PMI2_SRUN_PORT=40535\r\nSLURM_CPUS_ON_NODE=6\r\nSLURM_JOB_CPUS_PER_NODE=6\r\nSLURM_GPUS_ON_NODE=1\r\nSLURM_GTIDS=0\r\nSLURM_JOB_PARTITION=dev_accelerated\r\nSLURM_TRES_PER_TASK=cpu=5\r\nSLURM_OOM_KILL_STEP=0\r\nSLURM_JOB_NUM_NODES=1\r\nSLURM_STEPID=4294967290\r\nSLURM_JOBID=3365334\r\nSLURM_PTY_PORT=39479\r\nSLURM_JOB_QOS=normal\r\nSLURM_LAUNCH_NODE_IPADDR=10.0.7.198\r\nSLURM_PTY_WIN_ROW=29\r\nSLURM_PMI2_PROC_MAPPING=(vector,(0,1,1))\r\nSLURMD_DEBUG=2\r\nSLURM_PROCID=0\r\nSLURM_CPUS_PER_TASK=5\r\nSLURM_NTASKS=1\r\nSLURM_TOPOLOGY_ADDR=hkibb.hkibbi1.hkibbi1e11.hkn0401\r\nSLURM_TOPOLOGY_ADDR_PATTERN=switch.switch.switch.node\r\nSLURM_SRUN_COMM_HOST=10.0.7.198\r\nSLURM_SCRIPT_CONTEXT=prolog_task\r\nSLURM_PTY_WIN_COL=197\r\nSLURM_NODELIST=hkn0401\r\nSLURM_SRUN_COMM_PORT=38425\r\nSLURM_STEP_ID=4294967290\r\nSLURM_JOB_ACCOUNT=hk-project-p0023960\r\nSLURM_PRIO_PROCESS=0\r\nSLURM_NPROCS=1\r\nSLURM_NNODES=1\r\nSLURM_SUBMIT_HOST=hkn1990.localdomain\r\nSLURM_JOB_ID=3365334\r\nSLURM_NODEID=0\r\nSLURM_STEP_NUM_NODES=1\r\nSLURM_STEP_TASKS_PER_NODE=1\r\nSLURM_MPI_TYPE=pmi2\r\nSLURM_PMI2_STEP_NODES=hkn0401\r\nSLURM_CONF=/etc/slurm/slurm.conf\r\nSLURM_JOB_NAME=interactive\r\nSLURM_NTASKS_PER_NODE=1\r\nSLURM_STEP_LAUNCHER_PORT=38425\r\nSLURM_JOB_GID=502226\r\nSLURM_JOB_NODELIST=hkn0401\r\n",,terminal_output +2476,5375120,"TERMINAL",0,0,"GpuFreq=control_disabled\r\n",,terminal_output 
+2477,5376231,"TERMINAL",0,0,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/tyro/_parsers.py:347: UserWarning: The field `param-dtype` is annotated with type ``, but the default value `` has type ``. We'll try to handle this gracefully, but it may cause unexpected behavior.\r\n warnings.warn(message)\r\n/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/tyro/_parsers.py:347: UserWarning: The field `dtype` is annotated with type ``, but the default value `` has type ``. We'll try to handle this gracefully, but it may cause unexpected behavior.\r\n warnings.warn(message)\r\n",,terminal_output +2478,5378990,"TERMINAL",0,0,"2025-07-21 15:55:25.656432: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +2479,5390465,"TERMINAL",0,0,"2025-07-21 15:55:37.100986: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +2480,5397941,"TERMINAL",0,0,"2025-07-21 15:55:44.532451: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +2481,5400161,"TERMINAL",0,0,"jax.errors.SimplifiedTraceback: For simplicity, JAX has removed its internal frames from the traceback of the following exception. 
Set JAX_TRACEBACK_FILTERING=off to include these.\r\n\r\nThe above exception was the direct cause of the following exception:\r\n\r\nTraceback (most recent call last):\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/sample.py"", line 101, in \r\n params = genie.init(_rng, dummy_inputs)\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/genie.py"", line 114, in __call__\r\n dyna_outputs = self.dynamics(outputs, training)\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/models/dynamics.py"", line 80, in setup\r\n self.dynamics = STTransformer(\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/flax/linen/kw_only_dataclasses.py"", line 235, in init_wrapper\r\n dataclass_init(self, *args, **kwargs)\r\nTypeError: STTransformer.__init__() missing 1 required positional argument: 'use_flash_attention'\r\n",,terminal_output +2482,5401118,"TERMINAL",0,0,"srun: error: hkn0401: task 0: Exited with exit code 1\r\n]0;tum_cte0515@hkn0401:~/Projects/jafar[?2004h(jafar) [tum_cte0515@hkn0401 jafar]$ ",,terminal_output +2483,6890527,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/models/dynamics.py",0,0,"from typing import Dict, Any\n\nimport jax\nimport jax.numpy as jnp\nimport flax.linen as nn\nimport einops\n\nfrom utils.nn import STTransformer\n\n\nclass DynamicsMaskGIT(nn.Module):\n """"""MaskGIT dynamics model""""""\n\n model_dim: int\n num_latents: int\n num_blocks: int\n num_heads: int\n dropout: float\n mask_limit: float\n param_dtype: jnp.dtype\n dtype: jnp.dtype\n use_flash_attention: bool\n\n def setup(self):\n self.dynamics = STTransformer(\n self.model_dim,\n self.num_latents,\n self.num_blocks,\n self.num_heads,\n self.dropout,\n self.param_dtype,\n self.dtype,\n spatial_bert=True,\n use_flash_attention=self.use_flash_attention,\n )\n self.patch_embed = nn.Embed(self.num_latents, self.model_dim)\n self.mask_token = self.param(\n ""mask_token"",\n nn.initializers.lecun_uniform(),\n (1, 1, 1, self.model_dim),\n )\n self.action_up = nn.Dense(\n self.model_dim,\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n )\n\n def __call__(self, batch: Dict[str, Any], training: bool = True) -> Dict[str, Any]:\n # --- Mask videos ---\n vid_embed = self.patch_embed(batch[""video_tokens""])\n if training:\n rng1, rng2 = jax.random.split(batch[""mask_rng""])\n mask_prob = jax.random.uniform(rng1, minval=self.mask_limit)\n mask = jax.random.bernoulli(rng2, mask_prob, vid_embed.shape[:-1])\n mask = mask.at[:, 0].set(False)\n vid_embed = jnp.where(jnp.expand_dims(mask, -1), self.mask_token, vid_embed)\n else:\n mask = None\n\n # --- Predict transition ---\n act_embed = self.action_up(batch[""latent_actions""])\n vid_embed += jnp.pad(act_embed, ((0, 0), (1, 0), (0, 0), (0, 0)))\n logits = self.dynamics(vid_embed)\n return dict(token_logits=logits, mask=mask)\n\n\nclass DynamicsAutoregressive(nn.Module):\n """"""Autoregressive (causal) dynamics model""""""\n\n model_dim: int\n num_latents: int\n num_blocks: int\n num_heads: int\n dropout: float\n param_dtype: jnp.dtype\n dtype: jnp.dtype\n use_flash_attention: bool\n\n def setup(self):\n self.dynamics = STTransformer(\n self.model_dim,\n self.num_latents,\n self.num_blocks,\n self.num_heads,\n self.dropout,\n self.param_dtype,\n self.dtype,\n spatial_bert=False,\n )\n self.patch_embed = nn.Embed(self.num_latents, self.model_dim)\n self.action_up = nn.Dense(\n self.model_dim,\n 
param_dtype=self.param_dtype,\n dtype=self.dtype,\n )\n\n def __call__(self, batch: Dict[str, Any], training: bool = True) -> Dict[str, Any]:\n vid_embed = self.patch_embed(batch[""video_tokens""])\n act_embed = self.action_up(batch[""latent_actions""])\n vid_embed += jnp.pad(act_embed, ((0, 0), (1, 0), (0, 0), (0, 0)))\n # vid_embed_padded = jnp.pad(vid_embed, ((0, 0), (1, 0), (1, 0), (0, 0)))\n # logits = self.dynamics(vid_embed_padded)[:, :-1, :-1]\n vid_embed_padded = jnp.pad(vid_embed, ((0, 0), (0, 0), (1, 0), (0, 0)))\n logits = self.dynamics(vid_embed_padded)[:, :, :-1]\n mask = jnp.ones(vid_embed.shape[:-1])\n # next_tokens = jnp.argmax(logits, axis=-1)\n # print(next_tokens.shape)\n # jax.debug.breakpoint()\n return dict(token_logits=logits, mask=mask)",python,tab +2484,6893997,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/models/dynamics.py",2315,0,"",python,selection_command +2485,6894508,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/models/dynamics.py",2343,0,"",python,selection_command +2486,6894518,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/models/dynamics.py",2373,0,"",python,selection_command +2487,6894584,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/models/dynamics.py",2402,0,"",python,selection_command +2488,6894587,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/models/dynamics.py",2430,0,"",python,selection_command +2489,6894725,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/models/dynamics.py",2456,0,"",python,selection_command +2490,6894886,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/models/dynamics.py",2486,0,"",python,selection_command +2491,6895658,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/models/dynamics.py",2510,0,"",python,selection_command +2492,6895899,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/models/dynamics.py",2541,0,"\n ",python,content +2493,6897404,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/models/dynamics.py",2554,0,"u",python,content +2494,6897405,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/models/dynamics.py",2555,0,"",python,selection_keyboard +2495,6898285,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/models/dynamics.py",2555,0,"^",python,content +2496,6898286,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/models/dynamics.py",2556,0,"",python,selection_keyboard +2497,6899088,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/models/dynamics.py",2555,1,"",python,content +2498,6899089,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/models/dynamics.py",2555,0,"",python,selection_keyboard +2499,6899586,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/models/dynamics.py",2555,0,"s",python,content +2500,6899587,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/models/dynamics.py",2556,0,"",python,selection_keyboard +2501,6900223,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/models/dynamics.py",2556,0,"^",python,content +2502,6900224,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/models/dynamics.py",2557,0,"",python,selection_keyboard +2503,6900731,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/models/dynamics.py",2556,1,"",python,content 
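The second failure ("missing 1 required positional argument: 'use_flash_attention'") happened because DynamicsAutoregressive.setup() built its STTransformer without the now-required field; the edits beginning here thread it through. A condensed sketch of the fixed constructor, reconstructed from the models/dynamics.py contents above (other methods elided):

import flax.linen as nn
import jax.numpy as jnp
from utils.nn import STTransformer

class DynamicsAutoregressive(nn.Module):
    model_dim: int
    num_latents: int
    num_blocks: int
    num_heads: int
    dropout: float
    param_dtype: jnp.dtype
    dtype: jnp.dtype
    use_flash_attention: bool        # field existed; it was just never forwarded

    def setup(self):
        self.dynamics = STTransformer(
            self.model_dim,
            self.num_latents,
            self.num_blocks,
            self.num_heads,
            self.dropout,
            self.param_dtype,
            self.dtype,
            use_flash_attention=self.use_flash_attention,  # the missing argument
            spatial_bert=False,      # autoregressive variant uses causal spatial attention
        )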
+2504,6900732,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/models/dynamics.py",2556,0,"",python,selection_keyboard +2505,6901279,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/models/dynamics.py",2556,0,"e",python,content +2506,6901280,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/models/dynamics.py",2557,0,"",python,selection_keyboard +2507,6902136,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/models/dynamics.py",2554,3,"use_flash_attention=",python,content +2508,6902970,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/models/dynamics.py",2574,0,"u",python,content +2509,6902971,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/models/dynamics.py",2575,0,"",python,selection_keyboard +2510,6904390,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/models/dynamics.py",2575,0,"s",python,content +2511,6904391,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/models/dynamics.py",2576,0,"",python,selection_keyboard +2512,6905440,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/models/dynamics.py",2575,0,"",python,selection_command +2513,6905619,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/models/dynamics.py",2574,0,"",python,selection_command +2514,6905979,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/models/dynamics.py",2574,0,"s",python,content +2515,6905980,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/models/dynamics.py",2575,0,"",python,selection_keyboard +2516,6908119,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/models/dynamics.py",2575,1,"",python,content +2517,6908301,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/models/dynamics.py",2575,1,"",python,content +2518,6909720,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/models/dynamics.py",2574,1,"",python,content +2519,6910212,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/models/dynamics.py",2574,0,"i",python,content +2520,6910213,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/models/dynamics.py",2575,0,"",python,selection_keyboard +2521,6911266,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/models/dynamics.py",2574,1,"",python,content +2522,6911521,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/models/dynamics.py",2574,0,"u",python,content +2523,6911522,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/models/dynamics.py",2575,0,"",python,selection_keyboard +2524,6911599,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/models/dynamics.py",2575,0,"s",python,content +2525,6911600,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/models/dynamics.py",2576,0,"",python,selection_keyboard +2526,6911761,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/models/dynamics.py",2576,0,"e",python,content +2527,6911762,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/models/dynamics.py",2577,0,"",python,selection_keyboard +2528,6911905,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/models/dynamics.py",2577,0,"_",python,content +2529,6911906,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/models/dynamics.py",2578,0,"",python,selection_keyboard 
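For reference, the temporal attention in STBlock (utils/nn.py above) still builds an explicit causal mask with jnp.tri alongside the is_causal=True flag, a redundancy the inline FIXME questions. What jnp.tri produces, for a toy length:

import jax.numpy as jnp

causal_mask = jnp.tri(4)   # lower-triangular: position t attends to positions <= t only
# [[1. 0. 0. 0.]
#  [1. 1. 0. 0.]
#  [1. 1. 1. 0.]
#  [1. 1. 1. 1.]]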
+2530,6914560,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/models/dynamics.py",2577,1,"",python,content +2531,6914713,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/models/dynamics.py",2576,1,"",python,content +2532,6914863,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/models/dynamics.py",2575,1,"",python,content +2533,6914995,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/models/dynamics.py",2574,1,"",python,content +2534,6915211,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/models/dynamics.py",2574,0,"s",python,content +2535,6915212,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/models/dynamics.py",2575,0,"",python,selection_keyboard +2536,6915454,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/models/dynamics.py",2575,0,"e",python,content +2537,6915455,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/models/dynamics.py",2576,0,"",python,selection_keyboard +2538,6916020,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/models/dynamics.py",2574,2,"self",python,content +2539,6917087,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/models/dynamics.py",2578,0,".",python,content +2540,6917088,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/models/dynamics.py",2579,0,"",python,selection_keyboard +2541,6917307,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/models/dynamics.py",2579,0,"u",python,content +2542,6917308,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/models/dynamics.py",2580,0,"",python,selection_keyboard +2543,6917349,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/models/dynamics.py",2580,0,"s",python,content +2544,6917351,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/models/dynamics.py",2581,0,"",python,selection_keyboard +2545,6918257,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/models/dynamics.py",2579,2,"use_flash_attention",python,content +2546,6918796,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/models/dynamics.py",2597,0,"",python,selection_command +2547,6919546,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/models/dynamics.py",2542,57,"",python,content +2548,6919561,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/models/dynamics.py",2550,0,"",python,selection_command +2549,6919623,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/models/dynamics.py",2518,0,"",python,selection_command +2550,6919794,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/models/dynamics.py",2494,0,"",python,selection_command +2551,6920050,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/models/dynamics.py",2509,0,"\n use_flash_attention=self.use_flash_attention",python,content +2552,6920051,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/models/dynamics.py",2522,0,"",python,selection_command +2553,6921440,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/models/dynamics.py",2522,19,"",python,content +2554,6921705,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/models/dynamics.py",2522,1,"",python,content +2555,6922628,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/models/dynamics.py",2546,0,"",python,selection_command 
+2556,6922771,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/models/dynamics.py",2546,0,",",python,content +2557,6922772,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/models/dynamics.py",2547,0,"",python,selection_keyboard +2558,6923070,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/models/dynamics.py",2546,0,"",python,selection_command +2559,6924555,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/models/dynamics.py",2309,0,"",python,selection_mouse +2560,6924744,"utils/nn.py",0,0,"",python,tab +2561,6927829,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/models/dynamics.py",0,0,"",python,tab +2562,6948647,"models/dynamics.py",0,0,"",python,tab +2563,6958445,"models/lam.py",0,0,"from typing import Dict, Any\n\nimport jax.numpy as jnp\nimport flax.linen as nn\n\nfrom utils.preprocess import patchify, unpatchify\nfrom utils.nn import STTransformer, VectorQuantizer\n\n\nclass LatentActionModel(nn.Module):\n """"""Latent Action ST-ViVit VQ-VAE""""""\n\n in_dim: int\n model_dim: int\n latent_dim: int\n num_latents: int\n patch_size: int\n num_blocks: int\n num_heads: int\n dropout: float\n codebook_dropout: float\n param_dtype: jnp.dtype\n dtype: jnp.dtype\n use_flash_attention: bool\n\n def setup(self):\n self.patch_token_dim = self.in_dim * self.patch_size**2\n self.encoder = STTransformer(\n self.model_dim,\n self.latent_dim,\n self.num_blocks,\n self.num_heads,\n self.dropout,\n self.param_dtype,\n self.dtype,\n use_flash_attention=self.use_flash_attention,\n )\n self.action_in = self.param(\n ""action_in"",\n nn.initializers.lecun_uniform(),\n (1, 1, 1, self.patch_token_dim),\n )\n self.vq = VectorQuantizer(\n self.latent_dim,\n self.num_latents,\n self.codebook_dropout,\n )\n self.patch_up = nn.Dense(\n self.model_dim,\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n )\n self.action_up = nn.Dense(\n self.model_dim,\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n )\n self.decoder = STTransformer(\n self.model_dim,\n self.patch_token_dim,\n self.num_blocks,\n self.num_heads,\n self.dropout,\n self.param_dtype,\n self.dtype,\n use_flash_attention=self.use_flash_attention,\n )\n\n def __call__(self, batch: Dict[str, Any], training: bool = True) -> Dict[str, Any]:\n # --- Encode + VQ ---\n H, W = batch[""videos""].shape[2:4]\n outputs = self.vq_encode(batch[""videos""], training)\n video_action_patches = self.action_up(outputs[""z_q""]) + self.patch_up(\n outputs[""patches""][:, :-1]\n )\n del outputs[""patches""]\n\n # --- Decode ---\n video_recon = self.decoder(video_action_patches)\n video_recon = video_recon.astype(jnp.float32)\n video_recon = nn.sigmoid(video_recon)\n video_recon = video_recon.astype(self.dtype)\n outputs[""recon""] = unpatchify(video_recon, self.patch_size, H, W)\n return outputs\n\n def vq_encode(self, videos: Any, training: bool = True) -> Dict[str, Any]:\n # --- Preprocess videos ---\n B, T = videos.shape[:2]\n patches = patchify(videos, self.patch_size)\n action_pad = jnp.broadcast_to(self.action_in, (B, T, 1, self.patch_token_dim))\n # FIXME mihir do this the other way around\n padded_patches = jnp.concatenate((action_pad, patches), axis=2)\n\n # --- Encode ---\n z = self.encoder(padded_patches) # (B, T, N, E)\n # Get latent action for all future frames\n z = z[:, 1:, 0] # (B, T-1, E)\n\n # --- Vector quantize ---\n z = z.reshape(B * (T - 1), self.latent_dim)\n z_q, z, emb, indices = self.vq(z, training)\n z_q = z_q.reshape(B, T - 1, 1, self.latent_dim)\n 
return dict(patches=patches, z_q=z_q, z=z, emb=emb, indices=indices)\n",python,tab +2564,6968560,"models/tokenizer.py",0,0,"from typing import Dict, Any, Tuple\n\nimport flax.linen as nn\nimport jax.numpy as jnp\n\nfrom utils.preprocess import patchify, unpatchify\nfrom utils.nn import STTransformer, VectorQuantizer\n\n\nclass TokenizerVQVAE(nn.Module):\n """"""ST-ViVit VQ-VAE""""""\n\n in_dim: int\n model_dim: int\n latent_dim: int\n num_latents: int\n patch_size: int\n num_blocks: int\n num_heads: int\n dropout: float\n codebook_dropout: float\n param_dtype: jnp.dtype\n dtype: jnp.dtype\n use_flash_attention: bool\n\n def setup(self):\n self.encoder = STTransformer(\n self.model_dim,\n self.latent_dim,\n self.num_blocks,\n self.num_heads,\n self.dropout,\n self.param_dtype,\n self.dtype,\n use_flash_attention=self.use_flash_attention,\n )\n self.vq = VectorQuantizer(\n self.latent_dim,\n self.num_latents,\n self.codebook_dropout,\n )\n self.out_dim = self.in_dim * self.patch_size**2\n self.decoder = STTransformer(\n self.model_dim,\n self.out_dim,\n self.num_blocks,\n self.num_heads,\n self.dropout,\n self.param_dtype,\n self.dtype,\n use_flash_attention=self.use_flash_attention,\n )\n\n def __call__(self, batch: Dict[str, Any], training: bool = True) -> Dict[str, Any]:\n H, W = batch[""videos""].shape[2:4]\n outputs = self.vq_encode(batch[""videos""], training)\n recon = self.decoder(outputs[""z_q""]) # (B, T, H_down * W_down, C)\n recon = recon.astype(jnp.float32)\n recon = nn.sigmoid(recon)\n recon = recon.astype(self.dtype)\n outputs[""recon""] = unpatchify(recon, self.patch_size, H, W)\n return outputs\n\n def vq_encode(self, videos: Any, training: bool = True) -> Dict[str, Any]:\n # --- Preprocess + encode ---\n B, T = videos.shape[:2]\n x = patchify(videos, self.patch_size)\n N = x.shape[2]\n x = self.encoder(x) # (B, T, N, E)\n\n # --- Vector quantize ---\n x = x.reshape(B * T * N, self.latent_dim)\n z_q, z, emb, indices = self.vq(x, training)\n z_q = z_q.reshape(B, T, N, self.latent_dim)\n indices = indices.reshape(B, T, N)\n return dict(z_q=z_q, z=z, emb=emb, indices=indices)\n\n def decode(self, indices: Any, video_hw: Tuple[int, int]):\n z = self.vq.codebook[indices]\n recon = self.decoder(z)\n recon = recon.astype(jnp.float32)\n recon = nn.sigmoid(recon)\n recon = recon.astype(self.dtype)\n return unpatchify(recon, self.patch_size, *video_hw)\n",python,tab +2565,6971651,"utils/nn.py",0,0,"",python,tab +2566,6976495,"models/tokenizer.py",0,0,"",python,tab +2567,6979586,"TERMINAL",0,0,"sh slurm/jobs/mihir/horeka/yolo-runs/sampling.sh /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/interactive/shift-spatial",,terminal_output +2568,6980545,"TERMINAL",0,0,"\r\n[?2004l\r\r\n# Log the sbatch script\r\ncat $0\r\n\r\nmodule unload mpi/openmpi/5.0\r\nmodule unload devel/cuda/12.4\r\n# source .venv/bin/activate\r\n\r\narray_records_dir=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_new/open_ai_minecraft_arrayrecords_chunked\r\n\r\njob_name=$SLURM_JOB_NAME\r\nslurm_job_id=$SLURM_JOB_ID\r\n\r\nCHECKPOINT_DIR=$ws_dir/checkpoints/$job_name/$slurm_job_id\r\nmkdir -p $CHECKPOINT_DIR\r\n\r\ntokenizer_ckpt_dir=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/big-runs/tokenizer-lr-scaling/train_tokenizer_lr_sweep_1e-4\r\ndynamics_ckpt_dir=$1\r\necho $dynamics_ckpt_dir\r\n\r\nenv | grep SLURM\r\n\r\nsrun python sample.py \\r\n --checkpoint $dynamics_ckpt_dir \\r\n --dyna_dim=128 \\r\n --dyna_num_blocks=2 \\r\n --dyna_num_heads=4 \\r\n 
--seq_len=2 \\r\n --data_dir $array_records_dir\r\n\r\n",,terminal_output +2569,6980672,"TERMINAL",0,0,"/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/interactive/shift-spatial\r\nSLURM_STEP_NUM_TASKS=1\r\nSLURM_JOB_USER=tum_cte0515\r\nSLURM_TASKS_PER_NODE=1\r\nSLURM_JOB_UID=999226\r\nSLURM_TASK_PID=1431282\r\nSLURM_JOB_GPUS=1\r\nSLURM_LOCALID=0\r\nSLURM_SUBMIT_DIR=/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar\r\nSLURMD_NODENAME=hkn0401\r\nSLURM_JOB_START_TIME=1753105533\r\nSLURM_STEP_NODELIST=hkn0401\r\nSLURM_CLUSTER_NAME=hk\r\nSLURM_JOB_END_TIME=1753109133\r\nSLURM_PMI2_SRUN_PORT=40535\r\nSLURM_CPUS_ON_NODE=6\r\nSLURM_JOB_CPUS_PER_NODE=6\r\nSLURM_GPUS_ON_NODE=1\r\nSLURM_GTIDS=0\r\nSLURM_JOB_PARTITION=dev_accelerated\r\nSLURM_TRES_PER_TASK=cpu=5\r\nSLURM_OOM_KILL_STEP=0\r\nSLURM_JOB_NUM_NODES=1\r\nSLURM_STEPID=4294967290\r\nSLURM_JOBID=3365334\r\nSLURM_PTY_PORT=39479\r\nSLURM_JOB_QOS=normal\r\nSLURM_LAUNCH_NODE_IPADDR=10.0.7.198\r\nSLURM_PTY_WIN_ROW=29\r\nSLURM_PMI2_PROC_MAPPING=(vector,(0,1,1))\r\nSLURMD_DEBUG=2\r\nSLURM_PROCID=0\r\nSLURM_CPUS_PER_TASK=5\r\nSLURM_NTASKS=1\r\nSLURM_TOPOLOGY_ADDR=hkibb.hkibbi1.hkibbi1e11.hkn0401\r\nSLURM_TOPOLOGY_ADDR_PATTERN=switch.switch.switch.node\r\nSLURM_SRUN_COMM_HOST=10.0.7.198\r\nSLURM_SCRIPT_CONTEXT=prolog_task\r\nSLURM_PTY_WIN_COL=197\r\nSLURM_NODELIST=hkn0401\r\nSLURM_SRUN_COMM_PORT=38425\r\nSLURM_STEP_ID=4294967290\r\nSLURM_JOB_ACCOUNT=hk-project-p0023960\r\nSLURM_PRIO_PROCESS=0\r\nSLURM_NPROCS=1\r\nSLURM_NNODES=1\r\nSLURM_SUBMIT_HOST=hkn1990.localdomain\r\nSLURM_JOB_ID=3365334\r\nSLURM_NODEID=0\r\nSLURM_STEP_NUM_NODES=1\r\nSLURM_STEP_TASKS_PER_NODE=1\r\nSLURM_MPI_TYPE=pmi2\r\nSLURM_PMI2_STEP_NODES=hkn0401\r\nSLURM_CONF=/etc/slurm/slurm.conf\r\nSLURM_JOB_NAME=interactive\r\nSLURM_NTASKS_PER_NODE=1\r\nSLURM_STEP_LAUNCHER_PORT=38425\r\nSLURM_JOB_GID=502226\r\nSLURM_JOB_NODELIST=hkn0401\r\n",,terminal_output +2570,6980909,"TERMINAL",0,0,"GpuFreq=control_disabled\r\n",,terminal_output +2571,6981687,"TERMINAL",0,0,"bash",,terminal_focus +2572,6982301,"TERMINAL",0,0,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/tyro/_parsers.py:347: UserWarning: The field `param-dtype` is annotated with type ``, but the default value `` has type ``. We'll try to handle this gracefully, but it may cause unexpected behavior.\r\n warnings.warn(message)\r\n/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/tyro/_parsers.py:347: UserWarning: The field `dtype` is annotated with type ``, but the default value `` has type ``. We'll try to handle this gracefully, but it may cause unexpected behavior.\r\n warnings.warn(message)\r\n",,terminal_output +2573,6984275,"TERMINAL",0,0,"queue",,terminal_command +2574,6984311,"TERMINAL",0,0,"2025-07-21 16:22:10.931725: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. 
Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +2575,6984321,"TERMINAL",0,0,"]633;E;2025-07-21 16:22:10 queue;a9849893-e54a-428b-8c10-ef7068f4e31e]633;C",,terminal_output +2576,6984388,"TERMINAL",0,0,"[?1049h(B[?7hEvery 1.0s: squeue --mehkn1990.localdomain: Mon Jul 21 16:22:10 2025JOBID PARTITION NAME USER ST\tTIME NODES NODELIST(REASON)3365092 accelerat interact tum_cte0 PD\t0:00\t 2 (Priority)3365333 accelerat interact tum_cte0 R36:04\t 1 hkn05093365091 accelerat interact tum_cte0 R 1:54:46\t 1 hkn07073365334 dev_accel interact tum_cte0 R36:38\t 1 hkn0401",,terminal_output +2577,6985444,"TERMINAL",0,0,"2579",,terminal_output +2578,6986465,"TERMINAL",0,0,"36840",,terminal_output +2579,6987514,"TERMINAL",0,0,"4791",,terminal_output +2580,6988564,"TERMINAL",0,0,"58502",,terminal_output +2581,6989602,"TERMINAL",0,0,"6913",,terminal_output +2582,6990657,"TERMINAL",0,0,"71024",,terminal_output +2583,6990815,"TERMINAL",0,0,"srun",,terminal_focus +2584,6991705,"TERMINAL",0,0,"8135",,terminal_output +2585,6992755,"TERMINAL",0,0,"9246",,terminal_output +2586,6993862,"TERMINAL",0,0,"20357",,terminal_output +2587,6994169,"TERMINAL",0,0,"2025-07-21 16:22:20.824547: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +2588,6995707,"TERMINAL",0,0,"1579",,terminal_output +2589,6996673,"TERMINAL",0,0,"36850",,terminal_output +2590,6997754,"TERMINAL",0,0,"4791",,terminal_output +2591,6998779,"TERMINAL",0,0,"585:002",,terminal_output +2592,6999905,"TERMINAL",0,0,"6913",,terminal_output +2593,7000930,"TERMINAL",0,0,"72024",,terminal_output +2594,7001037,"TERMINAL",0,0,"2025-07-21 16:22:27.665706: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. 
Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +2595,7001954,"TERMINAL",0,0,"8135",,terminal_output +2596,7002975,"TERMINAL",0,0,"9246",,terminal_output +2597,7003414,"genie.py",0,0,"",python,tab +2598,7004015,"TERMINAL",0,0,"30357",,terminal_output +2599,7005128,"TERMINAL",0,0,"1468",,terminal_output +2600,7005191,"genie.py",5749,0,"",python,selection_mouse +2601,7005313,"genie.py",5749,3,"jax",python,selection_mouse +2602,7005459,"genie.py",5749,9,"jax.debug",python,selection_mouse +2603,7005509,"genie.py",5722,30,"range(50):\n jax",python,selection_mouse +2604,7005510,"genie.py",5728,24,"50):\n jax",python,selection_mouse +2605,7005510,"genie.py",5730,22,"):\n jax",python,selection_mouse +2606,7005522,"genie.py",5732,20,"\n jax",python,selection_mouse +2607,7005638,"genie.py",5749,31,"jax.debug.print(""Sampling token",python,selection_mouse +2608,7005655,"genie.py",5749,32,"jax.debug.print(""Sampling token ",python,selection_mouse +2609,7005706,"genie.py",5749,33,"jax.debug.print(""Sampling token {",python,selection_mouse +2610,7005707,"genie.py",5749,35,"jax.debug.print(""Sampling token {} ",python,selection_mouse +2611,7005707,"genie.py",5749,39,"jax.debug.print(""Sampling token {} from",python,selection_mouse +2612,7005770,"genie.py",5749,40,"jax.debug.print(""Sampling token {} from ",python,selection_mouse +2613,7005823,"genie.py",5749,45,"jax.debug.print(""Sampling token {} from frame",python,selection_mouse +2614,7005906,"genie.py",5749,46,"jax.debug.print(""Sampling token {} from frame ",python,selection_mouse +2615,7005960,"genie.py",5749,47,"jax.debug.print(""Sampling token {} from frame {",python,selection_mouse +2616,7005971,"genie.py",5749,48,"jax.debug.print(""Sampling token {} from frame {}",python,selection_mouse +2617,7006023,"genie.py",5749,49,"jax.debug.print(""Sampling token {} from frame {}""",python,selection_mouse +2618,7006024,"genie.py",5749,50,"jax.debug.print(""Sampling token {} from frame {}"",",python,selection_mouse +2619,7006038,"genie.py",5749,51,"jax.debug.print(""Sampling token {} from frame {}"", ",python,selection_mouse +2620,7006059,"genie.py",5749,52,"jax.debug.print(""Sampling token {} from frame {}"", n",python,selection_mouse +2621,7006123,"genie.py",5749,53,"jax.debug.print(""Sampling token {} from frame {}"", n,",python,selection_mouse +2622,7006124,"genie.py",5749,54,"jax.debug.print(""Sampling token {} from frame {}"", n, ",python,selection_mouse +2623,7006124,"TERMINAL",0,0,"2579",,terminal_output +2624,7006139,"genie.py",5749,55,"jax.debug.print(""Sampling token {} from frame {}"", n, t",python,selection_mouse +2625,7006197,"genie.py",5749,56,"jax.debug.print(""Sampling token {} from frame {}"", n, t)",python,selection_mouse +2626,7006905,"genie.py",5805,0,"",python,selection_mouse +2627,7007159,"TERMINAL",0,0,"3687:00",,terminal_output +2628,7008303,"TERMINAL",0,0,"4791",,terminal_output +2629,7009259,"TERMINAL",0,0,"58102",,terminal_output +2630,7010354,"TERMINAL",0,0,"6913",,terminal_output +2631,7011376,"TERMINAL",0,0,"73135",,terminal_output +2632,7011484,"TERMINAL",0,0,"2025-07-21 16:22:38.110623: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. 
Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +2633,7012534,"TERMINAL",0,0,"9246",,terminal_output +2634,7013536,"TERMINAL",0,0,"40357",,terminal_output +2635,7014521,"TERMINAL",0,0,"srun",,terminal_focus +2636,7014582,"TERMINAL",0,0,"1468",,terminal_output +2637,7014777,"TERMINAL",0,0,"WARNING:absl:Missing metrics for step 800\r\nERROR:absl:File /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/interactive/shift-spatial/000800/metrics/metrics not found.\r\nWARNING:absl:Missing metrics for step 1000\r\nERROR:absl:File /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/interactive/shift-spatial/001000/metrics/metrics not found.\r\nWARNING:absl:Missing metrics for step 900\r\nERROR:absl:File /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/interactive/shift-spatial/000900/metrics/metrics not found.\r\n",,terminal_output +2638,7015542,"TERMINAL",0,0,"2579",,terminal_output +2639,7016589,"TERMINAL",0,0,"36810",,terminal_output +2640,7017640,"TERMINAL",0,0,"4791",,terminal_output +2641,7018745,"TERMINAL",0,0,"58202",,terminal_output +2642,7019771,"TERMINAL",0,0,"6913",,terminal_output +2643,7020795,"TERMINAL",0,0,"74024",,terminal_output +2644,7021711,"TERMINAL",0,0,"watch",,terminal_focus +2645,7021711,"TERMINAL",0,0,"srun",,terminal_focus +2646,7021829,"TERMINAL",0,0,"8135",,terminal_output +2647,7022945,"TERMINAL",0,0,"91CG 1:55:23\t 1 hkn0707092PD 0:00\t 2 (Priority)333 36:425096",,terminal_output +2648,7023968,"TERMINAL",0,0,"5037",,terminal_output +2649,7024996,"TERMINAL",0,0,"148",,terminal_output +2650,7026018,"TERMINAL",0,0,"259",,terminal_output +2651,7027089,"TERMINAL",0,0,"3620",,terminal_output +2652,7028106,"TERMINAL",0,0,"471",,terminal_output +2653,7029193,"TERMINAL",0,0,"582",,terminal_output +2654,7030216,"TERMINAL",0,0,"693",,terminal_output +2655,7031342,"TERMINAL",0,0,"7504",,terminal_output +2656,7032367,"TERMINAL",0,0,"815",,terminal_output +2657,7033391,"TERMINAL",0,0,"937",,terminal_output +2658,7034416,"TERMINAL",0,0,"3:0148",,terminal_output +2659,7035542,"TERMINAL",0,0,"259",,terminal_output +2660,7036566,"TERMINAL",0,0,"3630",,terminal_output +2661,7037554,"TERMINAL",0,0,"471",,terminal_output +2662,7038601,"TERMINAL",0,0,"582",,terminal_output +2663,7039647,"TERMINAL",0,0,"693",,terminal_output +2664,7039708,"TERMINAL",0,0,"2025-07-21 16:23:06.346526: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n2025-07-21 16:23:06.346923: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n2025-07-21 16:23:06.347221: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. 
Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +2665,7040696,"TERMINAL",0,0,"77:004",,terminal_output +2666,7041788,"TERMINAL",0,0,"815",,terminal_output +2667,7042812,"TERMINAL",0,0,"926",,terminal_output +2668,7043836,"TERMINAL",0,0,"1037",,terminal_output +2669,7044883,"TERMINAL",0,0,"148",,terminal_output +2670,7045065,"TERMINAL",0,0,"watch",,terminal_focus +2671,7045973,"TERMINAL",0,0,"259",,terminal_output +2672,7047068,"TERMINAL",0,0,"3640",,terminal_output +2673,7048237,"TERMINAL",0,0,"471",,terminal_output +2674,7049087,"TERMINAL",0,0,"582",,terminal_output +2675,7050138,"TERMINAL",0,0,"693",,terminal_output +2676,7051209,"TERMINAL",0,0,"7104",,terminal_output +2677,7051653,"TERMINAL",0,0,"srun",,terminal_focus +2678,7052236,"TERMINAL",0,0,"815",,terminal_output +2679,7053282,"TERMINAL",0,0,"926",,terminal_output +2680,7054327,"TERMINAL",0,0,"2037",,terminal_output +2681,7055386,"TERMINAL",0,0,"159",,terminal_output +2682,7056436,"TERMINAL",0,0,"3650",,terminal_output +2683,7057559,"TERMINAL",0,0,"471",,terminal_output +2684,7058519,"TERMINAL",0,0,"582",,terminal_output +2685,7059564,"TERMINAL",0,0,"693",,terminal_output +2686,7060603,"TERMINAL",0,0,"7204",,terminal_output +2687,7061638,"TERMINAL",0,0,"815",,terminal_output +2688,7062684,"TERMINAL",0,0,"\r926",,terminal_output +2689,7063264,"genie.py",0,0,"",python,tab +2690,7063726,"TERMINAL",0,0,"3037",,terminal_output +2691,7064828,"TERMINAL",0,0,"148",,terminal_output +2692,7065852,"TERMINAL",0,0,"259",,terminal_output +2693,7066278,"sample.py",0,0,"",python,tab +2694,7066861,"TERMINAL",0,0,"368:00",,terminal_output +2695,7067937,"TERMINAL",0,0,"471",,terminal_output +2696,7068966,"TERMINAL",0,0,"582",,terminal_output +2697,7069177,"sample.py",4034,0,"",python,selection_mouse +2698,7070742,"TERMINAL",0,0,"6304",,terminal_output +2699,7071297,"sample.py",4128,0,"",python,selection_mouse +2700,7071806,"TERMINAL",0,0,"815",,terminal_output +2701,7071840,"sample.py",4135,0,"",python,selection_mouse +2702,7072917,"TERMINAL",0,0,"926",,terminal_output +2703,7073023,"sample.py",4173,0,"\n sampling_fn = jax.jit(nn.apply(_sampling_wrapper, genie)) ",python,content +2704,7073034,"sample.py",4178,0,"",python,selection_command +2705,7073729,"sample.py",4115,0,"",python,selection_command +2706,7073928,"TERMINAL",0,0,"4037",,terminal_output +2707,7074588,"sample.py",4115,0,"#",python,content +2708,7074589,"sample.py",4116,0,"",python,selection_keyboard +2709,7074633,"sample.py",4116,0," ",python,content +2710,7074633,"sample.py",4117,0,"",python,selection_keyboard +2711,7074955,"sample.py",4116,0,"",python,selection_command +2712,7074965,"TERMINAL",0,0,"148",,terminal_output +2713,7075204,"sample.py",4181,0,"",python,selection_command +2714,7075368,"sample.py",4244,0,"",python,selection_command +2715,7075500,"sample.py",4282,0,"",python,selection_command +2716,7075675,"sample.py",4244,0,"",python,selection_command +2717,7075816,"sample.py",4181,0,"",python,selection_command +2718,7075997,"TERMINAL",0,0,"259",,terminal_output +2719,7076042,"sample.py",4182,0,"",python,selection_command +2720,7076540,"sample.py",4183,0,"",python,selection_command +2721,7076597,"sample.py",4184,0,"",python,selection_command +2722,7076598,"sample.py",4185,0,"",python,selection_command +2723,7076651,"sample.py",4186,0,"",python,selection_command +2724,7076652,"sample.py",4187,0,"",python,selection_command 
+2725,7076712,"sample.py",4188,0,"",python,selection_command +2726,7076713,"sample.py",4189,0,"",python,selection_command +2727,7076770,"sample.py",4190,0,"",python,selection_command +2728,7076778,"sample.py",4191,0,"",python,selection_command +2729,7076884,"sample.py",4192,0,"",python,selection_command +2730,7077046,"TERMINAL",0,0,"3610",,terminal_output +2731,7077057,"sample.py",4193,0,"",python,selection_command +2732,7077171,"sample.py",4194,0,"",python,selection_command +2733,7077369,"sample.py",4194,3,"",python,content +2734,7077602,"sample.py",4194,1,"",python,content +2735,7077770,"sample.py",4194,1,"",python,content +2736,7077943,"sample.py",4194,1,"",python,content +2737,7078093,"TERMINAL",0,0,"471",,terminal_output +2738,7078103,"sample.py",4194,1,"",python,content +2739,7078418,"sample.py",4194,1,"",python,content +2740,7079224,"sample.py",4230,0,"",python,selection_command +2741,7079247,"sample.py",4229,1,"",python,content +2742,7079427,"TERMINAL",0,0,"582",,terminal_output +2743,7079438,"sample.py",4228,0,"",python,selection_command +2744,7080188,"TERMINAL",0,0,"693",,terminal_output +2745,7080342,"sample.py",4228,1,"",python,content +2746,7080343,"sample.py",4227,0,"",python,selection_command +2747,7081232,"TERMINAL",0,0,"7404",,terminal_output +2748,7082381,"TERMINAL",0,0,"815",,terminal_output +2749,7083363,"TERMINAL",0,0,"926",,terminal_output +2750,7084367,"TERMINAL",0,0,"5048",,terminal_output +2751,7085402,"TERMINAL",0,0,"autoreg sampling...\r\nSampling token 0 from frame 1\r\nSampling token 1 from frame 1\r\nSampling token 2 from frame 1\r\nSampling token 3 from frame 1\r\nSampling token 4 from frame 1\r\nSampling token 5 from frame 1\r\nSampling token 6 from frame 1\r\nSampling token 7 from frame 1\r\nSampling token 8 from frame 1\r\nSampling token 9 from frame 1\r\nSampling token 10 from frame 1\r\nSampling token 11 from frame 1\r\nSampling token 12 from frame 1\r\nSampling token 13 from frame 1\r\nSampling token 14 from frame 1\r\nSampling token 15 from frame 1\r\nSampling token 16 from frame 1\r\nSampling token 17 from frame 1\r\nSampling token 18 from frame 1\r\nSampling token 19 from frame 1\r\nSampling token 20 from frame 1\r\nSampling token 21 from frame 1\r\nSampling token 22 from frame 1\r\nSampling token 23 from frame 1\r\nSampling token 24 from frame 1\r\nSampling token 25 from frame 1\r\nSampling token 26 from frame 1\r\nSampling token 27 from frame 1\r\nSampling token 28 from frame 1\r\nSampling token 29 from frame 1\r\nSampling token 30 from frame 1\r\nSampling token 31 from frame 1\r\nSampling token 32 from frame 1\r\nSampling token 33 from frame 1\r\nSampling token 34 from frame 1\r\nSampling token 35 from frame 1\r\nSampling token 36 from frame 1\r\nSampling token 37 from frame 1\r\nSampling token 38 from frame 1\r\nSampling token 39 from frame 1\r\nSampling token 40 from frame 1\r\nSampling token 41 from frame 1\r\nSampling token 42 from frame 1\r\nSampling token 43 from frame 1\r\nSampling token 44 from frame 1\r\nSampling token 45 from frame 1\r\nSampling token 46 from frame 1\r\nSampling token 47 from frame 1\r\nSampling token 48 from frame 1\r\nSampling token 49 from frame 1\r\nautoreg sampling done. 
calculating ssim and saving video\r\nSSIM: 0.47774630784988403\r\n",,terminal_output +2752,7085463,"TERMINAL",0,0,"259",,terminal_output +2753,7086539,"TERMINAL",0,0,"3620",,terminal_output +2754,7086921,"TERMINAL",0,0,"]0;tum_cte0515@hkn0401:~/Projects/jafar[?2004h(jafar) [tum_cte0515@hkn0401 jafar]$ ",,terminal_output +2755,7087509,"TERMINAL",0,0,"471",,terminal_output +2756,7088561,"TERMINAL",0,0,"582",,terminal_output +2757,7089607,"TERMINAL",0,0,"693",,terminal_output +2758,7090662,"TERMINAL",0,0,"7504",,terminal_output +2759,7091711,"TERMINAL",0,0,"815",,terminal_output +2760,7092753,"TERMINAL",0,0,"926",,terminal_output +2761,7093839,"TERMINAL",0,0,"4:0037",,terminal_output +2762,7094844,"TERMINAL",0,0,"148",,terminal_output +2763,7095890,"TERMINAL",0,0,"259",,terminal_output +2764,7096941,"TERMINAL",0,0,"3630",,terminal_output +2765,7097990,"TERMINAL",0,0,"471",,terminal_output +2766,7099338,"TERMINAL",0,0,"582",,terminal_output +2767,7100158,"TERMINAL",0,0,"693",,terminal_output +2768,7101181,"TERMINAL",0,0,"78:004",,terminal_output +2769,7102179,"TERMINAL",0,0,"815",,terminal_output +2770,7103332,"TERMINAL",0,0,"926",,terminal_output +2771,7104355,"TERMINAL",0,0,"1037",,terminal_output +2772,7105379,"TERMINAL",0,0,"148",,terminal_output +2773,7106355,"TERMINAL",0,0,"2640",,terminal_output +2774,7107427,"TERMINAL",0,0,"471",,terminal_output +2775,7108451,"TERMINAL",0,0,"582",,terminal_output +2776,7109575,"TERMINAL",0,0,"693",,terminal_output +2777,7110582,"TERMINAL",0,0,"7104",,terminal_output +2778,7111585,"TERMINAL",0,0,"815",,terminal_output +2779,7111860,"TERMINAL",0,0,"sh slurm/jobs/mihir/horeka/yolo-runs/sampling.sh /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/interactive/shift-spatial",,terminal_output +2780,7112626,"TERMINAL",0,0,"926",,terminal_output +2781,7113674,"TERMINAL",0,0,"2037",,terminal_output +2782,7114564,"sample.py",0,0,"",python,tab +2783,7114716,"TERMINAL",0,0,"148",,terminal_output +2784,7116483,"sample.py",4180,0,"",python,selection_command +2785,7116538,"TERMINAL",0,0,"2650",,terminal_output +2786,7117037,"sample.py",4180,0,"#",python,content +2787,7117039,"sample.py",4181,0,"",python,selection_keyboard +2788,7117103,"sample.py",4181,0," ",python,content +2789,7117104,"sample.py",4182,0,"",python,selection_keyboard +2790,7117448,"sample.py",4181,0,"",python,selection_command +2791,7117512,"TERMINAL",0,0,"471",,terminal_output +2792,7117577,"sample.py",4116,0,"",python,selection_command +2793,7117852,"sample.py",4115,0,"",python,selection_command +2794,7118397,"sample.py",4115,1,"",python,content +2795,7118547,"sample.py",4115,1,"",python,content +2796,7118557,"TERMINAL",0,0,"582",,terminal_output +2797,7119604,"TERMINAL",0,0,"693",,terminal_output +2798,7119953,"sample.py",4078,0,"",python,selection_mouse +2799,7120530,"sample.py",4187,0,"",python,selection_mouse +2800,7120653,"TERMINAL",0,0,"7204",,terminal_output +2801,7121125,"sample.py",4123,0,"",python,selection_mouse +2802,7121698,"TERMINAL",0,0,"815",,terminal_output +2803,7122787,"TERMINAL",0,0,"926",,terminal_output +2804,7123812,"TERMINAL",0,0,"3037",,terminal_output +2805,7124846,"TERMINAL",0,0,"148",,terminal_output +2806,7125940,"TERMINAL",0,0,"259",,terminal_output +2807,7126934,"TERMINAL",0,0,"369:00",,terminal_output +2808,7127166,"models/dynamics.py",0,0,"",python,tab +2809,7127983,"TERMINAL",0,0,"471",,terminal_output +2810,7128355,"genie.py",0,0,"",python,tab +2811,7129035,"TERMINAL",0,0,"582",,terminal_output 
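The first run above ends with "autoreg sampling done. calculating ssim and saving video" followed by "SSIM: 0.4777...". The metric code itself is not shown in the recording, so the following is only a plausible sketch of a mean-SSIM computation over frames, assuming scikit-image and (T, H, W, C) float arrays in [0, 1]; the function and argument names are hypothetical.

import numpy as np
from skimage.metrics import structural_similarity as ssim

def mean_ssim(ground_truth: np.ndarray, generated: np.ndarray) -> float:
    """Average SSIM over paired frames shaped (T, H, W, C) with values in [0, 1]."""
    scores = [
        ssim(gt, gen, channel_axis=-1, data_range=1.0)
        for gt, gen in zip(ground_truth, generated)
    ]
    return float(np.mean(scores))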
+2812,7129695,"genie.py",5728,0,"",python,selection_mouse +2813,7130141,"TERMINAL",0,0,"693",,terminal_output +2814,7131186,"TERMINAL",0,0,"7304",,terminal_output +2815,7131203,"genie.py",5728,1,"",python,content +2816,7131314,"genie.py",5728,1,"",python,content +2817,7131546,"genie.py",5728,0,"N",python,content +2818,7131548,"genie.py",5729,0,"",python,selection_keyboard +2819,7131906,"genie.py",5728,0,"",python,selection_command +2820,7132215,"TERMINAL",0,0,"815",,terminal_output +2821,7133257,"TERMINAL",0,0,"926",,terminal_output +2822,7134356,"TERMINAL",0,0,"4037",,terminal_output +2823,7135380,"TERMINAL",0,0,"148",,terminal_output +2824,7136406,"TERMINAL",0,0,"2610",,terminal_output +2825,7136469,"TERMINAL",0,0,"\r\n[?2004l\r\r\n# Log the sbatch script\r\ncat $0\r\n\r\nmodule unload mpi/openmpi/5.0\r\nmodule unload devel/cuda/12.4\r\n# source .venv/bin/activate\r\n\r\narray_records_dir=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_new/open_ai_minecraft_arrayrecords_chunked\r\n\r\njob_name=$SLURM_JOB_NAME\r\nslurm_job_id=$SLURM_JOB_ID\r\n\r\nCHECKPOINT_DIR=$ws_dir/checkpoints/$job_name/$slurm_job_id\r\nmkdir -p $CHECKPOINT_DIR\r\n\r\ntokenizer_ckpt_dir=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/big-runs/tokenizer-lr-scaling/train_tokenizer_lr_sweep_1e-4\r\ndynamics_ckpt_dir=$1\r\necho $dynamics_ckpt_dir\r\n\r\nenv | grep SLURM\r\n\r\nsrun python sample.py \\r\n --checkpoint $dynamics_ckpt_dir \\r\n --dyna_dim=128 \\r\n --dyna_num_blocks=2 \\r\n --dyna_num_heads=4 \\r\n --seq_len=2 \\r\n --data_dir $array_records_dir\r\n\r\n",,terminal_output +2826,7136595,"TERMINAL",0,0,"/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/interactive/shift-spatial\r\nSLURM_STEP_NUM_TASKS=1\r\nSLURM_JOB_USER=tum_cte0515\r\nSLURM_TASKS_PER_NODE=1\r\nSLURM_JOB_UID=999226\r\nSLURM_TASK_PID=1431282\r\nSLURM_JOB_GPUS=1\r\nSLURM_LOCALID=0\r\nSLURM_SUBMIT_DIR=/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar\r\nSLURMD_NODENAME=hkn0401\r\nSLURM_JOB_START_TIME=1753105533\r\nSLURM_STEP_NODELIST=hkn0401\r\nSLURM_CLUSTER_NAME=hk\r\nSLURM_JOB_END_TIME=1753109133\r\nSLURM_PMI2_SRUN_PORT=40535\r\nSLURM_CPUS_ON_NODE=6\r\nSLURM_JOB_CPUS_PER_NODE=6\r\nSLURM_GPUS_ON_NODE=1\r\nSLURM_GTIDS=0\r\nSLURM_JOB_PARTITION=dev_accelerated\r\nSLURM_TRES_PER_TASK=cpu=5\r\nSLURM_OOM_KILL_STEP=0\r\nSLURM_JOB_NUM_NODES=1\r\nSLURM_STEPID=4294967290\r\nSLURM_JOBID=3365334\r\nSLURM_PTY_PORT=39479\r\nSLURM_JOB_QOS=normal\r\nSLURM_LAUNCH_NODE_IPADDR=10.0.7.198\r\nSLURM_PTY_WIN_ROW=29\r\nSLURM_PMI2_PROC_MAPPING=(vector,(0,1,1))\r\nSLURMD_DEBUG=2\r\nSLURM_PROCID=0\r\nSLURM_CPUS_PER_TASK=5\r\nSLURM_NTASKS=1\r\nSLURM_TOPOLOGY_ADDR=hkibb.hkibbi1.hkibbi1e11.hkn0401\r\nSLURM_TOPOLOGY_ADDR_PATTERN=switch.switch.switch.node\r\nSLURM_SRUN_COMM_HOST=10.0.7.198\r\nSLURM_SCRIPT_CONTEXT=prolog_task\r\nSLURM_PTY_WIN_COL=197\r\nSLURM_NODELIST=hkn0401\r\nSLURM_SRUN_COMM_PORT=38425\r\nSLURM_STEP_ID=4294967290\r\nSLURM_JOB_ACCOUNT=hk-project-p0023960\r\nSLURM_PRIO_PROCESS=0\r\nSLURM_NPROCS=1\r\nSLURM_NNODES=1\r\nSLURM_SUBMIT_HOST=hkn1990.localdomain\r\nSLURM_JOB_ID=3365334\r\nSLURM_NODEID=0\r\nSLURM_STEP_NUM_NODES=1\r\nSLURM_STEP_TASKS_PER_NODE=1\r\nSLURM_MPI_TYPE=pmi2\r\nSLURM_PMI2_STEP_NODES=hkn0401\r\nSLURM_CONF=/etc/slurm/slurm.conf\r\nSLURM_JOB_NAME=interactive\r\nSLURM_NTASKS_PER_NODE=1\r\nSLURM_STEP_LAUNCHER_PORT=38425\r\nSLURM_JOB_GID=502226\r\nSLURM_JOB_NODELIST=hkn0401\r\n",,terminal_output +2827,7136720,"TERMINAL",0,0,"GpuFreq=control_disabled\r\n",,terminal_output 
+2828,7137431,"TERMINAL",0,0,"471",,terminal_output +2829,7138151,"TERMINAL",0,0,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/tyro/_parsers.py:347: UserWarning: The field `param-dtype` is annotated with type ``, but the default value `` has type ``. We'll try to handle this gracefully, but it may cause unexpected behavior.\r\n warnings.warn(message)\r\n/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/tyro/_parsers.py:347: UserWarning: The field `dtype` is annotated with type ``, but the default value `` has type ``. We'll try to handle this gracefully, but it may cause unexpected behavior.\r\n warnings.warn(message)\r\n",,terminal_output +2830,7138482,"TERMINAL",0,0,"582",,terminal_output +2831,7139582,"TERMINAL",0,0,"693",,terminal_output +2832,7140082,"TERMINAL",0,0,"2025-07-21 16:24:46.723155: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +2833,7140604,"TERMINAL",0,0,"7404",,terminal_output +2834,7141727,"TERMINAL",0,0,"815",,terminal_output +2835,7142765,"TERMINAL",0,0,"926",,terminal_output +2836,7143790,"TERMINAL",0,0,"5037",,terminal_output +2837,7144803,"TERMINAL",0,0,"148",,terminal_output +2838,7145825,"TERMINAL",0,0,"259",,terminal_output +2839,7146936,"TERMINAL",0,0,"3620",,terminal_output +2840,7147979,"TERMINAL",0,0,"471",,terminal_output +2841,7149003,"TERMINAL",0,0,"582",,terminal_output +2842,7149463,"TERMINAL",0,0,"2025-07-21 16:24:56.133837: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +2843,7150028,"TERMINAL",0,0,"693",,terminal_output +2844,7151150,"TERMINAL",0,0,"7504",,terminal_output +2845,7152175,"TERMINAL",0,0,"815",,terminal_output +2846,7153201,"TERMINAL",0,0,"926",,terminal_output +2847,7154226,"TERMINAL",0,0,"5:0037",,terminal_output +2848,7155246,"TERMINAL",0,0,"148",,terminal_output +2849,7156375,"TERMINAL",0,0,"259",,terminal_output +2850,7156385,"TERMINAL",0,0,"2025-07-21 16:25:02.970478: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +2851,7157311,"TERMINAL",0,0,"3630",,terminal_output +2852,7158467,"TERMINAL",0,0,"482",,terminal_output +2853,7159392,"TERMINAL",0,0,"693",,terminal_output +2854,7160472,"TERMINAL",0,0,"79:004",,terminal_output +2855,7161599,"TERMINAL",0,0,"815",,terminal_output +2856,7162622,"TERMINAL",0,0,"926",,terminal_output +2857,7163602,"TERMINAL",0,0,"1037",,terminal_output +2858,7164645,"TERMINAL",0,0,"148",,terminal_output +2859,7165699,"TERMINAL",0,0,"259",,terminal_output +2860,7166207,"TERMINAL",0,0,"2025-07-21 16:25:12.862628: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. 
Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +2861,7166746,"TERMINAL",0,0,"3640",,terminal_output +2862,7167789,"TERMINAL",0,0,"471",,terminal_output +2863,7168869,"TERMINAL",0,0,"582",,terminal_output +2864,7169279,"TERMINAL",0,0,"WARNING:absl:Missing metrics for step 900\r\nERROR:absl:File /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/interactive/shift-spatial/000900/metrics/metrics not found.\r\nWARNING:absl:Missing metrics for step 1000\r\nERROR:absl:File /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/interactive/shift-spatial/001000/metrics/metrics not found.\r\nWARNING:absl:Missing metrics for step 800\r\nERROR:absl:File /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/interactive/shift-spatial/000800/metrics/metrics not found.\r\n",,terminal_output +2865,7169892,"TERMINAL",0,0,"693",,terminal_output +2866,7170921,"TERMINAL",0,0,"7104",,terminal_output +2867,7171958,"TERMINAL",0,0,"815",,terminal_output +2868,7173067,"TERMINAL",0,0,"926",,terminal_output +2869,7174213,"TERMINAL",0,0,"2037",,terminal_output +2870,7175125,"TERMINAL",0,0,"148",,terminal_output +2871,7177502,"TERMINAL",0,0,"2593650",,terminal_output +2872,7178210,"TERMINAL",0,0,"471",,terminal_output +2873,7179260,"TERMINAL",0,0,"582",,terminal_output +2874,7180308,"TERMINAL",0,0,"693",,terminal_output +2875,7181344,"TERMINAL",0,0,"7215",,terminal_output +2876,7182394,"TERMINAL",0,0,"926",,terminal_output +2877,7183467,"TERMINAL",0,0,"3037",,terminal_output +2878,7184495,"TERMINAL",0,0,"148",,terminal_output +2879,7185560,"TERMINAL",0,0,"259",,terminal_output +2880,7186642,"TERMINAL",0,0,"3640:00",,terminal_output +2881,7187662,"TERMINAL",0,0,"471",,terminal_output +2882,7188659,"TERMINAL",0,0,"582",,terminal_output +2883,7189708,"TERMINAL",0,0,"693",,terminal_output +2884,7190736,"TERMINAL",0,0,"7304",,terminal_output +2885,7191791,"TERMINAL",0,0,"815",,terminal_output +2886,7192882,"TERMINAL",0,0,"926",,terminal_output +2887,7193899,"TERMINAL",0,0,"4037",,terminal_output +2888,7195086,"TERMINAL",0,0,"148",,terminal_output +2889,7196058,"TERMINAL",0,0,"259",,terminal_output +2890,7197043,"TERMINAL",0,0,"3610",,terminal_output +2891,7198096,"TERMINAL",0,0,"471",,terminal_output +2892,7199232,"TERMINAL",0,0,"582",,terminal_output +2893,7200186,"TERMINAL",0,0,"693",,terminal_output +2894,7201279,"TERMINAL",0,0,"7404",,terminal_output +2895,7202274,"TERMINAL",0,0,"815",,terminal_output +2896,7203330,"TERMINAL",0,0,"926",,terminal_output +2897,7204454,"TERMINAL",0,0,"5048",,terminal_output +2898,7205478,"TERMINAL",0,0,"259",,terminal_output +2899,7206615,"TERMINAL",0,0,"3620",,terminal_output +2900,7207638,"TERMINAL",0,0,"471",,terminal_output +2901,7208663,"TERMINAL",0,0,"582",,terminal_output +2902,7210074,"TERMINAL",0,0,"693",,terminal_output +2903,7210648,"TERMINAL",0,0,"7504",,terminal_output +2904,7211691,"TERMINAL",0,0,"815",,terminal_output +2905,7212740,"TERMINAL",0,0,"926",,terminal_output +2906,7213790,"TERMINAL",0,0,"6:0037",,terminal_output +2907,7214834,"TERMINAL",0,0,"148",,terminal_output +2908,7215884,"TERMINAL",0,0,"259",,terminal_output +2909,7216923,"TERMINAL",0,0,"3630",,terminal_output +2910,7217966,"TERMINAL",0,0,"471",,terminal_output +2911,7219011,"TERMINAL",0,0,"582",,terminal_output +2912,7220068,"TERMINAL",0,0,"693",,terminal_output +2913,7221197,"TERMINAL",0,0,"740:004",,terminal_output 
+2914,7222219,"TERMINAL",0,0,"815",,terminal_output +2915,7223244,"TERMINAL",0,0,"926",,terminal_output +2916,7224269,"TERMINAL",0,0,"1037",,terminal_output +2917,7225292,"TERMINAL",0,0,"148",,terminal_output +2918,7226355,"TERMINAL",0,0,"259",,terminal_output +2919,7227443,"TERMINAL",0,0,"3741",,terminal_output +2920,7228411,"TERMINAL",0,0,"582",,terminal_output +2921,7229497,"TERMINAL",0,0,"693",,terminal_output +2922,7230514,"TERMINAL",0,0,"7104",,terminal_output +2923,7231544,"TERMINAL",0,0,"815",,terminal_output +2924,7232590,"TERMINAL",0,0,"926",,terminal_output +2925,7233632,"TERMINAL",0,0,"2037",,terminal_output +2926,7234675,"TERMINAL",0,0,"148",,terminal_output +2927,7235723,"TERMINAL",0,0,"259",,terminal_output +2928,7237278,"TERMINAL",0,0,"3650",,terminal_output +2929,7238399,"TERMINAL",0,0,"471",,terminal_output +2930,7239423,"TERMINAL",0,0,"593",,terminal_output +2931,7240447,"TERMINAL",0,0,"7204",,terminal_output +2932,7241471,"TERMINAL",0,0,"815",,terminal_output +2933,7242495,"TERMINAL",0,0,"926",,terminal_output +2934,7243525,"TERMINAL",0,0,"3037",,terminal_output +2935,7244645,"TERMINAL",0,0,"148",,terminal_output +2936,7245667,"TERMINAL",0,0,"259",,terminal_output +2937,7246694,"TERMINAL",0,0,"361:00",,terminal_output +2938,7247692,"TERMINAL",0,0,"471",,terminal_output +2939,7248772,"TERMINAL",0,0,"582",,terminal_output +2940,7249785,"TERMINAL",0,0,"693",,terminal_output +2941,7250829,"TERMINAL",0,0,"7304",,terminal_output +2942,7251916,"TERMINAL",0,0,"815",,terminal_output +2943,7252939,"TERMINAL",0,0,"926",,terminal_output +2944,7253962,"TERMINAL",0,0,"4037",,terminal_output +2945,7255087,"TERMINAL",0,0,"148",,terminal_output +2946,7256111,"TERMINAL",0,0,"259",,terminal_output +2947,7257102,"TERMINAL",0,0,"3610",,terminal_output +2948,7258152,"TERMINAL",0,0,"471",,terminal_output +2949,7259286,"TERMINAL",0,0,"582",,terminal_output +2950,7260449,"TERMINAL",0,0,"693",,terminal_output +2951,7261394,"TERMINAL",0,0,"7404",,terminal_output +2952,7262360,"TERMINAL",0,0,"826",,terminal_output +2953,7263477,"TERMINAL",0,0,"5037",,terminal_output +2954,7264438,"TERMINAL",0,0,"148",,terminal_output +2955,7265492,"TERMINAL",0,0,"259",,terminal_output +2956,7266540,"TERMINAL",0,0,"3620",,terminal_output +2957,7267582,"TERMINAL",0,0,"471",,terminal_output +2958,7268623,"TERMINAL",0,0,"582",,terminal_output +2959,7269675,"TERMINAL",0,0,"693",,terminal_output +2960,7270724,"TERMINAL",0,0,"7504",,terminal_output +2961,7271773,"TERMINAL",0,0,"815",,terminal_output +2962,7272831,"TERMINAL",0,0,"926",,terminal_output +2963,7273877,"TERMINAL",0,0,"7:0037",,terminal_output +2964,7274959,"TERMINAL",0,0,"148",,terminal_output +2965,7275977,"TERMINAL",0,0,"259",,terminal_output +2966,7277003,"TERMINAL",0,0,"3630",,terminal_output +2967,7278052,"TERMINAL",0,0,"471",,terminal_output +2968,7279152,"TERMINAL",0,0,"582",,terminal_output +2969,7280178,"TERMINAL",0,0,"693",,terminal_output +2970,7281202,"TERMINAL",0,0,"71:004",,terminal_output +2971,7282215,"TERMINAL",0,0,"815",,terminal_output +2972,7283353,"TERMINAL",0,0,"926",,terminal_output +2973,7284377,"TERMINAL",0,0,"1037",,terminal_output +2974,7285401,"TERMINAL",0,0,"159",,terminal_output +2975,7286425,"TERMINAL",0,0,"3640",,terminal_output +2976,7287449,"TERMINAL",0,0,"471",,terminal_output +2977,7288522,"TERMINAL",0,0,"582",,terminal_output +2978,7289571,"TERMINAL",0,0,"693",,terminal_output +2979,7290628,"TERMINAL",0,0,"7104",,terminal_output +2980,7291661,"TERMINAL",0,0,"815",,terminal_output 
+2981,7292797,"TERMINAL",0,0,"926",,terminal_output +2982,7293808,"TERMINAL",0,0,"2037",,terminal_output +2983,7294807,"TERMINAL",0,0,"148",,terminal_output +2984,7295948,"TERMINAL",0,0,"259",,terminal_output +2985,7296906,"TERMINAL",0,0,"3650",,terminal_output +2986,7297993,"TERMINAL",0,0,"471",,terminal_output +2987,7299017,"TERMINAL",0,0,"582",,terminal_output +2988,7300105,"TERMINAL",0,0,"693",,terminal_output +2989,7301171,"TERMINAL",0,0,"7204",,terminal_output +2990,7302192,"TERMINAL",0,0,"815",,terminal_output +2991,7303216,"TERMINAL",0,0,"926",,terminal_output +2992,7304343,"TERMINAL",0,0,"3037",,terminal_output +2993,7305367,"TERMINAL",0,0,"148",,terminal_output +2994,7306325,"TERMINAL",0,0,"259",,terminal_output +2995,7307366,"TERMINAL",0,0,"372:01",,terminal_output +2996,7308469,"TERMINAL",0,0,"582",,terminal_output +2997,7309463,"TERMINAL",0,0,"693",,terminal_output +2998,7310505,"TERMINAL",0,0,"7304",,terminal_output +2999,7311624,"TERMINAL",0,0,"815",,terminal_output +3000,7312639,"TERMINAL",0,0,"926",,terminal_output +3001,7313655,"TERMINAL",0,0,"4037",,terminal_output +3002,7314703,"TERMINAL",0,0,"148",,terminal_output +3003,7315747,"TERMINAL",0,0,"259",,terminal_output +3004,7316804,"TERMINAL",0,0,"3610",,terminal_output +3005,7317861,"TERMINAL",0,0,"471",,terminal_output +3006,7318933,"TERMINAL",0,0,"582",,terminal_output +3007,7320013,"TERMINAL",0,0,"693",,terminal_output +3008,7321034,"TERMINAL",0,0,"7404",,terminal_output +3009,7322046,"TERMINAL",0,0,"815",,terminal_output +3010,7323092,"TERMINAL",0,0,"926",,terminal_output +3011,7324285,"TERMINAL",0,0,"5037",,terminal_output +3012,7325323,"TERMINAL",0,0,"148",,terminal_output +3013,7326235,"TERMINAL",0,0,"259",,terminal_output +3014,7327284,"TERMINAL",0,0,"3620",,terminal_output +3015,7328408,"TERMINAL",0,0,"471",,terminal_output +3016,7329434,"TERMINAL",0,0,"593",,terminal_output +3017,7330456,"TERMINAL",0,0,"7504",,terminal_output +3018,7331585,"TERMINAL",0,0,"815",,terminal_output +3019,7332621,"TERMINAL",0,0,"926",,terminal_output +3020,7333588,"TERMINAL",0,0,"8:0037",,terminal_output +3021,7334631,"TERMINAL",0,0,"148",,terminal_output +3022,7335670,"TERMINAL",0,0,"259",,terminal_output +3023,7336733,"TERMINAL",0,0,"3630",,terminal_output +3024,7337765,"TERMINAL",0,0,"471",,terminal_output +3025,7338890,"TERMINAL",0,0,"582",,terminal_output +3026,7339877,"TERMINAL",0,0,"693",,terminal_output +3027,7340906,"TERMINAL",0,0,"72:004",,terminal_output +3028,7341954,"TERMINAL",0,0,"815",,terminal_output +3029,7343009,"TERMINAL",0,0,"926",,terminal_output +3030,7344087,"TERMINAL",0,0,"1037",,terminal_output +3031,7345203,"TERMINAL",0,0,"148",,terminal_output +3032,7346227,"TERMINAL",0,0,"259",,terminal_output +3033,7347193,"TERMINAL",0,0,"3640",,terminal_output +3034,7348275,"TERMINAL",0,0,"471",,terminal_output +3035,7349300,"TERMINAL",0,0,"582",,terminal_output +3036,7350335,"TERMINAL",0,0,"693",,terminal_output +3037,7351450,"TERMINAL",0,0,"8115",,terminal_output +3038,7352479,"TERMINAL",0,0,"926",,terminal_output +3039,7353483,"TERMINAL",0,0,"2037",,terminal_output +3040,7354630,"TERMINAL",0,0,"148",,terminal_output +3041,7355648,"TERMINAL",0,0,"259",,terminal_output +3042,7356834,"TERMINAL",0,0,"3650",,terminal_output +3043,7358074,"TERMINAL",0,0,"471",,terminal_output +3044,7359130,"TERMINAL",0,0,"582",,terminal_output +3045,7360256,"TERMINAL",0,0,"693",,terminal_output +3046,7361202,"TERMINAL",0,0,"7204",,terminal_output +3047,7362244,"TERMINAL",0,0,"815",,terminal_output 
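The "Sampling token i from frame f" lines printed further below come from sample.py's autoregressive loop, which the recording shows only through its log output. The sketch that follows is a hypothetical reconstruction of such a per-token loop in JAX; `sample_fn`, the token-grid layout, and `tokens_per_frame` are assumptions, not the script's actual code.

import jax
import jax.numpy as jnp

def sample_frame(sample_fn, tokens, frame_idx, tokens_per_frame, key):
    # tokens: (num_frames, tokens_per_frame) int32 grid, filled one token at a time
    for i in range(tokens_per_frame):
        print(f"Sampling token {i} from frame {frame_idx}")
        key, subkey = jax.random.split(key)
        # assumed model signature: returns (num_frames, tokens_per_frame, vocab) logits
        logits = sample_fn(tokens)
        next_tok = jax.random.categorical(subkey, logits[frame_idx, i])
        tokens = tokens.at[frame_idx, i].set(next_tok)
    return tokens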
+3048,7363329,"TERMINAL",0,0,"926",,terminal_output +3049,7364352,"TERMINAL",0,0,"3037",,terminal_output +3050,7365384,"TERMINAL",0,0,"159",,terminal_output +3051,7366425,"TERMINAL",0,0,"363:00",,terminal_output +3052,7367303,"TERMINAL",0,0,"^Csrun: interrupt (one more within 1 sec to abort)\r\nsrun: StepId=3365334.2 task 0: running\r\n",,terminal_output +3053,7367506,"TERMINAL",0,0,"471",,terminal_output +3054,7367516,"TERMINAL",0,0,"^Csrun: sending Ctrl-C to StepId=3365334.2\r\nsrun: forcing job termination\r\nsrun: Job step aborted: Waiting up to 32 seconds for job step to finish.\r\nslurmstepd: error: *** STEP 3365334.2 ON hkn0401 CANCELLED AT 2025-07-21T16:28:34 ***\r\n",,terminal_output +3055,7367650,"TERMINAL",0,0,"^Csrun: sending Ctrl-C to StepId=3365334.2\r\nsrun: job abort in progress\r\n",,terminal_output +3056,7367954,"TERMINAL",0,0,"]0;tum_cte0515@hkn0401:~/Projects/jafar[?2004h(jafar) [tum_cte0515@hkn0401 jafar]$ ",,terminal_output +3057,7368523,"TERMINAL",0,0,"582",,terminal_output +3058,7369678,"TERMINAL",0,0,"693",,terminal_output +3059,7370702,"TERMINAL",0,0,"7304",,terminal_output +3060,7372139,"TERMINAL",0,0,"815",,terminal_output +3061,7372257,"TERMINAL",0,0,"sh slurm/jobs/mihir/horeka/yolo-runs/sampling.sh /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/interactive/shift-spatial",,terminal_output +3062,7373158,"TERMINAL",0,0,"926",,terminal_output +3063,7374183,"TERMINAL",0,0,"4037",,terminal_output +3064,7374703,"models/dynamics.py",0,0,"",python,tab +3065,7375172,"TERMINAL",0,0,"148",,terminal_output +3066,7376022,"genie.py",0,0,"",python,tab +3067,7376239,"TERMINAL",0,0,"259",,terminal_output +3068,7377261,"TERMINAL",0,0,"3610",,terminal_output +3069,7378113,"sample.py",0,0,"",python,tab +3070,7378335,"TERMINAL",0,0,"471",,terminal_output +3071,7379405,"TERMINAL",0,0,"593",,terminal_output +3072,7380033,"sample.py",4115,0,"",python,selection_command +3073,7380398,"sample.py",4115,0,"#",python,content +3074,7380399,"sample.py",4116,0,"",python,selection_keyboard +3075,7380441,"TERMINAL",0,0,"7404",,terminal_output +3076,7380491,"sample.py",4116,0," ",python,content +3077,7380492,"sample.py",4117,0,"",python,selection_keyboard +3078,7381062,"sample.py",4116,0,"",python,selection_command +3079,7381143,"sample.py",4181,0,"",python,selection_command +3080,7381448,"TERMINAL",0,0,"815",,terminal_output +3081,7381470,"sample.py",4180,0,"",python,selection_command +3082,7382038,"sample.py",4180,1,"",python,content +3083,7382146,"sample.py",4180,1,"",python,content +3084,7382504,"TERMINAL",0,0,"926",,terminal_output +3085,7383529,"TERMINAL",0,0,"5037",,terminal_output +3086,7384629,"TERMINAL",0,0,"148",,terminal_output +3087,7385657,"TERMINAL",0,0,"259",,terminal_output +3088,7386680,"TERMINAL",0,0,"3620",,terminal_output +3089,7387806,"TERMINAL",0,0,"471",,terminal_output +3090,7388740,"TERMINAL",0,0,"\r\n[?2004l\r\r\n# Log the sbatch script\r\ncat $0\r\n\r\nmodule unload mpi/openmpi/5.0\r\nmodule unload devel/cuda/12.4\r\n# source .venv/bin/activate\r\n\r\narray_records_dir=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_new/open_ai_minecraft_arrayrecords_chunked\r\n\r\njob_name=$SLURM_JOB_NAME\r\nslurm_job_id=$SLURM_JOB_ID\r\n\r\nCHECKPOINT_DIR=$ws_dir/checkpoints/$job_name/$slurm_job_id\r\nmkdir -p $CHECKPOINT_DIR\r\n\r\ntokenizer_ckpt_dir=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/big-runs/tokenizer-lr-scaling/train_tokenizer_lr_sweep_1e-4\r\ndynamics_ckpt_dir=$1\r\necho $dynamics_ckpt_dir\r\n\r\nenv | grep 
SLURM\r\n\r\nsrun python sample.py \\r\n --checkpoint $dynamics_ckpt_dir \\r\n --dyna_dim=128 \\r\n --dyna_num_blocks=2 \\r\n --dyna_num_heads=4 \\r\n --seq_len=2 \\r\n --data_dir $array_records_dir\r\n\r\n",,terminal_output +3091,7388822,"TERMINAL",0,0,"582",,terminal_output +3092,7388986,"TERMINAL",0,0,"/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/interactive/shift-spatial\r\nSLURM_STEP_NUM_TASKS=1\r\nSLURM_JOB_USER=tum_cte0515\r\nSLURM_TASKS_PER_NODE=1\r\nSLURM_JOB_UID=999226\r\nSLURM_TASK_PID=1431282\r\nSLURM_JOB_GPUS=1\r\nSLURM_LOCALID=0\r\nSLURM_SUBMIT_DIR=/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar\r\nSLURMD_NODENAME=hkn0401\r\nSLURM_JOB_START_TIME=1753105533\r\nSLURM_STEP_NODELIST=hkn0401\r\nSLURM_CLUSTER_NAME=hk\r\nSLURM_JOB_END_TIME=1753109133\r\nSLURM_PMI2_SRUN_PORT=40535\r\nSLURM_CPUS_ON_NODE=6\r\nSLURM_JOB_CPUS_PER_NODE=6\r\nSLURM_GPUS_ON_NODE=1\r\nSLURM_GTIDS=0\r\nSLURM_JOB_PARTITION=dev_accelerated\r\nSLURM_TRES_PER_TASK=cpu=5\r\nSLURM_OOM_KILL_STEP=0\r\nSLURM_JOB_NUM_NODES=1\r\nSLURM_STEPID=4294967290\r\nSLURM_JOBID=3365334\r\nSLURM_PTY_PORT=39479\r\nSLURM_JOB_QOS=normal\r\nSLURM_LAUNCH_NODE_IPADDR=10.0.7.198\r\nSLURM_PTY_WIN_ROW=29\r\nSLURM_PMI2_PROC_MAPPING=(vector,(0,1,1))\r\nSLURMD_DEBUG=2\r\nSLURM_PROCID=0\r\nSLURM_CPUS_PER_TASK=5\r\nSLURM_NTASKS=1\r\nSLURM_TOPOLOGY_ADDR=hkibb.hkibbi1.hkibbi1e11.hkn0401\r\nSLURM_TOPOLOGY_ADDR_PATTERN=switch.switch.switch.node\r\nSLURM_SRUN_COMM_HOST=10.0.7.198\r\nSLURM_SCRIPT_CONTEXT=prolog_task\r\nSLURM_PTY_WIN_COL=197\r\nSLURM_NODELIST=hkn0401\r\nSLURM_SRUN_COMM_PORT=38425\r\nSLURM_STEP_ID=4294967290\r\nSLURM_JOB_ACCOUNT=hk-project-p0023960\r\nSLURM_PRIO_PROCESS=0\r\nSLURM_NPROCS=1\r\nSLURM_NNODES=1\r\nSLURM_SUBMIT_HOST=hkn1990.localdomain\r\nSLURM_JOB_ID=3365334\r\nSLURM_NODEID=0\r\nSLURM_STEP_NUM_NODES=1\r\nSLURM_STEP_TASKS_PER_NODE=1\r\nSLURM_MPI_TYPE=pmi2\r\nSLURM_PMI2_STEP_NODES=hkn0401\r\nSLURM_CONF=/etc/slurm/slurm.conf\r\nSLURM_JOB_NAME=interactive\r\nSLURM_NTASKS_PER_NODE=1\r\nSLURM_STEP_LAUNCHER_PORT=38425\r\nSLURM_JOB_GID=502226\r\nSLURM_JOB_NODELIST=hkn0401\r\n",,terminal_output +3093,7389041,"TERMINAL",0,0,"GpuFreq=control_disabled\r\n",,terminal_output +3094,7389863,"TERMINAL",0,0,"693",,terminal_output +3095,7390478,"TERMINAL",0,0,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/tyro/_parsers.py:347: UserWarning: The field `param-dtype` is annotated with type ``, but the default value `` has type ``. We'll try to handle this gracefully, but it may cause unexpected behavior.\r\n warnings.warn(message)\r\n/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/tyro/_parsers.py:347: UserWarning: The field `dtype` is annotated with type ``, but the default value `` has type ``. We'll try to handle this gracefully, but it may cause unexpected behavior.\r\n warnings.warn(message)\r\n",,terminal_output +3096,7390884,"TERMINAL",0,0,"7504",,terminal_output +3097,7392005,"TERMINAL",0,0,"815",,terminal_output +3098,7392222,"TERMINAL",0,0,"2025-07-21 16:28:58.880954: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. 
Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +3099,7393043,"TERMINAL",0,0,"926",,terminal_output +3100,7394048,"TERMINAL",0,0,"9:0037",,terminal_output +3101,7395175,"TERMINAL",0,0,"148",,terminal_output +3102,7396200,"TERMINAL",0,0,"259",,terminal_output +3103,7397223,"TERMINAL",0,0,"3630",,terminal_output +3104,7398217,"TERMINAL",0,0,"471",,terminal_output +3105,7399271,"TERMINAL",0,0,"582",,terminal_output +3106,7400309,"TERMINAL",0,0,"693",,terminal_output +3107,7401423,"TERMINAL",0,0,"73:015",,terminal_output +3108,7401989,"TERMINAL",0,0,"2025-07-21 16:29:08.656674: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +3109,7402447,"TERMINAL",0,0,"926",,terminal_output +3110,7403469,"TERMINAL",0,0,"1037",,terminal_output +3111,7404596,"TERMINAL",0,0,"148",,terminal_output +3112,7405553,"TERMINAL",0,0,"259",,terminal_output +3113,7406650,"TERMINAL",0,0,"3640",,terminal_output +3114,7407667,"TERMINAL",0,0,"471",,terminal_output +3115,7408700,"TERMINAL",0,0,"582",,terminal_output +3116,7409103,"TERMINAL",0,0,"2025-07-21 16:29:15.686946: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +3117,7409752,"TERMINAL",0,0,"693",,terminal_output +3118,7410801,"TERMINAL",0,0,"7104",,terminal_output +3119,7411864,"TERMINAL",0,0,"815",,terminal_output +3120,7412890,"TERMINAL",0,0,"926",,terminal_output +3121,7413939,"TERMINAL",0,0,"2037",,terminal_output +3122,7414998,"TERMINAL",0,0,"148",,terminal_output +3123,7416041,"TERMINAL",0,0,"259",,terminal_output +3124,7417090,"TERMINAL",0,0,"3650",,terminal_output +3125,7418137,"TERMINAL",0,0,"471",,terminal_output +3126,7418913,"TERMINAL",0,0,"2025-07-21 16:29:25.582397: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. 
Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +3127,7419183,"TERMINAL",0,0,"582",,terminal_output +3128,7420223,"TERMINAL",0,0,"693",,terminal_output +3129,7421293,"TERMINAL",0,0,"7204",,terminal_output +3130,7422380,"TERMINAL",0,0,"815",,terminal_output +3131,7422434,"TERMINAL",0,0,"WARNING:absl:Missing metrics for step 900\r\nERROR:absl:File /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/interactive/shift-spatial/000900/metrics/metrics not found.\r\nWARNING:absl:Missing metrics for step 800\r\nERROR:absl:File /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/interactive/shift-spatial/000800/metrics/metrics not found.\r\nWARNING:absl:Missing metrics for step 1000\r\nERROR:absl:File /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/interactive/shift-spatial/001000/metrics/metrics not found.\r\n",,terminal_output +3132,7423656,"TERMINAL",0,0,"937",,terminal_output +3133,7424410,"TERMINAL",0,0,"3148",,terminal_output +3134,7425453,"TERMINAL",0,0,"259",,terminal_output +3135,7426490,"TERMINAL",0,0,"364:00",,terminal_output +3136,7427537,"TERMINAL",0,0,"471",,terminal_output +3137,7428577,"TERMINAL",0,0,"582",,terminal_output +3138,7429601,"TERMINAL",0,0,"2025-07-21 16:29:36.269846: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +3139,7429670,"TERMINAL",0,0,"693",,terminal_output +3140,7430665,"TERMINAL",0,0,"7304",,terminal_output +3141,7431708,"TERMINAL",0,0,"815",,terminal_output +3142,7432760,"TERMINAL",0,0,"926",,terminal_output +3143,7433809,"TERMINAL",0,0,"4037",,terminal_output +3144,7434957,"TERMINAL",0,0,"148",,terminal_output +3145,7435900,"TERMINAL",0,0,"259",,terminal_output +3146,7436156,"TERMINAL",0,0,"2025-07-21 16:29:42.825412: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. 
Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +3147,7436955,"TERMINAL",0,0,"3610",,terminal_output +3148,7438030,"TERMINAL",0,0,"471",,terminal_output +3149,7439048,"TERMINAL",0,0,"582",,terminal_output +3150,7440177,"TERMINAL",0,0,"693",,terminal_output +3151,7441204,"TERMINAL",0,0,"7404",,terminal_output +3152,7442190,"TERMINAL",0,0,"815",,terminal_output +3153,7443249,"TERMINAL",0,0,"926",,terminal_output +3154,7444289,"TERMINAL",0,0,"5037",,terminal_output +3155,7445329,"TERMINAL",0,0,"148",,terminal_output +3156,7446376,"TERMINAL",0,0,"2620",,terminal_output +3157,7447418,"TERMINAL",0,0,"471",,terminal_output +3158,7448478,"TERMINAL",0,0,"582",,terminal_output +3159,7449549,"TERMINAL",0,0,"693",,terminal_output +3160,7450626,"TERMINAL",0,0,"7504",,terminal_output +3161,7451758,"TERMINAL",0,0,"815",,terminal_output +3162,7452680,"TERMINAL",0,0,"926",,terminal_output +3163,7453738,"TERMINAL",0,0,"30:0037",,terminal_output +3164,7454754,"TERMINAL",0,0,"148",,terminal_output +3165,7455800,"TERMINAL",0,0,"259",,terminal_output +3166,7456851,"TERMINAL",0,0,"3630",,terminal_output +3167,7457996,"TERMINAL",0,0,"471",,terminal_output +3168,7458948,"TERMINAL",0,0,"582",,terminal_output +3169,7459994,"TERMINAL",0,0,"693",,terminal_output +3170,7461072,"TERMINAL",0,0,"74:004",,terminal_output +3171,7462093,"TERMINAL",0,0,"815",,terminal_output +3172,7463220,"TERMINAL",0,0,"926",,terminal_output +3173,7464244,"TERMINAL",0,0,"1037",,terminal_output +3174,7465268,"TERMINAL",0,0,"148",,terminal_output +3175,7466252,"TERMINAL",0,0,"259",,terminal_output +3176,7467299,"TERMINAL",0,0,"3640",,terminal_output +3177,7468388,"TERMINAL",0,0,"482",,terminal_output +3178,7469392,"TERMINAL",0,0,"693",,terminal_output +3179,7470491,"TERMINAL",0,0,"7104",,terminal_output +3180,7471466,"TERMINAL",0,0,"815",,terminal_output +3181,7472549,"TERMINAL",0,0,"926",,terminal_output +3182,7473567,"TERMINAL",0,0,"2037",,terminal_output +3183,7474689,"TERMINAL",0,0,"148",,terminal_output +3184,7475717,"TERMINAL",0,0,"259",,terminal_output +3185,7476686,"TERMINAL",0,0,"3650",,terminal_output +3186,7477792,"TERMINAL",0,0,"471",,terminal_output +3187,7478864,"TERMINAL",0,0,"582",,terminal_output +3188,7479912,"TERMINAL",0,0,"693",,terminal_output +3189,7481038,"TERMINAL",0,0,"7204",,terminal_output +3190,7481972,"TERMINAL",0,0,"815",,terminal_output +3191,7483087,"TERMINAL",0,0,"926",,terminal_output +3192,7484021,"TERMINAL",0,0,"autoreg sampling...\r\nSampling token 0 from frame 1\r\nSampling token 1 from frame 1\r\nSampling token 2 from frame 1\r\nSampling token 3 from frame 1\r\nSampling token 4 from frame 1\r\nSampling token 5 from frame 1\r\nSampling token 6 from frame 1\r\nSampling token 7 from frame 1\r\nSampling token 8 from frame 1\r\nSampling token 9 from frame 1\r\nSampling token 10 from frame 1\r\nSampling token 11 from frame 1\r\nSampling token 12 from frame 1\r\nSampling token 13 from frame 1\r\nSampling token 14 from frame 1\r\nSampling token 15 from frame 1\r\nSampling token 16 from frame 1\r\nSampling token 17 from frame 1\r\nSampling token 18 from frame 1\r\nSampling token 19 from frame 1\r\nSampling token 20 from frame 1\r\nSampling token 21 from frame 1\r\nSampling token 22 from frame 1\r\nSampling token 23 from frame 1\r\nSampling token 24 from frame 1\r\nSampling token 25 from frame 1\r\nSampling token 26 from frame 1\r\nSampling token 27 from frame 1\r\nSampling token 28 from frame 
1\r\nSampling token 29 from frame 1\r\nSampling token 30 from frame 1\r\nSampling token 31 from frame 1\r\nSampling token 32 from frame 1\r\nSampling token 33 from frame 1\r\nSampling token 34 from frame 1\r\nSampling token 35 from frame 1\r\nSampling token 36 from frame 1\r\nSampling token 37 from frame 1\r\nSampling token 38 from frame 1\r\nSampling token 39 from frame 1\r\nSampling token 40 from frame 1\r\nSampling token 41 from frame 1\r\nSampling token 42 from frame 1\r\nSampling token 43 from frame 1\r\nSampling token 44 from frame 1\r\nSampling token 45 from frame 1\r\nSampling token 46 from frame 1\r\nSampling token 47 from frame 1\r\nSampling token 48 from frame 1\r\nSampling token 49 from frame 1\r\nSampling token 50 from frame 1\r\nSampling token 51 from frame 1\r\nSampling token 52 from frame 1\r\nSampling token 53 from frame 1\r\nSampling token 54 from frame 1\r\nSampling token 55 from frame 1\r\nSampling token 56 from frame 1\r\nSampling token 57 from frame 1\r\nSampling token 58 from frame 1\r\nSampling token 59 from frame 1\r\nSampling token 60 from frame 1\r\nSampling token 61 from frame 1\r\nSampling token 62 from frame 1\r\nSampling token 63 from frame 1\r\nSampling token 64 from frame 1\r\nSampling token 65 from frame 1\r\nSampling token 66 from frame 1\r\nSampling token 67 from frame 1\r\nSampling token 68 from frame 1\r\nSampling token 69 from frame 1\r\nSampling token 70 from frame 1\r\nSampling token 71 from frame 1\r\nSampling token 72 from frame 1\r\nSampling token 73 from frame 1\r\nSampling token 74 from frame 1\r\nSampling token 75 from frame 1\r\nSampling token 76 from frame 1\r\nSampling token 77 from frame 1\r\nSampling token 78 from frame 1\r\nSampling token 79 from frame 1\r\nSampling token 80 from frame 1\r\nSampling token 81 from frame 1\r\nSampling token 82 from frame 1\r\nSampling token 83 from frame 1\r\nSampling token 84 from frame 1\r\nSampling token 85 from frame 1\r\nSampling token 86 from frame 1\r\nSampling token 87 from frame 1\r\nSampling token 88 from frame 1\r\nSampling token 89 from frame 1\r\nSampling token 90 from frame 1\r\nSampling token 91 from frame 1\r\nSampling token 92 from frame 1\r\nSampling token 93 from frame 1\r\nSampling token 94 from frame 1\r\nSampling token 95 from frame 1\r\nSampling token 96 from frame 1\r\nSampling token 97 from frame 1\r\nSampling token 98 from frame 1\r\nSampling token 99 from frame 1\r\nSampling token 100 from frame 1\r\nSampling token 101 from frame 1\r\nSampling token 102 from frame 1\r\nSampling token 103 from frame 1\r\nSampling token 104 from frame 1\r\nSampling token 105 from frame 1\r\nSampling token 106 from frame 1\r\nSampling token 107 from frame 1\r\nSampling token 108 from frame 1\r\nSampling token 109 from frame 1\r\nSampling token 110 from frame 1\r\nSampling token 111 from frame 1\r\nSampling token 112 from frame 1\r\nSampling token 113 from frame 1\r\nSampling token 114 from frame 1\r\nSampling token 115 from frame 1\r\nSampling token 116 from frame 1\r\nSampling token 117 from frame 1\r\nSampling token 118 from frame 1\r\nSampling token 119 from frame 1\r\nSampling token 120 from frame 1\r\nSampling token 121 from frame 1\r\nSampling token 122 from frame 1\r\nSampling token 123 from frame 1\r\nSampling token 124 from frame 1\r\nSampling token 125 from frame 1\r\nSampling token 126 from frame 1\r\nSampling token 127 from frame 1\r\nSampling token 128 from frame 1\r\nSampling token 129 from frame 1\r\nSampling token 130 from frame 1\r\nSampling token 131 from frame 1\r\nSampling token 
132 from frame 1\r\nSampling token 133 from frame 1\r\nSampling token 134 from frame 1\r\nSampling token 135 from frame 1\r\nSampling token 136 from frame 1\r\nSampling token 137 from frame 1\r\nSampling token 138 from frame 1\r\nSampling token 139 from frame 1\r\nSampling token 140 from frame 1\r\nSampling token 141 from frame 1\r\nSampling token 142 from frame 1\r\nSampling token 143 from frame 1\r\nSampling token 144 from frame 1\r\nSampling token 145 from frame 1\r\nSampling token 146 from frame 1\r\nSampling token 147 from frame 1\r\nSampling token 148 from frame 1\r\nSampling token 149 from frame 1\r\nSampling token 150 from frame 1\r\nSampling token 151 from frame 1\r\nSampling token 152 from frame 1\r\nSampling token 153 from frame 1\r\nSampling token 154 from frame 1\r\nSampling token 155 from frame 1\r\nSampling token 156 from frame 1\r\nSampling token 157 from frame 1\r\nSampling token 158 from frame 1\r\nSampling token 159 from frame 1\r\nSampling token 160 from frame 1\r\nSampling token 161 from frame 1\r\nSampling token 162 from frame 1\r\nSampling token 163 from frame 1\r\nSampling token 164 from frame 1\r\nSampling token 165 from frame 1\r\nSampling token 166 from frame 1\r\nSampling token 167 from frame 1\r\nSampling token 168 from frame 1\r\nSampling token 169 from frame 1\r\nSampling token 170 from frame 1\r\nSampling token 171 from frame 1\r\nSampling token 172 from frame 1\r\nSampling token 173 from frame 1\r\nSampling token 174 from frame 1\r\nSampling token 175 from frame 1\r\nSampling token 176 from frame 1\r\nSampling token 177 from frame 1\r\nSampling token 178 from frame 1\r\nSampling token 179 from frame 1\r\nSampling token 180 from frame 1\r\nSampling token 181 from frame 1\r\nSampling token 182 from frame 1\r\nSampling token 183 from frame 1\r\nSampling token 184 from frame 1\r\nSampling token 185 from frame 1\r\nSampling token 186 from frame 1\r\nSampling token 187 from frame 1\r\nSampling token 188 from frame 1\r\nSampling token 189 from frame 1\r\nSampling token 190 from frame 1\r\nSampling token 191 from frame 1\r\nSampling token 192 from frame 1\r\nSampling token 193 from frame 1\r\nSampling token 194 from frame 1\r\nSampling token 195 from frame 1\r\nSampling token 196 from frame 1\r\nSampling token 197 from frame 1\r\nSampling token 198 from frame 1\r\nSampling token 199 from frame 1\r\nSampling token 200 from frame 1\r\nSampling token 201 from frame 1\r\nSampling token 202 from frame 1\r\nSampling token 203 from frame 1\r\nSampling token 204 from frame 1\r\nSampling token 205 from frame 1\r\nSampling token 206 from frame 1\r\nSampling token 207 from frame 1\r\nSampling token 208 from frame 1\r\nSampling token 209 from frame 1\r\nSampling token 210 from frame 1\r\nSampling token 211 from frame 1\r\nSampling token 212 from frame 1\r\nSampling token 213 from frame 1\r\nSampling token 214 from frame 1\r\nSampling token 215 from frame 1\r\nSampling token 216 from frame 1\r\nSampling token 217 from frame 1\r\nSampling token 218 from frame 1\r\nSampling token 219 from frame 1\r\nSampling token 220 from frame 1\r\nSampling token 221 from frame 1\r\nSampling token 222 from frame 1\r\nSampling token 223 from frame 1\r\nSampling token 224 from frame 1\r\nSampling token 225 from frame 1\r\nSampling token 226 from frame 1\r\nSampling token 227 from frame 1\r\nSampling token 228 from frame 1\r\nSampling token 229 from frame 1\r\nSampling token 230 from frame 1\r\nSampling token 231 from frame 1\r\nSampling token 232 from frame 1\r\nSampling token 233 from frame 
1\r\nSampling token 234 from frame 1\r\nSampling token 235 from frame 1\r\nSampling token 236 from frame 1\r\nSampling token 237 from frame 1\r\nSampling token 238 from frame 1\r\nSampling token 239 from frame 1\r\nSampling token 240 from frame 1\r\nSampling token 241 from frame 1\r\nSampling token 242 from frame 1\r\nSampling token 243 from frame 1\r\nSampling token 244 from frame 1\r\nSampling token 245 from frame 1\r\nSampling token 246 from frame 1\r\nSampling token 247 from frame 1\r\nSampling token 248 from frame 1\r\nSampling token 249 from frame 1\r\nSampling token 250 from frame 1\r\nSampling token 251 from frame 1\r\nSampling token 252 from frame 1\r\nSampling token 253 from frame 1\r\nSampling token 254 from frame 1\r\nSampling token 255 from frame 1\r\nSampling token 256 from frame 1\r\nSampling token 257 from frame 1\r\n",,terminal_output +3193,7484083,"TERMINAL",0,0,"3037",,terminal_output +3194,7485134,"TERMINAL",0,0,"148",,terminal_output +3195,7486272,"TERMINAL",0,0,"259",,terminal_output +3196,7487243,"TERMINAL",0,0,"365:00",,terminal_output +3197,7488309,"TERMINAL",0,0,"471",,terminal_output +3198,7489333,"TERMINAL",0,0,"582",,terminal_output +3199,7490360,"TERMINAL",0,0,"693",,terminal_output +3200,7491380,"TERMINAL",0,0,"7315",,terminal_output +3201,7492402,"TERMINAL",0,0,"926",,terminal_output +3202,7493481,"TERMINAL",0,0,"4037",,terminal_output +3203,7494555,"TERMINAL",0,0,"148",,terminal_output +3204,7495512,"TERMINAL",0,0,"259",,terminal_output +3205,7496579,"TERMINAL",0,0,"3610",,terminal_output +3206,7497628,"TERMINAL",0,0,"471",,terminal_output +3207,7498654,"TERMINAL",0,0,"582",,terminal_output +3208,7499778,"TERMINAL",0,0,"693",,terminal_output +3209,7500802,"TERMINAL",0,0,"7404",,terminal_output +3210,7501783,"TERMINAL",0,0,"815",,terminal_output +3211,7502829,"TERMINAL",0,0,"926",,terminal_output +3212,7503869,"TERMINAL",0,0,"5037",,terminal_output +3213,7504998,"TERMINAL",0,0,"148",,terminal_output +3214,7506021,"TERMINAL",0,0,"259",,terminal_output +3215,7507049,"TERMINAL",0,0,"3620",,terminal_output +3216,7508027,"TERMINAL",0,0,"471",,terminal_output +3217,7509093,"TERMINAL",0,0,"582",,terminal_output +3218,7510119,"TERMINAL",0,0,"693",,terminal_output +3219,7511247,"TERMINAL",0,0,"7504",,terminal_output +3220,7512194,"TERMINAL",0,0,"815",,terminal_output +3221,7513295,"TERMINAL",0,0,"926",,terminal_output +3222,7514319,"TERMINAL",0,0,"1:0037",,terminal_output +3223,7515327,"TERMINAL",0,0,"148",,terminal_output +3224,7516368,"TERMINAL",0,0,"2630",,terminal_output +3225,7517493,"TERMINAL",0,0,"471",,terminal_output +3226,7518503,"TERMINAL",0,0,"582",,terminal_output +3227,7519614,"TERMINAL",0,0,"693",,terminal_output +3228,7520564,"TERMINAL",0,0,"75:004",,terminal_output +3229,7521589,"TERMINAL",0,0,"815",,terminal_output +3230,7522612,"TERMINAL",0,0,"926",,terminal_output +3231,7523739,"TERMINAL",0,0,"1037",,terminal_output +3232,7524772,"TERMINAL",0,0,"Sampling token 258 from frame 1\r\nSampling token 259 from frame 1\r\nSampling token 260 from frame 1\r\nSampling token 261 from frame 1\r\nSampling token 262 from frame 1\r\nSampling token 263 from frame 1\r\nSampling token 264 from frame 1\r\nSampling token 265 from frame 1\r\nSampling token 266 from frame 1\r\nSampling token 267 from frame 1\r\nSampling token 268 from frame 1\r\nSampling token 269 from frame 1\r\nSampling token 270 from frame 1\r\nSampling token 271 from frame 1\r\nSampling token 272 from frame 1\r\nSampling token 273 from frame 1\r\nSampling token 274 from frame 1\r\nSampling token 275 
from frame 1\r\nSampling token 276 from frame 1\r\nSampling token 277 from frame 1\r\nSampling token 278 from frame 1\r\nSampling token 279 from frame 1\r\nSampling token 280 from frame 1\r\nSampling token 281 from frame 1\r\nSampling token 282 from frame 1\r\nSampling token 283 from frame 1\r\nSampling token 284 from frame 1\r\nSampling token 285 from frame 1\r\nSampling token 286 from frame 1\r\nSampling token 287 from frame 1\r\nSampling token 288 from frame 1\r\nSampling token 289 from frame 1\r\nSampling token 290 from frame 1\r\nSampling token 291 from frame 1\r\nSampling token 292 from frame 1\r\nSampling token 293 from frame 1\r\nSampling token 294 from frame 1\r\nSampling token 295 from frame 1\r\nSampling token 296 from frame 1\r\nSampling token 297 from frame 1\r\nSampling token 298 from frame 1\r\nSampling token 299 from frame 1\r\nSampling token 300 from frame 1\r\nSampling token 301 from frame 1\r\nSampling token 302 from frame 1\r\nSampling token 303 from frame 1\r\nSampling token 304 from frame 1\r\nSampling token 305 from frame 1\r\nSampling token 306 from frame 1\r\nSampling token 307 from frame 1\r\nSampling token 308 from frame 1\r\nSampling token 309 from frame 1\r\nSampling token 310 from frame 1\r\nSampling token 311 from frame 1\r\nSampling token 312 from frame 1\r\nSampling token 313 from frame 1\r\nSampling token 314 from frame 1\r\nSampling token 315 from frame 1\r\nSampling token 316 from frame 1\r\nSampling token 317 from frame 1\r\nSampling token 318 from frame 1\r\nSampling token 319 from frame 1\r\nSampling token 320 from frame 1\r\nSampling token 321 from frame 1\r\nSampling token 322 from frame 1\r\nSampling token 323 from frame 1\r\nSampling token 324 from frame 1\r\nSampling token 325 from frame 1\r\nSampling token 326 from frame 1\r\nSampling token 327 from frame 1\r\nSampling token 328 from frame 1\r\nSampling token 329 from frame 1\r\nSampling token 330 from frame 1\r\nSampling token 331 from frame 1\r\nSampling token 332 from frame 1\r\nSampling token 333 from frame 1\r\nSampling token 334 from frame 1\r\nSampling token 335 from frame 1\r\nSampling token 336 from frame 1\r\nSampling token 337 from frame 1\r\nSampling token 338 from frame 1\r\nSampling token 339 from frame 1\r\nSampling token 340 from frame 1\r\nSampling token 341 from frame 1\r\nSampling token 342 from frame 1\r\nSampling token 343 from frame 1\r\nSampling token 344 from frame 1\r\nSampling token 345 from frame 1\r\nSampling token 346 from frame 1\r\nSampling token 347 from frame 1\r\nSampling token 348 from frame 1\r\nSampling token 349 from frame 1\r\nSampling token 350 from frame 1\r\nSampling token 351 from frame 1\r\nSampling token 352 from frame 1\r\nSampling token 353 from frame 1\r\nSampling token 354 from frame 1\r\nSampling token 355 from frame 1\r\nSampling token 356 from frame 1\r\nSampling token 357 from frame 1\r\nSampling token 358 from frame 1\r\nSampling token 359 from frame 1\r\nSampling token 360 from frame 1\r\nSampling token 361 from frame 1\r\nSampling token 362 from frame 1\r\nSampling token 363 from frame 1\r\nSampling token 364 from frame 1\r\nSampling token 365 from frame 1\r\nSampling token 366 from frame 1\r\nSampling token 367 from frame 1\r\nSampling token 368 from frame 1\r\nSampling token 369 from frame 1\r\nSampling token 370 from frame 1\r\nSampling token 371 from frame 1\r\nSampling token 372 from frame 1\r\nSampling token 373 from frame 1\r\nSampling token 374 from frame 1\r\nSampling token 375 from frame 1\r\nSampling token 376 from frame 
1\r\n[condensed: repetitive autoregressive-sampling progress lines, Sampling token 377 from frame 1 through Sampling token 513 from frame 1]\r\n",,terminal_output
+3233,7524772,"TERMINAL",0,0,"148",,terminal_output
+3234,7525789,"TERMINAL",0,0,"259",,terminal_output
+3235,7526775,"TERMINAL",0,0,"3640",,terminal_output
+3236,7527814,"TERMINAL",0,0,"471",,terminal_output
+3237,7528860,"TERMINAL",0,0,"582",,terminal_output
+3238,7529898,"TERMINAL",0,0,"693",,terminal_output
+3239,7530939,"TERMINAL",0,0,"7104",,terminal_output
+3240,7532035,"TERMINAL",0,0,"815",,terminal_output
+3241,7533059,"TERMINAL",0,0,"926",,terminal_output
+3242,7534083,"TERMINAL",0,0,"2037",,terminal_output
+3243,7535125,"TERMINAL",0,0,"148",,terminal_output
+3244,7536138,"TERMINAL",0,0,"259",,terminal_output
+3245,7537197,"TERMINAL",0,0,"3650",,terminal_output
+3246,7538280,"TERMINAL",0,0,"471",,terminal_output
+3247,7539304,"TERMINAL",0,0,"582",,terminal_output
+3248,7540329,"TERMINAL",0,0,"693",,terminal_output
+3249,7541353,"TERMINAL",0,0,"7215",,terminal_output
+3250,7542378,"TERMINAL",0,0,"926",,terminal_output
+3251,7543467,"TERMINAL",0,0,"3037",,terminal_output
+3252,7544526,"TERMINAL",0,0,"148",,terminal_output
+3253,7545550,"TERMINAL",0,0,"259",,terminal_output
+3254,7546549,"TERMINAL",0,0,"366:00",,terminal_output
+3255,7546961,"TERMINAL",0,0,"watch",,terminal_focus
+3256,7547587,"TERMINAL",0,0,"471",,terminal_output
+3257,7548724,"TERMINAL",0,0,"582",,terminal_output
+3258,7549673,"TERMINAL",0,0,"693",,terminal_output
+3259,7550783,"TERMINAL",0,0,"7304",,terminal_output
+3260,7551800,"TERMINAL",0,0,"815",,terminal_output
+3261,7553037,"TERMINAL",0,0,"926",,terminal_output
+3262,7554158,"TERMINAL",0,0,"4037",,terminal_output
+3263,7554864,"TERMINAL",0,0,"148",,terminal_output
+3264,7555902,"TERMINAL",0,0,"259",,terminal_output
+3265,7556938,"TERMINAL",0,0,"3610",,terminal_output
+3266,7557985,"TERMINAL",0,0,"471",,terminal_output
+3267,7559012,"TERMINAL",0,0,"582",,terminal_output
+3268,7560049,"TERMINAL",0,0,"693",,terminal_output
+3269,7561113,"TERMINAL",0,0,"7404",,terminal_output
+3270,7562183,"TERMINAL",0,0,"815",,terminal_output
+3271,7563201,"TERMINAL",0,0,"926",,terminal_output
+3272,7564250,"TERMINAL",0,0,"5037",,terminal_output
+3273,7565371,"TERMINAL",0,0,"148",,terminal_output
+3274,7565425,"TERMINAL",0,0,"[condensed: repetitive autoregressive-sampling progress lines, Sampling token 514 from frame 1 through Sampling token 769 from frame 1]\r\n",,terminal_output
+3275,7566389,"TERMINAL",0,0,"259",,terminal_output
+3276,7567349,"TERMINAL",0,0,"3721",,terminal_output
+3277,7568467,"TERMINAL",0,0,"582",,terminal_output
+3278,7569462,"TERMINAL",0,0,"693",,terminal_output
+3279,7570486,"TERMINAL",0,0,"7504",,terminal_output
+3280,7571509,"TERMINAL",0,0,"815",,terminal_output
+3281,7572636,"TERMINAL",0,0,"926",,terminal_output
+3282,7573580,"TERMINAL",0,0,"2:0037",,terminal_output
+3283,7574684,"TERMINAL",0,0,"148",,terminal_output
+3284,7575709,"TERMINAL",0,0,"259",,terminal_output
+3285,7576708,"TERMINAL",0,0,"3630",,terminal_output
+3286,7577756,"TERMINAL",0,0,"471",,terminal_output
+3287,7578781,"TERMINAL",0,0,"582",,terminal_output
+3288,7579813,"TERMINAL",0,0,"693",,terminal_output
+3289,7580851,"TERMINAL",0,0,"76:004",,terminal_output
+3290,7581889,"TERMINAL",0,0,"815",,terminal_output
+3291,7582977,"TERMINAL",0,0,"926",,terminal_output
+3292,7584060,"TERMINAL",0,0,"1037",,terminal_output
+3293,7585142,"TERMINAL",0,0,"148",,terminal_output
+3294,7586039,"TERMINAL",0,0,"259",,terminal_output
+3295,7587075,"TERMINAL",0,0,"3640",,terminal_output
+3296,7588201,"TERMINAL",0,0,"471",,terminal_output
+3297,7589175,"TERMINAL",0,0,"582",,terminal_output
+3298,7590249,"TERMINAL",0,0,"693",,terminal_output
+3299,7591273,"TERMINAL",0,0,"7104",,terminal_output
+3300,7592279,"TERMINAL",0,0,"815",,terminal_output
+3301,7593322,"TERMINAL",0,0,"926",,terminal_output
+3302,7594446,"TERMINAL",0,0,"2048",,terminal_output
+3303,7595471,"TERMINAL",0,0,"259",,terminal_output
+3304,7596494,"TERMINAL",0,0,"3650",,terminal_output
+3305,7596505,"TERMINAL",0,0,"[condensed: repetitive autoregressive-sampling progress lines, Sampling token 770 from frame 1 through Sampling token 919 from frame 1]\r\nautoreg sampling done. calculating ssim and saving video\r\nSSIM: 0.5691258311271667\r\n",,terminal_output
+3306,7596710,"slurm/jobs/mihir/horeka/yolo-runs/sampling.sh",0,0,"",shellscript,tab
+3307,7597483,"TERMINAL",0,0,"471",,terminal_output
+3308,7598339,"TERMINAL",0,0,"]0;tum_cte0515@hkn0401:~/Projects/jafar[?2004h(jafar) [tum_cte0515@hkn0401 jafar]$ ",,terminal_output
+3309,7598523,"TERMINAL",0,0,"582",,terminal_output
+3310,7599964,"TERMINAL",0,0,"693",,terminal_output
+3311,7600393,"slurm/jobs/mihir/horeka/yolo-runs/sampling.sh",714,0,"",shellscript,selection_mouse
+3312,7600528,"slurm/jobs/mihir/horeka/yolo-runs/sampling.sh",709,14,"dyna_num_heads",shellscript,selection_mouse
+3313,7601002,"TERMINAL",0,0,"7204",,terminal_output
+3314,7601621,"slurm/jobs/mihir/horeka/yolo-runs/sampling.sh",689,0,"",shellscript,selection_mouse
+3315,7601766,"slurm/jobs/mihir/horeka/yolo-runs/sampling.sh",683,15,"dyna_num_blocks",shellscript,selection_mouse
+3316,7601909,"slurm/jobs/mihir/horeka/yolo-runs/sampling.sh",677,26," --dyna_num_blocks=2 \\n",shellscript,selection_mouse
+3317,7602041,"TERMINAL",0,0,"815",,terminal_output
+3318,7603036,"slurm/jobs/mihir/horeka/yolo-runs/sampling.sh",688,0,"",shellscript,selection_mouse
+3319,7603088,"TERMINAL",0,0,"926",,terminal_output
+3320,7604136,"TERMINAL",0,0,"3037",,terminal_output
+3321,7605200,"TERMINAL",0,0,"148",,terminal_output
+3322,7606228,"TERMINAL",0,0,"259",,terminal_output
+3323,7607291,"TERMINAL",0,0,"367:00",,terminal_output
+3324,7608134,"slurm/jobs/mihir/horeka/yolo-runs/sampling.sh",667,0,"",shellscript,selection_mouse
+3325,7608291,"TERMINAL",0,0,"471",,terminal_output
+3326,7608353,"slurm/jobs/mihir/horeka/yolo-runs/sampling.sh",662,8,"dyna_dim",shellscript,selection_mouse
+3327,7609403,"TERMINAL",0,0,"593",,terminal_output
+3328,7610385,"TERMINAL",0,0,"7304",,terminal_output
+3329,7611447,"TERMINAL",0,0,"815",,terminal_output
+3330,7612576,"TERMINAL",0,0,"926",,terminal_output
+3331,7613510,"TERMINAL",0,0,"4037",,terminal_output
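The sampling run recorded above ends with "autoreg sampling done. calculating ssim and saving video" and "SSIM: 0.5691258311271667". As a rough sketch of how such a mean per-frame SSIM can be computed (assuming scikit-image as the metric implementation and uint8 video arrays; the project's sample.py may compute it differently):

```python
# Sketch only: mean per-frame SSIM for two (T, H, W, C) uint8 videos.
# scikit-image is an assumed dependency, not confirmed by the recording.
import numpy as np
from skimage.metrics import structural_similarity as ssim

def video_ssim(ground_truth: np.ndarray, generated: np.ndarray) -> float:
    scores = [
        ssim(gt, gen, channel_axis=-1, data_range=255)
        for gt, gen in zip(ground_truth, generated)
    ]
    return float(np.mean(scores))
```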
+3332,7614621,"TERMINAL",0,0,"148",,terminal_output +3333,7615645,"TERMINAL",0,0,"259",,terminal_output +3334,7616835,"TERMINAL",0,0,"3610",,terminal_output +3335,7617694,"TERMINAL",0,0,"471",,terminal_output +3336,7618818,"TERMINAL",0,0,"582",,terminal_output +3337,7619841,"TERMINAL",0,0,"693",,terminal_output +3338,7620369,"TERMINAL",0,0,"salloc",,terminal_focus +3339,7620808,"TERMINAL",0,0,"7404",,terminal_output +3340,7621846,"TERMINAL",0,0,"815",,terminal_output +3341,7622023,"TERMINAL",0,0,"srun",,terminal_focus +3342,7622896,"TERMINAL",0,0,"926",,terminal_output +3343,7623937,"TERMINAL",0,0,"5037",,terminal_output +3344,7624980,"TERMINAL",0,0,"148",,terminal_output +3345,7626120,"TERMINAL",0,0,"259",,terminal_output +3346,7627111,"TERMINAL",0,0,"3620",,terminal_output +3347,7628100,"TERMINAL",0,0,"471",,terminal_output +3348,7629138,"TERMINAL",0,0,"582",,terminal_output +3349,7630183,"TERMINAL",0,0,"693",,terminal_output +3350,7631415,"TERMINAL",0,0,"7504",,terminal_output +3351,7632281,"TERMINAL",0,0,"815",,terminal_output +3352,7633332,"TERMINAL",0,0,"926",,terminal_output +3353,7634382,"TERMINAL",0,0,"3:0048",,terminal_output +3354,7635405,"TERMINAL",0,0,"259",,terminal_output +3355,7636533,"TERMINAL",0,0,"3630",,terminal_output +3356,7637559,"TERMINAL",0,0,"471",,terminal_output +3357,7638522,"TERMINAL",0,0,"582",,terminal_output +3358,7639565,"TERMINAL",0,0,"693",,terminal_output +3359,7640631,"TERMINAL",0,0,"77:004",,terminal_output +3360,7641655,"TERMINAL",0,0,"815",,terminal_output +3361,7642684,"TERMINAL",0,0,"926",,terminal_output +3362,7643725,"TERMINAL",0,0,"1037",,terminal_output +3363,7644763,"TERMINAL",0,0,"148",,terminal_output +3364,7645855,"TERMINAL",0,0,"259",,terminal_output +3365,7646849,"TERMINAL",0,0,"3640",,terminal_output +3366,7647925,"TERMINAL",0,0,"471",,terminal_output +3367,7649054,"TERMINAL",0,0,"582",,terminal_output +3368,7650052,"TERMINAL",0,0,"693",,terminal_output +3369,7650997,"TERMINAL",0,0,"7104",,terminal_output +3370,7652102,"TERMINAL",0,0,"815",,terminal_output +3371,7653082,"TERMINAL",0,0,"926",,terminal_output +3372,7654148,"TERMINAL",0,0,"2037",,terminal_output +3373,7655179,"TERMINAL",0,0,"148",,terminal_output +3374,7656298,"TERMINAL",0,0,"259",,terminal_output +3375,7657242,"TERMINAL",0,0,"3650",,terminal_output +3376,7658346,"TERMINAL",0,0,"471",,terminal_output +3377,7659329,"TERMINAL",0,0,"582",,terminal_output +3378,7660394,"TERMINAL",0,0,"6204",,terminal_output +3379,7661420,"TERMINAL",0,0,"815",,terminal_output +3380,7662545,"TERMINAL",0,0,"926",,terminal_output +3381,7663482,"TERMINAL",0,0,"3037",,terminal_output +3382,7664597,"TERMINAL",0,0,"148",,terminal_output +3383,7665617,"TERMINAL",0,0,"259",,terminal_output +3384,7666641,"TERMINAL",0,0,"368:00",,terminal_output +3385,7667664,"TERMINAL",0,0,"471",,terminal_output +3386,7668792,"TERMINAL",0,0,"582",,terminal_output +3387,7669918,"TERMINAL",0,0,"693",,terminal_output +3388,7670780,"TERMINAL",0,0,"7304",,terminal_output +3389,7671863,"TERMINAL",0,0,"815",,terminal_output +3390,7672862,"TERMINAL",0,0,"926",,terminal_output +3391,7673903,"TERMINAL",0,0,"4037",,terminal_output +3392,7674942,"TERMINAL",0,0,"148",,terminal_output +3393,7675983,"TERMINAL",0,0,"259",,terminal_output +3394,7677028,"TERMINAL",0,0,"3610",,terminal_output +3395,7678109,"TERMINAL",0,0,"471",,terminal_output +3396,7679152,"TERMINAL",0,0,"582",,terminal_output +3397,7680152,"TERMINAL",0,0,"693",,terminal_output +3398,7681200,"TERMINAL",0,0,"7404",,terminal_output 
+3399,7682252,"TERMINAL",0,0,"815",,terminal_output +3400,7683332,"TERMINAL",0,0,"926",,terminal_output +3401,7684356,"TERMINAL",0,0,"5048",,terminal_output +3402,7685498,"TERMINAL",0,0,"259",,terminal_output +3403,7686506,"TERMINAL",0,0,"3620",,terminal_output +3404,7687492,"TERMINAL",0,0,"471",,terminal_output +3405,7688543,"TERMINAL",0,0,"582",,terminal_output +3406,7689682,"TERMINAL",0,0,"693",,terminal_output +3407,7690639,"TERMINAL",0,0,"7504",,terminal_output +3408,7691729,"TERMINAL",0,0,"815",,terminal_output +3409,7692778,"TERMINAL",0,0,"926",,terminal_output +3410,7693880,"TERMINAL",0,0,"4:0037",,terminal_output +3411,7694822,"TERMINAL",0,0,"148",,terminal_output +3412,7695860,"TERMINAL",0,0,"259",,terminal_output +3413,7696897,"TERMINAL",0,0,"3630",,terminal_output +3414,7697934,"TERMINAL",0,0,"471",,terminal_output +3415,7698972,"TERMINAL",0,0,"582",,terminal_output +3416,7700127,"TERMINAL",0,0,"693",,terminal_output +3417,7701150,"TERMINAL",0,0,"78:004",,terminal_output +3418,7702175,"TERMINAL",0,0,"815",,terminal_output +3419,7703198,"TERMINAL",0,0,"926",,terminal_output +3420,7704229,"TERMINAL",0,0,"1037",,terminal_output +3421,7705348,"TERMINAL",0,0,"148",,terminal_output +3422,7706373,"TERMINAL",0,0,"259",,terminal_output +3423,7707353,"TERMINAL",0,0,"3741",,terminal_output +3424,7708420,"TERMINAL",0,0,"582",,terminal_output +3425,7709547,"TERMINAL",0,0,"693",,terminal_output +3426,7710501,"TERMINAL",0,0,"7104",,terminal_output +3427,7711595,"TERMINAL",0,0,"815",,terminal_output +3428,7712676,"TERMINAL",0,0,"926",,terminal_output +3429,7713756,"TERMINAL",0,0,"2037",,terminal_output +3430,7714780,"TERMINAL",0,0,"148",,terminal_output +3431,7715792,"TERMINAL",0,0,"259",,terminal_output +3432,7716822,"TERMINAL",0,0,"3650",,terminal_output +3433,7717845,"TERMINAL",0,0,"471",,terminal_output +3434,7718885,"TERMINAL",0,0,"582",,terminal_output +3435,7719927,"TERMINAL",0,0,"693",,terminal_output +3436,7721014,"TERMINAL",0,0,"7204",,terminal_output +3437,7722046,"TERMINAL",0,0,"815",,terminal_output +3438,7723166,"TERMINAL",0,0,"926",,terminal_output +3439,7724190,"TERMINAL",0,0,"3037",,terminal_output +3440,7725213,"TERMINAL",0,0,"148",,terminal_output +3441,7726238,"TERMINAL",0,0,"259",,terminal_output +3442,7727317,"TERMINAL",0,0,"369:00",,terminal_output +3443,7728388,"TERMINAL",0,0,"471",,terminal_output +3444,7729109,"TERMINAL",0,0,"g",,terminal_output +3445,7729215,"TERMINAL",0,0,"[?25lt[?25h",,terminal_output +3446,7729362,"TERMINAL",0,0,"593",,terminal_output +3447,7729704,"TERMINAL",0,0,"[?25li[?25h",,terminal_output +3448,7729771,"TERMINAL",0,0,"[?25lt[?25h",,terminal_output +3449,7729888,"TERMINAL",0,0,"[?25l [?25h",,terminal_output +3450,7730038,"TERMINAL",0,0,"[?25ls[?25h",,terminal_output +3451,7730112,"TERMINAL",0,0,"[?25lt[?25h",,terminal_output +3452,7730208,"TERMINAL",0,0,"[?25la[?25h",,terminal_output +3453,7730280,"TERMINAL",0,0,"[?25lt[?25h",,terminal_output +3454,7730341,"TERMINAL",0,0,"[?25lu[?25h",,terminal_output +3455,7730418,"TERMINAL",0,0,"7304",,terminal_output +3456,7730434,"TERMINAL",0,0,"[?25ls[?25h",,terminal_output +3457,7730607,"TERMINAL",0,0,"\r\n[?2004l\r",,terminal_output +3458,7731069,"TERMINAL",0,0,"On branch new-arch-sampling\r\nChanges not staged for commit:\r\n (use ""git add ..."" to update what will be committed)\r\n (use ""git restore ..."" to discard changes in working directory)\r\n\tmodified: genie.py\r\n\tmodified: models/dynamics.py\r\n\tmodified: sample.py\r\n\tmodified: utils/nn.py\r\n\r\nUntracked files:\r\n (use ""git add 
..."" to include in what will be committed)\r\n\tdiff.diff\r\n\tdiff.log\r\n\tlogs/\r\n\tread_tf_record.py\r\n\trequirements-franz.txt\r\n\tscripts_cremers/\r\n\tscripts_horeka/\r\n\tslurm-3359333.out\r\n\tslurm-3359334.out\r\n\tslurm-3359338.out\r\n\tslurm/\r\n\tutils/logger_bak.py\r\n\tutils/visualizer.py\r\n\tweekend-job-requeuer.sh\r\n\tweekend-job-starter.sh\r\n\r\nno changes added to commit (use ""git add"" and/or ""git commit -a"")\r\n]0;tum_cte0515@hkn0401:~/Projects/jafar[?2004h(jafar) [tum_cte0515@hkn0401 jafar]$ ",,terminal_output +3459,7731563,"TERMINAL",0,0,"815",,terminal_output +3460,7732150,"TERMINAL",0,0,"[?25li[?25h",,terminal_output +3461,7732257,"TERMINAL",0,0,"[?25lt[?25h",,terminal_output +3462,7732504,"TERMINAL",0,0,"926",,terminal_output +3463,7732802,"TERMINAL",0,0,"[?25lg[?25h",,terminal_output +3464,7732855,"TERMINAL",0,0,"[?25li[?25h",,terminal_output +3465,7732976,"TERMINAL",0,0,"[?25lt[?25h",,terminal_output +3466,7733037,"TERMINAL",0,0,"[?25l [?25h",,terminal_output +3467,7733167,"TERMINAL",0,0,"[?25lc[?25h",,terminal_output +3468,7733227,"TERMINAL",0,0,"[?25lo[?25h",,terminal_output +3469,7733334,"TERMINAL",0,0,"[?25lm[?25h",,terminal_output +3470,7733553,"TERMINAL",0,0,"4037",,terminal_output +3471,7733661,"TERMINAL",0,0,"[?25li[?25h",,terminal_output +3472,7734150,"TERMINAL",0,0,"[?25lm[?25h",,terminal_output +3473,7734211,"TERMINAL",0,0,"[?25li[?25h",,terminal_output +3474,7734412,"TERMINAL",0,0,"[?25l [?25h",,terminal_output +3475,7734550,"TERMINAL",0,0,"[?25lt[?25h",,terminal_output +3476,7734606,"TERMINAL",0,0,"148",,terminal_output +3477,7735165,"TERMINAL",0,0,"[?25lt[?25h[?25l [?25h",,terminal_output +3478,7735378,"TERMINAL",0,0,"[?25l-[?25h",,terminal_output +3479,7735511,"TERMINAL",0,0,"[?25la[?25h",,terminal_output +3480,7735582,"TERMINAL",0,0,"[?25lm[?25h",,terminal_output +3481,7735644,"TERMINAL",0,0,"259",,terminal_output +3482,7735659,"TERMINAL",0,0,"[?25l [?25h",,terminal_output +3483,7735868,"TERMINAL",0,0,"[?25l""[?25h",,terminal_output +3484,7736413,"TERMINAL",0,0,"[?25ls[?25h",,terminal_output +3485,7736564,"TERMINAL",0,0,"[?25la[?25h",,terminal_output +3486,7736628,"TERMINAL",0,0,"[?25lm[?25h",,terminal_output +3487,7736740,"TERMINAL",0,0,"3610",,terminal_output +3488,7736809,"TERMINAL",0,0,"[?25lp[?25h",,terminal_output +3489,7736871,"TERMINAL",0,0,"[?25ll[?25h",,terminal_output +3490,7737211,"TERMINAL",0,0,"[?25lg[?25h",,terminal_output +3491,7737715,"TERMINAL",0,0,"[?25li[?25h",,terminal_output +3492,7737769,"TERMINAL",0,0,"471",,terminal_output +3493,7737790,"TERMINAL",0,0,"[?25ln[?25h[?25lg[?25h",,terminal_output +3494,7737930,"TERMINAL",0,0,"[?25l [?25h",,terminal_output +3495,7738168,"TERMINAL",0,0,"[?25lw[?25h",,terminal_output +3496,7738232,"TERMINAL",0,0,"[?25lo[?25h",,terminal_output +3497,7738467,"TERMINAL",0,0,"[?25lr[?25h",,terminal_output +3498,7738582,"TERMINAL",0,0,"[?25lk[?25h",,terminal_output +3499,7738688,"TERMINAL",0,0,"[?25ls[?25h",,terminal_output +3500,7738793,"TERMINAL",0,0,"582",,terminal_output +3501,7738927,"TERMINAL",0,0,"[?25l""[?25h",,terminal_output +3502,7739858,"TERMINAL",0,0,"693",,terminal_output +3503,7740883,"TERMINAL",0,0,"7404",,terminal_output +3504,7741926,"TERMINAL",0,0,"815",,terminal_output +3505,7742205,"TERMINAL",0,0,"[?25l;""[?25h",,terminal_output +3506,7742268,"TERMINAL",0,0,"[?25l ""[?25h",,terminal_output +3507,7742995,"TERMINAL",0,0,"926",,terminal_output +3508,7744056,"TERMINAL",0,0,"5037",,terminal_output +3509,7745214,"TERMINAL",0,0,"148",,terminal_output 
+3510,7746114,"TERMINAL",0,0,"259",,terminal_output +3511,7747231,"TERMINAL",0,0,"3620",,terminal_output +3512,7748255,"TERMINAL",0,0,"471",,terminal_output +3513,7749280,"TERMINAL",0,0,"582",,terminal_output +3514,7750405,"TERMINAL",0,0,"693",,terminal_output +3515,7751351,"TERMINAL",0,0,"7515",,terminal_output +3516,7752404,"TERMINAL",0,0,"926",,terminal_output +3517,7753477,"TERMINAL",0,0,"5:0037",,terminal_output +3518,7754603,"TERMINAL",0,0,"148",,terminal_output +3519,7755539,"TERMINAL",0,0,"259",,terminal_output +3520,7756652,"TERMINAL",0,0,"3630",,terminal_output +3521,7757676,"TERMINAL",0,0,"471",,terminal_output +3522,7758700,"TERMINAL",0,0,"582",,terminal_output +3523,7759723,"TERMINAL",0,0,"693",,terminal_output +3524,7760748,"TERMINAL",0,0,"79:004",,terminal_output +3525,7761875,"TERMINAL",0,0,"815",,terminal_output +3526,7762898,"TERMINAL",0,0,"926",,terminal_output +3527,7763856,"TERMINAL",0,0,"1037",,terminal_output +3528,7764894,"TERMINAL",0,0,"148",,terminal_output +3529,7765940,"TERMINAL",0,0,"259",,terminal_output +3530,7766993,"TERMINAL",0,0,"3640",,terminal_output +3531,7768017,"TERMINAL",0,0,"471",,terminal_output +3532,7769145,"TERMINAL",0,0,"582",,terminal_output +3533,7769982,"TERMINAL",0,0,"""",,terminal_output +3534,7770108,"TERMINAL",0,0,"693",,terminal_output +3535,7770170,"TERMINAL",0,0,"",,terminal_output +3536,7771088,"TERMINAL",0,0,"[?25l""""[?25h",,terminal_output +3537,7771194,"TERMINAL",0,0,"7104",,terminal_output +3538,7771824,"TERMINAL",0,0,"[?25ls""[?25h",,terminal_output +3539,7772007,"TERMINAL",0,0,"[?25la""[?25h",,terminal_output +3540,7772069,"TERMINAL",0,0,"[?25lm""[?25h",,terminal_output +3541,7772231,"TERMINAL",0,0,"[?25lp""[?25h",,terminal_output +3542,7772231,"TERMINAL",0,0,"815",,terminal_output +3543,7772282,"TERMINAL",0,0,"l""",,terminal_output +3544,7772492,"TERMINAL",0,0,"[?25li""[?25h",,terminal_output +3545,7772641,"TERMINAL",0,0,"[?25lg""[?25h",,terminal_output +3546,7772951,"TERMINAL",0,0,"[?25lg""[?25h",,terminal_output +3547,7773140,"TERMINAL",0,0,"[?25ln""[?25hg""",,terminal_output +3548,7773244,"TERMINAL",0,0,"[?25l ""[?25h",,terminal_output +3549,7773255,"TERMINAL",0,0,"926",,terminal_output +3550,7773370,"TERMINAL",0,0,"[?25lo""[?25h",,terminal_output +3551,7773543,"TERMINAL",0,0,"[?25ln""[?25h",,terminal_output +3552,7773606,"TERMINAL",0,0,"[?25l ""[?25h",,terminal_output +3553,7773937,"TERMINAL",0,0,"[?25lo""[?25h",,terminal_output +3554,7774081,"TERMINAL",0,0,"[?25lv""[?25h",,terminal_output +3555,7774244,"TERMINAL",0,0,"[?25le""[?25h",,terminal_output +3556,7774305,"TERMINAL",0,0,"r""",,terminal_output +3557,7774305,"TERMINAL",0,0,"2037",,terminal_output +3558,7774498,"TERMINAL",0,0,"[?25lf""[?25h",,terminal_output +3559,7774558,"TERMINAL",0,0,"[?25li""[?25h",,terminal_output +3560,7774920,"TERMINAL",0,0,"[?25lt""[?25h",,terminal_output +3561,7775062,"TERMINAL",0,0,"[?25lt""[?25hi""",,terminal_output +3562,7775178,"TERMINAL",0,0,"[?25ln""[?25h",,terminal_output +3563,7775236,"TERMINAL",0,0,"g""",,terminal_output +3564,7775300,"TERMINAL",0,0,"[?25l ""[?25h",,terminal_output +3565,7775355,"TERMINAL",0,0,"159",,terminal_output +3566,7775422,"TERMINAL",0,0,"[?25lr""[?25h",,terminal_output +3567,7775532,"TERMINAL",0,0,"[?25lu""[?25h",,terminal_output +3568,7775595,"TERMINAL",0,0,"n""",,terminal_output +3569,7775924,"TERMINAL",0,0,"[?25l ""[?25h",,terminal_output +3570,7776283,"TERMINAL",0,0,"[?25lw""[?25h",,terminal_output +3571,7776349,"TERMINAL",0,0,"[?25lo""[?25h",,terminal_output 
+3572,7776406,"TERMINAL",0,0,"3650",,terminal_output +3573,7776422,"TERMINAL",0,0,"[?25lr""[?25h",,terminal_output +3574,7776554,"TERMINAL",0,0,"[?25lk""[?25h",,terminal_output +3575,7776700,"TERMINAL",0,0,"[?25ls""[?25h",,terminal_output +3576,7776761,"TERMINAL",0,0,"[?25l ""[?25h",,terminal_output +3577,7776913,"TERMINAL",0,0,"[?25lp""[?25h",,terminal_output +3578,7777065,"TERMINAL",0,0,"a""",,terminal_output +3579,7777208,"TERMINAL",0,0,"[?25lr""[?25h",,terminal_output +3580,7777307,"TERMINAL",0,0,"[?25lt""[?25h",,terminal_output +3581,7777457,"TERMINAL",0,0,"[?25li""[?25h",,terminal_output +3582,7777457,"TERMINAL",0,0,"471",,terminal_output +3583,7777549,"TERMINAL",0,0,"[?25la""[?25h",,terminal_output +3584,7777650,"TERMINAL",0,0,"l""",,terminal_output +3585,7777804,"TERMINAL",0,0,"[?25ll""[?25h",,terminal_output +3586,7777879,"TERMINAL",0,0,"[?25ly""[?25h",,terminal_output +3587,7778063,"TERMINAL",0,0,"[?25l;""[?25h",,terminal_output +3588,7778213,"TERMINAL",0,0," """,,terminal_output +3589,7778551,"TERMINAL",0,0,"582",,terminal_output +3590,7779591,"TERMINAL",0,0,"693",,terminal_output +3591,7780613,"TERMINAL",0,0,"7204",,terminal_output +3592,7780920,"TERMINAL",0,0,"[?25ls""[?25h",,terminal_output +3593,7781143,"TERMINAL",0,0,"[?25lt""[?25h",,terminal_output +3594,7781199,"TERMINAL",0,0,"i""",,terminal_output +3595,7781385,"TERMINAL",0,0,"[?25ll""[?25h",,terminal_output +3596,7781514,"TERMINAL",0,0,"[?25ll""[?25h """,,terminal_output +3597,7781631,"TERMINAL",0,0,"815",,terminal_output +3598,7781647,"TERMINAL",0,0,"[?25ls""[?25h",,terminal_output +3599,7781701,"TERMINAL",0,0,"o""",,terminal_output +3600,7781795,"TERMINAL",0,0,"[?25lm""[?25h",,terminal_output +3601,7782117,"TERMINAL",0,0,"[?25le""[?25h",,terminal_output +3602,7782223,"TERMINAL",0,0,"[?25l ""[?25h",,terminal_output +3603,7782588,"TERMINAL",0,0,"[?25la""[?25h",,terminal_output +3604,7782688,"TERMINAL",0,0,"926",,terminal_output +3605,7782710,"TERMINAL",0,0,"[?25lr""[?25h",,terminal_output +3606,7782936,"TERMINAL",0,0,"[?25lt""[?25h",,terminal_output +3607,7783052,"TERMINAL",0,0,"[?25li""[?25h",,terminal_output +3608,7783194,"TERMINAL",0,0,"[?25lf""[?25h",,terminal_output +3609,7783387,"TERMINAL",0,0,"[?25la""[?25h",,terminal_output +3610,7783566,"TERMINAL",0,0,"[?25lc""[?25h",,terminal_output +3611,7783737,"TERMINAL",0,0,"[?25lt""[?25h",,terminal_output +3612,7783738,"TERMINAL",0,0,"3037",,terminal_output +3613,7783939,"TERMINAL",0,0,"[?25ls""[?25h",,terminal_output +3614,7784032,"TERMINAL",0,0," """,,terminal_output +3615,7784093,"TERMINAL",0,0,"[?25li""[?25h",,terminal_output +3616,7784207,"TERMINAL",0,0,"[?25ln""[?25h",,terminal_output +3617,7784268,"TERMINAL",0,0,"[?25l ""[?25h",,terminal_output +3618,7784785,"TERMINAL",0,0,"[?25l ""[?25h",,terminal_output +3619,7784785,"TERMINAL",0,0,"148",,terminal_output +3620,7784895,"TERMINAL",0,0,"[?25ln""[?25h",,terminal_output +3621,7784998,"TERMINAL",0,0,"[?25li""[?25h",,terminal_output +3622,7785171,"TERMINAL",0,0,"[?25lw""[?25h",,terminal_output +3623,7785701,"TERMINAL",0,0,"[?25lh""[?25h",,terminal_output +3624,7785811,"TERMINAL",0,0,"[?25le""[?25h",,terminal_output +3625,7785814,"TERMINAL",0,0,"259",,terminal_output +3626,7785925,"TERMINAL",0,0,"[?25ln""[?25h",,terminal_output +3627,7785987,"TERMINAL",0,0," """,,terminal_output +3628,7786049,"TERMINAL",0,0,"[?25ls""[?25h",,terminal_output +3629,7786263,"TERMINAL",0,0,"[?25la""[?25h",,terminal_output +3630,7786319,"TERMINAL",0,0,"m""",,terminal_output +3631,7786514,"TERMINAL",0,0,"[?25lp""[?25h",,terminal_output 
+3632,7786577,"TERMINAL",0,0,"[?25ll""[?25h",,terminal_output +3633,7786788,"TERMINAL",0,0,"[?25li""[?25h",,terminal_output +3634,7786849,"TERMINAL",0,0,"[?25ln""[?25h",,terminal_output +3635,7786865,"TERMINAL",0,0,"3650:00",,terminal_output +3636,7787020,"TERMINAL",0,0,"[?25lg""[?25h",,terminal_output +3637,7787072,"TERMINAL",0,0," """,,terminal_output +3638,7787285,"TERMINAL",0,0,"[?25l2""[?25h",,terminal_output +3639,7787394,"TERMINAL",0,0,"[?25l ""[?25h",,terminal_output +3640,7787528,"TERMINAL",0,0,"[?25lf""[?25h",,terminal_output +3641,7787634,"TERMINAL",0,0,"[?25lr""[?25h",,terminal_output +3642,7787835,"TERMINAL",0,0,"[?25la""[?25h",,terminal_output +3643,7787897,"TERMINAL",0,0,"m""",,terminal_output +3644,7787923,"TERMINAL",0,0,"471",,terminal_output +3645,7787960,"TERMINAL",0,0,"[?25le""[?25h",,terminal_output +3646,7788142,"TERMINAL",0,0,"[?25ls""[?25h",,terminal_output +3647,7788566,"TERMINAL",0,0,"\r\n[?2004l\r",,terminal_output +3648,7788829,"TERMINAL",0,0,"[new-arch-sampling e0471d7] sampling on overfitting run works partially; still some artifacts when sampling 2 frames\r\n 4 files changed, 9 insertions(+), 5 deletions(-)\r\n]0;tum_cte0515@hkn0401:~/Projects/jafar[?2004h(jafar) [tum_cte0515@hkn0401 jafar]$ ",,terminal_output +3649,7788958,"TERMINAL",0,0,"582",,terminal_output +3650,7790003,"TERMINAL",0,0,"693",,terminal_output +3651,7791052,"TERMINAL",0,0,"7304",,terminal_output +3652,7792122,"TERMINAL",0,0,"815",,terminal_output +3653,7793185,"TERMINAL",0,0,"926",,terminal_output +3654,7794229,"TERMINAL",0,0,"4037",,terminal_output +3655,7794787,"TERMINAL",0,0,"watch",,terminal_focus +3656,7795258,"TERMINAL",0,0,"148",,terminal_output +3657,7796292,"TERMINAL",0,0,"259",,terminal_output +3658,7796393,"TERMINAL",0,0,"srun",,terminal_focus +3659,7797394,"TERMINAL",0,0,"3711",,terminal_output +3660,7798467,"TERMINAL",0,0,"582",,terminal_output +3661,7799464,"TERMINAL",0,0,"693",,terminal_output diff --git a/927a8af5474e5654810c00ce2e09fd2de87d3e5722f33fa1090d867db114e403/crowd-code-5c146b3b-a208-4bdf-96e7-7e0722fd3fa01751383718572-2025_07_01-17.29.16.938/source.csv b/927a8af5474e5654810c00ce2e09fd2de87d3e5722f33fa1090d867db114e403/crowd-code-5c146b3b-a208-4bdf-96e7-7e0722fd3fa01751383718572-2025_07_01-17.29.16.938/source.csv new file mode 100644 index 0000000000000000000000000000000000000000..327bab1974eb7d32c93a57904f3ebc3a28f98888 --- /dev/null +++ b/927a8af5474e5654810c00ce2e09fd2de87d3e5722f33fa1090d867db114e403/crowd-code-5c146b3b-a208-4bdf-96e7-7e0722fd3fa01751383718572-2025_07_01-17.29.16.938/source.csv @@ -0,0 +1,2368 @@ +Sequence,Time,File,RangeOffset,RangeLength,Text,Language,Type +2,551,"extension-output-pdoom-org.crowd-code-#1-crowd-code",0,0,"5:29:16 PM [info] Activating crowd-code\n5:29:16 PM [info] Recording started\n5:29:16 PM [info] Initializing git provider using file system watchers...\n5:29:17 PM [info] Git repository found\n5:29:17 PM [info] Git provider initialized successfully\n",Log,tab +3,822,"extension-output-pdoom-org.crowd-code-#1-crowd-code",245,0,"5:29:17 PM [info] Initial git state: [object Object]\n",Log,content +4,2792,"TERMINAL",0,0,"bash",,terminal_focus +5,3257,"TERMINAL",0,0,"/bin/python3 /hkfs/home/project/hk-project-p0023960/tum_cte0515/.cursor-server/extensions/ms-python.python-2024.12.3-linux-x64/python_files/printEnvVariablesToFile.py /hkfs/home/project/hk-project-p0023960/tum_cte0515/.cursor-server/extensions/ms-python.python-2024.12.3-linux-x64/python_files/deactivate/bash/envVars.txt",,terminal_command 
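The second recording starts over with the same schema, Sequence,Time,File,RangeOffset,RangeLength,Text,Language,Type, where Time appears to be milliseconds since the recording started and quotes inside Text are escaped by doubling. Python's csv module handles that escaping directly; a minimal reader sketch (the Type filter is illustrative, not part of the extension):

```python
# Sketch: stream the terminal commands out of a crowd-code recording.
import csv

def terminal_commands(path: str):
    with open(path, newline="") as f:
        for row in csv.DictReader(f):
            if row["Type"] == "terminal_command":
                yield int(row["Sequence"]), int(row["Time"]), row["Text"]
```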
+6,3308,"TERMINAL",0,0,"]633;E;2025-07-01 17:29:20 /bin/python3 /hkfs/home/project/hk-project-p0023960/tum_cte0515/.cursor-server/extensions/ms-python.python-2024.12.3-linux-x64/python_files/printEnvVariablesToFile.py /hkfs/home/project/hk-project-p0023960/tum_cte0515/.cursor-server/extensions/ms-python.python-2024.12.3-linux-x64/python_files/deactivate/bash/envVars.txt;42051a78-1109-4e17-a920-97a09bbed6a7]633;C]0;tum_cte0515@hkn1993:/hkfs/home/project/hk-project-p0023960/tum_cte0515/.cursor-server/extensions/ms-python.python-2024.12.3-linux-x64/python_files/deactivate/bash]633;D;0",,terminal_output +7,3497,"TERMINAL",0,0,"bash",,terminal_focus +8,5215,"TERMINAL",0,0,"undefinedjafar[tum_cte0515@hkn1993 jafar]$ queue",,terminal_command +9,5265,"TERMINAL",0,0,"]633;E;2025-07-01 17:29:22 queue;b1a412dd-3827-45fd-af4c-b6c15dd865d1]633;C",,terminal_output +10,5341,"TERMINAL",0,0,"[?1049h(B[?7hEvery 1.0s: squeue --mehkn1993.localdomain: Tue Jul 1 17:29:22 2025JOBID PARTITION NAME USER ST\tTIME NODES NODELIST(REASON)3309699 accelerat train_dy tum_cte0 PD\t0:00\t 1 (Priority)3309662 accelerat train_to tum_cte0 R39:09\t 1 hkn06323309663 accelerat train_la tum_cte0 R39:09\t 1 hkn06323309657 accelerat interact tum_cte0 R45:10\t 1 hkn0701",,terminal_output +11,6379,"TERMINAL",0,0,"310101",,terminal_output +12,7407,"TERMINAL",0,0,"4112",,terminal_output +13,8457,"TERMINAL",0,0,"5223",,terminal_output +14,9500,"TERMINAL",0,0,"6334",,terminal_output +15,10549,"TERMINAL",0,0,"7445",,terminal_output +16,11630,"TERMINAL",0,0,"8556",,terminal_output +17,12657,"TERMINAL",0,0,"9667",,terminal_output +18,13403,"TERMINAL",0,0,"[?1049l\r[?1l>]0;tum_cte0515@hkn1993:~/Projects/jafar]633;D;0",,terminal_output +19,24443,"TERMINAL",0,0,"srun --overlap --jobid=3309657 --pty /bin/bash",,terminal_command +20,24502,"TERMINAL",0,0,"]633;E;2025-07-01 17:29:41 srun --overlap --jobid=33309657--pty /bin/bash;b1a412dd-3827-45fd-af4c-b6c15dd865d1]633;Csrun: error: Unable to confirm allocation for job 33309657: Invalid job id specified\r\nsrun: Check SLURM_JOB_ID environment variable. 
Expired or invalid job 33309657\r\n]0;tum_cte0515@hkn1993:~/Projects/jafar]633;D;1",,terminal_output +21,29491,"TERMINAL",0,0,"queue",,terminal_command +22,29496,"TERMINAL",0,0,"]633;E;2025-07-01 17:29:46 queue;b1a412dd-3827-45fd-af4c-b6c15dd865d1]633;C",,terminal_output +23,29574,"TERMINAL",0,0,"[?1049h(B[?7hEvery 1.0s: squeue --mehkn1993.localdomain: Tue Jul 1 17:29:46 2025JOBID PARTITION NAME USER ST\tTIME NODES NODELIST(REASON)3309699 accelerat train_dy tum_cte0 PD\t0:00\t 1 (Priority)3309662 accelerat train_to tum_cte0 R39:33\t 1 hkn06323309663 accelerat train_la tum_cte0 R39:33\t 1 hkn06323309657 accelerat interact tum_cte0 R45:34\t 1 hkn0701",,terminal_output +24,30609,"TERMINAL",0,0,"7445",,terminal_output +25,31667,"TERMINAL",0,0,"8556",,terminal_output +26,32719,"TERMINAL",0,0,"9667",,terminal_output +27,33122,"TERMINAL",0,0,"[?1049l\r[?1l>]0;tum_cte0515@hkn1993:~/Projects/jafar]633;D;0",,terminal_output +28,36499,"TERMINAL",0,0,"queue",,terminal_command +29,36569,"TERMINAL",0,0,"]633;E;2025-07-01 17:29:53 queue;b1a412dd-3827-45fd-af4c-b6c15dd865d1]633;C[?1049h(B[?7hEvery 1.0s: squeue --mehkn1993.localdomain: Tue Jul 1 17:29:53 2025JOBID PARTITION NAME USER ST\tTIME NODES NODELIST(REASON)3309699 accelerat train_dy tum_cte0 PD\t0:00\t 1 (Priority)3309662 accelerat train_to tum_cte0 R39:40\t 1 hkn06323309663 accelerat train_la tum_cte0 R39:40\t 1 hkn06323309657 accelerat interact tum_cte0 R45:41\t 1 hkn0701",,terminal_output +30,37452,"TERMINAL",0,0,"[?1049l\r[?1l>]0;tum_cte0515@hkn1993:~/Projects/jafar]633;D;0",,terminal_output +31,42391,"TERMINAL",0,0,"salloc --time=01:00:00 --partition=accelerated --nodes=1 --ntasks-per-node=4 --gres=gpu:4 --cpus-per-task=5 --mem=50G",,terminal_command +32,42443,"TERMINAL",0,0,"]633;E;2025-07-01 17:29:59 salloc --time=01:00:00 --partition=accelerated --nodes=1 --ntasks-per-node=4 --gres=gpu:4 --cpus-per-task=5 --mem=50G;b1a412dd-3827-45fd-af4c-b6c15dd865d1]633;Csalloc: Pending job allocation 3309748\r\nsalloc: job 3309748 queued and waiting for resources\r\n",,terminal_output +33,44545,"TERMINAL",0,0,"^Csalloc: Job allocation 3309748 has been revoked.\r\nsalloc: Job aborted due to signal\r\n",,terminal_output +34,53320,"TERMINAL",0,0,"salloc --time=01:00:00 --partition=accelerated --nodes=1 --ntasks-per-node=1 --gres=gpu:1 --cpus-per-task=5 --mem=50G",,terminal_command +35,53404,"TERMINAL",0,0,"]633;E;2025-07-01 17:30:10 salloc --time=01:00:00 --partition=accelerated --nodes=1 --ntasks-per-node=1 --gres=gpu:1 --cpus-per-task=5 --mem=50G;b1a412dd-3827-45fd-af4c-b6c15dd865d1]633;Csalloc: Pending job allocation 3309749\r\nsalloc: job 3309749 queued and waiting for resources\r\n",,terminal_output +36,166668,"scripts_horeka/overfit_sample_tiny/sample.sh",0,0,"#!/usr/bin/env bash\n\n# Unload modules that may interfere\nmodule unload mpi/openmpi/5.0\nmodule unload devel/cuda/12.4\n\n# Activate virtual environment\nsource .venv/bin/activate\n\n# Set workspace and checkpoint directory (update slurm_job_id as needed)\nws_dir='/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared'\n# Replace the following with the actual job id/checkpoint you want to sample from\nslurm_job_id=3301029\n\n# job_name=train_dynamics_minecraft_overfit_sample_tiny\nCHECKPOINT_DIR=$ws_dir/checkpoints/${slurm_job_id}\n\n# Example: If you want to use a specific checkpoint, set it here\n# CHECKPOINT_PATH=$ws_dir/checkpoints/3299272/dynamics-tiny-overfit-big-lr-3299272_50000/\n# Or use the latest in the directory\n# CHECKPOINT_PATH=$(ls -d $CHECKPOINT_DIR/*/ | sort | tail -n 
1)\nCHECKPOINT_PATH=$CHECKPOINT_DIR/genie_1751067601_200000/\n# CHECKPOINT_PATH=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/0000/genie_1751301068_2000/\n# CHECKPOINT_PATH=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data/../checkpoints/3307618/genie_1751322003_15500/\n# CHECKPOINT_PATH=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/3307619/genie_1751322003_200000/\nCHECKPOINT_PATH=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/0000/genie_1751381954_17000\n\n\necho ""Sampling from checkpoint: $CHECKPOINT_PATH""\n\npython sample.py \\n --checkpoint ""$CHECKPOINT_PATH"" \\n --tokenizer_dim=384 \\n --latent_patch_dim=32 \\n --num_patch_latents=1024 \\n --patch_size=4 \\n --tokenizer_num_blocks=8 \\n --tokenizer_num_heads=8 \\n --lam_dim=384 \\n --latent_action_dim=32 \\n --lam_patch_size=16 \\n --lam_num_blocks=8 \\n --lam_num_heads=8 \\n --dyna_dim=128 \\n --dyna_num_blocks=2 \\n --dyna_num_heads=4 \\n --maskgit_steps=1000 \\n --num_latent_actions=6 \\n --seq_len=16 \\n --start_frame=0\n\n# python sample.py \\n # --checkpoint ""$CHECKPOINT_PATH"" \\n # --data_dir=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data/coinrun_episodes\n",shellscript,tab +37,178033,"scripts_horeka/overfit_sample_tiny/sample.sh",1405,0,"",shellscript,selection_mouse +38,178172,"scripts_horeka/overfit_sample_tiny/sample.sh",1402,15,"CHECKPOINT_PATH",shellscript,selection_mouse +39,179507,"scripts_horeka/overfit_sample_tiny/sample.sh",1201,0,"",shellscript,selection_mouse +40,179643,"scripts_horeka/overfit_sample_tiny/sample.sh",1199,15,"CHECKPOINT_PATH",shellscript,selection_mouse +41,180013,"scripts_horeka/overfit_sample_tiny/sample.sh",1199,112,"CHECKPOINT_PATH=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/0000/genie_1751381954_17000\n",shellscript,selection_mouse +42,180016,"scripts_horeka/overfit_sample_tiny/sample.sh",1199,200,"CHECKPOINT_PATH=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/0000/genie_1751381954_17000\n\n\necho ""Sampling from checkpoint: $CHECKPOINT_PATH""\n\npython sample.py \\n --checkpoint",shellscript,selection_mouse +43,180017,"scripts_horeka/overfit_sample_tiny/sample.sh",1199,432,"CHECKPOINT_PATH=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/0000/genie_1751381954_17000\n\n\necho ""Sampling from checkpoint: $CHECKPOINT_PATH""\n\npython sample.py \\n --checkpoint ""$CHECKPOINT_PATH"" \\n --tokenizer_dim=384 \\n --latent_patch_dim=32 \\n --num_patch_latents=1024 \\n --patch_size=4 \\n --tokenizer_num_blocks=8 \\n --tokenizer_num_heads=8 \\n --lam_dim=384 \\n --latent_action_dim",shellscript,selection_mouse +44,180018,"scripts_horeka/overfit_sample_tiny/sample.sh",1199,584,"CHECKPOINT_PATH=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/0000/genie_1751381954_17000\n\n\necho ""Sampling from checkpoint: $CHECKPOINT_PATH""\n\npython sample.py \\n --checkpoint ""$CHECKPOINT_PATH"" \\n --tokenizer_dim=384 \\n --latent_patch_dim=32 \\n --num_patch_latents=1024 \\n --patch_size=4 \\n --tokenizer_num_blocks=8 \\n --tokenizer_num_heads=8 \\n --lam_dim=384 \\n --latent_action_dim=32 \\n --lam_patch_size=16 \\n --lam_num_blocks=8 \\n --lam_num_heads=8 \\n --dyna_dim=128 \\n --dyna_num_blocks=2 \\n --dyna_num_heads=4 \",shellscript,selection_mouse +45,180019,"scripts_horeka/overfit_sample_tiny/sample.sh",1199,611,"CHECKPOINT_PATH=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/0000/genie_1751381954_17000\n\n\necho 
""Sampling from checkpoint: $CHECKPOINT_PATH""\n\npython sample.py \\n --checkpoint ""$CHECKPOINT_PATH"" \\n --tokenizer_dim=384 \\n --latent_patch_dim=32 \\n --num_patch_latents=1024 \\n --patch_size=4 \\n --tokenizer_num_blocks=8 \\n --tokenizer_num_heads=8 \\n --lam_dim=384 \\n --latent_action_dim=32 \\n --lam_patch_size=16 \\n --lam_num_blocks=8 \\n --lam_num_heads=8 \\n --dyna_dim=128 \\n --dyna_num_blocks=2 \\n --dyna_num_heads=4 \\n --maskgit_steps=1000 \",shellscript,selection_mouse +46,180020,"scripts_horeka/overfit_sample_tiny/sample.sh",1199,639,"CHECKPOINT_PATH=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/0000/genie_1751381954_17000\n\n\necho ""Sampling from checkpoint: $CHECKPOINT_PATH""\n\npython sample.py \\n --checkpoint ""$CHECKPOINT_PATH"" \\n --tokenizer_dim=384 \\n --latent_patch_dim=32 \\n --num_patch_latents=1024 \\n --patch_size=4 \\n --tokenizer_num_blocks=8 \\n --tokenizer_num_heads=8 \\n --lam_dim=384 \\n --latent_action_dim=32 \\n --lam_patch_size=16 \\n --lam_num_blocks=8 \\n --lam_num_heads=8 \\n --dyna_dim=128 \\n --dyna_num_blocks=2 \\n --dyna_num_heads=4 \\n --maskgit_steps=1000 \\n --num_latent_actions=6 ",shellscript,selection_mouse +47,180020,"scripts_horeka/overfit_sample_tiny/sample.sh",1199,640,"CHECKPOINT_PATH=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/0000/genie_1751381954_17000\n\n\necho ""Sampling from checkpoint: $CHECKPOINT_PATH""\n\npython sample.py \\n --checkpoint ""$CHECKPOINT_PATH"" \\n --tokenizer_dim=384 \\n --latent_patch_dim=32 \\n --num_patch_latents=1024 \\n --patch_size=4 \\n --tokenizer_num_blocks=8 \\n --tokenizer_num_heads=8 \\n --lam_dim=384 \\n --latent_action_dim=32 \\n --lam_patch_size=16 \\n --lam_num_blocks=8 \\n --lam_num_heads=8 \\n --dyna_dim=128 \\n --dyna_num_blocks=2 \\n --dyna_num_heads=4 \\n --maskgit_steps=1000 \\n --num_latent_actions=6 \",shellscript,selection_mouse +48,180021,"scripts_horeka/overfit_sample_tiny/sample.sh",1199,659,"CHECKPOINT_PATH=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/0000/genie_1751381954_17000\n\n\necho ""Sampling from checkpoint: $CHECKPOINT_PATH""\n\npython sample.py \\n --checkpoint ""$CHECKPOINT_PATH"" \\n --tokenizer_dim=384 \\n --latent_patch_dim=32 \\n --num_patch_latents=1024 \\n --patch_size=4 \\n --tokenizer_num_blocks=8 \\n --tokenizer_num_heads=8 \\n --lam_dim=384 \\n --latent_action_dim=32 \\n --lam_patch_size=16 \\n --lam_num_blocks=8 \\n --lam_num_heads=8 \\n --dyna_dim=128 \\n --dyna_num_blocks=2 \\n --dyna_num_heads=4 \\n --maskgit_steps=1000 \\n --num_latent_actions=6 \\n --seq_len=16 \",shellscript,selection_mouse +49,180361,"scripts_horeka/overfit_sample_tiny/sample.sh",1199,679,"CHECKPOINT_PATH=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/0000/genie_1751381954_17000\n\n\necho ""Sampling from checkpoint: $CHECKPOINT_PATH""\n\npython sample.py \\n --checkpoint ""$CHECKPOINT_PATH"" \\n --tokenizer_dim=384 \\n --latent_patch_dim=32 \\n --num_patch_latents=1024 \\n --patch_size=4 \\n --tokenizer_num_blocks=8 \\n --tokenizer_num_heads=8 \\n --lam_dim=384 \\n --latent_action_dim=32 \\n --lam_patch_size=16 \\n --lam_num_blocks=8 \\n --lam_num_heads=8 \\n --dyna_dim=128 \\n --dyna_num_blocks=2 \\n --dyna_num_heads=4 \\n --maskgit_steps=1000 \\n --num_latent_actions=6 \\n --seq_len=16 \\n --start_frame=0",shellscript,selection_mouse 
+50,180799,"scripts_horeka/overfit_sample_tiny/sample.sh",1199,680,"CHECKPOINT_PATH=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/0000/genie_1751381954_17000\n\n\necho ""Sampling from checkpoint: $CHECKPOINT_PATH""\n\npython sample.py \\n --checkpoint ""$CHECKPOINT_PATH"" \\n --tokenizer_dim=384 \\n --latent_patch_dim=32 \\n --num_patch_latents=1024 \\n --patch_size=4 \\n --tokenizer_num_blocks=8 \\n --tokenizer_num_heads=8 \\n --lam_dim=384 \\n --latent_action_dim=32 \\n --lam_patch_size=16 \\n --lam_num_blocks=8 \\n --lam_num_heads=8 \\n --dyna_dim=128 \\n --dyna_num_blocks=2 \\n --dyna_num_heads=4 \\n --maskgit_steps=1000 \\n --num_latent_actions=6 \\n --seq_len=16 \\n --start_frame=0\n",shellscript,selection_mouse +51,183235,"scripts_horeka/overfit_sample_tiny/sample.sh",985,0,"",shellscript,selection_mouse +52,184102,"scripts_horeka/overfit_sample_tiny/sample.sh",525,0,"",shellscript,selection_mouse +53,184835,"scripts_horeka/overfit_sample_tiny/sample.sh",524,0,"",shellscript,selection_mouse +54,184846,"scripts_horeka/overfit_sample_tiny/sample.sh",523,0,"",shellscript,selection_command +55,185001,"scripts_horeka/overfit_sample_tiny/sample.sh",524,0,"",shellscript,selection_mouse +56,185022,"scripts_horeka/overfit_sample_tiny/sample.sh",523,0,"",shellscript,selection_command +57,185191,"scripts_horeka/overfit_sample_tiny/sample.sh",523,1,"}",shellscript,selection_mouse +58,185207,"scripts_horeka/overfit_sample_tiny/sample.sh",524,0,"",shellscript,selection_command +59,185269,"scripts_horeka/overfit_sample_tiny/sample.sh",511,13,"slurm_job_id}",shellscript,selection_mouse +60,185270,"scripts_horeka/overfit_sample_tiny/sample.sh",497,27,"checkpoints/${slurm_job_id}",shellscript,selection_mouse +61,185308,"scripts_horeka/overfit_sample_tiny/sample.sh",490,34,"ws_dir/checkpoints/${slurm_job_id}",shellscript,selection_mouse +62,185372,"scripts_horeka/overfit_sample_tiny/sample.sh",429,95,"train_dynamics_minecraft_overfit_sample_tiny\nCHECKPOINT_DIR=$ws_dir/checkpoints/${slurm_job_id}",shellscript,selection_mouse +63,185450,"scripts_horeka/overfit_sample_tiny/sample.sh",417,107,"\n# job_name=train_dynamics_minecraft_overfit_sample_tiny\nCHECKPOINT_DIR=$ws_dir/checkpoints/${slurm_job_id}",shellscript,selection_mouse +64,185539,"scripts_horeka/overfit_sample_tiny/sample.sh",396,128,"slurm_job_id=3301029\n\n# job_name=train_dynamics_minecraft_overfit_sample_tiny\nCHECKPOINT_DIR=$ws_dir/checkpoints/${slurm_job_id}",shellscript,selection_mouse +65,185575,"scripts_horeka/overfit_sample_tiny/sample.sh",316,208,"Replace the following with the actual job id/checkpoint you want to sample from\nslurm_job_id=3301029\n\n# job_name=train_dynamics_minecraft_overfit_sample_tiny\nCHECKPOINT_DIR=$ws_dir/checkpoints/${slurm_job_id}",shellscript,selection_mouse +66,185602,"scripts_horeka/overfit_sample_tiny/sample.sh",178,346,"Set workspace and checkpoint directory (update slurm_job_id as needed)\nws_dir='/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared'\n# Replace the following with the actual job id/checkpoint you want to sample from\nslurm_job_id=3301029\n\n# job_name=train_dynamics_minecraft_overfit_sample_tiny\nCHECKPOINT_DIR=$ws_dir/checkpoints/${slurm_job_id}",shellscript,selection_mouse +67,185632,"scripts_horeka/overfit_sample_tiny/sample.sh",175,349,"\n# Set workspace and checkpoint directory (update slurm_job_id as needed)\nws_dir='/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared'\n# Replace the following with the actual job id/checkpoint you want to sample 
from\nslurm_job_id=3301029\n\n# job_name=train_dynamics_minecraft_overfit_sample_tiny\nCHECKPOINT_DIR=$ws_dir/checkpoints/${slurm_job_id}",shellscript,selection_mouse +68,186220,"scripts_horeka/overfit_sample_tiny/sample.sh",149,375,"source .venv/bin/activate\n\n# Set workspace and checkpoint directory (update slurm_job_id as needed)\nws_dir='/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared'\n# Replace the following with the actual job id/checkpoint you want to sample from\nslurm_job_id=3301029\n\n# job_name=train_dynamics_minecraft_overfit_sample_tiny\nCHECKPOINT_DIR=$ws_dir/checkpoints/${slurm_job_id}",shellscript,selection_mouse +69,186438,"scripts_horeka/overfit_sample_tiny/sample.sh",175,349,"\n# Set workspace and checkpoint directory (update slurm_job_id as needed)\nws_dir='/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared'\n# Replace the following with the actual job id/checkpoint you want to sample from\nslurm_job_id=3301029\n\n# job_name=train_dynamics_minecraft_overfit_sample_tiny\nCHECKPOINT_DIR=$ws_dir/checkpoints/${slurm_job_id}",shellscript,selection_mouse +70,186505,"scripts_horeka/overfit_sample_tiny/sample.sh",177,347," Set workspace and checkpoint directory (update slurm_job_id as needed)\nws_dir='/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared'\n# Replace the following with the actual job id/checkpoint you want to sample from\nslurm_job_id=3301029\n\n# job_name=train_dynamics_minecraft_overfit_sample_tiny\nCHECKPOINT_DIR=$ws_dir/checkpoints/${slurm_job_id}",shellscript,selection_mouse +71,186519,"scripts_horeka/overfit_sample_tiny/sample.sh",178,346,"Set workspace and checkpoint directory (update slurm_job_id as needed)\nws_dir='/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared'\n# Replace the following with the actual job id/checkpoint you want to sample from\nslurm_job_id=3301029\n\n# job_name=train_dynamics_minecraft_overfit_sample_tiny\nCHECKPOINT_DIR=$ws_dir/checkpoints/${slurm_job_id}",shellscript,selection_mouse +72,186520,"scripts_horeka/overfit_sample_tiny/sample.sh",249,275,"ws_dir='/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared'\n# Replace the following with the actual job id/checkpoint you want to sample from\nslurm_job_id=3301029\n\n# job_name=train_dynamics_minecraft_overfit_sample_tiny\nCHECKPOINT_DIR=$ws_dir/checkpoints/${slurm_job_id}",shellscript,selection_mouse +73,186629,"scripts_horeka/overfit_sample_tiny/sample.sh",316,208,"Replace the following with the actual job id/checkpoint you want to sample from\nslurm_job_id=3301029\n\n# job_name=train_dynamics_minecraft_overfit_sample_tiny\nCHECKPOINT_DIR=$ws_dir/checkpoints/${slurm_job_id}",shellscript,selection_mouse +74,186824,"scripts_horeka/overfit_sample_tiny/sample.sh",249,275,"ws_dir='/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared'\n# Replace the following with the actual job id/checkpoint you want to sample from\nslurm_job_id=3301029\n\n# job_name=train_dynamics_minecraft_overfit_sample_tiny\nCHECKPOINT_DIR=$ws_dir/checkpoints/${slurm_job_id}",shellscript,selection_mouse +75,186883,"scripts_horeka/overfit_sample_tiny/sample.sh",177,347," Set workspace and checkpoint directory (update slurm_job_id as needed)\nws_dir='/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared'\n# Replace the following with the actual job id/checkpoint you want to sample from\nslurm_job_id=3301029\n\n# job_name=train_dynamics_minecraft_overfit_sample_tiny\nCHECKPOINT_DIR=$ws_dir/checkpoints/${slurm_job_id}",shellscript,selection_mouse 
+76,187347,"scripts_horeka/overfit_sample_tiny/sample.sh",177,0,"",shellscript,selection_mouse +77,187844,"scripts_horeka/overfit_sample_tiny/sample.sh",177,1," ",shellscript,selection_mouse +78,188061,"scripts_horeka/overfit_sample_tiny/sample.sh",177,4," Set",shellscript,selection_mouse +79,188099,"scripts_horeka/overfit_sample_tiny/sample.sh",177,5," Set ",shellscript,selection_mouse +80,188100,"scripts_horeka/overfit_sample_tiny/sample.sh",177,81," Set workspace and checkpoint directory (update slurm_job_id as needed)\nws_dir='/",shellscript,selection_mouse +81,188132,"scripts_horeka/overfit_sample_tiny/sample.sh",177,85," Set workspace and checkpoint directory (update slurm_job_id as needed)\nws_dir='/hkfs",shellscript,selection_mouse +82,188133,"scripts_horeka/overfit_sample_tiny/sample.sh",177,160," Set workspace and checkpoint directory (update slurm_job_id as needed)\nws_dir='/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared'\n# Replace the following",shellscript,selection_mouse +83,188205,"scripts_horeka/overfit_sample_tiny/sample.sh",177,165," Set workspace and checkpoint directory (update slurm_job_id as needed)\nws_dir='/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared'\n# Replace the following with",shellscript,selection_mouse +84,188206,"scripts_horeka/overfit_sample_tiny/sample.sh",177,239," Set workspace and checkpoint directory (update slurm_job_id as needed)\nws_dir='/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared'\n# Replace the following with the actual job id/checkpoint you want to sample from\nslurm_job_id=3301029",shellscript,selection_mouse +85,188232,"scripts_horeka/overfit_sample_tiny/sample.sh",177,240," Set workspace and checkpoint directory (update slurm_job_id as needed)\nws_dir='/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared'\n# Replace the following with the actual job id/checkpoint you want to sample from\nslurm_job_id=3301029\n",shellscript,selection_mouse +86,188314,"scripts_horeka/overfit_sample_tiny/sample.sh",177,296," Set workspace and checkpoint directory (update slurm_job_id as needed)\nws_dir='/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared'\n# Replace the following with the actual job id/checkpoint you want to sample from\nslurm_job_id=3301029\n\n# job_name=train_dynamics_minecraft_overfit_sample_tiny",shellscript,selection_mouse +87,188481,"scripts_horeka/overfit_sample_tiny/sample.sh",177,347," Set workspace and checkpoint directory (update slurm_job_id as needed)\nws_dir='/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared'\n# Replace the following with the actual job id/checkpoint you want to sample from\nslurm_job_id=3301029\n\n# job_name=train_dynamics_minecraft_overfit_sample_tiny\nCHECKPOINT_DIR=$ws_dir/checkpoints/${slurm_job_id}",shellscript,selection_mouse +88,188746,"scripts_horeka/overfit_sample_tiny/sample.sh",524,0,"",shellscript,selection_mouse +89,188773,"scripts_horeka/overfit_sample_tiny/sample.sh",523,0,"",shellscript,selection_command +90,198988,"extension-output-pdoom-org.crowd-code-#1-crowd-code",0,0,"",Log,tab +91,198989,"extension-output-pdoom-org.crowd-code-#1-crowd-code",298,0,"",Log,selection_mouse +92,199930,"scripts_horeka/overfit_sample_tiny/sample.sh",0,0,"",shellscript,tab +93,199947,"TERMINAL",0,0,"bash",,terminal_focus +94,202264,"TERMINAL",0,0,"undefinedjafar[tum_cte0515@hkn1993 jafar]$ ls overfit_dir/",,terminal_command +95,202320,"TERMINAL",0,0,"]633;E;2025-07-01 17:32:39 ls overfit_dir/;ea2a9ac7-e5a0-488a-9623-9429487e0dac]633;Csingle_batch_12_elems.npy 
single_batch_3_elems.npy single_batch_6_elems.npy single_sample_axe.npy single_sample_corner.npy\r\n]0;tum_cte0515@hkn1993:~/Projects/jafar]633;D;0",,terminal_output +96,275523,"sample.py",0,0,"from dataclasses import dataclass\nimport time\nimport os\n\nimport dm_pix as pix\nimport einops\nimport jax\nimport jax.numpy as jnp\nimport numpy as np\nfrom orbax.checkpoint import PyTreeCheckpointer\nfrom PIL import Image, ImageDraw\nimport tyro\n\nfrom genie import Genie\n# from utils.dataloader import get_dataloader\n\n\n@dataclass\nclass Args:\n # Experiment\n seed: int = 0\n seq_len: int = 16\n image_channels: int = 3\n image_resolution: int = 64\n data_dir: str = ""data/coinrun_episodes""\n checkpoint: str = """"\n # Sampling\n batch_size: int = 1\n maskgit_steps: int = 25\n temperature: float = 1.0\n sample_argmax: bool = True\n start_frame: int = 0\n # Tokenizer checkpoint\n tokenizer_dim: int = 512\n latent_patch_dim: int = 32\n num_patch_latents: int = 1024\n patch_size: int = 4\n tokenizer_num_blocks: int = 8\n tokenizer_num_heads: int = 8\n # LAM checkpoint\n lam_dim: int = 512\n latent_action_dim: int = 32\n num_latent_actions: int = 6\n lam_patch_size: int = 16\n lam_num_blocks: int = 8\n lam_num_heads: int = 8\n # Dynamics checkpoint\n dyna_dim: int = 512\n dyna_num_blocks: int = 12\n dyna_num_heads: int = 8\n\n\nargs = tyro.cli(Args)\nrng = jax.random.PRNGKey(args.seed)\n\n# --- Load Genie checkpoint ---\ngenie = Genie(\n # Tokenizer\n in_dim=args.image_channels,\n tokenizer_dim=args.tokenizer_dim,\n latent_patch_dim=args.latent_patch_dim,\n num_patch_latents=args.num_patch_latents,\n patch_size=args.patch_size,\n tokenizer_num_blocks=args.tokenizer_num_blocks,\n tokenizer_num_heads=args.tokenizer_num_heads,\n # LAM\n lam_dim=args.lam_dim,\n latent_action_dim=args.latent_action_dim,\n num_latent_actions=args.num_latent_actions,\n lam_patch_size=args.lam_patch_size,\n lam_num_blocks=args.lam_num_blocks,\n lam_num_heads=args.lam_num_heads,\n # Dynamics\n dyna_dim=args.dyna_dim,\n dyna_num_blocks=args.dyna_num_blocks,\n dyna_num_heads=args.dyna_num_heads,\n)\nrng, _rng = jax.random.split(rng)\nimage_shape = (args.image_resolution, args.image_resolution, args.image_channels)\ndummy_inputs = dict(\n videos=jnp.zeros((args.batch_size, args.seq_len, *image_shape), dtype=jnp.float32),\n mask_rng=_rng,\n)\nrng, _rng = jax.random.split(rng)\nparams = genie.init(_rng, dummy_inputs)\nckpt = PyTreeCheckpointer().restore(args.checkpoint)[""model""][""params""][""params""]\nparams[""params""].update(ckpt)\n\n\n# --- Utility function to save an image as PNG ---\n\ndef save_frame_as_png(img_array, frame_idx, out_dir=""frames"", prefix=""frame""):\n """"""\n Save a single image (numpy or jax array) as a PNG file.\n img_array: shape (H, W, C), values in [0, 1] or [0, 255]\n frame_idx: int, frame number\n out_dir: directory to save images\n prefix: filename prefix\n """"""\n if not os.path.exists(out_dir):\n os.makedirs(out_dir)\n # Convert to numpy if needed\n if hasattr(img_array, ""device_buffer"") or hasattr(img_array, ""block_until_ready""):\n img_array = np.array(img_array)\n # Clip and convert to uint8\n img_uint8 = (img_array.clip(0, 1) * 255).astype(np.uint8) if img_array.max() <= 1.0 else img_array.astype(np.uint8)\n img = Image.fromarray(img_uint8)\n img.save(os.path.join(out_dir, f""{prefix}_{frame_idx:03d}.png""))\n\n# --- Define autoregressive sampling loop ---\ndef _autoreg_sample(rng, video_batch, action_batch):\n vid = video_batch[:, : args.start_frame + 1]\n # Save the initial video 
frames before sampling\n for idx in range(vid.shape[1]):\n # Save the first sample in the batch for each initial frame\n save_frame_as_png(vid[0, idx], idx)\n for frame_idx in range(args.start_frame + 1, args.seq_len):\n # --- Sample next frame ---\n print(""=""*100)\n print(""Frame"", frame_idx)\n print(""=""*100)\n rng, _rng = jax.random.split(rng)\n batch = dict(videos=vid, latent_actions=action_batch[:, :frame_idx], rng=_rng)\n new_frame = genie.apply(\n params,\n batch,\n args.maskgit_steps,\n args.temperature,\n args.sample_argmax,\n method=Genie.sample,\n )\n vid = jnp.concatenate([vid, new_frame], axis=1)\n # Save the first sample in the batch for this frame\n save_frame_as_png(new_frame[0, 0], frame_idx)\n return vid\n\ndef _oneshot_sample(rng, video_batch, action_batch):\n # Pass the full video batch, as in training\n batch = dict(\n videos=video_batch, # full batch, not just first frame\n latent_actions=action_batch, # shape should match what was used in training\n mask_rng=rng,\n )\n outputs = genie.apply(params, batch, False) # training=False for eval\n return outputs[""recon""]\n\n# --- Get video + latent actions ---\n# dataloader = get_dataloader(args.data_dir, args.seq_len, args.batch_size)\n# video_batch = next(iter(dataloader))\n# video_batch = np.load(""overfit_dir/single_sample_corner.npy"")\nvideo_batch = np.load(""overfit_dir/single_batch_12_elems.npy"")\n# Get latent actions from first video only\nfirst_video = video_batch[:1, :args.seq_len]\nbatch = dict(videos=first_video)\naction_batch = genie.apply(params, batch, False, method=Genie.vq_encode)\naction_batch = action_batch.reshape(1, args.seq_len - 1, 1)\naction_batch = jnp.zeros_like(action_batch)\n# Use actions from first video for all videos\naction_batch = jnp.repeat(action_batch, video_batch.shape[0], axis=0)\n\n# --- Sample + evaluate video ---\nvid = _autoreg_sample(rng, video_batch, action_batch)\n# vid = _oneshot_sample(rng, video_batch, action_batch)\n# vid = jnp.zeros_like(video_batch)\n# vid = _oneshot_sample(rng, video_batch, action_batch)\ngt = video_batch[:, : vid.shape[1]].clip(0, 1).reshape(-1, *video_batch.shape[2:])\nrecon = vid.clip(0, 1).reshape(-1, *vid.shape[2:])\nssim = pix.ssim(gt[:, args.start_frame + 1 :], recon[:, args.start_frame + 1 :]).mean()\nprint(f""SSIM: {ssim}"")\n\n# --- Construct video ---\nfirst_true = (video_batch[0:1] * 255).astype(np.uint8)\nfirst_pred = (vid[0:1] * 255).astype(np.uint8)\nfirst_video_comparison = np.zeros((2, *vid.shape[1:5]), dtype=np.uint8)\nfirst_video_comparison[0] = first_true[:, : vid.shape[1]]\nfirst_video_comparison[1] = first_pred\n# For other videos, only show generated video\nother_preds = (vid[1:] * 255).astype(np.uint8)\nall_frames = np.concatenate([first_video_comparison, other_preds], axis=0)\nflat_vid = einops.rearrange(all_frames, ""n t h w c -> t h (n w) c"")\n\n# --- Save video ---\nimgs = [Image.fromarray(img) for img in flat_vid]\n# Write actions on each frame\nfor img, action in zip(imgs[1:], action_batch[0, :, 0]):\n d = ImageDraw.Draw(img)\n d.text((2, 2), f""{action}"", fill=255)\nimgs[0].save(\n f""generation_{time.time()}.gif"",\n save_all=True,\n append_images=imgs[1:],\n duration=250,\n loop=0,\n)\n",python,tab +97,278563,"sample.py",5035,0,"",python,selection_mouse +98,278575,"sample.py",5034,0,"",python,selection_command +99,279094,"sample.py",4980,0,"",python,selection_mouse +100,288801,"TERMINAL",0,0,"ls /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data/overfit_dir",,terminal_command 
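The sample.py captured above drives Genie.sample via the maskgit_steps, temperature, and sample_argmax arguments; the genie.py captured later in this recording unmasks tokens on a cosine schedule. A minimal standalone sketch of that schedule (the steps and N values here are illustrative, not taken from the recording):

import jax.numpy as jnp

# Cosine schedule as in MaskGITStep (genie.py, recorded below): after step k of
# `steps`, roughly N * (1 - cos(pi * (k + 1) / (2 * steps))) tokens are committed
# (unmasked); the remainder stay masked for later steps.
def tokens_unmasked(step: int, steps: int, N: int) -> int:
    unmasked_ratio = jnp.cos(jnp.pi * (step + 1) / (steps * 2))
    return int(jnp.round(N * (1.0 - unmasked_ratio)))

steps, N = 25, 64  # illustrative values
print([tokens_unmasked(k, steps, N) for k in (0, steps // 2, steps - 1)])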
+101,288849,"TERMINAL",0,0,"]633;E;2025-07-01 17:34:05 ls /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data/overfit_dir;ea2a9ac7-e5a0-488a-9623-9429487e0dac]633;C",,terminal_output +102,298672,"TERMINAL",0,0,"ls /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data/overfit_dir/single_batch_12_elems.npy",,terminal_command +103,298686,"TERMINAL",0,0,"]633;E;2025-07-01 17:34:15 ls /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data/overfit_dir/single_batch_12_elems.npy ;ea2a9ac7-e5a0-488a-9623-9429487e0dac]633;C/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data/overfit_dir/single_batch_12_elems.npy\r\n]0;tum_cte0515@hkn1993:~/Projects/jafar]633;D;0",,terminal_output +104,306484,"sample.py",4953,0,"",python,selection_mouse +105,306571,"sample.py",4953,2,"ov",python,selection_mouse +106,306738,"sample.py",4953,8,"overfit_",python,selection_mouse +107,306740,"sample.py",4953,14,"overfit_dir/si",python,selection_mouse +108,306740,"sample.py",4953,17,"overfit_dir/singl",python,selection_mouse +109,306741,"sample.py",4953,21,"overfit_dir/single_ba",python,selection_mouse +110,306761,"sample.py",4953,23,"overfit_dir/single_batc",python,selection_mouse +111,306762,"sample.py",4953,24,"overfit_dir/single_batch",python,selection_mouse +112,306795,"sample.py",4953,26,"overfit_dir/single_batch_1",python,selection_mouse +113,306877,"sample.py",4953,28,"overfit_dir/single_batch_12_",python,selection_mouse +114,306901,"sample.py",4953,29,"overfit_dir/single_batch_12_e",python,selection_mouse +115,306954,"sample.py",4953,30,"overfit_dir/single_batch_12_el",python,selection_mouse +116,306988,"sample.py",4953,32,"overfit_dir/single_batch_12_elem",python,selection_mouse +117,307012,"sample.py",4953,34,"overfit_dir/single_batch_12_elems.",python,selection_mouse +118,307044,"sample.py",4953,35,"overfit_dir/single_batch_12_elems.n",python,selection_mouse +119,307112,"sample.py",4953,36,"overfit_dir/single_batch_12_elems.np",python,selection_mouse +120,307295,"sample.py",4953,37,"overfit_dir/single_batch_12_elems.npy",python,selection_mouse +121,308100,"sample.py",4953,37,"",python,content +122,309077,"sample.py",4953,0,"/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data/overfit_dir/single_batch_12_elems.npy",python,content +123,311349,"TERMINAL",0,0,"salloc",,terminal_focus +124,312882,"sample.py",0,0,"",python,tab +125,315853,"TERMINAL",0,0,"bash",,terminal_focus +126,317805,"TERMINAL",0,0,"undefinedjafar[tum_cte0515@hkn1993 jafar]$ queue",,terminal_command +127,317853,"TERMINAL",0,0,"]633;E;2025-07-01 17:34:34 queue;22f2c0ca-9871-4662-80ba-d628041a7c54]633;C",,terminal_output +128,317912,"TERMINAL",0,0,"[?1049h(B[?7hEvery 1.0s: squeue --mehkn1993.localdomain: Tue Jul 1 17:34:34 2025JOBID PARTITION NAME USER ST\tTIME NODES NODELIST(REASON)3309699 accelerat train_dy tum_cte0 PD\t0:00\t 1 (Priority)3309749 accelerat interact tum_cte0 PD\t0:00\t 1 (Priority)3309662 accelerat train_to tum_cte0 R44:21\t 1 hkn06323309663 accelerat train_la tum_cte0 R44:21\t 1 hkn06323309657 accelerat interact tum_cte0 R50:22\t 1 hkn0701",,terminal_output +129,318935,"TERMINAL",0,0,"5223",,terminal_output +130,320010,"TERMINAL",0,0,"6334",,terminal_output +131,321040,"TERMINAL",0,0,"7445",,terminal_output +132,322079,"TERMINAL",0,0,"8556",,terminal_output +133,322312,"TERMINAL",0,0,"salloc",,terminal_focus +134,323188,"TERMINAL",0,0,"9667",,terminal_output +135,324179,"TERMINAL",0,0,"41889",,terminal_output +136,324460,"sample.py",0,0,"",python,tab 
+139,326471,"sample.py",4957,0,"",python,selection_mouse +140,326635,"sample.py",4954,4,"hkfs",python,selection_mouse +141,326792,"sample.py",4954,131,"hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data/overfit_dir/single_batch_12_elems.npy"")\n# Get latent actions from first",python,selection_mouse +142,326825,"sample.py",4954,137,"hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data/overfit_dir/single_batch_12_elems.npy"")\n# Get latent actions from first video",python,selection_mouse +143,326826,"sample.py",4954,142,"hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data/overfit_dir/single_batch_12_elems.npy"")\n# Get latent actions from first video only",python,selection_mouse +144,326878,"sample.py",4954,28,"hkfs/work/workspace/scratch/",python,selection_mouse +145,326879,"sample.py",4954,39,"hkfs/work/workspace/scratch/tum_ind3695",python,selection_mouse +146,326879,"sample.py",4954,54,"hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared",python,selection_mouse +147,326912,"sample.py",4954,59,"hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data",python,selection_mouse +148,326968,"sample.py",4954,71,"hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data/overfit_dir",python,selection_mouse +149,327022,"sample.py",4929,29,"\nvideo_batch = np.load(""/hkfs",python,selection_mouse +150,327050,"sample.py",4954,71,"hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data/overfit_dir",python,selection_mouse +151,327125,"sample.py",4954,72,"hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data/overfit_dir/",python,selection_mouse +152,327151,"sample.py",4954,93,"hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data/overfit_dir/single_batch_12_elems",python,selection_mouse +154,327629,"sample.py",5037,0,"",python,selection_mouse +156,329227,"sample.py",4953,98,"",python,content +158,329674,"sample.py",4953,0,"overfit_dir/single_batch_12_elems.npy",python,content +159,330462,"sample.py",5643,0,"",python,selection_mouse
+283,459662,"train_dynamics.py",0,0,"from dataclasses import dataclass, field\nimport os\nimport time\n\nimport einops\nfrom flax.training import orbax_utils\nfrom flax.training.train_state import TrainState\nfrom jax.sharding import Mesh, PartitionSpec, NamedSharding\nfrom jax.experimental.mesh_utils import create_device_mesh\nimport optax\nimport orbax\nfrom orbax.checkpoint import PyTreeCheckpointer\nimport numpy as np\nimport jax\nimport jax.numpy as jnp\nimport tyro\nimport wandb\n\nfrom genie import Genie, restore_genie_components\nfrom models.tokenizer import TokenizerVQVAE\nfrom models.lam import LatentActionModel\nfrom utils.dataloader import get_dataloader\n\nts = int(time.time())\n\n\n@dataclass\nclass Args:\n # Experiment\n num_steps: int = 200_000\n seed: int = 0\n seq_len: int = 16\n image_channels: int = 3\n image_height: int = 90\n image_width: int = 160\n data_dir: str = ""data_tfrecords/coinrun""\n # Optimization\n batch_size: int = 36\n min_lr: float = 3e-6\n max_lr: float = 3e-5\n warmup_steps: int = 5000\n # Tokenizer\n tokenizer_dim: int = 512\n latent_patch_dim: int = 32\n num_patch_latents: int = 1024\n patch_size: int = 4\n tokenizer_num_blocks: int = 8\n tokenizer_num_heads: int = 8\n tokenizer_checkpoint: str = """"\n # LAM\n lam_dim: int = 512\n latent_action_dim: int = 32\n num_latent_actions: int = 6\n lam_patch_size: int = 16\n lam_num_blocks: int = 8\n lam_num_heads: int = 8\n lam_checkpoint: str = """"\n # Dynamics\n dyna_dim: int = 512\n dyna_num_blocks: int = 12\n dyna_num_heads: int = 8\n dropout: float = 0.0\n mask_limit: float = 0.5\n # Logging\n log: bool = False\n entity: str = """"\n project: str = """"\n name: str = ""train_dynamics""\n tags: list[str] = field(default_factory=lambda: [""dynamics""])\n log_interval: int = 5\n log_image_interval: int = 250\n ckpt_dir: str = """"\n log_checkpoint_interval: int = 25000\n log_gradients: bool = False\n\n\nargs = tyro.cli(Args)\n\n\ndef dynamics_loss_fn(params, state, inputs):\n """"""Compute
masked dynamics loss""""""\n outputs = state.apply_fn(\n params,\n inputs,\n training=True,\n rngs={""params"": inputs[""rng""], ""dropout"": inputs[""dropout_rng""]},\n )\n mask = outputs[""mask""]\n ce_loss = optax.softmax_cross_entropy_with_integer_labels(\n outputs[""token_logits""], outputs[""video_tokens""]\n )\n ce_loss = (mask * ce_loss).sum() / mask.sum()\n acc = outputs[""token_logits""].argmax(-1) == outputs[""video_tokens""]\n acc = (mask * acc).sum() / mask.sum()\n select_probs = jax.nn.softmax(outputs[""token_logits""])\n metrics = dict(\n cross_entropy_loss=ce_loss,\n masked_token_accuracy=acc,\n select_logit=outputs[""token_logits""].max(-1).mean(),\n select_p=select_probs.max(-1).mean(),\n entropy=jax.scipy.special.entr(select_probs).sum(-1).mean(),\n )\n return ce_loss, (outputs[""recon""], metrics)\n\n\n@jax.jit\ndef train_step(state, inputs):\n """"""Update state and compute metrics""""""\n grad_fn = jax.value_and_grad(dynamics_loss_fn, has_aux=True, allow_int=True)\n (loss, (recon, metrics)), grads = grad_fn(state.params, state, inputs)\n state = state.apply_gradients(grads=grads)\n if args.log_gradients:\n metrics[""gradients_std/""] = jax.tree.map(\n lambda x: x.std(), grads[""params""][""dynamics""]\n )\n return state, loss, recon, metrics\n\n\nif __name__ == ""__main__"":\n jax.distributed.initialize()\n num_devices = jax.device_count()\n if num_devices == 0:\n raise ValueError(""No JAX devices found."")\n print(f""Running on {num_devices} devices."")\n\n if args.batch_size % num_devices != 0:\n raise ValueError(\n f""Global batch size {args.batch_size} must be divisible by ""\n f""number of devices {num_devices}.""\n )\n\n per_device_batch_size_for_init = args.batch_size // num_devices\n\n rng = jax.random.PRNGKey(args.seed)\n if args.log and jax.process_index() == 0:\n wandb.init(\n entity=args.entity,\n project=args.project,\n name=args.name,\n tags=args.tags,\n group=""debug"",\n config=args\n )\n\n # --- Initialize model ---\n genie = Genie(\n # Tokenizer\n in_dim=args.image_channels,\n tokenizer_dim=args.tokenizer_dim,\n latent_patch_dim=args.latent_patch_dim,\n num_patch_latents=args.num_patch_latents,\n patch_size=args.patch_size,\n tokenizer_num_blocks=args.tokenizer_num_blocks,\n tokenizer_num_heads=args.tokenizer_num_heads,\n # LAM\n lam_dim=args.lam_dim,\n latent_action_dim=args.latent_action_dim,\n num_latent_actions=args.num_latent_actions,\n lam_patch_size=args.lam_patch_size,\n lam_num_blocks=args.lam_num_blocks,\n lam_num_heads=args.lam_num_heads,\n # Dynamics\n dyna_dim=args.dyna_dim,\n dyna_num_blocks=args.dyna_num_blocks,\n dyna_num_heads=args.dyna_num_heads,\n dropout=args.dropout,\n mask_limit=args.mask_limit,\n )\n rng, _rng = jax.random.split(rng)\n image_shape = (args.image_height, args.image_width, args.image_channels)\n dummy_inputs = dict(\n videos=jnp.zeros(\n (per_device_batch_size_for_init, args.seq_len, *image_shape),\n dtype=jnp.float32,\n ),\n action=jnp.zeros(\n (per_device_batch_size_for_init, args.seq_len), dtype=jnp.float32\n ),\n mask_rng=_rng,\n )\n rng, _rng = jax.random.split(rng)\n init_params = genie.init(_rng, dummy_inputs)\n\n # --- Initialize optimizer ---\n lr_schedule = optax.warmup_cosine_decay_schedule(\n args.min_lr, args.max_lr, args.warmup_steps, args.num_steps\n )\n tx = optax.adamw(learning_rate=lr_schedule, b1=0.9, b2=0.9, weight_decay=1e-4)\n train_state = TrainState.create(apply_fn=genie.apply, params=init_params, tx=tx)\n\n device_mesh_arr = create_device_mesh((num_devices,))\n mesh = 
Mesh(devices=device_mesh_arr, axis_names=(""data"",))\n\n replicated_sharding = NamedSharding(mesh, PartitionSpec())\n train_state = jax.device_put(train_state, replicated_sharding)\n\n # --- Restore checkpoint ---\n train_state = restore_genie_components(\n train_state, replicated_sharding, dummy_inputs, rng, args\n )\n\n # --- TRAIN LOOP ---\n tfrecord_files = [\n os.path.join(args.data_dir, x)\n for x in os.listdir(args.data_dir)\n if x.endswith("".tfrecord"")\n ]\n # dataloader = get_dataloader(\n # # NOTE: We deliberately pass the global batch size\n # # The dataloader shards the dataset across all processes\n # tfrecord_files,\n # args.seq_len,\n # args.batch_size,\n # *image_shape,\n # )\n step = 0\n while step < args.num_steps:\n # for videos in dataloader:\n npy_path = ""overfit_dir/single_sample_corner.npy""\n # npy_path = ""overfit_dir/single_batch_3_elems.npy""\n videos = np.load(npy_path)\n print(""batch shape: "", videos.shape)\n while(True):\n # --- Train step ---\n rng, _rng, _rng_dropout, _rng_mask = jax.random.split(rng, 4)\n\n videos_sharding = NamedSharding(\n mesh, PartitionSpec(""data"", None, None, None, None)\n )\n videos = jax.make_array_from_process_local_data(videos_sharding, videos)\n\n inputs = dict(\n videos=videos,\n rng=_rng,\n dropout_rng=_rng_dropout,\n mask_rng=_rng_mask,\n )\n start_time = time.time()\n train_state, loss, recon, metrics = train_step(train_state, inputs)\n elapsed_time = (time.time() - start_time) * 1000\n print(f""Step {step}, loss: {loss}, step time: {elapsed_time}ms"")\n step += 1\n\n # --- Logging ---\n if args.log:\n if step % args.log_interval == 0 and jax.process_index() == 0:\n wandb.log(\n {\n ""loss"": loss,\n ""step"": step,\n ""step_time_ms"": elapsed_time,\n **metrics,\n }\n )\n if step % args.log_image_interval == 0:\n gt_seq = inputs[""videos""][0]\n recon_seq = recon[0].clip(0, 1)\n comparison_seq = jnp.concatenate((gt_seq, recon_seq), axis=1)\n comparison_seq = einops.rearrange(\n comparison_seq * 255, ""t h w c -> h (t w) c""\n )\n if jax.process_index() == 0:\n log_images = dict(\n image=wandb.Image(np.asarray(gt_seq[args.seq_len - 1])),\n recon=wandb.Image(np.asarray(recon_seq[args.seq_len - 1])),\n true_vs_recon=wandb.Image(\n np.asarray(comparison_seq.astype(np.uint8))\n ),\n )\n wandb.log(log_images)\n if step % args.log_checkpoint_interval == 0:\n ckpt = {""model"": train_state}\n orbax_checkpointer = orbax.checkpoint.PyTreeCheckpointer()\n save_args = orbax_utils.save_args_from_target(ckpt)\n orbax_checkpointer.save(\n os.path.join(os.getcwd(), args.ckpt_dir, f""genie_{ts}_{step}""),\n ckpt,\n save_args=save_args,\n )\n if step >= args.num_steps:\n break\n",python,tab +284,459839,"TERMINAL",0,0,"6334",,terminal_output +285,460815,"TERMINAL",0,0,"7445",,terminal_output +286,461849,"genie.py",0,0,"from typing import Dict, Any\n\nimport optax\nimport jax\nimport jax.numpy as jnp\nimport flax.linen as nn\nfrom jax import NamedSharding\nfrom flax.training.train_state import TrainState\nfrom flax.training import orbax_utils\nfrom orbax.checkpoint import PyTreeCheckpointer\n\nfrom models.dynamics import DynamicsMaskGIT\nfrom models.lam import LatentActionModel\nfrom models.tokenizer import TokenizerVQVAE\n\n\nclass Genie(nn.Module):\n """"""Genie model""""""\n\n # --- Tokenizer ---\n in_dim: int\n tokenizer_dim: int\n latent_patch_dim: int\n num_patch_latents: int\n patch_size: int\n tokenizer_num_blocks: int\n tokenizer_num_heads: int\n # --- LAM ---\n lam_dim: int\n latent_action_dim: int\n num_latent_actions: int\n 
lam_patch_size: int\n lam_num_blocks: int\n lam_num_heads: int\n # --- Dynamics ---\n dyna_dim: int\n dyna_num_blocks: int\n dyna_num_heads: int\n dropout: float = 0.0\n mask_limit: float = 0.0\n\n def setup(self):\n self.tokenizer = TokenizerVQVAE(\n in_dim=self.in_dim,\n model_dim=self.tokenizer_dim,\n latent_dim=self.latent_patch_dim,\n num_latents=self.num_patch_latents,\n patch_size=self.patch_size,\n num_blocks=self.tokenizer_num_blocks,\n num_heads=self.tokenizer_num_heads,\n dropout=0.0,\n codebook_dropout=0.0,\n )\n self.lam = LatentActionModel(\n in_dim=self.in_dim,\n model_dim=self.lam_dim,\n latent_dim=self.latent_patch_dim,\n num_latents=self.num_latent_actions,\n patch_size=self.lam_patch_size,\n num_blocks=self.lam_num_blocks,\n num_heads=self.lam_num_heads,\n dropout=0.0,\n codebook_dropout=0.0,\n )\n self.dynamics = DynamicsMaskGIT(\n model_dim=self.dyna_dim,\n num_latents=self.num_patch_latents,\n num_blocks=self.dyna_num_blocks,\n num_heads=self.dyna_num_heads,\n dropout=self.dropout,\n mask_limit=self.mask_limit,\n )\n\n def __call__(self, batch: Dict[str, Any], training: bool = True) -> Dict[str, Any]:\n tokenizer_outputs = self.tokenizer.vq_encode(batch[""videos""], training=False)\n lam_outputs = self.lam.vq_encode(batch[""videos""], training=False)\n outputs = dict(\n video_tokens=jax.lax.stop_gradient(tokenizer_outputs[""indices""]),\n latent_actions=jax.lax.stop_gradient(lam_outputs[""z_q""]),\n )\n outputs[""mask_rng""] = batch[""mask_rng""]\n dyna_outputs = self.dynamics(outputs, training)\n outputs.update(dyna_outputs)\n mle_indices = jnp.argmax(outputs[""token_logits""], axis=-1)\n outputs[""recon""] = self.tokenizer.decode(\n mle_indices, batch[""videos""].shape[2:4]\n )\n return outputs\n\n @nn.compact\n def sample(\n self,\n batch: Dict[str, Any],\n steps: int = 25,\n temperature: int = 1,\n sample_argmax: bool = False,\n ) -> Any:\n # --- Encode videos and actions ---\n tokenizer_out = self.tokenizer.vq_encode(batch[""videos""], training=False)\n token_idxs = tokenizer_out[""indices""]\n new_frame_idxs = jnp.zeros_like(token_idxs)[:, 0]\n action_tokens = self.lam.vq.get_codes(batch[""latent_actions""])\n\n # --- Initialize MaskGIT ---\n init_mask = jnp.ones_like(token_idxs, dtype=bool)[:, 0]\n init_carry = (\n batch[""rng""],\n new_frame_idxs,\n init_mask,\n token_idxs,\n action_tokens,\n )\n MaskGITLoop = nn.scan(\n MaskGITStep,\n variable_broadcast=""params"",\n split_rngs={""params"": False},\n in_axes=0,\n out_axes=0,\n length=steps,\n )\n\n # --- Run MaskGIT loop ---\n loop_fn = MaskGITLoop(\n dynamics=self.dynamics,\n tokenizer=self.tokenizer,\n temperature=temperature,\n sample_argmax=sample_argmax,\n steps=steps,\n )\n final_carry, _ = loop_fn(init_carry, jnp.arange(steps))\n new_frame_idxs = final_carry[1]\n new_frame_pixels = self.tokenizer.decode(\n jnp.expand_dims(new_frame_idxs, 1),\n video_hw=batch[""videos""].shape[2:4],\n )\n return new_frame_pixels\n\n def vq_encode(self, batch, training) -> Dict[str, Any]:\n # --- Preprocess videos ---\n lam_output = self.lam.vq_encode(batch[""videos""], training=training)\n return lam_output[""indices""]\n\n\nclass MaskGITStep(nn.Module):\n dynamics: nn.Module\n tokenizer: nn.Module\n temperature: float\n sample_argmax: bool\n steps: int\n\n @nn.compact\n def __call__(self, carry, x):\n rng, final_token_idxs, mask, token_idxs, action_tokens = carry\n step = x\n B, T, N = token_idxs.shape[:3]\n\n # --- Construct + encode video ---\n vid_token_idxs = jnp.concatenate(\n (token_idxs, 
jnp.expand_dims(final_token_idxs, 1)), axis=1\n )\n vid_embed = self.dynamics.patch_embed(vid_token_idxs)\n curr_masked_frame = jnp.where(\n jnp.expand_dims(mask, -1),\n self.dynamics.mask_token[0],\n vid_embed[:, -1],\n )\n vid_embed = vid_embed.at[:, -1].set(curr_masked_frame)\n\n # --- Predict transition ---\n act_embed = self.dynamics.action_up(action_tokens)\n vid_embed += jnp.pad(act_embed, ((0, 0), (1, 0), (0, 0), (0, 0)))\n unmasked_ratio = jnp.cos(jnp.pi * (step + 1) / (self.steps * 2))\n step_temp = self.temperature * (1.0 - unmasked_ratio)\n final_logits = self.dynamics.dynamics(vid_embed)[:, -1] / step_temp\n\n # --- Sample new tokens for final frame ---\n if self.sample_argmax:\n sampled_token_idxs = jnp.argmax(final_logits, axis=-1)\n else:\n rng, _rng = jax.random.split(rng)\n sampled_token_idxs = jnp.where(\n step == self.steps - 1,\n jnp.argmax(final_logits, axis=-1),\n jax.random.categorical(_rng, final_logits),\n )\n gather_fn = jax.vmap(jax.vmap(lambda x, y: x[y]))\n final_token_probs = gather_fn(jax.nn.softmax(final_logits), sampled_token_idxs)\n final_token_probs += ~mask\n # Update masked tokens only\n new_token_idxs = jnp.where(mask, sampled_token_idxs, final_token_idxs)\n\n # --- Update mask ---\n num_unmasked_tokens = jnp.round(N * (1.0 - unmasked_ratio)).astype(int)\n idx_mask = jnp.arange(final_token_probs.shape[-1]) > num_unmasked_tokens\n sorted_idxs = jnp.argsort(final_token_probs, axis=-1, descending=True)\n mask_update_fn = jax.vmap(lambda msk, ids: msk.at[ids].set(idx_mask))\n new_mask = mask_update_fn(mask, sorted_idxs)\n\n new_carry = (rng, new_token_idxs, new_mask, token_idxs, action_tokens)\n return new_carry, None\n\n\ndef restore_genie_components(\n train_state: TrainState,\n sharding: NamedSharding,\n inputs: Dict[str, jax.Array],\n rng: jax.Array,\n args,\n):\n """"""Restore pre-trained Genie components""""""\n rng, _rng = jax.random.split(rng)\n\n dummy_tokenizer = TokenizerVQVAE(\n in_dim=args.image_channels,\n model_dim=args.tokenizer_dim,\n latent_dim=args.latent_patch_dim,\n num_latents=args.num_patch_latents,\n patch_size=args.patch_size,\n num_blocks=args.tokenizer_num_blocks,\n num_heads=args.tokenizer_num_heads,\n dropout=args.dropout,\n codebook_dropout=args.dropout,\n )\n dummy_lam = LatentActionModel(\n in_dim=args.image_channels,\n model_dim=args.lam_dim,\n latent_dim=args.latent_patch_dim,\n num_latents=args.num_latent_actions,\n patch_size=args.lam_patch_size,\n num_blocks=args.lam_num_blocks,\n num_heads=args.lam_num_heads,\n dropout=args.dropout,\n codebook_dropout=args.dropout,\n )\n tokenizer_init_params = dummy_tokenizer.init(_rng, inputs)\n lam_init_params = dummy_lam.init(_rng, inputs)\n\n # dummy values since we only use tx to initialize the dummy train states\n dummy_tx = optax.adamw(\n learning_rate=optax.constant_schedule(args.max_lr),\n b1=0.9,\n b2=0.9,\n weight_decay=1e-4,\n )\n\n dummy_tokenizer_train_state = TrainState.create(\n apply_fn=dummy_tokenizer.apply, params=tokenizer_init_params, tx=dummy_tx\n )\n dummy_lam_train_state = TrainState.create(\n apply_fn=dummy_lam.apply, params=lam_init_params, tx=dummy_tx\n )\n\n def create_abstract_sharded_pytree(pytree_template, sharding_spec):\n """"""Replaces arrays in a pytree with ShapeDtypeStructs having the given sharding.""""""\n\n def map_fn(leaf_template):\n if hasattr(leaf_template, ""shape"") and hasattr(leaf_template, ""dtype""):\n return jax.ShapeDtypeStruct(\n leaf_template.shape, leaf_template.dtype, sharding=sharding_spec\n )\n return leaf_template\n\n return 
jax.tree_util.tree_map(map_fn, pytree_template)\n\n abstract_sharded_tokenizer_state = create_abstract_sharded_pytree(\n dummy_tokenizer_train_state, sharding\n )\n abstract_sharded_lam_state = create_abstract_sharded_pytree(\n dummy_lam_train_state, sharding\n )\n\n tokenizer_restore_target = {""model"": abstract_sharded_tokenizer_state}\n lam_restore_target = {""model"": abstract_sharded_lam_state}\n\n tokenizer_restore_args = orbax_utils.restore_args_from_target(\n tokenizer_restore_target\n )\n lam_restore_args = orbax_utils.restore_args_from_target(lam_restore_target)\n\n restored_tokenizer_params = (\n PyTreeCheckpointer()\n .restore(\n args.tokenizer_checkpoint,\n item=tokenizer_restore_target,\n restore_args=tokenizer_restore_args,\n )[""model""]\n .params[""params""]\n )\n restored_lam_params = (\n PyTreeCheckpointer()\n .restore(\n args.lam_checkpoint, item=lam_restore_target, restore_args=lam_restore_args\n )[""model""]\n .params[""params""]\n )\n # Genie does not initialize all LAM modules, thus we omit those extra modules during restoration\n # (f.srambical) FIXME: Currently, this is a small HBM memory crunch since the LAM's decoder is loaded into HBM and immediately dicarded.\n # A workaround would be to restore to host memory first, and only move the weights to HBM after pruning the decoder\n restored_lam_params = {\n k: v\n for k, v in restored_lam_params.items()\n if k in train_state.params[""params""][""lam""]\n }\n\n train_state.params[""params""][""tokenizer""].update(restored_tokenizer_params)\n train_state.params[""params""][""lam""].update(restored_lam_params)\n\n return train_state\n",python,tab +287,461946,"TERMINAL",0,0,"8556",,terminal_output +288,462910,"TERMINAL",0,0,"9667",,terminal_output +289,463963,"TERMINAL",0,0,"7:00778",,terminal_output +290,465016,"TERMINAL",0,0,"1889",,terminal_output +291,466023,"TERMINAL",0,0,"29950",,terminal_output +292,467162,"TERMINAL",0,0,"350501",,terminal_output +293,467678,"models/dynamics.py",0,0,"from typing import Dict, Any\n\nimport jax\nimport jax.numpy as jnp\nimport flax.linen as nn\n\nfrom utils.nn import STTransformer\n\n\nclass DynamicsMaskGIT(nn.Module):\n """"""MaskGIT dynamics model""""""\n\n model_dim: int\n num_latents: int\n num_blocks: int\n num_heads: int\n dropout: float\n mask_limit: float\n\n def setup(self):\n self.dynamics = STTransformer(\n self.model_dim,\n self.num_latents,\n self.num_blocks,\n self.num_heads,\n self.dropout,\n )\n self.patch_embed = nn.Embed(self.num_latents, self.model_dim)\n self.mask_token = self.param(\n ""mask_token"",\n nn.initializers.lecun_uniform(),\n (1, 1, 1, self.model_dim),\n )\n self.action_up = nn.Dense(self.model_dim)\n\n def __call__(self, batch: Dict[str, Any], training: bool = True) -> Dict[str, Any]:\n # --- Mask videos ---\n vid_embed = self.patch_embed(batch[""video_tokens""])\n if training:\n rng1, rng2 = jax.random.split(batch[""mask_rng""])\n mask_prob = jax.random.uniform(rng1, minval=self.mask_limit)\n mask = jax.random.bernoulli(rng2, mask_prob, vid_embed.shape[:-1])\n mask = mask.at[:, 0].set(False)\n # FIXME mihir\n # vid_embed = jnp.where(jnp.expand_dims(mask, -1), self.mask_token, vid_embed)\n\n ############################## \n rng2, _rng = jax.random.split(rng2)\n noise = jax.random.normal(_rng, self.mask_token.shape) # Gaussian noise\n vid_embed = jnp.where(jnp.expand_dims(mask, -1), noise, vid_embed)\n ##############################\n else:\n mask = None\n\n # --- Predict transition ---\n act_embed = self.action_up(batch[""latent_actions""])\n 
vid_embed += jnp.pad(act_embed, ((0, 0), (1, 0), (0, 0), (0, 0)))\n logits = self.dynamics(vid_embed)\n return dict(token_logits=logits, mask=mask)\n",python,tab
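The models/dynamics.py captured above swaps the learned mask token for Gaussian noise at masked positions (the commented-out jnp.where line is the original mask-token path). A self-contained sketch of just that masking step; the (B, T, N, D) shapes are assumptions for illustration:

import jax
import jax.numpy as jnp

# Masked positions receive Gaussian noise instead of a learned mask token,
# mirroring the experiment recorded in models/dynamics.py above.
def mask_with_noise(rng, vid_embed, mask_limit=0.5):
    B, T, N, D = vid_embed.shape
    rng1, rng2, rng3 = jax.random.split(rng, 3)
    mask_prob = jax.random.uniform(rng1, minval=mask_limit)  # per-call mask rate
    mask = jax.random.bernoulli(rng2, mask_prob, (B, T, N))  # True = masked
    mask = mask.at[:, 0].set(False)                          # never mask frame 0
    noise = jax.random.normal(rng3, (1, 1, 1, D))            # mask_token-shaped noise
    return jnp.where(jnp.expand_dims(mask, -1), noise, vid_embed), mask

emb, mask = mask_with_noise(jax.random.PRNGKey(0), jnp.zeros((2, 16, 64, 128)))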
+728,925439,"TERMINAL",0,0,"watch",,terminal_focus
+797,996806,"TERMINAL",0,0,"bash",,terminal_focus
+818,1017875,"TERMINAL",0,0,"salloc",,terminal_focus
+822,1020824,"TERMINAL",0,0,"watch",,terminal_focus
+830,1028552,"TERMINAL",0,0,"salloc",,terminal_focus
+840,1037500,"TERMINAL",0,0,"bash",,terminal_focus
+882,1081393,"TERMINAL",0,0,"salloc",,terminal_focus
+885,1082558,"TERMINAL",0,0,"bash",,terminal_focus
+911,1109147,"TERMINAL",0,0,"sbatch --time=01:00:00 --partition=accelerated --nodes=1 --ntasks-per-node=1 --gres=gpu:1 --cpus-per-task=5 --mem=50G --wrap=""sh scripts_horeka/overfit_sample_tiny/sample.sh""",,terminal_command
+912,1109197,"TERMINAL",0,0,"]633;E;2025-07-01 17:47:46 sbatch --time=01:00:00 --partition=accelerated --nodes=1 --ntasks-per-node=1 --gres=gpu:1 --cpus-per-task=5 --mem=50G --wrap=""sh scripts_horeka/overfit_sample_tiny/sample.sh"";ea2a9ac7-e5a0-488a-9623-9429487e0dac]633;CSubmitted batch job 3309772\r\n]0;tum_cte0515@hkn1993:~/Projects/jafar]633;D;0]633;P;Cwd=/home/hk-project-p0023960/tum_cte0515/Projects/jafar",,terminal_output
+916,1111875,"TERMINAL",0,0,"queue",,terminal_command
+917,1111941,"TERMINAL",0,0,"]633;E;2025-07-01 17:47:48 queue;ea2a9ac7-e5a0-488a-9623-9429487e0dac]633;C[?1049h(B[?7hEvery 1.0s: squeue --mehkn1993.localdomain: Tue Jul 1 17:47:48 2025JOBID PARTITION NAME USER ST\tTIME NODES NODELIST(REASON)3309749 accelerat interact tum_cte0 PD\t0:00\t 1 (Priority)3309772 accelerat wrap tum_cte0 PD\t0:00\t 1 (Priority)3309699 accelerat train_dy tum_cte0 R\t7:21\t 1 hkn07123309662 accelerat train_to tum_cte0 R57:35\t 1 hkn06323309663 accelerat train_la tum_cte0 R57:35\t 1 hkn06323309657 accelerat interact tum_cte0 R 1:03:36\t 1 hkn0701",,terminal_output
+930,1119532,"TERMINAL",0,0,"scancel 3309657",,terminal_command
+932,1120285,"TERMINAL",0,0,"M73309657 accelerat interact tum_cte0 CG 1:03:44\t 1 hkn07013044",,terminal_output
+1066,1260677,"TERMINAL",0,0,"salloc",,terminal_focus
+1068,1262038,"TERMINAL",0,0,"bash",,terminal_focus
+1072,1264892,"TERMINAL",0,0,"queue",,terminal_command
+1074,1265008,"TERMINAL",0,0,"[?1049h(B[?7hEvery 1.0s: squeue --mehkn1993.localdomain: Tue Jul 1 17:50:21 2025JOBID PARTITION NAME USER ST\tTIME NODES NODELIST(REASON)3309749 accelerat interact tum_cte0 PD\t0:00\t 1 (Priority)3309772 accelerat wrap tum_cte0 PD\t0:00\t 1 (Priority)3309699 accelerat train_dy tum_cte0 R\t9:54\t 1 hkn07123309662 accelerat train_to tum_cte0 R 1:00:08\t 1 hkn06323309663 accelerat train_la tum_cte0 R 1:00:08\t 1 hkn0632",,terminal_output
+1181,1320954,"TERMINAL",0,0,"idling",,terminal_command
+1182,1321031,"TERMINAL",0,0,"]633;E;2025-07-01 17:51:17 idling;ea2a9ac7-e5a0-488a-9623-9429487e0dac]633;C[?1049h(B[?7hEvery 1.0s: sinfo_t_idlehkn1993.localdomain: Tue Jul 1 17:51:17 2025Partition dev_cpuonly: 11 nodes idle\rPartition cpuonly: 71 nodes idle\rPartition dev_accelerated:\t 1 nodes idle\rPartition accelerated: 12 nodes idle\rPartition dev_accelerated-h100 :\t 0 nodes idle\rPartition accelerated-h100:\t 0 nodes idle\rPartition large:\t 8 nodes idle",,terminal_output
+1251,1358132,"TERMINAL",0,0,"salloc",,terminal_focus
+1254,1358928,"TERMINAL",0,0,"^Csalloc: Job allocation 3309749 has been revoked.\r\nsalloc: Job aborted due to signal\r\n]0;tum_cte0515@hkn1993:~/Projects/jafar]633;D;1",,terminal_output
+1260,1361645,"TERMINAL",0,0,"watch",,terminal_focus
+1264,1362868,"TERMINAL",0,0,"bash",,terminal_focus
+1275,1368775,"TERMINAL",0,0,"salloc --time=01:00:00 --partition=dev_accelerated --nodes=1 --ntasks-per-node=1 --gres=gpu:1 --cpus-per-task=5 --mem=50G",,terminal_command
+1277,1368855,"TERMINAL",0,0,"]633;E;2025-07-01 17:52:05 salloc --time=01:00:00 --partition=dev_accelerated --nodes=1 --ntasks-per-node=1 --gres=gpu:1 --cpus-per-task=5 --mem=50G;b1a412dd-3827-45fd-af4c-b6c15dd865d1]633;Csalloc: Pending job allocation 3309785\r\nsalloc: job 3309785 queued and waiting for resources\r\n",,terminal_output
+1278,1369180,"TERMINAL",0,0,"58223309785 dev_accel interact tum_cte0 PD\t0:00\t 1 (Priority)",,terminal_output
+1295,1377768,"TERMINAL",0,0,"^Csalloc: Job allocation 3309785 has been revoked.\r\nsalloc: Job aborted due to signal\r\n]0;tum_cte0515@hkn1993:~/Projects/jafar]633;D;1",,terminal_output
+1303,1381592,"TERMINAL",0,0,"queue",,terminal_command
+1304,1381670,"TERMINAL",0,0,"]633;E;2025-07-01 17:52:18 queue;b1a412dd-3827-45fd-af4c-b6c15dd865d1]633;C[?1049h(B[?7hEvery 1.0s: squeue --mehkn1993.localdomain: Tue Jul 1 17:52:18 2025JOBID PARTITION NAME USER ST\tTIME NODES NODELIST(REASON)3309772 accelerat wrap tum_cte0 PD\t0:00\t 1 (Priority)3309699 accelerat train_dy tum_cte0 R11:51\t 1 hkn07123309662 accelerat train_to tum_cte0 R 1:02:05\t 1 hkn06323309663 accelerat train_la tum_cte0 R 1:02:05\t 1 hkn0632",,terminal_output
+1313,1384473,"TERMINAL",0,0,"idling",,terminal_command
+1315,1384575,"TERMINAL",0,0,"[?1049h(B[?7hEvery 1.0s: sinfo_t_idlehkn1993.localdomain: Tue Jul 1 17:52:21 2025Partition dev_cpuonly: 11 nodes idle\rPartition cpuonly: 71 nodes idle\rPartition dev_accelerated:\t 1 nodes idle\rPartition accelerated: 11 nodes idle\rPartition dev_accelerated-h100 :\t 0 nodes idle\rPartition accelerated-h100:\t 0 nodes idle\rPartition large:\t 8 nodes idle",,terminal_output
+1320,1386067,"TERMINAL",0,0,"watch",,terminal_focus
+1728,1813522,"TERMINAL",0,0,"3037:0077",,terminal_output +1729,1814568,"TERMINAL",0,0,"14188",,terminal_output +1730,1815686,"TERMINAL",0,0,"25299",,terminal_output +1731,1816647,"TERMINAL",0,0,"3632020",,terminal_output +1732,1817737,"TERMINAL",0,0,"47411",,terminal_output +1733,1818805,"TERMINAL",0,0,"58522",,terminal_output +1734,1819882,"TERMINAL",0,0,"69633",,terminal_output +1735,1820907,"TERMINAL",0,0,"710744",,terminal_output +1736,1821941,"TERMINAL",0,0,"81855",,terminal_output +1737,1822942,"TERMINAL",0,0,"92966",,terminal_output +1738,1823997,"TERMINAL",0,0,"4031077",,terminal_output +1739,1825031,"TERMINAL",0,0,"14188",,terminal_output +1740,1826089,"TERMINAL",0,0,"25299",,terminal_output +1741,1827145,"TERMINAL",0,0,"3743131",,terminal_output +1742,1828196,"TERMINAL",0,0,"58522",,terminal_output +1743,1829246,"TERMINAL",0,0,"69633",,terminal_output +1744,1830327,"TERMINAL",0,0,"720744",,terminal_output +1745,1831453,"TERMINAL",0,0,"81855",,terminal_output +1746,1832477,"TERMINAL",0,0,"92966",,terminal_output +1747,1833441,"TERMINAL",0,0,"5032077",,terminal_output +1748,1834527,"TERMINAL",0,0,"14188",,terminal_output +1749,1835549,"TERMINAL",0,0,"25299",,terminal_output +1750,1836639,"TERMINAL",0,0,"3634040",,terminal_output +1751,1837700,"TERMINAL",0,0,"47411",,terminal_output +1752,1838679,"TERMINAL",0,0,"58522",,terminal_output +1753,1839749,"TERMINAL",0,0,"69633",,terminal_output +1754,1840874,"TERMINAL",0,0,"730744",,terminal_output +1755,1841832,"TERMINAL",0,0,"81855",,terminal_output +1756,1842945,"TERMINAL",0,0,"92966",,terminal_output +1757,1843955,"TERMINAL",0,0,"8:00:0033077",,terminal_output +1758,1844980,"TERMINAL",0,0,"14188",,terminal_output +1759,1846132,"TERMINAL",0,0,"25299",,terminal_output +1760,1847181,"TERMINAL",0,0,"3745151",,terminal_output +1761,1848247,"TERMINAL",0,0,"58522",,terminal_output +1762,1849271,"TERMINAL",0,0,"69633",,terminal_output +1763,1850398,"TERMINAL",0,0,"740744",,terminal_output +1764,1851421,"TERMINAL",0,0,"81855",,terminal_output +1765,1852445,"TERMINAL",0,0,"92966",,terminal_output +1766,1853452,"TERMINAL",0,0,"1034077",,terminal_output +1767,1854596,"TERMINAL",0,0,"14188",,terminal_output +1768,1855548,"TERMINAL",0,0,"25299",,terminal_output +1769,1856637,"TERMINAL",0,0,"36310:0010:00",,terminal_output +1770,1857667,"TERMINAL",0,0,"47411",,terminal_output +1771,1858700,"TERMINAL",0,0,"58522",,terminal_output +1772,1859821,"TERMINAL",0,0,"69633",,terminal_output +1773,1860843,"TERMINAL",0,0,"750744",,terminal_output +1774,1861970,"TERMINAL",0,0,"81855",,terminal_output +1775,1862911,"TERMINAL",0,0,"92966",,terminal_output +1776,1864018,"TERMINAL",0,0,"2035077",,terminal_output +1777,1865042,"TERMINAL",0,0,"14188",,terminal_output +1778,1866064,"TERMINAL",0,0,"25299",,terminal_output +1779,1867123,"TERMINAL",0,0,"3631010",,terminal_output +1780,1868164,"TERMINAL",0,0,"48522",,terminal_output +1781,1869239,"TERMINAL",0,0,"69633",,terminal_output +1782,1870377,"TERMINAL",0,0,"720:00744",,terminal_output +1783,1871322,"TERMINAL",0,0,"81855",,terminal_output +1784,1872414,"TERMINAL",0,0,"92966",,terminal_output +1785,1873438,"TERMINAL",0,0,"3038:0077",,terminal_output +1786,1874475,"TERMINAL",0,0,"14188",,terminal_output +1787,1876204,"TERMINAL",0,0,"25299",,terminal_output +1788,1877227,"TERMINAL",0,0,"3742121",,terminal_output +1789,1878251,"TERMINAL",0,0,"58522",,terminal_output +1790,1879376,"TERMINAL",0,0,"69633",,terminal_output +1791,1880400,"TERMINAL",0,0,"710744",,terminal_output 
+1792,1881424,"TERMINAL",0,0,"81855",,terminal_output +1793,1882447,"TERMINAL",0,0,"92966",,terminal_output +1794,1883466,"TERMINAL",0,0,"4031077",,terminal_output +1795,1884522,"TERMINAL",0,0,"14188",,terminal_output +1796,1885564,"TERMINAL",0,0,"25299",,terminal_output +1797,1886638,"TERMINAL",0,0,"3633030",,terminal_output +1798,1887773,"TERMINAL",0,0,"47411",,terminal_output +1799,1888799,"TERMINAL",0,0,"58522",,terminal_output +1800,1889823,"TERMINAL",0,0,"69633",,terminal_output +1801,1890847,"TERMINAL",0,0,"720744",,terminal_output +1802,1891864,"TERMINAL",0,0,"81855",,terminal_output +1803,1892995,"TERMINAL",0,0,"92966",,terminal_output +1804,1894019,"TERMINAL",0,0,"5032077",,terminal_output +1805,1895045,"TERMINAL",0,0,"14188",,terminal_output +1806,1896068,"TERMINAL",0,0,"25299",,terminal_output +1807,1897083,"TERMINAL",0,0,"3634040",,terminal_output +1808,1898128,"TERMINAL",0,0,"47411",,terminal_output +1809,1899161,"TERMINAL",0,0,"59633",,terminal_output +1810,1900211,"TERMINAL",0,0,"730744",,terminal_output +1811,1901290,"TERMINAL",0,0,"81855",,terminal_output +1812,1902315,"TERMINAL",0,0,"92966",,terminal_output +1813,1903441,"TERMINAL",0,0,"1:0033077",,terminal_output +1814,1904397,"TERMINAL",0,0,"14188",,terminal_output +1815,1905489,"TERMINAL",0,0,"25299",,terminal_output +1816,1906514,"TERMINAL",0,0,"3635050",,terminal_output +1817,1907543,"TERMINAL",0,0,"47411",,terminal_output +1818,1908569,"TERMINAL",0,0,"58522",,terminal_output +1819,1909691,"TERMINAL",0,0,"69633",,terminal_output +1820,1910669,"TERMINAL",0,0,"740744",,terminal_output +1821,1911745,"TERMINAL",0,0,"81855",,terminal_output +1822,1912862,"TERMINAL",0,0,"92966",,terminal_output +1823,1913802,"TERMINAL",0,0,"1034077",,terminal_output +1824,1914843,"TERMINAL",0,0,"14188",,terminal_output +1825,1915913,"TERMINAL",0,0,"25299",,terminal_output +1826,1916958,"TERMINAL",0,0,"3631:001:00",,terminal_output +1827,1917999,"TERMINAL",0,0,"47411",,terminal_output +1828,1919014,"TERMINAL",0,0,"58522",,terminal_output +1829,1920066,"TERMINAL",0,0,"69633",,terminal_output +1830,1921111,"TERMINAL",0,0,"750744",,terminal_output +1831,1922160,"TERMINAL",0,0,"82966",,terminal_output +1832,1923308,"TERMINAL",0,0,"2035077",,terminal_output +1833,1924257,"TERMINAL",0,0,"14188",,terminal_output +1834,1925355,"TERMINAL",0,0,"25299",,terminal_output +1835,1926377,"TERMINAL",0,0,"3631010",,terminal_output +1836,1927416,"TERMINAL",0,0,"47411",,terminal_output +1837,1928531,"TERMINAL",0,0,"58522",,terminal_output +1838,1929558,"TERMINAL",0,0,"69633",,terminal_output +1839,1931609,"TERMINAL",0,0,"71:00744",,terminal_output +1840,1931673,"TERMINAL",0,0,"81855",,terminal_output +1841,1932728,"TERMINAL",0,0,"92966",,terminal_output +1842,1933752,"TERMINAL",0,0,"3039:0077",,terminal_output +1843,1934744,"TERMINAL",0,0,"14188",,terminal_output +1844,1935904,"TERMINAL",0,0,"25299",,terminal_output +1845,1936854,"TERMINAL",0,0,"3632020",,terminal_output +1846,1937897,"TERMINAL",0,0,"47411",,terminal_output +1847,1939015,"TERMINAL",0,0,"58522",,terminal_output +1848,1939999,"TERMINAL",0,0,"69633",,terminal_output +1849,1941050,"TERMINAL",0,0,"710744",,terminal_output +1850,1942097,"TERMINAL",0,0,"81855",,terminal_output +1851,1943150,"TERMINAL",0,0,"931077",,terminal_output +1852,1944199,"TERMINAL",0,0,"414188",,terminal_output +1853,1945242,"TERMINAL",0,0,"25299",,terminal_output +1854,1946350,"TERMINAL",0,0,"3633030",,terminal_output +1855,1947364,"TERMINAL",0,0,"47411",,terminal_output +1856,1948412,"TERMINAL",0,0,"58522",,terminal_output 
+1857,1949521,"TERMINAL",0,0,"69633",,terminal_output +1858,1950544,"TERMINAL",0,0,"720744",,terminal_output +1859,1951569,"TERMINAL",0,0,"81855",,terminal_output +1860,1952625,"TERMINAL",0,0,"92966",,terminal_output +1861,1953649,"TERMINAL",0,0,"5032077",,terminal_output +1862,1954744,"TERMINAL",0,0,"14188",,terminal_output +1863,1955767,"TERMINAL",0,0,"25299",,terminal_output +1864,1956796,"TERMINAL",0,0,"3634040",,terminal_output +1865,1957851,"TERMINAL",0,0,"47411",,terminal_output +1866,1958942,"TERMINAL",0,0,"58522",,terminal_output +1867,1959943,"TERMINAL",0,0,"69633",,terminal_output +1868,1961093,"TERMINAL",0,0,"730744",,terminal_output +1869,1962044,"TERMINAL",0,0,"81855",,terminal_output +1870,1963093,"TERMINAL",0,0,"92966",,terminal_output +1871,1964140,"TERMINAL",0,0,"2:0043188",,terminal_output +1872,1965189,"TERMINAL",0,0,"25299",,terminal_output +1873,1966315,"TERMINAL",0,0,"3635050",,terminal_output +1874,1967340,"TERMINAL",0,0,"47411",,terminal_output +1875,1968364,"TERMINAL",0,0,"58522",,terminal_output +1876,1969386,"TERMINAL",0,0,"69633",,terminal_output +1877,1970513,"TERMINAL",0,0,"740744",,terminal_output +1878,1971460,"TERMINAL",0,0,"81855",,terminal_output +1879,1972505,"TERMINAL",0,0,"92966",,terminal_output +1880,1973543,"TERMINAL",0,0,"1034077",,terminal_output +1881,1974612,"TERMINAL",0,0,"14188",,terminal_output +1882,1975637,"TERMINAL",0,0,"25299",,terminal_output +1883,1976688,"TERMINAL",0,0,"3632:002:00",,terminal_output +1884,1977784,"TERMINAL",0,0,"47411",,terminal_output +1885,1978808,"TERMINAL",0,0,"58522",,terminal_output +1886,1979832,"TERMINAL",0,0,"69633",,terminal_output +1887,1980959,"TERMINAL",0,0,"750744",,terminal_output +1888,1981911,"TERMINAL",0,0,"81855",,terminal_output +1889,1982953,"TERMINAL",0,0,"92966",,terminal_output +1890,1984048,"TERMINAL",0,0,"2035077",,terminal_output +1891,1985104,"TERMINAL",0,0,"14188",,terminal_output +1892,1986134,"TERMINAL",0,0,"25299",,terminal_output +1893,1987149,"TERMINAL",0,0,"3741111",,terminal_output +1894,1988228,"TERMINAL",0,0,"58522",,terminal_output +1895,1989253,"TERMINAL",0,0,"69633",,terminal_output +1896,1990278,"TERMINAL",0,0,"72:00744",,terminal_output +1897,1991403,"TERMINAL",0,0,"81855",,terminal_output +1898,1992426,"TERMINAL",0,0,"92966",,terminal_output +1899,1993430,"TERMINAL",0,0,"30310:0077",,terminal_output +1900,1994474,"TERMINAL",0,0,"14188",,terminal_output +1901,1995513,"TERMINAL",0,0,"25299",,terminal_output +1902,1996558,"TERMINAL",0,0,"3632020",,terminal_output +1903,1997753,"TERMINAL",0,0,"47411",,terminal_output +1904,1998673,"TERMINAL",0,0,"58522",,terminal_output +1905,1999701,"TERMINAL",0,0,"69633",,terminal_output +1906,2000824,"TERMINAL",0,0,"710744",,terminal_output +1907,2001789,"TERMINAL",0,0,"81855",,terminal_output +1908,2002872,"TERMINAL",0,0,"92966",,terminal_output +1909,2004009,"TERMINAL",0,0,"4031077",,terminal_output +1910,2005023,"TERMINAL",0,0,"14188",,terminal_output +1911,2006012,"TERMINAL",0,0,"25299",,terminal_output +1912,2007076,"TERMINAL",0,0,"3633030",,terminal_output +1913,2008104,"TERMINAL",0,0,"47411",,terminal_output +1914,2009148,"TERMINAL",0,0,"59633",,terminal_output +1915,2010244,"TERMINAL",0,0,"720744",,terminal_output +1916,2011250,"TERMINAL",0,0,"81855",,terminal_output +1917,2012396,"TERMINAL",0,0,"92966",,terminal_output +1918,2013419,"TERMINAL",0,0,"5032077",,terminal_output +1919,2014398,"TERMINAL",0,0,"14188",,terminal_output +1920,2015469,"TERMINAL",0,0,"25299",,terminal_output 
+1921,2016622,"TERMINAL",0,0,"3634040",,terminal_output +1922,2017622,"TERMINAL",0,0,"47411",,terminal_output +1923,2018641,"TERMINAL",0,0,"58522",,terminal_output +1924,2019605,"TERMINAL",0,0,"69633",,terminal_output +1925,2020689,"TERMINAL",0,0,"730744",,terminal_output +1926,2021733,"TERMINAL",0,0,"81855",,terminal_output +1927,2022840,"TERMINAL",0,0,"92966",,terminal_output +1928,2023864,"TERMINAL",0,0,"3:0033077",,terminal_output +1929,2024889,"TERMINAL",0,0,"14188",,terminal_output +1930,2026021,"TERMINAL",0,0,"25299",,terminal_output +1931,2027039,"TERMINAL",0,0,"3635050",,terminal_output +1932,2028062,"TERMINAL",0,0,"47411",,terminal_output +1933,2029046,"TERMINAL",0,0,"58522",,terminal_output +1934,2030112,"TERMINAL",0,0,"69633",,terminal_output +1935,2031158,"TERMINAL",0,0,"741855",,terminal_output +1936,2032205,"TERMINAL",0,0,"92966",,terminal_output +1937,2033294,"TERMINAL",0,0,"1034077",,terminal_output +1938,2034309,"TERMINAL",0,0,"14188",,terminal_output +1939,2035387,"TERMINAL",0,0,"25299",,terminal_output +1940,2036404,"TERMINAL",0,0,"3633:003:00",,terminal_output +1941,2037483,"TERMINAL",0,0,"47411",,terminal_output +1942,2038610,"TERMINAL",0,0,"58522",,terminal_output +1943,2039564,"TERMINAL",0,0,"69633",,terminal_output +1944,2040656,"TERMINAL",0,0,"750744",,terminal_output +1945,2041657,"TERMINAL",0,0,"81855",,terminal_output +1946,2042809,"TERMINAL",0,0,"92966",,terminal_output +1947,2043757,"TERMINAL",0,0,"2035077",,terminal_output +1948,2044864,"TERMINAL",0,0,"14188",,terminal_output +1949,2045882,"TERMINAL",0,0,"25299",,terminal_output +1950,2047006,"TERMINAL",0,0,"3631010",,terminal_output +1951,2048038,"TERMINAL",0,0,"47411",,terminal_output +1952,2049055,"TERMINAL",0,0,"58522",,terminal_output +1953,2050063,"TERMINAL",0,0,"69633",,terminal_output +1954,2051109,"TERMINAL",0,0,"73:00744",,terminal_output +1955,2052150,"TERMINAL",0,0,"82966",,terminal_output +1956,2053253,"TERMINAL",0,0,"3031:0077",,terminal_output +1957,2054383,"TERMINAL",0,0,"14188",,terminal_output +1958,2055332,"TERMINAL",0,0,"25299",,terminal_output +1959,2056427,"TERMINAL",0,0,"3632020",,terminal_output +1960,2057420,"TERMINAL",0,0,"47411",,terminal_output +1961,2058477,"TERMINAL",0,0,"58522",,terminal_output +1962,2059603,"TERMINAL",0,0,"69633",,terminal_output +1963,2060545,"TERMINAL",0,0,"710744",,terminal_output +1964,2061638,"TERMINAL",0,0,"81855",,terminal_output +1965,2062625,"TERMINAL",0,0,"92966",,terminal_output +1966,2063699,"TERMINAL",0,0,"4031077",,terminal_output +1967,2064725,"TERMINAL",0,0,"14188",,terminal_output +1968,2065752,"TERMINAL",0,0,"25299",,terminal_output +1969,2066793,"TERMINAL",0,0,"3633030",,terminal_output +1970,2067896,"TERMINAL",0,0,"47411",,terminal_output +1971,2068883,"TERMINAL",0,0,"58522",,terminal_output +1972,2069926,"TERMINAL",0,0,"69633",,terminal_output +1973,2071071,"TERMINAL",0,0,"720744",,terminal_output +1974,2072013,"TERMINAL",0,0,"81855",,terminal_output +1975,2073122,"TERMINAL",0,0,"92966",,terminal_output +1976,2074076,"TERMINAL",0,0,"5032077",,terminal_output +1977,2075125,"TERMINAL",0,0,"14188",,terminal_output +1978,2076171,"TERMINAL",0,0,"2634040",,terminal_output +1979,2077227,"TERMINAL",0,0,"47411",,terminal_output +1980,2078257,"TERMINAL",0,0,"58522",,terminal_output +1981,2079367,"TERMINAL",0,0,"69633",,terminal_output +1982,2080391,"TERMINAL",0,0,"730744",,terminal_output +1983,2081395,"TERMINAL",0,0,"81855",,terminal_output +1984,2082445,"TERMINAL",0,0,"92966",,terminal_output 
+1985,2083565,"TERMINAL",0,0,"4:0033077",,terminal_output +1986,2084587,"TERMINAL",0,0,"14188",,terminal_output +1987,2085714,"TERMINAL",0,0,"25299",,terminal_output +1988,2086659,"TERMINAL",0,0,"3635050",,terminal_output +1989,2087763,"TERMINAL",0,0,"47411",,terminal_output +1990,2088792,"TERMINAL",0,0,"58522",,terminal_output +1991,2089914,"TERMINAL",0,0,"69633",,terminal_output +1992,2090936,"TERMINAL",0,0,"740744",,terminal_output +1993,2091963,"TERMINAL",0,0,"81855",,terminal_output +1994,2092972,"TERMINAL",0,0,"92966",,terminal_output +1995,2094054,"TERMINAL",0,0,"1034077",,terminal_output +1996,2095079,"TERMINAL",0,0,"14188",,terminal_output +1997,2096131,"TERMINAL",0,0,"25299",,terminal_output +1998,2097172,"TERMINAL",0,0,"3744:014:01",,terminal_output +1999,2098215,"TERMINAL",0,0,"58522",,terminal_output +2000,2099335,"TERMINAL",0,0,"69633",,terminal_output +2001,2100314,"TERMINAL",0,0,"750744",,terminal_output +2002,2101385,"TERMINAL",0,0,"81855",,terminal_output +2003,2102548,"TERMINAL",0,0,"92966",,terminal_output +2004,2103560,"TERMINAL",0,0,"2035077",,terminal_output +2005,2104567,"TERMINAL",0,0,"14188",,terminal_output +2006,2105691,"TERMINAL",0,0,"25299",,terminal_output +2007,2106624,"TERMINAL",0,0,"3631010",,terminal_output +2008,2107915,"TERMINAL",0,0,"47411",,terminal_output +2009,2108814,"TERMINAL",0,0,"58522",,terminal_output +2010,2109856,"TERMINAL",0,0,"69633",,terminal_output +2011,2111399,"TERMINAL",0,0,"74:00744",,terminal_output +2012,2111836,"TERMINAL",0,0,"81855",,terminal_output +2013,2112885,"TERMINAL",0,0,"92966",,terminal_output +2014,2113985,"TERMINAL",0,0,"3032:0077",,terminal_output +2015,2114985,"TERMINAL",0,0,"14188",,terminal_output +2016,2116047,"TERMINAL",0,0,"25299",,terminal_output +2017,2117083,"TERMINAL",0,0,"3632020",,terminal_output +2018,2118132,"TERMINAL",0,0,"47411",,terminal_output +2019,2119176,"TERMINAL",0,0,"59633",,terminal_output +2020,2120219,"TERMINAL",0,0,"710744",,terminal_output +2021,2121264,"TERMINAL",0,0,"81855",,terminal_output +2022,2122374,"TERMINAL",0,0,"92966",,terminal_output +2023,2123401,"TERMINAL",0,0,"40772 wrapCG12:09411699train_dy24:1371277",,terminal_output +2024,2124422,"TERMINAL",0,0,"1488",,terminal_output +2025,2125551,"TERMINAL",0,0,"2599",,terminal_output +2026,2126572,"TERMINAL",0,0,"363030",,terminal_output +2027,2127597,"TERMINAL",0,0,"4711",,terminal_output +2028,2128620,"TERMINAL",0,0,"5822",,terminal_output +2029,2129748,"TERMINAL",0,0,"6933",,terminal_output +2030,2130785,"TERMINAL",0,0,"72044",,terminal_output +2031,2131759,"TERMINAL",0,0,"8155",,terminal_output +2032,2132819,"TERMINAL",0,0,"9266",,terminal_output +2033,2133946,"TERMINAL",0,0,"\r50699train_dy R24:2371262to1:14:37633la7",,terminal_output +2034,2134978,"TERMINAL",0,0,"1488",,terminal_output +2035,2135993,"TERMINAL",0,0,"2599",,terminal_output +2036,2137121,"TERMINAL",0,0,"364040",,terminal_output +2037,2138101,"TERMINAL",0,0,"4711",,terminal_output +2038,2139114,"TERMINAL",0,0,"5822",,terminal_output +2039,2140178,"TERMINAL",0,0,"63044",,terminal_output +2040,2141217,"TERMINAL",0,0,"8155",,terminal_output +2041,2142263,"TERMINAL",0,0,"9266",,terminal_output +2042,2143302,"TERMINAL",0,0,"5:00377",,terminal_output +2043,2144390,"TERMINAL",0,0,"1488",,terminal_output +2044,2145416,"TERMINAL",0,0,"2599",,terminal_output +2045,2146439,"TERMINAL",0,0,"365050",,terminal_output +2046,2147564,"TERMINAL",0,0,"4711",,terminal_output +2047,2148588,"TERMINAL",0,0,"5822",,terminal_output +2048,2149575,"TERMINAL",0,0,"6933",,terminal_output 
+2049,2150622,"TERMINAL",0,0,"74044",,terminal_output +2050,2151660,"TERMINAL",0,0,"8155",,terminal_output +2051,2152789,"TERMINAL",0,0,"9266",,terminal_output +2052,2153755,"TERMINAL",0,0,"10377",,terminal_output +2053,2154835,"TERMINAL",0,0,"1488",,terminal_output +2054,2155860,"TERMINAL",0,0,"2599",,terminal_output +2055,2156883,"TERMINAL",0,0,"365:005:00",,terminal_output +2056,2157920,"TERMINAL",0,0,"4711",,terminal_output +2057,2159081,"TERMINAL",0,0,"5822",,terminal_output +2058,2159998,"TERMINAL",0,0,"6933",,terminal_output +2059,2161082,"TERMINAL",0,0,"75044",,terminal_output +2060,2162087,"TERMINAL",0,0,"8155",,terminal_output +2061,2163143,"TERMINAL",0,0,"9377",,terminal_output +2062,2164185,"TERMINAL",0,0,"21488",,terminal_output +2063,2165248,"TERMINAL",0,0,"2599",,terminal_output +2064,2166284,"TERMINAL",0,0,"361010",,terminal_output +2065,2167430,"TERMINAL",0,0,"4711",,terminal_output +2066,2168362,"TERMINAL",0,0,"5822",,terminal_output +2067,2169479,"TERMINAL",0,0,"6933",,terminal_output +2068,2170502,"TERMINAL",0,0,"75:0044",,terminal_output +2069,2171528,"TERMINAL",0,0,"8155",,terminal_output +2070,2172655,"TERMINAL",0,0,"9266",,terminal_output +2071,2173680,"TERMINAL",0,0,"30377",,terminal_output +2072,2174701,"TERMINAL",0,0,"1488",,terminal_output +2073,2175726,"TERMINAL",0,0,"2599",,terminal_output +2074,2176739,"TERMINAL",0,0,"362020",,terminal_output +2075,2177780,"TERMINAL",0,0,"4711",,terminal_output +2076,2178901,"TERMINAL",0,0,"5822",,terminal_output +2077,2179925,"TERMINAL",0,0,"6933",,terminal_output +2078,2180949,"TERMINAL",0,0,"71044",,terminal_output +2079,2182075,"TERMINAL",0,0,"8155",,terminal_output +2080,2183013,"TERMINAL",0,0,"9266",,terminal_output +2081,2184061,"TERMINAL",0,0,"40377",,terminal_output +2082,2185146,"TERMINAL",0,0,"1488",,terminal_output +2083,2186188,"TERMINAL",0,0,"263030",,terminal_output +2084,2187200,"TERMINAL",0,0,"4711",,terminal_output +2085,2188243,"TERMINAL",0,0,"5822",,terminal_output +2086,2189286,"TERMINAL",0,0,"6933",,terminal_output +2087,2190337,"TERMINAL",0,0,"72044",,terminal_output +2088,2191499,"TERMINAL",0,0,"8155",,terminal_output +2089,2192519,"TERMINAL",0,0,"9266",,terminal_output +2090,2193543,"TERMINAL",0,0,"50377",,terminal_output +2091,2194532,"TERMINAL",0,0,"1488",,terminal_output +2092,2195592,"TERMINAL",0,0,"2599",,terminal_output +2093,2196637,"TERMINAL",0,0,"364040",,terminal_output +2094,2197742,"TERMINAL",0,0,"4711",,terminal_output +2095,2198738,"TERMINAL",0,0,"5822",,terminal_output +2096,2199792,"TERMINAL",0,0,"6933",,terminal_output +2097,2200915,"TERMINAL",0,0,"73044",,terminal_output +2098,2201859,"TERMINAL",0,0,"8155",,terminal_output +2099,2202909,"TERMINAL",0,0,"9266",,terminal_output +2100,2203995,"TERMINAL",0,0,"6:00377",,terminal_output +2101,2205121,"TERMINAL",0,0,"1488",,terminal_output +2102,2206065,"TERMINAL",0,0,"2599",,terminal_output +2103,2207165,"TERMINAL",0,0,"365050",,terminal_output +2104,2208160,"TERMINAL",0,0,"4822",,terminal_output +2105,2209203,"TERMINAL",0,0,"6933",,terminal_output +2106,2210251,"TERMINAL",0,0,"74044",,terminal_output +2107,2211296,"TERMINAL",0,0,"8155",,terminal_output +2108,2212347,"TERMINAL",0,0,"9266",,terminal_output +2109,2213512,"TERMINAL",0,0,"10377",,terminal_output +2110,2214536,"TERMINAL",0,0,"1488",,terminal_output +2111,2215491,"TERMINAL",0,0,"2599",,terminal_output +2112,2216625,"TERMINAL",0,0,"366:006:00",,terminal_output +2113,2217601,"TERMINAL",0,0,"4711",,terminal_output +2114,2218734,"TERMINAL",0,0,"5822",,terminal_output 
+2115,2219757,"TERMINAL",0,0,"6933",,terminal_output +2116,2220781,"TERMINAL",0,0,"75044",,terminal_output +2117,2221790,"TERMINAL",0,0,"8155",,terminal_output +2118,2222838,"TERMINAL",0,0,"9266",,terminal_output +2119,2223958,"TERMINAL",0,0,"20377",,terminal_output +2120,2224982,"TERMINAL",0,0,"1488",,terminal_output +2121,2226004,"TERMINAL",0,0,"2599",,terminal_output +2122,2227028,"TERMINAL",0,0,"361010",,terminal_output +2123,2228238,"TERMINAL",0,0,"4711",,terminal_output +2124,2229112,"TERMINAL",0,0,"5822",,terminal_output +2125,2230205,"TERMINAL",0,0,"66:0044",,terminal_output +2126,2231213,"TERMINAL",0,0,"8155",,terminal_output +2127,2232258,"TERMINAL",0,0,"9266",,terminal_output +2128,2233381,"TERMINAL",0,0,"30377",,terminal_output +2129,2234400,"TERMINAL",0,0,"1488",,terminal_output +2130,2235425,"TERMINAL",0,0,"2599",,terminal_output +2131,2236448,"TERMINAL",0,0,"362020",,terminal_output +2132,2237577,"TERMINAL",0,0,"4711",,terminal_output +2133,2238611,"TERMINAL",0,0,"5822",,terminal_output +2134,2239568,"TERMINAL",0,0,"6933",,terminal_output +2135,2240648,"TERMINAL",0,0,"71044",,terminal_output +2136,2241670,"TERMINAL",0,0,"8155",,terminal_output +2137,2242799,"TERMINAL",0,0,"9266",,terminal_output +2138,2243822,"TERMINAL",0,0,"40377",,terminal_output +2139,2244830,"TERMINAL",0,0,"1488",,terminal_output +2140,2245883,"TERMINAL",0,0,"2599",,terminal_output +2141,2246941,"TERMINAL",0,0,"363030",,terminal_output +2142,2248021,"TERMINAL",0,0,"4711",,terminal_output +2143,2249045,"TERMINAL",0,0,"5822",,terminal_output +2144,2250076,"TERMINAL",0,0,"6933",,terminal_output +2145,2251197,"TERMINAL",0,0,"72044",,terminal_output +2146,2252210,"TERMINAL",0,0,"8266",,terminal_output +2147,2253200,"TERMINAL",0,0,"50377",,terminal_output +2148,2254275,"TERMINAL",0,0,"1488",,terminal_output +2149,2255290,"TERMINAL",0,0,"2599",,terminal_output +2150,2256326,"TERMINAL",0,0,"364040",,terminal_output +2151,2257440,"TERMINAL",0,0,"4711",,terminal_output +2152,2258414,"TERMINAL",0,0,"5822",,terminal_output +2153,2259464,"TERMINAL",0,0,"6933",,terminal_output +2154,2260514,"TERMINAL",0,0,"73044",,terminal_output +2155,2261639,"TERMINAL",0,0,"8155",,terminal_output +2156,2262666,"TERMINAL",0,0,"9266",,terminal_output +2157,2263674,"TERMINAL",0,0,"7:00377",,terminal_output +2158,2264816,"TERMINAL",0,0,"1488",,terminal_output +2159,2265838,"TERMINAL",0,0,"2599",,terminal_output +2160,2266818,"TERMINAL",0,0,"365050",,terminal_output +2161,2267859,"TERMINAL",0,0,"4711",,terminal_output +2162,2268905,"TERMINAL",0,0,"5822",,terminal_output +2163,2270038,"TERMINAL",0,0,"6933",,terminal_output +2164,2271060,"TERMINAL",0,0,"74044",,terminal_output +2165,2272085,"TERMINAL",0,0,"8155",,terminal_output +2166,2273066,"TERMINAL",0,0,"9266",,terminal_output +2167,2274125,"TERMINAL",0,0,"10377",,terminal_output +2168,2275158,"TERMINAL",0,0,"1599",,terminal_output +2169,2276193,"TERMINAL",0,0,"367:007:00",,terminal_output +2170,2277247,"TERMINAL",0,0,"4711",,terminal_output +2171,2278309,"TERMINAL",0,0,"5822",,terminal_output +2172,2279357,"TERMINAL",0,0,"6933",,terminal_output +2173,2280406,"TERMINAL",0,0,"75044",,terminal_output +2174,2281440,"TERMINAL",0,0,"8155",,terminal_output +2175,2282530,"TERMINAL",0,0,"9266",,terminal_output +2176,2283553,"TERMINAL",0,0,"20377",,terminal_output +2177,2284584,"TERMINAL",0,0,"1488",,terminal_output +2178,2285704,"TERMINAL",0,0,"2599",,terminal_output +2179,2286658,"TERMINAL",0,0,"361010",,terminal_output +2180,2287865,"TERMINAL",0,0,"4711",,terminal_output 
+2181,2288756,"TERMINAL",0,0,"5822",,terminal_output +2182,2289902,"TERMINAL",0,0,"6933",,terminal_output +2183,2290927,"TERMINAL",0,0,"77:0044",,terminal_output +2184,2291952,"TERMINAL",0,0,"8155",,terminal_output +2185,2292976,"TERMINAL",0,0,"9266",,terminal_output +2186,2294000,"TERMINAL",0,0,"30377",,terminal_output +2187,2295131,"TERMINAL",0,0,"1488",,terminal_output +2188,2296149,"TERMINAL",0,0,"2599",,terminal_output +2189,2297172,"TERMINAL",0,0,"362020",,terminal_output +2190,2298199,"TERMINAL",0,0,"4822",,terminal_output +2191,2299203,"TERMINAL",0,0,"6933",,terminal_output +2192,2300244,"TERMINAL",0,0,"71044",,terminal_output +2193,2301280,"TERMINAL",0,0,"8155",,terminal_output +2194,2302319,"TERMINAL",0,0,"9266",,terminal_output +2195,2303373,"TERMINAL",0,0,"40377",,terminal_output +2196,2304443,"TERMINAL",0,0,"1488",,terminal_output +2197,2305471,"TERMINAL",0,0,"2599",,terminal_output +2198,2306492,"TERMINAL",0,0,"363030",,terminal_output +2199,2307618,"TERMINAL",0,0,"4711",,terminal_output +2200,2308643,"TERMINAL",0,0,"5822",,terminal_output +2201,2309657,"TERMINAL",0,0,"6933",,terminal_output +2202,2310797,"TERMINAL",0,0,"72044",,terminal_output +2203,2311746,"TERMINAL",0,0,"8155",,terminal_output +2204,2312842,"TERMINAL",0,0,"9266",,terminal_output +2205,2313865,"TERMINAL",0,0,"50377",,terminal_output +2206,2314890,"TERMINAL",0,0,"1488",,terminal_output +2207,2315925,"TERMINAL",0,0,"2599",,terminal_output +2208,2317038,"TERMINAL",0,0,"364040",,terminal_output +2209,2318015,"TERMINAL",0,0,"4711",,terminal_output +2210,2319086,"TERMINAL",0,0,"5822",,terminal_output +2211,2320110,"TERMINAL",0,0,"6933",,terminal_output +2212,2321239,"TERMINAL",0,0,"73155",,terminal_output +2213,2322197,"TERMINAL",0,0,"9266",,terminal_output +2214,2323239,"TERMINAL",0,0,"8:00377",,terminal_output +2215,2324283,"TERMINAL",0,0,"1488",,terminal_output +2216,2325326,"TERMINAL",0,0,"2599",,terminal_output +2217,2326369,"TERMINAL",0,0,"365050",,terminal_output +2218,2327483,"TERMINAL",0,0,"4711",,terminal_output +2219,2328507,"TERMINAL",0,0,"5822",,terminal_output +2220,2329531,"TERMINAL",0,0,"6933",,terminal_output +2221,2330658,"TERMINAL",0,0,"74044",,terminal_output +2222,2331622,"TERMINAL",0,0,"8155",,terminal_output +2223,2332720,"TERMINAL",0,0,"9266",,terminal_output +2224,2333730,"TERMINAL",0,0,"10377",,terminal_output +2225,2334754,"TERMINAL",0,0,"1488",,terminal_output +2226,2335765,"TERMINAL",0,0,"2599",,terminal_output +2227,2336808,"TERMINAL",0,0,"368:008:00",,terminal_output +2228,2337929,"TERMINAL",0,0,"4711",,terminal_output +2229,2338952,"TERMINAL",0,0,"5822",,terminal_output +2230,2339977,"TERMINAL",0,0,"6933",,terminal_output +2231,2341001,"TERMINAL",0,0,"75044",,terminal_output +2232,2342029,"TERMINAL",0,0,"8155",,terminal_output +2233,2343078,"TERMINAL",0,0,"9266",,terminal_output +2234,2344121,"TERMINAL",0,0,"20377",,terminal_output +2235,2345201,"TERMINAL",0,0,"1599",,terminal_output +2236,2346223,"TERMINAL",0,0,"361010",,terminal_output +2237,2347252,"TERMINAL",0,0,"4711",,terminal_output +2238,2348302,"TERMINAL",0,0,"5822",,terminal_output +2239,2349397,"TERMINAL",0,0,"6933",,terminal_output +2240,2350396,"TERMINAL",0,0,"78:0044",,terminal_output +2241,2351447,"TERMINAL",0,0,"8155",,terminal_output +2242,2352572,"TERMINAL",0,0,"9266",,terminal_output +2243,2353596,"TERMINAL",0,0,"30377",,terminal_output +2244,2354621,"TERMINAL",0,0,"1488",,terminal_output +2245,2355645,"TERMINAL",0,0,"2599",,terminal_output +2246,2356653,"TERMINAL",0,0,"362020",,terminal_output 
+2247,2357794,"TERMINAL",0,0,"4711",,terminal_output +2248,2358757,"TERMINAL",0,0,"5822",,terminal_output +2249,2359804,"TERMINAL",0,0,"6933",,terminal_output +2250,2360841,"TERMINAL",0,0,"71044",,terminal_output +2251,2361888,"TERMINAL",0,0,"8155",,terminal_output +2252,2363018,"TERMINAL",0,0,"9266",,terminal_output +2253,2364040,"TERMINAL",0,0,"40377",,terminal_output +2254,2365034,"TERMINAL",0,0,"1488",,terminal_output +2255,2366092,"TERMINAL",0,0,"2599",,terminal_output +2256,2367217,"TERMINAL",0,0,"363030",,terminal_output +2257,2368240,"TERMINAL",0,0,"4822",,terminal_output +2258,2369232,"TERMINAL",0,0,"6933",,terminal_output +2259,2370285,"TERMINAL",0,0,"72044",,terminal_output +2260,2371325,"TERMINAL",0,0,"8155",,terminal_output +2261,2372376,"TERMINAL",0,0,"9266",,terminal_output +2262,2373413,"TERMINAL",0,0,"50377",,terminal_output +2263,2374451,"TERMINAL",0,0,"1488",,terminal_output +2264,2375511,"TERMINAL",0,0,"2599",,terminal_output +2265,2376537,"TERMINAL",0,0,"364040",,terminal_output +2266,2377662,"TERMINAL",0,0,"4711",,terminal_output +2267,2378612,"TERMINAL",0,0,"5822",,terminal_output +2268,2379708,"TERMINAL",0,0,"6933",,terminal_output +2269,2380711,"TERMINAL",0,0,"73044",,terminal_output +2270,2381750,"TERMINAL",0,0,"8155",,terminal_output +2271,2382883,"TERMINAL",0,0,"9266",,terminal_output +2272,2383850,"TERMINAL",0,0,"9:00377",,terminal_output +2273,2384931,"TERMINAL",0,0,"1488",,terminal_output +2274,2385931,"TERMINAL",0,0,"2599",,terminal_output +2275,2386981,"TERMINAL",0,0,"365050",,terminal_output +2276,2388105,"TERMINAL",0,0,"4711",,terminal_output +2277,2389076,"TERMINAL",0,0,"5822",,terminal_output +2278,2390129,"TERMINAL",0,0,"6933",,terminal_output +2279,2391182,"TERMINAL",0,0,"74155",,terminal_output +2280,2392227,"TERMINAL",0,0,"9266",,terminal_output +2281,2393281,"TERMINAL",0,0,"10377",,terminal_output +2282,2394324,"TERMINAL",0,0,"1488",,terminal_output +2283,2395375,"TERMINAL",0,0,"2599",,terminal_output +2284,2396457,"TERMINAL",0,0,"369:009:00",,terminal_output +2285,2397454,"TERMINAL",0,0,"4711",,terminal_output +2286,2398495,"TERMINAL",0,0,"5822",,terminal_output +2287,2399574,"TERMINAL",0,0,"6933",,terminal_output +2288,2400701,"TERMINAL",0,0,"75044",,terminal_output +2289,2401634,"TERMINAL",0,0,"8155",,terminal_output +2290,2402748,"TERMINAL",0,0,"9266",,terminal_output +2291,2403773,"TERMINAL",0,0,"20377",,terminal_output +2292,2404764,"TERMINAL",0,0,"1488",,terminal_output +2293,2405924,"TERMINAL",0,0,"2599",,terminal_output +2294,2406947,"TERMINAL",0,0,"361010",,terminal_output +2295,2407971,"TERMINAL",0,0,"4711",,terminal_output +2296,2408996,"TERMINAL",0,0,"5822",,terminal_output +2297,2410121,"TERMINAL",0,0,"6933",,terminal_output +2298,2411146,"TERMINAL",0,0,"79:0044",,terminal_output +2299,2412169,"TERMINAL",0,0,"8155",,terminal_output +2300,2413193,"TERMINAL",0,0,"9377",,terminal_output +2301,2414198,"TERMINAL",0,0,"31488",,terminal_output +2302,2415250,"TERMINAL",0,0,"2599",,terminal_output +2303,2416299,"TERMINAL",0,0,"362020",,terminal_output +2304,2417392,"TERMINAL",0,0,"4711",,terminal_output +2305,2418417,"TERMINAL",0,0,"5822",,terminal_output +2306,2419428,"TERMINAL",0,0,"6933",,terminal_output +2307,2420567,"TERMINAL",0,0,"71044",,terminal_output +2308,2421511,"TERMINAL",0,0,"8155",,terminal_output +2309,2422613,"TERMINAL",0,0,"9266",,terminal_output +2310,2423605,"TERMINAL",0,0,"40377",,terminal_output +2311,2424663,"TERMINAL",0,0,"1488",,terminal_output +2312,2425692,"TERMINAL",0,0,"2599",,terminal_output 
+2313,2426773,"TERMINAL",0,0,"363030",,terminal_output +2314,2427836,"TERMINAL",0,0,"4711",,terminal_output +2315,2428861,"TERMINAL",0,0,"5822",,terminal_output +2316,2429886,"TERMINAL",0,0,"6933",,terminal_output +2317,2431039,"TERMINAL",0,0,"72044",,terminal_output +2318,2432038,"TERMINAL",0,0,"8155",,terminal_output +2319,2433059,"TERMINAL",0,0,"9266",,terminal_output +2320,2434056,"TERMINAL",0,0,"50377",,terminal_output +2321,2435095,"TERMINAL",0,0,"1488",,terminal_output +2322,2436141,"TERMINAL",0,0,"264040",,terminal_output +2323,2437257,"TERMINAL",0,0,"4711",,terminal_output +2324,2438284,"TERMINAL",0,0,"5822",,terminal_output +2325,2439290,"TERMINAL",0,0,"6933",,terminal_output +2326,2440314,"TERMINAL",0,0,"73044",,terminal_output +2327,2441356,"TERMINAL",0,0,"8155",,terminal_output +2328,2442395,"TERMINAL",0,0,"9266",,terminal_output +2329,2443505,"TERMINAL",0,0,"10:00377",,terminal_output +2330,2444528,"TERMINAL",0,0,"1488",,terminal_output +2331,2445556,"TERMINAL",0,0,"2599",,terminal_output +2332,2446637,"TERMINAL",0,0,"365050",,terminal_output +2333,2447630,"TERMINAL",0,0,"4711",,terminal_output +2334,2448727,"TERMINAL",0,0,"5822",,terminal_output +2335,2449753,"TERMINAL",0,0,"6933",,terminal_output +2336,2450877,"TERMINAL",0,0,"74044",,terminal_output +2337,2451800,"TERMINAL",0,0,"8155",,terminal_output +2338,2452924,"TERMINAL",0,0,"9266",,terminal_output +2339,2453954,"TERMINAL",0,0,"10377",,terminal_output +2340,2454983,"TERMINAL",0,0,"1488",,terminal_output +2341,2456005,"TERMINAL",0,0,"2599",,terminal_output +2342,2457058,"TERMINAL",0,0,"3620:0020:00",,terminal_output +2343,2458147,"TERMINAL",0,0,"4711",,terminal_output +2344,2459160,"TERMINAL",0,0,"5933",,terminal_output +2345,2460223,"TERMINAL",0,0,"75044",,terminal_output +2346,2461262,"TERMINAL",0,0,"8155",,terminal_output +2347,2462307,"TERMINAL",0,0,"9266",,terminal_output +2348,2463344,"TERMINAL",0,0,"20377",,terminal_output +2349,2464396,"TERMINAL",0,0,"1488",,terminal_output +2350,2465520,"TERMINAL",0,0,"2599",,terminal_output +2351,2466545,"TERMINAL",0,0,"361010",,terminal_output +2352,2467624,"TERMINAL",0,0,"4711",,terminal_output +2353,2468559,"TERMINAL",0,0,"5822",,terminal_output +2354,2469617,"TERMINAL",0,0,"6933",,terminal_output +2355,2470632,"TERMINAL",0,0,"730:0044",,terminal_output +2356,2471676,"TERMINAL",0,0,"8155",,terminal_output +2357,2472719,"TERMINAL",0,0,"9266",,terminal_output +2358,2473817,"TERMINAL",0,0,"30377",,terminal_output +2359,2474792,"TERMINAL",0,0,"1488",,terminal_output +2360,2475837,"TERMINAL",0,0,"2599",,terminal_output +2361,2476989,"TERMINAL",0,0,"362020",,terminal_output +2362,2478014,"TERMINAL",0,0,"4711",,terminal_output +2363,2479037,"TERMINAL",0,0,"5822",,terminal_output +2364,2480062,"TERMINAL",0,0,"6933",,terminal_output +2365,2481190,"TERMINAL",0,0,"71044",,terminal_output +2366,2482212,"TERMINAL",0,0,"8155",,terminal_output +2367,2483237,"TERMINAL",0,0,"9377",,terminal_output +2368,3359187,"TERMINAL",0,0,"bash",,terminal_focus diff --git a/927a8af5474e5654810c00ce2e09fd2de87d3e5722f33fa1090d867db114e403/crowd-code-640d2ea2-6d4b-4f60-ac22-96274589d9ad1759267592825-2025_09_30-23.27.51.17/source.csv b/927a8af5474e5654810c00ce2e09fd2de87d3e5722f33fa1090d867db114e403/crowd-code-640d2ea2-6d4b-4f60-ac22-96274589d9ad1759267592825-2025_09_30-23.27.51.17/source.csv new file mode 100644 index 0000000000000000000000000000000000000000..301e4102d95fe64f4ab5449cc8b9f97c065adbfd --- /dev/null +++ 
b/927a8af5474e5654810c00ce2e09fd2de87d3e5722f33fa1090d867db114e403/crowd-code-640d2ea2-6d4b-4f60-ac22-96274589d9ad1759267592825-2025_09_30-23.27.51.17/source.csv @@ -0,0 +1,227 @@ +Sequence,Time,File,RangeOffset,RangeLength,Text,Language,Type +1,8,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_no_flash_attn.sh",0,0,"#!/usr/bin/env bash\n\n#SBATCH --nodes=1\n#SBATCH --ntasks-per-node=1\n#SBATCH --time=24:00:00\n#SBATCH --cpus-per-task=8\n#SBATCH --gres=gpu:1\n#SBATCH --output=/fast/project/HFMI_SynergyUnit/jafar_ws/logs/franz/coinrun/dynamics/%x_%j.log\n#SBATCH --error=/fast/project/HFMI_SynergyUnit/jafar_ws/logs/franz/coinrun/dynamics/%x_%j.log\n#SBATCH --job-name=dynamics_coinrun_mila_submission_no_flash_attn\n#SBATCH --requeue\n#SBATCH --signal=b:usr1@300 # 5 min before timeout\n\n# --- signal trap to requeue job before timeout ---\nrequeue_job() {\n echo ""[$(date)] caught sigusr1 (timeout warning), requeueing slurm job $SLURM_JOB_ID...""\n # optional: trigger checkpoint saving here\n # e.g., touch $checkpoint_dir/requeue_trigger\n scontrol requeue $SLURM_JOB_ID\n exit 0\n}\n\ntrap requeue_job sigusr1\n\n# set checkpoint flag based on restart count\nrestart_count=$(scontrol show job $SLURM_JOB_ID | grep -o 'Restarts=[0-9]*' | cut -d'=' -f2)\n\nif [ $restart_count -eq 0 ]; then\n restore_ckpt_flag=""--no-restore-ckpt""\nelse\n restore_ckpt_flag=""--restore-ckpt""\nfi\n\n\n\n# Log the sbatch script\ncat $0\n\nsource .venv/bin/activate\n\njob_name=$SLURM_JOB_NAME\nslurm_job_id=$SLURM_JOB_ID\n\ntags=""coinrun dynamics 500m_dataset mila_submission ablation no-flash-attn""\n\narray_records_dir=""/fast/project/HFMI_SynergyUnit/jafar_ws/data/coinrun/array_records_500m_seed_w_increment""\ntokenizer_ckpt_dir=""/fast/project/HFMI_SynergyUnit/jafar_ws/checkpoints/coinrun/tokenizer/tokenizer_coinrun_mila_submission_29736/""\nCHECKPOINT_DIR=""/fast/project/HFMI_SynergyUnit/jafar_ws/checkpoints/coinrun/dynamics/${job_name}/${slurm_job_id}""\nmkdir -p $CHECKPOINT_DIR\n\nenv | grep SLURM\n\n\nsrun python jasmine/train_dynamics.py \\n --no-use-flash-attention \\n --save_ckpt \\n $restore_ckpt_flag \\n --wandb_id $SLURM_JOB_ID \\n --ckpt_dir $CHECKPOINT_DIR \\n --name=""${job_name}_${slurm_job_id}"" \\n --tags ${tags} \\n --entity instant-uv \\n --project jafar \\n --tokenizer_checkpoint=""${tokenizer_ckpt_dir}"" \\n --val_data_dir=""${array_records_dir}/val"" \\n --data_dir=""${array_records_dir}/train"" &\n\nchild_pid=$!\n\nwait $child_pid\n\n",shellscript,tab +2,5021,"extension-output-pdoom-org.crowd-code-#1-crowd-code",0,0,"11:27:50 PM [info] Activating crowd-code\n11:27:51 PM [info] Recording started\n11:27:51 PM [info] Initializing git provider using file system watchers...\n11:27:53 PM [info] Retrying git provider initialization...\n",Log,tab +3,5680,"extension-output-pdoom-org.crowd-code-#1-crowd-code",212,0,"11:27:54 PM [info] Git repository found\n11:27:54 PM [info] Git provider initialized successfully\n11:27:54 PM [info] Initial git state: [object Object]\n11:27:56 PM [info] Git repository found\n11:27:56 PM [info] Git provider initialized successfully\n11:27:56 PM [info] Initial git state: [object Object]\n",Log,content +4,397553,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_no_flash_attn.sh",0,0,"",shellscript,tab +5,416841,"jasmine/train_dynamics.py",0,0,"import os\n\n\nos.environ.setdefault(""XLA_PYTHON_CLIENT_MEM_FRACTION"", ""0.98"")\n\nfrom dataclasses import dataclass, field\nimport itertools\nfrom typing import cast, 
Optional\n\nimport einops\nfrom jax.sharding import Mesh, PartitionSpec, NamedSharding\nfrom jax.experimental.mesh_utils import create_device_mesh\nimport optax\nimport orbax.checkpoint as ocp\nimport numpy as np\nimport dm_pix as pix\nimport jax\nimport jax.numpy as jnp\nimport tyro\nimport wandb\nimport grain\nimport flax.nnx as nnx\n\nfrom genie import Genie, restore_genie_components\nfrom utils.dataloader import get_dataloader\nfrom utils.train_utils import (\n get_lr_schedule,\n count_parameters_by_component,\n print_mem_stats,\n print_compiled_memory_stats,\n print_compiled_cost_analysis,\n)\n\n\n@dataclass\nclass Args:\n # Experiment\n num_steps: int = 200_000\n seed: int = 0\n seq_len: int = 16\n image_channels: int = 3\n image_height: int = 64\n image_width: int = 64\n data_dir: str = """"\n save_ckpt: bool = False\n restore_ckpt: bool = False\n # Optimization\n batch_size: int = 36\n init_lr: float = 0.0\n max_lr: float = 3e-5\n decay_end: float = 0.0\n wsd_decay_steps: int = (\n 20_000 # NOTE: wsd_decay_steps will only be used when using a wsd-schedule\n )\n warmup_steps: int = 5000\n lr_schedule: str = ""wsd"" # supported options: wsd, cos\n # Tokenizer\n tokenizer_dim: int = 512\n tokenizer_ffn_dim: int = 2048\n latent_patch_dim: int = 32\n num_patch_latents: int = 1024\n patch_size: int = 16\n tokenizer_num_blocks: int = 4\n tokenizer_num_heads: int = 8\n tokenizer_checkpoint: str = """"\n # LAM\n lam_dim: int = 512\n lam_ffn_dim: int = 2048\n latent_action_dim: int = 32\n num_actions: int = 6\n lam_patch_size: int = 16\n lam_num_blocks: int = 4\n lam_num_heads: int = 8\n lam_checkpoint: str = """"\n # Dynamics\n dyna_type: str = ""maskgit"" # supported options: maskgit, causal\n dyna_dim: int = 512\n dyna_ffn_dim: int = 2048\n dyna_num_blocks: int = 6\n dyna_num_heads: int = 8\n dropout: float = 0.0\n mask_limit: float = 0.5\n z_loss_weight: float = 0.0\n param_dtype = jnp.float32\n dtype = jnp.bfloat16\n use_flash_attention: bool = True\n use_gt_actions: bool = False\n # Logging\n log: bool = True\n entity: str = """"\n project: str = """"\n name: str = ""train_dynamics""\n tags: list[str] = field(default_factory=lambda: [""dynamics""])\n log_interval: int = 50\n log_image_interval: int = 1000\n ckpt_dir: str = """"\n log_checkpoint_interval: int = 5000\n log_checkpoint_keep_period: int = 20_000\n log_gradients: bool = False\n val_data_dir: str = """"\n val_interval: int = 20_000\n val_steps: int = 50\n eval_full_frame: bool = True\n val_maskgit_steps: int = 25\n val_temperature: float = 1\n val_sample_argmax: bool = False\n wandb_id: str = """"\n\n\ndef build_model(args: Args, rng: jax.Array) -> tuple[Genie, jax.Array]:\n rng, _rng = jax.random.split(rng)\n rngs = nnx.Rngs(_rng)\n genie = Genie(\n # Tokenizer\n in_dim=args.image_channels,\n tokenizer_dim=args.tokenizer_dim,\n tokenizer_ffn_dim=args.tokenizer_ffn_dim,\n latent_patch_dim=args.latent_patch_dim,\n num_patch_latents=args.num_patch_latents,\n patch_size=args.patch_size,\n tokenizer_num_blocks=args.tokenizer_num_blocks,\n tokenizer_num_heads=args.tokenizer_num_heads,\n # LAM\n lam_dim=args.lam_dim,\n lam_ffn_dim=args.lam_ffn_dim,\n latent_action_dim=args.latent_action_dim,\n num_actions=args.num_actions,\n lam_patch_size=args.lam_patch_size,\n lam_num_blocks=args.lam_num_blocks,\n lam_num_heads=args.lam_num_heads,\n lam_co_train=not args.lam_checkpoint,\n use_gt_actions=args.use_gt_actions,\n # Dynamics\n dyna_type=args.dyna_type,\n dyna_dim=args.dyna_dim,\n dyna_ffn_dim=args.dyna_ffn_dim,\n 
dyna_num_blocks=args.dyna_num_blocks,\n dyna_num_heads=args.dyna_num_heads,\n dropout=args.dropout,\n mask_limit=args.mask_limit,\n param_dtype=args.param_dtype,\n dtype=args.dtype,\n use_flash_attention=args.use_flash_attention,\n decode=False,\n rngs=rngs,\n )\n if args.use_gt_actions:\n assert (\n not args.lam_checkpoint\n ), ""Cannot use LAM when using ground-truth actions.""\n else:\n assert genie.lam is not None\n del genie.lam.decoder\n return genie, rng\n\n\ndef build_optimizer(genie: Genie, args: Args) -> nnx.ModelAndOptimizer:\n lr_schedule = get_lr_schedule(\n args.lr_schedule,\n args.init_lr,\n args.max_lr,\n args.decay_end,\n args.num_steps,\n args.warmup_steps,\n args.wsd_decay_steps,\n )\n tx = optax.adamw(\n learning_rate=lr_schedule,\n b1=0.9,\n b2=0.9,\n weight_decay=1e-4,\n mu_dtype=args.param_dtype, # moments in full precision\n )\n optimizer = nnx.ModelAndOptimizer(genie, tx)\n return optimizer\n\n\ndef build_mesh_and_sharding(\n num_devices: int,\n) -> tuple[Mesh, NamedSharding, NamedSharding, NamedSharding]:\n device_mesh_arr = create_device_mesh((num_devices,))\n mesh = Mesh(devices=device_mesh_arr, axis_names=(""data"",))\n replicated_sharding = NamedSharding(mesh, PartitionSpec())\n videos_sharding = NamedSharding(mesh, PartitionSpec(""data"", None, None, None, None))\n actions_sharding = NamedSharding(mesh, PartitionSpec(""data"", None))\n return mesh, replicated_sharding, videos_sharding, actions_sharding\n\n\ndef shard_optimizer_states(\n optimizer: nnx.ModelAndOptimizer, replicated_sharding: NamedSharding\n) -> None:\n model_state = nnx.state(optimizer.model)\n model_sharded_state = jax.lax.with_sharding_constraint(\n model_state, replicated_sharding\n )\n nnx.update(optimizer.model, model_sharded_state)\n optimizer_state = nnx.state(optimizer, nnx.optimizer.OptState)\n optimizer_sharded_state = jax.lax.with_sharding_constraint(\n optimizer_state, replicated_sharding\n )\n nnx.update(optimizer, optimizer_sharded_state)\n\n\ndef build_dataloader(args: Args, data_dir: str) -> grain.DataLoaderIterator:\n image_shape = (args.image_height, args.image_width, args.image_channels)\n array_record_files = [\n os.path.join(data_dir, x)\n for x in os.listdir(data_dir)\n if x.endswith("".array_record"")\n ]\n grain_dataloader = get_dataloader(\n array_record_files,\n args.seq_len,\n # NOTE: We deliberately pass the global batch size\n # The dataloader shards the dataset across all processes\n args.batch_size,\n *image_shape,\n num_workers=8,\n prefetch_buffer_size=1,\n seed=args.seed,\n )\n initial_state = grain_dataloader._create_initial_state()\n grain_iterator = grain.DataLoaderIterator(grain_dataloader, initial_state)\n return grain_iterator\n\n\ndef build_checkpoint_manager(args: Args) -> Optional[ocp.CheckpointManager]:\n if args.restore_ckpt or args.save_ckpt:\n handler_registry = ocp.handlers.DefaultCheckpointHandlerRegistry()\n handler_registry.add(\n ""model_state"", ocp.args.PyTreeSave, ocp.handlers.PyTreeCheckpointHandler\n )\n handler_registry.add(\n ""model_state"", ocp.args.PyTreeRestore, ocp.handlers.PyTreeCheckpointHandler\n )\n handler_registry.add(\n ""train_dataloader_state"",\n grain.checkpoint.CheckpointSave,\n cast(ocp.handlers.CheckpointHandler, grain.checkpoint.CheckpointHandler),\n )\n handler_registry.add(\n ""train_dataloader_state"",\n grain.checkpoint.CheckpointRestore,\n cast(ocp.handlers.CheckpointHandler, grain.checkpoint.CheckpointHandler),\n )\n if args.val_data_dir:\n handler_registry.add(\n ""val_dataloader_state"",\n 
grain.checkpoint.CheckpointSave,\n cast(\n ocp.handlers.CheckpointHandler, grain.checkpoint.CheckpointHandler\n ),\n )\n handler_registry.add(\n ""val_dataloader_state"",\n grain.checkpoint.CheckpointRestore,\n cast(\n ocp.handlers.CheckpointHandler, grain.checkpoint.CheckpointHandler\n ),\n )\n checkpoint_options = ocp.CheckpointManagerOptions(\n save_interval_steps=args.log_checkpoint_interval,\n max_to_keep=3,\n keep_period=args.log_checkpoint_keep_period,\n step_format_fixed_length=6,\n cleanup_tmp_directories=True,\n )\n checkpoint_manager = ocp.CheckpointManager(\n args.ckpt_dir,\n options=checkpoint_options,\n handler_registry=handler_registry,\n )\n return checkpoint_manager\n else:\n return None\n\n\ndef restore_or_initialize_components(\n args: Args,\n checkpoint_manager: Optional[ocp.CheckpointManager],\n optimizer: nnx.ModelAndOptimizer,\n train_iterator: grain.DataLoaderIterator,\n rng: jax.Array,\n replicated_sharding: NamedSharding,\n val_iterator: Optional[grain.DataLoaderIterator],\n restore_step: Optional[int] = None,\n) -> tuple[\n int,\n nnx.ModelAndOptimizer,\n grain.DataLoaderIterator,\n grain.DataLoaderIterator,\n jax.Array,\n]:\n step = 0\n if checkpoint_manager and restore_step is None:\n restore_step = checkpoint_manager.latest_step()\n if args.restore_ckpt:\n assert checkpoint_manager is not None\n abstract_optimizer = nnx.eval_shape(lambda: optimizer)\n abstract_optimizer_state = nnx.state(abstract_optimizer)\n if val_iterator:\n restore_args = ocp.args.Composite(\n model_state=ocp.args.PyTreeRestore(abstract_optimizer_state), # type: ignore\n train_dataloader_state=grain.checkpoint.CheckpointRestore(train_iterator), # type: ignore\n val_dataloader_state=grain.checkpoint.CheckpointRestore(val_iterator), # type: ignore\n )\n else:\n restore_args = ocp.args.Composite(\n model_state=ocp.args.PyTreeRestore(abstract_optimizer_state), # type: ignore\n train_dataloader_state=grain.checkpoint.CheckpointRestore(train_iterator), # type: ignore\n )\n restored = checkpoint_manager.restore(\n checkpoint_manager.latest_step(), args=restore_args\n )\n restored_optimizer_state = restored[""model_state""]\n nnx.update(optimizer, restored_optimizer_state)\n train_iterator = restored[""train_dataloader_state""]\n if val_iterator:\n val_iterator = restored[""val_dataloader_state""]\n step = checkpoint_manager.latest_step() or 0\n print(f""Restored dataloader and model state from step {step}"")\n else:\n # Restore from pre-trained tokenizer (and LAM)\n rng, _rng = jax.random.split(rng)\n optimizer = restore_genie_components(optimizer, replicated_sharding, _rng, args)\n return step, optimizer, train_iterator, val_iterator, rng\n\n\ndef _calculate_top_k_accuracy(\n token_logits_BTNV: jax.Array,\n video_tokens_BTN: jax.Array,\n mask_BTN: jax.Array,\n k: int,\n) -> jax.Array:\n _, topk_indices_BTNK = jax.lax.top_k(token_logits_BTNV, k)\n topk_correct = jnp.any(\n topk_indices_BTNK == video_tokens_BTN[..., jnp.newaxis], axis=-1\n )\n topk_acc = (mask_BTN * topk_correct).sum() / mask_BTN.sum()\n return topk_acc\n\n\ndef _calculate_step_metrics(\n outputs: dict[str, jax.Array],\n gt: jax.Array,\n num_actions: int,\n num_patch_latents: int,\n) -> tuple[jax.Array, dict]:\n mask_BTN = outputs[""mask""]\n outputs[""token_logits""] = outputs[""token_logits""].astype(jnp.float32)\n ce_loss = optax.softmax_cross_entropy_with_integer_labels(\n outputs[""token_logits""], outputs[""video_tokens""]\n )\n ce_loss = (mask_BTN * ce_loss).sum() / mask_BTN.sum()\n z_val = 
jax.nn.logsumexp(outputs[""token_logits""], axis=-1)\n z_loss_metric = (mask_BTN * (z_val**2)).sum() / mask_BTN.sum()\n\n masked_token_top_1_acc = _calculate_top_k_accuracy(\n outputs[""token_logits""], outputs[""video_tokens""], mask_BTN, 1\n )\n masked_token_top_2_acc = _calculate_top_k_accuracy(\n outputs[""token_logits""], outputs[""video_tokens""], mask_BTN, 2\n )\n masked_token_top_5_acc = _calculate_top_k_accuracy(\n outputs[""token_logits""], outputs[""video_tokens""], mask_BTN, 5\n )\n masked_token_top_16_acc = _calculate_top_k_accuracy(\n outputs[""token_logits""], outputs[""video_tokens""], mask_BTN, 16\n )\n\n select_probs = jax.nn.softmax(outputs[""token_logits""])\n gt_val = gt.clip(0, 1).reshape(-1, *gt.shape[2:])\n recon = outputs[""recon""].clip(0, 1).reshape(-1, *outputs[""recon""].shape[2:])\n psnr = jnp.asarray(pix.psnr(gt_val, recon)).mean()\n ssim = jnp.asarray(pix.ssim(gt_val, recon)).mean()\n _, index_counts_tokenizer = jnp.unique_counts(\n jnp.ravel(outputs[""video_tokens""]),\n size=num_patch_latents,\n fill_value=0,\n )\n codebook_usage_tokenizer = (index_counts_tokenizer != 0).mean()\n metrics = dict(\n cross_entropy_loss=ce_loss,\n masked_token_top1_accuracy=masked_token_top_1_acc,\n masked_token_top2_accuracy=masked_token_top_2_acc,\n masked_token_top5_accuracy=masked_token_top_5_acc,\n masked_token_top16_accuracy=masked_token_top_16_acc,\n select_logit=outputs[""token_logits""].max(-1).mean(),\n select_p=select_probs.max(-1).mean(),\n entropy=jax.scipy.special.entr(select_probs).sum(-1).mean(),\n z_loss=z_loss_metric,\n psnr=psnr,\n ssim=ssim,\n codebook_usage_tokenizer=codebook_usage_tokenizer,\n )\n if ""lam_indices"" in outputs.keys():\n _, index_counts_lam = jnp.unique_counts(\n jnp.ravel(outputs[""lam_indices""]),\n size=num_actions,\n fill_value=0,\n )\n codebook_usage_lam = (index_counts_lam != 0).mean()\n metrics[""codebook_usage_lam""] = codebook_usage_lam\n return ce_loss, metrics\n\n\ndef main(args: Args) -> None:\n jax.distributed.initialize()\n num_devices = jax.device_count()\n if num_devices == 0:\n raise ValueError(""No JAX devices found."")\n print(f""Running on {num_devices} devices."")\n\n if args.batch_size % num_devices != 0:\n raise ValueError(\n f""Global batch size {args.batch_size} must be divisible by ""\n f""number of devices {num_devices}.""\n )\n\n rng = jax.random.key(args.seed)\n\n # --- Initialize model ---\n genie, rng = build_model(args, rng)\n _, params, _ = nnx.split(genie, nnx.Param, ...)\n param_counts = count_parameters_by_component(params)\n\n if args.log and jax.process_index() == 0:\n wandb_init_kwargs = {\n ""entity"": args.entity,\n ""project"": args.project,\n ""name"": args.name,\n ""tags"": args.tags,\n ""group"": ""debug"",\n ""config"": args,\n }\n\n if args.wandb_id:\n wandb_init_kwargs.update(\n {\n ""id"": args.wandb_id,\n ""resume"": ""allow"",\n }\n )\n wandb.init(**wandb_init_kwargs)\n\n wandb.config.update({""model_param_count"": param_counts})\n\n print(""Parameter counts:"")\n print(param_counts)\n\n # --- Initialize optimizer ---\n optimizer = build_optimizer(genie, args)\n del genie\n\n # FIXME: switch to create_hybrid_device_mesh for runs spanning multiple nodes\n _, replicated_sharding, videos_sharding, actions_sharding = build_mesh_and_sharding(\n num_devices\n )\n\n shard_optimizer_states(optimizer, replicated_sharding)\n\n # --- Initialize checkpoint manager ---\n checkpoint_manager = build_checkpoint_manager(args)\n\n # --- Create DataLoaderIterator from dataloader ---\n train_iterator = 
build_dataloader(args, args.data_dir)\n val_iterator = None\n if args.val_data_dir:\n val_iterator = build_dataloader(args, args.val_data_dir)\n\n # --- Restore checkpoint ---\n step, optimizer, train_iterator, val_iterator, rng = (\n restore_or_initialize_components(\n args,\n checkpoint_manager,\n optimizer,\n train_iterator,\n rng,\n replicated_sharding,\n val_iterator,\n )\n )\n\n # --- Define loss and train step (close over args) ---\n def dynamics_loss_fn(\n model: Genie,\n inputs: dict,\n ) -> tuple[jax.Array, tuple[jax.Array, dict]]:\n gt = jnp.asarray(inputs[""videos""], dtype=jnp.float32) / 255.0\n inputs[""videos""] = gt.astype(args.dtype)\n outputs = model(inputs)\n ce_loss, metrics = _calculate_step_metrics(\n outputs, gt, args.num_actions, args.num_patch_latents\n )\n z_loss = metrics[""z_loss""]\n total_loss = ce_loss + args.z_loss_weight * z_loss\n metrics[""total_loss""] = total_loss\n return total_loss, (outputs[""recon""], metrics)\n\n @nnx.jit(donate_argnums=0)\n def train_step(\n optimizer: nnx.ModelAndOptimizer, inputs: dict\n ) -> tuple[jax.Array, jax.Array, dict]:\n def loss_fn(model: Genie) -> tuple[jax.Array, tuple[jax.Array, dict]]:\n model.train()\n return dynamics_loss_fn(model, inputs)\n\n (loss, (recon, metrics)), grads = nnx.value_and_grad(loss_fn, has_aux=True)(\n optimizer.model\n )\n optimizer.update(grads)\n if args.log_gradients:\n metrics[""gradients_std/""] = jax.tree.map(\n lambda x: x.std(), grads[""params""][""dynamics""]\n )\n return loss, recon, metrics\n\n @nnx.jit\n def val_step(genie: Genie, inputs: dict) -> dict:\n """"""Evaluate model and compute metrics""""""\n genie.eval()\n gt = jnp.asarray(inputs[""videos""], dtype=jnp.float32) / 255.0\n (loss, (recon, metrics)) = dynamics_loss_fn(genie, inputs)\n val_output = {""loss"": loss, ""recon"": recon, ""metrics"": metrics}\n\n # --- Evaluate full frame prediction (sampling) ---\n if args.eval_full_frame:\n inputs[""videos""] = gt.astype(args.dtype)\n tokenizer_outputs = genie.tokenizer.vq_encode(\n inputs[""videos""], training=False\n )\n tokens_full_frame = tokenizer_outputs[""indices""]\n lam_indices_E = None\n if not args.use_gt_actions:\n lam_indices_E = genie.vq_encode(inputs, training=False)\n inputs[""latent_actions""] = lam_indices_E\n inputs[""videos""] = inputs[""videos""][\n :, :-1\n ] # remove last frame for generation\n recon_full_frame, logits_full_frame = genie.sample(\n inputs,\n args.seq_len,\n args.val_temperature,\n args.val_sample_argmax,\n args.val_maskgit_steps,\n )\n # Calculate metrics for the last frame only\n step_outputs = {\n ""recon"": recon_full_frame[:, -1],\n ""token_logits"": logits_full_frame[:, -1],\n ""video_tokens"": tokens_full_frame[:, -1],\n ""mask"": jnp.ones_like(tokens_full_frame[:, -1]),\n }\n if lam_indices_E is not None:\n lam_indices_B = lam_indices_E.reshape((-1, args.seq_len - 1))[:, -1]\n step_outputs[""lam_indices""] = lam_indices_B\n\n loss_full_frame, metrics_full_frame = _calculate_step_metrics(\n step_outputs, gt[:, -1], args.num_actions, args.num_patch_latents\n )\n val_output.update(\n {\n ""loss_full_frame"": loss_full_frame,\n ""recon_full_frame"": recon_full_frame,\n ""metrics_full_frame"": metrics_full_frame,\n }\n )\n return val_output\n\n def calculate_validation_metrics(val_dataloader, genie, rng):\n step = 0\n loss_per_step = []\n metrics_per_step = []\n loss_full_frame_per_step = []\n metrics_full_frame_per_step = []\n batch = None\n recon = None\n recon_full_frame = None\n for batch in val_dataloader:\n rng, _rng_mask = 
jax.random.split(rng, 2)\n batch[""rng""] = _rng_mask\n val_outputs = val_step(genie, batch)\n loss_per_step.append(val_outputs[""loss""])\n metrics_per_step.append(val_outputs[""metrics""])\n recon = val_outputs[""recon""]\n if args.eval_full_frame:\n loss_full_frame_per_step.append(val_outputs[""loss_full_frame""])\n metrics_full_frame_per_step.append(val_outputs[""metrics_full_frame""])\n recon_full_frame = val_outputs[""recon_full_frame""]\n step += 1\n if step > args.val_steps:\n break\n\n if step < args.val_steps:\n print(\n f""Warning: Your validation dataset is too small to make val_steps many steps. Made {step} steps, expected {args.val_steps}""\n )\n\n val_metrics = {\n f""val_{key}"": np.mean([float(m[key]) for m in metrics_per_step])\n for key in metrics_per_step[0].keys()\n }\n val_metrics[""val_loss""] = np.mean(loss_per_step)\n if args.eval_full_frame:\n val_metrics_full_frame = {\n f""val_full_frame_{key}"": np.mean(\n [float(m[key]) for m in metrics_full_frame_per_step]\n )\n for key in metrics_full_frame_per_step[0].keys()\n }\n val_metrics.update(val_metrics_full_frame)\n val_metrics[""val_full_frame_loss""] = np.mean(loss_full_frame_per_step)\n return val_metrics, batch, recon, recon_full_frame\n\n # --- TRAIN LOOP ---\n dataloader_train = (\n {\n ""videos"": jax.make_array_from_process_local_data(\n videos_sharding, local_data=elem[""videos""]\n ),\n ""actions"": (\n jax.make_array_from_process_local_data(\n actions_sharding, elem[""actions""]\n )\n if args.use_gt_actions\n else None\n ),\n }\n for elem in train_iterator\n )\n dataloader_val = None\n if val_iterator:\n dataloader_val = (\n {\n ""videos"": jax.make_array_from_process_local_data(\n videos_sharding, elem[""videos""]\n ),\n ""actions"": (\n jax.make_array_from_process_local_data(\n actions_sharding, elem[""actions""]\n )\n if args.use_gt_actions\n else None\n ),\n }\n for elem in val_iterator\n )\n if jax.process_index() == 0:\n first_batch = next(dataloader_train)\n first_batch[""rng""] = rng # type: ignore\n compiled = train_step.lower(optimizer, first_batch).compile()\n print_compiled_memory_stats(compiled.memory_analysis())\n print_compiled_cost_analysis(compiled.cost_analysis())\n # Do not skip the first batch during training\n dataloader_train = itertools.chain([first_batch], dataloader_train)\n print(f""Starting training from step {step}..."")\n first_step = step\n while step < args.num_steps:\n for batch in dataloader_train:\n # --- Train step ---\n rng, _rng_mask = jax.random.split(rng, 2)\n batch[""rng""] = _rng_mask\n loss, recon, metrics = train_step(optimizer, batch)\n if step == first_step:\n print_mem_stats(""After params initialized"")\n step += 1\n\n # --- Validation loss ---\n val_results = {}\n if dataloader_val and step % args.val_interval == 0:\n rng, _rng_mask_val = jax.random.split(rng, 2)\n print(""Calculating validation metrics..."")\n val_metrics, val_gt_batch, val_recon, val_recon_full_frame = (\n calculate_validation_metrics(\n dataloader_val, optimizer.model, _rng_mask_val\n )\n )\n print(f""Step {step}, validation loss: {val_metrics['val_loss']}"")\n val_results = {\n ""metrics"": val_metrics,\n ""gt_batch"": val_gt_batch,\n ""recon"": val_recon,\n ""full_frame"": val_recon_full_frame,\n }\n\n # --- Logging ---\n if args.log:\n if step % args.log_interval == 0 and jax.process_index() == 0:\n log_dict = {""loss"": loss, ""step"": step, **metrics}\n if val_results:\n log_dict.update(val_results[""metrics""])\n wandb.log(log_dict)\n if step % args.log_image_interval == 0:\n 
gt_seq = batch[""videos""][0].astype(jnp.float32) / 255.0\n recon_seq = recon[0].clip(0, 1)\n comparison_seq = jnp.concatenate((gt_seq, recon_seq), axis=1)\n comparison_seq = einops.rearrange(\n comparison_seq * 255, ""t h w c -> h (t w) c""\n )\n if val_results:\n val_results[""gt_seq_val""] = (\n val_results[""gt_batch""][""videos""][0].astype(jnp.float32)\n / 255.0\n )\n val_results[""recon_seq_val""] = val_results[""recon""][0].clip(\n 0, 1\n )\n val_comparison_seq = jnp.concatenate(\n (val_results[""gt_seq_val""], val_results[""recon_seq_val""]),\n axis=1,\n )\n val_results[""val_comparison_seq""] = einops.rearrange(\n val_comparison_seq * 255, ""t h w c -> h (t w) c""\n )\n if args.eval_full_frame:\n val_results[""full_frame_seq_val""] = val_results[\n ""full_frame""\n ][0].clip(0, 1)\n val_results[""val_full_frame_comparison_seq""] = (\n jnp.concatenate(\n (\n val_results[""gt_seq_val""],\n val_results[""full_frame_seq_val""],\n ),\n axis=1,\n )\n )\n val_results[""val_full_frame_comparison_seq""] = (\n einops.rearrange(\n val_results[""val_full_frame_comparison_seq""] * 255,\n ""t h w c -> h (t w) c"",\n )\n )\n # NOTE: Process-dependent control flow deliberately happens\n # after indexing operation since it must not contain code\n # sections that lead to cross-accelerator communication.\n if jax.process_index() == 0:\n log_images = dict(\n image=wandb.Image(np.asarray(gt_seq[args.seq_len - 1])),\n recon=wandb.Image(np.asarray(recon_seq[args.seq_len - 1])),\n true_vs_recon=wandb.Image(\n np.asarray(comparison_seq.astype(np.uint8))\n ),\n )\n if val_results:\n log_images.update(\n dict(\n val_image=wandb.Image(\n np.asarray(\n val_results[""gt_seq_val""][args.seq_len - 1]\n )\n ),\n val_recon=wandb.Image(\n np.asarray(\n val_results[""recon_seq_val""][\n args.seq_len - 1\n ]\n )\n ),\n val_true_vs_recon=wandb.Image(\n np.asarray(\n val_results[""val_comparison_seq""].astype(\n np.uint8\n )\n )\n ),\n )\n )\n if args.eval_full_frame:\n log_images.update(\n dict(\n val_full_frame=wandb.Image(\n np.asarray(\n val_results[""full_frame_seq_val""][\n args.seq_len - 1\n ]\n )\n ),\n val_true_vs_full_frame=wandb.Image(\n np.asarray(\n val_results[\n ""val_full_frame_comparison_seq""\n ].astype(np.uint8)\n )\n ),\n )\n )\n wandb.log(log_images)\n # --- Checkpointing ---\n if args.save_ckpt and step % args.log_checkpoint_interval == 0:\n assert checkpoint_manager is not None\n optimizer_state = nnx.state(optimizer)\n if val_iterator:\n ckpt_manager_args = ocp.args.Composite(\n model_state=ocp.args.PyTreeSave(optimizer_state), # type: ignore\n train_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n train_iterator # type: ignore\n ),\n val_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n val_iterator # type: ignore\n ),\n )\n else:\n ckpt_manager_args = ocp.args.Composite(\n model_state=ocp.args.PyTreeSave(optimizer_state), # type: ignore\n train_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n train_iterator # type: ignore\n ),\n )\n checkpoint_manager.save(step, args=ckpt_manager_args)\n print(f""Saved checkpoint at step {step}"")\n if step >= args.num_steps:\n break\n\n if checkpoint_manager:\n checkpoint_manager.close()\n\n\nif __name__ == ""__main__"":\n args = tyro.cli(Args)\n main(args)\n",python,tab +6,418953,"jasmine/train_dynamics.py",1403,0,"",python,selection_mouse +7,418974,"jasmine/train_dynamics.py",1402,0,"",python,selection_command +8,421044,"jasmine/train_dynamics.py",6773,0,"",python,selection_mouse 
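The recorded train_dynamics.py above computes masked top-k token accuracy with jax.lax.top_k over the vocabulary axis, averaging hits only over masked positions. A minimal self-contained sketch of that metric (toy shapes; the function name and example data are illustrative, not from the recording):

import jax
import jax.numpy as jnp

def masked_top_k_accuracy(logits_BTNV, tokens_BTN, mask_BTN, k):
    # indices of the k largest logits per (batch, time, token) position
    _, topk_BTNK = jax.lax.top_k(logits_BTNV, k)
    # a position is a hit if the ground-truth token appears among the top k
    hit_BTN = jnp.any(topk_BTNK == tokens_BTN[..., jnp.newaxis], axis=-1)
    # average over masked positions only, matching the recorded metric
    return (mask_BTN * hit_BTN).sum() / mask_BTN.sum()

logits = jax.random.normal(jax.random.key(0), (2, 3, 4, 16))  # (B, T, N, vocab)
tokens = jnp.zeros((2, 3, 4), dtype=jnp.int32)
mask = jnp.ones((2, 3, 4))
print(masked_top_k_accuracy(logits, tokens, mask, k=5))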
+9,421047,"jasmine/train_dynamics.py",6772,0,"",python,selection_command +10,425324,"jasmine/train_dynamics.py",8477,0,"",python,selection_command +11,426023,"jasmine/train_dynamics.py",8558,0,"",python,selection_command +12,426272,"jasmine/train_dynamics.py",11188,0,"",python,selection_command +13,427474,"jasmine/train_dynamics.py",11345,0,"",python,selection_command +14,427867,"jasmine/train_dynamics.py",11766,0,"",python,selection_command +15,431824,"jasmine/train_dynamics.py",11755,0,"",python,selection_mouse +16,431991,"jasmine/train_dynamics.py",11751,7,"outputs",python,selection_mouse +17,438594,"jasmine/train_dynamics.py",11832,0,"",python,selection_mouse +18,438754,"jasmine/train_dynamics.py",11825,7,"ce_loss",python,selection_mouse +19,512377,"TERMINAL",0,0,"queue",,terminal_command +20,512442,"TERMINAL",0,0,"]633;C[?1049h(B[?7hEvery 1.0s: squeue --mehkn1993.localdomain: Tue Sep 30 23:36:23 2025JOBID PARTITION NAME USER ST\tTIME NODES NODELIST(REASON)3533426 accelerat train_dy tum_cte0 R 8:32:16\t 1 hkn04233532468 accelerat train_dy tum_cte0 R 11:46:53\t 1 hkn08073532465 accelerat train_dy tum_cte0 R 11:47:36\t 1 hkn05073532466 accelerat train_dy tum_cte0 R 11:47:36\t 1 hkn05073532383 accelerat train_to tum_cte0 R 11:54:36\t 1 hkn08073529698 accelerat train_to tum_cte0 R 1-03:00:30\t 1 hkn04293533705 large generate tum_cte0 R 7:37:34\t 1 hkn1901",,terminal_output +51,544086,"TERMINAL",0,0,"[?1049l\r[?1l>]0;tum_cte0515@hkn1993:~/Projects/jasmine",,terminal_output +52,550677,"TERMINAL",0,0,"git branch",,terminal_command +53,550747,"TERMINAL",0,0,"]633;C[?1h=\r",,terminal_output +54,550895,"TERMINAL",0,0," ablation/use-pytorch-dataloader\r\n action-mapper\r\n add-noise-to-combat-exposure-bias\r\n add-wandb-name-and-tags\r\n before-nnx\r\n causal-mem-reduce\r\n causal-spatiotemporal-kv-cache\r\n causal-st-transformer\r\n causal-transformer-dynamics-model\r\n 
causal-transformer-nnx-no-kv-cache\r\n change-default-parameters\r\n* change-default-to-wsd\r\n coinrun-gt-actions\r\n:",,terminal_output +55,554416,"TERMINAL",0,0,"\r\r:",,terminal_output +56,555067,"TERMINAL",0,0,"\r\r:",,terminal_output +57,555161,"TERMINAL",0,0,"\r\r:\r\r:\r\r:\r\r:",,terminal_output +58,557447,"TERMINAL",0,0,"\r/",,terminal_output +59,565140,"TERMINAL",0,0,"cc",,terminal_output +60,565209,"TERMINAL",0,0,"oo",,terminal_output +61,565506,"TERMINAL",0,0,"ii",,terminal_output +62,565613,"TERMINAL",0,0,"nn",,terminal_output +63,566542,"TERMINAL",0,0,"rruu",,terminal_output +64,566607,"TERMINAL",0,0,"nn",,terminal_output +65,567107,"TERMINAL",0,0,"\r ablation/use-pytorch-dataloader\r\n action-mapper\r\n add-noise-to-combat-exposure-bias\r\n add-wandb-name-and-tags\r\n before-nnx\r\n causal-mem-reduce\r\n causal-spatiotemporal-kv-cache\r\n causal-st-transformer\r\n causal-transformer-dynamics-model\r\n causal-transformer-nnx-no-kv-cache\r\n change-default-parameters\r\n* change-default-to-wsd\r\n coinrun-gt-actions\r\n ablation/use-pytorch-dataloader\r\n action-mapper\r\n add-noise-to-combat-exposure-bias\r\n add-wandb-name-and-tags\r\n before-nnx\r\n causal-mem-reduce\r\n causal-spatiotemporal-kv-cache\r\n causal-st-transformer\r\n causal-transformer-dynamics-model\r\n causal-transformer-nnx-no-kv-cache\r\n change-default-parameters\r\n* change-default-to-wsd\r\n coinrun-gt-actions\r\n convert-to-jax-array-in-iter\r\n correct-batched-sampling\r\n dev\r\n dont-let-tf-see-gpu\r\n feat/darkness-filter\r\n feat/explicit-image-dims\r\n fix-action-padding-lam-future-information-access\r\n fix-sampling\r\n fix-transformer-forwardpass\r\n fix/dyn-restore-after-nnx-upgrade\r\n fix/spatiotemporal-pe-once-in-STTransformer\r\n generate-minatar-breakout-dataset\r\n:",,terminal_output +66,568485,"TERMINAL",0,0,"\r/\r...skipping...\r\n train_lam_coinrun_ablation_wsd_3e-6_28747\r\n val-loss\r\n vizdoom-dataset\r\n z-loss\r\n zloss-runs\r\n~\r\n~\r\n~\r\n~\r\n~\r\n~\r\n~\r\n~\r\n(END)",,terminal_output +67,569937,"TERMINAL",0,0,"\r/\r\rPattern not found (press RETURN)",,terminal_output +68,571487,"TERMINAL",0,0,"\r/",,terminal_output +69,573023,"TERMINAL",0,0,"dd",,terminal_output +70,573183,"TERMINAL",0,0,"aa",,terminal_output +71,573366,"TERMINAL",0,0,"tt",,terminal_output +72,573464,"TERMINAL",0,0,"aa",,terminal_output +73,573890,"TERMINAL",0,0,"\r train_lam_coinrun_ablation_wsd_3e-6_28747\r\n val-loss\r\n vizdoom-dataset\r\n z-loss\r\n zloss-runs\r\n~\r\n train_lam_coinrun_ablation_wsd_3e-6_28747\r\n val-loss\r\n vizdoom-dataset\r\n z-loss\r\n zloss-runs\r\n~\r\n~\r\n~\r\n(END)",,terminal_output +74,575994,"TERMINAL",0,0,"\r...skipping...\r\n runner\r\n runner-grain\r\n sample-ali-branch\r\n sample-from-different-topologies\r\n sampling-script-add-metrics\r\n sampling-startframe-indexing-fix\r\n seeding-data-generation\r\n speedup-tfrecord-preprocessing\r\n train_lam_coinrun_ablation_wsd_3e-6_28747\r\n val-loss\r\n vizdoom-dataset\r\n z-loss\r\n zloss-runs\r\n(END)",,terminal_output +75,582829,"TERMINAL",0,0,"\r[?1l>]0;tum_cte0515@hkn1993:~/Projects/jasmine",,terminal_output +76,586889,"TERMINAL",0,0,"git checkout seeding-data-generation",,terminal_command +77,586941,"TERMINAL",0,0,"]633;C",,terminal_output +78,587175,"TERMINAL",0,0,"Switched to branch 'seeding-data-generation'\r\nYour branch is up to date with 'origin/seeding-data-generation'.\r\n]0;tum_cte0515@hkn1993:~/Projects/jasmine",,terminal_output +79,587637,"jasmine/train_dynamics.py",895,16196," image_height: int = 90\n 
image_width: int = 160\n data_dir: str = """"\n save_ckpt: bool = False\n restore_ckpt: bool = False\n # Optimization\n batch_size: int = 36\n init_lr: float = 0.0\n max_lr: float = 3e-5\n decay_end: float = 0.0\n wsd_decay_steps: int = (\n 10000 # NOTE: wsd_decay_steps will only be used when using a wsd-schedule\n )\n warmup_steps: int = 5000\n lr_schedule: str = ""wsd"" # supported options: wsd, cos\n # Tokenizer\n tokenizer_dim: int = 512\n tokenizer_ffn_dim: int = 2048\n latent_patch_dim: int = 32\n num_patch_latents: int = 1024\n patch_size: int = 4\n tokenizer_num_blocks: int = 4\n tokenizer_num_heads: int = 8\n tokenizer_checkpoint: str = """"\n # LAM\n lam_dim: int = 512\n lam_ffn_dim: int = 2048\n latent_action_dim: int = 32\n num_actions: int = 6\n lam_patch_size: int = 16\n lam_num_blocks: int = 4\n lam_num_heads: int = 8\n lam_checkpoint: str = """"\n # Dynamics\n dyna_type: str = ""maskgit"" # supported options: maskgit, causal\n dyna_dim: int = 512\n dyna_ffn_dim: int = 2048\n dyna_num_blocks: int = 6\n dyna_num_heads: int = 8\n dropout: float = 0.0\n mask_limit: float = 0.5\n param_dtype = jnp.float32\n dtype = jnp.bfloat16\n use_flash_attention: bool = True\n use_gt_actions: bool = False\n # Logging\n log: bool = False\n entity: str = """"\n project: str = """"\n name: str = ""train_dynamics""\n tags: list[str] = field(default_factory=lambda: [""dynamics""])\n log_interval: int = 5\n log_image_interval: int = 250\n ckpt_dir: str = """"\n log_checkpoint_interval: int = 25000\n log_checkpoint_keep_period: int = 20000\n log_gradients: bool = False\n val_data_dir: str = """"\n val_interval: int = 20_000\n val_steps: int = 50\n eval_full_frame: bool = False\n val_maskgit_steps: int = 25\n val_temperature: float = 1\n val_sample_argmax: bool = False\n wandb_id: str = """"\n\n\ndef build_model(args: Args, rng: jax.Array) -> tuple[Genie, jax.Array]:\n rng, _rng = jax.random.split(rng)\n rngs = nnx.Rngs(_rng)\n genie = Genie(\n # Tokenizer\n in_dim=args.image_channels,\n tokenizer_dim=args.tokenizer_dim,\n tokenizer_ffn_dim=args.tokenizer_ffn_dim,\n latent_patch_dim=args.latent_patch_dim,\n num_patch_latents=args.num_patch_latents,\n patch_size=args.patch_size,\n tokenizer_num_blocks=args.tokenizer_num_blocks,\n tokenizer_num_heads=args.tokenizer_num_heads,\n # LAM\n lam_dim=args.lam_dim,\n lam_ffn_dim=args.lam_ffn_dim,\n latent_action_dim=args.latent_action_dim,\n num_actions=args.num_actions,\n lam_patch_size=args.lam_patch_size,\n lam_num_blocks=args.lam_num_blocks,\n lam_num_heads=args.lam_num_heads,\n lam_co_train=not args.lam_checkpoint,\n use_gt_actions=args.use_gt_actions,\n # Dynamics\n dyna_type=args.dyna_type,\n dyna_dim=args.dyna_dim,\n dyna_ffn_dim=args.dyna_ffn_dim,\n dyna_num_blocks=args.dyna_num_blocks,\n dyna_num_heads=args.dyna_num_heads,\n dropout=args.dropout,\n mask_limit=args.mask_limit,\n param_dtype=args.param_dtype,\n dtype=args.dtype,\n use_flash_attention=args.use_flash_attention,\n decode=False,\n rngs=rngs,\n )\n if args.use_gt_actions:\n assert (\n not args.lam_checkpoint\n ), ""Cannot use LAM when using ground-truth actions.""\n else:\n assert genie.lam is not None\n del genie.lam.decoder\n return genie, rng\n\n\ndef build_optimizer(genie: Genie, args: Args) -> nnx.ModelAndOptimizer:\n lr_schedule = get_lr_schedule(\n args.lr_schedule,\n args.init_lr,\n args.max_lr,\n args.decay_end,\n args.num_steps,\n args.warmup_steps,\n args.wsd_decay_steps,\n )\n tx = optax.adamw(\n learning_rate=lr_schedule,\n b1=0.9,\n b2=0.9,\n weight_decay=1e-4,\n 
mu_dtype=args.param_dtype, # moments in full precision\n )\n optimizer = nnx.ModelAndOptimizer(genie, tx)\n return optimizer\n\n\ndef build_mesh_and_sharding(\n num_devices: int,\n) -> tuple[Mesh, NamedSharding, NamedSharding, NamedSharding]:\n device_mesh_arr = create_device_mesh((num_devices,))\n mesh = Mesh(devices=device_mesh_arr, axis_names=(""data"",))\n replicated_sharding = NamedSharding(mesh, PartitionSpec())\n videos_sharding = NamedSharding(mesh, PartitionSpec(""data"", None, None, None, None))\n actions_sharding = NamedSharding(mesh, PartitionSpec(""data"", None))\n return mesh, replicated_sharding, videos_sharding, actions_sharding\n\n\ndef shard_optimizer_states(\n optimizer: nnx.ModelAndOptimizer, replicated_sharding: NamedSharding\n) -> None:\n model_state = nnx.state(optimizer.model)\n model_sharded_state = jax.lax.with_sharding_constraint(\n model_state, replicated_sharding\n )\n nnx.update(optimizer.model, model_sharded_state)\n optimizer_state = nnx.state(optimizer, nnx.optimizer.OptState)\n optimizer_sharded_state = jax.lax.with_sharding_constraint(\n optimizer_state, replicated_sharding\n )\n nnx.update(optimizer, optimizer_sharded_state)\n\n\ndef build_dataloader(args: Args, data_dir: str) -> grain.DataLoaderIterator:\n image_shape = (args.image_height, args.image_width, args.image_channels)\n array_record_files = [\n os.path.join(data_dir, x)\n for x in os.listdir(data_dir)\n if x.endswith("".array_record"")\n ]\n grain_dataloader = get_dataloader(\n array_record_files,\n args.seq_len,\n # NOTE: We deliberately pass the global batch size\n # The dataloader shards the dataset across all processes\n args.batch_size,\n *image_shape,\n num_workers=8,\n prefetch_buffer_size=1,\n seed=args.seed,\n )\n initial_state = grain_dataloader._create_initial_state()\n grain_iterator = grain.DataLoaderIterator(grain_dataloader, initial_state)\n return grain_iterator\n\n\ndef build_checkpoint_manager(args: Args) -> Optional[ocp.CheckpointManager]:\n if args.restore_ckpt or args.save_ckpt:\n handler_registry = ocp.handlers.DefaultCheckpointHandlerRegistry()\n handler_registry.add(\n ""model_state"", ocp.args.PyTreeSave, ocp.handlers.PyTreeCheckpointHandler\n )\n handler_registry.add(\n ""model_state"", ocp.args.PyTreeRestore, ocp.handlers.PyTreeCheckpointHandler\n )\n handler_registry.add(\n ""train_dataloader_state"",\n grain.checkpoint.CheckpointSave,\n cast(ocp.handlers.CheckpointHandler, grain.checkpoint.CheckpointHandler),\n )\n handler_registry.add(\n ""train_dataloader_state"",\n grain.checkpoint.CheckpointRestore,\n cast(ocp.handlers.CheckpointHandler, grain.checkpoint.CheckpointHandler),\n )\n if args.val_data_dir:\n handler_registry.add(\n ""val_dataloader_state"",\n grain.checkpoint.CheckpointSave,\n cast(\n ocp.handlers.CheckpointHandler, grain.checkpoint.CheckpointHandler\n ),\n )\n handler_registry.add(\n ""val_dataloader_state"",\n grain.checkpoint.CheckpointRestore,\n cast(\n ocp.handlers.CheckpointHandler, grain.checkpoint.CheckpointHandler\n ),\n )\n checkpoint_options = ocp.CheckpointManagerOptions(\n save_interval_steps=args.log_checkpoint_interval,\n max_to_keep=3,\n keep_period=args.log_checkpoint_keep_period,\n step_format_fixed_length=6,\n cleanup_tmp_directories=True,\n )\n checkpoint_manager = ocp.CheckpointManager(\n args.ckpt_dir,\n options=checkpoint_options,\n handler_registry=handler_registry,\n )\n return checkpoint_manager\n else:\n return None\n\n\ndef restore_or_initialize_components(\n args: Args,\n checkpoint_manager: 
Optional[ocp.CheckpointManager],\n optimizer: nnx.ModelAndOptimizer,\n train_iterator: grain.DataLoaderIterator,\n rng: jax.Array,\n replicated_sharding: NamedSharding,\n val_iterator: Optional[grain.DataLoaderIterator],\n restore_step: Optional[int] = None,\n) -> tuple[\n int,\n nnx.ModelAndOptimizer,\n grain.DataLoaderIterator,\n grain.DataLoaderIterator,\n jax.Array,\n]:\n step = 0\n if checkpoint_manager and restore_step is None:\n restore_step = checkpoint_manager.latest_step()\n if args.restore_ckpt:\n assert checkpoint_manager is not None\n abstract_optimizer = nnx.eval_shape(lambda: optimizer)\n abstract_optimizer_state = nnx.state(abstract_optimizer)\n if val_iterator:\n restore_args = ocp.args.Composite(\n model_state=ocp.args.PyTreeRestore(abstract_optimizer_state), # type: ignore\n train_dataloader_state=grain.checkpoint.CheckpointRestore(train_iterator), # type: ignore\n val_dataloader_state=grain.checkpoint.CheckpointRestore(val_iterator), # type: ignore\n )\n else:\n restore_args = ocp.args.Composite(\n model_state=ocp.args.PyTreeRestore(abstract_optimizer_state), # type: ignore\n train_dataloader_state=grain.checkpoint.CheckpointRestore(train_iterator), # type: ignore\n )\n restored = checkpoint_manager.restore(\n checkpoint_manager.latest_step(), args=restore_args\n )\n restored_optimizer_state = restored[""model_state""]\n nnx.update(optimizer, restored_optimizer_state)\n train_iterator = restored[""train_dataloader_state""]\n if val_iterator:\n val_iterator = restored[""val_dataloader_state""]\n step = checkpoint_manager.latest_step() or 0\n print(f""Restored dataloader and model state from step {step}"")\n else:\n # Restore from pre-trained tokenizer (and LAM)\n rng, _rng = jax.random.split(rng)\n optimizer = restore_genie_components(optimizer, replicated_sharding, _rng, args)\n return step, optimizer, train_iterator, val_iterator, rng\n\n\ndef _calculate_step_metrics(\n outputs: dict[str, jax.Array],\n gt: jax.Array,\n num_actions: int,\n num_patch_latents: int,\n) -> tuple[jax.Array, dict]:\n mask = outputs[""mask""]\n outputs[""token_logits""] = outputs[""token_logits""].astype(jnp.float32)\n ce_loss = optax.softmax_cross_entropy_with_integer_labels(\n outputs[""token_logits""], outputs[""video_tokens""]\n )\n ce_loss = (mask * ce_loss).sum() / mask.sum()\n acc = outputs[""token_logits""].argmax(-1) == outputs[""video_tokens""]\n acc = (mask * acc).sum() / mask.sum()\n select_probs = jax.nn.softmax(outputs[""token_logits""])\n gt_val = gt.clip(0, 1).reshape(-1, *gt.shape[2:])\n recon = outputs[""recon""].clip(0, 1).reshape(-1, *outputs[""recon""].shape[2:])\n psnr = jnp.asarray(pix.psnr(gt_val, recon)).mean()\n ssim = jnp.asarray(pix.ssim(gt_val, recon)).mean()\n _, index_counts_tokenizer = jnp.unique_counts(\n jnp.ravel(outputs[""video_tokens""]),\n size=num_patch_latents,\n fill_value=0,\n )\n codebook_usage_tokenizer = (index_counts_tokenizer != 0).mean()\n metrics = dict(\n cross_entropy_loss=ce_loss,\n masked_token_accuracy=acc,\n select_logit=outputs[""token_logits""].max(-1).mean(),\n select_p=select_probs.max(-1).mean(),\n entropy=jax.scipy.special.entr(select_probs).sum(-1).mean(),\n psnr=psnr,\n ssim=ssim,\n codebook_usage_tokenizer=codebook_usage_tokenizer,\n )\n if ""lam_indices"" in outputs.keys():\n _, index_counts_lam = jnp.unique_counts(\n jnp.ravel(outputs[""lam_indices""]),\n size=num_actions,\n fill_value=0,\n )\n codebook_usage_lam = (index_counts_lam != 0).mean()\n metrics[""codebook_usage_lam""] = codebook_usage_lam\n return ce_loss, 
metrics\n\n\ndef main(args: Args) -> None:\n jax.distributed.initialize()\n num_devices = jax.device_count()\n if num_devices == 0:\n raise ValueError(""No JAX devices found."")\n print(f""Running on {num_devices} devices."")\n\n if args.batch_size % num_devices != 0:\n raise ValueError(\n f""Global batch size {args.batch_size} must be divisible by ""\n f""number of devices {num_devices}.""\n )\n\n rng = jax.random.key(args.seed)\n\n # --- Initialize model ---\n genie, rng = build_model(args, rng)\n _, params, _ = nnx.split(genie, nnx.Param, ...)\n param_counts = count_parameters_by_component(params)\n\n if args.log and jax.process_index() == 0:\n wandb_init_kwargs = {\n ""entity"": args.entity,\n ""project"": args.project,\n ""name"": args.name,\n ""tags"": args.tags,\n ""group"": ""debug"",\n ""config"": args,\n }\n\n if args.wandb_id:\n wandb_init_kwargs.update(\n {\n ""id"": args.wandb_id,\n ""resume"": ""allow"",\n }\n )\n wandb.init(**wandb_init_kwargs)\n\n wandb.config.update({""model_param_count"": param_counts})\n\n print(""Parameter counts:"")\n print(param_counts)\n\n # --- Initialize optimizer ---\n optimizer = build_optimizer(genie, args)\n del genie\n\n # FIXME: switch to create_hybrid_device_mesh for runs spanning multiple nodes\n _, replicated_sharding, videos_sharding, actions_sharding = build_mesh_and_sharding(\n num_devices\n )\n\n shard_optimizer_states(optimizer, replicated_sharding)\n\n # --- Initialize checkpoint manager ---\n checkpoint_manager = build_checkpoint_manager(args)\n\n # --- Create DataLoaderIterator from dataloader ---\n train_iterator = build_dataloader(args, args.data_dir)\n val_iterator = None\n if args.val_data_dir:\n val_iterator = build_dataloader(args, args.val_data_dir)\n\n # --- Restore checkpoint ---\n step, optimizer, train_iterator, val_iterator, rng = (\n restore_or_initialize_components(\n args,\n checkpoint_manager,\n optimizer,\n train_iterator,\n rng,\n replicated_sharding,\n val_iterator,\n )\n )\n\n # --- Define loss and train step (close over args) ---\n def dynamics_loss_fn(\n model: Genie,\n inputs: dict,\n ) -> tuple[jax.Array, tuple[jax.Array, dict]]:\n gt = jnp.asarray(inputs[""videos""], dtype=jnp.float32) / 255.0\n inputs[""videos""] = gt.astype(args.dtype)\n outputs = model(inputs)\n ce_loss, metrics = _calculate_step_metrics(\n outputs, gt, args.num_actions, args.num_patch_latents\n )\n return ce_loss, (outputs[""recon""], metrics)\n",python,content +80,588234,"jasmine/train_dynamics.py",0,0,"Switched from branch 'change-default-to-wsd' to 'seeding-data-generation'",python,git_branch_checkout +81,589551,"TERMINAL",0,0,"git pull",,terminal_command +82,589600,"TERMINAL",0,0,"]633;C",,terminal_output +83,591670,"TERMINAL",0,0,"remote: Enumerating objects: 11, done.\r\nremote: Counting objects: 9% (1/11)\rremote: Counting objects: 18% (2/11)\rremote: Counting objects: 27% (3/11)\rremote: Counting objects: 36% (4/11)\rremote: Counting objects: 45% (5/11)\rremote: Counting objects: 54% (6/11)\rremote: Counting objects: 63% (7/11)\rremote: Counting objects: 72% (8/11)\rremote: Counting objects: 81% (9/11)\rremote: Counting objects: 90% (10/11)\rremote: Counting objects: 100% (11/11)\rremote: Counting objects: 100% (11/11), done.\r\nremote: Compressing objects: 50% (1/2)\rremote: Compressing objects: 100% (2/2)\rremote: Compressing objects: 100% (2/2), done.\r\nremote: Total 6 (delta 4), reused 5 (delta 4), pack-reused 0 (from 0)\r\nUnpacking objects: 16% (1/6)\rUnpacking objects: 33% (2/6)\rUnpacking objects: 50% (3/6)\rUnpacking 
objects: 66% (4/6)\rUnpacking objects: 83% (5/6)\rUnpacking objects: 100% (6/6)\rUnpacking objects: 100% (6/6), 1.33 KiB | 52.00 KiB/s, done.\r\n",,terminal_output +84,591826,"TERMINAL",0,0,"From github.com:p-doom/jasmine\r\n 6a0b4aa..b8c92e7 main -> origin/main\r\n",,terminal_output +85,591904,"TERMINAL",0,0,"Already up to date.\r\n]0;tum_cte0515@hkn1993:~/Projects/jasmine",,terminal_output +86,593016,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_no_flash_attn.sh",0,0,"#!/usr/bin/env bash\n\n#SBATCH --nodes=1\n#SBATCH --ntasks-per-node=1\n#SBATCH --time=24:00:00\n#SBATCH --cpus-per-task=8\n#SBATCH --gres=gpu:1\n#SBATCH --output=/fast/project/HFMI_SynergyUnit/jafar_ws/logs/franz/coinrun/dynamics/%x_%j.log\n#SBATCH --error=/fast/project/HFMI_SynergyUnit/jafar_ws/logs/franz/coinrun/dynamics/%x_%j.log\n#SBATCH --job-name=dynamics_coinrun_mila_submission_no_flash_attn\n#SBATCH --requeue\n#SBATCH --signal=b:usr1@300 # 5 min before timeout\n\n# --- signal trap to requeue job before timeout ---\nrequeue_job() {\n echo ""[$(date)] caught sigusr1 (timeout warning), requeueing slurm job $SLURM_JOB_ID...""\n # optional: trigger checkpoint saving here\n # e.g., touch $checkpoint_dir/requeue_trigger\n scontrol requeue $SLURM_JOB_ID\n exit 0\n}\n\ntrap requeue_job sigusr1\n\n# set checkpoint flag based on restart count\nrestart_count=$(scontrol show job $SLURM_JOB_ID | grep -o 'Restarts=[0-9]*' | cut -d'=' -f2)\n\nif [ $restart_count -eq 0 ]; then\n restore_ckpt_flag=""--no-restore-ckpt""\nelse\n restore_ckpt_flag=""--restore-ckpt""\nfi\n\n\n\n# Log the sbatch script\ncat $0\n\nsource .venv/bin/activate\n\njob_name=$SLURM_JOB_NAME\nslurm_job_id=$SLURM_JOB_ID\n\ntags=""coinrun dynamics 500m_dataset mila_submission ablation no-flash-attn""\n\narray_records_dir=""/fast/project/HFMI_SynergyUnit/jafar_ws/data/coinrun/array_records_500m_seed_w_increment""\ntokenizer_ckpt_dir=""/fast/project/HFMI_SynergyUnit/jafar_ws/checkpoints/coinrun/tokenizer/tokenizer_coinrun_mila_submission_29736/""\nCHECKPOINT_DIR=""/fast/project/HFMI_SynergyUnit/jafar_ws/checkpoints/coinrun/dynamics/${job_name}/${slurm_job_id}""\nmkdir -p $CHECKPOINT_DIR\n\nenv | grep SLURM\n\n\nsrun python jasmine/train_dynamics.py \\n --no-use-flash-attention \\n --save_ckpt \\n $restore_ckpt_flag \\n --wandb_id $SLURM_JOB_ID \\n --ckpt_dir $CHECKPOINT_DIR \\n --name=""${job_name}_${slurm_job_id}"" \\n --tags ${tags} \\n --entity instant-uv \\n --project jafar \\n --tokenizer_checkpoint=""${tokenizer_ckpt_dir}"" \\n --val_data_dir=""${array_records_dir}/val"" \\n --data_dir=""${array_records_dir}/train"" &\n\nchild_pid=$!\n\nwait $child_pid\n\n",shellscript,tab +87,599160,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_no_flash_attn.sh",0,0,"",shellscript,tab +88,618818,"slurm/jobs/mihir/horeka/preprocessing/coinrun_chunked_500m.sh",0,0,"#!/usr/bin/env bash\n\n#SBATCH --nodes=1\n#SBATCH --ntasks-per-node=1\n#SBATCH --time=48:00:00\n#SBATCH --partition=large\n#SBATCH --cpus-per-task=16\n#SBATCH --output=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir/preprocess/coinrun/%x_%j.log\n#SBATCH --error=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir/preprocess/coinrun/%x_%j.log\n#SBATCH --job-name=generate_coinrun_chunked_500m\n\ncd data\nsource .venv/bin/activate\n\npython jasmine_data/coinrun/generate_coinrun_dataset.py \\n --min_episode_length 1000 \\n --num_episodes_train 500000 \\n --num_episodes_val 100 \\n --num_episodes_test 100 \\n --chunk_size 
100 \\n --output_dir /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_coinrun/coinrun_episodes_500m_gt_actions_split",shellscript,tab +89,620149,"slurm/jobs/mihir/horeka/preprocessing/coinrun_chunked_500m.sh",84,0,"",shellscript,selection_mouse +90,626578,"TERMINAL",0,0,"fqueue",,terminal_command +91,626654,"TERMINAL",0,0,"]633;C[?1049h(B[?7hEvery 1.0s: squeue -o ""%.10i %.16P %.30j %.8u %.8T %.10M %.9l %.6D %R""hkn1993.localdomain: Tue Sep 30 23:38:17 2025JOBIDPARTITIONNAME USER STATE\t TIME TIME_LIMI NODES NODELIST(REASON)3533426\tacceleratedtrain_dynamics_maskgit tum_cte0 RUNNING 8:34:10 2-00:00:00\t1 hkn04233532468\tacceleratedtrain_dynamics_maskgit tum_cte0 RUNNING 11:48:47 2-00:00:00\t1 hkn08073532465\tacceleratedtrain_dynamics_maskgit tum_cte0 RUNNING 11:49:30 2-00:00:00\t1 hkn05073532466\tacceleratedtrain_dynamics_maskgit tum_cte0 RUNNING 11:49:30 2-00:00:00\t1 hkn05073532383\tacceleratedtrain_tokenizer_default tum_cte0 RUNNING 11:56:30 2-00:00:00\t1 hkn08073529698\tacceleratedtrain_tokenizer_default tum_cte0 RUNNING 1-03:02:24 2-00:00:00\t1 hkn04293533705large generate_coinrun_chunked_500m tum_cte0 RUNNING 7:39:28 2-00:00:00\t1 hkn1901",,terminal_output
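Both snapshots of _calculate_step_metrics in this recording measure codebook usage via jnp.unique_counts with a static size (num_patch_latents or num_actions) so the result shape stays fixed under jit. A minimal sketch of that metric on toy indices (names and data are illustrative):

import jax.numpy as jnp

def codebook_usage(indices, codebook_size):
    # occurrence count per code; size/fill_value pad to a static shape for jit
    _, counts = jnp.unique_counts(jnp.ravel(indices), size=codebook_size, fill_value=0)
    # fraction of codebook entries hit at least once in this batch
    return (counts != 0).mean()

indices = jnp.array([[0, 1, 1], [3, 3, 0]])
print(codebook_usage(indices, codebook_size=8))  # 3 of 8 codes used -> 0.375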
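build_optimizer in the recording requests lr_schedule="wsd" from get_lr_schedule (cf. the change-default-to-wsd branch checked out earlier). That helper lives in utils.train_utils and is not captured here; the following is only a plausible sketch of a warmup-stable-decay schedule assembled from optax primitives, under the assumption that warmup and decay are linear:

import optax

def wsd_schedule(init_lr, max_lr, decay_end, num_steps, warmup_steps, wsd_decay_steps):
    # linear warmup to max_lr, constant plateau, linear decay over the final steps
    return optax.join_schedules(
        schedules=[
            optax.linear_schedule(init_lr, max_lr, warmup_steps),
            optax.constant_schedule(max_lr),
            optax.linear_schedule(max_lr, decay_end, wsd_decay_steps),
        ],
        boundaries=[warmup_steps, num_steps - wsd_decay_steps],
    )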
diff --git a/927a8af5474e5654810c00ce2e09fd2de87d3e5722f33fa1090d867db114e403/crowd-code-6cff88e9-fc80-42df-a4e7-540c108499311759485913059-2025_10_03-12.06.10.09/source.csv b/927a8af5474e5654810c00ce2e09fd2de87d3e5722f33fa1090d867db114e403/crowd-code-6cff88e9-fc80-42df-a4e7-540c108499311759485913059-2025_10_03-12.06.10.09/source.csv new file mode 100644 index 0000000000000000000000000000000000000000..4dd4e7eab53c8c233d655362d76b4d382ca62966 --- /dev/null +++ b/927a8af5474e5654810c00ce2e09fd2de87d3e5722f33fa1090d867db114e403/crowd-code-6cff88e9-fc80-42df-a4e7-540c108499311759485913059-2025_10_03-12.06.10.09/source.csv @@ -0,0 +1,1533 @@ +Sequence,Time,File,RangeOffset,RangeLength,Text,Language,Type +1,6,"slurm/jobs/franz/berlin/coinrun/mila_submission/250M_dataset/speed_ablations/batch_size_36/coinrun_dynamics_flash_attn_ablation.sh",0,0,"#!/usr/bin/env bash\n\n#SBATCH --nodes=1\n#SBATCH --ntasks-per-node=1\n#SBATCH --time=24:00:00\n#SBATCH --cpus-per-task=8\n#SBATCH --gres=gpu:1\n#SBATCH --output=/fast/project/HFMI_SynergyUnit/jafar_ws/logs/franz/coinrun/dynamics/%x_%j.log\n#SBATCH --error=/fast/project/HFMI_SynergyUnit/jafar_ws/logs/franz/coinrun/dynamics/%x_%j.log\n#SBATCH --job-name=dynamics_coinrun_mila_submission_batch_size_36_flash_attn_ablation\n#SBATCH --requeue\n#SBATCH --signal=b:usr1@300 # 5 min before timeout\n\n# --- signal trap to requeue job before timeout ---\nrequeue_job() {\n echo ""[$(date)] caught sigusr1 (timeout warning), requeueing slurm job $SLURM_JOB_ID...""\n # optional: trigger checkpoint saving here\n # e.g., touch $checkpoint_dir/requeue_trigger\n scontrol requeue $SLURM_JOB_ID\n exit 0\n}\n\ntrap requeue_job sigusr1\n\n# set checkpoint flag based on restart count\nrestart_count=$(scontrol show job $SLURM_JOB_ID | grep -o 'Restarts=[0-9]*' | cut -d'=' -f2)\n\nif [ $restart_count -eq 0 ]; then\n 
restore_ckpt_flag=""--no-restore-ckpt""\nelse\n restore_ckpt_flag=""--restore-ckpt""\nfi\n\n\n\n# Log the sbatch script\ncat $0\n\nsource .venv/bin/activate\n\njob_name=$SLURM_JOB_NAME\nslurm_job_id=$SLURM_JOB_ID\n\ntags=""coinrun dynamics 500m_dataset mila_submission speed_test batch_size_36""\n\narray_records_dir=""TODO""\ntokenizer_ckpt_dir=""TODO""\nCHECKPOINT_DIR=""/fast/project/HFMI_SynergyUnit/jafar_ws/checkpoints/coinrun/dynamics/${job_name}/${slurm_job_id}""\nmkdir -p $CHECKPOINT_DIR\n\nenv | grep SLURM\n\ncurrent_branch=$(git rev-parse --abbrev-ref HEAD)\nif [ ""$current_branch"" != ""prepend-action-maskgit"" ]; then\n echo ""This script must be run from the prepend-action-maskgit branch. Current branch is $current_branch. Exiting.""\n exit 1\nfi\n\nsrun python jasmine/train_dynamics.py \\n --no-use-flash-attention \\n --patch_size=16 \\n --batch_size=36 \\n --wandb_id $SLURM_JOB_ID \\n --ckpt_dir $CHECKPOINT_DIR \\n --name=""${job_name}_${slurm_job_id}"" \\n --tags ${tags} \\n --entity instant-uv \\n --project jafar \\n --warmup_steps=0 \\n --wsd_decay_steps=0 \\n --num_steps=100000 \\n --log_image_interval=200000 \\n --tokenizer_checkpoint=""${tokenizer_ckpt_dir}"" \\n --data_dir=""${array_records_dir}/train"" &\n\nchild_pid=$!\n\nwait $child_pid\n\n",shellscript,tab +2,829,"extension-output-pdoom-org.crowd-code-#1-crowd-code",0,0,"12:06:09 PM [info] Activating crowd-code\n12:06:10 PM [info] Recording started\n12:06:10 PM [info] Initializing git provider using file system watchers...\n",Log,tab +3,1018,"extension-output-pdoom-org.crowd-code-#1-crowd-code",153,0,"12:06:10 PM [info] Git repository found\n12:06:10 PM [info] Git provider initialized successfully\n12:06:10 PM [info] Initial git state: [object Object]\n",Log,content +4,4974,"slurm/jobs/franz/berlin/coinrun/mila_submission/250M_dataset/speed_ablations/batch_size_36/coinrun_dynamics_flash_attn_ablation.sh",0,0,"",shellscript,tab +5,61661,"slurm/jobs/franz/berlin/coinrun/mila_submission/250M_dataset/speed_ablations/batch_size_36/coinrun_dynamics_grain_ablation.sh",0,0,"#!/usr/bin/env bash\n\n#SBATCH --nodes=1\n#SBATCH --ntasks-per-node=1\n#SBATCH --time=24:00:00\n#SBATCH --cpus-per-task=8\n#SBATCH --gres=gpu:1\n#SBATCH --output=/fast/project/HFMI_SynergyUnit/jafar_ws/logs/franz/coinrun/dynamics/%x_%j.log\n#SBATCH --error=/fast/project/HFMI_SynergyUnit/jafar_ws/logs/franz/coinrun/dynamics/%x_%j.log\n#SBATCH --job-name=dynamics_coinrun_mila_submission_batch_size_36_dataloader_ablation\n#SBATCH --requeue\n#SBATCH --signal=b:usr1@300 # 5 min before timeout\n\n# --- signal trap to requeue job before timeout ---\nrequeue_job() {\n echo ""[$(date)] caught sigusr1 (timeout warning), requeueing slurm job $SLURM_JOB_ID...""\n # optional: trigger checkpoint saving here\n # e.g., touch $checkpoint_dir/requeue_trigger\n scontrol requeue $SLURM_JOB_ID\n exit 0\n}\n\ntrap requeue_job sigusr1\n\n# set checkpoint flag based on restart count\nrestart_count=$(scontrol show job $SLURM_JOB_ID | grep -o 'Restarts=[0-9]*' | cut -d'=' -f2)\n\nif [ $restart_count -eq 0 ]; then\n restore_ckpt_flag=""--no-restore-ckpt""\nelse\n restore_ckpt_flag=""--restore-ckpt""\nfi\n\ncurrent_branch=$(git rev-parse --abbrev-ref HEAD)\nif [ ""$current_branch"" != ""ablation/use-pytorch-dataloader"" ]; then\n echo ""This script must be run from the ablation/use-pytorch-dataloader branch. Current branch is $current_branch. 
Exiting.""\n exit 1\nfi\n\n# Log the sbatch script\ncat $0\n\nsource .venv/bin/activate\n\njob_name=$SLURM_JOB_NAME\nslurm_job_id=$SLURM_JOB_ID\n\ntags=""coinrun dynamics 500m_dataset mila_submission speed_test batch_size_36 ablation dataloader""\n\nnpy_files_dir=""/fast/project/HFMI_SynergyUnit/jafar_ws/data/coinrun/array_records_250M_npy_arr_rec/npy/train""\ntokenizer_ckpt_dir=""TODO""\nCHECKPOINT_DIR=""/fast/project/HFMI_SynergyUnit/jafar_ws/checkpoints/coinrun/dynamics/${job_name}/${slurm_job_id}""\nmkdir -p $CHECKPOINT_DIR\n\nenv | grep SLURM\n\n\nsrun python jasmine/train_dynamics.py \\n --patch_size=16 \\n --batch_size=36 \\n --wandb_id $SLURM_JOB_ID \\n --ckpt_dir $CHECKPOINT_DIR \\n --name=""${job_name}_${slurm_job_id}"" \\n --tags ${tags} \\n --entity instant-uv \\n --project jafar \\n --warmup_steps=0 \\n --wsd_decay_steps=0 \\n --num_steps=100000 \\n --log_image_interval=200000 \\n --tokenizer_checkpoint=""${tokenizer_ckpt_dir}"" \\n --data_dir=""${npy_files_dir}/train"" &\n\nchild_pid=$!\n\nwait $child_pid\n\n",shellscript,tab +6,63536,"slurm/jobs/franz/berlin/coinrun/mila_submission/250M_dataset/speed_ablations/batch_size_248/.gitkeep",0,0,"hallo franz",plaintext,tab +7,64595,"slurm/jobs/franz/berlin/coinrun/mila_submission/250M_dataset/speed_ablations/batch_size_36/coinrun_dynamics_flash_attn_mixed_prec_ablation.sh",0,0,"#!/usr/bin/env bash\n\n#SBATCH --nodes=1\n#SBATCH --ntasks-per-node=1\n#SBATCH --time=24:00:00\n#SBATCH --cpus-per-task=8\n#SBATCH --gres=gpu:1\n#SBATCH --output=/fast/project/HFMI_SynergyUnit/jafar_ws/logs/franz/coinrun/dynamics/%x_%j.log\n#SBATCH --error=/fast/project/HFMI_SynergyUnit/jafar_ws/logs/franz/coinrun/dynamics/%x_%j.log\n#SBATCH --job-name=dynamics_coinrun_mila_submission_batch_size_36_flash_attn_ablation\n#SBATCH --requeue\n#SBATCH --signal=b:usr1@300 # 5 min before timeout\n\n# --- signal trap to requeue job before timeout ---\nrequeue_job() {\n echo ""[$(date)] caught sigusr1 (timeout warning), requeueing slurm job $SLURM_JOB_ID...""\n # optional: trigger checkpoint saving here\n # e.g., touch $checkpoint_dir/requeue_trigger\n scontrol requeue $SLURM_JOB_ID\n exit 0\n}\n\ntrap requeue_job sigusr1\n\n# set checkpoint flag based on restart count\nrestart_count=$(scontrol show job $SLURM_JOB_ID | grep -o 'Restarts=[0-9]*' | cut -d'=' -f2)\n\nif [ $restart_count -eq 0 ]; then\n restore_ckpt_flag=""--no-restore-ckpt""\nelse\n restore_ckpt_flag=""--restore-ckpt""\nfi\n\n\n\n# Log the sbatch script\ncat $0\n\nsource .venv/bin/activate\n\njob_name=$SLURM_JOB_NAME\nslurm_job_id=$SLURM_JOB_ID\n\ntags=""coinrun dynamics 500m_dataset mila_submission speed_test batch_size_36""\n\narray_records_dir=""TODO""\ntokenizer_ckpt_dir=""TODO""\nCHECKPOINT_DIR=""/fast/project/HFMI_SynergyUnit/jafar_ws/checkpoints/coinrun/dynamics/${job_name}/${slurm_job_id}""\nmkdir -p $CHECKPOINT_DIR\n\nenv | grep SLURM\n\ncurrent_branch=$(git rev-parse --abbrev-ref HEAD)\nif [ ""$current_branch"" != ""ablation/full-precision-training"" ]; then\n echo ""This script must be run from the ablation/full-precision-training branch. Current branch is $current_branch. 
Exiting.""\n exit 1\nfi\n\nsrun python jasmine/train_dynamics.py \\n --no-use-flash-attention \\n --patch_size=16 \\n --batch_size=36 \\n --wandb_id $SLURM_JOB_ID \\n --ckpt_dir $CHECKPOINT_DIR \\n --name=""${job_name}_${slurm_job_id}"" \\n --tags ${tags} \\n --entity instant-uv \\n --project jafar \\n --warmup_steps=0 \\n --wsd_decay_steps=0 \\n --num_steps=100000 \\n --log_image_interval=200000 \\n --tokenizer_checkpoint=""${tokenizer_ckpt_dir}"" \\n --data_dir=""${array_records_dir}/train"" &\n\nchild_pid=$!\n\nwait $child_pid\n\n",shellscript,tab +8,66486,"jasmine/utils/dataloader.py",0,0,"import jax\nimport numpy as np\nimport grain\nfrom typing import Any\nimport pickle\n\n\nclass EpisodeLengthFilter(grain.transforms.Filter):\n """"""\n A Grain Filter that keeps only episodes with sufficient length.\n """"""\n\n def __init__(self, seq_len: int, image_h: int, image_w: int, image_c: int):\n """"""Initializes the filter with sequence length requirements.""""""\n self.seq_len = seq_len\n self.image_h = image_h\n self.image_w = image_w\n self.image_c = image_c\n\n def filter(self, element: Any) -> bool:\n """"""\n Filters episodes based on length.\n\n Args:\n element: A dictionary representing one record from the DataSource.\n Expected to contain 'raw_video' (bytes) and 'sequence_length' (int)\n\n Returns:\n True if the episode has sufficient length, False otherwise.\n """"""\n assert isinstance(element, bytes)\n element = pickle.loads(element)\n\n current_episode_len = element[""sequence_length""]\n if current_episode_len < self.seq_len:\n print(\n f""Filtering out episode with length {current_episode_len}, which is ""\n f""shorter than the requested sequence length {self.seq_len}.""\n )\n return False\n\n return True\n\n\nclass ProcessEpisodeAndSlice(grain.transforms.RandomMap):\n """"""\n A Grain Transformation that combines parsing, slicing, and normalizing.\n """"""\n\n def __init__(self, seq_len: int, image_h: int, image_w: int, image_c: int):\n """"""Initializes the transformation with processing parameters.""""""\n self.seq_len = seq_len\n self.image_h = image_h\n self.image_w = image_w\n self.image_c = image_c\n\n def random_map(self, element: dict, rng: np.random.Generator) -> Any:\n """"""\n Processes a single raw episode from the data source.\n\n Args:\n element: A dictionary representing one record from the DataSource.\n Expected to contain 'raw_video' (bytes) and 'sequence_length' (int)\n rng: A per-record random number generator provided by the Grain sampler.\n\n Returns:\n A processed video sequence as a NumPy array with shape\n (seq_len, height, width, channels) and dtype float32.\n """"""\n assert isinstance(element, bytes)\n element = pickle.loads(element)\n\n video_shape = (\n element[""sequence_length""],\n self.image_h,\n self.image_w,\n self.image_c,\n )\n episode_tensor = np.frombuffer(element[""raw_video""], dtype=np.uint8)\n episode_tensor = episode_tensor.reshape(video_shape)\n\n current_episode_len = episode_tensor.shape[0]\n if current_episode_len < self.seq_len:\n raise ValueError(\n f""Episode length {current_episode_len} is shorter than ""\n f""requested sequence length {self.seq_len}. 
This should ""\n f""have been filtered out.""\n )\n\n max_start_idx = current_episode_len - self.seq_len\n\n start_idx = rng.integers(0, max_start_idx + 1)\n\n seq = episode_tensor[start_idx : start_idx + self.seq_len]\n\n data_dict = {""videos"": seq}\n if ""actions"" in element.keys():\n actions_tensor = np.array(element[""actions""])\n actions = actions_tensor[start_idx : start_idx + self.seq_len]\n data_dict[""actions""] = actions\n\n return data_dict\n\n\ndef get_dataloader(\n array_record_paths: list[str],\n seq_len: int,\n global_batch_size: int,\n image_h: int,\n image_w: int,\n image_c: int,\n num_workers: int = 1,\n prefetch_buffer_size: int = 1,\n seed: int = 42,\n):\n """"""\n Creates a data loading pipeline using Grain.\n """"""\n if not array_record_paths:\n raise ValueError(""array_record_paths list cannot be empty."")\n\n num_processes = jax.process_count()\n\n if global_batch_size % num_processes != 0:\n raise ValueError(\n f""Global batch size {global_batch_size} must be divisible by ""\n f""the number of JAX processes {num_processes} for proper sharding.""\n )\n per_process_batch_size = global_batch_size // num_processes\n\n source = grain.sources.ArrayRecordDataSource(array_record_paths)\n\n sampler = grain.samplers.IndexSampler(\n num_records=len(source),\n shard_options=grain.sharding.ShardByJaxProcess(drop_remainder=True),\n shuffle=True,\n num_epochs=None,\n seed=seed,\n )\n\n operations = [\n EpisodeLengthFilter(\n seq_len=seq_len, image_h=image_h, image_w=image_w, image_c=image_c\n ),\n ProcessEpisodeAndSlice(\n seq_len=seq_len, image_h=image_h, image_w=image_w, image_c=image_c\n ),\n grain.transforms.Batch(batch_size=per_process_batch_size, drop_remainder=True),\n ]\n\n read_options = grain.ReadOptions(\n prefetch_buffer_size=prefetch_buffer_size,\n num_threads=1,\n )\n dataloader = grain.DataLoader(\n data_source=source,\n sampler=sampler,\n operations=operations,\n worker_count=num_workers,\n worker_buffer_size=1,\n read_options=read_options,\n )\n\n return dataloader\n",python,tab +9,66817,"jasmine/train_dynamics.py",0,0,"import os\n\n\nos.environ.setdefault(""XLA_PYTHON_CLIENT_MEM_FRACTION"", ""0.98"")\n\nfrom dataclasses import dataclass, field\nimport itertools\nfrom typing import cast, Optional\n\nimport einops\nfrom jax.sharding import Mesh, PartitionSpec, NamedSharding\nfrom jax.experimental.mesh_utils import create_device_mesh\nimport optax\nimport orbax.checkpoint as ocp\nimport numpy as np\nimport dm_pix as pix\nimport jax\nimport jax.numpy as jnp\nimport tyro\nimport wandb\nimport grain\nimport flax.nnx as nnx\n\nfrom genie import Genie, restore_genie_components\nfrom utils.dataloader import get_dataloader\nfrom utils.train_utils import (\n get_lr_schedule,\n count_parameters_by_component,\n print_mem_stats,\n print_compiled_memory_stats,\n print_compiled_cost_analysis,\n)\n\n\n@dataclass\nclass Args:\n # Experiment\n num_steps: int = 200_000\n seed: int = 0\n seq_len: int = 16\n image_channels: int = 3\n image_height: int = 64\n image_width: int = 64\n data_dir: str = """"\n save_ckpt: bool = False\n restore_ckpt: bool = False\n # Optimization\n batch_size: int = 36\n init_lr: float = 0.0\n max_lr: float = 3e-5\n decay_end: float = 0.0\n wsd_decay_steps: int = (\n 20_000 # NOTE: wsd_decay_steps will only be used when using a wsd-schedule\n )\n warmup_steps: int = 5000\n lr_schedule: str = ""wsd"" # supported options: wsd, cos\n # Tokenizer\n tokenizer_dim: int = 512\n tokenizer_ffn_dim: int = 2048\n latent_patch_dim: int = 32\n num_patch_latents: 
int = 1024\n patch_size: int = 16\n tokenizer_num_blocks: int = 4\n tokenizer_num_heads: int = 8\n tokenizer_checkpoint: str = """"\n # LAM\n lam_dim: int = 512\n lam_ffn_dim: int = 2048\n latent_action_dim: int = 32\n num_actions: int = 6\n lam_patch_size: int = 16\n lam_num_blocks: int = 4\n lam_num_heads: int = 8\n lam_checkpoint: str = """"\n # Dynamics\n dyna_type: str = ""maskgit"" # supported options: maskgit, causal\n dyna_dim: int = 512\n dyna_ffn_dim: int = 2048\n dyna_num_blocks: int = 6\n dyna_num_heads: int = 8\n dropout: float = 0.0\n mask_limit: float = 0.5\n z_loss_weight: float = 0.0\n param_dtype = jnp.float32\n dtype = jnp.bfloat16\n use_flash_attention: bool = True\n use_gt_actions: bool = False\n # Logging\n log: bool = True\n entity: str = """"\n project: str = """"\n name: str = ""train_dynamics""\n tags: list[str] = field(default_factory=lambda: [""dynamics""])\n log_interval: int = 50\n log_image_interval: int = 1000\n ckpt_dir: str = """"\n log_checkpoint_interval: int = 5000\n log_checkpoint_keep_period: int = 20_000\n log_gradients: bool = False\n val_data_dir: str = """"\n val_interval: int = 20_000\n val_steps: int = 50\n eval_full_frame: bool = True\n val_maskgit_steps: int = 25\n val_temperature: float = 1\n val_sample_argmax: bool = False\n wandb_id: str = """"\n\n\ndef build_model(args: Args, rng: jax.Array) -> tuple[Genie, jax.Array]:\n rng, _rng = jax.random.split(rng)\n rngs = nnx.Rngs(_rng)\n genie = Genie(\n # Tokenizer\n in_dim=args.image_channels,\n tokenizer_dim=args.tokenizer_dim,\n tokenizer_ffn_dim=args.tokenizer_ffn_dim,\n latent_patch_dim=args.latent_patch_dim,\n num_patch_latents=args.num_patch_latents,\n patch_size=args.patch_size,\n tokenizer_num_blocks=args.tokenizer_num_blocks,\n tokenizer_num_heads=args.tokenizer_num_heads,\n # LAM\n lam_dim=args.lam_dim,\n lam_ffn_dim=args.lam_ffn_dim,\n latent_action_dim=args.latent_action_dim,\n num_actions=args.num_actions,\n lam_patch_size=args.lam_patch_size,\n lam_num_blocks=args.lam_num_blocks,\n lam_num_heads=args.lam_num_heads,\n lam_co_train=not args.lam_checkpoint,\n use_gt_actions=args.use_gt_actions,\n # Dynamics\n dyna_type=args.dyna_type,\n dyna_dim=args.dyna_dim,\n dyna_ffn_dim=args.dyna_ffn_dim,\n dyna_num_blocks=args.dyna_num_blocks,\n dyna_num_heads=args.dyna_num_heads,\n dropout=args.dropout,\n mask_limit=args.mask_limit,\n param_dtype=args.param_dtype,\n dtype=args.dtype,\n use_flash_attention=args.use_flash_attention,\n decode=False,\n rngs=rngs,\n )\n if args.use_gt_actions:\n assert (\n not args.lam_checkpoint\n ), ""Cannot use LAM when using ground-truth actions.""\n else:\n assert genie.lam is not None\n del genie.lam.decoder\n return genie, rng\n\n\ndef build_optimizer(genie: Genie, args: Args) -> nnx.ModelAndOptimizer:\n lr_schedule = get_lr_schedule(\n args.lr_schedule,\n args.init_lr,\n args.max_lr,\n args.decay_end,\n args.num_steps,\n args.warmup_steps,\n args.wsd_decay_steps,\n )\n tx = optax.adamw(\n learning_rate=lr_schedule,\n b1=0.9,\n b2=0.9,\n weight_decay=1e-4,\n mu_dtype=args.param_dtype, # moments in full precision\n )\n optimizer = nnx.ModelAndOptimizer(genie, tx)\n return optimizer\n\n\ndef build_mesh_and_sharding(\n num_devices: int,\n) -> tuple[Mesh, NamedSharding, NamedSharding, NamedSharding]:\n device_mesh_arr = create_device_mesh((num_devices,))\n mesh = Mesh(devices=device_mesh_arr, axis_names=(""data"",))\n replicated_sharding = NamedSharding(mesh, PartitionSpec())\n videos_sharding = NamedSharding(mesh, PartitionSpec(""data"", None, None, None, 
None))\n actions_sharding = NamedSharding(mesh, PartitionSpec(""data"", None))\n return mesh, replicated_sharding, videos_sharding, actions_sharding\n\n\ndef shard_optimizer_states(\n optimizer: nnx.ModelAndOptimizer, replicated_sharding: NamedSharding\n) -> None:\n model_state = nnx.state(optimizer.model)\n model_sharded_state = jax.lax.with_sharding_constraint(\n model_state, replicated_sharding\n )\n nnx.update(optimizer.model, model_sharded_state)\n optimizer_state = nnx.state(optimizer, nnx.optimizer.OptState)\n optimizer_sharded_state = jax.lax.with_sharding_constraint(\n optimizer_state, replicated_sharding\n )\n nnx.update(optimizer, optimizer_sharded_state)\n\n\ndef build_dataloader(args: Args, data_dir: str) -> grain.DataLoaderIterator:\n image_shape = (args.image_height, args.image_width, args.image_channels)\n array_record_files = [\n os.path.join(data_dir, x)\n for x in os.listdir(data_dir)\n if x.endswith("".array_record"")\n ]\n grain_dataloader = get_dataloader(\n array_record_files,\n args.seq_len,\n # NOTE: We deliberately pass the global batch size\n # The dataloader shards the dataset across all processes\n args.batch_size,\n *image_shape,\n num_workers=8,\n prefetch_buffer_size=1,\n seed=args.seed,\n )\n initial_state = grain_dataloader._create_initial_state()\n grain_iterator = grain.DataLoaderIterator(grain_dataloader, initial_state)\n return grain_iterator\n\n\ndef build_checkpoint_manager(args: Args) -> Optional[ocp.CheckpointManager]:\n if args.restore_ckpt or args.save_ckpt:\n handler_registry = ocp.handlers.DefaultCheckpointHandlerRegistry()\n handler_registry.add(\n ""model_state"", ocp.args.PyTreeSave, ocp.handlers.PyTreeCheckpointHandler\n )\n handler_registry.add(\n ""model_state"", ocp.args.PyTreeRestore, ocp.handlers.PyTreeCheckpointHandler\n )\n handler_registry.add(\n ""train_dataloader_state"",\n grain.checkpoint.CheckpointSave,\n cast(ocp.handlers.CheckpointHandler, grain.checkpoint.CheckpointHandler),\n )\n handler_registry.add(\n ""train_dataloader_state"",\n grain.checkpoint.CheckpointRestore,\n cast(ocp.handlers.CheckpointHandler, grain.checkpoint.CheckpointHandler),\n )\n if args.val_data_dir:\n handler_registry.add(\n ""val_dataloader_state"",\n grain.checkpoint.CheckpointSave,\n cast(\n ocp.handlers.CheckpointHandler, grain.checkpoint.CheckpointHandler\n ),\n )\n handler_registry.add(\n ""val_dataloader_state"",\n grain.checkpoint.CheckpointRestore,\n cast(\n ocp.handlers.CheckpointHandler, grain.checkpoint.CheckpointHandler\n ),\n )\n checkpoint_options = ocp.CheckpointManagerOptions(\n save_interval_steps=args.log_checkpoint_interval,\n max_to_keep=3,\n keep_period=args.log_checkpoint_keep_period,\n step_format_fixed_length=6,\n cleanup_tmp_directories=True,\n )\n checkpoint_manager = ocp.CheckpointManager(\n args.ckpt_dir,\n options=checkpoint_options,\n handler_registry=handler_registry,\n )\n return checkpoint_manager\n else:\n return None\n\n\ndef restore_or_initialize_components(\n args: Args,\n checkpoint_manager: Optional[ocp.CheckpointManager],\n optimizer: nnx.ModelAndOptimizer,\n train_iterator: grain.DataLoaderIterator,\n rng: jax.Array,\n replicated_sharding: NamedSharding,\n val_iterator: Optional[grain.DataLoaderIterator],\n restore_step: Optional[int] = None,\n) -> tuple[\n int,\n nnx.ModelAndOptimizer,\n grain.DataLoaderIterator,\n grain.DataLoaderIterator,\n jax.Array,\n]:\n step = 0\n if checkpoint_manager and restore_step is None:\n restore_step = checkpoint_manager.latest_step()\n if args.restore_ckpt:\n assert 
checkpoint_manager is not None\n abstract_optimizer = nnx.eval_shape(lambda: optimizer)\n abstract_optimizer_state = nnx.state(abstract_optimizer)\n if val_iterator:\n restore_args = ocp.args.Composite(\n model_state=ocp.args.PyTreeRestore(abstract_optimizer_state), # type: ignore\n train_dataloader_state=grain.checkpoint.CheckpointRestore(train_iterator), # type: ignore\n val_dataloader_state=grain.checkpoint.CheckpointRestore(val_iterator), # type: ignore\n )\n else:\n restore_args = ocp.args.Composite(\n model_state=ocp.args.PyTreeRestore(abstract_optimizer_state), # type: ignore\n train_dataloader_state=grain.checkpoint.CheckpointRestore(train_iterator), # type: ignore\n )\n restored = checkpoint_manager.restore(\n checkpoint_manager.latest_step(), args=restore_args\n )\n restored_optimizer_state = restored[""model_state""]\n nnx.update(optimizer, restored_optimizer_state)\n train_iterator = restored[""train_dataloader_state""]\n if val_iterator:\n val_iterator = restored[""val_dataloader_state""]\n step = checkpoint_manager.latest_step() or 0\n print(f""Restored dataloader and model state from step {step}"")\n else:\n # Restore from pre-trained tokenizer (and LAM)\n rng, _rng = jax.random.split(rng)\n optimizer = restore_genie_components(optimizer, replicated_sharding, _rng, args)\n return step, optimizer, train_iterator, val_iterator, rng\n\n\ndef _calculate_top_k_accuracy(\n token_logits_BTNV: jax.Array,\n video_tokens_BTN: jax.Array,\n mask_BTN: jax.Array,\n k: int,\n) -> jax.Array:\n _, topk_indices_BTNK = jax.lax.top_k(token_logits_BTNV, k)\n topk_correct = jnp.any(\n topk_indices_BTNK == video_tokens_BTN[..., jnp.newaxis], axis=-1\n )\n topk_acc = (mask_BTN * topk_correct).sum() / mask_BTN.sum()\n return topk_acc\n\n\ndef _calculate_step_metrics(\n outputs: dict[str, jax.Array],\n gt: jax.Array,\n num_actions: int,\n num_patch_latents: int,\n) -> tuple[jax.Array, dict]:\n mask_BTN = outputs[""mask""]\n outputs[""token_logits""] = outputs[""token_logits""].astype(jnp.float32)\n ce_loss = optax.softmax_cross_entropy_with_integer_labels(\n outputs[""token_logits""], outputs[""video_tokens""]\n )\n ce_loss = (mask_BTN * ce_loss).sum() / mask_BTN.sum()\n z_val = jax.nn.logsumexp(outputs[""token_logits""], axis=-1)\n z_loss_metric = (mask_BTN * (z_val**2)).sum() / mask_BTN.sum()\n\n masked_token_top_1_acc = _calculate_top_k_accuracy(\n outputs[""token_logits""], outputs[""video_tokens""], mask_BTN, 1\n )\n masked_token_top_2_acc = _calculate_top_k_accuracy(\n outputs[""token_logits""], outputs[""video_tokens""], mask_BTN, 2\n )\n masked_token_top_5_acc = _calculate_top_k_accuracy(\n outputs[""token_logits""], outputs[""video_tokens""], mask_BTN, 5\n )\n masked_token_top_16_acc = _calculate_top_k_accuracy(\n outputs[""token_logits""], outputs[""video_tokens""], mask_BTN, 16\n )\n\n select_probs = jax.nn.softmax(outputs[""token_logits""])\n gt_val = gt.clip(0, 1).reshape(-1, *gt.shape[2:])\n recon = outputs[""recon""].clip(0, 1).reshape(-1, *outputs[""recon""].shape[2:])\n psnr = jnp.asarray(pix.psnr(gt_val, recon)).mean()\n ssim = jnp.asarray(pix.ssim(gt_val, recon)).mean()\n _, index_counts_tokenizer = jnp.unique_counts(\n jnp.ravel(outputs[""video_tokens""]),\n size=num_patch_latents,\n fill_value=0,\n )\n codebook_usage_tokenizer = (index_counts_tokenizer != 0).mean()\n metrics = dict(\n cross_entropy_loss=ce_loss,\n masked_token_top1_accuracy=masked_token_top_1_acc,\n masked_token_top2_accuracy=masked_token_top_2_acc,\n masked_token_top5_accuracy=masked_token_top_5_acc,\n 
masked_token_top16_accuracy=masked_token_top_16_acc,\n select_logit=outputs[""token_logits""].max(-1).mean(),\n select_p=select_probs.max(-1).mean(),\n entropy=jax.scipy.special.entr(select_probs).sum(-1).mean(),\n z_loss=z_loss_metric,\n psnr=psnr,\n ssim=ssim,\n codebook_usage_tokenizer=codebook_usage_tokenizer,\n )\n if ""lam_indices"" in outputs.keys():\n _, index_counts_lam = jnp.unique_counts(\n jnp.ravel(outputs[""lam_indices""]),\n size=num_actions,\n fill_value=0,\n )\n codebook_usage_lam = (index_counts_lam != 0).mean()\n metrics[""codebook_usage_lam""] = codebook_usage_lam\n return ce_loss, metrics\n\n\ndef main(args: Args) -> None:\n jax.distributed.initialize()\n num_devices = jax.device_count()\n if num_devices == 0:\n raise ValueError(""No JAX devices found."")\n print(f""Running on {num_devices} devices."")\n\n if args.batch_size % num_devices != 0:\n raise ValueError(\n f""Global batch size {args.batch_size} must be divisible by ""\n f""number of devices {num_devices}.""\n )\n\n rng = jax.random.key(args.seed)\n\n # --- Initialize model ---\n genie, rng = build_model(args, rng)\n _, params, _ = nnx.split(genie, nnx.Param, ...)\n param_counts = count_parameters_by_component(params)\n\n if args.log and jax.process_index() == 0:\n wandb_init_kwargs = {\n ""entity"": args.entity,\n ""project"": args.project,\n ""name"": args.name,\n ""tags"": args.tags,\n ""group"": ""debug"",\n ""config"": args,\n }\n\n if args.wandb_id:\n wandb_init_kwargs.update(\n {\n ""id"": args.wandb_id,\n ""resume"": ""allow"",\n }\n )\n wandb.init(**wandb_init_kwargs)\n\n wandb.config.update({""model_param_count"": param_counts})\n\n print(""Parameter counts:"")\n print(param_counts)\n\n # --- Initialize optimizer ---\n optimizer = build_optimizer(genie, args)\n del genie\n\n # FIXME: switch to create_hybrid_device_mesh for runs spanning multiple nodes\n _, replicated_sharding, videos_sharding, actions_sharding = build_mesh_and_sharding(\n num_devices\n )\n\n shard_optimizer_states(optimizer, replicated_sharding)\n\n # --- Initialize checkpoint manager ---\n checkpoint_manager = build_checkpoint_manager(args)\n\n # --- Create DataLoaderIterator from dataloader ---\n train_iterator = build_dataloader(args, args.data_dir)\n val_iterator = None\n if args.val_data_dir:\n val_iterator = build_dataloader(args, args.val_data_dir)\n\n # --- Restore checkpoint ---\n step, optimizer, train_iterator, val_iterator, rng = (\n restore_or_initialize_components(\n args,\n checkpoint_manager,\n optimizer,\n train_iterator,\n rng,\n replicated_sharding,\n val_iterator,\n )\n )\n\n # --- Define loss and train step (close over args) ---\n def dynamics_loss_fn(\n model: Genie,\n inputs: dict,\n ) -> tuple[jax.Array, tuple[jax.Array, dict]]:\n gt = jnp.asarray(inputs[""videos""], dtype=jnp.float32) / 255.0\n inputs[""videos""] = gt.astype(args.dtype)\n outputs = model(inputs)\n ce_loss, metrics = _calculate_step_metrics(\n outputs, gt, args.num_actions, args.num_patch_latents\n )\n z_loss = metrics[""z_loss""]\n total_loss = ce_loss + args.z_loss_weight * z_loss\n metrics[""total_loss""] = total_loss\n return total_loss, (outputs[""recon""], metrics)\n\n @nnx.jit(donate_argnums=0)\n def train_step(\n optimizer: nnx.ModelAndOptimizer, inputs: dict\n ) -> tuple[jax.Array, jax.Array, dict]:\n def loss_fn(model: Genie) -> tuple[jax.Array, tuple[jax.Array, dict]]:\n model.train()\n return dynamics_loss_fn(model, inputs)\n\n (loss, (recon, metrics)), grads = nnx.value_and_grad(loss_fn, has_aux=True)(\n optimizer.model\n )\n 
optimizer.update(grads)\n if args.log_gradients:\n metrics[""gradients_std/""] = jax.tree.map(\n lambda x: x.std(), grads[""params""][""dynamics""]\n )\n return loss, recon, metrics\n\n @nnx.jit\n def val_step(genie: Genie, inputs: dict) -> dict:\n """"""Evaluate model and compute metrics""""""\n genie.eval()\n gt = jnp.asarray(inputs[""videos""], dtype=jnp.float32) / 255.0\n (loss, (recon, metrics)) = dynamics_loss_fn(genie, inputs)\n val_output = {""loss"": loss, ""recon"": recon, ""metrics"": metrics}\n\n # --- Evaluate full frame prediction (sampling) ---\n if args.eval_full_frame:\n inputs[""videos""] = gt.astype(args.dtype)\n tokenizer_outputs = genie.tokenizer.vq_encode(\n inputs[""videos""], training=False\n )\n tokens_full_frame = tokenizer_outputs[""indices""]\n lam_indices_E = None\n if not args.use_gt_actions:\n lam_indices_E = genie.vq_encode(inputs, training=False)\n inputs[""latent_actions""] = lam_indices_E\n inputs[""videos""] = inputs[""videos""][\n :, :-1\n ] # remove last frame for generation\n recon_full_frame, logits_full_frame = genie.sample(\n inputs,\n args.seq_len,\n args.val_temperature,\n args.val_sample_argmax,\n args.val_maskgit_steps,\n )\n # Calculate metrics for the last frame only\n step_outputs = {\n ""recon"": recon_full_frame[:, -1],\n ""token_logits"": logits_full_frame[:, -1],\n ""video_tokens"": tokens_full_frame[:, -1],\n ""mask"": jnp.ones_like(tokens_full_frame[:, -1]),\n }\n if lam_indices_E is not None:\n lam_indices_B = lam_indices_E.reshape((-1, args.seq_len - 1))[:, -1]\n step_outputs[""lam_indices""] = lam_indices_B\n\n loss_full_frame, metrics_full_frame = _calculate_step_metrics(\n step_outputs, gt[:, -1], args.num_actions, args.num_patch_latents\n )\n val_output.update(\n {\n ""loss_full_frame"": loss_full_frame,\n ""recon_full_frame"": recon_full_frame,\n ""metrics_full_frame"": metrics_full_frame,\n }\n )\n return val_output\n\n def calculate_validation_metrics(val_dataloader, genie, rng):\n step = 0\n loss_per_step = []\n metrics_per_step = []\n loss_full_frame_per_step = []\n metrics_full_frame_per_step = []\n batch = None\n recon = None\n recon_full_frame = None\n for batch in val_dataloader:\n rng, _rng_mask = jax.random.split(rng, 2)\n batch[""rng""] = _rng_mask\n val_outputs = val_step(genie, batch)\n loss_per_step.append(val_outputs[""loss""])\n metrics_per_step.append(val_outputs[""metrics""])\n recon = val_outputs[""recon""]\n if args.eval_full_frame:\n loss_full_frame_per_step.append(val_outputs[""loss_full_frame""])\n metrics_full_frame_per_step.append(val_outputs[""metrics_full_frame""])\n recon_full_frame = val_outputs[""recon_full_frame""]\n step += 1\n if step > args.val_steps:\n break\n\n if step < args.val_steps:\n print(\n f""Warning: Your validation dataset is too small to make val_steps many steps. 
Made {step} steps, expected {args.val_steps}""\n )\n\n val_metrics = {\n f""val_{key}"": np.mean([float(m[key]) for m in metrics_per_step])\n for key in metrics_per_step[0].keys()\n }\n val_metrics[""val_loss""] = np.mean(loss_per_step)\n if args.eval_full_frame:\n val_metrics_full_frame = {\n f""val_full_frame_{key}"": np.mean(\n [float(m[key]) for m in metrics_full_frame_per_step]\n )\n for key in metrics_full_frame_per_step[0].keys()\n }\n val_metrics.update(val_metrics_full_frame)\n val_metrics[""val_full_frame_loss""] = np.mean(loss_full_frame_per_step)\n return val_metrics, batch, recon, recon_full_frame\n\n # --- TRAIN LOOP ---\n dataloader_train = (\n {\n ""videos"": jax.make_array_from_process_local_data(\n videos_sharding, local_data=elem[""videos""]\n ),\n ""actions"": (\n jax.make_array_from_process_local_data(\n actions_sharding, elem[""actions""]\n )\n if args.use_gt_actions\n else None\n ),\n }\n for elem in train_iterator\n )\n dataloader_val = None\n if val_iterator:\n dataloader_val = (\n {\n ""videos"": jax.make_array_from_process_local_data(\n videos_sharding, elem[""videos""]\n ),\n ""actions"": (\n jax.make_array_from_process_local_data(\n actions_sharding, elem[""actions""]\n )\n if args.use_gt_actions\n else None\n ),\n }\n for elem in val_iterator\n )\n if jax.process_index() == 0:\n first_batch = next(dataloader_train)\n first_batch[""rng""] = rng # type: ignore\n compiled = train_step.lower(optimizer, first_batch).compile()\n print_compiled_memory_stats(compiled.memory_analysis())\n print_compiled_cost_analysis(compiled.cost_analysis())\n # Do not skip the first batch during training\n dataloader_train = itertools.chain([first_batch], dataloader_train)\n print(f""Starting training from step {step}..."")\n first_step = step\n while step < args.num_steps:\n for batch in dataloader_train:\n # --- Train step ---\n rng, _rng_mask = jax.random.split(rng, 2)\n batch[""rng""] = _rng_mask\n loss, recon, metrics = train_step(optimizer, batch)\n if step == first_step:\n print_mem_stats(""After params initialized"")\n step += 1\n\n # --- Validation loss ---\n val_results = {}\n if dataloader_val and step % args.val_interval == 0:\n rng, _rng_mask_val = jax.random.split(rng, 2)\n print(""Calculating validation metrics..."")\n val_metrics, val_gt_batch, val_recon, val_recon_full_frame = (\n calculate_validation_metrics(\n dataloader_val, optimizer.model, _rng_mask_val\n )\n )\n print(f""Step {step}, validation loss: {val_metrics['val_loss']}"")\n val_results = {\n ""metrics"": val_metrics,\n ""gt_batch"": val_gt_batch,\n ""recon"": val_recon,\n ""full_frame"": val_recon_full_frame,\n }\n\n # --- Logging ---\n if args.log:\n if step % args.log_interval == 0 and jax.process_index() == 0:\n log_dict = {""loss"": loss, ""step"": step, **metrics}\n if val_results:\n log_dict.update(val_results[""metrics""])\n wandb.log(log_dict)\n if step % args.log_image_interval == 0:\n gt_seq = batch[""videos""][0].astype(jnp.float32) / 255.0\n recon_seq = recon[0].clip(0, 1)\n comparison_seq = jnp.concatenate((gt_seq, recon_seq), axis=1)\n comparison_seq = einops.rearrange(\n comparison_seq * 255, ""t h w c -> h (t w) c""\n )\n if val_results:\n val_results[""gt_seq_val""] = (\n val_results[""gt_batch""][""videos""][0].astype(jnp.float32)\n / 255.0\n )\n val_results[""recon_seq_val""] = val_results[""recon""][0].clip(\n 0, 1\n )\n val_comparison_seq = jnp.concatenate(\n (val_results[""gt_seq_val""], val_results[""recon_seq_val""]),\n axis=1,\n )\n val_results[""val_comparison_seq""] = 
einops.rearrange(\n val_comparison_seq * 255, ""t h w c -> h (t w) c""\n )\n if args.eval_full_frame:\n val_results[""full_frame_seq_val""] = val_results[\n ""full_frame""\n ][0].clip(0, 1)\n val_results[""val_full_frame_comparison_seq""] = (\n jnp.concatenate(\n (\n val_results[""gt_seq_val""],\n val_results[""full_frame_seq_val""],\n ),\n axis=1,\n )\n )\n val_results[""val_full_frame_comparison_seq""] = (\n einops.rearrange(\n val_results[""val_full_frame_comparison_seq""] * 255,\n ""t h w c -> h (t w) c"",\n )\n )\n # NOTE: Process-dependent control flow deliberately happens\n # after indexing operation since it must not contain code\n # sections that lead to cross-accelerator communication.\n if jax.process_index() == 0:\n log_images = dict(\n image=wandb.Image(np.asarray(gt_seq[args.seq_len - 1])),\n recon=wandb.Image(np.asarray(recon_seq[args.seq_len - 1])),\n true_vs_recon=wandb.Image(\n np.asarray(comparison_seq.astype(np.uint8))\n ),\n )\n if val_results:\n log_images.update(\n dict(\n val_image=wandb.Image(\n np.asarray(\n val_results[""gt_seq_val""][args.seq_len - 1]\n )\n ),\n val_recon=wandb.Image(\n np.asarray(\n val_results[""recon_seq_val""][\n args.seq_len - 1\n ]\n )\n ),\n val_true_vs_recon=wandb.Image(\n np.asarray(\n val_results[""val_comparison_seq""].astype(\n np.uint8\n )\n )\n ),\n )\n )\n if args.eval_full_frame:\n log_images.update(\n dict(\n val_full_frame=wandb.Image(\n np.asarray(\n val_results[""full_frame_seq_val""][\n args.seq_len - 1\n ]\n )\n ),\n val_true_vs_full_frame=wandb.Image(\n np.asarray(\n val_results[\n ""val_full_frame_comparison_seq""\n ].astype(np.uint8)\n )\n ),\n )\n )\n wandb.log(log_images)\n # --- Checkpointing ---\n if args.save_ckpt and step % args.log_checkpoint_interval == 0:\n assert checkpoint_manager is not None\n optimizer_state = nnx.state(optimizer)\n if val_iterator:\n ckpt_manager_args = ocp.args.Composite(\n model_state=ocp.args.PyTreeSave(optimizer_state), # type: ignore\n train_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n train_iterator # type: ignore\n ),\n val_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n val_iterator # type: ignore\n ),\n )\n else:\n ckpt_manager_args = ocp.args.Composite(\n model_state=ocp.args.PyTreeSave(optimizer_state), # type: ignore\n train_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n train_iterator # type: ignore\n ),\n )\n checkpoint_manager.save(step, args=ckpt_manager_args)\n print(f""Saved checkpoint at step {step}"")\n if step >= args.num_steps:\n break\n\n if checkpoint_manager:\n checkpoint_manager.close()\n\n\nif __name__ == ""__main__"":\n args = tyro.cli(Args)\n main(args)\n",python,tab +10,70974,"jasmine/train_dynamics.py",811,0,"",python,selection_mouse +11,73694,"TERMINAL",0,0,"undefined[tum_cte0515@hkn1990 jasmine]$ branch",,terminal_command +12,73716,"TERMINAL",0,0,"]633;Cprepend-action-maskgit\r\n]0;tum_cte0515@hkn1990:~/Projects/jasmine",,terminal_output +13,232200,"jasmine/train_dynamics.py",970,0,"",python,selection_mouse +14,232209,"jasmine/train_dynamics.py",969,0,"",python,selection_command +15,236056,"jasmine/train_dynamics.py",943,0,"",python,selection_command +16,236190,"jasmine/train_dynamics.py",916,0,"",python,selection_command +17,236300,"jasmine/train_dynamics.py",888,0,"",python,selection_command +18,236409,"jasmine/train_dynamics.py",865,0,"",python,selection_command +19,236527,"jasmine/train_dynamics.py",843,0,"",python,selection_command 
+20,236711,"jasmine/train_dynamics.py",819,0,"",python,selection_command +21,237083,"jasmine/train_dynamics.py",818,0,"",python,selection_command +22,237583,"jasmine/train_dynamics.py",817,0,"",python,selection_command +23,237614,"jasmine/train_dynamics.py",816,0,"",python,selection_command +24,237643,"jasmine/train_dynamics.py",815,0,"",python,selection_command +25,237674,"jasmine/train_dynamics.py",814,0,"",python,selection_command +26,237719,"jasmine/train_dynamics.py",813,0,"",python,selection_command +27,237760,"jasmine/train_dynamics.py",812,0,"",python,selection_command +28,237761,"jasmine/train_dynamics.py",811,0,"",python,selection_command +29,237802,"jasmine/train_dynamics.py",810,0,"",python,selection_command +30,237846,"jasmine/train_dynamics.py",809,0,"",python,selection_command +31,237888,"jasmine/train_dynamics.py",808,0,"",python,selection_command +32,237903,"jasmine/train_dynamics.py",807,0,"",python,selection_command +33,237933,"jasmine/train_dynamics.py",806,0,"",python,selection_command +34,237950,"jasmine/train_dynamics.py",805,0,"",python,selection_command +35,237998,"jasmine/train_dynamics.py",804,0,"",python,selection_command +36,238041,"jasmine/train_dynamics.py",803,0,"",python,selection_command +37,238083,"jasmine/train_dynamics.py",802,0,"",python,selection_command +38,238084,"jasmine/train_dynamics.py",801,0,"",python,selection_command +39,238369,"jasmine/train_dynamics.py",800,0,"",python,selection_command +40,239735,"jasmine/train_dynamics.py",829,0,"",python,selection_command +41,240808,"jasmine/train_dynamics.py",847,0,"",python,selection_command +42,241892,"jasmine/train_dynamics.py",869,0,"",python,selection_command +43,242787,"jasmine/train_dynamics.py",897,0,"",python,selection_command +44,243770,"jasmine/train_dynamics.py",924,0,"",python,selection_command +45,244158,"jasmine/train_dynamics.py",950,0,"",python,selection_command +46,244866,"jasmine/train_dynamics.py",973,0,"",python,selection_command +47,249220,"jasmine/train_dynamics.py",970,0,"",python,selection_mouse +48,249221,"jasmine/train_dynamics.py",969,0,"",python,selection_command +49,249760,"jasmine/train_dynamics.py",996,0,"",python,selection_mouse +50,249931,"jasmine/train_dynamics.py",993,5,"False",python,selection_mouse +51,251387,"jasmine/train_dynamics.py",972,0,"",python,selection_mouse +52,254905,"jasmine/train_dynamics.py",971,0,"",python,selection_command +53,263709,"slurm/jobs/franz/berlin/coinrun/mila_submission/250M_dataset/arch_ablations/coinrun_dynamics_gt_actions.sh",0,0,"#!/usr/bin/env bash\n\n#SBATCH --nodes=1\n#SBATCH --ntasks-per-node=1\n#SBATCH --time=24:00:00\n#SBATCH --cpus-per-task=8\n#SBATCH --gres=gpu:1\n#SBATCH --output=/fast/project/HFMI_SynergyUnit/jafar_ws/logs/franz/coinrun/dynamics/%x_%j.log\n#SBATCH --error=/fast/project/HFMI_SynergyUnit/jafar_ws/logs/franz/coinrun/dynamics/%x_%j.log\n#SBATCH --job-name=dynamics_coinrun_mila_submission_gt_actions\n#SBATCH --requeue\n#SBATCH --signal=b:usr1@300 # 5 min before timeout\n\n# --- signal trap to requeue job before timeout ---\nrequeue_job() {\n echo ""[$(date)] caught sigusr1 (timeout warning), requeueing slurm job $SLURM_JOB_ID...""\n # optional: trigger checkpoint saving here\n # e.g., touch $checkpoint_dir/requeue_trigger\n scontrol requeue $SLURM_JOB_ID\n exit 0\n}\n\ntrap requeue_job sigusr1\n\n# set checkpoint flag based on restart count\nrestart_count=$(scontrol show job $SLURM_JOB_ID | grep -o 'Restarts=[0-9]*' | cut -d'=' -f2)\n\nif [ $restart_count -eq 0 ]; then\n 
restore_ckpt_flag=""--no-restore-ckpt""\nelse\n restore_ckpt_flag=""--restore-ckpt""\nfi\n\n\n\n# Log the sbatch script\ncat $0\n\nsource .venv/bin/activate\n\njob_name=$SLURM_JOB_NAME\nslurm_job_id=$SLURM_JOB_ID\n\ntags=""coinrun dynamics 250m_dataset mila_submission ablation gt-actions""\n\narray_records_dir=""/fast/project/HFMI_SynergyUnit/jafar_ws/data/coinrun/array_records_250M_npy_arr_rec/array_record/""\ntokenizer_ckpt_dir=""/fast/project/HFMI_SynergyUnit/jafar_ws/checkpoints/coinrun/tokenizer/tokenizer_coinrun_mila_submission_29736/""\nCHECKPOINT_DIR=""/fast/project/HFMI_SynergyUnit/jafar_ws/checkpoints/coinrun/dynamics/${job_name}/${slurm_job_id}""\nmkdir -p $CHECKPOINT_DIR\n\nenv | grep SLURM\n\ncurrent_branch=$(git rev-parse --abbrev-ref HEAD)\nif [ ""$current_branch"" != ""prepend-action-maskgit"" ]; then\n echo ""This script must be run from the prepend-action-maskgit branch. Current branch is $current_branch. Exiting.""\n exit 1\nfi\n\n\nsrun python jasmine/train_dynamics.py \\n --patch_size=16 \\n --use_gt_actions \\n --num_actions=15 \\n --save_ckpt \\n $restore_ckpt_flag \\n --wandb_id $SLURM_JOB_ID \\n --ckpt_dir $CHECKPOINT_DIR \\n --name=""${job_name}_${slurm_job_id}"" \\n --tags ${tags} \\n --entity instant-uv \\n --project jafar \\n --tokenizer_checkpoint=""${tokenizer_ckpt_dir}"" \\n --val_data_dir=""${array_records_dir}/val"" \\n --data_dir=""${array_records_dir}/train"" &\n\nchild_pid=$!\n\nwait $child_pid\n\n",shellscript,tab +54,268102,"slurm/jobs/franz/berlin/coinrun/mila_submission/250M_dataset/arch_ablations/coinrun_dynamics_gt_actions.sh",2009,0,"",shellscript,selection_mouse +55,268206,"slurm/jobs/franz/berlin/coinrun/mila_submission/250M_dataset/arch_ablations/coinrun_dynamics_gt_actions.sh",2008,2,"--",shellscript,selection_mouse +56,268372,"slurm/jobs/franz/berlin/coinrun/mila_submission/250M_dataset/arch_ablations/coinrun_dynamics_gt_actions.sh",2008,2,"--",shellscript,selection_mouse +57,268417,"slurm/jobs/franz/berlin/coinrun/mila_submission/250M_dataset/arch_ablations/coinrun_dynamics_gt_actions.sh",2008,11,"--save_ckpt",shellscript,selection_mouse +58,268620,"slurm/jobs/franz/berlin/coinrun/mila_submission/250M_dataset/arch_ablations/coinrun_dynamics_gt_actions.sh",2008,12,"--save_ckpt ",shellscript,selection_mouse +59,270030,"jasmine/train_dynamics.py",0,0,"",python,tab +60,271464,"jasmine/train_dynamics.py",999,0,"",python,selection_command +61,272472,"jasmine/train_dynamics.py",1030,0,"",python,selection_command +62,272837,"jasmine/train_dynamics.py",1049,0,"",python,selection_command +63,274898,"jasmine/train_dynamics.py",1074,0,"",python,selection_command +64,276986,"jasmine/train_dynamics.py",1099,0,"",python,selection_command +65,278022,"jasmine/train_dynamics.py",1124,0,"",python,selection_command +66,278768,"jasmine/train_dynamics.py",1151,0,"",python,selection_command +67,282259,"jasmine/train_dynamics.py",1180,0,"",python,selection_command +68,282643,"jasmine/train_dynamics.py",1264,0,"",python,selection_command +69,283040,"jasmine/train_dynamics.py",1270,0,"",python,selection_command +70,284211,"jasmine/train_dynamics.py",1299,0,"",python,selection_command +71,288440,"jasmine/train_dynamics.py",1359,0,"",python,selection_command +72,288887,"jasmine/train_dynamics.py",1375,0,"",python,selection_command +73,293741,"jasmine/train_dynamics.py",1359,0,"",python,selection_command +74,293930,"jasmine/train_dynamics.py",1299,0,"",python,selection_command +75,294093,"jasmine/train_dynamics.py",1270,0,"",python,selection_command 
+76,306115,"jasmine/train_dynamics.py",1299,0,"",python,selection_command +77,306576,"jasmine/train_dynamics.py",1359,0,"",python,selection_command +78,306872,"jasmine/train_dynamics.py",1375,0,"",python,selection_command +79,311589,"jasmine/train_dynamics.py",1404,0,"",python,selection_command +80,313221,"jasmine/train_dynamics.py",1438,0,"",python,selection_command +81,315584,"jasmine/train_dynamics.py",1469,0,"",python,selection_command +82,317404,"jasmine/train_dynamics.py",1503,0,"",python,selection_command +83,318039,"jasmine/train_dynamics.py",1469,0,"",python,selection_command +84,320343,"jasmine/train_dynamics.py",1503,0,"",python,selection_command +85,321059,"jasmine/train_dynamics.py",1469,0,"",python,selection_command +86,326043,"jasmine/train_dynamics.py",1503,0,"",python,selection_command +87,332053,"jasmine/train_dynamics.py",1528,0,"",python,selection_command +88,358698,"TERMINAL",0,0,"git diff",,terminal_command +89,358762,"TERMINAL",0,0,"]633;C[?1h=\r",,terminal_output +90,358815,"TERMINAL",0,0,"\r[?1l>]0;tum_cte0515@hkn1990:~/Projects/jasmine",,terminal_output +91,368215,"TERMINAL",0,0,"git rebase --abort",,terminal_command +92,368270,"TERMINAL",0,0,"]633;C",,terminal_output +93,369106,"TERMINAL",0,0,"]0;tum_cte0515@hkn1990:~/Projects/jasmine",,terminal_output +94,370523,"jasmine/train_dynamics.py",0,0,"Switched from branch 'prepend-action-maskgit' to 'gt-actions'",python,git_branch_checkout +95,382331,"TERMINAL",0,0,"git diff",,terminal_command +96,382385,"TERMINAL",0,0,"]633;C[?1h=\r\r[?1l>]0;tum_cte0515@hkn1990:~/Projects/jasmine",,terminal_output +97,385084,"TERMINAL",0,0,"git status",,terminal_command +98,385132,"TERMINAL",0,0,"]633;COn branch gt-actions\r\nYour branch is behind 'origin/gt-actions' by 21 commits, and can be fast-forwarded.\r\n (use ""git pull"" to update your local branch)\r\n\r\nUntracked files:\r\n (use ""git add ..."" to include in what will be committed)\r\n\tali-old-branch.diff\r\n\tdata/\r\n\tdiff.diff\r\n\tdiff2.diff\r\n\tinput_pipeline/generate_breakout_dataset_agent.py\r\n\tkiller.sh\r\n\tkiller_partition.sh\r\n\tlog.log\r\n\tmessage.md\r\n\toverfit_dir.zip\r\n\trequirements-franz.txt\r\n\tsamples/\r\n\tscripts_cremers/\r\n\tslurm/\r\n\ttest.py\r\n\tutils/visualizer.py\r\n\tuv.lock\r\n\r\nnothing added to commit but untracked files present (use ""git add"" to track)\r\n]0;tum_cte0515@hkn1990:~/Projects/jasmine",,terminal_output +99,411188,"TERMINAL",0,0,"git checkout prepend-action-maskgit",,terminal_command +100,411229,"TERMINAL",0,0,"]633;C",,terminal_output +101,411365,"TERMINAL",0,0,"Switched to branch 'prepend-action-maskgit'\r\nYour branch is up to date with 'origin/prepend-action-maskgit'.\r\n]0;tum_cte0515@hkn1990:~/Projects/jasmine",,terminal_output +102,412858,"TERMINAL",0,0,"git diff",,terminal_command +103,412901,"TERMINAL",0,0,"]633;C[?1h=\r\r[?1l>]0;tum_cte0515@hkn1990:~/Projects/jasmine",,terminal_output +104,415524,"jasmine/train_dynamics.py",0,0,"Switched from branch 'gt-actions' to 'prepend-action-maskgit'",python,git_branch_checkout +105,422537,"slurm/jobs/mihir/horeka/breakout/sample_maskgit-gt-actions.sbatch",0,0,"#!/usr/bin/env bash\n\n#SBATCH --nodes=1\n#SBATCH --ntasks-per-node=4\n#SBATCH --time=01:00:00\n#SBATCH --partition=accelerated\n#SBATCH --cpus-per-task=5\n#SBATCH --gres=gpu:4\n#SBATCH --output=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir/coinrun/dynamics/sampling/maskgit/%x_%j.log\n#SBATCH 
--error=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir/coinrun/dynamics/sampling/maskgit/%x_%j.log\n#SBATCH --job-name=coinrun_sample_maskgit\n\n# Unload modules that may interfere\nmodule unload mpi/openmpi/5.0\nmodule unload devel/cuda/12.4\n\n# Activate virtual environment\n# source .venv/bin/activate\n\narray_records_dir=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_breakout/breakout_episodes_perfect/val\nCHECKPOINT_PATH=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/breakout/dyn-gt-actions/interactive/3519095\n\necho ""Sampling from checkpoint: $CHECKPOINT_PATH""\n\nsrun python jasmine/sample.py \\n --checkpoint ""$CHECKPOINT_PATH"" \\n --data_dir=$array_records_dir \\n --seq_len=16 \\n --batch_size=12 \\n --start_frame=4 \\n --image_height=10 \\n --image_width=10 \\n --dyna_type=maskgit \\n --use_gt_actions \",shellscript,tab +106,423795,"slurm/jobs/mihir/horeka/breakout/sample_maskgit-gt-actions.sbatch",0,0,"q",shellscript,content +107,423797,"slurm/jobs/mihir/horeka/breakout/sample_maskgit-gt-actions.sbatch",1,0,"",shellscript,selection_keyboard +108,425620,"slurm/jobs/mihir/horeka/breakout/sample_maskgit-gt-actions.sbatch",575,0,"",shellscript,selection_mouse +109,427240,"slurm/jobs/mihir/horeka/breakout/sample_maskgit-gt-actions.sbatch",0,1,"",shellscript,content +110,431868,"TERMINAL",0,0,"git checkout gt-actions",,terminal_command +111,431918,"TERMINAL",0,0,"]633;C",,terminal_output +112,431990,"TERMINAL",0,0,"Switched to branch 'gt-actions'\r\nYour branch is behind 'origin/gt-actions' by 21 commits, and can be fast-forwarded.\r\n (use ""git pull"" to update your local branch)\r\n]0;tum_cte0515@hkn1990:~/Projects/jasmine",,terminal_output +113,433885,"TERMINAL",0,0,"git pull",,terminal_command +114,433929,"TERMINAL",0,0,"]633;C",,terminal_output +115,435526,"slurm/jobs/mihir/horeka/breakout/sample_maskgit-gt-actions.sbatch",0,0,"Switched from branch 'prepend-action-maskgit' to 'gt-actions'",shellscript,git_branch_checkout +116,435736,"TERMINAL",0,0,"Updating 4c74fd8..60693f0\r\n",,terminal_output +117,435850,"TERMINAL",0,0,"Fast-forward\r\n README.md | 2 +-\r\n genie.py | 51 +++++++++++++++++---------\r\n .../{download_array_records.sh => huggingface/download_openai_array_records.sh} | 2 +-\r\n input_pipeline/generate_coinrun_dataset.py | 12 ++++---\r\n input_pipeline/pngs_to_array_records.py | 86 ++++++++++++++++++++++++++++----------------\r\n input_pipeline/utils.py | 4 +--\r\n input_pipeline/video_to_array_records.py | 84 ++++++++++++++++++++++++++-----------------\r\n sample.py | 7 ++--\r\n train_dynamics.py | 53 ++++++++++++++-------------\r\n train_lam.py | 19 ++++------\r\n train_tokenizer.py | 23 +++++-------\r\n utils/nn.py | 2 +-\r\n 12 files changed, 202 insertions(+), 143 deletions(-)\r\n rename input_pipeline/download/{download_array_records.sh => huggingface/download_openai_array_records.sh} (96%)\r\n]0;tum_cte0515@hkn1990:~/Projects/jasmine",,terminal_output +118,456309,"TERMINAL",0,0,"clear",,terminal_command +119,456350,"TERMINAL",0,0,"]633;C]0;tum_cte0515@hkn1990:~/Projects/jasmine",,terminal_output +120,459507,"TERMINAL",0,0,"git checkout prepend-action-maskgit",,terminal_command +121,459557,"TERMINAL",0,0,"]633;C",,terminal_output +122,459598,"TERMINAL",0,0,"Switched to branch 'prepend-action-maskgit'\r\nYour branch is up to date with 'origin/prepend-action-maskgit'.\r\n]0;tum_cte0515@hkn1990:~/Projects/jasmine",,terminal_output
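Note: the sbatch scripts in this session all drive tyro-based CLIs; train_dynamics.py above ends with `args = tyro.cli(Args)`, and booleans appear as paired on/off flags (cf. --use_gt_actions and --no-use-flash-attention in the recorded scripts). A minimal self-contained example of the same pattern; the dataclass and its fields below are illustrative only, not sample.py's actual interface:

from dataclasses import dataclass
import tyro

@dataclass
class DemoArgs:
    # Hypothetical fields echoing the kinds of flags passed in the sbatch files.
    checkpoint: str = ""
    data_dir: str = ""
    seq_len: int = 16
    batch_size: int = 12
    use_gt_actions: bool = False  # exposed as an on/off flag pair by tyro

if __name__ == "__main__":
    # tyro derives the CLI (flags, types, defaults, help text) from the dataclass.
    args = tyro.cli(DemoArgs)
    print(args)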
+123,460528,"slurm/jobs/mihir/horeka/breakout/sample_maskgit-gt-actions.sbatch",0,0,"Switched from branch 'gt-actions' to 'prepend-action-maskgit'",shellscript,git_branch_checkout +124,461308,"TERMINAL",0,0,"git diff",,terminal_command +125,461314,"TERMINAL",0,0,"]633;C[?1h=\r\r[?1l>]0;tum_cte0515@hkn1990:~/Projects/jasmine",,terminal_output +126,463292,"TERMINAL",0,0,"clear",,terminal_command +127,466411,"jasmine/train_dynamics.py",0,0,"import os\n\n\nos.environ.setdefault(""XLA_PYTHON_CLIENT_MEM_FRACTION"", ""0.98"")\n\nfrom dataclasses import dataclass, field\nimport itertools\nfrom typing import cast, Optional\n\nimport einops\nfrom jax.sharding import Mesh, PartitionSpec, NamedSharding\nfrom jax.experimental.mesh_utils import create_device_mesh\nimport optax\nimport orbax.checkpoint as ocp\nimport numpy as np\nimport dm_pix as pix\nimport jax\nimport jax.numpy as jnp\nimport tyro\nimport wandb\nimport grain\nimport flax.nnx as nnx\n\nfrom genie import Genie, restore_genie_components\nfrom utils.dataloader import get_dataloader\nfrom utils.train_utils import (\n get_lr_schedule,\n count_parameters_by_component,\n print_mem_stats,\n print_compiled_memory_stats,\n print_compiled_cost_analysis,\n)\n\n\n@dataclass\nclass Args:\n # Experiment\n num_steps: int = 200_000\n seed: int = 0\n seq_len: int = 16\n image_channels: int = 3\n image_height: int = 64\n image_width: int = 64\n data_dir: str = """"\n save_ckpt: bool = False\n restore_ckpt: bool = False\n # Optimization\n batch_size: int = 36\n init_lr: float = 0.0\n max_lr: float = 3e-5\n decay_end: float = 0.0\n wsd_decay_steps: int = (\n 20_000 # NOTE: wsd_decay_steps will only be used when using a wsd-schedule\n )\n warmup_steps: int = 5000\n lr_schedule: str = ""wsd"" # supported options: wsd, cos\n # Tokenizer\n tokenizer_dim: int = 512\n tokenizer_ffn_dim: int = 2048\n latent_patch_dim: int = 32\n num_patch_latents: int = 1024\n patch_size: int = 16\n tokenizer_num_blocks: int = 4\n tokenizer_num_heads: int = 8\n tokenizer_checkpoint: str = """"\n # LAM\n lam_dim: int = 512\n lam_ffn_dim: int = 2048\n latent_action_dim: int = 32\n num_actions: int = 6\n lam_patch_size: int = 16\n lam_num_blocks: int = 4\n lam_num_heads: int = 8\n lam_checkpoint: str = """"\n # Dynamics\n dyna_type: str = ""maskgit"" # supported options: maskgit, causal\n dyna_dim: int = 512\n dyna_ffn_dim: int = 2048\n dyna_num_blocks: int = 6\n dyna_num_heads: int = 8\n dropout: float = 0.0\n mask_limit: float = 0.5\n z_loss_weight: float = 0.0\n param_dtype = jnp.float32\n dtype = jnp.bfloat16\n use_flash_attention: bool = True\n use_gt_actions: bool = False\n # Logging\n log: bool = True\n entity: str = """"\n project: str = """"\n name: str = ""train_dynamics""\n tags: list[str] = field(default_factory=lambda: [""dynamics""])\n log_interval: int = 50\n log_image_interval: int = 1000\n ckpt_dir: str = """"\n log_checkpoint_interval: int = 5000\n log_checkpoint_keep_period: int = 20_000\n log_gradients: bool = False\n val_data_dir: str = """"\n val_interval: int = 20_000\n val_steps: int = 50\n eval_full_frame: bool = True\n val_maskgit_steps: int = 25\n val_temperature: float = 1\n val_sample_argmax: bool = False\n wandb_id: str = """"\n\n\ndef build_model(args: Args, rng: jax.Array) -> tuple[Genie, jax.Array]:\n rng, _rng = jax.random.split(rng)\n rngs = nnx.Rngs(_rng)\n genie = Genie(\n # Tokenizer\n in_dim=args.image_channels,\n tokenizer_dim=args.tokenizer_dim,\n tokenizer_ffn_dim=args.tokenizer_ffn_dim,\n latent_patch_dim=args.latent_patch_dim,\n 
num_patch_latents=args.num_patch_latents,\n patch_size=args.patch_size,\n tokenizer_num_blocks=args.tokenizer_num_blocks,\n tokenizer_num_heads=args.tokenizer_num_heads,\n # LAM\n lam_dim=args.lam_dim,\n lam_ffn_dim=args.lam_ffn_dim,\n latent_action_dim=args.latent_action_dim,\n num_actions=args.num_actions,\n lam_patch_size=args.lam_patch_size,\n lam_num_blocks=args.lam_num_blocks,\n lam_num_heads=args.lam_num_heads,\n lam_co_train=not args.lam_checkpoint,\n use_gt_actions=args.use_gt_actions,\n # Dynamics\n dyna_type=args.dyna_type,\n dyna_dim=args.dyna_dim,\n dyna_ffn_dim=args.dyna_ffn_dim,\n dyna_num_blocks=args.dyna_num_blocks,\n dyna_num_heads=args.dyna_num_heads,\n dropout=args.dropout,\n mask_limit=args.mask_limit,\n param_dtype=args.param_dtype,\n dtype=args.dtype,\n use_flash_attention=args.use_flash_attention,\n decode=False,\n rngs=rngs,\n )\n if args.use_gt_actions:\n assert (\n not args.lam_checkpoint\n ), ""Cannot use LAM when using ground-truth actions.""\n else:\n assert genie.lam is not None\n del genie.lam.decoder\n return genie, rng\n\n\ndef build_optimizer(genie: Genie, args: Args) -> nnx.ModelAndOptimizer:\n lr_schedule = get_lr_schedule(\n args.lr_schedule,\n args.init_lr,\n args.max_lr,\n args.decay_end,\n args.num_steps,\n args.warmup_steps,\n args.wsd_decay_steps,\n )\n tx = optax.adamw(\n learning_rate=lr_schedule,\n b1=0.9,\n b2=0.9,\n weight_decay=1e-4,\n mu_dtype=args.param_dtype, # moments in full precision\n )\n optimizer = nnx.ModelAndOptimizer(genie, tx)\n return optimizer\n\n\ndef build_mesh_and_sharding(\n num_devices: int,\n) -> tuple[Mesh, NamedSharding, NamedSharding, NamedSharding]:\n device_mesh_arr = create_device_mesh((num_devices,))\n mesh = Mesh(devices=device_mesh_arr, axis_names=(""data"",))\n replicated_sharding = NamedSharding(mesh, PartitionSpec())\n videos_sharding = NamedSharding(mesh, PartitionSpec(""data"", None, None, None, None))\n actions_sharding = NamedSharding(mesh, PartitionSpec(""data"", None))\n return mesh, replicated_sharding, videos_sharding, actions_sharding\n\n\ndef shard_optimizer_states(\n optimizer: nnx.ModelAndOptimizer, replicated_sharding: NamedSharding\n) -> None:\n model_state = nnx.state(optimizer.model)\n model_sharded_state = jax.lax.with_sharding_constraint(\n model_state, replicated_sharding\n )\n nnx.update(optimizer.model, model_sharded_state)\n optimizer_state = nnx.state(optimizer, nnx.optimizer.OptState)\n optimizer_sharded_state = jax.lax.with_sharding_constraint(\n optimizer_state, replicated_sharding\n )\n nnx.update(optimizer, optimizer_sharded_state)\n\n\ndef build_dataloader(args: Args, data_dir: str) -> grain.DataLoaderIterator:\n image_shape = (args.image_height, args.image_width, args.image_channels)\n array_record_files = [\n os.path.join(data_dir, x)\n for x in os.listdir(data_dir)\n if x.endswith("".array_record"")\n ]\n grain_dataloader = get_dataloader(\n array_record_files,\n args.seq_len,\n # NOTE: We deliberately pass the global batch size\n # The dataloader shards the dataset across all processes\n args.batch_size,\n *image_shape,\n num_workers=8,\n prefetch_buffer_size=1,\n seed=args.seed,\n )\n initial_state = grain_dataloader._create_initial_state()\n grain_iterator = grain.DataLoaderIterator(grain_dataloader, initial_state)\n return grain_iterator\n\n\ndef build_checkpoint_manager(args: Args) -> Optional[ocp.CheckpointManager]:\n if args.restore_ckpt or args.save_ckpt:\n handler_registry = ocp.handlers.DefaultCheckpointHandlerRegistry()\n handler_registry.add(\n ""model_state"", 
ocp.args.PyTreeSave, ocp.handlers.PyTreeCheckpointHandler\n )\n handler_registry.add(\n ""model_state"", ocp.args.PyTreeRestore, ocp.handlers.PyTreeCheckpointHandler\n )\n handler_registry.add(\n ""train_dataloader_state"",\n grain.checkpoint.CheckpointSave,\n cast(ocp.handlers.CheckpointHandler, grain.checkpoint.CheckpointHandler),\n )\n handler_registry.add(\n ""train_dataloader_state"",\n grain.checkpoint.CheckpointRestore,\n cast(ocp.handlers.CheckpointHandler, grain.checkpoint.CheckpointHandler),\n )\n if args.val_data_dir:\n handler_registry.add(\n ""val_dataloader_state"",\n grain.checkpoint.CheckpointSave,\n cast(\n ocp.handlers.CheckpointHandler, grain.checkpoint.CheckpointHandler\n ),\n )\n handler_registry.add(\n ""val_dataloader_state"",\n grain.checkpoint.CheckpointRestore,\n cast(\n ocp.handlers.CheckpointHandler, grain.checkpoint.CheckpointHandler\n ),\n )\n checkpoint_options = ocp.CheckpointManagerOptions(\n save_interval_steps=args.log_checkpoint_interval,\n max_to_keep=3,\n keep_period=args.log_checkpoint_keep_period,\n step_format_fixed_length=6,\n cleanup_tmp_directories=True,\n )\n checkpoint_manager = ocp.CheckpointManager(\n args.ckpt_dir,\n options=checkpoint_options,\n handler_registry=handler_registry,\n )\n return checkpoint_manager\n else:\n return None\n\n\ndef restore_or_initialize_components(\n args: Args,\n checkpoint_manager: Optional[ocp.CheckpointManager],\n optimizer: nnx.ModelAndOptimizer,\n train_iterator: grain.DataLoaderIterator,\n rng: jax.Array,\n replicated_sharding: NamedSharding,\n val_iterator: Optional[grain.DataLoaderIterator],\n restore_step: Optional[int] = None,\n) -> tuple[\n int,\n nnx.ModelAndOptimizer,\n grain.DataLoaderIterator,\n grain.DataLoaderIterator,\n jax.Array,\n]:\n step = 0\n if checkpoint_manager and restore_step is None:\n restore_step = checkpoint_manager.latest_step()\n if args.restore_ckpt:\n assert checkpoint_manager is not None\n abstract_optimizer = nnx.eval_shape(lambda: optimizer)\n abstract_optimizer_state = nnx.state(abstract_optimizer)\n if val_iterator:\n restore_args = ocp.args.Composite(\n model_state=ocp.args.PyTreeRestore(abstract_optimizer_state), # type: ignore\n train_dataloader_state=grain.checkpoint.CheckpointRestore(train_iterator), # type: ignore\n val_dataloader_state=grain.checkpoint.CheckpointRestore(val_iterator), # type: ignore\n )\n else:\n restore_args = ocp.args.Composite(\n model_state=ocp.args.PyTreeRestore(abstract_optimizer_state), # type: ignore\n train_dataloader_state=grain.checkpoint.CheckpointRestore(train_iterator), # type: ignore\n )\n restored = checkpoint_manager.restore(\n checkpoint_manager.latest_step(), args=restore_args\n )\n restored_optimizer_state = restored[""model_state""]\n nnx.update(optimizer, restored_optimizer_state)\n train_iterator = restored[""train_dataloader_state""]\n if val_iterator:\n val_iterator = restored[""val_dataloader_state""]\n step = checkpoint_manager.latest_step() or 0\n print(f""Restored dataloader and model state from step {step}"")\n else:\n # Restore from pre-trained tokenizer (and LAM)\n rng, _rng = jax.random.split(rng)\n optimizer = restore_genie_components(optimizer, replicated_sharding, _rng, args)\n return step, optimizer, train_iterator, val_iterator, rng\n\n\ndef _calculate_top_k_accuracy(\n token_logits_BTNV: jax.Array,\n video_tokens_BTN: jax.Array,\n mask_BTN: jax.Array,\n k: int,\n) -> jax.Array:\n _, topk_indices_BTNK = jax.lax.top_k(token_logits_BTNV, k)\n topk_correct = jnp.any(\n topk_indices_BTNK == video_tokens_BTN[..., 
jnp.newaxis], axis=-1\n )\n topk_acc = (mask_BTN * topk_correct).sum() / mask_BTN.sum()\n return topk_acc\n\n\ndef _calculate_step_metrics(\n outputs: dict[str, jax.Array],\n gt: jax.Array,\n num_actions: int,\n num_patch_latents: int,\n) -> tuple[jax.Array, dict]:\n mask_BTN = outputs[""mask""]\n outputs[""token_logits""] = outputs[""token_logits""].astype(jnp.float32)\n ce_loss = optax.softmax_cross_entropy_with_integer_labels(\n outputs[""token_logits""], outputs[""video_tokens""]\n )\n ce_loss = (mask_BTN * ce_loss).sum() / mask_BTN.sum()\n z_val = jax.nn.logsumexp(outputs[""token_logits""], axis=-1)\n z_loss_metric = (mask_BTN * (z_val**2)).sum() / mask_BTN.sum()\n\n masked_token_top_1_acc = _calculate_top_k_accuracy(\n outputs[""token_logits""], outputs[""video_tokens""], mask_BTN, 1\n )\n masked_token_top_2_acc = _calculate_top_k_accuracy(\n outputs[""token_logits""], outputs[""video_tokens""], mask_BTN, 2\n )\n masked_token_top_5_acc = _calculate_top_k_accuracy(\n outputs[""token_logits""], outputs[""video_tokens""], mask_BTN, 5\n )\n masked_token_top_16_acc = _calculate_top_k_accuracy(\n outputs[""token_logits""], outputs[""video_tokens""], mask_BTN, 16\n )\n\n select_probs = jax.nn.softmax(outputs[""token_logits""])\n gt_val = gt.clip(0, 1).reshape(-1, *gt.shape[2:])\n recon = outputs[""recon""].clip(0, 1).reshape(-1, *outputs[""recon""].shape[2:])\n psnr = jnp.asarray(pix.psnr(gt_val, recon)).mean()\n ssim = jnp.asarray(pix.ssim(gt_val, recon)).mean()\n _, index_counts_tokenizer = jnp.unique_counts(\n jnp.ravel(outputs[""video_tokens""]),\n size=num_patch_latents,\n fill_value=0,\n )\n codebook_usage_tokenizer = (index_counts_tokenizer != 0).mean()\n metrics = dict(\n cross_entropy_loss=ce_loss,\n masked_token_top1_accuracy=masked_token_top_1_acc,\n masked_token_top2_accuracy=masked_token_top_2_acc,\n masked_token_top5_accuracy=masked_token_top_5_acc,\n masked_token_top16_accuracy=masked_token_top_16_acc,\n select_logit=outputs[""token_logits""].max(-1).mean(),\n select_p=select_probs.max(-1).mean(),\n entropy=jax.scipy.special.entr(select_probs).sum(-1).mean(),\n z_loss=z_loss_metric,\n psnr=psnr,\n ssim=ssim,\n codebook_usage_tokenizer=codebook_usage_tokenizer,\n )\n if ""lam_indices"" in outputs.keys():\n _, index_counts_lam = jnp.unique_counts(\n jnp.ravel(outputs[""lam_indices""]),\n size=num_actions,\n fill_value=0,\n )\n codebook_usage_lam = (index_counts_lam != 0).mean()\n metrics[""codebook_usage_lam""] = codebook_usage_lam\n return ce_loss, metrics\n\n\ndef main(args: Args) -> None:\n jax.distributed.initialize()\n num_devices = jax.device_count()\n if num_devices == 0:\n raise ValueError(""No JAX devices found."")\n print(f""Running on {num_devices} devices."")\n\n if args.batch_size % num_devices != 0:\n raise ValueError(\n f""Global batch size {args.batch_size} must be divisible by ""\n f""number of devices {num_devices}.""\n )\n\n rng = jax.random.key(args.seed)\n\n # --- Initialize model ---\n genie, rng = build_model(args, rng)\n _, params, _ = nnx.split(genie, nnx.Param, ...)\n param_counts = count_parameters_by_component(params)\n\n if args.log and jax.process_index() == 0:\n wandb_init_kwargs = {\n ""entity"": args.entity,\n ""project"": args.project,\n ""name"": args.name,\n ""tags"": args.tags,\n ""group"": ""debug"",\n ""config"": args,\n }\n\n if args.wandb_id:\n wandb_init_kwargs.update(\n {\n ""id"": args.wandb_id,\n ""resume"": ""allow"",\n }\n )\n wandb.init(**wandb_init_kwargs)\n\n wandb.config.update({""model_param_count"": param_counts})\n\n 
print(""Parameter counts:"")\n print(param_counts)\n\n # --- Initialize optimizer ---\n optimizer = build_optimizer(genie, args)\n del genie\n\n # FIXME: switch to create_hybrid_device_mesh for runs spanning multiple nodes\n _, replicated_sharding, videos_sharding, actions_sharding = build_mesh_and_sharding(\n num_devices\n )\n\n shard_optimizer_states(optimizer, replicated_sharding)\n\n # --- Initialize checkpoint manager ---\n checkpoint_manager = build_checkpoint_manager(args)\n\n # --- Create DataLoaderIterator from dataloader ---\n train_iterator = build_dataloader(args, args.data_dir)\n val_iterator = None\n if args.val_data_dir:\n val_iterator = build_dataloader(args, args.val_data_dir)\n\n # --- Restore checkpoint ---\n step, optimizer, train_iterator, val_iterator, rng = (\n restore_or_initialize_components(\n args,\n checkpoint_manager,\n optimizer,\n train_iterator,\n rng,\n replicated_sharding,\n val_iterator,\n )\n )\n\n # --- Define loss and train step (close over args) ---\n def dynamics_loss_fn(\n model: Genie,\n inputs: dict,\n ) -> tuple[jax.Array, tuple[jax.Array, dict]]:\n gt = jnp.asarray(inputs[""videos""], dtype=jnp.float32) / 255.0\n inputs[""videos""] = gt.astype(args.dtype)\n outputs = model(inputs)\n ce_loss, metrics = _calculate_step_metrics(\n outputs, gt, args.num_actions, args.num_patch_latents\n )\n z_loss = metrics[""z_loss""]\n total_loss = ce_loss + args.z_loss_weight * z_loss\n metrics[""total_loss""] = total_loss\n return total_loss, (outputs[""recon""], metrics)\n\n @nnx.jit(donate_argnums=0)\n def train_step(\n optimizer: nnx.ModelAndOptimizer, inputs: dict\n ) -> tuple[jax.Array, jax.Array, dict]:\n def loss_fn(model: Genie) -> tuple[jax.Array, tuple[jax.Array, dict]]:\n model.train()\n return dynamics_loss_fn(model, inputs)\n\n (loss, (recon, metrics)), grads = nnx.value_and_grad(loss_fn, has_aux=True)(\n optimizer.model\n )\n optimizer.update(grads)\n if args.log_gradients:\n metrics[""gradients_std/""] = jax.tree.map(\n lambda x: x.std(), grads[""params""][""dynamics""]\n )\n return loss, recon, metrics\n\n @nnx.jit\n def val_step(genie: Genie, inputs: dict) -> dict:\n """"""Evaluate model and compute metrics""""""\n genie.eval()\n gt = jnp.asarray(inputs[""videos""], dtype=jnp.float32) / 255.0\n (loss, (recon, metrics)) = dynamics_loss_fn(genie, inputs)\n val_output = {""loss"": loss, ""recon"": recon, ""metrics"": metrics}\n\n # --- Evaluate full frame prediction (sampling) ---\n if args.eval_full_frame:\n inputs[""videos""] = gt.astype(args.dtype)\n tokenizer_outputs = genie.tokenizer.vq_encode(\n inputs[""videos""], training=False\n )\n tokens_full_frame = tokenizer_outputs[""indices""]\n lam_indices_E = None\n if not args.use_gt_actions:\n lam_indices_E = genie.vq_encode(inputs, training=False)\n inputs[""latent_actions""] = lam_indices_E\n inputs[""videos""] = inputs[""videos""][\n :, :-1\n ] # remove last frame for generation\n recon_full_frame, logits_full_frame = genie.sample(\n inputs,\n args.seq_len,\n args.val_temperature,\n args.val_sample_argmax,\n args.val_maskgit_steps,\n )\n # Calculate metrics for the last frame only\n step_outputs = {\n ""recon"": recon_full_frame[:, -1],\n ""token_logits"": logits_full_frame[:, -1],\n ""video_tokens"": tokens_full_frame[:, -1],\n ""mask"": jnp.ones_like(tokens_full_frame[:, -1]),\n }\n if lam_indices_E is not None:\n lam_indices_B = lam_indices_E.reshape((-1, args.seq_len - 1))[:, -1]\n step_outputs[""lam_indices""] = lam_indices_B\n\n loss_full_frame, metrics_full_frame = 
_calculate_step_metrics(\n step_outputs, gt[:, -1], args.num_actions, args.num_patch_latents\n )\n val_output.update(\n {\n ""loss_full_frame"": loss_full_frame,\n ""recon_full_frame"": recon_full_frame,\n ""metrics_full_frame"": metrics_full_frame,\n }\n )\n return val_output\n\n def calculate_validation_metrics(val_dataloader, genie, rng):\n step = 0\n loss_per_step = []\n metrics_per_step = []\n loss_full_frame_per_step = []\n metrics_full_frame_per_step = []\n batch = None\n recon = None\n recon_full_frame = None\n for batch in val_dataloader:\n rng, _rng_mask = jax.random.split(rng, 2)\n batch[""rng""] = _rng_mask\n val_outputs = val_step(genie, batch)\n loss_per_step.append(val_outputs[""loss""])\n metrics_per_step.append(val_outputs[""metrics""])\n recon = val_outputs[""recon""]\n if args.eval_full_frame:\n loss_full_frame_per_step.append(val_outputs[""loss_full_frame""])\n metrics_full_frame_per_step.append(val_outputs[""metrics_full_frame""])\n recon_full_frame = val_outputs[""recon_full_frame""]\n step += 1\n if step > args.val_steps:\n break\n\n if step < args.val_steps:\n print(\n f""Warning: Your validation dataset is too small to make val_steps many steps. Made {step} steps, expected {args.val_steps}""\n )\n\n val_metrics = {\n f""val_{key}"": np.mean([float(m[key]) for m in metrics_per_step])\n for key in metrics_per_step[0].keys()\n }\n val_metrics[""val_loss""] = np.mean(loss_per_step)\n if args.eval_full_frame:\n val_metrics_full_frame = {\n f""val_full_frame_{key}"": np.mean(\n [float(m[key]) for m in metrics_full_frame_per_step]\n )\n for key in metrics_full_frame_per_step[0].keys()\n }\n val_metrics.update(val_metrics_full_frame)\n val_metrics[""val_full_frame_loss""] = np.mean(loss_full_frame_per_step)\n return val_metrics, batch, recon, recon_full_frame\n\n # --- TRAIN LOOP ---\n dataloader_train = (\n {\n ""videos"": jax.make_array_from_process_local_data(\n videos_sharding, local_data=elem[""videos""]\n ),\n ""actions"": (\n jax.make_array_from_process_local_data(\n actions_sharding, elem[""actions""]\n )\n if args.use_gt_actions\n else None\n ),\n }\n for elem in train_iterator\n )\n dataloader_val = None\n if val_iterator:\n dataloader_val = (\n {\n ""videos"": jax.make_array_from_process_local_data(\n videos_sharding, elem[""videos""]\n ),\n ""actions"": (\n jax.make_array_from_process_local_data(\n actions_sharding, elem[""actions""]\n )\n if args.use_gt_actions\n else None\n ),\n }\n for elem in val_iterator\n )\n if jax.process_index() == 0:\n first_batch = next(dataloader_train)\n first_batch[""rng""] = rng # type: ignore\n compiled = train_step.lower(optimizer, first_batch).compile()\n print_compiled_memory_stats(compiled.memory_analysis())\n print_compiled_cost_analysis(compiled.cost_analysis())\n # Do not skip the first batch during training\n dataloader_train = itertools.chain([first_batch], dataloader_train)\n print(f""Starting training from step {step}..."")\n first_step = step\n while step < args.num_steps:\n for batch in dataloader_train:\n # --- Train step ---\n rng, _rng_mask = jax.random.split(rng, 2)\n batch[""rng""] = _rng_mask\n loss, recon, metrics = train_step(optimizer, batch)\n if step == first_step:\n print_mem_stats(""After params initialized"")\n step += 1\n\n # --- Validation loss ---\n val_results = {}\n if dataloader_val and step % args.val_interval == 0:\n rng, _rng_mask_val = jax.random.split(rng, 2)\n print(""Calculating validation metrics..."")\n val_metrics, val_gt_batch, val_recon, val_recon_full_frame = (\n 
calculate_validation_metrics(\n dataloader_val, optimizer.model, _rng_mask_val\n )\n )\n print(f""Step {step}, validation loss: {val_metrics['val_loss']}"")\n val_results = {\n ""metrics"": val_metrics,\n ""gt_batch"": val_gt_batch,\n ""recon"": val_recon,\n ""full_frame"": val_recon_full_frame,\n }\n\n # --- Logging ---\n if args.log:\n if step % args.log_interval == 0 and jax.process_index() == 0:\n log_dict = {""loss"": loss, ""step"": step, **metrics}\n if val_results:\n log_dict.update(val_results[""metrics""])\n wandb.log(log_dict)\n if step % args.log_image_interval == 0:\n gt_seq = batch[""videos""][0].astype(jnp.float32) / 255.0\n recon_seq = recon[0].clip(0, 1)\n comparison_seq = jnp.concatenate((gt_seq, recon_seq), axis=1)\n comparison_seq = einops.rearrange(\n comparison_seq * 255, ""t h w c -> h (t w) c""\n )\n if val_results:\n val_results[""gt_seq_val""] = (\n val_results[""gt_batch""][""videos""][0].astype(jnp.float32)\n / 255.0\n )\n val_results[""recon_seq_val""] = val_results[""recon""][0].clip(\n 0, 1\n )\n val_comparison_seq = jnp.concatenate(\n (val_results[""gt_seq_val""], val_results[""recon_seq_val""]),\n axis=1,\n )\n val_results[""val_comparison_seq""] = einops.rearrange(\n val_comparison_seq * 255, ""t h w c -> h (t w) c""\n )\n if args.eval_full_frame:\n val_results[""full_frame_seq_val""] = val_results[\n ""full_frame""\n ][0].clip(0, 1)\n val_results[""val_full_frame_comparison_seq""] = (\n jnp.concatenate(\n (\n val_results[""gt_seq_val""],\n val_results[""full_frame_seq_val""],\n ),\n axis=1,\n )\n )\n val_results[""val_full_frame_comparison_seq""] = (\n einops.rearrange(\n val_results[""val_full_frame_comparison_seq""] * 255,\n ""t h w c -> h (t w) c"",\n )\n )\n # NOTE: Process-dependent control flow deliberately happens\n # after indexing operation since it must not contain code\n # sections that lead to cross-accelerator communication.\n if jax.process_index() == 0:\n log_images = dict(\n image=wandb.Image(np.asarray(gt_seq[args.seq_len - 1])),\n recon=wandb.Image(np.asarray(recon_seq[args.seq_len - 1])),\n true_vs_recon=wandb.Image(\n np.asarray(comparison_seq.astype(np.uint8))\n ),\n )\n if val_results:\n log_images.update(\n dict(\n val_image=wandb.Image(\n np.asarray(\n val_results[""gt_seq_val""][args.seq_len - 1]\n )\n ),\n val_recon=wandb.Image(\n np.asarray(\n val_results[""recon_seq_val""][\n args.seq_len - 1\n ]\n )\n ),\n val_true_vs_recon=wandb.Image(\n np.asarray(\n val_results[""val_comparison_seq""].astype(\n np.uint8\n )\n )\n ),\n )\n )\n if args.eval_full_frame:\n log_images.update(\n dict(\n val_full_frame=wandb.Image(\n np.asarray(\n val_results[""full_frame_seq_val""][\n args.seq_len - 1\n ]\n )\n ),\n val_true_vs_full_frame=wandb.Image(\n np.asarray(\n val_results[\n ""val_full_frame_comparison_seq""\n ].astype(np.uint8)\n )\n ),\n )\n )\n wandb.log(log_images)\n # --- Checkpointing ---\n if args.save_ckpt and step % args.log_checkpoint_interval == 0:\n assert checkpoint_manager is not None\n optimizer_state = nnx.state(optimizer)\n if val_iterator:\n ckpt_manager_args = ocp.args.Composite(\n model_state=ocp.args.PyTreeSave(optimizer_state), # type: ignore\n train_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n train_iterator # type: ignore\n ),\n val_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n val_iterator # type: ignore\n ),\n )\n else:\n ckpt_manager_args = ocp.args.Composite(\n model_state=ocp.args.PyTreeSave(optimizer_state), # type: ignore\n 
train_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n train_iterator # type: ignore\n ),\n )\n checkpoint_manager.save(step, args=ckpt_manager_args)\n print(f""Saved checkpoint at step {step}"")\n if step >= args.num_steps:\n break\n\n if checkpoint_manager:\n checkpoint_manager.close()\n\n\nif __name__ == ""__main__"":\n args = tyro.cli(Args)\n main(args)\n",python,tab +128,470226,"jasmine/train_dynamics.py",1387,0,"",python,selection_mouse +129,470790,"jasmine/train_dynamics.py",1416,0,"",python,selection_command +130,470935,"jasmine/train_dynamics.py",1450,0,"",python,selection_command +131,471083,"jasmine/train_dynamics.py",1481,0,"",python,selection_command +132,471234,"jasmine/train_dynamics.py",1515,0,"",python,selection_command +133,471553,"jasmine/train_dynamics.py",1514,0,"",python,selection_command +134,471701,"jasmine/train_dynamics.py",1513,0,"",python,selection_command +135,471837,"jasmine/train_dynamics.py",1512,0,"",python,selection_command +136,471986,"jasmine/train_dynamics.py",1511,0,"",python,selection_command +137,472105,"jasmine/train_dynamics.py",1510,0,"",python,selection_command +138,472280,"jasmine/train_dynamics.py",1509,0,"",python,selection_command +139,472380,"jasmine/train_dynamics.py",1508,0,"",python,selection_command +140,472525,"jasmine/train_dynamics.py",1507,0,"",python,selection_command +141,472648,"jasmine/train_dynamics.py",1506,0,"",python,selection_command +142,472789,"jasmine/train_dynamics.py",1505,0,"",python,selection_command +143,473039,"jasmine/train_dynamics.py",1530,0,"",python,selection_command +144,473724,"jasmine/train_dynamics.py",1564,0,"",python,selection_command +145,474163,"jasmine/train_dynamics.py",1597,0,"",python,selection_command +146,485334,"jasmine/train_dynamics.py",1596,0,"",python,selection_command +147,485702,"jasmine/train_dynamics.py",1595,0,"",python,selection_command +148,486218,"jasmine/train_dynamics.py",1630,0,"",python,selection_command +149,486387,"jasmine/train_dynamics.py",1640,0,"",python,selection_command +150,487066,"jasmine/train_dynamics.py",1630,0,"",python,selection_command +151,498398,"jasmine/train_dynamics.py",1641,0,"",python,selection_mouse +152,499125,"jasmine/train_dynamics.py",1664,0,"",python,selection_command +153,500369,"jasmine/train_dynamics.py",1692,0,"",python,selection_command +154,500591,"jasmine/train_dynamics.py",1724,0,"",python,selection_command +155,501093,"jasmine/train_dynamics.py",1749,0,"",python,selection_command +156,502048,"jasmine/train_dynamics.py",1778,0,"",python,selection_command +157,502370,"jasmine/train_dynamics.py",1806,0,"",python,selection_command +158,505882,"jasmine/train_dynamics.py",1833,0,"",python,selection_command +159,506142,"jasmine/train_dynamics.py",1832,0,"",python,selection_command +160,508799,"jasmine/train_dynamics.py",1805,0,"",python,selection_command +161,508960,"jasmine/train_dynamics.py",1777,0,"",python,selection_command +162,509116,"jasmine/train_dynamics.py",1748,0,"",python,selection_command +163,509288,"jasmine/train_dynamics.py",1723,0,"",python,selection_command +164,509619,"jasmine/train_dynamics.py",1691,0,"",python,selection_command +165,509960,"jasmine/train_dynamics.py",1723,0,"",python,selection_command +166,608314,"jasmine/train_dynamics.py",1748,0,"",python,selection_command +167,609998,"jasmine/train_dynamics.py",1723,0,"",python,selection_command +168,658654,"jasmine/train_dynamics.py",1724,0,"",python,selection_mouse +169,658801,"jasmine/train_dynamics.py",1723,4," ",python,selection_mouse 
+170,659070,"jasmine/train_dynamics.py",1723,15," num_actions",python,selection_mouse +171,659308,"jasmine/train_dynamics.py",1723,16," num_actions:",python,selection_mouse +172,659352,"jasmine/train_dynamics.py",1723,17," num_actions: ",python,selection_mouse +173,659397,"jasmine/train_dynamics.py",1723,20," num_actions: int",python,selection_mouse +174,659464,"jasmine/train_dynamics.py",1723,21," num_actions: int ",python,selection_mouse +175,659509,"jasmine/train_dynamics.py",1723,22," num_actions: int =",python,selection_mouse +176,659510,"jasmine/train_dynamics.py",1723,23," num_actions: int = ",python,selection_mouse +177,659536,"jasmine/train_dynamics.py",1723,24," num_actions: int = 6",python,selection_mouse +178,660596,"jasmine/train_dynamics.py",1747,0,"",python,selection_mouse +179,660638,"jasmine/train_dynamics.py",1746,0,"",python,selection_command +180,662141,"jasmine/train_dynamics.py",1723,0,"",python,selection_mouse +181,668155,"jasmine/train_dynamics.py",1748,0,"",python,selection_command +182,672591,"jasmine/train_dynamics.py",1777,0,"",python,selection_command +183,673686,"jasmine/train_dynamics.py",1805,0,"",python,selection_command +184,673935,"jasmine/train_dynamics.py",1832,0,"",python,selection_command +185,674389,"jasmine/train_dynamics.py",1861,0,"",python,selection_command +186,674922,"jasmine/train_dynamics.py",1876,0,"",python,selection_command +187,675110,"jasmine/train_dynamics.py",1945,0,"",python,selection_command +188,675293,"jasmine/train_dynamics.py",1969,0,"",python,selection_command +189,675477,"jasmine/train_dynamics.py",1998,0,"",python,selection_command +190,675647,"jasmine/train_dynamics.py",2027,0,"",python,selection_command +191,675809,"jasmine/train_dynamics.py",2055,0,"",python,selection_command +192,676310,"jasmine/train_dynamics.py",2080,0,"",python,selection_command +193,676390,"jasmine/train_dynamics.py",2108,0,"",python,selection_command +194,676515,"jasmine/train_dynamics.py",2139,0,"",python,selection_command +195,676722,"jasmine/train_dynamics.py",2169,0,"",python,selection_command +196,676863,"jasmine/train_dynamics.py",2194,0,"",python,selection_command +197,677029,"jasmine/train_dynamics.py",2231,0,"",python,selection_command +198,677073,"jasmine/train_dynamics.py",2194,0,"",python,selection_command +199,677571,"jasmine/train_dynamics.py",2169,0,"",python,selection_command +200,677584,"jasmine/train_dynamics.py",2139,0,"",python,selection_command +201,677623,"jasmine/train_dynamics.py",2108,0,"",python,selection_command +202,677664,"jasmine/train_dynamics.py",2080,0,"",python,selection_command +203,677709,"jasmine/train_dynamics.py",2055,0,"",python,selection_command +204,677714,"jasmine/train_dynamics.py",2027,0,"",python,selection_command +205,677757,"jasmine/train_dynamics.py",1998,0,"",python,selection_command +206,677809,"jasmine/train_dynamics.py",1969,0,"",python,selection_command +207,677848,"jasmine/train_dynamics.py",1945,0,"",python,selection_command +208,677850,"jasmine/train_dynamics.py",1876,0,"",python,selection_command +209,681522,"jasmine/train_dynamics.py",1945,0,"",python,selection_command +210,682221,"jasmine/train_dynamics.py",1969,0,"",python,selection_command +211,689327,"jasmine/train_dynamics.py",1998,0,"",python,selection_command +212,689867,"jasmine/train_dynamics.py",2027,0,"",python,selection_command +213,690658,"jasmine/train_dynamics.py",2055,0,"",python,selection_command +214,694985,"jasmine/train_dynamics.py",2080,0,"",python,selection_command 
+215,695334,"jasmine/train_dynamics.py",2108,0,"",python,selection_command +216,695582,"jasmine/train_dynamics.py",2080,0,"",python,selection_command +217,695948,"jasmine/train_dynamics.py",2108,0,"",python,selection_command +218,696108,"jasmine/train_dynamics.py",2080,0,"",python,selection_command +219,696336,"jasmine/train_dynamics.py",2055,0,"",python,selection_command +220,696559,"jasmine/train_dynamics.py",2080,0,"",python,selection_command +221,696779,"jasmine/train_dynamics.py",2108,0,"",python,selection_command +222,697008,"jasmine/train_dynamics.py",2080,0,"",python,selection_command +223,723072,"jasmine/train_dynamics.py",2108,0,"",python,selection_command +224,723227,"jasmine/train_dynamics.py",2139,0,"",python,selection_command +225,723426,"jasmine/train_dynamics.py",2169,0,"",python,selection_command +226,724004,"jasmine/train_dynamics.py",2194,0,"",python,selection_command +227,727230,"jasmine/train_dynamics.py",2231,0,"",python,selection_command +228,727614,"jasmine/train_dynamics.py",2264,0,"",python,selection_command +229,727837,"jasmine/train_dynamics.py",2278,0,"",python,selection_command +230,728053,"jasmine/train_dynamics.py",2299,0,"",python,selection_command +231,728259,"jasmine/train_dynamics.py",2320,0,"",python,selection_command +232,728500,"jasmine/train_dynamics.py",2342,0,"",python,selection_command +233,728651,"jasmine/train_dynamics.py",2375,0,"",python,selection_command +234,728812,"jasmine/train_dynamics.py",2441,0,"",python,selection_command +235,728945,"jasmine/train_dynamics.py",2468,0,"",python,selection_command +236,729163,"jasmine/train_dynamics.py",2503,0,"",python,selection_command +237,729344,"jasmine/train_dynamics.py",2526,0,"",python,selection_command +238,729518,"jasmine/train_dynamics.py",2566,0,"",python,selection_command +239,729858,"jasmine/train_dynamics.py",2611,0,"",python,selection_command +240,730195,"jasmine/train_dynamics.py",2643,0,"",python,selection_command +241,730396,"jasmine/train_dynamics.py",2670,0,"",python,selection_command +242,730610,"jasmine/train_dynamics.py",2701,0,"",python,selection_command +243,730985,"jasmine/train_dynamics.py",2725,0,"",python,selection_command +244,732285,"jasmine/train_dynamics.py",2701,0,"",python,selection_command +245,761032,"TERMINAL",0,0,"branch",,terminal_command +246,781186,"TERMINAL",0,0,"git diff ablation/use-pytorch-dataloader",,terminal_command +247,781209,"TERMINAL",0,0,"]633;C",,terminal_output +248,781307,"TERMINAL",0,0,"[?1h=\rdiff --git a/jasmine/train_dynamics.py b/jasmine/train_dynamics.py\r\nindex e8d2209..06cd966 100644\r\n--- a/jasmine/train_dynamics.py\r\n+++ b/jasmine/train_dynamics.py\r\n@@ -18,11 +18,11 @@ import jax\r\n import jax.numpy as jnp\r\n import tyro\r\n import wandb\r\n+import grain\r\n import flax.nnx as nnx\r\n-from torch.utils.data import DataLoader\r\n \r\n from genie import Genie, restore_genie_components\r\n-from utils.dataloader_torch import get_dataloader\r\n+from utils.dataloader import get_dataloader\r\n from utils.train_utils import (\r\n get_lr_schedule,\r\n count_parameters_by_component,\r\n@@ -201,8 +201,27 @@ def shard_optimizer_states(\r\n nnx.update(optimizer, optimizer_sharded_state)\r\n \r\n \r\n-def build_dataloader(args: Args, data_dir: str) -> DataLoader:\r\n- return get_dataloader(data_dir, args.seq_len, args.batch_size)\r\n+def build_dataloader(args: Args, data_dir: str) -> grain.DataLoaderIterator:\r\n+ image_shape = (args.image_height, args.image_width, args.image_channels)\r\n+ array_record_files = [\r\n+ os.path.join(data_dir, 
x)\r\n+ for x in os.listdir(data_dir)\r\n+ if x.endswith("".array_record"")\r\n:",,terminal_output +249,799424,"TERMINAL",0,0,"\r+ ]\r\n:",,terminal_output +250,799663,"TERMINAL",0,0,"\r+ grain_dataloader = get_dataloader(\r\n:",,terminal_output +251,799772,"TERMINAL",0,0,"\r+ array_record_files,\r\n:",,terminal_output +252,799894,"TERMINAL",0,0,"\r+ args.seq_len,\r\n:",,terminal_output +253,800056,"TERMINAL",0,0,"\r+ # NOTE: We deliberately pass the global batch size\r\n:",,terminal_output +254,800237,"TERMINAL",0,0,"\r+ # The dataloader shards the dataset across all processes\r\n:",,terminal_output +255,800352,"TERMINAL",0,0,"\r+ args.batch_size,\r\n:",,terminal_output +256,800524,"TERMINAL",0,0,"\r+ *image_shape,\r\n:",,terminal_output +257,800577,"TERMINAL",0,0,"\r+ num_workers=8,\r\n:",,terminal_output +258,800805,"TERMINAL",0,0,"\r+ prefetch_buffer_size=1,\r\n:",,terminal_output +259,800918,"TERMINAL",0,0,"\r+ seed=args.seed,\r\n:",,terminal_output +260,801082,"TERMINAL",0,0,"\r+ )\r\n:",,terminal_output +261,801182,"TERMINAL",0,0,"\r+ initial_state = grain_dataloader._create_initial_state()\r\n:",,terminal_output +262,801326,"TERMINAL",0,0,"\r+ grain_iterator = grain.DataLoaderIterator(grain_dataloader, initial_state)\r\n:",,terminal_output +263,801716,"TERMINAL",0,0,"\r+ return grain_iterator\r\n:",,terminal_output +264,802201,"TERMINAL",0,0,"\r \r\n:",,terminal_output +265,802456,"TERMINAL",0,0,"\r \r\n:",,terminal_output +266,802559,"TERMINAL",0,0,"\r def build_checkpoint_manager(args: Args) -> Optional[ocp.CheckpointManager]:\r\n:",,terminal_output +267,802718,"TERMINAL",0,0,"\r@@ -214,6 +233,31 @@ def build_checkpoint_manager(args: Args) -> Optional[ocp.CheckpointManager]:\r\n:",,terminal_output +268,825141,"TERMINAL",0,0,"\r handler_registry.add(\r\n:",,terminal_output +269,825336,"TERMINAL",0,0,"\r ""model_state"", ocp.args.PyTreeRestore, ocp.handlers.PyTreeCheckpointHandler\r\n:",,terminal_output +270,825565,"TERMINAL",0,0,"\r )\r\n:",,terminal_output +271,825675,"TERMINAL",0,0,"\r+ handler_registry.add(\r\n:",,terminal_output +272,825954,"TERMINAL",0,0,"\r+ ""train_dataloader_state"",\r\n:",,terminal_output +273,826089,"TERMINAL",0,0,"\r+ grain.checkpoint.CheckpointSave,\r\n:",,terminal_output +274,826142,"TERMINAL",0,0,"\r+ cast(ocp.handlers.CheckpointHandler, grain.checkpoint.CheckpointHandler),\r\n:",,terminal_output +275,826322,"TERMINAL",0,0,"\r+ )\r\n:",,terminal_output +276,826489,"TERMINAL",0,0,"\r+ handler_registry.add(\r\n:",,terminal_output +277,826632,"TERMINAL",0,0,"\r+ ""train_dataloader_state"",\r\n:",,terminal_output +278,826902,"TERMINAL",0,0,"\r+ grain.checkpoint.CheckpointRestore,\r\n:",,terminal_output +279,827145,"TERMINAL",0,0,"\r+ cast(ocp.handlers.CheckpointHandler, grain.checkpoint.CheckpointHandler),\r\n:",,terminal_output +280,827366,"TERMINAL",0,0,"\r+ )\r\n:",,terminal_output +281,827598,"TERMINAL",0,0,"\r+ if args.val_data_dir:\r\n:",,terminal_output +282,827704,"TERMINAL",0,0,"\r+ handler_registry.add(\r\n:",,terminal_output +283,827968,"TERMINAL",0,0,"\r+ ""val_dataloader_state"",\r\n:",,terminal_output +284,828297,"TERMINAL",0,0,"\r+ grain.checkpoint.CheckpointSave,\r\n:",,terminal_output +285,828526,"TERMINAL",0,0,"\r+ cast(\r\n:",,terminal_output +286,828728,"TERMINAL",0,0,"\r+ ocp.handlers.CheckpointHandler, grain.checkpoint.CheckpointHandler\r\n:",,terminal_output +287,828955,"TERMINAL",0,0,"\r+ ),\r\n:",,terminal_output +288,830552,"TERMINAL",0,0,"\r+ )\r\n:",,terminal_output +289,830759,"TERMINAL",0,0,"\r+ 
handler_registry.add(\r\n:",,terminal_output +290,830995,"TERMINAL",0,0,"\r+ ""val_dataloader_state"",\r\n:",,terminal_output +291,831179,"TERMINAL",0,0,"\r+ grain.checkpoint.CheckpointRestore,\r\n:",,terminal_output +292,831378,"TERMINAL",0,0,"\r+ cast(\r\n:",,terminal_output +293,831625,"TERMINAL",0,0,"\r+ ocp.handlers.CheckpointHandler, grain.checkpoint.CheckpointHandler\r\n:",,terminal_output +294,831803,"TERMINAL",0,0,"\r+ ),\r\n:",,terminal_output +295,832386,"TERMINAL",0,0,"\r+ )\r\n:",,terminal_output +296,832684,"TERMINAL",0,0,"\r checkpoint_options = ocp.CheckpointManagerOptions(\r\n:",,terminal_output +297,833086,"TERMINAL",0,0,"\r save_interval_steps=args.log_checkpoint_interval,\r\n:",,terminal_output +298,856984,"TERMINAL",0,0,"\r max_to_keep=3,\r\n:",,terminal_output +299,857236,"TERMINAL",0,0,"\r@@ -235,12 +279,16 @@ def restore_or_initialize_components(\r\n:",,terminal_output +300,857443,"TERMINAL",0,0,"\r args: Args,\r\n:",,terminal_output +301,857558,"TERMINAL",0,0,"\r checkpoint_manager: Optional[ocp.CheckpointManager],\r\n:",,terminal_output +302,858006,"TERMINAL",0,0,"\r optimizer: nnx.ModelAndOptimizer,\r\n:",,terminal_output +303,858119,"TERMINAL",0,0,"\r+ train_iterator: grain.DataLoaderIterator,\r\n:",,terminal_output +304,858296,"TERMINAL",0,0,"\r rng: jax.Array,\r\n:\r replicated_sharding: NamedSharding,\r\n:\r+ val_iterator: Optional[grain.DataLoaderIterator],\r\n:\r restore_step: Optional[int] = None,\r\n:\r ) -> tuple[\r\n:\r int,\r\n:",,terminal_output +305,858304,"TERMINAL",0,0,"\r nnx.ModelAndOptimizer,\r\n:",,terminal_output +306,858514,"TERMINAL",0,0,"\r+ grain.DataLoaderIterator,\r\n:\r+ grain.DataLoaderIterator,\r\n:\r jax.Array,\r\n:\r ]:\r\n:\r step = 0\r\n:\r@@ -250,21 +298,32 @@ def restore_or_initialize_components(\r\n:\r assert checkpoint_manager is not None\r\n:\r abstract_optimizer = nnx.eval_shape(lambda: optimizer)\r\n:\r abstract_optimizer_state = nnx.state(abstract_optimizer)\r\n:",,terminal_output +307,858624,"TERMINAL",0,0,"\r- restore_args = ocp.args.Composite(\r\n:\r- model_state=ocp.args.PyTreeRestore(abstract_optimizer_state), # type: ignore\r\n:",,terminal_output +308,858721,"TERMINAL",0,0,"\r- )\r\n:\r+ if val_iterator:\r\n:\r+ restore_args = ocp.args.Composite(\r\n:",,terminal_output +309,858821,"TERMINAL",0,0,"\r+ model_state=ocp.args.PyTreeRestore(abstract_optimizer_state), # type: ignore\r\n:\r+ train_dataloader_state=grain.checkpoint.CheckpointRestore(train_iterator), # type: ignore\r\n:\r+ val_dataloader_state=grain.checkpoint.CheckpointRestore(val_iterator), # type: ignore\r\n:",,terminal_output +310,862432,"TERMINAL",0,0,"\rM save_interval_steps=args.log_checkpoint_interval,\r\n\r:",,terminal_output +311,862663,"TERMINAL",0,0,"\rM checkpoint_options = ocp.CheckpointManagerOptions(\r\n\r:",,terminal_output +312,862895,"TERMINAL",0,0,"\rM+ )\r\n\r:",,terminal_output +313,863013,"TERMINAL",0,0,"\rM+ ),\r\n\r:",,terminal_output +314,866686,"TERMINAL",0,0,"\r+ restore_args = ocp.args.Composite(\r\n:",,terminal_output +315,866890,"TERMINAL",0,0,"\r+ model_state=ocp.args.PyTreeRestore(abstract_optimizer_state), # type: ignore\r\n:",,terminal_output +316,867002,"TERMINAL",0,0,"\r+ train_dataloader_state=grain.checkpoint.CheckpointRestore(train_iterator), # type: ignore\r\n:",,terminal_output +317,867192,"TERMINAL",0,0,"\r+ val_dataloader_state=grain.checkpoint.CheckpointRestore(val_iterator), # type: ignore\r\n:",,terminal_output +318,867346,"TERMINAL",0,0,"\r+ )\r\n:",,terminal_output +319,867542,"TERMINAL",0,0,"\r+ 
else:\r\n:",,terminal_output +320,867692,"TERMINAL",0,0,"\r+ restore_args = ocp.args.Composite(\r\n:",,terminal_output +321,867805,"TERMINAL",0,0,"\r+ model_state=ocp.args.PyTreeRestore(abstract_optimizer_state), # type: ignore\r\n:",,terminal_output +322,868007,"TERMINAL",0,0,"\r+ train_dataloader_state=grain.checkpoint.CheckpointRestore(train_iterator), # type: ignore\r\n:",,terminal_output +323,868185,"TERMINAL",0,0,"\r+ )\r\n:",,terminal_output +324,868306,"TERMINAL",0,0,"\r restored = checkpoint_manager.restore(\r\n:",,terminal_output +325,868499,"TERMINAL",0,0,"\r checkpoint_manager.latest_step(), args=restore_args\r\n:",,terminal_output +326,868643,"TERMINAL",0,0,"\r )\r\n:",,terminal_output +327,868814,"TERMINAL",0,0,"\r restored_optimizer_state = restored[""model_state""]\r\n:",,terminal_output +328,868959,"TERMINAL",0,0,"\r nnx.update(optimizer, restored_optimizer_state)\r\n:",,terminal_output +329,869013,"TERMINAL",0,0,"\r+ train_iterator = restored[""train_dataloader_state""]\r\n:",,terminal_output +330,869208,"TERMINAL",0,0,"\r+ if val_iterator:\r\n:",,terminal_output +331,869355,"TERMINAL",0,0,"\r+ val_iterator = restored[""val_dataloader_state""]\r\n:",,terminal_output +332,877603,"TERMINAL",0,0,"\r step = checkpoint_manager.latest_step() or 0\r\n:",,terminal_output +333,877770,"TERMINAL",0,0,"\r- print(f""Restored model state from step {step}"")\r\n:",,terminal_output +334,877916,"TERMINAL",0,0,"\r+ print(f""Restored dataloader and model state from step {step}"")\r\n:",,terminal_output +335,878383,"TERMINAL",0,0,"\r else:\r\n:",,terminal_output +336,878520,"TERMINAL",0,0,"\r # Restore from pre-trained tokenizer (and LAM)\r\n:\r rng, _rng = jax.random.split(rng)\r\n:",,terminal_output +337,878591,"TERMINAL",0,0,"\r optimizer = restore_genie_components(optimizer, replicated_sharding, _rng, args)\r\n:\r- return step, optimizer, rng\r\n:\r+ return step, optimizer, train_iterator, val_iterator, rng\r\n:",,terminal_output +338,878638,"TERMINAL",0,0,"\r \r\n:",,terminal_output +339,878731,"TERMINAL",0,0,"\r \r\n:\r def _calculate_top_k_accuracy(\r\n:",,terminal_output +340,878829,"TERMINAL",0,0,"\r@@ -348,7 +407,6 @@ def _calculate_step_metrics(\r\n:",,terminal_output +341,879022,"TERMINAL",0,0,"\r def main(args: Args) -> None:\r\n:",,terminal_output +342,879132,"TERMINAL",0,0,"\r jax.distributed.initialize()\r\n:",,terminal_output +343,879329,"TERMINAL",0,0,"\r num_devices = jax.device_count()\r\n:",,terminal_output +344,879448,"TERMINAL",0,0,"\r- assert num_devices == 1, ""This script is not meant to be run on multiple devices.""\r\n:",,terminal_output +345,879651,"TERMINAL",0,0,"\r if num_devices == 0:\r\n:",,terminal_output +346,879795,"TERMINAL",0,0,"\r raise ValueError(""No JAX devices found."")\r\n:",,terminal_output +347,879946,"TERMINAL",0,0,"\r print(f""Running on {num_devices} devices."")\r\n:",,terminal_output +348,883762,"TERMINAL",0,0,"\r@@ -411,12 +469,16 @@ def main(args: Args) -> None:\r\n:",,terminal_output +349,883952,"TERMINAL",0,0,"\r val_iterator = build_dataloader(args, args.val_data_dir)\r\n:",,terminal_output +350,884065,"TERMINAL",0,0,"\r \r\n:",,terminal_output +351,884259,"TERMINAL",0,0,"\r # --- Restore checkpoint ---\r\n:",,terminal_output +352,884400,"TERMINAL",0,0,"\r- step, optimizer, rng = restore_or_initialize_components(\r\n:",,terminal_output +353,886323,"TERMINAL",0,0,"\r- args,\r\n:",,terminal_output +354,886575,"TERMINAL",0,0,"\r- checkpoint_manager,\r\n:",,terminal_output +355,886724,"TERMINAL",0,0,"\r- optimizer,\r\n:",,terminal_output 
+356,886867,"TERMINAL",0,0,"\r- rng,\r\n:",,terminal_output +357,887019,"TERMINAL",0,0,"\r- replicated_sharding,\r\n:",,terminal_output +358,887126,"TERMINAL",0,0,"\r+ step, optimizer, train_iterator, val_iterator, rng = (\r\n:",,terminal_output +359,887313,"TERMINAL",0,0,"\r+ restore_or_initialize_components(\r\n:",,terminal_output +360,887463,"TERMINAL",0,0,"\r+ args,\r\n:",,terminal_output +361,887612,"TERMINAL",0,0,"\r+ checkpoint_manager,\r\n:",,terminal_output +362,887724,"TERMINAL",0,0,"\r+ optimizer,\r\n:",,terminal_output +363,887935,"TERMINAL",0,0,"\r+ train_iterator,\r\n:",,terminal_output +364,888087,"TERMINAL",0,0,"\r+ rng,\r\n:",,terminal_output +365,888244,"TERMINAL",0,0,"\r+ replicated_sharding,\r\n:",,terminal_output +366,888360,"TERMINAL",0,0,"\r+ val_iterator,\r\n:",,terminal_output +367,888503,"TERMINAL",0,0,"\r+ )\r\n:",,terminal_output +368,888780,"TERMINAL",0,0,"\r )\r\n:",,terminal_output +369,893211,"TERMINAL",0,0,"\r \r\n:",,terminal_output +370,893432,"TERMINAL",0,0,"\r # --- Define loss and train step (close over args) ---\r\n:",,terminal_output +371,893632,"TERMINAL",0,0,"\r@@ -551,10 +613,50 @@ def main(args: Args) -> None:\r\n:",,terminal_output +372,893776,"TERMINAL",0,0,"\r return val_metrics, batch, recon, recon_full_frame\r\n:",,terminal_output +373,893921,"TERMINAL",0,0,"\r \r\n:",,terminal_output +374,896702,"TERMINAL",0,0,"\rM num_devices = jax.device_count()\r\n\r:",,terminal_output +375,896865,"TERMINAL",0,0,"\rM jax.distributed.initialize()\r\n\r:",,terminal_output +376,957946,"TERMINAL",0,0,"\r return val_metrics, batch, recon, recon_full_frame\r\n:",,terminal_output +377,958230,"TERMINAL",0,0,"\r \r\n:",,terminal_output +378,958473,"TERMINAL",0,0,"\r # --- TRAIN LOOP ---\r\n:",,terminal_output +379,958926,"TERMINAL",0,0,"\rM- assert num_devices == 1, ""This script is not meant to be run on multiple devices.""\r\n\r:",,terminal_output +380,959137,"TERMINAL",0,0,"\rM num_devices = jax.device_count()\r\n\r:",,terminal_output +381,959311,"TERMINAL",0,0,"\rM jax.distributed.initialize()\r\n\r:",,terminal_output +382,959493,"TERMINAL",0,0,"\rM def main(args: Args) -> None:\r\n\r:",,terminal_output +383,959944,"TERMINAL",0,0,"\rM@@ -348,7 +407,6 @@ def _calculate_step_metrics(\r\n\r:",,terminal_output +384,959997,"TERMINAL",0,0,"\rM def _calculate_top_k_accuracy(\r\n\r:",,terminal_output +385,960230,"TERMINAL",0,0,"\r \r\n:",,terminal_output +386,960718,"TERMINAL",0,0,"\r # --- Define loss and train step (close over args) ---\r\n:\r@@ -551,10 +613,50 @@ def main(args: Args) -> None:\r\n:",,terminal_output +387,960771,"TERMINAL",0,0,"\r return val_metrics, batch, recon, recon_full_frame\r\n:",,terminal_output +388,1022452,"TERMINAL",0,0,"\rM jax.distributed.initialize()\r\n\r:",,terminal_output +389,1022697,"TERMINAL",0,0,"\rM def main(args: Args) -> None:\r\n\r:",,terminal_output +390,1022878,"TERMINAL",0,0,"\rM@@ -348,7 +407,6 @@ def _calculate_step_metrics(\r\n\r:",,terminal_output +391,1023043,"TERMINAL",0,0,"\rM def _calculate_top_k_accuracy(\r\n\r:",,terminal_output +392,1033167,"TERMINAL",0,0,"\r/\r def _calculate_top_k_accuracy(\r\n@@ -348,7 +407,6 @@ def _calculate_step_metrics(\r\n def main(args: Args) -> None:\r\n jax.distributed.initialize()\r\n num_devices = jax.device_count()\r\n- assert num_devices == 1, ""This script is not meant to be run on multiple devices.""\r\n if num_devices == 0:\r\n raise ValueError(""No JAX devices found."")\r\n print(f""Running on {num_devices} devices."")\r\n@@ -411,12 +469,16 @@ def main(args: Args) -> 
None:\r\n val_iterator = build_dataloader(args, args.val_data_dir)\r\n \r\n # --- Restore checkpoint ---\r\n- step, optimizer, rng = restore_or_initialize_components(\r\n- args,\r\n- checkpoint_manager,\r\n- optimizer,\r\n- rng,\r\n- replicated_sharding,\r\n+ step, optimizer, train_iterator, val_iterator, rng = (\r\n+ restore_or_initialize_components(\r\n+ args,\r\n+ checkpoint_manager,\r\n+ optimizer,\r\n+ train_iterator,\r\n+ rng,\r\n+ replicated_sharding,\r\n+ val_iterator,\r\n+ )\r\n )\r\n\rPattern not found (press RETURN)",,terminal_output +393,1033907,"TERMINAL",0,0,"\r \r\n:",,terminal_output +394,1034372,"TERMINAL",0,0,"\r # --- Define loss and train step (close over args) ---\r\n:",,terminal_output +395,1034607,"TERMINAL",0,0,"\r@@ -551,10 +613,50 @@ def main(args: Args) -> None:\r\n:",,terminal_output +396,1035117,"TERMINAL",0,0,"\r return val_metrics, batch, recon, recon_full_frame\r\n:\r \r\n:",,terminal_output +397,1035188,"TERMINAL",0,0,"\r # --- TRAIN LOOP ---\r\n:\r+ dataloader_train = (\r\n:",,terminal_output +398,1035242,"TERMINAL",0,0,"\r+ {\r\n:\r+ ""videos"": jax.make_array_from_process_local_data(\r\n:",,terminal_output +399,1035526,"TERMINAL",0,0,"\r+ videos_sharding, local_data=elem[""videos""]\r\n:\r+ ),\r\n:\r+ ""actions"": (\r\n:\r+ jax.make_array_from_process_local_data(\r\n:\r+ actions_sharding, elem[""actions""]\r\n:\r+ )\r\n:\r+ if args.use_gt_actions\r\n:\r+ else None\r\n:",,terminal_output +400,1035570,"TERMINAL",0,0,"\r+ ),\r\n:",,terminal_output +401,1035676,"TERMINAL",0,0,"\r+ }\r\n:",,terminal_output +402,1036102,"TERMINAL",0,0,"\r+ for elem in train_iterator\r\n:",,terminal_output +403,1036284,"TERMINAL",0,0,"\r+ )\r\n:\r+ dataloader_val = None\r\n:\r+ if val_iterator:\r\n:\r+ dataloader_val = (\r\n:\r+ {\r\n:",,terminal_output +404,1036336,"TERMINAL",0,0,"\r+ ""videos"": jax.make_array_from_process_local_data(\r\n:",,terminal_output +405,1036633,"TERMINAL",0,0,"\r+ videos_sharding, elem[""videos""]\r\n:",,terminal_output +406,1036866,"TERMINAL",0,0,"\r+ ),\r\n:",,terminal_output +407,1037026,"TERMINAL",0,0,"\r+ ""actions"": (\r\n:",,terminal_output +408,1037176,"TERMINAL",0,0,"\r+ jax.make_array_from_process_local_data(\r\n:",,terminal_output +409,1037469,"TERMINAL",0,0,"\r+ actions_sharding, elem[""actions""]\r\n:",,terminal_output +410,1037811,"TERMINAL",0,0,"\r+ )\r\n:",,terminal_output +411,1038338,"TERMINAL",0,0,"\r+ if args.use_gt_actions\r\n:",,terminal_output +412,1039144,"TERMINAL",0,0,"\r+ else None\r\n:",,terminal_output +413,1042143,"TERMINAL",0,0,"\r+ ),\r\n:",,terminal_output +414,1042768,"TERMINAL",0,0,"\r+ }\r\n:\r+ for elem in val_iterator\r\n:\r+ )\r\n:\r+ if jax.process_index() == 0:\r\n:\r+ first_batch = next(dataloader_train)\r\n:",,terminal_output +415,1042868,"TERMINAL",0,0,"\r+ first_batch[""rng""] = rng # type: ignore\r\n:\r+ compiled = train_step.lower(optimizer, first_batch).compile()\r\n:",,terminal_output +416,1043074,"TERMINAL",0,0,"\r+ print_compiled_memory_stats(compiled.memory_analysis())\r\n:",,terminal_output +417,1043345,"TERMINAL",0,0,"\r+ print_compiled_cost_analysis(compiled.cost_analysis())\r\n:",,terminal_output +418,1045391,"TERMINAL",0,0,"\r+ # Do not skip the first batch during training\r\n:",,terminal_output +419,1045538,"TERMINAL",0,0,"\r+ dataloader_train = itertools.chain([first_batch], dataloader_train)\r\n:",,terminal_output +420,1045726,"TERMINAL",0,0,"\r print(f""Starting training from step {step}..."")\r\n:",,terminal_output +421,1045830,"TERMINAL",0,0,"\r first_step = step\r\n:",,terminal_output 
+422,1046030,"TERMINAL",0,0,"\r while step < args.num_steps:\r\n:",,terminal_output +423,1046170,"TERMINAL",0,0,"\r- for batch in train_iterator:\r\n:",,terminal_output +424,1046305,"TERMINAL",0,0,"\r+ for batch in dataloader_train:\r\n:",,terminal_output +425,1046456,"TERMINAL",0,0,"\r # --- Train step ---\r\n:",,terminal_output +426,1046597,"TERMINAL",0,0,"\r rng, _rng_mask = jax.random.split(rng, 2)\r\n:",,terminal_output +427,1046737,"TERMINAL",0,0,"\r batch[""rng""] = _rng_mask\r\n:",,terminal_output +428,1046857,"TERMINAL",0,0,"\r@@ -565,12 +667,12 @@ def main(args: Args) -> None:\r\n:",,terminal_output +429,1047043,"TERMINAL",0,0,"\r \r\n:",,terminal_output +430,1047179,"TERMINAL",0,0,"\r # --- Validation loss ---\r\n:",,terminal_output +431,1068364,"TERMINAL",0,0,"\r val_results = {}\r\n:",,terminal_output +432,1068553,"TERMINAL",0,0,"\r- if val_iterator and step % args.val_interval == 0:\r\n:",,terminal_output +433,1068727,"TERMINAL",0,0,"\r+ if dataloader_val and step % args.val_interval == 0:\r\n:",,terminal_output +434,1068870,"TERMINAL",0,0,"\r rng, _rng_mask_val = jax.random.split(rng, 2)\r\n:",,terminal_output +435,1069005,"TERMINAL",0,0,"\r print(""Calculating validation metrics..."")\r\n:",,terminal_output +436,1069226,"TERMINAL",0,0,"\r val_metrics, val_gt_batch, val_recon, val_recon_full_frame = (\r\n:",,terminal_output +437,1069332,"TERMINAL",0,0,"\r calculate_validation_metrics(\r\n:",,terminal_output +438,1069554,"TERMINAL",0,0,"\r- val_iterator, optimizer.model, _rng_mask_val\r\n:",,terminal_output +439,1069654,"TERMINAL",0,0,"\r+ dataloader_val, optimizer.model, _rng_mask_val\r\n:",,terminal_output +440,1069786,"TERMINAL",0,0,"\r )\r\n:",,terminal_output +441,1070110,"TERMINAL",0,0,"\r )\r\n:",,terminal_output +442,1070248,"TERMINAL",0,0,"\r print(f""Step {step}, validation loss: {val_metrics['val_loss']}"")\r\n:",,terminal_output +443,1070431,"TERMINAL",0,0,"\r@@ -688,9 +790,23 @@ def main(args: Args) -> None:\r\n:",,terminal_output +444,1070591,"TERMINAL",0,0,"\r if args.save_ckpt and step % args.log_checkpoint_interval == 0:\r\n:",,terminal_output +445,1070758,"TERMINAL",0,0,"\r assert checkpoint_manager is not None\r\n:",,terminal_output +446,1070948,"TERMINAL",0,0,"\r optimizer_state = nnx.state(optimizer)\r\n:",,terminal_output +447,1071070,"TERMINAL",0,0,"\r- ckpt_manager_args = ocp.args.Composite(\r\n:",,terminal_output +448,1071283,"TERMINAL",0,0,"\r- model_state=ocp.args.PyTreeSave(optimizer_state), # type: ignore\r\n:",,terminal_output +449,1080147,"TERMINAL",0,0,"\r- )\r\n:",,terminal_output +450,1080426,"TERMINAL",0,0,"\r+ if val_iterator:\r\n:",,terminal_output +451,1080568,"TERMINAL",0,0,"\r+ ckpt_manager_args = ocp.args.Composite(\r\n:",,terminal_output +452,1080618,"TERMINAL",0,0,"\r+ model_state=ocp.args.PyTreeSave(optimizer_state), # type: ignore\r\n:",,terminal_output +453,1080768,"TERMINAL",0,0,"\r+ train_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\r\n:",,terminal_output +454,1080960,"TERMINAL",0,0,"\r+ train_iterator # type: ignore\r\n:",,terminal_output +455,1081120,"TERMINAL",0,0,"\r+ ),\r\n:",,terminal_output +456,1081223,"TERMINAL",0,0,"\r+ val_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\r\n:",,terminal_output +457,1081373,"TERMINAL",0,0,"\r+ val_iterator # type: ignore\r\n:",,terminal_output +458,1081558,"TERMINAL",0,0,"\r+ ),\r\n:",,terminal_output +459,1081710,"TERMINAL",0,0,"\r+ )\r\n:",,terminal_output +460,1081893,"TERMINAL",0,0,"\r+ else:\r\n:",,terminal_output 
+461,1081995,"TERMINAL",0,0,"\r+ ckpt_manager_args = ocp.args.Composite(\r\n:",,terminal_output +462,1082185,"TERMINAL",0,0,"\r+ model_state=ocp.args.PyTreeSave(optimizer_state), # type: ignore\r\n:",,terminal_output +463,1082319,"TERMINAL",0,0,"\r+ train_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\r\n:",,terminal_output +464,1082461,"TERMINAL",0,0,"\r+ train_iterator # type: ignore\r\n:",,terminal_output +465,1082599,"TERMINAL",0,0,"\r+ ),\r\n:",,terminal_output +466,1082736,"TERMINAL",0,0,"\r+ )\r\n:",,terminal_output +467,1082883,"TERMINAL",0,0,"\r checkpoint_manager.save(step, args=ckpt_manager_args)\r\n:",,terminal_output +468,1083098,"TERMINAL",0,0,"\r print(f""Saved checkpoint at step {step}"")\r\n:",,terminal_output +469,1092610,"TERMINAL",0,0,"\r if step >= args.num_steps:\r\n:",,terminal_output +470,1092823,"TERMINAL",0,0,"\rdiff --git a/jasmine/utils/dataloader_torch.py b/jasmine/utils/dataloader_torch.py\r\n:",,terminal_output +471,1092967,"TERMINAL",0,0,"\rdeleted file mode 100644\r\n:",,terminal_output +472,1093120,"TERMINAL",0,0,"\rindex 7c189d2..0000000\r\n:",,terminal_output +473,1093299,"TERMINAL",0,0,"\r--- a/jasmine/utils/dataloader_torch.py\r\n:",,terminal_output +474,1093571,"TERMINAL",0,0,"\r+++ /dev/null\r\n:",,terminal_output +475,1093812,"TERMINAL",0,0,"\r@@ -1,37 +0,0 @@\r\n:",,terminal_output +476,1094433,"TERMINAL",0,0,"\rM assert checkpoint_manager is not None\r\n\r:",,terminal_output +477,1094937,"TERMINAL",0,0,"\rM if args.save_ckpt and step % args.log_checkpoint_interval == 0:\r\n\r:",,terminal_output +478,1095035,"TERMINAL",0,0,"\rM@@ -688,9 +790,23 @@ def main(args: Args) -> None:\r\n\r:\rM print(f""Step {step}, validation loss: {val_metrics['val_loss']}"")\r\n\r:",,terminal_output +479,1095306,"TERMINAL",0,0,"\rM )\r\n\r:",,terminal_output +480,1095511,"TERMINAL",0,0,"\rM )\r\n\r:",,terminal_output +481,1095827,"TERMINAL",0,0,"\rM+ dataloader_val, optimizer.model, _rng_mask_val\r\n\r:",,terminal_output +482,1096293,"TERMINAL",0,0,"\rM- val_iterator, optimizer.model, _rng_mask_val\r\n\r:",,terminal_output +483,1096553,"TERMINAL",0,0,"\r print(f""Saved checkpoint at step {step}"")\r\n:",,terminal_output +484,1097087,"TERMINAL",0,0,"\r if step >= args.num_steps:\r\n:\rdiff --git a/jasmine/utils/dataloader_torch.py b/jasmine/utils/dataloader_torch.py\r\n:",,terminal_output +485,1097191,"TERMINAL",0,0,"\rdeleted file mode 100644\r\n:\rindex 7c189d2..0000000\r\n:\r--- a/jasmine/utils/dataloader_torch.py\r\n:",,terminal_output +486,1097267,"TERMINAL",0,0,"\r+++ /dev/null\r\n:\r@@ -1,37 +0,0 @@\r\n:\r-# file copied from https://raw.githubusercontent.com/FLAIROx/jafar/refs/heads/main/utils/dataloader.py\r\n:",,terminal_output +487,1097343,"TERMINAL",0,0,"\r-from pathlib import Path\r\n:\r-\r\n:\r-import jax.numpy as jnp\r\n:",,terminal_output +488,1097527,"TERMINAL",0,0,"\r-import numpy as np\r\n:",,terminal_output +489,1097771,"TERMINAL",0,0,"\r-from torch.utils.data import Dataset, DataLoader\r\n:",,terminal_output +490,1098032,"TERMINAL",0,0,"\r-\r\n:",,terminal_output +491,1098207,"TERMINAL",0,0,"\r-\r\n:",,terminal_output +492,1108324,"TERMINAL",0,0,"\r-class VideoDataset(Dataset):\r\n:",,terminal_output +493,1108967,"TERMINAL",0,0,"\r- def __init__(self, data_dir, seq_len):\r\n:\r- self.data_dir = Path(data_dir)\r\n:\r- self.seq_len = seq_len\r\n:\r- self.metadata = np.load(self.data_dir / ""metadata.npy"", allow_pickle=True)\r\n:\r-\r\n:\r- def __len__(self):\r\n:",,terminal_output +494,1109400,"TERMINAL",0,0,"\r- return 
len(self.metadata)\r\n:\r-\r\n:\r- def __getitem__(self, idx):\r\n:\r- episode = np.load(self.metadata[idx][""path""])\r\n:\r- start_idx = np.random.randint(0, len(episode) - self.seq_len + 1)\r\n:\r- seq = episode[start_idx : start_idx + self.seq_len]\r\n:\r- return seq.astype(np.float32) / 255.0\r\n:\r-\r\n:\r-\r\n:\r-def collate_fn(batch):\r\n:\r- """"""Convert batch of numpy arrays to JAX array""""""\r\n:\r- return {""videos"": jnp.array(np.stack(batch))}\r\n:\r-\r\n:\r-\r\n:",,terminal_output +495,1109429,"TERMINAL",0,0,"\r-def get_dataloader(data_dir, seq_len, batch_size):\r\n:",,terminal_output +496,1109939,"TERMINAL",0,0,"\r- dataset = VideoDataset(data_dir, seq_len)\r\n:\r- return DataLoader(\r\n:\r- dataset,\r\n:\r- batch_size=batch_size,\r\n:\r- shuffle=True,\r\n:\r- collate_fn=collate_fn,\r\n:\r- )\r\n:\rdiff --git a/pyproject.toml b/pyproject.toml\r\n:\rindex 9527b14..115048b 100644\r\n:\r--- a/pyproject.toml\r\n:\r+++ b/pyproject.toml\r\n:\r@@ -14,7 +14,6 @@ dependencies = [\r\n:\r ""array-record>=0.7.2"",\r\n:\r ""pre-commit>=4.2.0"",\r\n:\r ""pillow>=11.3.0"",\r\n:\r- ""torch>=2.0.1"",\r\n:\r ]\r\n:",,terminal_output +497,1109981,"TERMINAL",0,0,"\r \r\n:",,terminal_output +498,1110209,"TERMINAL",0,0,"\r [build-system]\r\n(END)\r\r(END)\r\r(END)\r\r(END)\r\r(END)\r\r(END)\r\r(END)\r\r(END)\r\r(END)",,terminal_output +499,1111900,"TERMINAL",0,0,"\rM- episode = np.load(self.metadata[idx][""path""])\r\n\r:",,terminal_output +500,1112110,"TERMINAL",0,0,"\rM- def __getitem__(self, idx):\r\n\r:",,terminal_output +501,1112308,"TERMINAL",0,0,"\rM-\r\n\r:",,terminal_output +502,1112673,"TERMINAL",0,0,"\r ]\r\n:",,terminal_output +503,1112864,"TERMINAL",0,0,"\r \r\n:",,terminal_output +504,1113063,"TERMINAL",0,0,"\r [build-system]\r\n(END)",,terminal_output +505,1113254,"TERMINAL",0,0,"\rM- episode = np.load(self.metadata[idx][""path""])\r\n\r:",,terminal_output +506,1113736,"TERMINAL",0,0,"\rM- def __getitem__(self, idx):\r\n\r:",,terminal_output +507,1113919,"TERMINAL",0,0,"\rM-\r\n\r:\rM- return len(self.metadata)\r\n\r:\rM- def __len__(self):\r\n\r:\rM-\r\n\r:\rM- self.metadata = np.load(self.data_dir / ""metadata.npy"", allow_pickle=True)\r\n\r:\rM- self.seq_len = seq_len\r\n\r:",,terminal_output +508,1114050,"TERMINAL",0,0,"\rM- self.data_dir = Path(data_dir)\r\n\r:\rM- def __init__(self, data_dir, seq_len):\r\n\r:\rM-class VideoDataset(Dataset):\r\n\r:\rM-\r\n\r:",,terminal_output +509,1114101,"TERMINAL",0,0,"\rM-\r\n\r:",,terminal_output +510,1114404,"TERMINAL",0,0,"\rM-from torch.utils.data import Dataset, DataLoader\r\n\r:\rM-import numpy as np\r\n\r:\rM-import jax.numpy as jnp\r\n\r:\rM-\r\n\r:\rM-from pathlib import Path\r\n\r:\rM-# file copied from https://raw.githubusercontent.com/FLAIROx/jafar/refs/heads/main/utils/dataloader.py\r\n\r:\rM@@ -1,37 +0,0 @@\r\n\r:\rM+++ /dev/null\r\n\r:\rM--- a/jasmine/utils/dataloader_torch.py\r\n\r:\rMindex 7c189d2..0000000\r\n\r:",,terminal_output +511,1114635,"TERMINAL",0,0,"\rMdeleted file mode 100644\r\n\r:\rMdiff --git a/jasmine/utils/dataloader_torch.py b/jasmine/utils/dataloader_torch.py\r\n\r:\rM if step >= args.num_steps:\r\n\r:\rM print(f""Saved checkpoint at step {step}"")\r\n\r:\rM checkpoint_manager.save(step, args=ckpt_manager_args)\r\n\r:\rM+ )\r\n\r:",,terminal_output +512,1114795,"TERMINAL",0,0,"\r- seq = episode[start_idx : start_idx + self.seq_len]\r\n:",,terminal_output +513,1115271,"TERMINAL",0,0,"\r- return seq.astype(np.float32) / 255.0\r\n:",,terminal_output +514,1115379,"TERMINAL",0,0,"\r-\r\n:\r-\r\n:\r-def 
collate_fn(batch):\r\n:",,terminal_output +515,1115589,"TERMINAL",0,0,"\r- """"""Convert batch of numpy arrays to JAX array""""""\r\n:",,terminal_output +516,1115729,"TERMINAL",0,0,"\r- return {""videos"": jnp.array(np.stack(batch))}\r\n:",,terminal_output +517,1115913,"TERMINAL",0,0,"\r-\r\n:",,terminal_output +518,1116053,"TERMINAL",0,0,"\r-\r\n:",,terminal_output +519,1116245,"TERMINAL",0,0,"\r-def get_dataloader(data_dir, seq_len, batch_size):\r\n:",,terminal_output +520,1116351,"TERMINAL",0,0,"\r- dataset = VideoDataset(data_dir, seq_len)\r\n:",,terminal_output +521,1116491,"TERMINAL",0,0,"\r- return DataLoader(\r\n:",,terminal_output +522,1116627,"TERMINAL",0,0,"\r- dataset,\r\n:",,terminal_output +523,1128760,"TERMINAL",0,0,"\r- batch_size=batch_size,\r\n:",,terminal_output +524,1128954,"TERMINAL",0,0,"\r- shuffle=True,\r\n:",,terminal_output +525,1129097,"TERMINAL",0,0,"\r- collate_fn=collate_fn,\r\n:",,terminal_output +526,1129201,"TERMINAL",0,0,"\r- )\r\n:",,terminal_output +527,1129347,"TERMINAL",0,0,"\rdiff --git a/pyproject.toml b/pyproject.toml\r\n:",,terminal_output +528,1129557,"TERMINAL",0,0,"\rindex 9527b14..115048b 100644\r\n:",,terminal_output +529,1129658,"TERMINAL",0,0,"\r--- a/pyproject.toml\r\n:",,terminal_output +530,1129853,"TERMINAL",0,0,"\r+++ b/pyproject.toml\r\n:",,terminal_output +531,1130015,"TERMINAL",0,0,"\r@@ -14,7 +14,6 @@ dependencies = [\r\n:",,terminal_output +532,1130068,"TERMINAL",0,0,"\r ""array-record>=0.7.2"",\r\n:",,terminal_output +533,1130324,"TERMINAL",0,0,"\r ""pre-commit>=4.2.0"",\r\n:",,terminal_output +534,1130463,"TERMINAL",0,0,"\r ""pillow>=11.3.0"",\r\n:",,terminal_output +535,1130533,"TERMINAL",0,0,"\r- ""torch>=2.0.1"",\r\n:",,terminal_output +536,1130758,"TERMINAL",0,0,"\r ]\r\n:",,terminal_output +537,1131024,"TERMINAL",0,0,"\r \r\n:",,terminal_output +538,1131125,"TERMINAL",0,0,"\r [build-system]\r\n(END)",,terminal_output +539,1182182,"TERMINAL",0,0,"\r\r(END)",,terminal_output +540,1182340,"TERMINAL",0,0,"\r\r(END)",,terminal_output +541,1182492,"TERMINAL",0,0,"\r\r(END)",,terminal_output +542,1185767,"TERMINAL",0,0,"\rM- episode = np.load(self.metadata[idx][""path""])\r\n\r:",,terminal_output +543,1186250,"TERMINAL",0,0,"\rM- def __getitem__(self, idx):\r\n\r:",,terminal_output +544,1186430,"TERMINAL",0,0,"\rM-\r\n\r:\rM- return len(self.metadata)\r\n\r:\rM- def __len__(self):\r\n\r:\rM-\r\n\r:\rM- self.metadata = np.load(self.data_dir / ""metadata.npy"", allow_pickle=True)\r\n\r:\rM- self.seq_len = seq_len\r\n\r:",,terminal_output +545,1186614,"TERMINAL",0,0,"\r@@ -14,7 +14,6 @@ dependencies = [\r\n:",,terminal_output +546,1187054,"TERMINAL",0,0,"\r ""array-record>=0.7.2"",\r\n:",,terminal_output +547,1187263,"TERMINAL",0,0,"\r ""pre-commit>=4.2.0"",\r\n:\r ""pillow>=11.3.0"",\r\n:\r- ""torch>=2.0.1"",\r\n:\r ]\r\n:\r \r\n:\r [build-system]\r\n(END)",,terminal_output +548,1187327,"TERMINAL",0,0,"\r\r(END)",,terminal_output +549,1188270,"TERMINAL",0,0,"\r[?1l>]0;tum_cte0515@hkn1990:~/Projects/jasmine",,terminal_output +550,1195041,"TERMINAL",0,0,"clear",,terminal_command +551,1195084,"TERMINAL",0,0,"]633;C]0;tum_cte0515@hkn1990:~/Projects/jasmine",,terminal_output +552,1205214,"TERMINAL",0,0,"branch",,terminal_command +553,1209036,"TERMINAL",0,0,"git diff ablation/full-precision-training",,terminal_command +554,1209125,"TERMINAL",0,0,"]633;C[?1h=\r",,terminal_output +555,1209206,"TERMINAL",0,0,"diff --git a/jasmine/train_dynamics.py b/jasmine/train_dynamics.py\r\nindex 62d84cd..06cd966 100644\r\n--- 
a/jasmine/train_dynamics.py\r\n+++ b/jasmine/train_dynamics.py\r\n@@ -82,7 +82,7 @@ class Args:\r\n mask_limit: float = 0.5\r\n z_loss_weight: float = 0.0\r\n param_dtype = jnp.float32\r\n- dtype = jnp.float32\r\n+ dtype = jnp.bfloat16\r\n use_flash_attention: bool = True\r\n use_gt_actions: bool = False\r\n # Logging\r\n\r[?1l>]0;tum_cte0515@hkn1990:~/Projects/jasmine",,terminal_output +556,1215178,"TERMINAL",0,0,"clear",,terminal_command +557,1215220,"TERMINAL",0,0,"]633;C]0;tum_cte0515@hkn1990:~/Projects/jasmine",,terminal_output +558,1260042,"slurm/jobs/franz/berlin/coinrun/mila_submission/250M_dataset/coinrun_tokenizer_base.sh",0,0,"#!/usr/bin/env bash\n\n#SBATCH --nodes=1\n#SBATCH --ntasks-per-node=1\n#SBATCH --gres=gpu:1\n#SBATCH --time=24:00:00\n#SBATCH --cpus-per-task=8\n#SBATCH --output=/fast/project/HFMI_SynergyUnit/jafar_ws/logs/franz/coinrun/tokenizer/%x_%j.log\n#SBATCH --error=/fast/project/HFMI_SynergyUnit/jafar_ws/logs/franz/coinrun/tokenizer/%x_%j.log\n#SBATCH --job-name=tokenizer_coinrun_mila_submission\n#SBATCH --requeue\n#SBATCH --signal=b:usr1@300 # 5 min before timeout\n\n# --- signal trap to requeue job before timeout ---\nrequeue_job() {\n echo ""[$(date)] caught sigusr1 (timeout warning), requeueing slurm job $SLURM_JOB_ID...""\n # optional: trigger checkpoint saving here\n # e.g., touch $checkpoint_dir/requeue_trigger\n scontrol requeue $SLURM_JOB_ID\n exit 0\n}\n\ntrap requeue_job sigusr1\n\n# set checkpoint flag based on restart count\nrestart_count=$(scontrol show job $SLURM_JOB_ID | grep -o 'Restarts=[0-9]*' | cut -d'=' -f2)\n\nif [ $restart_count -eq 0 ]; then\n restore_ckpt_flag=""--no-restore-ckpt""\nelse\n restore_ckpt_flag=""--restore-ckpt""\nfi\n\n# Log the sbatch script\ncat $0\n\nsource .venv/bin/activate\n\njob_name=$SLURM_JOB_NAME\nslurm_job_id=$SLURM_JOB_ID\n\ntags=""coinrun tokenizer 250m_dataset mila_submission""\n\narray_records_dir=""/fast/project/HFMI_SynergyUnit/jafar_ws/data/coinrun/array_records_250M_npy_arr_rec/array_record/""\nCHECKPOINT_DIR=""/fast/project/HFMI_SynergyUnit/jafar_ws/checkpoints/coinrun/tokenizer/${job_name}_${slurm_job_id}""\nmkdir -p $CHECKPOINT_DIR\n\nenv | grep SLURM\n\ncurrent_branch=$(git rev-parse --abbrev-ref HEAD)\nif [ ""$current_branch"" != ""prepend-action-maskgit"" ]; then\n echo ""This script must be run from the prepend-action-maskgit branch. Current branch is $current_branch. 
Exiting.""\n exit 1\nfi\n\nsrun python jasmine/train_tokenizer.py \\n --patch_size=16 \\n --save_ckpt \\n $restore_ckpt_flag \\n --wandb_id $SLURM_JOB_ID \\n --ckpt_dir $CHECKPOINT_DIR \\n --name=""${job_name}_${slurm_job_id}"" \\n --tags $tags \\n --entity instant-uv \\n --project jafar \\n --data_dir=""${array_records_dir}/train"" \\n --val_data_dir=""${array_records_dir}/val"" &\n\nchild_pid=$!\n\nwait $child_pid\n",shellscript,tab +559,1263757,"slurm/jobs/franz/berlin/coinrun/mila_submission/50M_dataset/coinrun_tokenizer_base.sh",0,0,"#!/usr/bin/env bash\n\n#SBATCH --nodes=1\n#SBATCH --ntasks-per-node=1\n#SBATCH --gres=gpu:1\n#SBATCH --time=24:00:00\n#SBATCH --cpus-per-task=8\n#SBATCH --output=/fast/project/HFMI_SynergyUnit/jafar_ws/logs/franz/coinrun/tokenizer/%x_%j.log\n#SBATCH --error=/fast/project/HFMI_SynergyUnit/jafar_ws/logs/franz/coinrun/tokenizer/%x_%j.log\n#SBATCH --job-name=tokenizer_coinrun_mila_submission\n#SBATCH --requeue\n#SBATCH --signal=b:usr1@300 # 5 min before timeout\n\n# --- signal trap to requeue job before timeout ---\nrequeue_job() {\n echo ""[$(date)] caught sigusr1 (timeout warning), requeueing slurm job $SLURM_JOB_ID...""\n # optional: trigger checkpoint saving here\n # e.g., touch $checkpoint_dir/requeue_trigger\n scontrol requeue $SLURM_JOB_ID\n exit 0\n}\n\ntrap requeue_job sigusr1\n\n# set checkpoint flag based on restart count\nrestart_count=$(scontrol show job $SLURM_JOB_ID | grep -o 'Restarts=[0-9]*' | cut -d'=' -f2)\n\nif [ $restart_count -eq 0 ]; then\n restore_ckpt_flag=""--no-restore-ckpt""\nelse\n restore_ckpt_flag=""--restore-ckpt""\nfi\n\n# Log the sbatch script\ncat $0\n\nsource .venv/bin/activate\n\njob_name=$SLURM_JOB_NAME\nslurm_job_id=$SLURM_JOB_ID\n\ntags=""coinrun tokenizer 250m_dataset mila_submission""\n\narray_records_dir=""/fast/project/HFMI_SynergyUnit/jafar_ws/data/coinrun/array_records_250M_npy_arr_rec/array_record/""\nCHECKPOINT_DIR=""/fast/project/HFMI_SynergyUnit/jafar_ws/checkpoints/coinrun/tokenizer/${job_name}_${slurm_job_id}""\nmkdir -p $CHECKPOINT_DIR\n\nenv | grep SLURM\n\ncurrent_branch=$(git rev-parse --abbrev-ref HEAD)\nif [ ""$current_branch"" != ""prepend-action-maskgit"" ]; then\n echo ""This script must be run from the prepend-action-maskgit branch. Current branch is $current_branch. Exiting.""\n exit 1\nfi\n\nsrun python jasmine/train_tokenizer.py \\n --patch_size=16 \\n --save_ckpt \\n $restore_ckpt_flag \\n --wandb_id $SLURM_JOB_ID \\n --ckpt_dir $CHECKPOINT_DIR \\n --name=""${job_name}_${slurm_job_id}"" \\n --tags $tags \\n --entity instant-uv \\n --project jafar \\n --data_dir=""${array_records_dir}/train"" \\n --val_data_dir=""${array_records_dir}/val"" &\n\nchild_pid=$!\n\nwait $child_pid\n",shellscript,tab +560,1272915,"slurm/jobs/franz/berlin/coinrun/mila_submission/50M_dataset/coinrun_tokenizer_base.sh",1730,0,"",shellscript,selection_mouse +561,1272923,"slurm/jobs/franz/berlin/coinrun/mila_submission/50M_dataset/coinrun_tokenizer_base.sh",1729,0,"",shellscript,selection_command +562,1273041,"slurm/jobs/franz/berlin/coinrun/mila_submission/50M_dataset/coinrun_tokenizer_base.sh",1729,1,"1",shellscript,selection_mouse +563,1273057,"slurm/jobs/franz/berlin/coinrun/mila_submission/50M_dataset/coinrun_tokenizer_base.sh",1730,0,"",shellscript,selection_command +564,1273140,"slurm/jobs/franz/berlin/coinrun/mila_submission/50M_dataset/coinrun_tokenizer_base.sh",1613,117,"is script must be run from the prepend-action-maskgit branch. Current branch is $current_branch. 
Exiting.""\n exit 1",shellscript,selection_mouse +565,1273140,"slurm/jobs/franz/berlin/coinrun/mila_submission/50M_dataset/coinrun_tokenizer_base.sh",1612,118,"his script must be run from the prepend-action-maskgit branch. Current branch is $current_branch. Exiting.""\n exit 1",shellscript,selection_mouse +566,1273140,"slurm/jobs/franz/berlin/coinrun/mila_submission/50M_dataset/coinrun_tokenizer_base.sh",1611,119,"This script must be run from the prepend-action-maskgit branch. Current branch is $current_branch. Exiting.""\n exit 1",shellscript,selection_mouse +567,1273185,"slurm/jobs/franz/berlin/coinrun/mila_submission/50M_dataset/coinrun_tokenizer_base.sh",1610,120,"""This script must be run from the prepend-action-maskgit branch. Current branch is $current_branch. Exiting.""\n exit 1",shellscript,selection_mouse +568,1273186,"slurm/jobs/franz/berlin/coinrun/mila_submission/50M_dataset/coinrun_tokenizer_base.sh",1550,180,"urrent_branch"" != ""prepend-action-maskgit"" ]; then\n echo ""This script must be run from the prepend-action-maskgit branch. Current branch is $current_branch. Exiting.""\n exit 1",shellscript,selection_mouse +569,1273220,"slurm/jobs/franz/berlin/coinrun/mila_submission/50M_dataset/coinrun_tokenizer_base.sh",1549,181,"current_branch"" != ""prepend-action-maskgit"" ]; then\n echo ""This script must be run from the prepend-action-maskgit branch. Current branch is $current_branch. Exiting.""\n exit 1",shellscript,selection_mouse +570,1273267,"slurm/jobs/franz/berlin/coinrun/mila_submission/50M_dataset/coinrun_tokenizer_base.sh",1548,182,"$current_branch"" != ""prepend-action-maskgit"" ]; then\n echo ""This script must be run from the prepend-action-maskgit branch. Current branch is $current_branch. Exiting.""\n exit 1",shellscript,selection_mouse +571,1273292,"slurm/jobs/franz/berlin/coinrun/mila_submission/50M_dataset/coinrun_tokenizer_base.sh",1497,233,"nt_branch=$(git rev-parse --abbrev-ref HEAD)\nif [ ""$current_branch"" != ""prepend-action-maskgit"" ]; then\n echo ""This script must be run from the prepend-action-maskgit branch. Current branch is $current_branch. Exiting.""\n exit 1",shellscript,selection_mouse +572,1273381,"slurm/jobs/franz/berlin/coinrun/mila_submission/50M_dataset/coinrun_tokenizer_base.sh",1496,234,"ent_branch=$(git rev-parse --abbrev-ref HEAD)\nif [ ""$current_branch"" != ""prepend-action-maskgit"" ]; then\n echo ""This script must be run from the prepend-action-maskgit branch. Current branch is $current_branch. Exiting.""\n exit 1",shellscript,selection_mouse +573,1273407,"slurm/jobs/franz/berlin/coinrun/mila_submission/50M_dataset/coinrun_tokenizer_base.sh",1495,235,"rent_branch=$(git rev-parse --abbrev-ref HEAD)\nif [ ""$current_branch"" != ""prepend-action-maskgit"" ]; then\n echo ""This script must be run from the prepend-action-maskgit branch. Current branch is $current_branch. Exiting.""\n exit 1",shellscript,selection_mouse +574,1273495,"slurm/jobs/franz/berlin/coinrun/mila_submission/50M_dataset/coinrun_tokenizer_base.sh",1494,236,"rrent_branch=$(git rev-parse --abbrev-ref HEAD)\nif [ ""$current_branch"" != ""prepend-action-maskgit"" ]; then\n echo ""This script must be run from the prepend-action-maskgit branch. Current branch is $current_branch. 
Exiting.""\n exit 1",shellscript,selection_mouse +575,1273785,"slurm/jobs/franz/berlin/coinrun/mila_submission/50M_dataset/coinrun_tokenizer_base.sh",1493,0,"",shellscript,selection_mouse +576,1273901,"slurm/jobs/franz/berlin/coinrun/mila_submission/50M_dataset/coinrun_tokenizer_base.sh",1492,14,"current_branch",shellscript,selection_mouse +577,1274148,"slurm/jobs/franz/berlin/coinrun/mila_submission/50M_dataset/coinrun_tokenizer_base.sh",1492,53,"current_branch=$(git rev-parse --abbrev-ref HEAD)\nif ",shellscript,selection_mouse +578,1274192,"slurm/jobs/franz/berlin/coinrun/mila_submission/50M_dataset/coinrun_tokenizer_base.sh",1492,117,"current_branch=$(git rev-parse --abbrev-ref HEAD)\nif [ ""$current_branch"" != ""prepend-action-maskgit"" ]; then\n echo",shellscript,selection_mouse +579,1274229,"slurm/jobs/franz/berlin/coinrun/mila_submission/50M_dataset/coinrun_tokenizer_base.sh",1492,236,"current_branch=$(git rev-parse --abbrev-ref HEAD)\nif [ ""$current_branch"" != ""prepend-action-maskgit"" ]; then\n echo ""This script must be run from the prepend-action-maskgit branch. Current branch is $current_branch. Exiting.""\n exit",shellscript,selection_mouse +580,1274289,"slurm/jobs/franz/berlin/coinrun/mila_submission/50M_dataset/coinrun_tokenizer_base.sh",1492,237,"current_branch=$(git rev-parse --abbrev-ref HEAD)\nif [ ""$current_branch"" != ""prepend-action-maskgit"" ]; then\n echo ""This script must be run from the prepend-action-maskgit branch. Current branch is $current_branch. Exiting.""\n exit ",shellscript,selection_mouse +581,1274353,"slurm/jobs/franz/berlin/coinrun/mila_submission/50M_dataset/coinrun_tokenizer_base.sh",1492,238,"current_branch=$(git rev-parse --abbrev-ref HEAD)\nif [ ""$current_branch"" != ""prepend-action-maskgit"" ]; then\n echo ""This script must be run from the prepend-action-maskgit branch. Current branch is $current_branch. Exiting.""\n exit 1",shellscript,selection_mouse +582,1274383,"slurm/jobs/franz/berlin/coinrun/mila_submission/50M_dataset/coinrun_tokenizer_base.sh",1492,241,"current_branch=$(git rev-parse --abbrev-ref HEAD)\nif [ ""$current_branch"" != ""prepend-action-maskgit"" ]; then\n echo ""This script must be run from the prepend-action-maskgit branch. Current branch is $current_branch. 
Exiting.""\n exit 1\nfi",shellscript,selection_mouse +583,1274632,"slurm/jobs/franz/berlin/coinrun/mila_submission/50M_dataset/coinrun_tokenizer_base.sh",1733,0,"",shellscript,selection_mouse +584,1274666,"slurm/jobs/franz/berlin/coinrun/mila_submission/50M_dataset/coinrun_tokenizer_base.sh",1732,0,"",shellscript,selection_command +585,1275603,"slurm/jobs/franz/berlin/coinrun/mila_submission/50M_dataset/coinrun_tokenizer_base.sh",1575,0,"",shellscript,selection_mouse +586,1275739,"slurm/jobs/franz/berlin/coinrun/mila_submission/50M_dataset/coinrun_tokenizer_base.sh",1569,7,"prepend",shellscript,selection_mouse +587,1275956,"slurm/jobs/franz/berlin/coinrun/mila_submission/50M_dataset/coinrun_tokenizer_base.sh",1569,8,"prepend-",shellscript,selection_mouse +588,1275981,"slurm/jobs/franz/berlin/coinrun/mila_submission/50M_dataset/coinrun_tokenizer_base.sh",1569,14,"prepend-action",shellscript,selection_mouse +589,1276078,"slurm/jobs/franz/berlin/coinrun/mila_submission/50M_dataset/coinrun_tokenizer_base.sh",1569,15,"prepend-action-",shellscript,selection_mouse +590,1276109,"slurm/jobs/franz/berlin/coinrun/mila_submission/50M_dataset/coinrun_tokenizer_base.sh",1569,22,"prepend-action-maskgit",shellscript,selection_mouse +591,1276622,"slurm/jobs/franz/berlin/coinrun/mila_submission/50M_dataset/coinrun_tokenizer_base.sh",1586,0,"",shellscript,selection_mouse +592,1278469,"slurm/jobs/franz/berlin/coinrun/mila_submission/50M_dataset/coinrun_tokenizer_base.sh",1490,0,"",shellscript,selection_mouse +593,1278476,"slurm/jobs/franz/berlin/coinrun/mila_submission/50M_dataset/coinrun_tokenizer_base.sh",1489,0,"",shellscript,selection_command +594,1279426,"slurm/jobs/franz/berlin/coinrun/mila_submission/50M_dataset/coinrun_tokenizer_base.sh",1490,0,"",shellscript,selection_mouse +595,1279437,"slurm/jobs/franz/berlin/coinrun/mila_submission/50M_dataset/coinrun_tokenizer_base.sh",1489,0,"",shellscript,selection_command +596,1306188," checklist.md",0,0,"",markdown,tab +597,1308158," checklist.md",0,0,"",markdown,tab +598,1314426," checklist.md",0,0,"-",markdown,content +599,1314429," checklist.md",1,0,"",markdown,selection_keyboard +600,1314477," checklist.md",1,0," ",markdown,content +601,1314478," checklist.md",2,0,"",markdown,selection_keyboard +602,1314509," checklist.md",2,0,"a",markdown,content +603,1314511," checklist.md",3,0,"",markdown,selection_keyboard +604,1314560," checklist.md",3,0,"s",markdown,content +605,1314561," checklist.md",4,0,"",markdown,selection_keyboard +606,1314745," checklist.md",4,0,"s",markdown,content +607,1314746," checklist.md",5,0,"",markdown,selection_keyboard +608,1314899," checklist.md",5,0,"e",markdown,content +609,1314900," checklist.md",6,0,"",markdown,selection_keyboard +610,1314957," checklist.md",6,0,"r",markdown,content +611,1314958," checklist.md",7,0,"",markdown,selection_keyboard +612,1315055," checklist.md",7,0,"t",markdown,content +613,1315056," checklist.md",8,0,"",markdown,selection_keyboard +614,1315137," checklist.md",8,0," ",markdown,content +615,1315138," checklist.md",9,0,"",markdown,selection_keyboard +616,1315404," checklist.md",9,0,"b",markdown,content +617,1315405," checklist.md",10,0,"",markdown,selection_keyboard +618,1315507," checklist.md",10,0,"r",markdown,content +619,1315508," checklist.md",11,0,"",markdown,selection_keyboard +620,1315678," checklist.md",11,0,"a",markdown,content +621,1315679," checklist.md",12,0,"",markdown,selection_keyboard +622,1315856," checklist.md",12,0,"c",markdown,content +623,1315857," 
checklist.md",13,0,"",markdown,selection_keyboard +624,1316245," checklist.md",12,1,"",markdown,content +625,1316503," checklist.md",12,0,"n",markdown,content +626,1316504," checklist.md",13,0,"",markdown,selection_keyboard +627,1316669," checklist.md",13,0,"c",markdown,content +628,1316670," checklist.md",14,0,"",markdown,selection_keyboard +629,1316764," checklist.md",14,0,"h",markdown,content +630,1316765," checklist.md",15,0,"",markdown,selection_keyboard +631,1317679," checklist.md",15,0,"\n",markdown,content +632,1318633," checklist.md",16,0,"-",markdown,content +633,1318634," checklist.md",17,0,"",markdown,selection_keyboard +634,1318775," checklist.md",17,0," ",markdown,content +635,1318776," checklist.md",18,0,"",markdown,selection_keyboard +636,1321110," checklist.md",18,0,"p",markdown,content +637,1321112," checklist.md",19,0,"",markdown,selection_keyboard +638,1321230," checklist.md",19,0,"a",markdown,content +639,1321231," checklist.md",20,0,"",markdown,selection_keyboard +640,1321419," checklist.md",20,0,"t",markdown,content +641,1321420," checklist.md",21,0,"",markdown,selection_keyboard +642,1321604," checklist.md",21,0,"c",markdown,content +643,1321605," checklist.md",22,0,"",markdown,selection_keyboard +644,1321672," checklist.md",22,0,"h",markdown,content +645,1321673," checklist.md",23,0,"",markdown,selection_keyboard +646,1321815," checklist.md",23,0," ",markdown,content +647,1321816," checklist.md",24,0,"",markdown,selection_keyboard +648,1321904," checklist.md",24,0,"s",markdown,content +649,1321905," checklist.md",25,0,"",markdown,selection_keyboard +650,1322063," checklist.md",25,0,"i",markdown,content +651,1322064," checklist.md",26,0,"",markdown,selection_keyboard +652,1322171," checklist.md",26,0,"z",markdown,content +653,1322172," checklist.md",27,0,"",markdown,selection_keyboard +654,1322416," checklist.md",27,0,"e",markdown,content +655,1322417," checklist.md",28,0,"",markdown,selection_keyboard +656,1322583," checklist.md",28,0," ",markdown,content +657,1322584," checklist.md",29,0,"",markdown,selection_keyboard +658,1322945," checklist.md",29,0,"s",markdown,content +659,1322946," checklist.md",30,0,"",markdown,selection_keyboard +660,1323126," checklist.md",30,0,"e",markdown,content +661,1323127," checklist.md",31,0,"",markdown,selection_keyboard +662,1323841," checklist.md",31,0,"t",markdown,content +663,1323842," checklist.md",32,0,"",markdown,selection_keyboard +664,1323993," checklist.md",32,0,"z",markdown,content +665,1323994," checklist.md",33,0,"",markdown,selection_keyboard +666,1324102," checklist.md",33,0,"e",markdown,content +667,1324103," checklist.md",34,0,"",markdown,selection_keyboard +668,1324193," checklist.md",34,0," ",markdown,content +669,1324194," checklist.md",35,0,"",markdown,selection_keyboard +670,1324734," checklist.md",34,1,"",markdown,content +671,1324999," checklist.md",34,0,"n",markdown,content +672,1325000," checklist.md",35,0,"",markdown,selection_keyboard +673,1325138," checklist.md",35,0,"\n",markdown,content +674,1326362," checklist.md",36,0,"-",markdown,content +675,1326363," checklist.md",37,0,"",markdown,selection_keyboard +676,1326623," checklist.md",37,0,"r",markdown,content +677,1326625," checklist.md",38,0,"",markdown,selection_keyboard +678,1326748," checklist.md",38,0,"u",markdown,content +679,1326749," checklist.md",39,0,"",markdown,selection_keyboard +680,1326852," checklist.md",39,0,"n",markdown,content +681,1326853," checklist.md",40,0,"",markdown,selection_keyboard +682,1327149," 
checklist.md",39,1,"",markdown,content +683,1327295," checklist.md",38,1,"",markdown,content +684,1327599," checklist.md",37,1,"",markdown,content +685,1327736," checklist.md",37,0," ",markdown,content +686,1327737," checklist.md",38,0,"",markdown,selection_keyboard +687,1327878," checklist.md",38,0,"r",markdown,content +688,1327879," checklist.md",39,0,"",markdown,selection_keyboard +689,1327927," checklist.md",39,0,"u",markdown,content +690,1327928," checklist.md",40,0,"",markdown,selection_keyboard +691,1327988," checklist.md",40,0,"n",markdown,content +692,1327988," checklist.md",41,0,"",markdown,selection_keyboard +693,1328092," checklist.md",41,0," ",markdown,content +694,1328093," checklist.md",42,0,"",markdown,selection_keyboard +695,1328246," checklist.md",42,0,"n",markdown,content +696,1328248," checklist.md",43,0,"",markdown,selection_keyboard +697,1328343," checklist.md",43,0,"a",markdown,content +698,1328344," checklist.md",44,0,"",markdown,selection_keyboard +699,1328390," checklist.md",44,0,"m",markdown,content +700,1328391," checklist.md",45,0,"",markdown,selection_keyboard +701,1328495," checklist.md",45,0,"e",markdown,content +702,1328496," checklist.md",46,0,"",markdown,selection_keyboard +703,1328602," checklist.md",46,0,"\n",markdown,content +704,1329045," checklist.md",47,0,"d",markdown,content +705,1329047," checklist.md",48,0,"",markdown,selection_keyboard +706,1329185," checklist.md",48,0,"a",markdown,content +707,1329186," checklist.md",49,0,"",markdown,selection_keyboard +708,1329298," checklist.md",49,0,"t",markdown,content +709,1329299," checklist.md",50,0,"",markdown,selection_keyboard +710,1329395," checklist.md",50,0,"a",markdown,content +711,1329396," checklist.md",51,0,"",markdown,selection_keyboard +712,1329444," checklist.md",51,0,"s",markdown,content +713,1329445," checklist.md",52,0,"",markdown,selection_keyboard +714,1329647," checklist.md",52,0,"e",markdown,content +715,1329648," checklist.md",53,0,"",markdown,selection_keyboard +716,1329720," checklist.md",53,0,"t",markdown,content +717,1329721," checklist.md",54,0,"",markdown,selection_keyboard +718,1330332," checklist.md",47,0,"-",markdown,content +719,1330333," checklist.md",48,0,"",markdown,selection_keyboard +720,1330472," checklist.md",48,0," ",markdown,content +721,1330473," checklist.md",49,0,"",markdown,selection_keyboard +722,1330824," checklist.md",50,0,"",markdown,selection_command +723,1331284," checklist.md",56,0,"\n",markdown,content +724,1331807," checklist.md",57,0,"-",markdown,content +725,1331808," checklist.md",58,0,"",markdown,selection_keyboard +726,1332156," checklist.md",58,0," ",markdown,content +727,1332157," checklist.md",59,0,"",markdown,selection_keyboard +728,1334983," checklist.md",59,0,"t",markdown,content +729,1334985," checklist.md",60,0,"",markdown,selection_keyboard +730,1335037," checklist.md",60,0,"a",markdown,content +731,1335038," checklist.md",61,0,"",markdown,selection_keyboard +732,1335189," checklist.md",61,0,"g",markdown,content +733,1335190," checklist.md",62,0,"",markdown,selection_keyboard +734,1335253," checklist.md",62,0,"s",markdown,content +735,1335254," checklist.md",63,0,"",markdown,selection_keyboard +736,1344273,"slurm/jobs/franz/berlin/coinrun/mila_submission/50M_dataset/coinrun_tokenizer_base.sh",0,0,"",shellscript,tab +737,1344274,"slurm/jobs/franz/berlin/coinrun/mila_submission/50M_dataset/coinrun_tokenizer_base.sh",519,0,"",shellscript,selection_mouse 
+738,1344288,"slurm/jobs/franz/berlin/coinrun/mila_submission/50M_dataset/coinrun_tokenizer_base.sh",518,0,"",shellscript,selection_command +739,1345622,"slurm/jobs/franz/berlin/coinrun/mila_submission/50M_dataset/coinrun_tokenizer_base.sh",399,0,"",shellscript,selection_mouse +740,1345665,"slurm/jobs/franz/berlin/coinrun/mila_submission/50M_dataset/coinrun_tokenizer_base.sh",398,0,"",shellscript,selection_command +741,1358399,"slurm/jobs/franz/berlin/coinrun/mila_submission/50M_dataset/coinrun_tokenizer_base.sh",1733,0,"",shellscript,selection_mouse +742,1358402,"slurm/jobs/franz/berlin/coinrun/mila_submission/50M_dataset/coinrun_tokenizer_base.sh",1732,0,"",shellscript,selection_command +743,1359085,"slurm/jobs/franz/berlin/coinrun/mila_submission/50M_dataset/coinrun_tokenizer_base.sh",1730,0,"",shellscript,selection_mouse +744,1359104,"slurm/jobs/franz/berlin/coinrun/mila_submission/50M_dataset/coinrun_tokenizer_base.sh",1729,0,"",shellscript,selection_command +745,1359723,"slurm/jobs/franz/berlin/coinrun/mila_submission/50M_dataset/coinrun_tokenizer_base.sh",1734,0,"",shellscript,selection_mouse +746,1360402,"slurm/jobs/franz/berlin/coinrun/mila_submission/50M_dataset/coinrun_tokenizer_base.sh",1730,0,"",shellscript,selection_mouse +747,1360417,"slurm/jobs/franz/berlin/coinrun/mila_submission/50M_dataset/coinrun_tokenizer_base.sh",1729,0,"",shellscript,selection_command +748,1364884," checklist.md",0,0,"",markdown,tab +749,1364885," checklist.md",5,0,"",markdown,selection_mouse +750,1364928," checklist.md",2,6,"assert",markdown,selection_mouse +751,1365062," checklist.md",0,16,"- assert branch\n",markdown,selection_mouse +752,1371850," checklist.md",19,0,"",markdown,selection_mouse +753,1372021," checklist.md",18,5,"patch",markdown,selection_mouse +754,1372143," checklist.md",16,20,"- patch size setzen\n",markdown,selection_mouse +755,1376754," checklist.md",40,0,"",markdown,selection_mouse +756,1376901," checklist.md",38,3,"run",markdown,selection_mouse +757,1377053," checklist.md",36,11,"- run name\n",markdown,selection_mouse +758,1379790,"slurm/jobs/franz/berlin/coinrun/mila_submission/50M_dataset/coinrun_tokenizer_base.sh",0,0,"",shellscript,tab +759,1379791,"slurm/jobs/franz/berlin/coinrun/mila_submission/50M_dataset/coinrun_tokenizer_base.sh",519,0,"",shellscript,selection_command +760,1381151,"slurm/jobs/franz/berlin/coinrun/mila_submission/50M_dataset/coinrun_tokenizer_base.sh",381,0,"",shellscript,selection_mouse +761,1381157,"slurm/jobs/franz/berlin/coinrun/mila_submission/50M_dataset/coinrun_tokenizer_base.sh",380,0,"",shellscript,selection_command +762,1445720," checklist.md",0,0,"",markdown,tab +763,1445721," checklist.md",53,0,"",markdown,selection_mouse +764,1445904," checklist.md",49,7,"dataset",markdown,selection_mouse +765,1446065," checklist.md",47,10,"- dataset\n",markdown,selection_mouse +766,1448293,"slurm/jobs/franz/berlin/coinrun/mila_submission/50M_dataset/coinrun_tokenizer_base.sh",0,0,"",shellscript,tab +767,1448294,"slurm/jobs/franz/berlin/coinrun/mila_submission/50M_dataset/coinrun_tokenizer_base.sh",1214,0,"",shellscript,selection_mouse +768,1449417,"slurm/jobs/franz/berlin/coinrun/mila_submission/50M_dataset/coinrun_tokenizer_base.sh",1300,0,"",shellscript,selection_mouse +769,1449562,"slurm/jobs/franz/berlin/coinrun/mila_submission/50M_dataset/coinrun_tokenizer_base.sh",1287,30,"array_records_250M_npy_arr_rec",shellscript,selection_mouse 
+770,1450238,"slurm/jobs/franz/berlin/coinrun/mila_submission/50M_dataset/coinrun_tokenizer_base.sh",1305,0,"",shellscript,selection_mouse +771,1450239,"slurm/jobs/franz/berlin/coinrun/mila_submission/50M_dataset/coinrun_tokenizer_base.sh",1287,30,"array_records_250M_npy_arr_rec",shellscript,selection_mouse +772,1450383,"slurm/jobs/franz/berlin/coinrun/mila_submission/50M_dataset/coinrun_tokenizer_base.sh",1215,118,"array_records_dir=""/fast/project/HFMI_SynergyUnit/jafar_ws/data/coinrun/array_records_250M_npy_arr_rec/array_record/""\n",shellscript,selection_mouse +773,1451097,"slurm/jobs/franz/berlin/coinrun/mila_submission/50M_dataset/coinrun_tokenizer_base.sh",1305,0,"",shellscript,selection_mouse +774,1451097,"slurm/jobs/franz/berlin/coinrun/mila_submission/50M_dataset/coinrun_tokenizer_base.sh",1287,30,"array_records_250M_npy_arr_rec",shellscript,selection_mouse +775,1452235,"slurm/jobs/franz/berlin/coinrun/mila_submission/50M_dataset/coinrun_tokenizer_base.sh",1305,0,"",shellscript,selection_mouse +776,1452851,"slurm/jobs/franz/berlin/coinrun/mila_submission/50M_dataset/coinrun_tokenizer_base.sh",1287,30,"array_records_250M_npy_arr_rec",shellscript,selection_mouse +777,1467286,"slurm/jobs/franz/berlin/coinrun/mila_submission/legacy_runs_50M_dataset/coinrun_lam_base.sh",0,0,"#!/usr/bin/env bash\n\n#SBATCH --nodes=1\n#SBATCH --ntasks-per-node=1\n#SBATCH --gres=gpu:1\n#SBATCH --time=24:00:00\n#SBATCH --cpus-per-task=8\n#SBATCH --output=/fast/project/HFMI_SynergyUnit/jafar_ws/logs/franz/coinrun/lam/%x_%j.log\n#SBATCH --error=/fast/project/HFMI_SynergyUnit/jafar_ws/logs/franz/coinrun/lam/%x_%j.log\n#SBATCH --job-name=lam_coinrun_mila_submission\n#SBATCH --requeue\n#SBATCH --signal=b:usr1@300 # 5 min before timeout\n\n# --- signal trap to requeue job before timeout ---\nrequeue_job() {\n echo ""[$(date)] caught sigusr1 (timeout warning), requeueing slurm job $SLURM_JOB_ID...""\n # optional: trigger checkpoint saving here\n # e.g., touch $checkpoint_dir/requeue_trigger\n scontrol requeue $SLURM_JOB_ID\n exit 0\n}\n\ntrap requeue_job sigusr1\n\n# set checkpoint flag based on restart count\nrestart_count=$(scontrol show job $SLURM_JOB_ID | grep -o 'Restarts=[0-9]*' | cut -d'=' -f2)\n\nif [ $restart_count -eq 0 ]; then\n restore_ckpt_flag=""--no-restore-ckpt""\nelse\n restore_ckpt_flag=""--restore-ckpt""\nfi\n\n# Log the sbatch script\ncat $0\n\nsource .venv/bin/activate\n\njob_name=$SLURM_JOB_NAME\nslurm_job_id=$SLURM_JOB_ID\n\ntags=""coinrun lam 500m_dataset mila_submission""\n\narray_records_dir=""/fast/project/HFMI_SynergyUnit/jafar_ws/data/coinrun/array_records_500m_seed_w_increment""\nCHECKPOINT_DIR=""/fast/project/HFMI_SynergyUnit/jafar_ws/checkpoints/coinrun/lam/${job_name}_${slurm_job_id}""\nmkdir -p $CHECKPOINT_DIR\n\nenv | grep SLURM\n\nsrun python jasmine/train_lam.py \\n --save_ckpt \\n $restore_ckpt_flag \\n --wandb_id $SLURM_JOB_ID \\n --ckpt_dir $CHECKPOINT_DIR \\n --name=""${job_name}_${slurm_job_id}"" \\n --tags $tags \\n --entity instant-uv \\n --project jafar \\n --data_dir=""${array_records_dir}/train"" \\n --val_data_dir=""${array_records_dir}/val"" &\n\nchild_pid=$!\n\nwait $child_pid\n",shellscript,tab +778,1470350,"slurm/jobs/franz/berlin/coinrun/mila_submission/legacy_runs_50M_dataset/coinrun_lam_base.sh",1287,0,"",shellscript,selection_mouse +779,1470485,"slurm/jobs/franz/berlin/coinrun/mila_submission/legacy_runs_50M_dataset/coinrun_lam_base.sh",1263,35,"array_records_500m_seed_w_increment",shellscript,selection_mouse 
+780,1470609,"slurm/jobs/franz/berlin/coinrun/mila_submission/legacy_runs_50M_dataset/coinrun_lam_base.sh",1191,109,"array_records_dir=""/fast/project/HFMI_SynergyUnit/jafar_ws/data/coinrun/array_records_500m_seed_w_increment""\n",shellscript,selection_mouse +781,1472300,"slurm/jobs/franz/berlin/coinrun/mila_submission/legacy_runs_50M_dataset/coinrun_lam_base.sh",1218,0,"",shellscript,selection_mouse +782,1479349,"slurm/jobs/franz/berlin/coinrun/mila_submission/50M_dataset/coinrun_tokenizer_base.sh",0,0,"",shellscript,tab +783,1480853,"slurm/jobs/franz/berlin/coinrun/mila_submission/50M_dataset/coinrun_tokenizer_base.sh",1230,0,"",shellscript,selection_mouse +784,1481744,"slurm/jobs/franz/berlin/coinrun/mila_submission/50M_dataset/coinrun_tokenizer_base.sh",1332,0,"\narray_records_dir=""/fast/project/HFMI_SynergyUnit/jafar_ws/data/coinrun/array_records_500m_seed_w_increment""",shellscript,content +785,1481834,"slurm/jobs/franz/berlin/coinrun/mila_submission/50M_dataset/coinrun_tokenizer_base.sh",1333,0,"",shellscript,selection_command +786,1482299,"slurm/jobs/franz/berlin/coinrun/mila_submission/50M_dataset/coinrun_tokenizer_base.sh",1215,0,"",shellscript,selection_command +787,1482564,"slurm/jobs/franz/berlin/coinrun/mila_submission/50M_dataset/coinrun_tokenizer_base.sh",1215,118,"",shellscript,content +788,1484230,"slurm/jobs/franz/berlin/coinrun/mila_submission/legacy_runs_50M_dataset/coinrun_lam_base.sh",0,0,"",shellscript,tab +789,1485149,"slurm/jobs/franz/berlin/coinrun/mila_submission/50M_dataset/coinrun_tokenizer_base.sh",0,0,"",shellscript,tab +790,1499000,"slurm/jobs/franz/berlin/coinrun/mila_submission/50M_dataset/coinrun_tokenizer_base.sh",1292,0,"",shellscript,selection_mouse +791,1501828," checklist.md",0,0,"",markdown,tab +792,1501882," checklist.md",54,0,"",markdown,selection_mouse +793,1501882," checklist.md",49,7,"dataset",markdown,selection_mouse +794,1502031," checklist.md",47,10,"- dataset\n",markdown,selection_mouse +795,1505146," checklist.md",51,0,"",markdown,selection_mouse +796,1505573," checklist.md",60,0,"",markdown,selection_mouse +797,1505718," checklist.md",59,4,"tags",markdown,selection_mouse +798,1507513,"slurm/jobs/franz/berlin/coinrun/mila_submission/50M_dataset/coinrun_tokenizer_base.sh",0,0,"",shellscript,tab +799,1507514,"slurm/jobs/franz/berlin/coinrun/mila_submission/50M_dataset/coinrun_tokenizer_base.sh",1185,0,"",shellscript,selection_mouse +800,1508571,"slurm/jobs/franz/berlin/coinrun/mila_submission/50M_dataset/coinrun_tokenizer_base.sh",1184,0,"",shellscript,selection_command +801,1509370,"slurm/jobs/franz/berlin/coinrun/mila_submission/50M_dataset/coinrun_tokenizer_base.sh",1184,1,"",shellscript,content +802,1511118,"slurm/jobs/franz/berlin/coinrun/mila_submission/50M_dataset/coinrun_tokenizer_base.sh",1213,0,"",shellscript,selection_mouse +803,1512082,"slurm/jobs/franz/berlin/coinrun/mila_submission/50M_dataset/coinrun_tokenizer_base.sh",1480,0,"",shellscript,selection_mouse +804,1512095,"slurm/jobs/franz/berlin/coinrun/mila_submission/50M_dataset/coinrun_tokenizer_base.sh",1479,0,"",shellscript,selection_command +805,1514456,"slurm/jobs/franz/berlin/coinrun/mila_submission/50M_dataset/coinrun_tokenizer_base.sh",1462,0,"",shellscript,selection_mouse +806,1514460,"slurm/jobs/franz/berlin/coinrun/mila_submission/50M_dataset/coinrun_tokenizer_base.sh",1461,0,"",shellscript,selection_command +807,1516767,"slurm/jobs/franz/berlin/coinrun/mila_submission/50M_dataset/coinrun_tokenizer_base.sh",1720,0,"",shellscript,selection_mouse 
+808,1516809,"slurm/jobs/franz/berlin/coinrun/mila_submission/50M_dataset/coinrun_tokenizer_base.sh",1719,0,"",shellscript,selection_command +809,1517952,"slurm/jobs/franz/berlin/coinrun/mila_submission/50M_dataset/coinrun_tokenizer_base.sh",1211,0,"",shellscript,selection_mouse +810,1520558,"slurm/jobs/franz/berlin/coinrun/mila_submission/50M_dataset/coinrun_tokenizer_base.sh",1560,0,"",shellscript,selection_mouse +811,1520699,"slurm/jobs/franz/berlin/coinrun/mila_submission/50M_dataset/coinrun_tokenizer_base.sh",1559,7,"prepend",shellscript,selection_mouse +812,1520810,"slurm/jobs/franz/berlin/coinrun/mila_submission/50M_dataset/coinrun_tokenizer_base.sh",1559,14,"prepend-action",shellscript,selection_mouse +813,1520972,"slurm/jobs/franz/berlin/coinrun/mila_submission/50M_dataset/coinrun_tokenizer_base.sh",1559,15,"prepend-action-",shellscript,selection_mouse +814,1521070,"slurm/jobs/franz/berlin/coinrun/mila_submission/50M_dataset/coinrun_tokenizer_base.sh",1559,22,"prepend-action-maskgit",shellscript,selection_mouse +815,1537575,"slurm/jobs/franz/berlin/coinrun/mila_submission/50M_dataset/coinrun_tokenizer_base.sh",1580,0,"",shellscript,selection_command +816,1553844,"slurm/jobs/franz/berlin/coinrun/mila_submission/50M_dataset/coinrun_tokenizer_base.sh",1159,0,"",shellscript,selection_mouse +817,1554391,"slurm/jobs/franz/berlin/coinrun/mila_submission/50M_dataset/coinrun_tokenizer_base.sh",1211,0,"",shellscript,selection_mouse +818,1577938,"slurm/jobs/franz/berlin/coinrun/mila_submission/50M_dataset/coinrun_tokenizer_base.sh",1211,0," ",shellscript,content +819,1577939,"slurm/jobs/franz/berlin/coinrun/mila_submission/50M_dataset/coinrun_tokenizer_base.sh",1212,0,"",shellscript,selection_keyboard +820,1578052,"slurm/jobs/franz/berlin/coinrun/mila_submission/50M_dataset/coinrun_tokenizer_base.sh",1212,0,"u",shellscript,content +821,1578053,"slurm/jobs/franz/berlin/coinrun/mila_submission/50M_dataset/coinrun_tokenizer_base.sh",1213,0,"",shellscript,selection_keyboard +822,1578169,"slurm/jobs/franz/berlin/coinrun/mila_submission/50M_dataset/coinrun_tokenizer_base.sh",1213,0,"n",shellscript,content +823,1578170,"slurm/jobs/franz/berlin/coinrun/mila_submission/50M_dataset/coinrun_tokenizer_base.sh",1214,0,"",shellscript,selection_keyboard +824,1578323,"slurm/jobs/franz/berlin/coinrun/mila_submission/50M_dataset/coinrun_tokenizer_base.sh",1214,0,"i",shellscript,content +825,1578323,"slurm/jobs/franz/berlin/coinrun/mila_submission/50M_dataset/coinrun_tokenizer_base.sh",1215,0,"",shellscript,selection_keyboard +826,1578668,"slurm/jobs/franz/berlin/coinrun/mila_submission/50M_dataset/coinrun_tokenizer_base.sh",1215,0,"c",shellscript,content +827,1578668,"slurm/jobs/franz/berlin/coinrun/mila_submission/50M_dataset/coinrun_tokenizer_base.sh",1216,0,"",shellscript,selection_keyboard +828,1578819,"slurm/jobs/franz/berlin/coinrun/mila_submission/50M_dataset/coinrun_tokenizer_base.sh",1216,0,"o",shellscript,content +829,1578820,"slurm/jobs/franz/berlin/coinrun/mila_submission/50M_dataset/coinrun_tokenizer_base.sh",1217,0,"",shellscript,selection_keyboard +830,1578931,"slurm/jobs/franz/berlin/coinrun/mila_submission/50M_dataset/coinrun_tokenizer_base.sh",1217,0,"r",shellscript,content +831,1578931,"slurm/jobs/franz/berlin/coinrun/mila_submission/50M_dataset/coinrun_tokenizer_base.sh",1218,0,"",shellscript,selection_keyboard +832,1578981,"slurm/jobs/franz/berlin/coinrun/mila_submission/50M_dataset/coinrun_tokenizer_base.sh",1218,0,"n",shellscript,content 
+833,1578981,"slurm/jobs/franz/berlin/coinrun/mila_submission/50M_dataset/coinrun_tokenizer_base.sh",1219,0,"",shellscript,selection_keyboard +834,1579360,"slurm/jobs/franz/berlin/coinrun/mila_submission/50M_dataset/coinrun_tokenizer_base.sh",1219,0,"^",shellscript,content +835,1579361,"slurm/jobs/franz/berlin/coinrun/mila_submission/50M_dataset/coinrun_tokenizer_base.sh",1220,0,"",shellscript,selection_keyboard +836,1580109,"slurm/jobs/franz/berlin/coinrun/mila_submission/50M_dataset/coinrun_tokenizer_base.sh",1219,1,"",shellscript,content +837,1580110,"slurm/jobs/franz/berlin/coinrun/mila_submission/50M_dataset/coinrun_tokenizer_base.sh",1219,0,"",shellscript,selection_keyboard +838,1580240,"slurm/jobs/franz/berlin/coinrun/mila_submission/50M_dataset/coinrun_tokenizer_base.sh",1218,0,"",shellscript,selection_command +839,1585170," checklist.md",0,0,"",markdown,tab +840,1585171," checklist.md",55,0,"",markdown,selection_mouse +841,1585716," checklist.md",63,0,"",markdown,selection_mouse +842,1586280," checklist.md",63,0,"\n",markdown,content +843,1587043," checklist.md",64,0,"-",markdown,content +844,1587044," checklist.md",65,0,"",markdown,selection_keyboard +845,1587198," checklist.md",65,0," ",markdown,content +846,1587199," checklist.md",66,0,"",markdown,selection_keyboard +847,1587473," checklist.md",66,0,"n",markdown,content +848,1587474," checklist.md",67,0,"",markdown,selection_keyboard +849,1588057," checklist.md",66,1,"",markdown,content +850,1588390," checklist.md",66,0,"n",markdown,content +851,1588391," checklist.md",67,0,"",markdown,selection_keyboard +852,1588800," checklist.md",66,1,"",markdown,content +853,1589265," checklist.md",66,0,"u",markdown,content +854,1589266," checklist.md",67,0,"",markdown,selection_keyboard +855,1589634," checklist.md",67,0,"n",markdown,content +856,1589635," checklist.md",68,0,"",markdown,selection_keyboard +857,1589807," checklist.md",68,0,"i",markdown,content +858,1589808," checklist.md",69,0,"",markdown,selection_keyboard +859,1590194," checklist.md",69,0,"c",markdown,content +860,1590195," checklist.md",70,0,"",markdown,selection_keyboard +861,1590273," checklist.md",70,0,"o",markdown,content +862,1590274," checklist.md",71,0,"",markdown,selection_keyboard +863,1590423," checklist.md",71,0,"r",markdown,content +864,1590424," checklist.md",72,0,"",markdown,selection_keyboard +865,1590497," checklist.md",72,0,"n",markdown,content +866,1590497," checklist.md",73,0,"",markdown,selection_keyboard +867,1590936," checklist.md",72,0,"",markdown,selection_command +868,1591584,"slurm/jobs/franz/berlin/coinrun/mila_submission/50M_dataset/coinrun_tokenizer_base.sh",0,0,"",shellscript,tab +869,1591584,"slurm/jobs/franz/berlin/coinrun/mila_submission/50M_dataset/coinrun_tokenizer_base.sh",1221,0,"",shellscript,selection_mouse +870,1592056,"slurm/jobs/franz/berlin/coinrun/mila_submission/50M_dataset/coinrun_tokenizer_base.sh",1220,0,"",shellscript,selection_mouse +871,1592066,"slurm/jobs/franz/berlin/coinrun/mila_submission/50M_dataset/coinrun_tokenizer_base.sh",1219,0,"",shellscript,selection_command +872,1592756,"slurm/jobs/franz/berlin/coinrun/mila_submission/50M_dataset/coinrun_tokenizer_base.sh",1216,0,"",shellscript,selection_mouse +873,1607146,"jasmine/train_dynamics.py",0,0,"",python,tab +874,1617942,"slurm/jobs/franz/berlin/coinrun/mila_submission/legacy_runs_50M_dataset/coinrun_lam_base.sh",0,0,"",shellscript,tab +875,1622252,"slurm/jobs/franz/berlin/coinrun/mila_submission/50M_dataset/coinrun_lam_base.sh",0,0,"#!/usr/bin/env bash\n\n#SBATCH 
--nodes=1\n#SBATCH --ntasks-per-node=1\n#SBATCH --gres=gpu:1\n#SBATCH --time=24:00:00\n#SBATCH --cpus-per-task=8\n#SBATCH --output=/fast/project/HFMI_SynergyUnit/jafar_ws/logs/franz/coinrun/lam/%x_%j.log\n#SBATCH --error=/fast/project/HFMI_SynergyUnit/jafar_ws/logs/franz/coinrun/lam/%x_%j.log\n#SBATCH --job-name=lam_coinrun_mila_submission\n#SBATCH --requeue\n#SBATCH --signal=b:usr1@300 # 5 min before timeout\n\n# --- signal trap to requeue job before timeout ---\nrequeue_job() {\n echo ""[$(date)] caught sigusr1 (timeout warning), requeueing slurm job $SLURM_JOB_ID...""\n # optional: trigger checkpoint saving here\n # e.g., touch $checkpoint_dir/requeue_trigger\n scontrol requeue $SLURM_JOB_ID\n exit 0\n}\n\ntrap requeue_job sigusr1\n\n# set checkpoint flag based on restart count\nrestart_count=$(scontrol show job $SLURM_JOB_ID | grep -o 'Restarts=[0-9]*' | cut -d'=' -f2)\n\nif [ $restart_count -eq 0 ]; then\n restore_ckpt_flag=""--no-restore-ckpt""\nelse\n restore_ckpt_flag=""--restore-ckpt""\nfi\n\n# Log the sbatch script\ncat $0\n\nsource .venv/bin/activate\n\njob_name=$SLURM_JOB_NAME\nslurm_job_id=$SLURM_JOB_ID\n\ntags=""coinrun lam 500m_dataset mila_submission""\n\narray_records_dir=""/fast/project/HFMI_SynergyUnit/jafar_ws/data/coinrun/array_records_500m_seed_w_increment""\nCHECKPOINT_DIR=""/fast/project/HFMI_SynergyUnit/jafar_ws/checkpoints/coinrun/lam/${job_name}_${slurm_job_id}""\nmkdir -p $CHECKPOINT_DIR\n\nenv | grep SLURM\n\nsrun python jasmine/train_lam.py \\n --save_ckpt \\n $restore_ckpt_flag \\n --wandb_id $SLURM_JOB_ID \\n --ckpt_dir $CHECKPOINT_DIR \\n --name=""${job_name}_${slurm_job_id}"" \\n --tags $tags \\n --entity instant-uv \\n --project jafar \\n --data_dir=""${array_records_dir}/train"" \\n --val_data_dir=""${array_records_dir}/val"" &\n\nchild_pid=$!\n\nwait $child_pid\n",shellscript,tab +876,1629486,"slurm/jobs/franz/berlin/coinrun/mila_submission/legacy_runs_50M_dataset/coinrun_lam_base.sh",0,0,"",shellscript,tab +877,1634901,"slurm/jobs/franz/berlin/coinrun/mila_submission/250M_dataset/coinrun_lam_base.sh",0,0,"#!/usr/bin/env bash\n\n#SBATCH --nodes=1\n#SBATCH --ntasks-per-node=1\n#SBATCH --gres=gpu:1\n#SBATCH --time=24:00:00\n#SBATCH --cpus-per-task=8\n#SBATCH --output=/fast/project/HFMI_SynergyUnit/jafar_ws/logs/franz/coinrun/lam/%x_%j.log\n#SBATCH --error=/fast/project/HFMI_SynergyUnit/jafar_ws/logs/franz/coinrun/lam/%x_%j.log\n#SBATCH --job-name=lam_coinrun_mila_submission\n#SBATCH --requeue\n#SBATCH --signal=b:usr1@300 # 5 min before timeout\n\n# --- signal trap to requeue job before timeout ---\nrequeue_job() {\n echo ""[$(date)] caught sigusr1 (timeout warning), requeueing slurm job $SLURM_JOB_ID...""\n # optional: trigger checkpoint saving here\n # e.g., touch $checkpoint_dir/requeue_trigger\n scontrol requeue $SLURM_JOB_ID\n exit 0\n}\n\ntrap requeue_job sigusr1\n\n# set checkpoint flag based on restart count\nrestart_count=$(scontrol show job $SLURM_JOB_ID | grep -o 'Restarts=[0-9]*' | cut -d'=' -f2)\n\nif [ $restart_count -eq 0 ]; then\n restore_ckpt_flag=""--no-restore-ckpt""\nelse\n restore_ckpt_flag=""--restore-ckpt""\nfi\n\n# Log the sbatch script\ncat $0\n\nsource .venv/bin/activate\n\njob_name=$SLURM_JOB_NAME\nslurm_job_id=$SLURM_JOB_ID\n\ntags=""coinrun lam 250m_dataset mila_submission""\n\narray_records_dir=""/fast/project/HFMI_SynergyUnit/jafar_ws/data/coinrun/array_records_250M_npy_arr_rec/array_record/""\nCHECKPOINT_DIR=""/fast/project/HFMI_SynergyUnit/jafar_ws/checkpoints/coinrun/lam/${job_name}_${slurm_job_id}""\nmkdir -p 
$CHECKPOINT_DIR\n\nenv | grep SLURM\n\ncurrent_branch=$(git rev-parse --abbrev-ref HEAD)\nif [ ""$current_branch"" != ""prepend-action-maskgit"" ]; then\n echo ""This script must be run from the prepend-action-maskgit branch. Current branch is $current_branch. Exiting.""\n exit 1\nfi\n\nsrun python jasmine/train_lam.py \\n --patch_size=16 \\n --save_ckpt \\n $restore_ckpt_flag \\n --wandb_id $SLURM_JOB_ID \\n --ckpt_dir $CHECKPOINT_DIR \\n --name=""${job_name}_${slurm_job_id}"" \\n --tags $tags \\n --entity instant-uv \\n --project jafar \\n --data_dir=""${array_records_dir}/train"" \\n --val_data_dir=""${array_records_dir}/val"" &\n\nchild_pid=$!\n\nwait $child_pid\n",shellscript,tab +878,1638978,"slurm/jobs/franz/berlin/coinrun/mila_submission/50M_dataset/coinrun_lam_base.sh",0,0,"",shellscript,tab +879,1647395,"slurm/jobs/franz/berlin/coinrun/mila_submission/50M_dataset/coinrun_lam_base.sh",1756,0,"",shellscript,selection_mouse +880,1657266,"slurm/jobs/franz/berlin/coinrun/mila_submission/50M_dataset/coinrun_tokenizer_base.sh",0,0,"",shellscript,tab +881,1658681,"slurm/jobs/franz/berlin/coinrun/mila_submission/50M_dataset/coinrun_tokenizer_base.sh",1254,0,"",shellscript,selection_mouse +882,1658825,"slurm/jobs/franz/berlin/coinrun/mila_submission/50M_dataset/coinrun_tokenizer_base.sh",1247,7,"project",shellscript,selection_mouse +883,1658971,"slurm/jobs/franz/berlin/coinrun/mila_submission/50M_dataset/coinrun_tokenizer_base.sh",1222,109,"array_records_dir=""/fast/project/HFMI_SynergyUnit/jafar_ws/data/coinrun/array_records_500m_seed_w_increment""\n",shellscript,selection_mouse +884,1662487,"slurm/jobs/franz/berlin/coinrun/mila_submission/50M_dataset/coinrun_lam_base.sh",0,0,"",shellscript,tab +885,1663843,"slurm/jobs/franz/berlin/coinrun/mila_submission/50M_dataset/coinrun_lam_base.sh",1215,0,"",shellscript,selection_mouse +886,1664791,"slurm/jobs/franz/berlin/coinrun/mila_submission/50M_dataset/coinrun_lam_base.sh",1308,0,"\n",shellscript,content +887,1664971,"slurm/jobs/franz/berlin/coinrun/mila_submission/50M_dataset/coinrun_lam_base.sh",1309,0,"array_records_dir=""/fast/project/HFMI_SynergyUnit/jafar_ws/data/coinrun/array_records_500m_seed_w_increment""\n",shellscript,content +888,1665519,"slurm/jobs/franz/berlin/coinrun/mila_submission/50M_dataset/coinrun_lam_base.sh",1309,0,"",shellscript,selection_command +889,1665830,"slurm/jobs/franz/berlin/coinrun/mila_submission/50M_dataset/coinrun_lam_base.sh",1191,0,"",shellscript,selection_command +890,1666207,"slurm/jobs/franz/berlin/coinrun/mila_submission/50M_dataset/coinrun_lam_base.sh",1191,118,"",shellscript,content +891,1666296,"slurm/jobs/franz/berlin/coinrun/mila_submission/50M_dataset/coinrun_lam_base.sh",1300,0,"",shellscript,selection_command +892,1666828,"slurm/jobs/franz/berlin/coinrun/mila_submission/50M_dataset/coinrun_lam_base.sh",1300,1,"",shellscript,content +893,1671315,"slurm/jobs/franz/berlin/coinrun/mila_submission/50M_dataset/coinrun_lam_base.sh",1160,0,"",shellscript,selection_mouse +894,1672553,"slurm/jobs/franz/berlin/coinrun/mila_submission/50M_dataset/coinrun_lam_base.sh",1160,1,"",shellscript,content +895,1674055,"slurm/jobs/franz/berlin/coinrun/mila_submission/50M_dataset/coinrun_lam_base.sh",1187,0,"",shellscript,selection_mouse +896,1675048,"slurm/jobs/franz/berlin/coinrun/mila_submission/50M_dataset/coinrun_lam_base.sh",1187,0," ",shellscript,content +897,1675049,"slurm/jobs/franz/berlin/coinrun/mila_submission/50M_dataset/coinrun_lam_base.sh",1188,0,"",shellscript,selection_keyboard 
+898,1675272,"slurm/jobs/franz/berlin/coinrun/mila_submission/50M_dataset/coinrun_lam_base.sh",1188,0,"u",shellscript,content +899,1675273,"slurm/jobs/franz/berlin/coinrun/mila_submission/50M_dataset/coinrun_lam_base.sh",1189,0,"",shellscript,selection_keyboard +900,1675361,"slurm/jobs/franz/berlin/coinrun/mila_submission/50M_dataset/coinrun_lam_base.sh",1189,0,"n",shellscript,content +901,1675362,"slurm/jobs/franz/berlin/coinrun/mila_submission/50M_dataset/coinrun_lam_base.sh",1190,0,"",shellscript,selection_keyboard +902,1675540,"slurm/jobs/franz/berlin/coinrun/mila_submission/50M_dataset/coinrun_lam_base.sh",1190,0,"i",shellscript,content +903,1675541,"slurm/jobs/franz/berlin/coinrun/mila_submission/50M_dataset/coinrun_lam_base.sh",1191,0,"",shellscript,selection_keyboard +904,1675640,"slurm/jobs/franz/berlin/coinrun/mila_submission/50M_dataset/coinrun_lam_base.sh",1191,0,"c",shellscript,content +905,1675641,"slurm/jobs/franz/berlin/coinrun/mila_submission/50M_dataset/coinrun_lam_base.sh",1192,0,"",shellscript,selection_keyboard +906,1675792,"slurm/jobs/franz/berlin/coinrun/mila_submission/50M_dataset/coinrun_lam_base.sh",1192,0,"o",shellscript,content +907,1675793,"slurm/jobs/franz/berlin/coinrun/mila_submission/50M_dataset/coinrun_lam_base.sh",1193,0,"",shellscript,selection_keyboard +908,1675944,"slurm/jobs/franz/berlin/coinrun/mila_submission/50M_dataset/coinrun_lam_base.sh",1193,0,"r",shellscript,content +909,1675944,"slurm/jobs/franz/berlin/coinrun/mila_submission/50M_dataset/coinrun_lam_base.sh",1194,0,"",shellscript,selection_keyboard +910,1676041,"slurm/jobs/franz/berlin/coinrun/mila_submission/50M_dataset/coinrun_lam_base.sh",1194,0,"n",shellscript,content +911,1676041,"slurm/jobs/franz/berlin/coinrun/mila_submission/50M_dataset/coinrun_lam_base.sh",1195,0,"",shellscript,selection_keyboard +912,1676671,"slurm/jobs/franz/berlin/coinrun/mila_submission/50M_dataset/coinrun_lam_base.sh",1194,0,"",shellscript,selection_command +913,1680519,"jasmine/train_dynamics.py",0,0,"",python,tab +914,1684130," checklist.md",0,0,"",markdown,tab +915,1692503,"slurm/jobs/franz/berlin/coinrun/mila_submission/250M_dataset/coinrun_dynamics_base.sh",0,0,"#!/usr/bin/env bash\n\n#SBATCH --nodes=1\n#SBATCH --ntasks-per-node=1\n#SBATCH --time=24:00:00\n#SBATCH --cpus-per-task=8\n#SBATCH --gres=gpu:1\n#SBATCH --output=/fast/project/HFMI_SynergyUnit/jafar_ws/logs/franz/coinrun/dynamics/%x_%j.log\n#SBATCH --error=/fast/project/HFMI_SynergyUnit/jafar_ws/logs/franz/coinrun/dynamics/%x_%j.log\n#SBATCH --job-name=dynamics_coinrun_mila_submission\n#SBATCH --requeue\n#SBATCH --signal=b:usr1@300 # 5 min before timeout\n\n# --- signal trap to requeue job before timeout ---\nrequeue_job() {\n echo ""[$(date)] caught sigusr1 (timeout warning), requeueing slurm job $SLURM_JOB_ID...""\n # optional: trigger checkpoint saving here\n # e.g., touch $checkpoint_dir/requeue_trigger\n scontrol requeue $SLURM_JOB_ID\n exit 0\n}\n\ntrap requeue_job sigusr1\n\n# set checkpoint flag based on restart count\nrestart_count=$(scontrol show job $SLURM_JOB_ID | grep -o 'Restarts=[0-9]*' | cut -d'=' -f2)\n\nif [ $restart_count -eq 0 ]; then\n restore_ckpt_flag=""--no-restore-ckpt""\nelse\n restore_ckpt_flag=""--restore-ckpt""\nfi\n\n\n\n# Log the sbatch script\ncat $0\n\nsource .venv/bin/activate\n\njob_name=$SLURM_JOB_NAME\nslurm_job_id=$SLURM_JOB_ID\n\ntags=""coinrun dynamics 250m_dataset 
mila_submission""\n\narray_records_dir=""/fast/project/HFMI_SynergyUnit/jafar_ws/data/coinrun/array_records_250M_npy_arr_rec/array_record/""\ntokenizer_ckpt_dir=""/fast/project/HFMI_SynergyUnit/jafar_ws/checkpoints/coinrun/tokenizer/tokenizer_coinrun_mila_submission_29736/""\nCHECKPOINT_DIR=""/fast/project/HFMI_SynergyUnit/jafar_ws/checkpoints/coinrun/dynamics/${job_name}/${slurm_job_id}""\nmkdir -p $CHECKPOINT_DIR\n\nenv | grep SLURM\n\ncurrent_branch=$(git rev-parse --abbrev-ref HEAD)\nif [ ""$current_branch"" != ""prepend-action-maskgit"" ]; then\n echo ""This script must be run from the prepend-action-maskgit branch. Current branch is $current_branch. Exiting.""\n exit 1\nfi\n\nsrun python jasmine/train_dynamics.py \\n --patch_size=16 \\n --save_ckpt \\n $restore_ckpt_flag \\n --wandb_id $SLURM_JOB_ID \\n --ckpt_dir $CHECKPOINT_DIR \\n --name=""${job_name}_${slurm_job_id}"" \\n --tags ${tags} \\n --entity instant-uv \\n --project jafar \\n --tokenizer_checkpoint=""${tokenizer_ckpt_dir}"" \\n --val_data_dir=""${array_records_dir}/val"" \\n --data_dir=""${array_records_dir}/train"" &\n\nchild_pid=$!\n\nwait $child_pid\n\n",shellscript,tab +916,1695708,"slurm/jobs/franz/berlin/coinrun/mila_submission/50M_dataset/coinrun_dynamics_base.sh",0,0,"#!/usr/bin/env bash\n\n#SBATCH --nodes=1\n#SBATCH --ntasks-per-node=1\n#SBATCH --time=24:00:00\n#SBATCH --cpus-per-task=8\n#SBATCH --gres=gpu:1\n#SBATCH --output=/fast/project/HFMI_SynergyUnit/jafar_ws/logs/franz/coinrun/dynamics/%x_%j.log\n#SBATCH --error=/fast/project/HFMI_SynergyUnit/jafar_ws/logs/franz/coinrun/dynamics/%x_%j.log\n#SBATCH --job-name=dynamics_coinrun_mila_submission\n#SBATCH --requeue\n#SBATCH --signal=b:usr1@300 # 5 min before timeout\n\n# --- signal trap to requeue job before timeout ---\nrequeue_job() {\n echo ""[$(date)] caught sigusr1 (timeout warning), requeueing slurm job $SLURM_JOB_ID...""\n # optional: trigger checkpoint saving here\n # e.g., touch $checkpoint_dir/requeue_trigger\n scontrol requeue $SLURM_JOB_ID\n exit 0\n}\n\ntrap requeue_job sigusr1\n\n# set checkpoint flag based on restart count\nrestart_count=$(scontrol show job $SLURM_JOB_ID | grep -o 'Restarts=[0-9]*' | cut -d'=' -f2)\n\nif [ $restart_count -eq 0 ]; then\n restore_ckpt_flag=""--no-restore-ckpt""\nelse\n restore_ckpt_flag=""--restore-ckpt""\nfi\n\n\n\n# Log the sbatch script\ncat $0\n\nsource .venv/bin/activate\n\njob_name=$SLURM_JOB_NAME\nslurm_job_id=$SLURM_JOB_ID\n\ntags=""coinrun dynamics 250m_dataset mila_submission""\n\narray_records_dir=""/fast/project/HFMI_SynergyUnit/jafar_ws/data/coinrun/array_records_250M_npy_arr_rec/array_record/""\ntokenizer_ckpt_dir=""/fast/project/HFMI_SynergyUnit/jafar_ws/checkpoints/coinrun/tokenizer/tokenizer_coinrun_mila_submission_29736/""\nCHECKPOINT_DIR=""/fast/project/HFMI_SynergyUnit/jafar_ws/checkpoints/coinrun/dynamics/${job_name}/${slurm_job_id}""\nmkdir -p $CHECKPOINT_DIR\n\nenv | grep SLURM\n\ncurrent_branch=$(git rev-parse --abbrev-ref HEAD)\nif [ ""$current_branch"" != ""prepend-action-maskgit"" ]; then\n echo ""This script must be run from the prepend-action-maskgit branch. Current branch is $current_branch. 
Exiting.""\n exit 1\nfi\n\nsrun python jasmine/train_dynamics.py \\n --patch_size=16 \\n --save_ckpt \\n $restore_ckpt_flag \\n --wandb_id $SLURM_JOB_ID \\n --ckpt_dir $CHECKPOINT_DIR \\n --name=""${job_name}_${slurm_job_id}"" \\n --tags ${tags} \\n --entity instant-uv \\n --project jafar \\n --tokenizer_checkpoint=""${tokenizer_ckpt_dir}"" \\n --val_data_dir=""${array_records_dir}/val"" \\n --data_dir=""${array_records_dir}/train"" &\n\nchild_pid=$!\n\nwait $child_pid\n\n",shellscript,tab +917,1698957," checklist.md",0,0,"",markdown,tab +918,1701051," checklist.md",0,0,"",markdown,tab +919,1710283,"slurm/jobs/franz/berlin/coinrun/mila_submission/50M_dataset/coinrun_dynamics_base.sh",0,0,"",shellscript,tab +920,1710284,"slurm/jobs/franz/berlin/coinrun/mila_submission/50M_dataset/coinrun_dynamics_base.sh",1921,0,"",shellscript,selection_mouse +921,1718788,"slurm/jobs/franz/berlin/coinrun/mila_submission/50M_dataset/coinrun_dynamics_base.sh",1264,0,"",shellscript,selection_mouse +922,1720249,"slurm/jobs/franz/berlin/coinrun/mila_submission/50M_dataset/coinrun_dynamics_base.sh",1330,0,"\n",shellscript,content +923,1723940,"slurm/jobs/franz/berlin/coinrun/mila_submission/50M_dataset/coinrun_lam_base.sh",0,0,"",shellscript,tab +924,1727359,"slurm/jobs/franz/berlin/coinrun/mila_submission/50M_dataset/coinrun_lam_base.sh",1220,0,"",shellscript,selection_mouse +925,1727464,"slurm/jobs/franz/berlin/coinrun/mila_submission/50M_dataset/coinrun_lam_base.sh",1218,4,"fast",shellscript,selection_mouse +926,1727624,"slurm/jobs/franz/berlin/coinrun/mila_submission/50M_dataset/coinrun_lam_base.sh",1198,109,"array_records_dir=""/fast/project/HFMI_SynergyUnit/jafar_ws/data/coinrun/array_records_500m_seed_w_increment""\n",shellscript,selection_mouse +927,1731876,"TERMINAL",0,0,"# array_records_dir=""/fast/project/HFMI_SynergyUnit/jafar_ws/data/coinrun/array_records_500m_seed_w_increment""",,terminal_command +928,1734171,"slurm/jobs/franz/berlin/coinrun/mila_submission/50M_dataset/coinrun_dynamics_base.sh",0,0,"",shellscript,tab +929,1735495,"slurm/jobs/franz/berlin/coinrun/mila_submission/50M_dataset/coinrun_dynamics_base.sh",1331,0,"array_records_dir=""/fast/project/HFMI_SynergyUnit/jafar_ws/data/coinrun/array_records_500m_seed_w_increment""\n",shellscript,content +930,1738121,"slurm/jobs/franz/berlin/coinrun/mila_submission/50M_dataset/coinrun_dynamics_base.sh",1239,0,"",shellscript,selection_mouse +931,1738203,"slurm/jobs/franz/berlin/coinrun/mila_submission/50M_dataset/coinrun_dynamics_base.sh",1238,0,"",shellscript,selection_command +932,1738716,"slurm/jobs/franz/berlin/coinrun/mila_submission/50M_dataset/coinrun_dynamics_base.sh",1213,118,"",shellscript,content +933,1738892,"slurm/jobs/franz/berlin/coinrun/mila_submission/50M_dataset/coinrun_dynamics_base.sh",1322,0,"",shellscript,selection_command +934,1739191,"slurm/jobs/franz/berlin/coinrun/mila_submission/50M_dataset/coinrun_dynamics_base.sh",1322,1,"",shellscript,content +935,1743453,"slurm/jobs/franz/berlin/coinrun/mila_submission/50M_dataset/coinrun_dynamics_base.sh",1182,0,"",shellscript,selection_mouse +936,1744231,"slurm/jobs/franz/berlin/coinrun/mila_submission/50M_dataset/coinrun_dynamics_base.sh",1182,1,"",shellscript,content +937,1745331,"slurm/jobs/franz/berlin/coinrun/mila_submission/50M_dataset/coinrun_dynamics_base.sh",1209,0,"",shellscript,selection_mouse +938,1746219,"slurm/jobs/franz/berlin/coinrun/mila_submission/50M_dataset/coinrun_dynamics_base.sh",1209,0," ",shellscript,content 
+939,1746220,"slurm/jobs/franz/berlin/coinrun/mila_submission/50M_dataset/coinrun_dynamics_base.sh",1210,0,"",shellscript,selection_keyboard +940,1746401,"slurm/jobs/franz/berlin/coinrun/mila_submission/50M_dataset/coinrun_dynamics_base.sh",1210,0,"u",shellscript,content +941,1746401,"slurm/jobs/franz/berlin/coinrun/mila_submission/50M_dataset/coinrun_dynamics_base.sh",1211,0,"",shellscript,selection_keyboard +942,1746470,"slurm/jobs/franz/berlin/coinrun/mila_submission/50M_dataset/coinrun_dynamics_base.sh",1211,0,"n",shellscript,content +943,1746471,"slurm/jobs/franz/berlin/coinrun/mila_submission/50M_dataset/coinrun_dynamics_base.sh",1212,0,"",shellscript,selection_keyboard +944,1746622,"slurm/jobs/franz/berlin/coinrun/mila_submission/50M_dataset/coinrun_dynamics_base.sh",1212,0,"i",shellscript,content +945,1746622,"slurm/jobs/franz/berlin/coinrun/mila_submission/50M_dataset/coinrun_dynamics_base.sh",1213,0,"",shellscript,selection_keyboard +946,1746705,"slurm/jobs/franz/berlin/coinrun/mila_submission/50M_dataset/coinrun_dynamics_base.sh",1213,0,"c",shellscript,content +947,1746705,"slurm/jobs/franz/berlin/coinrun/mila_submission/50M_dataset/coinrun_dynamics_base.sh",1214,0,"",shellscript,selection_keyboard +948,1746845,"slurm/jobs/franz/berlin/coinrun/mila_submission/50M_dataset/coinrun_dynamics_base.sh",1214,0,"o",shellscript,content +949,1746846,"slurm/jobs/franz/berlin/coinrun/mila_submission/50M_dataset/coinrun_dynamics_base.sh",1215,0,"",shellscript,selection_keyboard +950,1746994,"slurm/jobs/franz/berlin/coinrun/mila_submission/50M_dataset/coinrun_dynamics_base.sh",1215,0,"r",shellscript,content +951,1746995,"slurm/jobs/franz/berlin/coinrun/mila_submission/50M_dataset/coinrun_dynamics_base.sh",1216,0,"",shellscript,selection_keyboard +952,1747058,"slurm/jobs/franz/berlin/coinrun/mila_submission/50M_dataset/coinrun_dynamics_base.sh",1216,0,"n",shellscript,content +953,1747058,"slurm/jobs/franz/berlin/coinrun/mila_submission/50M_dataset/coinrun_dynamics_base.sh",1217,0,"",shellscript,selection_keyboard +954,1747327,"slurm/jobs/franz/berlin/coinrun/mila_submission/50M_dataset/coinrun_dynamics_base.sh",1216,0,"",shellscript,selection_command +955,1802670,"slurm/jobs/franz/berlin/coinrun/mila_submission/250M_dataset/arch_ablations/coinrun_dynamics_causal.sh",0,0,"#!/usr/bin/env bash\n\n#SBATCH --nodes=1\n#SBATCH --ntasks-per-node=1\n#SBATCH --time=24:00:00\n#SBATCH --cpus-per-task=8\n#SBATCH --gres=gpu:1\n#SBATCH --output=/fast/project/HFMI_SynergyUnit/jafar_ws/logs/franz/coinrun/dynamics/%x_%j.log\n#SBATCH --error=/fast/project/HFMI_SynergyUnit/jafar_ws/logs/franz/coinrun/dynamics/%x_%j.log\n#SBATCH --job-name=dynamics_coinrun_mila_submission_causal\n#SBATCH --requeue\n#SBATCH --signal=b:usr1@300 # 5 min before timeout\n\n# --- signal trap to requeue job before timeout ---\nrequeue_job() {\n echo ""[$(date)] caught sigusr1 (timeout warning), requeueing slurm job $SLURM_JOB_ID...""\n # optional: trigger checkpoint saving here\n # e.g., touch $checkpoint_dir/requeue_trigger\n scontrol requeue $SLURM_JOB_ID\n exit 0\n}\n\ntrap requeue_job sigusr1\n\n# set checkpoint flag based on restart count\nrestart_count=$(scontrol show job $SLURM_JOB_ID | grep -o 'Restarts=[0-9]*' | cut -d'=' -f2)\n\nif [ $restart_count -eq 0 ]; then\n restore_ckpt_flag=""--no-restore-ckpt""\nelse\n restore_ckpt_flag=""--restore-ckpt""\nfi\n\n\n\n# Log the sbatch script\ncat $0\n\nsource .venv/bin/activate\n\njob_name=$SLURM_JOB_NAME\nslurm_job_id=$SLURM_JOB_ID\n\ntags=""coinrun dynamics 250m_dataset mila_submission 
ablation causal""\n\narray_records_dir=""/fast/project/HFMI_SynergyUnit/jafar_ws/data/coinrun/array_records_250M_npy_arr_rec/array_record/""\ntokenizer_ckpt_dir=""/fast/project/HFMI_SynergyUnit/jafar_ws/checkpoints/coinrun/tokenizer/tokenizer_coinrun_mila_submission_29736/""\nCHECKPOINT_DIR=""/fast/project/HFMI_SynergyUnit/jafar_ws/checkpoints/coinrun/dynamics/${job_name}/${slurm_job_id}""\nmkdir -p $CHECKPOINT_DIR\n\nenv | grep SLURM\n\ncurrent_branch=$(git rev-parse --abbrev-ref HEAD)\nif [ ""$current_branch"" != ""prepend-action-maskgit"" ]; then\n echo ""This script must be run from the prepend-action-maskgit branch. Current branch is $current_branch. Exiting.""\n exit 1\nfi\n\n\n\nsrun python jasmine/train_dynamics.py \\n --patch_size=16 \\n --dyna_type=causal \\n --save_ckpt \\n $restore_ckpt_flag \\n --wandb_id $SLURM_JOB_ID \\n --ckpt_dir $CHECKPOINT_DIR \\n --name=""${job_name}_${slurm_job_id}"" \\n --tags ${tags} \\n --entity instant-uv \\n --project jafar \\n --tokenizer_checkpoint=""${tokenizer_ckpt_dir}"" \\n --val_data_dir=""${array_records_dir}/val"" \\n --data_dir=""${array_records_dir}/train"" &\n\nchild_pid=$!\n\nwait $child_pid\n\n",shellscript,tab +956,1812889,"slurm/jobs/franz/berlin/coinrun/mila_submission/50M_dataset/coinrun_dynamics_base.sh",0,0,"",shellscript,tab +957,1815007,"slurm/jobs/franz/berlin/coinrun/mila_submission/250M_dataset/arch_ablations/coinrun_dynamics_causal.sh",0,0,"",shellscript,tab +958,1818166,"slurm/jobs/franz/berlin/coinrun/mila_submission/50M_dataset/arch_ablations/coinrun_dynamics_causal.sh",0,0,"#!/usr/bin/env bash\n\n#SBATCH --nodes=1\n#SBATCH --ntasks-per-node=1\n#SBATCH --time=24:00:00\n#SBATCH --cpus-per-task=8\n#SBATCH --gres=gpu:1\n#SBATCH --output=/fast/project/HFMI_SynergyUnit/jafar_ws/logs/franz/coinrun/dynamics/%x_%j.log\n#SBATCH --error=/fast/project/HFMI_SynergyUnit/jafar_ws/logs/franz/coinrun/dynamics/%x_%j.log\n#SBATCH --job-name=dynamics_coinrun_mila_submission_causal\n#SBATCH --requeue\n#SBATCH --signal=b:usr1@300 # 5 min before timeout\n\n# --- signal trap to requeue job before timeout ---\nrequeue_job() {\n echo ""[$(date)] caught sigusr1 (timeout warning), requeueing slurm job $SLURM_JOB_ID...""\n # optional: trigger checkpoint saving here\n # e.g., touch $checkpoint_dir/requeue_trigger\n scontrol requeue $SLURM_JOB_ID\n exit 0\n}\n\ntrap requeue_job sigusr1\n\n# set checkpoint flag based on restart count\nrestart_count=$(scontrol show job $SLURM_JOB_ID | grep -o 'Restarts=[0-9]*' | cut -d'=' -f2)\n\nif [ $restart_count -eq 0 ]; then\n restore_ckpt_flag=""--no-restore-ckpt""\nelse\n restore_ckpt_flag=""--restore-ckpt""\nfi\n\n\n\n# Log the sbatch script\ncat $0\n\nsource .venv/bin/activate\n\njob_name=$SLURM_JOB_NAME\nslurm_job_id=$SLURM_JOB_ID\n\ntags=""coinrun dynamics 250m_dataset mila_submission ablation causal""\n\narray_records_dir=""/fast/project/HFMI_SynergyUnit/jafar_ws/data/coinrun/array_records_250M_npy_arr_rec/array_record/""\ntokenizer_ckpt_dir=""/fast/project/HFMI_SynergyUnit/jafar_ws/checkpoints/coinrun/tokenizer/tokenizer_coinrun_mila_submission_29736/""\nCHECKPOINT_DIR=""/fast/project/HFMI_SynergyUnit/jafar_ws/checkpoints/coinrun/dynamics/${job_name}/${slurm_job_id}""\nmkdir -p $CHECKPOINT_DIR\n\nenv | grep SLURM\n\ncurrent_branch=$(git rev-parse --abbrev-ref HEAD)\nif [ ""$current_branch"" != ""prepend-action-maskgit"" ]; then\n echo ""This script must be run from the prepend-action-maskgit branch. Current branch is $current_branch. 
Exiting.""\n exit 1\nfi\n\n\n\nsrun python jasmine/train_dynamics.py \\n --patch_size=16 \\n --dyna_type=causal \\n --save_ckpt \\n $restore_ckpt_flag \\n --wandb_id $SLURM_JOB_ID \\n --ckpt_dir $CHECKPOINT_DIR \\n --name=""${job_name}_${slurm_job_id}"" \\n --tags ${tags} \\n --entity instant-uv \\n --project jafar \\n --tokenizer_checkpoint=""${tokenizer_ckpt_dir}"" \\n --val_data_dir=""${array_records_dir}/val"" \\n --data_dir=""${array_records_dir}/train"" &\n\nchild_pid=$!\n\nwait $child_pid\n\n",shellscript,tab +959,1822141,"slurm/jobs/franz/berlin/coinrun/mila_submission/50M_dataset/arch_ablations/coinrun_dynamics_causal.sh",374,0,"",shellscript,selection_mouse +960,1834181,"slurm/jobs/franz/berlin/coinrun/mila_submission/50M_dataset/arch_ablations/coinrun_dynamics_causal.sh",1944,0,"",shellscript,selection_mouse +961,1834301,"slurm/jobs/franz/berlin/coinrun/mila_submission/50M_dataset/arch_ablations/coinrun_dynamics_causal.sh",1935,10,"patch_size",shellscript,selection_mouse +962,1853723,"slurm/jobs/franz/berlin/coinrun/mila_submission/50M_dataset/arch_ablations/coinrun_dynamics_causal.sh",1260,0,"",shellscript,selection_mouse +963,1854816,"slurm/jobs/franz/berlin/coinrun/mila_submission/50M_dataset/arch_ablations/coinrun_dynamics_causal.sh",1353,0,"\n",shellscript,content +964,1855108,"slurm/jobs/franz/berlin/coinrun/mila_submission/50M_dataset/arch_ablations/coinrun_dynamics_causal.sh",1354,0,"array_records_dir=""/fast/project/HFMI_SynergyUnit/jafar_ws/data/coinrun/array_records_500m_seed_w_increment""",shellscript,content +965,1855660,"slurm/jobs/franz/berlin/coinrun/mila_submission/50M_dataset/arch_ablations/coinrun_dynamics_causal.sh",1461,0,"",shellscript,selection_command +966,1855835,"slurm/jobs/franz/berlin/coinrun/mila_submission/50M_dataset/arch_ablations/coinrun_dynamics_causal.sh",1343,0,"",shellscript,selection_command +967,1856344,"slurm/jobs/franz/berlin/coinrun/mila_submission/50M_dataset/arch_ablations/coinrun_dynamics_causal.sh",1236,118,"",shellscript,content +968,1860666," checklist.md",0,0,"",markdown,tab +969,1860667," checklist.md",60,0,"",markdown,selection_mouse +970,1865520,"slurm/jobs/franz/berlin/coinrun/mila_submission/50M_dataset/arch_ablations/coinrun_dynamics_causal.sh",0,0,"",shellscript,tab +971,1865521,"slurm/jobs/franz/berlin/coinrun/mila_submission/50M_dataset/arch_ablations/coinrun_dynamics_causal.sh",1189,0,"",shellscript,selection_mouse +972,1866735,"slurm/jobs/franz/berlin/coinrun/mila_submission/50M_dataset/arch_ablations/coinrun_dynamics_causal.sh",1189,1,"",shellscript,content +973,1867288,"slurm/jobs/franz/berlin/coinrun/mila_submission/50M_dataset/arch_ablations/coinrun_dynamics_causal.sh",1233,0,"",shellscript,selection_command +974,1867569,"slurm/jobs/franz/berlin/coinrun/mila_submission/50M_dataset/arch_ablations/coinrun_dynamics_causal.sh",1232,0,"",shellscript,selection_command +975,1867980,"slurm/jobs/franz/berlin/coinrun/mila_submission/50M_dataset/arch_ablations/coinrun_dynamics_causal.sh",1232,0," ",shellscript,content +976,1867981,"slurm/jobs/franz/berlin/coinrun/mila_submission/50M_dataset/arch_ablations/coinrun_dynamics_causal.sh",1233,0,"",shellscript,selection_keyboard +977,1868323,"slurm/jobs/franz/berlin/coinrun/mila_submission/50M_dataset/arch_ablations/coinrun_dynamics_causal.sh",1233,0,"u",shellscript,content +978,1868324,"slurm/jobs/franz/berlin/coinrun/mila_submission/50M_dataset/arch_ablations/coinrun_dynamics_causal.sh",1234,0,"",shellscript,selection_keyboard 
+979,1868620,"slurm/jobs/franz/berlin/coinrun/mila_submission/50M_dataset/arch_ablations/coinrun_dynamics_causal.sh",1234,0,"n",shellscript,content +980,1868621,"slurm/jobs/franz/berlin/coinrun/mila_submission/50M_dataset/arch_ablations/coinrun_dynamics_causal.sh",1235,0,"",shellscript,selection_keyboard +981,1868779,"slurm/jobs/franz/berlin/coinrun/mila_submission/50M_dataset/arch_ablations/coinrun_dynamics_causal.sh",1235,0,"i",shellscript,content +982,1868780,"slurm/jobs/franz/berlin/coinrun/mila_submission/50M_dataset/arch_ablations/coinrun_dynamics_causal.sh",1236,0,"",shellscript,selection_keyboard +983,1868922,"slurm/jobs/franz/berlin/coinrun/mila_submission/50M_dataset/arch_ablations/coinrun_dynamics_causal.sh",1236,0,"c",shellscript,content +984,1868922,"slurm/jobs/franz/berlin/coinrun/mila_submission/50M_dataset/arch_ablations/coinrun_dynamics_causal.sh",1237,0,"",shellscript,selection_keyboard +985,1869010,"slurm/jobs/franz/berlin/coinrun/mila_submission/50M_dataset/arch_ablations/coinrun_dynamics_causal.sh",1237,0,"o",shellscript,content +986,1869013,"slurm/jobs/franz/berlin/coinrun/mila_submission/50M_dataset/arch_ablations/coinrun_dynamics_causal.sh",1238,0,"",shellscript,selection_keyboard +987,1869164,"slurm/jobs/franz/berlin/coinrun/mila_submission/50M_dataset/arch_ablations/coinrun_dynamics_causal.sh",1238,0,"r",shellscript,content +988,1869165,"slurm/jobs/franz/berlin/coinrun/mila_submission/50M_dataset/arch_ablations/coinrun_dynamics_causal.sh",1239,0,"",shellscript,selection_keyboard +989,1869231,"slurm/jobs/franz/berlin/coinrun/mila_submission/50M_dataset/arch_ablations/coinrun_dynamics_causal.sh",1239,0,"n",shellscript,content +990,1869231,"slurm/jobs/franz/berlin/coinrun/mila_submission/50M_dataset/arch_ablations/coinrun_dynamics_causal.sh",1240,0,"",shellscript,selection_keyboard +991,1869506,"slurm/jobs/franz/berlin/coinrun/mila_submission/50M_dataset/arch_ablations/coinrun_dynamics_causal.sh",1239,0,"",shellscript,selection_command +992,1881587,"slurm/jobs/franz/berlin/coinrun/mila_submission/250M_dataset/arch_ablations/coinrun_dynamics_ffn_dim_ablation.sh",0,0,"#!/usr/bin/env bash\n\n#SBATCH --nodes=1\n#SBATCH --ntasks-per-node=1\n#SBATCH --time=24:00:00\n#SBATCH --cpus-per-task=8\n#SBATCH --gres=gpu:1\n#SBATCH --output=/fast/project/HFMI_SynergyUnit/jafar_ws/logs/franz/coinrun/dynamics/%x_%j.log\n#SBATCH --error=/fast/project/HFMI_SynergyUnit/jafar_ws/logs/franz/coinrun/dynamics/%x_%j.log\n#SBATCH --job-name=dynamics_coinrun_mila_submission_ffn_dim_ablation\n#SBATCH --requeue\n#SBATCH --signal=b:usr1@300 # 5 min before timeout\n\n# --- signal trap to requeue job before timeout ---\nrequeue_job() {\n echo ""[$(date)] caught sigusr1 (timeout warning), requeueing slurm job $SLURM_JOB_ID...""\n # optional: trigger checkpoint saving here\n # e.g., touch $checkpoint_dir/requeue_trigger\n scontrol requeue $SLURM_JOB_ID\n exit 0\n}\n\ntrap requeue_job sigusr1\n\n# set checkpoint flag based on restart count\nrestart_count=$(scontrol show job $SLURM_JOB_ID | grep -o 'Restarts=[0-9]*' | cut -d'=' -f2)\n\nif [ $restart_count -eq 0 ]; then\n restore_ckpt_flag=""--no-restore-ckpt""\nelse\n restore_ckpt_flag=""--restore-ckpt""\nfi\n\n\n\n# Log the sbatch script\ncat $0\n\nsource .venv/bin/activate\n\njob_name=$SLURM_JOB_NAME\nslurm_job_id=$SLURM_JOB_ID\n\ntags=""coinrun dynamics 250m_dataset mila_submission ffn_dim 
ablation""\n\narray_records_dir=""/fast/project/HFMI_SynergyUnit/jafar_ws/data/coinrun/array_records_250M_npy_arr_rec/array_record/""\ntokenizer_ckpt_dir=""/fast/project/HFMI_SynergyUnit/jafar_ws/checkpoints/coinrun/tokenizer/tokenizer_coinrun_mila_submission_29736/""\nCHECKPOINT_DIR=""/fast/project/HFMI_SynergyUnit/jafar_ws/checkpoints/coinrun/dynamics/${job_name}/${slurm_job_id}""\nmkdir -p $CHECKPOINT_DIR\n\nenv | grep SLURM\n\ncurrent_branch=$(git rev-parse --abbrev-ref HEAD)\nif [ ""$current_branch"" != ""prepend-action-maskgit"" ]; then\n echo ""This script must be run from the prepend-action-maskgit branch. Current branch is $current_branch. Exiting.""\n exit 1\nfi\n\n\n\nsrun python jasmine/train_dynamics.py \\n --patch_size=16 \\n --dyna_ffn_dim=512 \\n --dyna_num_blocks=12 \\n --save_ckpt \\n $restore_ckpt_flag \\n --wandb_id $SLURM_JOB_ID \\n --ckpt_dir $CHECKPOINT_DIR \\n --name=""${job_name}_${slurm_job_id}"" \\n --tags ${tags} \\n --entity instant-uv \\n --project jafar \\n --tokenizer_checkpoint=""${tokenizer_ckpt_dir}"" \\n --val_data_dir=""${array_records_dir}/val"" \\n --data_dir=""${array_records_dir}/train"" &\n\nchild_pid=$!\n\nwait $child_pid\n\n",shellscript,tab +993,1884856,"slurm/jobs/franz/berlin/coinrun/mila_submission/50M_dataset/arch_ablations/coinrun_dynamics_ffn_dim_ablation.sh",0,0,"#!/usr/bin/env bash\n\n#SBATCH --nodes=1\n#SBATCH --ntasks-per-node=1\n#SBATCH --time=24:00:00\n#SBATCH --cpus-per-task=8\n#SBATCH --gres=gpu:1\n#SBATCH --output=/fast/project/HFMI_SynergyUnit/jafar_ws/logs/franz/coinrun/dynamics/%x_%j.log\n#SBATCH --error=/fast/project/HFMI_SynergyUnit/jafar_ws/logs/franz/coinrun/dynamics/%x_%j.log\n#SBATCH --job-name=dynamics_coinrun_mila_submission_ffn_dim_ablation\n#SBATCH --requeue\n#SBATCH --signal=b:usr1@300 # 5 min before timeout\n\n# --- signal trap to requeue job before timeout ---\nrequeue_job() {\n echo ""[$(date)] caught sigusr1 (timeout warning), requeueing slurm job $SLURM_JOB_ID...""\n # optional: trigger checkpoint saving here\n # e.g., touch $checkpoint_dir/requeue_trigger\n scontrol requeue $SLURM_JOB_ID\n exit 0\n}\n\ntrap requeue_job sigusr1\n\n# set checkpoint flag based on restart count\nrestart_count=$(scontrol show job $SLURM_JOB_ID | grep -o 'Restarts=[0-9]*' | cut -d'=' -f2)\n\nif [ $restart_count -eq 0 ]; then\n restore_ckpt_flag=""--no-restore-ckpt""\nelse\n restore_ckpt_flag=""--restore-ckpt""\nfi\n\n\n\n# Log the sbatch script\ncat $0\n\nsource .venv/bin/activate\n\njob_name=$SLURM_JOB_NAME\nslurm_job_id=$SLURM_JOB_ID\n\ntags=""coinrun dynamics 250m_dataset mila_submission ffn_dim ablation""\n\narray_records_dir=""/fast/project/HFMI_SynergyUnit/jafar_ws/data/coinrun/array_records_250M_npy_arr_rec/array_record/""\ntokenizer_ckpt_dir=""/fast/project/HFMI_SynergyUnit/jafar_ws/checkpoints/coinrun/tokenizer/tokenizer_coinrun_mila_submission_29736/""\nCHECKPOINT_DIR=""/fast/project/HFMI_SynergyUnit/jafar_ws/checkpoints/coinrun/dynamics/${job_name}/${slurm_job_id}""\nmkdir -p $CHECKPOINT_DIR\n\nenv | grep SLURM\n\ncurrent_branch=$(git rev-parse --abbrev-ref HEAD)\nif [ ""$current_branch"" != ""prepend-action-maskgit"" ]; then\n echo ""This script must be run from the prepend-action-maskgit branch. Current branch is $current_branch. 
Exiting.""\n exit 1\nfi\n\n\n\nsrun python jasmine/train_dynamics.py \\n --patch_size=16 \\n --dyna_ffn_dim=512 \\n --dyna_num_blocks=12 \\n --save_ckpt \\n $restore_ckpt_flag \\n --wandb_id $SLURM_JOB_ID \\n --ckpt_dir $CHECKPOINT_DIR \\n --name=""${job_name}_${slurm_job_id}"" \\n --tags ${tags} \\n --entity instant-uv \\n --project jafar \\n --tokenizer_checkpoint=""${tokenizer_ckpt_dir}"" \\n --val_data_dir=""${array_records_dir}/val"" \\n --data_dir=""${array_records_dir}/train"" &\n\nchild_pid=$!\n\nwait $child_pid\n\n",shellscript,tab +994,1904122,"slurm/jobs/franz/berlin/coinrun/mila_submission/50M_dataset/arch_ablations/coinrun_dynamics_ffn_dim_ablation.sh",1391,0,"",shellscript,selection_mouse +995,1904599,"slurm/jobs/franz/berlin/coinrun/mila_submission/50M_dataset/arch_ablations/coinrun_dynamics_ffn_dim_ablation.sh",1276,0,"",shellscript,selection_mouse +996,1905328,"slurm/jobs/franz/berlin/coinrun/mila_submission/50M_dataset/arch_ablations/coinrun_dynamics_ffn_dim_ablation.sh",1364,0,"\n",shellscript,content +997,1905518,"slurm/jobs/franz/berlin/coinrun/mila_submission/50M_dataset/arch_ablations/coinrun_dynamics_ffn_dim_ablation.sh",1365,0,"array_records_dir=""/fast/project/HFMI_SynergyUnit/jafar_ws/data/coinrun/array_records_500m_seed_w_increment""",shellscript,content +998,1905912,"slurm/jobs/franz/berlin/coinrun/mila_submission/50M_dataset/arch_ablations/coinrun_dynamics_ffn_dim_ablation.sh",1472,0,"",shellscript,selection_command +999,1906792,"slurm/jobs/franz/berlin/coinrun/mila_submission/50M_dataset/arch_ablations/coinrun_dynamics_ffn_dim_ablation.sh",1354,0,"",shellscript,selection_command +1000,1907118,"slurm/jobs/franz/berlin/coinrun/mila_submission/50M_dataset/arch_ablations/coinrun_dynamics_ffn_dim_ablation.sh",1247,118,"",shellscript,content +1001,1910702,"slurm/jobs/franz/berlin/coinrun/mila_submission/50M_dataset/arch_ablations/coinrun_dynamics_ffn_dim_ablation.sh",1200,0,"",shellscript,selection_mouse +1002,1911731,"slurm/jobs/franz/berlin/coinrun/mila_submission/50M_dataset/arch_ablations/coinrun_dynamics_ffn_dim_ablation.sh",1199,0,"",shellscript,selection_command +1003,1911836,"slurm/jobs/franz/berlin/coinrun/mila_submission/50M_dataset/arch_ablations/coinrun_dynamics_ffn_dim_ablation.sh",1199,1,"",shellscript,content +1004,1912131,"slurm/jobs/franz/berlin/coinrun/mila_submission/50M_dataset/arch_ablations/coinrun_dynamics_ffn_dim_ablation.sh",1244,0,"",shellscript,selection_command +1005,1912441,"slurm/jobs/franz/berlin/coinrun/mila_submission/50M_dataset/arch_ablations/coinrun_dynamics_ffn_dim_ablation.sh",1243,0,"",shellscript,selection_command +1006,1912835,"slurm/jobs/franz/berlin/coinrun/mila_submission/50M_dataset/arch_ablations/coinrun_dynamics_ffn_dim_ablation.sh",1243,0," ",shellscript,content +1007,1912836,"slurm/jobs/franz/berlin/coinrun/mila_submission/50M_dataset/arch_ablations/coinrun_dynamics_ffn_dim_ablation.sh",1244,0,"",shellscript,selection_keyboard +1008,1913213,"slurm/jobs/franz/berlin/coinrun/mila_submission/50M_dataset/arch_ablations/coinrun_dynamics_ffn_dim_ablation.sh",1244,0,"u",shellscript,content +1009,1913213,"slurm/jobs/franz/berlin/coinrun/mila_submission/50M_dataset/arch_ablations/coinrun_dynamics_ffn_dim_ablation.sh",1245,0,"",shellscript,selection_keyboard +1010,1913387,"slurm/jobs/franz/berlin/coinrun/mila_submission/50M_dataset/arch_ablations/coinrun_dynamics_ffn_dim_ablation.sh",1245,0,"n",shellscript,content 
+1011,1913387,"slurm/jobs/franz/berlin/coinrun/mila_submission/50M_dataset/arch_ablations/coinrun_dynamics_ffn_dim_ablation.sh",1246,0,"",shellscript,selection_keyboard +1012,1913505,"slurm/jobs/franz/berlin/coinrun/mila_submission/50M_dataset/arch_ablations/coinrun_dynamics_ffn_dim_ablation.sh",1246,0,"i",shellscript,content +1013,1913506,"slurm/jobs/franz/berlin/coinrun/mila_submission/50M_dataset/arch_ablations/coinrun_dynamics_ffn_dim_ablation.sh",1247,0,"",shellscript,selection_keyboard +1014,1913599,"slurm/jobs/franz/berlin/coinrun/mila_submission/50M_dataset/arch_ablations/coinrun_dynamics_ffn_dim_ablation.sh",1247,0,"c",shellscript,content +1015,1913600,"slurm/jobs/franz/berlin/coinrun/mila_submission/50M_dataset/arch_ablations/coinrun_dynamics_ffn_dim_ablation.sh",1248,0,"",shellscript,selection_keyboard +1016,1913683,"slurm/jobs/franz/berlin/coinrun/mila_submission/50M_dataset/arch_ablations/coinrun_dynamics_ffn_dim_ablation.sh",1248,0,"o",shellscript,content +1017,1913683,"slurm/jobs/franz/berlin/coinrun/mila_submission/50M_dataset/arch_ablations/coinrun_dynamics_ffn_dim_ablation.sh",1249,0,"",shellscript,selection_keyboard +1018,1913800,"slurm/jobs/franz/berlin/coinrun/mila_submission/50M_dataset/arch_ablations/coinrun_dynamics_ffn_dim_ablation.sh",1249,0,"r",shellscript,content +1019,1913801,"slurm/jobs/franz/berlin/coinrun/mila_submission/50M_dataset/arch_ablations/coinrun_dynamics_ffn_dim_ablation.sh",1250,0,"",shellscript,selection_keyboard +1020,1913889,"slurm/jobs/franz/berlin/coinrun/mila_submission/50M_dataset/arch_ablations/coinrun_dynamics_ffn_dim_ablation.sh",1250,0,"n",shellscript,content +1021,1913890,"slurm/jobs/franz/berlin/coinrun/mila_submission/50M_dataset/arch_ablations/coinrun_dynamics_ffn_dim_ablation.sh",1251,0,"",shellscript,selection_keyboard +1022,1914085,"slurm/jobs/franz/berlin/coinrun/mila_submission/50M_dataset/arch_ablations/coinrun_dynamics_ffn_dim_ablation.sh",1250,0,"",shellscript,selection_command +1023,1919441,"slurm/jobs/franz/berlin/coinrun/mila_submission/50M_dataset/arch_ablations/coinrun_dynamics_ffn_dim_ablation.sh",1897,0,"",shellscript,selection_mouse +1024,1920796,"slurm/jobs/franz/berlin/coinrun/mila_submission/50M_dataset/arch_ablations/coinrun_dynamics_causal.sh",0,0,"",shellscript,tab +1025,1930610,"slurm/jobs/franz/berlin/coinrun/mila_submission/50M_dataset/arch_ablations/coinrun_dynamics_ffn_dim_ablation.sh",0,0,"",shellscript,tab +1026,1937919,"slurm/jobs/franz/berlin/coinrun/mila_submission/50M_dataset/arch_ablations/coinrun_dynamics_causal.sh",0,0,"",shellscript,tab +1027,1941388,"slurm/jobs/franz/berlin/coinrun/mila_submission/250M_dataset/arch_ablations/coinrun_dynamics_ffn_dim_ablation.sh",0,0,"",shellscript,tab +1028,1944785,"slurm/jobs/franz/berlin/coinrun/mila_submission/50M_dataset/coinrun_dynamics_base.sh",0,0,"",shellscript,tab +1029,1949578,"slurm/jobs/franz/berlin/coinrun/mila_submission/250M_dataset/arch_ablations/coinrun_dynamics_gt_actions.sh",0,0,"#!/usr/bin/env bash\n\n#SBATCH --nodes=1\n#SBATCH --ntasks-per-node=1\n#SBATCH --time=24:00:00\n#SBATCH --cpus-per-task=8\n#SBATCH --gres=gpu:1\n#SBATCH --output=/fast/project/HFMI_SynergyUnit/jafar_ws/logs/franz/coinrun/dynamics/%x_%j.log\n#SBATCH --error=/fast/project/HFMI_SynergyUnit/jafar_ws/logs/franz/coinrun/dynamics/%x_%j.log\n#SBATCH --job-name=dynamics_coinrun_mila_submission_gt_actions\n#SBATCH --requeue\n#SBATCH --signal=b:usr1@300 # 5 min before timeout\n\n# --- signal trap to requeue job before timeout ---\nrequeue_job() {\n echo ""[$(date)] caught 
sigusr1 (timeout warning), requeueing slurm job $SLURM_JOB_ID...""\n # optional: trigger checkpoint saving here\n # e.g., touch $checkpoint_dir/requeue_trigger\n scontrol requeue $SLURM_JOB_ID\n exit 0\n}\n\ntrap requeue_job sigusr1\n\n# set checkpoint flag based on restart count\nrestart_count=$(scontrol show job $SLURM_JOB_ID | grep -o 'Restarts=[0-9]*' | cut -d'=' -f2)\n\nif [ $restart_count -eq 0 ]; then\n restore_ckpt_flag=""--no-restore-ckpt""\nelse\n restore_ckpt_flag=""--restore-ckpt""\nfi\n\n\n\n# Log the sbatch script\ncat $0\n\nsource .venv/bin/activate\n\njob_name=$SLURM_JOB_NAME\nslurm_job_id=$SLURM_JOB_ID\n\ntags=""coinrun dynamics 250m_dataset mila_submission ablation gt-actions""\n\narray_records_dir=""/fast/project/HFMI_SynergyUnit/jafar_ws/data/coinrun/array_records_250M_npy_arr_rec/array_record/""\ntokenizer_ckpt_dir=""/fast/project/HFMI_SynergyUnit/jafar_ws/checkpoints/coinrun/tokenizer/tokenizer_coinrun_mila_submission_29736/""\nCHECKPOINT_DIR=""/fast/project/HFMI_SynergyUnit/jafar_ws/checkpoints/coinrun/dynamics/${job_name}/${slurm_job_id}""\nmkdir -p $CHECKPOINT_DIR\n\nenv | grep SLURM\n\ncurrent_branch=$(git rev-parse --abbrev-ref HEAD)\nif [ ""$current_branch"" != ""prepend-action-maskgit"" ]; then\n echo ""This script must be run from the prepend-action-maskgit branch. Current branch is $current_branch. Exiting.""\n exit 1\nfi\n\n\nsrun python jasmine/train_dynamics.py \\n --patch_size=16 \\n --use_gt_actions \\n --num_actions=15 \\n --save_ckpt \\n $restore_ckpt_flag \\n --wandb_id $SLURM_JOB_ID \\n --ckpt_dir $CHECKPOINT_DIR \\n --name=""${job_name}_${slurm_job_id}"" \\n --tags ${tags} \\n --entity instant-uv \\n --project jafar \\n --tokenizer_checkpoint=""${tokenizer_ckpt_dir}"" \\n --val_data_dir=""${array_records_dir}/val"" \\n --data_dir=""${array_records_dir}/train"" &\n\nchild_pid=$!\n\nwait $child_pid\n\n",shellscript,tab +1030,1953850,"slurm/jobs/franz/berlin/coinrun/mila_submission/50M_dataset/arch_ablations/coinrun_dynamics_gt_actions.sh",0,0,"#!/usr/bin/env bash\n\n#SBATCH --nodes=1\n#SBATCH --ntasks-per-node=1\n#SBATCH --time=24:00:00\n#SBATCH --cpus-per-task=8\n#SBATCH --gres=gpu:1\n#SBATCH --output=/fast/project/HFMI_SynergyUnit/jafar_ws/logs/franz/coinrun/dynamics/%x_%j.log\n#SBATCH --error=/fast/project/HFMI_SynergyUnit/jafar_ws/logs/franz/coinrun/dynamics/%x_%j.log\n#SBATCH --job-name=dynamics_coinrun_mila_submission_gt_actions\n#SBATCH --requeue\n#SBATCH --signal=b:usr1@300 # 5 min before timeout\n\n# --- signal trap to requeue job before timeout ---\nrequeue_job() {\n echo ""[$(date)] caught sigusr1 (timeout warning), requeueing slurm job $SLURM_JOB_ID...""\n # optional: trigger checkpoint saving here\n # e.g., touch $checkpoint_dir/requeue_trigger\n scontrol requeue $SLURM_JOB_ID\n exit 0\n}\n\ntrap requeue_job sigusr1\n\n# set checkpoint flag based on restart count\nrestart_count=$(scontrol show job $SLURM_JOB_ID | grep -o 'Restarts=[0-9]*' | cut -d'=' -f2)\n\nif [ $restart_count -eq 0 ]; then\n restore_ckpt_flag=""--no-restore-ckpt""\nelse\n restore_ckpt_flag=""--restore-ckpt""\nfi\n\n\n\n# Log the sbatch script\ncat $0\n\nsource .venv/bin/activate\n\njob_name=$SLURM_JOB_NAME\nslurm_job_id=$SLURM_JOB_ID\n\ntags=""coinrun dynamics 250m_dataset mila_submission ablation 
gt-actions""\n\narray_records_dir=""/fast/project/HFMI_SynergyUnit/jafar_ws/data/coinrun/array_records_250M_npy_arr_rec/array_record/""\ntokenizer_ckpt_dir=""/fast/project/HFMI_SynergyUnit/jafar_ws/checkpoints/coinrun/tokenizer/tokenizer_coinrun_mila_submission_29736/""\nCHECKPOINT_DIR=""/fast/project/HFMI_SynergyUnit/jafar_ws/checkpoints/coinrun/dynamics/${job_name}/${slurm_job_id}""\nmkdir -p $CHECKPOINT_DIR\n\nenv | grep SLURM\n\ncurrent_branch=$(git rev-parse --abbrev-ref HEAD)\nif [ ""$current_branch"" != ""prepend-action-maskgit"" ]; then\n echo ""This script must be run from the prepend-action-maskgit branch. Current branch is $current_branch. Exiting.""\n exit 1\nfi\n\n\nsrun python jasmine/train_dynamics.py \\n --patch_size=16 \\n --use_gt_actions \\n --num_actions=15 \\n --save_ckpt \\n $restore_ckpt_flag \\n --wandb_id $SLURM_JOB_ID \\n --ckpt_dir $CHECKPOINT_DIR \\n --name=""${job_name}_${slurm_job_id}"" \\n --tags ${tags} \\n --entity instant-uv \\n --project jafar \\n --tokenizer_checkpoint=""${tokenizer_ckpt_dir}"" \\n --val_data_dir=""${array_records_dir}/val"" \\n --data_dir=""${array_records_dir}/train"" &\n\nchild_pid=$!\n\nwait $child_pid\n\n",shellscript,tab +1031,1957160," checklist.md",0,0,"",markdown,tab +1032,1957161," checklist.md",73,0,"",markdown,selection_mouse +1033,1957193," checklist.md",72,0,"",markdown,selection_command +1034,1957986," checklist.md",73,0,"\n",markdown,content +1035,1958468," checklist.md",74,0,"-",markdown,content +1036,1958469," checklist.md",75,0,"",markdown,selection_keyboard +1037,1958709," checklist.md",75,0," ",markdown,content +1038,1958710," checklist.md",76,0,"",markdown,selection_keyboard +1039,1958862," checklist.md",76,0,"d",markdown,content +1040,1958863," checklist.md",77,0,"",markdown,selection_keyboard +1041,1959030," checklist.md",77,0,"a",markdown,content +1042,1959031," checklist.md",78,0,"",markdown,selection_keyboard +1043,1959116," checklist.md",78,0,"s",markdown,content +1044,1959117," checklist.md",79,0,"",markdown,selection_keyboard +1045,1959176," checklist.md",79,0," ",markdown,content +1046,1959177," checklist.md",80,0,"",markdown,selection_keyboard +1047,1959752," checklist.md",80,0,"w",markdown,content +1048,1959753," checklist.md",81,0,"",markdown,selection_keyboard +1049,1959923," checklist.md",81,0,"a",markdown,content +1050,1959924," checklist.md",82,0,"",markdown,selection_keyboard +1051,1960032," checklist.md",82,0,"s",markdown,content +1052,1960032," checklist.md",83,0,"",markdown,selection_keyboard +1053,1960232," checklist.md",83,0," ",markdown,content +1054,1960233," checklist.md",84,0,"",markdown,selection_keyboard +1055,1960367," checklist.md",84,0,"w",markdown,content +1056,1960367," checklist.md",85,0,"",markdown,selection_keyboard +1057,1960446," checklist.md",85,0,"i",markdown,content +1058,1960447," checklist.md",86,0,"",markdown,selection_keyboard +1059,1960552," checklist.md",86,0,"r",markdown,content +1060,1960553," checklist.md",87,0,"",markdown,selection_keyboard +1061,1960636," checklist.md",87,0," ",markdown,content +1062,1960637," checklist.md",88,0,"",markdown,selection_keyboard +1063,1960732," checklist.md",88,0,"m",markdown,content +1064,1960733," checklist.md",89,0,"",markdown,selection_keyboard +1065,1960819," checklist.md",89,0,"a",markdown,content +1066,1960820," checklist.md",90,0,"",markdown,selection_keyboard +1067,1960963," checklist.md",90,0,"c",markdown,content +1068,1960964," checklist.md",91,0,"",markdown,selection_keyboard +1069,1960997," 
checklist.md",91,0,"h",markdown,content +1070,1960998," checklist.md",92,0,"",markdown,selection_keyboard +1071,1961135," checklist.md",92,0,"e",markdown,content +1072,1961136," checklist.md",93,0,"",markdown,selection_keyboard +1073,1961185," checklist.md",93,0,"n",markdown,content +1074,1961186," checklist.md",94,0,"",markdown,selection_keyboard +1075,1961286," checklist.md",94,0," ",markdown,content +1076,1961287," checklist.md",95,0,"",markdown,selection_keyboard +1077,1961438," checklist.md",95,0,"w",markdown,content +1078,1961439," checklist.md",96,0,"",markdown,selection_keyboard +1079,1961462," checklist.md",96,0,"o",markdown,content +1080,1961593," checklist.md",97,0,"l",markdown,content +1081,1961593," checklist.md",98,0,"",markdown,selection_keyboard +1082,1961733," checklist.md",98,0,"l",markdown,content +1083,1961733," checklist.md",99,0,"",markdown,selection_keyboard +1084,1961800," checklist.md",99,0,"e",markdown,content +1085,1961800," checklist.md",100,0,"",markdown,selection_keyboard +1086,1961903," checklist.md",100,0,"n",markdown,content +1087,1961904," checklist.md",101,0,"",markdown,selection_keyboard +1088,1963703," checklist.md",4,0,"",markdown,selection_mouse +1089,1963845," checklist.md",2,6,"assert",markdown,selection_mouse +1090,1990758,"slurm/jobs/franz/berlin/coinrun/mila_submission/50M_dataset/arch_ablations/coinrun_dynamics_gt_actions.sh",0,0,"",shellscript,tab +1091,1990759,"slurm/jobs/franz/berlin/coinrun/mila_submission/50M_dataset/arch_ablations/coinrun_dynamics_gt_actions.sh",1269,0,"",shellscript,selection_mouse +1092,1991807,"slurm/jobs/franz/berlin/coinrun/mila_submission/50M_dataset/arch_ablations/coinrun_dynamics_gt_actions.sh",1361,0,"\n",shellscript,content +1093,1992022,"slurm/jobs/franz/berlin/coinrun/mila_submission/50M_dataset/arch_ablations/coinrun_dynamics_gt_actions.sh",1362,0,"array_records_dir=""/fast/project/HFMI_SynergyUnit/jafar_ws/data/coinrun/array_records_500m_seed_w_increment""",shellscript,content +1094,1992555,"slurm/jobs/franz/berlin/coinrun/mila_submission/50M_dataset/arch_ablations/coinrun_dynamics_gt_actions.sh",1469,0,"",shellscript,selection_command +1095,1992668,"slurm/jobs/franz/berlin/coinrun/mila_submission/50M_dataset/arch_ablations/coinrun_dynamics_gt_actions.sh",1351,0,"",shellscript,selection_command +1096,1993154,"slurm/jobs/franz/berlin/coinrun/mila_submission/50M_dataset/arch_ablations/coinrun_dynamics_gt_actions.sh",1244,118,"",shellscript,content +1097,1994978," checklist.md",0,0,"",markdown,tab +1098,1994979," checklist.md",53,0,"",markdown,selection_mouse +1099,1994979," checklist.md",49,7,"dataset",markdown,selection_mouse +1100,1995990," checklist.md",61,0,"",markdown,selection_mouse +1101,1996145," checklist.md",59,4,"tags",markdown,selection_mouse +1102,1998084,"slurm/jobs/franz/berlin/coinrun/mila_submission/50M_dataset/arch_ablations/coinrun_dynamics_gt_actions.sh",0,0,"",shellscript,tab +1103,1998084,"slurm/jobs/franz/berlin/coinrun/mila_submission/50M_dataset/arch_ablations/coinrun_dynamics_gt_actions.sh",1194,0,"",shellscript,selection_mouse +1104,1999010,"slurm/jobs/franz/berlin/coinrun/mila_submission/50M_dataset/arch_ablations/coinrun_dynamics_gt_actions.sh",1193,0,"",shellscript,selection_command +1105,1999707,"slurm/jobs/franz/berlin/coinrun/mila_submission/50M_dataset/arch_ablations/coinrun_dynamics_gt_actions.sh",1193,1,"",shellscript,content 
+1106,2000091,"slurm/jobs/franz/berlin/coinrun/mila_submission/50M_dataset/arch_ablations/coinrun_dynamics_gt_actions.sh",1241,0,"",shellscript,selection_command +1107,2000449,"slurm/jobs/franz/berlin/coinrun/mila_submission/50M_dataset/arch_ablations/coinrun_dynamics_gt_actions.sh",1240,0,"",shellscript,selection_command +1108,2000785,"slurm/jobs/franz/berlin/coinrun/mila_submission/50M_dataset/arch_ablations/coinrun_dynamics_gt_actions.sh",1240,0," ",shellscript,content +1109,2000786,"slurm/jobs/franz/berlin/coinrun/mila_submission/50M_dataset/arch_ablations/coinrun_dynamics_gt_actions.sh",1241,0,"",shellscript,selection_keyboard +1110,2001206,"slurm/jobs/franz/berlin/coinrun/mila_submission/50M_dataset/arch_ablations/coinrun_dynamics_gt_actions.sh",1241,0,"u",shellscript,content +1111,2001208,"slurm/jobs/franz/berlin/coinrun/mila_submission/50M_dataset/arch_ablations/coinrun_dynamics_gt_actions.sh",1242,0,"",shellscript,selection_keyboard +1112,2001303,"slurm/jobs/franz/berlin/coinrun/mila_submission/50M_dataset/arch_ablations/coinrun_dynamics_gt_actions.sh",1242,0,"n",shellscript,content +1113,2001303,"slurm/jobs/franz/berlin/coinrun/mila_submission/50M_dataset/arch_ablations/coinrun_dynamics_gt_actions.sh",1243,0,"",shellscript,selection_keyboard +1114,2001471,"slurm/jobs/franz/berlin/coinrun/mila_submission/50M_dataset/arch_ablations/coinrun_dynamics_gt_actions.sh",1243,0,"i",shellscript,content +1115,2001472,"slurm/jobs/franz/berlin/coinrun/mila_submission/50M_dataset/arch_ablations/coinrun_dynamics_gt_actions.sh",1244,0,"",shellscript,selection_keyboard +1116,2001597,"slurm/jobs/franz/berlin/coinrun/mila_submission/50M_dataset/arch_ablations/coinrun_dynamics_gt_actions.sh",1244,0,"c",shellscript,content +1117,2001598,"slurm/jobs/franz/berlin/coinrun/mila_submission/50M_dataset/arch_ablations/coinrun_dynamics_gt_actions.sh",1245,0,"",shellscript,selection_keyboard +1118,2001696,"slurm/jobs/franz/berlin/coinrun/mila_submission/50M_dataset/arch_ablations/coinrun_dynamics_gt_actions.sh",1245,0,"o",shellscript,content +1119,2001697,"slurm/jobs/franz/berlin/coinrun/mila_submission/50M_dataset/arch_ablations/coinrun_dynamics_gt_actions.sh",1246,0,"",shellscript,selection_keyboard +1120,2001807,"slurm/jobs/franz/berlin/coinrun/mila_submission/50M_dataset/arch_ablations/coinrun_dynamics_gt_actions.sh",1246,0,"r",shellscript,content +1121,2001808,"slurm/jobs/franz/berlin/coinrun/mila_submission/50M_dataset/arch_ablations/coinrun_dynamics_gt_actions.sh",1247,0,"",shellscript,selection_keyboard +1122,2001874,"slurm/jobs/franz/berlin/coinrun/mila_submission/50M_dataset/arch_ablations/coinrun_dynamics_gt_actions.sh",1247,0,"n",shellscript,content +1123,2001875,"slurm/jobs/franz/berlin/coinrun/mila_submission/50M_dataset/arch_ablations/coinrun_dynamics_gt_actions.sh",1248,0,"",shellscript,selection_keyboard +1124,2002311,"slurm/jobs/franz/berlin/coinrun/mila_submission/50M_dataset/arch_ablations/coinrun_dynamics_gt_actions.sh",1247,0,"",shellscript,selection_command +1125,2004298," checklist.md",0,0,"",markdown,tab +1126,2004299," checklist.md",68,0,"",markdown,selection_mouse +1127,2004299," checklist.md",66,7,"unicorn",markdown,selection_mouse +1128,2008229,"slurm/jobs/franz/berlin/coinrun/mila_submission/50M_dataset/arch_ablations/coinrun_dynamics_gt_actions.sh",0,0,"",shellscript,tab +1129,2008230,"slurm/jobs/franz/berlin/coinrun/mila_submission/50M_dataset/arch_ablations/coinrun_dynamics_gt_actions.sh",1969,0,"",shellscript,selection_mouse 
+1130,2008230,"slurm/jobs/franz/berlin/coinrun/mila_submission/50M_dataset/arch_ablations/coinrun_dynamics_gt_actions.sh",1962,14,"use_gt_actions",shellscript,selection_mouse +1131,2008337,"slurm/jobs/franz/berlin/coinrun/mila_submission/50M_dataset/arch_ablations/coinrun_dynamics_gt_actions.sh",1956,23," --use_gt_actions \\n",shellscript,selection_mouse +1132,2008939,"slurm/jobs/franz/berlin/coinrun/mila_submission/50M_dataset/arch_ablations/coinrun_dynamics_gt_actions.sh",1991,0,"",shellscript,selection_mouse +1133,2009103,"slurm/jobs/franz/berlin/coinrun/mila_submission/50M_dataset/arch_ablations/coinrun_dynamics_gt_actions.sh",1985,11,"num_actions",shellscript,selection_mouse +1134,2009226,"slurm/jobs/franz/berlin/coinrun/mila_submission/50M_dataset/arch_ablations/coinrun_dynamics_gt_actions.sh",1979,23," --num_actions=15 \\n",shellscript,selection_mouse +1135,2009838,"slurm/jobs/franz/berlin/coinrun/mila_submission/50M_dataset/arch_ablations/coinrun_dynamics_gt_actions.sh",2220,0,"",shellscript,selection_mouse +1136,2009862,"slurm/jobs/franz/berlin/coinrun/mila_submission/50M_dataset/arch_ablations/coinrun_dynamics_gt_actions.sh",2219,0,"",shellscript,selection_command +1137,2013917,"slurm/jobs/franz/berlin/coinrun/mila_submission/250M_dataset/arch_ablations/coinrun_dynamics_no_cotraining.sh",0,0,"#!/usr/bin/env bash\n\n#SBATCH --nodes=1\n#SBATCH --ntasks-per-node=1\n#SBATCH --time=24:00:00\n#SBATCH --cpus-per-task=8\n#SBATCH --gres=gpu:1\n#SBATCH --output=/fast/project/HFMI_SynergyUnit/jafar_ws/logs/franz/coinrun/dynamics/%x_%j.log\n#SBATCH --error=/fast/project/HFMI_SynergyUnit/jafar_ws/logs/franz/coinrun/dynamics/%x_%j.log\n#SBATCH --job-name=dynamics_coinrun_mila_submission_no_cotraining\n#SBATCH --requeue\n#SBATCH --signal=b:usr1@300 # 5 min before timeout\n\n# --- signal trap to requeue job before timeout ---\nrequeue_job() {\n echo ""[$(date)] caught sigusr1 (timeout warning), requeueing slurm job $SLURM_JOB_ID...""\n # optional: trigger checkpoint saving here\n # e.g., touch $checkpoint_dir/requeue_trigger\n scontrol requeue $SLURM_JOB_ID\n exit 0\n}\n\ntrap requeue_job sigusr1\n\n# set checkpoint flag based on restart count\nrestart_count=$(scontrol show job $SLURM_JOB_ID | grep -o 'Restarts=[0-9]*' | cut -d'=' -f2)\n\nif [ $restart_count -eq 0 ]; then\n restore_ckpt_flag=""--no-restore-ckpt""\nelse\n restore_ckpt_flag=""--restore-ckpt""\nfi\n\n\n\n# Log the sbatch script\ncat $0\n\nsource .venv/bin/activate\n\njob_name=$SLURM_JOB_NAME\nslurm_job_id=$SLURM_JOB_ID\n\ntags=""coinrun dynamics 250m_dataset mila_submission ablation no-cotraining""\n\narray_records_dir=""/fast/project/HFMI_SynergyUnit/jafar_ws/data/coinrun/array_records_250M_npy_arr_rec/array_record/""\ntokenizer_ckpt_dir=""/fast/project/HFMI_SynergyUnit/jafar_ws/checkpoints/coinrun/tokenizer/tokenizer_coinrun_mila_submission_29736/""\nlam_ckpt_dir=""/fast/project/HFMI_SynergyUnit/jafar_ws/checkpoints/coinrun/lam/lam_coinrun_mila_submission_no_flash_attention_29738/""\nCHECKPOINT_DIR=""/fast/project/HFMI_SynergyUnit/jafar_ws/checkpoints/coinrun/dynamics/${job_name}/${slurm_job_id}""\nmkdir -p $CHECKPOINT_DIR\n\nenv | grep SLURM\n\ncurrent_branch=$(git rev-parse --abbrev-ref HEAD)\nif [ ""$current_branch"" != ""prepend-action-maskgit"" ]; then\n echo ""This script must be run from the prepend-action-maskgit branch. Current branch is $current_branch. 
Exiting.""\n exit 1\nfi\n\n\nsrun python jasmine/train_dynamics.py \\n --patch_size=16 \\n --save_ckpt \\n $restore_ckpt_flag \\n --wandb_id $SLURM_JOB_ID \\n --ckpt_dir $CHECKPOINT_DIR \\n --name=""${job_name}_${slurm_job_id}"" \\n --tags ${tags} \\n --entity instant-uv \\n --project jafar \\n --tokenizer_checkpoint=""${tokenizer_ckpt_dir}"" \\n --lam_checkpoint=""${lam_ckpt_dir}"" \\n --val_data_dir=""${array_records_dir}/val"" \\n --data_dir=""${array_records_dir}/train"" &\n\nchild_pid=$!\n\nwait $child_pid\n\n",shellscript,tab +1138,2019420,"slurm/jobs/franz/berlin/coinrun/mila_submission/50M_dataset/arch_ablations/coinrun_dynamics_no_cotraining.sh",0,0,"#!/usr/bin/env bash\n\n#SBATCH --nodes=1\n#SBATCH --ntasks-per-node=1\n#SBATCH --time=24:00:00\n#SBATCH --cpus-per-task=8\n#SBATCH --gres=gpu:1\n#SBATCH --output=/fast/project/HFMI_SynergyUnit/jafar_ws/logs/franz/coinrun/dynamics/%x_%j.log\n#SBATCH --error=/fast/project/HFMI_SynergyUnit/jafar_ws/logs/franz/coinrun/dynamics/%x_%j.log\n#SBATCH --job-name=dynamics_coinrun_mila_submission_no_cotraining\n#SBATCH --requeue\n#SBATCH --signal=b:usr1@300 # 5 min before timeout\n\n# --- signal trap to requeue job before timeout ---\nrequeue_job() {\n echo ""[$(date)] caught sigusr1 (timeout warning), requeueing slurm job $SLURM_JOB_ID...""\n # optional: trigger checkpoint saving here\n # e.g., touch $checkpoint_dir/requeue_trigger\n scontrol requeue $SLURM_JOB_ID\n exit 0\n}\n\ntrap requeue_job sigusr1\n\n# set checkpoint flag based on restart count\nrestart_count=$(scontrol show job $SLURM_JOB_ID | grep -o 'Restarts=[0-9]*' | cut -d'=' -f2)\n\nif [ $restart_count -eq 0 ]; then\n restore_ckpt_flag=""--no-restore-ckpt""\nelse\n restore_ckpt_flag=""--restore-ckpt""\nfi\n\n\n\n# Log the sbatch script\ncat $0\n\nsource .venv/bin/activate\n\njob_name=$SLURM_JOB_NAME\nslurm_job_id=$SLURM_JOB_ID\n\ntags=""coinrun dynamics 250m_dataset mila_submission ablation no-cotraining""\n\narray_records_dir=""/fast/project/HFMI_SynergyUnit/jafar_ws/data/coinrun/array_records_250M_npy_arr_rec/array_record/""\ntokenizer_ckpt_dir=""/fast/project/HFMI_SynergyUnit/jafar_ws/checkpoints/coinrun/tokenizer/tokenizer_coinrun_mila_submission_29736/""\nlam_ckpt_dir=""/fast/project/HFMI_SynergyUnit/jafar_ws/checkpoints/coinrun/lam/lam_coinrun_mila_submission_no_flash_attention_29738/""\nCHECKPOINT_DIR=""/fast/project/HFMI_SynergyUnit/jafar_ws/checkpoints/coinrun/dynamics/${job_name}/${slurm_job_id}""\nmkdir -p $CHECKPOINT_DIR\n\nenv | grep SLURM\n\ncurrent_branch=$(git rev-parse --abbrev-ref HEAD)\nif [ ""$current_branch"" != ""prepend-action-maskgit"" ]; then\n echo ""This script must be run from the prepend-action-maskgit branch. Current branch is $current_branch. 
Exiting.""\n exit 1\nfi\n\n\nsrun python jasmine/train_dynamics.py \\n --patch_size=16 \\n --save_ckpt \\n $restore_ckpt_flag \\n --wandb_id $SLURM_JOB_ID \\n --ckpt_dir $CHECKPOINT_DIR \\n --name=""${job_name}_${slurm_job_id}"" \\n --tags ${tags} \\n --entity instant-uv \\n --project jafar \\n --tokenizer_checkpoint=""${tokenizer_ckpt_dir}"" \\n --lam_checkpoint=""${lam_ckpt_dir}"" \\n --val_data_dir=""${array_records_dir}/val"" \\n --data_dir=""${array_records_dir}/train"" &\n\nchild_pid=$!\n\nwait $child_pid\n\n",shellscript,tab +1139,2043835,"slurm/jobs/franz/berlin/coinrun/mila_submission/50M_dataset/arch_ablations/coinrun_dynamics_no_cotraining.sh",1295,0,"",shellscript,selection_mouse +1140,2044751,"slurm/jobs/franz/berlin/coinrun/mila_submission/50M_dataset/arch_ablations/coinrun_dynamics_no_cotraining.sh",1296,0,"2",shellscript,content +1141,2044775,"slurm/jobs/franz/berlin/coinrun/mila_submission/50M_dataset/arch_ablations/coinrun_dynamics_no_cotraining.sh",1296,0,"",shellscript,selection_command +1142,2046462,"slurm/jobs/franz/berlin/coinrun/mila_submission/50M_dataset/arch_ablations/coinrun_dynamics_no_cotraining.sh",1295,0,"",shellscript,selection_command +1143,2046653,"slurm/jobs/franz/berlin/coinrun/mila_submission/50M_dataset/arch_ablations/coinrun_dynamics_no_cotraining.sh",1368,0,"\n",shellscript,content +1144,2047612,"slurm/jobs/franz/berlin/coinrun/mila_submission/50M_dataset/arch_ablations/coinrun_dynamics_no_cotraining.sh",1369,0,"array_records_dir=""/fast/project/HFMI_SynergyUnit/jafar_ws/data/coinrun/array_records_500m_seed_w_increment""",shellscript,content +1145,2048346,"slurm/jobs/franz/berlin/coinrun/mila_submission/50M_dataset/arch_ablations/coinrun_dynamics_no_cotraining.sh",1476,0,"",shellscript,selection_command +1146,2048495,"slurm/jobs/franz/berlin/coinrun/mila_submission/50M_dataset/arch_ablations/coinrun_dynamics_no_cotraining.sh",1357,0,"",shellscript,selection_command +1147,2048967,"slurm/jobs/franz/berlin/coinrun/mila_submission/50M_dataset/arch_ablations/coinrun_dynamics_no_cotraining.sh",1250,119,"",shellscript,content +1148,2054479,"slurm/jobs/franz/berlin/coinrun/mila_submission/50M_dataset/arch_ablations/coinrun_dynamics_no_cotraining.sh",1196,0,"",shellscript,selection_mouse +1149,2055235,"slurm/jobs/franz/berlin/coinrun/mila_submission/50M_dataset/arch_ablations/coinrun_dynamics_no_cotraining.sh",1196,1,"",shellscript,content +1150,2055520,"slurm/jobs/franz/berlin/coinrun/mila_submission/50M_dataset/arch_ablations/coinrun_dynamics_no_cotraining.sh",1247,0,"",shellscript,selection_command +1151,2055826,"slurm/jobs/franz/berlin/coinrun/mila_submission/50M_dataset/arch_ablations/coinrun_dynamics_no_cotraining.sh",1246,0,"",shellscript,selection_command +1152,2056132,"slurm/jobs/franz/berlin/coinrun/mila_submission/50M_dataset/arch_ablations/coinrun_dynamics_no_cotraining.sh",1246,0," ",shellscript,content +1153,2056133,"slurm/jobs/franz/berlin/coinrun/mila_submission/50M_dataset/arch_ablations/coinrun_dynamics_no_cotraining.sh",1247,0,"",shellscript,selection_keyboard +1154,2056371,"slurm/jobs/franz/berlin/coinrun/mila_submission/50M_dataset/arch_ablations/coinrun_dynamics_no_cotraining.sh",1247,0,"u",shellscript,content +1155,2056372,"slurm/jobs/franz/berlin/coinrun/mila_submission/50M_dataset/arch_ablations/coinrun_dynamics_no_cotraining.sh",1248,0,"",shellscript,selection_keyboard +1156,2056478,"slurm/jobs/franz/berlin/coinrun/mila_submission/50M_dataset/arch_ablations/coinrun_dynamics_no_cotraining.sh",1248,0,"n",shellscript,content 
+1157,2056479,"slurm/jobs/franz/berlin/coinrun/mila_submission/50M_dataset/arch_ablations/coinrun_dynamics_no_cotraining.sh",1249,0,"",shellscript,selection_keyboard +1158,2056647,"slurm/jobs/franz/berlin/coinrun/mila_submission/50M_dataset/arch_ablations/coinrun_dynamics_no_cotraining.sh",1249,0,"i",shellscript,content +1159,2056648,"slurm/jobs/franz/berlin/coinrun/mila_submission/50M_dataset/arch_ablations/coinrun_dynamics_no_cotraining.sh",1250,0,"",shellscript,selection_keyboard +1160,2056769,"slurm/jobs/franz/berlin/coinrun/mila_submission/50M_dataset/arch_ablations/coinrun_dynamics_no_cotraining.sh",1250,0,"c",shellscript,content +1161,2056770,"slurm/jobs/franz/berlin/coinrun/mila_submission/50M_dataset/arch_ablations/coinrun_dynamics_no_cotraining.sh",1251,0,"",shellscript,selection_keyboard +1162,2056904,"slurm/jobs/franz/berlin/coinrun/mila_submission/50M_dataset/arch_ablations/coinrun_dynamics_no_cotraining.sh",1251,0,"o",shellscript,content +1163,2056905,"slurm/jobs/franz/berlin/coinrun/mila_submission/50M_dataset/arch_ablations/coinrun_dynamics_no_cotraining.sh",1252,0,"",shellscript,selection_keyboard +1164,2056982,"slurm/jobs/franz/berlin/coinrun/mila_submission/50M_dataset/arch_ablations/coinrun_dynamics_no_cotraining.sh",1252,0,"r",shellscript,content +1165,2056983,"slurm/jobs/franz/berlin/coinrun/mila_submission/50M_dataset/arch_ablations/coinrun_dynamics_no_cotraining.sh",1253,0,"",shellscript,selection_keyboard +1166,2057083,"slurm/jobs/franz/berlin/coinrun/mila_submission/50M_dataset/arch_ablations/coinrun_dynamics_no_cotraining.sh",1253,0,"n",shellscript,content +1167,2057083,"slurm/jobs/franz/berlin/coinrun/mila_submission/50M_dataset/arch_ablations/coinrun_dynamics_no_cotraining.sh",1254,0,"",shellscript,selection_keyboard +1168,2057358,"slurm/jobs/franz/berlin/coinrun/mila_submission/50M_dataset/arch_ablations/coinrun_dynamics_no_cotraining.sh",1253,0,"",shellscript,selection_command +1169,2063038,"slurm/jobs/franz/berlin/coinrun/mila_submission/50M_dataset/arch_ablations/coinrun_dynamics_no_cotraining.sh",2321,0,"",shellscript,selection_mouse +1170,2063200,"slurm/jobs/franz/berlin/coinrun/mila_submission/50M_dataset/arch_ablations/coinrun_dynamics_no_cotraining.sh",2320,20,"tokenizer_checkpoint",shellscript,selection_mouse +1171,2063734,"slurm/jobs/franz/berlin/coinrun/mila_submission/50M_dataset/arch_ablations/coinrun_dynamics_no_cotraining.sh",2375,0,"",shellscript,selection_mouse +1172,2063868,"slurm/jobs/franz/berlin/coinrun/mila_submission/50M_dataset/arch_ablations/coinrun_dynamics_no_cotraining.sh",2373,14,"lam_checkpoint",shellscript,selection_mouse +1173,2076455,"slurm/jobs/franz/berlin/coinrun/mila_submission/50M_dataset/arch_ablations/coinrun_dynamics_no_cotraining.sh",1519,0,"",shellscript,selection_mouse +1174,2077567,"slurm/jobs/franz/berlin/coinrun/mila_submission/50M_dataset/arch_ablations/coinrun_dynamics_no_cotraining.sh",1513,0,"",shellscript,selection_mouse +1175,2078600,"slurm/jobs/franz/berlin/coinrun/mila_submission/50M_dataset/arch_ablations/coinrun_dynamics_no_cotraining.sh",1512,0,"",shellscript,selection_command +1176,2080123,"slurm/jobs/franz/berlin/coinrun/mila_submission/50M_dataset/arch_ablations/coinrun_dynamics_no_cotraining.sh",1512,118,"",shellscript,content +1177,2080176,"slurm/jobs/franz/berlin/coinrun/mila_submission/50M_dataset/arch_ablations/coinrun_dynamics_no_cotraining.sh",1511,0,"",shellscript,selection_command 
+1178,2080688,"slurm/jobs/franz/berlin/coinrun/mila_submission/50M_dataset/arch_ablations/coinrun_dynamics_no_cotraining.sh",1512,0,"",shellscript,selection_command +1179,2081262,"slurm/jobs/franz/berlin/coinrun/mila_submission/50M_dataset/arch_ablations/coinrun_dynamics_no_cotraining.sh",1512,0,"T",shellscript,content +1180,2081263,"slurm/jobs/franz/berlin/coinrun/mila_submission/50M_dataset/arch_ablations/coinrun_dynamics_no_cotraining.sh",1513,0,"",shellscript,selection_keyboard +1181,2081459,"slurm/jobs/franz/berlin/coinrun/mila_submission/50M_dataset/arch_ablations/coinrun_dynamics_no_cotraining.sh",1513,0,"O",shellscript,content +1182,2081459,"slurm/jobs/franz/berlin/coinrun/mila_submission/50M_dataset/arch_ablations/coinrun_dynamics_no_cotraining.sh",1514,0,"",shellscript,selection_keyboard +1183,2081556,"slurm/jobs/franz/berlin/coinrun/mila_submission/50M_dataset/arch_ablations/coinrun_dynamics_no_cotraining.sh",1514,0,"D",shellscript,content +1184,2081557,"slurm/jobs/franz/berlin/coinrun/mila_submission/50M_dataset/arch_ablations/coinrun_dynamics_no_cotraining.sh",1515,0,"",shellscript,selection_keyboard +1185,2081618,"slurm/jobs/franz/berlin/coinrun/mila_submission/50M_dataset/arch_ablations/coinrun_dynamics_no_cotraining.sh",1515,0,"O",shellscript,content +1186,2081619,"slurm/jobs/franz/berlin/coinrun/mila_submission/50M_dataset/arch_ablations/coinrun_dynamics_no_cotraining.sh",1516,0,"",shellscript,selection_keyboard +1187,2081918,"slurm/jobs/franz/berlin/coinrun/mila_submission/50M_dataset/arch_ablations/coinrun_dynamics_no_cotraining.sh",1516,0,"""",shellscript,content +1188,2081919,"slurm/jobs/franz/berlin/coinrun/mila_submission/50M_dataset/arch_ablations/coinrun_dynamics_no_cotraining.sh",1517,0,"",shellscript,selection_keyboard +1189,2082378,"slurm/jobs/franz/berlin/coinrun/mila_submission/50M_dataset/arch_ablations/coinrun_dynamics_no_cotraining.sh",1516,0,"",shellscript,selection_command +1190,2083758,"slurm/jobs/franz/berlin/coinrun/mila_submission/50M_dataset/arch_ablations/coinrun_dynamics_no_cotraining.sh",1536,0,"",shellscript,selection_command +1191,2084085,"slurm/jobs/franz/berlin/coinrun/mila_submission/50M_dataset/arch_ablations/coinrun_dynamics_no_cotraining.sh",1516,0,"",shellscript,selection_command +1192,2084257,"slurm/jobs/franz/berlin/coinrun/mila_submission/50M_dataset/arch_ablations/coinrun_dynamics_no_cotraining.sh",1384,0,"",shellscript,selection_command +1193,2084873,"slurm/jobs/franz/berlin/coinrun/mila_submission/50M_dataset/arch_ablations/coinrun_dynamics_no_cotraining.sh",1385,0,"",shellscript,selection_command +1194,2085081,"slurm/jobs/franz/berlin/coinrun/mila_submission/50M_dataset/arch_ablations/coinrun_dynamics_no_cotraining.sh",1386,0,"",shellscript,selection_command +1195,2085311,"slurm/jobs/franz/berlin/coinrun/mila_submission/50M_dataset/arch_ablations/coinrun_dynamics_no_cotraining.sh",1387,0,"",shellscript,selection_command +1196,2085742,"slurm/jobs/franz/berlin/coinrun/mila_submission/50M_dataset/arch_ablations/coinrun_dynamics_no_cotraining.sh",1386,0,"",shellscript,selection_command +1197,2085926,"slurm/jobs/franz/berlin/coinrun/mila_submission/50M_dataset/arch_ablations/coinrun_dynamics_no_cotraining.sh",1385,0,"",shellscript,selection_command +1198,2086318,"slurm/jobs/franz/berlin/coinrun/mila_submission/50M_dataset/arch_ablations/coinrun_dynamics_no_cotraining.sh",1384,0,"",shellscript,selection_command 
+1199,2086665,"slurm/jobs/franz/berlin/coinrun/mila_submission/50M_dataset/arch_ablations/coinrun_dynamics_no_cotraining.sh",1516,0,"",shellscript,selection_command +1200,2087317,"slurm/jobs/franz/berlin/coinrun/mila_submission/50M_dataset/arch_ablations/coinrun_dynamics_no_cotraining.sh",1515,1,"",shellscript,content +1201,2087446,"slurm/jobs/franz/berlin/coinrun/mila_submission/50M_dataset/arch_ablations/coinrun_dynamics_no_cotraining.sh",1514,1,"",shellscript,content +1202,2087568,"slurm/jobs/franz/berlin/coinrun/mila_submission/50M_dataset/arch_ablations/coinrun_dynamics_no_cotraining.sh",1513,1,"",shellscript,content +1203,2087941,"slurm/jobs/franz/berlin/coinrun/mila_submission/50M_dataset/arch_ablations/coinrun_dynamics_no_cotraining.sh",1512,1,"",shellscript,content +1204,2088396,"slurm/jobs/franz/berlin/coinrun/mila_submission/50M_dataset/arch_ablations/coinrun_dynamics_no_cotraining.sh",1513,0,"",shellscript,selection_command +1205,2088726,"slurm/jobs/franz/berlin/coinrun/mila_submission/50M_dataset/arch_ablations/coinrun_dynamics_no_cotraining.sh",1513,0," ",shellscript,content +1206,2088726,"slurm/jobs/franz/berlin/coinrun/mila_submission/50M_dataset/arch_ablations/coinrun_dynamics_no_cotraining.sh",1514,0,"",shellscript,selection_keyboard +1207,2089876,"slurm/jobs/franz/berlin/coinrun/mila_submission/50M_dataset/arch_ablations/coinrun_dynamics_no_cotraining.sh",1514,0,"#",shellscript,content +1208,2089877,"slurm/jobs/franz/berlin/coinrun/mila_submission/50M_dataset/arch_ablations/coinrun_dynamics_no_cotraining.sh",1515,0,"",shellscript,selection_keyboard +1209,2090028,"slurm/jobs/franz/berlin/coinrun/mila_submission/50M_dataset/arch_ablations/coinrun_dynamics_no_cotraining.sh",1515,0," ",shellscript,content +1210,2090030,"slurm/jobs/franz/berlin/coinrun/mila_submission/50M_dataset/arch_ablations/coinrun_dynamics_no_cotraining.sh",1516,0,"",shellscript,selection_keyboard +1211,2090250,"slurm/jobs/franz/berlin/coinrun/mila_submission/50M_dataset/arch_ablations/coinrun_dynamics_no_cotraining.sh",1516,0,"T",shellscript,content +1212,2090251,"slurm/jobs/franz/berlin/coinrun/mila_submission/50M_dataset/arch_ablations/coinrun_dynamics_no_cotraining.sh",1517,0,"",shellscript,selection_keyboard +1213,2090436,"slurm/jobs/franz/berlin/coinrun/mila_submission/50M_dataset/arch_ablations/coinrun_dynamics_no_cotraining.sh",1517,0,"O",shellscript,content +1214,2090437,"slurm/jobs/franz/berlin/coinrun/mila_submission/50M_dataset/arch_ablations/coinrun_dynamics_no_cotraining.sh",1518,0,"",shellscript,selection_keyboard +1215,2090545,"slurm/jobs/franz/berlin/coinrun/mila_submission/50M_dataset/arch_ablations/coinrun_dynamics_no_cotraining.sh",1518,0,"D",shellscript,content +1216,2090546,"slurm/jobs/franz/berlin/coinrun/mila_submission/50M_dataset/arch_ablations/coinrun_dynamics_no_cotraining.sh",1519,0,"",shellscript,selection_keyboard +1217,2090602,"slurm/jobs/franz/berlin/coinrun/mila_submission/50M_dataset/arch_ablations/coinrun_dynamics_no_cotraining.sh",1519,0,"O",shellscript,content +1218,2090602,"slurm/jobs/franz/berlin/coinrun/mila_submission/50M_dataset/arch_ablations/coinrun_dynamics_no_cotraining.sh",1520,0,"",shellscript,selection_keyboard +1219,2090789,"slurm/jobs/franz/berlin/coinrun/mila_submission/50M_dataset/arch_ablations/coinrun_dynamics_no_cotraining.sh",1520,0," ",shellscript,content +1220,2090790,"slurm/jobs/franz/berlin/coinrun/mila_submission/50M_dataset/arch_ablations/coinrun_dynamics_no_cotraining.sh",1521,0,"",shellscript,selection_keyboard 
+1221,2093006,"slurm/jobs/franz/berlin/coinrun/mila_submission/50M_dataset/arch_ablations/coinrun_dynamics_no_cotraining.sh",1521,0,"s",shellscript,content +1222,2093007,"slurm/jobs/franz/berlin/coinrun/mila_submission/50M_dataset/arch_ablations/coinrun_dynamics_no_cotraining.sh",1522,0,"",shellscript,selection_keyboard +1223,2093181,"slurm/jobs/franz/berlin/coinrun/mila_submission/50M_dataset/arch_ablations/coinrun_dynamics_no_cotraining.sh",1522,0,"e",shellscript,content +1224,2093182,"slurm/jobs/franz/berlin/coinrun/mila_submission/50M_dataset/arch_ablations/coinrun_dynamics_no_cotraining.sh",1523,0,"",shellscript,selection_keyboard +1225,2093278,"slurm/jobs/franz/berlin/coinrun/mila_submission/50M_dataset/arch_ablations/coinrun_dynamics_no_cotraining.sh",1523,0,"t",shellscript,content +1226,2093279,"slurm/jobs/franz/berlin/coinrun/mila_submission/50M_dataset/arch_ablations/coinrun_dynamics_no_cotraining.sh",1524,0,"",shellscript,selection_keyboard +1227,2093366,"slurm/jobs/franz/berlin/coinrun/mila_submission/50M_dataset/arch_ablations/coinrun_dynamics_no_cotraining.sh",1524,0," ",shellscript,content +1228,2093367,"slurm/jobs/franz/berlin/coinrun/mila_submission/50M_dataset/arch_ablations/coinrun_dynamics_no_cotraining.sh",1525,0,"",shellscript,selection_keyboard +1229,2093942,"slurm/jobs/franz/berlin/coinrun/mila_submission/50M_dataset/arch_ablations/coinrun_dynamics_no_cotraining.sh",1525,0,"l",shellscript,content +1230,2093943,"slurm/jobs/franz/berlin/coinrun/mila_submission/50M_dataset/arch_ablations/coinrun_dynamics_no_cotraining.sh",1526,0,"",shellscript,selection_keyboard +1231,2094117,"slurm/jobs/franz/berlin/coinrun/mila_submission/50M_dataset/arch_ablations/coinrun_dynamics_no_cotraining.sh",1526,0,"a",shellscript,content +1232,2094118,"slurm/jobs/franz/berlin/coinrun/mila_submission/50M_dataset/arch_ablations/coinrun_dynamics_no_cotraining.sh",1527,0,"",shellscript,selection_keyboard +1233,2094212,"slurm/jobs/franz/berlin/coinrun/mila_submission/50M_dataset/arch_ablations/coinrun_dynamics_no_cotraining.sh",1527,0,"m",shellscript,content +1234,2094213,"slurm/jobs/franz/berlin/coinrun/mila_submission/50M_dataset/arch_ablations/coinrun_dynamics_no_cotraining.sh",1528,0,"",shellscript,selection_keyboard +1235,2094334,"slurm/jobs/franz/berlin/coinrun/mila_submission/50M_dataset/arch_ablations/coinrun_dynamics_no_cotraining.sh",1528,0," ",shellscript,content +1236,2094334,"slurm/jobs/franz/berlin/coinrun/mila_submission/50M_dataset/arch_ablations/coinrun_dynamics_no_cotraining.sh",1529,0,"",shellscript,selection_keyboard +1237,2094888,"slurm/jobs/franz/berlin/coinrun/mila_submission/50M_dataset/arch_ablations/coinrun_dynamics_no_cotraining.sh",1529,0,"c",shellscript,content +1238,2094889,"slurm/jobs/franz/berlin/coinrun/mila_submission/50M_dataset/arch_ablations/coinrun_dynamics_no_cotraining.sh",1530,0,"",shellscript,selection_keyboard +1239,2095076,"slurm/jobs/franz/berlin/coinrun/mila_submission/50M_dataset/arch_ablations/coinrun_dynamics_no_cotraining.sh",1530,0,"k",shellscript,content +1240,2095077,"slurm/jobs/franz/berlin/coinrun/mila_submission/50M_dataset/arch_ablations/coinrun_dynamics_no_cotraining.sh",1531,0,"",shellscript,selection_keyboard +1241,2095271,"slurm/jobs/franz/berlin/coinrun/mila_submission/50M_dataset/arch_ablations/coinrun_dynamics_no_cotraining.sh",1531,0,"p",shellscript,content +1242,2095271,"slurm/jobs/franz/berlin/coinrun/mila_submission/50M_dataset/arch_ablations/coinrun_dynamics_no_cotraining.sh",1532,0,"",shellscript,selection_keyboard 
+1243,2095491,"slurm/jobs/franz/berlin/coinrun/mila_submission/50M_dataset/arch_ablations/coinrun_dynamics_no_cotraining.sh",1532,0,"t",shellscript,content +1244,2095492,"slurm/jobs/franz/berlin/coinrun/mila_submission/50M_dataset/arch_ablations/coinrun_dynamics_no_cotraining.sh",1533,0,"",shellscript,selection_keyboard +1245,2095757,"slurm/jobs/franz/berlin/coinrun/mila_submission/50M_dataset/arch_ablations/coinrun_dynamics_no_cotraining.sh",1533,0,"d",shellscript,content +1246,2095758,"slurm/jobs/franz/berlin/coinrun/mila_submission/50M_dataset/arch_ablations/coinrun_dynamics_no_cotraining.sh",1534,0,"",shellscript,selection_keyboard +1247,2095852,"slurm/jobs/franz/berlin/coinrun/mila_submission/50M_dataset/arch_ablations/coinrun_dynamics_no_cotraining.sh",1534,0,"i",shellscript,content +1248,2095853,"slurm/jobs/franz/berlin/coinrun/mila_submission/50M_dataset/arch_ablations/coinrun_dynamics_no_cotraining.sh",1535,0,"",shellscript,selection_keyboard +1249,2095953,"slurm/jobs/franz/berlin/coinrun/mila_submission/50M_dataset/arch_ablations/coinrun_dynamics_no_cotraining.sh",1535,0,"r",shellscript,content +1250,2095954,"slurm/jobs/franz/berlin/coinrun/mila_submission/50M_dataset/arch_ablations/coinrun_dynamics_no_cotraining.sh",1536,0,"",shellscript,selection_keyboard +1251,2096212,"slurm/jobs/franz/berlin/coinrun/mila_submission/50M_dataset/arch_ablations/coinrun_dynamics_no_cotraining.sh",1535,1,"",shellscript,content +1252,2096365,"slurm/jobs/franz/berlin/coinrun/mila_submission/50M_dataset/arch_ablations/coinrun_dynamics_no_cotraining.sh",1534,1,"",shellscript,content +1253,2096502,"slurm/jobs/franz/berlin/coinrun/mila_submission/50M_dataset/arch_ablations/coinrun_dynamics_no_cotraining.sh",1533,1,"",shellscript,content +1254,2096779,"slurm/jobs/franz/berlin/coinrun/mila_submission/50M_dataset/arch_ablations/coinrun_dynamics_no_cotraining.sh",1533,0,"_",shellscript,content +1255,2096780,"slurm/jobs/franz/berlin/coinrun/mila_submission/50M_dataset/arch_ablations/coinrun_dynamics_no_cotraining.sh",1534,0,"",shellscript,selection_keyboard +1256,2096985,"slurm/jobs/franz/berlin/coinrun/mila_submission/50M_dataset/arch_ablations/coinrun_dynamics_no_cotraining.sh",1534,0,"d",shellscript,content +1257,2096986,"slurm/jobs/franz/berlin/coinrun/mila_submission/50M_dataset/arch_ablations/coinrun_dynamics_no_cotraining.sh",1535,0,"",shellscript,selection_keyboard +1258,2097070,"slurm/jobs/franz/berlin/coinrun/mila_submission/50M_dataset/arch_ablations/coinrun_dynamics_no_cotraining.sh",1535,0,"i",shellscript,content +1259,2097071,"slurm/jobs/franz/berlin/coinrun/mila_submission/50M_dataset/arch_ablations/coinrun_dynamics_no_cotraining.sh",1536,0,"",shellscript,selection_keyboard +1260,2097148,"slurm/jobs/franz/berlin/coinrun/mila_submission/50M_dataset/arch_ablations/coinrun_dynamics_no_cotraining.sh",1536,0,"r",shellscript,content +1261,2097149,"slurm/jobs/franz/berlin/coinrun/mila_submission/50M_dataset/arch_ablations/coinrun_dynamics_no_cotraining.sh",1537,0,"",shellscript,selection_keyboard +1262,2097624,"slurm/jobs/franz/berlin/coinrun/mila_submission/50M_dataset/arch_ablations/coinrun_dynamics_no_cotraining.sh",1536,0,"",shellscript,selection_command +1263,2097748,"slurm/jobs/franz/berlin/coinrun/mila_submission/50M_dataset/arch_ablations/coinrun_dynamics_no_cotraining.sh",1404,0,"",shellscript,selection_command +1264,2098027,"slurm/jobs/franz/berlin/coinrun/mila_submission/50M_dataset/arch_ablations/coinrun_dynamics_no_cotraining.sh",1403,0,"",shellscript,selection_command 
+1265,2098158,"slurm/jobs/franz/berlin/coinrun/mila_submission/50M_dataset/arch_ablations/coinrun_dynamics_no_cotraining.sh",1402,0,"",shellscript,selection_command +1266,2098680,"slurm/jobs/franz/berlin/coinrun/mila_submission/50M_dataset/arch_ablations/coinrun_dynamics_no_cotraining.sh",1401,0,"",shellscript,selection_command +1267,2098693,"slurm/jobs/franz/berlin/coinrun/mila_submission/50M_dataset/arch_ablations/coinrun_dynamics_no_cotraining.sh",1400,0,"",shellscript,selection_command +1268,2098804,"slurm/jobs/franz/berlin/coinrun/mila_submission/50M_dataset/arch_ablations/coinrun_dynamics_no_cotraining.sh",1399,0,"",shellscript,selection_command +1269,2098959,"slurm/jobs/franz/berlin/coinrun/mila_submission/50M_dataset/arch_ablations/coinrun_dynamics_no_cotraining.sh",1398,0,"",shellscript,selection_command +1270,2098959,"slurm/jobs/franz/berlin/coinrun/mila_submission/50M_dataset/arch_ablations/coinrun_dynamics_no_cotraining.sh",1397,0,"",shellscript,selection_command +1271,2098960,"slurm/jobs/franz/berlin/coinrun/mila_submission/50M_dataset/arch_ablations/coinrun_dynamics_no_cotraining.sh",1396,0,"",shellscript,selection_command +1272,2099014,"slurm/jobs/franz/berlin/coinrun/mila_submission/50M_dataset/arch_ablations/coinrun_dynamics_no_cotraining.sh",1395,0,"",shellscript,selection_command +1273,2099015,"slurm/jobs/franz/berlin/coinrun/mila_submission/50M_dataset/arch_ablations/coinrun_dynamics_no_cotraining.sh",1394,0,"",shellscript,selection_command +1274,2099015,"slurm/jobs/franz/berlin/coinrun/mila_submission/50M_dataset/arch_ablations/coinrun_dynamics_no_cotraining.sh",1393,0,"",shellscript,selection_command +1275,2099015,"slurm/jobs/franz/berlin/coinrun/mila_submission/50M_dataset/arch_ablations/coinrun_dynamics_no_cotraining.sh",1392,0,"",shellscript,selection_command +1276,2099016,"slurm/jobs/franz/berlin/coinrun/mila_submission/50M_dataset/arch_ablations/coinrun_dynamics_no_cotraining.sh",1391,0,"",shellscript,selection_command +1277,2099056,"slurm/jobs/franz/berlin/coinrun/mila_submission/50M_dataset/arch_ablations/coinrun_dynamics_no_cotraining.sh",1390,0,"",shellscript,selection_command +1278,2099103,"slurm/jobs/franz/berlin/coinrun/mila_submission/50M_dataset/arch_ablations/coinrun_dynamics_no_cotraining.sh",1389,0,"",shellscript,selection_command +1279,2099263,"slurm/jobs/franz/berlin/coinrun/mila_submission/50M_dataset/arch_ablations/coinrun_dynamics_no_cotraining.sh",1388,0,"",shellscript,selection_command +1280,2099408,"slurm/jobs/franz/berlin/coinrun/mila_submission/50M_dataset/arch_ablations/coinrun_dynamics_no_cotraining.sh",1387,0,"",shellscript,selection_command +1281,2099591,"slurm/jobs/franz/berlin/coinrun/mila_submission/50M_dataset/arch_ablations/coinrun_dynamics_no_cotraining.sh",1386,0,"",shellscript,selection_command +1282,2100601,"slurm/jobs/franz/berlin/coinrun/mila_submission/50M_dataset/arch_ablations/coinrun_dynamics_no_cotraining.sh",1386,111,"",shellscript,content +1283,2100606,"slurm/jobs/franz/berlin/coinrun/mila_submission/50M_dataset/arch_ablations/coinrun_dynamics_no_cotraining.sh",1385,0,"",shellscript,selection_command +1284,2100941,"slurm/jobs/franz/berlin/coinrun/mila_submission/50M_dataset/arch_ablations/coinrun_dynamics_no_cotraining.sh",1386,0,"",shellscript,selection_command +1285,2101287,"slurm/jobs/franz/berlin/coinrun/mila_submission/50M_dataset/arch_ablations/coinrun_dynamics_no_cotraining.sh",1386,0,"""",shellscript,content 
+1286,2101288,"slurm/jobs/franz/berlin/coinrun/mila_submission/50M_dataset/arch_ablations/coinrun_dynamics_no_cotraining.sh",1387,0,"",shellscript,selection_keyboard +1287,2103326,"slurm/jobs/franz/berlin/coinrun/mila_submission/50M_dataset/arch_ablations/coinrun_dynamics_no_cotraining.sh",1387,0," ",shellscript,content +1288,2103327,"slurm/jobs/franz/berlin/coinrun/mila_submission/50M_dataset/arch_ablations/coinrun_dynamics_no_cotraining.sh",1388,0,"",shellscript,selection_keyboard +1289,2104171,"slurm/jobs/franz/berlin/coinrun/mila_submission/50M_dataset/arch_ablations/coinrun_dynamics_no_cotraining.sh",1388,0,"#",shellscript,content +1290,2104172,"slurm/jobs/franz/berlin/coinrun/mila_submission/50M_dataset/arch_ablations/coinrun_dynamics_no_cotraining.sh",1389,0,"",shellscript,selection_keyboard +1291,2104277,"slurm/jobs/franz/berlin/coinrun/mila_submission/50M_dataset/arch_ablations/coinrun_dynamics_no_cotraining.sh",1389,0," ",shellscript,content +1292,2104277,"slurm/jobs/franz/berlin/coinrun/mila_submission/50M_dataset/arch_ablations/coinrun_dynamics_no_cotraining.sh",1390,0,"",shellscript,selection_keyboard +1293,2104583,"slurm/jobs/franz/berlin/coinrun/mila_submission/50M_dataset/arch_ablations/coinrun_dynamics_no_cotraining.sh",1390,0,"T",shellscript,content +1294,2104584,"slurm/jobs/franz/berlin/coinrun/mila_submission/50M_dataset/arch_ablations/coinrun_dynamics_no_cotraining.sh",1391,0,"",shellscript,selection_keyboard +1295,2104771,"slurm/jobs/franz/berlin/coinrun/mila_submission/50M_dataset/arch_ablations/coinrun_dynamics_no_cotraining.sh",1391,0,"O",shellscript,content +1296,2104772,"slurm/jobs/franz/berlin/coinrun/mila_submission/50M_dataset/arch_ablations/coinrun_dynamics_no_cotraining.sh",1392,0,"",shellscript,selection_keyboard +1297,2104895,"slurm/jobs/franz/berlin/coinrun/mila_submission/50M_dataset/arch_ablations/coinrun_dynamics_no_cotraining.sh",1392,0,"D",shellscript,content +1298,2104896,"slurm/jobs/franz/berlin/coinrun/mila_submission/50M_dataset/arch_ablations/coinrun_dynamics_no_cotraining.sh",1393,0,"",shellscript,selection_keyboard +1299,2104940,"slurm/jobs/franz/berlin/coinrun/mila_submission/50M_dataset/arch_ablations/coinrun_dynamics_no_cotraining.sh",1393,0,"O",shellscript,content +1300,2104940,"slurm/jobs/franz/berlin/coinrun/mila_submission/50M_dataset/arch_ablations/coinrun_dynamics_no_cotraining.sh",1394,0,"",shellscript,selection_keyboard +1301,2105036,"slurm/jobs/franz/berlin/coinrun/mila_submission/50M_dataset/arch_ablations/coinrun_dynamics_no_cotraining.sh",1394,0," ",shellscript,content +1302,2105036,"slurm/jobs/franz/berlin/coinrun/mila_submission/50M_dataset/arch_ablations/coinrun_dynamics_no_cotraining.sh",1395,0,"",shellscript,selection_keyboard +1303,2105327,"slurm/jobs/franz/berlin/coinrun/mila_submission/50M_dataset/arch_ablations/coinrun_dynamics_no_cotraining.sh",1395,0,"s",shellscript,content +1304,2105328,"slurm/jobs/franz/berlin/coinrun/mila_submission/50M_dataset/arch_ablations/coinrun_dynamics_no_cotraining.sh",1396,0,"",shellscript,selection_keyboard +1305,2105530,"slurm/jobs/franz/berlin/coinrun/mila_submission/50M_dataset/arch_ablations/coinrun_dynamics_no_cotraining.sh",1396,0,"e",shellscript,content +1306,2105531,"slurm/jobs/franz/berlin/coinrun/mila_submission/50M_dataset/arch_ablations/coinrun_dynamics_no_cotraining.sh",1397,0,"",shellscript,selection_keyboard +1307,2105602,"slurm/jobs/franz/berlin/coinrun/mila_submission/50M_dataset/arch_ablations/coinrun_dynamics_no_cotraining.sh",1397,0,"t",shellscript,content 
+1308,2105602,"slurm/jobs/franz/berlin/coinrun/mila_submission/50M_dataset/arch_ablations/coinrun_dynamics_no_cotraining.sh",1398,0,"",shellscript,selection_keyboard +1309,2105703,"slurm/jobs/franz/berlin/coinrun/mila_submission/50M_dataset/arch_ablations/coinrun_dynamics_no_cotraining.sh",1398,0," ",shellscript,content +1310,2105704,"slurm/jobs/franz/berlin/coinrun/mila_submission/50M_dataset/arch_ablations/coinrun_dynamics_no_cotraining.sh",1399,0,"",shellscript,selection_keyboard +1311,2106283,"slurm/jobs/franz/berlin/coinrun/mila_submission/50M_dataset/arch_ablations/coinrun_dynamics_no_cotraining.sh",1399,0,"t",shellscript,content +1312,2106284,"slurm/jobs/franz/berlin/coinrun/mila_submission/50M_dataset/arch_ablations/coinrun_dynamics_no_cotraining.sh",1400,0,"",shellscript,selection_keyboard +1313,2106336,"slurm/jobs/franz/berlin/coinrun/mila_submission/50M_dataset/arch_ablations/coinrun_dynamics_no_cotraining.sh",1400,0,"o",shellscript,content +1314,2106336,"slurm/jobs/franz/berlin/coinrun/mila_submission/50M_dataset/arch_ablations/coinrun_dynamics_no_cotraining.sh",1401,0,"",shellscript,selection_keyboard +1315,2106423,"slurm/jobs/franz/berlin/coinrun/mila_submission/50M_dataset/arch_ablations/coinrun_dynamics_no_cotraining.sh",1401,0,"k",shellscript,content +1316,2106424,"slurm/jobs/franz/berlin/coinrun/mila_submission/50M_dataset/arch_ablations/coinrun_dynamics_no_cotraining.sh",1402,0,"",shellscript,selection_keyboard +1317,2106513,"slurm/jobs/franz/berlin/coinrun/mila_submission/50M_dataset/arch_ablations/coinrun_dynamics_no_cotraining.sh",1402,0,"e",shellscript,content +1318,2106514,"slurm/jobs/franz/berlin/coinrun/mila_submission/50M_dataset/arch_ablations/coinrun_dynamics_no_cotraining.sh",1403,0,"",shellscript,selection_keyboard +1319,2106638,"slurm/jobs/franz/berlin/coinrun/mila_submission/50M_dataset/arch_ablations/coinrun_dynamics_no_cotraining.sh",1403,0,"n",shellscript,content +1320,2106638,"slurm/jobs/franz/berlin/coinrun/mila_submission/50M_dataset/arch_ablations/coinrun_dynamics_no_cotraining.sh",1404,0,"",shellscript,selection_keyboard +1321,2107113,"slurm/jobs/franz/berlin/coinrun/mila_submission/50M_dataset/arch_ablations/coinrun_dynamics_no_cotraining.sh",1404,0,"i",shellscript,content +1322,2107114,"slurm/jobs/franz/berlin/coinrun/mila_submission/50M_dataset/arch_ablations/coinrun_dynamics_no_cotraining.sh",1405,0,"",shellscript,selection_keyboard +1323,2107315,"slurm/jobs/franz/berlin/coinrun/mila_submission/50M_dataset/arch_ablations/coinrun_dynamics_no_cotraining.sh",1405,0,"t",shellscript,content +1324,2107316,"slurm/jobs/franz/berlin/coinrun/mila_submission/50M_dataset/arch_ablations/coinrun_dynamics_no_cotraining.sh",1406,0,"",shellscript,selection_keyboard +1325,2107410,"slurm/jobs/franz/berlin/coinrun/mila_submission/50M_dataset/arch_ablations/coinrun_dynamics_no_cotraining.sh",1406,0,"e",shellscript,content +1326,2107411,"slurm/jobs/franz/berlin/coinrun/mila_submission/50M_dataset/arch_ablations/coinrun_dynamics_no_cotraining.sh",1407,0,"",shellscript,selection_keyboard +1327,2107464,"slurm/jobs/franz/berlin/coinrun/mila_submission/50M_dataset/arch_ablations/coinrun_dynamics_no_cotraining.sh",1407,0,"r",shellscript,content +1328,2107465,"slurm/jobs/franz/berlin/coinrun/mila_submission/50M_dataset/arch_ablations/coinrun_dynamics_no_cotraining.sh",1408,0,"",shellscript,selection_keyboard +1329,2107560,"slurm/jobs/franz/berlin/coinrun/mila_submission/50M_dataset/arch_ablations/coinrun_dynamics_no_cotraining.sh",1408,0," ",shellscript,content 
+1330,2107561,"slurm/jobs/franz/berlin/coinrun/mila_submission/50M_dataset/arch_ablations/coinrun_dynamics_no_cotraining.sh",1409,0,"",shellscript,selection_keyboard +1331,2107931,"slurm/jobs/franz/berlin/coinrun/mila_submission/50M_dataset/arch_ablations/coinrun_dynamics_no_cotraining.sh",1409,0,"c",shellscript,content +1332,2107931,"slurm/jobs/franz/berlin/coinrun/mila_submission/50M_dataset/arch_ablations/coinrun_dynamics_no_cotraining.sh",1410,0,"",shellscript,selection_keyboard +1333,2108115,"slurm/jobs/franz/berlin/coinrun/mila_submission/50M_dataset/arch_ablations/coinrun_dynamics_no_cotraining.sh",1410,0,"j",shellscript,content +1334,2108116,"slurm/jobs/franz/berlin/coinrun/mila_submission/50M_dataset/arch_ablations/coinrun_dynamics_no_cotraining.sh",1411,0,"",shellscript,selection_keyboard +1335,2108345,"slurm/jobs/franz/berlin/coinrun/mila_submission/50M_dataset/arch_ablations/coinrun_dynamics_no_cotraining.sh",1411,0,"p",shellscript,content +1336,2108346,"slurm/jobs/franz/berlin/coinrun/mila_submission/50M_dataset/arch_ablations/coinrun_dynamics_no_cotraining.sh",1412,0,"",shellscript,selection_keyboard +1337,2108517,"slurm/jobs/franz/berlin/coinrun/mila_submission/50M_dataset/arch_ablations/coinrun_dynamics_no_cotraining.sh",1412,0,"t",shellscript,content +1338,2108518,"slurm/jobs/franz/berlin/coinrun/mila_submission/50M_dataset/arch_ablations/coinrun_dynamics_no_cotraining.sh",1413,0,"",shellscript,selection_keyboard +1339,2108760,"slurm/jobs/franz/berlin/coinrun/mila_submission/50M_dataset/arch_ablations/coinrun_dynamics_no_cotraining.sh",1412,1,"",shellscript,content +1340,2108864,"slurm/jobs/franz/berlin/coinrun/mila_submission/50M_dataset/arch_ablations/coinrun_dynamics_no_cotraining.sh",1411,1,"",shellscript,content +1341,2108990,"slurm/jobs/franz/berlin/coinrun/mila_submission/50M_dataset/arch_ablations/coinrun_dynamics_no_cotraining.sh",1410,1,"",shellscript,content +1342,2109157,"slurm/jobs/franz/berlin/coinrun/mila_submission/50M_dataset/arch_ablations/coinrun_dynamics_no_cotraining.sh",1410,0,"k",shellscript,content +1343,2109158,"slurm/jobs/franz/berlin/coinrun/mila_submission/50M_dataset/arch_ablations/coinrun_dynamics_no_cotraining.sh",1411,0,"",shellscript,selection_keyboard +1344,2109406,"slurm/jobs/franz/berlin/coinrun/mila_submission/50M_dataset/arch_ablations/coinrun_dynamics_no_cotraining.sh",1411,0,"p",shellscript,content +1345,2109407,"slurm/jobs/franz/berlin/coinrun/mila_submission/50M_dataset/arch_ablations/coinrun_dynamics_no_cotraining.sh",1412,0,"",shellscript,selection_keyboard +1346,2109671,"slurm/jobs/franz/berlin/coinrun/mila_submission/50M_dataset/arch_ablations/coinrun_dynamics_no_cotraining.sh",1412,0,"t",shellscript,content +1347,2109672,"slurm/jobs/franz/berlin/coinrun/mila_submission/50M_dataset/arch_ablations/coinrun_dynamics_no_cotraining.sh",1413,0,"",shellscript,selection_keyboard +1348,2109875,"slurm/jobs/franz/berlin/coinrun/mila_submission/50M_dataset/arch_ablations/coinrun_dynamics_no_cotraining.sh",1413,0," ",shellscript,content +1349,2109876,"slurm/jobs/franz/berlin/coinrun/mila_submission/50M_dataset/arch_ablations/coinrun_dynamics_no_cotraining.sh",1414,0,"",shellscript,selection_keyboard +1350,2110004,"slurm/jobs/franz/berlin/coinrun/mila_submission/50M_dataset/arch_ablations/coinrun_dynamics_no_cotraining.sh",1414,0,"d",shellscript,content +1351,2110004,"slurm/jobs/franz/berlin/coinrun/mila_submission/50M_dataset/arch_ablations/coinrun_dynamics_no_cotraining.sh",1415,0,"",shellscript,selection_keyboard 
+1352,2110072,"slurm/jobs/franz/berlin/coinrun/mila_submission/50M_dataset/arch_ablations/coinrun_dynamics_no_cotraining.sh",1415,0,"i",shellscript,content +1353,2110072,"slurm/jobs/franz/berlin/coinrun/mila_submission/50M_dataset/arch_ablations/coinrun_dynamics_no_cotraining.sh",1416,0,"",shellscript,selection_keyboard +1354,2110227,"slurm/jobs/franz/berlin/coinrun/mila_submission/50M_dataset/arch_ablations/coinrun_dynamics_no_cotraining.sh",1416,0,"r",shellscript,content +1355,2110228,"slurm/jobs/franz/berlin/coinrun/mila_submission/50M_dataset/arch_ablations/coinrun_dynamics_no_cotraining.sh",1417,0,"",shellscript,selection_keyboard +1356,2110894,"slurm/jobs/franz/berlin/coinrun/mila_submission/50M_dataset/arch_ablations/coinrun_dynamics_no_cotraining.sh",1416,0,"",shellscript,selection_command +1357,2113464,"slurm/jobs/franz/berlin/coinrun/mila_submission/50M_dataset/arch_ablations/coinrun_dynamics_gt_actions.sh",0,0,"",shellscript,tab +1358,2114759,"slurm/jobs/franz/berlin/coinrun/mila_submission/50M_dataset/arch_ablations/coinrun_dynamics_gt_actions.sh",1369,0,"",shellscript,selection_mouse +1359,2115799,"slurm/jobs/franz/berlin/coinrun/mila_submission/50M_dataset/arch_ablations/coinrun_dynamics_gt_actions.sh",1491,0,"\ntokenizer_ckpt_dir="""" # TODO set tokeniter ckpt dir",shellscript,content +1360,2115841,"slurm/jobs/franz/berlin/coinrun/mila_submission/50M_dataset/arch_ablations/coinrun_dynamics_gt_actions.sh",1492,0,"",shellscript,selection_command +1361,2116059,"slurm/jobs/franz/berlin/coinrun/mila_submission/50M_dataset/arch_ablations/coinrun_dynamics_gt_actions.sh",1360,0,"",shellscript,selection_command +1362,2116731,"slurm/jobs/franz/berlin/coinrun/mila_submission/50M_dataset/arch_ablations/coinrun_dynamics_gt_actions.sh",1360,132,"",shellscript,content +1363,2119441,"slurm/jobs/franz/berlin/coinrun/mila_submission/50M_dataset/arch_ablations/coinrun_dynamics_ffn_dim_ablation.sh",0,0,"",shellscript,tab +1364,2121416,"slurm/jobs/franz/berlin/coinrun/mila_submission/50M_dataset/arch_ablations/coinrun_dynamics_ffn_dim_ablation.sh",1370,0,"",shellscript,selection_mouse +1365,2122468,"slurm/jobs/franz/berlin/coinrun/mila_submission/50M_dataset/arch_ablations/coinrun_dynamics_ffn_dim_ablation.sh",1494,0,"\ntokenizer_ckpt_dir="""" # TODO set tokeniter ckpt dir",shellscript,content +1366,2122512,"slurm/jobs/franz/berlin/coinrun/mila_submission/50M_dataset/arch_ablations/coinrun_dynamics_ffn_dim_ablation.sh",1495,0,"",shellscript,selection_command +1367,2122906,"slurm/jobs/franz/berlin/coinrun/mila_submission/50M_dataset/arch_ablations/coinrun_dynamics_ffn_dim_ablation.sh",1363,0,"",shellscript,selection_command +1368,2123268,"slurm/jobs/franz/berlin/coinrun/mila_submission/50M_dataset/arch_ablations/coinrun_dynamics_ffn_dim_ablation.sh",1363,132,"",shellscript,content +1369,2125279,"slurm/jobs/franz/berlin/coinrun/mila_submission/50M_dataset/arch_ablations/coinrun_dynamics_causal.sh",0,0,"",shellscript,tab +1370,2127248,"slurm/jobs/franz/berlin/coinrun/mila_submission/50M_dataset/arch_ablations/coinrun_dynamics_causal.sh",1365,0,"",shellscript,selection_mouse +1371,2128032,"slurm/jobs/franz/berlin/coinrun/mila_submission/50M_dataset/arch_ablations/coinrun_dynamics_causal.sh",1483,0,"\ntokenizer_ckpt_dir=""/fast/project/HFMI_SynergyUnit/jafar_ws/checkpoints/coinrun/tokenizer/tokenizer_coinrun_mila_submission_29736/""",shellscript,content 
+1372,2128076,"slurm/jobs/franz/berlin/coinrun/mila_submission/50M_dataset/arch_ablations/coinrun_dynamics_causal.sh",1484,0,"",shellscript,selection_command +1373,2128321,"slurm/jobs/franz/berlin/coinrun/mila_submission/50M_dataset/arch_ablations/coinrun_dynamics_causal.sh",1352,0,"",shellscript,selection_command +1374,2130229,"slurm/jobs/franz/berlin/coinrun/mila_submission/50M_dataset/arch_ablations/coinrun_dynamics_causal.sh",1352,132,"",shellscript,content +1375,2130815,"slurm/jobs/franz/berlin/coinrun/mila_submission/50M_dataset/arch_ablations/coinrun_dynamics_causal.sh",1353,0,"",shellscript,selection_command +1376,2131357,"slurm/jobs/franz/berlin/coinrun/mila_submission/50M_dataset/arch_ablations/coinrun_dynamics_causal.sh",1354,0,"",shellscript,selection_command +1377,2131358,"slurm/jobs/franz/berlin/coinrun/mila_submission/50M_dataset/arch_ablations/coinrun_dynamics_causal.sh",1355,0,"",shellscript,selection_command +1378,2131401,"slurm/jobs/franz/berlin/coinrun/mila_submission/50M_dataset/arch_ablations/coinrun_dynamics_causal.sh",1356,0,"",shellscript,selection_command +1379,2131427,"slurm/jobs/franz/berlin/coinrun/mila_submission/50M_dataset/arch_ablations/coinrun_dynamics_causal.sh",1357,0,"",shellscript,selection_command +1380,2131444,"slurm/jobs/franz/berlin/coinrun/mila_submission/50M_dataset/arch_ablations/coinrun_dynamics_causal.sh",1358,0,"",shellscript,selection_command +1381,2131486,"slurm/jobs/franz/berlin/coinrun/mila_submission/50M_dataset/arch_ablations/coinrun_dynamics_causal.sh",1359,0,"",shellscript,selection_command +1382,2131512,"slurm/jobs/franz/berlin/coinrun/mila_submission/50M_dataset/arch_ablations/coinrun_dynamics_causal.sh",1360,0,"",shellscript,selection_command +1383,2131527,"slurm/jobs/franz/berlin/coinrun/mila_submission/50M_dataset/arch_ablations/coinrun_dynamics_causal.sh",1361,0,"",shellscript,selection_command +1384,2131606,"slurm/jobs/franz/berlin/coinrun/mila_submission/50M_dataset/arch_ablations/coinrun_dynamics_causal.sh",1362,0,"",shellscript,selection_command +1385,2131607,"slurm/jobs/franz/berlin/coinrun/mila_submission/50M_dataset/arch_ablations/coinrun_dynamics_causal.sh",1363,0,"",shellscript,selection_command +1386,2131621,"slurm/jobs/franz/berlin/coinrun/mila_submission/50M_dataset/arch_ablations/coinrun_dynamics_causal.sh",1364,0,"",shellscript,selection_command +1387,2131653,"slurm/jobs/franz/berlin/coinrun/mila_submission/50M_dataset/arch_ablations/coinrun_dynamics_causal.sh",1365,0,"",shellscript,selection_command +1388,2131698,"slurm/jobs/franz/berlin/coinrun/mila_submission/50M_dataset/arch_ablations/coinrun_dynamics_causal.sh",1366,0,"",shellscript,selection_command +1389,2131709,"slurm/jobs/franz/berlin/coinrun/mila_submission/50M_dataset/arch_ablations/coinrun_dynamics_causal.sh",1367,0,"",shellscript,selection_command +1390,2131790,"slurm/jobs/franz/berlin/coinrun/mila_submission/50M_dataset/arch_ablations/coinrun_dynamics_causal.sh",1368,0,"",shellscript,selection_command +1391,2131822,"slurm/jobs/franz/berlin/coinrun/mila_submission/50M_dataset/arch_ablations/coinrun_dynamics_causal.sh",1369,0,"",shellscript,selection_command +1392,2131822,"slurm/jobs/franz/berlin/coinrun/mila_submission/50M_dataset/arch_ablations/coinrun_dynamics_causal.sh",1370,0,"",shellscript,selection_command +1393,2131833,"slurm/jobs/franz/berlin/coinrun/mila_submission/50M_dataset/arch_ablations/coinrun_dynamics_causal.sh",1371,0,"",shellscript,selection_command 
+1394,2132155,"slurm/jobs/franz/berlin/coinrun/mila_submission/50M_dataset/arch_ablations/coinrun_dynamics_causal.sh",1372,0,"",shellscript,selection_command +1395,2135859,"slurm/jobs/franz/berlin/coinrun/mila_submission/50M_dataset/arch_ablations/coinrun_dynamics_causal.sh",1372,111,"",shellscript,content +1396,2135915,"slurm/jobs/franz/berlin/coinrun/mila_submission/50M_dataset/arch_ablations/coinrun_dynamics_causal.sh",1371,0,"",shellscript,selection_command +1397,2136271,"slurm/jobs/franz/berlin/coinrun/mila_submission/50M_dataset/arch_ablations/coinrun_dynamics_causal.sh",1372,0,"",shellscript,selection_command +1398,2137070,"slurm/jobs/franz/berlin/coinrun/mila_submission/50M_dataset/arch_ablations/coinrun_dynamics_causal.sh",1372,0,"""",shellscript,content +1399,2137071,"slurm/jobs/franz/berlin/coinrun/mila_submission/50M_dataset/arch_ablations/coinrun_dynamics_causal.sh",1373,0,"",shellscript,selection_keyboard +1400,2137197,"slurm/jobs/franz/berlin/coinrun/mila_submission/50M_dataset/arch_ablations/coinrun_dynamics_causal.sh",1373,0," ",shellscript,content +1401,2137197,"slurm/jobs/franz/berlin/coinrun/mila_submission/50M_dataset/arch_ablations/coinrun_dynamics_causal.sh",1374,0,"",shellscript,selection_keyboard +1402,2137496,"slurm/jobs/franz/berlin/coinrun/mila_submission/50M_dataset/arch_ablations/coinrun_dynamics_causal.sh",1374,0,"T",shellscript,content +1403,2137497,"slurm/jobs/franz/berlin/coinrun/mila_submission/50M_dataset/arch_ablations/coinrun_dynamics_causal.sh",1375,0,"",shellscript,selection_keyboard +1404,2137743,"slurm/jobs/franz/berlin/coinrun/mila_submission/50M_dataset/arch_ablations/coinrun_dynamics_causal.sh",1375,0,"O",shellscript,content +1405,2137744,"slurm/jobs/franz/berlin/coinrun/mila_submission/50M_dataset/arch_ablations/coinrun_dynamics_causal.sh",1376,0,"",shellscript,selection_keyboard +1406,2137840,"slurm/jobs/franz/berlin/coinrun/mila_submission/50M_dataset/arch_ablations/coinrun_dynamics_causal.sh",1376,0,"D",shellscript,content +1407,2137840,"slurm/jobs/franz/berlin/coinrun/mila_submission/50M_dataset/arch_ablations/coinrun_dynamics_causal.sh",1377,0,"",shellscript,selection_keyboard +1408,2137888,"slurm/jobs/franz/berlin/coinrun/mila_submission/50M_dataset/arch_ablations/coinrun_dynamics_causal.sh",1377,0,"O",shellscript,content +1409,2137889,"slurm/jobs/franz/berlin/coinrun/mila_submission/50M_dataset/arch_ablations/coinrun_dynamics_causal.sh",1378,0,"",shellscript,selection_keyboard +1410,2138484,"slurm/jobs/franz/berlin/coinrun/mila_submission/50M_dataset/arch_ablations/coinrun_dynamics_causal.sh",1378,0," ",shellscript,content +1411,2138484,"slurm/jobs/franz/berlin/coinrun/mila_submission/50M_dataset/arch_ablations/coinrun_dynamics_causal.sh",1379,0,"",shellscript,selection_keyboard +1412,2138630,"slurm/jobs/franz/berlin/coinrun/mila_submission/50M_dataset/arch_ablations/coinrun_dynamics_causal.sh",1379,0,"s",shellscript,content +1413,2138630,"slurm/jobs/franz/berlin/coinrun/mila_submission/50M_dataset/arch_ablations/coinrun_dynamics_causal.sh",1380,0,"",shellscript,selection_keyboard +1414,2138818,"slurm/jobs/franz/berlin/coinrun/mila_submission/50M_dataset/arch_ablations/coinrun_dynamics_causal.sh",1380,0,"e",shellscript,content +1415,2138819,"slurm/jobs/franz/berlin/coinrun/mila_submission/50M_dataset/arch_ablations/coinrun_dynamics_causal.sh",1381,0,"",shellscript,selection_keyboard +1416,2138844,"slurm/jobs/franz/berlin/coinrun/mila_submission/50M_dataset/arch_ablations/coinrun_dynamics_causal.sh",1381,0,"t",shellscript,content 
+1417,2138845,"slurm/jobs/franz/berlin/coinrun/mila_submission/50M_dataset/arch_ablations/coinrun_dynamics_causal.sh",1382,0,"",shellscript,selection_keyboard +1418,2138951,"slurm/jobs/franz/berlin/coinrun/mila_submission/50M_dataset/arch_ablations/coinrun_dynamics_causal.sh",1382,0," ",shellscript,content +1419,2138952,"slurm/jobs/franz/berlin/coinrun/mila_submission/50M_dataset/arch_ablations/coinrun_dynamics_causal.sh",1383,0,"",shellscript,selection_keyboard +1420,2139127,"slurm/jobs/franz/berlin/coinrun/mila_submission/50M_dataset/arch_ablations/coinrun_dynamics_causal.sh",1383,0,"t",shellscript,content +1421,2139128,"slurm/jobs/franz/berlin/coinrun/mila_submission/50M_dataset/arch_ablations/coinrun_dynamics_causal.sh",1384,0,"",shellscript,selection_keyboard +1422,2139164,"slurm/jobs/franz/berlin/coinrun/mila_submission/50M_dataset/arch_ablations/coinrun_dynamics_causal.sh",1384,0,"o",shellscript,content +1423,2139165,"slurm/jobs/franz/berlin/coinrun/mila_submission/50M_dataset/arch_ablations/coinrun_dynamics_causal.sh",1385,0,"",shellscript,selection_keyboard +1424,2139336,"slurm/jobs/franz/berlin/coinrun/mila_submission/50M_dataset/arch_ablations/coinrun_dynamics_causal.sh",1385,0,"k",shellscript,content +1425,2139336,"slurm/jobs/franz/berlin/coinrun/mila_submission/50M_dataset/arch_ablations/coinrun_dynamics_causal.sh",1386,0,"",shellscript,selection_keyboard +1426,2139443,"slurm/jobs/franz/berlin/coinrun/mila_submission/50M_dataset/arch_ablations/coinrun_dynamics_causal.sh",1386,0,"e",shellscript,content +1427,2139443,"slurm/jobs/franz/berlin/coinrun/mila_submission/50M_dataset/arch_ablations/coinrun_dynamics_causal.sh",1387,0,"",shellscript,selection_keyboard +1428,2139493,"slurm/jobs/franz/berlin/coinrun/mila_submission/50M_dataset/arch_ablations/coinrun_dynamics_causal.sh",1387,0,"n",shellscript,content +1429,2139493,"slurm/jobs/franz/berlin/coinrun/mila_submission/50M_dataset/arch_ablations/coinrun_dynamics_causal.sh",1388,0,"",shellscript,selection_keyboard +1430,2139607,"slurm/jobs/franz/berlin/coinrun/mila_submission/50M_dataset/arch_ablations/coinrun_dynamics_causal.sh",1388,0,"i",shellscript,content +1431,2139607,"slurm/jobs/franz/berlin/coinrun/mila_submission/50M_dataset/arch_ablations/coinrun_dynamics_causal.sh",1389,0,"",shellscript,selection_keyboard +1432,2139779,"slurm/jobs/franz/berlin/coinrun/mila_submission/50M_dataset/arch_ablations/coinrun_dynamics_causal.sh",1389,0,"z",shellscript,content +1433,2139779,"slurm/jobs/franz/berlin/coinrun/mila_submission/50M_dataset/arch_ablations/coinrun_dynamics_causal.sh",1390,0,"",shellscript,selection_keyboard +1434,2139874,"slurm/jobs/franz/berlin/coinrun/mila_submission/50M_dataset/arch_ablations/coinrun_dynamics_causal.sh",1390,0,"e",shellscript,content +1435,2139875,"slurm/jobs/franz/berlin/coinrun/mila_submission/50M_dataset/arch_ablations/coinrun_dynamics_causal.sh",1391,0,"",shellscript,selection_keyboard +1436,2139959,"slurm/jobs/franz/berlin/coinrun/mila_submission/50M_dataset/arch_ablations/coinrun_dynamics_causal.sh",1391,0,"r",shellscript,content +1437,2139964,"slurm/jobs/franz/berlin/coinrun/mila_submission/50M_dataset/arch_ablations/coinrun_dynamics_causal.sh",1392,0,"",shellscript,selection_keyboard +1438,2141305,"slurm/jobs/franz/berlin/coinrun/mila_submission/50M_dataset/arch_ablations/coinrun_dynamics_causal.sh",1374,0,"#",shellscript,content +1439,2141306,"slurm/jobs/franz/berlin/coinrun/mila_submission/50M_dataset/arch_ablations/coinrun_dynamics_causal.sh",1375,0,"",shellscript,selection_keyboard 
+1440,2141441,"slurm/jobs/franz/berlin/coinrun/mila_submission/50M_dataset/arch_ablations/coinrun_dynamics_causal.sh",1375,0," ",shellscript,content +1441,2141442,"slurm/jobs/franz/berlin/coinrun/mila_submission/50M_dataset/arch_ablations/coinrun_dynamics_causal.sh",1376,0,"",shellscript,selection_keyboard +1442,2142182,"slurm/jobs/franz/berlin/coinrun/mila_submission/50M_dataset/arch_ablations/coinrun_dynamics_causal.sh",1375,0,"",shellscript,selection_command +1443,2142613,"slurm/jobs/franz/berlin/coinrun/mila_submission/50M_dataset/arch_ablations/coinrun_dynamics_causal.sh",1374,0,"",shellscript,selection_command +1444,2146367,"slurm/jobs/franz/berlin/coinrun/mila_submission/50M_dataset/arch_ablations/coinrun_dynamics_ffn_dim_ablation.sh",0,0,"",shellscript,tab +1445,2147402,"slurm/jobs/franz/berlin/coinrun/mila_submission/50M_dataset/arch_ablations/coinrun_dynamics_ffn_dim_ablation.sh",1372,0,"",shellscript,selection_mouse +1446,2147497,"slurm/jobs/franz/berlin/coinrun/mila_submission/50M_dataset/arch_ablations/coinrun_dynamics_ffn_dim_ablation.sh",1363,18,"tokenizer_ckpt_dir",shellscript,selection_mouse +1447,2147650,"slurm/jobs/franz/berlin/coinrun/mila_submission/50M_dataset/arch_ablations/coinrun_dynamics_ffn_dim_ablation.sh",1363,52,"tokenizer_ckpt_dir="""" # TODO set tokeniter ckpt dir\n",shellscript,selection_mouse +1448,2152236,"slurm/jobs/franz/berlin/coinrun/mila_submission/50M_dataset/coinrun_dynamics_base.sh",0,0,"",shellscript,tab +1449,2153158,"slurm/jobs/franz/berlin/coinrun/mila_submission/50M_dataset/coinrun_dynamics_base.sh",1336,0,"",shellscript,selection_mouse +1450,2154445,"slurm/jobs/franz/berlin/coinrun/mila_submission/50M_dataset/coinrun_dynamics_base.sh",1337,0,"/fast/project/HFMI_SynergyUnit/jafar_ws/checkpoints/coinrun/tokenizer/tokenizer_coinrun_mila_submission_29736/""",shellscript,content +1451,2154494,"slurm/jobs/franz/berlin/coinrun/mila_submission/50M_dataset/coinrun_dynamics_base.sh",1447,0,"",shellscript,selection_command +1452,2155331,"slurm/jobs/franz/berlin/coinrun/mila_submission/50M_dataset/coinrun_dynamics_base.sh",1337,111,"",shellscript,content +1453,2155337,"slurm/jobs/franz/berlin/coinrun/mila_submission/50M_dataset/coinrun_dynamics_base.sh",1336,0,"",shellscript,selection_command +1454,2156042,"slurm/jobs/franz/berlin/coinrun/mila_submission/50M_dataset/coinrun_dynamics_base.sh",1460,0,"\n",shellscript,content +1455,2156387,"slurm/jobs/franz/berlin/coinrun/mila_submission/50M_dataset/coinrun_dynamics_base.sh",1461,0,"tokenizer_ckpt_dir="""" # TODO set tokeniter ckpt dir\n",shellscript,content +1456,2157107,"slurm/jobs/franz/berlin/coinrun/mila_submission/50M_dataset/coinrun_dynamics_base.sh",1461,0,"",shellscript,selection_command +1457,2157257,"slurm/jobs/franz/berlin/coinrun/mila_submission/50M_dataset/coinrun_dynamics_base.sh",1329,0,"",shellscript,selection_command +1458,2157648,"slurm/jobs/franz/berlin/coinrun/mila_submission/50M_dataset/coinrun_dynamics_base.sh",1329,132,"",shellscript,content +1459,2157733,"slurm/jobs/franz/berlin/coinrun/mila_submission/50M_dataset/coinrun_dynamics_base.sh",1381,0,"",shellscript,selection_command +1460,2158049,"slurm/jobs/franz/berlin/coinrun/mila_submission/50M_dataset/coinrun_dynamics_base.sh",1381,1,"",shellscript,content +1461,2163478,"slurm/jobs/franz/berlin/coinrun/mila_submission/50M_dataset/coinrun_dynamics_base.sh",1777,0,"",shellscript,selection_mouse +1462,2163534,"slurm/jobs/franz/berlin/coinrun/mila_submission/50M_dataset/coinrun_dynamics_base.sh",1776,0,"",shellscript,selection_command 
+1463,2165818," checklist.md",0,0,"",markdown,tab +1464,2165820," checklist.md",101,0,"",markdown,selection_mouse +1465,2166703," checklist.md",101,0,"\n",markdown,content +1466,2167834," checklist.md",102,0,"-",markdown,content +1467,2167835," checklist.md",103,0,"",markdown,selection_keyboard +1468,2168008," checklist.md",103,0," ",markdown,content +1469,2168009," checklist.md",104,0,"",markdown,selection_keyboard +1470,2168184," checklist.md",104,0,"t",markdown,content +1471,2168185," checklist.md",105,0,"",markdown,selection_keyboard +1472,2168223," checklist.md",105,0,"o",markdown,content +1473,2168224," checklist.md",106,0,"",markdown,selection_keyboard +1474,2168437," checklist.md",106,0,"d",markdown,content +1475,2168438," checklist.md",107,0,"",markdown,selection_keyboard +1476,2168493," checklist.md",107,0,"o",markdown,content +1477,2168494," checklist.md",108,0,"",markdown,selection_keyboard +1478,2168647," checklist.md",108,0," ",markdown,content +1479,2168648," checklist.md",109,0,"",markdown,selection_keyboard +1480,2168895," checklist.md",109,0,"t",markdown,content +1481,2168896," checklist.md",110,0,"",markdown,selection_keyboard +1482,2168949," checklist.md",110,0,"o",markdown,content +1483,2168950," checklist.md",111,0,"",markdown,selection_keyboard +1484,2169046," checklist.md",111,0,"k",markdown,content +1485,2169047," checklist.md",112,0,"",markdown,selection_keyboard +1486,2169217," checklist.md",112,0,"e",markdown,content +1487,2169217," checklist.md",113,0,"",markdown,selection_keyboard +1488,2169305," checklist.md",113,0,"n",markdown,content +1489,2169306," checklist.md",114,0,"",markdown,selection_keyboard +1490,2169466," checklist.md",114,0,"i",markdown,content +1491,2169467," checklist.md",115,0,"",markdown,selection_keyboard +1492,2169608," checklist.md",115,0,"z",markdown,content +1493,2169609," checklist.md",116,0,"",markdown,selection_keyboard +1494,2169748," checklist.md",116,0,"e",markdown,content +1495,2169749," checklist.md",117,0,"",markdown,selection_keyboard +1496,2169776," checklist.md",117,0,"r",markdown,content +1497,2169776," checklist.md",118,0,"",markdown,selection_keyboard +1498,2169867," checklist.md",118,0," ",markdown,content +1499,2169868," checklist.md",119,0,"",markdown,selection_keyboard +1500,2170051," checklist.md",119,0,"d",markdown,content +1501,2170051," checklist.md",120,0,"",markdown,selection_keyboard +1502,2170129," checklist.md",120,0,"i",markdown,content +1503,2170130," checklist.md",121,0,"",markdown,selection_keyboard +1504,2170267," checklist.md",121,0,"r",markdown,content +1505,2170268," checklist.md",122,0,"",markdown,selection_keyboard +1506,2177348,"slurm/jobs/franz/berlin/coinrun/mila_submission/250M_dataset/arch_ablations/coinrun_dynamics_no_cotraining.sh",0,0,"",shellscript,tab +1507,2180241," checklist.md",0,0,"",markdown,tab +1508,2187043,"slurm/jobs/franz/berlin/coinrun/mila_submission/50M_dataset/arch_ablations/coinrun_dynamics_no_cotraining.sh",0,0,"",shellscript,tab +1509,2193009,"slurm/jobs/franz/berlin/coinrun/mila_submission/50M_dataset/arch_ablations/coinrun_dynamics_no_cotraining.sh",2360,0,"",shellscript,selection_mouse +1510,2241432,"slurm/jobs/franz/berlin/coinrun/mila_submission/50M_dataset/coinrun_dynamics_base.sh",0,0,"",shellscript,tab +1511,2245659,"slurm/jobs/franz/berlin/coinrun/mila_submission/50M_dataset/arch_ablations/coinrun_dynamics_no_cotraining.sh",0,0,"",shellscript,tab 
+1512,2251360,"slurm/jobs/franz/berlin/coinrun/mila_submission/50M_dataset/arch_ablations/coinrun_dynamics_gt_actions.sh",0,0,"",shellscript,tab +1513,2254753,"slurm/jobs/franz/berlin/coinrun/mila_submission/50M_dataset/arch_ablations/coinrun_dynamics_ffn_dim_ablation.sh",0,0,"",shellscript,tab +1514,2257479,"slurm/jobs/franz/berlin/coinrun/mila_submission/50M_dataset/arch_ablations/coinrun_dynamics_causal.sh",0,0,"",shellscript,tab +1515,2259616,"slurm/jobs/franz/berlin/coinrun/mila_submission/50M_dataset/arch_ablations/coinrun_dynamics_no_cotraining.sh",0,0,"",shellscript,tab +1516,2333927,"TERMINAL",0,0,"cd slurm/",,terminal_command +1517,2337887,"TERMINAL",0,0,"git status",,terminal_command +1518,2337937,"TERMINAL",0,0,"]633;C",,terminal_output +1519,2339217,"TERMINAL",0,0,"Refresh index: 100% (883/883)\rRefresh index: 100% (883/883), done.\r\n",,terminal_output +1520,2339399,"TERMINAL",0,0,"On branch main\r\nYour branch is up to date with 'origin/main'.\r\n\r\nUntracked files:\r\n (use ""git add ..."" to include in what will be committed)\r\n\tjobs/franz/berlin/coinrun/mila_submission/50M_dataset/\r\n\r\nnothing added to commit but untracked files present (use ""git add"" to track)\r\n",,terminal_output +1521,2339400,"TERMINAL",0,0,"]0;tum_cte0515@hkn1990:~/Projects/jasmine/slurm",,terminal_output +1522,2344766,"TERMINAL",0,0,"git add jobs/franz/berlin/coinrun/mila_submission/50M_dataset/",,terminal_command +1523,2344824,"TERMINAL",0,0,"]633;C",,terminal_output +1524,2345133,"TERMINAL",0,0,"]0;tum_cte0515@hkn1990:~/Projects/jasmine/slurm",,terminal_output +1525,2368913,"TERMINAL",0,0,"git commit -m ""added base and arch ablations for 50m dataset""",,terminal_command +1526,2368967,"TERMINAL",0,0,"]633;C",,terminal_output +1527,2369979,"TERMINAL",0,0,"[main fdfba38] added base and arch ablations for 50m dataset\r\n 7 files changed, 535 insertions(+)\r\n create mode 100644 jobs/franz/berlin/coinrun/mila_submission/50M_dataset/arch_ablations/coinrun_dynamics_causal.sh\r\n create mode 100644 jobs/franz/berlin/coinrun/mila_submission/50M_dataset/arch_ablations/coinrun_dynamics_ffn_dim_ablation.sh\r\n create mode 100644 jobs/franz/berlin/coinrun/mila_submission/50M_dataset/arch_ablations/coinrun_dynamics_gt_actions.sh\r\n create mode 100644 jobs/franz/berlin/coinrun/mila_submission/50M_dataset/arch_ablations/coinrun_dynamics_no_cotraining.sh\r\n create mode 100644 jobs/franz/berlin/coinrun/mila_submission/50M_dataset/coinrun_dynamics_base.sh\r\n create mode 100644 jobs/franz/berlin/coinrun/mila_submission/50M_dataset/coinrun_lam_base.sh\r\n create mode 100644 jobs/franz/berlin/coinrun/mila_submission/50M_dataset/coinrun_tokenizer_base.sh\r\n]0;tum_cte0515@hkn1990:~/Projects/jasmine/slurm",,terminal_output +1528,2371537,"TERMINAL",0,0,"git push",,terminal_command +1529,2371555,"TERMINAL",0,0,"]633;C",,terminal_output +1530,2372889,"TERMINAL",0,0,"Enumerating objects: 22, done.\r\nCounting objects: 4% (1/22)\rCounting objects: 9% (2/22)\rCounting objects: 13% (3/22)\rCounting objects: 18% (4/22)\rCounting objects: 22% (5/22)\rCounting objects: 27% (6/22)\rCounting objects: 31% (7/22)\rCounting objects: 36% (8/22)\rCounting objects: 40% (9/22)\rCounting objects: 45% (10/22)\rCounting objects: 50% (11/22)\rCounting objects: 54% (12/22)\rCounting objects: 59% (13/22)\rCounting objects: 63% (14/22)\rCounting objects: 68% (15/22)\rCounting objects: 72% (16/22)\rCounting objects: 77% (17/22)\rCounting objects: 81% (18/22)\rCounting objects: 86% (19/22)\rCounting objects: 90% (20/22)\rCounting 
objects: 95% (21/22)\rCounting objects: 100% (22/22)\rCounting objects: 100% (22/22), done.\r\nDelta compression using up to 152 threads\r\nCompressing objects: 6% (1/16)\rCompressing objects: 12% (2/16)\rCompressing objects: 18% (3/16)\rCompressing objects: 25% (4/16)\rCompressing objects: 31% (5/16)\rCompressing objects: 37% (6/16)\rCompressing objects: 43% (7/16)\rCompressing objects: 50% (8/16)\rCompressing objects: 56% (9/16)\rCompressing objects: 62% (10/16)\rCompressing objects: 68% (11/16)\rCompressing objects: 75% (12/16)\rCompressing objects: 81% (13/16)\rCompressing objects: 87% (14/16)\rCompressing objects: 93% (15/16)\rCompressing objects: 100% (16/16)\rCompressing objects: 100% (16/16), done.\r\nWriting objects: 6% (1/16)\rWriting objects: 12% (2/16)\rWriting objects: 18% (3/16)\rWriting objects: 25% (4/16)\rWriting objects: 31% (5/16)\rWriting objects: 37% (6/16)\rWriting objects: 43% (7/16)\rWriting objects: 50% (8/16)\rWriting objects: 56% (9/16)\rWriting objects: 68% (11/16)\rWriting objects: 75% (12/16)\rWriting objects: 81% (13/16)\rWriting objects: 87% (14/16)\rWriting objects: 100% (16/16)\rWriting objects: 100% (16/16), 2.63 KiB | 672.00 KiB/s, done.\r\nTotal 16 (delta 8), reused 0 (delta 0), pack-reused 0\r\n",,terminal_output +1531,2373188,"TERMINAL",0,0,"remote: Resolving deltas: 0% (0/8)\rremote: Resolving deltas: 12% (1/8)\rremote: Resolving deltas: 25% (2/8)\rremote: Resolving deltas: 37% (3/8)\rremote: Resolving deltas: 50% (4/8)\rremote: Resolving deltas: 62% (5/8)\rremote: Resolving deltas: 75% (6/8)\rremote: Resolving deltas: 87% (7/8)\rremote: Resolving deltas: 100% (8/8)\rremote: Resolving deltas: 100% (8/8), completed with 2 local objects.\r\nTo github.com:p-doom/slurm.git\r\n f7a73c2..fdfba38 main -> main\r\n",,terminal_output +1532,2373230,"TERMINAL",0,0,"]0;tum_cte0515@hkn1990:~/Projects/jasmine/slurm",,terminal_output diff --git a/927a8af5474e5654810c00ce2e09fd2de87d3e5722f33fa1090d867db114e403/crowd-code-72520711-a485-48f6-9ba4-58828d05d5d11752670146212-2025_07_16-14.49.27.572/source.csv b/927a8af5474e5654810c00ce2e09fd2de87d3e5722f33fa1090d867db114e403/crowd-code-72520711-a485-48f6-9ba4-58828d05d5d11752670146212-2025_07_16-14.49.27.572/source.csv new file mode 100644 index 0000000000000000000000000000000000000000..5e3f078b26fc00b5ed4d701ba7d7f215144af908 --- /dev/null +++ b/927a8af5474e5654810c00ce2e09fd2de87d3e5722f33fa1090d867db114e403/crowd-code-72520711-a485-48f6-9ba4-58828d05d5d11752670146212-2025_07_16-14.49.27.572/source.csv @@ -0,0 +1,6560 @@ +Sequence,Time,File,RangeOffset,RangeLength,Text,Language,Type +2,373,"extension-output-pdoom-org.crowd-code-#1-crowd-code",0,0,"2:49:27 PM [info] Activating crowd-code\n2:49:27 PM [info] Recording started\n2:49:27 PM [info] Initializing git provider using file system watchers...\n2:49:27 PM [info] Git repository found\n2:49:27 PM [info] Git provider initialized successfully\n",Log,tab +3,566,"extension-output-pdoom-org.crowd-code-#1-crowd-code",245,0,"2:49:27 PM [info] Initial git state: [object Object]\n",Log,content +4,3273,"TERMINAL",0,0,"smi",,terminal_command +5,3324,"TERMINAL",0,0,"]633;E;2025-07-16 14:49:30 smi;97c203bb-2de3-4bf0-b19e-fa122ab0b933]633;C[?1049h(B[?7hEvery 1.0s: nvidia-smihkn1991.localdomain: Wed Jul 16 14:49:30 2025sh: line 1: nvidia-smi: command not found",,terminal_output +6,3417,"TERMINAL",0,0,"/bin/python3 /hkfs/home/project/hk-project-p0023960/tum_cte0515/.cursor-server/extensions/ms-python.python-2024.12.3-linux-x64/python_files/printEnvVariablesToFile.py 
/hkfs/home/project/hk-project-p0023960/tum_cte0515/.cursor-server/extensions/ms-python.python-2024.12.3-linux-x64/python_files/deactivate/bash/envVars.txt",,terminal_command +7,3467,"TERMINAL",0,0,"]633;E;2025-07-16 14:49:30 /bin/python3 /hkfs/home/project/hk-project-p0023960/tum_cte0515/.cursor-server/extensions/ms-python.python-2024.12.3-linux-x64/python_files/printEnvVariablesToFile.py /hkfs/home/project/hk-project-p0023960/tum_cte0515/.cursor-server/extensions/ms-python.python-2024.12.3-linux-x64/python_files/deactivate/bash/envVars.txt;05c9abe3-7ac4-4d27-8fc5-bcd2c02771e9]633;C",,terminal_output +8,3498,"TERMINAL",0,0,"]0;tum_cte0515@hkn1991:/hkfs/home/project/hk-project-p0023960/tum_cte0515/.cursor-server/extensions/ms-python.python-2024.12.3-linux-x64/python_files/deactivate/bash]633;D;0",,terminal_output +9,3908,"TERMINAL",0,0,"[?1049l\r[?1l>]0;tum_cte0515@hkn1991:~/Projects/jafar]633;D;0",,terminal_output +10,5445,"TERMINAL",0,0,"suque",,terminal_command +11,5454,"TERMINAL",0,0,"]633;E;2025-07-16 14:49:32 suque;97c203bb-2de3-4bf0-b19e-fa122ab0b933]633;Cbash: suque: command not found...\r\n",,terminal_output +12,6355,"TERMINAL",0,0,"^C\r\n]0;tum_cte0515@hkn1991:~/Projects/jafar]633;D;130",,terminal_output +13,7523,"TERMINAL",0,0,"queue",,terminal_command +14,7562,"TERMINAL",0,0,"]633;E;2025-07-16 14:49:35 queue;97c203bb-2de3-4bf0-b19e-fa122ab0b933]633;C",,terminal_output +15,7627,"TERMINAL",0,0,"[?1049h(B[?7hEvery 1.0s: squeue --mehkn1991.localdomain: Wed Jul 16 14:49:35 2025JOBID PARTITION NAME USER ST\tTIME NODES NODELIST(REASON)3348592 accelerat train_dy tum_cte0 R 14:49:13\t 2 hkn[0416,0421]3348397 accelerat train_dy tum_cte0 R 20:10:55\t 2 hkn[0810,0815]3348399 accelerat train_dy tum_cte0 R 20:10:55\t 2 hkn[0601,0603]3348400 accelerat train_dy tum_cte0 R 20:10:55\t 2 hkn[0604,0608]3350302 accelerat interact tum_cte0 R40:18\t 2 hkn[0509,0511]3345116 accelerat train_dy tum_cte0 R 1-20:54:40\t 2 hkn[0503,0506]",,terminal_output +16,8646,"TERMINAL",0,0,"6466691",,terminal_output +17,9741,"TERMINAL",0,0,"75777202",,terminal_output +18,10525,"TERMINAL",0,0,"[?1049l\r[?1l>]0;tum_cte0515@hkn1991:~/Projects/jafar]633;D;0",,terminal_output +19,12435,"TERMINAL",0,0,"scancel 3350302",,terminal_command +20,12524,"TERMINAL",0,0,"]633;E;2025-07-16 14:49:39 scancel 3350302;97c203bb-2de3-4bf0-b19e-fa122ab0b933]633;C",,terminal_output +21,12597,"TERMINAL",0,0,"]0;tum_cte0515@hkn1991:~/Projects/jafar]633;D;0",,terminal_output +22,13911,"TERMINAL",0,0,"queue",,terminal_command +23,14081,"TERMINAL",0,0,"]633;E;2025-07-16 14:49:41 queue;97c203bb-2de3-4bf0-b19e-fa122ab0b933]633;C[?1049h(B[?7hEvery 1.0s: squeue --mehkn1991.localdomain: Wed Jul 16 14:49:41 2025JOBID PARTITION NAME USER ST\tTIME NODES NODELIST(REASON)3350302 accelerat interact tum_cte0 CG40:22\t 2 hkn[0509,0511]3348592 accelerat train_dy tum_cte0 R 14:49:19\t 2 hkn[0416,0421]3348397 accelerat train_dy tum_cte0 R 20:11:01\t 2 hkn[0810,0815]3348399 accelerat train_dy tum_cte0 R 20:11:01\t 2 hkn[0601,0603]3348400 accelerat train_dy tum_cte0 R 20:11:01\t 2 hkn[0604,0608]3345116 accelerat train_dy tum_cte0 R 1-20:54:46\t 2 hkn[0503,0506]",,terminal_output +24,15017,"TERMINAL",0,0,"2202227",,terminal_output +25,16058,"TERMINAL",0,0,"313338",,terminal_output +26,16261,"TERMINAL",0,0,"[?1049l\r[?1l>]0;tum_cte0515@hkn1991:~/Projects/jafar]633;D;0",,terminal_output +27,19347,"TERMINAL",0,0,"salloc --time=10:00:00 --partition=accelerated --nodes=2 --ntasks-per-node=4 --gres=gpu:4 --cpus-per-task=5",,terminal_command 
+28,19397,"TERMINAL",0,0,"]633;E;2025-07-16 14:49:46 salloc --time=10:00:00 --partition=accelerated --nodes=2 --ntasks-per-node=4 --gres=gpu:4 --cpus-per-task=5;97c203bb-2de3-4bf0-b19e-fa122ab0b933]633;Csalloc: Granted job allocation 3350418\r\n",,terminal_output +29,19550,"TERMINAL",0,0,"salloc: Waiting for resource configuration\r\n",,terminal_output +30,46568,"TERMINAL",0,0,"salloc: Nodes hkn[0710-0711] are ready for job\r\n",,terminal_output +31,47713,"TERMINAL",0,0,"]0;tum_cte0515@hkn0710:~/Projects/jafar[?2004h[tum_cte0515@hkn0710 jafar]$ ",,terminal_output +32,48343,"TERMINAL",0,0,"[?25lso[?25h[?25lo[?25h",,terminal_output +33,48481,"TERMINAL",0,0,"[?25lu[?25h",,terminal_output +34,48623,"TERMINAL",0,0,"[?25lr[?25h",,terminal_output +35,49134,"TERMINAL",0,0,"[?25lc[?25h",,terminal_output +36,49239,"TERMINAL",0,0,"[?25le[?25h",,terminal_output +37,49300,"TERMINAL",0,0,"[?25l [?25h",,terminal_output +38,49442,"TERMINAL",0,0,"[?25l.[?25h",,terminal_output +39,49647,"TERMINAL",0,0,"[?25lv[?25h",,terminal_output +40,49851,"TERMINAL",0,0,"[?25le[?25h",,terminal_output +41,50125,"TERMINAL",0,0,"nv/",,terminal_output +42,50625,"TERMINAL",0,0,"",,terminal_output +43,50727,"TERMINAL",0,0,"[?25lb[?25h",,terminal_output +44,51254,"TERMINAL",0,0,"in/",,terminal_output +45,51483,"TERMINAL",0,0,"[?25la[?25h",,terminal_output +46,51612,"TERMINAL",0,0,"[?25lc[?25h",,terminal_output +47,51788,"TERMINAL",0,0,"tivate",,terminal_output +48,52057,"TERMINAL",0,0,"[?25l[?2004l\r]0;tum_cte0515@hkn0710:~/Projects/jafar[?2004h(jafar) [tum_cte0515@hkn0710 jafar]$ [?25h",,terminal_output +49,52282,"TERMINAL",0,0,"[?25lls[?25h[?25ls[?25h",,terminal_output +50,52468,"TERMINAL",0,0,"[?25l[?2004l\r[?25h",,terminal_output +51,52967,"TERMINAL",0,0,"data generation_1752513384.5762262.gif input_pipeline scripts_horeka\r\ndebug generation_1752513923.7489405.gif LICENSE slurm\r\nframe-knoms.png generation_1752579157.0310874.gif local-logs slurm-3309772.out\r\nframe.png generation_1752579372.4300406.gif logs tests\r\nframes generation_1752579794.2949483.gif models train_dynamics.py\r\ngenerate_dataset.py generation_1752579931.2817705.gif overfit_dir train_lam.py\r\ngeneration_1752489078.1856709.gif generation_1752580458.8344245.gif __pycache__ train_tokenizer_bak.py\r\ngeneration_1752489445.163335.gif generation_1752580934.2848504.gif README.md train_tokenizer.py\r\ngeneration_1752501077.2698705.gif generation_1752581091.8428152.gif read_tf_record.py utils\r\ngeneration_1752502813.7130806.gif generation_1752581503.520897.gif requirements-franz.txt wandb\r\ngeneration_1752503689.8298378.gif generation_1752581641.3452077.gif requirements.txt weekend-job-requeuer.sh\r\ngeneration_1752504934.1629438.gif generation_1752588193.6372015.gif sample.py weekend-job-starter.sh\r\ngeneration_1752505829.3945305.gif genie.py sample.py_bak\r\ngeneration_1752513109.1235461.gif gifs scripts_cremers\r\n]0;tum_cte0515@hkn0710:~/Projects/jafar[?2004h(jafar) [tum_cte0515@hkn0710 jafar]$ ",,terminal_output +52,54376,"TERMINAL",0,0,"[?25lqu[?25h[?25lu[?25h",,terminal_output +53,54463,"TERMINAL",0,0,"[?25le[?25h[?25lu[?25h",,terminal_output +54,54654,"TERMINAL",0,0,"[?25le[?25h",,terminal_output +55,54761,"TERMINAL",0,0,"[?25l[?2004l\r[?25h[?1049h(B[?7hEvery 1.0s: squeue --mehkn0710.localdomain: Wed Jul 16 14:50:22 2025JOBID PARTITION NAME USER ST\tTIME NODES NODELIST(REASON)3350302 accelerat interact tum_cte0 CG40:22\t 2 hkn[0509,0511]3348592 accelerat train_dy tum_cte0 R 14:50:00\t 2 hkn[0416,0421]3348397 accelerat train_dy tum_cte0 R 
20:11:42\t 2 hkn[0810,0815]3348399 accelerat train_dy tum_cte0 R 20:11:42\t 2 hkn[0601,0603]3348400 accelerat train_dy tum_cte0 R 20:11:42\t 2 hkn[0604,0608]3350418 accelerat interact tum_cte0 R\t0:36\t 2 hkn[0710-0711]3345116 accelerat train_dy tum_cte0 R 1-20:55:27\t 2 hkn[0503,0506]",,terminal_output +56,55598,"TERMINAL",0,0,"[?1049l\r[?1l>]0;tum_cte0515@hkn0710:~/Projects/jafar[?2004h(jafar) [tum_cte0515@hkn0710 jafar]$ ",,terminal_output +57,57917,"TERMINAL",0,0,"",,terminal_output +58,58329,"TERMINAL",0,0,"queue",,terminal_output +59,58545,"TERMINAL",0,0,"ls",,terminal_output +60,58854,"TERMINAL",0,0,"source .venv/bin/activate",,terminal_output +61,59254,"TERMINAL",0,0,"/bin/python3 /hkfs/home/project/hk-project-p0023960/tum_cte0515/.cursor-server/extensions/ms-python.python-2024.12.3-linux-x64/python_files/printEnvVariablesToFile.py /hkfs/home/project/hk-project-p0023960/tum_cte0515/.cursor-server/extensions/ms-python.python-2024.12.3-linux-x64/python_files/deactivate/bash/envVars.txt",,terminal_output +62,59820,"TERMINAL",0,0,"salloc --time=10:00:00 --partition=accelerated --nodes=2 --ntasks-per-node=4 --gres=gpu:4 --cpus-per-task=5\r\n\r\r\n\r",,terminal_output +63,60731,"TERMINAL",0,0,"\ridling",,terminal_output +64,60997,"TERMINAL",0,0,"queue",,terminal_output +65,61755,"TERMINAL",0,0,"uee",,terminal_output +66,62140,"TERMINAL",0,0,"python",,terminal_output +67,62427,"TERMINAL",0,0,"git branch",,terminal_output +68,62784,"TERMINAL",0,0,"ls ""/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/big-runs/tokenizer-lr-scaling/train_tokenizer_lr_sweep_1e-4""",,terminal_output +69,63392,"TERMINAL",0,0,"dynamics-cotraining-modelsize-scaling/train_dynamics_modelsize_scaling_36M_2_node""",,terminal_output +70,64337,"TERMINAL",0,0,"^C[?2004l\r[?2004h[?2004l\r\r\n]0;tum_cte0515@hkn0710:~/Projects/jafar[?2004h(jafar) [tum_cte0515@hkn0710 jafar]$ ",,terminal_output +71,64731,"TERMINAL",0,0,"\r(reverse-i-search)`': ",,terminal_output +72,65074,"TERMINAL",0,0,"s': ls",,terminal_output +73,65232,"TERMINAL",0,0,"[?25ls\rfailed reverse-i-search)`sj': ls[?25h",,terminal_output +74,66147,"TERMINAL",0,0,"^C[?2004l\r[?2004h[?2004l\r\r\n]0;tum_cte0515@hkn0710:~/Projects/jafar[?2004h(jafar) [tum_cte0515@hkn0710 jafar]$ ",,terminal_output +75,66544,"TERMINAL",0,0,"\r(reverse-i-search)`': ",,terminal_output +76,66861,"TERMINAL",0,0,"s': ls",,terminal_output +77,66936,"TERMINAL",0,0,"[?25lsh': /bin/python3 /hkfs/home/project/hk-project-p0023960/tum_cte0515/.cursor-server/extensions/ms-python.python-2024.12.3-linux-x64/python_files/printEnvVariablesToFile.py /hkfs/home/project/hk-project-p0023960/tum_cte0515/.cursor-server/extensions/ms-python.python-2024.12.3-linux-x64/python_files/deactivate/bash/envVars.txt[?25h",,terminal_output +78,67067,"TERMINAL",0,0," ': sh slurm/jobs/mihir/horeka/modelsize_scaling/runner.sh\r\n\r\r\n\r",,terminal_output +79,74334,"slurm/jobs/mihir/horeka/yolo-runs/tester.sh",0,0,"\n# Log the sbatch script\ncat $0\n\nmodule unload mpi/openmpi/5.0\nmodule unload devel/cuda/12.4\nsource .venv/bin/activate\n\narray_records_dir=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_new/open_ai_minecraft_arrayrecords_chunked\nws_dir='/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/'\n\njob_name=""debug""\nslurm_job_id=""debug-mihir""\n\nCHECKPOINT_DIR=$ws_dir/checkpoints/$job_name/$slurm_job_id\nmkdir -p 
$CHECKPOINT_DIR\n\ntokenizer_ckpt_dir=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/big-runs/tokenizer-lr-scaling/train_tokenizer_lr_sweep_1e-4\n\nenv | grep SLURM\n\nXLA_FLAGS=--xla_gpu_autotune_level=0 srun python train_dynamics.py \\n --ckpt_dir $CHECKPOINT_DIR \\n --batch_size=96 \\n --max_lr=1e-4 \\n --log_image_interval=1000 \\n --log \\n --log_checkpoint_interval=1000 \\n --name=dynamics-debug-run-$slurm_job_id \\n --tags dynamics debug \\n --entity instant-uv \\n --project jafar \\n --tokenizer_checkpoint=$tokenizer_ckpt_dir \\n --data_dir $array_records_dir\n ",shellscript,tab +80,78977,"TERMINAL",0,0,"^C[?2004l\r[?2004h[?2004l\r\r\n]0;tum_cte0515@hkn0710:~/Projects/jafar[?2004h(jafar) [tum_cte0515@hkn0710 jafar]$ ",,terminal_output +81,80053,"TERMINAL",0,0,"s",,terminal_output +82,80159,"TERMINAL",0,0,"[?25lh[?25h[?25l [?25h",,terminal_output +83,80514,"TERMINAL",0,0,"slurm/jobs/mihir/horeka/yolo-runs/tester.sh",,terminal_output +84,85746,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/utils/nn.py",0,0,"import math\nfrom typing import Dict, Tuple\n\nfrom flax import linen as nn\nimport jax\nimport jax.numpy as jnp\n\n\nclass PositionalEncoding(nn.Module):\n """"""https://uvadlc-notebooks.readthedocs.io/en/latest/tutorial_notebooks/JAX/tutorial6/Transformers_and_MHAttention.html""""""\n\n d_model: int # Hidden dimensionality of the input.\n max_len: int = 5000 # Maximum length of a sequence to expect.\n\n def setup(self):\n # Create matrix of [SeqLen, HiddenDim] representing the positional encoding for max_len inputs\n self.pe = jnp.zeros((self.max_len, self.d_model))\n position = jnp.arange(0, self.max_len, dtype=jnp.float32)[:, None]\n div_term = jnp.exp(\n jnp.arange(0, self.d_model, 2) * (-math.log(10000.0) / self.d_model)\n )\n self.pe = self.pe.at[:, 0::2].set(jnp.sin(position * div_term))\n self.pe = self.pe.at[:, 1::2].set(jnp.cos(position * div_term))\n\n def __call__(self, x):\n x = x + self.pe[: x.shape[2]]\n return x\n\nclass STBlock2(nn.Module):\n dim: int\n num_heads: int\n dropout: float\n param_dtype: jnp.dtype\n dtype: jnp.dtype\n\n @nn.remat\n @nn.compact\n def __call__(self, x: jax.Array) -> jax.Array:\n # --- Spatial attention ---\n z = PositionalEncoding(self.dim)(x)\n z = nn.LayerNorm(\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n )(z)\n causal_mask = jnp.tri(z.shape[-2])\n z = nn.MultiHeadAttention(\n num_heads=self.num_heads,\n qkv_features=self.dim,\n dropout_rate=self.dropout,\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n )(z, mask=causal_mask)\n x = x + z\n\n # --- Temporal attention ---\n x = x.swapaxes(1, 2)\n z = PositionalEncoding(self.dim)(x)\n z = nn.LayerNorm(\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n )(z)\n causal_mask = jnp.tri(z.shape[-2])\n z = nn.MultiHeadAttention(\n num_heads=self.num_heads,\n qkv_features=self.dim,\n dropout_rate=self.dropout,\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n )(z, mask=causal_mask)\n x = x + z\n x = x.swapaxes(1, 2)\n\n # --- Feedforward ---\n z = nn.LayerNorm(\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n )(x)\n # FIXME (f.srambical): Here, the attention hidden dimension is the same as the FFN's. 
Usually, FFN hidden dimension is 4x model_dim\n z = nn.Dense(\n self.dim,\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n )(z)\n z = nn.gelu(z)\n x = x + z\n\n return x\n\nclass CausalTransformerBlock(nn.Module):\n model_dim: int\n num_heads: int\n dropout: float\n param_dtype: jnp.dtype\n dtype: jnp.dtype\n\n @nn.compact\n def __call__(self, x: jax.Array) -> jax.Array:\n # LayerNorm + Causal Self-Attention\n z = nn.LayerNorm(\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n )(x)\n seq_len = z.shape[1]\n # Causal mask: (1, 1, seq_len, seq_len)\n causal_mask = jnp.tril(jnp.ones((seq_len, seq_len), dtype=bool))\n jax.debug.breakpoint()\n z = nn.MultiHeadAttention(\n num_heads=self.num_heads,\n qkv_features=self.model_dim,\n dropout_rate=self.dropout,\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n )(z, mask=causal_mask)\n jax.debug.breakpoint()\n x = x + z\n jax.debug.breakpoint()\n\n # Feedforward\n z = nn.LayerNorm(\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n )(x)\n jax.debug.breakpoint()\n z = nn.Dense(\n self.model_dim,\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n )(z)\n jax.debug.breakpoint()\n z = nn.gelu(z)\n jax.debug.breakpoint()\n x = x + z\n jax.debug.breakpoint()\n\n return x\n\nclass CausalTransformer(nn.Module):\n model_dim: int\n out_dim: int\n num_blocks: int\n num_heads: int\n dropout: float\n param_dtype: jnp.dtype\n dtype: jnp.dtype\n\n @nn.compact\n def __call__(self, x: jax.Array) -> jax.Array:\n # Input projection and normalization\n x = nn.Sequential(\n [\n nn.LayerNorm(\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n ),\n nn.Dense(self.model_dim,\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n ),\n nn.LayerNorm(\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n ),\n ]\n )(x)\n # Causal transformer blocks\n for _ in range(self.num_blocks):\n x = STBlock2(\n dim=self.model_dim,\n num_heads=self.num_heads,\n dropout=self.dropout,\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n )(x)\n # x = CausalTransformerBlock(\n # model_dim=self.model_dim,\n # num_heads=self.num_heads,\n # dropout=self.dropout,\n # param_dtype=self.param_dtype,\n # dtype=self.dtype,\n # )(x)\n # Output projection\n x = nn.Dense(\n self.out_dim,\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n )(x)\n return x # (B, T, E)\n\n\nclass STBlock(nn.Module):\n dim: int\n num_heads: int\n dropout: float\n param_dtype: jnp.dtype\n dtype: jnp.dtype\n\n @nn.remat\n @nn.compact\n def __call__(self, x: jax.Array) -> jax.Array:\n # --- Spatial attention ---\n z = PositionalEncoding(self.dim)(x)\n z = nn.LayerNorm(\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n )(z)\n z = nn.MultiHeadAttention(\n num_heads=self.num_heads,\n qkv_features=self.dim,\n dropout_rate=self.dropout,\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n )(z)\n x = x + z\n\n # --- Temporal attention ---\n x = x.swapaxes(1, 2)\n z = PositionalEncoding(self.dim)(x)\n z = nn.LayerNorm(\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n )(z)\n causal_mask = jnp.tri(z.shape[-2])\n z = nn.MultiHeadAttention(\n num_heads=self.num_heads,\n qkv_features=self.dim,\n dropout_rate=self.dropout,\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n )(z, mask=causal_mask)\n x = x + z\n x = x.swapaxes(1, 2)\n\n # --- Feedforward ---\n z = nn.LayerNorm(\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n )(x)\n # FIXME (f.srambical): Here, the attention hidden dimension is the same as the FFN's. 
Usually, FFN hidden dimension is 4x model_dim\n z = nn.Dense(\n self.dim,\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n )(z)\n z = nn.gelu(z)\n x = x + z\n\n return x\n\n\nclass STTransformer(nn.Module):\n model_dim: int\n out_dim: int\n num_blocks: int\n num_heads: int\n dropout: float\n param_dtype: jnp.dtype\n dtype: jnp.dtype\n\n @nn.compact\n def __call__(self, x: jax.Array) -> jax.Array:\n x = nn.Sequential(\n [\n nn.LayerNorm(\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n ),\n nn.Dense(self.model_dim,\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n ),\n nn.LayerNorm(\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n ),\n ]\n )(x)\n for _ in range(self.num_blocks):\n x = STBlock(\n dim=self.model_dim,\n num_heads=self.num_heads,\n dropout=self.dropout,\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n )(x)\n x = nn.Dense(\n self.out_dim,\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n )(x)\n return x # (B, T, E)\n\n\ndef normalize(x):\n return x / (jnp.linalg.norm(x, ord=2, axis=-1, keepdims=True) + 1e-8)\n\n\nclass VectorQuantizer(nn.Module):\n latent_dim: int\n num_latents: int\n dropout: float\n\n def setup(self):\n self.codebook = normalize(\n self.param(\n ""codebook"",\n nn.initializers.lecun_uniform(),\n (self.num_latents, self.latent_dim),\n )\n )\n self.drop = nn.Dropout(self.dropout, deterministic=False)\n\n def __call__(\n self, x: jax.Array, training: bool\n ) -> Tuple[jax.Array, jax.Array, jax.Array, jax.Array]:\n # --- Compute distances ---\n x = normalize(x)\n codebook = normalize(self.codebook)\n distance = -jnp.matmul(x, codebook.T)\n if training:\n dropout_key = self.make_rng(""dropout"")\n distance = self.drop(distance, rng=dropout_key)\n\n # --- Get indices and embeddings ---\n indices = jnp.argmin(distance, axis=-1)\n z = self.codebook[indices]\n\n # --- Straight through estimator ---\n z_q = x + jax.lax.stop_gradient(z - x)\n return z_q, z, x, indices\n\n def get_codes(self, indices: jax.Array):\n return self.codebook[indices]\n",python,tab +85,85749,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/utils/nn.py",969,0,"",python,selection_mouse +86,85846,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/utils/nn.py",968,2,"pe",python,selection_mouse +87,96688,"TERMINAL",0,0,"[?25l\rslurm/jobs/mihir/horeka/yolo-runs/tester.sh\r\n[?2004l\r[?25h\r\n# Log the sbatch script\r\ncat $0\r\n\r\nmodule unload mpi/openmpi/5.0\r\nmodule unload devel/cuda/12.4\r\nsource .venv/bin/activate\r\n\r\narray_records_dir=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_new/open_ai_minecraft_arrayrecords_chunked\r\nws_dir='/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/'\r\n\r\njob_name=""debug""\r\nslurm_job_id=""debug-mihir""\r\n\r\nCHECKPOINT_DIR=$ws_dir/checkpoints/$job_name/$slurm_job_id\r\nmkdir -p $CHECKPOINT_DIR\r\n\r\ntokenizer_ckpt_dir=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/big-runs/tokenizer-lr-scaling/train_tokenizer_lr_sweep_1e-4\r\n\r\nenv | grep SLURM\r\n\r\nXLA_FLAGS=--xla_gpu_autotune_level=0 srun python train_dynamics.py \\r\n --ckpt_dir $CHECKPOINT_DIR \\r\n --batch_size=96 \\r\n --max_lr=1e-4 \\r\n --log_image_interval=1000 \\r\n --log \\r\n --log_checkpoint_interval=1000 \\r\n --name=dynamics-debug-run-$slurm_job_id \\r\n --tags dynamics debug \\r\n --entity instant-uv \\r\n --project jafar \\r\n --tokenizer_checkpoint=$tokenizer_ckpt_dir \\r\n --data_dir $array_records_dir\r\n ",,terminal_output 
+88,96921,"TERMINAL",0,0,"SLURM_STEP_NUM_TASKS=1\r\nSLURM_JOB_USER=tum_cte0515\r\nSLURM_TASKS_PER_NODE=4(x2)\r\nSLURM_JOB_UID=999226\r\nSLURM_TASK_PID=2279443\r\nSLURM_JOB_GPUS=0,1,2,3\r\nSLURM_LOCALID=0\r\nSLURM_SUBMIT_DIR=/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar\r\nSLURMD_NODENAME=hkn0710\r\nSLURM_JOB_START_TIME=1752670186\r\nSLURM_STEP_NODELIST=hkn0710\r\nSLURM_CLUSTER_NAME=hk\r\nSLURM_JOB_END_TIME=1752706186\r\nSLURM_PMI2_SRUN_PORT=42215\r\nSLURM_CPUS_ON_NODE=24\r\nSLURM_JOB_CPUS_PER_NODE=24(x2)\r\nSLURM_GPUS_ON_NODE=4\r\nSLURM_GTIDS=0\r\nSLURM_JOB_PARTITION=accelerated\r\nSLURM_TRES_PER_TASK=cpu=5\r\nSLURM_OOM_KILL_STEP=0\r\nSLURM_JOB_NUM_NODES=2\r\nSLURM_STEPID=4294967290\r\nSLURM_JOBID=3350418\r\nSLURM_PTY_PORT=33491\r\nSLURM_JOB_QOS=normal\r\nSLURM_LAUNCH_NODE_IPADDR=10.0.7.199\r\nSLURM_PTY_WIN_ROW=43\r\nSLURM_PMI2_PROC_MAPPING=(vector,(0,1,1))\r\nSLURMD_DEBUG=2\r\nSLURM_PROCID=0\r\nSLURM_CPUS_PER_TASK=5\r\nSLURM_NTASKS=8\r\nSLURM_TOPOLOGY_ADDR=hkibb.hkibbi1.hkibbi1e8.hkn0710\r\nSLURM_TOPOLOGY_ADDR_PATTERN=switch.switch.switch.node\r\nSLURM_SRUN_COMM_HOST=10.0.7.199\r\nSLURM_SCRIPT_CONTEXT=prolog_task\r\nSLURM_PTY_WIN_COL=147\r\nSLURM_NODELIST=hkn[0710-0711]\r\nSLURM_SRUN_COMM_PORT=44593\r\nSLURM_STEP_ID=4294967290\r\nSLURM_JOB_ACCOUNT=hk-project-p0023960\r\nSLURM_PRIO_PROCESS=0\r\nSLURM_NPROCS=8\r\nSLURM_NNODES=2\r\nSLURM_SUBMIT_HOST=hkn1991.localdomain\r\nSLURM_JOB_ID=3350418\r\nSLURM_NODEID=0\r\nSLURM_STEP_NUM_NODES=1\r\nSLURM_STEP_TASKS_PER_NODE=1\r\nSLURM_MPI_TYPE=pmi2\r\nSLURM_PMI2_STEP_NODES=hkn0710\r\nSLURM_CONF=/etc/slurm/slurm.conf\r\nSLURM_JOB_NAME=interactive\r\nSLURM_NTASKS_PER_NODE=4\r\nSLURM_STEP_LAUNCHER_PORT=44593\r\nSLURM_JOB_GID=502226\r\nSLURM_JOB_NODELIST=hkn[0710-0711]\r\n",,terminal_output +89,97086,"TERMINAL",0,0,"GpuFreq=control_disabled\r\nGpuFreq=control_disabled\r\n",,terminal_output +90,98262,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/utils/nn.py",0,0,"",python,tab +91,98265,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/utils/nn.py",3295,0,"",python,selection_mouse +92,98335,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/utils/nn.py",3294,0,"",python,selection_command +93,112608,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/utils/nn.py",4954,0,"",python,selection_mouse +94,112774,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/utils/nn.py",4951,8,"STBlock2",python,selection_mouse +95,121555,"TERMINAL",0,0,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/tyro/_parsers.py:347: UserWarning: The field `param-dtype` is annotated with type ``, but the default value `` has type ``. We'll try to handle this gracefully, but it may cause unexpected behavior.\r\n warnings.warn(message)\r\n/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/tyro/_parsers.py:347: UserWarning: The field `param-dtype` is annotated with type ``, but the default value `` has type ``. We'll try to handle this gracefully, but it may cause unexpected behavior.\r\n warnings.warn(message)\r\n/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/tyro/_parsers.py:347: UserWarning: The field `param-dtype` is annotated with type ``, but the default value `` has type ``. 
We'll try to handle this gracefully, but it may cause unexpected behavior.\r\n warnings.warn(message)\r\n/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/tyro/_parsers.py:347: UserWarning: The field `param-dtype` is annotated with type ``, but the default value `` has type ``. We'll try to handle this gracefully, but it may cause unexpected behavior.\r\n warnings.warn(message)\r\n/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/tyro/_parsers.py:347: UserWarning: The field `param-dtype` is annotated with type ``, but the default value `` has type ``. We'll try to handle this gracefully, but it may cause unexpected behavior.\r\n warnings.warn(message)\r\n/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/tyro/_parsers.py:347: UserWarning: The field `param-dtype` is annotated with type ``, but the default value `` has type ``. We'll try to handle this gracefully, but it may cause unexpected behavior.\r\n warnings.warn(message)\r\n/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/tyro/_parsers.py:347: UserWarning: The field `param-dtype` is annotated with type ``, but the default value `` has type ``. We'll try to handle this gracefully, but it may cause unexpected behavior.\r\n warnings.warn(message)\r\n/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/tyro/_parsers.py:347: UserWarning: The field `param-dtype` is annotated with type ``, but the default value `` has type ``. We'll try to handle this gracefully, but it may cause unexpected behavior.\r\n warnings.warn(message)\r\n/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/tyro/_parsers.py:347: UserWarning: The field `dtype` is annotated with type ``, but the default value `` has type ``. We'll try to handle this gracefully, but it may cause unexpected behavior.\r\n warnings.warn(message)\r\n/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/tyro/_parsers.py:347: UserWarning: The field `dtype` is annotated with type ``, but the default value `` has type ``. We'll try to handle this gracefully, but it may cause unexpected behavior.\r\n warnings.warn(message)\r\n/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/tyro/_parsers.py:347: UserWarning: The field `dtype` is annotated with type ``, but the default value `` has type ``. We'll try to handle this gracefully, but it may cause unexpected behavior.\r\n warnings.warn(message)\r\n/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/tyro/_parsers.py:347: UserWarning: The field `dtype` is annotated with type ``, but the default value `` has type ``. We'll try to handle this gracefully, but it may cause unexpected behavior.\r\n warnings.warn(message)\r\n/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/tyro/_parsers.py:347: UserWarning: The field `dtype` is annotated with type ``, but the default value `` has type ``. 
We'll try to handle this gracefully, but it may cause unexpected behavior.\r\n warnings.warn(message)\r\n/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/tyro/_parsers.py:347: UserWarning: The field `dtype` is annotated with type ``, but the default value `` has type ``. We'll try to handle this gracefully, but it may cause unexpected behavior.\r\n warnings.warn(message)\r\n/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/tyro/_parsers.py:347: UserWarning: The field `dtype` is annotated with type ``, but the default value `` has type ``. We'll try to handle this gracefully, but it may cause unexpected behavior.\r\n warnings.warn(message)\r\n/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/tyro/_parsers.py:347: UserWarning: The field `dtype` is annotated with type ``, but the default value `` has type ``. We'll try to handle this gracefully, but it may cause unexpected behavior.\r\n warnings.warn(message)\r\n",,terminal_output +96,127031,"TERMINAL",0,0,"2025-07-16 14:51:34.528655: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n2025-07-16 14:51:34.528652: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n2025-07-16 14:51:34.529779: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n2025-07-16 14:51:34.529781: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n2025-07-16 14:51:34.529995: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n2025-07-16 14:51:34.530411: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n2025-07-16 14:51:34.530921: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n2025-07-16 14:51:34.533961: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. 
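The tyro UserWarnings above are consistent with config fields annotated as `jnp.dtype` whose defaults are concrete dtype objects, the same pattern used by the modules in this recording (e.g. `param_dtype: jnp.dtype`). A minimal sketch (an assumed reconstruction, not the project's actual config) that reproduces the warning:

import dataclasses
import jax.numpy as jnp
import tyro

@dataclasses.dataclass
class Args:
    # The annotation (`jnp.dtype`) and the type of the default (`jnp.float32`,
    # a class) disagree, so tyro warns but handles the field gracefully.
    param_dtype: jnp.dtype = jnp.float32
    dtype: jnp.dtype = jnp.bfloat16

args = tyro.cli(Args)  # parsing emits the `param-dtype`/`dtype` warnings seen above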
Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +97,141848,"TERMINAL",0,0,"2025-07-16 14:51:49.282586: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n2025-07-16 14:51:49.342411: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +98,142001,"TERMINAL",0,0,"2025-07-16 14:51:49.501717: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n2025-07-16 14:51:49.506690: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +99,142066,"TERMINAL",0,0,"2025-07-16 14:51:49.528058: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n2025-07-16 14:51:49.557389: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n2025-07-16 14:51:49.557502: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +100,142122,"TERMINAL",0,0,"2025-07-16 14:51:49.594293: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +101,145424,"TERMINAL",0,0,"2025-07-16 14:51:52.865963: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n2025-07-16 14:51:52.914224: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. 
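The repeated dot_search_space warnings above come from XLA's GEMM autotuner; the sbatch script logged earlier launches training with XLA_FLAGS=--xla_gpu_autotune_level=0. A minimal sketch (assuming only that XLA reads this flag from the environment before JAX initializes its backends) of setting the same flag from Python:

import os

# Must be set before the first jax import/backend initialization,
# mirroring the `XLA_FLAGS=... srun python train_dynamics.py` invocation above.
os.environ["XLA_FLAGS"] = "--xla_gpu_autotune_level=0"

import jax

print(jax.devices())  # backends are created here and pick up the flag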
Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +102,145623,"TERMINAL",0,0,"2025-07-16 14:51:53.090840: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n2025-07-16 14:51:53.114928: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n2025-07-16 14:51:53.114932: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n2025-07-16 14:51:53.115416: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n2025-07-16 14:51:53.125014: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +103,145686,"TERMINAL",0,0,"2025-07-16 14:51:53.186282: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +104,148072,"TERMINAL",0,0,"wandb: Currently logged in as: mihir-mahajan2002 (instant-uv) to https://api.wandb.ai. 
Use `wandb login --relogin` to force relogin\r\n",,terminal_output +105,148802,"TERMINAL",0,0,"wandb: Tracking run with wandb version 0.19.11\r\nwandb: Run data is saved locally in /hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/wandb/run-20250716_145155-votpmez5\r\nwandb: Run `wandb offline` to turn off syncing.\r\nwandb: Syncing run dynamics-debug-run-debug-mihir\r\nwandb: ⭐️ View project at https://wandb.ai/instant-uv/jafar\r\nwandb: 🚀 View run at https://wandb.ai/instant-uv/jafar/runs/votpmez5\r\n",,terminal_output +106,185566,"TERMINAL",0,0,"^Csrun: interrupt (one more within 1 sec to abort)\r\nsrun: StepId=3350418.0 tasks 0-7: running\r\n",,terminal_output +107,185709,"TERMINAL",0,0,"^Csrun: sending Ctrl-C to StepId=3350418.0\r\n",,terminal_output +108,185765,"TERMINAL",0,0,"srun: forcing job termination\r\nsrun: Job step aborted: Waiting up to 32 seconds for job step to finish.\r\nslurmstepd: error: *** STEP 3350418.0 ON hkn0710 CANCELLED AT 2025-07-16T14:52:33 ***\r\n",,terminal_output +109,185834,"TERMINAL",0,0,"^Csrun: sending Ctrl-C to StepId=3350418.0\r\nsrun: job abort in progress\r\n",,terminal_output +110,186015,"TERMINAL",0,0,"^Csrun: sending Ctrl-C to StepId=3350418.0\r\n",,terminal_output +111,186287,"TERMINAL",0,0,"^Csrun: sending Ctrl-C to StepId=3350418.0\r\n",,terminal_output +112,186465,"TERMINAL",0,0,"^Csrun: sending Ctrl-C to StepId=3350418.0\r\n",,terminal_output +113,186693,"TERMINAL",0,0,"^C",,terminal_output +114,186944,"TERMINAL",0,0,"^Csrun: sending Ctrl-C to StepId=3350418.0\r\nsrun: job abort in progress\r\n]0;tum_cte0515@hkn0710:~/Projects/jafar[?2004h(jafar) [tum_cte0515@hkn0710 jafar]$ ",,terminal_output +115,187172,"TERMINAL",0,0,"^C[?2004l\r[?2004h[?2004l\r\r\n]0;tum_cte0515@hkn0710:~/Projects/jafar[?2004h(jafar) [tum_cte0515@hkn0710 jafar]$ ",,terminal_output +116,187907,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/utils/nn.py",0,0,"",python,tab +117,187908,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/utils/nn.py",9429,0,"",python,selection_mouse +118,187999,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/utils/nn.py",9428,0,"",python,selection_command +119,190607,"genie.py",0,0,"from typing import Dict, Any\n\nimport optax\nimport jax\nimport jax.numpy as jnp\nimport flax.linen as nn\nfrom flax.training.train_state import TrainState\nimport orbax.checkpoint as ocp\n\nfrom models.dynamics import DynamicsMaskGIT, DynamicsAutoregressive\nfrom models.lam import LatentActionModel\nfrom models.tokenizer import TokenizerVQVAE\n\nimport os\nimport grain\n\n\nclass Genie(nn.Module):\n """"""Genie model""""""\n\n # --- Tokenizer ---\n in_dim: int\n tokenizer_dim: int\n latent_patch_dim: int\n num_patch_latents: int\n patch_size: int\n tokenizer_num_blocks: int\n tokenizer_num_heads: int\n # --- LAM ---\n lam_dim: int\n latent_action_dim: int\n num_latent_actions: int\n lam_patch_size: int\n lam_num_blocks: int\n lam_num_heads: int\n lam_co_train: bool\n # --- Dynamics ---\n dyna_dim: int\n dyna_num_blocks: int\n dyna_num_heads: int\n param_dtype: jnp.dtype\n dtype: jnp.dtype\n dropout: float = 0.0\n mask_limit: float = 0.0\n\n def setup(self):\n self.tokenizer = TokenizerVQVAE(\n in_dim=self.in_dim,\n model_dim=self.tokenizer_dim,\n latent_dim=self.latent_patch_dim,\n num_latents=self.num_patch_latents,\n patch_size=self.patch_size,\n num_blocks=self.tokenizer_num_blocks,\n num_heads=self.tokenizer_num_heads,\n dropout=0.0,\n codebook_dropout=0.0,\n param_dtype=self.param_dtype,\n 
dtype=self.dtype,\n )\n self.lam = LatentActionModel(\n in_dim=self.in_dim,\n model_dim=self.lam_dim,\n latent_dim=self.latent_patch_dim,\n num_latents=self.num_latent_actions,\n patch_size=self.lam_patch_size,\n num_blocks=self.lam_num_blocks,\n num_heads=self.lam_num_heads,\n dropout=0.0,\n codebook_dropout=0.0,\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n )\n self.dynamics = DynamicsAutoregressive(\n model_dim=self.dyna_dim,\n num_latents=self.num_patch_latents,\n num_blocks=self.dyna_num_blocks,\n num_heads=self.dyna_num_heads,\n dropout=self.dropout,\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n )\n # self.dynamics = DynamicsMaskGIT(\n # model_dim=self.dyna_dim,\n # num_latents=self.num_patch_latents,\n # num_blocks=self.dyna_num_blocks,\n # num_heads=self.dyna_num_heads,\n # dropout=self.dropout,\n # mask_limit=self.mask_limit,\n # param_dtype=self.param_dtype,\n # dtype=self.dtype,\n # )\n\n def __call__(self, batch: Dict[str, Any], training: bool = True) -> Dict[str, Any]:\n tokenizer_outputs = self.tokenizer.vq_encode(batch[""videos""], training=False)\n lam_outputs = self.lam.vq_encode(batch[""videos""], training=False)\n latent_actions = jax.lax.cond(\n self.lam_co_train,\n lambda: lam_outputs[""z_q""],\n lambda: jax.lax.stop_gradient(lam_outputs[""z_q""])\n )\n outputs = dict(\n video_tokens=jax.lax.stop_gradient(tokenizer_outputs[""indices""]),\n latent_actions=latent_actions,\n )\n outputs[""mask_rng""] = batch[""mask_rng""]\n dyna_outputs = self.dynamics(outputs, training)\n outputs.update(dyna_outputs)\n mle_indices = jnp.argmax(outputs[""token_logits""], axis=-1)\n outputs[""recon""] = self.tokenizer.decode(\n mle_indices, batch[""videos""].shape[2:4]\n )\n outputs[""lam_indices""] = lam_outputs[""indices""]\n return outputs\n\n @nn.compact\n def sample(\n self,\n batch: Dict[str, Any],\n seq_len: int,\n steps: int = 25,\n temperature: float = 1,\n sample_argmax: bool = False,\n ) -> Any:\n """"""\n Autoregressively samples up to `seq_len` future frames, following Figure 8 of the paper.\n\n - Input frames are tokenized once.\n - Future frames are generated autoregressively in token space.\n - All frames are detokenized in a single pass.\n\n Note:\n - For interactive or step-wise sampling, detokenization should occur after each action.\n - To maintain consistent tensor shapes across timesteps, all current and future frames are decoded at every step.\n - Temporal causal structure is preserved by \n a) reapplying the mask before each decoding step.\n b) a temporal causal mask is applied within each ST-transformer block.\n\n Dimension keys:\n B: batch size \n T: number of input (conditioning) frames \n N: patches per frame \n S: sequence length \n A: action space \n D: model latent dimension\n """"""\n # --- Encode videos and actions ---\n tokenizer_out = self.tokenizer.vq_encode(batch[""videos""], training=False)\n token_idxs = tokenizer_out[""indices""] # (B, T, N)\n B, T, N = token_idxs.shape\n pad_shape = (B, seq_len - T, N)\n pad = jnp.zeros(pad_shape, dtype=token_idxs.dtype)\n token_idxs = jnp.concatenate([token_idxs, pad], axis=1) # (B, S, N)\n action_tokens = self.lam.vq.get_codes(batch[""latent_actions""])\n\n MaskGITLoop = nn.scan(\n MaskGITStep,\n variable_broadcast=""params"",\n split_rngs={""params"": False},\n in_axes=0,\n out_axes=0,\n length=steps,\n )\n \n loop_fn = MaskGITLoop(\n dynamics=self.dynamics,\n tokenizer=self.tokenizer,\n temperature=temperature,\n sample_argmax=sample_argmax,\n steps=steps,\n )\n\n def generation_step_fn(carry, 
step_t):\n rng, current_token_idxs = carry\n rng, step_rng = jax.random.split(rng)\n\n # Mask current and future frames (i.e., t >= step_t)\n mask = jnp.arange(seq_len) >= step_t # (S,)\n mask = jnp.broadcast_to(mask[None, :, None], (B, seq_len, N)) # (B, S, N)\n mask = mask.astype(bool)\n masked_token_idxs = current_token_idxs * ~mask\n\n # --- Initialize and run MaskGIT loop ---\n init_carry_maskgit = (\n step_rng,\n masked_token_idxs,\n mask,\n action_tokens,\n )\n final_carry_maskgit, _ = loop_fn(init_carry_maskgit, jnp.arange(steps))\n updated_token_idxs = final_carry_maskgit[1]\n new_carry = (rng, updated_token_idxs)\n return new_carry, None\n\n # --- Run the autoregressive generation using scan ---\n initial_carry = (batch[""rng""], token_idxs)\n timesteps_to_scan = jnp.arange(T, seq_len)\n final_carry, _ = jax.lax.scan(\n generation_step_fn,\n initial_carry,\n timesteps_to_scan\n )\n final_token_idxs = final_carry[1]\n\n # --- Decode all tokens at once at the end ---\n final_frames = self.tokenizer.decode(\n final_token_idxs,\n video_hw=batch[""videos""].shape[2:4],\n )\n return final_frames\n\n def vq_encode(self, batch, training) -> Dict[str, Any]:\n # --- Preprocess videos ---\n lam_output = self.lam.vq_encode(batch[""videos""], training=training)\n return lam_output[""indices""]\n\n\nclass MaskGITStep(nn.Module):\n dynamics: nn.Module\n tokenizer: nn.Module\n temperature: float\n sample_argmax: bool\n steps: int\n\n @nn.compact\n def __call__(self, carry, x):\n rng, token_idxs, mask, action_tokens = carry\n step = x\n N = token_idxs.shape[2]\n\n # --- Construct + encode video ---\n vid_embed = self.dynamics.patch_embed(token_idxs) # (B, S, N, D)\n mask_token = self.dynamics.mask_token # (1, 1, 1, D,)\n mask_expanded = mask[..., None] # (B, S, N, 1) \n vid_embed = jnp.where(mask_expanded, mask_token, vid_embed)\n\n # --- Predict transition ---\n act_embed = self.dynamics.action_up(action_tokens)\n vid_embed += jnp.pad(act_embed, ((0, 0), (1, 0), (0, 0), (0, 0)))\n unmasked_ratio = jnp.cos(jnp.pi * (step + 1) / (self.steps * 2))\n step_temp = self.temperature * (1.0 - unmasked_ratio)\n final_logits = self.dynamics.dynamics(vid_embed) / step_temp\n\n # --- Sample new tokens for final frame ---\n if self.sample_argmax:\n sampled_token_idxs = jnp.argmax(final_logits, axis=-1)\n else:\n rng, _rng = jax.random.split(rng)\n sampled_token_idxs = jax.random.categorical(_rng, final_logits)\n gather_fn = jax.vmap(jax.vmap(jax.vmap(lambda x, y: x[y])))\n final_token_probs = gather_fn(jax.nn.softmax(final_logits), sampled_token_idxs)\n final_token_probs += ~mask\n # Update masked tokens only\n token_idxs = jnp.where(mask, sampled_token_idxs, token_idxs)\n\n # --- Update mask ---\n num_unmasked_tokens = jnp.round(N * (1.0 - unmasked_ratio)).astype(int)\n idx_mask = jnp.arange(final_token_probs.shape[-1]) > num_unmasked_tokens\n sorted_idxs = jnp.argsort(final_token_probs, axis=-1, descending=True)\n mask_update_fn = jax.vmap(lambda msk, ids: msk.at[ids].set(idx_mask))\n new_mask = mask_update_fn(mask, sorted_idxs)\n\n new_carry = (rng, token_idxs, new_mask, action_tokens)\n return new_carry, None\n\ndef restore_genie_components(\n train_state: TrainState,\n sharding: jax.sharding.NamedSharding,\n grain_iterator: grain.DataLoaderIterator,\n inputs: Dict[str, jax.Array],\n rng: jax.Array,\n args,\n):\n """"""Restore pre-trained Genie components""""""\n rng, _rng = jax.random.split(rng)\n\n # dummy values since we only use tx to initialize the dummy train states\n dummy_tx = optax.adamw(\n 
learning_rate=optax.constant_schedule(args.max_lr),\n        b1=0.9,\n        b2=0.9,\n        weight_decay=1e-4,\n    )\n    handler_registry = ocp.handlers.DefaultCheckpointHandlerRegistry()\n    handler_registry.add('model_state', ocp.args.StandardRestore, ocp.handlers.StandardCheckpointHandler)\n    handler_registry.add('dataloader_state', grain.checkpoint.CheckpointRestore, grain.checkpoint.CheckpointHandler)\n    \n\n    checkpoint_options = ocp.CheckpointManagerOptions(\n        step_format_fixed_length=6,\n    )\n    tokenizer_checkpoint_manager = ocp.CheckpointManager(\n        directory=args.tokenizer_checkpoint,\n        options=checkpoint_options,\n        handler_registry=handler_registry,\n    )\n    dummy_tokenizer = TokenizerVQVAE(\n        in_dim=args.image_channels,\n        model_dim=args.tokenizer_dim,\n        latent_dim=args.latent_patch_dim,\n        num_latents=args.num_patch_latents,\n        patch_size=args.patch_size,\n        num_blocks=args.tokenizer_num_blocks,\n        num_heads=args.tokenizer_num_heads,\n        dropout=args.dropout,\n        codebook_dropout=args.dropout,\n        param_dtype=args.param_dtype,\n        dtype=args.dtype,\n    )\n    tokenizer_init_params = dummy_tokenizer.init(_rng, inputs)\n    dummy_tokenizer_train_state = TrainState.create(\n        apply_fn=dummy_tokenizer.apply, params=tokenizer_init_params, tx=dummy_tx\n    )\n    abstract_sharded_tokenizer_state = _create_abstract_sharded_pytree(\n        dummy_tokenizer_train_state, sharding\n    )\n    restored_tokenizer = tokenizer_checkpoint_manager.restore(\n        step=tokenizer_checkpoint_manager.latest_step(),\n        args=ocp.args.Composite(\n            model_state=ocp.args.StandardRestore(abstract_sharded_tokenizer_state),\n            dataloader_state=grain.checkpoint.CheckpointRestore(grain_iterator),\n        ),\n    )[""model_state""]\n    restored_tokenizer_params = restored_tokenizer.params[""params""]\n    train_state.params[""params""][""tokenizer""].update(restored_tokenizer_params)\n    tokenizer_checkpoint_manager.close()\n\n    if args.lam_checkpoint:\n        lam_checkpoint_manager = ocp.CheckpointManager(\n            directory=args.lam_checkpoint,\n            options=checkpoint_options,\n            handler_registry=handler_registry,\n        )\n        dummy_lam = LatentActionModel(\n            in_dim=args.image_channels,\n            model_dim=args.lam_dim,\n            latent_dim=args.latent_patch_dim,\n            num_latents=args.num_latent_actions,\n            patch_size=args.lam_patch_size,\n            num_blocks=args.lam_num_blocks,\n            num_heads=args.lam_num_heads,\n            dropout=args.dropout,\n            codebook_dropout=args.dropout,\n            param_dtype=args.param_dtype,\n            dtype=args.dtype,\n        )\n        lam_init_params = dummy_lam.init(_rng, inputs)\n        dummy_lam_train_state = TrainState.create(\n            apply_fn=dummy_lam.apply, params=lam_init_params, tx=dummy_tx\n        )\n        abstract_sharded_lam_state = _create_abstract_sharded_pytree(\n            dummy_lam_train_state, sharding\n        )\n        restored_lam = lam_checkpoint_manager.restore(\n            step=lam_checkpoint_manager.latest_step(),\n            args=ocp.args.Composite(\n                model_state=ocp.args.StandardRestore(abstract_sharded_lam_state),\n                dataloader_state=grain.checkpoint.CheckpointRestore(grain_iterator),\n            ),\n        )[""model_state""]\n        restored_lam_params = restored_lam.params[""params""]\n        # Genie does not initialize all LAM modules, thus we omit those extra modules during restoration\n        # (f.srambical) FIXME: Currently, this is a small HBM memory crunch since the LAM's decoder is loaded into HBM and immediately discarded.\n        # A workaround would be to restore to host memory first, and only move the weights to HBM after pruning the decoder\n        restored_lam_params = {\n            k: v\n            for k, v in restored_lam_params.items()\n            if k in train_state.params[""params""][""lam""]\n        }\n        
train_state.params[""params""][""lam""].update(restored_lam_params)\n lam_checkpoint_manager.close()\n\n return train_state\n\ndef _create_abstract_sharded_pytree(pytree_template, sharding_spec):\n """"""Replaces arrays in a pytree with ShapeDtypeStructs having the given sharding.""""""\n\n def map_fn(leaf_template):\n if hasattr(leaf_template, ""shape"") and hasattr(leaf_template, ""dtype""):\n return jax.ShapeDtypeStruct(\n leaf_template.shape, leaf_template.dtype, sharding=sharding_spec\n )\n return leaf_template\n\n return jax.tree_util.tree_map(map_fn, pytree_template)",python,tab +120,191949,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/utils/nn.py",0,0,"",python,tab +121,193819,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/utils/nn.py",9116,0,"",python,selection_mouse +122,195258,"genie.py",0,0,"",python,tab +123,195779,"utils/nn.py",0,0,"import math\nfrom typing import Dict, Tuple\n\nfrom flax import linen as nn\nimport jax\nimport jax.numpy as jnp\n\n\nclass PositionalEncoding(nn.Module):\n """"""https://uvadlc-notebooks.readthedocs.io/en/latest/tutorial_notebooks/JAX/tutorial6/Transformers_and_MHAttention.html""""""\n\n d_model: int # Hidden dimensionality of the input.\n max_len: int = 5000 # Maximum length of a sequence to expect.\n\n def setup(self):\n # Create matrix of [SeqLen, HiddenDim] representing the positional encoding for max_len inputs\n self.pe = jnp.zeros((self.max_len, self.d_model))\n position = jnp.arange(0, self.max_len, dtype=jnp.float32)[:, None]\n div_term = jnp.exp(\n jnp.arange(0, self.d_model, 2) * (-math.log(10000.0) / self.d_model)\n )\n self.pe = self.pe.at[:, 0::2].set(jnp.sin(position * div_term))\n self.pe = self.pe.at[:, 1::2].set(jnp.cos(position * div_term))\n\n def __call__(self, x):\n x = x + self.pe[: x.shape[2]]\n return x\n\nclass STBlock2(nn.Module):\n dim: int\n num_heads: int\n dropout: float\n param_dtype: jnp.dtype\n dtype: jnp.dtype\n\n @nn.remat\n @nn.compact\n def __call__(self, x: jax.Array) -> jax.Array:\n # --- Spatial attention ---\n z = PositionalEncoding(self.dim)(x)\n z = nn.LayerNorm(\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n )(z)\n causal_mask = jnp.tri(z.shape[-2])\n z = nn.MultiHeadAttention(\n num_heads=self.num_heads,\n qkv_features=self.dim,\n dropout_rate=self.dropout,\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n )(z, mask=causal_mask)\n x = x + z\n\n # --- Temporal attention ---\n x = x.swapaxes(1, 2)\n z = PositionalEncoding(self.dim)(x)\n z = nn.LayerNorm(\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n )(z)\n causal_mask = jnp.tri(z.shape[-2])\n z = nn.MultiHeadAttention(\n num_heads=self.num_heads,\n qkv_features=self.dim,\n dropout_rate=self.dropout,\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n )(z, mask=causal_mask)\n x = x + z\n x = x.swapaxes(1, 2)\n\n # --- Feedforward ---\n z = nn.LayerNorm(\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n )(x)\n # FIXME (f.srambical): Here, the attention hidden dimension is the same as the FFN's. 
Usually, FFN hidden dimension is 4x model_dim\n z = nn.Dense(\n self.dim,\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n )(z)\n z = nn.gelu(z)\n x = x + z\n\n return x\n\nclass CausalTransformerBlock(nn.Module):\n model_dim: int\n num_heads: int\n dropout: float\n param_dtype: jnp.dtype\n dtype: jnp.dtype\n\n @nn.compact\n def __call__(self, x: jax.Array) -> jax.Array:\n # LayerNorm + Causal Self-Attention\n z = nn.LayerNorm(\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n )(x)\n seq_len = z.shape[1]\n # Causal mask: (1, 1, seq_len, seq_len)\n causal_mask = jnp.tril(jnp.ones((seq_len, seq_len), dtype=bool))\n jax.debug.breakpoint()\n z = nn.MultiHeadAttention(\n num_heads=self.num_heads,\n qkv_features=self.model_dim,\n dropout_rate=self.dropout,\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n )(z, mask=causal_mask)\n jax.debug.breakpoint()\n x = x + z\n jax.debug.breakpoint()\n\n # Feedforward\n z = nn.LayerNorm(\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n )(x)\n jax.debug.breakpoint()\n z = nn.Dense(\n self.model_dim,\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n )(z)\n jax.debug.breakpoint()\n z = nn.gelu(z)\n jax.debug.breakpoint()\n x = x + z\n jax.debug.breakpoint()\n\n return x\n\nclass CausalTransformer(nn.Module):\n model_dim: int\n out_dim: int\n num_blocks: int\n num_heads: int\n dropout: float\n param_dtype: jnp.dtype\n dtype: jnp.dtype\n\n @nn.compact\n def __call__(self, x: jax.Array) -> jax.Array:\n # Input projection and normalization\n x = nn.Sequential(\n [\n nn.LayerNorm(\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n ),\n nn.Dense(self.model_dim,\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n ),\n nn.LayerNorm(\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n ),\n ]\n )(x)\n # Causal transformer blocks\n for _ in range(self.num_blocks):\n x = STBlock2(\n dim=self.model_dim,\n num_heads=self.num_heads,\n dropout=self.dropout,\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n )(x)\n # x = CausalTransformerBlock(\n # model_dim=self.model_dim,\n # num_heads=self.num_heads,\n # dropout=self.dropout,\n # param_dtype=self.param_dtype,\n # dtype=self.dtype,\n # )(x)\n # Output projection\n x = nn.Dense(\n self.out_dim,\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n )(x)\n return x # (B, T, E)\n\n\nclass STBlock(nn.Module):\n dim: int\n num_heads: int\n dropout: float\n param_dtype: jnp.dtype\n dtype: jnp.dtype\n\n @nn.remat\n @nn.compact\n def __call__(self, x: jax.Array) -> jax.Array:\n # --- Spatial attention ---\n z = PositionalEncoding(self.dim)(x)\n z = nn.LayerNorm(\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n )(z)\n z = nn.MultiHeadAttention(\n num_heads=self.num_heads,\n qkv_features=self.dim,\n dropout_rate=self.dropout,\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n )(z)\n x = x + z\n\n # --- Temporal attention ---\n x = x.swapaxes(1, 2)\n z = PositionalEncoding(self.dim)(x)\n z = nn.LayerNorm(\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n )(z)\n causal_mask = jnp.tri(z.shape[-2])\n z = nn.MultiHeadAttention(\n num_heads=self.num_heads,\n qkv_features=self.dim,\n dropout_rate=self.dropout,\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n )(z, mask=causal_mask)\n x = x + z\n x = x.swapaxes(1, 2)\n\n # --- Feedforward ---\n z = nn.LayerNorm(\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n )(x)\n # FIXME (f.srambical): Here, the attention hidden dimension is the same as the FFN's. 
Usually, FFN hidden dimension is 4x model_dim\n z = nn.Dense(\n self.dim,\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n )(z)\n z = nn.gelu(z)\n x = x + z\n\n return x\n\n\nclass STTransformer(nn.Module):\n model_dim: int\n out_dim: int\n num_blocks: int\n num_heads: int\n dropout: float\n param_dtype: jnp.dtype\n dtype: jnp.dtype\n\n @nn.compact\n def __call__(self, x: jax.Array) -> jax.Array:\n x = nn.Sequential(\n [\n nn.LayerNorm(\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n ),\n nn.Dense(self.model_dim,\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n ),\n nn.LayerNorm(\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n ),\n ]\n )(x)\n for _ in range(self.num_blocks):\n x = STBlock(\n dim=self.model_dim,\n num_heads=self.num_heads,\n dropout=self.dropout,\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n )(x)\n x = nn.Dense(\n self.out_dim,\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n )(x)\n return x # (B, T, E)\n\n\ndef normalize(x):\n return x / (jnp.linalg.norm(x, ord=2, axis=-1, keepdims=True) + 1e-8)\n\n\nclass VectorQuantizer(nn.Module):\n latent_dim: int\n num_latents: int\n dropout: float\n\n def setup(self):\n self.codebook = normalize(\n self.param(\n ""codebook"",\n nn.initializers.lecun_uniform(),\n (self.num_latents, self.latent_dim),\n )\n )\n self.drop = nn.Dropout(self.dropout, deterministic=False)\n\n def __call__(\n self, x: jax.Array, training: bool\n ) -> Tuple[jax.Array, jax.Array, jax.Array, jax.Array]:\n # --- Compute distances ---\n x = normalize(x)\n codebook = normalize(self.codebook)\n distance = -jnp.matmul(x, codebook.T)\n if training:\n dropout_key = self.make_rng(""dropout"")\n distance = self.drop(distance, rng=dropout_key)\n\n # --- Get indices and embeddings ---\n indices = jnp.argmin(distance, axis=-1)\n z = self.codebook[indices]\n\n # --- Straight through estimator ---\n z_q = x + jax.lax.stop_gradient(z - x)\n return z_q, z, x, indices\n\n def get_codes(self, indices: jax.Array):\n return self.codebook[indices]\n",python,tab +124,197480,"utils/nn.py",2805,0,"",python,selection_mouse +125,197499,"utils/nn.py",2804,0,"",python,selection_command +126,198931,"models/dynamics.py",0,0,"from typing import Dict, Any\n\nimport jax\nimport jax.numpy as jnp\nimport flax.linen as nn\nimport einops\n\nfrom utils.nn import STTransformer, CausalTransformer\n\n\nclass DynamicsMaskGIT(nn.Module):\n """"""MaskGIT dynamics model""""""\n\n model_dim: int\n num_latents: int\n num_blocks: int\n num_heads: int\n dropout: float\n mask_limit: float\n param_dtype: jnp.dtype\n dtype: jnp.dtype\n\n def setup(self):\n self.dynamics = STTransformer(\n self.model_dim,\n self.num_latents,\n self.num_blocks,\n self.num_heads,\n self.dropout,\n self.param_dtype,\n self.dtype,\n )\n self.patch_embed = nn.Embed(self.num_latents, self.model_dim)\n self.mask_token = self.param(\n ""mask_token"",\n nn.initializers.lecun_uniform(),\n (1, 1, 1, self.model_dim),\n )\n self.action_up = nn.Dense(\n self.model_dim,\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n )\n\n def __call__(self, batch: Dict[str, Any], training: bool = True) -> Dict[str, Any]:\n # --- Mask videos ---\n vid_embed = self.patch_embed(batch[""video_tokens""])\n if training:\n rng1, rng2 = jax.random.split(batch[""mask_rng""])\n mask_prob = jax.random.uniform(rng1, minval=self.mask_limit)\n mask = jax.random.bernoulli(rng2, mask_prob, vid_embed.shape[:-1])\n mask = mask.at[:, 0].set(False)\n vid_embed = jnp.where(jnp.expand_dims(mask, -1), self.mask_token, vid_embed)\n else:\n 
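For reference against MaskGITStep in the genie.py snapshot above, a minimal sketch (illustrative values only) of its cosine unmasking schedule: `unmasked_ratio` decays from near 1 to 0 over the refinement steps, so few tokens are committed early and nearly all are committed by the last step, while the per-step sampling temperature `temperature * (1.0 - unmasked_ratio)` rises accordingly.

import jax.numpy as jnp

steps, N = 25, 64  # N: patches per frame (illustrative)
for step in (0, 6, 12, 18, 24):
    unmasked_ratio = jnp.cos(jnp.pi * (step + 1) / (steps * 2))
    num_unmasked = int(jnp.round(N * (1.0 - unmasked_ratio)))
    print(f"step {step}: {num_unmasked}/{N} tokens committed")
# step 0 commits ~0 tokens; at step 24, cos(pi/2) = 0, committing all 64.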
mask = None\n\n # --- Predict transition ---\n act_embed = self.action_up(batch[""latent_actions""])\n vid_embed += jnp.pad(act_embed, ((0, 0), (1, 0), (0, 0), (0, 0)))\n logits = self.dynamics(vid_embed)\n return dict(token_logits=logits, mask=mask)\n\n\nclass DynamicsAutoregressive(nn.Module):\n """"""Autoregressive (causal) dynamics model""""""\n\n model_dim: int\n num_latents: int\n num_blocks: int\n num_heads: int\n dropout: float\n param_dtype: jnp.dtype\n dtype: jnp.dtype\n\n def setup(self):\n self.dynamics = CausalTransformer(\n self.model_dim,\n self.num_latents,\n self.num_blocks,\n self.num_heads,\n self.dropout,\n self.param_dtype,\n self.dtype,\n )\n self.patch_embed = nn.Embed(self.num_latents, self.model_dim)\n self.action_up = nn.Dense(\n self.model_dim,\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n )\n\n def __call__(self, batch: Dict[str, Any], training: bool = True) -> Dict[str, Any]:\n vid_embed = self.patch_embed(batch[""video_tokens""])\n act_embed = self.action_up(batch[""latent_actions""])\n vid_embed += jnp.pad(act_embed, ((0, 0), (1, 0), (0, 0), (0, 0)))\n logits = self.dynamics(vid_embed)\n return dict(token_logits=logits, mask=None)\n",python,tab +127,203481,"models/dynamics.py",1310,0,"",python,selection_mouse +128,203630,"models/dynamics.py",1308,9,"mask_prob",python,selection_mouse +129,204173,"models/dynamics.py",1506,0,"",python,selection_mouse +130,204359,"models/dynamics.py",1504,9,"vid_embed",python,selection_mouse +131,204887,"models/dynamics.py",1463,0,"",python,selection_mouse +132,205058,"models/dynamics.py",1460,4,"mask",python,selection_mouse +133,205287,"models/dynamics.py",1460,6,"mask =",python,selection_mouse +134,205289,"models/dynamics.py",1460,11,"mask = mask",python,selection_mouse +135,205289,"models/dynamics.py",1460,14,"mask = mask.at",python,selection_mouse +136,205336,"models/dynamics.py",1460,17,"mask = mask.at[:,",python,selection_mouse +137,205337,"models/dynamics.py",1460,20,"mask = mask.at[:, 0]",python,selection_mouse +138,205338,"models/dynamics.py",1460,24,"mask = mask.at[:, 0].set",python,selection_mouse +139,205367,"models/dynamics.py",1460,30,"mask = mask.at[:, 0].set(False",python,selection_mouse +140,205408,"models/dynamics.py",1460,31,"mask = mask.at[:, 0].set(False)",python,selection_mouse +141,205721,"models/dynamics.py",1491,0,"",python,selection_mouse +142,205752,"models/dynamics.py",1490,0,"",python,selection_command +143,205872,"models/dynamics.py",1491,0,"",python,selection_mouse +144,205877,"models/dynamics.py",1490,0,"",python,selection_command +145,206052,"models/dynamics.py",1490,1,")",python,selection_mouse +146,206052,"models/dynamics.py",1485,5,"False",python,selection_mouse +147,206112,"models/dynamics.py",1491,0,"",python,selection_command +148,206113,"models/dynamics.py",1484,7,"(False)",python,selection_mouse +149,206114,"models/dynamics.py",1480,11,".set(False)",python,selection_mouse +150,206114,"models/dynamics.py",1491,34,"\n vid_embed = jnp.where",python,selection_mouse +151,206193,"models/dynamics.py",1491,28,"\n vid_embed = jnp",python,selection_mouse +152,206194,"models/dynamics.py",1491,25,"\n vid_embed = ",python,selection_mouse +153,206195,"models/dynamics.py",1491,24,"\n vid_embed =",python,selection_mouse +154,206195,"models/dynamics.py",1491,22,"\n vid_embed",python,selection_mouse +155,206388,"models/dynamics.py",1491,13,"\n ",python,selection_mouse +156,206442,"models/dynamics.py",1491,12,"\n ",python,selection_mouse +157,206525,"models/dynamics.py",1459,32," mask = mask.at[:, 
0].set(False)",python,selection_mouse +158,206997,"models/dynamics.py",1459,0,"",python,selection_mouse +159,206998,"models/dynamics.py",1448,12," ",python,selection_mouse +160,207307,"models/dynamics.py",1448,16," mask",python,selection_mouse +161,207308,"models/dynamics.py",1448,18," mask =",python,selection_mouse +162,207308,"models/dynamics.py",1448,23," mask = mask",python,selection_mouse +163,207309,"models/dynamics.py",1448,26," mask = mask.at",python,selection_mouse +164,207309,"models/dynamics.py",1448,28," mask = mask.at[:",python,selection_mouse +165,207309,"models/dynamics.py",1448,31," mask = mask.at[:, 0",python,selection_mouse +166,207310,"models/dynamics.py",1448,33," mask = mask.at[:, 0].",python,selection_mouse +167,207310,"models/dynamics.py",1448,36," mask = mask.at[:, 0].set",python,selection_mouse +168,207348,"models/dynamics.py",1448,37," mask = mask.at[:, 0].set(",python,selection_mouse +169,207348,"models/dynamics.py",1448,42," mask = mask.at[:, 0].set(False",python,selection_mouse +170,207413,"models/dynamics.py",1448,43," mask = mask.at[:, 0].set(False)",python,selection_mouse +171,207685,"models/dynamics.py",1491,0,"",python,selection_mouse +172,207746,"models/dynamics.py",1490,0,"",python,selection_command +173,207844,"models/dynamics.py",1491,0,"",python,selection_mouse +174,207854,"models/dynamics.py",1490,0,"",python,selection_command +175,207985,"models/dynamics.py",1490,1,")",python,selection_mouse +176,207988,"models/dynamics.py",1491,0,"",python,selection_command +177,208186,"models/dynamics.py",1490,1,")",python,selection_mouse +178,208186,"models/dynamics.py",1484,7,"(False)",python,selection_mouse +179,208187,"models/dynamics.py",1480,11,".set(False)",python,selection_mouse +180,208187,"models/dynamics.py",1478,13,"0].set(False)",python,selection_mouse +181,208187,"models/dynamics.py",1475,16,":, 0].set(False)",python,selection_mouse +182,208188,"models/dynamics.py",1474,17,"[:, 0].set(False)",python,selection_mouse +183,208188,"models/dynamics.py",1472,19,"at[:, 0].set(False)",python,selection_mouse +184,208188,"models/dynamics.py",1471,20,".at[:, 0].set(False)",python,selection_mouse +185,208189,"models/dynamics.py",1467,24,"mask.at[:, 0].set(False)",python,selection_mouse +186,208229,"models/dynamics.py",1491,22,"\n vid_embed",python,selection_mouse +187,208388,"models/dynamics.py",1491,13,"\n ",python,selection_mouse +188,212091,"TERMINAL",0,0,"sh slurm/jobs/mihir/horeka/yolo-runs/tester.sh",,terminal_output +189,213155,"TERMINAL",0,0,"\r\n[?2004l\r\r\n# Log the sbatch script\r\ncat $0\r\n\r\nmodule unload mpi/openmpi/5.0\r\nmodule unload devel/cuda/12.4\r\nsource .venv/bin/activate\r\n\r\narray_records_dir=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_new/open_ai_minecraft_arrayrecords_chunked\r\nws_dir='/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/'\r\n\r\njob_name=""debug""\r\nslurm_job_id=""debug-mihir""\r\n\r\nCHECKPOINT_DIR=$ws_dir/checkpoints/$job_name/$slurm_job_id\r\nmkdir -p $CHECKPOINT_DIR\r\n\r\ntokenizer_ckpt_dir=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/big-runs/tokenizer-lr-scaling/train_tokenizer_lr_sweep_1e-4\r\n\r\nenv | grep SLURM\r\n\r\nXLA_FLAGS=--xla_gpu_autotune_level=0 srun python train_dynamics.py \\r\n --ckpt_dir $CHECKPOINT_DIR \\r\n --batch_size=96 \\r\n --max_lr=1e-4 \\r\n --log_image_interval=1000 \\r\n --log \\r\n --log_checkpoint_interval=1000 \\r\n --name=dynamics-debug-run-$slurm_job_id \\r\n --tags dynamics debug \\r\n --entity instant-uv \\r\n --project jafar 
\\r\n --tokenizer_checkpoint=$tokenizer_ckpt_dir \\r\n --data_dir $array_records_dir\r\n ",,terminal_output +190,213285,"TERMINAL",0,0,"SLURM_STEP_NUM_TASKS=1\r\nSLURM_JOB_USER=tum_cte0515\r\nSLURM_TASKS_PER_NODE=4(x2)\r\nSLURM_JOB_UID=999226\r\nSLURM_TASK_PID=2279443\r\nSLURM_JOB_GPUS=0,1,2,3\r\nSLURM_LOCALID=0\r\nSLURM_SUBMIT_DIR=/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar\r\nSLURMD_NODENAME=hkn0710\r\nSLURM_JOB_START_TIME=1752670186\r\nSLURM_STEP_NODELIST=hkn0710\r\nSLURM_CLUSTER_NAME=hk\r\nSLURM_JOB_END_TIME=1752706186\r\nSLURM_PMI2_SRUN_PORT=42215\r\nSLURM_CPUS_ON_NODE=24\r\nSLURM_JOB_CPUS_PER_NODE=24(x2)\r\nSLURM_GPUS_ON_NODE=4\r\nSLURM_GTIDS=0\r\nSLURM_JOB_PARTITION=accelerated\r\nSLURM_TRES_PER_TASK=cpu=5\r\nSLURM_OOM_KILL_STEP=0\r\nSLURM_JOB_NUM_NODES=2\r\nSLURM_STEPID=4294967290\r\nSLURM_JOBID=3350418\r\nSLURM_PTY_PORT=33491\r\nSLURM_JOB_QOS=normal\r\nSLURM_LAUNCH_NODE_IPADDR=10.0.7.199\r\nSLURM_PTY_WIN_ROW=43\r\nSLURM_PMI2_PROC_MAPPING=(vector,(0,1,1))\r\nSLURMD_DEBUG=2\r\nSLURM_PROCID=0\r\nSLURM_CPUS_PER_TASK=5\r\nSLURM_NTASKS=8\r\nSLURM_TOPOLOGY_ADDR=hkibb.hkibbi1.hkibbi1e8.hkn0710\r\nSLURM_TOPOLOGY_ADDR_PATTERN=switch.switch.switch.node\r\nSLURM_SRUN_COMM_HOST=10.0.7.199\r\nSLURM_SCRIPT_CONTEXT=prolog_task\r\nSLURM_PTY_WIN_COL=147\r\nSLURM_NODELIST=hkn[0710-0711]\r\nSLURM_SRUN_COMM_PORT=44593\r\nSLURM_STEP_ID=4294967290\r\nSLURM_JOB_ACCOUNT=hk-project-p0023960\r\nSLURM_PRIO_PROCESS=0\r\nSLURM_NPROCS=8\r\nSLURM_NNODES=2\r\nSLURM_SUBMIT_HOST=hkn1991.localdomain\r\nSLURM_JOB_ID=3350418\r\nSLURM_NODEID=0\r\nSLURM_STEP_NUM_NODES=1\r\nSLURM_STEP_TASKS_PER_NODE=1\r\nSLURM_MPI_TYPE=pmi2\r\nSLURM_PMI2_STEP_NODES=hkn0710\r\nSLURM_CONF=/etc/slurm/slurm.conf\r\nSLURM_JOB_NAME=interactive\r\nSLURM_NTASKS_PER_NODE=4\r\nSLURM_STEP_LAUNCHER_PORT=44593\r\nSLURM_JOB_GID=502226\r\nSLURM_JOB_NODELIST=hkn[0710-0711]\r\n",,terminal_output +191,213400,"TERMINAL",0,0,"GpuFreq=control_disabled\r\nGpuFreq=control_disabled\r\n",,terminal_output +192,215329,"TERMINAL",0,0,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/tyro/_parsers.py:347: UserWarning: The field `param-dtype` is annotated with type ``, but the default value `` has type ``. We'll try to handle this gracefully, but it may cause unexpected behavior.\r\n warnings.warn(message)\r\n/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/tyro/_parsers.py:347: UserWarning: The field `dtype` is annotated with type ``, but the default value `` has type ``. We'll try to handle this gracefully, but it may cause unexpected behavior.\r\n warnings.warn(message)\r\n",,terminal_output +193,215386,"TERMINAL",0,0,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/tyro/_parsers.py:347: UserWarning: The field `param-dtype` is annotated with type ``, but the default value `` has type ``. We'll try to handle this gracefully, but it may cause unexpected behavior.\r\n warnings.warn(message)\r\n/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/tyro/_parsers.py:347: UserWarning: The field `dtype` is annotated with type ``, but the default value `` has type ``. 
We'll try to handle this gracefully, but it may cause unexpected behavior.\r\n warnings.warn(message)\r\n",,terminal_output +194,215441,"TERMINAL",0,0,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/tyro/_parsers.py:347: UserWarning: The field `param-dtype` is annotated with type ``, but the default value `` has type ``. We'll try to handle this gracefully, but it may cause unexpected behavior.\r\n warnings.warn(message)\r\n/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/tyro/_parsers.py:347: UserWarning: The field `dtype` is annotated with type ``, but the default value `` has type ``. We'll try to handle this gracefully, but it may cause unexpected behavior.\r\n warnings.warn(message)\r\n/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/tyro/_parsers.py:347: UserWarning: The field `param-dtype` is annotated with type ``, but the default value `` has type ``. We'll try to handle this gracefully, but it may cause unexpected behavior.\r\n warnings.warn(message)\r\n/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/tyro/_parsers.py:347: UserWarning: The field `dtype` is annotated with type ``, but the default value `` has type ``. We'll try to handle this gracefully, but it may cause unexpected behavior.\r\n warnings.warn(message)\r\n",,terminal_output +195,215913,"TERMINAL",0,0,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/tyro/_parsers.py:347: UserWarning: The field `param-dtype` is annotated with type ``, but the default value `` has type ``. We'll try to handle this gracefully, but it may cause unexpected behavior.\r\n warnings.warn(message)\r\n/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/tyro/_parsers.py:347: UserWarning: The field `param-dtype` is annotated with type ``, but the default value `` has type ``. We'll try to handle this gracefully, but it may cause unexpected behavior.\r\n warnings.warn(message)\r\n/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/tyro/_parsers.py:347: UserWarning: The field `param-dtype` is annotated with type ``, but the default value `` has type ``. We'll try to handle this gracefully, but it may cause unexpected behavior.\r\n warnings.warn(message)\r\n/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/tyro/_parsers.py:347: UserWarning: The field `param-dtype` is annotated with type ``, but the default value `` has type ``. We'll try to handle this gracefully, but it may cause unexpected behavior.\r\n warnings.warn(message)\r\n/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/tyro/_parsers.py:347: UserWarning: The field `dtype` is annotated with type ``, but the default value `` has type ``. We'll try to handle this gracefully, but it may cause unexpected behavior.\r\n warnings.warn(message)\r\n/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/tyro/_parsers.py:347: UserWarning: The field `dtype` is annotated with type ``, but the default value `` has type ``. 
We'll try to handle this gracefully, but it may cause unexpected behavior.\r\n warnings.warn(message)\r\n/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/tyro/_parsers.py:347: UserWarning: The field `dtype` is annotated with type ``, but the default value `` has type ``. We'll try to handle this gracefully, but it may cause unexpected behavior.\r\n warnings.warn(message)\r\n/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/tyro/_parsers.py:347: UserWarning: The field `dtype` is annotated with type ``, but the default value `` has type ``. We'll try to handle this gracefully, but it may cause unexpected behavior.\r\n warnings.warn(message)\r\n",,terminal_output +196,220372,"TERMINAL",0,0,"2025-07-16 14:53:07.801429: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n2025-07-16 14:53:07.802284: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n2025-07-16 14:53:07.818209: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n2025-07-16 14:53:07.818498: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n2025-07-16 14:53:07.821408: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n2025-07-16 14:53:07.829965: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +197,220485,"TERMINAL",0,0,"2025-07-16 14:53:07.952055: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n2025-07-16 14:53:07.985493: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. 
Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +198,235008,"TERMINAL",0,0,"2025-07-16 14:53:22.481573: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +199,235062,"TERMINAL",0,0,"2025-07-16 14:53:22.560840: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +200,235122,"TERMINAL",0,0,"2025-07-16 14:53:22.614661: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +201,235181,"TERMINAL",0,0,"2025-07-16 14:53:22.675211: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n2025-07-16 14:53:22.676621: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +202,235241,"TERMINAL",0,0,"2025-07-16 14:53:22.740282: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +203,235295,"TERMINAL",0,0,"2025-07-16 14:53:22.769147: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +204,235353,"TERMINAL",0,0,"2025-07-16 14:53:22.820516: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +205,238538,"TERMINAL",0,0,"2025-07-16 14:53:26.031175: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. 
Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +206,238688,"TERMINAL",0,0,"2025-07-16 14:53:26.177311: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n2025-07-16 14:53:26.190482: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +207,238784,"TERMINAL",0,0,"2025-07-16 14:53:26.242316: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +208,238892,"TERMINAL",0,0,"2025-07-16 14:53:26.359555: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n2025-07-16 14:53:26.394383: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +209,239048,"TERMINAL",0,0,"2025-07-16 14:53:26.547963: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +210,239257,"TERMINAL",0,0,"2025-07-16 14:53:26.756316: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +211,241522,"TERMINAL",0,0,"wandb: Currently logged in as: mihir-mahajan2002 (instant-uv) to https://api.wandb.ai. 
Use `wandb login --relogin` to force relogin\r\n",,terminal_output +212,241993,"TERMINAL",0,0,"wandb: Tracking run with wandb version 0.19.11\r\nwandb: Run data is saved locally in /hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/wandb/run-20250716_145328-24srqm6z\r\nwandb: Run `wandb offline` to turn off syncing.\r\nwandb: Syncing run dynamics-debug-run-debug-mihir\r\nwandb: ⭐️ View project at https://wandb.ai/instant-uv/jafar\r\nwandb: 🚀 View run at https://wandb.ai/instant-uv/jafar/runs/24srqm6z\r\n",,terminal_output +213,326456,"TERMINAL",0,0,"WARNING:absl:Dropping 2 examples of 89394 examples (shard 8).\r\nWARNING:absl:Dropping 2 examples of 89394 examples (shard 8).\r\nWARNING:absl:Dropping 2 examples of 89394 examples (shard 8).\r\n",,terminal_output +214,328782,"TERMINAL",0,0,"WARNING:absl:Dropping 2 examples of 89394 examples (shard 8).\r\nWARNING:absl:Dropping 2 examples of 89394 examples (shard 8).\r\nWARNING:absl:Dropping 2 examples of 89394 examples (shard 8).\r\n",,terminal_output +215,330128,"TERMINAL",0,0,"WARNING:absl:Dropping 2 examples of 89394 examples (shard 8).\r\n",,terminal_output +216,333392,"TERMINAL",0,0,"WARNING:absl:Dropping 2 examples of 89394 examples (shard 8).\r\n",,terminal_output +217,333475,"TERMINAL",0,0,"WARNING:absl:Missing metrics for step 40000\r\nERROR:absl:File /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/big-runs/tokenizer-lr-scaling/train_tokenizer_lr_sweep_1e-4/040000/metrics/metrics not found.\r\nWARNING:absl:Missing metrics for step 20000\r\nERROR:absl:File /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/big-runs/tokenizer-lr-scaling/train_tokenizer_lr_sweep_1e-4/020000/metrics/metrics not found.\r\nWARNING:absl:Missing metrics for step 80000\r\nERROR:absl:File /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/big-runs/tokenizer-lr-scaling/train_tokenizer_lr_sweep_1e-4/080000/metrics/metrics not found.\r\nWARNING:absl:Missing metrics for step 40000\r\nERROR:absl:File /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/big-runs/tokenizer-lr-scaling/train_tokenizer_lr_sweep_1e-4/040000/metrics/metrics not found.\r\nWARNING:absl:Missing metrics for step 40000\r\nERROR:absl:File /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/big-runs/tokenizer-lr-scaling/train_tokenizer_lr_sweep_1e-4/040000/metrics/metrics not found.\r\nWARNING:absl:Missing metrics for step 20000\r\nERROR:absl:File /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/big-runs/tokenizer-lr-scaling/train_tokenizer_lr_sweep_1e-4/020000/metrics/metrics not found.\r\nWARNING:absl:Missing metrics for step 120000\r\nERROR:absl:File /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/big-runs/tokenizer-lr-scaling/train_tokenizer_lr_sweep_1e-4/120000/metrics/metrics not found.\r\nWARNING:absl:Missing metrics for step 80000\r\nERROR:absl:File /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/big-runs/tokenizer-lr-scaling/train_tokenizer_lr_sweep_1e-4/080000/metrics/metrics not found.\r\nWARNING:absl:Missing metrics for step 144000\r\nERROR:absl:File /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/big-runs/tokenizer-lr-scaling/train_tokenizer_lr_sweep_1e-4/144000/metrics/metrics not found.\r\nWARNING:absl:Missing metrics for step 144000\r\nERROR:absl:File /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/big-runs/tokenizer-lr-scaling/train_tokenizer_lr_sweep_1e-4/144000/metrics/metrics not 
found.\r\nWARNING:absl:Missing metrics for step 40000\r\nERROR:absl:File /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/big-runs/tokenizer-lr-scaling/train_tokenizer_lr_sweep_1e-4/040000/metrics/metrics not found.\r\nWARNING:absl:Missing metrics for step 120000\r\nERROR:absl:File /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/big-runs/tokenizer-lr-scaling/train_tokenizer_lr_sweep_1e-4/120000/metrics/metrics not found.\r\nWARNING:absl:Missing metrics for step 140000\r\nERROR:absl:File /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/big-runs/tokenizer-lr-scaling/train_tokenizer_lr_sweep_1e-4/140000/metrics/metrics not found.\r\nWARNING:absl:Missing metrics for step 80000\r\nERROR:absl:File /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/big-runs/tokenizer-lr-scaling/train_tokenizer_lr_sweep_1e-4/080000/metrics/metrics not found.\r\nWARNING:absl:Missing metrics for step 80000\r\nERROR:absl:File /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/big-runs/tokenizer-lr-scaling/train_tokenizer_lr_sweep_1e-4/080000/metrics/metrics not found.\r\nWARNING:absl:Missing metrics for step 140000\r\nERROR:absl:File /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/big-runs/tokenizer-lr-scaling/train_tokenizer_lr_sweep_1e-4/140000/metrics/metrics not found.\r\nWARNING:absl:Missing metrics for step 40000\r\nERROR:absl:File /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/big-runs/tokenizer-lr-scaling/train_tokenizer_lr_sweep_1e-4/040000/metrics/metrics not found.\r\nWARNING:absl:Missing metrics for step 80000\r\nERROR:absl:File /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/big-runs/tokenizer-lr-scaling/train_tokenizer_lr_sweep_1e-4/080000/metrics/metrics not found.\r\nWARNING:absl:Missing metrics for step 120000\r\nERROR:absl:File /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/big-runs/tokenizer-lr-scaling/train_tokenizer_lr_sweep_1e-4/120000/metrics/metrics not found.\r\nWARNING:absl:Missing metrics for step 100000\r\nERROR:absl:File /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/big-runs/tokenizer-lr-scaling/train_tokenizer_lr_sweep_1e-4/100000/metrics/metrics not found.\r\nWARNING:absl:Missing metrics for step 100000\r\nERROR:absl:File /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/big-runs/tokenizer-lr-scaling/train_tokenizer_lr_sweep_1e-4/100000/metrics/metrics not found.\r\nWARNING:absl:Missing metrics for step 140000\r\nERROR:absl:File /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/big-runs/tokenizer-lr-scaling/train_tokenizer_lr_sweep_1e-4/140000/metrics/metrics not found.\r\nWARNING:absl:Missing metrics for step 40000\r\nERROR:absl:File /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/big-runs/tokenizer-lr-scaling/train_tokenizer_lr_sweep_1e-4/040000/metrics/metrics not found.\r\nWARNING:absl:Missing metrics for step 20000\r\nERROR:absl:File /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/big-runs/tokenizer-lr-scaling/train_tokenizer_lr_sweep_1e-4/020000/metrics/metrics not found.\r\nWARNING:absl:Missing metrics for step 80000\r\nERROR:absl:File /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/big-runs/tokenizer-lr-scaling/train_tokenizer_lr_sweep_1e-4/080000/metrics/metrics not found.\r\nWARNING:absl:Missing metrics for step 144000\r\nERROR:absl:File 
/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/big-runs/tokenizer-lr-scaling/train_tokenizer_lr_sweep_1e-4/144000/metrics/metrics not found.\r\n",,terminal_output +218,333569,"TERMINAL",0,0,"WARNING:absl:Missing metrics for step 120000\r\nERROR:absl:File /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/big-runs/tokenizer-lr-scaling/train_tokenizer_lr_sweep_1e-4/120000/metrics/metrics not found.\r\nWARNING:absl:Missing metrics for step 120000\r\nWARNING:absl:Missing metrics for step 120000\r\nERROR:absl:File /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/big-runs/tokenizer-lr-scaling/train_tokenizer_lr_sweep_1e-4/120000/metrics/metrics not found.\r\nERROR:absl:File /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/big-runs/tokenizer-lr-scaling/train_tokenizer_lr_sweep_1e-4/120000/metrics/metrics not found.\r\nWARNING:absl:Missing metrics for step 140000\r\nERROR:absl:File /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/big-runs/tokenizer-lr-scaling/train_tokenizer_lr_sweep_1e-4/140000/metrics/metrics not found.\r\nWARNING:absl:Missing metrics for step 140000\r\nERROR:absl:File /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/big-runs/tokenizer-lr-scaling/train_tokenizer_lr_sweep_1e-4/140000/metrics/metrics not found.\r\nWARNING:absl:Missing metrics for step 140000\r\nERROR:absl:File /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/big-runs/tokenizer-lr-scaling/train_tokenizer_lr_sweep_1e-4/140000/metrics/metrics not found.\r\nWARNING:absl:Missing metrics for step 146000\r\nWARNING:absl:Missing metrics for step 146000\r\nERROR:absl:File /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/big-runs/tokenizer-lr-scaling/train_tokenizer_lr_sweep_1e-4/146000/metrics/metrics not found.\r\nERROR:absl:File /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/big-runs/tokenizer-lr-scaling/train_tokenizer_lr_sweep_1e-4/146000/metrics/metrics not found.\r\nWARNING:absl:Missing metrics for step 146000\r\nERROR:absl:File /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/big-runs/tokenizer-lr-scaling/train_tokenizer_lr_sweep_1e-4/146000/metrics/metrics not found.\r\nWARNING:absl:Missing metrics for step 60000\r\nWARNING:absl:Missing metrics for step 60000\r\nERROR:absl:File /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/big-runs/tokenizer-lr-scaling/train_tokenizer_lr_sweep_1e-4/060000/metrics/metrics not found.\r\nERROR:absl:File /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/big-runs/tokenizer-lr-scaling/train_tokenizer_lr_sweep_1e-4/060000/metrics/metrics not found.\r\nWARNING:absl:Missing metrics for step 20000\r\nWARNING:absl:Missing metrics for step 20000\r\nERROR:absl:File /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/big-runs/tokenizer-lr-scaling/train_tokenizer_lr_sweep_1e-4/020000/metrics/metrics not found.\r\nERROR:absl:File /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/big-runs/tokenizer-lr-scaling/train_tokenizer_lr_sweep_1e-4/020000/metrics/metrics not found.\r\nWARNING:absl:Missing metrics for step 144000\r\nERROR:absl:File /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/big-runs/tokenizer-lr-scaling/train_tokenizer_lr_sweep_1e-4/144000/metrics/metrics not found.\r\nWARNING:absl:Missing metrics for step 144000\r\nWARNING:absl:Missing metrics for step 144000\r\nERROR:absl:File 
/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/big-runs/tokenizer-lr-scaling/train_tokenizer_lr_sweep_1e-4/144000/metrics/metrics not found.\r\nERROR:absl:File /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/big-runs/tokenizer-lr-scaling/train_tokenizer_lr_sweep_1e-4/144000/metrics/metrics not found.\r\nWARNING:absl:Missing metrics for step 146000\r\nERROR:absl:File /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/big-runs/tokenizer-lr-scaling/train_tokenizer_lr_sweep_1e-4/146000/metrics/metrics not found.\r\nWARNING:absl:Missing metrics for step 146000\r\nERROR:absl:File /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/big-runs/tokenizer-lr-scaling/train_tokenizer_lr_sweep_1e-4/146000/metrics/metrics not found.\r\nWARNING:absl:Missing metrics for step 145000\r\nERROR:absl:File /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/big-runs/tokenizer-lr-scaling/train_tokenizer_lr_sweep_1e-4/145000/metrics/metrics not found.\r\nWARNING:absl:Missing metrics for step 145000\r\nWARNING:absl:Missing metrics for step 145000\r\nERROR:absl:File /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/big-runs/tokenizer-lr-scaling/train_tokenizer_lr_sweep_1e-4/145000/metrics/metrics not found.\r\nWARNING:absl:Missing metrics for step 20000\r\nERROR:absl:File /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/big-runs/tokenizer-lr-scaling/train_tokenizer_lr_sweep_1e-4/145000/metrics/metrics not found.\r\nERROR:absl:File /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/big-runs/tokenizer-lr-scaling/train_tokenizer_lr_sweep_1e-4/020000/metrics/metrics not found.\r\nWARNING:absl:Missing metrics for step 145000\r\nERROR:absl:File /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/big-runs/tokenizer-lr-scaling/train_tokenizer_lr_sweep_1e-4/145000/metrics/metrics not found.\r\nWARNING:absl:Missing metrics for step 145000\r\nERROR:absl:File /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/big-runs/tokenizer-lr-scaling/train_tokenizer_lr_sweep_1e-4/145000/metrics/metrics not found.\r\nWARNING:absl:Missing metrics for step 145000\r\nERROR:absl:File /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/big-runs/tokenizer-lr-scaling/train_tokenizer_lr_sweep_1e-4/145000/metrics/metrics not found.\r\nWARNING:absl:Missing metrics for step 60000\r\nERROR:absl:File /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/big-runs/tokenizer-lr-scaling/train_tokenizer_lr_sweep_1e-4/060000/metrics/metrics not found.\r\nWARNING:absl:Missing metrics for step 146000\r\nERROR:absl:File /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/big-runs/tokenizer-lr-scaling/train_tokenizer_lr_sweep_1e-4/146000/metrics/metrics not found.\r\nWARNING:absl:Missing metrics for step 100000\r\nERROR:absl:File /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/big-runs/tokenizer-lr-scaling/train_tokenizer_lr_sweep_1e-4/100000/metrics/metrics not found.\r\nWARNING:absl:Missing metrics for step 145000\r\nWARNING:absl:Missing metrics for step 144000\r\nERROR:absl:File /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/big-runs/tokenizer-lr-scaling/train_tokenizer_lr_sweep_1e-4/145000/metrics/metrics not found.\r\nERROR:absl:File /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/big-runs/tokenizer-lr-scaling/train_tokenizer_lr_sweep_1e-4/144000/metrics/metrics not 
found.\r\nWARNING:absl:Missing metrics for step 40000\r\nERROR:absl:File /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/big-runs/tokenizer-lr-scaling/train_tokenizer_lr_sweep_1e-4/040000/metrics/metrics not found.\r\nWARNING:absl:Missing metrics for step 120000\r\nERROR:absl:File /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/big-runs/tokenizer-lr-scaling/train_tokenizer_lr_sweep_1e-4/120000/metrics/metrics not found.\r\nWARNING:absl:Missing metrics for step 140000\r\nERROR:absl:File /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/big-runs/tokenizer-lr-scaling/train_tokenizer_lr_sweep_1e-4/140000/metrics/metrics not found.\r\nWARNING:absl:Missing metrics for step 80000\r\nERROR:absl:File /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/big-runs/tokenizer-lr-scaling/train_tokenizer_lr_sweep_1e-4/080000/metrics/metrics not found.\r\nWARNING:absl:Missing metrics for step 146000\r\nERROR:absl:File /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/big-runs/tokenizer-lr-scaling/train_tokenizer_lr_sweep_1e-4/146000/metrics/metrics not found.\r\nWARNING:absl:Missing metrics for step 60000\r\nERROR:absl:File /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/big-runs/tokenizer-lr-scaling/train_tokenizer_lr_sweep_1e-4/060000/metrics/metrics not found.\r\nWARNING:absl:Missing metrics for step 20000\r\nERROR:absl:File /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/big-runs/tokenizer-lr-scaling/train_tokenizer_lr_sweep_1e-4/020000/metrics/metrics not found.\r\nWARNING:absl:Missing metrics for step 100000\r\nERROR:absl:File /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/big-runs/tokenizer-lr-scaling/train_tokenizer_lr_sweep_1e-4/100000/metrics/metrics not found.\r\nWARNING:absl:Missing metrics for step 100000\r\nERROR:absl:File /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/big-runs/tokenizer-lr-scaling/train_tokenizer_lr_sweep_1e-4/100000/metrics/metrics not found.\r\nWARNING:absl:Missing metrics for step 100000\r\nWARNING:absl:Missing metrics for step 100000\r\nERROR:absl:File /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/big-runs/tokenizer-lr-scaling/train_tokenizer_lr_sweep_1e-4/100000/metrics/metrics not found.\r\nERROR:absl:File /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/big-runs/tokenizer-lr-scaling/train_tokenizer_lr_sweep_1e-4/100000/metrics/metrics not found.\r\n",,terminal_output +219,333624,"TERMINAL",0,0,"WARNING:absl:Missing metrics for step 60000\r\nERROR:absl:File /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/big-runs/tokenizer-lr-scaling/train_tokenizer_lr_sweep_1e-4/060000/metrics/metrics not found.\r\nWARNING:absl:Missing metrics for step 60000\r\nERROR:absl:File /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/big-runs/tokenizer-lr-scaling/train_tokenizer_lr_sweep_1e-4/060000/metrics/metrics not found.\r\nWARNING:absl:Missing metrics for step 60000\r\nERROR:absl:File /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/big-runs/tokenizer-lr-scaling/train_tokenizer_lr_sweep_1e-4/060000/metrics/metrics not found.\r\n",,terminal_output +220,333707,"TERMINAL",0,0,"WARNING:absl:Missing metrics for step 144000\r\nERROR:absl:File /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/big-runs/tokenizer-lr-scaling/train_tokenizer_lr_sweep_1e-4/144000/metrics/metrics not found.\r\nWARNING:absl:Missing 
metrics for step 145000\r\nERROR:absl:File /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/big-runs/tokenizer-lr-scaling/train_tokenizer_lr_sweep_1e-4/145000/metrics/metrics not found.\r\nWARNING:absl:Missing metrics for step 146000\r\nERROR:absl:File /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/big-runs/tokenizer-lr-scaling/train_tokenizer_lr_sweep_1e-4/146000/metrics/metrics not found.\r\nWARNING:absl:Missing metrics for step 80000\r\nERROR:absl:File /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/big-runs/tokenizer-lr-scaling/train_tokenizer_lr_sweep_1e-4/080000/metrics/metrics not found.\r\nWARNING:absl:Missing metrics for step 40000\r\nERROR:absl:File /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/big-runs/tokenizer-lr-scaling/train_tokenizer_lr_sweep_1e-4/040000/metrics/metrics not found.\r\nWARNING:absl:Missing metrics for step 120000\r\nERROR:absl:File /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/big-runs/tokenizer-lr-scaling/train_tokenizer_lr_sweep_1e-4/120000/metrics/metrics not found.\r\nWARNING:absl:Missing metrics for step 140000\r\nERROR:absl:File /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/big-runs/tokenizer-lr-scaling/train_tokenizer_lr_sweep_1e-4/140000/metrics/metrics not found.\r\nWARNING:absl:Missing metrics for step 60000\r\nERROR:absl:File /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/big-runs/tokenizer-lr-scaling/train_tokenizer_lr_sweep_1e-4/060000/metrics/metrics not found.\r\nWARNING:absl:Missing metrics for step 100000\r\nERROR:absl:File /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/big-runs/tokenizer-lr-scaling/train_tokenizer_lr_sweep_1e-4/100000/metrics/metrics not found.\r\nWARNING:absl:Missing metrics for step 20000\r\nERROR:absl:File /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/big-runs/tokenizer-lr-scaling/train_tokenizer_lr_sweep_1e-4/020000/metrics/metrics not found.\r\n",,terminal_output +221,338542,"TERMINAL",0,0,"Running on 8 devices.\r\nCounting all components: ['tokenizer', 'lam', 'dynamics']\r\nParameter counts:\r\n{'tokenizer': 37989616, 'lam': 19349472, 'dynamics': 29734912, 'total': 87074000}\r\nRunning on 8 devices.\r\nCounting all components: ['tokenizer', 'lam', 'dynamics']\r\nParameter counts:\r\n{'tokenizer': 37989616, 'lam': 19349472, 'dynamics': 29734912, 'total': 87074000}\r\nRunning on 8 devices.\r\nCounting all components: ['tokenizer', 'lam', 'dynamics']\r\nParameter counts:\r\n{'tokenizer': 37989616, 'lam': 19349472, 'dynamics': 29734912, 'total': 87074000}\r\nRunning on 8 devices.\r\nCounting all components: ['tokenizer', 'lam', 'dynamics']\r\nParameter counts:\r\n{'tokenizer': 37989616, 'lam': 19349472, 'dynamics': 29734912, 'total': 87074000}\r\nRunning on 8 devices.\r\nCounting all components: ['tokenizer', 'lam', 'dynamics']\r\nParameter counts:\r\n{'tokenizer': 37989616, 'lam': 19349472, 'dynamics': 29734912, 'total': 87074000}\r\nRunning on 8 devices.\r\nCounting all components: ['tokenizer', 'lam', 'dynamics']\r\nParameter counts:\r\n{'tokenizer': 37989616, 'lam': 19349472, 'dynamics': 29734912, 'total': 87074000}\r\nRunning on 8 devices.\r\nCounting all components: ['tokenizer', 'lam', 'dynamics']\r\nParameter counts:\r\n{'tokenizer': 37989616, 'lam': 19349472, 'dynamics': 29734912, 'total': 87074000}\r\n",,terminal_output +222,338599,"TERMINAL",0,0,"Running on 8 devices.\r\nCounting all components: ['tokenizer', 'lam', 
'dynamics']\r\nParameter counts:\r\n{'tokenizer': 37989616, 'lam': 19349472, 'dynamics': 29734912, 'total': 87074000}\r\n",,terminal_output +223,343858,"TERMINAL",0,0,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/tyro/_parsers.py:347: UserWarning: The field `param-dtype` is annotated with type ``, but the default value `` has type ``. We'll try to handle this gracefully, but it may cause unexpected behavior.\r\n warnings.warn(message)\r\n/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/tyro/_parsers.py:347: UserWarning: The field `dtype` is annotated with type ``, but the default value `` has type ``. We'll try to handle this gracefully, but it may cause unexpected behavior.\r\n warnings.warn(message)\r\n/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/tyro/_parsers.py:347: UserWarning: The field `param-dtype` is annotated with type ``, but the default value `` has type ``. We'll try to handle this gracefully, but it may cause unexpected behavior.\r\n warnings.warn(message)\r\n/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/tyro/_parsers.py:347: UserWarning: The field `dtype` is annotated with type ``, but the default value `` has type ``. We'll try to handle this gracefully, but it may cause unexpected behavior.\r\n warnings.warn(message)\r\n/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/tyro/_parsers.py:347: UserWarning: The field `param-dtype` is annotated with type ``, but the default value `` has type ``. We'll try to handle this gracefully, but it may cause unexpected behavior.\r\n warnings.warn(message)\r\n/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/tyro/_parsers.py:347: UserWarning: The field `param-dtype` is annotated with type ``, but the default value `` has type ``. We'll try to handle this gracefully, but it may cause unexpected behavior.\r\n warnings.warn(message)\r\n/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/tyro/_parsers.py:347: UserWarning: The field `dtype` is annotated with type ``, but the default value `` has type ``. We'll try to handle this gracefully, but it may cause unexpected behavior.\r\n warnings.warn(message)\r\n/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/tyro/_parsers.py:347: UserWarning: The field `dtype` is annotated with type ``, but the default value `` has type ``. We'll try to handle this gracefully, but it may cause unexpected behavior.\r\n warnings.warn(message)\r\n/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/tyro/_parsers.py:347: UserWarning: The field `param-dtype` is annotated with type ``, but the default value `` has type ``. We'll try to handle this gracefully, but it may cause unexpected behavior.\r\n warnings.warn(message)\r\n/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/tyro/_parsers.py:347: UserWarning: The field `dtype` is annotated with type ``, but the default value `` has type ``. 
We'll try to handle this gracefully, but it may cause unexpected behavior.\r\n warnings.warn(message)\r\n/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/tyro/_parsers.py:347: UserWarning: The field `param-dtype` is annotated with type ``, but the default value `` has type ``. We'll try to handle this gracefully, but it may cause unexpected behavior.\r\n warnings.warn(message)\r\n/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/tyro/_parsers.py:347: UserWarning: The field `dtype` is annotated with type ``, but the default value `` has type ``. We'll try to handle this gracefully, but it may cause unexpected behavior.\r\n warnings.warn(message)\r\n/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/tyro/_parsers.py:347: UserWarning: The field `param-dtype` is annotated with type ``, but the default value `` has type ``. We'll try to handle this gracefully, but it may cause unexpected behavior.\r\n warnings.warn(message)\r\n/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/tyro/_parsers.py:347: UserWarning: The field `dtype` is annotated with type ``, but the default value `` has type ``. We'll try to handle this gracefully, but it may cause unexpected behavior.\r\n warnings.warn(message)\r\n/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/tyro/_parsers.py:347: UserWarning: The field `param-dtype` is annotated with type ``, but the default value `` has type ``. We'll try to handle this gracefully, but it may cause unexpected behavior.\r\n warnings.warn(message)\r\n/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/tyro/_parsers.py:347: UserWarning: The field `dtype` is annotated with type ``, but the default value `` has type ``. We'll try to handle this gracefully, but it may cause unexpected behavior.\r\n warnings.warn(message)\r\n/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/tyro/_parsers.py:347: UserWarning: The field `param-dtype` is annotated with type ``, but the default value `` has type ``. We'll try to handle this gracefully, but it may cause unexpected behavior.\r\n warnings.warn(message)\r\n/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/tyro/_parsers.py:347: UserWarning: The field `dtype` is annotated with type ``, but the default value `` has type ``. We'll try to handle this gracefully, but it may cause unexpected behavior.\r\n warnings.warn(message)\r\n/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/tyro/_parsers.py:347: UserWarning: The field `param-dtype` is annotated with type ``, but the default value `` has type ``. We'll try to handle this gracefully, but it may cause unexpected behavior.\r\n warnings.warn(message)\r\n/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/tyro/_parsers.py:347: UserWarning: The field `dtype` is annotated with type ``, but the default value `` has type ``. 
We'll try to handle this gracefully, but it may cause unexpected behavior.\r\n warnings.warn(message)\r\n/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/tyro/_parsers.py:347: UserWarning: The field `param-dtype` is annotated with type ``, but the default value `` has type ``. We'll try to handle this gracefully, but it may cause unexpected behavior.\r\n warnings.warn(message)\r\n/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/tyro/_parsers.py:347: UserWarning: The field `dtype` is annotated with type ``, but the default value `` has type ``. We'll try to handle this gracefully, but it may cause unexpected behavior.\r\n warnings.warn(message)\r\n/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/tyro/_parsers.py:347: UserWarning: The field `param-dtype` is annotated with type ``, but the default value `` has type ``. We'll try to handle this gracefully, but it may cause unexpected behavior.\r\n warnings.warn(message)\r\n/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/tyro/_parsers.py:347: UserWarning: The field `dtype` is annotated with type ``, but the default value `` has type ``. We'll try to handle this gracefully, but it may cause unexpected behavior.\r\n warnings.warn(message)\r\n",,terminal_output +224,343915,"TERMINAL",0,0,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/tyro/_parsers.py:347: UserWarning: The field `param-dtype` is annotated with type ``, but the default value `` has type ``. We'll try to handle this gracefully, but it may cause unexpected behavior.\r\n warnings.warn(message)\r\n/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/tyro/_parsers.py:347: UserWarning: The field `param-dtype` is annotated with type ``, but the default value `` has type ``. We'll try to handle this gracefully, but it may cause unexpected behavior.\r\n warnings.warn(message)\r\n/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/tyro/_parsers.py:347: UserWarning: The field `dtype` is annotated with type ``, but the default value `` has type ``. We'll try to handle this gracefully, but it may cause unexpected behavior.\r\n warnings.warn(message)\r\n/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/tyro/_parsers.py:347: UserWarning: The field `dtype` is annotated with type ``, but the default value `` has type ``. We'll try to handle this gracefully, but it may cause unexpected behavior.\r\n warnings.warn(message)\r\n/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/tyro/_parsers.py:347: UserWarning: The field `param-dtype` is annotated with type ``, but the default value `` has type ``. We'll try to handle this gracefully, but it may cause unexpected behavior.\r\n warnings.warn(message)\r\n/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/tyro/_parsers.py:347: UserWarning: The field `dtype` is annotated with type ``, but the default value `` has type ``. 
We'll try to handle this gracefully, but it may cause unexpected behavior.\r\n warnings.warn(message)\r\n",,terminal_output +225,343970,"TERMINAL",0,0,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/tyro/_parsers.py:347: UserWarning: The field `param-dtype` is annotated with type ``, but the default value `` has type ``. We'll try to handle this gracefully, but it may cause unexpected behavior.\r\n warnings.warn(message)\r\n/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/tyro/_parsers.py:347: UserWarning: The field `dtype` is annotated with type ``, but the default value `` has type ``. We'll try to handle this gracefully, but it may cause unexpected behavior.\r\n warnings.warn(message)\r\n",,terminal_output +226,344662,"TERMINAL",0,0,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/tyro/_parsers.py:347: UserWarning: The field `param-dtype` is annotated with type ``, but the default value `` has type ``. We'll try to handle this gracefully, but it may cause unexpected behavior.\r\n warnings.warn(message)\r\n/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/tyro/_parsers.py:347: UserWarning: The field `dtype` is annotated with type ``, but the default value `` has type ``. We'll try to handle this gracefully, but it may cause unexpected behavior.\r\n warnings.warn(message)\r\n/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/tyro/_parsers.py:347: UserWarning: The field `param-dtype` is annotated with type ``, but the default value `` has type ``. We'll try to handle this gracefully, but it may cause unexpected behavior.\r\n warnings.warn(message)\r\n/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/tyro/_parsers.py:347: UserWarning: The field `dtype` is annotated with type ``, but the default value `` has type ``. We'll try to handle this gracefully, but it may cause unexpected behavior.\r\n warnings.warn(message)\r\n/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/tyro/_parsers.py:347: UserWarning: The field `param-dtype` is annotated with type ``, but the default value `` has type ``. We'll try to handle this gracefully, but it may cause unexpected behavior.\r\n warnings.warn(message)\r\n/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/tyro/_parsers.py:347: UserWarning: The field `dtype` is annotated with type ``, but the default value `` has type ``. We'll try to handle this gracefully, but it may cause unexpected behavior.\r\n warnings.warn(message)\r\n/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/tyro/_parsers.py:347: UserWarning: The field `param-dtype` is annotated with type ``, but the default value `` has type ``. We'll try to handle this gracefully, but it may cause unexpected behavior.\r\n warnings.warn(message)\r\n/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/tyro/_parsers.py:347: UserWarning: The field `dtype` is annotated with type ``, but the default value `` has type ``. 
We'll try to handle this gracefully, but it may cause unexpected behavior.\r\n warnings.warn(message)\r\n/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/tyro/_parsers.py:347: UserWarning: The field `param-dtype` is annotated with type ``, but the default value `` has type ``. We'll try to handle this gracefully, but it may cause unexpected behavior.\r\n warnings.warn(message)\r\n/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/tyro/_parsers.py:347: UserWarning: The field `dtype` is annotated with type ``, but the default value `` has type ``. We'll try to handle this gracefully, but it may cause unexpected behavior.\r\n warnings.warn(message)\r\n/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/tyro/_parsers.py:347: UserWarning: The field `param-dtype` is annotated with type ``, but the default value `` has type ``. We'll try to handle this gracefully, but it may cause unexpected behavior.\r\n warnings.warn(message)\r\n/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/tyro/_parsers.py:347: UserWarning: The field `dtype` is annotated with type ``, but the default value `` has type ``. We'll try to handle this gracefully, but it may cause unexpected behavior.\r\n warnings.warn(message)\r\n/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/tyro/_parsers.py:347: UserWarning: The field `param-dtype` is annotated with type ``, but the default value `` has type ``. We'll try to handle this gracefully, but it may cause unexpected behavior.\r\n warnings.warn(message)\r\n/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/tyro/_parsers.py:347: UserWarning: The field `param-dtype` is annotated with type ``, but the default value `` has type ``. We'll try to handle this gracefully, but it may cause unexpected behavior.\r\n warnings.warn(message)\r\n/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/tyro/_parsers.py:347: UserWarning: The field `dtype` is annotated with type ``, but the default value `` has type ``. We'll try to handle this gracefully, but it may cause unexpected behavior.\r\n warnings.warn(message)\r\n/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/tyro/_parsers.py:347: UserWarning: The field `dtype` is annotated with type ``, but the default value `` has type ``. We'll try to handle this gracefully, but it may cause unexpected behavior.\r\n warnings.warn(message)\r\n/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/tyro/_parsers.py:347: UserWarning: The field `param-dtype` is annotated with type ``, but the default value `` has type ``. We'll try to handle this gracefully, but it may cause unexpected behavior.\r\n warnings.warn(message)\r\n/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/tyro/_parsers.py:347: UserWarning: The field `dtype` is annotated with type ``, but the default value `` has type ``. 
We'll try to handle this gracefully, but it may cause unexpected behavior.\r\n warnings.warn(message)\r\n/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/tyro/_parsers.py:347: UserWarning: The field `param-dtype` is annotated with type ``, but the default value `` has type ``. We'll try to handle this gracefully, but it may cause unexpected behavior.\r\n warnings.warn(message)\r\n/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/tyro/_parsers.py:347: UserWarning: The field `dtype` is annotated with type ``, but the default value `` has type ``. We'll try to handle this gracefully, but it may cause unexpected behavior.\r\n warnings.warn(message)\r\n/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/tyro/_parsers.py:347: UserWarning: The field `param-dtype` is annotated with type ``, but the default value `` has type ``. We'll try to handle this gracefully, but it may cause unexpected behavior.\r\n warnings.warn(message)\r\n/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/tyro/_parsers.py:347: UserWarning: The field `dtype` is annotated with type ``, but the default value `` has type ``. We'll try to handle this gracefully, but it may cause unexpected behavior.\r\n warnings.warn(message)\r\n/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/tyro/_parsers.py:347: UserWarning: The field `param-dtype` is annotated with type ``, but the default value `` has type ``. We'll try to handle this gracefully, but it may cause unexpected behavior.\r\n warnings.warn(message)\r\n/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/tyro/_parsers.py:347: UserWarning: The field `param-dtype` is annotated with type ``, but the default value `` has type ``. We'll try to handle this gracefully, but it may cause unexpected behavior.\r\n warnings.warn(message)\r\n/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/tyro/_parsers.py:347: UserWarning: The field `dtype` is annotated with type ``, but the default value `` has type ``. We'll try to handle this gracefully, but it may cause unexpected behavior.\r\n warnings.warn(message)\r\n/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/tyro/_parsers.py:347: UserWarning: The field `dtype` is annotated with type ``, but the default value `` has type ``. We'll try to handle this gracefully, but it may cause unexpected behavior.\r\n warnings.warn(message)\r\n/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/tyro/_parsers.py:347: UserWarning: The field `param-dtype` is annotated with type ``, but the default value `` has type ``. We'll try to handle this gracefully, but it may cause unexpected behavior.\r\n warnings.warn(message)\r\n/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/tyro/_parsers.py:347: UserWarning: The field `dtype` is annotated with type ``, but the default value `` has type ``. 
We'll try to handle this gracefully, but it may cause unexpected behavior.\r\n warnings.warn(message)\r\n",,terminal_output +227,344718,"TERMINAL",0,0,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/tyro/_parsers.py:347: UserWarning: The field `param-dtype` is annotated with type ``, but the default value `` has type ``. We'll try to handle this gracefully, but it may cause unexpected behavior.\r\n warnings.warn(message)\r\n/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/tyro/_parsers.py:347: UserWarning: The field `dtype` is annotated with type ``, but the default value `` has type ``. We'll try to handle this gracefully, but it may cause unexpected behavior.\r\n warnings.warn(message)\r\n/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/tyro/_parsers.py:347: UserWarning: The field `param-dtype` is annotated with type ``, but the default value `` has type ``. We'll try to handle this gracefully, but it may cause unexpected behavior.\r\n warnings.warn(message)\r\n/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/tyro/_parsers.py:347: UserWarning: The field `dtype` is annotated with type ``, but the default value `` has type ``. We'll try to handle this gracefully, but it may cause unexpected behavior.\r\n warnings.warn(message)\r\n",,terminal_output +228,344823,"TERMINAL",0,0,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/tyro/_parsers.py:347: UserWarning: The field `param-dtype` is annotated with type ``, but the default value `` has type ``. We'll try to handle this gracefully, but it may cause unexpected behavior.\r\n warnings.warn(message)\r\n/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/tyro/_parsers.py:347: UserWarning: The field `dtype` is annotated with type ``, but the default value `` has type ``. We'll try to handle this gracefully, but it may cause unexpected behavior.\r\n warnings.warn(message)\r\n/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/tyro/_parsers.py:347: UserWarning: The field `param-dtype` is annotated with type ``, but the default value `` has type ``. We'll try to handle this gracefully, but it may cause unexpected behavior.\r\n warnings.warn(message)\r\n/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/tyro/_parsers.py:347: UserWarning: The field `dtype` is annotated with type ``, but the default value `` has type ``. We'll try to handle this gracefully, but it may cause unexpected behavior.\r\n warnings.warn(message)\r\n",,terminal_output +229,345015,"TERMINAL",0,0,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/tyro/_parsers.py:347: UserWarning: The field `param-dtype` is annotated with type ``, but the default value `` has type ``. We'll try to handle this gracefully, but it may cause unexpected behavior.\r\n warnings.warn(message)\r\n/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/tyro/_parsers.py:347: UserWarning: The field `dtype` is annotated with type ``, but the default value `` has type ``. 
We'll try to handle this gracefully, but it may cause unexpected behavior.\r\n warnings.warn(message)\r\n/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/tyro/_parsers.py:347: UserWarning: The field `param-dtype` is annotated with type ``, but the default value `` has type ``. We'll try to handle this gracefully, but it may cause unexpected behavior.\r\n warnings.warn(message)\r\n/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/tyro/_parsers.py:347: UserWarning: The field `dtype` is annotated with type ``, but the default value `` has type ``. We'll try to handle this gracefully, but it may cause unexpected behavior.\r\n warnings.warn(message)\r\n/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/tyro/_parsers.py:347: UserWarning: The field `param-dtype` is annotated with type ``, but the default value `` has type ``. We'll try to handle this gracefully, but it may cause unexpected behavior.\r\n warnings.warn(message)\r\n/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/tyro/_parsers.py:347: UserWarning: The field `dtype` is annotated with type ``, but the default value `` has type ``. We'll try to handle this gracefully, but it may cause unexpected behavior.\r\n warnings.warn(message)\r\n/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/tyro/_parsers.py:347: UserWarning: The field `param-dtype` is annotated with type ``, but the default value `` has type ``. We'll try to handle this gracefully, but it may cause unexpected behavior.\r\n warnings.warn(message)\r\n/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/tyro/_parsers.py:347: UserWarning: The field `dtype` is annotated with type ``, but the default value `` has type ``. We'll try to handle this gracefully, but it may cause unexpected behavior.\r\n warnings.warn(message)\r\n/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/tyro/_parsers.py:347: UserWarning: The field `param-dtype` is annotated with type ``, but the default value `` has type ``. We'll try to handle this gracefully, but it may cause unexpected behavior.\r\n warnings.warn(message)\r\n/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/tyro/_parsers.py:347: UserWarning: The field `dtype` is annotated with type ``, but the default value `` has type ``. We'll try to handle this gracefully, but it may cause unexpected behavior.\r\n warnings.warn(message)\r\n/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/tyro/_parsers.py:347: UserWarning: The field `param-dtype` is annotated with type ``, but the default value `` has type ``. We'll try to handle this gracefully, but it may cause unexpected behavior.\r\n warnings.warn(message)\r\n/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/tyro/_parsers.py:347: UserWarning: The field `param-dtype` is annotated with type ``, but the default value `` has type ``. 
We'll try to handle this gracefully, but it may cause unexpected behavior.\r\n warnings.warn(message)\r\n/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/tyro/_parsers.py:347: UserWarning: The field `dtype` is annotated with type ``, but the default value `` has type ``. We'll try to handle this gracefully, but it may cause unexpected behavior.\r\n warnings.warn(message)\r\n/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/tyro/_parsers.py:347: UserWarning: The field `dtype` is annotated with type ``, but the default value `` has type ``. We'll try to handle this gracefully, but it may cause unexpected behavior.\r\n warnings.warn(message)\r\n",,terminal_output +230,345069,"TERMINAL",0,0,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/tyro/_parsers.py:347: UserWarning: The field `param-dtype` is annotated with type ``, but the default value `` has type ``. We'll try to handle this gracefully, but it may cause unexpected behavior.\r\n warnings.warn(message)\r\n/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/tyro/_parsers.py:347: UserWarning: The field `dtype` is annotated with type ``, but the default value `` has type ``. We'll try to handle this gracefully, but it may cause unexpected behavior.\r\n warnings.warn(message)\r\n/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/tyro/_parsers.py:347: UserWarning: The field `param-dtype` is annotated with type ``, but the default value `` has type ``. We'll try to handle this gracefully, but it may cause unexpected behavior.\r\n warnings.warn(message)\r\n/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/tyro/_parsers.py:347: UserWarning: The field `dtype` is annotated with type ``, but the default value `` has type ``. We'll try to handle this gracefully, but it may cause unexpected behavior.\r\n warnings.warn(message)\r\n",,terminal_output +231,345179,"TERMINAL",0,0,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/tyro/_parsers.py:347: UserWarning: The field `param-dtype` is annotated with type ``, but the default value `` has type ``. We'll try to handle this gracefully, but it may cause unexpected behavior.\r\n warnings.warn(message)\r\n/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/tyro/_parsers.py:347: UserWarning: The field `dtype` is annotated with type ``, but the default value `` has type ``. We'll try to handle this gracefully, but it may cause unexpected behavior.\r\n warnings.warn(message)\r\n",,terminal_output +232,345298,"TERMINAL",0,0,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/tyro/_parsers.py:347: UserWarning: The field `param-dtype` is annotated with type ``, but the default value `` has type ``. We'll try to handle this gracefully, but it may cause unexpected behavior.\r\n warnings.warn(message)\r\n/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/tyro/_parsers.py:347: UserWarning: The field `dtype` is annotated with type ``, but the default value `` has type ``. 
We'll try to handle this gracefully, but it may cause unexpected behavior.\r\n warnings.warn(message)\r\n/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/tyro/_parsers.py:347: UserWarning: The field `param-dtype` is annotated with type ``, but the default value `` has type ``. We'll try to handle this gracefully, but it may cause unexpected behavior.\r\n warnings.warn(message)\r\n/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/tyro/_parsers.py:347: UserWarning: The field `dtype` is annotated with type ``, but the default value `` has type ``. We'll try to handle this gracefully, but it may cause unexpected behavior.\r\n warnings.warn(message)\r\n",,terminal_output +233,345360,"TERMINAL",0,0,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/tyro/_parsers.py:347: UserWarning: The field `param-dtype` is annotated with type ``, but the default value `` has type ``. We'll try to handle this gracefully, but it may cause unexpected behavior.\r\n warnings.warn(message)\r\n/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/tyro/_parsers.py:347: UserWarning: The field `dtype` is annotated with type ``, but the default value `` has type ``. We'll try to handle this gracefully, but it may cause unexpected behavior.\r\n warnings.warn(message)\r\n/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/tyro/_parsers.py:347: UserWarning: The field `param-dtype` is annotated with type ``, but the default value `` has type ``. We'll try to handle this gracefully, but it may cause unexpected behavior.\r\n warnings.warn(message)\r\n/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/tyro/_parsers.py:347: UserWarning: The field `dtype` is annotated with type ``, but the default value `` has type ``. We'll try to handle this gracefully, but it may cause unexpected behavior.\r\n warnings.warn(message)\r\n",,terminal_output +234,345413,"TERMINAL",0,0,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/tyro/_parsers.py:347: UserWarning: The field `param-dtype` is annotated with type ``, but the default value `` has type ``. We'll try to handle this gracefully, but it may cause unexpected behavior.\r\n warnings.warn(message)\r\n/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/tyro/_parsers.py:347: UserWarning: The field `dtype` is annotated with type ``, but the default value `` has type ``. We'll try to handle this gracefully, but it may cause unexpected behavior.\r\n warnings.warn(message)\r\n",,terminal_output +235,345470,"TERMINAL",0,0,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/tyro/_parsers.py:347: UserWarning: The field `param-dtype` is annotated with type ``, but the default value `` has type ``. We'll try to handle this gracefully, but it may cause unexpected behavior.\r\n warnings.warn(message)\r\n/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/tyro/_parsers.py:347: UserWarning: The field `dtype` is annotated with type ``, but the default value `` has type ``. 
We'll try to handle this gracefully, but it may cause unexpected behavior.\r\n warnings.warn(message)\r\n",,terminal_output +236,345625,"TERMINAL",0,0,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/tyro/_parsers.py:347: UserWarning: The field `param-dtype` is annotated with type ``, but the default value `` has type ``. We'll try to handle this gracefully, but it may cause unexpected behavior.\r\n warnings.warn(message)\r\n/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/tyro/_parsers.py:347: UserWarning: The field `dtype` is annotated with type ``, but the default value `` has type ``. We'll try to handle this gracefully, but it may cause unexpected behavior.\r\n warnings.warn(message)\r\n/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/tyro/_parsers.py:347: UserWarning: The field `param-dtype` is annotated with type ``, but the default value `` has type ``. We'll try to handle this gracefully, but it may cause unexpected behavior.\r\n warnings.warn(message)\r\n/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/tyro/_parsers.py:347: UserWarning: The field `dtype` is annotated with type ``, but the default value `` has type ``. We'll try to handle this gracefully, but it may cause unexpected behavior.\r\n warnings.warn(message)\r\n/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/tyro/_parsers.py:347: UserWarning: The field `param-dtype` is annotated with type ``, but the default value `` has type ``. We'll try to handle this gracefully, but it may cause unexpected behavior.\r\n warnings.warn(message)\r\n/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/tyro/_parsers.py:347: UserWarning: The field `dtype` is annotated with type ``, but the default value `` has type ``. We'll try to handle this gracefully, but it may cause unexpected behavior.\r\n warnings.warn(message)\r\n/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/tyro/_parsers.py:347: UserWarning: The field `param-dtype` is annotated with type ``, but the default value `` has type ``. We'll try to handle this gracefully, but it may cause unexpected behavior.\r\n warnings.warn(message)\r\n/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/tyro/_parsers.py:347: UserWarning: The field `dtype` is annotated with type ``, but the default value `` has type ``. We'll try to handle this gracefully, but it may cause unexpected behavior.\r\n warnings.warn(message)\r\n",,terminal_output +237,345917,"TERMINAL",0,0,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/tyro/_parsers.py:347: UserWarning: The field `param-dtype` is annotated with type ``, but the default value `` has type ``. We'll try to handle this gracefully, but it may cause unexpected behavior.\r\n warnings.warn(message)\r\n/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/tyro/_parsers.py:347: UserWarning: The field `dtype` is annotated with type ``, but the default value `` has type ``. 
We'll try to handle this gracefully, but it may cause unexpected behavior.\r\n warnings.warn(message)\r\n/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/tyro/_parsers.py:347: UserWarning: The field `param-dtype` is annotated with type ``, but the default value `` has type ``. We'll try to handle this gracefully, but it may cause unexpected behavior.\r\n warnings.warn(message)\r\n/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/tyro/_parsers.py:347: UserWarning: The field `dtype` is annotated with type ``, but the default value `` has type ``. We'll try to handle this gracefully, but it may cause unexpected behavior.\r\n warnings.warn(message)\r\n/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/tyro/_parsers.py:347: UserWarning: The field `param-dtype` is annotated with type ``, but the default value `` has type ``. We'll try to handle this gracefully, but it may cause unexpected behavior.\r\n warnings.warn(message)\r\n/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/tyro/_parsers.py:347: UserWarning: The field `dtype` is annotated with type ``, but the default value `` has type ``. We'll try to handle this gracefully, but it may cause unexpected behavior.\r\n warnings.warn(message)\r\n",,terminal_output +238,346121,"TERMINAL",0,0,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/tyro/_parsers.py:347: UserWarning: The field `param-dtype` is annotated with type ``, but the default value `` has type ``. We'll try to handle this gracefully, but it may cause unexpected behavior.\r\n warnings.warn(message)\r\n/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/tyro/_parsers.py:347: UserWarning: The field `dtype` is annotated with type ``, but the default value `` has type ``. We'll try to handle this gracefully, but it may cause unexpected behavior.\r\n warnings.warn(message)\r\n",,terminal_output +239,346293,"TERMINAL",0,0,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/tyro/_parsers.py:347: UserWarning: The field `param-dtype` is annotated with type ``, but the default value `` has type ``. We'll try to handle this gracefully, but it may cause unexpected behavior.\r\n warnings.warn(message)\r\n/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/tyro/_parsers.py:347: UserWarning: The field `dtype` is annotated with type ``, but the default value `` has type ``. We'll try to handle this gracefully, but it may cause unexpected behavior.\r\n warnings.warn(message)\r\n",,terminal_output +240,346400,"TERMINAL",0,0,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/tyro/_parsers.py:347: UserWarning: The field `param-dtype` is annotated with type ``, but the default value `` has type ``. We'll try to handle this gracefully, but it may cause unexpected behavior.\r\n warnings.warn(message)\r\n/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/tyro/_parsers.py:347: UserWarning: The field `dtype` is annotated with type ``, but the default value `` has type ``. 
We'll try to handle this gracefully, but it may cause unexpected behavior.\r\n warnings.warn(message)\r\n/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/tyro/_parsers.py:347: UserWarning: The field `param-dtype` is annotated with type ``, but the default value `` has type ``. We'll try to handle this gracefully, but it may cause unexpected behavior.\r\n warnings.warn(message)\r\n/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/tyro/_parsers.py:347: UserWarning: The field `dtype` is annotated with type ``, but the default value `` has type ``. We'll try to handle this gracefully, but it may cause unexpected behavior.\r\n warnings.warn(message)\r\n",,terminal_output +241,346462,"TERMINAL",0,0,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/tyro/_parsers.py:347: UserWarning: The field `param-dtype` is annotated with type ``, but the default value `` has type ``. We'll try to handle this gracefully, but it may cause unexpected behavior.\r\n warnings.warn(message)\r\n/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/tyro/_parsers.py:347: UserWarning: The field `dtype` is annotated with type ``, but the default value `` has type ``. We'll try to handle this gracefully, but it may cause unexpected behavior.\r\n warnings.warn(message)\r\n",,terminal_output +242,346627,"TERMINAL",0,0,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/tyro/_parsers.py:347: UserWarning: The field `param-dtype` is annotated with type ``, but the default value `` has type ``. We'll try to handle this gracefully, but it may cause unexpected behavior.\r\n warnings.warn(message)\r\n/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/tyro/_parsers.py:347: UserWarning: The field `dtype` is annotated with type ``, but the default value `` has type ``. We'll try to handle this gracefully, but it may cause unexpected behavior.\r\n warnings.warn(message)\r\n",,terminal_output +243,347068,"TERMINAL",0,0,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/tyro/_parsers.py:347: UserWarning: The field `param-dtype` is annotated with type ``, but the default value `` has type ``. We'll try to handle this gracefully, but it may cause unexpected behavior.\r\n warnings.warn(message)\r\n/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/tyro/_parsers.py:347: UserWarning: The field `dtype` is annotated with type ``, but the default value `` has type ``. We'll try to handle this gracefully, but it may cause unexpected behavior.\r\n warnings.warn(message)\r\n",,terminal_output +244,357579,"TERMINAL",0,0,"jax.errors.SimplifiedTraceback: For simplicity, JAX has removed its internal frames from the traceback of the following exception. 
Set JAX_TRACEBACK_FILTERING=off to include these.\r\n\r\nThe above exception was the direct cause of the following exception:\r\n\r\nTraceback (most recent call last):\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py"", line 327, in \r\n train_state, loss, recon, metrics = train_step(train_state, inputs)\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py"", line 133, in train_step\r\n (loss, (recon, metrics)), grads = grad_fn(state.params, state, inputs)\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py"", line 99, in dynamics_loss_fn\r\n ce_loss = (mask * ce_loss).sum() / mask.sum()\r\nTypeError: unsupported operand type(s) for *: 'NoneType' and 'JVPTracer'\r\n",,terminal_output +245,359327,"TERMINAL",0,0,"jax.errors.SimplifiedTraceback: For simplicity, JAX has removed its internal frames from the traceback of the following exception. Set JAX_TRACEBACK_FILTERING=off to include these.\r\n\r\nThe above exception was the direct cause of the following exception:\r\n\r\nTraceback (most recent call last):\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py"", line 327, in \r\n train_state, loss, recon, metrics = train_step(train_state, inputs)\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py"", line 133, in train_step\r\n (loss, (recon, metrics)), grads = grad_fn(state.params, state, inputs)\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py"", line 99, in dynamics_loss_fn\r\n ce_loss = (mask * ce_loss).sum() / mask.sum()\r\nTypeError: unsupported operand type(s) for *: 'NoneType' and 'JVPTracer'\r\n",,terminal_output +246,359494,"TERMINAL",0,0,"Filtering out episode with length 14, which is shorter than the requested sequence length 16.\r\n",,terminal_output +247,359836,"TERMINAL",0,0,"jax.errors.SimplifiedTraceback: For simplicity, JAX has removed its internal frames from the traceback of the following exception. 
Set JAX_TRACEBACK_FILTERING=off to include these.\r\n\r\nThe above exception was the direct cause of the following exception:\r\n\r\nTraceback (most recent call last):\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py"", line 327, in \r\n train_state, loss, recon, metrics = train_step(train_state, inputs)\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py"", line 133, in train_step\r\n (loss, (recon, metrics)), grads = grad_fn(state.params, state, inputs)\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py"", line 99, in dynamics_loss_fn\r\n ce_loss = (mask * ce_loss).sum() / mask.sum()\r\nTypeError: unsupported operand type(s) for *: 'NoneType' and 'JVPTracer'\r\n",,terminal_output +248,360051,"TERMINAL",0,0,"Filtering out episode with length 8, which is shorter than the requested sequence length 16.\r\n",,terminal_output +249,360159,"TERMINAL",0,0,"Filtering out episode with length 1, which is shorter than the requested sequence length 16.\r\n",,terminal_output +250,360218,"TERMINAL",0,0,"Filtering out episode with length 1, which is shorter than the requested sequence length 16.\r\n",,terminal_output +251,360449,"TERMINAL",0,0,"Filtering out episode with length 1, which is shorter than the requested sequence length 16.\r\nFiltering out episode with length 7, which is shorter than the requested sequence length 16.\r\n",,terminal_output +252,360508,"TERMINAL",0,0,"jax.errors.SimplifiedTraceback: For simplicity, JAX has removed its internal frames from the traceback of the following exception. Set JAX_TRACEBACK_FILTERING=off to include these.\r\n\r\nThe above exception was the direct cause of the following exception:\r\n\r\nTraceback (most recent call last):\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py"", line 327, in \r\n train_state, loss, recon, metrics = train_step(train_state, inputs)\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py"", line 133, in train_step\r\n (loss, (recon, metrics)), grads = grad_fn(state.params, state, inputs)\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py"", line 99, in dynamics_loss_fn\r\n ce_loss = (mask * ce_loss).sum() / mask.sum()\r\nTypeError: unsupported operand type(s) for *: 'NoneType' and 'JVPTracer'\r\n",,terminal_output +253,362725,"TERMINAL",0,0,"Filtering out episode with length 1, which is shorter than the requested sequence length 16.\r\n",,terminal_output +254,362787,"TERMINAL",0,0,"jax.errors.SimplifiedTraceback: For simplicity, JAX has removed its internal frames from the traceback of the following exception. 
Set JAX_TRACEBACK_FILTERING=off to include these.\r\n\r\nThe above exception was the direct cause of the following exception:\r\n\r\nTraceback (most recent call last):\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py"", line 327, in \r\n train_state, loss, recon, metrics = train_step(train_state, inputs)\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py"", line 133, in train_step\r\n (loss, (recon, metrics)), grads = grad_fn(state.params, state, inputs)\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py"", line 99, in dynamics_loss_fn\r\n ce_loss = (mask * ce_loss).sum() / mask.sum()\r\nTypeError: unsupported operand type(s) for *: 'NoneType' and 'JVPTracer'\r\n",,terminal_output +255,363696,"TERMINAL",0,0,"jax.errors.SimplifiedTraceback: For simplicity, JAX has removed its internal frames from the traceback of the following exception. Set JAX_TRACEBACK_FILTERING=off to include these.\r\n\r\nThe above exception was the direct cause of the following exception:\r\n\r\nTraceback (most recent call last):\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py"", line 327, in \r\n train_state, loss, recon, metrics = train_step(train_state, inputs)\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py"", line 133, in train_step\r\n (loss, (recon, metrics)), grads = grad_fn(state.params, state, inputs)\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py"", line 99, in dynamics_loss_fn\r\n ce_loss = (mask * ce_loss).sum() / mask.sum()\r\nTypeError: unsupported operand type(s) for *: 'NoneType' and 'JVPTracer'\r\n",,terminal_output +256,363756,"TERMINAL",0,0,"Filtering out episode with length 10, which is shorter than the requested sequence length 16.\r\n",,terminal_output +257,363863,"TERMINAL",0,0,"Filtering out episode with length 1, which is shorter than the requested sequence length 16.\r\n",,terminal_output +258,363970,"TERMINAL",0,0,"Filtering out episode with length 14, which is shorter than the requested sequence length 16.\r\nFiltering out episode with length 1, which is shorter than the requested sequence length 16.\r\n",,terminal_output +259,364078,"TERMINAL",0,0,"Filtering out episode with length 1, which is shorter than the requested sequence length 16.\r\n",,terminal_output +260,366219,"TERMINAL",0,0,"Filtering out episode with length 7, which is shorter than the requested sequence length 16.\r\n",,terminal_output +261,367330,"TERMINAL",0,0,"wandb: \r\nwandb: 🚀 View run dynamics-debug-run-debug-mihir at: https://wandb.ai/instant-uv/jafar/runs/24srqm6z\r\nwandb: Find logs at: ../../../../../hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/wandb/run-20250716_145328-24srqm6z/logs\r\n",,terminal_output +262,368170,"TERMINAL",0,0,"jax.errors.SimplifiedTraceback: For simplicity, JAX has removed its internal frames from the traceback of the following exception. 
Set JAX_TRACEBACK_FILTERING=off to include these.\r\n\r\nThe above exception was the direct cause of the following exception:\r\n\r\nTraceback (most recent call last):\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py"", line 327, in \r\n train_state, loss, recon, metrics = train_step(train_state, inputs)\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py"", line 133, in train_step\r\n (loss, (recon, metrics)), grads = grad_fn(state.params, state, inputs)\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py"", line 99, in dynamics_loss_fn\r\n ce_loss = (mask * ce_loss).sum() / mask.sum()\r\nTypeError: unsupported operand type(s) for *: 'NoneType' and 'JVPTracer'\r\n",,terminal_output +263,368513,"TERMINAL",0,0,"jax.errors.SimplifiedTraceback: For simplicity, JAX has removed its internal frames from the traceback of the following exception. Set JAX_TRACEBACK_FILTERING=off to include these.\r\n\r\nThe above exception was the direct cause of the following exception:\r\n\r\nTraceback (most recent call last):\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py"", line 327, in \r\n train_state, loss, recon, metrics = train_step(train_state, inputs)\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py"", line 133, in train_step\r\n (loss, (recon, metrics)), grads = grad_fn(state.params, state, inputs)\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py"", line 99, in dynamics_loss_fn\r\n ce_loss = (mask * ce_loss).sum() / mask.sum()\r\nTypeError: unsupported operand type(s) for *: 'NoneType' and 'JVPTracer'\r\n",,terminal_output +264,370792,"TERMINAL",0,0,"Filtering out episode with length 1, which is shorter than the requested sequence length 16.\r\n",,terminal_output +265,372603,"TERMINAL",0,0,"/home/hk-project-p0023960/tum_cte0515/.local/share/uv/python/cpython-3.10.18-linux-x86_64-gnu/lib/python3.10/multiprocessing/resource_tracker.py:224: UserWarning: resource_tracker: There appear to be 11 leaked shared_memory objects to clean up at shutdown\r\n warnings.warn('resource_tracker: There appear to be %d '\r\n/home/hk-project-p0023960/tum_cte0515/.local/share/uv/python/cpython-3.10.18-linux-x86_64-gnu/lib/python3.10/multiprocessing/resource_tracker.py:224: UserWarning: resource_tracker: There appear to be 10 leaked shared_memory objects to clean up at shutdown\r\n warnings.warn('resource_tracker: There appear to be %d '\r\n/home/hk-project-p0023960/tum_cte0515/.local/share/uv/python/cpython-3.10.18-linux-x86_64-gnu/lib/python3.10/multiprocessing/resource_tracker.py:224: UserWarning: resource_tracker: There appear to be 11 leaked shared_memory objects to clean up at shutdown\r\n warnings.warn('resource_tracker: There appear to be %d '\r\n/home/hk-project-p0023960/tum_cte0515/.local/share/uv/python/cpython-3.10.18-linux-x86_64-gnu/lib/python3.10/multiprocessing/resource_tracker.py:224: UserWarning: resource_tracker: There appear to be 11 leaked shared_memory objects to clean up at shutdown\r\n warnings.warn('resource_tracker: There appear to be %d '\r\n",,terminal_output +266,372725,"TERMINAL",0,0,"/home/hk-project-p0023960/tum_cte0515/.local/share/uv/python/cpython-3.10.18-linux-x86_64-gnu/lib/python3.10/multiprocessing/resource_tracker.py:224: UserWarning: resource_tracker: There appear to be 10 leaked shared_memory objects to clean up at 
shutdown\r\n warnings.warn('resource_tracker: There appear to be %d '\r\n/home/hk-project-p0023960/tum_cte0515/.local/share/uv/python/cpython-3.10.18-linux-x86_64-gnu/lib/python3.10/multiprocessing/resource_tracker.py:224: UserWarning: resource_tracker: There appear to be 11 leaked shared_memory objects to clean up at shutdown\r\n warnings.warn('resource_tracker: There appear to be %d '\r\n/home/hk-project-p0023960/tum_cte0515/.local/share/uv/python/cpython-3.10.18-linux-x86_64-gnu/lib/python3.10/multiprocessing/resource_tracker.py:224: UserWarning: resource_tracker: There appear to be 10 leaked shared_memory objects to clean up at shutdown\r\n warnings.warn('resource_tracker: There appear to be %d '\r\n/home/hk-project-p0023960/tum_cte0515/.local/share/uv/python/cpython-3.10.18-linux-x86_64-gnu/lib/python3.10/multiprocessing/resource_tracker.py:224: UserWarning: resource_tracker: There appear to be 11 leaked shared_memory objects to clean up at shutdown\r\n warnings.warn('resource_tracker: There appear to be %d '\r\n",,terminal_output +267,373258,"TERMINAL",0,0,"srun: error: hkn0711: task 6: Exited with exit code 1\r\nsrun: error: hkn0710: tasks 1-2: Exited with exit code 1\r\n",,terminal_output +268,373379,"TERMINAL",0,0,"srun: error: hkn0711: tasks 4-5: Exited with exit code 1\r\nsrun: error: hkn0710: task 0: Exited with exit code 1\r\n",,terminal_output +269,373578,"TERMINAL",0,0,"srun: error: hkn0711: task 7: Exited with exit code 1\r\n",,terminal_output +270,373654,"TERMINAL",0,0,"srun: error: hkn0710: task 3: Exited with exit code 1\r\n]0;tum_cte0515@hkn0710:~/Projects/jafar[?2004h(jafar) [tum_cte0515@hkn0710 jafar]$ ",,terminal_output +271,461670,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",0,0,"from dataclasses import dataclass, field\nimport os\n\nimport einops\nfrom flax.training.train_state import TrainState\nfrom jax.sharding import Mesh, PartitionSpec, NamedSharding\nfrom jax.experimental.mesh_utils import create_device_mesh\nimport optax\nimport orbax.checkpoint as ocp\nimport numpy as np\nimport dm_pix as pix\nimport jax\nimport jax.numpy as jnp\nimport tyro\nimport wandb\nimport grain\n\nfrom genie import Genie, restore_genie_components\nfrom utils.dataloader import get_dataloader\nfrom utils.lr_utils import get_lr_schedule\nfrom utils.parameter_utils import count_parameters_by_component\n\n@dataclass\nclass Args:\n # Experiment\n num_steps: int = 200_000\n seed: int = 0\n seq_len: int = 16\n image_channels: int = 3\n image_height: int = 90\n image_width: int = 160\n data_dir: str = """"\n save_ckpt: bool = False\n restore_ckpt: bool = False\n # Optimization\n batch_size: int = 36\n init_lr: float = 0.0\n max_lr: float = 3e-5\n decay_end: float = 0.0\n wsd_decay_steps: int = 10000 # NOTE: wsd_decay_steps will only be used when using a wsd-schedule\n warmup_steps: int = 5000\n lr_schedule : str = ""wsd"" # supported options: wsd, cos\n # Tokenizer\n tokenizer_dim: int = 512\n latent_patch_dim: int = 32\n num_patch_latents: int = 1024\n patch_size: int = 4\n tokenizer_num_blocks: int = 8\n tokenizer_num_heads: int = 8\n tokenizer_checkpoint: str = """"\n # LAM\n lam_dim: int = 512\n latent_action_dim: int = 32\n num_latent_actions: int = 6\n lam_patch_size: int = 16\n lam_num_blocks: int = 8\n lam_num_heads: int = 8\n lam_checkpoint: str = """"\n # Dynamics\n dyna_dim: int = 512\n dyna_num_blocks: int = 12\n dyna_num_heads: int = 8\n dropout: float = 0.0\n mask_limit: float = 0.5\n param_dtype: jnp.dtype = jnp.float32\n dtype: 
jnp.dtype = jnp.bfloat16\n # Logging\n log: bool = False\n entity: str = """"\n project: str = """"\n name: str = ""train_dynamics""\n tags: list[str] = field(default_factory=lambda: [""dynamics""])\n log_interval: int = 5\n log_image_interval: int = 250\n ckpt_dir: str = """"\n log_checkpoint_interval: int = 25000\n log_checkpoint_keep_period: int = 20000\n log_gradients: bool = False\n wandb_id: str = """"\n\n\nargs = tyro.cli(Args)\n\n\ndef dynamics_loss_fn(params, state, inputs):\n """"""Compute masked dynamics loss""""""\n inputs[""videos""] = inputs[""videos""].astype(args.dtype) / 255.0\n outputs = state.apply_fn(\n params,\n inputs,\n training=True,\n rngs={""params"": inputs[""rng""], ""dropout"": inputs[""dropout_rng""]},\n )\n mask = outputs[""mask""]\n outputs[""token_logits""] = outputs[""token_logits""].astype(jnp.float32)\n ce_loss = optax.softmax_cross_entropy_with_integer_labels(\n outputs[""token_logits""], outputs[""video_tokens""]\n )\n ce_loss = (mask * ce_loss).sum() / mask.sum()\n acc = outputs[""token_logits""].argmax(-1) == outputs[""video_tokens""]\n acc = (mask * acc).sum() / mask.sum()\n select_probs = jax.nn.softmax(outputs[""token_logits""])\n gt = inputs[""videos""].clip(0, 1).reshape(-1, *inputs[""videos""].shape[2:])\n recon = outputs[""recon""].clip(0, 1).reshape(-1, *outputs[""recon""].shape[2:])\n psnr = pix.psnr(gt, recon).mean() # type: ignore\n ssim = pix.ssim(gt, recon).mean() # type: ignore\n _, index_counts_lam = jnp.unique_counts(\n jnp.ravel(outputs[""lam_indices""]), size=args.num_latent_actions, fill_value=0\n )\n _, index_counts_tokenizer = jnp.unique_counts(\n jnp.ravel(outputs[""video_tokens""]), size=args.num_patch_latents, fill_value=0\n )\n codebook_usage_lam = (index_counts_lam != 0).mean()\n codebook_usage_tokenizer = (index_counts_tokenizer != 0).mean()\n metrics = dict(\n cross_entropy_loss=ce_loss,\n masked_token_accuracy=acc,\n select_logit=outputs[""token_logits""].max(-1).mean(),\n select_p=select_probs.max(-1).mean(),\n entropy=jax.scipy.special.entr(select_probs).sum(-1).mean(),\n psnr=psnr,\n ssim=ssim,\n codebook_usage_lam=codebook_usage_lam,\n codebook_usage_tokenizer=codebook_usage_tokenizer,\n )\n return ce_loss, (outputs[""recon""], metrics)\n\n\n@jax.jit\ndef train_step(state, inputs):\n """"""Update state and compute metrics""""""\n grad_fn = jax.value_and_grad(dynamics_loss_fn, has_aux=True, allow_int=True)\n (loss, (recon, metrics)), grads = grad_fn(state.params, state, inputs)\n state = state.apply_gradients(grads=grads)\n if args.log_gradients:\n metrics[""gradients_std/""] = jax.tree.map(\n lambda x: x.std(), grads[""params""][""dynamics""]\n )\n return state, loss, recon, metrics\n\n\nif __name__ == ""__main__"":\n jax.distributed.initialize()\n num_devices = jax.device_count()\n if num_devices == 0:\n raise ValueError(""No JAX devices found."")\n print(f""Running on {num_devices} devices."")\n\n if args.batch_size % num_devices != 0:\n raise ValueError(\n f""Global batch size {args.batch_size} must be divisible by ""\n f""number of devices {num_devices}.""\n )\n\n per_device_batch_size_for_init = args.batch_size // num_devices\n\n rng = jax.random.PRNGKey(args.seed)\n\n # --- Initialize model ---\n genie = Genie(\n # Tokenizer\n in_dim=args.image_channels,\n tokenizer_dim=args.tokenizer_dim,\n latent_patch_dim=args.latent_patch_dim,\n num_patch_latents=args.num_patch_latents,\n patch_size=args.patch_size,\n tokenizer_num_blocks=args.tokenizer_num_blocks,\n tokenizer_num_heads=args.tokenizer_num_heads,\n # LAM\n 
lam_dim=args.lam_dim,\n latent_action_dim=args.latent_action_dim,\n num_latent_actions=args.num_latent_actions,\n lam_patch_size=args.lam_patch_size,\n lam_num_blocks=args.lam_num_blocks,\n lam_num_heads=args.lam_num_heads,\n lam_co_train=not args.lam_checkpoint,\n # Dynamics\n dyna_dim=args.dyna_dim,\n dyna_num_blocks=args.dyna_num_blocks,\n dyna_num_heads=args.dyna_num_heads,\n dropout=args.dropout,\n mask_limit=args.mask_limit,\n param_dtype=args.param_dtype,\n dtype=args.dtype,\n )\n rng, _rng = jax.random.split(rng)\n image_shape = (args.image_height, args.image_width, args.image_channels)\n dummy_inputs = dict(\n videos=jnp.zeros(\n (per_device_batch_size_for_init, args.seq_len, *image_shape),\n dtype=args.dtype,\n ),\n action=jnp.zeros(\n (per_device_batch_size_for_init, args.seq_len), dtype=args.dtype\n ),\n mask_rng=_rng,\n )\n rng, _rng = jax.random.split(rng)\n init_params = genie.init(_rng, dummy_inputs)\n\n param_counts = count_parameters_by_component(init_params)\n\n if args.log and jax.process_index() == 0:\n wandb_init_kwargs = {\n ""entity"": args.entity,\n ""project"": args.project,\n ""name"": args.name,\n ""tags"": args.tags,\n ""group"": ""debug"",\n ""config"": args,\n }\n\n if args.wandb_id:\n wandb_init_kwargs.update(\n {\n ""id"": args.wandb_id,\n ""resume"": ""allow"",\n }\n )\n wandb.init(**wandb_init_kwargs)\n\n wandb.config.update({""model_param_count"": param_counts})\n\n print(""Parameter counts:"")\n print(param_counts)\n\n # --- Initialize optimizer ---\n lr_schedule = get_lr_schedule(args.lr_schedule, \n args.init_lr, \n args.max_lr, \n args.decay_end, \n args.num_steps, \n args.warmup_steps, \n args.wsd_decay_steps)\n tx = optax.adamw(learning_rate=lr_schedule, b1=0.9, b2=0.9, weight_decay=1e-4, mu_dtype=args.dtype)\n train_state = TrainState.create(apply_fn=genie.apply, params=init_params, tx=tx)\n\n device_mesh_arr = create_device_mesh((num_devices,))\n mesh = Mesh(devices=device_mesh_arr, axis_names=(""data"",))\n\n replicated_sharding = NamedSharding(mesh, PartitionSpec())\n videos_sharding = NamedSharding(mesh, PartitionSpec(""data"", None, None, None, None))\n train_state = jax.device_put(train_state, replicated_sharding)\n\n # --- Initialize checkpoint manager ---\n step = 0\n handler_registry = ocp.handlers.DefaultCheckpointHandlerRegistry()\n handler_registry.add(\n ""model_state"", ocp.args.StandardSave, ocp.handlers.StandardCheckpointHandler\n )\n handler_registry.add(\n ""model_state"", ocp.args.StandardRestore, ocp.handlers.StandardCheckpointHandler\n )\n handler_registry.add(""dataloader_state"", grain.checkpoint.CheckpointSave, grain.checkpoint.CheckpointHandler) # type: ignore\n handler_registry.add(""dataloader_state"", grain.checkpoint.CheckpointRestore, grain.checkpoint.CheckpointHandler) # type: ignore\n\n checkpoint_options = ocp.CheckpointManagerOptions(\n save_interval_steps=args.log_checkpoint_interval,\n max_to_keep=3,\n keep_period=args.log_checkpoint_keep_period,\n step_format_fixed_length=6,\n cleanup_tmp_directories=True,\n )\n\n checkpoint_manager = ocp.CheckpointManager(\n args.ckpt_dir,\n options=checkpoint_options,\n handler_registry=handler_registry,\n )\n\n # --- Create DataLoaderIterator from dataloader ---\n array_record_files = [\n os.path.join(args.data_dir, x)\n for x in os.listdir(args.data_dir)\n if x.endswith("".array_record"")\n ]\n grain_dataloader = get_dataloader(\n array_record_files,\n args.seq_len,\n # NOTE: We deliberately pass the global batch size\n # The dataloader shards the dataset across all 
processes\n args.batch_size,\n *image_shape,\n num_workers=8,\n prefetch_buffer_size=1,\n seed=args.seed,\n )\n initial_state = grain_dataloader._create_initial_state()\n grain_iterator = grain.DataLoaderIterator(grain_dataloader, initial_state)\n\n # --- Restore checkpoint ---\n if args.restore_ckpt:\n # Restore full dynamics model\n abstract_train_state = jax.tree_util.tree_map(\n ocp.utils.to_shape_dtype_struct, train_state\n )\n restored = checkpoint_manager.restore(\n checkpoint_manager.latest_step(),\n args=ocp.args.Composite(\n model_state=ocp.args.StandardRestore(abstract_train_state),\n dataloader_state=grain.checkpoint.CheckpointRestore(grain_iterator),\n ),\n )\n train_state = restored[""model_state""]\n grain_iterator = restored[""dataloader_state""]\n step = checkpoint_manager.latest_step() or 0\n print(f""Restored dataloader and model state from step {step}"")\n else:\n # Restore from pre-trained tokenizer (and LAM)\n train_state = restore_genie_components(\n train_state, replicated_sharding, grain_iterator, dummy_inputs, rng, args\n )\n\n # --- TRAIN LOOP ---\n dataloader = (jax.make_array_from_process_local_data(videos_sharding, elem) for elem in grain_iterator) # type: ignore\n while step < args.num_steps:\n for videos in dataloader:\n # --- Train step ---\n rng, _rng, _rng_dropout, _rng_mask = jax.random.split(rng, 4)\n\n inputs = dict(\n videos=videos,\n rng=_rng,\n dropout_rng=_rng_dropout,\n mask_rng=_rng_mask,\n )\n train_state, loss, recon, metrics = train_step(train_state, inputs)\n metrics[""lr""] = lr_schedule(step)\n print(f""Step {step}, loss: {loss}"")\n step += 1\n\n # --- Logging ---\n if args.log:\n if step % args.log_interval == 0 and jax.process_index() == 0:\n wandb.log(\n {\n ""loss"": loss,\n ""step"": step,\n **metrics,\n }\n )\n if step % args.log_image_interval == 0:\n gt_seq = inputs[""videos""][0]\n recon_seq = recon[0].clip(0, 1)\n comparison_seq = jnp.concatenate((gt_seq, recon_seq), axis=1)\n comparison_seq = einops.rearrange(\n comparison_seq * 255, ""t h w c -> h (t w) c""\n )\n if jax.process_index() == 0:\n log_images = dict(\n image=wandb.Image(np.asarray(gt_seq[args.seq_len - 1])),\n recon=wandb.Image(np.asarray(recon_seq[args.seq_len - 1])),\n true_vs_recon=wandb.Image(\n np.asarray(comparison_seq.astype(np.uint8))\n ),\n )\n wandb.log(log_images)\n # --- Checkpointing ---\n if args.save_ckpt and step % args.log_checkpoint_interval == 0:\n checkpoint_manager.save(\n step,\n args=ocp.args.Composite(\n model_state=ocp.args.StandardSave(train_state),\n dataloader_state=grain.checkpoint.CheckpointSave(\n grain_iterator\n ),\n ),\n )\n print(f""Saved checkpoint at step {step}"")\n if step >= args.num_steps:\n break\n\n checkpoint_manager.close()\n",python,tab +272,463638,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",2820,0,"",python,selection_mouse +273,463659,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",2819,0,"",python,selection_command +274,464170,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",2885,0,"",python,selection_mouse +275,464668,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",2886,0,"",python,selection_mouse +276,464819,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",2881,7,"outputs",python,selection_mouse +277,465412,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",2837,0,"",python,selection_mouse 
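Note: the TypeError repeated in the terminal rows above comes from train_dynamics.py line 99, `ce_loss = (mask * ce_loss).sum() / mask.sum()`, evaluated under jax.value_and_grad: ce_loss is a JVPTracer at that point, so a `mask` of None cannot be multiplied with it. A minimal, self-contained sketch of the failure mode, with illustrative names rather than the repository's API:

    import jax
    import jax.numpy as jnp

    def loss_fn(params, mask):
        ce_loss = params * 2.0  # stands in for the per-token cross-entropy
        return (mask * ce_loss).sum() / mask.sum()

    jax.grad(loss_fn)(jnp.ones(4), jnp.ones(4))  # fine: mask is an array
    # jax.grad(loss_fn)(jnp.ones(4), None) raises the recorded error:
    # TypeError: unsupported operand type(s) for *: 'NoneType' and 'JVPTracer'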
+278,465554,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",2836,4,"mask",python,selection_mouse +279,466155,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",2896,0,"",python,selection_mouse +280,466291,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",2890,12,"token_logits",python,selection_mouse +281,466942,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",2841,0,"",python,selection_mouse +282,467120,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",2840,1," ",python,selection_mouse +283,467736,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",2839,0,"",python,selection_mouse +284,467866,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",2836,4,"mask",python,selection_mouse +285,468448,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",2847,0,"",python,selection_mouse +286,468583,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",2843,7,"ce_loss",python,selection_mouse +287,470164,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",2837,0,"",python,selection_mouse +288,470331,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",2836,4,"mask",python,selection_mouse +289,470899,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",2844,0,"",python,selection_mouse +290,471046,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",2843,7,"ce_loss",python,selection_mouse +291,473931,"models/dynamics.py",0,0,"",python,tab +292,473932,"models/dynamics.py",2933,0,"",python,selection_mouse +293,473972,"models/dynamics.py",2932,0,"",python,selection_command +294,474897,"models/dynamics.py",2978,0,"",python,selection_mouse +295,479583,"models/dynamics.py",1884,0,"",python,selection_mouse +296,479601,"models/dynamics.py",1883,0,"",python,selection_command +297,479980,"models/dynamics.py",1879,0,"",python,selection_mouse +298,480144,"models/dynamics.py",1879,4,"mask",python,selection_mouse +299,481186,"models/dynamics.py",1382,0,"",python,selection_mouse +300,481321,"models/dynamics.py",1381,4,"mask",python,selection_mouse +301,483743,"models/dynamics.py",1437,0,"",python,selection_mouse +302,483895,"models/dynamics.py",1436,5,"shape",python,selection_mouse +303,484594,"models/dynamics.py",1491,0,"",python,selection_mouse +304,484605,"models/dynamics.py",1490,0,"",python,selection_command +305,484710,"models/dynamics.py",1491,0,"",python,selection_mouse +306,484713,"models/dynamics.py",1490,0,"",python,selection_command +307,484972,"models/dynamics.py",1490,1,")",python,selection_mouse +308,484975,"models/dynamics.py",1491,0,"",python,selection_command +309,485532,"models/dynamics.py",1430,0,"",python,selection_mouse +310,485663,"models/dynamics.py",1426,9,"vid_embed",python,selection_mouse +311,485948,"models/dynamics.py",1426,10,"vid_embed.",python,selection_mouse +312,485949,"models/dynamics.py",1426,15,"vid_embed.shape",python,selection_mouse +313,486152,"models/dynamics.py",1426,16,"vid_embed.shape[",python,selection_mouse +314,486153,"models/dynamics.py",1426,17,"vid_embed.shape[:",python,selection_mouse +315,486154,"models/dynamics.py",1426,19,"vid_embed.shape[:-1",python,selection_mouse 
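Note: the selections in rows 278 through 321 trace the failing `mask` from the loss in train_dynamics.py back to models/dynamics.py, where it is sampled as `mask = jax.random.bernoulli(rng2, mask_prob, vid_embed.shape[:-1])`. The normalization the loss expects, extracted into a standalone sketch (names mirror the recorded train_dynamics.py source):

    import optax

    def masked_ce(token_logits, video_tokens, mask):
        # per-token cross-entropy, averaged over masked positions only,
        # as on line 99 of the recorded train_dynamics.py
        ce = optax.softmax_cross_entropy_with_integer_labels(token_logits, video_tokens)
        return (mask * ce).sum() / mask.sum()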
+316,486154,"models/dynamics.py",1426,20,"vid_embed.shape[:-1]",python,selection_mouse +317,486436,"models/dynamics.py",1446,0,"",python,selection_mouse +318,488590,"models/dynamics.py",1377,0,"",python,selection_mouse +319,489188,"models/dynamics.py",1384,0,"",python,selection_mouse +320,489362,"models/dynamics.py",1381,4,"mask",python,selection_mouse +321,489503,"models/dynamics.py",1369,79," mask = jax.random.bernoulli(rng2, mask_prob, vid_embed.shape[:-1])\n",python,selection_mouse +322,492726,"models/dynamics.py",2923,0,"",python,selection_mouse +323,494595,"models/dynamics.py",2924,0," ",python,content +324,494616,"models/dynamics.py",2924,0,"",python,selection_command +325,496332,"models/dynamics.py",2924,1,"",python,content +326,496342,"models/dynamics.py",2923,0,"",python,selection_command +327,496636,"models/dynamics.py",2933,0,"\n ",python,content +328,496928,"models/dynamics.py",2942,0," mask = jax.random.bernoulli(rng2, mask_prob, vid_embed.shape[:-1])\n",python,content +329,498182,"models/dynamics.py",2955,0,"",python,selection_mouse +330,498692,"models/dynamics.py",2954,0,"",python,selection_command +331,498935,"models/dynamics.py",2950,4,"",python,content +332,499459,"models/dynamics.py",2946,4,"",python,content +333,499672,"models/dynamics.py",2942,4,"",python,content +334,500051,"models/dynamics.py",3009,0,"",python,selection_command +335,500284,"models/dynamics.py",3008,1,"",python,content +336,501301,"models/dynamics.py",2956,0,"",python,selection_mouse +337,501528,"models/dynamics.py",2953,6,"random",python,selection_mouse +338,502341,"models/dynamics.py",2950,0,"",python,selection_mouse +339,502489,"models/dynamics.py",2949,3,"jax",python,selection_mouse +340,502567,"models/dynamics.py",2949,10,"jax.random",python,selection_mouse +341,502630,"models/dynamics.py",2949,11,"jax.random.",python,selection_mouse +342,502630,"models/dynamics.py",2949,20,"jax.random.bernoulli",python,selection_mouse +343,503359,"models/dynamics.py",2949,20,"j",python,content +344,503361,"models/dynamics.py",2950,0,"",python,selection_keyboard +345,503532,"models/dynamics.py",2950,0,"n",python,content +346,503534,"models/dynamics.py",2951,0,"",python,selection_keyboard +347,503778,"models/dynamics.py",2951,0,"p",python,content +348,503778,"models/dynamics.py",2952,0,"",python,selection_keyboard +349,504084,"models/dynamics.py",2952,0,".",python,content +350,504086,"models/dynamics.py",2953,0,"",python,selection_keyboard +351,504298,"models/dynamics.py",2953,0,"o",python,content +352,504299,"models/dynamics.py",2954,0,"",python,selection_keyboard +353,504471,"models/dynamics.py",2954,0,"n",python,content +354,504473,"models/dynamics.py",2955,0,"",python,selection_keyboard +355,504569,"models/dynamics.py",2955,0,"e",python,content +356,504570,"models/dynamics.py",2956,0,"",python,selection_keyboard +357,504748,"models/dynamics.py",2956,0,"s",python,content +358,504749,"models/dynamics.py",2957,0,"",python,selection_keyboard +359,505518,"models/dynamics.py",2957,0,"_",python,content +360,505519,"models/dynamics.py",2958,0,"",python,selection_keyboard +361,505769,"models/dynamics.py",2958,0,"l",python,content +362,505770,"models/dynamics.py",2959,0,"",python,selection_keyboard +363,506627,"models/dynamics.py",2958,1,"",python,content +364,506760,"models/dynamics.py",2957,1,"",python,content +365,508310,"models/dynamics.py",2975,0,"",python,selection_command +366,508645,"models/dynamics.py",2973,2,"",python,content +367,508995,"models/dynamics.py",2964,9,"",python,content 
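Note: the edits in rows 327 through 369 duplicate the Bernoulli-mask line at a second site in models/dynamics.py and retype `jax.random.bernoulli(...)` into `jnp.ones(...)`, i.e. they swap the sampled training mask for an all-ones mask. A sketch of the before/after, under assumed illustrative shapes:

    import jax
    import jax.numpy as jnp

    rng2 = jax.random.PRNGKey(0)       # illustrative
    vid_embed = jnp.zeros((1, 16, 8))  # illustrative (batch, tokens, dim)
    mask_prob = 0.5
    mask = jax.random.bernoulli(rng2, mask_prob, vid_embed.shape[:-1])  # before
    mask = jnp.ones(vid_embed.shape[:-1])                               # after the edit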
+368,509300,"models/dynamics.py",2962,2,"",python,content +369,509877,"models/dynamics.py",2958,4,"",python,content +370,514799,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",0,0,"",python,tab +371,514800,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",3712,0,"",python,selection_mouse +372,514851,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",3711,0,"",python,selection_command +373,515426,"extension-output-pdoom-org.crowd-code-#1-crowd-code",0,0,"",Log,tab +374,515426,"extension-output-pdoom-org.crowd-code-#1-crowd-code",298,0,"",Log,selection_mouse +375,520396,"models/dynamics.py",0,0,"",python,tab +376,520397,"models/dynamics.py",1512,0,"",python,selection_mouse +377,520470,"models/dynamics.py",1504,9,"vid_embed",python,selection_mouse +378,582010,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",0,0,"",python,tab +379,582011,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",3353,0,"",python,selection_mouse +380,582024,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",3352,0,"",python,selection_command +381,583000,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",3308,0,"",python,selection_mouse +382,583011,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",3307,0,"",python,selection_command +383,584492,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",3295,0,"",python,selection_mouse +384,584672,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",3295,1," ",python,selection_mouse +385,584843,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",3295,5," type",python,selection_mouse +386,584911,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",3295,7," type: ",python,selection_mouse +387,584911,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",3295,13," type: ignore",python,selection_mouse +388,585401,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",3308,0,"",python,selection_mouse +389,585402,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",3302,6,"ignore",python,selection_mouse +390,585485,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",3307,0,"",python,selection_command +391,585486,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",3303,5,"gnore",python,selection_command +392,585711,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",3303,0,"",python,selection_mouse +393,585711,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",3302,1,"i",python,selection_mouse +394,585712,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",3301,2," i",python,selection_mouse +395,585712,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",3300,3,": i",python,selection_mouse +396,585768,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",3296,7,"type: i",python,selection_mouse +397,585964,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",3242,61," type: ignore\n ssim = pix.ssim(gt, recon).mean() 
# type: i",python,selection_mouse +398,586038,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",3241,62,"# type: ignore\n ssim = pix.ssim(gt, recon).mean() # type: i",python,selection_mouse +399,586611,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",3241,0,"",python,selection_mouse +400,586612,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",3240,1," ",python,selection_mouse +401,586774,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",3240,2," #",python,selection_mouse +402,586807,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",3240,7," # type",python,selection_mouse +403,586833,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",3240,16," # type: ignore\n",python,selection_mouse +404,586855,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",3240,15," # type: ignore",python,selection_mouse +405,586908,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",3240,16," # type: ignore\n",python,selection_mouse +406,586912,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",3240,15," # type: ignore",python,selection_mouse +407,587146,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",3255,0,"",python,selection_mouse +408,587176,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",3254,0,"",python,selection_command +409,727567,"TERMINAL",0,0,"bash",,terminal_focus +410,732786,"TERMINAL",0,0,"cd $ws_dir",,terminal_command +411,733324,"TERMINAL",0,0,"ls",,terminal_command +412,733353,"TERMINAL",0,0,"]633;E;2025-07-16 15:01:40 ls;22762ecb-63fc-44db-bc05-6ba77d772526]633;Ccheckpoints count_items.sh data data_new huggingface logs scripts\r\n]0;tum_cte0515@hkn1991:/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared]633;D;0",,terminal_output +413,738528,"TERMINAL",0,0,"cd data_new/",,terminal_command +414,738549,"TERMINAL",0,0,"]633;E;2025-07-16 15:01:46 cd data_new/;22762ecb-63fc-44db-bc05-6ba77d772526]633;C]0;tum_cte0515@hkn1991:/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_new]633;D;0",,terminal_output +415,738833,"TERMINAL",0,0,"ls",,terminal_command +416,738884,"TERMINAL",0,0,"]633;E;2025-07-16 15:01:46 ls;22762ecb-63fc-44db-bc05-6ba77d772526]633;C",,terminal_output +417,739215,"TERMINAL",0,0,"open_ai_minecraft open_ai_minecraft_arrayrecords_chunked open_ai_minecraft_arrayrecords_chunked_subset open_ai_minecraft_npy open_ai_minecraft_tfrecord_uncurrupted-2\r\nopen_ai_minecraft_arrayrecords open_ai_minecraft_arrayrecords_chunked_compressed open_ai_minecraft_arrayrecords_chunked_uncompressed open_ai_minecraft_tfrecord_uncurrupted\r\n]0;tum_cte0515@hkn1991:/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_new]633;D;0",,terminal_output +418,749040,"TERMINAL",0,0,"cd open_ai_minecraft_arrayrecords_chunked",,terminal_command +419,753777,"TERMINAL",0,0,"ls -l | wc -l",,terminal_command +420,753815,"TERMINAL",0,0,"]633;E;2025-07-16 15:02:01 ls -l | wc -l;22762ecb-63fc-44db-bc05-6ba77d772526]633;C",,terminal_output +421,755462,"models/dynamics.py",0,0,"",python,tab +422,755463,"models/dynamics.py",1500,0,"",python,selection_mouse +423,756844,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",0,0,"",python,tab 
+424,756845,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",2474,0,"",python,selection_mouse +425,756854,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",2473,0,"",python,selection_command +426,760261,"models/dynamics.py",0,0,"",python,tab +427,760262,"models/dynamics.py",2947,0,"",python,selection_mouse +428,761101,"models/dynamics.py",3027,0,"",python,selection_mouse +429,761204,"models/dynamics.py",3026,4,"None",python,selection_mouse +430,762050,"models/dynamics.py",3026,4,"",python,content +431,762334,"models/dynamics.py",3026,0,"m",python,content +432,762336,"models/dynamics.py",3027,0,"",python,selection_keyboard +433,762422,"models/dynamics.py",3027,0,"a",python,content +434,762425,"models/dynamics.py",3028,0,"",python,selection_keyboard +435,762504,"models/dynamics.py",3028,0,"s",python,content +436,762505,"models/dynamics.py",3029,0,"",python,selection_keyboard +437,762569,"models/dynamics.py",3029,0,"k",python,content +438,762571,"models/dynamics.py",3030,0,"",python,selection_keyboard +439,765002,"TERMINAL",0,0,"srun",,terminal_focus +440,766420,"TERMINAL",0,0,"sh slurm/jobs/mihir/horeka/yolo-runs/tester.sh",,terminal_output +441,767538,"TERMINAL",0,0,"\r\n[?2004l\r\r\n# Log the sbatch script\r\ncat $0\r\n\r\nmodule unload mpi/openmpi/5.0\r\nmodule unload devel/cuda/12.4\r\nsource .venv/bin/activate\r\n\r\narray_records_dir=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_new/open_ai_minecraft_arrayrecords_chunked\r\nws_dir='/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/'\r\n\r\njob_name=""debug""\r\nslurm_job_id=""debug-mihir""\r\n\r\nCHECKPOINT_DIR=$ws_dir/checkpoints/$job_name/$slurm_job_id\r\nmkdir -p $CHECKPOINT_DIR\r\n\r\ntokenizer_ckpt_dir=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/big-runs/tokenizer-lr-scaling/train_tokenizer_lr_sweep_1e-4\r\n\r\nenv | grep SLURM\r\n\r\nXLA_FLAGS=--xla_gpu_autotune_level=0 srun python train_dynamics.py \\r\n --ckpt_dir $CHECKPOINT_DIR \\r\n --batch_size=96 \\r\n --max_lr=1e-4 \\r\n --log_image_interval=1000 \\r\n --log \\r\n --log_checkpoint_interval=1000 \\r\n --name=dynamics-debug-run-$slurm_job_id \\r\n --tags dynamics debug \\r\n --entity instant-uv \\r\n --project jafar \\r\n --tokenizer_checkpoint=$tokenizer_ckpt_dir \\r\n --data_dir $array_records_dir\r\n ",,terminal_output 
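Note: rows 429 through 438 select a literal `None` in models/dynamics.py and replace it with `mask`; plausibly the model's outputs previously carried `mask=None`, which is exactly what the loss tripped over, and row 441 then relaunches training via tester.sh. A sketch of that reading (an assumption, not confirmed by the recording):

    import jax.numpy as jnp

    mask = jnp.ones((1, 16))                 # illustrative
    token_logits = jnp.zeros((1, 16, 1024))  # illustrative
    outputs_before = dict(token_logits=token_logits, mask=None)  # loss sees None -> TypeError
    outputs_after = dict(token_logits=token_logits, mask=mask)   # loss can normalize by mask.sum()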
+442,767720,"TERMINAL",0,0,"SLURM_STEP_NUM_TASKS=1\r\nSLURM_JOB_USER=tum_cte0515\r\nSLURM_TASKS_PER_NODE=4(x2)\r\nSLURM_JOB_UID=999226\r\nSLURM_TASK_PID=2279443\r\nSLURM_JOB_GPUS=0,1,2,3\r\nSLURM_LOCALID=0\r\nSLURM_SUBMIT_DIR=/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar\r\nSLURMD_NODENAME=hkn0710\r\nSLURM_JOB_START_TIME=1752670186\r\nSLURM_STEP_NODELIST=hkn0710\r\nSLURM_CLUSTER_NAME=hk\r\nSLURM_JOB_END_TIME=1752706186\r\nSLURM_PMI2_SRUN_PORT=42215\r\nSLURM_CPUS_ON_NODE=24\r\nSLURM_JOB_CPUS_PER_NODE=24(x2)\r\nSLURM_GPUS_ON_NODE=4\r\nSLURM_GTIDS=0\r\nSLURM_JOB_PARTITION=accelerated\r\nSLURM_TRES_PER_TASK=cpu=5\r\nSLURM_OOM_KILL_STEP=0\r\nSLURM_JOB_NUM_NODES=2\r\nSLURM_STEPID=4294967290\r\nSLURM_JOBID=3350418\r\nSLURM_PTY_PORT=33491\r\nSLURM_JOB_QOS=normal\r\nSLURM_LAUNCH_NODE_IPADDR=10.0.7.199\r\nSLURM_PTY_WIN_ROW=43\r\nSLURM_PMI2_PROC_MAPPING=(vector,(0,1,1))\r\nSLURMD_DEBUG=2\r\nSLURM_PROCID=0\r\nSLURM_CPUS_PER_TASK=5\r\nSLURM_NTASKS=8\r\nSLURM_TOPOLOGY_ADDR=hkibb.hkibbi1.hkibbi1e8.hkn0710\r\nSLURM_TOPOLOGY_ADDR_PATTERN=switch.switch.switch.node\r\nSLURM_SRUN_COMM_HOST=10.0.7.199\r\nSLURM_SCRIPT_CONTEXT=prolog_task\r\nSLURM_PTY_WIN_COL=147\r\nSLURM_NODELIST=hkn[0710-0711]\r\nSLURM_SRUN_COMM_PORT=44593\r\nSLURM_STEP_ID=4294967290\r\nSLURM_JOB_ACCOUNT=hk-project-p0023960\r\nSLURM_PRIO_PROCESS=0\r\nSLURM_NPROCS=8\r\nSLURM_NNODES=2\r\nSLURM_SUBMIT_HOST=hkn1991.localdomain\r\nSLURM_JOB_ID=3350418\r\nSLURM_NODEID=0\r\nSLURM_STEP_NUM_NODES=1\r\nSLURM_STEP_TASKS_PER_NODE=1\r\nSLURM_MPI_TYPE=pmi2\r\nSLURM_PMI2_STEP_NODES=hkn0710\r\nSLURM_CONF=/etc/slurm/slurm.conf\r\nSLURM_JOB_NAME=interactive\r\nSLURM_NTASKS_PER_NODE=4\r\nSLURM_STEP_LAUNCHER_PORT=44593\r\nSLURM_JOB_GID=502226\r\nSLURM_JOB_NODELIST=hkn[0710-0711]\r\n",,terminal_output +443,767860,"TERMINAL",0,0,"GpuFreq=control_disabled\r\nGpuFreq=control_disabled\r\n",,terminal_output +444,771081,"TERMINAL",0,0,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/tyro/_parsers.py:347: UserWarning: The field `param-dtype` is annotated with type ``, but the default value `` has type ``. We'll try to handle this gracefully, but it may cause unexpected behavior.\r\n warnings.warn(message)\r\n/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/tyro/_parsers.py:347: UserWarning: The field `param-dtype` is annotated with type ``, but the default value `` has type ``. We'll try to handle this gracefully, but it may cause unexpected behavior.\r\n warnings.warn(message)\r\n/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/tyro/_parsers.py:347: UserWarning: The field `dtype` is annotated with type ``, but the default value `` has type ``. We'll try to handle this gracefully, but it may cause unexpected behavior.\r\n warnings.warn(message)\r\n/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/tyro/_parsers.py:347: UserWarning: The field `dtype` is annotated with type ``, but the default value `` has type ``. We'll try to handle this gracefully, but it may cause unexpected behavior.\r\n warnings.warn(message)\r\n/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/tyro/_parsers.py:347: UserWarning: The field `param-dtype` is annotated with type ``, but the default value `` has type ``. 
We'll try to handle this gracefully, but it may cause unexpected behavior.\r\n warnings.warn(message)\r\n/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/tyro/_parsers.py:347: UserWarning: The field `param-dtype` is annotated with type ``, but the default value `` has type ``. We'll try to handle this gracefully, but it may cause unexpected behavior.\r\n warnings.warn(message)\r\n/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/tyro/_parsers.py:347: UserWarning: The field `dtype` is annotated with type ``, but the default value `` has type ``. We'll try to handle this gracefully, but it may cause unexpected behavior.\r\n warnings.warn(message)\r\n/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/tyro/_parsers.py:347: UserWarning: The field `dtype` is annotated with type ``, but the default value `` has type ``. We'll try to handle this gracefully, but it may cause unexpected behavior.\r\n warnings.warn(message)\r\n",,terminal_output +445,771391,"TERMINAL",0,0,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/tyro/_parsers.py:347: UserWarning: The field `param-dtype` is annotated with type ``, but the default value `` has type ``. We'll try to handle this gracefully, but it may cause unexpected behavior.\r\n warnings.warn(message)\r\n/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/tyro/_parsers.py:347: UserWarning: The field `param-dtype` is annotated with type ``, but the default value `` has type ``. We'll try to handle this gracefully, but it may cause unexpected behavior.\r\n warnings.warn(message)\r\n/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/tyro/_parsers.py:347: UserWarning: The field `param-dtype` is annotated with type ``, but the default value `` has type ``. We'll try to handle this gracefully, but it may cause unexpected behavior.\r\n warnings.warn(message)\r\n/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/tyro/_parsers.py:347: UserWarning: The field `param-dtype` is annotated with type ``, but the default value `` has type ``. We'll try to handle this gracefully, but it may cause unexpected behavior.\r\n warnings.warn(message)\r\n/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/tyro/_parsers.py:347: UserWarning: The field `dtype` is annotated with type ``, but the default value `` has type ``. We'll try to handle this gracefully, but it may cause unexpected behavior.\r\n warnings.warn(message)\r\n/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/tyro/_parsers.py:347: UserWarning: The field `dtype` is annotated with type ``, but the default value `` has type ``. We'll try to handle this gracefully, but it may cause unexpected behavior.\r\n warnings.warn(message)\r\n/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/tyro/_parsers.py:347: UserWarning: The field `dtype` is annotated with type ``, but the default value `` has type ``. 
We'll try to handle this gracefully, but it may cause unexpected behavior.\r\n warnings.warn(message)\r\n/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/tyro/_parsers.py:347: UserWarning: The field `dtype` is annotated with type ``, but the default value `` has type ``. We'll try to handle this gracefully, but it may cause unexpected behavior.\r\n warnings.warn(message)\r\n",,terminal_output +446,776021,"TERMINAL",0,0,"ls",,terminal_focus +447,776029,"TERMINAL",0,0,"2025-07-16 15:02:23.445368: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n2025-07-16 15:02:23.485348: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n2025-07-16 15:02:23.499253: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n2025-07-16 15:02:23.499251: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n2025-07-16 15:02:23.506377: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n2025-07-16 15:02:23.513950: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n2025-07-16 15:02:23.517344: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n2025-07-16 15:02:23.517342: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +448,780791,"TERMINAL",0,0,"89396\r\n]0;tum_cte0515@hkn1991:/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_new/open_ai_minecraft_arrayrecords_chunked]633;D;0",,terminal_output +449,790659,"TERMINAL",0,0,"2025-07-16 15:02:38.159609: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. 
Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +450,790778,"TERMINAL",0,0,"2025-07-16 15:02:38.252366: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n2025-07-16 15:02:38.280243: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +451,790882,"TERMINAL",0,0,"2025-07-16 15:02:38.305686: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n2025-07-16 15:02:38.305686: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n2025-07-16 15:02:38.378334: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n2025-07-16 15:02:38.385249: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +452,790984,"TERMINAL",0,0,"2025-07-16 15:02:38.465339: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +453,794334,"TERMINAL",0,0,"2025-07-16 15:02:41.730223: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n2025-07-16 15:02:41.836258: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +454,794437,"TERMINAL",0,0,"2025-07-16 15:02:41.868736: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. 
Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n2025-07-16 15:02:41.918031: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +455,794546,"TERMINAL",0,0,"2025-07-16 15:02:41.950703: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n2025-07-16 15:02:41.982504: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n2025-07-16 15:02:41.983040: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +456,794563,"TERMINAL",0,0,"2025-07-16 15:02:42.040455: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +457,796857,"TERMINAL",0,0,"wandb: Currently logged in as: mihir-mahajan2002 (instant-uv) to https://api.wandb.ai. 
Use `wandb login --relogin` to force relogin\r\n",,terminal_output +458,797506,"TERMINAL",0,0,"wandb: Tracking run with wandb version 0.19.11\r\nwandb: Run data is saved locally in /hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/wandb/run-20250716_150244-044gl44u\r\nwandb: Run `wandb offline` to turn off syncing.\r\nwandb: Syncing run dynamics-debug-run-debug-mihir\r\nwandb: ⭐️ View project at https://wandb.ai/instant-uv/jafar\r\nwandb: 🚀 View run at https://wandb.ai/instant-uv/jafar/runs/044gl44u\r\n",,terminal_output +459,799653,"TERMINAL",0,0,"python",,terminal_command +460,799685,"TERMINAL",0,0,"]633;E;2025-07-16 15:02:47 python;22762ecb-63fc-44db-bc05-6ba77d772526]633;C",,terminal_output +461,799867,"TERMINAL",0,0,"Python 3.10.18 (main, Jun 4 2025, 17:36:27) [Clang 20.1.4 ] on linux\r\nType ""help"", ""copyright"", ""credits"" or ""license"" for more information.\r\n",,terminal_output +462,800354,"TERMINAL",0,0,">>> ",,terminal_output +463,802011,"TERMINAL",0,0,"5",,terminal_output +464,802073,"TERMINAL",0,0,"[?25l0[?25h",,terminal_output +465,803010,"TERMINAL",0,0,"[?25l/[?25h",,terminal_output +466,803530,"TERMINAL",0,0,"[?25l9[?25h[?25l0[?25h",,terminal_output +467,804187,"TERMINAL",0,0,"[?25l0[?25h",,terminal_output +468,804333,"TERMINAL",0,0,"[?25l0[?25h",,terminal_output +469,804521,"TERMINAL",0,0,"[?25l0[?25h",,terminal_output +470,805476,"TERMINAL",0,0,"\r\n0.0005555555555555556\r\n>>> ",,terminal_output +471,808252,"TERMINAL",0,0,"0.0005555555555555556",,terminal_output +472,808665,"TERMINAL",0,0,"[?25l*[?25h",,terminal_output +473,808989,"TERMINAL",0,0,"[?25l1[?25h",,terminal_output +474,809051,"TERMINAL",0,0,"[?25l0[?25h",,terminal_output +475,809202,"TERMINAL",0,0,"[?25l0[?25h",,terminal_output +476,809696,"TERMINAL",0,0,"\r\n0.05555555555555555\r\n>>> ",,terminal_output +477,823581,"TERMINAL",0,0,"^D\r\n]0;tum_cte0515@hkn1991:/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_new/open_ai_minecraft_arrayrecords_chunked]633;D;0",,terminal_output +478,831193,"TERMINAL",0,0,"srun",,terminal_focus +479,874097,"TERMINAL",0,0,"WARNING:absl:Dropping 2 examples of 89394 examples (shard 8).\r\n",,terminal_output +480,876862,"TERMINAL",0,0,"WARNING:absl:Dropping 2 examples of 89394 examples (shard 8).\r\n",,terminal_output +481,897255,"TERMINAL",0,0,"WARNING:absl:Dropping 2 examples of 89394 examples (shard 8).\r\n",,terminal_output +482,901787,"TERMINAL",0,0,"WARNING:absl:Dropping 2 examples of 89394 examples (shard 8).\r\n",,terminal_output +483,903865,"TERMINAL",0,0,"WARNING:absl:Dropping 2 examples of 89394 examples (shard 8).\r\n",,terminal_output
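The interactive Python session above is a quick sanity check on the dropped-example rate: each shard drops 2 of ~89394 examples, and the session divides 50 by 90000 and multiplies by 100, i.e. roughly 0.056% of examples dropped. A minimal sketch of the same arithmetic; only the 50/90000 figures appear in the session, while deriving 50 as 2 drops across ~25 shards is an assumption:

```python
# Back-of-envelope check of the dropped-example rate, mirroring the REPL
# session above. The 50/90000 figures are the ones typed into the REPL;
# reading 50 as 2 drops x ~25 shards is an assumption, not logged output.
dropped = 50
total = 90_000
fraction = dropped / total      # 0.0005555555555555556, as printed above
percent = fraction * 100        # 0.05555555555555555 -> ~0.056% of examples
print(f"dropped {dropped}/{total} examples = {percent:.4f}%")
```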
+484,903986,"TERMINAL",0,0,"WARNING:absl:Missing metrics for step 20000\r\nERROR:absl:File /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/big-runs/tokenizer-lr-scaling/train_tokenizer_lr_sweep_1e-4/020000/metrics/metrics not found.\r\nWARNING:absl:Missing metrics for step 40000\r\nERROR:absl:File /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/big-runs/tokenizer-lr-scaling/train_tokenizer_lr_sweep_1e-4/040000/metrics/metrics not found.\r\nWARNING:absl:Missing metrics for step 60000\r\nERROR:absl:File /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/big-runs/tokenizer-lr-scaling/train_tokenizer_lr_sweep_1e-4/060000/metrics/metrics not found.\r\nWARNING:absl:Missing metrics for step 80000\r\nERROR:absl:File /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/big-runs/tokenizer-lr-scaling/train_tokenizer_lr_sweep_1e-4/080000/metrics/metrics not found.\r\nWARNING:absl:Missing metrics for step 100000\r\nERROR:absl:File /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/big-runs/tokenizer-lr-scaling/train_tokenizer_lr_sweep_1e-4/100000/metrics/metrics not found.\r\nWARNING:absl:Missing metrics for step 120000\r\nERROR:absl:File /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/big-runs/tokenizer-lr-scaling/train_tokenizer_lr_sweep_1e-4/120000/metrics/metrics not found.\r\nWARNING:absl:Missing metrics for step 140000\r\nERROR:absl:File /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/big-runs/tokenizer-lr-scaling/train_tokenizer_lr_sweep_1e-4/140000/metrics/metrics not found.\r\nWARNING:absl:Missing metrics for step 144000\r\nERROR:absl:File /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/big-runs/tokenizer-lr-scaling/train_tokenizer_lr_sweep_1e-4/144000/metrics/metrics not found.\r\nWARNING:absl:Missing metrics for step 145000\r\nERROR:absl:File /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/big-runs/tokenizer-lr-scaling/train_tokenizer_lr_sweep_1e-4/145000/metrics/metrics not found.\r\nWARNING:absl:Missing metrics for step 146000\r\nERROR:absl:File /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/big-runs/tokenizer-lr-scaling/train_tokenizer_lr_sweep_1e-4/146000/metrics/metrics not found.\r\n",,terminal_output
+485,909034,"TERMINAL",0,0,"Running on 8 devices.\r\nCounting all components: ['tokenizer', 'lam', 'dynamics']\r\nParameter counts:\r\n{'tokenizer': 37989616, 'lam': 19349472, 'dynamics': 29734912, 'total': 87074000}\r\n",,terminal_output +486,914351,"TERMINAL",0,0,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/tyro/_parsers.py:347: UserWarning: The field `param-dtype` is annotated with type ``, but the default value `` has type ``. We'll try to handle this gracefully, but it may cause unexpected behavior.\r\n warnings.warn(message)\r\n/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/tyro/_parsers.py:347: UserWarning: The field `dtype` is annotated with type ``, but the default value `` has type ``. We'll try to handle this gracefully, but it may cause unexpected behavior.\r\n warnings.warn(message)\r\n",,terminal_output +487,914423,"TERMINAL",0,0,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/tyro/_parsers.py:347: UserWarning: The field `param-dtype` is annotated with type ``, but the default value `` has type ``. We'll try to handle this gracefully, but it may cause unexpected behavior.\r\n warnings.warn(message)\r\n/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/tyro/_parsers.py:347: UserWarning: The field `dtype` is annotated with type ``, but the default value `` has type ``. We'll try to handle this gracefully, but it may cause unexpected behavior.\r\n warnings.warn(message)\r\n",,terminal_output +488,914545,"TERMINAL",0,0,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/tyro/_parsers.py:347: UserWarning: The field `param-dtype` is annotated with type ``, but the default value `` has type ``. We'll try to handle this gracefully, but it may cause unexpected behavior.\r\n warnings.warn(message)\r\n/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/tyro/_parsers.py:347: UserWarning: The field `dtype` is annotated with type ``, but the default value `` has type ``. We'll try to handle this gracefully, but it may cause unexpected behavior.\r\n warnings.warn(message)\r\n",,terminal_output +489,914601,"TERMINAL",0,0,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/tyro/_parsers.py:347: UserWarning: The field `param-dtype` is annotated with type ``, but the default value `` has type ``. We'll try to handle this gracefully, but it may cause unexpected behavior.\r\n warnings.warn(message)\r\n/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/tyro/_parsers.py:347: UserWarning: The field `dtype` is annotated with type ``, but the default value `` has type ``. We'll try to handle this gracefully, but it may cause unexpected behavior.\r\n warnings.warn(message)\r\n",,terminal_output +490,914775,"TERMINAL",0,0,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/tyro/_parsers.py:347: UserWarning: The field `param-dtype` is annotated with type ``, but the default value `` has type ``. We'll try to handle this gracefully, but it may cause unexpected behavior.\r\n warnings.warn(message)\r\n/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/tyro/_parsers.py:347: UserWarning: The field `dtype` is annotated with type ``, but the default value `` has type ``. We'll try to handle this gracefully, but it may cause unexpected behavior.\r\n warnings.warn(message)\r\n",,terminal_output
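The recurring tyro UserWarning fires when a CLI field's type annotation and its default value disagree; the concrete type names were lost in this capture, which is why the backticks are empty. A minimal, hypothetical reproduction of the pattern, not the project's actual config: annotating a field as a dtype while defaulting it to a JAX scalar type such as jnp.float32, which is a class rather than a dtype instance.

```python
# Hypothetical config that triggers tyro's "field is annotated with type X,
# but the default value has type Y" warning: the annotation and the default
# have different types (jnp.float32 is a *class*, not a jnp.dtype instance).
import dataclasses

import jax.numpy as jnp
import tyro


@dataclasses.dataclass
class Args:
    param_dtype: jnp.dtype = jnp.float32  # annotation/default mismatch
    dtype: jnp.dtype = jnp.bfloat16       # same pattern, second warning


if __name__ == "__main__":
    args = tyro.cli(Args)  # emits UserWarnings like the ones in the log
    print(args)
```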
+491,915069,"TERMINAL",0,0,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/tyro/_parsers.py:347: UserWarning: The field `param-dtype` is annotated with type ``, but the default value `` has type ``. We'll try to handle this gracefully, but it may cause unexpected behavior.\r\n warnings.warn(message)\r\n/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/tyro/_parsers.py:347: UserWarning: The field `dtype` is annotated with type ``, but the default value `` has type ``. We'll try to handle this gracefully, but it may cause unexpected behavior.\r\n warnings.warn(message)\r\n",,terminal_output +504,916745,"TERMINAL",0,0,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/tyro/_parsers.py:347: UserWarning: The field `param-dtype` is annotated with type ``, but the default value `` has type ``. We'll try to handle this gracefully, but it may cause unexpected behavior.\r\n warnings.warn(message)\r\n/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/tyro/_parsers.py:347: UserWarning: The field `dtype` is annotated with type ``, but the default value `` has type ``. We'll try to handle this gracefully, but it may cause unexpected behavior.\r\n warnings.warn(message)\r\n",,terminal_output +505,916824,"TERMINAL",0,0,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/tyro/_parsers.py:347: UserWarning: The field `param-dtype` is annotated with type ``, but the default value `` has type ``. 
We'll try to handle this gracefully, but it may cause unexpected behavior.\r\n warnings.warn(message)\r\n/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/tyro/_parsers.py:347: UserWarning: The field `dtype` is annotated with type ``, but the default value `` has type ``. We'll try to handle this gracefully, but it may cause unexpected behavior.\r\n warnings.warn(message)\r\n/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/tyro/_parsers.py:347: UserWarning: The field `param-dtype` is annotated with type ``, but the default value `` has type ``. We'll try to handle this gracefully, but it may cause unexpected behavior.\r\n warnings.warn(message)\r\n/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/tyro/_parsers.py:347: UserWarning: The field `dtype` is annotated with type ``, but the default value `` has type ``. We'll try to handle this gracefully, but it may cause unexpected behavior.\r\n warnings.warn(message)\r\n",,terminal_output +506,916928,"TERMINAL",0,0,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/tyro/_parsers.py:347: UserWarning: The field `param-dtype` is annotated with type ``, but the default value `` has type ``. We'll try to handle this gracefully, but it may cause unexpected behavior.\r\n warnings.warn(message)\r\n/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/tyro/_parsers.py:347: UserWarning: The field `dtype` is annotated with type ``, but the default value `` has type ``. We'll try to handle this gracefully, but it may cause unexpected behavior.\r\n warnings.warn(message)\r\n",,terminal_output +507,916988,"TERMINAL",0,0,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/tyro/_parsers.py:347: UserWarning: The field `param-dtype` is annotated with type ``, but the default value `` has type ``. We'll try to handle this gracefully, but it may cause unexpected behavior.\r\n warnings.warn(message)\r\n/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/tyro/_parsers.py:347: UserWarning: The field `dtype` is annotated with type ``, but the default value `` has type ``. We'll try to handle this gracefully, but it may cause unexpected behavior.\r\n warnings.warn(message)\r\n",,terminal_output +508,917048,"TERMINAL",0,0,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/tyro/_parsers.py:347: UserWarning: The field `param-dtype` is annotated with type ``, but the default value `` has type ``. We'll try to handle this gracefully, but it may cause unexpected behavior.\r\n warnings.warn(message)\r\n/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/tyro/_parsers.py:347: UserWarning: The field `dtype` is annotated with type ``, but the default value `` has type ``. We'll try to handle this gracefully, but it may cause unexpected behavior.\r\n warnings.warn(message)\r\n",,terminal_output +509,917156,"TERMINAL",0,0,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/tyro/_parsers.py:347: UserWarning: The field `param-dtype` is annotated with type ``, but the default value `` has type ``. 
We'll try to handle this gracefully, but it may cause unexpected behavior.\r\n warnings.warn(message)\r\n/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/tyro/_parsers.py:347: UserWarning: The field `dtype` is annotated with type ``, but the default value `` has type ``. We'll try to handle this gracefully, but it may cause unexpected behavior.\r\n warnings.warn(message)\r\n",,terminal_output +510,917617,"TERMINAL",0,0,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/tyro/_parsers.py:347: UserWarning: The field `param-dtype` is annotated with type ``, but the default value `` has type ``. We'll try to handle this gracefully, but it may cause unexpected behavior.\r\n warnings.warn(message)\r\n/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/tyro/_parsers.py:347: UserWarning: The field `dtype` is annotated with type ``, but the default value `` has type ``. We'll try to handle this gracefully, but it may cause unexpected behavior.\r\n warnings.warn(message)\r\n",,terminal_output +511,952618,"TERMINAL",0,0,"2025-07-16 15:05:20.109487: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n2025-07-16 15:05:20.110453: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n2025-07-16 15:05:20.110989: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n2025-07-16 15:05:20.112296: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n2025-07-16 15:05:20.112316: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n2025-07-16 15:05:20.113795: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +512,953878,"TERMINAL",0,0,"2025-07-16 15:05:21.378590: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. 
Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output
+517,965236,"TERMINAL",0,0,"2025-07-16 15:05:32.668310: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. 
Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +518,1629812,"models/dynamics.py",0,0,"",python,tab +519,1629813,"models/dynamics.py",2494,0,"",python,selection_mouse +520,1629928,"models/dynamics.py",2492,5,"Dense",python,selection_mouse +521,1641729,"genie.py",0,0,"",python,tab +522,1644138,"genie.py",2661,0,"",python,selection_mouse +523,1644674,"genie.py",2687,0,"",python,selection_mouse +524,1644677,"genie.py",2686,0,"",python,selection_command +525,1644827,"genie.py",2686,1,")",python,selection_mouse +526,1644830,"genie.py",2687,0,"",python,selection_command +527,1645007,"genie.py",2655,32," # dtype=self.dtype,\n # )",python,selection_mouse +528,1645008,"genie.py",2565,122," # mask_limit=self.mask_limit,\n # param_dtype=self.param_dtype,\n # dtype=self.dtype,\n # )",python,selection_mouse +529,1645008,"genie.py",2527,160," # dropout=self.dropout,\n # mask_limit=self.mask_limit,\n # param_dtype=self.param_dtype,\n # dtype=self.dtype,\n # )",python,selection_mouse +530,1645009,"genie.py",2481,206," # num_heads=self.dyna_num_heads,\n # dropout=self.dropout,\n # mask_limit=self.mask_limit,\n # param_dtype=self.param_dtype,\n # dtype=self.dtype,\n # )",python,selection_mouse +531,1645011,"genie.py",2433,254," # num_blocks=self.dyna_num_blocks,\n # num_heads=self.dyna_num_heads,\n # dropout=self.dropout,\n # mask_limit=self.mask_limit,\n # param_dtype=self.param_dtype,\n # dtype=self.dtype,\n # )",python,selection_mouse +532,1645011,"genie.py",2432,255," # num_blocks=self.dyna_num_blocks,\n # num_heads=self.dyna_num_heads,\n # dropout=self.dropout,\n # mask_limit=self.mask_limit,\n # param_dtype=self.param_dtype,\n # dtype=self.dtype,\n # )",python,selection_mouse +533,1645012,"genie.py",2382,305," # num_latents=self.num_patch_latents,\n # num_blocks=self.dyna_num_blocks,\n # num_heads=self.dyna_num_heads,\n # dropout=self.dropout,\n # mask_limit=self.mask_limit,\n # param_dtype=self.param_dtype,\n # dtype=self.dtype,\n # )",python,selection_mouse +534,1645043,"genie.py",2342,345," # model_dim=self.dyna_dim,\n # num_latents=self.num_patch_latents,\n # num_blocks=self.dyna_num_blocks,\n # num_heads=self.dyna_num_heads,\n # dropout=self.dropout,\n # mask_limit=self.mask_limit,\n # param_dtype=self.param_dtype,\n # dtype=self.dtype,\n # )",python,selection_mouse +535,1645115,"genie.py",2299,388," # self.dynamics = DynamicsMaskGIT(\n # model_dim=self.dyna_dim,\n # num_latents=self.num_patch_latents,\n # num_blocks=self.dyna_num_blocks,\n # num_heads=self.dyna_num_heads,\n # dropout=self.dropout,\n # mask_limit=self.mask_limit,\n # param_dtype=self.param_dtype,\n # dtype=self.dtype,\n # )",python,selection_mouse +536,1646025,"genie.py",2297,0,"",python,selection_mouse +537,1646063,"genie.py",2296,0,"",python,selection_command +538,1646174,"genie.py",2296,1,")",python,selection_mouse +539,1646176,"genie.py",2297,0,"",python,selection_command +540,1646334,"genie.py",2267,30," dtype=self.dtype,\n )",python,selection_mouse +541,1646335,"genie.py",2223,74," param_dtype=self.param_dtype,\n dtype=self.dtype,\n )",python,selection_mouse +542,1646335,"genie.py",2187,110," dropout=self.dropout,\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n )",python,selection_mouse +543,1646335,"genie.py",2142,155," num_heads=self.dyna_num_heads,\n dropout=self.dropout,\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n )",python,selection_mouse +544,1646336,"genie.py",2096,201," 
num_blocks=self.dyna_num_blocks,\n num_heads=self.dyna_num_heads,\n dropout=self.dropout,\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n )",python,selection_mouse +545,1646336,"genie.py",2047,250," num_latents=self.num_patch_latents,\n num_blocks=self.dyna_num_blocks,\n num_heads=self.dyna_num_heads,\n dropout=self.dropout,\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n )",python,selection_mouse +546,1646390,"genie.py",2009,288," model_dim=self.dyna_dim,\n num_latents=self.num_patch_latents,\n num_blocks=self.dyna_num_blocks,\n num_heads=self.dyna_num_heads,\n dropout=self.dropout,\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n )",python,selection_mouse +547,1646486,"genie.py",1961,336," self.dynamics = DynamicsAutoregressive(\n model_dim=self.dyna_dim,\n num_latents=self.num_patch_latents,\n num_blocks=self.dyna_num_blocks,\n num_heads=self.dyna_num_heads,\n dropout=self.dropout,\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n )",python,selection_mouse +548,1647652,"genie.py",2273,0,"",python,selection_mouse +549,1647895,"genie.py",2270,5,"dtype",python,selection_mouse +550,1648038,"genie.py",2258,30," dtype=self.dtype,\n",python,selection_mouse +551,1648666,"genie.py",2232,0,"",python,selection_mouse +552,1648928,"genie.py",2228,11,"param_dtype",python,selection_mouse +553,1649089,"genie.py",2216,42," param_dtype=self.param_dtype,\n",python,selection_mouse +554,1649885,"genie.py",2616,0,"",python,selection_mouse +555,1650045,"genie.py",2614,11,"param_dtype",python,selection_mouse +556,1650200,"genie.py",2600,44," # param_dtype=self.param_dtype,\n",python,selection_mouse +557,1650647,"genie.py",2660,0,"",python,selection_mouse +558,1650798,"genie.py",2658,5,"dtype",python,selection_mouse +559,1650923,"genie.py",2644,32," # dtype=self.dtype,\n",python,selection_mouse +560,1652433,"models/dynamics.py",0,0,"",python,tab +561,1655206,"models/dynamics.py",2109,0,"",python,selection_mouse +562,1655359,"models/dynamics.py",2107,5,"dtype",python,selection_mouse +563,1655503,"models/dynamics.py",2080,32,"param_dtype: jnp.dtype\n dtype",python,selection_mouse +564,1656882,"models/dynamics.py",2376,0,"",python,selection_mouse +565,1657022,"models/dynamics.py",2372,4,"self",python,selection_mouse +566,1657192,"models/dynamics.py",2360,24," self.dtype,\n",python,selection_mouse +567,1657509,"models/dynamics.py",2330,54," self.param_dtype,\n self.dtype,\n",python,selection_mouse +568,1659009,"models/dynamics.py",2588,0,"",python,selection_mouse +569,1659147,"models/dynamics.py",2587,4,"self",python,selection_mouse +570,1659299,"models/dynamics.py",2569,30," dtype=self.dtype,\n",python,selection_mouse +571,1659588,"models/dynamics.py",2527,72," param_dtype=self.param_dtype,\n dtype=self.dtype,\n",python,selection_mouse +572,1661800,"models/dynamics.py",2179,0,"",python,selection_mouse +573,1671653,"utils/nn.py",0,0,"",python,tab +574,1673486,"utils/nn.py",4249,0,"",python,selection_mouse +575,1673640,"utils/nn.py",4246,5,"dtype",python,selection_mouse +576,1673887,"utils/nn.py",4219,32,"param_dtype: jnp.dtype\n dtype",python,selection_mouse +577,1686984,"utils/nn.py",5105,0,"",python,selection_mouse +578,1687126,"utils/nn.py",5105,4,"self",python,selection_mouse +579,1687484,"utils/nn.py",5105,50,"self.param_dtype,\n dtype=self.dtype",python,selection_mouse +580,1688295,"utils/nn.py",5152,0,"",python,selection_mouse +581,1688295,"utils/nn.py",5150,5,"dtype",python,selection_mouse +582,1688444,"utils/nn.py",5123,34," dtype=self.dtype,\n",python,selection_mouse 
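The tyro UserWarnings flooding the terminal rows above (the empty backticks are where angle-bracketed `<class '...'>` reprs were evidently lost, and the repetition looks like one emission per worker process) line up with the fields being inspected in genie.py and models/dynamics.py: attributes annotated `param_dtype: jnp.dtype` and `dtype: jnp.dtype` whose defaults are scalar type classes such as `jnp.float32`. A minimal sketch, assuming a hypothetical `Args` dataclass, that reproduces the warning:

```python
from dataclasses import dataclass

import jax.numpy as jnp
import tyro


@dataclass
class Args:
    # Annotated as jnp.dtype (an alias of numpy.dtype), but the defaults are
    # the scalar *classes* jnp.bfloat16 / jnp.float32, whose type is `type`,
    # so tyro warns that annotation and default disagree.
    dtype: jnp.dtype = jnp.bfloat16
    param_dtype: jnp.dtype = jnp.float32


if __name__ == "__main__":
    # Emits: UserWarning: The field `dtype` is annotated with type <...>,
    # but the default value <...> has type <...>. We'll try to handle this
    # gracefully, but it may cause unexpected behavior.
    args = tyro.cli(Args)
    print(args)
```

Defaulting to real dtype instances (e.g. `jnp.dtype('bfloat16')`) or widening the annotation would presumably silence the warning; flax modules accept either form for their dtype/param_dtype attributes.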
+583,1688655,"utils/nn.py",5077,80," param_dtype=self.param_dtype,\n dtype=self.dtype,\n",python,selection_mouse +584,1688801,"utils/nn.py",5039,118," dropout=self.dropout,\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n",python,selection_mouse +585,1689111,"utils/nn.py",5077,80," param_dtype=self.param_dtype,\n dtype=self.dtype,\n",python,selection_mouse +586,1694021,"utils/nn.py",4994,0,"",python,selection_mouse +587,1694614,"utils/nn.py",4960,0,"",python,selection_mouse +588,1694615,"utils/nn.py",4959,0,"",python,selection_command +589,1695638,"utils/nn.py",4955,0,"",python,selection_mouse +590,1700631,"utils/nn.py",1385,0,"",python,selection_mouse +591,1700759,"utils/nn.py",1382,5,"dtype",python,selection_mouse +592,1700909,"utils/nn.py",1359,30," dtype=self.dtype,\n",python,selection_mouse +593,1701232,"utils/nn.py",1317,72," param_dtype=self.param_dtype,\n dtype=self.dtype,\n",python,selection_mouse +594,1702846,"utils/nn.py",1658,0,"",python,selection_mouse +595,1702987,"utils/nn.py",1657,5,"dtype",python,selection_mouse +596,1703287,"utils/nn.py",1616,46,"self.param_dtype,\n dtype=self.dtype",python,selection_mouse +597,1862641,"utils/nn.py",2908,0,"",python,selection_mouse +598,1862686,"utils/nn.py",2907,0,"",python,selection_command +599,1959651,"utils/nn.py",6788,0,"",python,selection_mouse +600,1960014,"utils/nn.py",6794,0,"",python,selection_mouse +601,1960993,"utils/nn.py",6785,0,"",python,selection_mouse +602,1961327,"utils/nn.py",6795,0,"",python,selection_mouse +603,1963705,"utils/nn.py",6061,0,"",python,selection_mouse +604,1964013,".venv/lib/python3.10/site-packages/flax/linen/attention.py",0,0,"# Copyright 2024 The Flax Authors.\n#\n# Licensed under the Apache License, Version 2.0 (the ""License"");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n# http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an ""AS IS"" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n\n""""""Attention core modules for Flax.""""""\nfrom __future__ import annotations\n\nimport functools\nimport inspect\nimport warnings\nfrom typing import Any, overload\nfrom collections.abc import Callable\n\nimport jax\nimport jax.numpy as jnp\nfrom jax import lax, random\n\nfrom flax.linen import initializers\nfrom flax.linen.dtypes import promote_dtype\nfrom flax.linen.linear import (\n DenseGeneral,\n default_kernel_init,\n)\nfrom flax.linen.module import Module, compact, merge_param\nfrom flax.linen.normalization import LayerNorm\nfrom flax.typing import (\n Array,\n PRNGKey,\n Dtype,\n Shape as Shape,\n Initializer,\n PrecisionLike,\n DotGeneralT,\n)\n\n\ndef dot_product_attention_weights(\n query: Array,\n key: Array,\n bias: Array | None = None,\n mask: Array | None = None,\n broadcast_dropout: bool = True,\n dropout_rng: PRNGKey | None = None,\n dropout_rate: float = 0.0,\n deterministic: bool = False,\n dtype: Dtype | None = None,\n precision: PrecisionLike = None,\n module: Module | None = None,\n force_fp32_for_softmax: bool = False,\n einsum_dot_general: Callable[..., Array] | None = None,\n einsum: Callable[..., Array] | None = None,\n):\n """"""Computes dot-product attention weights given query and key.\n\n Used by :func:`dot_product_attention`, which is what you'll most 
likely use.\n But if you want access to the attention weights for introspection, then\n you can directly call this function and call einsum yourself.\n\n Args:\n query: queries for calculating attention with shape of ``[batch...,\n q_length, num_heads, qk_depth_per_head]``.\n key: keys for calculating attention with shape of ``[batch..., kv_length,\n num_heads, qk_depth_per_head]``.\n bias: bias for the attention weights. This should be broadcastable to the\n shape ``[batch..., num_heads, q_length, kv_length]``. This can be used for\n incorporating causal masks, padding masks, proximity bias, etc.\n mask: mask for the attention weights. This should be broadcastable to the\n shape ``[batch..., num_heads, q_length, kv_length]``. This can be used for\n incorporating causal masks. Attention weights are masked out if their\n corresponding mask value is ``False``.\n broadcast_dropout: bool: use a broadcasted dropout along batch dims.\n dropout_rng: JAX PRNGKey: to be used for dropout\n dropout_rate: dropout rate\n deterministic: bool, deterministic or not (to apply dropout)\n dtype: the dtype of the computation (default: infer from inputs and params)\n precision: numerical precision of the computation see ``jax.lax.Precision``\n for details.\n module: the Module that will sow the attention weights into the\n 'intermediates' collection. Remember to mark 'intermediates' as mutable\n via ``mutable=['intermediates']`` in order to have that collection\n returned. If ``module`` is None, the attention weights will not be sowed.\n force_fp32_for_softmax: bool, whether to force the softmax to be computed in\n fp32. This is useful for mixed-precision training where higher precision\n is desired for numerical stability.\n einsum_dot_general: the dot_general to use in einsum.\n einsum: If unspecified, default `jnp.einsum` will be used. This argument is\n mutually exclusive with `precision` and `einsum_dot_general`.\n\n Raises:\n ValueError: if both `precision`/`einsum_dot_general` and `einsum` are\n specified.\n\n Returns:\n Output of shape ``[batch..., num_heads, q_length, kv_length]``.\n """"""\n if (precision or einsum_dot_general) and einsum:\n raise ValueError(\n 'precision/einsum_dot_general and einsum are mutually exclusive. 
Please'\n ' specify only one of them.'\n )\n if not einsum:\n einsum = functools.partial(\n jnp.einsum,\n precision=precision,\n _dot_general=einsum_dot_general\n if einsum_dot_general\n else jax.lax.dot_general,\n )\n\n query, key = promote_dtype(query, key, dtype=dtype)\n dtype = query.dtype\n\n assert query.ndim == key.ndim, 'q, k must have same rank.'\n assert query.shape[:-3] == key.shape[:-3], 'q, k batch dims must match.'\n assert query.shape[-2] == key.shape[-2], 'q, k num_heads must match.'\n assert query.shape[-1] == key.shape[-1], 'q, k depths must match.'\n\n # calculate attention matrix\n depth = query.shape[-1]\n query = query / jnp.sqrt(depth).astype(dtype)\n # attn weight shape is (batch..., num_heads, q_length, kv_length)\n attn_weights = einsum('...qhd,...khd->...hqk', query, key)\n\n # apply attention bias: masking, dropout, proximity bias, etc.\n if bias is not None:\n attn_weights = attn_weights + bias\n # apply attention mask\n if mask is not None:\n big_neg = jnp.finfo(dtype).min\n attn_weights = jnp.where(mask, attn_weights, big_neg)\n\n # normalize the attention weights\n if force_fp32_for_softmax and dtype != jnp.float32:\n attn_weights = jax.nn.softmax(attn_weights.astype(jnp.float32))\n else:\n attn_weights = jax.nn.softmax(attn_weights).astype(dtype)\n\n if module:\n module.sow('intermediates', 'attention_weights', attn_weights)\n\n # apply attention dropout\n if not deterministic and dropout_rate > 0.0:\n keep_prob = 1.0 - dropout_rate\n if broadcast_dropout:\n # dropout is broadcast across the batch + head dimensions\n dropout_shape = tuple([1] * (key.ndim - 2)) + attn_weights.shape[-2:]\n keep = random.bernoulli(dropout_rng, keep_prob, dropout_shape) # type: ignore\n else:\n keep = random.bernoulli(dropout_rng, keep_prob, attn_weights.shape) # type: ignore\n multiplier = keep.astype(dtype) / jnp.asarray(keep_prob, dtype=dtype)\n attn_weights = attn_weights * multiplier\n\n return attn_weights\n\n\ndef dot_product_attention(\n query: Array,\n key: Array,\n value: Array,\n bias: Array | None = None,\n mask: Array | None = None,\n broadcast_dropout: bool = True,\n dropout_rng: PRNGKey | None = None,\n dropout_rate: float = 0.0,\n deterministic: bool = False,\n dtype: Dtype | None = None,\n precision: PrecisionLike = None,\n module: Module | None = None,\n force_fp32_for_softmax: bool = False,\n einsum_dot_general: Callable[..., Array] | None = None,\n qk_attn_weights_einsum: Callable[..., Array] | None = None,\n attn_weights_value_einsum: Callable[..., Array] | None = None,\n):\n """"""Computes dot-product attention given query, key, and value.\n\n This is the core function for applying attention based on\n https://arxiv.org/abs/1706.03762. It calculates the attention weights given\n query and key and combines the values using the attention weights.\n\n .. note::\n ``query``, ``key``, ``value`` needn't have any batch dimensions.\n\n Args:\n query: queries for calculating attention with shape of ``[batch...,\n q_length, num_heads, qk_depth_per_head]``.\n key: keys for calculating attention with shape of ``[batch..., kv_length,\n num_heads, qk_depth_per_head]``.\n value: values to be used in attention with shape of ``[batch..., kv_length,\n num_heads, v_depth_per_head]``.\n bias: bias for the attention weights. This should be broadcastable to the\n shape ``[batch..., num_heads, q_length, kv_length]``. This can be used for\n incorporating causal masks, padding masks, proximity bias, etc.\n mask: mask for the attention weights. 
This should be broadcastable to the\n shape ``[batch..., num_heads, q_length, kv_length]``. This can be used for\n incorporating causal masks. Attention weights are masked out if their\n corresponding mask value is ``False``.\n broadcast_dropout: bool: use a broadcasted dropout along batch dims.\n dropout_rng: JAX PRNGKey: to be used for dropout\n dropout_rate: dropout rate\n deterministic: bool, deterministic or not (to apply dropout)\n dtype: the dtype of the computation (default: infer from inputs)\n precision: numerical precision of the computation see ``jax.lax.Precision`\n for details.\n module: the Module that will sow the attention weights into the\n 'intermediates' collection. Remember to mark 'intermediates' as mutable\n via ``mutable=['intermediates']`` in order to have that collection\n returned. If ``module`` is None, the attention weights will not be sowed.\n force_fp32_for_softmax: bool, whether to force the softmax to be computed in\n fp32. This is useful for mixed-precision training where higher precision\n is desired for numerical stability.\n einsum_dot_general: the dot_general to use in `jnp.einsum`.\n qk_attn_weights_einsum: the einsum for computing the attention weights. When\n unspecified, the default `jnp.einsum` will be used. This argument is\n mutually exclusive with `precision` and `einsum_dot_general`.\n attn_weights_value_einsum: the einsum for computing the product of the\n attention weights and the values. When unspecified, the default\n `jnp.einsum` will be used. This argument is mutually exclusive with\n `precision` and `einsum_dot_general`.\n\n Returns:\n Output of shape ``[batch..., q_length, num_heads, v_depth_per_head]``.\n\n Raises:\n ValueError: if both `precision`/`einsum_dot_general` and\n `qk_attn_weights_einsum`/`attn_weights_value_einsum` are\n specified.\n """"""\n if (qk_attn_weights_einsum and not attn_weights_value_einsum) or (\n not qk_attn_weights_einsum and attn_weights_value_einsum\n ):\n raise ValueError(\n 'qk_attn_weights_einsum and attn_weights_value_einsum must be specified'\n ' together.'\n )\n if (precision or einsum_dot_general) and (\n qk_attn_weights_einsum or attn_weights_value_einsum\n ):\n raise ValueError(\n 'precision/einsum_dot_general and'\n ' qk_attn_weights_einsum/attn_weights_value_einsum are mutually'\n ' exclusive. 
Please specify only one of them.'\n )\n\n query, key, value = promote_dtype(query, key, value, dtype=dtype)\n dtype = query.dtype\n assert key.ndim == query.ndim == value.ndim, 'q, k, v must have same rank.'\n assert (\n query.shape[:-3] == key.shape[:-3] == value.shape[:-3]\n ), 'q, k, v batch dims must match.'\n assert (\n query.shape[-2] == key.shape[-2] == value.shape[-2]\n ), 'q, k, v num_heads must match.'\n assert key.shape[-3] == value.shape[-3], 'k, v lengths must match.'\n\n # compute attention weights\n attn_weights = dot_product_attention_weights(\n query,\n key,\n bias,\n mask,\n broadcast_dropout,\n dropout_rng,\n dropout_rate,\n deterministic,\n dtype,\n precision,\n module,\n force_fp32_for_softmax,\n einsum_dot_general=einsum_dot_general,\n einsum=qk_attn_weights_einsum,\n )\n if not attn_weights_value_einsum:\n attn_weights_value_einsum = functools.partial(\n jnp.einsum,\n precision=precision,\n _dot_general=einsum_dot_general\n if einsum_dot_general\n else jax.lax.dot_general,\n )\n # return weighted sum over values for each query position\n return attn_weights_value_einsum(\n '...hqk,...khd->...qhd',\n attn_weights,\n value,\n )\n\n\nclass MultiHeadDotProductAttention(Module):\n """"""Multi-head dot-product attention.\n\n Example usage::\n\n >>> import flax.linen as nn\n >>> import jax\n\n >>> layer = nn.MultiHeadDotProductAttention(num_heads=8, qkv_features=16)\n >>> key1, key2, key3, key4, key5, key6 = jax.random.split(jax.random.key(0), 6)\n >>> shape = (4, 3, 2, 5)\n >>> q, k, v = jax.random.uniform(key1, shape), jax.random.uniform(key2, shape), jax.random.uniform(key3, shape)\n >>> variables = layer.init(jax.random.key(0), q)\n\n >>> # different inputs for inputs_q, inputs_k and inputs_v\n >>> out = layer.apply(variables, q, k, v)\n >>> # equivalent to layer.apply(variables, inputs_q=q, inputs_k=k, inputs_v=k)\n >>> out = layer.apply(variables, q, k)\n >>> # equivalent to layer.apply(variables, inputs_q=q, inputs_k=q) and layer.apply(variables, inputs_q=q, inputs_k=q, inputs_v=q)\n >>> out = layer.apply(variables, q)\n\n >>> attention_kwargs = dict(\n ... num_heads=8,\n ... qkv_features=16,\n ... kernel_init=nn.initializers.ones,\n ... bias_init=nn.initializers.zeros,\n ... dropout_rate=0.5,\n ... deterministic=False,\n ... )\n >>> class Module(nn.Module):\n ... attention_kwargs: dict\n ...\n ... @nn.compact\n ... def __call__(self, x, dropout_rng=None):\n ... out1 = nn.MultiHeadDotProductAttention(**self.attention_kwargs)(x, dropout_rng=dropout_rng)\n ... out2 = nn.MultiHeadDotProductAttention(**self.attention_kwargs)(x, dropout_rng=dropout_rng)\n ... return out1, out2\n >>> module = Module(attention_kwargs)\n >>> variables = module.init({'params': key1, 'dropout': key2}, q)\n\n >>> # out1 and out2 are different.\n >>> out1, out2 = module.apply(variables, q, rngs={'dropout': key3})\n >>> # out3 and out4 are different.\n >>> # out1 and out3 are different. out2 and out4 are different.\n >>> out3, out4 = module.apply(variables, q, rngs={'dropout': key4})\n >>> # out1 and out2 are the same.\n >>> out1, out2 = module.apply(variables, q, dropout_rng=key5)\n >>> # out1 and out2 are the same as out3 and out4.\n >>> # providing a `dropout_rng` arg will take precedence over the `rngs` arg in `.apply`\n >>> out3, out4 = module.apply(variables, q, rngs={'dropout': key6}, dropout_rng=key5)\n\n Attributes:\n num_heads: Number of attention heads. Features (i.e. 
inputs_q.shape[-1])\n should be divisible by the number of heads.\n dtype: The dtype of the computation (default: infer from inputs and params)\n param_dtype: The dtype passed to parameter initializers (default: float32)\n qkv_features: Dimension of the key, query, and value.\n out_features: Dimension of the last projection\n broadcast_dropout: Use a broadcasted dropout along batch dims.\n dropout_rate: Dropout rate.\n deterministic: If False, the attention weight is masked randomly using\n dropout, whereas if True, the attention weights are deterministic.\n precision: Numerical precision of the computation see ``jax.lax.Precision``\n for details.\n kernel_init: Initializer for the kernel of the Dense layers.\n out_kernel_init: Optional Initializer for the kernel of the output Dense layer,\n if None, ``kernel_init`` will be used.\n bias_init: Initializer for the bias of the Dense layers.\n out_bias_init: Optional Initializer for the bias of the output Dense layer,\n if None, ``bias_init`` will be used.\n use_bias: Whether pointwise QKVO dense transforms use bias.\n attention_fn: dot_product_attention or compatible function. Accepts query,\n key, value, and returns output of shape ``[bs, dim1, dim2, ..., dimN,,\n num_heads, value_channels]``\n decode: Whether to prepare and use an autoregressive cache.\n normalize_qk: Should QK normalization be applied (arxiv.org/abs/2302.05442).\n qk_attn_weights_einsum_cls: factory function to create the einsum for\n computing the attention weights.\n attn_weights_value_einsum_cls: factory function to create the einsum for\n computing the product of the attention weights and the values.\n """"""\n\n num_heads: int\n dtype: Dtype | None = None\n param_dtype: Dtype = jnp.float32\n qkv_features: int | None = None\n out_features: int | None = None\n broadcast_dropout: bool = True\n dropout_rate: float = 0.0\n deterministic: bool | None = None\n precision: PrecisionLike = None\n kernel_init: Initializer = default_kernel_init\n out_kernel_init: Initializer | None = None\n bias_init: Initializer = initializers.zeros_init()\n out_bias_init: Initializer | None = None\n use_bias: bool = True\n attention_fn: Callable[..., Array] = dot_product_attention\n decode: bool = False\n normalize_qk: bool = False\n force_fp32_for_softmax: bool = False\n # Deprecated, will be removed.\n qkv_dot_general: DotGeneralT | None = None\n out_dot_general: DotGeneralT | None = None\n qkv_dot_general_cls: Any = None\n out_dot_general_cls: Any = None\n qk_attn_weights_einsum_cls: Callable[..., Callable[..., Array]] | None = None\n attn_weights_value_einsum_cls: Callable[..., Callable[..., Array]] | None = (\n None\n )\n\n @overload\n def __call__(\n self,\n inputs_q: Array,\n inputs_k: Array | None = None,\n inputs_v: Array | None = None,\n *,\n mask: Array | None = None,\n deterministic: bool | None = None,\n dropout_rng: PRNGKey | None = None,\n sow_weights: bool = False,\n ):\n ...\n\n @overload\n def __call__(\n self,\n inputs_q: Array,\n *,\n inputs_kv: Array | None = None,\n mask: Array | None = None,\n deterministic: bool | None = None,\n dropout_rng: PRNGKey | None = None,\n sow_weights: bool = False,\n ):\n ...\n\n @compact\n def __call__(\n self,\n inputs_q: Array,\n inputs_k: Array | None = None,\n inputs_v: Array | None = None,\n *,\n inputs_kv: Array | None = None,\n mask: Array | None = None,\n deterministic: bool | None = None,\n dropout_rng: PRNGKey | None = None,\n sow_weights: bool = False,\n ):\n """"""Applies multi-head dot product attention on the input data.\n\n 
Projects the inputs into multi-headed query, key, and value vectors,\n applies dot-product attention and project the results to an output vector.\n\n If both inputs_k and inputs_v are None, they will both copy the value of\n inputs_q (self attention).\n If only inputs_v is None, it will copy the value of inputs_k.\n\n Args:\n inputs_q: input queries of shape ``[batch_sizes..., length, features]``.\n inputs_k: key of shape ``[batch_sizes..., length, features]``. If None,\n inputs_k will copy the value of inputs_q.\n inputs_v: values of shape ``[batch_sizes..., length, features]``. If None,\n inputs_v will copy the value of inputs_k.\n inputs_kv: key/values of shape ``[batch_sizes..., length, features]``. If\n None, inputs_kv will copy the value of inputs_q. This arg will be\n deprecated soon. Use inputs_k and inputs_v instead.\n mask: attention mask of shape ``[batch_sizes..., num_heads, query_length,\n key/value_length]``. Attention weights are masked out if their\n corresponding mask value is ``False``.\n deterministic: if false, the attention weight is masked randomly using\n dropout, whereas if true, the attention weights are deterministic.\n dropout_rng: optional rng key to pass to the attention layer's dropout\n mask. Otherwise, self.make_rng('dropout') is used instead.\n sow_weights: if ``True``, the attention weights are sowed into the\n 'intermediates' collection. Remember to mark 'intermediates' as\n mutable via ``mutable=['intermediates']`` in order to have that\n collection returned.\n\n Returns:\n output of shape ``[batch_sizes..., length, features]``.\n """"""\n if inputs_kv is not None:\n if inputs_k is not None or inputs_v is not None:\n raise ValueError(\n 'If either `inputs_k` or `inputs_v` is not None, '\n '`inputs_kv` must be None. If `inputs_kv` is not None, both `inputs_k` '\n 'and `inputs_v` must be None. We recommend using `inputs_k` and '\n '`inputs_v` args, since `inputs_kv` will be deprecated soon. See '\n 'https://github.com/google/flax/discussions/3389 for more '\n 'information.'\n )\n inputs_k = inputs_v = inputs_kv\n warnings.warn(\n 'The inputs_kv arg will be deprecated soon. '\n 'Use inputs_k and inputs_v instead. See '\n 'https://github.com/google/flax/discussions/3389 '\n 'for more information.',\n DeprecationWarning,\n )\n else:\n if inputs_k is None:\n if inputs_v is not None:\n raise ValueError(\n '`inputs_k` cannot be None if `inputs_v` is not None. '\n 'To have both `inputs_k` and `inputs_v` be the same value, pass in the '\n 'value to `inputs_k` and leave `inputs_v` as None.'\n )\n inputs_k = inputs_q\n if inputs_v is None:\n inputs_v = inputs_k\n elif inputs_v.shape[-1] == inputs_v.shape[-2]:\n warnings.warn(\n f'You are passing an array of shape {inputs_v.shape} '\n 'to the `inputs_v` arg, when you may have intended '\n 'to pass it to the `mask` arg. As of Flax version '\n '0.7.4, the function signature of '\n ""MultiHeadDotProductAttention's `__call__` method ""\n 'has changed to `__call__(inputs_q, inputs_k=None, '\n 'inputs_v=None, *, inputs_kv=None, mask=None, '\n 'deterministic=None)`. Use the kwarg `mask` instead. 
'\n 'See https://github.com/google/flax/discussions/3389 '\n 'and read the docstring for more information.',\n DeprecationWarning,\n )\n\n features = self.out_features or inputs_q.shape[-1]\n qkv_features = self.qkv_features or inputs_q.shape[-1]\n assert qkv_features % self.num_heads == 0, (\n f'Memory dimension ({qkv_features}) must be divisible by number of'\n f' heads ({self.num_heads}).'\n )\n head_dim = qkv_features // self.num_heads\n\n dense = functools.partial(\n DenseGeneral,\n axis=-1,\n dtype=self.dtype,\n param_dtype=self.param_dtype,\n features=(self.num_heads, head_dim),\n kernel_init=self.kernel_init,\n bias_init=self.bias_init,\n use_bias=self.use_bias,\n precision=self.precision,\n dot_general=self.qkv_dot_general,\n dot_general_cls=self.qkv_dot_general_cls,\n )\n # project inputs_q to multi-headed q/k/v\n # dimensions are then [batch..., length, n_heads, n_features_per_head]\n query, key, value = (\n dense(name='query')(inputs_q),\n dense(name='key')(inputs_k),\n dense(name='value')(inputs_v),\n )\n\n if self.normalize_qk:\n # Normalizing query and key projections stabilizes training with higher\n # LR. See ViT-22B paper http://arxiv.org/abs/2302.05442 for analysis.\n query = LayerNorm(\n name='query_ln',\n use_bias=False,\n dtype=self.dtype,\n param_dtype=self.param_dtype,\n )(query) # type: ignore[call-arg]\n key = LayerNorm(\n name='key_ln',\n use_bias=False,\n dtype=self.dtype,\n param_dtype=self.param_dtype,\n )(key) # type: ignore[call-arg]\n\n # During fast autoregressive decoding, we feed one position at a time,\n # and cache the keys and values step by step.\n if self.decode:\n # detect if we're initializing by absence of existing cache data.\n is_initialized = self.has_variable('cache', 'cached_key')\n cached_key = self.variable(\n 'cache', 'cached_key', jnp.zeros, key.shape, key.dtype\n )\n cached_value = self.variable(\n 'cache', 'cached_value', jnp.zeros, value.shape, value.dtype\n )\n cache_index = self.variable(\n 'cache', 'cache_index', lambda: jnp.array(0, dtype=jnp.int32)\n )\n if is_initialized:\n (\n *batch_dims,\n max_length,\n num_heads,\n depth_per_head,\n ) = cached_key.value.shape\n # shape check of cached keys against query input\n expected_shape = tuple(batch_dims) + (1, num_heads, depth_per_head)\n if expected_shape != query.shape:\n raise ValueError(\n 'Autoregressive cache shape error, '\n 'expected query shape %s instead got %s.'\n % (expected_shape, query.shape)\n )\n # update key, value caches with our new 1d spatial slices\n cur_index = cache_index.value\n zero = jnp.array(0, dtype=lax.dtype(cur_index.dtype))\n indices: tuple[int | jax.Array, ...] 
= (zero,) * len(\n batch_dims\n ) + (\n cur_index,\n zero,\n zero,\n )\n key = lax.dynamic_update_slice(cached_key.value, key, indices)\n value = lax.dynamic_update_slice(cached_value.value, value, indices)\n cached_key.value = key\n cached_value.value = value\n cache_index.value = cache_index.value + 1\n # causal mask for cached decoder self-attention:\n # our single query position should only attend to those key\n # positions that have already been generated and cached,\n # not the remaining zero elements.\n mask = combine_masks(\n mask,\n jnp.broadcast_to(\n jnp.arange(max_length) <= cur_index,\n tuple(batch_dims) + (1, 1, max_length),\n ),\n )\n\n if (\n self.dropout_rate > 0.0\n ): # Require `deterministic` only if using dropout.\n m_deterministic = merge_param(\n 'deterministic', self.deterministic, deterministic\n )\n if not m_deterministic and dropout_rng is None:\n dropout_rng = self.make_rng('dropout')\n else:\n m_deterministic = True\n\n # `qk_attn_weights_einsum` and `attn_weights_value_einsum` are optional\n # arguments that can be used to override the default `jnp.einsum`. They\n # exist for quantized einsum support in AQT.\n qk_attn_weights_einsum = (\n self.qk_attn_weights_einsum_cls()\n if self.qk_attn_weights_einsum_cls\n else None\n )\n attn_weights_value_einsum = (\n self.attn_weights_value_einsum_cls()\n if self.attn_weights_value_einsum_cls\n else None\n )\n # apply attention\n attn_args = (query, key, value)\n # This kwargs list matches the default nn.dot_product_attention.\n # For custom `attention_fn`s, invalid kwargs will be filtered.\n attn_kwargs = dict(\n mask=mask,\n dropout_rng=dropout_rng,\n dropout_rate=self.dropout_rate,\n broadcast_dropout=self.broadcast_dropout,\n deterministic=m_deterministic,\n dtype=self.dtype,\n precision=self.precision,\n force_fp32_for_softmax=self.force_fp32_for_softmax,\n qk_attn_weights_einsum=qk_attn_weights_einsum,\n attn_weights_value_einsum=attn_weights_value_einsum,\n )\n attn_kwargs = {\n k: v\n for k, v in attn_kwargs.items()\n if k in inspect.signature(self.attention_fn).parameters\n }\n if sow_weights:\n x = self.attention_fn(*attn_args, **attn_kwargs, module=self)\n else:\n x = self.attention_fn(*attn_args, **attn_kwargs)\n # back to the original inputs dimensions\n out = DenseGeneral(\n features=features,\n axis=(-2, -1),\n kernel_init=self.out_kernel_init or self.kernel_init,\n bias_init=self.out_bias_init or self.bias_init,\n use_bias=self.use_bias,\n dtype=self.dtype,\n param_dtype=self.param_dtype,\n precision=self.precision,\n dot_general=self.out_dot_general,\n dot_general_cls=self.out_dot_general_cls,\n name='out', # type: ignore[call-arg]\n )(x)\n return out\n\n\nclass MultiHeadAttention(MultiHeadDotProductAttention):\n """"""Multi-head dot-product attention.\n Alias for ``MultiHeadDotProductAttention``.\n\n **NOTE**: ``MultiHeadAttention`` is a wrapper of ``MultiHeadDotProductAttention``,\n and so their implementations are identical. However ``MultiHeadAttention`` layers\n will, by default, be named ``MultiHeadAttention_{index}``, whereas ``MultiHeadDotProductAttention``\n will be named ``MultiHeadDotProductAttention_{index}``. 
Therefore, this could affect\n checkpointing, param collection names and RNG threading (since the layer name is\n used when generating new RNGs) within the module.\n\n Example usage::\n\n >>> import flax.linen as nn\n >>> import jax\n\n >>> layer = nn.MultiHeadAttention(num_heads=8, qkv_features=16)\n >>> key1, key2, key3, key4, key5, key6 = jax.random.split(jax.random.key(0), 6)\n >>> shape = (4, 3, 2, 5)\n >>> q, k, v = jax.random.uniform(key1, shape), jax.random.uniform(key2, shape), jax.random.uniform(key3, shape)\n >>> variables = layer.init(jax.random.key(0), q)\n\n >>> # different inputs for inputs_q, inputs_k and inputs_v\n >>> out = layer.apply(variables, q, k, v)\n >>> # equivalent to layer.apply(variables, inputs_q=q, inputs_k=k, inputs_v=k)\n >>> out = layer.apply(variables, q, k)\n >>> # equivalent to layer.apply(variables, inputs_q=q, inputs_k=q) and layer.apply(variables, inputs_q=q, inputs_k=q, inputs_v=q)\n >>> out = layer.apply(variables, q)\n\n >>> attention_kwargs = dict(\n ... num_heads=8,\n ... qkv_features=16,\n ... kernel_init=nn.initializers.ones,\n ... bias_init=nn.initializers.zeros,\n ... dropout_rate=0.5,\n ... deterministic=False,\n ... )\n >>> class Module(nn.Module):\n ... attention_kwargs: dict\n ...\n ... @nn.compact\n ... def __call__(self, x, dropout_rng=None):\n ... out1 = nn.MultiHeadAttention(**self.attention_kwargs)(x, dropout_rng=dropout_rng)\n ... out2 = nn.MultiHeadAttention(**self.attention_kwargs)(x, dropout_rng=dropout_rng)\n ... return out1, out2\n >>> module = Module(attention_kwargs)\n >>> variables = module.init({'params': key1, 'dropout': key2}, q)\n\n >>> # out1 and out2 are different.\n >>> out1, out2 = module.apply(variables, q, rngs={'dropout': key3})\n >>> # out3 and out4 are different.\n >>> # out1 and out3 are different. out2 and out4 are different.\n >>> out3, out4 = module.apply(variables, q, rngs={'dropout': key4})\n >>> # out1 and out2 are the same.\n >>> out1, out2 = module.apply(variables, q, dropout_rng=key5)\n >>> # out1 and out2 are the same as out3 and out4.\n >>> # providing a `dropout_rng` arg will take precedence over the `rngs` arg in `.apply`\n >>> out3, out4 = module.apply(variables, q, rngs={'dropout': key6}, dropout_rng=key5)\n\n Attributes:\n num_heads: number of attention heads. Features (i.e. inputs_q.shape[-1])\n should be divisible by the number of heads.\n dtype: the dtype of the computation (default: infer from inputs and params)\n param_dtype: the dtype passed to parameter initializers (default: float32)\n qkv_features: dimension of the key, query, and value.\n out_features: dimension of the last projection\n broadcast_dropout: bool: use a broadcasted dropout along batch dims.\n dropout_rate: dropout rate\n deterministic: if false, the attention weights are masked randomly using\n dropout, whereas if true, the attention weights are deterministic.\n precision: numerical precision of the computation; see ``jax.lax.Precision``\n for details.\n kernel_init: initializer for the kernel of the Dense layers.\n bias_init: initializer for the bias of the Dense layers.\n use_bias: bool: whether pointwise QKVO dense transforms use bias.\n attention_fn: dot_product_attention or compatible function. 
Accepts query,\n key, value, and returns output of shape ``[bs, dim1, dim2, ..., dimN,\n num_heads, value_channels]``\n decode: whether to prepare and use an autoregressive cache.\n normalize_qk: should QK normalization be applied (arxiv.org/abs/2302.05442).\n """"""\n\n\nclass SelfAttention(MultiHeadDotProductAttention):\n """"""Self-attention special case of multi-head dot-product attention.\n This layer is deprecated in favor of ``MultiHeadDotProductAttention``.\n\n Example usage::\n >>> import flax.linen as nn\n >>> import jax, jax.numpy as jnp\n >>> layer = nn.MultiHeadDotProductAttention(num_heads=8, qkv_features=16)\n >>> variables = layer.init(jax.random.key(0), jnp.ones((4, 3, 2, 5)))\n """"""\n\n @compact\n def __call__( # type: ignore\n self,\n inputs_q: Array,\n mask: Array | None = None,\n deterministic: bool | None = None,\n dropout_rng: PRNGKey | None = None,\n sow_weights: bool = False,\n ):\n """"""Applies multi-head dot product self-attention on the input data.\n\n Projects the inputs into multi-headed query, key, and value vectors,\n applies dot-product attention, and projects the results to an output vector.\n\n Args:\n inputs_q: input queries of shape ``[batch_sizes..., length, features]``.\n mask: attention mask of shape ``[batch_sizes..., num_heads, query_length,\n key/value_length]``. Attention weights are masked out if their\n corresponding mask value is ``False``.\n deterministic: if false, the attention weights are masked randomly using\n dropout, whereas if true, the attention weights are deterministic.\n\n Returns:\n output of shape ``[batch_sizes..., length, features]``.\n """"""\n warnings.warn(\n 'SelfAttention will be deprecated soon. Use '\n '`MultiHeadDotProductAttention.__call__(inputs_q)` instead. '\n 'See https://github.com/google/flax/discussions/3389 '\n 'for more information.',\n DeprecationWarning,\n )\n return super().__call__(\n inputs_q,\n mask=mask,\n deterministic=deterministic,\n dropout_rng=dropout_rng,\n sow_weights=sow_weights,\n )\n\n\n# mask-making utility functions\n\n\ndef make_attention_mask(\n query_input: Array,\n key_input: Array,\n pairwise_fn: Callable[..., Any] = jnp.multiply,\n extra_batch_dims: int = 0,\n dtype: Dtype = jnp.float32,\n):\n """"""Mask-making helper for attention weights.\n\n In case of 1d inputs (i.e., ``[batch..., len_q]``, ``[batch..., len_kv]``), the\n attention weights will be ``[batch..., heads, len_q, len_kv]`` and this\n function will produce ``[batch..., 1, len_q, len_kv]``.\n\n Args:\n query_input: a batched, flat input of query_length size\n key_input: a batched, flat input of key_length size\n pairwise_fn: broadcasting elementwise comparison function\n extra_batch_dims: number of extra batch dims to add singleton axes for, none\n by default\n dtype: mask return dtype\n\n Returns:\n A ``[batch..., 1, len_q, len_kv]`` shaped mask for 1d attention.\n """"""\n mask = pairwise_fn(\n jnp.expand_dims(query_input, axis=-1), jnp.expand_dims(key_input, axis=-2)\n )\n mask = jnp.expand_dims(mask, axis=-3)\n mask = jnp.expand_dims(mask, axis=tuple(range(extra_batch_dims)))\n return mask.astype(dtype)\n\n\ndef make_causal_mask(\n x: Array, extra_batch_dims: int = 0, dtype: Dtype = jnp.float32\n) -> Array:\n """"""Make a causal mask for self-attention.\n\n In case of 1d inputs (i.e., ``[batch..., len]``), the self-attention weights\n will be ``[batch..., heads, len, len]`` and this function will produce a\n causal mask of shape ``[batch..., 1, len, len]``.\n\n Args:\n x: input array of shape ``[batch..., len]``\n 
extra_batch_dims: number of batch dims to add singleton axes for, none by\n default\n dtype: mask return dtype\n\n Returns:\n A ``[batch..., 1, len, len]`` shaped causal mask for 1d attention.\n """"""\n idxs = jnp.broadcast_to(jnp.arange(x.shape[-1], dtype=jnp.int32), x.shape)\n return make_attention_mask(\n idxs,\n idxs,\n jnp.greater_equal,\n extra_batch_dims=extra_batch_dims,\n dtype=dtype,\n )\n\n\ndef combine_masks(\n *masks: Array | None, dtype: Dtype = jnp.float32\n) -> Array | None:\n """"""Combine attention masks.\n\n Args:\n *masks: set of attention mask arguments to combine, some can be None.\n dtype: dtype for the returned mask.\n\n Returns:\n Combined mask, reduced by logical and, returns None if no masks given.\n """"""\n masks_list = [m for m in masks if m is not None]\n if not masks_list:\n return None\n assert all(\n map(lambda x: x.ndim == masks_list[0].ndim, masks_list)\n ), f'masks must have same rank: {tuple(map(lambda x: x.ndim, masks_list))}'\n mask, *other_masks = masks_list\n for other_mask in other_masks:\n mask = jnp.logical_and(mask, other_mask)\n return mask.astype(dtype)\n",python,tab +605,1987907,".venv/lib/python3.10/site-packages/flax/linen/attention.py",27285,0,"",python,selection_mouse +606,1995502,".venv/lib/python3.10/site-packages/flax/linen/attention.py",15741,0,"",python,selection_mouse +607,2002836,".venv/lib/python3.10/site-packages/flax/linen/attention.py",16871,0,"",python,selection_mouse +608,2002967,".venv/lib/python3.10/site-packages/flax/linen/attention.py",16871,4,"mask",python,selection_mouse +609,2003859,".venv/lib/python3.10/site-packages/flax/linen/attention.py",16887,0,"",python,selection_mouse +610,2004011,".venv/lib/python3.10/site-packages/flax/linen/attention.py",16885,4,"None",python,selection_mouse +611,2004628,".venv/lib/python3.10/site-packages/flax/linen/attention.py",16895,0,"",python,selection_mouse +612,2004756,".venv/lib/python3.10/site-packages/flax/linen/attention.py",16892,4,"None",python,selection_mouse +613,2005666,".venv/lib/python3.10/site-packages/flax/linen/attention.py",16875,0,"",python,selection_mouse +614,2005802,".venv/lib/python3.10/site-packages/flax/linen/attention.py",16871,4,"mask",python,selection_mouse +615,2006525,".venv/lib/python3.10/site-packages/flax/linen/attention.py",16888,0,"",python,selection_mouse +616,2006661,".venv/lib/python3.10/site-packages/flax/linen/attention.py",16885,4,"None",python,selection_mouse +617,2007199,".venv/lib/python3.10/site-packages/flax/linen/attention.py",16893,0,"",python,selection_mouse +618,2007325,".venv/lib/python3.10/site-packages/flax/linen/attention.py",16892,4,"None",python,selection_mouse +619,2008147,".venv/lib/python3.10/site-packages/flax/linen/attention.py",16873,0,"",python,selection_mouse +620,2008286,".venv/lib/python3.10/site-packages/flax/linen/attention.py",16871,4,"mask",python,selection_mouse +621,2009083,".venv/lib/python3.10/site-packages/flax/linen/attention.py",16896,0,"",python,selection_mouse +622,2009635,".venv/lib/python3.10/site-packages/flax/linen/attention.py",16894,0,"",python,selection_mouse +623,2009797,".venv/lib/python3.10/site-packages/flax/linen/attention.py",16892,4,"None",python,selection_mouse +624,2010755,".venv/lib/python3.10/site-packages/flax/linen/attention.py",16871,0,"",python,selection_mouse +625,2010905,".venv/lib/python3.10/site-packages/flax/linen/attention.py",16871,4,"mask",python,selection_mouse +626,2011931,".venv/lib/python3.10/site-packages/flax/linen/attention.py",16893,0,"",python,selection_mouse 
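The decode branch recorded in the attention.py source above caches keys and values one position at a time. A minimal sketch of driving that cache, with layer sizes, input shapes, and the loop as illustrative assumptions rather than anything taken from the recorded session:

import jax
import jax.numpy as jnp
import flax.linen as nn

# Hypothetical sizes; decode=True switches the layer onto the cached path.
layer = nn.MultiHeadDotProductAttention(num_heads=2, qkv_features=8, decode=True)
x = jnp.ones((1, 4, 8))  # [batch, max_length, features]

# init with a full-length input so cached_key/cached_value are sized to max_length
variables = layer.init(jax.random.key(0), x)

# feed one position per call, carrying the mutated 'cache' collection forward
for i in range(x.shape[1]):
    y, updates = layer.apply(variables, x[:, i : i + 1, :], mutable=['cache'])
    variables = {**variables, **updates}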
+627,2012112,".venv/lib/python3.10/site-packages/flax/linen/attention.py",16892,4,"None",python,selection_mouse +628,2021844,"utils/nn.py",0,0,"",python,tab +629,2031789,"utils/nn.py",5765,0,"",python,selection_mouse +630,2033053,"utils/nn.py",5740,0,"",python,selection_mouse +631,2035141,"utils/nn.py",6528,0,"",python,selection_mouse +632,2035750,"utils/nn.py",6534,0,"",python,selection_mouse +633,2035899,"utils/nn.py",6532,3,"jnp",python,selection_mouse +634,2036224,"utils/nn.py",6532,4,"jnp.",python,selection_mouse +635,2036225,"utils/nn.py",6532,7,"jnp.tri",python,selection_mouse +636,2036225,"utils/nn.py",6532,8,"jnp.tri(",python,selection_mouse +637,2036225,"utils/nn.py",6532,9,"jnp.tri(z",python,selection_mouse +638,2036226,"utils/nn.py",6532,15,"jnp.tri(z.shape",python,selection_mouse +639,2036347,"utils/nn.py",6532,16,"jnp.tri(z.shape[",python,selection_mouse +640,2036367,"utils/nn.py",6532,17,"jnp.tri(z.shape[-",python,selection_mouse +641,2036394,"utils/nn.py",6532,18,"jnp.tri(z.shape[-2",python,selection_mouse +642,2036423,"utils/nn.py",6532,19,"jnp.tri(z.shape[-2]",python,selection_mouse +643,2036458,"utils/nn.py",6532,20,"jnp.tri(z.shape[-2])",python,selection_mouse +644,2036788,"utils/nn.py",6552,0,"",python,selection_mouse +645,2036835,"utils/nn.py",6551,0,"",python,selection_command +646,2037203,"utils/nn.py",6552,0,"",python,selection_mouse +647,2037206,"utils/nn.py",6551,0,"",python,selection_command +648,2037392,"utils/nn.py",6551,1,")",python,selection_mouse +649,2037420,"utils/nn.py",6552,0,"",python,selection_command +650,2037515,"utils/nn.py",6551,1,")",python,selection_mouse +651,2037516,"utils/nn.py",6550,2,"])",python,selection_mouse +652,2037516,"utils/nn.py",6549,3,"2])",python,selection_mouse +653,2037586,"utils/nn.py",6547,5,"[-2])",python,selection_mouse +654,2037587,"utils/nn.py",6545,7,"pe[-2])",python,selection_mouse +655,2037588,"utils/nn.py",6544,8,"ape[-2])",python,selection_mouse +656,2037621,"utils/nn.py",6543,9,"hape[-2])",python,selection_mouse +657,2037657,"utils/nn.py",6541,11,".shape[-2])",python,selection_mouse +658,2037679,"utils/nn.py",6540,12,"z.shape[-2])",python,selection_mouse +659,2037718,"utils/nn.py",6539,13,"(z.shape[-2])",python,selection_mouse +660,2037738,"utils/nn.py",6538,14,"i(z.shape[-2])",python,selection_mouse +661,2037766,"utils/nn.py",6537,15,"ri(z.shape[-2])",python,selection_mouse +662,2037815,"utils/nn.py",6536,16,"tri(z.shape[-2])",python,selection_mouse +663,2037874,"utils/nn.py",6535,17,".tri(z.shape[-2])",python,selection_mouse +664,2037891,"utils/nn.py",6534,18,"p.tri(z.shape[-2])",python,selection_mouse +665,2037964,"utils/nn.py",6533,19,"np.tri(z.shape[-2])",python,selection_mouse +666,2037981,"utils/nn.py",6532,20,"jnp.tri(z.shape[-2])",python,selection_mouse +667,2038766,"utils/nn.py",6532,0,"",python,selection_mouse +668,2038903,"utils/nn.py",6532,3,"jnp",python,selection_mouse +669,2039199,"utils/nn.py",6532,4,"jnp.",python,selection_mouse +670,2039200,"utils/nn.py",6532,7,"jnp.tri",python,selection_mouse +671,2039200,"utils/nn.py",6532,8,"jnp.tri(",python,selection_mouse +672,2039201,"utils/nn.py",6532,9,"jnp.tri(z",python,selection_mouse +673,2039232,"utils/nn.py",6532,15,"jnp.tri(z.shape",python,selection_mouse +674,2039318,"utils/nn.py",6532,16,"jnp.tri(z.shape[",python,selection_mouse +675,2039318,"utils/nn.py",6532,17,"jnp.tri(z.shape[-",python,selection_mouse +676,2039319,"utils/nn.py",6532,18,"jnp.tri(z.shape[-2",python,selection_mouse 
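The selections above land on the `mask=None` defaults in `__call__`. A hedged sketch of building such a mask with the helpers defined at the end of the module (the padding flags and shapes are assumptions):

import jax
import jax.numpy as jnp
import flax.linen as nn

valid = jnp.array([[1.0, 1.0, 1.0, 0.0]])   # hypothetical padding flags [batch, len]
pad = nn.make_attention_mask(valid, valid)  # [batch, 1, len, len], via jnp.multiply
causal = nn.make_causal_mask(valid)         # [batch, 1, len, len], positions i >= j
mask = nn.combine_masks(pad, causal)        # logical AND of both constraints

x = jnp.ones((1, 4, 8))
attn = nn.MultiHeadDotProductAttention(num_heads=2, qkv_features=8)
y = attn.apply(attn.init(jax.random.key(0), x, mask=mask), x, mask=mask)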
+677,2039344,"utils/nn.py",6532,20,"jnp.tri(z.shape[-2])",python,selection_mouse +678,2039883,"utils/nn.py",6552,0,"",python,selection_mouse +679,2039888,"utils/nn.py",6551,0,"",python,selection_command +680,2040502,"utils/nn.py",6552,0,"",python,selection_mouse +681,2040505,"utils/nn.py",6551,0,"",python,selection_command +682,2040697,"utils/nn.py",6551,1,")",python,selection_mouse +683,2040717,"utils/nn.py",6552,0,"",python,selection_command +684,2040780,"utils/nn.py",6551,1,")",python,selection_mouse +685,2040781,"utils/nn.py",6548,4,"-2])",python,selection_mouse +686,2040880,"utils/nn.py",6546,6,"e[-2])",python,selection_mouse +687,2040880,"utils/nn.py",6545,7,"pe[-2])",python,selection_mouse +688,2040881,"utils/nn.py",6544,8,"ape[-2])",python,selection_mouse +689,2040881,"utils/nn.py",6543,9,"hape[-2])",python,selection_mouse +690,2040882,"utils/nn.py",6542,10,"shape[-2])",python,selection_mouse +691,2040910,"utils/nn.py",6541,11,".shape[-2])",python,selection_mouse +692,2040939,"utils/nn.py",6540,12,"z.shape[-2])",python,selection_mouse +693,2040960,"utils/nn.py",6539,13,"(z.shape[-2])",python,selection_mouse +694,2040988,"utils/nn.py",6538,14,"i(z.shape[-2])",python,selection_mouse +695,2041015,"utils/nn.py",6536,16,"tri(z.shape[-2])",python,selection_mouse +696,2041067,"utils/nn.py",6535,17,".tri(z.shape[-2])",python,selection_mouse +697,2041070,"utils/nn.py",6534,18,"p.tri(z.shape[-2])",python,selection_mouse +698,2041123,"utils/nn.py",6533,19,"np.tri(z.shape[-2])",python,selection_mouse +699,2041203,"utils/nn.py",6532,20,"jnp.tri(z.shape[-2])",python,selection_mouse +700,2041942,"utils/nn.py",6532,0,"",python,selection_mouse +701,2041984,"utils/nn.py",6532,3,"jnp",python,selection_mouse +702,2042185,"utils/nn.py",6532,7,"jnp.tri",python,selection_mouse +703,2042240,"utils/nn.py",6532,9,"jnp.tri(z",python,selection_mouse +704,2042240,"utils/nn.py",6532,15,"jnp.tri(z.shape",python,selection_mouse +705,2042290,"utils/nn.py",6532,17,"jnp.tri(z.shape[-",python,selection_mouse +706,2042320,"utils/nn.py",6532,18,"jnp.tri(z.shape[-2",python,selection_mouse +707,2042346,"utils/nn.py",6532,20,"jnp.tri(z.shape[-2])",python,selection_mouse +708,2042698,"utils/nn.py",6552,0,"",python,selection_mouse +709,2042733,"utils/nn.py",6551,0,"",python,selection_command +710,2042903,"utils/nn.py",6552,0,"",python,selection_mouse +711,2042907,"utils/nn.py",6551,0,"",python,selection_command +712,2043149,"utils/nn.py",6551,1,")",python,selection_mouse +713,2043150,"utils/nn.py",6552,0,"",python,selection_command +714,2043250,"utils/nn.py",6550,2,"])",python,selection_mouse +715,2043251,"utils/nn.py",6547,5,"[-2])",python,selection_mouse +716,2043251,"utils/nn.py",6542,10,"shape[-2])",python,selection_mouse +717,2043360,"utils/nn.py",6541,11,".shape[-2])",python,selection_mouse +718,2043387,"utils/nn.py",6540,12,"z.shape[-2])",python,selection_mouse +719,2043420,"utils/nn.py",6539,13,"(z.shape[-2])",python,selection_mouse +720,2043470,"utils/nn.py",6536,16,"tri(z.shape[-2])",python,selection_mouse +721,2043586,"utils/nn.py",6535,17,".tri(z.shape[-2])",python,selection_mouse +722,2043632,"utils/nn.py",6532,20,"jnp.tri(z.shape[-2])",python,selection_mouse +723,2044341,"utils/nn.py",6532,0,"",python,selection_mouse +724,2044342,"utils/nn.py",6532,3,"jnp",python,selection_mouse +725,2044642,"utils/nn.py",6532,4,"jnp.",python,selection_mouse +726,2044642,"utils/nn.py",6532,7,"jnp.tri",python,selection_mouse +727,2044643,"utils/nn.py",6532,9,"jnp.tri(z",python,selection_mouse 
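The utils/nn.py selections repeatedly grab `jnp.tri(z.shape[-2])`, a lower-triangular causal mask; up to the leading batch/head dimensions it matches the pattern produced by make_causal_mask. A short check, with shapes assumed for illustration:

import jax.numpy as jnp
import flax.linen as nn

z = jnp.ones((2, 6, 8))                 # assumed [batch, len, features] activations
tri = jnp.tri(z.shape[-2])              # [len, len] ones on and below the diagonal
ref = nn.make_causal_mask(z[..., 0])    # [batch, 1, len, len]
assert bool(jnp.all(tri == ref[0, 0]))  # same pattern, modulo the leading dims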
+728,2044643,"utils/nn.py",6532,15,"jnp.tri(z.shape",python,selection_mouse +729,2044722,"utils/nn.py",6532,16,"jnp.tri(z.shape[",python,selection_mouse +730,2044722,"utils/nn.py",6532,18,"jnp.tri(z.shape[-2",python,selection_mouse +731,2044723,"utils/nn.py",6532,20,"jnp.tri(z.shape[-2])",python,selection_mouse +732,2045002,"utils/nn.py",6552,0,"",python,selection_mouse +733,2045005,"utils/nn.py",6551,0,"",python,selection_command +734,2049534,"utils/nn.py",3217,0,"",python,selection_mouse +735,2050776,"utils/nn.py",3295,0,"",python,selection_mouse +736,2050807,"utils/nn.py",3294,0,"",python,selection_command +737,2051884,"utils/nn.py",3286,0,"",python,selection_mouse +738,2052393,"utils/nn.py",3214,0,"",python,selection_mouse +739,2054556,"utils/nn.py",2867,0,"",python,selection_mouse +740,2055123,"utils/nn.py",2891,0,"",python,selection_mouse +741,2055134,"utils/nn.py",2890,0,"",python,selection_command +742,2091357,"TERMINAL",0,0,"Step 0, loss: 23.316503524780273\r\nStep 1, loss: 23.494535446166992\r\nStep 2, loss: 23.730228424072266\r\nStep 3, loss: 23.78915023803711\r\nStep 4, loss: 23.80891227722168\r\nStep 5, loss: 23.793149948120117\r\nStep 6, loss: 23.390960693359375\r\nStep 7, loss: 23.15038299560547\r\nStep 8, loss: 23.4328556060791\r\nStep 9, loss: 23.83868408203125\r\nStep 10, loss: 23.229887008666992\r\nStep 11, loss: 22.968379974365234\r\nStep 12, loss: 23.104434967041016\r\nStep 13, loss: 22.9370174407959\r\nStep 14, loss: 23.07267189025879\r\nStep 15, loss: 22.69853401184082\r\nStep 16, loss: 22.833133697509766\r\nStep 17, loss: 22.623390197753906\r\nStep 18, loss: 22.805448532104492\r\nStep 19, loss: 22.46842384338379\r\nStep 20, loss: 22.126110076904297\r\nStep 21, loss: 22.013687133789062\r\nStep 22, loss: 22.06637954711914\r\nStep 23, loss: 22.074575424194336\r\nStep 24, loss: 21.657609939575195\r\nStep 25, loss: 21.65119171142578\r\nStep 26, loss: 21.30472183227539\r\nStep 27, loss: 21.28836441040039\r\nStep 28, loss: 21.253355026245117\r\nStep 29, loss: 21.13030242919922\r\nStep 30, loss: 20.80233383178711\r\nStep 31, loss: 20.6905460357666\r\nStep 32, loss: 20.343433380126953\r\nStep 33, loss: 20.149208068847656\r\nStep 34, loss: 20.130001068115234\r\nStep 35, loss: 19.854604721069336\r\nStep 36, loss: 19.82932472229004\r\nStep 37, loss: 19.715286254882812\r\nStep 38, loss: 19.43071174621582\r\nStep 39, loss: 19.190099716186523\r\nStep 40, loss: 19.299043655395508\r\nStep 41, loss: 19.005701065063477\r\nStep 42, loss: 19.012231826782227\r\nStep 43, loss: 18.8094539642334\r\nStep 44, loss: 18.803787231445312\r\nStep 45, loss: 18.627302169799805\r\nStep 46, loss: 18.533035278320312\r\nStep 47, loss: 18.443218231201172\r\nStep 48, loss: 18.240646362304688\r\nStep 49, loss: 18.08544921875\r\nStep 50, loss: 18.155040740966797\r\nStep 51, loss: 18.1156063079834\r\nStep 52, loss: 18.010835647583008\r\nStep 53, loss: 17.92870330810547\r\nStep 54, loss: 17.777223587036133\r\nStep 55, loss: 17.731197357177734\r\nStep 56, loss: 17.60744857788086\r\nStep 57, loss: 17.33075523376465\r\nStep 58, loss: 17.37189292907715\r\nStep 59, loss: 17.39516258239746\r\nStep 60, loss: 17.215341567993164\r\nStep 61, loss: 17.019044876098633\r\nStep 62, loss: 16.908300399780273\r\nStep 63, loss: 16.93767738342285\r\nStep 64, loss: 16.618053436279297\r\nStep 65, loss: 16.66626739501953\r\nStep 66, loss: 16.52454376220703\r\nStep 67, loss: 16.4086971282959\r\nStep 68, loss: 16.41490364074707\r\nStep 69, loss: 16.233192443847656\r\nStep 70, loss: 15.901132583618164\r\nStep 71, loss: 
15.888304710388184\r\nStep 72, loss: 15.797022819519043\r\nStep 73, loss: 15.717949867248535\r\nStep 74, loss: 15.905962944030762\r\nStep 75, loss: 15.710003852844238\r\nStep 76, loss: 15.411543846130371\r\nStep 77, loss: 15.431794166564941\r\nStep 78, loss: 15.41840648651123\r\nStep 79, loss: 15.312957763671875\r\nStep 80, loss: 15.165640830993652\r\nStep 81, loss: 15.194469451904297\r\nStep 82, loss: 15.253994941711426\r\nStep 83, loss: 14.950176239013672\r\nStep 84, loss: 15.123098373413086\r\nStep 85, loss: 14.940669059753418\r\nStep 86, loss: 14.846564292907715\r\nStep 87, loss: 14.621622085571289\r\nStep 88, loss: 14.527816772460938\r\nStep 89, loss: 14.486361503601074\r\nStep 90, loss: 14.469058990478516\r\nStep 91, loss: 14.704410552978516\r\nStep 92, loss: 14.415651321411133\r\nStep 93, loss: 14.259278297424316\r\nStep 94, loss: 14.137989044189453\r\nStep 95, loss: 13.88222599029541\r\nStep 96, loss: 14.01948070526123\r\nStep 97, loss: 13.856632232666016\r\nStep 98, loss: 13.616273880004883\r\nStep 99, loss: 13.495616912841797\r\nStep 100, loss: 13.21973991394043\r\nStep 101, loss: 13.387418746948242\r\nStep 102, loss: 13.056963920593262\r\nStep 103, loss: 13.581015586853027\r\nStep 104, loss: 13.344913482666016\r\nStep 105, loss: 12.812665939331055\r\nStep 106, loss: 12.263603210449219\r\nStep 107, loss: 12.624507904052734\r\nStep 108, loss: 12.448198318481445\r\nStep 109, loss: 12.28633975982666\r\nStep 110, loss: 12.160036087036133\r\nStep 111, loss: 12.113042831420898\r\nStep 112, loss: 11.980876922607422\r\nStep 113, loss: 11.889405250549316\r\nStep 114, loss: 11.633279800415039\r\nStep 115, loss: 11.594181060791016\r\nStep 116, loss: 11.282090187072754\r\nStep 117, loss: 11.348597526550293\r\nStep 118, loss: 11.286100387573242\r\nStep 119, loss: 11.263978958129883\r\nStep 120, loss: 11.043057441711426\r\nStep 121, loss: 11.527467727661133\r\nStep 122, loss: 11.375204086303711\r\nStep 123, loss: 11.068164825439453\r\nStep 124, loss: 10.53603458404541\r\nStep 125, loss: 10.457991600036621\r\nStep 126, loss: 10.58580493927002\r\nStep 127, loss: 10.604734420776367\r\nStep 128, loss: 10.664406776428223\r\nStep 129, loss: 10.992063522338867\r\nStep 130, loss: 10.639398574829102\r\nStep 131, loss: 10.583847999572754\r\nStep 132, loss: 10.397967338562012\r\nStep 133, loss: 10.10909366607666\r\nStep 134, loss: 10.0088529586792\r\nStep 135, loss: 10.267728805541992\r\nStep 136, loss: 10.040645599365234\r\nStep 137, loss: 10.11013126373291\r\nStep 138, loss: 10.320940971374512\r\nStep 139, loss: 9.622902870178223\r\nStep 140, loss: 9.467279434204102\r\nStep 141, loss: 9.398171424865723\r\nStep 142, loss: 9.29957389831543\r\nStep 143, loss: 9.813874244689941\r\nStep 144, loss: 9.170883178710938\r\nStep 145, loss: 9.16419506072998\r\nStep 146, loss: 9.248353958129883\r\nStep 147, loss: 9.185125350952148\r\nStep 148, loss: 9.238696098327637\r\nStep 149, loss: 8.950000762939453\r\nStep 150, loss: 8.954133033752441\r\nStep 151, loss: 9.005464553833008\r\nStep 152, loss: 8.958908081054688\r\nStep 153, loss: 9.176563262939453\r\nStep 154, loss: 9.108915328979492\r\nStep 155, loss: 8.684147834777832\r\nStep 156, loss: 8.772013664245605\r\nStep 157, loss: 9.038022994995117\r\nStep 158, loss: 8.802122116088867\r\nStep 159, loss: 8.756733894348145\r\nStep 160, loss: 8.731100082397461\r\nStep 161, loss: 8.391814231872559\r\nStep 162, loss: 8.301539421081543\r\nStep 163, loss: 8.23969841003418\r\nStep 164, loss: 8.091272354125977\r\nStep 165, loss: 8.552835464477539\r\nStep 166, loss: 
8.256057739257812\r\nStep 167, loss: 8.189535140991211\r\nStep 168, loss: 7.996348857879639\r\nStep 169, loss: 8.250320434570312\r\nStep 170, loss: 8.180702209472656\r\nStep 171, loss: 8.127228736877441\r\nStep 172, loss: 7.963306427001953\r\nStep 173, loss: 7.968501091003418\r\nStep 174, loss: 7.795525074005127\r\nStep 175, loss: 8.193002700805664\r\nStep 176, loss: 7.875818252563477\r\nStep 177, loss: 7.85451602935791\r\nStep 178, loss: 7.488790512084961\r\nStep 179, loss: 7.770381927490234\r\nStep 180, loss: 7.7291259765625\r\nStep 181, loss: 7.844414234161377\r\nStep 182, loss: 7.45212459564209\r\nStep 183, loss: 7.668381690979004\r\nStep 184, loss: 7.651785850524902\r\nStep 185, loss: 7.51216983795166\r\nStep 186, loss: 7.267374515533447\r\nStep 187, loss: 7.263881683349609\r\nStep 188, loss: 7.486958980560303\r\nStep 189, loss: 7.246147632598877\r\nStep 190, loss: 7.460647106170654\r\nStep 191, loss: 7.761508941650391\r\nStep 192, loss: 7.566250324249268\r\nStep 193, loss: 7.0336151123046875\r\nStep 194, loss: 7.259377479553223\r\nStep 195, loss: 7.042681694030762\r\nStep 196, loss: 7.074146747589111\r\nStep 197, loss: 7.113814353942871\r\nStep 198, loss: 6.973835468292236\r\nStep 199, loss: 7.288123607635498\r\nStep 200, loss: 6.95873498916626\r\nStep 201, loss: 7.105992794036865\r\nStep 202, loss: 7.1333746910095215\r\nStep 203, loss: 6.847956657409668\r\nStep 204, loss: 6.746923923492432\r\nStep 205, loss: 6.967642784118652\r\nStep 206, loss: 6.736564636230469\r\nStep 207, loss: 6.832950115203857\r\nStep 208, loss: 7.1226701736450195\r\nStep 209, loss: 7.433104515075684\r\nStep 210, loss: 7.321633815765381\r\nStep 211, loss: 7.312641143798828\r\nStep 212, loss: 7.144681453704834\r\nStep 213, loss: 7.056727886199951\r\nStep 214, loss: 6.913035869598389\r\nStep 215, loss: 7.270552158355713\r\nStep 216, loss: 6.888924598693848\r\nStep 217, loss: 6.686014652252197\r\nStep 218, loss: 6.850816249847412\r\nStep 219, loss: 7.136499404907227\r\nStep 220, loss: 7.07992696762085\r\nStep 221, loss: 7.151065349578857\r\nStep 222, loss: 7.051300048828125\r\nStep 223, loss: 7.2389750480651855\r\nStep 224, loss: 6.9565348625183105\r\nStep 225, loss: 6.872081756591797\r\nStep 226, loss: 7.034422874450684\r\nStep 227, loss: 6.866305351257324\r\nStep 228, loss: 7.02532434463501\r\nStep 229, loss: 7.398115634918213\r\nStep 230, loss: 6.955196380615234\r\nStep 231, loss: 7.1861066818237305\r\nStep 232, loss: 6.930147647857666\r\nStep 233, loss: 6.815465450286865\r\nStep 234, loss: 6.942207336425781\r\nStep 235, loss: 7.350676536560059\r\nStep 236, loss: 7.424262046813965\r\nStep 237, loss: 7.144227027893066\r\nStep 238, loss: 7.344553470611572\r\nStep 239, loss: 7.27285623550415\r\nStep 240, loss: 7.603146553039551\r\n
22.46842384338379\r\nStep 20, loss: 22.126110076904297\r\nStep 21, loss: 22.013687133789062\r\nStep 22, loss: 22.06637954711914\r\nStep 23, loss: 22.074575424194336\r\nStep 24, loss: 21.657609939575195\r\nStep 25, loss: 21.65119171142578\r\nStep 26, loss: 21.30472183227539\r\nStep 27, loss: 21.28836441040039\r\nStep 28, loss: 21.253355026245117\r\nStep 29, loss: 21.13030242919922\r\nStep 0, loss: 23.316503524780273\r\nStep 1, loss: 23.494535446166992\r\nStep 2, loss: 23.730228424072266\r\nStep 3, loss: 23.78915023803711\r\nStep 4, loss: 23.80891227722168\r\nStep 5, loss: 23.793149948120117\r\nStep 6, loss: 23.390960693359375\r\nStep 7, loss: 23.15038299560547\r\nStep 8, loss: 23.4328556060791\r\nStep 9, loss: 23.83868408203125\r\nStep 10, loss: 23.229887008666992\r\nStep 11, loss: 22.968379974365234\r\nStep 12, loss: 23.104434967041016\r\nStep 13, loss: 22.9370174407959\r\nStep 14, loss: 23.07267189025879\r\nStep 15, loss: 22.69853401184082\r\nStep 16, loss: 22.833133697509766\r\nStep 17, loss: 22.623390197753906\r\nStep 18, loss: 22.805448532104492\r\nStep 19, loss: 22.46842384338379\r\nStep 20, loss: 22.126110076904297\r\nStep 21, loss: 22.013687133789062\r\nStep 22, loss: 22.06637954711914\r\nStep 23, loss: 22.074575424194336\r\nStep 24, loss: 21.657609939575195\r\nStep 25, loss: 21.65119171142578\r\nStep 26, loss: 21.30472183227539\r\nStep 27, loss: 21.28836441040039\r\nStep 28, loss: 21.253355026245117\r\nStep 29, loss: 21.13030242919922\r\nStep 30, loss: 20.80233383178711\r\nStep 31, loss: 20.6905460357666\r\nStep 32, loss: 20.343433380126953\r\nStep 33, loss: 20.149208068847656\r\nStep 34, loss: 20.130001068115234\r\nStep 35, loss: 19.854604721069336\r\nStep 36, loss: 19.82932472229004\r\nStep 37, loss: 19.715286254882812\r\nStep 38, loss: 19.43071174621582\r\nStep 39, loss: 19.190099716186523\r\nStep 40, loss: 19.299043655395508\r\nStep 41, loss: 19.005701065063477\r\nStep 42, loss: 19.012231826782227\r\nStep 43, loss: 18.8094539642334\r\nStep 44, loss: 18.803787231445312\r\nStep 45, loss: 18.627302169799805\r\nStep 46, loss: 18.533035278320312\r\nStep 47, loss: 18.443218231201172\r\nStep 48, loss: 18.240646362304688\r\nStep 49, loss: 18.08544921875\r\nStep 50, loss: 18.155040740966797\r\nStep 51, loss: 18.1156063079834\r\nStep 52, loss: 18.010835647583008\r\nStep 53, loss: 17.92870330810547\r\nStep 54, loss: 17.777223587036133\r\nStep 55, loss: 17.731197357177734\r\nStep 56, loss: 17.60744857788086\r\nStep 57, loss: 17.33075523376465\r\nStep 58, loss: 17.37189292907715\r\nStep 59, loss: 17.39516258239746\r\nStep 30, loss: 20.80233383178711\r\nStep 31, loss: 20.6905460357666\r\nStep 32, loss: 20.343433380126953\r\nStep 33, loss: 20.149208068847656\r\nStep 34, loss: 20.130001068115234\r\nStep 35, loss: 19.854604721069336\r\nStep 36, loss: 19.82932472229004\r\nStep 37, loss: 19.715286254882812\r\nStep 38, loss: 19.43071174621582\r\nStep 39, loss: 19.190099716186523\r\nStep 40, loss: 19.299043655395508\r\nStep 41, loss: 19.005701065063477\r\nStep 42, loss: 19.012231826782227\r\nStep 43, loss: 18.8094539642334\r\nStep 44, loss: 18.803787231445312\r\nStep 45, loss: 18.627302169799805\r\nStep 46, loss: 18.533035278320312\r\nStep 47, loss: 18.443218231201172\r\nStep 48, loss: 18.240646362304688\r\nStep 49, loss: 18.08544921875\r\nStep 50, loss: 18.155040740966797\r\nStep 51, loss: 18.1156063079834\r\nStep 52, loss: 18.010835647583008\r\nStep 53, loss: 17.92870330810547\r\nStep 54, loss: 17.777223587036133\r\nStep 55, loss: 17.731197357177734\r\nStep 56, loss: 17.60744857788086\r\nStep 57, 
loss: 17.33075523376465\r\nStep 58, loss: 17.37189292907715\r\nStep 59, loss: 17.39516258239746\r\nStep 60, loss: 17.215341567993164\r\nStep 61, loss: 17.019044876098633\r\nStep 62, loss: 16.908300399780273\r\nStep 63, loss: 16.93767738342285\r\nStep 64, loss: 16.618053436279297\r\nStep 65, loss: 16.66626739501953\r\nStep 66, loss: 16.52454376220703\r\nStep 67, loss: 16.4086971282959\r\nStep 68, loss: 16.41490364074707\r\nStep 69, loss: 16.233192443847656\r\nStep 70, loss: 15.901132583618164\r\nStep 71, loss: 15.888304710388184\r\nStep 72, loss: 15.797022819519043\r\nStep 73, loss: 15.717949867248535\r\nStep 74, loss: 15.905962944030762\r\nStep 75, loss: 15.710003852844238\r\nStep 76, loss: 15.411543846130371\r\nStep 77, loss: 15.431794166564941\r\nStep 78, loss: 15.41840648651123\r\nStep 79, loss: 15.312957763671875\r\nStep 80, loss: 15.165640830993652\r\nStep 81, loss: 15.194469451904297\r\nStep 82, loss: 15.253994941711426\r\nStep 83, loss: 14.950176239013672\r\nStep 84, loss: 15.123098373413086\r\nStep 85, loss: 14.940669059753418\r\nStep 86, loss: 14.846564292907715\r\nStep 87, loss: 14.621622085571289\r\nStep 88, loss: 14.527816772460938\r\nStep 89, loss: 14.486361503601074\r\nStep 60, loss: 17.215341567993164\r\nStep 61, loss: 17.019044876098633\r\nStep 62, loss: 16.908300399780273\r\nStep 63, loss: 16.93767738342285\r\nStep 64, loss: 16.618053436279297\r\nStep 65, loss: 16.66626739501953\r\nStep 66, loss: 16.52454376220703\r\nStep 67, loss: 16.4086971282959\r\nStep 68, loss: 16.41490364074707\r\nStep 69, loss: 16.233192443847656\r\nStep 70, loss: 15.901132583618164\r\nStep 71, loss: 15.888304710388184\r\nStep 72, loss: 15.797022819519043\r\nStep 73, loss: 15.717949867248535\r\nStep 74, loss: 15.905962944030762\r\nStep 75, loss: 15.710003852844238\r\nStep 76, loss: 15.411543846130371\r\nStep 77, loss: 15.431794166564941\r\nStep 78, loss: 15.41840648651123\r\nStep 79, loss: 15.312957763671875\r\nStep 80, loss: 15.165640830993652\r\nStep 81, loss: 15.194469451904297\r\nStep 82, loss: 15.253994941711426\r\nStep 83, loss: 14.950176239013672\r\nStep 84, loss: 15.123098373413086\r\nStep 85, loss: 14.940669059753418\r\nStep 86, loss: 14.846564292907715\r\nStep 87, loss: 14.621622085571289\r\nStep 88, loss: 14.527816772460938\r\nStep 89, loss: 14.486361503601074\r\nStep 90, loss: 14.469058990478516\r\nStep 91, loss: 14.704410552978516\r\nStep 92, loss: 14.415651321411133\r\nStep 93, loss: 14.259278297424316\r\nStep 94, loss: 14.137989044189453\r\nStep 95, loss: 13.88222599029541\r\nStep 96, loss: 14.01948070526123\r\nStep 97, loss: 13.856632232666016\r\nStep 98, loss: 13.616273880004883\r\nStep 99, loss: 13.495616912841797\r\nStep 100, loss: 13.21973991394043\r\nStep 101, loss: 13.387418746948242\r\nStep 102, loss: 13.056963920593262\r\nStep 103, loss: 13.581015586853027\r\nStep 104, loss: 13.344913482666016\r\nStep 105, loss: 12.812665939331055\r\nStep 106, loss: 12.263603210449219\r\nStep 107, loss: 12.624507904052734\r\nStep 108, loss: 12.448198318481445\r\nStep 109, loss: 12.28633975982666\r\nStep 110, loss: 12.160036087036133\r\nStep 111, loss: 12.113042831420898\r\nStep 112, loss: 11.980876922607422\r\nStep 113, loss: 11.889405250549316\r\nStep 114, loss: 11.633279800415039\r\nStep 115, loss: 11.594181060791016\r\nStep 116, loss: 11.282090187072754\r\nStep 117, loss: 11.348597526550293\r\nStep 118, loss: 11.286100387573242\r\nStep 90, loss: 14.469058990478516\r\nStep 91, loss: 14.704410552978516\r\nStep 92, loss: 14.415651321411133\r\nStep 93, loss: 14.259278297424316\r\nStep 94, 
loss: 14.137989044189453\r\nStep 95, loss: 13.88222599029541\r\nStep 96, loss: 14.01948070526123\r\nStep 97, loss: 13.856632232666016\r\nStep 98, loss: 13.616273880004883\r\nStep 99, loss: 13.495616912841797\r\nStep 100, loss: 13.21973991394043\r\nStep 101, loss: 13.387418746948242\r\nStep 102, loss: 13.056963920593262\r\nStep 103, loss: 13.581015586853027\r\nStep 104, loss: 13.344913482666016\r\nStep 105, loss: 12.812665939331055\r\nStep 106, loss: 12.263603210449219\r\nStep 107, loss: 12.624507904052734\r\nStep 108, loss: 12.448198318481445\r\nStep 109, loss: 12.28633975982666\r\nStep 110, loss: 12.160036087036133\r\nStep 111, loss: 12.113042831420898\r\nStep 112, loss: 11.980876922607422\r\nStep 113, loss: 11.889405250549316\r\nStep 114, loss: 11.633279800415039\r\nStep 115, loss: 11.594181060791016\r\nStep 116, loss: 11.282090187072754\r\nStep 117, loss: 11.348597526550293\r\nStep 118, loss: 11.286100387573242\r\nStep 119, loss: 11.263978958129883\r\nStep 120, loss: 11.043057441711426\r\nStep 119, loss: 11.263978958129883\r\nStep 120, loss: 11.043057441711426\r\nStep 121, loss: 11.527467727661133\r\nStep 122, loss: 11.375204086303711\r\nStep 123, loss: 11.068164825439453\r\nStep 124, loss: 10.53603458404541\r\nStep 125, loss: 10.457991600036621\r\nStep 126, loss: 10.58580493927002\r\nStep 127, loss: 10.604734420776367\r\nStep 128, loss: 10.664406776428223\r\nStep 129, loss: 10.992063522338867\r\nStep 130, loss: 10.639398574829102\r\nStep 131, loss: 10.583847999572754\r\nStep 132, loss: 10.397967338562012\r\nStep 133, loss: 10.10909366607666\r\nStep 134, loss: 10.0088529586792\r\nStep 135, loss: 10.267728805541992\r\nStep 136, loss: 10.040645599365234\r\nStep 137, loss: 10.11013126373291\r\nStep 138, loss: 10.320940971374512\r\nStep 139, loss: 9.622902870178223\r\nStep 140, loss: 9.467279434204102\r\nStep 141, loss: 9.398171424865723\r\nStep 142, loss: 9.29957389831543\r\nStep 143, loss: 9.813874244689941\r\nStep 144, loss: 9.170883178710938\r\nStep 145, loss: 9.16419506072998\r\nStep 146, loss: 9.248353958129883\r\nStep 147, loss: 9.185125350952148\r\nStep 148, loss: 9.238696098327637\r\nStep 149, loss: 8.950000762939453\r\nStep 121, loss: 11.527467727661133\r\nStep 122, loss: 11.375204086303711\r\nStep 123, loss: 11.068164825439453\r\nStep 124, loss: 10.53603458404541\r\nStep 125, loss: 10.457991600036621\r\nStep 126, loss: 10.58580493927002\r\nStep 127, loss: 10.604734420776367\r\nStep 128, loss: 10.664406776428223\r\nStep 129, loss: 10.992063522338867\r\nStep 130, loss: 10.639398574829102\r\nStep 131, loss: 10.583847999572754\r\nStep 132, loss: 10.397967338562012\r\nStep 133, loss: 10.10909366607666\r\nStep 134, loss: 10.0088529586792\r\nStep 135, loss: 10.267728805541992\r\nStep 136, loss: 10.040645599365234\r\nStep 137, loss: 10.11013126373291\r\nStep 138, loss: 10.320940971374512\r\nStep 139, loss: 9.622902870178223\r\nStep 140, loss: 9.467279434204102\r\nStep 141, loss: 9.398171424865723\r\nStep 142, loss: 9.29957389831543\r\nStep 143, loss: 9.813874244689941\r\nStep 144, loss: 9.170883178710938\r\nStep 145, loss: 9.16419506072998\r\nStep 146, loss: 9.248353958129883\r\nStep 147, loss: 9.185125350952148\r\nStep 148, loss: 9.238696098327637\r\nStep 149, loss: 8.950000762939453\r\nStep 150, loss: 8.954133033752441\r\nStep 151, loss: 9.005464553833008\r\nStep 152, loss: 8.958908081054688\r\nStep 153, loss: 9.176563262939453\r\nStep 154, loss: 9.108915328979492\r\nStep 155, loss: 8.684147834777832\r\nStep 156, loss: 8.772013664245605\r\nStep 157, loss: 9.038022994995117\r\nStep 158, 
loss: 8.802122116088867\r\nStep 159, loss: 8.756733894348145\r\nStep 160, loss: 8.731100082397461\r\nStep 161, loss: 8.391814231872559\r\nStep 162, loss: 8.301539421081543\r\nStep 163, loss: 8.23969841003418\r\nStep 164, loss: 8.091272354125977\r\nStep 165, loss: 8.552835464477539\r\nStep 166, loss: 8.256057739257812\r\nStep 167, loss: 8.189535140991211\r\nStep 168, loss: 7.996348857879639\r\nStep 169, loss: 8.250320434570312\r\nStep 170, loss: 8.180702209472656\r\nStep 171, loss: 8.127228736877441\r\nStep 172, loss: 7.963306427001953\r\nStep 173, loss: 7.968501091003418\r\nStep 174, loss: 7.795525074005127\r\nStep 175, loss: 8.193002700805664\r\nStep 176, loss: 7.875818252563477\r\nStep 177, loss: 7.85451602935791\r\nStep 178, loss: 7.488790512084961\r\nStep 179, loss: 7.770381927490234\r\nStep 150, loss: 8.954133033752441\r\nStep 151, loss: 9.005464553833008\r\nStep 152, loss: 8.958908081054688\r\nStep 153, loss: 9.176563262939453\r\nStep 154, loss: 9.108915328979492\r\nStep 155, loss: 8.684147834777832\r\nStep 156, loss: 8.772013664245605\r\nStep 157, loss: 9.038022994995117\r\nStep 158, loss: 8.802122116088867\r\nStep 159, loss: 8.756733894348145\r\nStep 160, loss: 8.731100082397461\r\nStep 161, loss: 8.391814231872559\r\nStep 162, loss: 8.301539421081543\r\nStep 163, loss: 8.23969841003418\r\nStep 164, loss: 8.091272354125977\r\nStep 165, loss: 8.552835464477539\r\nStep 166, loss: 8.256057739257812\r\nStep 167, loss: 8.189535140991211\r\nStep 168, loss: 7.996348857879639\r\nStep 169, loss: 8.250320434570312\r\nStep 170, loss: 8.180702209472656\r\nStep 171, loss: 8.127228736877441\r\nStep 172, loss: 7.963306427001953\r\nStep 173, loss: 7.968501091003418\r\nStep 174, loss: 7.795525074005127\r\nStep 175, loss: 8.193002700805664\r\nStep 176, loss: 7.875818252563477\r\nStep 177, loss: 7.85451602935791\r\nStep 178, loss: 7.488790512084961\r\nStep 179, loss: 7.770381927490234\r\nStep 180, loss: 7.7291259765625\r\nStep 181, loss: 7.844414234161377\r\nStep 182, loss: 7.45212459564209\r\nStep 183, loss: 7.668381690979004\r\nStep 184, loss: 7.651785850524902\r\nStep 185, loss: 7.51216983795166\r\nStep 186, loss: 7.267374515533447\r\nStep 187, loss: 7.263881683349609\r\nStep 188, loss: 7.486958980560303\r\nStep 189, loss: 7.246147632598877\r\nStep 190, loss: 7.460647106170654\r\nStep 191, loss: 7.761508941650391\r\nStep 192, loss: 7.566250324249268\r\nStep 193, loss: 7.0336151123046875\r\nStep 194, loss: 7.259377479553223\r\nStep 195, loss: 7.042681694030762\r\nStep 196, loss: 7.074146747589111\r\nStep 197, loss: 7.113814353942871\r\nStep 198, loss: 6.973835468292236\r\nStep 199, loss: 7.288123607635498\r\nStep 200, loss: 6.95873498916626\r\nStep 201, loss: 7.105992794036865\r\nStep 202, loss: 7.1333746910095215\r\nStep 203, loss: 6.847956657409668\r\nStep 204, loss: 6.746923923492432\r\nStep 205, loss: 6.967642784118652\r\nStep 206, loss: 6.736564636230469\r\nStep 207, loss: 6.832950115203857\r\nStep 208, loss: 7.1226701736450195\r\nStep 209, loss: 7.433104515075684\r\nStep 180, loss: 7.7291259765625\r\nStep 181, loss: 7.844414234161377\r\nStep 182, loss: 7.45212459564209\r\nStep 183, loss: 7.668381690979004\r\nStep 184, loss: 7.651785850524902\r\nStep 185, loss: 7.51216983795166\r\nStep 186, loss: 7.267374515533447\r\nStep 187, loss: 7.263881683349609\r\nStep 188, loss: 7.486958980560303\r\nStep 189, loss: 7.246147632598877\r\nStep 190, loss: 7.460647106170654\r\nStep 191, loss: 7.761508941650391\r\nStep 192, loss: 7.566250324249268\r\nStep 193, loss: 7.0336151123046875\r\nStep 194, loss: 
7.259377479553223\r\nStep 195, loss: 7.042681694030762\r\nStep 196, loss: 7.074146747589111\r\nStep 197, loss: 7.113814353942871\r\nStep 198, loss: 6.973835468292236\r\nStep 199, loss: 7.288123607635498\r\nStep 200, loss: 6.95873498916626\r\nStep 201, loss: 7.105992794036865\r\nStep 202, loss: 7.1333746910095215\r\nStep 203, loss: 6.847956657409668\r\nStep 204, loss: 6.746923923492432\r\nStep 205, loss: 6.967642784118652\r\nStep 206, loss: 6.736564636230469\r\nStep 207, loss: 6.832950115203857\r\nStep 208, loss: 7.1226701736450195\r\nStep 209, loss: 7.433104515075684\r\nStep 210, loss: 7.321633815765381\r\nStep 211, loss: 7.312641143798828\r\nStep 212, loss: 7.144681453704834\r\nStep 213, loss: 7.056727886199951\r\nStep 214, loss: 6.913035869598389\r\nStep 215, loss: 7.270552158355713\r\nStep 216, loss: 6.888924598693848\r\nStep 217, loss: 6.686014652252197\r\nStep 218, loss: 6.850816249847412\r\nStep 219, loss: 7.136499404907227\r\nStep 220, loss: 7.07992696762085\r\nStep 221, loss: 7.151065349578857\r\nStep 222, loss: 7.051300048828125\r\nStep 223, loss: 7.2389750480651855\r\nStep 224, loss: 6.9565348625183105\r\nStep 225, loss: 6.872081756591797\r\nStep 226, loss: 7.034422874450684\r\nStep 227, loss: 6.866305351257324\r\nStep 228, loss: 7.02532434463501\r\nStep 229, loss: 7.398115634918213\r\nStep 230, loss: 6.955196380615234\r\nStep 231, loss: 7.1861066818237305\r\nStep 232, loss: 6.930147647857666\r\nStep 233, loss: 6.815465450286865\r\nStep 234, loss: 6.942207336425781\r\nStep 235, loss: 7.350676536560059\r\nStep 236, loss: 7.424262046813965\r\nStep 237, loss: 7.144227027893066\r\nStep 238, loss: 7.344553470611572\r\nStep 239, loss: 7.27285623550415\r\nStep 210, loss: 7.321633815765381\r\nStep 211, loss: 7.312641143798828\r\nStep 212, loss: 7.144681453704834\r\nStep 213, loss: 7.056727886199951\r\nStep 214, loss: 6.913035869598389\r\nStep 215, loss: 7.270552158355713\r\nStep 216, loss: 6.888924598693848\r\nStep 217, loss: 6.686014652252197\r\nStep 218, loss: 6.850816249847412\r\nStep 219, loss: 7.136499404907227\r\nStep 220, loss: 7.07992696762085\r\nStep 221, loss: 7.151065349578857\r\nStep 222, loss: 7.051300048828125\r\nStep 223, loss: 7.2389750480651855\r\nStep 224, loss: 6.9565348625183105\r\nStep 225, loss: 6.872081756591797\r\nStep 226, loss: 7.034422874450684\r\nStep 227, loss: 6.866305351257324\r\nStep 228, loss: 7.02532434463501\r\nStep 229, loss: 7.398115634918213\r\nStep 230, loss: 6.955196380615234\r\nStep 231, loss: 7.1861066818237305\r\nStep 232, loss: 6.930147647857666\r\nStep 233, loss: 6.815465450286865\r\nStep 234, loss: 6.942207336425781\r\nStep 235, loss: 7.350676536560059\r\nStep 236, loss: 7.424262046813965\r\nStep 237, loss: 7.144227027893066\r\nStep 238, loss: 7.344553470611572\r\nStep 239, loss: 7.27285623550415\r\nStep 240, loss: 7.603146553039551\r\nStep 240, loss: 7.603146553039551\r\nStep 0, loss: 23.316503524780273\r\nStep 1, loss: 23.494535446166992\r\nStep 2, loss: 23.730228424072266\r\nStep 3, loss: 23.78915023803711\r\nStep 4, loss: 23.80891227722168\r\nStep 5, loss: 23.793149948120117\r\nStep 6, loss: 23.390960693359375\r\nStep 7, loss: 23.15038299560547\r\nStep 8, loss: 23.4328556060791\r\nStep 9, loss: 23.83868408203125\r\nStep 10, loss: 23.229887008666992\r\nStep 11, loss: 22.968379974365234\r\nStep 12, loss: 23.104434967041016\r\nStep 13, loss: 22.9370174407959\r\nStep 14, loss: 23.07267189025879\r\nStep 15, loss: 22.69853401184082\r\nStep 16, loss: 22.833133697509766\r\nStep 17, loss: 22.623390197753906\r\nStep 18, loss: 
22.805448532104492\r\nStep 19, loss: 22.46842384338379\r\nStep 20, loss: 22.126110076904297\r\nStep 21, loss: 22.013687133789062\r\nStep 22, loss: 22.06637954711914\r\nStep 23, loss: 22.074575424194336\r\nStep 24, loss: 21.657609939575195\r\nStep 25, loss: 21.65119171142578\r\nStep 26, loss: 21.30472183227539\r\nStep 27, loss: 21.28836441040039\r\nStep 28, loss: 21.253355026245117\r\nStep 29, loss: 21.13030242919922\r\nStep 30, loss: 20.80233383178711\r\nStep 31, loss: 20.6905460357666\r\nStep 32, loss: 20.343433380126953\r\nStep 33, loss: 20.149208068847656\r\nStep 34, loss: 20.130001068115234\r\nStep 35, loss: 19.854604721069336\r\nStep 36, loss: 19.82932472229004\r\nStep 37, loss: 19.715286254882812\r\nStep 38, loss: 19.43071174621582\r\nStep 39, loss: 19.190099716186523\r\nStep 40, loss: 19.299043655395508\r\nStep 41, loss: 19.005701065063477\r\nStep 42, loss: 19.012231826782227\r\nStep 43, loss: 18.8094539642334\r\nStep 44, loss: 18.803787231445312\r\nStep 45, loss: 18.627302169799805\r\nStep 46, loss: 18.533035278320312\r\nStep 47, loss: 18.443218231201172\r\nStep 48, loss: 18.240646362304688\r\nStep 49, loss: 18.08544921875\r\nStep 50, loss: 18.155040740966797\r\nStep 51, loss: 18.1156063079834\r\nStep 52, loss: 18.010835647583008\r\nStep 53, loss: 17.92870330810547\r\nStep 54, loss: 17.777223587036133\r\nStep 55, loss: 17.731197357177734\r\nStep 56, loss: 17.60744857788086\r\nStep 57, loss: 17.33075523376465\r\nStep 58, loss: 17.37189292907715\r\nStep 59, loss: 17.39516258239746\r\nStep 60, loss: 17.215341567993164\r\nStep 61, loss: 17.019044876098633\r\nStep 62, loss: 16.908300399780273\r\nStep 63, loss: 16.93767738342285\r\nStep 64, loss: 16.618053436279297\r\nStep 65, loss: 16.66626739501953\r\nStep 66, loss: 16.52454376220703\r\nStep 67, loss: 16.4086971282959\r\nStep 68, loss: 16.41490364074707\r\nStep 69, loss: 16.233192443847656\r\nStep 70, loss: 15.901132583618164\r\nStep 71, loss: 15.888304710388184\r\nStep 72, loss: 15.797022819519043\r\nStep 73, loss: 15.717949867248535\r\nStep 74, loss: 15.905962944030762\r\nStep 75, loss: 15.710003852844238\r\nStep 76, loss: 15.411543846130371\r\nStep 77, loss: 15.431794166564941\r\nStep 78, loss: 15.41840648651123\r\nStep 79, loss: 15.312957763671875\r\nStep 80, loss: 15.165640830993652\r\nStep 81, loss: 15.194469451904297\r\nStep 82, loss: 15.253994941711426\r\nStep 83, loss: 14.950176239013672\r\nStep 84, loss: 15.123098373413086\r\nStep 85, loss: 14.940669059753418\r\nStep 86, loss: 14.846564292907715\r\nStep 87, loss: 14.621622085571289\r\nStep 88, loss: 14.527816772460938\r\nStep 89, loss: 14.486361503601074\r\nStep 90, loss: 14.469058990478516\r\nStep 91, loss: 14.704410552978516\r\nStep 92, loss: 14.415651321411133\r\nStep 93, loss: 14.259278297424316\r\nStep 94, loss: 14.137989044189453\r\nStep 95, loss: 13.88222599029541\r\nStep 96, loss: 14.01948070526123\r\nStep 97, loss: 13.856632232666016\r\nStep 98, loss: 13.616273880004883\r\nStep 99, loss: 13.495616912841797\r\nStep 100, loss: 13.21973991394043\r\nStep 101, loss: 13.387418746948242\r\nStep 102, loss: 13.056963920593262\r\nStep 103, loss: 13.581015586853027\r\nStep 104, loss: 13.344913482666016\r\nStep 105, loss: 12.812665939331055\r\nStep 106, loss: 12.263603210449219\r\nStep 107, loss: 12.624507904052734\r\nStep 108, loss: 12.448198318481445\r\nStep 109, loss: 12.28633975982666\r\nStep 110, loss: 12.160036087036133\r\nStep 111, loss: 12.113042831420898\r\nStep 112, loss: 11.980876922607422\r\nStep 113, loss: 11.889405250549316\r\nStep 114, loss: 
11.633279800415039\r\nStep 115, loss: 11.594181060791016\r\nStep 116, loss: 11.282090187072754\r\nStep 117, loss: 11.348597526550293\r\nStep 118, loss: 11.286100387573242\r\nStep 119, loss: 11.263978958129883\r\nStep 120, loss: 11.043057441711426\r\nStep 121, loss: 11.527467727661133\r\nStep 122, loss: 11.375204086303711\r\nStep 123, loss: 11.068164825439453\r\nStep 124, loss: 10.53603458404541\r\nStep 125, loss: 10.457991600036621\r\nStep 126, loss: 10.58580493927002\r\nStep 127, loss: 10.604734420776367\r\nStep 128, loss: 10.664406776428223\r\nStep 129, loss: 10.992063522338867\r\nStep 130, loss: 10.639398574829102\r\nStep 131, loss: 10.583847999572754\r\nStep 132, loss: 10.397967338562012\r\nStep 133, loss: 10.10909366607666\r\nStep 134, loss: 10.0088529586792\r\nStep 135, loss: 10.267728805541992\r\nStep 136, loss: 10.040645599365234\r\nStep 137, loss: 10.11013126373291\r\nStep 138, loss: 10.320940971374512\r\nStep 139, loss: 9.622902870178223\r\nStep 140, loss: 9.467279434204102\r\nStep 141, loss: 9.398171424865723\r\nStep 142, loss: 9.29957389831543\r\nStep 143, loss: 9.813874244689941\r\nStep 144, loss: 9.170883178710938\r\nStep 145, loss: 9.16419506072998\r\nStep 146, loss: 9.248353958129883\r\nStep 147, loss: 9.185125350952148\r\nStep 148, loss: 9.238696098327637\r\nStep 149, loss: 8.950000762939453\r\nStep 150, loss: 8.954133033752441\r\nStep 151, loss: 9.005464553833008\r\nStep 152, loss: 8.958908081054688\r\nStep 153, loss: 9.176563262939453\r\nStep 154, loss: 9.108915328979492\r\nStep 155, loss: 8.684147834777832\r\nStep 156, loss: 8.772013664245605\r\nStep 157, loss: 9.038022994995117\r\nStep 158, loss: 8.802122116088867\r\nStep 159, loss: 8.756733894348145\r\nStep 160, loss: 8.731100082397461\r\nStep 161, loss: 8.391814231872559\r\nStep 162, loss: 8.301539421081543\r\nStep 163, loss: 8.23969841003418\r\nStep 164, loss: 8.091272354125977\r\nStep 165, loss: 8.552835464477539\r\nStep 166, loss: 8.256057739257812\r\nStep 167, loss: 8.189535140991211\r\nStep 168, loss: 7.996348857879639\r\nStep 169, loss: 8.250320434570312\r\nStep 170, loss: 8.180702209472656\r\nStep 171, loss: 8.127228736877441\r\nStep 172, loss: 7.963306427001953\r\nStep 173, loss: 7.968501091003418\r\nStep 174, loss: 7.795525074005127\r\nStep 175, loss: 8.193002700805664\r\nStep 176, loss: 7.875818252563477\r\nStep 177, loss: 7.85451602935791\r\nStep 178, loss: 7.488790512084961\r\nStep 179, loss: 7.770381927490234\r\nStep 180, loss: 7.7291259765625\r\nStep 181, loss: 7.844414234161377\r\nStep 182, loss: 7.45212459564209\r\nStep 183, loss: 7.668381690979004\r\nStep 184, loss: 7.651785850524902\r\nStep 185, loss: 7.51216983795166\r\nStep 186, loss: 7.267374515533447\r\nStep 187, loss: 7.263881683349609\r\nStep 188, loss: 7.486958980560303\r\nStep 189, loss: 7.246147632598877\r\nStep 190, loss: 7.460647106170654\r\nStep 191, loss: 7.761508941650391\r\nStep 192, loss: 7.566250324249268\r\nStep 193, loss: 7.0336151123046875\r\nStep 194, loss: 7.259377479553223\r\nStep 195, loss: 7.042681694030762\r\nStep 196, loss: 7.074146747589111\r\nStep 197, loss: 7.113814353942871\r\nStep 198, loss: 6.973835468292236\r\nStep 199, loss: 7.288123607635498\r\nStep 200, loss: 6.95873498916626\r\nStep 201, loss: 7.105992794036865\r\nStep 202, loss: 7.1333746910095215\r\nStep 203, loss: 6.847956657409668\r\nStep 204, loss: 6.746923923492432\r\nStep 205, loss: 6.967642784118652\r\nStep 206, loss: 6.736564636230469\r\nStep 207, loss: 6.832950115203857\r\nStep 208, loss: 7.1226701736450195\r\nStep 209, loss: 7.433104515075684\r\nStep 
210, loss: 7.321633815765381\r\nStep 211, loss: 7.312641143798828\r\nStep 212, loss: 7.144681453704834\r\nStep 213, loss: 7.056727886199951\r\nStep 214, loss: 6.913035869598389\r\nStep 215, loss: 7.270552158355713\r\nStep 216, loss: 6.888924598693848\r\nStep 217, loss: 6.686014652252197\r\nStep 218, loss: 6.850816249847412\r\nStep 219, loss: 7.136499404907227\r\nStep 220, loss: 7.07992696762085\r\nStep 221, loss: 7.151065349578857\r\nStep 222, loss: 7.051300048828125\r\nStep 223, loss: 7.2389750480651855\r\nStep 224, loss: 6.9565348625183105\r\nStep 225, loss: 6.872081756591797\r\nStep 226, loss: 7.034422874450684\r\nStep 227, loss: 6.866305351257324\r\nStep 228, loss: 7.02532434463501\r\nStep 229, loss: 7.398115634918213\r\nStep 230, loss: 6.955196380615234\r\nStep 231, loss: 7.1861066818237305\r\nStep 232, loss: 6.930147647857666\r\nStep 233, loss: 6.815465450286865\r\nStep 234, loss: 6.942207336425781\r\nStep 235, loss: 7.350676536560059\r\nStep 236, loss: 7.424262046813965\r\nStep 237, loss: 7.144227027893066\r\nStep 238, loss: 7.344553470611572\r\nStep 239, loss: 7.27285623550415\r\nStep 240, loss: 7.603146553039551\r\n",,terminal_output +743,2782164,"utils/nn.py",3223,0,"",python,selection_mouse +744,2829288,"utils/nn.py",0,0,"",python,tab +745,2829289,"utils/nn.py",2802,0,"",python,selection_mouse +746,2830543,"utils/nn.py",2786,0,"",python,selection_command +747,2831900,"utils/nn.py",4084,0,"",python,selection_mouse +748,2832651,"utils/nn.py",2763,0,"",python,selection_mouse +749,2833431,"utils/nn.py",2752,22,"CausalTransformerBlock",python,selection_mouse +750,2835082,"utils/nn.py",2763,0,"",python,selection_mouse +751,2836200,"utils/nn.py",2762,0,"",python,selection_mouse +752,2838150,"utils/nn.py",2755,0,"",python,selection_mouse +753,2841128,"utils/nn.py",4084,0,"",python,selection_mouse +754,2841363,"utils/nn.py",2751,1333," CausalTransformerBlock(nn.Module):\n model_dim: int\n num_heads: int\n dropout: float\n param_dtype: jnp.dtype\n dtype: jnp.dtype\n\n @nn.compact\n def __call__(self, x: jax.Array) -> jax.Array:\n # LayerNorm + Causal Self-Attention\n z = nn.LayerNorm(\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n )(x)\n seq_len = z.shape[1]\n # Causal mask: (1, 1, seq_len, seq_len)\n causal_mask = jnp.tril(jnp.ones((seq_len, seq_len), dtype=bool))\n jax.debug.breakpoint()\n z = nn.MultiHeadAttention(\n num_heads=self.num_heads,\n qkv_features=self.model_dim,\n dropout_rate=self.dropout,\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n )(z, mask=causal_mask)\n jax.debug.breakpoint()\n x = x + z\n jax.debug.breakpoint()\n\n # Feedforward\n z = nn.LayerNorm(\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n )(x)\n jax.debug.breakpoint()\n z = nn.Dense(\n self.model_dim,\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n )(z)\n jax.debug.breakpoint()\n z = nn.gelu(z)\n jax.debug.breakpoint()\n x = x + z\n jax.debug.breakpoint()\n\n return x\n",python,selection_mouse +755,2841364,"utils/nn.py",2750,1334,"s CausalTransformerBlock(nn.Module):\n model_dim: int\n num_heads: int\n dropout: float\n param_dtype: jnp.dtype\n dtype: jnp.dtype\n\n @nn.compact\n def __call__(self, x: jax.Array) -> jax.Array:\n # LayerNorm + Causal Self-Attention\n z = nn.LayerNorm(\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n )(x)\n seq_len = z.shape[1]\n # Causal mask: (1, 1, seq_len, seq_len)\n causal_mask = jnp.tril(jnp.ones((seq_len, seq_len), dtype=bool))\n jax.debug.breakpoint()\n z = nn.MultiHeadAttention(\n num_heads=self.num_heads,\n qkv_features=self.model_dim,\n 
dropout_rate=self.dropout,\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n )(z, mask=causal_mask)\n jax.debug.breakpoint()\n x = x + z\n jax.debug.breakpoint()\n\n # Feedforward\n z = nn.LayerNorm(\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n )(x)\n jax.debug.breakpoint()\n z = nn.Dense(\n self.model_dim,\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n )(z)\n jax.debug.breakpoint()\n z = nn.gelu(z)\n jax.debug.breakpoint()\n x = x + z\n jax.debug.breakpoint()\n\n return x\n",python,selection_mouse +756,2841364,"utils/nn.py",2748,1336,"ass CausalTransformerBlock(nn.Module):\n model_dim: int\n num_heads: int\n dropout: float\n param_dtype: jnp.dtype\n dtype: jnp.dtype\n\n @nn.compact\n def __call__(self, x: jax.Array) -> jax.Array:\n # LayerNorm + Causal Self-Attention\n z = nn.LayerNorm(\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n )(x)\n seq_len = z.shape[1]\n # Causal mask: (1, 1, seq_len, seq_len)\n causal_mask = jnp.tril(jnp.ones((seq_len, seq_len), dtype=bool))\n jax.debug.breakpoint()\n z = nn.MultiHeadAttention(\n num_heads=self.num_heads,\n qkv_features=self.model_dim,\n dropout_rate=self.dropout,\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n )(z, mask=causal_mask)\n jax.debug.breakpoint()\n x = x + z\n jax.debug.breakpoint()\n\n # Feedforward\n z = nn.LayerNorm(\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n )(x)\n jax.debug.breakpoint()\n z = nn.Dense(\n self.model_dim,\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n )(z)\n jax.debug.breakpoint()\n z = nn.gelu(z)\n jax.debug.breakpoint()\n x = x + z\n jax.debug.breakpoint()\n\n return x\n",python,selection_mouse +757,2841364,"utils/nn.py",2745,1339,"\nclass CausalTransformerBlock(nn.Module):\n model_dim: int\n num_heads: int\n dropout: float\n param_dtype: jnp.dtype\n dtype: jnp.dtype\n\n @nn.compact\n def __call__(self, x: jax.Array) -> jax.Array:\n # LayerNorm + Causal Self-Attention\n z = nn.LayerNorm(\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n )(x)\n seq_len = z.shape[1]\n # Causal mask: (1, 1, seq_len, seq_len)\n causal_mask = jnp.tril(jnp.ones((seq_len, seq_len), dtype=bool))\n jax.debug.breakpoint()\n z = nn.MultiHeadAttention(\n num_heads=self.num_heads,\n qkv_features=self.model_dim,\n dropout_rate=self.dropout,\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n )(z, mask=causal_mask)\n jax.debug.breakpoint()\n x = x + z\n jax.debug.breakpoint()\n\n # Feedforward\n z = nn.LayerNorm(\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n )(x)\n jax.debug.breakpoint()\n z = nn.Dense(\n self.model_dim,\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n )(z)\n jax.debug.breakpoint()\n z = nn.gelu(z)\n jax.debug.breakpoint()\n x = x + z\n jax.debug.breakpoint()\n\n return x\n",python,selection_mouse +758,2841369,"utils/nn.py",2751,1333," CausalTransformerBlock(nn.Module):\n model_dim: int\n num_heads: int\n dropout: float\n param_dtype: jnp.dtype\n dtype: jnp.dtype\n\n @nn.compact\n def __call__(self, x: jax.Array) -> jax.Array:\n # LayerNorm + Causal Self-Attention\n z = nn.LayerNorm(\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n )(x)\n seq_len = z.shape[1]\n # Causal mask: (1, 1, seq_len, seq_len)\n causal_mask = jnp.tril(jnp.ones((seq_len, seq_len), dtype=bool))\n jax.debug.breakpoint()\n z = nn.MultiHeadAttention(\n num_heads=self.num_heads,\n qkv_features=self.model_dim,\n dropout_rate=self.dropout,\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n )(z, mask=causal_mask)\n jax.debug.breakpoint()\n x = x + z\n jax.debug.breakpoint()\n\n # 
Feedforward\n z = nn.LayerNorm(\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n )(x)\n jax.debug.breakpoint()\n z = nn.Dense(\n self.model_dim,\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n )(z)\n jax.debug.breakpoint()\n z = nn.gelu(z)\n jax.debug.breakpoint()\n x = x + z\n jax.debug.breakpoint()\n\n return x\n",python,selection_command +759,2841369,"utils/nn.py",2745,1339,"\nclass CausalTransformerBlock(nn.Module):\n model_dim: int\n num_heads: int\n dropout: float\n param_dtype: jnp.dtype\n dtype: jnp.dtype\n\n @nn.compact\n def __call__(self, x: jax.Array) -> jax.Array:\n # LayerNorm + Causal Self-Attention\n z = nn.LayerNorm(\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n )(x)\n seq_len = z.shape[1]\n # Causal mask: (1, 1, seq_len, seq_len)\n causal_mask = jnp.tril(jnp.ones((seq_len, seq_len), dtype=bool))\n jax.debug.breakpoint()\n z = nn.MultiHeadAttention(\n num_heads=self.num_heads,\n qkv_features=self.model_dim,\n dropout_rate=self.dropout,\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n )(z, mask=causal_mask)\n jax.debug.breakpoint()\n x = x + z\n jax.debug.breakpoint()\n\n # Feedforward\n z = nn.LayerNorm(\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n )(x)\n jax.debug.breakpoint()\n z = nn.Dense(\n self.model_dim,\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n )(z)\n jax.debug.breakpoint()\n z = nn.gelu(z)\n jax.debug.breakpoint()\n x = x + z\n jax.debug.breakpoint()\n\n return x\n",python,selection_mouse +760,2847365,"utils/nn.py",2745,1340,"",python,content +761,2849521,"utils/nn.py",2744,0,"\n ",python,content +762,2849595,"utils/nn.py",2745,8,"",python,content +763,2853301,"utils/nn.py",3939,0,"",python,selection_mouse +764,2854165,"utils/nn.py",4107,0,"",python,selection_mouse +765,2854186,"utils/nn.py",4106,0,"",python,selection_command +766,2854329,"utils/nn.py",4107,0,"",python,selection_mouse +767,2854337,"utils/nn.py",4106,0,"",python,selection_command +768,2854558,"utils/nn.py",4106,1,")",python,selection_mouse +769,2854559,"utils/nn.py",4106,0,"",python,selection_mouse +770,2854559,"utils/nn.py",4103,3,")(x",python,selection_mouse +771,2854560,"utils/nn.py",4063,43," # dtype=self.dtype,\n # )(x",python,selection_mouse +772,2854560,"utils/nn.py",4011,95," # param_dtype=self.param_dtype,\n # dtype=self.dtype,\n # )(x",python,selection_mouse +773,2854567,"utils/nn.py",4107,0,"",python,selection_command +774,2854568,"utils/nn.py",4009,98," # param_dtype=self.param_dtype,\n # dtype=self.dtype,\n # )(x)",python,selection_mouse +775,2854612,"utils/nn.py",4006,101," # param_dtype=self.param_dtype,\n # dtype=self.dtype,\n # )(x)",python,selection_mouse +776,2854642,"utils/nn.py",3965,142," # dropout=self.dropout,\n # param_dtype=self.param_dtype,\n # dtype=self.dtype,\n # )(x)",python,selection_mouse +777,2854684,"utils/nn.py",3921,186," # num_heads=self.num_heads,\n # dropout=self.dropout,\n # param_dtype=self.param_dtype,\n # dtype=self.dtype,\n # )(x)",python,selection_mouse +778,2854709,"utils/nn.py",3877,230," # model_dim=self.model_dim,\n # num_heads=self.num_heads,\n # dropout=self.dropout,\n # param_dtype=self.param_dtype,\n # dtype=self.dtype,\n # )(x)",python,selection_mouse +779,2854760,"utils/nn.py",3835,272," # x = CausalTransformerBlock(\n # model_dim=self.model_dim,\n # num_heads=self.num_heads,\n # dropout=self.dropout,\n # param_dtype=self.param_dtype,\n # dtype=self.dtype,\n # )(x)",python,selection_mouse +780,2855504,"utils/nn.py",3835,272,"",python,content 
+781,2857580,"utils/nn.py",3618,0,"",python,selection_mouse +782,3103106,"models/dynamics.py",0,0,"",python,tab +783,3106494,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",0,0,"",python,tab +784,3119223,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",12972,0,"",python,selection_mouse +785,3119371,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",12967,10,"log_images",python,selection_mouse +786,3120755,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",12622,0,"",python,selection_mouse +787,3120898,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",12621,7,"asarray",python,selection_mouse +788,3121532,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",12619,0,"",python,selection_mouse +789,3121664,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",12618,2,"np",python,selection_mouse +790,3121872,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",12618,3,"np.",python,selection_mouse +791,3121872,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",12618,10,"np.asarray",python,selection_mouse +792,3122379,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",12626,0,"",python,selection_mouse +793,3122380,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",12621,7,"asarray",python,selection_mouse +794,3122532,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",12572,85," image=wandb.Image(np.asarray(gt_seq[args.seq_len - 1])),\n",python,selection_mouse +795,3123294,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",12626,0,"",python,selection_mouse +796,3123875,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",12607,0,"",python,selection_mouse +797,3124049,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",12606,5,"wandb",python,selection_mouse +798,3124595,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",12601,0,"",python,selection_mouse +799,3124738,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",12600,5,"image",python,selection_mouse +800,3124893,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",12572,85," image=wandb.Image(np.asarray(gt_seq[args.seq_len - 1])),\n",python,selection_mouse +801,3128412,"TERMINAL",0,0,"Step 241, loss: 7.379404067993164\r\nStep 242, loss: 7.04669713973999\r\nStep 243, loss: 7.223360061645508\r\nStep 244, loss: 7.122011661529541\r\nStep 245, loss: 6.9363813400268555\r\nStep 246, loss: 6.78315544128418\r\nStep 247, loss: 6.789515018463135\r\nStep 248, loss: 6.8189568519592285\r\nStep 249, loss: 6.757500171661377\r\nStep 250, loss: 6.645450115203857\r\nStep 251, loss: 6.835147857666016\r\nStep 252, loss: 7.028203964233398\r\nStep 253, loss: 6.857334136962891\r\nStep 254, loss: 6.791317462921143\r\nStep 255, loss: 7.043140888214111\r\nStep 256, loss: 7.165127277374268\r\nStep 257, loss: 6.969034671783447\r\nStep 258, loss: 7.095194339752197\r\nStep 259, loss: 7.076196193695068\r\nStep 260, loss: 7.128316402435303\r\nStep 261, loss: 6.900633811950684\r\nStep 262, loss: 6.888290882110596\r\nStep 263, loss: 6.734719753265381\r\nStep 264, 
loss: 6.55846643447876\r\nStep 265, loss: 6.635307312011719\r\nStep 266, loss: 6.654285907745361\r\nStep 267, loss: 6.610708236694336\r\nStep 268, loss: 6.6228346824646\r\nStep 269, loss: 6.684624195098877\r\nStep 270, loss: 6.578184604644775\r\nStep 271, loss: 6.8313798904418945\r\nStep 272, loss: 6.631593227386475\r\nStep 273, loss: 6.816318511962891\r\nStep 274, loss: 6.809188365936279\r\nStep 275, loss: 6.645437240600586\r\nStep 276, loss: 6.999457359313965\r\nStep 277, loss: 7.248863220214844\r\nStep 278, loss: 7.295817852020264\r\nStep 279, loss: 7.22598934173584\r\nStep 280, loss: 6.736786365509033\r\nStep 281, loss: 6.742730617523193\r\nStep 282, loss: 6.917891979217529\r\nStep 283, loss: 7.006328105926514\r\nStep 284, loss: 6.849727153778076\r\nStep 285, loss: 6.71075963973999\r\nStep 286, loss: 6.544010639190674\r\nStep 287, loss: 6.987166881561279\r\nStep 288, loss: 7.016972064971924\r\nStep 289, loss: 6.533799171447754\r\nStep 290, loss: 6.333832263946533\r\nStep 291, loss: 6.664097309112549\r\nStep 292, loss: 6.857154846191406\r\nStep 293, loss: 6.740937232971191\r\nStep 294, loss: 6.428022384643555\r\nStep 295, loss: 6.590887069702148\r\nStep 296, loss: 6.396702766418457\r\nStep 297, loss: 6.453038692474365\r\nStep 298, loss: 6.415592670440674\r\nStep 299, loss: 6.50046443939209\r\nStep 300, loss: 6.6542067527771\r\nStep 301, loss: 6.522833347320557\r\nStep 302, loss: 6.843961715698242\r\nStep 303, loss: 6.503088474273682\r\nStep 304, loss: 6.348232269287109\r\nStep 305, loss: 6.257919788360596\r\nStep 306, loss: 6.217457294464111\r\nStep 307, loss: 6.710884094238281\r\nStep 308, loss: 6.380974769592285\r\nStep 309, loss: 6.220357418060303\r\nStep 310, loss: 6.108943462371826\r\nStep 311, loss: 5.8724164962768555\r\nStep 312, loss: 5.9643707275390625\r\nStep 313, loss: 5.961871147155762\r\nStep 314, loss: 5.758096694946289\r\nStep 315, loss: 6.158802509307861\r\nStep 316, loss: 6.0735673904418945\r\nStep 317, loss: 5.8323073387146\r\nStep 318, loss: 6.177542209625244\r\nStep 319, loss: 6.067553520202637\r\nStep 320, loss: 6.038662910461426\r\nStep 321, loss: 6.32551908493042\r\nStep 322, loss: 6.123026371002197\r\nStep 323, loss: 6.1038031578063965\r\nStep 324, loss: 6.278502464294434\r\nStep 325, loss: 5.9980788230896\r\nStep 326, loss: 6.238011837005615\r\nStep 327, loss: 6.08726167678833\r\nStep 328, loss: 6.122094631195068\r\nStep 329, loss: 6.312901496887207\r\nStep 330, loss: 6.060505390167236\r\nStep 331, loss: 5.6425580978393555\r\nStep 332, loss: 6.180875301361084\r\nStep 333, loss: 6.137739181518555\r\nStep 334, loss: 5.982623100280762\r\nStep 335, loss: 6.006547451019287\r\nStep 336, loss: 5.891482830047607\r\nStep 337, loss: 6.1084723472595215\r\nStep 338, loss: 5.797332286834717\r\nStep 339, loss: 5.9624152183532715\r\nStep 340, loss: 6.293918609619141\r\nStep 341, loss: 5.97660493850708\r\nStep 342, loss: 5.8210835456848145\r\nStep 343, loss: 5.732250690460205\r\nStep 344, loss: 5.8108367919921875\r\nStep 345, loss: 5.393507480621338\r\nStep 346, loss: 5.4268798828125\r\nStep 347, loss: 5.90861177444458\r\nStep 348, loss: 5.9017534255981445\r\nStep 349, loss: 5.544797897338867\r\nStep 350, loss: 5.644172668457031\r\nStep 351, loss: 5.834247589111328\r\nStep 352, loss: 6.078111171722412\r\nStep 353, loss: 5.5367608070373535\r\nStep 354, loss: 6.098783016204834\r\nStep 355, loss: 6.018643856048584\r\nStep 356, loss: 5.954596519470215\r\nStep 357, loss: 6.137462615966797\r\nStep 358, loss: 5.710295677185059\r\nStep 359, loss: 5.741447925567627\r\nStep 360, loss: 
5.361110687255859\r\nStep 361, loss: 5.926032066345215\r\nStep 362, loss: 5.800010681152344\r\nStep 363, loss: 5.716776371002197\r\nStep 364, loss: 5.779480934143066\r\nStep 365, loss: 5.552876949310303\r\nStep 366, loss: 5.847739219665527\r\nStep 367, loss: 5.747087478637695\r\nStep 368, loss: 6.04551362991333\r\nStep 369, loss: 5.895529270172119\r\nStep 370, loss: 5.590581893920898\r\nStep 371, loss: 5.84631872177124\r\nStep 372, loss: 5.53010368347168\r\nStep 373, loss: 5.684707164764404\r\nStep 374, loss: 5.96873664855957\r\nStep 375, loss: 5.770754814147949\r\nStep 376, loss: 5.8693976402282715\r\nStep 377, loss: 6.002660274505615\r\nStep 378, loss: 5.831007957458496\r\nStep 379, loss: 5.539312362670898\r\nStep 380, loss: 5.310413837432861\r\nStep 381, loss: 5.491917610168457\r\nStep 382, loss: 5.797224044799805\r\nStep 383, loss: 5.502389907836914\r\nStep 384, loss: 5.429259777069092\r\nStep 385, loss: 5.718020439147949\r\nStep 386, loss: 5.498144626617432\r\nStep 387, loss: 4.996325492858887\r\nStep 388, loss: 5.29017448425293\r\nStep 389, loss: 5.036089897155762\r\nStep 390, loss: 5.375723361968994\r\nStep 391, loss: 5.41914701461792\r\nStep 392, loss: 5.57274866104126\r\nStep 393, loss: 5.621382713317871\r\nStep 394, loss: 5.528846740722656\r\nStep 395, loss: 5.220247268676758\r\nStep 396, loss: 5.568932056427002\r\nStep 397, loss: 5.186644077301025\r\nStep 398, loss: 5.383299350738525\r\nStep 399, loss: 5.2024078369140625\r\nStep 400, loss: 5.268125534057617\r\nStep 401, loss: 5.126521587371826\r\nStep 402, loss: 5.441715717315674\r\nStep 403, loss: 4.971039772033691\r\nStep 404, loss: 4.975629806518555\r\nStep 405, loss: 5.007051467895508\r\nStep 406, loss: 5.2241597175598145\r\nStep 407, loss: 5.050140380859375\r\nStep 408, loss: 5.0859479904174805\r\nStep 409, loss: 5.251633644104004\r\nStep 410, loss: 5.043816089630127\r\nStep 411, loss: 5.1554694175720215\r\nStep 412, loss: 5.479846000671387\r\nStep 413, loss: 5.171293258666992\r\nStep 414, loss: 5.044966697692871\r\nStep 415, loss: 4.938406467437744\r\nStep 416, loss: 4.896639823913574\r\nStep 417, loss: 4.654478549957275\r\nStep 418, loss: 5.08234977722168\r\nStep 419, loss: 5.110918045043945\r\nStep 420, loss: 5.158133506774902\r\nStep 421, loss: 5.083363056182861\r\nStep 422, loss: 4.816728591918945\r\nStep 423, loss: 5.111112117767334\r\nStep 424, loss: 4.970766067504883\r\nStep 425, loss: 5.160951137542725\r\nStep 426, loss: 4.560585021972656\r\nStep 427, loss: 4.5675554275512695\r\nStep 428, loss: 4.727843284606934\r\nStep 429, loss: 4.88224983215332\r\nStep 430, loss: 5.060454845428467\r\nStep 431, loss: 4.964059829711914\r\nStep 432, loss: 5.124404430389404\r\nStep 433, loss: 4.908259391784668\r\nStep 434, loss: 5.03548526763916\r\nStep 435, loss: 4.721578121185303\r\nStep 436, loss: 4.8141913414001465\r\nStep 437, loss: 4.8371782302856445\r\nStep 438, loss: 5.0658063888549805\r\nStep 439, loss: 4.997641086578369\r\nStep 440, loss: 4.761614799499512\r\nStep 441, loss: 5.140580654144287\r\nStep 442, loss: 4.914675235748291\r\nStep 443, loss: 4.541126728057861\r\nStep 444, loss: 4.878077983856201\r\nStep 445, loss: 4.9191365242004395\r\nStep 446, loss: 4.62006139755249\r\nStep 447, loss: 4.408695697784424\r\nStep 448, loss: 4.690447807312012\r\nStep 449, loss: 4.7643327713012695\r\nStep 450, loss: 4.939113140106201\r\nStep 451, loss: 4.701648235321045\r\nStep 452, loss: 4.490898132324219\r\nStep 453, loss: 4.7736101150512695\r\nStep 454, loss: 4.64280366897583\r\nStep 455, loss: 4.667891025543213\r\nStep 456, loss: 
4.512481689453125\r\nStep 457, loss: 4.70444917678833\r\nStep 458, loss: 4.716007232666016\r\nStep 459, loss: 4.634371757507324\r\nStep 460, loss: 4.296895503997803\r\nStep 461, loss: 4.487729072570801\r\nStep 462, loss: 4.345639705657959\r\nStep 463, loss: 4.462018966674805\r\nStep 464, loss: 4.95395565032959\r\nStep 465, loss: 4.456920146942139\r\nStep 466, loss: 4.7270612716674805\r\nStep 467, loss: 4.68767786026001\r\nStep 468, loss: 4.089179515838623\r\nStep 469, loss: 4.443053722381592\r\nStep 470, loss: 4.601764678955078\r\nStep 471, loss: 4.1854753494262695\r\nStep 472, loss: 4.367209434509277\r\nStep 473, loss: 4.561801910400391\r\nStep 474, loss: 4.419254302978516\r\nStep 475, loss: 4.415527820587158\r\nStep 476, loss: 4.368729114532471\r\nStep 477, loss: 4.194905757904053\r\nStep 478, loss: 4.435650825500488\r\nStep 479, loss: 4.502296447753906\r\nStep 480, loss: 4.542525291442871\r\nStep 481, loss: 4.526846885681152\r\n...\r\nStep 358, loss: 
5.710295677185059\r\nStep 359, loss: 5.741447925567627\r\nStep 360, loss: 5.361110687255859\r\nStep 241, loss: 7.379404067993164\r\nStep 242, loss: 7.04669713973999\r\nStep 243, loss: 7.223360061645508\r\nStep 244, loss: 7.122011661529541\r\nStep 245, loss: 6.9363813400268555\r\nStep 246, loss: 6.78315544128418\r\nStep 247, loss: 6.789515018463135\r\nStep 248, loss: 6.8189568519592285\r\nStep 249, loss: 6.757500171661377\r\nStep 250, loss: 6.645450115203857\r\nStep 251, loss: 6.835147857666016\r\nStep 252, loss: 7.028203964233398\r\nStep 253, loss: 6.857334136962891\r\nStep 254, loss: 6.791317462921143\r\nStep 255, loss: 7.043140888214111\r\nStep 256, loss: 7.165127277374268\r\nStep 257, loss: 6.969034671783447\r\nStep 258, loss: 7.095194339752197\r\nStep 259, loss: 7.076196193695068\r\nStep 260, loss: 7.128316402435303\r\nStep 261, loss: 6.900633811950684\r\nStep 262, loss: 6.888290882110596\r\nStep 263, loss: 6.734719753265381\r\nStep 264, loss: 6.55846643447876\r\nStep 265, loss: 6.635307312011719\r\nStep 266, loss: 6.654285907745361\r\nStep 267, loss: 6.610708236694336\r\nStep 268, loss: 6.6228346824646\r\nStep 269, loss: 6.684624195098877\r\nStep 270, loss: 6.578184604644775\r\nStep 271, loss: 6.8313798904418945\r\nStep 272, loss: 6.631593227386475\r\nStep 273, loss: 6.816318511962891\r\nStep 274, loss: 6.809188365936279\r\nStep 275, loss: 6.645437240600586\r\nStep 276, loss: 6.999457359313965\r\nStep 277, loss: 7.248863220214844\r\nStep 278, loss: 7.295817852020264\r\nStep 279, loss: 7.22598934173584\r\nStep 280, loss: 6.736786365509033\r\nStep 281, loss: 6.742730617523193\r\nStep 282, loss: 6.917891979217529\r\nStep 283, loss: 7.006328105926514\r\nStep 284, loss: 6.849727153778076\r\nStep 285, loss: 6.71075963973999\r\nStep 286, loss: 6.544010639190674\r\nStep 287, loss: 6.987166881561279\r\nStep 288, loss: 7.016972064971924\r\nStep 289, loss: 6.533799171447754\r\nStep 290, loss: 6.333832263946533\r\nStep 291, loss: 6.664097309112549\r\nStep 292, loss: 6.857154846191406\r\nStep 293, loss: 6.740937232971191\r\nStep 294, loss: 6.428022384643555\r\nStep 295, loss: 6.590887069702148\r\nStep 296, loss: 6.396702766418457\r\nStep 297, loss: 6.453038692474365\r\nStep 298, loss: 6.415592670440674\r\nStep 299, loss: 6.50046443939209\r\nStep 300, loss: 6.6542067527771\r\nStep 301, loss: 6.522833347320557\r\nStep 302, loss: 6.843961715698242\r\nStep 303, loss: 6.503088474273682\r\nStep 304, loss: 6.348232269287109\r\nStep 305, loss: 6.257919788360596\r\nStep 306, loss: 6.217457294464111\r\nStep 307, loss: 6.710884094238281\r\nStep 308, loss: 6.380974769592285\r\nStep 309, loss: 6.220357418060303\r\nStep 310, loss: 6.108943462371826\r\nStep 311, loss: 5.8724164962768555\r\nStep 312, loss: 5.9643707275390625\r\nStep 313, loss: 5.961871147155762\r\nStep 314, loss: 5.758096694946289\r\nStep 315, loss: 6.158802509307861\r\nStep 316, loss: 6.0735673904418945\r\nStep 317, loss: 5.8323073387146\r\nStep 318, loss: 6.177542209625244\r\nStep 319, loss: 6.067553520202637\r\nStep 320, loss: 6.038662910461426\r\nStep 321, loss: 6.32551908493042\r\nStep 322, loss: 6.123026371002197\r\nStep 323, loss: 6.1038031578063965\r\nStep 324, loss: 6.278502464294434\r\nStep 325, loss: 5.9980788230896\r\nStep 326, loss: 6.238011837005615\r\nStep 327, loss: 6.08726167678833\r\nStep 328, loss: 6.122094631195068\r\nStep 329, loss: 6.312901496887207\r\nStep 330, loss: 6.060505390167236\r\nStep 331, loss: 5.6425580978393555\r\nStep 332, loss: 6.180875301361084\r\nStep 333, loss: 6.137739181518555\r\nStep 334, loss: 
5.982623100280762\r\nStep 335, loss: 6.006547451019287\r\nStep 336, loss: 5.891482830047607\r\nStep 337, loss: 6.1084723472595215\r\nStep 338, loss: 5.797332286834717\r\nStep 339, loss: 5.9624152183532715\r\nStep 340, loss: 6.293918609619141\r\nStep 341, loss: 5.97660493850708\r\nStep 342, loss: 5.8210835456848145\r\nStep 343, loss: 5.732250690460205\r\nStep 344, loss: 5.8108367919921875\r\nStep 345, loss: 5.393507480621338\r\nStep 346, loss: 5.4268798828125\r\nStep 347, loss: 5.90861177444458\r\nStep 348, loss: 5.9017534255981445\r\nStep 349, loss: 5.544797897338867\r\nStep 350, loss: 5.644172668457031\r\nStep 351, loss: 5.834247589111328\r\nStep 352, loss: 6.078111171722412\r\nStep 353, loss: 5.5367608070373535\r\nStep 354, loss: 6.098783016204834\r\nStep 355, loss: 6.018643856048584\r\nStep 356, loss: 5.954596519470215\r\nStep 357, loss: 6.137462615966797\r\nStep 358, loss: 5.710295677185059\r\nStep 359, loss: 5.741447925567627\r\nStep 360, loss: 5.361110687255859\r\nStep 361, loss: 5.926032066345215\r\nStep 362, loss: 5.800010681152344\r\nStep 363, loss: 5.716776371002197\r\nStep 364, loss: 5.779480934143066\r\nStep 365, loss: 5.552876949310303\r\nStep 366, loss: 5.847739219665527\r\nStep 367, loss: 5.747087478637695\r\nStep 368, loss: 6.04551362991333\r\nStep 369, loss: 5.895529270172119\r\nStep 370, loss: 5.590581893920898\r\nStep 371, loss: 5.84631872177124\r\nStep 372, loss: 5.53010368347168\r\nStep 373, loss: 5.684707164764404\r\nStep 374, loss: 5.96873664855957\r\nStep 375, loss: 5.770754814147949\r\nStep 376, loss: 5.8693976402282715\r\nStep 377, loss: 6.002660274505615\r\nStep 378, loss: 5.831007957458496\r\nStep 379, loss: 5.539312362670898\r\nStep 380, loss: 5.310413837432861\r\nStep 381, loss: 5.491917610168457\r\nStep 382, loss: 5.797224044799805\r\nStep 383, loss: 5.502389907836914\r\nStep 384, loss: 5.429259777069092\r\nStep 385, loss: 5.718020439147949\r\nStep 386, loss: 5.498144626617432\r\nStep 387, loss: 4.996325492858887\r\nStep 388, loss: 5.29017448425293\r\nStep 389, loss: 5.036089897155762\r\nStep 390, loss: 5.375723361968994\r\nStep 391, loss: 5.41914701461792\r\nStep 392, loss: 5.57274866104126\r\nStep 393, loss: 5.621382713317871\r\nStep 394, loss: 5.528846740722656\r\nStep 395, loss: 5.220247268676758\r\nStep 396, loss: 5.568932056427002\r\nStep 397, loss: 5.186644077301025\r\nStep 398, loss: 5.383299350738525\r\nStep 399, loss: 5.2024078369140625\r\nStep 400, loss: 5.268125534057617\r\nStep 401, loss: 5.126521587371826\r\nStep 402, loss: 5.441715717315674\r\nStep 403, loss: 4.971039772033691\r\nStep 404, loss: 4.975629806518555\r\nStep 405, loss: 5.007051467895508\r\nStep 406, loss: 5.2241597175598145\r\nStep 407, loss: 5.050140380859375\r\nStep 408, loss: 5.0859479904174805\r\nStep 409, loss: 5.251633644104004\r\nStep 410, loss: 5.043816089630127\r\nStep 411, loss: 5.1554694175720215\r\nStep 412, loss: 5.479846000671387\r\nStep 413, loss: 5.171293258666992\r\nStep 414, loss: 5.044966697692871\r\nStep 415, loss: 4.938406467437744\r\nStep 416, loss: 4.896639823913574\r\nStep 417, loss: 4.654478549957275\r\nStep 418, loss: 5.08234977722168\r\nStep 419, loss: 5.110918045043945\r\nStep 420, loss: 5.158133506774902\r\nStep 241, loss: 7.379404067993164\r\nStep 242, loss: 7.04669713973999\r\nStep 243, loss: 7.223360061645508\r\nStep 244, loss: 7.122011661529541\r\nStep 245, loss: 6.9363813400268555\r\nStep 246, loss: 6.78315544128418\r\nStep 247, loss: 6.789515018463135\r\nStep 248, loss: 6.8189568519592285\r\nStep 249, loss: 6.757500171661377\r\nStep 250, loss: 
6.645450115203857\r\nStep 251, loss: 6.835147857666016\r\nStep 252, loss: 7.028203964233398\r\nStep 253, loss: 6.857334136962891\r\nStep 254, loss: 6.791317462921143\r\nStep 255, loss: 7.043140888214111\r\nStep 256, loss: 7.165127277374268\r\nStep 257, loss: 6.969034671783447\r\nStep 258, loss: 7.095194339752197\r\nStep 259, loss: 7.076196193695068\r\nStep 260, loss: 7.128316402435303\r\nStep 261, loss: 6.900633811950684\r\nStep 262, loss: 6.888290882110596\r\nStep 263, loss: 6.734719753265381\r\nStep 264, loss: 6.55846643447876\r\nStep 265, loss: 6.635307312011719\r\nStep 266, loss: 6.654285907745361\r\nStep 267, loss: 6.610708236694336\r\nStep 268, loss: 6.6228346824646\r\nStep 269, loss: 6.684624195098877\r\nStep 270, loss: 6.578184604644775\r\nStep 421, loss: 5.083363056182861\r\nStep 422, loss: 4.816728591918945\r\nStep 423, loss: 5.111112117767334\r\nStep 424, loss: 4.970766067504883\r\nStep 425, loss: 5.160951137542725\r\nStep 426, loss: 4.560585021972656\r\nStep 427, loss: 4.5675554275512695\r\nStep 428, loss: 4.727843284606934\r\nStep 429, loss: 4.88224983215332\r\nStep 430, loss: 5.060454845428467\r\nStep 431, loss: 4.964059829711914\r\nStep 432, loss: 5.124404430389404\r\nStep 433, loss: 4.908259391784668\r\nStep 434, loss: 5.03548526763916\r\nStep 435, loss: 4.721578121185303\r\nStep 436, loss: 4.8141913414001465\r\nStep 437, loss: 4.8371782302856445\r\nStep 438, loss: 5.0658063888549805\r\nStep 439, loss: 4.997641086578369\r\nStep 440, loss: 4.761614799499512\r\nStep 441, loss: 5.140580654144287\r\nStep 442, loss: 4.914675235748291\r\nStep 443, loss: 4.541126728057861\r\nStep 444, loss: 4.878077983856201\r\nStep 445, loss: 4.9191365242004395\r\nStep 446, loss: 4.62006139755249\r\nStep 447, loss: 4.408695697784424\r\nStep 448, loss: 4.690447807312012\r\nStep 449, loss: 4.7643327713012695\r\nStep 450, loss: 4.939113140106201\r\nStep 271, loss: 6.8313798904418945\r\nStep 272, loss: 6.631593227386475\r\nStep 273, loss: 6.816318511962891\r\nStep 274, loss: 6.809188365936279\r\nStep 275, loss: 6.645437240600586\r\nStep 276, loss: 6.999457359313965\r\nStep 277, loss: 7.248863220214844\r\nStep 278, loss: 7.295817852020264\r\nStep 279, loss: 7.22598934173584\r\nStep 280, loss: 6.736786365509033\r\nStep 281, loss: 6.742730617523193\r\nStep 282, loss: 6.917891979217529\r\nStep 283, loss: 7.006328105926514\r\nStep 284, loss: 6.849727153778076\r\nStep 285, loss: 6.71075963973999\r\nStep 286, loss: 6.544010639190674\r\nStep 287, loss: 6.987166881561279\r\nStep 288, loss: 7.016972064971924\r\nStep 289, loss: 6.533799171447754\r\nStep 290, loss: 6.333832263946533\r\nStep 291, loss: 6.664097309112549\r\nStep 292, loss: 6.857154846191406\r\nStep 293, loss: 6.740937232971191\r\nStep 294, loss: 6.428022384643555\r\nStep 295, loss: 6.590887069702148\r\nStep 296, loss: 6.396702766418457\r\nStep 297, loss: 6.453038692474365\r\nStep 298, loss: 6.415592670440674\r\nStep 299, loss: 6.50046443939209\r\nStep 300, loss: 6.6542067527771\r\nStep 451, loss: 4.701648235321045\r\nStep 452, loss: 4.490898132324219\r\nStep 453, loss: 4.7736101150512695\r\nStep 454, loss: 4.64280366897583\r\nStep 455, loss: 4.667891025543213\r\nStep 456, loss: 4.512481689453125\r\nStep 457, loss: 4.70444917678833\r\nStep 458, loss: 4.716007232666016\r\nStep 459, loss: 4.634371757507324\r\nStep 460, loss: 4.296895503997803\r\nStep 461, loss: 4.487729072570801\r\nStep 462, loss: 4.345639705657959\r\nStep 463, loss: 4.462018966674805\r\nStep 464, loss: 4.95395565032959\r\nStep 465, loss: 4.456920146942139\r\nStep 466, loss: 
4.7270612716674805\r\nStep 467, loss: 4.68767786026001\r\nStep 468, loss: 4.089179515838623\r\nStep 469, loss: 4.443053722381592\r\nStep 470, loss: 4.601764678955078\r\nStep 471, loss: 4.1854753494262695\r\nStep 472, loss: 4.367209434509277\r\nStep 473, loss: 4.561801910400391\r\nStep 474, loss: 4.419254302978516\r\nStep 475, loss: 4.415527820587158\r\nStep 476, loss: 4.368729114532471\r\nStep 477, loss: 4.194905757904053\r\nStep 478, loss: 4.435650825500488\r\nStep 479, loss: 4.502296447753906\r\nStep 480, loss: 4.542525291442871\r\nStep 301, loss: 6.522833347320557\r\nStep 302, loss: 6.843961715698242\r\nStep 303, loss: 6.503088474273682\r\nStep 304, loss: 6.348232269287109\r\nStep 305, loss: 6.257919788360596\r\nStep 306, loss: 6.217457294464111\r\nStep 307, loss: 6.710884094238281\r\nStep 308, loss: 6.380974769592285\r\nStep 309, loss: 6.220357418060303\r\nStep 310, loss: 6.108943462371826\r\nStep 311, loss: 5.8724164962768555\r\nStep 312, loss: 5.9643707275390625\r\nStep 313, loss: 5.961871147155762\r\nStep 314, loss: 5.758096694946289\r\nStep 315, loss: 6.158802509307861\r\nStep 316, loss: 6.0735673904418945\r\nStep 317, loss: 5.8323073387146\r\nStep 318, loss: 6.177542209625244\r\nStep 319, loss: 6.067553520202637\r\nStep 320, loss: 6.038662910461426\r\nStep 321, loss: 6.32551908493042\r\nStep 322, loss: 6.123026371002197\r\nStep 323, loss: 6.1038031578063965\r\nStep 324, loss: 6.278502464294434\r\nStep 325, loss: 5.9980788230896\r\nStep 326, loss: 6.238011837005615\r\nStep 327, loss: 6.08726167678833\r\nStep 328, loss: 6.122094631195068\r\nStep 329, loss: 6.312901496887207\r\nStep 330, loss: 6.060505390167236\r\nStep 241, loss: 7.379404067993164\r\nStep 242, loss: 7.04669713973999\r\nStep 243, loss: 7.223360061645508\r\nStep 244, loss: 7.122011661529541\r\nStep 245, loss: 6.9363813400268555\r\nStep 246, loss: 6.78315544128418\r\nStep 247, loss: 6.789515018463135\r\nStep 248, loss: 6.8189568519592285\r\nStep 249, loss: 6.757500171661377\r\nStep 250, loss: 6.645450115203857\r\nStep 251, loss: 6.835147857666016\r\nStep 252, loss: 7.028203964233398\r\nStep 253, loss: 6.857334136962891\r\nStep 254, loss: 6.791317462921143\r\nStep 255, loss: 7.043140888214111\r\nStep 256, loss: 7.165127277374268\r\nStep 257, loss: 6.969034671783447\r\nStep 258, loss: 7.095194339752197\r\nStep 259, loss: 7.076196193695068\r\nStep 260, loss: 7.128316402435303\r\nStep 261, loss: 6.900633811950684\r\nStep 262, loss: 6.888290882110596\r\nStep 263, loss: 6.734719753265381\r\nStep 264, loss: 6.55846643447876\r\nStep 265, loss: 6.635307312011719\r\nStep 266, loss: 6.654285907745361\r\nStep 267, loss: 6.610708236694336\r\nStep 268, loss: 6.6228346824646\r\nStep 269, loss: 6.684624195098877\r\nStep 270, loss: 6.578184604644775\r\nStep 331, loss: 5.6425580978393555\r\nStep 332, loss: 6.180875301361084\r\nStep 333, loss: 6.137739181518555\r\nStep 334, loss: 5.982623100280762\r\nStep 335, loss: 6.006547451019287\r\nStep 336, loss: 5.891482830047607\r\nStep 337, loss: 6.1084723472595215\r\nStep 338, loss: 5.797332286834717\r\nStep 339, loss: 5.9624152183532715\r\nStep 340, loss: 6.293918609619141\r\nStep 341, loss: 5.97660493850708\r\nStep 342, loss: 5.8210835456848145\r\nStep 343, loss: 5.732250690460205\r\nStep 344, loss: 5.8108367919921875\r\nStep 345, loss: 5.393507480621338\r\nStep 346, loss: 5.4268798828125\r\nStep 347, loss: 5.90861177444458\r\nStep 348, loss: 5.9017534255981445\r\nStep 349, loss: 5.544797897338867\r\nStep 350, loss: 5.644172668457031\r\nStep 351, loss: 5.834247589111328\r\nStep 352, loss: 
6.078111171722412\r\nStep 353, loss: 5.5367608070373535\r\nStep 354, loss: 6.098783016204834\r\nStep 355, loss: 6.018643856048584\r\nStep 356, loss: 5.954596519470215\r\nStep 357, loss: 6.137462615966797\r\nStep 358, loss: 5.710295677185059\r\nStep 359, loss: 5.741447925567627\r\nStep 360, loss: 5.361110687255859\r\nStep 271, loss: 6.8313798904418945\r\nStep 272, loss: 6.631593227386475\r\nStep 273, loss: 6.816318511962891\r\nStep 274, loss: 6.809188365936279\r\nStep 275, loss: 6.645437240600586\r\nStep 276, loss: 6.999457359313965\r\nStep 277, loss: 7.248863220214844\r\nStep 278, loss: 7.295817852020264\r\nStep 279, loss: 7.22598934173584\r\nStep 280, loss: 6.736786365509033\r\nStep 281, loss: 6.742730617523193\r\nStep 282, loss: 6.917891979217529\r\nStep 283, loss: 7.006328105926514\r\nStep 284, loss: 6.849727153778076\r\nStep 285, loss: 6.71075963973999\r\nStep 286, loss: 6.544010639190674\r\nStep 287, loss: 6.987166881561279\r\nStep 288, loss: 7.016972064971924\r\nStep 289, loss: 6.533799171447754\r\nStep 290, loss: 6.333832263946533\r\nStep 291, loss: 6.664097309112549\r\nStep 292, loss: 6.857154846191406\r\nStep 293, loss: 6.740937232971191\r\nStep 294, loss: 6.428022384643555\r\nStep 295, loss: 6.590887069702148\r\nStep 296, loss: 6.396702766418457\r\nStep 297, loss: 6.453038692474365\r\nStep 298, loss: 6.415592670440674\r\nStep 299, loss: 6.50046443939209\r\nStep 300, loss: 6.6542067527771\r\nStep 361, loss: 5.926032066345215\r\nStep 362, loss: 5.800010681152344\r\nStep 363, loss: 5.716776371002197\r\nStep 364, loss: 5.779480934143066\r\nStep 365, loss: 5.552876949310303\r\nStep 366, loss: 5.847739219665527\r\nStep 367, loss: 5.747087478637695\r\nStep 368, loss: 6.04551362991333\r\nStep 369, loss: 5.895529270172119\r\nStep 370, loss: 5.590581893920898\r\nStep 371, loss: 5.84631872177124\r\nStep 372, loss: 5.53010368347168\r\nStep 373, loss: 5.684707164764404\r\nStep 374, loss: 5.96873664855957\r\nStep 375, loss: 5.770754814147949\r\nStep 376, loss: 5.8693976402282715\r\nStep 377, loss: 6.002660274505615\r\nStep 378, loss: 5.831007957458496\r\nStep 379, loss: 5.539312362670898\r\nStep 380, loss: 5.310413837432861\r\nStep 381, loss: 5.491917610168457\r\nStep 382, loss: 5.797224044799805\r\nStep 383, loss: 5.502389907836914\r\nStep 384, loss: 5.429259777069092\r\nStep 385, loss: 5.718020439147949\r\nStep 386, loss: 5.498144626617432\r\nStep 387, loss: 4.996325492858887\r\nStep 388, loss: 5.29017448425293\r\nStep 389, loss: 5.036089897155762\r\nStep 390, loss: 5.375723361968994\r\nStep 301, loss: 6.522833347320557\r\nStep 302, loss: 6.843961715698242\r\nStep 303, loss: 6.503088474273682\r\nStep 304, loss: 6.348232269287109\r\nStep 305, loss: 6.257919788360596\r\nStep 306, loss: 6.217457294464111\r\nStep 307, loss: 6.710884094238281\r\nStep 308, loss: 6.380974769592285\r\nStep 309, loss: 6.220357418060303\r\nStep 310, loss: 6.108943462371826\r\nStep 311, loss: 5.8724164962768555\r\nStep 312, loss: 5.9643707275390625\r\nStep 313, loss: 5.961871147155762\r\nStep 314, loss: 5.758096694946289\r\nStep 315, loss: 6.158802509307861\r\nStep 316, loss: 6.0735673904418945\r\nStep 317, loss: 5.8323073387146\r\nStep 318, loss: 6.177542209625244\r\nStep 319, loss: 6.067553520202637\r\nStep 320, loss: 6.038662910461426\r\nStep 321, loss: 6.32551908493042\r\nStep 322, loss: 6.123026371002197\r\nStep 323, loss: 6.1038031578063965\r\nStep 324, loss: 6.278502464294434\r\nStep 325, loss: 5.9980788230896\r\nStep 326, loss: 6.238011837005615\r\nStep 327, loss: 6.08726167678833\r\nStep 328, loss: 
6.122094631195068\r\nStep 329, loss: 6.312901496887207\r\nStep 330, loss: 6.060505390167236\r\nStep 391, loss: 5.41914701461792\r\nStep 392, loss: 5.57274866104126\r\nStep 393, loss: 5.621382713317871\r\nStep 394, loss: 5.528846740722656\r\nStep 395, loss: 5.220247268676758\r\nStep 396, loss: 5.568932056427002\r\nStep 397, loss: 5.186644077301025\r\nStep 398, loss: 5.383299350738525\r\nStep 399, loss: 5.2024078369140625\r\nStep 400, loss: 5.268125534057617\r\nStep 401, loss: 5.126521587371826\r\nStep 402, loss: 5.441715717315674\r\nStep 403, loss: 4.971039772033691\r\nStep 404, loss: 4.975629806518555\r\nStep 405, loss: 5.007051467895508\r\nStep 406, loss: 5.2241597175598145\r\nStep 407, loss: 5.050140380859375\r\nStep 408, loss: 5.0859479904174805\r\nStep 409, loss: 5.251633644104004\r\nStep 410, loss: 5.043816089630127\r\nStep 411, loss: 5.1554694175720215\r\nStep 412, loss: 5.479846000671387\r\nStep 413, loss: 5.171293258666992\r\nStep 414, loss: 5.044966697692871\r\nStep 415, loss: 4.938406467437744\r\nStep 416, loss: 4.896639823913574\r\nStep 417, loss: 4.654478549957275\r\nStep 418, loss: 5.08234977722168\r\nStep 419, loss: 5.110918045043945\r\nStep 420, loss: 5.158133506774902\r\nStep 331, loss: 5.6425580978393555\r\nStep 332, loss: 6.180875301361084\r\nStep 333, loss: 6.137739181518555\r\nStep 334, loss: 5.982623100280762\r\nStep 335, loss: 6.006547451019287\r\nStep 336, loss: 5.891482830047607\r\nStep 337, loss: 6.1084723472595215\r\nStep 338, loss: 5.797332286834717\r\nStep 339, loss: 5.9624152183532715\r\nStep 340, loss: 6.293918609619141\r\nStep 341, loss: 5.97660493850708\r\nStep 342, loss: 5.8210835456848145\r\nStep 343, loss: 5.732250690460205\r\nStep 344, loss: 5.8108367919921875\r\nStep 345, loss: 5.393507480621338\r\nStep 346, loss: 5.4268798828125\r\nStep 347, loss: 5.90861177444458\r\nStep 348, loss: 5.9017534255981445\r\nStep 349, loss: 5.544797897338867\r\nStep 350, loss: 5.644172668457031\r\nStep 351, loss: 5.834247589111328\r\nStep 352, loss: 6.078111171722412\r\nStep 353, loss: 5.5367608070373535\r\nStep 354, loss: 6.098783016204834\r\nStep 355, loss: 6.018643856048584\r\nStep 356, loss: 5.954596519470215\r\nStep 357, loss: 6.137462615966797\r\nStep 358, loss: 5.710295677185059\r\nStep 359, loss: 5.741447925567627\r\nStep 360, loss: 5.361110687255859\r\nStep 421, loss: 5.083363056182861\r\nStep 422, loss: 4.816728591918945\r\nStep 423, loss: 5.111112117767334\r\nStep 424, loss: 4.970766067504883\r\nStep 425, loss: 5.160951137542725\r\nStep 426, loss: 4.560585021972656\r\nStep 427, loss: 4.5675554275512695\r\nStep 428, loss: 4.727843284606934\r\nStep 429, loss: 4.88224983215332\r\nStep 430, loss: 5.060454845428467\r\nStep 431, loss: 4.964059829711914\r\nStep 432, loss: 5.124404430389404\r\nStep 433, loss: 4.908259391784668\r\nStep 434, loss: 5.03548526763916\r\nStep 435, loss: 4.721578121185303\r\nStep 436, loss: 4.8141913414001465\r\nStep 437, loss: 4.8371782302856445\r\nStep 438, loss: 5.0658063888549805\r\nStep 439, loss: 4.997641086578369\r\nStep 440, loss: 4.761614799499512\r\nStep 441, loss: 5.140580654144287\r\nStep 442, loss: 4.914675235748291\r\nStep 443, loss: 4.541126728057861\r\nStep 444, loss: 4.878077983856201\r\nStep 445, loss: 4.9191365242004395\r\nStep 446, loss: 4.62006139755249\r\nStep 447, loss: 4.408695697784424\r\nStep 448, loss: 4.690447807312012\r\nStep 449, loss: 4.7643327713012695\r\nStep 450, loss: 4.939113140106201\r\nStep 361, loss: 5.926032066345215\r\nStep 362, loss: 5.800010681152344\r\nStep 363, loss: 5.716776371002197\r\nStep 364, 
loss: 5.779480934143066\r\nStep 365, loss: 5.552876949310303\r\nStep 366, loss: 5.847739219665527\r\nStep 367, loss: 5.747087478637695\r\nStep 368, loss: 6.04551362991333\r\nStep 369, loss: 5.895529270172119\r\nStep 370, loss: 5.590581893920898\r\nStep 371, loss: 5.84631872177124\r\nStep 372, loss: 5.53010368347168\r\nStep 373, loss: 5.684707164764404\r\nStep 374, loss: 5.96873664855957\r\nStep 375, loss: 5.770754814147949\r\nStep 376, loss: 5.8693976402282715\r\nStep 377, loss: 6.002660274505615\r\nStep 378, loss: 5.831007957458496\r\nStep 379, loss: 5.539312362670898\r\nStep 380, loss: 5.310413837432861\r\nStep 381, loss: 5.491917610168457\r\nStep 382, loss: 5.797224044799805\r\nStep 383, loss: 5.502389907836914\r\nStep 384, loss: 5.429259777069092\r\nStep 385, loss: 5.718020439147949\r\nStep 386, loss: 5.498144626617432\r\nStep 387, loss: 4.996325492858887\r\nStep 388, loss: 5.29017448425293\r\nStep 389, loss: 5.036089897155762\r\nStep 390, loss: 5.375723361968994\r\nStep 451, loss: 4.701648235321045\r\nStep 452, loss: 4.490898132324219\r\nStep 453, loss: 4.7736101150512695\r\nStep 454, loss: 4.64280366897583\r\nStep 455, loss: 4.667891025543213\r\nStep 456, loss: 4.512481689453125\r\nStep 457, loss: 4.70444917678833\r\nStep 458, loss: 4.716007232666016\r\nStep 459, loss: 4.634371757507324\r\nStep 460, loss: 4.296895503997803\r\nStep 461, loss: 4.487729072570801\r\nStep 462, loss: 4.345639705657959\r\nStep 463, loss: 4.462018966674805\r\nStep 464, loss: 4.95395565032959\r\nStep 465, loss: 4.456920146942139\r\nStep 466, loss: 4.7270612716674805\r\nStep 467, loss: 4.68767786026001\r\nStep 468, loss: 4.089179515838623\r\nStep 469, loss: 4.443053722381592\r\nStep 470, loss: 4.601764678955078\r\nStep 471, loss: 4.1854753494262695\r\nStep 472, loss: 4.367209434509277\r\nStep 473, loss: 4.561801910400391\r\nStep 474, loss: 4.419254302978516\r\nStep 475, loss: 4.415527820587158\r\nStep 476, loss: 4.368729114532471\r\nStep 477, loss: 4.194905757904053\r\nStep 478, loss: 4.435650825500488\r\nStep 479, loss: 4.502296447753906\r\nStep 480, loss: 4.542525291442871\r\nStep 391, loss: 5.41914701461792\r\nStep 392, loss: 5.57274866104126\r\nStep 393, loss: 5.621382713317871\r\nStep 394, loss: 5.528846740722656\r\nStep 395, loss: 5.220247268676758\r\nStep 396, loss: 5.568932056427002\r\nStep 397, loss: 5.186644077301025\r\nStep 398, loss: 5.383299350738525\r\nStep 399, loss: 5.2024078369140625\r\nStep 400, loss: 5.268125534057617\r\nStep 401, loss: 5.126521587371826\r\nStep 402, loss: 5.441715717315674\r\nStep 403, loss: 4.971039772033691\r\nStep 404, loss: 4.975629806518555\r\nStep 405, loss: 5.007051467895508\r\nStep 406, loss: 5.2241597175598145\r\nStep 407, loss: 5.050140380859375\r\nStep 408, loss: 5.0859479904174805\r\nStep 409, loss: 5.251633644104004\r\nStep 410, loss: 5.043816089630127\r\nStep 411, loss: 5.1554694175720215\r\nStep 412, loss: 5.479846000671387\r\nStep 413, loss: 5.171293258666992\r\nStep 414, loss: 5.044966697692871\r\nStep 415, loss: 4.938406467437744\r\nStep 416, loss: 4.896639823913574\r\nStep 417, loss: 4.654478549957275\r\nStep 418, loss: 5.08234977722168\r\nStep 419, loss: 5.110918045043945\r\nStep 420, loss: 5.158133506774902\r\nStep 481, loss: 4.526846885681152\r\nStep 421, loss: 5.083363056182861\r\nStep 422, loss: 4.816728591918945\r\nStep 423, loss: 5.111112117767334\r\nStep 424, loss: 4.970766067504883\r\nStep 425, loss: 5.160951137542725\r\nStep 426, loss: 4.560585021972656\r\nStep 427, loss: 4.5675554275512695\r\nStep 428, loss: 4.727843284606934\r\nStep 429, loss: 
4.88224983215332\r\nStep 430, loss: 5.060454845428467\r\nStep 431, loss: 4.964059829711914\r\nStep 432, loss: 5.124404430389404\r\nStep 433, loss: 4.908259391784668\r\nStep 434, loss: 5.03548526763916\r\nStep 435, loss: 4.721578121185303\r\nStep 436, loss: 4.8141913414001465\r\nStep 437, loss: 4.8371782302856445\r\nStep 438, loss: 5.0658063888549805\r\nStep 439, loss: 4.997641086578369\r\nStep 440, loss: 4.761614799499512\r\nStep 441, loss: 5.140580654144287\r\nStep 442, loss: 4.914675235748291\r\nStep 443, loss: 4.541126728057861\r\nStep 444, loss: 4.878077983856201\r\nStep 445, loss: 4.9191365242004395\r\nStep 446, loss: 4.62006139755249\r\nStep 447, loss: 4.408695697784424\r\nStep 448, loss: 4.690447807312012\r\nStep 449, loss: 4.7643327713012695\r\nStep 450, loss: 4.939113140106201\r\nStep 451, loss: 4.701648235321045\r\nStep 452, loss: 4.490898132324219\r\nStep 453, loss: 4.7736101150512695\r\nStep 454, loss: 4.64280366897583\r\nStep 455, loss: 4.667891025543213\r\nStep 456, loss: 4.512481689453125\r\nStep 457, loss: 4.70444917678833\r\nStep 458, loss: 4.716007232666016\r\nStep 459, loss: 4.634371757507324\r\nStep 460, loss: 4.296895503997803\r\nStep 461, loss: 4.487729072570801\r\nStep 462, loss: 4.345639705657959\r\nStep 463, loss: 4.462018966674805\r\nStep 464, loss: 4.95395565032959\r\nStep 465, loss: 4.456920146942139\r\nStep 466, loss: 4.7270612716674805\r\nStep 467, loss: 4.68767786026001\r\nStep 468, loss: 4.089179515838623\r\nStep 469, loss: 4.443053722381592\r\nStep 470, loss: 4.601764678955078\r\nStep 471, loss: 4.1854753494262695\r\nStep 472, loss: 4.367209434509277\r\nStep 473, loss: 4.561801910400391\r\nStep 474, loss: 4.419254302978516\r\nStep 475, loss: 4.415527820587158\r\nStep 476, loss: 4.368729114532471\r\nStep 477, loss: 4.194905757904053\r\nStep 478, loss: 4.435650825500488\r\nStep 479, loss: 4.502296447753906\r\nStep 480, loss: 4.542525291442871\r\nStep 481, loss: 4.526846885681152\r\nStep 361, loss: 5.926032066345215\r\nStep 362, loss: 5.800010681152344\r\nStep 363, loss: 5.716776371002197\r\nStep 364, loss: 5.779480934143066\r\nStep 365, loss: 5.552876949310303\r\nStep 366, loss: 5.847739219665527\r\nStep 367, loss: 5.747087478637695\r\nStep 368, loss: 6.04551362991333\r\nStep 369, loss: 5.895529270172119\r\nStep 370, loss: 5.590581893920898\r\nStep 371, loss: 5.84631872177124\r\nStep 372, loss: 5.53010368347168\r\nStep 373, loss: 5.684707164764404\r\nStep 374, loss: 5.96873664855957\r\nStep 375, loss: 5.770754814147949\r\nStep 376, loss: 5.8693976402282715\r\nStep 377, loss: 6.002660274505615\r\nStep 378, loss: 5.831007957458496\r\nStep 379, loss: 5.539312362670898\r\nStep 380, loss: 5.310413837432861\r\nStep 381, loss: 5.491917610168457\r\nStep 382, loss: 5.797224044799805\r\nStep 383, loss: 5.502389907836914\r\nStep 384, loss: 5.429259777069092\r\nStep 385, loss: 5.718020439147949\r\nStep 386, loss: 5.498144626617432\r\nStep 387, loss: 4.996325492858887\r\nStep 388, loss: 5.29017448425293\r\nStep 389, loss: 5.036089897155762\r\nStep 390, loss: 5.375723361968994\r\nStep 391, loss: 5.41914701461792\r\nStep 392, loss: 5.57274866104126\r\nStep 393, loss: 5.621382713317871\r\nStep 394, loss: 5.528846740722656\r\nStep 395, loss: 5.220247268676758\r\nStep 396, loss: 5.568932056427002\r\nStep 397, loss: 5.186644077301025\r\nStep 398, loss: 5.383299350738525\r\nStep 399, loss: 5.2024078369140625\r\nStep 400, loss: 5.268125534057617\r\nStep 401, loss: 5.126521587371826\r\nStep 402, loss: 5.441715717315674\r\nStep 403, loss: 4.971039772033691\r\nStep 404, loss: 
4.975629806518555\r\nStep 405, loss: 5.007051467895508\r\nStep 406, loss: 5.2241597175598145\r\nStep 407, loss: 5.050140380859375\r\nStep 408, loss: 5.0859479904174805\r\nStep 409, loss: 5.251633644104004\r\nStep 410, loss: 5.043816089630127\r\nStep 411, loss: 5.1554694175720215\r\nStep 412, loss: 5.479846000671387\r\nStep 413, loss: 5.171293258666992\r\nStep 414, loss: 5.044966697692871\r\nStep 415, loss: 4.938406467437744\r\nStep 416, loss: 4.896639823913574\r\nStep 417, loss: 4.654478549957275\r\nStep 418, loss: 5.08234977722168\r\nStep 419, loss: 5.110918045043945\r\nStep 420, loss: 5.158133506774902\r\nStep 421, loss: 5.083363056182861\r\nStep 422, loss: 4.816728591918945\r\nStep 423, loss: 5.111112117767334\r\nStep 424, loss: 4.970766067504883\r\nStep 425, loss: 5.160951137542725\r\nStep 426, loss: 4.560585021972656\r\nStep 427, loss: 4.5675554275512695\r\nStep 428, loss: 4.727843284606934\r\nStep 429, loss: 4.88224983215332\r\nStep 430, loss: 5.060454845428467\r\nStep 431, loss: 4.964059829711914\r\nStep 432, loss: 5.124404430389404\r\nStep 433, loss: 4.908259391784668\r\nStep 434, loss: 5.03548526763916\r\nStep 435, loss: 4.721578121185303\r\nStep 436, loss: 4.8141913414001465\r\nStep 437, loss: 4.8371782302856445\r\nStep 438, loss: 5.0658063888549805\r\nStep 439, loss: 4.997641086578369\r\nStep 440, loss: 4.761614799499512\r\nStep 441, loss: 5.140580654144287\r\nStep 442, loss: 4.914675235748291\r\nStep 443, loss: 4.541126728057861\r\nStep 444, loss: 4.878077983856201\r\nStep 445, loss: 4.9191365242004395\r\nStep 446, loss: 4.62006139755249\r\nStep 447, loss: 4.408695697784424\r\nStep 448, loss: 4.690447807312012\r\nStep 449, loss: 4.7643327713012695\r\nStep 450, loss: 4.939113140106201\r\nStep 451, loss: 4.701648235321045\r\nStep 452, loss: 4.490898132324219\r\nStep 453, loss: 4.7736101150512695\r\nStep 454, loss: 4.64280366897583\r\nStep 455, loss: 4.667891025543213\r\nStep 456, loss: 4.512481689453125\r\nStep 457, loss: 4.70444917678833\r\nStep 458, loss: 4.716007232666016\r\nStep 459, loss: 4.634371757507324\r\nStep 460, loss: 4.296895503997803\r\nStep 461, loss: 4.487729072570801\r\nStep 462, loss: 4.345639705657959\r\nStep 463, loss: 4.462018966674805\r\nStep 464, loss: 4.95395565032959\r\nStep 465, loss: 4.456920146942139\r\nStep 466, loss: 4.7270612716674805\r\nStep 467, loss: 4.68767786026001\r\nStep 468, loss: 4.089179515838623\r\nStep 469, loss: 4.443053722381592\r\nStep 470, loss: 4.601764678955078\r\nStep 471, loss: 4.1854753494262695\r\nStep 472, loss: 4.367209434509277\r\nStep 473, loss: 4.561801910400391\r\nStep 474, loss: 4.419254302978516\r\nStep 475, loss: 4.415527820587158\r\nStep 476, loss: 4.368729114532471\r\nStep 477, loss: 4.194905757904053\r\nStep 478, loss: 4.435650825500488\r\nStep 479, loss: 4.502296447753906\r\nStep 480, loss: 4.542525291442871\r\nStep 481, loss: 4.526846885681152\r\nStep 481, loss: 4.526846885681152\r\nStep 241, loss: 7.379404067993164\r\nStep 242, loss: 7.04669713973999\r\nStep 243, loss: 7.223360061645508\r\nStep 244, loss: 7.122011661529541\r\nStep 245, loss: 6.9363813400268555\r\nStep 246, loss: 6.78315544128418\r\nStep 247, loss: 6.789515018463135\r\nStep 248, loss: 6.8189568519592285\r\nStep 249, loss: 6.757500171661377\r\nStep 250, loss: 6.645450115203857\r\nStep 251, loss: 6.835147857666016\r\nStep 252, loss: 7.028203964233398\r\nStep 253, loss: 6.857334136962891\r\nStep 254, loss: 6.791317462921143\r\nStep 255, loss: 7.043140888214111\r\nStep 256, loss: 7.165127277374268\r\nStep 257, loss: 6.969034671783447\r\nStep 258, 
loss: 7.095194339752197\r\nStep 259, loss: 7.076196193695068\r\nStep 260, loss: 7.128316402435303\r\nStep 261, loss: 6.900633811950684\r\nStep 262, loss: 6.888290882110596\r\nStep 263, loss: 6.734719753265381\r\nStep 264, loss: 6.55846643447876\r\nStep 265, loss: 6.635307312011719\r\nStep 266, loss: 6.654285907745361\r\nStep 267, loss: 6.610708236694336\r\nStep 268, loss: 6.6228346824646\r\nStep 269, loss: 6.684624195098877\r\nStep 270, loss: 6.578184604644775\r\nStep 271, loss: 6.8313798904418945\r\nStep 272, loss: 6.631593227386475\r\nStep 273, loss: 6.816318511962891\r\nStep 274, loss: 6.809188365936279\r\nStep 275, loss: 6.645437240600586\r\nStep 276, loss: 6.999457359313965\r\nStep 277, loss: 7.248863220214844\r\nStep 278, loss: 7.295817852020264\r\nStep 279, loss: 7.22598934173584\r\nStep 280, loss: 6.736786365509033\r\nStep 281, loss: 6.742730617523193\r\nStep 282, loss: 6.917891979217529\r\nStep 283, loss: 7.006328105926514\r\nStep 284, loss: 6.849727153778076\r\nStep 285, loss: 6.71075963973999\r\nStep 286, loss: 6.544010639190674\r\nStep 287, loss: 6.987166881561279\r\nStep 288, loss: 7.016972064971924\r\nStep 289, loss: 6.533799171447754\r\nStep 290, loss: 6.333832263946533\r\nStep 291, loss: 6.664097309112549\r\nStep 292, loss: 6.857154846191406\r\nStep 293, loss: 6.740937232971191\r\nStep 294, loss: 6.428022384643555\r\nStep 295, loss: 6.590887069702148\r\nStep 296, loss: 6.396702766418457\r\nStep 297, loss: 6.453038692474365\r\nStep 298, loss: 6.415592670440674\r\nStep 299, loss: 6.50046443939209\r\nStep 300, loss: 6.6542067527771\r\nStep 301, loss: 6.522833347320557\r\nStep 302, loss: 6.843961715698242\r\nStep 303, loss: 6.503088474273682\r\nStep 304, loss: 6.348232269287109\r\nStep 305, loss: 6.257919788360596\r\nStep 306, loss: 6.217457294464111\r\nStep 307, loss: 6.710884094238281\r\nStep 308, loss: 6.380974769592285\r\nStep 309, loss: 6.220357418060303\r\nStep 310, loss: 6.108943462371826\r\nStep 311, loss: 5.8724164962768555\r\nStep 312, loss: 5.9643707275390625\r\nStep 313, loss: 5.961871147155762\r\nStep 314, loss: 5.758096694946289\r\nStep 315, loss: 6.158802509307861\r\nStep 316, loss: 6.0735673904418945\r\nStep 317, loss: 5.8323073387146\r\nStep 318, loss: 6.177542209625244\r\nStep 319, loss: 6.067553520202637\r\nStep 320, loss: 6.038662910461426\r\nStep 321, loss: 6.32551908493042\r\nStep 322, loss: 6.123026371002197\r\nStep 323, loss: 6.1038031578063965\r\nStep 324, loss: 6.278502464294434\r\nStep 325, loss: 5.9980788230896\r\nStep 326, loss: 6.238011837005615\r\nStep 327, loss: 6.08726167678833\r\nStep 328, loss: 6.122094631195068\r\nStep 329, loss: 6.312901496887207\r\nStep 330, loss: 6.060505390167236\r\nStep 331, loss: 5.6425580978393555\r\nStep 332, loss: 6.180875301361084\r\nStep 333, loss: 6.137739181518555\r\nStep 334, loss: 5.982623100280762\r\nStep 335, loss: 6.006547451019287\r\nStep 336, loss: 5.891482830047607\r\nStep 337, loss: 6.1084723472595215\r\nStep 338, loss: 5.797332286834717\r\nStep 339, loss: 5.9624152183532715\r\nStep 340, loss: 6.293918609619141\r\nStep 341, loss: 5.97660493850708\r\nStep 342, loss: 5.8210835456848145\r\nStep 343, loss: 5.732250690460205\r\nStep 344, loss: 5.8108367919921875\r\nStep 345, loss: 5.393507480621338\r\nStep 346, loss: 5.4268798828125\r\nStep 347, loss: 5.90861177444458\r\nStep 348, loss: 5.9017534255981445\r\nStep 349, loss: 5.544797897338867\r\nStep 350, loss: 5.644172668457031\r\nStep 351, loss: 5.834247589111328\r\nStep 352, loss: 6.078111171722412\r\nStep 353, loss: 5.5367608070373535\r\nStep 354, loss: 
6.098783016204834\r\nStep 355, loss: 6.018643856048584\r\nStep 356, loss: 5.954596519470215\r\nStep 357, loss: 6.137462615966797\r\nStep 358, loss: 5.710295677185059\r\nStep 359, loss: 5.741447925567627\r\nStep 360, loss: 5.361110687255859\r\nStep 361, loss: 5.926032066345215\r\nStep 362, loss: 5.800010681152344\r\nStep 363, loss: 5.716776371002197\r\nStep 364, loss: 5.779480934143066\r\nStep 365, loss: 5.552876949310303\r\nStep 366, loss: 5.847739219665527\r\nStep 367, loss: 5.747087478637695\r\nStep 368, loss: 6.04551362991333\r\nStep 369, loss: 5.895529270172119\r\nStep 370, loss: 5.590581893920898\r\nStep 371, loss: 5.84631872177124\r\nStep 372, loss: 5.53010368347168\r\nStep 373, loss: 5.684707164764404\r\nStep 374, loss: 5.96873664855957\r\nStep 375, loss: 5.770754814147949\r\nStep 376, loss: 5.8693976402282715\r\nStep 377, loss: 6.002660274505615\r\nStep 378, loss: 5.831007957458496\r\nStep 379, loss: 5.539312362670898\r\nStep 380, loss: 5.310413837432861\r\nStep 381, loss: 5.491917610168457\r\nStep 382, loss: 5.797224044799805\r\nStep 383, loss: 5.502389907836914\r\nStep 384, loss: 5.429259777069092\r\nStep 385, loss: 5.718020439147949\r\nStep 386, loss: 5.498144626617432\r\nStep 387, loss: 4.996325492858887\r\nStep 388, loss: 5.29017448425293\r\nStep 389, loss: 5.036089897155762\r\nStep 390, loss: 5.375723361968994\r\nStep 391, loss: 5.41914701461792\r\nStep 392, loss: 5.57274866104126\r\nStep 393, loss: 5.621382713317871\r\nStep 394, loss: 5.528846740722656\r\nStep 395, loss: 5.220247268676758\r\nStep 396, loss: 5.568932056427002\r\nStep 397, loss: 5.186644077301025\r\nStep 398, loss: 5.383299350738525\r\nStep 399, loss: 5.2024078369140625\r\nStep 400, loss: 5.268125534057617\r\nStep 401, loss: 5.126521587371826\r\nStep 402, loss: 5.441715717315674\r\nStep 403, loss: 4.971039772033691\r\nStep 404, loss: 4.975629806518555\r\nStep 405, loss: 5.007051467895508\r\nStep 406, loss: 5.2241597175598145\r\nStep 407, loss: 5.050140380859375\r\nStep 408, loss: 5.0859479904174805\r\nStep 409, loss: 5.251633644104004\r\nStep 410, loss: 5.043816089630127\r\nStep 411, loss: 5.1554694175720215\r\nStep 412, loss: 5.479846000671387\r\nStep 413, loss: 5.171293258666992\r\nStep 414, loss: 5.044966697692871\r\nStep 415, loss: 4.938406467437744\r\nStep 416, loss: 4.896639823913574\r\nStep 417, loss: 4.654478549957275\r\nStep 418, loss: 5.08234977722168\r\nStep 419, loss: 5.110918045043945\r\nStep 420, loss: 5.158133506774902\r\nStep 421, loss: 5.083363056182861\r\nStep 422, loss: 4.816728591918945\r\nStep 423, loss: 5.111112117767334\r\nStep 424, loss: 4.970766067504883\r\nStep 425, loss: 5.160951137542725\r\nStep 426, loss: 4.560585021972656\r\nStep 427, loss: 4.5675554275512695\r\nStep 428, loss: 4.727843284606934\r\nStep 429, loss: 4.88224983215332\r\nStep 430, loss: 5.060454845428467\r\nStep 431, loss: 4.964059829711914\r\nStep 432, loss: 5.124404430389404\r\nStep 433, loss: 4.908259391784668\r\nStep 434, loss: 5.03548526763916\r\nStep 435, loss: 4.721578121185303\r\nStep 436, loss: 4.8141913414001465\r\nStep 437, loss: 4.8371782302856445\r\nStep 438, loss: 5.0658063888549805\r\nStep 439, loss: 4.997641086578369\r\nStep 440, loss: 4.761614799499512\r\nStep 441, loss: 5.140580654144287\r\nStep 442, loss: 4.914675235748291\r\nStep 443, loss: 4.541126728057861\r\nStep 444, loss: 4.878077983856201\r\nStep 445, loss: 4.9191365242004395\r\nStep 446, loss: 4.62006139755249\r\nStep 447, loss: 4.408695697784424\r\nStep 448, loss: 4.690447807312012\r\nStep 449, loss: 4.7643327713012695\r\nStep 450, loss: 
4.939113140106201\r\nStep 451, loss: 4.701648235321045\r\nStep 452, loss: 4.490898132324219\r\nStep 453, loss: 4.7736101150512695\r\nStep 454, loss: 4.64280366897583\r\nStep 455, loss: 4.667891025543213\r\nStep 456, loss: 4.512481689453125\r\nStep 457, loss: 4.70444917678833\r\nStep 458, loss: 4.716007232666016\r\nStep 459, loss: 4.634371757507324\r\nStep 460, loss: 4.296895503997803\r\nStep 461, loss: 4.487729072570801\r\nStep 462, loss: 4.345639705657959\r\nStep 463, loss: 4.462018966674805\r\nStep 464, loss: 4.95395565032959\r\nStep 465, loss: 4.456920146942139\r\nStep 466, loss: 4.7270612716674805\r\nStep 467, loss: 4.68767786026001\r\nStep 468, loss: 4.089179515838623\r\nStep 469, loss: 4.443053722381592\r\nStep 470, loss: 4.601764678955078\r\nStep 471, loss: 4.1854753494262695\r\nStep 472, loss: 4.367209434509277\r\nStep 473, loss: 4.561801910400391\r\nStep 474, loss: 4.419254302978516\r\nStep 475, loss: 4.415527820587158\r\nStep 476, loss: 4.368729114532471\r\nStep 477, loss: 4.194905757904053\r\nStep 478, loss: 4.435650825500488\r\nStep 479, loss: 4.502296447753906\r\nStep 480, loss: 4.542525291442871\r\nStep 481, loss: 4.526846885681152\r\nStep 241, loss: 7.379404067993164\r\nStep 242, loss: 7.04669713973999\r\nStep 243, loss: 7.223360061645508\r\nStep 244, loss: 7.122011661529541\r\nStep 245, loss: 6.9363813400268555\r\nStep 246, loss: 6.78315544128418\r\nStep 247, loss: 6.789515018463135\r\nStep 248, loss: 6.8189568519592285\r\nStep 249, loss: 6.757500171661377\r\nStep 250, loss: 6.645450115203857\r\nStep 251, loss: 6.835147857666016\r\nStep 252, loss: 7.028203964233398\r\nStep 253, loss: 6.857334136962891\r\nStep 254, loss: 6.791317462921143\r\nStep 255, loss: 7.043140888214111\r\nStep 256, loss: 7.165127277374268\r\nStep 257, loss: 6.969034671783447\r\nStep 258, loss: 7.095194339752197\r\nStep 259, loss: 7.076196193695068\r\nStep 260, loss: 7.128316402435303\r\nStep 261, loss: 6.900633811950684\r\nStep 262, loss: 6.888290882110596\r\nStep 263, loss: 6.734719753265381\r\nStep 264, loss: 6.55846643447876\r\nStep 265, loss: 6.635307312011719\r\nStep 266, loss: 6.654285907745361\r\nStep 267, loss: 6.610708236694336\r\nStep 268, loss: 6.6228346824646\r\nStep 269, loss: 6.684624195098877\r\nStep 270, loss: 6.578184604644775\r\nStep 271, loss: 6.8313798904418945\r\nStep 272, loss: 6.631593227386475\r\nStep 273, loss: 6.816318511962891\r\nStep 274, loss: 6.809188365936279\r\nStep 275, loss: 6.645437240600586\r\nStep 276, loss: 6.999457359313965\r\nStep 277, loss: 7.248863220214844\r\nStep 278, loss: 7.295817852020264\r\nStep 279, loss: 7.22598934173584\r\nStep 280, loss: 6.736786365509033\r\nStep 281, loss: 6.742730617523193\r\nStep 282, loss: 6.917891979217529\r\nStep 283, loss: 7.006328105926514\r\nStep 284, loss: 6.849727153778076\r\nStep 285, loss: 6.71075963973999\r\nStep 286, loss: 6.544010639190674\r\nStep 287, loss: 6.987166881561279\r\nStep 288, loss: 7.016972064971924\r\nStep 289, loss: 6.533799171447754\r\nStep 290, loss: 6.333832263946533\r\nStep 291, loss: 6.664097309112549\r\nStep 292, loss: 6.857154846191406\r\nStep 293, loss: 6.740937232971191\r\nStep 294, loss: 6.428022384643555\r\nStep 295, loss: 6.590887069702148\r\nStep 296, loss: 6.396702766418457\r\nStep 297, loss: 6.453038692474365\r\nStep 298, loss: 6.415592670440674\r\nStep 299, loss: 6.50046443939209\r\nStep 300, loss: 6.6542067527771\r\nStep 301, loss: 6.522833347320557\r\nStep 302, loss: 6.843961715698242\r\nStep 303, loss: 6.503088474273682\r\nStep 304, loss: 6.348232269287109\r\nStep 305, loss: 
6.257919788360596\r\nStep 306, loss: 6.217457294464111\r\nStep 307, loss: 6.710884094238281\r\nStep 308, loss: 6.380974769592285\r\nStep 309, loss: 6.220357418060303\r\nStep 310, loss: 6.108943462371826\r\nStep 311, loss: 5.8724164962768555\r\nStep 312, loss: 5.9643707275390625\r\nStep 313, loss: 5.961871147155762\r\nStep 314, loss: 5.758096694946289\r\nStep 315, loss: 6.158802509307861\r\nStep 316, loss: 6.0735673904418945\r\nStep 317, loss: 5.8323073387146\r\nStep 318, loss: 6.177542209625244\r\nStep 319, loss: 6.067553520202637\r\nStep 320, loss: 6.038662910461426\r\nStep 321, loss: 6.32551908493042\r\nStep 322, loss: 6.123026371002197\r\nStep 323, loss: 6.1038031578063965\r\nStep 324, loss: 6.278502464294434\r\nStep 325, loss: 5.9980788230896\r\nStep 326, loss: 6.238011837005615\r\nStep 327, loss: 6.08726167678833\r\nStep 328, loss: 6.122094631195068\r\nStep 329, loss: 6.312901496887207\r\nStep 330, loss: 6.060505390167236\r\nStep 331, loss: 5.6425580978393555\r\nStep 332, loss: 6.180875301361084\r\nStep 333, loss: 6.137739181518555\r\nStep 334, loss: 5.982623100280762\r\nStep 335, loss: 6.006547451019287\r\nStep 336, loss: 5.891482830047607\r\nStep 337, loss: 6.1084723472595215\r\nStep 338, loss: 5.797332286834717\r\nStep 339, loss: 5.9624152183532715\r\nStep 340, loss: 6.293918609619141\r\nStep 341, loss: 5.97660493850708\r\nStep 342, loss: 5.8210835456848145\r\nStep 343, loss: 5.732250690460205\r\nStep 344, loss: 5.8108367919921875\r\nStep 345, loss: 5.393507480621338\r\nStep 346, loss: 5.4268798828125\r\nStep 347, loss: 5.90861177444458\r\nStep 348, loss: 5.9017534255981445\r\nStep 349, loss: 5.544797897338867\r\nStep 350, loss: 5.644172668457031\r\nStep 351, loss: 5.834247589111328\r\nStep 352, loss: 6.078111171722412\r\nStep 353, loss: 5.5367608070373535\r\nStep 354, loss: 6.098783016204834\r\nStep 355, loss: 6.018643856048584\r\nStep 356, loss: 5.954596519470215\r\nStep 357, loss: 6.137462615966797\r\nStep 358, loss: 5.710295677185059\r\nStep 359, loss: 5.741447925567627\r\nStep 360, loss: 5.361110687255859\r\nStep 361, loss: 5.926032066345215\r\nStep 362, loss: 5.800010681152344\r\nStep 363, loss: 5.716776371002197\r\nStep 364, loss: 5.779480934143066\r\nStep 365, loss: 5.552876949310303\r\nStep 366, loss: 5.847739219665527\r\nStep 367, loss: 5.747087478637695\r\nStep 368, loss: 6.04551362991333\r\nStep 369, loss: 5.895529270172119\r\nStep 370, loss: 5.590581893920898\r\nStep 371, loss: 5.84631872177124\r\nStep 372, loss: 5.53010368347168\r\nStep 373, loss: 5.684707164764404\r\nStep 374, loss: 5.96873664855957\r\nStep 375, loss: 5.770754814147949\r\nStep 376, loss: 5.8693976402282715\r\nStep 377, loss: 6.002660274505615\r\nStep 378, loss: 5.831007957458496\r\nStep 379, loss: 5.539312362670898\r\nStep 380, loss: 5.310413837432861\r\nStep 381, loss: 5.491917610168457\r\nStep 382, loss: 5.797224044799805\r\nStep 383, loss: 5.502389907836914\r\nStep 384, loss: 5.429259777069092\r\nStep 385, loss: 5.718020439147949\r\nStep 386, loss: 5.498144626617432\r\nStep 387, loss: 4.996325492858887\r\nStep 388, loss: 5.29017448425293\r\nStep 389, loss: 5.036089897155762\r\nStep 390, loss: 5.375723361968994\r\nStep 391, loss: 5.41914701461792\r\nStep 392, loss: 5.57274866104126\r\nStep 393, loss: 5.621382713317871\r\nStep 394, loss: 5.528846740722656\r\nStep 395, loss: 5.220247268676758\r\nStep 396, loss: 5.568932056427002\r\nStep 397, loss: 5.186644077301025\r\nStep 398, loss: 5.383299350738525\r\nStep 399, loss: 5.2024078369140625\r\nStep 400, loss: 5.268125534057617\r\nStep 401, loss: 
5.126521587371826\r\nStep 402, loss: 5.441715717315674\r\nStep 403, loss: 4.971039772033691\r\nStep 404, loss: 4.975629806518555\r\nStep 405, loss: 5.007051467895508\r\nStep 406, loss: 5.2241597175598145\r\nStep 407, loss: 5.050140380859375\r\nStep 408, loss: 5.0859479904174805\r\nStep 409, loss: 5.251633644104004\r\nStep 410, loss: 5.043816089630127\r\nStep 411, loss: 5.1554694175720215\r\nStep 412, loss: 5.479846000671387\r\nStep 413, loss: 5.171293258666992\r\nStep 414, loss: 5.044966697692871\r\nStep 415, loss: 4.938406467437744\r\nStep 416, loss: 4.896639823913574\r\nStep 417, loss: 4.654478549957275\r\nStep 418, loss: 5.08234977722168\r\nStep 419, loss: 5.110918045043945\r\nStep 420, loss: 5.158133506774902\r\nStep 421, loss: 5.083363056182861\r\nStep 422, loss: 4.816728591918945\r\nStep 423, loss: 5.111112117767334\r\nStep 424, loss: 4.970766067504883\r\nStep 425, loss: 5.160951137542725\r\nStep 426, loss: 4.560585021972656\r\nStep 427, loss: 4.5675554275512695\r\nStep 428, loss: 4.727843284606934\r\nStep 429, loss: 4.88224983215332\r\nStep 430, loss: 5.060454845428467\r\nStep 431, loss: 4.964059829711914\r\nStep 432, loss: 5.124404430389404\r\nStep 433, loss: 4.908259391784668\r\nStep 434, loss: 5.03548526763916\r\nStep 435, loss: 4.721578121185303\r\nStep 436, loss: 4.8141913414001465\r\nStep 437, loss: 4.8371782302856445\r\nStep 438, loss: 5.0658063888549805\r\nStep 439, loss: 4.997641086578369\r\nStep 440, loss: 4.761614799499512\r\nStep 441, loss: 5.140580654144287\r\nStep 442, loss: 4.914675235748291\r\nStep 443, loss: 4.541126728057861\r\nStep 444, loss: 4.878077983856201\r\nStep 445, loss: 4.9191365242004395\r\nStep 446, loss: 4.62006139755249\r\nStep 447, loss: 4.408695697784424\r\nStep 448, loss: 4.690447807312012\r\nStep 449, loss: 4.7643327713012695\r\nStep 450, loss: 4.939113140106201\r\nStep 451, loss: 4.701648235321045\r\nStep 452, loss: 4.490898132324219\r\nStep 453, loss: 4.7736101150512695\r\nStep 454, loss: 4.64280366897583\r\nStep 455, loss: 4.667891025543213\r\nStep 456, loss: 4.512481689453125\r\nStep 457, loss: 4.70444917678833\r\nStep 458, loss: 4.716007232666016\r\nStep 459, loss: 4.634371757507324\r\nStep 460, loss: 4.296895503997803\r\nStep 461, loss: 4.487729072570801\r\nStep 462, loss: 4.345639705657959\r\nStep 463, loss: 4.462018966674805\r\nStep 464, loss: 4.95395565032959\r\nStep 465, loss: 4.456920146942139\r\nStep 466, loss: 4.7270612716674805\r\nStep 467, loss: 4.68767786026001\r\nStep 468, loss: 4.089179515838623\r\nStep 469, loss: 4.443053722381592\r\nStep 470, loss: 4.601764678955078\r\nStep 471, loss: 4.1854753494262695\r\nStep 472, loss: 4.367209434509277\r\nStep 473, loss: 4.561801910400391\r\nStep 474, loss: 4.419254302978516\r\nStep 475, loss: 4.415527820587158\r\nStep 476, loss: 4.368729114532471\r\nStep 477, loss: 4.194905757904053\r\nStep 478, loss: 4.435650825500488\r\nStep 479, loss: 4.502296447753906\r\nStep 480, loss: 4.542525291442871\r\nStep 481, loss: 4.526846885681152\r\n",,terminal_output +802,3130426,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",12868,0,"",python,selection_mouse +803,3130546,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",12868,5,"uint8",python,selection_mouse +804,3131290,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",12871,0,"",python,selection_mouse +805,3131290,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",12868,5,"uint8",python,selection_mouse 
+806,3132002,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",12865,0,"",python,selection_mouse +807,3132148,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",12865,2,"np",python,selection_mouse +808,3132402,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",12865,3,"np.",python,selection_mouse +809,3132455,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",12865,8,"np.uint8",python,selection_mouse +810,3133421,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",12869,0,"",python,selection_mouse +811,3133422,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",12868,5,"uint8",python,selection_mouse +812,3133827,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",12867,6,".uint8",python,selection_mouse +813,3133828,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",12865,8,"np.uint8",python,selection_mouse +814,3134270,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",12865,0,"",python,selection_mouse +815,3134809,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",12853,0,"",python,selection_mouse +816,3134969,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",12843,14,"comparison_seq",python,selection_mouse +817,3156437,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",12838,0,"",python,selection_mouse +818,3158076,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",12444,0,"",python,selection_mouse +819,3164399,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",12444,1,"-",python,content +820,3165179,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",12445,0,"",python,selection_command +821,3166297,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",12445,1,">",python,content +822,3197085,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",12567,0,"",python,selection_mouse +823,3197499,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",12626,0,"",python,selection_mouse +824,3197669,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",12621,7,"asarray",python,selection_mouse +825,3199465,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",12632,0,"",python,selection_mouse +826,3199619,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",12629,6,"gt_seq",python,selection_mouse +827,3212111,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",12094,0,"",python,selection_mouse +828,3212165,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",12093,0,"",python,selection_command +829,3667530,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",12388,0,"",python,selection_mouse +830,3667550,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",12387,0,"",python,selection_command +831,3668296,"models/dynamics.py",0,0,"",python,tab +832,3668297,"models/dynamics.py",2329,0,"",python,selection_mouse 
+833,3668820,"models/dynamics.py",2393,0,"",python,selection_mouse +834,3669361,"models/dynamics.py",2392,0,"",python,selection_command +835,3674614,"models/dynamics.py",2181,0,"",python,selection_mouse +836,3675006,"utils/nn.py",0,0,"",python,tab +837,3698167,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",0,0,"",python,tab +838,3699853,"utils/nn.py",0,0,"",python,tab +839,3704857,"utils/nn.py",3621,0,"",python,selection_mouse +840,3704876,"utils/nn.py",3620,0,"",python,selection_command +841,3715890,"utils/nn.py",0,0,"",python,tab +842,3715891,"utils/nn.py",5771,0,"",python,selection_mouse +843,3716001,"utils/nn.py",5766,7,"out_dim",python,selection_mouse +844,3716643,"utils/nn.py",5751,0,"",python,selection_mouse +845,3825303,"utils/nn.py",0,0,"",python,tab +846,3825304,"utils/nn.py",3657,0,"",python,selection_mouse +847,3825339,"utils/nn.py",3656,0,"",python,selection_command +848,3890403,"utils/nn.py",3518,0,"",python,selection_mouse +849,3890414,"utils/nn.py",3517,0,"",python,selection_command +850,3891049,"utils/nn.py",3518,0,"",python,selection_mouse +851,3891055,"utils/nn.py",3517,0,"",python,selection_command +852,3891641,"utils/nn.py",3621,0,"",python,selection_mouse +853,3891643,"utils/nn.py",3620,0,"",python,selection_command +854,3892491,"utils/nn.py",3505,0,"",python,selection_mouse +855,3892502,"utils/nn.py",3504,0,"",python,selection_command +856,3893132,"utils/nn.py",3518,0,"",python,selection_mouse +857,3893153,"utils/nn.py",3517,0,"",python,selection_command +858,3894515,"utils/nn.py",3834,0,"",python,selection_mouse +859,3894520,"utils/nn.py",3833,0,"",python,selection_command +860,3895468,"utils/nn.py",3996,0,"",python,selection_mouse +861,3895487,"utils/nn.py",3995,0,"",python,selection_command +862,3896481,"utils/nn.py",3817,0,"",python,selection_mouse +863,3896482,"utils/nn.py",3816,0,"",python,selection_command +864,3897132,"utils/nn.py",3834,0,"",python,selection_mouse +865,3897150,"utils/nn.py",3833,0,"",python,selection_command +866,3898039,"utils/nn.py",4026,0,"",python,selection_mouse +867,3898041,"utils/nn.py",4025,0,"",python,selection_command +868,3898734,"utils/nn.py",3996,0,"",python,selection_mouse +869,3898737,"utils/nn.py",3995,0,"",python,selection_command +870,3899979,"utils/nn.py",3835,0,"",python,selection_mouse +871,3900653,"utils/nn.py",3834,0,"",python,selection_mouse +872,3900654,"utils/nn.py",3833,0,"",python,selection_command +873,4149741,"TERMINAL",0,0,"Step 482, loss: 4.3853254318237305\r\nStep 483, loss: 4.474046230316162\r\nStep 484, loss: 4.242269515991211\r\nStep 485, loss: 4.255805969238281\r\nStep 486, loss: 4.194794654846191\r\nStep 487, loss: 4.244462013244629\r\nStep 488, loss: 4.268708229064941\r\nStep 489, loss: 3.969525098800659\r\nStep 490, loss: 4.171164512634277\r\nStep 491, loss: 4.380814075469971\r\nStep 492, loss: 4.288428783416748\r\nStep 493, loss: 4.365719795227051\r\nStep 494, loss: 4.464323997497559\r\nStep 495, loss: 4.350237846374512\r\nStep 496, loss: 4.316108703613281\r\nStep 497, loss: 4.205101490020752\r\nStep 498, loss: 4.068417549133301\r\nStep 499, loss: 4.244319915771484\r\nStep 500, loss: 4.047806262969971\r\nStep 501, loss: 3.975944757461548\r\nStep 502, loss: 4.43609619140625\r\nStep 503, loss: 4.468508720397949\r\nStep 504, loss: 4.048294544219971\r\nStep 505, loss: 3.994170904159546\r\nStep 506, loss: 4.321256637573242\r\nStep 507, loss: 4.043905258178711\r\nStep 508, loss: 4.4178900718688965\r\nStep 509, loss: 4.065129280090332\r\nStep 510, loss: 
4.241158485412598\r\nStep 511, loss: 3.9915621280670166\r\nStep 512, loss: 3.9075660705566406\r\nStep 513, loss: 3.8637518882751465\r\nStep 514, loss: 3.8473100662231445\r\nStep 515, loss: 4.2350029945373535\r\nStep 516, loss: 3.786646604537964\r\nStep 517, loss: 3.9079995155334473\r\nStep 518, loss: 3.9554286003112793\r\nStep 519, loss: 3.867163896560669\r\nStep 520, loss: 3.8811142444610596\r\nStep 521, loss: 3.914848566055298\r\nStep 522, loss: 4.0971574783325195\r\nStep 523, loss: 3.88108491897583\r\nStep 524, loss: 3.814976930618286\r\nStep 525, loss: 3.887089967727661\r\nStep 526, loss: 4.043391227722168\r\nStep 527, loss: 3.730632781982422\r\nStep 528, loss: 3.6962013244628906\r\nStep 529, loss: 3.8378422260284424\r\nStep 530, loss: 3.835442543029785\r\nStep 531, loss: 3.5887434482574463\r\nStep 532, loss: 4.028667449951172\r\nStep 533, loss: 3.6937999725341797\r\nStep 534, loss: 4.025508403778076\r\nStep 535, loss: 3.7450599670410156\r\nStep 536, loss: 3.7843148708343506\r\nStep 537, loss: 3.6222667694091797\r\nStep 538, loss: 3.6854655742645264\r\nStep 539, loss: 3.70296311378479\r\nStep 540, loss: 3.2827460765838623\r\nStep 541, loss: 3.7216079235076904\r\nStep 542, loss: 3.5700554847717285\r\nStep 543, loss: 3.8967983722686768\r\nStep 544, loss: 3.790783643722534\r\nStep 545, loss: 3.584078550338745\r\nStep 546, loss: 3.5597901344299316\r\nStep 547, loss: 3.646446943283081\r\nStep 548, loss: 3.711361885070801\r\nStep 549, loss: 3.7877070903778076\r\nStep 550, loss: 3.465378522872925\r\nStep 551, loss: 3.707456588745117\r\nStep 552, loss: 3.5275001525878906\r\nStep 553, loss: 3.353496551513672\r\nStep 554, loss: 3.4623632431030273\r\nStep 555, loss: 3.475719451904297\r\nStep 556, loss: 3.495713949203491\r\nStep 557, loss: 3.4770467281341553\r\nStep 558, loss: 3.152740478515625\r\nStep 559, loss: 3.252735137939453\r\nStep 560, loss: 3.3104660511016846\r\nStep 561, loss: 3.5264663696289062\r\nStep 562, loss: 3.4836809635162354\r\nStep 563, loss: 3.15254807472229\r\nStep 564, loss: 3.3043899536132812\r\nStep 565, loss: 3.50072979927063\r\nStep 566, loss: 3.4052367210388184\r\nStep 567, loss: 3.131976842880249\r\nStep 568, loss: 3.2124147415161133\r\nStep 569, loss: 3.3213326930999756\r\nStep 570, loss: 3.091555595397949\r\nStep 571, loss: 3.2759578227996826\r\nStep 572, loss: 3.418632745742798\r\nStep 573, loss: 3.1149401664733887\r\nStep 574, loss: 3.0675806999206543\r\nStep 575, loss: 2.9732444286346436\r\nStep 576, loss: 3.1387381553649902\r\nStep 577, loss: 3.2389020919799805\r\nStep 578, loss: 3.1159331798553467\r\nStep 579, loss: 2.9998369216918945\r\nStep 580, loss: 3.088414430618286\r\nStep 581, loss: 2.9799458980560303\r\nStep 582, loss: 3.36187744140625\r\nStep 583, loss: 2.9302356243133545\r\nStep 584, loss: 3.3849270343780518\r\nStep 585, loss: 3.2034754753112793\r\nStep 586, loss: 2.928591728210449\r\nStep 587, loss: 3.0352797508239746\r\nStep 588, loss: 3.0135860443115234\r\nStep 589, loss: 3.0202817916870117\r\nStep 590, loss: 2.826688289642334\r\nStep 591, loss: 2.6751976013183594\r\nStep 592, loss: 3.218351364135742\r\nStep 593, loss: 3.181223154067993\r\nStep 594, loss: 3.014542818069458\r\nStep 595, loss: 2.643012762069702\r\nStep 596, loss: 2.905032157897949\r\nStep 597, loss: 2.6379575729370117\r\nStep 598, loss: 3.007197380065918\r\nStep 599, loss: 2.896688938140869\r\nStep 600, loss: 2.944932460784912\r\nStep 601, loss: 2.9037160873413086\r\nStep 602, loss: 2.7188162803649902\r\nStep 603, loss: 2.781883716583252\r\nStep 604, loss: 2.9003241062164307\r\nStep 
605, loss: 2.796058416366577\r\nStep 606, loss: 2.7689990997314453\r\nStep 607, loss: 2.46022629737854\r\nStep 608, loss: 2.457448959350586\r\nStep 609, loss: 2.344287633895874\r\nStep 610, loss: 2.6712772846221924\r\nStep 611, loss: 2.4444828033447266\r\nStep 612, loss: 2.5832462310791016\r\nStep 613, loss: 2.4971086978912354\r\nStep 614, loss: 2.454069137573242\r\nStep 615, loss: 2.696230411529541\r\nStep 616, loss: 2.6572659015655518\r\nStep 617, loss: 2.6498055458068848\r\nStep 618, loss: 2.5998013019561768\r\nStep 619, loss: 2.3630194664001465\r\nStep 620, loss: 2.4266152381896973\r\nStep 621, loss: 2.4623427391052246\r\nStep 622, loss: 2.321441888809204\r\nStep 623, loss: 2.4584178924560547\r\nStep 624, loss: 2.482469320297241\r\nStep 625, loss: 2.2834560871124268\r\nStep 626, loss: 2.3113930225372314\r\nStep 627, loss: 2.4526360034942627\r\nStep 628, loss: 2.2880873680114746\r\nStep 629, loss: 2.4126415252685547\r\nStep 630, loss: 2.3625783920288086\r\nStep 631, loss: 2.3061347007751465\r\nStep 632, loss: 2.405839204788208\r\nStep 633, loss: 2.464611291885376\r\nStep 634, loss: 2.31024169921875\r\nStep 635, loss: 2.0871760845184326\r\nStep 636, loss: 2.0073516368865967\r\nStep 637, loss: 2.0476179122924805\r\nStep 638, loss: 2.038602352142334\r\nStep 639, loss: 2.2984771728515625\r\nStep 640, loss: 2.206892967224121\r\nStep 641, loss: 2.29052734375\r\nStep 642, loss: 2.1445133686065674\r\nStep 643, loss: 2.1084718704223633\r\nStep 644, loss: 2.023301362991333\r\nStep 645, loss: 2.015864372253418\r\nStep 646, loss: 1.9396090507507324\r\nStep 647, loss: 1.9555383920669556\r\nStep 648, loss: 2.0622682571411133\r\nStep 649, loss: 1.9497393369674683\r\nStep 650, loss: 2.119673252105713\r\nStep 651, loss: 2.160611391067505\r\nStep 652, loss: 2.1908388137817383\r\nStep 653, loss: 1.8586710691452026\r\nStep 654, loss: 1.915079951286316\r\nStep 655, loss: 1.9191725254058838\r\nStep 656, loss: 1.8408746719360352\r\nStep 657, loss: 1.9316825866699219\r\nStep 658, loss: 1.6584789752960205\r\nStep 659, loss: 1.934404730796814\r\nStep 660, loss: 1.9520847797393799\r\nStep 661, loss: 1.6461926698684692\r\nStep 662, loss: 1.8641172647476196\r\nStep 663, loss: 2.0075435638427734\r\nStep 664, loss: 1.793962836265564\r\nStep 665, loss: 1.6933248043060303\r\nStep 666, loss: 1.562666654586792\r\nStep 667, loss: 1.7256025075912476\r\nStep 668, loss: 1.7440224885940552\r\nStep 669, loss: 1.8067965507507324\r\nStep 670, loss: 1.9205952882766724\r\nStep 671, loss: 1.7904599905014038\r\nStep 672, loss: 1.5283936262130737\r\nStep 673, loss: 1.7493884563446045\r\nStep 674, loss: 1.6520557403564453\r\nStep 675, loss: 1.7281827926635742\r\nStep 676, loss: 1.7933940887451172\r\nStep 677, loss: 1.6730395555496216\r\nStep 678, loss: 1.801688551902771\r\nStep 679, loss: 1.5686649084091187\r\nStep 680, loss: 1.623876690864563\r\nStep 681, loss: 1.581923246383667\r\nStep 682, loss: 1.5212339162826538\r\nStep 683, loss: 1.6720107793807983\r\nStep 684, loss: 1.5798733234405518\r\nStep 685, loss: 1.3882195949554443\r\nStep 686, loss: 1.5178817510604858\r\nStep 687, loss: 1.5441278219223022\r\nStep 688, loss: 1.4192278385162354\r\nStep 689, loss: 1.4919184446334839\r\nStep 690, loss: 1.4075671434402466\r\nStep 691, loss: 1.3063969612121582\r\nStep 692, loss: 1.600636601448059\r\nStep 693, loss: 1.2134226560592651\r\nStep 694, loss: 1.4430506229400635\r\nStep 695, loss: 1.3199187517166138\r\nStep 696, loss: 1.379400372505188\r\nStep 697, loss: 1.4107956886291504\r\nStep 698, loss: 1.2786378860473633\r\nStep 699, loss: 
1.2791872024536133\r\nStep 700, loss: 1.2918277978897095\r\nStep 701, loss: 1.3163681030273438\r\nStep 702, loss: 1.2354602813720703\r\nStep 703, loss: 1.2271771430969238\r\nStep 704, loss: 1.2237366437911987\r\nStep 705, loss: 1.256729245185852\r\nStep 706, loss: 1.2308158874511719\r\nStep 707, loss: 1.253267765045166\r\nStep 708, loss: 1.110314965248108\r\nStep 709, loss: 1.0858540534973145\r\nStep 710, loss: 1.1923928260803223\r\nStep 711, loss: 1.2343688011169434\r\nStep 712, loss: 1.1243330240249634\r\nStep 713, loss: 1.1487013101577759\r\nStep 714, loss: 1.1245248317718506\r\nStep 715, loss: 1.0461413860321045\r\nStep 716, loss: 1.1010693311691284\r\nStep 717, loss: 1.0119333267211914\r\nStep 718, loss: 0.9026587605476379\r\n
1.9205952882766724\r\nStep 671, loss: 1.7904599905014038\r\nStep 672, loss: 1.5283936262130737\r\nStep 673, loss: 1.7493884563446045\r\nStep 674, loss: 1.6520557403564453\r\nStep 675, loss: 1.7281827926635742\r\nStep 676, loss: 1.7933940887451172\r\nStep 677, loss: 1.6730395555496216\r\nStep 678, loss: 1.801688551902771\r\nStep 679, loss: 1.5686649084091187\r\nStep 680, loss: 1.623876690864563\r\nStep 681, loss: 1.581923246383667\r\nStep 682, loss: 1.5212339162826538\r\nStep 683, loss: 1.6720107793807983\r\nStep 684, loss: 1.5798733234405518\r\nStep 685, loss: 1.3882195949554443\r\nStep 686, loss: 1.5178817510604858\r\nStep 687, loss: 1.5441278219223022\r\nStep 601, loss: 2.9037160873413086\r\nStep 602, loss: 2.7188162803649902\r\nStep 603, loss: 2.781883716583252\r\nStep 604, loss: 2.9003241062164307\r\nStep 605, loss: 2.796058416366577\r\nStep 606, loss: 2.7689990997314453\r\nStep 607, loss: 2.46022629737854\r\nStep 608, loss: 2.457448959350586\r\nStep 609, loss: 2.344287633895874\r\nStep 610, loss: 2.6712772846221924\r\nStep 611, loss: 2.4444828033447266\r\nStep 612, loss: 2.5832462310791016\r\nStep 613, loss: 2.4971086978912354\r\nStep 614, loss: 2.454069137573242\r\nStep 615, loss: 2.696230411529541\r\nStep 616, loss: 2.6572659015655518\r\nStep 617, loss: 2.6498055458068848\r\nStep 618, loss: 2.5998013019561768\r\nStep 619, loss: 2.3630194664001465\r\nStep 620, loss: 2.4266152381896973\r\nStep 621, loss: 2.4623427391052246\r\nStep 622, loss: 2.321441888809204\r\nStep 623, loss: 2.4584178924560547\r\nStep 624, loss: 2.482469320297241\r\nStep 625, loss: 2.2834560871124268\r\nStep 626, loss: 2.3113930225372314\r\nStep 627, loss: 2.4526360034942627\r\nStep 628, loss: 2.2880873680114746\r\nStep 629, loss: 2.4126415252685547\r\nStep 688, loss: 1.4192278385162354\r\nStep 689, loss: 1.4919184446334839\r\nStep 690, loss: 1.4075671434402466\r\nStep 691, loss: 1.3063969612121582\r\nStep 692, loss: 1.600636601448059\r\nStep 693, loss: 1.2134226560592651\r\nStep 694, loss: 1.4430506229400635\r\nStep 695, loss: 1.3199187517166138\r\nStep 696, loss: 1.379400372505188\r\nStep 697, loss: 1.4107956886291504\r\nStep 698, loss: 1.2786378860473633\r\nStep 699, loss: 1.2791872024536133\r\nStep 700, loss: 1.2918277978897095\r\nStep 701, loss: 1.3163681030273438\r\nStep 702, loss: 1.2354602813720703\r\nStep 703, loss: 1.2271771430969238\r\nStep 704, loss: 1.2237366437911987\r\nStep 705, loss: 1.256729245185852\r\nStep 706, loss: 1.2308158874511719\r\nStep 707, loss: 1.253267765045166\r\nStep 708, loss: 1.110314965248108\r\nStep 709, loss: 1.0858540534973145\r\nStep 710, loss: 1.1923928260803223\r\nStep 711, loss: 1.2343688011169434\r\nStep 712, loss: 1.1243330240249634\r\nStep 713, loss: 1.1487013101577759\r\nStep 714, loss: 1.1245248317718506\r\nStep 715, loss: 1.0461413860321045\r\nStep 716, loss: 1.1010693311691284\r\nStep 630, loss: 2.3625783920288086\r\nStep 631, loss: 2.3061347007751465\r\nStep 632, loss: 2.405839204788208\r\nStep 633, loss: 2.464611291885376\r\nStep 634, loss: 2.31024169921875\r\nStep 635, loss: 2.0871760845184326\r\nStep 636, loss: 2.0073516368865967\r\nStep 637, loss: 2.0476179122924805\r\nStep 638, loss: 2.038602352142334\r\nStep 639, loss: 2.2984771728515625\r\nStep 640, loss: 2.206892967224121\r\nStep 641, loss: 2.29052734375\r\nStep 642, loss: 2.1445133686065674\r\nStep 643, loss: 2.1084718704223633\r\nStep 644, loss: 2.023301362991333\r\nStep 645, loss: 2.015864372253418\r\nStep 646, loss: 1.9396090507507324\r\nStep 647, loss: 1.9555383920669556\r\nStep 648, loss: 
2.0622682571411133\r\nStep 649, loss: 1.9497393369674683\r\nStep 650, loss: 2.119673252105713\r\nStep 651, loss: 2.160611391067505\r\nStep 652, loss: 2.1908388137817383\r\nStep 653, loss: 1.8586710691452026\r\nStep 654, loss: 1.915079951286316\r\nStep 655, loss: 1.9191725254058838\r\nStep 656, loss: 1.8408746719360352\r\nStep 657, loss: 1.9316825866699219\r\nStep 658, loss: 1.6584789752960205\r\nStep 717, loss: 1.0119333267211914\r\nStep 718, loss: 0.9026587605476379\r\nStep 659, loss: 1.934404730796814\r\nStep 660, loss: 1.9520847797393799\r\nStep 661, loss: 1.6461926698684692\r\nStep 662, loss: 1.8641172647476196\r\nStep 663, loss: 2.0075435638427734\r\nStep 664, loss: 1.793962836265564\r\nStep 665, loss: 1.6933248043060303\r\nStep 666, loss: 1.562666654586792\r\nStep 667, loss: 1.7256025075912476\r\nStep 668, loss: 1.7440224885940552\r\nStep 669, loss: 1.8067965507507324\r\nStep 670, loss: 1.9205952882766724\r\nStep 671, loss: 1.7904599905014038\r\nStep 672, loss: 1.5283936262130737\r\nStep 673, loss: 1.7493884563446045\r\nStep 674, loss: 1.6520557403564453\r\nStep 675, loss: 1.7281827926635742\r\nStep 676, loss: 1.7933940887451172\r\nStep 677, loss: 1.6730395555496216\r\nStep 678, loss: 1.801688551902771\r\nStep 679, loss: 1.5686649084091187\r\nStep 680, loss: 1.623876690864563\r\nStep 681, loss: 1.581923246383667\r\nStep 682, loss: 1.5212339162826538\r\nStep 683, loss: 1.6720107793807983\r\nStep 684, loss: 1.5798733234405518\r\nStep 685, loss: 1.3882195949554443\r\nStep 686, loss: 1.5178817510604858\r\nStep 687, loss: 1.5441278219223022\r\nStep 688, loss: 1.4192278385162354\r\nStep 689, loss: 1.4919184446334839\r\nStep 690, loss: 1.4075671434402466\r\nStep 691, loss: 1.3063969612121582\r\nStep 692, loss: 1.600636601448059\r\nStep 693, loss: 1.2134226560592651\r\nStep 694, loss: 1.4430506229400635\r\nStep 695, loss: 1.3199187517166138\r\nStep 696, loss: 1.379400372505188\r\nStep 697, loss: 1.4107956886291504\r\nStep 698, loss: 1.2786378860473633\r\nStep 699, loss: 1.2791872024536133\r\nStep 700, loss: 1.2918277978897095\r\nStep 701, loss: 1.3163681030273438\r\nStep 702, loss: 1.2354602813720703\r\nStep 703, loss: 1.2271771430969238\r\nStep 704, loss: 1.2237366437911987\r\nStep 705, loss: 1.256729245185852\r\nStep 706, loss: 1.2308158874511719\r\nStep 707, loss: 1.253267765045166\r\nStep 708, loss: 1.110314965248108\r\nStep 709, loss: 1.0858540534973145\r\nStep 710, loss: 1.1923928260803223\r\nStep 711, loss: 1.2343688011169434\r\nStep 712, loss: 1.1243330240249634\r\nStep 713, loss: 1.1487013101577759\r\nStep 714, loss: 1.1245248317718506\r\nStep 715, loss: 1.0461413860321045\r\nStep 716, loss: 1.1010693311691284\r\nStep 717, loss: 1.0119333267211914\r\nStep 718, loss: 0.9026587605476379\r\n",,terminal_output +874,4382537,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",0,0,"",python,tab +875,4391974,"utils/nn.py",0,0,"",python,tab +876,4395457,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",0,0,"",python,tab +877,4396373,"utils/nn.py",0,0,"",python,tab +878,4400805,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",0,0,"",python,tab +879,4409415,"utils/nn.py",0,0,"",python,tab +880,4409416,"utils/nn.py",6350,0,"",python,selection_mouse +881,4409534,"utils/nn.py",6349,0,"",python,selection_command +882,4412172,"TERMINAL",0,0,"bash",,terminal_focus +883,4422633,"utils/nn.py",6833,0,"",python,selection_mouse +884,4422636,"utils/nn.py",6832,0,"",python,selection_command 
+885,4423324,"utils/nn.py",6761,0,"",python,selection_mouse +886,4423350,"utils/nn.py",6760,0,"",python,selection_command +887,4423963,"utils/nn.py",6833,0,"",python,selection_mouse +888,4423970,"utils/nn.py",6832,0,"",python,selection_command +889,4424467,"utils/nn.py",6846,0,"",python,selection_mouse +890,4424495,"utils/nn.py",6845,0,"",python,selection_command +891,4425149,"utils/nn.py",6761,0,"",python,selection_mouse +892,4425156,"utils/nn.py",6760,0,"",python,selection_command +893,4425657,"utils/nn.py",6735,0,"",python,selection_mouse +894,4425659,"utils/nn.py",6734,0,"",python,selection_command +895,4426503,"utils/nn.py",6846,0,"",python,selection_mouse +896,4426506,"utils/nn.py",6845,0,"",python,selection_command +897,4445807,"utils/nn.py",6876,0,"",python,selection_mouse +898,4445844,"utils/nn.py",6875,0,"",python,selection_command +899,4446291,"utils/nn.py",6833,0,"",python,selection_mouse +900,4446293,"utils/nn.py",6832,0,"",python,selection_command +901,4447007,"utils/nn.py",6833,0,"",python,selection_mouse +902,4447016,"utils/nn.py",6832,0,"",python,selection_command +903,4447643,"utils/nn.py",6846,0,"",python,selection_mouse +904,4447662,"utils/nn.py",6845,0,"",python,selection_command +905,4461094,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",0,0,"",python,tab +906,4461095,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",13157,0,"",python,selection_mouse +907,4461124,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",13156,0,"",python,selection_command +908,4461658,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",12906,0,"",python,selection_mouse +909,4461694,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",12905,0,"",python,selection_command +910,4462357,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",12875,0,"",python,selection_mouse +911,4462380,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",12874,0,"",python,selection_command +912,4534356,"utils/nn.py",0,0,"",python,tab +913,4534358,"utils/nn.py",2004,0,"",python,selection_mouse +914,4534452,"utils/nn.py",1993,18,"MultiHeadAttention",python,selection_mouse +915,4539076,"utils/nn.py",1471,0,"",python,selection_mouse +916,4540885,"utils/nn.py",1466,0,"",python,selection_mouse +917,4543493,"utils/nn.py",1675,0,"",python,selection_mouse +918,4544682,"utils/nn.py",1674,0,"",python,selection_mouse +919,4579996,"utils/nn.py",2245,0,"",python,selection_mouse +920,4580017,"utils/nn.py",2244,0,"",python,selection_command +921,4581024,"utils/nn.py",2270,0,"",python,selection_mouse +922,4581576,"utils/nn.py",2262,0,"",python,selection_mouse +923,4581759,"utils/nn.py",2260,8,"swapaxes",python,selection_mouse +924,4582401,"utils/nn.py",2258,0,"",python,selection_mouse +925,4582531,"utils/nn.py",2258,1,"x",python,selection_mouse +926,4583078,"utils/nn.py",2275,0,"",python,selection_mouse +927,4583761,"utils/nn.py",2265,0,"",python,selection_mouse +928,4583905,"utils/nn.py",2260,8,"swapaxes",python,selection_mouse +929,4585357,"TERMINAL",0,0,"srun",,terminal_focus +930,4596403,"utils/nn.py",0,0,"",python,tab +931,4597811,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",0,0,"",python,tab +932,4604688,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",3233,0,"",python,selection_mouse 
+933,4605267,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",3138,0,"",python,selection_mouse +934,4606610,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",3057,0,"",python,selection_mouse +935,4612350,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",3165,0,"",python,selection_mouse +936,4613993,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",3128,0,"",python,selection_mouse +937,4614192,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",3126,5,"recon",python,selection_mouse +938,4632057,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",2995,0,"",python,selection_mouse +939,4632199,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",2989,12,"select_probs",python,selection_mouse +940,4638116,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",2957,0,"",python,selection_mouse +941,4639897,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",2704,0,"",python,selection_mouse +942,4639984,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",2699,7,"ce_loss",python,selection_mouse +943,4642807,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",2772,0,"",python,selection_mouse +944,4643128,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",2766,7,"outputs",python,selection_mouse +945,4644155,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",2794,0,"",python,selection_mouse +946,4644355,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",2791,7,"outputs",python,selection_mouse +947,4706812,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",2415,0,"",python,selection_mouse +948,4707301,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",3039,0,"",python,selection_mouse +949,4707968,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",2884,0,"",python,selection_mouse +950,4708539,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",3061,0,"",python,selection_mouse +951,4709245,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",3053,0,"",python,selection_mouse +952,4709971,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",3588,0,"",python,selection_mouse +953,4709974,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",3587,0,"",python,selection_command +954,4738488,"utils/nn.py",0,0,"",python,tab +955,4738489,"utils/nn.py",2196,0,"",python,selection_mouse +956,4738545,"utils/nn.py",2195,0,"",python,selection_command +957,4739183,"utils/nn.py",2225,0,"",python,selection_command +958,4739356,"utils/nn.py",2244,0,"",python,selection_command +959,4739523,"utils/nn.py",2273,0,"",python,selection_command +960,4739669,"utils/nn.py",2275,0,"",python,selection_command +961,4739879,"utils/nn.py",2304,0,"",python,selection_command +962,4740024,"utils/nn.py",2330,0,"",python,selection_command +963,4740193,"utils/nn.py",2360,0,"",python,selection_command +964,4740393,"utils/nn.py",2402,0,"",python,selection_command +965,4740535,"utils/nn.py",2415,0,"",python,selection_command 
+966,4740693,"utils/nn.py",2445,0,"",python,selection_command +967,4740853,"utils/nn.py",2577,0,"",python,selection_command +968,4741013,"utils/nn.py",2599,0,"",python,selection_command +969,4741251,"utils/nn.py",2629,0,"",python,selection_command +970,4741577,"utils/nn.py",2671,0,"",python,selection_command +971,4742124,"utils/nn.py",2684,0,"",python,selection_command +972,4742125,"utils/nn.py",2707,0,"",python,selection_command +973,4742241,"utils/nn.py",2725,0,"",python,selection_command +974,4742267,"utils/nn.py",2727,0,"",python,selection_command +975,4742267,"utils/nn.py",2743,0,"",python,selection_command +976,4742268,"utils/nn.py",2745,0,"",python,selection_command +977,4742474,"utils/nn.py",2774,0,"",python,selection_command +978,4742856,"utils/nn.py",2799,0,"",python,selection_command +979,4743186,"utils/nn.py",2816,0,"",python,selection_command +980,4743715,"utils/nn.py",2836,0,"",python,selection_command +981,4743766,"utils/nn.py",2855,0,"",python,selection_command +982,4743808,"utils/nn.py",2874,0,"",python,selection_command +983,4743809,"utils/nn.py",2901,0,"",python,selection_command +984,4743823,"utils/nn.py",2922,0,"",python,selection_command +985,4743866,"utils/nn.py",2924,0,"",python,selection_command +986,4743982,"utils/nn.py",2939,0,"",python,selection_command +987,4743983,"utils/nn.py",2969,0,"",python,selection_command +988,4743995,"utils/nn.py",3020,0,"",python,selection_command +989,4743995,"utils/nn.py",3062,0,"",python,selection_command +990,4744219,"utils/nn.py",3076,0,"",python,selection_command +991,4744467,"utils/nn.py",3106,0,"",python,selection_command +992,4749371,"genie.py",0,0,"",python,tab +993,4752555,"genie.py",2334,0,"",python,selection_mouse +994,4753122,"genie.py",2297,0,"",python,selection_mouse +995,4753125,"genie.py",2296,0,"",python,selection_command +996,4753790,"genie.py",1995,0,"",python,selection_mouse +997,4753959,"genie.py",1985,22,"DynamicsAutoregressive",python,selection_mouse +998,4754901,"genie.py",1988,0,"",python,selection_mouse +999,4754902,"genie.py",1985,22,"DynamicsAutoregressive",python,selection_mouse +1000,4755571,"genie.py",1988,0,"",python,selection_mouse +1001,4756248,"genie.py",1985,22,"DynamicsAutoregressive",python,selection_mouse +1002,4757202,"genie.py",1988,0,"",python,selection_mouse +1003,4759328,"genie.py",1985,22,"DynamicsAutoregressive",python,selection_mouse +1004,4760185,"genie.py",1992,0,"",python,selection_mouse +1005,4761509,"models/dynamics.py",0,0,"",python,tab +1006,4882969,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",0,0,"",python,tab +1007,4882970,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",2838,0,"",python,selection_mouse +1008,4883544,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",2845,0,"",python,selection_mouse +1009,4883660,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",2843,7,"ce_loss",python,selection_mouse +1010,4885076,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",2820,0,"",python,selection_mouse +1011,4885101,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",2819,0,"",python,selection_command +1012,4886274,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",2636,0,"",python,selection_mouse 
+1013,4886431,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",2634,12,"token_logits",python,selection_mouse +1014,4943126,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",4179,0,"",python,selection_mouse +1015,4944603,"models/dynamics.py",0,0,"",python,tab +1016,4944604,"models/dynamics.py",3032,0,"",python,selection_mouse +1017,4948591,"models/dynamics.py",3032,0,"#",python,content +1018,4948593,"models/dynamics.py",3033,0,"",python,selection_keyboard +1019,4948942,"models/dynamics.py",3033,0," ",python,content +1020,4948943,"models/dynamics.py",3034,0,"",python,selection_keyboard +1021,4949483,"models/dynamics.py",3034,0,"a",python,content +1022,4949484,"models/dynamics.py",3035,0,"",python,selection_keyboard +1023,4949606,"models/dynamics.py",3035,0," ",python,content +1024,4949607,"models/dynamics.py",3036,0,"",python,selection_keyboard +1025,4949905,"models/dynamics.py",3036,0,"b",python,content +1026,4949906,"models/dynamics.py",3037,0,"",python,selection_keyboard +1027,4950075,"models/dynamics.py",3037,0," ",python,content +1028,4950076,"models/dynamics.py",3038,0,"",python,selection_keyboard +1029,4950371,"models/dynamics.py",3038,0,"c",python,content +1030,4950372,"models/dynamics.py",3039,0,"",python,selection_keyboard +1031,4950558,"models/dynamics.py",3039,0," ",python,content +1032,4950559,"models/dynamics.py",3040,0,"",python,selection_keyboard +1033,4950908,"models/dynamics.py",3040,0,"f",python,content +1034,4950909,"models/dynamics.py",3041,0,"",python,selection_keyboard +1035,4951473,"models/dynamics.py",3040,1,"",python,content +1036,4951612,"models/dynamics.py",3040,0,"d",python,content +1037,4951614,"models/dynamics.py",3041,0,"",python,selection_keyboard +1038,4951888,"models/dynamics.py",3041,0,"\n",python,content +1039,4952729,"models/dynamics.py",3042,0,"#",python,content +1040,4952730,"models/dynamics.py",3043,0,"",python,selection_keyboard +1041,4953091,"models/dynamics.py",3043,0," ",python,content +1042,4953093,"models/dynamics.py",3044,0,"",python,selection_keyboard +1043,4954398,"models/dynamics.py",3044,0,"b",python,content +1044,4954400,"models/dynamics.py",3045,0,"",python,selection_keyboard +1045,4954543,"models/dynamics.py",3045,0," ",python,content +1046,4954545,"models/dynamics.py",3046,0,"",python,selection_keyboard +1047,4955907,"models/dynamics.py",3045,1,"",python,content +1048,4956015,"models/dynamics.py",3044,1,"",python,content +1049,4957824,"models/dynamics.py",3044,0,"b",python,content +1050,4957826,"models/dynamics.py",3045,0,"",python,selection_keyboard +1051,4957905,"models/dynamics.py",3045,0," ",python,content +1052,4957905,"models/dynamics.py",3046,0,"",python,selection_keyboard +1053,4958337,"models/dynamics.py",3046,0,"c",python,content +1054,4958338,"models/dynamics.py",3047,0,"",python,selection_keyboard +1055,4958426,"models/dynamics.py",3047,0," ",python,content +1056,4958428,"models/dynamics.py",3048,0,"",python,selection_keyboard +1057,4958759,"models/dynamics.py",3048,0,"d",python,content +1058,4958760,"models/dynamics.py",3049,0,"",python,selection_keyboard +1059,4958907,"models/dynamics.py",3049,0," ",python,content +1060,4958908,"models/dynamics.py",3050,0,"",python,selection_keyboard +1061,4964413,"models/dynamics.py",3040,0,"",python,selection_command +1062,4964744,"models/dynamics.py",3039,0,"",python,selection_command +1063,4964946,"models/dynamics.py",3038,0,"",python,selection_command 
+1064,4965141,"models/dynamics.py",3037,0,"",python,selection_command +1065,4965282,"models/dynamics.py",3036,0,"",python,selection_command +1066,4965722,"models/dynamics.py",3035,0,"",python,selection_command +1067,4965881,"models/dynamics.py",3034,0,"",python,selection_command +1068,4968753,"models/dynamics.py",3034,0,"b",python,content +1069,4968755,"models/dynamics.py",3035,0,"",python,selection_keyboard +1070,4968920,"models/dynamics.py",3035,0,"o",python,content +1071,4968921,"models/dynamics.py",3036,0,"",python,selection_keyboard +1072,4968975,"models/dynamics.py",3036,0,"s",python,content +1073,4968976,"models/dynamics.py",3037,0,"",python,selection_keyboard +1074,4969067,"models/dynamics.py",3037,0," ",python,content +1075,4969068,"models/dynamics.py",3038,0,"",python,selection_keyboard +1076,4969729,"models/dynamics.py",3052,0,"",python,selection_command +1077,4970200,"models/dynamics.py",3051,0,"",python,selection_command +1078,4971619,"models/dynamics.py",3048,0,"a",python,content +1079,4971621,"models/dynamics.py",3049,0,"",python,selection_keyboard +1080,4971655,"models/dynamics.py",3049,0," ",python,content +1081,4971656,"models/dynamics.py",3050,0,"",python,selection_keyboard +1082,4972118,"models/dynamics.py",3050,0," ",python,content +1083,4972119,"models/dynamics.py",3051,0,"",python,selection_keyboard +1084,4972282,"models/dynamics.py",3051,0," ",python,content +1085,4972283,"models/dynamics.py",3052,0,"",python,selection_keyboard +1086,4973539,"models/dynamics.py",3053,0,"",python,selection_command +1087,4973775,"models/dynamics.py",3054,0,"",python,selection_command +1088,4973962,"models/dynamics.py",3055,0,"",python,selection_command +1089,4974530,"models/dynamics.py",3054,0,"",python,selection_command +1090,4974806,"models/dynamics.py",3055,0,"",python,selection_command +1091,4974987,"models/dynamics.py",3056,0,"",python,selection_command +1092,4975158,"models/dynamics.py",3057,0,"",python,selection_command +1093,4975330,"models/dynamics.py",3058,0,"",python,selection_command +1094,4977364,"models/dynamics.py",3044,0,"",python,selection_command +1095,4977715,"models/dynamics.py",3045,0,"",python,selection_command +1096,4978256,"models/dynamics.py",3045,0," ",python,content +1097,4978257,"models/dynamics.py",3046,0,"",python,selection_keyboard +1098,4978603,"models/dynamics.py",3046,0,"e",python,content +1099,4978604,"models/dynamics.py",3047,0,"",python,selection_keyboard +1100,4978731,"models/dynamics.py",3047,0,"o",python,content +1101,4978732,"models/dynamics.py",3048,0,"",python,selection_keyboard +1102,4978826,"models/dynamics.py",3048,0,"s",python,content +1103,4978827,"models/dynamics.py",3049,0,"",python,selection_keyboard +1104,4979048,"models/dynamics.py",3062,0,"",python,selection_command +1105,4993331,"models/dynamics.py",3062,0,"e",python,content +1106,4993333,"models/dynamics.py",3063,0,"",python,selection_keyboard +1107,4994820,"models/dynamics.py",3063,0,"o",python,content +1108,4994821,"models/dynamics.py",3064,0,"",python,selection_keyboard +1109,4994866,"models/dynamics.py",3064,0,"s",python,content +1110,4994867,"models/dynamics.py",3065,0,"",python,selection_keyboard +1111,4995497,"models/dynamics.py",3047,0,"",python,selection_command +1112,4995814,"models/dynamics.py",3046,0,"",python,selection_command +1113,4996136,"models/dynamics.py",3046,0," ",python,content +1114,4996137,"models/dynamics.py",3047,0,"",python,selection_keyboard +1115,4996296,"models/dynamics.py",3047,0," ",python,content 
+1116,4996297,"models/dynamics.py",3048,0,"",python,selection_keyboard +1117,4997245,"models/dynamics.py",3047,1,"",python,content +1118,4997700,"models/dynamics.py",3047,0," ",python,content +1119,4997701,"models/dynamics.py",3048,0,"",python,selection_keyboard +1120,4998077,"models/dynamics.py",3049,0,"",python,selection_command +1121,4999083,"models/dynamics.py",3067,0,"",python,selection_command +1122,4999833,"models/dynamics.py",3067,0," ",python,content +1123,4999835,"models/dynamics.py",3068,0,"",python,selection_keyboard +1124,5040875,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",0,0,"",python,tab +1125,5040877,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",1345,0,"",python,selection_mouse +1126,5040889,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",1344,0,"",python,selection_command +1127,5041423,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",807,0,"",python,selection_mouse +1128,5041426,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",806,0,"",python,selection_command +1129,5042071,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",1152,0,"",python,selection_mouse +1130,5042633,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",807,0,"",python,selection_mouse +1131,5042640,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",806,0,"",python,selection_command +1132,5043468,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",885,0,"",python,selection_mouse +1133,5043483,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",884,0,"",python,selection_command +1134,5044019,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",866,0,"",python,selection_mouse +1135,5044035,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",865,0,"",python,selection_command +1136,5046425,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",1850,0,"",python,selection_mouse +1137,5046428,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",1849,0,"",python,selection_command +1138,5047323,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",1745,0,"",python,selection_mouse +1139,5047324,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",1744,0,"",python,selection_command +1140,5047802,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",1773,0,"",python,selection_mouse +1141,5047818,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",1772,0,"",python,selection_command +1142,5055505,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",1773,0,"\n ",python,content +1143,5056273,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",1778,0,"z",python,content +1144,5056274,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",1779,0,"",python,selection_keyboard +1145,5056370,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",1779,0,"s",python,content 
+1146,5056371,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",1780,0,"",python,selection_keyboard +1147,5056553,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",1780,0,"e",python,content +1148,5056554,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",1781,0,"",python,selection_keyboard +1149,5057107,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",1780,1,"",python,content +1150,5057184,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",1779,1,"",python,content +1151,5057365,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",1778,1,"",python,content +1152,5057906,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",1778,0,"u",python,content +1153,5057907,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",1779,0,"",python,selection_keyboard +1154,5057972,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",1779,0,"s",python,content +1155,5057973,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",1780,0,"",python,selection_keyboard +1156,5058177,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",1780,0,"e",python,content +1157,5058178,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",1781,0,"",python,selection_keyboard +1158,5058322,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",1781,0,"_",python,content +1159,5058323,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",1782,0,"",python,selection_keyboard +1160,5058685,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",1782,0,"m",python,content +1161,5058686,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",1783,0,"",python,selection_keyboard +1162,5058776,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",1783,0,"a",python,content +1163,5058777,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",1784,0,"",python,selection_keyboard +1164,5058917,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",1784,0,"s",python,content +1165,5058918,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",1785,0,"",python,selection_keyboard +1166,5058941,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",1785,0,"k",python,content +1167,5058942,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",1786,0,"",python,selection_keyboard +1168,5059261,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",1786,0,"g",python,content +1169,5059261,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",1787,0,"",python,selection_keyboard +1170,5059332,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",1787,0,"i",python,content +1171,5059333,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",1788,0,"",python,selection_keyboard +1172,5059423,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",1788,0,"t",python,content 
+1173,5059423,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",1789,0,"",python,selection_keyboard +1174,5060674,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",1789,0,":",python,content +1175,5060675,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",1790,0,"",python,selection_keyboard +1176,5060841,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",1790,0," ",python,content +1177,5060842,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",1791,0,"",python,selection_keyboard +1178,5061076,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",1791,0,"b",python,content +1179,5061077,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",1792,0,"",python,selection_keyboard +1180,5061279,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",1792,0,"o",python,content +1181,5061280,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",1793,0,"",python,selection_keyboard +1182,5061389,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",1793,0,"o",python,content +1183,5061390,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",1794,0,"",python,selection_keyboard +1184,5061584,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",1794,0,"l",python,content +1185,5061585,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",1795,0,"",python,selection_keyboard +1186,5062886,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",1795,0," ",python,content +1187,5062887,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",1796,0,"",python,selection_keyboard +1188,5063255,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",1796,0,"=",python,content +1189,5063256,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",1797,0,"",python,selection_keyboard +1190,5063335,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",1797,0," ",python,content +1191,5063336,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",1798,0,"",python,selection_keyboard +1192,5063856,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",1798,0,"F",python,content +1193,5063856,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",1799,0,"",python,selection_keyboard +1194,5064090,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",1799,0,"a",python,content +1195,5064091,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",1800,0,"",python,selection_keyboard +1196,5064188,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",1800,0,"l",python,content +1197,5064189,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",1801,0,"",python,selection_keyboard +1198,5064306,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",1801,0,"s",python,content +1199,5064307,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",1802,0,"",python,selection_keyboard 
+1200,5064507,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",1802,0,"e",python,content +1201,5064507,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",1803,0,"",python,selection_keyboard +1202,5064968,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",1798,5,"",python,content +1203,5066599,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",1798,0,"T",python,content +1204,5066600,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",1799,0,"",python,selection_keyboard +1205,5066922,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",1799,0,"r",python,content +1206,5066923,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",1800,0,"",python,selection_keyboard +1207,5067113,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",1800,0,"u",python,content +1208,5067114,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",1801,0,"",python,selection_keyboard +1209,5067562,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",1798,3,"True",python,content +1210,5068528,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",1782,0,"",python,selection_mouse +1211,5068677,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",1778,11,"use_maskgit",python,selection_mouse +1212,5082273,"slurm/jobs/mihir/horeka/yolo-runs/tester.sh",0,0,"",shellscript,tab +1213,5084491,"slurm/jobs/mihir/horeka/yolo-runs/tester.sh",792,0,"",shellscript,selection_mouse +1214,5084503,"slurm/jobs/mihir/horeka/yolo-runs/tester.sh",791,0,"",shellscript,selection_command +1215,5087633,"slurm/jobs/mihir/horeka/yolo-runs/tester.sh",792,0,"\n ",shellscript,content +1216,5089161,"slurm/jobs/mihir/horeka/yolo-runs/tester.sh",797,0,"-",shellscript,content +1217,5089163,"slurm/jobs/mihir/horeka/yolo-runs/tester.sh",798,0,"",shellscript,selection_keyboard +1218,5089280,"slurm/jobs/mihir/horeka/yolo-runs/tester.sh",798,0,"-",shellscript,content +1219,5089281,"slurm/jobs/mihir/horeka/yolo-runs/tester.sh",799,0,"",shellscript,selection_keyboard +1220,5089595,"slurm/jobs/mihir/horeka/yolo-runs/tester.sh",799,0,"u",shellscript,content +1221,5089596,"slurm/jobs/mihir/horeka/yolo-runs/tester.sh",800,0,"",shellscript,selection_keyboard +1222,5089606,"slurm/jobs/mihir/horeka/yolo-runs/tester.sh",800,0,"s",shellscript,content +1223,5089607,"slurm/jobs/mihir/horeka/yolo-runs/tester.sh",801,0,"",shellscript,selection_keyboard +1224,5089852,"slurm/jobs/mihir/horeka/yolo-runs/tester.sh",801,0,"e",shellscript,content +1225,5089853,"slurm/jobs/mihir/horeka/yolo-runs/tester.sh",802,0,"",shellscript,selection_keyboard +1226,5090533,"slurm/jobs/mihir/horeka/yolo-runs/tester.sh",802,0,"_",shellscript,content +1227,5090534,"slurm/jobs/mihir/horeka/yolo-runs/tester.sh",803,0,"",shellscript,selection_keyboard +1228,5091055,"slurm/jobs/mihir/horeka/yolo-runs/tester.sh",803,0,"m",shellscript,content +1229,5091056,"slurm/jobs/mihir/horeka/yolo-runs/tester.sh",804,0,"",shellscript,selection_keyboard +1230,5091174,"slurm/jobs/mihir/horeka/yolo-runs/tester.sh",804,0,"a",shellscript,content +1231,5091175,"slurm/jobs/mihir/horeka/yolo-runs/tester.sh",805,0,"",shellscript,selection_keyboard 
+1232,5091282,"slurm/jobs/mihir/horeka/yolo-runs/tester.sh",805,0,"s",shellscript,content +1233,5091283,"slurm/jobs/mihir/horeka/yolo-runs/tester.sh",806,0,"",shellscript,selection_keyboard +1234,5091471,"slurm/jobs/mihir/horeka/yolo-runs/tester.sh",806,0,"k",shellscript,content +1235,5091472,"slurm/jobs/mihir/horeka/yolo-runs/tester.sh",807,0,"",shellscript,selection_keyboard +1236,5092678,"slurm/jobs/mihir/horeka/yolo-runs/tester.sh",807,0,"g",shellscript,content +1237,5092678,"slurm/jobs/mihir/horeka/yolo-runs/tester.sh",808,0,"",shellscript,selection_keyboard +1238,5092861,"slurm/jobs/mihir/horeka/yolo-runs/tester.sh",808,0,"i",shellscript,content +1239,5092862,"slurm/jobs/mihir/horeka/yolo-runs/tester.sh",809,0,"",shellscript,selection_keyboard +1240,5092975,"slurm/jobs/mihir/horeka/yolo-runs/tester.sh",809,0,"t",shellscript,content +1241,5092975,"slurm/jobs/mihir/horeka/yolo-runs/tester.sh",810,0,"",shellscript,selection_keyboard +1242,5093513,"slurm/jobs/mihir/horeka/yolo-runs/tester.sh",810,0,"=",shellscript,content +1243,5093513,"slurm/jobs/mihir/horeka/yolo-runs/tester.sh",811,0,"",shellscript,selection_keyboard +1244,5094104,"slurm/jobs/mihir/horeka/yolo-runs/tester.sh",811,0,"F",shellscript,content +1245,5094105,"slurm/jobs/mihir/horeka/yolo-runs/tester.sh",812,0,"",shellscript,selection_keyboard +1246,5094353,"slurm/jobs/mihir/horeka/yolo-runs/tester.sh",812,0,"a",shellscript,content +1247,5094354,"slurm/jobs/mihir/horeka/yolo-runs/tester.sh",813,0,"",shellscript,selection_keyboard +1248,5094510,"slurm/jobs/mihir/horeka/yolo-runs/tester.sh",813,0,"l",shellscript,content +1249,5094511,"slurm/jobs/mihir/horeka/yolo-runs/tester.sh",814,0,"",shellscript,selection_keyboard +1250,5094601,"slurm/jobs/mihir/horeka/yolo-runs/tester.sh",814,0,"s",shellscript,content +1251,5094602,"slurm/jobs/mihir/horeka/yolo-runs/tester.sh",815,0,"",shellscript,selection_keyboard +1252,5094751,"slurm/jobs/mihir/horeka/yolo-runs/tester.sh",815,0,"e",shellscript,content +1253,5094752,"slurm/jobs/mihir/horeka/yolo-runs/tester.sh",816,0,"",shellscript,selection_keyboard +1254,5095088,"slurm/jobs/mihir/horeka/yolo-runs/tester.sh",815,1,"",shellscript,content +1255,5095235,"slurm/jobs/mihir/horeka/yolo-runs/tester.sh",814,1,"",shellscript,content +1256,5095386,"slurm/jobs/mihir/horeka/yolo-runs/tester.sh",813,1,"",shellscript,content +1257,5095539,"slurm/jobs/mihir/horeka/yolo-runs/tester.sh",812,1,"",shellscript,content +1258,5095675,"slurm/jobs/mihir/horeka/yolo-runs/tester.sh",811,1,"",shellscript,content +1259,5095838,"slurm/jobs/mihir/horeka/yolo-runs/tester.sh",811,0,"f",shellscript,content +1260,5095839,"slurm/jobs/mihir/horeka/yolo-runs/tester.sh",812,0,"",shellscript,selection_keyboard +1261,5096058,"slurm/jobs/mihir/horeka/yolo-runs/tester.sh",812,0,"a",shellscript,content +1262,5096059,"slurm/jobs/mihir/horeka/yolo-runs/tester.sh",813,0,"",shellscript,selection_keyboard +1263,5096120,"slurm/jobs/mihir/horeka/yolo-runs/tester.sh",813,0,"l",shellscript,content +1264,5096121,"slurm/jobs/mihir/horeka/yolo-runs/tester.sh",814,0,"",shellscript,selection_keyboard +1265,5096870,"slurm/jobs/mihir/horeka/yolo-runs/tester.sh",814,0,"e",shellscript,content +1266,5096871,"slurm/jobs/mihir/horeka/yolo-runs/tester.sh",815,0,"",shellscript,selection_keyboard +1267,5097218,"slurm/jobs/mihir/horeka/yolo-runs/tester.sh",814,1,"",shellscript,content +1268,5097303,"slurm/jobs/mihir/horeka/yolo-runs/tester.sh",814,0,"s",shellscript,content 
+1269,5097304,"slurm/jobs/mihir/horeka/yolo-runs/tester.sh",815,0,"",shellscript,selection_keyboard +1270,5097447,"slurm/jobs/mihir/horeka/yolo-runs/tester.sh",815,0,"e",shellscript,content +1271,5097448,"slurm/jobs/mihir/horeka/yolo-runs/tester.sh",816,0,"",shellscript,selection_keyboard +1272,5098537,"slurm/jobs/mihir/horeka/yolo-runs/tester.sh",811,5,"",shellscript,content +1273,5099196,"slurm/jobs/mihir/horeka/yolo-runs/tester.sh",810,1,"",shellscript,content +1274,5100103,"slurm/jobs/mihir/horeka/yolo-runs/tester.sh",799,11,"",shellscript,content +1275,5100300,"slurm/jobs/mihir/horeka/yolo-runs/tester.sh",797,2,"",shellscript,content +1276,5100704,"slurm/jobs/mihir/horeka/yolo-runs/tester.sh",793,4,"",shellscript,content +1277,5100878,"slurm/jobs/mihir/horeka/yolo-runs/tester.sh",792,1,"",shellscript,content +1278,5104964,"slurm/jobs/mihir/horeka/yolo-runs/tester.sh",0,0,"",shellscript,tab +1279,5105811,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",0,0,"",python,tab +1280,5107243,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",1800,0,"",python,selection_mouse +1281,5107381,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",1798,4,"True",python,selection_mouse +1282,5108264,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",1798,4,"F",python,content +1283,5108265,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",1799,0,"",python,selection_keyboard +1284,5108580,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",1799,0,"a",python,content +1285,5108581,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",1800,0,"",python,selection_keyboard +1286,5108784,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",1800,0,"l",python,content +1287,5108785,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",1801,0,"",python,selection_keyboard +1288,5109052,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",1798,3,"False",python,content +1289,5109736,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",1802,0,"",python,selection_command +1290,5112364,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",2738,0,"",python,selection_mouse +1291,5113728,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",5104,0,"",python,selection_mouse +1292,5115681,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",5261,0,"",python,selection_mouse +1293,5116758,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",6163,0,"",python,selection_mouse +1294,5117243,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",6205,0,"",python,selection_mouse +1295,5117285,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",6204,0,"",python,selection_command +1296,5118221,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",6099,0,"",python,selection_mouse +1297,5118226,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",6098,0,"",python,selection_command +1298,5118985,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",6135,0,"",python,selection_mouse 
+1299,5118991,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",6134,0,"",python,selection_command +1300,5119954,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",6135,0,"\n ",python,content +1301,5120521,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",6144,0,"u",python,content +1302,5120522,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",6145,0,"",python,selection_keyboard +1303,5120659,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",6145,0,"s",python,content +1304,5120660,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",6146,0,"",python,selection_keyboard +1305,5120838,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",6146,0,"e",python,content +1306,5120839,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",6147,0,"",python,selection_keyboard +1307,5121604,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",6147,0,"_",python,content +1308,5121605,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",6148,0,"",python,selection_keyboard +1309,5122249,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",6148,0,"m",python,content +1310,5122250,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",6149,0,"",python,selection_keyboard +1311,5122350,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",6149,0,"a",python,content +1312,5122351,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",6150,0,"",python,selection_keyboard +1313,5122437,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",6150,0,"s",python,content +1314,5122438,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",6151,0,"",python,selection_keyboard +1315,5122508,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",6151,0,"k",python,content +1316,5122509,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",6152,0,"",python,selection_keyboard +1317,5122919,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",6152,0,"g",python,content +1318,5122920,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",6153,0,"",python,selection_keyboard +1319,5123082,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",6153,0,"i",python,content +1320,5123083,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",6154,0,"",python,selection_keyboard +1321,5123548,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",6154,0,"t",python,content +1322,5123549,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",6155,0,"",python,selection_keyboard +1323,5123981,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",6155,0,"=",python,content +1324,5123982,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",6156,0,"",python,selection_keyboard +1325,5124616,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",6156,0,"u",python,content 
+1326,5124617,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",6157,0,"",python,selection_keyboard +1327,5124804,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",6157,0,"s",python,content +1328,5124805,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",6158,0,"",python,selection_keyboard +1329,5125041,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",6158,0,"e",python,content +1330,5125042,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",6159,0,"",python,selection_keyboard +1331,5125505,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",6159,0,"_",python,content +1332,5125506,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",6160,0,"",python,selection_keyboard +1333,5125768,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",6160,0,"m",python,content +1334,5125769,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",6161,0,"",python,selection_keyboard +1335,5125878,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",6161,0,"a",python,content +1336,5125879,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",6162,0,"",python,selection_keyboard +1337,5125935,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",6162,0,"s",python,content +1338,5125938,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",6163,0,"",python,selection_keyboard +1339,5126023,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",6163,0,"k",python,content +1340,5126024,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",6164,0,"",python,selection_keyboard +1341,5126340,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",6164,0,"g",python,content +1342,5126341,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",6165,0,"",python,selection_keyboard +1343,5126443,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",6165,0,"i",python,content +1344,5126444,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",6166,0,"",python,selection_keyboard +1345,5126555,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",6166,0,"t",python,content +1346,5126555,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",6167,0,"",python,selection_keyboard +1347,5127300,"TERMINAL",0,0,"Step 719, loss: 1.220973014831543\r\nStep 720, loss: 0.9833794236183167\r\nStep 721, loss: 1.0190064907073975\r\nStep 722, loss: 1.0484683513641357\r\nStep 723, loss: 0.8690718412399292\r\nStep 724, loss: 1.0035357475280762\r\nStep 725, loss: 0.8748283982276917\r\nStep 726, loss: 0.8470965027809143\r\nStep 727, loss: 0.9299373626708984\r\nStep 728, loss: 0.8637204766273499\r\nStep 729, loss: 0.8922073245048523\r\nStep 730, loss: 0.9801859259605408\r\nStep 731, loss: 0.8642475008964539\r\nStep 732, loss: 0.9375776052474976\r\nStep 733, loss: 0.8263726830482483\r\nStep 734, loss: 0.9256225228309631\r\nStep 735, loss: 0.8987894058227539\r\nStep 736, loss: 0.7645560503005981\r\nStep 737, loss: 0.7850275635719299\r\nStep 738, loss: 0.8330867290496826\r\nStep 739, 
loss: 0.8554867506027222\r\nStep 740, loss: 0.7485871911048889\r\nStep 741, loss: 0.8861126899719238\r\nStep 742, loss: 0.7585428357124329\r\nStep 743, loss: 0.655667245388031\r\nStep 744, loss: 0.6370968222618103\r\nStep 745, loss: 0.6907960772514343\r\nStep 746, loss: 0.7447969913482666\r\nStep 747, loss: 0.7245917916297913\r\nStep 748, loss: 0.664436399936676\r\nStep 749, loss: 0.648061215877533\r\nStep 750, loss: 0.7264807820320129\r\nStep 751, loss: 0.6756706237792969\r\nStep 752, loss: 0.6391420960426331\r\nStep 753, loss: 0.7257218956947327\r\nStep 754, loss: 0.7944856286048889\r\nStep 755, loss: 0.734170138835907\r\nStep 756, loss: 0.6503342390060425\r\nStep 757, loss: 0.6048266887664795\r\nStep 758, loss: 0.4937676787376404\r\nStep 759, loss: 0.5998534560203552\r\nStep 760, loss: 0.5442175269126892\r\nStep 761, loss: 0.5713749527931213\r\nStep 762, loss: 0.5717068314552307\r\nStep 763, loss: 0.4830510914325714\r\nStep 764, loss: 0.5118291974067688\r\nStep 765, loss: 0.5356676578521729\r\nStep 766, loss: 0.5298662781715393\r\nStep 767, loss: 0.51924729347229\r\nStep 768, loss: 0.5166446566581726\r\nStep 769, loss: 0.48934441804885864\r\nStep 770, loss: 0.4826201796531677\r\nStep 771, loss: 0.49853208661079407\r\nStep 772, loss: 0.4026469886302948\r\nStep 773, loss: 0.41336214542388916\r\nStep 774, loss: 0.4546590745449066\r\nStep 775, loss: 0.4068276286125183\r\nStep 776, loss: 0.4530925154685974\r\nStep 777, loss: 0.4294380247592926\r\nStep 778, loss: 0.3810833692550659\r\nStep 779, loss: 0.424375057220459\r\nStep 780, loss: 0.43166396021842957\r\nStep 781, loss: 0.35067734122276306\r\nStep 782, loss: 0.3669264018535614\r\nStep 783, loss: 0.3237971365451813\r\nStep 784, loss: 0.3336354196071625\r\nStep 785, loss: 0.2884513735771179\r\nStep 786, loss: 0.3607689440250397\r\nStep 787, loss: 0.3404114246368408\r\nStep 788, loss: 0.3231687545776367\r\nStep 789, loss: 0.32390743494033813\r\nStep 790, loss: 0.2996169924736023\r\nStep 791, loss: 0.2855353057384491\r\nStep 792, loss: 0.29889044165611267\r\nStep 793, loss: 0.3160989582538605\r\nStep 794, loss: 0.29843971133232117\r\nStep 795, loss: 0.29880401492118835\r\nStep 796, loss: 0.31128159165382385\r\nStep 797, loss: 0.21979154646396637\r\nStep 798, loss: 0.24554967880249023\r\nStep 799, loss: 0.28617969155311584\r\nStep 800, loss: 0.27300775051116943\r\nStep 801, loss: 0.2160603404045105\r\nStep 802, loss: 0.22639624774456024\r\nStep 803, loss: 0.23033708333969116\r\nStep 804, loss: 0.2188936322927475\r\nStep 805, loss: 0.22509171068668365\r\nStep 806, loss: 0.21209244430065155\r\nStep 807, loss: 0.2119956761598587\r\nStep 808, loss: 0.1925811618566513\r\nStep 809, loss: 0.18950898945331573\r\nStep 810, loss: 0.18069443106651306\r\nStep 811, loss: 0.1602306216955185\r\nStep 812, loss: 0.1660311371088028\r\nStep 813, loss: 0.20090986788272858\r\nStep 814, loss: 0.14590783417224884\r\nStep 815, loss: 0.176430344581604\r\nStep 816, loss: 0.14476066827774048\r\nStep 817, loss: 0.16519533097743988\r\nStep 818, loss: 0.1586688607931137\r\nStep 819, loss: 0.133210226893425\r\nStep 820, loss: 0.16687864065170288\r\nStep 821, loss: 0.13211916387081146\r\nStep 822, loss: 0.1396285444498062\r\nStep 823, loss: 0.1226123720407486\r\nStep 824, loss: 0.12260997295379639\r\nStep 825, loss: 0.10799338668584824\r\nStep 826, loss: 0.12551075220108032\r\nStep 827, loss: 0.12864422798156738\r\nStep 828, loss: 0.12160828709602356\r\nStep 829, loss: 0.11189549416303635\r\nStep 830, loss: 0.11334454268217087\r\nStep 831, loss: 0.11749739199876785\r\nStep 
832, loss: 0.10241300612688065\r\nStep 833, loss: 0.09917576611042023\r\nStep 834, loss: 0.09489981085062027\r\nStep 835, loss: 0.10510732978582382\r\nStep 836, loss: 0.09771240502595901\r\nStep 837, loss: 0.09724751859903336\r\nStep 838, loss: 0.0845051035284996\r\nStep 839, loss: 0.0796673446893692\r\nStep 840, loss: 0.07855331897735596\r\nStep 841, loss: 0.08152555674314499\r\nStep 842, loss: 0.06811615079641342\r\nStep 843, loss: 0.0750921368598938\r\nStep 844, loss: 0.06377741694450378\r\nStep 845, loss: 0.05747470259666443\r\nStep 846, loss: 0.07420796155929565\r\nStep 847, loss: 0.05876259505748749\r\nStep 848, loss: 0.06456019729375839\r\nStep 849, loss: 0.059282127767801285\r\nStep 850, loss: 0.060376886278390884\r\nStep 851, loss: 0.05626952275633812\r\nStep 852, loss: 0.059031061828136444\r\nStep 853, loss: 0.053722843527793884\r\nStep 854, loss: 0.06425199657678604\r\nStep 855, loss: 0.0529995933175087\r\nStep 856, loss: 0.04632771015167236\r\nStep 857, loss: 0.039598628878593445\r\nStep 858, loss: 0.040767405182123184\r\nStep 859, loss: 0.037018269300460815\r\nStep 860, loss: 0.04212583228945732\r\nStep 861, loss: 0.034677814692258835\r\nStep 862, loss: 0.03315940499305725\r\nStep 863, loss: 0.03647122532129288\r\nStep 864, loss: 0.032859236001968384\r\nStep 865, loss: 0.030932841822504997\r\nStep 866, loss: 0.025249309837818146\r\nStep 867, loss: 0.03187650814652443\r\nStep 868, loss: 0.028432000428438187\r\nStep 869, loss: 0.024833550676703453\r\nStep 870, loss: 0.02209923230111599\r\nStep 871, loss: 0.028132932260632515\r\nStep 872, loss: 0.02348645031452179\r\nStep 873, loss: 0.024038558825850487\r\nStep 874, loss: 0.023453161120414734\r\nStep 875, loss: 0.024175075814127922\r\nStep 876, loss: 0.02278744988143444\r\nStep 877, loss: 0.018196746706962585\r\nStep 878, loss: 0.020178772509098053\r\nStep 879, loss: 0.0219156201928854\r\nStep 880, loss: 0.016626497730612755\r\nStep 881, loss: 0.01572035253047943\r\nStep 882, loss: 0.0183713361620903\r\nStep 883, loss: 0.015464250929653645\r\nStep 884, loss: 0.018759163096547127\r\nStep 885, loss: 0.013734908774495125\r\nStep 886, loss: 0.01374769490212202\r\nStep 887, loss: 0.01274916809052229\r\nStep 888, loss: 0.012545258738100529\r\nStep 889, loss: 0.01247946172952652\r\nStep 890, loss: 0.011726918630301952\r\nStep 891, loss: 0.011799908243119717\r\nStep 892, loss: 0.011401084251701832\r\nStep 893, loss: 0.01146245002746582\r\nStep 894, loss: 0.011753150261938572\r\nStep 895, loss: 0.009770526550710201\r\nStep 896, loss: 0.00918026827275753\r\nStep 897, loss: 0.009717998094856739\r\nStep 898, loss: 0.010698080994188786\r\nStep 899, loss: 0.011061342433094978\r\nStep 900, loss: 0.008935529738664627\r\nStep 901, loss: 0.006869297008961439\r\nStep 902, loss: 0.006277004256844521\r\nStep 903, loss: 0.008669532835483551\r\nStep 904, loss: 0.006541823968291283\r\nStep 905, loss: 0.0066695669665932655\r\nStep 906, loss: 0.00531776575371623\r\nStep 907, loss: 0.0049531166441738605\r\nStep 908, loss: 0.006072796415537596\r\nStep 909, loss: 0.006032004952430725\r\nStep 910, loss: 0.004231479484587908\r\nStep 911, loss: 0.006922027561813593\r\nStep 912, loss: 0.0051920753903687\r\nStep 913, loss: 0.005717420019209385\r\nStep 914, loss: 0.0050996956415474415\r\nStep 915, loss: 0.0047828759998083115\r\nStep 916, loss: 0.004346886184066534\r\nStep 917, loss: 0.004139788914471865\r\nStep 918, loss: 0.0035523693077266216\r\nStep 919, loss: 0.0033755891490727663\r\nStep 920, loss: 0.003731628879904747\r\nStep 921, loss: 
0.0035391338169574738\r\nStep 922, loss: 0.002928556641563773\r\nStep 923, loss: 0.003112994134426117\r\nStep 924, loss: 0.0037353208754211664\r\nStep 925, loss: 0.0028575067408382893\r\nStep 926, loss: 0.0026243829634040594\r\nStep 927, loss: 0.002966775093227625\r\nStep 928, loss: 0.0025999462231993675\r\nStep 929, loss: 0.0024162132758647203\r\nStep 930, loss: 0.0025764652527868748\r\nStep 931, loss: 0.0019511310383677483\r\nStep 932, loss: 0.002274678787216544\r\nStep 933, loss: 0.0023877897765487432\r\nStep 934, loss: 0.0020910671446472406\r\nStep 935, loss: 0.0020653163082897663\r\nStep 936, loss: 0.0020628725178539753\r\nStep 937, loss: 0.0019124513491988182\r\nStep 938, loss: 0.0016171099850907922\r\nStep 939, loss: 0.0015059879515320063\r\nStep 940, loss: 0.0014522024430334568\r\nStep 941, loss: 0.0015267037088051438\r\nStep 942, loss: 0.0015720752999186516\r\nStep 943, loss: 0.001392936916090548\r\nStep 944, loss: 0.0016334506217390299\r\nStep 945, loss: 0.0012783808633685112\r\nStep 719, loss: 1.220973014831543\r\nStep 720, loss: 0.9833794236183167\r\nStep 721, loss: 1.0190064907073975\r\nStep 722, loss: 1.0484683513641357\r\nStep 723, loss: 0.8690718412399292\r\nStep 724, loss: 1.0035357475280762\r\nStep 725, loss: 0.8748283982276917\r\nStep 726, loss: 0.8470965027809143\r\nStep 727, loss: 0.9299373626708984\r\nStep 728, loss: 0.8637204766273499\r\nStep 729, loss: 0.8922073245048523\r\nStep 730, loss: 0.9801859259605408\r\nStep 731, loss: 0.8642475008964539\r\nStep 732, loss: 0.9375776052474976\r\nStep 733, loss: 0.8263726830482483\r\nStep 734, loss: 0.9256225228309631\r\nStep 735, loss: 0.8987894058227539\r\nStep 736, loss: 0.7645560503005981\r\nStep 737, loss: 0.7850275635719299\r\nStep 738, loss: 0.8330867290496826\r\nStep 739, loss: 0.8554867506027222\r\nStep 740, loss: 0.7485871911048889\r\nStep 741, loss: 0.8861126899719238\r\nStep 742, loss: 0.7585428357124329\r\nStep 743, loss: 0.655667245388031\r\nStep 744, loss: 0.6370968222618103\r\nStep 745, loss: 0.6907960772514343\r\nStep 746, loss: 0.7447969913482666\r\nStep 747, loss: 0.7245917916297913\r\nStep 748, loss: 0.664436399936676\r\nStep 749, loss: 0.648061215877533\r\nStep 750, loss: 0.7264807820320129\r\nStep 751, loss: 0.6756706237792969\r\nStep 752, loss: 0.6391420960426331\r\nStep 753, loss: 0.7257218956947327\r\nStep 754, loss: 0.7944856286048889\r\nStep 755, loss: 0.734170138835907\r\nStep 756, loss: 0.6503342390060425\r\nStep 757, loss: 0.6048266887664795\r\nStep 758, loss: 0.4937676787376404\r\nStep 759, loss: 0.5998534560203552\r\nStep 760, loss: 0.5442175269126892\r\nStep 761, loss: 0.5713749527931213\r\nStep 762, loss: 0.5717068314552307\r\nStep 763, loss: 0.4830510914325714\r\nStep 764, loss: 0.5118291974067688\r\nStep 765, loss: 0.5356676578521729\r\nStep 766, loss: 0.5298662781715393\r\nStep 767, loss: 0.51924729347229\r\nStep 768, loss: 0.5166446566581726\r\nStep 769, loss: 0.48934441804885864\r\nStep 770, loss: 0.4826201796531677\r\nStep 771, loss: 0.49853208661079407\r\nStep 772, loss: 0.4026469886302948\r\nStep 773, loss: 0.41336214542388916\r\nStep 774, loss: 0.4546590745449066\r\nStep 775, loss: 0.4068276286125183\r\nStep 776, loss: 0.4530925154685974\r\nStep 777, loss: 0.4294380247592926\r\nStep 778, loss: 0.3810833692550659\r\nStep 779, loss: 0.424375057220459\r\nStep 780, loss: 0.43166396021842957\r\nStep 781, loss: 0.35067734122276306\r\nStep 782, loss: 0.3669264018535614\r\nStep 783, loss: 0.3237971365451813\r\nStep 784, loss: 0.3336354196071625\r\nStep 785, loss: 0.2884513735771179\r\nStep 
786, loss: 0.3607689440250397\r\nStep 787, loss: 0.3404114246368408\r\nStep 788, loss: 0.3231687545776367\r\nStep 789, loss: 0.32390743494033813\r\nStep 790, loss: 0.2996169924736023\r\nStep 791, loss: 0.2855353057384491\r\nStep 792, loss: 0.29889044165611267\r\nStep 793, loss: 0.3160989582538605\r\nStep 794, loss: 0.29843971133232117\r\nStep 795, loss: 0.29880401492118835\r\nStep 796, loss: 0.31128159165382385\r\nStep 797, loss: 0.21979154646396637\r\nStep 798, loss: 0.24554967880249023\r\nStep 799, loss: 0.28617969155311584\r\nStep 800, loss: 0.27300775051116943\r\nStep 801, loss: 0.2160603404045105\r\nStep 802, loss: 0.22639624774456024\r\nStep 803, loss: 0.23033708333969116\r\nStep 804, loss: 0.2188936322927475\r\nStep 805, loss: 0.22509171068668365\r\nStep 806, loss: 0.21209244430065155\r\nStep 807, loss: 0.2119956761598587\r\nStep 808, loss: 0.1925811618566513\r\nStep 809, loss: 0.18950898945331573\r\nStep 810, loss: 0.18069443106651306\r\nStep 811, loss: 0.1602306216955185\r\nStep 812, loss: 0.1660311371088028\r\nStep 813, loss: 0.20090986788272858\r\nStep 814, loss: 0.14590783417224884\r\nStep 815, loss: 0.176430344581604\r\nStep 816, loss: 0.14476066827774048\r\nStep 817, loss: 0.16519533097743988\r\nStep 818, loss: 0.1586688607931137\r\nStep 819, loss: 0.133210226893425\r\nStep 820, loss: 0.16687864065170288\r\nStep 821, loss: 0.13211916387081146\r\nStep 822, loss: 0.1396285444498062\r\nStep 823, loss: 0.1226123720407486\r\nStep 824, loss: 0.12260997295379639\r\nStep 825, loss: 0.10799338668584824\r\nStep 826, loss: 0.12551075220108032\r\nStep 827, loss: 0.12864422798156738\r\nStep 828, loss: 0.12160828709602356\r\nStep 829, loss: 0.11189549416303635\r\nStep 830, loss: 0.11334454268217087\r\nStep 831, loss: 0.11749739199876785\r\nStep 832, loss: 0.10241300612688065\r\nStep 833, loss: 0.09917576611042023\r\nStep 834, loss: 0.09489981085062027\r\nStep 835, loss: 0.10510732978582382\r\nStep 836, loss: 0.09771240502595901\r\nStep 837, loss: 0.09724751859903336\r\nStep 838, loss: 0.0845051035284996\r\nStep 839, loss: 0.0796673446893692\r\nStep 840, loss: 0.07855331897735596\r\nStep 841, loss: 0.08152555674314499\r\nStep 842, loss: 0.06811615079641342\r\nStep 843, loss: 0.0750921368598938\r\nStep 844, loss: 0.06377741694450378\r\nStep 845, loss: 0.05747470259666443\r\nStep 846, loss: 0.07420796155929565\r\nStep 847, loss: 0.05876259505748749\r\nStep 848, loss: 0.06456019729375839\r\nStep 849, loss: 0.059282127767801285\r\nStep 850, loss: 0.060376886278390884\r\nStep 851, loss: 0.05626952275633812\r\nStep 852, loss: 0.059031061828136444\r\nStep 853, loss: 0.053722843527793884\r\nStep 854, loss: 0.06425199657678604\r\nStep 855, loss: 0.0529995933175087\r\nStep 856, loss: 0.04632771015167236\r\nStep 857, loss: 0.039598628878593445\r\nStep 858, loss: 0.040767405182123184\r\nStep 859, loss: 0.037018269300460815\r\nStep 860, loss: 0.04212583228945732\r\nStep 861, loss: 0.034677814692258835\r\nStep 862, loss: 0.03315940499305725\r\nStep 863, loss: 0.03647122532129288\r\nStep 864, loss: 0.032859236001968384\r\nStep 865, loss: 0.030932841822504997\r\nStep 866, loss: 0.025249309837818146\r\nStep 867, loss: 0.03187650814652443\r\nStep 868, loss: 0.028432000428438187\r\nStep 869, loss: 0.024833550676703453\r\nStep 870, loss: 0.02209923230111599\r\nStep 871, loss: 0.028132932260632515\r\nStep 872, loss: 0.02348645031452179\r\nStep 873, loss: 0.024038558825850487\r\nStep 874, loss: 0.023453161120414734\r\nStep 875, loss: 0.024175075814127922\r\nStep 876, loss: 0.02278744988143444\r\nStep 877, loss: 
0.018196746706962585\r\nStep 878, loss: 0.020178772509098053\r\nStep 879, loss: 0.0219156201928854\r\nStep 880, loss: 0.016626497730612755\r\nStep 881, loss: 0.01572035253047943\r\nStep 882, loss: 0.0183713361620903\r\nStep 883, loss: 0.015464250929653645\r\nStep 884, loss: 0.018759163096547127\r\nStep 885, loss: 0.013734908774495125\r\nStep 886, loss: 0.01374769490212202\r\nStep 887, loss: 0.01274916809052229\r\nStep 888, loss: 0.012545258738100529\r\nStep 889, loss: 0.01247946172952652\r\nStep 890, loss: 0.011726918630301952\r\nStep 891, loss: 0.011799908243119717\r\nStep 892, loss: 0.011401084251701832\r\nStep 893, loss: 0.01146245002746582\r\nStep 894, loss: 0.011753150261938572\r\nStep 895, loss: 0.009770526550710201\r\nStep 896, loss: 0.00918026827275753\r\nStep 897, loss: 0.009717998094856739\r\nStep 898, loss: 0.010698080994188786\r\nStep 899, loss: 0.011061342433094978\r\nStep 900, loss: 0.008935529738664627\r\nStep 901, loss: 0.006869297008961439\r\nStep 902, loss: 0.006277004256844521\r\nStep 903, loss: 0.008669532835483551\r\nStep 904, loss: 0.006541823968291283\r\nStep 905, loss: 0.0066695669665932655\r\nStep 906, loss: 0.00531776575371623\r\nStep 907, loss: 0.0049531166441738605\r\nStep 908, loss: 0.006072796415537596\r\nStep 909, loss: 0.006032004952430725\r\nStep 910, loss: 0.004231479484587908\r\nStep 911, loss: 0.006922027561813593\r\nStep 912, loss: 0.0051920753903687\r\nStep 913, loss: 0.005717420019209385\r\nStep 914, loss: 0.0050996956415474415\r\nStep 915, loss: 0.0047828759998083115\r\nStep 916, loss: 0.004346886184066534\r\nStep 917, loss: 0.004139788914471865\r\nStep 918, loss: 0.0035523693077266216\r\nStep 919, loss: 0.0033755891490727663\r\nStep 920, loss: 0.003731628879904747\r\nStep 921, loss: 0.0035391338169574738\r\nStep 922, loss: 0.002928556641563773\r\nStep 923, loss: 0.003112994134426117\r\nStep 924, loss: 0.0037353208754211664\r\nStep 925, loss: 0.0028575067408382893\r\nStep 926, loss: 0.0026243829634040594\r\nStep 927, loss: 0.002966775093227625\r\nStep 928, loss: 0.0025999462231993675\r\nStep 929, loss: 0.0024162132758647203\r\nStep 930, loss: 0.0025764652527868748\r\nStep 931, loss: 0.0019511310383677483\r\nStep 932, loss: 0.002274678787216544\r\nStep 933, loss: 0.0023877897765487432\r\nStep 934, loss: 0.0020910671446472406\r\nStep 935, loss: 0.0020653163082897663\r\nStep 936, loss: 0.0020628725178539753\r\nStep 937, loss: 0.0019124513491988182\r\nStep 938, loss: 0.0016171099850907922\r\nStep 939, loss: 0.0015059879515320063\r\nStep 940, loss: 0.0014522024430334568\r\nStep 941, loss: 0.0015267037088051438\r\nStep 942, loss: 0.0015720752999186516\r\nStep 943, loss: 0.001392936916090548\r\nStep 944, loss: 0.0016334506217390299\r\nStep 945, loss: 0.0012783808633685112\r\nStep 719, loss: 1.220973014831543\r\nStep 720, loss: 0.9833794236183167\r\nStep 721, loss: 1.0190064907073975\r\nStep 722, loss: 1.0484683513641357\r\nStep 723, loss: 0.8690718412399292\r\nStep 724, loss: 1.0035357475280762\r\nStep 725, loss: 0.8748283982276917\r\nStep 726, loss: 0.8470965027809143\r\nStep 727, loss: 0.9299373626708984\r\nStep 728, loss: 0.8637204766273499\r\nStep 729, loss: 0.8922073245048523\r\nStep 730, loss: 0.9801859259605408\r\nStep 731, loss: 0.8642475008964539\r\nStep 732, loss: 0.9375776052474976\r\nStep 733, loss: 0.8263726830482483\r\nStep 734, loss: 0.9256225228309631\r\nStep 735, loss: 0.8987894058227539\r\nStep 736, loss: 0.7645560503005981\r\nStep 737, loss: 0.7850275635719299\r\nStep 738, loss: 0.8330867290496826\r\nStep 739, loss: 
0.8554867506027222\r\nStep 740, loss: 0.7485871911048889\r\nStep 741, loss: 0.8861126899719238\r\nStep 742, loss: 0.7585428357124329\r\nStep 743, loss: 0.655667245388031\r\nStep 744, loss: 0.6370968222618103\r\nStep 745, loss: 0.6907960772514343\r\nStep 746, loss: 0.7447969913482666\r\nStep 747, loss: 0.7245917916297913\r\nStep 748, loss: 0.664436399936676\r\nStep 749, loss: 0.648061215877533\r\nStep 750, loss: 0.7264807820320129\r\nStep 751, loss: 0.6756706237792969\r\nStep 752, loss: 0.6391420960426331\r\nStep 753, loss: 0.7257218956947327\r\nStep 754, loss: 0.7944856286048889\r\nStep 755, loss: 0.734170138835907\r\nStep 756, loss: 0.6503342390060425\r\nStep 757, loss: 0.6048266887664795\r\nStep 758, loss: 0.4937676787376404\r\nStep 759, loss: 0.5998534560203552\r\nStep 760, loss: 0.5442175269126892\r\nStep 761, loss: 0.5713749527931213\r\nStep 762, loss: 0.5717068314552307\r\nStep 763, loss: 0.4830510914325714\r\nStep 764, loss: 0.5118291974067688\r\nStep 765, loss: 0.5356676578521729\r\nStep 766, loss: 0.5298662781715393\r\nStep 767, loss: 0.51924729347229\r\nStep 768, loss: 0.5166446566581726\r\nStep 769, loss: 0.48934441804885864\r\nStep 770, loss: 0.4826201796531677\r\nStep 771, loss: 0.49853208661079407\r\nStep 772, loss: 0.4026469886302948\r\nStep 773, loss: 0.41336214542388916\r\nStep 774, loss: 0.4546590745449066\r\nStep 775, loss: 0.4068276286125183\r\nStep 776, loss: 0.4530925154685974\r\nStep 777, loss: 0.4294380247592926\r\nStep 778, loss: 0.3810833692550659\r\nStep 779, loss: 0.424375057220459\r\nStep 780, loss: 0.43166396021842957\r\nStep 781, loss: 0.35067734122276306\r\nStep 782, loss: 0.3669264018535614\r\nStep 783, loss: 0.3237971365451813\r\nStep 784, loss: 0.3336354196071625\r\nStep 785, loss: 0.2884513735771179\r\nStep 786, loss: 0.3607689440250397\r\nStep 787, loss: 0.3404114246368408\r\nStep 788, loss: 0.3231687545776367\r\nStep 789, loss: 0.32390743494033813\r\nStep 790, loss: 0.2996169924736023\r\nStep 791, loss: 0.2855353057384491\r\nStep 792, loss: 0.29889044165611267\r\nStep 793, loss: 0.3160989582538605\r\nStep 794, loss: 0.29843971133232117\r\nStep 795, loss: 0.29880401492118835\r\nStep 796, loss: 0.31128159165382385\r\nStep 797, loss: 0.21979154646396637\r\nStep 798, loss: 0.24554967880249023\r\nStep 799, loss: 0.28617969155311584\r\nStep 800, loss: 0.27300775051116943\r\nStep 801, loss: 0.2160603404045105\r\nStep 802, loss: 0.22639624774456024\r\nStep 803, loss: 0.23033708333969116\r\nStep 804, loss: 0.2188936322927475\r\nStep 805, loss: 0.22509171068668365\r\nStep 806, loss: 0.21209244430065155\r\nStep 807, loss: 0.2119956761598587\r\nStep 808, loss: 0.1925811618566513\r\nStep 809, loss: 0.18950898945331573\r\nStep 810, loss: 0.18069443106651306\r\nStep 811, loss: 0.1602306216955185\r\nStep 812, loss: 0.1660311371088028\r\nStep 813, loss: 0.20090986788272858\r\nStep 814, loss: 0.14590783417224884\r\nStep 815, loss: 0.176430344581604\r\nStep 816, loss: 0.14476066827774048\r\nStep 817, loss: 0.16519533097743988\r\nStep 818, loss: 0.1586688607931137\r\nStep 819, loss: 0.133210226893425\r\nStep 820, loss: 0.16687864065170288\r\nStep 821, loss: 0.13211916387081146\r\nStep 822, loss: 0.1396285444498062\r\nStep 823, loss: 0.1226123720407486\r\nStep 824, loss: 0.12260997295379639\r\nStep 825, loss: 0.10799338668584824\r\nStep 826, loss: 0.12551075220108032\r\nStep 827, loss: 0.12864422798156738\r\nStep 828, loss: 0.12160828709602356\r\nStep 829, loss: 0.11189549416303635\r\nStep 830, loss: 0.11334454268217087\r\nStep 831, loss: 0.11749739199876785\r\nStep 832, 
loss: 0.10241300612688065\r\nStep 833, loss: 0.09917576611042023\r\nStep 834, loss: 0.09489981085062027\r\nStep 835, loss: 0.10510732978582382\r\nStep 836, loss: 0.09771240502595901\r\nStep 837, loss: 0.09724751859903336\r\nStep 838, loss: 0.0845051035284996\r\nStep 839, loss: 0.0796673446893692\r\nStep 840, loss: 0.07855331897735596\r\nStep 841, loss: 0.08152555674314499\r\nStep 842, loss: 0.06811615079641342\r\nStep 843, loss: 0.0750921368598938\r\nStep 844, loss: 0.06377741694450378\r\nStep 845, loss: 0.05747470259666443\r\nStep 846, loss: 0.07420796155929565\r\nStep 847, loss: 0.05876259505748749\r\nStep 848, loss: 0.06456019729375839\r\nStep 849, loss: 0.059282127767801285\r\nStep 850, loss: 0.060376886278390884\r\nStep 851, loss: 0.05626952275633812\r\nStep 852, loss: 0.059031061828136444\r\nStep 853, loss: 0.053722843527793884\r\nStep 854, loss: 0.06425199657678604\r\nStep 855, loss: 0.0529995933175087\r\nStep 856, loss: 0.04632771015167236\r\nStep 857, loss: 0.039598628878593445\r\nStep 858, loss: 0.040767405182123184\r\nStep 859, loss: 0.037018269300460815\r\nStep 860, loss: 0.04212583228945732\r\nStep 861, loss: 0.034677814692258835\r\nStep 862, loss: 0.03315940499305725\r\nStep 863, loss: 0.03647122532129288\r\nStep 864, loss: 0.032859236001968384\r\nStep 865, loss: 0.030932841822504997\r\nStep 866, loss: 0.025249309837818146\r\nStep 867, loss: 0.03187650814652443\r\nStep 868, loss: 0.028432000428438187\r\nStep 869, loss: 0.024833550676703453\r\nStep 870, loss: 0.02209923230111599\r\nStep 871, loss: 0.028132932260632515\r\nStep 872, loss: 0.02348645031452179\r\nStep 873, loss: 0.024038558825850487\r\nStep 874, loss: 0.023453161120414734\r\nStep 875, loss: 0.024175075814127922\r\nStep 876, loss: 0.02278744988143444\r\nStep 877, loss: 0.018196746706962585\r\nStep 878, loss: 0.020178772509098053\r\nStep 879, loss: 0.0219156201928854\r\nStep 880, loss: 0.016626497730612755\r\nStep 881, loss: 0.01572035253047943\r\nStep 882, loss: 0.0183713361620903\r\nStep 883, loss: 0.015464250929653645\r\nStep 884, loss: 0.018759163096547127\r\nStep 885, loss: 0.013734908774495125\r\nStep 886, loss: 0.01374769490212202\r\nStep 887, loss: 0.01274916809052229\r\nStep 888, loss: 0.012545258738100529\r\nStep 889, loss: 0.01247946172952652\r\nStep 890, loss: 0.011726918630301952\r\nStep 891, loss: 0.011799908243119717\r\nStep 892, loss: 0.011401084251701832\r\nStep 893, loss: 0.01146245002746582\r\nStep 894, loss: 0.011753150261938572\r\nStep 895, loss: 0.009770526550710201\r\nStep 896, loss: 0.00918026827275753\r\nStep 897, loss: 0.009717998094856739\r\nStep 898, loss: 0.010698080994188786\r\nStep 899, loss: 0.011061342433094978\r\nStep 900, loss: 0.008935529738664627\r\nStep 901, loss: 0.006869297008961439\r\nStep 902, loss: 0.006277004256844521\r\nStep 903, loss: 0.008669532835483551\r\nStep 904, loss: 0.006541823968291283\r\nStep 905, loss: 0.0066695669665932655\r\nStep 906, loss: 0.00531776575371623\r\nStep 907, loss: 0.0049531166441738605\r\nStep 908, loss: 0.006072796415537596\r\nStep 909, loss: 0.006032004952430725\r\nStep 910, loss: 0.004231479484587908\r\nStep 911, loss: 0.006922027561813593\r\nStep 912, loss: 0.0051920753903687\r\nStep 913, loss: 0.005717420019209385\r\nStep 914, loss: 0.0050996956415474415\r\nStep 915, loss: 0.0047828759998083115\r\nStep 916, loss: 0.004346886184066534\r\nStep 917, loss: 0.004139788914471865\r\nStep 918, loss: 0.0035523693077266216\r\nStep 919, loss: 0.0033755891490727663\r\nStep 920, loss: 0.003731628879904747\r\nStep 921, loss: 
0.0035391338169574738\r\nStep 922, loss: 0.002928556641563773\r\nStep 923, loss: 0.003112994134426117\r\nStep 924, loss: 0.0037353208754211664\r\nStep 925, loss: 0.0028575067408382893\r\nStep 926, loss: 0.0026243829634040594\r\nStep 927, loss: 0.002966775093227625\r\nStep 928, loss: 0.0025999462231993675\r\nStep 929, loss: 0.0024162132758647203\r\nStep 930, loss: 0.0025764652527868748\r\nStep 931, loss: 0.0019511310383677483\r\nStep 932, loss: 0.002274678787216544\r\nStep 933, loss: 0.0023877897765487432\r\nStep 934, loss: 0.0020910671446472406\r\nStep 935, loss: 0.0020653163082897663\r\nStep 936, loss: 0.0020628725178539753\r\nStep 937, loss: 0.0019124513491988182\r\nStep 938, loss: 0.0016171099850907922\r\nStep 939, loss: 0.0015059879515320063\r\nStep 940, loss: 0.0014522024430334568\r\nStep 941, loss: 0.0015267037088051438\r\nStep 942, loss: 0.0015720752999186516\r\nStep 943, loss: 0.001392936916090548\r\nStep 944, loss: 0.0016334506217390299\r\nStep 945, loss: 0.0012783808633685112\r\nStep 719, loss: 1.220973014831543\r\nStep 720, loss: 0.9833794236183167\r\nStep 721, loss: 1.0190064907073975\r\nStep 722, loss: 1.0484683513641357\r\nStep 723, loss: 0.8690718412399292\r\nStep 724, loss: 1.0035357475280762\r\nStep 725, loss: 0.8748283982276917\r\nStep 726, loss: 0.8470965027809143\r\nStep 727, loss: 0.9299373626708984\r\nStep 728, loss: 0.8637204766273499\r\nStep 729, loss: 0.8922073245048523\r\nStep 730, loss: 0.9801859259605408\r\nStep 731, loss: 0.8642475008964539\r\nStep 732, loss: 0.9375776052474976\r\nStep 733, loss: 0.8263726830482483\r\nStep 734, loss: 0.9256225228309631\r\nStep 735, loss: 0.8987894058227539\r\nStep 736, loss: 0.7645560503005981\r\nStep 737, loss: 0.7850275635719299\r\nStep 738, loss: 0.8330867290496826\r\nStep 739, loss: 0.8554867506027222\r\nStep 740, loss: 0.7485871911048889\r\nStep 741, loss: 0.8861126899719238\r\nStep 742, loss: 0.7585428357124329\r\nStep 743, loss: 0.655667245388031\r\nStep 744, loss: 0.6370968222618103\r\nStep 745, loss: 0.6907960772514343\r\nStep 746, loss: 0.7447969913482666\r\nStep 747, loss: 0.7245917916297913\r\nStep 748, loss: 0.664436399936676\r\nStep 749, loss: 0.648061215877533\r\nStep 750, loss: 0.7264807820320129\r\nStep 751, loss: 0.6756706237792969\r\nStep 752, loss: 0.6391420960426331\r\nStep 753, loss: 0.7257218956947327\r\nStep 754, loss: 0.7944856286048889\r\nStep 755, loss: 0.734170138835907\r\nStep 756, loss: 0.6503342390060425\r\nStep 757, loss: 0.6048266887664795\r\nStep 758, loss: 0.4937676787376404\r\nStep 759, loss: 0.5998534560203552\r\nStep 760, loss: 0.5442175269126892\r\nStep 761, loss: 0.5713749527931213\r\nStep 762, loss: 0.5717068314552307\r\nStep 763, loss: 0.4830510914325714\r\nStep 764, loss: 0.5118291974067688\r\nStep 765, loss: 0.5356676578521729\r\nStep 766, loss: 0.5298662781715393\r\nStep 767, loss: 0.51924729347229\r\nStep 768, loss: 0.5166446566581726\r\nStep 769, loss: 0.48934441804885864\r\nStep 770, loss: 0.4826201796531677\r\nStep 771, loss: 0.49853208661079407\r\nStep 772, loss: 0.4026469886302948\r\nStep 773, loss: 0.41336214542388916\r\nStep 774, loss: 0.4546590745449066\r\nStep 775, loss: 0.4068276286125183\r\nStep 776, loss: 0.4530925154685974\r\nStep 777, loss: 0.4294380247592926\r\nStep 778, loss: 0.3810833692550659\r\nStep 779, loss: 0.424375057220459\r\nStep 780, loss: 0.43166396021842957\r\nStep 781, loss: 0.35067734122276306\r\nStep 782, loss: 0.3669264018535614\r\nStep 783, loss: 0.3237971365451813\r\nStep 784, loss: 0.3336354196071625\r\nStep 785, loss: 0.2884513735771179\r\nStep 
786, loss: 0.3607689440250397\r\nStep 787, loss: 0.3404114246368408\r\nStep 788, loss: 0.3231687545776367\r\nStep 789, loss: 0.32390743494033813\r\nStep 790, loss: 0.2996169924736023\r\nStep 791, loss: 0.2855353057384491\r\nStep 792, loss: 0.29889044165611267\r\nStep 793, loss: 0.3160989582538605\r\nStep 794, loss: 0.29843971133232117\r\nStep 795, loss: 0.29880401492118835\r\nStep 796, loss: 0.31128159165382385\r\nStep 797, loss: 0.21979154646396637\r\nStep 798, loss: 0.24554967880249023\r\nStep 799, loss: 0.28617969155311584\r\nStep 800, loss: 0.27300775051116943\r\nStep 801, loss: 0.2160603404045105\r\nStep 802, loss: 0.22639624774456024\r\nStep 803, loss: 0.23033708333969116\r\nStep 804, loss: 0.2188936322927475\r\nStep 805, loss: 0.22509171068668365\r\nStep 806, loss: 0.21209244430065155\r\nStep 807, loss: 0.2119956761598587\r\nStep 808, loss: 0.1925811618566513\r\nStep 809, loss: 0.18950898945331573\r\nStep 810, loss: 0.18069443106651306\r\nStep 811, loss: 0.1602306216955185\r\nStep 812, loss: 0.1660311371088028\r\nStep 813, loss: 0.20090986788272858\r\nStep 814, loss: 0.14590783417224884\r\nStep 815, loss: 0.176430344581604\r\nStep 816, loss: 0.14476066827774048\r\nStep 817, loss: 0.16519533097743988\r\nStep 818, loss: 0.1586688607931137\r\nStep 819, loss: 0.133210226893425\r\nStep 820, loss: 0.16687864065170288\r\nStep 821, loss: 0.13211916387081146\r\nStep 822, loss: 0.1396285444498062\r\nStep 823, loss: 0.1226123720407486\r\nStep 824, loss: 0.12260997295379639\r\nStep 825, loss: 0.10799338668584824\r\nStep 826, loss: 0.12551075220108032\r\nStep 827, loss: 0.12864422798156738\r\nStep 828, loss: 0.12160828709602356\r\nStep 829, loss: 0.11189549416303635\r\nStep 830, loss: 0.11334454268217087\r\nStep 831, loss: 0.11749739199876785\r\nStep 832, loss: 0.10241300612688065\r\nStep 833, loss: 0.09917576611042023\r\nStep 834, loss: 0.09489981085062027\r\nStep 719, loss: 1.220973014831543\r\nStep 720, loss: 0.9833794236183167\r\nStep 721, loss: 1.0190064907073975\r\nStep 722, loss: 1.0484683513641357\r\nStep 723, loss: 0.8690718412399292\r\nStep 724, loss: 1.0035357475280762\r\nStep 725, loss: 0.8748283982276917\r\nStep 726, loss: 0.8470965027809143\r\nStep 727, loss: 0.9299373626708984\r\nStep 728, loss: 0.8637204766273499\r\nStep 729, loss: 0.8922073245048523\r\nStep 730, loss: 0.9801859259605408\r\nStep 731, loss: 0.8642475008964539\r\nStep 732, loss: 0.9375776052474976\r\nStep 733, loss: 0.8263726830482483\r\nStep 734, loss: 0.9256225228309631\r\nStep 735, loss: 0.8987894058227539\r\nStep 736, loss: 0.7645560503005981\r\nStep 737, loss: 0.7850275635719299\r\nStep 738, loss: 0.8330867290496826\r\nStep 739, loss: 0.8554867506027222\r\nStep 740, loss: 0.7485871911048889\r\nStep 741, loss: 0.8861126899719238\r\nStep 742, loss: 0.7585428357124329\r\nStep 743, loss: 0.655667245388031\r\nStep 744, loss: 0.6370968222618103\r\nStep 745, loss: 0.6907960772514343\r\nStep 746, loss: 0.7447969913482666\r\nStep 747, loss: 0.7245917916297913\r\nStep 748, loss: 0.664436399936676\r\nStep 749, loss: 0.648061215877533\r\nStep 750, loss: 0.7264807820320129\r\nStep 751, loss: 0.6756706237792969\r\nStep 752, loss: 0.6391420960426331\r\nStep 753, loss: 0.7257218956947327\r\nStep 754, loss: 0.7944856286048889\r\nStep 755, loss: 0.734170138835907\r\nStep 756, loss: 0.6503342390060425\r\nStep 757, loss: 0.6048266887664795\r\nStep 758, loss: 0.4937676787376404\r\nStep 759, loss: 0.5998534560203552\r\nStep 760, loss: 0.5442175269126892\r\nStep 761, loss: 0.5713749527931213\r\nStep 762, loss: 
0.5717068314552307\r\nStep 763, loss: 0.4830510914325714\r\nStep 764, loss: 0.5118291974067688\r\nStep 765, loss: 0.5356676578521729\r\nStep 766, loss: 0.5298662781715393\r\nStep 767, loss: 0.51924729347229\r\nStep 768, loss: 0.5166446566581726\r\nStep 769, loss: 0.48934441804885864\r\nStep 770, loss: 0.4826201796531677\r\nStep 771, loss: 0.49853208661079407\r\nStep 772, loss: 0.4026469886302948\r\nStep 773, loss: 0.41336214542388916\r\nStep 774, loss: 0.4546590745449066\r\nStep 775, loss: 0.4068276286125183\r\nStep 776, loss: 0.4530925154685974\r\nStep 777, loss: 0.4294380247592926\r\nStep 778, loss: 0.3810833692550659\r\nStep 779, loss: 0.424375057220459\r\nStep 780, loss: 0.43166396021842957\r\nStep 781, loss: 0.35067734122276306\r\nStep 782, loss: 0.3669264018535614\r\nStep 783, loss: 0.3237971365451813\r\nStep 784, loss: 0.3336354196071625\r\nStep 785, loss: 0.2884513735771179\r\nStep 786, loss: 0.3607689440250397\r\nStep 787, loss: 0.3404114246368408\r\nStep 788, loss: 0.3231687545776367\r\nStep 789, loss: 0.32390743494033813\r\nStep 790, loss: 0.2996169924736023\r\nStep 791, loss: 0.2855353057384491\r\nStep 792, loss: 0.29889044165611267\r\nStep 793, loss: 0.3160989582538605\r\nStep 794, loss: 0.29843971133232117\r\nStep 795, loss: 0.29880401492118835\r\nStep 796, loss: 0.31128159165382385\r\nStep 797, loss: 0.21979154646396637\r\nStep 798, loss: 0.24554967880249023\r\nStep 799, loss: 0.28617969155311584\r\nStep 800, loss: 0.27300775051116943\r\nStep 801, loss: 0.2160603404045105\r\nStep 802, loss: 0.22639624774456024\r\nStep 803, loss: 0.23033708333969116\r\nStep 804, loss: 0.2188936322927475\r\nStep 805, loss: 0.22509171068668365\r\nStep 806, loss: 0.21209244430065155\r\nStep 807, loss: 0.2119956761598587\r\nStep 808, loss: 0.1925811618566513\r\nStep 809, loss: 0.18950898945331573\r\nStep 810, loss: 0.18069443106651306\r\nStep 811, loss: 0.1602306216955185\r\nStep 812, loss: 0.1660311371088028\r\nStep 813, loss: 0.20090986788272858\r\nStep 814, loss: 0.14590783417224884\r\nStep 815, loss: 0.176430344581604\r\nStep 816, loss: 0.14476066827774048\r\nStep 817, loss: 0.16519533097743988\r\nStep 818, loss: 0.1586688607931137\r\nStep 819, loss: 0.133210226893425\r\nStep 820, loss: 0.16687864065170288\r\nStep 821, loss: 0.13211916387081146\r\nStep 822, loss: 0.1396285444498062\r\nStep 823, loss: 0.1226123720407486\r\nStep 824, loss: 0.12260997295379639\r\nStep 825, loss: 0.10799338668584824\r\nStep 826, loss: 0.12551075220108032\r\nStep 827, loss: 0.12864422798156738\r\nStep 828, loss: 0.12160828709602356\r\nStep 829, loss: 0.11189549416303635\r\nStep 830, loss: 0.11334454268217087\r\nStep 831, loss: 0.11749739199876785\r\nStep 832, loss: 0.10241300612688065\r\nStep 833, loss: 0.09917576611042023\r\nStep 834, loss: 0.09489981085062027\r\nStep 719, loss: 1.220973014831543\r\nStep 720, loss: 0.9833794236183167\r\nStep 721, loss: 1.0190064907073975\r\nStep 722, loss: 1.0484683513641357\r\nStep 723, loss: 0.8690718412399292\r\nStep 724, loss: 1.0035357475280762\r\nStep 725, loss: 0.8748283982276917\r\nStep 726, loss: 0.8470965027809143\r\nStep 727, loss: 0.9299373626708984\r\nStep 728, loss: 0.8637204766273499\r\nStep 729, loss: 0.8922073245048523\r\nStep 730, loss: 0.9801859259605408\r\nStep 731, loss: 0.8642475008964539\r\nStep 732, loss: 0.9375776052474976\r\nStep 733, loss: 0.8263726830482483\r\nStep 734, loss: 0.9256225228309631\r\nStep 735, loss: 0.8987894058227539\r\nStep 736, loss: 0.7645560503005981\r\nStep 737, loss: 0.7850275635719299\r\nStep 738, loss: 0.8330867290496826\r\nStep 
739, loss: 0.8554867506027222\r\nStep 740, loss: 0.7485871911048889\r\nStep 741, loss: 0.8861126899719238\r\nStep 742, loss: 0.7585428357124329\r\nStep 743, loss: 0.655667245388031\r\nStep 744, loss: 0.6370968222618103\r\nStep 745, loss: 0.6907960772514343\r\nStep 746, loss: 0.7447969913482666\r\nStep 747, loss: 0.7245917916297913\r\nStep 748, loss: 0.664436399936676\r\nStep 749, loss: 0.648061215877533\r\nStep 750, loss: 0.7264807820320129\r\nStep 751, loss: 0.6756706237792969\r\nStep 752, loss: 0.6391420960426331\r\nStep 753, loss: 0.7257218956947327\r\nStep 754, loss: 0.7944856286048889\r\nStep 755, loss: 0.734170138835907\r\nStep 756, loss: 0.6503342390060425\r\nStep 757, loss: 0.6048266887664795\r\nStep 758, loss: 0.4937676787376404\r\nStep 759, loss: 0.5998534560203552\r\nStep 760, loss: 0.5442175269126892\r\nStep 761, loss: 0.5713749527931213\r\nStep 762, loss: 0.5717068314552307\r\nStep 763, loss: 0.4830510914325714\r\nStep 764, loss: 0.5118291974067688\r\nStep 765, loss: 0.5356676578521729\r\nStep 766, loss: 0.5298662781715393\r\nStep 767, loss: 0.51924729347229\r\nStep 768, loss: 0.5166446566581726\r\nStep 769, loss: 0.48934441804885864\r\nStep 770, loss: 0.4826201796531677\r\nStep 771, loss: 0.49853208661079407\r\nStep 772, loss: 0.4026469886302948\r\nStep 773, loss: 0.41336214542388916\r\nStep 774, loss: 0.4546590745449066\r\nStep 775, loss: 0.4068276286125183\r\nStep 776, loss: 0.4530925154685974\r\nStep 777, loss: 0.4294380247592926\r\nStep 778, loss: 0.3810833692550659\r\nStep 779, loss: 0.424375057220459\r\nStep 780, loss: 0.43166396021842957\r\nStep 781, loss: 0.35067734122276306\r\nStep 782, loss: 0.3669264018535614\r\nStep 783, loss: 0.3237971365451813\r\nStep 784, loss: 0.3336354196071625\r\nStep 785, loss: 0.2884513735771179\r\nStep 786, loss: 0.3607689440250397\r\nStep 787, loss: 0.3404114246368408\r\nStep 788, loss: 0.3231687545776367\r\nStep 789, loss: 0.32390743494033813\r\nStep 790, loss: 0.2996169924736023\r\nStep 791, loss: 0.2855353057384491\r\nStep 792, loss: 0.29889044165611267\r\nStep 793, loss: 0.3160989582538605\r\nStep 794, loss: 0.29843971133232117\r\nStep 795, loss: 0.29880401492118835\r\nStep 796, loss: 0.31128159165382385\r\nStep 797, loss: 0.21979154646396637\r\nStep 798, loss: 0.24554967880249023\r\nStep 799, loss: 0.28617969155311584\r\nStep 800, loss: 0.27300775051116943\r\nStep 801, loss: 0.2160603404045105\r\nStep 802, loss: 0.22639624774456024\r\nStep 803, loss: 0.23033708333969116\r\nStep 804, loss: 0.2188936322927475\r\nStep 719, loss: 1.220973014831543\r\nStep 720, loss: 0.9833794236183167\r\nStep 721, loss: 1.0190064907073975\r\nStep 722, loss: 1.0484683513641357\r\nStep 723, loss: 0.8690718412399292\r\nStep 724, loss: 1.0035357475280762\r\nStep 725, loss: 0.8748283982276917\r\nStep 726, loss: 0.8470965027809143\r\nStep 727, loss: 0.9299373626708984\r\nStep 728, loss: 0.8637204766273499\r\nStep 729, loss: 0.8922073245048523\r\nStep 730, loss: 0.9801859259605408\r\nStep 731, loss: 0.8642475008964539\r\nStep 732, loss: 0.9375776052474976\r\nStep 733, loss: 0.8263726830482483\r\nStep 734, loss: 0.9256225228309631\r\nStep 735, loss: 0.8987894058227539\r\nStep 736, loss: 0.7645560503005981\r\nStep 737, loss: 0.7850275635719299\r\nStep 738, loss: 0.8330867290496826\r\nStep 739, loss: 0.8554867506027222\r\nStep 740, loss: 0.7485871911048889\r\nStep 741, loss: 0.8861126899719238\r\nStep 742, loss: 0.7585428357124329\r\nStep 743, loss: 0.655667245388031\r\nStep 744, loss: 0.6370968222618103\r\nStep 745, loss: 0.6907960772514343\r\nStep 746, loss: 
0.7447969913482666\r\nStep 747, loss: 0.7245917916297913\r\nStep 805, loss: 0.22509171068668365\r\nStep 806, loss: 0.21209244430065155\r\nStep 807, loss: 0.2119956761598587\r\nStep 808, loss: 0.1925811618566513\r\nStep 809, loss: 0.18950898945331573\r\nStep 810, loss: 0.18069443106651306\r\nStep 811, loss: 0.1602306216955185\r\nStep 812, loss: 0.1660311371088028\r\nStep 813, loss: 0.20090986788272858\r\nStep 814, loss: 0.14590783417224884\r\nStep 815, loss: 0.176430344581604\r\nStep 816, loss: 0.14476066827774048\r\nStep 817, loss: 0.16519533097743988\r\nStep 818, loss: 0.1586688607931137\r\nStep 819, loss: 0.133210226893425\r\nStep 820, loss: 0.16687864065170288\r\nStep 821, loss: 0.13211916387081146\r\nStep 822, loss: 0.1396285444498062\r\nStep 823, loss: 0.1226123720407486\r\nStep 824, loss: 0.12260997295379639\r\nStep 825, loss: 0.10799338668584824\r\nStep 826, loss: 0.12551075220108032\r\nStep 827, loss: 0.12864422798156738\r\nStep 828, loss: 0.12160828709602356\r\nStep 829, loss: 0.11189549416303635\r\nStep 830, loss: 0.11334454268217087\r\nStep 831, loss: 0.11749739199876785\r\nStep 832, loss: 0.10241300612688065\r\nStep 748, loss: 0.664436399936676\r\nStep 749, loss: 0.648061215877533\r\nStep 750, loss: 0.7264807820320129\r\nStep 751, loss: 0.6756706237792969\r\nStep 752, loss: 0.6391420960426331\r\nStep 753, loss: 0.7257218956947327\r\nStep 754, loss: 0.7944856286048889\r\nStep 755, loss: 0.734170138835907\r\nStep 756, loss: 0.6503342390060425\r\nStep 757, loss: 0.6048266887664795\r\nStep 758, loss: 0.4937676787376404\r\nStep 759, loss: 0.5998534560203552\r\nStep 760, loss: 0.5442175269126892\r\nStep 761, loss: 0.5713749527931213\r\nStep 762, loss: 0.5717068314552307\r\nStep 763, loss: 0.4830510914325714\r\nStep 764, loss: 0.5118291974067688\r\nStep 765, loss: 0.5356676578521729\r\nStep 766, loss: 0.5298662781715393\r\nStep 767, loss: 0.51924729347229\r\nStep 768, loss: 0.5166446566581726\r\nStep 769, loss: 0.48934441804885864\r\nStep 770, loss: 0.4826201796531677\r\nStep 771, loss: 0.49853208661079407\r\nStep 772, loss: 0.4026469886302948\r\nStep 773, loss: 0.41336214542388916\r\nStep 774, loss: 0.4546590745449066\r\nStep 775, loss: 0.4068276286125183\r\nStep 776, loss: 0.4530925154685974\r\nStep 833, loss: 0.09917576611042023\r\nStep 834, loss: 0.09489981085062027\r\nStep 777, loss: 0.4294380247592926\r\nStep 778, loss: 0.3810833692550659\r\nStep 779, loss: 0.424375057220459\r\nStep 780, loss: 0.43166396021842957\r\nStep 781, loss: 0.35067734122276306\r\nStep 782, loss: 0.3669264018535614\r\nStep 783, loss: 0.3237971365451813\r\nStep 784, loss: 0.3336354196071625\r\nStep 785, loss: 0.2884513735771179\r\nStep 786, loss: 0.3607689440250397\r\nStep 787, loss: 0.3404114246368408\r\nStep 788, loss: 0.3231687545776367\r\nStep 789, loss: 0.32390743494033813\r\nStep 790, loss: 0.2996169924736023\r\nStep 791, loss: 0.2855353057384491\r\nStep 792, loss: 0.29889044165611267\r\nStep 793, loss: 0.3160989582538605\r\nStep 794, loss: 0.29843971133232117\r\nStep 795, loss: 0.29880401492118835\r\nStep 796, loss: 0.31128159165382385\r\nStep 797, loss: 0.21979154646396637\r\nStep 798, loss: 0.24554967880249023\r\nStep 799, loss: 0.28617969155311584\r\nStep 800, loss: 0.27300775051116943\r\nStep 801, loss: 0.2160603404045105\r\nStep 802, loss: 0.22639624774456024\r\nStep 803, loss: 0.23033708333969116\r\nStep 804, loss: 0.2188936322927475\r\nStep 835, loss: 0.10510732978582382\r\nStep 836, loss: 0.09771240502595901\r\nStep 837, loss: 0.09724751859903336\r\nStep 838, loss: 0.0845051035284996\r\nStep 
839, loss: 0.0796673446893692\r\nStep 840, loss: 0.07855331897735596\r\nStep 841, loss: 0.08152555674314499\r\nStep 842, loss: 0.06811615079641342\r\nStep 843, loss: 0.0750921368598938\r\nStep 844, loss: 0.06377741694450378\r\nStep 845, loss: 0.05747470259666443\r\nStep 846, loss: 0.07420796155929565\r\nStep 847, loss: 0.05876259505748749\r\nStep 848, loss: 0.06456019729375839\r\nStep 849, loss: 0.059282127767801285\r\nStep 850, loss: 0.060376886278390884\r\nStep 851, loss: 0.05626952275633812\r\nStep 852, loss: 0.059031061828136444\r\nStep 853, loss: 0.053722843527793884\r\nStep 854, loss: 0.06425199657678604\r\nStep 855, loss: 0.0529995933175087\r\nStep 856, loss: 0.04632771015167236\r\nStep 857, loss: 0.039598628878593445\r\nStep 858, loss: 0.040767405182123184\r\nStep 859, loss: 0.037018269300460815\r\nStep 860, loss: 0.04212583228945732\r\nStep 861, loss: 0.034677814692258835\r\nStep 862, loss: 0.03315940499305725\r\nStep 805, loss: 0.22509171068668365\r\nStep 806, loss: 0.21209244430065155\r\nStep 807, loss: 0.2119956761598587\r\nStep 808, loss: 0.1925811618566513\r\nStep 809, loss: 0.18950898945331573\r\nStep 810, loss: 0.18069443106651306\r\nStep 811, loss: 0.1602306216955185\r\nStep 812, loss: 0.1660311371088028\r\nStep 813, loss: 0.20090986788272858\r\nStep 814, loss: 0.14590783417224884\r\nStep 815, loss: 0.176430344581604\r\nStep 816, loss: 0.14476066827774048\r\nStep 817, loss: 0.16519533097743988\r\nStep 818, loss: 0.1586688607931137\r\nStep 819, loss: 0.133210226893425\r\nStep 820, loss: 0.16687864065170288\r\nStep 821, loss: 0.13211916387081146\r\nStep 822, loss: 0.1396285444498062\r\nStep 823, loss: 0.1226123720407486\r\nStep 824, loss: 0.12260997295379639\r\nStep 825, loss: 0.10799338668584824\r\nStep 826, loss: 0.12551075220108032\r\nStep 827, loss: 0.12864422798156738\r\nStep 828, loss: 0.12160828709602356\r\nStep 829, loss: 0.11189549416303635\r\nStep 830, loss: 0.11334454268217087\r\nStep 831, loss: 0.11749739199876785\r\nStep 832, loss: 0.10241300612688065\r\nStep 863, loss: 0.03647122532129288\r\nStep 864, loss: 0.032859236001968384\r\nStep 865, loss: 0.030932841822504997\r\nStep 866, loss: 0.025249309837818146\r\nStep 867, loss: 0.03187650814652443\r\nStep 868, loss: 0.028432000428438187\r\nStep 869, loss: 0.024833550676703453\r\nStep 870, loss: 0.02209923230111599\r\nStep 871, loss: 0.028132932260632515\r\nStep 872, loss: 0.02348645031452179\r\nStep 873, loss: 0.024038558825850487\r\nStep 874, loss: 0.023453161120414734\r\nStep 875, loss: 0.024175075814127922\r\nStep 876, loss: 0.02278744988143444\r\nStep 877, loss: 0.018196746706962585\r\nStep 878, loss: 0.020178772509098053\r\nStep 879, loss: 0.0219156201928854\r\nStep 880, loss: 0.016626497730612755\r\nStep 881, loss: 0.01572035253047943\r\nStep 882, loss: 0.0183713361620903\r\nStep 883, loss: 0.015464250929653645\r\nStep 884, loss: 0.018759163096547127\r\nStep 885, loss: 0.013734908774495125\r\nStep 886, loss: 0.01374769490212202\r\nStep 887, loss: 0.01274916809052229\r\nStep 888, loss: 0.012545258738100529\r\nStep 889, loss: 0.01247946172952652\r\nStep 890, loss: 0.011726918630301952\r\nStep 833, loss: 0.09917576611042023\r\nStep 834, loss: 0.09489981085062027\r\nStep 891, loss: 0.011799908243119717\r\nStep 892, loss: 0.011401084251701832\r\nStep 893, loss: 0.01146245002746582\r\nStep 894, loss: 0.011753150261938572\r\nStep 895, loss: 0.009770526550710201\r\nStep 896, loss: 0.00918026827275753\r\nStep 897, loss: 0.009717998094856739\r\nStep 898, loss: 0.010698080994188786\r\nStep 899, loss: 
0.011061342433094978\r\nStep 900, loss: 0.008935529738664627\r\nStep 901, loss: 0.006869297008961439\r\nStep 902, loss: 0.006277004256844521\r\nStep 903, loss: 0.008669532835483551\r\nStep 904, loss: 0.006541823968291283\r\nStep 905, loss: 0.0066695669665932655\r\nStep 906, loss: 0.00531776575371623\r\nStep 907, loss: 0.0049531166441738605\r\nStep 908, loss: 0.006072796415537596\r\nStep 909, loss: 0.006032004952430725\r\nStep 910, loss: 0.004231479484587908\r\nStep 911, loss: 0.006922027561813593\r\nStep 912, loss: 0.0051920753903687\r\nStep 913, loss: 0.005717420019209385\r\nStep 914, loss: 0.0050996956415474415\r\nStep 915, loss: 0.0047828759998083115\r\nStep 916, loss: 0.004346886184066534\r\nStep 917, loss: 0.004139788914471865\r\nStep 835, loss: 0.10510732978582382\r\nStep 836, loss: 0.09771240502595901\r\nStep 837, loss: 0.09724751859903336\r\nStep 838, loss: 0.0845051035284996\r\nStep 839, loss: 0.0796673446893692\r\nStep 840, loss: 0.07855331897735596\r\nStep 841, loss: 0.08152555674314499\r\nStep 842, loss: 0.06811615079641342\r\nStep 843, loss: 0.0750921368598938\r\nStep 844, loss: 0.06377741694450378\r\nStep 845, loss: 0.05747470259666443\r\nStep 846, loss: 0.07420796155929565\r\nStep 847, loss: 0.05876259505748749\r\nStep 848, loss: 0.06456019729375839\r\nStep 849, loss: 0.059282127767801285\r\nStep 850, loss: 0.060376886278390884\r\nStep 851, loss: 0.05626952275633812\r\nStep 852, loss: 0.059031061828136444\r\nStep 853, loss: 0.053722843527793884\r\nStep 854, loss: 0.06425199657678604\r\nStep 855, loss: 0.0529995933175087\r\nStep 856, loss: 0.04632771015167236\r\nStep 857, loss: 0.039598628878593445\r\nStep 858, loss: 0.040767405182123184\r\nStep 859, loss: 0.037018269300460815\r\nStep 860, loss: 0.04212583228945732\r\nStep 861, loss: 0.034677814692258835\r\nStep 862, loss: 0.03315940499305725\r\nStep 918, loss: 0.0035523693077266216\r\nStep 919, loss: 0.0033755891490727663\r\nStep 920, loss: 0.003731628879904747\r\nStep 921, loss: 0.0035391338169574738\r\nStep 922, loss: 0.002928556641563773\r\nStep 923, loss: 0.003112994134426117\r\nStep 924, loss: 0.0037353208754211664\r\nStep 925, loss: 0.0028575067408382893\r\nStep 926, loss: 0.0026243829634040594\r\nStep 927, loss: 0.002966775093227625\r\nStep 928, loss: 0.0025999462231993675\r\nStep 929, loss: 0.0024162132758647203\r\nStep 930, loss: 0.0025764652527868748\r\nStep 931, loss: 0.0019511310383677483\r\nStep 932, loss: 0.002274678787216544\r\nStep 933, loss: 0.0023877897765487432\r\nStep 934, loss: 0.0020910671446472406\r\nStep 935, loss: 0.0020653163082897663\r\nStep 936, loss: 0.0020628725178539753\r\nStep 937, loss: 0.0019124513491988182\r\nStep 938, loss: 0.0016171099850907922\r\nStep 939, loss: 0.0015059879515320063\r\nStep 940, loss: 0.0014522024430334568\r\nStep 941, loss: 0.0015267037088051438\r\nStep 942, loss: 0.0015720752999186516\r\nStep 943, loss: 0.001392936916090548\r\nStep 944, loss: 0.0016334506217390299\r\nStep 863, loss: 0.03647122532129288\r\nStep 864, loss: 0.032859236001968384\r\nStep 865, loss: 0.030932841822504997\r\nStep 866, loss: 0.025249309837818146\r\nStep 867, loss: 0.03187650814652443\r\nStep 868, loss: 0.028432000428438187\r\nStep 869, loss: 0.024833550676703453\r\nStep 870, loss: 0.02209923230111599\r\nStep 871, loss: 0.028132932260632515\r\nStep 872, loss: 0.02348645031452179\r\nStep 873, loss: 0.024038558825850487\r\nStep 874, loss: 0.023453161120414734\r\nStep 875, loss: 0.024175075814127922\r\nStep 876, loss: 0.02278744988143444\r\nStep 877, loss: 0.018196746706962585\r\nStep 878, loss: 
0.020178772509098053\r\nStep 879, loss: 0.0219156201928854\r\nStep 880, loss: 0.016626497730612755\r\nStep 881, loss: 0.01572035253047943\r\nStep 882, loss: 0.0183713361620903\r\nStep 883, loss: 0.015464250929653645\r\nStep 884, loss: 0.018759163096547127\r\nStep 885, loss: 0.013734908774495125\r\nStep 886, loss: 0.01374769490212202\r\nStep 887, loss: 0.01274916809052229\r\nStep 888, loss: 0.012545258738100529\r\nStep 889, loss: 0.01247946172952652\r\nStep 890, loss: 0.011726918630301952\r\nStep 945, loss: 0.0012783808633685112\r\nStep 891, loss: 0.011799908243119717\r\nStep 892, loss: 0.011401084251701832\r\nStep 893, loss: 0.01146245002746582\r\nStep 894, loss: 0.011753150261938572\r\nStep 895, loss: 0.009770526550710201\r\nStep 896, loss: 0.00918026827275753\r\nStep 897, loss: 0.009717998094856739\r\nStep 898, loss: 0.010698080994188786\r\nStep 899, loss: 0.011061342433094978\r\nStep 900, loss: 0.008935529738664627\r\nStep 901, loss: 0.006869297008961439\r\nStep 902, loss: 0.006277004256844521\r\nStep 903, loss: 0.008669532835483551\r\nStep 904, loss: 0.006541823968291283\r\nStep 905, loss: 0.0066695669665932655\r\nStep 906, loss: 0.00531776575371623\r\nStep 907, loss: 0.0049531166441738605\r\nStep 908, loss: 0.006072796415537596\r\nStep 909, loss: 0.006032004952430725\r\nStep 910, loss: 0.004231479484587908\r\nStep 911, loss: 0.006922027561813593\r\nStep 912, loss: 0.0051920753903687\r\nStep 913, loss: 0.005717420019209385\r\nStep 914, loss: 0.0050996956415474415\r\nStep 915, loss: 0.0047828759998083115\r\nStep 916, loss: 0.004346886184066534\r\nStep 917, loss: 0.004139788914471865\r\nStep 719, loss: 1.220973014831543\r\nStep 720, loss: 0.9833794236183167\r\nStep 721, loss: 1.0190064907073975\r\nStep 722, loss: 1.0484683513641357\r\nStep 723, loss: 0.8690718412399292\r\nStep 724, loss: 1.0035357475280762\r\nStep 725, loss: 0.8748283982276917\r\nStep 726, loss: 0.8470965027809143\r\nStep 727, loss: 0.9299373626708984\r\nStep 728, loss: 0.8637204766273499\r\nStep 729, loss: 0.8922073245048523\r\nStep 730, loss: 0.9801859259605408\r\nStep 731, loss: 0.8642475008964539\r\nStep 732, loss: 0.9375776052474976\r\nStep 733, loss: 0.8263726830482483\r\nStep 734, loss: 0.9256225228309631\r\nStep 735, loss: 0.8987894058227539\r\nStep 736, loss: 0.7645560503005981\r\nStep 737, loss: 0.7850275635719299\r\nStep 738, loss: 0.8330867290496826\r\nStep 739, loss: 0.8554867506027222\r\nStep 740, loss: 0.7485871911048889\r\nStep 741, loss: 0.8861126899719238\r\nStep 742, loss: 0.7585428357124329\r\nStep 743, loss: 0.655667245388031\r\nStep 744, loss: 0.6370968222618103\r\nStep 745, loss: 0.6907960772514343\r\nStep 746, loss: 0.7447969913482666\r\nStep 747, loss: 0.7245917916297913\r\nStep 918, loss: 0.0035523693077266216\r\nStep 919, loss: 0.0033755891490727663\r\nStep 920, loss: 0.003731628879904747\r\nStep 921, loss: 0.0035391338169574738\r\nStep 922, loss: 0.002928556641563773\r\nStep 923, loss: 0.003112994134426117\r\nStep 924, loss: 0.0037353208754211664\r\nStep 925, loss: 0.0028575067408382893\r\nStep 926, loss: 0.0026243829634040594\r\nStep 927, loss: 0.002966775093227625\r\nStep 928, loss: 0.0025999462231993675\r\nStep 929, loss: 0.0024162132758647203\r\nStep 930, loss: 0.0025764652527868748\r\nStep 931, loss: 0.0019511310383677483\r\nStep 932, loss: 0.002274678787216544\r\nStep 933, loss: 0.0023877897765487432\r\nStep 934, loss: 0.0020910671446472406\r\nStep 935, loss: 0.0020653163082897663\r\nStep 936, loss: 0.0020628725178539753\r\nStep 937, loss: 0.0019124513491988182\r\nStep 938, loss: 
0.0016171099850907922\r\nStep 939, loss: 0.0015059879515320063\r\nStep 940, loss: 0.0014522024430334568\r\nStep 941, loss: 0.0015267037088051438\r\nStep 942, loss: 0.0015720752999186516\r\nStep 943, loss: 0.001392936916090548\r\nStep 944, loss: 0.0016334506217390299\r\nStep 748, loss: 0.664436399936676\r\nStep 749, loss: 0.648061215877533\r\nStep 750, loss: 0.7264807820320129\r\nStep 751, loss: 0.6756706237792969\r\nStep 752, loss: 0.6391420960426331\r\nStep 753, loss: 0.7257218956947327\r\nStep 754, loss: 0.7944856286048889\r\nStep 755, loss: 0.734170138835907\r\nStep 756, loss: 0.6503342390060425\r\nStep 757, loss: 0.6048266887664795\r\nStep 758, loss: 0.4937676787376404\r\nStep 759, loss: 0.5998534560203552\r\nStep 760, loss: 0.5442175269126892\r\nStep 761, loss: 0.5713749527931213\r\nStep 762, loss: 0.5717068314552307\r\nStep 763, loss: 0.4830510914325714\r\nStep 764, loss: 0.5118291974067688\r\nStep 765, loss: 0.5356676578521729\r\nStep 766, loss: 0.5298662781715393\r\nStep 767, loss: 0.51924729347229\r\nStep 768, loss: 0.5166446566581726\r\nStep 769, loss: 0.48934441804885864\r\nStep 770, loss: 0.4826201796531677\r\nStep 771, loss: 0.49853208661079407\r\nStep 772, loss: 0.4026469886302948\r\nStep 773, loss: 0.41336214542388916\r\nStep 774, loss: 0.4546590745449066\r\nStep 775, loss: 0.4068276286125183\r\nStep 776, loss: 0.4530925154685974\r\nStep 945, loss: 0.0012783808633685112\r\nStep 777, loss: 0.4294380247592926\r\nStep 778, loss: 0.3810833692550659\r\nStep 779, loss: 0.424375057220459\r\nStep 780, loss: 0.43166396021842957\r\nStep 781, loss: 0.35067734122276306\r\nStep 782, loss: 0.3669264018535614\r\nStep 783, loss: 0.3237971365451813\r\nStep 784, loss: 0.3336354196071625\r\nStep 785, loss: 0.2884513735771179\r\nStep 786, loss: 0.3607689440250397\r\nStep 787, loss: 0.3404114246368408\r\nStep 788, loss: 0.3231687545776367\r\nStep 789, loss: 0.32390743494033813\r\nStep 790, loss: 0.2996169924736023\r\nStep 791, loss: 0.2855353057384491\r\nStep 792, loss: 0.29889044165611267\r\nStep 793, loss: 0.3160989582538605\r\nStep 794, loss: 0.29843971133232117\r\nStep 795, loss: 0.29880401492118835\r\nStep 796, loss: 0.31128159165382385\r\nStep 797, loss: 0.21979154646396637\r\nStep 798, loss: 0.24554967880249023\r\nStep 799, loss: 0.28617969155311584\r\nStep 800, loss: 0.27300775051116943\r\nStep 801, loss: 0.2160603404045105\r\nStep 802, loss: 0.22639624774456024\r\nStep 803, loss: 0.23033708333969116\r\nStep 804, loss: 0.2188936322927475\r\nStep 805, loss: 0.22509171068668365\r\nStep 806, loss: 0.21209244430065155\r\nStep 807, loss: 0.2119956761598587\r\nStep 808, loss: 0.1925811618566513\r\nStep 809, loss: 0.18950898945331573\r\nStep 810, loss: 0.18069443106651306\r\nStep 811, loss: 0.1602306216955185\r\nStep 812, loss: 0.1660311371088028\r\nStep 813, loss: 0.20090986788272858\r\nStep 814, loss: 0.14590783417224884\r\nStep 815, loss: 0.176430344581604\r\nStep 816, loss: 0.14476066827774048\r\nStep 817, loss: 0.16519533097743988\r\nStep 818, loss: 0.1586688607931137\r\nStep 819, loss: 0.133210226893425\r\nStep 820, loss: 0.16687864065170288\r\nStep 821, loss: 0.13211916387081146\r\nStep 822, loss: 0.1396285444498062\r\nStep 823, loss: 0.1226123720407486\r\nStep 824, loss: 0.12260997295379639\r\nStep 825, loss: 0.10799338668584824\r\nStep 826, loss: 0.12551075220108032\r\nStep 827, loss: 0.12864422798156738\r\nStep 828, loss: 0.12160828709602356\r\nStep 829, loss: 0.11189549416303635\r\nStep 830, loss: 0.11334454268217087\r\nStep 831, loss: 0.11749739199876785\r\nStep 832, loss: 
0.10241300612688065\r\nStep 833, loss: 0.09917576611042023\r\nStep 834, loss: 0.09489981085062027\r\nStep 835, loss: 0.10510732978582382\r\nStep 836, loss: 0.09771240502595901\r\nStep 837, loss: 0.09724751859903336\r\nStep 838, loss: 0.0845051035284996\r\nStep 839, loss: 0.0796673446893692\r\nStep 840, loss: 0.07855331897735596\r\nStep 841, loss: 0.08152555674314499\r\nStep 842, loss: 0.06811615079641342\r\nStep 843, loss: 0.0750921368598938\r\nStep 844, loss: 0.06377741694450378\r\nStep 845, loss: 0.05747470259666443\r\nStep 846, loss: 0.07420796155929565\r\nStep 847, loss: 0.05876259505748749\r\nStep 848, loss: 0.06456019729375839\r\nStep 849, loss: 0.059282127767801285\r\nStep 850, loss: 0.060376886278390884\r\nStep 851, loss: 0.05626952275633812\r\nStep 852, loss: 0.059031061828136444\r\nStep 853, loss: 0.053722843527793884\r\nStep 854, loss: 0.06425199657678604\r\nStep 855, loss: 0.0529995933175087\r\nStep 856, loss: 0.04632771015167236\r\nStep 857, loss: 0.039598628878593445\r\nStep 858, loss: 0.040767405182123184\r\nStep 859, loss: 0.037018269300460815\r\nStep 860, loss: 0.04212583228945732\r\nStep 861, loss: 0.034677814692258835\r\nStep 862, loss: 0.03315940499305725\r\nStep 863, loss: 0.03647122532129288\r\nStep 864, loss: 0.032859236001968384\r\nStep 865, loss: 0.030932841822504997\r\nStep 866, loss: 0.025249309837818146\r\nStep 867, loss: 0.03187650814652443\r\nStep 868, loss: 0.028432000428438187\r\nStep 869, loss: 0.024833550676703453\r\nStep 870, loss: 0.02209923230111599\r\nStep 871, loss: 0.028132932260632515\r\nStep 872, loss: 0.02348645031452179\r\nStep 873, loss: 0.024038558825850487\r\nStep 874, loss: 0.023453161120414734\r\nStep 875, loss: 0.024175075814127922\r\nStep 876, loss: 0.02278744988143444\r\nStep 877, loss: 0.018196746706962585\r\nStep 878, loss: 0.020178772509098053\r\nStep 879, loss: 0.0219156201928854\r\nStep 880, loss: 0.016626497730612755\r\nStep 881, loss: 0.01572035253047943\r\nStep 882, loss: 0.0183713361620903\r\nStep 883, loss: 0.015464250929653645\r\nStep 884, loss: 0.018759163096547127\r\nStep 885, loss: 0.013734908774495125\r\nStep 886, loss: 0.01374769490212202\r\nStep 887, loss: 0.01274916809052229\r\nStep 888, loss: 0.012545258738100529\r\nStep 889, loss: 0.01247946172952652\r\nStep 890, loss: 0.011726918630301952\r\nStep 891, loss: 0.011799908243119717\r\nStep 892, loss: 0.011401084251701832\r\nStep 893, loss: 0.01146245002746582\r\nStep 894, loss: 0.011753150261938572\r\nStep 895, loss: 0.009770526550710201\r\nStep 896, loss: 0.00918026827275753\r\nStep 897, loss: 0.009717998094856739\r\nStep 898, loss: 0.010698080994188786\r\nStep 899, loss: 0.011061342433094978\r\nStep 900, loss: 0.008935529738664627\r\nStep 901, loss: 0.006869297008961439\r\nStep 902, loss: 0.006277004256844521\r\nStep 903, loss: 0.008669532835483551\r\nStep 904, loss: 0.006541823968291283\r\nStep 905, loss: 0.0066695669665932655\r\nStep 906, loss: 0.00531776575371623\r\nStep 907, loss: 0.0049531166441738605\r\nStep 908, loss: 0.006072796415537596\r\nStep 909, loss: 0.006032004952430725\r\nStep 910, loss: 0.004231479484587908\r\nStep 911, loss: 0.006922027561813593\r\nStep 912, loss: 0.0051920753903687\r\nStep 913, loss: 0.005717420019209385\r\nStep 914, loss: 0.0050996956415474415\r\nStep 915, loss: 0.0047828759998083115\r\nStep 916, loss: 0.004346886184066534\r\nStep 917, loss: 0.004139788914471865\r\nStep 918, loss: 0.0035523693077266216\r\nStep 919, loss: 0.0033755891490727663\r\nStep 920, loss: 0.003731628879904747\r\nStep 921, loss: 0.0035391338169574738\r\nStep 
922, loss: 0.002928556641563773\r\nStep 923, loss: 0.003112994134426117\r\nStep 924, loss: 0.0037353208754211664\r\nStep 925, loss: 0.0028575067408382893\r\nStep 926, loss: 0.0026243829634040594\r\nStep 927, loss: 0.002966775093227625\r\nStep 928, loss: 0.0025999462231993675\r\nStep 929, loss: 0.0024162132758647203\r\nStep 930, loss: 0.0025764652527868748\r\nStep 931, loss: 0.0019511310383677483\r\nStep 932, loss: 0.002274678787216544\r\nStep 933, loss: 0.0023877897765487432\r\nStep 934, loss: 0.0020910671446472406\r\nStep 935, loss: 0.0020653163082897663\r\nStep 936, loss: 0.0020628725178539753\r\nStep 937, loss: 0.0019124513491988182\r\nStep 938, loss: 0.0016171099850907922\r\nStep 939, loss: 0.0015059879515320063\r\nStep 940, loss: 0.0014522024430334568\r\nStep 941, loss: 0.0015267037088051438\r\nStep 942, loss: 0.0015720752999186516\r\nStep 943, loss: 0.001392936916090548\r\nStep 944, loss: 0.0016334506217390299\r\nStep 945, loss: 0.0012783808633685112\r\n",,terminal_output +1348,5127374,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",6156,0,"a",python,content +1349,5127374,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",6157,0,"",python,selection_keyboard +1350,5127877,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",6157,0,"r",python,content +1351,5127878,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",6158,0,"",python,selection_keyboard +1352,5128004,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",6158,0,"g",python,content +1353,5128005,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",6159,0,"",python,selection_keyboard
+1354,5128149,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",6159,0,"s",python,content +1355,5128150,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",6160,0,"",python,selection_keyboard +1356,5128357,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",6160,0,".",python,content +1357,5128358,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",6161,0,"",python,selection_keyboard +1358,5129765,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",6172,0,",",python,content +1359,5129766,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",6173,0,"",python,selection_keyboard +1360,5131083,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",6166,0,"",python,selection_mouse +1361,5131426,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",6161,11,"use_maskgit",python,selection_mouse +1362,5132269,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",6149,0,"",python,selection_mouse +1363,5132442,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",6144,11,"use_maskgit",python,selection_mouse +1364,5134995,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",5260,0,"",python,selection_mouse +1365,5135308,"genie.py",0,0,"",python,tab +1366,5138832,"genie.py",925,0,"",python,selection_mouse +1367,5138847,"genie.py",924,0,"",python,selection_command +1368,5140105,"genie.py",877,0,"",python,selection_mouse +1369,5140120,"genie.py",876,0,"",python,selection_command +1370,5141246,"genie.py",877,0,"\n ",python,content +1371,5141492,"genie.py",882,0,"use_maskgit",python,content +1372,5143246,"genie.py",893,0,":",python,content +1373,5143247,"genie.py",894,0,"",python,selection_keyboard +1374,5143354,"genie.py",894,0," ",python,content +1375,5143354,"genie.py",895,0,"",python,selection_keyboard +1376,5143610,"genie.py",895,0,"b",python,content +1377,5143611,"genie.py",896,0,"",python,selection_keyboard +1378,5143793,"genie.py",896,0,"o",python,content +1379,5143795,"genie.py",897,0,"",python,selection_keyboard +1380,5143916,"genie.py",897,0,"o",python,content +1381,5143917,"genie.py",898,0,"",python,selection_keyboard +1382,5144044,"genie.py",898,0,"l",python,content +1383,5144045,"genie.py",899,0,"",python,selection_keyboard +1384,5144482,"genie.py",898,0,"",python,selection_command +1385,5148137,"genie.py",887,0,"",python,selection_mouse +1386,5148271,"genie.py",882,11,"use_maskgit",python,selection_mouse +1387,5150678,"genie.py",2305,0,"",python,selection_mouse +1388,5151185,"genie.py",2236,0,"",python,selection_mouse +1389,5152089,"genie.py",1998,0,"",python,selection_mouse +1390,5152545,"genie.py",1982,0,"",python,selection_mouse +1391,5152552,"genie.py",1981,0,"",python,selection_command +1392,5158820,"genie.py",1991,0,"",python,selection_command +1393,5159178,"genie.py",1992,0,"",python,selection_command +1394,5159704,"genie.py",1993,0,"",python,selection_command +1395,5159726,"genie.py",1994,0,"",python,selection_command +1396,5159726,"genie.py",1995,0,"",python,selection_command +1397,5159808,"genie.py",1996,0,"",python,selection_command +1398,5159814,"genie.py",1997,0,"",python,selection_command +1399,5159849,"genie.py",1998,0,"",python,selection_command +1400,5159859,"genie.py",1999,0,"",python,selection_command 
+1401,5159906,"genie.py",2000,0,"",python,selection_command +1402,5160033,"genie.py",2001,0,"",python,selection_command +1403,5160034,"genie.py",2002,0,"",python,selection_command +1404,5160034,"genie.py",2003,0,"",python,selection_command +1405,5160151,"genie.py",2004,0,"",python,selection_command +1406,5160331,"genie.py",2005,0,"",python,selection_command +1407,5160480,"genie.py",2006,0,"",python,selection_command +1408,5161079,"genie.py",2054,0,"",python,selection_command +1409,5161381,"genie.py",2091,0,"",python,selection_command +1410,5161545,"genie.py",2139,0,"",python,selection_command +1411,5161855,"genie.py",2091,0,"",python,selection_command +1412,5162040,"genie.py",2054,0,"",python,selection_command +1413,5162185,"genie.py",2006,0,"",python,selection_command +1414,5162279,"genie.py",2054,0,"",python,selection_command +1415,5162813,"genie.py",2091,0,"",python,selection_command +1416,5162843,"genie.py",2139,0,"",python,selection_command +1417,5162866,"genie.py",2184,0,"",python,selection_command +1418,5162883,"genie.py",2227,0,"",python,selection_command +1419,5162923,"genie.py",2261,0,"",python,selection_command +1420,5162933,"genie.py",2303,0,"",python,selection_command +1421,5163058,"genie.py",2318,0,"",python,selection_command +1422,5163209,"genie.py",2343,0,"",python,selection_command +1423,5163962,"genie.py",2320,42," # self.dynamics = DynamicsMaskGIT(",python,selection_command +1424,5164184,"genie.py",2320,81," # self.dynamics = DynamicsMaskGIT(\n # model_dim=self.dyna_dim,",python,selection_command +1425,5164284,"genie.py",2320,131," # self.dynamics = DynamicsMaskGIT(\n # model_dim=self.dyna_dim,\n # num_latents=self.num_patch_latents,",python,selection_command +1426,5164784,"genie.py",2320,178," # self.dynamics = DynamicsMaskGIT(\n # model_dim=self.dyna_dim,\n # num_latents=self.num_patch_latents,\n # num_blocks=self.dyna_num_blocks,",python,selection_command +1427,5164975,"genie.py",2320,223," # self.dynamics = DynamicsMaskGIT(\n # model_dim=self.dyna_dim,\n # num_latents=self.num_patch_latents,\n # num_blocks=self.dyna_num_blocks,\n # num_heads=self.dyna_num_heads,",python,selection_command +1428,5164976,"genie.py",2320,259," # self.dynamics = DynamicsMaskGIT(\n # model_dim=self.dyna_dim,\n # num_latents=self.num_patch_latents,\n # num_blocks=self.dyna_num_blocks,\n # num_heads=self.dyna_num_heads,\n # dropout=self.dropout,",python,selection_command +1429,5165013,"genie.py",2320,301," # self.dynamics = DynamicsMaskGIT(\n # model_dim=self.dyna_dim,\n # num_latents=self.num_patch_latents,\n # num_blocks=self.dyna_num_blocks,\n # num_heads=self.dyna_num_heads,\n # dropout=self.dropout,\n # mask_limit=self.mask_limit,",python,selection_command +1430,5165013,"genie.py",2320,345," # self.dynamics = DynamicsMaskGIT(\n # model_dim=self.dyna_dim,\n # num_latents=self.num_patch_latents,\n # num_blocks=self.dyna_num_blocks,\n # num_heads=self.dyna_num_heads,\n # dropout=self.dropout,\n # mask_limit=self.mask_limit,\n # param_dtype=self.param_dtype,",python,selection_command +1431,5165014,"genie.py",2320,377," # self.dynamics = DynamicsMaskGIT(\n # model_dim=self.dyna_dim,\n # num_latents=self.num_patch_latents,\n # num_blocks=self.dyna_num_blocks,\n # num_heads=self.dyna_num_heads,\n # dropout=self.dropout,\n # mask_limit=self.mask_limit,\n # param_dtype=self.param_dtype,\n # dtype=self.dtype,",python,selection_command +1432,5165083,"genie.py",2320,389," # self.dynamics = DynamicsMaskGIT(\n # model_dim=self.dyna_dim,\n # num_latents=self.num_patch_latents,\n # 
num_blocks=self.dyna_num_blocks,\n # num_heads=self.dyna_num_heads,\n # dropout=self.dropout,\n # mask_limit=self.mask_limit,\n # param_dtype=self.param_dtype,\n # dtype=self.dtype,\n # )",python,selection_command +1433,5165188,"genie.py",2320,390," # self.dynamics = DynamicsMaskGIT(\n # model_dim=self.dyna_dim,\n # num_latents=self.num_patch_latents,\n # num_blocks=self.dyna_num_blocks,\n # num_heads=self.dyna_num_heads,\n # dropout=self.dropout,\n # mask_limit=self.mask_limit,\n # param_dtype=self.param_dtype,\n # dtype=self.dtype,\n # )\n",python,selection_command +1434,5165507,"genie.py",2320,389," # self.dynamics = DynamicsMaskGIT(\n # model_dim=self.dyna_dim,\n # num_latents=self.num_patch_latents,\n # num_blocks=self.dyna_num_blocks,\n # num_heads=self.dyna_num_heads,\n # dropout=self.dropout,\n # mask_limit=self.mask_limit,\n # param_dtype=self.param_dtype,\n # dtype=self.dtype,\n # )",python,selection_command +1435,5165782,"genie.py",2320,390,"",python,content +1436,5166014,"genie.py",2310,11,"",python,content +1437,5166081,"genie.py",2314,0,"",python,selection_command +1438,5166746,"genie.py",2314,0," )\n\n ",python,content +1439,5166759,"genie.py",2320,0,"",python,selection_command +1440,5167176,"genie.py",2310,0,"",python,selection_command +1441,5167406,"genie.py",2280,0,"",python,selection_command +1442,5167512,"genie.py",2238,0,"",python,selection_command +1443,5167662,"genie.py",2204,0,"",python,selection_command +1444,5167808,"genie.py",2161,0,"",python,selection_command +1445,5167966,"genie.py",2116,0,"",python,selection_command +1446,5168145,"genie.py",2068,0,"",python,selection_command +1447,5168300,"genie.py",2031,0,"",python,selection_command +1448,5168464,"genie.py",1983,0,"",python,selection_command +1449,5168636,"genie.py",1973,0,"",python,selection_command +1450,5168834,"genie.py",1982,0,"\n )\n",python,content +1451,5168912,"genie.py",1991,0,"",python,selection_command +1452,5169506,"genie.py",1991,11,"",python,content +1453,5169509,"genie.py",1973,0,"",python,selection_command +1454,5169837,"genie.py",2320,0," # self.dynamics = DynamicsMaskGIT(\n # model_dim=self.dyna_dim,\n # num_latents=self.num_patch_latents,\n # num_blocks=self.dyna_num_blocks,\n # num_heads=self.dyna_num_heads,\n # dropout=self.dropout,\n # mask_limit=self.mask_limit,\n # param_dtype=self.param_dtype,\n # dtype=self.dtype,\n # )\n",python,content +1455,5169891,"genie.py",2343,0,"",python,selection_command +1456,5171115,"genie.py",2320,42," # self.dynamics = DynamicsMaskGIT(",python,selection_command +1457,5171331,"genie.py",2320,81," # self.dynamics = DynamicsMaskGIT(\n # model_dim=self.dyna_dim,",python,selection_command +1458,5171835,"genie.py",2320,131," # self.dynamics = DynamicsMaskGIT(\n # model_dim=self.dyna_dim,\n # num_latents=self.num_patch_latents,",python,selection_command +1459,5171893,"genie.py",2320,178," # self.dynamics = DynamicsMaskGIT(\n # model_dim=self.dyna_dim,\n # num_latents=self.num_patch_latents,\n # num_blocks=self.dyna_num_blocks,",python,selection_command +1460,5171988,"genie.py",2320,223," # self.dynamics = DynamicsMaskGIT(\n # model_dim=self.dyna_dim,\n # num_latents=self.num_patch_latents,\n # num_blocks=self.dyna_num_blocks,\n # num_heads=self.dyna_num_heads,",python,selection_command +1461,5171989,"genie.py",2320,259," # self.dynamics = DynamicsMaskGIT(\n # model_dim=self.dyna_dim,\n # num_latents=self.num_patch_latents,\n # num_blocks=self.dyna_num_blocks,\n # num_heads=self.dyna_num_heads,\n # dropout=self.dropout,",python,selection_command 
+1462,5172025,"genie.py",2320,301," # self.dynamics = DynamicsMaskGIT(\n # model_dim=self.dyna_dim,\n # num_latents=self.num_patch_latents,\n # num_blocks=self.dyna_num_blocks,\n # num_heads=self.dyna_num_heads,\n # dropout=self.dropout,\n # mask_limit=self.mask_limit,",python,selection_command +1463,5172025,"genie.py",2320,345," # self.dynamics = DynamicsMaskGIT(\n # model_dim=self.dyna_dim,\n # num_latents=self.num_patch_latents,\n # num_blocks=self.dyna_num_blocks,\n # num_heads=self.dyna_num_heads,\n # dropout=self.dropout,\n # mask_limit=self.mask_limit,\n # param_dtype=self.param_dtype,",python,selection_command +1464,5172053,"genie.py",2320,377," # self.dynamics = DynamicsMaskGIT(\n # model_dim=self.dyna_dim,\n # num_latents=self.num_patch_latents,\n # num_blocks=self.dyna_num_blocks,\n # num_heads=self.dyna_num_heads,\n # dropout=self.dropout,\n # mask_limit=self.mask_limit,\n # param_dtype=self.param_dtype,\n # dtype=self.dtype,",python,selection_command +1465,5172119,"genie.py",2320,389," # self.dynamics = DynamicsMaskGIT(\n # model_dim=self.dyna_dim,\n # num_latents=self.num_patch_latents,\n # num_blocks=self.dyna_num_blocks,\n # num_heads=self.dyna_num_heads,\n # dropout=self.dropout,\n # mask_limit=self.mask_limit,\n # param_dtype=self.param_dtype,\n # dtype=self.dtype,\n # )",python,selection_command +1466,5172506,"genie.py",2320,390,"",python,content +1467,5172600,"genie.py",2310,0,"",python,selection_command +1468,5172741,"genie.py",2280,0,"",python,selection_command +1469,5172899,"genie.py",2238,0,"",python,selection_command +1470,5173071,"genie.py",2204,0,"",python,selection_command +1471,5173223,"genie.py",2161,0,"",python,selection_command +1472,5173344,"genie.py",2116,0,"",python,selection_command +1473,5173514,"genie.py",2068,0,"",python,selection_command +1474,5173698,"genie.py",2031,0,"",python,selection_command +1475,5173828,"genie.py",1983,0,"",python,selection_command +1476,5173962,"genie.py",1973,0,"",python,selection_command +1477,5174243,"genie.py",1982,0,"\n # self.dynamics = DynamicsMaskGIT(\n # model_dim=self.dyna_dim,\n # num_latents=self.num_patch_latents,\n # num_blocks=self.dyna_num_blocks,\n # num_heads=self.dyna_num_heads,\n # dropout=self.dropout,\n # mask_limit=self.mask_limit,\n # param_dtype=self.param_dtype,\n # dtype=self.dtype,\n # )",python,content +1478,5174306,"genie.py",1991,0,"",python,selection_command +1479,5175284,"genie.py",1982,0,"\n ",python,content +1480,5176115,"genie.py",1983,8,"",python,content +1481,5176233,"genie.py",1984,0,"",python,selection_command +1482,5176965,"genie.py",1984,42," # self.dynamics = DynamicsMaskGIT(",python,selection_command +1483,5177149,"genie.py",1984,81," # self.dynamics = DynamicsMaskGIT(\n # model_dim=self.dyna_dim,",python,selection_command +1484,5177647,"genie.py",1984,131," # self.dynamics = DynamicsMaskGIT(\n # model_dim=self.dyna_dim,\n # num_latents=self.num_patch_latents,",python,selection_command +1485,5177880,"genie.py",1984,178," # self.dynamics = DynamicsMaskGIT(\n # model_dim=self.dyna_dim,\n # num_latents=self.num_patch_latents,\n # num_blocks=self.dyna_num_blocks,",python,selection_command +1486,5177881,"genie.py",1984,223," # self.dynamics = DynamicsMaskGIT(\n # model_dim=self.dyna_dim,\n # num_latents=self.num_patch_latents,\n # num_blocks=self.dyna_num_blocks,\n # num_heads=self.dyna_num_heads,",python,selection_command +1487,5177927,"genie.py",1984,259," # self.dynamics = DynamicsMaskGIT(\n # model_dim=self.dyna_dim,\n # num_latents=self.num_patch_latents,\n # 
num_blocks=self.dyna_num_blocks,\n # num_heads=self.dyna_num_heads,\n # dropout=self.dropout,",python,selection_command +1488,5177928,"genie.py",1984,301," # self.dynamics = DynamicsMaskGIT(\n # model_dim=self.dyna_dim,\n # num_latents=self.num_patch_latents,\n # num_blocks=self.dyna_num_blocks,\n # num_heads=self.dyna_num_heads,\n # dropout=self.dropout,\n # mask_limit=self.mask_limit,",python,selection_command +1489,5177928,"genie.py",1984,345," # self.dynamics = DynamicsMaskGIT(\n # model_dim=self.dyna_dim,\n # num_latents=self.num_patch_latents,\n # num_blocks=self.dyna_num_blocks,\n # num_heads=self.dyna_num_heads,\n # dropout=self.dropout,\n # mask_limit=self.mask_limit,\n # param_dtype=self.param_dtype,",python,selection_command +1490,5177928,"genie.py",1984,377," # self.dynamics = DynamicsMaskGIT(\n # model_dim=self.dyna_dim,\n # num_latents=self.num_patch_latents,\n # num_blocks=self.dyna_num_blocks,\n # num_heads=self.dyna_num_heads,\n # dropout=self.dropout,\n # mask_limit=self.mask_limit,\n # param_dtype=self.param_dtype,\n # dtype=self.dtype,",python,selection_command +1491,5178321,"genie.py",1984,389," # self.dynamics = DynamicsMaskGIT(\n # model_dim=self.dyna_dim,\n # num_latents=self.num_patch_latents,\n # num_blocks=self.dyna_num_blocks,\n # num_heads=self.dyna_num_heads,\n # dropout=self.dropout,\n # mask_limit=self.mask_limit,\n # param_dtype=self.param_dtype,\n # dtype=self.dtype,\n # )",python,selection_command +1492,5178588,"genie.py",1992,0,"",python,selection_command +1493,5179622,"genie.py",2370,1,"",python,content +1494,5179622,"genie.py",2342,1,"",python,content +1495,5179622,"genie.py",2298,1,"",python,content +1496,5179622,"genie.py",2256,1,"",python,content +1497,5179622,"genie.py",2220,1,"",python,content +1498,5179622,"genie.py",2175,1,"",python,content +1499,5179622,"genie.py",2128,1,"",python,content +1500,5179622,"genie.py",2078,1,"",python,content +1501,5179622,"genie.py",2039,1,"",python,content +1502,5179623,"genie.py",1992,1,"",python,content +1503,5179758,"genie.py",2361,1,"",python,content +1504,5179758,"genie.py",2334,1,"",python,content +1505,5179758,"genie.py",2291,1,"",python,content +1506,5179758,"genie.py",2250,1,"",python,content +1507,5179758,"genie.py",2215,1,"",python,content +1508,5179758,"genie.py",2171,1,"",python,content +1509,5179758,"genie.py",2125,1,"",python,content +1510,5179758,"genie.py",2076,1,"",python,content +1511,5179758,"genie.py",2038,1,"",python,content +1512,5179758,"genie.py",1992,1,"",python,content +1513,5179919,"genie.py",1991,0,"",python,selection_command +1514,5180757,"genie.py",2032,0,"",python,selection_command +1515,5181236,"genie.py",2069,0,"",python,selection_command +1516,5181273,"genie.py",2117,0,"",python,selection_command +1517,5181298,"genie.py",2162,0,"",python,selection_command +1518,5181358,"genie.py",2205,0,"",python,selection_command +1519,5181389,"genie.py",2239,0,"",python,selection_command +1520,5181422,"genie.py",2279,0,"",python,selection_command +1521,5181432,"genie.py",2321,0,"",python,selection_command +1522,5181824,"genie.py",2351,0,"",python,selection_command +1523,5182098,"genie.py",2353,0,"\n ",python,content +1524,5184128,"genie.py",2352,0,"",python,selection_command +1525,5184519,"genie.py",2353,0,"",python,selection_command +1526,5184868,"genie.py",2354,8,"",python,content +1527,5184868,"genie.py",2353,0," ",python,content +1528,5184869,"genie.py",2354,0,"",python,selection_keyboard +1529,5184991,"genie.py",2354,0,"i",python,content 
+1530,5184992,"genie.py",2355,0,"",python,selection_keyboard +1531,5185149,"genie.py",2355,0,"f",python,content +1532,5185150,"genie.py",2356,0,"",python,selection_keyboard +1533,5185215,"genie.py",2356,0," ",python,content +1534,5185216,"genie.py",2357,0,"",python,selection_keyboard +1535,5185510,"genie.py",2357,0,"s",python,content +1536,5185511,"genie.py",2358,0,"",python,selection_keyboard +1537,5185685,"genie.py",2358,0,"e",python,content +1538,5185686,"genie.py",2359,0,"",python,selection_keyboard +1539,5185797,"genie.py",2359,0,"l",python,content +1540,5185798,"genie.py",2360,0,"",python,selection_keyboard +1541,5185888,"genie.py",2360,0,"f",python,content +1542,5185888,"genie.py",2361,0,"",python,selection_keyboard +1543,5186037,"genie.py",2361,0,".",python,content +1544,5186038,"genie.py",2362,0,"",python,selection_keyboard +1545,5186544,"genie.py",2362,0," ",python,content +1546,5186545,"genie.py",2363,0,"",python,selection_keyboard +1547,5186970,"genie.py",2362,1,"",python,content +1548,5187260,"genie.py",2362,0,"u",python,content +1549,5187261,"genie.py",2363,0,"",python,selection_keyboard +1550,5187356,"genie.py",2363,0,"s",python,content +1551,5187357,"genie.py",2364,0,"",python,selection_keyboard +1552,5188129,"genie.py",2362,2,"use_maskgit",python,content +1553,5189544,"genie.py",2373,0," ",python,content +1554,5189545,"genie.py",2374,0,"",python,selection_keyboard +1555,5189925,"genie.py",2374,0,"e",python,content +1556,5189926,"genie.py",2375,0,"",python,selection_keyboard +1557,5190274,"genie.py",2375,0,"l",python,content +1558,5190275,"genie.py",2376,0,"",python,selection_keyboard +1559,5190807,"genie.py",2376,0,"s",python,content +1560,5190808,"genie.py",2377,0,"",python,selection_keyboard +1561,5191008,"genie.py",2377,0,"e",python,content +1562,5191009,"genie.py",2378,0,"",python,selection_keyboard +1563,5191150,"genie.py",2378,0," ",python,content +1564,5191151,"genie.py",2379,0,"",python,selection_keyboard +1565,5191545,"genie.py",2378,0,"",python,selection_command +1566,5191737,"genie.py",2380,0,"",python,selection_command +1567,5192315,"genie.py",2415,0,"",python,selection_command +1568,5192721,"genie.py",2415,12,"",python,content +1569,5193315,"genie.py",2415,0,"toregressive",python,content +1570,5193348,"genie.py",2415,0,"",python,selection_command +1571,5193764,"genie.py",2414,0,"",python,selection_command +1572,5194237,"genie.py",2413,0,"",python,selection_command +1573,5194295,"genie.py",2412,0,"",python,selection_command +1574,5194295,"genie.py",2411,0,"",python,selection_command +1575,5194336,"genie.py",2410,0,"",python,selection_command +1576,5194366,"genie.py",2409,0,"",python,selection_command +1577,5194417,"genie.py",2408,0,"",python,selection_command +1578,5194448,"genie.py",2407,0,"",python,selection_command +1579,5194477,"genie.py",2406,0,"",python,selection_command +1580,5194504,"genie.py",2405,0,"",python,selection_command +1581,5194515,"genie.py",2404,0,"",python,selection_command +1582,5194586,"genie.py",2403,0,"",python,selection_command +1583,5194630,"genie.py",2402,0,"",python,selection_command +1584,5194823,"genie.py",2401,0,"",python,selection_command +1585,5195330,"genie.py",2400,0,"",python,selection_command +1586,5195373,"genie.py",2399,0,"",python,selection_command +1587,5195397,"genie.py",2398,0,"",python,selection_command +1588,5195444,"genie.py",2397,0,"",python,selection_command +1589,5195453,"genie.py",2396,0,"",python,selection_command +1590,5195468,"genie.py",2395,0,"",python,selection_command 
+1591,5195528,"genie.py",2394,0,"",python,selection_command +1592,5195539,"genie.py",2393,0,"",python,selection_command +1593,5195592,"genie.py",2392,0,"",python,selection_command +1594,5195663,"genie.py",2391,0,"",python,selection_command +1595,5195841,"genie.py",2390,0,"",python,selection_command +1596,5196000,"genie.py",2389,0,"",python,selection_command +1597,5196487,"genie.py",2389,4,"",python,content +1598,5197081,"genie.py",2389,1,"",python,content +1599,5197249,"genie.py",2389,1,"",python,content +1600,5197471,"genie.py",2389,1,"",python,content +1601,5197654,"genie.py",2389,1,"",python,content +1602,5197805,"genie.py",2389,1,"",python,content +1603,5198005,"genie.py",2389,1,"",python,content +1604,5198164,"genie.py",2389,1,"",python,content +1605,5198384,"genie.py",2389,1,"",python,content +1606,5198583,"genie.py",2389,1,"",python,content +1607,5198793,"genie.py",2389,1,"",python,content +1608,5199052,"genie.py",2389,1,"",python,content +1609,5199529,"genie.py",2389,1,"",python,content +1610,5200169,"genie.py",2385,4,"",python,content +1611,5200553,"genie.py",2381,4,"",python,content +1612,5200947,"genie.py",2380,1,"",python,content +1613,5201414,"genie.py",2379,1,"",python,content +1614,5203184,"genie.py",2692,0,"",python,selection_mouse +1615,5203871,"genie.py",2691,0,"",python,selection_mouse +1616,5222192,"genie.py",1999,0,"",python,selection_mouse +1617,5222690,"genie.py",1983,0,"",python,selection_mouse +1618,5223788,"genie.py",1983,0,"\n",python,content +1619,5224207,"genie.py",1984,0," ",python,content +1620,5224742,"genie.py",1988,0," ",python,content +1621,5224937,"genie.py",1992,0,"i",python,content +1622,5224938,"genie.py",1993,0,"",python,selection_keyboard +1623,5225043,"genie.py",1993,0,"f",python,content +1624,5225044,"genie.py",1994,0,"",python,selection_keyboard +1625,5225289,"genie.py",1994,0," ",python,content +1626,5225290,"genie.py",1995,0,"",python,selection_keyboard +1627,5226412,"genie.py",1995,0,"s",python,content +1628,5226413,"genie.py",1996,0,"",python,selection_keyboard +1629,5226891,"genie.py",1995,1,"",python,content +1630,5227602,"genie.py",1995,0,"s",python,content +1631,5227602,"genie.py",1996,0,"",python,selection_keyboard +1632,5227797,"genie.py",1996,0,"e",python,content +1633,5227798,"genie.py",1997,0,"",python,selection_keyboard +1634,5227878,"genie.py",1997,0,"l",python,content +1635,5227878,"genie.py",1998,0,"",python,selection_keyboard +1636,5227956,"genie.py",1998,0,"f",python,content +1637,5227957,"genie.py",1999,0,"",python,selection_keyboard +1638,5228077,"genie.py",1999,0,".",python,content +1639,5228078,"genie.py",2000,0,"",python,selection_keyboard +1640,5228317,"genie.py",2000,0,"u",python,content +1641,5228318,"genie.py",2001,0,"",python,selection_keyboard +1642,5228386,"genie.py",2001,0,"s",python,content +1643,5228388,"genie.py",2002,0,"",python,selection_keyboard +1644,5229092,"genie.py",2000,2,"use_maskgit",python,content +1645,5230478,"genie.py",2011,0,":",python,content +1646,5230479,"genie.py",2012,0,"",python,selection_keyboard +1647,5230682,"genie.py",2012,0,"\n ",python,content +1648,5230926,"genie.py",2013,12,"",python,content +1649,5231181,"genie.py",2014,0,"",python,selection_command +1650,5231355,"genie.py",2055,0,"",python,selection_command +1651,5231564,"genie.py",2014,0,"",python,selection_command +1652,5233180,"genie.py",2055,0,"",python,selection_command +1653,5233680,"genie.py",2092,0,"",python,selection_command +1654,5233755,"genie.py",2140,0,"",python,selection_command 
+1655,5233756,"genie.py",2185,0,"",python,selection_command +1656,5233780,"genie.py",2228,0,"",python,selection_command +1657,5233802,"genie.py",2262,0,"",python,selection_command +1658,5233865,"genie.py",2302,0,"",python,selection_command +1659,5233866,"genie.py",2344,0,"",python,selection_command +1660,5233922,"genie.py",2374,0,"",python,selection_command +1661,5234500,"genie.py",2432,0,"",python,selection_command +1662,5236224,"genie.py",2392,12,"",python,content +1663,5236522,"genie.py",2391,1,"",python,content +1664,5236863,"genie.py",2387,4,"",python,content +1665,5237249,"genie.py",2384,3,"",python,content +1666,5237554,"genie.py",2384,0,"\n ",python,content +1667,5239341,"genie.py",2021,0,"",python,selection_mouse +1668,5240263,"genie.py",1993,0,"",python,selection_mouse +1669,5241109,"genie.py",1992,0,"",python,selection_command +1670,5241886,"genie.py",2013,0,"",python,selection_command +1671,5242232,"genie.py",2013,1,"",python,content +1672,5242289,"genie.py",2021,0,"",python,selection_command +1673,5243463,"genie.py",2013,40," self.dynamics = DynamicsMaskGIT(",python,selection_command +1674,5243658,"genie.py",2013,77," self.dynamics = DynamicsMaskGIT(\n model_dim=self.dyna_dim,",python,selection_command +1675,5244138,"genie.py",2013,125," self.dynamics = DynamicsMaskGIT(\n model_dim=self.dyna_dim,\n num_latents=self.num_patch_latents,",python,selection_command +1676,5244182,"genie.py",2013,170," self.dynamics = DynamicsMaskGIT(\n model_dim=self.dyna_dim,\n num_latents=self.num_patch_latents,\n num_blocks=self.dyna_num_blocks,",python,selection_command +1677,5244230,"genie.py",2013,213," self.dynamics = DynamicsMaskGIT(\n model_dim=self.dyna_dim,\n num_latents=self.num_patch_latents,\n num_blocks=self.dyna_num_blocks,\n num_heads=self.dyna_num_heads,",python,selection_command +1678,5244262,"genie.py",2013,247," self.dynamics = DynamicsMaskGIT(\n model_dim=self.dyna_dim,\n num_latents=self.num_patch_latents,\n num_blocks=self.dyna_num_blocks,\n num_heads=self.dyna_num_heads,\n dropout=self.dropout,",python,selection_command +1679,5244309,"genie.py",2013,287," self.dynamics = DynamicsMaskGIT(\n model_dim=self.dyna_dim,\n num_latents=self.num_patch_latents,\n num_blocks=self.dyna_num_blocks,\n num_heads=self.dyna_num_heads,\n dropout=self.dropout,\n mask_limit=self.mask_limit,",python,selection_command +1680,5244309,"genie.py",2013,329," self.dynamics = DynamicsMaskGIT(\n model_dim=self.dyna_dim,\n num_latents=self.num_patch_latents,\n num_blocks=self.dyna_num_blocks,\n num_heads=self.dyna_num_heads,\n dropout=self.dropout,\n mask_limit=self.mask_limit,\n param_dtype=self.param_dtype,",python,selection_command +1681,5244396,"genie.py",2013,359," self.dynamics = DynamicsMaskGIT(\n model_dim=self.dyna_dim,\n num_latents=self.num_patch_latents,\n num_blocks=self.dyna_num_blocks,\n num_heads=self.dyna_num_heads,\n dropout=self.dropout,\n mask_limit=self.mask_limit,\n param_dtype=self.param_dtype,\n dtype=self.dtype,",python,selection_command +1682,5244541,"genie.py",2013,370," self.dynamics = DynamicsMaskGIT(\n model_dim=self.dyna_dim,\n num_latents=self.num_patch_latents,\n num_blocks=self.dyna_num_blocks,\n num_heads=self.dyna_num_heads,\n dropout=self.dropout,\n mask_limit=self.mask_limit,\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n ) ",python,selection_command +1683,5244852,"genie.py",2021,0,"",python,selection_command +1684,5245028,"genie.py",2381,0," ",python,content +1685,5245029,"genie.py",2355,0," ",python,content +1686,5245029,"genie.py",2313,0," ",python,content 
+1687,5245029,"genie.py",2273,0," ",python,content +1688,5245029,"genie.py",2239,0," ",python,content +1689,5245029,"genie.py",2196,0," ",python,content +1690,5245029,"genie.py",2151,0," ",python,content +1691,5245029,"genie.py",2103,0," ",python,content +1692,5245029,"genie.py",2066,0," ",python,content +1693,5245029,"genie.py",2021,0," ",python,content +1694,5245365,"genie.py",2024,0,"",python,selection_command +1695,5245528,"genie.py",2069,0,"",python,selection_command +1696,5245707,"genie.py",2110,0,"",python,selection_command +1697,5246222,"genie.py",2162,0,"",python,selection_command +1698,5246229,"genie.py",2211,0,"",python,selection_command +1699,5246292,"genie.py",2258,0,"",python,selection_command +1700,5246292,"genie.py",2296,0,"",python,selection_command +1701,5246332,"genie.py",2340,0,"",python,selection_command +1702,5246476,"genie.py",2386,0,"",python,selection_command +1703,5246658,"genie.py",2420,0,"",python,selection_command +1704,5247010,"genie.py",2435,0,"",python,selection_command +1705,5247311,"genie.py",2436,0,"",python,selection_command +1706,5247910,"genie.py",2436,0,":",python,content +1707,5247911,"genie.py",2437,0,"",python,selection_keyboard +1708,5248235,"genie.py",2436,0,"",python,selection_command +1709,5249225,"genie.py",2437,0,"",python,selection_command +1710,5249564,"genie.py",2437,0,"\n ",python,content +1711,5249623,"genie.py",2450,1,"",python,content +1712,5249797,"genie.py",2449,0,"",python,selection_command +1713,5250450,"genie.py",2485,0,"",python,selection_command +1714,5250953,"genie.py",2522,0,"",python,selection_command +1715,5250995,"genie.py",2570,0,"",python,selection_command +1716,5251006,"genie.py",2615,0,"",python,selection_command +1717,5251040,"genie.py",2658,0,"",python,selection_command +1718,5251084,"genie.py",2692,0,"",python,selection_command +1719,5251177,"genie.py",2734,0,"",python,selection_command +1720,5251486,"genie.py",2761,0,"",python,selection_command +1721,5251922,"genie.py",2761,0," ",python,content +1722,5252332,"genie.py",2764,0,"",python,selection_command +1723,5254453,"genie.py",2387,0,"",python,selection_mouse +1724,5255682,"genie.py",2489,0,"",python,selection_mouse +1725,5255818,"genie.py",2486,9,"model_dim",python,selection_mouse +1726,5255999,"genie.py",2486,48,"model_dim=self.dyna_dim,\n num_latents",python,selection_mouse +1727,5256000,"genie.py",2486,95,"model_dim=self.dyna_dim,\n num_latents=self.num_patch_latents,\n num_blocks",python,selection_mouse +1728,5256000,"genie.py",2486,139,"model_dim=self.dyna_dim,\n num_latents=self.num_patch_latents,\n num_blocks=self.dyna_num_blocks,\n num_heads",python,selection_mouse +1729,5256037,"genie.py",2486,185,"model_dim=self.dyna_dim,\n num_latents=self.num_patch_latents,\n num_blocks=self.dyna_num_blocks,\n num_heads=self.dyna_num_heads,\n dropout=self",python,selection_mouse +1730,5256105,"genie.py",2486,218,"model_dim=self.dyna_dim,\n num_latents=self.num_patch_latents,\n num_blocks=self.dyna_num_blocks,\n num_heads=self.dyna_num_heads,\n dropout=self.dropout,\n param_dtype",python,selection_mouse +1731,5256454,"genie.py",2486,260,"model_dim=self.dyna_dim,\n num_latents=self.num_patch_latents,\n num_blocks=self.dyna_num_blocks,\n num_heads=self.dyna_num_heads,\n dropout=self.dropout,\n param_dtype=self.param_dtype,\n dtype=self.",python,selection_mouse +1732,5256553,"genie.py",2486,265,"model_dim=self.dyna_dim,\n num_latents=self.num_patch_latents,\n num_blocks=self.dyna_num_blocks,\n num_heads=self.dyna_num_heads,\n dropout=self.dropout,\n 
param_dtype=self.param_dtype,\n dtype=self.dtype",python,selection_mouse +1733,5257861,"genie.py",2486,0,"",python,selection_command +1734,5258477,"genie.py",2735,0," ",python,content +1735,5258477,"genie.py",2693,0," ",python,content +1736,5258477,"genie.py",2659,0," ",python,content +1737,5258477,"genie.py",2616,0," ",python,content +1738,5258477,"genie.py",2571,0," ",python,content +1739,5258477,"genie.py",2523,0," ",python,content +1740,5258477,"genie.py",2486,0," ",python,content +1741,5258832,"genie.py",2489,0,"",python,selection_command +1742,5259560,"genie.py",2607,0,"",python,selection_mouse +1743,5260714,"genie.py",2460,0,"",python,selection_mouse +1744,5260859,"genie.py",2450,22,"DynamicsAutoregressive",python,selection_mouse +1745,5263373,"genie.py",2450,0,"",python,selection_mouse +1746,5264357,"genie.py",2450,0,"s",python,content +1747,5264358,"genie.py",2451,0,"",python,selection_keyboard +1748,5264540,"genie.py",2451,0,"e",python,content +1749,5264541,"genie.py",2452,0,"",python,selection_keyboard +1750,5264637,"genie.py",2452,0,"l",python,content +1751,5264638,"genie.py",2453,0,"",python,selection_keyboard +1752,5264731,"genie.py",2453,0,"f",python,content +1753,5264732,"genie.py",2454,0,"",python,selection_keyboard +1754,5264834,"genie.py",2454,0,".",python,content +1755,5264835,"genie.py",2455,0,"",python,selection_keyboard +1756,5265140,"genie.py",2455,0,"d",python,content +1757,5265140,"genie.py",2456,0,"",python,selection_keyboard +1758,5265336,"genie.py",2456,0,"y",python,content +1759,5265337,"genie.py",2457,0,"",python,selection_keyboard +1760,5265433,"genie.py",2457,0,"n",python,content +1761,5265434,"genie.py",2458,0,"",python,selection_keyboard +1762,5265575,"genie.py",2458,0,"a",python,content +1763,5265575,"genie.py",2459,0,"",python,selection_keyboard +1764,5265646,"genie.py",2459,0,"m",python,content +1765,5265647,"genie.py",2460,0,"",python,selection_keyboard +1766,5265831,"genie.py",2460,0,"i",python,content +1767,5265832,"genie.py",2461,0,"",python,selection_keyboard +1768,5265890,"genie.py",2461,0,"c",python,content +1769,5265891,"genie.py",2462,0,"",python,selection_keyboard +1770,5265968,"genie.py",2462,0,"s",python,content +1771,5265969,"genie.py",2463,0,"",python,selection_keyboard +1772,5266122,"genie.py",2463,0,":",python,content +1773,5266123,"genie.py",2464,0,"",python,selection_keyboard +1774,5266576,"genie.py",2463,1,"",python,content +1775,5267121,"genie.py",2463,0," ",python,content +1776,5267122,"genie.py",2464,0,"",python,selection_keyboard +1777,5267315,"genie.py",2464,0,"=",python,content +1778,5267316,"genie.py",2465,0,"",python,selection_keyboard +1779,5267370,"genie.py",2465,0," ",python,content +1780,5267370,"genie.py",2466,0,"",python,selection_keyboard +1781,5268183,"genie.py",2465,0,"",python,selection_command +1782,5269973,"genie.py",2796,0,"",python,selection_mouse +1783,5269979,"genie.py",2795,0,"",python,selection_command +1784,5270681,"genie.py",2810,0,"",python,selection_mouse +1785,5270682,"genie.py",2809,0,"",python,selection_command +1786,5272952,"models/dynamics.py",0,0,"",python,tab +1787,5272953,"models/dynamics.py",2355,0,"",python,selection_mouse +1788,5274979,"models/dynamics.py",3045,0,"",python,selection_mouse +1789,5275468,"models/dynamics.py",3045,0,"d",python,content +1790,5275470,"models/dynamics.py",3046,0,"",python,selection_keyboard +1791,5275521,"models/dynamics.py",3046,0,"j",python,content +1792,5275522,"models/dynamics.py",3047,0,"",python,selection_keyboard 
+1793,5276033,"models/dynamics.py",3046,1,"",python,content +1794,5276167,"models/dynamics.py",3045,1,"",python,content +1795,5276278,"models/dynamics.py",3044,0,"",python,selection_command +1796,5276637,"models/dynamics.py",3031,37,"",python,content +1797,5276705,"models/dynamics.py",2988,0,"",python,selection_command +1798,5278330,"genie.py",0,0,"",python,tab +1799,5278331,"genie.py",2475,0,"",python,selection_mouse +1800,5278576,"models/dynamics.py",0,0,"",python,tab +1801,5281274,"genie.py",0,0,"",python,tab +1802,5283034,"models/dynamics.py",0,0,"",python,tab +1803,5283035,"models/dynamics.py",2171,0,"",python,selection_mouse +1804,5284527,"models/dynamics.py",2176,0,"",python,selection_mouse +1805,5284674,"models/dynamics.py",2170,17,"CausalTransformer",python,selection_mouse +1806,5285559,"models/dynamics.py",2145,0,"",python,selection_mouse +1807,5285604,"models/dynamics.py",2144,0,"",python,selection_command +1808,5286604,"models/dynamics.py",2177,0,"",python,selection_mouse +1809,5287692,"genie.py",0,0,"",python,tab +1810,5291011,"models/dynamics.py",0,0,"",python,tab +1811,5292543,"models/dynamics.py",2178,0,"",python,selection_mouse +1812,5292816,"utils/nn.py",0,0,"",python,tab +1813,5297255,"models/dynamics.py",0,0,"",python,tab +1814,5305735,"models/dynamics.py",417,0,"",python,selection_mouse +1815,5305764,"models/dynamics.py",416,0,"",python,selection_command +1816,5306326,"models/dynamics.py",448,0,"",python,selection_mouse +1817,5306469,"models/dynamics.py",442,13,"STTransformer",python,selection_mouse +1818,5310498,"models/dynamics.py",641,0,"",python,selection_mouse +1819,5310783,"models/dynamics.py",640,1,"s",python,selection_mouse +1820,5310783,"models/dynamics.py",636,5," s",python,selection_mouse +1821,5310784,"models/dynamics.py",576,65," self.dropout,\n self.param_dtype,\n s",python,selection_mouse +1822,5310784,"models/dynamics.py",574,67," self.dropout,\n self.param_dtype,\n s",python,selection_mouse +1823,5310784,"models/dynamics.py",545,96," self.num_heads,\n self.dropout,\n self.param_dtype,\n s",python,selection_mouse +1824,5310820,"models/dynamics.py",544,97," self.num_heads,\n self.dropout,\n self.param_dtype,\n s",python,selection_mouse +1825,5310850,"models/dynamics.py",515,126," self.num_blocks,\n self.num_heads,\n self.dropout,\n self.param_dtype,\n s",python,selection_mouse +1826,5310912,"models/dynamics.py",485,156," self.num_latents,\n self.num_blocks,\n self.num_heads,\n self.dropout,\n self.param_dtype,\n s",python,selection_mouse +1827,5310965,"models/dynamics.py",457,184," self.model_dim,\n self.num_latents,\n self.num_blocks,\n self.num_heads,\n self.dropout,\n self.param_dtype,\n s",python,selection_mouse +1828,5311292,"models/dynamics.py",418,223," self.dynamics = STTransformer(\n self.model_dim,\n self.num_latents,\n self.num_blocks,\n self.num_heads,\n self.dropout,\n self.param_dtype,\n s",python,selection_mouse +1829,5313287,"models/dynamics.py",661,0,"",python,selection_mouse +1830,5313296,"models/dynamics.py",660,0,"",python,selection_command +1831,5313572,"models/dynamics.py",660,1,")",python,selection_mouse +1832,5313656,"models/dynamics.py",661,0,"",python,selection_command +1833,5313657,"models/dynamics.py",638,23," self.dtype,\n )",python,selection_mouse +1834,5313657,"models/dynamics.py",636,25," self.dtype,\n )",python,selection_mouse +1835,5313657,"models/dynamics.py",604,57," self.param_dtype,\n self.dtype,\n )",python,selection_mouse +1836,5313691,"models/dynamics.py",575,86," self.dropout,\n self.param_dtype,\n self.dtype,\n 
)",python,selection_mouse +1837,5313722,"models/dynamics.py",545,116," self.num_heads,\n self.dropout,\n self.param_dtype,\n self.dtype,\n )",python,selection_mouse +1838,5313752,"models/dynamics.py",485,176," self.num_latents,\n self.num_blocks,\n self.num_heads,\n self.dropout,\n self.param_dtype,\n self.dtype,\n )",python,selection_mouse +1839,5313778,"models/dynamics.py",457,204," self.model_dim,\n self.num_latents,\n self.num_blocks,\n self.num_heads,\n self.dropout,\n self.param_dtype,\n self.dtype,\n )",python,selection_mouse +1840,5313912,"models/dynamics.py",418,243," self.dynamics = STTransformer(\n self.model_dim,\n self.num_latents,\n self.num_blocks,\n self.num_heads,\n self.dropout,\n self.param_dtype,\n self.dtype,\n )",python,selection_mouse +1841,5317186,"models/dynamics.py",2393,0,"",python,selection_mouse +1842,5317203,"models/dynamics.py",2392,0,"",python,selection_command +1843,5317392,"models/dynamics.py",2392,1,")",python,selection_mouse +1844,5317392,"models/dynamics.py",2368,24," self.dtype,\n ",python,selection_mouse +1845,5317393,"models/dynamics.py",2306,86," self.dropout,\n self.param_dtype,\n self.dtype,\n ",python,selection_mouse +1846,5317426,"models/dynamics.py",2393,0,"",python,selection_command +1847,5317427,"models/dynamics.py",2276,117," self.num_heads,\n self.dropout,\n self.param_dtype,\n self.dtype,\n )",python,selection_mouse +1848,5317469,"models/dynamics.py",2247,146," self.num_blocks,\n self.num_heads,\n self.dropout,\n self.param_dtype,\n self.dtype,\n )",python,selection_mouse +1849,5317498,"models/dynamics.py",2217,176," self.num_latents,\n self.num_blocks,\n self.num_heads,\n self.dropout,\n self.param_dtype,\n self.dtype,\n )",python,selection_mouse +1850,5317528,"models/dynamics.py",2189,204," self.model_dim,\n self.num_latents,\n self.num_blocks,\n self.num_heads,\n self.dropout,\n self.param_dtype,\n self.dtype,\n )",python,selection_mouse +1851,5318920,"models/dynamics.py",2146,247," self.dynamics = CausalTransformer(\n self.model_dim,\n self.num_latents,\n self.num_blocks,\n self.num_heads,\n self.dropout,\n self.param_dtype,\n self.dtype,\n )",python,selection_mouse +1852,5320114,"models/dynamics.py",2146,247,"",python,content +1853,5321259,"models/dynamics.py",2146,0," self.dynamics = STTransformer(\n self.model_dim,\n self.num_latents,\n self.num_blocks,\n self.num_heads,\n self.dropout,\n self.param_dtype,\n self.dtype,\n )",python,content +1854,5322858,"models/dynamics.py",2176,0,"",python,selection_mouse +1855,5322985,"models/dynamics.py",2170,13,"STTransformer",python,selection_mouse +1856,5323857,"models/dynamics.py",2265,0,"",python,selection_mouse +1857,5324363,"models/dynamics.py",2375,0,"",python,selection_mouse +1858,5324499,"models/dynamics.py",2373,5,"dtype",python,selection_mouse +1859,5327233,"models/dynamics.py",2375,0,"",python,selection_mouse +1860,5327381,"models/dynamics.py",2373,5,"dtype",python,selection_mouse +1861,5384290,"models/dynamics.py",2355,0,"",python,selection_mouse +1862,5385358,"models/dynamics.py",2178,0,"",python,selection_mouse +1863,5385501,"models/dynamics.py",2170,13,"STTransformer",python,selection_mouse +1864,5390156,"models/dynamics.py",2299,0,"",python,selection_mouse +1865,5390639,"models/dynamics.py",2355,0,"",python,selection_mouse +1866,5391147,"models/dynamics.py",2325,0,"",python,selection_mouse +1867,5391562,"models/dynamics.py",2325,0,"\n ",python,content +1868,5392367,"models/dynamics.py",2338,0,"w",python,content 
+1869,5392368,"models/dynamics.py",2339,0,"",python,selection_keyboard +1870,5392592,"models/dynamics.py",2339,0,"3",python,content +1871,5392594,"models/dynamics.py",2340,0,"",python,selection_keyboard +1872,5392787,"models/dynamics.py",2340,0,"l",python,content +1873,5392788,"models/dynamics.py",2341,0,"",python,selection_keyboard +1874,5392902,"models/dynamics.py",2341,0,"r",python,content +1875,5392903,"models/dynamics.py",2342,0,"",python,selection_keyboard +1876,5393197,"models/dynamics.py",2341,1,"",python,content +1877,5393310,"models/dynamics.py",2340,1,"",python,content +1878,5393457,"models/dynamics.py",2339,1,"",python,content +1879,5393891,"models/dynamics.py",2338,1,"",python,content +1880,5394027,"models/dynamics.py",2338,0,"s",python,content +1881,5394028,"models/dynamics.py",2339,0,"",python,selection_keyboard +1882,5394250,"models/dynamics.py",2339,0,"e",python,content +1883,5394251,"models/dynamics.py",2340,0,"",python,selection_keyboard +1884,5394332,"models/dynamics.py",2340,0,"l",python,content +1885,5394333,"models/dynamics.py",2341,0,"",python,selection_keyboard +1886,5394457,"models/dynamics.py",2341,0,"f",python,content +1887,5394458,"models/dynamics.py",2342,0,"",python,selection_keyboard +1888,5394547,"models/dynamics.py",2342,0,".",python,content +1889,5394547,"models/dynamics.py",2343,0,"",python,selection_keyboard +1890,5396639,"models/dynamics.py",2343,0,"u",python,content +1891,5396641,"models/dynamics.py",2344,0,"",python,selection_keyboard +1892,5396752,"models/dynamics.py",2344,0,"s",python,content +1893,5396754,"models/dynamics.py",2345,0,"",python,selection_keyboard +1894,5396927,"models/dynamics.py",2345,0,"e",python,content +1895,5396928,"models/dynamics.py",2346,0,"",python,selection_keyboard +1896,5397213,"models/dynamics.py",2346,0,"_",python,content +1897,5397213,"models/dynamics.py",2347,0,"",python,selection_keyboard +1898,5397505,"models/dynamics.py",2347,0,"m",python,content +1899,5397506,"models/dynamics.py",2348,0,"",python,selection_keyboard +1900,5397608,"models/dynamics.py",2348,0,"a",python,content +1901,5397609,"models/dynamics.py",2349,0,"",python,selection_keyboard +1902,5397682,"models/dynamics.py",2349,0,"s",python,content +1903,5397683,"models/dynamics.py",2350,0,"",python,selection_keyboard +1904,5398043,"models/dynamics.py",2350,0,"k",python,content +1905,5398044,"models/dynamics.py",2351,0,"",python,selection_keyboard +1906,5398198,"models/dynamics.py",2351,0,"g",python,content +1907,5398198,"models/dynamics.py",2352,0,"",python,selection_keyboard +1908,5398643,"models/dynamics.py",2352,0,"i",python,content +1909,5398643,"models/dynamics.py",2353,0,"",python,selection_keyboard +1910,5398871,"models/dynamics.py",2353,0,"t",python,content +1911,5398872,"models/dynamics.py",2354,0,"",python,selection_keyboard +1912,5399892,"models/dynamics.py",2354,0,",",python,content +1913,5399893,"models/dynamics.py",2355,0,"",python,selection_keyboard +1914,5401364,"models/dynamics.py",2348,0,"",python,selection_mouse +1915,5401510,"models/dynamics.py",2343,11,"use_maskgit",python,selection_mouse +1916,5401814,"models/dynamics.py",2326,30," self.use_maskgit,\n",python,selection_mouse +1917,5402603,"models/dynamics.py",2326,30,"",python,content +1918,5403628,"models/dynamics.py",2325,0,"",python,selection_mouse +1919,5404955,"utils/nn.py",0,0,"",python,tab +1920,5409190,"utils/nn.py",5884,0,"",python,selection_mouse +1921,5409201,"utils/nn.py",5883,0,"",python,selection_command +1922,5409640,"utils/nn.py",5836,0,"",python,selection_mouse 
+1923,5409641,"utils/nn.py",5835,0,"",python,selection_command +1924,5411299,"utils/nn.py",5836,0,"\n ",python,content +1925,5413609,"utils/nn.py",5841,0,"s",python,content +1926,5413610,"utils/nn.py",5842,0,"",python,selection_keyboard +1927,5413706,"utils/nn.py",5842,0,"p",python,content +1928,5413707,"utils/nn.py",5843,0,"",python,selection_keyboard +1929,5413855,"utils/nn.py",5843,0,"a",python,content +1930,5413857,"utils/nn.py",5844,0,"",python,selection_keyboard +1931,5414049,"utils/nn.py",5844,0,"c",python,content +1932,5414049,"utils/nn.py",5845,0,"",python,selection_keyboard +1933,5414157,"utils/nn.py",5845,0,"i",python,content +1934,5414158,"utils/nn.py",5846,0,"",python,selection_keyboard +1935,5414697,"utils/nn.py",5845,1,"",python,content +1936,5414840,"utils/nn.py",5844,1,"",python,content +1937,5414979,"utils/nn.py",5843,1,"",python,content +1938,5416182,"utils/nn.py",5843,0,"a",python,content +1939,5416183,"utils/nn.py",5844,0,"",python,selection_keyboard +1940,5416590,"utils/nn.py",5844,0,"c",python,content +1941,5416591,"utils/nn.py",5845,0,"",python,selection_keyboard +1942,5416693,"utils/nn.py",5845,0,"i",python,content +1943,5416694,"utils/nn.py",5846,0,"",python,selection_keyboard +1944,5416842,"utils/nn.py",5846,0,"a",python,content +1945,5416843,"utils/nn.py",5847,0,"",python,selection_keyboard +1946,5416938,"utils/nn.py",5847,0,"l",python,content +1947,5416939,"utils/nn.py",5848,0,"",python,selection_keyboard +1948,5417953,"utils/nn.py",5848,0,"_",python,content +1949,5417954,"utils/nn.py",5849,0,"",python,selection_keyboard +1950,5422592,"utils/nn.py",5841,8,"",python,content +1951,5438683,"utils/nn.py",5841,0,"s",python,content +1952,5438684,"utils/nn.py",5842,0,"",python,selection_keyboard +1953,5438794,"utils/nn.py",5842,0,"p",python,content +1954,5438795,"utils/nn.py",5843,0,"",python,selection_keyboard +1955,5438945,"utils/nn.py",5843,0,"a",python,content +1956,5438946,"utils/nn.py",5844,0,"",python,selection_keyboard +1957,5439156,"utils/nn.py",5844,0,"c",python,content +1958,5439157,"utils/nn.py",5845,0,"",python,selection_keyboard +1959,5439312,"utils/nn.py",5845,0,"i",python,content +1960,5439313,"utils/nn.py",5846,0,"",python,selection_keyboard +1961,5439403,"utils/nn.py",5846,0,"a",python,content +1962,5439404,"utils/nn.py",5847,0,"",python,selection_keyboard +1963,5439567,"utils/nn.py",5847,0,"l",python,content +1964,5439568,"utils/nn.py",5848,0,"",python,selection_keyboard +1965,5440774,"utils/nn.py",5848,0,"_",python,content +1966,5440775,"utils/nn.py",5849,0,"",python,selection_keyboard +1967,5441096,"utils/nn.py",5849,0,"b",python,content +1968,5441097,"utils/nn.py",5850,0,"",python,selection_keyboard +1969,5441198,"utils/nn.py",5850,0,"e",python,content +1970,5441199,"utils/nn.py",5851,0,"",python,selection_keyboard +1971,5441288,"utils/nn.py",5851,0,"r",python,content +1972,5441289,"utils/nn.py",5852,0,"",python,selection_keyboard +1973,5441406,"utils/nn.py",5852,0,"t",python,content +1974,5441407,"utils/nn.py",5853,0,"",python,selection_keyboard +1975,5443301,"utils/nn.py",5853,0,":",python,content +1976,5443302,"utils/nn.py",5854,0,"",python,selection_keyboard +1977,5443420,"utils/nn.py",5854,0," ",python,content +1978,5443421,"utils/nn.py",5855,0,"",python,selection_keyboard +1979,5443604,"utils/nn.py",5855,0,"b",python,content +1980,5443605,"utils/nn.py",5856,0,"",python,selection_keyboard +1981,5443784,"utils/nn.py",5856,0,"o",python,content +1982,5443785,"utils/nn.py",5857,0,"",python,selection_keyboard 
+1983,5443927,"utils/nn.py",5857,0,"o",python,content +1984,5443928,"utils/nn.py",5858,0,"",python,selection_keyboard +1985,5444059,"utils/nn.py",5858,0,"l",python,content +1986,5444060,"utils/nn.py",5859,0,"",python,selection_keyboard +1987,5445171,"utils/nn.py",5858,0,"",python,selection_command +1988,5446361,"utils/nn.py",5827,0,"",python,selection_mouse +1989,5447383,"utils/nn.py",5843,0,"",python,selection_mouse +1990,5447566,"utils/nn.py",5841,12,"spacial_bert",python,selection_mouse +1991,5448653,"utils/nn.py",6457,0,"",python,selection_mouse +1992,5448699,"utils/nn.py",6456,0,"",python,selection_command +1993,5449295,"utils/nn.py",6585,0,"",python,selection_mouse +1994,5449847,"utils/nn.py",6719,0,"",python,selection_mouse +1995,5449879,"utils/nn.py",6718,0,"",python,selection_command +1996,5450493,"utils/nn.py",6639,0,"",python,selection_mouse +1997,5450499,"utils/nn.py",6638,0,"",python,selection_command +1998,5451056,"utils/nn.py",6639,0,"\n ",python,content +1999,5451513,"utils/nn.py",6656,0,"s",python,content +2000,5451514,"utils/nn.py",6657,0,"",python,selection_keyboard +2001,5451867,"utils/nn.py",6657,0,"p",python,content +2002,5451869,"utils/nn.py",6658,0,"",python,selection_keyboard +2003,5451920,"utils/nn.py",6658,0,"a",python,content +2004,5451922,"utils/nn.py",6659,0,"",python,selection_keyboard +2005,5452135,"utils/nn.py",6659,0,"c",python,content +2006,5452137,"utils/nn.py",6660,0,"",python,selection_keyboard +2007,5452199,"utils/nn.py",6660,0,"i",python,content +2008,5452201,"utils/nn.py",6661,0,"",python,selection_keyboard +2009,5452351,"utils/nn.py",6661,0,"a",python,content +2010,5452353,"utils/nn.py",6662,0,"",python,selection_keyboard +2011,5452500,"utils/nn.py",6662,0,"l",python,content +2012,5452501,"utils/nn.py",6663,0,"",python,selection_keyboard +2013,5453133,"utils/nn.py",6663,0,"_",python,content +2014,5453134,"utils/nn.py",6664,0,"",python,selection_keyboard +2015,5453519,"utils/nn.py",6664,0,"b",python,content +2016,5453522,"utils/nn.py",6665,0,"",python,selection_keyboard +2017,5453617,"utils/nn.py",6665,0,"a",python,content +2018,5453618,"utils/nn.py",6666,0,"",python,selection_keyboard +2019,5454326,"utils/nn.py",6665,1,"",python,content +2020,5454549,"utils/nn.py",6665,0,"e",python,content +2021,5454550,"utils/nn.py",6666,0,"",python,selection_keyboard +2022,5454643,"utils/nn.py",6666,0,"r",python,content +2023,5454644,"utils/nn.py",6667,0,"",python,selection_keyboard +2024,5454888,"utils/nn.py",6667,0,"t",python,content +2025,5454890,"utils/nn.py",6668,0,"",python,selection_keyboard +2026,5456117,"utils/nn.py",6668,0,"=",python,content +2027,5456119,"utils/nn.py",6669,0,"",python,selection_keyboard +2028,5456998,"utils/nn.py",6669,0,"s",python,content +2029,5456999,"utils/nn.py",6670,0,"",python,selection_keyboard +2030,5457079,"utils/nn.py",6670,0,"ü",python,content +2031,5457080,"utils/nn.py",6671,0,"",python,selection_keyboard +2032,5457231,"utils/nn.py",6671,0,"a",python,content +2033,5457232,"utils/nn.py",6672,0,"",python,selection_keyboard +2034,5457414,"utils/nn.py",6672,0,"c",python,content +2035,5457416,"utils/nn.py",6673,0,"",python,selection_keyboard +2036,5457628,"utils/nn.py",6672,1,"",python,content +2037,5457775,"utils/nn.py",6671,1,"",python,content +2038,5457903,"utils/nn.py",6670,1,"",python,content +2039,5458113,"utils/nn.py",6670,0,"p",python,content +2040,5458114,"utils/nn.py",6671,0,"",python,selection_keyboard +2041,5458316,"utils/nn.py",6671,0,"a",python,content 
+2042,5458317,"utils/nn.py",6672,0,"",python,selection_keyboard +2043,5458498,"utils/nn.py",6672,0,"c",python,content +2044,5458499,"utils/nn.py",6673,0,"",python,selection_keyboard +2045,5458872,"utils/nn.py",6673,0,"i",python,content +2046,5458873,"utils/nn.py",6674,0,"",python,selection_keyboard +2047,5458999,"utils/nn.py",6674,0,"a",python,content +2048,5459000,"utils/nn.py",6675,0,"",python,selection_keyboard +2049,5459094,"utils/nn.py",6675,0,"l",python,content +2050,5459095,"utils/nn.py",6676,0,"",python,selection_keyboard +2051,5459802,"utils/nn.py",6676,0,"_",python,content +2052,5459804,"utils/nn.py",6677,0,"",python,selection_keyboard +2053,5460201,"utils/nn.py",6677,0,"b",python,content +2054,5460203,"utils/nn.py",6678,0,"",python,selection_keyboard +2055,5460557,"utils/nn.py",6678,0,"e",python,content +2056,5460559,"utils/nn.py",6679,0,"",python,selection_keyboard +2057,5460725,"utils/nn.py",6679,0,"r",python,content +2058,5460727,"utils/nn.py",6680,0,"",python,selection_keyboard +2059,5460997,"utils/nn.py",6680,0,"t",python,content +2060,5460999,"utils/nn.py",6681,0,"",python,selection_keyboard +2061,5461448,"utils/nn.py",6681,0,",",python,content +2062,5461449,"utils/nn.py",6682,0,"",python,selection_keyboard +2063,5462224,"utils/nn.py",6681,0,"",python,selection_command +2064,5465646,"utils/nn.py",6520,0,"",python,selection_mouse +2065,5467592,"utils/nn.py",4147,0,"",python,selection_mouse +2066,5468134,"utils/nn.py",4116,0,"",python,selection_mouse +2067,5469007,"utils/nn.py",4094,0,"",python,selection_mouse +2068,5469820,"utils/nn.py",4105,0,"\n ",python,content +2069,5470059,"utils/nn.py",4110,0,"s",python,content +2070,5470059,"utils/nn.py",4111,0,"",python,selection_keyboard +2071,5470259,"utils/nn.py",4111,0,"p",python,content +2072,5470260,"utils/nn.py",4112,0,"",python,selection_keyboard +2073,5470396,"utils/nn.py",4112,0,"a",python,content +2074,5470398,"utils/nn.py",4113,0,"",python,selection_keyboard +2075,5470589,"utils/nn.py",4113,0,"c",python,content +2076,5470590,"utils/nn.py",4114,0,"",python,selection_keyboard +2077,5470670,"utils/nn.py",4114,0,"i",python,content +2078,5470671,"utils/nn.py",4115,0,"",python,selection_keyboard +2079,5470804,"utils/nn.py",4115,0,"a",python,content +2080,5470805,"utils/nn.py",4116,0,"",python,selection_keyboard +2081,5470899,"utils/nn.py",4116,0,"l",python,content +2082,5470900,"utils/nn.py",4117,0,"",python,selection_keyboard +2083,5471247,"utils/nn.py",4117,0,"_",python,content +2084,5471248,"utils/nn.py",4118,0,"",python,selection_keyboard +2085,5471612,"utils/nn.py",4118,0,"b",python,content +2086,5471613,"utils/nn.py",4119,0,"",python,selection_keyboard +2087,5471727,"utils/nn.py",4119,0,"e",python,content +2088,5471728,"utils/nn.py",4120,0,"",python,selection_keyboard +2089,5471864,"utils/nn.py",4120,0,"r",python,content +2090,5471865,"utils/nn.py",4121,0,"",python,selection_keyboard +2091,5472756,"utils/nn.py",4121,0,"t",python,content +2092,5472756,"utils/nn.py",4122,0,"",python,selection_keyboard +2093,5472980,"utils/nn.py",4122,0,":",python,content +2094,5472981,"utils/nn.py",4123,0,"",python,selection_keyboard +2095,5473173,"utils/nn.py",4123,0," ",python,content +2096,5473174,"utils/nn.py",4124,0,"",python,selection_keyboard +2097,5473679,"utils/nn.py",4124,0,"b",python,content +2098,5473680,"utils/nn.py",4125,0,"",python,selection_keyboard +2099,5473895,"utils/nn.py",4125,0,"o",python,content +2100,5473896,"utils/nn.py",4126,0,"",python,selection_keyboard +2101,5474014,"utils/nn.py",4126,0,"o",python,content 
+2102,5474015,"utils/nn.py",4127,0,"",python,selection_keyboard +2103,5474169,"utils/nn.py",4127,0,"l",python,content +2104,5474169,"utils/nn.py",4128,0,"",python,selection_keyboard +2105,5474777,"utils/nn.py",4127,0,"",python,selection_command +2106,5475351,"utils/nn.py",4115,0,"",python,selection_mouse +2107,5475538,"utils/nn.py",4110,12,"spacial_bert",python,selection_mouse +2108,5481564,"utils/nn.py",4664,0,"",python,selection_mouse +2109,5482359,"utils/nn.py",4449,0,"",python,selection_mouse +2110,5482363,"utils/nn.py",4448,0,"",python,selection_command +2111,5483080,"utils/nn.py",4574,0,"",python,selection_mouse +2112,5483583,"utils/nn.py",4696,0,"",python,selection_mouse +2113,5484277,"utils/nn.py",4449,0,"",python,selection_mouse +2114,5484287,"utils/nn.py",4448,0,"",python,selection_command +2115,5485569,"utils/nn.py",4938,0,"",python,selection_mouse +2116,5486469,"utils/nn.py",4920,0,"",python,selection_command +2117,5487003,"utils/nn.py",4895,0,"",python,selection_command +2118,5487014,"utils/nn.py",4853,0,"",python,selection_command +2119,5487019,"utils/nn.py",4827,0,"",python,selection_command +2120,5487059,"utils/nn.py",4783,0,"",python,selection_command +2121,5487142,"utils/nn.py",4754,0,"",python,selection_command +2122,5487143,"utils/nn.py",4717,0,"",python,selection_command +2123,5487154,"utils/nn.py",4700,0,"",python,selection_command +2124,5487192,"utils/nn.py",4698,0,"",python,selection_command +2125,5487237,"utils/nn.py",4680,0,"",python,selection_command +2126,5487244,"utils/nn.py",4655,0,"",python,selection_command +2127,5487285,"utils/nn.py",4613,0,"",python,selection_command +2128,5487378,"utils/nn.py",4574,0,"",python,selection_command +2129,5487528,"utils/nn.py",4539,0,"",python,selection_command +2130,5487672,"utils/nn.py",4501,0,"",python,selection_command +2131,5487823,"utils/nn.py",4466,0,"",python,selection_command +2132,5487952,"utils/nn.py",4448,0,"",python,selection_command +2133,5488249,"utils/nn.py",4449,0,"\n causal_mask = jnp.tri(z.shape[-2])",python,content +2134,5488294,"utils/nn.py",4458,0,"",python,selection_command +2135,5494941,"utils/nn.py",4449,0,"",python,selection_mouse +2136,5494979,"utils/nn.py",4448,0,"",python,selection_command +2137,5496544,"utils/nn.py",4466,0,"",python,selection_mouse +2138,5499099,"utils/nn.py",4723,0,"",python,selection_mouse +2139,5500086,"utils/nn.py",4470,0,"",python,selection_mouse +2140,5501067,"utils/nn.py",4492,0,"",python,selection_mouse +2141,5501070,"utils/nn.py",4491,0,"",python,selection_command +2142,5501525,"utils/nn.py",4492,0,"",python,selection_command +2143,5501726,"utils/nn.py",4492,0," ",python,content +2144,5501728,"utils/nn.py",4493,0,"",python,selection_keyboard +2145,5502072,"utils/nn.py",4493,0,"i",python,content +2146,5502073,"utils/nn.py",4494,0,"",python,selection_keyboard +2147,5502170,"utils/nn.py",4494,0,"f",python,content +2148,5502171,"utils/nn.py",4495,0,"",python,selection_keyboard +2149,5502266,"utils/nn.py",4495,0," ",python,content +2150,5502267,"utils/nn.py",4496,0,"",python,selection_keyboard +2151,5502426,"utils/nn.py",4496,0,"s",python,content +2152,5502428,"utils/nn.py",4497,0,"",python,selection_keyboard +2153,5502683,"utils/nn.py",4497,0,"e",python,content +2154,5502685,"utils/nn.py",4498,0,"",python,selection_keyboard +2155,5502782,"utils/nn.py",4498,0,"l",python,content +2156,5502784,"utils/nn.py",4499,0,"",python,selection_keyboard +2157,5502880,"utils/nn.py",4499,0,"f",python,content +2158,5502882,"utils/nn.py",4500,0,"",python,selection_keyboard 
+2159,5503168,"utils/nn.py",4500,0,"-",python,content +2160,5503170,"utils/nn.py",4501,0,"",python,selection_keyboard +2161,5503729,"utils/nn.py",4500,1,"",python,content +2162,5504216,"utils/nn.py",4500,0,",",python,content +2163,5504216,"utils/nn.py",4501,0,"",python,selection_keyboard +2164,5504425,"utils/nn.py",4501,0,"s",python,content +2165,5504426,"utils/nn.py",4502,0,"",python,selection_keyboard +2166,5504564,"utils/nn.py",4502,0,"p",python,content +2167,5504564,"utils/nn.py",4503,0,"",python,selection_keyboard +2168,5504753,"utils/nn.py",4503,0,"a",python,content +2169,5504753,"utils/nn.py",4504,0,"",python,selection_keyboard +2170,5505225,"utils/nn.py",4504,0,"c",python,content +2171,5505226,"utils/nn.py",4505,0,"",python,selection_keyboard +2172,5505346,"utils/nn.py",4505,0,"i",python,content +2173,5505347,"utils/nn.py",4506,0,"",python,selection_keyboard +2174,5505449,"utils/nn.py",4506,0,"a",python,content +2175,5505450,"utils/nn.py",4507,0,"",python,selection_keyboard +2176,5505567,"utils/nn.py",4507,0,"l",python,content +2177,5505568,"utils/nn.py",4508,0,"",python,selection_keyboard +2178,5505867,"utils/nn.py",4508,0,"_",python,content +2179,5505868,"utils/nn.py",4509,0,"",python,selection_keyboard +2180,5506229,"utils/nn.py",4509,0,"b",python,content +2181,5506230,"utils/nn.py",4510,0,"",python,selection_keyboard +2182,5506374,"utils/nn.py",4510,0,"e",python,content +2183,5506375,"utils/nn.py",4511,0,"",python,selection_keyboard +2184,5506480,"utils/nn.py",4511,0,"r",python,content +2185,5506481,"utils/nn.py",4512,0,"",python,selection_keyboard +2186,5506584,"utils/nn.py",4512,0,"t",python,content +2187,5506585,"utils/nn.py",4513,0,"",python,selection_keyboard +2188,5506713,"utils/nn.py",4513,0," ",python,content +2189,5506714,"utils/nn.py",4514,0,"",python,selection_keyboard +2190,5506824,"utils/nn.py",4514,0,"e",python,content +2191,5506825,"utils/nn.py",4515,0,"",python,selection_keyboard +2192,5506978,"utils/nn.py",4515,0,"l",python,content +2193,5506979,"utils/nn.py",4516,0,"",python,selection_keyboard +2194,5507071,"utils/nn.py",4516,0,"s",python,content +2195,5507072,"utils/nn.py",4517,0,"",python,selection_keyboard +2196,5507232,"utils/nn.py",4517,0,"e",python,content +2197,5507232,"utils/nn.py",4518,0,"",python,selection_keyboard +2198,5507322,"utils/nn.py",4518,0," ",python,content +2199,5507323,"utils/nn.py",4519,0,"",python,selection_keyboard +2200,5507535,"utils/nn.py",4519,0,"N",python,content +2201,5507536,"utils/nn.py",4520,0,"",python,selection_keyboard +2202,5507684,"utils/nn.py",4520,0,"o",python,content +2203,5507685,"utils/nn.py",4521,0,"",python,selection_keyboard +2204,5507868,"utils/nn.py",4521,0,"e",python,content +2205,5507869,"utils/nn.py",4522,0,"",python,selection_keyboard +2206,5508743,"utils/nn.py",4521,1,"",python,content +2207,5508952,"utils/nn.py",4521,0,"n",python,content +2208,5508953,"utils/nn.py",4522,0,"",python,selection_keyboard +2209,5509028,"utils/nn.py",4522,0,"e",python,content +2210,5509029,"utils/nn.py",4523,0,"",python,selection_keyboard +2211,5511825,"utils/nn.py",4753,0,"",python,selection_mouse +2212,5512761,"utils/nn.py",4754,0,"",python,selection_command +2213,5514552,"utils/nn.py",4754,0,",",python,content +2214,5514554,"utils/nn.py",4755,0,"",python,selection_keyboard +2215,5514625,"utils/nn.py",4755,0," ",python,content +2216,5514626,"utils/nn.py",4756,0,"",python,selection_keyboard +2217,5514952,"utils/nn.py",4756,0,"a",python,content +2218,5514953,"utils/nn.py",4757,0,"",python,selection_keyboard 
+2219,5515095,"utils/nn.py",4757,0,",",python,content +2220,5515096,"utils/nn.py",4758,0,"",python,selection_keyboard +2221,5515333,"utils/nn.py",4757,1,"",python,content +2222,5515438,"utils/nn.py",4756,1,"",python,content +2223,5515600,"utils/nn.py",4756,0,",",python,content +2224,5515601,"utils/nn.py",4757,0,"",python,selection_keyboard +2225,5515700,"utils/nn.py",4757,0,"a",python,content +2226,5515701,"utils/nn.py",4758,0,"",python,selection_keyboard +2227,5515761,"utils/nn.py",4758,0,"s",python,content +2228,5515762,"utils/nn.py",4759,0,"",python,selection_keyboard +2229,5515883,"utils/nn.py",4759,0,"k",python,content +2230,5515885,"utils/nn.py",4760,0,"",python,selection_keyboard +2231,5516257,"utils/nn.py",4759,1,"",python,content +2232,5516394,"utils/nn.py",4758,1,"",python,content +2233,5516510,"utils/nn.py",4757,1,"",python,content +2234,5516663,"utils/nn.py",4756,1,"",python,content +2235,5517157,"utils/nn.py",4756,0,"m",python,content +2236,5517158,"utils/nn.py",4757,0,"",python,selection_keyboard +2237,5517282,"utils/nn.py",4757,0,"a",python,content +2238,5517284,"utils/nn.py",4758,0,"",python,selection_keyboard +2239,5517338,"utils/nn.py",4758,0,"s",python,content +2240,5517339,"utils/nn.py",4759,0,"",python,selection_keyboard +2241,5517399,"utils/nn.py",4759,0,"k",python,content +2242,5517401,"utils/nn.py",4760,0,"",python,selection_keyboard +2243,5517781,"utils/nn.py",4760,0,"?",python,content +2244,5517782,"utils/nn.py",4761,0,"",python,selection_keyboard +2245,5518316,"utils/nn.py",4760,1,"",python,content +2246,5518533,"utils/nn.py",4760,0,"=",python,content +2247,5518535,"utils/nn.py",4761,0,"",python,selection_keyboard +2248,5520308,"utils/nn.py",4761,0,"c",python,content +2249,5520310,"utils/nn.py",4762,0,"",python,selection_keyboard +2250,5520460,"utils/nn.py",4762,0,"a",python,content +2251,5520461,"utils/nn.py",4763,0,"",python,selection_keyboard +2252,5520547,"utils/nn.py",4763,0,"u",python,content +2253,5520548,"utils/nn.py",4764,0,"",python,selection_keyboard +2254,5520733,"utils/nn.py",4764,0,"s",python,content +2255,5520735,"utils/nn.py",4765,0,"",python,selection_keyboard +2256,5520929,"utils/nn.py",4765,0,"a",python,content +2257,5520931,"utils/nn.py",4766,0,"",python,selection_keyboard +2258,5520999,"utils/nn.py",4766,0,"l",python,content +2259,5521000,"utils/nn.py",4767,0,"",python,selection_keyboard +2260,5521306,"utils/nn.py",4767,0,"_",python,content +2261,5521308,"utils/nn.py",4768,0,"",python,selection_keyboard +2262,5521577,"utils/nn.py",4768,0,"m",python,content +2263,5521579,"utils/nn.py",4769,0,"",python,selection_keyboard +2264,5521690,"utils/nn.py",4769,0,"a",python,content +2265,5521691,"utils/nn.py",4770,0,"",python,selection_keyboard +2266,5521711,"utils/nn.py",4770,0,"s",python,content +2267,5521712,"utils/nn.py",4771,0,"",python,selection_keyboard +2268,5521803,"utils/nn.py",4771,0,"k",python,content +2269,5521804,"utils/nn.py",4772,0,"",python,selection_keyboard +2270,5523102,"utils/nn.py",4769,0,"",python,selection_mouse +2271,5523710,"utils/nn.py",4768,0,"",python,selection_command +2272,5524016,"utils/nn.py",4761,7,"",python,content +2273,5525274,"utils/nn.py",4465,0,"",python,selection_mouse +2274,5525638,"utils/nn.py",4458,7,"",python,content +2275,5526306,"utils/nn.py",4457,0,"",python,selection_command +2276,5527212,"utils/nn.py",4755,0,"",python,selection_mouse +2277,5527336,"utils/nn.py",4754,4,"mask",python,selection_mouse +2278,5528006,"utils/nn.py",4459,0,"",python,selection_mouse 
+2279,5528567,"utils/nn.py",4458,0,"",python,selection_command
+2280,5528940,"utils/nn.py",4458,0,"s",python,content
+2281,5528941,"utils/nn.py",4459,0,"",python,selection_keyboard
+2282,5529139,"utils/nn.py",4459,0,"m",python,content
+2283,5529140,"utils/nn.py",4460,0,"",python,selection_keyboard
+2284,5529389,"utils/nn.py",4459,1,"",python,content
+2285,5529714,"utils/nn.py",4459,0,"p",python,content
+2286,5529714,"utils/nn.py",4460,0,"",python,selection_keyboard
+2287,5529855,"utils/nn.py",4460,0,"a",python,content
+2288,5529856,"utils/nn.py",4461,0,"",python,selection_keyboard
+2289,5530079,"utils/nn.py",4461,0,"c",python,content
+2290,5530080,"utils/nn.py",4462,0,"",python,selection_keyboard
+2291,5530150,"utils/nn.py",4462,0,"i",python,content
+2292,5530151,"utils/nn.py",4463,0,"",python,selection_keyboard
+2293,5530269,"utils/nn.py",4463,0,"a",python,content
+2294,5530270,"utils/nn.py",4464,0,"",python,selection_keyboard
+2295,5530347,"utils/nn.py",4464,0,"l",python,content
+2296,5530348,"utils/nn.py",4465,0,"",python,selection_keyboard
+2297,5530685,"utils/nn.py",4465,0,"_",python,content
+2298,5530686,"utils/nn.py",4466,0,"",python,selection_keyboard
+2299,5532149,"utils/nn.py",4762,0,"",python,selection_mouse
+2300,5532922,"utils/nn.py",4762,0,"s",python,content
+2301,5532924,"utils/nn.py",4763,0,"",python,selection_keyboard
+2302,5532983,"utils/nn.py",4763,0,"p",python,content
+2303,5532985,"utils/nn.py",4764,0,"",python,selection_keyboard
+2304,5533113,"utils/nn.py",4764,0,"a",python,content
+2305,5533114,"utils/nn.py",4765,0,"",python,selection_keyboard
+2306,5533279,"utils/nn.py",4765,0,"c",python,content
+2307,5533281,"utils/nn.py",4766,0,"",python,selection_keyboard
+2308,5533355,"utils/nn.py",4766,0,"i",python,content
+2309,5533356,"utils/nn.py",4767,0,"",python,selection_keyboard
+2310,5533468,"utils/nn.py",4767,0,"a",python,content
+2311,5533470,"utils/nn.py",4768,0,"",python,selection_keyboard
+2312,5533562,"utils/nn.py",4768,0,"l",python,content
+2313,5533564,"utils/nn.py",4769,0,"",python,selection_keyboard
+2314,5533743,"utils/nn.py",4769,0,"_",python,content
+2315,5533745,"utils/nn.py",4770,0,"",python,selection_keyboard
+2316,5534009,"utils/nn.py",4769,0,"",python,selection_command
+2317,5541087,"utils/nn.py",4502,0,"",python,selection_mouse
+2318,5542049,"utils/nn.py",4501,0,"",python,selection_command
+2319,5542520,"utils/nn.py",4501,1,".",python,content
+2320,5544222,"models/dynamics.py",0,0,"",python,tab
+2321,5545236,"models/dynamics.py",2432,0,"",python,selection_mouse
+2322,5545473,"models/dynamics.py",2431,0,"",python,selection_command
+2323,5546389,"genie.py",0,0,"",python,tab
+2324,5546389,"genie.py",2898,0,"",python,selection_mouse
+2325,5548534,"utils/nn.py",0,0,"",python,tab
+2326,5557726,"TERMINAL",0,0,"^Csrun: interrupt (one more within 1 sec to abort)\r\nsrun: StepId=3350418.2 tasks 0-7: running\r\n",,terminal_output
+2327,5557901,"TERMINAL",0,0,"^Csrun: sending Ctrl-C to StepId=3350418.2\r\nProcess SpawnProcess-7:\r\nProcess SpawnProcess-2:\r\nProcess SpawnProcess-1:\r\nProcess SpawnProcess-5:\r\nProcess SpawnProcess-4:\r\nProcess SpawnProcess-1:\r\nProcess SpawnProcess-6:\r\nProcess SpawnProcess-3:\r\nProcess SpawnProcess-6:\r\nProcess SpawnProcess-4:\r\nProcess SpawnProcess-4:\r\nProcess SpawnProcess-2:\r\nProcess SpawnProcess-8:\r\nProcess SpawnProcess-3:\r\nProcess SpawnProcess-7:\r\nProcess SpawnProcess-3:\r\nProcess SpawnProcess-2:\r\nProcess SpawnProcess-2:\r\nProcess SpawnProcess-8:\r\nProcess SpawnProcess-6:\r\nProcess SpawnProcess-2:\r\nProcess SpawnProcess-7:\r\nProcess SpawnProcess-8:\r\nProcess SpawnProcess-5:\r\nProcess SpawnProcess-2:\r\nProcess SpawnProcess-8:\r\nProcess SpawnProcess-1:\r\nProcess SpawnProcess-6:\r\nProcess SpawnProcess-1:\r\nProcess SpawnProcess-5:\r\nProcess SpawnProcess-7:\r\nProcess SpawnProcess-7:\r\nProcess SpawnProcess-4:\r\nProcess SpawnProcess-6:\r\nProcess SpawnProcess-7:\r\nProcess SpawnProcess-4:\r\nProcess SpawnProcess-4:\r\nProcess SpawnProcess-1:\r\nProcess SpawnProcess-5:\r\nProcess SpawnProcess-2:\r\nsrun: forcing job termination\r\nProcess SpawnProcess-5:\r\nProcess SpawnProcess-3:\r\nProcess SpawnProcess-5:\r\nProcess SpawnProcess-6:\r\nProcess SpawnProcess-7:\r\nProcess SpawnProcess-1:\r\nProcess SpawnProcess-3:\r\nProcess SpawnProcess-4:\r\nProcess SpawnProcess-8:\r\nProcess SpawnProcess-7:\r\nProcess SpawnProcess-3:\r\nProcess SpawnProcess-6:\r\nProcess SpawnProcess-8:\r\nProcess SpawnProcess-4:\r\nProcess SpawnProcess-6:\r\nProcess SpawnProcess-5:\r\nProcess SpawnProcess-1:\r\nProcess SpawnProcess-8:\r\nProcess SpawnProcess-1:\r\nProcess SpawnProcess-8:\r\nProcess SpawnProcess-5:\r\nProcess SpawnProcess-3:\r\nProcess SpawnProcess-2:\r\nProcess SpawnProcess-3:\r\nsrun: Job step aborted: Waiting up to 32 seconds for job step to finish.\r\nslurmstepd: error: *** STEP 3350418.2 ON hkn0710 CANCELLED AT 2025-07-16T16:22:05 ***\r\n",,terminal_output
+2328,5558106,"TERMINAL",0,0,"^Csrun: sending Ctrl-C to StepId=3350418.2\r\nsrun: job abort in progress\r\n",,terminal_output
+2329,5558290,"TERMINAL",0,0,"^Csrun: sending Ctrl-C to StepId=3350418.2\r\n",,terminal_output
+2330,5558470,"TERMINAL",0,0,"^Csrun: sending Ctrl-C to StepId=3350418.2\r\n",,terminal_output
+2331,5558683,"TERMINAL",0,0,"^Csrun: sending Ctrl-C to StepId=3350418.2\r\nsrun: job abort in progress\r\n",,terminal_output
+2332,5558900,"TERMINAL",0,0,"^C",,terminal_output
+2333,5559099,"TERMINAL",0,0,"^Csrun: sending Ctrl-C to StepId=3350418.2\r\n",,terminal_output
+2334,5559172,"TERMINAL",0,0,"]0;tum_cte0515@hkn0710:~/Projects/jafar[?2004h(jafar) [tum_cte0515@hkn0710 jafar]$ ",,terminal_output
+2335,5579573,"utils/nn.py",0,0,"",python,tab
+2336,5579573,"utils/nn.py",5205,0,"",python,selection_mouse
+2337,5579715,"utils/nn.py",5204,0,"",python,selection_command
+2338,5591058,"utils/nn.py",4498,0,"",python,selection_mouse
+2339,5591748,"utils/nn.py",4559,0,"",python,selection_mouse
+2340,5591799,"utils/nn.py",4558,0,"",python,selection_command
+2341,5592272,"utils/nn.py",4497,0,"",python,selection_mouse
+2342,5593185,"utils/nn.py",4471,0,"",python,selection_mouse
+2343,5593522,"utils/nn.py",4470,1," ",python,selection_mouse
+2344,5593737,"utils/nn.py",4450,75,"        spacial_mask = jnp.tri(z.shape[-2]) if self.spacial_bert else None\n",python,selection_mouse
+2345,5594317,"utils/nn.py",4449,0,"",python,selection_mouse
+2346,5594321,"utils/nn.py",4448,0,"",python,selection_command
+2347,5595246,"utils/nn.py",4474,0,"",python,selection_mouse
+2348,5596471,"utils/nn.py",4473,0,"",python,selection_mouse
+2349,5598371,"utils/nn.py",4294,0,"",python,selection_mouse
+2350,5598387,"utils/nn.py",4293,0,"",python,selection_command
+2351,5599592,"utils/nn.py",4473,0,"",python,selection_mouse
+2352,5599901,"utils/nn.py",4473,1,"j",python,selection_mouse
+2353,5599901,"utils/nn.py",4473,4,"jnp.",python,selection_mouse
+2354,5600032,"utils/nn.py",4473,6,"jnp.tr",python,selection_mouse
+2355,5600032,"utils/nn.py",4473,9,"jnp.tri(z",python,selection_mouse
+2356,5600033,"utils/nn.py",4473,10,"jnp.tri(z.",python,selection_mouse
+2357,5600033,"utils/nn.py",4473,12,"jnp.tri(z.sh",python,selection_mouse +2358,5600033,"utils/nn.py",4473,13,"jnp.tri(z.sha",python,selection_mouse +2359,5600033,"utils/nn.py",4473,15,"jnp.tri(z.shape",python,selection_mouse +2360,5600081,"utils/nn.py",4473,16,"jnp.tri(z.shape[",python,selection_mouse +2361,5600123,"utils/nn.py",4473,18,"jnp.tri(z.shape[-2",python,selection_mouse +2362,5600213,"utils/nn.py",4473,19,"jnp.tri(z.shape[-2]",python,selection_mouse +2363,5600248,"utils/nn.py",4473,20,"jnp.tri(z.shape[-2])",python,selection_mouse +2364,5601511,"utils/nn.py",4473,20,"",python,content +2365,5602004,"utils/nn.py",4473,0,"n",python,content +2366,5602004,"utils/nn.py",4474,0,"",python,selection_keyboard +2367,5602339,"utils/nn.py",4473,1,"",python,content +2368,5602545,"utils/nn.py",4473,0,"N",python,content +2369,5602546,"utils/nn.py",4474,0,"",python,selection_keyboard +2370,5602743,"utils/nn.py",4474,0,"o",python,content +2371,5602745,"utils/nn.py",4475,0,"",python,selection_keyboard +2372,5602806,"utils/nn.py",4475,0,"n",python,content +2373,5602808,"utils/nn.py",4476,0,"",python,selection_keyboard +2374,5602871,"utils/nn.py",4476,0,"e",python,content +2375,5602872,"utils/nn.py",4477,0,"",python,selection_keyboard +2376,5603894,"utils/nn.py",4503,0,"",python,selection_mouse +2377,5604137,"utils/nn.py",4502,0,"",python,selection_command +2378,5606139,"utils/nn.py",4503,0,"",python,selection_command +2379,5606425,"utils/nn.py",4504,0,"jnp.tri(z.shape[-2])",python,content +2380,5606455,"utils/nn.py",4523,0,"",python,selection_command +2381,5607535,"utils/nn.py",4524,0,"",python,selection_command +2382,5607835,"utils/nn.py",4524,4,"",python,content +2383,5607846,"utils/nn.py",4523,0,"",python,selection_command +2384,5611210,"utils/nn.py",4449,0,"",python,selection_mouse +2385,5611210,"utils/nn.py",4448,0,"",python,selection_command +2386,5611242,"utils/nn.py",4449,0,"",python,selection_mouse +2387,5611250,"utils/nn.py",4448,0,"",python,selection_command +2388,5611907,"utils/nn.py",4449,0,"",python,selection_mouse +2389,5611909,"utils/nn.py",4448,0,"",python,selection_command +2390,5612424,"utils/nn.py",4490,0,"",python,selection_mouse +2391,5612573,"utils/nn.py",4486,12,"spacial_bert",python,selection_mouse +2392,6338288,"utils/nn.py",4632,0,"",python,selection_mouse +2393,6338328,"utils/nn.py",4631,0,"",python,selection_command +2394,6397929,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",0,0,"",python,tab +2395,6403938,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",2735,0,"",python,selection_mouse +2396,6404624,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",2661,0,"",python,selection_mouse +2397,6406287,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",2744,0,"",python,selection_mouse +2398,6406863,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",2657,0,"",python,selection_mouse +2399,6408300,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",2702,0,"",python,selection_mouse +2400,6409148,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",2763,0,"",python,selection_mouse +2401,6409681,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",2684,0,"",python,selection_mouse 
+2402,6421904,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",2737,0,"",python,selection_mouse +2403,6422627,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",2731,0,"",python,selection_mouse +2404,6423274,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",2638,0,"",python,selection_mouse +2405,6423823,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",2650,0,"",python,selection_mouse +2406,6424666,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",2650,0,"\n ",python,content +2407,6426614,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",2655,0,"t",python,content +2408,6426615,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",2656,0,"",python,selection_keyboard +2409,6426841,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",2656,0,"o",python,content +2410,6426842,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",2657,0,"",python,selection_keyboard +2411,6426921,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",2657,0,"k",python,content +2412,6426922,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",2658,0,"",python,selection_keyboard +2413,6427197,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",2658,0,"e",python,content +2414,6427198,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",2659,0,"",python,selection_keyboard +2415,6427324,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",2659,0,"n",python,content +2416,6427325,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",2660,0,"",python,selection_keyboard +2417,6430361,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",2655,5,"",python,content +2418,6430945,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",2651,4,"",python,content +2419,6431150,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",2650,1,"",python,content +2420,6432390,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",2787,0,"",python,selection_mouse +2421,6432980,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",2724,0,"",python,selection_mouse +2422,6434053,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",2724,0,"\n ",python,content +2423,6435157,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",2729,0," logits = outputs[""token_logits""][:, :-1] # predict t+1 given t\n targets = outputs[""video_tokens""][:, 1:]\n mask = outputs[""mask""][:, 1:] # if you use a mask\n",python,content +2424,6437168,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",2734,0,"",python,selection_mouse +2425,6438026,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",2733,0,"",python,selection_mouse +2426,6438772,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",2729,4,"",python,content 
+2427,6438921,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",2728,0,"",python,selection_command
+2428,6439253,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",2654,0,"",python,selection_command
+2429,6439741,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",2724,0,"\n    ",python,content
+2430,6441178,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",2729,0,"l",python,content
+2431,6441179,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",2730,0,"",python,selection_keyboard
+2432,6441341,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",2730,0,"o",python,content
+2433,6441342,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",2731,0,"",python,selection_keyboard
+2434,6441516,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",2731,0,"g",python,content
+2435,6441517,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",2732,0,"",python,selection_keyboard
+2436,6441598,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",2732,0,"i",python,content
+2437,6441599,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",2733,0,"",python,selection_keyboard
+2438,6442210,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",2733,0,"t",python,content
+2439,6442210,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",2734,0,"",python,selection_keyboard
+2440,6442425,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",2734,0,"s",python,content
+2441,6442426,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",2735,0,"",python,selection_keyboard
+2442,6442738,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",2735,0," ",python,content
+2443,6442739,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",2736,0,"",python,selection_keyboard
+2444,6442892,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",2736,0,"=",python,content
+2445,6442893,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",2737,0,"",python,selection_keyboard
+2446,6442992,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",2737,0," ",python,content
+2447,6442992,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",2738,0,"",python,selection_keyboard
+2448,6443342,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",2737,0,"",python,selection_command
+2449,6444222,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",2753,0,"",python,selection_mouse
+2450,6444366,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",2752,7,"outputs",python,selection_mouse
+2451,6444566,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",2752,76,"outputs[""token_logits""][:, :-1]  # predict t+1 given t\n    targets = outputs",python,selection_mouse
+2452,6444567,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",2752,90,"outputs[""token_logits""][:, :-1]  # predict t+1 given t\n    targets = outputs[""video_tokens",python,selection_mouse
+2453,6444633,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",2752,91,"outputs[""token_logits""][:, :-1]  # predict t+1 given t\n    targets = outputs[""video_tokens""",python,selection_mouse
+2454,6444659,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",2752,93,"outputs[""token_logits""][:, :-1]  # predict t+1 given t\n    targets = outputs[""video_tokens""][",python,selection_mouse
+2455,6444713,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",2752,95,"outputs[""token_logits""][:, :-1]  # predict t+1 given t\n    targets = outputs[""video_tokens""][:,",python,selection_mouse
+2456,6444746,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",2752,96,"outputs[""token_logits""][:, :-1]  # predict t+1 given t\n    targets = outputs[""video_tokens""][:, ",python,selection_mouse
+2457,6444795,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",2752,97,"outputs[""token_logits""][:, :-1]  # predict t+1 given t\n    targets = outputs[""video_tokens""][:, 1",python,selection_mouse
+2458,6444826,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",2752,29,"outputs[""token_logits""][:, :-",python,selection_mouse
+2459,6444877,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",2752,30,"outputs[""token_logits""][:, :-1",python,selection_mouse
+2460,6444912,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",2752,31,"outputs[""token_logits""][:, :-1]",python,selection_mouse
+2461,6444952,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",2752,32,"outputs[""token_logits""][:, :-1] ",python,selection_mouse
+2462,6445806,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",2738,0,"",python,selection_command
+2463,6448022,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",2755,0,"",python,selection_mouse
+2464,6448166,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",2752,7,"outputs",python,selection_mouse
+2465,6448425,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",2752,9,"outputs[""",python,selection_mouse
+2466,6448467,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",2752,21,"outputs[""token_logits",python,selection_mouse
+2467,6448806,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",2752,22,"outputs[""token_logits""",python,selection_mouse
+2468,6448806,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",2752,23,"outputs[""token_logits""]",python,selection_mouse
+2469,6448860,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",2752,24,"outputs[""token_logits""][",python,selection_mouse
+2470,6448916,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",2752,26,"outputs[""token_logits""][:,",python,selection_mouse
+2471,6448917,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",2752,27,"outputs[""token_logits""][:, ",python,selection_mouse
+2472,6448990,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",2752,28,"outputs[""token_logits""][:, :",python,selection_mouse
+2473,6449042,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",2752,29,"outputs[""token_logits""][:, :-",python,selection_mouse +2474,6449111,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",2752,30,"outputs[""token_logits""][:, :-1",python,selection_mouse +2475,6449158,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",2752,31,"outputs[""token_logits""][:, :-1]",python,selection_mouse +2476,6449377,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",2752,30,"outputs[""token_logits""][:, :-1",python,selection_mouse +2477,6449417,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",2752,28,"outputs[""token_logits""][:, :",python,selection_mouse +2478,6449447,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",2752,25,"outputs[""token_logits""][:",python,selection_mouse +2479,6449473,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",2752,21,"outputs[""token_logits",python,selection_mouse +2480,6450130,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",2763,0,"",python,selection_mouse +2481,6450510,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",2754,0,"",python,selection_mouse +2482,6450673,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",2752,7,"outputs",python,selection_mouse +2483,6450872,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",2752,8,"outputs[",python,selection_mouse +2484,6450873,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",2752,21,"outputs[""token_logits",python,selection_mouse +2485,6451075,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",2752,22,"outputs[""token_logits""",python,selection_mouse +2486,6452916,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",2752,23,"outputs[""token_logits""]",python,selection_mouse +2487,6453812,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",2752,0,"",python,selection_command +2488,6454153,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",2737,0,"",python,selection_command +2489,6454898,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",2738,0,"outputs[""token_logits""]",python,content +2490,6454920,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",2760,0,"",python,selection_command +2491,6455691,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",2797,0,"",python,selection_command +2492,6455936,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",2865,0,"",python,selection_command +2493,6456401,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",2864,0,"",python,selection_command +2494,6456910,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",2863,0,"",python,selection_command +2495,6456933,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",2862,0,"",python,selection_command +2496,6456989,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",2861,0,"",python,selection_command 
+2497,6456990,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",2860,0,"",python,selection_command +2498,6457050,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",2859,0,"",python,selection_command +2499,6457050,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",2858,0,"",python,selection_command +2500,6457125,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",2857,0,"",python,selection_command +2501,6457149,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",2856,0,"",python,selection_command +2502,6457205,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",2855,0,"",python,selection_command +2503,6457239,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",2854,0,"",python,selection_command +2504,6457239,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",2853,0,"",python,selection_command +2505,6457265,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",2852,0,"",python,selection_command +2506,6457298,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",2851,0,"",python,selection_command +2507,6457326,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",2850,0,"",python,selection_command +2508,6457386,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",2849,0,"",python,selection_command +2509,6457387,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",2848,0,"",python,selection_command +2510,6457412,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",2847,0,"",python,selection_command +2511,6457421,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",2846,0,"",python,selection_command +2512,6457539,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",2845,0,"",python,selection_command +2513,6457726,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",2844,0,"",python,selection_command +2514,6458350,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",2844,1,"o",python,selection_command +2515,6458413,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",2844,2,"ou",python,selection_command +2516,6458917,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",2844,3,"out",python,selection_command +2517,6458966,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",2844,4,"outp",python,selection_command +2518,6458966,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",2844,5,"outpu",python,selection_command +2519,6459113,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",2844,6,"output",python,selection_command +2520,6459114,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",2844,7,"outputs",python,selection_command +2521,6459114,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",2844,8,"outputs[",python,selection_command 
+2522,6459208,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",2844,9,"outputs[""",python,selection_command +2523,6459209,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",2844,10,"outputs[""v",python,selection_command +2524,6459209,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",2844,11,"outputs[""vi",python,selection_command +2525,6459324,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",2844,12,"outputs[""vid",python,selection_command +2526,6459324,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",2844,13,"outputs[""vide",python,selection_command +2527,6459324,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",2844,14,"outputs[""video",python,selection_command +2528,6459325,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",2844,15,"outputs[""video_",python,selection_command +2529,6459364,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",2844,16,"outputs[""video_t",python,selection_command +2530,6459364,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",2844,17,"outputs[""video_to",python,selection_command +2531,6459437,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",2844,18,"outputs[""video_tok",python,selection_command +2532,6459602,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",2844,19,"outputs[""video_toke",python,selection_command +2533,6459775,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",2844,20,"outputs[""video_token",python,selection_command +2534,6459910,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",2844,21,"outputs[""video_tokens",python,selection_command +2535,6460033,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",2844,22,"outputs[""video_tokens""",python,selection_command +2536,6460187,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",2844,23,"outputs[""video_tokens""]",python,selection_command +2537,6460422,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",2844,0,"",python,selection_command +2538,6460476,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",2776,0,"",python,selection_command +2539,6460658,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",2739,0,"",python,selection_command +2540,6460955,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",2761,0,"\n ",python,content +2541,6461725,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",2766,0,"t",python,content +2542,6461726,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",2767,0,"",python,selection_keyboard +2543,6461818,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",2767,0,"a",python,content +2544,6461819,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",2768,0,"",python,selection_keyboard +2545,6462063,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",2768,0,"r",python,content 
+2546,6462068,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",2769,0,"",python,selection_keyboard +2547,6462069,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",2769,0,"g",python,content +2548,6462071,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",2770,0,"",python,selection_keyboard +2549,6462210,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",2770,0,"e",python,content +2550,6462210,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",2771,0,"",python,selection_keyboard +2551,6462293,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",2771,0,"t",python,content +2552,6462294,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",2772,0,"",python,selection_keyboard +2553,6462419,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",2772,0,"s",python,content +2554,6462420,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",2773,0,"",python,selection_keyboard +2555,6462726,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",2773,0," ",python,content +2556,6462726,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",2774,0,"",python,selection_keyboard +2557,6462919,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",2774,0,"=",python,content +2558,6462920,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",2775,0,"",python,selection_keyboard +2559,6463031,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",2775,0," ",python,content +2560,6463032,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",2776,0,"",python,selection_keyboard +2561,6463518,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",2776,0," logits = outputs[""token_logits""][:, :-1] # predict t+1 given t\n targets = outputs[""video_tokens""][:, 1:]\n mask = outputs[""mask""][:, 1:] # if you use a mask\n",python,content +2562,6465034,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",2776,168,"",python,content +2563,6465488,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",2775,0,"",python,selection_command +2564,6466190,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",2776,0,"outputs[""video_tokens""]",python,content +2565,6466221,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",2798,0,"",python,selection_command +2566,6468000,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",2799,0,"\n ",python,content +2567,6468622,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",2804,0,"i",python,content +2568,6468623,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",2805,0,"",python,selection_keyboard +2569,6468670,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",2805,0,"f",python,content +2570,6468671,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",2806,0,"",python,selection_keyboard 
+2571,6468795,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",2806,0," ",python,content +2572,6468796,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",2807,0,"",python,selection_keyboard +2573,6470306,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",2807,0,"n",python,content +2574,6470307,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",2808,0,"",python,selection_keyboard +2575,6470488,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",2808,0,"o",python,content +2576,6470489,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",2809,0,"",python,selection_keyboard +2577,6470583,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",2809,0,"t",python,content +2578,6470584,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",2810,0,"",python,selection_keyboard +2579,6470684,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",2810,0," ",python,content +2580,6470685,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",2811,0,"",python,selection_keyboard +2581,6470891,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",2811,0,"s",python,content +2582,6470892,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",2812,0,"",python,selection_keyboard +2583,6471073,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",2812,0,"e",python,content +2584,6471074,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",2813,0,"",python,selection_keyboard +2585,6471176,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",2813,0,"l",python,content +2586,6471177,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",2814,0,"",python,selection_keyboard +2587,6471274,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",2814,0,"f",python,content +2588,6471275,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",2815,0,"",python,selection_keyboard +2589,6471392,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",2815,0,"-",python,content +2590,6471393,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",2816,0,"",python,selection_keyboard +2591,6471988,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",2815,1,"",python,content +2592,6472186,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",2815,0,".",python,content +2593,6472186,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",2816,0,"",python,selection_keyboard +2594,6472806,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",2816,0,"u",python,content +2595,6472807,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",2817,0,"",python,selection_keyboard +2596,6473375,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",2816,1,"",python,content +2597,6473535,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",2816,0,"m",python,content 
+2598,6473536,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",2817,0,"",python,selection_keyboard +2599,6473664,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",2817,0,"a",python,content +2600,6473665,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",2818,0,"",python,selection_keyboard +2601,6474478,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",2818,0,"s",python,content +2602,6474478,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",2819,0,"",python,selection_keyboard +2603,6474504,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",2819,0,"k",python,content +2604,6474505,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",2820,0,"",python,selection_keyboard +2605,6476528,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",2816,4,"",python,content +2606,6476674,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",2815,1,"",python,content +2607,6477049,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",2811,4,"",python,content +2608,6477308,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",2811,0,"a",python,content +2609,6477309,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",2812,0,"",python,selection_keyboard +2610,6477558,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",2812,0,"t",python,content +2611,6477559,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",2813,0,"",python,selection_keyboard +2612,6477728,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",2813,0,"g",python,content +2613,6477729,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",2814,0,"",python,selection_keyboard +2614,6477837,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",2814,0,"s",python,content +2615,6477838,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",2815,0,"",python,selection_keyboard +2616,6478290,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",2814,1,"",python,content +2617,6478436,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",2813,1,"",python,content +2618,6478532,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",2812,1,"",python,content +2619,6478698,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",2812,0,"r",python,content +2620,6478699,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",2813,0,"",python,selection_keyboard +2621,6479005,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",2813,0,"g",python,content +2622,6479006,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",2814,0,"",python,selection_keyboard +2623,6479125,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",2814,0,"s",python,content +2624,6479126,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",2815,0,"",python,selection_keyboard 
+2625,6479296,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",2815,0,".",python,content +2626,6479298,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",2816,0,"",python,selection_keyboard +2627,6479733,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",2816,0,"m",python,content +2628,6479734,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",2817,0,"",python,selection_keyboard +2629,6480158,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",2817,0,"a",python,content +2630,6480159,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",2818,0,"",python,selection_keyboard +2631,6482949,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",2816,2,"use_maskgit",python,content +2632,6484452,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",2827,0,":",python,content +2633,6484453,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",2828,0,"",python,selection_keyboard +2634,6484608,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",2827,0,"",python,selection_command +2635,6485000,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",2856,0,"",python,selection_command +2636,6486125,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",2829,67," logits = outputs[""token_logits""][:, :-1] # predict t+1 given t",python,selection_command +2637,6486784,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",2829,112," logits = outputs[""token_logits""][:, :-1] # predict t+1 given t\n targets = outputs[""video_tokens""][:, 1:]",python,selection_command +2638,6487098,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",2833,0,"",python,selection_command +2639,6487653,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",2901,0," ",python,content +2640,6487654,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",2833,0," ",python,content +2641,6488025,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",2836,0,"",python,selection_command +2642,6488195,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",2908,0,"",python,selection_command +2643,6488467,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",2957,0,"",python,selection_command +2644,6488772,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",2954,0,"",python,selection_command +2645,6488968,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",2954,0," ",python,content +2646,6489326,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",2957,0,"",python,selection_command +2647,6489539,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",2908,0,"",python,selection_command +2648,6489972,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",2836,0,"",python,selection_command +2649,6490227,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",2807,0,"",python,selection_command 
+2650,6490658,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",2836,0,"",python,selection_command +2651,6491158,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",2900,0,"",python,selection_command +2652,6491696,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",2899,1,"",python,content +2653,6491874,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",2893,6,"",python,content +2654,6492104,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",2891,2,"",python,content +2655,6492287,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",2890,1,"",python,content +2656,6492556,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",2889,1,"",python,content +2657,6492758,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",2881,8,"",python,content +2658,6492971,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",2879,2,"",python,content +2659,6493576,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",2878,1,"",python,content +2660,6493703,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",2877,1,"",python,content +2661,6493923,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",2876,0,"",python,selection_command +2662,6494238,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",2925,0,"",python,selection_command +2663,6494368,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",2974,0,"",python,selection_command +2664,6495006,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",2985,0,"",python,selection_command +2665,6495316,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",2981,4,"",python,content +2666,6495461,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",2979,2,"",python,content +2667,6495627,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",2975,4,"",python,content +2668,6495793,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",2971,4,"",python,content +2669,6495971,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",2968,3,"",python,content +2670,6496168,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",2966,2,"",python,content +2671,6496652,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",2965,1,"",python,content +2672,6496982,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",2964,0,"",python,selection_command +2673,6497834,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",2915,0,"",python,selection_command +2674,6498084,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",2964,0,"",python,selection_command +2675,6498388,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",2966,0,"",python,selection_command +2676,6498517,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",3004,0,"",python,selection_command 
+2677,6498933,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",3067,0,"",python,selection_command +2678,6499482,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",3066,0,"",python,selection_command +2679,6499611,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",3065,0,"",python,selection_command +2680,6499783,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",3064,0,"",python,selection_command +2681,6499902,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",3063,0,"",python,selection_command +2682,6501145,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",3063,23,"",python,content +2683,6501193,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",3062,0,"",python,selection_command +2684,6501555,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",3063,0,"",python,selection_command +2685,6502044,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",3063,0,"t",python,content +2686,6502044,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",3064,0,"",python,selection_keyboard +2687,6502087,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",3064,0,"o",python,content +2688,6502089,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",3065,0,"",python,selection_keyboard +2689,6502172,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",3065,0,"k",python,content +2690,6502172,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",3066,0,"",python,selection_keyboard +2691,6502280,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",3066,0,"e",python,content +2692,6502281,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",3067,0,"",python,selection_keyboard +2693,6502467,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",3067,0,"n",python,content +2694,6502468,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",3068,0,"",python,selection_keyboard +2695,6503138,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",3067,1,"",python,content +2696,6503322,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",3066,1,"",python,content +2697,6503455,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",3065,1,"",python,content +2698,6503762,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",3064,1,"",python,content +2699,6503907,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",3064,0,"a",python,content +2700,6503908,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",3065,0,"",python,selection_keyboard +2701,6504124,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",3065,0,"r",python,content +2702,6504125,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",3066,0,"",python,selection_keyboard +2703,6504217,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",3066,0,"g",python,content 
+2704,6504218,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",3067,0,"",python,selection_keyboard +2705,6504953,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",3063,4,"targets",python,content +2706,6505181,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",3069,0,"",python,selection_command +2707,6505602,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",3068,0,"",python,selection_command +2708,6506087,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",3067,0,"",python,selection_command +2709,6506133,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",3066,0,"",python,selection_command +2710,6506155,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",3065,0,"",python,selection_command +2711,6506182,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",3064,0,"",python,selection_command +2712,6506266,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",3063,0,"",python,selection_command +2713,6506306,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",3062,0,"",python,selection_command +2714,6506307,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",3061,0,"",python,selection_command +2715,6506334,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",3060,0,"",python,selection_command +2716,6506365,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",3059,0,"",python,selection_command +2717,6506430,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",3058,0,"",python,selection_command +2718,6506431,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",3057,0,"",python,selection_command +2719,6506485,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",3056,0,"",python,selection_command +2720,6506517,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",3055,0,"",python,selection_command +2721,6506517,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",3054,0,"",python,selection_command +2722,6506541,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",3053,0,"",python,selection_command +2723,6506550,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",3052,0,"",python,selection_command +2724,6506606,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",3051,0,"",python,selection_command +2725,6506629,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",3050,0,"",python,selection_command +2726,6506656,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",3049,0,"",python,selection_command +2727,6506656,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",3048,0,"",python,selection_command +2728,6506696,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",3047,0,"",python,selection_command +2729,6506749,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",3046,0,"",python,selection_command 
+2730,6506784,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",3045,0,"",python,selection_command +2731,6506819,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",3044,0,"",python,selection_command +2732,6506830,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",3043,0,"",python,selection_command +2733,6506857,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",3042,0,"",python,selection_command +2734,6506865,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",3041,0,"",python,selection_command +2735,6506903,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",3040,0,"",python,selection_command +2736,6507059,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",3039,0,"",python,selection_command +2737,6507276,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",3038,0,"",python,selection_command +2738,6508812,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",3038,7,"",python,content +2739,6509199,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",3038,1,"",python,content +2740,6509401,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",3038,1,"",python,content +2741,6509545,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",3038,1,"",python,content +2742,6509767,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",3038,1,"",python,content +2743,6510015,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",3038,1,"",python,content +2744,6510145,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",3038,1,"",python,content +2745,6510347,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",3038,1,"",python,content +2746,6510557,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",3038,1,"",python,content +2747,6510777,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",3038,1,"",python,content +2748,6510987,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",3038,1,"",python,content +2749,6511197,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",3038,1,"",python,content +2750,6511410,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",3038,1,"",python,content +2751,6511642,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",3038,1,"",python,content +2752,6511880,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",3038,1,"",python,content +2753,6512104,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",3038,1,"",python,content +2754,6512732,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",3038,1,"",python,content +2755,6513719,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",3038,0,"l",python,content +2756,6513720,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",3039,0,"",python,selection_keyboard 
+2757,6513857,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",3039,0,"o",python,content +2758,6513857,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",3040,0,"",python,selection_keyboard +2759,6514164,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",3040,0,"g",python,content +2760,6514165,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",3041,0,"",python,selection_keyboard +2761,6516334,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",3038,3,"logits",python,content +2762,6516701,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",3043,0,"",python,selection_command +2763,6525005,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",2798,0,"",python,selection_mouse +2764,6525792,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",2799,0,"\n ",python,content +2765,6526515,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",2799,0,"",python,selection_mouse +2766,6526993,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",2804,0,"",python,selection_mouse +2767,6528809,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",2875,0,"",python,selection_mouse +2768,6530061,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",2804,0,"",python,selection_mouse +2769,6530717,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",2833,0,"",python,selection_mouse +2770,6542871,"TERMINAL",0,0,"sh slurm/jobs/mihir/horeka/yolo-runs/tester.sh",,terminal_output +2771,6544088,"TERMINAL",0,0,"\r\n[?2004l\r\r\n# Log the sbatch script\r\ncat $0\r\n\r\nmodule unload mpi/openmpi/5.0\r\nmodule unload devel/cuda/12.4\r\nsource .venv/bin/activate\r\n\r\narray_records_dir=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_new/open_ai_minecraft_arrayrecords_chunked\r\nws_dir='/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/'\r\n\r\njob_name=""debug""\r\nslurm_job_id=""debug-mihir""\r\n\r\nCHECKPOINT_DIR=$ws_dir/checkpoints/$job_name/$slurm_job_id\r\nmkdir -p $CHECKPOINT_DIR\r\n\r\ntokenizer_ckpt_dir=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/big-runs/tokenizer-lr-scaling/train_tokenizer_lr_sweep_1e-4\r\n\r\nenv | grep SLURM\r\n\r\nXLA_FLAGS=--xla_gpu_autotune_level=0 srun python train_dynamics.py \\r\n --ckpt_dir $CHECKPOINT_DIR \\r\n --batch_size=96 \\r\n --max_lr=1e-4 \\r\n --log_image_interval=1000 \\r\n --log \\r\n --log_checkpoint_interval=1000 \\r\n --name=dynamics-debug-run-$slurm_job_id \\r\n --tags dynamics debug \\r\n --entity instant-uv \\r\n --project jafar \\r\n --tokenizer_checkpoint=$tokenizer_ckpt_dir \\r\n --data_dir $array_records_dir\r\n ",,terminal_output 
+2772,6544252,"TERMINAL",0,0,"SLURM_STEP_NUM_TASKS=1\r\nSLURM_JOB_USER=tum_cte0515\r\nSLURM_TASKS_PER_NODE=4(x2)\r\nSLURM_JOB_UID=999226\r\nSLURM_TASK_PID=2279443\r\nSLURM_JOB_GPUS=0,1,2,3\r\nSLURM_LOCALID=0\r\nSLURM_SUBMIT_DIR=/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar\r\nSLURMD_NODENAME=hkn0710\r\nSLURM_JOB_START_TIME=1752670186\r\nSLURM_STEP_NODELIST=hkn0710\r\nSLURM_CLUSTER_NAME=hk\r\nSLURM_JOB_END_TIME=1752706186\r\nSLURM_PMI2_SRUN_PORT=42215\r\nSLURM_CPUS_ON_NODE=24\r\nSLURM_JOB_CPUS_PER_NODE=24(x2)\r\nSLURM_GPUS_ON_NODE=4\r\nSLURM_GTIDS=0\r\nSLURM_JOB_PARTITION=accelerated\r\nSLURM_TRES_PER_TASK=cpu=5\r\nSLURM_OOM_KILL_STEP=0\r\nSLURM_JOB_NUM_NODES=2\r\nSLURM_STEPID=4294967290\r\nSLURM_JOBID=3350418\r\nSLURM_PTY_PORT=33491\r\nSLURM_JOB_QOS=normal\r\nSLURM_LAUNCH_NODE_IPADDR=10.0.7.199\r\nSLURM_PTY_WIN_ROW=43\r\nSLURM_PMI2_PROC_MAPPING=(vector,(0,1,1))\r\nSLURMD_DEBUG=2\r\nSLURM_PROCID=0\r\nSLURM_CPUS_PER_TASK=5\r\nSLURM_NTASKS=8\r\nSLURM_TOPOLOGY_ADDR=hkibb.hkibbi1.hkibbi1e8.hkn0710\r\nSLURM_TOPOLOGY_ADDR_PATTERN=switch.switch.switch.node\r\nSLURM_SRUN_COMM_HOST=10.0.7.199\r\nSLURM_SCRIPT_CONTEXT=prolog_task\r\nSLURM_PTY_WIN_COL=147\r\nSLURM_NODELIST=hkn[0710-0711]\r\nSLURM_SRUN_COMM_PORT=44593\r\nSLURM_STEP_ID=4294967290\r\nSLURM_JOB_ACCOUNT=hk-project-p0023960\r\nSLURM_PRIO_PROCESS=0\r\nSLURM_NPROCS=8\r\nSLURM_NNODES=2\r\nSLURM_SUBMIT_HOST=hkn1991.localdomain\r\nSLURM_JOB_ID=3350418\r\nSLURM_NODEID=0\r\nSLURM_STEP_NUM_NODES=1\r\nSLURM_STEP_TASKS_PER_NODE=1\r\nSLURM_MPI_TYPE=pmi2\r\nSLURM_PMI2_STEP_NODES=hkn0710\r\nSLURM_CONF=/etc/slurm/slurm.conf\r\nSLURM_JOB_NAME=interactive\r\nSLURM_NTASKS_PER_NODE=4\r\nSLURM_STEP_LAUNCHER_PORT=44593\r\nSLURM_JOB_GID=502226\r\nSLURM_JOB_NODELIST=hkn[0710-0711]\r\n",,terminal_output +2773,6544398,"TERMINAL",0,0,"GpuFreq=control_disabled\r\nGpuFreq=control_disabled\r\n",,terminal_output +2774,6555323,"TERMINAL",0,0,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/tyro/_parsers.py:347: UserWarning: The field `param-dtype` is annotated with type ``, but the default value `` has type ``. We'll try to handle this gracefully, but it may cause unexpected behavior.\r\n warnings.warn(message)\r\n/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/tyro/_parsers.py:347: UserWarning: The field `dtype` is annotated with type ``, but the default value `` has type ``. We'll try to handle this gracefully, but it may cause unexpected behavior.\r\n warnings.warn(message)\r\n/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/tyro/_parsers.py:347: UserWarning: The field `param-dtype` is annotated with type ``, but the default value `` has type ``. We'll try to handle this gracefully, but it may cause unexpected behavior.\r\n warnings.warn(message)\r\n/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/tyro/_parsers.py:347: UserWarning: The field `dtype` is annotated with type ``, but the default value `` has type ``. We'll try to handle this gracefully, but it may cause unexpected behavior.\r\n warnings.warn(message)\r\n/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/tyro/_parsers.py:347: UserWarning: The field `param-dtype` is annotated with type ``, but the default value `` has type ``. 
We'll try to handle this gracefully, but it may cause unexpected behavior.\r\n warnings.warn(message)\r\n/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/tyro/_parsers.py:347: UserWarning: The field `dtype` is annotated with type ``, but the default value `` has type ``. We'll try to handle this gracefully, but it may cause unexpected behavior.\r\n warnings.warn(message)\r\n",,terminal_output +2775,6555382,"TERMINAL",0,0,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/tyro/_parsers.py:347: UserWarning: The field `param-dtype` is annotated with type ``, but the default value `` has type ``. We'll try to handle this gracefully, but it may cause unexpected behavior.\r\n warnings.warn(message)\r\n/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/tyro/_parsers.py:347: UserWarning: The field `dtype` is annotated with type ``, but the default value `` has type ``. We'll try to handle this gracefully, but it may cause unexpected behavior.\r\n warnings.warn(message)\r\n",,terminal_output +2776,6555443,"TERMINAL",0,0,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/tyro/_parsers.py:347: UserWarning: The field `param-dtype` is annotated with type ``, but the default value `` has type ``. We'll try to handle this gracefully, but it may cause unexpected behavior.\r\n warnings.warn(message)\r\n/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/tyro/_parsers.py:347: UserWarning: The field `param-dtype` is annotated with type ``, but the default value `` has type ``. We'll try to handle this gracefully, but it may cause unexpected behavior.\r\n warnings.warn(message)\r\n/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/tyro/_parsers.py:347: UserWarning: The field `param-dtype` is annotated with type ``, but the default value `` has type ``. We'll try to handle this gracefully, but it may cause unexpected behavior.\r\n warnings.warn(message)\r\n/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/tyro/_parsers.py:347: UserWarning: The field `param-dtype` is annotated with type ``, but the default value `` has type ``. We'll try to handle this gracefully, but it may cause unexpected behavior.\r\n warnings.warn(message)\r\n/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/tyro/_parsers.py:347: UserWarning: The field `dtype` is annotated with type ``, but the default value `` has type ``. We'll try to handle this gracefully, but it may cause unexpected behavior.\r\n warnings.warn(message)\r\n/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/tyro/_parsers.py:347: UserWarning: The field `dtype` is annotated with type ``, but the default value `` has type ``. We'll try to handle this gracefully, but it may cause unexpected behavior.\r\n warnings.warn(message)\r\n/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/tyro/_parsers.py:347: UserWarning: The field `dtype` is annotated with type ``, but the default value `` has type ``. 
We'll try to handle this gracefully, but it may cause unexpected behavior.\r\n warnings.warn(message)\r\n/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/tyro/_parsers.py:347: UserWarning: The field `dtype` is annotated with type ``, but the default value `` has type ``. We'll try to handle this gracefully, but it may cause unexpected behavior.\r\n warnings.warn(message)\r\n",,terminal_output +2777,6559112,"TERMINAL",0,0,"Running on 8 devices.\r\nRunning on 8 devices.\r\njax.errors.SimplifiedTraceback: For simplicity, JAX has removed its internal frames from the traceback of the following exception. Set JAX_TRACEBACK_FILTERING=off to include these.\r\n\r\nThe above exception was the direct cause of the following exception:\r\n\r\nTraceback (most recent call last):\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py"", line 209, in \r\njax.errors.SimplifiedTraceback: For simplicity, JAX has removed its internal frames from the traceback of the following exception. Set JAX_TRACEBACK_FILTERING=off to include these.\r\n\r\nThe above exception was the direct cause of the following exception:\r\n\r\nTraceback (most recent call last):\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py"", line 209, in \r\n init_params = genie.init(_rng, dummy_inputs)\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/genie.py"", line 98, in __call__\r\n init_params = genie.init(_rng, dummy_inputs)\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/genie.py"", line 98, in __call__\r\n tokenizer_outputs = self.tokenizer.vq_encode(batch[""videos""], training=False)\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/models/tokenizer.py"", line 26, in setup\r\n tokenizer_outputs = self.tokenizer.vq_encode(batch[""videos""], training=False)\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/models/tokenizer.py"", line 26, in setup\r\nRunning on 8 devices.\r\njax.errors.SimplifiedTraceback: For simplicity, JAX has removed its internal frames from the traceback of the following exception. Set JAX_TRACEBACK_FILTERING=off to include these.\r\n\r\nThe above exception was the direct cause of the following exception:\r\n\r\nTraceback (most recent call last):\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py"", line 209, in \r\n init_params = genie.init(_rng, dummy_inputs)\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/genie.py"", line 98, in __call__\r\n tokenizer_outputs = self.tokenizer.vq_encode(batch[""videos""], training=False)\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/models/tokenizer.py"", line 26, in setup\r\nRunning on 8 devices.\r\njax.errors.SimplifiedTraceback: For simplicity, JAX has removed its internal frames from the traceback of the following exception. 
Set JAX_TRACEBACK_FILTERING=off to include these.\r\n\r\nThe above exception was the direct cause of the following exception:\r\n\r\nTraceback (most recent call last):\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py"", line 209, in \r\n init_params = genie.init(_rng, dummy_inputs)\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/genie.py"", line 98, in __call__\r\n tokenizer_outputs = self.tokenizer.vq_encode(batch[""videos""], training=False)\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/models/tokenizer.py"", line 26, in setup\r\nRunning on 8 devices.\r\n self.encoder = STTransformer(\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/flax/linen/kw_only_dataclasses.py"", line 235, in init_wrapper\r\nRunning on 8 devices.\r\njax.errors.SimplifiedTraceback: For simplicity, JAX has removed its internal frames from the traceback of the following exception. Set JAX_TRACEBACK_FILTERING=off to include these.\r\n\r\nThe above exception was the direct cause of the following exception:\r\n\r\nTraceback (most recent call last):\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py"", line 209, in \r\n init_params = genie.init(_rng, dummy_inputs)\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/genie.py"", line 98, in __call__\r\njax.errors.SimplifiedTraceback: For simplicity, JAX has removed its internal frames from the traceback of the following exception. Set JAX_TRACEBACK_FILTERING=off to include these.\r\n\r\nThe above exception was the direct cause of the following exception:\r\n\r\nTraceback (most recent call last):\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py"", line 209, in \r\n init_params = genie.init(_rng, dummy_inputs)\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/genie.py"", line 98, in __call__\r\n tokenizer_outputs = self.tokenizer.vq_encode(batch[""videos""], training=False)\r\n tokenizer_outputs = self.tokenizer.vq_encode(batch[""videos""], training=False)\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/models/tokenizer.py"", line 26, in setup\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/models/tokenizer.py"", line 26, in setup\r\nRunning on 8 devices.\r\n self.encoder = STTransformer(\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/flax/linen/kw_only_dataclasses.py"", line 235, in init_wrapper\r\n self.encoder = STTransformer(\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/flax/linen/kw_only_dataclasses.py"", line 235, in init_wrapper\r\njax.errors.SimplifiedTraceback: For simplicity, JAX has removed its internal frames from the traceback of the following exception. 
Set JAX_TRACEBACK_FILTERING=off to include these.\r\n\r\nThe above exception was the direct cause of the following exception:\r\n\r\nTraceback (most recent call last):\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py"", line 209, in \r\n init_params = genie.init(_rng, dummy_inputs)\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/genie.py"", line 98, in __call__\r\n tokenizer_outputs = self.tokenizer.vq_encode(batch[""videos""], training=False)\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/models/tokenizer.py"", line 26, in setup\r\nRunning on 8 devices.\r\njax.errors.SimplifiedTraceback: For simplicity, JAX has removed its internal frames from the traceback of the following exception. Set JAX_TRACEBACK_FILTERING=off to include these.\r\n\r\nThe above exception was the direct cause of the following exception:\r\n\r\nTraceback (most recent call last):\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py"", line 209, in \r\n init_params = genie.init(_rng, dummy_inputs)\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/genie.py"", line 98, in __call__\r\n self.encoder = STTransformer(\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/flax/linen/kw_only_dataclasses.py"", line 235, in init_wrapper\r\n tokenizer_outputs = self.tokenizer.vq_encode(batch[""videos""], training=False)\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/models/tokenizer.py"", line 26, in setup\r\n self.encoder = STTransformer(\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/flax/linen/kw_only_dataclasses.py"", line 235, in init_wrapper\r\n self.encoder = STTransformer(\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/flax/linen/kw_only_dataclasses.py"", line 235, in init_wrapper\r\n self.encoder = STTransformer(\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/flax/linen/kw_only_dataclasses.py"", line 235, in init_wrapper\r\n self.encoder = STTransformer(\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/flax/linen/kw_only_dataclasses.py"", line 235, in init_wrapper\r\n dataclass_init(self, *args, **kwargs)\r\nTypeError: STTransformer.__init__() missing 1 required positional argument: 'dtype'\r\n dataclass_init(self, *args, **kwargs)\r\nTypeError: STTransformer.__init__() missing 1 required positional argument: 'dtype'\r\n dataclass_init(self, *args, **kwargs)\r\nTypeError: STTransformer.__init__() missing 1 required positional argument: 'dtype'\r\n dataclass_init(self, *args, **kwargs)\r\nTypeError: STTransformer.__init__() missing 1 required positional argument: 'dtype'\r\n dataclass_init(self, *args, **kwargs)\r\nTypeError: STTransformer.__init__() missing 1 required positional argument: 'dtype'\r\n dataclass_init(self, *args, **kwargs)\r\nTypeError: STTransformer.__init__() missing 1 required positional argument: 'dtype'\r\n dataclass_init(self, *args, **kwargs)\r\nTypeError: STTransformer.__init__() missing 1 required positional argument: 'dtype'\r\n dataclass_init(self, *args, **kwargs)\r\nTypeError: STTransformer.__init__() missing 1 required positional argument: 'dtype'\r\n",,terminal_output 
+2778,6559827,"TERMINAL",0,0,"srun: error: hkn0710: tasks 1-2: Exited with exit code 1\r\nsrun: error: hkn0711: tasks 5-6: Exited with exit code 1\r\n",,terminal_output +2779,6559934,"TERMINAL",0,0,"srun: error: hkn0710: task 0: Exited with exit code 1\r\nsrun: error: hkn0711: task 4: Exited with exit code 1\r\n",,terminal_output +2780,6560146,"TERMINAL",0,0,"srun: error: hkn0711: task 7: Exited with exit code 1\r\nsrun: error: hkn0710: task 3: Exited with exit code 1\r\n]0;tum_cte0515@hkn0710:~/Projects/jafar[?2004h(jafar) [tum_cte0515@hkn0710 jafar]$ ",,terminal_output +2781,6583497,"utils/nn.py",0,0,"",python,tab +2782,6583498,"utils/nn.py",4027,0,"",python,selection_mouse +2783,6584098,"utils/nn.py",2745,1282,"\nclass CausalTransformer(nn.Module):\n model_dim: int\n out_dim: int\n num_blocks: int\n num_heads: int\n dropout: float\n param_dtype: jnp.dtype\n dtype: jnp.dtype\n\n @nn.compact\n def __call__(self, x: jax.Array) -> jax.Array:\n # Input projection and normalization\n x = nn.Sequential(\n [\n nn.LayerNorm(\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n ),\n nn.Dense(self.model_dim,\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n ),\n nn.LayerNorm(\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n ),\n ]\n )(x)\n # Causal transformer blocks\n for _ in range(self.num_blocks):\n x = STBlock2(\n dim=self.model_dim,\n num_heads=self.num_heads,\n dropout=self.dropout,\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n )(x)\n\n # Output projection\n x = nn.Dense(\n self.out_dim,\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n )(x)\n return x # (B, T, E)\n",python,selection_mouse +2784,6584159,"utils/nn.py",1004,3023,"lass STBlock2(nn.Module):\n dim: int\n num_heads: int\n dropout: float\n param_dtype: jnp.dtype\n dtype: jnp.dtype\n\n @nn.remat\n @nn.compact\n def __call__(self, x: jax.Array) -> jax.Array:\n # --- Spatial attention ---\n z = PositionalEncoding(self.dim)(x)\n z = nn.LayerNorm(\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n )(z)\n causal_mask = jnp.tri(z.shape[-2])\n z = nn.MultiHeadAttention(\n num_heads=self.num_heads,\n qkv_features=self.dim,\n dropout_rate=self.dropout,\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n )(z, mask=causal_mask)\n x = x + z\n\n # --- Temporal attention ---\n x = x.swapaxes(1, 2)\n z = PositionalEncoding(self.dim)(x)\n z = nn.LayerNorm(\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n )(z)\n causal_mask = jnp.tri(z.shape[-2])\n z = nn.MultiHeadAttention(\n num_heads=self.num_heads,\n qkv_features=self.dim,\n dropout_rate=self.dropout,\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n )(z, mask=causal_mask)\n x = x + z\n x = x.swapaxes(1, 2)\n\n # --- Feedforward ---\n z = nn.LayerNorm(\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n )(x)\n # FIXME (f.srambical): Here, the attention hidden dimension is the same as the FFN's. 
Usually, FFN hidden dimension is 4x model_dim\n z = nn.Dense(\n self.dim,\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n )(z)\n z = nn.gelu(z)\n x = x + z\n\n return x\n\nclass CausalTransformer(nn.Module):\n model_dim: int\n out_dim: int\n num_blocks: int\n num_heads: int\n dropout: float\n param_dtype: jnp.dtype\n dtype: jnp.dtype\n\n @nn.compact\n def __call__(self, x: jax.Array) -> jax.Array:\n # Input projection and normalization\n x = nn.Sequential(\n [\n nn.LayerNorm(\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n ),\n nn.Dense(self.model_dim,\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n ),\n nn.LayerNorm(\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n ),\n ]\n )(x)\n # Causal transformer blocks\n for _ in range(self.num_blocks):\n x = STBlock2(\n dim=self.model_dim,\n num_heads=self.num_heads,\n dropout=self.dropout,\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n )(x)\n\n # Output projection\n x = nn.Dense(\n self.out_dim,\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n )(x)\n return x # (B, T, E)\n",python,selection_mouse +2785,6584200,"utils/nn.py",1002,3025,"\nclass STBlock2(nn.Module):\n dim: int\n num_heads: int\n dropout: float\n param_dtype: jnp.dtype\n dtype: jnp.dtype\n\n @nn.remat\n @nn.compact\n def __call__(self, x: jax.Array) -> jax.Array:\n # --- Spatial attention ---\n z = PositionalEncoding(self.dim)(x)\n z = nn.LayerNorm(\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n )(z)\n causal_mask = jnp.tri(z.shape[-2])\n z = nn.MultiHeadAttention(\n num_heads=self.num_heads,\n qkv_features=self.dim,\n dropout_rate=self.dropout,\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n )(z, mask=causal_mask)\n x = x + z\n\n # --- Temporal attention ---\n x = x.swapaxes(1, 2)\n z = PositionalEncoding(self.dim)(x)\n z = nn.LayerNorm(\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n )(z)\n causal_mask = jnp.tri(z.shape[-2])\n z = nn.MultiHeadAttention(\n num_heads=self.num_heads,\n qkv_features=self.dim,\n dropout_rate=self.dropout,\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n )(z, mask=causal_mask)\n x = x + z\n x = x.swapaxes(1, 2)\n\n # --- Feedforward ---\n z = nn.LayerNorm(\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n )(x)\n # FIXME (f.srambical): Here, the attention hidden dimension is the same as the FFN's. 
Usually, FFN hidden dimension is 4x model_dim\n z = nn.Dense(\n self.dim,\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n )(z)\n z = nn.gelu(z)\n x = x + z\n\n return x\n\nclass CausalTransformer(nn.Module):\n model_dim: int\n out_dim: int\n num_blocks: int\n num_heads: int\n dropout: float\n param_dtype: jnp.dtype\n dtype: jnp.dtype\n\n @nn.compact\n def __call__(self, x: jax.Array) -> jax.Array:\n # Input projection and normalization\n x = nn.Sequential(\n [\n nn.LayerNorm(\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n ),\n nn.Dense(self.model_dim,\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n ),\n nn.LayerNorm(\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n ),\n ]\n )(x)\n # Causal transformer blocks\n for _ in range(self.num_blocks):\n x = STBlock2(\n dim=self.model_dim,\n num_heads=self.num_heads,\n dropout=self.dropout,\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n )(x)\n\n # Output projection\n x = nn.Dense(\n self.out_dim,\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n )(x)\n return x # (B, T, E)\n",python,selection_mouse +2786,6585584,"utils/nn.py",1003,0,"",python,selection_command +2787,6586787,"utils/nn.py",4005,0,"#",python,content +2788,6586788,"utils/nn.py",3992,0,"#",python,content +2789,6586788,"utils/nn.py",3966,0,"#",python,content +2790,6586788,"utils/nn.py",3924,0,"#",python,content +2791,6586788,"utils/nn.py",3898,0,"#",python,content +2792,6586788,"utils/nn.py",3872,0,"#",python,content +2793,6586788,"utils/nn.py",3844,0,"#",python,content +2794,6586788,"utils/nn.py",3830,0,"#",python,content +2795,6586788,"utils/nn.py",3800,0,"#",python,content +2796,6586788,"utils/nn.py",3754,0,"#",python,content +2797,6586788,"utils/nn.py",3716,0,"#",python,content +2798,6586788,"utils/nn.py",3674,0,"#",python,content +2799,6586788,"utils/nn.py",3638,0,"#",python,content +2800,6586788,"utils/nn.py",3608,0,"#",python,content +2801,6586788,"utils/nn.py",3563,0,"#",python,content +2802,6586789,"utils/nn.py",3527,0,"#",python,content +2803,6586789,"utils/nn.py",3514,0,"#",python,content +2804,6586789,"utils/nn.py",3504,0,"#",python,content +2805,6586789,"utils/nn.py",3489,0,"#",python,content +2806,6586789,"utils/nn.py",3455,0,"#",python,content +2807,6586789,"utils/nn.py",3405,0,"#",python,content +2808,6586789,"utils/nn.py",3371,0,"#",python,content +2809,6586789,"utils/nn.py",3352,0,"#",python,content +2810,6586789,"utils/nn.py",3318,0,"#",python,content +2811,6586789,"utils/nn.py",3272,0,"#",python,content +2812,6586789,"utils/nn.py",3231,0,"#",python,content +2813,6586789,"utils/nn.py",3212,0,"#",python,content +2814,6586789,"utils/nn.py",3178,0,"#",python,content +2815,6586789,"utils/nn.py",3128,0,"#",python,content +2816,6586789,"utils/nn.py",3094,0,"#",python,content +2817,6586789,"utils/nn.py",3076,0,"#",python,content +2818,6586789,"utils/nn.py",3045,0,"#",python,content +2819,6586790,"utils/nn.py",3000,0,"#",python,content +2820,6586790,"utils/nn.py",2945,0,"#",python,content +2821,6586790,"utils/nn.py",2929,0,"#",python,content +2822,6586790,"utils/nn.py",2907,0,"#",python,content +2823,6586790,"utils/nn.py",2880,0,"#",python,content +2824,6586790,"utils/nn.py",2861,0,"#",python,content +2825,6586790,"utils/nn.py",2842,0,"#",python,content +2826,6586790,"utils/nn.py",2822,0,"#",python,content +2827,6586790,"utils/nn.py",2805,0,"#",python,content +2828,6586790,"utils/nn.py",2786,0,"#",python,content +2829,6586790,"utils/nn.py",2746,0,"#",python,content +2830,6586790,"utils/nn.py",2736,0,"#",python,content 
+2831,6586790,"utils/nn.py",2717,0,"#",python,content +2832,6586790,"utils/nn.py",2694,0,"#",python,content +2833,6586790,"utils/nn.py",2681,0,"#",python,content +2834,6586790,"utils/nn.py",2655,0,"#",python,content +2835,6586791,"utils/nn.py",2613,0,"#",python,content +2836,6586791,"utils/nn.py",2591,0,"#",python,content +2837,6586791,"utils/nn.py",2565,0,"#",python,content +2838,6586791,"utils/nn.py",2425,0,"#",python,content +2839,6586791,"utils/nn.py",2412,0,"#",python,content +2840,6586791,"utils/nn.py",2386,0,"#",python,content +2841,6586791,"utils/nn.py",2344,0,"#",python,content +2842,6586791,"utils/nn.py",2314,0,"#",python,content +2843,6586791,"utils/nn.py",2284,0,"#",python,content +2844,6586791,"utils/nn.py",2254,0,"#",python,content +2845,6586791,"utils/nn.py",2236,0,"#",python,content +2846,6586791,"utils/nn.py",2205,0,"#",python,content +2847,6586791,"utils/nn.py",2179,0,"#",python,content +2848,6586791,"utils/nn.py",2137,0,"#",python,content +2849,6586791,"utils/nn.py",2098,0,"#",python,content +2850,6586791,"utils/nn.py",2063,0,"#",python,content +2851,6586791,"utils/nn.py",2025,0,"#",python,content +2852,6586792,"utils/nn.py",1986,0,"#",python,content +2853,6586792,"utils/nn.py",1943,0,"#",python,content +2854,6586792,"utils/nn.py",1930,0,"#",python,content +2855,6586792,"utils/nn.py",1904,0,"#",python,content +2856,6586792,"utils/nn.py",1862,0,"#",python,content +2857,6586792,"utils/nn.py",1832,0,"#",python,content +2858,6586792,"utils/nn.py",1788,0,"#",python,content +2859,6586794,"utils/nn.py",1759,0,"#",python,content +2860,6586794,"utils/nn.py",1722,0,"#",python,content +2861,6586794,"utils/nn.py",1703,0,"#",python,content +2862,6586794,"utils/nn.py",1672,0,"#",python,content +2863,6586794,"utils/nn.py",1646,0,"#",python,content +2864,6586794,"utils/nn.py",1604,0,"#",python,content +2865,6586794,"utils/nn.py",1565,0,"#",python,content +2866,6586794,"utils/nn.py",1530,0,"#",python,content +2867,6586795,"utils/nn.py",1492,0,"#",python,content +2868,6586795,"utils/nn.py",1453,0,"#",python,content +2869,6586795,"utils/nn.py",1410,0,"#",python,content +2870,6586795,"utils/nn.py",1397,0,"#",python,content +2871,6586795,"utils/nn.py",1371,0,"#",python,content +2872,6586795,"utils/nn.py",1329,0,"#",python,content +2873,6586795,"utils/nn.py",1299,0,"#",python,content +2874,6586795,"utils/nn.py",1255,0,"#",python,content +2875,6586795,"utils/nn.py",1219,0,"#",python,content +2876,6586795,"utils/nn.py",1164,0,"#",python,content +2877,6586795,"utils/nn.py",1148,0,"#",python,content +2878,6586795,"utils/nn.py",1134,0,"#",python,content +2879,6586795,"utils/nn.py",1112,0,"#",python,content +2880,6586795,"utils/nn.py",1085,0,"#",python,content +2881,6586795,"utils/nn.py",1066,0,"#",python,content +2882,6586796,"utils/nn.py",1047,0,"#",python,content +2883,6586796,"utils/nn.py",1034,0,"#",python,content +2884,6586796,"utils/nn.py",1003,0,"#",python,content +2885,6586797,"utils/nn.py",1004,0,"",python,selection_keyboard +2886,6586882,"utils/nn.py",4103,0," ",python,content +2887,6586882,"utils/nn.py",4089,0," ",python,content +2888,6586882,"utils/nn.py",4062,0," ",python,content +2889,6586882,"utils/nn.py",4019,0," ",python,content +2890,6586882,"utils/nn.py",3992,0," ",python,content +2891,6586882,"utils/nn.py",3965,0," ",python,content +2892,6586882,"utils/nn.py",3936,0," ",python,content +2893,6586882,"utils/nn.py",3921,0," ",python,content +2894,6586882,"utils/nn.py",3890,0," ",python,content +2895,6586882,"utils/nn.py",3843,0," ",python,content 
+2896,6586882,"utils/nn.py",3804,0," ",python,content +2897,6586883,"utils/nn.py",3761,0," ",python,content +2898,6586883,"utils/nn.py",3724,0," ",python,content +2899,6586883,"utils/nn.py",3693,0," ",python,content +2900,6586883,"utils/nn.py",3647,0," ",python,content +2901,6586883,"utils/nn.py",3610,0," ",python,content +2902,6586883,"utils/nn.py",3596,0," ",python,content +2903,6586883,"utils/nn.py",3585,0," ",python,content +2904,6586883,"utils/nn.py",3569,0," ",python,content +2905,6586883,"utils/nn.py",3534,0," ",python,content +2906,6586883,"utils/nn.py",3483,0," ",python,content +2907,6586883,"utils/nn.py",3448,0," ",python,content +2908,6586883,"utils/nn.py",3428,0," ",python,content +2909,6586883,"utils/nn.py",3393,0," ",python,content +2910,6586883,"utils/nn.py",3346,0," ",python,content +2911,6586883,"utils/nn.py",3304,0," ",python,content +2912,6586883,"utils/nn.py",3284,0," ",python,content +2913,6586883,"utils/nn.py",3249,0," ",python,content +2914,6586884,"utils/nn.py",3198,0," ",python,content +2915,6586884,"utils/nn.py",3163,0," ",python,content +2916,6586884,"utils/nn.py",3144,0," ",python,content +2917,6586884,"utils/nn.py",3112,0," ",python,content +2918,6586884,"utils/nn.py",3066,0," ",python,content +2919,6586884,"utils/nn.py",3010,0," ",python,content +2920,6586884,"utils/nn.py",2993,0," ",python,content +2921,6586884,"utils/nn.py",2970,0," ",python,content +2922,6586884,"utils/nn.py",2942,0," ",python,content +2923,6586884,"utils/nn.py",2922,0," ",python,content +2924,6586884,"utils/nn.py",2902,0," ",python,content +2925,6586884,"utils/nn.py",2881,0," ",python,content +2926,6586884,"utils/nn.py",2863,0," ",python,content +2927,6586884,"utils/nn.py",2843,0," ",python,content +2928,6586884,"utils/nn.py",2802,0," ",python,content +2929,6586884,"utils/nn.py",2791,0," ",python,content +2930,6586884,"utils/nn.py",2771,0," ",python,content +2931,6586884,"utils/nn.py",2747,0," ",python,content +2932,6586885,"utils/nn.py",2733,0," ",python,content +2933,6586885,"utils/nn.py",2706,0," ",python,content +2934,6586885,"utils/nn.py",2663,0," ",python,content +2935,6586885,"utils/nn.py",2640,0," ",python,content +2936,6586885,"utils/nn.py",2613,0," ",python,content +2937,6586885,"utils/nn.py",2472,0," ",python,content +2938,6586885,"utils/nn.py",2458,0," ",python,content +2939,6586885,"utils/nn.py",2431,0," ",python,content +2940,6586885,"utils/nn.py",2388,0," ",python,content +2941,6586885,"utils/nn.py",2357,0," ",python,content +2942,6586885,"utils/nn.py",2326,0," ",python,content +2943,6586885,"utils/nn.py",2295,0," ",python,content +2944,6586885,"utils/nn.py",2276,0," ",python,content +2945,6586885,"utils/nn.py",2244,0," ",python,content +2946,6586885,"utils/nn.py",2217,0," ",python,content +2947,6586885,"utils/nn.py",2174,0," ",python,content +2948,6586886,"utils/nn.py",2134,0," ",python,content +2949,6586887,"utils/nn.py",2098,0," ",python,content +2950,6586887,"utils/nn.py",2059,0," ",python,content +2951,6586887,"utils/nn.py",2019,0," ",python,content +2952,6586887,"utils/nn.py",1975,0," ",python,content +2953,6586887,"utils/nn.py",1961,0," ",python,content +2954,6586887,"utils/nn.py",1934,0," ",python,content +2955,6586887,"utils/nn.py",1891,0," ",python,content +2956,6586887,"utils/nn.py",1860,0," ",python,content +2957,6586887,"utils/nn.py",1815,0," ",python,content +2958,6586887,"utils/nn.py",1785,0," ",python,content +2959,6586888,"utils/nn.py",1747,0," ",python,content +2960,6586888,"utils/nn.py",1727,0," ",python,content +2961,6586888,"utils/nn.py",1695,0," 
",python,content +2962,6586888,"utils/nn.py",1668,0," ",python,content +2963,6586888,"utils/nn.py",1625,0," ",python,content +2964,6586888,"utils/nn.py",1585,0," ",python,content +2965,6586888,"utils/nn.py",1549,0," ",python,content +2966,6586888,"utils/nn.py",1510,0," ",python,content +2967,6586888,"utils/nn.py",1470,0," ",python,content +2968,6586888,"utils/nn.py",1426,0," ",python,content +2969,6586888,"utils/nn.py",1412,0," ",python,content +2970,6586888,"utils/nn.py",1385,0," ",python,content +2971,6586888,"utils/nn.py",1342,0," ",python,content +2972,6586888,"utils/nn.py",1311,0," ",python,content +2973,6586888,"utils/nn.py",1266,0," ",python,content +2974,6586888,"utils/nn.py",1229,0," ",python,content +2975,6586889,"utils/nn.py",1173,0," ",python,content +2976,6586889,"utils/nn.py",1156,0," ",python,content +2977,6586889,"utils/nn.py",1141,0," ",python,content +2978,6586889,"utils/nn.py",1118,0," ",python,content +2979,6586889,"utils/nn.py",1090,0," ",python,content +2980,6586889,"utils/nn.py",1070,0," ",python,content +2981,6586889,"utils/nn.py",1050,0," ",python,content +2982,6586889,"utils/nn.py",1036,0," ",python,content +2983,6586889,"utils/nn.py",1004,0," ",python,content +2984,6586890,"utils/nn.py",1005,0,"",python,selection_keyboard +2985,6587242,"utils/nn.py",1004,0,"",python,selection_command +2986,6595179,"utils/nn.py",6975,0,"",python,selection_mouse +2987,6595314,"utils/nn.py",6969,12,"spacial_bert",python,selection_mouse +2988,6596004,"utils/nn.py",6991,0,"",python,selection_mouse +2989,6596125,"utils/nn.py",6982,12,"spacial_bert",python,selection_mouse +2990,6596975,"utils/nn.py",6833,0,"",python,selection_mouse +2991,6600734,"utils/nn.py",6831,0,"",python,selection_mouse +2992,6603695,"utils/nn.py",4315,0,"",python,selection_mouse +2993,6608508,"models/dynamics.py",0,0,"",python,tab +2994,6610093,"utils/nn.py",0,0,"",python,tab +2995,6613219,"utils/nn.py",4373,0,"",python,selection_mouse +2996,6614697,"utils/nn.py",0,0,"",python,tab +2997,6631800,"utils/nn.py",7212,0,"",python,selection_mouse +2998,6631813,"utils/nn.py",7211,0,"",python,selection_command +2999,6633191,"utils/nn.py",6989,0,"",python,selection_mouse +3000,6636209,"utils/nn.py",6939,0,"",python,selection_mouse +3001,6637134,"utils/nn.py",6982,0,"",python,selection_mouse +3002,6638234,"utils/nn.py",6982,0,"s",python,content +3003,6638236,"utils/nn.py",6983,0,"",python,selection_keyboard +3004,6638406,"utils/nn.py",6983,0,"e",python,content +3005,6638408,"utils/nn.py",6984,0,"",python,selection_keyboard +3006,6638494,"utils/nn.py",6984,0,"l",python,content +3007,6638496,"utils/nn.py",6985,0,"",python,selection_keyboard +3008,6638563,"utils/nn.py",6985,0,"f",python,content +3009,6638564,"utils/nn.py",6986,0,"",python,selection_keyboard +3010,6638686,"utils/nn.py",6986,0,".",python,content +3011,6638688,"utils/nn.py",6987,0,"",python,selection_keyboard +3012,6638996,"utils/nn.py",6986,0,"",python,selection_command +3013,6644746,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",0,0,"",python,tab +3014,6646092,"genie.py",0,0,"",python,tab +3015,6651098,"genie.py",2478,0,"",python,selection_mouse +3016,6651375,"models/dynamics.py",0,0,"",python,tab +3017,6658103,"models/dynamics.py",2299,0,"",python,selection_mouse +3018,6658135,"models/dynamics.py",2298,0,"",python,selection_command +3019,6662732,"models/dynamics.py",2324,0,"",python,selection_command +3020,6663068,"models/dynamics.py",2325,0,"\n ",python,content +3021,6663580,"models/dynamics.py",2338,0,"s",python,content 
+3022,6663581,"models/dynamics.py",2339,0,"",python,selection_keyboard +3023,6663859,"models/dynamics.py",2339,0,"e",python,content +3024,6663860,"models/dynamics.py",2340,0,"",python,selection_keyboard +3025,6664147,"models/dynamics.py",2340,0,"l",python,content +3026,6664150,"models/dynamics.py",2341,0,"",python,selection_keyboard +3027,6664281,"models/dynamics.py",2341,0,"f",python,content +3028,6664282,"models/dynamics.py",2342,0,"",python,selection_keyboard +3029,6664724,"models/dynamics.py",2342,0,".",python,content +3030,6664725,"models/dynamics.py",2343,0,"",python,selection_keyboard +3031,6665064,"models/dynamics.py",2343,0,"s",python,content +3032,6665065,"models/dynamics.py",2344,0,"",python,selection_keyboard +3033,6665554,"models/dynamics.py",2344,0,"p",python,content +3034,6665555,"models/dynamics.py",2345,0,"",python,selection_keyboard +3035,6666151,"models/dynamics.py",2345,0,"a",python,content +3036,6666152,"models/dynamics.py",2346,0,"",python,selection_keyboard +3037,6666353,"models/dynamics.py",2346,0,"c",python,content +3038,6666354,"models/dynamics.py",2347,0,"",python,selection_keyboard +3039,6666458,"models/dynamics.py",2347,0,"i",python,content +3040,6666459,"models/dynamics.py",2348,0,"",python,selection_keyboard +3041,6666741,"models/dynamics.py",2348,0,"a",python,content +3042,6666741,"models/dynamics.py",2349,0,"",python,selection_keyboard +3043,6666827,"models/dynamics.py",2349,0,"l",python,content +3044,6666828,"models/dynamics.py",2350,0,"",python,selection_keyboard +3045,6667183,"models/dynamics.py",2350,0,"_",python,content +3046,6667183,"models/dynamics.py",2351,0,"",python,selection_keyboard +3047,6667564,"models/dynamics.py",2351,0,"b",python,content +3048,6667565,"models/dynamics.py",2352,0,"",python,selection_keyboard +3049,6667720,"models/dynamics.py",2352,0,"e",python,content +3050,6667721,"models/dynamics.py",2353,0,"",python,selection_keyboard +3051,6667841,"models/dynamics.py",2353,0,"r",python,content +3052,6667842,"models/dynamics.py",2354,0,"",python,selection_keyboard +3053,6667923,"models/dynamics.py",2354,0,"t",python,content +3054,6667924,"models/dynamics.py",2355,0,"",python,selection_keyboard +3055,6668538,"models/dynamics.py",2355,0,",",python,content +3056,6668539,"models/dynamics.py",2356,0,"",python,selection_keyboard +3057,6669003,"models/dynamics.py",2355,0,"",python,selection_command +3058,6680560,"models/dynamics.py",2343,12,"",python,content +3059,6680987,"models/dynamics.py",2342,1,"",python,content +3060,6681573,"models/dynamics.py",2338,4,"",python,content +3061,6683839,"models/dynamics.py",2338,0,"F",python,content +3062,6683840,"models/dynamics.py",2339,0,"",python,selection_keyboard +3063,6684038,"models/dynamics.py",2339,0,"a",python,content +3064,6684039,"models/dynamics.py",2340,0,"",python,selection_keyboard +3065,6684101,"models/dynamics.py",2340,0,"l",python,content +3066,6684102,"models/dynamics.py",2341,0,"",python,selection_keyboard +3067,6684222,"models/dynamics.py",2341,0,"s",python,content +3068,6684223,"models/dynamics.py",2342,0,"",python,selection_keyboard +3069,6684394,"models/dynamics.py",2342,0,"e",python,content +3070,6684394,"models/dynamics.py",2343,0,"",python,selection_keyboard +3071,6685114,"models/dynamics.py",2342,0,"",python,selection_command +3072,6687996,"models/dynamics.py",571,0,"",python,selection_mouse +3073,6688000,"models/dynamics.py",570,0,"",python,selection_command +3074,6688550,"models/dynamics.py",597,0,"",python,selection_mouse 
+3075,6688554,"models/dynamics.py",596,0,"",python,selection_command +3076,6690088,"models/dynamics.py",597,0,"\n ",python,content +3077,6690450,"models/dynamics.py",610,0,"T",python,content +3078,6690451,"models/dynamics.py",611,0,"",python,selection_keyboard +3079,6691219,"models/dynamics.py",611,0,"r",python,content +3080,6691220,"models/dynamics.py",612,0,"",python,selection_keyboard +3081,6691321,"models/dynamics.py",612,0,"u",python,content +3082,6691322,"models/dynamics.py",613,0,"",python,selection_keyboard +3083,6691454,"models/dynamics.py",613,0,"e",python,content +3084,6691455,"models/dynamics.py",614,0,"",python,selection_keyboard +3085,6692433,"models/dynamics.py",614,0,",",python,content +3086,6692434,"models/dynamics.py",615,0,"",python,selection_keyboard +3087,6692495,"models/dynamics.py",614,0,"",python,selection_command +3088,6719522,"models/dynamics.py",715,0,"",python,selection_mouse +3089,6720048,"models/dynamics.py",1053,0,"",python,selection_mouse +3090,6720668,"models/dynamics.py",1296,0,"",python,selection_mouse +3091,6722792,"TERMINAL",0,0,"sh slurm/jobs/mihir/horeka/yolo-runs/tester.sh",,terminal_output +3092,6723245,"TERMINAL",0,0,"\r\n[?2004l\r\r\n# Log the sbatch script\r\ncat $0\r\n\r\nmodule unload mpi/openmpi/5.0\r\nmodule unload devel/cuda/12.4\r\nsource .venv/bin/activate\r\n\r\narray_records_dir=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_new/open_ai_minecraft_arrayrecords_chunked\r\nws_dir='/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/'\r\n\r\njob_name=""debug""\r\nslurm_job_id=""debug-mihir""\r\n\r\nCHECKPOINT_DIR=$ws_dir/checkpoints/$job_name/$slurm_job_id\r\nmkdir -p $CHECKPOINT_DIR\r\n\r\ntokenizer_ckpt_dir=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/big-runs/tokenizer-lr-scaling/train_tokenizer_lr_sweep_1e-4\r\n\r\nenv | grep SLURM\r\n\r\nXLA_FLAGS=--xla_gpu_autotune_level=0 srun python train_dynamics.py \\r\n --ckpt_dir $CHECKPOINT_DIR \\r\n --batch_size=96 \\r\n --max_lr=1e-4 \\r\n --log_image_interval=1000 \\r\n --log \\r\n --log_checkpoint_interval=1000 \\r\n --name=dynamics-debug-run-$slurm_job_id \\r\n --tags dynamics debug \\r\n --entity instant-uv \\r\n --project jafar \\r\n --tokenizer_checkpoint=$tokenizer_ckpt_dir \\r\n --data_dir $array_records_dir\r\n ",,terminal_output 
+3093,6723360,"TERMINAL",0,0,"SLURM_STEP_NUM_TASKS=1\r\nSLURM_JOB_USER=tum_cte0515\r\nSLURM_TASKS_PER_NODE=4(x2)\r\nSLURM_JOB_UID=999226\r\nSLURM_TASK_PID=2279443\r\nSLURM_JOB_GPUS=0,1,2,3\r\nSLURM_LOCALID=0\r\nSLURM_SUBMIT_DIR=/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar\r\nSLURMD_NODENAME=hkn0710\r\nSLURM_JOB_START_TIME=1752670186\r\nSLURM_STEP_NODELIST=hkn0710\r\nSLURM_CLUSTER_NAME=hk\r\nSLURM_JOB_END_TIME=1752706186\r\nSLURM_PMI2_SRUN_PORT=42215\r\nSLURM_CPUS_ON_NODE=24\r\nSLURM_JOB_CPUS_PER_NODE=24(x2)\r\nSLURM_GPUS_ON_NODE=4\r\nSLURM_GTIDS=0\r\nSLURM_JOB_PARTITION=accelerated\r\nSLURM_TRES_PER_TASK=cpu=5\r\nSLURM_OOM_KILL_STEP=0\r\nSLURM_JOB_NUM_NODES=2\r\nSLURM_STEPID=4294967290\r\nSLURM_JOBID=3350418\r\nSLURM_PTY_PORT=33491\r\nSLURM_JOB_QOS=normal\r\nSLURM_LAUNCH_NODE_IPADDR=10.0.7.199\r\nSLURM_PTY_WIN_ROW=43\r\nSLURM_PMI2_PROC_MAPPING=(vector,(0,1,1))\r\nSLURMD_DEBUG=2\r\nSLURM_PROCID=0\r\nSLURM_CPUS_PER_TASK=5\r\nSLURM_NTASKS=8\r\nSLURM_TOPOLOGY_ADDR=hkibb.hkibbi1.hkibbi1e8.hkn0710\r\nSLURM_TOPOLOGY_ADDR_PATTERN=switch.switch.switch.node\r\nSLURM_SRUN_COMM_HOST=10.0.7.199\r\nSLURM_SCRIPT_CONTEXT=prolog_task\r\nSLURM_PTY_WIN_COL=147\r\nSLURM_NODELIST=hkn[0710-0711]\r\nSLURM_SRUN_COMM_PORT=44593\r\nSLURM_STEP_ID=4294967290\r\nSLURM_JOB_ACCOUNT=hk-project-p0023960\r\nSLURM_PRIO_PROCESS=0\r\nSLURM_NPROCS=8\r\nSLURM_NNODES=2\r\nSLURM_SUBMIT_HOST=hkn1991.localdomain\r\nSLURM_JOB_ID=3350418\r\nSLURM_NODEID=0\r\nSLURM_STEP_NUM_NODES=1\r\nSLURM_STEP_TASKS_PER_NODE=1\r\nSLURM_MPI_TYPE=pmi2\r\nSLURM_PMI2_STEP_NODES=hkn0710\r\nSLURM_CONF=/etc/slurm/slurm.conf\r\nSLURM_JOB_NAME=interactive\r\nSLURM_NTASKS_PER_NODE=4\r\nSLURM_STEP_LAUNCHER_PORT=44593\r\nSLURM_JOB_GID=502226\r\nSLURM_JOB_NODELIST=hkn[0710-0711]\r\n",,terminal_output +3094,6723473,"TERMINAL",0,0,"GpuFreq=control_disabled\r\nGpuFreq=control_disabled\r\n",,terminal_output +3095,6725500,"TERMINAL",0,0,"Traceback (most recent call last):\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py"", line 18, in \r\n from genie import Genie, restore_genie_components\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/genie.py"", line 10, in \r\n from models.dynamics import DynamicsMaskGIT, DynamicsAutoregressive\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/models/dynamics.py"", line 8, in \r\n from utils.nn import STTransformer, CausalTransformer\r\nImportError: cannot import name 'CausalTransformer' from 'utils.nn' (/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/utils/nn.py)\r\nTraceback (most recent call last):\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py"", line 18, in \r\n from genie import Genie, restore_genie_components\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/genie.py"", line 10, in \r\n from models.dynamics import DynamicsMaskGIT, DynamicsAutoregressive\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/models/dynamics.py"", line 8, in \r\n from utils.nn import STTransformer, CausalTransformer\r\nImportError: cannot import name 'CausalTransformer' from 'utils.nn' (/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/utils/nn.py)\r\n",,terminal_output +3096,6725627,"TERMINAL",0,0,"Traceback (most recent call last):\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py"", line 18, in \r\n from genie import Genie, 
restore_genie_components\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/genie.py"", line 10, in \r\nTraceback (most recent call last):\r\n from models.dynamics import DynamicsMaskGIT, DynamicsAutoregressive\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/models/dynamics.py"", line 8, in \r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py"", line 18, in \r\n from utils.nn import STTransformer, CausalTransformer\r\nImportError: cannot import name 'CausalTransformer' from 'utils.nn' (/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/utils/nn.py)\r\nTraceback (most recent call last):\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py"", line 18, in \r\nTraceback (most recent call last):\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py"", line 18, in \r\n from genie import Genie, restore_genie_components\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/genie.py"", line 10, in \r\n from genie import Genie, restore_genie_components\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/genie.py"", line 10, in \r\n from models.dynamics import DynamicsMaskGIT, DynamicsAutoregressive\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/models/dynamics.py"", line 8, in \r\n from utils.nn import STTransformer, CausalTransformer\r\n from genie import Genie, restore_genie_components\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/genie.py"", line 10, in \r\n from models.dynamics import DynamicsMaskGIT, DynamicsAutoregressive\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/models/dynamics.py"", line 8, in \r\nImportError: cannot import name 'CausalTransformer' from 'utils.nn' (/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/utils/nn.py)\r\n from utils.nn import STTransformer, CausalTransformer\r\n from models.dynamics import DynamicsMaskGIT, DynamicsAutoregressive\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/models/dynamics.py"", line 8, in \r\nImportError: cannot import name 'CausalTransformer' from 'utils.nn' (/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/utils/nn.py)\r\n from utils.nn import STTransformer, CausalTransformer\r\nImportError: cannot import name 'CausalTransformer' from 'utils.nn' (/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/utils/nn.py)\r\nTraceback (most recent call last):\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py"", line 18, in \r\n from genie import Genie, restore_genie_components\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/genie.py"", line 10, in \r\n from models.dynamics import DynamicsMaskGIT, DynamicsAutoregressive\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/models/dynamics.py"", line 8, in \r\n from utils.nn import STTransformer, CausalTransformer\r\nImportError: cannot import name 'CausalTransformer' from 'utils.nn' (/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/utils/nn.py)\r\n",,terminal_output +3097,6725686,"TERMINAL",0,0,"Traceback (most recent call last):\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py"", line 18, in \r\n from genie import Genie, 
restore_genie_components\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/genie.py"", line 10, in \r\n from models.dynamics import DynamicsMaskGIT, DynamicsAutoregressive\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/models/dynamics.py"", line 8, in \r\n from utils.nn import STTransformer, CausalTransformer\r\nImportError: cannot import name 'CausalTransformer' from 'utils.nn' (/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/utils/nn.py)\r\n",,terminal_output +3098,6725791,"TERMINAL",0,0,"srun: error: hkn0711: tasks 5-6: Exited with exit code 1\r\n",,terminal_output +3099,6725921,"TERMINAL",0,0,"srun: error: hkn0711: tasks 4,7: Exited with exit code 1\r\n",,terminal_output +3100,6725992,"TERMINAL",0,0,"srun: error: hkn0710: tasks 0-3: Exited with exit code 1\r\n]0;tum_cte0515@hkn0710:~/Projects/jafar[?2004h(jafar) [tum_cte0515@hkn0710 jafar]$ ",,terminal_output +3101,6763160,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/utils/nn.py",0,0,"",python,tab +3102,6768989,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",0,0,"",python,tab +3103,6773229,"models/dynamics.py",0,0,"",python,tab +3104,6775574,"models/dynamics.py",139,0,"",python,selection_mouse +3105,6775792,"models/dynamics.py",139,4," Cau",python,selection_mouse +3106,6775793,"models/dynamics.py",139,19," CausalTransformer\n",python,selection_mouse +3107,6776194,"models/dynamics.py",139,18," CausalTransformer",python,selection_mouse +3108,6776248,"models/dynamics.py",103,36,"\nfrom utils.nn import STTransformer,",python,selection_mouse +3109,6776633,"models/dynamics.py",139,18," CausalTransformer",python,selection_mouse +3110,6777537,"models/dynamics.py",139,18,"",python,content +3111,6777574,"models/dynamics.py",138,0,"",python,selection_command +3112,6778477,"models/dynamics.py",138,1,"",python,content +3113,6778500,"models/dynamics.py",137,0,"",python,selection_command +3114,6781386,"utils/nn.py",0,0,"",python,tab +3115,6784738,"genie.py",0,0,"",python,tab +3116,6787790,"TERMINAL",0,0,"sh slurm/jobs/mihir/horeka/yolo-runs/tester.sh",,terminal_output +3117,6788058,"TERMINAL",0,0,"\r\n[?2004l\r\r\n# Log the sbatch script\r\ncat $0\r\n\r\nmodule unload mpi/openmpi/5.0\r\nmodule unload devel/cuda/12.4\r\nsource .venv/bin/activate\r\n\r\narray_records_dir=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_new/open_ai_minecraft_arrayrecords_chunked\r\nws_dir='/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/'\r\n\r\njob_name=""debug""\r\nslurm_job_id=""debug-mihir""\r\n\r\nCHECKPOINT_DIR=$ws_dir/checkpoints/$job_name/$slurm_job_id\r\nmkdir -p $CHECKPOINT_DIR\r\n\r\ntokenizer_ckpt_dir=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/big-runs/tokenizer-lr-scaling/train_tokenizer_lr_sweep_1e-4\r\n\r\nenv | grep SLURM\r\n\r\nXLA_FLAGS=--xla_gpu_autotune_level=0 srun python train_dynamics.py \\r\n --ckpt_dir $CHECKPOINT_DIR \\r\n --batch_size=96 \\r\n --max_lr=1e-4 \\r\n --log_image_interval=1000 \\r\n --log \\r\n --log_checkpoint_interval=1000 \\r\n --name=dynamics-debug-run-$slurm_job_id \\r\n --tags dynamics debug \\r\n --entity instant-uv \\r\n --project jafar \\r\n --tokenizer_checkpoint=$tokenizer_ckpt_dir \\r\n --data_dir $array_records_dir\r\n ",,terminal_output 
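The interleaved tracebacks above all report the same failure: models/dynamics.py imports CausalTransformer, a name utils/nn.py no longer defines, so every one of the eight SLURM tasks dies at import time before any training code runs. The edits at sequences 3110-3112 remove that name from the import. A self-contained reproduction of the failure mode (the module and name below are illustrative stand-ins, not the recorded files):

```python
# Importing a name a module does not define fails immediately, before any
# code in the importing module executes -- matching the identical
# ImportErrors above, one per srun task.
try:
    from math import causal_transformer  # math defines no such attribute
except ImportError as err:
    print(err)  # cannot import name 'causal_transformer' from 'math'
```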
+3118,6788167,"TERMINAL",0,0,"SLURM_STEP_NUM_TASKS=1\r\nSLURM_JOB_USER=tum_cte0515\r\nSLURM_TASKS_PER_NODE=4(x2)\r\nSLURM_JOB_UID=999226\r\nSLURM_TASK_PID=2279443\r\nSLURM_JOB_GPUS=0,1,2,3\r\nSLURM_LOCALID=0\r\nSLURM_SUBMIT_DIR=/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar\r\nSLURMD_NODENAME=hkn0710\r\nSLURM_JOB_START_TIME=1752670186\r\nSLURM_STEP_NODELIST=hkn0710\r\nSLURM_CLUSTER_NAME=hk\r\nSLURM_JOB_END_TIME=1752706186\r\nSLURM_PMI2_SRUN_PORT=42215\r\nSLURM_CPUS_ON_NODE=24\r\nSLURM_JOB_CPUS_PER_NODE=24(x2)\r\nSLURM_GPUS_ON_NODE=4\r\nSLURM_GTIDS=0\r\nSLURM_JOB_PARTITION=accelerated\r\nSLURM_TRES_PER_TASK=cpu=5\r\nSLURM_OOM_KILL_STEP=0\r\nSLURM_JOB_NUM_NODES=2\r\nSLURM_STEPID=4294967290\r\nSLURM_JOBID=3350418\r\nSLURM_PTY_PORT=33491\r\nSLURM_JOB_QOS=normal\r\nSLURM_LAUNCH_NODE_IPADDR=10.0.7.199\r\nSLURM_PTY_WIN_ROW=43\r\nSLURM_PMI2_PROC_MAPPING=(vector,(0,1,1))\r\nSLURMD_DEBUG=2\r\nSLURM_PROCID=0\r\nSLURM_CPUS_PER_TASK=5\r\nSLURM_NTASKS=8\r\nSLURM_TOPOLOGY_ADDR=hkibb.hkibbi1.hkibbi1e8.hkn0710\r\nSLURM_TOPOLOGY_ADDR_PATTERN=switch.switch.switch.node\r\nSLURM_SRUN_COMM_HOST=10.0.7.199\r\nSLURM_SCRIPT_CONTEXT=prolog_task\r\nSLURM_PTY_WIN_COL=147\r\nSLURM_NODELIST=hkn[0710-0711]\r\nSLURM_SRUN_COMM_PORT=44593\r\nSLURM_STEP_ID=4294967290\r\nSLURM_JOB_ACCOUNT=hk-project-p0023960\r\nSLURM_PRIO_PROCESS=0\r\nSLURM_NPROCS=8\r\nSLURM_NNODES=2\r\nSLURM_SUBMIT_HOST=hkn1991.localdomain\r\nSLURM_JOB_ID=3350418\r\nSLURM_NODEID=0\r\nSLURM_STEP_NUM_NODES=1\r\nSLURM_STEP_TASKS_PER_NODE=1\r\nSLURM_MPI_TYPE=pmi2\r\nSLURM_PMI2_STEP_NODES=hkn0710\r\nSLURM_CONF=/etc/slurm/slurm.conf\r\nSLURM_JOB_NAME=interactive\r\nSLURM_NTASKS_PER_NODE=4\r\nSLURM_STEP_LAUNCHER_PORT=44593\r\nSLURM_JOB_GID=502226\r\nSLURM_JOB_NODELIST=hkn[0710-0711]\r\n",,terminal_output +3119,6788298,"TERMINAL",0,0,"GpuFreq=control_disabled\r\nGpuFreq=control_disabled\r\n",,terminal_output +3120,6790411,"TERMINAL",0,0,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/tyro/_parsers.py:347: UserWarning: The field `param-dtype` is annotated with type ``, but the default value `` has type ``. We'll try to handle this gracefully, but it may cause unexpected behavior.\r\n warnings.warn(message)\r\n/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/tyro/_parsers.py:347: UserWarning: The field `dtype` is annotated with type ``, but the default value `` has type ``. We'll try to handle this gracefully, but it may cause unexpected behavior.\r\n warnings.warn(message)\r\n",,terminal_output +3121,6790538,"TERMINAL",0,0,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/tyro/_parsers.py:347: UserWarning: The field `param-dtype` is annotated with type ``, but the default value `` has type ``. We'll try to handle this gracefully, but it may cause unexpected behavior.\r\n warnings.warn(message)\r\n/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/tyro/_parsers.py:347: UserWarning: The field `dtype` is annotated with type ``, but the default value `` has type ``. We'll try to handle this gracefully, but it may cause unexpected behavior.\r\n warnings.warn(message)\r\n/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/tyro/_parsers.py:347: UserWarning: The field `param-dtype` is annotated with type ``, but the default value `` has type ``. 
We'll try to handle this gracefully, but it may cause unexpected behavior.\r\n warnings.warn(message)\r\n/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/tyro/_parsers.py:347: UserWarning: The field `dtype` is annotated with type ``, but the default value `` has type ``. We'll try to handle this gracefully, but it may cause unexpected behavior.\r\n warnings.warn(message)\r\n/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/tyro/_parsers.py:347: UserWarning: The field `param-dtype` is annotated with type ``, but the default value `` has type ``. We'll try to handle this gracefully, but it may cause unexpected behavior.\r\n warnings.warn(message)\r\n/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/tyro/_parsers.py:347: UserWarning: The field `dtype` is annotated with type ``, but the default value `` has type ``. We'll try to handle this gracefully, but it may cause unexpected behavior.\r\n warnings.warn(message)\r\n",,terminal_output +3122,6790642,"TERMINAL",0,0,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/tyro/_parsers.py:347: UserWarning: The field `param-dtype` is annotated with type ``, but the default value `` has type ``. We'll try to handle this gracefully, but it may cause unexpected behavior.\r\n warnings.warn(message)\r\n/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/tyro/_parsers.py:347: UserWarning: The field `dtype` is annotated with type ``, but the default value `` has type ``. We'll try to handle this gracefully, but it may cause unexpected behavior.\r\n warnings.warn(message)\r\n/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/tyro/_parsers.py:347: UserWarning: The field `param-dtype` is annotated with type ``, but the default value `` has type ``. We'll try to handle this gracefully, but it may cause unexpected behavior.\r\n warnings.warn(message)\r\n/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/tyro/_parsers.py:347: UserWarning: The field `dtype` is annotated with type ``, but the default value `` has type ``. We'll try to handle this gracefully, but it may cause unexpected behavior.\r\n warnings.warn(message)\r\n/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/tyro/_parsers.py:347: UserWarning: The field `param-dtype` is annotated with type ``, but the default value `` has type ``. We'll try to handle this gracefully, but it may cause unexpected behavior.\r\n warnings.warn(message)\r\n/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/tyro/_parsers.py:347: UserWarning: The field `dtype` is annotated with type ``, but the default value `` has type ``. We'll try to handle this gracefully, but it may cause unexpected behavior.\r\n warnings.warn(message)\r\n",,terminal_output +3123,6790698,"TERMINAL",0,0,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/tyro/_parsers.py:347: UserWarning: The field `param-dtype` is annotated with type ``, but the default value `` has type ``. 
We'll try to handle this gracefully, but it may cause unexpected behavior.\r\n warnings.warn(message)\r\n/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/tyro/_parsers.py:347: UserWarning: The field `dtype` is annotated with type ``, but the default value `` has type ``. We'll try to handle this gracefully, but it may cause unexpected behavior.\r\n warnings.warn(message)\r\n",,terminal_output +3124,6793926,"TERMINAL",0,0,"Running on 8 devices.\r\njax.errors.SimplifiedTraceback: For simplicity, JAX has removed its internal frames from the traceback of the following exception. Set JAX_TRACEBACK_FILTERING=off to include these.\r\n\r\nThe above exception was the direct cause of the following exception:\r\n\r\nTraceback (most recent call last):\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py"", line 209, in \r\n init_params = genie.init(_rng, dummy_inputs)\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/genie.py"", line 98, in __call__\r\n tokenizer_outputs = self.tokenizer.vq_encode(batch[""videos""], training=False)\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/models/tokenizer.py"", line 26, in setup\r\n self.encoder = STTransformer(\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/flax/linen/kw_only_dataclasses.py"", line 235, in init_wrapper\r\n dataclass_init(self, *args, **kwargs)\r\nTypeError: STTransformer.__init__() missing 1 required positional argument: 'dtype'\r\nRunning on 8 devices.\r\njax.errors.SimplifiedTraceback: For simplicity, JAX has removed its internal frames from the traceback of the following exception. Set JAX_TRACEBACK_FILTERING=off to include these.\r\n\r\nThe above exception was the direct cause of the following exception:\r\n\r\nTraceback (most recent call last):\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py"", line 209, in \r\n init_params = genie.init(_rng, dummy_inputs)\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/genie.py"", line 98, in __call__\r\n tokenizer_outputs = self.tokenizer.vq_encode(batch[""videos""], training=False)\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/models/tokenizer.py"", line 26, in setup\r\n self.encoder = STTransformer(\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/flax/linen/kw_only_dataclasses.py"", line 235, in init_wrapper\r\n dataclass_init(self, *args, **kwargs)\r\nTypeError: STTransformer.__init__() missing 1 required positional argument: 'dtype'\r\nRunning on 8 devices.\r\njax.errors.SimplifiedTraceback: For simplicity, JAX has removed its internal frames from the traceback of the following exception. 
Set JAX_TRACEBACK_FILTERING=off to include these.\r\n\r\nThe above exception was the direct cause of the following exception:\r\n\r\nTraceback (most recent call last):\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py"", line 209, in \r\n init_params = genie.init(_rng, dummy_inputs)\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/genie.py"", line 98, in __call__\r\n tokenizer_outputs = self.tokenizer.vq_encode(batch[""videos""], training=False)\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/models/tokenizer.py"", line 26, in setup\r\n self.encoder = STTransformer(\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/flax/linen/kw_only_dataclasses.py"", line 235, in init_wrapper\r\n dataclass_init(self, *args, **kwargs)\r\nTypeError: STTransformer.__init__() missing 1 required positional argument: 'dtype'\r\nRunning on 8 devices.\r\njax.errors.SimplifiedTraceback: For simplicity, JAX has removed its internal frames from the traceback of the following exception. Set JAX_TRACEBACK_FILTERING=off to include these.\r\n\r\nThe above exception was the direct cause of the following exception:\r\n\r\nTraceback (most recent call last):\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py"", line 209, in \r\nRunning on 8 devices.\r\njax.errors.SimplifiedTraceback: For simplicity, JAX has removed its internal frames from the traceback of the following exception. Set JAX_TRACEBACK_FILTERING=off to include these.\r\n\r\nThe above exception was the direct cause of the following exception:\r\n\r\nTraceback (most recent call last):\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py"", line 209, in \r\n init_params = genie.init(_rng, dummy_inputs)\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/genie.py"", line 98, in __call__\r\n init_params = genie.init(_rng, dummy_inputs)\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/genie.py"", line 98, in __call__\r\n tokenizer_outputs = self.tokenizer.vq_encode(batch[""videos""], training=False)\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/models/tokenizer.py"", line 26, in setup\r\n self.encoder = STTransformer(\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/flax/linen/kw_only_dataclasses.py"", line 235, in init_wrapper\r\n tokenizer_outputs = self.tokenizer.vq_encode(batch[""videos""], training=False)\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/models/tokenizer.py"", line 26, in setup\r\n self.encoder = STTransformer(\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/flax/linen/kw_only_dataclasses.py"", line 235, in init_wrapper\r\n dataclass_init(self, *args, **kwargs)\r\nTypeError: STTransformer.__init__() missing 1 required positional argument: 'dtype'\r\n dataclass_init(self, *args, **kwargs)\r\nTypeError: STTransformer.__init__() missing 1 required positional argument: 'dtype'\r\nRunning on 8 devices.\r\njax.errors.SimplifiedTraceback: For simplicity, JAX has removed its internal frames from the traceback of the following exception. 
Set JAX_TRACEBACK_FILTERING=off to include these.\r\n\r\nThe above exception was the direct cause of the following exception:\r\n\r\nTraceback (most recent call last):\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py"", line 209, in \r\n init_params = genie.init(_rng, dummy_inputs)\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/genie.py"", line 98, in __call__\r\n tokenizer_outputs = self.tokenizer.vq_encode(batch[""videos""], training=False)\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/models/tokenizer.py"", line 26, in setup\r\n self.encoder = STTransformer(\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/flax/linen/kw_only_dataclasses.py"", line 235, in init_wrapper\r\n dataclass_init(self, *args, **kwargs)\r\nTypeError: STTransformer.__init__() missing 1 required positional argument: 'dtype'\r\n",,terminal_output +3125,6794027,"TERMINAL",0,0,"Running on 8 devices.\r\njax.errors.SimplifiedTraceback: For simplicity, JAX has removed its internal frames from the traceback of the following exception. Set JAX_TRACEBACK_FILTERING=off to include these.\r\n\r\nThe above exception was the direct cause of the following exception:\r\n\r\nTraceback (most recent call last):\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py"", line 209, in \r\n init_params = genie.init(_rng, dummy_inputs)\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/genie.py"", line 98, in __call__\r\n tokenizer_outputs = self.tokenizer.vq_encode(batch[""videos""], training=False)\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/models/tokenizer.py"", line 26, in setup\r\n self.encoder = STTransformer(\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/flax/linen/kw_only_dataclasses.py"", line 235, in init_wrapper\r\n dataclass_init(self, *args, **kwargs)\r\nTypeError: STTransformer.__init__() missing 1 required positional argument: 'dtype'\r\nRunning on 8 devices.\r\njax.errors.SimplifiedTraceback: For simplicity, JAX has removed its internal frames from the traceback of the following exception. 
Set JAX_TRACEBACK_FILTERING=off to include these.\r\n\r\nThe above exception was the direct cause of the following exception:\r\n\r\nTraceback (most recent call last):\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py"", line 209, in \r\n init_params = genie.init(_rng, dummy_inputs)\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/genie.py"", line 98, in __call__\r\n tokenizer_outputs = self.tokenizer.vq_encode(batch[""videos""], training=False)\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/models/tokenizer.py"", line 26, in setup\r\n self.encoder = STTransformer(\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/flax/linen/kw_only_dataclasses.py"", line 235, in init_wrapper\r\n dataclass_init(self, *args, **kwargs)\r\nTypeError: STTransformer.__init__() missing 1 required positional argument: 'dtype'\r\n",,terminal_output +3126,6794850,"TERMINAL",0,0,"srun: error: hkn0710: tasks 0-1: Exited with exit code 1\r\nsrun: error: hkn0711: tasks 4-5: Exited with exit code 1\r\n",,terminal_output +3127,6794910,"TERMINAL",0,0,"srun: error: hkn0710: task 2: Exited with exit code 1\r\n",,terminal_output +3128,6794970,"TERMINAL",0,0,"srun: error: hkn0711: task 6: Exited with exit code 1\r\n",,terminal_output +3129,6795116,"TERMINAL",0,0,"srun: error: hkn0710: task 3: Exited with exit code 1\r\n",,terminal_output +3130,6795188,"TERMINAL",0,0,"srun: error: hkn0711: task 7: Exited with exit code 1\r\n]0;tum_cte0515@hkn0710:~/Projects/jafar[?2004h(jafar) [tum_cte0515@hkn0710 jafar]$ ",,terminal_output +3131,6800954,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/flax/linen/kw_only_dataclasses.py",0,0,"# Copyright 2024 The Flax Authors.\n#\n# Licensed under the Apache License, Version 2.0 (the ""License"");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n# http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an ""AS IS"" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n\n""""""Support for keyword-only fields in dataclasses for Python versions <3.10.\n\nThis module provides wrappers for `dataclasses.dataclass` and\n`dataclasses.field` that simulate support for keyword-only fields for Python\nversions before 3.10 (which is the version where dataclasses added keyword-only\nfield support). If this module is imported in Python 3.10+, then\n`kw_only_dataclasses.dataclass` and `kw_only_dataclasses.field` will simply be\naliases for `dataclasses.dataclass` and `dataclasses.field`.\n\nFor earlier Python versions, when constructing a dataclass, any fields that have\nbeen marked as keyword-only (including inherited fields) will be moved to the\nend of the constuctor's argument list. This makes it possible to have a base\nclass that defines a field with a default, and a subclass that defines a field\nwithout a default. E.g.:\n\n>>> from flax.linen import kw_only_dataclasses\n>>> @kw_only_dataclasses.dataclass\n... class Parent:\n... name: str = kw_only_dataclasses.field(default='', kw_only=True)\n\n>>> @kw_only_dataclasses.dataclass\n... class Child(Parent):\n... 
size: float # required.\n\n>>> import inspect\n>>> print(inspect.signature(Child.__init__))\n(self, size: float, name: str = '') -> None\n\n\n(If we used `dataclasses` rather than `kw_only_dataclasses` for the above\nexample, then it would have failed with TypeError ""non-default argument\n'size' follows default argument."")\n\nWARNING: fields marked as keyword-only will not *actually* be turned into\nkeyword-only parameters in the constructor; they will only be moved to the\nend of the parameter list (after all non-keyword-only parameters).\n""""""\n\nimport dataclasses\nimport functools\nimport inspect\nfrom types import MappingProxyType\nfrom typing import Any, TypeVar\n\nimport typing_extensions as tpe\n\nimport flax\n\nM = TypeVar('M', bound='flax.linen.Module')\nFieldName = str\nAnnotation = Any\nDefault = Any\n\n\nclass _KwOnlyType:\n """"""Metadata tag used to tag keyword-only fields.""""""\n\n def __repr__(self):\n return 'KW_ONLY'\n\n\nKW_ONLY = _KwOnlyType()\n\n\ndef field(*, metadata=None, kw_only=dataclasses.MISSING, **kwargs):\n """"""Wrapper for dataclassess.field that adds support for kw_only fields.\n\n Args:\n metadata: A mapping or None, containing metadata for the field.\n kw_only: If true, the field will be moved to the end of `__init__`'s\n parameter list.\n **kwargs: Keyword arguments forwarded to `dataclasses.field`\n\n Returns:\n A `dataclasses.Field` object.\n """"""\n if kw_only is not dataclasses.MISSING and kw_only:\n if (\n kwargs.get('default', dataclasses.MISSING) is dataclasses.MISSING\n and kwargs.get('default_factory', dataclasses.MISSING)\n is dataclasses.MISSING\n ):\n raise ValueError('Keyword-only fields with no default are not supported.')\n if metadata is None:\n metadata = {}\n metadata[KW_ONLY] = True\n return dataclasses.field(metadata=metadata, **kwargs)\n\n\n@tpe.dataclass_transform(field_specifiers=(field,)) # type: ignore[literal-required]\ndef dataclass(cls=None, extra_fields=None, **kwargs):\n """"""Wrapper for dataclasses.dataclass that adds support for kw_only fields.\n\n Args:\n cls: The class to transform (or none to return a decorator).\n extra_fields: A list of `(name, type, Field)` tuples describing extra fields\n that should be added to the dataclass. This is necessary for linen's\n use-case of this module, since the base class (linen.Module) is *not* a\n dataclass. In particular, linen.Module class is used as the base for both\n frozen and non-frozen dataclass subclasses; but the frozen status of a\n dataclass must match the frozen status of any base dataclasses.\n **kwargs: Additional arguments for `dataclasses.dataclass`.\n\n Returns:\n `cls`.\n """"""\n\n def wrap(cls):\n return _process_class(cls, extra_fields=extra_fields, **kwargs)\n\n return wrap if cls is None else wrap(cls)\n\n\ndef _process_class(cls: type[M], extra_fields=None, **kwargs):\n """"""Transforms `cls` into a dataclass that supports kw_only fields.""""""\n if '__annotations__' not in cls.__dict__:\n cls.__annotations__ = {}\n\n # The original __dataclass_fields__ dicts for all base classes. 
We will\n # modify these in-place before turning `cls` into a dataclass, and then\n # restore them to their original values.\n base_dataclass_fields = {} # dict[cls, cls.__dataclass_fields__.copy()]\n\n # The keyword only fields from `cls` or any of its base classes.\n kw_only_fields: dict[FieldName, tuple[Annotation, Default]] = {}\n\n # Scan for KW_ONLY marker.\n kw_only_name = None\n for name, annotation in cls.__annotations__.items():\n if annotation is KW_ONLY:\n if kw_only_name is not None:\n raise TypeError('Multiple KW_ONLY markers')\n kw_only_name = name\n elif kw_only_name is not None:\n if not hasattr(cls, name):\n raise ValueError(\n 'Keyword-only fields with no default are not supported.'\n )\n default = getattr(cls, name)\n if isinstance(default, dataclasses.Field):\n default.metadata = MappingProxyType({**default.metadata, KW_ONLY: True})\n else:\n default = field(default=default, kw_only=True)\n setattr(cls, name, default)\n if kw_only_name:\n del cls.__annotations__[kw_only_name]\n\n # Inject extra fields.\n if extra_fields:\n for name, annotation, default in extra_fields:\n if not (isinstance(name, str) and isinstance(default, dataclasses.Field)):\n raise ValueError(\n 'Expected extra_fields to a be a list of '\n '(name, type, Field) tuples.'\n )\n setattr(cls, name, default)\n cls.__annotations__[name] = annotation\n\n # Extract kw_only fields from base classes' __dataclass_fields__.\n for base in reversed(cls.__mro__[1:]):\n if not dataclasses.is_dataclass(base):\n continue\n base_annotations = base.__dict__.get('__annotations__', {})\n base_dataclass_fields[base] = dict(\n getattr(base, '__dataclass_fields__', {})\n )\n for base_field in list(dataclasses.fields(base)):\n field_name = base_field.name\n if base_field.metadata.get(KW_ONLY) or field_name in kw_only_fields:\n kw_only_fields[field_name] = (\n base_annotations.get(field_name),\n base_field,\n )\n del base.__dataclass_fields__[field_name]\n\n # Remove any keyword-only fields from this class.\n cls_annotations = cls.__dict__['__annotations__']\n for name, annotation in list(cls_annotations.items()):\n value = getattr(cls, name, None)\n if (\n isinstance(value, dataclasses.Field) and value.metadata.get(KW_ONLY)\n ) or name in kw_only_fields:\n del cls_annotations[name]\n kw_only_fields[name] = (annotation, value)\n\n # Add keyword-only fields at the end of __annotations__, in the order they\n # were found in the base classes and in this class.\n for name, (annotation, default) in kw_only_fields.items():\n setattr(cls, name, default)\n cls_annotations.pop(name, None)\n cls_annotations[name] = annotation\n\n create_init = '__init__' not in vars(cls) and kwargs.get('init', True)\n\n # Apply the dataclass transform.\n transformed_cls: type[M] = dataclasses.dataclass(cls, **kwargs)\n\n # Restore the base classes' __dataclass_fields__.\n for _cls, fields in base_dataclass_fields.items():\n _cls.__dataclass_fields__ = fields\n\n if create_init:\n dataclass_init = transformed_cls.__init__\n # use sum to count the number of init fields that are not keyword-only\n expected_num_args = sum(\n f.init and not f.metadata.get(KW_ONLY, False)\n for f in dataclasses.fields(transformed_cls)\n )\n\n @functools.wraps(dataclass_init)\n def init_wrapper(self, *args, **kwargs):\n num_args = len(args)\n if num_args > expected_num_args:\n # we add + 1 to each to account for `self`, matching python's\n # default error message\n raise TypeError(\n f'__init__() takes {expected_num_args + 1} positional '\n f'arguments but {num_args + 1} were 
given'\n )\n\n dataclass_init(self, *args, **kwargs)\n\n init_wrapper.__signature__ = inspect.signature(dataclass_init) # type: ignore\n transformed_cls.__init__ = init_wrapper # type: ignore[method-assign]\n\n # Return the transformed dataclass\n return transformed_cls\n",python,tab +3132,6810894,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/models/tokenizer.py",0,0,"from typing import Dict, Any, Tuple\n\nimport flax.linen as nn\nimport jax.numpy as jnp\n\nfrom utils.preprocess import patchify, unpatchify\nfrom utils.nn import STTransformer, VectorQuantizer\n\n\nclass TokenizerVQVAE(nn.Module):\n """"""ST-ViVit VQ-VAE""""""\n\n in_dim: int\n model_dim: int\n latent_dim: int\n num_latents: int\n patch_size: int\n num_blocks: int\n num_heads: int\n dropout: float\n codebook_dropout: float\n param_dtype: jnp.dtype\n dtype: jnp.dtype\n\n def setup(self):\n self.encoder = STTransformer(\n self.model_dim,\n self.latent_dim,\n self.num_blocks,\n self.num_heads,\n self.dropout,\n self.param_dtype,\n self.dtype,\n )\n self.vq = VectorQuantizer(\n self.latent_dim,\n self.num_latents,\n self.codebook_dropout,\n )\n self.out_dim = self.in_dim * self.patch_size**2\n self.decoder = STTransformer(\n self.model_dim,\n self.out_dim,\n self.num_blocks,\n self.num_heads,\n self.dropout,\n self.param_dtype,\n self.dtype,\n )\n\n def __call__(self, batch: Dict[str, Any], training: bool = True) -> Dict[str, Any]:\n H, W = batch[""videos""].shape[2:4]\n outputs = self.vq_encode(batch[""videos""], training)\n recon = self.decoder(outputs[""z_q""]) # (B, T, H_down * W_down, C)\n recon = recon.astype(jnp.float32)\n recon = nn.sigmoid(recon)\n recon = recon.astype(self.dtype)\n outputs[""recon""] = unpatchify(recon, self.patch_size, H, W)\n return outputs\n\n def vq_encode(self, videos: Any, training: bool = True) -> Dict[str, Any]:\n # --- Preprocess + encode ---\n B, T = videos.shape[:2]\n x = patchify(videos, self.patch_size)\n N = x.shape[2]\n x = self.encoder(x) # (B, T, N, E)\n\n # --- Vector quantize ---\n x = x.reshape(B * T * N, self.latent_dim)\n z_q, z, emb, indices = self.vq(x, training)\n z_q = z_q.reshape(B, T, N, self.latent_dim)\n indices = indices.reshape(B, T, N)\n return dict(z_q=z_q, z=z, emb=emb, indices=indices)\n\n def decode(self, indices: Any, video_hw: Tuple[int, int]):\n z = self.vq.codebook[indices]\n recon = self.decoder(z)\n recon = recon.astype(jnp.float32)\n recon = nn.sigmoid(recon)\n recon = recon.astype(self.dtype)\n return unpatchify(recon, self.patch_size, *video_hw)\n",python,tab +3133,6812671,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/models/tokenizer.py",535,0,"",python,selection_mouse +3134,6812823,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/models/tokenizer.py",525,13,"STTransformer",python,selection_mouse +3135,6882855,"utils/nn.py",0,0,"",python,tab +3136,6882856,"utils/nn.py",4334,0,"",python,selection_mouse +3137,6883351,"utils/nn.py",4309,0,"",python,selection_mouse +3138,6883533,"utils/nn.py",4306,12,"spacial_bert",python,selection_mouse +3139,6883685,"utils/nn.py",4302,23," spacial_bert: bool\n",python,selection_mouse +3140,6884715,"utils/nn.py",4306,0,"",python,selection_command +3141,6891385,"utils/nn.py",4302,0,"",python,selection_command +3142,6894436,"utils/nn.py",4302,0," spacial_bert: bool = False\n",python,content +3143,6894441,"utils/nn.py",4333,23,"",python,content +3144,6900821,"utils/nn.py",4313,0,"",python,selection_mouse 
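The edit at sequences 3142-3143 gives spacial_bert a default (`spacial_bert: bool = False`) and places it above fields that have none, such as param_dtype and dtype. Since flax turns each nn.Module subclass into a dataclass (via the kw_only_dataclasses helper opened at sequence 3131), this trips the standard dataclass ordering rule, as the next run shows. A minimal stdlib reproduction; the class is an illustrative stand-in with field names mirroring the recorded ones:

```python
import dataclasses

try:
    @dataclasses.dataclass
    class STBlockLike:  # illustrative stand-in for the flax Module
        spacial_bert: bool = False  # defaulted field placed first...
        param_dtype: str            # ...followed by a required field
except TypeError as err:
    print(err)  # non-default argument 'param_dtype' follows default argument
```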
+3145,6901797,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/models/tokenizer.py",0,0,"",python,tab +3146,6901798,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/models/tokenizer.py",778,0,"",python,selection_mouse +3147,6901834,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/models/tokenizer.py",777,0,"",python,selection_command +3148,6903936,"TERMINAL",0,0,"sh slurm/jobs/mihir/horeka/yolo-runs/tester.sh",,terminal_output +3149,6904054,"TERMINAL",0,0,"\r\n[?2004l\r\r\n# Log the sbatch script\r\ncat $0\r\n\r\nmodule unload mpi/openmpi/5.0\r\nmodule unload devel/cuda/12.4\r\nsource .venv/bin/activate\r\n\r\narray_records_dir=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_new/open_ai_minecraft_arrayrecords_chunked\r\nws_dir='/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/'\r\n\r\njob_name=""debug""\r\nslurm_job_id=""debug-mihir""\r\n\r\nCHECKPOINT_DIR=$ws_dir/checkpoints/$job_name/$slurm_job_id\r\nmkdir -p $CHECKPOINT_DIR\r\n\r\ntokenizer_ckpt_dir=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/big-runs/tokenizer-lr-scaling/train_tokenizer_lr_sweep_1e-4\r\n\r\nenv | grep SLURM\r\n\r\nXLA_FLAGS=--xla_gpu_autotune_level=0 srun python train_dynamics.py \\r\n --ckpt_dir $CHECKPOINT_DIR \\r\n --batch_size=96 \\r\n --max_lr=1e-4 \\r\n --log_image_interval=1000 \\r\n --log \\r\n --log_checkpoint_interval=1000 \\r\n --name=dynamics-debug-run-$slurm_job_id \\r\n --tags dynamics debug \\r\n --entity instant-uv \\r\n --project jafar \\r\n --tokenizer_checkpoint=$tokenizer_ckpt_dir \\r\n --data_dir $array_records_dir\r\n ",,terminal_output +3150,6904165,"TERMINAL",0,0,"SLURM_STEP_NUM_TASKS=1\r\nSLURM_JOB_USER=tum_cte0515\r\nSLURM_TASKS_PER_NODE=4(x2)\r\nSLURM_JOB_UID=999226\r\nSLURM_TASK_PID=2279443\r\nSLURM_JOB_GPUS=0,1,2,3\r\nSLURM_LOCALID=0\r\nSLURM_SUBMIT_DIR=/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar\r\nSLURMD_NODENAME=hkn0710\r\nSLURM_JOB_START_TIME=1752670186\r\nSLURM_STEP_NODELIST=hkn0710\r\nSLURM_CLUSTER_NAME=hk\r\nSLURM_JOB_END_TIME=1752706186\r\nSLURM_PMI2_SRUN_PORT=42215\r\nSLURM_CPUS_ON_NODE=24\r\nSLURM_JOB_CPUS_PER_NODE=24(x2)\r\nSLURM_GPUS_ON_NODE=4\r\nSLURM_GTIDS=0\r\nSLURM_JOB_PARTITION=accelerated\r\nSLURM_TRES_PER_TASK=cpu=5\r\nSLURM_OOM_KILL_STEP=0\r\nSLURM_JOB_NUM_NODES=2\r\nSLURM_STEPID=4294967290\r\nSLURM_JOBID=3350418\r\nSLURM_PTY_PORT=33491\r\nSLURM_JOB_QOS=normal\r\nSLURM_LAUNCH_NODE_IPADDR=10.0.7.199\r\nSLURM_PTY_WIN_ROW=43\r\nSLURM_PMI2_PROC_MAPPING=(vector,(0,1,1))\r\nSLURMD_DEBUG=2\r\nSLURM_PROCID=0\r\nSLURM_CPUS_PER_TASK=5\r\nSLURM_NTASKS=8\r\nSLURM_TOPOLOGY_ADDR=hkibb.hkibbi1.hkibbi1e8.hkn0710\r\nSLURM_TOPOLOGY_ADDR_PATTERN=switch.switch.switch.node\r\nSLURM_SRUN_COMM_HOST=10.0.7.199\r\nSLURM_SCRIPT_CONTEXT=prolog_task\r\nSLURM_PTY_WIN_COL=147\r\nSLURM_NODELIST=hkn[0710-0711]\r\nSLURM_SRUN_COMM_PORT=44593\r\nSLURM_STEP_ID=4294967290\r\nSLURM_JOB_ACCOUNT=hk-project-p0023960\r\nSLURM_PRIO_PROCESS=0\r\nSLURM_NPROCS=8\r\nSLURM_NNODES=2\r\nSLURM_SUBMIT_HOST=hkn1991.localdomain\r\nSLURM_JOB_ID=3350418\r\nSLURM_NODEID=0\r\nSLURM_STEP_NUM_NODES=1\r\nSLURM_STEP_TASKS_PER_NODE=1\r\nSLURM_MPI_TYPE=pmi2\r\nSLURM_PMI2_STEP_NODES=hkn0710\r\nSLURM_CONF=/etc/slurm/slurm.conf\r\nSLURM_JOB_NAME=interactive\r\nSLURM_NTASKS_PER_NODE=4\r\nSLURM_STEP_LAUNCHER_PORT=44593\r\nSLURM_JOB_GID=502226\r\nSLURM_JOB_NODELIST=hkn[0710-0711]\r\n",,terminal_output +3151,6904287,"TERMINAL",0,0,"GpuFreq=control_disabled\r\nGpuFreq=control_disabled\r\n",,terminal_output 
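The traceback that follows is exactly that ordering error. One conventional remedy, assuming Python 3.10+ stdlib dataclasses (the flax kw_only_dataclasses helper above emulates the same idea for older versions), is to mark the defaulted flag keyword-only so declaration order no longer matters; a sketch, not the recorded fix:

```python
import dataclasses

@dataclasses.dataclass
class STBlockLike:
    # kw_only fields are moved behind a `*` in __init__, so a default here
    # no longer conflicts with required positional fields declared later.
    spacial_bert: bool = dataclasses.field(default=False, kw_only=True)
    param_dtype: str

print(STBlockLike("float32"))                      # spacial_bert stays False
print(STBlockLike("float32", spacial_bert=True))   # opt in by keyword
```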
+3152,6906385,"TERMINAL",0,0,"Traceback (most recent call last):\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py"", line 18, in \r\n from genie import Genie, restore_genie_components\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/genie.py"", line 10, in \r\n from models.dynamics import DynamicsMaskGIT, DynamicsAutoregressive\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/models/dynamics.py"", line 8, in \r\n from utils.nn import STTransformer\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/utils/nn.py"", line 136, in \r\n class STBlock(nn.Module):\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/flax/linen/module.py"", line 1042, in __init_subclass__\r\n cls._customized_dataclass_transform(kw_only)\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/flax/linen/module.py"", line 1114, in _customized_dataclass_transform\r\n kw_only_dataclasses.dataclass(\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/flax/linen/kw_only_dataclasses.py"", line 125, in dataclass\r\n return wrap if cls is None else wrap(cls)\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/flax/linen/kw_only_dataclasses.py"", line 123, in wrap\r\n return _process_class(cls, extra_fields=extra_fields, **kwargs)\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/flax/linen/kw_only_dataclasses.py"", line 210, in _process_class\r\n transformed_cls: type[M] = dataclasses.dataclass(cls, **kwargs)\r\n File ""/home/hk-project-p0023960/tum_cte0515/.local/share/uv/python/cpython-3.10.18-linux-x86_64-gnu/lib/python3.10/dataclasses.py"", line 1184, in dataclass\r\n return wrap(cls)\r\n File ""/home/hk-project-p0023960/tum_cte0515/.local/share/uv/python/cpython-3.10.18-linux-x86_64-gnu/lib/python3.10/dataclasses.py"", line 1175, in wrap\r\n return _process_class(cls, init, repr, eq, order, unsafe_hash,\r\n File ""/home/hk-project-p0023960/tum_cte0515/.local/share/uv/python/cpython-3.10.18-linux-x86_64-gnu/lib/python3.10/dataclasses.py"", line 1024, in _process_class\r\n _init_fn(all_init_fields,\r\n File ""/home/hk-project-p0023960/tum_cte0515/.local/share/uv/python/cpython-3.10.18-linux-x86_64-gnu/lib/python3.10/dataclasses.py"", line 544, in _init_fn\r\n raise TypeError(f'non-default argument {f.name!r} '\r\nTypeError: non-default argument 'param_dtype' follows default argument\r\nTraceback (most recent call last):\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py"", line 18, in \r\n from genie import Genie, restore_genie_components\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/genie.py"", line 10, in \r\n from models.dynamics import DynamicsMaskGIT, DynamicsAutoregressive\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/models/dynamics.py"", line 8, in \r\n from utils.nn import STTransformer\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/utils/nn.py"", line 136, in \r\n class STBlock(nn.Module):\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/flax/linen/module.py"", line 1042, in __init_subclass__\r\n 
cls._customized_dataclass_transform(kw_only)\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/flax/linen/module.py"", line 1114, in _customized_dataclass_transform\r\n kw_only_dataclasses.dataclass(\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/flax/linen/kw_only_dataclasses.py"", line 125, in dataclass\r\n return wrap if cls is None else wrap(cls)\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/flax/linen/kw_only_dataclasses.py"", line 123, in wrap\r\n return _process_class(cls, extra_fields=extra_fields, **kwargs)\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/flax/linen/kw_only_dataclasses.py"", line 210, in _process_class\r\n transformed_cls: type[M] = dataclasses.dataclass(cls, **kwargs)\r\n File ""/home/hk-project-p0023960/tum_cte0515/.local/share/uv/python/cpython-3.10.18-linux-x86_64-gnu/lib/python3.10/dataclasses.py"", line 1184, in dataclass\r\n return wrap(cls)\r\n File ""/home/hk-project-p0023960/tum_cte0515/.local/share/uv/python/cpython-3.10.18-linux-x86_64-gnu/lib/python3.10/dataclasses.py"", line 1175, in wrap\r\n return _process_class(cls, init, repr, eq, order, unsafe_hash,\r\n File ""/home/hk-project-p0023960/tum_cte0515/.local/share/uv/python/cpython-3.10.18-linux-x86_64-gnu/lib/python3.10/dataclasses.py"", line 1024, in _process_class\r\n _init_fn(all_init_fields,\r\n File ""/home/hk-project-p0023960/tum_cte0515/.local/share/uv/python/cpython-3.10.18-linux-x86_64-gnu/lib/python3.10/dataclasses.py"", line 544, in _init_fn\r\n raise TypeError(f'non-default argument {f.name!r} '\r\nTypeError: non-default argument 'param_dtype' follows default argument\r\nTraceback (most recent call last):\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py"", line 18, in \r\nTraceback (most recent call last):\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py"", line 18, in \r\n from genie import Genie, restore_genie_components\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/genie.py"", line 10, in \r\n from genie import Genie, restore_genie_components\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/genie.py"", line 10, in \r\n from models.dynamics import DynamicsMaskGIT, DynamicsAutoregressive\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/models/dynamics.py"", line 8, in \r\n from models.dynamics import DynamicsMaskGIT, DynamicsAutoregressive\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/models/dynamics.py"", line 8, in \r\n from utils.nn import STTransformer\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/utils/nn.py"", line 136, in \r\n class STBlock(nn.Module):\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/flax/linen/module.py"", line 1042, in __init_subclass__\r\n from utils.nn import STTransformer\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/utils/nn.py"", line 136, in \r\n class STBlock(nn.Module):\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/flax/linen/module.py"", line 1042, in __init_subclass__\r\nTraceback (most recent call 
last):\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py"", line 18, in <module>\r\n from genie import Genie, restore_genie_components\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/genie.py"", line 10, in <module>\r\n from models.dynamics import DynamicsMaskGIT, DynamicsAutoregressive\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/models/dynamics.py"", line 8, in <module>\r\n from utils.nn import STTransformer\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/utils/nn.py"", line 136, in <module>\r\n class STBlock(nn.Module):\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/flax/linen/module.py"", line 1042, in __init_subclass__\r\n cls._customized_dataclass_transform(kw_only)\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/flax/linen/module.py"", line 1114, in _customized_dataclass_transform\r\n kw_only_dataclasses.dataclass(\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/flax/linen/kw_only_dataclasses.py"", line 125, in dataclass\r\n return wrap if cls is None else wrap(cls)\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/flax/linen/kw_only_dataclasses.py"", line 123, in wrap\r\n return _process_class(cls, extra_fields=extra_fields, **kwargs)\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/flax/linen/kw_only_dataclasses.py"", line 210, in _process_class\r\n transformed_cls: type[M] = dataclasses.dataclass(cls, **kwargs)\r\n File ""/home/hk-project-p0023960/tum_cte0515/.local/share/uv/python/cpython-3.10.18-linux-x86_64-gnu/lib/python3.10/dataclasses.py"", line 1184, in dataclass\r\n return wrap(cls)\r\n File ""/home/hk-project-p0023960/tum_cte0515/.local/share/uv/python/cpython-3.10.18-linux-x86_64-gnu/lib/python3.10/dataclasses.py"", line 1175, in wrap\r\n return _process_class(cls, init, repr, eq, order, unsafe_hash,\r\n File
""/home/hk-project-p0023960/tum_cte0515/.local/share/uv/python/cpython-3.10.18-linux-x86_64-gnu/lib/python3.10/dataclasses.py"", line 1024, in _process_class\r\n _init_fn(all_init_fields,\r\n File ""/home/hk-project-p0023960/tum_cte0515/.local/share/uv/python/cpython-3.10.18-linux-x86_64-gnu/lib/python3.10/dataclasses.py"", line 544, in _init_fn\r\n raise TypeError(f'non-default argument {f.name!r} '\r\nTypeError: non-default argument 'param_dtype' follows default argument\r\n",,terminal_output +3153,6906568,"TERMINAL",0,0,"srun: error: hkn0710: tasks 0-2: Exited with exit code 1\r\n",,terminal_output +3154,6906630,"TERMINAL",0,0,"srun: error: hkn0711: tasks 4-7: Exited with exit code 1\r\n",,terminal_output +3155,6906700,"TERMINAL",0,0,"srun: error: hkn0710: task 3: Exited with exit code 1\r\n]0;tum_cte0515@hkn0710:~/Projects/jafar[?2004h(jafar) [tum_cte0515@hkn0710 jafar]$ ",,terminal_output +3156,6911839,"utils/nn.py",0,0,"",python,tab +3157,6911840,"utils/nn.py",4308,0,"",python,selection_mouse +3158,6912118,"utils/nn.py",4381,0,"",python,selection_mouse +3159,6912667,"utils/nn.py",4341,0,"",python,selection_mouse +3160,6913345,"utils/nn.py",4333,48,"",python,content +3161,6913445,"utils/nn.py",4310,0,"",python,selection_command +3162,6913759,"utils/nn.py",4291,0,"",python,selection_command +3163,6914023,"utils/nn.py",4301,0,"\n param_dtype: jnp.dtype\n dtype: jnp.dtype",python,content +3164,6914035,"utils/nn.py",4306,0,"",python,selection_command +3165,6918227,"utils/nn.py",6985,0,"",python,selection_mouse +3166,6919750,"utils/nn.py",6961,48,"",python,content +3167,6919796,"utils/nn.py",6977,0,"",python,selection_command +3168,6920129,"utils/nn.py",7023,0,"",python,selection_command +3169,6920393,"utils/nn.py",7040,0,"\n spacial_bert=self.spacial_bert,",python,content +3170,6920421,"utils/nn.py",7057,0,"",python,selection_command +3171,6923331,"utils/nn.py",7040,0,"",python,selection_mouse +3172,6923341,"utils/nn.py",7039,0,"",python,selection_command +3173,6923982,"utils/nn.py",7071,0,"",python,selection_mouse +3174,6924618,"utils/nn.py",7062,0,"",python,selection_mouse +3175,6924751,"utils/nn.py",7057,12,"spacial_bert",python,selection_mouse +3176,6927017,"utils/nn.py",6175,0,"",python,selection_mouse +3177,6928326,"utils/nn.py",6158,23,"",python,content +3178,6928352,"utils/nn.py",6162,0,"",python,selection_command +3179,6928425,"utils/nn.py",6189,0,"",python,selection_command +3180,6928765,"utils/nn.py",6205,0,"\n spacial_bert: bool",python,content +3181,6928822,"utils/nn.py",6210,0,"",python,selection_command +3182,6929155,"utils/nn.py",6211,0,"",python,selection_command +3183,6929669,"utils/nn.py",6212,0,"",python,selection_command +3184,6929681,"utils/nn.py",6213,0,"",python,selection_command +3185,6929732,"utils/nn.py",6214,0,"",python,selection_command +3186,6929767,"utils/nn.py",6215,0,"",python,selection_command +3187,6929901,"utils/nn.py",6228,0,"",python,selection_command +3188,6930636,"utils/nn.py",6228,0," ",python,content +3189,6930637,"utils/nn.py",6229,0,"",python,selection_keyboard +3190,6931091,"utils/nn.py",6229,0,"=",python,content +3191,6931092,"utils/nn.py",6230,0,"",python,selection_keyboard +3192,6931185,"utils/nn.py",6230,0," ",python,content +3193,6931186,"utils/nn.py",6231,0,"",python,selection_keyboard +3194,6931641,"utils/nn.py",6231,0,"F",python,content +3195,6931642,"utils/nn.py",6232,0,"",python,selection_keyboard +3196,6931854,"utils/nn.py",6232,0,"a",python,content +3197,6931855,"utils/nn.py",6233,0,"",python,selection_keyboard 
+3198,6931963,"utils/nn.py",6233,0,"l",python,content +3199,6931965,"utils/nn.py",6234,0,"",python,selection_keyboard +3200,6932089,"utils/nn.py",6234,0,"s",python,content +3201,6932090,"utils/nn.py",6235,0,"",python,selection_keyboard +3202,6932278,"utils/nn.py",6235,0,"e",python,content +3203,6932279,"utils/nn.py",6236,0,"",python,selection_keyboard +3204,6932489,"utils/nn.py",6231,5,"False",python,content +3205,6938460,"utils/nn.py",4373,0,"",python,selection_mouse +3206,6942971,"models/dynamics.py",0,0,"",python,tab +3207,6946578,"models/dynamics.py",2340,0,"",python,selection_mouse +3208,6947421,"models/dynamics.py",2325,19,"",python,content +3209,6947477,"models/dynamics.py",2337,0,"",python,selection_command +3210,6947684,"models/dynamics.py",2367,0,"",python,selection_command +3211,6948114,"models/dynamics.py",2378,0,"\n False,",python,content +3212,6948137,"models/dynamics.py",2391,0,"",python,selection_command +3213,6951517,"models/dynamics.py",2391,0,"s",python,content +3214,6951519,"models/dynamics.py",2392,0,"",python,selection_keyboard +3215,6951596,"models/dynamics.py",2392,0,"p",python,content +3216,6951597,"models/dynamics.py",2393,0,"",python,selection_keyboard +3217,6951733,"models/dynamics.py",2393,0,"a",python,content +3218,6951734,"models/dynamics.py",2394,0,"",python,selection_keyboard +3219,6952366,"models/dynamics.py",2391,3,"spacial_bert=",python,content +3220,6954284,"models/dynamics.py",2403,0,"",python,selection_command +3221,6960532,"models/dynamics.py",643,0,"",python,selection_mouse +3222,6960863,"models/dynamics.py",650,0,"\n spacial_bert=False,",python,content +3223,6960916,"models/dynamics.py",663,0,"",python,selection_command +3224,6961966,"models/dynamics.py",682,0,"",python,selection_command +3225,6962185,"models/dynamics.py",681,0,"",python,selection_command +3226,6962532,"models/dynamics.py",676,5,"",python,content +3227,6963157,"models/dynamics.py",676,0,"T",python,content +3228,6963158,"models/dynamics.py",677,0,"",python,selection_keyboard +3229,6963287,"models/dynamics.py",677,0,"r",python,content +3230,6963287,"models/dynamics.py",678,0,"",python,selection_keyboard +3231,6963365,"models/dynamics.py",678,0,"u",python,content +3232,6963366,"models/dynamics.py",679,0,"",python,selection_keyboard +3233,6963525,"models/dynamics.py",679,0,"e",python,content +3234,6963525,"models/dynamics.py",680,0,"",python,selection_keyboard +3235,6963926,"models/dynamics.py",680,0,"k",python,content +3236,6963927,"models/dynamics.py",681,0,"",python,selection_keyboard +3237,6964507,"models/dynamics.py",680,1,"",python,content +3238,6964598,"models/dynamics.py",679,0,"",python,selection_command +3239,6964731,"models/dynamics.py",649,0,"",python,selection_command +3240,6964892,"models/dynamics.py",625,0,"",python,selection_command +3241,6965072,"models/dynamics.py",595,0,"",python,selection_command +3242,6965476,"models/dynamics.py",579,18,"",python,content +3243,6965554,"models/dynamics.py",591,0,"",python,selection_command +3244,6969150,"utils/nn.py",0,0,"",python,tab +3245,6970440,"utils/nn.py",6231,0,"",python,selection_mouse +3246,6970575,"utils/nn.py",6231,5,"False",python,selection_mouse +3247,6971479,"utils/nn.py",6231,5,"T",python,content +3248,6971480,"utils/nn.py",6232,0,"",python,selection_keyboard +3249,6971676,"utils/nn.py",6232,0,"r",python,content +3250,6971677,"utils/nn.py",6233,0,"",python,selection_keyboard +3251,6971737,"utils/nn.py",6233,0,"u",python,content +3252,6971738,"utils/nn.py",6234,0,"",python,selection_keyboard 
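
The keystrokes around this point add a spacial_bert switch: a field on the transformer in utils/nn.py, first defaulted to False and then flipped to True, plus an explicit spacial_bert=... argument at the construction sites in models/dynamics.py. A rough sketch of that wiring (the module bodies are hypothetical; only the flag name, spelled 'spacial' as in the session, comes from the recording):

    import flax.linen as nn

    class STTransformer(nn.Module):
        model_dim: int
        spacial_bert: bool = False  # the new flag added in utils/nn.py

        @nn.compact
        def __call__(self, x):
            # Hypothetical use of the flag; the real branch is not in the log.
            if self.spacial_bert:
                x = nn.LayerNorm()(x)
            return nn.Dense(self.model_dim)(x)

    class DynamicsMaskGIT(nn.Module):
        model_dim: int

        def setup(self):
            # models/dynamics.py passes the flag explicitly when building its
            # transformer, mirroring the recorded edits.
            self.transformer = STTransformer(
                model_dim=self.model_dim,
                spacial_bert=True,
            )
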
+3253,6971893,"utils/nn.py",6234,0,"e",python,content +3254,6971894,"utils/nn.py",6235,0,"",python,selection_keyboard +3255,6974576,"utils/nn.py",4377,0,"",python,selection_mouse +3256,6974702,"utils/nn.py",4375,5,"False",python,selection_mouse +3257,6975327,"utils/nn.py",4375,5,"T",python,content +3258,6975328,"utils/nn.py",4376,0,"",python,selection_keyboard +3259,6975469,"utils/nn.py",4376,0,"r",python,content +3260,6975470,"utils/nn.py",4377,0,"",python,selection_keyboard +3261,6975516,"utils/nn.py",4377,0,"u",python,content +3262,6975518,"utils/nn.py",4378,0,"",python,selection_keyboard +3263,6975668,"utils/nn.py",4378,0,"e",python,content +3264,6975669,"utils/nn.py",4379,0,"",python,selection_keyboard +3265,6976076,"utils/nn.py",4378,0,"",python,selection_command +3266,6977715,"TERMINAL",0,0,"sh slurm/jobs/mihir/horeka/yolo-runs/tester.sh",,terminal_output +3267,6978052,"TERMINAL",0,0,"\r\n[?2004l\r\r\n# Log the sbatch script\r\ncat $0\r\n\r\nmodule unload mpi/openmpi/5.0\r\nmodule unload devel/cuda/12.4\r\nsource .venv/bin/activate\r\n\r\narray_records_dir=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_new/open_ai_minecraft_arrayrecords_chunked\r\nws_dir='/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/'\r\n\r\njob_name=""debug""\r\nslurm_job_id=""debug-mihir""\r\n\r\nCHECKPOINT_DIR=$ws_dir/checkpoints/$job_name/$slurm_job_id\r\nmkdir -p $CHECKPOINT_DIR\r\n\r\ntokenizer_ckpt_dir=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/big-runs/tokenizer-lr-scaling/train_tokenizer_lr_sweep_1e-4\r\n\r\nenv | grep SLURM\r\n\r\nXLA_FLAGS=--xla_gpu_autotune_level=0 srun python train_dynamics.py \\r\n --ckpt_dir $CHECKPOINT_DIR \\r\n --batch_size=96 \\r\n --max_lr=1e-4 \\r\n --log_image_interval=1000 \\r\n --log \\r\n --log_checkpoint_interval=1000 \\r\n --name=dynamics-debug-run-$slurm_job_id \\r\n --tags dynamics debug \\r\n --entity instant-uv \\r\n --project jafar \\r\n --tokenizer_checkpoint=$tokenizer_ckpt_dir \\r\n --data_dir $array_records_dir\r\n ",,terminal_output 
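
The launch that follows immediately prints tyro UserWarnings about the param-dtype and dtype fields. The concrete types were stripped from this recording (the empty backticks), but a plausible cause, sketched here under that assumption, is annotating a field as jnp.dtype while defaulting it to jnp.float32, which is a scalar type rather than a dtype instance:

    import dataclasses
    import jax.numpy as jnp

    @dataclasses.dataclass
    class STBlockConfig:  # hypothetical name; stands in for the Module's fields
        # The annotation says jnp.dtype (numpy.dtype), but the default is the
        # scalar *type* jnp.float32. A CLI parser that compares annotations
        # against defaults, as tyro does, flags the mismatch and then
        # "handles it gracefully" as the warning says.
        param_dtype: jnp.dtype = jnp.float32
        dtype: jnp.dtype = jnp.float32
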
+3268,6978165,"TERMINAL",0,0,"SLURM_STEP_NUM_TASKS=1\r\nSLURM_JOB_USER=tum_cte0515\r\nSLURM_TASKS_PER_NODE=4(x2)\r\nSLURM_JOB_UID=999226\r\nSLURM_TASK_PID=2279443\r\nSLURM_JOB_GPUS=0,1,2,3\r\nSLURM_LOCALID=0\r\nSLURM_SUBMIT_DIR=/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar\r\nSLURMD_NODENAME=hkn0710\r\nSLURM_JOB_START_TIME=1752670186\r\nSLURM_STEP_NODELIST=hkn0710\r\nSLURM_CLUSTER_NAME=hk\r\nSLURM_JOB_END_TIME=1752706186\r\nSLURM_PMI2_SRUN_PORT=42215\r\nSLURM_CPUS_ON_NODE=24\r\nSLURM_JOB_CPUS_PER_NODE=24(x2)\r\nSLURM_GPUS_ON_NODE=4\r\nSLURM_GTIDS=0\r\nSLURM_JOB_PARTITION=accelerated\r\nSLURM_TRES_PER_TASK=cpu=5\r\nSLURM_OOM_KILL_STEP=0\r\nSLURM_JOB_NUM_NODES=2\r\nSLURM_STEPID=4294967290\r\nSLURM_JOBID=3350418\r\nSLURM_PTY_PORT=33491\r\nSLURM_JOB_QOS=normal\r\nSLURM_LAUNCH_NODE_IPADDR=10.0.7.199\r\nSLURM_PTY_WIN_ROW=43\r\nSLURM_PMI2_PROC_MAPPING=(vector,(0,1,1))\r\nSLURMD_DEBUG=2\r\nSLURM_PROCID=0\r\nSLURM_CPUS_PER_TASK=5\r\nSLURM_NTASKS=8\r\nSLURM_TOPOLOGY_ADDR=hkibb.hkibbi1.hkibbi1e8.hkn0710\r\nSLURM_TOPOLOGY_ADDR_PATTERN=switch.switch.switch.node\r\nSLURM_SRUN_COMM_HOST=10.0.7.199\r\nSLURM_SCRIPT_CONTEXT=prolog_task\r\nSLURM_PTY_WIN_COL=147\r\nSLURM_NODELIST=hkn[0710-0711]\r\nSLURM_SRUN_COMM_PORT=44593\r\nSLURM_STEP_ID=4294967290\r\nSLURM_JOB_ACCOUNT=hk-project-p0023960\r\nSLURM_PRIO_PROCESS=0\r\nSLURM_NPROCS=8\r\nSLURM_NNODES=2\r\nSLURM_SUBMIT_HOST=hkn1991.localdomain\r\nSLURM_JOB_ID=3350418\r\nSLURM_NODEID=0\r\nSLURM_STEP_NUM_NODES=1\r\nSLURM_STEP_TASKS_PER_NODE=1\r\nSLURM_MPI_TYPE=pmi2\r\nSLURM_PMI2_STEP_NODES=hkn0710\r\nSLURM_CONF=/etc/slurm/slurm.conf\r\nSLURM_JOB_NAME=interactive\r\nSLURM_NTASKS_PER_NODE=4\r\nSLURM_STEP_LAUNCHER_PORT=44593\r\nSLURM_JOB_GID=502226\r\nSLURM_JOB_NODELIST=hkn[0710-0711]\r\n",,terminal_output
+3269,6978304,"TERMINAL",0,0,"GpuFreq=control_disabled\r\nGpuFreq=control_disabled\r\n",,terminal_output
+3270,6980319,"TERMINAL",0,0,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/tyro/_parsers.py:347: UserWarning: The field `param-dtype` is annotated with type ``, but the default value `` has type ``. We'll try to handle this gracefully, but it may cause unexpected behavior.\r\n warnings.warn(message)\r\n/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/tyro/_parsers.py:347: UserWarning: The field `dtype` is annotated with type ``, but the default value `` has type ``. We'll try to handle this gracefully, but it may cause unexpected behavior.\r\n warnings.warn(message)\r\n",,terminal_output
+3271,6980427,"TERMINAL",0,0,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/tyro/_parsers.py:347: UserWarning: The field `param-dtype` is annotated with type ``, but the default value `` has type ``. We'll try to handle this gracefully, but it may cause unexpected behavior.\r\n warnings.warn(message)\r\n/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/tyro/_parsers.py:347: UserWarning: The field `dtype` is annotated with type ``, but the default value `` has type ``. We'll try to handle this gracefully, but it may cause unexpected behavior.\r\n warnings.warn(message)\r\n",,terminal_output
+3272,6980547,"TERMINAL",0,0,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/tyro/_parsers.py:347: UserWarning: The field `param-dtype` is annotated with type ``, but the default value `` has type ``. We'll try to handle this gracefully, but it may cause unexpected behavior.\r\n warnings.warn(message)\r\n/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/tyro/_parsers.py:347: UserWarning: The field `dtype` is annotated with type ``, but the default value `` has type ``. We'll try to handle this gracefully, but it may cause unexpected behavior.\r\n warnings.warn(message)\r\n",,terminal_output
+3273,6980606,"TERMINAL",0,0,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/tyro/_parsers.py:347: UserWarning: The field `param-dtype` is annotated with type ``, but the default value `` has type ``. We'll try to handle this gracefully, but it may cause unexpected behavior.\r\n warnings.warn(message)\r\n/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/tyro/_parsers.py:347: UserWarning: The field `dtype` is annotated with type ``, but the default value `` has type ``. We'll try to handle this gracefully, but it may cause unexpected behavior.\r\n warnings.warn(message)\r\n",,terminal_output
+3274,6984914,"TERMINAL",0,0,"2025-07-16 16:45:52.337452: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n2025-07-16 16:45:52.359151: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n2025-07-16 16:45:52.361654: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n2025-07-16 16:45:52.375090: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n2025-07-16 16:45:52.375520: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n2025-07-16 16:45:52.378557: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n2025-07-16 16:45:52.385595: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n2025-07-16 16:45:52.407127: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output
+3275,6999648,"TERMINAL",0,0,"2025-07-16 16:46:07.053684: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n2025-07-16 16:46:07.065643: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints.
Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +3276,6999701,"TERMINAL",0,0,"2025-07-16 16:46:07.187686: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n2025-07-16 16:46:07.187681: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +3277,6999797,"TERMINAL",0,0,"2025-07-16 16:46:07.241274: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n2025-07-16 16:46:07.248745: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n2025-07-16 16:46:07.251663: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n2025-07-16 16:46:07.251886: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +3278,7003135,"TERMINAL",0,0,"2025-07-16 16:46:10.635521: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +3279,7003195,"TERMINAL",0,0,"2025-07-16 16:46:10.669808: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +3280,7003314,"TERMINAL",0,0,"2025-07-16 16:46:10.814705: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +3281,7003435,"TERMINAL",0,0,"2025-07-16 16:46:10.848849: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. 
Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n2025-07-16 16:46:10.894326: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n2025-07-16 16:46:10.894357: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n2025-07-16 16:46:10.911384: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +3282,7003543,"TERMINAL",0,0,"2025-07-16 16:46:11.014828: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +3283,7005917,"TERMINAL",0,0,"wandb: Currently logged in as: mihir-mahajan2002 (instant-uv) to https://api.wandb.ai. Use `wandb login --relogin` to force relogin\r\n",,terminal_output +3284,7006811,"TERMINAL",0,0,"wandb: Tracking run with wandb version 0.19.11\r\nwandb: Run data is saved locally in /hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/wandb/run-20250716_164613-bb375awk\r\nwandb: Run `wandb offline` to turn off syncing.\r\nwandb: Syncing run dynamics-debug-run-debug-mihir\r\nwandb: ⭐️ View project at https://wandb.ai/instant-uv/jafar\r\nwandb: 🚀 View run at https://wandb.ai/instant-uv/jafar/runs/bb375awk\r\n",,terminal_output +3285,7074084,"TERMINAL",0,0,"WARNING:absl:Dropping 2 examples of 89394 examples (shard 8).\r\nWARNING:absl:Dropping 2 examples of 89394 examples (shard 8).\r\nWARNING:absl:Dropping 2 examples of 89394 examples (shard 8).\r\n",,terminal_output +3286,7074136,"TERMINAL",0,0,"WARNING:absl:Dropping 2 examples of 89394 examples (shard 8).\r\n",,terminal_output +3287,7104074,"TERMINAL",0,0,"WARNING:absl:Dropping 2 examples of 89394 examples (shard 8).\r\nWARNING:absl:Dropping 2 examples of 89394 examples (shard 8).\r\n",,terminal_output +3288,7111286,"utils/nn.py",0,0,"",python,tab +3289,7111287,"utils/nn.py",4362,0,"",python,selection_mouse +3290,7111377,"utils/nn.py",4354,12,"spacial_bert",python,selection_mouse +3291,7111640,"TERMINAL",0,0,"WARNING:absl:Dropping 2 examples of 89394 examples (shard 8).\r\n",,terminal_output +3292,7113523,"TERMINAL",0,0,"WARNING:absl:Dropping 2 examples of 89394 examples (shard 8).\r\n",,terminal_output +3293,7113656,"TERMINAL",0,0,"WARNING:absl:Missing metrics for step 145000\r\nERROR:absl:File /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/big-runs/tokenizer-lr-scaling/train_tokenizer_lr_sweep_1e-4/145000/metrics/metrics not found.\r\nWARNING:absl:Missing metrics for step 80000\r\nERROR:absl:File /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/big-runs/tokenizer-lr-scaling/train_tokenizer_lr_sweep_1e-4/080000/metrics/metrics 
not found.\r\nWARNING:absl:Missing metrics for step 40000\r\nERROR:absl:File /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/big-runs/tokenizer-lr-scaling/train_tokenizer_lr_sweep_1e-4/040000/metrics/metrics not found.\r\nWARNING:absl:Missing metrics for step 140000\r\nERROR:absl:File /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/big-runs/tokenizer-lr-scaling/train_tokenizer_lr_sweep_1e-4/140000/metrics/metrics not found.\r\nWARNING:absl:Missing metrics for step 146000\r\nERROR:absl:File /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/big-runs/tokenizer-lr-scaling/train_tokenizer_lr_sweep_1e-4/146000/metrics/metrics not found.\r\nWARNING:absl:Missing metrics for step 60000\r\nERROR:absl:File /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/big-runs/tokenizer-lr-scaling/train_tokenizer_lr_sweep_1e-4/060000/metrics/metrics not found.\r\nWARNING:absl:Missing metrics for step 120000\r\nERROR:absl:File /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/big-runs/tokenizer-lr-scaling/train_tokenizer_lr_sweep_1e-4/120000/metrics/metrics not found.\r\nWARNING:absl:Missing metrics for step 144000\r\nERROR:absl:File /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/big-runs/tokenizer-lr-scaling/train_tokenizer_lr_sweep_1e-4/144000/metrics/metrics not found.\r\nWARNING:absl:Missing metrics for step 100000\r\nERROR:absl:File /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/big-runs/tokenizer-lr-scaling/train_tokenizer_lr_sweep_1e-4/100000/metrics/metrics not found.\r\nWARNING:absl:Missing metrics for step 20000\r\nERROR:absl:File /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/big-runs/tokenizer-lr-scaling/train_tokenizer_lr_sweep_1e-4/020000/metrics/metrics not found.\r\n",,terminal_output
+3294,7118327,"TERMINAL",0,0,"Running on 8 devices.\r\nCounting all components: ['tokenizer', 'lam', 'dynamics']\r\nParameter counts:\r\n{'tokenizer': 37989616, 'lam': 19349472, 'dynamics': 29734912, 'total': 87074000}\r\n",,terminal_output
+3295,7119454,"TERMINAL",0,0,"bash",,terminal_focus
+3296,7123386,"TERMINAL",0,0,"queue",,terminal_command
+3297,7123450,"TERMINAL",0,0,"]633;E;2025-07-16 16:48:10 queue;22762ecb-63fc-44db-bc05-6ba77d772526]633;C[?1049h(B[?7hEvery 1.0s: squeue --mehkn1991.localdomain: Wed Jul 16 16:48:10 2025JOBID PARTITION NAME USER ST\tTIME NODES NODELIST(REASON)3348592 accelerat train_dy tum_cte0 R 16:47:48\t 2 hkn[0416,0421]3348397 accelerat train_dy tum_cte0 R 22:09:30\t 2 hkn[0810,0815]3348399 accelerat train_dy tum_cte0 R 22:09:30\t 2 hkn[0601,0603]3348400 accelerat train_dy tum_cte0 R 22:09:30\t 2 hkn[0604,0608]3350418 accelerat interact tum_cte0 R 1:58:24\t 2 hkn[0710-0711]3345116 accelerat train_dy tum_cte0 R 1-22:53:15\t 2 hkn[0503,0506]",,terminal_output
+3298,7124572,"TERMINAL",0,0,"1911156",,terminal_output
+3299,7125213,"TERMINAL",0,0,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/tyro/_parsers.py:347: UserWarning: The field `param-dtype` is annotated with type ``, but the default value `` has type ``. We'll try to handle this gracefully, but it may cause unexpected behavior.\r\n warnings.warn(message)\r\n/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/tyro/_parsers.py:347: UserWarning: The field `dtype` is annotated with type ``, but the default value `` has type ``. We'll try to handle this gracefully, but it may cause unexpected behavior.\r\n warnings.warn(message)\r\n",,terminal_output
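
Nearly every message in this run appears several times because each of the eight srun tasks writes its own copy to the shared terminal. A common JAX-side way to quiet this (a sketch, not taken from the project's code) is to gate prints on the process index:

    import jax

    def log_once(*args, **kwargs):
        # Only process 0 prints; the other srun tasks stay silent.
        if jax.process_index() == 0:
            print(*args, **kwargs)

    log_once("Running on", jax.device_count(), "devices.")
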
+3300,7125330,"TERMINAL",0,0,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/tyro/_parsers.py:347: UserWarning: The field `param-dtype` is annotated with type ``, but the default value `` has type ``. We'll try to handle this gracefully, but it may cause unexpected behavior.\r\n warnings.warn(message)\r\n/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/tyro/_parsers.py:347: UserWarning: The field `dtype` is annotated with type ``, but the default value `` has type ``. We'll try to handle this gracefully, but it may cause unexpected behavior.\r\n warnings.warn(message)\r\n",,terminal_output
+3301,7125546,"TERMINAL",0,0,"25133378",,terminal_output
+3302,7125557,"TERMINAL",0,0,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/tyro/_parsers.py:347: UserWarning: The field `param-dtype` is annotated with type ``, but the default value `` has type ``. We'll try to handle this gracefully, but it may cause unexpected behavior.\r\n warnings.warn(message)\r\n/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/tyro/_parsers.py:347: UserWarning: The field `dtype` is annotated with type ``, but the default value `` has type ``.
We'll try to handle this gracefully, but it may cause unexpected behavior.\r\n warnings.warn(message)\r\n/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/tyro/_parsers.py:347: UserWarning: The field `param-dtype` is annotated with type ``, but the default value `` has type ``. We'll try to handle this gracefully, but it may cause unexpected behavior.\r\n warnings.warn(message)\r\n/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/tyro/_parsers.py:347: UserWarning: The field `param-dtype` is annotated with type ``, but the default value `` has type ``. We'll try to handle this gracefully, but it may cause unexpected behavior.\r\n warnings.warn(message)\r\n/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/tyro/_parsers.py:347: UserWarning: The field `dtype` is annotated with type ``, but the default value `` has type ``. We'll try to handle this gracefully, but it may cause unexpected behavior.\r\n warnings.warn(message)\r\n/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/tyro/_parsers.py:347: UserWarning: The field `dtype` is annotated with type ``, but the default value `` has type ``. We'll try to handle this gracefully, but it may cause unexpected behavior.\r\n warnings.warn(message)\r\n/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/tyro/_parsers.py:347: UserWarning: The field `param-dtype` is annotated with type ``, but the default value `` has type ``. We'll try to handle this gracefully, but it may cause unexpected behavior.\r\n warnings.warn(message)\r\n/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/tyro/_parsers.py:347: UserWarning: The field `dtype` is annotated with type ``, but the default value `` has type ``. We'll try to handle this gracefully, but it may cause unexpected behavior.\r\n warnings.warn(message)\r\n/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/tyro/_parsers.py:347: UserWarning: The field `param-dtype` is annotated with type ``, but the default value `` has type ``. We'll try to handle this gracefully, but it may cause unexpected behavior.\r\n warnings.warn(message)\r\n/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/tyro/_parsers.py:347: UserWarning: The field `dtype` is annotated with type ``, but the default value `` has type ``. We'll try to handle this gracefully, but it may cause unexpected behavior.\r\n warnings.warn(message)\r\n/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/tyro/_parsers.py:347: UserWarning: The field `param-dtype` is annotated with type ``, but the default value `` has type ``. We'll try to handle this gracefully, but it may cause unexpected behavior.\r\n warnings.warn(message)\r\n/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/tyro/_parsers.py:347: UserWarning: The field `dtype` is annotated with type ``, but the default value `` has type ``. 
We'll try to handle this gracefully, but it may cause unexpected behavior.\r\n warnings.warn(message)\r\n/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/tyro/_parsers.py:347: UserWarning: The field `param-dtype` is annotated with type ``, but the default value `` has type ``. We'll try to handle this gracefully, but it may cause unexpected behavior.\r\n warnings.warn(message)\r\n/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/tyro/_parsers.py:347: UserWarning: The field `dtype` is annotated with type ``, but the default value `` has type ``. We'll try to handle this gracefully, but it may cause unexpected behavior.\r\n warnings.warn(message)\r\n/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/tyro/_parsers.py:347: UserWarning: The field `param-dtype` is annotated with type ``, but the default value `` has type ``. We'll try to handle this gracefully, but it may cause unexpected behavior.\r\n warnings.warn(message)\r\n/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/tyro/_parsers.py:347: UserWarning: The field `dtype` is annotated with type ``, but the default value `` has type ``. We'll try to handle this gracefully, but it may cause unexpected behavior.\r\n warnings.warn(message)\r\n/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/tyro/_parsers.py:347: UserWarning: The field `param-dtype` is annotated with type ``, but the default value `` has type ``. We'll try to handle this gracefully, but it may cause unexpected behavior.\r\n warnings.warn(message)\r\n/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/tyro/_parsers.py:347: UserWarning: The field `dtype` is annotated with type ``, but the default value `` has type ``. We'll try to handle this gracefully, but it may cause unexpected behavior.\r\n warnings.warn(message)\r\n/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/tyro/_parsers.py:347: UserWarning: The field `param-dtype` is annotated with type ``, but the default value `` has type ``. We'll try to handle this gracefully, but it may cause unexpected behavior.\r\n warnings.warn(message)\r\n/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/tyro/_parsers.py:347: UserWarning: The field `dtype` is annotated with type ``, but the default value `` has type ``. We'll try to handle this gracefully, but it may cause unexpected behavior.\r\n warnings.warn(message)\r\n/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/tyro/_parsers.py:347: UserWarning: The field `param-dtype` is annotated with type ``, but the default value `` has type ``. We'll try to handle this gracefully, but it may cause unexpected behavior.\r\n warnings.warn(message)\r\n/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/tyro/_parsers.py:347: UserWarning: The field `dtype` is annotated with type ``, but the default value `` has type ``. 
We'll try to handle this gracefully, but it may cause unexpected behavior.\r\n warnings.warn(message)\r\n/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/tyro/_parsers.py:347: UserWarning: The field `param-dtype` is annotated with type ``, but the default value `` has type ``. We'll try to handle this gracefully, but it may cause unexpected behavior.\r\n warnings.warn(message)\r\n/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/tyro/_parsers.py:347: UserWarning: The field `param-dtype` is annotated with type ``, but the default value `` has type ``. We'll try to handle this gracefully, but it may cause unexpected behavior.\r\n warnings.warn(message)\r\n/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/tyro/_parsers.py:347: UserWarning: The field `dtype` is annotated with type ``, but the default value `` has type ``. We'll try to handle this gracefully, but it may cause unexpected behavior.\r\n warnings.warn(message)\r\n/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/tyro/_parsers.py:347: UserWarning: The field `dtype` is annotated with type ``, but the default value `` has type ``. We'll try to handle this gracefully, but it may cause unexpected behavior.\r\n warnings.warn(message)\r\n/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/tyro/_parsers.py:347: UserWarning: The field `param-dtype` is annotated with type ``, but the default value `` has type ``. We'll try to handle this gracefully, but it may cause unexpected behavior.\r\n warnings.warn(message)\r\n/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/tyro/_parsers.py:347: UserWarning: The field `param-dtype` is annotated with type ``, but the default value `` has type ``. We'll try to handle this gracefully, but it may cause unexpected behavior.\r\n warnings.warn(message)\r\n/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/tyro/_parsers.py:347: UserWarning: The field `dtype` is annotated with type ``, but the default value `` has type ``. We'll try to handle this gracefully, but it may cause unexpected behavior.\r\n warnings.warn(message)\r\n/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/tyro/_parsers.py:347: UserWarning: The field `dtype` is annotated with type ``, but the default value `` has type ``. We'll try to handle this gracefully, but it may cause unexpected behavior.\r\n warnings.warn(message)\r\n/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/tyro/_parsers.py:347: UserWarning: The field `param-dtype` is annotated with type ``, but the default value `` has type ``. We'll try to handle this gracefully, but it may cause unexpected behavior.\r\n warnings.warn(message)\r\n/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/tyro/_parsers.py:347: UserWarning: The field `dtype` is annotated with type ``, but the default value `` has type ``. 
We'll try to handle this gracefully, but it may cause unexpected behavior.\r\n warnings.warn(message)\r\n/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/tyro/_parsers.py:347: UserWarning: The field `param-dtype` is annotated with type ``, but the default value `` has type ``. We'll try to handle this gracefully, but it may cause unexpected behavior.\r\n warnings.warn(message)\r\n/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/tyro/_parsers.py:347: UserWarning: The field `dtype` is annotated with type ``, but the default value `` has type ``. We'll try to handle this gracefully, but it may cause unexpected behavior.\r\n warnings.warn(message)\r\n/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/tyro/_parsers.py:347: UserWarning: The field `param-dtype` is annotated with type ``, but the default value `` has type ``. We'll try to handle this gracefully, but it may cause unexpected behavior.\r\n warnings.warn(message)\r\n/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/tyro/_parsers.py:347: UserWarning: The field `dtype` is annotated with type ``, but the default value `` has type ``. We'll try to handle this gracefully, but it may cause unexpected behavior.\r\n warnings.warn(message)\r\n/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/tyro/_parsers.py:347: UserWarning: The field `param-dtype` is annotated with type ``, but the default value `` has type ``. We'll try to handle this gracefully, but it may cause unexpected behavior.\r\n warnings.warn(message)\r\n/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/tyro/_parsers.py:347: UserWarning: The field `dtype` is annotated with type ``, but the default value `` has type ``. We'll try to handle this gracefully, but it may cause unexpected behavior.\r\n warnings.warn(message)\r\n/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/tyro/_parsers.py:347: UserWarning: The field `param-dtype` is annotated with type ``, but the default value `` has type ``. We'll try to handle this gracefully, but it may cause unexpected behavior.\r\n warnings.warn(message)\r\n/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/tyro/_parsers.py:347: UserWarning: The field `dtype` is annotated with type ``, but the default value `` has type ``. We'll try to handle this gracefully, but it may cause unexpected behavior.\r\n warnings.warn(message)\r\n/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/tyro/_parsers.py:347: UserWarning: The field `param-dtype` is annotated with type ``, but the default value `` has type ``. We'll try to handle this gracefully, but it may cause unexpected behavior.\r\n warnings.warn(message)\r\n/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/tyro/_parsers.py:347: UserWarning: The field `dtype` is annotated with type ``, but the default value `` has type ``. 
We'll try to handle this gracefully, but it may cause unexpected behavior.\r\n warnings.warn(message)\r\n/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/tyro/_parsers.py:347: UserWarning: The field `param-dtype` is annotated with type ``, but the default value `` has type ``. We'll try to handle this gracefully, but it may cause unexpected behavior.\r\n warnings.warn(message)\r\n/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/tyro/_parsers.py:347: UserWarning: The field `dtype` is annotated with type ``, but the default value `` has type ``. We'll try to handle this gracefully, but it may cause unexpected behavior.\r\n warnings.warn(message)\r\n/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/tyro/_parsers.py:347: UserWarning: The field `param-dtype` is annotated with type ``, but the default value `` has type ``. We'll try to handle this gracefully, but it may cause unexpected behavior.\r\n warnings.warn(message)\r\n/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/tyro/_parsers.py:347: UserWarning: The field `dtype` is annotated with type ``, but the default value `` has type ``. We'll try to handle this gracefully, but it may cause unexpected behavior.\r\n warnings.warn(message)\r\n/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/tyro/_parsers.py:347: UserWarning: The field `param-dtype` is annotated with type ``, but the default value `` has type ``. We'll try to handle this gracefully, but it may cause unexpected behavior.\r\n warnings.warn(message)\r\n/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/tyro/_parsers.py:347: UserWarning: The field `dtype` is annotated with type ``, but the default value `` has type ``. We'll try to handle this gracefully, but it may cause unexpected behavior.\r\n warnings.warn(message)\r\n/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/tyro/_parsers.py:347: UserWarning: The field `param-dtype` is annotated with type ``, but the default value `` has type ``. We'll try to handle this gracefully, but it may cause unexpected behavior.\r\n warnings.warn(message)\r\n/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/tyro/_parsers.py:347: UserWarning: The field `dtype` is annotated with type ``, but the default value `` has type ``. We'll try to handle this gracefully, but it may cause unexpected behavior.\r\n warnings.warn(message)\r\n",,terminal_output +3303,7125614,"TERMINAL",0,0,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/tyro/_parsers.py:347: UserWarning: The field `param-dtype` is annotated with type ``, but the default value `` has type ``. We'll try to handle this gracefully, but it may cause unexpected behavior.\r\n warnings.warn(message)\r\n/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/tyro/_parsers.py:347: UserWarning: The field `dtype` is annotated with type ``, but the default value `` has type ``. 
We'll try to handle this gracefully, but it may cause unexpected behavior.\r\n warnings.warn(message)\r\n/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/tyro/_parsers.py:347: UserWarning: The field `param-dtype` is annotated with type ``, but the default value `` has type ``. We'll try to handle this gracefully, but it may cause unexpected behavior.\r\n warnings.warn(message)\r\n/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/tyro/_parsers.py:347: UserWarning: The field `dtype` is annotated with type ``, but the default value `` has type ``. We'll try to handle this gracefully, but it may cause unexpected behavior.\r\n warnings.warn(message)\r\n",,terminal_output +3304,7125671,"TERMINAL",0,0,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/tyro/_parsers.py:347: UserWarning: The field `param-dtype` is annotated with type ``, but the default value `` has type ``. We'll try to handle this gracefully, but it may cause unexpected behavior.\r\n warnings.warn(message)\r\n/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/tyro/_parsers.py:347: UserWarning: The field `dtype` is annotated with type ``, but the default value `` has type ``. We'll try to handle this gracefully, but it may cause unexpected behavior.\r\n warnings.warn(message)\r\n/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/tyro/_parsers.py:347: UserWarning: The field `param-dtype` is annotated with type ``, but the default value `` has type ``. We'll try to handle this gracefully, but it may cause unexpected behavior.\r\n warnings.warn(message)\r\n/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/tyro/_parsers.py:347: UserWarning: The field `dtype` is annotated with type ``, but the default value `` has type ``. We'll try to handle this gracefully, but it may cause unexpected behavior.\r\n warnings.warn(message)\r\n",,terminal_output +3305,7125741,"TERMINAL",0,0,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/tyro/_parsers.py:347: UserWarning: The field `param-dtype` is annotated with type ``, but the default value `` has type ``. We'll try to handle this gracefully, but it may cause unexpected behavior.\r\n warnings.warn(message)\r\n/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/tyro/_parsers.py:347: UserWarning: The field `dtype` is annotated with type ``, but the default value `` has type ``. We'll try to handle this gracefully, but it may cause unexpected behavior.\r\n warnings.warn(message)\r\n/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/tyro/_parsers.py:347: UserWarning: The field `param-dtype` is annotated with type ``, but the default value `` has type ``. We'll try to handle this gracefully, but it may cause unexpected behavior.\r\n warnings.warn(message)\r\n/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/tyro/_parsers.py:347: UserWarning: The field `dtype` is annotated with type ``, but the default value `` has type ``. 
We'll try to handle this gracefully, but it may cause unexpected behavior.\r\n warnings.warn(message)\r\n/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/tyro/_parsers.py:347: UserWarning: The field `param-dtype` is annotated with type ``, but the default value `` has type ``. We'll try to handle this gracefully, but it may cause unexpected behavior.\r\n warnings.warn(message)\r\n/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/tyro/_parsers.py:347: UserWarning: The field `dtype` is annotated with type ``, but the default value `` has type ``. We'll try to handle this gracefully, but it may cause unexpected behavior.\r\n warnings.warn(message)\r\n",,terminal_output +3306,7125836,"TERMINAL",0,0,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/tyro/_parsers.py:347: UserWarning: The field `param-dtype` is annotated with type ``, but the default value `` has type ``. We'll try to handle this gracefully, but it may cause unexpected behavior.\r\n warnings.warn(message)\r\n/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/tyro/_parsers.py:347: UserWarning: The field `dtype` is annotated with type ``, but the default value `` has type ``. We'll try to handle this gracefully, but it may cause unexpected behavior.\r\n warnings.warn(message)\r\n/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/tyro/_parsers.py:347: UserWarning: The field `param-dtype` is annotated with type ``, but the default value `` has type ``. We'll try to handle this gracefully, but it may cause unexpected behavior.\r\n warnings.warn(message)\r\n/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/tyro/_parsers.py:347: UserWarning: The field `dtype` is annotated with type ``, but the default value `` has type ``. We'll try to handle this gracefully, but it may cause unexpected behavior.\r\n warnings.warn(message)\r\n",,terminal_output +3307,7125894,"TERMINAL",0,0,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/tyro/_parsers.py:347: UserWarning: The field `param-dtype` is annotated with type ``, but the default value `` has type ``. We'll try to handle this gracefully, but it may cause unexpected behavior.\r\n warnings.warn(message)\r\n/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/tyro/_parsers.py:347: UserWarning: The field `dtype` is annotated with type ``, but the default value `` has type ``. We'll try to handle this gracefully, but it may cause unexpected behavior.\r\n warnings.warn(message)\r\n",,terminal_output +3308,7125955,"TERMINAL",0,0,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/tyro/_parsers.py:347: UserWarning: The field `param-dtype` is annotated with type ``, but the default value `` has type ``. We'll try to handle this gracefully, but it may cause unexpected behavior.\r\n warnings.warn(message)\r\n/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/tyro/_parsers.py:347: UserWarning: The field `dtype` is annotated with type ``, but the default value `` has type ``. 
We'll try to handle this gracefully, but it may cause unexpected behavior.\r\n warnings.warn(message)\r\n/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/tyro/_parsers.py:347: UserWarning: The field `param-dtype` is annotated with type ``, but the default value `` has type ``. We'll try to handle this gracefully, but it may cause unexpected behavior.\r\n warnings.warn(message)\r\n/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/tyro/_parsers.py:347: UserWarning: The field `dtype` is annotated with type ``, but the default value `` has type ``. We'll try to handle this gracefully, but it may cause unexpected behavior.\r\n warnings.warn(message)\r\n",,terminal_output +3309,7126113,"TERMINAL",0,0,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/tyro/_parsers.py:347: UserWarning: The field `param-dtype` is annotated with type ``, but the default value `` has type ``. We'll try to handle this gracefully, but it may cause unexpected behavior.\r\n warnings.warn(message)\r\n/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/tyro/_parsers.py:347: UserWarning: The field `dtype` is annotated with type ``, but the default value `` has type ``. We'll try to handle this gracefully, but it may cause unexpected behavior.\r\n warnings.warn(message)\r\n/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/tyro/_parsers.py:347: UserWarning: The field `param-dtype` is annotated with type ``, but the default value `` has type ``. We'll try to handle this gracefully, but it may cause unexpected behavior.\r\n warnings.warn(message)\r\n/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/tyro/_parsers.py:347: UserWarning: The field `dtype` is annotated with type ``, but the default value `` has type ``. We'll try to handle this gracefully, but it may cause unexpected behavior.\r\n warnings.warn(message)\r\n/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/tyro/_parsers.py:347: UserWarning: The field `param-dtype` is annotated with type ``, but the default value `` has type ``. We'll try to handle this gracefully, but it may cause unexpected behavior.\r\n warnings.warn(message)\r\n/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/tyro/_parsers.py:347: UserWarning: The field `dtype` is annotated with type ``, but the default value `` has type ``. We'll try to handle this gracefully, but it may cause unexpected behavior.\r\n warnings.warn(message)\r\n",,terminal_output +3310,7126171,"TERMINAL",0,0,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/tyro/_parsers.py:347: UserWarning: The field `param-dtype` is annotated with type ``, but the default value `` has type ``. We'll try to handle this gracefully, but it may cause unexpected behavior.\r\n warnings.warn(message)\r\n/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/tyro/_parsers.py:347: UserWarning: The field `dtype` is annotated with type ``, but the default value `` has type ``. 
We'll try to handle this gracefully, but it may cause unexpected behavior.\r\n warnings.warn(message)\r\n/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/tyro/_parsers.py:347: UserWarning: The field `param-dtype` is annotated with type ``, but the default value `` has type ``. We'll try to handle this gracefully, but it may cause unexpected behavior.\r\n warnings.warn(message)\r\n/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/tyro/_parsers.py:347: UserWarning: The field `param-dtype` is annotated with type ``, but the default value `` has type ``. We'll try to handle this gracefully, but it may cause unexpected behavior.\r\n warnings.warn(message)\r\n/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/tyro/_parsers.py:347: UserWarning: The field `dtype` is annotated with type ``, but the default value `` has type ``. We'll try to handle this gracefully, but it may cause unexpected behavior.\r\n warnings.warn(message)\r\n/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/tyro/_parsers.py:347: UserWarning: The field `dtype` is annotated with type ``, but the default value `` has type ``. We'll try to handle this gracefully, but it may cause unexpected behavior.\r\n warnings.warn(message)\r\n",,terminal_output +3311,7126266,"TERMINAL",0,0,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/tyro/_parsers.py:347: UserWarning: The field `param-dtype` is annotated with type ``, but the default value `` has type ``. We'll try to handle this gracefully, but it may cause unexpected behavior.\r\n warnings.warn(message)\r\n/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/tyro/_parsers.py:347: UserWarning: The field `dtype` is annotated with type ``, but the default value `` has type ``. We'll try to handle this gracefully, but it may cause unexpected behavior.\r\n warnings.warn(message)\r\n",,terminal_output +3312,7126326,"TERMINAL",0,0,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/tyro/_parsers.py:347: UserWarning: The field `param-dtype` is annotated with type ``, but the default value `` has type ``. We'll try to handle this gracefully, but it may cause unexpected behavior.\r\n warnings.warn(message)\r\n/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/tyro/_parsers.py:347: UserWarning: The field `dtype` is annotated with type ``, but the default value `` has type ``. We'll try to handle this gracefully, but it may cause unexpected behavior.\r\n warnings.warn(message)\r\n/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/tyro/_parsers.py:347: UserWarning: The field `param-dtype` is annotated with type ``, but the default value `` has type ``. We'll try to handle this gracefully, but it may cause unexpected behavior.\r\n warnings.warn(message)\r\n/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/tyro/_parsers.py:347: UserWarning: The field `dtype` is annotated with type ``, but the default value `` has type ``. 
We'll try to handle this gracefully, but it may cause unexpected behavior.\r\n warnings.warn(message)\r\n",,terminal_output +3313,7126384,"TERMINAL",0,0,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/tyro/_parsers.py:347: UserWarning: The field `param-dtype` is annotated with type ``, but the default value `` has type ``. We'll try to handle this gracefully, but it may cause unexpected behavior.\r\n warnings.warn(message)\r\n/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/tyro/_parsers.py:347: UserWarning: The field `dtype` is annotated with type ``, but the default value `` has type ``. We'll try to handle this gracefully, but it may cause unexpected behavior.\r\n warnings.warn(message)\r\n",,terminal_output +3314,7126489,"TERMINAL",0,0,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/tyro/_parsers.py:347: UserWarning: The field `param-dtype` is annotated with type ``, but the default value `` has type ``. We'll try to handle this gracefully, but it may cause unexpected behavior.\r\n warnings.warn(message)\r\n/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/tyro/_parsers.py:347: UserWarning: The field `dtype` is annotated with type ``, but the default value `` has type ``. We'll try to handle this gracefully, but it may cause unexpected behavior.\r\n warnings.warn(message)\r\n",,terminal_output +3315,7126489,"TERMINAL",0,0,"[?1049l\r[?1l>]0;tum_cte0515@hkn1991:/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_new/open_ai_minecraft_arrayrecords_chunked]633;D;0",,terminal_output +3316,7127731,"TERMINAL",0,0,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/tyro/_parsers.py:347: UserWarning: The field `param-dtype` is annotated with type ``, but the default value `` has type ``. We'll try to handle this gracefully, but it may cause unexpected behavior.\r\n warnings.warn(message)\r\n/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/tyro/_parsers.py:347: UserWarning: The field `dtype` is annotated with type ``, but the default value `` has type ``. We'll try to handle this gracefully, but it may cause unexpected behavior.\r\n warnings.warn(message)\r\n",,terminal_output +3317,7139933,"TERMINAL",0,0,"jax.errors.SimplifiedTraceback: For simplicity, JAX has removed its internal frames from the traceback of the following exception. 
Set JAX_TRACEBACK_FILTERING=off to include these.\r\n\r\nThe above exception was the direct cause of the following exception:\r\n\r\nTraceback (most recent call last):\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py"", line 337, in \r\n train_state, loss, recon, metrics = train_step(train_state, inputs)\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py"", line 142, in train_step\r\n (loss, (recon, metrics)), grads = grad_fn(state.params, state, inputs)\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py"", line 110, in dynamics_loss_fn\r\n acc = (mask * acc).sum() / mask.sum()\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/jax/_src/numpy/array_methods.py"", line 1083, in op\r\n return getattr(self.aval, f""_{name}"")(self, *args)\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/jax/_src/numpy/array_methods.py"", line 583, in deferring_binary_op\r\n return binary_op(*args)\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/jax/_src/numpy/ufunc_api.py"", line 182, in __call__\r\n return call(*args)\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/jax/_src/numpy/ufuncs.py"", line 1280, in multiply\r\n return lax.mul(x, y) if x.dtype != bool else lax.bitwise_and(x, y)\r\nTypeError: mul got incompatible shapes for broadcasting: (96, 15, 920), (96, 16, 920).\r\njax.errors.SimplifiedTraceback: For simplicity, JAX has removed its internal frames from the traceback of the following exception. 
Set JAX_TRACEBACK_FILTERING=off to include these.\r\n\r\nThe above exception was the direct cause of the following exception:\r\n\r\nTraceback (most recent call last):\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py"", line 337, in \r\n train_state, loss, recon, metrics = train_step(train_state, inputs)\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py"", line 142, in train_step\r\n (loss, (recon, metrics)), grads = grad_fn(state.params, state, inputs)\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py"", line 110, in dynamics_loss_fn\r\n acc = (mask * acc).sum() / mask.sum()\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/jax/_src/numpy/array_methods.py"", line 1083, in op\r\n return getattr(self.aval, f""_{name}"")(self, *args)\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/jax/_src/numpy/array_methods.py"", line 583, in deferring_binary_op\r\n return binary_op(*args)\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/jax/_src/numpy/ufunc_api.py"", line 182, in __call__\r\n return call(*args)\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/jax/_src/numpy/ufuncs.py"", line 1280, in multiply\r\n return lax.mul(x, y) if x.dtype != bool else lax.bitwise_and(x, y)\r\nTypeError: mul got incompatible shapes for broadcasting: (96, 15, 920), (96, 16, 920).\r\n",,terminal_output +3318,7140458,"TERMINAL",0,0,"Filtering out episode with length 1, which is shorter than the requested sequence length 16.\r\nFiltering out episode with length 7, which is shorter than the requested sequence length 16.\r\nFiltering out episode with length 1, which is shorter than the requested sequence length 16.\r\n",,terminal_output +3319,7140531,"TERMINAL",0,0,"Filtering out episode with length 1, which is shorter than the requested sequence length 16.\r\n",,terminal_output +3320,7140710,"TERMINAL",0,0,"jax.errors.SimplifiedTraceback: For simplicity, JAX has removed its internal frames from the traceback of the following exception. 
Set JAX_TRACEBACK_FILTERING=off to include these.\r\n\r\nThe above exception was the direct cause of the following exception:\r\n\r\nTraceback (most recent call last):\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py"", line 337, in \r\n train_state, loss, recon, metrics = train_step(train_state, inputs)\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py"", line 142, in train_step\r\n (loss, (recon, metrics)), grads = grad_fn(state.params, state, inputs)\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py"", line 110, in dynamics_loss_fn\r\n acc = (mask * acc).sum() / mask.sum()\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/jax/_src/numpy/array_methods.py"", line 1083, in op\r\n return getattr(self.aval, f""_{name}"")(self, *args)\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/jax/_src/numpy/array_methods.py"", line 583, in deferring_binary_op\r\n return binary_op(*args)\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/jax/_src/numpy/ufunc_api.py"", line 182, in __call__\r\n return call(*args)\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/jax/_src/numpy/ufuncs.py"", line 1280, in multiply\r\n return lax.mul(x, y) if x.dtype != bool else lax.bitwise_and(x, y)\r\nTypeError: mul got incompatible shapes for broadcasting: (96, 15, 920), (96, 16, 920).\r\n",,terminal_output +3321,7140771,"TERMINAL",0,0,"Filtering out episode with length 8, which is shorter than the requested sequence length 16.\r\n",,terminal_output +3322,7141361,"TERMINAL",0,0,"jax.errors.SimplifiedTraceback: For simplicity, JAX has removed its internal frames from the traceback of the following exception. 
Set JAX_TRACEBACK_FILTERING=off to include these.\r\n\r\nThe above exception was the direct cause of the following exception:\r\n\r\nTraceback (most recent call last):\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py"", line 337, in \r\n train_state, loss, recon, metrics = train_step(train_state, inputs)\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py"", line 142, in train_step\r\n (loss, (recon, metrics)), grads = grad_fn(state.params, state, inputs)\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py"", line 110, in dynamics_loss_fn\r\n acc = (mask * acc).sum() / mask.sum()\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/jax/_src/numpy/array_methods.py"", line 1083, in op\r\n return getattr(self.aval, f""_{name}"")(self, *args)\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/jax/_src/numpy/array_methods.py"", line 583, in deferring_binary_op\r\n return binary_op(*args)\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/jax/_src/numpy/ufunc_api.py"", line 182, in __call__\r\n return call(*args)\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/jax/_src/numpy/ufuncs.py"", line 1280, in multiply\r\n return lax.mul(x, y) if x.dtype != bool else lax.bitwise_and(x, y)\r\nTypeError: mul got incompatible shapes for broadcasting: (96, 15, 920), (96, 16, 920).\r\n",,terminal_output +3323,7141630,"TERMINAL",0,0,"Filtering out episode with length 14, which is shorter than the requested sequence length 16.\r\n",,terminal_output +3324,7143421,"TERMINAL",0,0,"Filtering out episode with length 1, which is shorter than the requested sequence length 16.\r\n",,terminal_output +3325,7144232,"TERMINAL",0,0,"jax.errors.SimplifiedTraceback: For simplicity, JAX has removed its internal frames from the traceback of the following exception. 
Set JAX_TRACEBACK_FILTERING=off to include these.\r\n\r\nThe above exception was the direct cause of the following exception:\r\n\r\nTraceback (most recent call last):\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py"", line 337, in \r\n train_state, loss, recon, metrics = train_step(train_state, inputs)\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py"", line 142, in train_step\r\n (loss, (recon, metrics)), grads = grad_fn(state.params, state, inputs)\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py"", line 110, in dynamics_loss_fn\r\n acc = (mask * acc).sum() / mask.sum()\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/jax/_src/numpy/array_methods.py"", line 1083, in op\r\n return getattr(self.aval, f""_{name}"")(self, *args)\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/jax/_src/numpy/array_methods.py"", line 583, in deferring_binary_op\r\n return binary_op(*args)\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/jax/_src/numpy/ufunc_api.py"", line 182, in __call__\r\n return call(*args)\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/jax/_src/numpy/ufuncs.py"", line 1280, in multiply\r\n return lax.mul(x, y) if x.dtype != bool else lax.bitwise_and(x, y)\r\nTypeError: mul got incompatible shapes for broadcasting: (96, 15, 920), (96, 16, 920).\r\n",,terminal_output +3326,7144594,"TERMINAL",0,0,"jax.errors.SimplifiedTraceback: For simplicity, JAX has removed its internal frames from the traceback of the following exception. 
Set JAX_TRACEBACK_FILTERING=off to include these.\r\n\r\nThe above exception was the direct cause of the following exception:\r\n\r\nTraceback (most recent call last):\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py"", line 337, in \r\n train_state, loss, recon, metrics = train_step(train_state, inputs)\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py"", line 142, in train_step\r\n (loss, (recon, metrics)), grads = grad_fn(state.params, state, inputs)\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py"", line 110, in dynamics_loss_fn\r\n acc = (mask * acc).sum() / mask.sum()\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/jax/_src/numpy/array_methods.py"", line 1083, in op\r\n return getattr(self.aval, f""_{name}"")(self, *args)\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/jax/_src/numpy/array_methods.py"", line 583, in deferring_binary_op\r\n return binary_op(*args)\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/jax/_src/numpy/ufunc_api.py"", line 182, in __call__\r\n return call(*args)\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/jax/_src/numpy/ufuncs.py"", line 1280, in multiply\r\n return lax.mul(x, y) if x.dtype != bool else lax.bitwise_and(x, y)\r\nTypeError: mul got incompatible shapes for broadcasting: (96, 15, 920), (96, 16, 920).\r\n",,terminal_output +3327,7144779,"TERMINAL",0,0,"Filtering out episode with length 10, which is shorter than the requested sequence length 16.\r\n",,terminal_output +3328,7144840,"TERMINAL",0,0,"Filtering out episode with length 1, which is shorter than the requested sequence length 16.\r\n",,terminal_output +3329,7144946,"TERMINAL",0,0,"Filtering out episode with length 1, which is shorter than the requested sequence length 16.\r\n",,terminal_output +3330,7145071,"TERMINAL",0,0,"Filtering out episode with length 14, which is shorter than the requested sequence length 16.\r\nFiltering out episode with length 1, which is shorter than the requested sequence length 16.\r\n",,terminal_output +3331,7147516,"TERMINAL",0,0,"Filtering out episode with length 7, which is shorter than the requested sequence length 16.\r\nwandb: \r\nwandb: 🚀 View run dynamics-debug-run-debug-mihir at: https://wandb.ai/instant-uv/jafar/runs/bb375awk\r\nwandb: Find logs at: ../../../../../hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/wandb/run-20250716_164613-bb375awk/logs\r\n",,terminal_output +3332,7150172,"TERMINAL",0,0,"jax.errors.SimplifiedTraceback: For simplicity, JAX has removed its internal frames from the traceback of the following exception. 
Set JAX_TRACEBACK_FILTERING=off to include these.\r\n\r\nThe above exception was the direct cause of the following exception:\r\n\r\nTraceback (most recent call last):\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py"", line 337, in \r\n train_state, loss, recon, metrics = train_step(train_state, inputs)\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py"", line 142, in train_step\r\n (loss, (recon, metrics)), grads = grad_fn(state.params, state, inputs)\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py"", line 110, in dynamics_loss_fn\r\n acc = (mask * acc).sum() / mask.sum()\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/jax/_src/numpy/array_methods.py"", line 1083, in op\r\n return getattr(self.aval, f""_{name}"")(self, *args)\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/jax/_src/numpy/array_methods.py"", line 583, in deferring_binary_op\r\n return binary_op(*args)\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/jax/_src/numpy/ufunc_api.py"", line 182, in __call__\r\n return call(*args)\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/jax/_src/numpy/ufuncs.py"", line 1280, in multiply\r\n return lax.mul(x, y) if x.dtype != bool else lax.bitwise_and(x, y)\r\nTypeError: mul got incompatible shapes for broadcasting: (96, 15, 920), (96, 16, 920).\r\n",,terminal_output +3333,7150635,"TERMINAL",0,0,"jax.errors.SimplifiedTraceback: For simplicity, JAX has removed its internal frames from the traceback of the following exception. 
Set JAX_TRACEBACK_FILTERING=off to include these.\r\n\r\nThe above exception was the direct cause of the following exception:\r\n\r\nTraceback (most recent call last):\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py"", line 337, in \r\n train_state, loss, recon, metrics = train_step(train_state, inputs)\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py"", line 142, in train_step\r\n (loss, (recon, metrics)), grads = grad_fn(state.params, state, inputs)\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py"", line 110, in dynamics_loss_fn\r\n acc = (mask * acc).sum() / mask.sum()\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/jax/_src/numpy/array_methods.py"", line 1083, in op\r\n return getattr(self.aval, f""_{name}"")(self, *args)\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/jax/_src/numpy/array_methods.py"", line 583, in deferring_binary_op\r\n return binary_op(*args)\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/jax/_src/numpy/ufunc_api.py"", line 182, in __call__\r\n return call(*args)\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/jax/_src/numpy/ufuncs.py"", line 1280, in multiply\r\n return lax.mul(x, y) if x.dtype != bool else lax.bitwise_and(x, y)\r\nTypeError: mul got incompatible shapes for broadcasting: (96, 15, 920), (96, 16, 920).\r\n",,terminal_output +3334,7152757,"TERMINAL",0,0,"Filtering out episode with length 1, which is shorter than the requested sequence length 16.\r\n",,terminal_output +3335,7154388,"TERMINAL",0,0,"/home/hk-project-p0023960/tum_cte0515/.local/share/uv/python/cpython-3.10.18-linux-x86_64-gnu/lib/python3.10/multiprocessing/resource_tracker.py:224: UserWarning: resource_tracker: There appear to be 11 leaked shared_memory objects to clean up at shutdown\r\n warnings.warn('resource_tracker: There appear to be %d '\r\n/home/hk-project-p0023960/tum_cte0515/.local/share/uv/python/cpython-3.10.18-linux-x86_64-gnu/lib/python3.10/multiprocessing/resource_tracker.py:224: UserWarning: resource_tracker: There appear to be 10 leaked shared_memory objects to clean up at shutdown\r\n warnings.warn('resource_tracker: There appear to be %d '\r\n/home/hk-project-p0023960/tum_cte0515/.local/share/uv/python/cpython-3.10.18-linux-x86_64-gnu/lib/python3.10/multiprocessing/resource_tracker.py:224: UserWarning: resource_tracker: There appear to be 11 leaked shared_memory objects to clean up at shutdown\r\n warnings.warn('resource_tracker: There appear to be %d '\r\n/home/hk-project-p0023960/tum_cte0515/.local/share/uv/python/cpython-3.10.18-linux-x86_64-gnu/lib/python3.10/multiprocessing/resource_tracker.py:224: UserWarning: resource_tracker: There appear to be 11 leaked shared_memory objects to clean up at shutdown\r\n warnings.warn('resource_tracker: There appear to be %d '\r\n/home/hk-project-p0023960/tum_cte0515/.local/share/uv/python/cpython-3.10.18-linux-x86_64-gnu/lib/python3.10/multiprocessing/resource_tracker.py:224: UserWarning: resource_tracker: There appear to be 11 leaked shared_memory objects to clean up at shutdown\r\n warnings.warn('resource_tracker: There appear to be %d 
'\r\n/home/hk-project-p0023960/tum_cte0515/.local/share/uv/python/cpython-3.10.18-linux-x86_64-gnu/lib/python3.10/multiprocessing/resource_tracker.py:224: UserWarning: resource_tracker: There appear to be 11 leaked shared_memory objects to clean up at shutdown\r\n warnings.warn('resource_tracker: There appear to be %d '\r\n/home/hk-project-p0023960/tum_cte0515/.local/share/uv/python/cpython-3.10.18-linux-x86_64-gnu/lib/python3.10/multiprocessing/resource_tracker.py:224: UserWarning: resource_tracker: There appear to be 11 leaked shared_memory objects to clean up at shutdown\r\n warnings.warn('resource_tracker: There appear to be %d '\r\n/home/hk-project-p0023960/tum_cte0515/.local/share/uv/python/cpython-3.10.18-linux-x86_64-gnu/lib/python3.10/multiprocessing/resource_tracker.py:224: UserWarning: resource_tracker: There appear to be 11 leaked shared_memory objects to clean up at shutdown\r\n warnings.warn('resource_tracker: There appear to be %d '\r\n",,terminal_output +3336,7154985,"TERMINAL",0,0,"srun: error: hkn0710: task 2: Exited with exit code 1\r\nsrun: error: hkn0711: task 6: Exited with exit code 1\r\n",,terminal_output +3337,7155413,"TERMINAL",0,0,"srun: error: hkn0711: tasks 4,7: Exited with exit code 1\r\nsrun: error: hkn0711: task 5: Exited with exit code 1\r\nsrun: error: hkn0710: tasks 0,3: Exited with exit code 1\r\nsrun: error: hkn0710: task 1: Exited with exit code 1\r\n]0;tum_cte0515@hkn0710:~/Projects/jafar[?2004h(jafar) [tum_cte0515@hkn0710 jafar]$ ",,terminal_output +3338,7188480,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/models/tokenizer.py",0,0,"",python,tab +3339,7189353,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",0,0,"",python,tab +3340,7199494,"TERMINAL",0,0,"srun",,terminal_focus +3341,7208788,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",0,0,"",python,tab +3342,7227424,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",3147,0,"",python,selection_mouse +3343,7227633,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",3144,3,"ts""",python,selection_mouse +3344,7227634,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",3139,8,"_logits""",python,selection_mouse +3345,7227634,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",3134,13,"token_logits""",python,selection_mouse +3346,7227634,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",3147,56,"].argmax(-1) == outputs[""video_tokens""]\n acc = (mask ",python,selection_mouse +3347,7227662,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",3147,53,"].argmax(-1) == outputs[""video_tokens""]\n acc = (ma",python,selection_mouse +3348,7227683,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",3147,51,"].argmax(-1) == outputs[""video_tokens""]\n acc = (",python,selection_mouse +3349,7227736,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",3147,50,"].argmax(-1) == outputs[""video_tokens""]\n acc = ",python,selection_mouse +3350,7227941,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",3147,51,"].argmax(-1) == outputs[""video_tokens""]\n acc = (",python,selection_mouse 
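The TypeError recorded in the two traceback rows above comes from `dynamics_loss_fn` (train_dynamics.py, line 110): `acc` holds per-token correctness with shape (96, 15, 920) while `mask` covers (96, 16, 920), so `mask * acc` cannot broadcast. A minimal sketch of the failure and one possible alignment, assuming the extra frame in the mask is the unpredicted first frame (the recording does not show which frame is actually dropped):

```python
import jax.numpy as jnp

# Shapes copied from the TypeError above: (batch, frames, tokens per frame).
B, T, N = 96, 16, 920
acc = jnp.ones((B, T - 1, N), dtype=bool)      # correctness over 15 predicted frames
mask = jnp.ones((B, T, N), dtype=jnp.float32)  # padding mask over all 16 frames

# mask * acc would raise: mul got incompatible shapes for broadcasting.
# Hypothetical fix: drop the unpredicted first frame from the mask so both
# operands are (96, 15, 920) before taking the masked mean.
mask = mask[:, 1:]
acc_mean = (mask * acc).sum() / mask.sum()
```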
+3351,7228293,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",3127,20,"tputs[""token_logits""",python,selection_mouse +3352,7228294,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",3129,18,"uts[""token_logits""",python,selection_mouse +3353,7228294,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",3130,17,"ts[""token_logits""",python,selection_mouse +3354,7228294,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",3131,16,"s[""token_logits""",python,selection_mouse +3355,7228386,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",3130,17,"ts[""token_logits""",python,selection_mouse +3356,7228421,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",3129,18,"uts[""token_logits""",python,selection_mouse +3357,7228475,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",3128,19,"puts[""token_logits""",python,selection_mouse +3358,7228476,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",3127,20,"tputs[""token_logits""",python,selection_mouse +3359,7228521,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",3126,21,"utputs[""token_logits""",python,selection_mouse +3360,7228573,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",3125,22,"outputs[""token_logits""",python,selection_mouse +3361,7230282,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",3125,22,"",python,content +3362,7230283,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",2800,4,"",python,content +3363,7230665,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",3121,1,"",python,content +3364,7230993,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",3121,0,"l",python,content +3365,7230994,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",3122,0,"",python,selection_keyboard +3366,7231316,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",3122,0,"g",python,content +3367,7231317,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",3123,0,"",python,selection_keyboard +3368,7231669,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",3122,1,"",python,content +3369,7231806,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",3122,0,"o",python,content +3370,7231809,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",3123,0,"",python,selection_keyboard +3371,7233788,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",3121,2,"logits",python,content +3372,7235262,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",3094,0,"",python,selection_mouse +3373,7235413,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",3092,3,"sum",python,selection_mouse +3374,7236589,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",3121,0,"",python,selection_mouse +3375,7236743,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",3121,6,"logits",python,selection_mouse 
+3376,7238188,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",3265,0,"",python,selection_mouse +3377,7238456,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",3264,1,"]",python,selection_mouse +3378,7238457,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",3262,3,"s""]",python,selection_mouse +3379,7238457,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",3259,6,"gits""]",python,selection_mouse +3380,7238457,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",3258,7,"ogits""]",python,selection_mouse +3381,7238482,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",3257,8,"logits""]",python,selection_mouse +3382,7238510,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",3256,9,"_logits""]",python,selection_mouse +3383,7238534,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",3255,10,"n_logits""]",python,selection_mouse +3384,7238570,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",3254,11,"en_logits""]",python,selection_mouse +3385,7238598,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",3253,12,"ken_logits""]",python,selection_mouse +3386,7238623,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",3252,13,"oken_logits""]",python,selection_mouse +3387,7238648,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",3251,14,"token_logits""]",python,selection_mouse +3388,7238672,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",3250,15,"""token_logits""]",python,selection_mouse +3389,7238749,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",3249,16,"[""token_logits""]",python,selection_mouse +3390,7238821,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",3248,17,"s[""token_logits""]",python,selection_mouse +3391,7238865,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",3247,18,"ts[""token_logits""]",python,selection_mouse +3392,7238916,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",3246,19,"uts[""token_logits""]",python,selection_mouse +3393,7238963,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",3245,20,"puts[""token_logits""]",python,selection_mouse +3394,7238963,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",3244,21,"tputs[""token_logits""]",python,selection_mouse +3395,7239020,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",3243,22,"utputs[""token_logits""]",python,selection_mouse +3396,7239043,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",3242,23,"outputs[""token_logits""]",python,selection_mouse +3397,7239620,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",3242,23,"",python,content +3398,7239954,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",3242,0,"l",python,content +3399,7239955,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",3243,0,"",python,selection_keyboard 
+3400,7240171,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",3243,0,"o",python,content +3401,7240172,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",3244,0,"",python,selection_keyboard +3402,7241004,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",3242,2,"logits",python,content +3403,7247117,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",3145,0,"",python,selection_mouse +3404,7247284,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",3142,7,"outputs",python,selection_mouse +3405,7247381,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",3142,9,"outputs[""",python,selection_mouse +3406,7247407,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",3142,21,"outputs[""video_tokens",python,selection_mouse +3407,7247507,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",3142,22,"outputs[""video_tokens""",python,selection_mouse +3408,7247768,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",3142,23,"outputs[""video_tokens""]",python,selection_mouse +3409,7249118,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",3142,23,"t",python,content +3410,7249119,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",3143,0,"",python,selection_keyboard +3411,7249516,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",3143,0,"a",python,content +3412,7249518,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",3144,0,"",python,selection_keyboard +3413,7250243,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",3142,2,"targets",python,content +3414,7251796,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",3284,0,"",python,selection_mouse +3415,7254785,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",4031,0,"",python,selection_mouse +3416,7254946,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",4024,12,"token_logits",python,selection_mouse +3417,7255846,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",4029,0,"",python,selection_mouse +3418,7256702,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",4018,0,"",python,selection_mouse +3419,7256903,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",4015,7,"outputs",python,selection_mouse +3420,7257024,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",4015,8,"outputs[",python,selection_mouse +3421,7257025,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",4015,21,"outputs[""token_logits",python,selection_mouse +3422,7257322,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",4015,22,"outputs[""token_logits""",python,selection_mouse +3423,7257925,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",4015,23,"outputs[""token_logits""]",python,selection_mouse +3424,7258632,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",4015,23,"l",python,content 
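The selection and content events in rows 3342–3428 (the final keystrokes of the rename follow just below) replace the dictionary lookups on the failing lines with local names: `outputs["token_logits"]` becomes `logits` and `outputs["video_tokens"]` becomes `targets`. A hypothetical reconstruction of the resulting accuracy computation, with dummy shapes taken from the traceback and a made-up vocabulary size, since the surrounding file contents are not part of the recording:

```python
import jax.numpy as jnp

# Stand-ins for the model outputs; the vocabulary size (1024) is an assumption.
logits = jnp.zeros((96, 15, 920, 1024))              # formerly outputs["token_logits"]
targets = jnp.zeros((96, 15, 920), dtype=jnp.int32)  # formerly outputs["video_tokens"]
mask = jnp.ones((96, 15, 920))

acc = logits.argmax(-1) == targets   # per-token correctness
acc = (mask * acc).sum() / mask.sum()  # masked mean accuracy
```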
+3425,7258634,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",4016,0,"",python,selection_keyboard +3426,7258860,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",4016,0,"o",python,content +3427,7258861,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",4017,0,"",python,selection_keyboard +3428,7259446,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",4015,2,"logits",python,content +3429,7261884,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",2777,0,"",python,selection_mouse +3430,7262760,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",2792,0,"",python,selection_mouse +3431,7262936,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",2785,12,"video_tokens",python,selection_mouse +3432,7273479,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",3202,0,"",python,selection_mouse +3433,7273652,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",3196,12,"select_probs",python,selection_mouse +3434,7280840,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",3156,0,"",python,selection_mouse +3435,7281060,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",3154,3,"acc",python,selection_mouse +3436,7285724,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",3722,0,"",python,selection_mouse +3437,7285818,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",3714,12,"video_tokens",python,selection_mouse +3438,7286555,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",3709,0,"",python,selection_mouse +3439,7286731,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",3705,7,"outputs",python,selection_mouse +3440,7290759,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",3709,0,"",python,selection_mouse +3441,7291217,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",3705,7,"outputs",python,selection_mouse +3442,7291522,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",3705,8,"outputs[",python,selection_mouse +3443,7291523,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",3705,9,"outputs[""",python,selection_mouse +3444,7291523,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",3705,21,"outputs[""video_tokens",python,selection_mouse +3445,7292204,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",3705,22,"outputs[""video_tokens""",python,selection_mouse +3446,7293195,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",3705,23,"outputs[""video_tokens""]",python,selection_mouse +3447,7309370,"TERMINAL",0,0,"sh slurm/jobs/mihir/horeka/yolo-runs/tester.sh",,terminal_output +3448,7309825,"TERMINAL",0,0,"\r\n[?2004l\r\r\n# Log the sbatch script\r\ncat $0\r\n\r\nmodule unload mpi/openmpi/5.0\r\nmodule unload devel/cuda/12.4\r\nsource 
.venv/bin/activate\r\n\r\narray_records_dir=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_new/open_ai_minecraft_arrayrecords_chunked\r\nws_dir='/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/'\r\n\r\njob_name=""debug""\r\nslurm_job_id=""debug-mihir""\r\n\r\nCHECKPOINT_DIR=$ws_dir/checkpoints/$job_name/$slurm_job_id\r\nmkdir -p $CHECKPOINT_DIR\r\n\r\ntokenizer_ckpt_dir=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/big-runs/tokenizer-lr-scaling/train_tokenizer_lr_sweep_1e-4\r\n\r\nenv | grep SLURM\r\n\r\nXLA_FLAGS=--xla_gpu_autotune_level=0 srun python train_dynamics.py \\r\n --ckpt_dir $CHECKPOINT_DIR \\r\n --batch_size=96 \\r\n --max_lr=1e-4 \\r\n --log_image_interval=1000 \\r\n --log \\r\n --log_checkpoint_interval=1000 \\r\n --name=dynamics-debug-run-$slurm_job_id \\r\n --tags dynamics debug \\r\n --entity instant-uv \\r\n --project jafar \\r\n --tokenizer_checkpoint=$tokenizer_ckpt_dir \\r\n --data_dir $array_records_dir\r\n ",,terminal_output +3449,7309968,"TERMINAL",0,0,"SLURM_STEP_NUM_TASKS=1\r\nSLURM_JOB_USER=tum_cte0515\r\nSLURM_TASKS_PER_NODE=4(x2)\r\nSLURM_JOB_UID=999226\r\nSLURM_TASK_PID=2279443\r\nSLURM_JOB_GPUS=0,1,2,3\r\nSLURM_LOCALID=0\r\nSLURM_SUBMIT_DIR=/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar\r\nSLURMD_NODENAME=hkn0710\r\nSLURM_JOB_START_TIME=1752670186\r\nSLURM_STEP_NODELIST=hkn0710\r\nSLURM_CLUSTER_NAME=hk\r\nSLURM_JOB_END_TIME=1752706186\r\nSLURM_PMI2_SRUN_PORT=42215\r\nSLURM_CPUS_ON_NODE=24\r\nSLURM_JOB_CPUS_PER_NODE=24(x2)\r\nSLURM_GPUS_ON_NODE=4\r\nSLURM_GTIDS=0\r\nSLURM_JOB_PARTITION=accelerated\r\nSLURM_TRES_PER_TASK=cpu=5\r\nSLURM_OOM_KILL_STEP=0\r\nSLURM_JOB_NUM_NODES=2\r\nSLURM_STEPID=4294967290\r\nSLURM_JOBID=3350418\r\nSLURM_PTY_PORT=33491\r\nSLURM_JOB_QOS=normal\r\nSLURM_LAUNCH_NODE_IPADDR=10.0.7.199\r\nSLURM_PTY_WIN_ROW=43\r\nSLURM_PMI2_PROC_MAPPING=(vector,(0,1,1))\r\nSLURMD_DEBUG=2\r\nSLURM_PROCID=0\r\nSLURM_CPUS_PER_TASK=5\r\nSLURM_NTASKS=8\r\nSLURM_TOPOLOGY_ADDR=hkibb.hkibbi1.hkibbi1e8.hkn0710\r\nSLURM_TOPOLOGY_ADDR_PATTERN=switch.switch.switch.node\r\nSLURM_SRUN_COMM_HOST=10.0.7.199\r\nSLURM_SCRIPT_CONTEXT=prolog_task\r\nSLURM_PTY_WIN_COL=147\r\nSLURM_NODELIST=hkn[0710-0711]\r\nSLURM_SRUN_COMM_PORT=44593\r\nSLURM_STEP_ID=4294967290\r\nSLURM_JOB_ACCOUNT=hk-project-p0023960\r\nSLURM_PRIO_PROCESS=0\r\nSLURM_NPROCS=8\r\nSLURM_NNODES=2\r\nSLURM_SUBMIT_HOST=hkn1991.localdomain\r\nSLURM_JOB_ID=3350418\r\nSLURM_NODEID=0\r\nSLURM_STEP_NUM_NODES=1\r\nSLURM_STEP_TASKS_PER_NODE=1\r\nSLURM_MPI_TYPE=pmi2\r\nSLURM_PMI2_STEP_NODES=hkn0710\r\nSLURM_CONF=/etc/slurm/slurm.conf\r\nSLURM_JOB_NAME=interactive\r\nSLURM_NTASKS_PER_NODE=4\r\nSLURM_STEP_LAUNCHER_PORT=44593\r\nSLURM_JOB_GID=502226\r\nSLURM_JOB_NODELIST=hkn[0710-0711]\r\n",,terminal_output +3450,7310115,"TERMINAL",0,0,"GpuFreq=control_disabled\r\nGpuFreq=control_disabled\r\n",,terminal_output +3451,7315026,"TERMINAL",0,0,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/tyro/_parsers.py:347: UserWarning: The field `param-dtype` is annotated with type ``, but the default value `` has type ``. We'll try to handle this gracefully, but it may cause unexpected behavior.\r\n warnings.warn(message)\r\n/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/tyro/_parsers.py:347: UserWarning: The field `param-dtype` is annotated with type ``, but the default value `` has type ``. 
We'll try to handle this gracefully, but it may cause unexpected behavior.\r\n warnings.warn(message)\r\n/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/tyro/_parsers.py:347: UserWarning: The field `param-dtype` is annotated with type ``, but the default value `` has type ``. We'll try to handle this gracefully, but it may cause unexpected behavior.\r\n warnings.warn(message)\r\n/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/tyro/_parsers.py:347: UserWarning: The field `param-dtype` is annotated with type ``, but the default value `` has type ``. We'll try to handle this gracefully, but it may cause unexpected behavior.\r\n warnings.warn(message)\r\n/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/tyro/_parsers.py:347: UserWarning: The field `dtype` is annotated with type ``, but the default value `` has type ``. We'll try to handle this gracefully, but it may cause unexpected behavior.\r\n warnings.warn(message)\r\n/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/tyro/_parsers.py:347: UserWarning: The field `dtype` is annotated with type ``, but the default value `` has type ``. We'll try to handle this gracefully, but it may cause unexpected behavior.\r\n warnings.warn(message)\r\n/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/tyro/_parsers.py:347: UserWarning: The field `dtype` is annotated with type ``, but the default value `` has type ``. We'll try to handle this gracefully, but it may cause unexpected behavior.\r\n warnings.warn(message)\r\n/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/tyro/_parsers.py:347: UserWarning: The field `dtype` is annotated with type ``, but the default value `` has type ``. We'll try to handle this gracefully, but it may cause unexpected behavior.\r\n warnings.warn(message)\r\n/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/tyro/_parsers.py:347: UserWarning: The field `param-dtype` is annotated with type ``, but the default value `` has type ``. We'll try to handle this gracefully, but it may cause unexpected behavior.\r\n warnings.warn(message)\r\n/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/tyro/_parsers.py:347: UserWarning: The field `param-dtype` is annotated with type ``, but the default value `` has type ``. We'll try to handle this gracefully, but it may cause unexpected behavior.\r\n warnings.warn(message)\r\n/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/tyro/_parsers.py:347: UserWarning: The field `param-dtype` is annotated with type ``, but the default value `` has type ``. We'll try to handle this gracefully, but it may cause unexpected behavior.\r\n warnings.warn(message)\r\n/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/tyro/_parsers.py:347: UserWarning: The field `param-dtype` is annotated with type ``, but the default value `` has type ``. 
We'll try to handle this gracefully, but it may cause unexpected behavior.\r\n warnings.warn(message)\r\n/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/tyro/_parsers.py:347: UserWarning: The field `dtype` is annotated with type ``, but the default value `` has type ``. We'll try to handle this gracefully, but it may cause unexpected behavior.\r\n warnings.warn(message)\r\n/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/tyro/_parsers.py:347: UserWarning: The field `dtype` is annotated with type ``, but the default value `` has type ``. We'll try to handle this gracefully, but it may cause unexpected behavior.\r\n warnings.warn(message)\r\n/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/tyro/_parsers.py:347: UserWarning: The field `dtype` is annotated with type ``, but the default value `` has type ``. We'll try to handle this gracefully, but it may cause unexpected behavior.\r\n warnings.warn(message)\r\n/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/tyro/_parsers.py:347: UserWarning: The field `dtype` is annotated with type ``, but the default value `` has type ``. We'll try to handle this gracefully, but it may cause unexpected behavior.\r\n warnings.warn(message)\r\n",,terminal_output +3452,7318687,"TERMINAL",0,0,"2025-07-16 16:51:26.188864: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +3453,7318799,"TERMINAL",0,0,"2025-07-16 16:51:26.232401: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n2025-07-16 16:51:26.235522: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n2025-07-16 16:51:26.242134: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n2025-07-16 16:51:26.244663: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n2025-07-16 16:51:26.252474: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. 
Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n2025-07-16 16:51:26.254494: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n2025-07-16 16:51:26.261430: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +3454,7333152,"TERMINAL",0,0,"2025-07-16 16:51:40.650315: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +3455,7333469,"TERMINAL",0,0,"2025-07-16 16:51:40.882364: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +3456,7333522,"TERMINAL",0,0,"2025-07-16 16:51:40.991073: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +3457,7333673,"TERMINAL",0,0,"2025-07-16 16:51:41.175090: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +3458,7334085,"TERMINAL",0,0,"2025-07-16 16:51:41.569331: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +3459,7334145,"TERMINAL",0,0,"2025-07-16 16:51:41.640270: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +3460,7334393,"TERMINAL",0,0,"2025-07-16 16:51:41.895241: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +3461,7334594,"TERMINAL",0,0,"2025-07-16 16:51:42.093317: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. 
Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +3462,7336715,"TERMINAL",0,0,"2025-07-16 16:51:44.212089: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +3463,7337054,"TERMINAL",0,0,"2025-07-16 16:51:44.446378: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +3464,7337160,"TERMINAL",0,0,"2025-07-16 16:51:44.620379: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +3465,7337328,"TERMINAL",0,0,"2025-07-16 16:51:44.828603: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +3466,7337788,"TERMINAL",0,0,"2025-07-16 16:51:45.287958: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +3467,7337894,"TERMINAL",0,0,"2025-07-16 16:51:45.380493: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +3468,7338000,"TERMINAL",0,0,"2025-07-16 16:51:45.478223: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +3469,7338588,"TERMINAL",0,0,"2025-07-16 16:51:46.041036: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +3470,7340230,"TERMINAL",0,0,"wandb: Currently logged in as: mihir-mahajan2002 (instant-uv) to https://api.wandb.ai. 
Use `wandb login --relogin` to force relogin\r\n",,terminal_output +3471,7340864,"TERMINAL",0,0,"wandb: Tracking run with wandb version 0.19.11\r\nwandb: Run data is saved locally in /hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/wandb/run-20250716_165147-a3vd1ho1\r\nwandb: Run `wandb offline` to turn off syncing.\r\nwandb: Syncing run dynamics-debug-run-debug-mihir\r\nwandb: ⭐️ View project at https://wandb.ai/instant-uv/jafar\r\nwandb: 🚀 View run at https://wandb.ai/instant-uv/jafar/runs/a3vd1ho1\r\n",,terminal_output +3472,7420293,"TERMINAL",0,0,"WARNING:absl:Dropping 2 examples of 89394 examples (shard 8).\r\nWARNING:absl:Dropping 2 examples of 89394 examples (shard 8).\r\nWARNING:absl:Dropping 2 examples of 89394 examples (shard 8).\r\n",,terminal_output +3473,7420963,"TERMINAL",0,0,"WARNING:absl:Dropping 2 examples of 89394 examples (shard 8).\r\nWARNING:absl:Dropping 2 examples of 89394 examples (shard 8).\r\nWARNING:absl:Dropping 2 examples of 89394 examples (shard 8).\r\n",,terminal_output +3474,7425937,"TERMINAL",0,0,"WARNING:absl:Dropping 2 examples of 89394 examples (shard 8).\r\n",,terminal_output +3475,7426559,"TERMINAL",0,0,"WARNING:absl:Dropping 2 examples of 89394 examples (shard 8).\r\nWARNING:absl:Missing metrics for step 120000\r\nERROR:absl:File /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/big-runs/tokenizer-lr-scaling/train_tokenizer_lr_sweep_1e-4/120000/metrics/metrics not found.\r\nWARNING:absl:Missing metrics for step 40000\r\nERROR:absl:File /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/big-runs/tokenizer-lr-scaling/train_tokenizer_lr_sweep_1e-4/040000/metrics/metrics not found.\r\nWARNING:absl:Missing metrics for step 80000\r\nERROR:absl:File /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/big-runs/tokenizer-lr-scaling/train_tokenizer_lr_sweep_1e-4/080000/metrics/metrics not found.\r\nWARNING:absl:Missing metrics for step 140000\r\nERROR:absl:File /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/big-runs/tokenizer-lr-scaling/train_tokenizer_lr_sweep_1e-4/140000/metrics/metrics not found.\r\nWARNING:absl:Missing metrics for step 40000\r\nERROR:absl:File /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/big-runs/tokenizer-lr-scaling/train_tokenizer_lr_sweep_1e-4/040000/metrics/metrics not found.\r\nWARNING:absl:Missing metrics for step 144000\r\nERROR:absl:File /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/big-runs/tokenizer-lr-scaling/train_tokenizer_lr_sweep_1e-4/144000/metrics/metrics not found.\r\nWARNING:absl:Missing metrics for step 145000\r\nERROR:absl:File /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/big-runs/tokenizer-lr-scaling/train_tokenizer_lr_sweep_1e-4/145000/metrics/metrics not found.\r\nWARNING:absl:Missing metrics for step 144000\r\nERROR:absl:File /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/big-runs/tokenizer-lr-scaling/train_tokenizer_lr_sweep_1e-4/144000/metrics/metrics not found.\r\nWARNING:absl:Missing metrics for step 120000\r\nERROR:absl:File /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/big-runs/tokenizer-lr-scaling/train_tokenizer_lr_sweep_1e-4/120000/metrics/metrics not found.\r\nWARNING:absl:Missing metrics for step 145000\r\nERROR:absl:File /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/big-runs/tokenizer-lr-scaling/train_tokenizer_lr_sweep_1e-4/145000/metrics/metrics not found.\r\nWARNING:absl:Missing 
metrics for step 100000\r\nERROR:absl:File /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/big-runs/tokenizer-lr-scaling/train_tokenizer_lr_sweep_1e-4/100000/metrics/metrics not found.\r\nWARNING:absl:Missing metrics for step 120000\r\nERROR:absl:File /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/big-runs/tokenizer-lr-scaling/train_tokenizer_lr_sweep_1e-4/120000/metrics/metrics not found.\r\nWARNING:absl:Missing metrics for step 120000\r\nWARNING:absl:Missing metrics for step 145000\r\nWARNING:absl:Missing metrics for step 80000\r\nERROR:absl:File /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/big-runs/tokenizer-lr-scaling/train_tokenizer_lr_sweep_1e-4/120000/metrics/metrics not found.\r\nERROR:absl:File /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/big-runs/tokenizer-lr-scaling/train_tokenizer_lr_sweep_1e-4/080000/metrics/metrics not found.\r\nERROR:absl:File /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/big-runs/tokenizer-lr-scaling/train_tokenizer_lr_sweep_1e-4/145000/metrics/metrics not found.\r\nWARNING:absl:Missing metrics for step 140000\r\nERROR:absl:File /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/big-runs/tokenizer-lr-scaling/train_tokenizer_lr_sweep_1e-4/140000/metrics/metrics not found.\r\nWARNING:absl:Missing metrics for step 120000\r\nERROR:absl:File /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/big-runs/tokenizer-lr-scaling/train_tokenizer_lr_sweep_1e-4/120000/metrics/metrics not found.\r\nWARNING:absl:Missing metrics for step 144000\r\nERROR:absl:File /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/big-runs/tokenizer-lr-scaling/train_tokenizer_lr_sweep_1e-4/144000/metrics/metrics not found.\r\nWARNING:absl:Missing metrics for step 60000\r\nERROR:absl:File /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/big-runs/tokenizer-lr-scaling/train_tokenizer_lr_sweep_1e-4/060000/metrics/metrics not found.\r\nWARNING:absl:Missing metrics for step 80000\r\nERROR:absl:File /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/big-runs/tokenizer-lr-scaling/train_tokenizer_lr_sweep_1e-4/080000/metrics/metrics not found.\r\nWARNING:absl:Missing metrics for step 40000\r\nWARNING:absl:Missing metrics for step 40000\r\nERROR:absl:File /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/big-runs/tokenizer-lr-scaling/train_tokenizer_lr_sweep_1e-4/040000/metrics/metrics not found.\r\nERROR:absl:File /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/big-runs/tokenizer-lr-scaling/train_tokenizer_lr_sweep_1e-4/040000/metrics/metrics not found.\r\nWARNING:absl:Missing metrics for step 60000\r\nERROR:absl:File /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/big-runs/tokenizer-lr-scaling/train_tokenizer_lr_sweep_1e-4/060000/metrics/metrics not found.\r\nWARNING:absl:Missing metrics for step 140000\r\nERROR:absl:File /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/big-runs/tokenizer-lr-scaling/train_tokenizer_lr_sweep_1e-4/140000/metrics/metrics not found.\r\nWARNING:absl:Missing metrics for step 80000\r\nWARNING:absl:Missing metrics for step 120000\r\nERROR:absl:File /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/big-runs/tokenizer-lr-scaling/train_tokenizer_lr_sweep_1e-4/080000/metrics/metrics not found.\r\nERROR:absl:File 
/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/big-runs/tokenizer-lr-scaling/train_tokenizer_lr_sweep_1e-4/120000/metrics/metrics not found.\r\nWARNING:absl:Missing metrics for step 146000\r\nERROR:absl:File /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/big-runs/tokenizer-lr-scaling/train_tokenizer_lr_sweep_1e-4/146000/metrics/metrics not found.\r\nWARNING:absl:Missing metrics for step 20000\r\nWARNING:absl:Missing metrics for step 60000\r\nERROR:absl:File /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/big-runs/tokenizer-lr-scaling/train_tokenizer_lr_sweep_1e-4/020000/metrics/metrics not found.\r\nERROR:absl:File /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/big-runs/tokenizer-lr-scaling/train_tokenizer_lr_sweep_1e-4/060000/metrics/metrics not found.\r\nWARNING:absl:Missing metrics for step 144000\r\nERROR:absl:File /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/big-runs/tokenizer-lr-scaling/train_tokenizer_lr_sweep_1e-4/144000/metrics/metrics not found.\r\nWARNING:absl:Missing metrics for step 140000\r\nERROR:absl:File /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/big-runs/tokenizer-lr-scaling/train_tokenizer_lr_sweep_1e-4/140000/metrics/metrics not found.\r\nWARNING:absl:Missing metrics for step 100000\r\nERROR:absl:File /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/big-runs/tokenizer-lr-scaling/train_tokenizer_lr_sweep_1e-4/100000/metrics/metrics not found.\r\nWARNING:absl:Missing metrics for step 80000\r\nERROR:absl:File /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/big-runs/tokenizer-lr-scaling/train_tokenizer_lr_sweep_1e-4/080000/metrics/metrics not found.\r\nWARNING:absl:Missing metrics for step 145000\r\nERROR:absl:File /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/big-runs/tokenizer-lr-scaling/train_tokenizer_lr_sweep_1e-4/145000/metrics/metrics not found.\r\nWARNING:absl:Missing metrics for step 144000\r\nWARNING:absl:Missing metrics for step 40000\r\nERROR:absl:File /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/big-runs/tokenizer-lr-scaling/train_tokenizer_lr_sweep_1e-4/040000/metrics/metrics not found.\r\nERROR:absl:File /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/big-runs/tokenizer-lr-scaling/train_tokenizer_lr_sweep_1e-4/144000/metrics/metrics not found.\r\nWARNING:absl:Missing metrics for step 145000\r\nERROR:absl:File /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/big-runs/tokenizer-lr-scaling/train_tokenizer_lr_sweep_1e-4/145000/metrics/metrics not found.\r\nWARNING:absl:Missing metrics for step 146000\r\nWARNING:absl:Missing metrics for step 146000\r\nERROR:absl:File /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/big-runs/tokenizer-lr-scaling/train_tokenizer_lr_sweep_1e-4/146000/metrics/metrics not found.\r\nERROR:absl:File /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/big-runs/tokenizer-lr-scaling/train_tokenizer_lr_sweep_1e-4/146000/metrics/metrics not found.\r\nWARNING:absl:Missing metrics for step 100000\r\nERROR:absl:File /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/big-runs/tokenizer-lr-scaling/train_tokenizer_lr_sweep_1e-4/100000/metrics/metrics not found.\r\nWARNING:absl:Missing metrics for step 146000\r\nWARNING:absl:Missing metrics for step 146000\r\nERROR:absl:File 
/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/big-runs/tokenizer-lr-scaling/train_tokenizer_lr_sweep_1e-4/146000/metrics/metrics not found.\r\nERROR:absl:File /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/big-runs/tokenizer-lr-scaling/train_tokenizer_lr_sweep_1e-4/146000/metrics/metrics not found.\r\nWARNING:absl:Missing metrics for step 80000\r\nERROR:absl:File /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/big-runs/tokenizer-lr-scaling/train_tokenizer_lr_sweep_1e-4/080000/metrics/metrics not found.\r\nWARNING:absl:Missing metrics for step 60000\r\nWARNING:absl:Missing metrics for step 40000\r\nERROR:absl:File /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/big-runs/tokenizer-lr-scaling/train_tokenizer_lr_sweep_1e-4/040000/metrics/metrics not found.\r\nERROR:absl:File /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/big-runs/tokenizer-lr-scaling/train_tokenizer_lr_sweep_1e-4/060000/metrics/metrics not found.\r\nWARNING:absl:Missing metrics for step 144000\r\nERROR:absl:File /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/big-runs/tokenizer-lr-scaling/train_tokenizer_lr_sweep_1e-4/144000/metrics/metrics not found.\r\nWARNING:absl:Missing metrics for step 100000\r\nWARNING:absl:Missing metrics for step 146000\r\nERROR:absl:File /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/big-runs/tokenizer-lr-scaling/train_tokenizer_lr_sweep_1e-4/146000/metrics/metrics not found.\r\nERROR:absl:File /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/big-runs/tokenizer-lr-scaling/train_tokenizer_lr_sweep_1e-4/100000/metrics/metrics not found.\r\nWARNING:absl:Missing metrics for step 100000\r\nERROR:absl:File /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/big-runs/tokenizer-lr-scaling/train_tokenizer_lr_sweep_1e-4/100000/metrics/metrics not found.\r\nWARNING:absl:Missing metrics for step 100000\r\nERROR:absl:File /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/big-runs/tokenizer-lr-scaling/train_tokenizer_lr_sweep_1e-4/100000/metrics/metrics not found.\r\nWARNING:absl:Missing metrics for step 20000\r\nERROR:absl:File /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/big-runs/tokenizer-lr-scaling/train_tokenizer_lr_sweep_1e-4/020000/metrics/metrics not found.\r\nWARNING:absl:Missing metrics for step 20000\r\nERROR:absl:File /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/big-runs/tokenizer-lr-scaling/train_tokenizer_lr_sweep_1e-4/020000/metrics/metrics not found.\r\nWARNING:absl:Missing metrics for step 145000\r\nERROR:absl:File /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/big-runs/tokenizer-lr-scaling/train_tokenizer_lr_sweep_1e-4/145000/metrics/metrics not found.\r\nWARNING:absl:Missing metrics for step 20000\r\nWARNING:absl:Missing metrics for step 140000\r\nERROR:absl:File /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/big-runs/tokenizer-lr-scaling/train_tokenizer_lr_sweep_1e-4/140000/metrics/metrics not found.\r\nERROR:absl:File /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/big-runs/tokenizer-lr-scaling/train_tokenizer_lr_sweep_1e-4/020000/metrics/metrics not found.\r\nWARNING:absl:Missing metrics for step 140000\r\nERROR:absl:File /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/big-runs/tokenizer-lr-scaling/train_tokenizer_lr_sweep_1e-4/140000/metrics/metrics not 
found.\r\nWARNING:absl:Missing metrics for step 60000\r\nERROR:absl:File /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/big-runs/tokenizer-lr-scaling/train_tokenizer_lr_sweep_1e-4/060000/metrics/metrics not found.\r\nWARNING:absl:Missing metrics for step 60000\r\nERROR:absl:File /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/big-runs/tokenizer-lr-scaling/train_tokenizer_lr_sweep_1e-4/060000/metrics/metrics not found.\r\nWARNING:absl:Missing metrics for step 20000\r\nERROR:absl:File /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/big-runs/tokenizer-lr-scaling/train_tokenizer_lr_sweep_1e-4/020000/metrics/metrics not found.\r\nWARNING:absl:Missing metrics for step 20000\r\nERROR:absl:File /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/big-runs/tokenizer-lr-scaling/train_tokenizer_lr_sweep_1e-4/020000/metrics/metrics not found.\r\nWARNING:absl:Missing metrics for step 40000\r\nERROR:absl:File /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/big-runs/tokenizer-lr-scaling/train_tokenizer_lr_sweep_1e-4/040000/metrics/metrics not found.\r\nWARNING:absl:Missing metrics for step 144000\r\nERROR:absl:File /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/big-runs/tokenizer-lr-scaling/train_tokenizer_lr_sweep_1e-4/144000/metrics/metrics not found.\r\nWARNING:absl:Missing metrics for step 140000\r\nERROR:absl:File /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/big-runs/tokenizer-lr-scaling/train_tokenizer_lr_sweep_1e-4/140000/metrics/metrics not found.\r\nWARNING:absl:Missing metrics for step 120000\r\nERROR:absl:File /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/big-runs/tokenizer-lr-scaling/train_tokenizer_lr_sweep_1e-4/120000/metrics/metrics not found.\r\nWARNING:absl:Missing metrics for step 145000\r\nERROR:absl:File /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/big-runs/tokenizer-lr-scaling/train_tokenizer_lr_sweep_1e-4/145000/metrics/metrics not found.\r\nWARNING:absl:Missing metrics for step 80000\r\nERROR:absl:File /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/big-runs/tokenizer-lr-scaling/train_tokenizer_lr_sweep_1e-4/080000/metrics/metrics not found.\r\nWARNING:absl:Missing metrics for step 146000\r\nERROR:absl:File /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/big-runs/tokenizer-lr-scaling/train_tokenizer_lr_sweep_1e-4/146000/metrics/metrics not found.\r\nWARNING:absl:Missing metrics for step 60000\r\nERROR:absl:File /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/big-runs/tokenizer-lr-scaling/train_tokenizer_lr_sweep_1e-4/060000/metrics/metrics not found.\r\nWARNING:absl:Missing metrics for step 100000\r\nERROR:absl:File /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/big-runs/tokenizer-lr-scaling/train_tokenizer_lr_sweep_1e-4/100000/metrics/metrics not found.\r\nWARNING:absl:Missing metrics for step 20000\r\nERROR:absl:File /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/big-runs/tokenizer-lr-scaling/train_tokenizer_lr_sweep_1e-4/020000/metrics/metrics not found.\r\nWARNING:absl:Missing metrics for step 80000\r\nERROR:absl:File /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/big-runs/tokenizer-lr-scaling/train_tokenizer_lr_sweep_1e-4/080000/metrics/metrics not found.\r\nWARNING:absl:Missing metrics for step 144000\r\nERROR:absl:File 
/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/big-runs/tokenizer-lr-scaling/train_tokenizer_lr_sweep_1e-4/144000/metrics/metrics not found.\r\nWARNING:absl:Missing metrics for step 120000\r\nERROR:absl:File /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/big-runs/tokenizer-lr-scaling/train_tokenizer_lr_sweep_1e-4/120000/metrics/metrics not found.\r\nWARNING:absl:Missing metrics for step 40000\r\nERROR:absl:File /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/big-runs/tokenizer-lr-scaling/train_tokenizer_lr_sweep_1e-4/040000/metrics/metrics not found.\r\nWARNING:absl:Missing metrics for step 140000\r\nERROR:absl:File /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/big-runs/tokenizer-lr-scaling/train_tokenizer_lr_sweep_1e-4/140000/metrics/metrics not found.\r\nWARNING:absl:Missing metrics for step 146000\r\nERROR:absl:File /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/big-runs/tokenizer-lr-scaling/train_tokenizer_lr_sweep_1e-4/146000/metrics/metrics not found.\r\nWARNING:absl:Missing metrics for step 145000\r\nERROR:absl:File /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/big-runs/tokenizer-lr-scaling/train_tokenizer_lr_sweep_1e-4/145000/metrics/metrics not found.\r\nWARNING:absl:Missing metrics for step 60000\r\nERROR:absl:File /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/big-runs/tokenizer-lr-scaling/train_tokenizer_lr_sweep_1e-4/060000/metrics/metrics not found.\r\nWARNING:absl:Missing metrics for step 100000\r\nERROR:absl:File /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/big-runs/tokenizer-lr-scaling/train_tokenizer_lr_sweep_1e-4/100000/metrics/metrics not found.\r\nWARNING:absl:Missing metrics for step 20000\r\nERROR:absl:File /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/big-runs/tokenizer-lr-scaling/train_tokenizer_lr_sweep_1e-4/020000/metrics/metrics not found.\r\n",,terminal_output +3476,7431489,"TERMINAL",0,0,"Running on 8 devices.\r\nCounting all components: ['tokenizer', 'lam', 'dynamics']\r\nParameter counts:\r\n{'tokenizer': 37989616, 'lam': 19349472, 'dynamics': 29734912, 'total': 87074000}\r\nRunning on 8 devices.\r\nCounting all components: ['tokenizer', 'lam', 'dynamics']\r\nParameter counts:\r\n{'tokenizer': 37989616, 'lam': 19349472, 'dynamics': 29734912, 'total': 87074000}\r\nRunning on 8 devices.\r\nCounting all components: ['tokenizer', 'lam', 'dynamics']\r\nParameter counts:\r\n{'tokenizer': 37989616, 'lam': 19349472, 'dynamics': 29734912, 'total': 87074000}\r\nRunning on 8 devices.\r\nCounting all components: ['tokenizer', 'lam', 'dynamics']\r\nParameter counts:\r\n{'tokenizer': 37989616, 'lam': 19349472, 'dynamics': 29734912, 'total': 87074000}\r\nRunning on 8 devices.\r\nCounting all components: ['tokenizer', 'lam', 'dynamics']\r\nParameter counts:\r\n{'tokenizer': 37989616, 'lam': 19349472, 'dynamics': 29734912, 'total': 87074000}\r\nRunning on 8 devices.\r\nCounting all components: ['tokenizer', 'lam', 'dynamics']\r\nParameter counts:\r\n{'tokenizer': 37989616, 'lam': 19349472, 'dynamics': 29734912, 'total': 87074000}\r\nRunning on 8 devices.\r\nCounting all components: ['tokenizer', 'lam', 'dynamics']\r\nParameter counts:\r\n{'tokenizer': 37989616, 'lam': 19349472, 'dynamics': 29734912, 'total': 87074000}\r\n",,terminal_output +3477,7431775,"TERMINAL",0,0,"Running on 8 devices.\r\nCounting all components: ['tokenizer', 'lam', 'dynamics']\r\nParameter 
counts:\r\n{'tokenizer': 37989616, 'lam': 19349472, 'dynamics': 29734912, 'total': 87074000}\r\n",,terminal_output
+3478,7436913,"TERMINAL",0,0,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/tyro/_parsers.py:347: UserWarning: The field `param-dtype` is annotated with type ``, but the default value `` has type ``. We'll try to handle this gracefully, but it may cause unexpected behavior.\r\n warnings.warn(message)\r\n/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/tyro/_parsers.py:347: UserWarning: The field `dtype` is annotated with type ``, but the default value `` has type ``. We'll try to handle this gracefully, but it may cause unexpected behavior.\r\n warnings.warn(message)\r\n[this `param-dtype`/`dtype` UserWarning pair repeated verbatim by every spawned process]\r\n",,terminal_output
+3497,7440172,"TERMINAL",0,0,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/tyro/_parsers.py:347: UserWarning: The field `param-dtype` is annotated with type ``, but the default value `` has type ``. We'll try to handle this gracefully, but it may cause unexpected behavior.\r\n warnings.warn(message)\r\n/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/tyro/_parsers.py:347: UserWarning: The field `dtype` is annotated with type ``, but the default value `` has type ``. 
Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n2025-07-16 16:54:02.092979: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n2025-07-16 16:54:02.094280: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n2025-07-16 16:54:02.094300: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n2025-07-16 16:54:02.095781: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +3499,7476220,"TERMINAL",0,0,"2025-07-16 16:54:03.666056: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n2025-07-16 16:54:03.667042: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n2025-07-16 16:54:03.667583: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n2025-07-16 16:54:03.668877: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n2025-07-16 16:54:03.668895: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n2025-07-16 16:54:03.670372: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. 
Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +3500,7476367,"TERMINAL",0,0,"2025-07-16 16:54:03.867055: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n2025-07-16 16:54:03.868049: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n2025-07-16 16:54:03.868586: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n2025-07-16 16:54:03.869937: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n2025-07-16 16:54:03.869956: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n2025-07-16 16:54:03.871456: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +3501,7476967,"TERMINAL",0,0,"2025-07-16 16:54:04.465879: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n2025-07-16 16:54:04.466842: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n2025-07-16 16:54:04.467383: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n2025-07-16 16:54:04.468678: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. 
Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n2025-07-16 16:54:04.468698: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n2025-07-16 16:54:04.470171: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +3502,7483187,"TERMINAL",0,0,"2025-07-16 16:54:10.686611: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n2025-07-16 16:54:10.687607: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n2025-07-16 16:54:10.688152: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n2025-07-16 16:54:10.689465: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n2025-07-16 16:54:10.689485: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n2025-07-16 16:54:10.690971: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +3503,7483908,"TERMINAL",0,0,"2025-07-16 16:54:11.406497: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n2025-07-16 16:54:11.407461: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. 
Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n2025-07-16 16:54:11.407998: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n2025-07-16 16:54:11.409309: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n2025-07-16 16:54:11.409328: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n2025-07-16 16:54:11.410812: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +3504,7486255,"TERMINAL",0,0,"2025-07-16 16:54:13.553570: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n2025-07-16 16:54:13.554588: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n2025-07-16 16:54:13.555146: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n2025-07-16 16:54:13.556520: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n2025-07-16 16:54:13.556540: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n2025-07-16 16:54:13.558122: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. 
Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +3505,7487279,"TERMINAL",0,0,"2025-07-16 16:54:14.689057: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n2025-07-16 16:54:14.690087: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n2025-07-16 16:54:14.690633: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n2025-07-16 16:54:14.692021: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n2025-07-16 16:54:14.692042: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n2025-07-16 16:54:14.693582: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. 
Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +3506,7605719,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",0,0,"",python,tab +3507,7608219,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",2526,0,"",python,selection_mouse +3508,7609242,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",2467,0,"",python,selection_mouse +3509,7610560,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",2670,0,"",python,selection_mouse +3510,7610723,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",2664,12,"token_logits",python,selection_mouse +3511,7834011,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",3778,0,"",python,selection_mouse +3512,7835893,"TERMINAL",0,0,"^Csrun: interrupt (one more within 1 sec to abort)\r\nsrun: StepId=3350418.8 tasks 0-7: running\r\n",,terminal_output +3513,7836102,"TERMINAL",0,0,"^Csrun: sending Ctrl-C to StepId=3350418.8\r\nProcess SpawnProcess-1:\r\nProcess SpawnProcess-1:\r\nProcess SpawnProcess-3:\r\nProcess SpawnProcess-4:\r\nProcess SpawnProcess-4:\r\nProcess SpawnProcess-3:\r\nProcess SpawnProcess-2:\r\nProcess SpawnProcess-5:\r\nProcess SpawnProcess-5:\r\nProcess SpawnProcess-7:\r\nProcess SpawnProcess-2:\r\nProcess SpawnProcess-6:\r\nProcess SpawnProcess-1:\r\nProcess SpawnProcess-7:\r\nProcess SpawnProcess-6:\r\nProcess SpawnProcess-4:\r\nProcess SpawnProcess-3:\r\nProcess SpawnProcess-1:\r\nProcess SpawnProcess-2:\r\nProcess SpawnProcess-5:\r\nProcess SpawnProcess-6:\r\nProcess SpawnProcess-2:\r\nProcess SpawnProcess-3:\r\nsrun: forcing job termination\r\nProcess SpawnProcess-7:\r\nProcess SpawnProcess-3:\r\nProcess SpawnProcess-6:\r\nProcess SpawnProcess-2:\r\nProcess SpawnProcess-7:\r\nProcess SpawnProcess-4:\r\nProcess SpawnProcess-7:\r\nProcess SpawnProcess-3:\r\nProcess SpawnProcess-5:\r\nProcess SpawnProcess-4:\r\nProcess SpawnProcess-5:\r\nProcess SpawnProcess-1:\r\nProcess SpawnProcess-5:\r\nProcess SpawnProcess-7:\r\nProcess SpawnProcess-1:\r\nProcess SpawnProcess-6:\r\nProcess SpawnProcess-2:\r\nProcess SpawnProcess-6:\r\nProcess SpawnProcess-4:\r\nProcess SpawnProcess-5:\r\nProcess SpawnProcess-7:\r\nProcess SpawnProcess-1:\r\nProcess SpawnProcess-7:\r\nProcess SpawnProcess-1:\r\nProcess SpawnProcess-5:\r\nProcess SpawnProcess-2:\r\nProcess SpawnProcess-6:\r\nProcess SpawnProcess-2:\r\nProcess SpawnProcess-6:\r\nProcess SpawnProcess-3:\r\nProcess SpawnProcess-4:\r\nProcess SpawnProcess-4:\r\nProcess SpawnProcess-3:\r\nProcess SpawnProcess-8:\r\nsrun: Job step aborted: Waiting up to 32 seconds for job step to finish.\r\nslurmstepd: error: *** STEP 3350418.8 ON hkn0710 CANCELLED AT 2025-07-16T17:00:03 ***\r\n",,terminal_output +3514,7836257,"TERMINAL",0,0,"^Csrun: sending Ctrl-C to StepId=3350418.8\r\nsrun: job abort in progress\r\n",,terminal_output +3515,7836440,"TERMINAL",0,0,"^Csrun: sending Ctrl-C to StepId=3350418.8\r\n",,terminal_output +3516,7836640,"TERMINAL",0,0,"^Csrun: sending Ctrl-C to StepId=3350418.8\r\nsrun: job abort in progress\r\n",,terminal_output +3517,7836833,"TERMINAL",0,0,"^Csrun: sending Ctrl-C to StepId=3350418.8\r\n",,terminal_output +3518,7837273,"TERMINAL",0,0,"^C",,terminal_output +3519,7837483,"TERMINAL",0,0,"^Csrun: sending Ctrl-C to StepId=3350418.8\r\n",,terminal_output 
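The startup log above is dominated by two repeated, apparently benign messages: tyro's `param-dtype`/`dtype` annotation warnings (the concrete type names were lost in capture) and XLA's dot_search_space autotuning notice, each printed once per process or device. A hedged sketch, not part of the recorded project, of how such noise could be silenced before launching the run:

```python
# Illustrative only: suppress the two warning families that flood the log.
# Assumes both are benign for this run; the regexes are best guesses.
import os
import warnings

# tyro emits its dtype-annotation warnings via warnings.warn from
# tyro._parsers, so a module-scoped filter catches them in any process
# that runs this setup code before parsing the config.
warnings.filterwarnings(
    "ignore",
    message=r".*is annotated with type.*",
    category=UserWarning,
    module=r"tyro.*",
)

# XLA's autotuning messages come from C++ logging; raising the log
# threshold before JAX initializes its backend hides W-level output.
os.environ.setdefault("TF_CPP_MIN_LOG_LEVEL", "2")

import jax  # noqa: E402  (imported after the env var on purpose)
```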
+3520,7837605,"TERMINAL",0,0,"]0;tum_cte0515@hkn0710:~/Projects/jafar[?2004h(jafar) [tum_cte0515@hkn0710 jafar]$ ",,terminal_output +3521,7837679,"TERMINAL",0,0,"^C[?2004l\r[?2004h[?2004l\r\r\n]0;tum_cte0515@hkn0710:~/Projects/jafar[?2004h(jafar) [tum_cte0515@hkn0710 jafar]$ ",,terminal_output +3522,7843722,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",0,0,"",python,tab +3523,7844429,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",0,0,"",python,tab +3524,7845362,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",3534,0,"",python,selection_mouse +3525,7846041,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",2800,0,"",python,selection_mouse +3526,7851532,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",0,0,"",python,tab +3527,7851533,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",2755,0,"",python,selection_mouse +3528,7852058,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",2623,0,"",python,selection_mouse +3529,7852087,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",2622,0,"",python,selection_command +3530,7853305,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",2788,0,"",python,selection_mouse +3531,7854396,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",2787,0,"",python,selection_mouse +3532,7855128,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",2800,0,"",python,selection_mouse +3533,7856396,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",2800,0,"\n",python,content +3534,7856475,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",2801,0,"",python,content +3535,7857226,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",2805,1,"",python,content +3536,7857561,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",2804,1,"",python,content +3537,7857717,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",2803,1,"",python,content +3538,7857886,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",2802,1,"",python,content +3539,7858070,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",2801,1,"",python,content +3540,7858236,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",2801,0," ",python,content +3541,7858898,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",2805,0,"j",python,content +3542,7858899,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",2806,0,"",python,selection_keyboard +3543,7859221,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",2806,0,"a",python,content +3544,7859223,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",2807,0,"",python,selection_keyboard +3545,7859374,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",2807,0,"x",python,content +3546,7859376,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",2808,0,"",python,selection_keyboard 
+3547,7859785,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",2805,3,"jax",python,content +3548,7860374,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",2808,0,".",python,content +3549,7860375,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",2809,0,"",python,selection_keyboard +3550,7860663,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",2809,0,"d",python,content +3551,7860664,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",2810,0,"",python,selection_keyboard +3552,7860877,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",2810,0,"e",python,content +3553,7860878,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",2811,0,"",python,selection_keyboard +3554,7861774,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",2809,2,"debug",python,content +3555,7862138,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",2814,0,".",python,content +3556,7862139,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",2815,0,"",python,selection_keyboard +3557,7862567,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",2815,0,"b",python,content +3558,7862569,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",2816,0,"",python,selection_keyboard +3559,7862764,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",2816,0,"r",python,content +3560,7862765,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",2817,0,"",python,selection_keyboard +3561,7863063,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",2815,2,"breakpoint",python,content +3562,7863825,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",2825,0,"()",python,content +3563,7863826,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",2826,0,"",python,selection_keyboard +3564,7863900,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",2826,1,")",python,content +3565,7863901,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",2827,0,"",python,selection_keyboard +3566,7864126,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",2826,0,"",python,selection_command +3567,7864748,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",2800,0,"",python,selection_command +3568,7865238,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",2787,0,"",python,selection_command +3569,7865286,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",2750,0,"",python,selection_command +3570,7865313,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",2676,0,"",python,selection_command +3571,7865346,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",2649,0,"",python,selection_command +3572,7865369,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",2622,0,"",python,selection_command 
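Annotation: the content events above capture `jax.debug.breakpoint()` being typed into train_dynamics.py (offset ~2805), and the rows that follow show the same call inserted again at a second offset (2994), i.e. two probe points in the training step. A minimal sketch of what that call does, assuming a toy jitted function; `loss_fn`, `params`, and `batch` are illustrative names, not taken from the recording:

import jax
import jax.numpy as jnp

@jax.jit
def loss_fn(params, batch):
    logits = batch @ params
    # Pauses execution here even inside jit-compiled code and drops into
    # a pdb-like prompt where runtime array values can be inspected.
    jax.debug.breakpoint()
    return jnp.mean(logits ** 2)

loss_fn(jnp.ones((4, 2)), jnp.ones((3, 4)))

Note that under srun with several processes per node, as in the session replayed here, every process would hit the breakpoint, so this is practical mainly for small single-process debug runs.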
+3573,7865510,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",2569,0,"",python,selection_command +3574,7865799,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",2622,0,"",python,selection_command +3575,7868045,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",2649,0,"",python,selection_command +3576,7868549,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",2676,0,"",python,selection_command +3577,7868582,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",2750,0,"",python,selection_command +3578,7868625,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",2787,0,"",python,selection_command +3579,7868652,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",2800,0,"",python,selection_command +3580,7868659,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",2826,0,"",python,selection_command +3581,7868688,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",2853,0,"",python,selection_command +3582,7868769,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",2882,0,"",python,selection_command +3583,7868770,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",2931,0,"",python,selection_command +3584,7868943,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",2980,0,"",python,selection_command +3585,7869128,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",2994,0,"",python,selection_command +3586,7869318,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",3020,0,"",python,selection_command +3587,7869691,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",2994,0,"",python,selection_command +3588,7869933,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",2994,0,"\n jax.debug.breakpoint()",python,content +3589,7869988,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",2999,0,"",python,selection_command +3590,7875217,"TERMINAL",0,0,"sh slurm/jobs/mihir/horeka/yolo-runs/tester.sh",,terminal_output +3591,7876011,"TERMINAL",0,0,"\r\n[?2004l\r\r\n# Log the sbatch script\r\ncat $0\r\n\r\nmodule unload mpi/openmpi/5.0\r\nmodule unload devel/cuda/12.4\r\nsource .venv/bin/activate\r\n\r\narray_records_dir=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_new/open_ai_minecraft_arrayrecords_chunked\r\nws_dir='/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/'\r\n\r\njob_name=""debug""\r\nslurm_job_id=""debug-mihir""\r\n\r\nCHECKPOINT_DIR=$ws_dir/checkpoints/$job_name/$slurm_job_id\r\nmkdir -p $CHECKPOINT_DIR\r\n\r\ntokenizer_ckpt_dir=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/big-runs/tokenizer-lr-scaling/train_tokenizer_lr_sweep_1e-4\r\n\r\nenv | grep SLURM\r\n\r\nXLA_FLAGS=--xla_gpu_autotune_level=0 srun python train_dynamics.py \\r\n --ckpt_dir $CHECKPOINT_DIR \\r\n --batch_size=96 \\r\n --max_lr=1e-4 \\r\n --log_image_interval=1000 \\r\n --log \\r\n --log_checkpoint_interval=1000 \\r\n --name=dynamics-debug-run-$slurm_job_id \\r\n --tags dynamics debug \\r\n --entity instant-uv \\r\n --project jafar \\r\n --tokenizer_checkpoint=$tokenizer_ckpt_dir \\r\n --data_dir $array_records_dir\r\n 
",,terminal_output +3592,7876206,"TERMINAL",0,0,"SLURM_STEP_NUM_TASKS=1\r\nSLURM_JOB_USER=tum_cte0515\r\nSLURM_TASKS_PER_NODE=4(x2)\r\nSLURM_JOB_UID=999226\r\nSLURM_TASK_PID=2279443\r\nSLURM_JOB_GPUS=0,1,2,3\r\nSLURM_LOCALID=0\r\nSLURM_SUBMIT_DIR=/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar\r\nSLURMD_NODENAME=hkn0710\r\nSLURM_JOB_START_TIME=1752670186\r\nSLURM_STEP_NODELIST=hkn0710\r\nSLURM_CLUSTER_NAME=hk\r\nSLURM_JOB_END_TIME=1752706186\r\nSLURM_PMI2_SRUN_PORT=42215\r\nSLURM_CPUS_ON_NODE=24\r\nSLURM_JOB_CPUS_PER_NODE=24(x2)\r\nSLURM_GPUS_ON_NODE=4\r\nSLURM_GTIDS=0\r\nSLURM_JOB_PARTITION=accelerated\r\nSLURM_TRES_PER_TASK=cpu=5\r\nSLURM_OOM_KILL_STEP=0\r\nSLURM_JOB_NUM_NODES=2\r\nSLURM_STEPID=4294967290\r\nSLURM_JOBID=3350418\r\nSLURM_PTY_PORT=33491\r\nSLURM_JOB_QOS=normal\r\nSLURM_LAUNCH_NODE_IPADDR=10.0.7.199\r\nSLURM_PTY_WIN_ROW=43\r\nSLURM_PMI2_PROC_MAPPING=(vector,(0,1,1))\r\nSLURMD_DEBUG=2\r\nSLURM_PROCID=0\r\nSLURM_CPUS_PER_TASK=5\r\nSLURM_NTASKS=8\r\nSLURM_TOPOLOGY_ADDR=hkibb.hkibbi1.hkibbi1e8.hkn0710\r\nSLURM_TOPOLOGY_ADDR_PATTERN=switch.switch.switch.node\r\nSLURM_SRUN_COMM_HOST=10.0.7.199\r\nSLURM_SCRIPT_CONTEXT=prolog_task\r\nSLURM_PTY_WIN_COL=147\r\nSLURM_NODELIST=hkn[0710-0711]\r\nSLURM_SRUN_COMM_PORT=44593\r\nSLURM_STEP_ID=4294967290\r\nSLURM_JOB_ACCOUNT=hk-project-p0023960\r\nSLURM_PRIO_PROCESS=0\r\nSLURM_NPROCS=8\r\nSLURM_NNODES=2\r\nSLURM_SUBMIT_HOST=hkn1991.localdomain\r\nSLURM_JOB_ID=3350418\r\nSLURM_NODEID=0\r\nSLURM_STEP_NUM_NODES=1\r\nSLURM_STEP_TASKS_PER_NODE=1\r\nSLURM_MPI_TYPE=pmi2\r\nSLURM_PMI2_STEP_NODES=hkn0710\r\nSLURM_CONF=/etc/slurm/slurm.conf\r\nSLURM_JOB_NAME=interactive\r\nSLURM_NTASKS_PER_NODE=4\r\nSLURM_STEP_LAUNCHER_PORT=44593\r\nSLURM_JOB_GID=502226\r\nSLURM_JOB_NODELIST=hkn[0710-0711]\r\n",,terminal_output +3593,7876320,"TERMINAL",0,0,"GpuFreq=control_disabled\r\nGpuFreq=control_disabled\r\n",,terminal_output +3594,7879201,"TERMINAL",0,0,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/tyro/_parsers.py:347: UserWarning: The field `param-dtype` is annotated with type ``, but the default value `` has type ``. We'll try to handle this gracefully, but it may cause unexpected behavior.\r\n warnings.warn(message)\r\n/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/tyro/_parsers.py:347: UserWarning: The field `dtype` is annotated with type ``, but the default value `` has type ``. We'll try to handle this gracefully, but it may cause unexpected behavior.\r\n warnings.warn(message)\r\n/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/tyro/_parsers.py:347: UserWarning: The field `param-dtype` is annotated with type ``, but the default value `` has type ``. We'll try to handle this gracefully, but it may cause unexpected behavior.\r\n warnings.warn(message)\r\n/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/tyro/_parsers.py:347: UserWarning: The field `dtype` is annotated with type ``, but the default value `` has type ``. We'll try to handle this gracefully, but it may cause unexpected behavior.\r\n warnings.warn(message)\r\n",,terminal_output +3595,7879317,"TERMINAL",0,0,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/tyro/_parsers.py:347: UserWarning: The field `param-dtype` is annotated with type ``, but the default value `` has type ``. 
We'll try to handle this gracefully, but it may cause unexpected behavior.\r\n warnings.warn(message)\r\n/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/tyro/_parsers.py:347: UserWarning: The field `dtype` is annotated with type ``, but the default value `` has type ``. We'll try to handle this gracefully, but it may cause unexpected behavior.\r\n warnings.warn(message)\r\n/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/tyro/_parsers.py:347: UserWarning: The field `param-dtype` is annotated with type ``, but the default value `` has type ``. We'll try to handle this gracefully, but it may cause unexpected behavior.\r\n warnings.warn(message)\r\n/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/tyro/_parsers.py:347: UserWarning: The field `dtype` is annotated with type ``, but the default value `` has type ``. We'll try to handle this gracefully, but it may cause unexpected behavior.\r\n warnings.warn(message)\r\n/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/tyro/_parsers.py:347: UserWarning: The field `param-dtype` is annotated with type ``, but the default value `` has type ``. We'll try to handle this gracefully, but it may cause unexpected behavior.\r\n warnings.warn(message)\r\n/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/tyro/_parsers.py:347: UserWarning: The field `param-dtype` is annotated with type ``, but the default value `` has type ``. We'll try to handle this gracefully, but it may cause unexpected behavior.\r\n warnings.warn(message)\r\n/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/tyro/_parsers.py:347: UserWarning: The field `param-dtype` is annotated with type ``, but the default value `` has type ``. We'll try to handle this gracefully, but it may cause unexpected behavior.\r\n warnings.warn(message)\r\n/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/tyro/_parsers.py:347: UserWarning: The field `param-dtype` is annotated with type ``, but the default value `` has type ``. We'll try to handle this gracefully, but it may cause unexpected behavior.\r\n warnings.warn(message)\r\n/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/tyro/_parsers.py:347: UserWarning: The field `dtype` is annotated with type ``, but the default value `` has type ``. We'll try to handle this gracefully, but it may cause unexpected behavior.\r\n warnings.warn(message)\r\n/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/tyro/_parsers.py:347: UserWarning: The field `dtype` is annotated with type ``, but the default value `` has type ``. We'll try to handle this gracefully, but it may cause unexpected behavior.\r\n warnings.warn(message)\r\n/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/tyro/_parsers.py:347: UserWarning: The field `dtype` is annotated with type ``, but the default value `` has type ``. 
We'll try to handle this gracefully, but it may cause unexpected behavior.\r\n warnings.warn(message)\r\n/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/tyro/_parsers.py:347: UserWarning: The field `dtype` is annotated with type ``, but the default value `` has type ``. We'll try to handle this gracefully, but it may cause unexpected behavior.\r\n warnings.warn(message)\r\n",,terminal_output +3596,7884030,"TERMINAL",0,0,"2025-07-16 17:00:51.455050: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n2025-07-16 17:00:51.471850: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n2025-07-16 17:00:51.501225: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n2025-07-16 17:00:51.522492: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n2025-07-16 17:00:51.522501: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n2025-07-16 17:00:51.529743: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +3597,7884090,"TERMINAL",0,0,"2025-07-16 17:00:51.586553: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +3598,7884150,"TERMINAL",0,0,"2025-07-16 17:00:51.628887: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +3599,7898527,"TERMINAL",0,0,"2025-07-16 17:01:05.951089: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. 
Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n2025-07-16 17:01:05.990738: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +3600,7898930,"TERMINAL",0,0,"2025-07-16 17:01:06.384462: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +3601,7898995,"TERMINAL",0,0,"2025-07-16 17:01:06.491498: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +3602,7899785,"TERMINAL",0,0,"2025-07-16 17:01:07.285693: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +3603,7899936,"TERMINAL",0,0,"2025-07-16 17:01:07.437166: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +3604,7900365,"TERMINAL",0,0,"2025-07-16 17:01:07.794115: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +3605,7900556,"TERMINAL",0,0,"2025-07-16 17:01:08.056115: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +3606,7902043,"TERMINAL",0,0,"2025-07-16 17:01:09.526139: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n2025-07-16 17:01:09.538793: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +3607,7902476,"TERMINAL",0,0,"2025-07-16 17:01:09.978411: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. 
Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +3608,7902602,"TERMINAL",0,0,"2025-07-16 17:01:10.103181: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +3609,7903640,"TERMINAL",0,0,"2025-07-16 17:01:11.078459: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +3610,7903755,"TERMINAL",0,0,"2025-07-16 17:01:11.255416: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +3611,7904052,"TERMINAL",0,0,"2025-07-16 17:01:11.531931: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +3612,7904668,"TERMINAL",0,0,"2025-07-16 17:01:12.138437: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +3613,7906509,"TERMINAL",0,0,"wandb: Currently logged in as: mihir-mahajan2002 (instant-uv) to https://api.wandb.ai. 
Use `wandb login --relogin` to force relogin\r\n",,terminal_output +3614,7907055,"TERMINAL",0,0,"wandb: Tracking run with wandb version 0.19.11\r\nwandb: Run data is saved locally in /hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/wandb/run-20250716_170113-hbq766x2\r\nwandb: Run `wandb offline` to turn off syncing.\r\nwandb: Syncing run dynamics-debug-run-debug-mihir\r\nwandb: ⭐️ View project at https://wandb.ai/instant-uv/jafar\r\nwandb: 🚀 View run at https://wandb.ai/instant-uv/jafar/runs/hbq766x2\r\n",,terminal_output +3615,7976614,"TERMINAL",0,0,"WARNING:absl:Dropping 2 examples of 89394 examples (shard 8).\r\nWARNING:absl:Dropping 2 examples of 89394 examples (shard 8).\r\n",,terminal_output +3616,7976687,"TERMINAL",0,0,"WARNING:absl:Dropping 2 examples of 89394 examples (shard 8).\r\n",,terminal_output +3617,7977582,"TERMINAL",0,0,"WARNING:absl:Dropping 2 examples of 89394 examples (shard 8).\r\n",,terminal_output +3618,8004923,"TERMINAL",0,0,"WARNING:absl:Dropping 2 examples of 89394 examples (shard 8).\r\nWARNING:absl:Dropping 2 examples of 89394 examples (shard 8).\r\n",,terminal_output +3619,8011248,"TERMINAL",0,0,"WARNING:absl:Dropping 2 examples of 89394 examples (shard 8).\r\n",,terminal_output +3620,8013311,"TERMINAL",0,0,"WARNING:absl:Dropping 2 examples of 89394 examples (shard 8).\r\n",,terminal_output +3621,8013398,"TERMINAL",0,0,"WARNING:absl:Missing metrics for step 40000\r\nERROR:absl:File /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/big-runs/tokenizer-lr-scaling/train_tokenizer_lr_sweep_1e-4/040000/metrics/metrics not found.\r\nWARNING:absl:Missing metrics for step 140000\r\nERROR:absl:File /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/big-runs/tokenizer-lr-scaling/train_tokenizer_lr_sweep_1e-4/140000/metrics/metrics not found.\r\nWARNING:absl:Missing metrics for step 120000\r\nERROR:absl:File /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/big-runs/tokenizer-lr-scaling/train_tokenizer_lr_sweep_1e-4/120000/metrics/metrics not found.\r\nWARNING:absl:Missing metrics for step 40000\r\nWARNING:absl:Missing metrics for step 140000\r\nERROR:absl:File /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/big-runs/tokenizer-lr-scaling/train_tokenizer_lr_sweep_1e-4/040000/metrics/metrics not found.\r\nERROR:absl:File /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/big-runs/tokenizer-lr-scaling/train_tokenizer_lr_sweep_1e-4/140000/metrics/metrics not found.\r\nWARNING:absl:Missing metrics for step 144000\r\nERROR:absl:File /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/big-runs/tokenizer-lr-scaling/train_tokenizer_lr_sweep_1e-4/144000/metrics/metrics not found.\r\nWARNING:absl:Missing metrics for step 144000\r\nWARNING:absl:Missing metrics for step 140000\r\nERROR:absl:File /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/big-runs/tokenizer-lr-scaling/train_tokenizer_lr_sweep_1e-4/140000/metrics/metrics not found.\r\nERROR:absl:File /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/big-runs/tokenizer-lr-scaling/train_tokenizer_lr_sweep_1e-4/144000/metrics/metrics not found.\r\nWARNING:absl:Missing metrics for step 120000\r\nERROR:absl:File /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/big-runs/tokenizer-lr-scaling/train_tokenizer_lr_sweep_1e-4/120000/metrics/metrics not found.\r\nWARNING:absl:Missing metrics for step 100000\r\nERROR:absl:File 
/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/big-runs/tokenizer-lr-scaling/train_tokenizer_lr_sweep_1e-4/100000/metrics/metrics not found.\r\nWARNING:absl:Missing metrics for step 40000\r\nERROR:absl:File /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/big-runs/tokenizer-lr-scaling/train_tokenizer_lr_sweep_1e-4/040000/metrics/metrics not found.\r\nWARNING:absl:Missing metrics for step 140000\r\nWARNING:absl:Missing metrics for step 144000\r\nERROR:absl:File /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/big-runs/tokenizer-lr-scaling/train_tokenizer_lr_sweep_1e-4/144000/metrics/metrics not found.\r\nERROR:absl:File /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/big-runs/tokenizer-lr-scaling/train_tokenizer_lr_sweep_1e-4/140000/metrics/metrics not found.\r\nWARNING:absl:Missing metrics for step 120000\r\nWARNING:absl:Missing metrics for step 145000\r\nERROR:absl:File /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/big-runs/tokenizer-lr-scaling/train_tokenizer_lr_sweep_1e-4/120000/metrics/metrics not found.\r\nERROR:absl:File /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/big-runs/tokenizer-lr-scaling/train_tokenizer_lr_sweep_1e-4/145000/metrics/metrics not found.\r\nWARNING:absl:Missing metrics for step 145000\r\nERROR:absl:File /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/big-runs/tokenizer-lr-scaling/train_tokenizer_lr_sweep_1e-4/145000/metrics/metrics not found.\r\nWARNING:absl:Missing metrics for step 60000\r\nERROR:absl:File /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/big-runs/tokenizer-lr-scaling/train_tokenizer_lr_sweep_1e-4/060000/metrics/metrics not found.\r\nWARNING:absl:Missing metrics for step 80000\r\nWARNING:absl:Missing metrics for step 80000\r\nERROR:absl:File /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/big-runs/tokenizer-lr-scaling/train_tokenizer_lr_sweep_1e-4/080000/metrics/metrics not found.\r\nERROR:absl:File /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/big-runs/tokenizer-lr-scaling/train_tokenizer_lr_sweep_1e-4/080000/metrics/metrics not found.\r\nWARNING:absl:Missing metrics for step 146000\r\nERROR:absl:File /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/big-runs/tokenizer-lr-scaling/train_tokenizer_lr_sweep_1e-4/146000/metrics/metrics not found.\r\nWARNING:absl:Missing metrics for step 60000\r\nERROR:absl:File /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/big-runs/tokenizer-lr-scaling/train_tokenizer_lr_sweep_1e-4/060000/metrics/metrics not found.\r\nWARNING:absl:Missing metrics for step 146000\r\nERROR:absl:File /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/big-runs/tokenizer-lr-scaling/train_tokenizer_lr_sweep_1e-4/146000/metrics/metrics not found.\r\nWARNING:absl:Missing metrics for step 146000\r\nERROR:absl:File /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/big-runs/tokenizer-lr-scaling/train_tokenizer_lr_sweep_1e-4/146000/metrics/metrics not found.\r\nWARNING:absl:Missing metrics for step 140000\r\nERROR:absl:File /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/big-runs/tokenizer-lr-scaling/train_tokenizer_lr_sweep_1e-4/140000/metrics/metrics not found.\r\nWARNING:absl:Missing metrics for step 120000\r\nERROR:absl:File 
/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/big-runs/tokenizer-lr-scaling/train_tokenizer_lr_sweep_1e-4/120000/metrics/metrics not found.\r\nWARNING:absl:Missing metrics for step 145000\r\nERROR:absl:File /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/big-runs/tokenizer-lr-scaling/train_tokenizer_lr_sweep_1e-4/145000/metrics/metrics not found.\r\nWARNING:absl:Missing metrics for step 145000\r\nERROR:absl:File /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/big-runs/tokenizer-lr-scaling/train_tokenizer_lr_sweep_1e-4/145000/metrics/metrics not found.\r\nWARNING:absl:Missing metrics for step 40000\r\nERROR:absl:File /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/big-runs/tokenizer-lr-scaling/train_tokenizer_lr_sweep_1e-4/040000/metrics/metrics not found.\r\nWARNING:absl:Missing metrics for step 80000\r\nERROR:absl:File /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/big-runs/tokenizer-lr-scaling/train_tokenizer_lr_sweep_1e-4/080000/metrics/metrics not found.\r\nWARNING:absl:Missing metrics for step 80000\r\nWARNING:absl:Missing metrics for step 146000\r\nERROR:absl:File /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/big-runs/tokenizer-lr-scaling/train_tokenizer_lr_sweep_1e-4/080000/metrics/metrics not found.\r\nERROR:absl:File /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/big-runs/tokenizer-lr-scaling/train_tokenizer_lr_sweep_1e-4/146000/metrics/metrics not found.\r\nWARNING:absl:Missing metrics for step 144000\r\nERROR:absl:File /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/big-runs/tokenizer-lr-scaling/train_tokenizer_lr_sweep_1e-4/144000/metrics/metrics not found.\r\nWARNING:absl:Missing metrics for step 144000\r\nERROR:absl:File /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/big-runs/tokenizer-lr-scaling/train_tokenizer_lr_sweep_1e-4/144000/metrics/metrics not found.\r\nWARNING:absl:Missing metrics for step 40000\r\nERROR:absl:File /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/big-runs/tokenizer-lr-scaling/train_tokenizer_lr_sweep_1e-4/040000/metrics/metrics not found.\r\nWARNING:absl:Missing metrics for step 20000\r\nERROR:absl:File /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/big-runs/tokenizer-lr-scaling/train_tokenizer_lr_sweep_1e-4/020000/metrics/metrics not found.\r\nWARNING:absl:Missing metrics for step 60000\r\nERROR:absl:File /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/big-runs/tokenizer-lr-scaling/train_tokenizer_lr_sweep_1e-4/060000/metrics/metrics not found.\r\nWARNING:absl:Missing metrics for step 60000\r\nWARNING:absl:Missing metrics for step 60000\r\nWARNING:absl:Missing metrics for step 100000\r\nERROR:absl:File /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/big-runs/tokenizer-lr-scaling/train_tokenizer_lr_sweep_1e-4/100000/metrics/metrics not found.\r\nERROR:absl:File /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/big-runs/tokenizer-lr-scaling/train_tokenizer_lr_sweep_1e-4/060000/metrics/metrics not found.\r\nERROR:absl:File /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/big-runs/tokenizer-lr-scaling/train_tokenizer_lr_sweep_1e-4/060000/metrics/metrics not found.\r\nWARNING:absl:Missing metrics for step 120000\r\nERROR:absl:File 
/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/big-runs/tokenizer-lr-scaling/train_tokenizer_lr_sweep_1e-4/120000/metrics/metrics not found.\r\nWARNING:absl:Missing metrics for step 145000\r\nERROR:absl:File /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/big-runs/tokenizer-lr-scaling/train_tokenizer_lr_sweep_1e-4/145000/metrics/metrics not found.\r\nWARNING:absl:Missing metrics for step 20000\r\nERROR:absl:File /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/big-runs/tokenizer-lr-scaling/train_tokenizer_lr_sweep_1e-4/020000/metrics/metrics not found.\r\nWARNING:absl:Missing metrics for step 80000\r\nWARNING:absl:Missing metrics for step 120000\r\nERROR:absl:File /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/big-runs/tokenizer-lr-scaling/train_tokenizer_lr_sweep_1e-4/120000/metrics/metrics not found.\r\nERROR:absl:File /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/big-runs/tokenizer-lr-scaling/train_tokenizer_lr_sweep_1e-4/080000/metrics/metrics not found.\r\nWARNING:absl:Missing metrics for step 140000\r\nERROR:absl:File /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/big-runs/tokenizer-lr-scaling/train_tokenizer_lr_sweep_1e-4/140000/metrics/metrics not found.\r\nWARNING:absl:Missing metrics for step 20000\r\nERROR:absl:File /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/big-runs/tokenizer-lr-scaling/train_tokenizer_lr_sweep_1e-4/020000/metrics/metrics not found.\r\nWARNING:absl:Missing metrics for step 20000\r\nWARNING:absl:Missing metrics for step 100000\r\nERROR:absl:File /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/big-runs/tokenizer-lr-scaling/train_tokenizer_lr_sweep_1e-4/100000/metrics/metrics not found.\r\nERROR:absl:File /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/big-runs/tokenizer-lr-scaling/train_tokenizer_lr_sweep_1e-4/020000/metrics/metrics not found.\r\nWARNING:absl:Missing metrics for step 146000\r\nERROR:absl:File /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/big-runs/tokenizer-lr-scaling/train_tokenizer_lr_sweep_1e-4/146000/metrics/metrics not found.\r\nWARNING:absl:Missing metrics for step 100000\r\nERROR:absl:File /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/big-runs/tokenizer-lr-scaling/train_tokenizer_lr_sweep_1e-4/100000/metrics/metrics not found.\r\nWARNING:absl:Missing metrics for step 40000\r\nERROR:absl:File /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/big-runs/tokenizer-lr-scaling/train_tokenizer_lr_sweep_1e-4/040000/metrics/metrics not found.\r\nWARNING:absl:Missing metrics for step 100000\r\nERROR:absl:File /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/big-runs/tokenizer-lr-scaling/train_tokenizer_lr_sweep_1e-4/100000/metrics/metrics not found.\r\nWARNING:absl:Missing metrics for step 146000\r\nERROR:absl:File /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/big-runs/tokenizer-lr-scaling/train_tokenizer_lr_sweep_1e-4/146000/metrics/metrics not found.\r\nWARNING:absl:Missing metrics for step 80000\r\nERROR:absl:File /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/big-runs/tokenizer-lr-scaling/train_tokenizer_lr_sweep_1e-4/080000/metrics/metrics not found.\r\nWARNING:absl:Missing metrics for step 140000\r\nERROR:absl:File 
/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/big-runs/tokenizer-lr-scaling/train_tokenizer_lr_sweep_1e-4/140000/metrics/metrics not found.\r\nWARNING:absl:Missing metrics for step 20000\r\nERROR:absl:File /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/big-runs/tokenizer-lr-scaling/train_tokenizer_lr_sweep_1e-4/020000/metrics/metrics not found.\r\nWARNING:absl:Missing metrics for step 80000\r\nERROR:absl:File /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/big-runs/tokenizer-lr-scaling/train_tokenizer_lr_sweep_1e-4/080000/metrics/metrics not found.\r\nWARNING:absl:Missing metrics for step 40000\r\nERROR:absl:File /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/big-runs/tokenizer-lr-scaling/train_tokenizer_lr_sweep_1e-4/040000/metrics/metrics not found.\r\nWARNING:absl:Missing metrics for step 60000\r\nERROR:absl:File /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/big-runs/tokenizer-lr-scaling/train_tokenizer_lr_sweep_1e-4/060000/metrics/metrics not found.\r\nWARNING:absl:Missing metrics for step 144000\r\nERROR:absl:File /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/big-runs/tokenizer-lr-scaling/train_tokenizer_lr_sweep_1e-4/144000/metrics/metrics not found.\r\nWARNING:absl:Missing metrics for step 100000\r\nERROR:absl:File /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/big-runs/tokenizer-lr-scaling/train_tokenizer_lr_sweep_1e-4/100000/metrics/metrics not found.\r\nWARNING:absl:Missing metrics for step 145000\r\nWARNING:absl:Missing metrics for step 144000\r\nERROR:absl:File /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/big-runs/tokenizer-lr-scaling/train_tokenizer_lr_sweep_1e-4/144000/metrics/metrics not found.\r\nERROR:absl:File /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/big-runs/tokenizer-lr-scaling/train_tokenizer_lr_sweep_1e-4/145000/metrics/metrics not found.\r\nWARNING:absl:Missing metrics for step 145000\r\nERROR:absl:File /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/big-runs/tokenizer-lr-scaling/train_tokenizer_lr_sweep_1e-4/145000/metrics/metrics not found.\r\nWARNING:absl:Missing metrics for step 120000\r\nERROR:absl:File /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/big-runs/tokenizer-lr-scaling/train_tokenizer_lr_sweep_1e-4/120000/metrics/metrics not found.\r\nWARNING:absl:Missing metrics for step 20000\r\nERROR:absl:File /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/big-runs/tokenizer-lr-scaling/train_tokenizer_lr_sweep_1e-4/020000/metrics/metrics not found.\r\nWARNING:absl:Missing metrics for step 146000\r\nERROR:absl:File /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/big-runs/tokenizer-lr-scaling/train_tokenizer_lr_sweep_1e-4/146000/metrics/metrics not found.\r\nWARNING:absl:Missing metrics for step 20000\r\nERROR:absl:File /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/big-runs/tokenizer-lr-scaling/train_tokenizer_lr_sweep_1e-4/020000/metrics/metrics not found.\r\nWARNING:absl:Missing metrics for step 60000\r\nERROR:absl:File /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/big-runs/tokenizer-lr-scaling/train_tokenizer_lr_sweep_1e-4/060000/metrics/metrics not found.\r\nWARNING:absl:Missing metrics for step 100000\r\nERROR:absl:File 
/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/big-runs/tokenizer-lr-scaling/train_tokenizer_lr_sweep_1e-4/100000/metrics/metrics not found.\r\nWARNING:absl:Missing metrics for step 145000\r\nERROR:absl:File /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/big-runs/tokenizer-lr-scaling/train_tokenizer_lr_sweep_1e-4/145000/metrics/metrics not found.\r\nWARNING:absl:Missing metrics for step 60000\r\nERROR:absl:File /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/big-runs/tokenizer-lr-scaling/train_tokenizer_lr_sweep_1e-4/060000/metrics/metrics not found.\r\nWARNING:absl:Missing metrics for step 80000\r\nERROR:absl:File /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/big-runs/tokenizer-lr-scaling/train_tokenizer_lr_sweep_1e-4/080000/metrics/metrics not found.\r\nWARNING:absl:Missing metrics for step 120000\r\nERROR:absl:File /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/big-runs/tokenizer-lr-scaling/train_tokenizer_lr_sweep_1e-4/120000/metrics/metrics not found.\r\nWARNING:absl:Missing metrics for step 140000\r\nERROR:absl:File /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/big-runs/tokenizer-lr-scaling/train_tokenizer_lr_sweep_1e-4/140000/metrics/metrics not found.\r\nWARNING:absl:Missing metrics for step 100000\r\nERROR:absl:File /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/big-runs/tokenizer-lr-scaling/train_tokenizer_lr_sweep_1e-4/100000/metrics/metrics not found.\r\nWARNING:absl:Missing metrics for step 40000\r\nERROR:absl:File /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/big-runs/tokenizer-lr-scaling/train_tokenizer_lr_sweep_1e-4/040000/metrics/metrics not found.\r\nWARNING:absl:Missing metrics for step 20000\r\nERROR:absl:File /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/big-runs/tokenizer-lr-scaling/train_tokenizer_lr_sweep_1e-4/020000/metrics/metrics not found.\r\nWARNING:absl:Missing metrics for step 146000\r\nERROR:absl:File /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/big-runs/tokenizer-lr-scaling/train_tokenizer_lr_sweep_1e-4/146000/metrics/metrics not found.\r\nWARNING:absl:Missing metrics for step 144000\r\nERROR:absl:File /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/big-runs/tokenizer-lr-scaling/train_tokenizer_lr_sweep_1e-4/144000/metrics/metrics not found.\r\n",,terminal_output +3622,8018041,"TERMINAL",0,0,"Running on 8 devices.\r\nCounting all components: ['tokenizer', 'lam', 'dynamics']\r\nParameter counts:\r\n{'tokenizer': 37989616, 'lam': 19349472, 'dynamics': 29734912, 'total': 87074000}\r\nRunning on 8 devices.\r\nCounting all components: ['tokenizer', 'lam', 'dynamics']\r\nParameter counts:\r\n{'tokenizer': 37989616, 'lam': 19349472, 'dynamics': 29734912, 'total': 87074000}\r\nRunning on 8 devices.\r\nCounting all components: ['tokenizer', 'lam', 'dynamics']\r\nParameter counts:\r\n{'tokenizer': 37989616, 'lam': 19349472, 'dynamics': 29734912, 'total': 87074000}\r\nRunning on 8 devices.\r\nCounting all components: ['tokenizer', 'lam', 'dynamics']\r\nParameter counts:\r\n{'tokenizer': 37989616, 'lam': 19349472, 'dynamics': 29734912, 'total': 87074000}\r\nRunning on 8 devices.\r\nCounting all components: ['tokenizer', 'lam', 'dynamics']\r\nParameter counts:\r\n{'tokenizer': 37989616, 'lam': 19349472, 'dynamics': 29734912, 'total': 87074000}\r\nRunning on 8 devices.\r\nCounting all components: ['tokenizer', 'lam', 
'dynamics']\r\nParameter counts:\r\n{'tokenizer': 37989616, 'lam': 19349472, 'dynamics': 29734912, 'total': 87074000}\r\nRunning on 8 devices.\r\nCounting all components: ['tokenizer', 'lam', 'dynamics']\r\nParameter counts:\r\n{'tokenizer': 37989616, 'lam': 19349472, 'dynamics': 29734912, 'total': 87074000}\r\nRunning on 8 devices.\r\nCounting all components: ['tokenizer', 'lam', 'dynamics']\r\nParameter counts:\r\n{'tokenizer': 37989616, 'lam': 19349472, 'dynamics': 29734912, 'total': 87074000}\r\n",,terminal_output +3623,8023530,"TERMINAL",0,0,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/tyro/_parsers.py:347: UserWarning: The field `param-dtype` is annotated with type ``, but the default value `` has type ``. We'll try to handle this gracefully, but it may cause unexpected behavior.\r\n warnings.warn(message)\r\n/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/tyro/_parsers.py:347: UserWarning: The field `dtype` is annotated with type ``, but the default value `` has type ``. We'll try to handle this gracefully, but it may cause unexpected behavior.\r\n warnings.warn(message)\r\n/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/tyro/_parsers.py:347: UserWarning: The field `param-dtype` is annotated with type ``, but the default value `` has type ``. We'll try to handle this gracefully, but it may cause unexpected behavior.\r\n warnings.warn(message)\r\n/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/tyro/_parsers.py:347: UserWarning: The field `dtype` is annotated with type ``, but the default value `` has type ``. We'll try to handle this gracefully, but it may cause unexpected behavior.\r\n warnings.warn(message)\r\n/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/tyro/_parsers.py:347: UserWarning: The field `param-dtype` is annotated with type ``, but the default value `` has type ``. We'll try to handle this gracefully, but it may cause unexpected behavior.\r\n warnings.warn(message)\r\n/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/tyro/_parsers.py:347: UserWarning: The field `dtype` is annotated with type ``, but the default value `` has type ``. We'll try to handle this gracefully, but it may cause unexpected behavior.\r\n warnings.warn(message)\r\n/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/tyro/_parsers.py:347: UserWarning: The field `param-dtype` is annotated with type ``, but the default value `` has type ``. We'll try to handle this gracefully, but it may cause unexpected behavior.\r\n warnings.warn(message)\r\n/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/tyro/_parsers.py:347: UserWarning: The field `dtype` is annotated with type ``, but the default value `` has type ``. We'll try to handle this gracefully, but it may cause unexpected behavior.\r\n warnings.warn(message)\r\n/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/tyro/_parsers.py:347: UserWarning: The field `param-dtype` is annotated with type ``, but the default value `` has type ``. 
We'll try to handle this gracefully, but it may cause unexpected behavior.\r\n warnings.warn(message)\r\n",,terminal_output
+3624,8023605,"TERMINAL",0,0,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/tyro/_parsers.py:347: UserWarning: The field `param-dtype` is annotated with type ``, but the default value `` has type ``. We'll try to handle this gracefully, but it may cause unexpected behavior.\r\n warnings.warn(message)\r\n/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/tyro/_parsers.py:347: UserWarning: The field `dtype` is annotated with type ``, but the default value `` has type ``. We'll try to handle this gracefully, but it may cause unexpected behavior.\r\n warnings.warn(message)\r\n",,terminal_output
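The tyro warnings above (emitted once per worker process, hence the repetition) come from a mismatch between a config field's type annotation and the runtime type of its default value; the concrete type names were lost in this capture. A minimal sketch of the pattern that plausibly triggers it — the field names `dtype`/`param_dtype` mirror the warning text, but this is not the actual jafar config:

```python
import dataclasses

import jax.numpy as jnp
import tyro


@dataclasses.dataclass
class Args:
    # The annotation promises a dtype *instance* (numpy.dtype), but the
    # defaults are the dtype *classes* jnp.bfloat16 / jnp.float32, so tyro
    # warns that the annotated type and the default's type disagree.
    # As the warning says, it still tries to handle this gracefully.
    dtype: jnp.dtype = jnp.bfloat16
    param_dtype: jnp.dtype = jnp.float32


if __name__ == "__main__":
    args = tyro.cli(Args)  # prints one UserWarning per mismatched field
    print(args)
```

One way to silence the warning is to make the annotation match what the default actually is (e.g. annotate the field as a string such as `"bfloat16"` and convert to a dtype after parsing).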
+3645,8050199,"TERMINAL",0,0,"E0716 17:03:37.685478 199601 spmd_partitioner.cc:630] [spmd] Involuntary full rematerialization. 
The compiler was not able to go from sharding {devices=[8,1,1,1,1]<=[8]} to {maximal device=0} without doing a full rematerialization of the tensor for HLO operation: %multiply.11 = bf16[12,16,90,160,3]{4,3,2,1,0} multiply(%convert.3105, %broadcast.579), sharding={devices=[8,1,1,1,1]<=[8]}, metadata={op_name=""jit(train_step)/jit(main)/div"" source_file=""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py"" source_line=88}. You probably want to enrich the sharding annotations to prevent this from happening.\r\nE0716 17:03:37.685546 199601 spmd_partitioner.cc:630] [spmd] Involuntary full rematerialization. The compiler was not able to go from sharding {devices=[8]<=[8]} to {maximal device=0} without doing a full rematerialization of the tensor for HLO operation: %get-tuple-element = s32[180]{0} get-tuple-element(%reduce), index=1, sharding={devices=[8]<=[8]}, metadata={op_name=""jit(train_step)/jit(main)/jvp(Genie)/lam.vq_encode/vq/reduce"" source_file=""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/utils/nn.py"" source_line=274}. You probably want to enrich the sharding annotations to prevent this from happening.\r\nE0716 17:03:37.685571 199601 spmd_partitioner.cc:630] [spmd] Involuntary full rematerialization. The compiler was not able to go from sharding {devices=[8,1,1,1]<=[8]} to {maximal device=0} without doing a full rematerialization of the tensor for HLO operation: %reshape.9093 = f32[12,15,1,32]{3,2,1,0} reshape(%add.9092), sharding={devices=[8,1,1,1]<=[8]}, metadata={op_name=""jit(train_step)/jit(main)/jvp(Genie)/lam.vq_encode/reshape"" source_file=""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/models/lam.py"" source_line=98}. You probably want to enrich the sharding annotations to prevent this from happening.\r\nE0716 17:03:37.685593 199601 spmd_partitioner.cc:630] [spmd] Involuntary full rematerialization. The compiler was not able to go from sharding {devices=[8,1,1]<=[8]} to {maximal device=0} without doing a full rematerialization of the tensor for HLO operation: %broadcast.2982 = f32[12,16,920]{2,1,0} broadcast(%constant), dimensions={}, sharding={devices=[8,1,1]<=[8]}. You probably want to enrich the sharding annotations to prevent this from happening.\r\nE0716 17:03:37.685614 199601 spmd_partitioner.cc:630] [spmd] Involuntary full rematerialization. The compiler was not able to go from sharding {devices=[8,1,1,1,1]<=[8]} to {maximal device=0} without doing a full rematerialization of the tensor for HLO operation: %slice = bf16[12,16,90,160,3]{4,3,2,1,0} slice(%reshape.16454), slice={[0:12], [0:16], [0:90], [0:160], [0:3]}, sharding={devices=[8,1,1,1,1]<=[8]}, metadata={op_name=""jit(train_step)/jit(main)/jvp(Genie)/tokenizer.decode/slice"" source_file=""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/utils/preprocess.py"" source_line=25}. You probably want to enrich the sharding annotations to prevent this from happening.\r\nE0716 17:03:37.685635 199601 spmd_partitioner.cc:630] [spmd] Involuntary full rematerialization. The compiler was not able to go from sharding {devices=[8,1,1,1]<=[8]} to {maximal device=0} without doing a full rematerialization of the tensor for HLO operation: %convert.16456 = f32[12,16,920,1024]{3,2,1,0} convert(%add.13535), sharding={devices=[8,1,1,1]<=[8]}, metadata={op_name=""jit(train_step)/jit(main)/convert_element_type"" source_file=""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py"" source_line=96}. 
You probably want to enrich the sharding annotations to prevent this from happening.\r\nE0716 17:03:37.685657 199601 spmd_partitioner.cc:630] [spmd] Involuntary full rematerialization. The compiler was not able to go from sharding {devices=[8,1,1]<=[8]} to {maximal device=0} without doing a full rematerialization of the tensor for HLO operation: %reshape.6080 = s32[12,16,920]{2,1,0} reshape(%get-tuple-element), sharding={devices=[8,1,1]<=[8]}, metadata={op_name=""jit(train_step)/jit(main)/jvp(Genie)/tokenizer.vq_encode/reshape"" source_file=""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/models/tokenizer.py"" source_line=72}. You probably want to enrich the sharding annotations to prevent this from happening.\r\nE0716 17:03:37.701456 199601 spmd_partitioner.cc:630] [spmd] Involuntary full rematerialization. The compiler was not able to go from sharding {devices=[8,1,1,1]<=[8]} to {maximal device=0} without doing a full rematerialization of the tensor for HLO operation: %slice = f32[12,15,920,1024]{3,2,1,0} slice(%convert.16456), slice={[0:12], [0:15], [0:920], [0:1024]}, sharding={devices=[8,1,1,1]<=[8]}, metadata={op_name=""jit(train_step)/jit(main)/slice"" source_file=""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py"" source_line=102}. You probably want to enrich the sharding annotations to prevent this from happening.\r\nE0716 17:03:37.701495 199601 spmd_partitioner.cc:630] [spmd] Involuntary full rematerialization. The compiler was not able to go from sharding {devices=[8,1,1]<=[8]} to {maximal device=0} without doing a full rematerialization of the tensor for HLO operation: %slice = s32[12,15,920]{2,1,0} slice(%reshape.6080), slice={[0:12], [1:16], [0:920]}, sharding={devices=[8,1,1]<=[8]}, metadata={op_name=""jit(train_step)/jit(main)/slice"" source_file=""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py"" source_line=103}. You probably want to enrich the sharding annotations to prevent this from happening.\r\n",,terminal_output
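These spmd_partitioner errors say the compiler had to gather intermediates that were sharded over the 8-device mesh ({devices=[8,...]<=[8]}) onto a single device ({maximal device=0}), forcing a full rematerialization. The fix the message suggests — enriching the sharding annotations — usually means constraining the offending intermediates explicitly so XLA keeps them batch-sharded. A hedged sketch of that technique, with illustrative shapes and names rather than the actual train_dynamics.py code:

```python
import jax
import jax.numpy as jnp
from jax.sharding import Mesh, NamedSharding, PartitionSpec as P

# One data-parallel axis over all local devices (the logs show an [8] mesh).
mesh = Mesh(jax.devices(), axis_names=("data",))
batch = NamedSharding(mesh, P("data"))  # shard the leading batch dimension


@jax.jit
def preprocess(frames):
    # Normalize video frames, as in the div op flagged at train_dynamics.py:88.
    x = frames.astype(jnp.bfloat16) / 255.0
    # Explicit constraint: keep the intermediate batch-sharded so the SPMD
    # partitioner never has to fall back to a full rematerialization.
    return jax.lax.with_sharding_constraint(x, batch)


# Illustrative (B, T, H, W, C) batch; B is divisible by the mesh size here.
frames = jnp.zeros((16, 16, 90, 160, 3), dtype=jnp.uint8)
out = preprocess(jax.device_put(frames, batch))
```

Note that the logged shapes have a leading batch of 12 on an 8-device mesh, which does not divide evenly; uneven sharding of the annotated axis is one plausible reason the partitioner gives up and rematerializes.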
+3650,8057571,"TERMINAL",0,0,"E0716 17:03:45.033533 2332795 spmd_partitioner.cc:630] [spmd] Involuntary full rematerialization. The compiler was not able to go from sharding {devices=[8,1,1,1,1]<=[8]} to {maximal device=0} without doing a full rematerialization of the tensor for HLO operation: %multiply.11 = bf16[12,16,90,160,3]{4,3,2,1,0} multiply(%convert.3105, %broadcast.579), sharding={devices=[8,1,1,1,1]<=[8]}, metadata={op_name=""jit(train_step)/jit(main)/div"" source_file=""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py"" source_line=88}. You probably want to enrich the sharding annotations to prevent this from happening.\r\nE0716 17:03:45.033605 2332795 spmd_partitioner.cc:630] [spmd] Involuntary full rematerialization. The compiler was not able to go from sharding {devices=[8]<=[8]} to {maximal device=0} without doing a full rematerialization of the tensor for HLO operation: %get-tuple-element = s32[180]{0} get-tuple-element(%reduce), index=1, sharding={devices=[8]<=[8]}, metadata={op_name=""jit(train_step)/jit(main)/jvp(Genie)/lam.vq_encode/vq/reduce"" source_file=""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/utils/nn.py"" source_line=274}. You probably want to enrich the sharding annotations to prevent this from happening.\r\nE0716 17:03:45.033632 2332795 spmd_partitioner.cc:630] [spmd] Involuntary full rematerialization. The compiler was not able to go from sharding {devices=[8,1,1,1]<=[8]} to {maximal device=0} without doing a full rematerialization of the tensor for HLO operation: %reshape.9093 = f32[12,15,1,32]{3,2,1,0} reshape(%add.9092), sharding={devices=[8,1,1,1]<=[8]}, metadata={op_name=""jit(train_step)/jit(main)/jvp(Genie)/lam.vq_encode/reshape"" source_file=""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/models/lam.py"" source_line=98}. You probably want to enrich the sharding annotations to prevent this from happening.\r\nE0716 17:03:45.033658 2332795 spmd_partitioner.cc:630] [spmd] Involuntary full rematerialization. The compiler was not able to go from sharding {devices=[8,1,1]<=[8]} to {maximal device=0} without doing a full rematerialization of the tensor for HLO operation: %broadcast.2982 = f32[12,16,920]{2,1,0} broadcast(%constant), dimensions={}, sharding={devices=[8,1,1]<=[8]}. 
You probably want to enrich the sharding annotations to prevent this from happening.\r\nE0716 17:03:45.033680 2332795 spmd_partitioner.cc:630] [spmd] Involuntary full rematerialization. The compiler was not able to go from sharding {devices=[8,1,1,1,1]<=[8]} to {maximal device=0} without doing a full rematerialization of the tensor for HLO operation: %slice = bf16[12,16,90,160,3]{4,3,2,1,0} slice(%reshape.16454), slice={[0:12], [0:16], [0:90], [0:160], [0:3]}, sharding={devices=[8,1,1,1,1]<=[8]}, metadata={op_name=""jit(train_step)/jit(main)/jvp(Genie)/tokenizer.decode/slice"" source_file=""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/utils/preprocess.py"" source_line=25}. You probably want to enrich the sharding annotations to prevent this from happening.\r\nE0716 17:03:45.033702 2332795 spmd_partitioner.cc:630] [spmd] Involuntary full rematerialization. The compiler was not able to go from sharding {devices=[8,1,1,1]<=[8]} to {maximal device=0} without doing a full rematerialization of the tensor for HLO operation: %convert.16456 = f32[12,16,920,1024]{3,2,1,0} convert(%add.13535), sharding={devices=[8,1,1,1]<=[8]}, metadata={op_name=""jit(train_step)/jit(main)/convert_element_type"" source_file=""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py"" source_line=96}. You probably want to enrich the sharding annotations to prevent this from happening.\r\nE0716 17:03:45.033728 2332795 spmd_partitioner.cc:630] [spmd] Involuntary full rematerialization. The compiler was not able to go from sharding {devices=[8,1,1]<=[8]} to {maximal device=0} without doing a full rematerialization of the tensor for HLO operation: %reshape.6080 = s32[12,16,920]{2,1,0} reshape(%get-tuple-element), sharding={devices=[8,1,1]<=[8]}, metadata={op_name=""jit(train_step)/jit(main)/jvp(Genie)/tokenizer.vq_encode/reshape"" source_file=""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/models/tokenizer.py"" source_line=72}. You probably want to enrich the sharding annotations to prevent this from happening.\r\nE0716 17:03:45.050607 2332795 spmd_partitioner.cc:630] [spmd] Involuntary full rematerialization. The compiler was not able to go from sharding {devices=[8,1,1,1]<=[8]} to {maximal device=0} without doing a full rematerialization of the tensor for HLO operation: %slice = f32[12,15,920,1024]{3,2,1,0} slice(%convert.16456), slice={[0:12], [0:15], [0:920], [0:1024]}, sharding={devices=[8,1,1,1]<=[8]}, metadata={op_name=""jit(train_step)/jit(main)/slice"" source_file=""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py"" source_line=102}. You probably want to enrich the sharding annotations to prevent this from happening.\r\nE0716 17:03:45.050656 2332795 spmd_partitioner.cc:630] [spmd] Involuntary full rematerialization. The compiler was not able to go from sharding {devices=[8,1,1]<=[8]} to {maximal device=0} without doing a full rematerialization of the tensor for HLO operation: %slice = s32[12,15,920]{2,1,0} slice(%reshape.6080), slice={[0:12], [1:16], [0:920]}, sharding={devices=[8,1,1]<=[8]}, metadata={op_name=""jit(train_step)/jit(main)/slice"" source_file=""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py"" source_line=103}. You probably want to enrich the sharding annotations to prevent this from happening.\r\n",,terminal_output +3651,8060427,"TERMINAL",0,0,"E0716 17:03:47.816930 2332798 spmd_partitioner.cc:630] [spmd] Involuntary full rematerialization. 
The compiler was not able to go from sharding {devices=[8,1,1,1,1]<=[8]} to {maximal device=0} without doing a full rematerialization of the tensor for HLO operation: %multiply.11 = bf16[12,16,90,160,3]{4,3,2,1,0} multiply(%convert.3105, %broadcast.579), sharding={devices=[8,1,1,1,1]<=[8]}, metadata={op_name=""jit(train_step)/jit(main)/div"" source_file=""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py"" source_line=88}. You probably want to enrich the sharding annotations to prevent this from happening.\r\nE0716 17:03:47.816990 2332798 spmd_partitioner.cc:630] [spmd] Involuntary full rematerialization. The compiler was not able to go from sharding {devices=[8]<=[8]} to {maximal device=0} without doing a full rematerialization of the tensor for HLO operation: %get-tuple-element = s32[180]{0} get-tuple-element(%reduce), index=1, sharding={devices=[8]<=[8]}, metadata={op_name=""jit(train_step)/jit(main)/jvp(Genie)/lam.vq_encode/vq/reduce"" source_file=""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/utils/nn.py"" source_line=274}. You probably want to enrich the sharding annotations to prevent this from happening.\r\nE0716 17:03:47.817015 2332798 spmd_partitioner.cc:630] [spmd] Involuntary full rematerialization. The compiler was not able to go from sharding {devices=[8,1,1,1]<=[8]} to {maximal device=0} without doing a full rematerialization of the tensor for HLO operation: %reshape.9093 = f32[12,15,1,32]{3,2,1,0} reshape(%add.9092), sharding={devices=[8,1,1,1]<=[8]}, metadata={op_name=""jit(train_step)/jit(main)/jvp(Genie)/lam.vq_encode/reshape"" source_file=""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/models/lam.py"" source_line=98}. You probably want to enrich the sharding annotations to prevent this from happening.\r\nE0716 17:03:47.817041 2332798 spmd_partitioner.cc:630] [spmd] Involuntary full rematerialization. The compiler was not able to go from sharding {devices=[8,1,1]<=[8]} to {maximal device=0} without doing a full rematerialization of the tensor for HLO operation: %broadcast.2982 = f32[12,16,920]{2,1,0} broadcast(%constant), dimensions={}, sharding={devices=[8,1,1]<=[8]}. You probably want to enrich the sharding annotations to prevent this from happening.\r\nE0716 17:03:47.817064 2332798 spmd_partitioner.cc:630] [spmd] Involuntary full rematerialization. The compiler was not able to go from sharding {devices=[8,1,1,1,1]<=[8]} to {maximal device=0} without doing a full rematerialization of the tensor for HLO operation: %slice = bf16[12,16,90,160,3]{4,3,2,1,0} slice(%reshape.16454), slice={[0:12], [0:16], [0:90], [0:160], [0:3]}, sharding={devices=[8,1,1,1,1]<=[8]}, metadata={op_name=""jit(train_step)/jit(main)/jvp(Genie)/tokenizer.decode/slice"" source_file=""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/utils/preprocess.py"" source_line=25}. You probably want to enrich the sharding annotations to prevent this from happening.\r\nE0716 17:03:47.817094 2332798 spmd_partitioner.cc:630] [spmd] Involuntary full rematerialization. The compiler was not able to go from sharding {devices=[8,1,1,1]<=[8]} to {maximal device=0} without doing a full rematerialization of the tensor for HLO operation: %convert.16456 = f32[12,16,920,1024]{3,2,1,0} convert(%add.13535), sharding={devices=[8,1,1,1]<=[8]}, metadata={op_name=""jit(train_step)/jit(main)/convert_element_type"" source_file=""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py"" source_line=96}. 
You probably want to enrich the sharding annotations to prevent this from happening.\r\nE0716 17:03:47.817127 2332798 spmd_partitioner.cc:630] [spmd] Involuntary full rematerialization. The compiler was not able to go from sharding {devices=[8,1,1]<=[8]} to {maximal device=0} without doing a full rematerialization of the tensor for HLO operation: %reshape.6080 = s32[12,16,920]{2,1,0} reshape(%get-tuple-element), sharding={devices=[8,1,1]<=[8]}, metadata={op_name=""jit(train_step)/jit(main)/jvp(Genie)/tokenizer.vq_encode/reshape"" source_file=""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/models/tokenizer.py"" source_line=72}. You probably want to enrich the sharding annotations to prevent this from happening.\r\nE0716 17:03:47.833236 2332798 spmd_partitioner.cc:630] [spmd] Involuntary full rematerialization. The compiler was not able to go from sharding {devices=[8,1,1,1]<=[8]} to {maximal device=0} without doing a full rematerialization of the tensor for HLO operation: %slice = f32[12,15,920,1024]{3,2,1,0} slice(%convert.16456), slice={[0:12], [0:15], [0:920], [0:1024]}, sharding={devices=[8,1,1,1]<=[8]}, metadata={op_name=""jit(train_step)/jit(main)/slice"" source_file=""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py"" source_line=102}. You probably want to enrich the sharding annotations to prevent this from happening.\r\nE0716 17:03:47.833281 2332798 spmd_partitioner.cc:630] [spmd] Involuntary full rematerialization. The compiler was not able to go from sharding {devices=[8,1,1]<=[8]} to {maximal device=0} without doing a full rematerialization of the tensor for HLO operation: %slice = s32[12,15,920]{2,1,0} slice(%reshape.6080), slice={[0:12], [1:16], [0:920]}, sharding={devices=[8,1,1]<=[8]}, metadata={op_name=""jit(train_step)/jit(main)/slice"" source_file=""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py"" source_line=103}. You probably want to enrich the sharding annotations to prevent this from happening.\r\n",,terminal_output +3652,8060858,"TERMINAL",0,0,"E0716 17:03:48.343876 199603 spmd_partitioner.cc:630] [spmd] Involuntary full rematerialization. The compiler was not able to go from sharding {devices=[8,1,1,1,1]<=[8]} to {maximal device=0} without doing a full rematerialization of the tensor for HLO operation: %multiply.11 = bf16[12,16,90,160,3]{4,3,2,1,0} multiply(%convert.3105, %broadcast.579), sharding={devices=[8,1,1,1,1]<=[8]}, metadata={op_name=""jit(train_step)/jit(main)/div"" source_file=""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py"" source_line=88}. You probably want to enrich the sharding annotations to prevent this from happening.\r\nE0716 17:03:48.343968 199603 spmd_partitioner.cc:630] [spmd] Involuntary full rematerialization. The compiler was not able to go from sharding {devices=[8]<=[8]} to {maximal device=0} without doing a full rematerialization of the tensor for HLO operation: %get-tuple-element = s32[180]{0} get-tuple-element(%reduce), index=1, sharding={devices=[8]<=[8]}, metadata={op_name=""jit(train_step)/jit(main)/jvp(Genie)/lam.vq_encode/vq/reduce"" source_file=""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/utils/nn.py"" source_line=274}. You probably want to enrich the sharding annotations to prevent this from happening.\r\nE0716 17:03:48.343993 199603 spmd_partitioner.cc:630] [spmd] Involuntary full rematerialization. 
The compiler was not able to go from sharding {devices=[8,1,1,1]<=[8]} to {maximal device=0} without doing a full rematerialization of the tensor for HLO operation: %reshape.9093 = f32[12,15,1,32]{3,2,1,0} reshape(%add.9092), sharding={devices=[8,1,1,1]<=[8]}, metadata={op_name=""jit(train_step)/jit(main)/jvp(Genie)/lam.vq_encode/reshape"" source_file=""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/models/lam.py"" source_line=98}. You probably want to enrich the sharding annotations to prevent this from happening.\r\nE0716 17:03:48.344017 199603 spmd_partitioner.cc:630] [spmd] Involuntary full rematerialization. The compiler was not able to go from sharding {devices=[8,1,1]<=[8]} to {maximal device=0} without doing a full rematerialization of the tensor for HLO operation: %broadcast.2982 = f32[12,16,920]{2,1,0} broadcast(%constant), dimensions={}, sharding={devices=[8,1,1]<=[8]}. You probably want to enrich the sharding annotations to prevent this from happening.\r\nE0716 17:03:48.344039 199603 spmd_partitioner.cc:630] [spmd] Involuntary full rematerialization. The compiler was not able to go from sharding {devices=[8,1,1,1,1]<=[8]} to {maximal device=0} without doing a full rematerialization of the tensor for HLO operation: %slice = bf16[12,16,90,160,3]{4,3,2,1,0} slice(%reshape.16454), slice={[0:12], [0:16], [0:90], [0:160], [0:3]}, sharding={devices=[8,1,1,1,1]<=[8]}, metadata={op_name=""jit(train_step)/jit(main)/jvp(Genie)/tokenizer.decode/slice"" source_file=""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/utils/preprocess.py"" source_line=25}. You probably want to enrich the sharding annotations to prevent this from happening.\r\nE0716 17:03:48.344298 199603 spmd_partitioner.cc:630] [spmd] Involuntary full rematerialization. The compiler was not able to go from sharding {devices=[8,1,1,1]<=[8]} to {maximal device=0} without doing a full rematerialization of the tensor for HLO operation: %convert.16456 = f32[12,16,920,1024]{3,2,1,0} convert(%add.13535), sharding={devices=[8,1,1,1]<=[8]}, metadata={op_name=""jit(train_step)/jit(main)/convert_element_type"" source_file=""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py"" source_line=96}. You probably want to enrich the sharding annotations to prevent this from happening.\r\nE0716 17:03:48.344336 199603 spmd_partitioner.cc:630] [spmd] Involuntary full rematerialization. The compiler was not able to go from sharding {devices=[8,1,1]<=[8]} to {maximal device=0} without doing a full rematerialization of the tensor for HLO operation: %reshape.6080 = s32[12,16,920]{2,1,0} reshape(%get-tuple-element), sharding={devices=[8,1,1]<=[8]}, metadata={op_name=""jit(train_step)/jit(main)/jvp(Genie)/tokenizer.vq_encode/reshape"" source_file=""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/models/tokenizer.py"" source_line=72}. You probably want to enrich the sharding annotations to prevent this from happening.\r\nE0716 17:03:48.359956 199603 spmd_partitioner.cc:630] [spmd] Involuntary full rematerialization. 
The compiler was not able to go from sharding {devices=[8,1,1,1]<=[8]} to {maximal device=0} without doing a full rematerialization of the tensor for HLO operation: %slice = f32[12,15,920,1024]{3,2,1,0} slice(%convert.16456), slice={[0:12], [0:15], [0:920], [0:1024]}, sharding={devices=[8,1,1,1]<=[8]}, metadata={op_name=""jit(train_step)/jit(main)/slice"" source_file=""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py"" source_line=102}. You probably want to enrich the sharding annotations to prevent this from happening.\r\nE0716 17:03:48.359987 199603 spmd_partitioner.cc:630] [spmd] Involuntary full rematerialization. The compiler was not able to go from sharding {devices=[8,1,1]<=[8]} to {maximal device=0} without doing a full rematerialization of the tensor for HLO operation: %slice = s32[12,15,920]{2,1,0} slice(%reshape.6080), slice={[0:12], [1:16], [0:920]}, sharding={devices=[8,1,1]<=[8]}, metadata={op_name=""jit(train_step)/jit(main)/slice"" source_file=""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py"" source_line=103}. You probably want to enrich the sharding annotations to prevent this from happening.\r\n",,terminal_output +3653,8064513,"TERMINAL",0,0,"2025-07-16 17:03:51.945303: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n2025-07-16 17:03:51.945770: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n2025-07-16 17:03:51.945831: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n2025-07-16 17:03:51.946395: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n2025-07-16 17:03:51.947688: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n2025-07-16 17:03:51.947710: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +3654,8064709,"TERMINAL",0,0,"2025-07-16 17:03:52.208746: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. 
Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n2025-07-16 17:03:52.209207: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n2025-07-16 17:03:52.209265: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n2025-07-16 17:03:52.209824: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n2025-07-16 17:03:52.211125: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n2025-07-16 17:03:52.211144: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +3655,8065538,"TERMINAL",0,0,"2025-07-16 17:03:52.940538: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n2025-07-16 17:03:52.941007: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n2025-07-16 17:03:52.941066: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n2025-07-16 17:03:52.941631: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n2025-07-16 17:03:52.942949: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n2025-07-16 17:03:52.942967: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. 
Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +3656,8065738,"TERMINAL",0,0,"2025-07-16 17:03:53.236481: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n2025-07-16 17:03:53.236966: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n2025-07-16 17:03:53.237026: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n2025-07-16 17:03:53.237597: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n2025-07-16 17:03:53.238932: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n2025-07-16 17:03:53.238952: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +3657,8069264,"TERMINAL",0,0,"2025-07-16 17:03:56.756552: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n2025-07-16 17:03:56.757009: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n2025-07-16 17:03:56.757067: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n2025-07-16 17:03:56.757622: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. 
Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n2025-07-16 17:03:56.758916: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n2025-07-16 17:03:56.758935: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +3658,8071990,"TERMINAL",0,0,"2025-07-16 17:03:59.471120: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n2025-07-16 17:03:59.471593: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n2025-07-16 17:03:59.471652: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n2025-07-16 17:03:59.472232: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n2025-07-16 17:03:59.473552: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n2025-07-16 17:03:59.473572: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +3659,8075164,"TERMINAL",0,0,"2025-07-16 17:04:02.663103: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n2025-07-16 17:04:02.663587: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. 
Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n2025-07-16 17:04:02.663651: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n2025-07-16 17:04:02.664236: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n2025-07-16 17:04:02.665554: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n2025-07-16 17:04:02.665572: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +3660,8075329,"TERMINAL",0,0,"2025-07-16 17:04:02.828972: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n2025-07-16 17:04:02.829509: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n2025-07-16 17:04:02.829574: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n2025-07-16 17:04:02.830177: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n2025-07-16 17:04:02.831877: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n2025-07-16 17:04:02.831910: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. 
Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +3661,8150784,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",0,0,"",python,tab +3662,8150785,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",3138,0,"",python,selection_mouse +3663,8151515,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",3128,0,"",python,selection_mouse +3664,8151714,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",3128,1," ",python,selection_mouse +3665,8151857,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",3115,50," ce_loss = (mask * ce_loss).sum() / mask.sum()\n",python,selection_mouse +3666,8152608,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",3128,0,"",python,selection_mouse +3667,8152609,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",3128,1," ",python,selection_mouse +3668,8152791,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",3115,50," ce_loss = (mask * ce_loss).sum() / mask.sum()\n",python,selection_mouse +3669,8206543,"TERMINAL",0,0,"Entering jdb:\r\n",,terminal_output +3670,8210873,"TERMINAL",0,0,"l",,terminal_output +3671,8211102,"TERMINAL",0,0,"\r\n(jdb) > /hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py(100)\r\n )\r\n mask = outputs[""mask""]\r\n outputs[""token_logits""] = outputs[""token_logits""].astype(jnp.float32)\r\n logits = outputs[""token_logits""]\r\n targets = outputs[""video_tokens""]\r\n \r\n-> jax.debug.breakpoint()\r\n if not args.use_maskgit:\r\n logits = outputs[""token_logits""][:, :-1]\r\n targets = outputs[""video_tokens""][:, 1:]\r\n mask = outputs[""mask""][:, 1:] \r\n \r\n",,terminal_output +3672,8216066,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",0,0,"",python,tab +3673,8216067,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",2623,0,"",python,selection_mouse +3674,8216139,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",2622,0,"",python,selection_command +3675,8229067,"TERMINAL",0,0,"m",,terminal_output +3676,8229208,"TERMINAL",0,0,"as",,terminal_output +3677,8229327,"TERMINAL",0,0,"k",,terminal_output +3678,8229568,"TERMINAL",0,0,".",,terminal_output +3679,8229756,"TERMINAL",0,0,"s",,terminal_output +3680,8229871,"TERMINAL",0,0,"h",,terminal_output +3681,8230027,"TERMINAL",0,0,"[?25la[?25h",,terminal_output +3682,8230865,"TERMINAL",0,0,"[?25la[?25h",,terminal_output +3683,8230926,"TERMINAL",0,0,"[?25lp[?25h",,terminal_output +3684,8231107,"TERMINAL",0,0,"[?25le[?25h",,terminal_output +3685,8231167,"TERMINAL",0,0,"\r\n(jdb) (96, 16, 920)\r\n",,terminal_output +3686,8256161,"TERMINAL",0,0,"[?25l1[?25h",,terminal_output +3687,8256343,"TERMINAL",0,0,"[?25l2[?25h",,terminal_output +3688,8256712,"TERMINAL",0,0,"[?25l*[?25h",,terminal_output +3689,8261491,"TERMINAL",0,0,"   ",,terminal_output +3690,8286699,"TERMINAL",0,0,"[?25ll[?25h",,terminal_output +3691,8286943,"TERMINAL",0,0,"[?25lo[?25h",,terminal_output +3692,8287186,"TERMINAL",0,0,"[?25lg[?25h",,terminal_output +3693,8287356,"TERMINAL",0,0,"[?25li[?25h",,terminal_output +3694,8287579,"TERMINAL",0,0,"[?25lt[?25h",,terminal_output 
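The repeated `spmd_partitioner.cc:630` errors above all say the same thing: XLA could not move a tensor from a batch-sharded layout such as `{devices=[8,1,1,1,1]<=[8]}` to `{maximal device=0}` without an involuntary full rematerialization, and it suggests enriching the sharding annotations. A minimal sketch of doing that with `jax.lax.with_sharding_constraint`, assuming an 8-way data-parallel mesh like the one implied by the log (the mesh, axis name, and function body here are hypothetical, not the repo's actual code):

```python
import jax
import jax.numpy as jnp
import numpy as np
from jax.sharding import Mesh, NamedSharding, PartitionSpec as P

# Hypothetical 8-device data-parallel mesh matching the {devices=[8,...]<=[8]}
# layouts in the warnings; running this sketch needs 8 visible devices.
mesh = Mesh(np.array(jax.devices()[:8]), axis_names=("data",))

@jax.jit
def normalize(frames):
    # Pin the intermediate to the batch-sharded layout so the partitioner
    # does not have to guess and fall back to a full rematerialization.
    frames = jax.lax.with_sharding_constraint(
        frames, NamedSharding(mesh, P("data", None, None, None, None))
    )
    return frames / 255.0  # mirrors the jit(train_step)/jit(main)/div op above

x = jnp.zeros((96, 16, 90, 160, 3), dtype=jnp.bfloat16)  # 12 per device * 8
y = normalize(x)
```

Annotating the tensors named in the warnings (the `div`, `vq_encode` reshape, and `slice` intermediates in train_dynamics.py) this way is the usual response to this message.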
+3695,8287785,"TERMINAL",0,0,"[?25ls[?25h",,terminal_output +3696,8287946,"TERMINAL",0,0,"[?25l.[?25h",,terminal_output +3697,8289616,"TERMINAL",0,0,"[?25ls[?25h",,terminal_output +3698,8289774,"TERMINAL",0,0,"[?25lh[?25h",,terminal_output +3699,8289834,"TERMINAL",0,0,"[?25la[?25h",,terminal_output +3700,8290006,"TERMINAL",0,0,"[?25lp[?25h",,terminal_output +3701,8290136,"TERMINAL",0,0,"[?25le[?25h",,terminal_output +3702,8290381,"TERMINAL",0,0,"\r\n(jdb) (96, 16, 920, 1024)\r\n",,terminal_output +3703,8294920,"TERMINAL",0,0,"[?25lt[?25h",,terminal_output +3704,8294978,"TERMINAL",0,0,"[?25la[?25h",,terminal_output +3705,8295177,"TERMINAL",0,0,"[?25lr[?25h",,terminal_output +3706,8295241,"TERMINAL",0,0,"[?25lg[?25h",,terminal_output +3707,8295442,"TERMINAL",0,0,"[?25le[?25h",,terminal_output +3708,8295549,"TERMINAL",0,0,"[?25lt[?25h",,terminal_output +3709,8295764,"TERMINAL",0,0,"[?25ls[?25h",,terminal_output +3710,8295883,"TERMINAL",0,0,"[?25l.[?25h",,terminal_output +3711,8296198,"TERMINAL",0,0,"[?25ls[?25h",,terminal_output +3712,8296260,"TERMINAL",0,0,"[?25lh[?25h",,terminal_output +3713,8296407,"TERMINAL",0,0,"[?25la[?25h",,terminal_output +3714,8296476,"TERMINAL",0,0,"[?25lp[?25h",,terminal_output +3715,8296579,"TERMINAL",0,0,"[?25le[?25h",,terminal_output +3716,8296879,"TERMINAL",0,0,"\r\n(jdb) (96, 16, 920)\r\n",,terminal_output +3717,8304802,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",0,0,"",python,tab +3718,8304803,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",2900,0,"",python,selection_mouse +3719,8306487,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",2900,0," ",python,content +3720,8306489,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",2901,0,"",python,selection_keyboard +3721,8306665,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",2901,0,":",python,content +3722,8306666,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",2902,0,"",python,selection_keyboard +3723,8307271,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",2902,0,"m",python,content +3724,8307272,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",2903,0,"",python,selection_keyboard +3725,8307988,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",2902,1,"",python,content +3726,8308312,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",2902,0,",",python,content +3727,8308313,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",2903,0,"",python,selection_keyboard +3728,8309540,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",2953,0,"",python,selection_mouse +3729,8310699,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",2953,0," ",python,content +3730,8310700,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",2954,0,"",python,selection_keyboard +3731,8311117,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",2954,0,":",python,content +3732,8311118,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",2955,0,"",python,selection_keyboard 
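The jdb session above pauses at `jax.debug.breakpoint()` and prints `mask.shape == (96, 16, 920)`, `logits.shape == (96, 16, 920, 1024)`, and `targets.shape == (96, 16, 920)`, while the listed source shows the non-MaskGIT branch shifting logits against targets. A sketch of that step (names follow the jdb listing; the function wrapper is an assumption):

```python
import jax
import jax.numpy as jnp

B, T, N, V = 96, 16, 920, 1024  # shapes printed in the jdb session above

def dynamics_loss_inputs(outputs, use_maskgit=False):
    # Pausing here is what produced the (jdb) prompts above; inside the
    # debugger, expressions like `mask.shape` evaluate directly.
    jax.debug.breakpoint()
    logits = outputs["token_logits"]
    targets = outputs["video_tokens"]
    mask = outputs["mask"]
    if not use_maskgit:
        # Logits for frame t predict the tokens of frame t+1, so drop the
        # last frame of logits and the first frame of targets and mask:
        logits = logits[:, :-1]    # (96, 15, 920, 1024)
        targets = targets[:, 1:]   # (96, 15, 920)
        mask = mask[:, 1:]         # (96, 15, 920)
    return logits, targets, mask

outputs = {
    "token_logits": jnp.zeros((B, T, N, V)),
    "video_tokens": jnp.zeros((B, T, N), dtype=jnp.int32),
    "mask": jnp.ones((B, T, N), dtype=bool),
}
logits, targets, mask = dynamics_loss_inputs(outputs)
```

The slice edits being typed into train_dynamics.py in the rows above (`:`, `,`, `:-1`-style range punctuation at offsets 2900-2997) are exactly this frame shift.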
+3733,8311981,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",2955,0,",",python,content +3734,8311982,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",2956,0,"",python,selection_keyboard +3735,8313445,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",2960,0,"",python,selection_mouse +3736,8314728,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",2995,0,"",python,selection_mouse +3737,8315649,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",2995,0,":",python,content +3738,8315650,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",2996,0,"",python,selection_keyboard +3739,8315913,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",2996,0,",",python,content +3740,8315914,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",2997,0,"",python,selection_keyboard +3741,8316321,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",2997,0," ",python,content +3742,8316322,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",2998,0,"",python,selection_keyboard +3743,8316841,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",2997,0,"",python,selection_command +3744,8317882,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",2955,0,"",python,selection_mouse +3745,8318506,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",3003,0,"",python,selection_mouse +3746,8452719,"TERMINAL",0,0,"[?25lc[?25h",,terminal_output +3747,8452865,"TERMINAL",0,0,"\r\n",,terminal_output +3748,8456741,"TERMINAL",0,0,"(jdb) Entering jdb:\r\n",,terminal_output +3749,8463337,"TERMINAL",0,0,"[?25lm[?25h",,terminal_output +3750,8463569,"TERMINAL",0,0,"[?25la[?25h",,terminal_output +3751,8463748,"TERMINAL",0,0,"[?25ls[?25h",,terminal_output +3752,8463854,"TERMINAL",0,0,"[?25lk[?25h",,terminal_output +3753,8464148,"TERMINAL",0,0,"[?25l.[?25h",,terminal_output +3754,8464313,"TERMINAL",0,0,"[?25ls[?25h",,terminal_output +3755,8464376,"TERMINAL",0,0,"[?25lh[?25h",,terminal_output +3756,8464630,"TERMINAL",0,0,"[?25la[?25h",,terminal_output +3757,8464683,"TERMINAL",0,0,"[?25lp[?25h",,terminal_output +3758,8464744,"TERMINAL",0,0,"[?25le[?25h",,terminal_output +3759,8464966,"TERMINAL",0,0,"\r\n(jdb) (96, 15, 920)\r\n",,terminal_output +3760,8465973,"TERMINAL",0,0,"[?25ll[?25h",,terminal_output +3761,8466180,"TERMINAL",0,0,"[?25lo[?25h",,terminal_output +3762,8466240,"TERMINAL",0,0,"[?25lg[?25h",,terminal_output +3763,8466518,"TERMINAL",0,0,"[?25lt[?25h",,terminal_output +3764,8466623,"TERMINAL",0,0,"[?25li[?25h",,terminal_output +3765,8467348,"TERMINAL",0,0,"[?25li[?25h",,terminal_output +3766,8467418,"TERMINAL",0,0,"[?25lt[?25h",,terminal_output +3767,8467632,"TERMINAL",0,0,"[?25ls[?25h",,terminal_output +3768,8467722,"TERMINAL",0,0,"[?25l.[?25h",,terminal_output +3769,8467942,"TERMINAL",0,0,"[?25ls[?25h",,terminal_output +3770,8468645,"TERMINAL",0,0,"[?25lh[?25h",,terminal_output +3771,8468753,"TERMINAL",0,0,"[?25la[?25h",,terminal_output +3772,8468937,"TERMINAL",0,0,"[?25lp[?25h",,terminal_output +3773,8469044,"TERMINAL",0,0,"[?25le[?25h",,terminal_output +3774,8469182,"TERMINAL",0,0,"\r\n(jdb) (96, 15, 920, 1024)\r\n",,terminal_output 
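After the edit and re-run, the same jdb probes now come back shifted: `mask.shape == (96, 15, 920)` and `logits.shape == (96, 15, 920, 1024)`, confirming the one-frame offset. Combined with the `ce_loss = (mask * ce_loss).sum() / mask.sum()` line selected earlier in train_dynamics.py, the loss is a mask-weighted mean of per-token cross entropy. A sketch assuming optax for the per-token CE (the repo may compute it differently):

```python
import jax.numpy as jnp
import optax

def masked_ce_loss(logits, targets, mask):
    # Per-token cross entropy over the 1024-way codebook, then the
    # mask-weighted mean from the line selected in train_dynamics.py.
    ce_loss = optax.softmax_cross_entropy_with_integer_labels(logits, targets)
    return (mask * ce_loss).sum() / mask.sum()

logits = jnp.zeros((96, 15, 920, 1024))  # shapes from the second jdb session
targets = jnp.zeros((96, 15, 920), dtype=jnp.int32)
mask = jnp.ones((96, 15, 920))
loss = masked_ce_loss(logits, targets, mask)
```

Normalizing by `mask.sum()` rather than the tensor size keeps the loss scale independent of how many tokens are masked out.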
+3775,8471354,"TERMINAL",0,0,"[?25lt[?25h",,terminal_output +3776,8471459,"TERMINAL",0,0,"[?25la[?25h",,terminal_output +3777,8472168,"TERMINAL",0,0,"[?25lr[?25h",,terminal_output +3778,8472228,"TERMINAL",0,0,"[?25lg[?25h",,terminal_output +3779,8472442,"TERMINAL",0,0,"[?25le[?25h[?25lt[?25h",,terminal_output +3780,8472732,"TERMINAL",0,0,"[?25ls[?25h",,terminal_output +3781,8472918,"TERMINAL",0,0,"[?25l.[?25h",,terminal_output +3782,8473289,"TERMINAL",0,0,"[?25ls[?25h",,terminal_output +3783,8473509,"TERMINAL",0,0,"[?25lh[?25h",,terminal_output +3784,8473667,"TERMINAL",0,0,"[?25la[?25h",,terminal_output +3785,8473781,"TERMINAL",0,0,"[?25lp[?25h",,terminal_output +3786,8473843,"TERMINAL",0,0,"[?25le[?25h",,terminal_output +3787,8474055,"TERMINAL",0,0,"\r\n(jdb) (96, 15, 920)\r\n",,terminal_output +3788,8518182,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",0,0,"",python,tab +3789,8518183,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",2816,0,"",python,selection_mouse +3790,8519647,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",2801,27,"",python,content +3791,8519669,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",2805,0,"",python,selection_command +3792,8519888,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",2834,0,"",python,selection_command +3793,8520188,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",2886,0,"",python,selection_command +3794,8520329,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",2938,0,"",python,selection_command +3795,8520462,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",2976,0,"",python,selection_command +3796,8520898,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",2976,28,"",python,content +3797,8520938,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",2980,0,"",python,selection_command +3798,8523400,"TERMINAL",0,0,"^Csrun: interrupt (one more within 1 sec to abort)\r\nsrun: StepId=3350418.9 tasks 0-7: running\r\n",,terminal_output +3799,8523980,"TERMINAL",0,0,"^Csrun: sending Ctrl-C to StepId=3350418.9\r\n(jdb) --KeyboardInterrupt--\r\nEntering jdb:\r\nProcess SpawnProcess-1:\r\nProcess SpawnProcess-6:\r\nProcess SpawnProcess-2:\r\nProcess SpawnProcess-1:\r\nProcess SpawnProcess-8:\r\nProcess SpawnProcess-1:\r\nProcess SpawnProcess-4:\r\nProcess SpawnProcess-4:\r\nProcess SpawnProcess-4:\r\nProcess SpawnProcess-2:\r\nProcess SpawnProcess-3:\r\nProcess SpawnProcess-7:\r\nProcess SpawnProcess-3:\r\nProcess SpawnProcess-7:\r\nProcess SpawnProcess-7:\r\nProcess SpawnProcess-3:\r\nProcess SpawnProcess-6:\r\nProcess SpawnProcess-5:\r\nProcess SpawnProcess-3:\r\nProcess SpawnProcess-1:\r\nProcess SpawnProcess-2:\r\nProcess SpawnProcess-5:\r\nProcess SpawnProcess-1:\r\nProcess SpawnProcess-3:\r\nProcess SpawnProcess-8:\r\nProcess SpawnProcess-2:\r\nProcess SpawnProcess-6:\r\nProcess SpawnProcess-6:\r\nProcess SpawnProcess-8:\r\nProcess SpawnProcess-5:\r\nProcess SpawnProcess-5:\r\nProcess SpawnProcess-8:\r\nProcess SpawnProcess-4:\r\nProcess SpawnProcess-6:\r\nProcess SpawnProcess-8:\r\nsrun: forcing job termination\r\nProcess SpawnProcess-3:\r\nProcess SpawnProcess-7:\r\nProcess SpawnProcess-6:\r\nProcess SpawnProcess-8:\r\nProcess SpawnProcess-5:\r\nProcess SpawnProcess-7:\r\nProcess 
SpawnProcess-8:\r\nProcess SpawnProcess-5:\r\nProcess SpawnProcess-6:\r\nProcess SpawnProcess-1:\r\nProcess SpawnProcess-5:\r\nProcess SpawnProcess-7:\r\nProcess SpawnProcess-2:\r\nProcess SpawnProcess-1:\r\nProcess SpawnProcess-2:\r\nProcess SpawnProcess-7:\r\nProcess SpawnProcess-2:\r\nProcess SpawnProcess-5:\r\nProcess SpawnProcess-4:\r\nProcess SpawnProcess-3:\r\nProcess SpawnProcess-3:\r\nProcess SpawnProcess-4:\r\nProcess SpawnProcess-8:\r\nProcess SpawnProcess-4:\r\nProcess SpawnProcess-6:\r\nProcess SpawnProcess-2:\r\nProcess SpawnProcess-7:\r\nProcess SpawnProcess-4:\r\nProcess SpawnProcess-1:\r\nsrun: Job step aborted: Waiting up to 32 seconds for job step to finish.\r\nslurmstepd: error: *** STEP 3350418.9 ON hkn0710 CANCELLED AT 2025-07-16T17:11:31 ***\r\n",,terminal_output +3800,8524105,"TERMINAL",0,0,"(jdb) ",,terminal_output +3801,8524208,"TERMINAL",0,0,"^Csrun: sending Ctrl-C to StepId=3350418.9\r\nsrun: job abort in progress\r\n",,terminal_output +3802,8524779,"TERMINAL",0,0,"^C",,terminal_output +3803,8525434,"TERMINAL",0,0,"]0;tum_cte0515@hkn0710:~/Projects/jafar[?2004h(jafar) [tum_cte0515@hkn0710 jafar]$ ",,terminal_output +3804,8526129,"TERMINAL",0,0,"sh slurm/jobs/mihir/horeka/yolo-runs/tester.sh",,terminal_output +3805,8526789,"TERMINAL",0,0,"\r",,terminal_output +3806,8527042,"TERMINAL",0,0,"",,terminal_output +3807,8578230,"TERMINAL",0,0,"[?25lq[?25h",,terminal_output +3808,8578389,"TERMINAL",0,0,"[?25lu[?25h",,terminal_output +3809,8578513,"TERMINAL",0,0,"[?25le[?25h[?25lu[?25h",,terminal_output +3810,8578650,"TERMINAL",0,0,"[?25le[?25h",,terminal_output +3811,8578775,"TERMINAL",0,0,"\r\n[?2004l\r[?1049h(B[?7hEvery 1.0s: squeue --mehkn0710.localdomain: Wed Jul 16 17:12:26 2025JOBID PARTITION NAME USER ST\tTIME NODES NODELIST(REASON)3348592 accelerat train_dy tum_cte0 R 17:12:04\t 2 hkn[0416,0421]3348397 accelerat train_dy tum_cte0 R 22:33:46\t 2 hkn[0810,0815]3348399 accelerat train_dy tum_cte0 R 22:33:46\t 2 hkn[0601,0603]3348400 accelerat train_dy tum_cte0 R 22:33:46\t 2 hkn[0604,0608]3350418 accelerat interact tum_cte0 R 2:22:40\t 2 hkn[0710-0711]3345116 accelerat train_dy tum_cte0 R 1-23:17:31\t 2 hkn[0503,0506]",,terminal_output +3812,8579797,"TERMINAL",0,0,"7577712",,terminal_output +3813,8580821,"TERMINAL",0,0,"8688823",,terminal_output +3814,8581797,"TERMINAL",0,0,"9799934",,terminal_output +3815,8582815,"TERMINAL",0,0,"30850505045",,terminal_output +3816,8583892,"TERMINAL",0,0,"1911156",,terminal_output +3817,8584918,"TERMINAL",0,0,"21022267",,terminal_output +3818,8585941,"TERMINAL",0,0,"3133378",,terminal_output +3819,8586967,"TERMINAL",0,0,"4244489",,terminal_output +3820,8587990,"TERMINAL",0,0,"53555940",,terminal_output +3821,8589027,"TERMINAL",0,0,"64666501",,terminal_output +3822,8589935,"TERMINAL",0,0,"7577712",,terminal_output +3823,8590954,"TERMINAL",0,0,"8688823",,terminal_output +3824,8591995,"TERMINAL",0,0,"9799934",,terminal_output +3825,8592989,"TERMINAL",0,0,"4084:004:004:0045",,terminal_output +3826,8594030,"TERMINAL",0,0,"1911156",,terminal_output +3827,8594537,"TERMINAL",0,0,"[?1049l\r[?1l>]0;tum_cte0515@hkn0710:~/Projects/jafar[?2004h(jafar) [tum_cte0515@hkn0710 jafar]$ ",,terminal_output +3828,8604525,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",0,0,"",python,tab +3829,8604526,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",1916,0,"",python,selection_mouse 
+3830,8604599,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",1915,0,"",python,selection_command +3831,8605096,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",1858,0,"",python,selection_mouse +3832,8605254,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",1856,3,"jnp",python,selection_mouse +3833,8605857,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",1788,0,"",python,selection_mouse +3834,8606006,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",1778,11,"use_maskgit",python,selection_mouse +3835,8606160,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",1774,30," use_maskgit: bool = False\n",python,selection_mouse +3836,8606849,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",1782,0,"",python,selection_mouse +3837,8606850,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",1778,11,"use_maskgit",python,selection_mouse +3838,8607717,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",1803,0,"",python,selection_mouse +3839,8607734,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",1802,0,"",python,selection_command +3840,8608085,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",1802,1,"e",python,selection_mouse +3841,8608086,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",1800,2,"ls",python,selection_mouse +3842,8608086,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",1766,36,"t = 0.5\n use_maskgit: bool = Fals",python,selection_mouse +3843,8608086,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",1763,39,"loat = 0.5\n use_maskgit: bool = Fals",python,selection_mouse +3844,8608086,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",1759,43,"t: float = 0.5\n use_maskgit: bool = Fals",python,selection_mouse +3845,8608110,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",1803,0,"",python,selection_command +3846,8608110,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",1755,48,"limit: float = 0.5\n use_maskgit: bool = False",python,selection_mouse +3847,8608150,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",1780,23,"e_maskgit: bool = False",python,selection_mouse +3848,8608173,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",1778,25,"use_maskgit: bool = False",python,selection_mouse +3849,8608198,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",1777,26," use_maskgit: bool = False",python,selection_mouse +3850,8608554,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",1778,25,"use_maskgit: bool = False",python,selection_mouse +3851,8608913,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",1778,0,"",python,selection_mouse +3852,8608913,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",1778,11,"use_maskgit",python,selection_mouse +3853,8609140,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",1778,41,"use_maskgit: bool = False\n 
param_dtype",python,selection_mouse +3854,8609141,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",1778,46,"use_maskgit: bool = False\n param_dtype: jnp",python,selection_mouse +3855,8609180,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",1778,47,"use_maskgit: bool = False\n param_dtype: jnp.",python,selection_mouse +3856,8609180,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",1778,52,"use_maskgit: bool = False\n param_dtype: jnp.dtype",python,selection_mouse +3857,8609207,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",1778,54,"use_maskgit: bool = False\n param_dtype: jnp.dtype =",python,selection_mouse +3858,8609232,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",1778,58,"use_maskgit: bool = False\n param_dtype: jnp.dtype = jnp",python,selection_mouse +3859,8609255,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",1778,59,"use_maskgit: bool = False\n param_dtype: jnp.dtype = jnp.",python,selection_mouse +3860,8609297,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",1778,66,"use_maskgit: bool = False\n param_dtype: jnp.dtype = jnp.float32",python,selection_mouse +3861,8609523,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",1778,59,"use_maskgit: bool = False\n param_dtype: jnp.dtype = jnp.",python,selection_mouse +3862,8609581,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",1778,25,"use_maskgit: bool = False",python,selection_mouse +3863,8609581,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",1773,16,"\n use_maskgit",python,selection_mouse +3864,8609827,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",1773,0,"",python,selection_mouse +3865,8609833,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",1772,0,"",python,selection_command +3866,8610426,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",1803,0,"",python,selection_mouse +3867,8610429,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",1802,0,"",python,selection_command +3868,8610890,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",1801,1,"s",python,selection_mouse +3869,8610890,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",1796,6,"= Fals",python,selection_mouse +3870,8610890,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",1791,11,"bool = Fals",python,selection_mouse +3871,8610891,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",1788,14,"t: bool = Fals",python,selection_mouse +3872,8610891,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",1786,16,"git: bool = Fals",python,selection_mouse +3873,8610891,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",1785,17,"kgit: bool = Fals",python,selection_mouse +3874,8610891,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",1784,18,"skgit: bool = Fals",python,selection_mouse +3875,8610891,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",1783,19,"askgit: bool = Fals",python,selection_mouse 
+3876,8610916,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",1801,2,"se",python,selection_command +3877,8610917,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",1783,20,"askgit: bool = False",python,selection_mouse +3878,8611460,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",1783,0,"",python,selection_mouse +3879,8611461,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",1778,11,"use_maskgit",python,selection_mouse +3880,8612202,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",1783,0,"",python,selection_mouse +3881,8612203,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",1778,11,"use_maskgit",python,selection_mouse +3882,8612381,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",1774,30," use_maskgit: bool = False\n",python,selection_mouse +3883,8612957,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",1783,0,"",python,selection_mouse +3884,8612958,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",1778,11,"use_maskgit",python,selection_mouse +3885,8718803,"TERMINAL",0,0,"[?25lq[?25h",,terminal_output +3886,8718863,"TERMINAL",0,0,"[?25lu[?25h",,terminal_output +3887,8719002,"TERMINAL",0,0,"[?25le[?25h",,terminal_output +3888,8719063,"TERMINAL",0,0,"[?25lu[?25h",,terminal_output +3889,8719123,"TERMINAL",0,0,"[?25le[?25h",,terminal_output +3890,8719279,"TERMINAL",0,0,"\r\n[?2004l\r[?1049h(B[?7hEvery 1.0s: squeue --mehkn0710.localdomain: Wed Jul 16 17:14:46 2025JOBID PARTITION NAME USER ST\tTIME NODES NODELIST(REASON)3348592 accelerat train_dy tum_cte0 R 17:14:24\t 2 hkn[0416,0421]3348397 accelerat train_dy tum_cte0 R 22:36:06\t 2 hkn[0810,0815]3348399 accelerat train_dy tum_cte0 R 22:36:06\t 2 hkn[0601,0603]3348400 accelerat train_dy tum_cte0 R 22:36:06\t 2 hkn[0604,0608]3350418 accelerat interact tum_cte0 R 2:25:00\t 2 hkn[0710-0711]3345116 accelerat train_dy tum_cte0 R 1-23:19:51\t 2 hkn[0503,0506]",,terminal_output +3891,8720292,"TERMINAL",0,0,"7577712",,terminal_output +3892,8721315,"TERMINAL",0,0,"8688823",,terminal_output +3893,8722339,"TERMINAL",0,0,"9799934",,terminal_output +3894,8723365,"TERMINAL",0,0,"50810101045",,terminal_output +3895,8723677,"TERMINAL",0,0,"[?1049l\r[?1l>]0;tum_cte0515@hkn0710:~/Projects/jafar[?2004h(jafar) [tum_cte0515@hkn0710 jafar]$ ",,terminal_output +3896,8758400,"TERMINAL",0,0,"use_maskgit",,terminal_output +3897,8760697,"TERMINAL",0,0,"^C[?2004l\r[?2004h[?2004l\r\r\n]0;tum_cte0515@hkn0710:~/Projects/jafar[?2004h(jafar) [tum_cte0515@hkn0710 jafar]$ ",,terminal_output +3898,8761129,"TERMINAL",0,0,"[?25ll[?25h",,terminal_output +3899,8761190,"TERMINAL",0,0,"[?25ls[?25h",,terminal_output +3900,8761251,"TERMINAL",0,0,"\r\n[?2004l\r",,terminal_output +3901,8761371,"TERMINAL",0,0,"data generation_1752502813.7130806.gif generation_1752579794.2949483.gif gifs read_tf_record.py tests\r\ndebug generation_1752503689.8298378.gif generation_1752579931.2817705.gif input_pipeline requirements-franz.txt train_dynamics.py\r\nframe-knoms.png generation_1752504934.1629438.gif generation_1752580458.8344245.gif LICENSE requirements.txt train_lam.py\r\nframe.png generation_1752505829.3945305.gif generation_1752580934.2848504.gif local-logs sample.py train_tokenizer_bak.py\r\nframes generation_1752513109.1235461.gif generation_1752581091.8428152.gif logs 
sample.py_bak train_tokenizer.py\r\ngenerate_dataset.py generation_1752513384.5762262.gif generation_1752581503.520897.gif models scripts_cremers utils\r\ngeneration_1752489078.1856709.gif generation_1752513923.7489405.gif generation_1752581641.3452077.gif overfit_dir scripts_horeka wandb\r\ngeneration_1752489445.163335.gif generation_1752579157.0310874.gif generation_1752588193.6372015.gif __pycache__ slurm weekend-job-requeuer.sh\r\ngeneration_1752501077.2698705.gif generation_1752579372.4300406.gif genie.py README.md slurm-3309772.out weekend-job-starter.sh\r\n]0;tum_cte0515@hkn0710:~/Projects/jafar[?2004h(jafar) [tum_cte0515@hkn0710 jafar]$ ",,terminal_output +3902,9025225,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",0,0,"",python,tab +3903,9025226,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",1784,0,"",python,selection_mouse +3904,9025299,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",1778,11,"use_maskgit",python,selection_mouse +3905,9025391,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",1774,30," use_maskgit: bool = False\n",python,selection_mouse +3906,9026192,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",1784,0,"",python,selection_mouse +3907,9026193,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",1778,11,"use_maskgit",python,selection_mouse +3908,9107585,"models/dynamics.py",0,0,"",python,tab +3909,9116116,"utils/nn.py",0,0,"",python,tab +3910,9229508,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/models/tokenizer.py",0,0,"",python,tab +3911,9235771,"models/dynamics.py",0,0,"",python,tab +3912,9240650,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",0,0,"",python,tab +3913,9262594,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",3465,0,"",python,selection_mouse +3914,9265890,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",3199,0,"",python,selection_mouse +3915,9265891,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",3198,0,"",python,selection_command +3916,9266068,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",3199,0,"",python,selection_mouse +3917,9266083,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",3198,0,"",python,selection_command +3918,9266259,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",3194,4,"sum(",python,selection_mouse +3919,9266260,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",3189,9,"mask.sum(",python,selection_mouse +3920,9266260,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",3187,11,"/ mask.sum(",python,selection_mouse +3921,9266260,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",3181,17,"sum() / mask.sum(",python,selection_mouse +3922,9266260,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",3179,19,").sum() / mask.sum(",python,selection_mouse +3923,9266260,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",3176,22,"acc).sum() / mask.sum(",python,selection_mouse 
+3924,9266261,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",3174,24,"* acc).sum() / mask.sum(",python,selection_mouse +3925,9266289,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",3194,5,"sum()",python,selection_command +3926,9266289,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",3169,30,"mask * acc).sum() / mask.sum()",python,selection_mouse +3927,9266398,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",3168,31,"(mask * acc).sum() / mask.sum()",python,selection_mouse +3928,9266854,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",3169,0,"",python,selection_mouse +3929,9266974,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",3169,4,"mask",python,selection_mouse +3930,9267157,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",3169,6,"mask *",python,selection_mouse +3931,9267157,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",3169,10,"mask * acc",python,selection_mouse +3932,9267158,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",3169,53,"mask * acc).sum() / mask.sum()\n select_probs = jax",python,selection_mouse +3933,9267182,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",3169,54,"mask * acc).sum() / mask.sum()\n select_probs = jax.",python,selection_mouse +3934,9267211,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",3169,57,"mask * acc).sum() / mask.sum()\n select_probs = jax.nn.",python,selection_mouse +3935,9267237,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",3169,64,"mask * acc).sum() / mask.sum()\n select_probs = jax.nn.softmax",python,selection_mouse +3936,9267352,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",3169,65,"mask * acc).sum() / mask.sum()\n select_probs = jax.nn.softmax(",python,selection_mouse +3937,9267361,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",3169,71,"mask * acc).sum() / mask.sum()\n select_probs = jax.nn.softmax(logits",python,selection_mouse +3938,9267444,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",3169,72,"mask * acc).sum() / mask.sum()\n select_probs = jax.nn.softmax(logits)",python,selection_mouse +3939,9267769,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",3169,30,"mask * acc).sum() / mask.sum()",python,selection_mouse +3940,9268739,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",3199,0,"",python,selection_mouse +3941,9268745,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",3198,0,"",python,selection_command +3942,9268927,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",3198,1,")",python,selection_mouse +3943,9268928,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",3195,3,"um(",python,selection_mouse +3944,9268928,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",3188,10," mask.sum(",python,selection_mouse +3945,9268928,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",3146,52," == targets\n acc = (mask * acc).sum() / 
mask.sum(",python,selection_mouse +3946,9268929,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",3199,0,"",python,selection_command +3947,9268960,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",3140,59,"ax(-1) == targets\n acc = (mask * acc).sum() / mask.sum()",python,selection_mouse +3948,9268987,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",3137,62,"rgmax(-1) == targets\n acc = (mask * acc).sum() / mask.sum()",python,selection_mouse +3949,9269015,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",3135,64,".argmax(-1) == targets\n acc = (mask * acc).sum() / mask.sum()",python,selection_mouse +3950,9269042,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",3173,26," * acc).sum() / mask.sum()",python,selection_mouse +3951,9269078,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",3172,27,"k * acc).sum() / mask.sum()",python,selection_mouse +3952,9269143,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",3171,28,"sk * acc).sum() / mask.sum()",python,selection_mouse +3953,9269225,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",3170,29,"ask * acc).sum() / mask.sum()",python,selection_mouse +3954,9269290,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",3169,30,"mask * acc).sum() / mask.sum()",python,selection_mouse +3955,9269347,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",3168,31,"(mask * acc).sum() / mask.sum()",python,selection_mouse +3956,9269787,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",3168,0,"",python,selection_mouse +3957,9270115,"models/dynamics.py",0,0,"",python,tab +3958,9270116,"models/dynamics.py",1036,0,"",python,selection_mouse +3959,9270140,"models/dynamics.py",1035,0,"",python,selection_command +3960,9272339,"TERMINAL",0,0,"ls",,terminal_output +3961,9272734,"TERMINAL",0,0,"queue",,terminal_output +3962,9273353,"TERMINAL",0,0,"sh slurm/jobs/mihir/horeka/yolo-runs/tester.sh",,terminal_output +3963,9274094,"TERMINAL",0,0,"\rqueue",,terminal_output +3964,9274489,"TERMINAL",0,0,"ls",,terminal_output +3965,9274680,"TERMINAL",0,0,"",,terminal_output +3966,9274844,"TERMINAL",0,0,"",,terminal_output +3967,9275057,"TERMINAL",0,0,"",,terminal_output +3968,9378871,"TERMINAL",0,0,"[?25lr[?25h",,terminal_output +3969,9379067,"TERMINAL",0,0,"[?25lu[?25h[?25ln[?25h",,terminal_output +3970,9379270,"TERMINAL",0,0,"[?25ln[?25h",,terminal_output +3971,9379341,"TERMINAL",0,0,"[?25le[?25h",,terminal_output +3972,9379402,"TERMINAL",0,0,"[?25lr[?25h",,terminal_output +3973,9379549,"TERMINAL",0,0,"\r\n[?2004l\r]0;tum_cte0515@hkn0710:~/Projects/jafar_jobs[?2004h(jafar) [tum_cte0515@hkn0710 jafar_jobs]$ ",,terminal_output +3974,9380037,"TERMINAL",0,0,"[?25ls[?25h",,terminal_output +3975,9380465,"TERMINAL",0,0,"[?25ly[?25h",,terminal_output +3976,9380991,"TERMINAL",0,0,"[?25lc[?25h",,terminal_output +3977,9381200,"TERMINAL",0,0,"[?25l-[?25h",,terminal_output +3978,9381450,"TERMINAL",0,0,"[?25lr[?25h",,terminal_output +3979,9382356,"TERMINAL",0,0,"[?25ln[?25h",,terminal_output +3980,9382416,"TERMINAL",0,0,"[?25lc[?25h",,terminal_output +3981,9383350,"TERMINAL",0,0,"[?25l-[?25h",,terminal_output +3982,9383563,"TERMINAL",0,0,"[?25lr[?25h",,terminal_output 
+3983,9383998,"TERMINAL",0,0,"[?25lu[?25h[?25ln[?25h",,terminal_output +3984,9384174,"TERMINAL",0,0,"[?25ln[?25h",,terminal_output +3985,9384281,"TERMINAL",0,0,"[?25le[?25h",,terminal_output +3986,9384341,"TERMINAL",0,0,"[?25lr[?25h",,terminal_output +3987,9384506,"TERMINAL",0,0,"\r\n[?2004l\rsending incremental file list\r\n",,terminal_output +3988,9386927,"TERMINAL",0,0,"./\r\ngenie.py\r\nsample.py\r\ntrain_dynamics.py\r\ntrain_lam.py\r\ntrain_tokenizer.py\r\n",,terminal_output +3989,9388620,"TERMINAL",0,0,"input_pipeline/\r\ninput_pipeline/download/\r\ninput_pipeline/download/download_array_records.sh\r\ninput_pipeline/download/openai/\r\ninput_pipeline/download/openai/download_index_files.sh\r\ninput_pipeline/download/openai/download_videos.py\r\ninput_pipeline/preprocess/\r\ninput_pipeline/preprocess/npy_to_tfrecord.py\r\ninput_pipeline/preprocess/video_to_array_records.py\r\ninput_pipeline/preprocess/video_to_npy.py\r\nmodels/\r\nmodels/dynamics.py\r\nmodels/lam.py\r\nmodels/tokenizer.py\r\nslurm/jobs/mihir/horeka/yolo-runs/tester.sh\r\nslurm/jobs/mihir/horeka/yolo-runs/train_dynamics_yolorun.sbatch\r\nutils/\r\nutils/lr_utils.py\r\nutils/nn.py\r\nutils/parameter_utils.py\r\n",,terminal_output +3990,9388768,"TERMINAL",0,0,"\r\nsent 121,176 bytes received 512 bytes 22,125.09 bytes/sec\r\ntotal size is 141,970,947 speedup is 1,166.68\r\n]0;tum_cte0515@hkn0710:~/Projects/jafar_jobs[?2004h(jafar) [tum_cte0515@hkn0710 jafar_jobs]$ ",,terminal_output +3991,9409618,"slurm/jobs/mihir/horeka/modelsize_scaling/dynamics/1_train_dyn_36M.sbatch",0,0,"#!/usr/bin/env bash\n\n#SBATCH --nodes=2\n#SBATCH --ntasks-per-node=4\n#SBATCH --time=48:00:00\n#SBATCH --partition=accelerated\n#SBATCH --cpus-per-task=5\n#SBATCH --gres=gpu:4\n#SBATCH --output=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir/big-runs/dynamics-cotraining-modelsize-scaling/%x_%j.log\n#SBATCH --error=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir/big-runs/dynamics-cotraining-modelsize-scaling/%x_%j.log\n#SBATCH --job-name=train_dynamics_modelsize_scaling_36M_2_node\n\n# Log the sbatch script\ncat $0\n\nmodule unload mpi/openmpi/5.0\nmodule unload devel/cuda/12.4\n# source .venv/bin/activate\n\narray_records_dir=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_new/open_ai_minecraft_arrayrecords_chunked\n\njob_name=$SLURM_JOB_NAME\nslurm_job_id=$SLURM_JOB_ID\n\nCHECKPOINT_DIR=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/big-runs/dynamics-cotraining-modelsize-scaling/$job_name\nmkdir -p $CHECKPOINT_DIR\n\ntokenizer_ckpt_dir=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/train_tokenizer_batch_size_scaling_16_node/3321526/tokenizer_22000/\n\nenv | grep SLURM\n\nsrun python train_dynamics.py \\n --save_ckpt \\n --ckpt_dir $CHECKPOINT_DIR \\n --batch_size=96 \\n --min_lr=0 \\n --max_lr=1.5e-4 \\n --log_image_interval=1000 \\n --log \\n --log_checkpoint_interval=1000 \\n --name=dynamics-modelsize-scaling-36M-$slurm_job_id \\n --tags dynamics modelsize-scaling 36M \\n --entity instant-uv \\n --project jafar \\n --tokenizer_checkpoint=$tokenizer_ckpt_dir \\n --data_dir $array_records_dir \\n",shellscript,tab +3992,9411070,"slurm/jobs/mihir/horeka/modelsize_scaling/dynamics/1_train_dyn_36M.sbatch",1536,0,"",shellscript,selection_mouse +3993,9411071,"slurm/jobs/mihir/horeka/modelsize_scaling/dynamics/1_train_dyn_36M.sbatch",1535,0,"",shellscript,selection_command 
+3994,9411382,"slurm/jobs/mihir/horeka/modelsize_scaling/dynamics/1_train_dyn_36M.sbatch",1536,0,"",shellscript,selection_mouse +3995,9411383,"slurm/jobs/mihir/horeka/modelsize_scaling/dynamics/1_train_dyn_36M.sbatch",1535,0,"",shellscript,selection_command +3996,9412354,"slurm/jobs/mihir/horeka/modelsize_scaling/dynamics/1_train_dyn_36M.sbatch",1621,0,"",shellscript,selection_mouse +3997,9412355,"slurm/jobs/mihir/horeka/modelsize_scaling/dynamics/1_train_dyn_36M.sbatch",1620,0,"",shellscript,selection_command +3998,9412523,"slurm/jobs/mihir/horeka/modelsize_scaling/dynamics/1_train_dyn_36M.sbatch",1621,0,"",shellscript,selection_mouse +3999,9412523,"slurm/jobs/mihir/horeka/modelsize_scaling/dynamics/1_train_dyn_36M.sbatch",1620,0,"",shellscript,selection_command +4000,9412716,"slurm/jobs/mihir/horeka/modelsize_scaling/dynamics/1_train_dyn_36M.sbatch",1620,1,"\",shellscript,selection_mouse +4001,9412717,"slurm/jobs/mihir/horeka/modelsize_scaling/dynamics/1_train_dyn_36M.sbatch",1585,35,"\n --data_dir $array_records_dir ",shellscript,selection_mouse +4002,9412763,"slurm/jobs/mihir/horeka/modelsize_scaling/dynamics/1_train_dyn_36M.sbatch",1621,0,"",shellscript,selection_command +4003,9412763,"slurm/jobs/mihir/horeka/modelsize_scaling/dynamics/1_train_dyn_36M.sbatch",1585,36,"\n --data_dir $array_records_dir \",shellscript,selection_mouse +4004,9412764,"slurm/jobs/mihir/horeka/modelsize_scaling/dynamics/1_train_dyn_36M.sbatch",1536,85,"\n --tokenizer_checkpoint=$tokenizer_ckpt_dir \\n --data_dir $array_records_dir \",shellscript,selection_mouse +4005,9412878,"slurm/jobs/mihir/horeka/modelsize_scaling/dynamics/1_train_dyn_36M.sbatch",1514,107,"\n --project jafar \\n --tokenizer_checkpoint=$tokenizer_ckpt_dir \\n --data_dir $array_records_dir \",shellscript,selection_mouse +4006,9412878,"slurm/jobs/mihir/horeka/modelsize_scaling/dynamics/1_train_dyn_36M.sbatch",1483,138,"36M \\n --entity instant-uv \\n --project jafar \\n --tokenizer_checkpoint=$tokenizer_ckpt_dir \\n --data_dir $array_records_dir \",shellscript,selection_mouse +4007,9412879,"slurm/jobs/mihir/horeka/modelsize_scaling/dynamics/1_train_dyn_36M.sbatch",1475,146,"scaling 36M \\n --entity instant-uv \\n --project jafar \\n --tokenizer_checkpoint=$tokenizer_ckpt_dir \\n --data_dir $array_records_dir \",shellscript,selection_mouse +4008,9412879,"slurm/jobs/mihir/horeka/modelsize_scaling/dynamics/1_train_dyn_36M.sbatch",1417,204,"scaling-36M-$slurm_job_id \\n --tags dynamics modelsize-scaling 36M \\n --entity instant-uv \\n --project jafar \\n --tokenizer_checkpoint=$tokenizer_ckpt_dir \\n --data_dir $array_records_dir \",shellscript,selection_mouse +4009,9412879,"slurm/jobs/mihir/horeka/modelsize_scaling/dynamics/1_train_dyn_36M.sbatch",1380,241,"1000 \\n --name=dynamics-modelsize-scaling-36M-$slurm_job_id \\n --tags dynamics modelsize-scaling 36M \\n --entity instant-uv \\n --project jafar \\n --tokenizer_checkpoint=$tokenizer_ckpt_dir \\n --data_dir $array_records_dir \",shellscript,selection_mouse +4010,9412906,"slurm/jobs/mihir/horeka/modelsize_scaling/dynamics/1_train_dyn_36M.sbatch",1379,242,"=1000 \\n --name=dynamics-modelsize-scaling-36M-$slurm_job_id \\n --tags dynamics modelsize-scaling 36M \\n --entity instant-uv \\n --project jafar \\n --tokenizer_checkpoint=$tokenizer_ckpt_dir \\n --data_dir $array_records_dir \",shellscript,selection_mouse +4011,9412932,"slurm/jobs/mihir/horeka/modelsize_scaling/dynamics/1_train_dyn_36M.sbatch",1349,272,"\n --log_checkpoint_interval=1000 \\n 
--name=dynamics-modelsize-scaling-36M-$slurm_job_id \\n --tags dynamics modelsize-scaling 36M \\n --entity instant-uv \\n --project jafar \\n --tokenizer_checkpoint=$tokenizer_ckpt_dir \\n --data_dir $array_records_dir \",shellscript,selection_mouse +4012,9412957,"slurm/jobs/mihir/horeka/modelsize_scaling/dynamics/1_train_dyn_36M.sbatch",1312,309,"log_image_interval=1000 \\n --log \\n --log_checkpoint_interval=1000 \\n --name=dynamics-modelsize-scaling-36M-$slurm_job_id \\n --tags dynamics modelsize-scaling 36M \\n --entity instant-uv \\n --project jafar \\n --tokenizer_checkpoint=$tokenizer_ckpt_dir \\n --data_dir $array_records_dir \",shellscript,selection_mouse +4013,9412984,"slurm/jobs/mihir/horeka/modelsize_scaling/dynamics/1_train_dyn_36M.sbatch",1303,318," \\n --log_image_interval=1000 \\n --log \\n --log_checkpoint_interval=1000 \\n --name=dynamics-modelsize-scaling-36M-$slurm_job_id \\n --tags dynamics modelsize-scaling 36M \\n --entity instant-uv \\n --project jafar \\n --tokenizer_checkpoint=$tokenizer_ckpt_dir \\n --data_dir $array_records_dir \",shellscript,selection_mouse +4014,9413013,"slurm/jobs/mihir/horeka/modelsize_scaling/dynamics/1_train_dyn_36M.sbatch",1283,338,"\n --max_lr=1.5e-4 \\n --log_image_interval=1000 \\n --log \\n --log_checkpoint_interval=1000 \\n --name=dynamics-modelsize-scaling-36M-$slurm_job_id \\n --tags dynamics modelsize-scaling 36M \\n --entity instant-uv \\n --project jafar \\n --tokenizer_checkpoint=$tokenizer_ckpt_dir \\n --data_dir $array_records_dir \",shellscript,selection_mouse +4015,9413045,"slurm/jobs/mihir/horeka/modelsize_scaling/dynamics/1_train_dyn_36M.sbatch",1251,370,"batch_size=96 \\n --min_lr=0 \\n --max_lr=1.5e-4 \\n --log_image_interval=1000 \\n --log \\n --log_checkpoint_interval=1000 \\n --name=dynamics-modelsize-scaling-36M-$slurm_job_id \\n --tags dynamics modelsize-scaling 36M \\n --entity instant-uv \\n --project jafar \\n --tokenizer_checkpoint=$tokenizer_ckpt_dir \\n --data_dir $array_records_dir \",shellscript,selection_mouse +4016,9413077,"slurm/jobs/mihir/horeka/modelsize_scaling/dynamics/1_train_dyn_36M.sbatch",1218,403,"ckpt_dir $CHECKPOINT_DIR \\n --batch_size=96 \\n --min_lr=0 \\n --max_lr=1.5e-4 \\n --log_image_interval=1000 \\n --log \\n --log_checkpoint_interval=1000 \\n --name=dynamics-modelsize-scaling-36M-$slurm_job_id \\n --tags dynamics modelsize-scaling 36M \\n --entity instant-uv \\n --project jafar \\n --tokenizer_checkpoint=$tokenizer_ckpt_dir \\n --data_dir $array_records_dir \",shellscript,selection_mouse +4017,9413499,"slurm/jobs/mihir/horeka/modelsize_scaling/dynamics/1_train_dyn_36M.sbatch",1219,0,"",shellscript,selection_mouse +4018,9413499,"slurm/jobs/mihir/horeka/modelsize_scaling/dynamics/1_train_dyn_36M.sbatch",1218,8,"ckpt_dir",shellscript,selection_mouse +4019,9413659,"slurm/jobs/mihir/horeka/modelsize_scaling/dynamics/1_train_dyn_36M.sbatch",1212,33," --ckpt_dir $CHECKPOINT_DIR \\n",shellscript,selection_mouse +4020,9413870,"slurm/jobs/mihir/horeka/modelsize_scaling/dynamics/1_train_dyn_36M.sbatch",1212,55," --ckpt_dir $CHECKPOINT_DIR \\n --batch_size=96 \\n",shellscript,selection_mouse +4021,9413870,"slurm/jobs/mihir/horeka/modelsize_scaling/dynamics/1_train_dyn_36M.sbatch",1212,94," --ckpt_dir $CHECKPOINT_DIR \\n --batch_size=96 \\n --min_lr=0 \\n --max_lr=1.5e-4 \\n",shellscript,selection_mouse +4022,9413871,"slurm/jobs/mihir/horeka/modelsize_scaling/dynamics/1_train_dyn_36M.sbatch",1212,138," --ckpt_dir $CHECKPOINT_DIR \\n --batch_size=96 \\n --min_lr=0 \\n --max_lr=1.5e-4 \\n 
--log_image_interval=1000 \\n --log \\n",shellscript,selection_mouse +4023,9413911,"slurm/jobs/mihir/horeka/modelsize_scaling/dynamics/1_train_dyn_36M.sbatch",1212,277," --ckpt_dir $CHECKPOINT_DIR \\n --batch_size=96 \\n --min_lr=0 \\n --max_lr=1.5e-4 \\n --log_image_interval=1000 \\n --log \\n --log_checkpoint_interval=1000 \\n --name=dynamics-modelsize-scaling-36M-$slurm_job_id \\n --tags dynamics modelsize-scaling 36M \\n",shellscript,selection_mouse +4024,9413912,"slurm/jobs/mihir/horeka/modelsize_scaling/dynamics/1_train_dyn_36M.sbatch",1212,374," --ckpt_dir $CHECKPOINT_DIR \\n --batch_size=96 \\n --min_lr=0 \\n --max_lr=1.5e-4 \\n --log_image_interval=1000 \\n --log \\n --log_checkpoint_interval=1000 \\n --name=dynamics-modelsize-scaling-36M-$slurm_job_id \\n --tags dynamics modelsize-scaling 36M \\n --entity instant-uv \\n --project jafar \\n --tokenizer_checkpoint=$tokenizer_ckpt_dir \\n",shellscript,selection_mouse +4025,9413938,"slurm/jobs/mihir/horeka/modelsize_scaling/dynamics/1_train_dyn_36M.sbatch",1212,410," --ckpt_dir $CHECKPOINT_DIR \\n --batch_size=96 \\n --min_lr=0 \\n --max_lr=1.5e-4 \\n --log_image_interval=1000 \\n --log \\n --log_checkpoint_interval=1000 \\n --name=dynamics-modelsize-scaling-36M-$slurm_job_id \\n --tags dynamics modelsize-scaling 36M \\n --entity instant-uv \\n --project jafar \\n --tokenizer_checkpoint=$tokenizer_ckpt_dir \\n --data_dir $array_records_dir \\n",shellscript,selection_mouse +4026,9414179,"slurm/jobs/mihir/horeka/modelsize_scaling/dynamics/1_train_dyn_36M.sbatch",1622,0,"",shellscript,selection_mouse +4027,9415014,"slurm/jobs/mihir/horeka/modelsize_scaling/dynamics/1_train_dyn_36M.sbatch",1621,1,"\n",shellscript,selection_mouse +4028,9415014,"slurm/jobs/mihir/horeka/modelsize_scaling/dynamics/1_train_dyn_36M.sbatch",1480,142,"ng 36M \\n --entity instant-uv \\n --project jafar \\n --tokenizer_checkpoint=$tokenizer_ckpt_dir \\n --data_dir $array_records_dir \\n",shellscript,selection_mouse +4029,9415015,"slurm/jobs/mihir/horeka/modelsize_scaling/dynamics/1_train_dyn_36M.sbatch",1332,290,"000 \\n --log \\n --log_checkpoint_interval=1000 \\n --name=dynamics-modelsize-scaling-36M-$slurm_job_id \\n --tags dynamics modelsize-scaling 36M \\n --entity instant-uv \\n --project jafar \\n --tokenizer_checkpoint=$tokenizer_ckpt_dir \\n --data_dir $array_records_dir \\n",shellscript,selection_mouse +4030,9415015,"slurm/jobs/mihir/horeka/modelsize_scaling/dynamics/1_train_dyn_36M.sbatch",1232,390,"KPOINT_DIR \\n --batch_size=96 \\n --min_lr=0 \\n --max_lr=1.5e-4 \\n --log_image_interval=1000 \\n --log \\n --log_checkpoint_interval=1000 \\n --name=dynamics-modelsize-scaling-36M-$slurm_job_id \\n --tags dynamics modelsize-scaling 36M \\n --entity instant-uv \\n --project jafar \\n --tokenizer_checkpoint=$tokenizer_ckpt_dir \\n --data_dir $array_records_dir \\n",shellscript,selection_mouse +4031,9415016,"slurm/jobs/mihir/horeka/modelsize_scaling/dynamics/1_train_dyn_36M.sbatch",1210,412,"\\n --ckpt_dir $CHECKPOINT_DIR \\n --batch_size=96 \\n --min_lr=0 \\n --max_lr=1.5e-4 \\n --log_image_interval=1000 \\n --log \\n --log_checkpoint_interval=1000 \\n --name=dynamics-modelsize-scaling-36M-$slurm_job_id \\n --tags dynamics modelsize-scaling 36M \\n --entity instant-uv \\n --project jafar \\n --tokenizer_checkpoint=$tokenizer_ckpt_dir \\n --data_dir $array_records_dir \\n",shellscript,selection_mouse +4032,9415017,"slurm/jobs/mihir/horeka/modelsize_scaling/dynamics/1_train_dyn_36M.sbatch",1161,461,"\nsrun python train_dynamics.py \\n --save_ckpt \\n 
--ckpt_dir $CHECKPOINT_DIR \\n --batch_size=96 \\n --min_lr=0 \\n --max_lr=1.5e-4 \\n --log_image_interval=1000 \\n --log \\n --log_checkpoint_interval=1000 \\n --name=dynamics-modelsize-scaling-36M-$slurm_job_id \\n --tags dynamics modelsize-scaling 36M \\n --entity instant-uv \\n --project jafar \\n --tokenizer_checkpoint=$tokenizer_ckpt_dir \\n --data_dir $array_records_dir \\n",shellscript,selection_mouse +4033,9415017,"slurm/jobs/mihir/horeka/modelsize_scaling/dynamics/1_train_dyn_36M.sbatch",1143,479,"\nenv | grep SLURM\n\nsrun python train_dynamics.py \\n --save_ckpt \\n --ckpt_dir $CHECKPOINT_DIR \\n --batch_size=96 \\n --min_lr=0 \\n --max_lr=1.5e-4 \\n --log_image_interval=1000 \\n --log \\n --log_checkpoint_interval=1000 \\n --name=dynamics-modelsize-scaling-36M-$slurm_job_id \\n --tags dynamics modelsize-scaling 36M \\n --entity instant-uv \\n --project jafar \\n --tokenizer_checkpoint=$tokenizer_ckpt_dir \\n --data_dir $array_records_dir \\n",shellscript,selection_mouse +4034,9415017,"slurm/jobs/mihir/horeka/modelsize_scaling/dynamics/1_train_dyn_36M.sbatch",972,650,"CHECKPOINT_DIR\n\ntokenizer_ckpt_dir=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/train_tokenizer_batch_size_scaling_16_node/3321526/tokenizer_22000/\n\nenv | grep SLURM\n\nsrun python train_dynamics.py \\n --save_ckpt \\n --ckpt_dir $CHECKPOINT_DIR \\n --batch_size=96 \\n --min_lr=0 \\n --max_lr=1.5e-4 \\n --log_image_interval=1000 \\n --log \\n --log_checkpoint_interval=1000 \\n --name=dynamics-modelsize-scaling-36M-$slurm_job_id \\n --tags dynamics modelsize-scaling 36M \\n --entity instant-uv \\n --project jafar \\n --tokenizer_checkpoint=$tokenizer_ckpt_dir \\n --data_dir $array_records_dir \\n",shellscript,selection_mouse +4035,9415018,"slurm/jobs/mihir/horeka/modelsize_scaling/dynamics/1_train_dyn_36M.sbatch",821,801,"\nCHECKPOINT_DIR=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/big-runs/dynamics-cotraining-modelsize-scaling/$job_name\nmkdir -p $CHECKPOINT_DIR\n\ntokenizer_ckpt_dir=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/train_tokenizer_batch_size_scaling_16_node/3321526/tokenizer_22000/\n\nenv | grep SLURM\n\nsrun python train_dynamics.py \\n --save_ckpt \\n --ckpt_dir $CHECKPOINT_DIR \\n --batch_size=96 \\n --min_lr=0 \\n --max_lr=1.5e-4 \\n --log_image_interval=1000 \\n --log \\n --log_checkpoint_interval=1000 \\n --name=dynamics-modelsize-scaling-36M-$slurm_job_id \\n --tags dynamics modelsize-scaling 36M \\n --entity instant-uv \\n --project jafar \\n --tokenizer_checkpoint=$tokenizer_ckpt_dir \\n --data_dir $array_records_dir \\n",shellscript,selection_mouse +4036,9415051,"slurm/jobs/mihir/horeka/modelsize_scaling/dynamics/1_train_dyn_36M.sbatch",1621,1,"\n",shellscript,selection_command +4037,9415051,"slurm/jobs/mihir/horeka/modelsize_scaling/dynamics/1_train_dyn_36M.sbatch",655,967,"ords_dir=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_new/open_ai_minecraft_arrayrecords_chunked\n\njob_name=$SLURM_JOB_NAME\nslurm_job_id=$SLURM_JOB_ID\n\nCHECKPOINT_DIR=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/big-runs/dynamics-cotraining-modelsize-scaling/$job_name\nmkdir -p $CHECKPOINT_DIR\n\ntokenizer_ckpt_dir=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/train_tokenizer_batch_size_scaling_16_node/3321526/tokenizer_22000/\n\nenv | grep SLURM\n\nsrun python train_dynamics.py \\n --save_ckpt \\n --ckpt_dir $CHECKPOINT_DIR \\n --batch_size=96 \\n --min_lr=0 \\n --max_lr=1.5e-4 \\n 
--log_image_interval=1000 \\n --log \\n --log_checkpoint_interval=1000 \\n --name=dynamics-modelsize-scaling-36M-$slurm_job_id \\n --tags dynamics modelsize-scaling 36M \\n --entity instant-uv \\n --project jafar \\n --tokenizer_checkpoint=$tokenizer_ckpt_dir \\n --data_dir $array_records_dir \\n",shellscript,selection_mouse +4038,9415091,"slurm/jobs/mihir/horeka/modelsize_scaling/dynamics/1_train_dyn_36M.sbatch",555,1067,"\n\nmodule unload mpi/openmpi/5.0\nmodule unload devel/cuda/12.4\n# source .venv/bin/activate\n\narray_records_dir=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_new/open_ai_minecraft_arrayrecords_chunked\n\njob_name=$SLURM_JOB_NAME\nslurm_job_id=$SLURM_JOB_ID\n\nCHECKPOINT_DIR=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/big-runs/dynamics-cotraining-modelsize-scaling/$job_name\nmkdir -p $CHECKPOINT_DIR\n\ntokenizer_ckpt_dir=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/train_tokenizer_batch_size_scaling_16_node/3321526/tokenizer_22000/\n\nenv | grep SLURM\n\nsrun python train_dynamics.py \\n --save_ckpt \\n --ckpt_dir $CHECKPOINT_DIR \\n --batch_size=96 \\n --min_lr=0 \\n --max_lr=1.5e-4 \\n --log_image_interval=1000 \\n --log \\n --log_checkpoint_interval=1000 \\n --name=dynamics-modelsize-scaling-36M-$slurm_job_id \\n --tags dynamics modelsize-scaling 36M \\n --entity instant-uv \\n --project jafar \\n --tokenizer_checkpoint=$tokenizer_ckpt_dir \\n --data_dir $array_records_dir \\n",shellscript,selection_mouse +4039,9415139,"slurm/jobs/mihir/horeka/modelsize_scaling/dynamics/1_train_dyn_36M.sbatch",472,1150,"ob-name=train_dynamics_modelsize_scaling_36M_2_node\n\n# Log the sbatch script\ncat $0\n\nmodule unload mpi/openmpi/5.0\nmodule unload devel/cuda/12.4\n# source .venv/bin/activate\n\narray_records_dir=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_new/open_ai_minecraft_arrayrecords_chunked\n\njob_name=$SLURM_JOB_NAME\nslurm_job_id=$SLURM_JOB_ID\n\nCHECKPOINT_DIR=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/big-runs/dynamics-cotraining-modelsize-scaling/$job_name\nmkdir -p $CHECKPOINT_DIR\n\ntokenizer_ckpt_dir=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/train_tokenizer_batch_size_scaling_16_node/3321526/tokenizer_22000/\n\nenv | grep SLURM\n\nsrun python train_dynamics.py \\n --save_ckpt \\n --ckpt_dir $CHECKPOINT_DIR \\n --batch_size=96 \\n --min_lr=0 \\n --max_lr=1.5e-4 \\n --log_image_interval=1000 \\n --log \\n --log_checkpoint_interval=1000 \\n --name=dynamics-modelsize-scaling-36M-$slurm_job_id \\n --tags dynamics modelsize-scaling 36M \\n --entity instant-uv \\n --project jafar \\n --tokenizer_checkpoint=$tokenizer_ckpt_dir \\n --data_dir $array_records_dir \\n",shellscript,selection_mouse +4040,9415482,"slurm/jobs/mihir/horeka/modelsize_scaling/dynamics/1_train_dyn_36M.sbatch",472,0,"",shellscript,selection_mouse +4041,9415969,"slurm/jobs/mihir/horeka/modelsize_scaling/dynamics/1_train_dyn_36M.sbatch",783,0,"",shellscript,selection_mouse +4042,9416626,"slurm/jobs/mihir/horeka/modelsize_scaling/dynamics/1_train_dyn_36M.sbatch",1349,0,"",shellscript,selection_mouse +4043,9416636,"slurm/jobs/mihir/horeka/modelsize_scaling/dynamics/1_train_dyn_36M.sbatch",1348,0,"",shellscript,selection_command +4044,9426142,"slurm/jobs/mihir/horeka/yolo-runs/train_dynamics_yolorun.sbatch",0,0,"#!/usr/bin/env bash\n\n#SBATCH --nodes=2\n#SBATCH --ntasks-per-node=4\n#SBATCH --time=24:00:00\n#SBATCH --partition=accelerated\n#SBATCH --cpus-per-task=5\n#SBATCH 
--gres=gpu:4\n#SBATCH --output=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir/%x_%j.log\n#SBATCH --error=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir/%x_%j.log\n#SBATCH --job-name=train_dyn_yolorun\n\n# Log the sbatch script\ncat $0\n\nmodule unload mpi/openmpi/5.0\nmodule unload devel/cuda/12.4\nsource .venv/bin/activate\n\narray_records_dir=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_new/open_ai_minecraft_arrayrecords_chunked\n# tf_records_dir=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data/open_ai_minecraft_tfrecord\n\njob_name=$SLURM_JOB_NAME\nslurm_job_id=$SLURM_JOB_ID\n\nCHECKPOINT_DIR=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data/checkpoints/$job_name/$slurm_job_id\nmkdir -p $CHECKPOINT_DIR\n\ntokenizer_ckpt_dir=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/big-runs/tokenizer-lr-scaling/train_tokenizer_lr_sweep_1e-4\n\nenv | grep SLURM\n\nsrun python train_dynamics.py \\n --ckpt_dir $CHECKPOINT_DIR \\n --batch_size=96 \\n --min_lr=0 \\n --grad_clip_threshold=10 \\n --max_lr=1e-4 \\n --log_image_interval=1000 \\n --log \\n --log_checkpoint_interval=1000 \\n --name=dynamics-grad-norm-test-run-$slurm_job_id \\n --tags dynamics debug \\n --entity instant-uv \\n --project jafar \\n --tokenizer_checkpoint=$tokenizer_ckpt_dir \\n --data_dir $array_records_dir\n ",shellscript,tab +4045,9429061,"slurm/jobs/mihir/horeka/yolo-runs/tester.sh",0,0,"",shellscript,tab +4046,9429780,"slurm/jobs/mihir/horeka/yolo-runs/tester.sh",1036,0,"",shellscript,selection_mouse +4047,9430024,"slurm/jobs/mihir/horeka/yolo-runs/tester.sh",1034,2,"\n ",shellscript,selection_mouse +4048,9430024,"slurm/jobs/mihir/horeka/yolo-runs/tester.sh",901,135," \\n --entity instant-uv \\n --project jafar \\n --tokenizer_checkpoint=$tokenizer_ckpt_dir \\n --data_dir $array_records_dir\n ",shellscript,selection_mouse +4049,9430025,"slurm/jobs/mihir/horeka/yolo-runs/tester.sh",792,244,"\n --log_checkpoint_interval=1000 \\n --name=dynamics-debug-run-$slurm_job_id \\n --tags dynamics debug \\n --entity instant-uv \\n --project jafar \\n --tokenizer_checkpoint=$tokenizer_ckpt_dir \\n --data_dir $array_records_dir\n ",shellscript,selection_mouse +4050,9430025,"slurm/jobs/mihir/horeka/yolo-runs/tester.sh",736,300,"ax_lr=1e-4 \\n --log_image_interval=1000 \\n --log \\n --log_checkpoint_interval=1000 \\n --name=dynamics-debug-run-$slurm_job_id \\n --tags dynamics debug \\n --entity instant-uv \\n --project jafar \\n --tokenizer_checkpoint=$tokenizer_ckpt_dir \\n --data_dir $array_records_dir\n ",shellscript,selection_mouse +4051,9430092,"slurm/jobs/mihir/horeka/yolo-runs/tester.sh",711,325,"--batch_size=96 \\n --max_lr=1e-4 \\n --log_image_interval=1000 \\n --log \\n --log_checkpoint_interval=1000 \\n --name=dynamics-debug-run-$slurm_job_id \\n --tags dynamics debug \\n --entity instant-uv \\n --project jafar \\n --tokenizer_checkpoint=$tokenizer_ckpt_dir \\n --data_dir $array_records_dir\n ",shellscript,selection_mouse +4052,9430092,"slurm/jobs/mihir/horeka/yolo-runs/tester.sh",709,327," --batch_size=96 \\n --max_lr=1e-4 \\n --log_image_interval=1000 \\n --log \\n --log_checkpoint_interval=1000 \\n --name=dynamics-debug-run-$slurm_job_id \\n --tags dynamics debug \\n --entity instant-uv \\n --project jafar \\n --tokenizer_checkpoint=$tokenizer_ckpt_dir \\n --data_dir $array_records_dir\n ",shellscript,selection_mouse +4053,9430093,"slurm/jobs/mihir/horeka/yolo-runs/tester.sh",708,328," --batch_size=96 \\n --max_lr=1e-4 
\\n --log_image_interval=1000 \\n --log \\n --log_checkpoint_interval=1000 \\n --name=dynamics-debug-run-$slurm_job_id \\n --tags dynamics debug \\n --entity instant-uv \\n --project jafar \\n --tokenizer_checkpoint=$tokenizer_ckpt_dir \\n --data_dir $array_records_dir\n ",shellscript,selection_mouse +4054,9430093,"slurm/jobs/mihir/horeka/yolo-runs/tester.sh",707,329," --batch_size=96 \\n --max_lr=1e-4 \\n --log_image_interval=1000 \\n --log \\n --log_checkpoint_interval=1000 \\n --name=dynamics-debug-run-$slurm_job_id \\n --tags dynamics debug \\n --entity instant-uv \\n --project jafar \\n --tokenizer_checkpoint=$tokenizer_ckpt_dir \\n --data_dir $array_records_dir\n ",shellscript,selection_mouse +4055,9430093,"slurm/jobs/mihir/horeka/yolo-runs/tester.sh",1034,2,"\n ",shellscript,selection_command +4056,9430151,"slurm/jobs/mihir/horeka/yolo-runs/tester.sh",707,329," --batch_size=96 \\n --max_lr=1e-4 \\n --log_image_interval=1000 \\n --log \\n --log_checkpoint_interval=1000 \\n --name=dynamics-debug-run-$slurm_job_id \\n --tags dynamics debug \\n --entity instant-uv \\n --project jafar \\n --tokenizer_checkpoint=$tokenizer_ckpt_dir \\n --data_dir $array_records_dir\n ",shellscript,selection_mouse +4057,9430270,"slurm/jobs/mihir/horeka/yolo-runs/tester.sh",674,362," --ckpt_dir $CHECKPOINT_DIR \\n --batch_size=96 \\n --max_lr=1e-4 \\n --log_image_interval=1000 \\n --log \\n --log_checkpoint_interval=1000 \\n --name=dynamics-debug-run-$slurm_job_id \\n --tags dynamics debug \\n --entity instant-uv \\n --project jafar \\n --tokenizer_checkpoint=$tokenizer_ckpt_dir \\n --data_dir $array_records_dir\n ",shellscript,selection_mouse +4058,9430379,"slurm/jobs/mihir/horeka/yolo-runs/tester.sh",605,431,"XLA_FLAGS=--xla_gpu_autotune_level=0 srun python train_dynamics.py \\n --ckpt_dir $CHECKPOINT_DIR \\n --batch_size=96 \\n --max_lr=1e-4 \\n --log_image_interval=1000 \\n --log \\n --log_checkpoint_interval=1000 \\n --name=dynamics-debug-run-$slurm_job_id \\n --tags dynamics debug \\n --entity instant-uv \\n --project jafar \\n --tokenizer_checkpoint=$tokenizer_ckpt_dir \\n --data_dir $array_records_dir\n ",shellscript,selection_mouse +4059,9430731,"slurm/jobs/mihir/horeka/yolo-runs/tester.sh",606,430,"LA_FLAGS=--xla_gpu_autotune_level=0 srun python train_dynamics.py \\n --ckpt_dir $CHECKPOINT_DIR \\n --batch_size=96 \\n --max_lr=1e-4 \\n --log_image_interval=1000 \\n --log \\n --log_checkpoint_interval=1000 \\n --name=dynamics-debug-run-$slurm_job_id \\n --tags dynamics debug \\n --entity instant-uv \\n --project jafar \\n --tokenizer_checkpoint=$tokenizer_ckpt_dir \\n --data_dir $array_records_dir\n ",shellscript,selection_mouse +4060,9430732,"slurm/jobs/mihir/horeka/yolo-runs/tester.sh",677,359," --ckpt_dir $CHECKPOINT_DIR \\n --batch_size=96 \\n --max_lr=1e-4 \\n --log_image_interval=1000 \\n --log \\n --log_checkpoint_interval=1000 \\n --name=dynamics-debug-run-$slurm_job_id \\n --tags dynamics debug \\n --entity instant-uv \\n --project jafar \\n --tokenizer_checkpoint=$tokenizer_ckpt_dir \\n --data_dir $array_records_dir\n ",shellscript,selection_mouse +4061,9430732,"slurm/jobs/mihir/horeka/yolo-runs/tester.sh",679,357,"-ckpt_dir $CHECKPOINT_DIR \\n --batch_size=96 \\n --max_lr=1e-4 \\n --log_image_interval=1000 \\n --log \\n --log_checkpoint_interval=1000 \\n --name=dynamics-debug-run-$slurm_job_id \\n --tags dynamics debug \\n --entity instant-uv \\n --project jafar \\n --tokenizer_checkpoint=$tokenizer_ckpt_dir \\n --data_dir $array_records_dir\n ",shellscript,selection_mouse 
+4062,9430732,"slurm/jobs/mihir/horeka/yolo-runs/tester.sh",682,354,"pt_dir $CHECKPOINT_DIR \\n --batch_size=96 \\n --max_lr=1e-4 \\n --log_image_interval=1000 \\n --log \\n --log_checkpoint_interval=1000 \\n --name=dynamics-debug-run-$slurm_job_id \\n --tags dynamics debug \\n --entity instant-uv \\n --project jafar \\n --tokenizer_checkpoint=$tokenizer_ckpt_dir \\n --data_dir $array_records_dir\n ",shellscript,selection_mouse +4063,9430732,"slurm/jobs/mihir/horeka/yolo-runs/tester.sh",686,350,"ir $CHECKPOINT_DIR \\n --batch_size=96 \\n --max_lr=1e-4 \\n --log_image_interval=1000 \\n --log \\n --log_checkpoint_interval=1000 \\n --name=dynamics-debug-run-$slurm_job_id \\n --tags dynamics debug \\n --entity instant-uv \\n --project jafar \\n --tokenizer_checkpoint=$tokenizer_ckpt_dir \\n --data_dir $array_records_dir\n ",shellscript,selection_mouse +4064,9430733,"slurm/jobs/mihir/horeka/yolo-runs/tester.sh",689,347,"$CHECKPOINT_DIR \\n --batch_size=96 \\n --max_lr=1e-4 \\n --log_image_interval=1000 \\n --log \\n --log_checkpoint_interval=1000 \\n --name=dynamics-debug-run-$slurm_job_id \\n --tags dynamics debug \\n --entity instant-uv \\n --project jafar \\n --tokenizer_checkpoint=$tokenizer_ckpt_dir \\n --data_dir $array_records_dir\n ",shellscript,selection_mouse +4065,9430733,"slurm/jobs/mihir/horeka/yolo-runs/tester.sh",692,344,"ECKPOINT_DIR \\n --batch_size=96 \\n --max_lr=1e-4 \\n --log_image_interval=1000 \\n --log \\n --log_checkpoint_interval=1000 \\n --name=dynamics-debug-run-$slurm_job_id \\n --tags dynamics debug \\n --entity instant-uv \\n --project jafar \\n --tokenizer_checkpoint=$tokenizer_ckpt_dir \\n --data_dir $array_records_dir\n ",shellscript,selection_mouse +4066,9430733,"slurm/jobs/mihir/horeka/yolo-runs/tester.sh",696,340,"OINT_DIR \\n --batch_size=96 \\n --max_lr=1e-4 \\n --log_image_interval=1000 \\n --log \\n --log_checkpoint_interval=1000 \\n --name=dynamics-debug-run-$slurm_job_id \\n --tags dynamics debug \\n --entity instant-uv \\n --project jafar \\n --tokenizer_checkpoint=$tokenizer_ckpt_dir \\n --data_dir $array_records_dir\n ",shellscript,selection_mouse +4067,9430733,"slurm/jobs/mihir/horeka/yolo-runs/tester.sh",701,335,"DIR \\n --batch_size=96 \\n --max_lr=1e-4 \\n --log_image_interval=1000 \\n --log \\n --log_checkpoint_interval=1000 \\n --name=dynamics-debug-run-$slurm_job_id \\n --tags dynamics debug \\n --entity instant-uv \\n --project jafar \\n --tokenizer_checkpoint=$tokenizer_ckpt_dir \\n --data_dir $array_records_dir\n ",shellscript,selection_mouse +4068,9430734,"slurm/jobs/mihir/horeka/yolo-runs/tester.sh",704,332," \\n --batch_size=96 \\n --max_lr=1e-4 \\n --log_image_interval=1000 \\n --log \\n --log_checkpoint_interval=1000 \\n --name=dynamics-debug-run-$slurm_job_id \\n --tags dynamics debug \\n --entity instant-uv \\n --project jafar \\n --tokenizer_checkpoint=$tokenizer_ckpt_dir \\n --data_dir $array_records_dir\n ",shellscript,selection_mouse +4069,9430734,"slurm/jobs/mihir/horeka/yolo-runs/tester.sh",706,330,"\n --batch_size=96 \\n --max_lr=1e-4 \\n --log_image_interval=1000 \\n --log \\n --log_checkpoint_interval=1000 \\n --name=dynamics-debug-run-$slurm_job_id \\n --tags dynamics debug \\n --entity instant-uv \\n --project jafar \\n --tokenizer_checkpoint=$tokenizer_ckpt_dir \\n --data_dir $array_records_dir\n ",shellscript,selection_mouse +4070,9431119,"slurm/jobs/mihir/horeka/yolo-runs/tester.sh",643,393,"run python train_dynamics.py \\n --ckpt_dir $CHECKPOINT_DIR \\n --batch_size=96 \\n --max_lr=1e-4 \\n --log_image_interval=1000 
\\n --log \\n --log_checkpoint_interval=1000 \\n --name=dynamics-debug-run-$slurm_job_id \\n --tags dynamics debug \\n --entity instant-uv \\n --project jafar \\n --tokenizer_checkpoint=$tokenizer_ckpt_dir \\n --data_dir $array_records_dir\n ",shellscript,selection_mouse +4071,9431120,"slurm/jobs/mihir/horeka/yolo-runs/tester.sh",642,394,"srun python train_dynamics.py \\n --ckpt_dir $CHECKPOINT_DIR \\n --batch_size=96 \\n --max_lr=1e-4 \\n --log_image_interval=1000 \\n --log \\n --log_checkpoint_interval=1000 \\n --name=dynamics-debug-run-$slurm_job_id \\n --tags dynamics debug \\n --entity instant-uv \\n --project jafar \\n --tokenizer_checkpoint=$tokenizer_ckpt_dir \\n --data_dir $array_records_dir\n ",shellscript,selection_mouse +4072,9435642,"utils/nn.py",0,0,"",python,tab +4073,9436628,"slurm/jobs/mihir/horeka/yolo-runs/tester.sh",0,0,"",shellscript,tab +4074,9438072,"slurm/jobs/mihir/horeka/yolo-runs/train_dynamics_yolorun.sbatch",0,0,"",shellscript,tab +4075,9438968,"slurm/jobs/mihir/horeka/yolo-runs/train_dynamics_yolorun.sbatch",1560,0,"",shellscript,selection_mouse +4076,9439002,"slurm/jobs/mihir/horeka/yolo-runs/train_dynamics_yolorun.sbatch",1559,0,"",shellscript,selection_command +4077,9439286,"slurm/jobs/mihir/horeka/yolo-runs/train_dynamics_yolorun.sbatch",1559,1," ",shellscript,selection_mouse +4078,9439287,"slurm/jobs/mihir/horeka/yolo-runs/train_dynamics_yolorun.sbatch",1558,1,"\n",shellscript,selection_mouse +4079,9439287,"slurm/jobs/mihir/horeka/yolo-runs/train_dynamics_yolorun.sbatch",1513,46,"_ckpt_dir \\n --data_dir $array_records_dir\n",shellscript,selection_mouse +4080,9439287,"slurm/jobs/mihir/horeka/yolo-runs/train_dynamics_yolorun.sbatch",1427,132,"\n --entity instant-uv \\n --project jafar \\n --tokenizer_checkpoint=$tokenizer_ckpt_dir \\n --data_dir $array_records_dir\n",shellscript,selection_mouse +4081,9439288,"slurm/jobs/mihir/horeka/yolo-runs/train_dynamics_yolorun.sbatch",1307,252,"\n --log_checkpoint_interval=1000 \\n --name=dynamics-grad-norm-test-run-$slurm_job_id \\n --tags dynamics debug \\n --entity instant-uv \\n --project jafar \\n --tokenizer_checkpoint=$tokenizer_ckpt_dir \\n --data_dir $array_records_dir\n",shellscript,selection_mouse +4082,9439288,"slurm/jobs/mihir/horeka/yolo-runs/train_dynamics_yolorun.sbatch",1284,275,"rval=1000 \\n --log \\n --log_checkpoint_interval=1000 \\n --name=dynamics-grad-norm-test-run-$slurm_job_id \\n --tags dynamics debug \\n --entity instant-uv \\n --project jafar \\n --tokenizer_checkpoint=$tokenizer_ckpt_dir \\n --data_dir $array_records_dir\n",shellscript,selection_mouse +4083,9439288,"slurm/jobs/mihir/horeka/yolo-runs/train_dynamics_yolorun.sbatch",1260,299,"4 \\n --log_image_interval=1000 \\n --log \\n --log_checkpoint_interval=1000 \\n --name=dynamics-grad-norm-test-run-$slurm_job_id \\n --tags dynamics debug \\n --entity instant-uv \\n --project jafar \\n --tokenizer_checkpoint=$tokenizer_ckpt_dir \\n --data_dir $array_records_dir\n",shellscript,selection_mouse +4084,9439288,"slurm/jobs/mihir/horeka/yolo-runs/train_dynamics_yolorun.sbatch",1228,331,"_threshold=10 \\n --max_lr=1e-4 \\n --log_image_interval=1000 \\n --log \\n --log_checkpoint_interval=1000 \\n --name=dynamics-grad-norm-test-run-$slurm_job_id \\n --tags dynamics debug \\n --entity instant-uv \\n --project jafar \\n --tokenizer_checkpoint=$tokenizer_ckpt_dir \\n --data_dir $array_records_dir\n",shellscript,selection_mouse +4085,9439289,"slurm/jobs/mihir/horeka/yolo-runs/train_dynamics_yolorun.sbatch",1226,333,"ip_threshold=10 \\n 
--max_lr=1e-4 \\n --log_image_interval=1000 \\n --log \\n --log_checkpoint_interval=1000 \\n --name=dynamics-grad-norm-test-run-$slurm_job_id \\n --tags dynamics debug \\n --entity instant-uv \\n --project jafar \\n --tokenizer_checkpoint=$tokenizer_ckpt_dir \\n --data_dir $array_records_dir\n",shellscript,selection_mouse +4086,9439289,"slurm/jobs/mihir/horeka/yolo-runs/train_dynamics_yolorun.sbatch",1208,351,"=0 \\n --grad_clip_threshold=10 \\n --max_lr=1e-4 \\n --log_image_interval=1000 \\n --log \\n --log_checkpoint_interval=1000 \\n --name=dynamics-grad-norm-test-run-$slurm_job_id \\n --tags dynamics debug \\n --entity instant-uv \\n --project jafar \\n --tokenizer_checkpoint=$tokenizer_ckpt_dir \\n --data_dir $array_records_dir\n",shellscript,selection_mouse +4087,9439320,"slurm/jobs/mihir/horeka/yolo-runs/train_dynamics_yolorun.sbatch",1560,0,"",shellscript,selection_command +4088,9439321,"slurm/jobs/mihir/horeka/yolo-runs/train_dynamics_yolorun.sbatch",1186,374,"size=96 \\n --min_lr=0 \\n --grad_clip_threshold=10 \\n --max_lr=1e-4 \\n --log_image_interval=1000 \\n --log \\n --log_checkpoint_interval=1000 \\n --name=dynamics-grad-norm-test-run-$slurm_job_id \\n --tags dynamics debug \\n --entity instant-uv \\n --project jafar \\n --tokenizer_checkpoint=$tokenizer_ckpt_dir \\n --data_dir $array_records_dir\n ",shellscript,selection_mouse +4089,9439362,"slurm/jobs/mihir/horeka/yolo-runs/train_dynamics_yolorun.sbatch",1152,408,"dir $CHECKPOINT_DIR \\n --batch_size=96 \\n --min_lr=0 \\n --grad_clip_threshold=10 \\n --max_lr=1e-4 \\n --log_image_interval=1000 \\n --log \\n --log_checkpoint_interval=1000 \\n --name=dynamics-grad-norm-test-run-$slurm_job_id \\n --tags dynamics debug \\n --entity instant-uv \\n --project jafar \\n --tokenizer_checkpoint=$tokenizer_ckpt_dir \\n --data_dir $array_records_dir\n ",shellscript,selection_mouse +4090,9439480,"slurm/jobs/mihir/horeka/yolo-runs/train_dynamics_yolorun.sbatch",1151,409,"_dir $CHECKPOINT_DIR \\n --batch_size=96 \\n --min_lr=0 \\n --grad_clip_threshold=10 \\n --max_lr=1e-4 \\n --log_image_interval=1000 \\n --log \\n --log_checkpoint_interval=1000 \\n --name=dynamics-grad-norm-test-run-$slurm_job_id \\n --tags dynamics debug \\n --entity instant-uv \\n --project jafar \\n --tokenizer_checkpoint=$tokenizer_ckpt_dir \\n --data_dir $array_records_dir\n ",shellscript,selection_mouse +4091,9439506,"slurm/jobs/mihir/horeka/yolo-runs/train_dynamics_yolorun.sbatch",1141,419," --ckpt_dir $CHECKPOINT_DIR \\n --batch_size=96 \\n --min_lr=0 \\n --grad_clip_threshold=10 \\n --max_lr=1e-4 \\n --log_image_interval=1000 \\n --log \\n --log_checkpoint_interval=1000 \\n --name=dynamics-grad-norm-test-run-$slurm_job_id \\n --tags dynamics debug \\n --entity instant-uv \\n --project jafar \\n --tokenizer_checkpoint=$tokenizer_ckpt_dir \\n --data_dir $array_records_dir\n ",shellscript,selection_mouse +4092,9439529,"slurm/jobs/mihir/horeka/yolo-runs/train_dynamics_yolorun.sbatch",1115,445,"ython train_dynamics.py \\n --ckpt_dir $CHECKPOINT_DIR \\n --batch_size=96 \\n --min_lr=0 \\n --grad_clip_threshold=10 \\n --max_lr=1e-4 \\n --log_image_interval=1000 \\n --log \\n --log_checkpoint_interval=1000 \\n --name=dynamics-grad-norm-test-run-$slurm_job_id \\n --tags dynamics debug \\n --entity instant-uv \\n --project jafar \\n --tokenizer_checkpoint=$tokenizer_ckpt_dir \\n --data_dir $array_records_dir\n ",shellscript,selection_mouse +4093,9439551,"slurm/jobs/mihir/horeka/yolo-runs/train_dynamics_yolorun.sbatch",1112,448,"n python train_dynamics.py \\n --ckpt_dir 
$CHECKPOINT_DIR \\n --batch_size=96 \\n --min_lr=0 \\n --grad_clip_threshold=10 \\n --max_lr=1e-4 \\n --log_image_interval=1000 \\n --log \\n --log_checkpoint_interval=1000 \\n --name=dynamics-grad-norm-test-run-$slurm_job_id \\n --tags dynamics debug \\n --entity instant-uv \\n --project jafar \\n --tokenizer_checkpoint=$tokenizer_ckpt_dir \\n --data_dir $array_records_dir\n ",shellscript,selection_mouse +4094,9439577,"slurm/jobs/mihir/horeka/yolo-runs/train_dynamics_yolorun.sbatch",1109,451,"srun python train_dynamics.py \\n --ckpt_dir $CHECKPOINT_DIR \\n --batch_size=96 \\n --min_lr=0 \\n --grad_clip_threshold=10 \\n --max_lr=1e-4 \\n --log_image_interval=1000 \\n --log \\n --log_checkpoint_interval=1000 \\n --name=dynamics-grad-norm-test-run-$slurm_job_id \\n --tags dynamics debug \\n --entity instant-uv \\n --project jafar \\n --tokenizer_checkpoint=$tokenizer_ckpt_dir \\n --data_dir $array_records_dir\n ",shellscript,selection_mouse +4095,9440826,"slurm/jobs/mihir/horeka/yolo-runs/train_dynamics_yolorun.sbatch",1109,451,"",shellscript,content +4096,9441266,"slurm/jobs/mihir/horeka/yolo-runs/train_dynamics_yolorun.sbatch",1109,0,"srun python train_dynamics.py \\n --ckpt_dir $CHECKPOINT_DIR \\n --batch_size=96 \\n --max_lr=1e-4 \\n --log_image_interval=1000 \\n --log \\n --log_checkpoint_interval=1000 \\n --name=dynamics-debug-run-$slurm_job_id \\n --tags dynamics debug \\n --entity instant-uv \\n --project jafar \\n --tokenizer_checkpoint=$tokenizer_ckpt_dir \\n --data_dir $array_records_dir\n ",shellscript,content +4097,9443972,"slurm/jobs/mihir/horeka/yolo-runs/tester.sh",0,0,"",shellscript,tab +4098,9445581,"slurm/jobs/mihir/horeka/yolo-runs/train_dynamics_yolorun.sbatch",0,0,"",shellscript,tab +4099,9446746,"slurm/jobs/mihir/horeka/yolo-runs/tester.sh",0,0,"",shellscript,tab +4100,9448772,"slurm/jobs/mihir/horeka/yolo-runs/train_dynamics_yolorun.sbatch",0,0,"",shellscript,tab +4101,9451970,"slurm/jobs/mihir/horeka/yolo-runs/tester.sh",0,0,"",shellscript,tab +4102,9453025,"slurm/jobs/mihir/horeka/yolo-runs/tester.sh",586,0,"",shellscript,selection_mouse +4103,9453403,"slurm/jobs/mihir/horeka/yolo-runs/tester.sh",439,147,"tokenizer_ckpt_dir=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/big-runs/tokenizer-lr-scaling/train_tokenizer_lr_sweep_1e-4\n",shellscript,selection_mouse +4104,9453467,"slurm/jobs/mihir/horeka/yolo-runs/tester.sh",438,148,"\ntokenizer_ckpt_dir=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/big-runs/tokenizer-lr-scaling/train_tokenizer_lr_sweep_1e-4\n",shellscript,selection_mouse +4105,9453532,"slurm/jobs/mihir/horeka/yolo-runs/tester.sh",413,173,"mkdir -p $CHECKPOINT_DIR\n\ntokenizer_ckpt_dir=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/big-runs/tokenizer-lr-scaling/train_tokenizer_lr_sweep_1e-4\n",shellscript,selection_mouse +4106,9453634,"slurm/jobs/mihir/horeka/yolo-runs/tester.sh",354,232,"CHECKPOINT_DIR=$ws_dir/checkpoints/$job_name/$slurm_job_id\nmkdir -p $CHECKPOINT_DIR\n\ntokenizer_ckpt_dir=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/big-runs/tokenizer-lr-scaling/train_tokenizer_lr_sweep_1e-4\n",shellscript,selection_mouse +4107,9456279,"slurm/jobs/mihir/horeka/yolo-runs/train_dynamics_yolorun.sbatch",0,0,"",shellscript,tab +4108,9457664,"slurm/jobs/mihir/horeka/yolo-runs/train_dynamics_yolorun.sbatch",955,0,"",shellscript,selection_mouse +4109,9457930,"slurm/jobs/mihir/horeka/yolo-runs/train_dynamics_yolorun.sbatch",954,1,"k",shellscript,selection_mouse 
+4110,9457931,"slurm/jobs/mihir/horeka/yolo-runs/train_dynamics_yolorun.sbatch",953,2,"ck",shellscript,selection_mouse +4111,9457931,"slurm/jobs/mihir/horeka/yolo-runs/train_dynamics_yolorun.sbatch",942,13,"\ntokenizer_ck",shellscript,selection_mouse +4112,9457997,"slurm/jobs/mihir/horeka/yolo-runs/train_dynamics_yolorun.sbatch",921,34,"r -p $CHECKPOINT_DIR\n\ntokenizer_ck",shellscript,selection_mouse +4113,9458022,"slurm/jobs/mihir/horeka/yolo-runs/train_dynamics_yolorun.sbatch",919,36,"dir -p $CHECKPOINT_DIR\n\ntokenizer_ck",shellscript,selection_mouse +4114,9458022,"slurm/jobs/mihir/horeka/yolo-runs/train_dynamics_yolorun.sbatch",807,148,"ECKPOINT_DIR=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data/checkpoints/$job_name/$slurm_job_id\nmkdir -p $CHECKPOINT_DIR\n\ntokenizer_ck",shellscript,selection_mouse +4115,9458041,"slurm/jobs/mihir/horeka/yolo-runs/train_dynamics_yolorun.sbatch",806,149,"HECKPOINT_DIR=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data/checkpoints/$job_name/$slurm_job_id\nmkdir -p $CHECKPOINT_DIR\n\ntokenizer_ck",shellscript,selection_mouse +4116,9458080,"slurm/jobs/mihir/horeka/yolo-runs/train_dynamics_yolorun.sbatch",804,151,"\nCHECKPOINT_DIR=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data/checkpoints/$job_name/$slurm_job_id\nmkdir -p $CHECKPOINT_DIR\n\ntokenizer_ck",shellscript,selection_mouse +4117,9459665,"slurm/jobs/mihir/horeka/yolo-runs/train_dynamics_yolorun.sbatch",1090,0,"",shellscript,selection_mouse +4118,9459886,"slurm/jobs/mihir/horeka/yolo-runs/train_dynamics_yolorun.sbatch",947,143,"nizer_ckpt_dir=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/big-runs/tokenizer-lr-scaling/train_tokenizer_lr_sweep_1e-4\n",shellscript,selection_mouse +4119,9459887,"slurm/jobs/mihir/horeka/yolo-runs/train_dynamics_yolorun.sbatch",946,144,"enizer_ckpt_dir=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/big-runs/tokenizer-lr-scaling/train_tokenizer_lr_sweep_1e-4\n",shellscript,selection_mouse +4120,9459887,"slurm/jobs/mihir/horeka/yolo-runs/train_dynamics_yolorun.sbatch",942,148,"\ntokenizer_ckpt_dir=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/big-runs/tokenizer-lr-scaling/train_tokenizer_lr_sweep_1e-4\n",shellscript,selection_mouse +4121,9459941,"slurm/jobs/mihir/horeka/yolo-runs/train_dynamics_yolorun.sbatch",917,173,"mkdir -p $CHECKPOINT_DIR\n\ntokenizer_ckpt_dir=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/big-runs/tokenizer-lr-scaling/train_tokenizer_lr_sweep_1e-4\n",shellscript,selection_mouse +4122,9459956,"slurm/jobs/mihir/horeka/yolo-runs/train_dynamics_yolorun.sbatch",805,285,"CHECKPOINT_DIR=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data/checkpoints/$job_name/$slurm_job_id\nmkdir -p $CHECKPOINT_DIR\n\ntokenizer_ckpt_dir=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/big-runs/tokenizer-lr-scaling/train_tokenizer_lr_sweep_1e-4\n",shellscript,selection_mouse +4123,9460006,"slurm/jobs/mihir/horeka/yolo-runs/train_dynamics_yolorun.sbatch",804,286,"\nCHECKPOINT_DIR=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data/checkpoints/$job_name/$slurm_job_id\nmkdir -p $CHECKPOINT_DIR\n\ntokenizer_ckpt_dir=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/big-runs/tokenizer-lr-scaling/train_tokenizer_lr_sweep_1e-4\n",shellscript,selection_mouse +4124,9461324,"slurm/jobs/mihir/horeka/yolo-runs/train_dynamics_yolorun.sbatch",804,286,"x",shellscript,content 
+4125,9462308,"slurm/jobs/mihir/horeka/yolo-runs/train_dynamics_yolorun.sbatch",804,1,"",shellscript,content +4126,9462698,"slurm/jobs/mihir/horeka/yolo-runs/train_dynamics_yolorun.sbatch",804,0,"CHECKPOINT_DIR=$ws_dir/checkpoints/$job_name/$slurm_job_id\nmkdir -p $CHECKPOINT_DIR\n\ntokenizer_ckpt_dir=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/big-runs/tokenizer-lr-scaling/train_tokenizer_lr_sweep_1e-4\n",shellscript,content +4127,9463938,"slurm/jobs/mihir/horeka/yolo-runs/train_dynamics_yolorun.sbatch",889,0,"",shellscript,selection_command +4128,9464125,"slurm/jobs/mihir/horeka/yolo-runs/train_dynamics_yolorun.sbatch",888,0,"",shellscript,selection_command +4129,9464306,"slurm/jobs/mihir/horeka/yolo-runs/train_dynamics_yolorun.sbatch",863,0,"",shellscript,selection_command +4130,9464476,"slurm/jobs/mihir/horeka/yolo-runs/train_dynamics_yolorun.sbatch",804,0,"",shellscript,selection_command +4131,9464675,"slurm/jobs/mihir/horeka/yolo-runs/train_dynamics_yolorun.sbatch",804,0,"\n",shellscript,content +4132,9466728,"slurm/jobs/mihir/horeka/yolo-runs/train_dynamics_yolorun.sbatch",605,0,"",shellscript,selection_mouse +4133,9467200,"slurm/jobs/mihir/horeka/yolo-runs/train_dynamics_yolorun.sbatch",726,0,"",shellscript,selection_mouse +4134,9468486,"slurm/jobs/mihir/horeka/yolo-runs/train_dynamics_yolorun.sbatch",646,105,"",shellscript,content +4135,9470401,"slurm/jobs/mihir/horeka/yolo-runs/train_dynamics_yolorun.sbatch",82,0,"",shellscript,selection_mouse +4136,9471452,"slurm/jobs/mihir/horeka/yolo-runs/train_dynamics_yolorun.sbatch",82,2,"",shellscript,content +4137,9473179,"slurm/jobs/mihir/horeka/yolo-runs/train_dynamics_yolorun.sbatch",519,0,"",shellscript,selection_mouse +4138,9473557,"slurm/jobs/mihir/horeka/yolo-runs/train_dynamics_yolorun.sbatch",519,0,"4",shellscript,content +4139,9473558,"slurm/jobs/mihir/horeka/yolo-runs/train_dynamics_yolorun.sbatch",520,0,"",shellscript,selection_keyboard +4140,9473705,"slurm/jobs/mihir/horeka/yolo-runs/train_dynamics_yolorun.sbatch",520,0,"8",shellscript,content +4141,9473705,"slurm/jobs/mihir/horeka/yolo-runs/train_dynamics_yolorun.sbatch",521,0,"",shellscript,selection_keyboard +4142,9474729,"slurm/jobs/mihir/horeka/yolo-runs/train_dynamics_yolorun.sbatch",520,1,"",shellscript,content +4143,9474850,"slurm/jobs/mihir/horeka/yolo-runs/train_dynamics_yolorun.sbatch",519,1,"",shellscript,content +4144,9476705,"slurm/jobs/mihir/horeka/yolo-runs/train_dynamics_yolorun.sbatch",82,0,"",shellscript,selection_mouse +4145,9477878,"slurm/jobs/mihir/horeka/yolo-runs/train_dynamics_yolorun.sbatch",82,0,"4",shellscript,content +4146,9477880,"slurm/jobs/mihir/horeka/yolo-runs/train_dynamics_yolorun.sbatch",83,0,"",shellscript,selection_keyboard +4147,9478324,"slurm/jobs/mihir/horeka/yolo-runs/train_dynamics_yolorun.sbatch",83,0,"8",shellscript,content +4148,9478325,"slurm/jobs/mihir/horeka/yolo-runs/train_dynamics_yolorun.sbatch",84,0,"",shellscript,selection_keyboard +4149,9478728,"slurm/jobs/mihir/horeka/yolo-runs/train_dynamics_yolorun.sbatch",83,0,"",shellscript,selection_command +4150,9480611,"slurm/jobs/mihir/horeka/yolo-runs/train_dynamics_yolorun.sbatch",298,0,"",shellscript,selection_mouse +4151,9481646,"slurm/jobs/mihir/horeka/yolo-runs/train_dynamics_yolorun.sbatch",546,0,"",shellscript,selection_mouse +4152,9483071,"slurm/jobs/mihir/horeka/yolo-runs/train_dynamics_yolorun.sbatch",780,0,"",shellscript,selection_mouse 
+4153,9483716,"slurm/jobs/mihir/horeka/yolo-runs/train_dynamics_yolorun.sbatch",404,0,"",shellscript,selection_mouse +4154,9484202,"slurm/jobs/mihir/horeka/yolo-runs/train_dynamics_yolorun.sbatch",399,0,"",shellscript,selection_mouse +4155,9484457,"slurm/jobs/mihir/horeka/yolo-runs/train_dynamics_yolorun.sbatch",403,0,"",shellscript,selection_command +4156,9485197,"slurm/jobs/mihir/horeka/yolo-runs/train_dynamics_yolorun.sbatch",403,0,"_",shellscript,content +4157,9485198,"slurm/jobs/mihir/horeka/yolo-runs/train_dynamics_yolorun.sbatch",404,0,"",shellscript,selection_keyboard +4158,9485490,"slurm/jobs/mihir/horeka/yolo-runs/train_dynamics_yolorun.sbatch",404,0,"n",shellscript,content +4159,9485491,"slurm/jobs/mihir/horeka/yolo-runs/train_dynamics_yolorun.sbatch",405,0,"",shellscript,selection_keyboard +4160,9485562,"slurm/jobs/mihir/horeka/yolo-runs/train_dynamics_yolorun.sbatch",405,0,"e",shellscript,content +4161,9485562,"slurm/jobs/mihir/horeka/yolo-runs/train_dynamics_yolorun.sbatch",406,0,"",shellscript,selection_keyboard +4162,9485678,"slurm/jobs/mihir/horeka/yolo-runs/train_dynamics_yolorun.sbatch",406,0,"w",shellscript,content +4163,9485679,"slurm/jobs/mihir/horeka/yolo-runs/train_dynamics_yolorun.sbatch",407,0,"",shellscript,selection_keyboard +4164,9485996,"slurm/jobs/mihir/horeka/yolo-runs/train_dynamics_yolorun.sbatch",407,0,"_",shellscript,content +4165,9485997,"slurm/jobs/mihir/horeka/yolo-runs/train_dynamics_yolorun.sbatch",408,0,"",shellscript,selection_keyboard +4166,9486198,"slurm/jobs/mihir/horeka/yolo-runs/train_dynamics_yolorun.sbatch",408,0,"a",shellscript,content +4167,9486198,"slurm/jobs/mihir/horeka/yolo-runs/train_dynamics_yolorun.sbatch",409,0,"",shellscript,selection_keyboard +4168,9486770,"slurm/jobs/mihir/horeka/yolo-runs/train_dynamics_yolorun.sbatch",409,0,"t",shellscript,content +4169,9486771,"slurm/jobs/mihir/horeka/yolo-runs/train_dynamics_yolorun.sbatch",410,0,"",shellscript,selection_keyboard +4170,9486973,"slurm/jobs/mihir/horeka/yolo-runs/train_dynamics_yolorun.sbatch",410,0,"c",shellscript,content +4171,9486974,"slurm/jobs/mihir/horeka/yolo-runs/train_dynamics_yolorun.sbatch",411,0,"",shellscript,selection_keyboard +4172,9487077,"slurm/jobs/mihir/horeka/yolo-runs/train_dynamics_yolorun.sbatch",411,0,"h",shellscript,content +4173,9487078,"slurm/jobs/mihir/horeka/yolo-runs/train_dynamics_yolorun.sbatch",412,0,"",shellscript,selection_keyboard +4174,9487590,"slurm/jobs/mihir/horeka/yolo-runs/train_dynamics_yolorun.sbatch",411,1,"",shellscript,content +4175,9487688,"slurm/jobs/mihir/horeka/yolo-runs/train_dynamics_yolorun.sbatch",410,1,"",shellscript,content +4176,9487812,"slurm/jobs/mihir/horeka/yolo-runs/train_dynamics_yolorun.sbatch",409,1,"",shellscript,content +4177,9487922,"slurm/jobs/mihir/horeka/yolo-runs/train_dynamics_yolorun.sbatch",409,0,"r",shellscript,content +4178,9487923,"slurm/jobs/mihir/horeka/yolo-runs/train_dynamics_yolorun.sbatch",410,0,"",shellscript,selection_keyboard +4179,9488062,"slurm/jobs/mihir/horeka/yolo-runs/train_dynamics_yolorun.sbatch",410,0,"c",shellscript,content +4180,9488063,"slurm/jobs/mihir/horeka/yolo-runs/train_dynamics_yolorun.sbatch",411,0,"",shellscript,selection_keyboard +4181,9488184,"slurm/jobs/mihir/horeka/yolo-runs/train_dynamics_yolorun.sbatch",411,0,"h",shellscript,content +4182,9488185,"slurm/jobs/mihir/horeka/yolo-runs/train_dynamics_yolorun.sbatch",412,0,"",shellscript,selection_keyboard 
+4183,9488862,"slurm/jobs/mihir/horeka/yolo-runs/train_dynamics_yolorun.sbatch",411,0,"",shellscript,selection_command +4184,9491067,"slurm/jobs/mihir/horeka/yolo-runs/train_dynamics_yolorun.sbatch",1161,0,"",shellscript,selection_mouse +4185,9491568,"slurm/jobs/mihir/horeka/yolo-runs/train_dynamics_yolorun.sbatch",1167,0,"",shellscript,selection_mouse +4186,9492366,"slurm/jobs/mihir/horeka/yolo-runs/train_dynamics_yolorun.sbatch",1168,0,"",shellscript,selection_command +4187,9492876,"slurm/jobs/mihir/horeka/yolo-runs/train_dynamics_yolorun.sbatch",1168,5,"",shellscript,content +4188,9493747,"slurm/jobs/mihir/horeka/yolo-runs/train_dynamics_yolorun.sbatch",1168,0,"n",shellscript,content +4189,9493747,"slurm/jobs/mihir/horeka/yolo-runs/train_dynamics_yolorun.sbatch",1169,0,"",shellscript,selection_keyboard +4190,9493859,"slurm/jobs/mihir/horeka/yolo-runs/train_dynamics_yolorun.sbatch",1169,0,"e",shellscript,content +4191,9493860,"slurm/jobs/mihir/horeka/yolo-runs/train_dynamics_yolorun.sbatch",1170,0,"",shellscript,selection_keyboard +4192,9494018,"slurm/jobs/mihir/horeka/yolo-runs/train_dynamics_yolorun.sbatch",1170,0,"w",shellscript,content +4193,9494019,"slurm/jobs/mihir/horeka/yolo-runs/train_dynamics_yolorun.sbatch",1171,0,"",shellscript,selection_keyboard +4194,9494378,"slurm/jobs/mihir/horeka/yolo-runs/train_dynamics_yolorun.sbatch",1171,0,"-",shellscript,content +4195,9494378,"slurm/jobs/mihir/horeka/yolo-runs/train_dynamics_yolorun.sbatch",1172,0,"",shellscript,selection_keyboard +4196,9494647,"slurm/jobs/mihir/horeka/yolo-runs/train_dynamics_yolorun.sbatch",1172,0,"a",shellscript,content +4197,9494648,"slurm/jobs/mihir/horeka/yolo-runs/train_dynamics_yolorun.sbatch",1173,0,"",shellscript,selection_keyboard +4198,9494849,"slurm/jobs/mihir/horeka/yolo-runs/train_dynamics_yolorun.sbatch",1173,0,"r",shellscript,content +4199,9494850,"slurm/jobs/mihir/horeka/yolo-runs/train_dynamics_yolorun.sbatch",1174,0,"",shellscript,selection_keyboard +4200,9495009,"slurm/jobs/mihir/horeka/yolo-runs/train_dynamics_yolorun.sbatch",1174,0,"c",shellscript,content +4201,9495009,"slurm/jobs/mihir/horeka/yolo-runs/train_dynamics_yolorun.sbatch",1175,0,"",shellscript,selection_keyboard +4202,9495115,"slurm/jobs/mihir/horeka/yolo-runs/train_dynamics_yolorun.sbatch",1175,0,"h",shellscript,content +4203,9495116,"slurm/jobs/mihir/horeka/yolo-runs/train_dynamics_yolorun.sbatch",1176,0,"",shellscript,selection_keyboard +4204,9496366,"slurm/jobs/mihir/horeka/yolo-runs/train_dynamics_yolorun.sbatch",1218,0,"",shellscript,selection_mouse +4205,9497007,"slurm/jobs/mihir/horeka/yolo-runs/train_dynamics_yolorun.sbatch",1217,0,"",shellscript,selection_mouse +4206,9497317,"slurm/jobs/mihir/horeka/yolo-runs/train_dynamics_yolorun.sbatch",1217,1,"d",shellscript,selection_mouse +4207,9497317,"slurm/jobs/mihir/horeka/yolo-runs/train_dynamics_yolorun.sbatch",1217,2,"de",shellscript,selection_mouse +4208,9497591,"slurm/jobs/mihir/horeka/yolo-runs/train_dynamics_yolorun.sbatch",1219,0,"",shellscript,selection_mouse +4209,9497592,"slurm/jobs/mihir/horeka/yolo-runs/train_dynamics_yolorun.sbatch",1217,5,"debug",shellscript,selection_mouse +4210,9497991,"slurm/jobs/mihir/horeka/yolo-runs/train_dynamics_yolorun.sbatch",1217,5,"",shellscript,content +4211,9498406,"slurm/jobs/mihir/horeka/yolo-runs/train_dynamics_yolorun.sbatch",1216,1,"",shellscript,content +4212,9498979,"slurm/jobs/mihir/horeka/yolo-runs/train_dynamics_yolorun.sbatch",1215,0,"",shellscript,selection_command 
+4213,9499503,"slurm/jobs/mihir/horeka/yolo-runs/train_dynamics_yolorun.sbatch",1147,0,"",shellscript,selection_mouse +4214,9499521,"slurm/jobs/mihir/horeka/yolo-runs/train_dynamics_yolorun.sbatch",1146,0,"",shellscript,selection_command +4215,9500188,"slurm/jobs/mihir/horeka/yolo-runs/train_dynamics_yolorun.sbatch",991,0,"",shellscript,selection_mouse +4216,9500191,"slurm/jobs/mihir/horeka/yolo-runs/train_dynamics_yolorun.sbatch",990,0,"",shellscript,selection_command +4217,9579545,"slurm/jobs/mihir/horeka/yolo-runs/train_dynamics_yolorun.sbatch",369,0,"",shellscript,selection_mouse +4218,9582149,"slurm/jobs/mihir/horeka/yolo-runs/train_dynamics_yolorun.sbatch",515,0,"",shellscript,selection_mouse +4219,9583183,"slurm/jobs/mihir/horeka/yolo-runs/train_dynamics_yolorun.sbatch",506,0,"",shellscript,selection_command +4220,9583587,"slurm/jobs/mihir/horeka/yolo-runs/train_dynamics_yolorun.sbatch",506,0,"#",shellscript,content +4221,9583588,"slurm/jobs/mihir/horeka/yolo-runs/train_dynamics_yolorun.sbatch",507,0,"",shellscript,selection_keyboard +4222,9583812,"slurm/jobs/mihir/horeka/yolo-runs/train_dynamics_yolorun.sbatch",507,0," ",shellscript,content +4223,9583813,"slurm/jobs/mihir/horeka/yolo-runs/train_dynamics_yolorun.sbatch",508,0,"",shellscript,selection_keyboard +4224,9584342,"slurm/jobs/mihir/horeka/yolo-runs/train_dynamics_yolorun.sbatch",507,0,"",shellscript,selection_command +4225,9595326,"TERMINAL",0,0,"[?25ls[?25h",,terminal_output +4226,9595661,"TERMINAL",0,0,"[?25ln[?25h",,terminal_output +4227,9596123,"TERMINAL",0,0,"[?25ly[?25h",,terminal_output +4228,9596228,"TERMINAL",0,0,"[?25ln[?25h",,terminal_output +4229,9596877,"TERMINAL",0,0,"[?25lc[?25h",,terminal_output +4230,9596948,"TERMINAL",0,0,"[?25l-[?25h",,terminal_output +4231,9597126,"TERMINAL",0,0,"[?25lr[?25h",,terminal_output +4232,9597269,"TERMINAL",0,0,"[?25lu[?25h",,terminal_output +4233,9597537,"TERMINAL",0,0,"[?25ln[?25h",,terminal_output +4234,9597716,"TERMINAL",0,0,"[?25ln[?25h",,terminal_output +4235,9597778,"TERMINAL",0,0,"[?25le[?25h",,terminal_output +4236,9597839,"TERMINAL",0,0,"[?25lr[?25h",,terminal_output +4237,9598261,"TERMINAL",0,0,"\r\n[?2004l\rsending incremental file list\r\nslurm/jobs/mihir/horeka/yolo-runs/train_dynamics_yolorun.sbatch\r\n",,terminal_output +4238,9598330,"TERMINAL",0,0,"\r\nsent 22,069 bytes received 137 bytes 44,412.00 bytes/sec\r\ntotal size is 141,970,740 speedup is 6,393.35\r\n]0;tum_cte0515@hkn0710:~/Projects/jafar_jobs[?2004h(jafar) [tum_cte0515@hkn0710 jafar_jobs]$ ",,terminal_output +4239,9600738,"TERMINAL",0,0,"[?25ls[?25h",,terminal_output +4240,9600800,"TERMINAL",0,0,"[?25lb[?25h",,terminal_output +4241,9600943,"TERMINAL",0,0,"[?25la[?25h",,terminal_output +4242,9601004,"TERMINAL",0,0,"[?25lt[?25h",,terminal_output +4243,9601134,"TERMINAL",0,0,"[?25lc[?25h",,terminal_output +4244,9601196,"TERMINAL",0,0,"[?25lh[?25h",,terminal_output +4245,9601361,"TERMINAL",0,0,"[?25l [?25h",,terminal_output +4246,9601604,"TERMINAL",0,0,"slurm/jobs/mihir/horeka/yolo-runs/train_dynamics_yolorun.sbatch",,terminal_output +4247,9602190,"TERMINAL",0,0,"\rslurm/jobs/mihir/horeka/yolo-runs/train_dynamics_yolorun.sbatch\r\n[?2004l\rSubmitted batch job 3351743\r\n]0;tum_cte0515@hkn0710:~/Projects/jafar_jobs[?2004h(jafar) [tum_cte0515@hkn0710 jafar_jobs]$ ",,terminal_output +4248,9606772,"slurm/jobs/mihir/horeka/yolo-runs/train_dynamics_yolorun.sbatch",0,0,"",shellscript,tab +4249,9608169,"slurm/jobs/mihir/horeka/yolo-runs/tester.sh",0,0,"",shellscript,tab 
+4250,9610529,"slurm/jobs/mihir/horeka/yolo-runs/train_dynamics_yolorun.sbatch",0,0,"",shellscript,tab +4251,9633098,"TERMINAL",0,0,"[?25lc[?25h",,terminal_output +4252,9633158,"TERMINAL",0,0,"[?25ld[?25h",,terminal_output +4253,9633360,"TERMINAL",0,0,"[?25l [?25h",,terminal_output +4254,9633422,"TERMINAL",0,0,"[?25l.[?25h",,terminal_output +4255,9633572,"TERMINAL",0,0,"[?25l.[?25h",,terminal_output +4256,9634053,"TERMINAL",0,0,"[?25l/[?25h",,terminal_output +4257,9634194,"TERMINAL",0,0,"[?25lj[?25h",,terminal_output +4258,9634297,"TERMINAL",0,0,"afar",,terminal_output +4259,9634479,"TERMINAL",0,0,"\r\n[?2004l\r]0;tum_cte0515@hkn0710:~/Projects/jafar[?2004h(jafar) [tum_cte0515@hkn0710 jafar]$ ",,terminal_output +4260,9634967,"TERMINAL",0,0,"[?25ls[?25h",,terminal_output +4261,9635454,"TERMINAL",0,0,"[?25ll[?25h[?25ls[?25h",,terminal_output +4262,9635578,"TERMINAL",0,0,"\r\n[?2004l\rdata generation_1752502813.7130806.gif generation_1752579794.2949483.gif gifs read_tf_record.py tests\r\ndebug generation_1752503689.8298378.gif generation_1752579931.2817705.gif input_pipeline requirements-franz.txt train_dynamics.py\r\nframe-knoms.png generation_1752504934.1629438.gif generation_1752580458.8344245.gif LICENSE requirements.txt train_lam.py\r\nframe.png generation_1752505829.3945305.gif generation_1752580934.2848504.gif local-logs sample.py train_tokenizer_bak.py\r\nframes generation_1752513109.1235461.gif generation_1752581091.8428152.gif logs sample.py_bak train_tokenizer.py\r\ngenerate_dataset.py generation_1752513384.5762262.gif generation_1752581503.520897.gif models scripts_cremers utils\r\ngeneration_1752489078.1856709.gif generation_1752513923.7489405.gif generation_1752581641.3452077.gif overfit_dir scripts_horeka wandb\r\ngeneration_1752489445.163335.gif generation_1752579157.0310874.gif generation_1752588193.6372015.gif __pycache__ slurm weekend-job-requeuer.sh\r\ngeneration_1752501077.2698705.gif generation_1752579372.4300406.gif genie.py README.md slurm-3309772.out weekend-job-starter.sh\r\n]0;tum_cte0515@hkn0710:~/Projects/jafar[?2004h(jafar) [tum_cte0515@hkn0710 jafar]$ ",,terminal_output +4263,9641134,"TERMINAL",0,0,"[?25lq[?25h",,terminal_output +4264,9641229,"TERMINAL",0,0,"[?25lu[?25h",,terminal_output +4265,9641335,"TERMINAL",0,0,"[?25le[?25h",,terminal_output +4266,9641398,"TERMINAL",0,0,"[?25lu[?25h",,terminal_output +4267,9641503,"TERMINAL",0,0,"[?25le[?25h",,terminal_output +4268,9641629,"TERMINAL",0,0,"\r\n[?2004l\r[?1049h(B[?7hEvery 1.0s: squeue --mehkn0710.localdomain: Wed Jul 16 17:30:09 2025JOBID PARTITION NAME USER ST\tTIME NODES NODELIST(REASON)3351743 accelerat train_dy tum_cte0 PD\t0:00\t 2 (Priority)3348592 accelerat train_dy tum_cte0 R 17:29:47\t 2 hkn[0416,0421]3348397 accelerat train_dy tum_cte0 R 22:51:29\t 2 hkn[0810,0815]3348399 accelerat train_dy tum_cte0 R 22:51:29\t 2 hkn[0601,0603]3348400 accelerat train_dy tum_cte0 R 22:51:29\t 2 hkn[0604,0608]3350418 accelerat interact tum_cte0 R 2:40:23\t 2 hkn[0710-0711]3345116 accelerat train_dy tum_cte0 R 1-23:35:14\t 2 hkn[0503,0506]",,terminal_output +4269,9642625,"TERMINAL",0,0,"10830303045",,terminal_output +4270,9642755,"TERMINAL",0,0,"[?1049l\r[?1l>]0;tum_cte0515@hkn0710:~/Projects/jafar[?2004h(jafar) [tum_cte0515@hkn0710 jafar]$ ",,terminal_output +4271,9643051,"TERMINAL",0,0,"[?25li[?25h",,terminal_output +4272,9643265,"TERMINAL",0,0,"[?25ld[?25h",,terminal_output +4273,9643380,"TERMINAL",0,0,"[?25ll[?25h",,terminal_output +4274,9643678,"TERMINAL",0,0,"[?25li[?25h",,terminal_output 
+4275,9643732,"TERMINAL",0,0,"[?25ln[?25h",,terminal_output +4276,9643837,"TERMINAL",0,0,"[?25lg[?25h",,terminal_output +4277,9644034,"TERMINAL",0,0,"\r\n[?2004l\r[?1049h(B[?7hEvery 1.0s: sinfo_t_idlehkn0710.localdomain: Wed Jul 16 17:30:11 2025Partition dev_cpuonly: 11 nodes idle\rPartition cpuonly:\t 7 nodes idle\rPartition dev_accelerated:\t 1 nodes idle\rPartition accelerated: 17 nodes idle\rPartition dev_accelerated-h100 :\t 1 nodes idle\rPartition accelerated-h100:\t 1 nodes idle\rPartition large:\t 8 nodes idle",,terminal_output +4278,9645079,"TERMINAL",0,0,"2",,terminal_output +4279,9645411,"TERMINAL",0,0,"[?1049l\r[?1l>]0;tum_cte0515@hkn0710:~/Projects/jafar[?2004h(jafar) [tum_cte0515@hkn0710 jafar]$ ",,terminal_output +4280,9645777,"TERMINAL",0,0,"idling",,terminal_output +4281,9645924,"TERMINAL",0,0,"queue",,terminal_output +4282,9646474,"TERMINAL",0,0,"\r\n[?2004l\r[?1049h(B[?7hEvery 1.0s: squeue --mehkn0710.localdomain: Wed Jul 16 17:30:13 2025JOBID PARTITION NAME USER ST\tTIME NODES NODELIST(REASON)3348592 accelerat train_dy tum_cte0 R 17:29:51\t 2 hkn[0416,0421]3348397 accelerat train_dy tum_cte0 R 22:51:33\t 2 hkn[0810,0815]3348399 accelerat train_dy tum_cte0 R 22:51:33\t 2 hkn[0601,0603]3348400 accelerat train_dy tum_cte0 R 22:51:33\t 2 hkn[0604,0608]3351743 accelerat train_dy tum_cte0 R\t0:00\t 2 hkn[0725-0726]3350418 accelerat interact tum_cte0 R 2:40:27\t 2 hkn[0710-0711]3345116 accelerat train_dy tum_cte0 R 1-23:35:18\t 2 hkn[0503,0506]",,terminal_output +4283,9647538,"TERMINAL",0,0,"42444189",,terminal_output +4284,9648715,"TERMINAL",0,0,"[?1049l\r[?1l>]0;tum_cte0515@hkn0710:~/Projects/jafar[?2004h(jafar) [tum_cte0515@hkn0710 jafar]$ ",,terminal_output +4285,10074596,"slurm-3309772.out",0,0,"Sampling from checkpoint: /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/0000/genie_1751381954_17000\n2025-07-01 17:53:19.832287: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\n2025-07-01 17:53:24.082124: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\n2025-07-01 17:53:32.675517: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\n2025-07-01 17:53:40.082943: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\n2025-07-01 17:53:47.055162: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. 
Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\n2025-07-01 17:53:52.971602: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\n/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/orbax/checkpoint/_src/serialization/type_handlers.py:1251: UserWarning: Sharding info not provided when restoring. Populating sharding info from sharding file. Please note restoration time will be slightly increased due to reading from file. Note also that this option is unsafe when restoring on a different topology than the checkpoint was saved with.\n warnings.warn(\n2025-07-01 17:54:08.811422: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\n2025-07-01 17:54:20.376835: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\n2025-07-01 17:54:33.047791: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\n2025-07-01 17:54:36.873159: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\n2025-07-01 17:54:48.467977: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\n2025-07-01 17:54:51.729733: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\n2025-07-01 17:55:06.062658: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\n2025-07-01 17:55:18.265202: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. 
Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\n2025-07-01 17:55:21.670011: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\n2025-07-01 17:55:37.007236: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\n2025-07-01 17:55:49.095341: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\n2025-07-01 17:55:52.252054: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\n2025-07-01 17:56:09.335016: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\n2025-07-01 17:56:21.868202: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\n2025-07-01 17:56:25.207158: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\n2025-07-01 17:56:44.884640: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\n2025-07-01 17:56:57.561690: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\n2025-07-01 17:57:00.864374: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\n2025-07-01 17:57:22.461924: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. 
Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\n2025-07-01 17:57:38.073265: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\n2025-07-01 17:58:01.234593: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\n2025-07-01 17:58:16.289428: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\n2025-07-01 17:58:41.028468: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\n2025-07-01 17:58:56.554326: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\n2025-07-01 17:59:22.428882: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\n2025-07-01 17:59:38.377644: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\n2025-07-01 18:00:08.007591: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\n2025-07-01 18:00:24.357203: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\n2025-07-01 18:00:54.448262: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\n2025-07-01 18:01:10.657357: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. 
Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\n2025-07-01 18:02:03.449115: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\n2025-07-01 18:02:56.526631: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\n2025-07-01 18:03:52.397056: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\n====================================================================================================\nFrame 1\n====================================================================================================\n====================================================================================================\nFrame 2\n====================================================================================================\n====================================================================================================\nFrame 3\n====================================================================================================\n====================================================================================================\nFrame 4\n====================================================================================================\n====================================================================================================\nFrame 5\n====================================================================================================\n====================================================================================================\nFrame 6\n====================================================================================================\n====================================================================================================\nFrame 7\n====================================================================================================\n====================================================================================================\nFrame 8\n====================================================================================================\n====================================================================================================\nFrame 9\n====================================================================================================\n====================================================================================================\nFrame 10\n====================================================================================================\n====================================================================================================\nFrame 11\n====================================================================================================\n====================================================================================================\nFrame 
12\n====================================================================================================\n====================================================================================================\nFrame 13\n====================================================================================================\n====================================================================================================\nFrame 14\n====================================================================================================\n====================================================================================================\nFrame 15\n====================================================================================================\nSSIM: 0.13965484499931335\n\n============================= JOB FEEDBACK =============================\n\nJob ID: 3309772\nCluster: hk\nUser/Group: tum_cte0515/hk-project-p0023960\nAccount: hk-project-p0023960\nState: COMPLETED (exit code 0)\nPartition: accelerated\nNodes: 1\nCores per node: 6\nNodelist: hkn0411\nCPU Utilized: 00:21:11\nCPU Efficiency: 29.06% of 01:12:54 core-walltime\nJob Wall-clock time: 00:12:09\nStarttime: Tue Jul 1 17:52:30 2025\nEndtime: Tue Jul 1 18:04:39 2025\nMemory Utilized: 3.47 GB\nMemory Efficiency: 6.93% of 50.00 GB\nEnergy Consumed: 745740 Joule / 207.15 Watthours\nAverage node power draw: 1022.96296296296 Watt\n",plaintext,tab +4286,10078893,"sample.py_bak",0,0,"from dataclasses import dataclass\nimport time\nimport os\n\nimport dm_pix as pix\nimport einops\nimport jax\nimport jax.numpy as jnp\nimport flax.linen as nn\nimport numpy as np\nfrom orbax.checkpoint import PyTreeCheckpointer\nfrom PIL import Image, ImageDraw\nimport tyro\n\nfrom genie import Genie\nfrom utils.dataloader import get_dataloader\n\n\n@dataclass\nclass Args:\n # Experiment\n seed: int = 0\n seq_len: int = 16\n image_channels: int = 3\n image_height: int = 90\n image_width: int = 160\n data_dir: str = ""data/coinrun_episodes""\n checkpoint: str = """"\n # Sampling\n batch_size: int = 1\n maskgit_steps: int = 25\n temperature: float = 1.0\n sample_argmax: bool = True\n start_frame: int = 0\n # Tokenizer checkpoint\n tokenizer_dim: int = 512\n latent_patch_dim: int = 32\n num_patch_latents: int = 1024\n patch_size: int = 4\n tokenizer_num_blocks: int = 8\n tokenizer_num_heads: int = 8\n # LAM checkpoint\n lam_dim: int = 512\n latent_action_dim: int = 32\n num_latent_actions: int = 6\n lam_patch_size: int = 16\n lam_num_blocks: int = 8\n lam_num_heads: int = 8\n # Dynamics checkpoint\n dyna_dim: int = 512\n dyna_num_blocks: int = 12\n dyna_num_heads: int = 8\n\n\nargs = tyro.cli(Args)\nrng = jax.random.PRNGKey(args.seed)\n\n# --- Load Genie checkpoint ---\ngenie = Genie(\n # Tokenizer\n in_dim=args.image_channels,\n tokenizer_dim=args.tokenizer_dim,\n latent_patch_dim=args.latent_patch_dim,\n num_patch_latents=args.num_patch_latents,\n patch_size=args.patch_size,\n tokenizer_num_blocks=args.tokenizer_num_blocks,\n tokenizer_num_heads=args.tokenizer_num_heads,\n # LAM\n lam_dim=args.lam_dim,\n latent_action_dim=args.latent_action_dim,\n num_latent_actions=args.num_latent_actions,\n lam_patch_size=args.lam_patch_size,\n lam_num_blocks=args.lam_num_blocks,\n lam_num_heads=args.lam_num_heads,\n # Dynamics\n dyna_dim=args.dyna_dim,\n dyna_num_blocks=args.dyna_num_blocks,\n dyna_num_heads=args.dyna_num_heads,\n)\nrng, _rng = jax.random.split(rng)\nimage_shape = (args.image_height, args.image_width, args.image_channels)\ndummy_inputs = dict(\n 
videos=jnp.zeros((args.batch_size, args.seq_len, *image_shape), dtype=jnp.float32),\n mask_rng=_rng,\n)\nrng, _rng = jax.random.split(rng)\nparams = genie.init(_rng, dummy_inputs)\nckpt = PyTreeCheckpointer().restore(args.checkpoint)[""model""][""params""][""params""]\nparams[""params""].update(ckpt)\n\n\ndef _sampling_wrapper(module, batch):\n return module.sample(batch, args.seq_len, args.maskgit_steps, args.temperature, args.sample_argmax)\n\n# --- Define autoregressive sampling loop ---\ndef _autoreg_sample(rng, video_batch, action_batch):\n vid = video_batch[:, : args.start_frame + 1]\n sampling_fn = jax.jit(nn.apply(_sampling_wrapper, genie)) \n rng, _rng = jax.random.split(rng)\n batch = dict(videos=vid, latent_actions=action_batch, rng=_rng)\n generated_vid = sampling_fn(\n params,\n batch\n )\n return generated_vid\n\n# --- Get video + latent actions ---\n# tfrecord_files = [\n # os.path.join(args.data_dir, x)\n # for x in os.listdir(args.data_dir)\n # if x.endswith("".tfrecord"")\n# ]\n# dataloader = get_dataloader(\n # tfrecord_files,\n # args.seq_len,\n # args.batch_size,\n # args.image_height,\n # args.image_width,\n # args.image_channels,\n # seed=args.seed,\n# )\n# video_batch = next(iter(dataloader))\nvideo_batch = np.load(""overfit_dir/single_batch_12_elems.npy"")\n# Get latent actions for all videos in the batch\nbatch = dict(videos=video_batch)\naction_batch = genie.apply(params, batch, False, method=Genie.vq_encode)\naction_batch = action_batch.reshape(video_batch.shape[0], args.seq_len - 1, 1)\n\n# --- Sample + evaluate video ---\nvid = _autoreg_sample(rng, video_batch, action_batch)\ngt = video_batch[:, : vid.shape[1]].clip(0, 1).reshape(-1, *video_batch.shape[2:])\nrecon = vid.clip(0, 1).reshape(-1, *vid.shape[2:])\nssim = pix.ssim(gt[:, args.start_frame + 1 :], recon[:, args.start_frame + 1 :]).mean()\nprint(f""SSIM: {ssim}"")\n\n# --- Construct video ---\ntrue_videos = (video_batch * 255).astype(np.uint8)\npred_videos = (vid * 255).astype(np.uint8)\nvideo_comparison = np.zeros((2, *vid.shape), dtype=np.uint8)\nvideo_comparison[0] = true_videos[:, :args.seq_len]\nvideo_comparison[1] = pred_videos\nframes = einops.rearrange(video_comparison, ""n b t h w c -> t (b h) (n w) c"")\n\n# --- Save video --- \nimgs = [Image.fromarray(img) for img in frames]\n# Write actions on each frame, on each row (i.e., for each video in the batch, on the GT row)\nfor t, img in enumerate(imgs[1:]):\n d = ImageDraw.Draw(img)\n for row in range(action_batch.shape[0]):\n action = action_batch[row, t, 0]\n y_offset = row * video_batch.shape[2] + 2\n d.text((2, y_offset), f""{action}"", fill=255)\nimgs[0].save(\n f""generation_{time.time()}.gif"",\n save_all=True,\n append_images=imgs[1:],\n duration=250,\n loop=0,\n)\n",plaintext,tab +4287,10083063,"train_tokenizer_bak.py",0,0,"from dataclasses import dataclass, field\nimport os\n\nimport einops\nfrom flax.training import orbax_utils\nfrom flax.training.train_state import TrainState\nfrom jax.sharding import Mesh, PartitionSpec, NamedSharding\nfrom jax.experimental.mesh_utils import create_device_mesh\nimport optax\nimport orbax\nfrom orbax.checkpoint import PyTreeCheckpointer\nimport numpy as np\nimport dm_pix as pix\nimport jax\nimport jax.numpy as jnp\nimport tyro\n\nfrom models.tokenizer import TokenizerVQVAE\nfrom utils.dataloader import get_dataloader\nfrom utils.lr_utils import count_parameters_by_component\nfrom utils.logger import CompositeLogger\n\n\n@dataclass\nclass Args:\n # Experiment\n num_steps: int = 300_000\n seed: int = 
0\n seq_len: int = 16\n image_channels: int = 3\n image_height: int = 90\n image_width: int = 160\n data_dir: str = """"\n checkpoint: str = """"\n # Optimization\n vq_beta: float = 0.25\n batch_size: int = 48\n min_lr: float = 0.0\n max_lr: float = 3e-4\n warmup_steps: int = 10000\n # Tokenizer\n model_dim: int = 512\n latent_dim: int = 32\n num_latents: int = 1024\n patch_size: int = 4\n num_blocks: int = 8\n num_heads: int = 8\n dropout: float = 0.0\n codebook_dropout: float = 0.01\n # Logging\n log_dir: str = ""logs/"" \n loggers: list[str] = field(default_factory=lambda: [""console""]) # options: console, local, tb, wandb\n entity: str = """"\n project: str = """"\n name: str = ""train_tokenizer""\n tags: list[str] = field(default_factory=lambda: [""tokenizer""])\n log_interval: int = 5\n log_image_interval: int = 250\n ckpt_dir: str = """"\n log_checkpoint_interval: int = 10000\n log_gradients: bool = False\n\n\nargs = tyro.cli(Args)\n\n\ndef tokenizer_loss_fn(params, state, inputs):\n # --- Compute loss ---\n outputs = state.apply_fn(\n params,\n inputs,\n training=True,\n rngs={""params"": inputs[""rng""], ""dropout"": inputs[""dropout_rng""]},\n )\n mse = jnp.square(inputs[""videos""] - outputs[""recon""]).mean()\n q_loss = jnp.square(jax.lax.stop_gradient(outputs[""emb""]) - outputs[""z""]).mean()\n commitment_loss = jnp.square(\n outputs[""emb""] - jax.lax.stop_gradient(outputs[""z""])\n ).mean()\n loss = mse + q_loss + args.vq_beta * commitment_loss\n\n # --- Compute validation metrics ---\n gt = inputs[""videos""].clip(0, 1).reshape(-1, *inputs[""videos""].shape[2:])\n recon = outputs[""recon""].clip(0, 1).reshape(-1, *outputs[""recon""].shape[2:])\n psnr = pix.psnr(gt, recon).mean()\n ssim = pix.ssim(gt, recon).mean()\n _, index_counts = jnp.unique_counts(\n jnp.ravel(outputs[""indices""]), size=args.num_latents, fill_value=0\n )\n codebook_usage = (index_counts != 0).mean()\n metrics = dict(\n loss=loss,\n mse=mse,\n q_loss=q_loss,\n commitment_loss=commitment_loss,\n psnr=psnr,\n ssim=ssim,\n codebook_usage=codebook_usage,\n )\n return loss, (outputs[""recon""], metrics)\n\n\n@jax.jit\ndef train_step(state, inputs):\n grad_fn = jax.value_and_grad(tokenizer_loss_fn, has_aux=True, allow_int=True)\n (loss, (recon, metrics)), grads = grad_fn(state.params, state, inputs)\n state = state.apply_gradients(grads=grads)\n if args.log_gradients:\n metrics[""encoder_gradients_std/""] = jax.tree.map(\n lambda x: x.std(), grads[""params""][""encoder""]\n )\n metrics[""vq_gradients_std/""] = jax.tree.map(\n lambda x: x.std(), grads[""params""][""vq""]\n )\n metrics[""decoder_gradients_std/""] = jax.tree.map(\n lambda x: x.std(), grads[""params""][""decoder""]\n )\n return state, loss, recon, metrics\n\n\nif __name__ == ""__main__"":\n jax.distributed.initialize()\n num_devices = jax.device_count()\n if num_devices == 0:\n raise ValueError(""No JAX devices found."")\n print(f""Running on {num_devices} devices."")\n\n if args.batch_size % num_devices != 0:\n raise ValueError(\n f""Global batch size {args.batch_size} must be divisible by ""\n f""number of devices {num_devices}.""\n )\n\n per_device_batch_size_for_init = args.batch_size // num_devices\n\n rng = jax.random.PRNGKey(args.seed)\n\n # --- Initialize model ---\n tokenizer = TokenizerVQVAE(\n in_dim=args.image_channels,\n model_dim=args.model_dim,\n latent_dim=args.latent_dim,\n num_latents=args.num_latents,\n patch_size=args.patch_size,\n num_blocks=args.num_blocks,\n num_heads=args.num_heads,\n dropout=args.dropout,\n 
codebook_dropout=args.codebook_dropout,\n )\n rng, _rng = jax.random.split(rng)\n image_shape = (args.image_height, args.image_width, args.image_channels)\n inputs = dict(\n videos=jnp.zeros(\n (per_device_batch_size_for_init, args.seq_len, *image_shape),\n dtype=jnp.float32,\n ),\n )\n init_params = tokenizer.init(_rng, inputs)\n\n param_counts = count_parameters_by_component(init_params)\n\n if jax.process_index() == 0:\n cfg = vars(args).copy()\n cfg[""model_param_count""] = param_counts\n logger = CompositeLogger(args.loggers, cfg)\n\n # --- Initialize optimizer ---\n lr_schedule = optax.warmup_cosine_decay_schedule(\n args.min_lr, args.max_lr, args.warmup_steps, args.num_steps\n )\n tx = optax.adamw(learning_rate=lr_schedule, b1=0.9, b2=0.9, weight_decay=1e-4)\n train_state = TrainState.create(apply_fn=tokenizer.apply, params=init_params, tx=tx)\n\n # FIXME: switch to create_hybrid_device_mesh for runs spanning multiple nodes\n device_mesh_arr = create_device_mesh((num_devices,))\n mesh = Mesh(devices=device_mesh_arr, axis_names=(""data"",))\n\n replicated_sharding = NamedSharding(mesh, PartitionSpec())\n videos_sharding = NamedSharding(\n mesh, PartitionSpec(""data"", None, None, None, None)\n )\n train_state = jax.device_put(train_state, replicated_sharding)\n\n # --- Load checkpoint ---\n step = 0\n if args.checkpoint:\n restore_target = {""model"": train_state}\n restore_args = orbax_utils.restore_args_from_target(restore_target)\n train_state.params[""params""].update(\n PyTreeCheckpointer()\n .restore(args.checkpoint, item=restore_target, restore_args=restore_args)[\n ""model""\n ]\n .params[""params""]\n )\n # Assume checkpoint is of the form tokenizer__\n step += int(args.checkpoint.split(""_"")[-1])\n\n # --- TRAIN LOOP ---\n array_record_files = [\n os.path.join(args.data_dir, x)\n for x in os.listdir(args.data_dir)\n if x.endswith("".array_record"")\n ]\n dataloader = get_dataloader(\n # NOTE: We deliberately pass the global batch size\n # The dataloader shards the dataset across all processes\n array_record_files,\n args.seq_len,\n args.batch_size,\n *image_shape,\n num_workers=8,\n prefetch_buffer_size=1,\n seed=args.seed,\n )\n dataloader = (jax.make_array_from_process_local_data(videos_sharding, elem) for elem in dataloader) # type: ignore\n print(f""Starting training from step {step}..."")\n while step < args.num_steps:\n for videos in dataloader:\n # --- Train step ---\n rng, _rng, _rng_dropout = jax.random.split(rng, 3)\n\n inputs = dict(videos=videos, rng=_rng, dropout_rng=_rng_dropout)\n train_state, loss, recon, metrics = train_step(train_state, inputs)\n step += 1\n\n # --- Logging ---\n if step % args.log_interval == 0 and jax.process_index() == 0:\n logger.log_metrics(\n {\n ""loss"": loss,\n **metrics,\n },\n step\n )\n if step % args.log_image_interval == 0:\n\n gt_seq = inputs[""videos""][0]\n recon_seq = recon[0].clip(0, 1)\n comparison_seq = jnp.concatenate((gt_seq, recon_seq), axis=1)\n comparison_seq = einops.rearrange(\n comparison_seq * 255, ""t h w c -> h (t w) c""\n )\n # NOTE: Process-dependent control flow deliberately happens\n # after indexing operation since it must not contain code\n # sections that lead to cross-accelerator communication.\n if jax.process_index() == 0:\n log_images = dict(\n image=np.asarray(gt_seq[0] * 255.).astype(np.uint8),\n recon=np.asarray(recon_seq[0] * 255.).astype(np.uint8),\n true_vs_recon=np.asarray(comparison_seq.astype(np.uint8)\n ),\n )\n logger.log_images(log_images, step)\n if step % 
args.log_checkpoint_interval == 0:\n ckpt = {""model"": train_state}\n orbax_checkpointer = orbax.checkpoint.PyTreeCheckpointer()\n save_args = orbax_utils.save_args_from_target(ckpt)\n orbax_checkpointer.save(\n os.path.join(os.getcwd(), args.ckpt_dir, f""tokenizer_{step}""),\n ckpt,\n save_args=save_args,\n )\n if step >= args.num_steps:\n break\n",python,tab +4288,10307011,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/models/tokenizer.py",0,0,"",python,tab +4289,10308844,"genie.py",0,0,"",python,tab +4290,10310527,"models/dynamics.py",0,0,"",python,tab +4291,10311803,"models/dynamics.py",1036,0,"",python,selection_mouse +4292,10311813,"models/dynamics.py",1035,0,"",python,selection_command +4293,10512633,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/models/tokenizer.py",0,0,"",python,tab +4294,10567522,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/models/tokenizer.py",882,0,"",python,selection_mouse +4295,10567524,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/models/tokenizer.py",881,0,"",python,selection_command +4296,10568177,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/models/tokenizer.py",1030,0,"",python,selection_mouse +4297,10568192,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/models/tokenizer.py",1029,0,"",python,selection_command +4298,10568738,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/models/tokenizer.py",1143,0,"",python,selection_mouse +4299,10568744,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/models/tokenizer.py",1142,0,"",python,selection_command +4300,10572509,"genie.py",0,0,"",python,tab +4301,10579206,"TERMINAL",0,0,"[?25lg[?25h[?25li[?25h",,terminal_output +4302,10579267,"TERMINAL",0,0,"[?25lt[?25h",,terminal_output +4303,10579430,"TERMINAL",0,0,"[?25l [?25h",,terminal_output +4304,10579581,"TERMINAL",0,0,"[?25ls[?25h",,terminal_output +4305,10579641,"TERMINAL",0,0,"[?25lt[?25h",,terminal_output +4306,10579702,"TERMINAL",0,0,"[?25la[?25h",,terminal_output +4307,10579816,"TERMINAL",0,0,"[?25lt[?25h",,terminal_output +4308,10579956,"TERMINAL",0,0,"[?25lu[?25h",,terminal_output +4309,10580062,"TERMINAL",0,0,"[?25ls[?25h",,terminal_output +4310,10580217,"TERMINAL",0,0,"\r\n[?2004l\r",,terminal_output +4311,10580528,"TERMINAL",0,0,"On branch causal-transformer-dynamics-model\r\nChanges not staged for commit:\r\n (use ""git add ..."" to update what will be committed)\r\n (use ""git restore ..."" to discard changes in working directory)\r\n\tmodified: genie.py\r\n\tmodified: models/dynamics.py\r\n\tmodified: train_dynamics.py\r\n\tmodified: utils/nn.py\r\n\r\nUntracked files:\r\n (use ""git add ..."" to include in what will be committed)\r\n\tlogs/\r\n\tread_tf_record.py\r\n\trequirements-franz.txt\r\n\tscripts_cremers/\r\n\tscripts_horeka/\r\n\tslurm/\r\n\tutils/logger_bak.py\r\n\tutils/visualizer.py\r\n\tweekend-job-requeuer.sh\r\n\tweekend-job-starter.sh\r\n\r\nno changes added to commit (use ""git add"" and/or ""git commit -a"")\r\n]0;tum_cte0515@hkn0710:~/Projects/jafar[?2004h(jafar) [tum_cte0515@hkn0710 jafar]$ ",,terminal_output +4312,10583656,"TERMINAL",0,0,"[?25lg[?25h",,terminal_output +4313,10583799,"TERMINAL",0,0,"[?25li[?25h",,terminal_output +4314,10583885,"TERMINAL",0,0,"[?25lt[?25h",,terminal_output +4315,10583991,"TERMINAL",0,0,"[?25l [?25h",,terminal_output +4316,10584218,"TERMINAL",0,0,"[?25la[?25h",,terminal_output +4317,10584524,"TERMINAL",0,0,"[?25ld[?25h",,terminal_output 
+4318,10584702,"TERMINAL",0,0,"[?25ld[?25h[?25l [?25h",,terminal_output +4319,10586227,"TERMINAL",0,0,"[?25lc[?25h",,terminal_output +4320,10586288,"TERMINAL",0,0,"[?25lo[?25h",,terminal_output +4321,10586483,"TERMINAL",0,0,"[?25lm[?25h",,terminal_output +4322,10586598,"TERMINAL",0,0,"[?25lm[?25h",,terminal_output +4323,10586726,"TERMINAL",0,0,"[?25li[?25h",,terminal_output +4324,10586840,"TERMINAL",0,0,"[?25lt[?25h",,terminal_output +4325,10587302,"TERMINAL",0,0,"",,terminal_output +4326,10587431,"TERMINAL",0,0,"",,terminal_output +4327,10588173,"TERMINAL",0,0,"[?25lg[?25h",,terminal_output +4328,10588234,"TERMINAL",0,0,"[?25li[?25h",,terminal_output +4329,10588296,"TERMINAL",0,0,"[?25lt[?25h",,terminal_output +4330,10588356,"TERMINAL",0,0,"[?25l [?25h",,terminal_output +4331,10588549,"TERMINAL",0,0,"[?25lb[?25h",,terminal_output +4332,10588605,"TERMINAL",0,0,"[?25lr[?25h",,terminal_output +4333,10588832,"TERMINAL",0,0,"[?25la[?25h",,terminal_output +4334,10588949,"TERMINAL",0,0,"[?25ln[?25h",,terminal_output +4335,10589124,"TERMINAL",0,0,"[?25lc[?25h[?25lh[?25h",,terminal_output +4336,10589331,"TERMINAL",0,0,"\r\n[?2004l\r[?1h=\r",,terminal_output +4337,10589469,"TERMINAL",0,0," add-wandb-name-and-tags\r\n* causal-transformer-dynamics-model\r\n convert-to-jax-array-in-iter\r\n correct-batched-sampling\r\n dev\r\n dont-let-tf-see-gpu\r\n feat/explicit-image-dims\r\n fix-sampling\r\n grad-norm-log-and-clip\r\n grain-dataloader\r\n logging-variants\r\n lr-schedules\r\n main\r\n metrics-logging-for-dynamics-model\r\n monkey-patch\r\n preprocess_video\r\n revised-dataloader\r\n runner\r\n runner-grain\r\n sample-from-different-topologies\r\n speedup-tfrecord-preprocessing\r\n tmp\r\n\r[?1l>]0;tum_cte0515@hkn0710:~/Projects/jafar[?2004h(jafar) [tum_cte0515@hkn0710 jafar]$ ",,terminal_output +4338,10593024,"TERMINAL",0,0,"[?25lgi[?25h",,terminal_output +4339,10593690,"TERMINAL",0,0,"[?25lt[?25h",,terminal_output +4340,10593847,"TERMINAL",0,0,"[?25l [?25h[?25lc[?25h",,terminal_output +4341,10593991,"TERMINAL",0,0,"[?25lo[?25h",,terminal_output +4342,10594291,"TERMINAL",0,0,"[?25lm[?25h",,terminal_output +4343,10594471,"TERMINAL",0,0,"[?25lm[?25h",,terminal_output +4344,10594622,"TERMINAL",0,0,"[?25li[?25h",,terminal_output +4345,10594689,"TERMINAL",0,0,"[?25lt[?25h",,terminal_output +4346,10594790,"TERMINAL",0,0,"[?25l [?25h",,terminal_output +4347,10595543,"TERMINAL",0,0,"[?25l.[?25h",,terminal_output +4348,10595705,"TERMINAL",0,0,"[?25la[?25h",,terminal_output +4349,10595771,"TERMINAL",0,0,"[?25lm[?25h",,terminal_output +4350,10595858,"TERMINAL",0,0,"[?25l [?25h",,terminal_output +4351,10597247,"TERMINAL",0,0,"[?25l.[?25h",,terminal_output +4352,10598459,"TERMINAL",0,0,"[?25l-[?25h",,terminal_output +4353,10598675,"TERMINAL",0,0,"[?25la[?25h[?25lm[?25h",,terminal_output +4354,10598735,"TERMINAL",0,0,"[?25l [?25h",,terminal_output +4355,10599025,"TERMINAL",0,0,"[?25l""[?25h",,terminal_output +4356,10599305,"TERMINAL",0,0,"[?25li[?25h",,terminal_output +4357,10599367,"TERMINAL",0,0,"[?25lm[?25h",,terminal_output +4358,10599668,"TERMINAL",0,0,"[?25lp[?25h[?25ll[?25h",,terminal_output +4359,10599855,"TERMINAL",0,0,"[?25le[?25h",,terminal_output +4360,10600064,"TERMINAL",0,0,"[?25le[?25h",,terminal_output +4361,10600581,"TERMINAL",0,0,"[?25lm[?25h",,terminal_output +4362,10600744,"TERMINAL",0,0,"[?25le[?25h",,terminal_output +4363,10601002,"TERMINAL",0,0,"[?25ln[?25h",,terminal_output +4364,10601292,"TERMINAL",0,0,"[?25lt[?25h",,terminal_output 
+4365,10601689,"TERMINAL",0,0,"[?25le[?25h",,terminal_output +4366,10601749,"TERMINAL",0,0,"[?25ld[?25h",,terminal_output +4367,10602315,"TERMINAL",0,0,"",,terminal_output +4368,10603091,"TERMINAL",0,0,"[?25l""[?25h",,terminal_output +4369,10604865,"TERMINAL",0,0,"[?25li[?25h",,terminal_output +4370,10604966,"TERMINAL",0,0,"[?25lm[?25h",,terminal_output +4371,10605184,"TERMINAL",0,0,"[?25lp[?25h[?25ll[?25h",,terminal_output +4372,10605304,"TERMINAL",0,0,"[?25le[?25h",,terminal_output +4373,10605485,"TERMINAL",0,0,"[?25lm[?25h[?25le[?25h",,terminal_output +4374,10606208,"TERMINAL",0,0,"[?25ln[?25h",,terminal_output +4375,10606402,"TERMINAL",0,0,"[?25lt[?25h",,terminal_output +4376,10606629,"TERMINAL",0,0,"[?25le[?25h[?25ld[?25h",,terminal_output +4377,10606692,"TERMINAL",0,0,"[?25l [?25h",,terminal_output +4378,10606935,"TERMINAL",0,0,"[?25lo[?25h",,terminal_output +4379,10606995,"TERMINAL",0,0,"[?25lp[?25h",,terminal_output +4380,10607244,"TERMINAL",0,0,"[?25lt[?25h",,terminal_output +4381,10607441,"TERMINAL",0,0,"[?25li[?25h",,terminal_output +4382,10607502,"TERMINAL",0,0,"[?25lo[?25h",,terminal_output +4383,10607685,"TERMINAL",0,0,"[?25ln[?25h",,terminal_output +4384,10607791,"TERMINAL",0,0,"[?25l [?25h",,terminal_output +4385,10607954,"TERMINAL",0,0,"[?25lt[?25h",,terminal_output +4386,10608029,"TERMINAL",0,0,"[?25lo[?25h",,terminal_output +4387,10608124,"TERMINAL",0,0,"[?25l [?25h",,terminal_output +4388,10608751,"TERMINAL",0,0,"[?25lb[?25h",,terminal_output +4389,10609286,"TERMINAL",0,0,"[?25ll[?25h",,terminal_output +4390,10609766,"TERMINAL",0,0,"[?25la[?25h",,terminal_output +4391,10610052,"TERMINAL",0,0,"[?25ll[?25h",,terminal_output +4392,10610457,"TERMINAL",0,0,"[?25lv[?25h",,terminal_output +4393,10610517,"TERMINAL",0,0,"[?25ll[?25h",,terminal_output +4394,10610758,"TERMINAL",0,0,"[?25la[?25h",,terminal_output +4395,10611714,"TERMINAL",0,0,"[?25lb[?25h",,terminal_output +4396,10611774,"TERMINAL",0,0,"[?25ll[?25h",,terminal_output +4397,10611942,"TERMINAL",0,0,"[?25la[?25h",,terminal_output +4398,10612147,"TERMINAL",0,0,"[?25le[?25h",,terminal_output +4399,10612624,"TERMINAL",0,0,"[?25lt[?25h",,terminal_output +4400,10612802,"TERMINAL",0,0,"[?25le[?25h",,terminal_output +4401,10612870,"TERMINAL",0,0,"[?25l [?25h",,terminal_output +4402,10613965,"TERMINAL",0,0,"[?25lm[?25h[?25la[?25h[?25ls[?25h",,terminal_output +4403,10614068,"TERMINAL",0,0,"[?25lk[?25h",,terminal_output +4404,10614250,"TERMINAL",0,0,"[?25lg[?25h",,terminal_output +4405,10614379,"TERMINAL",0,0,"[?25li[?25h[?25lt[?25h",,terminal_output +4406,10614931,"TERMINAL",0,0,"[?25l""[?25h",,terminal_output +4407,10615581,"TERMINAL",0,0,"\r\n[?2004l\r",,terminal_output +4408,10615722,"TERMINAL",0,0,"[causal-transformer-dynamics-model fa133d5] implemented option to ablate maskgit\r\n",,terminal_output +4409,10615792,"TERMINAL",0,0," 4 files changed, 188 insertions(+), 16 deletions(-)\r\n]0;tum_cte0515@hkn0710:~/Projects/jafar[?2004h(jafar) [tum_cte0515@hkn0710 jafar]$ ",,terminal_output +4410,10616378,"TERMINAL",0,0,"[?25lg[?25h",,terminal_output +4411,10616603,"TERMINAL",0,0,"[?25li[?25h",,terminal_output +4412,10616762,"TERMINAL",0,0,"[?25lt[?25h[?25l [?25h",,terminal_output +4413,10616968,"TERMINAL",0,0,"[?25lp[?25h",,terminal_output +4414,10617135,"TERMINAL",0,0,"[?25lu[?25h",,terminal_output +4415,10617204,"TERMINAL",0,0,"[?25ls[?25h",,terminal_output +4416,10617300,"TERMINAL",0,0,"[?25lh[?25h",,terminal_output +4417,10618384,"TERMINAL",0,0,"",,terminal_output +4418,10618568,"TERMINAL",0,0,"",,terminal_output 
+4419,10634087,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",0,0,"",python,tab +4420,10634087,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",2535,0,"",python,selection_mouse +4421,10634696,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",2666,0,"",python,selection_mouse +4422,10635573,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",2800,0,"",python,selection_mouse +4423,10636219,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",2842,0,"",python,selection_mouse +4424,10636869,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",3143,0,"",python,selection_mouse +4425,10707817,"TERMINAL",0,0,"[?25lq[?25h",,terminal_output +4426,10707986,"TERMINAL",0,0,"[?25lu[?25h[?25le[?25h[?25lu[?25h",,terminal_output +4427,10708097,"TERMINAL",0,0,"[?25le[?25h",,terminal_output +4428,10708257,"TERMINAL",0,0,"\r\n[?2004l\r[?1049h(B[?7hEvery 1.0s: squeue --mehkn0710.localdomain: Wed Jul 16 17:47:55 2025JOBID PARTITION NAME USER ST\tTIME NODES NODELIST(REASON)3348592 accelerat train_dy tum_cte0 R 17:47:33\t 2 hkn[0416,0421]3348397 accelerat train_dy tum_cte0 R 23:09:15\t 2 hkn[0810,0815]3348399 accelerat train_dy tum_cte0 R 23:09:15\t 2 hkn[0601,0603]3348400 accelerat train_dy tum_cte0 R 23:09:15\t 2 hkn[0604,0608]3351743 accelerat train_dy tum_cte0 R17:42\t 2 hkn[0725-0726]3350418 accelerat interact tum_cte0 R 2:58:09\t 2 hkn[0710-0711]3345116 accelerat train_dy tum_cte0 R 1-23:53:00\t 2 hkn[0503,0506]",,terminal_output +4429,10709334,"TERMINAL",0,0,"646663101",,terminal_output +4430,10709667,"TERMINAL",0,0,"[?1049l\r[?1l>]0;tum_cte0515@hkn0710:~/Projects/jafar[?2004h(jafar) [tum_cte0515@hkn0710 jafar]$ ",,terminal_output +4431,10712013,"slurm/jobs/mihir/horeka/yolo-runs/train_dynamics_yolorun.sbatch",0,0,"",shellscript,tab +4432,10712956,"slurm/jobs/mihir/horeka/yolo-runs/train_dynamics_yolorun.sbatch",413,0,"",shellscript,selection_mouse +4433,10744523,"slurm/jobs/mihir/horeka/yolo-runs/train_dynamics_yolorun.sbatch",0,0,"",shellscript,tab +4434,10745772,"slurm/jobs/mihir/horeka/yolo-runs/tester.sh",0,0,"",shellscript,tab +4435,10746527,"slurm/jobs/mihir/horeka/yolo-runs/tester.sh",308,0,"",shellscript,selection_mouse +4436,10747116,"slurm/jobs/mihir/horeka/yolo-runs/tester.sh",603,0,"",shellscript,selection_mouse +4437,10747694,"slurm/jobs/mihir/horeka/yolo-runs/tester.sh",706,0,"",shellscript,selection_mouse +4438,10748393,"slurm/jobs/mihir/horeka/yolo-runs/tester.sh",644,0,"",shellscript,selection_mouse +4439,10749459,"slurm/jobs/mihir/horeka/yolo-runs/tester.sh",748,0,"",shellscript,selection_mouse +4440,10750063,"slurm/jobs/mihir/horeka/yolo-runs/tester.sh",728,0,"",shellscript,selection_mouse +4441,10750822,"slurm/jobs/mihir/horeka/yolo-runs/tester.sh",780,0,"",shellscript,selection_mouse +4442,10753386,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",0,0,"",python,tab +4443,10753386,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",2735,0,"",python,selection_mouse +4444,10753894,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",2800,0,"",python,selection_mouse +4445,10754587,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",3055,0,"",python,selection_mouse 
+4446,10759467,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",3085,0,"",python,selection_mouse +4447,10777199,"models/dynamics.py",0,0,"",python,tab +4448,10786502,"models/dynamics.py",102,0,"",python,selection_mouse +4449,10786520,"models/dynamics.py",101,0,"",python,selection_command +4450,10787435,"models/dynamics.py",89,14,"",python,content +4451,10799728,"TERMINAL",0,0,"bash",,terminal_focus +4452,10801030,"TERMINAL",0,0,"srun",,terminal_focus +4453,10802399,"TERMINAL",0,0,"[?25lc[?25h[?25lp[?25h",,terminal_output +4454,10802532,"TERMINAL",0,0,"[?25l [?25h",,terminal_output +4455,10802872,"TERMINAL",0,0,"[?25lt[?25h",,terminal_output +4456,10802979,"TERMINAL",0,0,"[?25lr[?25h",,terminal_output +4457,10803094,"TERMINAL",0,0,"[?25la[?25h",,terminal_output +4458,10803338,"TERMINAL",0,0,"in_",,terminal_output +4459,10804212,"TERMINAL",0,0,"[?25ld[?25h",,terminal_output +4460,10804414,"TERMINAL",0,0,"ynamics.py ",,terminal_output +4461,10805701,"TERMINAL",0,0,"[?25lt[?25h",,terminal_output +4462,10805761,"TERMINAL",0,0,"[?25lr[?25h",,terminal_output +4463,10805975,"TERMINAL",0,0,"ain_",,terminal_output +4464,10806928,"TERMINAL",0,0,"[?25ld[?25h",,terminal_output +4465,10807101,"TERMINAL",0,0,"ynamics.py ",,terminal_output +4466,10809953,"TERMINAL",0,0,"[?25l.[1@_[?25h",,terminal_output +4467,10810772,"TERMINAL",0,0,"[?25l.[1@a[?25h",,terminal_output +4468,10811602,"TERMINAL",0,0,"[?25l.[1@u[?25h",,terminal_output +4469,10811762,"TERMINAL",0,0,"[?25l.[1@t[?25h",,terminal_output +4470,10811824,"TERMINAL",0,0,"[?25l.[1@o[?25h",,terminal_output +4471,10812494,"TERMINAL",0,0,"[?25l.[1@r[?25h",,terminal_output +4472,10812717,"TERMINAL",0,0,"[?25l.[1@e[?25h",,terminal_output +4473,10812821,"TERMINAL",0,0,"[?25l.[1@g[?25h",,terminal_output +4474,10813119,"TERMINAL",0,0,"[?25l.[1@r[?25h",,terminal_output +4475,10813295,"TERMINAL",0,0,"[?25l.[1@e[?25h",,terminal_output +4476,10813489,"TERMINAL",0,0,"[?25l.[1@s[?25h",,terminal_output +4477,10813638,"TERMINAL",0,0,"[?25l.[1@s[?25h",,terminal_output +4478,10813701,"TERMINAL",0,0,"[?25l.[1@i[?25h",,terminal_output +4479,10813825,"TERMINAL",0,0,"[?25l.[1@v[?25h",,terminal_output +4480,10813954,"TERMINAL",0,0,"[?25l.[1@e[?25h",,terminal_output +4481,10819060,"TERMINAL",0,0,"^C[?2004l\r[?2004h[?2004l\r\r\n]0;tum_cte0515@hkn0710:~/Projects/jafar[?2004h(jafar) [tum_cte0515@hkn0710 jafar]$ ",,terminal_output +4482,10820037,"TERMINAL",0,0,"[?25lg[?25h",,terminal_output +4483,10820121,"TERMINAL",0,0,"[?25li[?25h",,terminal_output +4484,10820190,"TERMINAL",0,0,"[?25lt[?25h[?25l [?25h",,terminal_output +4485,10820540,"TERMINAL",0,0,"[?25ls[?25h",,terminal_output +4486,10821539,"TERMINAL",0,0,"[?25ls[?25h",,terminal_output +4487,10822057,"TERMINAL",0,0,"[?25lt[?25h",,terminal_output +4488,10822114,"TERMINAL",0,0,"[?25la[?25h",,terminal_output +4489,10822254,"TERMINAL",0,0,"[?25lt[?25h",,terminal_output +4490,10822504,"TERMINAL",0,0,"[?25ls[?25h",,terminal_output +4491,10823085,"TERMINAL",0,0,"[?25lu[?25h[?25ls[?25h",,terminal_output +4492,10823281,"TERMINAL",0,0,"\r\n[?2004l\rOn branch causal-transformer-dynamics-model\r\nChanges not staged for commit:\r\n (use ""git add ..."" to update what will be committed)\r\n (use ""git restore ..."" to discard changes in working directory)\r\n\tmodified: models/dynamics.py\r\n\r\nUntracked files:\r\n (use ""git add ..."" to include in what will be 
committed)\r\n\tlogs/\r\n\tread_tf_record.py\r\n\trequirements-franz.txt\r\n\tscripts_cremers/\r\n\tscripts_horeka/\r\n\tslurm/\r\n\tutils/logger_bak.py\r\n\tutils/visualizer.py\r\n\tweekend-job-requeuer.sh\r\n\tweekend-job-starter.sh\r\n\r\nno changes added to commit (use ""git add"" and/or ""git commit -a"")\r\n]0;tum_cte0515@hkn0710:~/Projects/jafar[?2004h(jafar) [tum_cte0515@hkn0710 jafar]$ ",,terminal_output +4493,10825138,"TERMINAL",0,0,"[?25lg[?25h",,terminal_output +4494,10825310,"TERMINAL",0,0,"[?25lit[?25h",,terminal_output +4495,10825473,"TERMINAL",0,0,"[?25l [?25h",,terminal_output +4496,10825534,"TERMINAL",0,0,"[?25ld[?25h",,terminal_output +4497,10825698,"TERMINAL",0,0,"[?25li[?25h",,terminal_output +4498,10825760,"TERMINAL",0,0,"[?25lf[?25h",,terminal_output +4499,10825982,"TERMINAL",0,0,"[?25lf[?25h[?25l [?25h",,terminal_output +4500,10826134,"TERMINAL",0,0,"[?25lm[?25h",,terminal_output +4501,10826288,"TERMINAL",0,0,"[?25lo[?25h",,terminal_output +4502,10826394,"TERMINAL",0,0,"nkey-patch ",,terminal_output +4503,10826950,"TERMINAL",0,0,"[?25ld[?25h",,terminal_output +4504,10827718,"TERMINAL",0,0,"",,terminal_output +4505,10827861,"TERMINAL",0,0,"",,terminal_output +4506,10828077,"TERMINAL",0,0,"",,terminal_output +4507,10828205,"TERMINAL",0,0,"",,terminal_output +4508,10828391,"TERMINAL",0,0,"",,terminal_output +4509,10829874,"TERMINAL",0,0,"[?25lg[?25h",,terminal_output +4510,10829936,"TERMINAL",0,0,"[?25li[?25h",,terminal_output +4511,10829993,"TERMINAL",0,0,"[?25lt[?25h",,terminal_output +4512,10830122,"TERMINAL",0,0,"[?25l [?25h",,terminal_output +4513,10830327,"TERMINAL",0,0,"[?25lc[?25h",,terminal_output +4514,10830444,"TERMINAL",0,0,"[?25lh[?25h[?25le[?25h",,terminal_output +4515,10830594,"TERMINAL",0,0,"[?25lc[?25h[?25lk[?25h",,terminal_output +4516,10831490,"TERMINAL",0,0,"[?25lou[?25h",,terminal_output +4517,10831714,"TERMINAL",0,0,"[?25lt[?25h",,terminal_output +4518,10831732,"TERMINAL",0,0,"[?25l [?25h",,terminal_output +4519,10831966,"TERMINAL",0,0,"[?25la[?25h",,terminal_output +4520,10832408,"TERMINAL",0,0,"[?25lm[?25h",,terminal_output +4521,10832468,"TERMINAL",0,0,"[?25la[?25h",,terminal_output +4522,10832529,"TERMINAL",0,0,"[?25li[?25h",,terminal_output +4523,10832591,"TERMINAL",0,0,"[?25ln[?25h",,terminal_output +4524,10832767,"TERMINAL",0,0,"\r\n[?2004l\rerror: Your local changes to the following files would be overwritten by checkout:\r\n\tmodels/dynamics.py\r\nPlease commit your changes or stash them before you switch branches.\r\nAborting\r\n]0;tum_cte0515@hkn0710:~/Projects/jafar[?2004h(jafar) [tum_cte0515@hkn0710 jafar]$ ",,terminal_output +4525,10833512,"TERMINAL",0,0,"[?25lg[?25h[?25li[?25h",,terminal_output +4526,10833624,"TERMINAL",0,0,"[?25lt[?25h",,terminal_output +4527,10833686,"TERMINAL",0,0,"[?25l [?25h",,terminal_output +4528,10833913,"TERMINAL",0,0,"[?25ls[?25h",,terminal_output +4529,10833982,"TERMINAL",0,0,"[?25lt[?25h",,terminal_output +4530,10834100,"TERMINAL",0,0,"[?25la[?25h",,terminal_output +4531,10834178,"TERMINAL",0,0,"[?25ls[?25h",,terminal_output +4532,10834285,"TERMINAL",0,0,"[?25lh[?25h",,terminal_output +4533,10834511,"TERMINAL",0,0,"\r\n[?2004l\r",,terminal_output +4534,10834691,"TERMINAL",0,0,"Saved working directory and index state WIP on causal-transformer-dynamics-model: fa133d5 implemented option to ablate maskgit\r\n",,terminal_output +4535,10834812,"TERMINAL",0,0,"]0;tum_cte0515@hkn0710:~/Projects/jafar[?2004h(jafar) [tum_cte0515@hkn0710 jafar]$ ",,terminal_output +4536,10834873,"TERMINAL",0,0,"git 
stash",,terminal_output +4537,10835074,"TERMINAL",0,0,"checkout main",,terminal_output +4538,10835597,"TERMINAL",0,0,"\r\n[?2004l\r",,terminal_output +4539,10835719,"TERMINAL",0,0,"Switched to branch 'main'\r\nYour branch is up to date with 'origin/main'.\r\n]0;tum_cte0515@hkn0710:~/Projects/jafar[?2004h(jafar) [tum_cte0515@hkn0710 jafar]$ ",,terminal_output +4540,10836160,"",0,0,"Switched from branch 'causal-transformer-dynamics-model' to 'main'",,git_branch_checkout +4541,10836343,"TERMINAL",0,0,"[?25lg[?25h",,terminal_output +4542,10836447,"TERMINAL",0,0,"[?25li[?25h",,terminal_output +4543,10836509,"TERMINAL",0,0,"[?25lt[?25h",,terminal_output +4544,10836641,"TERMINAL",0,0,"[?25l [?25h",,terminal_output +4545,10837717,"TERMINAL",0,0,"[?25lp[?25h",,terminal_output +4546,10838198,"TERMINAL",0,0,"[?25lu[?25h",,terminal_output +4547,10838413,"TERMINAL",0,0,"[?25ll[?25h",,terminal_output +4548,10838518,"TERMINAL",0,0,"[?25ll[?25h",,terminal_output +4549,10838680,"TERMINAL",0,0,"\r\n[?2004l\r",,terminal_output +4550,10840521,"TERMINAL",0,0,"remote: Enumerating objects: 6, done.\r\nremote: Counting objects: 16% (1/6)\rremote: Counting objects: 33% (2/6)\rremote: Counting objects: 50% (3/6)\rremote: Counting objects: 66% (4/6)\rremote: Counting objects: 83% (5/6)\rremote: Counting objects: 100% (6/6)\rremote: Counting objects: 100% (6/6), done.\r\nremote: Compressing objects: 50% (1/2)\rremote: Compressing objects: 100% (2/2)\rremote: Compressing objects: 100% (2/2), done.\r\nremote: Total 6 (delta 4), reused 6 (delta 4), pack-reused 0 (from 0)\r\nUnpacking objects: 16% (1/6)\rUnpacking objects: 33% (2/6)\rUnpacking objects: 50% (3/6)\rUnpacking objects: 66% (4/6)\rUnpacking objects: 83% (5/6)\rUnpacking objects: 100% (6/6)\rUnpacking objects: 100% (6/6), 619 bytes | 12.00 KiB/s, done.\r\n",,terminal_output +4551,10840628,"TERMINAL",0,0,"From github.com:p-doom/jafar\r\n c7724bb..ae5ad00 cudnn-flash-attn-mixed-precision-4 -> origin/cudnn-flash-attn-mixed-precision-4\r\n * [new branch] hotfix/gt-logging -> origin/hotfix/gt-logging\r\n",,terminal_output +4552,10840808,"TERMINAL",0,0,"Already up to date.\r\n]0;tum_cte0515@hkn0710:~/Projects/jafar[?2004h(jafar) [tum_cte0515@hkn0710 jafar]$ ",,terminal_output +4553,10841889,"TERMINAL",0,0,"[?25lc[?25h",,terminal_output +4554,10841994,"TERMINAL",0,0,"[?25lp[?25h",,terminal_output +4555,10842102,"TERMINAL",0,0,"[?25l [?25h",,terminal_output +4556,10842862,"TERMINAL",0,0,"[?25lt[?25h",,terminal_output +4557,10842969,"TERMINAL",0,0,"[?25lr[?25h",,terminal_output +4558,10843117,"TERMINAL",0,0,"ain_",,terminal_output +4559,10843867,"TERMINAL",0,0,"[?25ld[?25h",,terminal_output +4560,10844044,"TERMINAL",0,0,"ynamics.py ",,terminal_output +4561,10844754,"TERMINAL",0,0,"[?25lt[?25h",,terminal_output +4562,10844815,"TERMINAL",0,0,"[?25lr[?25h",,terminal_output +4563,10845030,"TERMINAL",0,0,"ain_",,terminal_output +4564,10845704,"TERMINAL",0,0,"[?25ld[?25h",,terminal_output +4565,10845796,"TERMINAL",0,0,"ynamics.py ",,terminal_output +4566,10847954,"TERMINAL",0,0,"[?25l.[1@_[?25h",,terminal_output +4567,10848288,"TERMINAL",0,0,"[?25l.[1@m[?25h",,terminal_output +4568,10848349,"TERMINAL",0,0,"[?25l.[1@a[?25h",,terminal_output +4569,10848411,"TERMINAL",0,0,"[?25l.[1@s[?25h",,terminal_output +4570,10848473,"TERMINAL",0,0,"[?25l.[1@k[?25h",,terminal_output +4571,10848771,"TERMINAL",0,0,"[?25l.[1@g[?25h",,terminal_output +4572,10848938,"TERMINAL",0,0,"[?25l.[1@t[?25h",,terminal_output +4573,10849195,"TERMINAL",0,0,"[?25lt[?25h",,terminal_output 
+4574,10849348,"TERMINAL",0,0,"[?25l.[1@u[?25h",,terminal_output +4575,10849409,"TERMINAL",0,0,"[?25l.[1@t[?25h",,terminal_output +4576,10849688,"TERMINAL",0,0,"\r\n[?2004l\r]0;tum_cte0515@hkn0710:~/Projects/jafar[?2004h(jafar) [tum_cte0515@hkn0710 jafar]$ ",,terminal_output +4577,10850998,"TERMINAL",0,0,"[?25lg[?25h[?25li[?25h",,terminal_output +4578,10851136,"TERMINAL",0,0,"[?25lt[?25h",,terminal_output +4579,10851193,"TERMINAL",0,0,"[?25l [?25h",,terminal_output +4580,10851322,"TERMINAL",0,0,"[?25lc[?25h",,terminal_output +4581,10851384,"TERMINAL",0,0,"[?25lh[?25h",,terminal_output +4582,10851491,"TERMINAL",0,0,"[?25le[?25h",,terminal_output +4583,10851604,"TERMINAL",0,0,"[?25lc[?25h",,terminal_output +4584,10852041,"TERMINAL",0,0,"[?25lk[?25h",,terminal_output +4585,10852152,"TERMINAL",0,0,"[?25lo[?25h",,terminal_output +4586,10852215,"TERMINAL",0,0,"[?25lu[?25h",,terminal_output +4587,10852320,"TERMINAL",0,0,"[?25lt[?25h",,terminal_output +4588,10852381,"TERMINAL",0,0,"[?25l [?25h",,terminal_output +4589,10858242,"TERMINAL",0,0,"causal-transformer-dynamics-model",,terminal_output +4590,10858690,"TERMINAL",0,0,"causal-transformer-dynamics-model\r\n[?2004l\rSwitched to branch 'causal-transformer-dynamics-model'\r\n]0;tum_cte0515@hkn0710:~/Projects/jafar[?2004h(jafar) [tum_cte0515@hkn0710 jafar]$ ",,terminal_output +4591,10859575,"TERMINAL",0,0,"[?25lg[?25h",,terminal_output +4592,10859875,"TERMINAL",0,0,"[?25li[?25h",,terminal_output +4593,10859993,"TERMINAL",0,0,"[?25lt[?25h",,terminal_output +4594,10860456,"TERMINAL",0,0,"[?25l [?25h",,terminal_output +4595,10860597,"TERMINAL",0,0,"[?25ls[?25h",,terminal_output +4596,10860713,"TERMINAL",0,0,"[?25lt[?25h",,terminal_output +4597,10860775,"TERMINAL",0,0,"[?25la[?25h",,terminal_output +4598,10860921,"TERMINAL",0,0,"[?25ls[?25h",,terminal_output +4599,10861136,"",0,0,"Switched from branch 'main' to 'causal-transformer-dynamics-model'",,git_branch_checkout +4600,10861740,"TERMINAL",0,0,"[?25lh[?25h",,terminal_output +4601,10861801,"TERMINAL",0,0,"[?25l [?25h",,terminal_output +4602,10862184,"TERMINAL",0,0,"[?25lp[?25h",,terminal_output +4603,10862313,"TERMINAL",0,0,"[?25lo[?25h",,terminal_output +4604,10862514,"TERMINAL",0,0,"[?25lp[?25h",,terminal_output +4605,10862804,"TERMINAL",0,0,"\r\n[?2004l\rOn branch causal-transformer-dynamics-model\r\nChanges not staged for commit:\r\n (use ""git add ..."" to update what will be committed)\r\n (use ""git restore ..."" to discard changes in working directory)\r\n\tmodified: models/dynamics.py\r\n\r\nUntracked files:\r\n (use ""git add ..."" to include in what will be committed)\r\n\tlogs/\r\n\tread_tf_record.py\r\n\trequirements-franz.txt\r\n\tscripts_cremers/\r\n\tscripts_horeka/\r\n\tslurm/\r\n\ttrain_dynamics_maskgut.py\r\n\tutils/logger_bak.py\r\n\tutils/visualizer.py\r\n\tweekend-job-requeuer.sh\r\n\tweekend-job-starter.sh\r\n\r\nno changes added to commit (use ""git add"" and/or ""git commit -a"")\r\nDropped refs/stash@{0} (18343dc98397f706aa4ab1203b865464838d6d1a)\r\n]0;tum_cte0515@hkn0710:~/Projects/jafar[?2004h(jafar) [tum_cte0515@hkn0710 jafar]$ ",,terminal_output +4606,10864044,"TERMINAL",0,0,"[?25lg[?25h",,terminal_output +4607,10864105,"TERMINAL",0,0,"[?25li[?25h",,terminal_output +4608,10864245,"TERMINAL",0,0,"[?25lt[?25h[?25l [?25h",,terminal_output +4609,10865998,"TERMINAL",0,0,"[?25l [?25h",,terminal_output +4610,10873776,"train_dynamics_maskgut.py",0,0,"from dataclasses import dataclass, field\nimport os\n\nimport einops\nfrom flax.training.train_state import TrainState\nfrom 
jax.sharding import Mesh, PartitionSpec, NamedSharding\nfrom jax.experimental.mesh_utils import create_device_mesh\nimport optax\nimport orbax.checkpoint as ocp\nimport numpy as np\nimport dm_pix as pix\nimport jax\nimport jax.numpy as jnp\nimport tyro\nimport wandb\nimport grain\n\nfrom genie import Genie, restore_genie_components\nfrom utils.dataloader import get_dataloader\nfrom utils.lr_utils import get_lr_schedule\nfrom utils.parameter_utils import count_parameters_by_component\n\n@dataclass\nclass Args:\n # Experiment\n num_steps: int = 200_000\n seed: int = 0\n seq_len: int = 16\n image_channels: int = 3\n image_height: int = 90\n image_width: int = 160\n data_dir: str = """"\n save_ckpt: bool = False\n restore_ckpt: bool = False\n # Optimization\n batch_size: int = 36\n init_lr: float = 0.0\n max_lr: float = 3e-5\n decay_end: float = 0.0\n wsd_decay_steps: int = 10000 # NOTE: wsd_decay_steps will only be used when using a wsd-schedule\n warmup_steps: int = 5000\n lr_schedule : str = ""wsd"" # supported options: wsd, cos\n # Tokenizer\n tokenizer_dim: int = 512\n latent_patch_dim: int = 32\n num_patch_latents: int = 1024\n patch_size: int = 4\n tokenizer_num_blocks: int = 8\n tokenizer_num_heads: int = 8\n tokenizer_checkpoint: str = """"\n # LAM\n lam_dim: int = 512\n latent_action_dim: int = 32\n num_latent_actions: int = 6\n lam_patch_size: int = 16\n lam_num_blocks: int = 8\n lam_num_heads: int = 8\n lam_checkpoint: str = """"\n # Dynamics\n dyna_dim: int = 512\n dyna_num_blocks: int = 12\n dyna_num_heads: int = 8\n dropout: float = 0.0\n mask_limit: float = 0.5\n param_dtype: jnp.dtype = jnp.float32\n dtype: jnp.dtype = jnp.bfloat16\n # Logging\n log: bool = False\n entity: str = """"\n project: str = """"\n name: str = ""train_dynamics""\n tags: list[str] = field(default_factory=lambda: [""dynamics""])\n log_interval: int = 5\n log_image_interval: int = 250\n ckpt_dir: str = """"\n log_checkpoint_interval: int = 25000\n log_checkpoint_keep_period: int = 20000\n log_gradients: bool = False\n wandb_id: str = """"\n\n\nargs = tyro.cli(Args)\n\n\ndef dynamics_loss_fn(params, state, inputs):\n """"""Compute masked dynamics loss""""""\n inputs[""videos""] = inputs[""videos""].astype(args.dtype) / 255.0\n outputs = state.apply_fn(\n params,\n inputs,\n training=True,\n rngs={""params"": inputs[""rng""], ""dropout"": inputs[""dropout_rng""]},\n )\n mask = outputs[""mask""]\n outputs[""token_logits""] = outputs[""token_logits""].astype(jnp.float32)\n ce_loss = optax.softmax_cross_entropy_with_integer_labels(\n outputs[""token_logits""], outputs[""video_tokens""]\n )\n ce_loss = (mask * ce_loss).sum() / mask.sum()\n acc = outputs[""token_logits""].argmax(-1) == outputs[""video_tokens""]\n acc = (mask * acc).sum() / mask.sum()\n select_probs = jax.nn.softmax(outputs[""token_logits""])\n gt = inputs[""videos""].clip(0, 1).reshape(-1, *inputs[""videos""].shape[2:])\n recon = outputs[""recon""].clip(0, 1).reshape(-1, *outputs[""recon""].shape[2:])\n psnr = pix.psnr(gt, recon).mean() # type: ignore\n ssim = pix.ssim(gt, recon).mean() # type: ignore\n _, index_counts_lam = jnp.unique_counts(\n jnp.ravel(outputs[""lam_indices""]), size=args.num_latent_actions, fill_value=0\n )\n _, index_counts_tokenizer = jnp.unique_counts(\n jnp.ravel(outputs[""video_tokens""]), size=args.num_patch_latents, fill_value=0\n )\n codebook_usage_lam = (index_counts_lam != 0).mean()\n codebook_usage_tokenizer = (index_counts_tokenizer != 0).mean()\n metrics = dict(\n cross_entropy_loss=ce_loss,\n 
masked_token_accuracy=acc,\n select_logit=outputs[""token_logits""].max(-1).mean(),\n select_p=select_probs.max(-1).mean(),\n entropy=jax.scipy.special.entr(select_probs).sum(-1).mean(),\n psnr=psnr,\n ssim=ssim,\n codebook_usage_lam=codebook_usage_lam,\n codebook_usage_tokenizer=codebook_usage_tokenizer,\n )\n return ce_loss, (outputs[""recon""], metrics)\n\n\n@jax.jit\ndef train_step(state, inputs):\n """"""Update state and compute metrics""""""\n grad_fn = jax.value_and_grad(dynamics_loss_fn, has_aux=True, allow_int=True)\n (loss, (recon, metrics)), grads = grad_fn(state.params, state, inputs)\n state = state.apply_gradients(grads=grads)\n if args.log_gradients:\n metrics[""gradients_std/""] = jax.tree.map(\n lambda x: x.std(), grads[""params""][""dynamics""]\n )\n return state, loss, recon, metrics\n\n\nif __name__ == ""__main__"":\n jax.distributed.initialize()\n num_devices = jax.device_count()\n if num_devices == 0:\n raise ValueError(""No JAX devices found."")\n print(f""Running on {num_devices} devices."")\n\n if args.batch_size % num_devices != 0:\n raise ValueError(\n f""Global batch size {args.batch_size} must be divisible by ""\n f""number of devices {num_devices}.""\n )\n\n per_device_batch_size_for_init = args.batch_size // num_devices\n\n rng = jax.random.PRNGKey(args.seed)\n\n # --- Initialize model ---\n genie = Genie(\n # Tokenizer\n in_dim=args.image_channels,\n tokenizer_dim=args.tokenizer_dim,\n latent_patch_dim=args.latent_patch_dim,\n num_patch_latents=args.num_patch_latents,\n patch_size=args.patch_size,\n tokenizer_num_blocks=args.tokenizer_num_blocks,\n tokenizer_num_heads=args.tokenizer_num_heads,\n # LAM\n lam_dim=args.lam_dim,\n latent_action_dim=args.latent_action_dim,\n num_latent_actions=args.num_latent_actions,\n lam_patch_size=args.lam_patch_size,\n lam_num_blocks=args.lam_num_blocks,\n lam_num_heads=args.lam_num_heads,\n lam_co_train=not args.lam_checkpoint,\n # Dynamics\n dyna_dim=args.dyna_dim,\n dyna_num_blocks=args.dyna_num_blocks,\n dyna_num_heads=args.dyna_num_heads,\n dropout=args.dropout,\n mask_limit=args.mask_limit,\n param_dtype=args.param_dtype,\n dtype=args.dtype,\n )\n rng, _rng = jax.random.split(rng)\n image_shape = (args.image_height, args.image_width, args.image_channels)\n dummy_inputs = dict(\n videos=jnp.zeros(\n (per_device_batch_size_for_init, args.seq_len, *image_shape),\n dtype=args.dtype,\n ),\n action=jnp.zeros(\n (per_device_batch_size_for_init, args.seq_len), dtype=args.dtype\n ),\n mask_rng=_rng,\n )\n rng, _rng = jax.random.split(rng)\n init_params = genie.init(_rng, dummy_inputs)\n\n param_counts = count_parameters_by_component(init_params)\n\n if args.log and jax.process_index() == 0:\n wandb_init_kwargs = {\n ""entity"": args.entity,\n ""project"": args.project,\n ""name"": args.name,\n ""tags"": args.tags,\n ""group"": ""debug"",\n ""config"": args,\n }\n\n if args.wandb_id:\n wandb_init_kwargs.update(\n {\n ""id"": args.wandb_id,\n ""resume"": ""allow"",\n }\n )\n wandb.init(**wandb_init_kwargs)\n\n wandb.config.update({""model_param_count"": param_counts})\n\n print(""Parameter counts:"")\n print(param_counts)\n\n # --- Initialize optimizer ---\n lr_schedule = get_lr_schedule(args.lr_schedule, \n args.init_lr, \n args.max_lr, \n args.decay_end, \n args.num_steps, \n args.warmup_steps, \n args.wsd_decay_steps)\n tx = optax.adamw(learning_rate=lr_schedule, b1=0.9, b2=0.9, weight_decay=1e-4, mu_dtype=args.dtype)\n train_state = TrainState.create(apply_fn=genie.apply, params=init_params, tx=tx)\n\n device_mesh_arr = 
create_device_mesh((num_devices,))\n mesh = Mesh(devices=device_mesh_arr, axis_names=(""data"",))\n\n replicated_sharding = NamedSharding(mesh, PartitionSpec())\n videos_sharding = NamedSharding(mesh, PartitionSpec(""data"", None, None, None, None))\n train_state = jax.device_put(train_state, replicated_sharding)\n\n # --- Initialize checkpoint manager ---\n step = 0\n handler_registry = ocp.handlers.DefaultCheckpointHandlerRegistry()\n handler_registry.add(\n ""model_state"", ocp.args.StandardSave, ocp.handlers.StandardCheckpointHandler\n )\n handler_registry.add(\n ""model_state"", ocp.args.StandardRestore, ocp.handlers.StandardCheckpointHandler\n )\n handler_registry.add(""dataloader_state"", grain.checkpoint.CheckpointSave, grain.checkpoint.CheckpointHandler) # type: ignore\n handler_registry.add(""dataloader_state"", grain.checkpoint.CheckpointRestore, grain.checkpoint.CheckpointHandler) # type: ignore\n\n checkpoint_options = ocp.CheckpointManagerOptions(\n save_interval_steps=args.log_checkpoint_interval,\n max_to_keep=3,\n keep_period=args.log_checkpoint_keep_period,\n step_format_fixed_length=6,\n cleanup_tmp_directories=True,\n )\n\n checkpoint_manager = ocp.CheckpointManager(\n args.ckpt_dir,\n options=checkpoint_options,\n handler_registry=handler_registry,\n )\n\n # --- Create DataLoaderIterator from dataloader ---\n array_record_files = [\n os.path.join(args.data_dir, x)\n for x in os.listdir(args.data_dir)\n if x.endswith("".array_record"")\n ]\n grain_dataloader = get_dataloader(\n array_record_files,\n args.seq_len,\n # NOTE: We deliberately pass the global batch size\n # The dataloader shards the dataset across all processes\n args.batch_size,\n *image_shape,\n num_workers=8,\n prefetch_buffer_size=1,\n seed=args.seed,\n )\n initial_state = grain_dataloader._create_initial_state()\n grain_iterator = grain.DataLoaderIterator(grain_dataloader, initial_state)\n\n # --- Restore checkpoint ---\n if args.restore_ckpt:\n # Restore full dynamics model\n abstract_train_state = jax.tree_util.tree_map(\n ocp.utils.to_shape_dtype_struct, train_state\n )\n restored = checkpoint_manager.restore(\n checkpoint_manager.latest_step(),\n args=ocp.args.Composite(\n model_state=ocp.args.StandardRestore(abstract_train_state),\n dataloader_state=grain.checkpoint.CheckpointRestore(grain_iterator),\n ),\n )\n train_state = restored[""model_state""]\n grain_iterator = restored[""dataloader_state""]\n step = checkpoint_manager.latest_step() or 0\n print(f""Restored dataloader and model state from step {step}"")\n else:\n # Restore from pre-trained tokenizer (and LAM)\n train_state = restore_genie_components(\n train_state, replicated_sharding, grain_iterator, dummy_inputs, rng, args\n )\n\n # --- TRAIN LOOP ---\n dataloader = (jax.make_array_from_process_local_data(videos_sharding, elem) for elem in grain_iterator) # type: ignore\n while step < args.num_steps:\n for videos in dataloader:\n # --- Train step ---\n rng, _rng, _rng_dropout, _rng_mask = jax.random.split(rng, 4)\n\n inputs = dict(\n videos=videos,\n rng=_rng,\n dropout_rng=_rng_dropout,\n mask_rng=_rng_mask,\n )\n train_state, loss, recon, metrics = train_step(train_state, inputs)\n metrics[""lr""] = lr_schedule(step)\n print(f""Step {step}, loss: {loss}"")\n step += 1\n\n # --- Logging ---\n if args.log:\n if step % args.log_interval == 0 and jax.process_index() == 0:\n wandb.log(\n {\n ""loss"": loss,\n ""step"": step,\n **metrics,\n }\n )\n if step % args.log_image_interval == 0:\n gt_seq = inputs[""videos""][0]\n recon_seq = 
recon[0].clip(0, 1)\n comparison_seq = jnp.concatenate((gt_seq, recon_seq), axis=1)\n comparison_seq = einops.rearrange(\n comparison_seq * 255, ""t h w c -> h (t w) c""\n )\n if jax.process_index() == 0:\n log_images = dict(\n image=wandb.Image(np.asarray(gt_seq[args.seq_len - 1])),\n recon=wandb.Image(np.asarray(recon_seq[args.seq_len - 1])),\n true_vs_recon=wandb.Image(\n np.asarray(comparison_seq.astype(np.uint8))\n ),\n )\n wandb.log(log_images)\n # --- Checkpointing ---\n if args.save_ckpt and step % args.log_checkpoint_interval == 0:\n checkpoint_manager.save(\n step,\n args=ocp.args.Composite(\n model_state=ocp.args.StandardSave(train_state),\n dataloader_state=grain.checkpoint.CheckpointSave(\n grain_iterator\n ),\n ),\n )\n print(f""Saved checkpoint at step {step}"")\n if step >= args.num_steps:\n break\n\n checkpoint_manager.close()\n",python,tab +4611,10877875,"train_dynamics_maskgit.py",0,0,"from dataclasses import dataclass, field\nimport os\n\nimport einops\nfrom flax.training.train_state import TrainState\nfrom jax.sharding import Mesh, PartitionSpec, NamedSharding\nfrom jax.experimental.mesh_utils import create_device_mesh\nimport optax\nimport orbax.checkpoint as ocp\nimport numpy as np\nimport dm_pix as pix\nimport jax\nimport jax.numpy as jnp\nimport tyro\nimport wandb\nimport grain\n\nfrom genie import Genie, restore_genie_components\nfrom utils.dataloader import get_dataloader\nfrom utils.lr_utils import get_lr_schedule\nfrom utils.parameter_utils import count_parameters_by_component\n\n@dataclass\nclass Args:\n # Experiment\n num_steps: int = 200_000\n seed: int = 0\n seq_len: int = 16\n image_channels: int = 3\n image_height: int = 90\n image_width: int = 160\n data_dir: str = """"\n save_ckpt: bool = False\n restore_ckpt: bool = False\n # Optimization\n batch_size: int = 36\n init_lr: float = 0.0\n max_lr: float = 3e-5\n decay_end: float = 0.0\n wsd_decay_steps: int = 10000 # NOTE: wsd_decay_steps will only be used when using a wsd-schedule\n warmup_steps: int = 5000\n lr_schedule : str = ""wsd"" # supported options: wsd, cos\n # Tokenizer\n tokenizer_dim: int = 512\n latent_patch_dim: int = 32\n num_patch_latents: int = 1024\n patch_size: int = 4\n tokenizer_num_blocks: int = 8\n tokenizer_num_heads: int = 8\n tokenizer_checkpoint: str = """"\n # LAM\n lam_dim: int = 512\n latent_action_dim: int = 32\n num_latent_actions: int = 6\n lam_patch_size: int = 16\n lam_num_blocks: int = 8\n lam_num_heads: int = 8\n lam_checkpoint: str = """"\n # Dynamics\n dyna_dim: int = 512\n dyna_num_blocks: int = 12\n dyna_num_heads: int = 8\n dropout: float = 0.0\n mask_limit: float = 0.5\n param_dtype: jnp.dtype = jnp.float32\n dtype: jnp.dtype = jnp.bfloat16\n # Logging\n log: bool = False\n entity: str = """"\n project: str = """"\n name: str = ""train_dynamics""\n tags: list[str] = field(default_factory=lambda: [""dynamics""])\n log_interval: int = 5\n log_image_interval: int = 250\n ckpt_dir: str = """"\n log_checkpoint_interval: int = 25000\n log_checkpoint_keep_period: int = 20000\n log_gradients: bool = False\n wandb_id: str = """"\n\n\nargs = tyro.cli(Args)\n\n\ndef dynamics_loss_fn(params, state, inputs):\n """"""Compute masked dynamics loss""""""\n inputs[""videos""] = inputs[""videos""].astype(args.dtype) / 255.0\n outputs = state.apply_fn(\n params,\n inputs,\n training=True,\n rngs={""params"": inputs[""rng""], ""dropout"": inputs[""dropout_rng""]},\n )\n mask = outputs[""mask""]\n outputs[""token_logits""] = outputs[""token_logits""].astype(jnp.float32)\n ce_loss = 
optax.softmax_cross_entropy_with_integer_labels(\n outputs[""token_logits""], outputs[""video_tokens""]\n )\n ce_loss = (mask * ce_loss).sum() / mask.sum()\n acc = outputs[""token_logits""].argmax(-1) == outputs[""video_tokens""]\n acc = (mask * acc).sum() / mask.sum()\n select_probs = jax.nn.softmax(outputs[""token_logits""])\n gt = inputs[""videos""].clip(0, 1).reshape(-1, *inputs[""videos""].shape[2:])\n recon = outputs[""recon""].clip(0, 1).reshape(-1, *outputs[""recon""].shape[2:])\n psnr = pix.psnr(gt, recon).mean() # type: ignore\n ssim = pix.ssim(gt, recon).mean() # type: ignore\n _, index_counts_lam = jnp.unique_counts(\n jnp.ravel(outputs[""lam_indices""]), size=args.num_latent_actions, fill_value=0\n )\n _, index_counts_tokenizer = jnp.unique_counts(\n jnp.ravel(outputs[""video_tokens""]), size=args.num_patch_latents, fill_value=0\n )\n codebook_usage_lam = (index_counts_lam != 0).mean()\n codebook_usage_tokenizer = (index_counts_tokenizer != 0).mean()\n metrics = dict(\n cross_entropy_loss=ce_loss,\n masked_token_accuracy=acc,\n select_logit=outputs[""token_logits""].max(-1).mean(),\n select_p=select_probs.max(-1).mean(),\n entropy=jax.scipy.special.entr(select_probs).sum(-1).mean(),\n psnr=psnr,\n ssim=ssim,\n codebook_usage_lam=codebook_usage_lam,\n codebook_usage_tokenizer=codebook_usage_tokenizer,\n )\n return ce_loss, (outputs[""recon""], metrics)\n\n\n@jax.jit\ndef train_step(state, inputs):\n """"""Update state and compute metrics""""""\n grad_fn = jax.value_and_grad(dynamics_loss_fn, has_aux=True, allow_int=True)\n (loss, (recon, metrics)), grads = grad_fn(state.params, state, inputs)\n state = state.apply_gradients(grads=grads)\n if args.log_gradients:\n metrics[""gradients_std/""] = jax.tree.map(\n lambda x: x.std(), grads[""params""][""dynamics""]\n )\n return state, loss, recon, metrics\n\n\nif __name__ == ""__main__"":\n jax.distributed.initialize()\n num_devices = jax.device_count()\n if num_devices == 0:\n raise ValueError(""No JAX devices found."")\n print(f""Running on {num_devices} devices."")\n\n if args.batch_size % num_devices != 0:\n raise ValueError(\n f""Global batch size {args.batch_size} must be divisible by ""\n f""number of devices {num_devices}.""\n )\n\n per_device_batch_size_for_init = args.batch_size // num_devices\n\n rng = jax.random.PRNGKey(args.seed)\n\n # --- Initialize model ---\n genie = Genie(\n # Tokenizer\n in_dim=args.image_channels,\n tokenizer_dim=args.tokenizer_dim,\n latent_patch_dim=args.latent_patch_dim,\n num_patch_latents=args.num_patch_latents,\n patch_size=args.patch_size,\n tokenizer_num_blocks=args.tokenizer_num_blocks,\n tokenizer_num_heads=args.tokenizer_num_heads,\n # LAM\n lam_dim=args.lam_dim,\n latent_action_dim=args.latent_action_dim,\n num_latent_actions=args.num_latent_actions,\n lam_patch_size=args.lam_patch_size,\n lam_num_blocks=args.lam_num_blocks,\n lam_num_heads=args.lam_num_heads,\n lam_co_train=not args.lam_checkpoint,\n # Dynamics\n dyna_dim=args.dyna_dim,\n dyna_num_blocks=args.dyna_num_blocks,\n dyna_num_heads=args.dyna_num_heads,\n dropout=args.dropout,\n mask_limit=args.mask_limit,\n param_dtype=args.param_dtype,\n dtype=args.dtype,\n )\n rng, _rng = jax.random.split(rng)\n image_shape = (args.image_height, args.image_width, args.image_channels)\n dummy_inputs = dict(\n videos=jnp.zeros(\n (per_device_batch_size_for_init, args.seq_len, *image_shape),\n dtype=args.dtype,\n ),\n action=jnp.zeros(\n (per_device_batch_size_for_init, args.seq_len), dtype=args.dtype\n ),\n mask_rng=_rng,\n )\n rng, _rng = 
jax.random.split(rng)\n init_params = genie.init(_rng, dummy_inputs)\n\n param_counts = count_parameters_by_component(init_params)\n\n if args.log and jax.process_index() == 0:\n wandb_init_kwargs = {\n ""entity"": args.entity,\n ""project"": args.project,\n ""name"": args.name,\n ""tags"": args.tags,\n ""group"": ""debug"",\n ""config"": args,\n }\n\n if args.wandb_id:\n wandb_init_kwargs.update(\n {\n ""id"": args.wandb_id,\n ""resume"": ""allow"",\n }\n )\n wandb.init(**wandb_init_kwargs)\n\n wandb.config.update({""model_param_count"": param_counts})\n\n print(""Parameter counts:"")\n print(param_counts)\n\n # --- Initialize optimizer ---\n lr_schedule = get_lr_schedule(args.lr_schedule, \n args.init_lr, \n args.max_lr, \n args.decay_end, \n args.num_steps, \n args.warmup_steps, \n args.wsd_decay_steps)\n tx = optax.adamw(learning_rate=lr_schedule, b1=0.9, b2=0.9, weight_decay=1e-4, mu_dtype=args.dtype)\n train_state = TrainState.create(apply_fn=genie.apply, params=init_params, tx=tx)\n\n device_mesh_arr = create_device_mesh((num_devices,))\n mesh = Mesh(devices=device_mesh_arr, axis_names=(""data"",))\n\n replicated_sharding = NamedSharding(mesh, PartitionSpec())\n videos_sharding = NamedSharding(mesh, PartitionSpec(""data"", None, None, None, None))\n train_state = jax.device_put(train_state, replicated_sharding)\n\n # --- Initialize checkpoint manager ---\n step = 0\n handler_registry = ocp.handlers.DefaultCheckpointHandlerRegistry()\n handler_registry.add(\n ""model_state"", ocp.args.StandardSave, ocp.handlers.StandardCheckpointHandler\n )\n handler_registry.add(\n ""model_state"", ocp.args.StandardRestore, ocp.handlers.StandardCheckpointHandler\n )\n handler_registry.add(""dataloader_state"", grain.checkpoint.CheckpointSave, grain.checkpoint.CheckpointHandler) # type: ignore\n handler_registry.add(""dataloader_state"", grain.checkpoint.CheckpointRestore, grain.checkpoint.CheckpointHandler) # type: ignore\n\n checkpoint_options = ocp.CheckpointManagerOptions(\n save_interval_steps=args.log_checkpoint_interval,\n max_to_keep=3,\n keep_period=args.log_checkpoint_keep_period,\n step_format_fixed_length=6,\n cleanup_tmp_directories=True,\n )\n\n checkpoint_manager = ocp.CheckpointManager(\n args.ckpt_dir,\n options=checkpoint_options,\n handler_registry=handler_registry,\n )\n\n # --- Create DataLoaderIterator from dataloader ---\n array_record_files = [\n os.path.join(args.data_dir, x)\n for x in os.listdir(args.data_dir)\n if x.endswith("".array_record"")\n ]\n grain_dataloader = get_dataloader(\n array_record_files,\n args.seq_len,\n # NOTE: We deliberately pass the global batch size\n # The dataloader shards the dataset across all processes\n args.batch_size,\n *image_shape,\n num_workers=8,\n prefetch_buffer_size=1,\n seed=args.seed,\n )\n initial_state = grain_dataloader._create_initial_state()\n grain_iterator = grain.DataLoaderIterator(grain_dataloader, initial_state)\n\n # --- Restore checkpoint ---\n if args.restore_ckpt:\n # Restore full dynamics model\n abstract_train_state = jax.tree_util.tree_map(\n ocp.utils.to_shape_dtype_struct, train_state\n )\n restored = checkpoint_manager.restore(\n checkpoint_manager.latest_step(),\n args=ocp.args.Composite(\n model_state=ocp.args.StandardRestore(abstract_train_state),\n dataloader_state=grain.checkpoint.CheckpointRestore(grain_iterator),\n ),\n )\n train_state = restored[""model_state""]\n grain_iterator = restored[""dataloader_state""]\n step = checkpoint_manager.latest_step() or 0\n print(f""Restored dataloader and model state 
from step {step}"")\n else:\n # Restore from pre-trained tokenizer (and LAM)\n train_state = restore_genie_components(\n train_state, replicated_sharding, grain_iterator, dummy_inputs, rng, args\n )\n\n # --- TRAIN LOOP ---\n dataloader = (jax.make_array_from_process_local_data(videos_sharding, elem) for elem in grain_iterator) # type: ignore\n while step < args.num_steps:\n for videos in dataloader:\n # --- Train step ---\n rng, _rng, _rng_dropout, _rng_mask = jax.random.split(rng, 4)\n\n inputs = dict(\n videos=videos,\n rng=_rng,\n dropout_rng=_rng_dropout,\n mask_rng=_rng_mask,\n )\n train_state, loss, recon, metrics = train_step(train_state, inputs)\n metrics[""lr""] = lr_schedule(step)\n print(f""Step {step}, loss: {loss}"")\n step += 1\n\n # --- Logging ---\n if args.log:\n if step % args.log_interval == 0 and jax.process_index() == 0:\n wandb.log(\n {\n ""loss"": loss,\n ""step"": step,\n **metrics,\n }\n )\n if step % args.log_image_interval == 0:\n gt_seq = inputs[""videos""][0]\n recon_seq = recon[0].clip(0, 1)\n comparison_seq = jnp.concatenate((gt_seq, recon_seq), axis=1)\n comparison_seq = einops.rearrange(\n comparison_seq * 255, ""t h w c -> h (t w) c""\n )\n if jax.process_index() == 0:\n log_images = dict(\n image=wandb.Image(np.asarray(gt_seq[args.seq_len - 1])),\n recon=wandb.Image(np.asarray(recon_seq[args.seq_len - 1])),\n true_vs_recon=wandb.Image(\n np.asarray(comparison_seq.astype(np.uint8))\n ),\n )\n wandb.log(log_images)\n # --- Checkpointing ---\n if args.save_ckpt and step % args.log_checkpoint_interval == 0:\n checkpoint_manager.save(\n step,\n args=ocp.args.Composite(\n model_state=ocp.args.StandardSave(train_state),\n dataloader_state=grain.checkpoint.CheckpointSave(\n grain_iterator\n ),\n ),\n )\n print(f""Saved checkpoint at step {step}"")\n if step >= args.num_steps:\n break\n\n checkpoint_manager.close()\n",python,tab +4612,10878570,"train_dynamics_maskgit.py",987,0,"",python,selection_mouse +4613,10878571,"train_dynamics_maskgit.py",986,0,"",python,selection_command +4614,10880714,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py",0,0,"from dataclasses import dataclass, field\nimport os\n\nimport einops\nfrom flax.training.train_state import TrainState\nfrom jax.sharding import Mesh, PartitionSpec, NamedSharding\nfrom jax.experimental.mesh_utils import create_device_mesh\nimport optax\nimport orbax.checkpoint as ocp\nimport numpy as np\nimport dm_pix as pix\nimport jax\nimport jax.numpy as jnp\nimport tyro\nimport wandb\nimport grain\n\nfrom genie import Genie, restore_genie_components\nfrom utils.dataloader import get_dataloader\nfrom utils.lr_utils import get_lr_schedule\nfrom utils.parameter_utils import count_parameters_by_component\n\n@dataclass\nclass Args:\n # Experiment\n num_steps: int = 200_000\n seed: int = 0\n seq_len: int = 16\n image_channels: int = 3\n image_height: int = 90\n image_width: int = 160\n data_dir: str = """"\n save_ckpt: bool = False\n restore_ckpt: bool = False\n # Optimization\n batch_size: int = 36\n init_lr: float = 0.0\n max_lr: float = 3e-5\n decay_end: float = 0.0\n wsd_decay_steps: int = 10000 # NOTE: wsd_decay_steps will only be used when using a wsd-schedule\n warmup_steps: int = 5000\n lr_schedule : str = ""wsd"" # supported options: wsd, cos\n # Tokenizer\n tokenizer_dim: int = 512\n latent_patch_dim: int = 32\n num_patch_latents: int = 1024\n patch_size: int = 4\n tokenizer_num_blocks: int = 8\n tokenizer_num_heads: int = 8\n tokenizer_checkpoint: str = """"\n # LAM\n 
lam_dim: int = 512\n latent_action_dim: int = 32\n num_latent_actions: int = 6\n lam_patch_size: int = 16\n lam_num_blocks: int = 8\n lam_num_heads: int = 8\n lam_checkpoint: str = """"\n # Dynamics\n dyna_dim: int = 512\n dyna_num_blocks: int = 12\n dyna_num_heads: int = 8\n dropout: float = 0.0\n mask_limit: float = 0.5\n use_maskgit: bool = False\n param_dtype: jnp.dtype = jnp.float32\n dtype: jnp.dtype = jnp.bfloat16\n # Logging\n log: bool = False\n entity: str = """"\n project: str = """"\n name: str = ""train_dynamics""\n tags: list[str] = field(default_factory=lambda: [""dynamics""])\n log_interval: int = 5\n log_image_interval: int = 250\n ckpt_dir: str = """"\n log_checkpoint_interval: int = 25000\n log_checkpoint_keep_period: int = 20000\n log_gradients: bool = False\n wandb_id: str = """"\n\n\nargs = tyro.cli(Args)\n\n\ndef dynamics_loss_fn(params, state, inputs):\n """"""Compute masked dynamics loss""""""\n inputs[""videos""] = inputs[""videos""].astype(args.dtype) / 255.0\n outputs = state.apply_fn(\n params,\n inputs,\n training=True,\n rngs={""params"": inputs[""rng""], ""dropout"": inputs[""dropout_rng""]},\n )\n mask = outputs[""mask""]\n outputs[""token_logits""] = outputs[""token_logits""].astype(jnp.float32)\n logits = outputs[""token_logits""]\n targets = outputs[""video_tokens""]\n\n if not args.use_maskgit:\n logits = outputs[""token_logits""][:, :, :-1]\n targets = outputs[""video_tokens""][:, :, 1:]\n mask = outputs[""mask""][:, :, 1:] \n ce_loss = optax.softmax_cross_entropy_with_integer_labels(\n logits, targets\n )\n ce_loss = (mask * ce_loss).sum() / mask.sum()\n acc = logits.argmax(-1) == targets\n acc = (mask * acc).sum() / mask.sum()\n select_probs = jax.nn.softmax(logits)\n gt = inputs[""videos""].clip(0, 1).reshape(-1, *inputs[""videos""].shape[2:])\n recon = outputs[""recon""].clip(0, 1).reshape(-1, *outputs[""recon""].shape[2:])\n psnr = pix.psnr(gt, recon).mean() # type: ignore\n ssim = pix.ssim(gt, recon).mean() # type: ignore\n _, index_counts_lam = jnp.unique_counts(\n jnp.ravel(outputs[""lam_indices""]), size=args.num_latent_actions, fill_value=0\n )\n _, index_counts_tokenizer = jnp.unique_counts(\n jnp.ravel(outputs[""video_tokens""]), size=args.num_patch_latents, fill_value=0\n )\n codebook_usage_lam = (index_counts_lam != 0).mean()\n codebook_usage_tokenizer = (index_counts_tokenizer != 0).mean()\n metrics = dict(\n cross_entropy_loss=ce_loss,\n masked_token_accuracy=acc,\n select_logit=logits.max(-1).mean(),\n select_p=select_probs.max(-1).mean(),\n entropy=jax.scipy.special.entr(select_probs).sum(-1).mean(),\n psnr=psnr,\n ssim=ssim,\n codebook_usage_lam=codebook_usage_lam,\n codebook_usage_tokenizer=codebook_usage_tokenizer,\n )\n return ce_loss, (outputs[""recon""], metrics)\n\n\n@jax.jit\ndef train_step(state, inputs):\n """"""Update state and compute metrics""""""\n grad_fn = jax.value_and_grad(dynamics_loss_fn, has_aux=True, allow_int=True)\n (loss, (recon, metrics)), grads = grad_fn(state.params, state, inputs)\n state = state.apply_gradients(grads=grads)\n if args.log_gradients:\n metrics[""gradients_std/""] = jax.tree.map(\n lambda x: x.std(), grads[""params""][""dynamics""]\n )\n return state, loss, recon, metrics\n\n\nif __name__ == ""__main__"":\n jax.distributed.initialize()\n num_devices = jax.device_count()\n if num_devices == 0:\n raise ValueError(""No JAX devices found."")\n print(f""Running on {num_devices} devices."")\n\n if args.batch_size % num_devices != 0:\n raise ValueError(\n f""Global batch size {args.batch_size} must 
be divisible by ""\n f""number of devices {num_devices}.""\n )\n\n per_device_batch_size_for_init = args.batch_size // num_devices\n\n rng = jax.random.PRNGKey(args.seed)\n\n # --- Initialize model ---\n genie = Genie(\n # Tokenizer\n in_dim=args.image_channels,\n tokenizer_dim=args.tokenizer_dim,\n latent_patch_dim=args.latent_patch_dim,\n num_patch_latents=args.num_patch_latents,\n patch_size=args.patch_size,\n tokenizer_num_blocks=args.tokenizer_num_blocks,\n tokenizer_num_heads=args.tokenizer_num_heads,\n # LAM\n lam_dim=args.lam_dim,\n latent_action_dim=args.latent_action_dim,\n num_latent_actions=args.num_latent_actions,\n lam_patch_size=args.lam_patch_size,\n lam_num_blocks=args.lam_num_blocks,\n lam_num_heads=args.lam_num_heads,\n lam_co_train=not args.lam_checkpoint,\n # Dynamics\n dyna_dim=args.dyna_dim,\n dyna_num_blocks=args.dyna_num_blocks,\n dyna_num_heads=args.dyna_num_heads,\n dropout=args.dropout,\n mask_limit=args.mask_limit,\n use_maskgit=args.use_maskgit,\n param_dtype=args.param_dtype,\n dtype=args.dtype,\n )\n rng, _rng = jax.random.split(rng)\n image_shape = (args.image_height, args.image_width, args.image_channels)\n dummy_inputs = dict(\n videos=jnp.zeros(\n (per_device_batch_size_for_init, args.seq_len, *image_shape),\n dtype=args.dtype,\n ),\n action=jnp.zeros(\n (per_device_batch_size_for_init, args.seq_len), dtype=args.dtype\n ),\n mask_rng=_rng,\n )\n rng, _rng = jax.random.split(rng)\n init_params = genie.init(_rng, dummy_inputs)\n\n param_counts = count_parameters_by_component(init_params)\n\n if args.log and jax.process_index() == 0:\n wandb_init_kwargs = {\n ""entity"": args.entity,\n ""project"": args.project,\n ""name"": args.name,\n ""tags"": args.tags,\n ""group"": ""debug"",\n ""config"": args,\n }\n\n if args.wandb_id:\n wandb_init_kwargs.update(\n {\n ""id"": args.wandb_id,\n ""resume"": ""allow"",\n }\n )\n wandb.init(**wandb_init_kwargs)\n\n wandb.config.update({""model_param_count"": param_counts})\n\n print(""Parameter counts:"")\n print(param_counts)\n\n # --- Initialize optimizer ---\n lr_schedule = get_lr_schedule(args.lr_schedule, \n args.init_lr, \n args.max_lr, \n args.decay_end, \n args.num_steps, \n args.warmup_steps, \n args.wsd_decay_steps)\n tx = optax.adamw(learning_rate=lr_schedule, b1=0.9, b2=0.9, weight_decay=1e-4, mu_dtype=args.dtype)\n train_state = TrainState.create(apply_fn=genie.apply, params=init_params, tx=tx)\n\n device_mesh_arr = create_device_mesh((num_devices,))\n mesh = Mesh(devices=device_mesh_arr, axis_names=(""data"",))\n\n replicated_sharding = NamedSharding(mesh, PartitionSpec())\n videos_sharding = NamedSharding(mesh, PartitionSpec(""data"", None, None, None, None))\n train_state = jax.device_put(train_state, replicated_sharding)\n\n # --- Initialize checkpoint manager ---\n step = 0\n handler_registry = ocp.handlers.DefaultCheckpointHandlerRegistry()\n handler_registry.add(\n ""model_state"", ocp.args.StandardSave, ocp.handlers.StandardCheckpointHandler\n )\n handler_registry.add(\n ""model_state"", ocp.args.StandardRestore, ocp.handlers.StandardCheckpointHandler\n )\n handler_registry.add(""dataloader_state"", grain.checkpoint.CheckpointSave, grain.checkpoint.CheckpointHandler) # type: ignore\n handler_registry.add(""dataloader_state"", grain.checkpoint.CheckpointRestore, grain.checkpoint.CheckpointHandler) # type: ignore\n\n checkpoint_options = ocp.CheckpointManagerOptions(\n save_interval_steps=args.log_checkpoint_interval,\n max_to_keep=3,\n keep_period=args.log_checkpoint_keep_period,\n 
step_format_fixed_length=6,\n cleanup_tmp_directories=True,\n )\n\n checkpoint_manager = ocp.CheckpointManager(\n args.ckpt_dir,\n options=checkpoint_options,\n handler_registry=handler_registry,\n )\n\n # --- Create DataLoaderIterator from dataloader ---\n array_record_files = [\n os.path.join(args.data_dir, x)\n for x in os.listdir(args.data_dir)\n if x.endswith("".array_record"")\n ]\n grain_dataloader = get_dataloader(\n array_record_files,\n args.seq_len,\n # NOTE: We deliberately pass the global batch size\n # The dataloader shards the dataset across all processes\n args.batch_size,\n *image_shape,\n num_workers=8,\n prefetch_buffer_size=1,\n seed=args.seed,\n )\n initial_state = grain_dataloader._create_initial_state()\n grain_iterator = grain.DataLoaderIterator(grain_dataloader, initial_state)\n\n # --- Restore checkpoint ---\n if args.restore_ckpt:\n # Restore full dynamics model\n abstract_train_state = jax.tree_util.tree_map(\n ocp.utils.to_shape_dtype_struct, train_state\n )\n restored = checkpoint_manager.restore(\n checkpoint_manager.latest_step(),\n args=ocp.args.Composite(\n model_state=ocp.args.StandardRestore(abstract_train_state),\n dataloader_state=grain.checkpoint.CheckpointRestore(grain_iterator),\n ),\n )\n train_state = restored[""model_state""]\n grain_iterator = restored[""dataloader_state""]\n step = checkpoint_manager.latest_step() or 0\n print(f""Restored dataloader and model state from step {step}"")\n else:\n # Restore from pre-trained tokenizer (and LAM)\n train_state = restore_genie_components(\n train_state, replicated_sharding, grain_iterator, dummy_inputs, rng, args\n )\n\n # --- TRAIN LOOP ---\n dataloader = (jax.make_array_from_process_local_data(videos_sharding, elem) for elem in grain_iterator) # type: ignore\n while step < args.num_steps:\n for videos in dataloader:\n # --- Train step ---\n rng, _rng, _rng_dropout, _rng_mask = jax.random.split(rng, 4)\n\n inputs = dict(\n videos=videos,\n rng=_rng,\n dropout_rng=_rng_dropout,\n mask_rng=_rng_mask,\n )\n train_state, loss, recon, metrics = train_step(train_state, inputs)\n metrics[""lr""] = lr_schedule(step)\n print(f""Step {step}, loss: {loss}"")\n step += 1\n\n # --- Logging ---\n if args.log:\n if step % args.log_interval == 0 and jax.process_index() == 0:\n wandb.log(\n {\n ""loss"": loss,\n ""step"": step,\n **metrics,\n }\n )\n if step % args.log_image_interval == 0:\n gt_seq = inputs[""videos""][0]\n recon_seq = recon[0].clip(0, 1)\n comparison_seq = jnp.concatenate((gt_seq, recon_seq), axis=1)\n comparison_seq = einops.rearrange(\n comparison_seq * 255, ""t h w c -> h (t w) c""\n )\n if jax.process_index() == 0:\n log_images = dict(\n image=wandb.Image(np.asarray(gt_seq[args.seq_len - 1])),\n recon=wandb.Image(np.asarray(recon_seq[args.seq_len - 1])),\n true_vs_recon=wandb.Image(\n np.asarray(comparison_seq.astype(np.uint8))\n ),\n )\n wandb.log(log_images)\n # --- Checkpointing ---\n if args.save_ckpt and step % args.log_checkpoint_interval == 0:\n checkpoint_manager.save(\n step,\n args=ocp.args.Composite(\n model_state=ocp.args.StandardSave(train_state),\n dataloader_state=grain.checkpoint.CheckpointSave(\n grain_iterator\n ),\n ),\n )\n print(f""Saved checkpoint at step {step}"")\n if step >= args.num_steps:\n break\n\n checkpoint_manager.close()\n",python,tab +4615,10881759,"train_dynamics_maskgit.py",0,0,"",python,tab +4616,10885045,"train_dynamics.py",0,0,"from dataclasses import dataclass, field\nimport os\n\nimport einops\nfrom flax.training.train_state import TrainState\nfrom 
jax.sharding import Mesh, PartitionSpec, NamedSharding\nfrom jax.experimental.mesh_utils import create_device_mesh\nimport optax\nimport orbax.checkpoint as ocp\nimport numpy as np\nimport dm_pix as pix\nimport jax\nimport jax.numpy as jnp\nimport tyro\nimport wandb\nimport grain\n\nfrom genie import Genie, restore_genie_components\nfrom utils.dataloader import get_dataloader\nfrom utils.lr_utils import get_lr_schedule\nfrom utils.parameter_utils import count_parameters_by_component\n\n@dataclass\nclass Args:\n # Experiment\n num_steps: int = 200_000\n seed: int = 0\n seq_len: int = 16\n image_channels: int = 3\n image_height: int = 90\n image_width: int = 160\n data_dir: str = """"\n save_ckpt: bool = False\n restore_ckpt: bool = False\n # Optimization\n batch_size: int = 36\n init_lr: float = 0.0\n max_lr: float = 3e-5\n decay_end: float = 0.0\n wsd_decay_steps: int = 10000 # NOTE: wsd_decay_steps will only be used when using a wsd-schedule\n warmup_steps: int = 5000\n lr_schedule : str = ""wsd"" # supported options: wsd, cos\n # Tokenizer\n tokenizer_dim: int = 512\n latent_patch_dim: int = 32\n num_patch_latents: int = 1024\n patch_size: int = 4\n tokenizer_num_blocks: int = 8\n tokenizer_num_heads: int = 8\n tokenizer_checkpoint: str = """"\n # LAM\n lam_dim: int = 512\n latent_action_dim: int = 32\n num_latent_actions: int = 6\n lam_patch_size: int = 16\n lam_num_blocks: int = 8\n lam_num_heads: int = 8\n lam_checkpoint: str = """"\n # Dynamics\n dyna_dim: int = 512\n dyna_num_blocks: int = 12\n dyna_num_heads: int = 8\n dropout: float = 0.0\n mask_limit: float = 0.5\n use_maskgit: bool = False\n param_dtype: jnp.dtype = jnp.float32\n dtype: jnp.dtype = jnp.bfloat16\n # Logging\n log: bool = False\n entity: str = """"\n project: str = """"\n name: str = ""train_dynamics""\n tags: list[str] = field(default_factory=lambda: [""dynamics""])\n log_interval: int = 5\n log_image_interval: int = 250\n ckpt_dir: str = """"\n log_checkpoint_interval: int = 25000\n log_checkpoint_keep_period: int = 20000\n log_gradients: bool = False\n wandb_id: str = """"\n\n\nargs = tyro.cli(Args)\n\n\ndef dynamics_loss_fn(params, state, inputs):\n """"""Compute masked dynamics loss""""""\n inputs[""videos""] = inputs[""videos""].astype(args.dtype) / 255.0\n outputs = state.apply_fn(\n params,\n inputs,\n training=True,\n rngs={""params"": inputs[""rng""], ""dropout"": inputs[""dropout_rng""]},\n )\n mask = outputs[""mask""]\n outputs[""token_logits""] = outputs[""token_logits""].astype(jnp.float32)\n logits = outputs[""token_logits""]\n targets = outputs[""video_tokens""]\n\n if not args.use_maskgit:\n logits = outputs[""token_logits""][:, :, :-1]\n targets = outputs[""video_tokens""][:, :, 1:]\n mask = outputs[""mask""][:, :, 1:] \n ce_loss = optax.softmax_cross_entropy_with_integer_labels(\n logits, targets\n )\n ce_loss = (mask * ce_loss).sum() / mask.sum()\n acc = logits.argmax(-1) == targets\n acc = (mask * acc).sum() / mask.sum()\n select_probs = jax.nn.softmax(logits)\n gt = inputs[""videos""].clip(0, 1).reshape(-1, *inputs[""videos""].shape[2:])\n recon = outputs[""recon""].clip(0, 1).reshape(-1, *outputs[""recon""].shape[2:])\n psnr = pix.psnr(gt, recon).mean() # type: ignore\n ssim = pix.ssim(gt, recon).mean() # type: ignore\n _, index_counts_lam = jnp.unique_counts(\n jnp.ravel(outputs[""lam_indices""]), size=args.num_latent_actions, fill_value=0\n )\n _, index_counts_tokenizer = jnp.unique_counts(\n jnp.ravel(outputs[""video_tokens""]), size=args.num_patch_latents, fill_value=0\n )\n 
codebook_usage_lam = (index_counts_lam != 0).mean()\n codebook_usage_tokenizer = (index_counts_tokenizer != 0).mean()\n metrics = dict(\n cross_entropy_loss=ce_loss,\n masked_token_accuracy=acc,\n select_logit=logits.max(-1).mean(),\n select_p=select_probs.max(-1).mean(),\n entropy=jax.scipy.special.entr(select_probs).sum(-1).mean(),\n psnr=psnr,\n ssim=ssim,\n codebook_usage_lam=codebook_usage_lam,\n codebook_usage_tokenizer=codebook_usage_tokenizer,\n )\n return ce_loss, (outputs[""recon""], metrics)\n\n\n@jax.jit\ndef train_step(state, inputs):\n """"""Update state and compute metrics""""""\n grad_fn = jax.value_and_grad(dynamics_loss_fn, has_aux=True, allow_int=True)\n (loss, (recon, metrics)), grads = grad_fn(state.params, state, inputs)\n state = state.apply_gradients(grads=grads)\n if args.log_gradients:\n metrics[""gradients_std/""] = jax.tree.map(\n lambda x: x.std(), grads[""params""][""dynamics""]\n )\n return state, loss, recon, metrics\n\n\nif __name__ == ""__main__"":\n jax.distributed.initialize()\n num_devices = jax.device_count()\n if num_devices == 0:\n raise ValueError(""No JAX devices found."")\n print(f""Running on {num_devices} devices."")\n\n if args.batch_size % num_devices != 0:\n raise ValueError(\n f""Global batch size {args.batch_size} must be divisible by ""\n f""number of devices {num_devices}.""\n )\n\n per_device_batch_size_for_init = args.batch_size // num_devices\n\n rng = jax.random.PRNGKey(args.seed)\n\n # --- Initialize model ---\n genie = Genie(\n # Tokenizer\n in_dim=args.image_channels,\n tokenizer_dim=args.tokenizer_dim,\n latent_patch_dim=args.latent_patch_dim,\n num_patch_latents=args.num_patch_latents,\n patch_size=args.patch_size,\n tokenizer_num_blocks=args.tokenizer_num_blocks,\n tokenizer_num_heads=args.tokenizer_num_heads,\n # LAM\n lam_dim=args.lam_dim,\n latent_action_dim=args.latent_action_dim,\n num_latent_actions=args.num_latent_actions,\n lam_patch_size=args.lam_patch_size,\n lam_num_blocks=args.lam_num_blocks,\n lam_num_heads=args.lam_num_heads,\n lam_co_train=not args.lam_checkpoint,\n # Dynamics\n dyna_dim=args.dyna_dim,\n dyna_num_blocks=args.dyna_num_blocks,\n dyna_num_heads=args.dyna_num_heads,\n dropout=args.dropout,\n mask_limit=args.mask_limit,\n use_maskgit=args.use_maskgit,\n param_dtype=args.param_dtype,\n dtype=args.dtype,\n )\n rng, _rng = jax.random.split(rng)\n image_shape = (args.image_height, args.image_width, args.image_channels)\n dummy_inputs = dict(\n videos=jnp.zeros(\n (per_device_batch_size_for_init, args.seq_len, *image_shape),\n dtype=args.dtype,\n ),\n action=jnp.zeros(\n (per_device_batch_size_for_init, args.seq_len), dtype=args.dtype\n ),\n mask_rng=_rng,\n )\n rng, _rng = jax.random.split(rng)\n init_params = genie.init(_rng, dummy_inputs)\n\n param_counts = count_parameters_by_component(init_params)\n\n if args.log and jax.process_index() == 0:\n wandb_init_kwargs = {\n ""entity"": args.entity,\n ""project"": args.project,\n ""name"": args.name,\n ""tags"": args.tags,\n ""group"": ""debug"",\n ""config"": args,\n }\n\n if args.wandb_id:\n wandb_init_kwargs.update(\n {\n ""id"": args.wandb_id,\n ""resume"": ""allow"",\n }\n )\n wandb.init(**wandb_init_kwargs)\n\n wandb.config.update({""model_param_count"": param_counts})\n\n print(""Parameter counts:"")\n print(param_counts)\n\n # --- Initialize optimizer ---\n lr_schedule = get_lr_schedule(args.lr_schedule, \n args.init_lr, \n args.max_lr, \n args.decay_end, \n args.num_steps, \n args.warmup_steps, \n args.wsd_decay_steps)\n tx = 
optax.adamw(learning_rate=lr_schedule, b1=0.9, b2=0.9, weight_decay=1e-4, mu_dtype=args.dtype)\n train_state = TrainState.create(apply_fn=genie.apply, params=init_params, tx=tx)\n\n device_mesh_arr = create_device_mesh((num_devices,))\n mesh = Mesh(devices=device_mesh_arr, axis_names=(""data"",))\n\n replicated_sharding = NamedSharding(mesh, PartitionSpec())\n videos_sharding = NamedSharding(mesh, PartitionSpec(""data"", None, None, None, None))\n train_state = jax.device_put(train_state, replicated_sharding)\n\n # --- Initialize checkpoint manager ---\n step = 0\n handler_registry = ocp.handlers.DefaultCheckpointHandlerRegistry()\n handler_registry.add(\n ""model_state"", ocp.args.StandardSave, ocp.handlers.StandardCheckpointHandler\n )\n handler_registry.add(\n ""model_state"", ocp.args.StandardRestore, ocp.handlers.StandardCheckpointHandler\n )\n handler_registry.add(""dataloader_state"", grain.checkpoint.CheckpointSave, grain.checkpoint.CheckpointHandler) # type: ignore\n handler_registry.add(""dataloader_state"", grain.checkpoint.CheckpointRestore, grain.checkpoint.CheckpointHandler) # type: ignore\n\n checkpoint_options = ocp.CheckpointManagerOptions(\n save_interval_steps=args.log_checkpoint_interval,\n max_to_keep=3,\n keep_period=args.log_checkpoint_keep_period,\n step_format_fixed_length=6,\n cleanup_tmp_directories=True,\n )\n\n checkpoint_manager = ocp.CheckpointManager(\n args.ckpt_dir,\n options=checkpoint_options,\n handler_registry=handler_registry,\n )\n\n # --- Create DataLoaderIterator from dataloader ---\n array_record_files = [\n os.path.join(args.data_dir, x)\n for x in os.listdir(args.data_dir)\n if x.endswith("".array_record"")\n ]\n grain_dataloader = get_dataloader(\n array_record_files,\n args.seq_len,\n # NOTE: We deliberately pass the global batch size\n # The dataloader shards the dataset across all processes\n args.batch_size,\n *image_shape,\n num_workers=8,\n prefetch_buffer_size=1,\n seed=args.seed,\n )\n initial_state = grain_dataloader._create_initial_state()\n grain_iterator = grain.DataLoaderIterator(grain_dataloader, initial_state)\n\n # --- Restore checkpoint ---\n if args.restore_ckpt:\n # Restore full dynamics model\n abstract_train_state = jax.tree_util.tree_map(\n ocp.utils.to_shape_dtype_struct, train_state\n )\n restored = checkpoint_manager.restore(\n checkpoint_manager.latest_step(),\n args=ocp.args.Composite(\n model_state=ocp.args.StandardRestore(abstract_train_state),\n dataloader_state=grain.checkpoint.CheckpointRestore(grain_iterator),\n ),\n )\n train_state = restored[""model_state""]\n grain_iterator = restored[""dataloader_state""]\n step = checkpoint_manager.latest_step() or 0\n print(f""Restored dataloader and model state from step {step}"")\n else:\n # Restore from pre-trained tokenizer (and LAM)\n train_state = restore_genie_components(\n train_state, replicated_sharding, grain_iterator, dummy_inputs, rng, args\n )\n\n # --- TRAIN LOOP ---\n dataloader = (jax.make_array_from_process_local_data(videos_sharding, elem) for elem in grain_iterator) # type: ignore\n while step < args.num_steps:\n for videos in dataloader:\n # --- Train step ---\n rng, _rng, _rng_dropout, _rng_mask = jax.random.split(rng, 4)\n\n inputs = dict(\n videos=videos,\n rng=_rng,\n dropout_rng=_rng_dropout,\n mask_rng=_rng_mask,\n )\n train_state, loss, recon, metrics = train_step(train_state, inputs)\n metrics[""lr""] = lr_schedule(step)\n print(f""Step {step}, loss: {loss}"")\n step += 1\n\n # --- Logging ---\n if args.log:\n if step % args.log_interval == 0 
and jax.process_index() == 0:\n wandb.log(\n {\n ""loss"": loss,\n ""step"": step,\n **metrics,\n }\n )\n if step % args.log_image_interval == 0:\n gt_seq = inputs[""videos""][0]\n recon_seq = recon[0].clip(0, 1)\n comparison_seq = jnp.concatenate((gt_seq, recon_seq), axis=1)\n comparison_seq = einops.rearrange(\n comparison_seq * 255, ""t h w c -> h (t w) c""\n )\n if jax.process_index() == 0:\n log_images = dict(\n image=wandb.Image(np.asarray(gt_seq[args.seq_len - 1])),\n recon=wandb.Image(np.asarray(recon_seq[args.seq_len - 1])),\n true_vs_recon=wandb.Image(\n np.asarray(comparison_seq.astype(np.uint8))\n ),\n )\n wandb.log(log_images)\n # --- Checkpointing ---\n if args.save_ckpt and step % args.log_checkpoint_interval == 0:\n checkpoint_manager.save(\n step,\n args=ocp.args.Composite(\n model_state=ocp.args.StandardSave(train_state),\n dataloader_state=grain.checkpoint.CheckpointSave(\n grain_iterator\n ),\n ),\n )\n print(f""Saved checkpoint at step {step}"")\n if step >= args.num_steps:\n break\n\n checkpoint_manager.close()\n",python,tab +4617,10890532,"train_dynamics_causal.py",0,0,"from dataclasses import dataclass, field\nimport os\n\nimport einops\nfrom flax.training.train_state import TrainState\nfrom jax.sharding import Mesh, PartitionSpec, NamedSharding\nfrom jax.experimental.mesh_utils import create_device_mesh\nimport optax\nimport orbax.checkpoint as ocp\nimport numpy as np\nimport dm_pix as pix\nimport jax\nimport jax.numpy as jnp\nimport tyro\nimport wandb\nimport grain\n\nfrom genie import Genie, restore_genie_components\nfrom utils.dataloader import get_dataloader\nfrom utils.lr_utils import get_lr_schedule\nfrom utils.parameter_utils import count_parameters_by_component\n\n@dataclass\nclass Args:\n # Experiment\n num_steps: int = 200_000\n seed: int = 0\n seq_len: int = 16\n image_channels: int = 3\n image_height: int = 90\n image_width: int = 160\n data_dir: str = """"\n save_ckpt: bool = False\n restore_ckpt: bool = False\n # Optimization\n batch_size: int = 36\n init_lr: float = 0.0\n max_lr: float = 3e-5\n decay_end: float = 0.0\n wsd_decay_steps: int = 10000 # NOTE: wsd_decay_steps will only be used when using a wsd-schedule\n warmup_steps: int = 5000\n lr_schedule : str = ""wsd"" # supported options: wsd, cos\n # Tokenizer\n tokenizer_dim: int = 512\n latent_patch_dim: int = 32\n num_patch_latents: int = 1024\n patch_size: int = 4\n tokenizer_num_blocks: int = 8\n tokenizer_num_heads: int = 8\n tokenizer_checkpoint: str = """"\n # LAM\n lam_dim: int = 512\n latent_action_dim: int = 32\n num_latent_actions: int = 6\n lam_patch_size: int = 16\n lam_num_blocks: int = 8\n lam_num_heads: int = 8\n lam_checkpoint: str = """"\n # Dynamics\n dyna_dim: int = 512\n dyna_num_blocks: int = 12\n dyna_num_heads: int = 8\n dropout: float = 0.0\n mask_limit: float = 0.5\n use_maskgit: bool = False\n param_dtype: jnp.dtype = jnp.float32\n dtype: jnp.dtype = jnp.bfloat16\n # Logging\n log: bool = False\n entity: str = """"\n project: str = """"\n name: str = ""train_dynamics""\n tags: list[str] = field(default_factory=lambda: [""dynamics""])\n log_interval: int = 5\n log_image_interval: int = 250\n ckpt_dir: str = """"\n log_checkpoint_interval: int = 25000\n log_checkpoint_keep_period: int = 20000\n log_gradients: bool = False\n wandb_id: str = """"\n\n\nargs = tyro.cli(Args)\n\n\ndef dynamics_loss_fn(params, state, inputs):\n """"""Compute masked dynamics loss""""""\n inputs[""videos""] = inputs[""videos""].astype(args.dtype) / 255.0\n outputs = state.apply_fn(\n params,\n 
inputs,\n training=True,\n rngs={""params"": inputs[""rng""], ""dropout"": inputs[""dropout_rng""]},\n )\n mask = outputs[""mask""]\n outputs[""token_logits""] = outputs[""token_logits""].astype(jnp.float32)\n logits = outputs[""token_logits""]\n targets = outputs[""video_tokens""]\n\n if not args.use_maskgit:\n logits = outputs[""token_logits""][:, :, :-1]\n targets = outputs[""video_tokens""][:, :, 1:]\n mask = outputs[""mask""][:, :, 1:] \n ce_loss = optax.softmax_cross_entropy_with_integer_labels(\n logits, targets\n )\n ce_loss = (mask * ce_loss).sum() / mask.sum()\n acc = logits.argmax(-1) == targets\n acc = (mask * acc).sum() / mask.sum()\n select_probs = jax.nn.softmax(logits)\n gt = inputs[""videos""].clip(0, 1).reshape(-1, *inputs[""videos""].shape[2:])\n recon = outputs[""recon""].clip(0, 1).reshape(-1, *outputs[""recon""].shape[2:])\n psnr = pix.psnr(gt, recon).mean() # type: ignore\n ssim = pix.ssim(gt, recon).mean() # type: ignore\n _, index_counts_lam = jnp.unique_counts(\n jnp.ravel(outputs[""lam_indices""]), size=args.num_latent_actions, fill_value=0\n )\n _, index_counts_tokenizer = jnp.unique_counts(\n jnp.ravel(outputs[""video_tokens""]), size=args.num_patch_latents, fill_value=0\n )\n codebook_usage_lam = (index_counts_lam != 0).mean()\n codebook_usage_tokenizer = (index_counts_tokenizer != 0).mean()\n metrics = dict(\n cross_entropy_loss=ce_loss,\n masked_token_accuracy=acc,\n select_logit=logits.max(-1).mean(),\n select_p=select_probs.max(-1).mean(),\n entropy=jax.scipy.special.entr(select_probs).sum(-1).mean(),\n psnr=psnr,\n ssim=ssim,\n codebook_usage_lam=codebook_usage_lam,\n codebook_usage_tokenizer=codebook_usage_tokenizer,\n )\n return ce_loss, (outputs[""recon""], metrics)\n\n\n@jax.jit\ndef train_step(state, inputs):\n """"""Update state and compute metrics""""""\n grad_fn = jax.value_and_grad(dynamics_loss_fn, has_aux=True, allow_int=True)\n (loss, (recon, metrics)), grads = grad_fn(state.params, state, inputs)\n state = state.apply_gradients(grads=grads)\n if args.log_gradients:\n metrics[""gradients_std/""] = jax.tree.map(\n lambda x: x.std(), grads[""params""][""dynamics""]\n )\n return state, loss, recon, metrics\n\n\nif __name__ == ""__main__"":\n jax.distributed.initialize()\n num_devices = jax.device_count()\n if num_devices == 0:\n raise ValueError(""No JAX devices found."")\n print(f""Running on {num_devices} devices."")\n\n if args.batch_size % num_devices != 0:\n raise ValueError(\n f""Global batch size {args.batch_size} must be divisible by ""\n f""number of devices {num_devices}.""\n )\n\n per_device_batch_size_for_init = args.batch_size // num_devices\n\n rng = jax.random.PRNGKey(args.seed)\n\n # --- Initialize model ---\n genie = Genie(\n # Tokenizer\n in_dim=args.image_channels,\n tokenizer_dim=args.tokenizer_dim,\n latent_patch_dim=args.latent_patch_dim,\n num_patch_latents=args.num_patch_latents,\n patch_size=args.patch_size,\n tokenizer_num_blocks=args.tokenizer_num_blocks,\n tokenizer_num_heads=args.tokenizer_num_heads,\n # LAM\n lam_dim=args.lam_dim,\n latent_action_dim=args.latent_action_dim,\n num_latent_actions=args.num_latent_actions,\n lam_patch_size=args.lam_patch_size,\n lam_num_blocks=args.lam_num_blocks,\n lam_num_heads=args.lam_num_heads,\n lam_co_train=not args.lam_checkpoint,\n # Dynamics\n dyna_dim=args.dyna_dim,\n dyna_num_blocks=args.dyna_num_blocks,\n dyna_num_heads=args.dyna_num_heads,\n dropout=args.dropout,\n mask_limit=args.mask_limit,\n use_maskgit=args.use_maskgit,\n param_dtype=args.param_dtype,\n 
dtype=args.dtype,\n )\n rng, _rng = jax.random.split(rng)\n image_shape = (args.image_height, args.image_width, args.image_channels)\n dummy_inputs = dict(\n videos=jnp.zeros(\n (per_device_batch_size_for_init, args.seq_len, *image_shape),\n dtype=args.dtype,\n ),\n action=jnp.zeros(\n (per_device_batch_size_for_init, args.seq_len), dtype=args.dtype\n ),\n mask_rng=_rng,\n )\n rng, _rng = jax.random.split(rng)\n init_params = genie.init(_rng, dummy_inputs)\n\n param_counts = count_parameters_by_component(init_params)\n\n if args.log and jax.process_index() == 0:\n wandb_init_kwargs = {\n ""entity"": args.entity,\n ""project"": args.project,\n ""name"": args.name,\n ""tags"": args.tags,\n ""group"": ""debug"",\n ""config"": args,\n }\n\n if args.wandb_id:\n wandb_init_kwargs.update(\n {\n ""id"": args.wandb_id,\n ""resume"": ""allow"",\n }\n )\n wandb.init(**wandb_init_kwargs)\n\n wandb.config.update({""model_param_count"": param_counts})\n\n print(""Parameter counts:"")\n print(param_counts)\n\n # --- Initialize optimizer ---\n lr_schedule = get_lr_schedule(args.lr_schedule, \n args.init_lr, \n args.max_lr, \n args.decay_end, \n args.num_steps, \n args.warmup_steps, \n args.wsd_decay_steps)\n tx = optax.adamw(learning_rate=lr_schedule, b1=0.9, b2=0.9, weight_decay=1e-4, mu_dtype=args.dtype)\n train_state = TrainState.create(apply_fn=genie.apply, params=init_params, tx=tx)\n\n device_mesh_arr = create_device_mesh((num_devices,))\n mesh = Mesh(devices=device_mesh_arr, axis_names=(""data"",))\n\n replicated_sharding = NamedSharding(mesh, PartitionSpec())\n videos_sharding = NamedSharding(mesh, PartitionSpec(""data"", None, None, None, None))\n train_state = jax.device_put(train_state, replicated_sharding)\n\n # --- Initialize checkpoint manager ---\n step = 0\n handler_registry = ocp.handlers.DefaultCheckpointHandlerRegistry()\n handler_registry.add(\n ""model_state"", ocp.args.StandardSave, ocp.handlers.StandardCheckpointHandler\n )\n handler_registry.add(\n ""model_state"", ocp.args.StandardRestore, ocp.handlers.StandardCheckpointHandler\n )\n handler_registry.add(""dataloader_state"", grain.checkpoint.CheckpointSave, grain.checkpoint.CheckpointHandler) # type: ignore\n handler_registry.add(""dataloader_state"", grain.checkpoint.CheckpointRestore, grain.checkpoint.CheckpointHandler) # type: ignore\n\n checkpoint_options = ocp.CheckpointManagerOptions(\n save_interval_steps=args.log_checkpoint_interval,\n max_to_keep=3,\n keep_period=args.log_checkpoint_keep_period,\n step_format_fixed_length=6,\n cleanup_tmp_directories=True,\n )\n\n checkpoint_manager = ocp.CheckpointManager(\n args.ckpt_dir,\n options=checkpoint_options,\n handler_registry=handler_registry,\n )\n\n # --- Create DataLoaderIterator from dataloader ---\n array_record_files = [\n os.path.join(args.data_dir, x)\n for x in os.listdir(args.data_dir)\n if x.endswith("".array_record"")\n ]\n grain_dataloader = get_dataloader(\n array_record_files,\n args.seq_len,\n # NOTE: We deliberately pass the global batch size\n # The dataloader shards the dataset across all processes\n args.batch_size,\n *image_shape,\n num_workers=8,\n prefetch_buffer_size=1,\n seed=args.seed,\n )\n initial_state = grain_dataloader._create_initial_state()\n grain_iterator = grain.DataLoaderIterator(grain_dataloader, initial_state)\n\n # --- Restore checkpoint ---\n if args.restore_ckpt:\n # Restore full dynamics model\n abstract_train_state = jax.tree_util.tree_map(\n ocp.utils.to_shape_dtype_struct, train_state\n )\n restored = 
checkpoint_manager.restore(\n checkpoint_manager.latest_step(),\n args=ocp.args.Composite(\n model_state=ocp.args.StandardRestore(abstract_train_state),\n dataloader_state=grain.checkpoint.CheckpointRestore(grain_iterator),\n ),\n )\n train_state = restored[""model_state""]\n grain_iterator = restored[""dataloader_state""]\n step = checkpoint_manager.latest_step() or 0\n print(f""Restored dataloader and model state from step {step}"")\n else:\n # Restore from pre-trained tokenizer (and LAM)\n train_state = restore_genie_components(\n train_state, replicated_sharding, grain_iterator, dummy_inputs, rng, args\n )\n\n # --- TRAIN LOOP ---\n dataloader = (jax.make_array_from_process_local_data(videos_sharding, elem) for elem in grain_iterator) # type: ignore\n while step < args.num_steps:\n for videos in dataloader:\n # --- Train step ---\n rng, _rng, _rng_dropout, _rng_mask = jax.random.split(rng, 4)\n\n inputs = dict(\n videos=videos,\n rng=_rng,\n dropout_rng=_rng_dropout,\n mask_rng=_rng_mask,\n )\n train_state, loss, recon, metrics = train_step(train_state, inputs)\n metrics[""lr""] = lr_schedule(step)\n print(f""Step {step}, loss: {loss}"")\n step += 1\n\n # --- Logging ---\n if args.log:\n if step % args.log_interval == 0 and jax.process_index() == 0:\n wandb.log(\n {\n ""loss"": loss,\n ""step"": step,\n **metrics,\n }\n )\n if step % args.log_image_interval == 0:\n gt_seq = inputs[""videos""][0]\n recon_seq = recon[0].clip(0, 1)\n comparison_seq = jnp.concatenate((gt_seq, recon_seq), axis=1)\n comparison_seq = einops.rearrange(\n comparison_seq * 255, ""t h w c -> h (t w) c""\n )\n if jax.process_index() == 0:\n log_images = dict(\n image=wandb.Image(np.asarray(gt_seq[args.seq_len - 1])),\n recon=wandb.Image(np.asarray(recon_seq[args.seq_len - 1])),\n true_vs_recon=wandb.Image(\n np.asarray(comparison_seq.astype(np.uint8))\n ),\n )\n wandb.log(log_images)\n # --- Checkpointing ---\n if args.save_ckpt and step % args.log_checkpoint_interval == 0:\n checkpoint_manager.save(\n step,\n args=ocp.args.Composite(\n model_state=ocp.args.StandardSave(train_state),\n dataloader_state=grain.checkpoint.CheckpointSave(\n grain_iterator\n ),\n ),\n )\n print(f""Saved checkpoint at step {step}"")\n if step >= args.num_steps:\n break\n\n checkpoint_manager.close()\n",python,tab +4618,10893684,"train_dynamics_causal.py",1991,0,"",python,selection_mouse +4619,10895079,"train_dynamics_causal.py",1991,0,"_",python,content +4620,10895082,"train_dynamics_causal.py",1992,0,"",python,selection_keyboard +4621,10895434,"train_dynamics_causal.py",1992,0,"m",python,content +4622,10895435,"train_dynamics_causal.py",1993,0,"",python,selection_keyboard +4623,10895523,"train_dynamics_causal.py",1993,0,"a",python,content +4624,10895524,"train_dynamics_causal.py",1994,0,"",python,selection_keyboard +4625,10895573,"train_dynamics_causal.py",1994,0,"s",python,content +4626,10895574,"train_dynamics_causal.py",1995,0,"",python,selection_keyboard +4627,10895624,"train_dynamics_causal.py",1995,0,"k",python,content +4628,10895625,"train_dynamics_causal.py",1996,0,"",python,selection_keyboard +4629,10895789,"train_dynamics_causal.py",1996,0,"g",python,content +4630,10895790,"train_dynamics_causal.py",1997,0,"",python,selection_keyboard +4631,10895955,"train_dynamics_causal.py",1997,0,"t",python,content +4632,10895955,"train_dynamics_causal.py",1998,0,"",python,selection_keyboard +4633,10896533,"train_dynamics_causal.py",1997,1,"",python,content +4634,10896740,"train_dynamics_causal.py",1997,0,"i",python,content 
+4635,10896741,"train_dynamics_causal.py",1998,0,"",python,selection_keyboard +4636,10896890,"train_dynamics_causal.py",1998,0,"t",python,content +4637,10896891,"train_dynamics_causal.py",1999,0,"",python,selection_keyboard +4638,10898225,"train_dynamics_causal.py",2063,0,"",python,selection_mouse +4639,10899128,"train_dynamics_causal.py",2063,0,"_",python,content +4640,10899129,"train_dynamics_causal.py",2064,0,"",python,selection_keyboard +4641,10899448,"train_dynamics_causal.py",2064,0,"m",python,content +4642,10899449,"train_dynamics_causal.py",2065,0,"",python,selection_keyboard +4643,10899510,"train_dynamics_causal.py",2065,0,"a",python,content +4644,10899511,"train_dynamics_causal.py",2066,0,"",python,selection_keyboard +4645,10899603,"train_dynamics_causal.py",2066,0,"s",python,content +4646,10899604,"train_dynamics_causal.py",2067,0,"",python,selection_keyboard +4647,10899746,"train_dynamics_causal.py",2067,0,"k",python,content +4648,10899747,"train_dynamics_causal.py",2068,0,"",python,selection_keyboard +4649,10899823,"train_dynamics_causal.py",2068,0,"g",python,content +4650,10899823,"train_dynamics_causal.py",2069,0,"",python,selection_keyboard +4651,10899932,"train_dynamics_causal.py",2069,0,"i",python,content +4652,10899933,"train_dynamics_causal.py",2070,0,"",python,selection_keyboard +4653,10900019,"train_dynamics_causal.py",2070,0,"t",python,content +4654,10900019,"train_dynamics_causal.py",2071,0,"",python,selection_keyboard +4655,10901117,"train_dynamics_causal.py",2070,1,"",python,content +4656,10901244,"train_dynamics_causal.py",2069,1,"",python,content +4657,10901391,"train_dynamics_causal.py",2068,1,"",python,content +4658,10901536,"train_dynamics_causal.py",2067,1,"",python,content +4659,10901674,"train_dynamics_causal.py",2066,1,"",python,content +4660,10901814,"train_dynamics_causal.py",2065,1,"",python,content +4661,10901951,"train_dynamics_causal.py",2064,1,"",python,content +4662,10902086,"train_dynamics_causal.py",2063,1,"",python,content +4663,10902260,"train_dynamics_causal.py",2062,1,"",python,content +4664,10903308,"train_dynamics_causal.py",2062,0,"s",python,content +4665,10903308,"train_dynamics_causal.py",2063,0,"",python,selection_keyboard +4666,10903466,"train_dynamics_causal.py",2064,0,"",python,selection_command +4667,10904792,"train_dynamics_causal.py",2064,0,",",python,content +4668,10904793,"train_dynamics_causal.py",2065,0,"",python,selection_keyboard +4669,10904867,"train_dynamics_causal.py",2065,0," ",python,content +4670,10904868,"train_dynamics_causal.py",2066,0,"",python,selection_keyboard +4671,10905088,"train_dynamics_causal.py",2066,0,"""""",python,content +4672,10905089,"train_dynamics_causal.py",2067,0,"",python,selection_keyboard +4673,10905354,"train_dynamics_causal.py",2067,0,"m",python,content +4674,10905355,"train_dynamics_causal.py",2068,0,"",python,selection_keyboard +4675,10905487,"train_dynamics_causal.py",2068,0,"a",python,content +4676,10905488,"train_dynamics_causal.py",2069,0,"",python,selection_keyboard +4677,10905533,"train_dynamics_causal.py",2069,0,"s",python,content +4678,10905534,"train_dynamics_causal.py",2070,0,"",python,selection_keyboard +4679,10905588,"train_dynamics_causal.py",2070,0,"k",python,content +4680,10905589,"train_dynamics_causal.py",2071,0,"",python,selection_keyboard +4681,10906056,"train_dynamics_causal.py",2071,0,"g",python,content +4682,10906057,"train_dynamics_causal.py",2072,0,"",python,selection_keyboard +4683,10906559,"train_dynamics_causal.py",2072,0,"u",python,content 
+4684,10906560,"train_dynamics_causal.py",2073,0,"",python,selection_keyboard +4685,10906610,"train_dynamics_causal.py",2073,0,"t",python,content +4686,10906611,"train_dynamics_causal.py",2074,0,"",python,selection_keyboard +4687,10907253,"train_dynamics_causal.py",2073,1,"",python,content +4688,10907377,"train_dynamics_causal.py",2072,1,"",python,content +4689,10907546,"train_dynamics_causal.py",2072,0,"i",python,content +4690,10907547,"train_dynamics_causal.py",2073,0,"",python,selection_keyboard +4691,10907642,"train_dynamics_causal.py",2073,0,"t",python,content +4692,10907643,"train_dynamics_causal.py",2074,0,"",python,selection_keyboard +4693,10914868,"train_dynamics_causal.py",2071,0,"",python,selection_mouse +4694,10915022,"train_dynamics_causal.py",2067,7,"maskgit",python,selection_mouse +4695,10915314,"train_dynamics_causal.py",2067,7,"c",python,content +4696,10915315,"train_dynamics_causal.py",2068,0,"",python,selection_keyboard +4697,10915454,"train_dynamics_causal.py",2068,0,"a",python,content +4698,10915455,"train_dynamics_causal.py",2069,0,"",python,selection_keyboard +4699,10915557,"train_dynamics_causal.py",2069,0,"u",python,content +4700,10915557,"train_dynamics_causal.py",2070,0,"",python,selection_keyboard +4701,10915671,"train_dynamics_causal.py",2070,0,"s",python,content +4702,10915672,"train_dynamics_causal.py",2071,0,"",python,selection_keyboard +4703,10915817,"train_dynamics_causal.py",2071,0,"a",python,content +4704,10915818,"train_dynamics_causal.py",2072,0,"",python,selection_keyboard +4705,10915861,"train_dynamics_causal.py",2072,0,"l",python,content +4706,10915862,"train_dynamics_causal.py",2073,0,"",python,selection_keyboard +4707,10917028,"train_dynamics_causal.py",1992,0,"",python,selection_mouse +4708,10917253,"train_dynamics_causal.py",1992,1,"m",python,selection_mouse +4709,10917254,"train_dynamics_causal.py",1992,4,"mask",python,selection_mouse +4710,10917265,"train_dynamics_causal.py",1992,5,"maskg",python,selection_mouse +4711,10917558,"train_dynamics_causal.py",1992,6,"maskgi",python,selection_mouse +4712,10917558,"train_dynamics_causal.py",1992,47,"maskgit""\n tags: list[str] = field(default_fa",python,selection_mouse +4713,10918128,"train_dynamics_causal.py",1992,6,"maskgi",python,selection_mouse +4714,10918128,"train_dynamics_causal.py",1992,7,"maskgit",python,selection_mouse +4715,10918636,"train_dynamics_causal.py",1992,7,"c",python,content +4716,10918637,"train_dynamics_causal.py",1993,0,"",python,selection_keyboard +4717,10918819,"train_dynamics_causal.py",1993,0,"a",python,content +4718,10918820,"train_dynamics_causal.py",1994,0,"",python,selection_keyboard +4719,10918947,"train_dynamics_causal.py",1994,0,"u",python,content +4720,10918948,"train_dynamics_causal.py",1995,0,"",python,selection_keyboard +4721,10919050,"train_dynamics_causal.py",1995,0,"s",python,content +4722,10919050,"train_dynamics_causal.py",1996,0,"",python,selection_keyboard +4723,10919178,"train_dynamics_causal.py",1996,0,"a",python,content +4724,10919179,"train_dynamics_causal.py",1997,0,"",python,selection_keyboard +4725,10919260,"train_dynamics_causal.py",1997,0,"l",python,content +4726,10919261,"train_dynamics_causal.py",1998,0,"",python,selection_keyboard +4727,10921761,"train_dynamics_causal.py",0,0,"",python,tab +4728,10925055,"train_dynamics_maskgit.py",0,0,"",python,tab +4729,10925056,"train_dynamics_maskgit.py",1961,0,"",python,selection_mouse +4730,10926473,"train_dynamics_maskgit.py",1934,0,"",python,selection_command 
+4731,10926918,"train_dynamics_maskgit.py",1934,1,"",python,content +4732,10926993,"train_dynamics_maskgit.py",1934,0,"k",python,content +4733,10926994,"train_dynamics_maskgit.py",1935,0,"",python,selection_keyboard +4734,10927162,"train_dynamics_maskgit.py",1935,0,"g",python,content +4735,10927163,"train_dynamics_maskgit.py",1936,0,"",python,selection_keyboard +4736,10927290,"train_dynamics_maskgit.py",1936,0,"i",python,content +4737,10927291,"train_dynamics_maskgit.py",1937,0,"",python,selection_keyboard +4738,10927394,"train_dynamics_maskgit.py",1937,0,"t",python,content +4739,10927395,"train_dynamics_maskgit.py",1938,0,"",python,selection_keyboard +4740,10928214,"train_dynamics_maskgit.py",1930,4," ",python,content +4741,10929596,"train_dynamics_maskgit.py",1930,8," ",python,content +4742,10929912,"train_dynamics_maskgit.py",1934,4,"",python,content +4743,10930343,"train_dynamics_maskgit.py",1934,0,"n",python,content +4744,10932367,"train_dynamics_maskgit.py",1933,0,"",python,selection_command +4745,10933230,"train_dynamics_maskgit.py",1961,0,"",python,selection_mouse +4746,10934665,"train_dynamics_maskgit.py",1961,0,"_",python,content +4747,10934666,"train_dynamics_maskgit.py",1962,0,"",python,selection_keyboard +4748,10934987,"train_dynamics_maskgit.py",1962,0,"m",python,content +4749,10934987,"train_dynamics_maskgit.py",1963,0,"",python,selection_keyboard +4750,10935059,"train_dynamics_maskgit.py",1963,0,"a",python,content +4751,10935060,"train_dynamics_maskgit.py",1964,0,"",python,selection_keyboard +4752,10935142,"train_dynamics_maskgit.py",1964,0,"s",python,content +4753,10935142,"train_dynamics_maskgit.py",1965,0,"",python,selection_keyboard +4754,10935204,"train_dynamics_maskgit.py",1965,0,"k",python,content +4755,10935205,"train_dynamics_maskgit.py",1966,0,"",python,selection_keyboard +4756,10935627,"train_dynamics_maskgit.py",1966,0,"g",python,content +4757,10935628,"train_dynamics_maskgit.py",1967,0,"",python,selection_keyboard +4758,10935688,"train_dynamics_maskgit.py",1967,0,"i",python,content +4759,10935689,"train_dynamics_maskgit.py",1968,0,"",python,selection_keyboard +4760,10935767,"train_dynamics_maskgit.py",1968,0,"t",python,content +4761,10935768,"train_dynamics_maskgit.py",1969,0,"",python,selection_keyboard +4762,10937096,"train_dynamics_maskgit.py",2033,0,"",python,selection_mouse +4763,10937717,"train_dynamics_maskgit.py",2034,0,"",python,selection_command +4764,10938522,"train_dynamics_maskgit.py",2034,0,",",python,content +4765,10938523,"train_dynamics_maskgit.py",2035,0,"",python,selection_keyboard +4766,10938637,"train_dynamics_maskgit.py",2035,0," ",python,content +4767,10938638,"train_dynamics_maskgit.py",2036,0,"",python,selection_keyboard +4768,10938862,"train_dynamics_maskgit.py",2036,0,"""""",python,content +4769,10938863,"train_dynamics_maskgit.py",2037,0,"",python,selection_keyboard +4770,10939112,"train_dynamics_maskgit.py",2037,0,"m",python,content +4771,10939113,"train_dynamics_maskgit.py",2038,0,"",python,selection_keyboard +4772,10939204,"train_dynamics_maskgit.py",2038,0,"a",python,content +4773,10939205,"train_dynamics_maskgit.py",2039,0,"",python,selection_keyboard +4774,10939269,"train_dynamics_maskgit.py",2039,0,"s",python,content +4775,10939270,"train_dynamics_maskgit.py",2040,0,"",python,selection_keyboard +4776,10939414,"train_dynamics_maskgit.py",2040,0,"k",python,content +4777,10939414,"train_dynamics_maskgit.py",2041,0,"",python,selection_keyboard +4778,10939547,"train_dynamics_maskgit.py",2041,0,"g",python,content 
+4779,10939548,"train_dynamics_maskgit.py",2042,0,"",python,selection_keyboard +4780,10939807,"train_dynamics_maskgit.py",2042,0,"t",python,content +4781,10939808,"train_dynamics_maskgit.py",2043,0,"",python,selection_keyboard +4782,10940130,"train_dynamics_maskgit.py",2042,1,"",python,content +4783,10940391,"train_dynamics_maskgit.py",2042,0,"i",python,content +4784,10940392,"train_dynamics_maskgit.py",2043,0,"",python,selection_keyboard +4785,10940491,"train_dynamics_maskgit.py",2043,0,"t",python,content +4786,10940492,"train_dynamics_maskgit.py",2044,0,"",python,selection_keyboard +4787,10940848,"train_dynamics_maskgit.py",2043,0,"",python,selection_command +4788,10944571,"train_dynamics_maskgit.py",1929,0,"",python,selection_mouse +4789,10944578,"train_dynamics_maskgit.py",1928,0,"",python,selection_command +4790,10945100,"train_dynamics_maskgit.py",2107,0,"",python,selection_mouse +4791,10945105,"train_dynamics_maskgit.py",2106,0,"",python,selection_command +4792,10945755,"train_dynamics_maskgit.py",1907,0,"",python,selection_mouse +4793,10945785,"train_dynamics_maskgit.py",1906,0,"",python,selection_command +4794,10952505,"train_dynamics_maskgit.py",1962,0,"",python,selection_command +4795,10953508,"train_dynamics_causal.py",0,0,"",python,tab +4796,10953508,"train_dynamics_causal.py",1677,0,"",python,selection_mouse +4797,10954451,"train_dynamics_causal.py",1704,0,"",python,selection_mouse +4798,10954962,"train_dynamics_causal.py",1638,0,"",python,selection_mouse +4799,10957184,"train_dynamics_causal.py",1784,0,"",python,selection_mouse +4800,10957367,"train_dynamics_causal.py",1778,11,"use_maskgit",python,selection_mouse +4801,10958322,"train_dynamics_causal.py",1788,0,"",python,selection_command +4802,10960449,"train_dynamics_causal.py",1994,1,"u",python,selection_command +4803,10960622,"train_dynamics_causal.py",1994,2,"us",python,selection_command +4804,10960955,"train_dynamics_causal.py",2834,3,"use",python,selection_command +4805,10961210,"train_dynamics_causal.py",2834,4,"use_",python,selection_command +4806,10961530,"train_dynamics_causal.py",2834,5,"use_m",python,selection_command +4807,10961668,"train_dynamics_causal.py",2834,6,"use_ma",python,selection_command +4808,10961734,"train_dynamics_causal.py",2834,7,"use_mas",python,selection_command +4809,10961797,"train_dynamics_causal.py",2834,8,"use_mask",python,selection_command +4810,10962150,"train_dynamics_causal.py",2834,9,"use_maskg",python,selection_command +4811,10963423,"train_dynamics_causal.py",2834,10,"use_maskgi",python,selection_command +4812,10963563,"train_dynamics_causal.py",2834,11,"use_maskgit",python,selection_command +4813,10965595,"train_dynamics_causal.py",6312,11,"use_maskgit",python,selection_command +4814,10967875,"train_dynamics_causal.py",6281,0,"",python,selection_mouse +4815,10968908,"train_dynamics_causal.py",6317,0,"",python,selection_command +4816,10969533,"train_dynamics_causal.py",6304,38,"",python,content +4817,10969609,"train_dynamics_causal.py",6312,0,"",python,selection_command +4818,10971604,"train_dynamics_causal.py",1778,11,"use_maskgit",python,selection_command +4819,10974880,"train_dynamics_causal.py",1799,0,"",python,selection_mouse +4820,10975775,"train_dynamics_causal.py",1774,30,"",python,content +4821,10975861,"train_dynamics_causal.py",1778,0,"",python,selection_command +4822,10977447,"train_dynamics_causal.py",2804,11,"use_maskgit",python,selection_command +4823,10978858,"train_dynamics_causal.py",2838,0,"",python,selection_mouse 
+4824,10979257,"train_dynamics_causal.py",2812,0,"",python,selection_mouse +4825,10988661,"train_dynamics_causal.py",2788,29,"",python,content +4826,10988751,"train_dynamics_causal.py",2796,0,"",python,selection_command +4827,10989679,"train_dynamics_causal.py",2788,51," logits = outputs[""token_logits""][:, :, :-1]",python,selection_command +4828,10990347,"train_dynamics_causal.py",2788,103," logits = outputs[""token_logits""][:, :, :-1]\n targets = outputs[""video_tokens""][:, :, 1:]",python,selection_command +4829,10990477,"train_dynamics_causal.py",2788,145," logits = outputs[""token_logits""][:, :, :-1]\n targets = outputs[""video_tokens""][:, :, 1:]\n mask = outputs[""mask""][:, :, 1:] ",python,selection_command +4830,10990802,"train_dynamics_causal.py",2796,0,"",python,selection_command +4831,10991038,"train_dynamics_causal.py",2896,4,"",python,content +4832,10991038,"train_dynamics_causal.py",2844,4,"",python,content +4833,10991038,"train_dynamics_causal.py",2792,4,"",python,content +4834,10991251,"train_dynamics_causal.py",2791,0,"",python,selection_command +4835,10991522,"train_dynamics_causal.py",2787,0,"",python,selection_command +4836,10992126,"train_dynamics_causal.py",2787,1,"",python,content +4837,10992162,"train_dynamics_causal.py",2791,0,"",python,selection_command +4838,10992611,"train_dynamics_causal.py",2753,0,"",python,selection_command +4839,10992851,"train_dynamics_causal.py",2716,0,"",python,selection_command +4840,10993043,"train_dynamics_causal.py",2642,0,"",python,selection_command +4841,10996580,"train_dynamics_causal.py",2716,0,"",python,selection_command +4842,10997027,"train_dynamics_causal.py",2753,0,"",python,selection_command +4843,10997202,"train_dynamics_causal.py",2791,0,"",python,selection_command +4844,10997866,"train_dynamics_causal.py",2792,0,"",python,selection_command +4845,10998362,"train_dynamics_causal.py",2793,0,"",python,selection_command +4846,10998430,"train_dynamics_causal.py",2794,0,"",python,selection_command +4847,10998451,"train_dynamics_causal.py",2795,0,"",python,selection_command +4848,10998480,"train_dynamics_causal.py",2796,0,"",python,selection_command +4849,10998490,"train_dynamics_causal.py",2797,0,"",python,selection_command +4850,10998563,"train_dynamics_causal.py",2798,0,"",python,selection_command +4851,10998564,"train_dynamics_causal.py",2799,0,"",python,selection_command +4852,10998602,"train_dynamics_causal.py",2800,0,"",python,selection_command +4853,10998616,"train_dynamics_causal.py",2801,0,"",python,selection_command +4854,10998637,"train_dynamics_causal.py",2802,0,"",python,selection_command +4855,10998674,"train_dynamics_causal.py",2803,0,"",python,selection_command +4856,10999750,"train_dynamics_causal.py",2765,0,"",python,selection_command +4857,10999919,"train_dynamics_causal.py",2728,0,"",python,selection_command +4858,11000509,"train_dynamics_causal.py",2765,0,"",python,selection_command +4859,11000707,"train_dynamics_causal.py",2728,0,"",python,selection_command +4860,11003289,"train_dynamics_causal.py",2765,0,"",python,selection_command +4861,11003456,"train_dynamics_causal.py",2803,0,"",python,selection_command +4862,11003927,"train_dynamics_causal.py",2804,0,"",python,selection_command +4863,11004427,"train_dynamics_causal.py",2805,0,"",python,selection_command +4864,11004462,"train_dynamics_causal.py",2806,0,"",python,selection_command +4865,11004523,"train_dynamics_causal.py",2807,0,"",python,selection_command +4866,11004523,"train_dynamics_causal.py",2808,0,"",python,selection_command 
+4867,11004604,"train_dynamics_causal.py",2809,0,"",python,selection_command +4868,11004605,"train_dynamics_causal.py",2810,0,"",python,selection_command +4869,11004609,"train_dynamics_causal.py",2811,0,"",python,selection_command +4870,11004649,"train_dynamics_causal.py",2812,0,"",python,selection_command +4871,11004674,"train_dynamics_causal.py",2813,0,"",python,selection_command +4872,11004698,"train_dynamics_causal.py",2814,0,"",python,selection_command +4873,11004740,"train_dynamics_causal.py",2815,0,"",python,selection_command +4874,11004809,"train_dynamics_causal.py",2816,0,"",python,selection_command +4875,11004945,"train_dynamics_causal.py",2817,0,"",python,selection_command +4876,11005033,"train_dynamics_causal.py",2818,0,"",python,selection_command +4877,11005168,"train_dynamics_causal.py",2819,0,"",python,selection_command +4878,11005298,"train_dynamics_causal.py",2820,0,"",python,selection_command +4879,11005449,"train_dynamics_causal.py",2821,0,"",python,selection_command +4880,11005603,"train_dynamics_causal.py",2822,0,"",python,selection_command +4881,11005744,"train_dynamics_causal.py",2823,0,"",python,selection_command +4882,11006986,"train_dynamics_causal.py",2823,11,"",python,content +4883,11007016,"train_dynamics_causal.py",2822,0,"",python,selection_command +4884,11010081,"train_dynamics_causal.py",2785,0,"",python,selection_command +4885,11010336,"train_dynamics_causal.py",2747,0,"",python,selection_command +4886,11011268,"train_dynamics_causal.py",2823,0,"[:, :, :-1]",python,content +4887,11011308,"train_dynamics_causal.py",2823,0,"",python,selection_command +4888,11012343,"train_dynamics_causal.py",2785,0,"",python,selection_command +4889,11012654,"train_dynamics_causal.py",2784,0,"",python,selection_command +4890,11012951,"train_dynamics_causal.py",2822,0,"",python,selection_command +4891,11013944,"train_dynamics_causal.py",2822,1,"]",python,selection_command +4892,11014036,"train_dynamics_causal.py",2821,2,"""]",python,selection_command +4893,11014570,"train_dynamics_causal.py",2820,3,"s""]",python,selection_command +4894,11014652,"train_dynamics_causal.py",2819,4,"ts""]",python,selection_command +4895,11014652,"train_dynamics_causal.py",2818,5,"its""]",python,selection_command +4896,11014711,"train_dynamics_causal.py",2817,6,"gits""]",python,selection_command +4897,11014711,"train_dynamics_causal.py",2816,7,"ogits""]",python,selection_command +4898,11014772,"train_dynamics_causal.py",2815,8,"logits""]",python,selection_command +4899,11014773,"train_dynamics_causal.py",2814,9,"_logits""]",python,selection_command +4900,11014860,"train_dynamics_causal.py",2813,10,"n_logits""]",python,selection_command +4901,11014861,"train_dynamics_causal.py",2812,11,"en_logits""]",python,selection_command +4902,11014952,"train_dynamics_causal.py",2811,12,"ken_logits""]",python,selection_command +4903,11014953,"train_dynamics_causal.py",2810,13,"oken_logits""]",python,selection_command +4904,11014953,"train_dynamics_causal.py",2809,14,"token_logits""]",python,selection_command +4905,11015046,"train_dynamics_causal.py",2808,15,"""token_logits""]",python,selection_command +4906,11015047,"train_dynamics_causal.py",2807,16,"[""token_logits""]",python,selection_command +4907,11015047,"train_dynamics_causal.py",2806,17,"s[""token_logits""]",python,selection_command +4908,11015208,"train_dynamics_causal.py",2805,18,"ts[""token_logits""]",python,selection_command +4909,11015208,"train_dynamics_causal.py",2804,19,"uts[""token_logits""]",python,selection_command 
+4910,11015209,"train_dynamics_causal.py",2803,20,"puts[""token_logits""]",python,selection_command +4911,11015366,"train_dynamics_causal.py",2802,21,"tputs[""token_logits""]",python,selection_command +4912,11015367,"train_dynamics_causal.py",2801,22,"utputs[""token_logits""]",python,selection_command +4913,11015367,"train_dynamics_causal.py",2800,23,"outputs[""token_logits""]",python,selection_command +4914,11015367,"train_dynamics_causal.py",2799,24," outputs[""token_logits""]",python,selection_command +4915,11015368,"train_dynamics_causal.py",2798,25,"= outputs[""token_logits""]",python,selection_command +4916,11015398,"train_dynamics_causal.py",2797,26," = outputs[""token_logits""]",python,selection_command +4917,11015467,"train_dynamics_causal.py",2796,27,"s = outputs[""token_logits""]",python,selection_command +4918,11015836,"train_dynamics_causal.py",2797,26," = outputs[""token_logits""]",python,selection_command +4919,11016007,"train_dynamics_causal.py",2798,25,"= outputs[""token_logits""]",python,selection_command +4920,11016129,"train_dynamics_causal.py",2799,24," outputs[""token_logits""]",python,selection_command +4921,11016279,"train_dynamics_causal.py",2800,23,"outputs[""token_logits""]",python,selection_command +4922,11017055,"train_dynamics_causal.py",2800,0,"",python,selection_command +4923,11017182,"train_dynamics_causal.py",2799,0,"",python,selection_command +4924,11017685,"train_dynamics_causal.py",2798,0,"",python,selection_command +4925,11018097,"train_dynamics_causal.py",2797,0,"",python,selection_command +4926,11018354,"train_dynamics_causal.py",2798,0,"outputs[""token_logits""]",python,content +4927,11018383,"train_dynamics_causal.py",2820,0,"",python,selection_command +4928,11019663,"train_dynamics_causal.py",2820,0," ",python,content +4929,11019664,"train_dynamics_causal.py",2821,0,"",python,selection_keyboard +4930,11020163,"train_dynamics_causal.py",2820,1,"",python,content +4931,11020461,"train_dynamics_causal.py",2821,0,"",python,selection_command +4932,11020671,"train_dynamics_causal.py",2821,0," ",python,content +4933,11020671,"train_dynamics_causal.py",2822,0,"",python,selection_keyboard +4934,11021077,"train_dynamics_causal.py",2821,0,"",python,selection_command +4935,11021586,"train_dynamics_causal.py",2820,0,"",python,selection_command +4936,11022084,"train_dynamics_causal.py",2819,0,"",python,selection_command +4937,11022124,"train_dynamics_causal.py",2818,0,"",python,selection_command +4938,11022150,"train_dynamics_causal.py",2817,0,"",python,selection_command +4939,11022181,"train_dynamics_causal.py",2816,0,"",python,selection_command +4940,11022209,"train_dynamics_causal.py",2815,0,"",python,selection_command +4941,11022269,"train_dynamics_causal.py",2814,0,"",python,selection_command +4942,11022298,"train_dynamics_causal.py",2813,0,"",python,selection_command +4943,11022330,"train_dynamics_causal.py",2812,0,"",python,selection_command +4944,11022353,"train_dynamics_causal.py",2811,0,"",python,selection_command +4945,11022390,"train_dynamics_causal.py",2810,0,"",python,selection_command +4946,11022414,"train_dynamics_causal.py",2809,0,"",python,selection_command +4947,11022433,"train_dynamics_causal.py",2808,0,"",python,selection_command +4948,11022476,"train_dynamics_causal.py",2807,0,"",python,selection_command +4949,11022509,"train_dynamics_causal.py",2806,0,"",python,selection_command +4950,11022549,"train_dynamics_causal.py",2805,0,"",python,selection_command +4951,11022550,"train_dynamics_causal.py",2804,0,"",python,selection_command 
+4952,11022572,"train_dynamics_causal.py",2803,0,"",python,selection_command +4953,11022625,"train_dynamics_causal.py",2802,0,"",python,selection_command +4954,11022658,"train_dynamics_causal.py",2801,0,"",python,selection_command +4955,11022691,"train_dynamics_causal.py",2800,0,"",python,selection_command +4956,11022692,"train_dynamics_causal.py",2799,0,"",python,selection_command +4957,11022737,"train_dynamics_causal.py",2798,0,"",python,selection_command +4958,11022769,"train_dynamics_causal.py",2797,0,"",python,selection_command +4959,11022777,"train_dynamics_causal.py",2796,0,"",python,selection_command +4960,11022818,"train_dynamics_causal.py",2795,0,"",python,selection_command +4961,11022839,"train_dynamics_causal.py",2794,0,"",python,selection_command +4962,11022874,"train_dynamics_causal.py",2793,0,"",python,selection_command +4963,11022901,"train_dynamics_causal.py",2792,0,"",python,selection_command +4964,11022954,"train_dynamics_causal.py",2791,0,"",python,selection_command +4965,11023467,"train_dynamics_causal.py",2791,7,"",python,content +4966,11023715,"train_dynamics_causal.py",2791,1,"",python,content +4967,11025596,"train_dynamics_causal.py",2791,0,"o",python,content +4968,11025597,"train_dynamics_causal.py",2792,0,"",python,selection_keyboard +4969,11025661,"train_dynamics_causal.py",2791,0,"",python,selection_command +4970,11026118,"train_dynamics_causal.py",2856,0,"",python,selection_command +4971,11026337,"train_dynamics_causal.py",2791,0,"",python,selection_command +4972,11026805,"train_dynamics_causal.py",2856,0,"",python,selection_command +4973,11026956,"train_dynamics_causal.py",2857,0,"",python,selection_command +4974,11027489,"train_dynamics_causal.py",2858,0,"",python,selection_command +4975,11027516,"train_dynamics_causal.py",2859,0,"",python,selection_command +4976,11027553,"train_dynamics_causal.py",2860,0,"",python,selection_command +4977,11027580,"train_dynamics_causal.py",2861,0,"",python,selection_command +4978,11027605,"train_dynamics_causal.py",2862,0,"",python,selection_command +4979,11027643,"train_dynamics_causal.py",2863,0,"",python,selection_command +4980,11027662,"train_dynamics_causal.py",2864,0,"",python,selection_command +4981,11027769,"train_dynamics_causal.py",2865,0,"",python,selection_command +4982,11027919,"train_dynamics_causal.py",2866,0,"",python,selection_command +4983,11028367,"train_dynamics_causal.py",2866,1,"o",python,selection_command +4984,11028512,"train_dynamics_causal.py",2866,2,"ou",python,selection_command +4985,11029046,"train_dynamics_causal.py",2866,3,"out",python,selection_command +4986,11029110,"train_dynamics_causal.py",2866,4,"outp",python,selection_command +4987,11029111,"train_dynamics_causal.py",2866,5,"outpu",python,selection_command +4988,11029171,"train_dynamics_causal.py",2866,6,"output",python,selection_command +4989,11029171,"train_dynamics_causal.py",2866,7,"outputs",python,selection_command +4990,11029258,"train_dynamics_causal.py",2866,8,"outputs[",python,selection_command +4991,11029259,"train_dynamics_causal.py",2866,9,"outputs[""",python,selection_command +4992,11029347,"train_dynamics_causal.py",2866,10,"outputs[""v",python,selection_command +4993,11029348,"train_dynamics_causal.py",2866,11,"outputs[""vi",python,selection_command +4994,11029348,"train_dynamics_causal.py",2866,12,"outputs[""vid",python,selection_command +4995,11029437,"train_dynamics_causal.py",2866,13,"outputs[""vide",python,selection_command +4996,11029438,"train_dynamics_causal.py",2866,14,"outputs[""video",python,selection_command 
+4997,11029438,"train_dynamics_causal.py",2866,15,"outputs[""video_",python,selection_command +4998,11029528,"train_dynamics_causal.py",2866,16,"outputs[""video_t",python,selection_command +4999,11029529,"train_dynamics_causal.py",2866,17,"outputs[""video_to",python,selection_command +5000,11029529,"train_dynamics_causal.py",2866,18,"outputs[""video_tok",python,selection_command +5001,11029621,"train_dynamics_causal.py",2866,19,"outputs[""video_toke",python,selection_command +5002,11029622,"train_dynamics_causal.py",2866,20,"outputs[""video_token",python,selection_command +5003,11029622,"train_dynamics_causal.py",2866,21,"outputs[""video_tokens",python,selection_command +5004,11029696,"train_dynamics_causal.py",2866,22,"outputs[""video_tokens""",python,selection_command +5005,11029696,"train_dynamics_causal.py",2866,23,"outputs[""video_tokens""]",python,selection_command +5006,11029696,"train_dynamics_causal.py",2866,24,"outputs[""video_tokens""][",python,selection_command +5007,11029697,"train_dynamics_causal.py",2866,25,"outputs[""video_tokens""][:",python,selection_command +5008,11030144,"train_dynamics_causal.py",2866,24,"outputs[""video_tokens""][",python,selection_command +5009,11030512,"train_dynamics_causal.py",2866,23,"outputs[""video_tokens""]",python,selection_command +5010,11031148,"train_dynamics_causal.py",2866,0,"",python,selection_command +5011,11031255,"train_dynamics_causal.py",2865,0,"",python,selection_command +5012,11031773,"train_dynamics_causal.py",2864,0,"",python,selection_command +5013,11031827,"train_dynamics_causal.py",2863,0,"",python,selection_command +5014,11032033,"train_dynamics_causal.py",2862,0,"",python,selection_command +5015,11032352,"train_dynamics_causal.py",2863,0,"outputs[""video_tokens""]",python,content +5016,11032379,"train_dynamics_causal.py",2885,0,"",python,selection_command +5017,11033152,"train_dynamics_causal.py",2884,0,"",python,selection_command +5018,11033681,"train_dynamics_causal.py",2883,0,"",python,selection_command +5019,11033707,"train_dynamics_causal.py",2882,0,"",python,selection_command +5020,11033729,"train_dynamics_causal.py",2881,0,"",python,selection_command +5021,11033763,"train_dynamics_causal.py",2880,0,"",python,selection_command +5022,11033773,"train_dynamics_causal.py",2879,0,"",python,selection_command +5023,11033836,"train_dynamics_causal.py",2878,0,"",python,selection_command +5024,11033837,"train_dynamics_causal.py",2877,0,"",python,selection_command +5025,11033900,"train_dynamics_causal.py",2876,0,"",python,selection_command +5026,11033952,"train_dynamics_causal.py",2875,0,"",python,selection_command +5027,11033952,"train_dynamics_causal.py",2874,0,"",python,selection_command +5028,11033978,"train_dynamics_causal.py",2873,0,"",python,selection_command +5029,11034014,"train_dynamics_causal.py",2872,0,"",python,selection_command +5030,11034071,"train_dynamics_causal.py",2871,0,"",python,selection_command +5031,11034103,"train_dynamics_causal.py",2870,0,"",python,selection_command +5032,11034104,"train_dynamics_causal.py",2869,0,"",python,selection_command +5033,11034131,"train_dynamics_causal.py",2868,0,"",python,selection_command +5034,11034139,"train_dynamics_causal.py",2867,0,"",python,selection_command +5035,11034523,"train_dynamics_causal.py",2866,0,"",python,selection_command +5036,11034744,"train_dynamics_causal.py",2865,0,"",python,selection_command +5037,11034899,"train_dynamics_causal.py",2864,0,"",python,selection_command +5038,11035036,"train_dynamics_causal.py",2863,0,"",python,selection_command 
+5039,11035192,"train_dynamics_causal.py",2862,0,"",python,selection_command +5040,11036057,"train_dynamics_causal.py",2856,6,"",python,content +5041,11036799,"train_dynamics_causal.py",2856,1,"",python,content +5042,11037265,"train_dynamics_causal.py",2857,0,"",python,selection_command +5043,11037755,"train_dynamics_causal.py",2858,0,"",python,selection_command +5044,11037786,"train_dynamics_causal.py",2859,0,"",python,selection_command +5045,11038443,"train_dynamics_causal.py",2794,0,"",python,selection_command +5046,11038635,"train_dynamics_causal.py",2756,0,"",python,selection_command +5047,11040703,"train_dynamics_causal.py",2712,75,"",python,content +5048,11040759,"train_dynamics_causal.py",2716,0,"",python,selection_command +5049,11042911,"train_dynamics_causal.py",2645,0,"",python,selection_command +5050,11043185,"train_dynamics_causal.py",2618,0,"",python,selection_command +5051,11043482,"train_dynamics_causal.py",2645,0,"",python,selection_command +5052,11043963,"train_dynamics_causal.py",2719,0,"",python,selection_command +5053,11044001,"train_dynamics_causal.py",2784,0,"",python,selection_command +5054,11044073,"train_dynamics_causal.py",2848,0,"",python,selection_command +5055,11044312,"train_dynamics_causal.py",2886,0,"",python,selection_command +5056,11044524,"train_dynamics_causal.py",2949,0,"",python,selection_command +5057,11046062,"train_dynamics_causal.py",2867,0,"",python,selection_mouse +5058,11049631,"train_dynamics_causal.py",2886,1,"l",python,selection_command +5059,11049797,"train_dynamics_causal.py",2886,2,"lo",python,selection_command +5060,11049990,"train_dynamics_causal.py",2950,3,"log",python,selection_command +5061,11050034,"train_dynamics_causal.py",2950,4,"logi",python,selection_command +5062,11050184,"train_dynamics_causal.py",2950,5,"logit",python,selection_command +5063,11051004,"train_dynamics_causal.py",2950,6,"logits",python,selection_command +5064,11054235,"train_dynamics_causal.py",2719,0,"",python,selection_mouse +5065,11054384,"train_dynamics_causal.py",2716,7,"outputs",python,selection_mouse +5066,11054480,"train_dynamics_causal.py",2716,21,"outputs[""token_logits",python,selection_mouse +5067,11054664,"train_dynamics_causal.py",2716,22,"outputs[""token_logits""",python,selection_mouse +5068,11054836,"train_dynamics_causal.py",2716,23,"outputs[""token_logits""]",python,selection_mouse +5069,11060691,"train_dynamics_causal.py",2637,0,"",python,selection_mouse +5070,11060747,"train_dynamics_causal.py",2636,0,"",python,selection_command +5071,11061178,"train_dynamics_causal.py",2770,0,"",python,selection_mouse +5072,11062418,"train_dynamics_causal.py",2712,0," logits = outputs[""token_logits""]\n targets = outputs[""video_tokens""]\n",python,content +5073,11066884,"train_dynamics_causal.py",2814,0,"",python,selection_mouse +5074,11067892,"train_dynamics_causal.py",2817,0,"",python,selection_mouse +5075,11068087,"train_dynamics_causal.py",2817,2,"ou",python,selection_mouse +5076,11068088,"train_dynamics_causal.py",2817,7,"outputs",python,selection_mouse +5077,11068088,"train_dynamics_causal.py",2817,11,"outputs[""to",python,selection_mouse +5078,11068159,"train_dynamics_causal.py",2817,17,"outputs[""token_lo",python,selection_mouse +5079,11068160,"train_dynamics_causal.py",2817,21,"outputs[""token_logits",python,selection_mouse +5080,11068160,"train_dynamics_causal.py",2786,31,"\n outputs[""token_logits""] = ",python,selection_mouse +5081,11068397,"train_dynamics_causal.py",2817,34,"outputs[""token_logits""][:, :, :-1]",python,selection_mouse 
+5082,11069798,"train_dynamics_causal.py",2850,0,"",python,selection_command +5083,11071219,"train_dynamics_causal.py",2818,0,"",python,selection_mouse +5084,11071585,"train_dynamics_causal.py",2818,2,"ut",python,selection_mouse +5085,11071586,"train_dynamics_causal.py",2818,8,"utputs[""",python,selection_mouse +5086,11071586,"train_dynamics_causal.py",2818,12,"utputs[""toke",python,selection_mouse +5087,11071586,"train_dynamics_causal.py",2818,15,"utputs[""token_l",python,selection_mouse +5088,11071587,"train_dynamics_causal.py",2818,18,"utputs[""token_logi",python,selection_mouse +5089,11071587,"train_dynamics_causal.py",2818,20,"utputs[""token_logits",python,selection_mouse +5090,11071587,"train_dynamics_causal.py",2818,22,"utputs[""token_logits""]",python,selection_mouse +5091,11071587,"train_dynamics_causal.py",2818,24,"utputs[""token_logits""][:",python,selection_mouse +5092,11071634,"train_dynamics_causal.py",2818,25,"utputs[""token_logits""][:,",python,selection_mouse +5093,11071985,"train_dynamics_causal.py",2843,0,"",python,selection_mouse +5094,11072923,"train_dynamics_causal.py",2725,0,"",python,selection_mouse +5095,11073218,"train_dynamics_causal.py",2725,2,"ou",python,selection_mouse +5096,11073219,"train_dynamics_causal.py",2725,6,"output",python,selection_mouse +5097,11073219,"train_dynamics_causal.py",2725,9,"outputs[""",python,selection_mouse +5098,11073220,"train_dynamics_causal.py",2725,12,"outputs[""tok",python,selection_mouse +5099,11073220,"train_dynamics_causal.py",2725,15,"outputs[""token_",python,selection_mouse +5100,11073220,"train_dynamics_causal.py",2725,18,"outputs[""token_log",python,selection_mouse +5101,11073286,"train_dynamics_causal.py",2725,19,"outputs[""token_logi",python,selection_mouse +5102,11073287,"train_dynamics_causal.py",2725,22,"outputs[""token_logits""",python,selection_mouse +5103,11073322,"train_dynamics_causal.py",2725,23,"outputs[""token_logits""]",python,selection_mouse +5104,11074245,"train_dynamics_causal.py",2725,23,"",python,content +5105,11074312,"train_dynamics_causal.py",2724,0,"",python,selection_command +5106,11075399,"train_dynamics_causal.py",2740,0,"",python,selection_mouse +5107,11075645,"train_dynamics_causal.py",2740,4,"outp",python,selection_mouse +5108,11075645,"train_dynamics_causal.py",2740,9,"outputs[""",python,selection_mouse +5109,11075646,"train_dynamics_causal.py",2740,12,"outputs[""vid",python,selection_mouse +5110,11075646,"train_dynamics_causal.py",2740,15,"outputs[""video_",python,selection_mouse +5111,11075680,"train_dynamics_causal.py",2740,18,"outputs[""video_tok",python,selection_mouse +5112,11075718,"train_dynamics_causal.py",2740,22,"outputs[""video_tokens""",python,selection_mouse +5113,11075745,"train_dynamics_causal.py",2740,23,"outputs[""video_tokens""]",python,selection_mouse +5114,11076053,"train_dynamics_causal.py",2740,23,"",python,content +5115,11076093,"train_dynamics_causal.py",2739,0,"",python,selection_command +5116,11077465,"train_dynamics_causal.py",2771,0,"",python,selection_mouse +5117,11077822,"train_dynamics_causal.py",2771,1,"o",python,selection_mouse +5118,11077822,"train_dynamics_causal.py",2771,10,"outputs[""t",python,selection_mouse +5119,11077823,"train_dynamics_causal.py",2771,15,"outputs[""token_",python,selection_mouse +5120,11077833,"train_dynamics_causal.py",2771,18,"outputs[""token_log",python,selection_mouse +5121,11077834,"train_dynamics_causal.py",2771,21,"outputs[""token_logits",python,selection_mouse 
+5122,11077834,"train_dynamics_causal.py",2771,24,"outputs[""token_logits""][",python,selection_mouse +5123,11077834,"train_dynamics_causal.py",2771,26,"outputs[""token_logits""][:,",python,selection_mouse +5124,11077834,"train_dynamics_causal.py",2771,27,"outputs[""token_logits""][:, ",python,selection_mouse +5125,11077910,"train_dynamics_causal.py",2771,29,"outputs[""token_logits""][:, :,",python,selection_mouse +5126,11077910,"train_dynamics_causal.py",2771,31,"outputs[""token_logits""][:, :, :",python,selection_mouse +5127,11077936,"train_dynamics_causal.py",2771,32,"outputs[""token_logits""][:, :, :-",python,selection_mouse +5128,11077967,"train_dynamics_causal.py",2771,33,"outputs[""token_logits""][:, :, :-1",python,selection_mouse +5129,11078021,"train_dynamics_causal.py",2771,34,"outputs[""token_logits""][:, :, :-1]",python,selection_mouse +5130,11079182,"train_dynamics_causal.py",2771,34,"",python,content +5131,11079215,"train_dynamics_causal.py",2770,0,"",python,selection_command +5132,11079900,"train_dynamics_causal.py",2725,0,"",python,selection_mouse +5133,11079918,"train_dynamics_causal.py",2724,0,"",python,selection_command +5134,11080736,"train_dynamics_causal.py",2725,0,"outputs[""token_logits""][:, :, :-1]",python,content +5135,11080761,"train_dynamics_causal.py",2758,0,"",python,selection_command +5136,11082488,"train_dynamics_causal.py",2836,0,"",python,selection_mouse +5137,11082814,"train_dynamics_causal.py",2836,2,"ou",python,selection_mouse +5138,11082815,"train_dynamics_causal.py",2836,6,"output",python,selection_mouse +5139,11082815,"train_dynamics_causal.py",2836,71,"outputs[""video_tokens""][:, :, 1:]\n mask = outputs[""mask""][:, :, 1:] ",python,selection_mouse +5140,11083159,"train_dynamics_causal.py",2836,33,"outputs[""video_tokens""][:, :, 1:]",python,selection_mouse +5141,11083856,"train_dynamics_causal.py",2836,33,"",python,content +5142,11083890,"train_dynamics_causal.py",2835,0,"",python,selection_command +5143,11084602,"train_dynamics_causal.py",2774,0,"",python,selection_mouse +5144,11084625,"train_dynamics_causal.py",2773,0,"",python,selection_command +5145,11085325,"train_dynamics_causal.py",2774,0,"outputs[""video_tokens""][:, :, 1:]",python,content +5146,11085355,"train_dynamics_causal.py",2806,0,"",python,selection_command +5147,11085985,"train_dynamics_causal.py",2837,0,"",python,selection_command +5148,11086481,"train_dynamics_causal.py",2808,62,"",python,content +5149,11086541,"train_dynamics_causal.py",2812,0,"",python,selection_command +5150,11087832,"train_dynamics_causal.py",2829,0,"",python,selection_mouse +5151,11088408,"train_dynamics_causal.py",2824,0,"",python,selection_mouse +5152,11089219,"train_dynamics_causal.py",2767,0,"",python,selection_mouse +5153,11089720,"train_dynamics_causal.py",2814,0,"",python,selection_mouse +5154,11089868,"train_dynamics_causal.py",2812,4,"mask",python,selection_mouse +5155,11093016,"train_dynamics_causal.py",2629,0,"",python,selection_mouse +5156,11095486,"train_dynamics_causal.py",2643,0,"",python,selection_mouse +5157,11095671,"train_dynamics_causal.py",2642,7,"outputs",python,selection_mouse +5158,11096133,"train_dynamics_causal.py",2617,0,"",python,selection_mouse +5159,11096293,"train_dynamics_causal.py",2615,4,"mask",python,selection_mouse +5160,11098472,"train_dynamics_causal.py",2615,4,"",python,content +5161,11100161,"train_dynamics_causal.py",2611,23,"",python,content +5162,11100238,"train_dynamics_causal.py",2615,0,"",python,selection_command 
+5163,11101044,"train_dynamics_causal.py",2689,0,"",python,selection_command +5164,11101220,"train_dynamics_causal.py",2737,0,"",python,selection_command +5165,11101381,"train_dynamics_causal.py",2785,0,"",python,selection_command +5166,11101489,"train_dynamics_causal.py",2823,0,"",python,selection_command +5167,11101881,"train_dynamics_causal.py",2785,0,"",python,selection_command +5168,11102268,"train_dynamics_causal.py",2781,38,"",python,content +5169,11102354,"train_dynamics_causal.py",2785,0,"",python,selection_command +5170,11102355,"train_dynamics_causal.py",2737,0,"",python,selection_command +5171,11102462,"train_dynamics_causal.py",2689,0,"",python,selection_command +5172,11102616,"train_dynamics_causal.py",2615,0,"",python,selection_command +5173,11102740,"train_dynamics_causal.py",2609,0,"",python,selection_command +5174,11102946,"train_dynamics_causal.py",2610,0,"\n mask = outputs[""mask""][:, :, 1:] ",python,content +5175,11103013,"train_dynamics_causal.py",2615,0,"",python,selection_command +5176,11104678,"train_dynamics_causal.py",2611,38,"",python,content +5177,11104734,"train_dynamics_causal.py",2615,0,"",python,selection_command +5178,11105001,"train_dynamics_causal.py",2684,0,"\n mask = outputs[""mask""][:, :, 1:] ",python,content +5179,11105011,"train_dynamics_causal.py",2689,0,"",python,selection_command +5180,11106854,"train_dynamics_causal.py",2685,38,"",python,content +5181,11106896,"train_dynamics_causal.py",2689,0,"",python,selection_command +5182,11106977,"train_dynamics_causal.py",2737,0,"",python,selection_command +5183,11107359,"train_dynamics_causal.py",2780,0,"\n mask = outputs[""mask""][:, :, 1:] ",python,content +5184,11107408,"train_dynamics_causal.py",2785,0,"",python,selection_command +5185,11108618,"train_dynamics_causal.py",2737,0,"",python,selection_command +5186,11108803,"train_dynamics_causal.py",2689,0,"",python,selection_command +5187,11117282,"train_dynamics_causal.py",3629,0,"",python,selection_mouse +5188,11117296,"train_dynamics_causal.py",3628,0,"",python,selection_command +5189,11122154,"train_dynamics_causal.py",2763,0,"",python,selection_mouse +5190,11122287,"train_dynamics_causal.py",2756,12,"video_tokens",python,selection_mouse +5191,11215916,"train_dynamics_causal.py",4201,0,"",python,selection_mouse +5192,11215946,"train_dynamics_causal.py",4200,0,"",python,selection_command +5193,11216636,"train_dynamics_causal.py",3584,0,"",python,selection_mouse +5194,11333136,"train_dynamics_causal.py",3844,0,"",python,selection_mouse +5195,11333145,"train_dynamics_causal.py",3843,0,"",python,selection_command +5196,11582724,"TERMINAL",0,0,"bash",,terminal_focus +5197,11585566,"TERMINAL",0,0,"srun",,terminal_focus +5198,11588214,"TERMINAL",0,0,"[?25lg[?25h",,terminal_output +5199,11588276,"TERMINAL",0,0,"[?25li[?25h",,terminal_output +5200,11588338,"TERMINAL",0,0,"[?25lt[?25h",,terminal_output +5201,11588509,"TERMINAL",0,0,"[?25l [?25h",,terminal_output +5202,11588642,"TERMINAL",0,0,"[?25ls[?25h",,terminal_output +5203,11588703,"TERMINAL",0,0,"[?25lt[?25h",,terminal_output +5204,11588765,"TERMINAL",0,0,"[?25la[?25h",,terminal_output +5205,11588953,"TERMINAL",0,0,"[?25lt[?25h",,terminal_output +5206,11589014,"TERMINAL",0,0,"[?25lu[?25h",,terminal_output +5207,11589074,"TERMINAL",0,0,"[?25ls[?25h",,terminal_output +5208,11589283,"TERMINAL",0,0,"\r\n[?2004l\r",,terminal_output +5209,11589432,"TERMINAL",0,0,"On branch causal-transformer-dynamics-model\r\nChanges not staged for commit:\r\n (use ""git add/rm ..."" to update what will be committed)\r\n 
(use ""git restore ..."" to discard changes in working directory)\r\n\tmodified: models/dynamics.py\r\n\tdeleted: train_dynamics.py\r\n\r\nUntracked files:\r\n (use ""git add ..."" to include in what will be committed)\r\n\tlogs/\r\n\tread_tf_record.py\r\n\trequirements-franz.txt\r\n\tscripts_cremers/\r\n\tscripts_horeka/\r\n\tslurm/\r\n\ttrain_dynamics_causal.py\r\n\ttrain_dynamics_maskgit.py\r\n\tutils/logger_bak.py\r\n\tutils/visualizer.py\r\n\tweekend-job-requeuer.sh\r\n\tweekend-job-starter.sh\r\n\r\nno changes added to commit (use ""git add"" and/or ""git commit -a"")\r\n]0;tum_cte0515@hkn0710:~/Projects/jafar[?2004h(jafar) [tum_cte0515@hkn0710 jafar]$ ",,terminal_output +5210,11592108,"train_dynamics_maskgit.py",0,0,"",python,tab +5211,11593162,"train_dynamics_maskgit.py",2750,0,"",python,selection_mouse +5212,11595529,"TERMINAL",0,0,"[?25lg[?25h",,terminal_output +5213,11595682,"TERMINAL",0,0,"[?25li[?25h",,terminal_output +5214,11595810,"TERMINAL",0,0,"[?25lt[?25h",,terminal_output +5215,11595915,"TERMINAL",0,0,"[?25l [?25h",,terminal_output +5216,11596186,"TERMINAL",0,0,"[?25ld[?25h",,terminal_output +5217,11597809,"TERMINAL",0,0,"[?25ld[?25h[?25li[?25h",,terminal_output +5218,11598438,"TERMINAL",0,0,"[?25lf[?25h",,terminal_output +5219,11598590,"TERMINAL",0,0,"[?25lf[?25h",,terminal_output +5220,11598651,"TERMINAL",0,0,"[?25l [?25h",,terminal_output +5221,11599484,"TERMINAL",0,0,"[?25ld[?25h",,terminal_output +5222,11599715,"TERMINAL",0,0,"[?25ly[?25h",,terminal_output +5223,11599828,"TERMINAL",0,0,"[?25ln[?25h",,terminal_output +5224,11600387,"TERMINAL",0,0,"",,terminal_output +5225,11601600,"TERMINAL",0,0,"[?25lT[?25h",,terminal_output +5226,11601676,"TERMINAL",0,0,"[?25lR[?25h",,terminal_output +5227,11601807,"TERMINAL",0,0,"",,terminal_output +5228,11603346,"TERMINAL",0,0,"[?25lt[?25h",,terminal_output +5229,11603408,"TERMINAL",0,0,"[?25lr[?25h",,terminal_output +5230,11603590,"TERMINAL",0,0,"ain_",,terminal_output +5231,11604308,"TERMINAL",0,0,"[?25ld[?25h",,terminal_output +5232,11604543,"TERMINAL",0,0,"ynamics_",,terminal_output +5233,11606829,"TERMINAL",0,0,"[?25lm[?25h",,terminal_output +5234,11606936,"TERMINAL",0,0,"askgit.py ",,terminal_output +5235,11607833,"TERMINAL",0,0,"[?25lt[?25h",,terminal_output +5236,11608279,"TERMINAL",0,0,"",,terminal_output +5237,11609108,"TERMINAL",0,0,"[?25lr[?25h",,terminal_output +5238,11609319,"TERMINAL",0,0,"ain_",,terminal_output +5239,11609654,"TERMINAL",0,0,"[?25ld[?25h",,terminal_output +5240,11609806,"TERMINAL",0,0,"ynamics_",,terminal_output +5241,11610374,"TERMINAL",0,0,"[?25lc[?25h",,terminal_output +5242,11610560,"TERMINAL",0,0,"ausal.py ",,terminal_output +5243,11611316,"TERMINAL",0,0,"[?25l>[?25h",,terminal_output +5244,11611489,"TERMINAL",0,0,"[?25l [?25h",,terminal_output +5245,11612101,"TERMINAL",0,0,"[?25ld[?25h",,terminal_output +5246,11612161,"TERMINAL",0,0,"[?25li[?25h",,terminal_output +5247,11612476,"TERMINAL",0,0,"[?25lf[?25h",,terminal_output +5248,11612716,"TERMINAL",0,0,"[?25lf[?25h",,terminal_output +5249,11612979,"TERMINAL",0,0,"[?25l.[?25h",,terminal_output +5250,11614339,"TERMINAL",0,0,"[?25ll[?25h",,terminal_output +5251,11614523,"TERMINAL",0,0,"[?25lo[?25h",,terminal_output +5252,11614584,"TERMINAL",0,0,"[?25lg[?25h",,terminal_output +5253,11615068,"TERMINAL",0,0,"\r\n[?2004l\r]0;tum_cte0515@hkn0710:~/Projects/jafar[?2004h(jafar) [tum_cte0515@hkn0710 jafar]$ ",,terminal_output +5254,11624271,"diff.log",0,0,"71,72c71,72\n< name: str = ""train_dynamics_maskgit""\n< tags: list[str] = 
field(default_factory=lambda: [""dynamics"", ""maskgit""])\n---\n> name: str = ""train_dynamics_causal""\n> tags: list[str] = field(default_factory=lambda: [""dynamics"", ""causal""])\n94d93\n< mask = outputs[""mask""]\n95a95,97\n> logits = outputs[""token_logits""][:, :, :-1]\n> targets = outputs[""video_tokens""][:, :, 1:]\n> mask = outputs[""mask""][:, :, 1:] \n97c99\n< outputs[""token_logits""], outputs[""video_tokens""]\n---\n> logits, targets\n100c102\n< acc = outputs[""token_logits""].argmax(-1) == outputs[""video_tokens""]\n---\n> acc = logits.argmax(-1) == targets\n102c104\n< select_probs = jax.nn.softmax(outputs[""token_logits""])\n---\n> select_probs = jax.nn.softmax(logits)\n118c120\n< select_logit=outputs[""token_logits""].max(-1).mean(),\n---\n> select_logit=logits.max(-1).mean(),\n",log,tab +5255,11625933,"diff.log",301,0,"",log,selection_mouse +5256,11625934,"diff.log",300,0,"",log,selection_command +5257,11626418,"diff.log",89,0,"",log,selection_mouse +5258,11722807,"diff.log",838,0,"",log,selection_mouse +5259,11723343,"diff.log",855,0,"",log,selection_mouse +5260,11723360,"diff.log",854,0,"",log,selection_command +5261,11723474,"diff.log",855,0,"",log,selection_mouse +5262,11723478,"diff.log",854,0,"",log,selection_command +5263,11723631,"diff.log",852,4,"---\n",log,selection_mouse +5264,11723644,"diff.log",853,3,"--\n",log,selection_command +5265,11724118,"diff.log",901,0,"",log,selection_mouse +5266,11724147,"diff.log",900,0,"",log,selection_command +5267,11724250,"diff.log",901,0,"",log,selection_mouse +5268,11724265,"diff.log",900,0,"",log,selection_command +5269,11724684,"diff.log",901,0,"",log,selection_mouse +5270,11724685,"diff.log",900,0,"",log,selection_command +5271,11724998,"diff.log",901,0,"",log,selection_mouse +5272,11725003,"diff.log",900,0,"",log,selection_command +5273,11725326,"diff.log",901,0,"",log,selection_mouse +5274,11725328,"diff.log",900,0,"",log,selection_command +5275,11725713,"diff.log",856,46,"> select_logit=logits.max(-1).mean(),\n",log,selection_mouse +5276,11725715,"diff.log",857,45," select_logit=logits.max(-1).mean(),\n",log,selection_command +5277,11725856,"diff.log",901,0,"",log,selection_mouse +5278,11725872,"diff.log",900,0,"",log,selection_command +5279,11726198,"diff.log",901,0,"",log,selection_mouse +5280,11726213,"diff.log",900,0,"",log,selection_command +5281,11726370,"diff.log",856,46,"> select_logit=logits.max(-1).mean(),\n",log,selection_mouse +5282,11726386,"diff.log",857,45," select_logit=logits.max(-1).mean(),\n",log,selection_command +5283,11729145,"diff.log",901,0,"",log,selection_mouse +5284,11729153,"diff.log",900,0,"",log,selection_command +5285,11729541,"diff.log",901,0,"",log,selection_mouse +5286,11729556,"diff.log",900,0,"",log,selection_command +5287,11729708,"diff.log",901,0,"",log,selection_mouse +5288,11729715,"diff.log",900,0,"",log,selection_command +5289,11729946,"diff.log",901,0,"",log,selection_mouse +5290,11729947,"diff.log",900,0,"",log,selection_command +5291,11730339,"diff.log",901,0,"",log,selection_mouse +5292,11730341,"diff.log",900,0,"",log,selection_command +5293,11730487,"diff.log",856,46,"> select_logit=logits.max(-1).mean(),\n",log,selection_mouse +5294,11730491,"diff.log",857,45," select_logit=logits.max(-1).mean(),\n",log,selection_command +5295,11730666,"diff.log",901,0,"",log,selection_mouse +5296,11730700,"diff.log",900,0,"",log,selection_command +5297,11731046,"diff.log",901,0,"",log,selection_mouse +5298,11731049,"diff.log",900,0,"",log,selection_command 
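The diff.log capture at sequence 5254 records the complete loss-side delta between train_dynamics_maskgit.py and train_dynamics_causal.py: besides the run name and tags, the cross entropy, accuracy, and logged logit/probability statistics all switch from the raw `outputs[...]` lookups to the shifted `logits`/`targets` aliases. A runnable reconstruction of the resulting masked loss, assembled from those hunks and the deleted train_dynamics.py that appears later in the capture (treat it as a sketch, not the exact file):

```python
import jax
import jax.numpy as jnp
import optax

def causal_dynamics_loss(outputs: dict):
    # Shifted aliases introduced by the recorded refactor.
    logits = outputs["token_logits"][:, :, :-1].astype(jnp.float32)
    targets = outputs["video_tokens"][:, :, 1:]
    mask = outputs["mask"][:, :, 1:]

    # Masked mean cross entropy over the valid, shifted positions.
    ce = optax.softmax_cross_entropy_with_integer_labels(logits, targets)
    ce_loss = (mask * ce).sum() / mask.sum()

    # Masked next-token accuracy plus the logged logit statistics.
    acc = (mask * (logits.argmax(-1) == targets)).sum() / mask.sum()
    select_probs = jax.nn.softmax(logits)
    metrics = dict(
        cross_entropy_loss=ce_loss,
        masked_token_accuracy=acc,
        select_logit=logits.max(-1).mean(),
        select_p=select_probs.max(-1).mean(),
    )
    return ce_loss, metrics
```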
+5299,11731204,"diff.log",901,0,"",log,selection_mouse +5300,11731211,"diff.log",900,0,"",log,selection_command +5301,11731449,"diff.log",901,0,"",log,selection_mouse +5302,11731461,"diff.log",900,0,"",log,selection_command +5303,11731797,"diff.log",901,0,"",log,selection_mouse +5304,11731812,"diff.log",900,0,"",log,selection_command +5305,11731963,"diff.log",856,46,"> select_logit=logits.max(-1).mean(),\n",log,selection_mouse +5306,11731965,"diff.log",857,45," select_logit=logits.max(-1).mean(),\n",log,selection_command +5307,11732154,"diff.log",901,0,"",log,selection_mouse +5308,11732162,"diff.log",900,0,"",log,selection_command +5309,11732464,"diff.log",901,0,"",log,selection_mouse +5310,11732476,"diff.log",900,0,"",log,selection_command +5311,11732612,"diff.log",856,46,"> select_logit=logits.max(-1).mean(),\n",log,selection_mouse +5312,11732640,"diff.log",857,45," select_logit=logits.max(-1).mean(),\n",log,selection_command +5313,11732781,"diff.log",901,0,"",log,selection_mouse +5314,11732811,"diff.log",900,0,"",log,selection_command +5315,11733114,"diff.log",901,0,"",log,selection_mouse +5316,11733127,"diff.log",900,0,"",log,selection_command +5317,11733265,"diff.log",856,46,"> select_logit=logits.max(-1).mean(),\n",log,selection_mouse +5318,11733279,"diff.log",857,45," select_logit=logits.max(-1).mean(),\n",log,selection_command +5319,11733877,"diff.log",901,0,"",log,selection_mouse +5320,11733882,"diff.log",900,0,"",log,selection_command +5321,11734689,"train_dynamics_causal.py",0,0,"",python,tab +5322,11734689,"train_dynamics_causal.py",4153,0,"",python,selection_mouse +5323,11734723,"train_dynamics_causal.py",4152,0,"",python,selection_command +5324,11735275,"train_dynamics_causal.py",4022,0,"",python,selection_mouse +5325,11735294,"train_dynamics_causal.py",4021,0,"",python,selection_command +5326,11735856,"train_dynamics_causal.py",4041,0,"",python,selection_mouse +5327,11735858,"train_dynamics_causal.py",4040,0,"",python,selection_command +5328,11736852,"train_dynamics_causal.py",4041,0,"",python,selection_mouse +5329,11736853,"train_dynamics_causal.py",4040,0,"",python,selection_command +5330,11737508,"train_dynamics_causal.py",4022,0,"",python,selection_mouse +5331,11737508,"train_dynamics_causal.py",4021,0,"",python,selection_command +5332,11737976,"train_dynamics_causal.py",4041,0,"",python,selection_mouse +5333,11737976,"train_dynamics_causal.py",4040,0,"",python,selection_command +5334,11738507,"train_dynamics_causal.py",4022,0,"",python,selection_mouse +5335,11738508,"train_dynamics_causal.py",4021,0,"",python,selection_command +5336,11739031,"train_dynamics_causal.py",4041,0,"",python,selection_mouse +5337,11739044,"train_dynamics_causal.py",4040,0,"",python,selection_command +5338,11739572,"train_dynamics_causal.py",4022,0,"",python,selection_mouse +5339,11739581,"train_dynamics_causal.py",4021,0,"",python,selection_command +5340,11740133,"train_dynamics_causal.py",4088,0,"",python,selection_mouse +5341,11740136,"train_dynamics_causal.py",4087,0,"",python,selection_command +5342,11740712,"train_dynamics_causal.py",4022,0,"",python,selection_mouse +5343,11740721,"train_dynamics_causal.py",4021,0,"",python,selection_command +5344,11741266,"train_dynamics_causal.py",4022,0,"",python,selection_mouse +5345,11741271,"train_dynamics_causal.py",4021,0,"",python,selection_command +5346,11741920,"train_dynamics_causal.py",4088,0,"",python,selection_mouse +5347,11741928,"train_dynamics_causal.py",4087,0,"",python,selection_command 
+5348,11742502,"train_dynamics_causal.py",4022,0,"",python,selection_mouse +5349,11742519,"train_dynamics_causal.py",4021,0,"",python,selection_command +5350,11743010,"train_dynamics_causal.py",4088,0,"",python,selection_mouse +5351,11743032,"train_dynamics_causal.py",4087,0,"",python,selection_command +5352,11743764,"train_dynamics_causal.py",3733,0,"",python,selection_mouse +5353,11745994,"diff.log",0,0,"",log,tab +5354,11745997,"diff.log",902,0,"",log,selection_mouse +5355,11746684,"diff.log",844,0,"",log,selection_mouse +5356,11747227,"diff.log",901,0,"",log,selection_mouse +5357,11747229,"diff.log",900,0,"",log,selection_command +5358,11954078,"diff.log",902,0,"",log,selection_mouse +5359,11957673,"TERMINAL",0,0,"[?25lg[?25h",,terminal_output +5360,11957742,"TERMINAL",0,0,"[?25li[?25h",,terminal_output +5361,11957845,"TERMINAL",0,0,"[?25lt[?25h",,terminal_output +5362,11957906,"TERMINAL",0,0,"[?25l [?25h",,terminal_output +5363,11958132,"TERMINAL",0,0,"[?25ld[?25h",,terminal_output +5364,11958193,"TERMINAL",0,0,"[?25li[?25h",,terminal_output +5365,11958335,"TERMINAL",0,0,"[?25lf[?25h",,terminal_output +5366,11958507,"TERMINAL",0,0,"[?25lf[?25h[?25l [?25h",,terminal_output +5367,11958689,"TERMINAL",0,0,"[?25lm[?25h",,terminal_output +5368,11958803,"TERMINAL",0,0,"[?25la[?25h[?25li[?25h",,terminal_output +5369,11958864,"TERMINAL",0,0,"[?25ln[?25h",,terminal_output +5370,11958976,"TERMINAL",0,0,"[?25l [?25h",,terminal_output +5371,11959372,"TERMINAL",0,0,"[?25l>[?25h",,terminal_output +5372,11959559,"TERMINAL",0,0,"[?25l [?25h",,terminal_output +5373,11961197,"TERMINAL",0,0,"[?25ld[?25h",,terminal_output +5374,11961399,"TERMINAL",0,0,"[?25li[?25h",,terminal_output +5375,11961463,"TERMINAL",0,0,"[?25lf[?25h",,terminal_output +5376,11961681,"TERMINAL",0,0,"[?25lf[?25h",,terminal_output +5377,11962596,"TERMINAL",0,0,"[?25l.[?25h",,terminal_output +5378,11962806,"TERMINAL",0,0,"[?25ll[?25h",,terminal_output +5379,11962872,"TERMINAL",0,0,"[?25lo[?25h",,terminal_output +5380,11962933,"TERMINAL",0,0,"[?25lg[?25h",,terminal_output +5381,11963189,"TERMINAL",0,0,"\r\n[?2004l\r]0;tum_cte0515@hkn0710:~/Projects/jafar[?2004h(jafar) [tum_cte0515@hkn0710 jafar]$ ",,terminal_output +5382,11965579,"diff.log",0,0,"",log,tab +5383,11965791,"diff.log",0,902,"diff --git a/genie.py b/genie.py\nindex 0e66676..8186d03 100644\n--- a/genie.py\n+++ b/genie.py\n@@ -7,7 +7,7 @@ import flax.linen as nn\n from flax.training.train_state import TrainState\n import orbax.checkpoint as ocp\n \n-from models.dynamics import DynamicsMaskGIT\n+from models.dynamics import DynamicsMaskGIT, DynamicsAutoregressive\n from models.lam import LatentActionModel\n from models.tokenizer import TokenizerVQVAE\n \n@@ -38,6 +38,7 @@ class Genie(nn.Module):\n dyna_dim: int\n dyna_num_blocks: int\n dyna_num_heads: int\n+ use_maskgit: bool\n param_dtype: jnp.dtype\n dtype: jnp.dtype\n dropout: float = 0.0\n@@ -70,16 +71,28 @@ class Genie(nn.Module):\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n )\n- self.dynamics = DynamicsMaskGIT(\n- model_dim=self.dyna_dim,\n- num_latents=self.num_patch_latents,\n- num_blocks=self.dyna_num_blocks,\n- num_heads=self.dyna_num_heads,\n- dropout=self.dropout,\n- mask_limit=self.mask_limit,\n- param_dtype=self.param_dtype,\n- dtype=self.dtype,\n- )\n+\n+ if self.use_maskgit:\n+ self.dynamics = DynamicsMaskGIT(\n+ model_dim=self.dyna_dim,\n+ num_latents=self.num_patch_latents,\n+ num_blocks=self.dyna_num_blocks,\n+ num_heads=self.dyna_num_heads,\n+ dropout=self.dropout,\n+ mask_limit=self.mask_limit,\n+ 
param_dtype=self.param_dtype,\n+ dtype=self.dtype,\n+ ) \n+ else:\n+ self.dynamics = DynamicsAutoregressive(\n+ model_dim=self.dyna_dim,\n+ num_latents=self.num_patch_latents,\n+ num_blocks=self.dyna_num_blocks,\n+ num_heads=self.dyna_num_heads,\n+ dropout=self.dropout,\n+ param_dtype=self.param_dtype,\n+ dtype=self.dtype,\n+ )\n \n def __call__(self, batch: Dict[str, Any], training: bool = True) -> Dict[str, Any]:\n tokenizer_outputs = self.tokenizer.vq_encode(batch[""videos""], training=False)\ndiff --git a/models/dynamics.py b/models/dynamics.py\nindex 8b183dc..76af7a1 100644\n--- a/models/dynamics.py\n+++ b/models/dynamics.py\n@@ -28,6 +28,7 @@ class DynamicsMaskGIT(nn.Module):\n self.dropout,\n self.param_dtype,\n self.dtype,\n+ spacial_bert=True,\n )\n self.patch_embed = nn.Embed(self.num_latents, self.model_dim)\n self.mask_token = self.param(\n@@ -58,3 +59,41 @@ class DynamicsMaskGIT(nn.Module):\n vid_embed += jnp.pad(act_embed, ((0, 0), (1, 0), (0, 0), (0, 0)))\n logits = self.dynamics(vid_embed)\n return dict(token_logits=logits, mask=mask)\n+\n+\n+class DynamicsAutoregressive(nn.Module):\n+ """"""Autoregressive (causal) dynamics model""""""\n+\n+ model_dim: int\n+ num_latents: int\n+ num_blocks: int\n+ num_heads: int\n+ dropout: float\n+ param_dtype: jnp.dtype\n+ dtype: jnp.dtype\n+\n+ def setup(self):\n+ self.dynamics = STTransformer(\n+ self.model_dim,\n+ self.num_latents,\n+ self.num_blocks,\n+ self.num_heads,\n+ self.dropout,\n+ self.param_dtype,\n+ self.dtype,\n+ spacial_bert=False,\n+ )\n+ self.patch_embed = nn.Embed(self.num_latents, self.model_dim)\n+ self.action_up = nn.Dense(\n+ self.model_dim,\n+ param_dtype=self.param_dtype,\n+ dtype=self.dtype,\n+ )\n+\n+ def __call__(self, batch: Dict[str, Any], training: bool = True) -> Dict[str, Any]:\n+ vid_embed = self.patch_embed(batch[""video_tokens""])\n+ act_embed = self.action_up(batch[""latent_actions""])\n+ vid_embed += jnp.pad(act_embed, ((0, 0), (1, 0), (0, 0), (0, 0)))\n+ logits = self.dynamics(vid_embed)\n+ mask = jnp.ones(vid_embed.shape[:-1])\n+ return dict(token_logits=logits, mask=mask)\n\ No newline at end of file\ndiff --git a/train_dynamics.py b/train_dynamics.py\ndeleted file mode 100644\nindex a8e6a2a..0000000\n--- a/train_dynamics.py\n+++ /dev/null\n@@ -1,373 +0,0 @@\n-from dataclasses import dataclass, field\n-import os\n-\n-import einops\n-from flax.training.train_state import TrainState\n-from jax.sharding import Mesh, PartitionSpec, NamedSharding\n-from jax.experimental.mesh_utils import create_device_mesh\n-import optax\n-import orbax.checkpoint as ocp\n-import numpy as np\n-import dm_pix as pix\n-import jax\n-import jax.numpy as jnp\n-import tyro\n-import wandb\n-import grain\n-\n-from genie import Genie, restore_genie_components\n-from utils.dataloader import get_dataloader\n-from utils.lr_utils import get_lr_schedule\n-from utils.parameter_utils import count_parameters_by_component\n-\n-@dataclass\n-class Args:\n- # Experiment\n- num_steps: int = 200_000\n- seed: int = 0\n- seq_len: int = 16\n- image_channels: int = 3\n- image_height: int = 90\n- image_width: int = 160\n- data_dir: str = """"\n- save_ckpt: bool = False\n- restore_ckpt: bool = False\n- # Optimization\n- batch_size: int = 36\n- init_lr: float = 0.0\n- max_lr: float = 3e-5\n- decay_end: float = 0.0\n- wsd_decay_steps: int = 10000 # NOTE: wsd_decay_steps will only be used when using a wsd-schedule\n- warmup_steps: int = 5000\n- lr_schedule : str = ""wsd"" # supported options: wsd, cos\n- # Tokenizer\n- tokenizer_dim: int = 512\n- 
latent_patch_dim: int = 32\n- num_patch_latents: int = 1024\n- patch_size: int = 4\n- tokenizer_num_blocks: int = 8\n- tokenizer_num_heads: int = 8\n- tokenizer_checkpoint: str = """"\n- # LAM\n- lam_dim: int = 512\n- latent_action_dim: int = 32\n- num_latent_actions: int = 6\n- lam_patch_size: int = 16\n- lam_num_blocks: int = 8\n- lam_num_heads: int = 8\n- lam_checkpoint: str = """"\n- # Dynamics\n- dyna_dim: int = 512\n- dyna_num_blocks: int = 12\n- dyna_num_heads: int = 8\n- dropout: float = 0.0\n- mask_limit: float = 0.5\n- param_dtype: jnp.dtype = jnp.float32\n- dtype: jnp.dtype = jnp.bfloat16\n- # Logging\n- log: bool = False\n- entity: str = """"\n- project: str = """"\n- name: str = ""train_dynamics""\n- tags: list[str] = field(default_factory=lambda: [""dynamics""])\n- log_interval: int = 5\n- log_image_interval: int = 250\n- ckpt_dir: str = """"\n- log_checkpoint_interval: int = 25000\n- log_checkpoint_keep_period: int = 20000\n- log_gradients: bool = False\n- wandb_id: str = """"\n-\n-\n-args = tyro.cli(Args)\n-\n-\n-def dynamics_loss_fn(params, state, inputs):\n- """"""Compute masked dynamics loss""""""\n- inputs[""videos""] = inputs[""videos""].astype(args.dtype) / 255.0\n- outputs = state.apply_fn(\n- params,\n- inputs,\n- training=True,\n- rngs={""params"": inputs[""rng""], ""dropout"": inputs[""dropout_rng""]},\n- )\n- mask = outputs[""mask""]\n- outputs[""token_logits""] = outputs[""token_logits""].astype(jnp.float32)\n- ce_loss = optax.softmax_cross_entropy_with_integer_labels(\n- outputs[""token_logits""], outputs[""video_tokens""]\n- )\n- ce_loss = (mask * ce_loss).sum() / mask.sum()\n- acc = outputs[""token_logits""].argmax(-1) == outputs[""video_tokens""]\n- acc = (mask * acc).sum() / mask.sum()\n- select_probs = jax.nn.softmax(outputs[""token_logits""])\n- gt = inputs[""videos""].clip(0, 1).reshape(-1, *inputs[""videos""].shape[2:])\n- recon = outputs[""recon""].clip(0, 1).reshape(-1, *outputs[""recon""].shape[2:])\n- psnr = pix.psnr(gt, recon).mean() # type: ignore\n- ssim = pix.ssim(gt, recon).mean() # type: ignore\n- _, index_counts_lam = jnp.unique_counts(\n- jnp.ravel(outputs[""lam_indices""]), size=args.num_latent_actions, fill_value=0\n- )\n- _, index_counts_tokenizer = jnp.unique_counts(\n- jnp.ravel(outputs[""video_tokens""]), size=args.num_patch_latents, fill_value=0\n- )\n- codebook_usage_lam = (index_counts_lam != 0).mean()\n- codebook_usage_tokenizer = (index_counts_tokenizer != 0).mean()\n- metrics = dict(\n- cross_entropy_loss=ce_loss,\n- masked_token_accuracy=acc,\n- select_logit=outputs[""token_logits""].max(-1).mean(),\n- select_p=select_probs.max(-1).mean(),\n- entropy=jax.scipy.special.entr(select_probs).sum(-1).mean(),\n- psnr=psnr,\n- ssim=ssim,\n- codebook_usage_lam=codebook_usage_lam,\n- codebook_usage_tokenizer=codebook_usage_tokenizer,\n- )\n- return ce_loss, (outputs[""recon""], metrics)\n-\n-\n-@jax.jit\n-def train_step(state, inputs):\n- """"""Update state and compute metrics""""""\n- grad_fn = jax.value_and_grad(dynamics_loss_fn, has_aux=True, allow_int=True)\n- (loss, (recon, metrics)), grads = grad_fn(state.params, state, inputs)\n- state = state.apply_gradients(grads=grads)\n- if args.log_gradients:\n- metrics[""gradients_std/""] = jax.tree.map(\n- lambda x: x.std(), grads[""params""][""dynamics""]\n- )\n- return state, loss, recon, metrics\n-\n-\n-if __name__ == ""__main__"":\n- jax.distributed.initialize()\n- num_devices = jax.device_count()\n- if num_devices == 0:\n- raise ValueError(""No JAX devices found."")\n- print(f""Running 
on {num_devices} devices."")\n-\n- if args.batch_size % num_devices != 0:\n- raise ValueError(\n- f""Global batch size {args.batch_size} must be divisible by ""\n- f""number of devices {num_devices}.""\n- )\n-\n- per_device_batch_size_for_init = args.batch_size // num_devices\n-\n- rng = jax.random.PRNGKey(args.seed)\n-\n- # --- Initialize model ---\n- genie = Genie(\n- # Tokenizer\n- in_dim=args.image_channels,\n- tokenizer_dim=args.tokenizer_dim,\n- latent_patch_dim=args.latent_patch_dim,\n- num_patch_latents=args.num_patch_latents,\n- patch_size=args.patch_size,\n- tokenizer_num_blocks=args.tokenizer_num_blocks,\n- tokenizer_num_heads=args.tokenizer_num_heads,\n- # LAM\n- lam_dim=args.lam_dim,\n- latent_action_dim=args.latent_action_dim,\n- num_latent_actions=args.num_latent_actions,\n- lam_patch_size=args.lam_patch_size,\n- lam_num_blocks=args.lam_num_blocks,\n- lam_num_heads=args.lam_num_heads,\n- lam_co_train=not args.lam_checkpoint,\n- # Dynamics\n- dyna_dim=args.dyna_dim,\n- dyna_num_blocks=args.dyna_num_blocks,\n- dyna_num_heads=args.dyna_num_heads,\n- dropout=args.dropout,\n- mask_limit=args.mask_limit,\n- param_dtype=args.param_dtype,\n- dtype=args.dtype,\n- )\n- rng, _rng = jax.random.split(rng)\n- image_shape = (args.image_height, args.image_width, args.image_channels)\n- dummy_inputs = dict(\n- videos=jnp.zeros(\n- (per_device_batch_size_for_init, args.seq_len, *image_shape),\n- dtype=args.dtype,\n- ),\n- action=jnp.zeros(\n- (per_device_batch_size_for_init, args.seq_len), dtype=args.dtype\n- ),\n- mask_rng=_rng,\n- )\n- rng, _rng = jax.random.split(rng)\n- init_params = genie.init(_rng, dummy_inputs)\n-\n- param_counts = count_parameters_by_component(init_params)\n-\n- if args.log and jax.process_index() == 0:\n- wandb_init_kwargs = {\n- ""entity"": args.entity,\n- ""project"": args.project,\n- ""name"": args.name,\n- ""tags"": args.tags,\n- ""group"": ""debug"",\n- ""config"": args,\n- }\n-\n- if args.wandb_id:\n- wandb_init_kwargs.update(\n- {\n- ""id"": args.wandb_id,\n- ""resume"": ""allow"",\n- }\n- )\n- wandb.init(**wandb_init_kwargs)\n-\n- wandb.config.update({""model_param_count"": param_counts})\n-\n- print(""Parameter counts:"")\n- print(param_counts)\n-\n- # --- Initialize optimizer ---\n- lr_schedule = get_lr_schedule(args.lr_schedule, \n- args.init_lr, \n- args.max_lr, \n- args.decay_end, \n- args.num_steps, \n- args.warmup_steps, \n- args.wsd_decay_steps)\n- tx = optax.adamw(learning_rate=lr_schedule, b1=0.9, b2=0.9, weight_decay=1e-4, mu_dtype=args.dtype)\n- train_state = TrainState.create(apply_fn=genie.apply, params=init_params, tx=tx)\n-\n- device_mesh_arr = create_device_mesh((num_devices,))\n- mesh = Mesh(devices=device_mesh_arr, axis_names=(""data"",))\n-\n- replicated_sharding = NamedSharding(mesh, PartitionSpec())\n- videos_sharding = NamedSharding(mesh, PartitionSpec(""data"", None, None, None, None))\n- train_state = jax.device_put(train_state, replicated_sharding)\n-\n- # --- Initialize checkpoint manager ---\n- step = 0\n- handler_registry = ocp.handlers.DefaultCheckpointHandlerRegistry()\n- handler_registry.add(\n- ""model_state"", ocp.args.StandardSave, ocp.handlers.StandardCheckpointHandler\n- )\n- handler_registry.add(\n- ""model_state"", ocp.args.StandardRestore, ocp.handlers.StandardCheckpointHandler\n- )\n- handler_registry.add(""dataloader_state"", grain.checkpoint.CheckpointSave, grain.checkpoint.CheckpointHandler) # type: ignore\n- handler_registry.add(""dataloader_state"", grain.checkpoint.CheckpointRestore, 
grain.checkpoint.CheckpointHandler) # type: ignore\n-\n- checkpoint_options = ocp.CheckpointManagerOptions(\n- save_interval_steps=args.log_checkpoint_interval,\n- max_to_keep=3,\n- keep_period=args.log_checkpoint_keep_period,\n- step_format_fixed_length=6,\n- cleanup_tmp_directories=True,\n- )\n-\n- checkpoint_manager = ocp.CheckpointManager(\n- args.ckpt_dir,\n- options=checkpoint_options,\n- handler_registry=handler_registry,\n- )\n-\n- # --- Create DataLoaderIterator from dataloader ---\n- array_record_files = [\n- os.path.join(args.data_dir, x)\n- for x in os.listdir(args.data_dir)\n- if x.endswith("".array_record"")\n- ]\n- grain_dataloader = get_dataloader(\n- array_record_files,\n- args.seq_len,\n- # NOTE: We deliberately pass the global batch size\n- # The dataloader shards the dataset across all processes\n- args.batch_size,\n- *image_shape,\n- num_workers=8,\n- prefetch_buffer_size=1,\n- seed=args.seed,\n- )\n- initial_state = grain_dataloader._create_initial_state()\n- grain_iterator = grain.DataLoaderIterator(grain_dataloader, initial_state)\n-\n- # --- Restore checkpoint ---\n- if args.restore_ckpt:\n- # Restore full dynamics model\n- abstract_train_state = jax.tree_util.tree_map(\n- ocp.utils.to_shape_dtype_struct, train_state\n- )\n- restored = checkpoint_manager.restore(\n- checkpoint_manager.latest_step(),\n- args=ocp.args.Composite(\n- model_state=ocp.args.StandardRestore(abstract_train_state),\n- dataloader_state=grain.checkpoint.CheckpointRestore(grain_iterator),\n- ),\n- )\n- train_state = restored[""model_state""]\n- grain_iterator = restored[""dataloader_state""]\n- step = checkpoint_manager.latest_step() or 0\n- print(f""Restored dataloader and model state from step {step}"")\n- else:\n- # Restore from pre-trained tokenizer (and LAM)\n- train_state = restore_genie_components(\n- train_state, replicated_sharding, grain_iterator, dummy_inputs, rng, args\n- )\n-\n- # --- TRAIN LOOP ---\n- dataloader = (jax.make_array_from_process_local_data(videos_sharding, elem) for elem in grain_iterator) # type: ignore\n- while step < args.num_steps:\n- for videos in dataloader:\n- # --- Train step ---\n- rng, _rng, _rng_dropout, _rng_mask = jax.random.split(rng, 4)\n-\n- inputs = dict(\n- videos=videos,\n- rng=_rng,\n- dropout_rng=_rng_dropout,\n- mask_rng=_rng_mask,\n- )\n- train_state, loss, recon, metrics = train_step(train_state, inputs)\n- metrics[""lr""] = lr_schedule(step)\n- print(f""Step {step}, loss: {loss}"")\n- step += 1\n-\n- # --- Logging ---\n- if args.log:\n- if step % args.log_interval == 0 and jax.process_index() == 0:\n- wandb.log(\n- {\n- ""loss"": loss,\n- ""step"": step,\n- **metrics,\n- }\n- )\n- if step % args.log_image_interval == 0:\n- gt_seq = inputs[""videos""][0]\n- recon_seq = recon[0].clip(0, 1)\n- comparison_seq = jnp.concatenate((gt_seq, recon_seq), axis=1)\n- comparison_seq = einops.rearrange(\n- comparison_seq * 255, ""t h w c -> h (t w) c""\n- )\n- if jax.process_index() == 0:\n- log_images = dict(\n- image=wandb.Image(np.asarray(gt_seq[args.seq_len - 1])),\n- recon=wandb.Image(np.asarray(recon_seq[args.seq_len - 1])),\n- true_vs_recon=wandb.Image(\n- np.asarray(comparison_seq.astype(np.uint8))\n- ),\n- )\n- wandb.log(log_images)\n- # --- Checkpointing ---\n- if args.save_ckpt and step % args.log_checkpoint_interval == 0:\n- checkpoint_manager.save(\n- step,\n- args=ocp.args.Composite(\n- model_state=ocp.args.StandardSave(train_state),\n- dataloader_state=grain.checkpoint.CheckpointSave(\n- grain_iterator\n- ),\n- ),\n- )\n- print(f""Saved 
checkpoint at step {step}"")\n- if step >= args.num_steps:\n- break\n-\n- checkpoint_manager.close()\ndiff --git a/utils/nn.py b/utils/nn.py\nindex b7bec9f..3b64fa0 100644\n--- a/utils/nn.py\n+++ b/utils/nn.py\n@@ -26,6 +26,112 @@ class PositionalEncoding(nn.Module):\n x = x + self.pe[: x.shape[2]]\n return x\n \n+# class STBlock2(nn.Module):\n+ # dim: int\n+ # num_heads: int\n+ # dropout: float\n+ # param_dtype: jnp.dtype\n+ # dtype: jnp.dtype\n+\n+ # @nn.remat\n+ # @nn.compact\n+ # def __call__(self, x: jax.Array) -> jax.Array:\n+ # # --- Spatial attention ---\n+ # z = PositionalEncoding(self.dim)(x)\n+ # z = nn.LayerNorm(\n+ # param_dtype=self.param_dtype,\n+ # dtype=self.dtype,\n+ # )(z)\n+ # causal_mask = jnp.tri(z.shape[-2])\n+ # z = nn.MultiHeadAttention(\n+ # num_heads=self.num_heads,\n+ # qkv_features=self.dim,\n+ # dropout_rate=self.dropout,\n+ # param_dtype=self.param_dtype,\n+ # dtype=self.dtype,\n+ # )(z, mask=causal_mask)\n+ # x = x + z\n+\n+ # # --- Temporal attention ---\n+ # x = x.swapaxes(1, 2)\n+ # z = PositionalEncoding(self.dim)(x)\n+ # z = nn.LayerNorm(\n+ # param_dtype=self.param_dtype,\n+ # dtype=self.dtype,\n+ # )(z)\n+ # causal_mask = jnp.tri(z.shape[-2])\n+ # z = nn.MultiHeadAttention(\n+ # num_heads=self.num_heads,\n+ # qkv_features=self.dim,\n+ # dropout_rate=self.dropout,\n+ # param_dtype=self.param_dtype,\n+ # dtype=self.dtype,\n+ # )(z, mask=causal_mask)\n+ # x = x + z\n+ # x = x.swapaxes(1, 2)\n+\n+ # # --- Feedforward ---\n+ # z = nn.LayerNorm(\n+ # param_dtype=self.param_dtype,\n+ # dtype=self.dtype,\n+ # )(x)\n+ # # FIXME (f.srambical): Here, the attention hidden dimension is the same as the FFN's. Usually, FFN hidden dimension is 4x model_dim\n+ # z = nn.Dense(\n+ # self.dim,\n+ # param_dtype=self.param_dtype,\n+ # dtype=self.dtype,\n+ # )(z)\n+ # z = nn.gelu(z)\n+ # x = x + z\n+\n+ # return x\n+\n+# class CausalTransformer(nn.Module):\n+ # model_dim: int\n+ # out_dim: int\n+ # num_blocks: int\n+ # num_heads: int\n+ # dropout: float\n+ # param_dtype: jnp.dtype\n+ # dtype: jnp.dtype\n+\n+ # @nn.compact\n+ # def __call__(self, x: jax.Array) -> jax.Array:\n+ # # Input projection and normalization\n+ # x = nn.Sequential(\n+ # [\n+ # nn.LayerNorm(\n+ # param_dtype=self.param_dtype,\n+ # dtype=self.dtype,\n+ # ),\n+ # nn.Dense(self.model_dim,\n+ # param_dtype=self.param_dtype,\n+ # dtype=self.dtype,\n+ # ),\n+ # nn.LayerNorm(\n+ # param_dtype=self.param_dtype,\n+ # dtype=self.dtype,\n+ # ),\n+ # ]\n+ # )(x)\n+ # # Causal transformer blocks\n+ # for _ in range(self.num_blocks):\n+ # x = STBlock2(\n+ # dim=self.model_dim,\n+ # num_heads=self.num_heads,\n+ # dropout=self.dropout,\n+ # param_dtype=self.param_dtype,\n+ # dtype=self.dtype,\n+ # )(x)\n+\n+ # # Output projection\n+ # x = nn.Dense(\n+ # self.out_dim,\n+ # param_dtype=self.param_dtype,\n+ # dtype=self.dtype,\n+ # )(x)\n+ # return x # (B, T, E)\n+\n \n class STBlock(nn.Module):\n dim: int\n@@ -33,6 +139,7 @@ class STBlock(nn.Module):\n dropout: float\n param_dtype: jnp.dtype\n dtype: jnp.dtype\n+ spacial_bert: bool = True\n \n @nn.remat\n @nn.compact\n@@ -43,13 +150,14 @@ class STBlock(nn.Module):\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n )(z)\n+ spacial_mask = None if self.spacial_bert else jnp.tri(z.shape[-2])\n z = nn.MultiHeadAttention(\n num_heads=self.num_heads,\n qkv_features=self.dim,\n dropout_rate=self.dropout,\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n- )(z)\n+ )(z, mask=spacial_mask)\n x = x + z\n \n # --- Temporal attention ---\n@@ -95,6 +203,7 @@ class 
STTransformer(nn.Module):\n dropout: float\n param_dtype: jnp.dtype\n dtype: jnp.dtype\n+ spacial_bert: bool = True\n \n @nn.compact\n def __call__(self, x: jax.Array) -> jax.Array:\n@@ -121,6 +230,7 @@ class STTransformer(nn.Module):\n dropout=self.dropout,\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n+ spacial_bert=self.spacial_bert,\n )(x)\n x = nn.Dense(\n self.out_dim,\n",log,content +5384,11967265,"diff.log",401,0,"",log,selection_mouse +5385,11967870,"diff.log",208,0,"",log,selection_mouse +5386,11968552,"diff.log",409,0,"",log,selection_mouse +5387,11978146,"diff.log",309,0,"",log,selection_mouse +5388,11978294,"diff.log",308,22,"DynamicsAutoregressive",log,selection_mouse +5389,11979383,"diff.log",367,0,"",log,selection_mouse +5390,11979943,"diff.log",301,0,"",log,selection_mouse +5391,11980130,"diff.log",291,15,"DynamicsMaskGIT",log,selection_mouse +5392,12081059,"TERMINAL",0,0,"git diff main > diff.log",,terminal_output +5393,12082338,"TERMINAL",0,0,"[?25ld[?25h",,terminal_output +5394,12082446,"TERMINAL",0,0,"[?25li[?25h",,terminal_output +5395,12082510,"TERMINAL",0,0,"[?25lf[?25h",,terminal_output +5396,12082691,"TERMINAL",0,0,"[?25lf[?25h",,terminal_output +5397,12082796,"TERMINAL",0,0,"\r\n[?2004l\r]0;tum_cte0515@hkn0710:~/Projects/jafar[?2004h(jafar) [tum_cte0515@hkn0710 jafar]$ ",,terminal_output +5398,12086952,"diff.diff",0,0,"diff --git a/genie.py b/genie.py\nindex 0e66676..8186d03 100644\n--- a/genie.py\n+++ b/genie.py\n@@ -7,7 +7,7 @@ import flax.linen as nn\n from flax.training.train_state import TrainState\n import orbax.checkpoint as ocp\n \n-from models.dynamics import DynamicsMaskGIT\n+from models.dynamics import DynamicsMaskGIT, DynamicsAutoregressive\n from models.lam import LatentActionModel\n from models.tokenizer import TokenizerVQVAE\n \n@@ -38,6 +38,7 @@ class Genie(nn.Module):\n dyna_dim: int\n dyna_num_blocks: int\n dyna_num_heads: int\n+ use_maskgit: bool\n param_dtype: jnp.dtype\n dtype: jnp.dtype\n dropout: float = 0.0\n@@ -70,16 +71,28 @@ class Genie(nn.Module):\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n )\n- self.dynamics = DynamicsMaskGIT(\n- model_dim=self.dyna_dim,\n- num_latents=self.num_patch_latents,\n- num_blocks=self.dyna_num_blocks,\n- num_heads=self.dyna_num_heads,\n- dropout=self.dropout,\n- mask_limit=self.mask_limit,\n- param_dtype=self.param_dtype,\n- dtype=self.dtype,\n- )\n+\n+ if self.use_maskgit:\n+ self.dynamics = DynamicsMaskGIT(\n+ model_dim=self.dyna_dim,\n+ num_latents=self.num_patch_latents,\n+ num_blocks=self.dyna_num_blocks,\n+ num_heads=self.dyna_num_heads,\n+ dropout=self.dropout,\n+ mask_limit=self.mask_limit,\n+ param_dtype=self.param_dtype,\n+ dtype=self.dtype,\n+ ) \n+ else:\n+ self.dynamics = DynamicsAutoregressive(\n+ model_dim=self.dyna_dim,\n+ num_latents=self.num_patch_latents,\n+ num_blocks=self.dyna_num_blocks,\n+ num_heads=self.dyna_num_heads,\n+ dropout=self.dropout,\n+ param_dtype=self.param_dtype,\n+ dtype=self.dtype,\n+ )\n \n def __call__(self, batch: Dict[str, Any], training: bool = True) -> Dict[str, Any]:\n tokenizer_outputs = self.tokenizer.vq_encode(batch[""videos""], training=False)\ndiff --git a/models/dynamics.py b/models/dynamics.py\nindex 8b183dc..76af7a1 100644\n--- a/models/dynamics.py\n+++ b/models/dynamics.py\n@@ -28,6 +28,7 @@ class DynamicsMaskGIT(nn.Module):\n self.dropout,\n self.param_dtype,\n self.dtype,\n+ spacial_bert=True,\n )\n self.patch_embed = nn.Embed(self.num_latents, self.model_dim)\n self.mask_token = self.param(\n@@ -58,3 +59,41 @@ class 
DynamicsMaskGIT(nn.Module):\n vid_embed += jnp.pad(act_embed, ((0, 0), (1, 0), (0, 0), (0, 0)))\n logits = self.dynamics(vid_embed)\n return dict(token_logits=logits, mask=mask)\n+\n+\n+class DynamicsAutoregressive(nn.Module):\n+ """"""Autoregressive (causal) dynamics model""""""\n+\n+ model_dim: int\n+ num_latents: int\n+ num_blocks: int\n+ num_heads: int\n+ dropout: float\n+ param_dtype: jnp.dtype\n+ dtype: jnp.dtype\n+\n+ def setup(self):\n+ self.dynamics = STTransformer(\n+ self.model_dim,\n+ self.num_latents,\n+ self.num_blocks,\n+ self.num_heads,\n+ self.dropout,\n+ self.param_dtype,\n+ self.dtype,\n+ spacial_bert=False,\n+ )\n+ self.patch_embed = nn.Embed(self.num_latents, self.model_dim)\n+ self.action_up = nn.Dense(\n+ self.model_dim,\n+ param_dtype=self.param_dtype,\n+ dtype=self.dtype,\n+ )\n+\n+ def __call__(self, batch: Dict[str, Any], training: bool = True) -> Dict[str, Any]:\n+ vid_embed = self.patch_embed(batch[""video_tokens""])\n+ act_embed = self.action_up(batch[""latent_actions""])\n+ vid_embed += jnp.pad(act_embed, ((0, 0), (1, 0), (0, 0), (0, 0)))\n+ logits = self.dynamics(vid_embed)\n+ mask = jnp.ones(vid_embed.shape[:-1])\n+ return dict(token_logits=logits, mask=mask)\n\ No newline at end of file\ndiff --git a/train_dynamics.py b/train_dynamics.py\ndeleted file mode 100644\nindex a8e6a2a..0000000\n--- a/train_dynamics.py\n+++ /dev/null\n@@ -1,373 +0,0 @@\n-from dataclasses import dataclass, field\n-import os\n-\n-import einops\n-from flax.training.train_state import TrainState\n-from jax.sharding import Mesh, PartitionSpec, NamedSharding\n-from jax.experimental.mesh_utils import create_device_mesh\n-import optax\n-import orbax.checkpoint as ocp\n-import numpy as np\n-import dm_pix as pix\n-import jax\n-import jax.numpy as jnp\n-import tyro\n-import wandb\n-import grain\n-\n-from genie import Genie, restore_genie_components\n-from utils.dataloader import get_dataloader\n-from utils.lr_utils import get_lr_schedule\n-from utils.parameter_utils import count_parameters_by_component\n-\n-@dataclass\n-class Args:\n- # Experiment\n- num_steps: int = 200_000\n- seed: int = 0\n- seq_len: int = 16\n- image_channels: int = 3\n- image_height: int = 90\n- image_width: int = 160\n- data_dir: str = """"\n- save_ckpt: bool = False\n- restore_ckpt: bool = False\n- # Optimization\n- batch_size: int = 36\n- init_lr: float = 0.0\n- max_lr: float = 3e-5\n- decay_end: float = 0.0\n- wsd_decay_steps: int = 10000 # NOTE: wsd_decay_steps will only be used when using a wsd-schedule\n- warmup_steps: int = 5000\n- lr_schedule : str = ""wsd"" # supported options: wsd, cos\n- # Tokenizer\n- tokenizer_dim: int = 512\n- latent_patch_dim: int = 32\n- num_patch_latents: int = 1024\n- patch_size: int = 4\n- tokenizer_num_blocks: int = 8\n- tokenizer_num_heads: int = 8\n- tokenizer_checkpoint: str = """"\n- # LAM\n- lam_dim: int = 512\n- latent_action_dim: int = 32\n- num_latent_actions: int = 6\n- lam_patch_size: int = 16\n- lam_num_blocks: int = 8\n- lam_num_heads: int = 8\n- lam_checkpoint: str = """"\n- # Dynamics\n- dyna_dim: int = 512\n- dyna_num_blocks: int = 12\n- dyna_num_heads: int = 8\n- dropout: float = 0.0\n- mask_limit: float = 0.5\n- param_dtype: jnp.dtype = jnp.float32\n- dtype: jnp.dtype = jnp.bfloat16\n- # Logging\n- log: bool = False\n- entity: str = """"\n- project: str = """"\n- name: str = ""train_dynamics""\n- tags: list[str] = field(default_factory=lambda: [""dynamics""])\n- log_interval: int = 5\n- log_image_interval: int = 250\n- ckpt_dir: str = """"\n- 
log_checkpoint_interval: int = 25000\n- log_checkpoint_keep_period: int = 20000\n- log_gradients: bool = False\n- wandb_id: str = """"\n-\n-\n-args = tyro.cli(Args)\n-\n-\n-def dynamics_loss_fn(params, state, inputs):\n- """"""Compute masked dynamics loss""""""\n- inputs[""videos""] = inputs[""videos""].astype(args.dtype) / 255.0\n- outputs = state.apply_fn(\n- params,\n- inputs,\n- training=True,\n- rngs={""params"": inputs[""rng""], ""dropout"": inputs[""dropout_rng""]},\n- )\n- mask = outputs[""mask""]\n- outputs[""token_logits""] = outputs[""token_logits""].astype(jnp.float32)\n- ce_loss = optax.softmax_cross_entropy_with_integer_labels(\n- outputs[""token_logits""], outputs[""video_tokens""]\n- )\n- ce_loss = (mask * ce_loss).sum() / mask.sum()\n- acc = outputs[""token_logits""].argmax(-1) == outputs[""video_tokens""]\n- acc = (mask * acc).sum() / mask.sum()\n- select_probs = jax.nn.softmax(outputs[""token_logits""])\n- gt = inputs[""videos""].clip(0, 1).reshape(-1, *inputs[""videos""].shape[2:])\n- recon = outputs[""recon""].clip(0, 1).reshape(-1, *outputs[""recon""].shape[2:])\n- psnr = pix.psnr(gt, recon).mean() # type: ignore\n- ssim = pix.ssim(gt, recon).mean() # type: ignore\n- _, index_counts_lam = jnp.unique_counts(\n- jnp.ravel(outputs[""lam_indices""]), size=args.num_latent_actions, fill_value=0\n- )\n- _, index_counts_tokenizer = jnp.unique_counts(\n- jnp.ravel(outputs[""video_tokens""]), size=args.num_patch_latents, fill_value=0\n- )\n- codebook_usage_lam = (index_counts_lam != 0).mean()\n- codebook_usage_tokenizer = (index_counts_tokenizer != 0).mean()\n- metrics = dict(\n- cross_entropy_loss=ce_loss,\n- masked_token_accuracy=acc,\n- select_logit=outputs[""token_logits""].max(-1).mean(),\n- select_p=select_probs.max(-1).mean(),\n- entropy=jax.scipy.special.entr(select_probs).sum(-1).mean(),\n- psnr=psnr,\n- ssim=ssim,\n- codebook_usage_lam=codebook_usage_lam,\n- codebook_usage_tokenizer=codebook_usage_tokenizer,\n- )\n- return ce_loss, (outputs[""recon""], metrics)\n-\n-\n-@jax.jit\n-def train_step(state, inputs):\n- """"""Update state and compute metrics""""""\n- grad_fn = jax.value_and_grad(dynamics_loss_fn, has_aux=True, allow_int=True)\n- (loss, (recon, metrics)), grads = grad_fn(state.params, state, inputs)\n- state = state.apply_gradients(grads=grads)\n- if args.log_gradients:\n- metrics[""gradients_std/""] = jax.tree.map(\n- lambda x: x.std(), grads[""params""][""dynamics""]\n- )\n- return state, loss, recon, metrics\n-\n-\n-if __name__ == ""__main__"":\n- jax.distributed.initialize()\n- num_devices = jax.device_count()\n- if num_devices == 0:\n- raise ValueError(""No JAX devices found."")\n- print(f""Running on {num_devices} devices."")\n-\n- if args.batch_size % num_devices != 0:\n- raise ValueError(\n- f""Global batch size {args.batch_size} must be divisible by ""\n- f""number of devices {num_devices}.""\n- )\n-\n- per_device_batch_size_for_init = args.batch_size // num_devices\n-\n- rng = jax.random.PRNGKey(args.seed)\n-\n- # --- Initialize model ---\n- genie = Genie(\n- # Tokenizer\n- in_dim=args.image_channels,\n- tokenizer_dim=args.tokenizer_dim,\n- latent_patch_dim=args.latent_patch_dim,\n- num_patch_latents=args.num_patch_latents,\n- patch_size=args.patch_size,\n- tokenizer_num_blocks=args.tokenizer_num_blocks,\n- tokenizer_num_heads=args.tokenizer_num_heads,\n- # LAM\n- lam_dim=args.lam_dim,\n- latent_action_dim=args.latent_action_dim,\n- num_latent_actions=args.num_latent_actions,\n- lam_patch_size=args.lam_patch_size,\n- 
lam_num_blocks=args.lam_num_blocks,\n- lam_num_heads=args.lam_num_heads,\n- lam_co_train=not args.lam_checkpoint,\n- # Dynamics\n- dyna_dim=args.dyna_dim,\n- dyna_num_blocks=args.dyna_num_blocks,\n- dyna_num_heads=args.dyna_num_heads,\n- dropout=args.dropout,\n- mask_limit=args.mask_limit,\n- param_dtype=args.param_dtype,\n- dtype=args.dtype,\n- )\n- rng, _rng = jax.random.split(rng)\n- image_shape = (args.image_height, args.image_width, args.image_channels)\n- dummy_inputs = dict(\n- videos=jnp.zeros(\n- (per_device_batch_size_for_init, args.seq_len, *image_shape),\n- dtype=args.dtype,\n- ),\n- action=jnp.zeros(\n- (per_device_batch_size_for_init, args.seq_len), dtype=args.dtype\n- ),\n- mask_rng=_rng,\n- )\n- rng, _rng = jax.random.split(rng)\n- init_params = genie.init(_rng, dummy_inputs)\n-\n- param_counts = count_parameters_by_component(init_params)\n-\n- if args.log and jax.process_index() == 0:\n- wandb_init_kwargs = {\n- ""entity"": args.entity,\n- ""project"": args.project,\n- ""name"": args.name,\n- ""tags"": args.tags,\n- ""group"": ""debug"",\n- ""config"": args,\n- }\n-\n- if args.wandb_id:\n- wandb_init_kwargs.update(\n- {\n- ""id"": args.wandb_id,\n- ""resume"": ""allow"",\n- }\n- )\n- wandb.init(**wandb_init_kwargs)\n-\n- wandb.config.update({""model_param_count"": param_counts})\n-\n- print(""Parameter counts:"")\n- print(param_counts)\n-\n- # --- Initialize optimizer ---\n- lr_schedule = get_lr_schedule(args.lr_schedule, \n- args.init_lr, \n- args.max_lr, \n- args.decay_end, \n- args.num_steps, \n- args.warmup_steps, \n- args.wsd_decay_steps)\n- tx = optax.adamw(learning_rate=lr_schedule, b1=0.9, b2=0.9, weight_decay=1e-4, mu_dtype=args.dtype)\n- train_state = TrainState.create(apply_fn=genie.apply, params=init_params, tx=tx)\n-\n- device_mesh_arr = create_device_mesh((num_devices,))\n- mesh = Mesh(devices=device_mesh_arr, axis_names=(""data"",))\n-\n- replicated_sharding = NamedSharding(mesh, PartitionSpec())\n- videos_sharding = NamedSharding(mesh, PartitionSpec(""data"", None, None, None, None))\n- train_state = jax.device_put(train_state, replicated_sharding)\n-\n- # --- Initialize checkpoint manager ---\n- step = 0\n- handler_registry = ocp.handlers.DefaultCheckpointHandlerRegistry()\n- handler_registry.add(\n- ""model_state"", ocp.args.StandardSave, ocp.handlers.StandardCheckpointHandler\n- )\n- handler_registry.add(\n- ""model_state"", ocp.args.StandardRestore, ocp.handlers.StandardCheckpointHandler\n- )\n- handler_registry.add(""dataloader_state"", grain.checkpoint.CheckpointSave, grain.checkpoint.CheckpointHandler) # type: ignore\n- handler_registry.add(""dataloader_state"", grain.checkpoint.CheckpointRestore, grain.checkpoint.CheckpointHandler) # type: ignore\n-\n- checkpoint_options = ocp.CheckpointManagerOptions(\n- save_interval_steps=args.log_checkpoint_interval,\n- max_to_keep=3,\n- keep_period=args.log_checkpoint_keep_period,\n- step_format_fixed_length=6,\n- cleanup_tmp_directories=True,\n- )\n-\n- checkpoint_manager = ocp.CheckpointManager(\n- args.ckpt_dir,\n- options=checkpoint_options,\n- handler_registry=handler_registry,\n- )\n-\n- # --- Create DataLoaderIterator from dataloader ---\n- array_record_files = [\n- os.path.join(args.data_dir, x)\n- for x in os.listdir(args.data_dir)\n- if x.endswith("".array_record"")\n- ]\n- grain_dataloader = get_dataloader(\n- array_record_files,\n- args.seq_len,\n- # NOTE: We deliberately pass the global batch size\n- # The dataloader shards the dataset across all processes\n- args.batch_size,\n- *image_shape,\n- 
num_workers=8,\n- prefetch_buffer_size=1,\n- seed=args.seed,\n- )\n- initial_state = grain_dataloader._create_initial_state()\n- grain_iterator = grain.DataLoaderIterator(grain_dataloader, initial_state)\n-\n- # --- Restore checkpoint ---\n- if args.restore_ckpt:\n- # Restore full dynamics model\n- abstract_train_state = jax.tree_util.tree_map(\n- ocp.utils.to_shape_dtype_struct, train_state\n- )\n- restored = checkpoint_manager.restore(\n- checkpoint_manager.latest_step(),\n- args=ocp.args.Composite(\n- model_state=ocp.args.StandardRestore(abstract_train_state),\n- dataloader_state=grain.checkpoint.CheckpointRestore(grain_iterator),\n- ),\n- )\n- train_state = restored[""model_state""]\n- grain_iterator = restored[""dataloader_state""]\n- step = checkpoint_manager.latest_step() or 0\n- print(f""Restored dataloader and model state from step {step}"")\n- else:\n- # Restore from pre-trained tokenizer (and LAM)\n- train_state = restore_genie_components(\n- train_state, replicated_sharding, grain_iterator, dummy_inputs, rng, args\n- )\n-\n- # --- TRAIN LOOP ---\n- dataloader = (jax.make_array_from_process_local_data(videos_sharding, elem) for elem in grain_iterator) # type: ignore\n- while step < args.num_steps:\n- for videos in dataloader:\n- # --- Train step ---\n- rng, _rng, _rng_dropout, _rng_mask = jax.random.split(rng, 4)\n-\n- inputs = dict(\n- videos=videos,\n- rng=_rng,\n- dropout_rng=_rng_dropout,\n- mask_rng=_rng_mask,\n- )\n- train_state, loss, recon, metrics = train_step(train_state, inputs)\n- metrics[""lr""] = lr_schedule(step)\n- print(f""Step {step}, loss: {loss}"")\n- step += 1\n-\n- # --- Logging ---\n- if args.log:\n- if step % args.log_interval == 0 and jax.process_index() == 0:\n- wandb.log(\n- {\n- ""loss"": loss,\n- ""step"": step,\n- **metrics,\n- }\n- )\n- if step % args.log_image_interval == 0:\n- gt_seq = inputs[""videos""][0]\n- recon_seq = recon[0].clip(0, 1)\n- comparison_seq = jnp.concatenate((gt_seq, recon_seq), axis=1)\n- comparison_seq = einops.rearrange(\n- comparison_seq * 255, ""t h w c -> h (t w) c""\n- )\n- if jax.process_index() == 0:\n- log_images = dict(\n- image=wandb.Image(np.asarray(gt_seq[args.seq_len - 1])),\n- recon=wandb.Image(np.asarray(recon_seq[args.seq_len - 1])),\n- true_vs_recon=wandb.Image(\n- np.asarray(comparison_seq.astype(np.uint8))\n- ),\n- )\n- wandb.log(log_images)\n- # --- Checkpointing ---\n- if args.save_ckpt and step % args.log_checkpoint_interval == 0:\n- checkpoint_manager.save(\n- step,\n- args=ocp.args.Composite(\n- model_state=ocp.args.StandardSave(train_state),\n- dataloader_state=grain.checkpoint.CheckpointSave(\n- grain_iterator\n- ),\n- ),\n- )\n- print(f""Saved checkpoint at step {step}"")\n- if step >= args.num_steps:\n- break\n-\n- checkpoint_manager.close()\ndiff --git a/utils/nn.py b/utils/nn.py\nindex b7bec9f..3b64fa0 100644\n--- a/utils/nn.py\n+++ b/utils/nn.py\n@@ -26,6 +26,112 @@ class PositionalEncoding(nn.Module):\n x = x + self.pe[: x.shape[2]]\n return x\n \n+# class STBlock2(nn.Module):\n+ # dim: int\n+ # num_heads: int\n+ # dropout: float\n+ # param_dtype: jnp.dtype\n+ # dtype: jnp.dtype\n+\n+ # @nn.remat\n+ # @nn.compact\n+ # def __call__(self, x: jax.Array) -> jax.Array:\n+ # # --- Spatial attention ---\n+ # z = PositionalEncoding(self.dim)(x)\n+ # z = nn.LayerNorm(\n+ # param_dtype=self.param_dtype,\n+ # dtype=self.dtype,\n+ # )(z)\n+ # causal_mask = jnp.tri(z.shape[-2])\n+ # z = nn.MultiHeadAttention(\n+ # num_heads=self.num_heads,\n+ # qkv_features=self.dim,\n+ # dropout_rate=self.dropout,\n+ # 
param_dtype=self.param_dtype,\n+ # dtype=self.dtype,\n+ # )(z, mask=causal_mask)\n+ # x = x + z\n+\n+ # # --- Temporal attention ---\n+ # x = x.swapaxes(1, 2)\n+ # z = PositionalEncoding(self.dim)(x)\n+ # z = nn.LayerNorm(\n+ # param_dtype=self.param_dtype,\n+ # dtype=self.dtype,\n+ # )(z)\n+ # causal_mask = jnp.tri(z.shape[-2])\n+ # z = nn.MultiHeadAttention(\n+ # num_heads=self.num_heads,\n+ # qkv_features=self.dim,\n+ # dropout_rate=self.dropout,\n+ # param_dtype=self.param_dtype,\n+ # dtype=self.dtype,\n+ # )(z, mask=causal_mask)\n+ # x = x + z\n+ # x = x.swapaxes(1, 2)\n+\n+ # # --- Feedforward ---\n+ # z = nn.LayerNorm(\n+ # param_dtype=self.param_dtype,\n+ # dtype=self.dtype,\n+ # )(x)\n+ # # FIXME (f.srambical): Here, the attention hidden dimension is the same as the FFN's. Usually, FFN hidden dimension is 4x model_dim\n+ # z = nn.Dense(\n+ # self.dim,\n+ # param_dtype=self.param_dtype,\n+ # dtype=self.dtype,\n+ # )(z)\n+ # z = nn.gelu(z)\n+ # x = x + z\n+\n+ # return x\n+\n+# class CausalTransformer(nn.Module):\n+ # model_dim: int\n+ # out_dim: int\n+ # num_blocks: int\n+ # num_heads: int\n+ # dropout: float\n+ # param_dtype: jnp.dtype\n+ # dtype: jnp.dtype\n+\n+ # @nn.compact\n+ # def __call__(self, x: jax.Array) -> jax.Array:\n+ # # Input projection and normalization\n+ # x = nn.Sequential(\n+ # [\n+ # nn.LayerNorm(\n+ # param_dtype=self.param_dtype,\n+ # dtype=self.dtype,\n+ # ),\n+ # nn.Dense(self.model_dim,\n+ # param_dtype=self.param_dtype,\n+ # dtype=self.dtype,\n+ # ),\n+ # nn.LayerNorm(\n+ # param_dtype=self.param_dtype,\n+ # dtype=self.dtype,\n+ # ),\n+ # ]\n+ # )(x)\n+ # # Causal transformer blocks\n+ # for _ in range(self.num_blocks):\n+ # x = STBlock2(\n+ # dim=self.model_dim,\n+ # num_heads=self.num_heads,\n+ # dropout=self.dropout,\n+ # param_dtype=self.param_dtype,\n+ # dtype=self.dtype,\n+ # )(x)\n+\n+ # # Output projection\n+ # x = nn.Dense(\n+ # self.out_dim,\n+ # param_dtype=self.param_dtype,\n+ # dtype=self.dtype,\n+ # )(x)\n+ # return x # (B, T, E)\n+\n \n class STBlock(nn.Module):\n dim: int\n@@ -33,6 +139,7 @@ class STBlock(nn.Module):\n dropout: float\n param_dtype: jnp.dtype\n dtype: jnp.dtype\n+ spacial_bert: bool = True\n \n @nn.remat\n @nn.compact\n@@ -43,13 +150,14 @@ class STBlock(nn.Module):\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n )(z)\n+ spacial_mask = None if self.spacial_bert else jnp.tri(z.shape[-2])\n z = nn.MultiHeadAttention(\n num_heads=self.num_heads,\n qkv_features=self.dim,\n dropout_rate=self.dropout,\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n- )(z)\n+ )(z, mask=spacial_mask)\n x = x + z\n \n # --- Temporal attention ---\n@@ -95,6 +203,7 @@ class STTransformer(nn.Module):\n dropout: float\n param_dtype: jnp.dtype\n dtype: jnp.dtype\n+ spacial_bert: bool = True\n \n @nn.compact\n def __call__(self, x: jax.Array) -> jax.Array:\n@@ -121,6 +230,7 @@ class STTransformer(nn.Module):\n dropout=self.dropout,\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n+ spacial_bert=self.spacial_bert,\n )(x)\n x = nn.Dense(\n self.out_dim,\n",diff,tab +5399,12107296,"genie.py",0,0,"from typing import Dict, Any\n\nimport optax\nimport jax\nimport jax.numpy as jnp\nimport flax.linen as nn\nfrom flax.training.train_state import TrainState\nimport orbax.checkpoint as ocp\n\nfrom models.dynamics import DynamicsMaskGIT, DynamicsAutoregressive\nfrom models.lam import LatentActionModel\nfrom models.tokenizer import TokenizerVQVAE\n\nimport os\nimport grain\n\n\nclass Genie(nn.Module):\n """"""Genie model""""""\n\n # --- Tokenizer ---\n 
in_dim: int\n tokenizer_dim: int\n latent_patch_dim: int\n num_patch_latents: int\n patch_size: int\n tokenizer_num_blocks: int\n tokenizer_num_heads: int\n # --- LAM ---\n lam_dim: int\n latent_action_dim: int\n num_latent_actions: int\n lam_patch_size: int\n lam_num_blocks: int\n lam_num_heads: int\n lam_co_train: bool\n # --- Dynamics ---\n dyna_dim: int\n dyna_num_blocks: int\n dyna_num_heads: int\n use_maskgit: bool\n param_dtype: jnp.dtype\n dtype: jnp.dtype\n dropout: float = 0.0\n mask_limit: float = 0.0\n\n def setup(self):\n self.tokenizer = TokenizerVQVAE(\n in_dim=self.in_dim,\n model_dim=self.tokenizer_dim,\n latent_dim=self.latent_patch_dim,\n num_latents=self.num_patch_latents,\n patch_size=self.patch_size,\n num_blocks=self.tokenizer_num_blocks,\n num_heads=self.tokenizer_num_heads,\n dropout=0.0,\n codebook_dropout=0.0,\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n )\n self.lam = LatentActionModel(\n in_dim=self.in_dim,\n model_dim=self.lam_dim,\n latent_dim=self.latent_patch_dim,\n num_latents=self.num_latent_actions,\n patch_size=self.lam_patch_size,\n num_blocks=self.lam_num_blocks,\n num_heads=self.lam_num_heads,\n dropout=0.0,\n codebook_dropout=0.0,\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n )\n\n if self.use_maskgit:\n self.dynamics = DynamicsMaskGIT(\n model_dim=self.dyna_dim,\n num_latents=self.num_patch_latents,\n num_blocks=self.dyna_num_blocks,\n num_heads=self.dyna_num_heads,\n dropout=self.dropout,\n mask_limit=self.mask_limit,\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n ) \n else:\n self.dynamics = DynamicsAutoregressive(\n model_dim=self.dyna_dim,\n num_latents=self.num_patch_latents,\n num_blocks=self.dyna_num_blocks,\n num_heads=self.dyna_num_heads,\n dropout=self.dropout,\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n )\n\n def __call__(self, batch: Dict[str, Any], training: bool = True) -> Dict[str, Any]:\n tokenizer_outputs = self.tokenizer.vq_encode(batch[""videos""], training=False)\n lam_outputs = self.lam.vq_encode(batch[""videos""], training=False)\n latent_actions = jax.lax.cond(\n self.lam_co_train,\n lambda: lam_outputs[""z_q""],\n lambda: jax.lax.stop_gradient(lam_outputs[""z_q""])\n )\n outputs = dict(\n video_tokens=jax.lax.stop_gradient(tokenizer_outputs[""indices""]),\n latent_actions=latent_actions,\n )\n outputs[""mask_rng""] = batch[""mask_rng""]\n dyna_outputs = self.dynamics(outputs, training)\n outputs.update(dyna_outputs)\n mle_indices = jnp.argmax(outputs[""token_logits""], axis=-1)\n outputs[""recon""] = self.tokenizer.decode(\n mle_indices, batch[""videos""].shape[2:4]\n )\n outputs[""lam_indices""] = lam_outputs[""indices""]\n return outputs\n\n @nn.compact\n def sample(\n self,\n batch: Dict[str, Any],\n seq_len: int,\n steps: int = 25,\n temperature: float = 1,\n sample_argmax: bool = False,\n ) -> Any:\n """"""\n Autoregressively samples up to `seq_len` future frames, following Figure 8 of the paper.\n\n - Input frames are tokenized once.\n - Future frames are generated autoregressively in token space.\n - All frames are detokenized in a single pass.\n\n Note:\n - For interactive or step-wise sampling, detokenization should occur after each action.\n - To maintain consistent tensor shapes across timesteps, all current and future frames are decoded at every step.\n - Temporal causal structure is preserved by \n a) reapplying the mask before each decoding step.\n b) a temporal causal mask is applied within each ST-transformer block.\n\n Dimension keys:\n B: batch size \n T: number of input 
(conditioning) frames \n N: patches per frame \n S: sequence length \n A: action space \n D: model latent dimension\n """"""\n # --- Encode videos and actions ---\n tokenizer_out = self.tokenizer.vq_encode(batch[""videos""], training=False)\n token_idxs = tokenizer_out[""indices""] # (B, T, N)\n B, T, N = token_idxs.shape\n pad_shape = (B, seq_len - T, N)\n pad = jnp.zeros(pad_shape, dtype=token_idxs.dtype)\n token_idxs = jnp.concatenate([token_idxs, pad], axis=1) # (B, S, N)\n action_tokens = self.lam.vq.get_codes(batch[""latent_actions""])\n\n MaskGITLoop = nn.scan(\n MaskGITStep,\n variable_broadcast=""params"",\n split_rngs={""params"": False},\n in_axes=0,\n out_axes=0,\n length=steps,\n )\n \n loop_fn = MaskGITLoop(\n dynamics=self.dynamics,\n tokenizer=self.tokenizer,\n temperature=temperature,\n sample_argmax=sample_argmax,\n steps=steps,\n )\n\n def generation_step_fn(carry, step_t):\n rng, current_token_idxs = carry\n rng, step_rng = jax.random.split(rng)\n\n # Mask current and future frames (i.e., t >= step_t)\n mask = jnp.arange(seq_len) >= step_t # (S,)\n mask = jnp.broadcast_to(mask[None, :, None], (B, seq_len, N)) # (B, S, N)\n mask = mask.astype(bool)\n masked_token_idxs = current_token_idxs * ~mask\n\n # --- Initialize and run MaskGIT loop ---\n init_carry_maskgit = (\n step_rng,\n masked_token_idxs,\n mask,\n action_tokens,\n )\n final_carry_maskgit, _ = loop_fn(init_carry_maskgit, jnp.arange(steps))\n updated_token_idxs = final_carry_maskgit[1]\n new_carry = (rng, updated_token_idxs)\n return new_carry, None\n\n # --- Run the autoregressive generation using scan ---\n initial_carry = (batch[""rng""], token_idxs)\n timesteps_to_scan = jnp.arange(T, seq_len)\n final_carry, _ = jax.lax.scan(\n generation_step_fn,\n initial_carry,\n timesteps_to_scan\n )\n final_token_idxs = final_carry[1]\n\n # --- Decode all tokens at once at the end ---\n final_frames = self.tokenizer.decode(\n final_token_idxs,\n video_hw=batch[""videos""].shape[2:4],\n )\n return final_frames\n\n def vq_encode(self, batch, training) -> Dict[str, Any]:\n # --- Preprocess videos ---\n lam_output = self.lam.vq_encode(batch[""videos""], training=training)\n return lam_output[""indices""]\n\n\nclass MaskGITStep(nn.Module):\n dynamics: nn.Module\n tokenizer: nn.Module\n temperature: float\n sample_argmax: bool\n steps: int\n\n @nn.compact\n def __call__(self, carry, x):\n rng, token_idxs, mask, action_tokens = carry\n step = x\n N = token_idxs.shape[2]\n\n # --- Construct + encode video ---\n vid_embed = self.dynamics.patch_embed(token_idxs) # (B, S, N, D)\n mask_token = self.dynamics.mask_token # (1, 1, 1, D,)\n mask_expanded = mask[..., None] # (B, S, N, 1) \n vid_embed = jnp.where(mask_expanded, mask_token, vid_embed)\n\n # --- Predict transition ---\n act_embed = self.dynamics.action_up(action_tokens)\n vid_embed += jnp.pad(act_embed, ((0, 0), (1, 0), (0, 0), (0, 0)))\n unmasked_ratio = jnp.cos(jnp.pi * (step + 1) / (self.steps * 2))\n step_temp = self.temperature * (1.0 - unmasked_ratio)\n final_logits = self.dynamics.dynamics(vid_embed) / step_temp\n\n # --- Sample new tokens for final frame ---\n if self.sample_argmax:\n sampled_token_idxs = jnp.argmax(final_logits, axis=-1)\n else:\n rng, _rng = jax.random.split(rng)\n sampled_token_idxs = jax.random.categorical(_rng, final_logits)\n gather_fn = jax.vmap(jax.vmap(jax.vmap(lambda x, y: x[y])))\n final_token_probs = gather_fn(jax.nn.softmax(final_logits), sampled_token_idxs)\n final_token_probs += ~mask\n # Update masked tokens only\n token_idxs = 
jnp.where(mask, sampled_token_idxs, token_idxs)\n\n # --- Update mask ---\n num_unmasked_tokens = jnp.round(N * (1.0 - unmasked_ratio)).astype(int)\n idx_mask = jnp.arange(final_token_probs.shape[-1]) > num_unmasked_tokens\n sorted_idxs = jnp.argsort(final_token_probs, axis=-1, descending=True)\n mask_update_fn = jax.vmap(lambda msk, ids: msk.at[ids].set(idx_mask))\n new_mask = mask_update_fn(mask, sorted_idxs)\n\n new_carry = (rng, token_idxs, new_mask, action_tokens)\n return new_carry, None\n\ndef restore_genie_components(\n train_state: TrainState,\n sharding: jax.sharding.NamedSharding,\n grain_iterator: grain.DataLoaderIterator,\n inputs: Dict[str, jax.Array],\n rng: jax.Array,\n args,\n):\n """"""Restore pre-trained Genie components""""""\n rng, _rng = jax.random.split(rng)\n\n # dummy values since we only use tx to initialize the dummy train states\n dummy_tx = optax.adamw(\n learning_rate=optax.constant_schedule(args.max_lr),\n b1=0.9,\n b2=0.9,\n weight_decay=1e-4,\n )\n handler_registry = ocp.handlers.DefaultCheckpointHandlerRegistry()\n handler_registry.add('model_state', ocp.args.StandardRestore, ocp.handlers.StandardCheckpointHandler)\n handler_registry.add('dataloader_state', grain.checkpoint.CheckpointRestore, grain.checkpoint.CheckpointHandler)\n \n\n checkpoint_options = ocp.CheckpointManagerOptions(\n step_format_fixed_length=6,\n )\n tokenizer_checkpoint_manager = ocp.CheckpointManager(\n directory=args.tokenizer_checkpoint,\n options=checkpoint_options,\n handler_registry=handler_registry,\n )\n dummy_tokenizer = TokenizerVQVAE(\n in_dim=args.image_channels,\n model_dim=args.tokenizer_dim,\n latent_dim=args.latent_patch_dim,\n num_latents=args.num_patch_latents,\n patch_size=args.patch_size,\n num_blocks=args.tokenizer_num_blocks,\n num_heads=args.tokenizer_num_heads,\n dropout=args.dropout,\n codebook_dropout=args.dropout,\n param_dtype=args.param_dtype,\n dtype=args.dtype,\n )\n tokenizer_init_params = dummy_tokenizer.init(_rng, inputs)\n dummy_tokenizer_train_state = TrainState.create(\n apply_fn=dummy_tokenizer.apply, params=tokenizer_init_params, tx=dummy_tx\n )\n abstract_sharded_tokenizer_state = _create_abstract_sharded_pytree(\n dummy_tokenizer_train_state, sharding\n )\n restored_tokenizer = tokenizer_checkpoint_manager.restore(\n step=tokenizer_checkpoint_manager.latest_step(),\n args=ocp.args.Composite(\n model_state=ocp.args.StandardRestore(abstract_sharded_tokenizer_state),\n dataloader_state=grain.checkpoint.CheckpointRestore(grain_iterator),\n ),\n )[""model_state""]\n restored_tokenizer_params = restored_tokenizer.params[""params""]\n train_state.params[""params""][""tokenizer""].update(restored_tokenizer_params)\n tokenizer_checkpoint_manager.close()\n\n if args.lam_checkpoint:\n lam_checkpoint_manager = ocp.CheckpointManager(\n directory=args.lam_checkpoint,\n options=checkpoint_options,\n handler_registry=handler_registry,\n )\n dummy_lam = LatentActionModel(\n in_dim=args.image_channels,\n model_dim=args.lam_dim,\n latent_dim=args.latent_patch_dim,\n num_latents=args.num_latent_actions,\n patch_size=args.lam_patch_size,\n num_blocks=args.lam_num_blocks,\n num_heads=args.lam_num_heads,\n dropout=args.dropout,\n codebook_dropout=args.dropout,\n param_dtype=args.param_dtype,\n dtype=args.dtype,\n )\n lam_init_params = dummy_lam.init(_rng, inputs)\n dummy_lam_train_state = TrainState.create(\n apply_fn=dummy_lam.apply, params=lam_init_params, tx=dummy_tx\n )\n abstract_sharded_lam_state = _create_abstract_sharded_pytree(\n dummy_lam_train_state, 
sharding\n )\n restored_lam = lam_checkpoint_manager.restore(\n step=lam_checkpoint_manager.latest_step(),\n args=ocp.args.Composite(\n model_state=ocp.args.StandardRestore(abstract_sharded_lam_state),\n dataloader_state=grain.checkpoint.CheckpointRestore(grain_iterator),\n ),\n )[""model_state""]\n restored_lam_params = restored_lam.params[""params""]\n # Genie does not initialize all LAM modules, thus we omit those extra modules during restoration\n # (f.srambical) FIXME: Currently, this is a small HBM memory crunch since the LAM's decoder is loaded into HBM and immediately discarded.\n # A workaround would be to restore to host memory first, and only move the weights to HBM after pruning the decoder\n restored_lam_params = {\n k: v\n for k, v in restored_lam_params.items()\n if k in train_state.params[""params""][""lam""]\n }\n train_state.params[""params""][""lam""].update(restored_lam_params)\n lam_checkpoint_manager.close()\n\n return train_state\n\ndef _create_abstract_sharded_pytree(pytree_template, sharding_spec):\n """"""Replaces arrays in a pytree with ShapeDtypeStructs having the given sharding.""""""\n\n def map_fn(leaf_template):\n if hasattr(leaf_template, ""shape"") and hasattr(leaf_template, ""dtype""):\n return jax.ShapeDtypeStruct(\n leaf_template.shape, leaf_template.dtype, sharding=sharding_spec\n )\n return leaf_template\n\n return jax.tree_util.tree_map(map_fn, pytree_template)",python,tab +5400,12108057,"genie.py",0,0,"",python,tab +5401,12109072,"models/dynamics.py",0,0,"from typing import Dict, Any\n\nimport jax\nimport jax.numpy as jnp\nimport flax.linen as nn\n\nfrom utils.nn import STTransformer\n\n\nclass DynamicsMaskGIT(nn.Module):\n """"""MaskGIT dynamics model""""""\n\n model_dim: int\n num_latents: int\n num_blocks: int\n num_heads: int\n dropout: float\n mask_limit: float\n param_dtype: jnp.dtype\n dtype: jnp.dtype\n\n def setup(self):\n self.dynamics = STTransformer(\n self.model_dim,\n self.num_latents,\n self.num_blocks,\n self.num_heads,\n self.dropout,\n self.param_dtype,\n self.dtype,\n spacial_bert=True,\n )\n self.patch_embed = nn.Embed(self.num_latents, self.model_dim)\n self.mask_token = self.param(\n ""mask_token"",\n nn.initializers.lecun_uniform(),\n (1, 1, 1, self.model_dim),\n )\n self.action_up = nn.Dense(\n self.model_dim,\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n )\n\n def __call__(self, batch: Dict[str, Any], training: bool = True) -> Dict[str, Any]:\n # --- Mask videos ---\n vid_embed = self.patch_embed(batch[""video_tokens""])\n if training:\n rng1, rng2 = jax.random.split(batch[""mask_rng""])\n mask_prob = jax.random.uniform(rng1, minval=self.mask_limit)\n mask = jax.random.bernoulli(rng2, mask_prob, vid_embed.shape[:-1])\n mask = mask.at[:, 0].set(False)\n vid_embed = jnp.where(jnp.expand_dims(mask, -1), self.mask_token, vid_embed)\n else:\n mask = None\n\n # --- Predict transition ---\n act_embed = self.action_up(batch[""latent_actions""])\n vid_embed += jnp.pad(act_embed, ((0, 0), (1, 0), (0, 0), (0, 0)))\n logits = self.dynamics(vid_embed)\n return dict(token_logits=logits, mask=mask)\n\n\nclass DynamicsAutoregressive(nn.Module):\n """"""Autoregressive (causal) dynamics model""""""\n\n model_dim: int\n num_latents: int\n num_blocks: int\n num_heads: int\n dropout: float\n param_dtype: jnp.dtype\n dtype: jnp.dtype\n\n def setup(self):\n self.dynamics = STTransformer(\n self.model_dim,\n self.num_latents,\n self.num_blocks,\n self.num_heads,\n self.dropout,\n self.param_dtype,\n self.dtype,\n spacial_bert=False,\n )\n 
self.patch_embed = nn.Embed(self.num_latents, self.model_dim)\n self.action_up = nn.Dense(\n self.model_dim,\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n )\n\n def __call__(self, batch: Dict[str, Any], training: bool = True) -> Dict[str, Any]:\n vid_embed = self.patch_embed(batch[""video_tokens""])\n act_embed = self.action_up(batch[""latent_actions""])\n vid_embed += jnp.pad(act_embed, ((0, 0), (1, 0), (0, 0), (0, 0)))\n logits = self.dynamics(vid_embed)\n mask = jnp.ones(vid_embed.shape[:-1])\n return dict(token_logits=logits, mask=mask)",python,tab +5402,12109702,"models/dynamics.py",0,0,"",python,tab +5403,12110531,"diff.diff",0,0,"",diff,tab +5404,12110531,"diff.diff",506,0,"",diff,selection_mouse +5405,12110581,"diff.diff",505,0,"",diff,selection_command +5406,12112275,"genie.py",0,0,"",python,tab +5407,12113323,"genie.py",224,0,"",python,selection_mouse +5408,12114239,"genie.py",351,0,"",python,selection_mouse +5409,12114717,"genie.py",344,0,"",python,selection_mouse +5410,12250664,"genie.py",3920,0,"",python,selection_mouse +5411,12250675,"genie.py",3919,0,"",python,selection_command +5412,12251345,"genie.py",3828,0,"",python,selection_mouse +5413,12251375,"genie.py",3827,0,"",python,selection_command +5414,12252034,"genie.py",3812,0,"",python,selection_mouse +5415,12252048,"genie.py",3811,0,"",python,selection_command +5416,12252642,"genie.py",3796,0,"",python,selection_mouse +5417,12275463,"diff.diff",0,0,"",diff,tab +5418,12275464,"diff.diff",1139,0,"",diff,selection_mouse +5419,12275516,"diff.diff",1138,0,"",diff,selection_command +5420,12315913,"diff.diff",3596,0,"",diff,selection_mouse +5421,12315931,"diff.diff",3595,0,"",diff,selection_command +5422,12324125,"diff.diff",4328,0,"",diff,selection_mouse +5423,12324139,"diff.diff",4327,0,"",diff,selection_command +5424,12324720,"diff.diff",4152,0,"",diff,selection_mouse +5425,12324722,"diff.diff",4151,0,"",diff,selection_command +5426,12325403,"diff.diff",4129,0,"",diff,selection_mouse +5427,12325413,"diff.diff",4128,0,"",diff,selection_command +5428,12326385,"diff.diff",4152,0,"",diff,selection_mouse +5429,12326407,"diff.diff",4151,0,"",diff,selection_command +5430,12327035,"diff.diff",4104,0,"",diff,selection_mouse +5431,12327038,"diff.diff",4103,0,"",diff,selection_command +5432,12372711,"genie.py",0,0,"",python,tab +5433,12372712,"genie.py",7791,0,"",python,selection_mouse +5434,12372756,"genie.py",7790,0,"",python,selection_command +5435,12373439,"genie.py",7544,0,"",python,selection_mouse +5436,12373441,"genie.py",7543,0,"",python,selection_command +5437,12374041,"genie.py",7545,0,"",python,selection_mouse +5438,12374900,"genie.py",7576,0,"",python,selection_mouse +5439,12374903,"genie.py",7575,0,"",python,selection_command +5440,12375533,"genie.py",7546,0,"",python,selection_mouse +5441,12377868,"genie.py",7545,0,"",python,selection_mouse +5442,12377873,"genie.py",7546,0,"",python,selection_mouse +5443,12377873,"genie.py",7545,0,"",python,selection_mouse +5444,12377923,"genie.py",7576,0,"",python,selection_mouse +5445,12377947,"genie.py",7575,0,"",python,selection_command +5446,12378509,"genie.py",7544,0,"",python,selection_mouse +5447,12378527,"genie.py",7543,0,"",python,selection_command +5448,12379030,"genie.py",7545,0,"",python,selection_mouse +5449,12380306,"genie.py",7576,0,"",python,selection_mouse +5450,12380320,"genie.py",7575,0,"",python,selection_command +5451,12380934,"genie.py",7545,0,"",python,selection_mouse +5452,12381643,"genie.py",7546,0,"",python,selection_mouse 
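The two dynamics variants recorded above differ mainly in their masking regime: DynamicsMaskGIT trains with random Bernoulli masking over bidirectional spatial attention, while DynamicsAutoregressive (spacial_bert=False) relies on the causal mask alone. A minimal sketch of the MaskGIT training mask in isolation; the (B, T, N, D) values and the zero/one stand-ins below are illustrative, not from the recording:

import jax
import jax.numpy as jnp

# Toy stand-ins: batch, frames, patches per frame, model dim
B, T, N, D = 2, 4, 16, 8
mask_limit = 0.5  # lower bound on the per-example masking probability
rng1, rng2 = jax.random.split(jax.random.PRNGKey(0))

# Draw a masking probability in [mask_limit, 1), then a Bernoulli mask over patches
mask_prob = jax.random.uniform(rng1, minval=mask_limit)
mask = jax.random.bernoulli(rng2, mask_prob, (B, T, N))
mask = mask.at[:, 0].set(False)  # the first (conditioning) frame is never masked

# Replace masked patch embeddings with a learned mask token
vid_embed = jnp.zeros((B, T, N, D))   # stand-in for patch_embed output
mask_token = jnp.ones((1, 1, 1, D))   # stand-in for the learned parameter
vid_embed = jnp.where(mask[..., None], mask_token, vid_embed)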
+5453,12382329,"genie.py",7545,0,"",python,selection_mouse +5454,12382977,"genie.py",7546,0,"",python,selection_mouse +5455,12383623,"genie.py",7545,0,"",python,selection_mouse +5456,12384290,"genie.py",7546,0,"",python,selection_mouse +5457,12384935,"genie.py",7545,0,"",python,selection_mouse +5458,12385530,"genie.py",7546,0,"",python,selection_mouse +5459,12386095,"genie.py",7545,0,"",python,selection_mouse +5460,12387279,"genie.py",7546,0,"",python,selection_mouse +5461,12405998,"models/dynamics.py",0,0,"",python,tab +5462,12409791,"models/dynamics.py",2375,0,"",python,selection_mouse +5463,12410270,"models/dynamics.py",2397,0,"",python,selection_mouse +5464,12411252,"models/dynamics.py",2108,0,"",python,selection_mouse +5465,12411938,"models/dynamics.py",2003,0,"",python,selection_mouse +5466,12412520,"models/dynamics.py",2080,0,"",python,selection_mouse +5467,12413015,"models/dynamics.py",2107,0,"",python,selection_mouse +5468,12413649,"models/dynamics.py",2043,0,"",python,selection_mouse +5469,12414211,"models/dynamics.py",1984,0,"",python,selection_mouse +5470,12414805,"models/dynamics.py",2003,0,"",python,selection_mouse +5471,12415363,"models/dynamics.py",2024,0,"",python,selection_mouse +5472,12415893,"models/dynamics.py",2044,0,"",python,selection_mouse +5473,12416436,"models/dynamics.py",2062,0,"",python,selection_mouse +5474,12416972,"models/dynamics.py",2081,0,"",python,selection_mouse +5475,12418621,"models/dynamics.py",2406,0,"",python,selection_mouse +5476,12418752,"models/dynamics.py",2403,5,"False",python,selection_mouse +5477,12419645,"models/dynamics.py",2403,0,"",python,selection_mouse +5478,12419645,"models/dynamics.py",2403,5,"False",python,selection_mouse +5479,12420384,"models/dynamics.py",2407,0,"",python,selection_mouse +5480,12420975,"models/dynamics.py",2403,5,"False",python,selection_mouse +5481,12421762,"models/dynamics.py",2407,0,"",python,selection_mouse +5482,12422192,"models/dynamics.py",2403,5,"False",python,selection_mouse +5483,12422920,"models/dynamics.py",2407,0,"",python,selection_mouse +5484,12423079,"models/dynamics.py",2403,5,"False",python,selection_mouse +5485,12423276,"models/dynamics.py",2378,32," spacial_bert=False,\n",python,selection_mouse +5486,12423871,"models/dynamics.py",2407,0,"",python,selection_mouse +5487,12423920,"models/dynamics.py",2403,5,"False",python,selection_mouse +5488,12425435,"models/dynamics.py",2392,0,"",python,selection_mouse +5489,12425588,"models/dynamics.py",2390,12,"spacial_bert",python,selection_mouse +5490,12425860,"models/dynamics.py",2371,31,"dtype,\n spacial_bert",python,selection_mouse +5491,12425860,"models/dynamics.py",2376,26,",\n spacial_bert",python,selection_mouse +5492,12425861,"models/dynamics.py",2377,25,"\n spacial_bert",python,selection_mouse +5493,12426056,"models/dynamics.py",2390,18,"spacial_bert=False",python,selection_mouse +5494,12426057,"models/dynamics.py",2390,19,"spacial_bert=False,",python,selection_mouse +5495,12426504,"models/dynamics.py",2409,0,"",python,selection_mouse +5496,12426551,"models/dynamics.py",2408,0,"",python,selection_command +5497,12426901,"models/dynamics.py",2409,0,"",python,selection_mouse +5498,12426931,"models/dynamics.py",2408,0,"",python,selection_command +5499,12427123,"models/dynamics.py",2403,5,"False",python,selection_mouse +5500,12427124,"models/dynamics.py",2403,6,"False,",python,selection_command +5501,12427279,"models/dynamics.py",2402,7,"=False,",python,selection_mouse 
+5502,12427280,"models/dynamics.py",2390,19,"spacial_bert=False,",python,selection_mouse +5503,12427611,"models/dynamics.py",2395,0,"",python,selection_mouse +5504,12490576,"models/dynamics.py",2552,0,"",python,selection_mouse +5505,12490583,"models/dynamics.py",2551,0,"",python,selection_command +5506,12491194,"models/dynamics.py",2419,0,"",python,selection_mouse +5507,12491206,"models/dynamics.py",2418,0,"",python,selection_command +5508,12491763,"models/dynamics.py",2374,0,"",python,selection_mouse +5509,12492268,"models/dynamics.py",2419,0,"",python,selection_mouse +5510,12492301,"models/dynamics.py",2418,0,"",python,selection_command +5511,12492816,"models/dynamics.py",2397,0,"",python,selection_mouse +5512,12495864,"models/dynamics.py",2398,0,"",python,selection_command +5513,12497204,"models/dynamics.py",2397,0,"",python,selection_command +5514,12497497,"models/dynamics.py",2398,0,"",python,selection_command +5515,12497893,"models/dynamics.py",2397,0,"",python,selection_command +5516,12498141,"models/dynamics.py",2398,0,"",python,selection_command +5517,12498353,"models/dynamics.py",2399,0,"",python,selection_command +5518,12498646,"models/dynamics.py",2398,0,"",python,selection_command +5519,12498814,"models/dynamics.py",2397,0,"",python,selection_command +5520,12499089,"models/dynamics.py",2398,0,"",python,selection_command +5521,12499251,"models/dynamics.py",2399,0,"",python,selection_command +5522,12499438,"models/dynamics.py",2400,0,"",python,selection_command +5523,12499612,"models/dynamics.py",2401,0,"",python,selection_command +5524,12500438,"models/dynamics.py",2400,0,"",python,selection_command +5525,12500621,"models/dynamics.py",2399,0,"",python,selection_command +5526,12500779,"models/dynamics.py",2398,0,"",python,selection_command +5527,12501061,"models/dynamics.py",2399,0,"",python,selection_command +5528,12501249,"models/dynamics.py",2400,0,"",python,selection_command +5529,12501388,"models/dynamics.py",2401,0,"",python,selection_command +5530,12501520,"models/dynamics.py",2402,0,"",python,selection_command +5531,12501745,"models/dynamics.py",2401,0,"",python,selection_command +5532,12501924,"models/dynamics.py",2400,0,"",python,selection_command +5533,12502098,"models/dynamics.py",2399,0,"",python,selection_command +5534,12502286,"models/dynamics.py",2398,0,"",python,selection_command +5535,12502514,"models/dynamics.py",2399,0,"",python,selection_command +5536,12502690,"models/dynamics.py",2400,0,"",python,selection_command +5537,12502868,"models/dynamics.py",2401,0,"",python,selection_command +5538,12503465,"models/dynamics.py",2400,0,"",python,selection_command +5539,12503631,"models/dynamics.py",2399,0,"",python,selection_command +5540,12516505,"models/dynamics.py",1903,0,"",python,selection_mouse +5541,12516689,"models/dynamics.py",1891,22,"DynamicsAutoregressive",python,selection_mouse +5542,12520040,"genie.py",0,0,"",python,tab +5543,12525432,"TERMINAL",0,0,"bash",,terminal_focus +5544,12574993,"genie.py",7459,0,"",python,selection_mouse +5545,12575981,"genie.py",7545,0,"",python,selection_mouse +5546,12576592,"genie.py",7546,0,"",python,selection_mouse +5547,12577201,"genie.py",7545,0,"",python,selection_mouse +5548,12577700,"genie.py",7546,0,"",python,selection_mouse +5549,12578649,"genie.py",7545,0,"",python,selection_mouse +5550,12580052,"genie.py",7546,0,"",python,selection_mouse +5551,12580971,"genie.py",7545,0,"",python,selection_mouse +5552,12581610,"genie.py",7546,0,"",python,selection_mouse +5553,12583643,"genie.py",7840,0,"",python,selection_mouse 
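The selections above keep returning to spacial_bert=False in models/dynamics.py, the flag the diff at the top of this section threads through STTransformer: when it is False, spatial attention receives a lower-triangular mask and becomes causal. A self-contained sketch of that toggle with toy shapes, assuming a recent Flax where nn.MultiHeadAttention blocks positions whose mask entry is zero:

import jax
import jax.numpy as jnp
import flax.linen as nn

class SpatialAttention(nn.Module):
    dim: int
    num_heads: int
    spacial_bert: bool = True  # spelling kept from the recorded identifier

    @nn.compact
    def __call__(self, z: jax.Array) -> jax.Array:
        # None -> full bidirectional (BERT-style) attention over patches;
        # jnp.tri -> lower-triangular mask, i.e. causal spatial attention.
        mask = None if self.spacial_bert else jnp.tri(z.shape[-2])
        return nn.MultiHeadAttention(
            num_heads=self.num_heads, qkv_features=self.dim
        )(z, mask=mask)

z = jnp.zeros((2, 16, 32))  # (batch, patches, features), toy values
params = SpatialAttention(dim=32, num_heads=4).init(jax.random.PRNGKey(0), z)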
+5554,12583646,"genie.py",7839,0,"",python,selection_command +5555,12585249,"genie.py",7335,0,"",python,selection_mouse +5556,12585954,"genie.py",7545,0,"",python,selection_mouse +5557,12586570,"genie.py",7576,0,"",python,selection_mouse +5558,12586571,"genie.py",7575,0,"",python,selection_command +5559,12587010,"genie.py",7576,0,"",python,selection_mouse +5560,12587011,"genie.py",7575,0,"",python,selection_command +5561,12587174,"genie.py",7576,0,"",python,selection_mouse +5562,12587190,"genie.py",7575,0,"",python,selection_command +5563,12587730,"genie.py",7546,0,"",python,selection_mouse +5564,12589249,"genie.py",7545,0,"",python,selection_mouse +5565,12589957,"diff.diff",0,0,"",diff,tab +5566,12589958,"diff.diff",1991,0,"",diff,selection_mouse +5567,12590004,"diff.diff",1990,0,"",diff,selection_command +5568,12595268,"genie.py",0,0,"",python,tab +5569,12596433,"genie.py",7546,0,"",python,selection_mouse +5570,12598909,"genie.py",7545,0,"",python,selection_mouse +5571,12600988,"genie.py",7546,0,"",python,selection_mouse +5572,12617375,"genie.py",7545,0,"",python,selection_command +5573,12617513,"genie.py",7546,0,"",python,selection_command +5574,12617672,"genie.py",7545,0,"",python,selection_command +5575,12617923,"genie.py",7508,0,"",python,selection_command +5576,12617967,"genie.py",7545,0,"",python,selection_command +5577,12618222,"genie.py",7508,0,"",python,selection_command +5578,12618253,"genie.py",7545,0,"",python,selection_command +5579,12618427,"genie.py",7508,0,"",python,selection_command +5580,12618495,"genie.py",7545,0,"",python,selection_command +5581,12618651,"genie.py",7508,0,"",python,selection_command +5582,12618741,"genie.py",7545,0,"",python,selection_command +5583,12619080,"genie.py",7546,0,"",python,selection_command +5584,12619251,"genie.py",7545,0,"",python,selection_command +5585,12619428,"genie.py",7546,0,"",python,selection_command +5586,12619591,"genie.py",7545,0,"",python,selection_command +5587,12619678,"genie.py",7546,0,"",python,selection_command +5588,12619827,"genie.py",7545,0,"",python,selection_command +5589,12619897,"genie.py",7546,0,"",python,selection_command +5590,12620043,"genie.py",7545,0,"",python,selection_command +5591,12620125,"genie.py",7546,0,"",python,selection_command +5592,12620303,"genie.py",7545,0,"",python,selection_command +5593,12620388,"genie.py",7546,0,"",python,selection_command +5594,12620521,"genie.py",7545,0,"",python,selection_command +5595,12620575,"genie.py",7546,0,"",python,selection_command +5596,12620723,"genie.py",7545,0,"",python,selection_command +5597,12620823,"genie.py",7546,0,"",python,selection_command +5598,12620964,"genie.py",7545,0,"",python,selection_command +5599,12621034,"genie.py",7546,0,"",python,selection_command +5600,12621163,"genie.py",7545,0,"",python,selection_command +5601,12621269,"genie.py",7546,0,"",python,selection_command +5602,12621400,"genie.py",7545,0,"",python,selection_command +5603,12621633,"genie.py",7508,0,"",python,selection_command +5604,12621729,"genie.py",7545,0,"",python,selection_command +5605,12622013,"genie.py",7546,0,"",python,selection_command +5606,12622277,"genie.py",7545,0,"",python,selection_command +5607,12622464,"genie.py",7508,0,"",python,selection_command +5608,12622571,"genie.py",7545,0,"",python,selection_command +5609,12622834,"genie.py",7546,0,"",python,selection_command +5610,12623074,"genie.py",7545,0,"",python,selection_command +5611,12623312,"genie.py",7546,0,"",python,selection_command +5612,12623518,"genie.py",7545,0,"",python,selection_command 
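The long run of cursor events here sits in the sampling half of the genie.py buffer captured above. Its outer loop advances one frame per step of a jax.lax.scan: mask everything from the current timestep onward, then let the inner MaskGIT loop refine the masked tokens. An isolated sketch of that carry structure, with the inner refinement stubbed out and toy shapes assumed:

import jax
import jax.numpy as jnp

B, S, N = 1, 8, 16  # toy batch, sequence length, patches per frame

def generation_step_fn(carry, step_t):
    rng, token_idxs = carry
    rng, step_rng = jax.random.split(rng)  # step_rng would seed the inner MaskGIT loop
    # Mask current and future frames (t >= step_t), as in the recording
    mask = jnp.broadcast_to((jnp.arange(S) >= step_t)[None, :, None], (B, S, N))
    masked = token_idxs * ~mask  # zero out the current and all future frames
    # ... the inner MaskGIT refinement would fill the masked tokens here ...
    return (rng, masked), None

init = (jax.random.PRNGKey(0), jnp.ones((B, S, N), dtype=jnp.int32))
(final_rng, final_tokens), _ = jax.lax.scan(generation_step_fn, init, jnp.arange(2, S))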
+5613,12623600,"genie.py",7546,0,"",python,selection_command +5614,12623810,"genie.py",7547,0,"",python,selection_command +5615,12623890,"genie.py",7546,0,"",python,selection_command +5616,12624029,"genie.py",7547,0,"",python,selection_command +5617,12624155,"genie.py",7546,0,"",python,selection_command +5618,12624338,"genie.py",7545,0,"",python,selection_command +5619,12624538,"genie.py",7508,0,"",python,selection_command +5620,12624710,"genie.py",7545,0,"",python,selection_command +5621,12624919,"genie.py",7546,0,"",python,selection_command +5622,12625082,"genie.py",7545,0,"",python,selection_command +5623,12625244,"genie.py",7546,0,"",python,selection_command +5624,12625410,"genie.py",7545,0,"",python,selection_command +5625,12625472,"genie.py",7546,0,"",python,selection_command +5626,12625609,"genie.py",7545,0,"",python,selection_command +5627,12625696,"genie.py",7546,0,"",python,selection_command +5628,12625835,"genie.py",7545,0,"",python,selection_command +5629,12625902,"genie.py",7546,0,"",python,selection_command +5630,12626058,"genie.py",7545,0,"",python,selection_command +5631,12626149,"genie.py",7546,0,"",python,selection_command +5632,12626328,"genie.py",7545,0,"",python,selection_command +5633,12626588,"genie.py",7508,0,"",python,selection_command +5634,12626716,"genie.py",7545,0,"",python,selection_command +5635,12627160,"genie.py",7508,0,"",python,selection_command +5636,12627453,"genie.py",7545,0,"",python,selection_command +5637,12627683,"genie.py",7546,0,"",python,selection_command +5638,12627954,"genie.py",7545,0,"",python,selection_command +5639,12628153,"genie.py",7546,0,"",python,selection_command +5640,12628300,"genie.py",7545,0,"",python,selection_command +5641,12628412,"genie.py",7546,0,"",python,selection_command +5642,12628581,"genie.py",7545,0,"",python,selection_command +5643,12628720,"genie.py",7546,0,"",python,selection_command +5644,12628829,"genie.py",7545,0,"",python,selection_command +5645,12628946,"genie.py",7546,0,"",python,selection_command +5646,12629164,"genie.py",7545,0,"",python,selection_command +5647,12629233,"genie.py",7546,0,"",python,selection_command +5648,12629416,"genie.py",7545,0,"",python,selection_command +5649,12629500,"genie.py",7546,0,"",python,selection_command +5650,12629686,"genie.py",7545,0,"",python,selection_command +5651,12629779,"genie.py",7546,0,"",python,selection_command +5652,12629875,"genie.py",7545,0,"",python,selection_command +5653,12630129,"genie.py",7546,0,"",python,selection_command +5654,12630371,"genie.py",7545,0,"",python,selection_command +5655,12630585,"genie.py",7546,0,"",python,selection_command +5656,12630788,"genie.py",7545,0,"",python,selection_command +5657,12630960,"genie.py",7546,0,"",python,selection_command +5658,12631226,"genie.py",7545,0,"",python,selection_command +5659,12631457,"genie.py",7546,0,"",python,selection_command +5660,12631728,"genie.py",7545,0,"",python,selection_command +5661,12632053,"genie.py",7546,0,"",python,selection_command +5662,12632386,"genie.py",7545,0,"",python,selection_command +5663,12632803,"genie.py",7546,0,"",python,selection_command +5664,12633160,"genie.py",7545,0,"",python,selection_command +5665,12633497,"genie.py",7546,0,"",python,selection_command +5666,12633864,"genie.py",7545,0,"",python,selection_command +5667,12634177,"genie.py",7546,0,"",python,selection_command +5668,12634500,"genie.py",7545,0,"",python,selection_command +5669,12682412,"genie.py",7576,0,"",python,selection_mouse +5670,12682424,"genie.py",7575,0,"",python,selection_command 
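The cursor keeps hovering the same spot in genie.py's sampling code; the piece worth studying in isolation there is MaskGITStep's cosine schedule, which decides how many tokens stay masked and how sharply to sample at each refinement step. The schedule alone, with N = 16 as an assumed toy patch count:

import jax.numpy as jnp

steps, temperature, N = 25, 1.0, 16
for step in range(steps):
    # Cosine decays from ~1 toward 0 across refinement steps
    unmasked_ratio = jnp.cos(jnp.pi * (step + 1) / (steps * 2))
    # Sampling temperature ramps up as the schedule decays
    step_temp = temperature * (1.0 - unmasked_ratio)
    # Number of tokens committed (left unmasked) grows each step
    num_unmasked = jnp.round(N * (1.0 - unmasked_ratio)).astype(int)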
+5671,12682991,"genie.py",7546,0,"",python,selection_mouse +5672,12689385,"diff.diff",0,0,"",diff,tab +5673,12689386,"diff.diff",2812,0,"",diff,selection_mouse +5674,12689411,"diff.diff",2811,0,"",diff,selection_command +5675,12689926,"diff.diff",2410,0,"",diff,selection_mouse +5676,12689932,"diff.diff",2409,0,"",diff,selection_command +5677,12690651,"diff.diff",2435,0,"",diff,selection_mouse +5678,12690652,"diff.diff",2434,0,"",diff,selection_command +5679,12691407,"diff.diff",2467,0,"",diff,selection_mouse +5680,12691411,"diff.diff",2466,0,"",diff,selection_command +5681,12697217,"genie.py",0,0,"",python,tab +5682,12697218,"genie.py",3396,0,"",python,selection_mouse +5683,12697227,"genie.py",3395,0,"",python,selection_command +5684,12697953,"genie.py",2223,0,"",python,selection_mouse +5685,12785033,"models/dynamics.py",0,0,"",python,tab +5686,12847336,"models/dynamics.py",588,0,"",python,selection_mouse +5687,12849043,"models/dynamics.py",136,0,"",python,selection_mouse +5688,12849409,"models/dynamics.py",133,15,"DynamicsMaskGIT",python,selection_mouse +5689,12850237,"models/dynamics.py",136,0,"",python,selection_mouse +5690,12850237,"models/dynamics.py",133,15,"DynamicsMaskGIT",python,selection_mouse +5691,12850886,"models/dynamics.py",136,0,"",python,selection_mouse +5692,12851366,"models/dynamics.py",128,0,"",python,selection_mouse +5693,12851510,"models/dynamics.py",127,5,"class",python,selection_mouse +5694,12851794,"models/dynamics.py",136,0,"",python,selection_mouse +5695,12851999,"models/dynamics.py",133,15,"DynamicsMaskGIT",python,selection_mouse +5696,12852754,"models/dynamics.py",405,0,"",python,selection_mouse +5697,12852936,"models/dynamics.py",398,8,"dynamics",python,selection_mouse +5698,12853403,"models/dynamics.py",614,0,"",python,selection_mouse +5699,12853552,"models/dynamics.py",612,5,"dtype",python,selection_mouse +5700,12854007,"models/dynamics.py",1032,0,"",python,selection_mouse +5701,12854037,"models/dynamics.py",1031,0,"",python,selection_command +5702,12854213,"models/dynamics.py",1032,0,"",python,selection_mouse +5703,12854220,"models/dynamics.py",1031,0,"",python,selection_command +5704,12854603,"models/dynamics.py",1176,0,"",python,selection_mouse +5705,12854764,"models/dynamics.py",1172,4,"self",python,selection_mouse +5706,12860969,"models/dynamics.py",1899,0,"",python,selection_mouse +5707,12866001,"diff.diff",0,0,"",diff,tab +5708,12866002,"diff.diff",1086,0,"",diff,selection_mouse +5709,13036051,"diff.diff",1541,0,"",diff,selection_mouse +5710,13036061,"diff.diff",1540,0,"",diff,selection_command +5711,13036757,"diff.diff",1171,0,"",diff,selection_mouse +5712,13036760,"diff.diff",1170,0,"",diff,selection_command +5713,13037450,"diff.diff",1171,0,"",diff,selection_mouse +5714,13037452,"diff.diff",1170,0,"",diff,selection_command +5715,13037580,"diff.diff",1171,0,"",diff,selection_mouse +5716,13037583,"diff.diff",1170,0,"",diff,selection_command +5717,13037867,"diff.diff",1170,1,":",diff,selection_mouse +5718,13037868,"diff.diff",1170,89,":\n+ self.dynamics = DynamicsMaskGIT(\n+ model_dim=self.dyna_dim,",diff,selection_mouse +5719,13037868,"diff.diff",1170,192,":\n+ self.dynamics = DynamicsMaskGIT(\n+ model_dim=self.dyna_dim,\n+ num_latents=self.num_patch_latents,\n+ num_blocks=self.dyna_num_blocks,",diff,selection_mouse +5720,13037868,"diff.diff",1170,279,":\n+ self.dynamics = DynamicsMaskGIT(\n+ model_dim=self.dyna_dim,\n+ num_latents=self.num_patch_latents,\n+ num_blocks=self.dyna_num_blocks,\n+ num_heads=self.dyna_num_heads,\n+ 
dropout=self.dropout,",diff,selection_mouse +5721,13037868,"diff.diff",1170,324,":\n+ self.dynamics = DynamicsMaskGIT(\n+ model_dim=self.dyna_dim,\n+ num_latents=self.num_patch_latents,\n+ num_blocks=self.dyna_num_blocks,\n+ num_heads=self.dyna_num_heads,\n+ dropout=self.dropout,\n+ mask_limit=self.mask_limit,",diff,selection_mouse +5722,13037869,"diff.diff",1170,371,":\n+ self.dynamics = DynamicsMaskGIT(\n+ model_dim=self.dyna_dim,\n+ num_latents=self.num_patch_latents,\n+ num_blocks=self.dyna_num_blocks,\n+ num_heads=self.dyna_num_heads,\n+ dropout=self.dropout,\n+ mask_limit=self.mask_limit,\n+ param_dtype=self.param_dtype,",diff,selection_mouse +5723,13037869,"diff.diff",1170,406,":\n+ self.dynamics = DynamicsMaskGIT(\n+ model_dim=self.dyna_dim,\n+ num_latents=self.num_patch_latents,\n+ num_blocks=self.dyna_num_blocks,\n+ num_heads=self.dyna_num_heads,\n+ dropout=self.dropout,\n+ mask_limit=self.mask_limit,\n+ param_dtype=self.param_dtype,\n+ dtype=self.dtype,",diff,selection_mouse +5724,13037870,"diff.diff",1171,0,"",diff,selection_command +5725,13037912,"diff.diff",1171,421,"\n+ self.dynamics = DynamicsMaskGIT(\n+ model_dim=self.dyna_dim,\n+ num_latents=self.num_patch_latents,\n+ num_blocks=self.dyna_num_blocks,\n+ num_heads=self.dyna_num_heads,\n+ dropout=self.dropout,\n+ mask_limit=self.mask_limit,\n+ param_dtype=self.param_dtype,\n+ dtype=self.dtype,\n+ ) ",diff,selection_mouse +5726,13037942,"diff.diff",1171,489,"\n+ self.dynamics = DynamicsMaskGIT(\n+ model_dim=self.dyna_dim,\n+ num_latents=self.num_patch_latents,\n+ num_blocks=self.dyna_num_blocks,\n+ num_heads=self.dyna_num_heads,\n+ dropout=self.dropout,\n+ mask_limit=self.mask_limit,\n+ param_dtype=self.param_dtype,\n+ dtype=self.dtype,\n+ ) \n+ else:\n+ self.dynamics = DynamicsAutoregressive(",diff,selection_mouse +5727,13037995,"diff.diff",1171,531,"\n+ self.dynamics = DynamicsMaskGIT(\n+ model_dim=self.dyna_dim,\n+ num_latents=self.num_patch_latents,\n+ num_blocks=self.dyna_num_blocks,\n+ num_heads=self.dyna_num_heads,\n+ dropout=self.dropout,\n+ mask_limit=self.mask_limit,\n+ param_dtype=self.param_dtype,\n+ dtype=self.dtype,\n+ ) \n+ else:\n+ self.dynamics = DynamicsAutoregressive(\n+ model_dim=self.dyna_dim,",diff,selection_mouse +5728,13038012,"diff.diff",1171,584,"\n+ self.dynamics = DynamicsMaskGIT(\n+ model_dim=self.dyna_dim,\n+ num_latents=self.num_patch_latents,\n+ num_blocks=self.dyna_num_blocks,\n+ num_heads=self.dyna_num_heads,\n+ dropout=self.dropout,\n+ mask_limit=self.mask_limit,\n+ param_dtype=self.param_dtype,\n+ dtype=self.dtype,\n+ ) \n+ else:\n+ self.dynamics = DynamicsAutoregressive(\n+ model_dim=self.dyna_dim,\n+ num_latents=self.num_patch_latents,",diff,selection_mouse +5729,13038036,"diff.diff",1171,634,"\n+ self.dynamics = DynamicsMaskGIT(\n+ model_dim=self.dyna_dim,\n+ num_latents=self.num_patch_latents,\n+ num_blocks=self.dyna_num_blocks,\n+ num_heads=self.dyna_num_heads,\n+ dropout=self.dropout,\n+ mask_limit=self.mask_limit,\n+ param_dtype=self.param_dtype,\n+ dtype=self.dtype,\n+ ) \n+ else:\n+ self.dynamics = DynamicsAutoregressive(\n+ model_dim=self.dyna_dim,\n+ num_latents=self.num_patch_latents,\n+ num_blocks=self.dyna_num_blocks,",diff,selection_mouse +5730,13038087,"diff.diff",1171,682,"\n+ self.dynamics = DynamicsMaskGIT(\n+ model_dim=self.dyna_dim,\n+ num_latents=self.num_patch_latents,\n+ num_blocks=self.dyna_num_blocks,\n+ num_heads=self.dyna_num_heads,\n+ dropout=self.dropout,\n+ mask_limit=self.mask_limit,\n+ param_dtype=self.param_dtype,\n+ dtype=self.dtype,\n+ ) \n+ else:\n+ 
self.dynamics = DynamicsAutoregressive(\n+ model_dim=self.dyna_dim,\n+ num_latents=self.num_patch_latents,\n+ num_blocks=self.dyna_num_blocks,\n+ num_heads=self.dyna_num_heads,",diff,selection_mouse +5731,13039320,"diff.diff",1141,0,"",diff,selection_mouse +5732,13039350,"diff.diff",1140,0,"",diff,selection_command +5733,13039460,"diff.diff",1141,0,"",diff,selection_mouse +5734,13039461,"diff.diff",1140,0,"",diff,selection_command +5735,13039593,"diff.diff",1140,2,"+\n",diff,selection_mouse +5736,13039608,"diff.diff",1141,1,"\n",diff,selection_command +5737,13039737,"diff.diff",1141,31,"\n+ if self.use_maskgit:\n",diff,selection_mouse +5738,13039933,"diff.diff",1141,222,"\n+ if self.use_maskgit:\n+ self.dynamics = DynamicsMaskGIT(\n+ model_dim=self.dyna_dim,\n+ num_latents=self.num_patch_latents,\n+ num_blocks=self.dyna_num_blocks,\n",diff,selection_mouse +5739,13039934,"diff.diff",1141,452,"\n+ if self.use_maskgit:\n+ self.dynamics = DynamicsMaskGIT(\n+ model_dim=self.dyna_dim,\n+ num_latents=self.num_patch_latents,\n+ num_blocks=self.dyna_num_blocks,\n+ num_heads=self.dyna_num_heads,\n+ dropout=self.dropout,\n+ mask_limit=self.mask_limit,\n+ param_dtype=self.param_dtype,\n+ dtype=self.dtype,\n+ ) \n",diff,selection_mouse +5740,13039934,"diff.diff",1141,752,"\n+ if self.use_maskgit:\n+ self.dynamics = DynamicsMaskGIT(\n+ model_dim=self.dyna_dim,\n+ num_latents=self.num_patch_latents,\n+ num_blocks=self.dyna_num_blocks,\n+ num_heads=self.dyna_num_heads,\n+ dropout=self.dropout,\n+ mask_limit=self.mask_limit,\n+ param_dtype=self.param_dtype,\n+ dtype=self.dtype,\n+ ) \n+ else:\n+ self.dynamics = DynamicsAutoregressive(\n+ model_dim=self.dyna_dim,\n+ num_latents=self.num_patch_latents,\n+ num_blocks=self.dyna_num_blocks,\n+ num_heads=self.dyna_num_heads,\n+ dropout=self.dropout,\n",diff,selection_mouse +5741,13039935,"diff.diff",1141,834,"\n+ if self.use_maskgit:\n+ self.dynamics = DynamicsMaskGIT(\n+ model_dim=self.dyna_dim,\n+ num_latents=self.num_patch_latents,\n+ num_blocks=self.dyna_num_blocks,\n+ num_heads=self.dyna_num_heads,\n+ dropout=self.dropout,\n+ mask_limit=self.mask_limit,\n+ param_dtype=self.param_dtype,\n+ dtype=self.dtype,\n+ ) \n+ else:\n+ self.dynamics = DynamicsAutoregressive(\n+ model_dim=self.dyna_dim,\n+ num_latents=self.num_patch_latents,\n+ num_blocks=self.dyna_num_blocks,\n+ num_heads=self.dyna_num_heads,\n+ dropout=self.dropout,\n+ param_dtype=self.param_dtype,\n+ dtype=self.dtype,\n",diff,selection_mouse +5742,13039935,"diff.diff",1141,851,"\n+ if self.use_maskgit:\n+ self.dynamics = DynamicsMaskGIT(\n+ model_dim=self.dyna_dim,\n+ num_latents=self.num_patch_latents,\n+ num_blocks=self.dyna_num_blocks,\n+ num_heads=self.dyna_num_heads,\n+ dropout=self.dropout,\n+ mask_limit=self.mask_limit,\n+ param_dtype=self.param_dtype,\n+ dtype=self.dtype,\n+ ) \n+ else:\n+ self.dynamics = DynamicsAutoregressive(\n+ model_dim=self.dyna_dim,\n+ num_latents=self.num_patch_latents,\n+ num_blocks=self.dyna_num_blocks,\n+ num_heads=self.dyna_num_heads,\n+ dropout=self.dropout,\n+ param_dtype=self.param_dtype,\n+ dtype=self.dtype,\n+ )\n \n",diff,selection_mouse +5743,13040009,"diff.diff",1141,940,"\n+ if self.use_maskgit:\n+ self.dynamics = DynamicsMaskGIT(\n+ model_dim=self.dyna_dim,\n+ num_latents=self.num_patch_latents,\n+ num_blocks=self.dyna_num_blocks,\n+ num_heads=self.dyna_num_heads,\n+ dropout=self.dropout,\n+ mask_limit=self.mask_limit,\n+ param_dtype=self.param_dtype,\n+ dtype=self.dtype,\n+ ) \n+ else:\n+ self.dynamics = DynamicsAutoregressive(\n+ 
model_dim=self.dyna_dim,\n+ num_latents=self.num_patch_latents,\n+ num_blocks=self.dyna_num_blocks,\n+ num_heads=self.dyna_num_heads,\n+ dropout=self.dropout,\n+ param_dtype=self.param_dtype,\n+ dtype=self.dtype,\n+ )\n \n def __call__(self, batch: Dict[str, Any], training: bool = True) -> Dict[str, Any]:\n",diff,selection_mouse +5744,13040310,"diff.diff",1141,851,"\n+ if self.use_maskgit:\n+ self.dynamics = DynamicsMaskGIT(\n+ model_dim=self.dyna_dim,\n+ num_latents=self.num_patch_latents,\n+ num_blocks=self.dyna_num_blocks,\n+ num_heads=self.dyna_num_heads,\n+ dropout=self.dropout,\n+ mask_limit=self.mask_limit,\n+ param_dtype=self.param_dtype,\n+ dtype=self.dtype,\n+ ) \n+ else:\n+ self.dynamics = DynamicsAutoregressive(\n+ model_dim=self.dyna_dim,\n+ num_latents=self.num_patch_latents,\n+ num_blocks=self.dyna_num_blocks,\n+ num_heads=self.dyna_num_heads,\n+ dropout=self.dropout,\n+ param_dtype=self.param_dtype,\n+ dtype=self.dtype,\n+ )\n \n",diff,selection_mouse +5745,13040384,"diff.diff",1141,849,"\n+ if self.use_maskgit:\n+ self.dynamics = DynamicsMaskGIT(\n+ model_dim=self.dyna_dim,\n+ num_latents=self.num_patch_latents,\n+ num_blocks=self.dyna_num_blocks,\n+ num_heads=self.dyna_num_heads,\n+ dropout=self.dropout,\n+ mask_limit=self.mask_limit,\n+ param_dtype=self.param_dtype,\n+ dtype=self.dtype,\n+ ) \n+ else:\n+ self.dynamics = DynamicsAutoregressive(\n+ model_dim=self.dyna_dim,\n+ num_latents=self.num_patch_latents,\n+ num_blocks=self.dyna_num_blocks,\n+ num_heads=self.dyna_num_heads,\n+ dropout=self.dropout,\n+ param_dtype=self.param_dtype,\n+ dtype=self.dtype,\n+ )\n",diff,selection_mouse +5746,13040515,"diff.diff",1141,834,"\n+ if self.use_maskgit:\n+ self.dynamics = DynamicsMaskGIT(\n+ model_dim=self.dyna_dim,\n+ num_latents=self.num_patch_latents,\n+ num_blocks=self.dyna_num_blocks,\n+ num_heads=self.dyna_num_heads,\n+ dropout=self.dropout,\n+ mask_limit=self.mask_limit,\n+ param_dtype=self.param_dtype,\n+ dtype=self.dtype,\n+ ) \n+ else:\n+ self.dynamics = DynamicsAutoregressive(\n+ model_dim=self.dyna_dim,\n+ num_latents=self.num_patch_latents,\n+ num_blocks=self.dyna_num_blocks,\n+ num_heads=self.dyna_num_heads,\n+ dropout=self.dropout,\n+ param_dtype=self.param_dtype,\n+ dtype=self.dtype,\n",diff,selection_mouse +5747,13041150,"diff.diff",1141,849,"\n+ if self.use_maskgit:\n+ self.dynamics = DynamicsMaskGIT(\n+ model_dim=self.dyna_dim,\n+ num_latents=self.num_patch_latents,\n+ num_blocks=self.dyna_num_blocks,\n+ num_heads=self.dyna_num_heads,\n+ dropout=self.dropout,\n+ mask_limit=self.mask_limit,\n+ param_dtype=self.param_dtype,\n+ dtype=self.dtype,\n+ ) \n+ else:\n+ self.dynamics = DynamicsAutoregressive(\n+ model_dim=self.dyna_dim,\n+ num_latents=self.num_patch_latents,\n+ num_blocks=self.dyna_num_blocks,\n+ num_heads=self.dyna_num_heads,\n+ dropout=self.dropout,\n+ param_dtype=self.param_dtype,\n+ dtype=self.dtype,\n+ )\n",diff,selection_mouse +5748,13043198,"diff.diff",1717,0,"",diff,selection_mouse +5749,14704036,"models/dynamics.py",0,0,"",python,tab +5750,14704037,"models/dynamics.py",1151,0,"",python,selection_mouse +5751,14704049,"models/dynamics.py",1150,0,"",python,selection_command +5752,14725224,"train_dynamics_maskgit.py",0,0,"",python,tab +5753,14737201,"train_dynamics_causal.py",0,0,"",python,tab +5754,14835969,"models/dynamics.py",0,0,"",python,tab +5755,14835970,"models/dynamics.py",1022,0,"",python,selection_mouse +5756,14836004,"models/dynamics.py",1021,0,"",python,selection_command 
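Rows 5752-5753 above open train_dynamics_maskgit.py and train_dynamics_causal.py, so the two dynamics variants evidently get separate training entry points. Neither file's contents are captured in this recording; purely as a hypothetical sketch, a causal variant's loss could be a shifted next-frame cross-entropy over tokenizer indices. The name dynamics_loss_fn is later selected in train_dynamics_causal.py, but the body below is an assumption, not the repository's actual loss:

import jax.numpy as jnp
import optax

def dynamics_loss_fn(token_logits: jnp.ndarray, video_tokens: jnp.ndarray) -> jnp.ndarray:
    # token_logits: (B, T, N, vocab) causal predictions; video_tokens: (B, T, N) indices.
    # Predictions at frame t are scored against the ground-truth tokens of frame t+1.
    ce = optax.softmax_cross_entropy_with_integer_labels(
        token_logits[:, :-1], video_tokens[:, 1:]
    )
    return ce.mean()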
+5757,14837888,"models/dynamics.py",1031,0,"",python,selection_command +5758,14838109,"models/dynamics.py",1033,0,"",python,selection_command +5759,14838310,"models/dynamics.py",1062,0,"",python,selection_command +5760,14838638,"models/dynamics.py",1033,0,"",python,selection_command +5761,14838794,"models/dynamics.py",1031,0,"",python,selection_command +5762,14838978,"models/dynamics.py",1021,0,"",python,selection_command +5763,14839477,"models/dynamics.py",979,0,"",python,selection_command +5764,14839512,"models/dynamics.py",949,0,"",python,selection_command +5765,14839541,"models/dynamics.py",916,0,"",python,selection_command +5766,14839574,"models/dynamics.py",886,0,"",python,selection_command +5767,14839720,"models/dynamics.py",867,0,"",python,selection_command +5768,14839812,"models/dynamics.py",886,0,"",python,selection_command +5769,14840289,"models/dynamics.py",916,0,"",python,selection_command +5770,14840333,"models/dynamics.py",949,0,"",python,selection_command +5771,14840362,"models/dynamics.py",979,0,"",python,selection_command +5772,14840391,"models/dynamics.py",1021,0,"",python,selection_command +5773,14840398,"models/dynamics.py",1031,0,"",python,selection_command +5774,14840470,"models/dynamics.py",1033,0,"",python,selection_command +5775,14840481,"models/dynamics.py",1062,0,"",python,selection_command +5776,14840509,"models/dynamics.py",1150,0,"",python,selection_command +5777,14840605,"models/dynamics.py",1062,0,"",python,selection_command +5778,14841129,"models/dynamics.py",1033,0,"",python,selection_command +5779,14841177,"models/dynamics.py",1031,0,"",python,selection_command +5780,14841178,"models/dynamics.py",1021,0,"",python,selection_command +5781,14841221,"models/dynamics.py",979,0,"",python,selection_command +5782,14841348,"models/dynamics.py",949,0,"",python,selection_command +5783,14841486,"models/dynamics.py",916,0,"",python,selection_command +5784,14841487,"models/dynamics.py",886,0,"",python,selection_command +5785,14841487,"models/dynamics.py",867,0,"",python,selection_command +5786,14841488,"models/dynamics.py",822,0,"",python,selection_command +5787,14841577,"models/dynamics.py",792,0,"",python,selection_command +5788,14841578,"models/dynamics.py",758,0,"",python,selection_command +5789,14841578,"models/dynamics.py",688,0,"",python,selection_command +5790,14841579,"models/dynamics.py",658,0,"",python,selection_command +5791,14841579,"models/dynamics.py",647,0,"",python,selection_command +5792,14841579,"models/dynamics.py",617,0,"",python,selection_command +5793,14841667,"models/dynamics.py",593,0,"",python,selection_command +5794,14841668,"models/dynamics.py",563,0,"",python,selection_command +5795,14841669,"models/dynamics.py",537,0,"",python,selection_command +5796,14841696,"models/dynamics.py",509,0,"",python,selection_command +5797,14841697,"models/dynamics.py",480,0,"",python,selection_command +5798,14841935,"models/dynamics.py",450,0,"",python,selection_command +5799,14842210,"models/dynamics.py",413,0,"",python,selection_command +5800,14842384,"models/dynamics.py",383,0,"",python,selection_command +5801,14842575,"models/dynamics.py",363,0,"",python,selection_command +5802,14842847,"models/dynamics.py",383,0,"",python,selection_command +5803,14843090,"models/dynamics.py",413,0,"",python,selection_command +5804,14843586,"models/dynamics.py",450,0,"",python,selection_command +5805,14843651,"models/dynamics.py",480,0,"",python,selection_command +5806,14843667,"models/dynamics.py",509,0,"",python,selection_command 
+5807,14843668,"models/dynamics.py",537,0,"",python,selection_command +5808,14843739,"models/dynamics.py",563,0,"",python,selection_command +5809,14843749,"models/dynamics.py",593,0,"",python,selection_command +5810,14843773,"models/dynamics.py",617,0,"",python,selection_command +5811,14843807,"models/dynamics.py",647,0,"",python,selection_command +5812,14843846,"models/dynamics.py",658,0,"",python,selection_command +5813,14843888,"models/dynamics.py",688,0,"",python,selection_command +5814,14843895,"models/dynamics.py",758,0,"",python,selection_command +5815,14843926,"models/dynamics.py",792,0,"",python,selection_command +5816,14843982,"models/dynamics.py",822,0,"",python,selection_command +5817,14843992,"models/dynamics.py",867,0,"",python,selection_command +5818,14844014,"models/dynamics.py",886,0,"",python,selection_command +5819,14844041,"models/dynamics.py",916,0,"",python,selection_command +5820,14844448,"models/dynamics.py",949,0,"",python,selection_command +5821,14844630,"models/dynamics.py",979,0,"",python,selection_command +5822,14844786,"models/dynamics.py",1021,0,"",python,selection_command +5823,14844933,"models/dynamics.py",1031,0,"",python,selection_command +5824,14846150,"train_dynamics_causal.py",0,0,"",python,tab +5825,14846151,"train_dynamics_causal.py",6063,0,"",python,selection_mouse +5826,14846180,"train_dynamics_causal.py",6062,0,"",python,selection_command +5827,14876776,"models/dynamics.py",0,0,"",python,tab +5828,14876777,"models/dynamics.py",836,0,"",python,selection_mouse +5829,14881098,"models/dynamics.py",451,0,"",python,selection_mouse +5830,14881099,"models/dynamics.py",450,0,"",python,selection_command +5831,14913597,"genie.py",0,0,"",python,tab +5832,14981095,"genie.py",448,0,"",python,selection_mouse +5833,14981097,"genie.py",447,0,"",python,selection_command +5834,14981909,"genie.py",393,0,"",python,selection_mouse +5835,14982547,"genie.py",361,0,"",python,selection_mouse +5836,14983067,"genie.py",368,0,"",python,selection_mouse +5837,14994378,"genie.py",370,0,"",python,selection_mouse +5838,14994574,"genie.py",368,5,"Genie",python,selection_mouse +5839,15006671,"genie.py",368,0,"",python,selection_mouse +5840,15007817,"genie.py",368,5,"",python,content +5841,15008289,"genie.py",368,0,"W",python,content +5842,15008289,"genie.py",369,0,"",python,selection_keyboard +5843,15008414,"genie.py",369,0,"o",python,content +5844,15008415,"genie.py",370,0,"",python,selection_keyboard +5845,15008535,"genie.py",370,0,"r",python,content +5846,15008536,"genie.py",371,0,"",python,selection_keyboard +5847,15008680,"genie.py",371,0,"l",python,content +5848,15008681,"genie.py",372,0,"",python,selection_keyboard +5849,15009199,"genie.py",372,0,"d",python,content +5850,15009200,"genie.py",373,0,"",python,selection_keyboard +5851,15009600,"genie.py",373,0,"M",python,content +5852,15009600,"genie.py",374,0,"",python,selection_keyboard +5853,15009820,"genie.py",374,0,"o",python,content +5854,15009821,"genie.py",375,0,"",python,selection_keyboard +5855,15009902,"genie.py",375,0,"d",python,content +5856,15009903,"genie.py",376,0,"",python,selection_keyboard +5857,15010038,"genie.py",376,0,"e",python,content +5858,15010039,"genie.py",377,0,"",python,selection_keyboard +5859,15010069,"genie.py",377,0,"l",python,content +5860,15010070,"genie.py",378,0,"",python,selection_keyboard +5861,15021589,"world_model.py",0,0,"from typing import Dict, Any\n\nimport optax\nimport jax\nimport jax.numpy as jnp\nimport flax.linen as nn\nfrom flax.training.train_state import TrainState\nimport 
orbax.checkpoint as ocp\n\nfrom models.dynamics import DynamicsMaskGIT, DynamicsAutoregressive\nfrom models.lam import LatentActionModel\nfrom models.tokenizer import TokenizerVQVAE\n\nimport os\nimport grain\n\n\nclass WorldModel(nn.Module):\n """"""Genie model""""""\n\n # --- Tokenizer ---\n in_dim: int\n tokenizer_dim: int\n latent_patch_dim: int\n num_patch_latents: int\n patch_size: int\n tokenizer_num_blocks: int\n tokenizer_num_heads: int\n # --- LAM ---\n lam_dim: int\n latent_action_dim: int\n num_latent_actions: int\n lam_patch_size: int\n lam_num_blocks: int\n lam_num_heads: int\n lam_co_train: bool\n # --- Dynamics ---\n dyna_dim: int\n dyna_num_blocks: int\n dyna_num_heads: int\n use_maskgit: bool\n param_dtype: jnp.dtype\n dtype: jnp.dtype\n dropout: float = 0.0\n mask_limit: float = 0.0\n\n def setup(self):\n self.tokenizer = TokenizerVQVAE(\n in_dim=self.in_dim,\n model_dim=self.tokenizer_dim,\n latent_dim=self.latent_patch_dim,\n num_latents=self.num_patch_latents,\n patch_size=self.patch_size,\n num_blocks=self.tokenizer_num_blocks,\n num_heads=self.tokenizer_num_heads,\n dropout=0.0,\n codebook_dropout=0.0,\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n )\n self.lam = LatentActionModel(\n in_dim=self.in_dim,\n model_dim=self.lam_dim,\n latent_dim=self.latent_patch_dim,\n num_latents=self.num_latent_actions,\n patch_size=self.lam_patch_size,\n num_blocks=self.lam_num_blocks,\n num_heads=self.lam_num_heads,\n dropout=0.0,\n codebook_dropout=0.0,\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n )\n\n if self.use_maskgit:\n self.dynamics = DynamicsMaskGIT(\n model_dim=self.dyna_dim,\n num_latents=self.num_patch_latents,\n num_blocks=self.dyna_num_blocks,\n num_heads=self.dyna_num_heads,\n dropout=self.dropout,\n mask_limit=self.mask_limit,\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n ) \n else:\n self.dynamics = DynamicsAutoregressive(\n model_dim=self.dyna_dim,\n num_latents=self.num_patch_latents,\n num_blocks=self.dyna_num_blocks,\n num_heads=self.dyna_num_heads,\n dropout=self.dropout,\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n )\n\n def __call__(self, batch: Dict[str, Any], training: bool = True) -> Dict[str, Any]:\n tokenizer_outputs = self.tokenizer.vq_encode(batch[""videos""], training=False)\n lam_outputs = self.lam.vq_encode(batch[""videos""], training=False)\n latent_actions = jax.lax.cond(\n self.lam_co_train,\n lambda: lam_outputs[""z_q""],\n lambda: jax.lax.stop_gradient(lam_outputs[""z_q""])\n )\n outputs = dict(\n video_tokens=jax.lax.stop_gradient(tokenizer_outputs[""indices""]),\n latent_actions=latent_actions,\n )\n outputs[""mask_rng""] = batch[""mask_rng""]\n dyna_outputs = self.dynamics(outputs, training)\n outputs.update(dyna_outputs)\n mle_indices = jnp.argmax(outputs[""token_logits""], axis=-1)\n outputs[""recon""] = self.tokenizer.decode(\n mle_indices, batch[""videos""].shape[2:4]\n )\n outputs[""lam_indices""] = lam_outputs[""indices""]\n return outputs\n\n @nn.compact\n def sample(\n self,\n batch: Dict[str, Any],\n seq_len: int,\n steps: int = 25,\n temperature: float = 1,\n sample_argmax: bool = False,\n ) -> Any:\n """"""\n Autoregressively samples up to `seq_len` future frames, following Figure 8 of the paper.\n\n - Input frames are tokenized once.\n - Future frames are generated autoregressively in token space.\n - All frames are detokenized in a single pass.\n\n Note:\n - For interactive or step-wise sampling, detokenization should occur after each action.\n - To maintain consistent tensor shapes across 
timesteps, all current and future frames are decoded at every step.\n - Temporal causal structure is preserved by \n a) reapplying the mask before each decoding step.\n b) a temporal causal mask is applied within each ST-transformer block.\n\n Dimension keys:\n B: batch size \n T: number of input (conditioning) frames \n N: patches per frame \n S: sequence length \n A: action space \n D: model latent dimension\n """"""\n # --- Encode videos and actions ---\n tokenizer_out = self.tokenizer.vq_encode(batch[""videos""], training=False)\n token_idxs = tokenizer_out[""indices""] # (B, T, N)\n B, T, N = token_idxs.shape\n pad_shape = (B, seq_len - T, N)\n pad = jnp.zeros(pad_shape, dtype=token_idxs.dtype)\n token_idxs = jnp.concatenate([token_idxs, pad], axis=1) # (B, S, N)\n action_tokens = self.lam.vq.get_codes(batch[""latent_actions""])\n\n MaskGITLoop = nn.scan(\n MaskGITStep,\n variable_broadcast=""params"",\n split_rngs={""params"": False},\n in_axes=0,\n out_axes=0,\n length=steps,\n )\n \n loop_fn = MaskGITLoop(\n dynamics=self.dynamics,\n tokenizer=self.tokenizer,\n temperature=temperature,\n sample_argmax=sample_argmax,\n steps=steps,\n )\n\n def generation_step_fn(carry, step_t):\n rng, current_token_idxs = carry\n rng, step_rng = jax.random.split(rng)\n\n # Mask current and future frames (i.e., t >= step_t)\n mask = jnp.arange(seq_len) >= step_t # (S,)\n mask = jnp.broadcast_to(mask[None, :, None], (B, seq_len, N)) # (B, S, N)\n mask = mask.astype(bool)\n masked_token_idxs = current_token_idxs * ~mask\n\n # --- Initialize and run MaskGIT loop ---\n init_carry_maskgit = (\n step_rng,\n masked_token_idxs,\n mask,\n action_tokens,\n )\n final_carry_maskgit, _ = loop_fn(init_carry_maskgit, jnp.arange(steps))\n updated_token_idxs = final_carry_maskgit[1]\n new_carry = (rng, updated_token_idxs)\n return new_carry, None\n\n # --- Run the autoregressive generation using scan ---\n initial_carry = (batch[""rng""], token_idxs)\n timesteps_to_scan = jnp.arange(T, seq_len)\n final_carry, _ = jax.lax.scan(\n generation_step_fn,\n initial_carry,\n timesteps_to_scan\n )\n final_token_idxs = final_carry[1]\n\n # --- Decode all tokens at once at the end ---\n final_frames = self.tokenizer.decode(\n final_token_idxs,\n video_hw=batch[""videos""].shape[2:4],\n )\n return final_frames\n\n def vq_encode(self, batch, training) -> Dict[str, Any]:\n # --- Preprocess videos ---\n lam_output = self.lam.vq_encode(batch[""videos""], training=training)\n return lam_output[""indices""]\n\n\nclass MaskGITStep(nn.Module):\n dynamics: nn.Module\n tokenizer: nn.Module\n temperature: float\n sample_argmax: bool\n steps: int\n\n @nn.compact\n def __call__(self, carry, x):\n rng, token_idxs, mask, action_tokens = carry\n step = x\n N = token_idxs.shape[2]\n\n # --- Construct + encode video ---\n vid_embed = self.dynamics.patch_embed(token_idxs) # (B, S, N, D)\n mask_token = self.dynamics.mask_token # (1, 1, 1, D,)\n mask_expanded = mask[..., None] # (B, S, N, 1) \n vid_embed = jnp.where(mask_expanded, mask_token, vid_embed)\n\n # --- Predict transition ---\n act_embed = self.dynamics.action_up(action_tokens)\n vid_embed += jnp.pad(act_embed, ((0, 0), (1, 0), (0, 0), (0, 0)))\n unmasked_ratio = jnp.cos(jnp.pi * (step + 1) / (self.steps * 2))\n step_temp = self.temperature * (1.0 - unmasked_ratio)\n final_logits = self.dynamics.dynamics(vid_embed) / step_temp\n\n # --- Sample new tokens for final frame ---\n if self.sample_argmax:\n sampled_token_idxs = jnp.argmax(final_logits, axis=-1)\n else:\n rng, _rng = 
jax.random.split(rng)\n sampled_token_idxs = jax.random.categorical(_rng, final_logits)\n gather_fn = jax.vmap(jax.vmap(jax.vmap(lambda x, y: x[y])))\n final_token_probs = gather_fn(jax.nn.softmax(final_logits), sampled_token_idxs)\n final_token_probs += ~mask\n # Update masked tokens only\n token_idxs = jnp.where(mask, sampled_token_idxs, token_idxs)\n\n # --- Update mask ---\n num_unmasked_tokens = jnp.round(N * (1.0 - unmasked_ratio)).astype(int)\n idx_mask = jnp.arange(final_token_probs.shape[-1]) > num_unmasked_tokens\n sorted_idxs = jnp.argsort(final_token_probs, axis=-1, descending=True)\n mask_update_fn = jax.vmap(lambda msk, ids: msk.at[ids].set(idx_mask))\n new_mask = mask_update_fn(mask, sorted_idxs)\n\n new_carry = (rng, token_idxs, new_mask, action_tokens)\n return new_carry, None\n\ndef restore_genie_components(\n train_state: TrainState,\n sharding: jax.sharding.NamedSharding,\n grain_iterator: grain.DataLoaderIterator,\n inputs: Dict[str, jax.Array],\n rng: jax.Array,\n args,\n):\n """"""Restore pre-trained Genie components""""""\n rng, _rng = jax.random.split(rng)\n\n # dummy values since we only use tx to initialize the dummy train states\n dummy_tx = optax.adamw(\n learning_rate=optax.constant_schedule(args.max_lr),\n b1=0.9,\n b2=0.9,\n weight_decay=1e-4,\n )\n handler_registry = ocp.handlers.DefaultCheckpointHandlerRegistry()\n handler_registry.add('model_state', ocp.args.StandardRestore, ocp.handlers.StandardCheckpointHandler)\n handler_registry.add('dataloader_state', grain.checkpoint.CheckpointRestore, grain.checkpoint.CheckpointHandler)\n \n\n checkpoint_options = ocp.CheckpointManagerOptions(\n step_format_fixed_length=6,\n )\n tokenizer_checkpoint_manager = ocp.CheckpointManager(\n directory=args.tokenizer_checkpoint,\n options=checkpoint_options,\n handler_registry=handler_registry,\n )\n dummy_tokenizer = TokenizerVQVAE(\n in_dim=args.image_channels,\n model_dim=args.tokenizer_dim,\n latent_dim=args.latent_patch_dim,\n num_latents=args.num_patch_latents,\n patch_size=args.patch_size,\n num_blocks=args.tokenizer_num_blocks,\n num_heads=args.tokenizer_num_heads,\n dropout=args.dropout,\n codebook_dropout=args.dropout,\n param_dtype=args.param_dtype,\n dtype=args.dtype,\n )\n tokenizer_init_params = dummy_tokenizer.init(_rng, inputs)\n dummy_tokenizer_train_state = TrainState.create(\n apply_fn=dummy_tokenizer.apply, params=tokenizer_init_params, tx=dummy_tx\n )\n abstract_sharded_tokenizer_state = _create_abstract_sharded_pytree(\n dummy_tokenizer_train_state, sharding\n )\n restored_tokenizer = tokenizer_checkpoint_manager.restore(\n step=tokenizer_checkpoint_manager.latest_step(),\n args=ocp.args.Composite(\n model_state=ocp.args.StandardRestore(abstract_sharded_tokenizer_state),\n dataloader_state=grain.checkpoint.CheckpointRestore(grain_iterator),\n ),\n )[""model_state""]\n restored_tokenizer_params = restored_tokenizer.params[""params""]\n train_state.params[""params""][""tokenizer""].update(restored_tokenizer_params)\n tokenizer_checkpoint_manager.close()\n\n if args.lam_checkpoint:\n lam_checkpoint_manager = ocp.CheckpointManager(\n directory=args.lam_checkpoint,\n options=checkpoint_options,\n handler_registry=handler_registry,\n )\n dummy_lam = LatentActionModel(\n in_dim=args.image_channels,\n model_dim=args.lam_dim,\n latent_dim=args.latent_patch_dim,\n num_latents=args.num_latent_actions,\n patch_size=args.lam_patch_size,\n num_blocks=args.lam_num_blocks,\n num_heads=args.lam_num_heads,\n dropout=args.dropout,\n codebook_dropout=args.dropout,\n 
param_dtype=args.param_dtype,\n dtype=args.dtype,\n )\n lam_init_params = dummy_lam.init(_rng, inputs)\n dummy_lam_train_state = TrainState.create(\n apply_fn=dummy_lam.apply, params=lam_init_params, tx=dummy_tx\n )\n abstract_sharded_lam_state = _create_abstract_sharded_pytree(\n dummy_lam_train_state, sharding\n )\n restored_lam = lam_checkpoint_manager.restore(\n step=lam_checkpoint_manager.latest_step(),\n args=ocp.args.Composite(\n model_state=ocp.args.StandardRestore(abstract_sharded_lam_state),\n dataloader_state=grain.checkpoint.CheckpointRestore(grain_iterator),\n ),\n )[""model_state""]\n restored_lam_params = restored_lam.params[""params""]\n # Genie does not initialize all LAM modules, thus we omit those extra modules during restoration\n # (f.srambical) FIXME: Currently, this is a small HBM memory crunch since the LAM's decoder is loaded into HBM and immediately dicarded.\n # A workaround would be to restore to host memory first, and only move the weights to HBM after pruning the decoder\n restored_lam_params = {\n k: v\n for k, v in restored_lam_params.items()\n if k in train_state.params[""params""][""lam""]\n }\n train_state.params[""params""][""lam""].update(restored_lam_params)\n lam_checkpoint_manager.close()\n\n return train_state\n\ndef _create_abstract_sharded_pytree(pytree_template, sharding_spec):\n """"""Replaces arrays in a pytree with ShapeDtypeStructs having the given sharding.""""""\n\n def map_fn(leaf_template):\n if hasattr(leaf_template, ""shape"") and hasattr(leaf_template, ""dtype""):\n return jax.ShapeDtypeStruct(\n leaf_template.shape, leaf_template.dtype, sharding=sharding_spec\n )\n return leaf_template\n\n return jax.tree_util.tree_map(map_fn, pytree_template)",python,tab +5862,15024490,"world_model.py",2169,0,"",python,selection_mouse +5863,15025035,"world_model.py",2006,0,"",python,selection_mouse +5864,15032308,"world_model.py",2054,0,"",python,selection_mouse +5865,15032460,"world_model.py",2046,15,"DynamicsMaskGIT",python,selection_mouse +5866,15041534,"world_model.py",398,0,"",python,selection_mouse +5867,15042829,"world_model.py",398,6,"",python,content +5868,15043350,"world_model.py",398,0,"W",python,content +5869,15043351,"world_model.py",399,0,"",python,selection_keyboard +5870,15043531,"world_model.py",399,0,"o",python,content +5871,15043532,"world_model.py",400,0,"",python,selection_keyboard +5872,15043594,"world_model.py",400,0,"r",python,content +5873,15043595,"world_model.py",401,0,"",python,selection_keyboard +5874,15043788,"world_model.py",401,0,"d",python,content +5875,15043788,"world_model.py",402,0,"",python,selection_keyboard +5876,15044083,"world_model.py",401,1,"",python,content +5877,15044244,"world_model.py",401,0,"l",python,content +5878,15044245,"world_model.py",402,0,"",python,selection_keyboard +5879,15044362,"world_model.py",402,0,"d",python,content +5880,15044363,"world_model.py",403,0,"",python,selection_keyboard +5881,15044445,"world_model.py",403,0," ",python,content +5882,15044446,"world_model.py",404,0,"",python,selection_keyboard +5883,15044710,"world_model.py",403,0,"",python,selection_command +5884,15064938,"train_dynamics_causal.py",0,0,"",python,tab +5885,15064939,"train_dynamics_causal.py",2313,0,"",python,selection_mouse +5886,15065044,"train_dynamics_causal.py",2305,16,"dynamics_loss_fn",python,selection_mouse +5887,15099580,"train_dynamics_causal.py",5284,0,"",python,selection_mouse +5888,15099593,"train_dynamics_causal.py",5283,0,"",python,selection_command 
+5889,15100186,"train_dynamics_causal.py",5008,0,"",python,selection_mouse +5890,15100910,"train_dynamics_causal.py",4681,0,"",python,selection_mouse +5891,15101703,"train_dynamics_causal.py",4398,0,"",python,selection_mouse +5892,15102295,"train_dynamics_causal.py",4547,0,"",python,selection_mouse +5893,15102950,"train_dynamics_causal.py",4516,0,"",python,selection_mouse +5894,15103527,"train_dynamics_causal.py",4508,13,"log_gradients",python,selection_mouse +5895,15104056,"train_dynamics_causal.py",4471,0,"",python,selection_mouse +5896,15104611,"train_dynamics_causal.py",4397,0,"",python,selection_mouse +5897,15105187,"train_dynamics_causal.py",4385,0,"",python,selection_mouse +5898,15105712,"train_dynamics_causal.py",4388,0,"",python,selection_mouse +5899,15106219,"train_dynamics_causal.py",4393,0,"",python,selection_mouse +5900,15108446,"train_dynamics_causal.py",3529,0,"",python,selection_mouse +5901,15116866,"train_dynamics_causal.py",3543,0,"",python,selection_mouse +5902,15116867,"train_dynamics_causal.py",3542,0,"",python,selection_command +5903,15117789,"train_dynamics_causal.py",3543,0,"",python,selection_mouse +5904,15117816,"train_dynamics_causal.py",3542,0,"",python,selection_command +5905,15119190,"train_dynamics_causal.py",3628,0,"",python,selection_mouse +5906,15120178,"train_dynamics_causal.py",3629,0,"",python,selection_mouse +5907,15120202,"train_dynamics_causal.py",3628,0,"",python,selection_command +5908,15120896,"train_dynamics_causal.py",3542,0,"",python,selection_command +5909,15121081,"train_dynamics_causal.py",3491,0,"",python,selection_command +5910,15121225,"train_dynamics_causal.py",3485,0,"",python,selection_command +5911,15121420,"train_dynamics_causal.py",3399,0,"",python,selection_command +5912,15121652,"train_dynamics_causal.py",3354,0,"",python,selection_command +5913,15122150,"train_dynamics_causal.py",3301,0,"",python,selection_command +5914,15137718,"diff.log",0,0,"",log,tab +5915,15139373,"train_dynamics_causal.py",0,0,"",python,tab +5916,15140965,"diff.diff",0,0,"",diff,tab +5917,15150287,"world_model.py",0,0,"",python,tab +5918,15150288,"world_model.py",798,0,"",python,selection_mouse +5919,15151017,"world_model.py",889,0,"",python,selection_mouse +5920,15151130,"world_model.py",887,11,"use_maskgit",python,selection_mouse +5921,15160879,"world_model.py",897,0,"",python,selection_mouse +5922,15160880,"world_model.py",887,11,"use_maskgit",python,selection_mouse +5923,15161600,"world_model.py",889,0,"",python,selection_mouse +5924,15161600,"world_model.py",887,11,"use_maskgit",python,selection_mouse +5925,15162292,"world_model.py",886,0,"",python,selection_mouse +5926,15162945,"world_model.py",893,0,"",python,selection_mouse +5927,15163123,"world_model.py",887,11,"use_maskgit",python,selection_mouse +5928,15164009,"world_model.py",889,0,"",python,selection_mouse +5929,15164869,"world_model.py",887,0,"",python,selection_mouse +5930,15198884,"TERMINAL",0,0,"srun",,terminal_focus +5931,15209291,"TERMINAL",0,0,"[?25lg[?25h",,terminal_output +5932,15209353,"TERMINAL",0,0,"[?25lti[?25h",,terminal_output +5933,15209557,"TERMINAL",0,0,"[?25l [?25h",,terminal_output +5934,15209716,"TERMINAL",0,0,"[?25lc[?25h",,terminal_output +5935,15211002,"TERMINAL",0,0,"[?25li[?25h[?25lt[?25h",,terminal_output +5936,15211226,"TERMINAL",0,0,"[?25l [?25h",,terminal_output +5937,15211417,"TERMINAL",0,0,"[?25lc[?25h",,terminal_output +5938,15211632,"TERMINAL",0,0,"[?25lh[?25h",,terminal_output +5939,15211816,"TERMINAL",0,0,"[?25le[?25h",,terminal_output 
+5940,15213190,"TERMINAL",0,0,"[?25lb[?25h",,terminal_output +5941,15213340,"TERMINAL",0,0,"[?25lr[?25h",,terminal_output +5942,15213527,"TERMINAL",0,0,"[?25la[?25h",,terminal_output +5943,15213608,"TERMINAL",0,0,"[?25ln[?25h",,terminal_output +5944,15213731,"TERMINAL",0,0,"[?25lc[?25h[?25lh[?25h",,terminal_output +5945,15214008,"TERMINAL",0,0,"\r\n[?2004l\r[?1h=\r add-wandb-name-and-tags\r\n* causal-transformer-dynamics-model\r\n convert-to-jax-array-in-iter\r\n correct-batched-sampling\r\n dev\r\n dont-let-tf-see-gpu\r\n feat/explicit-image-dims\r\n fix-sampling\r\n grad-norm-log-and-clip\r\n grain-dataloader\r\n logging-variants\r\n lr-schedules\r\n main\r\n metrics-logging-for-dynamics-model\r\n monkey-patch\r\n preprocess_video\r\n revised-dataloader\r\n runner\r\n runner-grain\r\n sample-from-different-topologies\r\n speedup-tfrecord-preprocessing\r\n tmp\r\n\r[?1l>]0;tum_cte0515@hkn0710:~/Projects/jafar[?2004h(jafar) [tum_cte0515@hkn0710 jafar]$ ",,terminal_output +5946,15214854,"TERMINAL",0,0,"[?25lg[?25h",,terminal_output +5947,15215014,"TERMINAL",0,0,"[?25li[?25h",,terminal_output +5948,15215067,"TERMINAL",0,0,"[?25lt[?25h",,terminal_output +5949,15215124,"TERMINAL",0,0,"[?25l [?25h",,terminal_output +5950,15215241,"TERMINAL",0,0,"[?25lc[?25h",,terminal_output +5951,15215302,"TERMINAL",0,0,"[?25lh[?25h",,terminal_output +5952,15215424,"TERMINAL",0,0,"[?25le[?25h",,terminal_output +5953,15215574,"TERMINAL",0,0,"[?25lc[?25h",,terminal_output +5954,15215811,"TERMINAL",0,0,"[?25lk[?25h",,terminal_output +5955,15215920,"TERMINAL",0,0,"[?25lo[?25h",,terminal_output +5956,15215982,"TERMINAL",0,0,"[?25lu[?25h",,terminal_output +5957,15216179,"TERMINAL",0,0,"[?25lt[?25h",,terminal_output +5958,15216231,"TERMINAL",0,0,"[?25l [?25h",,terminal_output +5959,15216580,"TERMINAL",0,0,"[?25l-[?25h",,terminal_output +5960,15216866,"TERMINAL",0,0,"[?25lb[?25h",,terminal_output +5961,15217042,"TERMINAL",0,0,"[?25l [?25h",,terminal_output +5962,15217310,"TERMINAL",0,0,"[?25l""[?25h",,terminal_output +5963,15218116,"TERMINAL",0,0,"[?25lt[?25h",,terminal_output +5964,15218179,"TERMINAL",0,0,"[?25lm[?25h",,terminal_output +5965,15218392,"TERMINAL",0,0,"[?25lp[?25h",,terminal_output +5966,15220673,"TERMINAL",0,0,"[?25lr[?25h",,terminal_output +5967,15220867,"TERMINAL",0,0,"[?25le[?25h",,terminal_output +5968,15221035,"TERMINAL",0,0,"[?25lf[?25h",,terminal_output +5969,15221417,"TERMINAL",0,0,"[?25la[?25h",,terminal_output +5970,15221571,"TERMINAL",0,0,"[?25lc[?25h",,terminal_output +5971,15221762,"TERMINAL",0,0,"[?25lt[?25h",,terminal_output +5972,15221823,"TERMINAL",0,0,"[?25lo[?25h",,terminal_output +5973,15221990,"TERMINAL",0,0,"[?25lr[?25h",,terminal_output +5974,15222391,"TERMINAL",0,0,"[?25l-[?25h",,terminal_output +5975,15223051,"TERMINAL",0,0,"[?25lt[?25h[?25lm[?25h",,terminal_output +5976,15223294,"TERMINAL",0,0,"[?25lp[?25h",,terminal_output +5977,15223642,"TERMINAL",0,0,"[?25l""[?25h",,terminal_output +5978,15224304,"TERMINAL",0,0,"\r\n[?2004l\rSwitched to a new branch 'refactor-tmp'\r\n]0;tum_cte0515@hkn0710:~/Projects/jafar[?2004h(jafar) [tum_cte0515@hkn0710 jafar]$ ",,terminal_output +5979,15226100,"TERMINAL",0,0,"[?25lg[?25h[?25li[?25h",,terminal_output +5980,15226383,"TERMINAL",0,0,"[?25lt[?25h",,terminal_output +5981,15226455,"TERMINAL",0,0,"[?25l [?25h",,terminal_output +5982,15226515,"",0,0,"Switched from branch 'causal-transformer-dynamics-model' to 'refactor-tmp'",,git_branch_checkout +5983,15226602,"TERMINAL",0,0,"[?25ls[?25h",,terminal_output 
+5984,15226753,"TERMINAL",0,0,"[?25lt[?25h[?25la[?25h",,terminal_output +5985,15226861,"TERMINAL",0,0,"[?25lt[?25h",,terminal_output +5986,15227047,"TERMINAL",0,0,"[?25ls[?25h",,terminal_output +5987,15227924,"TERMINAL",0,0,"[?25lu[?25h",,terminal_output +5988,15227987,"TERMINAL",0,0,"[?25ls[?25h",,terminal_output +5989,15228158,"TERMINAL",0,0,"\r\n[?2004l\rOn branch refactor-tmp\r\nChanges not staged for commit:\r\n (use ""git add/rm ..."" to update what will be committed)\r\n (use ""git restore ..."" to discard changes in working directory)\r\n\tdeleted: genie.py\r\n\tmodified: models/dynamics.py\r\n\tmodified: sample.py\r\n\tdeleted: train_dynamics.py\r\n\r\nUntracked files:\r\n (use ""git add ..."" to include in what will be committed)\r\n\tdiff.diff\r\n\tdiff.log\r\n\tlogs/\r\n\tread_tf_record.py\r\n\trequirements-franz.txt\r\n\tscripts_cremers/\r\n\tscripts_horeka/\r\n\tslurm/\r\n\ttrain_dynamics_causal.py\r\n\ttrain_dynamics_maskgit.py\r\n\tutils/logger_bak.py\r\n\tutils/visualizer.py\r\n\tweekend-job-requeuer.sh\r\n\tweekend-job-starter.sh\r\n\tworld_model.py\r\n\r\nno changes added to commit (use ""git add"" and/or ""git commit -a"")\r\n]0;tum_cte0515@hkn0710:~/Projects/jafar[?2004h(jafar) [tum_cte0515@hkn0710 jafar]$ ",,terminal_output +5990,15229014,"TERMINAL",0,0,"[?25lg[?25h",,terminal_output +5991,15229128,"TERMINAL",0,0,"[?25li[?25h",,terminal_output +5992,15229240,"TERMINAL",0,0,"[?25lt[?25h",,terminal_output +5993,15229304,"TERMINAL",0,0,"[?25l [?25h",,terminal_output +5994,15230115,"TERMINAL",0,0,"[?25lc[?25h",,terminal_output +5995,15230177,"TERMINAL",0,0,"[?25lo[?25h",,terminal_output +5996,15230355,"TERMINAL",0,0,"[?25lm[?25h",,terminal_output +5997,15230505,"TERMINAL",0,0,"[?25lm[?25h",,terminal_output +5998,15230612,"TERMINAL",0,0,"[?25li[?25h",,terminal_output +5999,15230674,"TERMINAL",0,0,"[?25lt[?25h",,terminal_output +6000,15230803,"TERMINAL",0,0,"[?25l [?25h",,terminal_output +6001,15230970,"TERMINAL",0,0,"[?25l-[?25h",,terminal_output +6002,15231151,"TERMINAL",0,0,"[?25la[?25h",,terminal_output +6003,15231212,"TERMINAL",0,0,"[?25lm[?25h",,terminal_output +6004,15231273,"TERMINAL",0,0,"[?25l [?25h",,terminal_output +6005,15231517,"TERMINAL",0,0,"[?25l""[?25h",,terminal_output +6006,15232244,"TERMINAL",0,0,"[?25lr[?25h",,terminal_output +6007,15232897,"TERMINAL",0,0,"[?25le[?25h",,terminal_output +6008,15233083,"TERMINAL",0,0,"[?25lf[?25h",,terminal_output +6009,15233247,"TERMINAL",0,0,"[?25la[?25h",,terminal_output +6010,15233396,"TERMINAL",0,0,"[?25lc[?25h",,terminal_output +6011,15233583,"TERMINAL",0,0,"[?25lt[?25h",,terminal_output +6012,15233646,"TERMINAL",0,0,"[?25lo[?25h",,terminal_output +6013,15233897,"TERMINAL",0,0,"[?25l [?25h",,terminal_output +6014,15234284,"TERMINAL",0,0,"[?25lr[?25h",,terminal_output +6015,15234390,"TERMINAL",0,0,"[?25l [?25h",,terminal_output +6016,15234712,"TERMINAL",0,0,"[?25lt[?25h",,terminal_output +6017,15234794,"TERMINAL",0,0,"[?25lm[?25h",,terminal_output +6018,15234917,"TERMINAL",0,0,"[?25lp[?25h",,terminal_output +6019,15235271,"TERMINAL",0,0,"[?25l""[?25h",,terminal_output +6020,15235475,"TERMINAL",0,0,"\r\n[?2004l\r[refactor-tmp 2477a4e] refactor tmp\r\n 4 files changed, 1 insertion(+), 772 deletions(-)\r\n delete mode 100644 genie.py\r\n delete mode 100644 train_dynamics.py\r\n]0;tum_cte0515@hkn0710:~/Projects/jafar[?2004h(jafar) [tum_cte0515@hkn0710 jafar]$ ",,terminal_output +6021,15236453,"TERMINAL",0,0,"[?25lg[?25h",,terminal_output +6022,15236582,"TERMINAL",0,0,"[?25lt[?25h",,terminal_output 
+6023,15238277,"TERMINAL",0,0,"[?25li[?25h",,terminal_output +6024,15238963,"TERMINAL",0,0,"[?25lt[?25h",,terminal_output +6025,15239070,"TERMINAL",0,0,"[?25l [?25h",,terminal_output +6026,15239316,"TERMINAL",0,0,"[?25lb[?25h",,terminal_output +6027,15239378,"TERMINAL",0,0,"[?25lr[?25h",,terminal_output +6028,15239592,"TERMINAL",0,0,"[?25la[?25h",,terminal_output +6029,15239654,"TERMINAL",0,0,"[?25ln[?25h",,terminal_output +6030,15239834,"TERMINAL",0,0,"[?25lc[?25h",,terminal_output +6031,15239897,"TERMINAL",0,0,"[?25lh[?25h",,terminal_output +6032,15240191,"TERMINAL",0,0,"\r\n[?2004l\r[?1h=\r add-wandb-name-and-tags\r\n causal-transformer-dynamics-model\r\n convert-to-jax-array-in-iter\r\n correct-batched-sampling\r\n dev\r\n dont-let-tf-see-gpu\r\n feat/explicit-image-dims\r\n fix-sampling\r\n grad-norm-log-and-clip\r\n grain-dataloader\r\n logging-variants\r\n lr-schedules\r\n main\r\n metrics-logging-for-dynamics-model\r\n monkey-patch\r\n preprocess_video\r\n* refactor-tmp\r\n revised-dataloader\r\n runner\r\n runner-grain\r\n sample-from-different-topologies\r\n speedup-tfrecord-preprocessing\r\n tmp\r\n\r[?1l>]0;tum_cte0515@hkn0710:~/Projects/jafar[?2004h(jafar) [tum_cte0515@hkn0710 jafar]$ ",,terminal_output +6033,15244210,"TERMINAL",0,0,"[?25lg[?25h",,terminal_output +6034,15244304,"TERMINAL",0,0,"[?25li[?25h",,terminal_output +6035,15244367,"TERMINAL",0,0,"[?25lt[?25h",,terminal_output +6036,15244430,"TERMINAL",0,0,"[?25l [?25h",,terminal_output +6037,15244743,"TERMINAL",0,0,"[?25lh[?25h",,terminal_output +6038,15245355,"TERMINAL",0,0,"[?25lc[?25h",,terminal_output +6039,15245424,"TERMINAL",0,0,"[?25lh[?25h",,terminal_output +6040,15245537,"TERMINAL",0,0,"[?25le[?25h",,terminal_output +6041,15245599,"TERMINAL",0,0,"[?25lc[?25h",,terminal_output +6042,15245754,"TERMINAL",0,0,"[?25lk[?25h",,terminal_output +6043,15245900,"TERMINAL",0,0,"[?25lo[?25h",,terminal_output +6044,15246013,"TERMINAL",0,0,"[?25lu[?25h",,terminal_output +6045,15246171,"TERMINAL",0,0,"[?25lt[?25h",,terminal_output +6046,15246232,"TERMINAL",0,0,"[?25l [?25h",,terminal_output +6047,15247127,"TERMINAL",0,0,"",,terminal_output +6048,15247267,"TERMINAL",0,0,"",,terminal_output +6049,15247809,"TERMINAL",0,0,"[?25lg[?25h",,terminal_output +6050,15247921,"TERMINAL",0,0,"[?25li[?25h",,terminal_output +6051,15247980,"TERMINAL",0,0,"[?25lt[?25h",,terminal_output +6052,15248086,"TERMINAL",0,0,"[?25l [?25h",,terminal_output +6053,15248244,"TERMINAL",0,0,"[?25ls[?25h",,terminal_output +6054,15248318,"TERMINAL",0,0,"[?25lt[?25h",,terminal_output +6055,15248443,"TERMINAL",0,0,"[?25la[?25h[?25lt[?25h",,terminal_output +6056,15248616,"TERMINAL",0,0,"[?25lu[?25h",,terminal_output +6057,15248677,"TERMINAL",0,0,"[?25ls[?25h",,terminal_output +6058,15248867,"TERMINAL",0,0,"\r\n[?2004l\rOn branch refactor-tmp\r\nUntracked files:\r\n (use ""git add ..."" to include in what will be committed)\r\n\tdiff.diff\r\n\tdiff.log\r\n\tlogs/\r\n\tread_tf_record.py\r\n\trequirements-franz.txt\r\n\tscripts_cremers/\r\n\tscripts_horeka/\r\n\tslurm/\r\n\ttrain_dynamics_causal.py\r\n\ttrain_dynamics_maskgit.py\r\n\tutils/logger_bak.py\r\n\tutils/visualizer.py\r\n\tweekend-job-requeuer.sh\r\n\tweekend-job-starter.sh\r\n\tworld_model.py\r\n\r\nnothing added to commit but untracked files present (use ""git add"" to track)\r\n]0;tum_cte0515@hkn0710:~/Projects/jafar[?2004h(jafar) [tum_cte0515@hkn0710 jafar]$ ",,terminal_output +6059,15254591,"TERMINAL",0,0,"[?25lg[?25h",,terminal_output +6060,15254655,"TERMINAL",0,0,"[?25lit[?25h",,terminal_output 
+6061,15254911,"TERMINAL",0,0,"[?25l [?25h",,terminal_output +6062,15255140,"TERMINAL",0,0,"[?25la[?25h",,terminal_output +6063,15255337,"TERMINAL",0,0,"[?25ld[?25h",,terminal_output +6064,15255490,"TERMINAL",0,0,"[?25ld[?25h",,terminal_output +6065,15255586,"TERMINAL",0,0,"[?25l [?25h",,terminal_output +6066,15255969,"TERMINAL",0,0,"train_dynamics_causal.py\r\n\r train_dynamics_maskgit.py",,terminal_output +6067,15256540,"TERMINAL",0,0,"train_dynamics_causal.py\r\n\r train_dynamics_maskgit.py\r\n[?2004l\rbash: ./train_dynamics_maskgit.py: Permission denied\r\n]0;tum_cte0515@hkn0710:~/Projects/jafar[?2004h(jafar) [tum_cte0515@hkn0710 jafar]$ ",,terminal_output +6068,15257754,"TERMINAL",0,0," train_dynamics_maskgit.py",,terminal_output +6069,15259137,"TERMINAL",0,0,"git add train_dynamics_causal.py",,terminal_output +6070,15261823,"TERMINAL",0,0,"[?25lm[?25h",,terminal_output +6071,15261973,"TERMINAL",0,0,"askgit.py ",,terminal_output +6072,15262559,"TERMINAL",0,0,"\r\n[?2004l\r]0;tum_cte0515@hkn0710:~/Projects/jafar[?2004h(jafar) [tum_cte0515@hkn0710 jafar]$ ",,terminal_output +6073,15263162,"TERMINAL",0,0,"[?25lg[?25h[?25li[?25h",,terminal_output +6074,15263399,"TERMINAL",0,0,"[?25lt[?25h",,terminal_output +6075,15263443,"TERMINAL",0,0,"[?25l [?25h",,terminal_output +6076,15263593,"TERMINAL",0,0,"[?25la[?25h",,terminal_output +6077,15263740,"TERMINAL",0,0,"[?25ld[?25h",,terminal_output +6078,15263967,"TERMINAL",0,0,"[?25ld[?25h[?25l [?25h",,terminal_output +6079,15264163,"TERMINAL",0,0,"[?25lq[?25h",,terminal_output +6080,15264222,"TERMINAL",0,0,"[?25lo[?25h",,terminal_output +6081,15264852,"TERMINAL",0,0,"[?25lw[?25h",,terminal_output +6082,15265036,"TERMINAL",0,0,"[?25lr[?25h",,terminal_output +6083,15265328,"TERMINAL",0,0,"",,terminal_output +6084,15266067,"TERMINAL",0,0,"[?25lo[?25h",,terminal_output +6085,15266128,"TERMINAL",0,0,"[?25lr[?25h",,terminal_output +6086,15266366,"TERMINAL",0,0,"ld_model.py ",,terminal_output +6087,15266794,"TERMINAL",0,0,"\r\n[?2004l\r]0;tum_cte0515@hkn0710:~/Projects/jafar[?2004h(jafar) [tum_cte0515@hkn0710 jafar]$ ",,terminal_output +6088,15267805,"TERMINAL",0,0,"git add world_model.py ",,terminal_output +6089,15268057,"TERMINAL",0,0,"[11@train_dynamics_maskgit",,terminal_output +6090,15268190,"TERMINAL",0,0," train_dynamics_maskgit.py",,terminal_output +6091,15268507,"TERMINAL",0,0,"git add train_dynamics_causal.py",,terminal_output +6092,15268856,"TERMINAL",0,0,"status",,terminal_output +6093,15269167,"TERMINAL",0,0,"branch",,terminal_output +6094,15269607,"TERMINAL",0,0,"commit -am ""refactor tmp""",,terminal_output +6095,15271523,"TERMINAL",0,0,"\r\n[?2004l\r[refactor-tmp 40076ed] refactor tmp\r\n 3 files changed, 1136 insertions(+)\r\n create mode 100644 train_dynamics_causal.py\r\n create mode 100644 train_dynamics_maskgit.py\r\n create mode 100644 world_model.py\r\n]0;tum_cte0515@hkn0710:~/Projects/jafar[?2004h(jafar) [tum_cte0515@hkn0710 jafar]$ ",,terminal_output +6096,15292666,"TERMINAL",0,0,"[?25lg[?25h",,terminal_output +6097,15292808,"TERMINAL",0,0,"[?25li[?25h[?25lt[?25h",,terminal_output +6098,15292980,"TERMINAL",0,0,"[?25l [?25h",,terminal_output +6099,15293158,"TERMINAL",0,0,"[?25lc[?25h",,terminal_output +6100,15293393,"TERMINAL",0,0,"[?25le[?25h",,terminal_output +6101,15293955,"TERMINAL",0,0,"[?25lh[?25h",,terminal_output +6102,15294017,"TERMINAL",0,0,"[?25le[?25h",,terminal_output +6103,15294232,"TERMINAL",0,0,"[?25lc[?25h",,terminal_output +6104,15294357,"TERMINAL",0,0,"[?25lk[?25h",,terminal_output 
+6105,15294464,"TERMINAL",0,0,"[?25lo[?25h",,terminal_output +6106,15294639,"TERMINAL",0,0,"[?25lu[?25h[?25lt[?25h",,terminal_output +6107,15294824,"TERMINAL",0,0,"[?25l [?25h",,terminal_output +6108,15295237,"TERMINAL",0,0,"causal-transformer-dynamics-model",,terminal_output +6109,15295613,"TERMINAL",0,0,"causal-transformer-dynamics-model\r\n[?2004l\rSwitched to branch 'causal-transformer-dynamics-model'\r\n]0;tum_cte0515@hkn0710:~/Projects/jafar[?2004h(jafar) [tum_cte0515@hkn0710 jafar]$ ",,terminal_output +6110,15296509,"",0,0,"Switched from branch 'refactor-tmp' to 'causal-transformer-dynamics-model'",,git_branch_checkout +6111,15299520,"TERMINAL",0,0,"\r(jafar) [tum_cte0515@hkn0710 jafar]$ ",,terminal_output +6112,15315899,"genie.py",0,0,"from typing import Dict, Any\n\nimport optax\nimport jax\nimport jax.numpy as jnp\nimport flax.linen as nn\nfrom flax.training.train_state import TrainState\nimport orbax.checkpoint as ocp\n\nfrom models.dynamics import DynamicsMaskGIT, DynamicsAutoregressive\nfrom models.lam import LatentActionModel\nfrom models.tokenizer import TokenizerVQVAE\n\nimport os\nimport grain\n\n\nclass Genie(nn.Module):\n """"""Genie model""""""\n\n # --- Tokenizer ---\n in_dim: int\n tokenizer_dim: int\n latent_patch_dim: int\n num_patch_latents: int\n patch_size: int\n tokenizer_num_blocks: int\n tokenizer_num_heads: int\n # --- LAM ---\n lam_dim: int\n latent_action_dim: int\n num_latent_actions: int\n lam_patch_size: int\n lam_num_blocks: int\n lam_num_heads: int\n lam_co_train: bool\n # --- Dynamics ---\n dyna_dim: int\n dyna_num_blocks: int\n dyna_num_heads: int\n use_maskgit: bool\n param_dtype: jnp.dtype\n dtype: jnp.dtype\n dropout: float = 0.0\n mask_limit: float = 0.0\n\n def setup(self):\n self.tokenizer = TokenizerVQVAE(\n in_dim=self.in_dim,\n model_dim=self.tokenizer_dim,\n latent_dim=self.latent_patch_dim,\n num_latents=self.num_patch_latents,\n patch_size=self.patch_size,\n num_blocks=self.tokenizer_num_blocks,\n num_heads=self.tokenizer_num_heads,\n dropout=0.0,\n codebook_dropout=0.0,\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n )\n self.lam = LatentActionModel(\n in_dim=self.in_dim,\n model_dim=self.lam_dim,\n latent_dim=self.latent_patch_dim,\n num_latents=self.num_latent_actions,\n patch_size=self.lam_patch_size,\n num_blocks=self.lam_num_blocks,\n num_heads=self.lam_num_heads,\n dropout=0.0,\n codebook_dropout=0.0,\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n )\n\n if self.use_maskgit:\n self.dynamics = DynamicsMaskGIT(\n model_dim=self.dyna_dim,\n num_latents=self.num_patch_latents,\n num_blocks=self.dyna_num_blocks,\n num_heads=self.dyna_num_heads,\n dropout=self.dropout,\n mask_limit=self.mask_limit,\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n ) \n else:\n self.dynamics = DynamicsAutoregressive(\n model_dim=self.dyna_dim,\n num_latents=self.num_patch_latents,\n num_blocks=self.dyna_num_blocks,\n num_heads=self.dyna_num_heads,\n dropout=self.dropout,\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n )\n\n def __call__(self, batch: Dict[str, Any], training: bool = True) -> Dict[str, Any]:\n tokenizer_outputs = self.tokenizer.vq_encode(batch[""videos""], training=False)\n lam_outputs = self.lam.vq_encode(batch[""videos""], training=False)\n latent_actions = jax.lax.cond(\n self.lam_co_train,\n lambda: lam_outputs[""z_q""],\n lambda: jax.lax.stop_gradient(lam_outputs[""z_q""])\n )\n outputs = dict(\n video_tokens=jax.lax.stop_gradient(tokenizer_outputs[""indices""]),\n latent_actions=latent_actions,\n )\n 
outputs[""mask_rng""] = batch[""mask_rng""]\n dyna_outputs = self.dynamics(outputs, training)\n outputs.update(dyna_outputs)\n mle_indices = jnp.argmax(outputs[""token_logits""], axis=-1)\n outputs[""recon""] = self.tokenizer.decode(\n mle_indices, batch[""videos""].shape[2:4]\n )\n outputs[""lam_indices""] = lam_outputs[""indices""]\n return outputs\n\n @nn.compact\n def sample(\n self,\n batch: Dict[str, Any],\n seq_len: int,\n steps: int = 25,\n temperature: float = 1,\n sample_argmax: bool = False,\n ) -> Any:\n """"""\n Autoregressively samples up to `seq_len` future frames, following Figure 8 of the paper.\n\n - Input frames are tokenized once.\n - Future frames are generated autoregressively in token space.\n - All frames are detokenized in a single pass.\n\n Note:\n - For interactive or step-wise sampling, detokenization should occur after each action.\n - To maintain consistent tensor shapes across timesteps, all current and future frames are decoded at every step.\n - Temporal causal structure is preserved by \n a) reapplying the mask before each decoding step.\n b) a temporal causal mask is applied within each ST-transformer block.\n\n Dimension keys:\n B: batch size \n T: number of input (conditioning) frames \n N: patches per frame \n S: sequence length \n A: action space \n D: model latent dimension\n """"""\n # --- Encode videos and actions ---\n tokenizer_out = self.tokenizer.vq_encode(batch[""videos""], training=False)\n token_idxs = tokenizer_out[""indices""] # (B, T, N)\n B, T, N = token_idxs.shape\n pad_shape = (B, seq_len - T, N)\n pad = jnp.zeros(pad_shape, dtype=token_idxs.dtype)\n token_idxs = jnp.concatenate([token_idxs, pad], axis=1) # (B, S, N)\n action_tokens = self.lam.vq.get_codes(batch[""latent_actions""])\n\n MaskGITLoop = nn.scan(\n MaskGITStep,\n variable_broadcast=""params"",\n split_rngs={""params"": False},\n in_axes=0,\n out_axes=0,\n length=steps,\n )\n \n loop_fn = MaskGITLoop(\n dynamics=self.dynamics,\n tokenizer=self.tokenizer,\n temperature=temperature,\n sample_argmax=sample_argmax,\n steps=steps,\n )\n\n def generation_step_fn(carry, step_t):\n rng, current_token_idxs = carry\n rng, step_rng = jax.random.split(rng)\n\n # Mask current and future frames (i.e., t >= step_t)\n mask = jnp.arange(seq_len) >= step_t # (S,)\n mask = jnp.broadcast_to(mask[None, :, None], (B, seq_len, N)) # (B, S, N)\n mask = mask.astype(bool)\n masked_token_idxs = current_token_idxs * ~mask\n\n # --- Initialize and run MaskGIT loop ---\n init_carry_maskgit = (\n step_rng,\n masked_token_idxs,\n mask,\n action_tokens,\n )\n final_carry_maskgit, _ = loop_fn(init_carry_maskgit, jnp.arange(steps))\n updated_token_idxs = final_carry_maskgit[1]\n new_carry = (rng, updated_token_idxs)\n return new_carry, None\n\n # --- Run the autoregressive generation using scan ---\n initial_carry = (batch[""rng""], token_idxs)\n timesteps_to_scan = jnp.arange(T, seq_len)\n final_carry, _ = jax.lax.scan(\n generation_step_fn,\n initial_carry,\n timesteps_to_scan\n )\n final_token_idxs = final_carry[1]\n\n # --- Decode all tokens at once at the end ---\n final_frames = self.tokenizer.decode(\n final_token_idxs,\n video_hw=batch[""videos""].shape[2:4],\n )\n return final_frames\n\n def vq_encode(self, batch, training) -> Dict[str, Any]:\n # --- Preprocess videos ---\n lam_output = self.lam.vq_encode(batch[""videos""], training=training)\n return lam_output[""indices""]\n\n\nclass MaskGITStep(nn.Module):\n dynamics: nn.Module\n tokenizer: nn.Module\n temperature: float\n sample_argmax: bool\n 
steps: int\n\n @nn.compact\n def __call__(self, carry, x):\n rng, token_idxs, mask, action_tokens = carry\n step = x\n N = token_idxs.shape[2]\n\n # --- Construct + encode video ---\n vid_embed = self.dynamics.patch_embed(token_idxs) # (B, S, N, D)\n mask_token = self.dynamics.mask_token # (1, 1, 1, D,)\n mask_expanded = mask[..., None] # (B, S, N, 1) \n vid_embed = jnp.where(mask_expanded, mask_token, vid_embed)\n\n # --- Predict transition ---\n act_embed = self.dynamics.action_up(action_tokens)\n vid_embed += jnp.pad(act_embed, ((0, 0), (1, 0), (0, 0), (0, 0)))\n unmasked_ratio = jnp.cos(jnp.pi * (step + 1) / (self.steps * 2))\n step_temp = self.temperature * (1.0 - unmasked_ratio)\n final_logits = self.dynamics.dynamics(vid_embed) / step_temp\n\n # --- Sample new tokens for final frame ---\n if self.sample_argmax:\n sampled_token_idxs = jnp.argmax(final_logits, axis=-1)\n else:\n rng, _rng = jax.random.split(rng)\n sampled_token_idxs = jax.random.categorical(_rng, final_logits)\n gather_fn = jax.vmap(jax.vmap(jax.vmap(lambda x, y: x[y])))\n final_token_probs = gather_fn(jax.nn.softmax(final_logits), sampled_token_idxs)\n final_token_probs += ~mask\n # Update masked tokens only\n token_idxs = jnp.where(mask, sampled_token_idxs, token_idxs)\n\n # --- Update mask ---\n num_unmasked_tokens = jnp.round(N * (1.0 - unmasked_ratio)).astype(int)\n idx_mask = jnp.arange(final_token_probs.shape[-1]) > num_unmasked_tokens\n sorted_idxs = jnp.argsort(final_token_probs, axis=-1, descending=True)\n mask_update_fn = jax.vmap(lambda msk, ids: msk.at[ids].set(idx_mask))\n new_mask = mask_update_fn(mask, sorted_idxs)\n\n new_carry = (rng, token_idxs, new_mask, action_tokens)\n return new_carry, None\n\ndef restore_genie_components(\n train_state: TrainState,\n sharding: jax.sharding.NamedSharding,\n grain_iterator: grain.DataLoaderIterator,\n inputs: Dict[str, jax.Array],\n rng: jax.Array,\n args,\n):\n """"""Restore pre-trained Genie components""""""\n rng, _rng = jax.random.split(rng)\n\n # dummy values since we only use tx to initialize the dummy train states\n dummy_tx = optax.adamw(\n learning_rate=optax.constant_schedule(args.max_lr),\n b1=0.9,\n b2=0.9,\n weight_decay=1e-4,\n )\n handler_registry = ocp.handlers.DefaultCheckpointHandlerRegistry()\n handler_registry.add('model_state', ocp.args.StandardRestore, ocp.handlers.StandardCheckpointHandler)\n handler_registry.add('dataloader_state', grain.checkpoint.CheckpointRestore, grain.checkpoint.CheckpointHandler)\n \n\n checkpoint_options = ocp.CheckpointManagerOptions(\n step_format_fixed_length=6,\n )\n tokenizer_checkpoint_manager = ocp.CheckpointManager(\n directory=args.tokenizer_checkpoint,\n options=checkpoint_options,\n handler_registry=handler_registry,\n )\n dummy_tokenizer = TokenizerVQVAE(\n in_dim=args.image_channels,\n model_dim=args.tokenizer_dim,\n latent_dim=args.latent_patch_dim,\n num_latents=args.num_patch_latents,\n patch_size=args.patch_size,\n num_blocks=args.tokenizer_num_blocks,\n num_heads=args.tokenizer_num_heads,\n dropout=args.dropout,\n codebook_dropout=args.dropout,\n param_dtype=args.param_dtype,\n dtype=args.dtype,\n )\n tokenizer_init_params = dummy_tokenizer.init(_rng, inputs)\n dummy_tokenizer_train_state = TrainState.create(\n apply_fn=dummy_tokenizer.apply, params=tokenizer_init_params, tx=dummy_tx\n )\n abstract_sharded_tokenizer_state = _create_abstract_sharded_pytree(\n dummy_tokenizer_train_state, sharding\n )\n restored_tokenizer = tokenizer_checkpoint_manager.restore(\n 
step=tokenizer_checkpoint_manager.latest_step(),\n args=ocp.args.Composite(\n model_state=ocp.args.StandardRestore(abstract_sharded_tokenizer_state),\n dataloader_state=grain.checkpoint.CheckpointRestore(grain_iterator),\n ),\n )[""model_state""]\n restored_tokenizer_params = restored_tokenizer.params[""params""]\n train_state.params[""params""][""tokenizer""].update(restored_tokenizer_params)\n tokenizer_checkpoint_manager.close()\n\n if args.lam_checkpoint:\n lam_checkpoint_manager = ocp.CheckpointManager(\n directory=args.lam_checkpoint,\n options=checkpoint_options,\n handler_registry=handler_registry,\n )\n dummy_lam = LatentActionModel(\n in_dim=args.image_channels,\n model_dim=args.lam_dim,\n latent_dim=args.latent_patch_dim,\n num_latents=args.num_latent_actions,\n patch_size=args.lam_patch_size,\n num_blocks=args.lam_num_blocks,\n num_heads=args.lam_num_heads,\n dropout=args.dropout,\n codebook_dropout=args.dropout,\n param_dtype=args.param_dtype,\n dtype=args.dtype,\n )\n lam_init_params = dummy_lam.init(_rng, inputs)\n dummy_lam_train_state = TrainState.create(\n apply_fn=dummy_lam.apply, params=lam_init_params, tx=dummy_tx\n )\n abstract_sharded_lam_state = _create_abstract_sharded_pytree(\n dummy_lam_train_state, sharding\n )\n restored_lam = lam_checkpoint_manager.restore(\n step=lam_checkpoint_manager.latest_step(),\n args=ocp.args.Composite(\n model_state=ocp.args.StandardRestore(abstract_sharded_lam_state),\n dataloader_state=grain.checkpoint.CheckpointRestore(grain_iterator),\n ),\n )[""model_state""]\n restored_lam_params = restored_lam.params[""params""]\n # Genie does not initialize all LAM modules, thus we omit those extra modules during restoration\n # (f.srambical) FIXME: Currently, this is a small HBM memory crunch since the LAM's decoder is loaded into HBM and immediately dicarded.\n # A workaround would be to restore to host memory first, and only move the weights to HBM after pruning the decoder\n restored_lam_params = {\n k: v\n for k, v in restored_lam_params.items()\n if k in train_state.params[""params""][""lam""]\n }\n train_state.params[""params""][""lam""].update(restored_lam_params)\n lam_checkpoint_manager.close()\n\n return train_state\n\ndef _create_abstract_sharded_pytree(pytree_template, sharding_spec):\n """"""Replaces arrays in a pytree with ShapeDtypeStructs having the given sharding.""""""\n\n def map_fn(leaf_template):\n if hasattr(leaf_template, ""shape"") and hasattr(leaf_template, ""dtype""):\n return jax.ShapeDtypeStruct(\n leaf_template.shape, leaf_template.dtype, sharding=sharding_spec\n )\n return leaf_template\n\n return jax.tree_util.tree_map(map_fn, pytree_template)",python,tab +6113,15319341,"TERMINAL",0,0,"[?25lls[?25h[?25ls[?25h",,terminal_output +6114,15319565,"TERMINAL",0,0,"\r\n[?2004l\rdata frame-knoms.png generation_1752489078.1856709.gif generation_1752503689.8298378.gif generation_1752513384.5762262.gif generation_1752579794.2949483.gif generation_1752581091.8428152.gif genie.py logs README.md sample.py tests utils\r\ndebug frame.png generation_1752489445.163335.gif generation_1752504934.1629438.gif generation_1752513923.7489405.gif generation_1752579931.2817705.gif generation_1752581503.520897.gif gifs models read_tf_record.py scripts_cremers train_dynamics.py wandb\r\ndiff.diff frames generation_1752501077.2698705.gif generation_1752505829.3945305.gif generation_1752579157.0310874.gif generation_1752580458.8344245.gif generation_1752581641.3452077.gif input_pipeline overfit_dir requirements-franz.txt scripts_horeka 
train_lam.py weekend-job-requeuer.sh\r\ndiff.log generate_dataset.py generation_1752502813.7130806.gif generation_1752513109.1235461.gif generation_1752579372.4300406.gif generation_1752580934.2848504.gif generation_1752588193.6372015.gif LICENSE __pycache__ requirements.txt slurm train_tokenizer.py weekend-job-starter.sh\r\n]0;tum_cte0515@hkn0710:~/Projects/jafar[?2004h(jafar) [tum_cte0515@hkn0710 jafar]$ ",,terminal_output +6115,15321831,"TERMINAL",0,0,"l",,terminal_output +6116,15322492,"TERMINAL",0,0,"[?25ls[?25h",,terminal_output +6117,15322601,"TERMINAL",0,0,"[?25l [?25h",,terminal_output +6118,15322809,"TERMINAL",0,0,"[?25lg[?25h",,terminal_output +6119,15323669,"TERMINAL",0,0,"",,terminal_output +6120,15324264,"TERMINAL",0,0,"[?25le[?25h",,terminal_output +6121,15324420,"TERMINAL",0,0,"[?25ln[?25h",,terminal_output +6122,15324483,"TERMINAL",0,0,"",,terminal_output +6123,15325616,"TERMINAL",0,0,"",,terminal_output +6124,15325780,"TERMINAL",0,0,"",,terminal_output +6125,15325923,"TERMINAL",0,0,"",,terminal_output +6126,15326109,"TERMINAL",0,0,"[?25lc[?25h",,terminal_output +6127,15326270,"TERMINAL",0,0,"[?25la[?25h",,terminal_output +6128,15326342,"TERMINAL",0,0,"[?25lt[?25h",,terminal_output +6129,15326466,"TERMINAL",0,0,"[?25l [?25h",,terminal_output +6130,15326583,"TERMINAL",0,0,"[?25lg[?25h",,terminal_output +6131,15326645,"TERMINAL",0,0,"",,terminal_output +6132,15327261,"TERMINAL",0,0,"[?25le[?25h",,terminal_output +6133,15327577,"TERMINAL",0,0,"n",,terminal_output +6134,15330110,"TERMINAL",0,0,"[?25li[?25h",,terminal_output +6135,15330313,"TERMINAL",0,0,"e.py ",,terminal_output +6136,15330807,"TERMINAL",0,0,"\r\n[?2004l\rfrom typing import Dict, Any\r\n\r\nimport optax\r\nimport jax\r\nimport jax.numpy as jnp\r\nimport flax.linen as nn\r\nfrom flax.training.train_state import TrainState\r\nimport orbax.checkpoint as ocp\r\n\r\nfrom models.dynamics import DynamicsMaskGIT, DynamicsAutoregressive\r\nfrom models.lam import LatentActionModel\r\nfrom models.tokenizer import TokenizerVQVAE\r\n\r\nimport os\r\nimport grain\r\n\r\n\r\nclass Genie(nn.Module):\r\n """"""Genie model""""""\r\n\r\n # --- Tokenizer ---\r\n in_dim: int\r\n tokenizer_dim: int\r\n latent_patch_dim: int\r\n num_patch_latents: int\r\n patch_size: int\r\n tokenizer_num_blocks: int\r\n tokenizer_num_heads: int\r\n # --- LAM ---\r\n lam_dim: int\r\n latent_action_dim: int\r\n num_latent_actions: int\r\n lam_patch_size: int\r\n lam_num_blocks: int\r\n lam_num_heads: int\r\n lam_co_train: bool\r\n # --- Dynamics ---\r\n dyna_dim: int\r\n dyna_num_blocks: int\r\n dyna_num_heads: int\r\n use_maskgit: bool\r\n param_dtype: jnp.dtype\r\n dtype: jnp.dtype\r\n dropout: float = 0.0\r\n mask_limit: float = 0.0\r\n\r\n def setup(self):\r\n self.tokenizer = TokenizerVQVAE(\r\n in_dim=self.in_dim,\r\n model_dim=self.tokenizer_dim,\r\n latent_dim=self.latent_patch_dim,\r\n num_latents=self.num_patch_latents,\r\n patch_size=self.patch_size,\r\n num_blocks=self.tokenizer_num_blocks,\r\n num_heads=self.tokenizer_num_heads,\r\n dropout=0.0,\r\n codebook_dropout=0.0,\r\n param_dtype=self.param_dtype,\r\n dtype=self.dtype,\r\n )\r\n self.lam = LatentActionModel(\r\n in_dim=self.in_dim,\r\n model_dim=self.lam_dim,\r\n latent_dim=self.latent_patch_dim,\r\n num_latents=self.num_latent_actions,\r\n patch_size=self.lam_patch_size,\r\n num_blocks=self.lam_num_blocks,\r\n num_heads=self.lam_num_heads,\r\n dropout=0.0,\r\n codebook_dropout=0.0,\r\n param_dtype=self.param_dtype,\r\n dtype=self.dtype,\r\n )\r\n\r\n if self.use_maskgit:\r\n 
self.dynamics = DynamicsMaskGIT(\r\n model_dim=self.dyna_dim,\r\n num_latents=self.num_patch_latents,\r\n num_blocks=self.dyna_num_blocks,\r\n num_heads=self.dyna_num_heads,\r\n dropout=self.dropout,\r\n mask_limit=self.mask_limit,\r\n param_dtype=self.param_dtype,\r\n dtype=self.dtype,\r\n ) \r\n else:\r\n self.dynamics = DynamicsAutoregressive(\r\n model_dim=self.dyna_dim,\r\n num_latents=self.num_patch_latents,\r\n num_blocks=self.dyna_num_blocks,\r\n num_heads=self.dyna_num_heads,\r\n dropout=self.dropout,\r\n param_dtype=self.param_dtype,\r\n dtype=self.dtype,\r\n )\r\n\r\n def __call__(self, batch: Dict[str, Any], training: bool = True) -> Dict[str, Any]:\r\n tokenizer_outputs = self.tokenizer.vq_encode(batch[""videos""], training=False)\r\n lam_outputs = self.lam.vq_encode(batch[""videos""], training=False)\r\n latent_actions = jax.lax.cond(\r\n self.lam_co_train,\r\n lambda: lam_outputs[""z_q""],\r\n lambda: jax.lax.stop_gradient(lam_outputs[""z_q""])\r\n )\r\n outputs = dict(\r\n video_tokens=jax.lax.stop_gradient(tokenizer_outputs[""indices""]),\r\n latent_actions=latent_actions,\r\n )\r\n outputs[""mask_rng""] = batch[""mask_rng""]\r\n dyna_outputs = self.dynamics(outputs, training)\r\n outputs.update(dyna_outputs)\r\n mle_indices = jnp.argmax(outputs[""token_logits""], axis=-1)\r\n outputs[""recon""] = self.tokenizer.decode(\r\n mle_indices, batch[""videos""].shape[2:4]\r\n )\r\n outputs[""lam_indices""] = lam_outputs[""indices""]\r\n return outputs\r\n\r\n @nn.compact\r\n def sample(\r\n self,\r\n batch: Dict[str, Any],\r\n seq_len: int,\r\n steps: int = 25,\r\n temperature: float = 1,\r\n sample_argmax: bool = False,\r\n ) -> Any:\r\n """"""\r\n Autoregressively samples up to `seq_len` future frames, following Figure 8 of the paper.\r\n\r\n - Input frames are tokenized once.\r\n - Future frames are generated autoregressively in token space.\r\n - All frames are detokenized in a single pass.\r\n\r\n Note:\r\n - For interactive or step-wise sampling, detokenization should occur after each action.\r\n - To maintain consistent tensor shapes across timesteps, all current and future frames are decoded at every step.\r\n - Temporal causal structure is preserved by \r\n a) reapplying the mask before each decoding step.\r\n b) a temporal causal mask is applied within each ST-transformer block.\r\n\r\n Dimension keys:\r\n B: batch size \r\n T: number of input (conditioning) frames \r\n N: patches per frame \r\n S: sequence length \r\n A: action space \r\n D: model latent dimension\r\n """"""\r\n # --- Encode videos and actions ---\r\n tokenizer_out = self.tokenizer.vq_encode(batch[""videos""], training=False)\r\n token_idxs = tokenizer_out[""indices""] # (B, T, N)\r\n B, T, N = token_idxs.shape\r\n pad_shape = (B, seq_len - T, N)\r\n pad = jnp.zeros(pad_shape, dtype=token_idxs.dtype)\r\n token_idxs = jnp.concatenate([token_idxs, pad], axis=1) # (B, S, N)\r\n action_tokens = self.lam.vq.get_codes(batch[""latent_actions""])\r\n\r\n MaskGITLoop = nn.scan(\r\n MaskGITStep,\r\n variable_broadcast=""params"",\r\n split_rngs={""params"": False},\r\n in_axes=0,\r\n out_axes=0,\r\n length=steps,\r\n )\r\n \r\n loop_fn = MaskGITLoop(\r\n dynamics=self.dynamics,\r\n tokenizer=self.tokenizer,\r\n temperature=temperature,\r\n sample_argmax=sample_argmax,\r\n steps=steps,\r\n )\r\n\r\n def generation_step_fn(carry, step_t):\r\n rng, current_token_idxs = carry\r\n rng, step_rng = jax.random.split(rng)\r\n\r\n # Mask current and future frames (i.e., t >= step_t)\r\n mask = jnp.arange(seq_len) >= 
step_t # (S,)\r\n mask = jnp.broadcast_to(mask[None, :, None], (B, seq_len, N)) # (B, S, N)\r\n mask = mask.astype(bool)\r\n masked_token_idxs = current_token_idxs * ~mask\r\n\r\n # --- Initialize and run MaskGIT loop ---\r\n init_carry_maskgit = (\r\n step_rng,\r\n masked_token_idxs,\r\n mask,\r\n action_tokens,\r\n )\r\n final_carry_maskgit, _ = loop_fn(init_carry_maskgit, jnp.arange(steps))\r\n updated_token_idxs = final_carry_maskgit[1]\r\n new_carry = (rng, updated_token_idxs)\r\n return new_carry, None\r\n\r\n # --- Run the autoregressive generation using scan ---\r\n initial_carry = (batch[""rng""], token_idxs)\r\n timesteps_to_scan = jnp.arange(T, seq_len)\r\n final_carry, _ = jax.lax.scan(\r\n generation_step_fn,\r\n initial_carry,\r\n timesteps_to_scan\r\n )\r\n final_token_idxs = final_carry[1]\r\n\r\n # --- Decode all tokens at once at the end ---\r\n final_frames = self.tokenizer.decode(\r\n final_token_idxs,\r\n video_hw=batch[""videos""].shape[2:4],\r\n )\r\n return final_frames\r\n\r\n def vq_encode(self, batch, training) -> Dict[str, Any]:\r\n # --- Preprocess videos ---\r\n lam_output = self.lam.vq_encode(batch[""videos""], training=training)\r\n return lam_output[""indices""]\r\n\r\n\r\nclass MaskGITStep(nn.Module):\r\n dynamics: nn.Module\r\n tokenizer: nn.Module\r\n temperature: float\r\n sample_argmax: bool\r\n steps: int\r\n\r\n @nn.compact\r\n def __call__(self, carry, x):\r\n rng, token_idxs, mask, action_tokens = carry\r\n step = x\r\n N = token_idxs.shape[2]\r\n\r\n # --- Construct + encode video ---\r\n vid_embed = self.dynamics.patch_embed(token_idxs) # (B, S, N, D)\r\n mask_token = self.dynamics.mask_token # (1, 1, 1, D,)\r\n mask_expanded = mask[..., None] # (B, S, N, 1) \r\n vid_embed = jnp.where(mask_expanded, mask_token, vid_embed)\r\n\r\n # --- Predict transition ---\r\n act_embed = self.dynamics.action_up(action_tokens)\r\n vid_embed += jnp.pad(act_embed, ((0, 0), (1, 0), (0, 0), (0, 0)))\r\n unmasked_ratio = jnp.cos(jnp.pi * (step + 1) / (self.steps * 2))\r\n step_temp = self.temperature * (1.0 - unmasked_ratio)\r\n final_logits = self.dynamics.dynamics(vid_embed) / step_temp\r\n\r\n # --- Sample new tokens for final frame ---\r\n if self.sample_argmax:\r\n sampled_token_idxs = jnp.argmax(final_logits, axis=-1)\r\n else:\r\n rng, _rng = jax.random.split(rng)\r\n sampled_token_idxs = jax.random.categorical(_rng, final_logits)\r\n gather_fn = jax.vmap(jax.vmap(jax.vmap(lambda x, y: x[y])))\r\n final_token_probs = gather_fn(jax.nn.softmax(final_logits), sampled_token_idxs)\r\n final_token_probs += ~mask\r\n # Update masked tokens only\r\n token_idxs = jnp.where(mask, sampled_token_idxs, token_idxs)\r\n\r\n # --- Update mask ---\r\n num_unmasked_tokens = jnp.round(N * (1.0 - unmasked_ratio)).astype(int)\r\n idx_mask = jnp.arange(final_token_probs.shape[-1]) > num_unmasked_tokens\r\n sorted_idxs = jnp.argsort(final_token_probs, axis=-1, descending=True)\r\n mask_update_fn = jax.vmap(lambda msk, ids: msk.at[ids].set(idx_mask))\r\n new_mask = mask_update_fn(mask, sorted_idxs)\r\n\r\n new_carry = (rng, token_idxs, new_mask, action_tokens)\r\n return new_carry, None\r\n\r\ndef restore_genie_components(\r\n train_state: TrainState,\r\n sharding: jax.sharding.NamedSharding,\r\n grain_iterator: grain.DataLoaderIterator,\r\n inputs: Dict[str, jax.Array],\r\n rng: jax.Array,\r\n args,\r\n):\r\n """"""Restore pre-trained Genie components""""""\r\n rng, _rng = jax.random.split(rng)\r\n\r\n # dummy values since we only use tx to initialize the dummy train states\r\n 
dummy_tx = optax.adamw(\r\n learning_rate=optax.constant_schedule(args.max_lr),\r\n b1=0.9,\r\n b2=0.9,\r\n weight_decay=1e-4,\r\n )\r\n handler_registry = ocp.handlers.DefaultCheckpointHandlerRegistry()\r\n handler_registry.add('model_state', ocp.args.StandardRestore, ocp.handlers.StandardCheckpointHandler)\r\n handler_registry.add('dataloader_state', grain.checkpoint.CheckpointRestore, grain.checkpoint.CheckpointHandler)\r\n \r\n\r\n checkpoint_options = ocp.CheckpointManagerOptions(\r\n step_format_fixed_length=6,\r\n )\r\n tokenizer_checkpoint_manager = ocp.CheckpointManager(\r\n directory=args.tokenizer_checkpoint,\r\n options=checkpoint_options,\r\n handler_registry=handler_registry,\r\n )\r\n dummy_tokenizer = TokenizerVQVAE(\r\n in_dim=args.image_channels,\r\n model_dim=args.tokenizer_dim,\r\n latent_dim=args.latent_patch_dim,\r\n num_latents=args.num_patch_latents,\r\n patch_size=args.patch_size,\r\n num_blocks=args.tokenizer_num_blocks,\r\n num_heads=args.tokenizer_num_heads,\r\n dropout=args.dropout,\r\n codebook_dropout=args.dropout,\r\n param_dtype=args.param_dtype,\r\n dtype=args.dtype,\r\n )\r\n tokenizer_init_params = dummy_tokenizer.init(_rng, inputs)\r\n dummy_tokenizer_train_state = TrainState.create(\r\n apply_fn=dummy_tokenizer.apply, params=tokenizer_init_params, tx=dummy_tx\r\n )\r\n abstract_sharded_tokenizer_state = _create_abstract_sharded_pytree(\r\n dummy_tokenizer_train_state, sharding\r\n )\r\n restored_tokenizer = tokenizer_checkpoint_manager.restore(\r\n step=tokenizer_checkpoint_manager.latest_step(),\r\n args=ocp.args.Composite(\r\n model_state=ocp.args.StandardRestore(abstract_sharded_tokenizer_state),\r\n dataloader_state=grain.checkpoint.CheckpointRestore(grain_iterator),\r\n ),\r\n )[""model_state""]\r\n restored_tokenizer_params = restored_tokenizer.params[""params""]\r\n train_state.params[""params""][""tokenizer""].update(restored_tokenizer_params)\r\n tokenizer_checkpoint_manager.close()\r\n\r\n if args.lam_checkpoint:\r\n lam_checkpoint_manager = ocp.CheckpointManager(\r\n directory=args.lam_checkpoint,\r\n options=checkpoint_options,\r\n handler_registry=handler_registry,\r\n )\r\n dummy_lam = LatentActionModel(\r\n in_dim=args.image_channels,\r\n model_dim=args.lam_dim,\r\n latent_dim=args.latent_patch_dim,\r\n num_latents=args.num_latent_actions,\r\n patch_size=args.lam_patch_size,\r\n num_blocks=args.lam_num_blocks,\r\n num_heads=args.lam_num_heads,\r\n dropout=args.dropout,\r\n codebook_dropout=args.dropout,\r\n param_dtype=args.param_dtype,\r\n dtype=args.dtype,\r\n )\r\n lam_init_params = dummy_lam.init(_rng, inputs)\r\n dummy_lam_train_state = TrainState.create(\r\n apply_fn=dummy_lam.apply, params=lam_init_params, tx=dummy_tx\r\n )\r\n abstract_sharded_lam_state = _create_abstract_sharded_pytree(\r\n dummy_lam_train_state, sharding\r\n )\r\n restored_lam = lam_checkpoint_manager.restore(\r\n step=lam_checkpoint_manager.latest_step(),\r\n args=ocp.args.Composite(\r\n model_state=ocp.args.StandardRestore(abstract_sharded_lam_state),\r\n dataloader_state=grain.checkpoint.CheckpointRestore(grain_iterator),\r\n ),\r\n )[""model_state""]\r\n restored_lam_params = restored_lam.params[""params""]\r\n # Genie does not initialize all LAM modules, thus we omit those extra modules during restoration\r\n # (f.srambical) FIXME: Currently, this is a small HBM memory crunch since the LAM's decoder is loaded into HBM and immediately dicarded.\r\n # A workaround would be to restore to host memory first, and only move the weights to HBM after pruning the 
decoder\r\n restored_lam_params = {\r\n k: v\r\n for k, v in restored_lam_params.items()\r\n if k in train_state.params[""params""][""lam""]\r\n }\r\n train_state.params[""params""][""lam""].update(restored_lam_params)\r\n lam_checkpoint_manager.close()\r\n\r\n return train_state\r\n\r\ndef _create_abstract_sharded_pytree(pytree_template, sharding_spec):\r\n """"""Replaces arrays in a pytree with ShapeDtypeStructs having the given sharding.""""""\r\n\r\n def map_fn(leaf_template):\r\n if hasattr(leaf_template, ""shape"") and hasattr(leaf_template, ""dtype""):\r\n return jax.ShapeDtypeStruct(\r\n leaf_template.shape, leaf_template.dtype, sharding=sharding_spec\r\n )\r\n return leaf_template\r\n\r\n return jax.tree_util.tree_map(map_fn, pytree_template)]0;tum_cte0515@hkn0710:~/Projects/jafar[?2004h(jafar) [tum_cte0515@hkn0710 jafar]$ ",,terminal_output +6137,15331674,"TERMINAL",0,0,"m",,terminal_output +6138,15331950,"TERMINAL",0,0,"[?25l [?25h",,terminal_output +6139,15332368,"TERMINAL",0,0,"[?25lv[?25h",,terminal_output +6140,15332431,"TERMINAL",0,0,"[?25l [?25h",,terminal_output +6141,15332931,"TERMINAL",0,0,"[?25l*[?25h",,terminal_output +6142,15333710,"TERMINAL",0,0,"[?25l.[?25h",,terminal_output +6143,15334040,"TERMINAL",0,0,"[?25lg[?25h[?25li[?25h",,terminal_output +6144,15334198,"TERMINAL",0,0,"[?25lt[?25h",,terminal_output +6145,15334261,"TERMINAL",0,0,"[?25l [?25h",,terminal_output +6146,15335487,"TERMINAL",0,0,"[?25lf[?25h",,terminal_output +6147,15335755,"TERMINAL",0,0,"[?25l [?25h",,terminal_output +6148,15336552,"TERMINAL",0,0,"[?25lg[?25h",,terminal_output +6149,15336614,"TERMINAL",0,0,"[?25li[?25h",,terminal_output +6150,15336762,"TERMINAL",0,0,"fs/",,terminal_output +6151,15337298,"TERMINAL",0,0,"\r\n[?2004l\r",,terminal_output +6152,15337424,"TERMINAL",0,0,"]0;tum_cte0515@hkn0710:~/Projects/jafar[?2004h(jafar) [tum_cte0515@hkn0710 jafar]$ ",,terminal_output +6153,15338697,"TERMINAL",0,0,"mv *.gif gifs/",,terminal_output +6154,15339356,"TERMINAL",0,0,"",,terminal_output +6155,15343019,"genie.py",0,0,"",python,tab +6156,15345762,"TERMINAL",0,0,"[?25ll[?25h",,terminal_output +6157,15345825,"TERMINAL",0,0,"[?25ls[?25h",,terminal_output +6158,15346232,"TERMINAL",0,0,"\r\n[?2004l\rdata diff.diff frame-knoms.png frames genie.py input_pipeline logs overfit_dir README.md requirements-franz.txt sample.py scripts_horeka tests train_lam.py utils weekend-job-requeuer.sh\r\ndebug diff.log frame.png generate_dataset.py gifs LICENSE models __pycache__ read_tf_record.py requirements.txt scripts_cremers slurm train_dynamics.py train_tokenizer.py wandb weekend-job-starter.sh\r\n]0;tum_cte0515@hkn0710:~/Projects/jafar[?2004h(jafar) [tum_cte0515@hkn0710 jafar]$ ",,terminal_output +6159,15350012,"genie.py",0,0,"",python,tab +6160,15361475,"genie.py",3827,0,"",python,selection_mouse +6161,15362776,"genie.py",3827,0,"_",python,content +6162,15362778,"genie.py",3828,0,"",python,selection_keyboard +6163,15363075,"genie.py",3828,0,"m",python,content +6164,15363078,"genie.py",3829,0,"",python,selection_keyboard +6165,15363125,"genie.py",3829,0,"a",python,content +6166,15363127,"genie.py",3830,0,"",python,selection_keyboard +6167,15363171,"genie.py",3830,0,"s",python,content +6168,15363173,"genie.py",3831,0,"",python,selection_keyboard +6169,15363256,"genie.py",3831,0,"k",python,content +6170,15363257,"genie.py",3832,0,"",python,selection_keyboard +6171,15364009,"genie.py",3832,0,"g",python,content +6172,15364010,"genie.py",3833,0,"",python,selection_keyboard 
+6173,15364097,"genie.py",3833,0,"i",python,content +6174,15364099,"genie.py",3834,0,"",python,selection_keyboard +6175,15364188,"genie.py",3834,0,"t",python,content +6176,15364190,"genie.py",3835,0,"",python,selection_keyboard +6177,15365902,"sample.py",0,0,"from dataclasses import dataclass\nimport time\nimport os\n\nimport dm_pix as pix\nimport einops\nimport jax\nimport jax.numpy as jnp\nimport flax.linen as nn\nimport numpy as np\nfrom orbax.checkpoint import PyTreeCheckpointer\nfrom PIL import Image, ImageDraw\nimport tyro\n\nfrom genie import Genie\nfrom utils.dataloader import get_dataloader\n\n\n@dataclass\nclass Args:\n # Experiment\n seed: int = 0\n seq_len: int = 16\n image_channels: int = 3\n image_height: int = 90\n image_width: int = 160\n data_dir: str = ""data/coinrun_episodes""\n checkpoint: str = """"\n # Sampling\n batch_size: int = 1\n maskgit_steps: int = 25\n temperature: float = 1.0\n sample_argmax: bool = True\n start_frame: int = 0\n # Tokenizer checkpoint\n tokenizer_dim: int = 512\n latent_patch_dim: int = 32\n num_patch_latents: int = 1024\n patch_size: int = 4\n tokenizer_num_blocks: int = 8\n tokenizer_num_heads: int = 8\n # LAM checkpoint\n lam_dim: int = 512\n latent_action_dim: int = 32\n num_latent_actions: int = 6\n lam_patch_size: int = 16\n lam_num_blocks: int = 8\n lam_num_heads: int = 8\n # Dynamics checkpoint\n dyna_dim: int = 512\n dyna_num_blocks: int = 12\n dyna_num_heads: int = 8\n param_dtype: jnp.dtype = jnp.float32\n dtype: jnp.dtype = jnp.bfloat16\n\n\nargs = tyro.cli(Args)\nrng = jax.random.PRNGKey(args.seed)\n\n# --- Load Genie checkpoint ---\ngenie = Genie(\n # Tokenizer\n in_dim=args.image_channels,\n tokenizer_dim=args.tokenizer_dim,\n latent_patch_dim=args.latent_patch_dim,\n num_patch_latents=args.num_patch_latents,\n patch_size=args.patch_size,\n tokenizer_num_blocks=args.tokenizer_num_blocks,\n tokenizer_num_heads=args.tokenizer_num_heads,\n # LAM\n lam_dim=args.lam_dim,\n latent_action_dim=args.latent_action_dim,\n num_latent_actions=args.num_latent_actions,\n lam_patch_size=args.lam_patch_size,\n lam_num_blocks=args.lam_num_blocks,\n lam_num_heads=args.lam_num_heads,\n lam_co_train=False,\n # Dynamics\n dyna_dim=args.dyna_dim,\n dyna_num_blocks=args.dyna_num_blocks,\n dyna_num_heads=args.dyna_num_heads,\n param_dtype=args.param_dtype,\n dtype=args.dtype,\n)\nrng, _rng = jax.random.split(rng)\nimage_shape = (args.image_height, args.image_width, args.image_channels)\ndummy_inputs = dict(\n videos=jnp.zeros((args.batch_size, args.seq_len, *image_shape), dtype=jnp.float32),\n mask_rng=_rng,\n)\nrng, _rng = jax.random.split(rng)\nparams = genie.init(_rng, dummy_inputs)\nckpt = PyTreeCheckpointer().restore(args.checkpoint)[""model""][""params""][""params""]\nparams[""params""].update(ckpt)\n\n\ndef _sampling_wrapper(module, batch):\n return module.sample(batch, args.seq_len, args.maskgit_steps, args.temperature, args.sample_argmax)\n\n# --- Define autoregressive sampling loop ---\ndef _autoreg_sample(rng, video_batch, action_batch):\n vid = video_batch[:, : args.start_frame + 1]\n sampling_fn = jax.jit(nn.apply(_sampling_wrapper, genie)) \n rng, _rng = jax.random.split(rng)\n batch = dict(videos=vid, latent_actions=action_batch, rng=_rng)\n generated_vid = sampling_fn(\n params,\n batch\n )\n return generated_vid\n\n# --- Get video + latent actions ---\narray_record_files = [\n os.path.join(args.data_dir, x)\n for x in os.listdir(args.data_dir)\n if x.endswith("".array_record"")\n]\ndataloader = get_dataloader(\n array_record_files,\n 
args.seq_len,\n args.batch_size,\n args.image_height,\n args.image_width,\n args.image_channels,\n num_workers=8,\n prefetch_buffer_size=1,\n seed=args.seed,\n)\nvideo_batch = next(iter(dataloader))\n# Get latent actions for all videos in the batch\nbatch = dict(videos=video_batch)\naction_batch = genie.apply(params, batch, False, method=Genie.vq_encode)\naction_batch = action_batch.reshape(video_batch.shape[0], args.seq_len - 1, 1)\n\n# --- Sample + evaluate video ---\nvid = _autoreg_sample(rng, video_batch, action_batch)\ngt = video_batch[:, : vid.shape[1]].clip(0, 1).reshape(-1, *video_batch.shape[2:])\nrecon = vid.clip(0, 1).reshape(-1, *vid.shape[2:])\nssim = pix.ssim(gt[:, args.start_frame + 1 :], recon[:, args.start_frame + 1 :]).mean()\nprint(f""SSIM: {ssim}"")\n\n# --- Construct video ---\ntrue_videos = (video_batch * 255).astype(np.uint8)\npred_videos = (vid * 255).astype(np.uint8)\nvideo_comparison = np.zeros((2, *vid.shape), dtype=np.uint8)\nvideo_comparison[0] = true_videos[:, :args.seq_len]\nvideo_comparison[1] = pred_videos\nframes = einops.rearrange(video_comparison, ""n b t h w c -> t (b h) (n w) c"")\n\n# --- Save video --- \nimgs = [Image.fromarray(img) for img in frames]\n# Write actions on each frame, on each row (i.e., for each video in the batch, on the GT row)\nfor t, img in enumerate(imgs[1:]):\n d = ImageDraw.Draw(img)\n for row in range(action_batch.shape[0]):\n action = action_batch[row, t, 0]\n y_offset = row * video_batch.shape[2] + 2\n d.text((2, y_offset), f""{action}"", fill=255)\nimgs[0].save(\n f""generation_{time.time()}.gif"",\n save_all=True,\n append_images=imgs[1:],\n duration=250,\n loop=0,\n)\n",python,tab +6178,15367162,"sample.py",2010,0,"",python,selection_mouse +6179,15367164,"sample.py",2009,0,"",python,selection_command +6180,15369537,"sample.py",2660,0,"",python,selection_command +6181,15371885,"sample.py",2661,0,"",python,selection_command +6182,15372032,"sample.py",2662,0,"",python,selection_command +6183,15372158,"sample.py",2663,0,"",python,selection_command +6184,15372295,"sample.py",2664,0,"",python,selection_command +6185,15372451,"sample.py",2665,0,"",python,selection_command +6186,15372587,"sample.py",2666,0,"",python,selection_command +6187,15377041,"sample.py",2731,0,"",python,selection_command +6188,15377689,"sample.py",2806,0,"",python,selection_command +6189,15378972,"sample.py",2731,0,"",python,selection_command +6190,15380598,"sample.py",2665,0,"",python,selection_mouse +6191,15381416,"sample.py",2666,0,"",python,selection_command +6192,15381958,"sample.py",2666,0,"_",python,content +6193,15381959,"sample.py",2667,0,"",python,selection_keyboard +6194,15382200,"sample.py",2667,0,"m",python,content +6195,15382201,"sample.py",2668,0,"",python,selection_keyboard +6196,15382294,"sample.py",2668,0,"a",python,content +6197,15382295,"sample.py",2669,0,"",python,selection_keyboard +6198,15382355,"sample.py",2669,0,"s",python,content +6199,15382357,"sample.py",2670,0,"",python,selection_keyboard +6200,15382467,"sample.py",2670,0,"k",python,content +6201,15382468,"sample.py",2671,0,"",python,selection_keyboard +6202,15382798,"sample.py",2671,0,"g",python,content +6203,15382798,"sample.py",2672,0,"",python,selection_keyboard +6204,15382959,"sample.py",2672,0,"i",python,content +6205,15382960,"sample.py",2673,0,"",python,selection_keyboard +6206,15383032,"sample.py",2673,0,"t",python,content +6207,15383033,"sample.py",2674,0,"",python,selection_keyboard +6208,15383431,"sample.py",2673,0,"",python,selection_command 
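sample.py (captured in full in the tab event above) jits the module's sample method by wrapping it with nn.apply and closing over the non-array arguments. A minimal self-contained sketch of that pattern, where Toy is a stand-in for the project's Genie:

import jax
import jax.numpy as jnp
import flax.linen as nn

class Toy(nn.Module):
    @nn.compact
    def __call__(self, x):
        return nn.Dense(4)(x)

    def sample(self, x, temperature):
        return self(x) / temperature

def _wrapper(module, x):
    # Non-array arguments are closed over, so the jitted function only sees arrays.
    return module.sample(x, temperature=2.0)

toy = Toy()
params = toy.init(jax.random.PRNGKey(0), jnp.ones((1, 3)))
sampling_fn = jax.jit(nn.apply(_wrapper, toy))
out = sampling_fn(params, jnp.ones((1, 3)))  # shape (1, 4)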
+6209,15387695,"genie.py",0,0,"",python,tab +6210,15395328,"genie.py",3806,0,"",python,selection_mouse +6211,15395980,"genie.py",3796,0,"",python,selection_mouse +6212,15396804,"genie.py",3796,0,"\n",python,content +6213,15396972,"genie.py",3797,0,"\n",python,content +6214,15397109,"genie.py",3798,0,"\n",python,content +6215,15397460,"genie.py",3798,0,"",python,selection_command +6216,15397631,"genie.py",3797,0,"",python,selection_command +6217,15398050,"genie.py",3797,0,"\n",python,content +6218,15399447,"genie.py",3798,1,"",python,content +6219,15400675,"genie.py",3798,0," ",python,content +6220,15401306,"genie.py",3802,0,"d",python,content +6221,15401307,"genie.py",3803,0,"",python,selection_keyboard +6222,15401420,"genie.py",3803,0,"e",python,content +6223,15401421,"genie.py",3804,0,"",python,selection_keyboard +6224,15401543,"genie.py",3804,0,"f",python,content +6225,15401544,"genie.py",3805,0,"",python,selection_keyboard +6226,15401689,"genie.py",3805,0," ",python,content +6227,15401690,"genie.py",3806,0,"",python,selection_keyboard +6228,15402053,"genie.py",3806,0,"s",python,content +6229,15402054,"genie.py",3807,0,"",python,selection_keyboard +6230,15402256,"genie.py",3807,0,"a",python,content +6231,15402258,"genie.py",3808,0,"",python,selection_keyboard +6232,15402399,"genie.py",3808,0,"m",python,content +6233,15402401,"genie.py",3809,0,"",python,selection_keyboard +6234,15402557,"genie.py",3809,0,"p",python,content +6235,15402559,"genie.py",3810,0,"",python,selection_keyboard +6236,15402617,"genie.py",3810,0,"l",python,content +6237,15402619,"genie.py",3811,0,"",python,selection_keyboard +6238,15402699,"genie.py",3811,0,"e",python,content +6239,15402701,"genie.py",3812,0,"",python,selection_keyboard +6240,15402958,"genie.py",3812,0,"_",python,content +6241,15402960,"genie.py",3813,0,"",python,selection_keyboard +6242,15404555,"genie.py",3813,0,"c",python,content +6243,15404556,"genie.py",3814,0,"",python,selection_keyboard +6244,15404705,"genie.py",3814,0,"a",python,content +6245,15404706,"genie.py",3815,0,"",python,selection_keyboard +6246,15404835,"genie.py",3815,0,"u",python,content +6247,15404836,"genie.py",3816,0,"",python,selection_keyboard +6248,15404937,"genie.py",3816,0,"s",python,content +6249,15404939,"genie.py",3817,0,"",python,selection_keyboard +6250,15405103,"genie.py",3817,0,"a",python,content +6251,15405104,"genie.py",3818,0,"",python,selection_keyboard +6252,15405164,"genie.py",3818,0,"l",python,content +6253,15405165,"genie.py",3819,0,"",python,selection_keyboard +6254,15406093,"genie.py",3819,0,"()",python,content +6255,15406094,"genie.py",3820,0,"",python,selection_keyboard +6256,15406277,"genie.py",3820,0,"\n \n ",python,content +6257,15406354,"genie.py",3829,0," ",python,content +6258,15408885,"genie.py",3839,0,"",python,selection_command +6259,15409132,"genie.py",3838,1,"",python,content +6260,15409583,"genie.py",3834,4,"",python,content +6261,15409968,"genie.py",3833,1,"",python,content +6262,15410385,"genie.py",3829,4,"",python,content +6263,15410691,"genie.py",3825,4,"",python,content +6264,15410865,"genie.py",3821,4,"",python,content +6265,15411211,"genie.py",3820,1,"",python,content +6266,15412500,"genie.py",3820,0,")",python,content +6267,15412502,"genie.py",3821,0,"",python,selection_keyboard +6268,15413031,"genie.py",3821,0,":",python,content +6269,15413032,"genie.py",3822,0,"",python,selection_keyboard +6270,15413490,"genie.py",3822,0,"\n ",python,content +6271,15414300,"genie.py",3823,8,"",python,content 
+6272,15414715,"genie.py",3930,0,"",python,selection_mouse +6273,15415533,"genie.py",3878,0,"",python,selection_mouse +6274,15415545,"genie.py",3877,0,"",python,selection_command +6275,15416469,"genie.py",3865,13," self,",python,selection_command +6276,15416714,"genie.py",3865,44," self,\n batch: Dict[str, Any],",python,selection_command +6277,15416987,"genie.py",3865,66," self,\n batch: Dict[str, Any],\n seq_len: int,",python,selection_command +6278,15417137,"genie.py",3865,91," self,\n batch: Dict[str, Any],\n seq_len: int,\n steps: int = 25,",python,selection_command +6279,15417287,"genie.py",3865,123," self,\n batch: Dict[str, Any],\n seq_len: int,\n steps: int = 25,\n temperature: float = 1,",python,selection_command +6280,15417420,"genie.py",3865,160," self,\n batch: Dict[str, Any],\n seq_len: int,\n steps: int = 25,\n temperature: float = 1,\n sample_argmax: bool = False,",python,selection_command +6281,15417687,"genie.py",3865,0,"",python,selection_command +6282,15417812,"genie.py",3841,0,"",python,selection_command +6283,15417941,"genie.py",3825,0,"",python,selection_command +6284,15418107,"genie.py",3824,0,"",python,selection_command +6285,15418272,"genie.py",3823,0,"",python,selection_command +6286,15418428,"genie.py",3798,0,"",python,selection_command +6287,15418629,"genie.py",3799,0,"",python,selection_command +6288,15419116,"genie.py",3800,0,"",python,selection_command +6289,15419148,"genie.py",3801,0,"",python,selection_command +6290,15419216,"genie.py",3802,0,"",python,selection_command +6291,15419217,"genie.py",3803,0,"",python,selection_command +6292,15419230,"genie.py",3804,0,"",python,selection_command +6293,15419299,"genie.py",3805,0,"",python,selection_command +6294,15419309,"genie.py",3806,0,"",python,selection_command +6295,15419334,"genie.py",3807,0,"",python,selection_command +6296,15419382,"genie.py",3808,0,"",python,selection_command +6297,15419398,"genie.py",3809,0,"",python,selection_command +6298,15419444,"genie.py",3810,0,"",python,selection_command +6299,15419455,"genie.py",3811,0,"",python,selection_command +6300,15419484,"genie.py",3812,0,"",python,selection_command +6301,15419517,"genie.py",3813,0,"",python,selection_command +6302,15419548,"genie.py",3814,0,"",python,selection_command +6303,15419579,"genie.py",3815,0,"",python,selection_command +6304,15419604,"genie.py",3816,0,"",python,selection_command +6305,15419633,"genie.py",3817,0,"",python,selection_command +6306,15419663,"genie.py",3818,0,"",python,selection_command +6307,15419696,"genie.py",3819,0,"",python,selection_command +6308,15421467,"genie.py",3822,0,"\n self,\n batch: Dict[str, Any],\n seq_len: int,\n steps: int = 25,\n temperature: float = 1,\n sample_argmax: bool = False,",python,content +6309,15421496,"genie.py",3831,0,"",python,selection_command +6310,15422889,"genie.py",3832,0,"",python,selection_command +6311,15423181,"genie.py",3833,0,"",python,selection_command +6312,15423462,"genie.py",3808,0,"",python,selection_command +6313,15423687,"genie.py",3809,0,"",python,selection_command +6314,15424176,"genie.py",3810,0,"",python,selection_command +6315,15424204,"genie.py",3811,0,"",python,selection_command +6316,15424276,"genie.py",3812,0,"",python,selection_command +6317,15424276,"genie.py",3813,0,"",python,selection_command +6318,15424299,"genie.py",3814,0,"",python,selection_command +6319,15424353,"genie.py",3815,0,"",python,selection_command +6320,15424353,"genie.py",3816,0,"",python,selection_command +6321,15424413,"genie.py",3817,0,"",python,selection_command 
+6322,15424585,"genie.py",3818,0,"",python,selection_command +6323,15424762,"genie.py",3819,0,"",python,selection_command +6324,15424911,"genie.py",3820,0,"",python,selection_command +6325,15426415,"genie.py",3820,2,"",python,content +6326,15426481,"genie.py",3819,0,"",python,selection_command +6327,15426944,"genie.py",3833,0,"",python,selection_command +6328,15427946,"genie.py",3857,0,"",python,selection_command +6329,15428130,"genie.py",3886,0,"",python,selection_command +6330,15428278,"genie.py",3910,0,"",python,selection_command +6331,15428418,"genie.py",3935,0,"",python,selection_command +6332,15428575,"genie.py",3967,0,"",python,selection_command +6333,15428890,"genie.py",3981,0,"\n ",python,content +6334,15430420,"genie.py",3982,8," )",python,content +6335,15430422,"genie.py",3987,0,"",python,selection_keyboard +6336,15430773,"genie.py",3987,0,":",python,content +6337,15430774,"genie.py",3988,0,"",python,selection_keyboard +6338,15430980,"genie.py",3988,0,"\n ",python,content +6339,15431075,"genie.py",3993,0," ",python,content +6340,15431623,"genie.py",3993,4,"",python,content +6341,15432384,"genie.py",3993,0," ",python,content +6342,15432688,"genie.py",3997,0,"p",python,content +6343,15432689,"genie.py",3998,0,"",python,selection_keyboard +6344,15432869,"genie.py",3998,0,"a",python,content +6345,15432870,"genie.py",3999,0,"",python,selection_keyboard +6346,15432954,"genie.py",3999,0,"s",python,content +6347,15432955,"genie.py",4000,0,"",python,selection_keyboard +6348,15433091,"genie.py",4000,0,"s",python,content +6349,15433092,"genie.py",4001,0,"",python,selection_keyboard +6350,15433771,"genie.py",4000,0,"",python,selection_command +6351,15434524,"genie.py",3901,0,"",python,selection_mouse +6352,15435618,"genie.py",3888,25,"",python,content +6353,15435655,"genie.py",3896,0,"",python,selection_command +6354,15435837,"genie.py",3874,0,"",python,selection_command +6355,15436117,"genie.py",3843,0,"",python,selection_command +6356,15436588,"genie.py",3874,0,"",python,selection_command +6357,15436766,"genie.py",3896,0,"",python,selection_command +6358,15436889,"genie.py",3928,0,"",python,selection_command +6359,15437218,"genie.py",3896,0,"",python,selection_command +6360,15437737,"genie.py",3928,0,"",python,selection_command +6361,15438037,"genie.py",3896,0,"",python,selection_command +6362,15438124,"genie.py",3928,0,"",python,selection_command +6363,15438283,"genie.py",3896,0,"",python,selection_command +6364,15438337,"genie.py",3928,0,"",python,selection_command +6365,15438490,"genie.py",3896,0,"",python,selection_command +6366,15438575,"genie.py",3928,0,"",python,selection_command +6367,15438700,"genie.py",3896,0,"",python,selection_command +6368,15438869,"genie.py",3874,0,"",python,selection_command +6369,15439044,"genie.py",3843,0,"",python,selection_command +6370,15440738,"genie.py",3963,0,"",python,selection_mouse +6371,15440748,"genie.py",3962,0,"",python,selection_command +6372,15441246,"genie.py",3974,0,"",python,selection_mouse +6373,15441384,"genie.py",3972,4,"pass",python,selection_mouse +6374,15442183,"genie.py",3977,0,"",python,selection_mouse +6375,15443199,"genie.py",3975,0,"",python,selection_mouse +6376,15443359,"genie.py",3972,4,"pass",python,selection_mouse +6377,15444703,"genie.py",3976,0,"",python,selection_mouse +6378,15444730,"genie.py",3975,0,"",python,selection_command +6379,15444997,"genie.py",3972,4,"pass",python,selection_mouse +6380,15445011,"genie.py",3973,3,"ass",python,selection_command +6381,15445204,"genie.py",3963,10,"\n p",python,selection_mouse 
+6382,15445333,"genie.py",3927,46," sample_argmax: bool = False,\n ):\n p",python,selection_mouse +6383,15445334,"genie.py",3870,103," seq_len: int,\n temperature: float = 1,\n sample_argmax: bool = False,\n ):\n p",python,selection_mouse +6384,15445334,"genie.py",3837,136," batch: Dict[str, Any],\n seq_len: int,\n temperature: float = 1,\n sample_argmax: bool = False,\n ):\n p",python,selection_mouse +6385,15445334,"genie.py",3822,151," self,\n batch: Dict[str, Any],\n seq_len: int,\n temperature: float = 1,\n sample_argmax: bool = False,\n ):\n p",python,selection_mouse +6386,15445334,"genie.py",3821,152," self,\n batch: Dict[str, Any],\n seq_len: int,\n temperature: float = 1,\n sample_argmax: bool = False,\n ):\n p",python,selection_mouse +6387,15445335,"genie.py",3798,175," def sample_causal(\n self,\n batch: Dict[str, Any],\n seq_len: int,\n temperature: float = 1,\n sample_argmax: bool = False,\n ):\n p",python,selection_mouse +6388,15445450,"genie.py",3797,176,"\n def sample_causal(\n self,\n batch: Dict[str, Any],\n seq_len: int,\n temperature: float = 1,\n sample_argmax: bool = False,\n ):\n p",python,selection_mouse +6389,15445748,"genie.py",3798,175," def sample_causal(\n self,\n batch: Dict[str, Any],\n seq_len: int,\n temperature: float = 1,\n sample_argmax: bool = False,\n ):\n p",python,selection_mouse +6390,15445819,"genie.py",3821,152," self,\n batch: Dict[str, Any],\n seq_len: int,\n temperature: float = 1,\n sample_argmax: bool = False,\n ):\n p",python,selection_mouse +6391,15446100,"genie.py",3798,175," def sample_causal(\n self,\n batch: Dict[str, Any],\n seq_len: int,\n temperature: float = 1,\n sample_argmax: bool = False,\n ):\n p",python,selection_mouse +6392,15483015,"genie.py",3798,0,"",python,selection_command +6393,15622957,"genie.py",3976,0,"",python,selection_mouse +6394,15622975,"genie.py",3975,0,"",python,selection_command +6395,15623564,"genie.py",3976,0,"",python,selection_mouse +6396,15623577,"genie.py",3975,0,"",python,selection_command +6397,15636527,"sample.py",0,0,"",python,tab +6398,15649354,"sample.py",2944,0,"",python,selection_mouse +6399,15649487,"sample.py",2938,17,"_sampling_wrapper",python,selection_mouse +6400,15708319,"genie.py",0,0,"",python,tab +6401,15755279,"genie.py",3963,0,"",python,selection_mouse +6402,15755291,"genie.py",3962,0,"",python,selection_command +6403,15755819,"genie.py",3976,0,"",python,selection_mouse +6404,15755820,"genie.py",3975,0,"",python,selection_command +6405,15756834,"genie.py",3976,0,"",python,selection_command +6406,15757158,"genie.py",3975,1,"",python,content +6407,15757336,"genie.py",3974,1,"",python,content +6408,15757459,"genie.py",3973,1,"",python,content +6409,15757610,"genie.py",3972,1,"",python,content +6410,15758395,"genie.py",3968,4,"",python,content +6411,15758540,"genie.py",3964,4,"",python,content +6412,15758708,"genie.py",3963,1,"",python,content +6413,15759288,"genie.py",3963,0,"\n ",python,content +6414,15759406,"genie.py",3968,0," ",python,content +6415,15759655,"genie.py",3968,4,"",python,content +6416,15760091,"genie.py",3964,4,"",python,content +6417,15760533,"genie.py",3964,0," """"""\n Autoregressively samples up to `seq_len` future frames using the causal transformer backend.\n\n - Input frames are tokenized once.\n - Future frames are generated one at a time, each conditioned on all previous frames.\n - All frames are detokenized in a single pass at the end.\n\n Args:\n batch: Dict with at least ""videos"" (B, T, H, W, C)\n seq_len: total number of frames to generate (including 
context)\n temperature: sampling temperature\n sample_argmax: if True, use argmax instead of sampling\n\n Returns:\n Generated video frames (B, seq_len, H, W, C)\n """"""\n # --- Encode context frames ---\n tokenizer_out = self.tokenizer.vq_encode(batch[""videos""], training=False)\n token_idxs = tokenizer_out[""indices""] # (B, T, N)\n B, T, N = token_idxs.shape\n\n # --- Prepare initial token sequence ---\n # Pad with zeros for future frames\n pad_shape = (B, seq_len - T, N)\n token_idxs_full = jnp.concatenate(\n [token_idxs, jnp.zeros(pad_shape, dtype=token_idxs.dtype)], axis=1\n ) # (B, seq_len, N)\n\n # --- Prepare latent actions ---\n # If you have action tokens, use them; otherwise, use zeros\n if ""latent_actions"" in batch:\n action_tokens = self.lam.vq.get_codes(batch[""latent_actions""])\n else:\n action_tokens = jnp.zeros((B, seq_len, self.num_latent_actions), dtype=jnp.int32)\n\n # --- Autoregressive generation loop ---\n rng = batch.get(""rng"", None)\n for t in range(T, seq_len):\n # Feed all tokens up to t (i.e., frames 0..t-1) to the dynamics model\n dyna_inputs = {\n ""video_tokens"": token_idxs_full[:, :t, :], # (B, t, N)\n ""latent_actions"": action_tokens[:, :t, ...], # (B, t, ...)\n ""mask_rng"": batch.get(""mask_rng"", None),\n }\n dyna_outputs = self.dynamics(dyna_inputs, training=False)\n # dyna_outputs[""token_logits""]: (B, t, N, vocab_size)\n # We want the logits for the last time step (frame t-1 predicting t)\n next_token_logits = dyna_outputs[""token_logits""][:, -1, :, :] # (B, N, vocab_size)\n\n # Sample or argmax for each patch\n if sample_argmax:\n next_token = jnp.argmax(next_token_logits, axis=-1) # (B, N)\n else:\n if rng is not None:\n rng, step_rng = jax.random.split(rng)\n else:\n step_rng = jax.random.PRNGKey(0)\n next_token = jax.random.categorical(\n step_rng, next_token_logits / temperature, axis=-1\n ) # (B, N)\n\n # Insert the generated tokens into the sequence\n token_idxs_full = token_idxs_full.at[:, t, :].set(next_token)\n\n # --- Decode all tokens at once at the end ---\n final_frames = self.tokenizer.decode(\n token_idxs_full, video_hw=batch[""videos""].shape[2:4]\n )\n return final_frames",python,content +6418,15768757,"genie.py",4140,0,"",python,selection_mouse +6419,15769328,"genie.py",4097,0,"",python,selection_mouse +6420,15770137,"genie.py",4087,0,"",python,selection_mouse +6421,15770666,"genie.py",4134,0,"",python,selection_mouse +6422,15770821,"genie.py",4131,6,"Future",python,selection_mouse +6423,15771026,"genie.py",4131,13,"Future frames",python,selection_mouse +6424,15771055,"genie.py",4131,17,"Future frames are",python,selection_mouse +6425,15771109,"genie.py",4105,32,"tokenized once.\n - Future",python,selection_mouse +6426,15771163,"genie.py",4114,23," once.\n - Future",python,selection_mouse +6427,15771163,"genie.py",4115,22,"once.\n - Future",python,selection_mouse +6428,15771187,"genie.py",4119,18,".\n - Future",python,selection_mouse +6429,15771187,"genie.py",4131,34,"Future frames are generated one at",python,selection_mouse +6430,15771188,"genie.py",4131,36,"Future frames are generated one at a",python,selection_mouse +6431,15771211,"genie.py",4131,41,"Future frames are generated one at a time",python,selection_mouse +6432,15771266,"genie.py",4131,43,"Future frames are generated one at a time, ",python,selection_mouse +6433,15771266,"genie.py",4131,47,"Future frames are generated one at a time, each",python,selection_mouse +6434,15771294,"genie.py",4131,48,"Future frames are generated one at a time, each 
",python,selection_mouse +6435,15771314,"genie.py",4131,59,"Future frames are generated one at a time, each conditioned",python,selection_mouse +6436,15771503,"genie.py",4131,60,"Future frames are generated one at a time, each conditioned ",python,selection_mouse +6437,15771504,"genie.py",4131,62,"Future frames are generated one at a time, each conditioned on",python,selection_mouse +6438,15771504,"genie.py",4131,63,"Future frames are generated one at a time, each conditioned on ",python,selection_mouse +6439,15771504,"genie.py",4131,66,"Future frames are generated one at a time, each conditioned on all",python,selection_mouse +6440,15771546,"genie.py",4131,67,"Future frames are generated one at a time, each conditioned on all ",python,selection_mouse +6441,15771599,"genie.py",4131,75,"Future frames are generated one at a time, each conditioned on all previous",python,selection_mouse +6442,15771758,"genie.py",4131,76,"Future frames are generated one at a time, each conditioned on all previous ",python,selection_mouse +6443,15771759,"genie.py",4131,82,"Future frames are generated one at a time, each conditioned on all previous frames",python,selection_mouse +6444,15772121,"genie.py",4208,0,"",python,selection_mouse +6445,15772121,"genie.py",4207,6,"frames",python,selection_mouse +6446,15772286,"genie.py",4198,15,"previous frames",python,selection_mouse +6447,15772358,"genie.py",4194,19,"all previous frames",python,selection_mouse +6448,15772358,"genie.py",4191,22,"on all previous frames",python,selection_mouse +6449,15772359,"genie.py",4179,34,"conditioned on all previous frames",python,selection_mouse +6450,15772412,"genie.py",4174,39,"each conditioned on all previous frames",python,selection_mouse +6451,15772442,"genie.py",4172,41,", each conditioned on all previous frames",python,selection_mouse +6452,15772496,"genie.py",4168,45,"time, each conditioned on all previous frames",python,selection_mouse +6453,15772501,"genie.py",4167,46," time, each conditioned on all previous frames",python,selection_mouse +6454,15772525,"genie.py",4165,48," a time, each conditioned on all previous frames",python,selection_mouse +6455,15772543,"genie.py",4163,50,"at a time, each conditioned on all previous frames",python,selection_mouse +6456,15772596,"genie.py",4159,54,"one at a time, each conditioned on all previous frames",python,selection_mouse +6457,15772609,"genie.py",4158,55," one at a time, each conditioned on all previous frames",python,selection_mouse +6458,15772629,"genie.py",4149,64,"generated one at a time, each conditioned on all previous frames",python,selection_mouse +6459,15772734,"genie.py",4148,65," generated one at a time, each conditioned on all previous frames",python,selection_mouse +6460,15772735,"genie.py",4145,68,"are generated one at a time, each conditioned on all previous frames",python,selection_mouse +6461,15772781,"genie.py",4144,69," are generated one at a time, each conditioned on all previous frames",python,selection_mouse +6462,15772782,"genie.py",4138,75,"frames are generated one at a time, each conditioned on all previous frames",python,selection_mouse +6463,15772946,"genie.py",4137,76," frames are generated one at a time, each conditioned on all previous frames",python,selection_mouse +6464,15772946,"genie.py",4131,82,"Future frames are generated one at a time, each conditioned on all previous frames",python,selection_mouse +6465,15773582,"genie.py",4132,0,"",python,selection_mouse +6466,15774671,"genie.py",4312,0,"",python,selection_mouse 
+6467,15774817,"genie.py",4308,5,"batch",python,selection_mouse +6468,15775420,"genie.py",4388,0,"",python,selection_mouse +6469,15776138,"genie.py",4317,0,"",python,selection_mouse +6470,15776659,"genie.py",4381,0,"",python,selection_mouse +6471,15776793,"genie.py",4380,5,"total",python,selection_mouse +6472,15777008,"genie.py",4380,12,"total number",python,selection_mouse +6473,15784880,"genie.py",4876,0,"",python,selection_mouse +6474,15789788,"genie.py",5134,0,"",python,selection_mouse +6475,15791035,"genie.py",5135,0,"",python,selection_mouse +6476,15792985,"genie.py",5126,0,"",python,selection_mouse +6477,15793144,"genie.py",5123,7,"seq_len",python,selection_mouse +6478,15793736,"genie.py",5131,0,"",python,selection_mouse +6479,15793916,"genie.py",5131,1," ",python,selection_mouse +6480,15794895,"genie.py",5159,0,"",python,selection_mouse +6481,15795722,"genie.py",5206,0,"",python,selection_mouse +6482,15796446,"genie.py",5205,0,"",python,selection_command +6483,15805805,"genie.py",6403,0,"",python,selection_mouse +6484,15805821,"genie.py",6402,0,"",python,selection_command +6485,15809941,"genie.py",6558,0,"",python,selection_mouse +6486,15811440,"genie.py",6426,0,"",python,selection_mouse +6487,15811629,"genie.py",6423,3,"rng",python,selection_mouse +6488,15822406,"sample.py",0,0,"",python,tab +6489,15825280,"sample.py",2754,0,"",python,selection_mouse +6490,15825818,"sample.py",2667,0,"",python,selection_mouse +6491,15829776,"TERMINAL",0,0,"[?25lp[?25h",,terminal_output +6492,15829837,"TERMINAL",0,0,"[?25ly[?25h",,terminal_output +6493,15830060,"TERMINAL",0,0,"[?25lt[?25h",,terminal_output +6494,15830430,"TERMINAL",0,0,"[?25lj[?25h",,terminal_output +6495,15831399,"TERMINAL",0,0,"[?25lh[?25h",,terminal_output +6496,15832983,"TERMINAL",0,0,"\r(reverse-i-search)`': ",,terminal_output +6497,15833308,"TERMINAL",0,0,"[?25ls': ls[?25h",,terminal_output +6498,15833513,"TERMINAL",0,0,"[?25lsa': git checkout causal-transformer-dynamics-model[?25h",,terminal_output +6499,15833577,"TERMINAL",0,0,"[?25lsm': git add sample.py[?25h",,terminal_output +6500,15833810,"TERMINAL",0,0,"[?25lasp': git add sample.py[?25hl': git add sample.py",,terminal_output +6501,15834524,"TERMINAL",0,0,"[?25ls\rfailed reverse-i-search)`samply': git add sample.py[?25h",,terminal_output +6502,15835630,"TERMINAL",0,0,"^C[?2004l\r[?2004h[?2004l\r\r\n]0;tum_cte0515@hkn0710:~/Projects/jafar[?2004h(jafar) [tum_cte0515@hkn0710 jafar]$ ",,terminal_output +6503,15836396,"TERMINAL",0,0,"\r(reverse-i-search)`': ",,terminal_output +6504,15836759,"TERMINAL",0,0,"[?25ls': ls[?25h",,terminal_output +6505,15836917,"TERMINAL",0,0,"[?25lsa': git checkout causal-transformer-dynamics-model[?25h",,terminal_output +6506,15836980,"TERMINAL",0,0,"[?25lsm': git add sample.py[?25h",,terminal_output +6507,15837278,"TERMINAL",0,0,"[?25lasp': git add sample.py[?25hl': git add sample.py",,terminal_output +6508,15837448,"TERMINAL",0,0,"[?25lse': git add sample.py[?25h",,terminal_output +6509,15838327,"TERMINAL",0,0,"[?25ls.': git add sample.py[?25h",,terminal_output +6510,15838581,"TERMINAL",0,0,"[?25lsp': git add sample.py[?25h",,terminal_output +6511,15838698,"TERMINAL",0,0,"[?25lsy': git add sample.py[?25h",,terminal_output +6512,15841600,"TERMINAL",0,0,"python sample.py --checkpoint /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/big-runs/dynamics-cotraining-modelsize-scaling/train_dynamics_modelsize_scaling_500M_32_node --data_dir 
/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_new/open_ai_minecraft_arrayrecords_chunked --dyna_dim=1536 --dyna_num_blocks=24 --dyna_num_heads=24",,terminal_output +6513,15846501,"TERMINAL",0,0,"\r\n\r",,terminal_output +6514,15848644,"TERMINAL",0,0,"\r[6@jafar) [tum_cte0515@hkn0710 jafar]$ python sample.py",,terminal_output +6515,15849309,"TERMINAL",0,0,"",,terminal_output +6516,15849703,"TERMINAL",0,0,"",,terminal_output +6517,15849866,"TERMINAL",0,0,"",,terminal_output +6518,15850006,"TERMINAL",0,0,"",,terminal_output +6519,15850180,"TERMINAL",0,0,"",,terminal_output +6520,15850413,"TERMINAL",0,0,"",,terminal_output +6521,15850573,"TERMINAL",0,0,"",,terminal_output +6522,15850736,"TERMINAL",0,0,"",,terminal_output +6523,15850892,"TERMINAL",0,0,"",,terminal_output +6524,15851226,"TERMINAL",0,0,"",,terminal_output +6525,15851914,"TERMINAL",0,0,"",,terminal_output +6526,15851973,"TERMINAL",0,0,"",,terminal_output +6527,15852113,"TERMINAL",0,0,"",,terminal_output +6528,15852323,"TERMINAL",0,0,"",,terminal_output +6529,15852493,"TERMINAL",0,0,"",,terminal_output +6530,15852625,"TERMINAL",0,0,"",,terminal_output +6531,15852839,"TERMINAL",0,0,"",,terminal_output +6532,15853303,"TERMINAL",0,0,"",,terminal_output +6533,15853660,"TERMINAL",0,0,"",,terminal_output +6534,15854850,"TERMINAL",0,0,"\r",,terminal_output +6535,15873441,"TERMINAL",0,0,"[103@/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared//checkpoints/train_dyn_yolorun_new_arch/3351743",,terminal_output +6536,15876917,"genie.py",0,0,"",python,tab +6537,15878612,"sample.py",0,0,"",python,tab +6538,15880057,"sample.py",2674,0,"",python,selection_command +6539,15881852,"sample.py",2667,0,"",python,selection_mouse +6540,15882610,"sample.py",2667,7,"",python,content +6541,15885498,"sample.py",2667,0,"s",python,content +6542,15885499,"sample.py",2668,0,"",python,selection_keyboard +6543,15885846,"sample.py",2667,1,"",python,content +6544,15885953,"sample.py",2667,0,"c",python,content +6545,15885954,"sample.py",2668,0,"",python,selection_keyboard +6546,15886100,"sample.py",2668,0,"a",python,content +6547,15886101,"sample.py",2669,0,"",python,selection_keyboard +6548,15886200,"sample.py",2669,0,"u",python,content +6549,15886201,"sample.py",2670,0,"",python,selection_keyboard +6550,15886300,"sample.py",2670,0,"s",python,content +6551,15886301,"sample.py",2671,0,"",python,selection_keyboard +6552,15886409,"sample.py",2671,0,"a",python,content +6553,15886410,"sample.py",2672,0,"",python,selection_keyboard +6554,15886466,"sample.py",2672,0,"l",python,content +6555,15886467,"sample.py",2673,0,"",python,selection_keyboard +6556,15888597,"TERMINAL",0,0,"[?25l\r/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared//checkpoints/train_dyn_yolorun_new_arch/3351743\r\n[?2004l\r[?25h",,terminal_output +6557,15897303,"TERMINAL",0,0,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/tyro/_parsers.py:347: UserWarning: The field `param-dtype` is annotated with type ``, but the default value `` has type ``. We'll try to handle this gracefully, but it may cause unexpected behavior.\r\n warnings.warn(message)\r\n/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/tyro/_parsers.py:347: UserWarning: The field `dtype` is annotated with type ``, but the default value `` has type ``. 
We'll try to handle this gracefully, but it may cause unexpected behavior.\r\n warnings.warn(message)\r\n",,terminal_output +6558,15897717,"TERMINAL",0,0,"ERROR:2025-07-16 19:14:25,108:jax._src.xla_bridge:444: Jax plugin configuration error: Exception when calling jax_plugins.xla_cuda12.initialize()\r\nTraceback (most recent call last):\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/jax_plugins/xla_cuda12/__init__.py"", line 197, in _version_check\r\n version = get_version()\r\nRuntimeError: jaxlib/cuda/versions_helpers.cc:81: operation cusparseGetProperty(MAJOR_VERSION, &major) failed: The cuSPARSE library was not found.\r\n\r\nThe above exception was the direct cause of the following exception:\r\n\r\nTraceback (most recent call last):\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/jax/_src/xla_bridge.py"", line 442, in discover_pjrt_plugins\r\n plugin_module.initialize()\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/jax_plugins/xla_cuda12/__init__.py"", line 324, in initialize\r\n _check_cuda_versions(raise_on_first_error=True)\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/jax_plugins/xla_cuda12/__init__.py"", line 262, in _check_cuda_versions\r\n _version_check(""cuSPARSE"", cuda_versions.cusparse_get_version,\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/jax_plugins/xla_cuda12/__init__.py"", line 201, in _version_check\r\n raise RuntimeError(err_msg) from e\r\nRuntimeError: Unable to load cuSPARSE. Is it installed?\r\nERROR:jax._src.xla_bridge:Jax plugin configuration error: Exception when calling jax_plugins.xla_cuda12.initialize()\r\nTraceback (most recent call last):\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/jax_plugins/xla_cuda12/__init__.py"", line 197, in _version_check\r\n version = get_version()\r\nRuntimeError: jaxlib/cuda/versions_helpers.cc:81: operation cusparseGetProperty(MAJOR_VERSION, &major) failed: The cuSPARSE library was not found.\r\n\r\nThe above exception was the direct cause of the following exception:\r\n\r\nTraceback (most recent call last):\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/jax/_src/xla_bridge.py"", line 442, in discover_pjrt_plugins\r\n plugin_module.initialize()\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/jax_plugins/xla_cuda12/__init__.py"", line 324, in initialize\r\n _check_cuda_versions(raise_on_first_error=True)\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/jax_plugins/xla_cuda12/__init__.py"", line 262, in _check_cuda_versions\r\n _version_check(""cuSPARSE"", cuda_versions.cusparse_get_version,\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/jax_plugins/xla_cuda12/__init__.py"", line 201, in _version_check\r\n raise RuntimeError(err_msg) from e\r\nRuntimeError: Unable to load cuSPARSE. Is it installed?\r\nWARNING:2025-07-16 19:14:25,122:jax._src.xla_bridge:794: An NVIDIA GPU may be present on this machine, but a CUDA-enabled jaxlib is not installed. 
Falling back to cpu.\r\nWARNING:jax._src.xla_bridge:An NVIDIA GPU may be present on this machine, but a CUDA-enabled jaxlib is not installed. Falling back to cpu.\r\n",,terminal_output +6559,15897770,"TERMINAL",0,0,"Traceback (most recent call last):\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/sample.py"", line 61, in \r\n genie = Genie(\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/flax/linen/kw_only_dataclasses.py"", line 235, in init_wrapper\r\n dataclass_init(self, *args, **kwargs)\r\nTypeError: Genie.__init__() missing 1 required positional argument: 'use_maskgit'\r\n",,terminal_output +6560,15897885,"TERMINAL",0,0,"]0;tum_cte0515@hkn0710:~/Projects/jafar[?2004h(jafar) [tum_cte0515@hkn0710 jafar]$ ",,terminal_output diff --git a/927a8af5474e5654810c00ce2e09fd2de87d3e5722f33fa1090d867db114e403/crowd-code-7f5803ab-1386-4d6f-bc3a-3fff3d3adcc91759089760490-2025_09_28-22.02.58.175/source.csv b/927a8af5474e5654810c00ce2e09fd2de87d3e5722f33fa1090d867db114e403/crowd-code-7f5803ab-1386-4d6f-bc3a-3fff3d3adcc91759089760490-2025_09_28-22.02.58.175/source.csv new file mode 100644 index 0000000000000000000000000000000000000000..8fc88338e69797c35e40852c7378920f70a3f003 --- /dev/null +++ b/927a8af5474e5654810c00ce2e09fd2de87d3e5722f33fa1090d867db114e403/crowd-code-7f5803ab-1386-4d6f-bc3a-3fff3d3adcc91759089760490-2025_09_28-22.02.58.175/source.csv @@ -0,0 +1,646 @@ +Sequence,Time,File,RangeOffset,RangeLength,Text,Language,Type +1,5,"jasmine/models/dynamics.py",0,0,"from typing import Dict\n\nimport jax\nimport jax.numpy as jnp\nimport flax.nnx as nnx\n\nfrom utils.nn import STTransformer, Transformer\n\n\nclass DynamicsMaskGIT(nnx.Module):\n """"""\n MaskGIT dynamics model\n\n Dimension keys:\n B: batch size\n T: sequence length\n N: number of patches per frame\n L: latent dimension\n V: vocabulary size (number of latents)\n """"""\n\n def __init__(\n self,\n model_dim: int,\n ffn_dim: int,\n num_latents: int,\n latent_action_dim: int,\n num_blocks: int,\n num_heads: int,\n dropout: float,\n max_noise_level: float,\n noise_buckets: int,\n mask_limit: float,\n param_dtype: jnp.dtype,\n dtype: jnp.dtype,\n use_flash_attention: bool,\n rngs: nnx.Rngs,\n ):\n self.model_dim = model_dim\n self.ffn_dim = ffn_dim\n self.num_latents = num_latents\n self.latent_action_dim = latent_action_dim\n self.num_blocks = num_blocks\n self.num_heads = num_heads\n self.dropout = dropout\n self.max_noise_level = max_noise_level\n self.noise_buckets = noise_buckets\n self.mask_limit = mask_limit\n self.param_dtype = param_dtype\n self.dtype = dtype\n self.use_flash_attention = use_flash_attention\n\n self.transformer = STTransformer(\n self.model_dim,\n self.model_dim,\n self.ffn_dim,\n self.num_latents,\n self.num_blocks,\n self.num_heads,\n self.dropout,\n self.param_dtype,\n self.dtype,\n use_flash_attention=self.use_flash_attention,\n rngs=rngs,\n )\n self.patch_embed = nnx.Embed(self.num_latents, self.model_dim, rngs=rngs)\n self.mask_token = nnx.Param(\n nnx.initializers.lecun_uniform()(rngs.params(), (1, 1, 1, self.model_dim))\n )\n self.action_up = nnx.Linear(\n self.latent_action_dim,\n self.model_dim,\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n rngs=rngs,\n )\n self.noise_level_embed = nnx.Embed(\n self.noise_buckets, self.model_dim, rngs=rngs\n )\n\n def _apply_noise_augmentation(self, vid_embed_BTNM, rng):\n B, T, N, M = vid_embed_BTNM.shape\n rng, _rng_noise_lvl, _rng_noise = jax.random.split(rng, 3)\n 
noise_level_B = jax.random.uniform(\n _rng_noise_lvl, shape=(B,), minval=0.0, maxval=self.max_noise_level\n )\n noise_BTNM = jax.random.normal(_rng_noise, shape=(B, T, N, M))\n noise_bucket_idx_B = jnp.floor(\n (noise_level_B * self.noise_buckets) / self.max_noise_level\n ).astype(jnp.int32)\n \n # Clip noise_bucket_idx_B to ensure it stays within valid range to prevent NaNs\n noise_bucket_idx_B = jnp.clip(noise_bucket_idx_B, 0, self.noise_buckets - 1)\n\n noise_bucket_idx_B11 = noise_bucket_idx_B.reshape(B, 1, 1)\n noise_level_embed_B11M = self.noise_level_embed(noise_bucket_idx_B11)\n noise_level_embed_BT1M = jnp.tile(noise_level_embed_B11M, (1, T, 1, 1))\n noise_level_B111 = noise_level_B.reshape(B, 1, 1, 1)\n\n # safe sqrt: clip argument to >= 0\n one_minus_noise = jnp.clip(1.0 - noise_level_B111, min=0.0)\n sqrt_one_minus = jnp.sqrt(one_minus_noise)\n sqrt_noise = jnp.sqrt(jnp.clip(noise_level_B111, min=0.0))\n\n noise_augmented_vid_embed_BTNM = sqrt_one_minus * vid_embed_BTNM + sqrt_noise * noise_BTNM\n jax.debug.print(""noise_augmented_vid_embed_BTNM: {}"", noise_augmented_vid_embed_BTNM)\n\n return noise_augmented_vid_embed_BTNM, noise_level_embed_BT1M\n\n def __call__(\n self,\n batch: Dict[str, jax.Array],\n ) -> tuple[jax.Array, jax.Array]:\n # --- Mask videos ---\n video_tokens_BTN = batch[""video_tokens""]\n latent_actions_BTm11L = batch[""latent_actions""]\n vid_embed_BTNM = self.patch_embed(video_tokens_BTN)\n\n B = vid_embed_BTNM.shape[0]\n rng, _rng_prob, *_rngs_mask = jax.random.split(batch[""mask_rng""], B + 2)\n mask_prob = jax.random.uniform(_rng_prob, shape=(B,), minval=self.mask_limit)\n per_sample_shape = vid_embed_BTNM.shape[1:-1]\n mask = jax.vmap(\n lambda rng, prob: jax.random.bernoulli(rng, prob, per_sample_shape),\n in_axes=(0, 0),\n )(jnp.asarray(_rngs_mask), mask_prob)\n mask = mask.at[:, 0].set(False)\n vid_embed_BTNM = jnp.where(\n jnp.expand_dims(mask, -1), self.mask_token.value, vid_embed_BTNM\n )\n\n # --- Apply noise augmentation ---\n vid_embed_BTNM, noise_level_embed_BT1M = self._apply_noise_augmentation(vid_embed_BTNM, rng)\n\n # --- Predict transition ---\n act_embed_BTm11M = self.action_up(latent_actions_BTm11L)\n padded_act_embed_BT1M = jnp.pad(\n act_embed_BTm11M, ((0, 0), (1, 0), (0, 0), (0, 0))\n )\n vid_embed_BTNp2M = jnp.concatenate(\n [padded_act_embed_BT1M, noise_level_embed_BT1M, vid_embed_BTNM], axis=2\n )\n logits_BTNp2V = self.transformer(vid_embed_BTNp2M)\n logits_BTNV = logits_BTNp2V[:, :, 2:]\n return logits_BTNV, mask\n\n\nclass DynamicsCausal(nnx.Module):\n """"""Causal dynamics model""""""\n\n def __init__(\n self,\n model_dim: int,\n ffn_dim: int,\n num_latents: int,\n latent_action_dim: int,\n num_blocks: int,\n num_heads: int,\n dropout: float,\n max_noise_level: float,\n noise_buckets: int,\n param_dtype: jnp.dtype,\n dtype: jnp.dtype,\n use_flash_attention: bool,\n decode: bool,\n rngs: nnx.Rngs,\n ):\n self.model_dim = model_dim\n self.ffn_dim = ffn_dim\n self.num_latents = num_latents\n self.latent_action_dim = latent_action_dim\n self.num_blocks = num_blocks\n self.num_heads = num_heads\n self.dropout = dropout\n self.max_noise_level = max_noise_level\n self.noise_buckets = noise_buckets\n self.param_dtype = param_dtype\n self.dtype = dtype\n self.use_flash_attention = use_flash_attention\n self.decode = decode\n\n self.transformer = Transformer(\n self.model_dim,\n self.model_dim,\n self.ffn_dim,\n self.num_latents,\n self.num_blocks,\n self.num_heads,\n self.dropout,\n self.param_dtype,\n self.dtype,\n 
use_flash_attention=self.use_flash_attention,\n decode=self.decode,\n rngs=rngs,\n )\n self.patch_embed = nnx.Embed(self.num_latents, self.model_dim, rngs=rngs)\n self.action_up = nnx.Linear(\n self.latent_action_dim,\n self.model_dim,\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n rngs=rngs,\n )\n self.noise_level_embed = nnx.Embed(\n self.noise_buckets, self.model_dim, rngs=rngs\n )\n\n def _apply_noise_augmentation(self, vid_embed_BTNM, rng):\n B, T, N, M = vid_embed_BTNM.shape\n rng, _rng_noise_lvl, _rng_noise = jax.random.split(rng, 3)\n noise_level_B = jax.random.uniform(\n _rng_noise_lvl, shape=(B,), minval=0.0, maxval=self.max_noise_level\n )\n noise_BTNM = jax.random.normal(_rng_noise, shape=(B, T, N, M))\n # We calculate `(noise_level * noise_buckets) / max_noise_level` instead of\n # `(noise_level_B / max_noise_level) * noise_buckets` for numerical stability.\n noise_bucket_idx_B = jnp.floor(\n (noise_level_B * self.noise_buckets) / self.max_noise_level\n ).astype(jnp.int32)\n noise_bucket_idx_B11 = noise_bucket_idx_B.reshape(B, 1, 1)\n noise_level_embed_B11M = self.noise_level_embed(noise_bucket_idx_B11)\n noise_level_embed_BT1M = jnp.tile(noise_level_embed_B11M, (1, T, 1, 1))\n noise_level_B111 = noise_level_B.reshape(B, 1, 1, 1)\n\n # safe sqrt: clip argument to >= 0\n one_minus_noise = jnp.clip(1.0 - noise_level_B111, a_min=0.0)\n sqrt_one_minus = jnp.sqrt(one_minus_noise)\n sqrt_noise = jnp.sqrt(jnp.clip(noise_level_B111, a_min=0.0))\n\n noise_augmented_vid_embed_BTNM = sqrt_one_minus * vid_embed_BTNM + sqrt_noise * noise_BTNM\n return noise_augmented_vid_embed_BTNM, noise_level_embed_BT1M\n\n def __call__(\n self,\n batch: Dict[str, jax.Array],\n ) -> tuple[jax.Array, jax.Array]:\n video_tokens_BTN = batch[""video_tokens""]\n latent_actions_BTm11L = batch[""latent_actions""]\n vid_embed_BTNM = self.patch_embed(video_tokens_BTN)\n act_embed_BTm11M = self.action_up(latent_actions_BTm11L)\n padded_act_embed_BT1M = jnp.pad(\n act_embed_BTm11M, ((0, 0), (1, 0), (0, 0), (0, 0))\n )\n vid_embed_BTNM, noise_level_embed_BT1M = self._apply_noise_augmentation(video_tokens_BTN, batch[""rng""])\n vid_embed_BTNp2M = jnp.concatenate(\n [padded_act_embed_BT1M, noise_level_embed_BT1M, vid_embed_BTNM], axis=2\n )\n logits_BTNp2V = self.transformer(vid_embed_BTNp2M)\n logits_BTNV = logits_BTNp2V[:, :, 1:-1]\n return logits_BTNV, jnp.ones_like(video_tokens_BTN)\n",python,tab +2,329,"extension-output-pdoom-org.crowd-code-#1-crowd-code",0,0,"10:02:58 PM [info] Activating crowd-code\n10:02:58 PM [info] Recording started\n10:02:58 PM [info] Initializing git provider using file system watchers...\n",Log,tab +3,472,"extension-output-pdoom-org.crowd-code-#1-crowd-code",153,0,"10:02:58 PM [info] Git repository found\n10:02:58 PM [info] Git provider initialized successfully\n10:02:58 PM [info] Initial git state: [object Object]\n",Log,content +4,17189,"jasmine/models/dynamics.py",0,0,"",python,tab +5,17809,"jasmine/models/dynamics.py",2981,0,"",python,selection_mouse +6,18475,"jasmine/models/dynamics.py",3369,0,"",python,selection_mouse +7,19320,"jasmine/models/dynamics.py",2794,0,"",python,selection_mouse +8,19551,"jasmine/models/dynamics.py",2782,18,"noise_bucket_idx_B",python,selection_mouse +9,20213,"jasmine/models/dynamics.py",2872,0,"",python,selection_mouse +10,20798,"jasmine/models/dynamics.py",2788,0,"",python,selection_mouse +11,20979,"jasmine/models/dynamics.py",2782,18,"noise_bucket_idx_B",python,selection_mouse +12,21500,"jasmine/models/dynamics.py",2809,0,"",python,selection_mouse 
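The run recorded a few rows earlier fails twice: first the CUDA plugin cannot load cuSPARSE and JAX silently falls back to CPU, then Genie.__init__() raises because a required use_maskgit argument is missing. A cheap guard against the first problem is to fail fast on the backend:

import jax

# The plugin failure above only produced a warning before continuing on CPU;
# asserting the backend makes it fatal before an expensive sampling run.
assert jax.default_backend() == "gpu", f"running on {jax.devices()}"

For the second, the traceback names the missing argument but not its intended value, so the fix below is an assumption: sample.py's Genie(...) call presumably needs the new flag passed explicitly, e.g.

genie = Genie(
    # ... existing keyword arguments unchanged ...
    use_maskgit=False,  # assumed value: select the causal sampler rather than MaskGIT
)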
+13,21664,"jasmine/models/dynamics.py",2804,6,"ensure",python,selection_mouse +14,22491,"jasmine/models/dynamics.py",2845,0,"",python,selection_mouse +15,22660,"jasmine/models/dynamics.py",2842,7,"prevent",python,selection_mouse +16,22995,"jasmine/models/dynamics.py",2842,8,"prevent ",python,selection_mouse +17,23364,"jasmine/models/dynamics.py",2850,0,"",python,selection_mouse +18,23781,"jasmine/models/dynamics.py",2854,0,"",python,selection_mouse +19,23789,"jasmine/models/dynamics.py",2853,0,"",python,selection_command +20,23965,"jasmine/models/dynamics.py",2850,4,"NaNs",python,selection_mouse +21,23966,"jasmine/models/dynamics.py",2851,3,"aNs",python,selection_command +22,24618,"jasmine/models/dynamics.py",2845,0,"",python,selection_mouse +23,24751,"jasmine/models/dynamics.py",2842,7,"prevent",python,selection_mouse +24,25363,"jasmine/models/dynamics.py",2924,0,"",python,selection_mouse +25,25520,"jasmine/models/dynamics.py",2923,1,"i",python,selection_mouse +26,25592,"jasmine/models/dynamics.py",2835,89,"nge to prevent NaNs\n noise_bucket_idx_B = jnp.clip(noise_bucket_idx_B, 0, self.noi",python,selection_mouse +27,25944,"jasmine/models/dynamics.py",2835,0,"",python,selection_mouse +28,26452,"jasmine/models/dynamics.py",2818,0,"",python,selection_mouse +29,27123,"jasmine/models/dynamics.py",2810,0,"",python,selection_mouse +30,27732,"jasmine/models/dynamics.py",2804,0,"",python,selection_mouse +31,29033,"jasmine/models/dynamics.py",2813,0,"",python,selection_mouse +32,30131,"jasmine/models/dynamics.py",2905,0,"",python,selection_mouse +33,30976,"jasmine/models/dynamics.py",2940,0,"",python,selection_mouse +34,31799,"jasmine/models/dynamics.py",2817,0,"",python,selection_mouse +35,32584,"jasmine/models/dynamics.py",2766,0,"",python,selection_mouse +36,32585,"jasmine/models/dynamics.py",2765,0,"",python,selection_command +37,33095,"jasmine/models/dynamics.py",2894,0,"",python,selection_mouse +38,33631,"jasmine/models/dynamics.py",2806,0,"",python,selection_mouse +39,34501,"jasmine/models/dynamics.py",2894,0,"",python,selection_command +40,35196,"jasmine/models/dynamics.py",2893,0,"",python,selection_command +41,35394,"jasmine/models/dynamics.py",2892,0,"",python,selection_command +42,35455,"jasmine/models/dynamics.py",2891,0,"",python,selection_command +43,35605,"jasmine/models/dynamics.py",2890,0,"",python,selection_command +44,35828,"jasmine/models/dynamics.py",2889,0,"",python,selection_command +45,36213,"jasmine/models/dynamics.py",2801,0,"",python,selection_command +46,36481,"jasmine/models/dynamics.py",2800,0,"",python,selection_command +47,36674,"jasmine/models/dynamics.py",2799,0,"",python,selection_command +48,37297,"jasmine/models/dynamics.py",2800,0,"",python,selection_command +49,37429,"jasmine/models/dynamics.py",2801,0,"",python,selection_command +50,38055,"jasmine/models/dynamics.py",2800,0,"",python,selection_command +51,38264,"jasmine/models/dynamics.py",2799,0,"",python,selection_command +52,39160,"jasmine/models/dynamics.py",2814,0,"",python,selection_mouse +53,39701,"jasmine/models/dynamics.py",2812,0,"",python,selection_mouse +54,40248,"jasmine/models/dynamics.py",2766,0,"",python,selection_mouse +55,40249,"jasmine/models/dynamics.py",2765,0,"",python,selection_command +56,40783,"jasmine/models/dynamics.py",2847,0,"",python,selection_mouse +57,41855,"jasmine/models/dynamics.py",2843,0,"",python,selection_mouse +58,43044,"jasmine/models/dynamics.py",2839,0,"",python,selection_mouse +59,48678,"jasmine/models/dynamics.py",3007,0,"",python,selection_mouse 
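DynamicsMaskGIT.__call__, shown in the tab payload above, draws an independent Bernoulli mask per batch element by vmapping over split keys and always leaves the first frame unmasked as context. Isolated as a runnable sketch (the mask_limit default here is an assumption; the real value comes from the training config):

import jax
import jax.numpy as jnp

def sample_masks(rng, B, T, N, mask_limit=0.5):
    # Per-sample mask rate p ~ U(mask_limit, 1) and a Bernoulli mask over the
    # (T, N) patch grid; frame 0 is forced to stay visible.
    rng_p, *rngs = jax.random.split(rng, B + 1)
    probs = jax.random.uniform(rng_p, (B,), minval=mask_limit)
    masks = jax.vmap(lambda r, p: jax.random.bernoulli(r, p, (T, N)))(
        jnp.stack(rngs), probs
    )
    return masks.at[:, 0].set(False)

masks = sample_masks(jax.random.PRNGKey(0), B=2, T=4, N=9)  # (2, 4, 9) bool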
+60,48679,"jasmine/models/dynamics.py",3006,0,"",python,selection_command +61,49256,"jasmine/models/dynamics.py",2939,0,"",python,selection_mouse +62,49257,"jasmine/models/dynamics.py",2938,0,"",python,selection_command +63,49784,"jasmine/models/dynamics.py",2940,0,"",python,selection_mouse +64,53085,"jasmine/models/dynamics.py",2766,0,"",python,selection_mouse +65,53086,"jasmine/models/dynamics.py",2765,0,"",python,selection_command +66,57569,"jasmine/models/dynamics.py",2803,0,"",python,selection_mouse +67,58191,"jasmine/models/dynamics.py",2673,0,"",python,selection_mouse +68,78536,"jasmine/models/dynamics.py",2940,0,"",python,selection_mouse +69,79186,"jasmine/models/dynamics.py",2766,0,"",python,selection_mouse +70,79187,"jasmine/models/dynamics.py",2765,0,"",python,selection_command +71,79859,"jasmine/models/dynamics.py",2757,0,"",python,selection_mouse +72,79861,"jasmine/models/dynamics.py",2756,0,"",python,selection_command +73,100312,"jasmine/models/dynamics.py",2451,0,"",python,selection_mouse +74,103241,"jasmine/models/dynamics.py",2693,0,"",python,selection_mouse +75,109836,"jasmine/models/dynamics.py",2513,0,"",python,selection_mouse +76,110469,"jasmine/models/dynamics.py",2536,0,"",python,selection_mouse +77,110471,"jasmine/models/dynamics.py",2535,0,"",python,selection_command +78,111497,"jasmine/models/dynamics.py",2536,0,"",python,selection_command +79,111732,"jasmine/models/dynamics.py",2536,0,",",python,content +80,111734,"jasmine/models/dynamics.py",2537,0,"",python,selection_keyboard +81,111871,"jasmine/models/dynamics.py",2537,0," ",python,content +82,111872,"jasmine/models/dynamics.py",2538,0,"",python,selection_keyboard +83,112441,"jasmine/models/dynamics.py",2538,0,"d",python,content +84,112442,"jasmine/models/dynamics.py",2539,0,"",python,selection_keyboard +85,113320,"jasmine/models/dynamics.py",2539,0,"t",python,content +86,113321,"jasmine/models/dynamics.py",2540,0,"",python,selection_keyboard +87,113569,"jasmine/models/dynamics.py",2540,0,"y",python,content +88,113571,"jasmine/models/dynamics.py",2541,0,"",python,selection_keyboard +89,113727,"jasmine/models/dynamics.py",2541,0,"p",python,content +90,113729,"jasmine/models/dynamics.py",2542,0,"",python,selection_keyboard +91,113879,"jasmine/models/dynamics.py",2542,0,"e",python,content +92,113881,"jasmine/models/dynamics.py",2543,0,"",python,selection_keyboard +93,114226,"jasmine/models/dynamics.py",2543,0,"=",python,content +94,114227,"jasmine/models/dynamics.py",2544,0,"",python,selection_keyboard +95,114544,"jasmine/models/dynamics.py",2544,0,"s",python,content +96,114545,"jasmine/models/dynamics.py",2545,0,"",python,selection_keyboard +97,114727,"jasmine/models/dynamics.py",2545,0,"e",python,content +98,114728,"jasmine/models/dynamics.py",2546,0,"",python,selection_keyboard +99,114842,"jasmine/models/dynamics.py",2546,0,"l",python,content +100,114845,"jasmine/models/dynamics.py",2547,0,"",python,selection_keyboard +101,114941,"jasmine/models/dynamics.py",2547,0,"f",python,content +102,114942,"jasmine/models/dynamics.py",2548,0,"",python,selection_keyboard +103,115055,"jasmine/models/dynamics.py",2548,0,".",python,content +104,115057,"jasmine/models/dynamics.py",2549,0,"",python,selection_keyboard +105,115406,"jasmine/models/dynamics.py",2549,0,"d",python,content +106,115407,"jasmine/models/dynamics.py",2550,0,"",python,selection_keyboard +107,115929,"jasmine/models/dynamics.py",2550,0,"t",python,content +108,115931,"jasmine/models/dynamics.py",2551,0,"",python,selection_keyboard 
+109,116092,"jasmine/models/dynamics.py",2551,0,"y",python,content +110,116093,"jasmine/models/dynamics.py",2552,0,"",python,selection_keyboard +111,116226,"jasmine/models/dynamics.py",2552,0,"o",python,content +112,116227,"jasmine/models/dynamics.py",2553,0,"",python,selection_keyboard +113,116286,"jasmine/models/dynamics.py",2553,0,"e",python,content +114,116288,"jasmine/models/dynamics.py",2554,0,"",python,selection_keyboard +115,116817,"jasmine/models/dynamics.py",2553,1,"",python,content +116,117125,"jasmine/models/dynamics.py",2552,1,"",python,content +117,117250,"jasmine/models/dynamics.py",2552,0,"p",python,content +118,117251,"jasmine/models/dynamics.py",2553,0,"",python,selection_keyboard +119,117342,"jasmine/models/dynamics.py",2553,0,"e",python,content +120,117343,"jasmine/models/dynamics.py",2554,0,"",python,selection_keyboard +121,117751,"jasmine/models/dynamics.py",2553,0,"",python,selection_command +122,118940,"jasmine/models/dynamics.py",2493,0,"",python,selection_mouse +123,122169,"jasmine/models/dynamics.py",2452,0,"",python,selection_mouse +124,123122,"jasmine/models/dynamics.py",2384,0,"",python,selection_mouse +125,123723,"jasmine/models/dynamics.py",2375,10,"_rng_noise",python,selection_mouse +126,125189,"jasmine/models/dynamics.py",2456,0,"",python,selection_mouse +127,125191,"jasmine/models/dynamics.py",2455,0,"",python,selection_command +128,129575,"jasmine/models/dynamics.py",2634,0,"",python,selection_mouse +129,130601,"jasmine/models/dynamics.py",2634,0,",",python,content +130,130602,"jasmine/models/dynamics.py",2635,0,"",python,selection_keyboard +131,130719,"jasmine/models/dynamics.py",2635,0," ",python,content +132,130719,"jasmine/models/dynamics.py",2636,0,"",python,selection_keyboard +133,131622,"jasmine/models/dynamics.py",2636,0,"d",python,content +134,131623,"jasmine/models/dynamics.py",2637,0,"",python,selection_keyboard +135,132678,"jasmine/models/dynamics.py",2636,1,"dtype=",python,content +136,133948,"jasmine/models/dynamics.py",2642,0,"s",python,content +137,133949,"jasmine/models/dynamics.py",2643,0,"",python,selection_keyboard +138,134090,"jasmine/models/dynamics.py",2643,0,"e",python,content +139,134092,"jasmine/models/dynamics.py",2644,0,"",python,selection_keyboard +140,134216,"jasmine/models/dynamics.py",2644,0,"l",python,content +141,134217,"jasmine/models/dynamics.py",2645,0,"",python,selection_keyboard +142,134302,"jasmine/models/dynamics.py",2645,0,"f",python,content +143,134304,"jasmine/models/dynamics.py",2646,0,"",python,selection_keyboard +144,134410,"jasmine/models/dynamics.py",2646,0,".",python,content +145,134411,"jasmine/models/dynamics.py",2647,0,"",python,selection_keyboard +146,134756,"jasmine/models/dynamics.py",2647,0,"d",python,content +147,134757,"jasmine/models/dynamics.py",2648,0,"",python,selection_keyboard +148,135025,"jasmine/models/dynamics.py",2648,0,"t",python,content +149,135026,"jasmine/models/dynamics.py",2649,0,"",python,selection_keyboard +150,135855,"jasmine/models/dynamics.py",2647,2,"dtype",python,content +151,136797,"jasmine/models/dynamics.py",2765,0,"",python,selection_mouse +152,150412,"jasmine/models/dynamics.py",2964,0,"",python,selection_mouse +153,150901,"jasmine/models/dynamics.py",2693,0,"",python,selection_mouse +154,151414,"jasmine/models/dynamics.py",2793,0,"",python,selection_mouse +155,151989,"jasmine/models/dynamics.py",2765,0,"",python,selection_mouse +156,153125,"jasmine/models/dynamics.py",2753,0,"",python,selection_mouse 
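The keystrokes above add dtype=self.dtype to the noise-sampling calls in _apply_noise_augmentation, so the sampled levels and noise match the model's compute dtype. The resulting calls presumably read:

# After the in-session edit (self.dtype is the module's compute dtype):
noise_level_B = jax.random.uniform(
    _rng_noise_lvl, shape=(B,), minval=0.0, maxval=self.max_noise_level, dtype=self.dtype
)
noise_BTNM = jax.random.normal(_rng_noise, shape=(B, T, N, M), dtype=self.dtype)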
+157,153263,"jasmine/models/dynamics.py",2750,15,"max_noise_level",python,selection_mouse +158,153918,"jasmine/models/dynamics.py",2793,0,"",python,selection_mouse +159,154486,"jasmine/models/dynamics.py",2712,0,"",python,selection_mouse +160,154622,"jasmine/models/dynamics.py",2707,13,"noise_level_B",python,selection_mouse +161,154852,"jasmine/models/dynamics.py",2707,21,"noise_level_B * self.",python,selection_mouse +162,154870,"jasmine/models/dynamics.py",2707,34,"noise_level_B * self.noise_buckets",python,selection_mouse +163,154895,"jasmine/models/dynamics.py",2693,27,"\n (noise_level_B",python,selection_mouse +164,155072,"jasmine/models/dynamics.py",2622,98,"(B, T, N, M), dtype=self.dtype)\n noise_bucket_idx_B = jnp.floor(\n (noise_level_B",python,selection_mouse +165,155087,"jasmine/models/dynamics.py",2625,95," T, N, M), dtype=self.dtype)\n noise_bucket_idx_B = jnp.floor(\n (noise_level_B",python,selection_mouse +166,155137,"jasmine/models/dynamics.py",2626,94,"T, N, M), dtype=self.dtype)\n noise_bucket_idx_B = jnp.floor(\n (noise_level_B",python,selection_mouse +167,155140,"jasmine/models/dynamics.py",2628,92," N, M), dtype=self.dtype)\n noise_bucket_idx_B = jnp.floor(\n (noise_level_B",python,selection_mouse +168,155188,"jasmine/models/dynamics.py",2631,89," M), dtype=self.dtype)\n noise_bucket_idx_B = jnp.floor(\n (noise_level_B",python,selection_mouse +169,155189,"jasmine/models/dynamics.py",2693,27,"\n (noise_level_B",python,selection_mouse +170,155290,"jasmine/models/dynamics.py",2707,58,"noise_level_B * self.noise_buckets) / self.max_noise_level",python,selection_mouse +171,155728,"jasmine/models/dynamics.py",2765,0,"",python,selection_mouse +172,156177,"jasmine/models/dynamics.py",2764,1,"l",python,selection_mouse +173,156195,"jasmine/models/dynamics.py",2761,4,"evel",python,selection_mouse +174,156211,"jasmine/models/dynamics.py",2765,28,"\n ).astype(jnp.int32)",python,selection_mouse +175,156419,"jasmine/models/dynamics.py",2765,27,"\n ).astype(jnp.int32",python,selection_mouse +176,156434,"jasmine/models/dynamics.py",2765,26,"\n ).astype(jnp.int3",python,selection_mouse +177,156451,"jasmine/models/dynamics.py",2765,25,"\n ).astype(jnp.int",python,selection_mouse +178,156476,"jasmine/models/dynamics.py",2765,23,"\n ).astype(jnp.i",python,selection_mouse +179,156492,"jasmine/models/dynamics.py",2765,21,"\n ).astype(jnp",python,selection_mouse +180,156518,"jasmine/models/dynamics.py",2765,20,"\n ).astype(jn",python,selection_mouse +181,156534,"jasmine/models/dynamics.py",2765,19,"\n ).astype(j",python,selection_mouse +182,156535,"jasmine/models/dynamics.py",2765,18,"\n ).astype(",python,selection_mouse +183,156561,"jasmine/models/dynamics.py",2765,17,"\n ).astype",python,selection_mouse +184,156577,"jasmine/models/dynamics.py",2765,15,"\n ).asty",python,selection_mouse +185,156603,"jasmine/models/dynamics.py",2765,14,"\n ).ast",python,selection_mouse +186,156688,"jasmine/models/dynamics.py",2707,58,"noise_level_B * self.noise_buckets) / self.max_noise_level",python,selection_mouse +187,157074,"jasmine/models/dynamics.py",2707,0,"",python,selection_mouse +188,157166,"jasmine/models/dynamics.py",2707,13,"noise_level_B",python,selection_mouse +189,157307,"jasmine/models/dynamics.py",2694,72," (noise_level_B * self.noise_buckets) / self.max_noise_level\n",python,selection_mouse +190,157690,"jasmine/models/dynamics.py",2654,112," noise_bucket_idx_B = jnp.floor(\n (noise_level_B * self.noise_buckets) / self.max_noise_level\n",python,selection_mouse 
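The selections above trace the bucket computation (noise_level_B * noise_buckets) / max_noise_level, which the file's own comment motivates as multiplying before dividing for numerical stability. The whole augmentation step, restated as a runnable sketch (the max_noise_level and noise_buckets defaults are assumptions for illustration):

import jax
import jax.numpy as jnp

def noise_augment(rng, x_BTNM, max_noise_level=0.3, noise_buckets=10):
    # x' = sqrt(1 - sigma) * x + sqrt(sigma) * eps with a per-sample
    # sigma ~ U(0, max_noise_level); sigma is also bucketized so a learned
    # embedding can condition the transformer on the noise level.
    rng_lvl, rng_eps = jax.random.split(rng)
    B = x_BTNM.shape[0]
    sigma_B = jax.random.uniform(rng_lvl, (B,), maxval=max_noise_level)
    eps_BTNM = jax.random.normal(rng_eps, x_BTNM.shape)
    sigma = sigma_B.reshape(B, 1, 1, 1)
    x_noised = jnp.sqrt(jnp.clip(1.0 - sigma, 0.0)) * x_BTNM + jnp.sqrt(sigma) * eps_BTNM
    # Multiply before dividing, and clip so sigma == max_noise_level still lands
    # in the last bucket instead of indexing one past the embedding table.
    bucket_B = jnp.clip(
        jnp.floor(sigma_B * noise_buckets / max_noise_level).astype(jnp.int32),
        0, noise_buckets - 1,
    )
    return x_noised, bucket_B

Note that DynamicsCausal.__call__ as recorded passes the integer video_tokens_BTN rather than the embedded vid_embed_BTNM into _apply_noise_augmentation, which looks like an oversight in the captured version of the file.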
+191,157787,"jasmine/models/dynamics.py",2694,72," (noise_level_B * self.noise_buckets) / self.max_noise_level\n",python,selection_mouse +192,158214,"jasmine/models/dynamics.py",2757,0,"",python,selection_mouse +193,158275,"jasmine/models/dynamics.py",2750,15,"max_noise_level",python,selection_mouse +194,165532,"jasmine/models/dynamics.py",3641,0,"",python,selection_mouse +195,166477,"jasmine/models/dynamics.py",3640,0,"",python,selection_command +196,167194,"jasmine/models/dynamics.py",3593,94,"",python,content +197,168438,"jasmine/models/dynamics.py",3458,0,"",python,selection_mouse +198,169060,"jasmine/models/dynamics.py",3493,0,"",python,selection_mouse +199,172660,"jasmine/models/dynamics.py",3292,0,"",python,selection_mouse +200,173250,"jasmine/models/dynamics.py",3263,0,"",python,selection_mouse +201,174634,"jasmine/models/dynamics.py",2976,0,"",python,selection_mouse +202,196948,"jasmine/models/dynamics.py",3593,0,"",python,selection_mouse +203,200271,"jasmine/models/dynamics.py",3402,0,"",python,selection_mouse +204,201253,"jasmine/models/dynamics.py",3405,0,"",python,selection_mouse +205,206250,"jasmine/models/dynamics.py",3069,0,"",python,selection_mouse +206,213491,"jasmine/models/dynamics.py",3306,0,"",python,selection_mouse +207,213492,"jasmine/models/dynamics.py",3305,0,"",python,selection_command +208,213962,"jasmine/models/dynamics.py",3263,0,"",python,selection_mouse +209,226585,"jasmine/models/dynamics.py",2675,0,"",python,selection_mouse +210,227166,"jasmine/models/dynamics.py",2687,0,"",python,selection_mouse +211,235945,"jasmine/models/dynamics.py",2725,0,"",python,selection_mouse +212,236486,"jasmine/models/dynamics.py",2793,0,"",python,selection_mouse +213,236488,"jasmine/models/dynamics.py",2792,0,"",python,selection_command +214,248065,"jasmine/models/dynamics.py",2976,0,"",python,selection_mouse +215,249854,"jasmine/models/dynamics.py",3544,0,"",python,selection_mouse +216,250573,"jasmine/models/dynamics.py",3493,0,"",python,selection_mouse +217,251169,"jasmine/models/dynamics.py",3553,0,"",python,selection_mouse +218,251792,"jasmine/models/dynamics.py",3543,0,"",python,selection_mouse +219,252808,"jasmine/models/dynamics.py",3481,0,"",python,selection_mouse +220,253603,"jasmine/models/dynamics.py",3492,0,"",python,selection_mouse +221,253604,"jasmine/models/dynamics.py",3491,0,"",python,selection_command +222,254375,"jasmine/models/dynamics.py",3492,0,"\n ",python,content +223,254866,"jasmine/models/dynamics.py",3501,0,"\n ",python,content +224,254867,"jasmine/models/dynamics.py",3493,8,"",python,content +225,255117,"jasmine/models/dynamics.py",3502,0,"n",python,content +226,255118,"jasmine/models/dynamics.py",3503,0,"",python,selection_keyboard +227,255242,"jasmine/models/dynamics.py",3503,0,"o",python,content +228,255243,"jasmine/models/dynamics.py",3504,0,"",python,selection_keyboard +229,255360,"jasmine/models/dynamics.py",3504,0,"i",python,content +230,255361,"jasmine/models/dynamics.py",3505,0,"",python,selection_keyboard +231,255791,"jasmine/models/dynamics.py",3505,0,"s",python,content +232,255793,"jasmine/models/dynamics.py",3506,0,"",python,selection_keyboard +233,255962,"jasmine/models/dynamics.py",3506,0,"e",python,content +234,255964,"jasmine/models/dynamics.py",3507,0,"",python,selection_keyboard +235,256086,"jasmine/models/dynamics.py",3507,0,"_",python,content +236,256088,"jasmine/models/dynamics.py",3508,0,"",python,selection_keyboard +237,256700,"jasmine/models/dynamics.py",3502,6,"noise_augmented_vid_embed_BTNM",python,content 
+238,256917,"jasmine/models/dynamics.py",3532,0," ",python,content +239,256918,"jasmine/models/dynamics.py",3533,0,"",python,selection_keyboard +240,257114,"jasmine/models/dynamics.py",3533,0,"=",python,content +241,257115,"jasmine/models/dynamics.py",3534,0,"",python,selection_keyboard +242,257205,"jasmine/models/dynamics.py",3534,0," ",python,content +243,257206,"jasmine/models/dynamics.py",3535,0,"",python,selection_keyboard +244,264471,"jasmine/models/dynamics.py",3535,0,"k",python,content +245,264472,"jasmine/models/dynamics.py",3536,0,"",python,selection_keyboard +246,264593,"jasmine/models/dynamics.py",3536,0,"n",python,content +247,264595,"jasmine/models/dynamics.py",3537,0,"",python,selection_keyboard +248,265656,"jasmine/models/dynamics.py",3536,1,"",python,content +249,265823,"jasmine/models/dynamics.py",3535,1,"",python,content +250,266661,"jasmine/models/dynamics.py",3535,0,"j",python,content +251,266662,"jasmine/models/dynamics.py",3536,0,"",python,selection_keyboard +252,266753,"jasmine/models/dynamics.py",3536,0,"n",python,content +253,266754,"jasmine/models/dynamics.py",3537,0,"",python,selection_keyboard +254,267029,"jasmine/models/dynamics.py",3537,0,"p",python,content +255,267031,"jasmine/models/dynamics.py",3538,0,"",python,selection_keyboard +256,267320,"jasmine/models/dynamics.py",3538,0,".",python,content +257,267321,"jasmine/models/dynamics.py",3539,0,"",python,selection_keyboard +258,267567,"jasmine/models/dynamics.py",3539,0,"s",python,content +259,267569,"jasmine/models/dynamics.py",3540,0,"",python,selection_keyboard +260,268327,"jasmine/models/dynamics.py",3540,0,"q",python,content +261,268328,"jasmine/models/dynamics.py",3541,0,"",python,selection_keyboard +262,268560,"jasmine/models/dynamics.py",3541,0,"r",python,content +263,268561,"jasmine/models/dynamics.py",3542,0,"",python,selection_keyboard +264,268718,"jasmine/models/dynamics.py",3542,0,"t",python,content +265,268719,"jasmine/models/dynamics.py",3543,0,"",python,selection_keyboard +266,269567,"jasmine/models/dynamics.py",3543,0,"()",python,content +267,269568,"jasmine/models/dynamics.py",3544,0,"",python,selection_keyboard +268,272592,"jasmine/models/dynamics.py",3545,0,"",python,selection_mouse +269,273501,"jasmine/models/dynamics.py",3544,0,"",python,selection_mouse +270,282126,"jasmine/models/dynamics.py",3544,0,"1",python,content +271,282127,"jasmine/models/dynamics.py",3545,0,"",python,selection_keyboard +272,282450,"jasmine/models/dynamics.py",3545,0," ",python,content +273,282451,"jasmine/models/dynamics.py",3546,0,"",python,selection_keyboard +274,282586,"jasmine/models/dynamics.py",3546,0,"-",python,content +275,282587,"jasmine/models/dynamics.py",3547,0,"",python,selection_keyboard +276,282661,"jasmine/models/dynamics.py",3547,0," ",python,content +277,282661,"jasmine/models/dynamics.py",3548,0,"",python,selection_keyboard +278,283052,"jasmine/models/dynamics.py",3548,0,"n",python,content +279,283053,"jasmine/models/dynamics.py",3549,0,"",python,selection_keyboard +280,283378,"jasmine/models/dynamics.py",3549,0,"o",python,content +281,283380,"jasmine/models/dynamics.py",3550,0,"",python,selection_keyboard +282,283419,"jasmine/models/dynamics.py",3550,0,"i",python,content +283,283420,"jasmine/models/dynamics.py",3551,0,"",python,selection_keyboard +284,283421,"jasmine/models/dynamics.py",3551,0,"s",python,content +285,283422,"jasmine/models/dynamics.py",3552,0,"",python,selection_keyboard +286,283637,"jasmine/models/dynamics.py",3552,0,"e",python,content 
+287,283638,"jasmine/models/dynamics.py",3553,0,"",python,selection_keyboard +288,284324,"jasmine/models/dynamics.py",3553,0,"_",python,content +289,284326,"jasmine/models/dynamics.py",3554,0,"",python,selection_keyboard +290,284929,"jasmine/models/dynamics.py",3554,0,"l",python,content +291,284932,"jasmine/models/dynamics.py",3555,0,"",python,selection_keyboard +292,286403,"jasmine/models/dynamics.py",3548,7,"noise_level_B111",python,content +293,288068,"jasmine/models/dynamics.py",3565,0,"",python,selection_command +294,288560,"jasmine/models/dynamics.py",3565,0," ",python,content +295,288561,"jasmine/models/dynamics.py",3566,0,"",python,selection_keyboard +296,288753,"jasmine/models/dynamics.py",3566,0,"*",python,content +297,288754,"jasmine/models/dynamics.py",3567,0,"",python,selection_keyboard +298,288866,"jasmine/models/dynamics.py",3567,0," ",python,content +299,288867,"jasmine/models/dynamics.py",3568,0,"",python,selection_keyboard +300,289741,"jasmine/models/dynamics.py",3568,0,"v",python,content +301,289742,"jasmine/models/dynamics.py",3569,0,"",python,selection_keyboard +302,289903,"jasmine/models/dynamics.py",3569,0,"i",python,content +303,289905,"jasmine/models/dynamics.py",3570,0,"",python,selection_keyboard +304,289983,"jasmine/models/dynamics.py",3570,0,"d",python,content +305,289983,"jasmine/models/dynamics.py",3571,0,"",python,selection_keyboard +306,290354,"jasmine/models/dynamics.py",3571,0,"_",python,content +307,290355,"jasmine/models/dynamics.py",3572,0,"",python,selection_keyboard +308,290806,"jasmine/models/dynamics.py",3568,4,"vid_embed_BTNM",python,content +309,291385,"jasmine/models/dynamics.py",3582,0," ",python,content +310,291386,"jasmine/models/dynamics.py",3583,0,"",python,selection_keyboard +311,291678,"jasmine/models/dynamics.py",3583,0,"+",python,content +312,291678,"jasmine/models/dynamics.py",3584,0,"",python,selection_keyboard +313,291971,"jasmine/models/dynamics.py",3584,0," ",python,content +314,291972,"jasmine/models/dynamics.py",3585,0,"",python,selection_keyboard +315,294899,"jasmine/models/dynamics.py",3585,0,"n",python,content +316,294900,"jasmine/models/dynamics.py",3586,0,"",python,selection_keyboard +317,295088,"jasmine/models/dynamics.py",3586,0,"o",python,content +318,295089,"jasmine/models/dynamics.py",3587,0,"",python,selection_keyboard +319,295136,"jasmine/models/dynamics.py",3587,0,"i",python,content +320,295137,"jasmine/models/dynamics.py",3588,0,"",python,selection_keyboard +321,295205,"jasmine/models/dynamics.py",3588,0,"s",python,content +322,295206,"jasmine/models/dynamics.py",3589,0,"",python,selection_keyboard +323,295403,"jasmine/models/dynamics.py",3589,0,"e",python,content +324,295404,"jasmine/models/dynamics.py",3590,0,"",python,selection_keyboard +325,295635,"jasmine/models/dynamics.py",3590,0,"_",python,content +326,295636,"jasmine/models/dynamics.py",3591,0,"",python,selection_keyboard +327,295974,"jasmine/models/dynamics.py",3591,0,"l",python,content +328,295976,"jasmine/models/dynamics.py",3592,0,"",python,selection_keyboard +329,296126,"jasmine/models/dynamics.py",3592,0,"e",python,content +330,296127,"jasmine/models/dynamics.py",3593,0,"",python,selection_keyboard +331,296437,"jasmine/models/dynamics.py",3593,0,"v",python,content +332,296439,"jasmine/models/dynamics.py",3594,0,"",python,selection_keyboard +333,298482,"jasmine/models/dynamics.py",3585,9,"noise_level_B111",python,content +334,299707,"jasmine/models/dynamics.py",3601,0," ",python,content 
+335,299707,"jasmine/models/dynamics.py",3602,0,"",python,selection_keyboard +336,299862,"jasmine/models/dynamics.py",3602,0,"*",python,content +337,299863,"jasmine/models/dynamics.py",3603,0,"",python,selection_keyboard +338,300073,"jasmine/models/dynamics.py",3603,0," ",python,content +339,300073,"jasmine/models/dynamics.py",3604,0,"",python,selection_keyboard +340,300574,"jasmine/models/dynamics.py",3604,0,"n",python,content +341,300575,"jasmine/models/dynamics.py",3605,0,"",python,selection_keyboard +342,300819,"jasmine/models/dynamics.py",3605,0,"o",python,content +343,300821,"jasmine/models/dynamics.py",3606,0,"",python,selection_keyboard +344,300918,"jasmine/models/dynamics.py",3606,0,"i",python,content +345,300920,"jasmine/models/dynamics.py",3607,0,"",python,selection_keyboard +346,301090,"jasmine/models/dynamics.py",3607,0,"s",python,content +347,301091,"jasmine/models/dynamics.py",3608,0,"",python,selection_keyboard +348,301309,"jasmine/models/dynamics.py",3608,0,"e",python,content +349,301309,"jasmine/models/dynamics.py",3609,0,"",python,selection_keyboard +350,301530,"jasmine/models/dynamics.py",3609,0,"_",python,content +351,301531,"jasmine/models/dynamics.py",3610,0,"",python,selection_keyboard +352,302598,"jasmine/models/dynamics.py",3604,6,"noise_BTNM",python,content +353,303574,"jasmine/models/dynamics.py",3690,0,"",python,selection_mouse +354,303689,"jasmine/models/dynamics.py",3689,0,"",python,selection_command +355,305723,"jasmine/models/dynamics.py",3534,0,"",python,selection_mouse +356,306254,"jasmine/models/dynamics.py",3715,0,"",python,selection_mouse +357,311612,"jasmine/models/dynamics.py",3616,0,"",python,selection_command +358,311873,"jasmine/models/dynamics.py",3615,0,"",python,selection_command +359,312056,"jasmine/models/dynamics.py",3494,0,"",python,selection_command +360,312161,"jasmine/models/dynamics.py",3493,0,"",python,selection_command +361,312304,"jasmine/models/dynamics.py",3426,0,"",python,selection_command +362,317069,"jasmine/models/dynamics.py",3493,0,"",python,selection_command +363,317248,"jasmine/models/dynamics.py",3494,0,"",python,selection_command +364,317449,"jasmine/models/dynamics.py",3615,0,"",python,selection_command +365,317537,"jasmine/models/dynamics.py",3616,0,"",python,selection_command +366,319581,"jasmine/models/dynamics.py",3616,99,"",python,content +367,319877,"jasmine/models/dynamics.py",3615,0,"",python,selection_command +368,320370,"jasmine/models/dynamics.py",3615,1,"",python,content +369,320610,"jasmine/models/dynamics.py",3494,0,"",python,selection_command +370,320789,"jasmine/models/dynamics.py",3493,0,"",python,selection_command +371,320948,"jasmine/models/dynamics.py",3426,0,"",python,selection_command +372,322118,"jasmine/models/dynamics.py",3426,67,"",python,content +373,322247,"jasmine/models/dynamics.py",3375,0,"",python,selection_command +374,323446,"jasmine/models/dynamics.py",3375,51,"",python,content +375,323655,"jasmine/models/dynamics.py",3307,0,"",python,selection_command +376,325490,"jasmine/models/dynamics.py",3264,111,"",python,content +377,326141,"jasmine/models/dynamics.py",3264,1,"",python,content +378,326180,"jasmine/models/dynamics.py",3272,0,"",python,selection_command +379,327289,"jasmine/models/dynamics.py",3263,0,"",python,selection_mouse +380,327866,"jasmine/models/dynamics.py",3342,0,"",python,selection_mouse +381,328457,"jasmine/models/dynamics.py",3332,0,"",python,selection_mouse +382,329321,"jasmine/models/dynamics.py",3314,0,"",python,selection_mouse 
+383,329847,"jasmine/models/dynamics.py",3309,0,"",python,selection_mouse +384,349252,"jasmine/models/dynamics.py",3385,0,"",python,selection_mouse +385,360710,"jasmine/models/dynamics.py",3456,0,"",python,selection_mouse +386,361450,"jasmine/models/dynamics.py",3385,0,"",python,selection_mouse +387,362098,"jasmine/models/dynamics.py",3263,0,"",python,selection_mouse +388,363018,"jasmine/models/dynamics.py",2976,0,"",python,selection_mouse +389,363647,"jasmine/models/dynamics.py",2842,0,"",python,selection_mouse +390,364251,"jasmine/models/dynamics.py",2843,0,"",python,selection_mouse +391,364888,"jasmine/models/dynamics.py",2802,0,"",python,selection_mouse +392,364930,"jasmine/models/dynamics.py",2801,0,"",python,selection_command +393,366007,"jasmine/models/dynamics.py",2490,0,"",python,selection_mouse +394,366598,"jasmine/models/dynamics.py",2802,0,"",python,selection_mouse +395,366639,"jasmine/models/dynamics.py",2801,0,"",python,selection_command +396,367180,"jasmine/models/dynamics.py",2976,0,"",python,selection_mouse +397,368181,"jasmine/models/dynamics.py",3263,0,"",python,selection_mouse +398,368887,"jasmine/models/dynamics.py",3385,0,"",python,selection_mouse +399,369590,"jasmine/models/dynamics.py",3287,0,"",python,selection_mouse +400,370237,"jasmine/models/dynamics.py",3226,0,"",python,selection_mouse +401,370834,"jasmine/models/dynamics.py",3263,0,"",python,selection_mouse +402,371621,"jasmine/models/dynamics.py",3385,0,"",python,selection_mouse +403,372416,"jasmine/models/dynamics.py",3263,0,"",python,selection_mouse +404,373231,"jasmine/models/dynamics.py",3385,0,"",python,selection_mouse +405,374122,"jasmine/models/dynamics.py",3288,0,"",python,selection_mouse +406,374668,"jasmine/models/dynamics.py",3263,0,"",python,selection_mouse +407,416829,"jasmine/models/dynamics.py",3408,0,"",python,selection_mouse +408,417414,"jasmine/models/dynamics.py",3385,0,"",python,selection_mouse +409,418350,"jasmine/models/dynamics.py",3263,0,"",python,selection_mouse +410,418894,"jasmine/models/dynamics.py",3385,0,"",python,selection_mouse +411,419634,"jasmine/models/dynamics.py",3263,0,"",python,selection_mouse +412,420202,"jasmine/models/dynamics.py",3385,0,"",python,selection_mouse +413,421745,"jasmine/models/dynamics.py",3438,0,"",python,selection_mouse +414,421900,"jasmine/models/dynamics.py",3433,22,"noise_level_embed_BT1M",python,selection_mouse +415,422482,"jasmine/models/dynamics.py",3456,0,"",python,selection_mouse +416,423163,"jasmine/models/dynamics.py",3426,0,"",python,selection_mouse +417,425637,"jasmine/models/dynamics.py",3419,0,"",python,selection_mouse +418,425774,"jasmine/models/dynamics.py",3401,30,"noise_augmented_vid_embed_BTNM",python,selection_mouse +419,426825,"jasmine/models/dynamics.py",3456,0,"",python,selection_mouse +420,427441,"jasmine/models/dynamics.py",3439,0,"",python,selection_mouse +421,427600,"jasmine/models/dynamics.py",3433,22,"noise_level_embed_BT1M",python,selection_mouse +422,428435,"jasmine/models/dynamics.py",3292,0,"",python,selection_mouse +423,428602,"jasmine/models/dynamics.py",3272,30,"noise_augmented_vid_embed_BTNM",python,selection_mouse +424,429445,"jasmine/models/dynamics.py",3417,0,"",python,selection_mouse +425,429625,"jasmine/models/dynamics.py",3401,30,"noise_augmented_vid_embed_BTNM",python,selection_mouse +426,437070,"jasmine/models/dynamics.py",3442,0,"",python,selection_mouse +427,437282,"jasmine/models/dynamics.py",3433,22,"noise_level_embed_BT1M",python,selection_mouse 
+428,438207,"jasmine/models/dynamics.py",3307,0,"",python,selection_mouse +429,438318,"jasmine/models/dynamics.py",3305,3,"jnp",python,selection_mouse +430,438563,"jasmine/models/dynamics.py",3305,4,"jnp.",python,selection_mouse +431,438587,"jasmine/models/dynamics.py",3305,8,"jnp.sqrt",python,selection_mouse +432,438617,"jasmine/models/dynamics.py",3305,10,"jnp.sqrt(1",python,selection_mouse +433,438656,"jasmine/models/dynamics.py",3305,80,"jnp.sqrt(1 - noise_level_B111) * vid_embed_BTNM + noise_level_B111 * noise_BTNM\n",python,selection_mouse +434,439036,"jasmine/models/dynamics.py",3385,0,"",python,selection_mouse +435,450684,"jasmine/models/dynamics.py",3263,0,"",python,selection_mouse +436,451273,"jasmine/models/dynamics.py",3385,0,"",python,selection_mouse +437,451914,"jasmine/models/dynamics.py",3263,0,"",python,selection_mouse +438,452497,"jasmine/models/dynamics.py",3385,0,"",python,selection_mouse +439,453308,"jasmine/models/dynamics.py",2940,0,"",python,selection_mouse +440,453889,"jasmine/models/dynamics.py",2976,0,"",python,selection_mouse +441,454953,"jasmine/models/dynamics.py",2797,0,"",python,selection_mouse +442,455537,"jasmine/models/dynamics.py",2794,0,"",python,selection_mouse +443,457243,"jasmine/models/dynamics.py",2566,0,"",python,selection_mouse +444,458105,"jasmine/models/dynamics.py",2555,0,"",python,selection_mouse +445,459284,"jasmine/models/dynamics.py",2242,0,"",python,selection_mouse +446,459973,"jasmine/models/dynamics.py",2241,0,"",python,selection_mouse +447,461044,"jasmine/models/dynamics.py",2243,0,"",python,selection_mouse +448,461209,"jasmine/models/dynamics.py",2242,4," ",python,selection_mouse +449,461317,"jasmine/models/dynamics.py",2242,63," def _apply_noise_augmentation(self, vid_embed_BTNM, rng):\n ",python,selection_mouse +450,461346,"jasmine/models/dynamics.py",2242,105," def _apply_noise_augmentation(self, vid_embed_BTNM, rng):\n B, T, N, M = vid_embed_BTNM.shape\n ",python,selection_mouse +451,461362,"jasmine/models/dynamics.py",2242,172," def _apply_noise_augmentation(self, vid_embed_BTNM, rng):\n B, T, N, M = vid_embed_BTNM.shape\n rng, _rng_noise_lvl, _rng_noise = jax.random.split(rng, 3)\n ",python,selection_mouse +452,461387,"jasmine/models/dynamics.py",2242,325," def _apply_noise_augmentation(self, vid_embed_BTNM, rng):\n B, T, N, M = vid_embed_BTNM.shape\n rng, _rng_noise_lvl, _rng_noise = jax.random.split(rng, 3)\n noise_level_B = jax.random.uniform(\n _rng_noise_lvl, shape=(B,), minval=0.0, maxval=self.max_noise_level, dtype=self.dtype\n )\n ",python,selection_mouse +453,461402,"jasmine/models/dynamics.py",2242,527," def _apply_noise_augmentation(self, vid_embed_BTNM, rng):\n B, T, N, M = vid_embed_BTNM.shape\n rng, _rng_noise_lvl, _rng_noise = jax.random.split(rng, 3)\n noise_level_B = jax.random.uniform(\n _rng_noise_lvl, shape=(B,), minval=0.0, maxval=self.max_noise_level, dtype=self.dtype\n )\n noise_BTNM = jax.random.normal(_rng_noise, shape=(B, T, N, M), dtype=self.dtype)\n noise_bucket_idx_B = jnp.floor(\n (noise_level_B * self.noise_buckets) / self.max_noise_level\n ",python,selection_mouse +454,461426,"jasmine/models/dynamics.py",2242,1027," def _apply_noise_augmentation(self, vid_embed_BTNM, rng):\n B, T, N, M = vid_embed_BTNM.shape\n rng, _rng_noise_lvl, _rng_noise = jax.random.split(rng, 3)\n noise_level_B = jax.random.uniform(\n _rng_noise_lvl, shape=(B,), minval=0.0, maxval=self.max_noise_level, dtype=self.dtype\n )\n noise_BTNM = jax.random.normal(_rng_noise, shape=(B, T, N, M), dtype=self.dtype)\n noise_bucket_idx_B 
= jnp.floor(\n (noise_level_B * self.noise_buckets) / self.max_noise_level\n ).astype(jnp.int32)\n \n # Clip noise_bucket_idx_B to ensure it stays within valid range to prevent NaNs\n noise_bucket_idx_B = jnp.clip(noise_bucket_idx_B, 0, self.noise_buckets - 1)\n\n noise_bucket_idx_B11 = noise_bucket_idx_B.reshape(B, 1, 1)\n noise_level_embed_B11M = self.noise_level_embed(noise_bucket_idx_B11)\n noise_level_embed_BT1M = jnp.tile(noise_level_embed_B11M, (1, T, 1, 1))\n noise_level_B111 = noise_level_B.reshape(B, 1, 1, 1)\n\n ",python,selection_mouse +455,461427,"jasmine/models/dynamics.py",2242,1150," def _apply_noise_augmentation(self, vid_embed_BTNM, rng):\n B, T, N, M = vid_embed_BTNM.shape\n rng, _rng_noise_lvl, _rng_noise = jax.random.split(rng, 3)\n noise_level_B = jax.random.uniform(\n _rng_noise_lvl, shape=(B,), minval=0.0, maxval=self.max_noise_level, dtype=self.dtype\n )\n noise_BTNM = jax.random.normal(_rng_noise, shape=(B, T, N, M), dtype=self.dtype)\n noise_bucket_idx_B = jnp.floor(\n (noise_level_B * self.noise_buckets) / self.max_noise_level\n ).astype(jnp.int32)\n \n # Clip noise_bucket_idx_B to ensure it stays within valid range to prevent NaNs\n noise_bucket_idx_B = jnp.clip(noise_bucket_idx_B, 0, self.noise_buckets - 1)\n\n noise_bucket_idx_B11 = noise_bucket_idx_B.reshape(B, 1, 1)\n noise_level_embed_B11M = self.noise_level_embed(noise_bucket_idx_B11)\n noise_level_embed_BT1M = jnp.tile(noise_level_embed_B11M, (1, T, 1, 1))\n noise_level_B111 = noise_level_B.reshape(B, 1, 1, 1)\n\n noise_augmented_vid_embed_BTNM = jnp.sqrt(1 - noise_level_B111) * vid_embed_BTNM + noise_level_B111 * noise_BTNM\n\n ",python,selection_mouse +456,461469,"jasmine/models/dynamics.py",2242,1239," def _apply_noise_augmentation(self, vid_embed_BTNM, rng):\n B, T, N, M = vid_embed_BTNM.shape\n rng, _rng_noise_lvl, _rng_noise = jax.random.split(rng, 3)\n noise_level_B = jax.random.uniform(\n _rng_noise_lvl, shape=(B,), minval=0.0, maxval=self.max_noise_level, dtype=self.dtype\n )\n noise_BTNM = jax.random.normal(_rng_noise, shape=(B, T, N, M), dtype=self.dtype)\n noise_bucket_idx_B = jnp.floor(\n (noise_level_B * self.noise_buckets) / self.max_noise_level\n ).astype(jnp.int32)\n \n # Clip noise_bucket_idx_B to ensure it stays within valid range to prevent NaNs\n noise_bucket_idx_B = jnp.clip(noise_bucket_idx_B, 0, self.noise_buckets - 1)\n\n noise_bucket_idx_B11 = noise_bucket_idx_B.reshape(B, 1, 1)\n noise_level_embed_B11M = self.noise_level_embed(noise_bucket_idx_B11)\n noise_level_embed_BT1M = jnp.tile(noise_level_embed_B11M, (1, T, 1, 1))\n noise_level_B111 = noise_level_B.reshape(B, 1, 1, 1)\n\n noise_augmented_vid_embed_BTNM = jnp.sqrt(1 - noise_level_B111) * vid_embed_BTNM + noise_level_B111 * noise_BTNM\n\n return noise_augmented_vid_embed_BTNM, noise_level_embed_BT1M\n\n def __call__(\n ",python,selection_mouse +457,461469,"jasmine/models/dynamics.py",2242,1254," def _apply_noise_augmentation(self, vid_embed_BTNM, rng):\n B, T, N, M = vid_embed_BTNM.shape\n rng, _rng_noise_lvl, _rng_noise = jax.random.split(rng, 3)\n noise_level_B = jax.random.uniform(\n _rng_noise_lvl, shape=(B,), minval=0.0, maxval=self.max_noise_level, dtype=self.dtype\n )\n noise_BTNM = jax.random.normal(_rng_noise, shape=(B, T, N, M), dtype=self.dtype)\n noise_bucket_idx_B = jnp.floor(\n (noise_level_B * self.noise_buckets) / self.max_noise_level\n ).astype(jnp.int32)\n \n # Clip noise_bucket_idx_B to ensure it stays within valid range to prevent NaNs\n noise_bucket_idx_B = jnp.clip(noise_bucket_idx_B, 0, 
self.noise_buckets - 1)\n\n noise_bucket_idx_B11 = noise_bucket_idx_B.reshape(B, 1, 1)\n noise_level_embed_B11M = self.noise_level_embed(noise_bucket_idx_B11)\n noise_level_embed_BT1M = jnp.tile(noise_level_embed_B11M, (1, T, 1, 1))\n noise_level_B111 = noise_level_B.reshape(B, 1, 1, 1)\n\n noise_augmented_vid_embed_BTNM = jnp.sqrt(1 - noise_level_B111) * vid_embed_BTNM + noise_level_B111 * noise_BTNM\n\n return noise_augmented_vid_embed_BTNM, noise_level_embed_BT1M\n\n def __call__(\n self,\n ",python,selection_mouse +458,461520,"jasmine/models/dynamics.py",2242,1291," def _apply_noise_augmentation(self, vid_embed_BTNM, rng):\n B, T, N, M = vid_embed_BTNM.shape\n rng, _rng_noise_lvl, _rng_noise = jax.random.split(rng, 3)\n noise_level_B = jax.random.uniform(\n _rng_noise_lvl, shape=(B,), minval=0.0, maxval=self.max_noise_level, dtype=self.dtype\n )\n noise_BTNM = jax.random.normal(_rng_noise, shape=(B, T, N, M), dtype=self.dtype)\n noise_bucket_idx_B = jnp.floor(\n (noise_level_B * self.noise_buckets) / self.max_noise_level\n ).astype(jnp.int32)\n \n # Clip noise_bucket_idx_B to ensure it stays within valid range to prevent NaNs\n noise_bucket_idx_B = jnp.clip(noise_bucket_idx_B, 0, self.noise_buckets - 1)\n\n noise_bucket_idx_B11 = noise_bucket_idx_B.reshape(B, 1, 1)\n noise_level_embed_B11M = self.noise_level_embed(noise_bucket_idx_B11)\n noise_level_embed_BT1M = jnp.tile(noise_level_embed_B11M, (1, T, 1, 1))\n noise_level_B111 = noise_level_B.reshape(B, 1, 1, 1)\n\n noise_augmented_vid_embed_BTNM = jnp.sqrt(1 - noise_level_B111) * vid_embed_BTNM + noise_level_B111 * noise_BTNM\n\n return noise_augmented_vid_embed_BTNM, noise_level_embed_BT1M\n\n def __call__(\n self,\n batch: Dict[str, jax.Array],\n ) -",python,selection_mouse +459,461521,"jasmine/models/dynamics.py",2242,1290," def _apply_noise_augmentation(self, vid_embed_BTNM, rng):\n B, T, N, M = vid_embed_BTNM.shape\n rng, _rng_noise_lvl, _rng_noise = jax.random.split(rng, 3)\n noise_level_B = jax.random.uniform(\n _rng_noise_lvl, shape=(B,), minval=0.0, maxval=self.max_noise_level, dtype=self.dtype\n )\n noise_BTNM = jax.random.normal(_rng_noise, shape=(B, T, N, M), dtype=self.dtype)\n noise_bucket_idx_B = jnp.floor(\n (noise_level_B * self.noise_buckets) / self.max_noise_level\n ).astype(jnp.int32)\n \n # Clip noise_bucket_idx_B to ensure it stays within valid range to prevent NaNs\n noise_bucket_idx_B = jnp.clip(noise_bucket_idx_B, 0, self.noise_buckets - 1)\n\n noise_bucket_idx_B11 = noise_bucket_idx_B.reshape(B, 1, 1)\n noise_level_embed_B11M = self.noise_level_embed(noise_bucket_idx_B11)\n noise_level_embed_BT1M = jnp.tile(noise_level_embed_B11M, (1, T, 1, 1))\n noise_level_B111 = noise_level_B.reshape(B, 1, 1, 1)\n\n noise_augmented_vid_embed_BTNM = jnp.sqrt(1 - noise_level_B111) * vid_embed_BTNM + noise_level_B111 * noise_BTNM\n\n return noise_augmented_vid_embed_BTNM, noise_level_embed_BT1M\n\n def __call__(\n self,\n batch: Dict[str, jax.Array],\n ) ",python,selection_mouse +460,461577,"jasmine/models/dynamics.py",2242,1328," def _apply_noise_augmentation(self, vid_embed_BTNM, rng):\n B, T, N, M = vid_embed_BTNM.shape\n rng, _rng_noise_lvl, _rng_noise = jax.random.split(rng, 3)\n noise_level_B = jax.random.uniform(\n _rng_noise_lvl, shape=(B,), minval=0.0, maxval=self.max_noise_level, dtype=self.dtype\n )\n noise_BTNM = jax.random.normal(_rng_noise, shape=(B, T, N, M), dtype=self.dtype)\n noise_bucket_idx_B = jnp.floor(\n (noise_level_B * self.noise_buckets) / self.max_noise_level\n ).astype(jnp.int32)\n \n # Clip 
noise_bucket_idx_B to ensure it stays within valid range to prevent NaNs\n noise_bucket_idx_B = jnp.clip(noise_bucket_idx_B, 0, self.noise_buckets - 1)\n\n noise_bucket_idx_B11 = noise_bucket_idx_B.reshape(B, 1, 1)\n noise_level_embed_B11M = self.noise_level_embed(noise_bucket_idx_B11)\n noise_level_embed_BT1M = jnp.tile(noise_level_embed_B11M, (1, T, 1, 1))\n noise_level_B111 = noise_level_B.reshape(B, 1, 1, 1)\n\n noise_augmented_vid_embed_BTNM = jnp.sqrt(1 - noise_level_B111) * vid_embed_BTNM + noise_level_B111 * noise_BTNM\n\n return noise_augmented_vid_embed_BTNM, noise_level_embed_BT1M\n\n def __call__(\n self,\n batch: Dict[str, jax.Array],\n ) -> tuple[jax.Array, jax.Array]:\n ",python,selection_mouse +461,461734,"jasmine/models/dynamics.py",2242,1290," def _apply_noise_augmentation(self, vid_embed_BTNM, rng):\n B, T, N, M = vid_embed_BTNM.shape\n rng, _rng_noise_lvl, _rng_noise = jax.random.split(rng, 3)\n noise_level_B = jax.random.uniform(\n _rng_noise_lvl, shape=(B,), minval=0.0, maxval=self.max_noise_level, dtype=self.dtype\n )\n noise_BTNM = jax.random.normal(_rng_noise, shape=(B, T, N, M), dtype=self.dtype)\n noise_bucket_idx_B = jnp.floor(\n (noise_level_B * self.noise_buckets) / self.max_noise_level\n ).astype(jnp.int32)\n \n # Clip noise_bucket_idx_B to ensure it stays within valid range to prevent NaNs\n noise_bucket_idx_B = jnp.clip(noise_bucket_idx_B, 0, self.noise_buckets - 1)\n\n noise_bucket_idx_B11 = noise_bucket_idx_B.reshape(B, 1, 1)\n noise_level_embed_B11M = self.noise_level_embed(noise_bucket_idx_B11)\n noise_level_embed_BT1M = jnp.tile(noise_level_embed_B11M, (1, T, 1, 1))\n noise_level_B111 = noise_level_B.reshape(B, 1, 1, 1)\n\n noise_augmented_vid_embed_BTNM = jnp.sqrt(1 - noise_level_B111) * vid_embed_BTNM + noise_level_B111 * noise_BTNM\n\n return noise_augmented_vid_embed_BTNM, noise_level_embed_BT1M\n\n def __call__(\n self,\n batch: Dict[str, jax.Array],\n ) ",python,selection_mouse +462,461767,"jasmine/models/dynamics.py",2242,1253," def _apply_noise_augmentation(self, vid_embed_BTNM, rng):\n B, T, N, M = vid_embed_BTNM.shape\n rng, _rng_noise_lvl, _rng_noise = jax.random.split(rng, 3)\n noise_level_B = jax.random.uniform(\n _rng_noise_lvl, shape=(B,), minval=0.0, maxval=self.max_noise_level, dtype=self.dtype\n )\n noise_BTNM = jax.random.normal(_rng_noise, shape=(B, T, N, M), dtype=self.dtype)\n noise_bucket_idx_B = jnp.floor(\n (noise_level_B * self.noise_buckets) / self.max_noise_level\n ).astype(jnp.int32)\n \n # Clip noise_bucket_idx_B to ensure it stays within valid range to prevent NaNs\n noise_bucket_idx_B = jnp.clip(noise_bucket_idx_B, 0, self.noise_buckets - 1)\n\n noise_bucket_idx_B11 = noise_bucket_idx_B.reshape(B, 1, 1)\n noise_level_embed_B11M = self.noise_level_embed(noise_bucket_idx_B11)\n noise_level_embed_BT1M = jnp.tile(noise_level_embed_B11M, (1, T, 1, 1))\n noise_level_B111 = noise_level_B.reshape(B, 1, 1, 1)\n\n noise_augmented_vid_embed_BTNM = jnp.sqrt(1 - noise_level_B111) * vid_embed_BTNM + noise_level_B111 * noise_BTNM\n\n return noise_augmented_vid_embed_BTNM, noise_level_embed_BT1M\n\n def __call__(\n self,\n ",python,selection_mouse +463,461807,"jasmine/models/dynamics.py",2242,1239," def _apply_noise_augmentation(self, vid_embed_BTNM, rng):\n B, T, N, M = vid_embed_BTNM.shape\n rng, _rng_noise_lvl, _rng_noise = jax.random.split(rng, 3)\n noise_level_B = jax.random.uniform(\n _rng_noise_lvl, shape=(B,), minval=0.0, maxval=self.max_noise_level, dtype=self.dtype\n )\n noise_BTNM = jax.random.normal(_rng_noise, shape=(B, T, 
N, M), dtype=self.dtype)\n noise_bucket_idx_B = jnp.floor(\n (noise_level_B * self.noise_buckets) / self.max_noise_level\n ).astype(jnp.int32)\n \n # Clip noise_bucket_idx_B to ensure it stays within valid range to prevent NaNs\n noise_bucket_idx_B = jnp.clip(noise_bucket_idx_B, 0, self.noise_buckets - 1)\n\n noise_bucket_idx_B11 = noise_bucket_idx_B.reshape(B, 1, 1)\n noise_level_embed_B11M = self.noise_level_embed(noise_bucket_idx_B11)\n noise_level_embed_BT1M = jnp.tile(noise_level_embed_B11M, (1, T, 1, 1))\n noise_level_B111 = noise_level_B.reshape(B, 1, 1, 1)\n\n noise_augmented_vid_embed_BTNM = jnp.sqrt(1 - noise_level_B111) * vid_embed_BTNM + noise_level_B111 * noise_BTNM\n\n return noise_augmented_vid_embed_BTNM, noise_level_embed_BT1M\n\n def __call__(\n ",python,selection_mouse +464,461852,"jasmine/models/dynamics.py",2242,1222," def _apply_noise_augmentation(self, vid_embed_BTNM, rng):\n B, T, N, M = vid_embed_BTNM.shape\n rng, _rng_noise_lvl, _rng_noise = jax.random.split(rng, 3)\n noise_level_B = jax.random.uniform(\n _rng_noise_lvl, shape=(B,), minval=0.0, maxval=self.max_noise_level, dtype=self.dtype\n )\n noise_BTNM = jax.random.normal(_rng_noise, shape=(B, T, N, M), dtype=self.dtype)\n noise_bucket_idx_B = jnp.floor(\n (noise_level_B * self.noise_buckets) / self.max_noise_level\n ).astype(jnp.int32)\n \n # Clip noise_bucket_idx_B to ensure it stays within valid range to prevent NaNs\n noise_bucket_idx_B = jnp.clip(noise_bucket_idx_B, 0, self.noise_buckets - 1)\n\n noise_bucket_idx_B11 = noise_bucket_idx_B.reshape(B, 1, 1)\n noise_level_embed_B11M = self.noise_level_embed(noise_bucket_idx_B11)\n noise_level_embed_BT1M = jnp.tile(noise_level_embed_B11M, (1, T, 1, 1))\n noise_level_B111 = noise_level_B.reshape(B, 1, 1, 1)\n\n noise_augmented_vid_embed_BTNM = jnp.sqrt(1 - noise_level_B111) * vid_embed_BTNM + noise_level_B111 * noise_BTNM\n\n return noise_augmented_vid_embed_BTNM, noise_level_embed_BT1M\n\n def",python,selection_mouse +465,461852,"jasmine/models/dynamics.py",2242,1214," def _apply_noise_augmentation(self, vid_embed_BTNM, rng):\n B, T, N, M = vid_embed_BTNM.shape\n rng, _rng_noise_lvl, _rng_noise = jax.random.split(rng, 3)\n noise_level_B = jax.random.uniform(\n _rng_noise_lvl, shape=(B,), minval=0.0, maxval=self.max_noise_level, dtype=self.dtype\n )\n noise_BTNM = jax.random.normal(_rng_noise, shape=(B, T, N, M), dtype=self.dtype)\n noise_bucket_idx_B = jnp.floor(\n (noise_level_B * self.noise_buckets) / self.max_noise_level\n ).astype(jnp.int32)\n \n # Clip noise_bucket_idx_B to ensure it stays within valid range to prevent NaNs\n noise_bucket_idx_B = jnp.clip(noise_bucket_idx_B, 0, self.noise_buckets - 1)\n\n noise_bucket_idx_B11 = noise_bucket_idx_B.reshape(B, 1, 1)\n noise_level_embed_B11M = self.noise_level_embed(noise_bucket_idx_B11)\n noise_level_embed_BT1M = jnp.tile(noise_level_embed_B11M, (1, T, 1, 1))\n noise_level_B111 = noise_level_B.reshape(B, 1, 1, 1)\n\n noise_augmented_vid_embed_BTNM = jnp.sqrt(1 - noise_level_B111) * vid_embed_BTNM + noise_level_B111 * noise_BTNM\n\n return noise_augmented_vid_embed_BTNM, noise_level_embed_BT1M\n",python,selection_mouse +466,461894,"jasmine/models/dynamics.py",2242,1150," def _apply_noise_augmentation(self, vid_embed_BTNM, rng):\n B, T, N, M = vid_embed_BTNM.shape\n rng, _rng_noise_lvl, _rng_noise = jax.random.split(rng, 3)\n noise_level_B = jax.random.uniform(\n _rng_noise_lvl, shape=(B,), minval=0.0, maxval=self.max_noise_level, dtype=self.dtype\n )\n noise_BTNM = jax.random.normal(_rng_noise, shape=(B, T, 
N, M), dtype=self.dtype)\n noise_bucket_idx_B = jnp.floor(\n (noise_level_B * self.noise_buckets) / self.max_noise_level\n ).astype(jnp.int32)\n \n # Clip noise_bucket_idx_B to ensure it stays within valid range to prevent NaNs\n noise_bucket_idx_B = jnp.clip(noise_bucket_idx_B, 0, self.noise_buckets - 1)\n\n noise_bucket_idx_B11 = noise_bucket_idx_B.reshape(B, 1, 1)\n noise_level_embed_B11M = self.noise_level_embed(noise_bucket_idx_B11)\n noise_level_embed_BT1M = jnp.tile(noise_level_embed_B11M, (1, T, 1, 1))\n noise_level_B111 = noise_level_B.reshape(B, 1, 1, 1)\n\n noise_augmented_vid_embed_BTNM = jnp.sqrt(1 - noise_level_B111) * vid_embed_BTNM + noise_level_B111 * noise_BTNM\n\n ",python,selection_mouse +467,461894,"jasmine/models/dynamics.py",2242,1151," def _apply_noise_augmentation(self, vid_embed_BTNM, rng):\n B, T, N, M = vid_embed_BTNM.shape\n rng, _rng_noise_lvl, _rng_noise = jax.random.split(rng, 3)\n noise_level_B = jax.random.uniform(\n _rng_noise_lvl, shape=(B,), minval=0.0, maxval=self.max_noise_level, dtype=self.dtype\n )\n noise_BTNM = jax.random.normal(_rng_noise, shape=(B, T, N, M), dtype=self.dtype)\n noise_bucket_idx_B = jnp.floor(\n (noise_level_B * self.noise_buckets) / self.max_noise_level\n ).astype(jnp.int32)\n \n # Clip noise_bucket_idx_B to ensure it stays within valid range to prevent NaNs\n noise_bucket_idx_B = jnp.clip(noise_bucket_idx_B, 0, self.noise_buckets - 1)\n\n noise_bucket_idx_B11 = noise_bucket_idx_B.reshape(B, 1, 1)\n noise_level_embed_B11M = self.noise_level_embed(noise_bucket_idx_B11)\n noise_level_embed_BT1M = jnp.tile(noise_level_embed_B11M, (1, T, 1, 1))\n noise_level_B111 = noise_level_B.reshape(B, 1, 1, 1)\n\n noise_augmented_vid_embed_BTNM = jnp.sqrt(1 - noise_level_B111) * vid_embed_BTNM + noise_level_B111 * noise_BTNM\n\n ",python,selection_mouse +468,461939,"jasmine/models/dynamics.py",2242,1143," def _apply_noise_augmentation(self, vid_embed_BTNM, rng):\n B, T, N, M = vid_embed_BTNM.shape\n rng, _rng_noise_lvl, _rng_noise = jax.random.split(rng, 3)\n noise_level_B = jax.random.uniform(\n _rng_noise_lvl, shape=(B,), minval=0.0, maxval=self.max_noise_level, dtype=self.dtype\n )\n noise_BTNM = jax.random.normal(_rng_noise, shape=(B, T, N, M), dtype=self.dtype)\n noise_bucket_idx_B = jnp.floor(\n (noise_level_B * self.noise_buckets) / self.max_noise_level\n ).astype(jnp.int32)\n \n # Clip noise_bucket_idx_B to ensure it stays within valid range to prevent NaNs\n noise_bucket_idx_B = jnp.clip(noise_bucket_idx_B, 0, self.noise_buckets - 1)\n\n noise_bucket_idx_B11 = noise_bucket_idx_B.reshape(B, 1, 1)\n noise_level_embed_B11M = self.noise_level_embed(noise_bucket_idx_B11)\n noise_level_embed_BT1M = jnp.tile(noise_level_embed_B11M, (1, T, 1, 1))\n noise_level_B111 = noise_level_B.reshape(B, 1, 1, 1)\n\n noise_augmented_vid_embed_BTNM = jnp.sqrt(1 - noise_level_B111) * vid_embed_BTNM + noise_level_B111 * noise_BTNM\n",python,selection_mouse +469,462146,"jasmine/models/dynamics.py",2242,1151," def _apply_noise_augmentation(self, vid_embed_BTNM, rng):\n B, T, N, M = vid_embed_BTNM.shape\n rng, _rng_noise_lvl, _rng_noise = jax.random.split(rng, 3)\n noise_level_B = jax.random.uniform(\n _rng_noise_lvl, shape=(B,), minval=0.0, maxval=self.max_noise_level, dtype=self.dtype\n )\n noise_BTNM = jax.random.normal(_rng_noise, shape=(B, T, N, M), dtype=self.dtype)\n noise_bucket_idx_B = jnp.floor(\n (noise_level_B * self.noise_buckets) / self.max_noise_level\n ).astype(jnp.int32)\n \n # Clip noise_bucket_idx_B to ensure it stays within valid range to 
prevent NaNs\n noise_bucket_idx_B = jnp.clip(noise_bucket_idx_B, 0, self.noise_buckets - 1)\n\n noise_bucket_idx_B11 = noise_bucket_idx_B.reshape(B, 1, 1)\n noise_level_embed_B11M = self.noise_level_embed(noise_bucket_idx_B11)\n noise_level_embed_BT1M = jnp.tile(noise_level_embed_B11M, (1, T, 1, 1))\n noise_level_B111 = noise_level_B.reshape(B, 1, 1, 1)\n\n noise_augmented_vid_embed_BTNM = jnp.sqrt(1 - noise_level_B111) * vid_embed_BTNM + noise_level_B111 * noise_BTNM\n\n ",python,selection_mouse +470,462514,"jasmine/models/dynamics.py",2242,1150," def _apply_noise_augmentation(self, vid_embed_BTNM, rng):\n B, T, N, M = vid_embed_BTNM.shape\n rng, _rng_noise_lvl, _rng_noise = jax.random.split(rng, 3)\n noise_level_B = jax.random.uniform(\n _rng_noise_lvl, shape=(B,), minval=0.0, maxval=self.max_noise_level, dtype=self.dtype\n )\n noise_BTNM = jax.random.normal(_rng_noise, shape=(B, T, N, M), dtype=self.dtype)\n noise_bucket_idx_B = jnp.floor(\n (noise_level_B * self.noise_buckets) / self.max_noise_level\n ).astype(jnp.int32)\n \n # Clip noise_bucket_idx_B to ensure it stays within valid range to prevent NaNs\n noise_bucket_idx_B = jnp.clip(noise_bucket_idx_B, 0, self.noise_buckets - 1)\n\n noise_bucket_idx_B11 = noise_bucket_idx_B.reshape(B, 1, 1)\n noise_level_embed_B11M = self.noise_level_embed(noise_bucket_idx_B11)\n noise_level_embed_BT1M = jnp.tile(noise_level_embed_B11M, (1, T, 1, 1))\n noise_level_B111 = noise_level_B.reshape(B, 1, 1, 1)\n\n noise_augmented_vid_embed_BTNM = jnp.sqrt(1 - noise_level_B111) * vid_embed_BTNM + noise_level_B111 * noise_BTNM\n\n ",python,selection_mouse +471,462557,"jasmine/models/dynamics.py",2242,1149," def _apply_noise_augmentation(self, vid_embed_BTNM, rng):\n B, T, N, M = vid_embed_BTNM.shape\n rng, _rng_noise_lvl, _rng_noise = jax.random.split(rng, 3)\n noise_level_B = jax.random.uniform(\n _rng_noise_lvl, shape=(B,), minval=0.0, maxval=self.max_noise_level, dtype=self.dtype\n )\n noise_BTNM = jax.random.normal(_rng_noise, shape=(B, T, N, M), dtype=self.dtype)\n noise_bucket_idx_B = jnp.floor(\n (noise_level_B * self.noise_buckets) / self.max_noise_level\n ).astype(jnp.int32)\n \n # Clip noise_bucket_idx_B to ensure it stays within valid range to prevent NaNs\n noise_bucket_idx_B = jnp.clip(noise_bucket_idx_B, 0, self.noise_buckets - 1)\n\n noise_bucket_idx_B11 = noise_bucket_idx_B.reshape(B, 1, 1)\n noise_level_embed_B11M = self.noise_level_embed(noise_bucket_idx_B11)\n noise_level_embed_BT1M = jnp.tile(noise_level_embed_B11M, (1, T, 1, 1))\n noise_level_B111 = noise_level_B.reshape(B, 1, 1, 1)\n\n noise_augmented_vid_embed_BTNM = jnp.sqrt(1 - noise_level_B111) * vid_embed_BTNM + noise_level_B111 * noise_BTNM\n\n ",python,selection_mouse +472,462604,"jasmine/models/dynamics.py",2242,1147," def _apply_noise_augmentation(self, vid_embed_BTNM, rng):\n B, T, N, M = vid_embed_BTNM.shape\n rng, _rng_noise_lvl, _rng_noise = jax.random.split(rng, 3)\n noise_level_B = jax.random.uniform(\n _rng_noise_lvl, shape=(B,), minval=0.0, maxval=self.max_noise_level, dtype=self.dtype\n )\n noise_BTNM = jax.random.normal(_rng_noise, shape=(B, T, N, M), dtype=self.dtype)\n noise_bucket_idx_B = jnp.floor(\n (noise_level_B * self.noise_buckets) / self.max_noise_level\n ).astype(jnp.int32)\n \n # Clip noise_bucket_idx_B to ensure it stays within valid range to prevent NaNs\n noise_bucket_idx_B = jnp.clip(noise_bucket_idx_B, 0, self.noise_buckets - 1)\n\n noise_bucket_idx_B11 = noise_bucket_idx_B.reshape(B, 1, 1)\n noise_level_embed_B11M = 
self.noise_level_embed(noise_bucket_idx_B11)\n noise_level_embed_BT1M = jnp.tile(noise_level_embed_B11M, (1, T, 1, 1))\n noise_level_B111 = noise_level_B.reshape(B, 1, 1, 1)\n\n noise_augmented_vid_embed_BTNM = jnp.sqrt(1 - noise_level_B111) * vid_embed_BTNM + noise_level_B111 * noise_BTNM\n\n ",python,selection_mouse +473,462645,"jasmine/models/dynamics.py",2242,1214," def _apply_noise_augmentation(self, vid_embed_BTNM, rng):\n B, T, N, M = vid_embed_BTNM.shape\n rng, _rng_noise_lvl, _rng_noise = jax.random.split(rng, 3)\n noise_level_B = jax.random.uniform(\n _rng_noise_lvl, shape=(B,), minval=0.0, maxval=self.max_noise_level, dtype=self.dtype\n )\n noise_BTNM = jax.random.normal(_rng_noise, shape=(B, T, N, M), dtype=self.dtype)\n noise_bucket_idx_B = jnp.floor(\n (noise_level_B * self.noise_buckets) / self.max_noise_level\n ).astype(jnp.int32)\n \n # Clip noise_bucket_idx_B to ensure it stays within valid range to prevent NaNs\n noise_bucket_idx_B = jnp.clip(noise_bucket_idx_B, 0, self.noise_buckets - 1)\n\n noise_bucket_idx_B11 = noise_bucket_idx_B.reshape(B, 1, 1)\n noise_level_embed_B11M = self.noise_level_embed(noise_bucket_idx_B11)\n noise_level_embed_BT1M = jnp.tile(noise_level_embed_B11M, (1, T, 1, 1))\n noise_level_B111 = noise_level_B.reshape(B, 1, 1, 1)\n\n noise_augmented_vid_embed_BTNM = jnp.sqrt(1 - noise_level_B111) * vid_embed_BTNM + noise_level_B111 * noise_BTNM\n\n return noise_augmented_vid_embed_BTNM, noise_level_embed_BT1M\n",python,selection_mouse +474,464377,"jasmine/models/dynamics.py",2243,0,"",python,selection_mouse +475,464378,"jasmine/models/dynamics.py",2242,4," ",python,selection_mouse +476,464568,"jasmine/models/dynamics.py",2242,63," def _apply_noise_augmentation(self, vid_embed_BTNM, rng):\n ",python,selection_mouse +477,464610,"jasmine/models/dynamics.py",2242,105," def _apply_noise_augmentation(self, vid_embed_BTNM, rng):\n B, T, N, M = vid_embed_BTNM.shape\n ",python,selection_mouse +478,464623,"jasmine/models/dynamics.py",2242,106," def _apply_noise_augmentation(self, vid_embed_BTNM, rng):\n B, T, N, M = vid_embed_BTNM.shape\n ",python,selection_mouse +479,464640,"jasmine/models/dynamics.py",2242,217," def _apply_noise_augmentation(self, vid_embed_BTNM, rng):\n B, T, N, M = vid_embed_BTNM.shape\n rng, _rng_noise_lvl, _rng_noise = jax.random.split(rng, 3)\n noise_level_B = jax.random.uniform(\n ",python,selection_mouse +480,464657,"jasmine/models/dynamics.py",2242,564," def _apply_noise_augmentation(self, vid_embed_BTNM, rng):\n B, T, N, M = vid_embed_BTNM.shape\n rng, _rng_noise_lvl, _rng_noise = jax.random.split(rng, 3)\n noise_level_B = jax.random.uniform(\n _rng_noise_lvl, shape=(B,), minval=0.0, maxval=self.max_noise_level, dtype=self.dtype\n )\n noise_BTNM = jax.random.normal(_rng_noise, shape=(B, T, N, M), dtype=self.dtype)\n noise_bucket_idx_B = jnp.floor(\n (noise_level_B * self.noise_buckets) / self.max_noise_level\n ).astype(jnp.int32)\n \n ",python,selection_mouse +481,464700,"jasmine/models/dynamics.py",2242,739," def _apply_noise_augmentation(self, vid_embed_BTNM, rng):\n B, T, N, M = vid_embed_BTNM.shape\n rng, _rng_noise_lvl, _rng_noise = jax.random.split(rng, 3)\n noise_level_B = jax.random.uniform(\n _rng_noise_lvl, shape=(B,), minval=0.0, maxval=self.max_noise_level, dtype=self.dtype\n )\n noise_BTNM = jax.random.normal(_rng_noise, shape=(B, T, N, M), dtype=self.dtype)\n noise_bucket_idx_B = jnp.floor(\n (noise_level_B * self.noise_buckets) / self.max_noise_level\n ).astype(jnp.int32)\n \n # Clip noise_bucket_idx_B to ensure it stays 
within valid range to prevent NaNs\n noise_bucket_idx_B = jnp.clip(noise_bucket_idx_B, 0, self.noise_buckets - 1)\n\n ",python,selection_mouse +482,464701,"jasmine/models/dynamics.py",2242,885," def _apply_noise_augmentation(self, vid_embed_BTNM, rng):\n B, T, N, M = vid_embed_BTNM.shape\n rng, _rng_noise_lvl, _rng_noise = jax.random.split(rng, 3)\n noise_level_B = jax.random.uniform(\n _rng_noise_lvl, shape=(B,), minval=0.0, maxval=self.max_noise_level, dtype=self.dtype\n )\n noise_BTNM = jax.random.normal(_rng_noise, shape=(B, T, N, M), dtype=self.dtype)\n noise_bucket_idx_B = jnp.floor(\n (noise_level_B * self.noise_buckets) / self.max_noise_level\n ).astype(jnp.int32)\n \n # Clip noise_bucket_idx_B to ensure it stays within valid range to prevent NaNs\n noise_bucket_idx_B = jnp.clip(noise_bucket_idx_B, 0, self.noise_buckets - 1)\n\n noise_bucket_idx_B11 = noise_bucket_idx_B.reshape(B, 1, 1)\n noise_level_embed_B11M = self.noise_level_embed(noise_bucket_idx_B11)\n ",python,selection_mouse +483,464706,"jasmine/models/dynamics.py",2242,1021," def _apply_noise_augmentation(self, vid_embed_BTNM, rng):\n B, T, N, M = vid_embed_BTNM.shape\n rng, _rng_noise_lvl, _rng_noise = jax.random.split(rng, 3)\n noise_level_B = jax.random.uniform(\n _rng_noise_lvl, shape=(B,), minval=0.0, maxval=self.max_noise_level, dtype=self.dtype\n )\n noise_BTNM = jax.random.normal(_rng_noise, shape=(B, T, N, M), dtype=self.dtype)\n noise_bucket_idx_B = jnp.floor(\n (noise_level_B * self.noise_buckets) / self.max_noise_level\n ).astype(jnp.int32)\n \n # Clip noise_bucket_idx_B to ensure it stays within valid range to prevent NaNs\n noise_bucket_idx_B = jnp.clip(noise_bucket_idx_B, 0, self.noise_buckets - 1)\n\n noise_bucket_idx_B11 = noise_bucket_idx_B.reshape(B, 1, 1)\n noise_level_embed_B11M = self.noise_level_embed(noise_bucket_idx_B11)\n noise_level_embed_BT1M = jnp.tile(noise_level_embed_B11M, (1, T, 1, 1))\n noise_level_B111 = noise_level_B.reshape(B, 1, 1, 1)\n",python,selection_mouse +484,464722,"jasmine/models/dynamics.py",2242,1026," def _apply_noise_augmentation(self, vid_embed_BTNM, rng):\n B, T, N, M = vid_embed_BTNM.shape\n rng, _rng_noise_lvl, _rng_noise = jax.random.split(rng, 3)\n noise_level_B = jax.random.uniform(\n _rng_noise_lvl, shape=(B,), minval=0.0, maxval=self.max_noise_level, dtype=self.dtype\n )\n noise_BTNM = jax.random.normal(_rng_noise, shape=(B, T, N, M), dtype=self.dtype)\n noise_bucket_idx_B = jnp.floor(\n (noise_level_B * self.noise_buckets) / self.max_noise_level\n ).astype(jnp.int32)\n \n # Clip noise_bucket_idx_B to ensure it stays within valid range to prevent NaNs\n noise_bucket_idx_B = jnp.clip(noise_bucket_idx_B, 0, self.noise_buckets - 1)\n\n noise_bucket_idx_B11 = noise_bucket_idx_B.reshape(B, 1, 1)\n noise_level_embed_B11M = self.noise_level_embed(noise_bucket_idx_B11)\n noise_level_embed_BT1M = jnp.tile(noise_level_embed_B11M, (1, T, 1, 1))\n noise_level_B111 = noise_level_B.reshape(B, 1, 1, 1)\n\n ",python,selection_mouse +485,464768,"jasmine/models/dynamics.py",2242,1143," def _apply_noise_augmentation(self, vid_embed_BTNM, rng):\n B, T, N, M = vid_embed_BTNM.shape\n rng, _rng_noise_lvl, _rng_noise = jax.random.split(rng, 3)\n noise_level_B = jax.random.uniform(\n _rng_noise_lvl, shape=(B,), minval=0.0, maxval=self.max_noise_level, dtype=self.dtype\n )\n noise_BTNM = jax.random.normal(_rng_noise, shape=(B, T, N, M), dtype=self.dtype)\n noise_bucket_idx_B = jnp.floor(\n (noise_level_B * self.noise_buckets) / self.max_noise_level\n ).astype(jnp.int32)\n \n # Clip 
noise_bucket_idx_B to ensure it stays within valid range to prevent NaNs\n noise_bucket_idx_B = jnp.clip(noise_bucket_idx_B, 0, self.noise_buckets - 1)\n\n noise_bucket_idx_B11 = noise_bucket_idx_B.reshape(B, 1, 1)\n noise_level_embed_B11M = self.noise_level_embed(noise_bucket_idx_B11)\n noise_level_embed_BT1M = jnp.tile(noise_level_embed_B11M, (1, T, 1, 1))\n noise_level_B111 = noise_level_B.reshape(B, 1, 1, 1)\n\n noise_augmented_vid_embed_BTNM = jnp.sqrt(1 - noise_level_B111) * vid_embed_BTNM + noise_level_B111 * noise_BTNM\n",python,selection_mouse +486,464813,"jasmine/models/dynamics.py",2242,1147," def _apply_noise_augmentation(self, vid_embed_BTNM, rng):\n B, T, N, M = vid_embed_BTNM.shape\n rng, _rng_noise_lvl, _rng_noise = jax.random.split(rng, 3)\n noise_level_B = jax.random.uniform(\n _rng_noise_lvl, shape=(B,), minval=0.0, maxval=self.max_noise_level, dtype=self.dtype\n )\n noise_BTNM = jax.random.normal(_rng_noise, shape=(B, T, N, M), dtype=self.dtype)\n noise_bucket_idx_B = jnp.floor(\n (noise_level_B * self.noise_buckets) / self.max_noise_level\n ).astype(jnp.int32)\n \n # Clip noise_bucket_idx_B to ensure it stays within valid range to prevent NaNs\n noise_bucket_idx_B = jnp.clip(noise_bucket_idx_B, 0, self.noise_buckets - 1)\n\n noise_bucket_idx_B11 = noise_bucket_idx_B.reshape(B, 1, 1)\n noise_level_embed_B11M = self.noise_level_embed(noise_bucket_idx_B11)\n noise_level_embed_BT1M = jnp.tile(noise_level_embed_B11M, (1, T, 1, 1))\n noise_level_B111 = noise_level_B.reshape(B, 1, 1, 1)\n\n noise_augmented_vid_embed_BTNM = jnp.sqrt(1 - noise_level_B111) * vid_embed_BTNM + noise_level_B111 * noise_BTNM\n\n ",python,selection_mouse +487,464901,"jasmine/models/dynamics.py",2242,1148," def _apply_noise_augmentation(self, vid_embed_BTNM, rng):\n B, T, N, M = vid_embed_BTNM.shape\n rng, _rng_noise_lvl, _rng_noise = jax.random.split(rng, 3)\n noise_level_B = jax.random.uniform(\n _rng_noise_lvl, shape=(B,), minval=0.0, maxval=self.max_noise_level, dtype=self.dtype\n )\n noise_BTNM = jax.random.normal(_rng_noise, shape=(B, T, N, M), dtype=self.dtype)\n noise_bucket_idx_B = jnp.floor(\n (noise_level_B * self.noise_buckets) / self.max_noise_level\n ).astype(jnp.int32)\n \n # Clip noise_bucket_idx_B to ensure it stays within valid range to prevent NaNs\n noise_bucket_idx_B = jnp.clip(noise_bucket_idx_B, 0, self.noise_buckets - 1)\n\n noise_bucket_idx_B11 = noise_bucket_idx_B.reshape(B, 1, 1)\n noise_level_embed_B11M = self.noise_level_embed(noise_bucket_idx_B11)\n noise_level_embed_BT1M = jnp.tile(noise_level_embed_B11M, (1, T, 1, 1))\n noise_level_B111 = noise_level_B.reshape(B, 1, 1, 1)\n\n noise_augmented_vid_embed_BTNM = jnp.sqrt(1 - noise_level_B111) * vid_embed_BTNM + noise_level_B111 * noise_BTNM\n\n ",python,selection_mouse +488,464908,"jasmine/models/dynamics.py",2242,1214," def _apply_noise_augmentation(self, vid_embed_BTNM, rng):\n B, T, N, M = vid_embed_BTNM.shape\n rng, _rng_noise_lvl, _rng_noise = jax.random.split(rng, 3)\n noise_level_B = jax.random.uniform(\n _rng_noise_lvl, shape=(B,), minval=0.0, maxval=self.max_noise_level, dtype=self.dtype\n )\n noise_BTNM = jax.random.normal(_rng_noise, shape=(B, T, N, M), dtype=self.dtype)\n noise_bucket_idx_B = jnp.floor(\n (noise_level_B * self.noise_buckets) / self.max_noise_level\n ).astype(jnp.int32)\n \n # Clip noise_bucket_idx_B to ensure it stays within valid range to prevent NaNs\n noise_bucket_idx_B = jnp.clip(noise_bucket_idx_B, 0, self.noise_buckets - 1)\n\n noise_bucket_idx_B11 = noise_bucket_idx_B.reshape(B, 1, 1)\n 
noise_level_embed_B11M = self.noise_level_embed(noise_bucket_idx_B11)\n noise_level_embed_BT1M = jnp.tile(noise_level_embed_B11M, (1, T, 1, 1))\n noise_level_B111 = noise_level_B.reshape(B, 1, 1, 1)\n\n noise_augmented_vid_embed_BTNM = jnp.sqrt(1 - noise_level_B111) * vid_embed_BTNM + noise_level_B111 * noise_BTNM\n\n return noise_augmented_vid_embed_BTNM, noise_level_embed_BT1M\n",python,selection_mouse +489,465007,"jasmine/models/dynamics.py",2242,1219," def _apply_noise_augmentation(self, vid_embed_BTNM, rng):\n B, T, N, M = vid_embed_BTNM.shape\n rng, _rng_noise_lvl, _rng_noise = jax.random.split(rng, 3)\n noise_level_B = jax.random.uniform(\n _rng_noise_lvl, shape=(B,), minval=0.0, maxval=self.max_noise_level, dtype=self.dtype\n )\n noise_BTNM = jax.random.normal(_rng_noise, shape=(B, T, N, M), dtype=self.dtype)\n noise_bucket_idx_B = jnp.floor(\n (noise_level_B * self.noise_buckets) / self.max_noise_level\n ).astype(jnp.int32)\n \n # Clip noise_bucket_idx_B to ensure it stays within valid range to prevent NaNs\n noise_bucket_idx_B = jnp.clip(noise_bucket_idx_B, 0, self.noise_buckets - 1)\n\n noise_bucket_idx_B11 = noise_bucket_idx_B.reshape(B, 1, 1)\n noise_level_embed_B11M = self.noise_level_embed(noise_bucket_idx_B11)\n noise_level_embed_BT1M = jnp.tile(noise_level_embed_B11M, (1, T, 1, 1))\n noise_level_B111 = noise_level_B.reshape(B, 1, 1, 1)\n\n noise_augmented_vid_embed_BTNM = jnp.sqrt(1 - noise_level_B111) * vid_embed_BTNM + noise_level_B111 * noise_BTNM\n\n return noise_augmented_vid_embed_BTNM, noise_level_embed_BT1M\n\n ",python,selection_mouse +490,465322,"jasmine/models/dynamics.py",2242,1214," def _apply_noise_augmentation(self, vid_embed_BTNM, rng):\n B, T, N, M = vid_embed_BTNM.shape\n rng, _rng_noise_lvl, _rng_noise = jax.random.split(rng, 3)\n noise_level_B = jax.random.uniform(\n _rng_noise_lvl, shape=(B,), minval=0.0, maxval=self.max_noise_level, dtype=self.dtype\n )\n noise_BTNM = jax.random.normal(_rng_noise, shape=(B, T, N, M), dtype=self.dtype)\n noise_bucket_idx_B = jnp.floor(\n (noise_level_B * self.noise_buckets) / self.max_noise_level\n ).astype(jnp.int32)\n \n # Clip noise_bucket_idx_B to ensure it stays within valid range to prevent NaNs\n noise_bucket_idx_B = jnp.clip(noise_bucket_idx_B, 0, self.noise_buckets - 1)\n\n noise_bucket_idx_B11 = noise_bucket_idx_B.reshape(B, 1, 1)\n noise_level_embed_B11M = self.noise_level_embed(noise_bucket_idx_B11)\n noise_level_embed_BT1M = jnp.tile(noise_level_embed_B11M, (1, T, 1, 1))\n noise_level_B111 = noise_level_B.reshape(B, 1, 1, 1)\n\n noise_augmented_vid_embed_BTNM = jnp.sqrt(1 - noise_level_B111) * vid_embed_BTNM + noise_level_B111 * noise_BTNM\n\n return noise_augmented_vid_embed_BTNM, noise_level_embed_BT1M\n",python,selection_mouse +491,465387,"jasmine/models/dynamics.py",2242,1146," def _apply_noise_augmentation(self, vid_embed_BTNM, rng):\n B, T, N, M = vid_embed_BTNM.shape\n rng, _rng_noise_lvl, _rng_noise = jax.random.split(rng, 3)\n noise_level_B = jax.random.uniform(\n _rng_noise_lvl, shape=(B,), minval=0.0, maxval=self.max_noise_level, dtype=self.dtype\n )\n noise_BTNM = jax.random.normal(_rng_noise, shape=(B, T, N, M), dtype=self.dtype)\n noise_bucket_idx_B = jnp.floor(\n (noise_level_B * self.noise_buckets) / self.max_noise_level\n ).astype(jnp.int32)\n \n # Clip noise_bucket_idx_B to ensure it stays within valid range to prevent NaNs\n noise_bucket_idx_B = jnp.clip(noise_bucket_idx_B, 0, self.noise_buckets - 1)\n\n noise_bucket_idx_B11 = noise_bucket_idx_B.reshape(B, 1, 1)\n noise_level_embed_B11M 
= self.noise_level_embed(noise_bucket_idx_B11)\n noise_level_embed_BT1M = jnp.tile(noise_level_embed_B11M, (1, T, 1, 1))\n noise_level_B111 = noise_level_B.reshape(B, 1, 1, 1)\n\n noise_augmented_vid_embed_BTNM = jnp.sqrt(1 - noise_level_B111) * vid_embed_BTNM + noise_level_B111 * noise_BTNM\n\n ",python,selection_mouse +492,465746,"jasmine/models/dynamics.py",2242,1214," def _apply_noise_augmentation(self, vid_embed_BTNM, rng):\n B, T, N, M = vid_embed_BTNM.shape\n rng, _rng_noise_lvl, _rng_noise = jax.random.split(rng, 3)\n noise_level_B = jax.random.uniform(\n _rng_noise_lvl, shape=(B,), minval=0.0, maxval=self.max_noise_level, dtype=self.dtype\n )\n noise_BTNM = jax.random.normal(_rng_noise, shape=(B, T, N, M), dtype=self.dtype)\n noise_bucket_idx_B = jnp.floor(\n (noise_level_B * self.noise_buckets) / self.max_noise_level\n ).astype(jnp.int32)\n \n # Clip noise_bucket_idx_B to ensure it stays within valid range to prevent NaNs\n noise_bucket_idx_B = jnp.clip(noise_bucket_idx_B, 0, self.noise_buckets - 1)\n\n noise_bucket_idx_B11 = noise_bucket_idx_B.reshape(B, 1, 1)\n noise_level_embed_B11M = self.noise_level_embed(noise_bucket_idx_B11)\n noise_level_embed_BT1M = jnp.tile(noise_level_embed_B11M, (1, T, 1, 1))\n noise_level_B111 = noise_level_B.reshape(B, 1, 1, 1)\n\n noise_augmented_vid_embed_BTNM = jnp.sqrt(1 - noise_level_B111) * vid_embed_BTNM + noise_level_B111 * noise_BTNM\n\n return noise_augmented_vid_embed_BTNM, noise_level_embed_BT1M\n",python,selection_mouse +493,466903,"jasmine/models/dynamics.py",3385,0,"",python,selection_mouse +494,467588,"jasmine/models/dynamics.py",3455,0,"",python,selection_mouse +495,467590,"jasmine/models/dynamics.py",3454,0,"",python,selection_command +496,467732,"jasmine/models/dynamics.py",3433,22,"noise_level_embed_BT1M",python,selection_mouse +497,467773,"jasmine/models/dynamics.py",3434,21,"oise_level_embed_BT1M",python,selection_command +498,467947,"jasmine/models/dynamics.py",3385,49,"\n return noise_augmented_vid_embed_BTNM, n",python,selection_mouse +499,467977,"jasmine/models/dynamics.py",3263,171,"\n noise_augmented_vid_embed_BTNM = jnp.sqrt(1 - noise_level_B111) * vid_embed_BTNM + noise_level_B111 * noise_BTNM\n\n return noise_augmented_vid_embed_BTNM, n",python,selection_mouse +500,467978,"jasmine/models/dynamics.py",3258,176,", 1)\n\n noise_augmented_vid_embed_BTNM = jnp.sqrt(1 - noise_level_B111) * vid_embed_BTNM + noise_level_B111 * noise_BTNM\n\n return noise_augmented_vid_embed_BTNM, n",python,selection_mouse +501,467991,"jasmine/models/dynamics.py",3164,270,"noise_level_embed_B11M, (1, T, 1, 1))\n noise_level_B111 = noise_level_B.reshape(B, 1, 1, 1)\n\n noise_augmented_vid_embed_BTNM = jnp.sqrt(1 - noise_level_B111) * vid_embed_BTNM + noise_level_B111 * noise_BTNM\n\n return noise_augmented_vid_embed_BTNM, n",python,selection_mouse +502,468016,"jasmine/models/dynamics.py",3082,352,"noise_level_embed(noise_bucket_idx_B11)\n noise_level_embed_BT1M = jnp.tile(noise_level_embed_B11M, (1, T, 1, 1))\n noise_level_B111 = noise_level_B.reshape(B, 1, 1, 1)\n\n noise_augmented_vid_embed_BTNM = jnp.sqrt(1 - noise_level_B111) * vid_embed_BTNM + noise_level_B111 * noise_BTNM\n\n return noise_augmented_vid_embed_BTNM, n",python,selection_mouse +503,468036,"jasmine/models/dynamics.py",3008,426,"noise_bucket_idx_B.reshape(B, 1, 1)\n noise_level_embed_B11M = self.noise_level_embed(noise_bucket_idx_B11)\n noise_level_embed_BT1M = jnp.tile(noise_level_embed_B11M, (1, T, 1, 1))\n noise_level_B111 = noise_level_B.reshape(B, 1, 1, 1)\n\n 
noise_augmented_vid_embed_BTNM = jnp.sqrt(1 - noise_level_B111) * vid_embed_BTNM + noise_level_B111 * noise_BTNM\n\n return noise_augmented_vid_embed_BTNM, n",python,selection_mouse +504,468037,"jasmine/models/dynamics.py",2818,616,"noise_bucket_idx_B to ensure it stays within valid range to prevent NaNs\n noise_bucket_idx_B = jnp.clip(noise_bucket_idx_B, 0, self.noise_buckets - 1)\n\n noise_bucket_idx_B11 = noise_bucket_idx_B.reshape(B, 1, 1)\n noise_level_embed_B11M = self.noise_level_embed(noise_bucket_idx_B11)\n noise_level_embed_BT1M = jnp.tile(noise_level_embed_B11M, (1, T, 1, 1))\n noise_level_B111 = noise_level_B.reshape(B, 1, 1, 1)\n\n noise_augmented_vid_embed_BTNM = jnp.sqrt(1 - noise_level_B111) * vid_embed_BTNM + noise_level_B111 * noise_BTNM\n\n return noise_augmented_vid_embed_BTNM, n",python,selection_mouse +505,468058,"jasmine/models/dynamics.py",2802,632,"\n # Clip noise_bucket_idx_B to ensure it stays within valid range to prevent NaNs\n noise_bucket_idx_B = jnp.clip(noise_bucket_idx_B, 0, self.noise_buckets - 1)\n\n noise_bucket_idx_B11 = noise_bucket_idx_B.reshape(B, 1, 1)\n noise_level_embed_B11M = self.noise_level_embed(noise_bucket_idx_B11)\n noise_level_embed_BT1M = jnp.tile(noise_level_embed_B11M, (1, T, 1, 1))\n noise_level_B111 = noise_level_B.reshape(B, 1, 1, 1)\n\n noise_augmented_vid_embed_BTNM = jnp.sqrt(1 - noise_level_B111) * vid_embed_BTNM + noise_level_B111 * noise_BTNM\n\n return noise_augmented_vid_embed_BTNM, n",python,selection_mouse +506,468091,"jasmine/models/dynamics.py",2707,727,"noise_level_B * self.noise_buckets) / self.max_noise_level\n ).astype(jnp.int32)\n \n # Clip noise_bucket_idx_B to ensure it stays within valid range to prevent NaNs\n noise_bucket_idx_B = jnp.clip(noise_bucket_idx_B, 0, self.noise_buckets - 1)\n\n noise_bucket_idx_B11 = noise_bucket_idx_B.reshape(B, 1, 1)\n noise_level_embed_B11M = self.noise_level_embed(noise_bucket_idx_B11)\n noise_level_embed_BT1M = jnp.tile(noise_level_embed_B11M, (1, T, 1, 1))\n noise_level_B111 = noise_level_B.reshape(B, 1, 1, 1)\n\n noise_augmented_vid_embed_BTNM = jnp.sqrt(1 - noise_level_B111) * vid_embed_BTNM + noise_level_B111 * noise_BTNM\n\n return noise_augmented_vid_embed_BTNM, n",python,selection_mouse +507,468162,"jasmine/models/dynamics.py",2573,861,"noise_BTNM = jax.random.normal(_rng_noise, shape=(B, T, N, M), dtype=self.dtype)\n noise_bucket_idx_B = jnp.floor(\n (noise_level_B * self.noise_buckets) / self.max_noise_level\n ).astype(jnp.int32)\n \n # Clip noise_bucket_idx_B to ensure it stays within valid range to prevent NaNs\n noise_bucket_idx_B = jnp.clip(noise_bucket_idx_B, 0, self.noise_buckets - 1)\n\n noise_bucket_idx_B11 = noise_bucket_idx_B.reshape(B, 1, 1)\n noise_level_embed_B11M = self.noise_level_embed(noise_bucket_idx_B11)\n noise_level_embed_BT1M = jnp.tile(noise_level_embed_B11M, (1, T, 1, 1))\n noise_level_B111 = noise_level_B.reshape(B, 1, 1, 1)\n\n noise_augmented_vid_embed_BTNM = jnp.sqrt(1 - noise_level_B111) * vid_embed_BTNM + noise_level_B111 * noise_BTNM\n\n return noise_augmented_vid_embed_BTNM, n",python,selection_mouse +508,468162,"jasmine/models/dynamics.py",2468,966," _rng_noise_lvl, shape=(B,), minval=0.0, maxval=self.max_noise_level, dtype=self.dtype\n )\n noise_BTNM = jax.random.normal(_rng_noise, shape=(B, T, N, M), dtype=self.dtype)\n noise_bucket_idx_B = jnp.floor(\n (noise_level_B * self.noise_buckets) / self.max_noise_level\n ).astype(jnp.int32)\n \n # Clip noise_bucket_idx_B to ensure it stays within valid range to prevent NaNs\n 
noise_bucket_idx_B = jnp.clip(noise_bucket_idx_B, 0, self.noise_buckets - 1)\n\n noise_bucket_idx_B11 = noise_bucket_idx_B.reshape(B, 1, 1)\n noise_level_embed_B11M = self.noise_level_embed(noise_bucket_idx_B11)\n noise_level_embed_BT1M = jnp.tile(noise_level_embed_B11M, (1, T, 1, 1))\n noise_level_B111 = noise_level_B.reshape(B, 1, 1, 1)\n\n noise_augmented_vid_embed_BTNM = jnp.sqrt(1 - noise_level_B111) * vid_embed_BTNM + noise_level_B111 * noise_BTNM\n\n return noise_augmented_vid_embed_BTNM, n",python,selection_mouse +509,468163,"jasmine/models/dynamics.py",2467,967," _rng_noise_lvl, shape=(B,), minval=0.0, maxval=self.max_noise_level, dtype=self.dtype\n )\n noise_BTNM = jax.random.normal(_rng_noise, shape=(B, T, N, M), dtype=self.dtype)\n noise_bucket_idx_B = jnp.floor(\n (noise_level_B * self.noise_buckets) / self.max_noise_level\n ).astype(jnp.int32)\n \n # Clip noise_bucket_idx_B to ensure it stays within valid range to prevent NaNs\n noise_bucket_idx_B = jnp.clip(noise_bucket_idx_B, 0, self.noise_buckets - 1)\n\n noise_bucket_idx_B11 = noise_bucket_idx_B.reshape(B, 1, 1)\n noise_level_embed_B11M = self.noise_level_embed(noise_bucket_idx_B11)\n noise_level_embed_BT1M = jnp.tile(noise_level_embed_B11M, (1, T, 1, 1))\n noise_level_B111 = noise_level_B.reshape(B, 1, 1, 1)\n\n noise_augmented_vid_embed_BTNM = jnp.sqrt(1 - noise_level_B111) * vid_embed_BTNM + noise_level_B111 * noise_BTNM\n\n return noise_augmented_vid_embed_BTNM, n",python,selection_mouse +510,468163,"jasmine/models/dynamics.py",2466,968," _rng_noise_lvl, shape=(B,), minval=0.0, maxval=self.max_noise_level, dtype=self.dtype\n )\n noise_BTNM = jax.random.normal(_rng_noise, shape=(B, T, N, M), dtype=self.dtype)\n noise_bucket_idx_B = jnp.floor(\n (noise_level_B * self.noise_buckets) / self.max_noise_level\n ).astype(jnp.int32)\n \n # Clip noise_bucket_idx_B to ensure it stays within valid range to prevent NaNs\n noise_bucket_idx_B = jnp.clip(noise_bucket_idx_B, 0, self.noise_buckets - 1)\n\n noise_bucket_idx_B11 = noise_bucket_idx_B.reshape(B, 1, 1)\n noise_level_embed_B11M = self.noise_level_embed(noise_bucket_idx_B11)\n noise_level_embed_BT1M = jnp.tile(noise_level_embed_B11M, (1, T, 1, 1))\n noise_level_B111 = noise_level_B.reshape(B, 1, 1, 1)\n\n noise_augmented_vid_embed_BTNM = jnp.sqrt(1 - noise_level_B111) * vid_embed_BTNM + noise_level_B111 * noise_BTNM\n\n return noise_augmented_vid_embed_BTNM, n",python,selection_mouse +511,468208,"jasmine/models/dynamics.py",2421,1013,"noise_level_B = jax.random.uniform(\n _rng_noise_lvl, shape=(B,), minval=0.0, maxval=self.max_noise_level, dtype=self.dtype\n )\n noise_BTNM = jax.random.normal(_rng_noise, shape=(B, T, N, M), dtype=self.dtype)\n noise_bucket_idx_B = jnp.floor(\n (noise_level_B * self.noise_buckets) / self.max_noise_level\n ).astype(jnp.int32)\n \n # Clip noise_bucket_idx_B to ensure it stays within valid range to prevent NaNs\n noise_bucket_idx_B = jnp.clip(noise_bucket_idx_B, 0, self.noise_buckets - 1)\n\n noise_bucket_idx_B11 = noise_bucket_idx_B.reshape(B, 1, 1)\n noise_level_embed_B11M = self.noise_level_embed(noise_bucket_idx_B11)\n noise_level_embed_BT1M = jnp.tile(noise_level_embed_B11M, (1, T, 1, 1))\n noise_level_B111 = noise_level_B.reshape(B, 1, 1, 1)\n\n noise_augmented_vid_embed_BTNM = jnp.sqrt(1 - noise_level_B111) * vid_embed_BTNM + noise_level_B111 * noise_BTNM\n\n return noise_augmented_vid_embed_BTNM, n",python,selection_mouse +512,468264,"jasmine/models/dynamics.py",2353,1081," rng, _rng_noise_lvl, _rng_noise = jax.random.split(rng, 3)\n 
noise_level_B = jax.random.uniform(\n _rng_noise_lvl, shape=(B,), minval=0.0, maxval=self.max_noise_level, dtype=self.dtype\n )\n noise_BTNM = jax.random.normal(_rng_noise, shape=(B, T, N, M), dtype=self.dtype)\n noise_bucket_idx_B = jnp.floor(\n (noise_level_B * self.noise_buckets) / self.max_noise_level\n ).astype(jnp.int32)\n \n # Clip noise_bucket_idx_B to ensure it stays within valid range to prevent NaNs\n noise_bucket_idx_B = jnp.clip(noise_bucket_idx_B, 0, self.noise_buckets - 1)\n\n noise_bucket_idx_B11 = noise_bucket_idx_B.reshape(B, 1, 1)\n noise_level_embed_B11M = self.noise_level_embed(noise_bucket_idx_B11)\n noise_level_embed_BT1M = jnp.tile(noise_level_embed_B11M, (1, T, 1, 1))\n noise_level_B111 = noise_level_B.reshape(B, 1, 1, 1)\n\n noise_augmented_vid_embed_BTNM = jnp.sqrt(1 - noise_level_B111) * vid_embed_BTNM + noise_level_B111 * noise_BTNM\n\n return noise_augmented_vid_embed_BTNM, n",python,selection_mouse +513,468289,"jasmine/models/dynamics.py",2310,1124," B, T, N, M = vid_embed_BTNM.shape\n rng, _rng_noise_lvl, _rng_noise = jax.random.split(rng, 3)\n noise_level_B = jax.random.uniform(\n _rng_noise_lvl, shape=(B,), minval=0.0, maxval=self.max_noise_level, dtype=self.dtype\n )\n noise_BTNM = jax.random.normal(_rng_noise, shape=(B, T, N, M), dtype=self.dtype)\n noise_bucket_idx_B = jnp.floor(\n (noise_level_B * self.noise_buckets) / self.max_noise_level\n ).astype(jnp.int32)\n \n # Clip noise_bucket_idx_B to ensure it stays within valid range to prevent NaNs\n noise_bucket_idx_B = jnp.clip(noise_bucket_idx_B, 0, self.noise_buckets - 1)\n\n noise_bucket_idx_B11 = noise_bucket_idx_B.reshape(B, 1, 1)\n noise_level_embed_B11M = self.noise_level_embed(noise_bucket_idx_B11)\n noise_level_embed_BT1M = jnp.tile(noise_level_embed_B11M, (1, T, 1, 1))\n noise_level_B111 = noise_level_B.reshape(B, 1, 1, 1)\n\n noise_augmented_vid_embed_BTNM = jnp.sqrt(1 - noise_level_B111) * vid_embed_BTNM + noise_level_B111 * noise_BTNM\n\n return noise_augmented_vid_embed_BTNM, n",python,selection_mouse +514,468308,"jasmine/models/dynamics.py",2246,1188,"def _apply_noise_augmentation(self, vid_embed_BTNM, rng):\n B, T, N, M = vid_embed_BTNM.shape\n rng, _rng_noise_lvl, _rng_noise = jax.random.split(rng, 3)\n noise_level_B = jax.random.uniform(\n _rng_noise_lvl, shape=(B,), minval=0.0, maxval=self.max_noise_level, dtype=self.dtype\n )\n noise_BTNM = jax.random.normal(_rng_noise, shape=(B, T, N, M), dtype=self.dtype)\n noise_bucket_idx_B = jnp.floor(\n (noise_level_B * self.noise_buckets) / self.max_noise_level\n ).astype(jnp.int32)\n \n # Clip noise_bucket_idx_B to ensure it stays within valid range to prevent NaNs\n noise_bucket_idx_B = jnp.clip(noise_bucket_idx_B, 0, self.noise_buckets - 1)\n\n noise_bucket_idx_B11 = noise_bucket_idx_B.reshape(B, 1, 1)\n noise_level_embed_B11M = self.noise_level_embed(noise_bucket_idx_B11)\n noise_level_embed_BT1M = jnp.tile(noise_level_embed_B11M, (1, T, 1, 1))\n noise_level_B111 = noise_level_B.reshape(B, 1, 1, 1)\n\n noise_augmented_vid_embed_BTNM = jnp.sqrt(1 - noise_level_B111) * vid_embed_BTNM + noise_level_B111 * noise_BTNM\n\n return noise_augmented_vid_embed_BTNM, n",python,selection_mouse +515,468390,"jasmine/models/dynamics.py",2241,1193,"\n def _apply_noise_augmentation(self, vid_embed_BTNM, rng):\n B, T, N, M = vid_embed_BTNM.shape\n rng, _rng_noise_lvl, _rng_noise = jax.random.split(rng, 3)\n noise_level_B = jax.random.uniform(\n _rng_noise_lvl, shape=(B,), minval=0.0, maxval=self.max_noise_level, dtype=self.dtype\n )\n noise_BTNM = 
jax.random.normal(_rng_noise, shape=(B, T, N, M), dtype=self.dtype)\n noise_bucket_idx_B = jnp.floor(\n (noise_level_B * self.noise_buckets) / self.max_noise_level\n ).astype(jnp.int32)\n \n # Clip noise_bucket_idx_B to ensure it stays within valid range to prevent NaNs\n noise_bucket_idx_B = jnp.clip(noise_bucket_idx_B, 0, self.noise_buckets - 1)\n\n noise_bucket_idx_B11 = noise_bucket_idx_B.reshape(B, 1, 1)\n noise_level_embed_B11M = self.noise_level_embed(noise_bucket_idx_B11)\n noise_level_embed_BT1M = jnp.tile(noise_level_embed_B11M, (1, T, 1, 1))\n noise_level_B111 = noise_level_B.reshape(B, 1, 1, 1)\n\n noise_augmented_vid_embed_BTNM = jnp.sqrt(1 - noise_level_B111) * vid_embed_BTNM + noise_level_B111 * noise_BTNM\n\n return noise_augmented_vid_embed_BTNM, n",python,selection_mouse +516,468467,"jasmine/models/dynamics.py",2244,1190," def _apply_noise_augmentation(self, vid_embed_BTNM, rng):\n B, T, N, M = vid_embed_BTNM.shape\n rng, _rng_noise_lvl, _rng_noise = jax.random.split(rng, 3)\n noise_level_B = jax.random.uniform(\n _rng_noise_lvl, shape=(B,), minval=0.0, maxval=self.max_noise_level, dtype=self.dtype\n )\n noise_BTNM = jax.random.normal(_rng_noise, shape=(B, T, N, M), dtype=self.dtype)\n noise_bucket_idx_B = jnp.floor(\n (noise_level_B * self.noise_buckets) / self.max_noise_level\n ).astype(jnp.int32)\n \n # Clip noise_bucket_idx_B to ensure it stays within valid range to prevent NaNs\n noise_bucket_idx_B = jnp.clip(noise_bucket_idx_B, 0, self.noise_buckets - 1)\n\n noise_bucket_idx_B11 = noise_bucket_idx_B.reshape(B, 1, 1)\n noise_level_embed_B11M = self.noise_level_embed(noise_bucket_idx_B11)\n noise_level_embed_BT1M = jnp.tile(noise_level_embed_B11M, (1, T, 1, 1))\n noise_level_B111 = noise_level_B.reshape(B, 1, 1, 1)\n\n noise_augmented_vid_embed_BTNM = jnp.sqrt(1 - noise_level_B111) * vid_embed_BTNM + noise_level_B111 * noise_BTNM\n\n return noise_augmented_vid_embed_BTNM, n",python,selection_mouse +517,468477,"jasmine/models/dynamics.py",2243,1191," def _apply_noise_augmentation(self, vid_embed_BTNM, rng):\n B, T, N, M = vid_embed_BTNM.shape\n rng, _rng_noise_lvl, _rng_noise = jax.random.split(rng, 3)\n noise_level_B = jax.random.uniform(\n _rng_noise_lvl, shape=(B,), minval=0.0, maxval=self.max_noise_level, dtype=self.dtype\n )\n noise_BTNM = jax.random.normal(_rng_noise, shape=(B, T, N, M), dtype=self.dtype)\n noise_bucket_idx_B = jnp.floor(\n (noise_level_B * self.noise_buckets) / self.max_noise_level\n ).astype(jnp.int32)\n \n # Clip noise_bucket_idx_B to ensure it stays within valid range to prevent NaNs\n noise_bucket_idx_B = jnp.clip(noise_bucket_idx_B, 0, self.noise_buckets - 1)\n\n noise_bucket_idx_B11 = noise_bucket_idx_B.reshape(B, 1, 1)\n noise_level_embed_B11M = self.noise_level_embed(noise_bucket_idx_B11)\n noise_level_embed_BT1M = jnp.tile(noise_level_embed_B11M, (1, T, 1, 1))\n noise_level_B111 = noise_level_B.reshape(B, 1, 1, 1)\n\n noise_augmented_vid_embed_BTNM = jnp.sqrt(1 - noise_level_B111) * vid_embed_BTNM + noise_level_B111 * noise_BTNM\n\n return noise_augmented_vid_embed_BTNM, n",python,selection_mouse +518,468573,"jasmine/models/dynamics.py",2242,1192," def _apply_noise_augmentation(self, vid_embed_BTNM, rng):\n B, T, N, M = vid_embed_BTNM.shape\n rng, _rng_noise_lvl, _rng_noise = jax.random.split(rng, 3)\n noise_level_B = jax.random.uniform(\n _rng_noise_lvl, shape=(B,), minval=0.0, maxval=self.max_noise_level, dtype=self.dtype\n )\n noise_BTNM = jax.random.normal(_rng_noise, shape=(B, T, N, M), dtype=self.dtype)\n noise_bucket_idx_B = 
jnp.floor(\n (noise_level_B * self.noise_buckets) / self.max_noise_level\n ).astype(jnp.int32)\n \n # Clip noise_bucket_idx_B to ensure it stays within valid range to prevent NaNs\n noise_bucket_idx_B = jnp.clip(noise_bucket_idx_B, 0, self.noise_buckets - 1)\n\n noise_bucket_idx_B11 = noise_bucket_idx_B.reshape(B, 1, 1)\n noise_level_embed_B11M = self.noise_level_embed(noise_bucket_idx_B11)\n noise_level_embed_BT1M = jnp.tile(noise_level_embed_B11M, (1, T, 1, 1))\n noise_level_B111 = noise_level_B.reshape(B, 1, 1, 1)\n\n noise_augmented_vid_embed_BTNM = jnp.sqrt(1 - noise_level_B111) * vid_embed_BTNM + noise_level_B111 * noise_BTNM\n\n return noise_augmented_vid_embed_BTNM, n",python,selection_mouse +519,469686,"jasmine/models/dynamics.py",2243,0,"",python,selection_mouse +520,469687,"jasmine/models/dynamics.py",2242,4," ",python,selection_mouse +521,469869,"jasmine/models/dynamics.py",2242,104," def _apply_noise_augmentation(self, vid_embed_BTNM, rng):\n B, T, N, M = vid_embed_BTNM.shape\n",python,selection_mouse +522,469893,"jasmine/models/dynamics.py",2242,313," def _apply_noise_augmentation(self, vid_embed_BTNM, rng):\n B, T, N, M = vid_embed_BTNM.shape\n rng, _rng_noise_lvl, _rng_noise = jax.random.split(rng, 3)\n noise_level_B = jax.random.uniform(\n _rng_noise_lvl, shape=(B,), minval=0.0, maxval=self.max_noise_level, dtype=self.dtype\n",python,selection_mouse +523,469907,"jasmine/models/dynamics.py",2242,736," def _apply_noise_augmentation(self, vid_embed_BTNM, rng):\n B, T, N, M = vid_embed_BTNM.shape\n rng, _rng_noise_lvl, _rng_noise = jax.random.split(rng, 3)\n noise_level_B = jax.random.uniform(\n _rng_noise_lvl, shape=(B,), minval=0.0, maxval=self.max_noise_level, dtype=self.dtype\n )\n noise_BTNM = jax.random.normal(_rng_noise, shape=(B, T, N, M), dtype=self.dtype)\n noise_bucket_idx_B = jnp.floor(\n (noise_level_B * self.noise_buckets) / self.max_noise_level\n ).astype(jnp.int32)\n \n # Clip noise_bucket_idx_B to ensure it stays within valid range to prevent NaNs\n noise_bucket_idx_B = jnp.clip(noise_bucket_idx_B, 0, self.noise_buckets - 1)\n\n ",python,selection_mouse +524,469925,"jasmine/models/dynamics.py",2242,1143," def _apply_noise_augmentation(self, vid_embed_BTNM, rng):\n B, T, N, M = vid_embed_BTNM.shape\n rng, _rng_noise_lvl, _rng_noise = jax.random.split(rng, 3)\n noise_level_B = jax.random.uniform(\n _rng_noise_lvl, shape=(B,), minval=0.0, maxval=self.max_noise_level, dtype=self.dtype\n )\n noise_BTNM = jax.random.normal(_rng_noise, shape=(B, T, N, M), dtype=self.dtype)\n noise_bucket_idx_B = jnp.floor(\n (noise_level_B * self.noise_buckets) / self.max_noise_level\n ).astype(jnp.int32)\n \n # Clip noise_bucket_idx_B to ensure it stays within valid range to prevent NaNs\n noise_bucket_idx_B = jnp.clip(noise_bucket_idx_B, 0, self.noise_buckets - 1)\n\n noise_bucket_idx_B11 = noise_bucket_idx_B.reshape(B, 1, 1)\n noise_level_embed_B11M = self.noise_level_embed(noise_bucket_idx_B11)\n noise_level_embed_BT1M = jnp.tile(noise_level_embed_B11M, (1, T, 1, 1))\n noise_level_B111 = noise_level_B.reshape(B, 1, 1, 1)\n\n noise_augmented_vid_embed_BTNM = jnp.sqrt(1 - noise_level_B111) * vid_embed_BTNM + noise_level_B111 * noise_BTNM\n",python,selection_mouse +525,469943,"jasmine/models/dynamics.py",2242,1218," def _apply_noise_augmentation(self, vid_embed_BTNM, rng):\n B, T, N, M = vid_embed_BTNM.shape\n rng, _rng_noise_lvl, _rng_noise = jax.random.split(rng, 3)\n noise_level_B = jax.random.uniform(\n _rng_noise_lvl, shape=(B,), minval=0.0, maxval=self.max_noise_level, 
dtype=self.dtype\n )\n noise_BTNM = jax.random.normal(_rng_noise, shape=(B, T, N, M), dtype=self.dtype)\n noise_bucket_idx_B = jnp.floor(\n (noise_level_B * self.noise_buckets) / self.max_noise_level\n ).astype(jnp.int32)\n \n # Clip noise_bucket_idx_B to ensure it stays within valid range to prevent NaNs\n noise_bucket_idx_B = jnp.clip(noise_bucket_idx_B, 0, self.noise_buckets - 1)\n\n noise_bucket_idx_B11 = noise_bucket_idx_B.reshape(B, 1, 1)\n noise_level_embed_B11M = self.noise_level_embed(noise_bucket_idx_B11)\n noise_level_embed_BT1M = jnp.tile(noise_level_embed_B11M, (1, T, 1, 1))\n noise_level_B111 = noise_level_B.reshape(B, 1, 1, 1)\n\n noise_augmented_vid_embed_BTNM = jnp.sqrt(1 - noise_level_B111) * vid_embed_BTNM + noise_level_B111 * noise_BTNM\n\n return noise_augmented_vid_embed_BTNM, noise_level_embed_BT1M\n\n ",python,selection_mouse +526,469955,"jasmine/models/dynamics.py",2242,1288," def _apply_noise_augmentation(self, vid_embed_BTNM, rng):\n B, T, N, M = vid_embed_BTNM.shape\n rng, _rng_noise_lvl, _rng_noise = jax.random.split(rng, 3)\n noise_level_B = jax.random.uniform(\n _rng_noise_lvl, shape=(B,), minval=0.0, maxval=self.max_noise_level, dtype=self.dtype\n )\n noise_BTNM = jax.random.normal(_rng_noise, shape=(B, T, N, M), dtype=self.dtype)\n noise_bucket_idx_B = jnp.floor(\n (noise_level_B * self.noise_buckets) / self.max_noise_level\n ).astype(jnp.int32)\n \n # Clip noise_bucket_idx_B to ensure it stays within valid range to prevent NaNs\n noise_bucket_idx_B = jnp.clip(noise_bucket_idx_B, 0, self.noise_buckets - 1)\n\n noise_bucket_idx_B11 = noise_bucket_idx_B.reshape(B, 1, 1)\n noise_level_embed_B11M = self.noise_level_embed(noise_bucket_idx_B11)\n noise_level_embed_BT1M = jnp.tile(noise_level_embed_B11M, (1, T, 1, 1))\n noise_level_B111 = noise_level_B.reshape(B, 1, 1, 1)\n\n noise_augmented_vid_embed_BTNM = jnp.sqrt(1 - noise_level_B111) * vid_embed_BTNM + noise_level_B111 * noise_BTNM\n\n return noise_augmented_vid_embed_BTNM, noise_level_embed_BT1M\n\n def __call__(\n self,\n batch: Dict[str, jax.Array],\n ",python,selection_mouse +527,469973,"jasmine/models/dynamics.py",2242,1327," def _apply_noise_augmentation(self, vid_embed_BTNM, rng):\n B, T, N, M = vid_embed_BTNM.shape\n rng, _rng_noise_lvl, _rng_noise = jax.random.split(rng, 3)\n noise_level_B = jax.random.uniform(\n _rng_noise_lvl, shape=(B,), minval=0.0, maxval=self.max_noise_level, dtype=self.dtype\n )\n noise_BTNM = jax.random.normal(_rng_noise, shape=(B, T, N, M), dtype=self.dtype)\n noise_bucket_idx_B = jnp.floor(\n (noise_level_B * self.noise_buckets) / self.max_noise_level\n ).astype(jnp.int32)\n \n # Clip noise_bucket_idx_B to ensure it stays within valid range to prevent NaNs\n noise_bucket_idx_B = jnp.clip(noise_bucket_idx_B, 0, self.noise_buckets - 1)\n\n noise_bucket_idx_B11 = noise_bucket_idx_B.reshape(B, 1, 1)\n noise_level_embed_B11M = self.noise_level_embed(noise_bucket_idx_B11)\n noise_level_embed_BT1M = jnp.tile(noise_level_embed_B11M, (1, T, 1, 1))\n noise_level_B111 = noise_level_B.reshape(B, 1, 1, 1)\n\n noise_augmented_vid_embed_BTNM = jnp.sqrt(1 - noise_level_B111) * vid_embed_BTNM + noise_level_B111 * noise_BTNM\n\n return noise_augmented_vid_embed_BTNM, noise_level_embed_BT1M\n\n def __call__(\n self,\n batch: Dict[str, jax.Array],\n ) -> tuple[jax.Array, jax.Array]:\n ",python,selection_mouse +528,470000,"jasmine/models/dynamics.py",2242,1357," def _apply_noise_augmentation(self, vid_embed_BTNM, rng):\n B, T, N, M = vid_embed_BTNM.shape\n rng, _rng_noise_lvl, _rng_noise = 
jax.random.split(rng, 3)\n noise_level_B = jax.random.uniform(\n _rng_noise_lvl, shape=(B,), minval=0.0, maxval=self.max_noise_level, dtype=self.dtype\n )\n noise_BTNM = jax.random.normal(_rng_noise, shape=(B, T, N, M), dtype=self.dtype)\n noise_bucket_idx_B = jnp.floor(\n (noise_level_B * self.noise_buckets) / self.max_noise_level\n ).astype(jnp.int32)\n \n # Clip noise_bucket_idx_B to ensure it stays within valid range to prevent NaNs\n noise_bucket_idx_B = jnp.clip(noise_bucket_idx_B, 0, self.noise_buckets - 1)\n\n noise_bucket_idx_B11 = noise_bucket_idx_B.reshape(B, 1, 1)\n noise_level_embed_B11M = self.noise_level_embed(noise_bucket_idx_B11)\n noise_level_embed_BT1M = jnp.tile(noise_level_embed_B11M, (1, T, 1, 1))\n noise_level_B111 = noise_level_B.reshape(B, 1, 1, 1)\n\n noise_augmented_vid_embed_BTNM = jnp.sqrt(1 - noise_level_B111) * vid_embed_BTNM + noise_level_B111 * noise_BTNM\n\n return noise_augmented_vid_embed_BTNM, noise_level_embed_BT1M\n\n def __call__(\n self,\n batch: Dict[str, jax.Array],\n ) -> tuple[jax.Array, jax.Array]:\n # --- Mask videos ---\n ",python,selection_mouse +529,470012,"jasmine/models/dynamics.py",2242,1358," def _apply_noise_augmentation(self, vid_embed_BTNM, rng):\n B, T, N, M = vid_embed_BTNM.shape\n rng, _rng_noise_lvl, _rng_noise = jax.random.split(rng, 3)\n noise_level_B = jax.random.uniform(\n _rng_noise_lvl, shape=(B,), minval=0.0, maxval=self.max_noise_level, dtype=self.dtype\n )\n noise_BTNM = jax.random.normal(_rng_noise, shape=(B, T, N, M), dtype=self.dtype)\n noise_bucket_idx_B = jnp.floor(\n (noise_level_B * self.noise_buckets) / self.max_noise_level\n ).astype(jnp.int32)\n \n # Clip noise_bucket_idx_B to ensure it stays within valid range to prevent NaNs\n noise_bucket_idx_B = jnp.clip(noise_bucket_idx_B, 0, self.noise_buckets - 1)\n\n noise_bucket_idx_B11 = noise_bucket_idx_B.reshape(B, 1, 1)\n noise_level_embed_B11M = self.noise_level_embed(noise_bucket_idx_B11)\n noise_level_embed_BT1M = jnp.tile(noise_level_embed_B11M, (1, T, 1, 1))\n noise_level_B111 = noise_level_B.reshape(B, 1, 1, 1)\n\n noise_augmented_vid_embed_BTNM = jnp.sqrt(1 - noise_level_B111) * vid_embed_BTNM + noise_level_B111 * noise_BTNM\n\n return noise_augmented_vid_embed_BTNM, noise_level_embed_BT1M\n\n def __call__(\n self,\n batch: Dict[str, jax.Array],\n ) -> tuple[jax.Array, jax.Array]:\n # --- Mask videos ---\n ",python,selection_mouse +530,470054,"jasmine/models/dynamics.py",2242,1407," def _apply_noise_augmentation(self, vid_embed_BTNM, rng):\n B, T, N, M = vid_embed_BTNM.shape\n rng, _rng_noise_lvl, _rng_noise = jax.random.split(rng, 3)\n noise_level_B = jax.random.uniform(\n _rng_noise_lvl, shape=(B,), minval=0.0, maxval=self.max_noise_level, dtype=self.dtype\n )\n noise_BTNM = jax.random.normal(_rng_noise, shape=(B, T, N, M), dtype=self.dtype)\n noise_bucket_idx_B = jnp.floor(\n (noise_level_B * self.noise_buckets) / self.max_noise_level\n ).astype(jnp.int32)\n \n # Clip noise_bucket_idx_B to ensure it stays within valid range to prevent NaNs\n noise_bucket_idx_B = jnp.clip(noise_bucket_idx_B, 0, self.noise_buckets - 1)\n\n noise_bucket_idx_B11 = noise_bucket_idx_B.reshape(B, 1, 1)\n noise_level_embed_B11M = self.noise_level_embed(noise_bucket_idx_B11)\n noise_level_embed_BT1M = jnp.tile(noise_level_embed_B11M, (1, T, 1, 1))\n noise_level_B111 = noise_level_B.reshape(B, 1, 1, 1)\n\n noise_augmented_vid_embed_BTNM = jnp.sqrt(1 - noise_level_B111) * vid_embed_BTNM + noise_level_B111 * noise_BTNM\n\n return noise_augmented_vid_embed_BTNM, 
noise_level_embed_BT1M\n\n def __call__(\n self,\n batch: Dict[str, jax.Array],\n ) -> tuple[jax.Array, jax.Array]:\n # --- Mask videos ---\n video_tokens_BTN = batch[""video_tokens""]\n ",python,selection_mouse +531,470232,"jasmine/models/dynamics.py",2242,1358," def _apply_noise_augmentation(self, vid_embed_BTNM, rng):\n B, T, N, M = vid_embed_BTNM.shape\n rng, _rng_noise_lvl, _rng_noise = jax.random.split(rng, 3)\n noise_level_B = jax.random.uniform(\n _rng_noise_lvl, shape=(B,), minval=0.0, maxval=self.max_noise_level, dtype=self.dtype\n )\n noise_BTNM = jax.random.normal(_rng_noise, shape=(B, T, N, M), dtype=self.dtype)\n noise_bucket_idx_B = jnp.floor(\n (noise_level_B * self.noise_buckets) / self.max_noise_level\n ).astype(jnp.int32)\n \n # Clip noise_bucket_idx_B to ensure it stays within valid range to prevent NaNs\n noise_bucket_idx_B = jnp.clip(noise_bucket_idx_B, 0, self.noise_buckets - 1)\n\n noise_bucket_idx_B11 = noise_bucket_idx_B.reshape(B, 1, 1)\n noise_level_embed_B11M = self.noise_level_embed(noise_bucket_idx_B11)\n noise_level_embed_BT1M = jnp.tile(noise_level_embed_B11M, (1, T, 1, 1))\n noise_level_B111 = noise_level_B.reshape(B, 1, 1, 1)\n\n noise_augmented_vid_embed_BTNM = jnp.sqrt(1 - noise_level_B111) * vid_embed_BTNM + noise_level_B111 * noise_BTNM\n\n return noise_augmented_vid_embed_BTNM, noise_level_embed_BT1M\n\n def __call__(\n self,\n batch: Dict[str, jax.Array],\n ) -> tuple[jax.Array, jax.Array]:\n # --- Mask videos ---\n ",python,selection_mouse +532,470282,"jasmine/models/dynamics.py",2242,1328," def _apply_noise_augmentation(self, vid_embed_BTNM, rng):\n B, T, N, M = vid_embed_BTNM.shape\n rng, _rng_noise_lvl, _rng_noise = jax.random.split(rng, 3)\n noise_level_B = jax.random.uniform(\n _rng_noise_lvl, shape=(B,), minval=0.0, maxval=self.max_noise_level, dtype=self.dtype\n )\n noise_BTNM = jax.random.normal(_rng_noise, shape=(B, T, N, M), dtype=self.dtype)\n noise_bucket_idx_B = jnp.floor(\n (noise_level_B * self.noise_buckets) / self.max_noise_level\n ).astype(jnp.int32)\n \n # Clip noise_bucket_idx_B to ensure it stays within valid range to prevent NaNs\n noise_bucket_idx_B = jnp.clip(noise_bucket_idx_B, 0, self.noise_buckets - 1)\n\n noise_bucket_idx_B11 = noise_bucket_idx_B.reshape(B, 1, 1)\n noise_level_embed_B11M = self.noise_level_embed(noise_bucket_idx_B11)\n noise_level_embed_BT1M = jnp.tile(noise_level_embed_B11M, (1, T, 1, 1))\n noise_level_B111 = noise_level_B.reshape(B, 1, 1, 1)\n\n noise_augmented_vid_embed_BTNM = jnp.sqrt(1 - noise_level_B111) * vid_embed_BTNM + noise_level_B111 * noise_BTNM\n\n return noise_augmented_vid_embed_BTNM, noise_level_embed_BT1M\n\n def __call__(\n self,\n batch: Dict[str, jax.Array],\n ) -> tuple[jax.Array, jax.Array]:\n ",python,selection_mouse +533,470301,"jasmine/models/dynamics.py",2242,1290," def _apply_noise_augmentation(self, vid_embed_BTNM, rng):\n B, T, N, M = vid_embed_BTNM.shape\n rng, _rng_noise_lvl, _rng_noise = jax.random.split(rng, 3)\n noise_level_B = jax.random.uniform(\n _rng_noise_lvl, shape=(B,), minval=0.0, maxval=self.max_noise_level, dtype=self.dtype\n )\n noise_BTNM = jax.random.normal(_rng_noise, shape=(B, T, N, M), dtype=self.dtype)\n noise_bucket_idx_B = jnp.floor(\n (noise_level_B * self.noise_buckets) / self.max_noise_level\n ).astype(jnp.int32)\n \n # Clip noise_bucket_idx_B to ensure it stays within valid range to prevent NaNs\n noise_bucket_idx_B = jnp.clip(noise_bucket_idx_B, 0, self.noise_buckets - 1)\n\n noise_bucket_idx_B11 = noise_bucket_idx_B.reshape(B, 1, 1)\n 
noise_level_embed_B11M = self.noise_level_embed(noise_bucket_idx_B11)\n noise_level_embed_BT1M = jnp.tile(noise_level_embed_B11M, (1, T, 1, 1))\n noise_level_B111 = noise_level_B.reshape(B, 1, 1, 1)\n\n noise_augmented_vid_embed_BTNM = jnp.sqrt(1 - noise_level_B111) * vid_embed_BTNM + noise_level_B111 * noise_BTNM\n\n return noise_augmented_vid_embed_BTNM, noise_level_embed_BT1M\n\n def __call__(\n self,\n batch: Dict[str, jax.Array],\n ) ",python,selection_mouse +534,470319,"jasmine/models/dynamics.py",2242,1253," def _apply_noise_augmentation(self, vid_embed_BTNM, rng):\n B, T, N, M = vid_embed_BTNM.shape\n rng, _rng_noise_lvl, _rng_noise = jax.random.split(rng, 3)\n noise_level_B = jax.random.uniform(\n _rng_noise_lvl, shape=(B,), minval=0.0, maxval=self.max_noise_level, dtype=self.dtype\n )\n noise_BTNM = jax.random.normal(_rng_noise, shape=(B, T, N, M), dtype=self.dtype)\n noise_bucket_idx_B = jnp.floor(\n (noise_level_B * self.noise_buckets) / self.max_noise_level\n ).astype(jnp.int32)\n \n # Clip noise_bucket_idx_B to ensure it stays within valid range to prevent NaNs\n noise_bucket_idx_B = jnp.clip(noise_bucket_idx_B, 0, self.noise_buckets - 1)\n\n noise_bucket_idx_B11 = noise_bucket_idx_B.reshape(B, 1, 1)\n noise_level_embed_B11M = self.noise_level_embed(noise_bucket_idx_B11)\n noise_level_embed_BT1M = jnp.tile(noise_level_embed_B11M, (1, T, 1, 1))\n noise_level_B111 = noise_level_B.reshape(B, 1, 1, 1)\n\n noise_augmented_vid_embed_BTNM = jnp.sqrt(1 - noise_level_B111) * vid_embed_BTNM + noise_level_B111 * noise_BTNM\n\n return noise_augmented_vid_embed_BTNM, noise_level_embed_BT1M\n\n def __call__(\n self,\n ",python,selection_mouse +535,470340,"jasmine/models/dynamics.py",2242,1214," def _apply_noise_augmentation(self, vid_embed_BTNM, rng):\n B, T, N, M = vid_embed_BTNM.shape\n rng, _rng_noise_lvl, _rng_noise = jax.random.split(rng, 3)\n noise_level_B = jax.random.uniform(\n _rng_noise_lvl, shape=(B,), minval=0.0, maxval=self.max_noise_level, dtype=self.dtype\n )\n noise_BTNM = jax.random.normal(_rng_noise, shape=(B, T, N, M), dtype=self.dtype)\n noise_bucket_idx_B = jnp.floor(\n (noise_level_B * self.noise_buckets) / self.max_noise_level\n ).astype(jnp.int32)\n \n # Clip noise_bucket_idx_B to ensure it stays within valid range to prevent NaNs\n noise_bucket_idx_B = jnp.clip(noise_bucket_idx_B, 0, self.noise_buckets - 1)\n\n noise_bucket_idx_B11 = noise_bucket_idx_B.reshape(B, 1, 1)\n noise_level_embed_B11M = self.noise_level_embed(noise_bucket_idx_B11)\n noise_level_embed_BT1M = jnp.tile(noise_level_embed_B11M, (1, T, 1, 1))\n noise_level_B111 = noise_level_B.reshape(B, 1, 1, 1)\n\n noise_augmented_vid_embed_BTNM = jnp.sqrt(1 - noise_level_B111) * vid_embed_BTNM + noise_level_B111 * noise_BTNM\n\n return noise_augmented_vid_embed_BTNM, noise_level_embed_BT1M\n",python,selection_mouse +536,470380,"jasmine/models/dynamics.py",2242,1150," def _apply_noise_augmentation(self, vid_embed_BTNM, rng):\n B, T, N, M = vid_embed_BTNM.shape\n rng, _rng_noise_lvl, _rng_noise = jax.random.split(rng, 3)\n noise_level_B = jax.random.uniform(\n _rng_noise_lvl, shape=(B,), minval=0.0, maxval=self.max_noise_level, dtype=self.dtype\n )\n noise_BTNM = jax.random.normal(_rng_noise, shape=(B, T, N, M), dtype=self.dtype)\n noise_bucket_idx_B = jnp.floor(\n (noise_level_B * self.noise_buckets) / self.max_noise_level\n ).astype(jnp.int32)\n \n # Clip noise_bucket_idx_B to ensure it stays within valid range to prevent NaNs\n noise_bucket_idx_B = jnp.clip(noise_bucket_idx_B, 0, self.noise_buckets - 1)\n\n 
noise_bucket_idx_B11 = noise_bucket_idx_B.reshape(B, 1, 1)\n noise_level_embed_B11M = self.noise_level_embed(noise_bucket_idx_B11)\n noise_level_embed_BT1M = jnp.tile(noise_level_embed_B11M, (1, T, 1, 1))\n noise_level_B111 = noise_level_B.reshape(B, 1, 1, 1)\n\n noise_augmented_vid_embed_BTNM = jnp.sqrt(1 - noise_level_B111) * vid_embed_BTNM + noise_level_B111 * noise_BTNM\n\n ",python,selection_mouse +537,470386,"jasmine/models/dynamics.py",2242,1143," def _apply_noise_augmentation(self, vid_embed_BTNM, rng):\n B, T, N, M = vid_embed_BTNM.shape\n rng, _rng_noise_lvl, _rng_noise = jax.random.split(rng, 3)\n noise_level_B = jax.random.uniform(\n _rng_noise_lvl, shape=(B,), minval=0.0, maxval=self.max_noise_level, dtype=self.dtype\n )\n noise_BTNM = jax.random.normal(_rng_noise, shape=(B, T, N, M), dtype=self.dtype)\n noise_bucket_idx_B = jnp.floor(\n (noise_level_B * self.noise_buckets) / self.max_noise_level\n ).astype(jnp.int32)\n \n # Clip noise_bucket_idx_B to ensure it stays within valid range to prevent NaNs\n noise_bucket_idx_B = jnp.clip(noise_bucket_idx_B, 0, self.noise_buckets - 1)\n\n noise_bucket_idx_B11 = noise_bucket_idx_B.reshape(B, 1, 1)\n noise_level_embed_B11M = self.noise_level_embed(noise_bucket_idx_B11)\n noise_level_embed_BT1M = jnp.tile(noise_level_embed_B11M, (1, T, 1, 1))\n noise_level_B111 = noise_level_B.reshape(B, 1, 1, 1)\n\n noise_augmented_vid_embed_BTNM = jnp.sqrt(1 - noise_level_B111) * vid_embed_BTNM + noise_level_B111 * noise_BTNM\n",python,selection_mouse +538,470615,"jasmine/models/dynamics.py",2242,1147," def _apply_noise_augmentation(self, vid_embed_BTNM, rng):\n B, T, N, M = vid_embed_BTNM.shape\n rng, _rng_noise_lvl, _rng_noise = jax.random.split(rng, 3)\n noise_level_B = jax.random.uniform(\n _rng_noise_lvl, shape=(B,), minval=0.0, maxval=self.max_noise_level, dtype=self.dtype\n )\n noise_BTNM = jax.random.normal(_rng_noise, shape=(B, T, N, M), dtype=self.dtype)\n noise_bucket_idx_B = jnp.floor(\n (noise_level_B * self.noise_buckets) / self.max_noise_level\n ).astype(jnp.int32)\n \n # Clip noise_bucket_idx_B to ensure it stays within valid range to prevent NaNs\n noise_bucket_idx_B = jnp.clip(noise_bucket_idx_B, 0, self.noise_buckets - 1)\n\n noise_bucket_idx_B11 = noise_bucket_idx_B.reshape(B, 1, 1)\n noise_level_embed_B11M = self.noise_level_embed(noise_bucket_idx_B11)\n noise_level_embed_BT1M = jnp.tile(noise_level_embed_B11M, (1, T, 1, 1))\n noise_level_B111 = noise_level_B.reshape(B, 1, 1, 1)\n\n noise_augmented_vid_embed_BTNM = jnp.sqrt(1 - noise_level_B111) * vid_embed_BTNM + noise_level_B111 * noise_BTNM\n\n ",python,selection_mouse +539,470635,"jasmine/models/dynamics.py",2242,1146," def _apply_noise_augmentation(self, vid_embed_BTNM, rng):\n B, T, N, M = vid_embed_BTNM.shape\n rng, _rng_noise_lvl, _rng_noise = jax.random.split(rng, 3)\n noise_level_B = jax.random.uniform(\n _rng_noise_lvl, shape=(B,), minval=0.0, maxval=self.max_noise_level, dtype=self.dtype\n )\n noise_BTNM = jax.random.normal(_rng_noise, shape=(B, T, N, M), dtype=self.dtype)\n noise_bucket_idx_B = jnp.floor(\n (noise_level_B * self.noise_buckets) / self.max_noise_level\n ).astype(jnp.int32)\n \n # Clip noise_bucket_idx_B to ensure it stays within valid range to prevent NaNs\n noise_bucket_idx_B = jnp.clip(noise_bucket_idx_B, 0, self.noise_buckets - 1)\n\n noise_bucket_idx_B11 = noise_bucket_idx_B.reshape(B, 1, 1)\n noise_level_embed_B11M = self.noise_level_embed(noise_bucket_idx_B11)\n noise_level_embed_BT1M = jnp.tile(noise_level_embed_B11M, (1, T, 1, 1))\n 
noise_level_B111 = noise_level_B.reshape(B, 1, 1, 1)\n\n noise_augmented_vid_embed_BTNM = jnp.sqrt(1 - noise_level_B111) * vid_embed_BTNM + noise_level_B111 * noise_BTNM\n\n ",python,selection_mouse +540,470701,"jasmine/models/dynamics.py",2242,1145," def _apply_noise_augmentation(self, vid_embed_BTNM, rng):\n B, T, N, M = vid_embed_BTNM.shape\n rng, _rng_noise_lvl, _rng_noise = jax.random.split(rng, 3)\n noise_level_B = jax.random.uniform(\n _rng_noise_lvl, shape=(B,), minval=0.0, maxval=self.max_noise_level, dtype=self.dtype\n )\n noise_BTNM = jax.random.normal(_rng_noise, shape=(B, T, N, M), dtype=self.dtype)\n noise_bucket_idx_B = jnp.floor(\n (noise_level_B * self.noise_buckets) / self.max_noise_level\n ).astype(jnp.int32)\n \n # Clip noise_bucket_idx_B to ensure it stays within valid range to prevent NaNs\n noise_bucket_idx_B = jnp.clip(noise_bucket_idx_B, 0, self.noise_buckets - 1)\n\n noise_bucket_idx_B11 = noise_bucket_idx_B.reshape(B, 1, 1)\n noise_level_embed_B11M = self.noise_level_embed(noise_bucket_idx_B11)\n noise_level_embed_BT1M = jnp.tile(noise_level_embed_B11M, (1, T, 1, 1))\n noise_level_B111 = noise_level_B.reshape(B, 1, 1, 1)\n\n noise_augmented_vid_embed_BTNM = jnp.sqrt(1 - noise_level_B111) * vid_embed_BTNM + noise_level_B111 * noise_BTNM\n\n ",python,selection_mouse +541,470717,"jasmine/models/dynamics.py",2242,1214," def _apply_noise_augmentation(self, vid_embed_BTNM, rng):\n B, T, N, M = vid_embed_BTNM.shape\n rng, _rng_noise_lvl, _rng_noise = jax.random.split(rng, 3)\n noise_level_B = jax.random.uniform(\n _rng_noise_lvl, shape=(B,), minval=0.0, maxval=self.max_noise_level, dtype=self.dtype\n )\n noise_BTNM = jax.random.normal(_rng_noise, shape=(B, T, N, M), dtype=self.dtype)\n noise_bucket_idx_B = jnp.floor(\n (noise_level_B * self.noise_buckets) / self.max_noise_level\n ).astype(jnp.int32)\n \n # Clip noise_bucket_idx_B to ensure it stays within valid range to prevent NaNs\n noise_bucket_idx_B = jnp.clip(noise_bucket_idx_B, 0, self.noise_buckets - 1)\n\n noise_bucket_idx_B11 = noise_bucket_idx_B.reshape(B, 1, 1)\n noise_level_embed_B11M = self.noise_level_embed(noise_bucket_idx_B11)\n noise_level_embed_BT1M = jnp.tile(noise_level_embed_B11M, (1, T, 1, 1))\n noise_level_B111 = noise_level_B.reshape(B, 1, 1, 1)\n\n noise_augmented_vid_embed_BTNM = jnp.sqrt(1 - noise_level_B111) * vid_embed_BTNM + noise_level_B111 * noise_BTNM\n\n return noise_augmented_vid_embed_BTNM, noise_level_embed_BT1M\n",python,selection_mouse +542,470760,"jasmine/models/dynamics.py",2242,1216," def _apply_noise_augmentation(self, vid_embed_BTNM, rng):\n B, T, N, M = vid_embed_BTNM.shape\n rng, _rng_noise_lvl, _rng_noise = jax.random.split(rng, 3)\n noise_level_B = jax.random.uniform(\n _rng_noise_lvl, shape=(B,), minval=0.0, maxval=self.max_noise_level, dtype=self.dtype\n )\n noise_BTNM = jax.random.normal(_rng_noise, shape=(B, T, N, M), dtype=self.dtype)\n noise_bucket_idx_B = jnp.floor(\n (noise_level_B * self.noise_buckets) / self.max_noise_level\n ).astype(jnp.int32)\n \n # Clip noise_bucket_idx_B to ensure it stays within valid range to prevent NaNs\n noise_bucket_idx_B = jnp.clip(noise_bucket_idx_B, 0, self.noise_buckets - 1)\n\n noise_bucket_idx_B11 = noise_bucket_idx_B.reshape(B, 1, 1)\n noise_level_embed_B11M = self.noise_level_embed(noise_bucket_idx_B11)\n noise_level_embed_BT1M = jnp.tile(noise_level_embed_B11M, (1, T, 1, 1))\n noise_level_B111 = noise_level_B.reshape(B, 1, 1, 1)\n\n noise_augmented_vid_embed_BTNM = jnp.sqrt(1 - noise_level_B111) * vid_embed_BTNM + 
noise_level_B111 * noise_BTNM\n\n return noise_augmented_vid_embed_BTNM, noise_level_embed_BT1M\n\n ",python,selection_mouse +543,470802,"jasmine/models/dynamics.py",2242,1215," def _apply_noise_augmentation(self, vid_embed_BTNM, rng):\n B, T, N, M = vid_embed_BTNM.shape\n rng, _rng_noise_lvl, _rng_noise = jax.random.split(rng, 3)\n noise_level_B = jax.random.uniform(\n _rng_noise_lvl, shape=(B,), minval=0.0, maxval=self.max_noise_level, dtype=self.dtype\n )\n noise_BTNM = jax.random.normal(_rng_noise, shape=(B, T, N, M), dtype=self.dtype)\n noise_bucket_idx_B = jnp.floor(\n (noise_level_B * self.noise_buckets) / self.max_noise_level\n ).astype(jnp.int32)\n \n # Clip noise_bucket_idx_B to ensure it stays within valid range to prevent NaNs\n noise_bucket_idx_B = jnp.clip(noise_bucket_idx_B, 0, self.noise_buckets - 1)\n\n noise_bucket_idx_B11 = noise_bucket_idx_B.reshape(B, 1, 1)\n noise_level_embed_B11M = self.noise_level_embed(noise_bucket_idx_B11)\n noise_level_embed_BT1M = jnp.tile(noise_level_embed_B11M, (1, T, 1, 1))\n noise_level_B111 = noise_level_B.reshape(B, 1, 1, 1)\n\n noise_augmented_vid_embed_BTNM = jnp.sqrt(1 - noise_level_B111) * vid_embed_BTNM + noise_level_B111 * noise_BTNM\n\n return noise_augmented_vid_embed_BTNM, noise_level_embed_BT1M\n\n",python,selection_mouse +544,471368,"jasmine/models/dynamics.py",2242,1214," def _apply_noise_augmentation(self, vid_embed_BTNM, rng):\n B, T, N, M = vid_embed_BTNM.shape\n rng, _rng_noise_lvl, _rng_noise = jax.random.split(rng, 3)\n noise_level_B = jax.random.uniform(\n _rng_noise_lvl, shape=(B,), minval=0.0, maxval=self.max_noise_level, dtype=self.dtype\n )\n noise_BTNM = jax.random.normal(_rng_noise, shape=(B, T, N, M), dtype=self.dtype)\n noise_bucket_idx_B = jnp.floor(\n (noise_level_B * self.noise_buckets) / self.max_noise_level\n ).astype(jnp.int32)\n \n # Clip noise_bucket_idx_B to ensure it stays within valid range to prevent NaNs\n noise_bucket_idx_B = jnp.clip(noise_bucket_idx_B, 0, self.noise_buckets - 1)\n\n noise_bucket_idx_B11 = noise_bucket_idx_B.reshape(B, 1, 1)\n noise_level_embed_B11M = self.noise_level_embed(noise_bucket_idx_B11)\n noise_level_embed_BT1M = jnp.tile(noise_level_embed_B11M, (1, T, 1, 1))\n noise_level_B111 = noise_level_B.reshape(B, 1, 1, 1)\n\n noise_augmented_vid_embed_BTNM = jnp.sqrt(1 - noise_level_B111) * vid_embed_BTNM + noise_level_B111 * noise_BTNM\n\n return noise_augmented_vid_embed_BTNM, noise_level_embed_BT1M\n",python,selection_mouse +545,473002,"jasmine/models/dynamics.py",3400,0,"",python,selection_mouse +546,473451,"jasmine/models/dynamics.py",3456,0,"",python,selection_mouse +547,473789,"jasmine/models/dynamics.py",3388,68," return noise_augmented_vid_embed_BTNM, noise_level_embed_BT1M\n",python,selection_mouse +548,473823,"jasmine/models/dynamics.py",3385,71,"\n return noise_augmented_vid_embed_BTNM, noise_level_embed_BT1M\n",python,selection_mouse +549,473872,"jasmine/models/dynamics.py",3263,193,"\n noise_augmented_vid_embed_BTNM = jnp.sqrt(1 - noise_level_B111) * vid_embed_BTNM + noise_level_B111 * noise_BTNM\n\n return noise_augmented_vid_embed_BTNM, noise_level_embed_BT1M\n",python,selection_mouse +550,473873,"jasmine/models/dynamics.py",3202,254," noise_level_B111 = noise_level_B.reshape(B, 1, 1, 1)\n\n noise_augmented_vid_embed_BTNM = jnp.sqrt(1 - noise_level_B111) * vid_embed_BTNM + noise_level_B111 * noise_BTNM\n\n return noise_augmented_vid_embed_BTNM, noise_level_embed_BT1M\n",python,selection_mouse +551,473874,"jasmine/models/dynamics.py",3122,334," noise_level_embed_BT1M 
= jnp.tile(noise_level_embed_B11M, (1, T, 1, 1))\n noise_level_B111 = noise_level_B.reshape(B, 1, 1, 1)\n\n noise_augmented_vid_embed_BTNM = jnp.sqrt(1 - noise_level_B111) * vid_embed_BTNM + noise_level_B111 * noise_BTNM\n\n return noise_augmented_vid_embed_BTNM, noise_level_embed_BT1M\n",python,selection_mouse +552,473915,"jasmine/models/dynamics.py",3044,412," noise_level_embed_B11M = self.noise_level_embed(noise_bucket_idx_B11)\n noise_level_embed_BT1M = jnp.tile(noise_level_embed_B11M, (1, T, 1, 1))\n noise_level_B111 = noise_level_B.reshape(B, 1, 1, 1)\n\n noise_augmented_vid_embed_BTNM = jnp.sqrt(1 - noise_level_B111) * vid_embed_BTNM + noise_level_B111 * noise_BTNM\n\n return noise_augmented_vid_embed_BTNM, noise_level_embed_BT1M\n",python,selection_mouse +553,473916,"jasmine/models/dynamics.py",2892,564," noise_bucket_idx_B = jnp.clip(noise_bucket_idx_B, 0, self.noise_buckets - 1)\n\n noise_bucket_idx_B11 = noise_bucket_idx_B.reshape(B, 1, 1)\n noise_level_embed_B11M = self.noise_level_embed(noise_bucket_idx_B11)\n noise_level_embed_BT1M = jnp.tile(noise_level_embed_B11M, (1, T, 1, 1))\n noise_level_B111 = noise_level_B.reshape(B, 1, 1, 1)\n\n noise_augmented_vid_embed_BTNM = jnp.sqrt(1 - noise_level_B111) * vid_embed_BTNM + noise_level_B111 * noise_BTNM\n\n return noise_augmented_vid_embed_BTNM, noise_level_embed_BT1M\n",python,selection_mouse +554,473960,"jasmine/models/dynamics.py",2804,652," # Clip noise_bucket_idx_B to ensure it stays within valid range to prevent NaNs\n noise_bucket_idx_B = jnp.clip(noise_bucket_idx_B, 0, self.noise_buckets - 1)\n\n noise_bucket_idx_B11 = noise_bucket_idx_B.reshape(B, 1, 1)\n noise_level_embed_B11M = self.noise_level_embed(noise_bucket_idx_B11)\n noise_level_embed_BT1M = jnp.tile(noise_level_embed_B11M, (1, T, 1, 1))\n noise_level_B111 = noise_level_B.reshape(B, 1, 1, 1)\n\n noise_augmented_vid_embed_BTNM = jnp.sqrt(1 - noise_level_B111) * vid_embed_BTNM + noise_level_B111 * noise_BTNM\n\n return noise_augmented_vid_embed_BTNM, noise_level_embed_BT1M\n",python,selection_mouse +555,473961,"jasmine/models/dynamics.py",2795,661," \n # Clip noise_bucket_idx_B to ensure it stays within valid range to prevent NaNs\n noise_bucket_idx_B = jnp.clip(noise_bucket_idx_B, 0, self.noise_buckets - 1)\n\n noise_bucket_idx_B11 = noise_bucket_idx_B.reshape(B, 1, 1)\n noise_level_embed_B11M = self.noise_level_embed(noise_bucket_idx_B11)\n noise_level_embed_BT1M = jnp.tile(noise_level_embed_B11M, (1, T, 1, 1))\n noise_level_B111 = noise_level_B.reshape(B, 1, 1, 1)\n\n noise_augmented_vid_embed_BTNM = jnp.sqrt(1 - noise_level_B111) * vid_embed_BTNM + noise_level_B111 * noise_BTNM\n\n return noise_augmented_vid_embed_BTNM, noise_level_embed_BT1M\n",python,selection_mouse +556,473997,"jasmine/models/dynamics.py",2767,689," ).astype(jnp.int32)\n \n # Clip noise_bucket_idx_B to ensure it stays within valid range to prevent NaNs\n noise_bucket_idx_B = jnp.clip(noise_bucket_idx_B, 0, self.noise_buckets - 1)\n\n noise_bucket_idx_B11 = noise_bucket_idx_B.reshape(B, 1, 1)\n noise_level_embed_B11M = self.noise_level_embed(noise_bucket_idx_B11)\n noise_level_embed_BT1M = jnp.tile(noise_level_embed_B11M, (1, T, 1, 1))\n noise_level_B111 = noise_level_B.reshape(B, 1, 1, 1)\n\n noise_augmented_vid_embed_BTNM = jnp.sqrt(1 - noise_level_B111) * vid_embed_BTNM + noise_level_B111 * noise_BTNM\n\n return noise_augmented_vid_embed_BTNM, noise_level_embed_BT1M\n",python,selection_mouse +557,474006,"jasmine/models/dynamics.py",2695,761," (noise_level_B * self.noise_buckets) / 
self.max_noise_level\n ).astype(jnp.int32)\n \n # Clip noise_bucket_idx_B to ensure it stays within valid range to prevent NaNs\n noise_bucket_idx_B = jnp.clip(noise_bucket_idx_B, 0, self.noise_buckets - 1)\n\n noise_bucket_idx_B11 = noise_bucket_idx_B.reshape(B, 1, 1)\n noise_level_embed_B11M = self.noise_level_embed(noise_bucket_idx_B11)\n noise_level_embed_BT1M = jnp.tile(noise_level_embed_B11M, (1, T, 1, 1))\n noise_level_B111 = noise_level_B.reshape(B, 1, 1, 1)\n\n noise_augmented_vid_embed_BTNM = jnp.sqrt(1 - noise_level_B111) * vid_embed_BTNM + noise_level_B111 * noise_BTNM\n\n return noise_augmented_vid_embed_BTNM, noise_level_embed_BT1M\n",python,selection_mouse +558,474041,"jasmine/models/dynamics.py",2655,801," noise_bucket_idx_B = jnp.floor(\n (noise_level_B * self.noise_buckets) / self.max_noise_level\n ).astype(jnp.int32)\n \n # Clip noise_bucket_idx_B to ensure it stays within valid range to prevent NaNs\n noise_bucket_idx_B = jnp.clip(noise_bucket_idx_B, 0, self.noise_buckets - 1)\n\n noise_bucket_idx_B11 = noise_bucket_idx_B.reshape(B, 1, 1)\n noise_level_embed_B11M = self.noise_level_embed(noise_bucket_idx_B11)\n noise_level_embed_BT1M = jnp.tile(noise_level_embed_B11M, (1, T, 1, 1))\n noise_level_B111 = noise_level_B.reshape(B, 1, 1, 1)\n\n noise_augmented_vid_embed_BTNM = jnp.sqrt(1 - noise_level_B111) * vid_embed_BTNM + noise_level_B111 * noise_BTNM\n\n return noise_augmented_vid_embed_BTNM, noise_level_embed_BT1M\n",python,selection_mouse +559,474049,"jasmine/models/dynamics.py",2566,890," noise_BTNM = jax.random.normal(_rng_noise, shape=(B, T, N, M), dtype=self.dtype)\n noise_bucket_idx_B = jnp.floor(\n (noise_level_B * self.noise_buckets) / self.max_noise_level\n ).astype(jnp.int32)\n \n # Clip noise_bucket_idx_B to ensure it stays within valid range to prevent NaNs\n noise_bucket_idx_B = jnp.clip(noise_bucket_idx_B, 0, self.noise_buckets - 1)\n\n noise_bucket_idx_B11 = noise_bucket_idx_B.reshape(B, 1, 1)\n noise_level_embed_B11M = self.noise_level_embed(noise_bucket_idx_B11)\n noise_level_embed_BT1M = jnp.tile(noise_level_embed_B11M, (1, T, 1, 1))\n noise_level_B111 = noise_level_B.reshape(B, 1, 1, 1)\n\n noise_augmented_vid_embed_BTNM = jnp.sqrt(1 - noise_level_B111) * vid_embed_BTNM + noise_level_B111 * noise_BTNM\n\n return noise_augmented_vid_embed_BTNM, noise_level_embed_BT1M\n",python,selection_mouse +560,474089,"jasmine/models/dynamics.py",2556,900," )\n noise_BTNM = jax.random.normal(_rng_noise, shape=(B, T, N, M), dtype=self.dtype)\n noise_bucket_idx_B = jnp.floor(\n (noise_level_B * self.noise_buckets) / self.max_noise_level\n ).astype(jnp.int32)\n \n # Clip noise_bucket_idx_B to ensure it stays within valid range to prevent NaNs\n noise_bucket_idx_B = jnp.clip(noise_bucket_idx_B, 0, self.noise_buckets - 1)\n\n noise_bucket_idx_B11 = noise_bucket_idx_B.reshape(B, 1, 1)\n noise_level_embed_B11M = self.noise_level_embed(noise_bucket_idx_B11)\n noise_level_embed_BT1M = jnp.tile(noise_level_embed_B11M, (1, T, 1, 1))\n noise_level_B111 = noise_level_B.reshape(B, 1, 1, 1)\n\n noise_augmented_vid_embed_BTNM = jnp.sqrt(1 - noise_level_B111) * vid_embed_BTNM + noise_level_B111 * noise_BTNM\n\n return noise_augmented_vid_embed_BTNM, noise_level_embed_BT1M\n",python,selection_mouse +561,474090,"jasmine/models/dynamics.py",2458,998," _rng_noise_lvl, shape=(B,), minval=0.0, maxval=self.max_noise_level, dtype=self.dtype\n )\n noise_BTNM = jax.random.normal(_rng_noise, shape=(B, T, N, M), dtype=self.dtype)\n noise_bucket_idx_B = jnp.floor(\n (noise_level_B * 
self.noise_buckets) / self.max_noise_level\n ).astype(jnp.int32)\n \n # Clip noise_bucket_idx_B to ensure it stays within valid range to prevent NaNs\n noise_bucket_idx_B = jnp.clip(noise_bucket_idx_B, 0, self.noise_buckets - 1)\n\n noise_bucket_idx_B11 = noise_bucket_idx_B.reshape(B, 1, 1)\n noise_level_embed_B11M = self.noise_level_embed(noise_bucket_idx_B11)\n noise_level_embed_BT1M = jnp.tile(noise_level_embed_B11M, (1, T, 1, 1))\n noise_level_B111 = noise_level_B.reshape(B, 1, 1, 1)\n\n noise_augmented_vid_embed_BTNM = jnp.sqrt(1 - noise_level_B111) * vid_embed_BTNM + noise_level_B111 * noise_BTNM\n\n return noise_augmented_vid_embed_BTNM, noise_level_embed_BT1M\n",python,selection_mouse +562,474135,"jasmine/models/dynamics.py",2414,1042," noise_level_B = jax.random.uniform(\n _rng_noise_lvl, shape=(B,), minval=0.0, maxval=self.max_noise_level, dtype=self.dtype\n )\n noise_BTNM = jax.random.normal(_rng_noise, shape=(B, T, N, M), dtype=self.dtype)\n noise_bucket_idx_B = jnp.floor(\n (noise_level_B * self.noise_buckets) / self.max_noise_level\n ).astype(jnp.int32)\n \n # Clip noise_bucket_idx_B to ensure it stays within valid range to prevent NaNs\n noise_bucket_idx_B = jnp.clip(noise_bucket_idx_B, 0, self.noise_buckets - 1)\n\n noise_bucket_idx_B11 = noise_bucket_idx_B.reshape(B, 1, 1)\n noise_level_embed_B11M = self.noise_level_embed(noise_bucket_idx_B11)\n noise_level_embed_BT1M = jnp.tile(noise_level_embed_B11M, (1, T, 1, 1))\n noise_level_B111 = noise_level_B.reshape(B, 1, 1, 1)\n\n noise_augmented_vid_embed_BTNM = jnp.sqrt(1 - noise_level_B111) * vid_embed_BTNM + noise_level_B111 * noise_BTNM\n\n return noise_augmented_vid_embed_BTNM, noise_level_embed_BT1M\n",python,selection_mouse +563,474257,"jasmine/models/dynamics.py",2347,1109," rng, _rng_noise_lvl, _rng_noise = jax.random.split(rng, 3)\n noise_level_B = jax.random.uniform(\n _rng_noise_lvl, shape=(B,), minval=0.0, maxval=self.max_noise_level, dtype=self.dtype\n )\n noise_BTNM = jax.random.normal(_rng_noise, shape=(B, T, N, M), dtype=self.dtype)\n noise_bucket_idx_B = jnp.floor(\n (noise_level_B * self.noise_buckets) / self.max_noise_level\n ).astype(jnp.int32)\n \n # Clip noise_bucket_idx_B to ensure it stays within valid range to prevent NaNs\n noise_bucket_idx_B = jnp.clip(noise_bucket_idx_B, 0, self.noise_buckets - 1)\n\n noise_bucket_idx_B11 = noise_bucket_idx_B.reshape(B, 1, 1)\n noise_level_embed_B11M = self.noise_level_embed(noise_bucket_idx_B11)\n noise_level_embed_BT1M = jnp.tile(noise_level_embed_B11M, (1, T, 1, 1))\n noise_level_B111 = noise_level_B.reshape(B, 1, 1, 1)\n\n noise_augmented_vid_embed_BTNM = jnp.sqrt(1 - noise_level_B111) * vid_embed_BTNM + noise_level_B111 * noise_BTNM\n\n return noise_augmented_vid_embed_BTNM, noise_level_embed_BT1M\n",python,selection_mouse +564,474322,"jasmine/models/dynamics.py",2458,998," _rng_noise_lvl, shape=(B,), minval=0.0, maxval=self.max_noise_level, dtype=self.dtype\n )\n noise_BTNM = jax.random.normal(_rng_noise, shape=(B, T, N, M), dtype=self.dtype)\n noise_bucket_idx_B = jnp.floor(\n (noise_level_B * self.noise_buckets) / self.max_noise_level\n ).astype(jnp.int32)\n \n # Clip noise_bucket_idx_B to ensure it stays within valid range to prevent NaNs\n noise_bucket_idx_B = jnp.clip(noise_bucket_idx_B, 0, self.noise_buckets - 1)\n\n noise_bucket_idx_B11 = noise_bucket_idx_B.reshape(B, 1, 1)\n noise_level_embed_B11M = self.noise_level_embed(noise_bucket_idx_B11)\n noise_level_embed_BT1M = jnp.tile(noise_level_embed_B11M, (1, T, 1, 1))\n noise_level_B111 = 
noise_level_B.reshape(B, 1, 1, 1)\n\n noise_augmented_vid_embed_BTNM = jnp.sqrt(1 - noise_level_B111) * vid_embed_BTNM + noise_level_B111 * noise_BTNM\n\n return noise_augmented_vid_embed_BTNM, noise_level_embed_BT1M\n",python,selection_mouse +565,474336,"jasmine/models/dynamics.py",2555,901," )\n noise_BTNM = jax.random.normal(_rng_noise, shape=(B, T, N, M), dtype=self.dtype)\n noise_bucket_idx_B = jnp.floor(\n (noise_level_B * self.noise_buckets) / self.max_noise_level\n ).astype(jnp.int32)\n \n # Clip noise_bucket_idx_B to ensure it stays within valid range to prevent NaNs\n noise_bucket_idx_B = jnp.clip(noise_bucket_idx_B, 0, self.noise_buckets - 1)\n\n noise_bucket_idx_B11 = noise_bucket_idx_B.reshape(B, 1, 1)\n noise_level_embed_B11M = self.noise_level_embed(noise_bucket_idx_B11)\n noise_level_embed_BT1M = jnp.tile(noise_level_embed_B11M, (1, T, 1, 1))\n noise_level_B111 = noise_level_B.reshape(B, 1, 1, 1)\n\n noise_augmented_vid_embed_BTNM = jnp.sqrt(1 - noise_level_B111) * vid_embed_BTNM + noise_level_B111 * noise_BTNM\n\n return noise_augmented_vid_embed_BTNM, noise_level_embed_BT1M\n",python,selection_mouse +566,474354,"jasmine/models/dynamics.py",2654,802," noise_bucket_idx_B = jnp.floor(\n (noise_level_B * self.noise_buckets) / self.max_noise_level\n ).astype(jnp.int32)\n \n # Clip noise_bucket_idx_B to ensure it stays within valid range to prevent NaNs\n noise_bucket_idx_B = jnp.clip(noise_bucket_idx_B, 0, self.noise_buckets - 1)\n\n noise_bucket_idx_B11 = noise_bucket_idx_B.reshape(B, 1, 1)\n noise_level_embed_B11M = self.noise_level_embed(noise_bucket_idx_B11)\n noise_level_embed_BT1M = jnp.tile(noise_level_embed_B11M, (1, T, 1, 1))\n noise_level_B111 = noise_level_B.reshape(B, 1, 1, 1)\n\n noise_augmented_vid_embed_BTNM = jnp.sqrt(1 - noise_level_B111) * vid_embed_BTNM + noise_level_B111 * noise_BTNM\n\n return noise_augmented_vid_embed_BTNM, noise_level_embed_BT1M\n",python,selection_mouse +567,474369,"jasmine/models/dynamics.py",2766,690," ).astype(jnp.int32)\n \n # Clip noise_bucket_idx_B to ensure it stays within valid range to prevent NaNs\n noise_bucket_idx_B = jnp.clip(noise_bucket_idx_B, 0, self.noise_buckets - 1)\n\n noise_bucket_idx_B11 = noise_bucket_idx_B.reshape(B, 1, 1)\n noise_level_embed_B11M = self.noise_level_embed(noise_bucket_idx_B11)\n noise_level_embed_BT1M = jnp.tile(noise_level_embed_B11M, (1, T, 1, 1))\n noise_level_B111 = noise_level_B.reshape(B, 1, 1, 1)\n\n noise_augmented_vid_embed_BTNM = jnp.sqrt(1 - noise_level_B111) * vid_embed_BTNM + noise_level_B111 * noise_BTNM\n\n return noise_augmented_vid_embed_BTNM, noise_level_embed_BT1M\n",python,selection_mouse +568,474397,"jasmine/models/dynamics.py",2803,653," # Clip noise_bucket_idx_B to ensure it stays within valid range to prevent NaNs\n noise_bucket_idx_B = jnp.clip(noise_bucket_idx_B, 0, self.noise_buckets - 1)\n\n noise_bucket_idx_B11 = noise_bucket_idx_B.reshape(B, 1, 1)\n noise_level_embed_B11M = self.noise_level_embed(noise_bucket_idx_B11)\n noise_level_embed_BT1M = jnp.tile(noise_level_embed_B11M, (1, T, 1, 1))\n noise_level_B111 = noise_level_B.reshape(B, 1, 1, 1)\n\n noise_augmented_vid_embed_BTNM = jnp.sqrt(1 - noise_level_B111) * vid_embed_BTNM + noise_level_B111 * noise_BTNM\n\n return noise_augmented_vid_embed_BTNM, noise_level_embed_BT1M\n",python,selection_mouse +569,474413,"jasmine/models/dynamics.py",2976,480,"\n noise_bucket_idx_B11 = noise_bucket_idx_B.reshape(B, 1, 1)\n noise_level_embed_B11M = self.noise_level_embed(noise_bucket_idx_B11)\n noise_level_embed_BT1M = 
jnp.tile(noise_level_embed_B11M, (1, T, 1, 1))\n noise_level_B111 = noise_level_B.reshape(B, 1, 1, 1)\n\n noise_augmented_vid_embed_BTNM = jnp.sqrt(1 - noise_level_B111) * vid_embed_BTNM + noise_level_B111 * noise_BTNM\n\n return noise_augmented_vid_embed_BTNM, noise_level_embed_BT1M\n",python,selection_mouse +570,474424,"jasmine/models/dynamics.py",3203,253," noise_level_B111 = noise_level_B.reshape(B, 1, 1, 1)\n\n noise_augmented_vid_embed_BTNM = jnp.sqrt(1 - noise_level_B111) * vid_embed_BTNM + noise_level_B111 * noise_BTNM\n\n return noise_augmented_vid_embed_BTNM, noise_level_embed_BT1M\n",python,selection_mouse +571,474442,"jasmine/models/dynamics.py",3266,190," noise_augmented_vid_embed_BTNM = jnp.sqrt(1 - noise_level_B111) * vid_embed_BTNM + noise_level_B111 * noise_BTNM\n\n return noise_augmented_vid_embed_BTNM, noise_level_embed_BT1M\n",python,selection_mouse +572,474466,"jasmine/models/dynamics.py",3385,71,"\n return noise_augmented_vid_embed_BTNM, noise_level_embed_BT1M\n",python,selection_mouse +573,474483,"jasmine/models/dynamics.py",3388,68," return noise_augmented_vid_embed_BTNM, noise_level_embed_BT1M\n",python,selection_mouse +574,474581,"jasmine/models/dynamics.py",3385,71,"\n return noise_augmented_vid_embed_BTNM, noise_level_embed_BT1M\n",python,selection_mouse +575,474582,"jasmine/models/dynamics.py",3266,190," noise_augmented_vid_embed_BTNM = jnp.sqrt(1 - noise_level_B111) * vid_embed_BTNM + noise_level_B111 * noise_BTNM\n\n return noise_augmented_vid_embed_BTNM, noise_level_embed_BT1M\n",python,selection_mouse +576,474614,"jasmine/models/dynamics.py",2892,564," noise_bucket_idx_B = jnp.clip(noise_bucket_idx_B, 0, self.noise_buckets - 1)\n\n noise_bucket_idx_B11 = noise_bucket_idx_B.reshape(B, 1, 1)\n noise_level_embed_B11M = self.noise_level_embed(noise_bucket_idx_B11)\n noise_level_embed_BT1M = jnp.tile(noise_level_embed_B11M, (1, T, 1, 1))\n noise_level_B111 = noise_level_B.reshape(B, 1, 1, 1)\n\n noise_augmented_vid_embed_BTNM = jnp.sqrt(1 - noise_level_B111) * vid_embed_BTNM + noise_level_B111 * noise_BTNM\n\n return noise_augmented_vid_embed_BTNM, noise_level_embed_BT1M\n",python,selection_mouse +577,474615,"jasmine/models/dynamics.py",2695,761," (noise_level_B * self.noise_buckets) / self.max_noise_level\n ).astype(jnp.int32)\n \n # Clip noise_bucket_idx_B to ensure it stays within valid range to prevent NaNs\n noise_bucket_idx_B = jnp.clip(noise_bucket_idx_B, 0, self.noise_buckets - 1)\n\n noise_bucket_idx_B11 = noise_bucket_idx_B.reshape(B, 1, 1)\n noise_level_embed_B11M = self.noise_level_embed(noise_bucket_idx_B11)\n noise_level_embed_BT1M = jnp.tile(noise_level_embed_B11M, (1, T, 1, 1))\n noise_level_B111 = noise_level_B.reshape(B, 1, 1, 1)\n\n noise_augmented_vid_embed_BTNM = jnp.sqrt(1 - noise_level_B111) * vid_embed_BTNM + noise_level_B111 * noise_BTNM\n\n return noise_augmented_vid_embed_BTNM, noise_level_embed_BT1M\n",python,selection_mouse +578,474625,"jasmine/models/dynamics.py",2566,890," noise_BTNM = jax.random.normal(_rng_noise, shape=(B, T, N, M), dtype=self.dtype)\n noise_bucket_idx_B = jnp.floor(\n (noise_level_B * self.noise_buckets) / self.max_noise_level\n ).astype(jnp.int32)\n \n # Clip noise_bucket_idx_B to ensure it stays within valid range to prevent NaNs\n noise_bucket_idx_B = jnp.clip(noise_bucket_idx_B, 0, self.noise_buckets - 1)\n\n noise_bucket_idx_B11 = noise_bucket_idx_B.reshape(B, 1, 1)\n noise_level_embed_B11M = self.noise_level_embed(noise_bucket_idx_B11)\n noise_level_embed_BT1M = jnp.tile(noise_level_embed_B11M, (1, T, 1, 
1))\n noise_level_B111 = noise_level_B.reshape(B, 1, 1, 1)\n\n noise_augmented_vid_embed_BTNM = jnp.sqrt(1 - noise_level_B111) * vid_embed_BTNM + noise_level_B111 * noise_BTNM\n\n return noise_augmented_vid_embed_BTNM, noise_level_embed_BT1M\n",python,selection_mouse +579,474647,"jasmine/models/dynamics.py",2458,998," _rng_noise_lvl, shape=(B,), minval=0.0, maxval=self.max_noise_level, dtype=self.dtype\n )\n noise_BTNM = jax.random.normal(_rng_noise, shape=(B, T, N, M), dtype=self.dtype)\n noise_bucket_idx_B = jnp.floor(\n (noise_level_B * self.noise_buckets) / self.max_noise_level\n ).astype(jnp.int32)\n \n # Clip noise_bucket_idx_B to ensure it stays within valid range to prevent NaNs\n noise_bucket_idx_B = jnp.clip(noise_bucket_idx_B, 0, self.noise_buckets - 1)\n\n noise_bucket_idx_B11 = noise_bucket_idx_B.reshape(B, 1, 1)\n noise_level_embed_B11M = self.noise_level_embed(noise_bucket_idx_B11)\n noise_level_embed_BT1M = jnp.tile(noise_level_embed_B11M, (1, T, 1, 1))\n noise_level_B111 = noise_level_B.reshape(B, 1, 1, 1)\n\n noise_augmented_vid_embed_BTNM = jnp.sqrt(1 - noise_level_B111) * vid_embed_BTNM + noise_level_B111 * noise_BTNM\n\n return noise_augmented_vid_embed_BTNM, noise_level_embed_BT1M\n",python,selection_mouse +580,474659,"jasmine/models/dynamics.py",2347,1109," rng, _rng_noise_lvl, _rng_noise = jax.random.split(rng, 3)\n noise_level_B = jax.random.uniform(\n _rng_noise_lvl, shape=(B,), minval=0.0, maxval=self.max_noise_level, dtype=self.dtype\n )\n noise_BTNM = jax.random.normal(_rng_noise, shape=(B, T, N, M), dtype=self.dtype)\n noise_bucket_idx_B = jnp.floor(\n (noise_level_B * self.noise_buckets) / self.max_noise_level\n ).astype(jnp.int32)\n \n # Clip noise_bucket_idx_B to ensure it stays within valid range to prevent NaNs\n noise_bucket_idx_B = jnp.clip(noise_bucket_idx_B, 0, self.noise_buckets - 1)\n\n noise_bucket_idx_B11 = noise_bucket_idx_B.reshape(B, 1, 1)\n noise_level_embed_B11M = self.noise_level_embed(noise_bucket_idx_B11)\n noise_level_embed_BT1M = jnp.tile(noise_level_embed_B11M, (1, T, 1, 1))\n noise_level_B111 = noise_level_B.reshape(B, 1, 1, 1)\n\n noise_augmented_vid_embed_BTNM = jnp.sqrt(1 - noise_level_B111) * vid_embed_BTNM + noise_level_B111 * noise_BTNM\n\n return noise_augmented_vid_embed_BTNM, noise_level_embed_BT1M\n",python,selection_mouse +581,474882,"jasmine/models/dynamics.py",2305,1151," B, T, N, M = vid_embed_BTNM.shape\n rng, _rng_noise_lvl, _rng_noise = jax.random.split(rng, 3)\n noise_level_B = jax.random.uniform(\n _rng_noise_lvl, shape=(B,), minval=0.0, maxval=self.max_noise_level, dtype=self.dtype\n )\n noise_BTNM = jax.random.normal(_rng_noise, shape=(B, T, N, M), dtype=self.dtype)\n noise_bucket_idx_B = jnp.floor(\n (noise_level_B * self.noise_buckets) / self.max_noise_level\n ).astype(jnp.int32)\n \n # Clip noise_bucket_idx_B to ensure it stays within valid range to prevent NaNs\n noise_bucket_idx_B = jnp.clip(noise_bucket_idx_B, 0, self.noise_buckets - 1)\n\n noise_bucket_idx_B11 = noise_bucket_idx_B.reshape(B, 1, 1)\n noise_level_embed_B11M = self.noise_level_embed(noise_bucket_idx_B11)\n noise_level_embed_BT1M = jnp.tile(noise_level_embed_B11M, (1, T, 1, 1))\n noise_level_B111 = noise_level_B.reshape(B, 1, 1, 1)\n\n noise_augmented_vid_embed_BTNM = jnp.sqrt(1 - noise_level_B111) * vid_embed_BTNM + noise_level_B111 * noise_BTNM\n\n return noise_augmented_vid_embed_BTNM, noise_level_embed_BT1M\n",python,selection_mouse +582,474902,"jasmine/models/dynamics.py",2304,1152," B, T, N, M = vid_embed_BTNM.shape\n rng, _rng_noise_lvl, 
_rng_noise = jax.random.split(rng, 3)\n noise_level_B = jax.random.uniform(\n _rng_noise_lvl, shape=(B,), minval=0.0, maxval=self.max_noise_level, dtype=self.dtype\n )\n noise_BTNM = jax.random.normal(_rng_noise, shape=(B, T, N, M), dtype=self.dtype)\n noise_bucket_idx_B = jnp.floor(\n (noise_level_B * self.noise_buckets) / self.max_noise_level\n ).astype(jnp.int32)\n \n # Clip noise_bucket_idx_B to ensure it stays within valid range to prevent NaNs\n noise_bucket_idx_B = jnp.clip(noise_bucket_idx_B, 0, self.noise_buckets - 1)\n\n noise_bucket_idx_B11 = noise_bucket_idx_B.reshape(B, 1, 1)\n noise_level_embed_B11M = self.noise_level_embed(noise_bucket_idx_B11)\n noise_level_embed_BT1M = jnp.tile(noise_level_embed_B11M, (1, T, 1, 1))\n noise_level_B111 = noise_level_B.reshape(B, 1, 1, 1)\n\n noise_augmented_vid_embed_BTNM = jnp.sqrt(1 - noise_level_B111) * vid_embed_BTNM + noise_level_B111 * noise_BTNM\n\n return noise_augmented_vid_embed_BTNM, noise_level_embed_BT1M\n",python,selection_mouse +583,474930,"jasmine/models/dynamics.py",2242,1214," def _apply_noise_augmentation(self, vid_embed_BTNM, rng):\n B, T, N, M = vid_embed_BTNM.shape\n rng, _rng_noise_lvl, _rng_noise = jax.random.split(rng, 3)\n noise_level_B = jax.random.uniform(\n _rng_noise_lvl, shape=(B,), minval=0.0, maxval=self.max_noise_level, dtype=self.dtype\n )\n noise_BTNM = jax.random.normal(_rng_noise, shape=(B, T, N, M), dtype=self.dtype)\n noise_bucket_idx_B = jnp.floor(\n (noise_level_B * self.noise_buckets) / self.max_noise_level\n ).astype(jnp.int32)\n \n # Clip noise_bucket_idx_B to ensure it stays within valid range to prevent NaNs\n noise_bucket_idx_B = jnp.clip(noise_bucket_idx_B, 0, self.noise_buckets - 1)\n\n noise_bucket_idx_B11 = noise_bucket_idx_B.reshape(B, 1, 1)\n noise_level_embed_B11M = self.noise_level_embed(noise_bucket_idx_B11)\n noise_level_embed_BT1M = jnp.tile(noise_level_embed_B11M, (1, T, 1, 1))\n noise_level_B111 = noise_level_B.reshape(B, 1, 1, 1)\n\n noise_augmented_vid_embed_BTNM = jnp.sqrt(1 - noise_level_B111) * vid_embed_BTNM + noise_level_B111 * noise_BTNM\n\n return noise_augmented_vid_embed_BTNM, noise_level_embed_BT1M\n",python,selection_mouse +584,475865,"jasmine/models/dynamics.py",2242,0,"",python,selection_mouse +585,475866,"jasmine/models/dynamics.py",2242,4," ",python,selection_mouse +586,476037,"jasmine/models/dynamics.py",2242,104," def _apply_noise_augmentation(self, vid_embed_BTNM, rng):\n B, T, N, M = vid_embed_BTNM.shape\n",python,selection_mouse +587,476080,"jasmine/models/dynamics.py",2242,323," def _apply_noise_augmentation(self, vid_embed_BTNM, rng):\n B, T, N, M = vid_embed_BTNM.shape\n rng, _rng_noise_lvl, _rng_noise = jax.random.split(rng, 3)\n noise_level_B = jax.random.uniform(\n _rng_noise_lvl, shape=(B,), minval=0.0, maxval=self.max_noise_level, dtype=self.dtype\n )\n",python,selection_mouse +588,476082,"jasmine/models/dynamics.py",2242,552," def _apply_noise_augmentation(self, vid_embed_BTNM, rng):\n B, T, N, M = vid_embed_BTNM.shape\n rng, _rng_noise_lvl, _rng_noise = jax.random.split(rng, 3)\n noise_level_B = jax.random.uniform(\n _rng_noise_lvl, shape=(B,), minval=0.0, maxval=self.max_noise_level, dtype=self.dtype\n )\n noise_BTNM = jax.random.normal(_rng_noise, shape=(B, T, N, M), dtype=self.dtype)\n noise_bucket_idx_B = jnp.floor(\n (noise_level_B * self.noise_buckets) / self.max_noise_level\n ).astype(jnp.int32)\n",python,selection_mouse +589,476122,"jasmine/models/dynamics.py",2242,1287," def _apply_noise_augmentation(self, vid_embed_BTNM, rng):\n B, T, N, M 
= vid_embed_BTNM.shape\n rng, _rng_noise_lvl, _rng_noise = jax.random.split(rng, 3)\n noise_level_B = jax.random.uniform(\n _rng_noise_lvl, shape=(B,), minval=0.0, maxval=self.max_noise_level, dtype=self.dtype\n )\n noise_BTNM = jax.random.normal(_rng_noise, shape=(B, T, N, M), dtype=self.dtype)\n noise_bucket_idx_B = jnp.floor(\n (noise_level_B * self.noise_buckets) / self.max_noise_level\n ).astype(jnp.int32)\n \n # Clip noise_bucket_idx_B to ensure it stays within valid range to prevent NaNs\n noise_bucket_idx_B = jnp.clip(noise_bucket_idx_B, 0, self.noise_buckets - 1)\n\n noise_bucket_idx_B11 = noise_bucket_idx_B.reshape(B, 1, 1)\n noise_level_embed_B11M = self.noise_level_embed(noise_bucket_idx_B11)\n noise_level_embed_BT1M = jnp.tile(noise_level_embed_B11M, (1, T, 1, 1))\n noise_level_B111 = noise_level_B.reshape(B, 1, 1, 1)\n\n noise_augmented_vid_embed_BTNM = jnp.sqrt(1 - noise_level_B111) * vid_embed_BTNM + noise_level_B111 * noise_BTNM\n\n return noise_augmented_vid_embed_BTNM, noise_level_embed_BT1M\n\n def __call__(\n self,\n batch: Dict[str, jax.Array],\n ",python,selection_mouse +590,476123,"jasmine/models/dynamics.py",2242,1460," def _apply_noise_augmentation(self, vid_embed_BTNM, rng):\n B, T, N, M = vid_embed_BTNM.shape\n rng, _rng_noise_lvl, _rng_noise = jax.random.split(rng, 3)\n noise_level_B = jax.random.uniform(\n _rng_noise_lvl, shape=(B,), minval=0.0, maxval=self.max_noise_level, dtype=self.dtype\n )\n noise_BTNM = jax.random.normal(_rng_noise, shape=(B, T, N, M), dtype=self.dtype)\n noise_bucket_idx_B = jnp.floor(\n (noise_level_B * self.noise_buckets) / self.max_noise_level\n ).astype(jnp.int32)\n \n # Clip noise_bucket_idx_B to ensure it stays within valid range to prevent NaNs\n noise_bucket_idx_B = jnp.clip(noise_bucket_idx_B, 0, self.noise_buckets - 1)\n\n noise_bucket_idx_B11 = noise_bucket_idx_B.reshape(B, 1, 1)\n noise_level_embed_B11M = self.noise_level_embed(noise_bucket_idx_B11)\n noise_level_embed_BT1M = jnp.tile(noise_level_embed_B11M, (1, T, 1, 1))\n noise_level_B111 = noise_level_B.reshape(B, 1, 1, 1)\n\n noise_augmented_vid_embed_BTNM = jnp.sqrt(1 - noise_level_B111) * vid_embed_BTNM + noise_level_B111 * noise_BTNM\n\n return noise_augmented_vid_embed_BTNM, noise_level_embed_BT1M\n\n def __call__(\n self,\n batch: Dict[str, jax.Array],\n ) -> tuple[jax.Array, jax.Array]:\n # --- Mask videos ---\n video_tokens_BTN = batch[""video_tokens""]\n latent_actions_BTm11L = batch[""latent_actions""]\n ",python,selection_mouse +591,476163,"jasmine/models/dynamics.py",2242,1520," def _apply_noise_augmentation(self, vid_embed_BTNM, rng):\n B, T, N, M = vid_embed_BTNM.shape\n rng, _rng_noise_lvl, _rng_noise = jax.random.split(rng, 3)\n noise_level_B = jax.random.uniform(\n _rng_noise_lvl, shape=(B,), minval=0.0, maxval=self.max_noise_level, dtype=self.dtype\n )\n noise_BTNM = jax.random.normal(_rng_noise, shape=(B, T, N, M), dtype=self.dtype)\n noise_bucket_idx_B = jnp.floor(\n (noise_level_B * self.noise_buckets) / self.max_noise_level\n ).astype(jnp.int32)\n \n # Clip noise_bucket_idx_B to ensure it stays within valid range to prevent NaNs\n noise_bucket_idx_B = jnp.clip(noise_bucket_idx_B, 0, self.noise_buckets - 1)\n\n noise_bucket_idx_B11 = noise_bucket_idx_B.reshape(B, 1, 1)\n noise_level_embed_B11M = self.noise_level_embed(noise_bucket_idx_B11)\n noise_level_embed_BT1M = jnp.tile(noise_level_embed_B11M, (1, T, 1, 1))\n noise_level_B111 = noise_level_B.reshape(B, 1, 1, 1)\n\n noise_augmented_vid_embed_BTNM = jnp.sqrt(1 - noise_level_B111) * vid_embed_BTNM + 
noise_level_B111 * noise_BTNM\n\n return noise_augmented_vid_embed_BTNM, noise_level_embed_BT1M\n\n def __call__(\n self,\n batch: Dict[str, jax.Array],\n ) -> tuple[jax.Array, jax.Array]:\n # --- Mask videos ---\n video_tokens_BTN = batch[""video_tokens""]\n latent_actions_BTm11L = batch[""latent_actions""]\n vid_embed_BTNM = self.patch_embed(video_tokens_BTN)\n\n ",python,selection_mouse +592,476174,"jasmine/models/dynamics.py",2242,1556," def _apply_noise_augmentation(self, vid_embed_BTNM, rng):\n B, T, N, M = vid_embed_BTNM.shape\n rng, _rng_noise_lvl, _rng_noise = jax.random.split(rng, 3)\n noise_level_B = jax.random.uniform(\n _rng_noise_lvl, shape=(B,), minval=0.0, maxval=self.max_noise_level, dtype=self.dtype\n )\n noise_BTNM = jax.random.normal(_rng_noise, shape=(B, T, N, M), dtype=self.dtype)\n noise_bucket_idx_B = jnp.floor(\n (noise_level_B * self.noise_buckets) / self.max_noise_level\n ).astype(jnp.int32)\n \n # Clip noise_bucket_idx_B to ensure it stays within valid range to prevent NaNs\n noise_bucket_idx_B = jnp.clip(noise_bucket_idx_B, 0, self.noise_buckets - 1)\n\n noise_bucket_idx_B11 = noise_bucket_idx_B.reshape(B, 1, 1)\n noise_level_embed_B11M = self.noise_level_embed(noise_bucket_idx_B11)\n noise_level_embed_BT1M = jnp.tile(noise_level_embed_B11M, (1, T, 1, 1))\n noise_level_B111 = noise_level_B.reshape(B, 1, 1, 1)\n\n noise_augmented_vid_embed_BTNM = jnp.sqrt(1 - noise_level_B111) * vid_embed_BTNM + noise_level_B111 * noise_BTNM\n\n return noise_augmented_vid_embed_BTNM, noise_level_embed_BT1M\n\n def __call__(\n self,\n batch: Dict[str, jax.Array],\n ) -> tuple[jax.Array, jax.Array]:\n # --- Mask videos ---\n video_tokens_BTN = batch[""video_tokens""]\n latent_actions_BTm11L = batch[""latent_actions""]\n vid_embed_BTNM = self.patch_embed(video_tokens_BTN)\n\n B = vid_embed_BTNM.shape[0]\n ",python,selection_mouse +593,476215,"jasmine/models/dynamics.py",2242,1555," def _apply_noise_augmentation(self, vid_embed_BTNM, rng):\n B, T, N, M = vid_embed_BTNM.shape\n rng, _rng_noise_lvl, _rng_noise = jax.random.split(rng, 3)\n noise_level_B = jax.random.uniform(\n _rng_noise_lvl, shape=(B,), minval=0.0, maxval=self.max_noise_level, dtype=self.dtype\n )\n noise_BTNM = jax.random.normal(_rng_noise, shape=(B, T, N, M), dtype=self.dtype)\n noise_bucket_idx_B = jnp.floor(\n (noise_level_B * self.noise_buckets) / self.max_noise_level\n ).astype(jnp.int32)\n \n # Clip noise_bucket_idx_B to ensure it stays within valid range to prevent NaNs\n noise_bucket_idx_B = jnp.clip(noise_bucket_idx_B, 0, self.noise_buckets - 1)\n\n noise_bucket_idx_B11 = noise_bucket_idx_B.reshape(B, 1, 1)\n noise_level_embed_B11M = self.noise_level_embed(noise_bucket_idx_B11)\n noise_level_embed_BT1M = jnp.tile(noise_level_embed_B11M, (1, T, 1, 1))\n noise_level_B111 = noise_level_B.reshape(B, 1, 1, 1)\n\n noise_augmented_vid_embed_BTNM = jnp.sqrt(1 - noise_level_B111) * vid_embed_BTNM + noise_level_B111 * noise_BTNM\n\n return noise_augmented_vid_embed_BTNM, noise_level_embed_BT1M\n\n def __call__(\n self,\n batch: Dict[str, jax.Array],\n ) -> tuple[jax.Array, jax.Array]:\n # --- Mask videos ---\n video_tokens_BTN = batch[""video_tokens""]\n latent_actions_BTm11L = batch[""latent_actions""]\n vid_embed_BTNM = self.patch_embed(video_tokens_BTN)\n\n B = vid_embed_BTNM.shape[0]\n ",python,selection_mouse +594,476351,"jasmine/models/dynamics.py",2242,1519," def _apply_noise_augmentation(self, vid_embed_BTNM, rng):\n B, T, N, M = vid_embed_BTNM.shape\n rng, _rng_noise_lvl, _rng_noise = jax.random.split(rng, 
3)\n noise_level_B = jax.random.uniform(\n _rng_noise_lvl, shape=(B,), minval=0.0, maxval=self.max_noise_level, dtype=self.dtype\n )\n noise_BTNM = jax.random.normal(_rng_noise, shape=(B, T, N, M), dtype=self.dtype)\n noise_bucket_idx_B = jnp.floor(\n (noise_level_B * self.noise_buckets) / self.max_noise_level\n ).astype(jnp.int32)\n \n # Clip noise_bucket_idx_B to ensure it stays within valid range to prevent NaNs\n noise_bucket_idx_B = jnp.clip(noise_bucket_idx_B, 0, self.noise_buckets - 1)\n\n noise_bucket_idx_B11 = noise_bucket_idx_B.reshape(B, 1, 1)\n noise_level_embed_B11M = self.noise_level_embed(noise_bucket_idx_B11)\n noise_level_embed_BT1M = jnp.tile(noise_level_embed_B11M, (1, T, 1, 1))\n noise_level_B111 = noise_level_B.reshape(B, 1, 1, 1)\n\n noise_augmented_vid_embed_BTNM = jnp.sqrt(1 - noise_level_B111) * vid_embed_BTNM + noise_level_B111 * noise_BTNM\n\n return noise_augmented_vid_embed_BTNM, noise_level_embed_BT1M\n\n def __call__(\n self,\n batch: Dict[str, jax.Array],\n ) -> tuple[jax.Array, jax.Array]:\n # --- Mask videos ---\n video_tokens_BTN = batch[""video_tokens""]\n latent_actions_BTm11L = batch[""latent_actions""]\n vid_embed_BTNM = self.patch_embed(video_tokens_BTN)\n\n ",python,selection_mouse +595,476385,"jasmine/models/dynamics.py",2242,1517," def _apply_noise_augmentation(self, vid_embed_BTNM, rng):\n B, T, N, M = vid_embed_BTNM.shape\n rng, _rng_noise_lvl, _rng_noise = jax.random.split(rng, 3)\n noise_level_B = jax.random.uniform(\n _rng_noise_lvl, shape=(B,), minval=0.0, maxval=self.max_noise_level, dtype=self.dtype\n )\n noise_BTNM = jax.random.normal(_rng_noise, shape=(B, T, N, M), dtype=self.dtype)\n noise_bucket_idx_B = jnp.floor(\n (noise_level_B * self.noise_buckets) / self.max_noise_level\n ).astype(jnp.int32)\n \n # Clip noise_bucket_idx_B to ensure it stays within valid range to prevent NaNs\n noise_bucket_idx_B = jnp.clip(noise_bucket_idx_B, 0, self.noise_buckets - 1)\n\n noise_bucket_idx_B11 = noise_bucket_idx_B.reshape(B, 1, 1)\n noise_level_embed_B11M = self.noise_level_embed(noise_bucket_idx_B11)\n noise_level_embed_BT1M = jnp.tile(noise_level_embed_B11M, (1, T, 1, 1))\n noise_level_B111 = noise_level_B.reshape(B, 1, 1, 1)\n\n noise_augmented_vid_embed_BTNM = jnp.sqrt(1 - noise_level_B111) * vid_embed_BTNM + noise_level_B111 * noise_BTNM\n\n return noise_augmented_vid_embed_BTNM, noise_level_embed_BT1M\n\n def __call__(\n self,\n batch: Dict[str, jax.Array],\n ) -> tuple[jax.Array, jax.Array]:\n # --- Mask videos ---\n video_tokens_BTN = batch[""video_tokens""]\n latent_actions_BTm11L = batch[""latent_actions""]\n vid_embed_BTNM = self.patch_embed(video_tokens_BTN)\n",python,selection_mouse +596,476403,"jasmine/models/dynamics.py",2242,1457," def _apply_noise_augmentation(self, vid_embed_BTNM, rng):\n B, T, N, M = vid_embed_BTNM.shape\n rng, _rng_noise_lvl, _rng_noise = jax.random.split(rng, 3)\n noise_level_B = jax.random.uniform(\n _rng_noise_lvl, shape=(B,), minval=0.0, maxval=self.max_noise_level, dtype=self.dtype\n )\n noise_BTNM = jax.random.normal(_rng_noise, shape=(B, T, N, M), dtype=self.dtype)\n noise_bucket_idx_B = jnp.floor(\n (noise_level_B * self.noise_buckets) / self.max_noise_level\n ).astype(jnp.int32)\n \n # Clip noise_bucket_idx_B to ensure it stays within valid range to prevent NaNs\n noise_bucket_idx_B = jnp.clip(noise_bucket_idx_B, 0, self.noise_buckets - 1)\n\n noise_bucket_idx_B11 = noise_bucket_idx_B.reshape(B, 1, 1)\n noise_level_embed_B11M = self.noise_level_embed(noise_bucket_idx_B11)\n noise_level_embed_BT1M = 
jnp.tile(noise_level_embed_B11M, (1, T, 1, 1))\n noise_level_B111 = noise_level_B.reshape(B, 1, 1, 1)\n\n noise_augmented_vid_embed_BTNM = jnp.sqrt(1 - noise_level_B111) * vid_embed_BTNM + noise_level_B111 * noise_BTNM\n\n return noise_augmented_vid_embed_BTNM, noise_level_embed_BT1M\n\n def __call__(\n self,\n batch: Dict[str, jax.Array],\n ) -> tuple[jax.Array, jax.Array]:\n # --- Mask videos ---\n video_tokens_BTN = batch[""video_tokens""]\n latent_actions_BTm11L = batch[""latent_actions""]\n",python,selection_mouse +597,476428,"jasmine/models/dynamics.py",2242,1352," def _apply_noise_augmentation(self, vid_embed_BTNM, rng):\n B, T, N, M = vid_embed_BTNM.shape\n rng, _rng_noise_lvl, _rng_noise = jax.random.split(rng, 3)\n noise_level_B = jax.random.uniform(\n _rng_noise_lvl, shape=(B,), minval=0.0, maxval=self.max_noise_level, dtype=self.dtype\n )\n noise_BTNM = jax.random.normal(_rng_noise, shape=(B, T, N, M), dtype=self.dtype)\n noise_bucket_idx_B = jnp.floor(\n (noise_level_B * self.noise_buckets) / self.max_noise_level\n ).astype(jnp.int32)\n \n # Clip noise_bucket_idx_B to ensure it stays within valid range to prevent NaNs\n noise_bucket_idx_B = jnp.clip(noise_bucket_idx_B, 0, self.noise_buckets - 1)\n\n noise_bucket_idx_B11 = noise_bucket_idx_B.reshape(B, 1, 1)\n noise_level_embed_B11M = self.noise_level_embed(noise_bucket_idx_B11)\n noise_level_embed_BT1M = jnp.tile(noise_level_embed_B11M, (1, T, 1, 1))\n noise_level_B111 = noise_level_B.reshape(B, 1, 1, 1)\n\n noise_augmented_vid_embed_BTNM = jnp.sqrt(1 - noise_level_B111) * vid_embed_BTNM + noise_level_B111 * noise_BTNM\n\n return noise_augmented_vid_embed_BTNM, noise_level_embed_BT1M\n\n def __call__(\n self,\n batch: Dict[str, jax.Array],\n ) -> tuple[jax.Array, jax.Array]:\n # --- Mask videos ---\n",python,selection_mouse +598,476445,"jasmine/models/dynamics.py",2242,1284," def _apply_noise_augmentation(self, vid_embed_BTNM, rng):\n B, T, N, M = vid_embed_BTNM.shape\n rng, _rng_noise_lvl, _rng_noise = jax.random.split(rng, 3)\n noise_level_B = jax.random.uniform(\n _rng_noise_lvl, shape=(B,), minval=0.0, maxval=self.max_noise_level, dtype=self.dtype\n )\n noise_BTNM = jax.random.normal(_rng_noise, shape=(B, T, N, M), dtype=self.dtype)\n noise_bucket_idx_B = jnp.floor(\n (noise_level_B * self.noise_buckets) / self.max_noise_level\n ).astype(jnp.int32)\n \n # Clip noise_bucket_idx_B to ensure it stays within valid range to prevent NaNs\n noise_bucket_idx_B = jnp.clip(noise_bucket_idx_B, 0, self.noise_buckets - 1)\n\n noise_bucket_idx_B11 = noise_bucket_idx_B.reshape(B, 1, 1)\n noise_level_embed_B11M = self.noise_level_embed(noise_bucket_idx_B11)\n noise_level_embed_BT1M = jnp.tile(noise_level_embed_B11M, (1, T, 1, 1))\n noise_level_B111 = noise_level_B.reshape(B, 1, 1, 1)\n\n noise_augmented_vid_embed_BTNM = jnp.sqrt(1 - noise_level_B111) * vid_embed_BTNM + noise_level_B111 * noise_BTNM\n\n return noise_augmented_vid_embed_BTNM, noise_level_embed_BT1M\n\n def __call__(\n self,\n batch: Dict[str, jax.Array],\n",python,selection_mouse +599,476472,"jasmine/models/dynamics.py",2242,1247," def _apply_noise_augmentation(self, vid_embed_BTNM, rng):\n B, T, N, M = vid_embed_BTNM.shape\n rng, _rng_noise_lvl, _rng_noise = jax.random.split(rng, 3)\n noise_level_B = jax.random.uniform(\n _rng_noise_lvl, shape=(B,), minval=0.0, maxval=self.max_noise_level, dtype=self.dtype\n )\n noise_BTNM = jax.random.normal(_rng_noise, shape=(B, T, N, M), dtype=self.dtype)\n noise_bucket_idx_B = jnp.floor(\n (noise_level_B * self.noise_buckets) / 
self.max_noise_level\n ).astype(jnp.int32)\n \n # Clip noise_bucket_idx_B to ensure it stays within valid range to prevent NaNs\n noise_bucket_idx_B = jnp.clip(noise_bucket_idx_B, 0, self.noise_buckets - 1)\n\n noise_bucket_idx_B11 = noise_bucket_idx_B.reshape(B, 1, 1)\n noise_level_embed_B11M = self.noise_level_embed(noise_bucket_idx_B11)\n noise_level_embed_BT1M = jnp.tile(noise_level_embed_B11M, (1, T, 1, 1))\n noise_level_B111 = noise_level_B.reshape(B, 1, 1, 1)\n\n noise_augmented_vid_embed_BTNM = jnp.sqrt(1 - noise_level_B111) * vid_embed_BTNM + noise_level_B111 * noise_BTNM\n\n return noise_augmented_vid_embed_BTNM, noise_level_embed_BT1M\n\n def __call__(\n self,\n",python,selection_mouse +600,476473,"jasmine/models/dynamics.py",2242,1234," def _apply_noise_augmentation(self, vid_embed_BTNM, rng):\n B, T, N, M = vid_embed_BTNM.shape\n rng, _rng_noise_lvl, _rng_noise = jax.random.split(rng, 3)\n noise_level_B = jax.random.uniform(\n _rng_noise_lvl, shape=(B,), minval=0.0, maxval=self.max_noise_level, dtype=self.dtype\n )\n noise_BTNM = jax.random.normal(_rng_noise, shape=(B, T, N, M), dtype=self.dtype)\n noise_bucket_idx_B = jnp.floor(\n (noise_level_B * self.noise_buckets) / self.max_noise_level\n ).astype(jnp.int32)\n \n # Clip noise_bucket_idx_B to ensure it stays within valid range to prevent NaNs\n noise_bucket_idx_B = jnp.clip(noise_bucket_idx_B, 0, self.noise_buckets - 1)\n\n noise_bucket_idx_B11 = noise_bucket_idx_B.reshape(B, 1, 1)\n noise_level_embed_B11M = self.noise_level_embed(noise_bucket_idx_B11)\n noise_level_embed_BT1M = jnp.tile(noise_level_embed_B11M, (1, T, 1, 1))\n noise_level_B111 = noise_level_B.reshape(B, 1, 1, 1)\n\n noise_augmented_vid_embed_BTNM = jnp.sqrt(1 - noise_level_B111) * vid_embed_BTNM + noise_level_B111 * noise_BTNM\n\n return noise_augmented_vid_embed_BTNM, noise_level_embed_BT1M\n\n def __call__(\n ",python,selection_mouse +601,476668,"jasmine/models/dynamics.py",2242,1216," def _apply_noise_augmentation(self, vid_embed_BTNM, rng):\n B, T, N, M = vid_embed_BTNM.shape\n rng, _rng_noise_lvl, _rng_noise = jax.random.split(rng, 3)\n noise_level_B = jax.random.uniform(\n _rng_noise_lvl, shape=(B,), minval=0.0, maxval=self.max_noise_level, dtype=self.dtype\n )\n noise_BTNM = jax.random.normal(_rng_noise, shape=(B, T, N, M), dtype=self.dtype)\n noise_bucket_idx_B = jnp.floor(\n (noise_level_B * self.noise_buckets) / self.max_noise_level\n ).astype(jnp.int32)\n \n # Clip noise_bucket_idx_B to ensure it stays within valid range to prevent NaNs\n noise_bucket_idx_B = jnp.clip(noise_bucket_idx_B, 0, self.noise_buckets - 1)\n\n noise_bucket_idx_B11 = noise_bucket_idx_B.reshape(B, 1, 1)\n noise_level_embed_B11M = self.noise_level_embed(noise_bucket_idx_B11)\n noise_level_embed_BT1M = jnp.tile(noise_level_embed_B11M, (1, T, 1, 1))\n noise_level_B111 = noise_level_B.reshape(B, 1, 1, 1)\n\n noise_augmented_vid_embed_BTNM = jnp.sqrt(1 - noise_level_B111) * vid_embed_BTNM + noise_level_B111 * noise_BTNM\n\n return noise_augmented_vid_embed_BTNM, noise_level_embed_BT1M\n\n ",python,selection_mouse +602,476761,"jasmine/models/dynamics.py",2242,1214," def _apply_noise_augmentation(self, vid_embed_BTNM, rng):\n B, T, N, M = vid_embed_BTNM.shape\n rng, _rng_noise_lvl, _rng_noise = jax.random.split(rng, 3)\n noise_level_B = jax.random.uniform(\n _rng_noise_lvl, shape=(B,), minval=0.0, maxval=self.max_noise_level, dtype=self.dtype\n )\n noise_BTNM = jax.random.normal(_rng_noise, shape=(B, T, N, M), dtype=self.dtype)\n noise_bucket_idx_B = jnp.floor(\n (noise_level_B * 
self.noise_buckets) / self.max_noise_level\n ).astype(jnp.int32)\n \n # Clip noise_bucket_idx_B to ensure it stays within valid range to prevent NaNs\n noise_bucket_idx_B = jnp.clip(noise_bucket_idx_B, 0, self.noise_buckets - 1)\n\n noise_bucket_idx_B11 = noise_bucket_idx_B.reshape(B, 1, 1)\n noise_level_embed_B11M = self.noise_level_embed(noise_bucket_idx_B11)\n noise_level_embed_BT1M = jnp.tile(noise_level_embed_B11M, (1, T, 1, 1))\n noise_level_B111 = noise_level_B.reshape(B, 1, 1, 1)\n\n noise_augmented_vid_embed_BTNM = jnp.sqrt(1 - noise_level_B111) * vid_embed_BTNM + noise_level_B111 * noise_BTNM\n\n return noise_augmented_vid_embed_BTNM, noise_level_embed_BT1M\n",python,selection_mouse +603,477713,"jasmine/models/dynamics.py",3456,0,"",python,selection_mouse +604,478269,"jasmine/models/dynamics.py",3385,71,"\n return noise_augmented_vid_embed_BTNM, noise_level_embed_BT1M\n",python,selection_mouse +605,478288,"jasmine/models/dynamics.py",3045,411," noise_level_embed_B11M = self.noise_level_embed(noise_bucket_idx_B11)\n noise_level_embed_BT1M = jnp.tile(noise_level_embed_B11M, (1, T, 1, 1))\n noise_level_B111 = noise_level_B.reshape(B, 1, 1, 1)\n\n noise_augmented_vid_embed_BTNM = jnp.sqrt(1 - noise_level_B111) * vid_embed_BTNM + noise_level_B111 * noise_BTNM\n\n return noise_augmented_vid_embed_BTNM, noise_level_embed_BT1M\n",python,selection_mouse +606,478304,"jasmine/models/dynamics.py",2891,565," noise_bucket_idx_B = jnp.clip(noise_bucket_idx_B, 0, self.noise_buckets - 1)\n\n noise_bucket_idx_B11 = noise_bucket_idx_B.reshape(B, 1, 1)\n noise_level_embed_B11M = self.noise_level_embed(noise_bucket_idx_B11)\n noise_level_embed_BT1M = jnp.tile(noise_level_embed_B11M, (1, T, 1, 1))\n noise_level_B111 = noise_level_B.reshape(B, 1, 1, 1)\n\n noise_augmented_vid_embed_BTNM = jnp.sqrt(1 - noise_level_B111) * vid_embed_BTNM + noise_level_B111 * noise_BTNM\n\n return noise_augmented_vid_embed_BTNM, noise_level_embed_BT1M\n",python,selection_mouse +607,478324,"jasmine/models/dynamics.py",2794,662," \n # Clip noise_bucket_idx_B to ensure it stays within valid range to prevent NaNs\n noise_bucket_idx_B = jnp.clip(noise_bucket_idx_B, 0, self.noise_buckets - 1)\n\n noise_bucket_idx_B11 = noise_bucket_idx_B.reshape(B, 1, 1)\n noise_level_embed_B11M = self.noise_level_embed(noise_bucket_idx_B11)\n noise_level_embed_BT1M = jnp.tile(noise_level_embed_B11M, (1, T, 1, 1))\n noise_level_B111 = noise_level_B.reshape(B, 1, 1, 1)\n\n noise_augmented_vid_embed_BTNM = jnp.sqrt(1 - noise_level_B111) * vid_embed_BTNM + noise_level_B111 * noise_BTNM\n\n return noise_augmented_vid_embed_BTNM, noise_level_embed_BT1M\n",python,selection_mouse +608,478355,"jasmine/models/dynamics.py",2694,762," (noise_level_B * self.noise_buckets) / self.max_noise_level\n ).astype(jnp.int32)\n \n # Clip noise_bucket_idx_B to ensure it stays within valid range to prevent NaNs\n noise_bucket_idx_B = jnp.clip(noise_bucket_idx_B, 0, self.noise_buckets - 1)\n\n noise_bucket_idx_B11 = noise_bucket_idx_B.reshape(B, 1, 1)\n noise_level_embed_B11M = self.noise_level_embed(noise_bucket_idx_B11)\n noise_level_embed_BT1M = jnp.tile(noise_level_embed_B11M, (1, T, 1, 1))\n noise_level_B111 = noise_level_B.reshape(B, 1, 1, 1)\n\n noise_augmented_vid_embed_BTNM = jnp.sqrt(1 - noise_level_B111) * vid_embed_BTNM + noise_level_B111 * noise_BTNM\n\n return noise_augmented_vid_embed_BTNM, noise_level_embed_BT1M\n",python,selection_mouse +609,478356,"jasmine/models/dynamics.py",2654,802," noise_bucket_idx_B = jnp.floor(\n (noise_level_B * 
self.noise_buckets) / self.max_noise_level\n ).astype(jnp.int32)\n \n # Clip noise_bucket_idx_B to ensure it stays within valid range to prevent NaNs\n noise_bucket_idx_B = jnp.clip(noise_bucket_idx_B, 0, self.noise_buckets - 1)\n\n noise_bucket_idx_B11 = noise_bucket_idx_B.reshape(B, 1, 1)\n noise_level_embed_B11M = self.noise_level_embed(noise_bucket_idx_B11)\n noise_level_embed_BT1M = jnp.tile(noise_level_embed_B11M, (1, T, 1, 1))\n noise_level_B111 = noise_level_B.reshape(B, 1, 1, 1)\n\n noise_augmented_vid_embed_BTNM = jnp.sqrt(1 - noise_level_B111) * vid_embed_BTNM + noise_level_B111 * noise_BTNM\n\n return noise_augmented_vid_embed_BTNM, noise_level_embed_BT1M\n",python,selection_mouse +610,478374,"jasmine/models/dynamics.py",2565,891," noise_BTNM = jax.random.normal(_rng_noise, shape=(B, T, N, M), dtype=self.dtype)\n noise_bucket_idx_B = jnp.floor(\n (noise_level_B * self.noise_buckets) / self.max_noise_level\n ).astype(jnp.int32)\n \n # Clip noise_bucket_idx_B to ensure it stays within valid range to prevent NaNs\n noise_bucket_idx_B = jnp.clip(noise_bucket_idx_B, 0, self.noise_buckets - 1)\n\n noise_bucket_idx_B11 = noise_bucket_idx_B.reshape(B, 1, 1)\n noise_level_embed_B11M = self.noise_level_embed(noise_bucket_idx_B11)\n noise_level_embed_BT1M = jnp.tile(noise_level_embed_B11M, (1, T, 1, 1))\n noise_level_B111 = noise_level_B.reshape(B, 1, 1, 1)\n\n noise_augmented_vid_embed_BTNM = jnp.sqrt(1 - noise_level_B111) * vid_embed_BTNM + noise_level_B111 * noise_BTNM\n\n return noise_augmented_vid_embed_BTNM, noise_level_embed_BT1M\n",python,selection_mouse +611,478399,"jasmine/models/dynamics.py",2555,901," )\n noise_BTNM = jax.random.normal(_rng_noise, shape=(B, T, N, M), dtype=self.dtype)\n noise_bucket_idx_B = jnp.floor(\n (noise_level_B * self.noise_buckets) / self.max_noise_level\n ).astype(jnp.int32)\n \n # Clip noise_bucket_idx_B to ensure it stays within valid range to prevent NaNs\n noise_bucket_idx_B = jnp.clip(noise_bucket_idx_B, 0, self.noise_buckets - 1)\n\n noise_bucket_idx_B11 = noise_bucket_idx_B.reshape(B, 1, 1)\n noise_level_embed_B11M = self.noise_level_embed(noise_bucket_idx_B11)\n noise_level_embed_BT1M = jnp.tile(noise_level_embed_B11M, (1, T, 1, 1))\n noise_level_B111 = noise_level_B.reshape(B, 1, 1, 1)\n\n noise_augmented_vid_embed_BTNM = jnp.sqrt(1 - noise_level_B111) * vid_embed_BTNM + noise_level_B111 * noise_BTNM\n\n return noise_augmented_vid_embed_BTNM, noise_level_embed_BT1M\n",python,selection_mouse +612,478420,"jasmine/models/dynamics.py",2457,999," _rng_noise_lvl, shape=(B,), minval=0.0, maxval=self.max_noise_level, dtype=self.dtype\n )\n noise_BTNM = jax.random.normal(_rng_noise, shape=(B, T, N, M), dtype=self.dtype)\n noise_bucket_idx_B = jnp.floor(\n (noise_level_B * self.noise_buckets) / self.max_noise_level\n ).astype(jnp.int32)\n \n # Clip noise_bucket_idx_B to ensure it stays within valid range to prevent NaNs\n noise_bucket_idx_B = jnp.clip(noise_bucket_idx_B, 0, self.noise_buckets - 1)\n\n noise_bucket_idx_B11 = noise_bucket_idx_B.reshape(B, 1, 1)\n noise_level_embed_B11M = self.noise_level_embed(noise_bucket_idx_B11)\n noise_level_embed_BT1M = jnp.tile(noise_level_embed_B11M, (1, T, 1, 1))\n noise_level_B111 = noise_level_B.reshape(B, 1, 1, 1)\n\n noise_augmented_vid_embed_BTNM = jnp.sqrt(1 - noise_level_B111) * vid_embed_BTNM + noise_level_B111 * noise_BTNM\n\n return noise_augmented_vid_embed_BTNM, noise_level_embed_BT1M\n",python,selection_mouse +613,478461,"jasmine/models/dynamics.py",2413,1043," noise_level_B = jax.random.uniform(\n 
_rng_noise_lvl, shape=(B,), minval=0.0, maxval=self.max_noise_level, dtype=self.dtype\n )\n noise_BTNM = jax.random.normal(_rng_noise, shape=(B, T, N, M), dtype=self.dtype)\n noise_bucket_idx_B = jnp.floor(\n (noise_level_B * self.noise_buckets) / self.max_noise_level\n ).astype(jnp.int32)\n \n # Clip noise_bucket_idx_B to ensure it stays within valid range to prevent NaNs\n noise_bucket_idx_B = jnp.clip(noise_bucket_idx_B, 0, self.noise_buckets - 1)\n\n noise_bucket_idx_B11 = noise_bucket_idx_B.reshape(B, 1, 1)\n noise_level_embed_B11M = self.noise_level_embed(noise_bucket_idx_B11)\n noise_level_embed_BT1M = jnp.tile(noise_level_embed_B11M, (1, T, 1, 1))\n noise_level_B111 = noise_level_B.reshape(B, 1, 1, 1)\n\n noise_augmented_vid_embed_BTNM = jnp.sqrt(1 - noise_level_B111) * vid_embed_BTNM + noise_level_B111 * noise_BTNM\n\n return noise_augmented_vid_embed_BTNM, noise_level_embed_BT1M\n",python,selection_mouse +614,478502,"jasmine/models/dynamics.py",2346,1110," rng, _rng_noise_lvl, _rng_noise = jax.random.split(rng, 3)\n noise_level_B = jax.random.uniform(\n _rng_noise_lvl, shape=(B,), minval=0.0, maxval=self.max_noise_level, dtype=self.dtype\n )\n noise_BTNM = jax.random.normal(_rng_noise, shape=(B, T, N, M), dtype=self.dtype)\n noise_bucket_idx_B = jnp.floor(\n (noise_level_B * self.noise_buckets) / self.max_noise_level\n ).astype(jnp.int32)\n \n # Clip noise_bucket_idx_B to ensure it stays within valid range to prevent NaNs\n noise_bucket_idx_B = jnp.clip(noise_bucket_idx_B, 0, self.noise_buckets - 1)\n\n noise_bucket_idx_B11 = noise_bucket_idx_B.reshape(B, 1, 1)\n noise_level_embed_B11M = self.noise_level_embed(noise_bucket_idx_B11)\n noise_level_embed_BT1M = jnp.tile(noise_level_embed_B11M, (1, T, 1, 1))\n noise_level_B111 = noise_level_B.reshape(B, 1, 1, 1)\n\n noise_augmented_vid_embed_BTNM = jnp.sqrt(1 - noise_level_B111) * vid_embed_BTNM + noise_level_B111 * noise_BTNM\n\n return noise_augmented_vid_embed_BTNM, noise_level_embed_BT1M\n",python,selection_mouse +615,478525,"jasmine/models/dynamics.py",2304,1152," B, T, N, M = vid_embed_BTNM.shape\n rng, _rng_noise_lvl, _rng_noise = jax.random.split(rng, 3)\n noise_level_B = jax.random.uniform(\n _rng_noise_lvl, shape=(B,), minval=0.0, maxval=self.max_noise_level, dtype=self.dtype\n )\n noise_BTNM = jax.random.normal(_rng_noise, shape=(B, T, N, M), dtype=self.dtype)\n noise_bucket_idx_B = jnp.floor(\n (noise_level_B * self.noise_buckets) / self.max_noise_level\n ).astype(jnp.int32)\n \n # Clip noise_bucket_idx_B to ensure it stays within valid range to prevent NaNs\n noise_bucket_idx_B = jnp.clip(noise_bucket_idx_B, 0, self.noise_buckets - 1)\n\n noise_bucket_idx_B11 = noise_bucket_idx_B.reshape(B, 1, 1)\n noise_level_embed_B11M = self.noise_level_embed(noise_bucket_idx_B11)\n noise_level_embed_BT1M = jnp.tile(noise_level_embed_B11M, (1, T, 1, 1))\n noise_level_B111 = noise_level_B.reshape(B, 1, 1, 1)\n\n noise_augmented_vid_embed_BTNM = jnp.sqrt(1 - noise_level_B111) * vid_embed_BTNM + noise_level_B111 * noise_BTNM\n\n return noise_augmented_vid_embed_BTNM, noise_level_embed_BT1M\n",python,selection_mouse +616,478651,"jasmine/models/dynamics.py",2346,1110," rng, _rng_noise_lvl, _rng_noise = jax.random.split(rng, 3)\n noise_level_B = jax.random.uniform(\n _rng_noise_lvl, shape=(B,), minval=0.0, maxval=self.max_noise_level, dtype=self.dtype\n )\n noise_BTNM = jax.random.normal(_rng_noise, shape=(B, T, N, M), dtype=self.dtype)\n noise_bucket_idx_B = jnp.floor(\n (noise_level_B * self.noise_buckets) / self.max_noise_level\n 
).astype(jnp.int32)\n \n # Clip noise_bucket_idx_B to ensure it stays within valid range to prevent NaNs\n noise_bucket_idx_B = jnp.clip(noise_bucket_idx_B, 0, self.noise_buckets - 1)\n\n noise_bucket_idx_B11 = noise_bucket_idx_B.reshape(B, 1, 1)\n noise_level_embed_B11M = self.noise_level_embed(noise_bucket_idx_B11)\n noise_level_embed_BT1M = jnp.tile(noise_level_embed_B11M, (1, T, 1, 1))\n noise_level_B111 = noise_level_B.reshape(B, 1, 1, 1)\n\n noise_augmented_vid_embed_BTNM = jnp.sqrt(1 - noise_level_B111) * vid_embed_BTNM + noise_level_B111 * noise_BTNM\n\n return noise_augmented_vid_embed_BTNM, noise_level_embed_BT1M\n",python,selection_mouse +617,478676,"jasmine/models/dynamics.py",2413,1043," noise_level_B = jax.random.uniform(\n _rng_noise_lvl, shape=(B,), minval=0.0, maxval=self.max_noise_level, dtype=self.dtype\n )\n noise_BTNM = jax.random.normal(_rng_noise, shape=(B, T, N, M), dtype=self.dtype)\n noise_bucket_idx_B = jnp.floor(\n (noise_level_B * self.noise_buckets) / self.max_noise_level\n ).astype(jnp.int32)\n \n # Clip noise_bucket_idx_B to ensure it stays within valid range to prevent NaNs\n noise_bucket_idx_B = jnp.clip(noise_bucket_idx_B, 0, self.noise_buckets - 1)\n\n noise_bucket_idx_B11 = noise_bucket_idx_B.reshape(B, 1, 1)\n noise_level_embed_B11M = self.noise_level_embed(noise_bucket_idx_B11)\n noise_level_embed_BT1M = jnp.tile(noise_level_embed_B11M, (1, T, 1, 1))\n noise_level_B111 = noise_level_B.reshape(B, 1, 1, 1)\n\n noise_augmented_vid_embed_BTNM = jnp.sqrt(1 - noise_level_B111) * vid_embed_BTNM + noise_level_B111 * noise_BTNM\n\n return noise_augmented_vid_embed_BTNM, noise_level_embed_BT1M\n",python,selection_mouse +618,478698,"jasmine/models/dynamics.py",2457,999," _rng_noise_lvl, shape=(B,), minval=0.0, maxval=self.max_noise_level, dtype=self.dtype\n )\n noise_BTNM = jax.random.normal(_rng_noise, shape=(B, T, N, M), dtype=self.dtype)\n noise_bucket_idx_B = jnp.floor(\n (noise_level_B * self.noise_buckets) / self.max_noise_level\n ).astype(jnp.int32)\n \n # Clip noise_bucket_idx_B to ensure it stays within valid range to prevent NaNs\n noise_bucket_idx_B = jnp.clip(noise_bucket_idx_B, 0, self.noise_buckets - 1)\n\n noise_bucket_idx_B11 = noise_bucket_idx_B.reshape(B, 1, 1)\n noise_level_embed_B11M = self.noise_level_embed(noise_bucket_idx_B11)\n noise_level_embed_BT1M = jnp.tile(noise_level_embed_B11M, (1, T, 1, 1))\n noise_level_B111 = noise_level_B.reshape(B, 1, 1, 1)\n\n noise_augmented_vid_embed_BTNM = jnp.sqrt(1 - noise_level_B111) * vid_embed_BTNM + noise_level_B111 * noise_BTNM\n\n return noise_augmented_vid_embed_BTNM, noise_level_embed_BT1M\n",python,selection_mouse +619,478704,"jasmine/models/dynamics.py",2555,901," )\n noise_BTNM = jax.random.normal(_rng_noise, shape=(B, T, N, M), dtype=self.dtype)\n noise_bucket_idx_B = jnp.floor(\n (noise_level_B * self.noise_buckets) / self.max_noise_level\n ).astype(jnp.int32)\n \n # Clip noise_bucket_idx_B to ensure it stays within valid range to prevent NaNs\n noise_bucket_idx_B = jnp.clip(noise_bucket_idx_B, 0, self.noise_buckets - 1)\n\n noise_bucket_idx_B11 = noise_bucket_idx_B.reshape(B, 1, 1)\n noise_level_embed_B11M = self.noise_level_embed(noise_bucket_idx_B11)\n noise_level_embed_BT1M = jnp.tile(noise_level_embed_B11M, (1, T, 1, 1))\n noise_level_B111 = noise_level_B.reshape(B, 1, 1, 1)\n\n noise_augmented_vid_embed_BTNM = jnp.sqrt(1 - noise_level_B111) * vid_embed_BTNM + noise_level_B111 * noise_BTNM\n\n return noise_augmented_vid_embed_BTNM, noise_level_embed_BT1M\n",python,selection_mouse 
+620,478741,"jasmine/models/dynamics.py",2654,802," noise_bucket_idx_B = jnp.floor(\n (noise_level_B * self.noise_buckets) / self.max_noise_level\n ).astype(jnp.int32)\n \n # Clip noise_bucket_idx_B to ensure it stays within valid range to prevent NaNs\n noise_bucket_idx_B = jnp.clip(noise_bucket_idx_B, 0, self.noise_buckets - 1)\n\n noise_bucket_idx_B11 = noise_bucket_idx_B.reshape(B, 1, 1)\n noise_level_embed_B11M = self.noise_level_embed(noise_bucket_idx_B11)\n noise_level_embed_BT1M = jnp.tile(noise_level_embed_B11M, (1, T, 1, 1))\n noise_level_B111 = noise_level_B.reshape(B, 1, 1, 1)\n\n noise_augmented_vid_embed_BTNM = jnp.sqrt(1 - noise_level_B111) * vid_embed_BTNM + noise_level_B111 * noise_BTNM\n\n return noise_augmented_vid_embed_BTNM, noise_level_embed_BT1M\n",python,selection_mouse +621,478742,"jasmine/models/dynamics.py",2766,690," ).astype(jnp.int32)\n \n # Clip noise_bucket_idx_B to ensure it stays within valid range to prevent NaNs\n noise_bucket_idx_B = jnp.clip(noise_bucket_idx_B, 0, self.noise_buckets - 1)\n\n noise_bucket_idx_B11 = noise_bucket_idx_B.reshape(B, 1, 1)\n noise_level_embed_B11M = self.noise_level_embed(noise_bucket_idx_B11)\n noise_level_embed_BT1M = jnp.tile(noise_level_embed_B11M, (1, T, 1, 1))\n noise_level_B111 = noise_level_B.reshape(B, 1, 1, 1)\n\n noise_augmented_vid_embed_BTNM = jnp.sqrt(1 - noise_level_B111) * vid_embed_BTNM + noise_level_B111 * noise_BTNM\n\n return noise_augmented_vid_embed_BTNM, noise_level_embed_BT1M\n",python,selection_mouse +622,478761,"jasmine/models/dynamics.py",2803,653," # Clip noise_bucket_idx_B to ensure it stays within valid range to prevent NaNs\n noise_bucket_idx_B = jnp.clip(noise_bucket_idx_B, 0, self.noise_buckets - 1)\n\n noise_bucket_idx_B11 = noise_bucket_idx_B.reshape(B, 1, 1)\n noise_level_embed_B11M = self.noise_level_embed(noise_bucket_idx_B11)\n noise_level_embed_BT1M = jnp.tile(noise_level_embed_B11M, (1, T, 1, 1))\n noise_level_B111 = noise_level_B.reshape(B, 1, 1, 1)\n\n noise_augmented_vid_embed_BTNM = jnp.sqrt(1 - noise_level_B111) * vid_embed_BTNM + noise_level_B111 * noise_BTNM\n\n return noise_augmented_vid_embed_BTNM, noise_level_embed_BT1M\n",python,selection_mouse +623,478783,"jasmine/models/dynamics.py",2976,480,"\n noise_bucket_idx_B11 = noise_bucket_idx_B.reshape(B, 1, 1)\n noise_level_embed_B11M = self.noise_level_embed(noise_bucket_idx_B11)\n noise_level_embed_BT1M = jnp.tile(noise_level_embed_B11M, (1, T, 1, 1))\n noise_level_B111 = noise_level_B.reshape(B, 1, 1, 1)\n\n noise_augmented_vid_embed_BTNM = jnp.sqrt(1 - noise_level_B111) * vid_embed_BTNM + noise_level_B111 * noise_BTNM\n\n return noise_augmented_vid_embed_BTNM, noise_level_embed_BT1M\n",python,selection_mouse +624,478787,"jasmine/models/dynamics.py",2978,478," noise_bucket_idx_B11 = noise_bucket_idx_B.reshape(B, 1, 1)\n noise_level_embed_B11M = self.noise_level_embed(noise_bucket_idx_B11)\n noise_level_embed_BT1M = jnp.tile(noise_level_embed_B11M, (1, T, 1, 1))\n noise_level_B111 = noise_level_B.reshape(B, 1, 1, 1)\n\n noise_augmented_vid_embed_BTNM = jnp.sqrt(1 - noise_level_B111) * vid_embed_BTNM + noise_level_B111 * noise_BTNM\n\n return noise_augmented_vid_embed_BTNM, noise_level_embed_BT1M\n",python,selection_mouse +625,478821,"jasmine/models/dynamics.py",3123,333," noise_level_embed_BT1M = jnp.tile(noise_level_embed_B11M, (1, T, 1, 1))\n noise_level_B111 = noise_level_B.reshape(B, 1, 1, 1)\n\n noise_augmented_vid_embed_BTNM = jnp.sqrt(1 - noise_level_B111) * vid_embed_BTNM + noise_level_B111 * noise_BTNM\n\n return 
noise_augmented_vid_embed_BTNM, noise_level_embed_BT1M\n",python,selection_mouse +626,478827,"jasmine/models/dynamics.py",3263,193,"\n noise_augmented_vid_embed_BTNM = jnp.sqrt(1 - noise_level_B111) * vid_embed_BTNM + noise_level_B111 * noise_BTNM\n\n return noise_augmented_vid_embed_BTNM, noise_level_embed_BT1M\n",python,selection_mouse +627,478844,"jasmine/models/dynamics.py",3266,190," noise_augmented_vid_embed_BTNM = jnp.sqrt(1 - noise_level_B111) * vid_embed_BTNM + noise_level_B111 * noise_BTNM\n\n return noise_augmented_vid_embed_BTNM, noise_level_embed_BT1M\n",python,selection_mouse +628,478947,"jasmine/models/dynamics.py",3204,252," noise_level_B111 = noise_level_B.reshape(B, 1, 1, 1)\n\n noise_augmented_vid_embed_BTNM = jnp.sqrt(1 - noise_level_B111) * vid_embed_BTNM + noise_level_B111 * noise_BTNM\n\n return noise_augmented_vid_embed_BTNM, noise_level_embed_BT1M\n",python,selection_mouse +629,478948,"jasmine/models/dynamics.py",2979,477," noise_bucket_idx_B11 = noise_bucket_idx_B.reshape(B, 1, 1)\n noise_level_embed_B11M = self.noise_level_embed(noise_bucket_idx_B11)\n noise_level_embed_BT1M = jnp.tile(noise_level_embed_B11M, (1, T, 1, 1))\n noise_level_B111 = noise_level_B.reshape(B, 1, 1, 1)\n\n noise_augmented_vid_embed_BTNM = jnp.sqrt(1 - noise_level_B111) * vid_embed_BTNM + noise_level_B111 * noise_BTNM\n\n return noise_augmented_vid_embed_BTNM, noise_level_embed_BT1M\n",python,selection_mouse +630,478957,"jasmine/models/dynamics.py",2805,651," # Clip noise_bucket_idx_B to ensure it stays within valid range to prevent NaNs\n noise_bucket_idx_B = jnp.clip(noise_bucket_idx_B, 0, self.noise_buckets - 1)\n\n noise_bucket_idx_B11 = noise_bucket_idx_B.reshape(B, 1, 1)\n noise_level_embed_B11M = self.noise_level_embed(noise_bucket_idx_B11)\n noise_level_embed_BT1M = jnp.tile(noise_level_embed_B11M, (1, T, 1, 1))\n noise_level_B111 = noise_level_B.reshape(B, 1, 1, 1)\n\n noise_augmented_vid_embed_BTNM = jnp.sqrt(1 - noise_level_B111) * vid_embed_BTNM + noise_level_B111 * noise_BTNM\n\n return noise_augmented_vid_embed_BTNM, noise_level_embed_BT1M\n",python,selection_mouse +631,478978,"jasmine/models/dynamics.py",2695,761," (noise_level_B * self.noise_buckets) / self.max_noise_level\n ).astype(jnp.int32)\n \n # Clip noise_bucket_idx_B to ensure it stays within valid range to prevent NaNs\n noise_bucket_idx_B = jnp.clip(noise_bucket_idx_B, 0, self.noise_buckets - 1)\n\n noise_bucket_idx_B11 = noise_bucket_idx_B.reshape(B, 1, 1)\n noise_level_embed_B11M = self.noise_level_embed(noise_bucket_idx_B11)\n noise_level_embed_BT1M = jnp.tile(noise_level_embed_B11M, (1, T, 1, 1))\n noise_level_B111 = noise_level_B.reshape(B, 1, 1, 1)\n\n noise_augmented_vid_embed_BTNM = jnp.sqrt(1 - noise_level_B111) * vid_embed_BTNM + noise_level_B111 * noise_BTNM\n\n return noise_augmented_vid_embed_BTNM, noise_level_embed_BT1M\n",python,selection_mouse +632,478991,"jasmine/models/dynamics.py",2413,1043," noise_level_B = jax.random.uniform(\n _rng_noise_lvl, shape=(B,), minval=0.0, maxval=self.max_noise_level, dtype=self.dtype\n )\n noise_BTNM = jax.random.normal(_rng_noise, shape=(B, T, N, M), dtype=self.dtype)\n noise_bucket_idx_B = jnp.floor(\n (noise_level_B * self.noise_buckets) / self.max_noise_level\n ).astype(jnp.int32)\n \n # Clip noise_bucket_idx_B to ensure it stays within valid range to prevent NaNs\n noise_bucket_idx_B = jnp.clip(noise_bucket_idx_B, 0, self.noise_buckets - 1)\n\n noise_bucket_idx_B11 = noise_bucket_idx_B.reshape(B, 1, 1)\n noise_level_embed_B11M = 
self.noise_level_embed(noise_bucket_idx_B11)\n noise_level_embed_BT1M = jnp.tile(noise_level_embed_B11M, (1, T, 1, 1))\n noise_level_B111 = noise_level_B.reshape(B, 1, 1, 1)\n\n noise_augmented_vid_embed_BTNM = jnp.sqrt(1 - noise_level_B111) * vid_embed_BTNM + noise_level_B111 * noise_BTNM\n\n return noise_augmented_vid_embed_BTNM, noise_level_embed_BT1M\n",python,selection_mouse +633,479010,"jasmine/models/dynamics.py",2304,1152," B, T, N, M = vid_embed_BTNM.shape\n rng, _rng_noise_lvl, _rng_noise = jax.random.split(rng, 3)\n noise_level_B = jax.random.uniform(\n _rng_noise_lvl, shape=(B,), minval=0.0, maxval=self.max_noise_level, dtype=self.dtype\n )\n noise_BTNM = jax.random.normal(_rng_noise, shape=(B, T, N, M), dtype=self.dtype)\n noise_bucket_idx_B = jnp.floor(\n (noise_level_B * self.noise_buckets) / self.max_noise_level\n ).astype(jnp.int32)\n \n # Clip noise_bucket_idx_B to ensure it stays within valid range to prevent NaNs\n noise_bucket_idx_B = jnp.clip(noise_bucket_idx_B, 0, self.noise_buckets - 1)\n\n noise_bucket_idx_B11 = noise_bucket_idx_B.reshape(B, 1, 1)\n noise_level_embed_B11M = self.noise_level_embed(noise_bucket_idx_B11)\n noise_level_embed_BT1M = jnp.tile(noise_level_embed_B11M, (1, T, 1, 1))\n noise_level_B111 = noise_level_B.reshape(B, 1, 1, 1)\n\n noise_augmented_vid_embed_BTNM = jnp.sqrt(1 - noise_level_B111) * vid_embed_BTNM + noise_level_B111 * noise_BTNM\n\n return noise_augmented_vid_embed_BTNM, noise_level_embed_BT1M\n",python,selection_mouse +634,479021,"jasmine/models/dynamics.py",2242,1214," def _apply_noise_augmentation(self, vid_embed_BTNM, rng):\n B, T, N, M = vid_embed_BTNM.shape\n rng, _rng_noise_lvl, _rng_noise = jax.random.split(rng, 3)\n noise_level_B = jax.random.uniform(\n _rng_noise_lvl, shape=(B,), minval=0.0, maxval=self.max_noise_level, dtype=self.dtype\n )\n noise_BTNM = jax.random.normal(_rng_noise, shape=(B, T, N, M), dtype=self.dtype)\n noise_bucket_idx_B = jnp.floor(\n (noise_level_B * self.noise_buckets) / self.max_noise_level\n ).astype(jnp.int32)\n \n # Clip noise_bucket_idx_B to ensure it stays within valid range to prevent NaNs\n noise_bucket_idx_B = jnp.clip(noise_bucket_idx_B, 0, self.noise_buckets - 1)\n\n noise_bucket_idx_B11 = noise_bucket_idx_B.reshape(B, 1, 1)\n noise_level_embed_B11M = self.noise_level_embed(noise_bucket_idx_B11)\n noise_level_embed_BT1M = jnp.tile(noise_level_embed_B11M, (1, T, 1, 1))\n noise_level_B111 = noise_level_B.reshape(B, 1, 1, 1)\n\n noise_augmented_vid_embed_BTNM = jnp.sqrt(1 - noise_level_B111) * vid_embed_BTNM + noise_level_B111 * noise_BTNM\n\n return noise_augmented_vid_embed_BTNM, noise_level_embed_BT1M\n",python,selection_mouse +635,479043,"jasmine/models/dynamics.py",2241,1215,"\n def _apply_noise_augmentation(self, vid_embed_BTNM, rng):\n B, T, N, M = vid_embed_BTNM.shape\n rng, _rng_noise_lvl, _rng_noise = jax.random.split(rng, 3)\n noise_level_B = jax.random.uniform(\n _rng_noise_lvl, shape=(B,), minval=0.0, maxval=self.max_noise_level, dtype=self.dtype\n )\n noise_BTNM = jax.random.normal(_rng_noise, shape=(B, T, N, M), dtype=self.dtype)\n noise_bucket_idx_B = jnp.floor(\n (noise_level_B * self.noise_buckets) / self.max_noise_level\n ).astype(jnp.int32)\n \n # Clip noise_bucket_idx_B to ensure it stays within valid range to prevent NaNs\n noise_bucket_idx_B = jnp.clip(noise_bucket_idx_B, 0, self.noise_buckets - 1)\n\n noise_bucket_idx_B11 = noise_bucket_idx_B.reshape(B, 1, 1)\n noise_level_embed_B11M = self.noise_level_embed(noise_bucket_idx_B11)\n noise_level_embed_BT1M = 
jnp.tile(noise_level_embed_B11M, (1, T, 1, 1))\n noise_level_B111 = noise_level_B.reshape(B, 1, 1, 1)\n\n noise_augmented_vid_embed_BTNM = jnp.sqrt(1 - noise_level_B111) * vid_embed_BTNM + noise_level_B111 * noise_BTNM\n\n return noise_augmented_vid_embed_BTNM, noise_level_embed_BT1M\n",python,selection_mouse +636,479308,"jasmine/models/dynamics.py",2242,1214," def _apply_noise_augmentation(self, vid_embed_BTNM, rng):\n B, T, N, M = vid_embed_BTNM.shape\n rng, _rng_noise_lvl, _rng_noise = jax.random.split(rng, 3)\n noise_level_B = jax.random.uniform(\n _rng_noise_lvl, shape=(B,), minval=0.0, maxval=self.max_noise_level, dtype=self.dtype\n )\n noise_BTNM = jax.random.normal(_rng_noise, shape=(B, T, N, M), dtype=self.dtype)\n noise_bucket_idx_B = jnp.floor(\n (noise_level_B * self.noise_buckets) / self.max_noise_level\n ).astype(jnp.int32)\n \n # Clip noise_bucket_idx_B to ensure it stays within valid range to prevent NaNs\n noise_bucket_idx_B = jnp.clip(noise_bucket_idx_B, 0, self.noise_buckets - 1)\n\n noise_bucket_idx_B11 = noise_bucket_idx_B.reshape(B, 1, 1)\n noise_level_embed_B11M = self.noise_level_embed(noise_bucket_idx_B11)\n noise_level_embed_BT1M = jnp.tile(noise_level_embed_B11M, (1, T, 1, 1))\n noise_level_B111 = noise_level_B.reshape(B, 1, 1, 1)\n\n noise_augmented_vid_embed_BTNM = jnp.sqrt(1 - noise_level_B111) * vid_embed_BTNM + noise_level_B111 * noise_BTNM\n\n return noise_augmented_vid_embed_BTNM, noise_level_embed_BT1M\n",python,selection_mouse +637,483401,"jasmine/models/dynamics.py",3504,0,"",python,selection_mouse +638,483968,"jasmine/models/dynamics.py",3402,0,"",python,selection_mouse +639,484592,"jasmine/models/dynamics.py",3456,0,"",python,selection_mouse +640,494716,"jasmine/models/dynamics.py",3402,0,"",python,selection_mouse +641,495264,"jasmine/models/dynamics.py",3401,0,"",python,selection_mouse +642,495792,"jasmine/models/dynamics.py",3456,0,"",python,selection_mouse +643,514627,"jasmine/models/dynamics.py",2976,0,"",python,selection_mouse +644,556132,"jasmine/models/dynamics.py",2743,0,"",python,selection_mouse +645,556132,"jasmine/models/dynamics.py",2802,0,"",python,selection_mouse diff --git a/927a8af5474e5654810c00ce2e09fd2de87d3e5722f33fa1090d867db114e403/crowd-code-94fb4d7e-812c-4d36-984a-6626015fa6fd1750854950642-2025_06_25-14.36.16.983/source.csv b/927a8af5474e5654810c00ce2e09fd2de87d3e5722f33fa1090d867db114e403/crowd-code-94fb4d7e-812c-4d36-984a-6626015fa6fd1750854950642-2025_06_25-14.36.16.983/source.csv new file mode 100644 index 0000000000000000000000000000000000000000..39b2b8a6ee06b6c3d1f425def434be970049db1e --- /dev/null +++ b/927a8af5474e5654810c00ce2e09fd2de87d3e5722f33fa1090d867db114e403/crowd-code-94fb4d7e-812c-4d36-984a-6626015fa6fd1750854950642-2025_06_25-14.36.16.983/source.csv @@ -0,0 +1,42 @@ +Sequence,Time,File,RangeOffset,RangeLength,Text,Language,Type +1,4,"train_tokenizer.py",0,0,"from dataclasses import dataclass\nimport os\nimport time\n\nimport einops\nfrom flax.training import orbax_utils\nfrom flax.training.train_state import TrainState\nfrom jax.sharding import Mesh, PartitionSpec, NamedSharding\nfrom jax.experimental.mesh_utils import create_device_mesh\nimport optax\nimport orbax\nfrom orbax.checkpoint import PyTreeCheckpointer\nimport numpy as np\nimport dm_pix as pix\nimport jax\nimport jax.numpy as jnp\nimport tyro\nimport wandb\n\nfrom models.tokenizer import TokenizerVQVAE\nfrom utils.dataloader import get_dataloader\n\nts = int(time.time())\n\n\n@dataclass\nclass Args:\n # Experiment\n num_steps: int = 300_000\n 
seed: int = 0\n seq_len: int = 16\n image_channels: int = 3\n image_height: int = 90\n image_width: int = 160\n data_dir: str = ""data_tfrecords/coinrun""\n checkpoint: str = """"\n # Optimization\n vq_beta: float = 0.25\n batch_size: int = 48\n min_lr: float = 3e-4\n max_lr: float = 3e-4\n warmup_steps: int = 10000\n # Tokenizer\n model_dim: int = 512\n latent_dim: int = 32\n num_latents: int = 1024\n patch_size: int = 4\n num_blocks: int = 8\n num_heads: int = 8\n dropout: float = 0.0\n codebook_dropout: float = 0.01\n # Logging\n log: bool = False\n entity: str = """"\n project: str = """"\n log_interval: int = 5\n log_image_interval: int = 250\n ckpt_dir: str = """"\n log_checkpoint_interval: int = 10000\n log_gradients: bool = False\n\n\nargs = tyro.cli(Args)\n\n\ndef tokenizer_loss_fn(params, state, inputs):\n # --- Compute loss ---\n outputs = state.apply_fn(\n params, inputs, training=True, rngs={""dropout"": inputs[""rng""]}\n )\n mse = jnp.square(inputs[""videos""] - outputs[""recon""]).mean()\n q_loss = jnp.square(jax.lax.stop_gradient(outputs[""emb""]) - outputs[""z""]).mean()\n commitment_loss = jnp.square(\n outputs[""emb""] - jax.lax.stop_gradient(outputs[""z""])\n ).mean()\n loss = mse + q_loss + args.vq_beta * commitment_loss\n\n # --- Compute validation metrics ---\n gt = inputs[""videos""].clip(0, 1).reshape(-1, *inputs[""videos""].shape[2:])\n recon = outputs[""recon""].clip(0, 1).reshape(-1, *outputs[""recon""].shape[2:])\n psnr = pix.psnr(gt, recon).mean()\n ssim = pix.ssim(gt, recon).mean()\n _, index_counts = jnp.unique_counts(\n jnp.ravel(outputs[""indices""]), size=args.num_latents, fill_value=0\n )\n codebook_usage = (index_counts != 0).mean()\n metrics = dict(\n loss=loss,\n mse=mse,\n q_loss=q_loss,\n commitment_loss=commitment_loss,\n psnr=psnr,\n ssim=ssim,\n codebook_usage=codebook_usage,\n )\n return loss, (outputs[""recon""], metrics)\n\n\n@jax.jit\ndef train_step(state, inputs):\n grad_fn = jax.value_and_grad(tokenizer_loss_fn, has_aux=True, allow_int=True)\n (loss, (recon, metrics)), grads = grad_fn(state.params, state, inputs)\n state = state.apply_gradients(grads=grads)\n if args.log_gradients:\n metrics[""encoder_gradients_std/""] = jax.tree.map(\n lambda x: x.std(), grads[""params""][""encoder""]\n )\n metrics[""vq_gradients_std/""] = jax.tree.map(\n lambda x: x.std(), grads[""params""][""vq""]\n )\n metrics[""decoder_gradients_std/""] = jax.tree.map(\n lambda x: x.std(), grads[""params""][""decoder""]\n )\n return state, loss, recon, metrics\n\n\nif __name__ == ""__main__"":\n jax.distributed.initialize()\n num_devices = jax.device_count()\n if num_devices == 0:\n raise ValueError(""No JAX devices found."")\n print(f""Running on {num_devices} devices."")\n\n if args.batch_size % num_devices != 0:\n raise ValueError(\n f""Global batch size {args.batch_size} must be divisible by ""\n f""number of devices {num_devices}.""\n )\n\n per_device_batch_size_for_init = args.batch_size // num_devices\n\n rng = jax.random.PRNGKey(args.seed)\n if args.log and jax.process_index() == 0:\n wandb.init(entity=args.entity, project=args.project, group=""debug"", config=args)\n\n # --- Initialize model ---\n tokenizer = TokenizerVQVAE(\n in_dim=args.image_channels,\n model_dim=args.model_dim,\n latent_dim=args.latent_dim,\n num_latents=args.num_latents,\n patch_size=args.patch_size,\n num_blocks=args.num_blocks,\n num_heads=args.num_heads,\n dropout=args.dropout,\n codebook_dropout=args.codebook_dropout,\n )\n rng, _rng = jax.random.split(rng)\n image_shape = 
(args.image_height, args.image_width, args.image_channels)\n inputs = dict(\n videos=jnp.zeros(\n (per_device_batch_size_for_init, args.seq_len, *image_shape),\n dtype=jnp.float32,\n ),\n )\n init_params = tokenizer.init(_rng, inputs)\n\n # --- Initialize optimizer ---\n lr_schedule = optax.warmup_cosine_decay_schedule(\n args.min_lr, args.max_lr, args.warmup_steps, args.num_steps\n )\n tx = optax.adamw(learning_rate=lr_schedule, b1=0.9, b2=0.9, weight_decay=1e-4)\n train_state = TrainState.create(apply_fn=tokenizer.apply, params=init_params, tx=tx)\n\n # FIXME: switch to create_hybrid_device_mesh for runs spanning multiple nodes\n device_mesh_arr = create_device_mesh((num_devices,))\n mesh = Mesh(devices=device_mesh_arr, axis_names=(""data"",))\n\n replicated_sharding = NamedSharding(mesh, PartitionSpec())\n train_state = jax.device_put(train_state, replicated_sharding)\n\n # --- Load checkpoint ---\n step = 0\n if args.checkpoint:\n restore_target = {""model"": train_state}\n restore_args = orbax_utils.restore_args_from_target(restore_target)\n train_state.params[""params""].update(\n PyTreeCheckpointer()\n .restore(args.checkpoint, item=restore_target, restore_args=restore_args)[\n ""model""\n ]\n .params[""params""]\n )\n # Assume checkpoint is of the form tokenizer__\n step += int(args.checkpoint.split(""_"")[-1])\n\n # --- TRAIN LOOP ---\n tfrecord_files = [\n os.path.join(args.data_dir, x)\n for x in os.listdir(args.data_dir)\n if x.endswith("".tfrecord"")\n ]\n dataloader = get_dataloader(\n # NOTE: We deliberately pass the global batch size\n # The dataloader shards the dataset across all processes\n tfrecord_files,\n args.seq_len,\n args.batch_size,\n *image_shape,\n )\n print(f""Starting training from step {step}..."")\n while step < args.num_steps:\n # for videos in dataloader:\n # npy_path = ""overfit_dir/single_sample_corner.npy""\n npy_path = ""overfit_dir/single_batch_12_elems.npy""\n videos = np.load(npy_path)\n print(""batch shape: "", videos.shape)\n while(True):\n # --- Train step ---\n rng, _rng = jax.random.split(rng)\n\n videos_sharding = NamedSharding(\n mesh, PartitionSpec(""data"", None, None, None, None)\n )\n videos = jax.make_array_from_process_local_data(videos_sharding, videos)\n\n inputs = dict(videos=videos, rng=_rng)\n train_state, loss, recon, metrics = train_step(train_state, inputs)\n print(f""Step {step}, loss: {loss}"")\n step += 1\n\n # --- Logging ---\n if args.log and jax.process_index() == 0:\n if step % args.log_interval == 0:\n wandb.log({""loss"": loss, ""step"": step, **metrics})\n if step % args.log_image_interval == 0:\n gt_seq = inputs[""videos""][0]\n recon_seq = recon[0].clip(0, 1)\n comparison_seq = jnp.concatenate((gt_seq, recon_seq), axis=1)\n comparison_seq = einops.rearrange(\n comparison_seq * 255, ""t h w c -> h (t w) c""\n )\n log_images = dict(\n image=wandb.Image(np.asarray(gt_seq[0])),\n recon=wandb.Image(np.asarray(recon_seq[0])),\n true_vs_recon=wandb.Image(\n np.asarray(comparison_seq.astype(np.uint8))\n ),\n )\n wandb.log(log_images)\n if step % args.log_checkpoint_interval == 0:\n ckpt = {""model"": train_state}\n orbax_checkpointer = orbax.checkpoint.PyTreeCheckpointer()\n save_args = orbax_utils.save_args_from_target(ckpt)\n orbax_checkpointer.save(\n os.path.join(os.getcwd(), args.ckpt_dir, f""tokenizer_{ts}_{step}""),\n ckpt,\n save_args=save_args,\n )\n if step >= args.num_steps:\n break\n",python,tab +2,1687,"extension-output-pdoom-org.crowd-code-#1-crowd-code",0,0,"2:36:16 PM [info] Activating crowd-code\n2:36:16 PM 
[info] Recording started\n2:36:16 PM [info] Initializing git provider using file system watchers...\n",Log,tab +3,1689,"train_tokenizer.py",0,0,"",python,tab +4,1690,"train_tokenizer.py",4056,0,"",python,selection_mouse +5,5861,"TERMINAL",0,0,"/bin/python3 /hkfs/home/project/hk-project-p0023960/tum_cte0515/.cursor-server/extensions/ms-python.python-2024.12.3-linux-x64/python_files/printEnvVariablesToFile.py /hkfs/home/project/hk-project-p0023960/tum_cte0515/.cursor-server/extensions/ms-python.python-2024.12.3-linux-x64/python_files/deactivate/bash/envVars.txt",,terminal_command +6,10311,"train_tokenizer.py",4053,0,"",python,selection_mouse +7,24016,"train_tokenizer.py",4056,0,"",python,selection_mouse +8,24380,"models/tokenizer.py",0,0,"from typing import Dict, Any, Tuple\n\nimport flax.linen as nn\n\nfrom utils.preprocess import patchify, unpatchify\nfrom utils.nn import STTransformer, VectorQuantizer\n\n\nclass TokenizerVQVAE(nn.Module):\n """"""ST-ViVit VQ-VAE""""""\n\n in_dim: int\n model_dim: int\n latent_dim: int\n num_latents: int\n patch_size: int\n num_blocks: int\n num_heads: int\n dropout: float\n codebook_dropout: float\n\n def setup(self):\n self.encoder = STTransformer(\n self.model_dim,\n self.latent_dim,\n self.num_blocks,\n self.num_heads,\n self.dropout,\n )\n self.vq = VectorQuantizer(\n self.latent_dim,\n self.num_latents,\n self.codebook_dropout,\n )\n self.out_dim = self.in_dim * self.patch_size**2\n self.decoder = STTransformer(\n self.model_dim,\n self.out_dim,\n self.num_blocks,\n self.num_heads,\n self.dropout,\n )\n\n def __call__(self, batch: Dict[str, Any], training: bool = True) -> Dict[str, Any]:\n H, W = batch[""videos""].shape[2:4]\n outputs = self.vq_encode(batch[""videos""], training)\n recon = self.decoder(outputs[""z_q""]) # (B, T, H_down * W_down, C)\n recon = nn.sigmoid(recon)\n outputs[""recon""] = unpatchify(recon, self.patch_size, H, W)\n return outputs\n\n def vq_encode(self, videos: Any, training: bool = True) -> Dict[str, Any]:\n # --- Preprocess + encode ---\n B, T = videos.shape[:2]\n x = patchify(videos, self.patch_size)\n N = x.shape[2]\n x = self.encoder(x) # (B, T, N, E)\n\n # --- Vector quantize ---\n x = x.reshape(B * T * N, self.latent_dim)\n z_q, z, emb, indices = self.vq(x, training)\n z_q = z_q.reshape(B, T, N, self.latent_dim)\n indices = indices.reshape(B, T, N)\n return dict(z_q=z_q, z=z, emb=emb, indices=indices)\n\n def decode(self, indices: Any, video_hw: Tuple[int, int]):\n z = self.vq.codebook[indices]\n recon = self.decoder(z)\n recon = nn.sigmoid(recon)\n return unpatchify(recon, self.patch_size, *video_hw)\n",python,tab +9,27086,"models/tokenizer.py",388,0,"",python,selection_mouse +10,27247,"models/tokenizer.py",384,16,"codebook_dropout",python,selection_mouse +11,28185,"models/tokenizer.py",332,0,"",python,selection_mouse +12,28723,"models/tokenizer.py",370,0,"",python,selection_mouse +13,29516,"models/tokenizer.py",312,0,"",python,selection_mouse +14,29672,"models/tokenizer.py",306,10,"patch_size",python,selection_mouse +15,46767,"models/tokenizer.py",1329,0,"",python,selection_mouse +16,47134,"utils/preprocess.py",0,0,"import dm_pix as pix\nimport einops\nimport jax\nimport jax.numpy as jnp\n\n\ndef patchify(videos: jax.Array, size: int) -> jax.Array:\n B, T, H, W, C = videos.shape\n x = jnp.pad(videos, ((0, 0), (0, 0), (0, -H % size), (0, -W % size), (0, 0)))\n return einops.rearrange(\n x, ""b t (hn hp) (wn wp) c -> b t (hn wn) (hp wp c)"", hp=size, wp=size\n )\n\n\ndef unpatchify(patches: jax.Array, size: int, 
h_out: int, w_out: int) -> jax.Array:\n h_pad = -h_out % size\n hn = (h_out + h_pad) // size\n x = einops.rearrange(\n patches,\n ""b t (hn wn) (hp wp c) -> b t (hn hp) (wn wp) c"",\n hp=size,\n wp=size,\n hn=hn,\n )\n return x[:, :, :h_out, :w_out]\n",python,tab +17,162466,"utils/preprocess.py",527,0,"",python,selection_mouse +18,162481,"utils/preprocess.py",526,0,"",python,selection_command +19,163693,"utils/preprocess.py",527,0,"",python,selection_mouse +20,163700,"utils/preprocess.py",526,0,"",python,selection_command +21,164243,"utils/preprocess.py",544,0,"",python,selection_mouse +22,164257,"utils/preprocess.py",543,0,"",python,selection_command +23,165045,"utils/preprocess.py",579,0,"",python,selection_mouse +24,165785,"utils/preprocess.py",619,0,"",python,selection_mouse +25,165797,"utils/preprocess.py",618,0,"",python,selection_command +26,166268,"utils/preprocess.py",544,0,"",python,selection_mouse +27,166273,"utils/preprocess.py",543,0,"",python,selection_command +28,166801,"utils/preprocess.py",602,0,"",python,selection_mouse +29,166805,"utils/preprocess.py",601,0,"",python,selection_command +30,166969,"utils/preprocess.py",602,0,"",python,selection_mouse +31,166976,"utils/preprocess.py",601,0,"",python,selection_command +32,167114,"utils/preprocess.py",545,58," ""b t (hn wn) (hp wp c) -> b t (hn hp) (wn wp) c"",\n",python,selection_mouse +33,167121,"utils/preprocess.py",546,57," ""b t (hn wn) (hp wp c) -> b t (hn hp) (wn wp) c"",\n",python,selection_command +34,167809,"utils/preprocess.py",602,0,"",python,selection_mouse +35,167828,"utils/preprocess.py",601,0,"",python,selection_command +36,400812,"train_tokenizer.py",0,0,"",python,tab +37,413351,"train_tokenizer.py",1116,0,"",python,selection_mouse +38,2135599,"train_tokenizer.py",0,0,"Switched from branch 'mihir-tmp' to 'main'",python,git_branch_checkout +39,2140593,"train_tokenizer.py",0,0,"Switched from branch 'main' to 'tmp'",python,git_branch_checkout +40,11451341,"train_tokenizer.py",0,0,"Switched from branch 'tmp' to 'main'",python,git_branch_checkout +41,11466426,"train_tokenizer.py",0,0,"Switched from branch 'main' to 'add-wandb-name-and-tags'",python,git_branch_checkout diff --git a/927a8af5474e5654810c00ce2e09fd2de87d3e5722f33fa1090d867db114e403/crowd-code-b195e5c2-8599-461c-a7e1-2fb7fc3167491751552100512-2025_07_03-16.15.36.972/source.csv b/927a8af5474e5654810c00ce2e09fd2de87d3e5722f33fa1090d867db114e403/crowd-code-b195e5c2-8599-461c-a7e1-2fb7fc3167491751552100512-2025_07_03-16.15.36.972/source.csv new file mode 100644 index 0000000000000000000000000000000000000000..fcd68fe801fab5f2e3e04b90415c6a4c09c26b64 --- /dev/null +++ b/927a8af5474e5654810c00ce2e09fd2de87d3e5722f33fa1090d867db114e403/crowd-code-b195e5c2-8599-461c-a7e1-2fb7fc3167491751552100512-2025_07_03-16.15.36.972/source.csv @@ -0,0 +1,16851 @@ +Sequence,Time,File,RangeOffset,RangeLength,Text,Language,Type +2,516,"extension-output-pdoom-org.crowd-code-#1-crowd-code",0,0,"4:15:36 PM [info] Activating crowd-code\n4:15:36 PM [info] Recording started\n4:15:36 PM [info] Initializing git provider using file system watchers...\n4:15:37 PM [info] Git repository found\n4:15:37 PM [info] Git provider initialized successfully\n4:15:37 PM [info] Initial git state: [object Object]\n",Log,tab +3,3525,"TERMINAL",0,0,"bash",,terminal_focus +4,3633,"TERMINAL",0,0,"/bin/python3 /hkfs/home/project/hk-project-p0023960/tum_cte0515/.cursor-server/extensions/ms-python.python-2024.12.3-linux-x64/python_files/printEnvVariablesToFile.py 
/hkfs/home/project/hk-project-p0023960/tum_cte0515/.cursor-server/extensions/ms-python.python-2024.12.3-linux-x64/python_files/deactivate/bash/envVars.txt",,terminal_command +5,3685,"TERMINAL",0,0,"]633;E;2025-07-03 16:15:40 /bin/python3 /hkfs/home/project/hk-project-p0023960/tum_cte0515/.cursor-server/extensions/ms-python.python-2024.12.3-linux-x64/python_files/printEnvVariablesToFile.py /hkfs/home/project/hk-project-p0023960/tum_cte0515/.cursor-server/extensions/ms-python.python-2024.12.3-linux-x64/python_files/deactivate/bash/envVars.txt;6b7be28e-e1c6-4d1a-888f-73afd4fcaf02]633;C",,terminal_output +6,3715,"TERMINAL",0,0,"]0;tum_cte0515@hkn1993:/hkfs/home/project/hk-project-p0023960/tum_cte0515/.cursor-server/extensions/ms-python.python-2024.12.3-linux-x64/python_files/deactivate/bash]633;D;0",,terminal_output +7,4258,"TERMINAL",0,0,"bash",,terminal_focus +8,5932,"TERMINAL",0,0,"queue",,terminal_command +9,5984,"TERMINAL",0,0,"]633;E;2025-07-03 16:15:42 queue;3d7e79a2-e817-4582-b692-6332553dbb63]633;C",,terminal_output +10,6052,"TERMINAL",0,0,"[?1049h(B[?7hEvery 1.0s: squeue --mehkn1993.localdomain: Thu Jul 3 16:15:42 2025JOBID PARTITION NAME USER ST\tTIME NODES NODELIST(REASON)",,terminal_output +11,6711,"TERMINAL",0,0,"[?1049l\r[?1l>]0;tum_cte0515@hkn1993:~/Projects/jafar]633;D;0",,terminal_output +12,10212,"TERMINAL",0,0,"salloc --time=01:30:00 --partition=accelerated --nodes=1 --ntasks-per-node=4 --gres=gpu:4 --cpus-per-task=5 --mem=50G",,terminal_command +13,10278,"TERMINAL",0,0,"]633;E;2025-07-03 16:15:47 salloc --time=01:30:00 --partition=accelerated --nodes=1 --ntasks-per-node=4 --gres=gpu:4 --cpus-per-task=5 --mem=50G;3d7e79a2-e817-4582-b692-6332553dbb63]633;Csalloc: Pending job allocation 3315638\r\nsalloc: job 3315638 queued and waiting for resources\r\n",,terminal_output +14,11242,"TERMINAL",0,0,"^Csalloc: Job allocation 3315638 has been revoked.\r\nsalloc: Job aborted due to signal\r\n]0;tum_cte0515@hkn1993:~/Projects/jafar]633;D;1",,terminal_output +15,12142,"TERMINAL",0,0,"bash",,terminal_focus +16,13128,"TERMINAL",0,0,"idling",,terminal_command +17,13223,"TERMINAL",0,0,"]633;E;2025-07-03 16:15:50 idling;70fb23be-3987-4c7c-890b-2d8e1f267fe1]633;C[?1049h(B[?7hEvery 1.0s: sinfo_t_idlehkn1993.localdomain: Thu Jul 3 16:15:50 2025Partition dev_cpuonly: 11 nodes idle\rPartition cpuonly: 19 nodes idle\rPartition dev_accelerated:\t 1 nodes idle\rPartition accelerated:\t 4 nodes idle\rPartition dev_accelerated-h100 :\t 0 nodes idle\rPartition accelerated-h100:\t 0 nodes idle\rPartition large:\t 8 nodes idle",,terminal_output +18,14250,"TERMINAL",0,0,"bash",,terminal_focus +19,14261,"TERMINAL",0,0,"1",,terminal_output +20,15299,"TERMINAL",0,0,"2",,terminal_output +21,16334,"TERMINAL",0,0,"3",,terminal_output +22,17408,"TERMINAL",0,0,"4",,terminal_output +23,18433,"TERMINAL",0,0,"5",,terminal_output +24,19506,"TERMINAL",0,0,"6",,terminal_output +25,20536,"TERMINAL",0,0,"7",,terminal_output +26,21545,"TERMINAL",0,0,"8",,terminal_output +27,22587,"TERMINAL",0,0,"9",,terminal_output +28,23635,"TERMINAL",0,0,"6:00",,terminal_output +29,24694,"TERMINAL",0,0,"1",,terminal_output +30,25713,"TERMINAL",0,0,"2",,terminal_output +31,26760,"TERMINAL",0,0,"3",,terminal_output +32,27501,"TERMINAL",0,0,"salloc --time=01:30:00 --partition=accelerated --nodes=1 --ntasks-per-node=1 --gres=gpu:1 --cpus-per-task=5",,terminal_command +33,27556,"TERMINAL",0,0,"]633;E;2025-07-03 16:16:04 salloc --time=01:30:00 --partition=accelerated --nodes=1 --ntasks-per-node=1 --gres=gpu:1 
--cpus-per-task=5;3d7e79a2-e817-4582-b692-6332553dbb63]633;Csalloc: Pending job allocation 3315639\r\nsalloc: job 3315639 queued and waiting for resources\r\n",,terminal_output +34,27804,"TERMINAL",0,0,"4",,terminal_output +35,28534,"TERMINAL",0,0,"^Csalloc: Job allocation 3315639 has been revoked.\r\nsalloc: Job aborted due to signal\r\n]0;tum_cte0515@hkn1993:~/Projects/jafar]633;D;1",,terminal_output +36,28850,"TERMINAL",0,0,"5",,terminal_output +37,29917,"TERMINAL",0,0,"6",,terminal_output +38,30922,"TERMINAL",0,0,"7",,terminal_output +39,32005,"TERMINAL",0,0,"8",,terminal_output +40,33009,"TERMINAL",0,0,"9",,terminal_output +41,33460,"TERMINAL",0,0,"salloc --time=01:00:00 --partition=accelerated --nodes=1 --ntasks-per-node=1 --gres=gpu:1 --cpus-per-task=5",,terminal_command +42,33511,"TERMINAL",0,0,"]633;E;2025-07-03 16:16:10 salloc --time=01:00:00 --partition=accelerated --nodes=1 --ntasks-per-node=1 --gres=gpu:1 --cpus-per-task=5;3d7e79a2-e817-4582-b692-6332553dbb63]633;Csalloc: Pending job allocation 3315641\r\nsalloc: job 3315641 queued and waiting for resources\r\n",,terminal_output +43,34055,"TERMINAL",0,0,"1020",,terminal_output +44,34885,"TERMINAL",0,0,"^Csalloc: Job allocation 3315641 has been revoked.\r\nsalloc: Job aborted due to signal\r\n]0;tum_cte0515@hkn1993:~/Projects/jafar]633;D;1",,terminal_output +45,35127,"TERMINAL",0,0,"1",,terminal_output +46,36157,"TERMINAL",0,0,"2",,terminal_output +47,37176,"TERMINAL",0,0,"4",,terminal_output +48,38216,"TERMINAL",0,0,"5",,terminal_output +49,38658,"TERMINAL",0,0,"salloc --time=01:00:00 --partition=dev_accelerated --nodes=1 --ntasks-per-node=1 --gres=gpu:1 --cpus-per-task=5",,terminal_command +50,38735,"TERMINAL",0,0,"]633;E;2025-07-03 16:16:15 salloc --time=01:00:00 --partition=dev_accelerated --nodes=1 --ntasks-per-node=1 --gres=gpu:1 --cpus-per-task=5;3d7e79a2-e817-4582-b692-6332553dbb63]633;Csalloc: Pending job allocation 3315642\r\nsalloc: job 3315642 queued and waiting for resources\r\n",,terminal_output +51,39255,"TERMINAL",0,0,"6",,terminal_output +52,40303,"TERMINAL",0,0,"7",,terminal_output +53,40442,"TERMINAL",0,0,"watch",,terminal_focus +54,41338,"TERMINAL",0,0,"8",,terminal_output +55,42210,"TERMINAL",0,0,"salloc",,terminal_focus +56,42384,"TERMINAL",0,0,"9",,terminal_output +57,43423,"TERMINAL",0,0,"20",,terminal_output +58,44450,"TERMINAL",0,0,"^Csalloc: Job allocation 3315642 has been revoked.\r\nsalloc: Job aborted due to signal\r\n]0;tum_cte0515@hkn1993:~/Projects/jafar]633;D;1]633;P;Cwd=/home/hk-project-p0023960/tum_cte0515/Projects/jafar",,terminal_output +59,44531,"TERMINAL",0,0,"1",,terminal_output +60,45534,"TERMINAL",0,0,"2",,terminal_output +61,46593,"TERMINAL",0,0,"3",,terminal_output +62,47588,"TERMINAL",0,0,"419",,terminal_output +63,48629,"TERMINAL",0,0,"5",,terminal_output +64,48756,"TERMINAL",0,0,"salloc --time=01:00:00 --partition=accelerated --nodes=1 --ntasks-per-node=1 --gres=gpu:1 --cpus-per-task=5",,terminal_command +65,48810,"TERMINAL",0,0,"]633;E;2025-07-03 16:16:25 salloc --time=01:00:00 --partition=accelerated --nodes=1 --ntasks-per-node=1 --gres=gpu:1 --cpus-per-task=5;3d7e79a2-e817-4582-b692-6332553dbb63]633;Csalloc: Pending job allocation 3315644\r\nsalloc: job 3315644 queued and waiting for resources\r\n",,terminal_output +66,49685,"TERMINAL",0,0,"6",,terminal_output +67,50722,"TERMINAL",0,0,"7",,terminal_output +68,51759,"TERMINAL",0,0,"8",,terminal_output +69,52811,"TERMINAL",0,0,"9",,terminal_output +70,53850,"TERMINAL",0,0,"30",,terminal_output 
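The train_tokenizer.py content recorded near the top of this capture pairs a warmup-cosine learning-rate schedule with AdamW. A minimal sketch of that pattern; the numeric values are placeholders standing in for args.min_lr / args.max_lr / args.warmup_steps / args.num_steps, which the recording does not show:

```python
# Sketch of the recorded schedule/optimizer pairing; the numbers here
# are hypothetical, not the values used in the session.
import optax

lr_schedule = optax.warmup_cosine_decay_schedule(
    init_value=1e-6,      # args.min_lr (hypothetical)
    peak_value=3e-4,      # args.max_lr (hypothetical)
    warmup_steps=1_000,   # args.warmup_steps (hypothetical)
    decay_steps=100_000,  # args.num_steps (hypothetical)
)
tx = optax.adamw(learning_rate=lr_schedule, b1=0.9, b2=0.9, weight_decay=1e-4)
```

Worth flagging when reproducing the run: the recorded call passes b2=0.9 rather than optax's AdamW default of 0.999.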
+71,54590,"scripts_horeka/modelsize_scaling/dynamics/model_sizes.md",0,0,"# Genie 1 - Model Sizes and their configs\n\n## Tokenizer model: sizes\n\ndefault: \n| Model | model_dim | num_blocks | num_heads | latent_dim | num_latents | Est. Params |\n|-------|-----------|------------|-----------|------------|-------------|-------------|\n| default | 512 | 8 | 8 | 32 | 1024 | ~38M |\n\n### scaling up \n#### (not tested yet - TODO @mihir)\n\n| Model | model_dim | num_blocks | num_heads | latent_dim | num_latents | Est. Params |\n|-------|-----------|------------|-----------|------------|-------------|-------------|\n| L1 | 768 | 12 | 12 | 64 | 2048 | ~80M |\n| L2 | 1024 | 12 | 16 | 128 | 2048 | ~140M |\n| L3 | 1152 | 16 | 16 | 128 | 4096 | ~200M |\n| L4 | 896 | 16 | 14 | 96 | 4096 | ~120M |\n| L5 | 1536 | 12 | 24 | 256 | 2048 | ~190M |\n\n\n### tiny models\n| Model | model_dim | num_blocks | num_heads | latent_dim | num_latents | Est. Params |\n|-------|-----------|------------|-----------|------------|-------------|-------------|\n| S1 | 128 | 2 | 2 | 8 | 128 | ~0.6M |\n| S2 | 192 | 2 | 3 | 16 | 128 | ~1.3M |\n| S3 | 256 | 3 | 4 | 16 | 256 | ~3.6M |\n| S4 | 320 | 4 | 5 | 24 | 256 | ~7.4M |\n| S5 | 384 | 4 | 6 | 32 | 512 | ~10M |\n\n\n## Latent Action model: sizes\ndefault: \n| Model | model_dim | num_blocks | num_heads | latent_dim | num_latents | Est. Params |\n|-------|-----------|------------|-----------|------------|-------------|-------------|\n| default | 512 | 8 | 8 | 32 | 6 | ~39M |\n\n### scaling up \n#### (not tested yet - TODO @mihir)\n\n| Name | model_dim | num_blocks | num_heads | latent_dim | num_latents | Est. Params |\n|--------------|-----------|------------|-----------|------------|-------------|-------------|\n| XL | 1024 | 12 | 16 | 64 | 12 | ~200M |\n| L | 896 | 12 | 14 | 48 | 8 | ~150M |\n| M+ | 768 | 10 | 12 | 48 | 8 | ~100M |\n| M | 640 | 10 | 10 | 32 | 8 | ~70M |\n| Base+ | 512 | 12 | 8 | 32 | 8 | ~55M |\n\n\n### tiny models\n| Name | model_dim | num_blocks | num_heads | latent_dim | num_latents | Est. Params |\n|--------------|-----------|------------|-----------|------------|-------------|-------------|\n| XS | 128 | 2 | 2 | 8 | 4 | ~0.9M |\n| S | 160 | 2 | 2 | 8 | 4 | ~1.3M |\n| S+ | 192 | 3 | 3 | 8 | 4 | ~2.4M |\n| M- | 256 | 4 | 4 | 16 | 6 | ~5.4M |\n| M | 320 | 6 | 4 | 16 | 6 | ~12M |\n\n\n## Dynamics model: sizes \n\n| Config | dyna_dim | dyna_num_blocks | dyna_num_heads | Approx. Params |\n|--------|----------|-----------------|---------------|----------------|\n| 1 | 512 | 12 | 8 | ~36M |\n| 2 | 768 | 16 | 12 | ~110M |\n| 3 | 1024 | 16 | 16 | ~180M |\n| 4 | 1024 | 24 | 16 | ~270M |\n| 5 | 1536 | 24 | 24 | ~500M |\n\n\n### tiny models\n| Config | dyna_dim | dyna_num_blocks | dyna_num_heads | Approx. 
Params |\n|--------|----------|-----------------|---------------|----------------|\n| A | 128 | 2 | 4 | ~1.5M |\n| B | 256 | 2 | 4 | ~3.5M |\n| C | 256 | 4 | 4 | ~6M |\n| D | 384 | 4 | 6 | ~12M |\n| E | 512 | 4 | 8 | ~18M |",markdown,tab +72,55513,"TERMINAL",0,0,"1",,terminal_output +73,55959,"TERMINAL",0,0,"2",,terminal_output +74,56969,"TERMINAL",0,0,"3",,terminal_output +75,58021,"TERMINAL",0,0,"4",,terminal_output +76,59048,"TERMINAL",0,0,"5",,terminal_output +77,60093,"TERMINAL",0,0,"6",,terminal_output +78,61134,"TERMINAL",0,0,"7",,terminal_output +79,62178,"TERMINAL",0,0,"9",,terminal_output +80,63232,"TERMINAL",0,0,"40",,terminal_output +81,64270,"TERMINAL",0,0,"1",,terminal_output +82,65312,"TERMINAL",0,0,"2",,terminal_output +83,66346,"TERMINAL",0,0,"335",,terminal_output +84,67395,"TERMINAL",0,0,"4",,terminal_output +85,68435,"TERMINAL",0,0,"5",,terminal_output +86,69474,"TERMINAL",0,0,"6",,terminal_output +87,70519,"TERMINAL",0,0,"7",,terminal_output +88,71560,"TERMINAL",0,0,"8",,terminal_output +89,72604,"TERMINAL",0,0,"9",,terminal_output +90,73638,"TERMINAL",0,0,"50",,terminal_output +91,74681,"TERMINAL",0,0,"1",,terminal_output +92,75795,"TERMINAL",0,0,"2",,terminal_output +93,76820,"TERMINAL",0,0,"3",,terminal_output +94,77847,"TERMINAL",0,0,"4",,terminal_output +95,78860,"TERMINAL",0,0,"5",,terminal_output +96,79924,"TERMINAL",0,0,"6",,terminal_output +97,81122,"TERMINAL",0,0,"7",,terminal_output +98,82017,"TERMINAL",0,0,"8",,terminal_output +99,83030,"TERMINAL",0,0,"9",,terminal_output +100,84102,"TERMINAL",0,0,"7:00",,terminal_output +101,85118,"TERMINAL",0,0,"1",,terminal_output +102,86192,"TERMINAL",0,0,"3",,terminal_output +103,87201,"TERMINAL",0,0,"4",,terminal_output +104,88241,"TERMINAL",0,0,"5",,terminal_output +105,90151,"TERMINAL",0,0,"salloc: job 3315644 has been allocated resources\r\nsalloc: Granted job allocation 3315644\r\n",,terminal_output +106,90151,"TERMINAL",0,0,"6 60",,terminal_output +107,90245,"TERMINAL",0,0,"salloc: Waiting for resource configuration\r\n",,terminal_output +108,91179,"TERMINAL",0,0,"8",,terminal_output +109,92386,"TERMINAL",0,0,"9",,terminal_output +110,93264,"TERMINAL",0,0,"10",,terminal_output +111,94311,"TERMINAL",0,0,"1",,terminal_output +112,95348,"TERMINAL",0,0,"2",,terminal_output +113,96480,"TERMINAL",0,0,"3",,terminal_output +114,97410,"TERMINAL",0,0,"4",,terminal_output +115,98449,"TERMINAL",0,0,"5",,terminal_output +116,99552,"TERMINAL",0,0,"6",,terminal_output +117,100576,"TERMINAL",0,0,"7",,terminal_output +118,101570,"TERMINAL",0,0,"8",,terminal_output +119,102626,"TERMINAL",0,0,"9",,terminal_output +120,103852,"TERMINAL",0,0,"20",,terminal_output +121,104774,"TERMINAL",0,0,"1",,terminal_output +122,105902,"TERMINAL",0,0,"2",,terminal_output +123,106823,"TERMINAL",0,0,"3",,terminal_output +124,107848,"TERMINAL",0,0,"4",,terminal_output +125,108973,"TERMINAL",0,0,"5",,terminal_output +126,109915,"TERMINAL",0,0,"6",,terminal_output +127,110983,"TERMINAL",0,0,"7",,terminal_output +128,112059,"TERMINAL",0,0,"8",,terminal_output +129,113035,"TERMINAL",0,0,"9",,terminal_output +130,114912,"TERMINAL",0,0,"30",,terminal_output +131,115936,"TERMINAL",0,0,"2",,terminal_output +132,116964,"TERMINAL",0,0,"3",,terminal_output +133,117988,"TERMINAL",0,0,"4",,terminal_output +134,119011,"TERMINAL",0,0,"5",,terminal_output +135,120049,"TERMINAL",0,0,"6",,terminal_output +136,121099,"TERMINAL",0,0,"7",,terminal_output +137,122131,"TERMINAL",0,0,"8",,terminal_output +138,122900,"TERMINAL",0,0,"salloc: Nodes hkn0704 are ready for 
job\r\n",,terminal_output +139,123171,"TERMINAL",0,0,"40",,terminal_output +140,124211,"TERMINAL",0,0,"1",,terminal_output +141,125415,"TERMINAL",0,0,"2",,terminal_output +142,125416,"TERMINAL",0,0,"]0;tum_cte0515@hkn0704:~/Projects/jafar[?2004h[tum_cte0515@hkn0704 jafar]$ ",,terminal_output +143,126303,"TERMINAL",0,0,"3",,terminal_output +144,127398,"TERMINAL",0,0,"4",,terminal_output +145,128434,"TERMINAL",0,0,"57",,terminal_output +146,128904,"TERMINAL",0,0,"s",,terminal_output +147,129033,"TERMINAL",0,0,"[?25lo[?25h",,terminal_output +148,129075,"TERMINAL",0,0,"[?25lu[?25h",,terminal_output +149,129136,"TERMINAL",0,0,"[?25lr[?25h",,terminal_output +150,129336,"TERMINAL",0,0,"[?25lc[?25h",,terminal_output +151,129496,"TERMINAL",0,0,"6",,terminal_output +152,129571,"TERMINAL",0,0,"[?25le[?25h[?25l [?25h",,terminal_output +153,129688,"TERMINAL",0,0,"[?25l.[?25h",,terminal_output +154,130105,"TERMINAL",0,0,"",,terminal_output +155,130478,"TERMINAL",0,0,"7",,terminal_output +156,130924,"TERMINAL",0,0,"[?25lv[?25henv/",,terminal_output +157,131271,"TERMINAL",0,0,"[?25lb[?25h",,terminal_output +158,131396,"TERMINAL",0,0,"in/",,terminal_output +159,131523,"TERMINAL",0,0,"8",,terminal_output +160,131870,"TERMINAL",0,0,"[?25la[?25h",,terminal_output +161,132071,"TERMINAL",0,0,"[?25lc[?25h",,terminal_output +162,132248,"TERMINAL",0,0,"tivate",,terminal_output +163,132548,"TERMINAL",0,0,"[?25l[?2004l\r]0;tum_cte0515@hkn0704:~/Projects/jafar[?2004h(jafar) [tum_cte0515@hkn0704 jafar]$ [?25h",,terminal_output +164,132585,"TERMINAL",0,0,"9",,terminal_output +165,133607,"TERMINAL",0,0,"50",,terminal_output +166,134676,"TERMINAL",0,0,"1",,terminal_output +167,135709,"TERMINAL",0,0,"2",,terminal_output +168,136835,"TERMINAL",0,0,"3",,terminal_output +169,137818,"TERMINAL",0,0,"4",,terminal_output +170,138069,"TERMINAL",0,0,"source .venv/bin/activate",,terminal_output +171,138297,"TERMINAL",0,0,"idling",,terminal_output +172,138450,"TERMINAL",0,0,"ls",,terminal_output +173,138613,"TERMINAL",0,0,"cd logs_mihir/",,terminal_output +174,138784,"TERMINAL",0,0,"ls",,terminal_output +175,138836,"TERMINAL",0,0,"5",,terminal_output +176,138894,"TERMINAL",0,0,"cd logs/",,terminal_output +177,139083,"TERMINAL",0,0,"..",,terminal_output +178,139247,"TERMINAL",0,0,"checkpoints/",,terminal_output +179,139435,"TERMINAL",0,0,"ls",,terminal_output +180,139573,"TERMINAL",0,0,"cd ..",,terminal_output +181,139780,"TERMINAL",0,0,"$ws_dir",,terminal_output +182,139887,"TERMINAL",0,0,"6",,terminal_output +183,139951,"TERMINAL",0,0,"queue",,terminal_output +184,140299,"TERMINAL",0,0,"/bin/python3 /hkfs/home/project/hk-project-p0023960/tum_cte0515/.cursor-server/extensions/ms-python.python-2024.12.3-linux-x64/python_files/printEnvVariablesToFile.py /hkfs/home/project/hk-project-p0023960/tum_cte0515/.cursor-server/extensions/ms-python.python-2024.12.3-linux-x64/python_files/deactivate/bash/envVars.txt",,terminal_output +185,140458,"TERMINAL",0,0,"\rsh scripts_horeka/overfit_sample_tiny/sample.sh \r\n\r",,terminal_output +186,140927,"TERMINAL",0,0,"7",,terminal_output +187,141002,"TERMINAL",0,0,"^C[?2004l\r[?2004h[?2004l\r\r\n]0;tum_cte0515@hkn0704:~/Projects/jafar[?2004h(jafar) [tum_cte0515@hkn0704 jafar]$ ",,terminal_output +188,142076,"TERMINAL",0,0,"8",,terminal_output +189,142077,"TERMINAL",0,0,"s",,terminal_output +190,143005,"TERMINAL",0,0,"9",,terminal_output +191,144082,"TERMINAL",0,0,"8:00",,terminal_output +192,145162,"TERMINAL",0,0,"1",,terminal_output +193,146134,"TERMINAL",0,0,"2",,terminal_output 
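The "Est. Params" columns in the model_sizes.md capture above can be sanity-checked with a standard transformer parameter estimate. A rough sketch, assuming each block costs about 12·model_dim² parameters (attention plus a 4x MLP) and ignoring embeddings, codebooks, and output heads:

```python
# Back-of-the-envelope check for the "Est. Params" columns; an
# order-of-magnitude estimate only, since the ST-transformer blocks,
# VQ codebooks and patch embeddings in the actual models are not counted.
def approx_transformer_params(model_dim: int, num_blocks: int) -> int:
    return 12 * model_dim**2 * num_blocks

# Default tokenizer row (model_dim=512, num_blocks=8):
print(f"{approx_transformer_params(512, 8) / 1e6:.1f}M")  # ~25M vs. ~38M in the table
```

The gap to the tabulated ~38M is plausible given the spatiotemporal attention (two attention sub-layers per block) that this estimate does not model.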
+194,146305,"TERMINAL",0,0,"sh scripts_horeka/overfit_sample_tiny/sample.sh",,terminal_output +195,147200,"TERMINAL",0,0,"49",,terminal_output +196,148129,"genie.py",0,0,"from typing import Dict, Any\n\nimport optax\nimport jax\nimport jax.numpy as jnp\nimport flax.linen as nn\nfrom jax import NamedSharding\nfrom flax.training.train_state import TrainState\nfrom flax.training import orbax_utils\nfrom orbax.checkpoint import PyTreeCheckpointer\n\nfrom models.dynamics import DynamicsMaskGIT\nfrom models.lam import LatentActionModel\nfrom models.tokenizer import TokenizerVQVAE\n\n\nclass Genie(nn.Module):\n """"""Genie model""""""\n\n # --- Tokenizer ---\n in_dim: int\n tokenizer_dim: int\n latent_patch_dim: int\n num_patch_latents: int\n patch_size: int\n tokenizer_num_blocks: int\n tokenizer_num_heads: int\n # --- LAM ---\n lam_dim: int\n latent_action_dim: int\n num_latent_actions: int\n lam_patch_size: int\n lam_num_blocks: int\n lam_num_heads: int\n # --- Dynamics ---\n dyna_dim: int\n dyna_num_blocks: int\n dyna_num_heads: int\n dropout: float = 0.0\n mask_limit: float = 0.0\n\n def setup(self):\n self.tokenizer = TokenizerVQVAE(\n in_dim=self.in_dim,\n model_dim=self.tokenizer_dim,\n latent_dim=self.latent_patch_dim,\n num_latents=self.num_patch_latents,\n patch_size=self.patch_size,\n num_blocks=self.tokenizer_num_blocks,\n num_heads=self.tokenizer_num_heads,\n dropout=0.0,\n codebook_dropout=0.0,\n )\n self.lam = LatentActionModel(\n in_dim=self.in_dim,\n model_dim=self.lam_dim,\n latent_dim=self.latent_patch_dim,\n num_latents=self.num_latent_actions,\n patch_size=self.lam_patch_size,\n num_blocks=self.lam_num_blocks,\n num_heads=self.lam_num_heads,\n dropout=0.0,\n codebook_dropout=0.0,\n )\n self.dynamics = DynamicsMaskGIT(\n model_dim=self.dyna_dim,\n num_latents=self.num_patch_latents,\n num_blocks=self.dyna_num_blocks,\n num_heads=self.dyna_num_heads,\n dropout=self.dropout,\n mask_limit=self.mask_limit,\n )\n\n def __call__(self, batch: Dict[str, Any], training: bool = True) -> Dict[str, Any]:\n tokenizer_outputs = self.tokenizer.vq_encode(batch[""videos""], training=False)\n lam_outputs = self.lam.vq_encode(batch[""videos""], training=False)\n outputs = dict(\n video_tokens=jax.lax.stop_gradient(tokenizer_outputs[""indices""]),\n latent_actions=jax.lax.stop_gradient(lam_outputs[""z_q""]),\n )\n outputs[""mask_rng""] = batch[""mask_rng""]\n dyna_outputs = self.dynamics(outputs, training)\n outputs.update(dyna_outputs)\n mle_indices = jnp.argmax(outputs[""token_logits""], axis=-1)\n outputs[""recon""] = self.tokenizer.decode(\n mle_indices, batch[""videos""].shape[2:4]\n )\n return outputs\n\n @nn.compact\n def sample_mihir(\n self,\n batch: Dict[str, Any],\n seq_len: int,\n steps: int,\n temperature: float,\n sample_argmax: bool,\n ) -> Any:\n # B == batch_size\n # T == num_frames (input)\n # N == num_patches\n # S == seq_len\n # A == action_space\n # D == latent_dim\n # --- Encode videos and actions ---\n tokenizer_out = self.tokenizer.vq_encode(batch[""videos""], training=False)\n token_idxs = tokenizer_out[""indices""] # (B, T, N)\n B, T, N = token_idxs.shape\n S = seq_len\n\n # begin potential forloop (from T to S)\n print(""token_idxs shape:"", token_idxs.shape)\n pad_shape = (B, S - T, N)\n pad = jnp.zeros(pad_shape, dtype=token_idxs.dtype)\n token_idxs = jnp.concatenate([token_idxs, pad], axis=1)\n\n print(""token_idxs shape:"", token_idxs.shape)\n new_frame_idxs = jnp.zeros_like(token_idxs)[:, 0] # (B, N) # TODO remove\n print(""new_frame_idxs.shape:"", 
new_frame_idxs.shape)\n action_tokens = self.lam.vq.get_codes(batch[""latent_actions""])\n print(""action_tokens.shape:"", action_tokens.shape) # (B, S, A, D)\n\n # --- Initialize MaskGIT ---\n init_mask = jnp.ones_like(token_idxs, dtype=bool)[:, 0] # (B, N)\n init_carry = (\n batch[""rng""],\n new_frame_idxs,\n init_mask,\n token_idxs,\n action_tokens,\n )\n jax.debug.breakpoint()\n MaskGITLoop = nn.scan(\n MaskGITStepMihir,\n variable_broadcast=""params"",\n split_rngs={""params"": False},\n in_axes=0,\n out_axes=0,\n length=steps,\n )\n\n # --- Run MaskGIT loop ---\n loop_fn = MaskGITLoop(\n dynamics=self.dynamics,\n tokenizer=self.tokenizer,\n temperature=temperature,\n sample_argmax=sample_argmax,\n steps=steps,\n )\n final_carry, _ = loop_fn(init_carry, jnp.arange(steps))\n new_frame_idxs = final_carry[1]\n new_frame_pixels = self.tokenizer.decode(\n jnp.expand_dims(new_frame_idxs, 1),\n video_hw=batch[""videos""].shape[2:4],\n )\n return new_frame_pixels\n\n @nn.compact\n def sample(\n self,\n batch: Dict[str, Any],\n steps: int = 25,\n temperature: int = 1,\n sample_argmax: bool = False,\n ) -> Any:\n # --- Encode videos and actions ---\n tokenizer_out = self.tokenizer.vq_encode(batch[""videos""], training=False)\n token_idxs = tokenizer_out[""indices""]\n new_frame_idxs = jnp.zeros_like(token_idxs)[:, 0]\n action_tokens = self.lam.vq.get_codes(batch[""latent_actions""])\n\n # --- Initialize MaskGIT ---\n init_mask = jnp.ones_like(token_idxs, dtype=bool)[:, 0]\n init_carry = (\n batch[""rng""],\n new_frame_idxs,\n init_mask,\n token_idxs,\n action_tokens,\n )\n MaskGITLoop = nn.scan(\n MaskGITStep,\n variable_broadcast=""params"",\n split_rngs={""params"": False},\n in_axes=0,\n out_axes=0,\n length=steps,\n )\n\n # --- Run MaskGIT loop ---\n loop_fn = MaskGITLoop(\n dynamics=self.dynamics,\n tokenizer=self.tokenizer,\n temperature=temperature,\n sample_argmax=sample_argmax,\n steps=steps,\n )\n final_carry, _ = loop_fn(init_carry, jnp.arange(steps))\n new_frame_idxs = final_carry[1]\n new_frame_pixels = self.tokenizer.decode(\n jnp.expand_dims(new_frame_idxs, 1),\n video_hw=batch[""videos""].shape[2:4],\n )\n return new_frame_pixels\n\n def vq_encode(self, batch, training) -> Dict[str, Any]:\n # --- Preprocess videos ---\n lam_output = self.lam.vq_encode(batch[""videos""], training=training)\n return lam_output[""indices""]\n\n\nclass MaskGITStepMihir(nn.Module):\n dynamics: nn.Module\n tokenizer: nn.Module\n temperature: float\n sample_argmax: bool\n steps: int\n\n @nn.compact\n def __call__(self, carry, x):\n rng, final_token_idxs, mask, token_idxs, action_tokens = carry\n step = x\n B, T, N = token_idxs.shape[:3]\n\n # --- Construct + encode video ---\n vid_token_idxs = jnp.concatenate(\n (token_idxs, jnp.expand_dims(final_token_idxs, 1)), axis=1\n )\n vid_embed = self.dynamics.patch_embed(vid_token_idxs)\n curr_masked_frame = jnp.where(\n jnp.expand_dims(mask, -1),\n self.dynamics.mask_token[0],\n vid_embed[:, -1],\n )\n vid_embed = vid_embed.at[:, -1].set(curr_masked_frame)\n\n # --- Predict transition ---\n act_embed = self.dynamics.action_up(action_tokens)\n vid_embed += jnp.pad(act_embed, ((0, 0), (1, 0), (0, 0), (0, 0)))\n unmasked_ratio = jnp.cos(jnp.pi * (step + 1) / (self.steps * 2))\n step_temp = self.temperature * (1.0 - unmasked_ratio)\n final_logits = self.dynamics.dynamics(vid_embed)[:, -1] / step_temp\n\n # --- Sample new tokens for final frame ---\n if self.sample_argmax:\n sampled_token_idxs = jnp.argmax(final_logits, axis=-1)\n else:\n rng, _rng = 
jax.random.split(rng)\n sampled_token_idxs = jnp.where(\n step == self.steps - 1,\n jnp.argmax(final_logits, axis=-1),\n jax.random.categorical(_rng, final_logits),\n )\n gather_fn = jax.vmap(jax.vmap(lambda x, y: x[y]))\n final_token_probs = gather_fn(jax.nn.softmax(final_logits), sampled_token_idxs)\n final_token_probs += ~mask\n # Update masked tokens only\n new_token_idxs = jnp.where(mask, sampled_token_idxs, final_token_idxs)\n\n # --- Update mask ---\n num_unmasked_tokens = jnp.round(N * (1.0 - unmasked_ratio)).astype(int)\n idx_mask = jnp.arange(final_token_probs.shape[-1]) > num_unmasked_tokens\n sorted_idxs = jnp.argsort(final_token_probs, axis=-1, descending=True)\n mask_update_fn = jax.vmap(lambda msk, ids: msk.at[ids].set(idx_mask))\n new_mask = mask_update_fn(mask, sorted_idxs)\n\n new_carry = (rng, new_token_idxs, new_mask, token_idxs, action_tokens)\n return new_carry, None\n\n\nclass MaskGITStep(nn.Module):\n dynamics: nn.Module\n tokenizer: nn.Module\n temperature: float\n sample_argmax: bool\n steps: int\n\n @nn.compact\n def __call__(self, carry, x):\n rng, final_token_idxs, mask, token_idxs, action_tokens = carry\n step = x\n B, T, N = token_idxs.shape[:3]\n\n # --- Construct + encode video ---\n vid_token_idxs = jnp.concatenate(\n (token_idxs, jnp.expand_dims(final_token_idxs, 1)), axis=1\n ) # (B, T+1, N)\n vid_embed = self.dynamics.patch_embed(vid_token_idxs) # (B, T+1, N, D)\n curr_masked_frame = jnp.where(\n jnp.expand_dims(mask, -1), # (B, N, 1)\n self.dynamics.mask_token[0], # (B, 1, D)\n vid_embed[:, -1], # (B, N, D)\n ) # (B, N, D)\n vid_embed = vid_embed.at[:, -1].set(curr_masked_frame)\n jax.debug.breakpoint()\n\n # --- Predict transition ---\n act_embed = self.dynamics.action_up(action_tokens)\n vid_embed += jnp.pad(act_embed, ((0, 0), (1, 0), (0, 0), (0, 0)))\n unmasked_ratio = jnp.cos(jnp.pi * (step + 1) / (self.steps * 2))\n step_temp = self.temperature * (1.0 - unmasked_ratio)\n final_logits = self.dynamics.dynamics(vid_embed)[:, -1] / step_temp\n\n # --- Sample new tokens for final frame ---\n if self.sample_argmax:\n sampled_token_idxs = jnp.argmax(final_logits, axis=-1)\n else:\n rng, _rng = jax.random.split(rng)\n sampled_token_idxs = jnp.where(\n step == self.steps - 1,\n jnp.argmax(final_logits, axis=-1),\n jax.random.categorical(_rng, final_logits),\n )\n gather_fn = jax.vmap(jax.vmap(lambda x, y: x[y]))\n final_token_probs = gather_fn(jax.nn.softmax(final_logits), sampled_token_idxs)\n final_token_probs += ~mask\n # Update masked tokens only\n new_token_idxs = jnp.where(mask, sampled_token_idxs, final_token_idxs)\n\n # --- Update mask ---\n num_unmasked_tokens = jnp.round(N * (1.0 - unmasked_ratio)).astype(int)\n idx_mask = jnp.arange(final_token_probs.shape[-1]) > num_unmasked_tokens\n sorted_idxs = jnp.argsort(final_token_probs, axis=-1, descending=True)\n mask_update_fn = jax.vmap(lambda msk, ids: msk.at[ids].set(idx_mask))\n new_mask = mask_update_fn(mask, sorted_idxs)\n\n new_carry = (rng, new_token_idxs, new_mask, token_idxs, action_tokens)\n return new_carry, None\n\n\ndef restore_genie_components(\n train_state: TrainState,\n sharding: NamedSharding,\n inputs: Dict[str, jax.Array],\n rng: jax.Array,\n args,\n):\n """"""Restore pre-trained Genie components""""""\n rng, _rng = jax.random.split(rng)\n\n dummy_tokenizer = TokenizerVQVAE(\n in_dim=args.image_channels,\n model_dim=args.tokenizer_dim,\n latent_dim=args.latent_patch_dim,\n num_latents=args.num_patch_latents,\n patch_size=args.patch_size,\n num_blocks=args.tokenizer_num_blocks,\n 
num_heads=args.tokenizer_num_heads,\n dropout=args.dropout,\n codebook_dropout=args.dropout,\n )\n dummy_lam = LatentActionModel(\n in_dim=args.image_channels,\n model_dim=args.lam_dim,\n latent_dim=args.latent_patch_dim,\n num_latents=args.num_latent_actions,\n patch_size=args.lam_patch_size,\n num_blocks=args.lam_num_blocks,\n num_heads=args.lam_num_heads,\n dropout=args.dropout,\n codebook_dropout=args.dropout,\n )\n tokenizer_init_params = dummy_tokenizer.init(_rng, inputs)\n lam_init_params = dummy_lam.init(_rng, inputs)\n\n # dummy values since we only use tx to initialize the dummy train states\n dummy_tx = optax.adamw(\n learning_rate=optax.constant_schedule(args.max_lr),\n b1=0.9,\n b2=0.9,\n weight_decay=1e-4,\n )\n\n dummy_tokenizer_train_state = TrainState.create(\n apply_fn=dummy_tokenizer.apply, params=tokenizer_init_params, tx=dummy_tx\n )\n dummy_lam_train_state = TrainState.create(\n apply_fn=dummy_lam.apply, params=lam_init_params, tx=dummy_tx\n )\n\n def create_abstract_sharded_pytree(pytree_template, sharding_spec):\n """"""Replaces arrays in a pytree with ShapeDtypeStructs having the given sharding.""""""\n\n def map_fn(leaf_template):\n if hasattr(leaf_template, ""shape"") and hasattr(leaf_template, ""dtype""):\n return jax.ShapeDtypeStruct(\n leaf_template.shape, leaf_template.dtype, sharding=sharding_spec\n )\n return leaf_template\n\n return jax.tree_util.tree_map(map_fn, pytree_template)\n\n abstract_sharded_tokenizer_state = create_abstract_sharded_pytree(\n dummy_tokenizer_train_state, sharding\n )\n abstract_sharded_lam_state = create_abstract_sharded_pytree(\n dummy_lam_train_state, sharding\n )\n\n tokenizer_restore_target = {""model"": abstract_sharded_tokenizer_state}\n lam_restore_target = {""model"": abstract_sharded_lam_state}\n\n tokenizer_restore_args = orbax_utils.restore_args_from_target(\n tokenizer_restore_target\n )\n lam_restore_args = orbax_utils.restore_args_from_target(lam_restore_target)\n\n restored_tokenizer_params = (\n PyTreeCheckpointer()\n .restore(\n args.tokenizer_checkpoint,\n item=tokenizer_restore_target,\n restore_args=tokenizer_restore_args,\n )[""model""]\n .params[""params""]\n )\n restored_lam_params = (\n PyTreeCheckpointer()\n .restore(\n args.lam_checkpoint, item=lam_restore_target, restore_args=lam_restore_args\n )[""model""]\n .params[""params""]\n )\n # Genie does not initialize all LAM modules, thus we omit those extra modules during restoration\n # (f.srambical) FIXME: Currently, this is a small HBM memory crunch since the LAM's decoder is loaded into HBM and immediately dicarded.\n # A workaround would be to restore to host memory first, and only move the weights to HBM after pruning the decoder\n restored_lam_params = {\n k: v\n for k, v in restored_lam_params.items()\n if k in train_state.params[""params""][""lam""]\n }\n\n train_state.params[""params""][""tokenizer""].update(restored_tokenizer_params)\n train_state.params[""params""][""lam""].update(restored_lam_params)\n\n return train_state\n",python,tab +197,148130,"genie.py",3714,0,"",python,selection_mouse +198,148281,"TERMINAL",0,0,"5",,terminal_output +199,148720,"genie.py",3556,0,"",python,selection_mouse +200,148742,"genie.py",3555,0,"",python,selection_command +201,149259,"TERMINAL",0,0,"6",,terminal_output +202,150299,"TERMINAL",0,0,"7",,terminal_output +203,151367,"TERMINAL",0,0,"8",,terminal_output +204,152509,"TERMINAL",0,0,"9",,terminal_output +205,152954,"genie.py",3714,0,"",python,selection_mouse +206,153625,"TERMINAL",0,0,"10",,terminal_output 
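The MaskGITStep modules in the genie.py capture above decide, at each of `steps` iterations, how many of the N final-frame tokens may be committed: a cosine schedule keeps most tokens masked early and unmasks everything by the last step. A self-contained sketch of just that schedule:

```python
# Sketch of the cosine schedule from MaskGITStep above. Note the value
# the capture calls unmasked_ratio actually behaves as the fraction that
# must REMAIN masked: it starts near 1 and decays to 0, so the committed
# count grows from ~0 to all N patches over the loop.
import jax.numpy as jnp

def num_committed_tokens(step: int, steps: int, num_patches: int) -> jnp.ndarray:
    masked_ratio = jnp.cos(jnp.pi * (step + 1) / (steps * 2))
    return jnp.round(num_patches * (1.0 - masked_ratio)).astype(int)

# With steps=25 and N=256 patches: step 0 commits ~1 token,
# step 12 commits ~81, and step 24 commits all 256.
for s in (0, 12, 24):
    print(int(num_committed_tokens(s, steps=25, num_patches=256)))
```

The same cosine value also scales the sampling temperature in the recorded code (step_temp = temperature * (1.0 - unmasked_ratio)), so sampling is sharpest in the early iterations and only reaches the nominal temperature on the final step.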
+207,154120,"genie.py",4055,0,"",python,selection_mouse +208,154477,"TERMINAL",0,0,"1",,terminal_output +209,155566,"TERMINAL",0,0,"2",,terminal_output +210,156292,"genie.py",3909,0,"",python,selection_mouse +211,156299,"genie.py",3908,0,"",python,selection_command +212,156695,"TERMINAL",0,0,"3",,terminal_output +213,157529,"genie.py",4040,0,"",python,selection_mouse +214,157654,"TERMINAL",0,0,"4",,terminal_output +215,158032,"genie.py",4055,0,"",python,selection_mouse +216,158740,"TERMINAL",0,0,"5",,terminal_output +217,158929,"genie.py",4055,0," ",python,content +218,159275,"genie.py",4059,0," ",python,content +219,159708,"TERMINAL",0,0,"6",,terminal_output +220,160070,"genie.py",4063,0,"\n ",python,content +221,160070,"genie.py",4055,8,"",python,content +222,160285,"genie.py",4064,0,"j",python,content +223,160286,"genie.py",4065,0,"",python,selection_keyboard +224,160356,"genie.py",4065,0,"a",python,content +225,160357,"genie.py",4066,0,"",python,selection_keyboard +226,160539,"genie.py",4066,0,"x",python,content +227,160540,"genie.py",4067,0,"",python,selection_keyboard +228,160668,"genie.py",4067,0,".",python,content +229,160669,"genie.py",4068,0,"",python,selection_keyboard +230,160748,"TERMINAL",0,0,"7",,terminal_output +231,160887,"genie.py",4068,0,"d",python,content +232,160889,"genie.py",4069,0,"",python,selection_keyboard +233,161042,"genie.py",4069,0,"e",python,content +234,161044,"genie.py",4070,0,"",python,selection_keyboard +235,161109,"genie.py",4070,0,"b",python,content +236,161111,"genie.py",4071,0,"",python,selection_keyboard +237,161362,"genie.py",4071,0,"u",python,content +238,161363,"genie.py",4072,0,"",python,selection_keyboard +239,161378,"genie.py",4072,0,"g",python,content +240,161379,"genie.py",4073,0,"",python,selection_keyboard +241,161524,"genie.py",4073,0,"-",python,content +242,161526,"genie.py",4074,0,"",python,selection_keyboard +243,161808,"genie.py",4074,0,"b",python,content +244,161809,"genie.py",4075,0,"",python,selection_keyboard +245,161826,"TERMINAL",0,0,"8",,terminal_output +246,161872,"genie.py",4075,0,"r",python,content +247,161874,"genie.py",4076,0,"",python,selection_keyboard +248,162242,"genie.py",4075,1,"",python,content +249,162378,"genie.py",4074,1,"",python,content +250,162486,"genie.py",4073,1,"",python,content +251,162830,"TERMINAL",0,0,"9",,terminal_output +252,163164,"genie.py",4073,0,".",python,content +253,163165,"genie.py",4074,0,"",python,selection_keyboard +254,163427,"genie.py",4074,0,"b",python,content +255,163428,"genie.py",4075,0,"",python,selection_keyboard +256,163529,"genie.py",4075,0,"r",python,content +257,163531,"genie.py",4076,0,"",python,selection_keyboard +258,163723,"genie.py",4076,0,"e",python,content +259,163724,"genie.py",4077,0,"",python,selection_keyboard +260,163870,"TERMINAL",0,0,"20",,terminal_output +261,164067,"genie.py",4077,0,"a",python,content +262,164069,"genie.py",4078,0,"",python,selection_keyboard +263,164234,"genie.py",4078,0,"k",python,content +264,164237,"genie.py",4079,0,"",python,selection_keyboard +265,164422,"genie.py",4079,0,"p",python,content +266,164424,"genie.py",4080,0,"",python,selection_keyboard +267,164878,"TERMINAL",0,0,"1",,terminal_output +268,164947,"genie.py",4074,6,"breakpoint",python,content +269,165719,"genie.py",4084,0,"()",python,content +270,165721,"genie.py",4085,0,"",python,selection_keyboard +271,165798,"genie.py",4085,1,")",python,content +272,165799,"genie.py",4086,0,"",python,selection_keyboard +273,165930,"genie.py",4085,0,"",python,selection_command 
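The keystroke events above show jax.debug.breakpoint() being typed into MaskGITStep; this call is what later drops the session into the (jdb) prompt. A minimal sketch of the same mechanism (the function f here is illustrative, not from the recording):

```python
# jax.debug.breakpoint() works inside jit- and scan-traced code, pausing
# execution at runtime and exposing the traced values to a CLI debugger.
import jax
import jax.numpy as jnp

@jax.jit
def f(x):
    y = jnp.sin(x)
    jax.debug.breakpoint()  # execution stops here with access to x and y
    return y * 2

# f(jnp.arange(3.0))  # uncomment to land at an interactive (jdb) prompt
```

Unlike Python's built-in breakpoint(), this survives tracing, which is why it can sit inside the nn.scan'd MaskGIT loop in the capture.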
+274,165951,"TERMINAL",0,0,"2",,terminal_output +275,167073,"TERMINAL",0,0,"3",,terminal_output +276,167649,"TERMINAL",0,0,"[?25l\rsh scripts_horeka/overfit_sample_tiny/sample.sh\r\n[?2004l\r[?25h",,terminal_output +277,167793,"TERMINAL",0,0,"Sampling from checkpoint: /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/3309699/genie_1751384516_74500/\r\n",,terminal_output +278,168004,"TERMINAL",0,0,"4",,terminal_output +279,169083,"TERMINAL",0,0,"511",,terminal_output +280,169179,"genie.py",0,0,"",python,tab +281,169182,"genie.py",4086,0,"",python,selection_mouse +282,169250,"genie.py",4085,0,"",python,selection_command +283,170117,"TERMINAL",0,0,"6",,terminal_output +284,171128,"TERMINAL",0,0,"7",,terminal_output +285,172237,"TERMINAL",0,0,"9",,terminal_output +286,173265,"TERMINAL",0,0,"30",,terminal_output +287,174311,"TERMINAL",0,0,"1",,terminal_output +288,175431,"TERMINAL",0,0,"2",,terminal_output +289,176393,"TERMINAL",0,0,"3",,terminal_output +290,177489,"TERMINAL",0,0,"4",,terminal_output +291,178505,"TERMINAL",0,0,"5",,terminal_output +292,179527,"TERMINAL",0,0,"6",,terminal_output +293,180757,"TERMINAL",0,0,"7",,terminal_output +294,181612,"TERMINAL",0,0,"8",,terminal_output +295,182808,"TERMINAL",0,0,"9",,terminal_output +296,183699,"TERMINAL",0,0,"40",,terminal_output +297,184856,"TERMINAL",0,0,"12",,terminal_output +298,185892,"TERMINAL",0,0,"2",,terminal_output +299,186916,"TERMINAL",0,0,"2025-07-03 16:18:43.637039: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +300,186939,"TERMINAL",0,0,"3",,terminal_output +301,187984,"TERMINAL",0,0,"4",,terminal_output +302,188949,"TERMINAL",0,0,"56",,terminal_output +303,190075,"TERMINAL",0,0,"6",,terminal_output +304,191048,"TERMINAL",0,0,"7",,terminal_output +305,191169,"TERMINAL",0,0,"2025-07-03 16:18:48.070644: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +306,192235,"TERMINAL",0,0,"8",,terminal_output +307,193105,"TERMINAL",0,0,"9",,terminal_output +308,194172,"TERMINAL",0,0,"51",,terminal_output +309,195211,"TERMINAL",0,0,"2",,terminal_output +310,196235,"TERMINAL",0,0,"3",,terminal_output +311,197278,"TERMINAL",0,0,"4",,terminal_output +312,198318,"TERMINAL",0,0,"5",,terminal_output +313,199393,"TERMINAL",0,0,"6",,terminal_output +314,200419,"TERMINAL",0,0,"7",,terminal_output +315,200534,"TERMINAL",0,0,"2025-07-03 16:18:57.433092: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. 
Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +316,201455,"TERMINAL",0,0,"8",,terminal_output +317,202570,"TERMINAL",0,0,"9",,terminal_output +318,203593,"TERMINAL",0,0,"9:00",,terminal_output +319,204718,"TERMINAL",0,0,"1",,terminal_output +320,205682,"TERMINAL",0,0,"2",,terminal_output +321,206713,"TERMINAL",0,0,"3",,terminal_output +322,207791,"TERMINAL",0,0,"4",,terminal_output +323,208610,"TERMINAL",0,0,"2025-07-03 16:19:05.482502: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +324,208807,"TERMINAL",0,0,"5",,terminal_output +325,210044,"TERMINAL",0,0,"6",,terminal_output +326,210996,"TERMINAL",0,0,"71",,terminal_output +327,211990,"TERMINAL",0,0,"8",,terminal_output +328,212999,"TERMINAL",0,0,"9",,terminal_output +329,214139,"TERMINAL",0,0,"10",,terminal_output +330,215077,"TERMINAL",0,0,"1",,terminal_output +331,215233,"TERMINAL",0,0,"2025-07-03 16:19:12.134354: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +332,216217,"TERMINAL",0,0,"2",,terminal_output +333,217153,"TERMINAL",0,0,"4",,terminal_output +334,218197,"TERMINAL",0,0,"5",,terminal_output +335,219242,"TERMINAL",0,0,"6",,terminal_output +336,220283,"TERMINAL",0,0,"7",,terminal_output +337,221324,"TERMINAL",0,0,"8",,terminal_output +338,221969,"TERMINAL",0,0,"2025-07-03 16:19:18.869741: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +339,222369,"TERMINAL",0,0,"9",,terminal_output +340,223458,"TERMINAL",0,0,"20",,terminal_output +341,224586,"TERMINAL",0,0,"1",,terminal_output +342,225524,"TERMINAL",0,0,"2",,terminal_output +343,225697,"TERMINAL",0,0,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/orbax/checkpoint/_src/serialization/type_handlers.py:1251: UserWarning: Sharding info not provided when restoring. Populating sharding info from sharding file. Please note restoration time will be slightly increased due to reading from file. 
Note also that this option is unsafe when restoring on a different topology than the checkpoint was saved with.\r\n warnings.warn(\r\n",,terminal_output +344,225806,"TERMINAL",0,0,"Every 1.0s: sinfo_t_idlehkn1993.localdomain: Thu Jul 3 16:19:22 2025Partition dev_cpuonly: 11 nodes idle\rPartition cpuonly: 11 nodes idle\rPartition dev_accelerated:\t 1 nodes idle\rPartition accelerated:\t 0 nodes idle\rPartition dev_accelerated-h100 :\t 0 nodes idle\rPartition accelerated-h100:\t 0 nodes idle\rPartition large:\t 8 nodes idle",,terminal_output +345,226829,"TERMINAL",0,0,"3",,terminal_output +346,227872,"TERMINAL",0,0,"4",,terminal_output +347,228988,"TERMINAL",0,0,"5",,terminal_output +348,230080,"TERMINAL",0,0,"6",,terminal_output +349,231002,"TERMINAL",0,0,"79",,terminal_output +350,232063,"TERMINAL",0,0,"8",,terminal_output +351,233115,"TERMINAL",0,0,"9",,terminal_output +352,233127,"TERMINAL",0,0,"Frame 1\r\n",,terminal_output +353,233698,"TERMINAL",0,0,"2025-07-03 16:19:30.575954: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +354,234135,"TERMINAL",0,0,"30",,terminal_output +355,235295,"TERMINAL",0,0,"2",,terminal_output +356,236257,"TERMINAL",0,0,"3",,terminal_output +357,236800,"TERMINAL",0,0,"2025-07-03 16:19:33.700297: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +358,237368,"TERMINAL",0,0,"4",,terminal_output +359,238319,"TERMINAL",0,0,"5",,terminal_output +360,239401,"TERMINAL",0,0,"6",,terminal_output +361,240489,"TERMINAL",0,0,"7",,terminal_output +362,241457,"TERMINAL",0,0,"8",,terminal_output +363,242499,"TERMINAL",0,0,"9",,terminal_output +364,243541,"TERMINAL",0,0,"40",,terminal_output +365,244600,"TERMINAL",0,0,"1",,terminal_output +366,244936,"TERMINAL",0,0,"2025-07-03 16:19:41.832878: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +367,245636,"TERMINAL",0,0,"2",,terminal_output +368,246690,"TERMINAL",0,0,"3",,terminal_output +369,247734,"TERMINAL",0,0,"4",,terminal_output +370,248699,"TERMINAL",0,0,"2025-07-03 16:19:45.594787: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. 
Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +371,248814,"TERMINAL",0,0,"5",,terminal_output +372,249812,"TERMINAL",0,0,"6",,terminal_output +373,251007,"TERMINAL",0,0,"7",,terminal_output +374,251921,"TERMINAL",0,0,"8",,terminal_output +375,252983,"TERMINAL",0,0,"9",,terminal_output +376,253218,"TERMINAL",0,0,"Entering jdb:\r\n(jdb) ",,terminal_output +377,253984,"TERMINAL",0,0,"50",,terminal_output +378,255028,"TERMINAL",0,0,"1",,terminal_output +379,256125,"TERMINAL",0,0,"2",,terminal_output +380,257206,"TERMINAL",0,0,"3",,terminal_output +381,258424,"TERMINAL",0,0,"5",,terminal_output +382,259204,"TERMINAL",0,0,"6",,terminal_output +383,260279,"TERMINAL",0,0,"7",,terminal_output +384,261319,"TERMINAL",0,0,"8",,terminal_output +385,262347,"TERMINAL",0,0,"9",,terminal_output +386,263391,"TERMINAL",0,0,"20:00",,terminal_output +387,264433,"TERMINAL",0,0,"1",,terminal_output +388,265527,"TERMINAL",0,0,"2",,terminal_output +389,266545,"TERMINAL",0,0,"3",,terminal_output +390,267564,"TERMINAL",0,0,"4",,terminal_output +391,268604,"TERMINAL",0,0,"5",,terminal_output +392,269667,"TERMINAL",0,0,"6",,terminal_output +393,270692,"TERMINAL",0,0,"7",,terminal_output +394,271720,"TERMINAL",0,0,"8",,terminal_output +395,272765,"TERMINAL",0,0,"9",,terminal_output +396,273866,"TERMINAL",0,0,"10",,terminal_output +397,274890,"TERMINAL",0,0,"1",,terminal_output +398,275914,"TERMINAL",0,0,"2",,terminal_output +399,277040,"TERMINAL",0,0,"3",,terminal_output +400,277989,"TERMINAL",0,0,"4",,terminal_output +401,279088,"TERMINAL",0,0,"5",,terminal_output +402,280115,"TERMINAL",0,0,"6",,terminal_output +403,281114,"TERMINAL",0,0,"7",,terminal_output +404,282160,"TERMINAL",0,0,"9",,terminal_output +405,283287,"TERMINAL",0,0,"20",,terminal_output +406,284311,"TERMINAL",0,0,"1",,terminal_output +407,285272,"TERMINAL",0,0,"2",,terminal_output +408,286360,"TERMINAL",0,0,"3",,terminal_output +409,287355,"TERMINAL",0,0,"4",,terminal_output +410,287873,"TERMINAL",0,0,"l",,terminal_output +411,287945,"TERMINAL",0,0,"\r\n> /hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/genie.py(296)\r\n curr_masked_frame = jnp.where(\r\n jnp.expand_dims(mask, -1), # (B, N, 1)\r\n self.dynamics.mask_token[0], # (B, 1, D)\r\n vid_embed[:, -1], # (B, N, D)\r\n ) # (B, N, D)\r\n vid_embed = vid_embed.at[:, -1].set(curr_masked_frame)\r\n-> jax.debug.breakpoint()\r\n \r\n # --- Predict transition ---\r\n act_embed = self.dynamics.action_up(action_tokens)\r\n vid_embed += jnp.pad(act_embed, ((0, 0), (1, 0), (0, 0), (0, 0)))\r\n unmasked_ratio = jnp.cos(jnp.pi * (step + 1) / (self.steps * 2))\r\n(jdb) ",,terminal_output +412,288429,"TERMINAL",0,0,"5",,terminal_output +413,289438,"TERMINAL",0,0,"6",,terminal_output +414,290558,"TERMINAL",0,0,"7",,terminal_output +415,291582,"TERMINAL",0,0,"8",,terminal_output +416,292563,"TERMINAL",0,0,"9",,terminal_output +417,293593,"TERMINAL",0,0,"3020",,terminal_output +418,294654,"TERMINAL",0,0,"1",,terminal_output +419,295780,"TERMINAL",0,0,"2",,terminal_output +420,296721,"TERMINAL",0,0,"3",,terminal_output +421,297062,"genie.py",0,0,"",python,tab +422,297064,"genie.py",3434,0,"",python,selection_mouse +423,297137,"genie.py",3433,0,"",python,selection_command +424,297723,"genie.py",3399,0,"",python,selection_mouse +425,297762,"genie.py",3398,0,"",python,selection_command +426,297784,"TERMINAL",0,0,"4",,terminal_output +427,297886,"genie.py",3398,1,")",python,selection_mouse 
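The orbax UserWarning captured above ("Sharding info not provided when restoring") fires when the checkpointer has to read sharding metadata back from the checkpoint files. A sketch of the remedy already used by restore_genie_components in the genie.py capture: restore against ShapeDtypeStructs that carry the target sharding. The helper name restore_with_sharding is ours, not from the recording:

```python
# Hand orbax an abstract pytree whose leaves already know their target
# sharding, so nothing has to be inferred from the checkpoint on disk.
import jax
from flax.training import orbax_utils
from orbax.checkpoint import PyTreeCheckpointer

def restore_with_sharding(ckpt_path, state_template, sharding):
    def to_abstract(leaf):
        if hasattr(leaf, "shape") and hasattr(leaf, "dtype"):
            return jax.ShapeDtypeStruct(leaf.shape, leaf.dtype, sharding=sharding)
        return leaf

    target = {"model": jax.tree_util.tree_map(to_abstract, state_template)}
    restore_args = orbax_utils.restore_args_from_target(target)
    return PyTreeCheckpointer().restore(
        ckpt_path, item=target, restore_args=restore_args
    )["model"]
```

The SystemExit traceback recorded after this point is expected behavior rather than a crash in the model code: quitting jdb (here via Ctrl-D) raises SystemExit inside the jax.debug callback, which XLA then surfaces as the CpuCallback INTERNAL error.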
+428,297899,"genie.py",3399,0,"",python,selection_command +429,298048,"genie.py",3397,2,"N)",python,selection_mouse +430,298049,"genie.py",3393,6," T, N)",python,selection_mouse +431,298049,"genie.py",3391,8,"B, T, N)",python,selection_mouse +432,298145,"genie.py",3390,9,"(B, T, N)",python,selection_mouse +433,298201,"genie.py",3389,10," (B, T, N)",python,selection_mouse +434,298836,"genie.py",3389,0,"",python,selection_mouse +435,298879,"TERMINAL",0,0,"5",,terminal_output +436,299876,"TERMINAL",0,0,"6",,terminal_output +437,300924,"TERMINAL",0,0,"7",,terminal_output +438,301936,"TERMINAL",0,0,"8",,terminal_output +439,302993,"TERMINAL",0,0,"9",,terminal_output +440,304016,"TERMINAL",0,0,"40",,terminal_output +441,305098,"TERMINAL",0,0,"1",,terminal_output +442,306098,"TERMINAL",0,0,"2",,terminal_output +443,307250,"TERMINAL",0,0,"3",,terminal_output +444,308283,"TERMINAL",0,0,"5",,terminal_output +445,308911,"TERMINAL",0,0,"^DERROR:2025-07-03 16:20:45,725:jax._src.debugging:96: jax.debug.callback failed\r\nTraceback (most recent call last):\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/jax/_src/debugging.py"", line 94, in debug_callback_impl\r\n callback(*args)\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/jax/_src/debugging.py"", line 334, in _flat_callback\r\n callback(*args, **kwargs)\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/jax/_src/debugger/core.py"", line 220, in _breakpoint_callback\r\n debugger(frames, thread_id, **kwargs)\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/jax/_src/debugger/cli_debugger.py"", line 167, in run_debugger\r\n CliDebugger(frames, thread_id, **kwargs).run()\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/jax/_src/debugger/cli_debugger.py"", line 160, in run\r\n self.cmdloop()\r\n File ""/home/hk-project-p0023960/tum_cte0515/.local/share/uv/python/cpython-3.10.18-linux-x86_64-gnu/lib/python3.10/cmd.py"", line 138, in cmdloop\r\n stop = self.onecmd(line)\r\n File ""/home/hk-project-p0023960/tum_cte0515/.local/share/uv/python/cpython-3.10.18-linux-x86_64-gnu/lib/python3.10/cmd.py"", line 217, in onecmd\r\n return func(arg)\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/jax/_src/debugger/cli_debugger.py"", line 146, in do_quit\r\n sys.exit(0)\r\nSystemExit: 0\r\nERROR:jax._src.debugging:jax.debug.callback failed\r\nTraceback (most recent call last):\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/jax/_src/debugging.py"", line 94, in debug_callback_impl\r\n callback(*args)\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/jax/_src/debugging.py"", line 334, in _flat_callback\r\n callback(*args, **kwargs)\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/jax/_src/debugger/core.py"", line 220, in _breakpoint_callback\r\n debugger(frames, thread_id, **kwargs)\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/jax/_src/debugger/cli_debugger.py"", line 167, in run_debugger\r\n CliDebugger(frames, thread_id, **kwargs).run()\r\n File 
""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/jax/_src/debugger/cli_debugger.py"", line 160, in run\r\n self.cmdloop()\r\n File ""/home/hk-project-p0023960/tum_cte0515/.local/share/uv/python/cpython-3.10.18-linux-x86_64-gnu/lib/python3.10/cmd.py"", line 138, in cmdloop\r\n stop = self.onecmd(line)\r\n File ""/home/hk-project-p0023960/tum_cte0515/.local/share/uv/python/cpython-3.10.18-linux-x86_64-gnu/lib/python3.10/cmd.py"", line 217, in onecmd\r\n return func(arg)\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/jax/_src/debugger/cli_debugger.py"", line 146, in do_quit\r\n sys.exit(0)\r\nSystemExit: 0\r\nE0703 16:20:45.760695 2659158 pjrt_stream_executor_client.cc:2917] Execution of replica 0 failed: INTERNAL: CpuCallback error calling callback: Traceback (most recent call last):\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/sample.py"", line 158, in \r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/sample.py"", line 100, in _autoreg_sample\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/jax/_src/traceback_util.py"", line 182, in reraise_with_filtered_traceback\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/flax/linen/module.py"", line 2240, in apply\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/flax/core/scope.py"", line 1079, in wrapper\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/flax/linen/module.py"", line 3022, in scope_fn\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/flax/linen/module.py"", line 699, in wrapped_module_method\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/flax/linen/module.py"", line 1216, in _call_wrapped_method\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/genie.py"", line 199, in sample\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/flax/linen/transforms.py"", line 360, in wrapped_fn\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/flax/core/lift.py"", line 328, in wrapper\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/flax/core/lift.py"", line 1031, in inner\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/flax/core/axes_scan.py"", line 179, in scan_fn\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/jax/_src/traceback_util.py"", line 182, in reraise_with_filtered_traceback\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/jax/_src/lax/control_flow/loops.py"", line 355, in scan\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/jax/_src/core.py"", line 531, in bind\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/jax/_src/core.py"", line 551, in _true_bind\r\n File 
""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/jax/_src/core.py"", line 556, in bind_with_trace\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/jax/_src/core.py"", line 1060, in process_primitive\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/jax/_src/dispatch.py"", line 88, in apply_primitive\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/jax/_src/traceback_util.py"", line 182, in reraise_with_filtered_traceback\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/jax/_src/pjit.py"", line 334, in cache_miss\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/jax/_src/pjit.py"", line 195, in _python_pjit_helper\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/jax/_src/pjit.py"", line 1884, in _pjit_call_impl_python\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/jax/_src/profiler.py"", line 354, in wrapper\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/jax/_src/interpreters/pxla.py"", line 1297, in __call__\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/jax/_src/callback.py"", line 782, in _wrapped_callback\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/jax/_src/debugging.py"", line 200, in _callback\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/jax/_src/debugging.py"", line 97, in debug_callback_impl\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/jax/_src/debugging.py"", line 334, in _flat_callback\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/jax/_src/debugger/core.py"", line 220, in _breakpoint_callback\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/jax/_src/debugger/cli_debugger.py"", line 167, in run_debugger\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/jax/_src/debugger/cli_debugger.py"", line 162, in run\r\n File ""/home/hk-project-p0023960/tum_cte0515/.local/share/uv/python/cpython-3.10.18-linux-x86_64-gnu/lib/python3.10/cmd.py"", line 145, in cmdloop\r\n File ""/home/hk-project-p0023960/tum_cte0515/.local/share/uv/python/cpython-3.10.18-linux-x86_64-gnu/lib/python3.10/cmd.py"", line 217, in onecmd\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/jax/_src/debugger/cli_debugger.py"", line 146, in do_quit\r\nSystemExit: 0\r\njax.errors.SimplifiedTraceback: For simplicity, JAX has removed its internal frames from the traceback of the following exception. 
Set JAX_TRACEBACK_FILTERING=off to include these.\r\n\r\nThe above exception was the direct cause of the following exception:\r\n\r\nTraceback (most recent call last):\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/sample.py"", line 158, in \r\n vid = _autoreg_sample(rng, video_batch, action_batch)\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/sample.py"", line 100, in _autoreg_sample\r\n new_frame = genie.apply(\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/genie.py"", line 199, in sample\r\n final_carry, _ = loop_fn(init_carry, jnp.arange(steps))\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/flax/core/axes_scan.py"", line 179, in scan_fn\r\n c, ys = lax.scan(\r\njaxlib._jax.XlaRuntimeError: INTERNAL: CpuCallback error calling callback: Traceback (most recent call last):\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/sample.py"", line 158, in \r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/sample.py"", line 100, in _autoreg_sample\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/jax/_src/traceback_util.py"", line 182, in reraise_with_filtered_traceback\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/flax/linen/module.py"", line 2240, in apply\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/flax/core/scope.py"", line 1079, in wrapper\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/flax/linen/module.py"", line 3022, in scope_fn\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/flax/linen/module.py"", line 699, in wrapped_module_method\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/flax/linen/module.py"", line 1216, in _call_wrapped_method\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/genie.py"", line 199, in sample\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/flax/linen/transforms.py"", line 360, in wrapped_fn\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/flax/core/lift.py"", line 328, in wrapper\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/flax/core/lift.py"", line 1031, in inner\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/flax/core/axes_scan.py"", line 179, in scan_fn\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/jax/_src/traceback_util.py"", line 182, in reraise_with_filtered_traceback\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/jax/_src/lax/control_flow/loops.py"", line 355, in scan\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/jax/_src/core.py"", line 531, in bind\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/jax/_src/core.py"", line 551, in 
_true_bind\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/jax/_src/core.py"", line 556, in bind_with_trace\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/jax/_src/core.py"", line 1060, in process_primitive\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/jax/_src/dispatch.py"", line 88, in apply_primitive\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/jax/_src/traceback_util.py"", line 182, in reraise_with_filtered_traceback\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/jax/_src/pjit.py"", line 334, in cache_miss\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/jax/_src/pjit.py"", line 195, in _python_pjit_helper\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/jax/_src/pjit.py"", line 1884, in _pjit_call_impl_python\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/jax/_src/profiler.py"", line 354, in wrapper\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/jax/_src/interpreters/pxla.py"", line 1297, in __call__\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/jax/_src/callback.py"", line 782, in _wrapped_callback\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/jax/_src/debugging.py"", line 200, in _callback\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/jax/_src/debugging.py"", line 97, in debug_callback_impl\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/jax/_src/debugging.py"", line 334, in _flat_callback\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/jax/_src/debugger/core.py"", line 220, in _breakpoint_callback\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/jax/_src/debugger/cli_debugger.py"", line 167, in run_debugger\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/jax/_src/debugger/cli_debugger.py"", line 162, in run\r\n File ""/home/hk-project-p0023960/tum_cte0515/.local/share/uv/python/cpython-3.10.18-linux-x86_64-gnu/lib/python3.10/cmd.py"", line 145, in cmdloop\r\n File ""/home/hk-project-p0023960/tum_cte0515/.local/share/uv/python/cpython-3.10.18-linux-x86_64-gnu/lib/python3.10/cmd.py"", line 217, in onecmd\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/jax/_src/debugger/cli_debugger.py"", line 146, in do_quit\r\nSystemExit: 0\r\n",,terminal_output +446,309234,"TERMINAL",0,0,"6",,terminal_output +447,310266,"TERMINAL",0,0,"7",,terminal_output +448,310431,"TERMINAL",0,0,"]0;tum_cte0515@hkn0704:~/Projects/jafar[?2004h(jafar) [tum_cte0515@hkn0704 jafar]$ ",,terminal_output +449,311314,"TERMINAL",0,0,"8",,terminal_output +450,312526,"TERMINAL",0,0,"9",,terminal_output 
+451,313394,"TERMINAL",0,0,"50",,terminal_output +452,313696,"genie.py",0,0,"",python,tab +453,313697,"genie.py",8468,0,"",python,selection_mouse +454,313765,"genie.py",8467,0,"",python,selection_command +455,314475,"TERMINAL",0,0,"1",,terminal_output +456,315305,"genie.py",10152,0,"",python,selection_mouse +457,315503,"TERMINAL",0,0,"2",,terminal_output +458,316027,"genie.py",10129,31,"",python,content +459,316681,"TERMINAL",0,0,"3",,terminal_output +460,317576,"TERMINAL",0,0,"4",,terminal_output +461,318614,"TERMINAL",0,0,"5",,terminal_output +462,319744,"TERMINAL",0,0,"6",,terminal_output +463,319960,"genie.py",3215,0,"",python,selection_mouse +464,319981,"genie.py",3214,0,"",python,selection_command +465,320096,"genie.py",3214,1,"m",python,selection_mouse +466,320237,"genie.py",3213,1,"i",python,selection_mouse +467,320238,"genie.py",3174,40,"== action_space\n # D == latent_di",python,selection_mouse +468,320238,"genie.py",3145,69," # S == seq_len\n # A == action_space\n # D == latent_di",python,selection_mouse +469,320238,"genie.py",3116,98," # N == num_patches\n # S == seq_len\n # A == action_space\n # D == latent_di",python,selection_mouse +470,320239,"genie.py",3114,100," # N == num_patches\n # S == seq_len\n # A == action_space\n # D == latent_di",python,selection_mouse +471,320239,"genie.py",3215,0,"",python,selection_command +472,320281,"genie.py",3114,101," # N == num_patches\n # S == seq_len\n # A == action_space\n # D == latent_dim",python,selection_mouse +473,320338,"genie.py",3078,137," # T == num_frames (input)\n # N == num_patches\n # S == seq_len\n # A == action_space\n # D == latent_dim",python,selection_mouse +474,320413,"genie.py",3052,163," # B == batch_size\n # T == num_frames (input)\n # N == num_patches\n # S == seq_len\n # A == action_space\n # D == latent_dim",python,selection_mouse +475,320775,"TERMINAL",0,0,"7",,terminal_output +476,321422,"genie.py",3052,0,"",python,selection_command +477,321790,"TERMINAL",0,0,"8",,terminal_output +478,322884,"TERMINAL",0,0,"9",,terminal_output +479,323954,"TERMINAL",0,0,"1:001",,terminal_output +480,325068,"TERMINAL",0,0,"1",,terminal_output +481,325575,"genie.py",5346,0,"",python,selection_mouse +482,325577,"genie.py",5345,0,"",python,selection_command +483,326040,"TERMINAL",0,0,"2",,terminal_output +484,326424,"genie.py",5346,0," # B == batch_size\n # T == num_frames (input)\n # N == num_patches\n # S == seq_len\n # A == action_space\n # D == latent_dim",python,content +485,326479,"genie.py",5346,0,"",python,selection_command +486,327114,"TERMINAL",0,0,"3",,terminal_output +487,328125,"TERMINAL",0,0,"4",,terminal_output +488,328511,"genie.py",5346,0,"\n ",python,content +489,328599,"genie.py",5351,0," ",python,content +490,329161,"TERMINAL",0,0,"6",,terminal_output +491,329537,"genie.py",5355,1,"",python,content +492,329678,"genie.py",5355,1,"",python,content +493,329818,"genie.py",5355,1,"",python,content +494,329965,"genie.py",5355,1,"",python,content +495,330211,"TERMINAL",0,0,"72",,terminal_output +496,330276,"genie.py",5355,1,"",python,content +497,330440,"genie.py",5355,1,"",python,content +498,330561,"genie.py",5355,1,"",python,content +499,330814,"genie.py",5355,1,"",python,content +500,331472,"TERMINAL",0,0,"819",,terminal_output +501,332511,"TERMINAL",0,0,"9",,terminal_output +502,333548,"TERMINAL",0,0,"10",,terminal_output +503,334590,"TERMINAL",0,0,"1",,terminal_output +504,335676,"TERMINAL",0,0,"2",,terminal_output +505,336335,"genie.py",5354,0,"",python,selection_command 
+506,336668,"TERMINAL",0,0,"3",,terminal_output +507,337771,"TERMINAL",0,0,"4",,terminal_output +508,338790,"TERMINAL",0,0,"5",,terminal_output +509,339813,"TERMINAL",0,0,"6",,terminal_output +510,340481,"genie.py",3388,0,"",python,selection_mouse +511,340592,"genie.py",3387,1," ",python,selection_mouse +512,340923,"genie.py",3387,2," #",python,selection_mouse +513,340924,"genie.py",3387,4," # (",python,selection_mouse +514,340924,"genie.py",3387,6," # (B,",python,selection_mouse +515,340925,"genie.py",3387,9," # (B, T,",python,selection_mouse +516,340925,"genie.py",3387,10," # (B, T, ",python,selection_mouse +517,340925,"genie.py",3387,11," # (B, T, N",python,selection_mouse +518,340958,"genie.py",3387,12," # (B, T, N)",python,selection_mouse +519,341030,"TERMINAL",0,0,"7",,terminal_output +520,341326,"genie.py",3399,0,"",python,selection_mouse +521,341336,"genie.py",3398,0,"",python,selection_command +522,341963,"TERMINAL",0,0,"8",,terminal_output +523,342247,"genie.py",3397,0,"",python,selection_command +524,342801,"genie.py",3396,0,"",python,selection_command +525,342838,"genie.py",3395,0,"",python,selection_command +526,342839,"genie.py",3394,0,"",python,selection_command +527,342862,"genie.py",3393,0,"",python,selection_command +528,342871,"genie.py",3392,0,"",python,selection_command +529,342982,"TERMINAL",0,0,"9",,terminal_output +530,342998,"genie.py",3391,0,"",python,selection_command +531,343148,"genie.py",3390,0,"",python,selection_command +532,343287,"genie.py",3389,0,"",python,selection_command +533,343472,"genie.py",3388,0,"",python,selection_command +534,344010,"TERMINAL",0,0,"20",,terminal_output +535,345037,"TERMINAL",0,0,"1",,terminal_output +536,346040,"TERMINAL",0,0,"2",,terminal_output +537,346109,"genie.py",3433,0,"",python,selection_command +538,346594,"genie.py",3453,0,"",python,selection_command +539,346632,"genie.py",3455,0,"",python,selection_command +540,346685,"genie.py",3502,0,"",python,selection_command +541,346708,"genie.py",3550,0,"",python,selection_command +542,346722,"genie.py",3589,0,"",python,selection_command +543,346764,"genie.py",3637,0,"",python,selection_command +544,347140,"TERMINAL",0,0,"3",,terminal_output +545,348117,"TERMINAL",0,0,"4",,terminal_output +546,349232,"TERMINAL",0,0,"6",,terminal_output +547,350205,"TERMINAL",0,0,"7",,terminal_output +548,350344,"genie.py",5682,0,"",python,selection_mouse +549,350348,"genie.py",5681,0,"",python,selection_command +550,351114,"genie.py",5682,0,"# (B, T, N)",python,content +551,351134,"genie.py",5692,0,"",python,selection_command +552,351263,"TERMINAL",0,0,"8",,terminal_output +553,352350,"TERMINAL",0,0,"920",,terminal_output +554,353329,"TERMINAL",0,0,"30",,terminal_output +555,354377,"TERMINAL",0,0,"11",,terminal_output +556,355418,"TERMINAL",0,0,"2",,terminal_output +557,355594,"genie.py",3826,0,"",python,selection_mouse +558,355737,"genie.py",3825,1," ",python,selection_mouse +559,356465,"TERMINAL",0,0,"3",,terminal_output +560,357042,"genie.py",3835,0,"",python,selection_mouse +561,357292,"genie.py",3834,1," ",python,selection_mouse +562,357293,"genie.py",3832,3,"N) ",python,selection_mouse +563,357293,"genie.py",3831,4," N) ",python,selection_mouse +564,357293,"genie.py",3830,5,", N) ",python,selection_mouse +565,357294,"genie.py",3829,6,"B, N) ",python,selection_mouse +566,357325,"genie.py",3828,7,"(B, N) ",python,selection_mouse +567,357419,"genie.py",3827,8," (B, N) ",python,selection_mouse +568,357506,"TERMINAL",0,0,"4",,terminal_output +569,357579,"genie.py",3826,9,"# (B, N) 
",python,selection_mouse +570,357648,"genie.py",3825,10," # (B, N) ",python,selection_mouse +571,358533,"TERMINAL",0,0,"5",,terminal_output +572,359681,"TERMINAL",0,0,"6",,terminal_output +573,360685,"TERMINAL",0,0,"7",,terminal_output +574,361685,"TERMINAL",0,0,"8",,terminal_output +575,362763,"TERMINAL",0,0,"9",,terminal_output +576,363775,"TERMINAL",0,0,"40",,terminal_output +577,364771,"TERMINAL",0,0,"1",,terminal_output +578,365889,"TERMINAL",0,0,"2",,terminal_output +579,366949,"TERMINAL",0,0,"3",,terminal_output +580,367982,"TERMINAL",0,0,"4",,terminal_output +581,368056,"genie.py",5751,0,"",python,selection_mouse +582,368067,"genie.py",5750,0,"",python,selection_command +583,368662,"genie.py",5751,0,"",python,selection_command +584,368839,"genie.py",5751,0," ",python,content +585,368840,"genie.py",5752,0,"",python,selection_keyboard +586,368922,"TERMINAL",0,0,"5",,terminal_output +587,369370,"genie.py",5751,0,"",python,selection_command +588,369964,"TERMINAL",0,0,"6",,terminal_output +589,370457,"genie.py",5752,0,"",python,selection_command +590,370905,"genie.py",5752,0," # (B, N) ",python,content +591,371039,"TERMINAL",0,0,"7",,terminal_output +592,372061,"TERMINAL",0,0,"8",,terminal_output +593,372980,"genie.py",5833,0,"",python,selection_mouse +594,373084,"TERMINAL",0,0,"9",,terminal_output +595,374122,"TERMINAL",0,0,"50",,terminal_output +596,375244,"TERMINAL",0,0,"2",,terminal_output +597,376212,"TERMINAL",0,0,"3",,terminal_output +598,377291,"TERMINAL",0,0,"4",,terminal_output +599,378326,"TERMINAL",0,0,"5",,terminal_output +600,379341,"TERMINAL",0,0,"6",,terminal_output +601,380358,"TERMINAL",0,0,"7",,terminal_output +602,381401,"TERMINAL",0,0,"8",,terminal_output +603,382440,"TERMINAL",0,0,"9",,terminal_output +604,383514,"genie.py",4054,0,"",python,selection_mouse +605,383527,"TERMINAL",0,0,"2:00",,terminal_output +606,383717,"genie.py",4053,1,")",python,selection_mouse +607,383717,"genie.py",4051,3," D)",python,selection_mouse +608,383718,"genie.py",4049,5,"A, D)",python,selection_mouse +609,383718,"genie.py",4048,6," A, D)",python,selection_mouse +610,383718,"genie.py",4047,7,", A, D)",python,selection_mouse +611,383748,"genie.py",4053,1,")",python,selection_command +612,383749,"genie.py",4046,8,"S, A, D)",python,selection_mouse +613,383772,"genie.py",4045,9," S, A, D)",python,selection_mouse +614,383827,"genie.py",4044,10,", S, A, D)",python,selection_mouse +615,383902,"genie.py",4043,11,"B, S, A, D)",python,selection_mouse +616,383986,"genie.py",4042,12,"(B, S, A, D)",python,selection_mouse +617,384052,"genie.py",4041,13," (B, S, A, D)",python,selection_mouse +618,384246,"genie.py",4040,14,"# (B, S, A, D)",python,selection_mouse +619,384562,"TERMINAL",0,0,"1",,terminal_output +620,385566,"TERMINAL",0,0,"2",,terminal_output +621,386712,"TERMINAL",0,0,"3",,terminal_output +622,387839,"TERMINAL",0,0,"4",,terminal_output +623,388760,"TERMINAL",0,0,"5",,terminal_output +624,389206,"genie.py",5833,0,"",python,selection_mouse +625,389522,"genie.py",5833,0," ",python,content +626,389523,"genie.py",5834,0,"",python,selection_keyboard +627,389762,"TERMINAL",0,0,"6",,terminal_output +628,389770,"genie.py",5834,0,"# (B, S, A, D)",python,content +629,390449,"genie.py",5950,0,"",python,selection_mouse +630,390795,"TERMINAL",0,0,"7",,terminal_output +631,390915,"genie.py",5950,0," ",python,content +632,390916,"genie.py",5951,0,"",python,selection_keyboard +633,391101,"genie.py",5951,0,"# (B, S, A, D)",python,content +634,391831,"TERMINAL",0,0,"8",,terminal_output 
+635,392238,"genie.py",5957,0,"",python,selection_mouse +636,392326,"genie.py",5957,1,"S",python,selection_mouse +637,392326,"genie.py",5957,4,"S, A",python,selection_mouse +638,392327,"genie.py",5957,6,"S, A, ",python,selection_mouse +639,392327,"genie.py",5957,7,"S, A, D",python,selection_mouse +640,392852,"TERMINAL",0,0,"92",,terminal_output +641,393229,"genie.py",5957,7,"N",python,content +642,393230,"genie.py",5958,0,"",python,selection_keyboard +643,393919,"genie.py",6008,0,"",python,selection_mouse +644,393935,"TERMINAL",0,0,"10",,terminal_output +645,395024,"TERMINAL",0,0,"1",,terminal_output +646,396032,"TERMINAL",0,0,"2",,terminal_output +647,396178,"genie.py",6007,0,"",python,selection_command +648,397055,"TERMINAL",0,0,"3",,terminal_output +649,398082,"TERMINAL",0,0,"4",,terminal_output +650,398565,"sample.py",0,0,"from dataclasses import dataclass\nimport time\nimport os\n\nimport dm_pix as pix\nimport einops\nimport jax\nimport jax.numpy as jnp\nimport numpy as np\nfrom orbax.checkpoint import PyTreeCheckpointer\nfrom PIL import Image, ImageDraw\nimport tyro\n\nfrom genie import Genie\n# from utils.dataloader import get_dataloader\n\n\n@dataclass\nclass Args:\n # Experiment\n seed: int = 0\n seq_len: int = 16\n image_channels: int = 3\n image_height: int = 90\n image_width: int = 160\n data_dir: str = ""data/coinrun_episodes""\n checkpoint: str = """"\n # Sampling\n batch_size: int = 1\n maskgit_steps: int = 25\n temperature: float = 1.0\n sample_argmax: bool = True\n start_frame: int = 0\n # Tokenizer checkpoint\n tokenizer_dim: int = 512\n latent_patch_dim: int = 32\n num_patch_latents: int = 1024\n patch_size: int = 4\n tokenizer_num_blocks: int = 8\n tokenizer_num_heads: int = 8\n # LAM checkpoint\n lam_dim: int = 512\n latent_action_dim: int = 32\n num_latent_actions: int = 6\n lam_patch_size: int = 16\n lam_num_blocks: int = 8\n lam_num_heads: int = 8\n # Dynamics checkpoint\n dyna_dim: int = 512\n dyna_num_blocks: int = 12\n dyna_num_heads: int = 8\n\n\nargs = tyro.cli(Args)\nrng = jax.random.PRNGKey(args.seed)\n\n# --- Load Genie checkpoint ---\ngenie = Genie(\n # Tokenizer\n in_dim=args.image_channels,\n tokenizer_dim=args.tokenizer_dim,\n latent_patch_dim=args.latent_patch_dim,\n num_patch_latents=args.num_patch_latents,\n patch_size=args.patch_size,\n tokenizer_num_blocks=args.tokenizer_num_blocks,\n tokenizer_num_heads=args.tokenizer_num_heads,\n # LAM\n lam_dim=args.lam_dim,\n latent_action_dim=args.latent_action_dim,\n num_latent_actions=args.num_latent_actions,\n lam_patch_size=args.lam_patch_size,\n lam_num_blocks=args.lam_num_blocks,\n lam_num_heads=args.lam_num_heads,\n # Dynamics\n dyna_dim=args.dyna_dim,\n dyna_num_blocks=args.dyna_num_blocks,\n dyna_num_heads=args.dyna_num_heads,\n)\nrng, _rng = jax.random.split(rng)\nimage_shape = (args.image_height, args.image_width, args.image_channels)\ndummy_inputs = dict(\n videos=jnp.zeros((args.batch_size, args.seq_len, *image_shape), dtype=jnp.float32),\n mask_rng=_rng,\n)\nrng, _rng = jax.random.split(rng)\nparams = genie.init(_rng, dummy_inputs)\nckpt = PyTreeCheckpointer().restore(args.checkpoint)[""model""][""params""][""params""]\nparams[""params""].update(ckpt)\n\n\n# --- Define autoregressive sampling loop ---\ndef _autoreg_sample(rng, video_batch, action_batch):\n vid = video_batch[:, : args.start_frame + 1]\n for frame_idx in range(args.start_frame + 1, args.seq_len):\n # --- Sample next frame ---\n print(""Frame"", frame_idx)\n start_time = time.time()\n rng, _rng = jax.random.split(rng)\n batch = 
dict(videos=vid, latent_actions=action_batch[:, :frame_idx], rng=_rng)\n new_frame = genie.apply(\n params,\n batch,\n args.maskgit_steps,\n args.temperature,\n args.sample_argmax,\n method=Genie.sample,\n )\n elapsed = time.time() - start_time\n print(f""Frame {frame_idx} sampling took {elapsed:.3f} seconds"")\n vid = jnp.concatenate([vid, new_frame], axis=1)\n return vid\n\n# --- Define autoregressive sampling loop ---\ndef _autoreg_sample_mihir(rng, video_batch, action_batch):\n vid = video_batch[:, : args.start_frame + 1]\n rng, _rng = jax.random.split(rng)\n batch = dict(videos=vid, latent_actions=action_batch, rng=_rng)\n new_frames = genie.apply(\n params,\n batch,\n args.seq_len,\n args.maskgit_steps,\n args.temperature,\n args.sample_argmax,\n method=Genie.sample_mihir,\n )\n vid = jnp.concatenate([vid, new_frames], axis=1)\n return vid\n\n\n\n# --- Get video + latent actions ---\n# tfrecord_files = [\n # os.path.join(args.data_dir, x)\n # for x in os.listdir(args.data_dir)\n # if x.endswith("".tfrecord"")\n# ]\n# dataloader = get_dataloader(\n # tfrecord_files,\n # args.seq_len,\n # args.batch_size,\n # args.image_height,\n # args.image_width,\n # args.image_channels,\n # seed=args.seed,\n# )\n# video_batch = next(iter(dataloader))\nvideo_batch = np.load(""overfit_dir/single_sample_corner.npy"")\n# Get latent actions from first video only\nfirst_video = video_batch[:1,:args.seq_len]\nbatch = dict(videos=first_video)\naction_batch = genie.apply(params, batch, False, method=Genie.vq_encode)\naction_batch = action_batch.reshape(1, args.seq_len - 1, 1)\n# Use actions from first video for all videos\naction_batch = jnp.repeat(action_batch, video_batch.shape[0], axis=0)\n\n# --- Sample + evaluate video ---\nvid = _autoreg_sample(rng, video_batch, action_batch)\ngt = video_batch[:, : vid.shape[1]].clip(0, 1).reshape(-1, *video_batch.shape[2:])\nrecon = vid.clip(0, 1).reshape(-1, *vid.shape[2:])\nssim = pix.ssim(gt[:, args.start_frame + 1 :], recon[:, args.start_frame + 1 :]).mean()\nprint(f""SSIM: {ssim}"")\n\n# --- Construct video ---\nfirst_true = (video_batch[0:1] * 255).astype(np.uint8)\nfirst_pred = (vid[0:1] * 255).astype(np.uint8)\nfirst_video_comparison = np.zeros((2, *vid.shape[1:5]), dtype=np.uint8)\nfirst_video_comparison[0] = first_true[:, : vid.shape[1]]\nfirst_video_comparison[1] = first_pred\n# For other videos, only show generated video\nother_preds = (vid[1:] * 255).astype(np.uint8)\nall_frames = np.concatenate([first_video_comparison, other_preds], axis=0)\nflat_vid = einops.rearrange(all_frames, ""n t h w c -> t h (n w) c"")\n\n# --- Save video ---\nimgs = [Image.fromarray(img) for img in flat_vid]\n# Write actions on each frame\nfor img, action in zip(imgs[1:], action_batch[0, :, 0]):\n d = ImageDraw.Draw(img)\n d.text((2, 2), f""{action}"", fill=255)\nimgs[0].save(\n f""generation_{time.time()}.gif"",\n save_all=True,\n append_images=imgs[1:],\n duration=250,\n loop=0,\n)\n",python,tab +651,399143,"TERMINAL",0,0,"5",,terminal_output +652,400152,"TERMINAL",0,0,"7",,terminal_output +653,401252,"TERMINAL",0,0,"8",,terminal_output +654,402239,"TERMINAL",0,0,"9",,terminal_output +655,403020,"sample.py",4698,0,"",python,selection_mouse +656,403334,"TERMINAL",0,0,"20",,terminal_output +657,403956,"sample.py",4699,0,"",python,selection_command +658,404139,"sample.py",4699,0,"i",python,content +659,404140,"sample.py",4700,0,"",python,selection_keyboard +660,404325,"TERMINAL",0,0,"1",,terminal_output +661,404440,"sample.py",4700,0,"_",python,content 
+662,404454,"sample.py",4701,0,"",python,selection_keyboard +663,404710,"sample.py",4701,0,"m",python,content +664,404712,"sample.py",4702,0,"",python,selection_keyboard +665,404846,"sample.py",4702,0,"i",python,content +666,404847,"sample.py",4703,0,"",python,selection_keyboard +667,404963,"sample.py",4703,0,"h",python,content +668,404964,"sample.py",4704,0,"",python,selection_keyboard +669,405055,"sample.py",4704,0,"i",python,content +670,405056,"sample.py",4705,0,"",python,selection_keyboard +671,405094,"sample.py",4705,0,"r",python,content +672,405096,"sample.py",4706,0,"",python,selection_keyboard +673,405403,"TERMINAL",0,0,"2",,terminal_output +674,405458,"sample.py",4705,0,"",python,selection_command +675,406443,"TERMINAL",0,0,"3",,terminal_output +676,407441,"TERMINAL",0,0,"4",,terminal_output +677,408524,"TERMINAL",0,0,"5",,terminal_output +678,409548,"TERMINAL",0,0,"6",,terminal_output +679,410573,"TERMINAL",0,0,"7",,terminal_output +680,411699,"TERMINAL",0,0,"8",,terminal_output +681,412374,"sample.py",0,0,"",python,tab +682,412375,"sample.py",4701,0,"",python,selection_mouse +683,412652,"TERMINAL",0,0,"9",,terminal_output +684,412931,"sample.py",4700,0,"",python,selection_mouse +685,413742,"sample.py",4699,1,"",python,content +686,413745,"TERMINAL",0,0,"30",,terminal_output +687,414715,"TERMINAL",0,0,"1",,terminal_output +688,415514,"TERMINAL",0,0,"sh scripts_horeka/overfit_sample_tiny/sample.sh",,terminal_output +689,415753,"TERMINAL",0,0,"2",,terminal_output +690,415967,"TERMINAL",0,0,"\r\n[?2004l\r",,terminal_output +691,416133,"TERMINAL",0,0,"Sampling from checkpoint: /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/3309699/genie_1751384516_74500/\r\n",,terminal_output +692,416820,"TERMINAL",0,0,"3",,terminal_output +693,417842,"TERMINAL",0,0,"4",,terminal_output +694,418128,"genie.py",0,0,"",python,tab +695,418876,"TERMINAL",0,0,"5",,terminal_output +696,418938,"TERMINAL",0,0,"2025-07-03 16:22:35.828408: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +697,419993,"TERMINAL",0,0,"6",,terminal_output +698,420931,"genie.py",5195,0,"",python,selection_command +699,420990,"TERMINAL",0,0,"7",,terminal_output +700,422022,"TERMINAL",0,0,"8",,terminal_output +701,423032,"TERMINAL",0,0,"9",,terminal_output +702,423388,"TERMINAL",0,0,"2025-07-03 16:22:40.288108: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +703,424089,"TERMINAL",0,0,"40",,terminal_output +704,425216,"TERMINAL",0,0,"1",,terminal_output +705,426167,"TERMINAL",0,0,"3",,terminal_output +706,427264,"TERMINAL",0,0,"4",,terminal_output +707,428316,"TERMINAL",0,0,"5",,terminal_output +708,429313,"TERMINAL",0,0,"6",,terminal_output +709,430338,"TERMINAL",0,0,"7",,terminal_output +710,431373,"TERMINAL",0,0,"8",,terminal_output +711,432418,"TERMINAL",0,0,"9",,terminal_output +712,432897,"TERMINAL",0,0,"2025-07-03 16:22:49.779872: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. 
Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +713,433459,"TERMINAL",0,0,"50",,terminal_output +714,434498,"TERMINAL",0,0,"1",,terminal_output +715,435558,"TERMINAL",0,0,"2",,terminal_output +716,436685,"TERMINAL",0,0,"3",,terminal_output +717,437622,"TERMINAL",0,0,"4",,terminal_output +718,438664,"TERMINAL",0,0,"5",,terminal_output +719,439707,"TERMINAL",0,0,"6",,terminal_output +720,440781,"TERMINAL",0,0,"7",,terminal_output +721,441326,"TERMINAL",0,0,"2025-07-03 16:22:58.192064: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +722,441805,"TERMINAL",0,0,"8",,terminal_output +723,442833,"TERMINAL",0,0,"9",,terminal_output +724,443957,"TERMINAL",0,0,"3:00",,terminal_output +725,444914,"TERMINAL",0,0,"1",,terminal_output +726,445959,"TERMINAL",0,0,"2",,terminal_output +727,447027,"TERMINAL",0,0,"3",,terminal_output +728,448039,"TERMINAL",0,0,"4",,terminal_output +729,448327,"TERMINAL",0,0,"2025-07-03 16:23:05.223987: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +730,449176,"TERMINAL",0,0,"5",,terminal_output +731,450203,"TERMINAL",0,0,"6",,terminal_output +732,451166,"TERMINAL",0,0,"8",,terminal_output +733,452251,"TERMINAL",0,0,"9 9 3",,terminal_output +734,453274,"TERMINAL",0,0,"10",,terminal_output +735,454327,"TERMINAL",0,0,"1",,terminal_output +736,454912,"TERMINAL",0,0,"2025-07-03 16:23:11.791263: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +737,455425,"TERMINAL",0,0,"2",,terminal_output +738,456368,"TERMINAL",0,0,"3",,terminal_output +739,457410,"TERMINAL",0,0,"4",,terminal_output +740,458437,"TERMINAL",0,0,"5",,terminal_output +741,458544,"TERMINAL",0,0,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/orbax/checkpoint/_src/serialization/type_handlers.py:1251: UserWarning: Sharding info not provided when restoring. Populating sharding info from sharding file. Please note restoration time will be slightly increased due to reading from file. 
Note also that this option is unsafe when restoring on a different topology than the checkpoint was saved with.\r\n warnings.warn(\r\n",,terminal_output +742,459477,"TERMINAL",0,0,"6",,terminal_output +743,460545,"TERMINAL",0,0,"7",,terminal_output +744,461557,"TERMINAL",0,0,"8",,terminal_output +745,462695,"TERMINAL",0,0,"9",,terminal_output +746,463725,"TERMINAL",0,0,"20",,terminal_output +747,464682,"TERMINAL",0,0,"1",,terminal_output +748,465972,"TERMINAL",0,0,"2",,terminal_output +749,466770,"TERMINAL",0,0,"3",,terminal_output +750,466925,"TERMINAL",0,0,"2025-07-03 16:23:23.823208: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +751,467814,"TERMINAL",0,0,"4",,terminal_output +752,468839,"TERMINAL",0,0,"5",,terminal_output +753,469864,"TERMINAL",0,0,"6",,terminal_output +754,470170,"TERMINAL",0,0,"2025-07-03 16:23:27.032714: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +755,470988,"TERMINAL",0,0,"7",,terminal_output +756,472015,"TERMINAL",0,0,"8",,terminal_output +757,472975,"TERMINAL",0,0,"9",,terminal_output +758,474064,"TERMINAL",0,0,"30",,terminal_output +759,475085,"TERMINAL",0,0,"1",,terminal_output +760,476110,"TERMINAL",0,0,"2",,terminal_output +761,477126,"TERMINAL",0,0,"3",,terminal_output +762,478259,"TERMINAL",0,0,"5",,terminal_output +763,478271,"TERMINAL",0,0,"2025-07-03 16:23:35.167235: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. 
Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +764,479284,"TERMINAL",0,0,"6",,terminal_output +765,479909,"TERMINAL",0,0,"token_idxs shape: (1, 1, 920)\r\n",,terminal_output +766,480016,"TERMINAL",0,0,"token_idxs shape: (1, 6, 920)\r\n",,terminal_output +767,480074,"TERMINAL",0,0,"new_frame_idxs.shape: (1, 920)\r\n",,terminal_output +768,480248,"TERMINAL",0,0,"7",,terminal_output +769,480311,"TERMINAL",0,0,"action_tokens.shape: (1, 5, 1, 32)\r\n",,terminal_output +770,480434,"TERMINAL",0,0,"Entering jdb:\r\n(jdb) ",,terminal_output +771,481330,"TERMINAL",0,0,"8",,terminal_output +772,482363,"TERMINAL",0,0,"9",,terminal_output +773,483381,"TERMINAL",0,0,"40",,terminal_output +774,484411,"TERMINAL",0,0,"1",,terminal_output +775,485453,"TERMINAL",0,0,"2",,terminal_output +776,486492,"TERMINAL",0,0,"3",,terminal_output +777,487536,"TERMINAL",0,0,"4",,terminal_output +778,488602,"TERMINAL",0,0,"5",,terminal_output +779,489727,"TERMINAL",0,0,"6",,terminal_output +780,490752,"TERMINAL",0,0,"7",,terminal_output +781,491777,"TERMINAL",0,0,"8",,terminal_output +782,492802,"TERMINAL",0,0,"9",,terminal_output +783,493824,"TERMINAL",0,0,"50",,terminal_output +784,494848,"TERMINAL",0,0,"1",,terminal_output +785,495882,"TERMINAL",0,0,"2",,terminal_output +786,496885,"TERMINAL",0,0,"3",,terminal_output +787,497994,"TERMINAL",0,0,"4",,terminal_output +788,499047,"TERMINAL",0,0,"5",,terminal_output +789,500071,"TERMINAL",0,0,"6",,terminal_output +790,501095,"TERMINAL",0,0,"7",,terminal_output +791,502117,"TERMINAL",0,0,"8",,terminal_output +792,502854,"genie.py",0,0,"",python,tab +793,502855,"genie.py",3580,0,"",python,selection_mouse +794,503013,"genie.py",3580,2," S",python,selection_mouse +795,503014,"genie.py",3580,3," S ",python,selection_mouse +796,503079,"genie.py",3580,4," S -",python,selection_mouse +797,503080,"genie.py",3580,5," S - ",python,selection_mouse +798,503116,"genie.py",3580,6," S - T",python,selection_mouse +799,503144,"TERMINAL",0,0,"9",,terminal_output +800,503442,"genie.py",3580,7," S - T,",python,selection_mouse +801,503784,"genie.py",3587,0,"",python,selection_mouse +802,504251,"TERMINAL",0,0,"4:01",,terminal_output +803,504470,"genie.py",3585,0,"",python,selection_mouse +804,504711,"genie.py",3584,1," ",python,selection_mouse +805,504771,"genie.py",3583,2,"- ",python,selection_mouse +806,504772,"genie.py",3582,3," - ",python,selection_mouse +807,504862,"genie.py",3581,4,"S - ",python,selection_mouse +808,505212,"TERMINAL",0,0,"2",,terminal_output +809,505596,"genie.py",3581,0,"",python,selection_mouse +810,506092,"genie.py",3581,1,"S",python,selection_mouse +811,506092,"genie.py",3581,2,"S ",python,selection_mouse +812,506118,"genie.py",3581,3,"S -",python,selection_mouse +813,506183,"genie.py",3581,4,"S - ",python,selection_mouse +814,506258,"TERMINAL",0,0,"3",,terminal_output +815,506554,"genie.py",3581,5,"S - T",python,selection_mouse +816,507142,"genie.py",3586,0,"",python,selection_mouse +817,507295,"TERMINAL",0,0,"4",,terminal_output +818,507642,"genie.py",3585,1,"T",python,selection_mouse +819,507713,"genie.py",3584,2," T",python,selection_mouse +820,507766,"genie.py",3583,3,"- T",python,selection_mouse +821,507833,"genie.py",3582,4," - T",python,selection_mouse +822,507886,"genie.py",3581,5,"S - T",python,selection_mouse +823,508382,"TERMINAL",0,0,"5",,terminal_output +824,508483,"genie.py",3581,0,"",python,selection_mouse 
+825,509389,"TERMINAL",0,0,"6",,terminal_output +826,510426,"TERMINAL",0,0,"7",,terminal_output +827,511456,"TERMINAL",0,0,"8",,terminal_output +828,512506,"TERMINAL",0,0,"9",,terminal_output +829,512670,"genie.py",3701,0,"",python,selection_mouse +830,513390,"genie.py",3767,0,"",python,selection_mouse +831,513395,"genie.py",3766,0,"",python,selection_command +832,513543,"TERMINAL",0,0,"10",,terminal_output +833,514599,"TERMINAL",0,0,"1",,terminal_output +834,514845,"TERMINAL",0,0,"T",,terminal_output +835,515053,"TERMINAL",0,0,"\r\n1\r\n(jdb) ",,terminal_output +836,515645,"TERMINAL",0,0,"2",,terminal_output +837,516092,"TERMINAL",0,0,"S",,terminal_output +838,516276,"TERMINAL",0,0,"\r\n6\r\n(jdb) ",,terminal_output +839,516763,"TERMINAL",0,0,"3",,terminal_output +840,517752,"TERMINAL",0,0,"47",,terminal_output +841,518771,"TERMINAL",0,0,"5",,terminal_output +842,519835,"TERMINAL",0,0,"6",,terminal_output +843,520858,"TERMINAL",0,0,"7",,terminal_output +844,521878,"TERMINAL",0,0,"8",,terminal_output +845,522993,"TERMINAL",0,0,"9",,terminal_output +846,523963,"TERMINAL",0,0,"20",,terminal_output +847,524450,"genie.py",0,0,"",python,tab +848,524451,"genie.py",3546,0,"",python,selection_mouse +849,524551,"genie.py",3539,10,"token_idxs",python,selection_mouse +850,525063,"TERMINAL",0,0,"1",,terminal_output +851,526044,"TERMINAL",0,0,"2",,terminal_output +852,527105,"TERMINAL",0,0,"3",,terminal_output +853,528125,"TERMINAL",0,0,"4",,terminal_output +854,529255,"TERMINAL",0,0,"6",,terminal_output +855,530215,"TERMINAL",0,0,"7",,terminal_output +856,531249,"TERMINAL",0,0,"8",,terminal_output +857,532309,"TERMINAL",0,0,"9",,terminal_output +858,533371,"TERMINAL",0,0,"30",,terminal_output +859,534383,"TERMINAL",0,0,"1",,terminal_output +860,535412,"TERMINAL",0,0,"2",,terminal_output +861,536451,"TERMINAL",0,0,"3",,terminal_output +862,537490,"TERMINAL",0,0,"4",,terminal_output +863,538533,"TERMINAL",0,0,"5",,terminal_output +864,539178,"scripts_horeka/overfit_sample_tiny/sample.sh",0,0,"#!/usr/bin/env bash\n\n# Unload modules that may interfere\nmodule unload mpi/openmpi/5.0\nmodule unload devel/cuda/12.4\n\n# Activate virtual environment\nsource .venv/bin/activate\n\n# Set workspace and checkpoint directory (update slurm_job_id as needed)\nws_dir='/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared'\n# Replace the following with the actual job id/checkpoint you want to sample from\nslurm_job_id=3301029\n\n# job_name=train_dynamics_minecraft_overfit_sample_tiny\nCHECKPOINT_DIR=$ws_dir/checkpoints/${slurm_job_id}\n\n# Example: If you want to use a specific checkpoint, set it here\n# CHECKPOINT_PATH=$ws_dir/checkpoints/3299272/dynamics-tiny-overfit-big-lr-3299272_50000/\n# Or use the latest in the directory\n# CHECKPOINT_PATH=$(ls -d $CHECKPOINT_DIR/*/ | sort | tail -n 1)\nCHECKPOINT_PATH=$CHECKPOINT_DIR/genie_1751067601_200000/\n# CHECKPOINT_PATH=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/0000/genie_1751301068_2000/\n# CHECKPOINT_PATH=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data/../checkpoints/3307618/genie_1751322003_15500/\n# CHECKPOINT_PATH=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/3307619/genie_1751322003_200000/\nCHECKPOINT_PATH=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/3309699/genie_1751384516_74500/\n\n\necho ""Sampling from checkpoint: $CHECKPOINT_PATH""\n\npython sample.py \\n --checkpoint ""$CHECKPOINT_PATH"" \\n --tokenizer_dim=384 \\n --latent_patch_dim=32 \\n --num_patch_latents=1024 \\n 
--patch_size=4 \\n --tokenizer_num_blocks=8 \\n --tokenizer_num_heads=8 \\n --lam_dim=384 \\n --latent_action_dim=32 \\n --lam_patch_size=16 \\n --lam_num_blocks=8 \\n --lam_num_heads=8 \\n --dyna_dim=128 \\n --dyna_num_blocks=2 \\n --dyna_num_heads=4 \\n --maskgit_steps=1 \\n --num_latent_actions=6 \\n --seq_len=6 \\n --start_frame=0\n\n# python sample.py \\n # --checkpoint ""$CHECKPOINT_PATH"" \\n # --data_dir=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data/coinrun_episodes\n",shellscript,tab +865,539576,"TERMINAL",0,0,"6",,terminal_output +866,540622,"TERMINAL",0,0,"7",,terminal_output +867,541748,"TERMINAL",0,0,"8",,terminal_output +868,542701,"TERMINAL",0,0,"9",,terminal_output +869,543388,"TERMINAL",0,0,"bash",,terminal_focus +870,543755,"TERMINAL",0,0,"40",,terminal_output +871,544766,"TERMINAL",0,0,"srun",,terminal_focus +872,544830,"TERMINAL",0,0,"1",,terminal_output +873,545844,"TERMINAL",0,0,"2",,terminal_output +874,546972,"TERMINAL",0,0,"3",,terminal_output +875,547995,"TERMINAL",0,0,"4",,terminal_output +876,549019,"TERMINAL",0,0,"5",,terminal_output +877,550011,"TERMINAL",0,0,"6",,terminal_output +878,551069,"TERMINAL",0,0,"7",,terminal_output +879,552096,"TERMINAL",0,0,"8",,terminal_output +880,553131,"TERMINAL",0,0,"9",,terminal_output +881,553766,"genie.py",0,0,"",python,tab +882,553767,"genie.py",3621,0,"",python,selection_mouse +883,553871,"genie.py",3615,9,"pad_shape",python,selection_mouse +884,554174,"TERMINAL",0,0,"51",,terminal_output +885,554993,"genie.py",3609,0,"",python,selection_mouse +886,555160,"genie.py",3609,5,"zeros",python,selection_mouse +887,555248,"TERMINAL",0,0,"2",,terminal_output +888,555945,"genie.py",3606,0,"",python,selection_mouse +889,556089,"genie.py",3605,3,"jnp",python,selection_mouse +890,556293,"TERMINAL",0,0,"3",,terminal_output +891,556897,"genie.py",3601,0,"",python,selection_mouse +892,557416,"TERMINAL",0,0,"4",,terminal_output +893,558547,"TERMINAL",0,0,"5",,terminal_output +894,559157,"genie.py",4266,0,"",python,selection_mouse +895,559481,"TERMINAL",0,0,"6",,terminal_output +896,559774,"genie.py",4010,0,"",python,selection_mouse +897,560546,"TERMINAL",0,0,"7",,terminal_output +898,561624,"TERMINAL",0,0,"8",,terminal_output +899,562565,"TERMINAL",0,0,"9",,terminal_output +900,563661,"TERMINAL",0,0,"5:00",,terminal_output +901,564471,"genie.py",4077,0,"",python,selection_mouse +902,564658,"TERMINAL",0,0,"1",,terminal_output +903,565118,"genie.py",4150,0,"",python,selection_mouse +904,565236,"genie.py",4148,9,"ones_like",python,selection_mouse +905,565695,"TERMINAL",0,0,"2",,terminal_output +906,566079,"genie.py",4123,0,"",python,selection_mouse +907,566111,"genie.py",4122,0,"",python,selection_command +908,566739,"TERMINAL",0,0,"3",,terminal_output +909,567787,"TERMINAL",0,0,"4",,terminal_output +910,567975,"TERMINAL",0,0,"pa",,terminal_output +911,568168,"TERMINAL",0,0,"d",,terminal_output +912,568230,"TERMINAL",0,0,".",,terminal_output +913,568378,"TERMINAL",0,0,"[?25ls[?25h",,terminal_output +914,568585,"TERMINAL",0,0,"[?25lh[?25h",,terminal_output +915,568669,"TERMINAL",0,0,"[?25la[?25h",,terminal_output +916,568780,"TERMINAL",0,0,"[?25lp[?25h",,terminal_output +917,568851,"TERMINAL",0,0,"[?25le[?25h",,terminal_output +918,568852,"TERMINAL",0,0,"5",,terminal_output +919,568957,"TERMINAL",0,0,"\r\n(1, 5, 920)\r\n(jdb) ",,terminal_output +920,569909,"TERMINAL",0,0,"6",,terminal_output +921,570032,"TERMINAL",0,0,"[?25l,a[?25h[?25la[?25h",,terminal_output +922,570947,"TERMINAL",0,0,"[?25la\r[?25h",,terminal_output 
+923,570957,"TERMINAL",0,0,"7",,terminal_output +924,571086,"TERMINAL",0,0,"[?25l,\r[?25h",,terminal_output +925,571220,"TERMINAL",0,0,"",,terminal_output +926,571463,"TERMINAL",0,0,"[?25lm[?25h",,terminal_output +927,571531,"TERMINAL",0,0,"[?25la[?25h",,terminal_output +928,571705,"TERMINAL",0,0,"[?25ls[?25h",,terminal_output +929,571743,"TERMINAL",0,0,"[?25lk[?25h",,terminal_output +930,571876,"TERMINAL",0,0,"[?25l.[?25h",,terminal_output +931,571983,"TERMINAL",0,0,"8",,terminal_output +932,572050,"TERMINAL",0,0,"[?25ls[?25h",,terminal_output +933,572166,"TERMINAL",0,0,"[?25lh[?25h",,terminal_output +934,572285,"TERMINAL",0,0,"[?25la[?25h",,terminal_output +935,572383,"TERMINAL",0,0,"[?25lp[?25h",,terminal_output +936,572395,"TERMINAL",0,0,"[?25le[?25h",,terminal_output +937,572457,"TERMINAL",0,0,"\r\n*** NameError: name 'mask' is not defined\r\n(jdb) ",,terminal_output +938,573089,"TERMINAL",0,0,"93",,terminal_output +939,574210,"TERMINAL",0,0,"10",,terminal_output +940,575235,"TERMINAL",0,0,"2",,terminal_output +941,576258,"TERMINAL",0,0,"3",,terminal_output +942,577324,"TERMINAL",0,0,"4",,terminal_output +943,578387,"TERMINAL",0,0,"5",,terminal_output +944,579074,"TERMINAL",0,0,"[?25lto[?25h",,terminal_output +945,579145,"TERMINAL",0,0,"[?25lo[?25h",,terminal_output +946,579206,"TERMINAL",0,0,"[?25lk[?25h",,terminal_output +947,579376,"TERMINAL",0,0,"[?25le[?25h",,terminal_output +948,579377,"TERMINAL",0,0,"610",,terminal_output +949,579481,"TERMINAL",0,0,"[?25ln[?25h",,terminal_output +950,580460,"TERMINAL",0,0,"7",,terminal_output +951,581452,"TERMINAL",0,0,"8",,terminal_output +952,581883,"TERMINAL",0,0,"\r",,terminal_output +953,582491,"TERMINAL",0,0,"9",,terminal_output +954,583535,"TERMINAL",0,0,"20",,terminal_output +955,584655,"TERMINAL",0,0,"1",,terminal_output +956,585615,"TERMINAL",0,0,"2",,terminal_output +957,586703,"TERMINAL",0,0,"3",,terminal_output +958,587695,"TERMINAL",0,0,"4",,terminal_output +959,588788,"TERMINAL",0,0,"5",,terminal_output +960,589877,"TERMINAL",0,0,"6",,terminal_output +961,590902,"TERMINAL",0,0,"7",,terminal_output +962,591927,"TERMINAL",0,0,"8",,terminal_output +963,592894,"TERMINAL",0,0,"9",,terminal_output +964,593935,"TERMINAL",0,0,"30",,terminal_output +965,594997,"TERMINAL",0,0,"1",,terminal_output +966,596064,"TERMINAL",0,0,"2",,terminal_output +967,597052,"TERMINAL",0,0,"3",,terminal_output +968,598101,"TERMINAL",0,0,"4",,terminal_output +969,599299,"TERMINAL",0,0,"6",,terminal_output +970,600329,"TERMINAL",0,0,"7",,terminal_output +971,601397,"TERMINAL",0,0,"8",,terminal_output +972,602395,"TERMINAL",0,0,"9",,terminal_output +973,603410,"TERMINAL",0,0,"40",,terminal_output +974,604441,"TERMINAL",0,0,"1",,terminal_output +975,605486,"TERMINAL",0,0,"2",,terminal_output +976,606521,"TERMINAL",0,0,"3",,terminal_output +977,607593,"TERMINAL",0,0,"4",,terminal_output +978,608617,"TERMINAL",0,0,"5",,terminal_output +979,609641,"TERMINAL",0,0,"6",,terminal_output +980,610766,"TERMINAL",0,0,"7",,terminal_output +981,611793,"TERMINAL",0,0,"8",,terminal_output +982,612815,"TERMINAL",0,0,"9",,terminal_output +983,613839,"TERMINAL",0,0,"50",,terminal_output +984,614452,"genie.py",0,0,"",python,tab +985,614453,"genie.py",3882,0,"",python,selection_mouse +986,614528,"genie.py",3879,5,"shape",python,selection_mouse +987,614848,"TERMINAL",0,0,"1",,terminal_output +988,615091,"genie.py",3882,0,"",python,selection_mouse +989,615200,"genie.py",3879,5,"shape",python,selection_mouse +990,615778,"genie.py",3789,0,"",python,selection_mouse 
+991,615908,"genie.py",3776,14,"new_frame_idxs",python,selection_mouse +992,615925,"TERMINAL",0,0,"2",,terminal_output +993,616576,"genie.py",3810,0,"",python,selection_mouse +994,616939,"TERMINAL",0,0,"3",,terminal_output +995,617155,"genie.py",3798,0,"",python,selection_mouse +996,617993,"TERMINAL",0,0,"45",,terminal_output +997,619061,"TERMINAL",0,0,"5",,terminal_output +998,620085,"TERMINAL",0,0,"6",,terminal_output +999,620279,"genie.py",3879,0,"",python,selection_command +1000,620453,"genie.py",3940,0,"",python,selection_command +1001,621109,"TERMINAL",0,0,"7",,terminal_output +1002,621325,"genie.py",4011,0,"",python,selection_command +1003,621485,"genie.py",4055,0,"",python,selection_command +1004,621641,"genie.py",4085,0,"",python,selection_command +1005,621784,"genie.py",4117,0,"",python,selection_command +1006,622076,"genie.py",4085,0,"",python,selection_command +1007,622146,"TERMINAL",0,0,"96",,terminal_output +1008,622217,"genie.py",4055,0,"",python,selection_command +1009,622458,"genie.py",4085,0,"",python,selection_command +1010,623243,"genie.py",4056,31,"",python,content +1011,623318,"genie.py",4064,0,"",python,selection_command +1012,623326,"TERMINAL",0,0,"6:00",,terminal_output +1013,623432,"genie.py",4101,0,"",python,selection_command +1014,623808,"genie.py",4165,0,"\n jax.debug.breakpoint()",python,content +1015,623833,"genie.py",4174,0,"",python,selection_command +1016,624249,"TERMINAL",0,0,"1",,terminal_output +1017,624337,"genie.py",4101,0,"",python,selection_command +1018,624529,"genie.py",4102,0,"",python,selection_command +1019,625039,"genie.py",4103,0,"",python,selection_command +1020,625080,"genie.py",4104,0,"",python,selection_command +1021,625115,"genie.py",4105,0,"",python,selection_command +1022,625137,"genie.py",4106,0,"",python,selection_command +1023,625209,"genie.py",4107,0,"",python,selection_command +1024,625213,"genie.py",4108,0,"",python,selection_command +1025,625218,"genie.py",4109,0,"",python,selection_command +1026,625254,"genie.py",4110,0,"",python,selection_command +1027,625309,"genie.py",4111,0,"",python,selection_command +1028,625332,"genie.py",4112,0,"",python,selection_command +1029,625332,"TERMINAL",0,0,"2",,terminal_output +1030,625355,"genie.py",4113,0,"",python,selection_command +1031,625376,"genie.py",4114,0,"",python,selection_command +1032,625430,"genie.py",4115,0,"",python,selection_command +1033,625430,"genie.py",4116,0,"",python,selection_command +1034,625447,"genie.py",4117,0,"",python,selection_command +1035,625495,"genie.py",4118,0,"",python,selection_command +1036,625547,"genie.py",4119,0,"",python,selection_command +1037,625562,"genie.py",4120,0,"",python,selection_command +1038,625818,"genie.py",4121,0,"",python,selection_command +1039,626332,"genie.py",4122,0,"",python,selection_command +1040,626342,"genie.py",4123,0,"",python,selection_command +1041,626343,"TERMINAL",0,0,"3",,terminal_output +1042,626431,"genie.py",4124,0,"",python,selection_command +1043,626614,"genie.py",4125,0,"",python,selection_command +1044,627165,"genie.py",4126,0,"",python,selection_command +1045,627356,"genie.py",4127,0,"",python,selection_command +1046,627368,"TERMINAL",0,0,"4",,terminal_output +1047,628390,"TERMINAL",0,0,"5",,terminal_output +1048,629435,"TERMINAL",0,0,"65",,terminal_output +1049,630478,"TERMINAL",0,0,"7",,terminal_output +1050,631523,"TERMINAL",0,0,"8",,terminal_output +1051,632564,"TERMINAL",0,0,"9",,terminal_output +1052,633603,"TERMINAL",0,0,"10",,terminal_output +1053,633803,"genie.py",4128,0,"",python,selection_command 
+1054,634222,"genie.py",4127,0,"",python,selection_command +1055,634645,"TERMINAL",0,0,"1",,terminal_output +1056,635523,"genie.py",4128,0,"",python,selection_command +1057,635689,"TERMINAL",0,0,"2",,terminal_output +1058,636022,"genie.py",4129,0,"",python,selection_command +1059,636090,"genie.py",4130,0,"",python,selection_command +1060,636094,"genie.py",4131,0,"",python,selection_command +1061,636116,"genie.py",4132,0,"",python,selection_command +1062,636176,"genie.py",4133,0,"",python,selection_command +1063,636209,"genie.py",4134,0,"",python,selection_command +1064,636242,"genie.py",4135,0,"",python,selection_command +1065,636243,"genie.py",4136,0,"",python,selection_command +1066,636329,"genie.py",4137,0,"",python,selection_command +1067,636330,"genie.py",4138,0,"",python,selection_command +1068,636416,"genie.py",4139,0,"",python,selection_command +1069,636417,"genie.py",4140,0,"",python,selection_command +1070,636455,"genie.py",4141,0,"",python,selection_command +1071,636457,"genie.py",4142,0,"",python,selection_command +1072,636508,"genie.py",4143,0,"",python,selection_command +1073,636509,"genie.py",4144,0,"",python,selection_command +1074,636541,"genie.py",4145,0,"",python,selection_command +1075,636542,"genie.py",4146,0,"",python,selection_command +1076,636586,"genie.py",4147,0,"",python,selection_command +1077,636730,"TERMINAL",0,0,"3",,terminal_output +1078,637802,"TERMINAL",0,0,"4",,terminal_output +1079,638824,"TERMINAL",0,0,"5",,terminal_output +1080,639860,"TERMINAL",0,0,"6",,terminal_output +1081,640903,"TERMINAL",0,0,"7",,terminal_output +1082,641984,"TERMINAL",0,0,"8",,terminal_output +1083,642993,"TERMINAL",0,0,"9",,terminal_output +1084,644048,"TERMINAL",0,0,"20",,terminal_output +1085,645174,"TERMINAL",0,0,"1",,terminal_output +1086,646137,"TERMINAL",0,0,"2",,terminal_output +1087,647198,"TERMINAL",0,0,"4",,terminal_output +1088,648245,"TERMINAL",0,0,"5",,terminal_output +1089,649270,"TERMINAL",0,0,"6",,terminal_output +1090,650406,"TERMINAL",0,0,"7",,terminal_output +1091,651423,"TERMINAL",0,0,"8",,terminal_output +1092,652416,"TERMINAL",0,0,"9",,terminal_output +1093,653469,"TERMINAL",0,0,"30",,terminal_output +1094,654494,"TERMINAL",0,0,"1",,terminal_output +1095,655541,"TERMINAL",0,0,"2",,terminal_output +1096,656643,"TERMINAL",0,0,"3",,terminal_output +1097,657668,"TERMINAL",0,0,"41",,terminal_output +1098,658691,"TERMINAL",0,0,"5",,terminal_output +1099,659711,"TERMINAL",0,0,"6",,terminal_output +1100,660752,"TERMINAL",0,0,"7",,terminal_output +1101,661865,"TERMINAL",0,0,"8",,terminal_output +1102,662890,"TERMINAL",0,0,"9",,terminal_output +1103,663915,"TERMINAL",0,0,"40",,terminal_output +1104,664938,"TERMINAL",0,0,"1",,terminal_output +1105,666064,"TERMINAL",0,0,"2",,terminal_output +1106,667088,"TERMINAL",0,0,"3",,terminal_output +1107,668050,"TERMINAL",0,0,"4",,terminal_output +1108,669090,"TERMINAL",0,0,"5",,terminal_output +1109,670160,"TERMINAL",0,0,"6",,terminal_output +1110,671171,"TERMINAL",0,0,"8",,terminal_output +1111,672310,"TERMINAL",0,0,"9",,terminal_output +1112,673248,"TERMINAL",0,0,"50",,terminal_output +1113,674358,"TERMINAL",0,0,"1",,terminal_output +1114,675382,"TERMINAL",0,0,"2",,terminal_output +1115,676417,"TERMINAL",0,0,"3",,terminal_output +1116,677432,"TERMINAL",0,0,"4",,terminal_output +1117,678468,"TERMINAL",0,0,"5",,terminal_output +1118,679306,"genie.py",0,0,"",python,tab +1119,679307,"genie.py",4055,0,"",python,selection_mouse +1120,679500,"TERMINAL",0,0,"6",,terminal_output +1121,680240,"genie.py",4055,0,"\n",python,content 
+1122,680413,"genie.py",4056,0,"\n",python,content +1123,680578,"TERMINAL",0,0,"7",,terminal_output +1124,680685,"genie.py",4057,0,"\n",python,content +1125,680927,"genie.py",4057,0,"",python,selection_command +1126,681094,"genie.py",4056,0,"",python,selection_command +1127,681387,"genie.py",4056,0," ",python,content +1128,681559,"genie.py",4060,0," ",python,content +1129,681623,"TERMINAL",0,0,"8",,terminal_output +1130,682669,"TERMINAL",0,0,"9",,terminal_output +1131,682757,"genie.py",4064,0,"i",python,content +1132,682758,"genie.py",4065,0,"",python,selection_keyboard +1133,682875,"genie.py",4065,0,"n",python,content +1134,682877,"genie.py",4066,0,"",python,selection_keyboard +1135,682986,"genie.py",4066,0,"i",python,content +1136,682987,"genie.py",4067,0,"",python,selection_keyboard +1137,683063,"genie.py",4067,0,"t",python,content +1138,683067,"genie.py",4068,0,"",python,selection_keyboard +1139,683321,"genie.py",4068,0,"_",python,content +1140,683322,"genie.py",4069,0,"",python,selection_keyboard +1141,683672,"genie.py",4069,0,"a",python,content +1142,683673,"genie.py",4070,0,"",python,selection_keyboard +1143,683714,"TERMINAL",0,0,"7:00",,terminal_output +1144,684006,"genie.py",4069,1,"",python,content +1145,684239,"genie.py",4069,0,"m",python,content +1146,684239,"genie.py",4070,0,"",python,selection_keyboard +1147,684304,"genie.py",4070,0,"a",python,content +1148,684306,"genie.py",4071,0,"",python,selection_keyboard +1149,684357,"genie.py",4071,0,"s",python,content +1150,684360,"genie.py",4072,0,"",python,selection_keyboard +1151,684483,"genie.py",4072,0,"k",python,content +1152,684484,"genie.py",4073,0,"",python,selection_keyboard +1153,684728,"TERMINAL",0,0,"1",,terminal_output +1154,685436,"genie.py",4073,0," ",python,content +1155,685439,"genie.py",4074,0,"",python,selection_keyboard +1156,685792,"TERMINAL",0,0,"2",,terminal_output +1157,686631,"genie.py",4074,0,"=",python,content +1158,686632,"genie.py",4075,0,"",python,selection_keyboard +1159,686745,"genie.py",4075,0," ",python,content +1160,686747,"genie.py",4076,0,"",python,selection_keyboard +1161,686818,"TERMINAL",0,0,"36",,terminal_output +1162,687301,"genie.py",4064,0,"",python,selection_command +1163,687930,"TERMINAL",0,0,"4",,terminal_output +1164,688916,"TERMINAL",0,0,"5",,terminal_output +1165,690021,"TERMINAL",0,0,"6",,terminal_output +1166,691106,"TERMINAL",0,0,"7",,terminal_output +1167,692055,"TERMINAL",0,0,"8",,terminal_output +1168,693068,"TERMINAL",0,0,"9",,terminal_output +1169,694133,"TERMINAL",0,0,"10",,terminal_output +1170,695179,"TERMINAL",0,0,"2",,terminal_output +1171,696274,"TERMINAL",0,0,"3",,terminal_output +1172,697273,"TERMINAL",0,0,"4",,terminal_output +1173,698353,"TERMINAL",0,0,"5",,terminal_output +1174,699408,"TERMINAL",0,0,"6",,terminal_output +1175,700369,"TERMINAL",0,0,"7",,terminal_output +1176,701418,"TERMINAL",0,0,"8",,terminal_output +1177,702458,"TERMINAL",0,0,"9",,terminal_output +1178,703498,"TERMINAL",0,0,"20",,terminal_output +1179,704580,"TERMINAL",0,0,"1",,terminal_output +1180,705592,"TERMINAL",0,0,"2",,terminal_output +1181,706625,"TERMINAL",0,0,"3",,terminal_output +1182,707671,"TERMINAL",0,0,"4",,terminal_output +1183,708718,"TERMINAL",0,0,"5",,terminal_output +1184,709790,"TERMINAL",0,0,"6",,terminal_output +1185,710814,"TERMINAL",0,0,"7",,terminal_output +1186,711940,"TERMINAL",0,0,"8",,terminal_output +1187,712887,"TERMINAL",0,0,"9",,terminal_output +1188,713988,"TERMINAL",0,0,"30",,terminal_output +1189,715011,"TERMINAL",0,0,"1",,terminal_output 
+1190,716022,"TERMINAL",0,0,"2",,terminal_output +1191,717163,"TERMINAL",0,0,"3",,terminal_output +1192,718103,"TERMINAL",0,0,"4",,terminal_output +1193,719210,"TERMINAL",0,0,"6",,terminal_output +1194,720234,"TERMINAL",0,0,"7",,terminal_output +1195,721297,"TERMINAL",0,0,"8",,terminal_output +1196,722286,"TERMINAL",0,0,"9",,terminal_output +1197,723359,"TERMINAL",0,0,"40",,terminal_output +1198,724432,"TERMINAL",0,0,"17",,terminal_output +1199,725456,"TERMINAL",0,0,"2",,terminal_output +1200,726481,"TERMINAL",0,0,"3",,terminal_output +1201,727505,"TERMINAL",0,0,"4",,terminal_output +1202,728631,"TERMINAL",0,0,"5",,terminal_output +1203,729660,"TERMINAL",0,0,"6",,terminal_output +1204,730680,"TERMINAL",0,0,"7",,terminal_output +1205,731671,"TERMINAL",0,0,"8",,terminal_output +1206,732720,"TERMINAL",0,0,"9",,terminal_output +1207,733758,"TERMINAL",0,0,"50",,terminal_output +1208,734801,"TERMINAL",0,0,"1",,terminal_output +1209,735973,"TERMINAL",0,0,"2",,terminal_output +1210,736982,"TERMINAL",0,0,"3",,terminal_output +1211,737967,"TERMINAL",0,0,"4",,terminal_output +1212,739003,"TERMINAL",0,0,"5",,terminal_output +1213,740017,"TERMINAL",0,0,"6",,terminal_output +1214,741065,"TERMINAL",0,0,"7",,terminal_output +1215,742248,"TERMINAL",0,0,"8",,terminal_output +1216,743322,"TERMINAL",0,0,"8:00",,terminal_output +1217,744352,"TERMINAL",0,0,"1",,terminal_output +1218,745426,"TERMINAL",0,0,"2",,terminal_output +1219,746459,"TERMINAL",0,0,"3",,terminal_output +1220,747523,"TERMINAL",0,0,"4",,terminal_output +1221,748566,"TERMINAL",0,0,"5",,terminal_output +1222,749569,"TERMINAL",0,0,"6",,terminal_output +1223,750632,"TERMINAL",0,0,"7",,terminal_output +1224,751675,"TERMINAL",0,0,"8",,terminal_output +1225,752730,"TERMINAL",0,0,"9",,terminal_output +1226,753775,"TERMINAL",0,0,"10",,terminal_output +1227,754846,"TERMINAL",0,0,"1",,terminal_output +1228,755894,"TERMINAL",0,0,"2",,terminal_output +1229,756568,"genie.py",4056,0,"",python,selection_command +1230,756896,"TERMINAL",0,0,"3",,terminal_output +1231,757910,"TERMINAL",0,0,"4",,terminal_output +1232,758981,"TERMINAL",0,0,"5",,terminal_output +1233,759194,"genie.py",4056,0," # Create a mask that is 1 (True) where we just padded\n",python,content +1234,759547,"genie.py",4118,0," # token_idxs shape: (B, S, N), T = original length, S = seq_len\n",python,content +1235,759752,"genie.py",4190,0," # mask is True for padded positions (i.e., t >= T)\n",python,content +1236,759928,"genie.py",4249,0," B, S, N = token_idxs.shape\n",python,content +1237,760004,"TERMINAL",0,0,"6",,terminal_output +1238,760635,"genie.py",4284,0," mask = jnp.arange(S)[None, :, None] >= T # shape (1, S, 1) broadcasted to (B, S, N)\n",python,content +1239,760911,"genie.py",4377,0," init_mask = mask.astype(bool)\n",python,content +1240,760915,"genie.py",4415,21,"",python,content +1241,761116,"TERMINAL",0,0,"7",,terminal_output +1242,762226,"TERMINAL",0,0,"8",,terminal_output +1243,763164,"TERMINAL",0,0,"9",,terminal_output +1244,764370,"TERMINAL",0,0,"21",,terminal_output +1245,765392,"TERMINAL",0,0,"2",,terminal_output +1246,766417,"TERMINAL",0,0,"3",,terminal_output +1247,767442,"TERMINAL",0,0,"4",,terminal_output +1248,768443,"TERMINAL",0,0,"5",,terminal_output +1249,769489,"TERMINAL",0,0,"6",,terminal_output +1250,770616,"TERMINAL",0,0,"7",,terminal_output +1251,771563,"TERMINAL",0,0,"8",,terminal_output +1252,772624,"TERMINAL",0,0,"9",,terminal_output +1253,773700,"TERMINAL",0,0,"30",,terminal_output +1254,774717,"TERMINAL",0,0,"1",,terminal_output 
+1255,775743,"TERMINAL",0,0,"2",,terminal_output +1256,776779,"TERMINAL",0,0,"3",,terminal_output +1257,777906,"TERMINAL",0,0,"4",,terminal_output +1258,778951,"TERMINAL",0,0,"5",,terminal_output +1259,780037,"TERMINAL",0,0,"6",,terminal_output +1260,781061,"TERMINAL",0,0,"7",,terminal_output +1261,782084,"TERMINAL",0,0,"8",,terminal_output +1262,783117,"TERMINAL",0,0,"9",,terminal_output +1263,784235,"TERMINAL",0,0,"41",,terminal_output +1264,785244,"TERMINAL",0,0,"2",,terminal_output +1265,786288,"TERMINAL",0,0,"3",,terminal_output +1266,787409,"TERMINAL",0,0,"4",,terminal_output +1267,788434,"TERMINAL",0,0,"5",,terminal_output +1268,789408,"TERMINAL",0,0,"6",,terminal_output +1269,790481,"TERMINAL",0,0,"7",,terminal_output +1270,791608,"TERMINAL",0,0,"8",,terminal_output +1271,792582,"TERMINAL",0,0,"9",,terminal_output +1272,793656,"TERMINAL",0,0,"50",,terminal_output +1273,794699,"TERMINAL",0,0,"1",,terminal_output +1274,795706,"TERMINAL",0,0,"2",,terminal_output +1275,796730,"TERMINAL",0,0,"3",,terminal_output +1276,797760,"TERMINAL",0,0,"4",,terminal_output +1277,798805,"TERMINAL",0,0,"5",,terminal_output +1278,799846,"TERMINAL",0,0,"6",,terminal_output +1279,800927,"TERMINAL",0,0,"7",,terminal_output +1280,801950,"TERMINAL",0,0,"8",,terminal_output +1281,803140,"TERMINAL",0,0,"9",,terminal_output +1282,804204,"TERMINAL",0,0,"9:01",,terminal_output +1283,805225,"TERMINAL",0,0,"2",,terminal_output +1284,805895,"genie.py",4275,0,"",python,selection_mouse +1285,806375,"genie.py",4222,0,"",python,selection_mouse +1286,806391,"TERMINAL",0,0,"3",,terminal_output +1287,807378,"TERMINAL",0,0,"4",,terminal_output +1288,808402,"TERMINAL",0,0,"5",,terminal_output +1289,809426,"TERMINAL",0,0,"6",,terminal_output +1290,810553,"TERMINAL",0,0,"7",,terminal_output +1291,811577,"TERMINAL",0,0,"8",,terminal_output +1292,812601,"TERMINAL",0,0,"9",,terminal_output +1293,813625,"TERMINAL",0,0,"10",,terminal_output +1294,814664,"TERMINAL",0,0,"16",,terminal_output +1295,815774,"TERMINAL",0,0,"2",,terminal_output +1296,816014,"genie.py",4415,0,"",python,selection_mouse +1297,816800,"TERMINAL",0,0,"3",,terminal_output +1298,817405,"genie.py",4415,0," ",python,content +1299,817592,"genie.py",4419,0," ",python,content +1300,817822,"TERMINAL",0,0,"4",,terminal_output +1301,818843,"TERMINAL",0,0,"5",,terminal_output +1302,819882,"TERMINAL",0,0,"6",,terminal_output +1303,820929,"TERMINAL",0,0,"7",,terminal_output +1304,822020,"TERMINAL",0,0,"8",,terminal_output +1305,822122,"genie.py",4423,0,"j",python,content +1306,822123,"genie.py",4424,0,"",python,selection_keyboard +1307,823016,"TERMINAL",0,0,"9",,terminal_output +1308,823063,"genie.py",4424,0,"a",python,content +1309,823064,"genie.py",4425,0,"",python,selection_keyboard +1310,824069,"TERMINAL",0,0,"20",,terminal_output +1311,824421,"genie.py",4424,1,"",python,content +1312,824610,"genie.py",4423,1,"",python,content +1313,824849,"genie.py",4422,0,"",python,selection_command +1314,825106,"TERMINAL",0,0,"1",,terminal_output +1315,826197,"TERMINAL",0,0,"3",,terminal_output +1316,826678,"genie.py",4479,0,"",python,selection_mouse +1317,827230,"TERMINAL",0,0,"4",,terminal_output +1318,827806,"genie.py",4479,0,"_",python,content +1319,827807,"genie.py",4480,0,"",python,selection_keyboard +1320,828053,"genie.py",4480,0,"o",python,content +1321,828054,"genie.py",4481,0,"",python,selection_keyboard +1322,828187,"genie.py",4481,0,"l",python,content +1323,828188,"genie.py",4482,0,"",python,selection_keyboard +1324,828244,"TERMINAL",0,0,"5",,terminal_output 
+1325,828320,"genie.py",4482,0,"d",python,content +1326,828321,"genie.py",4483,0,"",python,selection_keyboard +1327,828561,"genie.py",4482,0,"",python,selection_command +1328,828766,"genie.py",4445,0,"",python,selection_command +1329,829063,"genie.py",4424,0,"",python,selection_command +1330,829246,"genie.py",4422,0,"",python,selection_command +1331,829309,"TERMINAL",0,0,"6",,terminal_output +1332,829534,"genie.py",4397,0,"",python,selection_command +1333,829766,"genie.py",4414,0,"\n ",python,content +1334,830218,"genie.py",4423,0,"d",python,content +1335,830219,"genie.py",4424,0,"",python,selection_keyboard +1336,830319,"TERMINAL",0,0,"7",,terminal_output +1337,830576,"genie.py",4423,1,"",python,content +1338,830938,"genie.py",4423,0,"j",python,content +1339,830939,"genie.py",4424,0,"",python,selection_keyboard +1340,831022,"genie.py",4424,0,"a",python,content +1341,831024,"genie.py",4425,0,"",python,selection_keyboard +1342,831188,"genie.py",4425,0,"x",python,content +1343,831190,"genie.py",4426,0,"",python,selection_keyboard +1344,831297,"genie.py",4426,0,".",python,content +1345,831298,"genie.py",4427,0,"",python,selection_keyboard +1346,831376,"TERMINAL",0,0,"8",,terminal_output +1347,831475,"genie.py",4427,0,"d",python,content +1348,831477,"genie.py",4428,0,"",python,selection_keyboard +1349,831636,"genie.py",4428,0,"e",python,content +1350,831637,"genie.py",4429,0,"",python,selection_keyboard +1351,831715,"genie.py",4429,0,"b",python,content +1352,831716,"genie.py",4430,0,"",python,selection_keyboard +1353,831860,"genie.py",4430,0,"u",python,content +1354,831860,"genie.py",4431,0,"",python,selection_keyboard +1355,831928,"genie.py",4431,0,"g",python,content +1356,831929,"genie.py",4432,0,"",python,selection_keyboard +1357,832079,"genie.py",4432,0,".",python,content +1358,832079,"genie.py",4433,0,"",python,selection_keyboard +1359,832324,"genie.py",4433,0,"b",python,content +1360,832326,"genie.py",4434,0,"",python,selection_keyboard +1361,832392,"genie.py",4434,0,"r",python,content +1362,832393,"genie.py",4435,0,"",python,selection_keyboard +1363,832488,"TERMINAL",0,0,"9",,terminal_output +1364,832569,"genie.py",4435,0,"e",python,content +1365,832570,"genie.py",4436,0,"",python,selection_keyboard +1366,832695,"genie.py",4436,0,"a",python,content +1367,832698,"genie.py",4437,0,"",python,selection_keyboard +1368,832780,"genie.py",4437,0,"k",python,content +1369,832782,"genie.py",4438,0,"",python,selection_keyboard +1370,833122,"genie.py",4433,5,"breakpoint",python,content +1371,833461,"TERMINAL",0,0,"30",,terminal_output +1372,833902,"genie.py",4443,0,"()",python,content +1373,833903,"genie.py",4444,0,"",python,selection_keyboard +1374,833973,"genie.py",4444,1,")",python,content +1375,833973,"genie.py",4445,0,"",python,selection_keyboard +1376,834046,"genie.py",4444,0,"",python,selection_command +1377,834492,"TERMINAL",0,0,"1",,terminal_output +1378,835542,"TERMINAL",0,0,"2",,terminal_output +1379,836632,"TERMINAL",0,0,"\rmask.shape",,terminal_output +1380,836633,"TERMINAL",0,0,"3",,terminal_output +1381,837599,"TERMINAL",0,0,"\rpad",,terminal_output +1382,837617,"TERMINAL",0,0,"4",,terminal_output +1383,837956,"TERMINAL",0,0,"\rS",,terminal_output +1384,838398,"TERMINAL",0,0,"\rT",,terminal_output +1385,838714,"TERMINAL",0,0,"58",,terminal_output +1386,838777,"TERMINAL",0,0,"\r",,terminal_output +1387,839146,"TERMINAL",0,0,"\r",,terminal_output +1388,839543,"TERMINAL",0,0,"\r",,terminal_output +1389,839686,"TERMINAL",0,0,"6",,terminal_output 
+1390,839749,"TERMINAL",0,0,"\r",,terminal_output +1391,839950,"TERMINAL",0,0,"\r",,terminal_output +1392,840603,"TERMINAL",0,0,"",,terminal_output +1393,840725,"TERMINAL",0,0,"7",,terminal_output +1394,840853,"TERMINAL",0,0,"\rS",,terminal_output +1395,841300,"TERMINAL",0,0,"\rpad.shape",,terminal_output +1396,841470,"TERMINAL",0,0,"\r[1@mask",,terminal_output +1397,841620,"TERMINAL",0,0,"\r",,terminal_output +1398,841765,"TERMINAL",0,0,"8",,terminal_output +1399,841786,"TERMINAL",0,0,"\r",,terminal_output +1400,841939,"TERMINAL",0,0,"--KeyboardInterrupt--\r\nEntering jdb:\r\n(jdb) ",,terminal_output +1401,842381,"TERMINAL",0,0,"^DERROR:2025-07-03 16:29:39,241:jax._src.debugging:96: jax.debug.callback failed\r\nTraceback (most recent call last):\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/jax/_src/debugging.py"", line 94, in debug_callback_impl\r\n callback(*args)\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/jax/_src/debugging.py"", line 334, in _flat_callback\r\n callback(*args, **kwargs)\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/jax/_src/debugger/core.py"", line 220, in _breakpoint_callback\r\n debugger(frames, thread_id, **kwargs)\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/jax/_src/debugger/cli_debugger.py"", line 167, in run_debugger\r\n CliDebugger(frames, thread_id, **kwargs).run()\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/jax/_src/debugger/cli_debugger.py"", line 160, in run\r\n self.cmdloop()\r\n File ""/home/hk-project-p0023960/tum_cte0515/.local/share/uv/python/cpython-3.10.18-linux-x86_64-gnu/lib/python3.10/cmd.py"", line 138, in cmdloop\r\n stop = self.onecmd(line)\r\n File ""/home/hk-project-p0023960/tum_cte0515/.local/share/uv/python/cpython-3.10.18-linux-x86_64-gnu/lib/python3.10/cmd.py"", line 217, in onecmd\r\n return func(arg)\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/jax/_src/debugger/cli_debugger.py"", line 146, in do_quit\r\n sys.exit(0)\r\nSystemExit: 0\r\nERROR:jax._src.debugging:jax.debug.callback failed\r\nTraceback (most recent call last):\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/jax/_src/debugging.py"", line 94, in debug_callback_impl\r\n callback(*args)\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/jax/_src/debugging.py"", line 334, in _flat_callback\r\n callback(*args, **kwargs)\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/jax/_src/debugger/core.py"", line 220, in _breakpoint_callback\r\n debugger(frames, thread_id, **kwargs)\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/jax/_src/debugger/cli_debugger.py"", line 167, in run_debugger\r\n CliDebugger(frames, thread_id, **kwargs).run()\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/jax/_src/debugger/cli_debugger.py"", line 160, in run\r\n self.cmdloop()\r\n File 
""/home/hk-project-p0023960/tum_cte0515/.local/share/uv/python/cpython-3.10.18-linux-x86_64-gnu/lib/python3.10/cmd.py"", line 138, in cmdloop\r\n stop = self.onecmd(line)\r\n File ""/home/hk-project-p0023960/tum_cte0515/.local/share/uv/python/cpython-3.10.18-linux-x86_64-gnu/lib/python3.10/cmd.py"", line 217, in onecmd\r\n return func(arg)\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/jax/_src/debugger/cli_debugger.py"", line 146, in do_quit\r\n sys.exit(0)\r\nSystemExit: 0\r\n",,terminal_output +1402,842804,"TERMINAL",0,0,"9",,terminal_output +1403,843715,"TERMINAL",0,0,"]0;tum_cte0515@hkn0704:~/Projects/jafar[?2004h(jafar) [tum_cte0515@hkn0704 jafar]$ ",,terminal_output +1404,843849,"TERMINAL",0,0,"40",,terminal_output +1405,844350,"TERMINAL",0,0,"sh scripts_horeka/overfit_sample_tiny/sample.sh",,terminal_output +1406,844614,"TERMINAL",0,0,"\r\n[?2004l\r",,terminal_output +1407,844729,"TERMINAL",0,0,"Sampling from checkpoint: /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/3309699/genie_1751384516_74500/\r\n",,terminal_output +1408,844890,"TERMINAL",0,0,"19",,terminal_output +1409,845917,"TERMINAL",0,0,"2",,terminal_output +1410,847007,"TERMINAL",0,0,"3",,terminal_output +1411,847623,"TERMINAL",0,0,"2025-07-03 16:29:44.418309: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +1412,847929,"genie.py",0,0,"",python,tab +1413,847930,"genie.py",4117,0,"",python,selection_mouse +1414,847989,"genie.py",4116,0,"",python,selection_command +1415,848113,"TERMINAL",0,0,"4",,terminal_output +1416,849058,"TERMINAL",0,0,"5",,terminal_output +1417,850284,"TERMINAL",0,0,"6",,terminal_output +1418,851206,"TERMINAL",0,0,"7",,terminal_output +1419,851894,"TERMINAL",0,0,"2025-07-03 16:29:48.794557: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +1420,852230,"TERMINAL",0,0,"9",,terminal_output +1421,853253,"TERMINAL",0,0,"50",,terminal_output +1422,854277,"TERMINAL",0,0,"1",,terminal_output +1423,855301,"TERMINAL",0,0,"2",,terminal_output +1424,856326,"TERMINAL",0,0,"3",,terminal_output +1425,857380,"TERMINAL",0,0,"4",,terminal_output +1426,858420,"TERMINAL",0,0,"5",,terminal_output +1427,859501,"TERMINAL",0,0,"6",,terminal_output +1428,860526,"TERMINAL",0,0,"7",,terminal_output +1429,861203,"TERMINAL",0,0,"2025-07-03 16:29:58.087460: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. 
Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +1430,861545,"TERMINAL",0,0,"8",,terminal_output +1431,862793,"TERMINAL",0,0,"9",,terminal_output +1432,863734,"TERMINAL",0,0,"30:00",,terminal_output +1433,864723,"TERMINAL",0,0,"1",,terminal_output +1434,865699,"TERMINAL",0,0,"2",,terminal_output +1435,866734,"TERMINAL",0,0,"310",,terminal_output +1436,867795,"TERMINAL",0,0,"426",,terminal_output +1437,868820,"TERMINAL",0,0,"5",,terminal_output +1438,869331,"TERMINAL",0,0,"2025-07-03 16:30:06.183260: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +1439,869852,"TERMINAL",0,0,"6",,terminal_output +1440,870892,"TERMINAL",0,0,"7",,terminal_output +1441,871993,"TERMINAL",0,0,"8",,terminal_output +1442,872976,"TERMINAL",0,0,"9",,terminal_output +1443,874045,"TERMINAL",0,0,"10",,terminal_output +1444,875167,"TERMINAL",0,0,"1",,terminal_output +1445,876010,"TERMINAL",0,0,"2025-07-03 16:30:12.912208: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +1446,876115,"TERMINAL",0,0,"2",,terminal_output +1447,877157,"TERMINAL",0,0,"4",,terminal_output +1448,878240,"TERMINAL",0,0,"5",,terminal_output +1449,879236,"TERMINAL",0,0,"6",,terminal_output +1450,880288,"TERMINAL",0,0,"7",,terminal_output +1451,881361,"TERMINAL",0,0,"8",,terminal_output +1452,882439,"TERMINAL",0,0,"9",,terminal_output +1453,882872,"TERMINAL",0,0,"2025-07-03 16:30:19.748548: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +1454,883463,"TERMINAL",0,0,"20",,terminal_output +1455,884487,"TERMINAL",0,0,"1",,terminal_output +1456,885479,"TERMINAL",0,0,"2",,terminal_output +1457,886228,"TERMINAL",0,0,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/orbax/checkpoint/_src/serialization/type_handlers.py:1251: UserWarning: Sharding info not provided when restoring. Populating sharding info from sharding file. Please note restoration time will be slightly increased due to reading from file. 
Note also that this option is unsafe when restoring on a different topology than the checkpoint was saved with.\r\n warnings.warn(\r\n",,terminal_output +1458,886534,"TERMINAL",0,0,"3",,terminal_output +1459,887662,"TERMINAL",0,0,"4",,terminal_output +1460,888598,"TERMINAL",0,0,"5",,terminal_output +1461,889708,"TERMINAL",0,0,"6",,terminal_output +1462,890669,"TERMINAL",0,0,"7",,terminal_output +1463,891819,"TERMINAL",0,0,"8",,terminal_output +1464,892785,"TERMINAL",0,0,"9",,terminal_output +1465,893805,"TERMINAL",0,0,"30",,terminal_output +1466,894624,"TERMINAL",0,0,"2025-07-03 16:30:31.509191: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +1467,894830,"TERMINAL",0,0,"1",,terminal_output +1468,895870,"TERMINAL",0,0,"2",,terminal_output +1469,896905,"TERMINAL",0,0,"3",,terminal_output +1470,897799,"TERMINAL",0,0,"2025-07-03 16:30:34.617611: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +1471,897956,"TERMINAL",0,0,"4",,terminal_output +1472,899027,"TERMINAL",0,0,"5",,terminal_output +1473,900070,"TERMINAL",0,0,"6",,terminal_output +1474,901090,"TERMINAL",0,0,"7",,terminal_output +1475,902243,"TERMINAL",0,0,"8",,terminal_output +1476,903163,"TERMINAL",0,0,"40",,terminal_output +1477,904204,"TERMINAL",0,0,"1",,terminal_output +1478,905290,"TERMINAL",0,0,"2",,terminal_output +1479,906056,"TERMINAL",0,0,"2025-07-03 16:30:42.955375: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. 
Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +1480,906288,"TERMINAL",0,0,"3",,terminal_output +1481,907429,"TERMINAL",0,0,"47",,terminal_output +1482,907757,"TERMINAL",0,0,"token_idxs shape: (1, 1, 920)\r\ntoken_idxs shape: (1, 6, 920)\r\n",,terminal_output +1483,907864,"TERMINAL",0,0,"new_frame_idxs.shape: (1, 920)\r\n",,terminal_output +1484,908036,"TERMINAL",0,0,"action_tokens.shape: (1, 5, 1, 32)\r\n",,terminal_output +1485,908247,"TERMINAL",0,0,"Entering jdb:\r\n(jdb) ",,terminal_output +1486,908374,"TERMINAL",0,0,"5",,terminal_output +1487,909514,"TERMINAL",0,0,"6",,terminal_output +1488,910497,"TERMINAL",0,0,"7",,terminal_output +1489,911496,"TERMINAL",0,0,"8",,terminal_output +1490,912545,"TERMINAL",0,0,"9",,terminal_output +1491,913671,"TERMINAL",0,0,"50",,terminal_output +1492,914695,"TERMINAL",0,0,"1",,terminal_output +1493,915651,"TERMINAL",0,0,"2",,terminal_output +1494,916742,"TERMINAL",0,0,"3",,terminal_output +1495,917770,"TERMINAL",0,0,"4",,terminal_output +1496,918791,"TERMINAL",0,0,"5",,terminal_output +1497,919883,"TERMINAL",0,0,"6",,terminal_output +1498,920563,"TERMINAL",0,0,"i",,terminal_output +1499,920765,"TERMINAL",0,0,"[?25li[?25h",,terminal_output +1500,920923,"TERMINAL",0,0,"[?25ln[?25h",,terminal_output +1501,920924,"TERMINAL",0,0,"7",,terminal_output +1502,920987,"TERMINAL",0,0,"[?25li[?25h",,terminal_output +1503,921756,"TERMINAL",0,0,"[?25li\r[?25h",,terminal_output +1504,921869,"TERMINAL",0,0,"[?25ln\r[?25h",,terminal_output +1505,921927,"TERMINAL",0,0,"8",,terminal_output +1506,921948,"TERMINAL",0,0,"[?25li\r[?25h",,terminal_output +1507,922247,"TERMINAL",0,0,"[?25ln[?25h[?25li[?25h",,terminal_output +1508,922448,"TERMINAL",0,0,"[?25lt[?25h",,terminal_output +1509,922907,"TERMINAL",0,0,"[?25l_[?25h",,terminal_output +1510,922999,"TERMINAL",0,0,"9",,terminal_output +1511,923159,"TERMINAL",0,0,"[?25lm[?25h",,terminal_output +1512,923228,"TERMINAL",0,0,"[?25la[?25h",,terminal_output +1513,923356,"TERMINAL",0,0,"[?25ls[?25h[?25lk[?25h",,terminal_output +1514,923622,"TERMINAL",0,0,"[?25l.[?25h",,terminal_output +1515,923685,"TERMINAL",0,0,"[?25ls[?25h",,terminal_output +1516,923869,"TERMINAL",0,0,"[?25lh[?25h",,terminal_output +1517,923927,"TERMINAL",0,0,"[?25la[?25h",,terminal_output +1518,923989,"TERMINAL",0,0,"1:00",,terminal_output +1519,924034,"TERMINAL",0,0,"[?25lp[?25h",,terminal_output +1520,924105,"TERMINAL",0,0,"[?25le[?25h",,terminal_output +1521,924168,"TERMINAL",0,0,"\r\n(1, 6, 1)\r\n(jdb) ",,terminal_output +1522,925037,"TERMINAL",0,0,"1",,terminal_output +1523,926069,"TERMINAL",0,0,"2",,terminal_output +1524,927122,"TERMINAL",0,0,"3",,terminal_output +1525,927225,"TERMINAL",0,0,"t",,terminal_output +1526,927333,"TERMINAL",0,0,"[?25lok[?25h",,terminal_output +1527,927468,"TERMINAL",0,0,"[?25le[?25h",,terminal_output +1528,927528,"TERMINAL",0,0,"[?25ln[?25h",,terminal_output +1529,927959,"TERMINAL",0,0,"[?25l_[?25h",,terminal_output +1530,928145,"TERMINAL",0,0,"5",,terminal_output +1531,928360,"TERMINAL",0,0,"[?25ld[?25h",,terminal_output +1532,928697,"TERMINAL",0,0,"[?25ld\r[?25h",,terminal_output +1533,928878,"TERMINAL",0,0,"[?25li[?25h",,terminal_output +1534,928939,"TERMINAL",0,0,"[?25ld[?25h",,terminal_output +1535,929196,"TERMINAL",0,0,"6",,terminal_output +1536,929217,"TERMINAL",0,0,"[?25lx[?25h",,terminal_output +1537,929616,"TERMINAL",0,0,"[?25ls[?25h",,terminal_output +1538,929927,"TERMINAL",0,0,"[?25l_[?25h",,terminal_output 
+1539,930226,"TERMINAL",0,0,"7",,terminal_output +1540,930334,"TERMINAL",0,0,"[?25l_\r[?25h",,terminal_output +1541,930440,"TERMINAL",0,0,"[?25l.[?25h",,terminal_output +1542,930777,"TERMINAL",0,0,"[?25ls[?25h[?25lh[?25h",,terminal_output +1543,931015,"TERMINAL",0,0,"[?25la[?25h",,terminal_output +1544,931068,"TERMINAL",0,0,"[?25lp[?25h",,terminal_output +1545,931131,"TERMINAL",0,0,"[?25le[?25h",,terminal_output +1546,931260,"TERMINAL",0,0,"\r\n(1, 6, 920)\r\n(jdb) ",,terminal_output +1547,931284,"TERMINAL",0,0,"8",,terminal_output +1548,932307,"TERMINAL",0,0,"9",,terminal_output +1549,933436,"TERMINAL",0,0,"10",,terminal_output +1550,935073,"TERMINAL",0,0,"1 3",,terminal_output +1551,936097,"TERMINAL",0,0,"2",,terminal_output +1552,937120,"TERMINAL",0,0,"3",,terminal_output +1553,938108,"TERMINAL",0,0,"4",,terminal_output +1554,939170,"TERMINAL",0,0,"6",,terminal_output +1555,939290,"TERMINAL",0,0,"i",,terminal_output +1556,939354,"TERMINAL",0,0,"[?25ln[?25h",,terminal_output +1557,939502,"TERMINAL",0,0,"[?25li[?25h",,terminal_output +1558,939569,"TERMINAL",0,0,"[?25lt[?25h",,terminal_output +1559,939784,"TERMINAL",0,0,"[?25l_[?25h",,terminal_output +1560,940092,"TERMINAL",0,0,"[?25lm[?25h[?25la[?25h",,terminal_output +1561,940233,"TERMINAL",0,0,"7",,terminal_output +1562,940233,"TERMINAL",0,0,"[?25ls[?25h",,terminal_output +1563,940296,"TERMINAL",0,0,"[?25lk[?25h",,terminal_output +1564,940737,"TERMINAL",0,0,"[?25l_[?25h",,terminal_output +1565,941010,"TERMINAL",0,0,"[?25lo[?25h",,terminal_output +1566,941193,"TERMINAL",0,0,"[?25ll[?25h",,terminal_output +1567,941255,"TERMINAL",0,0,"8",,terminal_output +1568,941296,"TERMINAL",0,0,"[?25ld[?25h",,terminal_output +1569,941374,"TERMINAL",0,0,"[?25l.[?25h",,terminal_output +1570,941603,"TERMINAL",0,0,"[?25ls[?25h",,terminal_output +1571,941710,"TERMINAL",0,0,"[?25lj[?25h",,terminal_output +1572,941861,"TERMINAL",0,0,"[?25la[?25h",,terminal_output +1573,941927,"TERMINAL",0,0,"[?25lp[?25h",,terminal_output +1574,942062,"TERMINAL",0,0,"[?25le[?25h\r\n*** NameError: name 'init_mask_old' is not defined\r\n(jdb) ",,terminal_output +1575,942343,"TERMINAL",0,0,"9",,terminal_output +1576,942727,"TERMINAL",0,0,"\rinit_mask_old.sjape",,terminal_output +1577,943317,"TERMINAL",0,0,"20",,terminal_output +1578,943694,"TERMINAL",0,0,"[?25lj\rape[?25h",,terminal_output +1579,944120,"TERMINAL",0,0,"\rhape",,terminal_output +1580,944276,"TERMINAL",0,0,"\r\n*** NameError: name 'init_mask_old' is not defined\r\n(jdb) ",,terminal_output +1581,944381,"TERMINAL",0,0,"1",,terminal_output +1582,945421,"TERMINAL",0,0,"2",,terminal_output +1583,946434,"TERMINAL",0,0,"3",,terminal_output +1584,947565,"TERMINAL",0,0,"4",,terminal_output +1585,948590,"TERMINAL",0,0,"5",,terminal_output +1586,949614,"TERMINAL",0,0,"6",,terminal_output +1587,950370,"TERMINAL",0,0,"n",,terminal_output +1588,950576,"TERMINAL",0,0,"\r\n*** NameError: name 'n' is not defined\r\n(jdb) ",,terminal_output +1589,950609,"TERMINAL",0,0,"7",,terminal_output +1590,951193,"TERMINAL",0,0,"c",,terminal_output +1591,951546,"TERMINAL",0,0,"\r\n",,terminal_output +1592,951607,"TERMINAL",0,0,"Entering jdb:\r\n(jdb) ",,terminal_output +1593,951703,"TERMINAL",0,0,"8",,terminal_output +1594,952573,"TERMINAL",0,0,"\rc",,terminal_output +1595,952684,"TERMINAL",0,0,"9",,terminal_output +1596,952710,"TERMINAL",0,0,"\rn",,terminal_output +1597,953130,"TERMINAL",0,0,"\rinit_mask_old.shape",,terminal_output +1598,953726,"TERMINAL",0,0,"\r\n(1, 920)\r\n(jdb) ",,terminal_output 
+1599,953747,"TERMINAL",0,0,"30",,terminal_output +1600,954836,"TERMINAL",0,0,"1",,terminal_output +1601,955801,"TERMINAL",0,0,"2",,terminal_output +1602,956849,"TERMINAL",0,0,"3",,terminal_output +1603,956912,"TERMINAL",0,0,"\rinit_mask_old.shape",,terminal_output +1604,957170,"TERMINAL",0,0,"\rc",,terminal_output +1605,957398,"TERMINAL",0,0,"\rn",,terminal_output +1606,957895,"TERMINAL",0,0,"4",,terminal_output +1607,957930,"TERMINAL",0,0,"\rinit_mask_old.shape",,terminal_output +1608,958695,"TERMINAL",0,0,"\rjape",,terminal_output +1609,958923,"TERMINAL",0,0,"54",,terminal_output +1610,959173,"TERMINAL",0,0,"\rtoken_idxs.shape",,terminal_output +1611,960058,"TERMINAL",0,0,"6",,terminal_output +1612,960121,"TERMINAL",0,0,"\rinit_mask",,terminal_output +1613,960693,"TERMINAL",0,0,"\r\n(1, 6, 1)\r\n(jdb) ",,terminal_output +1614,961083,"TERMINAL",0,0,"7",,terminal_output +1615,962107,"TERMINAL",0,0,"8",,terminal_output +1616,962698,"genie.py",0,0,"",python,tab +1617,962699,"genie.py",4445,0,"",python,selection_mouse +1618,962742,"genie.py",4444,0,"",python,selection_command +1619,963149,"TERMINAL",0,0,"9",,terminal_output +1620,964153,"TERMINAL",0,0,"40",,terminal_output +1621,965180,"TERMINAL",0,0,"2",,terminal_output +1622,966201,"TERMINAL",0,0,"3",,terminal_output +1623,967328,"TERMINAL",0,0,"4",,terminal_output +1624,968352,"TERMINAL",0,0,"5",,terminal_output +1625,969314,"TERMINAL",0,0,"6",,terminal_output +1626,970402,"TERMINAL",0,0,"75",,terminal_output +1627,971402,"TERMINAL",0,0,"8",,terminal_output +1628,972448,"TERMINAL",0,0,"9",,terminal_output +1629,973574,"TERMINAL",0,0,"50",,terminal_output +1630,974599,"TERMINAL",0,0,"1",,terminal_output +1631,975623,"TERMINAL",0,0,"2",,terminal_output +1632,976647,"TERMINAL",0,0,"3",,terminal_output +1633,977672,"TERMINAL",0,0,"4",,terminal_output +1634,978799,"TERMINAL",0,0,"5",,terminal_output +1635,979826,"TERMINAL",0,0,"6",,terminal_output +1636,980848,"TERMINAL",0,0,"7",,terminal_output +1637,981872,"TERMINAL",0,0,"8",,terminal_output +1638,982858,"TERMINAL",0,0,"9",,terminal_output +1639,983900,"TERMINAL",0,0,"2:00",,terminal_output +1640,984931,"TERMINAL",0,0,"1",,terminal_output +1641,985975,"TERMINAL",0,0,"2",,terminal_output +1642,987012,"TERMINAL",0,0,"3",,terminal_output +1643,987689,"genie.py",4445,0,"",python,selection_mouse +1644,987704,"genie.py",4444,0,"",python,selection_command +1645,988055,"TERMINAL",0,0,"4",,terminal_output +1646,988360,"genie.py",4414,0,"",python,selection_mouse +1647,988371,"genie.py",4413,0,"",python,selection_command +1648,989139,"TERMINAL",0,0,"5",,terminal_output +1649,989502,"genie.py",4414,0,"",python,selection_mouse +1650,989516,"genie.py",4413,0,"",python,selection_command +1651,990121,"genie.py",4445,0,"",python,selection_mouse +1652,990132,"genie.py",4444,0,"",python,selection_command +1653,990145,"TERMINAL",0,0,"7",,terminal_output +1654,991290,"TERMINAL",0,0,"8",,terminal_output +1655,992223,"TERMINAL",0,0,"9",,terminal_output +1656,993339,"TERMINAL",0,0,"10",,terminal_output +1657,994361,"TERMINAL",0,0,"1",,terminal_output +1658,995386,"TERMINAL",0,0,"2",,terminal_output +1659,996390,"TERMINAL",0,0,"3",,terminal_output +1660,997539,"TERMINAL",0,0,"4",,terminal_output +1661,998334,"genie.py",4445,0,"",python,selection_mouse +1662,998346,"genie.py",4444,0,"",python,selection_command +1663,998474,"TERMINAL",0,0,"5",,terminal_output +1664,998990,"genie.py",4332,0,"",python,selection_mouse +1665,999561,"genie.py",4445,0,"",python,selection_mouse 
+1666,999593,"genie.py",4444,0,"",python,selection_command +1667,999603,"TERMINAL",0,0,"6",,terminal_output +1668,1000370,"genie.py",4445,0,"",python,selection_mouse +1669,1000372,"genie.py",4444,0,"",python,selection_command +1670,1000554,"TERMINAL",0,0,"7",,terminal_output +1671,1000923,"genie.py",4445,0,"",python,selection_mouse +1672,1000935,"genie.py",4444,0,"",python,selection_command +1673,1001624,"genie.py",4414,0,"",python,selection_mouse +1674,1001637,"genie.py",4413,0,"",python,selection_command +1675,1001638,"TERMINAL",0,0,"8",,terminal_output +1676,1002657,"TERMINAL",0,0,"9",,terminal_output +1677,1002833,"genie.py",4445,0,"",python,selection_mouse +1678,1002847,"genie.py",4444,0,"",python,selection_command +1679,1003691,"TERMINAL",0,0,"20",,terminal_output +1680,1004807,"TERMINAL",0,0,"1",,terminal_output +1681,1005832,"TERMINAL",0,0,"2",,terminal_output +1682,1006891,"TERMINAL",0,0,"34",,terminal_output +1683,1006908,"genie.py",4343,0,"",python,selection_mouse +1684,1007872,"genie.py",4376,0,"",python,selection_mouse +1685,1007873,"genie.py",4375,0,"",python,selection_command +1686,1007939,"TERMINAL",0,0,"4",,terminal_output +1687,1008459,"genie.py",4454,0,"",python,selection_mouse +1688,1008461,"genie.py",4453,0,"",python,selection_command +1689,1008632,"genie.py",4453,1," ",python,selection_mouse +1690,1008632,"genie.py",4454,0,"",python,selection_command +1691,1008727,"genie.py",4400,54,"k.astype(bool)\n jax.debug.breakpoint()\n ",python,selection_mouse +1692,1008728,"genie.py",4257,197,"B, S, N = token_idxs.shape\n mask = jnp.arange(S)[None, :, None] >= T # shape (1, S, 1) broadcasted to (B, S, N)\n init_mask = mask.astype(bool)\n jax.debug.breakpoint()\n ",python,selection_mouse +1693,1008755,"genie.py",4191,263," # mask is True for padded positions (i.e., t >= T)\n B, S, N = token_idxs.shape\n mask = jnp.arange(S)[None, :, None] >= T # shape (1, S, 1) broadcasted to (B, S, N)\n init_mask = mask.astype(bool)\n jax.debug.breakpoint()\n ",python,selection_mouse +1694,1008784,"genie.py",4190,264," # mask is True for padded positions (i.e., t >= T)\n B, S, N = token_idxs.shape\n mask = jnp.arange(S)[None, :, None] >= T # shape (1, S, 1) broadcasted to (B, S, N)\n init_mask = mask.astype(bool)\n jax.debug.breakpoint()\n ",python,selection_mouse +1695,1008893,"TERMINAL",0,0,"5",,terminal_output +1696,1009005,"genie.py",4191,263," # mask is True for padded positions (i.e., t >= T)\n B, S, N = token_idxs.shape\n mask = jnp.arange(S)[None, :, None] >= T # shape (1, S, 1) broadcasted to (B, S, N)\n init_mask = mask.astype(bool)\n jax.debug.breakpoint()\n ",python,selection_mouse +1697,1009006,"genie.py",4192,262," # mask is True for padded positions (i.e., t >= T)\n B, S, N = token_idxs.shape\n mask = jnp.arange(S)[None, :, None] >= T # shape (1, S, 1) broadcasted to (B, S, N)\n init_mask = mask.astype(bool)\n jax.debug.breakpoint()\n ",python,selection_mouse +1698,1009006,"genie.py",4194,260," # mask is True for padded positions (i.e., t >= T)\n B, S, N = token_idxs.shape\n mask = jnp.arange(S)[None, :, None] >= T # shape (1, S, 1) broadcasted to (B, S, N)\n init_mask = mask.astype(bool)\n jax.debug.breakpoint()\n ",python,selection_mouse +1699,1009006,"genie.py",4197,257," # mask is True for padded positions (i.e., t >= T)\n B, S, N = token_idxs.shape\n mask = jnp.arange(S)[None, :, None] >= T # shape (1, S, 1) broadcasted to (B, S, N)\n init_mask = mask.astype(bool)\n jax.debug.breakpoint()\n ",python,selection_mouse +1700,1009358,"genie.py",4197,0,"",python,selection_mouse 
+1701,1009931,"TERMINAL",0,0,"6",,terminal_output +1702,1010005,"genie.py",4414,0,"",python,selection_mouse +1703,1010018,"genie.py",4413,0,"",python,selection_command +1704,1010238,"genie.py",4413,1,")",python,selection_mouse +1705,1010239,"genie.py",4413,0,"",python,selection_mouse +1706,1010239,"genie.py",4300,113,"np.arange(S)[None, :, None] >= T # shape (1, S, 1) broadcasted to (B, S, N)\n init_mask = mask.astype(bool",python,selection_mouse +1707,1010239,"genie.py",4193,220," # mask is True for padded positions (i.e., t >= T)\n B, S, N = token_idxs.shape\n mask = jnp.arange(S)[None, :, None] >= T # shape (1, S, 1) broadcasted to (B, S, N)\n init_mask = mask.astype(bool",python,selection_mouse +1708,1010239,"genie.py",4118,295," # token_idxs shape: (B, S, N), T = original length, S = seq_len\n # mask is True for padded positions (i.e., t >= T)\n B, S, N = token_idxs.shape\n mask = jnp.arange(S)[None, :, None] >= T # shape (1, S, 1) broadcasted to (B, S, N)\n init_mask = mask.astype(bool",python,selection_mouse +1709,1010243,"genie.py",4414,0,"",python,selection_command +1710,1010661,"genie.py",4118,296," # token_idxs shape: (B, S, N), T = original length, S = seq_len\n # mask is True for padded positions (i.e., t >= T)\n B, S, N = token_idxs.shape\n mask = jnp.arange(S)[None, :, None] >= T # shape (1, S, 1) broadcasted to (B, S, N)\n init_mask = mask.astype(bool)",python,selection_mouse +1711,1010695,"genie.py",4056,358," # Create a mask that is 1 (True) where we just padded\n # token_idxs shape: (B, S, N), T = original length, S = seq_len\n # mask is True for padded positions (i.e., t >= T)\n B, S, N = token_idxs.shape\n mask = jnp.arange(S)[None, :, None] >= T # shape (1, S, 1) broadcasted to (B, S, N)\n init_mask = mask.astype(bool)",python,selection_mouse +1712,1010758,"genie.py",4055,359,"\n # Create a mask that is 1 (True) where we just padded\n # token_idxs shape: (B, S, N), T = original length, S = seq_len\n # mask is True for padded positions (i.e., t >= T)\n B, S, N = token_idxs.shape\n mask = jnp.arange(S)[None, :, None] >= T # shape (1, S, 1) broadcasted to (B, S, N)\n init_mask = mask.astype(bool)",python,selection_mouse +1713,1010963,"TERMINAL",0,0,"7",,terminal_output +1714,1011055,"genie.py",4056,358," # Create a mask that is 1 (True) where we just padded\n # token_idxs shape: (B, S, N), T = original length, S = seq_len\n # mask is True for padded positions (i.e., t >= T)\n B, S, N = token_idxs.shape\n mask = jnp.arange(S)[None, :, None] >= T # shape (1, S, 1) broadcasted to (B, S, N)\n init_mask = mask.astype(bool)",python,selection_mouse +1715,1012077,"TERMINAL",0,0,"8",,terminal_output +1716,1013051,"TERMINAL",0,0,"9",,terminal_output +1717,1014126,"TERMINAL",0,0,"30",,terminal_output +1718,1015128,"TERMINAL",0,0,"1",,terminal_output +1719,1016173,"TERMINAL",0,0,"3",,terminal_output +1720,1017300,"TERMINAL",0,0,"4",,terminal_output +1721,1018249,"TERMINAL",0,0,"55",,terminal_output +1722,1019290,"TERMINAL",0,0,"6",,terminal_output +1723,1020330,"TERMINAL",0,0,"7",,terminal_output +1724,1021382,"TERMINAL",0,0,"8",,terminal_output +1725,1022412,"TERMINAL",0,0,"9",,terminal_output +1726,1023470,"TERMINAL",0,0,"40",,terminal_output +1727,1024551,"TERMINAL",0,0,"1",,terminal_output +1728,1025688,"TERMINAL",0,0,"2",,terminal_output +1729,1026570,"TERMINAL",0,0,"36",,terminal_output +1730,1027611,"TERMINAL",0,0,"4",,terminal_output +1731,1028676,"TERMINAL",0,0,"5",,terminal_output +1732,1029793,"TERMINAL",0,0,"6",,terminal_output 
+1733,1030746,"TERMINAL",0,0,"7",,terminal_output +1734,1031763,"TERMINAL",0,0,"8",,terminal_output +1735,1032866,"TERMINAL",0,0,"9",,terminal_output +1736,1033843,"TERMINAL",0,0,"50",,terminal_output +1737,1034911,"TERMINAL",0,0,"1",,terminal_output +1738,1035929,"TERMINAL",0,0,"2",,terminal_output +1739,1037063,"TERMINAL",0,0,"3",,terminal_output +1740,1038009,"TERMINAL",0,0,"4",,terminal_output +1741,1039113,"TERMINAL",0,0,"5",,terminal_output +1742,1040094,"TERMINAL",0,0,"6",,terminal_output +1743,1041133,"TERMINAL",0,0,"7",,terminal_output +1744,1042249,"TERMINAL",0,0,"9",,terminal_output +1745,1043217,"TERMINAL",0,0,"3:00",,terminal_output +1746,1044261,"TERMINAL",0,0,"1",,terminal_output +1747,1045302,"TERMINAL",0,0,"2",,terminal_output +1748,1046376,"TERMINAL",0,0,"3",,terminal_output +1749,1047390,"TERMINAL",0,0,"4",,terminal_output +1750,1048427,"TERMINAL",0,0,"5",,terminal_output +1751,1049470,"TERMINAL",0,0,"6",,terminal_output +1752,1050580,"TERMINAL",0,0,"7",,terminal_output +1753,1051557,"TERMINAL",0,0,"8",,terminal_output +1754,1052598,"TERMINAL",0,0,"9",,terminal_output +1755,1053656,"TERMINAL",0,0,"10",,terminal_output +1756,1054684,"TERMINAL",0,0,"1",,terminal_output +1757,1055809,"TERMINAL",0,0,"23",,terminal_output +1758,1056826,"TERMINAL",0,0,"3",,terminal_output +1759,1057860,"TERMINAL",0,0,"4",,terminal_output +1760,1058985,"TERMINAL",0,0,"54",,terminal_output +1761,1059913,"TERMINAL",0,0,"6",,terminal_output +1762,1060957,"TERMINAL",0,0,"7",,terminal_output +1763,1062055,"TERMINAL",0,0,"8",,terminal_output +1764,1063038,"TERMINAL",0,0,"9",,terminal_output +1765,1064074,"TERMINAL",0,0,"20",,terminal_output +1766,1065114,"TERMINAL",0,0,"1",,terminal_output +1767,1066156,"TERMINAL",0,0,"3",,terminal_output +1768,1067193,"TERMINAL",0,0,"4",,terminal_output +1769,1068258,"TERMINAL",0,0,"5",,terminal_output +1770,1069323,"TERMINAL",0,0,"6",,terminal_output +1771,1070343,"TERMINAL",0,0,"7",,terminal_output +1772,1071367,"TERMINAL",0,0,"8",,terminal_output +1773,1072406,"TERMINAL",0,0,"9",,terminal_output +1774,1073441,"TERMINAL",0,0,"30",,terminal_output +1775,1074541,"TERMINAL",0,0,"1",,terminal_output +1776,1075567,"TERMINAL",0,0,"2",,terminal_output +1777,1076590,"TERMINAL",0,0,"3",,terminal_output +1778,1077611,"TERMINAL",0,0,"45",,terminal_output +1779,1078650,"TERMINAL",0,0,"5",,terminal_output +1780,1079694,"TERMINAL",0,0,"6",,terminal_output +1781,1080741,"TERMINAL",0,0,"7",,terminal_output +1782,1081813,"TERMINAL",0,0,"8",,terminal_output +1783,1082845,"TERMINAL",0,0,"9",,terminal_output +1784,1083860,"TERMINAL",0,0,"40",,terminal_output +1785,1085002,"TERMINAL",0,0,"1",,terminal_output +1786,1085940,"TERMINAL",0,0,"2",,terminal_output +1787,1087002,"TERMINAL",0,0,"3",,terminal_output +1788,1088016,"TERMINAL",0,0,"4",,terminal_output +1789,1089056,"TERMINAL",0,0,"5",,terminal_output +1790,1090098,"TERMINAL",0,0,"6",,terminal_output +1791,1091141,"TERMINAL",0,0,"8",,terminal_output +1792,1092179,"TERMINAL",0,0,"9",,terminal_output +1793,1093231,"TERMINAL",0,0,"50",,terminal_output +1794,1094308,"TERMINAL",0,0,"1",,terminal_output +1795,1095330,"TERMINAL",0,0,"2",,terminal_output +1796,1096362,"TERMINAL",0,0,"3",,terminal_output +1797,1097377,"TERMINAL",0,0,"4",,terminal_output +1798,1098507,"TERMINAL",0,0,"5",,terminal_output +1799,1099528,"TERMINAL",0,0,"6",,terminal_output +1800,1100555,"TERMINAL",0,0,"7",,terminal_output +1801,1101531,"TERMINAL",0,0,"8",,terminal_output +1802,1102600,"TERMINAL",0,0,"9",,terminal_output 
+1803,1103600,"TERMINAL",0,0,"4:00",,terminal_output +1804,1104648,"TERMINAL",0,0,"1",,terminal_output +1805,1105667,"TERMINAL",0,0,"2",,terminal_output +1806,1106714,"TERMINAL",0,0,"3",,terminal_output +1807,1107776,"TERMINAL",0,0,"4",,terminal_output +1808,1108848,"TERMINAL",0,0,"5",,terminal_output +1809,1109823,"TERMINAL",0,0,"6",,terminal_output +1810,1110880,"TERMINAL",0,0,"7",,terminal_output +1811,1111923,"TERMINAL",0,0,"8",,terminal_output +1812,1112930,"TERMINAL",0,0,"9",,terminal_output +1813,1114071,"TERMINAL",0,0,"10",,terminal_output +1814,1115098,"TERMINAL",0,0,"1",,terminal_output +1815,1116046,"TERMINAL",0,0,"2",,terminal_output +1816,1117086,"TERMINAL",0,0,"3",,terminal_output +1817,1118135,"TERMINAL",0,0,"4",,terminal_output +1818,1119176,"TERMINAL",0,0,"6",,terminal_output +1819,1120215,"TERMINAL",0,0,"7",,terminal_output +1820,1121263,"TERMINAL",0,0,"8",,terminal_output +1821,1122290,"TERMINAL",0,0,"9",,terminal_output +1822,1123399,"TERMINAL",0,0,"20",,terminal_output +1823,1124412,"TERMINAL",0,0,"1",,terminal_output +1824,1125442,"TERMINAL",0,0,"2",,terminal_output +1825,1126460,"TERMINAL",0,0,"3",,terminal_output +1826,1127507,"TERMINAL",0,0,"46",,terminal_output +1827,1128542,"TERMINAL",0,0,"5",,terminal_output +1828,1129580,"TERMINAL",0,0,"6",,terminal_output +1829,1130619,"TERMINAL",0,0,"7",,terminal_output +1830,1131724,"TERMINAL",0,0,"8",,terminal_output +1831,1132700,"TERMINAL",0,0,"9",,terminal_output +1832,1133834,"TERMINAL",0,0,"30",,terminal_output +1833,1134857,"TERMINAL",0,0,"1",,terminal_output +1834,1135880,"TERMINAL",0,0,"2",,terminal_output +1835,1136860,"TERMINAL",0,0,"3",,terminal_output +1836,1137929,"TERMINAL",0,0,"4",,terminal_output +1837,1138953,"TERMINAL",0,0,"5",,terminal_output +1838,1139977,"TERMINAL",0,0,"6",,terminal_output +1839,1141103,"TERMINAL",0,0,"7",,terminal_output +1840,1142128,"TERMINAL",0,0,"8",,terminal_output +1841,1143094,"TERMINAL",0,0,"9",,terminal_output +1842,1144135,"TERMINAL",0,0,"40",,terminal_output +1843,1145177,"TERMINAL",0,0,"2",,terminal_output +1844,1146223,"TERMINAL",0,0,"3",,terminal_output +1845,1147350,"TERMINAL",0,0,"4",,terminal_output +1846,1148284,"TERMINAL",0,0,"5",,terminal_output +1847,1149397,"TERMINAL",0,0,"6",,terminal_output +1848,1150358,"TERMINAL",0,0,"7",,terminal_output +1849,1151446,"TERMINAL",0,0,"8",,terminal_output +1850,1152472,"TERMINAL",0,0,"9",,terminal_output +1851,1153470,"TERMINAL",0,0,"50",,terminal_output +1852,1154513,"TERMINAL",0,0,"1",,terminal_output +1853,1155581,"TERMINAL",0,0,"2",,terminal_output +1854,1156592,"TERMINAL",0,0,"3",,terminal_output +1855,1157698,"TERMINAL",0,0,"4",,terminal_output +1856,1158717,"TERMINAL",0,0,"5",,terminal_output +1857,1159711,"TERMINAL",0,0,"6",,terminal_output +1858,1160765,"TERMINAL",0,0,"7",,terminal_output +1859,1161814,"TERMINAL",0,0,"8",,terminal_output +1860,1162836,"TERMINAL",0,0,"9",,terminal_output +1861,1163872,"TERMINAL",0,0,"5:00",,terminal_output +1862,1164999,"TERMINAL",0,0,"1",,terminal_output +1863,1165074,"genie.py",0,0,"",python,tab +1864,1165075,"genie.py",4454,0,"",python,selection_mouse +1865,1165179,"genie.py",4453,0,"",python,selection_command +1866,1166003,"TERMINAL",0,0,"2",,terminal_output +1867,1166987,"TERMINAL",0,0,"3",,terminal_output +1868,1167115,"genie.py",4414,0,"",python,selection_mouse +1869,1167123,"genie.py",4413,0,"",python,selection_command +1870,1167354,"genie.py",4413,32,")\n jax.debug.breakpoint()",python,selection_mouse +1871,1167358,"genie.py",4414,31,"\n 
jax.debug.breakpoint()",python,selection_command +1872,1167397,"genie.py",4414,40,"\n jax.debug.breakpoint()\n ",python,selection_mouse +1873,1167493,"genie.py",4414,16,"\n jax.deb",python,selection_mouse +1874,1167493,"genie.py",4414,12,"\n jax",python,selection_mouse +1875,1167494,"genie.py",4414,10,"\n j",python,selection_mouse +1876,1167552,"genie.py",4414,9,"\n ",python,selection_mouse +1877,1167613,"genie.py",4414,10,"\n j",python,selection_mouse +1878,1167674,"genie.py",4386,28,"nit_mask = mask.astype(bool)",python,selection_mouse +1879,1167675,"genie.py",4387,27,"it_mask = mask.astype(bool)",python,selection_mouse +1880,1168063,"TERMINAL",0,0,"4",,terminal_output +1881,1168612,"genie.py",4414,0,"",python,selection_mouse +1882,1168647,"genie.py",4413,0,"",python,selection_command +1883,1169013,"genie.py",4414,0,"",python,selection_mouse +1884,1169028,"genie.py",4413,0,"",python,selection_command +1885,1169082,"TERMINAL",0,0,"5",,terminal_output +1886,1169317,"genie.py",4413,1,")",python,selection_mouse +1887,1169318,"genie.py",4413,0,"",python,selection_mouse +1888,1169318,"genie.py",4406,7,"pe(bool",python,selection_mouse +1889,1169319,"genie.py",4305,108,"ange(S)[None, :, None] >= T # shape (1, S, 1) broadcasted to (B, S, N)\n init_mask = mask.astype(bool",python,selection_mouse +1890,1169319,"genie.py",4300,113,"np.arange(S)[None, :, None] >= T # shape (1, S, 1) broadcasted to (B, S, N)\n init_mask = mask.astype(bool",python,selection_mouse +1891,1169319,"genie.py",4298,115," jnp.arange(S)[None, :, None] >= T # shape (1, S, 1) broadcasted to (B, S, N)\n init_mask = mask.astype(bool",python,selection_mouse +1892,1169319,"genie.py",4296,117," = jnp.arange(S)[None, :, None] >= T # shape (1, S, 1) broadcasted to (B, S, N)\n init_mask = mask.astype(bool",python,selection_mouse +1893,1169320,"genie.py",4295,118,"k = jnp.arange(S)[None, :, None] >= T # shape (1, S, 1) broadcasted to (B, S, N)\n init_mask = mask.astype(bool",python,selection_mouse +1894,1169320,"genie.py",4294,119,"sk = jnp.arange(S)[None, :, None] >= T # shape (1, S, 1) broadcasted to (B, S, N)\n init_mask = mask.astype(bool",python,selection_mouse +1895,1169361,"genie.py",4293,120,"ask = jnp.arange(S)[None, :, None] >= T # shape (1, S, 1) broadcasted to (B, S, N)\n init_mask = mask.astype(bool",python,selection_mouse +1896,1169361,"genie.py",4414,0,"",python,selection_command +1897,1169362,"genie.py",4293,121,"ask = jnp.arange(S)[None, :, None] >= T # shape (1, S, 1) broadcasted to (B, S, N)\n init_mask = mask.astype(bool)",python,selection_mouse +1898,1169413,"genie.py",4292,122,"mask = jnp.arange(S)[None, :, None] >= T # shape (1, S, 1) broadcasted to (B, S, N)\n init_mask = mask.astype(bool)",python,selection_mouse +1899,1169442,"genie.py",4257,157,"B, S, N = token_idxs.shape\n mask = jnp.arange(S)[None, :, None] >= T # shape (1, S, 1) broadcasted to (B, S, N)\n init_mask = mask.astype(bool)",python,selection_mouse +1900,1169484,"genie.py",4256,158," B, S, N = token_idxs.shape\n mask = jnp.arange(S)[None, :, None] >= T # shape (1, S, 1) broadcasted to (B, S, N)\n init_mask = mask.astype(bool)",python,selection_mouse +1901,1169556,"genie.py",4255,159," B, S, N = token_idxs.shape\n mask = jnp.arange(S)[None, :, None] >= T # shape (1, S, 1) broadcasted to (B, S, N)\n init_mask = mask.astype(bool)",python,selection_mouse +1902,1169682,"genie.py",4254,160," B, S, N = token_idxs.shape\n mask = jnp.arange(S)[None, :, None] >= T # shape (1, S, 1) broadcasted to (B, S, N)\n init_mask = 
mask.astype(bool)",python,selection_mouse +1903,1170106,"TERMINAL",0,0,"6",,terminal_output +1904,1170140,"genie.py",4253,161," B, S, N = token_idxs.shape\n mask = jnp.arange(S)[None, :, None] >= T # shape (1, S, 1) broadcasted to (B, S, N)\n init_mask = mask.astype(bool)",python,selection_mouse +1905,1170219,"genie.py",4252,162," B, S, N = token_idxs.shape\n mask = jnp.arange(S)[None, :, None] >= T # shape (1, S, 1) broadcasted to (B, S, N)\n init_mask = mask.astype(bool)",python,selection_mouse +1906,1171142,"TERMINAL",0,0,"8",,terminal_output +1907,1172022,"genie.py",4257,0,"",python,selection_command +1908,1172206,"TERMINAL",0,0,"9",,terminal_output +1909,1173346,"TERMINAL",0,0,"10",,terminal_output +1910,1174273,"TERMINAL",0,0,"1",,terminal_output +1911,1175314,"TERMINAL",0,0,"2",,terminal_output +1912,1176399,"TERMINAL",0,0,"33",,terminal_output +1913,1177525,"TERMINAL",0,0,"4",,terminal_output +1914,1178476,"TERMINAL",0,0,"5",,terminal_output +1915,1179489,"TERMINAL",0,0,"6",,terminal_output +1916,1180527,"TERMINAL",0,0,"7",,terminal_output +1917,1181557,"TERMINAL",0,0,"8",,terminal_output +1918,1182492,"genie.py",4249,0,"",python,selection_command +1919,1182600,"TERMINAL",0,0,"9",,terminal_output +1920,1183631,"TERMINAL",0,0,"20",,terminal_output +1921,1184727,"TERMINAL",0,0,"1",,terminal_output +1922,1184815,"genie.py",4284,0," mask = (jnp.arange(S)[None, :, None] >= T) # shape (1, S, 1)\n",python,content +1923,1185046,"genie.py",4354,0," mask = jnp.broadcast_to(mask, (B, S, N)) # shape (B, S, N)\n",python,content +1924,1185202,"genie.py",4424,93,"",python,content +1925,1185740,"TERMINAL",0,0,"2",,terminal_output +1926,1186757,"TERMINAL",0,0,"3",,terminal_output +1927,1187792,"TERMINAL",0,0,"4",,terminal_output +1928,1188881,"TERMINAL",0,0,"5",,terminal_output +1929,1189948,"TERMINAL",0,0,"6",,terminal_output +1930,1190676,"genie.py",4383,0,"",python,selection_mouse +1931,1190913,"TERMINAL",0,0,"7",,terminal_output +1932,1191683,"TERMINAL",0,0,"^DERROR:2025-07-03 16:35:28,533:jax._src.debugging:96: jax.debug.callback failed\r\nTraceback (most recent call last):\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/jax/_src/debugging.py"", line 94, in debug_callback_impl\r\n callback(*args)\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/jax/_src/debugging.py"", line 334, in _flat_callback\r\n callback(*args, **kwargs)\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/jax/_src/debugger/core.py"", line 220, in _breakpoint_callback\r\n debugger(frames, thread_id, **kwargs)\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/jax/_src/debugger/cli_debugger.py"", line 167, in run_debugger\r\n CliDebugger(frames, thread_id, **kwargs).run()\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/jax/_src/debugger/cli_debugger.py"", line 160, in run\r\n self.cmdloop()\r\n File ""/home/hk-project-p0023960/tum_cte0515/.local/share/uv/python/cpython-3.10.18-linux-x86_64-gnu/lib/python3.10/cmd.py"", line 138, in cmdloop\r\n stop = self.onecmd(line)\r\n File ""/home/hk-project-p0023960/tum_cte0515/.local/share/uv/python/cpython-3.10.18-linux-x86_64-gnu/lib/python3.10/cmd.py"", line 217, in onecmd\r\n return func(arg)\r\n File 
""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/jax/_src/debugger/cli_debugger.py"", line 146, in do_quit\r\n sys.exit(0)\r\nSystemExit: 0\r\nERROR:jax._src.debugging:jax.debug.callback failed\r\nTraceback (most recent call last):\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/jax/_src/debugging.py"", line 94, in debug_callback_impl\r\n callback(*args)\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/jax/_src/debugging.py"", line 334, in _flat_callback\r\n callback(*args, **kwargs)\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/jax/_src/debugger/core.py"", line 220, in _breakpoint_callback\r\n debugger(frames, thread_id, **kwargs)\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/jax/_src/debugger/cli_debugger.py"", line 167, in run_debugger\r\n CliDebugger(frames, thread_id, **kwargs).run()\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/jax/_src/debugger/cli_debugger.py"", line 160, in run\r\n self.cmdloop()\r\n File ""/home/hk-project-p0023960/tum_cte0515/.local/share/uv/python/cpython-3.10.18-linux-x86_64-gnu/lib/python3.10/cmd.py"", line 138, in cmdloop\r\n stop = self.onecmd(line)\r\n File ""/home/hk-project-p0023960/tum_cte0515/.local/share/uv/python/cpython-3.10.18-linux-x86_64-gnu/lib/python3.10/cmd.py"", line 217, in onecmd\r\n return func(arg)\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/jax/_src/debugger/cli_debugger.py"", line 146, in do_quit\r\n sys.exit(0)\r\nSystemExit: 0\r\n",,terminal_output +1933,1192000,"TERMINAL",0,0,"8",,terminal_output +1934,1192996,"TERMINAL",0,0,"9",,terminal_output +1935,1193110,"TERMINAL",0,0,"]0;tum_cte0515@hkn0704:~/Projects/jafar[?2004h(jafar) [tum_cte0515@hkn0704 jafar]$ ",,terminal_output +1936,1193628,"TERMINAL",0,0,"sh scripts_horeka/overfit_sample_tiny/sample.sh",,terminal_output +1937,1193750,"TERMINAL",0,0,"\r\n[?2004l\r",,terminal_output +1938,1193867,"TERMINAL",0,0,"Sampling from checkpoint: /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/3309699/genie_1751384516_74500/\r\n",,terminal_output +1939,1194041,"TERMINAL",0,0,"30",,terminal_output +1940,1195176,"TERMINAL",0,0,"11",,terminal_output +1941,1196167,"TERMINAL",0,0,"2",,terminal_output +1942,1196739,"TERMINAL",0,0,"2025-07-03 16:35:33.635376: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +1943,1197179,"TERMINAL",0,0,"4",,terminal_output +1944,1198229,"TERMINAL",0,0,"57",,terminal_output +1945,1199308,"TERMINAL",0,0,"6",,terminal_output +1946,1200504,"TERMINAL",0,0,"7",,terminal_output +1947,1201271,"TERMINAL",0,0,"2025-07-03 16:35:38.130060: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. 
Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +1948,1201386,"TERMINAL",0,0,"8",,terminal_output +1949,1202394,"TERMINAL",0,0,"9",,terminal_output +1950,1203776,"TERMINAL",0,0,"40",,terminal_output +1951,1204491,"TERMINAL",0,0,"1",,terminal_output +1952,1205632,"TERMINAL",0,0,"2",,terminal_output +1953,1206689,"TERMINAL",0,0,"3",,terminal_output +1954,1207669,"TERMINAL",0,0,"4",,terminal_output +1955,1208689,"TERMINAL",0,0,"5",,terminal_output +1956,1209817,"TERMINAL",0,0,"6",,terminal_output +1957,1210769,"TERMINAL",0,0,"7",,terminal_output +1958,1211248,"TERMINAL",0,0,"2025-07-03 16:35:47.759918: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +1959,1211875,"TERMINAL",0,0,"8",,terminal_output +1960,1213010,"TERMINAL",0,0,"9",,terminal_output +1961,1213911,"TERMINAL",0,0,"50",,terminal_output +1962,1214940,"TERMINAL",0,0,"1",,terminal_output +1963,1216061,"TERMINAL",0,0,"2",,terminal_output +1964,1216978,"TERMINAL",0,0,"3",,terminal_output +1965,1218045,"TERMINAL",0,0,"4",,terminal_output +1966,1218742,"TERMINAL",0,0,"2025-07-03 16:35:55.634415: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +1967,1219150,"TERMINAL",0,0,"5",,terminal_output +1968,1220162,"TERMINAL",0,0,"6",,terminal_output +1969,1221144,"TERMINAL",0,0,"7",,terminal_output +1970,1222208,"TERMINAL",0,0,"9",,terminal_output +1971,1223343,"TERMINAL",0,0,"6:00",,terminal_output +1972,1224263,"TERMINAL",0,0,"1",,terminal_output +1973,1225300,"TERMINAL",0,0,"2",,terminal_output +1974,1225479,"TERMINAL",0,0,"2025-07-03 16:36:02.381336: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +1975,1226357,"TERMINAL",0,0,"3",,terminal_output +1976,1227379,"TERMINAL",0,0,"4",,terminal_output +1977,1228415,"TERMINAL",0,0,"5",,terminal_output +1978,1229461,"TERMINAL",0,0,"6",,terminal_output +1979,1230607,"TERMINAL",0,0,"7",,terminal_output +1980,1231535,"TERMINAL",0,0,"8",,terminal_output +1981,1232290,"TERMINAL",0,0,"2025-07-03 16:36:09.188493: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. 
Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +1982,1232591,"TERMINAL",0,0,"9",,terminal_output +1983,1233673,"TERMINAL",0,0,"10",,terminal_output +1984,1234698,"TERMINAL",0,0,"1",,terminal_output +1985,1235725,"TERMINAL",0,0,"2",,terminal_output +1986,1235726,"TERMINAL",0,0,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/orbax/checkpoint/_src/serialization/type_handlers.py:1251: UserWarning: Sharding info not provided when restoring. Populating sharding info from sharding file. Please note restoration time will be slightly increased due to reading from file. Note also that this option is unsafe when restoring on a different topology than the checkpoint was saved with.\r\n warnings.warn(\r\n",,terminal_output +1987,1236740,"TERMINAL",0,0,"3",,terminal_output +1988,1237874,"TERMINAL",0,0,"4",,terminal_output +1989,1238811,"TERMINAL",0,0,"5",,terminal_output +1990,1239857,"TERMINAL",0,0,"6",,terminal_output +1991,1240998,"TERMINAL",0,0,"7",,terminal_output +1992,1241935,"TERMINAL",0,0,"8",,terminal_output +1993,1242977,"TERMINAL",0,0,"9",,terminal_output +1994,1244078,"TERMINAL",0,0,"20",,terminal_output +1995,1244193,"TERMINAL",0,0,"2025-07-03 16:36:21.081498: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +1996,1245153,"TERMINAL",0,0,"1",,terminal_output +1997,1246092,"TERMINAL",0,0,"2",,terminal_output +1998,1247191,"TERMINAL",0,0,"3",,terminal_output +1999,1247394,"TERMINAL",0,0,"2025-07-03 16:36:24.293848: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +2000,1248174,"TERMINAL",0,0,"5",,terminal_output +2001,1249280,"TERMINAL",0,0,"6",,terminal_output +2002,1250249,"TERMINAL",0,0,"7",,terminal_output +2003,1251291,"TERMINAL",0,0,"8",,terminal_output +2004,1252365,"TERMINAL",0,0,"9",,terminal_output +2005,1253441,"TERMINAL",0,0,"30",,terminal_output +2006,1254461,"TERMINAL",0,0,"1",,terminal_output +2007,1255489,"TERMINAL",0,0,"2",,terminal_output +2008,1255728,"TERMINAL",0,0,"2025-07-03 16:36:32.628061: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. 
Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +2009,1256500,"TERMINAL",0,0,"3",,terminal_output +2010,1257347,"TERMINAL",0,0,"token_idxs shape: (1, 1, 920)\r\n",,terminal_output +2011,1257456,"TERMINAL",0,0,"token_idxs shape: (1, 6, 920)\r\n",,terminal_output +2012,1257522,"TERMINAL",0,0,"new_frame_idxs.shape: (1, 920)\r\n",,terminal_output +2013,1257546,"TERMINAL",0,0,"4",,terminal_output +2014,1257694,"TERMINAL",0,0,"action_tokens.shape: (1, 5, 1, 32)\r\n",,terminal_output +2015,1257931,"TERMINAL",0,0,"Entering jdb:\r\n(jdb) ",,terminal_output +2016,1258660,"TERMINAL",0,0,"5",,terminal_output +2017,1259601,"TERMINAL",0,0,"6",,terminal_output +2018,1260641,"TERMINAL",0,0,"7",,terminal_output +2019,1261737,"TERMINAL",0,0,"8",,terminal_output +2020,1262756,"TERMINAL",0,0,"9",,terminal_output +2021,1263780,"TERMINAL",0,0,"40",,terminal_output +2022,1264807,"TERMINAL",0,0,"1",,terminal_output +2023,1265831,"TERMINAL",0,0,"2",,terminal_output +2024,1266871,"TERMINAL",0,0,"3",,terminal_output +2025,1267978,"TERMINAL",0,0,"4",,terminal_output +2026,1269006,"TERMINAL",0,0,"5",,terminal_output +2027,1269981,"TERMINAL",0,0,"615",,terminal_output +2028,1271022,"TERMINAL",0,0,"7",,terminal_output +2029,1272060,"TERMINAL",0,0,"8",,terminal_output +2030,1273143,"TERMINAL",0,0,"9",,terminal_output +2031,1274140,"TERMINAL",0,0,"51",,terminal_output +2032,1275190,"TERMINAL",0,0,"2",,terminal_output +2033,1276280,"TERMINAL",0,0,"3",,terminal_output +2034,1277256,"TERMINAL",0,0,"4",,terminal_output +2035,1278291,"TERMINAL",0,0,"5",,terminal_output +2036,1279327,"TERMINAL",0,0,"6",,terminal_output +2037,1280373,"TERMINAL",0,0,"7",,terminal_output +2038,1281444,"TERMINAL",0,0,"8",,terminal_output +2039,1282530,"TERMINAL",0,0,"9",,terminal_output +2040,1283480,"TERMINAL",0,0,"7:00",,terminal_output +2041,1284567,"TERMINAL",0,0,"1",,terminal_output +2042,1285563,"TERMINAL",0,0,"2",,terminal_output +2043,1286615,"TERMINAL",0,0,"3",,terminal_output +2044,1287741,"TERMINAL",0,0,"4",,terminal_output +2045,1288767,"TERMINAL",0,0,"5",,terminal_output +2046,1289727,"TERMINAL",0,0,"6",,terminal_output +2047,1290813,"TERMINAL",0,0,"7",,terminal_output +2048,1291811,"TERMINAL",0,0,"8",,terminal_output +2049,1292849,"TERMINAL",0,0,"9",,terminal_output +2050,1293997,"TERMINAL",0,0,"10",,terminal_output +2051,1294830,"TERMINAL",0,0,"l",,terminal_output +2052,1294950,"TERMINAL",0,0,"1",,terminal_output +2053,1294978,"TERMINAL",0,0,"\r\n> /hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/genie.py(130)\r\n # token_idxs shape: (B, S, N), T = original length, S = seq_len\r\n # mask is True for padded positions (i.e., t >= T)\r\n B, S, N = token_idxs.shape\r\n mask = (jnp.arange(S)[None, :, None] >= T) # shape (1, S, 1)\r\n mask = jnp.broadcast_to(mask, (B, S, N)) # shape (B, S, N)\r\n init_mask = mask.astype(bool)\r\n-> jax.debug.breakpoint()\r\n \r\n \r\n # --- Initialize MaskGIT ---\r\n init_mask_old = jnp.ones_like(token_idxs, dtype=bool)[:, 0] # (B, N)\r\n jax.debug.breakpoint()\r\n(jdb) ",,terminal_output +2054,1296039,"TERMINAL",0,0,"223",,terminal_output +2055,1296927,"TERMINAL",0,0,"i",,terminal_output +2056,1297033,"TERMINAL",0,0,"30",,terminal_output +2057,1297200,"TERMINAL",0,0,"[?25li[?25h",,terminal_output +2058,1297494,"TERMINAL",0,0,"[?25li\r[?25h",,terminal_output +2059,1297726,"TERMINAL",0,0,"[?25ln[?25h",,terminal_output +2060,1297862,"TERMINAL",0,0,"[?25li[?25h",,terminal_output 
+2061,1298085,"TERMINAL",0,0,"4",,terminal_output +2062,1298085,"TERMINAL",0,0,"[?25lt[?25h",,terminal_output +2063,1298980,"TERMINAL",0,0,"[?25l_[?25h",,terminal_output +2064,1299089,"TERMINAL",0,0,"5",,terminal_output +2065,1299306,"TERMINAL",0,0,"[?25lm[?25h",,terminal_output +2066,1299372,"TERMINAL",0,0,"[?25la[?25h",,terminal_output +2067,1299439,"TERMINAL",0,0,"[?25ls[?25h",,terminal_output +2068,1299501,"TERMINAL",0,0,"[?25lk[?25h",,terminal_output +2069,1299741,"TERMINAL",0,0,"[?25l.[?25h",,terminal_output +2070,1299852,"TERMINAL",0,0,"[?25ls[?25h",,terminal_output +2071,1299957,"TERMINAL",0,0,"[?25lh[?25h",,terminal_output +2072,1300030,"TERMINAL",0,0,"[?25la[?25h",,terminal_output +2073,1300196,"TERMINAL",0,0,"[?25lp[?25h",,terminal_output +2074,1300196,"TERMINAL",0,0,"6",,terminal_output +2075,1300212,"TERMINAL",0,0,"[?25le[?25h",,terminal_output +2076,1300304,"TERMINAL",0,0,"\r\n(1, 6, 920)\r\n(jdb) ",,terminal_output +2077,1301258,"TERMINAL",0,0,"8",,terminal_output +2078,1302296,"TERMINAL",0,0,"9",,terminal_output +2079,1303240,"TERMINAL",0,0,"20",,terminal_output +2080,1304281,"TERMINAL",0,0,"1",,terminal_output +2081,1305322,"TERMINAL",0,0,"2",,terminal_output +2082,1306188,"TERMINAL",0,0,"c",,terminal_output +2083,1306293,"TERMINAL",0,0,"\r\n",,terminal_output +2084,1306406,"TERMINAL",0,0,"3",,terminal_output +2085,1306407,"TERMINAL",0,0,"Entering jdb:\r\n(jdb) ",,terminal_output +2086,1307406,"TERMINAL",0,0,"4",,terminal_output +2087,1308472,"TERMINAL",0,0,"\rc",,terminal_output +2088,1308483,"TERMINAL",0,0,"5",,terminal_output +2089,1308701,"TERMINAL",0,0,"\r\nEntering jdb:\r\n(jdb) ",,terminal_output +2090,1309492,"TERMINAL",0,0,"6",,terminal_output +2091,1309895,"TERMINAL",0,0,"\rc",,terminal_output +2092,1310042,"TERMINAL",0,0,"\rinit_mask.shape",,terminal_output +2093,1310568,"TERMINAL",0,0,"7",,terminal_output +2094,1310624,"TERMINAL",0,0,"\r\n(1, 6, 920)\r\n(jdb) ",,terminal_output +2095,1311586,"TERMINAL",0,0,"8",,terminal_output +2096,1312599,"TERMINAL",0,0,"9",,terminal_output +2097,1313634,"TERMINAL",0,0,"30",,terminal_output +2098,1314257,"genie.py",0,0,"",python,tab +2099,1314258,"genie.py",4555,0,"",python,selection_mouse +2100,1314346,"genie.py",4548,13,"init_mask_old",python,selection_mouse +2101,1314866,"TERMINAL",0,0,"14",,terminal_output +2102,1315949,"TERMINAL",0,0,"2",,terminal_output +2103,1317028,"TERMINAL",0,0,"3",,terminal_output +2104,1317333,"TERMINAL",0,0,"init_mask_old",,terminal_output +2105,1317983,"TERMINAL",0,0,"\r\nArray([[ True, True, True, True, True, True, True, True, True,\r\n True, True, True, True, True, True, True, True, True,\r\n True, True, True, True, True, True, True, True, True,\r\n True, True, True, True, True, True, True, True, True,\r\n True, True, True, True, True, True, True, True, True,\r\n True, True, True, True, True, True, True, True, True,\r\n True, True, True, True, True, True, True, True, True,\r\n True, True, True, True, True, True, True, True, True,\r\n True, True, True, True, True, True, True, True, True,\r\n True, True, True, True, True, True, True, True, True,\r\n True, True, True, True, True, True, True, True, True,\r\n True, True, True, True, True, True, True, True, True,\r\n True, True, True, True, True, True, True, True, True,\r\n True, True, True, True, True, True, True, True, True,\r\n True, True, True, True, True, True, True, True, True,\r\n True, True, True, True, True, True, True, True, True,\r\n True, True, True, True, True, True, True, True, True,\r\n True, True, True, True, True, True, True, True, 
True,\r\n True, True, True, True, True, True, True, True, True,\r\n True, True, True, True, True, True, True, True, True,\r\n True, True, True, True, True, True, True, True, True,\r\n True, True, True, True, True, True, True, True, True,\r\n True, True, True, True, True, True, True, True, True,\r\n True, True, True, True, True, True, True, True, True,\r\n True, True, True, True, True, True, True, True, True,\r\n True, True, True, True, True, True, True, True, True,\r\n True, True, True, True, True, True, True, True, True,\r\n True, True, True, True, True, True, True, True, True,\r\n True, True, True, True, True, True, True, True, True,\r\n True, True, True, True, True, True, True, True, True,\r\n True, True, True, True, True, True, True, True, True,\r\n True, True, True, True, True, True, True, True, True,\r\n True, True, True, True, True, True, True, True, True,\r\n True, True, True, True, True, True, True, True, True,\r\n True, True, True, True, True, True, True, True, True,\r\n True, True, True, True, True, True, True, True, True,\r\n True, True, True, True, True, True, True, True, True,\r\n True, True, True, True, True, True, True, True, True,\r\n True, True, True, True, True, True, True, True, True,\r\n True, True, True, True, True, True, True, True, True,\r\n True, True, True, True, True, True, True, True, True,\r\n True, True, True, True, True, True, True, True, True,\r\n True, True, True, True, True, True, True, True, True,\r\n True, True, True, True, True, True, True, True, True,\r\n True, True, True, True, True, True, True, True, True,\r\n True, True, True, True, True, True, True, True, True,\r\n True, True, True, True, True, True, True, True, True,\r\n True, True, True, True, True, True, True, True, True,\r\n True, True, True, True, True, True, True, True, True,\r\n True, True, True, True, True, True, True, True, True,\r\n True, True, True, True, True, True, True, True, True,\r\n True, True, True, True, True, True, True, True, True,\r\n True, True, True, True, True, True, True, True, True,\r\n True, True, True, True, True, True, True, True, True,\r\n True, True, True, True, True, True, True, True, True,\r\n True, True, True, True, True, True, True, True, True,\r\n True, True, True, True, True, True, True, True, True,\r\n True, True, True, True, True, True, True, True, True,\r\n True, True, True, True, True, True, True, True, True,\r\n True, True, True, True, True, True, True, True, True,\r\n True, True, True, True, True, True, True, True, True,\r\n True, True, True, True, True, True, True, True, True,\r\n True, True, True, True, True, True, True, True, True,\r\n True, True, True, True, True, True, True, True, True,\r\n True, True, True, True, True, True, True, True, True,\r\n True, True, True, True, True, True, True, True, True,\r\n True, True, True, True, True, True, True, True, True,\r\n True, True, True, True, True, True, True, True, True,\r\n True, True, True, True, True, True, True, True, True,\r\n True, True, True, True, True, True, True, True, True,\r\n True, True, True, True, True, True, True, True, True,\r\n True, True, True, True, True, True, True, True, True,\r\n True, True, True, True, True, True, True, True, True,\r\n True, True, True, True, True, True, True, True, True,\r\n True, True, True, True, True, True, True, True, True,\r\n True, True, True, True, True, True, True, True, True,\r\n True, True, True, True, True, True, True, True, True,\r\n True, True, True, True, True, True, True, True, True,\r\n True, True, True, True, True, True, True, True, True,\r\n True, 
True, True, True, True, True, True, True, True,\r\n True, True, True, True, True, True, True, True, True,\r\n True, True, True, True, True, True, True, True, True,\r\n True, True, True, True, True, True, True, True, True,\r\n True, True, True, True, True, True, True, True, True,\r\n True, True, True, True, True, True, True, True, True,\r\n True, True, True, True, True, True, True, True, True,\r\n True, True, True, True, True, True, True, True, True,\r\n True, True, True, True, True, True, True, True, True,\r\n True, True, True, True, True, True, True, True, True,\r\n True, True, True, True, True, True, True, True, True,\r\n True, True, True, True, True, True, True, True, True,\r\n True, True, True, True, True, True, True, True, True,\r\n True, True, True, True, True, True, True, True, True,\r\n True, True, True, True, True, True, True, True, True,\r\n True, True, True, True, True, True, True, True, True,\r\n True, True, True, True, True, True, True, True, True,\r\n True, True, True, True, True, True, True, True, True,\r\n True, True, True, True, True, True, True, True, True,\r\n True, True, True, True, True, True, True, True, True,\r\n True, True, True, True, True, True, True, True, True,\r\n True, True, True, True, True, True, True, True, True,\r\n True, True, True, True, True, True, True, True, True,\r\n True, True]], dtype=bool)\r\n(jdb) ",,terminal_output +2106,1318008,"TERMINAL",0,0,"4",,terminal_output +2107,1319093,"TERMINAL",0,0,"5",,terminal_output +2108,1319201,"TERMINAL",0,0,"\rinit_mask_old",,terminal_output +2109,1320078,"TERMINAL",0,0,"6",,terminal_output +2110,1320325,"TERMINAL",0,0,".",,terminal_output +2111,1320659,"TERMINAL",0,0,"[?25ls[?25h",,terminal_output +2112,1320731,"TERMINAL",0,0,"[?25lh[?25h",,terminal_output +2113,1320849,"TERMINAL",0,0,"[?25la[?25h",,terminal_output +2114,1320910,"TERMINAL",0,0,"[?25lp[?25h",,terminal_output +2115,1321090,"TERMINAL",0,0,"[?25le[?25h",,terminal_output +2116,1321148,"TERMINAL",0,0,"\r\n(1, 920)\r\n(jdb) ",,terminal_output +2117,1321149,"TERMINAL",0,0,"7",,terminal_output +2118,1321891,"TERMINAL",0,0,"\rinit_mask_old.shape",,terminal_output +2119,1322151,"TERMINAL",0,0,"\r",,terminal_output +2120,1322151,"TERMINAL",0,0,"9",,terminal_output +2121,1323191,"TERMINAL",0,0,"40",,terminal_output +2122,1323835,"TERMINAL",0,0,"\r.shape",,terminal_output +2123,1324229,"TERMINAL",0,0,"1",,terminal_output +2124,1324463,"TERMINAL",0,0,"\r\n(1, 6, 920)\r\n(jdb) ",,terminal_output +2125,1325278,"TERMINAL",0,0,"2",,terminal_output +2126,1326308,"TERMINAL",0,0,"3",,terminal_output +2127,1327379,"TERMINAL",0,0,"4",,terminal_output +2128,1328498,"TERMINAL",0,0,"5",,terminal_output +2129,1329430,"TERMINAL",0,0,"6",,terminal_output +2130,1330478,"TERMINAL",0,0,"7",,terminal_output +2131,1331550,"TERMINAL",0,0,"8",,terminal_output +2132,1332593,"TERMINAL",0,0,"9",,terminal_output +2133,1332881,"TERMINAL",0,0,"^DERROR:2025-07-03 16:37:49,746:jax._src.debugging:96: jax.debug.callback failed\r\nTraceback (most recent call last):\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/jax/_src/debugging.py"", line 94, in debug_callback_impl\r\n callback(*args)\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/jax/_src/debugging.py"", line 334, in _flat_callback\r\n callback(*args, **kwargs)\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/jax/_src/debugger/core.py"", line 220, 
in _breakpoint_callback\r\n debugger(frames, thread_id, **kwargs)\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/jax/_src/debugger/cli_debugger.py"", line 167, in run_debugger\r\n CliDebugger(frames, thread_id, **kwargs).run()\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/jax/_src/debugger/cli_debugger.py"", line 160, in run\r\n self.cmdloop()\r\n File ""/home/hk-project-p0023960/tum_cte0515/.local/share/uv/python/cpython-3.10.18-linux-x86_64-gnu/lib/python3.10/cmd.py"", line 138, in cmdloop\r\n stop = self.onecmd(line)\r\n File ""/home/hk-project-p0023960/tum_cte0515/.local/share/uv/python/cpython-3.10.18-linux-x86_64-gnu/lib/python3.10/cmd.py"", line 217, in onecmd\r\n return func(arg)\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/jax/_src/debugger/cli_debugger.py"", line 146, in do_quit\r\n sys.exit(0)\r\nSystemExit: 0\r\nERROR:jax._src.debugging:jax.debug.callback failed\r\nTraceback (most recent call last):\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/jax/_src/debugging.py"", line 94, in debug_callback_impl\r\n callback(*args)\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/jax/_src/debugging.py"", line 334, in _flat_callback\r\n callback(*args, **kwargs)\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/jax/_src/debugger/core.py"", line 220, in _breakpoint_callback\r\n debugger(frames, thread_id, **kwargs)\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/jax/_src/debugger/cli_debugger.py"", line 167, in run_debugger\r\n CliDebugger(frames, thread_id, **kwargs).run()\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/jax/_src/debugger/cli_debugger.py"", line 160, in run\r\n self.cmdloop()\r\n File ""/home/hk-project-p0023960/tum_cte0515/.local/share/uv/python/cpython-3.10.18-linux-x86_64-gnu/lib/python3.10/cmd.py"", line 138, in cmdloop\r\n stop = self.onecmd(line)\r\n File ""/home/hk-project-p0023960/tum_cte0515/.local/share/uv/python/cpython-3.10.18-linux-x86_64-gnu/lib/python3.10/cmd.py"", line 217, in onecmd\r\n return func(arg)\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/jax/_src/debugger/cli_debugger.py"", line 146, in do_quit\r\n sys.exit(0)\r\nSystemExit: 0\r\n",,terminal_output +2134,1333618,"TERMINAL",0,0,"50",,terminal_output +2135,1333749,"TERMINAL",0,0,"^[",,terminal_output +2136,1334207,"genie.py",0,0,"",python,tab +2137,1334208,"genie.py",4647,0,"",python,selection_mouse +2138,1334273,"genie.py",4646,0,"",python,selection_command +2139,1334532,"TERMINAL",0,0,"]0;tum_cte0515@hkn0704:~/Projects/jafar[?2004h(jafar) [tum_cte0515@hkn0704 jafar]$ ",,terminal_output +2140,1334643,"TERMINAL",0,0,"1",,terminal_output +2141,1334779,"genie.py",4647,0,"",python,selection_mouse +2142,1334780,"genie.py",4646,0,"",python,selection_command +2143,1335298,"genie.py",4579,0,"",python,selection_mouse +2144,1335671,"TERMINAL",0,0,"2",,terminal_output +2145,1336793,"TERMINAL",0,0,"3",,terminal_output +2146,1337515,"genie.py",4540,77,"",python,content +2147,1337595,"genie.py",4548,0,"",python,selection_command 
+2148,1337620,"genie.py",4511,0,"",python,selection_command +2149,1337749,"TERMINAL",0,0,"43",,terminal_output +2150,1337773,"genie.py",4502,0,"",python,selection_command +2151,1337914,"genie.py",4500,0,"",python,selection_command +2152,1338083,"genie.py",4470,0,"",python,selection_command +2153,1338218,"genie.py",4432,0,"",python,selection_command +2154,1338341,"genie.py",4362,0,"",python,selection_command +2155,1338782,"TERMINAL",0,0,"54",,terminal_output +2156,1339234,"genie.py",4292,0,"",python,selection_command +2157,1339366,"genie.py",4257,0,"",python,selection_command +2158,1339496,"genie.py",4198,0,"",python,selection_command +2159,1339822,"TERMINAL",0,0,"6",,terminal_output +2160,1340893,"TERMINAL",0,0,"7",,terminal_output +2161,1341399,"genie.py",4257,0,"",python,selection_command +2162,1341613,"genie.py",4292,0,"",python,selection_command +2163,1341914,"TERMINAL",0,0,"8",,terminal_output +2164,1342169,"genie.py",4257,0,"",python,selection_command +2165,1342328,"genie.py",4198,0,"",python,selection_command +2166,1342461,"genie.py",4126,0,"",python,selection_command +2167,1342650,"genie.py",4064,0,"",python,selection_command +2168,1342979,"TERMINAL",0,0,"9",,terminal_output +2169,1343087,"genie.py",4055,0,"",python,selection_command +2170,1343269,"genie.py",3989,0,"",python,selection_command +2171,1343450,"genie.py",3918,0,"",python,selection_command +2172,1343636,"genie.py",3857,0,"",python,selection_command +2173,1343785,"genie.py",3776,0,"",python,selection_command +2174,1343932,"genie.py",3723,0,"",python,selection_command +2175,1344008,"TERMINAL",0,0,"8:00",,terminal_output +2176,1344113,"genie.py",3776,0,"",python,selection_command +2177,1344261,"genie.py",3857,0,"",python,selection_command +2178,1344412,"genie.py",3918,0,"",python,selection_command +2179,1344618,"genie.py",3989,0,"",python,selection_command +2180,1345089,"TERMINAL",0,0,"1",,terminal_output +2181,1345665,"genie.py",3918,0,"",python,selection_command +2182,1346077,"TERMINAL",0,0,"2",,terminal_output +2183,1346100,"genie.py",3857,0,"",python,selection_command +2184,1347138,"TERMINAL",0,0,"3",,terminal_output +2185,1347399,"genie.py",3776,0,"",python,selection_command +2186,1348159,"TERMINAL",0,0,"5",,terminal_output +2187,1348595,"genie.py",3768,81,"",python,content +2188,1348633,"genie.py",3776,0,"",python,selection_command +2189,1349209,"TERMINAL",0,0,"6",,terminal_output +2190,1350240,"TERMINAL",0,0,"7",,terminal_output +2191,1350337,"genie.py",3776,0,"new_frame_idxs = jnp.zeros_like(token_idxs)[:, 0] # (B, N) # TODO remove\n ",python,content +2192,1350405,"genie.py",3776,0,"",python,selection_command +2193,1351324,"TERMINAL",0,0,"8",,terminal_output +2194,1352324,"TERMINAL",0,0,"9",,terminal_output +2195,1352897,"genie.py",3768,142,"",python,content +2196,1352935,"genie.py",3776,0,"",python,selection_command +2197,1353224,"genie.py",3723,0,"",python,selection_command +2198,1353366,"TERMINAL",0,0,"10",,terminal_output +2199,1353417,"genie.py",3714,0,"",python,selection_command +2200,1353847,"genie.py",3658,0,"",python,selection_command +2201,1354085,"genie.py",3714,0,"",python,selection_command +2202,1354233,"genie.py",3723,0,"",python,selection_command +2203,1354407,"TERMINAL",0,0,"1",,terminal_output +2204,1354546,"genie.py",3776,0,"",python,selection_command +2205,1354787,"genie.py",3723,0,"",python,selection_command +2206,1354915,"genie.py",3714,0,"",python,selection_command +2207,1355285,"genie.py",3723,0,"",python,selection_command +2208,1355450,"genie.py",3714,0,"",python,selection_command 
+2209,1355451,"TERMINAL",0,0,"22",,terminal_output +2210,1355616,"genie.py",3658,0,"",python,selection_command +2211,1355832,"genie.py",3599,0,"",python,selection_command +2212,1356199,"genie.py",3565,0,"",python,selection_command +2213,1356384,"genie.py",3512,0,"",python,selection_command +2214,1356480,"TERMINAL",0,0,"3",,terminal_output +2215,1357522,"TERMINAL",0,0,"4",,terminal_output +2216,1358562,"TERMINAL",0,0,"5",,terminal_output +2217,1359627,"TERMINAL",0,0,"6",,terminal_output +2218,1360656,"TERMINAL",0,0,"7",,terminal_output +2219,1361778,"TERMINAL",0,0,"85",,terminal_output +2220,1362729,"TERMINAL",0,0,"9",,terminal_output +2221,1363767,"TERMINAL",0,0,"20",,terminal_output +2222,1364851,"TERMINAL",0,0,"1",,terminal_output +2223,1365236,"genie.py",3565,0,"",python,selection_command +2224,1365421,"genie.py",3599,0,"",python,selection_command +2225,1365586,"genie.py",3658,0,"",python,selection_command +2226,1365804,"genie.py",3714,0,"",python,selection_command +2227,1365883,"TERMINAL",0,0,"2",,terminal_output +2228,1366219,"genie.py",3658,0,"",python,selection_command +2229,1366397,"genie.py",3599,0,"",python,selection_command +2230,1366734,"genie.py",3565,0,"",python,selection_command +2231,1366899,"TERMINAL",0,0,"3 80",,terminal_output +2232,1366931,"genie.py",3512,0,"",python,selection_command +2233,1367981,"TERMINAL",0,0,"4",,terminal_output +2234,1368996,"TERMINAL",0,0,"5",,terminal_output +2235,1370038,"TERMINAL",0,0,"6",,terminal_output +2236,1371133,"TERMINAL",0,0,"7",,terminal_output +2237,1372125,"TERMINAL",0,0,"8",,terminal_output +2238,1373166,"TERMINAL",0,0,"30",,terminal_output +2239,1374278,"TERMINAL",0,0,"1",,terminal_output +2240,1375254,"TERMINAL",0,0,"2",,terminal_output +2241,1376306,"TERMINAL",0,0,"3",,terminal_output +2242,1377325,"TERMINAL",0,0,"4",,terminal_output +2243,1378361,"TERMINAL",0,0,"5",,terminal_output +2244,1379411,"TERMINAL",0,0,"6",,terminal_output +2245,1380518,"TERMINAL",0,0,"7",,terminal_output +2246,1381478,"TERMINAL",0,0,"8",,terminal_output +2247,1382574,"TERMINAL",0,0,"9",,terminal_output +2248,1383590,"TERMINAL",0,0,"40",,terminal_output +2249,1384608,"TERMINAL",0,0,"1",,terminal_output +2250,1385741,"TERMINAL",0,0,"2",,terminal_output +2251,1386691,"TERMINAL",0,0,"3",,terminal_output +2252,1387763,"TERMINAL",0,0,"4",,terminal_output +2253,1388812,"TERMINAL",0,0,"5",,terminal_output +2254,1389811,"TERMINAL",0,0,"6",,terminal_output +2255,1391324,"TERMINAL",0,0,"7",,terminal_output +2256,1392363,"TERMINAL",0,0,"9",,terminal_output +2257,1393411,"TERMINAL",0,0,"50",,terminal_output +2258,1393853,"genie.py",3565,0,"",python,selection_command +2259,1394058,"genie.py",3599,0,"",python,selection_command +2260,1394223,"genie.py",3658,0,"",python,selection_command +2261,1394450,"TERMINAL",0,0,"1",,terminal_output +2262,1394577,"genie.py",3599,0,"",python,selection_command +2263,1394736,"genie.py",3565,0,"",python,selection_command +2264,1395505,"genie.py",3599,0,"",python,selection_command +2265,1395524,"TERMINAL",0,0,"2",,terminal_output +2266,1395681,"genie.py",3658,0,"",python,selection_command +2267,1395848,"genie.py",3714,0,"",python,selection_command +2268,1396033,"genie.py",3723,0,"",python,selection_command +2269,1396222,"genie.py",3776,0,"",python,selection_command +2270,1396446,"genie.py",3847,0,"",python,selection_command +2271,1396543,"TERMINAL",0,0,"3",,terminal_output +2272,1396565,"genie.py",3913,0,"",python,selection_command +2273,1396761,"genie.py",3922,0,"",python,selection_command 
+2274,1396987,"genie.py",3984,0,"",python,selection_command +2275,1397134,"genie.py",4056,0,"",python,selection_command +2276,1397350,"genie.py",3984,0,"",python,selection_command +2277,1397564,"TERMINAL",0,0,"4",,terminal_output +2278,1397830,"genie.py",3922,0,"",python,selection_command +2279,1397893,"genie.py",3913,0,"",python,selection_command +2280,1397899,"genie.py",3847,0,"",python,selection_command +2281,1397932,"genie.py",3776,0,"",python,selection_command +2282,1397969,"genie.py",3723,0,"",python,selection_command +2283,1398606,"TERMINAL",0,0,"5",,terminal_output +2284,1399646,"TERMINAL",0,0,"6",,terminal_output +2285,1400690,"TERMINAL",0,0,"7",,terminal_output +2286,1401763,"TERMINAL",0,0,"8",,terminal_output +2287,1402841,"TERMINAL",0,0,"9",,terminal_output +2288,1403812,"TERMINAL",0,0,"9:00",,terminal_output +2289,1404509,"genie.py",4350,0,"",python,selection_mouse +2290,1404528,"genie.py",4349,0,"",python,selection_command +2291,1404890,"TERMINAL",0,0,"1",,terminal_output +2292,1405015,"genie.py",4319,0,"",python,selection_mouse +2293,1405033,"genie.py",4318,0,"",python,selection_command +2294,1405855,"genie.py",4319,0,"\n ",python,content +2295,1405952,"TERMINAL",0,0,"2",,terminal_output +2296,1405995,"genie.py",4328,0,"\n ",python,content +2297,1405996,"genie.py",4320,8,"",python,content +2298,1406387,"genie.py",4329,0,"a",python,content +2299,1406388,"genie.py",4330,0,"",python,selection_keyboard +2300,1406567,"genie.py",4330,0,"s",python,content +2301,1406568,"genie.py",4331,0,"",python,selection_keyboard +2302,1406822,"genie.py",4331,0,"s",python,content +2303,1406823,"genie.py",4332,0,"",python,selection_keyboard +2304,1406936,"TERMINAL",0,0,"3",,terminal_output +2305,1407469,"genie.py",4329,3,"assert",python,content +2306,1407994,"TERMINAL",0,0,"4",,terminal_output +2307,1408279,"genie.py",4329,6,"",python,content +2308,1408684,"genie.py",4325,4,"",python,content +2309,1408816,"genie.py",4321,4,"",python,content +2310,1409040,"TERMINAL",0,0,"5",,terminal_output +2311,1409378,"genie.py",4320,1,"",python,content +2312,1410061,"TERMINAL",0,0,"6",,terminal_output +2313,1411135,"TERMINAL",0,0,"7",,terminal_output +2314,1411150,"genie.py",3408,0,"",python,selection_mouse +2315,1411313,"genie.py",3408,1,"B",python,selection_mouse +2316,1411569,"genie.py",3408,2,"B,",python,selection_mouse +2317,1411569,"genie.py",3408,3,"B, ",python,selection_mouse +2318,1411569,"genie.py",3408,4,"B, T",python,selection_mouse +2319,1411569,"genie.py",3408,5,"B, T,",python,selection_mouse +2320,1411680,"genie.py",3408,6,"B, T, ",python,selection_mouse +2321,1412081,"genie.py",3414,0,"",python,selection_mouse +2322,1412143,"TERMINAL",0,0,"9",,terminal_output +2323,1412551,"genie.py",3444,0,"",python,selection_mouse +2324,1413188,"TERMINAL",0,0,"10",,terminal_output +2325,1413996,"genie.py",4281,0,"",python,selection_mouse +2326,1414312,"TERMINAL",0,0,"1",,terminal_output +2327,1414814,"genie.py",4319,0,"",python,selection_mouse +2328,1415340,"TERMINAL",0,0,"2",,terminal_output +2329,1415669,"genie.py",4319,0,"\n ",python,content +2330,1416136,"genie.py",4328,0,"\n ",python,content +2331,1416137,"genie.py",4320,8,"",python,content +2332,1416355,"genie.py",4329,0,"a",python,content +2333,1416355,"genie.py",4330,0,"",python,selection_keyboard +2334,1416386,"TERMINAL",0,0,"36",,terminal_output +2335,1416580,"genie.py",4330,0,"s",python,content +2336,1416581,"genie.py",4331,0,"",python,selection_keyboard +2337,1416778,"genie.py",4331,0,"s",python,content 
+2338,1416779,"genie.py",4332,0,"",python,selection_keyboard +2339,1417446,"genie.py",4329,3,"assert",python,content +2340,1417528,"TERMINAL",0,0,"4 8",,terminal_output +2341,1417838,"genie.py",4335,0," ",python,content +2342,1417839,"genie.py",4336,0,"",python,selection_keyboard +2343,1418021,"genie.py",4336,0,"m",python,content +2344,1418022,"genie.py",4337,0,"",python,selection_keyboard +2345,1418392,"genie.py",4336,1,"",python,content +2346,1418535,"TERMINAL",0,0,"59",,terminal_output +2347,1419171,"genie.py",4336,0,"i",python,content +2348,1419172,"genie.py",4337,0,"",python,selection_keyboard +2349,1419287,"genie.py",4337,0,"n",python,content +2350,1419288,"genie.py",4338,0,"",python,selection_keyboard +2351,1419423,"genie.py",4338,0,"i",python,content +2352,1419423,"genie.py",4339,0,"",python,selection_keyboard +2353,1419563,"genie.py",4339,0,"t",python,content +2354,1419564,"genie.py",4340,0,"",python,selection_keyboard +2355,1419596,"TERMINAL",0,0,"6",,terminal_output +2356,1420637,"TERMINAL",0,0,"7",,terminal_output +2357,1421106,"genie.py",4336,4,"init_mask",python,content +2358,1421672,"TERMINAL",0,0,"8",,terminal_output +2359,1421740,"genie.py",4345,0,".",python,content +2360,1421740,"genie.py",4346,0,"",python,selection_keyboard +2361,1422077,"genie.py",4346,0,"s",python,content +2362,1422077,"genie.py",4347,0,"",python,selection_keyboard +2363,1422262,"genie.py",4347,0,"h",python,content +2364,1422263,"genie.py",4348,0,"",python,selection_keyboard +2365,1422445,"genie.py",4348,0,"a",python,content +2366,1422446,"genie.py",4349,0,"",python,selection_keyboard +2367,1422522,"genie.py",4349,0,"p",python,content +2368,1422524,"genie.py",4350,0,"",python,selection_keyboard +2369,1422635,"genie.py",4350,0,"e",python,content +2370,1422636,"genie.py",4351,0,"",python,selection_keyboard +2371,1422732,"TERMINAL",0,0,"9",,terminal_output +2372,1423101,"genie.py",4351,0," ",python,content +2373,1423101,"genie.py",4352,0,"",python,selection_keyboard +2374,1423234,"genie.py",4352,0,"=",python,content +2375,1423234,"genie.py",4353,0,"",python,selection_keyboard +2376,1423346,"genie.py",4353,0,"=",python,content +2377,1423347,"genie.py",4354,0,"",python,selection_keyboard +2378,1423447,"genie.py",4354,0," ",python,content +2379,1423448,"genie.py",4355,0,"",python,selection_keyboard +2380,1423759,"TERMINAL",0,0,"20",,terminal_output +2381,1424539,"genie.py",4355,0,"[]",python,content +2382,1424540,"genie.py",4356,0,"",python,selection_keyboard +2383,1424791,"TERMINAL",0,0,"1",,terminal_output +2384,1425823,"TERMINAL",0,0,"2",,terminal_output +2385,1426919,"TERMINAL",0,0,"3",,terminal_output +2386,1427904,"TERMINAL",0,0,"4",,terminal_output +2387,1428747,"genie.py",4355,2,"",python,content +2388,1428952,"TERMINAL",0,0,"5",,terminal_output +2389,1430143,"TERMINAL",0,0,"6",,terminal_output +2390,1431103,"TERMINAL",0,0,"7",,terminal_output +2391,1432119,"TERMINAL",0,0,"8",,terminal_output +2392,1432603,"TERMINAL",0,0,"",,terminal_output +2393,1433120,"TERMINAL",0,0,"9",,terminal_output +2394,1433982,"genie.py",0,0,"",python,tab +2395,1434207,"TERMINAL",0,0,"31",,terminal_output +2396,1434838,"genie.py",4355,0,"()",python,content +2397,1434840,"genie.py",4356,0,"",python,selection_keyboard +2398,1435220,"TERMINAL",0,0,"2",,terminal_output +2399,1436243,"TERMINAL",0,0,"3",,terminal_output +2400,1437285,"TERMINAL",0,0,"4",,terminal_output +2401,1437609,"genie.py",4356,0,"B",python,content +2402,1437610,"genie.py",4357,0,"",python,selection_keyboard +2403,1437875,"genie.py",4357,0,",",python,content 
+2404,1437876,"genie.py",4358,0,"",python,selection_keyboard +2405,1438029,"genie.py",4358,0," ",python,content +2406,1438029,"genie.py",4359,0,"",python,selection_keyboard +2407,1438344,"TERMINAL",0,0,"5",,terminal_output +2408,1438375,"genie.py",4359,0,"S",python,content +2409,1438376,"genie.py",4360,0,"",python,selection_keyboard +2410,1438657,"genie.py",4360,0,",",python,content +2411,1438658,"genie.py",4361,0,"",python,selection_keyboard +2412,1438824,"genie.py",4361,0," ",python,content +2413,1438825,"genie.py",4362,0,"",python,selection_keyboard +2414,1439280,"genie.py",4362,0,"N",python,content +2415,1439281,"genie.py",4363,0,"",python,selection_keyboard +2416,1439367,"TERMINAL",0,0,"6",,terminal_output +2417,1440450,"TERMINAL",0,0,"7",,terminal_output +2418,1441409,"genie.py",4081,0,"",python,selection_mouse +2419,1441485,"TERMINAL",0,0,"8",,terminal_output +2420,1442507,"TERMINAL",0,0,"910",,terminal_output +2421,1442661,"genie.py",4121,0,"",python,selection_mouse +2422,1443605,"TERMINAL",0,0,"401",,terminal_output +2423,1443855,"genie.py",4396,0,"",python,selection_mouse +2424,1444532,"genie.py",4364,0,"",python,selection_mouse +2425,1444595,"TERMINAL",0,0,"1",,terminal_output +2426,1445544,"genie.py",4364,0,"\n ",python,content +2427,1445681,"TERMINAL",0,0,"2",,terminal_output +2428,1446691,"TERMINAL",0,0,"3",,terminal_output +2429,1447802,"TERMINAL",0,0,"4",,terminal_output +2430,1448821,"TERMINAL",0,0,"5",,terminal_output +2431,1449801,"TERMINAL",0,0,"6",,terminal_output +2432,1450838,"TERMINAL",0,0,"7",,terminal_output +2433,1451429,"genie.py",3424,0,"",python,selection_mouse +2434,1451877,"TERMINAL",0,0,"8",,terminal_output +2435,1452526,"genie.py",3578,0,"",python,selection_mouse +2436,1452921,"TERMINAL",0,0,"9",,terminal_output +2437,1453562,"genie.py",4122,0,"",python,selection_mouse +2438,1453954,"TERMINAL",0,0,"50",,terminal_output +2439,1453967,"genie.py",4121,0,"",python,selection_command +2440,1454664,"genie.py",4365,8,"",python,content +2441,1454664,"genie.py",4107,35,"",python,content +2442,1454688,"genie.py",4115,0,"",python,selection_command +2443,1454989,"TERMINAL",0,0,"1",,terminal_output +2444,1456062,"genie.py",4326,0,"",python,selection_mouse +2445,1456087,"TERMINAL",0,0,"2",,terminal_output +2446,1456691,"genie.py",4329,0,"\n ",python,content +2447,1457055,"genie.py",4338,0,"a",python,content +2448,1457056,"genie.py",4339,0,"",python,selection_keyboard +2449,1457221,"TERMINAL",0,0,"3",,terminal_output +2450,1457325,"genie.py",4339,0,"s",python,content +2451,1457326,"genie.py",4340,0,"",python,selection_keyboard +2452,1457438,"genie.py",4340,0,"s",python,content +2453,1457439,"genie.py",4341,0,"",python,selection_keyboard +2454,1458083,"genie.py",4338,3,"assert",python,content +2455,1458166,"TERMINAL",0,0,"49",,terminal_output +2456,1458735,"genie.py",4344,0," ",python,content +2457,1458735,"genie.py",4345,0,"",python,selection_keyboard +2458,1459173,"TERMINAL",0,0,"6",,terminal_output +2459,1460192,"TERMINAL",0,0,"7",,terminal_output +2460,1461225,"TERMINAL",0,0,"8",,terminal_output +2461,1462327,"TERMINAL",0,0,"9",,terminal_output +2462,1463315,"TERMINAL",0,0,"40:00",,terminal_output +2463,1464347,"TERMINAL",0,0,"1",,terminal_output +2464,1464611,"genie.py",4345,0,"a",python,content +2465,1464612,"genie.py",4346,0,"",python,selection_keyboard +2466,1464953,"genie.py",4346,0,"c",python,content +2467,1464954,"genie.py",4347,0,"",python,selection_keyboard +2468,1465452,"TERMINAL",0,0,"2",,terminal_output 
+2469,1465754,"genie.py",4345,2,"action_tokens",python,content +2470,1466427,"TERMINAL",0,0,"3",,terminal_output +2471,1467413,"genie.py",4358,0,".",python,content +2472,1467413,"genie.py",4359,0,"",python,selection_keyboard +2473,1467475,"TERMINAL",0,0,"4",,terminal_output +2474,1467623,"genie.py",4359,0,"s",python,content +2475,1467624,"genie.py",4360,0,"",python,selection_keyboard +2476,1467827,"genie.py",4360,0,"h",python,content +2477,1467828,"genie.py",4361,0,"",python,selection_keyboard +2478,1467903,"genie.py",4361,0,"a",python,content +2479,1467904,"genie.py",4362,0,"",python,selection_keyboard +2480,1468012,"genie.py",4362,0,"p",python,content +2481,1468013,"genie.py",4363,0,"",python,selection_keyboard +2482,1468050,"genie.py",4363,0,"e",python,content +2483,1468051,"genie.py",4364,0,"",python,selection_keyboard +2484,1468458,"genie.py",4364,0," ",python,content +2485,1468458,"genie.py",4365,0,"",python,selection_keyboard +2486,1468522,"TERMINAL",0,0,"5",,terminal_output +2487,1468641,"genie.py",4365,0,"=",python,content +2488,1468642,"genie.py",4366,0,"",python,selection_keyboard +2489,1468846,"genie.py",4366,0,"=",python,content +2490,1468847,"genie.py",4367,0,"",python,selection_keyboard +2491,1468885,"genie.py",4367,0," ",python,content +2492,1468886,"genie.py",4368,0,"",python,selection_keyboard +2493,1469664,"TERMINAL",0,0,"6",,terminal_output +2494,1470587,"TERMINAL",0,0,"7",,terminal_output +2495,1471039,"genie.py",4368,0,"()",python,content +2496,1471040,"genie.py",4369,0,"",python,selection_keyboard +2497,1471415,"genie.py",4369,0,"B",python,content +2498,1471416,"genie.py",4370,0,"",python,selection_keyboard +2499,1471708,"TERMINAL",0,0,"8",,terminal_output +2500,1472691,"TERMINAL",0,0,"9",,terminal_output +2501,1473078,"genie.py",4370,0,",",python,content +2502,1473079,"genie.py",4371,0,"",python,selection_keyboard +2503,1473135,"genie.py",4371,0," ",python,content +2504,1473136,"genie.py",4372,0,"",python,selection_keyboard +2505,1473436,"genie.py",4372,0,"S",python,content +2506,1473437,"genie.py",4373,0,"",python,selection_keyboard +2507,1473685,"genie.py",4373,0,",",python,content +2508,1473686,"genie.py",4374,0,"",python,selection_keyboard +2509,1473770,"genie.py",4374,0," ",python,content +2510,1473771,"genie.py",4375,0,"",python,selection_keyboard +2511,1473780,"TERMINAL",0,0,"10",,terminal_output +2512,1474099,"genie.py",4375,0,"A",python,content +2513,1474100,"genie.py",4376,0,"",python,selection_keyboard +2514,1474453,"genie.py",4376,0,",",python,content +2515,1474453,"genie.py",4377,0,"",python,selection_keyboard +2516,1474547,"genie.py",4377,0," ",python,content +2517,1474548,"genie.py",4378,0,"",python,selection_keyboard +2518,1474813,"TERMINAL",0,0,"1",,terminal_output +2519,1474883,"genie.py",4378,0,"D",python,content +2520,1474884,"genie.py",4379,0,"",python,selection_keyboard +2521,1475683,"genie.py",4329,0,"",python,selection_mouse +2522,1475805,"TERMINAL",0,0,"2",,terminal_output +2523,1476403,"genie.py",4329,0,",",python,content +2524,1476404,"genie.py",4330,0,"",python,selection_keyboard +2525,1476515,"genie.py",4330,0," ",python,content +2526,1476516,"genie.py",4331,0,"",python,selection_keyboard +2527,1476866,"TERMINAL",0,0,"3",,terminal_output +2528,1477099,"genie.py",4331,0,"w",python,content +2529,1477099,"genie.py",4332,0,"",python,selection_keyboard +2530,1477754,"genie.py",4331,1,"",python,content +2531,1477877,"TERMINAL",0,0,"4",,terminal_output +2532,1478123,"genie.py",4331,0,"""""",python,content 
+2533,1478124,"genie.py",4332,0,"",python,selection_keyboard +2534,1478972,"TERMINAL",0,0,"5",,terminal_output +2535,1479255,"genie.py",4332,0,"W",python,content +2536,1479256,"genie.py",4333,0,"",python,selection_keyboard +2537,1479555,"genie.py",4333,0,"r",python,content +2538,1479556,"genie.py",4334,0,"",python,selection_keyboard +2539,1479774,"genie.py",4334,0,"o",python,content +2540,1479775,"genie.py",4335,0,"",python,selection_keyboard +2541,1479947,"genie.py",4335,0,"n",python,content +2542,1479948,"genie.py",4336,0,"",python,selection_keyboard +2543,1479977,"TERMINAL",0,0,"6",,terminal_output +2544,1480050,"genie.py",4336,0,"g",python,content +2545,1480051,"genie.py",4337,0,"",python,selection_keyboard +2546,1480166,"genie.py",4337,0," ",python,content +2547,1480167,"genie.py",4338,0,"",python,selection_keyboard +2548,1480339,"genie.py",4338,0,"m",python,content +2549,1480340,"genie.py",4339,0,"",python,selection_keyboard +2550,1480479,"genie.py",4339,0,"a",python,content +2551,1480480,"genie.py",4340,0,"",python,selection_keyboard +2552,1480753,"genie.py",4339,1,"",python,content +2553,1480897,"genie.py",4338,1,"",python,content +2554,1481037,"TERMINAL",0,0,"7",,terminal_output +2555,1481191,"genie.py",4338,0,"m",python,content +2556,1481192,"genie.py",4339,0,"",python,selection_keyboard +2557,1481420,"genie.py",4339,0,"a",python,content +2558,1481421,"genie.py",4340,0,"",python,selection_keyboard +2559,1481503,"genie.py",4340,0,"s",python,content +2560,1481503,"genie.py",4341,0,"",python,selection_keyboard +2561,1481570,"genie.py",4341,0,"k",python,content +2562,1481570,"genie.py",4342,0,"",python,selection_keyboard +2563,1481682,"genie.py",4342,0," ",python,content +2564,1481683,"genie.py",4343,0,"",python,selection_keyboard +2565,1482068,"TERMINAL",0,0,"8",,terminal_output +2566,1482114,"genie.py",4343,0,"s",python,content +2567,1482115,"genie.py",4344,0,"",python,selection_keyboard +2568,1482193,"genie.py",4344,0,"h",python,content +2569,1482194,"genie.py",4345,0,"",python,selection_keyboard +2570,1482318,"genie.py",4345,0,"a",python,content +2571,1482318,"genie.py",4346,0,"",python,selection_keyboard +2572,1482423,"genie.py",4346,0,"p",python,content +2573,1482423,"genie.py",4347,0,"",python,selection_keyboard +2574,1482567,"genie.py",4347,0,"e",python,content +2575,1482568,"genie.py",4348,0,"",python,selection_keyboard +2576,1483090,"TERMINAL",0,0,"9",,terminal_output +2577,1483254,"genie.py",4347,0,"",python,selection_command +2578,1483587,"genie.py",4399,0,"",python,selection_command +2579,1483929,"genie.py",4400,0,"",python,selection_command +2580,1484126,"TERMINAL",0,0,"20",,terminal_output +2581,1484235,"genie.py",4400,0,",",python,content +2582,1484236,"genie.py",4401,0,"",python,selection_keyboard +2583,1484318,"genie.py",4401,0," ",python,content +2584,1484319,"genie.py",4402,0,"",python,selection_keyboard +2585,1485159,"TERMINAL",0,0,"2",,terminal_output +2586,1485573,"genie.py",4401,1,"",python,content +2587,1485718,"genie.py",4400,1,"",python,content +2588,1486185,"genie.py",4399,1,"",python,content +2589,1486225,"TERMINAL",0,0,"3",,terminal_output +2590,1486346,"genie.py",4398,1,"",python,content +2591,1486634,"genie.py",4396,2,"",python,content +2592,1486885,"genie.py",4395,1,"",python,content +2593,1487079,"genie.py",4393,2,"",python,content +2594,1487230,"genie.py",4392,1,"",python,content +2595,1487256,"TERMINAL",0,0,"4",,terminal_output +2596,1487412,"genie.py",4390,2,"",python,content +2597,1487547,"genie.py",4389,1,"",python,content 
+2598,1487710,"genie.py",4388,1,"",python,content +2599,1487899,"genie.py",4385,3,"",python,content +2600,1488069,"genie.py",4379,6,"",python,content +2601,1488234,"genie.py",4378,1,"",python,content +2602,1488328,"TERMINAL",0,0,"5",,terminal_output +2603,1488436,"genie.py",4365,13,"",python,content +2604,1488706,"genie.py",4358,7,"",python,content +2605,1489324,"TERMINAL",0,0,"6",,terminal_output +2606,1490385,"TERMINAL",0,0,"7",,terminal_output +2607,1491421,"TERMINAL",0,0,"8",,terminal_output +2608,1492468,"TERMINAL",0,0,"9",,terminal_output +2609,1493505,"TERMINAL",0,0,"30",,terminal_output +2610,1494555,"TERMINAL",0,0,"1",,terminal_output +2611,1495590,"TERMINAL",0,0,"2",,terminal_output +2612,1496634,"TERMINAL",0,0,"3",,terminal_output +2613,1497701,"TERMINAL",0,0,"4",,terminal_output +2614,1498790,"TERMINAL",0,0,"5",,terminal_output +2615,1499815,"TERMINAL",0,0,"6",,terminal_output +2616,1500809,"TERMINAL",0,0,"7",,terminal_output +2617,1501863,"TERMINAL",0,0,"8",,terminal_output +2618,1502996,"TERMINAL",0,0,"9",,terminal_output +2619,1503676,"genie.py",4219,0,"",python,selection_mouse +2620,1504013,"TERMINAL",0,0,"40",,terminal_output +2621,1504217,"genie.py",4284,0,"",python,selection_mouse +2622,1505040,"TERMINAL",0,0,"1",,terminal_output +2623,1505495,"genie.py",4217,0,"",python,selection_mouse +2624,1505658,"genie.py",4216,1,"B",python,selection_mouse +2625,1505912,"genie.py",4216,2,"B,",python,selection_mouse +2626,1505912,"genie.py",4216,3,"B, ",python,selection_mouse +2627,1505912,"genie.py",4216,4,"B, S",python,selection_mouse +2628,1506025,"TERMINAL",0,0,"2",,terminal_output +2629,1506286,"genie.py",4220,0,"",python,selection_mouse +2630,1507064,"genie.py",4285,0,"",python,selection_mouse +2631,1507087,"TERMINAL",0,0,"3",,terminal_output +2632,1507752,"genie.py",4284,0,"",python,selection_mouse +2633,1508109,"TERMINAL",0,0,"4",,terminal_output +2634,1509114,"genie.py",4331,0,"",python,selection_mouse +2635,1509215,"TERMINAL",0,0,"6",,terminal_output +2636,1510098,"genie.py",4358,0,"",python,selection_mouse +2637,1510206,"TERMINAL",0,0,"7",,terminal_output +2638,1511082,"genie.py",4285,0,"",python,selection_mouse +2639,1511243,"TERMINAL",0,0,"82",,terminal_output +2640,1511600,"genie.py",4341,0,"",python,selection_mouse +2641,1511897,"genie.py",4340,0,"",python,selection_command +2642,1512290,"TERMINAL",0,0,"9",,terminal_output +2643,1513169,"genie.py",4358,0,"",python,selection_mouse +2644,1513180,"genie.py",4357,0,"",python,selection_command +2645,1513322,"TERMINAL",0,0,"50",,terminal_output +2646,1513906,"genie.py",4327,0,"",python,selection_mouse +2647,1514374,"TERMINAL",0,0,"1",,terminal_output +2648,1514512,"genie.py",4285,0,"",python,selection_mouse +2649,1515488,"TERMINAL",0,0,"2",,terminal_output +2650,1515536,"genie.py",4284,0,"",python,selection_mouse +2651,1515548,"genie.py",4283,0,"",python,selection_command +2652,1515908,"genie.py",4283,1,")",python,selection_mouse +2653,1515908,"genie.py",4280,3,"ool",python,selection_mouse +2654,1515909,"genie.py",4196,87,"broadcast_to(mask, (B, S, N)) # shape (B, S, N)\n init_mask = mask.astype(bool",python,selection_mouse +2655,1515909,"genie.py",4059,224,"ask is True for padded positions (i.e., t >= T)\n mask = (jnp.arange(S)[None, :, None] >= T) # shape (1, S, 1)\n mask = jnp.broadcast_to(mask, (B, S, N)) # shape (B, S, N)\n init_mask = mask.astype(bool",python,selection_mouse +2656,1515909,"genie.py",3919,364," # Create a mask that is 1 (True) where we just padded\n # token_idxs shape: (B, S, N), T = original length, S 
= seq_len\n # mask is True for padded positions (i.e., t >= T)\n mask = (jnp.arange(S)[None, :, None] >= T) # shape (1, S, 1)\n mask = jnp.broadcast_to(mask, (B, S, N)) # shape (B, S, N)\n init_mask = mask.astype(bool",python,selection_mouse +2657,1515910,"genie.py",3768,515," action_tokens = self.lam.vq.get_codes(batch[""latent_actions""])\n print(""action_tokens.shape:"", action_tokens.shape) # (B, S, A, D)\n\n # Create a mask that is 1 (True) where we just padded\n # token_idxs shape: (B, S, N), T = original length, S = seq_len\n # mask is True for padded positions (i.e., t >= T)\n mask = (jnp.arange(S)[None, :, None] >= T) # shape (1, S, 1)\n mask = jnp.broadcast_to(mask, (B, S, N)) # shape (B, S, N)\n init_mask = mask.astype(bool",python,selection_mouse +2658,1515910,"genie.py",3715,568," print(""token_idxs shape:"", token_idxs.shape)\n action_tokens = self.lam.vq.get_codes(batch[""latent_actions""])\n print(""action_tokens.shape:"", action_tokens.shape) # (B, S, A, D)\n\n # Create a mask that is 1 (True) where we just padded\n # token_idxs shape: (B, S, N), T = original length, S = seq_len\n # mask is True for padded positions (i.e., t >= T)\n mask = (jnp.arange(S)[None, :, None] >= T) # shape (1, S, 1)\n mask = jnp.broadcast_to(mask, (B, S, N)) # shape (B, S, N)\n init_mask = mask.astype(bool",python,selection_mouse +2659,1515910,"genie.py",3714,569,"\n print(""token_idxs shape:"", token_idxs.shape)\n action_tokens = self.lam.vq.get_codes(batch[""latent_actions""])\n print(""action_tokens.shape:"", action_tokens.shape) # (B, S, A, D)\n\n # Create a mask that is 1 (True) where we just padded\n # token_idxs shape: (B, S, N), T = original length, S = seq_len\n # mask is True for padded positions (i.e., t >= T)\n mask = (jnp.arange(S)[None, :, None] >= T) # shape (1, S, 1)\n mask = jnp.broadcast_to(mask, (B, S, N)) # shape (B, S, N)\n init_mask = mask.astype(bool",python,selection_mouse +2660,1515910,"genie.py",3650,633," token_idxs = jnp.concatenate([token_idxs, pad], axis=1)\n\n print(""token_idxs shape:"", token_idxs.shape)\n action_tokens = self.lam.vq.get_codes(batch[""latent_actions""])\n print(""action_tokens.shape:"", action_tokens.shape) # (B, S, A, D)\n\n # Create a mask that is 1 (True) where we just padded\n # token_idxs shape: (B, S, N), T = original length, S = seq_len\n # mask is True for padded positions (i.e., t >= T)\n mask = (jnp.arange(S)[None, :, None] >= T) # shape (1, S, 1)\n mask = jnp.broadcast_to(mask, (B, S, N)) # shape (B, S, N)\n init_mask = mask.astype(bool",python,selection_mouse +2661,1515943,"genie.py",4284,0,"",python,selection_command +2662,1515944,"genie.py",3650,634," token_idxs = jnp.concatenate([token_idxs, pad], axis=1)\n\n print(""token_idxs shape:"", token_idxs.shape)\n action_tokens = self.lam.vq.get_codes(batch[""latent_actions""])\n print(""action_tokens.shape:"", action_tokens.shape) # (B, S, A, D)\n\n # Create a mask that is 1 (True) where we just padded\n # token_idxs shape: (B, S, N), T = original length, S = seq_len\n # mask is True for padded positions (i.e., t >= T)\n mask = (jnp.arange(S)[None, :, None] >= T) # shape (1, S, 1)\n mask = jnp.broadcast_to(mask, (B, S, N)) # shape (B, S, N)\n init_mask = mask.astype(bool)",python,selection_mouse +2663,1515983,"genie.py",3592,692," pad = jnp.zeros(pad_shape, dtype=token_idxs.dtype)\n token_idxs = jnp.concatenate([token_idxs, pad], axis=1)\n\n print(""token_idxs shape:"", token_idxs.shape)\n action_tokens = self.lam.vq.get_codes(batch[""latent_actions""])\n print(""action_tokens.shape:"", 
action_tokens.shape) # (B, S, A, D)\n\n # Create a mask that is 1 (True) where we just padded\n # token_idxs shape: (B, S, N), T = original length, S = seq_len\n # mask is True for padded positions (i.e., t >= T)\n mask = (jnp.arange(S)[None, :, None] >= T) # shape (1, S, 1)\n mask = jnp.broadcast_to(mask, (B, S, N)) # shape (B, S, N)\n init_mask = mask.astype(bool)",python,selection_mouse +2664,1516015,"genie.py",3593,691," pad = jnp.zeros(pad_shape, dtype=token_idxs.dtype)\n token_idxs = jnp.concatenate([token_idxs, pad], axis=1)\n\n print(""token_idxs shape:"", token_idxs.shape)\n action_tokens = self.lam.vq.get_codes(batch[""latent_actions""])\n print(""action_tokens.shape:"", action_tokens.shape) # (B, S, A, D)\n\n # Create a mask that is 1 (True) where we just padded\n # token_idxs shape: (B, S, N), T = original length, S = seq_len\n # mask is True for padded positions (i.e., t >= T)\n mask = (jnp.arange(S)[None, :, None] >= T) # shape (1, S, 1)\n mask = jnp.broadcast_to(mask, (B, S, N)) # shape (B, S, N)\n init_mask = mask.astype(bool)",python,selection_mouse +2665,1516069,"genie.py",3653,631," token_idxs = jnp.concatenate([token_idxs, pad], axis=1)\n\n print(""token_idxs shape:"", token_idxs.shape)\n action_tokens = self.lam.vq.get_codes(batch[""latent_actions""])\n print(""action_tokens.shape:"", action_tokens.shape) # (B, S, A, D)\n\n # Create a mask that is 1 (True) where we just padded\n # token_idxs shape: (B, S, N), T = original length, S = seq_len\n # mask is True for padded positions (i.e., t >= T)\n mask = (jnp.arange(S)[None, :, None] >= T) # shape (1, S, 1)\n mask = jnp.broadcast_to(mask, (B, S, N)) # shape (B, S, N)\n init_mask = mask.astype(bool)",python,selection_mouse +2666,1516094,"genie.py",3714,570,"\n print(""token_idxs shape:"", token_idxs.shape)\n action_tokens = self.lam.vq.get_codes(batch[""latent_actions""])\n print(""action_tokens.shape:"", action_tokens.shape) # (B, S, A, D)\n\n # Create a mask that is 1 (True) where we just padded\n # token_idxs shape: (B, S, N), T = original length, S = seq_len\n # mask is True for padded positions (i.e., t >= T)\n mask = (jnp.arange(S)[None, :, None] >= T) # shape (1, S, 1)\n mask = jnp.broadcast_to(mask, (B, S, N)) # shape (B, S, N)\n init_mask = mask.astype(bool)",python,selection_mouse +2667,1516116,"genie.py",3719,565," print(""token_idxs shape:"", token_idxs.shape)\n action_tokens = self.lam.vq.get_codes(batch[""latent_actions""])\n print(""action_tokens.shape:"", action_tokens.shape) # (B, S, A, D)\n\n # Create a mask that is 1 (True) where we just padded\n # token_idxs shape: (B, S, N), T = original length, S = seq_len\n # mask is True for padded positions (i.e., t >= T)\n mask = (jnp.arange(S)[None, :, None] >= T) # shape (1, S, 1)\n mask = jnp.broadcast_to(mask, (B, S, N)) # shape (B, S, N)\n init_mask = mask.astype(bool)",python,selection_mouse +2668,1516141,"genie.py",3772,512," action_tokens = self.lam.vq.get_codes(batch[""latent_actions""])\n print(""action_tokens.shape:"", action_tokens.shape) # (B, S, A, D)\n\n # Create a mask that is 1 (True) where we just padded\n # token_idxs shape: (B, S, N), T = original length, S = seq_len\n # mask is True for padded positions (i.e., t >= T)\n mask = (jnp.arange(S)[None, :, None] >= T) # shape (1, S, 1)\n mask = jnp.broadcast_to(mask, (B, S, N)) # shape (B, S, N)\n init_mask = mask.astype(bool)",python,selection_mouse +2669,1516164,"genie.py",3913,371,"\n # Create a mask that is 1 (True) where we just padded\n # token_idxs shape: (B, S, N), T = original length, S = 
seq_len\n # mask is True for padded positions (i.e., t >= T)\n mask = (jnp.arange(S)[None, :, None] >= T) # shape (1, S, 1)\n mask = jnp.broadcast_to(mask, (B, S, N)) # shape (B, S, N)\n init_mask = mask.astype(bool)",python,selection_mouse +2670,1516191,"genie.py",3984,300,"# token_idxs shape: (B, S, N), T = original length, S = seq_len\n # mask is True for padded positions (i.e., t >= T)\n mask = (jnp.arange(S)[None, :, None] >= T) # shape (1, S, 1)\n mask = jnp.broadcast_to(mask, (B, S, N)) # shape (B, S, N)\n init_mask = mask.astype(bool)",python,selection_mouse +2671,1516216,"genie.py",4059,225,"ask is True for padded positions (i.e., t >= T)\n mask = (jnp.arange(S)[None, :, None] >= T) # shape (1, S, 1)\n mask = jnp.broadcast_to(mask, (B, S, N)) # shape (B, S, N)\n init_mask = mask.astype(bool)",python,selection_mouse +2672,1516243,"genie.py",4264,20," = mask.astype(bool)",python,selection_mouse +2673,1516269,"genie.py",4284,1,"\n",python,selection_mouse +2674,1516308,"genie.py",4284,24,"\n\n assert init_ma",python,selection_mouse +2675,1516335,"genie.py",4284,25,"\n\n assert init_mas",python,selection_mouse +2676,1516355,"genie.py",4284,1,"\n",python,selection_mouse +2677,1516382,"genie.py",4121,163," (jnp.arange(S)[None, :, None] >= T) # shape (1, S, 1)\n mask = jnp.broadcast_to(mask, (B, S, N)) # shape (B, S, N)\n init_mask = mask.astype(bool)",python,selection_mouse +2678,1516415,"genie.py",3842,442," print(""action_tokens.shape:"", action_tokens.shape) # (B, S, A, D)\n\n # Create a mask that is 1 (True) where we just padded\n # token_idxs shape: (B, S, N), T = original length, S = seq_len\n # mask is True for padded positions (i.e., t >= T)\n mask = (jnp.arange(S)[None, :, None] >= T) # shape (1, S, 1)\n mask = jnp.broadcast_to(mask, (B, S, N)) # shape (B, S, N)\n init_mask = mask.astype(bool)",python,selection_mouse +2679,1516449,"genie.py",3435,849," S = seq_len\n\n # begin potential forloop (from T to S)\n print(""token_idxs shape:"", token_idxs.shape)\n pad_shape = (B, S - T, N)\n pad = jnp.zeros(pad_shape, dtype=token_idxs.dtype)\n token_idxs = jnp.concatenate([token_idxs, pad], axis=1)\n\n print(""token_idxs shape:"", token_idxs.shape)\n action_tokens = self.lam.vq.get_codes(batch[""latent_actions""])\n print(""action_tokens.shape:"", action_tokens.shape) # (B, S, A, D)\n\n # Create a mask that is 1 (True) where we just padded\n # token_idxs shape: (B, S, N), T = original length, S = seq_len\n # mask is True for padded positions (i.e., t >= T)\n mask = (jnp.arange(S)[None, :, None] >= T) # shape (1, S, 1)\n mask = jnp.broadcast_to(mask, (B, S, N)) # shape (B, S, N)\n init_mask = mask.astype(bool)",python,selection_mouse +2680,1516486,"genie.py",3342,942," token_idxs = tokenizer_out[""indices""] # (B, T, N)\n B, T, N = token_idxs.shape\n S = seq_len\n\n # begin potential forloop (from T to S)\n print(""token_idxs shape:"", token_idxs.shape)\n pad_shape = (B, S - T, N)\n pad = jnp.zeros(pad_shape, dtype=token_idxs.dtype)\n token_idxs = jnp.concatenate([token_idxs, pad], axis=1)\n\n print(""token_idxs shape:"", token_idxs.shape)\n action_tokens = self.lam.vq.get_codes(batch[""latent_actions""])\n print(""action_tokens.shape:"", action_tokens.shape) # (B, S, A, D)\n\n # Create a mask that is 1 (True) where we just padded\n # token_idxs shape: (B, S, N), T = original length, S = seq_len\n # mask is True for padded positions (i.e., t >= T)\n mask = (jnp.arange(S)[None, :, None] >= T) # shape (1, S, 1)\n mask = jnp.broadcast_to(mask, (B, S, N)) # shape (B, S, N)\n init_mask = 
mask.astype(bool)",python,selection_mouse +2681,1516514,"genie.py",3260,1024," tokenizer_out = self.tokenizer.vq_encode(batch[""videos""], training=False)\n token_idxs = tokenizer_out[""indices""] # (B, T, N)\n B, T, N = token_idxs.shape\n S = seq_len\n\n # begin potential forloop (from T to S)\n print(""token_idxs shape:"", token_idxs.shape)\n pad_shape = (B, S - T, N)\n pad = jnp.zeros(pad_shape, dtype=token_idxs.dtype)\n token_idxs = jnp.concatenate([token_idxs, pad], axis=1)\n\n print(""token_idxs shape:"", token_idxs.shape)\n action_tokens = self.lam.vq.get_codes(batch[""latent_actions""])\n print(""action_tokens.shape:"", action_tokens.shape) # (B, S, A, D)\n\n # Create a mask that is 1 (True) where we just padded\n # token_idxs shape: (B, S, N), T = original length, S = seq_len\n # mask is True for padded positions (i.e., t >= T)\n mask = (jnp.arange(S)[None, :, None] >= T) # shape (1, S, 1)\n mask = jnp.broadcast_to(mask, (B, S, N)) # shape (B, S, N)\n init_mask = mask.astype(bool)",python,selection_mouse +2682,1516625,"TERMINAL",0,0,"3",,terminal_output +2683,1516830,"genie.py",3342,942," token_idxs = tokenizer_out[""indices""] # (B, T, N)\n B, T, N = token_idxs.shape\n S = seq_len\n\n # begin potential forloop (from T to S)\n print(""token_idxs shape:"", token_idxs.shape)\n pad_shape = (B, S - T, N)\n pad = jnp.zeros(pad_shape, dtype=token_idxs.dtype)\n token_idxs = jnp.concatenate([token_idxs, pad], axis=1)\n\n print(""token_idxs shape:"", token_idxs.shape)\n action_tokens = self.lam.vq.get_codes(batch[""latent_actions""])\n print(""action_tokens.shape:"", action_tokens.shape) # (B, S, A, D)\n\n # Create a mask that is 1 (True) where we just padded\n # token_idxs shape: (B, S, N), T = original length, S = seq_len\n # mask is True for padded positions (i.e., t >= T)\n mask = (jnp.arange(S)[None, :, None] >= T) # shape (1, S, 1)\n mask = jnp.broadcast_to(mask, (B, S, N)) # shape (B, S, N)\n init_mask = mask.astype(bool)",python,selection_mouse +2684,1516831,"genie.py",3400,884," B, T, N = token_idxs.shape\n S = seq_len\n\n # begin potential forloop (from T to S)\n print(""token_idxs shape:"", token_idxs.shape)\n pad_shape = (B, S - T, N)\n pad = jnp.zeros(pad_shape, dtype=token_idxs.dtype)\n token_idxs = jnp.concatenate([token_idxs, pad], axis=1)\n\n print(""token_idxs shape:"", token_idxs.shape)\n action_tokens = self.lam.vq.get_codes(batch[""latent_actions""])\n print(""action_tokens.shape:"", action_tokens.shape) # (B, S, A, D)\n\n # Create a mask that is 1 (True) where we just padded\n # token_idxs shape: (B, S, N), T = original length, S = seq_len\n # mask is True for padded positions (i.e., t >= T)\n mask = (jnp.arange(S)[None, :, None] >= T) # shape (1, S, 1)\n mask = jnp.broadcast_to(mask, (B, S, N)) # shape (B, S, N)\n init_mask = mask.astype(bool)",python,selection_mouse +2685,1516831,"genie.py",3435,849," S = seq_len\n\n # begin potential forloop (from T to S)\n print(""token_idxs shape:"", token_idxs.shape)\n pad_shape = (B, S - T, N)\n pad = jnp.zeros(pad_shape, dtype=token_idxs.dtype)\n token_idxs = jnp.concatenate([token_idxs, pad], axis=1)\n\n print(""token_idxs shape:"", token_idxs.shape)\n action_tokens = self.lam.vq.get_codes(batch[""latent_actions""])\n print(""action_tokens.shape:"", action_tokens.shape) # (B, S, A, D)\n\n # Create a mask that is 1 (True) where we just padded\n # token_idxs shape: (B, S, N), T = original length, S = seq_len\n # mask is True for padded positions (i.e., t >= T)\n mask = (jnp.arange(S)[None, :, None] >= T) # shape (1, S, 1)\n 
mask = jnp.broadcast_to(mask, (B, S, N)) # shape (B, S, N)\n init_mask = mask.astype(bool)",python,selection_mouse +2686,1516832,"genie.py",3456,828," # begin potential forloop (from T to S)\n print(""token_idxs shape:"", token_idxs.shape)\n pad_shape = (B, S - T, N)\n pad = jnp.zeros(pad_shape, dtype=token_idxs.dtype)\n token_idxs = jnp.concatenate([token_idxs, pad], axis=1)\n\n print(""token_idxs shape:"", token_idxs.shape)\n action_tokens = self.lam.vq.get_codes(batch[""latent_actions""])\n print(""action_tokens.shape:"", action_tokens.shape) # (B, S, A, D)\n\n # Create a mask that is 1 (True) where we just padded\n # token_idxs shape: (B, S, N), T = original length, S = seq_len\n # mask is True for padded positions (i.e., t >= T)\n mask = (jnp.arange(S)[None, :, None] >= T) # shape (1, S, 1)\n mask = jnp.broadcast_to(mask, (B, S, N)) # shape (B, S, N)\n init_mask = mask.astype(bool)",python,selection_mouse +2687,1516878,"genie.py",3504,780," print(""token_idxs shape:"", token_idxs.shape)\n pad_shape = (B, S - T, N)\n pad = jnp.zeros(pad_shape, dtype=token_idxs.dtype)\n token_idxs = jnp.concatenate([token_idxs, pad], axis=1)\n\n print(""token_idxs shape:"", token_idxs.shape)\n action_tokens = self.lam.vq.get_codes(batch[""latent_actions""])\n print(""action_tokens.shape:"", action_tokens.shape) # (B, S, A, D)\n\n # Create a mask that is 1 (True) where we just padded\n # token_idxs shape: (B, S, N), T = original length, S = seq_len\n # mask is True for padded positions (i.e., t >= T)\n mask = (jnp.arange(S)[None, :, None] >= T) # shape (1, S, 1)\n mask = jnp.broadcast_to(mask, (B, S, N)) # shape (B, S, N)\n init_mask = mask.astype(bool)",python,selection_mouse +2688,1516878,"genie.py",3557,727," pad_shape = (B, S - T, N)\n pad = jnp.zeros(pad_shape, dtype=token_idxs.dtype)\n token_idxs = jnp.concatenate([token_idxs, pad], axis=1)\n\n print(""token_idxs shape:"", token_idxs.shape)\n action_tokens = self.lam.vq.get_codes(batch[""latent_actions""])\n print(""action_tokens.shape:"", action_tokens.shape) # (B, S, A, D)\n\n # Create a mask that is 1 (True) where we just padded\n # token_idxs shape: (B, S, N), T = original length, S = seq_len\n # mask is True for padded positions (i.e., t >= T)\n mask = (jnp.arange(S)[None, :, None] >= T) # shape (1, S, 1)\n mask = jnp.broadcast_to(mask, (B, S, N)) # shape (B, S, N)\n init_mask = mask.astype(bool)",python,selection_mouse +2689,1516993,"genie.py",3591,693," pad = jnp.zeros(pad_shape, dtype=token_idxs.dtype)\n token_idxs = jnp.concatenate([token_idxs, pad], axis=1)\n\n print(""token_idxs shape:"", token_idxs.shape)\n action_tokens = self.lam.vq.get_codes(batch[""latent_actions""])\n print(""action_tokens.shape:"", action_tokens.shape) # (B, S, A, D)\n\n # Create a mask that is 1 (True) where we just padded\n # token_idxs shape: (B, S, N), T = original length, S = seq_len\n # mask is True for padded positions (i.e., t >= T)\n mask = (jnp.arange(S)[None, :, None] >= T) # shape (1, S, 1)\n mask = jnp.broadcast_to(mask, (B, S, N)) # shape (B, S, N)\n init_mask = mask.astype(bool)",python,selection_mouse +2690,1517227,"genie.py",3557,727," pad_shape = (B, S - T, N)\n pad = jnp.zeros(pad_shape, dtype=token_idxs.dtype)\n token_idxs = jnp.concatenate([token_idxs, pad], axis=1)\n\n print(""token_idxs shape:"", token_idxs.shape)\n action_tokens = self.lam.vq.get_codes(batch[""latent_actions""])\n print(""action_tokens.shape:"", action_tokens.shape) # (B, S, A, D)\n\n # Create a mask that is 1 (True) where we just padded\n # token_idxs shape: (B, S, N), T = 
original length, S = seq_len\n # mask is True for padded positions (i.e., t >= T)\n mask = (jnp.arange(S)[None, :, None] >= T) # shape (1, S, 1)\n mask = jnp.broadcast_to(mask, (B, S, N)) # shape (B, S, N)\n init_mask = mask.astype(bool)",python,selection_mouse +2691,1517361,"genie.py",3504,780," print(""token_idxs shape:"", token_idxs.shape)\n pad_shape = (B, S - T, N)\n pad = jnp.zeros(pad_shape, dtype=token_idxs.dtype)\n token_idxs = jnp.concatenate([token_idxs, pad], axis=1)\n\n print(""token_idxs shape:"", token_idxs.shape)\n action_tokens = self.lam.vq.get_codes(batch[""latent_actions""])\n print(""action_tokens.shape:"", action_tokens.shape) # (B, S, A, D)\n\n # Create a mask that is 1 (True) where we just padded\n # token_idxs shape: (B, S, N), T = original length, S = seq_len\n # mask is True for padded positions (i.e., t >= T)\n mask = (jnp.arange(S)[None, :, None] >= T) # shape (1, S, 1)\n mask = jnp.broadcast_to(mask, (B, S, N)) # shape (B, S, N)\n init_mask = mask.astype(bool)",python,selection_mouse +2692,1517362,"genie.py",3456,828," # begin potential forloop (from T to S)\n print(""token_idxs shape:"", token_idxs.shape)\n pad_shape = (B, S - T, N)\n pad = jnp.zeros(pad_shape, dtype=token_idxs.dtype)\n token_idxs = jnp.concatenate([token_idxs, pad], axis=1)\n\n print(""token_idxs shape:"", token_idxs.shape)\n action_tokens = self.lam.vq.get_codes(batch[""latent_actions""])\n print(""action_tokens.shape:"", action_tokens.shape) # (B, S, A, D)\n\n # Create a mask that is 1 (True) where we just padded\n # token_idxs shape: (B, S, N), T = original length, S = seq_len\n # mask is True for padded positions (i.e., t >= T)\n mask = (jnp.arange(S)[None, :, None] >= T) # shape (1, S, 1)\n mask = jnp.broadcast_to(mask, (B, S, N)) # shape (B, S, N)\n init_mask = mask.astype(bool)",python,selection_mouse +2693,1517362,"genie.py",3435,849," S = seq_len\n\n # begin potential forloop (from T to S)\n print(""token_idxs shape:"", token_idxs.shape)\n pad_shape = (B, S - T, N)\n pad = jnp.zeros(pad_shape, dtype=token_idxs.dtype)\n token_idxs = jnp.concatenate([token_idxs, pad], axis=1)\n\n print(""token_idxs shape:"", token_idxs.shape)\n action_tokens = self.lam.vq.get_codes(batch[""latent_actions""])\n print(""action_tokens.shape:"", action_tokens.shape) # (B, S, A, D)\n\n # Create a mask that is 1 (True) where we just padded\n # token_idxs shape: (B, S, N), T = original length, S = seq_len\n # mask is True for padded positions (i.e., t >= T)\n mask = (jnp.arange(S)[None, :, None] >= T) # shape (1, S, 1)\n mask = jnp.broadcast_to(mask, (B, S, N)) # shape (B, S, N)\n init_mask = mask.astype(bool)",python,selection_mouse +2694,1517362,"genie.py",3400,884," B, T, N = token_idxs.shape\n S = seq_len\n\n # begin potential forloop (from T to S)\n print(""token_idxs shape:"", token_idxs.shape)\n pad_shape = (B, S - T, N)\n pad = jnp.zeros(pad_shape, dtype=token_idxs.dtype)\n token_idxs = jnp.concatenate([token_idxs, pad], axis=1)\n\n print(""token_idxs shape:"", token_idxs.shape)\n action_tokens = self.lam.vq.get_codes(batch[""latent_actions""])\n print(""action_tokens.shape:"", action_tokens.shape) # (B, S, A, D)\n\n # Create a mask that is 1 (True) where we just padded\n # token_idxs shape: (B, S, N), T = original length, S = seq_len\n # mask is True for padded positions (i.e., t >= T)\n mask = (jnp.arange(S)[None, :, None] >= T) # shape (1, S, 1)\n mask = jnp.broadcast_to(mask, (B, S, N)) # shape (B, S, N)\n init_mask = mask.astype(bool)",python,selection_mouse +2695,1517363,"genie.py",3342,942," 
token_idxs = tokenizer_out[""indices""] # (B, T, N)\n B, T, N = token_idxs.shape\n S = seq_len\n\n # begin potential forloop (from T to S)\n print(""token_idxs shape:"", token_idxs.shape)\n pad_shape = (B, S - T, N)\n pad = jnp.zeros(pad_shape, dtype=token_idxs.dtype)\n token_idxs = jnp.concatenate([token_idxs, pad], axis=1)\n\n print(""token_idxs shape:"", token_idxs.shape)\n action_tokens = self.lam.vq.get_codes(batch[""latent_actions""])\n print(""action_tokens.shape:"", action_tokens.shape) # (B, S, A, D)\n\n # Create a mask that is 1 (True) where we just padded\n # token_idxs shape: (B, S, N), T = original length, S = seq_len\n # mask is True for padded positions (i.e., t >= T)\n mask = (jnp.arange(S)[None, :, None] >= T) # shape (1, S, 1)\n mask = jnp.broadcast_to(mask, (B, S, N)) # shape (B, S, N)\n init_mask = mask.astype(bool)",python,selection_mouse +2696,1517363,"genie.py",3260,1024," tokenizer_out = self.tokenizer.vq_encode(batch[""videos""], training=False)\n token_idxs = tokenizer_out[""indices""] # (B, T, N)\n B, T, N = token_idxs.shape\n S = seq_len\n\n # begin potential forloop (from T to S)\n print(""token_idxs shape:"", token_idxs.shape)\n pad_shape = (B, S - T, N)\n pad = jnp.zeros(pad_shape, dtype=token_idxs.dtype)\n token_idxs = jnp.concatenate([token_idxs, pad], axis=1)\n\n print(""token_idxs shape:"", token_idxs.shape)\n action_tokens = self.lam.vq.get_codes(batch[""latent_actions""])\n print(""action_tokens.shape:"", action_tokens.shape) # (B, S, A, D)\n\n # Create a mask that is 1 (True) where we just padded\n # token_idxs shape: (B, S, N), T = original length, S = seq_len\n # mask is True for padded positions (i.e., t >= T)\n mask = (jnp.arange(S)[None, :, None] >= T) # shape (1, S, 1)\n mask = jnp.broadcast_to(mask, (B, S, N)) # shape (B, S, N)\n init_mask = mask.astype(bool)",python,selection_mouse +2697,1517511,"TERMINAL",0,0,"4",,terminal_output +2698,1518557,"TERMINAL",0,0,"5",,terminal_output +2699,1519311,"genie.py",3475,0,"",python,selection_mouse +2700,1519612,"TERMINAL",0,0,"6",,terminal_output +2701,1519866,"genie.py",4150,0,"",python,selection_mouse +2702,1520451,"genie.py",4284,0,"",python,selection_mouse +2703,1520452,"genie.py",4283,0,"",python,selection_command +2704,1520759,"genie.py",4283,1,")",python,selection_mouse +2705,1520760,"genie.py",4080,203,"d positions (i.e., t >= T)\n mask = (jnp.arange(S)[None, :, None] >= T) # shape (1, S, 1)\n mask = jnp.broadcast_to(mask, (B, S, N)) # shape (B, S, N)\n init_mask = mask.astype(bool",python,selection_mouse +2706,1520760,"genie.py",3606,677,"np.zeros(pad_shape, dtype=token_idxs.dtype)\n token_idxs = jnp.concatenate([token_idxs, pad], axis=1)\n\n print(""token_idxs shape:"", token_idxs.shape)\n action_tokens = self.lam.vq.get_codes(batch[""latent_actions""])\n print(""action_tokens.shape:"", action_tokens.shape) # (B, S, A, D)\n\n # Create a mask that is 1 (True) where we just padded\n # token_idxs shape: (B, S, N), T = original length, S = seq_len\n # mask is True for padded positions (i.e., t >= T)\n mask = (jnp.arange(S)[None, :, None] >= T) # shape (1, S, 1)\n mask = jnp.broadcast_to(mask, (B, S, N)) # shape (B, S, N)\n init_mask = mask.astype(bool",python,selection_mouse +2707,1520760,"genie.py",3465,818," begin potential forloop (from T to S)\n print(""token_idxs shape:"", token_idxs.shape)\n pad_shape = (B, S - T, N)\n pad = jnp.zeros(pad_shape, dtype=token_idxs.dtype)\n token_idxs = jnp.concatenate([token_idxs, pad], axis=1)\n\n print(""token_idxs shape:"", token_idxs.shape)\n action_tokens = 
self.lam.vq.get_codes(batch[""latent_actions""])\n print(""action_tokens.shape:"", action_tokens.shape) # (B, S, A, D)\n\n # Create a mask that is 1 (True) where we just padded\n # token_idxs shape: (B, S, N), T = original length, S = seq_len\n # mask is True for padded positions (i.e., t >= T)\n mask = (jnp.arange(S)[None, :, None] >= T) # shape (1, S, 1)\n mask = jnp.broadcast_to(mask, (B, S, N)) # shape (B, S, N)\n init_mask = mask.astype(bool",python,selection_mouse +2708,1520761,"genie.py",3455,828,"\n # begin potential forloop (from T to S)\n print(""token_idxs shape:"", token_idxs.shape)\n pad_shape = (B, S - T, N)\n pad = jnp.zeros(pad_shape, dtype=token_idxs.dtype)\n token_idxs = jnp.concatenate([token_idxs, pad], axis=1)\n\n print(""token_idxs shape:"", token_idxs.shape)\n action_tokens = self.lam.vq.get_codes(batch[""latent_actions""])\n print(""action_tokens.shape:"", action_tokens.shape) # (B, S, A, D)\n\n # Create a mask that is 1 (True) where we just padded\n # token_idxs shape: (B, S, N), T = original length, S = seq_len\n # mask is True for padded positions (i.e., t >= T)\n mask = (jnp.arange(S)[None, :, None] >= T) # shape (1, S, 1)\n mask = jnp.broadcast_to(mask, (B, S, N)) # shape (B, S, N)\n init_mask = mask.astype(bool",python,selection_mouse +2709,1520761,"genie.py",3442,841," S = seq_len\n\n # begin potential forloop (from T to S)\n print(""token_idxs shape:"", token_idxs.shape)\n pad_shape = (B, S - T, N)\n pad = jnp.zeros(pad_shape, dtype=token_idxs.dtype)\n token_idxs = jnp.concatenate([token_idxs, pad], axis=1)\n\n print(""token_idxs shape:"", token_idxs.shape)\n action_tokens = self.lam.vq.get_codes(batch[""latent_actions""])\n print(""action_tokens.shape:"", action_tokens.shape) # (B, S, A, D)\n\n # Create a mask that is 1 (True) where we just padded\n # token_idxs shape: (B, S, N), T = original length, S = seq_len\n # mask is True for padded positions (i.e., t >= T)\n mask = (jnp.arange(S)[None, :, None] >= T) # shape (1, S, 1)\n mask = jnp.broadcast_to(mask, (B, S, N)) # shape (B, S, N)\n init_mask = mask.astype(bool",python,selection_mouse +2710,1520764,"genie.py",4284,0,"",python,selection_command +2711,1520765,"TERMINAL",0,0,"7",,terminal_output +2712,1520823,"genie.py",3442,842," S = seq_len\n\n # begin potential forloop (from T to S)\n print(""token_idxs shape:"", token_idxs.shape)\n pad_shape = (B, S - T, N)\n pad = jnp.zeros(pad_shape, dtype=token_idxs.dtype)\n token_idxs = jnp.concatenate([token_idxs, pad], axis=1)\n\n print(""token_idxs shape:"", token_idxs.shape)\n action_tokens = self.lam.vq.get_codes(batch[""latent_actions""])\n print(""action_tokens.shape:"", action_tokens.shape) # (B, S, A, D)\n\n # Create a mask that is 1 (True) where we just padded\n # token_idxs shape: (B, S, N), T = original length, S = seq_len\n # mask is True for padded positions (i.e., t >= T)\n mask = (jnp.arange(S)[None, :, None] >= T) # shape (1, S, 1)\n mask = jnp.broadcast_to(mask, (B, S, N)) # shape (B, S, N)\n init_mask = mask.astype(bool)",python,selection_mouse +2713,1520941,"genie.py",3441,843," S = seq_len\n\n # begin potential forloop (from T to S)\n print(""token_idxs shape:"", token_idxs.shape)\n pad_shape = (B, S - T, N)\n pad = jnp.zeros(pad_shape, dtype=token_idxs.dtype)\n token_idxs = jnp.concatenate([token_idxs, pad], axis=1)\n\n print(""token_idxs shape:"", token_idxs.shape)\n action_tokens = self.lam.vq.get_codes(batch[""latent_actions""])\n print(""action_tokens.shape:"", action_tokens.shape) # (B, S, A, D)\n\n # Create a mask that is 1 (True) where 
we just padded\n # token_idxs shape: (B, S, N), T = original length, S = seq_len\n # mask is True for padded positions (i.e., t >= T)\n mask = (jnp.arange(S)[None, :, None] >= T) # shape (1, S, 1)\n mask = jnp.broadcast_to(mask, (B, S, N)) # shape (B, S, N)\n init_mask = mask.astype(bool)",python,selection_mouse +2714,1521058,"genie.py",3440,844," S = seq_len\n\n # begin potential forloop (from T to S)\n print(""token_idxs shape:"", token_idxs.shape)\n pad_shape = (B, S - T, N)\n pad = jnp.zeros(pad_shape, dtype=token_idxs.dtype)\n token_idxs = jnp.concatenate([token_idxs, pad], axis=1)\n\n print(""token_idxs shape:"", token_idxs.shape)\n action_tokens = self.lam.vq.get_codes(batch[""latent_actions""])\n print(""action_tokens.shape:"", action_tokens.shape) # (B, S, A, D)\n\n # Create a mask that is 1 (True) where we just padded\n # token_idxs shape: (B, S, N), T = original length, S = seq_len\n # mask is True for padded positions (i.e., t >= T)\n mask = (jnp.arange(S)[None, :, None] >= T) # shape (1, S, 1)\n mask = jnp.broadcast_to(mask, (B, S, N)) # shape (B, S, N)\n init_mask = mask.astype(bool)",python,selection_mouse +2715,1521059,"genie.py",3439,845," S = seq_len\n\n # begin potential forloop (from T to S)\n print(""token_idxs shape:"", token_idxs.shape)\n pad_shape = (B, S - T, N)\n pad = jnp.zeros(pad_shape, dtype=token_idxs.dtype)\n token_idxs = jnp.concatenate([token_idxs, pad], axis=1)\n\n print(""token_idxs shape:"", token_idxs.shape)\n action_tokens = self.lam.vq.get_codes(batch[""latent_actions""])\n print(""action_tokens.shape:"", action_tokens.shape) # (B, S, A, D)\n\n # Create a mask that is 1 (True) where we just padded\n # token_idxs shape: (B, S, N), T = original length, S = seq_len\n # mask is True for padded positions (i.e., t >= T)\n mask = (jnp.arange(S)[None, :, None] >= T) # shape (1, S, 1)\n mask = jnp.broadcast_to(mask, (B, S, N)) # shape (B, S, N)\n init_mask = mask.astype(bool)",python,selection_mouse +2716,1521059,"genie.py",3404,880," B, T, N = token_idxs.shape\n S = seq_len\n\n # begin potential forloop (from T to S)\n print(""token_idxs shape:"", token_idxs.shape)\n pad_shape = (B, S - T, N)\n pad = jnp.zeros(pad_shape, dtype=token_idxs.dtype)\n token_idxs = jnp.concatenate([token_idxs, pad], axis=1)\n\n print(""token_idxs shape:"", token_idxs.shape)\n action_tokens = self.lam.vq.get_codes(batch[""latent_actions""])\n print(""action_tokens.shape:"", action_tokens.shape) # (B, S, A, D)\n\n # Create a mask that is 1 (True) where we just padded\n # token_idxs shape: (B, S, N), T = original length, S = seq_len\n # mask is True for padded positions (i.e., t >= T)\n mask = (jnp.arange(S)[None, :, None] >= T) # shape (1, S, 1)\n mask = jnp.broadcast_to(mask, (B, S, N)) # shape (B, S, N)\n init_mask = mask.astype(bool)",python,selection_mouse +2717,1521228,"genie.py",3438,846," S = seq_len\n\n # begin potential forloop (from T to S)\n print(""token_idxs shape:"", token_idxs.shape)\n pad_shape = (B, S - T, N)\n pad = jnp.zeros(pad_shape, dtype=token_idxs.dtype)\n token_idxs = jnp.concatenate([token_idxs, pad], axis=1)\n\n print(""token_idxs shape:"", token_idxs.shape)\n action_tokens = self.lam.vq.get_codes(batch[""latent_actions""])\n print(""action_tokens.shape:"", action_tokens.shape) # (B, S, A, D)\n\n # Create a mask that is 1 (True) where we just padded\n # token_idxs shape: (B, S, N), T = original length, S = seq_len\n # mask is True for padded positions (i.e., t >= T)\n mask = (jnp.arange(S)[None, :, None] >= T) # shape (1, S, 1)\n mask = 
jnp.broadcast_to(mask, (B, S, N)) # shape (B, S, N)\n init_mask = mask.astype(bool)",python,selection_mouse +2718,1521229,"genie.py",3459,825," # begin potential forloop (from T to S)\n print(""token_idxs shape:"", token_idxs.shape)\n pad_shape = (B, S - T, N)\n pad = jnp.zeros(pad_shape, dtype=token_idxs.dtype)\n token_idxs = jnp.concatenate([token_idxs, pad], axis=1)\n\n print(""token_idxs shape:"", token_idxs.shape)\n action_tokens = self.lam.vq.get_codes(batch[""latent_actions""])\n print(""action_tokens.shape:"", action_tokens.shape) # (B, S, A, D)\n\n # Create a mask that is 1 (True) where we just padded\n # token_idxs shape: (B, S, N), T = original length, S = seq_len\n # mask is True for padded positions (i.e., t >= T)\n mask = (jnp.arange(S)[None, :, None] >= T) # shape (1, S, 1)\n mask = jnp.broadcast_to(mask, (B, S, N)) # shape (B, S, N)\n init_mask = mask.astype(bool)",python,selection_mouse +2719,1521229,"genie.py",3508,776," print(""token_idxs shape:"", token_idxs.shape)\n pad_shape = (B, S - T, N)\n pad = jnp.zeros(pad_shape, dtype=token_idxs.dtype)\n token_idxs = jnp.concatenate([token_idxs, pad], axis=1)\n\n print(""token_idxs shape:"", token_idxs.shape)\n action_tokens = self.lam.vq.get_codes(batch[""latent_actions""])\n print(""action_tokens.shape:"", action_tokens.shape) # (B, S, A, D)\n\n # Create a mask that is 1 (True) where we just padded\n # token_idxs shape: (B, S, N), T = original length, S = seq_len\n # mask is True for padded positions (i.e., t >= T)\n mask = (jnp.arange(S)[None, :, None] >= T) # shape (1, S, 1)\n mask = jnp.broadcast_to(mask, (B, S, N)) # shape (B, S, N)\n init_mask = mask.astype(bool)",python,selection_mouse +2720,1521230,"genie.py",3596,688," pad = jnp.zeros(pad_shape, dtype=token_idxs.dtype)\n token_idxs = jnp.concatenate([token_idxs, pad], axis=1)\n\n print(""token_idxs shape:"", token_idxs.shape)\n action_tokens = self.lam.vq.get_codes(batch[""latent_actions""])\n print(""action_tokens.shape:"", action_tokens.shape) # (B, S, A, D)\n\n # Create a mask that is 1 (True) where we just padded\n # token_idxs shape: (B, S, N), T = original length, S = seq_len\n # mask is True for padded positions (i.e., t >= T)\n mask = (jnp.arange(S)[None, :, None] >= T) # shape (1, S, 1)\n mask = jnp.broadcast_to(mask, (B, S, N)) # shape (B, S, N)\n init_mask = mask.astype(bool)",python,selection_mouse +2721,1521230,"genie.py",3657,627," token_idxs = jnp.concatenate([token_idxs, pad], axis=1)\n\n print(""token_idxs shape:"", token_idxs.shape)\n action_tokens = self.lam.vq.get_codes(batch[""latent_actions""])\n print(""action_tokens.shape:"", action_tokens.shape) # (B, S, A, D)\n\n # Create a mask that is 1 (True) where we just padded\n # token_idxs shape: (B, S, N), T = original length, S = seq_len\n # mask is True for padded positions (i.e., t >= T)\n mask = (jnp.arange(S)[None, :, None] >= T) # shape (1, S, 1)\n mask = jnp.broadcast_to(mask, (B, S, N)) # shape (B, S, N)\n init_mask = mask.astype(bool)",python,selection_mouse +2722,1521265,"genie.py",3725,559,"int(""token_idxs shape:"", token_idxs.shape)\n action_tokens = self.lam.vq.get_codes(batch[""latent_actions""])\n print(""action_tokens.shape:"", action_tokens.shape) # (B, S, A, D)\n\n # Create a mask that is 1 (True) where we just padded\n # token_idxs shape: (B, S, N), T = original length, S = seq_len\n # mask is True for padded positions (i.e., t >= T)\n mask = (jnp.arange(S)[None, :, None] >= T) # shape (1, S, 1)\n mask = jnp.broadcast_to(mask, (B, S, N)) # shape (B, S, N)\n init_mask = 
mask.astype(bool)",python,selection_mouse +2723,1521302,"genie.py",3782,502,"_tokens = self.lam.vq.get_codes(batch[""latent_actions""])\n print(""action_tokens.shape:"", action_tokens.shape) # (B, S, A, D)\n\n # Create a mask that is 1 (True) where we just padded\n # token_idxs shape: (B, S, N), T = original length, S = seq_len\n # mask is True for padded positions (i.e., t >= T)\n mask = (jnp.arange(S)[None, :, None] >= T) # shape (1, S, 1)\n mask = jnp.broadcast_to(mask, (B, S, N)) # shape (B, S, N)\n init_mask = mask.astype(bool)",python,selection_mouse +2724,1521349,"genie.py",3784,500,"okens = self.lam.vq.get_codes(batch[""latent_actions""])\n print(""action_tokens.shape:"", action_tokens.shape) # (B, S, A, D)\n\n # Create a mask that is 1 (True) where we just padded\n # token_idxs shape: (B, S, N), T = original length, S = seq_len\n # mask is True for padded positions (i.e., t >= T)\n mask = (jnp.arange(S)[None, :, None] >= T) # shape (1, S, 1)\n mask = jnp.broadcast_to(mask, (B, S, N)) # shape (B, S, N)\n init_mask = mask.astype(bool)",python,selection_mouse +2725,1521412,"genie.py",3714,570,"\n print(""token_idxs shape:"", token_idxs.shape)\n action_tokens = self.lam.vq.get_codes(batch[""latent_actions""])\n print(""action_tokens.shape:"", action_tokens.shape) # (B, S, A, D)\n\n # Create a mask that is 1 (True) where we just padded\n # token_idxs shape: (B, S, N), T = original length, S = seq_len\n # mask is True for padded positions (i.e., t >= T)\n mask = (jnp.arange(S)[None, :, None] >= T) # shape (1, S, 1)\n mask = jnp.broadcast_to(mask, (B, S, N)) # shape (B, S, N)\n init_mask = mask.astype(bool)",python,selection_mouse +2726,1521432,"genie.py",3565,719,"pad_shape = (B, S - T, N)\n pad = jnp.zeros(pad_shape, dtype=token_idxs.dtype)\n token_idxs = jnp.concatenate([token_idxs, pad], axis=1)\n\n print(""token_idxs shape:"", token_idxs.shape)\n action_tokens = self.lam.vq.get_codes(batch[""latent_actions""])\n print(""action_tokens.shape:"", action_tokens.shape) # (B, S, A, D)\n\n # Create a mask that is 1 (True) where we just padded\n # token_idxs shape: (B, S, N), T = original length, S = seq_len\n # mask is True for padded positions (i.e., t >= T)\n mask = (jnp.arange(S)[None, :, None] >= T) # shape (1, S, 1)\n mask = jnp.broadcast_to(mask, (B, S, N)) # shape (B, S, N)\n init_mask = mask.astype(bool)",python,selection_mouse +2727,1521463,"genie.py",3402,882," B, T, N = token_idxs.shape\n S = seq_len\n\n # begin potential forloop (from T to S)\n print(""token_idxs shape:"", token_idxs.shape)\n pad_shape = (B, S - T, N)\n pad = jnp.zeros(pad_shape, dtype=token_idxs.dtype)\n token_idxs = jnp.concatenate([token_idxs, pad], axis=1)\n\n print(""token_idxs shape:"", token_idxs.shape)\n action_tokens = self.lam.vq.get_codes(batch[""latent_actions""])\n print(""action_tokens.shape:"", action_tokens.shape) # (B, S, A, D)\n\n # Create a mask that is 1 (True) where we just padded\n # token_idxs shape: (B, S, N), T = original length, S = seq_len\n # mask is True for padded positions (i.e., t >= T)\n mask = (jnp.arange(S)[None, :, None] >= T) # shape (1, S, 1)\n mask = jnp.broadcast_to(mask, (B, S, N)) # shape (B, S, N)\n init_mask = mask.astype(bool)",python,selection_mouse +2728,1521499,"genie.py",3343,941," token_idxs = tokenizer_out[""indices""] # (B, T, N)\n B, T, N = token_idxs.shape\n S = seq_len\n\n # begin potential forloop (from T to S)\n print(""token_idxs shape:"", token_idxs.shape)\n pad_shape = (B, S - T, N)\n pad = jnp.zeros(pad_shape, dtype=token_idxs.dtype)\n token_idxs = 
jnp.concatenate([token_idxs, pad], axis=1)\n\n print(""token_idxs shape:"", token_idxs.shape)\n action_tokens = self.lam.vq.get_codes(batch[""latent_actions""])\n print(""action_tokens.shape:"", action_tokens.shape) # (B, S, A, D)\n\n # Create a mask that is 1 (True) where we just padded\n # token_idxs shape: (B, S, N), T = original length, S = seq_len\n # mask is True for padded positions (i.e., t >= T)\n mask = (jnp.arange(S)[None, :, None] >= T) # shape (1, S, 1)\n mask = jnp.broadcast_to(mask, (B, S, N)) # shape (B, S, N)\n init_mask = mask.astype(bool)",python,selection_mouse +2729,1521678,"TERMINAL",0,0,"8",,terminal_output +2730,1521843,"genie.py",3261,1023," tokenizer_out = self.tokenizer.vq_encode(batch[""videos""], training=False)\n token_idxs = tokenizer_out[""indices""] # (B, T, N)\n B, T, N = token_idxs.shape\n S = seq_len\n\n # begin potential forloop (from T to S)\n print(""token_idxs shape:"", token_idxs.shape)\n pad_shape = (B, S - T, N)\n pad = jnp.zeros(pad_shape, dtype=token_idxs.dtype)\n token_idxs = jnp.concatenate([token_idxs, pad], axis=1)\n\n print(""token_idxs shape:"", token_idxs.shape)\n action_tokens = self.lam.vq.get_codes(batch[""latent_actions""])\n print(""action_tokens.shape:"", action_tokens.shape) # (B, S, A, D)\n\n # Create a mask that is 1 (True) where we just padded\n # token_idxs shape: (B, S, N), T = original length, S = seq_len\n # mask is True for padded positions (i.e., t >= T)\n mask = (jnp.arange(S)[None, :, None] >= T) # shape (1, S, 1)\n mask = jnp.broadcast_to(mask, (B, S, N)) # shape (B, S, N)\n init_mask = mask.astype(bool)",python,selection_mouse +2731,1521923,"genie.py",3260,1024," tokenizer_out = self.tokenizer.vq_encode(batch[""videos""], training=False)\n token_idxs = tokenizer_out[""indices""] # (B, T, N)\n B, T, N = token_idxs.shape\n S = seq_len\n\n # begin potential forloop (from T to S)\n print(""token_idxs shape:"", token_idxs.shape)\n pad_shape = (B, S - T, N)\n pad = jnp.zeros(pad_shape, dtype=token_idxs.dtype)\n token_idxs = jnp.concatenate([token_idxs, pad], axis=1)\n\n print(""token_idxs shape:"", token_idxs.shape)\n action_tokens = self.lam.vq.get_codes(batch[""latent_actions""])\n print(""action_tokens.shape:"", action_tokens.shape) # (B, S, A, D)\n\n # Create a mask that is 1 (True) where we just padded\n # token_idxs shape: (B, S, N), T = original length, S = seq_len\n # mask is True for padded positions (i.e., t >= T)\n mask = (jnp.arange(S)[None, :, None] >= T) # shape (1, S, 1)\n mask = jnp.broadcast_to(mask, (B, S, N)) # shape (B, S, N)\n init_mask = mask.astype(bool)",python,selection_mouse +2732,1521963,"genie.py",3216,1068," # --- Encode videos and actions ---\n tokenizer_out = self.tokenizer.vq_encode(batch[""videos""], training=False)\n token_idxs = tokenizer_out[""indices""] # (B, T, N)\n B, T, N = token_idxs.shape\n S = seq_len\n\n # begin potential forloop (from T to S)\n print(""token_idxs shape:"", token_idxs.shape)\n pad_shape = (B, S - T, N)\n pad = jnp.zeros(pad_shape, dtype=token_idxs.dtype)\n token_idxs = jnp.concatenate([token_idxs, pad], axis=1)\n\n print(""token_idxs shape:"", token_idxs.shape)\n action_tokens = self.lam.vq.get_codes(batch[""latent_actions""])\n print(""action_tokens.shape:"", action_tokens.shape) # (B, S, A, D)\n\n # Create a mask that is 1 (True) where we just padded\n # token_idxs shape: (B, S, N), T = original length, S = seq_len\n # mask is True for padded positions (i.e., t >= T)\n mask = (jnp.arange(S)[None, :, None] >= T) # shape (1, S, 1)\n mask = jnp.broadcast_to(mask, 
(B, S, N)) # shape (B, S, N)\n init_mask = mask.astype(bool)",python,selection_mouse +2733,1522715,"TERMINAL",0,0,"9",,terminal_output +2734,1523517,"genie.py",3217,0,"",python,selection_mouse +2735,1523518,"genie.py",3216,8," ",python,selection_mouse +2736,1523778,"genie.py",3216,46," # --- Encode videos and actions ---\n ",python,selection_mouse +2737,1523778,"genie.py",3216,130," # --- Encode videos and actions ---\n tokenizer_out = self.tokenizer.vq_encode(batch[""videos""], training=False)\n ",python,selection_mouse +2738,1523779,"genie.py",3216,229," # --- Encode videos and actions ---\n tokenizer_out = self.tokenizer.vq_encode(batch[""videos""], training=False)\n token_idxs = tokenizer_out[""indices""] # (B, T, N)\n B, T, N = token_idxs.shape\n S ",python,selection_mouse +2739,1523779,"genie.py",3216,256," # --- Encode videos and actions ---\n tokenizer_out = self.tokenizer.vq_encode(batch[""videos""], training=False)\n token_idxs = tokenizer_out[""indices""] # (B, T, N)\n B, T, N = token_idxs.shape\n S = seq_len\n\n # begin ",python,selection_mouse +2740,1523780,"genie.py",3216,364," # --- Encode videos and actions ---\n tokenizer_out = self.tokenizer.vq_encode(batch[""videos""], training=False)\n token_idxs = tokenizer_out[""indices""] # (B, T, N)\n B, T, N = token_idxs.shape\n S = seq_len\n\n # begin potential forloop (from T to S)\n print(""token_idxs shape:"", token_idxs.shape)\n pad_shape = (B,",python,selection_mouse +2741,1523780,"genie.py",3216,408," # --- Encode videos and actions ---\n tokenizer_out = self.tokenizer.vq_encode(batch[""videos""], training=False)\n token_idxs = tokenizer_out[""indices""] # (B, T, N)\n B, T, N = token_idxs.shape\n S = seq_len\n\n # begin potential forloop (from T to S)\n print(""token_idxs shape:"", token_idxs.shape)\n pad_shape = (B, S - T, N)\n pad = jnp.zeros(pad_shape",python,selection_mouse +2742,1523812,"genie.py",3216,470," # --- Encode videos and actions ---\n tokenizer_out = self.tokenizer.vq_encode(batch[""videos""], training=False)\n token_idxs = tokenizer_out[""indices""] # (B, T, N)\n B, T, N = token_idxs.shape\n S = seq_len\n\n # begin potential forloop (from T to S)\n print(""token_idxs shape:"", token_idxs.shape)\n pad_shape = (B, S - T, N)\n pad = jnp.zeros(pad_shape, dtype=token_idxs.dtype)\n token_idxs = jnp.concatenate",python,selection_mouse +2743,1523856,"genie.py",3216,534," # --- Encode videos and actions ---\n tokenizer_out = self.tokenizer.vq_encode(batch[""videos""], training=False)\n token_idxs = tokenizer_out[""indices""] # (B, T, N)\n B, T, N = token_idxs.shape\n S = seq_len\n\n # begin potential forloop (from T to S)\n print(""token_idxs shape:"", token_idxs.shape)\n pad_shape = (B, S - T, N)\n pad = jnp.zeros(pad_shape, dtype=token_idxs.dtype)\n token_idxs = jnp.concatenate([token_idxs, pad], axis=1)\n\n print(""token_idxs shape:"", ",python,selection_mouse +2744,1523874,"genie.py",3216,597," # --- Encode videos and actions ---\n tokenizer_out = self.tokenizer.vq_encode(batch[""videos""], training=False)\n token_idxs = tokenizer_out[""indices""] # (B, T, N)\n B, T, N = token_idxs.shape\n S = seq_len\n\n # begin potential forloop (from T to S)\n print(""token_idxs shape:"", token_idxs.shape)\n pad_shape = (B, S - T, N)\n pad = jnp.zeros(pad_shape, dtype=token_idxs.dtype)\n token_idxs = jnp.concatenate([token_idxs, pad], axis=1)\n\n print(""token_idxs shape:"", token_idxs.shape)\n action_tokens = self.lam.vq.get_codes",python,selection_mouse +2745,1523904,"genie.py",3216,697," # --- Encode videos and actions 
---\n tokenizer_out = self.tokenizer.vq_encode(batch[""videos""], training=False)\n token_idxs = tokenizer_out[""indices""] # (B, T, N)\n B, T, N = token_idxs.shape\n S = seq_len\n\n # begin potential forloop (from T to S)\n print(""token_idxs shape:"", token_idxs.shape)\n pad_shape = (B, S - T, N)\n pad = jnp.zeros(pad_shape, dtype=token_idxs.dtype)\n token_idxs = jnp.concatenate([token_idxs, pad], axis=1)\n\n print(""token_idxs shape:"", token_idxs.shape)\n action_tokens = self.lam.vq.get_codes(batch[""latent_actions""])\n print(""action_tokens.shape:"", action_tokens.shape) # (B, S, A, D)\n",python,selection_mouse +2746,1523949,"genie.py",3216,744," # --- Encode videos and actions ---\n tokenizer_out = self.tokenizer.vq_encode(batch[""videos""], training=False)\n token_idxs = tokenizer_out[""indices""] # (B, T, N)\n B, T, N = token_idxs.shape\n S = seq_len\n\n # begin potential forloop (from T to S)\n print(""token_idxs shape:"", token_idxs.shape)\n pad_shape = (B, S - T, N)\n pad = jnp.zeros(pad_shape, dtype=token_idxs.dtype)\n token_idxs = jnp.concatenate([token_idxs, pad], axis=1)\n\n print(""token_idxs shape:"", token_idxs.shape)\n action_tokens = self.lam.vq.get_codes(batch[""latent_actions""])\n print(""action_tokens.shape:"", action_tokens.shape) # (B, S, A, D)\n\n # Create a mask that is 1 (True) where",python,selection_mouse +2747,1523950,"TERMINAL",0,0,"1:00",,terminal_output +2748,1523967,"genie.py",3216,747," # --- Encode videos and actions ---\n tokenizer_out = self.tokenizer.vq_encode(batch[""videos""], training=False)\n token_idxs = tokenizer_out[""indices""] # (B, T, N)\n B, T, N = token_idxs.shape\n S = seq_len\n\n # begin potential forloop (from T to S)\n print(""token_idxs shape:"", token_idxs.shape)\n pad_shape = (B, S - T, N)\n pad = jnp.zeros(pad_shape, dtype=token_idxs.dtype)\n token_idxs = jnp.concatenate([token_idxs, pad], axis=1)\n\n print(""token_idxs shape:"", token_idxs.shape)\n action_tokens = self.lam.vq.get_codes(batch[""latent_actions""])\n print(""action_tokens.shape:"", action_tokens.shape) # (B, S, A, D)\n\n # Create a mask that is 1 (True) where we",python,selection_mouse +2749,1523995,"genie.py",3216,811," # --- Encode videos and actions ---\n tokenizer_out = self.tokenizer.vq_encode(batch[""videos""], training=False)\n token_idxs = tokenizer_out[""indices""] # (B, T, N)\n B, T, N = token_idxs.shape\n S = seq_len\n\n # begin potential forloop (from T to S)\n print(""token_idxs shape:"", token_idxs.shape)\n pad_shape = (B, S - T, N)\n pad = jnp.zeros(pad_shape, dtype=token_idxs.dtype)\n token_idxs = jnp.concatenate([token_idxs, pad], axis=1)\n\n print(""token_idxs shape:"", token_idxs.shape)\n action_tokens = self.lam.vq.get_codes(batch[""latent_actions""])\n print(""action_tokens.shape:"", action_tokens.shape) # (B, S, A, D)\n\n # Create a mask that is 1 (True) where we just padded\n # token_idxs shape: (B, S, N), T = original",python,selection_mouse +2750,1524022,"genie.py",3216,883," # --- Encode videos and actions ---\n tokenizer_out = self.tokenizer.vq_encode(batch[""videos""], training=False)\n token_idxs = tokenizer_out[""indices""] # (B, T, N)\n B, T, N = token_idxs.shape\n S = seq_len\n\n # begin potential forloop (from T to S)\n print(""token_idxs shape:"", token_idxs.shape)\n pad_shape = (B, S - T, N)\n pad = jnp.zeros(pad_shape, dtype=token_idxs.dtype)\n token_idxs = jnp.concatenate([token_idxs, pad], axis=1)\n\n print(""token_idxs shape:"", token_idxs.shape)\n action_tokens = self.lam.vq.get_codes(batch[""latent_actions""])\n 
print(""action_tokens.shape:"", action_tokens.shape) # (B, S, A, D)\n\n # Create a mask that is 1 (True) where we just padded\n # token_idxs shape: (B, S, N), T = original length, S = seq_len\n # mask is True for padded positions (i.e., ",python,selection_mouse +2751,1524050,"genie.py",3216,884," # --- Encode videos and actions ---\n tokenizer_out = self.tokenizer.vq_encode(batch[""videos""], training=False)\n token_idxs = tokenizer_out[""indices""] # (B, T, N)\n B, T, N = token_idxs.shape\n S = seq_len\n\n # begin potential forloop (from T to S)\n print(""token_idxs shape:"", token_idxs.shape)\n pad_shape = (B, S - T, N)\n pad = jnp.zeros(pad_shape, dtype=token_idxs.dtype)\n token_idxs = jnp.concatenate([token_idxs, pad], axis=1)\n\n print(""token_idxs shape:"", token_idxs.shape)\n action_tokens = self.lam.vq.get_codes(batch[""latent_actions""])\n print(""action_tokens.shape:"", action_tokens.shape) # (B, S, A, D)\n\n # Create a mask that is 1 (True) where we just padded\n # token_idxs shape: (B, S, N), T = original length, S = seq_len\n # mask is True for padded positions (i.e., t",python,selection_mouse +2752,1524097,"genie.py",3216,944," # --- Encode videos and actions ---\n tokenizer_out = self.tokenizer.vq_encode(batch[""videos""], training=False)\n token_idxs = tokenizer_out[""indices""] # (B, T, N)\n B, T, N = token_idxs.shape\n S = seq_len\n\n # begin potential forloop (from T to S)\n print(""token_idxs shape:"", token_idxs.shape)\n pad_shape = (B, S - T, N)\n pad = jnp.zeros(pad_shape, dtype=token_idxs.dtype)\n token_idxs = jnp.concatenate([token_idxs, pad], axis=1)\n\n print(""token_idxs shape:"", token_idxs.shape)\n action_tokens = self.lam.vq.get_codes(batch[""latent_actions""])\n print(""action_tokens.shape:"", action_tokens.shape) # (B, S, A, D)\n\n # Create a mask that is 1 (True) where we just padded\n # token_idxs shape: (B, S, N), T = original length, S = seq_len\n # mask is True for padded positions (i.e., t >= T)\n mask = (jnp.arange(S)[None, :, None] >= T) #",python,selection_mouse +2753,1524167,"genie.py",3216,945," # --- Encode videos and actions ---\n tokenizer_out = self.tokenizer.vq_encode(batch[""videos""], training=False)\n token_idxs = tokenizer_out[""indices""] # (B, T, N)\n B, T, N = token_idxs.shape\n S = seq_len\n\n # begin potential forloop (from T to S)\n print(""token_idxs shape:"", token_idxs.shape)\n pad_shape = (B, S - T, N)\n pad = jnp.zeros(pad_shape, dtype=token_idxs.dtype)\n token_idxs = jnp.concatenate([token_idxs, pad], axis=1)\n\n print(""token_idxs shape:"", token_idxs.shape)\n action_tokens = self.lam.vq.get_codes(batch[""latent_actions""])\n print(""action_tokens.shape:"", action_tokens.shape) # (B, S, A, D)\n\n # Create a mask that is 1 (True) where we just padded\n # token_idxs shape: (B, S, N), T = original length, S = seq_len\n # mask is True for padded positions (i.e., t >= T)\n mask = (jnp.arange(S)[None, :, None] >= T) # ",python,selection_mouse +2754,1524191,"genie.py",3216,1015," # --- Encode videos and actions ---\n tokenizer_out = self.tokenizer.vq_encode(batch[""videos""], training=False)\n token_idxs = tokenizer_out[""indices""] # (B, T, N)\n B, T, N = token_idxs.shape\n S = seq_len\n\n # begin potential forloop (from T to S)\n print(""token_idxs shape:"", token_idxs.shape)\n pad_shape = (B, S - T, N)\n pad = jnp.zeros(pad_shape, dtype=token_idxs.dtype)\n token_idxs = jnp.concatenate([token_idxs, pad], axis=1)\n\n print(""token_idxs shape:"", token_idxs.shape)\n action_tokens = self.lam.vq.get_codes(batch[""latent_actions""])\n 
print(""action_tokens.shape:"", action_tokens.shape) # (B, S, A, D)\n\n # Create a mask that is 1 (True) where we just padded\n # token_idxs shape: (B, S, N), T = original length, S = seq_len\n # mask is True for padded positions (i.e., t >= T)\n mask = (jnp.arange(S)[None, :, None] >= T) # shape (1, S, 1)\n mask = jnp.broadcast_to(mask, (B, S, N)) # ",python,selection_mouse +2755,1524230,"genie.py",3216,1020," # --- Encode videos and actions ---\n tokenizer_out = self.tokenizer.vq_encode(batch[""videos""], training=False)\n token_idxs = tokenizer_out[""indices""] # (B, T, N)\n B, T, N = token_idxs.shape\n S = seq_len\n\n # begin potential forloop (from T to S)\n print(""token_idxs shape:"", token_idxs.shape)\n pad_shape = (B, S - T, N)\n pad = jnp.zeros(pad_shape, dtype=token_idxs.dtype)\n token_idxs = jnp.concatenate([token_idxs, pad], axis=1)\n\n print(""token_idxs shape:"", token_idxs.shape)\n action_tokens = self.lam.vq.get_codes(batch[""latent_actions""])\n print(""action_tokens.shape:"", action_tokens.shape) # (B, S, A, D)\n\n # Create a mask that is 1 (True) where we just padded\n # token_idxs shape: (B, S, N), T = original length, S = seq_len\n # mask is True for padded positions (i.e., t >= T)\n mask = (jnp.arange(S)[None, :, None] >= T) # shape (1, S, 1)\n mask = jnp.broadcast_to(mask, (B, S, N)) # shape",python,selection_mouse +2756,1524327,"genie.py",3216,1068," # --- Encode videos and actions ---\n tokenizer_out = self.tokenizer.vq_encode(batch[""videos""], training=False)\n token_idxs = tokenizer_out[""indices""] # (B, T, N)\n B, T, N = token_idxs.shape\n S = seq_len\n\n # begin potential forloop (from T to S)\n print(""token_idxs shape:"", token_idxs.shape)\n pad_shape = (B, S - T, N)\n pad = jnp.zeros(pad_shape, dtype=token_idxs.dtype)\n token_idxs = jnp.concatenate([token_idxs, pad], axis=1)\n\n print(""token_idxs shape:"", token_idxs.shape)\n action_tokens = self.lam.vq.get_codes(batch[""latent_actions""])\n print(""action_tokens.shape:"", action_tokens.shape) # (B, S, A, D)\n\n # Create a mask that is 1 (True) where we just padded\n # token_idxs shape: (B, S, N), T = original length, S = seq_len\n # mask is True for padded positions (i.e., t >= T)\n mask = (jnp.arange(S)[None, :, None] >= T) # shape (1, S, 1)\n mask = jnp.broadcast_to(mask, (B, S, N)) # shape (B, S, N)\n init_mask = mask.astype(bool)",python,selection_mouse +2757,1524697,"genie.py",4284,0,"",python,selection_mouse +2758,1524728,"genie.py",4283,0,"",python,selection_command +2759,1524812,"TERMINAL",0,0,"1",,terminal_output +2760,1524986,"genie.py",4284,0,"",python,selection_mouse +2761,1524987,"genie.py",4283,0,"",python,selection_command +2762,1525233,"genie.py",4283,1,")",python,selection_mouse +2763,1525233,"genie.py",4226,57," # shape (B, S, N)\n init_mask = mask.astype(bool",python,selection_mouse +2764,1525233,"genie.py",4018,265," original length, S = seq_len\n # mask is True for padded positions (i.e., t >= T)\n mask = (jnp.arange(S)[None, :, None] >= T) # shape (1, S, 1)\n mask = jnp.broadcast_to(mask, (B, S, N)) # shape (B, S, N)\n init_mask = mask.astype(bool",python,selection_mouse +2765,1525234,"genie.py",3801,482,"vq.get_codes(batch[""latent_actions""])\n print(""action_tokens.shape:"", action_tokens.shape) # (B, S, A, D)\n\n # Create a mask that is 1 (True) where we just padded\n # token_idxs shape: (B, S, N), T = original length, S = seq_len\n # mask is True for padded positions (i.e., t >= T)\n mask = (jnp.arange(S)[None, :, None] >= T) # shape (1, S, 1)\n mask = jnp.broadcast_to(mask, 
(B, S, N)) # shape (B, S, N)\n init_mask = mask.astype(bool",python,selection_mouse +2766,1525234,"genie.py",3714,569,"\n print(""token_idxs shape:"", token_idxs.shape)\n action_tokens = self.lam.vq.get_codes(batch[""latent_actions""])\n print(""action_tokens.shape:"", action_tokens.shape) # (B, S, A, D)\n\n # Create a mask that is 1 (True) where we just padded\n # token_idxs shape: (B, S, N), T = original length, S = seq_len\n # mask is True for padded positions (i.e., t >= T)\n mask = (jnp.arange(S)[None, :, None] >= T) # shape (1, S, 1)\n mask = jnp.broadcast_to(mask, (B, S, N)) # shape (B, S, N)\n init_mask = mask.astype(bool",python,selection_mouse +2767,1525340,"genie.py",3675,608,"concatenate([token_idxs, pad], axis=1)\n\n print(""token_idxs shape:"", token_idxs.shape)\n action_tokens = self.lam.vq.get_codes(batch[""latent_actions""])\n print(""action_tokens.shape:"", action_tokens.shape) # (B, S, A, D)\n\n # Create a mask that is 1 (True) where we just padded\n # token_idxs shape: (B, S, N), T = original length, S = seq_len\n # mask is True for padded positions (i.e., t >= T)\n mask = (jnp.arange(S)[None, :, None] >= T) # shape (1, S, 1)\n mask = jnp.broadcast_to(mask, (B, S, N)) # shape (B, S, N)\n init_mask = mask.astype(bool",python,selection_mouse +2768,1525341,"genie.py",3615,668,"pad_shape, dtype=token_idxs.dtype)\n token_idxs = jnp.concatenate([token_idxs, pad], axis=1)\n\n print(""token_idxs shape:"", token_idxs.shape)\n action_tokens = self.lam.vq.get_codes(batch[""latent_actions""])\n print(""action_tokens.shape:"", action_tokens.shape) # (B, S, A, D)\n\n # Create a mask that is 1 (True) where we just padded\n # token_idxs shape: (B, S, N), T = original length, S = seq_len\n # mask is True for padded positions (i.e., t >= T)\n mask = (jnp.arange(S)[None, :, None] >= T) # shape (1, S, 1)\n mask = jnp.broadcast_to(mask, (B, S, N)) # shape (B, S, N)\n init_mask = mask.astype(bool",python,selection_mouse +2769,1525341,"genie.py",3583,700,"- T, N)\n pad = jnp.zeros(pad_shape, dtype=token_idxs.dtype)\n token_idxs = jnp.concatenate([token_idxs, pad], axis=1)\n\n print(""token_idxs shape:"", token_idxs.shape)\n action_tokens = self.lam.vq.get_codes(batch[""latent_actions""])\n print(""action_tokens.shape:"", action_tokens.shape) # (B, S, A, D)\n\n # Create a mask that is 1 (True) where we just padded\n # token_idxs shape: (B, S, N), T = original length, S = seq_len\n # mask is True for padded positions (i.e., t >= T)\n mask = (jnp.arange(S)[None, :, None] >= T) # shape (1, S, 1)\n mask = jnp.broadcast_to(mask, (B, S, N)) # shape (B, S, N)\n init_mask = mask.astype(bool",python,selection_mouse +2770,1525341,"genie.py",3519,764,"token_idxs shape:"", token_idxs.shape)\n pad_shape = (B, S - T, N)\n pad = jnp.zeros(pad_shape, dtype=token_idxs.dtype)\n token_idxs = jnp.concatenate([token_idxs, pad], axis=1)\n\n print(""token_idxs shape:"", token_idxs.shape)\n action_tokens = self.lam.vq.get_codes(batch[""latent_actions""])\n print(""action_tokens.shape:"", action_tokens.shape) # (B, S, A, D)\n\n # Create a mask that is 1 (True) where we just padded\n # token_idxs shape: (B, S, N), T = original length, S = seq_len\n # mask is True for padded positions (i.e., t >= T)\n mask = (jnp.arange(S)[None, :, None] >= T) # shape (1, S, 1)\n mask = jnp.broadcast_to(mask, (B, S, N)) # shape (B, S, N)\n init_mask = mask.astype(bool",python,selection_mouse +2771,1525342,"genie.py",3472,811,"potential forloop (from T to S)\n print(""token_idxs shape:"", token_idxs.shape)\n pad_shape = (B, S - T, N)\n 
pad = jnp.zeros(pad_shape, dtype=token_idxs.dtype)\n token_idxs = jnp.concatenate([token_idxs, pad], axis=1)\n\n print(""token_idxs shape:"", token_idxs.shape)\n action_tokens = self.lam.vq.get_codes(batch[""latent_actions""])\n print(""action_tokens.shape:"", action_tokens.shape) # (B, S, A, D)\n\n # Create a mask that is 1 (True) where we just padded\n # token_idxs shape: (B, S, N), T = original length, S = seq_len\n # mask is True for padded positions (i.e., t >= T)\n mask = (jnp.arange(S)[None, :, None] >= T) # shape (1, S, 1)\n mask = jnp.broadcast_to(mask, (B, S, N)) # shape (B, S, N)\n init_mask = mask.astype(bool",python,selection_mouse +2772,1525342,"genie.py",4284,0,"",python,selection_command +2773,1525394,"genie.py",3472,812,"potential forloop (from T to S)\n print(""token_idxs shape:"", token_idxs.shape)\n pad_shape = (B, S - T, N)\n pad = jnp.zeros(pad_shape, dtype=token_idxs.dtype)\n token_idxs = jnp.concatenate([token_idxs, pad], axis=1)\n\n print(""token_idxs shape:"", token_idxs.shape)\n action_tokens = self.lam.vq.get_codes(batch[""latent_actions""])\n print(""action_tokens.shape:"", action_tokens.shape) # (B, S, A, D)\n\n # Create a mask that is 1 (True) where we just padded\n # token_idxs shape: (B, S, N), T = original length, S = seq_len\n # mask is True for padded positions (i.e., t >= T)\n mask = (jnp.arange(S)[None, :, None] >= T) # shape (1, S, 1)\n mask = jnp.broadcast_to(mask, (B, S, N)) # shape (B, S, N)\n init_mask = mask.astype(bool)",python,selection_mouse +2774,1525424,"genie.py",3455,829,"\n # begin potential forloop (from T to S)\n print(""token_idxs shape:"", token_idxs.shape)\n pad_shape = (B, S - T, N)\n pad = jnp.zeros(pad_shape, dtype=token_idxs.dtype)\n token_idxs = jnp.concatenate([token_idxs, pad], axis=1)\n\n print(""token_idxs shape:"", token_idxs.shape)\n action_tokens = self.lam.vq.get_codes(batch[""latent_actions""])\n print(""action_tokens.shape:"", action_tokens.shape) # (B, S, A, D)\n\n # Create a mask that is 1 (True) where we just padded\n # token_idxs shape: (B, S, N), T = original length, S = seq_len\n # mask is True for padded positions (i.e., t >= T)\n mask = (jnp.arange(S)[None, :, None] >= T) # shape (1, S, 1)\n mask = jnp.broadcast_to(mask, (B, S, N)) # shape (B, S, N)\n init_mask = mask.astype(bool)",python,selection_mouse +2775,1525476,"genie.py",3454,830,"\n\n # begin potential forloop (from T to S)\n print(""token_idxs shape:"", token_idxs.shape)\n pad_shape = (B, S - T, N)\n pad = jnp.zeros(pad_shape, dtype=token_idxs.dtype)\n token_idxs = jnp.concatenate([token_idxs, pad], axis=1)\n\n print(""token_idxs shape:"", token_idxs.shape)\n action_tokens = self.lam.vq.get_codes(batch[""latent_actions""])\n print(""action_tokens.shape:"", action_tokens.shape) # (B, S, A, D)\n\n # Create a mask that is 1 (True) where we just padded\n # token_idxs shape: (B, S, N), T = original length, S = seq_len\n # mask is True for padded positions (i.e., t >= T)\n mask = (jnp.arange(S)[None, :, None] >= T) # shape (1, S, 1)\n mask = jnp.broadcast_to(mask, (B, S, N)) # shape (B, S, N)\n init_mask = mask.astype(bool)",python,selection_mouse +2776,1525487,"genie.py",3417,867," token_idxs.shape\n S = seq_len\n\n # begin potential forloop (from T to S)\n print(""token_idxs shape:"", token_idxs.shape)\n pad_shape = (B, S - T, N)\n pad = jnp.zeros(pad_shape, dtype=token_idxs.dtype)\n token_idxs = jnp.concatenate([token_idxs, pad], axis=1)\n\n print(""token_idxs shape:"", token_idxs.shape)\n action_tokens = 
self.lam.vq.get_codes(batch[""latent_actions""])\n print(""action_tokens.shape:"", action_tokens.shape) # (B, S, A, D)\n\n # Create a mask that is 1 (True) where we just padded\n # token_idxs shape: (B, S, N), T = original length, S = seq_len\n # mask is True for padded positions (i.e., t >= T)\n mask = (jnp.arange(S)[None, :, None] >= T) # shape (1, S, 1)\n mask = jnp.broadcast_to(mask, (B, S, N)) # shape (B, S, N)\n init_mask = mask.astype(bool)",python,selection_mouse +2777,1525512,"genie.py",3350,934,"token_idxs = tokenizer_out[""indices""] # (B, T, N)\n B, T, N = token_idxs.shape\n S = seq_len\n\n # begin potential forloop (from T to S)\n print(""token_idxs shape:"", token_idxs.shape)\n pad_shape = (B, S - T, N)\n pad = jnp.zeros(pad_shape, dtype=token_idxs.dtype)\n token_idxs = jnp.concatenate([token_idxs, pad], axis=1)\n\n print(""token_idxs shape:"", token_idxs.shape)\n action_tokens = self.lam.vq.get_codes(batch[""latent_actions""])\n print(""action_tokens.shape:"", action_tokens.shape) # (B, S, A, D)\n\n # Create a mask that is 1 (True) where we just padded\n # token_idxs shape: (B, S, N), T = original length, S = seq_len\n # mask is True for padded positions (i.e., t >= T)\n mask = (jnp.arange(S)[None, :, None] >= T) # shape (1, S, 1)\n mask = jnp.broadcast_to(mask, (B, S, N)) # shape (B, S, N)\n init_mask = mask.astype(bool)",python,selection_mouse +2778,1525542,"genie.py",3268,1016,"tokenizer_out = self.tokenizer.vq_encode(batch[""videos""], training=False)\n token_idxs = tokenizer_out[""indices""] # (B, T, N)\n B, T, N = token_idxs.shape\n S = seq_len\n\n # begin potential forloop (from T to S)\n print(""token_idxs shape:"", token_idxs.shape)\n pad_shape = (B, S - T, N)\n pad = jnp.zeros(pad_shape, dtype=token_idxs.dtype)\n token_idxs = jnp.concatenate([token_idxs, pad], axis=1)\n\n print(""token_idxs shape:"", token_idxs.shape)\n action_tokens = self.lam.vq.get_codes(batch[""latent_actions""])\n print(""action_tokens.shape:"", action_tokens.shape) # (B, S, A, D)\n\n # Create a mask that is 1 (True) where we just padded\n # token_idxs shape: (B, S, N), T = original length, S = seq_len\n # mask is True for padded positions (i.e., t >= T)\n mask = (jnp.arange(S)[None, :, None] >= T) # shape (1, S, 1)\n mask = jnp.broadcast_to(mask, (B, S, N)) # shape (B, S, N)\n init_mask = mask.astype(bool)",python,selection_mouse +2779,1525856,"TERMINAL",0,0,"2",,terminal_output +2780,1526893,"TERMINAL",0,0,"3",,terminal_output +2781,1526984,"genie.py",4003,0,"",python,selection_mouse +2782,1527961,"TERMINAL",0,0,"4",,terminal_output +2783,1528279,"genie.py",4360,0,"",python,selection_mouse +2784,1528832,"genie.py",4385,0,"",python,selection_mouse +2785,1528978,"TERMINAL",0,0,"5",,terminal_output +2786,1529357,"genie.py",4361,31,"",python,content +2787,1529401,"genie.py",4368,0,"",python,selection_command +2788,1529432,"genie.py",4370,0,"",python,selection_command +2789,1529711,"genie.py",4379,0,"",python,selection_command +2790,1529878,"genie.py",4416,0,"",python,selection_command +2791,1530056,"TERMINAL",0,0,"6",,terminal_output +2792,1530278,"genie.py",4408,31,"",python,content +2793,1530367,"genie.py",4416,0,"",python,selection_command +2794,1530368,"genie.py",4379,0,"",python,selection_command +2795,1530517,"genie.py",4370,0,"",python,selection_command +2796,1530668,"genie.py",4368,0,"",python,selection_command +2797,1530816,"genie.py",4360,0,"",python,selection_command +2798,1530978,"genie.py",4359,0,"",python,selection_command +2799,1531063,"TERMINAL",0,0,"7",,terminal_output 
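Note: sequences 2755-2778 above repeatedly select the frame-padding logic in genie.py: the tokenizer indices `token_idxs` of shape (B, T, N) are zero-padded along the time axis up to S = seq_len, and a boolean mask marks exactly the padded frames. A minimal runnable sketch of that pattern (shape values taken from the terminal output later in the session, where (1, 1, 920) is padded to (1, 6, 920)):

    import jax.numpy as jnp

    B, T, N, S = 1, 1, 920, 6                      # batch, given frames, tokens per frame, target length
    token_idxs = jnp.zeros((B, T, N), jnp.int32)   # stand-in for tokenizer_out["indices"]

    # Zero-pad the time axis from T up to S.
    pad = jnp.zeros((B, S - T, N), token_idxs.dtype)
    token_idxs = jnp.concatenate([token_idxs, pad], axis=1)       # (B, S, N)

    # True exactly at padded positions (t >= T), broadcast to every token slot.
    mask = jnp.arange(S)[None, :, None] >= T                      # (1, S, 1)
    init_mask = jnp.broadcast_to(mask, (B, S, N)).astype(bool)    # (B, S, N)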
+2800,1531165,"genie.py",4357,0,"",python,selection_command +2801,1531547,"genie.py",4350,10,"",python,content +2802,1531989,"genie.py",4350,10,"",python,content +2803,1532096,"TERMINAL",0,0,"8",,terminal_output +2804,1533232,"TERMINAL",0,0,"9",,terminal_output +2805,1533280,"genie.py",4458,0,"",python,selection_mouse +2806,1533336,"genie.py",4449,14,"new_frame_idxs",python,selection_mouse +2807,1534193,"TERMINAL",0,0,"11",,terminal_output +2808,1535303,"TERMINAL",0,0,"2",,terminal_output +2809,1536312,"TERMINAL",0,0,"3",,terminal_output +2810,1537487,"TERMINAL",0,0,"4",,terminal_output +2811,1538427,"TERMINAL",0,0,"5 9",,terminal_output +2812,1539021,"genie.py",4456,0,"",python,selection_mouse +2813,1539430,"TERMINAL",0,0,"62",,terminal_output +2814,1540460,"TERMINAL",0,0,"7",,terminal_output +2815,1541508,"TERMINAL",0,0,"8",,terminal_output +2816,1542549,"TERMINAL",0,0,"9",,terminal_output +2817,1543594,"TERMINAL",0,0,"20",,terminal_output +2818,1544639,"TERMINAL",0,0,"1",,terminal_output +2819,1545680,"TERMINAL",0,0,"2",,terminal_output +2820,1546777,"TERMINAL",0,0,"310",,terminal_output +2821,1547842,"TERMINAL",0,0,"4",,terminal_output +2822,1548866,"TERMINAL",0,0,"5",,terminal_output +2823,1549893,"TERMINAL",0,0,"6",,terminal_output +2824,1550914,"TERMINAL",0,0,"7",,terminal_output +2825,1551941,"TERMINAL",0,0,"8",,terminal_output +2826,1552980,"TERMINAL",0,0,"9",,terminal_output +2827,1554025,"TERMINAL",0,0,"30",,terminal_output +2828,1555073,"TERMINAL",0,0,"1",,terminal_output +2829,1556146,"TERMINAL",0,0,"2",,terminal_output +2830,1556385,"genie.py",4005,0,"",python,selection_mouse +2831,1557005,"genie.py",4285,0,"",python,selection_mouse +2832,1557148,"TERMINAL",0,0,"4",,terminal_output +2833,1558188,"TERMINAL",0,0,"5",,terminal_output +2834,1559311,"TERMINAL",0,0,"6",,terminal_output +2835,1560338,"TERMINAL",0,0,"7",,terminal_output +2836,1561313,"TERMINAL",0,0,"8",,terminal_output +2837,1561872,"genie.py",4327,0,"",python,selection_mouse +2838,1562388,"TERMINAL",0,0,"9",,terminal_output +2839,1562749,"genie.py",4308,0,"",python,selection_mouse +2840,1562906,"genie.py",4301,9,"init_mask",python,selection_mouse +2841,1563408,"TERMINAL",0,0,"40",,terminal_output +2842,1564432,"TERMINAL",0,0,"1",,terminal_output +2843,1565562,"TERMINAL",0,0,"2",,terminal_output +2844,1566521,"TERMINAL",0,0,"3",,terminal_output +2845,1567565,"TERMINAL",0,0,"4",,terminal_output +2846,1568607,"TERMINAL",0,0,"5",,terminal_output +2847,1568645,"genie.py",3369,0,"",python,selection_mouse +2848,1568762,"genie.py",3363,13,"tokenizer_out",python,selection_mouse +2849,1569647,"TERMINAL",0,0,"6",,terminal_output +2850,1570406,"genie.py",3356,0,"",python,selection_mouse +2851,1570530,"genie.py",3350,10,"token_idxs",python,selection_mouse +2852,1570688,"TERMINAL",0,0,"7",,terminal_output +2853,1571840,"TERMINAL",0,0,"8",,terminal_output +2854,1572316,"genie.py",3713,0,"",python,selection_mouse +2855,1572346,"genie.py",3712,0,"",python,selection_command +2856,1572828,"TERMINAL",0,0,"9",,terminal_output +2857,1573811,"TERMINAL",0,0,"50",,terminal_output +2858,1574857,"TERMINAL",0,0,"1",,terminal_output +2859,1575309,"genie.py",4229,0,"",python,selection_mouse +2860,1575539,"genie.py",4229,2,"# ",python,selection_mouse +2861,1575539,"genie.py",4229,5,"# sha",python,selection_mouse +2862,1575539,"genie.py",4229,8,"# shape ",python,selection_mouse +2863,1575649,"genie.py",4229,10,"# shape (B",python,selection_mouse +2864,1575649,"genie.py",4229,11,"# shape (B,",python,selection_mouse 
+2865,1575650,"genie.py",4229,13,"# shape (B, S",python,selection_mouse +2866,1575650,"genie.py",4229,14,"# shape (B, S,",python,selection_mouse +2867,1575650,"genie.py",4229,15,"# shape (B, S, ",python,selection_mouse +2868,1575650,"genie.py",4229,16,"# shape (B, S, N",python,selection_mouse +2869,1575651,"genie.py",4229,17,"# shape (B, S, N)",python,selection_mouse +2870,1575899,"TERMINAL",0,0,"2",,terminal_output +2871,1576938,"TERMINAL",0,0,"3",,terminal_output +2872,1578009,"TERMINAL",0,0,"4",,terminal_output +2873,1578699,"genie.py",3713,0,"",python,selection_mouse +2874,1578748,"genie.py",3712,0,"",python,selection_command +2875,1579038,"genie.py",3713,0,"",python,selection_command +2876,1579039,"TERMINAL",0,0,"5",,terminal_output +2877,1579224,"genie.py",3713,0," ",python,content +2878,1579225,"genie.py",3714,0,"",python,selection_keyboard +2879,1579999,"genie.py",3714,0,"# shape (B, S, N)",python,content +2880,1580119,"TERMINAL",0,0,"6",,terminal_output +2881,1580486,"genie.py",3730,0,"",python,selection_command +2882,1581107,"TERMINAL",0,0,"7",,terminal_output +2883,1582149,"TERMINAL",0,0,"9",,terminal_output +2884,1583195,"TERMINAL",0,0,"2:00",,terminal_output +2885,1584298,"TERMINAL",0,0,"1",,terminal_output +2886,1585327,"TERMINAL",0,0,"2",,terminal_output +2887,1586344,"TERMINAL",0,0,"3",,terminal_output +2888,1587349,"TERMINAL",0,0,"4",,terminal_output +2889,1588394,"TERMINAL",0,0,"5",,terminal_output +2890,1589493,"TERMINAL",0,0,"6",,terminal_output +2891,1590438,"genie.py",4368,0,"",python,selection_mouse +2892,1590522,"TERMINAL",0,0,"7",,terminal_output +2893,1591029,"genie.py",4597,0,"",python,selection_mouse +2894,1591043,"genie.py",4596,0,"",python,selection_command +2895,1591558,"TERMINAL",0,0,"8",,terminal_output +2896,1591713,"genie.py",4566,0,"",python,selection_mouse +2897,1591727,"genie.py",4565,0,"",python,selection_command +2898,1592242,"genie.py",4597,0,"",python,selection_mouse +2899,1592244,"genie.py",4596,0,"",python,selection_command +2900,1592562,"TERMINAL",0,0,"9",,terminal_output +2901,1593607,"TERMINAL",0,0,"10",,terminal_output +2902,1593947,"genie.py",4567,31,"",python,content +2903,1593991,"genie.py",4575,0,"",python,selection_command +2904,1594659,"TERMINAL",0,0,"1",,terminal_output +2905,1595268,"genie.py",4469,0,"",python,selection_mouse +2906,1595379,"genie.py",4467,14,"new_frame_idxs",python,selection_mouse +2907,1595688,"TERMINAL",0,0,"2",,terminal_output +2908,1596009,"genie.py",4444,0,"",python,selection_mouse +2909,1596159,"genie.py",4441,5,"batch",python,selection_mouse +2910,1596725,"TERMINAL",0,0,"3",,terminal_output +2911,1597771,"TERMINAL",0,0,"4",,terminal_output +2912,1598840,"TERMINAL",0,0,"5",,terminal_output +2913,1599864,"TERMINAL",0,0,"6",,terminal_output +2914,1600990,"TERMINAL",0,0,"7",,terminal_output +2915,1602011,"TERMINAL",0,0,"8",,terminal_output +2916,1602996,"TERMINAL",0,0,"9",,terminal_output +2917,1603088,"genie.py",7499,0,"",python,selection_mouse +2918,1603223,"genie.py",7490,16,"final_token_idxs",python,selection_mouse +2919,1604060,"TERMINAL",0,0,"20",,terminal_output +2920,1605090,"TERMINAL",0,0,"1",,terminal_output +2921,1606110,"TERMINAL",0,0,"2",,terminal_output +2922,1607131,"TERMINAL",0,0,"31",,terminal_output +2923,1608172,"TERMINAL",0,0,"5",,terminal_output +2924,1609208,"TERMINAL",0,0,"6",,terminal_output +2925,1610310,"TERMINAL",0,0,"7",,terminal_output +2926,1611315,"TERMINAL",0,0,"8",,terminal_output +2927,1611392,"genie.py",7495,0,"",python,selection_mouse 
+2928,1612367,"genie.py",4476,0,"",python,selection_mouse +2929,1612376,"TERMINAL",0,0,"9",,terminal_output +2930,1612490,"genie.py",4467,14,"new_frame_idxs",python,selection_mouse +2931,1613379,"TERMINAL",0,0,"30",,terminal_output +2932,1614127,"genie.py",4450,0,"",python,selection_mouse +2933,1614432,"TERMINAL",0,0,"1",,terminal_output +2934,1614801,"genie.py",4427,0,"",python,selection_command +2935,1615463,"TERMINAL",0,0,"2",,terminal_output +2936,1615485,"genie.py",4450,0,"",python,selection_command +2937,1615619,"genie.py",4476,0,"",python,selection_command +2938,1616532,"TERMINAL",0,0,"3",,terminal_output +2939,1617543,"TERMINAL",0,0,"4",,terminal_output +2940,1618603,"genie.py",4455,28,"",python,content +2941,1618668,"genie.py",4467,0,"",python,selection_command +2942,1618763,"TERMINAL",0,0,"5",,terminal_output +2943,1619768,"TERMINAL",0,0,"61",,terminal_output +2944,1620808,"TERMINAL",0,0,"7",,terminal_output +2945,1621777,"genie.py",4472,0,"",python,selection_mouse +2946,1621889,"TERMINAL",0,0,"8",,terminal_output +2947,1621921,"genie.py",4467,9,"init_mask",python,selection_mouse +2948,1622888,"TERMINAL",0,0,"9",,terminal_output +2949,1623924,"TERMINAL",0,0,"4010",,terminal_output +2950,1624491,"genie.py",7463,0,"",python,selection_mouse +2951,1624605,"genie.py",7462,16,"final_token_idxs",python,selection_mouse +2952,1624974,"TERMINAL",0,0,"1",,terminal_output +2953,1625305,"genie.py",7462,16,"",python,content +2954,1625822,"genie.py",7462,1,"",python,content +2955,1626014,"genie.py",7462,1,"",python,content +2956,1626026,"TERMINAL",0,0,"2",,terminal_output +2957,1627031,"genie.py",7464,0,"",python,selection_mouse +2958,1627051,"TERMINAL",0,0,"3",,terminal_output +2959,1627196,"genie.py",7462,4,"mask",python,selection_mouse +2960,1628110,"TERMINAL",0,0,"4",,terminal_output +2961,1628583,"genie.py",7472,0,"",python,selection_mouse +2962,1628718,"genie.py",7468,10,"token_idxs",python,selection_mouse +2963,1629139,"TERMINAL",0,0,"5",,terminal_output +2964,1629822,"genie.py",7724,0,"",python,selection_mouse +2965,1629828,"genie.py",7723,0,"",python,selection_command +2966,1630177,"TERMINAL",0,0,"7",,terminal_output +2967,1630363,"genie.py",7686,0,"",python,selection_mouse +2968,1630522,"genie.py",7685,16,"final_token_idxs",python,selection_mouse +2969,1631262,"genie.py",7692,0,"",python,selection_mouse +2970,1631262,"genie.py",7685,16,"final_token_idxs",python,selection_mouse +2971,1631315,"TERMINAL",0,0,"8",,terminal_output +2972,1632263,"genie.py",7465,0,"",python,selection_mouse +2973,1632296,"TERMINAL",0,0,"9",,terminal_output +2974,1633310,"TERMINAL",0,0,"50",,terminal_output +2975,1633724,"genie.py",7518,0,"",python,selection_mouse +2976,1633725,"genie.py",7517,0,"",python,selection_command +2977,1634344,"TERMINAL",0,0,"1",,terminal_output +2978,1634761,"genie.py",7466,51,", token_idxs, action_tokens = carry\n step = ",python,selection_mouse +2979,1634763,"genie.py",7466,52,", token_idxs, action_tokens = carry\n step = x",python,selection_command +2980,1635064,"genie.py",7466,0,"",python,selection_mouse +2981,1635497,"TERMINAL",0,0,"2",,terminal_output +2982,1635888,"genie.py",7462,0," ",python,content +2983,1635918,"genie.py",7462,0,"",python,selection_command +2984,1636381,"genie.py",7462,0,",",python,content +2985,1636401,"genie.py",7462,0,"",python,selection_command +2986,1636484,"TERMINAL",0,0,"3",,terminal_output +2987,1636853,"genie.py",7462,0,"final_token_idxs",python,content +2988,1636869,"genie.py",7462,0,"",python,selection_command 
+2989,1637476,"TERMINAL",0,0,"4",,terminal_output +2990,1638536,"TERMINAL",0,0,"51",,terminal_output +2991,1639594,"TERMINAL",0,0,"6",,terminal_output +2992,1640297,"genie.py",4454,0,"",python,selection_mouse +2993,1640311,"genie.py",4453,0,"",python,selection_command +2994,1640603,"TERMINAL",0,0,"7",,terminal_output +2995,1641406,"genie.py",4477,0,"",python,selection_mouse +2996,1641408,"genie.py",4476,0,"",python,selection_command +2997,1641641,"TERMINAL",0,0,"8",,terminal_output +2998,1642045,"genie.py",4454,0,"",python,selection_mouse +2999,1642062,"genie.py",4453,0,"",python,selection_command +3000,1642682,"TERMINAL",0,0,"9",,terminal_output +3001,1643505,"genie.py",4454,0,"\n ",python,content +3002,1643729,"TERMINAL",0,0,"3:00",,terminal_output +3003,1644330,"genie.py",4467,0,"t",python,content +3004,1644331,"genie.py",4468,0,"",python,selection_keyboard +3005,1644432,"genie.py",4468,0,"o",python,content +3006,1644433,"genie.py",4469,0,"",python,selection_keyboard +3007,1644483,"genie.py",4469,0,"k",python,content +3008,1644484,"genie.py",4470,0,"",python,selection_keyboard +3009,1644583,"genie.py",4470,0,"e",python,content +3010,1644585,"genie.py",4471,0,"",python,selection_keyboard +3011,1644717,"genie.py",4471,0,"n",python,content +3012,1644718,"genie.py",4472,0,"",python,selection_keyboard +3013,1644782,"TERMINAL",0,0,"1",,terminal_output +3014,1645383,"genie.py",4472,0,",",python,content +3015,1645383,"genie.py",4473,0,"",python,selection_keyboard +3016,1645850,"TERMINAL",0,0,"2",,terminal_output +3017,1646215,"genie.py",4472,1,"",python,content +3018,1646521,"genie.py",4472,0,"_",python,content +3019,1646522,"genie.py",4473,0,"",python,selection_keyboard +3020,1646784,"genie.py",4473,0,"i",python,content +3021,1646785,"genie.py",4474,0,"",python,selection_keyboard +3022,1646877,"TERMINAL",0,0,"3",,terminal_output +3023,1646919,"genie.py",4474,0,"d",python,content +3024,1646919,"genie.py",4475,0,"",python,selection_keyboard +3025,1647549,"genie.py",4475,0,"c",python,content +3026,1647550,"genie.py",4476,0,"",python,selection_keyboard +3027,1647701,"genie.py",4476,0,"s",python,content +3028,1647702,"genie.py",4477,0,"",python,selection_keyboard +3029,1647957,"TERMINAL",0,0,"42",,terminal_output +3030,1648269,"genie.py",4476,1,"",python,content +3031,1648362,"genie.py",4475,1,"",python,content +3032,1648843,"genie.py",4475,0,"x",python,content +3033,1648844,"genie.py",4476,0,"",python,selection_keyboard +3034,1648892,"genie.py",4476,0,"s",python,content +3035,1648893,"genie.py",4477,0,"",python,selection_keyboard +3036,1648963,"TERMINAL",0,0,"5",,terminal_output +3037,1649014,"genie.py",4477,0,",",python,content +3038,1649015,"genie.py",4478,0,"",python,selection_keyboard +3039,1649630,"genie.py",4477,0,"",python,selection_command +3040,1649991,"TERMINAL",0,0,"6",,terminal_output +3041,1650181,"genie.py",4472,0,"",python,selection_mouse +3042,1650299,"genie.py",4467,10,"token_idxs",python,selection_mouse +3043,1651064,"TERMINAL",0,0,"7",,terminal_output +3044,1651522,"genie.py",4496,0,"",python,selection_mouse +3045,1651659,"genie.py",4491,9,"init_mask",python,selection_mouse +3046,1652133,"TERMINAL",0,0,"8",,terminal_output +3047,1653112,"TERMINAL",0,0,"9",,terminal_output +3048,1654001,"genie.py",7508,0,"",python,selection_mouse +3049,1654154,"genie.py",7504,4,"mask",python,selection_mouse +3050,1654164,"TERMINAL",0,0,"11",,terminal_output +3051,1654704,"genie.py",7515,0,"",python,selection_mouse +3052,1654867,"genie.py",7510,10,"token_idxs",python,selection_mouse 
+3053,1655198,"TERMINAL",0,0,"2",,terminal_output +3054,1656288,"TERMINAL",0,0,"3",,terminal_output +3055,1657308,"TERMINAL",0,0,"4",,terminal_output +3056,1657530,"genie.py",7560,0,"",python,selection_mouse +3057,1657563,"genie.py",7559,0,"",python,selection_command +3058,1657882,"genie.py",7559,1,"x",python,selection_mouse +3059,1657882,"genie.py",7531,28,"kens = carry\n step = ",python,selection_mouse +3060,1657914,"genie.py",7560,0,"",python,selection_command +3061,1658054,"genie.py",7531,0,"",python,selection_mouse +3062,1658197,"genie.py",7522,13,"action_tokens",python,selection_mouse +3063,1658666,"TERMINAL",0,0,"50",,terminal_output +3064,1659674,"TERMINAL",0,0,"6",,terminal_output +3065,1660714,"TERMINAL",0,0,"7",,terminal_output +3066,1661760,"TERMINAL",0,0,"8",,terminal_output +3067,1662137,"genie.py",7558,0,"",python,selection_mouse +3068,1662644,"genie.py",7552,0,"",python,selection_mouse +3069,1662780,"genie.py",7552,4,"step",python,selection_mouse +3070,1662814,"TERMINAL",0,0,"9",,terminal_output +3071,1663579,"genie.py",7559,0,"",python,selection_mouse +3072,1663743,"genie.py",7559,1,"x",python,selection_mouse +3073,1663875,"TERMINAL",0,0,"20",,terminal_output +3074,1664889,"TERMINAL",0,0,"1",,terminal_output +3075,1665414,"genie.py",7555,0,"",python,selection_mouse +3076,1665497,"genie.py",7552,4,"step",python,selection_mouse +3077,1665910,"TERMINAL",0,0,"2",,terminal_output +3078,1666980,"TERMINAL",0,0,"3",,terminal_output +3079,1668009,"genie.py",7595,0,"",python,selection_mouse +3080,1668018,"TERMINAL",0,0,"4",,terminal_output +3081,1668558,"genie.py",7600,0,"",python,selection_mouse +3082,1669088,"TERMINAL",0,0,"5",,terminal_output +3083,1670111,"TERMINAL",0,0,"6",,terminal_output +3084,1670275,"genie.py",7572,0,"",python,selection_mouse +3085,1671138,"TERMINAL",0,0,"7",,terminal_output +3086,1671249,"genie.py",7573,0,"",python,selection_command +3087,1671642,"genie.py",7572,1,"",python,content +3088,1671950,"genie.py",7572,0,"S",python,content +3089,1671951,"genie.py",7573,0,"",python,selection_keyboard +3090,1672258,"TERMINAL",0,0,"9",,terminal_output +3091,1672533,"genie.py",7572,0,"",python,selection_command +3092,1673105,"genie.py",7790,0,"",python,selection_mouse +3093,1673201,"TERMINAL",0,0,"30",,terminal_output +3094,1674238,"TERMINAL",0,0,"1",,terminal_output +3095,1674470,"genie.py",7581,0,"",python,selection_mouse +3096,1675048,"genie.py",7592,0,"",python,selection_mouse +3097,1675197,"genie.py",7590,5,"shape",python,selection_mouse +3098,1675292,"TERMINAL",0,0,"2",,terminal_output +3099,1675756,"genie.py",7585,0,"",python,selection_mouse +3100,1675908,"genie.py",7579,10,"token_idxs",python,selection_mouse +3101,1676311,"TERMINAL",0,0,"3",,terminal_output +3102,1677392,"TERMINAL",0,0,"4",,terminal_output +3103,1678004,"genie.py",7774,0,"",python,selection_mouse +3104,1678395,"TERMINAL",0,0,"5",,terminal_output +3105,1678562,"genie.py",7766,0,"",python,selection_mouse +3106,1678565,"genie.py",7765,0,"",python,selection_command +3107,1679440,"TERMINAL",0,0,"6",,terminal_output +3108,1680555,"TERMINAL",0,0,"7",,terminal_output +3109,1681780,"TERMINAL",0,0,"8",,terminal_output +3110,1682875,"TERMINAL",0,0,"9",,terminal_output +3111,1683372,"genie.py",7764,0,"",python,selection_mouse +3112,1683873,"TERMINAL",0,0,"40",,terminal_output +3113,1683946,"genie.py",7766,0,"",python,selection_mouse +3114,1683961,"genie.py",7765,0,"",python,selection_command +3115,1684908,"TERMINAL",0,0,"1",,terminal_output +3116,1685985,"TERMINAL",0,0,"2",,terminal_output 
+3117,1687010,"TERMINAL",0,0,"3",,terminal_output +3118,1688029,"TERMINAL",0,0,"4",,terminal_output +3119,1689156,"TERMINAL",0,0,"5",,terminal_output +3120,1690180,"TERMINAL",0,0,"6",,terminal_output +3121,1691150,"TERMINAL",0,0,"8",,terminal_output +3122,1692192,"TERMINAL",0,0,"9",,terminal_output +3123,1693259,"TERMINAL",0,0,"50",,terminal_output +3124,1694277,"TERMINAL",0,0,"1",,terminal_output +3125,1695320,"TERMINAL",0,0,"2",,terminal_output +3126,1696425,"TERMINAL",0,0,"3",,terminal_output +3127,1697455,"TERMINAL",0,0,"4",,terminal_output +3128,1698474,"TERMINAL",0,0,"5",,terminal_output +3129,1699498,"TERMINAL",0,0,"6",,terminal_output +3130,1700628,"TERMINAL",0,0,"7",,terminal_output +3131,1701567,"TERMINAL",0,0,"8",,terminal_output +3132,1702609,"TERMINAL",0,0,"9",,terminal_output +3133,1703650,"TERMINAL",0,0,"4:00",,terminal_output +3134,1704691,"TERMINAL",0,0,"1",,terminal_output +3135,1705727,"TERMINAL",0,0,"2",,terminal_output +3136,1706872,"TERMINAL",0,0,"3",,terminal_output +3137,1707901,"TERMINAL",0,0,"4",,terminal_output +3138,1708920,"TERMINAL",0,0,"5",,terminal_output +3139,1709944,"TERMINAL",0,0,"6",,terminal_output +3140,1710938,"TERMINAL",0,0,"7",,terminal_output +3141,1711978,"TERMINAL",0,0,"8",,terminal_output +3142,1713015,"TERMINAL",0,0,"9",,terminal_output +3143,1714057,"TERMINAL",0,0,"10",,terminal_output +3144,1715107,"TERMINAL",0,0,"1",,terminal_output +3145,1716189,"TERMINAL",0,0,"3",,terminal_output +3146,1717214,"TERMINAL",0,0,"4",,terminal_output +3147,1718341,"TERMINAL",0,0,"5",,terminal_output +3148,1719365,"TERMINAL",0,0,"6",,terminal_output +3149,1720327,"TERMINAL",0,0,"7",,terminal_output +3150,1721358,"TERMINAL",0,0,"8",,terminal_output +3151,1722391,"TERMINAL",0,0,"9",,terminal_output +3152,1723440,"TERMINAL",0,0,"20",,terminal_output +3153,1724468,"TERMINAL",0,0,"1",,terminal_output +3154,1725615,"TERMINAL",0,0,"2",,terminal_output +3155,1726572,"TERMINAL",0,0,"3 90",,terminal_output +3156,1727592,"TERMINAL",0,0,"4",,terminal_output +3157,1728630,"TERMINAL",0,0,"5",,terminal_output +3158,1729665,"TERMINAL",0,0,"6",,terminal_output +3159,1730710,"TERMINAL",0,0,"7",,terminal_output +3160,1731798,"TERMINAL",0,0,"8",,terminal_output +3161,1732829,"TERMINAL",0,0,"9",,terminal_output +3162,1733852,"TERMINAL",0,0,"30",,terminal_output +3163,1734884,"TERMINAL",0,0,"11",,terminal_output +3164,1735955,"TERMINAL",0,0,"2",,terminal_output +3165,1736961,"TERMINAL",0,0,"3",,terminal_output +3166,1738002,"TERMINAL",0,0,"4",,terminal_output +3167,1739040,"TERMINAL",0,0,"5",,terminal_output +3168,1740075,"TERMINAL",0,0,"6",,terminal_output +3169,1741175,"TERMINAL",0,0,"7",,terminal_output +3170,1742153,"TERMINAL",0,0,"9",,terminal_output +3171,1743190,"TERMINAL",0,0,"40",,terminal_output +3172,1744250,"TERMINAL",0,0,"1",,terminal_output +3173,1745269,"TERMINAL",0,0,"2",,terminal_output +3174,1746304,"TERMINAL",0,0,"3",,terminal_output +3175,1747421,"TERMINAL",0,0,"42",,terminal_output +3176,1748389,"TERMINAL",0,0,"5",,terminal_output +3177,1749436,"TERMINAL",0,0,"6",,terminal_output +3178,1750494,"TERMINAL",0,0,"7",,terminal_output +3179,1751522,"TERMINAL",0,0,"8",,terminal_output +3180,1752585,"TERMINAL",0,0,"9",,terminal_output +3181,1753572,"TERMINAL",0,0,"50",,terminal_output +3182,1754616,"TERMINAL",0,0,"1",,terminal_output +3183,1755656,"TERMINAL",0,0,"2",,terminal_output +3184,1756704,"TERMINAL",0,0,"3",,terminal_output +3185,1757745,"TERMINAL",0,0,"4",,terminal_output +3186,1758893,"TERMINAL",0,0,"5",,terminal_output 
+3187,1759823,"TERMINAL",0,0,"6",,terminal_output +3188,1760867,"TERMINAL",0,0,"7",,terminal_output +3189,1761916,"TERMINAL",0,0,"8",,terminal_output +3190,1762991,"TERMINAL",0,0,"9",,terminal_output +3191,1763998,"TERMINAL",0,0,"5:00",,terminal_output +3192,1765044,"TERMINAL",0,0,"1",,terminal_output +3193,1766077,"TERMINAL",0,0,"2",,terminal_output +3194,1767121,"TERMINAL",0,0,"3",,terminal_output +3195,1768160,"TERMINAL",0,0,"5",,terminal_output +3196,1769241,"TERMINAL",0,0,"6",,terminal_output +3197,1770260,"TERMINAL",0,0,"7",,terminal_output +3198,1771423,"TERMINAL",0,0,"8",,terminal_output +3199,1772383,"TERMINAL",0,0,"9",,terminal_output +3200,1773379,"TERMINAL",0,0,"10",,terminal_output +3201,1774423,"TERMINAL",0,0,"1",,terminal_output +3202,1775468,"TERMINAL",0,0,"2",,terminal_output +3203,1776499,"TERMINAL",0,0,"3",,terminal_output +3204,1777630,"TERMINAL",0,0,"4",,terminal_output +3205,1778594,"TERMINAL",0,0,"5",,terminal_output +3206,1779632,"TERMINAL",0,0,"60",,terminal_output +3207,1780672,"TERMINAL",0,0,"7",,terminal_output +3208,1781713,"TERMINAL",0,0,"8",,terminal_output +3209,1782746,"TERMINAL",0,0,"9",,terminal_output +3210,1783789,"TERMINAL",0,0,"20",,terminal_output +3211,1784837,"TERMINAL",0,0,"1",,terminal_output +3212,1785925,"TERMINAL",0,0,"2",,terminal_output +3213,1786954,"TERMINAL",0,0,"3",,terminal_output +3214,1787983,"TERMINAL",0,0,"4",,terminal_output +3215,1788998,"TERMINAL",0,0,"5",,terminal_output +3216,1790123,"TERMINAL",0,0,"6",,terminal_output +3217,1791077,"TERMINAL",0,0,"7",,terminal_output +3218,1792173,"TERMINAL",0,0,"8",,terminal_output +3219,1793161,"TERMINAL",0,0,"30",,terminal_output +3220,1794323,"TERMINAL",0,0,"126",,terminal_output +3221,1795263,"TERMINAL",0,0,"2",,terminal_output +3222,1796302,"TERMINAL",0,0,"3",,terminal_output +3223,1797493,"TERMINAL",0,0,"454",,terminal_output +3224,1798422,"TERMINAL",0,0,"5",,terminal_output +3225,1799442,"TERMINAL",0,0,"6",,terminal_output +3226,1800571,"TERMINAL",0,0,"7",,terminal_output +3227,1801508,"TERMINAL",0,0,"8",,terminal_output +3228,1802556,"TERMINAL",0,0,"9",,terminal_output +3229,1803641,"TERMINAL",0,0,"40",,terminal_output +3230,1804625,"TERMINAL",0,0,"1",,terminal_output +3231,1805662,"TERMINAL",0,0,"21",,terminal_output +3232,1806702,"TERMINAL",0,0,"3",,terminal_output +3233,1807759,"TERMINAL",0,0,"4",,terminal_output +3234,1808787,"TERMINAL",0,0,"5",,terminal_output +3235,1809822,"TERMINAL",0,0,"6",,terminal_output +3236,1810863,"TERMINAL",0,0,"7",,terminal_output +3237,1811938,"TERMINAL",0,0,"8",,terminal_output +3238,1812979,"TERMINAL",0,0,"9",,terminal_output +3239,1813982,"TERMINAL",0,0,"50",,terminal_output +3240,1815143,"TERMINAL",0,0,"1",,terminal_output +3241,1816251,"TERMINAL",0,0,"2",,terminal_output +3242,1817108,"TERMINAL",0,0,"3",,terminal_output +3243,1817995,"genie.py",7495,0,"",python,selection_mouse +3244,1818135,"genie.py",7486,16,"final_token_idxs",python,selection_mouse +3245,1818170,"TERMINAL",0,0,"5",,terminal_output +3246,1819210,"TERMINAL",0,0,"6",,terminal_output +3247,1820212,"genie.py",7702,0,"",python,selection_mouse +3248,1820268,"TERMINAL",0,0,"7",,terminal_output +3249,1820714,"genie.py",7766,0,"",python,selection_mouse +3250,1820734,"genie.py",7765,0,"",python,selection_command +3251,1820863,"genie.py",7766,0,"",python,selection_mouse +3252,1820866,"genie.py",7765,0,"",python,selection_command +3253,1821048,"genie.py",7765,1,")",python,selection_mouse +3254,1821079,"genie.py",7766,0,"",python,selection_command +3255,1821182,"genie.py",7697,69," 
(token_idxs, jnp.expand_dims(final_token_idxs, 1)), axis=1\n )",python,selection_mouse +3256,1821183,"genie.py",7696,70," (token_idxs, jnp.expand_dims(final_token_idxs, 1)), axis=1\n )",python,selection_mouse +3257,1821288,"TERMINAL",0,0,"8",,terminal_output +3258,1821597,"genie.py",7696,0,"",python,selection_mouse +3259,1822145,"genie.py",7734,0,"",python,selection_mouse +3260,1822265,"genie.py",7727,16,"final_token_idxs",python,selection_mouse +3261,1822327,"TERMINAL",0,0,"9",,terminal_output +3262,1823348,"genie.py",7706,0,"",python,selection_mouse +3263,1823377,"TERMINAL",0,0,"6:00",,terminal_output +3264,1824050,"genie.py",7766,0,"",python,selection_mouse +3265,1824053,"genie.py",7765,0,"",python,selection_command +3266,1824193,"genie.py",7766,0,"",python,selection_mouse +3267,1824205,"genie.py",7765,0,"",python,selection_command +3268,1824383,"TERMINAL",0,0,"1",,terminal_output +3269,1824791,"genie.py",7705,0,"",python,selection_mouse +3270,1824941,"genie.py",7699,10,"token_idxs",python,selection_mouse +3271,1825458,"TERMINAL",0,0,"2",,terminal_output +3272,1826081,"genie.py",7787,0,"",python,selection_mouse +3273,1826476,"TERMINAL",0,0,"3",,terminal_output +3274,1826862,"genie.py",7663,0,"",python,selection_mouse +3275,1827507,"TERMINAL",0,0,"4",,terminal_output +3276,1828130,"genie.py",7669,0,"",python,selection_mouse +3277,1828463,"genie.py",7669,1,"j",python,selection_mouse +3278,1828463,"genie.py",7669,44,"jnp.concatenate(\n (token_idxs, jn",python,selection_mouse +3279,1828464,"genie.py",7669,45,"jnp.concatenate(\n (token_idxs, jnp",python,selection_mouse +3280,1828509,"genie.py",7669,97,"jnp.concatenate(\n (token_idxs, jnp.expand_dims(final_token_idxs, 1)), axis=1\n )",python,selection_mouse +3281,1828609,"TERMINAL",0,0,"5",,terminal_output +3282,1829649,"TERMINAL",0,0,"6",,terminal_output +3283,1830157,"genie.py",7669,97,"",python,content +3284,1830192,"genie.py",7668,0,"",python,selection_command +3285,1830259,"genie.py",7626,0,"",python,selection_command +3286,1830627,"TERMINAL",0,0,"75",,terminal_output +3287,1830850,"genie.py",7668,0,"",python,selection_command +3288,1831680,"TERMINAL",0,0,"8",,terminal_output +3289,1831829,"genie.py",7669,0,"",python,selection_command +3290,1832641,"genie.py",7669,0,"t",python,content +3291,1832642,"genie.py",7670,0,"",python,selection_keyboard +3292,1832757,"genie.py",7670,0,"o",python,content +3293,1832758,"genie.py",7671,0,"",python,selection_keyboard +3294,1832758,"TERMINAL",0,0,"9",,terminal_output +3295,1832804,"genie.py",7671,0,"k",python,content +3296,1832804,"genie.py",7672,0,"",python,selection_keyboard +3297,1832910,"genie.py",7672,0,"e",python,content +3298,1832911,"genie.py",7673,0,"",python,selection_keyboard +3299,1833052,"genie.py",7673,0,"n",python,content +3300,1833053,"genie.py",7674,0,"",python,selection_keyboard +3301,1833687,"genie.py",7669,5,"token_idxs",python,content +3302,1833750,"TERMINAL",0,0,"10",,terminal_output +3303,1834203,"genie.py",7678,0,"",python,selection_command +3304,1834793,"TERMINAL",0,0,"1",,terminal_output +3305,1835218,"genie.py",7674,0,"",python,selection_mouse +3306,1835355,"genie.py",7669,10,"token_idxs",python,selection_mouse +3307,1835825,"TERMINAL",0,0,"2",,terminal_output +3308,1836037,"genie.py",7660,0,"",python,selection_mouse +3309,1836183,"genie.py",7652,14,"vid_token_idxs",python,selection_mouse +3310,1836860,"TERMINAL",0,0,"3",,terminal_output +3311,1837938,"TERMINAL",0,0,"4",,terminal_output +3312,1838954,"TERMINAL",0,0,"5",,terminal_output 
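Note: sequences 3253-3301 delete the append expression `jnp.concatenate((token_idxs, jnp.expand_dims(final_token_idxs, 1)), axis=1)` and pass `token_idxs` through unchanged, consistent with the earlier switch to a pre-padded (B, S, N) buffer: under scan the carry's shape must stay fixed, so growing the time axis each step cannot work. Both variants side by side (a sketch; `step` would be the scanned index in the real loop):

    import jax
    import jax.numpy as jnp

    B, S, N = 1, 6, 920
    token_idxs = jnp.zeros((B, S, N), jnp.int32)      # pre-padded buffer
    final_token_idxs = jnp.ones((B, N), jnp.int32)    # one freshly sampled frame
    step = 1

    # Deleted variant: grows the time axis, so the carry shape changes per step.
    appended = jnp.concatenate(
        (token_idxs, jnp.expand_dims(final_token_idxs, 1)), axis=1
    )                                                  # (B, S + 1, N)

    # Fixed-shape alternative: write frame `step` into the padded buffer in place.
    updated = jax.lax.dynamic_update_slice_in_dim(
        token_idxs, final_token_idxs[:, None, :], step, axis=1
    )                                                  # still (B, S, N)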
+3313,1839991,"TERMINAL",0,0,"6",,terminal_output +3314,1841031,"TERMINAL",0,0,"710",,terminal_output +3315,1841771,"genie.py",9003,0,"",python,selection_mouse +3316,1842148,"TERMINAL",0,0,"8",,terminal_output +3317,1842340,"genie.py",9196,0,"",python,selection_mouse +3318,1842478,"genie.py",9196,6,"arange",python,selection_mouse +3319,1843115,"genie.py",9494,0,"",python,selection_mouse +3320,1843148,"TERMINAL",0,0,"9",,terminal_output +3321,1843274,"genie.py",9491,14,"new_token_idxs",python,selection_mouse +3322,1844147,"TERMINAL",0,0,"21",,terminal_output +3323,1845220,"TERMINAL",0,0,"2",,terminal_output +3324,1846239,"TERMINAL",0,0,"3",,terminal_output +3325,1846588,"genie.py",7498,0,"",python,selection_mouse +3326,1846725,"genie.py",7486,16,"final_token_idxs",python,selection_mouse +3327,1847271,"TERMINAL",0,0,"4",,terminal_output +3328,1848391,"TERMINAL",0,0,"5",,terminal_output +3329,1849360,"TERMINAL",0,0,"6",,terminal_output +3330,1850399,"TERMINAL",0,0,"7",,terminal_output +3331,1851461,"TERMINAL",0,0,"8",,terminal_output +3332,1852473,"TERMINAL",0,0,"9",,terminal_output +3333,1853612,"TERMINAL",0,0,"30",,terminal_output +3334,1854641,"TERMINAL",0,0,"1",,terminal_output +3335,1855594,"TERMINAL",0,0,"21",,terminal_output +3336,1856633,"TERMINAL",0,0,"3",,terminal_output +3337,1857676,"TERMINAL",0,0,"4",,terminal_output +3338,1858725,"TERMINAL",0,0,"5",,terminal_output +3339,1859763,"TERMINAL",0,0,"6",,terminal_output +3340,1860798,"TERMINAL",0,0,"7",,terminal_output +3341,1861860,"TERMINAL",0,0,"8",,terminal_output +3342,1862304,"genie.py",7489,0,"",python,selection_mouse +3343,1862882,"TERMINAL",0,0,"9",,terminal_output +3344,1863957,"TERMINAL",0,0,"40",,terminal_output +3345,1864159,"genie.py",7487,0,"",python,selection_mouse +3346,1864978,"TERMINAL",0,0,"1",,terminal_output +3347,1865152,"genie.py",7486,0,"",python,selection_mouse +3348,1866106,"TERMINAL",0,0,"2",,terminal_output +3349,1866437,"genie.py",9044,0,"",python,selection_mouse +3350,1867047,"TERMINAL",0,0,"3",,terminal_output +3351,1867118,"genie.py",9044,1,"",python,content +3352,1867261,"genie.py",9044,1,"",python,content +3353,1867469,"genie.py",9044,1,"",python,content +3354,1867621,"genie.py",9044,1,"",python,content +3355,1867798,"genie.py",9044,1,"",python,content +3356,1867992,"genie.py",9044,1,"",python,content +3357,1868147,"TERMINAL",0,0,"4",,terminal_output +3358,1869133,"TERMINAL",0,0,"5",,terminal_output +3359,1869239,"genie.py",7486,0,"",python,selection_mouse +3360,1869886,"genie.py",7486,16,"",python,content +3361,1870202,"TERMINAL",0,0,"7",,terminal_output +3362,1870223,"genie.py",7486,1,"",python,content +3363,1870487,"genie.py",7486,1,"",python,content +3364,1871225,"TERMINAL",0,0,"8",,terminal_output +3365,1872396,"TERMINAL",0,0,"9",,terminal_output +3366,1873282,"TERMINAL",0,0,"50",,terminal_output +3367,1873851,"genie.py",4472,0,"",python,selection_mouse +3368,1874424,"TERMINAL",0,0,"1",,terminal_output +3369,1874910,"genie.py",4455,24,"",python,content +3370,1874993,"genie.py",4467,0,"",python,selection_command +3371,1875413,"TERMINAL",0,0,"2",,terminal_output +3372,1876407,"TERMINAL",0,0,"3",,terminal_output +3373,1877498,"TERMINAL",0,0,"4",,terminal_output +3374,1878505,"TERMINAL",0,0,"5",,terminal_output +3375,1879601,"TERMINAL",0,0,"6",,terminal_output +3376,1880537,"genie.py",7846,0,"",python,selection_mouse +3377,1880587,"TERMINAL",0,0,"7",,terminal_output +3378,1881048,"genie.py",7810,0,"",python,selection_mouse +3379,1881609,"TERMINAL",0,0,"8",,terminal_output 
+3380,1881777,"genie.py",7838,0,"",python,selection_mouse +3381,1882294,"genie.py",7840,0,"",python,selection_mouse +3382,1882647,"TERMINAL",0,0,"9",,terminal_output +3383,1883723,"TERMINAL",0,0,"7:00",,terminal_output +3384,1884743,"TERMINAL",0,0,"1",,terminal_output +3385,1885773,"TERMINAL",0,0,"2",,terminal_output +3386,1886812,"TERMINAL",0,0,"3",,terminal_output +3387,1887840,"TERMINAL",0,0,"4",,terminal_output +3388,1888876,"TERMINAL",0,0,"5",,terminal_output +3389,1889235,"genie.py",7771,0,"",python,selection_mouse +3390,1889525,"genie.py",7771,1,",",python,selection_mouse +3391,1889611,"genie.py",7771,2,", ",python,selection_mouse +3392,1889612,"genie.py",7771,3,", -",python,selection_mouse +3393,1889968,"TERMINAL",0,0,"6",,terminal_output +3394,1890039,"genie.py",7771,4,", -1",python,selection_mouse +3395,1890957,"TERMINAL",0,0,"7",,terminal_output +3396,1891102,"genie.py",7771,4,"",python,content +3397,1892020,"TERMINAL",0,0,"8",,terminal_output +3398,1892471,"genie.py",7806,0,"",python,selection_command +3399,1892651,"genie.py",7843,0,"",python,selection_command +3400,1893037,"TERMINAL",0,0,"9",,terminal_output +3401,1893063,"genie.py",7842,0,"",python,selection_command +3402,1893267,"genie.py",7841,0,"",python,selection_command +3403,1893438,"genie.py",7840,0,"",python,selection_command +3404,1893576,"genie.py",7839,0,"",python,selection_command +3405,1893713,"genie.py",7838,0,"",python,selection_command +3406,1893830,"genie.py",7837,0,"",python,selection_command +3407,1893964,"genie.py",7836,0,"",python,selection_command +3408,1894095,"TERMINAL",0,0,"10",,terminal_output +3409,1894186,"genie.py",7836,1,"",python,content +3410,1894366,"genie.py",7836,1,"",python,content +3411,1894531,"genie.py",7836,1,"",python,content +3412,1894716,"genie.py",7836,1,"",python,content +3413,1894882,"genie.py",7836,1,"",python,content +3414,1895046,"genie.py",7836,1,"",python,content +3415,1895127,"TERMINAL",0,0,"11",,terminal_output +3416,1895756,"genie.py",7836,1,"",python,content +3417,1896160,"TERMINAL",0,0,"3",,terminal_output +3418,1897204,"TERMINAL",0,0,"4",,terminal_output +3419,1898246,"TERMINAL",0,0,"56",,terminal_output +3420,1899486,"TERMINAL",0,0,"6",,terminal_output +3421,1900449,"TERMINAL",0,0,"71100",,terminal_output +3422,1901298,"genie.py",7847,0,"",python,selection_mouse +3423,1901327,"genie.py",7846,0,"",python,selection_command +3424,1901385,"TERMINAL",0,0,"8",,terminal_output +3425,1902404,"TERMINAL",0,0,"9",,terminal_output +3426,1902979,"genie.py",7856,0,"",python,selection_command +3427,1903440,"TERMINAL",0,0,"20",,terminal_output +3428,1904491,"TERMINAL",0,0,"1",,terminal_output +3429,1905631,"TERMINAL",0,0,"2",,terminal_output +3430,1906599,"TERMINAL",0,0,"3",,terminal_output +3431,1907630,"TERMINAL",0,0,"4",,terminal_output +3432,1908705,"TERMINAL",0,0,"5",,terminal_output +3433,1909729,"TERMINAL",0,0,"6",,terminal_output +3434,1910066,"genie.py",7846,0,"",python,selection_command +3435,1910544,"genie.py",7847,0,"\n ",python,content +3436,1910756,"TERMINAL",0,0,"7",,terminal_output +3437,1911455,"genie.py",7856,0,"j",python,content +3438,1911456,"genie.py",7857,0,"",python,selection_keyboard +3439,1911477,"genie.py",7857,0,"a",python,content +3440,1911478,"genie.py",7858,0,"",python,selection_keyboard +3441,1911670,"genie.py",7858,0,"x",python,content +3442,1911671,"genie.py",7859,0,"",python,selection_keyboard +3443,1911775,"genie.py",7859,0,".",python,content +3444,1911775,"genie.py",7860,0,"",python,selection_keyboard 
+3445,1911826,"TERMINAL",0,0,"8",,terminal_output +3446,1912120,"genie.py",7860,0,"d",python,content +3447,1912121,"genie.py",7861,0,"",python,selection_keyboard +3448,1912270,"genie.py",7861,0,"e",python,content +3449,1912271,"genie.py",7862,0,"",python,selection_keyboard +3450,1912374,"genie.py",7862,0,"b",python,content +3451,1912375,"genie.py",7863,0,"",python,selection_keyboard +3452,1912478,"genie.py",7863,0,"u",python,content +3453,1912478,"genie.py",7864,0,"",python,selection_keyboard +3454,1912586,"genie.py",7864,0,"g",python,content +3455,1912587,"genie.py",7865,0,"",python,selection_keyboard +3456,1912736,"genie.py",7865,0,".",python,content +3457,1912736,"genie.py",7866,0,"",python,selection_keyboard +3458,1912868,"TERMINAL",0,0,"9",,terminal_output +3459,1912939,"genie.py",7866,0,"b",python,content +3460,1912940,"genie.py",7867,0,"",python,selection_keyboard +3461,1913007,"genie.py",7867,0,"r",python,content +3462,1913007,"genie.py",7868,0,"",python,selection_keyboard +3463,1913168,"genie.py",7868,0,"e",python,content +3464,1913168,"genie.py",7869,0,"",python,selection_keyboard +3465,1913318,"genie.py",7869,0,"a",python,content +3466,1913319,"genie.py",7870,0,"",python,selection_keyboard +3467,1913441,"genie.py",7870,0,"k",python,content +3468,1913442,"genie.py",7871,0,"",python,selection_keyboard +3469,1913705,"genie.py",7866,5,"breakpoint",python,content +3470,1913901,"TERMINAL",0,0,"30",,terminal_output +3471,1914403,"genie.py",7876,0,"()",python,content +3472,1914404,"genie.py",7877,0,"",python,selection_keyboard +3473,1914472,"genie.py",7877,1,")",python,content +3474,1914472,"genie.py",7878,0,"",python,selection_keyboard +3475,1914532,"genie.py",7877,0,"",python,selection_command +3476,1914927,"TERMINAL",0,0,"1",,terminal_output +3477,1915944,"TERMINAL",0,0,"sh scripts_horeka/overfit_sample_tiny/sample.sh",,terminal_output +3478,1915962,"TERMINAL",0,0,"2",,terminal_output +3479,1916300,"TERMINAL",0,0,"\r\n[?2004l\r",,terminal_output +3480,1916445,"TERMINAL",0,0,"Sampling from checkpoint: /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/3309699/genie_1751384516_74500/\r\n",,terminal_output +3481,1917049,"TERMINAL",0,0,"3",,terminal_output +3482,1918028,"TERMINAL",0,0,"4",,terminal_output +3483,1919072,"TERMINAL",0,0,"5",,terminal_output +3484,1920111,"TERMINAL",0,0,"6",,terminal_output +3485,1920335,"TERMINAL",0,0,"2025-07-03 16:47:37.238103: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +3486,1921198,"TERMINAL",0,0,"8",,terminal_output +3487,1922222,"TERMINAL",0,0,"9",,terminal_output +3488,1923251,"TERMINAL",0,0,"40",,terminal_output +3489,1923493,"TERMINAL",0,0,"watch",,terminal_focus +3490,1923947,"TERMINAL",0,0,"[?1049l\r[?1l>]0;tum_cte0515@hkn1993:~/Projects/jafar]633;D;0",,terminal_output +3491,1924776,"TERMINAL",0,0,"2025-07-03 16:47:41.632949: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. 
Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +3492,1925058,"TERMINAL",0,0,"queue",,terminal_command +3493,1925110,"TERMINAL",0,0,"]633;E;2025-07-03 16:47:41 queue;70fb23be-3987-4c7c-890b-2d8e1f267fe1]633;C",,terminal_output +3494,1925187,"TERMINAL",0,0,"[?1049h(B[?7hEvery 1.0s: squeue --mehkn1993.localdomain: Thu Jul 3 16:47:41 2025JOBID PARTITION NAME USER ST\tTIME NODES NODELIST(REASON)3315644 accelerat interact tum_cte0 R30:35\t 1 hkn0704",,terminal_output +3495,1926127,"TERMINAL",0,0,"[?1049l\r[?1l>]0;tum_cte0515@hkn1993:~/Projects/jafar]633;D;0",,terminal_output +3496,1933906,"TERMINAL",0,0,"2025-07-03 16:47:50.804416: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +3497,1941989,"TERMINAL",0,0,"2025-07-03 16:47:58.782158: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +3498,1948642,"TERMINAL",0,0,"2025-07-03 16:48:05.483272: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +3499,1955441,"TERMINAL",0,0,"2025-07-03 16:48:12.300212: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +3500,1958897,"TERMINAL",0,0,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/orbax/checkpoint/_src/serialization/type_handlers.py:1251: UserWarning: Sharding info not provided when restoring. Populating sharding info from sharding file. Please note restoration time will be slightly increased due to reading from file. Note also that this option is unsafe when restoring on a different topology than the checkpoint was saved with.\r\n warnings.warn(\r\n",,terminal_output +3501,1967074,"TERMINAL",0,0,"2025-07-03 16:48:23.975684: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +3502,1970354,"TERMINAL",0,0,"2025-07-03 16:48:27.194147: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. 
Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +3503,1978955,"TERMINAL",0,0,"2025-07-03 16:48:35.847392: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +3504,1980726,"TERMINAL",0,0,"token_idxs shape: (1, 1, 920)\r\ntoken_idxs shape: (1, 6, 920)\r\n",,terminal_output +3505,1980869,"TERMINAL",0,0,"action_tokens.shape: (1, 5, 1, 32)\r\n",,terminal_output +3506,1981040,"TERMINAL",0,0,"jax.errors.SimplifiedTraceback: For simplicity, JAX has removed its internal frames from the traceback of the following exception. Set JAX_TRACEBACK_FILTERING=off to include these.\r\n\r\nThe above exception was the direct cause of the following exception:\r\n\r\nTraceback (most recent call last):\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/sample.py"", line 158, in \r\n vid = _autoreg_sample_mihir(rng, video_batch, action_batch)\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/sample.py"", line 118, in _autoreg_sample_mihir\r\n new_frames = genie.apply(\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/genie.py"", line 154, in sample_mihir\r\n final_carry, _ = loop_fn(init_carry, jnp.arange(steps))\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/flax/core/axes_scan.py"", line 165, in scan_fn\r\n _, out_pvals, _ = pe.trace_to_jaxpr_nounits(f_flat, in_pvals)\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/flax/core/axes_scan.py"", line 135, in body_fn\r\n broadcast_out, c, ys = fn(broadcast_in, c, *xs)\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/genie.py"", line 239, in __call__\r\n jnp.expand_dims(mask),\r\nTypeError: expand_dims() missing 1 required positional argument: 'axis'\r\n",,terminal_output +3507,1982437,"TERMINAL",0,0,"]0;tum_cte0515@hkn0704:~/Projects/jafar[?2004h(jafar) [tum_cte0515@hkn0704 jafar]$ ",,terminal_output +3508,2023374,"genie.py",0,0,"",python,tab +3509,2023375,"genie.py",7847,0,"",python,selection_mouse +3510,2023448,"genie.py",7846,0,"",python,selection_command +3511,2025295,"genie.py",7764,0,"",python,selection_mouse +3512,2026012,"genie.py",7754,0,"",python,selection_mouse +3513,2026814,"genie.py",7753,0,"",python,selection_command +3514,2026943,"genie.py",7752,0,"",python,selection_command +3515,2027428,"genie.py",7751,0,"",python,selection_command +3516,2027794,"genie.py",7754,0,"",python,selection_command +3517,2028392,"genie.py",7753,1,"",python,content +3518,2028507,"genie.py",7752,0,"",python,selection_command +3519,2028712,"genie.py",7751,0,"",python,selection_command +3520,2029618,"genie.py",7751,2,"",python,content +3521,2029905,"genie.py",7751,1,"",python,content +3522,2030371,"genie.py",7751,11,"",python,content +3523,2031628,"genie.py",7751,1,"",python,content +3524,2032044,"genie.py",7752,0,"",python,selection_command +3525,2032234,"genie.py",7753,0,"",python,selection_command +3526,2032323,"genie.py",7754,0,"",python,selection_command +3527,2032426,"genie.py",7755,0,"",python,selection_command +3528,2032565,"genie.py",7756,0,"",python,selection_command 
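Note: this run dies at genie.py line 239 with `TypeError: expand_dims() missing 1 required positional argument: 'axis'`, because jnp.expand_dims, unlike some NumPy helpers, has no default axis. The fix is an explicit axis (or the equivalent None-index):

    import jax.numpy as jnp

    mask = jnp.zeros((1, 6, 920), bool)
    mask4 = jnp.expand_dims(mask, axis=-1)   # (1, 6, 920, 1); axis must be given
    # equivalent: mask[..., None]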
+3529,2032703,"genie.py",7756,1,"",python,content +3530,2032703,"genie.py",7755,0,"",python,selection_command +3531,2033266,"genie.py",7755,1,"",python,content +3532,2033278,"genie.py",7754,0,"",python,selection_command +3533,2033782,"genie.py",7755,0,"",python,selection_command +3534,2034261,"genie.py",7754,0,"",python,selection_command +3535,2035301,"genie.py",7755,0,"",python,selection_command +3536,2035415,"genie.py",7755,0,",",python,content +3537,2035415,"genie.py",7756,0,"",python,selection_keyboard +3538,2035551,"genie.py",7755,0,"",python,selection_command +3539,2037347,"TERMINAL",0,0,"srun",,terminal_focus +3540,2037608,"TERMINAL",0,0,"sh scripts_horeka/overfit_sample_tiny/sample.sh",,terminal_output +3541,2038787,"TERMINAL",0,0,"\r\n[?2004l\r",,terminal_output +3542,2038840,"TERMINAL",0,0,"Sampling from checkpoint: /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/3309699/genie_1751384516_74500/\r\n",,terminal_output +3543,2041826,"TERMINAL",0,0,"2025-07-03 16:49:38.680731: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +3544,2042356,"genie.py",0,0,"",python,tab +3545,2042357,"genie.py",7797,0,"",python,selection_mouse +3546,2042435,"genie.py",7796,0,"",python,selection_command +3547,2045943,"TERMINAL",0,0,"2025-07-03 16:49:42.840510: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +3548,2055108,"TERMINAL",0,0,"2025-07-03 16:49:52.009886: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +3549,2063451,"TERMINAL",0,0,"2025-07-03 16:50:00.335050: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +3550,2070088,"TERMINAL",0,0,"2025-07-03 16:50:06.935147: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +3551,2076542,"TERMINAL",0,0,"2025-07-03 16:50:13.380208: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. 
Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +3552,2080021,"TERMINAL",0,0,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/orbax/checkpoint/_src/serialization/type_handlers.py:1251: UserWarning: Sharding info not provided when restoring. Populating sharding info from sharding file. Please note restoration time will be slightly increased due to reading from file. Note also that this option is unsafe when restoring on a different topology than the checkpoint was saved with.\r\n warnings.warn(\r\n",,terminal_output +3553,2088174,"TERMINAL",0,0,"2025-07-03 16:50:25.072698: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +3554,2091108,"TERMINAL",0,0,"2025-07-03 16:50:27.991875: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +3555,2099171,"TERMINAL",0,0,"2025-07-03 16:50:35.975863: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +3556,2100615,"TERMINAL",0,0,"token_idxs shape: (1, 1, 920)\r\n",,terminal_output +3557,2100723,"TERMINAL",0,0,"token_idxs shape: (1, 6, 920)\r\n",,terminal_output +3558,2100869,"TERMINAL",0,0,"action_tokens.shape: (1, 5, 1, 32)\r\n",,terminal_output +3559,2101196,"TERMINAL",0,0,"Traceback (most recent call last):\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/jax/_src/lax/lax.py"", line 218, in _broadcast_shapes_uncached\r\n return _try_broadcast_shapes(*rank_promoted_shapes, name='broadcast_shapes')\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/jax/_src/lax/lax.py"", line 135, in _try_broadcast_shapes\r\n raise TypeError(f'{name} got incompatible shapes for broadcasting: '\r\nTypeError: broadcast_shapes got incompatible shapes for broadcasting: (1, 1, 6, 920), (1, 1, 1, 128), (1, 6, 920, 128).\r\n\r\nThe above exception was the direct cause of the following exception:\r\n\r\nTraceback (most recent call last):\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/jax/_src/lax/lax.py"", line 196, in broadcast_shapes\r\n return _broadcast_shapes_cached(*shapes)\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/jax/_src/util.py"", line 294, in wrapper\r\n return cached(config.trace_context() if trace_context_in_key else _ignore(),\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/jax/_src/util.py"", line 288, in cached\r\n return f(*args, **kwargs)\r\n File 
""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/jax/_src/lax/lax.py"", line 202, in _broadcast_shapes_cached\r\n return _broadcast_shapes_uncached(*shapes)\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/jax/_src/lax/lax.py"", line 221, in _broadcast_shapes_uncached\r\n raise ValueError(f""Incompatible shapes for broadcasting: shapes={list(shapes)}"") from err\r\nValueError: Incompatible shapes for broadcasting: shapes=[(1, 6, 920), (1, 1, 128), (1, 6, 920, 128)]\r\n\r\nDuring handling of the above exception, another exception occurred:\r\n\r\nTraceback (most recent call last):\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/jax/_src/lax/lax.py"", line 218, in _broadcast_shapes_uncached\r\n return _try_broadcast_shapes(*rank_promoted_shapes, name='broadcast_shapes')\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/jax/_src/lax/lax.py"", line 135, in _try_broadcast_shapes\r\n raise TypeError(f'{name} got incompatible shapes for broadcasting: '\r\nTypeError: broadcast_shapes got incompatible shapes for broadcasting: (1, 1, 6, 920), (1, 1, 1, 128), (1, 6, 920, 128).\r\n\r\nThe above exception was the direct cause of the following exception:\r\n\r\njax.errors.SimplifiedTraceback: For simplicity, JAX has removed its internal frames from the traceback of the following exception. Set JAX_TRACEBACK_FILTERING=off to include these.\r\n\r\nThe above exception was the direct cause of the following exception:\r\n\r\nTraceback (most recent call last):\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/sample.py"", line 158, in \r\n vid = _autoreg_sample_mihir(rng, video_batch, action_batch)\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/sample.py"", line 118, in _autoreg_sample_mihir\r\n new_frames = genie.apply(\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/genie.py"", line 154, in sample_mihir\r\n final_carry, _ = loop_fn(init_carry, jnp.arange(steps))\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/flax/core/axes_scan.py"", line 165, in scan_fn\r\n _, out_pvals, _ = pe.trace_to_jaxpr_nounits(f_flat, in_pvals)\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/flax/core/axes_scan.py"", line 135, in body_fn\r\n broadcast_out, c, ys = fn(broadcast_in, c, *xs)\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/genie.py"", line 238, in __call__\r\n curr_masked_frame = jnp.where(\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/jax/_src/numpy/lax_numpy.py"", line 2850, in where\r\n return util._where(condition, x, y)\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/jax/_src/numpy/util.py"", line 311, in _where\r\n condition, x_arr, y_arr = _broadcast_arrays(condition, x, y)\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/jax/_src/numpy/util.py"", line 264, in _broadcast_arrays\r\n result_shape = lax.broadcast_shapes(*shapes)\r\nValueError: Incompatible shapes for broadcasting: shapes=[(1, 6, 920), (1, 1, 128), (1, 6, 920, 
128)]\r\n",,terminal_output +3560,2102554,"TERMINAL",0,0,"]0;tum_cte0515@hkn0704:~/Projects/jafar[?2004h(jafar) [tum_cte0515@hkn0704 jafar]$ ",,terminal_output +3561,2182372,"genie.py",0,0,"",python,tab +3562,2182374,"genie.py",7756,0,"",python,selection_mouse +3563,2182424,"genie.py",7755,0,"",python,selection_command +3564,2183414,"genie.py",7756,0,"",python,selection_command +3565,2183926,"genie.py",7756,0," ",python,content +3566,2183927,"genie.py",7757,0,"",python,selection_keyboard +3567,2184062,"genie.py",7757,0,"#",python,content +3568,2184063,"genie.py",7758,0,"",python,selection_keyboard +3569,2184136,"genie.py",7758,0," ",python,content +3570,2184137,"genie.py",7759,0,"",python,selection_keyboard +3571,2184675,"genie.py",7759,0,"s",python,content +3572,2184676,"genie.py",7760,0,"",python,selection_keyboard +3573,2184721,"genie.py",7760,0,"h",python,content +3574,2184721,"genie.py",7761,0,"",python,selection_keyboard +3575,2184847,"genie.py",7761,0,"a",python,content +3576,2184847,"genie.py",7762,0,"",python,selection_keyboard +3577,2184942,"genie.py",7762,0,"p",python,content +3578,2184943,"genie.py",7763,0,"",python,selection_keyboard +3579,2185058,"genie.py",7763,0,"e",python,content +3580,2185059,"genie.py",7764,0,"",python,selection_keyboard +3581,2185830,"genie.py",7764,0," ",python,content +3582,2185831,"genie.py",7765,0,"",python,selection_keyboard +3583,2190132,"genie.py",7765,0,"()",python,content +3584,2190133,"genie.py",7766,0,"",python,selection_keyboard +3585,2190446,"genie.py",7766,0,"B",python,content +3586,2190447,"genie.py",7767,0,"",python,selection_keyboard +3587,2190818,"genie.py",7767,0,",",python,content +3588,2190819,"genie.py",7768,0,"",python,selection_keyboard +3589,2190867,"genie.py",7768,0," ",python,content +3590,2190868,"genie.py",7769,0,"",python,selection_keyboard +3591,2192739,"genie.py",7769,0,"S",python,content +3592,2192740,"genie.py",7770,0,"",python,selection_keyboard +3593,2192911,"genie.py",7770,0,",",python,content +3594,2192912,"genie.py",7771,0,"",python,selection_keyboard +3595,2193042,"genie.py",7771,0," ",python,content +3596,2193043,"genie.py",7772,0,"",python,selection_keyboard +3597,2193332,"genie.py",7772,0,"N",python,content +3598,2193333,"genie.py",7773,0,"",python,selection_keyboard +3599,2195194,"genie.py",7815,0,"",python,selection_mouse +3600,2196328,"genie.py",7815,0,"#",python,content +3601,2196329,"genie.py",7816,0,"",python,selection_keyboard +3602,2196408,"genie.py",7816,0," ",python,content +3603,2196409,"genie.py",7817,0,"",python,selection_keyboard +3604,2197208,"genie.py",7817,0,"()",python,content +3605,2197209,"genie.py",7818,0,"",python,selection_keyboard +3606,2197523,"genie.py",7818,0,"S",python,content +3607,2197524,"genie.py",7819,0,"",python,selection_keyboard +3608,2198678,"genie.py",7818,1,"",python,content +3609,2200385,"genie.py",7818,0,"B",python,content +3610,2200386,"genie.py",7819,0,"",python,selection_keyboard +3611,2201166,"genie.py",7819,0,",",python,content +3612,2201166,"genie.py",7820,0,"",python,selection_keyboard +3613,2201283,"genie.py",7820,0," ",python,content +3614,2201284,"genie.py",7821,0,"",python,selection_keyboard +3615,2202369,"genie.py",7821,0,"1",python,content +3616,2202370,"genie.py",7822,0,"",python,selection_keyboard +3617,2202512,"genie.py",7822,0,",",python,content +3618,2202513,"genie.py",7823,0,"",python,selection_keyboard +3619,2202565,"genie.py",7823,0," ",python,content +3620,2202566,"genie.py",7824,0,"",python,selection_keyboard 
+3621,2206040,"genie.py",7824,0,"L",python,content +3622,2206041,"genie.py",7825,0,"",python,selection_keyboard +3623,2211907,"genie.py",7824,1,"",python,content +3624,2212983,"genie.py",7824,0,"D",python,content +3625,2212983,"genie.py",7825,0,"",python,selection_keyboard +3626,2214392,"genie.py",7849,0,"",python,selection_mouse +3627,2215273,"genie.py",7849,0," ",python,content +3628,2215274,"genie.py",7850,0,"",python,selection_keyboard +3629,2216133,"genie.py",7850,0,"()",python,content +3630,2216134,"genie.py",7851,0,"",python,selection_keyboard +3631,2216785,"genie.py",7850,0,"",python,selection_command +3632,2217614,"genie.py",7850,0,"#",python,content +3633,2217615,"genie.py",7851,0,"",python,selection_keyboard +3634,2217912,"genie.py",7851,0," ",python,content +3635,2217913,"genie.py",7852,0,"",python,selection_keyboard +3636,2218373,"genie.py",7851,0,"",python,selection_command +3637,2218710,"genie.py",7852,0,"",python,selection_command +3638,2218840,"genie.py",7853,0,"",python,selection_command +3639,2220454,"genie.py",7853,0,"B",python,content +3640,2220455,"genie.py",7854,0,"",python,selection_keyboard +3641,2220806,"genie.py",7854,0,",",python,content +3642,2220807,"genie.py",7855,0,"",python,selection_keyboard +3643,2220896,"genie.py",7855,0," ",python,content +3644,2220897,"genie.py",7856,0,"",python,selection_keyboard +3645,2221360,"genie.py",7856,0,"S",python,content +3646,2221360,"genie.py",7857,0,"",python,selection_keyboard +3647,2221576,"genie.py",7857,0,",",python,content +3648,2221577,"genie.py",7858,0,"",python,selection_keyboard +3649,2221731,"genie.py",7858,0," ",python,content +3650,2221732,"genie.py",7859,0,"",python,selection_keyboard +3651,2223019,"genie.py",7859,0,"N",python,content +3652,2223020,"genie.py",7860,0,"",python,selection_keyboard +3653,2223278,"genie.py",7860,0,",",python,content +3654,2223278,"genie.py",7861,0,"",python,selection_keyboard +3655,2223344,"genie.py",7861,0," ",python,content +3656,2223345,"genie.py",7862,0,"",python,selection_keyboard +3657,2224249,"genie.py",7862,0,"D",python,content +3658,2224250,"genie.py",7863,0,"",python,selection_keyboard +3659,2224929,"genie.py",7862,0,"",python,selection_command +3660,2225404,"genie.py",7864,0,"",python,selection_mouse +3661,2225412,"genie.py",7863,0,"",python,selection_command +3662,2227154,"genie.py",7874,0,"",python,selection_mouse +3663,2227169,"genie.py",7873,0,"",python,selection_command +3664,2227457,"genie.py",7874,0,"",python,selection_mouse +3665,2227460,"genie.py",7873,0,"",python,selection_command +3666,2227660,"genie.py",7873,1,")",python,selection_mouse +3667,2227661,"genie.py",7872,1," ",python,selection_mouse +3668,2227661,"genie.py",7869,4," ",python,selection_mouse +3669,2227662,"genie.py",7874,0,"",python,selection_command +3670,2227740,"genie.py",7829,45," vid_embed, # (B, S, N, D)\n )",python,selection_mouse +3671,2227740,"genie.py",7828,46," vid_embed, # (B, S, N, D)\n )",python,selection_mouse +3672,2227741,"genie.py",7775,99," self.dynamics.mask_token[0],# (B, 1, D)\n vid_embed, # (B, S, N, D)\n )",python,selection_mouse +3673,2227771,"genie.py",7739,135," mask, # shape (B, S, N)\n self.dynamics.mask_token[0],# (B, 1, D)\n vid_embed, # (B, S, N, D)\n )",python,selection_mouse +3674,2227860,"genie.py",7700,174," curr_masked_frame = jnp.where(\n mask, # shape (B, S, N)\n self.dynamics.mask_token[0],# (B, 1, D)\n vid_embed, # (B, S, N, D)\n )",python,selection_mouse +3675,2229142,"genie.py",7708,0,"",python,selection_command 
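The selection above captures the failing call: `jnp.where` receives a condition of shape (B, S, N) = (1, 6, 920), a mask token of shape (1, 1, D) = (1, 1, 128), and embeddings of shape (B, S, N, D) = (1, 6, 920, 128), which are not mutually broadcastable because the condition lacks the trailing feature axis. A minimal sketch of the failure and of the fix applied next in the recording (adding a trailing axis via `mask[..., None]`); the shapes come from the traceback, the zero arrays are illustrative stand-ins:

```python
import jax.numpy as jnp

B, S, N, D = 1, 6, 920, 128             # shapes reported in the traceback
mask = jnp.zeros((B, S, N), dtype=bool)
mask_token = jnp.zeros((1, 1, D))       # stand-in for self.dynamics.mask_token[0]
vid_embed = jnp.zeros((B, S, N, D))

# Fails: (B, S, N), (1, 1, D) and (B, S, N, D) cannot be broadcast together.
# jnp.where(mask, mask_token, vid_embed)

# Fix from the recording: give the mask a trailing axis so it broadcasts
# as (B, S, N, 1) against the (B, S, N, D) embeddings.
mask_expanded = mask[..., None]
curr_masked_frame = jnp.where(mask_expanded, mask_token, vid_embed)
assert curr_masked_frame.shape == (B, S, N, D)
```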
+3676,2275648,"genie.py",7700,0,"",python,selection_command +3677,2277635,"genie.py",7700,0," # Mask vid_embed: set to mask_token where mask==1, else keep vid_embed\n",python,content +3678,2277906,"genie.py",7779,0," # mask: (B, S, N), vid_embed: (B, S, N, D), mask_token: (D,)\n",python,content +3679,2278154,"genie.py",7848,0," mask_token = self.dynamics.mask_token[0] # (D,)\n",python,content +3680,2278354,"genie.py",7905,0," # Expand mask to (B, S, N, 1) for broadcasting\n",python,content +3681,2278526,"genie.py",7960,0," mask_expanded = mask[..., None]\n",python,content +3682,2278956,"genie.py",8000,0," curr_masked_frame = jnp.where(mask_expanded, mask_token, vid_embed)\n",python,content +3683,2278960,"genie.py",8076,175,"",python,content +3684,2307381,"TERMINAL",0,0,"sh scripts_horeka/overfit_sample_tiny/sample.sh",,terminal_output +3685,2307557,"TERMINAL",0,0,"\r\n[?2004l\r",,terminal_output +3686,2307667,"TERMINAL",0,0,"Sampling from checkpoint: /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/3309699/genie_1751384516_74500/\r\n",,terminal_output +3687,2310630,"TERMINAL",0,0,"2025-07-03 16:54:07.482783: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +3688,2314831,"TERMINAL",0,0,"2025-07-03 16:54:11.644584: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +3689,2323681,"TERMINAL",0,0,"2025-07-03 16:54:20.581487: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +3690,2331623,"TERMINAL",0,0,"2025-07-03 16:54:28.481365: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +3691,2338242,"TERMINAL",0,0,"2025-07-03 16:54:35.116649: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +3692,2344832,"TERMINAL",0,0,"2025-07-03 16:54:41.651009: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +3693,2348316,"TERMINAL",0,0,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/orbax/checkpoint/_src/serialization/type_handlers.py:1251: UserWarning: Sharding info not provided when restoring. 
Populating sharding info from sharding file. Please note restoration time will be slightly increased due to reading from file. Note also that this option is unsafe when restoring on a different topology than the checkpoint was saved with.\r\n warnings.warn(\r\n",,terminal_output +3694,2356609,"TERMINAL",0,0,"2025-07-03 16:54:53.409803: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +3695,2359936,"TERMINAL",0,0,"2025-07-03 16:54:56.835978: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +3696,2367986,"TERMINAL",0,0,"2025-07-03 16:55:04.845843: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +3697,2369627,"TERMINAL",0,0,"token_idxs shape: (1, 1, 920)\r\ntoken_idxs shape: (1, 6, 920)\r\n",,terminal_output +3698,2369807,"TERMINAL",0,0,"action_tokens.shape: (1, 5, 1, 32)\r\n",,terminal_output +3699,2370334,"TERMINAL",0,0,"jax.errors.SimplifiedTraceback: For simplicity, JAX has removed its internal frames from the traceback of the following exception. Set JAX_TRACEBACK_FILTERING=off to include these.\r\n\r\nThe above exception was the direct cause of the following exception:\r\n\r\nTraceback (most recent call last):\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/sample.py"", line 158, in \r\n vid = _autoreg_sample_mihir(rng, video_batch, action_batch)\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/sample.py"", line 118, in _autoreg_sample_mihir\r\n new_frames = genie.apply(\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/genie.py"", line 154, in sample_mihir\r\n final_carry, _ = loop_fn(init_carry, jnp.arange(steps))\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/flax/core/axes_scan.py"", line 165, in scan_fn\r\n _, out_pvals, _ = pe.trace_to_jaxpr_nounits(f_flat, in_pvals)\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/flax/core/axes_scan.py"", line 135, in body_fn\r\n broadcast_out, c, ys = fn(broadcast_in, c, *xs)\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/genie.py"", line 245, in __call__\r\n vid_embed = vid_embed.at[:, -1].set(curr_masked_frame)\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/jax/_src/numpy/array_methods.py"", line 812, in set\r\n return scatter._scatter_update(self.array, self.index, values, lax.scatter,\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/jax/_src/ops/scatter.py"", line 92, in _scatter_update\r\n return internal_scatter(x, y, dynamic_idx)\r\n File 
""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/jax/_src/ops/scatter.py"", line 127, in _scatter_impl\r\n y = jnp.broadcast_to(y, tuple(indexer.slice_shape))\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/jax/_src/numpy/lax_numpy.py"", line 3138, in broadcast_to\r\n return util._broadcast_to(array, shape, sharding=out_sharding)\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/jax/_src/numpy/util.py"", line 281, in _broadcast_to\r\n raise ValueError(f""Cannot broadcast to shape with fewer dimensions: {arr_shape=} {shape=}"")\r\nValueError: Cannot broadcast to shape with fewer dimensions: arr_shape=(1, 6, 920, 128) shape=(1, 920, 128)\r\n",,terminal_output +3700,2371663,"TERMINAL",0,0,"]0;tum_cte0515@hkn0704:~/Projects/jafar[?2004h(jafar) [tum_cte0515@hkn0704 jafar]$ ",,terminal_output +3701,2454607,"genie.py",0,0,"",python,tab +3702,2454608,"genie.py",8106,0,"",python,selection_mouse +3703,2454713,"genie.py",8105,0,"",python,selection_command +3704,2456103,"genie.py",8076,31,"",python,content +3705,2456149,"genie.py",8084,0,"",python,selection_command +3706,2456160,"genie.py",8008,0,"",python,selection_command +3707,2456547,"genie.py",7968,0,"",python,selection_command +3708,2456861,"genie.py",8008,0,"",python,selection_command +3709,2457198,"genie.py",8075,0,"\n jax.debug.breakpoint()",python,content +3710,2457232,"genie.py",8084,0,"",python,selection_command +3711,2457709,"genie.py",8008,0,"",python,selection_command +3712,2457892,"genie.py",7968,0,"",python,selection_command +3713,2458077,"genie.py",7999,0,"\n jax.debug.breakpoint()",python,content +3714,2458106,"genie.py",8008,0,"",python,selection_command +3715,2459651,"TERMINAL",0,0,"sh scripts_horeka/overfit_sample_tiny/sample.sh",,terminal_output +3716,2459785,"TERMINAL",0,0,"\r\n[?2004l\r",,terminal_output +3717,2459917,"TERMINAL",0,0,"Sampling from checkpoint: /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/3309699/genie_1751384516_74500/\r\n",,terminal_output +3718,2462802,"TERMINAL",0,0,"2025-07-03 16:56:39.672643: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +3719,2467202,"TERMINAL",0,0,"2025-07-03 16:56:44.069453: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +3720,2476725,"TERMINAL",0,0,"2025-07-03 16:56:53.624310: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +3721,2484517,"TERMINAL",0,0,"2025-07-03 16:57:01.417957: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. 
Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +3722,2491372,"TERMINAL",0,0,"2025-07-03 16:57:08.201775: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +3723,2497923,"TERMINAL",0,0,"2025-07-03 16:57:14.726795: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +3724,2501341,"TERMINAL",0,0,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/orbax/checkpoint/_src/serialization/type_handlers.py:1251: UserWarning: Sharding info not provided when restoring. Populating sharding info from sharding file. Please note restoration time will be slightly increased due to reading from file. Note also that this option is unsafe when restoring on a different topology than the checkpoint was saved with.\r\n warnings.warn(\r\n",,terminal_output +3725,2509425,"TERMINAL",0,0,"2025-07-03 16:57:26.325757: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +3726,2512376,"TERMINAL",0,0,"2025-07-03 16:57:29.260328: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +3727,2520451,"TERMINAL",0,0,"2025-07-03 16:57:37.242945: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +3728,2521901,"TERMINAL",0,0,"token_idxs shape: (1, 1, 920)\r\ntoken_idxs shape: (1, 6, 920)\r\n",,terminal_output +3729,2522074,"TERMINAL",0,0,"action_tokens.shape: (1, 5, 1, 32)\r\n",,terminal_output +3730,2522529,"TERMINAL",0,0,"jax.errors.SimplifiedTraceback: For simplicity, JAX has removed its internal frames from the traceback of the following exception. 
Set JAX_TRACEBACK_FILTERING=off to include these.\r\n\r\nThe above exception was the direct cause of the following exception:\r\n\r\nTraceback (most recent call last):\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/sample.py"", line 158, in \r\n vid = _autoreg_sample_mihir(rng, video_batch, action_batch)\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/sample.py"", line 118, in _autoreg_sample_mihir\r\n new_frames = genie.apply(\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/genie.py"", line 154, in sample_mihir\r\n final_carry, _ = loop_fn(init_carry, jnp.arange(steps))\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/flax/core/axes_scan.py"", line 165, in scan_fn\r\n _, out_pvals, _ = pe.trace_to_jaxpr_nounits(f_flat, in_pvals)\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/flax/core/axes_scan.py"", line 135, in body_fn\r\n broadcast_out, c, ys = fn(broadcast_in, c, *xs)\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/genie.py"", line 246, in __call__\r\n vid_embed = vid_embed.at[:, -1].set(curr_masked_frame)\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/jax/_src/numpy/array_methods.py"", line 812, in set\r\n return scatter._scatter_update(self.array, self.index, values, lax.scatter,\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/jax/_src/ops/scatter.py"", line 92, in _scatter_update\r\n return internal_scatter(x, y, dynamic_idx)\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/jax/_src/ops/scatter.py"", line 127, in _scatter_impl\r\n y = jnp.broadcast_to(y, tuple(indexer.slice_shape))\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/jax/_src/numpy/lax_numpy.py"", line 3138, in broadcast_to\r\n return util._broadcast_to(array, shape, sharding=out_sharding)\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/jax/_src/numpy/util.py"", line 281, in _broadcast_to\r\n raise ValueError(f""Cannot broadcast to shape with fewer dimensions: {arr_shape=} {shape=}"")\r\nValueError: Cannot broadcast to shape with fewer dimensions: arr_shape=(1, 6, 920, 128) shape=(1, 920, 128)\r\n",,terminal_output +3731,2523934,"TERMINAL",0,0,"]0;tum_cte0515@hkn0704:~/Projects/jafar[?2004h(jafar) [tum_cte0515@hkn0704 jafar]$ ",,terminal_output +3732,2561431,"genie.py",0,0,"",python,tab +3733,2561432,"genie.py",8185,0,"",python,selection_mouse +3734,2562028,"genie.py",8176,0,"",python,selection_mouse +3735,2563322,"genie.py",8146,0,"",python,selection_command +3736,2564099,"genie.py",8146,0,"#",python,content +3737,2564100,"genie.py",8147,0,"",python,selection_keyboard +3738,2564160,"genie.py",8147,0," ",python,content +3739,2564161,"genie.py",8148,0,"",python,selection_keyboard +3740,2564392,"genie.py",8147,0,"",python,selection_command +3741,2573697,"genie.py",8040,0,"",python,selection_mouse +3742,2575099,"genie.py",8039,0,"",python,selection_mouse +3743,2575542,"genie.py",8039,18,"",python,content +3744,2576577,"genie.py",8039,0,"v",python,content +3745,2576578,"genie.py",8040,0,"",python,selection_keyboard +3746,2576680,"genie.py",8040,0,"i",python,content 
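The follow-up failure above is a scatter-shape mismatch rather than a broadcasting one: `vid_embed.at[:, -1]` addresses a single frame of shape (B, N, D) = (1, 920, 128), while the masked result now spans the whole sequence, (1, 6, 920, 128). A sketch of both ways out, using the shapes from the log; the recording opts for the second, assigning the masked sequence back to `vid_embed` directly:

```python
import jax.numpy as jnp

B, S, N, D = 1, 6, 920, 128
vid_embed = jnp.zeros((B, S, N, D))
curr_masked_frame = jnp.zeros((B, S, N, D))  # full-sequence result of jnp.where

# Fails: the [:, -1] slot is (B, N, D), so a (B, S, N, D) value cannot fit.
# vid_embed = vid_embed.at[:, -1].set(curr_masked_frame)

# Option 1: scatter only the last frame into the slot.
vid_embed = vid_embed.at[:, -1].set(curr_masked_frame[:, -1])
# Option 2 (what the recording does next): keep the masked sequence wholesale.
vid_embed = curr_masked_frame
```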
+3747,2576681,"genie.py",8041,0,"",python,selection_keyboard +3748,2576832,"genie.py",8041,0,"d",python,content +3749,2576833,"genie.py",8042,0,"",python,selection_keyboard +3750,2577013,"genie.py",8042,0,"_",python,content +3751,2577014,"genie.py",8043,0,"",python,selection_keyboard +3752,2577189,"genie.py",8043,0,"e",python,content +3753,2577190,"genie.py",8044,0,"",python,selection_keyboard +3754,2577295,"genie.py",8044,0,"m",python,content +3755,2577298,"genie.py",8045,0,"",python,selection_keyboard +3756,2577516,"genie.py",8045,0,"b",python,content +3757,2577517,"genie.py",8046,0,"",python,selection_keyboard +3758,2577631,"genie.py",8046,0,"e",python,content +3759,2577632,"genie.py",8047,0,"",python,selection_keyboard +3760,2577681,"genie.py",8047,0,"d",python,content +3761,2577681,"genie.py",8048,0,"",python,selection_keyboard +3762,2577734,"genie.py",8048,0," ",python,content +3763,2577735,"genie.py",8049,0,"",python,selection_keyboard +3764,2578855,"genie.py",8030,0,"",python,selection_mouse +3765,2579353,"genie.py",8098,0,"",python,selection_mouse +3766,2581069,"TERMINAL",0,0,"sh scripts_horeka/overfit_sample_tiny/sample.sh",,terminal_output +3767,2581757,"TERMINAL",0,0,"\r\n[?2004l\r",,terminal_output +3768,2581866,"TERMINAL",0,0,"Sampling from checkpoint: /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/3309699/genie_1751384516_74500/\r\n",,terminal_output +3769,2584877,"TERMINAL",0,0,"2025-07-03 16:58:41.776964: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +3770,2589162,"TERMINAL",0,0,"2025-07-03 16:58:46.021273: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +3771,2598131,"TERMINAL",0,0,"2025-07-03 16:58:55.016452: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +3772,2605958,"TERMINAL",0,0,"2025-07-03 16:59:02.843232: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +3773,2612612,"TERMINAL",0,0,"2025-07-03 16:59:09.434327: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +3774,2619063,"TERMINAL",0,0,"2025-07-03 16:59:15.906501: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. 
Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +3775,2622343,"TERMINAL",0,0,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/orbax/checkpoint/_src/serialization/type_handlers.py:1251: UserWarning: Sharding info not provided when restoring. Populating sharding info from sharding file. Please note restoration time will be slightly increased due to reading from file. Note also that this option is unsafe when restoring on a different topology than the checkpoint was saved with.\r\n warnings.warn(\r\n",,terminal_output +3776,2630477,"TERMINAL",0,0,"2025-07-03 16:59:27.288902: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +3777,2633501,"TERMINAL",0,0,"2025-07-03 16:59:30.401095: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +3778,2641682,"TERMINAL",0,0,"2025-07-03 16:59:38.555904: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +3779,2643432,"TERMINAL",0,0,"token_idxs shape: (1, 1, 920)\r\n",,terminal_output +3780,2643489,"TERMINAL",0,0,"token_idxs shape: (1, 6, 920)\r\n",,terminal_output +3781,2643666,"TERMINAL",0,0,"action_tokens.shape: (1, 5, 1, 32)\r\n",,terminal_output +3782,2645225,"TERMINAL",0,0,"jax.errors.SimplifiedTraceback: For simplicity, JAX has removed its internal frames from the traceback of the following exception. 
Set JAX_TRACEBACK_FILTERING=off to include these.\r\n\r\nThe above exception was the direct cause of the following exception:\r\n\r\nTraceback (most recent call last):\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/sample.py"", line 158, in \r\n vid = _autoreg_sample_mihir(rng, video_batch, action_batch)\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/sample.py"", line 118, in _autoreg_sample_mihir\r\n new_frames = genie.apply(\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/genie.py"", line 154, in sample_mihir\r\n final_carry, _ = loop_fn(init_carry, jnp.arange(steps))\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/flax/core/axes_scan.py"", line 179, in scan_fn\r\n c, ys = lax.scan(\r\nTypeError: scan body function carry input and carry output must have the same pytree structure, but they differ:\r\n\r\nThe input carry component c[1] is a tuple of length 4 but the corresponding component of the carry output is a tuple of length 5, so the lengths do not match.\r\n\r\nRevise the function so that the carry output has the same pytree structure as the carry input.\r\n",,terminal_output +3783,2646714,"TERMINAL",0,0,"]0;tum_cte0515@hkn0704:~/Projects/jafar[?2004h(jafar) [tum_cte0515@hkn0704 jafar]$ ",,terminal_output +3784,2667891,"genie.py",0,0,"",python,tab +3785,2667893,"genie.py",7942,0,"",python,selection_mouse +3786,2668460,"genie.py",8030,0,"",python,selection_mouse +3787,2668929,"genie.py",7999,0,"",python,selection_mouse +3788,2669744,"genie.py",8030,0,"",python,selection_mouse +3789,2670951,"genie.py",7990,0,"",python,selection_command +3790,2672547,"genie.py",7999,0,"",python,selection_command +3791,2672835,"genie.py",7999,0,"\n ",python,content +3792,2673222,"genie.py",8008,0,"p",python,content +3793,2673222,"genie.py",8009,0,"",python,selection_keyboard +3794,2673345,"genie.py",8009,0,"r",python,content +3795,2673346,"genie.py",8010,0,"",python,selection_keyboard +3796,2673468,"genie.py",8010,0,"i",python,content +3797,2673468,"genie.py",8011,0,"",python,selection_keyboard +3798,2673641,"genie.py",8011,0,"n",python,content +3799,2673642,"genie.py",8012,0,"",python,selection_keyboard +3800,2673660,"genie.py",8012,0,"t",python,content +3801,2673660,"genie.py",8013,0,"",python,selection_keyboard +3802,2675399,"genie.py",8008,5,"print",python,content +3803,2677386,"genie.py",8013,0,"()",python,content +3804,2677386,"genie.py",8014,0,"",python,selection_keyboard +3805,2677591,"genie.py",8014,0,"m",python,content +3806,2677591,"genie.py",8015,0,"",python,selection_keyboard +3807,2677680,"genie.py",8015,0,"a",python,content +3808,2677681,"genie.py",8016,0,"",python,selection_keyboard +3809,2677796,"genie.py",8016,0,"s",python,content +3810,2677796,"genie.py",8017,0,"",python,selection_keyboard +3811,2677811,"genie.py",8017,0,"k",python,content +3812,2677811,"genie.py",8018,0,"",python,selection_keyboard +3813,2678759,"genie.py",8019,0,"",python,selection_command +3814,2679801,"genie.py",8018,0,"",python,selection_command +3815,2680121,"genie.py",8018,0,"_",python,content +3816,2680121,"genie.py",8019,0,"",python,selection_keyboard +3817,2680849,"genie.py",8014,5,"mask_expanded",python,content +3818,2681222,"genie.py",8027,0,".",python,content +3819,2681223,"genie.py",8028,0,"",python,selection_keyboard +3820,2681415,"genie.py",8028,0,"s",python,content +3821,2681416,"genie.py",8029,0,"",python,selection_keyboard 
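The new `TypeError` above is unrelated to array shapes: `lax.scan` (reached here through flax's `axes_scan`) requires the carry returned by the body to have the same pytree structure as the carry passed in, and the log reports a 4-tuple going in but a 5-tuple coming out. A minimal standalone sketch of the same class of error, not the `genie.py` loop itself:

```python
import jax
import jax.numpy as jnp

def body(carry, x):
    a, b = carry
    # Bug: returns a 3-tuple although the initial carry is a 2-tuple, so the
    # carry input/output pytree structures differ and scan rejects the body.
    return (a + x, b, x), None

init = (jnp.zeros(()), jnp.zeros(()))
try:
    jax.lax.scan(body, init, jnp.arange(4.0))
except TypeError as e:
    print(e)  # "...carry input and carry output must have the same pytree structure..."
```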
+3822,2681580,"genie.py",8029,0,"h",python,content +3823,2681581,"genie.py",8030,0,"",python,selection_keyboard +3824,2681659,"genie.py",8030,0,"a",python,content +3825,2681659,"genie.py",8031,0,"",python,selection_keyboard +3826,2681762,"genie.py",8031,0,"p",python,content +3827,2681762,"genie.py",8032,0,"",python,selection_keyboard +3828,2681877,"genie.py",8032,0,"e",python,content +3829,2681877,"genie.py",8033,0,"",python,selection_keyboard +3830,2682198,"genie.py",8032,0,"",python,selection_command +3831,2682337,"genie.py",8064,0,"",python,selection_command +3832,2682569,"genie.py",8032,0,"",python,selection_command +3833,2683136,"genie.py",8064,0,"",python,selection_command +3834,2683289,"genie.py",8098,0,"",python,selection_command +3835,2683670,"genie.py",8133,0,"\n print(mask_expanded.shape)",python,content +3836,2683719,"genie.py",8142,0,"",python,selection_command +3837,2684134,"genie.py",8143,0,"",python,selection_command +3838,2684395,"genie.py",8144,0,"",python,selection_command +3839,2684552,"genie.py",8145,0,"",python,selection_command +3840,2684713,"genie.py",8146,0,"",python,selection_command +3841,2684840,"genie.py",8147,0,"",python,selection_command +3842,2685046,"genie.py",8148,0,"",python,selection_command +3843,2685452,"genie.py",8148,13,"",python,content +3844,2687552,"genie.py",8148,0,"m",python,content +3845,2687553,"genie.py",8149,0,"",python,selection_keyboard +3846,2687624,"genie.py",8149,0,"a",python,content +3847,2687625,"genie.py",8150,0,"",python,selection_keyboard +3848,2687695,"genie.py",8150,0,"s",python,content +3849,2687696,"genie.py",8151,0,"",python,selection_keyboard +3850,2687789,"genie.py",8151,0,"k",python,content +3851,2687790,"genie.py",8152,0,"",python,selection_keyboard +3852,2688778,"genie.py",8152,0,"_",python,content +3853,2688779,"genie.py",8153,0,"",python,selection_keyboard +3854,2688983,"genie.py",8153,0,"t",python,content +3855,2688984,"genie.py",8154,0,"",python,selection_keyboard +3856,2689120,"genie.py",8154,0,"o",python,content +3857,2689121,"genie.py",8155,0,"",python,selection_keyboard +3858,2689924,"genie.py",8148,7,"mask_token",python,content +3859,2690498,"genie.py",8157,0,"",python,selection_command +3860,2691596,"genie.py",8134,32,"",python,content +3861,2691670,"genie.py",8142,0,"",python,selection_command +3862,2691695,"genie.py",8074,0,"",python,selection_command +3863,2691839,"genie.py",8043,0,"",python,selection_command +3864,2692220,"genie.py",8008,0,"",python,selection_command +3865,2692676,"genie.py",8034,0,"\n print(mask_token.shape)",python,content +3866,2692725,"genie.py",8043,0,"",python,selection_command +3867,2693533,"genie.py",8075,0,"",python,selection_command +3868,2693675,"genie.py",8106,0,"",python,selection_command +3869,2694586,"genie.py",8165,0,"\n ",python,content +3870,2695245,"genie.py",8174,0,"p",python,content +3871,2695246,"genie.py",8175,0,"",python,selection_keyboard +3872,2695347,"genie.py",8175,0,"r",python,content +3873,2695348,"genie.py",8176,0,"",python,selection_keyboard +3874,2695501,"genie.py",8176,0,"i",python,content +3875,2695502,"genie.py",8177,0,"",python,selection_keyboard +3876,2696019,"genie.py",8174,3,"print",python,content +3877,2697277,"genie.py",8179,0,"()",python,content +3878,2697278,"genie.py",8180,0,"",python,selection_keyboard +3879,2697615,"genie.py",8180,0,"v",python,content +3880,2697616,"genie.py",8181,0,"",python,selection_keyboard +3881,2697693,"genie.py",8181,0,"i",python,content +3882,2697694,"genie.py",8182,0,"",python,selection_keyboard 
+3883,2697844,"genie.py",8182,0,"d",python,content +3884,2697845,"genie.py",8183,0,"",python,selection_keyboard +3885,2698125,"genie.py",8183,0,"_",python,content +3886,2698125,"genie.py",8184,0,"",python,selection_keyboard +3887,2699255,"genie.py",8180,4,"vid_embed",python,content +3888,2699467,"genie.py",8189,0,".",python,content +3889,2699468,"genie.py",8190,0,"",python,selection_keyboard +3890,2699662,"genie.py",8190,0,"s",python,content +3891,2699663,"genie.py",8191,0,"",python,selection_keyboard +3892,2699824,"genie.py",8191,0,"g",python,content +3893,2699825,"genie.py",8192,0,"",python,selection_keyboard +3894,2699939,"genie.py",8192,0,"a",python,content +3895,2699940,"genie.py",8193,0,"",python,selection_keyboard +3896,2700071,"genie.py",8193,0,"p",python,content +3897,2700071,"genie.py",8194,0,"",python,selection_keyboard +3898,2700167,"genie.py",8194,0,"e",python,content +3899,2700168,"genie.py",8195,0,"",python,selection_keyboard +3900,2701315,"genie.py",8190,0,"",python,selection_mouse +3901,2702343,"genie.py",8192,0,"",python,selection_mouse +3902,2702653,"genie.py",8191,1,"",python,content +3903,2703004,"genie.py",8191,0,"h",python,content +3904,2703004,"genie.py",8192,0,"",python,selection_keyboard +3905,2703542,"genie.py",8191,0,"",python,selection_command +3906,2704459,"genie.py",8166,30," print(vid_embed.shape)",python,selection_command +3907,2704646,"genie.py",8098,98," vid_embed = jnp.where(mask_expanded, mask_token, vid_embed)\n print(vid_embed.shape)",python,selection_command +3908,2704802,"genie.py",8067,129," jax.debug.breakpoint()\n vid_embed = jnp.where(mask_expanded, mask_token, vid_embed)\n print(vid_embed.shape)",python,selection_command +3909,2704951,"genie.py",8035,161," print(mask_token.shape)\n jax.debug.breakpoint()\n vid_embed = jnp.where(mask_expanded, mask_token, vid_embed)\n print(vid_embed.shape)",python,selection_command +3910,2705068,"genie.py",8000,196," print(mask_expanded.shape)\n print(mask_token.shape)\n jax.debug.breakpoint()\n vid_embed = jnp.where(mask_expanded, mask_token, vid_embed)\n print(vid_embed.shape)",python,selection_command +3911,2705204,"genie.py",7960,236," mask_expanded = mask[..., None]\n print(mask_expanded.shape)\n print(mask_token.shape)\n jax.debug.breakpoint()\n vid_embed = jnp.where(mask_expanded, mask_token, vid_embed)\n print(vid_embed.shape)",python,selection_command +3912,2705457,"genie.py",8000,196," print(mask_expanded.shape)\n print(mask_token.shape)\n jax.debug.breakpoint()\n vid_embed = jnp.where(mask_expanded, mask_token, vid_embed)\n print(vid_embed.shape)",python,selection_command +3913,2706830,"genie.py",8008,0,"",python,selection_command +3914,2712246,"genie.py",8000,0,"",python,selection_command +3915,2713494,"genie.py",8000,0," print(f""mask_expanded.shape: {mask_expanded.shape}"")\n",python,content +3916,2713600,"genie.py",8061,0," print(f""mask_token.shape: {mask_token.shape}"")\n",python,content +3917,2713603,"genie.py",8116,67,"",python,content +3918,2713842,"genie.py",8215,0," print(f""vid_embed.shape: {vid_embed.shape}"")\n",python,content +3919,2713843,"genie.py",8268,31,"",python,content +3920,2716519,"TERMINAL",0,0,"sh scripts_horeka/overfit_sample_tiny/sample.sh",,terminal_output +3921,2716753,"TERMINAL",0,0,"\r\n[?2004l\r",,terminal_output +3922,2716863,"TERMINAL",0,0,"Sampling from checkpoint: /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/3309699/genie_1751384516_74500/\r\n",,terminal_output +3923,2718686,"genie.py",0,0,"",python,tab 
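The `print(f"...")` calls added above execute at trace time, which is why they can report shapes even inside the scanned body (shapes are static under tracing), and plausibly why each line appears twice in the terminal output below, once per trace. A small sketch of the distinction; observing runtime values instead would require `jax.debug.print`:

```python
import jax
import jax.numpy as jnp

@jax.jit
def f(x):
    # Trace-time: fine for static metadata such as shapes and dtypes.
    print(f"x.shape: {x.shape}")
    # Run-time: required to observe actual array values under jit/scan.
    jax.debug.print("x[0] = {}", x[0])
    return x * 2

f(jnp.arange(4.0))
```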
+3924,2718686,"genie.py",7992,0,"",python,selection_mouse +3925,2719331,"genie.py",8494,0,"",python,selection_mouse +3926,2720132,"TERMINAL",0,0,"2025-07-03 17:00:57.003221: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +3927,2720789,"genie.py",8086,0,"",python,selection_mouse +3928,2721543,"genie.py",8244,0,"",python,selection_mouse +3929,2722029,"genie.py",8167,0,"",python,selection_mouse +3930,2722514,"genie.py",8164,0,"",python,selection_mouse +3931,2723363,"genie.py",8254,0,"",python,selection_mouse +3932,2724223,"genie.py",8354,0,"",python,selection_mouse +3933,2724641,"TERMINAL",0,0,"2025-07-03 17:01:01.447966: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +3934,2724710,"genie.py",8267,0,"",python,selection_mouse +3935,2724727,"genie.py",8266,0,"",python,selection_command +3936,2730612,"genie.py",8309,0,"",python,selection_mouse +3937,2732313,"genie.py",8308,0,"",python,selection_command +3938,2733447,"TERMINAL",0,0,"2025-07-03 17:01:10.280852: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +3939,2733564,"genie.py",8307,0,"",python,selection_command +3940,2734083,"genie.py",8307,1,"",python,content +3941,2734242,"genie.py",8307,1,"",python,content +3942,2735579,"genie.py",8159,0,"",python,selection_mouse +3943,2736267,"genie.py",8155,0,"",python,selection_command +3944,2737002,"genie.py",8155,0,"#",python,content +3945,2737002,"genie.py",8156,0,"",python,selection_keyboard +3946,2737203,"genie.py",8156,0," ",python,content +3947,2737203,"genie.py",8157,0,"",python,selection_keyboard +3948,2737616,"genie.py",8156,0,"",python,selection_command +3949,2738091,"genie.py",8173,0,"",python,selection_mouse +3950,2738546,"genie.py",8300,0,"",python,selection_mouse +3951,2738557,"genie.py",8299,0,"",python,selection_command +3952,2739135,"genie.py",8300,0,"\n ",python,content +3953,2739427,"genie.py",8309,0,"\n ",python,content +3954,2739428,"genie.py",8301,8,"",python,content +3955,2739588,"genie.py",8310,0,"\n ",python,content +3956,2739589,"genie.py",8302,8,"",python,content +3957,2741348,"TERMINAL",0,0,"2025-07-03 17:01:18.200707: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. 
Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +3958,2743952,"genie.py",10767,0,"",python,selection_mouse +3959,2744109,"genie.py",10726,41,":, -1], # (B, N, D)\n ) # (B, N, D)",python,selection_mouse +3960,2744109,"genie.py",10724,43,"d[:, -1], # (B, N, D)\n ) # (B, N, D)",python,selection_mouse +3961,2744177,"genie.py",10726,41,":, -1], # (B, N, D)\n ) # (B, N, D)",python,selection_command +3962,2744178,"genie.py",10721,46,"mbed[:, -1], # (B, N, D)\n ) # (B, N, D)",python,selection_mouse +3963,2744178,"genie.py",10665,102,"lf.dynamics.mask_token[0], # (B, 1, D)\n vid_embed[:, -1], # (B, N, D)\n ) # (B, N, D)",python,selection_mouse +3964,2744179,"genie.py",10663,104,"self.dynamics.mask_token[0], # (B, 1, D)\n vid_embed[:, -1], # (B, N, D)\n ) # (B, N, D)",python,selection_mouse +3965,2744179,"genie.py",10661,106," self.dynamics.mask_token[0], # (B, 1, D)\n vid_embed[:, -1], # (B, N, D)\n ) # (B, N, D)",python,selection_mouse +3966,2744235,"genie.py",10608,159," jnp.expand_dims(mask, -1), # (B, N, 1)\n self.dynamics.mask_token[0], # (B, 1, D)\n vid_embed[:, -1], # (B, N, D)\n ) # (B, N, D)",python,selection_mouse +3967,2744236,"genie.py",10607,160," jnp.expand_dims(mask, -1), # (B, N, 1)\n self.dynamics.mask_token[0], # (B, 1, D)\n vid_embed[:, -1], # (B, N, D)\n ) # (B, N, D)",python,selection_mouse +3968,2744237,"genie.py",10606,161," jnp.expand_dims(mask, -1), # (B, N, 1)\n self.dynamics.mask_token[0], # (B, 1, D)\n vid_embed[:, -1], # (B, N, D)\n ) # (B, N, D)",python,selection_mouse +3969,2744251,"genie.py",10605,162," jnp.expand_dims(mask, -1), # (B, N, 1)\n self.dynamics.mask_token[0], # (B, 1, D)\n vid_embed[:, -1], # (B, N, D)\n ) # (B, N, D)",python,selection_mouse +3970,2744268,"genie.py",10604,163," jnp.expand_dims(mask, -1), # (B, N, 1)\n self.dynamics.mask_token[0], # (B, 1, D)\n vid_embed[:, -1], # (B, N, D)\n ) # (B, N, D)",python,selection_mouse +3971,2744284,"genie.py",10603,164," jnp.expand_dims(mask, -1), # (B, N, 1)\n self.dynamics.mask_token[0], # (B, 1, D)\n vid_embed[:, -1], # (B, N, D)\n ) # (B, N, D)",python,selection_mouse +3972,2744300,"genie.py",10602,165," jnp.expand_dims(mask, -1), # (B, N, 1)\n self.dynamics.mask_token[0], # (B, 1, D)\n vid_embed[:, -1], # (B, N, D)\n ) # (B, N, D)",python,selection_mouse +3973,2744316,"genie.py",10601,166," jnp.expand_dims(mask, -1), # (B, N, 1)\n self.dynamics.mask_token[0], # (B, 1, D)\n vid_embed[:, -1], # (B, N, D)\n ) # (B, N, D)",python,selection_mouse +3974,2744376,"genie.py",10600,167," jnp.expand_dims(mask, -1), # (B, N, 1)\n self.dynamics.mask_token[0], # (B, 1, D)\n vid_embed[:, -1], # (B, N, D)\n ) # (B, N, D)",python,selection_mouse +3975,2744616,"genie.py",10561,206," curr_masked_frame = jnp.where(\n jnp.expand_dims(mask, -1), # (B, N, 1)\n self.dynamics.mask_token[0], # (B, 1, D)\n vid_embed[:, -1], # (B, N, D)\n ) # (B, N, D)",python,selection_mouse +3976,2747899,"TERMINAL",0,0,"2025-07-03 17:01:24.799995: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. 
Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +3977,2749024,"genie.py",8302,0,"",python,selection_mouse +3978,2750096,"genie.py",8303,8,"",python,content +3979,2750096,"genie.py",8302,0," curr_masked_frame = jnp.where(\n jnp.expand_dims(mask, -1), # (B, N, 1)\n self.dynamics.mask_token[0], # (B, 1, D)\n vid_embed[:, -1], # (B, N, D)\n ) # (B, N, D)",python,content +3980,2754345,"TERMINAL",0,0,"2025-07-03 17:01:31.214507: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +3981,2757863,"TERMINAL",0,0,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/orbax/checkpoint/_src/serialization/type_handlers.py:1251: UserWarning: Sharding info not provided when restoring. Populating sharding info from sharding file. Please note restoration time will be slightly increased due to reading from file. Note also that this option is unsafe when restoring on a different topology than the checkpoint was saved with.\r\n warnings.warn(\r\n",,terminal_output +3982,2765896,"TERMINAL",0,0,"2025-07-03 17:01:42.797057: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +3983,2768758,"TERMINAL",0,0,"2025-07-03 17:01:45.615668: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +3984,2777274,"TERMINAL",0,0,"2025-07-03 17:01:54.120899: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +3985,2778914,"TERMINAL",0,0,"token_idxs shape: (1, 1, 920)\r\n",,terminal_output +3986,2778974,"TERMINAL",0,0,"token_idxs shape: (1, 6, 920)\r\n",,terminal_output +3987,2779152,"TERMINAL",0,0,"action_tokens.shape: (1, 5, 1, 32)\r\n",,terminal_output +3988,2779334,"TERMINAL",0,0,"mask_expanded.shape: (1, 6, 920, 1)\r\nmask_token.shape: (1, 1, 128)\r\n",,terminal_output +3989,2779534,"TERMINAL",0,0,"vid_embed.shape: (1, 6, 920, 128)\r\n",,terminal_output +3990,2780558,"TERMINAL",0,0,"mask_expanded.shape: (1, 6, 920, 1)\r\nmask_token.shape: (1, 1, 128)\r\nvid_embed.shape: (1, 6, 920, 128)\r\n",,terminal_output +3991,2780730,"TERMINAL",0,0,"jax.errors.SimplifiedTraceback: For simplicity, JAX has removed its internal frames from the traceback of the following exception. 
Set JAX_TRACEBACK_FILTERING=off to include these.\r\n\r\nThe above exception was the direct cause of the following exception:\r\n\r\nTraceback (most recent call last):\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/sample.py"", line 158, in \r\n vid = _autoreg_sample_mihir(rng, video_batch, action_batch)\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/sample.py"", line 118, in _autoreg_sample_mihir\r\n new_frames = genie.apply(\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/genie.py"", line 154, in sample_mihir\r\n final_carry, _ = loop_fn(init_carry, jnp.arange(steps))\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/flax/core/axes_scan.py"", line 179, in scan_fn\r\n c, ys = lax.scan(\r\nTypeError: scan body function carry input and carry output must have the same pytree structure, but they differ:\r\n\r\nThe input carry component c[1] is a tuple of length 4 but the corresponding component of the carry output is a tuple of length 5, so the lengths do not match.\r\n\r\nRevise the function so that the carry output has the same pytree structure as the carry input.\r\n",,terminal_output +3992,2782190,"TERMINAL",0,0,"]0;tum_cte0515@hkn0704:~/Projects/jafar[?2004h(jafar) [tum_cte0515@hkn0704 jafar]$ ",,terminal_output +3993,2839033,"genie.py",0,0,"",python,tab +3994,2839988,"genie.py",8509,0,"",python,selection_mouse +3995,2841488,"genie.py",8167,0,"",python,selection_mouse +3996,2842270,"genie.py",8158,0,"",python,selection_mouse +3997,2843590,"genie.py",8132,0,"",python,selection_mouse +3998,2843725,"genie.py",8128,5,"debug",python,selection_mouse +3999,2844225,"genie.py",8162,0,"",python,selection_mouse +4000,2844359,"genie.py",8157,9,"vid_embed",python,selection_mouse +4001,2845086,"genie.py",8157,9,"t",python,content +4002,2845087,"genie.py",8158,0,"",python,selection_keyboard +4003,2845125,"genie.py",8158,0,"m",python,content +4004,2845126,"genie.py",8159,0,"",python,selection_keyboard +4005,2845739,"genie.py",8159,0,"p",python,content +4006,2845740,"genie.py",8160,0,"",python,selection_keyboard +4007,2846373,"genie.py",8159,0,"",python,selection_command +4008,2846731,"genie.py",8158,0,"",python,selection_command +4009,2846864,"genie.py",8157,0,"",python,selection_command +4010,2847021,"genie.py",8156,0,"",python,selection_command +4011,2847160,"genie.py",8155,0,"",python,selection_command +4012,2848193,"genie.py",8155,1,"",python,content +4013,2849743,"genie.py",8155,0,"#",python,content +4014,2849744,"genie.py",8156,0,"",python,selection_keyboard +4015,2849806,"genie.py",8156,0," ",python,content +4016,2849807,"genie.py",8157,0,"",python,selection_keyboard +4017,2850328,"genie.py",8156,0,"",python,selection_command +4018,2850478,"genie.py",8221,0,"",python,selection_command +4019,2850665,"genie.py",8274,0,"",python,selection_command +4020,2850848,"genie.py",8296,0,"",python,selection_command +4021,2851005,"genie.py",8306,0,"",python,selection_command +4022,2851123,"genie.py",8345,0,"",python,selection_command +4023,2851263,"genie.py",8396,0,"",python,selection_command +4024,2851448,"genie.py",8449,0,"",python,selection_command +4025,2852112,"genie.py",8450,0,"",python,selection_command +4026,2852605,"genie.py",8451,0,"",python,selection_command +4027,2852666,"genie.py",8452,0,"",python,selection_command +4028,2852676,"genie.py",8453,0,"",python,selection_command +4029,2852700,"genie.py",8454,0,"",python,selection_command 
+4030,2852724,"genie.py",8455,0,"",python,selection_command +4031,2852756,"genie.py",8456,0,"",python,selection_command +4032,2852782,"genie.py",8457,0,"",python,selection_command +4033,2852825,"genie.py",8458,0,"",python,selection_command +4034,2853015,"genie.py",8459,0,"",python,selection_command +4035,2853350,"genie.py",8406,0,"",python,selection_command +4036,2853513,"genie.py",8355,0,"",python,selection_command +4037,2853653,"genie.py",8316,0,"",python,selection_command +4038,2853806,"genie.py",8296,0,"",python,selection_command +4039,2853954,"genie.py",8284,0,"",python,selection_command +4040,2854089,"genie.py",8231,0,"",python,selection_command +4041,2854243,"genie.py",8166,0,"",python,selection_command +4042,2854473,"genie.py",8231,0,"",python,selection_command +4043,2854617,"genie.py",8284,0,"",python,selection_command +4044,2854775,"genie.py",8296,0,"",python,selection_command +4045,2855116,"genie.py",8316,0,"",python,selection_command +4046,2858029,"genie.py",8355,0,"",python,selection_command +4047,2858210,"genie.py",8406,0,"",python,selection_command +4048,2858379,"genie.py",8459,0,"",python,selection_command +4049,2858581,"genie.py",8501,0,"",python,selection_command +4050,2858761,"genie.py",8459,0,"",python,selection_command +4051,2859047,"genie.py",8406,0,"",python,selection_command +4052,2859246,"genie.py",8407,0,"",python,selection_command +4053,2859423,"genie.py",8460,0,"",python,selection_command +4054,2859782,"genie.py",8461,0,"",python,selection_command +4055,2859939,"genie.py",8462,0,"",python,selection_command +4056,2861151,"genie.py",8462,3,"",python,content +4057,2861516,"genie.py",8462,1,"",python,content +4058,2861982,"genie.py",8462,1,"",python,content +4059,2862190,"genie.py",8462,1,"",python,content +4060,2862425,"genie.py",8461,0,"",python,selection_command +4061,2862776,"genie.py",8461,1,"",python,content +4062,2864269,"genie.py",8496,0,"",python,selection_mouse +4063,2864283,"genie.py",8495,0,"",python,selection_command +4064,2865266,"genie.py",8281,0,"",python,selection_mouse +4065,2865832,"genie.py",8296,0,"",python,selection_command +4066,2865986,"genie.py",8313,0,"",python,selection_command +4067,2866156,"genie.py",8352,0,"",python,selection_command +4068,2866270,"genie.py",8403,0,"",python,selection_command +4069,2866407,"genie.py",8456,0,"",python,selection_command +4070,2866578,"genie.py",8491,0,"",python,selection_command +4071,2868626,"genie.py",8496,0,"\n jax.debug.breakpoint()",python,content +4072,2868661,"genie.py",8505,0,"",python,selection_command +4073,2870695,"TERMINAL",0,0,"sh scripts_horeka/overfit_sample_tiny/sample.sh",,terminal_output +4074,2870960,"TERMINAL",0,0,"\r\n[?2004l\r",,terminal_output +4075,2871076,"TERMINAL",0,0,"Sampling from checkpoint: /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/3309699/genie_1751384516_74500/\r\n",,terminal_output +4076,2873942,"TERMINAL",0,0,"2025-07-03 17:03:30.828109: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +4077,2878399,"TERMINAL",0,0,"2025-07-03 17:03:35.251060: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. 
Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +4078,2887663,"TERMINAL",0,0,"2025-07-03 17:03:44.478090: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +4079,2895299,"TERMINAL",0,0,"2025-07-03 17:03:52.201042: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +4080,2896320,"genie.py",0,0,"",python,tab +4081,2896323,"genie.py",10119,0,"",python,selection_mouse +4082,2896427,"genie.py",10113,14,"new_token_idxs",python,selection_mouse +4083,2897609,"genie.py",10113,14,"",python,content +4084,2898043,"genie.py",10113,1,"",python,content +4085,2898496,"genie.py",10113,1,"",python,content +4086,2901640,"TERMINAL",0,0,"^C",,terminal_output +4087,2901821,"TERMINAL",0,0,"Traceback (most recent call last):\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/sample.py"", line 86, in \r\n params = genie.init(_rng, dummy_inputs)\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/genie.py"", line 82, in __call__\r\n dyna_outputs = self.dynamics(outputs, training)\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/models/dynamics.py"", line 51, in __call__\r\n logits = self.dynamics(vid_embed)\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/utils/nn.py"", line 87, in __call__\r\n x = STBlock(\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/utils/nn.py"", line 41, in __call__\r\n z = nn.MultiHeadAttention(\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/flax/linen/attention.py"", line 549, in __call__\r\n dense(name='query')(inputs_q),\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/flax/linen/linear.py"", line 164, in __call__\r\n kernel = self.param(\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/flax/linen/linear.py"", line 151, in kernel_init_wrap\r\n kernel = self.kernel_init(rng, flat_shape, dtype)\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/jax/_src/nn/initializers.py"", line 335, in init\r\n return random.truncated_normal(key, -2, 2, shape, dtype) * stddev\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/jax/_src/numpy/array_methods.py"", line 1081, in op\r\n return getattr(self.aval, f""_{name}"")(self, *args)\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/jax/_src/numpy/array_methods.py"", line 583, in deferring_binary_op\r\n return binary_op(*args)\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/jax/_src/numpy/ufunc_api.py"", line 180, in __call__\r\n return 
call(*args)\r\njax._src.source_info_util.JaxStackTraceBeforeTransformation: KeyboardInterrupt\r\n\r\nThe preceding stack trace is the source of the JAX operation that, once transformed by JAX, triggered the following exception.\r\n\r\n--------------------\r\n\r\nThe above exception was the direct cause of the following exception:\r\n\r\nTraceback (most recent call last):\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/sample.py"", line 86, in \r\n params = genie.init(_rng, dummy_inputs)\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/jax/_src/traceback_util.py"", line 182, in reraise_with_filtered_traceback\r\n return fun(*args, **kwargs)\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/flax/linen/module.py"", line 2452, in init\r\n _, v_out = self.init_with_output(\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/jax/_src/traceback_util.py"", line 182, in reraise_with_filtered_traceback\r\n return fun(*args, **kwargs)\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/flax/linen/module.py"", line 2304, in init_with_output\r\n return init_with_output(\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/flax/core/scope.py"", line 1115, in wrapper\r\n return apply(fn, mutable=mutable, flags=init_flags)(\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/flax/core/scope.py"", line 1079, in wrapper\r\n y = fn(root, *args, **kwargs)\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/flax/linen/module.py"", line 3093, in scope_fn\r\n return fn(module.clone(parent=scope, _deep_clone=True), *args, **kwargs)\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/flax/linen/module.py"", line 699, in wrapped_module_method\r\n return self._call_wrapped_method(fun, args, kwargs)\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/flax/linen/module.py"", line 1216, in _call_wrapped_method\r\n y = run_fun(self, *args, **kwargs)\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/genie.py"", line 82, in __call__\r\n dyna_outputs = self.dynamics(outputs, training)\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/flax/linen/module.py"", line 699, in wrapped_module_method\r\n return self._call_wrapped_method(fun, args, kwargs)\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/flax/linen/module.py"", line 1216, in _call_wrapped_method\r\n y = run_fun(self, *args, **kwargs)\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/models/dynamics.py"", line 51, in __call__\r\n logits = self.dynamics(vid_embed)\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/flax/linen/module.py"", line 699, in wrapped_module_method\r\n return self._call_wrapped_method(fun, args, kwargs)\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/flax/linen/module.py"", line 1216, 
in _call_wrapped_method\r\n y = run_fun(self, *args, **kwargs)\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/utils/nn.py"", line 87, in __call__\r\n x = STBlock(\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/flax/linen/transforms.py"", line 433, in wrapped_fn\r\n return trafo_fn(module_scopes, *args, **kwargs)\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/flax/core/lift.py"", line 319, in wrapper\r\n y, out_variable_groups_xs_t = fn(\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/flax/core/lift.py"", line 1474, in inner\r\n return rematted(variable_groups, rng_groups, *args, **kwargs)\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/jax/_src/traceback_util.py"", line 182, in reraise_with_filtered_traceback\r\n return fun(*args, **kwargs)\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/jax/_src/ad_checkpoint.py"", line 333, in fun_remat\r\n out_flat = remat_p.bind(\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/jax/_src/core.py"", line 531, in bind\r\n return self._true_bind(*args, **params)\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/jax/_src/core.py"", line 551, in _true_bind\r\n return self.bind_with_trace(prev_trace, args, params)\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/jax/_src/core.py"", line 556, in bind_with_trace\r\n return trace.process_primitive(self, args, params)\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/jax/_src/core.py"", line 1060, in process_primitive\r\n return primitive.impl(*args, **params)\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/jax/_src/ad_checkpoint.py"", line 514, in remat_impl\r\n return core.eval_jaxpr(jaxpr, (), *args)\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/jax/_src/core.py"", line 624, in eval_jaxpr\r\n ans = eqn.primitive.bind(*subfuns, *map(read, eqn.invars), **bind_params)\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/jax/_src/core.py"", line 531, in bind\r\n return self._true_bind(*args, **params)\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/jax/_src/core.py"", line 551, in _true_bind\r\n return self.bind_with_trace(prev_trace, args, params)\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/jax/_src/core.py"", line 556, in bind_with_trace\r\n return trace.process_primitive(self, args, params)\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/jax/_src/core.py"", line 1060, in process_primitive\r\n return primitive.impl(*args, **params)\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/jax/_src/dispatch.py"", line 88, in apply_primitive\r\n outs = fun(*args)\r\n File 
""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/jax/_src/traceback_util.py"", line 182, in reraise_with_filtered_traceback\r\n return fun(*args, **kwargs)\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/jax/_src/pjit.py"", line 334, in cache_miss\r\n executable, pgle_profiler) = _python_pjit_helper(fun, jit_info, *args, **kwargs)\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/jax/_src/pjit.py"", line 195, in _python_pjit_helper\r\n out_flat, compiled, profiler = _pjit_call_impl_python(*args_flat, **p.params)\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/jax/_src/pjit.py"", line 1862, in _pjit_call_impl_python\r\n ).compile()\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/jax/_src/interpreters/pxla.py"", line 2467, in compile\r\n^C File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/jax/_src/interpreters/pxla.py"", line 3009, in from_hlo\r\n xla_executable = _cached_compilation(\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/jax/_src/interpreters/pxla.py"", line 2800, in _cached_compilation\r\n xla_executable = compiler.compile_or_get_cached(\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/jax/_src/compiler.py"", line 447, in compile_or_get_cached\r\n return _compile_and_write_cache(\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/jax/_src/compiler.py"", line 719, in _compile_and_write_cache\r\n executable = backend_compile(\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/jax/_src/profiler.py"", line 354, in wrapper\r\n return func(*args, **kwargs)\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/jax/_src/compiler.py"", line 335, in backend_compile\r\n return backend.compile(\r\nKeyboardInterrupt\r\n",,terminal_output +4088,2901977,"TERMINAL",0,0,"^CException ignored in: .remove at 0x14e01d37f250>\r\nTraceback (most recent call last):\r\n File ""/home/hk-project-p0023960/tum_cte0515/.local/share/uv/python/cpython-3.10.18-linux-x86_64-gnu/lib/python3.10/weakref.py"", line 370, in remove\r\n def remove(k, selfref=ref(self)):\r\nKeyboardInterrupt: \r\n",,terminal_output +4089,2902094,"TERMINAL",0,0,"^CException ignored in: .remove at 0x14e01d37f250>\r\nTraceback (most recent call last):\r\n File ""/home/hk-project-p0023960/tum_cte0515/.local/share/uv/python/cpython-3.10.18-linux-x86_64-gnu/lib/python3.10/weakref.py"", line 370, in remove\r\n def remove(k, selfref=ref(self)):\r\nKeyboardInterrupt: \r\n",,terminal_output +4090,2902717,"TERMINAL",0,0,"\r\n]0;tum_cte0515@hkn0704:~/Projects/jafar[?2004h(jafar) [tum_cte0515@hkn0704 jafar]$ ",,terminal_output +4091,2907176,"genie.py",0,0,"",python,tab +4092,2907176,"genie.py",8155,0,"",python,selection_mouse +4093,2907960,"genie.py",8155,1,"",python,content +4094,2908124,"genie.py",8155,1,"",python,content +4095,2908801,"genie.py",8155,1,"",python,content +4096,2910303,"TERMINAL",0,0,"sh scripts_horeka/overfit_sample_tiny/sample.sh",,terminal_output 
+4097,2910562,"TERMINAL",0,0,"\r\n[?2004l\r",,terminal_output +4098,2910677,"TERMINAL",0,0,"Sampling from checkpoint: /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/3309699/genie_1751384516_74500/\r\n",,terminal_output +4099,2913572,"TERMINAL",0,0,"2025-07-03 17:04:10.402250: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +4100,2917753,"TERMINAL",0,0,"2025-07-03 17:04:14.651809: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +4101,2926924,"TERMINAL",0,0,"2025-07-03 17:04:23.811348: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +4102,2934767,"TERMINAL",0,0,"2025-07-03 17:04:31.598237: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +4103,2941321,"TERMINAL",0,0,"2025-07-03 17:04:38.142416: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +4104,2947669,"TERMINAL",0,0,"2025-07-03 17:04:44.532600: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +4105,2951050,"TERMINAL",0,0,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/orbax/checkpoint/_src/serialization/type_handlers.py:1251: UserWarning: Sharding info not provided when restoring. Populating sharding info from sharding file. Please note restoration time will be slightly increased due to reading from file. Note also that this option is unsafe when restoring on a different topology than the checkpoint was saved with.\r\n warnings.warn(\r\n",,terminal_output +4106,2959036,"TERMINAL",0,0,"2025-07-03 17:04:55.848269: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +4107,2961903,"TERMINAL",0,0,"2025-07-03 17:04:58.754378: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. 
Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +4108,2970712,"TERMINAL",0,0,"2025-07-03 17:05:07.585302: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +4109,2972463,"TERMINAL",0,0,"token_idxs shape: (1, 1, 920)\r\ntoken_idxs shape: (1, 6, 920)\r\n",,terminal_output +4110,2972593,"TERMINAL",0,0,"action_tokens.shape: (1, 5, 1, 32)\r\n",,terminal_output +4111,2972778,"TERMINAL",0,0,"mask_expanded.shape: (1, 6, 920, 1)\r\nmask_token.shape: (1, 1, 128)\r\n",,terminal_output +4112,2973003,"TERMINAL",0,0,"vid_embed.shape: (1, 6, 920, 128)\r\n",,terminal_output +4113,2973122,"TERMINAL",0,0,"jax.errors.SimplifiedTraceback: For simplicity, JAX has removed its internal frames from the traceback of the following exception. Set JAX_TRACEBACK_FILTERING=off to include these.\r\n\r\nThe above exception was the direct cause of the following exception:\r\n\r\nTraceback (most recent call last):\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/sample.py"", line 158, in \r\n vid = _autoreg_sample_mihir(rng, video_batch, action_batch)\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/sample.py"", line 118, in _autoreg_sample_mihir\r\n new_frames = genie.apply(\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/genie.py"", line 154, in sample_mihir\r\n final_carry, _ = loop_fn(init_carry, jnp.arange(steps))\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/flax/core/axes_scan.py"", line 165, in scan_fn\r\n _, out_pvals, _ = pe.trace_to_jaxpr_nounits(f_flat, in_pvals)\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/flax/core/axes_scan.py"", line 135, in body_fn\r\n broadcast_out, c, ys = fn(broadcast_in, c, *xs)\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/genie.py"", line 257, in __call__\r\n vid_embed = vid_embed.at[:, -1].set(curr_masked_frame)\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/jax/_src/numpy/array_methods.py"", line 812, in set\r\n return scatter._scatter_update(self.array, self.index, values, lax.scatter,\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/jax/_src/ops/scatter.py"", line 92, in _scatter_update\r\n return internal_scatter(x, y, dynamic_idx)\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/jax/_src/ops/scatter.py"", line 127, in _scatter_impl\r\n y = jnp.broadcast_to(y, tuple(indexer.slice_shape))\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/jax/_src/numpy/lax_numpy.py"", line 3138, in broadcast_to\r\n return util._broadcast_to(array, shape, sharding=out_sharding)\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/jax/_src/numpy/util.py"", line 281, in _broadcast_to\r\n raise ValueError(f""Cannot broadcast to shape with fewer dimensions: {arr_shape=} 
{shape=}"")\r\nValueError: Cannot broadcast to shape with fewer dimensions: arr_shape=(1, 6, 920, 128) shape=(1, 920, 128)\r\n",,terminal_output +4114,2974504,"TERMINAL",0,0,"]0;tum_cte0515@hkn0704:~/Projects/jafar[?2004h(jafar) [tum_cte0515@hkn0704 jafar]$ ",,terminal_output +4115,3027138,"genie.py",0,0,"",python,tab +4116,3027139,"genie.py",8082,0,"",python,selection_mouse +4117,3027740,"genie.py",8286,0,"",python,selection_mouse +4118,3028848,"genie.py",8238,0,"",python,selection_mouse +4119,3029535,"genie.py",8144,0,"",python,selection_mouse +4120,3030094,"genie.py",8174,0,"",python,selection_mouse +4121,3030763,"genie.py",8185,0,"",python,selection_mouse +4122,3032090,"genie.py",8099,0,"",python,selection_mouse +4123,3032973,"genie.py",7923,0,"",python,selection_mouse +4124,3033943,"genie.py",8178,0,"",python,selection_mouse +4125,3034481,"genie.py",8250,0,"",python,selection_mouse +4126,3034612,"genie.py",8243,9,"vid_embed",python,selection_mouse +4127,3035129,"genie.py",8193,0,"",python,selection_mouse +4128,3035268,"genie.py",8186,10,"mask_token",python,selection_mouse +4129,3036181,"genie.py",8048,0,"",python,selection_mouse +4130,3036691,"genie.py",8115,0,"",python,selection_mouse +4131,3036704,"genie.py",8114,0,"",python,selection_command +4132,3037263,"genie.py",8060,0,"",python,selection_mouse +4133,3037275,"genie.py",8059,0,"",python,selection_command +4134,3116052,"genie.py",0,0,"",python,tab +4135,3116053,"genie.py",8146,0,"",python,selection_mouse +4136,3116151,"genie.py",8145,0,"",python,selection_command +4137,3116737,"genie.py",8208,0,"",python,selection_mouse +4138,3116750,"genie.py",8207,0,"",python,selection_command +4139,3116868,"genie.py",8208,0,"",python,selection_mouse +4140,3116871,"genie.py",8207,0,"",python,selection_command +4141,3117034,"genie.py",8147,62," tmp = jnp.where(mask_expanded, mask_token, vid_embed)\n",python,selection_mouse +4142,3117052,"genie.py",8148,61," tmp = jnp.where(mask_expanded, mask_token, vid_embed)\n",python,selection_command +4143,3146256,"genie.py",8115,0,"",python,selection_mouse +4144,3146297,"genie.py",8114,0,"",python,selection_command +4145,3147133,"genie.py",8146,0,"",python,selection_mouse +4146,3147134,"genie.py",8145,0,"",python,selection_command +4147,3148137,"genie.py",8146,0,"\n ",python,content +4148,3148477,"genie.py",8155,0,"mask_token_expanded = self.dynamics.mask_token[0][None, None, None, :] # (1, 1, 1, D)",python,content +4149,3150608,"genie.py",8289,0,"",python,selection_mouse +4150,3151725,"genie.py",8291,0,"",python,selection_mouse +4151,3152501,"genie.py",8291,0,"_",python,content +4152,3152502,"genie.py",8292,0,"",python,selection_keyboard +4153,3152718,"genie.py",8292,0,"e",python,content +4154,3152719,"genie.py",8293,0,"",python,selection_keyboard +4155,3153248,"genie.py",8281,12,"mask_token_expanded",python,content +4156,3155197,"TERMINAL",0,0,"sh scripts_horeka/overfit_sample_tiny/sample.sh",,terminal_output +4157,3155642,"TERMINAL",0,0,"\r\n[?2004l\r",,terminal_output +4158,3155749,"TERMINAL",0,0,"Sampling from checkpoint: /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/3309699/genie_1751384516_74500/\r\n",,terminal_output +4159,3158718,"TERMINAL",0,0,"2025-07-03 17:08:15.556896: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. 
Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +4160,3162926,"TERMINAL",0,0,"2025-07-03 17:08:19.782792: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +4161,3172543,"TERMINAL",0,0,"2025-07-03 17:08:29.436636: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +4162,3180530,"TERMINAL",0,0,"2025-07-03 17:08:37.404041: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +4163,3187307,"TERMINAL",0,0,"2025-07-03 17:08:44.209321: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +4164,3193944,"TERMINAL",0,0,"2025-07-03 17:08:50.802279: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +4165,3197324,"TERMINAL",0,0,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/orbax/checkpoint/_src/serialization/type_handlers.py:1251: UserWarning: Sharding info not provided when restoring. Populating sharding info from sharding file. Please note restoration time will be slightly increased due to reading from file. Note also that this option is unsafe when restoring on a different topology than the checkpoint was saved with.\r\n warnings.warn(\r\n",,terminal_output +4166,3205478,"TERMINAL",0,0,"2025-07-03 17:09:02.363125: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +4167,3208415,"TERMINAL",0,0,"2025-07-03 17:09:05.315556: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +4168,3216677,"TERMINAL",0,0,"2025-07-03 17:09:13.482371: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. 
Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +4169,3218196,"TERMINAL",0,0,"token_idxs shape: (1, 1, 920)\r\n",,terminal_output +4170,3218270,"TERMINAL",0,0,"token_idxs shape: (1, 6, 920)\r\n",,terminal_output +4171,3218445,"TERMINAL",0,0,"action_tokens.shape: (1, 5, 1, 32)\r\n",,terminal_output +4172,3218635,"TERMINAL",0,0,"mask_expanded.shape: (1, 6, 920, 1)\r\nmask_token.shape: (1, 1, 128)\r\n",,terminal_output +4173,3218830,"TERMINAL",0,0,"vid_embed.shape: (1, 6, 920, 128)\r\n",,terminal_output +4174,3218986,"TERMINAL",0,0,"jax.errors.SimplifiedTraceback: For simplicity, JAX has removed its internal frames from the traceback of the following exception. Set JAX_TRACEBACK_FILTERING=off to include these.\r\n\r\nThe above exception was the direct cause of the following exception:\r\n\r\nTraceback (most recent call last):\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/sample.py"", line 158, in \r\n vid = _autoreg_sample_mihir(rng, video_batch, action_batch)\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/sample.py"", line 118, in _autoreg_sample_mihir\r\n new_frames = genie.apply(\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/genie.py"", line 154, in sample_mihir\r\n final_carry, _ = loop_fn(init_carry, jnp.arange(steps))\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/flax/core/axes_scan.py"", line 165, in scan_fn\r\n _, out_pvals, _ = pe.trace_to_jaxpr_nounits(f_flat, in_pvals)\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/flax/core/axes_scan.py"", line 135, in body_fn\r\n broadcast_out, c, ys = fn(broadcast_in, c, *xs)\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/genie.py"", line 258, in __call__\r\n vid_embed = vid_embed.at[:, -1].set(curr_masked_frame)\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/jax/_src/numpy/array_methods.py"", line 812, in set\r\n return scatter._scatter_update(self.array, self.index, values, lax.scatter,\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/jax/_src/ops/scatter.py"", line 92, in _scatter_update\r\n return internal_scatter(x, y, dynamic_idx)\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/jax/_src/ops/scatter.py"", line 127, in _scatter_impl\r\n y = jnp.broadcast_to(y, tuple(indexer.slice_shape))\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/jax/_src/numpy/lax_numpy.py"", line 3138, in broadcast_to\r\n return util._broadcast_to(array, shape, sharding=out_sharding)\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/jax/_src/numpy/util.py"", line 281, in _broadcast_to\r\n raise ValueError(f""Cannot broadcast to shape with fewer dimensions: {arr_shape=} {shape=}"")\r\nValueError: Cannot broadcast to shape with fewer dimensions: arr_shape=(1, 6, 920, 128) shape=(1, 920, 128)\r\n",,terminal_output +4175,3220361,"TERMINAL",0,0,"]0;tum_cte0515@hkn0704:~/Projects/jafar[?2004h(jafar) [tum_cte0515@hkn0704 jafar]$ ",,terminal_output +4176,3226670,"genie.py",0,0,"",python,tab 
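The ValueError repeated above comes from the functional update vid_embed.at[:, -1].set(curr_masked_frame): the selected slice has shape (B, N, D) = (1, 920, 128), so the value being set must broadcast to that slice, but curr_masked_frame kept the full (B, S, N, D) = (1, 6, 920, 128) shape. A minimal sketch with the shapes printed in the log (the slice-level fix shown is an assumption, not necessarily the change the author ultimately made):

```python
import jax.numpy as jnp

B, S, N, D = 1, 6, 920, 128
vid_embed = jnp.zeros((B, S, N, D))
curr_masked_frame = jnp.zeros((B, S, N, D))  # one dimension too many for the slice

# vid_embed.at[:, -1].set(curr_masked_frame)                   # ValueError: cannot broadcast
vid_embed = vid_embed.at[:, -1].set(curr_masked_frame[:, -1])  # (B, N, D) matches the slice
```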
+4177,3226671,"genie.py",8296,0,"",python,selection_mouse +4178,3226752,"genie.py",8281,19,"mask_token_expanded",python,selection_mouse +4179,3241722,"genie.py",0,0,"",python,tab +4180,3241724,"genie.py",8680,0,"",python,selection_mouse +4181,3241824,"genie.py",8674,17,"curr_masked_frame",python,selection_mouse +4182,3249538,"genie.py",8679,0,"",python,selection_mouse +4183,3249686,"genie.py",8674,17,"curr_masked_frame",python,selection_mouse +4184,3250497,"genie.py",8679,0,"",python,selection_mouse +4185,3252082,"genie.py",8411,0,"",python,selection_mouse +4186,3252685,"genie.py",8597,0,"",python,selection_mouse +4187,3252868,"genie.py",8562,35,", # (B, N, D)\n ) # (B, N, D)",python,selection_mouse +4188,3252868,"genie.py",8553,44,"vid_embed, # (B, N, D)\n ) # (B, N, D)",python,selection_mouse +4189,3252869,"genie.py",8493,104," self.dynamics.mask_token[0], # (B, 1, D)\n vid_embed, # (B, N, D)\n ) # (B, N, D)",python,selection_mouse +4190,3252869,"genie.py",8437,160," jnp.expand_dims(mask, -1), # (B, N, 1)\n self.dynamics.mask_token[0], # (B, 1, D)\n vid_embed, # (B, N, D)\n ) # (B, N, D)",python,selection_mouse +4191,3253271,"genie.py",8398,199," curr_masked_frame = jnp.where(\n jnp.expand_dims(mask, -1), # (B, N, 1)\n self.dynamics.mask_token[0], # (B, 1, D)\n vid_embed, # (B, N, D)\n ) # (B, N, D)",python,selection_mouse +4192,3255836,"genie.py",8692,0,"",python,selection_mouse +4193,3256022,"genie.py",8690,2,"e)",python,selection_mouse +4194,3256023,"genie.py",8629,63,"\n vid_embed = vid_embed.at[:, -1].set(curr_masked_frame)",python,selection_mouse +4195,3256023,"genie.py",8628,64,"\n\n vid_embed = vid_embed.at[:, -1].set(curr_masked_frame)",python,selection_mouse +4196,3256141,"genie.py",8570,122,"N, D)\n ) # (B, N, D)\n jax.debug.breakpoint()\n\n vid_embed = vid_embed.at[:, -1].set(curr_masked_frame)",python,selection_mouse +4197,3256141,"genie.py",8511,181,"cs.mask_token[0], # (B, 1, D)\n vid_embed, # (B, N, D)\n ) # (B, N, D)\n jax.debug.breakpoint()\n\n vid_embed = vid_embed.at[:, -1].set(curr_masked_frame)",python,selection_mouse +4198,3256141,"genie.py",8507,185,"namics.mask_token[0], # (B, 1, D)\n vid_embed, # (B, N, D)\n ) # (B, N, D)\n jax.debug.breakpoint()\n\n vid_embed = vid_embed.at[:, -1].set(curr_masked_frame)",python,selection_mouse +4199,3256142,"genie.py",8454,238,"xpand_dims(mask, -1), # (B, N, 1)\n self.dynamics.mask_token[0], # (B, 1, D)\n vid_embed, # (B, N, D)\n ) # (B, N, D)\n jax.debug.breakpoint()\n\n vid_embed = vid_embed.at[:, -1].set(curr_masked_frame)",python,selection_mouse +4200,3256142,"genie.py",8451,241,"p.expand_dims(mask, -1), # (B, N, 1)\n self.dynamics.mask_token[0], # (B, 1, D)\n vid_embed, # (B, N, D)\n ) # (B, N, D)\n jax.debug.breakpoint()\n\n vid_embed = vid_embed.at[:, -1].set(curr_masked_frame)",python,selection_mouse +4201,3256197,"genie.py",8449,243,"jnp.expand_dims(mask, -1), # (B, N, 1)\n self.dynamics.mask_token[0], # (B, 1, D)\n vid_embed, # (B, N, D)\n ) # (B, N, D)\n jax.debug.breakpoint()\n\n vid_embed = vid_embed.at[:, -1].set(curr_masked_frame)",python,selection_mouse +4202,3256258,"genie.py",8407,285,"urr_masked_frame = jnp.where(\n jnp.expand_dims(mask, -1), # (B, N, 1)\n self.dynamics.mask_token[0], # (B, 1, D)\n vid_embed, # (B, N, D)\n ) # (B, N, D)\n jax.debug.breakpoint()\n\n vid_embed = vid_embed.at[:, -1].set(curr_masked_frame)",python,selection_mouse +4203,3256287,"genie.py",8406,286,"curr_masked_frame = jnp.where(\n jnp.expand_dims(mask, -1), # (B, N, 1)\n self.dynamics.mask_token[0], # (B, 1, D)\n vid_embed, # (B, 
N, D)\n ) # (B, N, D)\n jax.debug.breakpoint()\n\n vid_embed = vid_embed.at[:, -1].set(curr_masked_frame)",python,selection_mouse +4204,3256387,"genie.py",8405,287," curr_masked_frame = jnp.where(\n jnp.expand_dims(mask, -1), # (B, N, 1)\n self.dynamics.mask_token[0], # (B, 1, D)\n vid_embed, # (B, N, D)\n ) # (B, N, D)\n jax.debug.breakpoint()\n\n vid_embed = vid_embed.at[:, -1].set(curr_masked_frame)",python,selection_mouse +4205,3256426,"genie.py",8404,288," curr_masked_frame = jnp.where(\n jnp.expand_dims(mask, -1), # (B, N, 1)\n self.dynamics.mask_token[0], # (B, 1, D)\n vid_embed, # (B, N, D)\n ) # (B, N, D)\n jax.debug.breakpoint()\n\n vid_embed = vid_embed.at[:, -1].set(curr_masked_frame)",python,selection_mouse +4206,3256450,"genie.py",8403,289," curr_masked_frame = jnp.where(\n jnp.expand_dims(mask, -1), # (B, N, 1)\n self.dynamics.mask_token[0], # (B, 1, D)\n vid_embed, # (B, N, D)\n ) # (B, N, D)\n jax.debug.breakpoint()\n\n vid_embed = vid_embed.at[:, -1].set(curr_masked_frame)",python,selection_mouse +4207,3256474,"genie.py",8402,290," curr_masked_frame = jnp.where(\n jnp.expand_dims(mask, -1), # (B, N, 1)\n self.dynamics.mask_token[0], # (B, 1, D)\n vid_embed, # (B, N, D)\n ) # (B, N, D)\n jax.debug.breakpoint()\n\n vid_embed = vid_embed.at[:, -1].set(curr_masked_frame)",python,selection_mouse +4208,3256497,"genie.py",8401,291," curr_masked_frame = jnp.where(\n jnp.expand_dims(mask, -1), # (B, N, 1)\n self.dynamics.mask_token[0], # (B, 1, D)\n vid_embed, # (B, N, D)\n ) # (B, N, D)\n jax.debug.breakpoint()\n\n vid_embed = vid_embed.at[:, -1].set(curr_masked_frame)",python,selection_mouse +4209,3256520,"genie.py",8400,292," curr_masked_frame = jnp.where(\n jnp.expand_dims(mask, -1), # (B, N, 1)\n self.dynamics.mask_token[0], # (B, 1, D)\n vid_embed, # (B, N, D)\n ) # (B, N, D)\n jax.debug.breakpoint()\n\n vid_embed = vid_embed.at[:, -1].set(curr_masked_frame)",python,selection_mouse +4210,3256586,"genie.py",8399,293," curr_masked_frame = jnp.where(\n jnp.expand_dims(mask, -1), # (B, N, 1)\n self.dynamics.mask_token[0], # (B, 1, D)\n vid_embed, # (B, N, D)\n ) # (B, N, D)\n jax.debug.breakpoint()\n\n vid_embed = vid_embed.at[:, -1].set(curr_masked_frame)",python,selection_mouse +4211,3256643,"genie.py",8398,294," curr_masked_frame = jnp.where(\n jnp.expand_dims(mask, -1), # (B, N, 1)\n self.dynamics.mask_token[0], # (B, 1, D)\n vid_embed, # (B, N, D)\n ) # (B, N, D)\n jax.debug.breakpoint()\n\n vid_embed = vid_embed.at[:, -1].set(curr_masked_frame)",python,selection_mouse +4212,3257033,"genie.py",8398,294,"",python,content +4213,3257483,"genie.py",8397,1,"",python,content +4214,3257630,"genie.py",8396,1,"",python,content +4215,3259269,"TERMINAL",0,0,"sh scripts_horeka/overfit_sample_tiny/sample.sh",,terminal_output +4216,3259493,"TERMINAL",0,0,"\r\n[?2004l\r",,terminal_output +4217,3259627,"TERMINAL",0,0,"Sampling from checkpoint: /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/3309699/genie_1751384516_74500/\r\n",,terminal_output +4218,3262554,"TERMINAL",0,0,"2025-07-03 17:09:59.409227: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. 
Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +4219,3266612,"TERMINAL",0,0,"2025-07-03 17:10:03.512049: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +4220,3275442,"TERMINAL",0,0,"2025-07-03 17:10:12.338077: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +4221,3283848,"TERMINAL",0,0,"2025-07-03 17:10:20.543339: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +4222,3290309,"TERMINAL",0,0,"2025-07-03 17:10:27.207455: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +4223,3296859,"TERMINAL",0,0,"2025-07-03 17:10:33.740166: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +4224,3300253,"TERMINAL",0,0,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/orbax/checkpoint/_src/serialization/type_handlers.py:1251: UserWarning: Sharding info not provided when restoring. Populating sharding info from sharding file. Please note restoration time will be slightly increased due to reading from file. Note also that this option is unsafe when restoring on a different topology than the checkpoint was saved with.\r\n warnings.warn(\r\n",,terminal_output +4225,3308419,"TERMINAL",0,0,"2025-07-03 17:10:45.319609: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +4226,3311398,"TERMINAL",0,0,"2025-07-03 17:10:48.297452: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +4227,3319694,"TERMINAL",0,0,"2025-07-03 17:10:56.546233: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. 
Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +4228,3321341,"TERMINAL",0,0,"token_idxs shape: (1, 1, 920)\r\ntoken_idxs shape: (1, 6, 920)\r\n",,terminal_output +4229,3321527,"TERMINAL",0,0,"action_tokens.shape: (1, 5, 1, 32)\r\n",,terminal_output +4230,3321686,"TERMINAL",0,0,"mask_expanded.shape: (1, 6, 920, 1)\r\nmask_token.shape: (1, 1, 128)\r\n",,terminal_output +4231,3321884,"TERMINAL",0,0,"vid_embed.shape: (1, 6, 920, 128)\r\n",,terminal_output +4232,3322984,"TERMINAL",0,0,"mask_expanded.shape: (1, 6, 920, 1)\r\nmask_token.shape: (1, 1, 128)\r\nvid_embed.shape: (1, 6, 920, 128)\r\n",,terminal_output +4233,3323380,"TERMINAL",0,0,"2025-07-03 17:11:00.279483: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +4234,3329114,"TERMINAL",0,0,"Entering jdb:\r\n(jdb) ",,terminal_output +4235,3395107,"TERMINAL",0,0,"l",,terminal_output +4236,3395169,"TERMINAL",0,0,"\r\n> /hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/genie.py(249)\r\n print(f""mask_expanded.shape: {mask_expanded.shape}"")\r\n print(f""mask_token.shape: {mask_token.shape}"")\r\n jax.debug.breakpoint()\r\n mask_token_expanded = self.dynamics.mask_token[0][None, None, None, :] # (1, 1, 1, D)\r\n tmp = jnp.where(mask_expanded, mask_token_expanded, vid_embed)\r\n print(f""vid_embed.shape: {vid_embed.shape}"")\r\n-> jax.debug.breakpoint()\r\n \r\n # --- Predict transition ---\r\n act_embed = self.dynamics.action_up(action_tokens)\r\n vid_embed += jnp.pad(act_embed, ((0, 0), (1, 0), (0, 0), (0, 0)))\r\n unmasked_ratio = jnp.cos(jnp.pi * (step + 1) / (self.steps * 2))\r\n(jdb) ",,terminal_output +4237,3401481,"TERMINAL",0,0,"[?25lc[?25h",,terminal_output +4238,3403513,"TERMINAL",0,0,"\r\nEntering jdb:\r\n(jdb) ",,terminal_output +4239,3404351,"TERMINAL",0,0,"[?25lm[?25h",,terminal_output +4240,3404463,"TERMINAL",0,0,"[?25la[?25h",,terminal_output +4241,3404577,"TERMINAL",0,0,"[?25ls[?25h[?25lk[?25h",,terminal_output +4242,3404923,"TERMINAL",0,0,"[?25l_[?25h",,terminal_output +4243,3405145,"TERMINAL",0,0,"[?25lt[?25h",,terminal_output +4244,3405208,"TERMINAL",0,0,"[?25lo[?25h",,terminal_output +4245,3405569,"TERMINAL",0,0,"[?25lk[?25h",,terminal_output +4246,3405632,"TERMINAL",0,0,"[?25le[?25h",,terminal_output +4247,3405811,"TERMINAL",0,0,"[?25ln[?25h",,terminal_output +4248,3406270,"TERMINAL",0,0,"[?25l_[?25h",,terminal_output +4249,3406661,"TERMINAL",0,0,"[?25le[?25h",,terminal_output +4250,3406914,"TERMINAL",0,0,"[?25lx[?25h",,terminal_output +4251,3407118,"TERMINAL",0,0,"[?25lp[?25h",,terminal_output +4252,3407286,"TERMINAL",0,0,"[?25la[?25h",,terminal_output +4253,3408073,"TERMINAL",0,0,"[?25ln[?25h",,terminal_output +4254,3408483,"TERMINAL",0,0,"[?25ld[?25h",,terminal_output +4255,3408643,"TERMINAL",0,0,"[?25le[?25h",,terminal_output +4256,3408707,"TERMINAL",0,0,"[?25ld[?25h",,terminal_output +4257,3408856,"TERMINAL",0,0,"[?25l.[?25h",,terminal_output +4258,3409083,"TERMINAL",0,0,"[?25ls[?25h",,terminal_output +4259,3409190,"TERMINAL",0,0,"[?25lh[?25h",,terminal_output +4260,3409251,"TERMINAL",0,0,"[?25la[?25h",,terminal_output +4261,3409440,"TERMINAL",0,0,"[?25lp[?25h[?25le[?25h",,terminal_output +4262,3409791,"TERMINAL",0,0,"\r\n*** NameError: 
name 'mask_token_expanded' is not defined\r\n(jdb) ",,terminal_output +4263,3414499,"TERMINAL",0,0,"[?25ll[?25h",,terminal_output +4264,3414643,"TERMINAL",0,0,"\r\n> /hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/genie.py(245)\r\n # mask: (B, S, N), vid_embed: (B, S, N, D), mask_token: (D,)\r\n mask_token = self.dynamics.mask_token[0] # (D,)\r\n # Expand mask to (B, S, N, 1) for broadcasting\r\n mask_expanded = mask[..., None]\r\n print(f""mask_expanded.shape: {mask_expanded.shape}"")\r\n print(f""mask_token.shape: {mask_token.shape}"")\r\n-> jax.debug.breakpoint()\r\n mask_token_expanded = self.dynamics.mask_token[0][None, None, None, :] # (1, 1, 1, D)\r\n tmp = jnp.where(mask_expanded, mask_token_expanded, vid_embed)\r\n print(f""vid_embed.shape: {vid_embed.shape}"")\r\n jax.debug.breakpoint()\r\n \r\n(jdb) ",,terminal_output +4265,3417804,"TERMINAL",0,0,"[?25lc[?25h",,terminal_output +4266,3418001,"TERMINAL",0,0,"\r\njax.errors.SimplifiedTraceback: For simplicity, JAX has removed its internal frames from the traceback of the following exception. Set JAX_TRACEBACK_FILTERING=off to include these.\r\n\r\nThe above exception was the direct cause of the following exception:\r\n\r\nTraceback (most recent call last):\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/sample.py"", line 158, in \r\n vid = _autoreg_sample_mihir(rng, video_batch, action_batch)\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/sample.py"", line 118, in _autoreg_sample_mihir\r\n new_frames = genie.apply(\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/genie.py"", line 156, in sample_mihir\r\n new_frame_pixels = self.tokenizer.decode(\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/models/tokenizer.py"", line 67, in decode\r\n z = self.vq.codebook[indices]\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/jax/_src/array.py"", line 382, in __getitem__\r\n",,terminal_output +4267,3418070,"TERMINAL",0,0," return indexing.rewriting_take(self, idx)\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/jax/_src/numpy/indexing.py"", line 643, in rewriting_take\r\n treedef, static_idx, dynamic_idx = split_index_for_jit(idx, arr.shape)\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/jax/_src/numpy/indexing.py"", line 736, in split_index_for_jit\r\n idx = _expand_bool_indices(idx, shape)\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/jax/_src/numpy/indexing.py"", line 1065, in _expand_bool_indices\r\n raise IndexError(f""too many boolean indices at index {dim_number}: got mask of shape ""\r\nIndexError: too many boolean indices at index 0: got mask of shape (1, 1, 6, 920), but only 2 dimensions remain.\r\n",,terminal_output +4268,3419537,"TERMINAL",0,0,"]0;tum_cte0515@hkn0704:~/Projects/jafar[?2004h(jafar) [tum_cte0515@hkn0704 jafar]$ ",,terminal_output +4269,3434422,"genie.py",0,0,"",python,tab +4270,3434423,"genie.py",8138,0,"",python,selection_mouse +4271,3434856,"genie.py",8137,0,"",python,selection_command +4272,3435512,"genie.py",8116,31,"",python,content +4273,3435577,"genie.py",8124,0,"",python,selection_command +4274,3435863,"genie.py",8210,0,"\n jax.debug.breakpoint()",python,content +4275,3435896,"genie.py",8219,0,"",python,selection_command 
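The IndexError that ends this run is a boolean-indexing mismatch: tokenizer.decode does z = self.vq.codebook[indices], and the codebook is 2-D (num_codes, code_dim), so a 4-D boolean array of shape (1, 1, 6, 920) cannot index it; integer token indices gather correctly. A minimal sketch with hypothetical codebook sizes:

```python
import jax.numpy as jnp

codebook = jnp.zeros((512, 32))                    # (num_codes, code_dim), only 2-D
bool_idxs = jnp.zeros((1, 1, 6, 920), dtype=bool)  # 4-D boolean mask
int_idxs = jnp.zeros((1, 6, 920), dtype=jnp.int32) # integer token ids

# codebook[bool_idxs]   # IndexError: too many boolean indices at index 0
z = codebook[int_idxs]  # gather -> shape (1, 6, 920, 32)
```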
+4276,3438656,"TERMINAL",0,0,"sh scripts_horeka/overfit_sample_tiny/sample.sh",,terminal_output +4277,3439041,"TERMINAL",0,0,"\r\n[?2004l\r",,terminal_output +4278,3439147,"TERMINAL",0,0,"Sampling from checkpoint: /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/3309699/genie_1751384516_74500/\r\n",,terminal_output +4279,3440468,"genie.py",0,0,"",python,tab +4280,3440469,"genie.py",8115,0,"",python,selection_mouse +4281,3440530,"genie.py",8114,0,"",python,selection_command +4282,3441090,"genie.py",8192,0,"",python,selection_mouse +4283,3441881,"genie.py",8156,0,"",python,selection_mouse +4284,3442039,"genie.py",8151,8,"dynamics",python,selection_mouse +4285,3442109,"TERMINAL",0,0,"2025-07-03 17:12:59.009155: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +4286,3442155,"genie.py",8116,95," mask_token_expanded = self.dynamics.mask_token[0][None, None, None, :] # (1, 1, 1, D)\n",python,selection_mouse +4287,3443454,"genie.py",7902,0,"",python,selection_mouse +4288,3444211,"genie.py",7901,0,"",python,selection_mouse +4289,3445913,"genie.py",7901,0,"B",python,content +4290,3445914,"genie.py",7902,0,"",python,selection_keyboard +4291,3446190,"genie.py",7902,0,"m",python,content +4292,3446191,"genie.py",7903,0,"",python,selection_keyboard +4293,3446351,"TERMINAL",0,0,"2025-07-03 17:13:03.253842: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +4294,3446736,"genie.py",7902,1,"",python,content +4295,3446962,"genie.py",7902,0,",",python,content +4296,3446963,"genie.py",7903,0,"",python,selection_keyboard +4297,3447180,"genie.py",7903,0," ",python,content +4298,3447181,"genie.py",7904,0,"",python,selection_keyboard +4299,3447827,"genie.py",7904,0,"1",python,content +4300,3447828,"genie.py",7905,0,"",python,selection_keyboard +4301,3448016,"genie.py",7905,0,",",python,content +4302,3448017,"genie.py",7906,0,"",python,selection_keyboard +4303,3448175,"genie.py",7906,0," ",python,content +4304,3448176,"genie.py",7907,0,"",python,selection_keyboard +4305,3453702,"genie.py",8197,0,"",python,selection_mouse +4306,3454726,"genie.py",8196,0,"",python,selection_command +4307,3455287,"TERMINAL",0,0,"2025-07-03 17:13:12.134327: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. 
Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +4308,3455580,"genie.py",7567,0,"",python,selection_command +4309,3457349,"genie.py",8121,0,"",python,selection_mouse +4310,3457354,"genie.py",8120,0,"",python,selection_command +4311,3457633,"genie.py",8120,1,")",python,selection_mouse +4312,3457634,"genie.py",8120,60,")\n mask_token_expanded = self.dynamics.mask_token[0][",python,selection_mouse +4313,3457634,"genie.py",8120,59,")\n mask_token_expanded = self.dynamics.mask_token[0]",python,selection_mouse +4314,3457654,"genie.py",8121,0,"",python,selection_command +4315,3457784,"genie.py",8179,0,"",python,selection_mouse +4316,3457929,"genie.py",8178,2,"][",python,selection_mouse +4317,3458069,"genie.py",8122,95," mask_token_expanded = self.dynamics.mask_token[0][None, None, None, :] # (1, 1, 1, D)\n",python,selection_mouse +4318,3459081,"genie.py",8130,0,"",python,selection_command +4319,3463139,"TERMINAL",0,0,"2025-07-03 17:13:20.009991: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +4320,3469565,"TERMINAL",0,0,"2025-07-03 17:13:26.460904: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +4321,3475958,"TERMINAL",0,0,"2025-07-03 17:13:32.846363: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +4322,3479200,"TERMINAL",0,0,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/orbax/checkpoint/_src/serialization/type_handlers.py:1251: UserWarning: Sharding info not provided when restoring. Populating sharding info from sharding file. Please note restoration time will be slightly increased due to reading from file. 
Note also that this option is unsafe when restoring on a different topology than the checkpoint was saved with.\r\n warnings.warn(\r\n",,terminal_output +4323,3479718,"genie.py",7888,0,"",python,selection_mouse +4324,3480245,".venv/lib/python3.10/site-packages/flax/linen/module.py",0,0,"# Copyright 2024 The Flax Authors.\n#\n# Licensed under the Apache License, Version 2.0 (the ""License"");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n# http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an ""AS IS"" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n\n""""""Flax Module.""""""\n\nimport contextlib\nimport dataclasses\nimport enum\nimport functools\nimport inspect\nimport sys\nimport threading\nimport typing\nimport weakref\nfrom types import MappingProxyType\nfrom typing import (\n Any,\n Literal,\n Optional,\n TypeVar,\n Union,\n overload,\n)\nfrom collections.abc import Callable, Iterable, Iterator, Mapping\n\nimport jax\nimport jax.numpy as jnp\nimport typing_extensions as tpe\n\nimport flax\nimport flax.linen as nn\nfrom flax import (\n config,\n core,\n errors,\n serialization,\n traceback_util,\n traverse_util,\n)\nfrom flax.core import Scope, meta, partial_eval\nfrom flax.core.frozen_dict import FrozenDict\nfrom flax.core.scope import (\n CollectionFilter,\n DenyList,\n Variable,\n union_filters,\n)\nfrom flax.ids import FlaxId, uuid\nfrom flax.linen import kw_only_dataclasses\nfrom flax.typing import (\n RNGSequences,\n PRNGKey,\n FrozenVariableDict,\n VariableDict,\n)\n\ntraceback_util.register_exclusion(__file__)\n\n\nT = TypeVar('T')\nK = TypeVar('K')\nM = TypeVar('M', bound='Module')\n_CallableT = TypeVar('_CallableT', bound=Callable)\n\n\n# Used for abstractly testing module behavior.\nTestScope = type(\n 'TestScope',\n (Scope,),\n {'make_rng': lambda self, name: jax.random.key(0)},\n)\n\n\n# pylint: disable=protected-access,attribute-defined-outside-init\ndef _get_fn_name(fn):\n if isinstance(fn, functools.partial):\n return _get_fn_name(fn.func)\n return getattr(fn, '__name__', 'unnamed_function')\n\n\ndef _indent(x: str, num_spaces: int):\n indent_str = ' ' * num_spaces\n lines = x.split('\n')\n # skip last line because it is always empty and should not be indented.\n assert not lines[-1]\n return '\n'.join(indent_str + line for line in lines[:-1]) + '\n'\n\n\ndef _attr_repr(value: Any):\n if callable(value) and (\n (isinstance(value, nn.Module) and value.__dict__.get('__name__', None))\n or (not isinstance(value, nn.Module) and getattr(value, '__name__', None))\n ):\n value_rep = value.__name__\n else:\n value_rep = repr(value)\n return value_rep\n\n\ndef _module_repr(module: 'Module', num_spaces: int = 4):\n """"""Returns a pretty printed representation of the module.""""""\n cls = type(module)\n try:\n fields = dataclasses.fields(cls)\n except TypeError:\n # Edge case with no fields e.g. 
module = nn.Module() causes error later.\n return object.__repr__(module)\n cls_name = cls.__name__\n rep = ''\n\n attributes = {\n f.name: f.type\n for f in fields\n if f.name not in ('parent', 'name') and f.repr\n }\n child_modules = {\n k: v\n for k, v in module._state.children.items() # pytype: disable=attribute-error\n if isinstance(v, Module)\n }\n if attributes:\n rep += '# attributes\n'\n for attr in attributes.keys():\n # TODO(jheek): can we get a nice string representation of attribute types?\n value = module.__dict__.get(attr, None)\n value_rep = _attr_repr(value)\n rep += f'{attr} = {value_rep}\n'\n if child_modules:\n rep += '# children\n'\n for name, child in child_modules.items():\n child_rep = _module_repr(child, num_spaces)\n rep += f'{name} = {child_rep}\n'\n if rep:\n return f'{cls_name}(\n{_indent(rep, num_spaces)})'\n else:\n return f'{cls_name}()'\n\n\n# Tabulation utilities.\n# -----------------------------------------------------------------------------\n@dataclasses.dataclass\nclass _CallInfo:\n index: int\n path: tuple[str, ...]\n module: 'Module'\n rngs: dict[str, core.scope.PRNGKey | core.scope.LazyRng] | None\n mutable: bool\n method: str\n args: tuple[Any, ...]\n kwargs: dict[str, Any]\n outputs: Any\n\n\n@dataclasses.dataclass\nclass _CallInfoContext(threading.local):\n index: int\n calls: list[_CallInfo]\n\n def get_call_index(self) -> int:\n index = self.index\n self.index += 1\n return index\n\n\n@contextlib.contextmanager\ndef _tabulate_context():\n _context.call_info_stack.append(_CallInfoContext(0, []))\n try:\n yield\n finally:\n _context.call_info_stack.pop()\n\n\n# Track parent relationship across Modules.\n# -----------------------------------------------------------------------------\nclass _DynamicContext(threading.local):\n """"""Dynamic context.""""""\n\n # TODO(marcvanzee): switch to using contextvars once minimum python version is\n # 3.7\n\n def __init__(self):\n self.module_stack: list['Module' | None] = [\n None,\n ]\n self.capture_stack = []\n self.call_info_stack: list[_CallInfoContext] = []\n\n\n# The global context\n_context = _DynamicContext()\n\n\nclass _Sentinel:\n def __copy__(self):\n return self # Do not copy singleton sentinel.\n\n def __deepcopy__(self, memo):\n del memo\n return self # Do not copy singleton sentinel.\n\n def __reduce__(self):\n return _get_unspecified_parent, ()\n\n\ndef _get_unspecified_parent():\n return _unspecified_parent\n\n\n_unspecified_parent = _Sentinel()\n\n\n# Enable automatic named_call wrapping for labelling profile traces.\n# -----------------------------------------------------------------------------\n_use_named_call = config.flax_profile\n\n\ndef _derive_profiling_name(module, fn):\n fn_name = _get_fn_name(fn)\n method_suffix = f'.{fn_name}' if fn_name != '__call__' else ''\n module_name = module.name or module.__class__.__name__\n return f'{module_name}{method_suffix}'\n\n\ndef enable_named_call():\n """"""Enables named call wrapping for labelling profile traces.\n\n When named call wrapping is enabled all JAX ops executed in a Module\n will be run under ``jax.named_scope``. 
The ``Module`` class name will\n show up around the operations belonging to that Module in the\n Tensorboard profiling UI, simplifying the profiling process.\n\n Note that ``jax.named_scope`` only works for\n compiled functions (e.g.: using jax.jit or jax.pmap).\n """"""\n global _use_named_call\n _use_named_call = True\n\n\ndef disable_named_call():\n """"""Disables named call wrapping.\n\n See ``enable_named_call``\n """"""\n global _use_named_call\n _use_named_call = False\n\n\n@contextlib.contextmanager\ndef override_named_call(enable: bool = True):\n # pylint: disable=g-doc-return-or-yield\n """"""Returns a context manager that enables/disables named call wrapping.\n\n Args:\n enable: If true, enables named call wrapping for labelling profile traces.\n (see ``enabled_named_call``).\n """"""\n # pylint: enable=g-doc-return-or-yield\n global _use_named_call\n use_named_call_prev = _use_named_call\n _use_named_call = enable\n try:\n yield\n finally:\n _use_named_call = use_named_call_prev\n\n\n# Intercept module methods.\n# -----------------------------------------------------------------------------\n@dataclasses.dataclass(frozen=True)\nclass InterceptorContext:\n """"""Read only state showing the calling context for method interceptors.\n\n Attributes:\n module: The Module instance whose method is being called.\n method_name: The name of the method being called on the module.\n orig_method: The original method defined on the module. Calling it will\n short circuit all other interceptors.\n """"""\n\n module: 'Module'\n method_name: str\n orig_method: Callable[..., Any]\n\n\nclass ThreadLocalStack(threading.local):\n """"""Thread-local stack.""""""\n\n def __init__(self):\n self._storage = []\n\n def push(self, elem: Any) -> None:\n self._storage.append(elem)\n\n def pop(self) -> Any:\n return self._storage.pop()\n\n def __iter__(self) -> Iterator[Any]:\n return iter(reversed(self._storage))\n\n def __len__(self) -> int:\n return len(self._storage)\n\n def __repr__(self) -> str:\n return f'{self.__class__.__name__}({self._storage})'\n\n\nArgs = tuple[Any]\nKwargs = dict[str, Any]\nNextGetter = Callable[..., Any]\nInterceptor = Callable[[NextGetter, Args, Kwargs, InterceptorContext], Any]\n_global_interceptor_stack = ThreadLocalStack()\n\n\n@contextlib.contextmanager\ndef intercept_methods(interceptor: Interceptor):\n # pylint: disable=g-doc-return-or-yield\n r""""""Registers a new method interceptor.\n\n Method interceptors allow you to (at a distance) intercept method calls to\n modules. It works similarly to decorators. You could modify args/kwargs before\n calling the underlying method and/or modify the result returning from calling\n the underlying method. Or you could completely skip calling the underlying\n method and decide to do something differently. For example::\n\n >>> import flax.linen as nn\n >>> import jax.numpy as jnp\n ...\n >>> class Foo(nn.Module):\n ... def __call__(self, x):\n ... return x\n ...\n >>> def my_interceptor1(next_fun, args, kwargs, context):\n ... print('calling my_interceptor1')\n ... return next_fun(*args, **kwargs)\n ...\n >>> foo = Foo()\n >>> with nn.intercept_methods(my_interceptor1):\n ... _ = foo(jnp.ones([1]))\n calling my_interceptor1\n\n You could also register multiple interceptors on the same method. Interceptors\n will run in order. For example::\n\n >>> def my_interceptor2(next_fun, args, kwargs, context):\n ... print('calling my_interceptor2')\n ... 
return next_fun(*args, **kwargs)\n ...\n >>> with nn.intercept_methods(my_interceptor1), \\n ... nn.intercept_methods(my_interceptor2):\n ... _ = foo(jnp.ones([1]))\n calling my_interceptor1\n calling my_interceptor2\n\n You could skip other interceptors by directly calling the\n ``context.orig_method``. For example::\n\n >>> def my_interceptor3(next_fun, args, kwargs, context):\n ... print('calling my_interceptor3')\n ... return context.orig_method(*args, **kwargs)\n >>> with nn.intercept_methods(my_interceptor3), \\n ... nn.intercept_methods(my_interceptor1), \\n ... nn.intercept_methods(my_interceptor2):\n ... _ = foo(jnp.ones([1]))\n calling my_interceptor3\n\n The following methods couldn't be intercepted:\n\n 1. Methods decoratored with ``nn.nowrap``.\n 2. Dunder methods including ``__eq__``, ``__repr__``, ``__init__``, ``__hash__``, and ``__post_init__``.\n 3. Module dataclass fields.\n 4. Module descriptors.\n\n Args:\n interceptor: A method interceptor.\n """"""\n _global_interceptor_stack.push(interceptor)\n try:\n yield\n finally:\n assert _global_interceptor_stack.pop() is interceptor\n\n\ndef run_interceptors(\n orig_method: Callable[..., Any],\n module: 'Module',\n *args,\n **kwargs,\n) -> Any:\n """"""Runs method interceptors.""""""\n method_name = _get_fn_name(orig_method)\n fun = functools.partial(orig_method, module)\n context = InterceptorContext(module, method_name, fun)\n\n def wrap_interceptor(interceptor, fun):\n """"""Wraps `fun` with `interceptor`.""""""\n\n @functools.wraps(fun)\n def wrapped(*args, **kwargs):\n return interceptor(fun, args, kwargs, context)\n\n return wrapped\n\n # Wraps interceptors around the original method. The innermost interceptor is\n # the last one added and directly wrapped around the original bound method.\n for interceptor in _global_interceptor_stack:\n fun = wrap_interceptor(interceptor, fun)\n return fun(*args, **kwargs)\n\n\n# Utilities for pytrees of Modules defined inside setup()\n# -----------------------------------------------------------------------------\n\n\ndef _sorted_items(x):\n """"""Returns items of a dict ordered by keys.""""""\n return sorted(x.items(), key=lambda x: x[0])\n\n\ndef _get_suffix_value_pairs(\n tree_or_leaf: Any,\n) -> list[tuple[str, type['Module']]]:\n """"""Helper for naming pytrees of submodules.""""""\n dict_or_leaf = serialization.to_state_dict(tree_or_leaf)\n if not isinstance(dict_or_leaf, dict) or not dict_or_leaf:\n return [('', tree_or_leaf)]\n else:\n flat_dict = traverse_util.flatten_dict(dict_or_leaf)\n return [('_' + '_'.join(k), v) for k, v in _sorted_items(flat_dict)]\n\n\ndef _map_over_modules_in_tree(fn, tree_or_leaf):\n """"""Helper for mapping function over submodules.""""""\n dict_or_leaf = serialization.to_state_dict(tree_or_leaf)\n if not isinstance(dict_or_leaf, dict) or not dict_or_leaf:\n return fn('', tree_or_leaf)\n else:\n flat_dict = traverse_util.flatten_dict(dict_or_leaf, keep_empty_nodes=True)\n mapped_flat_dict = {\n k: fn('_' + '_'.join(k), v) for k, v in _sorted_items(flat_dict)\n }\n return serialization.from_state_dict(\n tree_or_leaf, traverse_util.unflatten_dict(mapped_flat_dict)\n )\n\n\ndef _freeze_attr(val: Any) -> Any:\n """"""Recursively wrap the given attribute `var` in ``FrozenDict``.""""""\n if isinstance(val, (dict, FrozenDict)):\n return FrozenDict({k: _freeze_attr(v) for k, v in val.items()})\n elif isinstance(val, tuple):\n # Special case namedtuples and special JAX tuple structures otherwise they\n # would be downgraded to normal tuples.\n if 
hasattr(val, '_fields') or type(val).__name__ == 'PartitionSpec':\n return type(val)(*[_freeze_attr(v) for v in val])\n else:\n return tuple(_freeze_attr(v) for v in val)\n elif isinstance(val, list):\n return tuple(_freeze_attr(v) for v in val)\n else:\n return val\n\n\n# Method wrapping of ""compact methods"" and setup()\n# -----------------------------------------------------------------------------\ndef compact(fun: _CallableT) -> _CallableT:\n """"""Marks the given module method allowing inlined submodules.\n\n Methods wrapped in @compact can define submodules directly within the method.\n\n For instance::\n\n >>> import flax.linen as nn\n\n >>> class Foo(nn.Module):\n ... @nn.compact\n ... def __call__(self, x, features):\n ... x = nn.Dense(features)(x)\n ... ...\n ... return x\n\n At most one method in each Module may be wrapped with @compact.\n\n Args:\n fun: The Module method to mark as compact.\n\n Returns:\n The given function ``fun`` marked as compact.\n """"""\n fun.compact = True # type: ignore[attr-defined]\n return fun\n\n\ndef nowrap(fun: _CallableT) -> _CallableT:\n """"""Marks the given module method as a helper method that needn't be wrapped.\n\n Methods wrapped in ``@nowrap`` are private helper methods that needn't be wrapped\n with the state handler or a separate named_call transform.\n\n This is needed in several concrete instances:\n - if you're subclassing a method like Module.param and don't want this\n overriden core function decorated with the state management wrapper.\n - If you want a method to be callable from an unbound Module (e.g.: a\n function of construction of arguments that doesn't depend on params/RNGs).\n If you want to learn more about how Flax Modules manage their state read the\n [The Flax Module lifecycle](https://flax.readthedocs.io/en/latest/developer_notes/module_lifecycle.html)\n guide.\n\n For instance::\n\n >>> import flax.linen as nn\n >>> import jax, jax.numpy as jnp\n\n >>> class Foo(nn.Module):\n ... num_features: int\n\n ... @nn.nowrap\n ... def _make_dense(self, num_features):\n ... return nn.Dense(num_features)\n\n ... @nn.compact\n ... def __call__(self, x):\n ... # now safe to use constructor helper even if using named_call\n ... dense = self._make_dense(self.num_features)\n ... return dense(x)\n\n Args:\n fun: The Module method to mark as nowrap.\n\n Returns:\n The given function ``fun`` marked as nowrap.\n """"""\n fun.nowrap = True # type: ignore[attr-defined]\n return fun\n\n\ndef compact_name_scope(fun: _CallableT) -> _CallableT:\n """"""Creates compact submodules from a method.\n\n This is a decorator that allows you to define compact submodules from a\n method. It's intention is to make it easier to port code Haiku code to Flax\n by providing the same functionality.\n\n Example::\n\n >>> import flax.linen as nn\n >>> import jax\n >>> import jax.numpy as jnp\n >>> from flax.core import pretty_repr\n ...\n >>> class Foo(nn.Module):\n ... @nn.compact_name_scope\n ... def up(self, x):\n ... return nn.Dense(3)(x)\n ...\n ... @nn.compact_name_scope\n ... def down(self, x):\n ... return nn.Dense(3)(x)\n ...\n ... def __call__(self, x):\n ... 
return self.up(x) + self.down(x)\n ...\n >>> module = Foo()\n >>> variables = module.init(jax.random.PRNGKey(0), jnp.ones((1, 2)))\n >>> params = variables['params']\n >>> print(pretty_repr(jax.tree_util.tree_map(jnp.shape, params)))\n {\n down: {\n Dense_0: {\n bias: (3,),\n kernel: (2, 3),\n },\n },\n up: {\n Dense_0: {\n bias: (3,),\n kernel: (2, 3),\n },\n },\n }\n\n You can also use ``compact_name_scope`` inside ``@compact`` methods or even\n other\n ``compact_name_scope`` methods. Methods that are decorated with\n ``compact_name_scope``\n can also be called directly from ``init`` or ``apply`` via the ``method``\n argument::\n\n >>> y_down = module.apply({'params': params}, jnp.ones((1, 2)), method='down')\n >>> y_down.shape\n (1, 3)\n\n Args:\n fun: The Module method to mark as compact_name_scope.\n\n Returns:\n The given function ``fun`` marked as compact_name_scope.\n """"""\n\n @functools.wraps(fun)\n def compact_name_scope_wrapper(self: nn.Module, *args, **kwargs):\n name = fun.__name__\n if not hasattr(self, '_compact_name_scope_modules'):\n raise ValueError(\n f'Cannot call compact_name_scope method {name!r} on a Module that has not been '\n f'setup. This is likely because you are calling {name!r} '\n 'from outside of init or apply.'\n )\n module = self._compact_name_scope_modules[name]\n return module(*args, **kwargs)\n\n compact_name_scope_wrapper.compact_name_scope = True # type: ignore[attr-defined]\n compact_name_scope_wrapper.inner_fun = fun # type: ignore[attr-defined]\n compact_name_scope_wrapper.nowrap = True # type: ignore[attr-defined]\n return compact_name_scope_wrapper # type: ignore[return-value]\n\n\ndef _get_local_method_names(\n cls: Any, exclude: Iterable[str] = ()\n) -> tuple[str, ...]:\n """"""Gets method names of a class, excluding class and static methods.\n\n Args:\n cls: The class to get method names for.\n exclude: Names to exclude from output.\n\n Returns:\n A list of method names.\n """"""\n true_methods = set()\n for m in cls.__dict__:\n if callable(cls.__dict__[m]) and not inspect.isclass(\n cls.__dict__[m]\n ): # pytype: disable=not-supported-yet\n mtype = type(cls.__dict__[m])\n if mtype != staticmethod and mtype != classmethod:\n true_methods.add(m)\n return tuple(true_methods.difference(set(exclude)))\n\n\ndef _get_local_descriptor_names(\n cls: Any, exclude: Iterable[str] = ()\n) -> tuple[str, ...]:\n """"""Gets descriptor names of a class.\n\n Args:\n cls: The class to get property names for.\n exclude: Names to exclude from output.\n\n Returns:\n A list of property names.\n """"""\n true_properties = set()\n for m, attr in cls.__dict__.items():\n if not callable(attr) and (\n hasattr(attr, '__get__')\n or hasattr(attr, '__set__')\n or hasattr(attr, '__delete__')\n ):\n mtype = type(attr)\n if mtype != staticmethod and mtype != classmethod:\n true_properties.add(m)\n return tuple(true_properties.difference(set(exclude)))\n\n\ndef wrap_method_once(fun: Callable[..., Any]) -> Callable[..., Any]:\n """"""Manages Module state for a given user-defined method.\n\n Args:\n fun: User-defined Module method to manage state for.\n\n Returns:\n Wrapped method.\n """"""\n # Don't rewrap methods that have already had the state management wrapper\n # applied in the decorator stack. This wrapper should always be applied\n # before transformation wrappers.\n if hasattr(fun, 'method_handler_wrapped'):\n return fun\n\n @functools.wraps(fun)\n def wrapped_module_method(*args, **kwargs):\n # We might have incorrectly wrappped a callable\n # that is not a method. 
Check whether the first arg is self,\n # otherwise call the wrapped function as is.\n if args and isinstance(args[0], Module):\n self, args = args[0], args[1:]\n return self._call_wrapped_method(fun, args, kwargs)\n else:\n return fun(*args, **kwargs)\n\n wrapped_module_method.method_handler_wrapped = True # type: ignore[attr-defined]\n return wrapped_module_method\n\n\ndef wrap_descriptor_once(descriptor) -> 'DescriptorWrapper':\n """"""Wraps a descriptor to give better error messages.\n\n Args:\n descriptor: User-defined Module attribute descriptor.\n\n Returns:\n Wrapped descriptor.\n """"""\n # Don't rewrap descriptors.\n if isinstance(descriptor, DescriptorWrapper):\n return descriptor\n\n return create_descriptor_wrapper(descriptor)\n\n\ndef _wrap_hash(hash_fn: Callable[..., Any]) -> Callable[..., Any]:\n """"""Wraps a hash function with some check for Flax Modules.""""""\n\n @functools.wraps(hash_fn)\n def wrapped(self):\n if self.scope is not None:\n raise TypeError(""Can't call __hash__ on modules that hold variables."")\n try:\n hash_value = hash_fn(self)\n except TypeError as exc:\n raise TypeError(\n 'Failed to hash Flax Module. '\n 'The module probably contains unhashable attributes. '\n f'Module={self}'\n ) from exc\n return hash_value\n\n return wrapped\n\n\ndef _get_unbound_fn(method_or_fn: Callable[..., Any]) -> Callable[..., Any]:\n """"""Returns an unbound function from a method that is possibly bound.\n\n This means that if the passed function belongs of an instance of a class, then\n the returned function does no longer depend on the instance, which is passed\n as the first argument to the function.\n\n Args:\n method_or_fn: A class method or function.\n\n Returns:\n An unbound version of input function.\n """"""\n if inspect.ismethod(method_or_fn) and isinstance(\n method_or_fn.__self__, Module\n ): # pytype: disable=attribute-error\n method_or_fn = method_or_fn.__func__ # pytype: disable=attribute-error\n\n # The method should be callable, and it should have at least one argument\n # representing the class that is passed in.\n if (\n not callable(method_or_fn)\n or len(inspect.signature(method_or_fn).parameters) < 1\n ):\n raise errors.ApplyModuleInvalidMethodError(method_or_fn)\n\n return method_or_fn\n\n\ndef _map_submodules(fn: Callable[['Module'], Any], tree):\n """"""Map a function over all submodules in a tree.""""""\n g = lambda _, x: fn(x) if isinstance(x, Module) else x\n return _freeze_attr(_map_over_modules_in_tree(g, tree))\n\n\nclass SetupState(enum.IntEnum):\n # setup() has not been called.\n NEW = 0\n # setup() has been called outside a transform boundary.\n TRANSFORMED = 1\n # setup() has been called.\n DONE = 2\n\n\n@dataclasses.dataclass\nclass _ModuleInternalState:\n """"""Ephemeral Module Evaluation State.\n\n For clarity, we collect all of the temporary flags and ephemeral state used by\n Modules for autonaming and error messages here, alongside the rules used\n to pass this ephemeral state across transform boundaries.\n """"""\n\n in_compact_method: bool = False\n in_setup: bool = False\n setup_called: SetupState = SetupState.NEW\n is_initialized: bool = False\n autoname_cursor: dict[str, int] = dataclasses.field(default_factory=dict)\n children: dict[str, Union[str, 'Module']] = dataclasses.field(\n default_factory=dict\n )\n\n def reset(self) -> None:\n """"""Resets transient state.\n\n This function is called after each module method, so only attributes that\n are method-dependent are reset.\n """"""\n self.in_compact_method = False\n 
self.in_setup = False\n self.autoname_cursor = dict()\n\n def export(self) -> '_ModuleInternalState':\n """"""Exports transform-preserved state across transform boundary.""""""\n setup_state = (\n SetupState.TRANSFORMED if self.setup_called else SetupState.NEW\n )\n cloned = _ModuleInternalState(\n in_compact_method=self.in_compact_method,\n in_setup=self.in_setup,\n setup_called=setup_state,\n is_initialized=self.is_initialized,\n autoname_cursor=dict(self.autoname_cursor),\n )\n return cloned\n\n def reimport(self, other: '_ModuleInternalState') -> None:\n """"""Re-imports transform-preserved state from across transform boundary.""""""\n self.in_compact_method = other.in_compact_method\n self.in_setup = other.in_setup\n self.is_initialized = other.is_initialized\n self.autoname_cursor = dict(other.autoname_cursor)\n\n\n_uninitialized_module_internal_state = _ModuleInternalState()\n\n\n_UNDEFINED_COPY_PICKLE_METHODS = (\n '__getstate__',\n '__setstate__',\n '__getnewargs_ex__',\n '__reduce__',\n '__reduce_ex__',\n '__copy__',\n '__deepcopy__',\n)\n\n\n_caches: 'weakref.WeakKeyDictionary[Scope, weakref.WeakValueDictionary[FlaxId, Module]]' = weakref.WeakKeyDictionary()\n\n\ntuple_reduce = lambda xs, x: xs + (x,)\ntuple_init = lambda: ()\n\n\ncapture_call_intermediates = lambda _, method_name: method_name == '__call__'\n\n\nclass ParentDescriptor:\n """"""Wraps parent module references in weak refs.\n\n This prevents reference cycles from forming via parent links which can lead\n to accidental OOMs in eager mode due to slow garbage collection as well as\n spurious tracer leaks during jit compilation.\n\n Note: ""descriptors"" are the underlying python mechanism for implementing\n dynamic @property decorators. We need to use a raw descriptor instead of the\n more common decorator in order to force that the appropriate getter/setter\n logic applies in subclasses even after various dataclass transforms.\n """"""\n\n def __get__(self, obj, objtype=None):\n # check if obj is None, happens during %autoreload\n if obj is None:\n return None\n parent = object.__getattribute__(obj, '_parent_ref')\n return parent() if isinstance(parent, weakref.ReferenceType) else parent\n\n def __set__(self, obj, value):\n maybe_weak = weakref.ref(value) if isinstance(value, Module) else value\n object.__setattr__(obj, '_parent_ref', maybe_weak)\n\n\nclass Descriptor(tpe.Protocol):\n __isabstractmethod__: bool\n\n def __get__(self, obj, objtype=None) -> Any:\n ...\n\n def __set__(self, obj, value) -> None:\n ...\n\n def __delete__(self, obj) -> None:\n ...\n\n def __set_name__(self, owner, name) -> None:\n ...\n\n\nclass DescriptorWrapper:\n pass\n\n\ndef create_descriptor_wrapper(descriptor: Descriptor):\n """"""Creates a descriptor wrapper that calls a get_fn on the descriptor.""""""\n\n class _DescriptorWrapper(DescriptorWrapper):\n """"""A descriptor that can wrap any descriptor.""""""\n\n if hasattr(descriptor, '__isabstractmethod__'):\n __isabstractmethod__ = descriptor.__isabstractmethod__\n\n def __init__(self, wrapped: Descriptor):\n self.wrapped = wrapped\n\n # conditionally define descriptor methods\n if hasattr(descriptor, '__get__'):\n\n def __get__(self, *args, **kwargs):\n # here we will catch internal AttributeError and re-raise it as a\n # more informative and correct error message.\n try:\n return self.wrapped.__get__(*args, **kwargs)\n except AttributeError as e:\n raise errors.DescriptorAttributeError() from e\n\n if hasattr(descriptor, '__set__'):\n\n def __set__(self, *args, **kwargs):\n return 
self.wrapped.__set__(*args, **kwargs)\n\n if hasattr(descriptor, '__delete__'):\n\n def __delete__(self, *args, **kwargs):\n return self.wrapped.__delete__(*args, **kwargs)\n\n if hasattr(descriptor, '__set_name__'):\n\n def __set_name__(self, *args, **kwargs):\n self.wrapped.__set_name__(*args, **kwargs)\n\n def __getattr__(self, name):\n if 'wrapped' not in vars(self):\n raise AttributeError()\n return getattr(self.wrapped, name)\n\n return _DescriptorWrapper(descriptor)\n\n\n# Base Module definition.\n# -----------------------------------------------------------------------------\n\n\ndef module_field(*, kw_only: bool = False, default: Any | None = ...) -> Any:\n ...\n\n\n# The ModuleBase class is created only to make static analyzers happy\n# mainly pytype and pyright. Some notes:\n# * pyright (correctly) complains that Module itself is not a dataclass, even\n# though all its subclasses and intances ARE dataclasses. Because there is no\n# way to annotate this in a way that pyright understands, we create a\n# ModuleBase class decorated with `dataclass_transform` such that pyright\n# thinks Module is a dataclass (in reality only subclasses are instantiated\n# so this is fine).\n# * The `__dataclass_fields__` attribute is needed because pytype seems to\n# not understand the `dataclass_transform` decorator, therefore we need\n# to add the attribute manually.\n# * Other attributes are annotated for completeness. Because we are using\n# the `if typing.TYPE_CHECKING` pattern, these annotations are not present\n# at runtime so they don't affect the dataclass behavior.\n@tpe.dataclass_transform(field_specifiers=(module_field,)) # type: ignore[literal-required]\nclass ModuleBase:\n if typing.TYPE_CHECKING:\n scope: Scope | None\n _state: _ModuleInternalState\n _parent_ref: Union['Module', weakref.ReferenceType['Module'], None]\n __dataclass_fields__: dict[str, dataclasses.Field]\n\n\nclass Module(ModuleBase):\n """"""Base class for all neural network modules.\n\n Layers and models should subclass this class.\n\n All Flax Modules are Python 3.7\n `dataclasses `_. Since\n dataclasses take over ``__init__``, you should instead override :meth:`setup`,\n which is automatically called to initialize the module.\n\n Modules can contain submodules, and in this way can be nested in a tree\n structure. Submodels can be assigned as regular attributes inside the\n :meth:`setup` method.\n\n You can define arbitrary ""forward pass"" methods on your Module subclass.\n While no methods are special-cased, ``__call__`` is a popular choice because\n it allows you to use module instances as if they are functions::\n\n >>> from flax import linen as nn\n >>> from typing import Tuple\n\n >>> class Module(nn.Module):\n ... features: Tuple[int, ...] = (16, 4)\n\n ... def setup(self):\n ... self.dense1 = nn.Dense(self.features[0])\n ... self.dense2 = nn.Dense(self.features[1])\n\n ... def __call__(self, x):\n ... 
return self.dense2(nn.relu(self.dense1(x)))\n\n Optionally, for more concise module implementations where submodules\n definitions are co-located with their usage, you can use the\n :meth:`compact` wrapper.\n """"""\n\n if typing.TYPE_CHECKING:\n name: str | None = module_field(kw_only=True, default=None)\n parent: Union['Module', _Sentinel, None] = module_field(\n kw_only=True, default=None\n )\n\n def __init__(self, *args, **kwargs):\n # this stub makes sure pytype accepts constructor arguments.\n pass\n\n def __call__(self, *args, **kwargs) -> Any:\n # this stub allows pytype to accept Modules as Callables.\n pass\n\n @classmethod\n def __init_subclass__(cls, kw_only: bool = False, **kwargs: Any) -> None:\n """"""Automatically initializes all subclasses as custom dataclasses.""""""\n super().__init_subclass__(**kwargs)\n # All Flax Modules are dataclasses. We force this convention since\n # it encourages the stateless behavior needed to clone module instances for\n # functional transformation. Instead of using a python metaclass, we\n # automatically transform Modules into dataclasses at subclass creation\n # time, and we set the last dataclass arguments to `parent` and `name`.\n cls._customized_dataclass_transform(kw_only)\n # We wrap user-defined methods including setup and __call__ to enforce\n # a number of different checks and to provide clear error messages.\n cls._find_compact_name_scope_methods()\n cls._wrap_module_attributes()\n # Set empty class defaults.\n cls._state = _uninitialized_module_internal_state # type: ignore[attr-defined]\n cls.scope: Scope | None = None # type: ignore\n # Handles weak referencing of parent Modules to prevent reference cycles.\n cls._parent_ref = None # type: ignore[attr-defined]\n cls.parent = ParentDescriptor() # type: ignore[assignment]\n\n @classmethod\n def _customized_dataclass_transform(cls, kw_only: bool):\n """"""Transforms `cls` into a dataclass, with custom additional behavior.\n\n 1. Inject `parent` and `name` fields. (If they are already present,\n then check that they have the expected types.)\n 2. Set compare, hash, and repr to False for non-init fields.\n 3. 
Generate a hash function (if not provided by cls).\n """"""\n # Check reserved attributes have expected type annotations.\n annotations = dict(cls.__dict__.get('__annotations__', {}))\n if annotations.get('parent', _ParentType) != _ParentType:\n raise errors.ReservedModuleAttributeError(annotations)\n if annotations.get('name', str) not in ('str', str, Optional[str]):\n raise errors.ReservedModuleAttributeError(annotations)\n\n # any non-init field will only be set in setup\n # During __hash__ and __eq__ the field is not set yet\n # so it should not be used in compare, hash or repr.\n for field in annotations:\n field_meta = getattr(cls, field, None)\n if isinstance(field_meta, dataclasses.Field) and not field_meta.init:\n field_meta.compare = False\n field_meta.hash = False\n field_meta.repr = False\n\n extra_fields = [\n (\n 'parent',\n _ParentType,\n kw_only_dataclasses.field(\n repr=False, default=_unspecified_parent, kw_only=True\n ),\n ),\n (\n 'name',\n Optional[str],\n kw_only_dataclasses.field(default=None, kw_only=True),\n ),\n ]\n\n if kw_only:\n if tuple(sys.version_info)[:3] >= (3, 10, 0):\n for (\n name,\n annotation, # pytype: disable=invalid-annotation\n default,\n ) in extra_fields:\n setattr(cls, name, default)\n cls.__annotations__[name] = annotation\n dataclasses.dataclass( # type: ignore[call-overload]\n unsafe_hash='__hash__' not in cls.__dict__,\n repr=False,\n kw_only=True,\n )(cls)\n else:\n raise TypeError('`kw_only` is not available before Py 3.10.')\n else:\n # Now apply dataclass transform (which operates in-place).\n # Do generate a hash function only if not provided by the class.\n kw_only_dataclasses.dataclass(\n cls,\n unsafe_hash='__hash__' not in cls.__dict__,\n repr=False,\n extra_fields=extra_fields,\n ) # pytype: disable=wrong-keyword-args\n\n cls.__hash__ = _wrap_hash(cls.__hash__) # type: ignore[method-assign]\n\n @classmethod\n def _find_compact_name_scope_methods(cls):\n """"""Finds all compact_name_scope methods in the class.""""""\n methods = [m[0] for m in inspect.getmembers(cls, predicate=callable)]\n compact_name_scope_fns = tuple(\n method_name\n for method_name in methods\n if hasattr(getattr(cls, method_name), 'compact_name_scope')\n )\n cls._compact_name_scope_methods = compact_name_scope_fns\n\n @classmethod\n def _wrap_module_attributes(cls):\n """"""Wraps user-defined non-inherited methods and descriptors with state\n\n management functions.\n """"""\n # wrap methods\n method_exclusions = [f.name for f in dataclasses.fields(cls)] + [\n '__eq__',\n '__repr__',\n '__init__',\n '__hash__',\n '__post_init__',\n ]\n for key in _get_local_method_names(cls, exclude=method_exclusions):\n method = getattr(cls, key)\n if hasattr(method, 'nowrap'):\n continue\n setattr(cls, key, wrap_method_once(method))\n\n # wrap descriptors\n descriptor_exclusions = [f.name for f in dataclasses.fields(cls)] + [\n 'parent',\n '__dict__',\n ]\n for key in _get_local_descriptor_names(cls, descriptor_exclusions):\n # don't use getattr here, since it will call the descriptor\n descriptor = cls.__dict__[key]\n if hasattr(descriptor, 'nowrap'):\n continue\n setattr(cls, key, wrap_descriptor_once(descriptor))\n return cls\n\n def _call_wrapped_method(self, fun, args, kwargs):\n """"""Calls a wrapped method.\n\n This function is responsible for setting up the thread local state\n correctly before calling the method and cleaning up afterwards.\n This includes storing intermediates, setup of the compact scope,\n and making sure setup is called before any other method.\n\n 
Args:\n fun: The wrapped method.\n args: Named arguments passed to ``fun``.\n kwargs: Keyword arguments passed to ``fun``.\n\n Returns:\n The results of calling ``fun``.\n """"""\n is_compact_method = hasattr(fun, 'compact')\n fun_name = _get_fn_name(fun)\n is_setup_method = fun_name == 'setup'\n add_call_info = not is_setup_method and len(_context.call_info_stack) > 0\n # We lazily call setup() only when needed.\n if is_setup_method:\n if self.scope is None:\n raise errors.CallSetupUnboundModuleError()\n is_recurrent = self._state.in_setup\n self._state.in_setup = True\n else:\n self._try_setup()\n\n if is_compact_method:\n if self.scope is None:\n raise errors.CallCompactUnboundModuleError()\n is_recurrent = self._state.in_compact_method\n self._state.in_compact_method = True\n _context.module_stack.append(self)\n try:\n # get call info\n if add_call_info:\n assert self.scope is not None\n call_index = _context.call_info_stack[-1].get_call_index()\n\n if _global_interceptor_stack:\n run_fun = functools.partial(run_interceptors, fun)\n else:\n run_fun = fun\n\n # call method\n if _use_named_call:\n with jax.named_scope(_derive_profiling_name(self, fun)):\n y = run_fun(self, *args, **kwargs)\n else:\n y = run_fun(self, *args, **kwargs)\n\n if _context.capture_stack:\n filter_fn = _context.capture_stack[-1]\n if filter_fn and filter_fn(self, fun_name):\n self.sow('intermediates', fun_name, y)\n if add_call_info:\n _args, _kwargs, _y = flax.linen.summary._represent_tree(\n (args, kwargs, y)\n )\n _context.call_info_stack[-1].calls.append(\n _CallInfo(\n call_index,\n self.path,\n self.clone(),\n self.scope.rngs,\n self.scope.mutable,\n fun.__name__,\n _args,\n _kwargs,\n _y,\n )\n )\n return y\n finally:\n _context.module_stack.pop()\n if is_compact_method:\n object.__setattr__(self, 'scope', self.scope.rewound())\n # setup or compact calls can be recurrent for example due to super calls\n # resetting the state would cause is compact/setup method\n # to be set to False prematurely.\n if (is_compact_method or is_setup_method) and not is_recurrent:\n self._state.reset()\n\n def __setattr__(self, name: str, val: Any):\n """"""Sets an attribute on this Module.\n\n We overload setattr solely to support pythonic naming via assignment of\n submodules in the special :meth:`setup` function::\n\n self.submodule_name = MyModule(...)\n\n We also support lists and other general pytrees, e.g.::\n\n self.submodules = [MyModule0(..), MyModule1(..), ...]\n\n Args:\n name: Attribute to set.\n val: Value of the attribute.\n """"""\n fields = self.__dataclass_fields__ # pytype: disable=attribute-error\n is_dataclass_attr = name in fields and fields[name].init\n\n if not self._state.in_setup:\n if not self._state.is_initialized:\n # Setting attributes before end of Module.__post_init__()\n object.__setattr__(self, name, val)\n return\n else:\n # If the attribute is a python special method, we allow setting it (this\n # is useful e.g. for IPython auto-reload).\n if name.startswith('__'):\n object.__setattr__(self, name, val)\n return\n # We're past all initialization and setup logic:\n # Raises a TypeError just like frozen python dataclasses.\n raise errors.SetAttributeFrozenModuleError(\n self.__class__.__name__, name, val\n )\n\n # We're inside the setup() method:\n if is_dataclass_attr:\n # These names are specified as dataclass fields. 
They should not be\n # initialized within the setup() method, but can be modified freely\n # before it.\n raise errors.SetAttributeInModuleSetupError()\n\n # Values (that may be variables or submodules) are being defined and\n # attached in setup(), we run some extra logic in that case.\n self._register_submodules(name, val)\n\n def __getattr__(self, name: str) -> Any:\n """"""Call setup() before getting any setup-defined attributes.""""""\n # We don't want to return anything for python copy / pickle methods.\n if name in _UNDEFINED_COPY_PICKLE_METHODS:\n raise AttributeError()\n self._try_setup()\n if name in self.__dict__:\n return self.__dict__[name]\n else:\n msg = f'""{self.__class__.__name__}"" object has no attribute ""{name}"".'\n if self.scope is None:\n msg += (\n f' If ""{name}"" is defined in \'.setup()\', remember these fields '\n ""are only accessible from inside 'init' or 'apply'.""\n )\n raise AttributeError(msg)\n\n def __dir__(self) -> list[str]:\n """"""Call setup() before listing attributes.""""""\n self._try_setup()\n return object.__dir__(self) # type: ignore\n\n def __post_init__(self) -> None:\n # DO NOT REMOVE - Marker for internal logging.\n # In dataclasses, __init__ is overridden to process dataclass arguments,\n # and __post_init__ is called immediately afterwards. Here, depending on the\n # type of `parent` passed to initialize the Module, we either defer\n # initialization, attach this Module as a submodule of a parent, or bind\n # this Module at the top-level to variables and rngs.\n\n object.__setattr__(self, '_id', uuid())\n object.__setattr__(self, '_state', _ModuleInternalState())\n\n # Typically we set the parent based on the dynamic module context.\n if self.parent is _unspecified_parent: # pytype: disable=attribute-error\n object.__setattr__(self, 'parent', _context.module_stack[-1])\n\n # Initialization is deferred for top level Modules or any other ""orphan""\n # Modules until attachment by __setattr__ i.e. MyModule(..., parent=None)\n if self.parent is None:\n return\n\n # Register submodule on parent Module.\n if isinstance(self.parent, Module):\n # When initializing an unnamed Module inside setup()\n # initialization is deferred until attachment by __setattr__\n # i.e. 
self.mymodule = MyModule(...)\n self.name: str | None\n if (\n self.parent._state.in_setup and self.name is None\n ): # pytype: disable=attribute-error\n return\n if not self.parent._initialization_allowed:\n raise errors.AssignSubModuleError(self.__class__.__name__)\n # Autonaming of submodules.\n if self.name is None: # pytype: disable=attribute-error\n prefix = f'{self.__class__.__name__}'\n cursor = self.parent._state.autoname_cursor.get(prefix, 0)\n self.name = f'{prefix}_{cursor}'\n self.parent._state.autoname_cursor[prefix] = cursor + 1\n # Allow scope aliasing under transforms for submodules defined in setup.\n reuse_scopes = (\n self.parent._state.in_setup\n and self.parent._state.setup_called == SetupState.TRANSFORMED\n )\n # Perform name-collision check.\n if self.parent._name_taken(self.name, reuse_scopes=reuse_scopes):\n parent_class = self.parent.__class__.__name__\n raise errors.NameInUseError('submodule', self.name, parent_class)\n # Finalize attachment to parent and scope initialization.\n self.parent._state.children[self.name] = self\n assert self.parent.scope is not None\n object.__setattr__(\n self, 'scope', self.parent.scope.push(self.name, reuse=reuse_scopes)\n )\n\n # Top-level invocation with a functional Scope.\n elif isinstance(self.parent, Scope):\n object.__setattr__(self, 'scope', self.parent)\n else:\n raise ValueError('parent must be None, Module or Scope')\n\n # eagerly bind submodules if scope is available\n if self.scope is not None:\n for field in dataclasses.fields(self):\n if field.name not in ('parent', 'name') and field.init:\n self._register_submodules(field.name, getattr(self, field.name))\n\n self._state.is_initialized = True\n\n def __repr__(self) -> str:\n return _module_repr(self)\n\n def setup(self) -> None:\n """"""Initializes a Module lazily (similar to a lazy ``__init__``).\n\n ``setup`` is called once lazily on a module instance when a module\n is bound, immediately before any other methods like ``__call__`` are\n invoked, or before a ``setup``-defined attribute on ``self`` is accessed.\n\n This can happen in three cases:\n\n 1. Immediately when invoking :meth:`apply`, :meth:`init` or\n :meth:`init_and_output`.\n\n 2. Once the module is given a name by being assigned to an attribute of\n another module inside the other module's ``setup`` method\n (see :meth:`__setattr__`)::\n\n >>> class MyModule(nn.Module):\n ... def setup(self):\n ... submodule = nn.Conv(...)\n\n ... # Accessing `submodule` attributes does not yet work here.\n\n ... # The following line invokes `self.__setattr__`, which gives\n ... # `submodule` the name ""conv1"".\n ... self.conv1 = submodule\n\n ... # Accessing `submodule` attributes or methods is now safe and\n ... # either causes setup() to be called once.\n\n 3. 
Once a module is constructed inside a method wrapped with\n :meth:`compact`, immediately before another method is called or\n ``setup`` defined attribute is accessed.\n """"""\n pass\n\n def _register_submodules(self, name, val):\n """"""Registers a submodule.""""""\n assert self.scope, 'Trying to register submodules on unbound scope.'\n root = self.scope.root\n cache = _caches.get(root, weakref.WeakValueDictionary())\n _caches[root] = cache\n queue = []\n preserve_adopted_names = config.flax_preserve_adopted_names\n if hasattr(type(self), 'preserve_adopted_names'):\n preserve_adopted_names = type(self).preserve_adopted_names\n\n def adopt_attr_modules(cache, queue, suffix, subvalue):\n if isinstance(subvalue, Module):\n current_name = subvalue.name\n adopted_name = None\n if subvalue.parent is None:\n # Preserve sharing-by-reference relationships during adoption\n # via cache keyed on unique instance ids.\n key = subvalue._id\n # Module was passed from outside. It needs to be cloned.\n # Outside modules are named by attachment, not an outer name,\n # UNLESS we're using new adopted name policy, in which case an existing\n # name will be used, as is often supplied by config systems.\n if preserve_adopted_names:\n adopted_name = object.__getattribute__(subvalue, 'name')\n if key in cache:\n subvalue = cache[key]\n else:\n subvalue = subvalue.clone(name=None)\n cache[key] = subvalue\n if subvalue.name is None:\n object.__setattr__(subvalue, 'parent', self)\n if adopted_name is None:\n adopted_name = (\n f'{name}{suffix}'\n if not isinstance(subvalue, CompactNameScope)\n else current_name\n )\n object.__setattr__(subvalue, 'name', adopted_name)\n queue.append(subvalue)\n return subvalue\n\n val = _freeze_attr(\n _map_over_modules_in_tree(\n functools.partial(adopt_attr_modules, cache, queue), val\n )\n )\n object.__setattr__(self, name, val)\n for x in queue:\n x.__post_init__()\n\n def _try_setup(self, shallow: bool = False) -> None:\n """"""Tries to setup module if scope is available and setup has not been called yet.""""""\n if (\n self.scope\n and not self._state.in_setup\n and self._state.setup_called != SetupState.DONE\n ):\n try:\n self._state.in_setup = True\n # A shallow setup will only register attribute submodules but it does\n # not call the user's setup. 
This avoids running before a\n # transformation.\n for field in dataclasses.fields(self):\n if field.name not in ('parent', 'name') and field.init:\n self._register_submodules(field.name, getattr(self, field.name))\n if not shallow:\n self.setup()\n # create NonTransparent Modules\n self._compact_name_scope_modules = {\n name: CompactNameScope(\n getattr(type(self), name).inner_fun, lambda: self, name=name\n )\n for name in self._compact_name_scope_methods\n }\n\n # We run static checks abstractly once for setup before any transforms\n # to detect name collisions and other python errors.\n elif self._state.setup_called == SetupState.NEW:\n self._validate_setup()\n finally:\n self._state.in_setup = False\n if not shallow:\n self._state.setup_called = SetupState.DONE\n\n def _validate_setup(self) -> None:\n """"""Abstractly evaluates setup only to run static checks.""""""\n\n def run_setup_only(x):\n wrapped_id = wrap_method_once(lambda m, x: x)\n with TestScope({}, rngs={}, mutable=True).temporary() as root:\n return wrapped_id(self.clone(parent=root), x)\n\n _ = jax.eval_shape(run_setup_only, 0)\n\n def _name_taken(\n self,\n name: str,\n reuse_scopes: bool = False,\n collection: str | None = None,\n ) -> bool:\n assert self.scope is not None\n if reuse_scopes:\n return False\n return self.scope.name_reserved(name, collection)\n\n @property\n def _initialization_allowed(self):\n return (\n not self._state.is_initialized # allow eager attachment in post-init\n or self._state.in_setup\n or self._state.in_compact_method\n )\n\n @property\n def path(self):\n """"""Get the path of this Module. Top-level root modules have an empty path ``()``.\n Note that this method can only be used on bound modules that have a valid scope.\n\n Example usage::\n\n >>> import flax.linen as nn\n >>> import jax, jax.numpy as jnp\n\n >>> class SubModel(nn.Module):\n ... @nn.compact\n ... def __call__(self, x):\n ... print(f'SubModel path: {self.path}')\n ... return x\n\n >>> class Model(nn.Module):\n ... @nn.compact\n ... def __call__(self, x):\n ... print(f'Model path: {self.path}')\n ... return SubModel()(x)\n\n >>> model = Model()\n >>> variables = model.init(jax.random.key(0), jnp.ones((1, 2)))\n Model path: ()\n SubModel path: ('SubModel_0',)\n """"""\n\n if self.scope is None:\n raise ValueError(""Can't access module paths on unbound modules."")\n\n return self.scope.path\n\n def clone(\n self: M,\n *,\n parent: Union[Scope, 'Module', _Sentinel] | None = None,\n _deep_clone: bool | weakref.WeakValueDictionary = False,\n _reset_names: bool = False,\n **updates,\n ) -> M:\n """"""Creates a clone of this Module, with optionally updated arguments.\n\n NOTE: end users are encouraged to use the ``copy`` method. ``clone`` is used\n primarily for internal routines, and ``copy`` offers simpler arguments and\n better defaults.\n\n Args:\n parent: The parent of the clone. The clone will have no parent if no\n explicit parent is specified.\n _deep_clone: A boolean or a weak value dictionary to control deep cloning\n of submodules. If True, submodules will be cloned recursively. If a weak\n value dictionary is passed, it will be used to cache cloned submodules.\n This flag is used by init/apply/bind to avoid scope leakage.\n _reset_names: If True, ``name=None`` is also passed to submodules when\n cloning. 
Resetting names in submodules is necessary when calling ``.unbind``.\n **updates: Attribute updates.\n\n Returns:\n A clone of the this Module with the updated attributes and parent.\n """"""\n attrs = {\n f.name: getattr(self, f.name) for f in dataclasses.fields(self) if f.init\n }\n\n attrs.update(parent=parent, **updates)\n\n # Here we implement deep cloning of submodules, this is necessary to avoid scope leakage\n # from external submodules into init/apply/bind while preserving sharing-by-reference\n # relationships between submodules.\n if _deep_clone != False:\n # We use a weak value dictionary to cache cloned submodules. When a shared\n # submodule is cloned, its only cloned once else its fetched from the cache.\n cache = (\n weakref.WeakValueDictionary()\n if isinstance(_deep_clone, bool)\n else _deep_clone\n )\n\n def clone_fn(m: Module) -> Module:\n if hasattr(m, '_id'):\n key = m._id\n if key in cache:\n return cache[key]\n else:\n if _reset_names:\n clone = m.clone(\n _deep_clone=cache, _reset_names=_reset_names, name=None\n )\n else:\n clone = m.clone(_deep_clone=cache)\n cache[key] = clone\n return clone\n else:\n # If the module doesn't have an _id attribute it could be a mock object\n # so we return it as is.\n return m\n\n # _map_submodules will map over all submodules inside attrs\n # value here can be any pytree, non-module values are ignored\n for field_name, value in attrs.items():\n if field_name == 'parent':\n continue\n attrs[field_name] = _map_submodules(clone_fn, value)\n\n module = self.__class__(**attrs)\n\n return module\n\n def copy(\n self: M,\n *,\n parent: Union[Scope, 'Module', _Sentinel] | None = _unspecified_parent,\n name: str | None = None,\n **updates,\n ) -> M:\n """"""Creates a copy of this Module, with optionally updated arguments.\n\n Args:\n parent: The parent of the copy. By default the current module is taken\n as parent if not explicitly specified.\n name: A new name for the copied Module, by default a new automatic name\n will be given.\n **updates: Attribute updates.\n\n Returns:\n A copy of the this Module with the updated name, parent, and attributes.\n """"""\n return self.clone(\n parent=parent, name=name, _deep_clone=True, _reset_names=False, **updates\n )\n\n @overload\n def variable(\n self,\n col: str,\n name: str,\n init_fn: Callable[..., T] | None = None,\n *init_args,\n ) -> Variable[T]:\n ...\n\n @overload\n def variable(\n self,\n col: str,\n name: str,\n init_fn: Callable[..., T] | None = None,\n *init_args,\n unbox: Literal[True],\n **init_kwargs,\n ) -> Variable[T]:\n ...\n\n @overload\n def variable(\n self,\n col: str,\n name: str,\n init_fn: Callable[..., T] | None = None,\n *init_args,\n unbox: Literal[False],\n **init_kwargs,\n ) -> Variable[meta.AxisMetadata[T]]:\n ...\n\n @overload\n def variable(\n self,\n col: str,\n name: str,\n init_fn: Callable[..., T] | None = None,\n *init_args,\n unbox: bool = True,\n **init_kwargs,\n ) -> Variable[T] | Variable[meta.AxisMetadata[T]]:\n ...\n\n def variable(\n self,\n col: str,\n name: str,\n init_fn: Callable[..., T] | None = None,\n *init_args,\n unbox: bool = True,\n **init_kwargs,\n ) -> Variable[T] | Variable[meta.AxisMetadata[T]]:\n """"""Declares and returns a variable in this Module.\n\n See :mod:`flax.core.variables` for more information. 
See also :meth:`param`\n for a shorthand way to define read-only variables in the ""params""\n collection.\n\n Contrary to :meth:`param`, all arguments passing using ``init_fn`` should be\n passed on explicitly::\n\n >>> class Foo(nn.Module):\n ... @nn.compact\n ... def __call__(self, x):\n ... x = nn.Dense(4)(x)\n ... key = self.make_rng('stats')\n ... mean = self.variable('stats', 'mean', nn.initializers.lecun_normal(), key, x.shape)\n ... ...\n ... return x * mean.value\n >>> variables = Foo().init({'params': jax.random.key(0), 'stats': jax.random.key(1)}, jnp.ones((2, 3)))\n >>> jax.tree_util.tree_map(jnp.shape, variables)\n {'params': {'Dense_0': {'bias': (4,), 'kernel': (3, 4)}}, 'stats': {'mean': (2, 4)}}\n\n In the example above, the function ``lecun_normal`` expects two arguments:\n ``key`` and ``shape``, and both have to be passed on. The PRNG for ``stats``\n has to be provided explicitly when calling :meth:`init` and :meth:`apply`.\n\n Args:\n col: The variable collection name.\n name: The variable name.\n init_fn: The function that will be called to compute the initial value of\n this variable. This function will only be called the first time this\n variable is used in this module. If None, the variable must already be\n initialized otherwise an error is raised.\n *init_args: The positional arguments to pass to init_fn.\n unbox: If True, ``AxisMetadata`` instances are replaced by their unboxed\n value, see ``flax.nn.meta.unbox`` (default: True).\n **init_kwargs: The key-word arguments to pass to init_fn\n\n Returns:\n A :class:`flax.core.variables.Variable` that can be read or set via\n "".value"" attribute. Throws an error if the variable exists already.\n """"""\n if not self._initialization_allowed:\n raise ValueError(\n 'Variables must be initialized in `setup()` or in a method '\n 'wrapped in `@compact`'\n )\n if self._name_taken(name, collection=col):\n raise errors.NameInUseError('variable', name, self.__class__.__name__)\n assert self.scope is not None\n v = self.scope.variable(\n col, name, init_fn, *init_args, unbox=unbox, **init_kwargs\n )\n self._state.children[name] = col\n return v\n\n @overload\n def param(\n self, name: str, init_fn: Callable[..., T], *init_args,\n ) -> T:\n ...\n\n @overload\n def param(\n self,\n name: str,\n init_fn: Callable[..., T],\n *init_args,\n unbox: Literal[True],\n **init_kwargs,\n ) -> T:\n ...\n\n @overload\n def param(\n self,\n name: str,\n init_fn: Callable[..., T],\n *init_args,\n unbox: Literal[False],\n **init_kwargs,\n ) -> meta.AxisMetadata[T]:\n ...\n\n @overload\n def param(\n self,\n name: str,\n init_fn: Callable[..., T],\n *init_args,\n unbox: bool,\n **init_kwargs,\n ) -> T | meta.AxisMetadata[T]:\n ...\n\n def param(\n self,\n name: str,\n init_fn: Callable[..., T],\n *init_args,\n unbox: bool = True,\n **init_kwargs,\n ) -> T | meta.AxisMetadata[T]:\n """"""Declares and returns a parameter in this Module.\n\n Parameters are read-only variables in the collection named ""params"". See\n :mod:`flax.core.variables` for more details on variables.\n\n The first argument of ``init_fn`` is assumed to be a PRNG key, which is\n provided automatically and does not have to be passed using ``init_args``\n or ``init_kwargs``::\n\n >>> class Foo(nn.Module):\n ... @nn.compact\n ... def __call__(self, x):\n ... x = nn.Dense(4)(x)\n ... mean = self.param('mean', nn.initializers.lecun_normal(), x.shape)\n ... ...\n ... 
return x * mean\n >>> variables = Foo().init({'params': jax.random.key(0), 'stats': jax.random.key(1)}, jnp.ones((2, 3)))\n >>> jax.tree_util.tree_map(jnp.shape, variables)\n {'params': {'Dense_0': {'bias': (4,), 'kernel': (3, 4)}, 'mean': (2, 4)}}\n\n In the example above, the function ``lecun_normal`` expects two arguments:\n ``key`` and ``shape``, but only ``shape`` has to be provided explicitly;\n ``key`` is set automatically using the PRNG for ``params`` that is passed\n when initializing the module using :meth:`init`.\n\n Args:\n name: The parameter name.\n init_fn: The function that will be called to compute the initial value of\n this variable. This function will only be called the first time this\n parameter is used in this module.\n *init_args: The positional arguments to pass to init_fn.\n unbox: If True, ``AxisMetadata`` instances are replaced by their unboxed\n value, see ``flax.nn.meta.unbox`` (default: True).\n **init_kwargs: The key-word arguments to pass to init_fn.\n\n Returns:\n The value of the initialized parameter. Throws an error if the parameter\n exists already.\n """"""\n if not self._initialization_allowed:\n raise ValueError(\n 'Parameters must be initialized in `setup()` or in a method '\n 'wrapped in `@compact`'\n )\n if self._name_taken(name, collection='params'):\n raise errors.NameInUseError('param', name, self.__class__.__name__)\n assert self.scope is not None\n v = self.scope.param(name, init_fn, *init_args, unbox=unbox, **init_kwargs)\n self._state.children[name] = 'params'\n return v\n\n def has_variable(self, col: str, name: str) -> bool:\n """"""Checks if a variable of given collection and name exists in this Module.\n\n See :mod:`flax.core.variables` for more explanation on variables and\n collections.\n\n Args:\n col: The variable collection name.\n name: The name of the variable.\n\n Returns:\n True if the variable exists.\n """"""\n if self.scope is None:\n raise ValueError(""Can't access variables on unbound modules"")\n return self.scope.has_variable(col, name)\n\n def is_mutable_collection(self, col: str) -> bool:\n """"""Returns true if the collection ``col`` is mutable.""""""\n if self.scope is None:\n raise ValueError(""Can't check mutability on unbound modules"")\n return self.scope.is_mutable_collection(col)\n\n def has_rng(self, name: str) -> bool:\n """"""Returns true if a PRNGSequence with name ``name`` exists.""""""\n if self.scope is None:\n raise ValueError(""Can't query for RNGs on unbound modules"")\n return self.scope.has_rng(name)\n\n def make_rng(self, name: str = 'params') -> PRNGKey:\n """"""Returns a new RNG key from a given RNG sequence for this Module.\n\n The new RNG key is split from the previous one. Thus, every call to\n ``make_rng`` returns a new RNG key, while still guaranteeing full\n reproducibility.\n\n .. note::\n If an invalid name is passed (i.e. no RNG key was passed by\n the user in ``.init`` or ``.apply`` for this name), then ``name``\n will default to ``'params'``.\n\n Example::\n\n >>> import jax\n >>> import flax.linen as nn\n\n >>> class ParamsModule(nn.Module):\n ... def __call__(self):\n ... return self.make_rng('params')\n >>> class OtherModule(nn.Module):\n ... def __call__(self):\n ... 
return self.make_rng('other')\n\n >>> key = jax.random.key(0)\n >>> params_out, _ = ParamsModule().init_with_output({'params': key})\n >>> # self.make_rng('other') will default to using the 'params' RNG stream\n >>> other_out, _ = OtherModule().init_with_output({'params': key})\n >>> assert params_out == other_out\n\n Learn more about RNG's by reading the Flax RNG guide:\n https://flax.readthedocs.io/en/latest/guides/flax_fundamentals/rng_guide.html\n\n Args:\n name: The RNG sequence name.\n\n Returns:\n The newly generated RNG key.\n """"""\n if self.scope is None:\n raise ValueError(""Can't use RNGs on unbound modules"")\n return self.scope.make_rng(name)\n\n def is_initializing(self) -> bool:\n """"""Returns True if running under self.init(...) or nn.init(...)().\n\n This is a helper method to handle the common case of simple initialization\n where we wish to have setup logic occur when only called under\n ``module.init`` or ``nn.init``. For more complicated multi-phase\n initialization scenarios it is better to test for the mutability of\n particular variable collections or for the presence of particular\n variables that potentially need to be initialized.\n """"""\n if self.scope is None:\n raise ValueError(""Can't check if running under init() on unbound modules"")\n return self.scope.get_flag('initializing', False)\n\n def _module_checks(self):\n """"""Run standard runtime checks.""""""\n\n if not isinstance(self, Module):\n raise errors.InvalidInstanceModuleError()\n\n overridden_post_init = self.__post_init__ != Module.__post_init__\n if overridden_post_init and not hasattr(self, '_id'):\n raise errors.IncorrectPostInitOverrideError()\n\n @traceback_util.api_boundary\n def bind(\n self: M,\n variables: VariableDict,\n *args,\n rngs: RNGSequences | None = None,\n mutable: CollectionFilter = False,\n ) -> M:\n """"""Creates an interactive Module instance by binding variables and RNGs.\n\n ``bind`` provides an ""interactive"" instance of a Module directly without\n transforming a function with ``apply``. This is particularly useful for\n debugging and interactive use cases like notebooks where a function would\n limit the ability to split up code into different cells.\n\n Once the variables (and optionally RNGs) are bound to a ``Module`` it\n becomes a stateful object. Note that idiomatic JAX is functional and\n therefore an interactive instance does not mix well with vanilla JAX APIs.\n ``bind()`` should only be used for interactive experimentation, and in all\n other cases we strongly encourage users to use ``apply()`` instead.\n\n Example::\n\n >>> import jax\n >>> import jax.numpy as jnp\n >>> import flax.linen as nn\n\n >>> class AutoEncoder(nn.Module):\n ... def setup(self):\n ... self.encoder = nn.Dense(3)\n ... self.decoder = nn.Dense(5)\n ...\n ... def __call__(self, x):\n ... return self.decoder(self.encoder(x))\n\n >>> x = jnp.ones((16, 9))\n >>> ae = AutoEncoder()\n >>> variables = ae.init(jax.random.key(0), x)\n >>> model = ae.bind(variables)\n >>> z = model.encoder(x)\n >>> x_reconstructed = model.decoder(z)\n\n Args:\n variables: A dictionary containing variables keyed by variable\n collections. See :mod:`flax.core.variables` for more details about\n variables.\n *args: Named arguments (not used).\n rngs: a dict of PRNGKeys to initialize the PRNG sequences.\n mutable: Can be bool, str, or list. Specifies which collections should be\n treated as mutable: ``bool``: all/no collections are mutable. ``str``:\n The name of a single mutable collection. 
``list``: A list of names of\n mutable collections.\n\n Returns:\n A copy of this instance with bound variables and RNGs.\n """"""\n Module._module_checks(self)\n\n del args\n scope = core.bind(variables, rngs=rngs, mutable=mutable)\n return self.clone(parent=scope, _deep_clone=True)\n\n def unbind(self: M) -> tuple[M, VariableDict]:\n """"""Returns an unbound copy of a Module and its variables.\n\n ``unbind`` helps create a stateless version of a bound Module.\n\n An example of a common use case: to extract a sub-Module defined inside\n ``setup()`` and its corresponding variables: 1) temporarily ``bind`` the\n parent Module; and then 2) ``unbind`` the desired sub-Module. (Recall that\n ``setup()`` is only called when the Module is bound.)::\n\n >>> class Encoder(nn.Module):\n ... @nn.compact\n ... def __call__(self, x):\n ... ...\n ... return nn.Dense(256)(x)\n\n >>> class Decoder(nn.Module):\n ... @nn.compact\n ... def __call__(self, x):\n ... ...\n ... return nn.Dense(784)(x)\n\n >>> class AutoEncoder(nn.Module):\n ... def setup(self):\n ... self.encoder = Encoder()\n ... self.decoder = Decoder()\n ...\n ... def __call__(self, x):\n ... return self.decoder(self.encoder(x))\n\n >>> module = AutoEncoder()\n >>> variables = module.init(jax.random.key(0), jnp.ones((1, 784)))\n\n >>> # Extract the Encoder sub-Module and its variables\n >>> encoder, encoder_vars = module.bind(variables).encoder.unbind()\n\n Returns:\n A tuple with an unbound copy of this Module and its variables.\n """"""\n Module._module_checks(self)\n\n if self.scope is None:\n raise errors.CallUnbindOnUnboundModuleError()\n\n variables = self.variables\n module = self.clone(_deep_clone=True, _reset_names=True, name=None)\n return module, variables\n\n @traceback_util.api_boundary\n def apply(\n self,\n variables: VariableDict,\n *args,\n rngs: PRNGKey | RNGSequences | None = None,\n method: Callable[..., Any] | str | None = None,\n mutable: CollectionFilter = False,\n capture_intermediates: bool | Callable[['Module', str], bool] = False,\n **kwargs,\n ) -> Any | tuple[Any, FrozenVariableDict | dict[str, Any]]:\n """"""Applies a module method to variables and returns output and modified variables.\n\n Note that ``method`` should be set if one would like to call ``apply`` on a\n different class method than ``__call__``. For instance, suppose a\n Transformer module has a method called ``encode``, then the following calls\n ``apply`` on that method::\n\n >>> import flax.linen as nn\n >>> import jax, jax.numpy as jnp\n >>> import numpy as np\n\n >>> class Transformer(nn.Module):\n ... def encode(self, x):\n ... ...\n\n >>> x = jnp.ones((16, 9))\n >>> model = Transformer()\n >>> variables = model.init(jax.random.key(0), x, method=Transformer.encode)\n\n >>> encoded = model.apply(variables, x, method=Transformer.encode)\n\n If a function instance is provided, the unbound function is used. For\n instance, the example below is equivalent to the one above::\n\n >>> encoded = model.apply(variables, x, method=model.encode)\n\n You can also pass a string to a callable attribute of the module. For\n example, the previous can be written as::\n\n >>> encoded = model.apply(variables, x, method='encode')\n\n Note ``method`` can also be a function that is not defined in\n ``Transformer``. In that case, the function should have at least one\n argument representing an instance of the Module class::\n\n >>> def other_fn(instance, x):\n ... # instance.some_module_attr(...)\n ... instance.encode\n ... 
...\n\n >>> model.apply(variables, x, method=other_fn)\n\n If you pass a single ``PRNGKey``, Flax will use it to feed the ``'params'``\n RNG stream. If you want to use a different RNG stream or need to use\n multiple streams, you can pass a dictionary mapping each RNG stream name\n to its corresponding ``PRNGKey`` to ``apply``. If ``self.make_rng(name)``\n is called on an RNG stream name that isn't passed by the user, it will\n default to using the ``'params'`` RNG stream.\n\n Example::\n\n >>> class Foo(nn.Module):\n ... @nn.compact\n ... def __call__(self, x, add_noise=False):\n ... x = nn.Dense(16)(x)\n ... x = nn.relu(x)\n ...\n ... if add_noise:\n ... # Add gaussian noise\n ... noise_key = self.make_rng('noise')\n ... x = x + jax.random.normal(noise_key, x.shape)\n ...\n ... return nn.Dense(1)(x)\n\n >>> x = jnp.empty((1, 7))\n >>> module = Foo()\n >>> rngs = {'params': jax.random.key(0), 'noise': jax.random.key(1)}\n >>> variables = module.init(rngs, x)\n >>> out0 = module.apply(variables, x, add_noise=True, rngs=rngs)\n\n >>> rngs['noise'] = jax.random.key(0)\n >>> out1 = module.apply(variables, x, add_noise=True, rngs=rngs)\n >>> # different output (key(1) vs key(0))\n >>> np.testing.assert_raises(AssertionError, np.testing.assert_allclose, out0, out1)\n\n >>> del rngs['noise']\n >>> # self.make_rng('noise') will default to using the 'params' RNG stream\n >>> out2 = module.apply(variables, x, add_noise=True, rngs=rngs)\n >>> # same output (key(0))\n >>> np.testing.assert_allclose(out1, out2)\n\n >>> # passing in a single key is equivalent to passing in {'params': key}\n >>> out3 = module.apply(variables, x, add_noise=True, rngs=jax.random.key(0))\n >>> # same output (key(0))\n >>> np.testing.assert_allclose(out2, out3)\n\n Args:\n variables: A dictionary containing variables keyed by variable\n collections. See :mod:`flax.core.variables` for more details about\n variables.\n *args: Named arguments passed to the specified apply method.\n rngs: a dict of PRNGKeys to initialize the PRNG sequences. The ""params""\n PRNG sequence is used to initialize parameters.\n method: A function to call apply on. This is generally a function in the\n module. If provided, applies this method. If not provided, applies the\n ``__call__`` method of the module. A string can also be provided to\n specify a method by name.\n mutable: Can be bool, str, or list. Specifies which collections should be\n treated as mutable: ``bool``: all/no collections are mutable. ``str``:\n The name of a single mutable collection. ``list``: A list of names of\n mutable collections.\n capture_intermediates: If ``True``, captures intermediate return values of\n all Modules inside the ""intermediates"" collection. By default, only the\n return values of all ``__call__`` methods are stored. A function can be\n passed to change the filter behavior. The filter function takes the\n Module instance and method name and returns a bool indicating whether\n the output of that method invocation should be stored.\n **kwargs: Keyword arguments passed to the specified apply method.\n\n Returns:\n If ``mutable`` is False, returns output. 
If any collections are\n mutable, returns ``(output, vars)``, where ``vars`` is a dict\n of the modified collections.\n """"""\n Module._module_checks(self)\n\n if rngs is not None and not isinstance(rngs, dict):\n if not core.scope._is_valid_rng(rngs):\n raise errors.InvalidRngError(\n 'RNGs should be of shape (2,) or PRNGKey in Module '\n f'{self.__class__.__name__}, but rngs are: {rngs}'\n )\n rngs = {'params': rngs}\n\n if isinstance(method, str):\n attribute_name = method\n method = getattr(self, attribute_name)\n if not callable(method):\n class_name = type(self).__name__\n raise TypeError(\n f""'{class_name}.{attribute_name}' must be a callable, got""\n f' {type(method)}.'\n )\n # if the `method` string is a submodule, we create a lambda function\n # that calls the submodule, forwarding all arguments.\n if isinstance(method, Module):\n method = lambda self, *args, **kwargs: getattr(self, attribute_name)(\n *args, **kwargs\n )\n elif method is None:\n method = self.__call__\n method = _get_unbound_fn(method)\n return apply(\n method,\n self,\n mutable=mutable,\n capture_intermediates=capture_intermediates,\n )(variables, *args, **kwargs, rngs=rngs)\n\n @traceback_util.api_boundary\n def init_with_output(\n self,\n rngs: PRNGKey | RNGSequences,\n *args,\n method: Callable[..., Any] | str | None = None,\n mutable: CollectionFilter = DenyList('intermediates'),\n capture_intermediates: bool | Callable[['Module', str], bool] = False,\n **kwargs,\n ) -> tuple[Any, FrozenVariableDict | dict[str, Any]]:\n """"""Initializes a module method with variables and returns output and modified variables.\n\n Args:\n rngs: The rngs for the variable collections.\n *args: Named arguments passed to the init function.\n method: An optional method. If provided, applies this method. If not\n provided, applies the ``__call__`` method. A string can also be\n provided to specify a method by name.\n mutable: Can be bool, str, or list. Specifies which collections should be\n treated as mutable: ``bool``: all/no collections are mutable. ``str``:\n The name of a single mutable collection. ``list``: A list of names of\n mutable collections. By default, all collections except ""intermediates""\n are mutable.\n capture_intermediates: If ``True``, captures intermediate return values of\n all Modules inside the ""intermediates"" collection. By default only the\n return values of all ``__call__`` methods are stored. A function can be\n passed to change the filter behavior. 
The filter function takes the\n Module instance and method name and returns a bool indicating whether\n the output of that method invocation should be stored.\n **kwargs: Keyword arguments passed to the init function.\n\n Returns:\n ``(output, vars)``, where ``vars`` is a dict of the modified\n collections.\n """"""\n Module._module_checks(self)\n\n if not isinstance(rngs, dict):\n if not core.scope._is_valid_rng(rngs):\n raise errors.InvalidRngError(\n 'RNGs should be of shape (2,) or PRNGKey in Module '\n f'{self.__class__.__name__}, but rngs are: {rngs}'\n )\n rngs = {'params': rngs}\n\n if isinstance(method, str):\n attribute_name = method\n method = getattr(self, attribute_name)\n if not callable(method):\n class_name = type(self).__name__\n raise TypeError(\n f""'{class_name}.{attribute_name}' must be a callable, got""\n f' {type(method)}.'\n )\n elif method is None:\n method = self.__call__\n method = _get_unbound_fn(method)\n return init_with_output(\n method,\n self,\n mutable=mutable,\n capture_intermediates=capture_intermediates,\n )(rngs, *args, **kwargs)\n\n @traceback_util.api_boundary\n def init(\n self,\n rngs: PRNGKey | RNGSequences,\n *args,\n method: Callable[..., Any] | str | None = None,\n mutable: CollectionFilter = DenyList('intermediates'),\n capture_intermediates: bool | Callable[['Module', str], bool] = False,\n **kwargs,\n ) -> FrozenVariableDict | dict[str, Any]:\n """"""Initializes a module method with variables and returns modified variables.\n\n ``init`` takes as first argument either a single ``PRNGKey``, or a\n dictionary mapping variable collection names to their ``PRNGKeys``, and\n will call ``method`` (which is the module's ``__call__`` function by\n default) passing ``*args`` and ``**kwargs``, and returns\n a dictionary of initialized variables.\n\n Example::\n\n >>> import flax.linen as nn\n >>> import jax, jax.numpy as jnp\n >>> import numpy as np\n\n >>> class Foo(nn.Module):\n ... @nn.compact\n ... def __call__(self, x, train):\n ... x = nn.Dense(16)(x)\n ... x = nn.BatchNorm(use_running_average=not train)(x)\n ... x = nn.relu(x)\n ... return nn.Dense(1)(x)\n\n >>> x = jnp.empty((1, 7))\n >>> module = Foo()\n >>> key = jax.random.key(0)\n >>> variables = module.init(key, x, train=False)\n\n If you pass a single ``PRNGKey``, Flax will use it to feed the ``'params'``\n RNG stream. If you want to use a different RNG stream or need to use\n multiple streams, you can pass a dictionary mapping each RNG stream name\n to its corresponding ``PRNGKey`` to ``init``. If ``self.make_rng(name)``\n is called on an RNG stream name that isn't passed by the user, it will\n default to using the ``'params'`` RNG stream.\n\n Example::\n\n >>> class Foo(nn.Module):\n ... @nn.compact\n ... def __call__(self, x):\n ... x = nn.Dense(16)(x)\n ... x = nn.relu(x)\n ...\n ... other_variable = self.variable(\n ... 'other_collection',\n ... 'other_variable',\n ... lambda x: jax.random.normal(self.make_rng('other_rng'), x.shape),\n ... x,\n ... )\n ... x = x + other_variable.value\n ...\n ... return nn.Dense(1)(x)\n\n >>> module = Foo()\n >>> rngs = {'params': jax.random.key(0), 'other_rng': jax.random.key(1)}\n >>> variables0 = module.init(rngs, x)\n\n >>> rngs['other_rng'] = jax.random.key(0)\n >>> variables1 = module.init(rngs, x)\n >>> # equivalent params (key(0))\n >>> _ = jax.tree_util.tree_map(\n ... np.testing.assert_allclose, variables0['params'], variables1['params']\n ... )\n >>> # different other_variable (key(1) vs key(0))\n >>> np.testing.assert_raises(\n ... 
AssertionError,\n ... np.testing.assert_allclose,\n ... variables0['other_collection']['other_variable'],\n ... variables1['other_collection']['other_variable'],\n ... )\n\n >>> del rngs['other_rng']\n >>> # self.make_rng('other_rng') will default to using the 'params' RNG stream\n >>> variables2 = module.init(rngs, x)\n >>> # equivalent params (key(0))\n >>> _ = jax.tree_util.tree_map(\n ... np.testing.assert_allclose, variables1['params'], variables2['params']\n ... )\n >>> # equivalent other_variable (key(0))\n >>> np.testing.assert_allclose(\n ... variables1['other_collection']['other_variable'],\n ... variables2['other_collection']['other_variable'],\n ... )\n\n >>> # passing in a single key is equivalent to passing in {'params': key}\n >>> variables3 = module.init(jax.random.key(0), x)\n >>> # equivalent params (key(0))\n >>> _ = jax.tree_util.tree_map(\n ... np.testing.assert_allclose, variables2['params'], variables3['params']\n ... )\n >>> # equivalent other_variable (key(0))\n >>> np.testing.assert_allclose(\n ... variables2['other_collection']['other_variable'],\n ... variables3['other_collection']['other_variable'],\n ... )\n\n Jitting ``init`` initializes a model lazily using only the shapes of the\n provided arguments, and avoids computing the forward pass with actual\n values. Example::\n\n >>> module = nn.Dense(1)\n >>> init_jit = jax.jit(module.init)\n >>> variables = init_jit(jax.random.key(0), x)\n\n ``init`` is a light wrapper over ``apply``, so other ``apply`` arguments\n like ``method``, ``mutable``, and ``capture_intermediates`` are also\n available.\n\n Args:\n rngs: The rngs for the variable collections.\n *args: Named arguments passed to the init function.\n method: An optional method. If provided, applies this method. If not\n provided, applies the ``__call__`` method. A string can also be provided\n to specify a method by name.\n mutable: Can be bool, str, or list. Specifies which collections should be\n treated as mutable: ``bool``: all/no collections are mutable. ``str``:\n The name of a single mutable collection. ``list``: A list of names of\n mutable collections. By default all collections except ""intermediates""\n are mutable.\n capture_intermediates: If ``True``, captures intermediate return values of\n all Modules inside the ""intermediates"" collection. By default only the\n return values of all ``__call__`` methods are stored. A function can be\n passed to change the filter behavior. The filter function takes the\n Module instance and method name and returns a bool indicating whether\n the output of that method invocation should be stored.\n **kwargs: Keyword arguments passed to the init function.\n\n Returns:\n The initialized variable dict.\n """"""\n Module._module_checks(self)\n\n _, v_out = self.init_with_output(\n rngs,\n *args,\n method=method,\n mutable=mutable,\n capture_intermediates=capture_intermediates,\n **kwargs,\n )\n return v_out\n\n @traceback_util.api_boundary\n def lazy_init(\n self,\n rngs: PRNGKey | RNGSequences,\n *args,\n method: Callable[..., Any] | None = None,\n mutable: CollectionFilter = DenyList('intermediates'),\n **kwargs,\n ) -> FrozenVariableDict:\n """"""Initializes a module without computing on an actual input.\n\n lazy_init will initialize the variables without doing unnecessary compute.\n The input data should be passed as a ``jax.ShapeDtypeStruct`` which\n specifies the shape and dtype of the input but no concrete data.\n\n Example::\n\n >>> model = nn.Dense(features=256)\n >>> variables = model.lazy_init(\n ... 
jax.random.key(0), jax.ShapeDtypeStruct((1, 128), jnp.float32))\n\n The args and kwargs passed to ``lazy_init`` can be a mix of\n concrete (jax arrays, scalars, bools) and abstract (ShapeDtypeStruct)\n values. Concrete values are only necessary for arguments that affect\n the initialization of variables. For example, the model might expect\n a keyword arg that enables/disables a subpart of the model.\n In this case, an explicit value (True/False) should be passed, otherwise\n ``lazy_init`` cannot infer which variables should be initialized.\n\n Args:\n rngs: The rngs for the variable collections.\n *args: arguments passed to the init function.\n method: An optional method. If provided, applies this method. If not\n provided, applies the ``__call__`` method.\n mutable: Can be bool, str, or list. Specifies which collections should be\n treated as mutable: ``bool``: all/no collections are mutable. ``str``:\n The name of a single mutable collection. ``list``: A list of names of\n mutable collections. By default all collections except ""intermediates""\n are mutable.\n **kwargs: Keyword arguments passed to the init function.\n\n Returns:\n The initialized variable dict.\n """"""\n Module._module_checks(self)\n\n def lazy_wrapper(rngs, *args, **kwargs):\n return self.init(rngs, *args, method=method, mutable=mutable, **kwargs)\n\n return partial_eval.lazy_init(lazy_wrapper)(rngs, *args, **kwargs)\n\n @property\n def variables(self) -> VariableDict:\n """"""Returns the variables in this module.""""""\n if self.scope is None:\n raise ValueError(""Can't access variables on unbound modules"")\n return self.scope.variables()\n\n def get_variable(self, col: str, name: str, default: T | None = None) -> T:\n """"""Retrieves the value of a Variable.\n\n Args:\n col: the variable collection.\n name: the name of the variable.\n default: the default value to return if the variable does not exist in\n this scope.\n\n Returns:\n The value of the input variable, or the default value if the variable\n doesn't exist in this scope.\n """"""\n if self.scope is None:\n raise ValueError(""Can't access variables on unbound modules"")\n return self.scope.get_variable(col, name, default)\n\n def put_variable(self, col: str, name: str, value: Any):\n """"""Updates the value of the given variable if it is mutable, or raises an error otherwise.\n\n Args:\n col: the variable collection.\n name: the name of the variable.\n value: the new value of the variable.\n """"""\n if self.scope is None:\n raise ValueError(""Can't access variables on unbound modules"")\n self.scope.put_variable(col, name, value)\n\n @overload\n def sow(self, col: str, name: str, value: Any) -> bool:\n ...\n\n @overload\n def sow(\n self,\n col: str,\n name: str,\n value: T,\n reduce_fn: Callable[[K, T], K] = tuple_reduce,\n init_fn: Callable[[], K] = tuple_init, # type: ignore\n ) -> bool:\n ...\n\n def sow(\n self,\n col: str,\n name: str,\n value: T,\n reduce_fn: Callable[[K, T], K] = tuple_reduce,\n init_fn: Callable[[], K] = tuple_init, # type: ignore\n ) -> bool:\n """"""Stores a value in a collection.\n\n Collections can be used to collect intermediate values without\n the overhead of explicitly passing a container through each Module call.\n\n If the target collection is not mutable ``sow`` behaves like a no-op\n and returns ``False``.\n\n Example::\n\n >>> import jax\n >>> import jax.numpy as jnp\n >>> import flax.linen as nn\n\n >>> class Foo(nn.Module):\n ... @nn.compact\n ... def __call__(self, x):\n ... h = nn.Dense(4)(x)\n ... 
self.sow('intermediates', 'h', h)\n ... return nn.Dense(2)(h)\n\n >>> x = jnp.ones((16, 9))\n >>> model = Foo()\n >>> variables = model.init(jax.random.key(0), x)\n >>> y, state = model.apply(variables, x, mutable=['intermediates'])\n >>> jax.tree.map(jnp.shape, state['intermediates'])\n {'h': ((16, 4),)}\n\n By default the values are stored in a tuple and each stored value\n is appended at the end. This way all intermediates can be tracked when\n the same module is called multiple times. Alternatively, a custom\n init/reduce function can be passed::\n\n >>> class Foo2(nn.Module):\n ... @nn.compact\n ... def __call__(self, x):\n ... init_fn = lambda: 0\n ... reduce_fn = lambda a, b: a + b\n ... self.sow('intermediates', 'h', x,\n ... init_fn=init_fn, reduce_fn=reduce_fn)\n ... self.sow('intermediates', 'h', x * 2,\n ... init_fn=init_fn, reduce_fn=reduce_fn)\n ... return x\n\n >>> x = jnp.ones((1, 1))\n >>> model = Foo2()\n >>> variables = model.init(jax.random.key(0), x)\n >>> y, state = model.apply(\n ... variables, x, mutable=['intermediates'])\n >>> print(state['intermediates'])\n {'h': Array([[3.]], dtype=float32)}\n\n Args:\n col: The name of the variable collection.\n name: The name of the variable.\n value: The value of the variable.\n reduce_fn: The function used to combine the existing value with the new\n value. The default is to append the value to a tuple.\n init_fn: For the first value stored, ``reduce_fn`` will be passed the result\n of ``init_fn`` together with the value to be stored. The default is an\n empty tuple.\n\n Returns:\n ``True`` if the value has been stored successfully, ``False`` otherwise.\n """"""\n if self.scope is None:\n raise ValueError(""Can't store variables on unbound modules"")\n if not self.scope.is_mutable_collection(col):\n return False\n if self.scope.has_variable(col, name):\n xs = self.scope.get_variable(col, name)\n else:\n self.scope.reserve(name, col)\n self._state.children[name] = col\n xs = init_fn()\n xs = reduce_fn(xs, value)\n self.scope.put_variable(col, name, xs)\n return True\n\n def perturb(\n self, name: str, value: T, collection: str = 'perturbations'\n ) -> T:\n """"""Add a zero-value variable ('perturbation') to the intermediate value.\n\n The gradient of ``value`` would be the same as the gradient of this\n perturbation variable. Therefore, if you define your loss function with\n both params and perturbations as standalone arguments, you can get the\n intermediate gradients of ``value`` by running ``jax.grad`` on the perturbation\n argument.\n\n .. note::\n This is an experimental API and may be tweaked later for better\n performance and usability.\n At its current stage, it creates extra dummy variables that occupy extra\n memory space. Use it only to debug gradients in training.\n\n Example::\n\n >>> class Foo(nn.Module):\n ... @nn.compact\n ... def __call__(self, x):\n ... x = nn.Dense(3)(x)\n ... x = self.perturb('dense3', x)\n ... return nn.Dense(2)(x)\n\n >>> def loss(variables, inputs, targets):\n ... preds = model.apply(variables, inputs)\n ... 
return jnp.square(preds - targets).mean()\n\n >>> x = jnp.ones((2, 9))\n >>> y = jnp.ones((2, 2))\n >>> model = Foo()\n >>> variables = model.init(jax.random.key(0), x)\n >>> intm_grads = jax.grad(loss, argnums=0)(variables, x, y)\n >>> print(intm_grads['perturbations']['dense3'])\n [[-0.04684732 0.06573904 -0.3194327 ]\n [-0.04684732 0.06573904 -0.3194327 ]]\n\n If perturbations are not passed to ``apply``, ``perturb`` behaves like a no-op\n so you can easily disable the behavior when not needed::\n\n >>> model.apply(variables, x) # works as expected\n Array([[-0.04579116, 0.50412744],\n [-0.04579116, 0.50412744]], dtype=float32)\n >>> model.apply({'params': variables['params']}, x) # behaves like a no-op\n Array([[-0.04579116, 0.50412744],\n [-0.04579116, 0.50412744]], dtype=float32)\n >>> intm_grads = jax.grad(loss, argnums=0)({'params': variables['params']}, x, y)\n >>> 'perturbations' not in intm_grads\n True\n """"""\n if self.scope is None:\n raise ValueError(""Can't store variables on unbound modules"")\n\n if self.is_mutable_collection(collection):\n if not self.scope.has_variable(collection, name):\n self.scope.reserve(name, collection)\n self._state.children[name] = collection\n zeros = jax.tree.map(jnp.zeros_like, value)\n self.scope.put_variable(collection, name, zeros) # type: ignore\n\n if collection in self.scope.root._variables:\n if self.scope.has_variable(collection, name):\n old_value = self.scope.get_variable(collection, name)\n value = jax.tree.map(jnp.add, value, old_value) # type: ignore\n else:\n raise ValueError(f""Perturbation collection {collection} present, but ""\n f""missing perturbation variable {name}"")\n\n return value\n\n def tabulate(\n self,\n rngs: PRNGKey | RNGSequences,\n *args,\n depth: int | None = None,\n show_repeated: bool = False,\n mutable: CollectionFilter = DenyList('intermediates'),\n console_kwargs: Mapping[str, Any] | None = None,\n table_kwargs: Mapping[str, Any] = MappingProxyType({}),\n column_kwargs: Mapping[str, Any] = MappingProxyType({}),\n compute_flops: bool = False,\n compute_vjp_flops: bool = False,\n **kwargs,\n ) -> str:\n """"""Creates a summary of the Module represented as a table.\n\n This method has the same signature and internally calls ``Module.init``,\n but instead of returning the variables, it returns the string summarizing\n the Module in a table. ``tabulate`` uses ``jax.eval_shape`` to run the forward\n computation without consuming any FLOPs or allocating memory.\n\n Additional arguments can be passed into the ``console_kwargs`` argument, for\n example, ``{'width': 120}``. For a full list of ``console_kwargs`` arguments,\n see:\n https://rich.readthedocs.io/en/stable/reference/console.html#rich.console.Console\n\n Example::\n\n >>> import flax.linen as nn\n >>> import jax, jax.numpy as jnp\n\n >>> class Foo(nn.Module):\n ... @nn.compact\n ... def __call__(self, x):\n ... h = nn.Dense(4)(x)\n ... 
return nn.Dense(2)(h)\n\n >>> x = jnp.ones((16, 9))\n\n >>> # print(Foo().tabulate(\n >>> # jax.random.key(0), x, compute_flops=True, compute_vjp_flops=True))\n\n This gives the following output::\n\n Foo Summary\n ┏━━━━━━━━━┳━━━━━━━━┳━━━━━━━━━━━━━━━┳━━━━━━━━━━━━━━━┳━━━━━━━┳━━━━━━━━━━━┳━━━━━━━━━━━━━━━━━┓\n ┃ path ┃ module ┃ inputs ┃ outputs ┃ flops ┃ vjp_flops ┃ params ┃\n ┡━━━━━━━━━╇━━━━━━━━╇━━━━━━━━━━━━━━━╇━━━━━━━━━━━━━━━╇━━━━━━━╇━━━━━━━━━━━╇━━━━━━━━━━━━━━━━━┩\n │ │ Foo │ float32[16,9] │ float32[16,2] │ 1504 │ 4460 │ │\n ├─────────┼────────┼───────────────┼───────────────┼───────┼───────────┼─────────────────┤\n │ Dense_0 │ Dense │ float32[16,9] │ float32[16,4] │ 1216 │ 3620 │ bias: │\n │ │ │ │ │ │ │ float32[4] │\n │ │ │ │ │ │ │ kernel: │\n │ │ │ │ │ │ │ float32[9,4] │\n │ │ │ │ │ │ │ │\n │ │ │ │ │ │ │ 40 (160 B) │\n ├─────────┼────────┼───────────────┼───────────────┼───────┼───────────┼─────────────────┤\n │ Dense_1 │ Dense │ float32[16,4] │ float32[16,2] │ 288 │ 840 │ bias: │\n │ │ │ │ │ │ │ float32[2] │\n │ │ │ │ │ │ │ kernel: │\n │ │ │ │ │ │ │ float32[4,2] │\n │ │ │ │ │ │ │ │\n │ │ │ │ │ │ │ 10 (40 B) │\n ├─────────┼────────┼───────────────┼───────────────┼───────┼───────────┼─────────────────┤\n │ │ │ │ │ │ Total │ 50 (200 B) │\n └─────────┴────────┴───────────────┴───────────────┴───────┴───────────┴─────────────────┘\n\n Total Parameters: 50 (200 B)\n\n **Note**: rows order in the table does not represent execution order,\n instead it aligns with the order of keys in ``variables`` which are sorted\n alphabetically.\n\n **Note**: ``vjp_flops`` returns ``0`` if the module is not differentiable.\n\n Args:\n rngs: The rngs for the variable collections as passed to ``Module.init``.\n *args: The arguments to the forward computation.\n depth: controls how many submodule deep the summary can go. By default,\n its ``None`` which means no limit. If a submodule is not shown because of\n the depth limit, its parameter count and bytes will be added to the row\n of its first shown ancestor such that the sum of all rows always adds\n up to the total number of parameters of the Module.\n show_repeated: If ``True``, repeated calls to the same module will be shown\n in the table, otherwise only the first call will be shown. Default is\n ``False``.\n mutable: Can be bool, str, or list. Specifies which collections should be\n treated as mutable: ``bool``: all/no collections are mutable. ``str``:\n The name of a single mutable collection. ``list``: A list of names of\n mutable collections. By default, all collections except 'intermediates'\n are mutable.\n console_kwargs: An optional dictionary with additional keyword arguments\n that are passed to ``rich.console.Console`` when rendering the table.\n Default arguments are ``{'force_terminal': True, 'force_jupyter':\n False}``.\n table_kwargs: An optional dictionary with additional keyword arguments\n that are passed to ``rich.table.Table`` constructor.\n column_kwargs: An optional dictionary with additional keyword arguments\n that are passed to ``rich.table.Table.add_column`` when adding columns to\n the table.\n compute_flops: whether to include a ``flops`` column in the table listing\n the estimated FLOPs cost of each module forward pass. Does incur actual\n on-device computation / compilation / memory allocation, but still\n introduces overhead for large modules (e.g. 
extra 20 seconds for a\n Stable Diffusion's UNet, whereas otherwise tabulation would finish in 5\n seconds).\n compute_vjp_flops: whether to include a ``vjp_flops`` column in the table\n listing the estimated FLOPs cost of each module backward pass.\n Introduces a compute overhead of about 2-3X of ``compute_flops``.\n **kwargs: keyword arguments to pass to the forward computation.\n\n Returns:\n A string summarizing the Module.\n """"""\n from flax.linen import summary\n\n tabulate_fn = summary.tabulate(\n self,\n rngs,\n depth=depth,\n show_repeated=show_repeated,\n mutable=mutable,\n console_kwargs=console_kwargs,\n table_kwargs=table_kwargs,\n column_kwargs=column_kwargs,\n compute_flops=compute_flops,\n compute_vjp_flops=compute_vjp_flops,\n )\n return tabulate_fn(*args, **kwargs)\n\n def module_paths(\n self,\n rngs: PRNGKey | RNGSequences,\n *args,\n show_repeated: bool = False,\n mutable: CollectionFilter = DenyList('intermediates'),\n **kwargs,\n ) -> dict[str, 'Module']:\n """"""Returns a dictionary mapping module paths to module instances.\n\n This method has the same signature and internally calls ``Module.init``,\n but instead of returning the variables, it returns a dictionary mapping\n module paths to unbound copies of module instances that were used\n at runtime. ``module_paths`` uses ``jax.eval_shape`` to run the forward\n computation without consuming any FLOPs or allocating memory.\n\n Example::\n\n >>> import flax.linen as nn\n >>> import jax, jax.numpy as jnp\n\n >>> class Foo(nn.Module):\n ... @nn.compact\n ... def __call__(self, x):\n ... h = nn.Dense(4)(x)\n ... return nn.Dense(2)(h)\n\n >>> x = jnp.ones((16, 9))\n >>> modules = Foo().module_paths(jax.random.key(0), x)\n >>> print({\n ... p: type(m).__name__ for p, m in modules.items()\n ... })\n {'': 'Foo', 'Dense_0': 'Dense', 'Dense_1': 'Dense'}\n\n Args:\n rngs: The rngs for the variable collections as passed to ``Module.init``.\n *args: The arguments to the forward computation.\n show_repeated: If ``True``, repeated calls to the same module will be\n shown in the table, otherwise only the first call will be shown.\n Default is ``False``.\n mutable: Can be bool, str, or list. Specifies which collections should\n be treated as mutable: ``bool``: all/no collections are mutable.\n ``str``: The name of a single mutable collection. ``list``: A list of\n names of mutable collections. By default, all collections except\n 'intermediates' are mutable.\n **kwargs: keyword arguments to pass to the forward computation.\n\n Returns:\n A dictionary mapping module paths to module instances.\n """"""\n from flax.linen import summary\n\n table = summary._get_module_table(\n module=self,\n depth=None,\n show_repeated=show_repeated,\n compute_flops=False,\n compute_vjp_flops=False,\n )(rngs, *args, **kwargs, mutable=mutable)\n\n return {'/'.join(row.path): row.module_copy for row in table}\n\n\n_ParentType = Union[Module, Scope, _Sentinel, None]\n\n\ndef merge_param(name: str, a: T | None, b: T | None) -> T:\n """"""Merges construction- and call-time argument.\n\n This is a utility for supporting a pattern where a Module hyperparameter\n can be passed either to ``__init__`` or ``__call__``, and the value that is\n not ``None`` will be used.\n\n Example::\n\n >>> import flax.linen as nn\n >>> from typing import Optional\n\n >>> class Foo(nn.Module):\n ... train: Optional[bool] = None\n\n ... def __call__(self, train: Optional[bool] = None):\n ... 
train = nn.merge_param('train', self.train, train)\n\n An error is thrown when both arguments are ``None`` or both values are not\n ``None``.\n\n Args:\n name: the name of the parameter. Used for error messages.\n a: option a\n b: option b\n\n Returns:\n a or b whichever is not ``None``.\n """"""\n if a is None and b is None:\n raise ValueError(\n f'Parameter ""{name}"" must be passed to the constructor or at call time.'\n )\n if a is not None and b is not None:\n raise ValueError(\n f'Parameter ""{name}"" was passed to the constructor and at call time.'\n ' Should be passed just once.'\n )\n if a is None:\n assert b is not None\n return b\n return a\n\n\n@traceback_util.api_boundary\ndef apply(\n fn: Callable[..., Any],\n module: Module,\n mutable: CollectionFilter = False,\n capture_intermediates: bool | Callable[[Module, str], bool] = False,\n) -> Callable[..., Any]:\n """"""Creates an apply function to call ``fn`` with a bound module.\n\n Unlike ``Module.apply`` this function returns a new function with the\n signature ``(variables, *args, rngs=None, **kwargs) -> T`` where ``T`` is the\n return type of ``fn``. If ``mutable`` is not ``False`` the return type is a\n tuple where the second item is a ``FrozenDict`` with the mutated variables.\n\n The apply function that is returned can be directly composed with\n JAX transformations like ``jax.jit``::\n\n >>> class Foo(nn.Module):\n ... def encode(self, x):\n ... ...\n ... def decode(self, x):\n ... ...\n\n >>> def f(foo, x):\n ... z = foo.encode(x)\n ... y = foo.decode(z)\n ... # ...\n ... return y\n\n >>> variables = {}\n >>> foo = Foo()\n >>> f_jitted = jax.jit(nn.apply(f, foo))\n >>> f_jitted(variables, jnp.ones((1, 3)))\n\n Args:\n fn: The function that should be applied. The first argument passed will be\n a module instance of the ``module`` with variables and RNGs bound to it.\n module: The ``Module`` that will be used to bind variables and RNGs to. The\n ``Module`` passed as the first argument to ``fn`` will be a clone of\n module.\n mutable: Can be bool, str, or list. Specifies which collections should be\n treated as mutable: ``bool``: all/no collections are mutable. ``str``: The\n name of a single mutable collection. ``list``: A list of names of mutable\n collections.\n capture_intermediates: If ``True``, captures intermediate return values of all\n Modules inside the ""intermediates"" collection. By default, only the return\n values of all `__call__` methods are stored. A function can be passed to\n change the filter behavior. 
The filter function takes the Module instance\n and method name and returns a bool indicating whether the output of that\n method invocation should be stored.\n\n Returns:\n The apply function wrapping ``fn``.\n """"""\n\n @functools.wraps(fn)\n def scope_fn(scope, *args, **kwargs):\n _context.capture_stack.append(capture_intermediates)\n try:\n return fn(module.clone(parent=scope, _deep_clone=True), *args, **kwargs)\n finally:\n _context.capture_stack.pop()\n\n if capture_intermediates is True: # pylint: disable=g-bool-id-comparison\n capture_intermediates = capture_call_intermediates\n if capture_intermediates:\n mutable = union_filters(mutable, 'intermediates')\n return core.apply(scope_fn, mutable=mutable)\n\n\n@traceback_util.api_boundary\ndef init_with_output(\n fn: Callable[..., Any],\n module: Module,\n mutable: CollectionFilter = DenyList('intermediates'),\n capture_intermediates: bool | Callable[[Module, str], bool] = False,\n) -> Callable[..., tuple[Any, FrozenVariableDict | dict[str, Any]]]:\n """"""Creates an init function to call ``fn`` with a bound module that also returns the function outputs.\n\n Unlike ``Module.init_with_output`` this function returns a new function with\n the signature ``(rngs, *args, **kwargs) -> (T, variables)`` where ``T`` is the\n return type of ``fn``. The rngs can be a dict of PRNGKeys or a single\n ```PRNGKey`` which is equivalent to passing a dict with one PRNGKey with the\n name ""params"".\n\n The init function that is returned can be directly composed with\n JAX transformations like ``jax.jit``::\n\n >>> class Foo(nn.Module):\n ... def encode(self, x):\n ... ...\n ... def decode(self, x):\n ... ...\n\n >>> def f(foo, x):\n ... z = foo.encode(x)\n ... y = foo.decode(z)\n ... # ...\n ... return y\n\n >>> foo = Foo()\n >>> f_jitted = jax.jit(nn.init_with_output(f, foo))\n >>> y, variables = f_jitted(jax.random.key(0), jnp.ones((1, 3)))\n\n Args:\n fn: The function that should be applied. The first argument passed will be\n a module instance of the ``module`` with variables and RNGs bound to it.\n module: The ``Module`` that will be used to bind variables and RNGs to. The\n ``Module`` passed as the first argument to ``fn`` will be a clone of\n module.\n mutable: Can be bool, str, or list. Specifies which collections should be\n treated as mutable: ``bool``: all/no collections are mutable. ``str``: The\n name of a single mutable collection. ``list``: A list of names of mutable\n collections. By default, all collections except ""intermediates"" are\n mutable.\n capture_intermediates: If ``True``, captures intermediate return values of all\n Modules inside the ""intermediates"" collection. By default, only the return\n values of all `__call__` methods are stored. A function can be passed to\n change the filter behavior. 
The filter function takes the Module instance\n and method name and returns a bool indicating whether the output of that\n method invocation should be stored.\n\n Returns:\n The init function wrapping ``fn``.\n """"""\n\n @functools.wraps(fn)\n def scope_fn(scope, *args, **kwargs):\n _context.capture_stack.append(capture_intermediates)\n try:\n return fn(module.clone(parent=scope, _deep_clone=True), *args, **kwargs)\n finally:\n _context.capture_stack.pop()\n\n if capture_intermediates is True: # pylint: disable=g-bool-id-comparison\n capture_intermediates = capture_call_intermediates\n if capture_intermediates:\n mutable = union_filters(mutable, 'intermediates')\n return core.init(scope_fn, mutable=mutable)\n\n\n@traceback_util.api_boundary\ndef init(\n fn: Callable[..., Any],\n module: Module,\n mutable: CollectionFilter = DenyList('intermediates'),\n capture_intermediates: bool | Callable[[Module, str], bool] = False,\n) -> Callable[..., FrozenVariableDict | dict[str, Any]]:\n """"""Creates an init function to call ``fn`` with a bound module.\n\n Unlike ``Module.init`` this function returns a new function with the signature\n ``(rngs, *args, **kwargs) -> variables``.\n The rngs can be a dict of PRNGKeys or a single ```PRNGKey`` which is\n equivalent to passing a dict with one PRNGKey with the name ""params"".\n\n The init function that is returned can be directly composed with\n JAX transformations like ``jax.jit``::\n\n >>> class Foo(nn.Module):\n ... def encode(self, x):\n ... ...\n ... def decode(self, x):\n ... ...\n\n >>> def f(foo, x):\n ... z = foo.encode(x)\n ... y = foo.decode(z)\n ... # ...\n ... return y\n\n >>> foo = Foo()\n >>> f_jitted = jax.jit(nn.init(f, foo))\n >>> variables = f_jitted(jax.random.key(0), jnp.ones((1, 3)))\n\n Args:\n fn: The function that should be applied. The first argument passed will be\n a module instance of the ``module`` with variables and RNGs bound to it.\n module: The ``Module`` that will be used to bind variables and RNGs to. The\n ``Module`` passed as the first argument to ``fn`` will be a clone of\n module.\n mutable: Can be bool, str, or list. Specifies which collections should be\n treated as mutable: ``bool``: all/no collections are mutable. ``str``: The\n name of a single mutable collection. ``list``: A list of names of mutable\n collections. By default, all collections except ""intermediates"" are\n mutable.\n capture_intermediates: If `True`, captures intermediate return values of all\n Modules inside the ""intermediates"" collection. By default, only the return\n values of all `__call__` methods are stored. A function can be passed to\n change the filter behavior. The filter function takes the Module instance\n and method name and returns a bool indicating whether the output of that\n method invocation should be stored.\n\n Returns:\n The init function wrapping ``fn``.\n """"""\n init_fn = init_with_output(fn, module, mutable, capture_intermediates)\n\n @functools.wraps(init_fn)\n def init_wrapper(*args, **kwargs):\n return init_fn(*args, **kwargs)[1]\n\n return init_wrapper\n\n\n# TODO(cgarciae): we are defining CompactNameScope just to\n# avoid a pytype bug with the Flax overlay. 
We should aim to\n# remove it at some point as it's not ergonomic.\nif not typing.TYPE_CHECKING:\n\n class CompactNameScope(Module):\n fn: Callable\n module_fn: Callable[[], Module]\n\n @compact\n def __call__(self, *args, **kwargs) -> Any:\n return self.fn(self.module_fn(), *args, **kwargs)\nelse:\n\n @dataclasses.dataclass\n class CompactNameScope:\n fn: Callable\n module_fn: Callable\n name: str\n\n def __call__(self, *args, **kwargs) -> Any:\n ...\n\n\ndef share_scope(module: Module, other: Module, /):\n """"""Modifies one of the Modules such that they share the same scope. This is useful\n when you want to wrap a Module and extend its functionality without changing the\n parameter structure.\n\n ``share_scope`` takes two Modules, ``module`` and ``other``. ``module`` will use\n ``other``'s scope if ``other`` has a scope and it's not a descendant of ``module``'s\n scope::\n\n >>> import flax.linen as nn\n >>> import jax\n >>> from jax import numpy as jnp, random\n ...\n >>> class DenseLoRA(nn.Module):\n ... base: nn.Dense\n ... rank: int\n ...\n ... def setup(self):\n ... nn.share_scope(self, self.base)\n ...\n ... @nn.compact\n ... def __call__(self, x: jax.Array):\n ... din, dout = x.shape[-1], self.base.features\n ... A = self.param('A', nn.zeros_init(), (din, self.rank))\n ... B = self.param('B', nn.zeros_init(), (self.rank, dout))\n ... return self.base(x) + x @ A @ B\n ...\n >>> class Model(nn.Module):\n ... @nn.compact\n ... def __call__(self, x: jax.Array):\n ... dense = nn.Dense(10) # base scope\n ... return DenseLoRA(dense, rank=2)(x) # reuse the base scope\n ...\n >>> model = Model()\n ...\n >>> params = model.init(random.key(0), jnp.ones((1, 5)))['params']\n >>> list(params['Dense_0'].keys())\n ['A', 'B', 'kernel', 'bias']\n\n When ``other``'s scope is a descendant of ``module``'s scope then ``other``\n will use ``module``'s scope instead::\n\n >>> class DenseLoRA(nn.Module):\n ... features: int\n ... rank: int\n ...\n ... def setup(self):\n ... self.child = nn.Dense(self.features)\n ... nn.share_scope(self, self.child)\n ...\n ... @nn.compact\n ... def __call__(self, x: jax.Array):\n ... din, dout = x.shape[-1], self.features\n ... A = self.param('A', nn.zeros_init(), (din, self.rank))\n ... B = self.param('B', nn.zeros_init(), (self.rank, dout))\n ... return self.child(x) + x @ A @ B\n ...\n >>> class Model(nn.Module):\n ... @nn.compact\n ... def __call__(self, x: jax.Array):\n ... 
return DenseLoRA(10, rank=2)(x)\n ...\n >>> model = Model()\n ...\n >>> params = model.init(random.key(0), jnp.ones((1, 5)))['params']\n >>> list(params['DenseLoRA_0'].keys())\n ['A', 'B', 'kernel', 'bias']\n """"""\n if module.scope is None or other.scope is None:\n raise errors.CallShareScopeOnUnboundModuleError()\n\n def _is_child_scope(scope: Scope, other: Scope) -> bool:\n target: Scope | None = other\n\n while target is not None:\n if target is scope:\n return True\n target = target.parent\n return False\n\n if _is_child_scope(module.scope, other.scope):\n # Child is a true child, overwrite its scope\n module_to_update = other\n new_scope = module.scope\n else:\n # Child has its own independent scope, overwrite\n # parent scope, so that we preserve the sharing\n module_to_update = module\n new_scope = other.scope\n\n old_scope = module_to_update.scope\n object.__setattr__(module_to_update, 'scope', new_scope)\n\n # Reattach all the children to the new scope as well.\n for m in module_to_update._state.children.values():\n if not isinstance(m, Module):\n continue\n # Should we go recursively to check if any of the ancestors point to the old\n # scope?\n if m.scope and m.scope.parent == old_scope:\n # Reserve the scope, so that if there is a conflict we can raise an error.\n if isinstance(m.scope.name, str):\n new_scope.reserve(m.scope.name)\n m.scope.parent = new_scope\n",python,tab +4325,3482354,"genie.py",0,0,"",python,tab +4326,3483316,"genie.py",7881,0,"",python,selection_mouse +4327,3484833,"genie.py",7320,0,"",python,selection_mouse +4328,3487383,"TERMINAL",0,0,"2025-07-03 17:13:44.281276: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +4329,3489996,"models/tokenizer.py",0,0,"from typing import Dict, Any, Tuple\n\nimport flax.linen as nn\n\nfrom utils.preprocess import patchify, unpatchify\nfrom utils.nn import STTransformer, VectorQuantizer\n\n\nclass TokenizerVQVAE(nn.Module):\n """"""ST-ViVit VQ-VAE""""""\n\n in_dim: int\n model_dim: int\n latent_dim: int\n num_latents: int\n patch_size: int\n num_blocks: int\n num_heads: int\n dropout: float\n codebook_dropout: float\n\n def setup(self):\n self.encoder = STTransformer(\n self.model_dim,\n self.latent_dim,\n self.num_blocks,\n self.num_heads,\n self.dropout,\n )\n self.vq = VectorQuantizer(\n self.latent_dim,\n self.num_latents,\n self.codebook_dropout,\n )\n self.out_dim = self.in_dim * self.patch_size**2\n self.decoder = STTransformer(\n self.model_dim,\n self.out_dim,\n self.num_blocks,\n self.num_heads,\n self.dropout,\n )\n\n def __call__(self, batch: Dict[str, Any], training: bool = True) -> Dict[str, Any]:\n H, W = batch[""videos""].shape[2:4]\n outputs = self.vq_encode(batch[""videos""], training)\n recon = self.decoder(outputs[""z_q""]) # (B, T, H_down * W_down, C)\n recon = nn.sigmoid(recon)\n outputs[""recon""] = unpatchify(recon, self.patch_size, H, W)\n return outputs\n\n def vq_encode(self, videos: Any, training: bool = True) -> Dict[str, Any]:\n # --- Preprocess + encode ---\n B, T = videos.shape[:2]\n x = patchify(videos, self.patch_size)\n N = x.shape[2]\n x = self.encoder(x) # (B, T, N, E)\n\n # --- Vector quantize ---\n x = x.reshape(B * T * N, self.latent_dim)\n z_q, z, emb, indices = self.vq(x, training)\n z_q = z_q.reshape(B, T, N, self.latent_dim)\n 
indices = indices.reshape(B, T, N)\n return dict(z_q=z_q, z=z, emb=emb, indices=indices)\n\n def decode(self, indices: Any, video_hw: Tuple[int, int]):\n z = self.vq.codebook[indices]\n recon = self.decoder(z)\n recon = nn.sigmoid(recon)\n return unpatchify(recon, self.patch_size, *video_hw)\n",python,tab +4330,3490553,"TERMINAL",0,0,"2025-07-03 17:13:47.451986: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +4331,3493554,"genie.py",0,0,"",python,tab +4332,3498998,"TERMINAL",0,0,"2025-07-03 17:13:55.863375: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +4333,3500646,"TERMINAL",0,0,"token_idxs shape: (1, 1, 920)\r\ntoken_idxs shape: (1, 6, 920)\r\n",,terminal_output +4334,3500867,"TERMINAL",0,0,"action_tokens.shape: (1, 5, 1, 32)\r\n",,terminal_output +4335,3500995,"TERMINAL",0,0,"mask_expanded.shape: (1, 6, 920, 1)\r\nmask_token.shape: (1, 1, 128)\r\n",,terminal_output +4336,3501250,"TERMINAL",0,0,"vid_embed.shape: (1, 6, 920, 128)\r\n",,terminal_output +4337,3502300,"TERMINAL",0,0,"mask_expanded.shape: (1, 6, 920, 1)\r\nmask_token.shape: (1, 1, 128)\r\n",,terminal_output +4338,3502311,"TERMINAL",0,0,"vid_embed.shape: (1, 6, 920, 128)\r\n",,terminal_output +4339,3502710,"TERMINAL",0,0,"2025-07-03 17:13:59.609720: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. 
Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +4340,3503177,"models/dynamics.py",0,0,"from typing import Dict, Any\n\nimport jax\nimport jax.numpy as jnp\nimport flax.linen as nn\n\nfrom utils.nn import STTransformer\n\n\nclass DynamicsMaskGIT(nn.Module):\n """"""MaskGIT dynamics model""""""\n\n model_dim: int\n num_latents: int\n num_blocks: int\n num_heads: int\n dropout: float\n mask_limit: float\n\n def setup(self):\n self.dynamics = STTransformer(\n self.model_dim,\n self.num_latents,\n self.num_blocks,\n self.num_heads,\n self.dropout,\n )\n self.patch_embed = nn.Embed(self.num_latents, self.model_dim)\n self.mask_token = self.param(\n ""mask_token"",\n nn.initializers.lecun_uniform(),\n (1, 1, 1, self.model_dim),\n )\n self.action_up = nn.Dense(self.model_dim)\n\n def __call__(self, batch: Dict[str, Any], training: bool = True) -> Dict[str, Any]:\n # --- Mask videos ---\n vid_embed = self.patch_embed(batch[""video_tokens""])\n if training:\n rng1, rng2 = jax.random.split(batch[""mask_rng""])\n mask_prob = jax.random.uniform(rng1, minval=self.mask_limit)\n mask = jax.random.bernoulli(rng2, mask_prob, vid_embed.shape[:-1])\n mask = mask.at[:, 0].set(False)\n vid_embed = jnp.where(jnp.expand_dims(mask, -1), self.mask_token, vid_embed)\n else:\n mask = None\n\n # --- Predict transition ---\n act_embed = self.action_up(batch[""latent_actions""])\n vid_embed += jnp.pad(act_embed, ((0, 0), (1, 0), (0, 0), (0, 0)))\n logits = self.dynamics(vid_embed)\n return dict(token_logits=logits, mask=mask)\n",python,tab +4341,3505479,"models/dynamics.py",719,0,"",python,selection_mouse +4342,3505607,"models/dynamics.py",719,1,"1",python,selection_mouse +4343,3505768,"models/dynamics.py",719,3,"1, ",python,selection_mouse +4344,3505804,"models/dynamics.py",719,5,"1, 1,",python,selection_mouse +4345,3505826,"models/dynamics.py",719,9,"1, 1, 1, ",python,selection_mouse +4346,3505855,"models/dynamics.py",719,13,"1, 1, 1, self",python,selection_mouse +4347,3505947,"models/dynamics.py",719,14,"1, 1, 1, self.",python,selection_mouse +4348,3505974,"models/dynamics.py",719,23,"1, 1, 1, self.model_dim",python,selection_mouse +4349,3506496,"models/dynamics.py",738,0,"",python,selection_mouse +4350,3507785,"genie.py",0,0,"",python,tab +4351,3507800,"genie.py",7893,0,"",python,selection_mouse +4352,3507895,"TERMINAL",0,0,"Entering jdb:\r\n(jdb) ",,terminal_output +4353,3508001,"genie.py",7893,1,"[",python,selection_mouse +4354,3508196,"genie.py",7893,2,"[0",python,selection_mouse +4355,3508391,"genie.py",7893,3,"[0]",python,selection_mouse +4356,3508886,"genie.py",7893,3,"",python,content +4357,3509817,"genie.py",7898,0,"",python,selection_mouse +4358,3510645,"genie.py",7898,1,"1",python,content +4359,3512398,"genie.py",7899,0,"",python,selection_command +4360,3512525,"genie.py",7900,0,"",python,selection_command +4361,3513512,"genie.py",7900,0,",",python,content +4362,3513513,"genie.py",7901,0,"",python,selection_keyboard +4363,3514019,"genie.py",7900,1,"",python,content +4364,3514629,"genie.py",7900,0,"1",python,content +4365,3514631,"genie.py",7901,0,"",python,selection_keyboard +4366,3514826,"genie.py",7901,0,",",python,content +4367,3514827,"genie.py",7902,0,"",python,selection_keyboard +4368,3515494,"genie.py",7901,0,"",python,selection_command +4369,3518835,"genie.py",8185,0,"",python,selection_mouse +4370,3519630,"genie.py",8121,95,"",python,content 
+4371,3519738,"genie.py",8129,0,"",python,selection_command +4372,3521457,"genie.py",8201,0,"",python,selection_mouse +4373,3521711,"genie.py",8201,1,"_",python,selection_mouse +4374,3521711,"genie.py",8201,4,"_exp",python,selection_mouse +4375,3521711,"genie.py",8201,5,"_expa",python,selection_mouse +4376,3521903,"genie.py",8201,6,"_expan",python,selection_mouse +4377,3521903,"genie.py",8201,7,"_expand",python,selection_mouse +4378,3522019,"genie.py",8201,8,"_expande",python,selection_mouse +4379,3522203,"genie.py",8201,9,"_expanded",python,selection_mouse +4380,3522578,"genie.py",8201,9,"",python,content +4381,3526224,"TERMINAL",0,0,"[?25ll[?25h",,terminal_output +4382,3526286,"TERMINAL",0,0,"\r\n> /hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/genie.py(249)\r\n print(f""mask_expanded.shape: {mask_expanded.shape}"")\r\n print(f""mask_token.shape: {mask_token.shape}"")\r\n mask_token_expanded = self.dynamics.mask_token[0][None, None, None, :] # (1, 1, 1, D)\r\n jax.debug.breakpoint()\r\n tmp = jnp.where(mask_expanded, mask_token_expanded, vid_embed)\r\n print(f""vid_embed.shape: {vid_embed.shape}"")\r\n-> jax.debug.breakpoint()\r\n \r\n # --- Predict transition ---\r\n act_embed = self.dynamics.action_up(action_tokens)\r\n vid_embed += jnp.pad(act_embed, ((0, 0), (1, 0), (0, 0), (0, 0)))\r\n unmasked_ratio = jnp.cos(jnp.pi * (step + 1) / (self.steps * 2))\r\n(jdb) ",,terminal_output +4383,3529539,"TERMINAL",0,0,"[?25lt[?25h",,terminal_output +4384,3529602,"TERMINAL",0,0,"[?25lm[?25h",,terminal_output +4385,3529889,"TERMINAL",0,0,"[?25lp[?25h",,terminal_output +4386,3530181,"TERMINAL",0,0,"[?25l.[?25h",,terminal_output +4387,3531335,"TERMINAL",0,0,"[?25ls[?25h",,terminal_output +4388,3531523,"TERMINAL",0,0,"[?25la[?25h",,terminal_output +4389,3531817,"TERMINAL",0,0,"[?25la\r[?25h",,terminal_output +4390,3532225,"TERMINAL",0,0,"[?25la[?25h",,terminal_output +4391,3532508,"TERMINAL",0,0,"[?25la\r[?25h",,terminal_output +4392,3532707,"TERMINAL",0,0,"[?25lh[?25h[?25la[?25h",,terminal_output +4393,3532863,"TERMINAL",0,0,"[?25lp[?25h",,terminal_output +4394,3532933,"TERMINAL",0,0,"[?25le[?25h",,terminal_output +4395,3533004,"TERMINAL",0,0,"\r\n(1, 1, 1, 6, 920, 128)\r\n(jdb) ",,terminal_output +4396,3543089,"TERMINAL",0,0,"[?25lc[?25h",,terminal_output +4397,3543151,"TERMINAL",0,0,"[?25li[?25h",,terminal_output +4398,3543384,"TERMINAL",0,0,"[?25ls[?25h",,terminal_output +4399,3543583,"TERMINAL",0,0,"[?25l_[?25h",,terminal_output +4400,3544076,"TERMINAL",0,0,"[?25l_\r[?25h",,terminal_output +4401,3544129,"TERMINAL",0,0,"[?25ls\r[?25h",,terminal_output +4402,3544336,"TERMINAL",0,0,"[?25li\r[?25h",,terminal_output +4403,3544444,"TERMINAL",0,0,"[?25lc\r[?25h",,terminal_output +4404,3544495,"TERMINAL",0,0,"",,terminal_output +4405,3544678,"TERMINAL",0,0,"[?25lv[?25h",,terminal_output +4406,3544780,"TERMINAL",0,0,"[?25li[?25h",,terminal_output +4407,3545118,"TERMINAL",0,0,"[?25ld[?25h",,terminal_output +4408,3545407,"TERMINAL",0,0,"[?25l_[?25h",,terminal_output +4409,3545706,"TERMINAL",0,0,"[?25le[?25h[?25lm[?25h",,terminal_output +4410,3546309,"TERMINAL",0,0,"[?25lb[?25h",,terminal_output +4411,3546378,"TERMINAL",0,0,"[?25le[?25h",,terminal_output +4412,3546484,"TERMINAL",0,0,"[?25ld[?25h",,terminal_output +4413,3546763,"TERMINAL",0,0,"[?25l.[?25h",,terminal_output +4414,3546958,"TERMINAL",0,0,"[?25ls[?25h",,terminal_output +4415,3547063,"TERMINAL",0,0,"[?25lh[?25h",,terminal_output +4416,3547126,"TERMINAL",0,0,"[?25la[?25h",,terminal_output 
+4417,3547273,"TERMINAL",0,0,"[?25lp[?25h",,terminal_output +4418,3547337,"TERMINAL",0,0,"[?25le[?25h",,terminal_output +4419,3547452,"TERMINAL",0,0,"\r\n(1, 6, 920, 128)\r\n(jdb) ",,terminal_output +4420,3554511,"TERMINAL",0,0,"^DERROR:2025-07-03 17:14:51,332:jax._src.debugging:96: jax.debug.callback failed\r\nTraceback (most recent call last):\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/jax/_src/debugging.py"", line 94, in debug_callback_impl\r\n callback(*args)\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/jax/_src/debugging.py"", line 334, in _flat_callback\r\n callback(*args, **kwargs)\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/jax/_src/debugger/core.py"", line 220, in _breakpoint_callback\r\n debugger(frames, thread_id, **kwargs)\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/jax/_src/debugger/cli_debugger.py"", line 167, in run_debugger\r\n CliDebugger(frames, thread_id, **kwargs).run()\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/jax/_src/debugger/cli_debugger.py"", line 160, in run\r\n self.cmdloop()\r\n File ""/home/hk-project-p0023960/tum_cte0515/.local/share/uv/python/cpython-3.10.18-linux-x86_64-gnu/lib/python3.10/cmd.py"", line 138, in cmdloop\r\n stop = self.onecmd(line)\r\n File ""/home/hk-project-p0023960/tum_cte0515/.local/share/uv/python/cpython-3.10.18-linux-x86_64-gnu/lib/python3.10/cmd.py"", line 217, in onecmd\r\n return func(arg)\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/jax/_src/debugger/cli_debugger.py"", line 146, in do_quit\r\n sys.exit(0)\r\nSystemExit: 0\r\nERROR:jax._src.debugging:jax.debug.callback failed\r\nTraceback (most recent call last):\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/jax/_src/debugging.py"", line 94, in debug_callback_impl\r\n callback(*args)\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/jax/_src/debugging.py"", line 334, in _flat_callback\r\n callback(*args, **kwargs)\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/jax/_src/debugger/core.py"", line 220, in _breakpoint_callback\r\n debugger(frames, thread_id, **kwargs)\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/jax/_src/debugger/cli_debugger.py"", line 167, in run_debugger\r\n CliDebugger(frames, thread_id, **kwargs).run()\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/jax/_src/debugger/cli_debugger.py"", line 160, in run\r\n self.cmdloop()\r\n File ""/home/hk-project-p0023960/tum_cte0515/.local/share/uv/python/cpython-3.10.18-linux-x86_64-gnu/lib/python3.10/cmd.py"", line 138, in cmdloop\r\n stop = self.onecmd(line)\r\n File ""/home/hk-project-p0023960/tum_cte0515/.local/share/uv/python/cpython-3.10.18-linux-x86_64-gnu/lib/python3.10/cmd.py"", line 217, in onecmd\r\n return func(arg)\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/jax/_src/debugger/cli_debugger.py"", line 146, in do_quit\r\n 
sys.exit(0)\r\nSystemExit: 0\r\nE0703 17:14:51.334083 2692700 pjrt_stream_executor_client.cc:2917] Execution of replica 0 failed: INTERNAL: CpuCallback error calling callback: Traceback (most recent call last):\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/sample.py"", line 158, in \r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/sample.py"", line 118, in _autoreg_sample_mihir\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/jax/_src/traceback_util.py"", line 182, in reraise_with_filtered_traceback\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/flax/linen/module.py"", line 2240, in apply\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/flax/core/scope.py"", line 1079, in wrapper\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/flax/linen/module.py"", line 3022, in scope_fn\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/flax/linen/module.py"", line 699, in wrapped_module_method\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/flax/linen/module.py"", line 1216, in _call_wrapped_method\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/genie.py"", line 154, in sample_mihir\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/flax/linen/transforms.py"", line 360, in wrapped_fn\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/flax/core/lift.py"", line 328, in wrapper\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/flax/core/lift.py"", line 1031, in inner\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/flax/core/axes_scan.py"", line 179, in scan_fn\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/jax/_src/traceback_util.py"", line 182, in reraise_with_filtered_traceback\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/jax/_src/lax/control_flow/loops.py"", line 355, in scan\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/jax/_src/core.py"", line 531, in bind\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/jax/_src/core.py"", line 551, in _true_bind\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/jax/_src/core.py"", line 556, in bind_with_trace\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/jax/_src/core.py"", line 1060, in process_primitive\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/jax/_src/dispatch.py"", line 88, in apply_primitive\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/jax/_src/traceback_util.py"", line 182, in reraise_with_filtered_traceback\r\n File 
""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/jax/_src/pjit.py"", line 334, in cache_miss\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/jax/_src/pjit.py"", line 195, in _python_pjit_helper\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/jax/_src/pjit.py"", line 1884, in _pjit_call_impl_python\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/jax/_src/profiler.py"", line 354, in wrapper\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/jax/_src/interpreters/pxla.py"", line 1297, in __call__\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/jax/_src/callback.py"", line 782, in _wrapped_callback\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/jax/_src/debugging.py"", line 200, in _callback\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/jax/_src/debugging.py"", line 97, in debug_callback_impl\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/jax/_src/debugging.py"", line 334, in _flat_callback\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/jax/_src/debugger/core.py"", line 220, in _breakpoint_callback\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/jax/_src/debugger/cli_debugger.py"", line 167, in run_debugger\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/jax/_src/debugger/cli_debugger.py"", line 162, in run\r\n File ""/home/hk-project-p0023960/tum_cte0515/.local/share/uv/python/cpython-3.10.18-linux-x86_64-gnu/lib/python3.10/cmd.py"", line 145, in cmdloop\r\n File ""/home/hk-project-p0023960/tum_cte0515/.local/share/uv/python/cpython-3.10.18-linux-x86_64-gnu/lib/python3.10/cmd.py"", line 217, in onecmd\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/jax/_src/debugger/cli_debugger.py"", line 146, in do_quit\r\nSystemExit: 0\r\njax.errors.SimplifiedTraceback: For simplicity, JAX has removed its internal frames from the traceback of the following exception. 
Set JAX_TRACEBACK_FILTERING=off to include these.\r\n\r\nThe above exception was the direct cause of the following exception:\r\n\r\nTraceback (most recent call last):\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/sample.py"", line 158, in \r\n vid = _autoreg_sample_mihir(rng, video_batch, action_batch)\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/sample.py"", line 118, in _autoreg_sample_mihir\r\n new_frames = genie.apply(\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/genie.py"", line 154, in sample_mihir\r\n final_carry, _ = loop_fn(init_carry, jnp.arange(steps))\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/flax/core/axes_scan.py"", line 179, in scan_fn\r\n c, ys = lax.scan(\r\njaxlib._jax.XlaRuntimeError: INTERNAL: CpuCallback error calling callback: Traceback (most recent call last):\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/sample.py"", line 158, in \r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/sample.py"", line 118, in _autoreg_sample_mihir\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/jax/_src/traceback_util.py"", line 182, in reraise_with_filtered_traceback\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/flax/linen/module.py"", line 2240, in apply\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/flax/core/scope.py"", line 1079, in wrapper\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/flax/linen/module.py"", line 3022, in scope_fn\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/flax/linen/module.py"", line 699, in wrapped_module_method\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/flax/linen/module.py"", line 1216, in _call_wrapped_method\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/genie.py"", line 154, in sample_mihir\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/flax/linen/transforms.py"", line 360, in wrapped_fn\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/flax/core/lift.py"", line 328, in wrapper\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/flax/core/lift.py"", line 1031, in inner\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/flax/core/axes_scan.py"", line 179, in scan_fn\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/jax/_src/traceback_util.py"", line 182, in reraise_with_filtered_traceback\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/jax/_src/lax/control_flow/loops.py"", line 355, in scan\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/jax/_src/core.py"", line 531, in bind\r\n File 
""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/jax/_src/core.py"", line 551, in _true_bind\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/jax/_src/core.py"", line 556, in bind_with_trace\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/jax/_src/core.py"", line 1060, in process_primitive\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/jax/_src/dispatch.py"", line 88, in apply_primitive\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/jax/_src/traceback_util.py"", line 182, in reraise_with_filtered_traceback\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/jax/_src/pjit.py"", line 334, in cache_miss\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/jax/_src/pjit.py"", line 195, in _python_pjit_helper\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/jax/_src/pjit.py"", line 1884, in _pjit_call_impl_python\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/jax/_src/profiler.py"", line 354, in wrapper\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/jax/_src/interpreters/pxla.py"", line 1297, in __call__\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/jax/_src/callback.py"", line 782, in _wrapped_callback\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/jax/_src/debugging.py"", line 200, in _callback\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/jax/_src/debugging.py"", line 97, in debug_callback_impl\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/jax/_src/debugging.py"", line 334, in _flat_callback\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/jax/_src/debugger/core.py"", line 220, in _breakpoint_callback\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/jax/_src/debugger/cli_debugger.py"", line 167, in run_debugger\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/jax/_src/debugger/cli_debugger.py"", line 162, in run\r\n File ""/home/hk-project-p0023960/tum_cte0515/.local/share/uv/python/cpython-3.10.18-linux-x86_64-gnu/lib/python3.10/cmd.py"", line 145, in cmdloop\r\n File ""/home/hk-project-p0023960/tum_cte0515/.local/share/uv/python/cpython-3.10.18-linux-x86_64-gnu/lib/python3.10/cmd.py"", line 217, in onecmd\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/jax/_src/debugger/cli_debugger.py"", line 146, in do_quit\r\nSystemExit: 0\r\n",,terminal_output +4421,3556039,"TERMINAL",0,0,"]0;tum_cte0515@hkn0704:~/Projects/jafar[?2004h(jafar) [tum_cte0515@hkn0704 jafar]$ ",,terminal_output +4422,3556540,"TERMINAL",0,0,"sh scripts_horeka/overfit_sample_tiny/sample.sh",,terminal_output 
+4423,3556696,"TERMINAL",0,0,"\r\n[?2004l\r",,terminal_output +4424,3556826,"TERMINAL",0,0,"Sampling from checkpoint: /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/3309699/genie_1751384516_74500/\r\n",,terminal_output +4425,3559723,"TERMINAL",0,0,"2025-07-03 17:14:56.578018: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +4426,3563837,"genie.py",0,0,"",python,tab +4427,3563838,"genie.py",8133,0,"",python,selection_mouse +4428,3564130,"TERMINAL",0,0,"2025-07-03 17:15:01.029477: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +4429,3565100,"genie.py",8121,31,"",python,content +4430,3565134,"genie.py",8129,0,"",python,selection_command +4431,3566060,"genie.py",8182,0,"\n ",python,content +4432,3566410,"genie.py",8191,0,"p",python,content +4433,3566411,"genie.py",8192,0,"",python,selection_keyboard +4434,3566578,"genie.py",8192,0,"r",python,content +4435,3566579,"genie.py",8193,0,"",python,selection_keyboard +4436,3566718,"genie.py",8193,0,"i",python,content +4437,3566719,"genie.py",8194,0,"",python,selection_keyboard +4438,3566767,"genie.py",8194,0,"n",python,content +4439,3566768,"genie.py",8195,0,"",python,selection_keyboard +4440,3566902,"genie.py",8195,0,"t",python,content +4441,3566903,"genie.py",8196,0,"",python,selection_keyboard +4442,3567312,"genie.py",8191,5,"print",python,content +4443,3569191,"genie.py",8191,5,"",python,content +4444,3571880,"genie.py",8183,0,"",python,selection_command +4445,3573437,"genie.py",8183,0," print(f""tmp.shape: {tmp.shape}"")\n",python,content +4446,3573440,"genie.py",8224,9,"",python,content +4447,3573517,"TERMINAL",0,0,"2025-07-03 17:15:10.321529: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. 
Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +4448,3578927,"TERMINAL",0,0,"^CTraceback (most recent call last):\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/sample.py"", line 86, in \r\n params = genie.init(_rng, dummy_inputs)\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/genie.py"", line 76, in __call__\r\n",,terminal_output +4449,3579002,"TERMINAL",0,0," lam_outputs = self.lam.vq_encode(batch[""videos""], training=False)\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/models/lam.py"", line 75, in vq_encode\r\n z = self.encoder(padded_patches) # (B, T, N, E)\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/utils/nn.py"", line 87, in __call__\r\n x = STBlock(\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/utils/nn.py"", line 51, in __call__\r\n z = nn.LayerNorm()(z)\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/flax/linen/normalization.py"", line 507, in __call__\r\n mean, var = _compute_stats(\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/flax/linen/normalization.py"", line 134, in _compute_stats\r\n mu, mu2 = maybe_distributed_mean(x, _abs_sq(x), mask=mask)\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/flax/linen/normalization.py"", line 117, in maybe_distributed_mean\r\n mus = tuple(x.mean(axes, where=mask) for x in xs)\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/flax/linen/normalization.py"", line 117, in \r\n mus = tuple(x.mean(axes, where=mask) for x in xs)\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/jax/_src/numpy/array_methods.py"", line 1087, in meth\r\n return getattr(self.aval, name).fun(self, *args, **kwargs)\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/jax/_src/numpy/array_methods.py"", line 247, in _mean\r\n return reductions.mean(self, axis=axis, dtype=dtype, out=out,\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/jax/_src/numpy/reductions.py"", line 864, in mean\r\n return _mean(a, _ensure_optional_axes(axis), dtype, out, keepdims,\r\njax._src.source_info_util.JaxStackTraceBeforeTransformation: KeyboardInterrupt\r\n\r\nThe preceding stack trace is the source of the JAX operation that, once transformed by JAX, triggered the following exception.\r\n\r\n--------------------\r\n\r\nThe above exception was the direct cause of the following exception:\r\n\r\nTraceback (most recent call last):\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/sample.py"", line 86, in \r\n params = genie.init(_rng, dummy_inputs)\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/jax/_src/traceback_util.py"", line 182, in reraise_with_filtered_traceback\r\n return fun(*args, **kwargs)\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/flax/linen/module.py"", line 2452, in init\r\n _, v_out = self.init_with_output(\r\n File 
""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/jax/_src/traceback_util.py"", line 182, in reraise_with_filtered_traceback\r\n return fun(*args, **kwargs)\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/flax/linen/module.py"", line 2304, in init_with_output\r\n return init_with_output(\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/flax/core/scope.py"", line 1115, in wrapper\r\n return apply(fn, mutable=mutable, flags=init_flags)(\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/flax/core/scope.py"", line 1079, in wrapper\r\n y = fn(root, *args, **kwargs)\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/flax/linen/module.py"", line 3093, in scope_fn\r\n return fn(module.clone(parent=scope, _deep_clone=True), *args, **kwargs)\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/flax/linen/module.py"", line 699, in wrapped_module_method\r\n return self._call_wrapped_method(fun, args, kwargs)\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/flax/linen/module.py"", line 1216, in _call_wrapped_method\r\n y = run_fun(self, *args, **kwargs)\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/genie.py"", line 76, in __call__\r\n lam_outputs = self.lam.vq_encode(batch[""videos""], training=False)\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/flax/linen/module.py"", line 699, in wrapped_module_method\r\n return self._call_wrapped_method(fun, args, kwargs)\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/flax/linen/module.py"", line 1216, in _call_wrapped_method\r\n y = run_fun(self, *args, **kwargs)\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/models/lam.py"", line 75, in vq_encode\r\n z = self.encoder(padded_patches) # (B, T, N, E)\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/flax/linen/module.py"", line 699, in wrapped_module_method\r\n return self._call_wrapped_method(fun, args, kwargs)\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/flax/linen/module.py"", line 1216, in _call_wrapped_method\r\n y = run_fun(self, *args, **kwargs)\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/utils/nn.py"", line 87, in __call__\r\n x = STBlock(\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/flax/linen/transforms.py"", line 433, in wrapped_fn\r\n return trafo_fn(module_scopes, *args, **kwargs)\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/flax/core/lift.py"", line 319, in wrapper\r\n y, out_variable_groups_xs_t = fn(\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/flax/core/lift.py"", line 1474, in inner\r\n return rematted(variable_groups, rng_groups, *args, **kwargs)\r\n File 
""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/jax/_src/traceback_util.py"", line 182, in reraise_with_filtered_traceback\r\n return fun(*args, **kwargs)\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/jax/_src/ad_checkpoint.py"", line 333, in fun_remat\r\n out_flat = remat_p.bind(\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/jax/_src/core.py"", line 531, in bind\r\n return self._true_bind(*args, **params)\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/jax/_src/core.py"", line 551, in _true_bind\r\n return self.bind_with_trace(prev_trace, args, params)\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/jax/_src/core.py"", line 556, in bind_with_trace\r\n return trace.process_primitive(self, args, params)\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/jax/_src/core.py"", line 1060, in process_primitive\r\n return primitive.impl(*args, **params)\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/jax/_src/ad_checkpoint.py"", line 514, in remat_impl\r\n return core.eval_jaxpr(jaxpr, (), *args)\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/jax/_src/core.py"", line 624, in eval_jaxpr\r\n ans = eqn.primitive.bind(*subfuns, *map(read, eqn.invars), **bind_params)\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/jax/_src/core.py"", line 531, in bind\r\n return self._true_bind(*args, **params)\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/jax/_src/core.py"", line 551, in _true_bind\r\n return self.bind_with_trace(prev_trace, args, params)\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/jax/_src/core.py"", line 556, in bind_with_trace\r\n return trace.process_primitive(self, args, params)\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/jax/_src/core.py"", line 1060, in process_primitive\r\n return primitive.impl(*args, **params)\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/jax/_src/dispatch.py"", line 88, in apply_primitive\r\n outs = fun(*args)\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/jax/_src/traceback_util.py"", line 182, in reraise_with_filtered_traceback\r\n return fun(*args, **kwargs)\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/jax/_src/pjit.py"", line 334, in cache_miss\r\n executable, pgle_profiler) = _python_pjit_helper(fun, jit_info, *args, **kwargs)\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/jax/_src/pjit.py"", line 195, in _python_pjit_helper\r\n out_flat, compiled, profiler = _pjit_call_impl_python(*args_flat, **p.params)\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/jax/_src/pjit.py"", line 1862, in 
_pjit_call_impl_python\r\n ).compile()\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/jax/_src/interpreters/pxla.py"", line 2467, in compile\r\n executable = UnloadedMeshExecutable.from_hlo(\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/jax/_src/interpreters/pxla.py"", line 3009, in from_hlo\r\n xla_executable = _cached_compilation(\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/jax/_src/interpreters/pxla.py"", line 2800, in _cached_compilation\r\n xla_executable = compiler.compile_or_get_cached(\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/jax/_src/compiler.py"", line 447, in compile_or_get_cached\r\n return _compile_and_write_cache(\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/jax/_src/compiler.py"", line 719, in _compile_and_write_cache\r\n executable = backend_compile(\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/jax/_src/profiler.py"", line 354, in wrapper\r\n return func(*args, **kwargs)\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/jax/_src/compiler.py"", line 335, in backend_compile\r\n return backend.compile(\r\nKeyboardInterrupt\r\n",,terminal_output +4450,3579064,"TERMINAL",0,0,"^CException ignored in: .remove at 0x1462acf2f250>\r\nTraceback (most recent call last):\r\n File ""/home/hk-project-p0023960/tum_cte0515/.local/share/uv/python/cpython-3.10.18-linux-x86_64-gnu/lib/python3.10/weakref.py"", line 370, in remove\r\n",,terminal_output +4451,3579120,"TERMINAL",0,0," def remove(k, selfref=ref(self)):\r\nKeyboardInterrupt: \r\n",,terminal_output +4452,3579283,"TERMINAL",0,0,"^CException ignored in atexit callback: \r\nTraceback (most recent call last):\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/jax/_src/api.py"", line 3168, in clean_up\r\n clear_backends()\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/jax/_src/api.py"", line 3158, in clear_backends\r\n pjit._infer_params_cached.cache_clear()\r\nKeyboardInterrupt: \r\n",,terminal_output +4453,3579746,"TERMINAL",0,0,"\r\n]0;tum_cte0515@hkn0704:~/Projects/jafar[?2004h(jafar) [tum_cte0515@hkn0704 jafar]$ ",,terminal_output +4454,3580199,"TERMINAL",0,0,"^C[?2004l\r[?2004h[?2004l\r\r\n]0;tum_cte0515@hkn0704:~/Projects/jafar[?2004h(jafar) [tum_cte0515@hkn0704 jafar]$ ",,terminal_output +4455,3580509,"TERMINAL",0,0,"sh scripts_horeka/overfit_sample_tiny/sample.sh",,terminal_output +4456,3580627,"TERMINAL",0,0,"\r\n[?2004l\r",,terminal_output +4457,3580736,"TERMINAL",0,0,"Sampling from checkpoint: /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/3309699/genie_1751384516_74500/\r\n",,terminal_output +4458,3583548,"TERMINAL",0,0,"2025-07-03 17:15:20.447883: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. 
Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +4459,3587781,"TERMINAL",0,0,"2025-07-03 17:15:24.579823: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +4460,3589658,"TERMINAL",0,0,"bash",,terminal_focus +4461,3590469,"TERMINAL",0,0,"queue",,terminal_command +4462,3590545,"TERMINAL",0,0,"]633;E;2025-07-03 17:15:27 queue;70fb23be-3987-4c7c-890b-2d8e1f267fe1]633;C[?1049h(B[?7hEvery 1.0s: squeue --mehkn1993.localdomain: Thu Jul 3 17:15:27 2025JOBID PARTITION NAME USER ST\tTIME NODES NODELIST(REASON)3315644 accelerat interact tum_cte0 R58:20\t 1 hkn0704",,terminal_output +4463,3591604,"TERMINAL",0,0,"81",,terminal_output +4464,3592076,"TERMINAL",0,0,"[?1049l\r[?1l>]0;tum_cte0515@hkn1993:~/Projects/jafar]633;D;0",,terminal_output +4465,3593189,"TERMINAL",0,0,"bash",,terminal_focus +4466,3596998,"TERMINAL",0,0,"2025-07-03 17:15:33.873407: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +4467,3597267,"TERMINAL",0,0,"salloc --time=01:30:00 --partition=accelerated --nodes=1 --ntasks-per-node=4 --gres=gpu:4 --cpus-per-task=5 --mem=50G",,terminal_command +4468,3597338,"TERMINAL",0,0,"]633;E;2025-07-03 17:15:34 salloc --time=01:30:00 --partition=accelerated --nodes=1 --ntasks-per-node=4 --gres=gpu:4 --cpus-per-task=5 --mem=50G;dd830a57-5de1-42ee-9f5b-a467117add9f]633;Csalloc: Pending job allocation 3315883\r\nsalloc: job 3315883 queued and waiting for resources\r\n",,terminal_output +4469,3598257,"TERMINAL",0,0,"^Csalloc: Job allocation 3315883 has been revoked.\r\nsalloc: Job aborted due to signal\r\n]0;tum_cte0515@hkn1993:~/Projects/jafar]633;D;1",,terminal_output +4470,3605291,"TERMINAL",0,0,"2025-07-03 17:15:42.105259: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +4471,3609018,"TERMINAL",0,0,"salloc --time=01:30:00 --partition=accelerated --nodes=1 --ntasks-per-node=1 --gres=gpu:1 --cpus-per-task=5",,terminal_command +4472,3609080,"TERMINAL",0,0,"]633;E;2025-07-03 17:15:45 salloc --time=01:30:00 --partition=accelerated --nodes=1 --ntasks-per-node=1 --gres=gpu:1 --cpus-per-task=5 ;dd830a57-5de1-42ee-9f5b-a467117add9f]633;Csalloc: Pending job allocation 3315884\r\nsalloc: job 3315884 queued and waiting for resources\r\n",,terminal_output +4473,3610240,"TERMINAL",0,0,"bash",,terminal_focus +4474,3612075,"TERMINAL",0,0,"2025-07-03 17:15:48.932693: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. 
Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +4475,3612095,"TERMINAL",0,0,"idling",,terminal_command +4476,3612144,"TERMINAL",0,0,"]633;E;2025-07-03 17:15:49 idling;70fb23be-3987-4c7c-890b-2d8e1f267fe1]633;C",,terminal_output +4477,3612210,"TERMINAL",0,0,"[?1049h(B[?7hEvery 1.0s: sinfo_t_idlehkn1993.localdomain: Thu Jul 3 17:15:49 2025Partition dev_cpuonly: 11 nodes idle\rPartition cpuonly: 12 nodes idle\rPartition dev_accelerated:\t 2 nodes idle\rPartition accelerated:\t 1 nodes idle\rPartition dev_accelerated-h100 :\t 0 nodes idle\rPartition accelerated-h100:\t 0 nodes idle\rPartition large:\t 7 nodes idle",,terminal_output +4478,3613276,"TERMINAL",0,0,"50",,terminal_output +4479,3613638,"TERMINAL",0,0,"salloc",,terminal_focus +4480,3614280,"TERMINAL",0,0,"1",,terminal_output +4481,3614358,"TERMINAL",0,0,"^Csalloc: Job allocation 3315884 has been revoked.\r\nsalloc: Job aborted due to signal\r\n]0;tum_cte0515@hkn1993:~/Projects/jafar]633;D;1]633;P;Cwd=/home/hk-project-p0023960/tum_cte0515/Projects/jafar",,terminal_output +4482,3615332,"TERMINAL",0,0,"2",,terminal_output +4483,3616358,"TERMINAL",0,0,"3",,terminal_output +4484,3617396,"TERMINAL",0,0,"4",,terminal_output +4485,3618448,"TERMINAL",0,0,"5",,terminal_output +4486,3618705,"TERMINAL",0,0,"2025-07-03 17:15:55.562760: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +4487,3619514,"TERMINAL",0,0,"6",,terminal_output +4488,3620404,"TERMINAL",0,0,"salloc --time=01:00:00 --partition=dev_accelerated --nodes=1 --ntasks-per-node=1 --gres=gpu:1 --cpus-per-task=5",,terminal_command +4489,3620456,"TERMINAL",0,0,"]633;E;2025-07-03 17:15:57 salloc --time=01:00:00 --partition=dev_accelerated --nodes=1 --ntasks-per-node=1 --gres=gpu:1 --cpus-per-task=5 ;dd830a57-5de1-42ee-9f5b-a467117add9f]633;Csalloc: Granted job allocation 3315885\r\n",,terminal_output +4490,3620583,"TERMINAL",0,0,"71",,terminal_output +4491,3620595,"TERMINAL",0,0,"salloc: Waiting for resource configuration\r\n",,terminal_output +4492,3621574,"TERMINAL",0,0,"8",,terminal_output +4493,3622116,"TERMINAL",0,0,"srun",,terminal_focus +4494,3622199,"TERMINAL",0,0,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/orbax/checkpoint/_src/serialization/type_handlers.py:1251: UserWarning: Sharding info not provided when restoring. Populating sharding info from sharding file. Please note restoration time will be slightly increased due to reading from file. 
Note also that this option is unsafe when restoring on a different topology than the checkpoint was saved with.\r\n warnings.warn(\r\n",,terminal_output +4495,3622619,"TERMINAL",0,0,"9",,terminal_output +4496,3623657,"TERMINAL",0,0,"6:00",,terminal_output +4497,3624701,"TERMINAL",0,0,"1",,terminal_output +4498,3625015,"TERMINAL",0,0,"watch",,terminal_focus +4499,3625744,"TERMINAL",0,0,"2",,terminal_output +4500,3625962,"TERMINAL",0,0,"[?1049l\r[?1l>]0;tum_cte0515@hkn1993:~/Projects/jafar]633;D;0",,terminal_output +4501,3627791,"TERMINAL",0,0,"srun",,terminal_focus +4502,3628866,"TERMINAL",0,0,"salloc",,terminal_focus +4503,3630276,"TERMINAL",0,0,"2025-07-03 17:16:07.092969: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +4504,3630641,"TERMINAL",0,0,"srun",,terminal_focus +4505,3633252,"TERMINAL",0,0,"2025-07-03 17:16:10.152905: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +4506,3635156,"TERMINAL",0,0,"bash",,terminal_focus +4507,3636237,"TERMINAL",0,0,"queue",,terminal_command +4508,3636288,"TERMINAL",0,0,"]633;E;2025-07-03 17:16:13 queue;70fb23be-3987-4c7c-890b-2d8e1f267fe1]633;C",,terminal_output +4509,3636355,"TERMINAL",0,0,"[?1049h(B[?7hEvery 1.0s: squeue --mehkn1993.localdomain: Thu Jul 3 17:16:13 2025JOBID PARTITION NAME USER ST\tTIME NODES NODELIST(REASON)3315644 accelerat interact tum_cte0 R59:06\t 1 hkn07043315885 dev_accel interact tum_cte0 R\t0:16\t 1 hkn0401",,terminal_output +4510,3637445,"TERMINAL",0,0,"477",,terminal_output +4511,3638469,"TERMINAL",0,0,"588",,terminal_output +4512,3639492,"TERMINAL",0,0,"699",,terminal_output +4513,3640619,"TERMINAL",0,0,"71020",,terminal_output +4514,3641337,"TERMINAL",0,0,"2025-07-03 17:16:18.180076: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. 
Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +4515,3641643,"TERMINAL",0,0,"811",,terminal_output +4516,3642667,"TERMINAL",0,0,"922",,terminal_output +4517,3642863,"TERMINAL",0,0,"token_idxs shape: (1, 1, 920)\r\n",,terminal_output +4518,3642984,"TERMINAL",0,0,"token_idxs shape: (1, 6, 920)\r\n",,terminal_output +4519,3643123,"TERMINAL",0,0,"action_tokens.shape: (1, 5, 1, 32)\r\n",,terminal_output +4520,3643282,"TERMINAL",0,0,"mask_expanded.shape: (1, 6, 920, 1)\r\nmask_token.shape: (1, 1, 1, 128)\r\n",,terminal_output +4521,3643345,"TERMINAL",0,0,"tmp.shape: (1, 6, 920, 128)\r\nvid_embed.shape: (1, 6, 920, 128)\r\n",,terminal_output +4522,3643700,"TERMINAL",0,0,"2033",,terminal_output +4523,3644510,"TERMINAL",0,0,"mask_expanded.shape: (1, 6, 920, 1)\r\nmask_token.shape: (1, 1, 1, 128)\r\ntmp.shape: (1, 6, 920, 128)\r\nvid_embed.shape: (1, 6, 920, 128)\r\n",,terminal_output +4524,3644724,"TERMINAL",0,0,"144",,terminal_output +4525,3644908,"TERMINAL",0,0,"2025-07-03 17:16:21.782704: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +4526,3645772,"TERMINAL",0,0,"255",,terminal_output +4527,3646823,"TERMINAL",0,0,"366",,terminal_output +4528,3647685,"TERMINAL",0,0,"salloc: Nodes hkn0401 are ready for job\r\n",,terminal_output +4529,3647872,"TERMINAL",0,0,"477",,terminal_output +4530,3648036,"TERMINAL",0,0,"srun",,terminal_focus +4531,3648747,"TERMINAL",0,0,"]0;tum_cte0515@hkn0401:~/Projects/jafar[?2004h[tum_cte0515@hkn0401 jafar]$ ",,terminal_output +4532,3648924,"TERMINAL",0,0,"588",,terminal_output +4533,3649966,"TERMINAL",0,0,"Entering jdb:\r\n(jdb) ",,terminal_output +4534,3650001,"TERMINAL",0,0,"699",,terminal_output +4535,3651066,"TERMINAL",0,0,"72030",,terminal_output +4536,3652089,"TERMINAL",0,0,"811",,terminal_output +4537,3653149,"TERMINAL",0,0,"933",,terminal_output +4538,3654186,"TERMINAL",0,0,"3144",,terminal_output +4539,3655261,"TERMINAL",0,0,"255",,terminal_output +4540,3656291,"TERMINAL",0,0,"366",,terminal_output +4541,3656761,"genie.py",0,0,"",python,tab +4542,3656762,"genie.py",8300,0,"",python,selection_mouse +4543,3657355,"TERMINAL",0,0,"477",,terminal_output +4544,3658440,"TERMINAL",0,0,"588",,terminal_output +4545,3658535,"genie.py",8277,31,"",python,content +4546,3658760,"genie.py",8224,0,"",python,selection_command +4547,3659010,"genie.py",8183,0,"",python,selection_command +4548,3659154,"genie.py",8121,0,"",python,selection_command +4549,3659427,"TERMINAL",0,0,"699",,terminal_output +4550,3659679,"genie.py",8122,0,"",python,selection_command +4551,3659822,"genie.py",8123,0,"",python,selection_command +4552,3659979,"genie.py",8124,0,"",python,selection_command +4553,3660112,"genie.py",8125,0,"",python,selection_command +4554,3660236,"genie.py",8126,0,"",python,selection_command +4555,3660378,"genie.py",8127,0,"",python,selection_command +4556,3660476,"TERMINAL",0,0,"73040",,terminal_output +4557,3660518,"genie.py",8128,0,"",python,selection_command +4558,3660667,"genie.py",8129,0,"",python,selection_command +4559,3660987,"genie.py",8129,4,"",python,content +4560,3661573,"TERMINAL",0,0,"811",,terminal_output +4561,3661658,"genie.py",8129,0,"v",python,content 
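Annotation: the shape lines above ("token_idxs shape: …", "mask_expanded.shape: …") come from plain print() calls inserted into the model. Because .shape is static, they work under jit, but they run at trace time and fire once per compilation rather than per call, which is consistent with each group appearing twice in the log (two traces). A hedged sketch of the two printing options:

import jax
import jax.numpy as jnp

@jax.jit
def step(x):
    # Plain print() runs while tracing: x is a tracer, but x.shape is a
    # concrete static shape, so shape debugging works. It fires once per
    # compilation, not on every call.
    print(f"x.shape: {x.shape}")
    # jax.debug.print is staged and can report runtime values on each call.
    jax.debug.print("max(x) = {m}", m=jnp.max(x))
    return x * 2.0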
+4562,3661658,"genie.py",8130,0,"",python,selection_keyboard +4563,3661847,"genie.py",8130,0,"i",python,content +4564,3661847,"genie.py",8131,0,"",python,selection_keyboard +4565,3662102,"genie.py",8131,0,"d",python,content +4566,3662102,"genie.py",8132,0,"",python,selection_keyboard +4567,3662372,"genie.py",8132,0,"_",python,content +4568,3662373,"genie.py",8133,0,"",python,selection_keyboard +4569,3662581,"TERMINAL",0,0,"922",,terminal_output +4570,3662892,"genie.py",8129,4,"vid_embed",python,content +4571,3663632,"genie.py",8138,0," ",python,content +4572,3663633,"genie.py",8139,0,"",python,selection_keyboard +4573,3663660,"TERMINAL",0,0,"4033",,terminal_output +4574,3663760,"genie.py",8138,0,"",python,selection_command +4575,3664711,"TERMINAL",0,0,"144",,terminal_output +4576,3665128,"genie.py",8220,0,"",python,selection_mouse +4577,3665741,"genie.py",8250,0,"",python,selection_mouse +4578,3665809,"TERMINAL",0,0,"255",,terminal_output +4579,3666206,"genie.py",8211,0,"",python,selection_mouse +4580,3666778,"TERMINAL",0,0,"366",,terminal_output +4581,3667208,"genie.py",8189,94,"",python,content +4582,3667577,"genie.py",8143,0,"",python,selection_command +4583,3667793,"genie.py",8088,0,"",python,selection_command +4584,3667969,"TERMINAL",0,0,"477",,terminal_output +4585,3668136,"genie.py",8143,0,"",python,selection_command +4586,3668859,"genie.py",8188,0,"\n print(f""tmp.shape: {tmp.shape}"")\n print(f""vid_embed.shape: {vid_embed.shape}"")",python,content +4587,3668897,"genie.py",8197,0,"",python,selection_command +4588,3668974,"TERMINAL",0,0,"588",,terminal_output +4589,3669662,"genie.py",8189,41,"",python,content +4590,3669727,"genie.py",8197,0,"",python,selection_command +4591,3670011,"TERMINAL",0,0,"699",,terminal_output +4592,3670655,"genie.py",8189,53,"",python,content +4593,3671067,"TERMINAL",0,0,"74050",,terminal_output +4594,3672205,"TERMINAL",0,0,"822",,terminal_output +4595,3673158,"TERMINAL",0,0,"5033",,terminal_output +4596,3673770,"genie.py",8109,0,"",python,selection_mouse +4597,3673936,"genie.py",8101,10,"mask_token",python,selection_mouse +4598,3674216,"TERMINAL",0,0,"144",,terminal_output +4599,3675303,"TERMINAL",0,0,"255",,terminal_output +4600,3675610,"genie.py",7980,0,"",python,selection_mouse +4601,3675738,"genie.py",7973,13,"mask_expanded",python,selection_mouse +4602,3676307,"TERMINAL",0,0,"366",,terminal_output +4603,3677381,"TERMINAL",0,0,"477",,terminal_output +4604,3678507,"TERMINAL",0,0,"588",,terminal_output +4605,3679530,"TERMINAL",0,0,"699",,terminal_output +4606,3679842,"genie.py",7860,0,"",python,selection_mouse +4607,3680555,"TERMINAL",0,0,"7501:00",,terminal_output +4608,3680762,"genie.py",8238,0,"",python,selection_mouse +4609,3680889,"genie.py",8235,9,"act_embed",python,selection_mouse +4610,3681463,"genie.py",8298,0,"",python,selection_mouse +4611,3681568,"TERMINAL",0,0,"811",,terminal_output +4612,3681599,"genie.py",8294,9,"vid_embed",python,selection_mouse +4613,3682705,"TERMINAL",0,0,"922",,terminal_output +4614,3683651,"TERMINAL",0,0,"7:0033",,terminal_output +4615,3684754,"TERMINAL",0,0,"144",,terminal_output +4616,3685750,"TERMINAL",0,0,"255",,terminal_output +4617,3686803,"TERMINAL",0,0,"366",,terminal_output +4618,3687834,"TERMINAL",0,0,"477",,terminal_output +4619,3688881,"TERMINAL",0,0,"588",,terminal_output +4620,3689976,"TERMINAL",0,0,"699",,terminal_output +4621,3691038,"TERMINAL",0,0,"71:00:0010",,terminal_output +4622,3692127,"TERMINAL",0,0,"811",,terminal_output +4623,3693313,"TERMINAL",0,0,"922",,terminal_output 
+4624,3694235,"TERMINAL",0,0,"salloc: Job 3315644 has exceeded its time limit and its allocation has been revoked.\nslurmstepd: error: *** STEP 3315644.interactive ON hkn0704 CANCELLED AT 2025-07-03T17:17:11 DUE TO TIME LIMIT ***\r\nTerminated\r\n]0;tum_cte0515@hkn0704:~/Projects/jafar[?2004h(jafar) [tum_cte0515@hkn0704 jafar]$ srun: Job step aborted: Waiting up to 32 seconds for job step to finish.\r\n",,terminal_output +4625,3694259,"TERMINAL",0,0,"10CG44",,terminal_output +4626,3695198,"TERMINAL",0,0,"25",,terminal_output +4627,3696326,"TERMINAL",0,0,"36",,terminal_output +4628,3697278,"TERMINAL",0,0,"47",,terminal_output +4629,3698358,"TERMINAL",0,0,"58",,terminal_output +4630,3699403,"TERMINAL",0,0,"69",,terminal_output +4631,3700437,"genie.py",7980,0,"",python,selection_mouse +4632,3700497,"TERMINAL",0,0,"720",,terminal_output +4633,3701030,"genie.py",7973,13,"mask_expanded",python,selection_mouse +4634,3701550,"TERMINAL",0,0,"81",,terminal_output +4635,3701713,"genie.py",8081,0,"",python,selection_mouse +4636,3701893,"genie.py",8080,1,"f",python,selection_mouse +4637,3702461,"genie.py",8136,0,"",python,selection_mouse +4638,3702521,"TERMINAL",0,0,"92",,terminal_output +4639,3702599,"genie.py",8129,9,"vid_embed",python,selection_mouse +4640,3703290,"genie.py",7979,0,"",python,selection_mouse +4641,3703408,"genie.py",7973,13,"mask_expanded",python,selection_mouse +4642,3703571,"TERMINAL",0,0,"203",,terminal_output +4643,3704076,"genie.py",7996,0,"",python,selection_mouse +4644,3704192,"genie.py",7993,5,"[...,",python,selection_mouse +4645,3704628,"TERMINAL",0,0,"14",,terminal_output +4646,3704752,"genie.py",7992,0,"",python,selection_mouse +4647,3704913,"genie.py",7989,4,"mask",python,selection_mouse +4648,3705481,"genie.py",8002,0,"",python,selection_mouse +4649,3705670,"TERMINAL",0,0,"25",,terminal_output +4650,3707077,"TERMINAL",0,0,"36",,terminal_output +4651,3708494,"TERMINAL",0,0,"48",,terminal_output +4652,3709522,"TERMINAL",0,0,"69",,terminal_output +4653,3710661,"TERMINAL",0,0,"730",,terminal_output +4654,3711687,"TERMINAL",0,0,"81",,terminal_output +4655,3712674,"TERMINAL",0,0,"92",,terminal_output +4656,3713732,"TERMINAL",0,0,"303",,terminal_output +4657,3714859,"TERMINAL",0,0,"14",,terminal_output +4658,3715803,"TERMINAL",0,0,"25",,terminal_output +4659,3716856,"TERMINAL",0,0,"36",,terminal_output +4660,3717911,"TERMINAL",0,0,"47",,terminal_output +4661,3718977,"TERMINAL",0,0,"58",,terminal_output +4662,3720019,"TERMINAL",0,0,"69",,terminal_output +4663,3721107,"TERMINAL",0,0,"740",,terminal_output +4664,3722131,"TERMINAL",0,0,"82",,terminal_output +4665,3723267,"TERMINAL",0,0,"403",,terminal_output +4666,3723692,"genie.py",0,0,"",python,tab +4667,3723693,"genie.py",7991,0,"",python,selection_mouse +4668,3723795,"genie.py",7989,4,"mask",python,selection_mouse +4669,3723887,"TERMINAL",0,0,"srun: error: hkn0704: task 0: Killed\r\n]0;tum_cte0515@hkn1993:~/Projects/jafar]633;D;137",,terminal_output +4670,3724317,"TERMINAL",0,0,"14",,terminal_output +4671,3725369,"TERMINAL",0,0,"25",,terminal_output +4672,3726422,"TERMINAL",0,0,"36",,terminal_output +4673,3726968,"TERMINAL",0,0,"watch",,terminal_focus +4674,3726969,"TERMINAL",0,0,"srun",,terminal_focus +4675,3727524,"TERMINAL",0,0,"47",,terminal_output +4676,3728582,"TERMINAL",0,0,"58",,terminal_output +4677,3728695,"TERMINAL",0,0,"[?25lso[?25h[?25lo[?25h",,terminal_output +4678,3728857,"TERMINAL",0,0,"[?25lu[?25h[?25lr[?25h",,terminal_output +4679,3729049,"TERMINAL",0,0,"[?25lc[?25h",,terminal_output 
+4680,3729138,"TERMINAL",0,0,"[?25le[?25h",,terminal_output +4681,3729243,"TERMINAL",0,0,"[?25l [?25h",,terminal_output +4682,3729305,"TERMINAL",0,0,"[?25l.[?25h",,terminal_output +4683,3729488,"TERMINAL",0,0,"[?25lv[?25h",,terminal_output +4684,3729592,"TERMINAL",0,0,"69",,terminal_output +4685,3729656,"TERMINAL",0,0,"env/",,terminal_output +4686,3729867,"TERMINAL",0,0,"[?25lb[?25h",,terminal_output +4687,3729973,"TERMINAL",0,0,"in/",,terminal_output +4688,3730231,"TERMINAL",0,0,"[?25la[?25h",,terminal_output +4689,3730293,"TERMINAL",0,0,"[?25lc[?25h",,terminal_output +4690,3730434,"TERMINAL",0,0,"tivate",,terminal_output +4691,3730671,"TERMINAL",0,0,"750",,terminal_output +4692,3730682,"TERMINAL",0,0,"[?25l[?2004l\r[?25h]0;tum_cte0515@hkn0401:~/Projects/jafar[?2004h(jafar) [tum_cte0515@hkn0401 jafar]$ ",,terminal_output +4693,3731646,"TERMINAL",0,0,"source .venv/bin/activate",,terminal_output +4694,3731678,"TERMINAL",0,0,"81",,terminal_output +4695,3731785,"TERMINAL",0,0,"idling",,terminal_output +4696,3732205,"TERMINAL",0,0,"ls",,terminal_output +4697,3732610,"TERMINAL",0,0,"cd logs_mihir/",,terminal_output +4698,3732726,"TERMINAL",0,0,"92",,terminal_output +4699,3733003,"TERMINAL",0,0,"ls",,terminal_output +4700,3733381,"TERMINAL",0,0,"cd logs/",,terminal_output +4701,3733624,"TERMINAL",0,0,"..",,terminal_output +4702,3733795,"TERMINAL",0,0,"503",,terminal_output +4703,3733816,"TERMINAL",0,0,"checkpoints/",,terminal_output +4704,3734066,"TERMINAL",0,0,"ls",,terminal_output +4705,3734273,"TERMINAL",0,0,"cd ..",,terminal_output +4706,3734540,"TERMINAL",0,0,"$ws_dir",,terminal_output +4707,3734724,"TERMINAL",0,0,"queue",,terminal_output +4708,3734892,"TERMINAL",0,0,"\r1885 dev_accel R 1:5401",,terminal_output +4709,3735179,"TERMINAL",0,0,"/bin/python3 /hkfs/home/project/hk-project-p0023960/tum_cte0515/.cursor-server/extensions/ms-python.python-2024.12.3-linux-x64/python_files/printEnvVariablesToFile.py /hkfs/home/project/hk-project-p0023960/tum_cte0515/.cursor-server/extensions/ms-python.python-2024.12.3-linux-x64/python_files/deactivate/bash/envVars.txt",,terminal_output +4710,3735384,"TERMINAL",0,0,"\rsh scripts_horeka/overfit_sample_tiny/sample.sh \r\n\r",,terminal_output +4711,3735946,"TERMINAL",0,0,"25",,terminal_output +4712,3736998,"TERMINAL",0,0,"36",,terminal_output +4713,3738046,"TERMINAL",0,0,"47",,terminal_output +4714,3739099,"TERMINAL",0,0,"58",,terminal_output +4715,3740255,"TERMINAL",0,0,"62:00",,terminal_output +4716,3740967,"genie.py",0,0,"",python,tab +4717,3740967,"genie.py",9226,0,"",python,selection_mouse +4718,3741075,"genie.py",9216,14,"new_token_idxs",python,selection_mouse +4719,3741243,"TERMINAL",0,0,"81",,terminal_output +4720,3742405,"TERMINAL",0,0,"92",,terminal_output +4721,3743328,"TERMINAL",0,0,"8:003",,terminal_output +4722,3744352,"TERMINAL",0,0,"14",,terminal_output +4723,3745486,"TERMINAL",0,0,"25",,terminal_output +4724,3746501,"TERMINAL",0,0,"36",,terminal_output +4725,3746619,"genie.py",9281,0,"",python,selection_mouse +4726,3747347,"genie.py",9219,0,"",python,selection_mouse +4727,3747486,"TERMINAL",0,0,"47",,terminal_output +4728,3748279,"genie.py",9220,0,"",python,selection_command +4729,3748550,"TERMINAL",0,0,"58",,terminal_output +4730,3749191,"genie.py",9216,4,"",python,content +4731,3749578,"TERMINAL",0,0,"69",,terminal_output +4732,3749610,"genie.py",9215,0,"",python,selection_command +4733,3750639,"TERMINAL",0,0,"710",,terminal_output +4734,3751774,"TERMINAL",0,0,"81",,terminal_output 
+4735,3752310,"genie.py",9223,0,"",python,selection_mouse +4736,3752464,"genie.py",9216,10,"token_idxs",python,selection_mouse +4737,3752738,"TERMINAL",0,0,"92",,terminal_output +4738,3753596,"genie.py",9223,0,"",python,selection_mouse +4739,3753713,"genie.py",9216,10,"token_idxs",python,selection_mouse +4740,3753796,"TERMINAL",0,0,"103",,terminal_output +4741,3753866,"genie.py",9208,69," token_idxs = jnp.where(mask, sampled_token_idxs, token_idxs)\n",python,selection_mouse +4742,3754514,"genie.py",9223,0,"",python,selection_mouse +4743,3754593,"genie.py",9216,10,"token_idxs",python,selection_mouse +4744,3754835,"TERMINAL",0,0,"14",,terminal_output +4745,3755924,"TERMINAL",0,0,"25",,terminal_output +4746,3756954,"TERMINAL",0,0,"36",,terminal_output +4747,3757978,"TERMINAL",0,0,"47",,terminal_output +4748,3759041,"TERMINAL",0,0,"58",,terminal_output +4749,3760122,"TERMINAL",0,0,"69",,terminal_output +4750,3761156,"TERMINAL",0,0,"721",,terminal_output +4751,3762228,"TERMINAL",0,0,"92",,terminal_output +4752,3763245,"TERMINAL",0,0,"203",,terminal_output +4753,3764847,"TERMINAL",0,0,"14",,terminal_output +4754,3765928,"TERMINAL",0,0,"25",,terminal_output +4755,3766846,"TERMINAL",0,0,"36",,terminal_output +4756,3767894,"TERMINAL",0,0,"47",,terminal_output +4757,3768993,"TERMINAL",0,0,"58",,terminal_output +4758,3770047,"TERMINAL",0,0,"69",,terminal_output +4759,3771181,"TERMINAL",0,0,"730",,terminal_output +4760,3772147,"TERMINAL",0,0,"82",,terminal_output +4761,3773133,"genie.py",4766,0,"",python,selection_mouse +4762,3773190,"TERMINAL",0,0,"303",,terminal_output +4763,3773725,"genie.py",4968,0,"",python,selection_mouse +4764,3774199,"TERMINAL",0,0,"14",,terminal_output +4765,3774211,"genie.py",5019,0,"",python,selection_mouse +4766,3774230,"genie.py",5018,0,"",python,selection_command +4767,3775289,"TERMINAL",0,0,"25",,terminal_output +4768,3776298,"TERMINAL",0,0,"36",,terminal_output +4769,3776816,"genie.py",5308,0,"",python,selection_mouse +4770,3777080,"genie.py",5296,16,"new_frame_pixels",python,selection_mouse +4771,3777426,"TERMINAL",0,0,"47",,terminal_output +4772,3778452,"TERMINAL",0,0,"58",,terminal_output +4773,3779432,"TERMINAL",0,0,"69",,terminal_output +4774,3780416,"sample.py",0,0,"",python,tab +4775,3780576,"TERMINAL",0,0,"740",,terminal_output +4776,3781625,"TERMINAL",0,0,"81",,terminal_output +4777,3782649,"TERMINAL",0,0,"92",,terminal_output +4778,3782799,"sample.py",3747,0,"",python,selection_mouse +4779,3783674,"TERMINAL",0,0,"403",,terminal_output +4780,3784546,"sample.py",3798,0,"",python,selection_mouse +4781,3784683,"TERMINAL",0,0,"14",,terminal_output +4782,3785834,"TERMINAL",0,0,"25",,terminal_output +4783,3786778,"TERMINAL",0,0,"36",,terminal_output +4784,3787872,"TERMINAL",0,0,"47",,terminal_output +4785,3788968,"TERMINAL",0,0,"58",,terminal_output +4786,3790106,"TERMINAL",0,0,"650",,terminal_output +4787,3791251,"TERMINAL",0,0,"81",,terminal_output +4788,3791379,"sample.py",3536,0,"",python,selection_mouse +4789,3792190,"TERMINAL",0,0,"92",,terminal_output +4790,3792864,"sample.py",3736,0,"",python,selection_mouse +4791,3793046,"sample.py",3735,3,"vid",python,selection_mouse +4792,3793239,"TERMINAL",0,0,"503",,terminal_output +4793,3793811,"sample.py",3754,0,"",python,selection_mouse +4794,3794324,"TERMINAL",0,0,"14",,terminal_output +4795,3794428,"sample.py",3759,0,"",python,selection_mouse +4796,3794570,"sample.py",3758,3,"vid",python,selection_mouse +4797,3795390,"TERMINAL",0,0,"25",,terminal_output +4798,3795506,"sample.py",3737,0,"",python,selection_mouse 
+4799,3795660,"sample.py",3735,3,"vid",python,selection_mouse +4800,3795918,"sample.py",3735,4,"vid ",python,selection_mouse +4801,3796048,"sample.py",3735,5,"vid =",python,selection_mouse +4802,3796380,"TERMINAL",0,0,"36",,terminal_output +4803,3796740,"sample.py",3735,6,"vid = ",python,selection_mouse +4804,3797498,"TERMINAL",0,0,"47",,terminal_output +4805,3798522,"TERMINAL",0,0,"58",,terminal_output +4806,3799649,"TERMINAL",0,0,"69",,terminal_output +4807,3800055,"sample.py",3798,0,"",python,selection_mouse +4808,3800671,"TERMINAL",0,0,"73:00",,terminal_output +4809,3801463,"sample.py",3730,0,"",python,selection_mouse +4810,3801656,"TERMINAL",0,0,"81",,terminal_output +4811,3801731,"sample.py",3730,0,"\n ",python,content +4812,3802715,"TERMINAL",0,0,"92",,terminal_output +4813,3803095,"sample.py",3735,0,"r",python,content +4814,3803096,"sample.py",3736,0,"",python,selection_keyboard +4815,3803271,"sample.py",3736,0,"e",python,content +4816,3803271,"sample.py",3737,0,"",python,selection_keyboard +4817,3803379,"sample.py",3737,0,"t",python,content +4818,3803380,"sample.py",3738,0,"",python,selection_keyboard +4819,3803743,"TERMINAL",0,0,"9:003",,terminal_output +4820,3803897,"sample.py",3737,1,"",python,content +4821,3804034,"sample.py",3736,1,"",python,content +4822,3804156,"sample.py",3735,1,"",python,content +4823,3804789,"TERMINAL",0,0,"14",,terminal_output +4824,3805091,"sample.py",3735,0,"j",python,content +4825,3805092,"sample.py",3736,0,"",python,selection_keyboard +4826,3805185,"sample.py",3736,0,"a",python,content +4827,3805186,"sample.py",3737,0,"",python,selection_keyboard +4828,3805324,"sample.py",3737,0,"x",python,content +4829,3805325,"sample.py",3738,0,"",python,selection_keyboard +4830,3805448,"sample.py",3738,0,".",python,content +4831,3805449,"sample.py",3739,0,"",python,selection_keyboard +4832,3805893,"sample.py",3739,0,"d",python,content +4833,3805894,"sample.py",3740,0,"",python,selection_keyboard +4834,3805895,"TERMINAL",0,0,"25",,terminal_output +4835,3806070,"sample.py",3740,0,"e",python,content +4836,3806072,"sample.py",3741,0,"",python,selection_keyboard +4837,3806189,"sample.py",3741,0,"b",python,content +4838,3806190,"sample.py",3742,0,"",python,selection_keyboard +4839,3806326,"sample.py",3742,0,"u",python,content +4840,3806327,"sample.py",3743,0,"",python,selection_keyboard +4841,3806435,"sample.py",3743,0,"g",python,content +4842,3806435,"sample.py",3744,0,"",python,selection_keyboard +4843,3806915,"TERMINAL",0,0,"36",,terminal_output +4844,3807080,"sample.py",3739,5,"debug",python,content +4845,3807980,"TERMINAL",0,0,"47",,terminal_output +4846,3808557,"sample.py",3744,0,"-",python,content +4847,3808558,"sample.py",3745,0,"",python,selection_keyboard +4848,3808810,"sample.py",3745,0,"b",python,content +4849,3808811,"sample.py",3746,0,"",python,selection_keyboard +4850,3808893,"sample.py",3746,0,"r",python,content +4851,3808894,"sample.py",3747,0,"",python,selection_keyboard +4852,3808980,"TERMINAL",0,0,"58",,terminal_output +4853,3809070,"sample.py",3747,0,"e",python,content +4854,3809071,"sample.py",3748,0,"",python,selection_keyboard +4855,3810024,"TERMINAL",0,0,"69",,terminal_output +4856,3810232,"sample.py",3745,3,"breakpoint",python,content +4857,3811084,"TERMINAL",0,0,"710",,terminal_output +4858,3811112,"sample.py",3755,0,"()",python,content +4859,3811113,"sample.py",3756,0,"",python,selection_keyboard +4860,3811176,"sample.py",3756,1,")",python,content +4861,3811177,"sample.py",3757,0,"",python,selection_keyboard 
+4862,3811520,"sample.py",3756,0,"",python,selection_command +4863,3812136,"TERMINAL",0,0,"82",,terminal_output +4864,3813207,"TERMINAL",0,0,"103",,terminal_output +4865,3814223,"sample.py",3731,27,"",python,content +4866,3814261,"sample.py",3735,0,"",python,selection_command +4867,3814296,"TERMINAL",0,0,"14",,terminal_output +4868,3814452,"sample.py",3729,0,"",python,selection_command +4869,3814887,"sample.py",3730,0,"\n ",python,content +4870,3815260,"sample.py",3735,0,"b",python,content +4871,3815261,"sample.py",3736,0,"",python,selection_keyboard +4872,3815334,"TERMINAL",0,0,"25",,terminal_output +4873,3815380,"sample.py",3736,0,"r",python,content +4874,3815381,"sample.py",3737,0,"",python,selection_keyboard +4875,3815579,"sample.py",3737,0,"e",python,content +4876,3815580,"sample.py",3738,0,"",python,selection_keyboard +4877,3815697,"sample.py",3738,0,"a",python,content +4878,3815698,"sample.py",3739,0,"",python,selection_keyboard +4879,3816087,"sample.py",3735,4,"breakpoint",python,content +4880,3816349,"TERMINAL",0,0,"36",,terminal_output +4881,3816796,"sample.py",3745,0,"()",python,content +4882,3816797,"sample.py",3746,0,"",python,selection_keyboard +4883,3816938,"sample.py",3745,0,"",python,selection_command +4884,3817466,"TERMINAL",0,0,"47",,terminal_output +4885,3818460,"TERMINAL",0,0,"58",,terminal_output +4886,3818737,"TERMINAL",0,0,"[?25l[?2004l\r[?25h",,terminal_output +4887,3818957,"TERMINAL",0,0,"Sampling from checkpoint: /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/3309699/genie_1751384516_74500/\r\n",,terminal_output +4888,3819515,"TERMINAL",0,0,"69",,terminal_output +4889,3820554,"TERMINAL",0,0,"720",,terminal_output +4890,3821665,"TERMINAL",0,0,"81",,terminal_output +4891,3822630,"TERMINAL",0,0,"92",,terminal_output +4892,3823713,"TERMINAL",0,0,"203",,terminal_output +4893,3824704,"genie.py",0,0,"",python,tab +4894,3824892,"TERMINAL",0,0,"14",,terminal_output +4895,3825862,"TERMINAL",0,0,"25",,terminal_output +4896,3826831,"TERMINAL",0,0,"36",,terminal_output +4897,3827910,"TERMINAL",0,0,"47",,terminal_output +4898,3829139,"TERMINAL",0,0,"58",,terminal_output +4899,3830148,"TERMINAL",0,0,"630",,terminal_output +4900,3831297,"TERMINAL",0,0,"81",,terminal_output +4901,3832327,"TERMINAL",0,0,"92",,terminal_output +4902,3833283,"TERMINAL",0,0,"303",,terminal_output +4903,3834362,"TERMINAL",0,0,"14",,terminal_output +4904,3835382,"TERMINAL",0,0,"25",,terminal_output +4905,3836515,"TERMINAL",0,0,"36",,terminal_output +4906,3837537,"TERMINAL",0,0,"47",,terminal_output +4907,3838561,"TERMINAL",0,0,"58",,terminal_output +4908,3839285,"TERMINAL",0,0,"2025-07-03 17:19:36.099653: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +4909,3839585,"TERMINAL",0,0,"69",,terminal_output +4910,3840712,"TERMINAL",0,0,"740",,terminal_output +4911,3841685,"TERMINAL",0,0,"81",,terminal_output +4912,3842760,"TERMINAL",0,0,"92",,terminal_output +4913,3843276,"TERMINAL",0,0,"2025-07-03 17:19:40.176364: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. 
Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +4914,3843885,"TERMINAL",0,0,"403",,terminal_output +4915,3844910,"TERMINAL",0,0,"14",,terminal_output +4916,3845935,"TERMINAL",0,0,"25",,terminal_output +4917,3846967,"TERMINAL",0,0,"36",,terminal_output +4918,3847982,"TERMINAL",0,0,"47",,terminal_output +4919,3849029,"TERMINAL",0,0,"58",,terminal_output +4920,3850138,"TERMINAL",0,0,"69",,terminal_output +4921,3851125,"TERMINAL",0,0,"751",,terminal_output +4922,3852166,"TERMINAL",0,0,"92",,terminal_output +4923,3852488,"TERMINAL",0,0,"2025-07-03 17:19:49.362873: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +4924,3853219,"TERMINAL",0,0,"503",,terminal_output +4925,3854330,"TERMINAL",0,0,"14",,terminal_output +4926,3855354,"TERMINAL",0,0,"25",,terminal_output +4927,3856378,"TERMINAL",0,0,"36",,terminal_output +4928,3857505,"TERMINAL",0,0,"47",,terminal_output +4929,3858530,"TERMINAL",0,0,"58",,terminal_output +4930,3859553,"TERMINAL",0,0,"69",,terminal_output +4931,3860168,"TERMINAL",0,0,"2025-07-03 17:19:57.060066: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +4932,3860577,"TERMINAL",0,0,"74:00",,terminal_output +4933,3861598,"TERMINAL",0,0,"81",,terminal_output +4934,3862728,"TERMINAL",0,0,"92",,terminal_output +4935,3863752,"TERMINAL",0,0,"20:003",,terminal_output +4936,3864879,"TERMINAL",0,0,"14",,terminal_output +4937,3865834,"TERMINAL",0,0,"25",,terminal_output +4938,3866622,"TERMINAL",0,0,"2025-07-03 17:20:03.521564: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +4939,3866871,"TERMINAL",0,0,"36",,terminal_output +4940,3867980,"TERMINAL",0,0,"47",,terminal_output +4941,3869078,"TERMINAL",0,0,"58",,terminal_output +4942,3870021,"TERMINAL",0,0,"69",,terminal_output +4943,3871122,"TERMINAL",0,0,"710",,terminal_output +4944,3872127,"TERMINAL",0,0,"82",,terminal_output +4945,3872987,"TERMINAL",0,0,"2025-07-03 17:20:09.857641: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +4946,3873173,"TERMINAL",0,0,"103",,terminal_output +4947,3874299,"TERMINAL",0,0,"14",,terminal_output +4948,3875328,"TERMINAL",0,0,"25",,terminal_output +4949,3876306,"TERMINAL",0,0,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/orbax/checkpoint/_src/serialization/type_handlers.py:1251: UserWarning: Sharding info not provided when restoring. Populating sharding info from sharding file. 
Please note restoration time will be slightly increased due to reading from file. Note also that this option is unsafe when restoring on a different topology than the checkpoint was saved with.\r\n warnings.warn(\r\n",,terminal_output +4950,3876338,"TERMINAL",0,0,"36",,terminal_output +4951,3877473,"TERMINAL",0,0,"47",,terminal_output +4952,3878497,"TERMINAL",0,0,"58",,terminal_output +4953,3879522,"TERMINAL",0,0,"69",,terminal_output +4954,3880540,"TERMINAL",0,0,"720",,terminal_output +4955,3881590,"TERMINAL",0,0,"81",,terminal_output +4956,3882696,"TERMINAL",0,0,"92",,terminal_output +4957,3883719,"TERMINAL",0,0,"203",,terminal_output +4958,3884353,"TERMINAL",0,0,"2025-07-03 17:20:21.190964: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +4959,3884744,"TERMINAL",0,0,"14",,terminal_output +4960,3885871,"TERMINAL",0,0,"25",,terminal_output +4961,3886836,"TERMINAL",0,0,"36",,terminal_output +4962,3887214,"TERMINAL",0,0,"2025-07-03 17:20:24.085284: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +4963,3887924,"TERMINAL",0,0,"47",,terminal_output +4964,3888939,"TERMINAL",0,0,"58",,terminal_output +4965,3890008,"TERMINAL",0,0,"69",,terminal_output +4966,3891094,"TERMINAL",0,0,"730",,terminal_output +4967,3892104,"TERMINAL",0,0,"81",,terminal_output +4968,3893163,"TERMINAL",0,0,"93",,terminal_output +4969,3894194,"TERMINAL",0,0,"314",,terminal_output +4970,3895291,"TERMINAL",0,0,"25",,terminal_output +4971,3895432,"TERMINAL",0,0,"2025-07-03 17:20:32.278359: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +4972,3896298,"TERMINAL",0,0,"36",,terminal_output +4973,3897107,"TERMINAL",0,0,"token_idxs shape: (1, 1, 920)\r\n",,terminal_output +4974,3897170,"TERMINAL",0,0,"token_idxs shape: (1, 6, 920)\r\n",,terminal_output +4975,3897317,"TERMINAL",0,0,"action_tokens.shape: (1, 5, 1, 32)\r\n",,terminal_output +4976,3897347,"TERMINAL",0,0,"47",,terminal_output +4977,3897467,"TERMINAL",0,0,"mask_expanded.shape: (1, 6, 920, 1)\r\nmask_token.shape: (1, 1, 1, 128)\r\n",,terminal_output +4978,3898390,"TERMINAL",0,0,"58",,terminal_output +4979,3898454,"TERMINAL",0,0,"mask_expanded.shape: (1, 6, 920, 1)\r\nmask_token.shape: (1, 1, 1, 128)\r\n",,terminal_output +4980,3898996,"TERMINAL",0,0,"2025-07-03 17:20:35.772730: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. 
Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +4981,3899490,"TERMINAL",0,0,"69",,terminal_output +4982,3900514,"TERMINAL",0,0,"740",,terminal_output +4983,3901529,"TERMINAL",0,0,"81",,terminal_output +4984,3902664,"TERMINAL",0,0,"92",,terminal_output +4985,3903688,"TERMINAL",0,0,"403",,terminal_output +4986,3904648,"TERMINAL",0,0,"jax.errors.SimplifiedTraceback: For simplicity, JAX has removed its internal frames from the traceback of the following exception. Set JAX_TRACEBACK_FILTERING=off to include these.\r\n\r\nThe above exception was the direct cause of the following exception:\r\n\r\nTraceback (most recent call last):\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/sample.py"", line 159, in \r\n vid = _autoreg_sample_mihir(rng, video_batch, action_batch)\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/sample.py"", line 118, in _autoreg_sample_mihir\r\n new_frames = genie.apply(\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/genie.py"", line 156, in sample_mihir\r\n new_frame_pixels = self.tokenizer.decode(\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/models/tokenizer.py"", line 67, in decode\r\n z = self.vq.codebook[indices]\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/jax/_src/array.py"", line 382, in __getitem__\r\n return indexing.rewriting_take(self, idx)\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/jax/_src/numpy/indexing.py"", line 643, in rewriting_take\r\n treedef, static_idx, dynamic_idx = split_index_for_jit(idx, arr.shape)\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/jax/_src/numpy/indexing.py"", line 736, in split_index_for_jit\r\n idx = _expand_bool_indices(idx, shape)\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/jax/_src/numpy/indexing.py"", line 1065, in _expand_bool_indices\r\n raise IndexError(f""too many boolean indices at index {dim_number}: got mask of shape ""\r\nIndexError: too many boolean indices at index 0: got mask of shape (1, 1, 6, 920), but only 2 dimensions remain.\r\n",,terminal_output +4987,3904671,"TERMINAL",0,0,"14",,terminal_output +4988,3905748,"TERMINAL",0,0,"25",,terminal_output +4989,3906068,"TERMINAL",0,0,"]0;tum_cte0515@hkn0401:~/Projects/jafar[?2004h(jafar) [tum_cte0515@hkn0401 jafar]$ ",,terminal_output +4990,3906832,"TERMINAL",0,0,"36",,terminal_output +4991,3907888,"TERMINAL",0,0,"47",,terminal_output +4992,3908909,"TERMINAL",0,0,"58",,terminal_output +4993,3909970,"TERMINAL",0,0,"69",,terminal_output +4994,3910961,"TERMINAL",0,0,"750",,terminal_output +4995,3912020,"TERMINAL",0,0,"81",,terminal_output +4996,3913050,"TERMINAL",0,0,"92",,terminal_output +4997,3914198,"TERMINAL",0,0,"503",,terminal_output +4998,3915143,"TERMINAL",0,0,"15",,terminal_output +4999,3916284,"TERMINAL",0,0,"36",,terminal_output +5000,3917308,"TERMINAL",0,0,"47",,terminal_output +5001,3918439,"TERMINAL",0,0,"58",,terminal_output +5002,3919457,"TERMINAL",0,0,"69",,terminal_output +5003,3920481,"TERMINAL",0,0,"75:00",,terminal_output +5004,3921505,"TERMINAL",0,0,"81",,terminal_output +5005,3922634,"TERMINAL",0,0,"92",,terminal_output 
+5006,3923765,"TERMINAL",0,0,"1:003",,terminal_output +5007,3924742,"TERMINAL",0,0,"14",,terminal_output +5008,3925787,"TERMINAL",0,0,"25",,terminal_output +5009,3927036,"TERMINAL",0,0,"36",,terminal_output +5010,3928023,"TERMINAL",0,0,"47",,terminal_output +5011,3929082,"TERMINAL",0,0,"58",,terminal_output +5012,3930016,"TERMINAL",0,0,"69",,terminal_output +5013,3931133,"TERMINAL",0,0,"710",,terminal_output +5014,3932360,"TERMINAL",0,0,"82",,terminal_output +5015,3933384,"TERMINAL",0,0,"103",,terminal_output +5016,3934409,"TERMINAL",0,0,"14",,terminal_output +5017,3935389,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/genie.py",0,0,"from typing import Dict, Any\n\nimport optax\nimport jax\nimport jax.numpy as jnp\nimport flax.linen as nn\nfrom jax import NamedSharding\nfrom flax.training.train_state import TrainState\nfrom flax.training import orbax_utils\nfrom orbax.checkpoint import PyTreeCheckpointer\n\nfrom models.dynamics import DynamicsMaskGIT\nfrom models.lam import LatentActionModel\nfrom models.tokenizer import TokenizerVQVAE\n\n\nclass Genie(nn.Module):\n """"""Genie model""""""\n\n # --- Tokenizer ---\n in_dim: int\n tokenizer_dim: int\n latent_patch_dim: int\n num_patch_latents: int\n patch_size: int\n tokenizer_num_blocks: int\n tokenizer_num_heads: int\n # --- LAM ---\n lam_dim: int\n latent_action_dim: int\n num_latent_actions: int\n lam_patch_size: int\n lam_num_blocks: int\n lam_num_heads: int\n # --- Dynamics ---\n dyna_dim: int\n dyna_num_blocks: int\n dyna_num_heads: int\n dropout: float = 0.0\n mask_limit: float = 0.0\n\n def setup(self):\n self.tokenizer = TokenizerVQVAE(\n in_dim=self.in_dim,\n model_dim=self.tokenizer_dim,\n latent_dim=self.latent_patch_dim,\n num_latents=self.num_patch_latents,\n patch_size=self.patch_size,\n num_blocks=self.tokenizer_num_blocks,\n num_heads=self.tokenizer_num_heads,\n dropout=0.0,\n codebook_dropout=0.0,\n )\n self.lam = LatentActionModel(\n in_dim=self.in_dim,\n model_dim=self.lam_dim,\n latent_dim=self.latent_patch_dim,\n num_latents=self.num_latent_actions,\n patch_size=self.lam_patch_size,\n num_blocks=self.lam_num_blocks,\n num_heads=self.lam_num_heads,\n dropout=0.0,\n codebook_dropout=0.0,\n )\n self.dynamics = DynamicsMaskGIT(\n model_dim=self.dyna_dim,\n num_latents=self.num_patch_latents,\n num_blocks=self.dyna_num_blocks,\n num_heads=self.dyna_num_heads,\n dropout=self.dropout,\n mask_limit=self.mask_limit,\n )\n\n def __call__(self, batch: Dict[str, Any], training: bool = True) -> Dict[str, Any]:\n tokenizer_outputs = self.tokenizer.vq_encode(batch[""videos""], training=False)\n lam_outputs = self.lam.vq_encode(batch[""videos""], training=False)\n outputs = dict(\n video_tokens=jax.lax.stop_gradient(tokenizer_outputs[""indices""]),\n latent_actions=jax.lax.stop_gradient(lam_outputs[""z_q""]),\n )\n outputs[""mask_rng""] = batch[""mask_rng""]\n dyna_outputs = self.dynamics(outputs, training)\n outputs.update(dyna_outputs)\n mle_indices = jnp.argmax(outputs[""token_logits""], axis=-1)\n outputs[""recon""] = self.tokenizer.decode(\n mle_indices, batch[""videos""].shape[2:4]\n )\n return outputs\n\n @nn.compact\n def sample_mihir(\n self,\n batch: Dict[str, Any],\n seq_len: int,\n steps: int,\n temperature: float,\n sample_argmax: bool,\n ) -> Any:\n # B == batch_size\n # T == num_frames (input)\n # N == num_patches\n # S == seq_len\n # A == action_space\n # D == latent_dim\n # --- Encode videos and actions ---\n tokenizer_out = self.tokenizer.vq_encode(batch[""videos""], training=False)\n token_idxs = 
tokenizer_out[""indices""] # (B, T, N)\n B, T, N = token_idxs.shape\n S = seq_len\n\n # begin potential forloop (from T to S)\n print(""token_idxs shape:"", token_idxs.shape)\n pad_shape = (B, S - T, N)\n pad = jnp.zeros(pad_shape, dtype=token_idxs.dtype)\n token_idxs = jnp.concatenate([token_idxs, pad], axis=1) # shape (B, S, N)\n\n print(""token_idxs shape:"", token_idxs.shape)\n action_tokens = self.lam.vq.get_codes(batch[""latent_actions""])\n print(""action_tokens.shape:"", action_tokens.shape) # (B, S, A, D)\n\n # Create a mask that is 1 (True) where we just padded\n # token_idxs shape: (B, S, N), T = original length, S = seq_len\n # mask is True for padded positions (i.e., t >= T)\n mask = (jnp.arange(S)[None, :, None] >= T) # shape (1, S, 1)\n mask = jnp.broadcast_to(mask, (B, S, N)) # shape (B, S, N)\n init_mask = mask.astype(bool)\n\n assert init_mask.shape == (B, S, N), ""Wrong mask shape""\n\n # --- Initialize MaskGIT ---\n init_carry = (\n batch[""rng""],\n init_mask,\n token_idxs,\n action_tokens,\n )\n MaskGITLoop = nn.scan(\n MaskGITStepMihir,\n variable_broadcast=""params"",\n split_rngs={""params"": False},\n in_axes=0,\n out_axes=0,\n length=steps,\n )\n\n # --- Run MaskGIT loop ---\n loop_fn = MaskGITLoop(\n dynamics=self.dynamics,\n tokenizer=self.tokenizer,\n temperature=temperature,\n sample_argmax=sample_argmax,\n steps=steps,\n )\n final_carry, _ = loop_fn(init_carry, jnp.arange(steps))\n new_frame_idxs = final_carry[1]\n new_frame_pixels = self.tokenizer.decode(\n jnp.expand_dims(new_frame_idxs, 1),\n video_hw=batch[""videos""].shape[2:4],\n )\n return new_frame_pixels\n\n @nn.compact\n def sample(\n self,\n batch: Dict[str, Any],\n steps: int = 25,\n temperature: int = 1,\n sample_argmax: bool = False,\n ) -> Any:\n # B == batch_size\n # T == num_frames (input)\n # N == num_patches\n # S == seq_len\n # A == action_space\n # D == latent_dim\n # --- Encode videos and actions ---\n tokenizer_out = self.tokenizer.vq_encode(batch[""videos""], training=False)\n token_idxs = tokenizer_out[""indices""]# (B, T, N)\n new_frame_idxs = jnp.zeros_like(token_idxs)[:, 0] # (B, N) \n action_tokens = self.lam.vq.get_codes(batch[""latent_actions""]) # (B, S, A, D)\n\n # --- Initialize MaskGIT ---\n init_mask = jnp.ones_like(token_idxs, dtype=bool)[:, 0] # (B, N)\n init_carry = (\n batch[""rng""],\n new_frame_idxs,\n init_mask,\n token_idxs,\n action_tokens,\n )\n MaskGITLoop = nn.scan(\n MaskGITStep,\n variable_broadcast=""params"",\n split_rngs={""params"": False},\n in_axes=0,\n out_axes=0,\n length=steps,\n )\n\n # --- Run MaskGIT loop ---\n loop_fn = MaskGITLoop(\n dynamics=self.dynamics,\n tokenizer=self.tokenizer,\n temperature=temperature,\n sample_argmax=sample_argmax,\n steps=steps,\n )\n final_carry, _ = loop_fn(init_carry, jnp.arange(steps))\n new_frame_idxs = final_carry[1]\n new_frame_pixels = self.tokenizer.decode(\n jnp.expand_dims(new_frame_idxs, 1),\n video_hw=batch[""videos""].shape[2:4],\n )\n return new_frame_pixels\n\n def vq_encode(self, batch, training) -> Dict[str, Any]:\n # --- Preprocess videos ---\n lam_output = self.lam.vq_encode(batch[""videos""], training=training)\n return lam_output[""indices""]\n\n\nclass MaskGITStepMihir(nn.Module):\n dynamics: nn.Module\n tokenizer: nn.Module\n temperature: float\n sample_argmax: bool\n steps: int\n\n @nn.compact\n def __call__(self, carry, x):\n rng, mask, token_idxs, action_tokens = carry\n step = x\n B, S, N = token_idxs.shape[:3]\n\n # --- Construct + encode video ---\n vid_token_idxs = token_idxs\n vid_embed 
= self.dynamics.patch_embed(vid_token_idxs)\n # Mask vid_embed: set to mask_token where mask==1, else keep vid_embed\n # mask: (B, S, N), vid_embed: (B, S, N, D), mask_token: (D,)\n mask_token = self.dynamics.mask_token # (1,1, 1, D,)\n # Expand mask to (B, S, N, 1) for broadcasting\n mask_expanded = mask[..., None]\n print(f""mask_expanded.shape: {mask_expanded.shape}"")\n print(f""mask_token.shape: {mask_token.shape}"")\n vid_embed = jnp.where(mask_expanded, mask_token, vid_embed)\n\n # --- Predict transition ---\n act_embed = self.dynamics.action_up(action_tokens)\n vid_embed += jnp.pad(act_embed, ((0, 0), (1, 0), (0, 0), (0, 0)))\n unmasked_ratio = jnp.cos(jnp.pi * (step + 1) / (self.steps * 2))\n step_temp = self.temperature * (1.0 - unmasked_ratio)\n final_logits = self.dynamics.dynamics(vid_embed)[:, -1] / step_temp\n\n # --- Sample new tokens for final frame ---\n if self.sample_argmax:\n sampled_token_idxs = jnp.argmax(final_logits, axis=-1)\n else:\n rng, _rng = jax.random.split(rng)\n sampled_token_idxs = jnp.where(\n step == self.steps - 1,\n jnp.argmax(final_logits, axis=-1),\n jax.random.categorical(_rng, final_logits),\n )\n gather_fn = jax.vmap(jax.vmap(lambda x, y: x[y]))\n final_token_probs = gather_fn(jax.nn.softmax(final_logits), sampled_token_idxs)\n final_token_probs += ~mask\n # Update masked tokens only\n token_idxs = jnp.where(mask, sampled_token_idxs, token_idxs)\n\n # --- Update mask ---\n num_unmasked_tokens = jnp.round(N * (1.0 - unmasked_ratio)).astype(int)\n idx_mask = jnp.arange(final_token_probs.shape[-1]) > num_unmasked_tokens\n sorted_idxs = jnp.argsort(final_token_probs, axis=-1, descending=True)\n mask_update_fn = jax.vmap(lambda msk, ids: msk.at[ids].set(idx_mask))\n new_mask = mask_update_fn(mask, sorted_idxs)\n\n new_carry = (rng, new_mask, token_idxs, action_tokens)\n return new_carry, None\n\n\nclass MaskGITStep(nn.Module):\n dynamics: nn.Module\n tokenizer: nn.Module\n temperature: float\n sample_argmax: bool\n steps: int\n\n @nn.compact\n def __call__(self, carry, x):\n rng, final_token_idxs, mask, token_idxs, action_tokens = carry\n step = x\n B, T, N = token_idxs.shape[:3]\n\n # --- Construct + encode video ---\n vid_token_idxs = jnp.concatenate(\n (token_idxs, jnp.expand_dims(final_token_idxs, 1)), axis=1\n ) # (B, T+1, N)\n vid_embed = self.dynamics.patch_embed(vid_token_idxs) # (B, T+1, N, D)\n curr_masked_frame = jnp.where(\n jnp.expand_dims(mask, -1), # (B, N, 1)\n self.dynamics.mask_token[0], # (B, 1, D)\n vid_embed[:, -1], # (B, N, D)\n ) # (B, N, D)\n vid_embed = vid_embed.at[:, -1].set(curr_masked_frame)\n\n # --- Predict transition ---\n act_embed = self.dynamics.action_up(action_tokens)\n vid_embed += jnp.pad(act_embed, ((0, 0), (1, 0), (0, 0), (0, 0)))\n unmasked_ratio = jnp.cos(jnp.pi * (step + 1) / (self.steps * 2))\n step_temp = self.temperature * (1.0 - unmasked_ratio)\n final_logits = self.dynamics.dynamics(vid_embed)[:, -1] / step_temp\n\n # --- Sample new tokens for final frame ---\n if self.sample_argmax:\n sampled_token_idxs = jnp.argmax(final_logits, axis=-1)\n else:\n rng, _rng = jax.random.split(rng)\n sampled_token_idxs = jnp.where(\n step == self.steps - 1,\n jnp.argmax(final_logits, axis=-1),\n jax.random.categorical(_rng, final_logits),\n )\n gather_fn = jax.vmap(jax.vmap(lambda x, y: x[y]))\n final_token_probs = gather_fn(jax.nn.softmax(final_logits), sampled_token_idxs)\n final_token_probs += ~mask\n # Update masked tokens only\n new_token_idxs = jnp.where(mask, sampled_token_idxs, final_token_idxs)\n\n # --- 
Update mask ---\n num_unmasked_tokens = jnp.round(N * (1.0 - unmasked_ratio)).astype(int)\n idx_mask = jnp.arange(final_token_probs.shape[-1]) > num_unmasked_tokens\n sorted_idxs = jnp.argsort(final_token_probs, axis=-1, descending=True)\n mask_update_fn = jax.vmap(lambda msk, ids: msk.at[ids].set(idx_mask))\n new_mask = mask_update_fn(mask, sorted_idxs)\n\n new_carry = (rng, new_token_idxs, new_mask, token_idxs, action_tokens)\n return new_carry, None\n\n\ndef restore_genie_components(\n train_state: TrainState,\n sharding: NamedSharding,\n inputs: Dict[str, jax.Array],\n rng: jax.Array,\n args,\n):\n """"""Restore pre-trained Genie components""""""\n rng, _rng = jax.random.split(rng)\n\n dummy_tokenizer = TokenizerVQVAE(\n in_dim=args.image_channels,\n model_dim=args.tokenizer_dim,\n latent_dim=args.latent_patch_dim,\n num_latents=args.num_patch_latents,\n patch_size=args.patch_size,\n num_blocks=args.tokenizer_num_blocks,\n num_heads=args.tokenizer_num_heads,\n dropout=args.dropout,\n codebook_dropout=args.dropout,\n )\n dummy_lam = LatentActionModel(\n in_dim=args.image_channels,\n model_dim=args.lam_dim,\n latent_dim=args.latent_patch_dim,\n num_latents=args.num_latent_actions,\n patch_size=args.lam_patch_size,\n num_blocks=args.lam_num_blocks,\n num_heads=args.lam_num_heads,\n dropout=args.dropout,\n codebook_dropout=args.dropout,\n )\n tokenizer_init_params = dummy_tokenizer.init(_rng, inputs)\n lam_init_params = dummy_lam.init(_rng, inputs)\n\n # dummy values since we only use tx to initialize the dummy train states\n dummy_tx = optax.adamw(\n learning_rate=optax.constant_schedule(args.max_lr),\n b1=0.9,\n b2=0.9,\n weight_decay=1e-4,\n )\n\n dummy_tokenizer_train_state = TrainState.create(\n apply_fn=dummy_tokenizer.apply, params=tokenizer_init_params, tx=dummy_tx\n )\n dummy_lam_train_state = TrainState.create(\n apply_fn=dummy_lam.apply, params=lam_init_params, tx=dummy_tx\n )\n\n def create_abstract_sharded_pytree(pytree_template, sharding_spec):\n """"""Replaces arrays in a pytree with ShapeDtypeStructs having the given sharding.""""""\n\n def map_fn(leaf_template):\n if hasattr(leaf_template, ""shape"") and hasattr(leaf_template, ""dtype""):\n return jax.ShapeDtypeStruct(\n leaf_template.shape, leaf_template.dtype, sharding=sharding_spec\n )\n return leaf_template\n\n return jax.tree_util.tree_map(map_fn, pytree_template)\n\n abstract_sharded_tokenizer_state = create_abstract_sharded_pytree(\n dummy_tokenizer_train_state, sharding\n )\n abstract_sharded_lam_state = create_abstract_sharded_pytree(\n dummy_lam_train_state, sharding\n )\n\n tokenizer_restore_target = {""model"": abstract_sharded_tokenizer_state}\n lam_restore_target = {""model"": abstract_sharded_lam_state}\n\n tokenizer_restore_args = orbax_utils.restore_args_from_target(\n tokenizer_restore_target\n )\n lam_restore_args = orbax_utils.restore_args_from_target(lam_restore_target)\n\n restored_tokenizer_params = (\n PyTreeCheckpointer()\n .restore(\n args.tokenizer_checkpoint,\n item=tokenizer_restore_target,\n restore_args=tokenizer_restore_args,\n )[""model""]\n .params[""params""]\n )\n restored_lam_params = (\n PyTreeCheckpointer()\n .restore(\n args.lam_checkpoint, item=lam_restore_target, restore_args=lam_restore_args\n )[""model""]\n .params[""params""]\n )\n # Genie does not initialize all LAM modules, thus we omit those extra modules during restoration\n # (f.srambical) FIXME: Currently, this is a small HBM memory crunch since the LAM's decoder is loaded into HBM and immediately dicarded.\n # A 
workaround would be to restore to host memory first, and only move the weights to HBM after pruning the decoder\n restored_lam_params = {\n k: v\n for k, v in restored_lam_params.items()\n if k in train_state.params[""params""][""lam""]\n }\n\n train_state.params[""params""][""tokenizer""].update(restored_tokenizer_params)\n train_state.params[""params""][""lam""].update(restored_lam_params)\n\n return train_state\n",python,tab +5018,3935702,"TERMINAL",0,0,"25",,terminal_output +5019,3936455,"TERMINAL",0,0,"36",,terminal_output +5020,3937503,"TERMINAL",0,0,"47",,terminal_output +5021,3938606,"TERMINAL",0,0,"58",,terminal_output +5022,3939595,"TERMINAL",0,0,"69",,terminal_output +5023,3940647,"TERMINAL",0,0,"720",,terminal_output +5024,3941717,"TERMINAL",0,0,"81",,terminal_output +5025,3942912,"TERMINAL",0,0,"92",,terminal_output +5026,3943829,"TERMINAL",0,0,"203",,terminal_output +5027,3944956,"TERMINAL",0,0,"14",,terminal_output +5028,3945125,"genie.py",0,0,"",python,tab +5029,3945126,"genie.py",5019,0,"",python,selection_mouse +5030,3945210,"genie.py",5018,0,"",python,selection_command +5031,3945894,"genie.py",5111,0,"",python,selection_mouse +5032,3945954,"TERMINAL",0,0,"25",,terminal_output +5033,3946407,"genie.py",5019,0,"",python,selection_mouse +5034,3946413,"genie.py",5018,0,"",python,selection_command +5035,3946916,"genie.py",5051,0,"",python,selection_mouse +5036,3946975,"TERMINAL",0,0,"36",,terminal_output +5037,3947999,"TERMINAL",0,0,"47",,terminal_output +5038,3949054,"TERMINAL",0,0,"58",,terminal_output +5039,3950105,"TERMINAL",0,0,"69",,terminal_output +5040,3951140,"TERMINAL",0,0,"731",,terminal_output +5041,3952179,"TERMINAL",0,0,"92",,terminal_output +5042,3953966,"TERMINAL",0,0,"303",,terminal_output +5043,3955094,"TERMINAL",0,0,"14",,terminal_output +5044,3956117,"TERMINAL",0,0,"25",,terminal_output +5045,3957086,"TERMINAL",0,0,"36",,terminal_output +5046,3957217,"genie.py",5123,0,"",python,selection_mouse +5047,3957230,"genie.py",5122,0,"",python,selection_command +5048,3958164,"TERMINAL",0,0,"48",,terminal_output +5049,3958812,"genie.py",5123,0,"\n ",python,content +5050,3959184,"TERMINAL",0,0,"69",,terminal_output +5051,3959474,"genie.py",5132,0,"j",python,content +5052,3959475,"genie.py",5133,0,"",python,selection_keyboard +5053,3959548,"genie.py",5133,0,"a",python,content +5054,3959549,"genie.py",5134,0,"",python,selection_keyboard +5055,3960051,"genie.py",5133,1,"",python,content +5056,3960172,"genie.py",5132,1,"",python,content +5057,3960252,"TERMINAL",0,0,"740",,terminal_output +5058,3960404,"genie.py",5132,0,"b",python,content +5059,3960405,"genie.py",5133,0,"",python,selection_keyboard +5060,3960507,"genie.py",5133,0,"r",python,content +5061,3960508,"genie.py",5134,0,"",python,selection_keyboard +5062,3960677,"genie.py",5134,0,"e",python,content +5063,3960680,"genie.py",5135,0,"",python,selection_keyboard +5064,3960895,"genie.py",5135,0,"a",python,content +5065,3960896,"genie.py",5136,0,"",python,selection_keyboard +5066,3961300,"TERMINAL",0,0,"81",,terminal_output +5067,3961662,"genie.py",5132,4,"",python,content +5068,3961883,"genie.py",5132,0,"k",python,content +5069,3961884,"genie.py",5133,0,"",python,selection_keyboard +5070,3962378,"genie.py",5132,1,"",python,content +5071,3962380,"TERMINAL",0,0,"92",,terminal_output +5072,3962499,"genie.py",5132,0,"j",python,content +5073,3962500,"genie.py",5133,0,"",python,selection_keyboard +5074,3962555,"genie.py",5133,0,"a",python,content +5075,3962556,"genie.py",5134,0,"",python,selection_keyboard 
+5076,3963440,"TERMINAL",0,0,"403",,terminal_output +5077,3963588,"genie.py",5134,0,"x",python,content +5078,3963589,"genie.py",5135,0,"",python,selection_keyboard +5079,3964435,"TERMINAL",0,0,"14",,terminal_output +5080,3964486,"genie.py",5135,0,".",python,content +5081,3964487,"genie.py",5136,0,"",python,selection_keyboard +5082,3964723,"genie.py",5136,0,"d",python,content +5083,3964724,"genie.py",5137,0,"",python,selection_keyboard +5084,3964883,"genie.py",5137,0,"e",python,content +5085,3964884,"genie.py",5138,0,"",python,selection_keyboard +5086,3964952,"genie.py",5138,0,"b",python,content +5087,3964953,"genie.py",5139,0,"",python,selection_keyboard +5088,3965073,"genie.py",5139,0,"u",python,content +5089,3965074,"genie.py",5140,0,"",python,selection_keyboard +5090,3965200,"genie.py",5140,0,"g",python,content +5091,3965200,"genie.py",5141,0,"",python,selection_keyboard +5092,3965298,"genie.py",5141,0,".",python,content +5093,3965299,"genie.py",5142,0,"",python,selection_keyboard +5094,3965608,"genie.py",5142,0,"b",python,content +5095,3965609,"genie.py",5143,0,"",python,selection_keyboard +5096,3965609,"genie.py",5143,0,"r",python,content +5097,3965609,"genie.py",5144,0,"",python,selection_keyboard +5098,3965609,"TERMINAL",0,0,"25",,terminal_output +5099,3965760,"genie.py",5144,0,"e",python,content +5100,3965761,"genie.py",5145,0,"",python,selection_keyboard +5101,3965927,"genie.py",5145,0,"a",python,content +5102,3965929,"genie.py",5146,0,"",python,selection_keyboard +5103,3966171,"genie.py",5142,4,"breakpoint",python,content +5104,3966539,"TERMINAL",0,0,"36",,terminal_output +5105,3966832,"genie.py",5152,0,"()",python,content +5106,3966832,"genie.py",5153,0,"",python,selection_keyboard +5107,3966890,"genie.py",5153,1,")",python,content +5108,3966891,"genie.py",5154,0,"",python,selection_keyboard +5109,3966996,"genie.py",5153,0,"",python,selection_command +5110,3967619,"TERMINAL",0,0,"47",,terminal_output +5111,3968373,"TERMINAL",0,0,"sh scripts_horeka/overfit_sample_tiny/sample.sh ",,terminal_output +5112,3968462,"TERMINAL",0,0,"\r\n[?2004l\r",,terminal_output +5113,3968610,"TERMINAL",0,0,"Sampling from checkpoint: /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/3309699/genie_1751384516_74500/\r\n",,terminal_output +5114,3968643,"TERMINAL",0,0,"58",,terminal_output +5115,3969737,"TERMINAL",0,0,"69",,terminal_output +5116,3970803,"TERMINAL",0,0,"750",,terminal_output +5117,3971375,"TERMINAL",0,0,"2025-07-03 17:21:48.265640: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +5118,3971785,"TERMINAL",0,0,"81",,terminal_output +5119,3972809,"TERMINAL",0,0,"92",,terminal_output +5120,3973852,"TERMINAL",0,0,"503",,terminal_output +5121,3974959,"TERMINAL",0,0,"14",,terminal_output +5122,3975574,"TERMINAL",0,0,"2025-07-03 17:21:52.452329: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. 
Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +5123,3975984,"TERMINAL",0,0,"25",,terminal_output +5124,3977008,"TERMINAL",0,0,"36",,terminal_output +5125,3978218,"TERMINAL",0,0,"47",,terminal_output +5126,3979088,"TERMINAL",0,0,"58",,terminal_output +5127,3980134,"TERMINAL",0,0,"66:00",,terminal_output +5128,3981189,"TERMINAL",0,0,"81",,terminal_output +5129,3982264,"TERMINAL",0,0,"92",,terminal_output +5130,3983305,"TERMINAL",0,0,"2:003",,terminal_output +5131,3984379,"TERMINAL",0,0,"14",,terminal_output +5132,3984497,"TERMINAL",0,0,"2025-07-03 17:22:01.398161: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +5133,3985384,"TERMINAL",0,0,"25",,terminal_output +5134,3986430,"TERMINAL",0,0,"36",,terminal_output +5135,3987527,"TERMINAL",0,0,"47",,terminal_output +5136,3987867,"genie.py",0,0,"",python,tab +5137,3987867,"genie.py",9710,0,"",python,selection_mouse +5138,3988176,"genie.py",9741,0,"",python,selection_mouse +5139,3988338,"genie.py",9737,8,"new_mask",python,selection_mouse +5140,3988576,"TERMINAL",0,0,"58",,terminal_output +5141,3989210,"genie.py",9753,0,"",python,selection_mouse +5142,3989518,"genie.py",9747,10,"token_idxs",python,selection_mouse +5143,3989629,"TERMINAL",0,0,"69",,terminal_output +5144,3990691,"TERMINAL",0,0,"710",,terminal_output +5145,3991726,"TERMINAL",0,0,"81",,terminal_output +5146,3992168,"TERMINAL",0,0,"2025-07-03 17:22:09.067495: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +5147,3992766,"TERMINAL",0,0,"92",,terminal_output +5148,3993836,"genie.py",5121,0,"",python,selection_mouse +5149,3993907,"TERMINAL",0,0,"103",,terminal_output +5150,3994928,"TERMINAL",0,0,"14",,terminal_output +5151,3995078,"genie.py",5121,1,"2",python,content +5152,3995915,"TERMINAL",0,0,"25",,terminal_output +5153,3996046,"genie.py",5154,0,"",python,selection_mouse +5154,3996048,"genie.py",5153,0,"",python,selection_command +5155,3996975,"TERMINAL",0,0,"36",,terminal_output +5156,3998053,"TERMINAL",0,0,"47",,terminal_output +5157,3998922,"TERMINAL",0,0,"2025-07-03 17:22:15.806061: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +5158,3999055,"TERMINAL",0,0,"58",,terminal_output +5159,4000091,"TERMINAL",0,0,"69",,terminal_output +5160,4001127,"TERMINAL",0,0,"721",,terminal_output +5161,4002176,"TERMINAL",0,0,"92",,terminal_output +5162,4003325,"TERMINAL",0,0,"203",,terminal_output +5163,4004450,"TERMINAL",0,0,"14",,terminal_output +5164,4005121,"TERMINAL",0,0,"2025-07-03 17:22:22.021030: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. 
Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +5165,4005475,"TERMINAL",0,0,"25",,terminal_output +5166,4006499,"TERMINAL",0,0,"36",,terminal_output +5167,4007523,"TERMINAL",0,0,"47",,terminal_output +5168,4008551,"TERMINAL",0,0,"58",,terminal_output +5169,4008566,"TERMINAL",0,0,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/orbax/checkpoint/_src/serialization/type_handlers.py:1251: UserWarning: Sharding info not provided when restoring. Populating sharding info from sharding file. Please note restoration time will be slightly increased due to reading from file. Note also that this option is unsafe when restoring on a different topology than the checkpoint was saved with.\r\n warnings.warn(\r\n",,terminal_output +5170,4008806,"genie.py",5238,0,"",python,selection_mouse +5171,4009672,"TERMINAL",0,0,"69",,terminal_output +5172,4010697,"TERMINAL",0,0,"730",,terminal_output +5173,4011720,"TERMINAL",0,0,"81",,terminal_output +5174,4012758,"TERMINAL",0,0,"92",,terminal_output +5175,4013771,"TERMINAL",0,0,"303",,terminal_output +5176,4014806,"TERMINAL",0,0,"14",,terminal_output +5177,4015921,"TERMINAL",0,0,"25",,terminal_output +5178,4016486,"TERMINAL",0,0,"2025-07-03 17:22:33.387304: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +5179,4016946,"TERMINAL",0,0,"36",,terminal_output +5180,4017990,"TERMINAL",0,0,"47",,terminal_output +5181,4018993,"TERMINAL",0,0,"58",,terminal_output +5182,4019504,"TERMINAL",0,0,"2025-07-03 17:22:36.320349: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +5183,4020028,"TERMINAL",0,0,"69",,terminal_output +5184,4021144,"TERMINAL",0,0,"740",,terminal_output +5185,4022110,"TERMINAL",0,0,"82",,terminal_output +5186,4023180,"TERMINAL",0,0,"403",,terminal_output +5187,4024231,"TERMINAL",0,0,"14",,terminal_output +5188,4025278,"TERMINAL",0,0,"25",,terminal_output +5189,4026365,"TERMINAL",0,0,"36",,terminal_output +5190,4027389,"TERMINAL",0,0,"47",,terminal_output +5191,4027584,"TERMINAL",0,0,"2025-07-03 17:22:44.484198: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. 
Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +5192,4028514,"TERMINAL",0,0,"58",,terminal_output +5193,4029159,"TERMINAL",0,0,"token_idxs shape: (1, 1, 920)\r\ntoken_idxs shape: (1, 6, 920)\r\n",,terminal_output +5194,4029384,"TERMINAL",0,0,"action_tokens.shape: (1, 5, 1, 32)\r\n",,terminal_output +5195,4029501,"TERMINAL",0,0,"69",,terminal_output +5196,4029501,"TERMINAL",0,0,"mask_expanded.shape: (1, 6, 920, 1)\r\nmask_token.shape: (1, 1, 1, 128)\r\n",,terminal_output +5197,4030511,"TERMINAL",0,0,"750",,terminal_output +5198,4030521,"TERMINAL",0,0,"mask_expanded.shape: (1, 6, 920, 1)\r\nmask_token.shape: (1, 1, 1, 128)\r\n",,terminal_output +5199,4030972,"TERMINAL",0,0,"2025-07-03 17:22:47.850482: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +5200,4031589,"TERMINAL",0,0,"81",,terminal_output +5201,4032714,"TERMINAL",0,0,"92",,terminal_output +5202,4033737,"TERMINAL",0,0,"503",,terminal_output +5203,4034660,"genie.py",5376,0,"",python,selection_mouse +5204,4034668,"genie.py",5375,0,"",python,selection_command +5205,4034718,"TERMINAL",0,0,"14",,terminal_output +5206,4035205,"genie.py",5343,0,"",python,selection_mouse +5207,4035230,"genie.py",5342,0,"",python,selection_command +5208,4035788,"TERMINAL",0,0,"25",,terminal_output +5209,4036708,"TERMINAL",0,0,"Entering jdb:\r\n(jdb) ",,terminal_output +5210,4036816,"TERMINAL",0,0,"36",,terminal_output +5211,4037980,"TERMINAL",0,0,"47",,terminal_output +5212,4038013,"genie.py",5249,0,"",python,selection_mouse +5213,4038896,"genie.py",5250,0,"",python,selection_command +5214,4038907,"TERMINAL",0,0,"58",,terminal_output +5215,4039298,"genie.py",5249,1,"",python,content +5216,4039627,"genie.py",5248,1,"",python,content +5217,4039740,"genie.py",5247,1,"",python,content +5218,4039966,"TERMINAL",0,0,"69",,terminal_output +5219,4041056,"TERMINAL",0,0,"77:00",,terminal_output +5220,4041240,"genie.py",5225,0,"",python,selection_mouse +5221,4041369,"genie.py",5221,11,"expand_dims",python,selection_mouse +5222,4041674,"genie.py",5221,11,"",python,content +5223,4042059,"TERMINAL",0,0,"81",,terminal_output +5224,4042221,"genie.py",5220,1,"",python,content +5225,4042693,"genie.py",5219,1,"",python,content +5226,4043139,"TERMINAL",0,0,"93",,terminal_output +5227,4043553,"genie.py",5218,1,"",python,content +5228,4044006,"genie.py",5217,1,"",python,content +5229,4044209,"TERMINAL",0,0,"3:014",,terminal_output +5230,4045154,"genie.py",5217,1,"",python,content +5231,4045225,"TERMINAL",0,0,"25",,terminal_output +5232,4045495,"genie.py",5217,1,"n",python,selection_keyboard +5233,4045722,"genie.py",5217,2,"ne",python,selection_keyboard +5234,4045925,"genie.py",5217,3,"new",python,selection_keyboard +5235,4046260,"TERMINAL",0,0,"36",,terminal_output +5236,4047303,"TERMINAL",0,0,"47",,terminal_output +5237,4048382,"TERMINAL",0,0,"58",,terminal_output +5238,4048491,"genie.py",5231,1,"",python,content +5239,4049387,"TERMINAL",0,0,"69",,terminal_output +5240,4049928,"genie.py",5291,0,"",python,selection_mouse +5241,4050432,"TERMINAL",0,0,"710",,terminal_output +5242,4051556,"TERMINAL",0,0,"81",,terminal_output +5243,4052625,"TERMINAL",0,0,"92",,terminal_output 
+5244,4053604,"TERMINAL",0,0,"103",,terminal_output +5245,4054611,"TERMINAL",0,0,"14",,terminal_output +5246,4055721,"TERMINAL",0,0,"25",,terminal_output +5247,4056784,"TERMINAL",0,0,"36",,terminal_output +5248,4057801,"TERMINAL",0,0,"47",,terminal_output +5249,4058828,"TERMINAL",0,0,"58",,terminal_output +5250,4059953,"TERMINAL",0,0,"69",,terminal_output +5251,4060127,"TERMINAL",0,0,"l",,terminal_output +5252,4060348,"TERMINAL",0,0,"\r\n> /hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/genie.py(156)\r\n temperature=temperature,\r\n sample_argmax=sample_argmax,\r\n steps=steps,\r\n )\r\n final_carry, _ = loop_fn(init_carry, jnp.arange(steps))\r\n new_frame_idxs = final_carry[2]\r\n-> jax.debug.breakpoint()\r\n new_frame_pixels = self.tokenizer.decode(\r\n jnp.expand_dims(new_frame_idxs, 1),\r\n video_hw=batch[""videos""].shape[2:4],\r\n )\r\n return new_frame_pixels\r\n(jdb) ",,terminal_output +5253,4060976,"TERMINAL",0,0,"720",,terminal_output +5254,4062001,"TERMINAL",0,0,"81",,terminal_output +5255,4063006,"TERMINAL",0,0,"92",,terminal_output +5256,4064055,"TERMINAL",0,0,"203",,terminal_output +5257,4065175,"TERMINAL",0,0,"15",,terminal_output +5258,4066164,"TERMINAL",0,0,"36",,terminal_output +5259,4066966,"TERMINAL",0,0,"n",,terminal_output +5260,4067121,"TERMINAL",0,0,"[?25le[?25h",,terminal_output +5261,4067283,"TERMINAL",0,0,"[?25lw[?25h",,terminal_output +5262,4067299,"TERMINAL",0,0,"47",,terminal_output +5263,4067526,"TERMINAL",0,0,"[?25l_[?25h",,terminal_output +5264,4067754,"TERMINAL",0,0,"[?25lf[?25h",,terminal_output +5265,4067903,"TERMINAL",0,0,"[?25lr[?25h",,terminal_output +5266,4068379,"TERMINAL",0,0,"[?25le[?25h",,terminal_output +5267,4068391,"TERMINAL",0,0,"58",,terminal_output +5268,4068547,"TERMINAL",0,0,"[?25la[?25h",,terminal_output +5269,4068608,"TERMINAL",0,0,"[?25lm[?25h",,terminal_output +5270,4068730,"TERMINAL",0,0,"[?25le[?25h",,terminal_output +5271,4069215,"TERMINAL",0,0,"[?25le\r[?25h",,terminal_output +5272,4069347,"TERMINAL",0,0,"[?25lm\r[?25h",,terminal_output +5273,4069488,"TERMINAL",0,0,"[?25la\r[?25h",,terminal_output +5274,4069690,"TERMINAL",0,0,"[?25lm[?25h",,terminal_output +5275,4069801,"TERMINAL",0,0,"69",,terminal_output +5276,4070235,"TERMINAL",0,0,"[?25lm\r[?25h",,terminal_output +5277,4070298,"TERMINAL",0,0,"[?25le\r[?25h",,terminal_output +5278,4070409,"TERMINAL",0,0,"[?25la[?25h",,terminal_output +5279,4070517,"TERMINAL",0,0,"[?25lm[?25h",,terminal_output +5280,4070581,"TERMINAL",0,0,"[?25le[?25h",,terminal_output +5281,4070778,"TERMINAL",0,0,"[?25l_[?25h",,terminal_output +5282,4070884,"TERMINAL",0,0,"730",,terminal_output +5283,4071090,"TERMINAL",0,0,"[?25li[?25h",,terminal_output +5284,4071153,"TERMINAL",0,0,"[?25ld[?25h",,terminal_output +5285,4071927,"TERMINAL",0,0,"[?25lx[?25h",,terminal_output +5286,4071927,"TERMINAL",0,0,"81",,terminal_output +5287,4072047,"TERMINAL",0,0,"[?25ls[?25h",,terminal_output +5288,4072309,"TERMINAL",0,0,"[?25l.[?25h",,terminal_output +5289,4072517,"TERMINAL",0,0,"[?25ls[?25h",,terminal_output +5290,4072580,"TERMINAL",0,0,"[?25lh[?25h",,terminal_output +5291,4072727,"TERMINAL",0,0,"[?25la[?25h",,terminal_output +5292,4072799,"TERMINAL",0,0,"[?25lp[?25h",,terminal_output +5293,4072861,"TERMINAL",0,0,"[?25le[?25h",,terminal_output +5294,4073000,"TERMINAL",0,0,"\r\n(1, 6, 920)\r\n(jdb) ",,terminal_output +5295,4073005,"TERMINAL",0,0,"92",,terminal_output +5296,4074085,"TERMINAL",0,0,"303",,terminal_output +5297,4075107,"TERMINAL",0,0,"14",,terminal_output 
+5298,4076133,"TERMINAL",0,0,"26",,terminal_output +5299,4076290,"TERMINAL",0,0,"f",,terminal_output +5300,4076352,"TERMINAL",0,0,"[?25li[?25h",,terminal_output +5301,4076416,"TERMINAL",0,0,"[?25ln[?25h",,terminal_output +5302,4076476,"TERMINAL",0,0,"[?25la[?25h",,terminal_output +5303,4076625,"TERMINAL",0,0,"[?25ll[?25h",,terminal_output +5304,4076896,"TERMINAL",0,0,"[?25l_[?25h",,terminal_output +5305,4077107,"TERMINAL",0,0,"[?25lc[?25h",,terminal_output +5306,4077158,"TERMINAL",0,0,"47",,terminal_output +5307,4077630,"TERMINAL",0,0,"[?25la[?25h",,terminal_output +5308,4077834,"TERMINAL",0,0,"[?25lr[?25h",,terminal_output +5309,4077994,"TERMINAL",0,0,"[?25lr[?25h",,terminal_output +5310,4078176,"TERMINAL",0,0,"[?25ly[?25h",,terminal_output +5311,4078236,"TERMINAL",0,0,"58",,terminal_output +5312,4079094,"TERMINAL",0,0,"[?25l[[?25h",,terminal_output +5313,4079254,"TERMINAL",0,0,"69",,terminal_output +5314,4079513,"TERMINAL",0,0,"[?25l1[?25h",,terminal_output +5315,4080330,"TERMINAL",0,0,"740",,terminal_output +5316,4080540,"TERMINAL",0,0,"[?25l][?25h",,terminal_output +5317,4080780,"TERMINAL",0,0,"[?25l.[?25h",,terminal_output +5318,4080841,"TERMINAL",0,0,"[?25ls[?25h",,terminal_output +5319,4081215,"TERMINAL",0,0,"[?25lh[?25h",,terminal_output +5320,4081333,"TERMINAL",0,0,"[?25la[?25h",,terminal_output +5321,4081343,"TERMINAL",0,0,"81",,terminal_output +5322,4081484,"TERMINAL",0,0,"[?25lp[?25h",,terminal_output +5323,4081622,"TERMINAL",0,0,"[?25le[?25h",,terminal_output +5324,4081897,"TERMINAL",0,0,"\r\n(1, 6, 920)\r\n(jdb) ",,terminal_output +5325,4082411,"TERMINAL",0,0,"92",,terminal_output +5326,4082464,"TERMINAL",0,0,"\rfinal_carry[1].shape",,terminal_output +5327,4082889,"TERMINAL",0,0,"[?25l[?25h",,terminal_output +5328,4083511,"TERMINAL",0,0,"403",,terminal_output +5329,4083617,"TERMINAL",0,0,"[?25lf[?25h",,terminal_output +5330,4084017,"TERMINAL",0,0,"[?25li[?25h",,terminal_output +5331,4084138,"TERMINAL",0,0,"[?25ln[?25h",,terminal_output +5332,4084313,"TERMINAL",0,0,"[?25la[?25h",,terminal_output +5333,4084504,"TERMINAL",0,0,"[?25ll[?25h",,terminal_output +5334,4084504,"TERMINAL",0,0,"14",,terminal_output +5335,4084615,"TERMINAL",0,0,"[?25l_[?25h",,terminal_output +5336,4084757,"TERMINAL",0,0,"[?25lc[?25h",,terminal_output +5337,4084904,"TERMINAL",0,0,"[?25la[?25h",,terminal_output +5338,4085043,"TERMINAL",0,0,"[?25lr[?25h",,terminal_output +5339,4085204,"TERMINAL",0,0,"[?25lr[?25h",,terminal_output +5340,4085340,"TERMINAL",0,0,"[?25ly[?25h",,terminal_output +5341,4085491,"TERMINAL",0,0,"[?25l[[?25h",,terminal_output +5342,4085545,"TERMINAL",0,0,"25",,terminal_output +5343,4085860,"TERMINAL",0,0,"[?25l1[?25h",,terminal_output +5344,4086075,"TERMINAL",0,0,"[?25l1\r][?25h\r[1@2]",,terminal_output +5345,4086307,"TERMINAL",0,0,"\r\n(1, 6, 920)\r\n(jdb) ",,terminal_output +5346,4086632,"TERMINAL",0,0,"36",,terminal_output +5347,4087702,"TERMINAL",0,0,"47",,terminal_output +5348,4088727,"TERMINAL",0,0,"58",,terminal_output +5349,4089751,"TERMINAL",0,0,"69",,terminal_output +5350,4090878,"TERMINAL",0,0,"750",,terminal_output +5351,4091911,"TERMINAL",0,0,"81",,terminal_output +5352,4092926,"TERMINAL",0,0,"92",,terminal_output +5353,4093952,"TERMINAL",0,0,"503",,terminal_output +5354,4095083,"TERMINAL",0,0,"14",,terminal_output +5355,4096026,"TERMINAL",0,0,"25",,terminal_output +5356,4097129,"TERMINAL",0,0,"36",,terminal_output +5357,4098121,"TERMINAL",0,0,"48",,terminal_output +5358,4099178,"TERMINAL",0,0,"69",,terminal_output +5359,4100227,"TERMINAL",0,0,"78:00",,terminal_output 
+5360,4101288,"TERMINAL",0,0,"81",,terminal_output +5361,4102321,"TERMINAL",0,0,"92",,terminal_output +5362,4103473,"TERMINAL",0,0,"4:003",,terminal_output +5363,4104497,"TERMINAL",0,0,"14",,terminal_output +5364,4105482,"TERMINAL",0,0,"25",,terminal_output +5365,4106532,"TERMINAL",0,0,"36",,terminal_output +5366,4106689,"TERMINAL",0,0,"^DERROR:2025-07-03 17:24:03,534:jax._src.debugging:96: jax.debug.callback failed\r\nTraceback (most recent call last):\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/jax/_src/debugging.py"", line 94, in debug_callback_impl\r\n callback(*args)\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/jax/_src/debugging.py"", line 334, in _flat_callback\r\n callback(*args, **kwargs)\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/jax/_src/debugger/core.py"", line 220, in _breakpoint_callback\r\n debugger(frames, thread_id, **kwargs)\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/jax/_src/debugger/cli_debugger.py"", line 167, in run_debugger\r\n CliDebugger(frames, thread_id, **kwargs).run()\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/jax/_src/debugger/cli_debugger.py"", line 160, in run\r\n self.cmdloop()\r\n File ""/home/hk-project-p0023960/tum_cte0515/.local/share/uv/python/cpython-3.10.18-linux-x86_64-gnu/lib/python3.10/cmd.py"", line 138, in cmdloop\r\n stop = self.onecmd(line)\r\n File ""/home/hk-project-p0023960/tum_cte0515/.local/share/uv/python/cpython-3.10.18-linux-x86_64-gnu/lib/python3.10/cmd.py"", line 217, in onecmd\r\n return func(arg)\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/jax/_src/debugger/cli_debugger.py"", line 146, in do_quit\r\n sys.exit(0)\r\nSystemExit: 0\r\nERROR:jax._src.debugging:jax.debug.callback failed\r\nTraceback (most recent call last):\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/jax/_src/debugging.py"", line 94, in debug_callback_impl\r\n callback(*args)\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/jax/_src/debugging.py"", line 334, in _flat_callback\r\n callback(*args, **kwargs)\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/jax/_src/debugger/core.py"", line 220, in _breakpoint_callback\r\n debugger(frames, thread_id, **kwargs)\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/jax/_src/debugger/cli_debugger.py"", line 167, in run_debugger\r\n CliDebugger(frames, thread_id, **kwargs).run()\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/jax/_src/debugger/cli_debugger.py"", line 160, in run\r\n self.cmdloop()\r\n File ""/home/hk-project-p0023960/tum_cte0515/.local/share/uv/python/cpython-3.10.18-linux-x86_64-gnu/lib/python3.10/cmd.py"", line 138, in cmdloop\r\n stop = self.onecmd(line)\r\n File ""/home/hk-project-p0023960/tum_cte0515/.local/share/uv/python/cpython-3.10.18-linux-x86_64-gnu/lib/python3.10/cmd.py"", line 217, in onecmd\r\n return func(arg)\r\n File 
""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/jax/_src/debugger/cli_debugger.py"", line 146, in do_quit\r\n sys.exit(0)\r\nSystemExit: 0\r\n",,terminal_output +5367,4107671,"TERMINAL",0,0,"47",,terminal_output +5368,4108067,"TERMINAL",0,0,"]0;tum_cte0515@hkn0401:~/Projects/jafar[?2004h(jafar) [tum_cte0515@hkn0401 jafar]$ ",,terminal_output +5369,4108299,"genie.py",0,0,"",python,tab +5370,4108299,"genie.py",5147,0,"",python,selection_mouse +5371,4108376,"genie.py",5146,0,"",python,selection_command +5372,4108643,"TERMINAL",0,0,"58",,terminal_output +5373,4109465,"genie.py",5124,31,"",python,content +5374,4109526,"genie.py",5132,0,"",python,selection_command +5375,4109684,"TERMINAL",0,0,"69",,terminal_output +5376,4110742,"TERMINAL",0,0,"710",,terminal_output +5377,4111782,"TERMINAL",0,0,"81",,terminal_output +5378,4112233,"TERMINAL",0,0,"sh scripts_horeka/overfit_sample_tiny/sample.sh ",,terminal_output +5379,4112840,"TERMINAL",0,0,"92",,terminal_output +5380,4112977,"TERMINAL",0,0,"\r\n[?2004l\r",,terminal_output +5381,4113087,"TERMINAL",0,0,"Sampling from checkpoint: /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/3309699/genie_1751384516_74500/\r\n",,terminal_output +5382,4113918,"TERMINAL",0,0,"103",,terminal_output +5383,4115044,"TERMINAL",0,0,"14",,terminal_output +5384,4115966,"TERMINAL",0,0,"2025-07-03 17:24:12.837415: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +5385,4115992,"TERMINAL",0,0,"25",,terminal_output +5386,4117107,"TERMINAL",0,0,"36",,terminal_output +5387,4118075,"TERMINAL",0,0,"47",,terminal_output +5388,4119141,"TERMINAL",0,0,"59",,terminal_output +5389,4120179,"TERMINAL",0,0,"2025-07-03 17:24:16.990207: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +5390,4120190,"TERMINAL",0,0,"720",,terminal_output +5391,4121217,"TERMINAL",0,0,"81",,terminal_output +5392,4122315,"TERMINAL",0,0,"92",,terminal_output +5393,4123323,"TERMINAL",0,0,"203",,terminal_output +5394,4124465,"TERMINAL",0,0,"14",,terminal_output +5395,4125491,"TERMINAL",0,0,"25",,terminal_output +5396,4126467,"TERMINAL",0,0,"36",,terminal_output +5397,4127538,"TERMINAL",0,0,"47",,terminal_output +5398,4128562,"TERMINAL",0,0,"58",,terminal_output +5399,4129074,"TERMINAL",0,0,"2025-07-03 17:24:25.906377: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. 
Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +5400,4129689,"TERMINAL",0,0,"69",,terminal_output +5401,4130712,"TERMINAL",0,0,"730",,terminal_output +5402,4131736,"TERMINAL",0,0,"81",,terminal_output +5403,4132763,"TERMINAL",0,0,"92",,terminal_output +5404,4133886,"TERMINAL",0,0,"303",,terminal_output +5405,4134913,"TERMINAL",0,0,"14",,terminal_output +5406,4135882,"TERMINAL",0,0,"25",,terminal_output +5407,4136784,"TERMINAL",0,0,"2025-07-03 17:24:33.681348: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +5408,4136962,"TERMINAL",0,0,"36",,terminal_output +5409,4137994,"TERMINAL",0,0,"47",,terminal_output +5410,4139109,"TERMINAL",0,0,"58",,terminal_output +5411,4140136,"TERMINAL",0,0,"69",,terminal_output +5412,4141158,"TERMINAL",0,0,"741",,terminal_output +5413,4142171,"TERMINAL",0,0,"92",,terminal_output +5414,4143219,"TERMINAL",0,0,"403",,terminal_output +5415,4143512,"TERMINAL",0,0,"2025-07-03 17:24:40.335731: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +5416,4144275,"TERMINAL",0,0,"14",,terminal_output +5417,4145362,"TERMINAL",0,0,"25",,terminal_output +5418,4146481,"TERMINAL",0,0,"36",,terminal_output +5419,4147505,"TERMINAL",0,0,"47",,terminal_output +5420,4148530,"TERMINAL",0,0,"58",,terminal_output +5421,4149759,"TERMINAL",0,0,"69",,terminal_output +5422,4149912,"TERMINAL",0,0,"2025-07-03 17:24:46.791339: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +5423,4150783,"TERMINAL",0,0,"750",,terminal_output +5424,4151808,"TERMINAL",0,0,"81",,terminal_output +5425,4152980,"TERMINAL",0,0,"92",,terminal_output +5426,4153143,"TERMINAL",0,0,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/orbax/checkpoint/_src/serialization/type_handlers.py:1251: UserWarning: Sharding info not provided when restoring. Populating sharding info from sharding file. Please note restoration time will be slightly increased due to reading from file. 
Note also that this option is unsafe when restoring on a different topology than the checkpoint was saved with.\r\n warnings.warn(\r\n",,terminal_output +5427,4153957,"TERMINAL",0,0,"503",,terminal_output +5428,4154981,"TERMINAL",0,0,"14",,terminal_output +5429,4156007,"TERMINAL",0,0,"25",,terminal_output +5430,4157132,"TERMINAL",0,0,"36",,terminal_output +5431,4158088,"TERMINAL",0,0,"47",,terminal_output +5432,4159185,"TERMINAL",0,0,"59",,terminal_output +5433,4160182,"TERMINAL",0,0,"79:00",,terminal_output +5434,4161223,"TERMINAL",0,0,"81",,terminal_output +5435,4161234,"TERMINAL",0,0,"2025-07-03 17:24:58.135809: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +5436,4162282,"TERMINAL",0,0,"92",,terminal_output +5437,4163378,"TERMINAL",0,0,"5:003",,terminal_output +5438,4164188,"TERMINAL",0,0,"2025-07-03 17:25:01.088328: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +5439,4164382,"TERMINAL",0,0,"14",,terminal_output +5440,4165483,"TERMINAL",0,0,"25",,terminal_output +5441,4166479,"TERMINAL",0,0,"36",,terminal_output +5442,4167613,"TERMINAL",0,0,"47",,terminal_output +5443,4168600,"TERMINAL",0,0,"58",,terminal_output +5444,4169620,"TERMINAL",0,0,"69",,terminal_output +5445,4170751,"TERMINAL",0,0,"710",,terminal_output +5446,4171775,"TERMINAL",0,0,"81",,terminal_output +5447,4172117,"TERMINAL",0,0,"2025-07-03 17:25:08.969889: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +5448,4172799,"TERMINAL",0,0,"92",,terminal_output +5449,4173600,"TERMINAL",0,0,"token_idxs shape: (1, 1, 920)\r\n",,terminal_output +5450,4173728,"TERMINAL",0,0,"token_idxs shape: (1, 6, 920)\r\n",,terminal_output +5451,4173833,"TERMINAL",0,0,"103",,terminal_output +5452,4173844,"TERMINAL",0,0,"action_tokens.shape: (1, 5, 1, 32)\r\n",,terminal_output +5453,4174003,"TERMINAL",0,0,"mask_expanded.shape: (1, 6, 920, 1)\r\nmask_token.shape: (1, 1, 1, 128)\r\n",,terminal_output +5454,4174949,"TERMINAL",0,0,"14",,terminal_output +5455,4175057,"TERMINAL",0,0,"mask_expanded.shape: (1, 6, 920, 1)\r\nmask_token.shape: (1, 1, 1, 128)\r\n",,terminal_output +5456,4175445,"TERMINAL",0,0,"2025-07-03 17:25:12.344552: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. 
Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +5457,4175973,"TERMINAL",0,0,"25",,terminal_output +5458,4176997,"TERMINAL",0,0,"36",,terminal_output +5459,4178011,"TERMINAL",0,0,"47",,terminal_output +5460,4179084,"TERMINAL",0,0,"58",,terminal_output +5461,4180171,"TERMINAL",0,0,"620",,terminal_output +5462,4181156,"TERMINAL",0,0,"81",,terminal_output +5463,4182207,"TERMINAL",0,0,"92",,terminal_output +5464,4183259,"TERMINAL",0,0,"203",,terminal_output +5465,4184318,"TERMINAL",0,0,"14",,terminal_output +5466,4185237,"TERMINAL",0,0,"> /hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/sample.py(128)_autoreg_sample_mihir()\r\n-> vid = jnp.concatenate([vid, new_frames], axis=1)\r\n(Pdb) ",,terminal_output +5467,4185368,"TERMINAL",0,0,"25",,terminal_output +5468,4186429,"TERMINAL",0,0,"36",,terminal_output +5469,4187483,"TERMINAL",0,0,"47",,terminal_output +5470,4188676,"TERMINAL",0,0,"58",,terminal_output +5471,4189631,"TERMINAL",0,0,"69",,terminal_output +5472,4191139,"TERMINAL",0,0,"730",,terminal_output +5473,4192153,"TERMINAL",0,0,"81",,terminal_output +5474,4193165,"TERMINAL",0,0,"93",,terminal_output +5475,4194202,"TERMINAL",0,0,"314",,terminal_output +5476,4195228,"TERMINAL",0,0,"25",,terminal_output +5477,4196278,"TERMINAL",0,0,"36",,terminal_output +5478,4197375,"TERMINAL",0,0,"47",,terminal_output +5479,4198398,"TERMINAL",0,0,"58",,terminal_output +5480,4199526,"TERMINAL",0,0,"69",,terminal_output +5481,4200550,"TERMINAL",0,0,"740",,terminal_output +5482,4201675,"TERMINAL",0,0,"81",,terminal_output +5483,4202735,"TERMINAL",0,0,"92",,terminal_output +5484,4203827,"TERMINAL",0,0,"403",,terminal_output +5485,4204857,"TERMINAL",0,0,"14",,terminal_output +5486,4205976,"TERMINAL",0,0,"25",,terminal_output +5487,4207000,"TERMINAL",0,0,"36",,terminal_output +5488,4208000,"TERMINAL",0,0,"47",,terminal_output +5489,4209151,"TERMINAL",0,0,"58",,terminal_output +5490,4210096,"TERMINAL",0,0,"69",,terminal_output +5491,4211201,"TERMINAL",0,0,"751",,terminal_output +5492,4212196,"TERMINAL",0,0,"92",,terminal_output +5493,4213248,"TERMINAL",0,0,"503",,terminal_output +5494,4214301,"TERMINAL",0,0,"14",,terminal_output +5495,4215347,"TERMINAL",0,0,"25",,terminal_output +5496,4216402,"TERMINAL",0,0,"36",,terminal_output +5497,4217454,"TERMINAL",0,0,"47",,terminal_output +5498,4218571,"TERMINAL",0,0,"58",,terminal_output +5499,4219595,"TERMINAL",0,0,"69",,terminal_output +5500,4220622,"TERMINAL",0,0,"710:00",,terminal_output +5501,4221649,"TERMINAL",0,0,"81",,terminal_output +5502,4222714,"TERMINAL",0,0,"92",,terminal_output +5503,4223795,"TERMINAL",0,0,"6:003",,terminal_output +5504,4224818,"TERMINAL",0,0,"14",,terminal_output +5505,4225848,"TERMINAL",0,0,"25",,terminal_output +5506,4227074,"TERMINAL",0,0,"36",,terminal_output +5507,4228040,"TERMINAL",0,0,"47",,terminal_output +5508,4229091,"TERMINAL",0,0,"58",,terminal_output +5509,4230183,"TERMINAL",0,0,"610",,terminal_output +5510,4231269,"TERMINAL",0,0,"81",,terminal_output +5511,4232635,"TERMINAL",0,0,"92",,terminal_output +5512,4233682,"TERMINAL",0,0,"103",,terminal_output +5513,4234751,"TERMINAL",0,0,"14",,terminal_output +5514,4235878,"TERMINAL",0,0,"25",,terminal_output +5515,4236845,"TERMINAL",0,0,"36",,terminal_output +5516,4237926,"TERMINAL",0,0,"47",,terminal_output +5517,4238948,"TERMINAL",0,0,"58",,terminal_output +5518,4239986,"TERMINAL",0,0,"69",,terminal_output 
+5519,4241093,"TERMINAL",0,0,"720",,terminal_output +5520,4242089,"TERMINAL",0,0,"81",,terminal_output +5521,4243189,"TERMINAL",0,0,"93",,terminal_output +5522,4244317,"TERMINAL",0,0,"214",,terminal_output +5523,4245505,"TERMINAL",0,0,"25",,terminal_output +5524,4246422,"TERMINAL",0,0,"l",,terminal_output +5525,4246488,"TERMINAL",0,0,"\r\n123 \t args.temperature,\r\n124 \t args.sample_argmax,\r\n125 \t method=Genie.sample_mihir,\r\n126 \t )\r\n127 \t breakpoint()\r\n128 ->\t vid = jnp.concatenate([vid, new_frames], axis=1)\r\n129 \t return vid\r\n130 \t\r\n131 \t\r\n132 \t\r\n133 \t# --- Get video + latent actions ---\r\n(Pdb) ",,terminal_output +5526,4246541,"TERMINAL",0,0,"36",,terminal_output +5527,4247580,"TERMINAL",0,0,"47",,terminal_output +5528,4248780,"TERMINAL",0,0,"58",,terminal_output +5529,4249804,"TERMINAL",0,0,"69",,terminal_output +5530,4250839,"TERMINAL",0,0,"730",,terminal_output +5531,4251891,"TERMINAL",0,0,"81",,terminal_output +5532,4252950,"train_tokenizer.py",0,0,"from dataclasses import dataclass, field\nimport os\nimport time\n\nimport einops\nfrom flax.training import orbax_utils\nfrom flax.training.train_state import TrainState\nfrom jax.sharding import Mesh, PartitionSpec, NamedSharding\nfrom jax.experimental.mesh_utils import create_device_mesh\nimport optax\nimport orbax\nfrom orbax.checkpoint import PyTreeCheckpointer\nimport numpy as np\nimport dm_pix as pix\nimport jax\nimport jax.numpy as jnp\nimport tyro\nimport wandb\n\nfrom models.tokenizer import TokenizerVQVAE\nfrom utils.dataloader import get_dataloader\nfrom utils.parameter_utils import count_parameters_by_component\n\nts = int(time.time())\n\n\n@dataclass\nclass Args:\n # Experiment\n num_steps: int = 300_000\n seed: int = 0\n seq_len: int = 16\n image_channels: int = 3\n image_height: int = 90\n image_width: int = 160\n data_dir: str = ""data_tfrecords/coinrun""\n checkpoint: str = """"\n # Optimization\n vq_beta: float = 0.25\n batch_size: int = 48\n min_lr: float = 3e-4\n max_lr: float = 3e-4\n warmup_steps: int = 10000\n # Tokenizer\n model_dim: int = 512\n latent_dim: int = 32\n num_latents: int = 1024\n patch_size: int = 4\n num_blocks: int = 8\n num_heads: int = 8\n dropout: float = 0.0\n codebook_dropout: float = 0.01\n # Logging\n log: bool = False\n entity: str = """"\n project: str = """"\n name: str = ""train_tokenizer""\n tags: list[str] = field(default_factory=lambda: [""tokenizer""])\n log_interval: int = 5\n log_image_interval: int = 250\n ckpt_dir: str = """"\n log_checkpoint_interval: int = 10000\n log_gradients: bool = False\n\n\nargs = tyro.cli(Args)\n\n\ndef tokenizer_loss_fn(params, state, inputs):\n # --- Compute loss ---\n outputs = state.apply_fn(\n params,\n inputs,\n training=True,\n rngs={""params"": inputs[""rng""], ""dropout"": inputs[""dropout_rng""]},\n )\n mse = jnp.square(inputs[""videos""] - outputs[""recon""]).mean()\n q_loss = jnp.square(jax.lax.stop_gradient(outputs[""emb""]) - outputs[""z""]).mean()\n commitment_loss = jnp.square(\n outputs[""emb""] - jax.lax.stop_gradient(outputs[""z""])\n ).mean()\n loss = mse + q_loss + args.vq_beta * commitment_loss\n\n # --- Compute validation metrics ---\n gt = inputs[""videos""].clip(0, 1).reshape(-1, *inputs[""videos""].shape[2:])\n recon = outputs[""recon""].clip(0, 1).reshape(-1, *outputs[""recon""].shape[2:])\n psnr = pix.psnr(gt, recon).mean()\n ssim = pix.ssim(gt, recon).mean()\n _, index_counts = jnp.unique_counts(\n jnp.ravel(outputs[""indices""]), size=args.num_latents, fill_value=0\n )\n codebook_usage = 
(index_counts != 0).mean()\n metrics = dict(\n loss=loss,\n mse=mse,\n q_loss=q_loss,\n commitment_loss=commitment_loss,\n psnr=psnr,\n ssim=ssim,\n codebook_usage=codebook_usage,\n )\n return loss, (outputs[""recon""], metrics)\n\n\n@jax.jit\ndef train_step(state, inputs):\n grad_fn = jax.value_and_grad(tokenizer_loss_fn, has_aux=True, allow_int=True)\n (loss, (recon, metrics)), grads = grad_fn(state.params, state, inputs)\n state = state.apply_gradients(grads=grads)\n if args.log_gradients:\n metrics[""encoder_gradients_std/""] = jax.tree.map(\n lambda x: x.std(), grads[""params""][""encoder""]\n )\n metrics[""vq_gradients_std/""] = jax.tree.map(\n lambda x: x.std(), grads[""params""][""vq""]\n )\n metrics[""decoder_gradients_std/""] = jax.tree.map(\n lambda x: x.std(), grads[""params""][""decoder""]\n )\n return state, loss, recon, metrics\n\n\nif __name__ == ""__main__"":\n jax.distributed.initialize()\n num_devices = jax.device_count()\n if num_devices == 0:\n raise ValueError(""No JAX devices found."")\n print(f""Running on {num_devices} devices."")\n\n if args.batch_size % num_devices != 0:\n raise ValueError(\n f""Global batch size {args.batch_size} must be divisible by ""\n f""number of devices {num_devices}.""\n )\n\n per_device_batch_size_for_init = args.batch_size // num_devices\n\n rng = jax.random.PRNGKey(args.seed)\n\n # --- Initialize model ---\n tokenizer = TokenizerVQVAE(\n in_dim=args.image_channels,\n model_dim=args.model_dim,\n latent_dim=args.latent_dim,\n num_latents=args.num_latents,\n patch_size=args.patch_size,\n num_blocks=args.num_blocks,\n num_heads=args.num_heads,\n dropout=args.dropout,\n codebook_dropout=args.codebook_dropout,\n )\n rng, _rng = jax.random.split(rng)\n image_shape = (args.image_height, args.image_width, args.image_channels)\n inputs = dict(\n videos=jnp.zeros(\n (per_device_batch_size_for_init, args.seq_len, *image_shape),\n dtype=jnp.float32,\n ),\n )\n init_params = tokenizer.init(_rng, inputs)\n\n param_counts = count_parameters_by_component(init_params)\n\n if args.log and jax.process_index() == 0:\n wandb.init(\n entity=args.entity,\n project=args.project,\n name=args.name,\n tags=args.tags,\n group=""debug"",\n config=args,\n )\n wandb.config.update({""model_param_count"": param_counts})\n\n print(""Parameter counts:"")\n print(param_counts)\n\n # --- Initialize optimizer ---\n lr_schedule = optax.warmup_cosine_decay_schedule(\n args.min_lr, args.max_lr, args.warmup_steps, args.num_steps\n )\n tx = optax.adamw(learning_rate=lr_schedule, b1=0.9, b2=0.9, weight_decay=1e-4)\n train_state = TrainState.create(apply_fn=tokenizer.apply, params=init_params, tx=tx)\n\n # FIXME: switch to create_hybrid_device_mesh for runs spanning multiple nodes\n device_mesh_arr = create_device_mesh((num_devices,))\n mesh = Mesh(devices=device_mesh_arr, axis_names=(""data"",))\n\n replicated_sharding = NamedSharding(mesh, PartitionSpec())\n videos_sharding = NamedSharding(\n mesh, PartitionSpec(""data"", None, None, None, None)\n )\n train_state = jax.device_put(train_state, replicated_sharding)\n\n # --- Load checkpoint ---\n step = 0\n if args.checkpoint:\n restore_target = {""model"": train_state}\n restore_args = orbax_utils.restore_args_from_target(restore_target)\n train_state.params[""params""].update(\n PyTreeCheckpointer()\n .restore(args.checkpoint, item=restore_target, restore_args=restore_args)[\n ""model""\n ]\n .params[""params""]\n )\n # Assume checkpoint is of the form tokenizer__\n step += int(args.checkpoint.split(""_"")[-1])\n\n # --- TRAIN 
LOOP ---\n tfrecord_files = [\n os.path.join(args.data_dir, x)\n for x in os.listdir(args.data_dir)\n if x.endswith("".tfrecord"")\n ]\n dataloader = get_dataloader(\n # NOTE: We deliberately pass the global batch size\n # The dataloader shards the dataset across all processes\n tfrecord_files,\n args.seq_len,\n args.batch_size,\n *image_shape,\n seed=args.seed,\n )\n dataloader = (jax.make_array_from_process_local_data(videos_sharding, elem) for elem in dataloader) # type: ignore\n print(f""Starting training from step {step}..."")\n while step < args.num_steps:\n for videos in dataloader:\n # --- Train step ---\n rng, _rng, _rng_dropout = jax.random.split(rng, 3)\n\n inputs = dict(videos=videos, rng=_rng, dropout_rng=_rng_dropout)\n train_state, loss, recon, metrics = train_step(train_state, inputs)\n print(f""Step {step}, loss: {loss}"")\n step += 1\n\n # --- Logging ---\n if args.log:\n if step % args.log_interval == 0 and jax.process_index() == 0:\n wandb.log(\n {\n ""loss"": loss,\n ""step"": step,\n **metrics,\n }\n )\n if step % args.log_image_interval == 0:\n gt_seq = inputs[""videos""][0]\n recon_seq = recon[0].clip(0, 1)\n comparison_seq = jnp.concatenate((gt_seq, recon_seq), axis=1)\n comparison_seq = einops.rearrange(\n comparison_seq * 255, ""t h w c -> h (t w) c""\n )\n # NOTE: Process-dependent control flow deliberately happens\n # after indexing operation since it must not contain code\n # sections that lead to cross-accelerator communication.\n if jax.process_index() == 0:\n log_images = dict(\n image=wandb.Image(np.asarray(gt_seq[0])),\n recon=wandb.Image(np.asarray(recon_seq[0])),\n true_vs_recon=wandb.Image(\n np.asarray(comparison_seq.astype(np.uint8))\n ),\n )\n wandb.log(log_images)\n if step % args.log_checkpoint_interval == 0:\n ckpt = {""model"": train_state}\n orbax_checkpointer = orbax.checkpoint.PyTreeCheckpointer()\n save_args = orbax_utils.save_args_from_target(ckpt)\n orbax_checkpointer.save(\n os.path.join(os.getcwd(), args.ckpt_dir, f""tokenizer_{ts}_{step}""),\n ckpt,\n save_args=save_args,\n )\n if step >= args.num_steps:\n break\n",python,tab +5533,4253123,"TERMINAL",0,0,"92",,terminal_output +5534,4253884,"models/dynamics.py",0,0,"",python,tab +5535,4253996,"TERMINAL",0,0,"303",,terminal_output +5536,4255042,"TERMINAL",0,0,"14",,terminal_output +5537,4256165,"TERMINAL",0,0,"25",,terminal_output +5538,4257073,"sample.py",0,0,"",python,tab +5539,4257255,"TERMINAL",0,0,"37",,terminal_output +5540,4258200,"TERMINAL",0,0,"58",,terminal_output +5541,4259224,"TERMINAL",0,0,"69",,terminal_output +5542,4260360,"TERMINAL",0,0,"740",,terminal_output +5543,4260562,"TERMINAL",0,0,"n",,terminal_output +5544,4261086,"TERMINAL",0,0,"[?25le[?25h",,terminal_output +5545,4261260,"TERMINAL",0,0,"[?25lw[?25h",,terminal_output +5546,4261364,"TERMINAL",0,0,"81",,terminal_output +5547,4261703,"TERMINAL",0,0,"[?25l_[?25h",,terminal_output +5548,4261974,"TERMINAL",0,0,"[?25lf[?25h",,terminal_output +5549,4262219,"TERMINAL",0,0,"[?25lr[?25h",,terminal_output +5550,4262437,"TERMINAL",0,0,"92",,terminal_output +5551,4262786,"TERMINAL",0,0,"[?25la[?25h",,terminal_output +5552,4262976,"TERMINAL",0,0,"[?25lm[?25h",,terminal_output +5553,4263083,"TERMINAL",0,0,"[?25le[?25h",,terminal_output +5554,4263269,"TERMINAL",0,0,"[?25ls[?25h[?25l.[?25h",,terminal_output +5555,4263440,"TERMINAL",0,0,"403",,terminal_output +5556,4263493,"TERMINAL",0,0,"[?25ls[?25h",,terminal_output +5557,4263546,"TERMINAL",0,0,"[?25lh[?25h",,terminal_output 
+5558,4263720,"TERMINAL",0,0,"[?25la[?25h",,terminal_output +5559,4263782,"TERMINAL",0,0,"[?25lp[?25h",,terminal_output +5560,4263899,"TERMINAL",0,0,"[?25le[?25h",,terminal_output +5561,4263962,"TERMINAL",0,0,"\r\n(1, 6, 90, 160, 3)\r\n(Pdb) ",,terminal_output +5562,4264479,"TERMINAL",0,0,"14",,terminal_output +5563,4265524,"TERMINAL",0,0,"25",,terminal_output +5564,4266598,"TERMINAL",0,0,"36",,terminal_output +5565,4267726,"TERMINAL",0,0,"47",,terminal_output +5566,4268749,"TERMINAL",0,0,"58",,terminal_output +5567,4269774,"TERMINAL",0,0,"69",,terminal_output +5568,4270801,"TERMINAL",0,0,"750",,terminal_output +5569,4271207,"TERMINAL",0,0,"[?25lvu[?25h",,terminal_output +5570,4271273,"TERMINAL",0,0,"[?25lu[?25h",,terminal_output +5571,4271797,"TERMINAL",0,0,"[?25ld[?25h",,terminal_output +5572,4271856,"TERMINAL",0,0,"81",,terminal_output +5573,4272132,"TERMINAL",0,0,"[?25ld\r[?25h",,terminal_output +5574,4272193,"TERMINAL",0,0,"[?25lu\r[?25h",,terminal_output +5575,4272413,"TERMINAL",0,0,"[?25li[?25h",,terminal_output +5576,4272520,"TERMINAL",0,0,"[?25ld[?25h",,terminal_output +5577,4272724,"TERMINAL",0,0,"[?25l.[?25h",,terminal_output +5578,4272891,"TERMINAL",0,0,"[?25ls[?25h",,terminal_output +5579,4272922,"TERMINAL",0,0,"92",,terminal_output +5580,4272988,"TERMINAL",0,0,"[?25lh[?25h",,terminal_output +5581,4273174,"TERMINAL",0,0,"[?25la[?25h",,terminal_output +5582,4273237,"TERMINAL",0,0,"[?25lp[?25h",,terminal_output +5583,4273412,"TERMINAL",0,0,"[?25le[?25h\r\n(1, 1, 90, 160, 3)\r\n(Pdb) ",,terminal_output +5584,4273972,"TERMINAL",0,0,"503",,terminal_output +5585,4275098,"TERMINAL",0,0,"14",,terminal_output +5586,4276044,"TERMINAL",0,0,"25",,terminal_output +5587,4276143,"sample.py",0,0,"",python,tab +5588,4276144,"sample.py",3815,0,"",python,selection_mouse +5589,4276217,"sample.py",3814,0,"",python,selection_command +5590,4276556,"sample.py",3815,0,"",python,selection_mouse +5591,4276569,"sample.py",3814,0,"",python,selection_command +5592,4277190,"sample.py",3760,0,"",python,selection_mouse +5593,4277193,"TERMINAL",0,0,"36",,terminal_output +5594,4278129,"sample.py",3532,0,"",python,selection_mouse +5595,4278183,"TERMINAL",0,0,"48",,terminal_output +5596,4278819,"sample.py",3529,0,"",python,selection_mouse +5597,4279297,"TERMINAL",0,0,"69",,terminal_output +5598,4280331,"TERMINAL",0,0,"71:00",,terminal_output +5599,4280453,"sample.py",3530,0,"",python,selection_command +5600,4280588,"sample.py",3531,0,"",python,selection_command +5601,4280740,"sample.py",3532,0,"",python,selection_command +5602,4280884,"sample.py",3533,0,"",python,selection_command +5603,4281141,"sample.py",3532,0,"",python,selection_command +5604,4281293,"sample.py",3531,0,"",python,selection_command +5605,4281306,"TERMINAL",0,0,"81",,terminal_output +5606,4281440,"sample.py",3530,0,"",python,selection_command +5607,4281593,"sample.py",3529,0,"",python,selection_command +5608,4282109,"sample.py",3529,11,"",python,content +5609,4282343,"TERMINAL",0,0,"92",,terminal_output +5610,4282931,"sample.py",3529,0,"g",python,content +5611,4282932,"sample.py",3530,0,"",python,selection_keyboard +5612,4283029,"sample.py",3530,0,"e",python,content +5613,4283030,"sample.py",3531,0,"",python,selection_keyboard +5614,4283256,"sample.py",3531,0,"e",python,content +5615,4283257,"sample.py",3532,0,"",python,selection_keyboard +5616,4283397,"TERMINAL",0,0,"7:003",,terminal_output +5617,4283641,"sample.py",3531,1,"",python,content +5618,4283800,"sample.py",3531,0,"n",python,content 
+5619,4283801,"sample.py",3532,0,"",python,selection_keyboard +5620,4283913,"sample.py",3532,0,"e",python,content +5621,4283914,"sample.py",3533,0,"",python,selection_keyboard +5622,4284000,"sample.py",3533,0,"r",python,content +5623,4284001,"sample.py",3534,0,"",python,selection_keyboard +5624,4284470,"TERMINAL",0,0,"14",,terminal_output +5625,4284592,"sample.py",3534,0,"a",python,content +5626,4284593,"sample.py",3535,0,"",python,selection_keyboard +5627,4285224,"sample.py",3535,0,"t",python,content +5628,4285225,"sample.py",3536,0,"",python,selection_keyboard +5629,4285330,"sample.py",3536,0,"e",python,content +5630,4285331,"sample.py",3537,0,"",python,selection_keyboard +5631,4285406,"sample.py",3537,0,"d",python,content +5632,4285407,"sample.py",3538,0,"",python,selection_keyboard +5633,4285502,"TERMINAL",0,0,"25",,terminal_output +5634,4285627,"sample.py",3538,0,"_",python,content +5635,4285628,"sample.py",3539,0,"",python,selection_keyboard +5636,4285999,"sample.py",3539,0,"v",python,content +5637,4285999,"sample.py",3540,0,"",python,selection_keyboard +5638,4286161,"sample.py",3540,0,"i",python,content +5639,4286162,"sample.py",3541,0,"",python,selection_keyboard +5640,4286276,"sample.py",3541,0,"d",python,content +5641,4286277,"sample.py",3542,0,"",python,selection_keyboard +5642,4286419,"sample.py",3542,0," ",python,content +5643,4286420,"sample.py",3543,0,"",python,selection_keyboard +5644,4286553,"TERMINAL",0,0,"36",,terminal_output +5645,4286598,"sample.py",3542,0,"",python,selection_command +5646,4286795,"sample.py",3572,0,"",python,selection_command +5647,4286951,"sample.py",3587,0,"",python,selection_command +5648,4287468,"sample.py",3606,0,"",python,selection_command +5649,4287491,"sample.py",3628,0,"",python,selection_command +5650,4287530,"sample.py",3656,0,"",python,selection_command +5651,4287549,"sample.py",3682,0,"",python,selection_command +5652,4287591,"sample.py",3710,0,"",python,selection_command +5653,4287647,"sample.py",3732,0,"",python,selection_command +5654,4287724,"sample.py",3749,0,"",python,selection_command +5655,4287724,"sample.py",3768,0,"",python,selection_command +5656,4287725,"TERMINAL",0,0,"47",,terminal_output +5657,4288729,"TERMINAL",0,0,"58",,terminal_output +5658,4289674,"TERMINAL",0,0,"69",,terminal_output +5659,4289870,"sample.py",3734,70,"",python,content +5660,4289966,"sample.py",3738,0,"",python,selection_command +5661,4290469,"sample.py",3748,0,"",python,selection_command +5662,4290718,"TERMINAL",0,0,"710",,terminal_output +5663,4290949,"sample.py",3747,0,"",python,selection_command +5664,4291125,"sample.py",3746,0,"",python,selection_command +5665,4291300,"sample.py",3745,0,"",python,selection_command +5666,4291567,"sample.py",3745,0,"g",python,content +5667,4291568,"sample.py",3746,0,"",python,selection_keyboard +5668,4291770,"TERMINAL",0,0,"81",,terminal_output +5669,4292916,"TERMINAL",0,0,"92",,terminal_output +5670,4293786,"sample.py",3745,1,"generated_vid",python,content +5671,4293860,"TERMINAL",0,0,"103",,terminal_output +5672,4294071,"sample.py",3758,1,"",python,content +5673,4294267,"sample.py",3758,1,"",python,content +5674,4294548,"sample.py",3758,1,"",python,content +5675,4294903,"sample.py",3757,0,"",python,selection_command +5676,4294916,"TERMINAL",0,0,"14",,terminal_output +5677,4295904,"sample.py",3760,0,"",python,selection_mouse +5678,4295976,"TERMINAL",0,0,"25",,terminal_output +5679,4297114,"TERMINAL",0,0,"36",,terminal_output +5680,4298061,"TERMINAL",0,0,"47",,terminal_output 
+5681,4299109,"TERMINAL",0,0,"59",,terminal_output +5682,4299722,"TERMINAL",0,0,"^D\r\nTraceback (most recent call last):\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/sample.py"", line 159, in \r\n",,terminal_output +5683,4299940,"TERMINAL",0,0," recon = vid.clip(0, 1).reshape(-1, *vid.shape[2:])\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/sample.py"", line 128, in _autoreg_sample_mihir\r\n \r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/sample.py"", line 128, in _autoreg_sample_mihir\r\n \r\n File ""/home/hk-project-p0023960/tum_cte0515/.local/share/uv/python/cpython-3.10.18-linux-x86_64-gnu/lib/python3.10/bdb.py"", line 90, in trace_dispatch\r\n return self.dispatch_line(frame)\r\n File ""/home/hk-project-p0023960/tum_cte0515/.local/share/uv/python/cpython-3.10.18-linux-x86_64-gnu/lib/python3.10/bdb.py"", line 115, in dispatch_line\r\n if self.quitting: raise BdbQuit\r\nbdb.BdbQuit\r\n",,terminal_output +5684,4300157,"TERMINAL",0,0,"720",,terminal_output +5685,4301211,"TERMINAL",0,0,"81",,terminal_output +5686,4301359,"TERMINAL",0,0,"]0;tum_cte0515@hkn0401:~/Projects/jafar[?2004h(jafar) [tum_cte0515@hkn0401 jafar]$ ",,terminal_output +5687,4301937,"TERMINAL",0,0,"sh scripts_horeka/overfit_sample_tiny/sample.sh ",,terminal_output +5688,4302336,"TERMINAL",0,0,"92",,terminal_output +5689,4303371,"TERMINAL",0,0,"203",,terminal_output +5690,4304343,"TERMINAL",0,0,"14",,terminal_output +5691,4305386,"TERMINAL",0,0,"25",,terminal_output +5692,4306439,"TERMINAL",0,0,"36",,terminal_output +5693,4307116,"TERMINAL",0,0,"\r\n[?2004l\r",,terminal_output +5694,4307266,"TERMINAL",0,0,"Sampling from checkpoint: /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/3309699/genie_1751384516_74500/\r\n",,terminal_output +5695,4307480,"TERMINAL",0,0,"47",,terminal_output +5696,4308532,"TERMINAL",0,0,"58",,terminal_output +5697,4309575,"TERMINAL",0,0,"69",,terminal_output +5698,4310019,"TERMINAL",0,0,"2025-07-03 17:27:26.904310: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +5699,4310632,"TERMINAL",0,0,"730",,terminal_output +5700,4311935,"TERMINAL",0,0,"81",,terminal_output +5701,4313007,"TERMINAL",0,0,"92",,terminal_output +5702,4313547,"genie.py",0,0,"",python,tab +5703,4314068,"TERMINAL",0,0,"303",,terminal_output +5704,4314173,"TERMINAL",0,0,"2025-07-03 17:27:31.036438: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. 
Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +5705,4315135,"TERMINAL",0,0,"15",,terminal_output +5706,4315903,"genie.py",5052,0,"",python,selection_mouse +5707,4316166,"TERMINAL",0,0,"36",,terminal_output +5708,4316504,"genie.py",5123,0,"",python,selection_mouse +5709,4316512,"genie.py",5122,0,"",python,selection_command +5710,4317226,"TERMINAL",0,0,"47",,terminal_output +5711,4318257,"TERMINAL",0,0,"58",,terminal_output +5712,4319282,"genie.py",5140,0,"",python,selection_mouse +5713,4319325,"TERMINAL",0,0,"69",,terminal_output +5714,4319845,"genie.py",5102,0,"",python,selection_mouse +5715,4320359,"TERMINAL",0,0,"740",,terminal_output +5716,4321410,"TERMINAL",0,0,"81",,terminal_output +5717,4322509,"TERMINAL",0,0,"92",,terminal_output +5718,4323280,"TERMINAL",0,0,"2025-07-03 17:27:40.182499: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +5719,4323501,"TERMINAL",0,0,"403",,terminal_output +5720,4324561,"TERMINAL",0,0,"14",,terminal_output +5721,4325597,"TERMINAL",0,0,"25",,terminal_output +5722,4326643,"TERMINAL",0,0,"36",,terminal_output +5723,4327731,"TERMINAL",0,0,"47",,terminal_output +5724,4328746,"TERMINAL",0,0,"58",,terminal_output +5725,4329134,"genie.py",3253,0,"",python,selection_mouse +5726,4329666,"genie.py",3290,0,"",python,selection_mouse +5727,4329799,"TERMINAL",0,0,"69",,terminal_output +5728,4330519,"genie.py",3374,0,"",python,selection_mouse +5729,4330908,"TERMINAL",0,0,"750",,terminal_output +5730,4331065,"TERMINAL",0,0,"2025-07-03 17:27:47.961670: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +5731,4331897,"TERMINAL",0,0,"81",,terminal_output +5732,4332636,"genie.py",5116,0,"",python,selection_mouse +5733,4332979,"TERMINAL",0,0,"92",,terminal_output +5734,4333315,"genie.py",5108,0,"",python,selection_mouse +5735,4333871,"genie.py",5099,0,"",python,selection_mouse +5736,4333985,"TERMINAL",0,0,"503",,terminal_output +5737,4334033,"genie.py",5092,14,"new_frame_idxs",python,selection_mouse +5738,4335041,"TERMINAL",0,0,"14",,terminal_output +5739,4336189,"TERMINAL",0,0,"25",,terminal_output +5740,4337152,"TERMINAL",0,0,"37",,terminal_output +5741,4337600,"TERMINAL",0,0,"2025-07-03 17:27:54.500762: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. 
Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +5742,4338410,"TERMINAL",0,0,"58",,terminal_output +5743,4339306,"TERMINAL",0,0,"69",,terminal_output +5744,4340331,"TERMINAL",0,0,"72:00",,terminal_output +5745,4341322,"TERMINAL",0,0,"81",,terminal_output +5746,4342371,"TERMINAL",0,0,"92",,terminal_output +5747,4343502,"TERMINAL",0,0,"8:003",,terminal_output +5748,4344394,"TERMINAL",0,0,"2025-07-03 17:28:01.289498: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +5749,4344499,"TERMINAL",0,0,"14",,terminal_output +5750,4345590,"TERMINAL",0,0,"25",,terminal_output +5751,4346596,"TERMINAL",0,0,"36",,terminal_output +5752,4347113,"genie.py",5140,0,"",python,selection_mouse +5753,4347699,"TERMINAL",0,0,"47",,terminal_output +5754,4347805,"TERMINAL",0,0,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/orbax/checkpoint/_src/serialization/type_handlers.py:1251: UserWarning: Sharding info not provided when restoring. Populating sharding info from sharding file. Please note restoration time will be slightly increased due to reading from file. Note also that this option is unsafe when restoring on a different topology than the checkpoint was saved with.\r\n warnings.warn(\r\n",,terminal_output +5755,4347984,"genie.py",5097,0,"",python,selection_mouse +5756,4348725,"TERMINAL",0,0,"58",,terminal_output +5757,4349740,"TERMINAL",0,0,"69",,terminal_output +5758,4350875,"TERMINAL",0,0,"710",,terminal_output +5759,4352086,"TERMINAL",0,0,"81",,terminal_output +5760,4352922,"TERMINAL",0,0,"92",,terminal_output +5761,4353954,"TERMINAL",0,0,"103",,terminal_output +5762,4355088,"TERMINAL",0,0,"14",,terminal_output +5763,4355894,"TERMINAL",0,0,"2025-07-03 17:28:12.752348: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +5764,4356055,"TERMINAL",0,0,"25",,terminal_output +5765,4357211,"TERMINAL",0,0,"37",,terminal_output +5766,4358238,"TERMINAL",0,0,"58",,terminal_output +5767,4358966,"TERMINAL",0,0,"2025-07-03 17:28:15.776901: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +5768,4359374,"TERMINAL",0,0,"69",,terminal_output +5769,4360355,"TERMINAL",0,0,"720",,terminal_output +5770,4361403,"TERMINAL",0,0,"81",,terminal_output +5771,4362458,"TERMINAL",0,0,"92",,terminal_output +5772,4363572,"TERMINAL",0,0,"203",,terminal_output +5773,4364597,"TERMINAL",0,0,"14",,terminal_output +5774,4365620,"TERMINAL",0,0,"25",,terminal_output +5775,4366746,"TERMINAL",0,0,"36",,terminal_output +5776,4366854,"TERMINAL",0,0,"2025-07-03 17:28:23.738542: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. 
Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +5777,4367770,"TERMINAL",0,0,"47",,terminal_output +5778,4368415,"TERMINAL",0,0,"token_idxs shape: (1, 1, 920)\r\n",,terminal_output +5779,4368477,"TERMINAL",0,0,"token_idxs shape: (1, 6, 920)\r\n",,terminal_output +5780,4368647,"TERMINAL",0,0,"action_tokens.shape: (1, 5, 1, 32)\r\n",,terminal_output +5781,4368756,"TERMINAL",0,0,"58",,terminal_output +5782,4368818,"TERMINAL",0,0,"mask_expanded.shape: (1, 6, 920, 1)\r\nmask_token.shape: (1, 1, 1, 128)\r\n",,terminal_output +5783,4369781,"TERMINAL",0,0,"69",,terminal_output +5784,4369842,"TERMINAL",0,0,"mask_expanded.shape: (1, 6, 920, 1)\r\nmask_token.shape: (1, 1, 1, 128)\r\n",,terminal_output +5785,4370331,"TERMINAL",0,0,"2025-07-03 17:28:27.137380: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +5786,4370834,"TERMINAL",0,0,"730",,terminal_output +5787,4371891,"TERMINAL",0,0,"81",,terminal_output +5788,4372932,"TERMINAL",0,0,"92",,terminal_output +5789,4374017,"TERMINAL",0,0,"303",,terminal_output +5790,4375042,"TERMINAL",0,0,"14",,terminal_output +5791,4376167,"TERMINAL",0,0,"25",,terminal_output +5792,4377192,"TERMINAL",0,0,"37",,terminal_output +5793,4378215,"TERMINAL",0,0,"58",,terminal_output +5794,4379239,"TERMINAL",0,0,"69",,terminal_output +5795,4380279,"TERMINAL",0,0,"740",,terminal_output +5796,4381291,"TERMINAL",0,0,"81",,terminal_output +5797,4382340,"TERMINAL",0,0,"92",,terminal_output +5798,4383114,"TERMINAL",0,0,"SSIM: 0.43240007758140564\r\n",,terminal_output +5799,4383386,"TERMINAL",0,0,"403",,terminal_output +5800,4384435,"TERMINAL",0,0,"14",,terminal_output +5801,4385486,"TERMINAL",0,0,"25",,terminal_output +5802,4385594,"TERMINAL",0,0,"]0;tum_cte0515@hkn0401:~/Projects/jafar[?2004h(jafar) [tum_cte0515@hkn0401 jafar]$ ",,terminal_output +5803,4386526,"TERMINAL",0,0,"36",,terminal_output +5804,4387685,"TERMINAL",0,0,"47",,terminal_output +5805,4388617,"TERMINAL",0,0,"58",,terminal_output +5806,4389686,"TERMINAL",0,0,"69",,terminal_output +5807,4390811,"TERMINAL",0,0,"750",,terminal_output +5808,4391835,"TERMINAL",0,0,"81",,terminal_output +5809,4391945,"TERMINAL",0,0,"watch",,terminal_focus +5810,4392860,"TERMINAL",0,0,"92",,terminal_output +5811,4393840,"TERMINAL",0,0,"503",,terminal_output +5812,4394916,"TERMINAL",0,0,"Every 1.0s: squeue --mehkn1993.localdomain: Thu Jul 3 17:28:51 2025JOBID PARTITION NAME USER ST\tTIME NODES NODELIST(REASON)3315885 dev_accel interact tum_cte0 R12:54\t 1 hkn0401",,terminal_output +5813,4394926,"TERMINAL",0,0,"\r(jafar) [tum_cte0515@hkn0401 jafar]$ ",,terminal_output +5814,4396126,"TERMINAL",0,0,"25",,terminal_output +5815,4396989,"TERMINAL",0,0,"36",,terminal_output +5816,4397994,"TERMINAL",0,0,"47",,terminal_output +5817,4399035,"TERMINAL",0,0,"58",,terminal_output +5818,4400128,"TERMINAL",0,0,"69",,terminal_output +5819,4401155,"TERMINAL",0,0,"73:01",,terminal_output +5820,4402191,"TERMINAL",0,0,"92",,terminal_output +5821,4403243,"TERMINAL",0,0,"9:003",,terminal_output +5822,4404289,"TERMINAL",0,0,"14",,terminal_output +5823,4405353,"TERMINAL",0,0,"25",,terminal_output +5824,4406365,"TERMINAL",0,0,"36",,terminal_output +5825,4407402,"TERMINAL",0,0,"47",,terminal_output 
+5826,4408448,"TERMINAL",0,0,"58",,terminal_output +5827,4409518,"TERMINAL",0,0,"69",,terminal_output +5828,4410697,"TERMINAL",0,0,"710",,terminal_output +5829,4411621,"TERMINAL",0,0,"81",,terminal_output +5830,4412726,"TERMINAL",0,0,"92",,terminal_output +5831,4413749,"TERMINAL",0,0,"103",,terminal_output +5832,4414774,"TERMINAL",0,0,"14",,terminal_output +5833,4415802,"TERMINAL",0,0,"25",,terminal_output +5834,4416924,"TERMINAL",0,0,"36",,terminal_output +5835,4417980,"TERMINAL",0,0,"47",,terminal_output +5836,4418976,"TERMINAL",0,0,"58",,terminal_output +5837,4419996,"TERMINAL",0,0,"69",,terminal_output +5838,4421068,"TERMINAL",0,0,"720",,terminal_output +5839,4422139,"TERMINAL",0,0,"81",,terminal_output +5840,4423122,"TERMINAL",0,0,"93",,terminal_output +5841,4424174,"TERMINAL",0,0,"214",,terminal_output +5842,4425205,"TERMINAL",0,0,"25",,terminal_output +5843,4426350,"TERMINAL",0,0,"36",,terminal_output +5844,4427293,"TERMINAL",0,0,"47",,terminal_output +5845,4428395,"TERMINAL",0,0,"58",,terminal_output +5846,4429383,"TERMINAL",0,0,"69",,terminal_output +5847,4430427,"TERMINAL",0,0,"730",,terminal_output +5848,4431470,"TERMINAL",0,0,"81",,terminal_output +5849,4432592,"TERMINAL",0,0,"92",,terminal_output +5850,4433614,"TERMINAL",0,0,"303",,terminal_output +5851,4434741,"TERMINAL",0,0,"14",,terminal_output +5852,4435429,"TERMINAL",0,0,"srun",,terminal_focus +5853,4435678,"TERMINAL",0,0,"25",,terminal_output +5854,4436344,"TERMINAL",0,0,"p",,terminal_output +5855,4436427,"TERMINAL",0,0,"[?25lw[?25h[?25ld[?25h",,terminal_output +5856,4436612,"TERMINAL",0,0,"\r\n[?2004l\r/home/hk-project-p0023960/tum_cte0515/Projects/jafar\r\n]0;tum_cte0515@hkn0401:~/Projects/jafar[?2004h(jafar) [tum_cte0515@hkn0401 jafar]$ ",,terminal_output +5857,4436719,"TERMINAL",0,0,"36",,terminal_output +5858,4437846,"TERMINAL",0,0,"47",,terminal_output +5859,4438837,"TERMINAL",0,0,"58",,terminal_output +5860,4439975,"TERMINAL",0,0,"69",,terminal_output +5861,4440989,"TERMINAL",0,0,"740",,terminal_output +5862,4442011,"TERMINAL",0,0,"81",,terminal_output +5863,4443013,"TERMINAL",0,0,"92",,terminal_output +5864,4444165,"TERMINAL",0,0,"403",,terminal_output +5865,4445185,"TERMINAL",0,0,"15",,terminal_output +5866,4446210,"TERMINAL",0,0,"36",,terminal_output +5867,4447198,"TERMINAL",0,0,"47",,terminal_output +5868,4448245,"TERMINAL",0,0,"58",,terminal_output +5869,4449316,"TERMINAL",0,0,"69",,terminal_output +5870,4450339,"TERMINAL",0,0,"750",,terminal_output +5871,4451391,"TERMINAL",0,0,"81",,terminal_output +5872,4452436,"TERMINAL",0,0,"92",,terminal_output +5873,4453481,"TERMINAL",0,0,"503",,terminal_output +5874,4454523,"TERMINAL",0,0,"14",,terminal_output +5875,4455570,"TERMINAL",0,0,"25",,terminal_output +5876,4456655,"TERMINAL",0,0,"36",,terminal_output +5877,4457678,"TERMINAL",0,0,"47",,terminal_output +5878,4458807,"TERMINAL",0,0,"58",,terminal_output +5879,4459832,"TERMINAL",0,0,"69",,terminal_output +5880,4460805,"TERMINAL",0,0,"74:00",,terminal_output +5881,4461880,"TERMINAL",0,0,"81",,terminal_output +5882,4462897,"TERMINAL",0,0,"92",,terminal_output +5883,4464029,"TERMINAL",0,0,"30:003",,terminal_output +5884,4465051,"TERMINAL",0,0,"14",,terminal_output +5885,4466188,"TERMINAL",0,0,"25",,terminal_output +5886,4467087,"TERMINAL",0,0,"36",,terminal_output +5887,4468438,"TERMINAL",0,0,"48",,terminal_output +5888,4469249,"TERMINAL",0,0,"69",,terminal_output +5889,4470274,"TERMINAL",0,0,"710",,terminal_output +5890,4471298,"TERMINAL",0,0,"81",,terminal_output +5891,4472310,"TERMINAL",0,0,"92",,terminal_output 
+5892,4472824,"genie.py",0,0,"",python,tab +5893,4472825,"genie.py",3454,0,"",python,selection_mouse +5894,4472882,"genie.py",3453,0,"",python,selection_command +5895,4473283,"genie.py",3480,0,"",python,selection_mouse +5896,4473406,"TERMINAL",0,0,"103",,terminal_output +5897,4474375,"genie.py",3716,0,"",python,selection_mouse +5898,4474453,"TERMINAL",0,0,"14",,terminal_output +5899,4475105,"genie.py",4065,0,"",python,selection_mouse +5900,4475112,"genie.py",4064,0,"",python,selection_command +5901,4475498,"TERMINAL",0,0,"25",,terminal_output +5902,4475863,"genie.py",3399,0,"",python,selection_mouse +5903,4475874,"genie.py",3398,0,"",python,selection_command +5904,4476367,"genie.py",3455,0,"",python,selection_mouse +5905,4476492,"TERMINAL",0,0,"36",,terminal_output +5906,4477545,"TERMINAL",0,0,"47",,terminal_output +5907,4478673,"TERMINAL",0,0,"58",,terminal_output +5908,4479696,"TERMINAL",0,0,"69",,terminal_output +5909,4480720,"TERMINAL",0,0,"720",,terminal_output +5910,4481744,"TERMINAL",0,0,"81",,terminal_output +5911,4482871,"TERMINAL",0,0,"92",,terminal_output +5912,4483035,"genie.py",3412,0,"",python,selection_mouse +5913,4483223,"genie.py",3411,1,"T",python,selection_mouse +5914,4483896,"TERMINAL",0,0,"203",,terminal_output +5915,4484187,"genie.py",3455,0,"",python,selection_mouse +5916,4484789,"genie.py",3454,0,"",python,selection_mouse +5917,4484790,"genie.py",3453,0,"",python,selection_command +5918,4484875,"TERMINAL",0,0,"14",,terminal_output +5919,4485750,"genie.py",3455,0,"",python,selection_command +5920,4485911,"TERMINAL",0,0,"25",,terminal_output +5921,4486966,"TERMINAL",0,0,"36",,terminal_output +5922,4487514,"genie.py",3454,0,"",python,selection_mouse +5923,4487525,"genie.py",3453,0,"",python,selection_command +5924,4488003,"TERMINAL",0,0,"47",,terminal_output +5925,4489117,"TERMINAL",0,0,"58",,terminal_output +5926,4490141,"TERMINAL",0,0,"69",,terminal_output +5927,4491380,"TERMINAL",0,0,"831",,terminal_output +5928,4492225,"TERMINAL",0,0,"92",,terminal_output +5929,4493251,"TERMINAL",0,0,"303",,terminal_output +5930,4494338,"TERMINAL",0,0,"14",,terminal_output +5931,4494706,"genie.py",3455,0,"",python,selection_command +5932,4495364,"TERMINAL",0,0,"25",,terminal_output +5933,4495503,"genie.py",3455,0,"\n",python,content +5934,4495787,"genie.py",3456,0," ",python,content +5935,4496124,"genie.py",3460,0," ",python,content +5936,4496413,"TERMINAL",0,0,"36",,terminal_output +5937,4496743,"genie.py",3464,0,"w",python,content +5938,4496744,"genie.py",3465,0,"",python,selection_keyboard +5939,4496845,"genie.py",3465,0,"h",python,content +5940,4496846,"genie.py",3466,0,"",python,selection_keyboard +5941,4497329,"genie.py",3466,0,"i",python,content +5942,4497330,"genie.py",3467,0,"",python,selection_keyboard +5943,4497446,"TERMINAL",0,0,"47",,terminal_output +5944,4497609,"genie.py",3467,0,"e",python,content +5945,4497610,"genie.py",3468,0,"",python,selection_keyboard +5946,4498128,"genie.py",3467,1,"",python,content +5947,4498238,"genie.py",3467,0,"l",python,content +5948,4498239,"genie.py",3468,0,"",python,selection_keyboard +5949,4498372,"genie.py",3468,0,"e",python,content +5950,4498373,"genie.py",3469,0,"",python,selection_keyboard +5951,4498500,"TERMINAL",0,0,"58",,terminal_output +5952,4499527,"TERMINAL",0,0,"69",,terminal_output +5953,4499675,"genie.py",3469,0,"()",python,content +5954,4499675,"genie.py",3470,0,"",python,selection_keyboard +5955,4500080,"genie.py",3470,0,"T",python,content +5956,4500081,"genie.py",3471,0,"",python,selection_keyboard 
+5957,4500580,"TERMINAL",0,0,"740",,terminal_output +5958,4500704,"genie.py",3471,0,"<",python,content +5959,4500705,"genie.py",3472,0,"",python,selection_keyboard +5960,4501614,"TERMINAL",0,0,"81",,terminal_output +5961,4501745,"genie.py",3472,0,"A",python,content +5962,4501745,"genie.py",3473,0,"",python,selection_keyboard +5963,4502407,"genie.py",3472,1,"",python,content +5964,4502723,"TERMINAL",0,0,"92",,terminal_output +5965,4503043,"genie.py",3472,0,"S",python,content +5966,4503044,"genie.py",3473,0,"",python,selection_keyboard +5967,4503205,"genie.py",3474,0,"",python,selection_command +5968,4503711,"TERMINAL",0,0,"403",,terminal_output +5969,4503834,"genie.py",3474,0,":",python,content +5970,4503835,"genie.py",3475,0,"",python,selection_keyboard +5971,4504213,"genie.py",3474,0,"",python,selection_command +5972,4504683,"genie.py",3494,0,"",python,selection_command +5973,4504770,"TERMINAL",0,0,"14",,terminal_output +5974,4505398,"genie.py",3476,47," # begin potential forloop (from T to S)",python,selection_command +5975,4505745,"genie.py",3476,100," # begin potential forloop (from T to S)\n print(""token_idxs shape:"", token_idxs.shape)",python,selection_command +5976,4505823,"TERMINAL",0,0,"25",,terminal_output +5977,4506296,"genie.py",3476,134," # begin potential forloop (from T to S)\n print(""token_idxs shape:"", token_idxs.shape)\n pad_shape = (B, S - T, N)",python,selection_command +5978,4506391,"genie.py",3476,193," # begin potential forloop (from T to S)\n print(""token_idxs shape:"", token_idxs.shape)\n pad_shape = (B, S - T, N)\n pad = jnp.zeros(pad_shape, dtype=token_idxs.dtype)",python,selection_command +5979,4506392,"genie.py",3476,275," # begin potential forloop (from T to S)\n print(""token_idxs shape:"", token_idxs.shape)\n pad_shape = (B, S - T, N)\n pad = jnp.zeros(pad_shape, dtype=token_idxs.dtype)\n token_idxs = jnp.concatenate([token_idxs, pad], axis=1) # shape (B, S, N)",python,selection_command +5980,4506526,"genie.py",3476,276," # begin potential forloop (from T to S)\n print(""token_idxs shape:"", token_idxs.shape)\n pad_shape = (B, S - T, N)\n pad = jnp.zeros(pad_shape, dtype=token_idxs.dtype)\n token_idxs = jnp.concatenate([token_idxs, pad], axis=1) # shape (B, S, N)\n",python,selection_command +5981,4506526,"genie.py",3476,329," # begin potential forloop (from T to S)\n print(""token_idxs shape:"", token_idxs.shape)\n pad_shape = (B, S - T, N)\n pad = jnp.zeros(pad_shape, dtype=token_idxs.dtype)\n token_idxs = jnp.concatenate([token_idxs, pad], axis=1) # shape (B, S, N)\n\n print(""token_idxs shape:"", token_idxs.shape)",python,selection_command +5982,4506526,"genie.py",3476,400," # begin potential forloop (from T to S)\n print(""token_idxs shape:"", token_idxs.shape)\n pad_shape = (B, S - T, N)\n pad = jnp.zeros(pad_shape, dtype=token_idxs.dtype)\n token_idxs = jnp.concatenate([token_idxs, pad], axis=1) # shape (B, S, N)\n\n print(""token_idxs shape:"", token_idxs.shape)\n action_tokens = self.lam.vq.get_codes(batch[""latent_actions""])",python,selection_command +5983,4506745,"genie.py",3476,474," # begin potential forloop (from T to S)\n print(""token_idxs shape:"", token_idxs.shape)\n pad_shape = (B, S - T, N)\n pad = jnp.zeros(pad_shape, dtype=token_idxs.dtype)\n token_idxs = jnp.concatenate([token_idxs, pad], axis=1) # shape (B, S, N)\n\n print(""token_idxs shape:"", token_idxs.shape)\n action_tokens = self.lam.vq.get_codes(batch[""latent_actions""])\n print(""action_tokens.shape:"", action_tokens.shape) # (B, S, A, D)",python,selection_command 
+5984,4506745,"genie.py",3476,475," # begin potential forloop (from T to S)\n print(""token_idxs shape:"", token_idxs.shape)\n pad_shape = (B, S - T, N)\n pad = jnp.zeros(pad_shape, dtype=token_idxs.dtype)\n token_idxs = jnp.concatenate([token_idxs, pad], axis=1) # shape (B, S, N)\n\n print(""token_idxs shape:"", token_idxs.shape)\n action_tokens = self.lam.vq.get_codes(batch[""latent_actions""])\n print(""action_tokens.shape:"", action_tokens.shape) # (B, S, A, D)\n",python,selection_command +5985,4506745,"genie.py",3476,537," # begin potential forloop (from T to S)\n print(""token_idxs shape:"", token_idxs.shape)\n pad_shape = (B, S - T, N)\n pad = jnp.zeros(pad_shape, dtype=token_idxs.dtype)\n token_idxs = jnp.concatenate([token_idxs, pad], axis=1) # shape (B, S, N)\n\n print(""token_idxs shape:"", token_idxs.shape)\n action_tokens = self.lam.vq.get_codes(batch[""latent_actions""])\n print(""action_tokens.shape:"", action_tokens.shape) # (B, S, A, D)\n\n # Create a mask that is 1 (True) where we just padded",python,selection_command +5986,4506746,"genie.py",3476,609," # begin potential forloop (from T to S)\n print(""token_idxs shape:"", token_idxs.shape)\n pad_shape = (B, S - T, N)\n pad = jnp.zeros(pad_shape, dtype=token_idxs.dtype)\n token_idxs = jnp.concatenate([token_idxs, pad], axis=1) # shape (B, S, N)\n\n print(""token_idxs shape:"", token_idxs.shape)\n action_tokens = self.lam.vq.get_codes(batch[""latent_actions""])\n print(""action_tokens.shape:"", action_tokens.shape) # (B, S, A, D)\n\n # Create a mask that is 1 (True) where we just padded\n # token_idxs shape: (B, S, N), T = original length, S = seq_len",python,selection_command +5987,4506746,"genie.py",3476,668," # begin potential forloop (from T to S)\n print(""token_idxs shape:"", token_idxs.shape)\n pad_shape = (B, S - T, N)\n pad = jnp.zeros(pad_shape, dtype=token_idxs.dtype)\n token_idxs = jnp.concatenate([token_idxs, pad], axis=1) # shape (B, S, N)\n\n print(""token_idxs shape:"", token_idxs.shape)\n action_tokens = self.lam.vq.get_codes(batch[""latent_actions""])\n print(""action_tokens.shape:"", action_tokens.shape) # (B, S, A, D)\n\n # Create a mask that is 1 (True) where we just padded\n # token_idxs shape: (B, S, N), T = original length, S = seq_len\n # mask is True for padded positions (i.e., t >= T)",python,selection_command +5988,4506746,"genie.py",3476,738," # begin potential forloop (from T to S)\n print(""token_idxs shape:"", token_idxs.shape)\n pad_shape = (B, S - T, N)\n pad = jnp.zeros(pad_shape, dtype=token_idxs.dtype)\n token_idxs = jnp.concatenate([token_idxs, pad], axis=1) # shape (B, S, N)\n\n print(""token_idxs shape:"", token_idxs.shape)\n action_tokens = self.lam.vq.get_codes(batch[""latent_actions""])\n print(""action_tokens.shape:"", action_tokens.shape) # (B, S, A, D)\n\n # Create a mask that is 1 (True) where we just padded\n # token_idxs shape: (B, S, N), T = original length, S = seq_len\n # mask is True for padded positions (i.e., t >= T)\n mask = (jnp.arange(S)[None, :, None] >= T) # shape (1, S, 1)",python,selection_command +5989,4506832,"genie.py",3476,808," # begin potential forloop (from T to S)\n print(""token_idxs shape:"", token_idxs.shape)\n pad_shape = (B, S - T, N)\n pad = jnp.zeros(pad_shape, dtype=token_idxs.dtype)\n token_idxs = jnp.concatenate([token_idxs, pad], axis=1) # shape (B, S, N)\n\n print(""token_idxs shape:"", token_idxs.shape)\n action_tokens = self.lam.vq.get_codes(batch[""latent_actions""])\n print(""action_tokens.shape:"", action_tokens.shape) # (B, S, A, D)\n\n 
# Create a mask that is 1 (True) where we just padded\n # token_idxs shape: (B, S, N), T = original length, S = seq_len\n # mask is True for padded positions (i.e., t >= T)\n mask = (jnp.arange(S)[None, :, None] >= T) # shape (1, S, 1)\n mask = jnp.broadcast_to(mask, (B, S, N)) # shape (B, S, N)",python,selection_command +5990,4506833,"genie.py",3476,846," # begin potential forloop (from T to S)\n print(""token_idxs shape:"", token_idxs.shape)\n pad_shape = (B, S - T, N)\n pad = jnp.zeros(pad_shape, dtype=token_idxs.dtype)\n token_idxs = jnp.concatenate([token_idxs, pad], axis=1) # shape (B, S, N)\n\n print(""token_idxs shape:"", token_idxs.shape)\n action_tokens = self.lam.vq.get_codes(batch[""latent_actions""])\n print(""action_tokens.shape:"", action_tokens.shape) # (B, S, A, D)\n\n # Create a mask that is 1 (True) where we just padded\n # token_idxs shape: (B, S, N), T = original length, S = seq_len\n # mask is True for padded positions (i.e., t >= T)\n mask = (jnp.arange(S)[None, :, None] >= T) # shape (1, S, 1)\n mask = jnp.broadcast_to(mask, (B, S, N)) # shape (B, S, N)\n init_mask = mask.astype(bool)",python,selection_command +5991,4506833,"genie.py",3476,847," # begin potential forloop (from T to S)\n print(""token_idxs shape:"", token_idxs.shape)\n pad_shape = (B, S - T, N)\n pad = jnp.zeros(pad_shape, dtype=token_idxs.dtype)\n token_idxs = jnp.concatenate([token_idxs, pad], axis=1) # shape (B, S, N)\n\n print(""token_idxs shape:"", token_idxs.shape)\n action_tokens = self.lam.vq.get_codes(batch[""latent_actions""])\n print(""action_tokens.shape:"", action_tokens.shape) # (B, S, A, D)\n\n # Create a mask that is 1 (True) where we just padded\n # token_idxs shape: (B, S, N), T = original length, S = seq_len\n # mask is True for padded positions (i.e., t >= T)\n mask = (jnp.arange(S)[None, :, None] >= T) # shape (1, S, 1)\n mask = jnp.broadcast_to(mask, (B, S, N)) # shape (B, S, N)\n init_mask = mask.astype(bool)\n",python,selection_command +5992,4506833,"genie.py",3476,911," # begin potential forloop (from T to S)\n print(""token_idxs shape:"", token_idxs.shape)\n pad_shape = (B, S - T, N)\n pad = jnp.zeros(pad_shape, dtype=token_idxs.dtype)\n token_idxs = jnp.concatenate([token_idxs, pad], axis=1) # shape (B, S, N)\n\n print(""token_idxs shape:"", token_idxs.shape)\n action_tokens = self.lam.vq.get_codes(batch[""latent_actions""])\n print(""action_tokens.shape:"", action_tokens.shape) # (B, S, A, D)\n\n # Create a mask that is 1 (True) where we just padded\n # token_idxs shape: (B, S, N), T = original length, S = seq_len\n # mask is True for padded positions (i.e., t >= T)\n mask = (jnp.arange(S)[None, :, None] >= T) # shape (1, S, 1)\n mask = jnp.broadcast_to(mask, (B, S, N)) # shape (B, S, N)\n init_mask = mask.astype(bool)\n\n assert init_mask.shape == (B, S, N), ""Wrong mask shape""",python,selection_command +5993,4506833,"genie.py",3476,912," # begin potential forloop (from T to S)\n print(""token_idxs shape:"", token_idxs.shape)\n pad_shape = (B, S - T, N)\n pad = jnp.zeros(pad_shape, dtype=token_idxs.dtype)\n token_idxs = jnp.concatenate([token_idxs, pad], axis=1) # shape (B, S, N)\n\n print(""token_idxs shape:"", token_idxs.shape)\n action_tokens = self.lam.vq.get_codes(batch[""latent_actions""])\n print(""action_tokens.shape:"", action_tokens.shape) # (B, S, A, D)\n\n # Create a mask that is 1 (True) where we just padded\n # token_idxs shape: (B, S, N), T = original length, S = seq_len\n # mask is True for padded positions (i.e., t >= T)\n mask = (jnp.arange(S)[None, :, 
None] >= T) # shape (1, S, 1)\n mask = jnp.broadcast_to(mask, (B, S, N)) # shape (B, S, N)\n init_mask = mask.astype(bool)\n\n assert init_mask.shape == (B, S, N), ""Wrong mask shape""\n",python,selection_command +5994,4506834,"genie.py",3476,949," # begin potential forloop (from T to S)\n print(""token_idxs shape:"", token_idxs.shape)\n pad_shape = (B, S - T, N)\n pad = jnp.zeros(pad_shape, dtype=token_idxs.dtype)\n token_idxs = jnp.concatenate([token_idxs, pad], axis=1) # shape (B, S, N)\n\n print(""token_idxs shape:"", token_idxs.shape)\n action_tokens = self.lam.vq.get_codes(batch[""latent_actions""])\n print(""action_tokens.shape:"", action_tokens.shape) # (B, S, A, D)\n\n # Create a mask that is 1 (True) where we just padded\n # token_idxs shape: (B, S, N), T = original length, S = seq_len\n # mask is True for padded positions (i.e., t >= T)\n mask = (jnp.arange(S)[None, :, None] >= T) # shape (1, S, 1)\n mask = jnp.broadcast_to(mask, (B, S, N)) # shape (B, S, N)\n init_mask = mask.astype(bool)\n\n assert init_mask.shape == (B, S, N), ""Wrong mask shape""\n\n # --- Initialize MaskGIT ---",python,selection_command +5995,4506986,"genie.py",3476,972," # begin potential forloop (from T to S)\n print(""token_idxs shape:"", token_idxs.shape)\n pad_shape = (B, S - T, N)\n pad = jnp.zeros(pad_shape, dtype=token_idxs.dtype)\n token_idxs = jnp.concatenate([token_idxs, pad], axis=1) # shape (B, S, N)\n\n print(""token_idxs shape:"", token_idxs.shape)\n action_tokens = self.lam.vq.get_codes(batch[""latent_actions""])\n print(""action_tokens.shape:"", action_tokens.shape) # (B, S, A, D)\n\n # Create a mask that is 1 (True) where we just padded\n # token_idxs shape: (B, S, N), T = original length, S = seq_len\n # mask is True for padded positions (i.e., t >= T)\n mask = (jnp.arange(S)[None, :, None] >= T) # shape (1, S, 1)\n mask = jnp.broadcast_to(mask, (B, S, N)) # shape (B, S, N)\n init_mask = mask.astype(bool)\n\n assert init_mask.shape == (B, S, N), ""Wrong mask shape""\n\n # --- Initialize MaskGIT ---\n init_carry = (",python,selection_command +5996,4506987,"TERMINAL",0,0,"36",,terminal_output +5997,4507072,"genie.py",3476,998," # begin potential forloop (from T to S)\n print(""token_idxs shape:"", token_idxs.shape)\n pad_shape = (B, S - T, N)\n pad = jnp.zeros(pad_shape, dtype=token_idxs.dtype)\n token_idxs = jnp.concatenate([token_idxs, pad], axis=1) # shape (B, S, N)\n\n print(""token_idxs shape:"", token_idxs.shape)\n action_tokens = self.lam.vq.get_codes(batch[""latent_actions""])\n print(""action_tokens.shape:"", action_tokens.shape) # (B, S, A, D)\n\n # Create a mask that is 1 (True) where we just padded\n # token_idxs shape: (B, S, N), T = original length, S = seq_len\n # mask is True for padded positions (i.e., t >= T)\n mask = (jnp.arange(S)[None, :, None] >= T) # shape (1, S, 1)\n mask = jnp.broadcast_to(mask, (B, S, N)) # shape (B, S, N)\n init_mask = mask.astype(bool)\n\n assert init_mask.shape == (B, S, N), ""Wrong mask shape""\n\n # --- Initialize MaskGIT ---\n init_carry = (\n batch[""rng""],",python,selection_command +5998,4507184,"genie.py",3476,1021," # begin potential forloop (from T to S)\n print(""token_idxs shape:"", token_idxs.shape)\n pad_shape = (B, S - T, N)\n pad = jnp.zeros(pad_shape, dtype=token_idxs.dtype)\n token_idxs = jnp.concatenate([token_idxs, pad], axis=1) # shape (B, S, N)\n\n print(""token_idxs shape:"", token_idxs.shape)\n action_tokens = self.lam.vq.get_codes(batch[""latent_actions""])\n print(""action_tokens.shape:"", action_tokens.shape) # (B, S, 
A, D)\n\n # Create a mask that is 1 (True) where we just padded\n # token_idxs shape: (B, S, N), T = original length, S = seq_len\n # mask is True for padded positions (i.e., t >= T)\n mask = (jnp.arange(S)[None, :, None] >= T) # shape (1, S, 1)\n mask = jnp.broadcast_to(mask, (B, S, N)) # shape (B, S, N)\n init_mask = mask.astype(bool)\n\n assert init_mask.shape == (B, S, N), ""Wrong mask shape""\n\n # --- Initialize MaskGIT ---\n init_carry = (\n batch[""rng""],\n init_mask,",python,selection_command +5999,4507340,"genie.py",3476,1045," # begin potential forloop (from T to S)\n print(""token_idxs shape:"", token_idxs.shape)\n pad_shape = (B, S - T, N)\n pad = jnp.zeros(pad_shape, dtype=token_idxs.dtype)\n token_idxs = jnp.concatenate([token_idxs, pad], axis=1) # shape (B, S, N)\n\n print(""token_idxs shape:"", token_idxs.shape)\n action_tokens = self.lam.vq.get_codes(batch[""latent_actions""])\n print(""action_tokens.shape:"", action_tokens.shape) # (B, S, A, D)\n\n # Create a mask that is 1 (True) where we just padded\n # token_idxs shape: (B, S, N), T = original length, S = seq_len\n # mask is True for padded positions (i.e., t >= T)\n mask = (jnp.arange(S)[None, :, None] >= T) # shape (1, S, 1)\n mask = jnp.broadcast_to(mask, (B, S, N)) # shape (B, S, N)\n init_mask = mask.astype(bool)\n\n assert init_mask.shape == (B, S, N), ""Wrong mask shape""\n\n # --- Initialize MaskGIT ---\n init_carry = (\n batch[""rng""],\n init_mask,\n token_idxs,",python,selection_command +6000,4507474,"genie.py",3476,1072," # begin potential forloop (from T to S)\n print(""token_idxs shape:"", token_idxs.shape)\n pad_shape = (B, S - T, N)\n pad = jnp.zeros(pad_shape, dtype=token_idxs.dtype)\n token_idxs = jnp.concatenate([token_idxs, pad], axis=1) # shape (B, S, N)\n\n print(""token_idxs shape:"", token_idxs.shape)\n action_tokens = self.lam.vq.get_codes(batch[""latent_actions""])\n print(""action_tokens.shape:"", action_tokens.shape) # (B, S, A, D)\n\n # Create a mask that is 1 (True) where we just padded\n # token_idxs shape: (B, S, N), T = original length, S = seq_len\n # mask is True for padded positions (i.e., t >= T)\n mask = (jnp.arange(S)[None, :, None] >= T) # shape (1, S, 1)\n mask = jnp.broadcast_to(mask, (B, S, N)) # shape (B, S, N)\n init_mask = mask.astype(bool)\n\n assert init_mask.shape == (B, S, N), ""Wrong mask shape""\n\n # --- Initialize MaskGIT ---\n init_carry = (\n batch[""rng""],\n init_mask,\n token_idxs,\n action_tokens,",python,selection_command +6001,4507646,"genie.py",3476,1082," # begin potential forloop (from T to S)\n print(""token_idxs shape:"", token_idxs.shape)\n pad_shape = (B, S - T, N)\n pad = jnp.zeros(pad_shape, dtype=token_idxs.dtype)\n token_idxs = jnp.concatenate([token_idxs, pad], axis=1) # shape (B, S, N)\n\n print(""token_idxs shape:"", token_idxs.shape)\n action_tokens = self.lam.vq.get_codes(batch[""latent_actions""])\n print(""action_tokens.shape:"", action_tokens.shape) # (B, S, A, D)\n\n # Create a mask that is 1 (True) where we just padded\n # token_idxs shape: (B, S, N), T = original length, S = seq_len\n # mask is True for padded positions (i.e., t >= T)\n mask = (jnp.arange(S)[None, :, None] >= T) # shape (1, S, 1)\n mask = jnp.broadcast_to(mask, (B, S, N)) # shape (B, S, N)\n init_mask = mask.astype(bool)\n\n assert init_mask.shape == (B, S, N), ""Wrong mask shape""\n\n # --- Initialize MaskGIT ---\n init_carry = (\n batch[""rng""],\n init_mask,\n token_idxs,\n action_tokens,\n )",python,selection_command +6002,4507761,"genie.py",3476,1113," # begin 
potential forloop (from T to S)\n print(""token_idxs shape:"", token_idxs.shape)\n pad_shape = (B, S - T, N)\n pad = jnp.zeros(pad_shape, dtype=token_idxs.dtype)\n token_idxs = jnp.concatenate([token_idxs, pad], axis=1) # shape (B, S, N)\n\n print(""token_idxs shape:"", token_idxs.shape)\n action_tokens = self.lam.vq.get_codes(batch[""latent_actions""])\n print(""action_tokens.shape:"", action_tokens.shape) # (B, S, A, D)\n\n # Create a mask that is 1 (True) where we just padded\n # token_idxs shape: (B, S, N), T = original length, S = seq_len\n # mask is True for padded positions (i.e., t >= T)\n mask = (jnp.arange(S)[None, :, None] >= T) # shape (1, S, 1)\n mask = jnp.broadcast_to(mask, (B, S, N)) # shape (B, S, N)\n init_mask = mask.astype(bool)\n\n assert init_mask.shape == (B, S, N), ""Wrong mask shape""\n\n # --- Initialize MaskGIT ---\n init_carry = (\n batch[""rng""],\n init_mask,\n token_idxs,\n action_tokens,\n )\n MaskGITLoop = nn.scan(",python,selection_command +6003,4507879,"TERMINAL",0,0,"47",,terminal_output +6004,4508264,"genie.py",3476,1143," # begin potential forloop (from T to S)\n print(""token_idxs shape:"", token_idxs.shape)\n pad_shape = (B, S - T, N)\n pad = jnp.zeros(pad_shape, dtype=token_idxs.dtype)\n token_idxs = jnp.concatenate([token_idxs, pad], axis=1) # shape (B, S, N)\n\n print(""token_idxs shape:"", token_idxs.shape)\n action_tokens = self.lam.vq.get_codes(batch[""latent_actions""])\n print(""action_tokens.shape:"", action_tokens.shape) # (B, S, A, D)\n\n # Create a mask that is 1 (True) where we just padded\n # token_idxs shape: (B, S, N), T = original length, S = seq_len\n # mask is True for padded positions (i.e., t >= T)\n mask = (jnp.arange(S)[None, :, None] >= T) # shape (1, S, 1)\n mask = jnp.broadcast_to(mask, (B, S, N)) # shape (B, S, N)\n init_mask = mask.astype(bool)\n\n assert init_mask.shape == (B, S, N), ""Wrong mask shape""\n\n # --- Initialize MaskGIT ---\n init_carry = (\n batch[""rng""],\n init_mask,\n token_idxs,\n action_tokens,\n )\n MaskGITLoop = nn.scan(\n MaskGITStepMihir,",python,selection_command +6005,4508385,"genie.py",3476,1184," # begin potential forloop (from T to S)\n print(""token_idxs shape:"", token_idxs.shape)\n pad_shape = (B, S - T, N)\n pad = jnp.zeros(pad_shape, dtype=token_idxs.dtype)\n token_idxs = jnp.concatenate([token_idxs, pad], axis=1) # shape (B, S, N)\n\n print(""token_idxs shape:"", token_idxs.shape)\n action_tokens = self.lam.vq.get_codes(batch[""latent_actions""])\n print(""action_tokens.shape:"", action_tokens.shape) # (B, S, A, D)\n\n # Create a mask that is 1 (True) where we just padded\n # token_idxs shape: (B, S, N), T = original length, S = seq_len\n # mask is True for padded positions (i.e., t >= T)\n mask = (jnp.arange(S)[None, :, None] >= T) # shape (1, S, 1)\n mask = jnp.broadcast_to(mask, (B, S, N)) # shape (B, S, N)\n init_mask = mask.astype(bool)\n\n assert init_mask.shape == (B, S, N), ""Wrong mask shape""\n\n # --- Initialize MaskGIT ---\n init_carry = (\n batch[""rng""],\n init_mask,\n token_idxs,\n action_tokens,\n )\n MaskGITLoop = nn.scan(\n MaskGITStepMihir,\n variable_broadcast=""params"",",python,selection_command +6006,4508385,"genie.py",3476,1226," # begin potential forloop (from T to S)\n print(""token_idxs shape:"", token_idxs.shape)\n pad_shape = (B, S - T, N)\n pad = jnp.zeros(pad_shape, dtype=token_idxs.dtype)\n token_idxs = jnp.concatenate([token_idxs, pad], axis=1) # shape (B, S, N)\n\n print(""token_idxs shape:"", token_idxs.shape)\n action_tokens = 
self.lam.vq.get_codes(batch[""latent_actions""])\n print(""action_tokens.shape:"", action_tokens.shape) # (B, S, A, D)\n\n # Create a mask that is 1 (True) where we just padded\n # token_idxs shape: (B, S, N), T = original length, S = seq_len\n # mask is True for padded positions (i.e., t >= T)\n mask = (jnp.arange(S)[None, :, None] >= T) # shape (1, S, 1)\n mask = jnp.broadcast_to(mask, (B, S, N)) # shape (B, S, N)\n init_mask = mask.astype(bool)\n\n assert init_mask.shape == (B, S, N), ""Wrong mask shape""\n\n # --- Initialize MaskGIT ---\n init_carry = (\n batch[""rng""],\n init_mask,\n token_idxs,\n action_tokens,\n )\n MaskGITLoop = nn.scan(\n MaskGITStepMihir,\n variable_broadcast=""params"",\n split_rngs={""params"": False},",python,selection_command +6007,4508498,"genie.py",3476,1249," # begin potential forloop (from T to S)\n print(""token_idxs shape:"", token_idxs.shape)\n pad_shape = (B, S - T, N)\n pad = jnp.zeros(pad_shape, dtype=token_idxs.dtype)\n token_idxs = jnp.concatenate([token_idxs, pad], axis=1) # shape (B, S, N)\n\n print(""token_idxs shape:"", token_idxs.shape)\n action_tokens = self.lam.vq.get_codes(batch[""latent_actions""])\n print(""action_tokens.shape:"", action_tokens.shape) # (B, S, A, D)\n\n # Create a mask that is 1 (True) where we just padded\n # token_idxs shape: (B, S, N), T = original length, S = seq_len\n # mask is True for padded positions (i.e., t >= T)\n mask = (jnp.arange(S)[None, :, None] >= T) # shape (1, S, 1)\n mask = jnp.broadcast_to(mask, (B, S, N)) # shape (B, S, N)\n init_mask = mask.astype(bool)\n\n assert init_mask.shape == (B, S, N), ""Wrong mask shape""\n\n # --- Initialize MaskGIT ---\n init_carry = (\n batch[""rng""],\n init_mask,\n token_idxs,\n action_tokens,\n )\n MaskGITLoop = nn.scan(\n MaskGITStepMihir,\n variable_broadcast=""params"",\n split_rngs={""params"": False},\n in_axes=0,",python,selection_command +6008,4508499,"genie.py",3476,1273," # begin potential forloop (from T to S)\n print(""token_idxs shape:"", token_idxs.shape)\n pad_shape = (B, S - T, N)\n pad = jnp.zeros(pad_shape, dtype=token_idxs.dtype)\n token_idxs = jnp.concatenate([token_idxs, pad], axis=1) # shape (B, S, N)\n\n print(""token_idxs shape:"", token_idxs.shape)\n action_tokens = self.lam.vq.get_codes(batch[""latent_actions""])\n print(""action_tokens.shape:"", action_tokens.shape) # (B, S, A, D)\n\n # Create a mask that is 1 (True) where we just padded\n # token_idxs shape: (B, S, N), T = original length, S = seq_len\n # mask is True for padded positions (i.e., t >= T)\n mask = (jnp.arange(S)[None, :, None] >= T) # shape (1, S, 1)\n mask = jnp.broadcast_to(mask, (B, S, N)) # shape (B, S, N)\n init_mask = mask.astype(bool)\n\n assert init_mask.shape == (B, S, N), ""Wrong mask shape""\n\n # --- Initialize MaskGIT ---\n init_carry = (\n batch[""rng""],\n init_mask,\n token_idxs,\n action_tokens,\n )\n MaskGITLoop = nn.scan(\n MaskGITStepMihir,\n variable_broadcast=""params"",\n split_rngs={""params"": False},\n in_axes=0,\n out_axes=0,",python,selection_command +6009,4508711,"genie.py",3476,1299," # begin potential forloop (from T to S)\n print(""token_idxs shape:"", token_idxs.shape)\n pad_shape = (B, S - T, N)\n pad = jnp.zeros(pad_shape, dtype=token_idxs.dtype)\n token_idxs = jnp.concatenate([token_idxs, pad], axis=1) # shape (B, S, N)\n\n print(""token_idxs shape:"", token_idxs.shape)\n action_tokens = self.lam.vq.get_codes(batch[""latent_actions""])\n print(""action_tokens.shape:"", action_tokens.shape) # (B, S, A, D)\n\n # Create a mask that is 1 
(True) where we just padded\n # token_idxs shape: (B, S, N), T = original length, S = seq_len\n # mask is True for padded positions (i.e., t >= T)\n mask = (jnp.arange(S)[None, :, None] >= T) # shape (1, S, 1)\n mask = jnp.broadcast_to(mask, (B, S, N)) # shape (B, S, N)\n init_mask = mask.astype(bool)\n\n assert init_mask.shape == (B, S, N), ""Wrong mask shape""\n\n # --- Initialize MaskGIT ---\n init_carry = (\n batch[""rng""],\n init_mask,\n token_idxs,\n action_tokens,\n )\n MaskGITLoop = nn.scan(\n MaskGITStepMihir,\n variable_broadcast=""params"",\n split_rngs={""params"": False},\n in_axes=0,\n out_axes=0,\n length=steps,",python,selection_command +6010,4508712,"genie.py",3476,1309," # begin potential forloop (from T to S)\n print(""token_idxs shape:"", token_idxs.shape)\n pad_shape = (B, S - T, N)\n pad = jnp.zeros(pad_shape, dtype=token_idxs.dtype)\n token_idxs = jnp.concatenate([token_idxs, pad], axis=1) # shape (B, S, N)\n\n print(""token_idxs shape:"", token_idxs.shape)\n action_tokens = self.lam.vq.get_codes(batch[""latent_actions""])\n print(""action_tokens.shape:"", action_tokens.shape) # (B, S, A, D)\n\n # Create a mask that is 1 (True) where we just padded\n # token_idxs shape: (B, S, N), T = original length, S = seq_len\n # mask is True for padded positions (i.e., t >= T)\n mask = (jnp.arange(S)[None, :, None] >= T) # shape (1, S, 1)\n mask = jnp.broadcast_to(mask, (B, S, N)) # shape (B, S, N)\n init_mask = mask.astype(bool)\n\n assert init_mask.shape == (B, S, N), ""Wrong mask shape""\n\n # --- Initialize MaskGIT ---\n init_carry = (\n batch[""rng""],\n init_mask,\n token_idxs,\n action_tokens,\n )\n MaskGITLoop = nn.scan(\n MaskGITStepMihir,\n variable_broadcast=""params"",\n split_rngs={""params"": False},\n in_axes=0,\n out_axes=0,\n length=steps,\n )",python,selection_command +6011,4508712,"genie.py",3476,1310," # begin potential forloop (from T to S)\n print(""token_idxs shape:"", token_idxs.shape)\n pad_shape = (B, S - T, N)\n pad = jnp.zeros(pad_shape, dtype=token_idxs.dtype)\n token_idxs = jnp.concatenate([token_idxs, pad], axis=1) # shape (B, S, N)\n\n print(""token_idxs shape:"", token_idxs.shape)\n action_tokens = self.lam.vq.get_codes(batch[""latent_actions""])\n print(""action_tokens.shape:"", action_tokens.shape) # (B, S, A, D)\n\n # Create a mask that is 1 (True) where we just padded\n # token_idxs shape: (B, S, N), T = original length, S = seq_len\n # mask is True for padded positions (i.e., t >= T)\n mask = (jnp.arange(S)[None, :, None] >= T) # shape (1, S, 1)\n mask = jnp.broadcast_to(mask, (B, S, N)) # shape (B, S, N)\n init_mask = mask.astype(bool)\n\n assert init_mask.shape == (B, S, N), ""Wrong mask shape""\n\n # --- Initialize MaskGIT ---\n init_carry = (\n batch[""rng""],\n init_mask,\n token_idxs,\n action_tokens,\n )\n MaskGITLoop = nn.scan(\n MaskGITStepMihir,\n variable_broadcast=""params"",\n split_rngs={""params"": False},\n in_axes=0,\n out_axes=0,\n length=steps,\n )\n",python,selection_command +6012,4508712,"genie.py",3476,1345," # begin potential forloop (from T to S)\n print(""token_idxs shape:"", token_idxs.shape)\n pad_shape = (B, S - T, N)\n pad = jnp.zeros(pad_shape, dtype=token_idxs.dtype)\n token_idxs = jnp.concatenate([token_idxs, pad], axis=1) # shape (B, S, N)\n\n print(""token_idxs shape:"", token_idxs.shape)\n action_tokens = self.lam.vq.get_codes(batch[""latent_actions""])\n print(""action_tokens.shape:"", action_tokens.shape) # (B, S, A, D)\n\n # Create a mask that is 1 (True) where we just padded\n # token_idxs shape: (B, 
S, N), T = original length, S = seq_len\n # mask is True for padded positions (i.e., t >= T)\n mask = (jnp.arange(S)[None, :, None] >= T) # shape (1, S, 1)\n mask = jnp.broadcast_to(mask, (B, S, N)) # shape (B, S, N)\n init_mask = mask.astype(bool)\n\n assert init_mask.shape == (B, S, N), ""Wrong mask shape""\n\n # --- Initialize MaskGIT ---\n init_carry = (\n batch[""rng""],\n init_mask,\n token_idxs,\n action_tokens,\n )\n MaskGITLoop = nn.scan(\n MaskGITStepMihir,\n variable_broadcast=""params"",\n split_rngs={""params"": False},\n in_axes=0,\n out_axes=0,\n length=steps,\n )\n\n # --- Run MaskGIT loop ---",python,selection_command +6013,4508820,"genie.py",3476,1376," # begin potential forloop (from T to S)\n print(""token_idxs shape:"", token_idxs.shape)\n pad_shape = (B, S - T, N)\n pad = jnp.zeros(pad_shape, dtype=token_idxs.dtype)\n token_idxs = jnp.concatenate([token_idxs, pad], axis=1) # shape (B, S, N)\n\n print(""token_idxs shape:"", token_idxs.shape)\n action_tokens = self.lam.vq.get_codes(batch[""latent_actions""])\n print(""action_tokens.shape:"", action_tokens.shape) # (B, S, A, D)\n\n # Create a mask that is 1 (True) where we just padded\n # token_idxs shape: (B, S, N), T = original length, S = seq_len\n # mask is True for padded positions (i.e., t >= T)\n mask = (jnp.arange(S)[None, :, None] >= T) # shape (1, S, 1)\n mask = jnp.broadcast_to(mask, (B, S, N)) # shape (B, S, N)\n init_mask = mask.astype(bool)\n\n assert init_mask.shape == (B, S, N), ""Wrong mask shape""\n\n # --- Initialize MaskGIT ---\n init_carry = (\n batch[""rng""],\n init_mask,\n token_idxs,\n action_tokens,\n )\n MaskGITLoop = nn.scan(\n MaskGITStepMihir,\n variable_broadcast=""params"",\n split_rngs={""params"": False},\n in_axes=0,\n out_axes=0,\n length=steps,\n )\n\n # --- Run MaskGIT loop ---\n loop_fn = MaskGITLoop(",python,selection_command +6014,4508821,"genie.py",3476,1412," # begin potential forloop (from T to S)\n print(""token_idxs shape:"", token_idxs.shape)\n pad_shape = (B, S - T, N)\n pad = jnp.zeros(pad_shape, dtype=token_idxs.dtype)\n token_idxs = jnp.concatenate([token_idxs, pad], axis=1) # shape (B, S, N)\n\n print(""token_idxs shape:"", token_idxs.shape)\n action_tokens = self.lam.vq.get_codes(batch[""latent_actions""])\n print(""action_tokens.shape:"", action_tokens.shape) # (B, S, A, D)\n\n # Create a mask that is 1 (True) where we just padded\n # token_idxs shape: (B, S, N), T = original length, S = seq_len\n # mask is True for padded positions (i.e., t >= T)\n mask = (jnp.arange(S)[None, :, None] >= T) # shape (1, S, 1)\n mask = jnp.broadcast_to(mask, (B, S, N)) # shape (B, S, N)\n init_mask = mask.astype(bool)\n\n assert init_mask.shape == (B, S, N), ""Wrong mask shape""\n\n # --- Initialize MaskGIT ---\n init_carry = (\n batch[""rng""],\n init_mask,\n token_idxs,\n action_tokens,\n )\n MaskGITLoop = nn.scan(\n MaskGITStepMihir,\n variable_broadcast=""params"",\n split_rngs={""params"": False},\n in_axes=0,\n out_axes=0,\n length=steps,\n )\n\n # --- Run MaskGIT loop ---\n loop_fn = MaskGITLoop(\n dynamics=self.dynamics,",python,selection_command +6015,4508821,"genie.py",3476,1450," # begin potential forloop (from T to S)\n print(""token_idxs shape:"", token_idxs.shape)\n pad_shape = (B, S - T, N)\n pad = jnp.zeros(pad_shape, dtype=token_idxs.dtype)\n token_idxs = jnp.concatenate([token_idxs, pad], axis=1) # shape (B, S, N)\n\n print(""token_idxs shape:"", token_idxs.shape)\n action_tokens = self.lam.vq.get_codes(batch[""latent_actions""])\n print(""action_tokens.shape:"", 
action_tokens.shape) # (B, S, A, D)\n\n # Create a mask that is 1 (True) where we just padded\n # token_idxs shape: (B, S, N), T = original length, S = seq_len\n # mask is True for padded positions (i.e., t >= T)\n mask = (jnp.arange(S)[None, :, None] >= T) # shape (1, S, 1)\n mask = jnp.broadcast_to(mask, (B, S, N)) # shape (B, S, N)\n init_mask = mask.astype(bool)\n\n assert init_mask.shape == (B, S, N), ""Wrong mask shape""\n\n # --- Initialize MaskGIT ---\n init_carry = (\n batch[""rng""],\n init_mask,\n token_idxs,\n action_tokens,\n )\n MaskGITLoop = nn.scan(\n MaskGITStepMihir,\n variable_broadcast=""params"",\n split_rngs={""params"": False},\n in_axes=0,\n out_axes=0,\n length=steps,\n )\n\n # --- Run MaskGIT loop ---\n loop_fn = MaskGITLoop(\n dynamics=self.dynamics,\n tokenizer=self.tokenizer,",python,selection_command +6016,4508821,"genie.py",3476,1487," # begin potential forloop (from T to S)\n print(""token_idxs shape:"", token_idxs.shape)\n pad_shape = (B, S - T, N)\n pad = jnp.zeros(pad_shape, dtype=token_idxs.dtype)\n token_idxs = jnp.concatenate([token_idxs, pad], axis=1) # shape (B, S, N)\n\n print(""token_idxs shape:"", token_idxs.shape)\n action_tokens = self.lam.vq.get_codes(batch[""latent_actions""])\n print(""action_tokens.shape:"", action_tokens.shape) # (B, S, A, D)\n\n # Create a mask that is 1 (True) where we just padded\n # token_idxs shape: (B, S, N), T = original length, S = seq_len\n # mask is True for padded positions (i.e., t >= T)\n mask = (jnp.arange(S)[None, :, None] >= T) # shape (1, S, 1)\n mask = jnp.broadcast_to(mask, (B, S, N)) # shape (B, S, N)\n init_mask = mask.astype(bool)\n\n assert init_mask.shape == (B, S, N), ""Wrong mask shape""\n\n # --- Initialize MaskGIT ---\n init_carry = (\n batch[""rng""],\n init_mask,\n token_idxs,\n action_tokens,\n )\n MaskGITLoop = nn.scan(\n MaskGITStepMihir,\n variable_broadcast=""params"",\n split_rngs={""params"": False},\n in_axes=0,\n out_axes=0,\n length=steps,\n )\n\n # --- Run MaskGIT loop ---\n loop_fn = MaskGITLoop(\n dynamics=self.dynamics,\n tokenizer=self.tokenizer,\n temperature=temperature,",python,selection_command +6017,4508822,"genie.py",3476,1528," # begin potential forloop (from T to S)\n print(""token_idxs shape:"", token_idxs.shape)\n pad_shape = (B, S - T, N)\n pad = jnp.zeros(pad_shape, dtype=token_idxs.dtype)\n token_idxs = jnp.concatenate([token_idxs, pad], axis=1) # shape (B, S, N)\n\n print(""token_idxs shape:"", token_idxs.shape)\n action_tokens = self.lam.vq.get_codes(batch[""latent_actions""])\n print(""action_tokens.shape:"", action_tokens.shape) # (B, S, A, D)\n\n # Create a mask that is 1 (True) where we just padded\n # token_idxs shape: (B, S, N), T = original length, S = seq_len\n # mask is True for padded positions (i.e., t >= T)\n mask = (jnp.arange(S)[None, :, None] >= T) # shape (1, S, 1)\n mask = jnp.broadcast_to(mask, (B, S, N)) # shape (B, S, N)\n init_mask = mask.astype(bool)\n\n assert init_mask.shape == (B, S, N), ""Wrong mask shape""\n\n # --- Initialize MaskGIT ---\n init_carry = (\n batch[""rng""],\n init_mask,\n token_idxs,\n action_tokens,\n )\n MaskGITLoop = nn.scan(\n MaskGITStepMihir,\n variable_broadcast=""params"",\n split_rngs={""params"": False},\n in_axes=0,\n out_axes=0,\n length=steps,\n )\n\n # --- Run MaskGIT loop ---\n loop_fn = MaskGITLoop(\n dynamics=self.dynamics,\n tokenizer=self.tokenizer,\n temperature=temperature,\n sample_argmax=sample_argmax,",python,selection_command +6018,4508866,"genie.py",3476,1553," # begin potential forloop (from T to 
S)\n print(""token_idxs shape:"", token_idxs.shape)\n pad_shape = (B, S - T, N)\n pad = jnp.zeros(pad_shape, dtype=token_idxs.dtype)\n token_idxs = jnp.concatenate([token_idxs, pad], axis=1) # shape (B, S, N)\n\n print(""token_idxs shape:"", token_idxs.shape)\n action_tokens = self.lam.vq.get_codes(batch[""latent_actions""])\n print(""action_tokens.shape:"", action_tokens.shape) # (B, S, A, D)\n\n # Create a mask that is 1 (True) where we just padded\n # token_idxs shape: (B, S, N), T = original length, S = seq_len\n # mask is True for padded positions (i.e., t >= T)\n mask = (jnp.arange(S)[None, :, None] >= T) # shape (1, S, 1)\n mask = jnp.broadcast_to(mask, (B, S, N)) # shape (B, S, N)\n init_mask = mask.astype(bool)\n\n assert init_mask.shape == (B, S, N), ""Wrong mask shape""\n\n # --- Initialize MaskGIT ---\n init_carry = (\n batch[""rng""],\n init_mask,\n token_idxs,\n action_tokens,\n )\n MaskGITLoop = nn.scan(\n MaskGITStepMihir,\n variable_broadcast=""params"",\n split_rngs={""params"": False},\n in_axes=0,\n out_axes=0,\n length=steps,\n )\n\n # --- Run MaskGIT loop ---\n loop_fn = MaskGITLoop(\n dynamics=self.dynamics,\n tokenizer=self.tokenizer,\n temperature=temperature,\n sample_argmax=sample_argmax,\n steps=steps,",python,selection_command +6019,4508953,"TERMINAL",0,0,"58",,terminal_output +6020,4509066,"genie.py",3476,1563," # begin potential forloop (from T to S)\n print(""token_idxs shape:"", token_idxs.shape)\n pad_shape = (B, S - T, N)\n pad = jnp.zeros(pad_shape, dtype=token_idxs.dtype)\n token_idxs = jnp.concatenate([token_idxs, pad], axis=1) # shape (B, S, N)\n\n print(""token_idxs shape:"", token_idxs.shape)\n action_tokens = self.lam.vq.get_codes(batch[""latent_actions""])\n print(""action_tokens.shape:"", action_tokens.shape) # (B, S, A, D)\n\n # Create a mask that is 1 (True) where we just padded\n # token_idxs shape: (B, S, N), T = original length, S = seq_len\n # mask is True for padded positions (i.e., t >= T)\n mask = (jnp.arange(S)[None, :, None] >= T) # shape (1, S, 1)\n mask = jnp.broadcast_to(mask, (B, S, N)) # shape (B, S, N)\n init_mask = mask.astype(bool)\n\n assert init_mask.shape == (B, S, N), ""Wrong mask shape""\n\n # --- Initialize MaskGIT ---\n init_carry = (\n batch[""rng""],\n init_mask,\n token_idxs,\n action_tokens,\n )\n MaskGITLoop = nn.scan(\n MaskGITStepMihir,\n variable_broadcast=""params"",\n split_rngs={""params"": False},\n in_axes=0,\n out_axes=0,\n length=steps,\n )\n\n # --- Run MaskGIT loop ---\n loop_fn = MaskGITLoop(\n dynamics=self.dynamics,\n tokenizer=self.tokenizer,\n temperature=temperature,\n sample_argmax=sample_argmax,\n steps=steps,\n )",python,selection_command +6021,4509156,"genie.py",3476,1627," # begin potential forloop (from T to S)\n print(""token_idxs shape:"", token_idxs.shape)\n pad_shape = (B, S - T, N)\n pad = jnp.zeros(pad_shape, dtype=token_idxs.dtype)\n token_idxs = jnp.concatenate([token_idxs, pad], axis=1) # shape (B, S, N)\n\n print(""token_idxs shape:"", token_idxs.shape)\n action_tokens = self.lam.vq.get_codes(batch[""latent_actions""])\n print(""action_tokens.shape:"", action_tokens.shape) # (B, S, A, D)\n\n # Create a mask that is 1 (True) where we just padded\n # token_idxs shape: (B, S, N), T = original length, S = seq_len\n # mask is True for padded positions (i.e., t >= T)\n mask = (jnp.arange(S)[None, :, None] >= T) # shape (1, S, 1)\n mask = jnp.broadcast_to(mask, (B, S, N)) # shape (B, S, N)\n init_mask = mask.astype(bool)\n\n assert init_mask.shape == (B, S, N), ""Wrong mask shape""\n\n # 
--- Initialize MaskGIT ---\n init_carry = (\n batch[""rng""],\n init_mask,\n token_idxs,\n action_tokens,\n )\n MaskGITLoop = nn.scan(\n MaskGITStepMihir,\n variable_broadcast=""params"",\n split_rngs={""params"": False},\n in_axes=0,\n out_axes=0,\n length=steps,\n )\n\n # --- Run MaskGIT loop ---\n loop_fn = MaskGITLoop(\n dynamics=self.dynamics,\n tokenizer=self.tokenizer,\n temperature=temperature,\n sample_argmax=sample_argmax,\n steps=steps,\n )\n final_carry, _ = loop_fn(init_carry, jnp.arange(steps))",python,selection_command +6022,4509323,"genie.py",3476,1667," # begin potential forloop (from T to S)\n print(""token_idxs shape:"", token_idxs.shape)\n pad_shape = (B, S - T, N)\n pad = jnp.zeros(pad_shape, dtype=token_idxs.dtype)\n token_idxs = jnp.concatenate([token_idxs, pad], axis=1) # shape (B, S, N)\n\n print(""token_idxs shape:"", token_idxs.shape)\n action_tokens = self.lam.vq.get_codes(batch[""latent_actions""])\n print(""action_tokens.shape:"", action_tokens.shape) # (B, S, A, D)\n\n # Create a mask that is 1 (True) where we just padded\n # token_idxs shape: (B, S, N), T = original length, S = seq_len\n # mask is True for padded positions (i.e., t >= T)\n mask = (jnp.arange(S)[None, :, None] >= T) # shape (1, S, 1)\n mask = jnp.broadcast_to(mask, (B, S, N)) # shape (B, S, N)\n init_mask = mask.astype(bool)\n\n assert init_mask.shape == (B, S, N), ""Wrong mask shape""\n\n # --- Initialize MaskGIT ---\n init_carry = (\n batch[""rng""],\n init_mask,\n token_idxs,\n action_tokens,\n )\n MaskGITLoop = nn.scan(\n MaskGITStepMihir,\n variable_broadcast=""params"",\n split_rngs={""params"": False},\n in_axes=0,\n out_axes=0,\n length=steps,\n )\n\n # --- Run MaskGIT loop ---\n loop_fn = MaskGITLoop(\n dynamics=self.dynamics,\n tokenizer=self.tokenizer,\n temperature=temperature,\n sample_argmax=sample_argmax,\n steps=steps,\n )\n final_carry, _ = loop_fn(init_carry, jnp.arange(steps))\n new_frame_idxs = final_carry[2]",python,selection_command +6023,4509514,"genie.py",3476,1717," # begin potential forloop (from T to S)\n print(""token_idxs shape:"", token_idxs.shape)\n pad_shape = (B, S - T, N)\n pad = jnp.zeros(pad_shape, dtype=token_idxs.dtype)\n token_idxs = jnp.concatenate([token_idxs, pad], axis=1) # shape (B, S, N)\n\n print(""token_idxs shape:"", token_idxs.shape)\n action_tokens = self.lam.vq.get_codes(batch[""latent_actions""])\n print(""action_tokens.shape:"", action_tokens.shape) # (B, S, A, D)\n\n # Create a mask that is 1 (True) where we just padded\n # token_idxs shape: (B, S, N), T = original length, S = seq_len\n # mask is True for padded positions (i.e., t >= T)\n mask = (jnp.arange(S)[None, :, None] >= T) # shape (1, S, 1)\n mask = jnp.broadcast_to(mask, (B, S, N)) # shape (B, S, N)\n init_mask = mask.astype(bool)\n\n assert init_mask.shape == (B, S, N), ""Wrong mask shape""\n\n # --- Initialize MaskGIT ---\n init_carry = (\n batch[""rng""],\n init_mask,\n token_idxs,\n action_tokens,\n )\n MaskGITLoop = nn.scan(\n MaskGITStepMihir,\n variable_broadcast=""params"",\n split_rngs={""params"": False},\n in_axes=0,\n out_axes=0,\n length=steps,\n )\n\n # --- Run MaskGIT loop ---\n loop_fn = MaskGITLoop(\n dynamics=self.dynamics,\n tokenizer=self.tokenizer,\n temperature=temperature,\n sample_argmax=sample_argmax,\n steps=steps,\n )\n final_carry, _ = loop_fn(init_carry, jnp.arange(steps))\n new_frame_idxs = final_carry[2]\n new_frame_pixels = self.tokenizer.decode(",python,selection_command +6024,4509579,"genie.py",3476,1745," # begin potential forloop (from T to S)\n 
print(""token_idxs shape:"", token_idxs.shape)\n pad_shape = (B, S - T, N)\n pad = jnp.zeros(pad_shape, dtype=token_idxs.dtype)\n token_idxs = jnp.concatenate([token_idxs, pad], axis=1) # shape (B, S, N)\n\n print(""token_idxs shape:"", token_idxs.shape)\n action_tokens = self.lam.vq.get_codes(batch[""latent_actions""])\n print(""action_tokens.shape:"", action_tokens.shape) # (B, S, A, D)\n\n # Create a mask that is 1 (True) where we just padded\n # token_idxs shape: (B, S, N), T = original length, S = seq_len\n # mask is True for padded positions (i.e., t >= T)\n mask = (jnp.arange(S)[None, :, None] >= T) # shape (1, S, 1)\n mask = jnp.broadcast_to(mask, (B, S, N)) # shape (B, S, N)\n init_mask = mask.astype(bool)\n\n assert init_mask.shape == (B, S, N), ""Wrong mask shape""\n\n # --- Initialize MaskGIT ---\n init_carry = (\n batch[""rng""],\n init_mask,\n token_idxs,\n action_tokens,\n )\n MaskGITLoop = nn.scan(\n MaskGITStepMihir,\n variable_broadcast=""params"",\n split_rngs={""params"": False},\n in_axes=0,\n out_axes=0,\n length=steps,\n )\n\n # --- Run MaskGIT loop ---\n loop_fn = MaskGITLoop(\n dynamics=self.dynamics,\n tokenizer=self.tokenizer,\n temperature=temperature,\n sample_argmax=sample_argmax,\n steps=steps,\n )\n final_carry, _ = loop_fn(init_carry, jnp.arange(steps))\n new_frame_idxs = final_carry[2]\n new_frame_pixels = self.tokenizer.decode(\n new_frame_idxs,",python,selection_command +6025,4509732,"genie.py",3476,1794," # begin potential forloop (from T to S)\n print(""token_idxs shape:"", token_idxs.shape)\n pad_shape = (B, S - T, N)\n pad = jnp.zeros(pad_shape, dtype=token_idxs.dtype)\n token_idxs = jnp.concatenate([token_idxs, pad], axis=1) # shape (B, S, N)\n\n print(""token_idxs shape:"", token_idxs.shape)\n action_tokens = self.lam.vq.get_codes(batch[""latent_actions""])\n print(""action_tokens.shape:"", action_tokens.shape) # (B, S, A, D)\n\n # Create a mask that is 1 (True) where we just padded\n # token_idxs shape: (B, S, N), T = original length, S = seq_len\n # mask is True for padded positions (i.e., t >= T)\n mask = (jnp.arange(S)[None, :, None] >= T) # shape (1, S, 1)\n mask = jnp.broadcast_to(mask, (B, S, N)) # shape (B, S, N)\n init_mask = mask.astype(bool)\n\n assert init_mask.shape == (B, S, N), ""Wrong mask shape""\n\n # --- Initialize MaskGIT ---\n init_carry = (\n batch[""rng""],\n init_mask,\n token_idxs,\n action_tokens,\n )\n MaskGITLoop = nn.scan(\n MaskGITStepMihir,\n variable_broadcast=""params"",\n split_rngs={""params"": False},\n in_axes=0,\n out_axes=0,\n length=steps,\n )\n\n # --- Run MaskGIT loop ---\n loop_fn = MaskGITLoop(\n dynamics=self.dynamics,\n tokenizer=self.tokenizer,\n temperature=temperature,\n sample_argmax=sample_argmax,\n steps=steps,\n )\n final_carry, _ = loop_fn(init_carry, jnp.arange(steps))\n new_frame_idxs = final_carry[2]\n new_frame_pixels = self.tokenizer.decode(\n new_frame_idxs,\n video_hw=batch[""videos""].shape[2:4],",python,selection_command +6026,4509884,"genie.py",3476,1804," # begin potential forloop (from T to S)\n print(""token_idxs shape:"", token_idxs.shape)\n pad_shape = (B, S - T, N)\n pad = jnp.zeros(pad_shape, dtype=token_idxs.dtype)\n token_idxs = jnp.concatenate([token_idxs, pad], axis=1) # shape (B, S, N)\n\n print(""token_idxs shape:"", token_idxs.shape)\n action_tokens = self.lam.vq.get_codes(batch[""latent_actions""])\n print(""action_tokens.shape:"", action_tokens.shape) # (B, S, A, D)\n\n # Create a mask that is 1 (True) where we just padded\n # token_idxs shape: (B, S, N), T = original 
length, S = seq_len\n # mask is True for padded positions (i.e., t >= T)\n mask = (jnp.arange(S)[None, :, None] >= T) # shape (1, S, 1)\n mask = jnp.broadcast_to(mask, (B, S, N)) # shape (B, S, N)\n init_mask = mask.astype(bool)\n\n assert init_mask.shape == (B, S, N), ""Wrong mask shape""\n\n # --- Initialize MaskGIT ---\n init_carry = (\n batch[""rng""],\n init_mask,\n token_idxs,\n action_tokens,\n )\n MaskGITLoop = nn.scan(\n MaskGITStepMihir,\n variable_broadcast=""params"",\n split_rngs={""params"": False},\n in_axes=0,\n out_axes=0,\n length=steps,\n )\n\n # --- Run MaskGIT loop ---\n loop_fn = MaskGITLoop(\n dynamics=self.dynamics,\n tokenizer=self.tokenizer,\n temperature=temperature,\n sample_argmax=sample_argmax,\n steps=steps,\n )\n final_carry, _ = loop_fn(init_carry, jnp.arange(steps))\n new_frame_idxs = final_carry[2]\n new_frame_pixels = self.tokenizer.decode(\n new_frame_idxs,\n video_hw=batch[""videos""].shape[2:4],\n )",python,selection_command +6027,4509977,"TERMINAL",0,0,"69",,terminal_output +6028,4510149,"genie.py",3476,1794," # begin potential forloop (from T to S)\n print(""token_idxs shape:"", token_idxs.shape)\n pad_shape = (B, S - T, N)\n pad = jnp.zeros(pad_shape, dtype=token_idxs.dtype)\n token_idxs = jnp.concatenate([token_idxs, pad], axis=1) # shape (B, S, N)\n\n print(""token_idxs shape:"", token_idxs.shape)\n action_tokens = self.lam.vq.get_codes(batch[""latent_actions""])\n print(""action_tokens.shape:"", action_tokens.shape) # (B, S, A, D)\n\n # Create a mask that is 1 (True) where we just padded\n # token_idxs shape: (B, S, N), T = original length, S = seq_len\n # mask is True for padded positions (i.e., t >= T)\n mask = (jnp.arange(S)[None, :, None] >= T) # shape (1, S, 1)\n mask = jnp.broadcast_to(mask, (B, S, N)) # shape (B, S, N)\n init_mask = mask.astype(bool)\n\n assert init_mask.shape == (B, S, N), ""Wrong mask shape""\n\n # --- Initialize MaskGIT ---\n init_carry = (\n batch[""rng""],\n init_mask,\n token_idxs,\n action_tokens,\n )\n MaskGITLoop = nn.scan(\n MaskGITStepMihir,\n variable_broadcast=""params"",\n split_rngs={""params"": False},\n in_axes=0,\n out_axes=0,\n length=steps,\n )\n\n # --- Run MaskGIT loop ---\n loop_fn = MaskGITLoop(\n dynamics=self.dynamics,\n tokenizer=self.tokenizer,\n temperature=temperature,\n sample_argmax=sample_argmax,\n steps=steps,\n )\n final_carry, _ = loop_fn(init_carry, jnp.arange(steps))\n new_frame_idxs = final_carry[2]\n new_frame_pixels = self.tokenizer.decode(\n new_frame_idxs,\n video_hw=batch[""videos""].shape[2:4],",python,selection_command +6029,4510285,"genie.py",3476,1745," # begin potential forloop (from T to S)\n print(""token_idxs shape:"", token_idxs.shape)\n pad_shape = (B, S - T, N)\n pad = jnp.zeros(pad_shape, dtype=token_idxs.dtype)\n token_idxs = jnp.concatenate([token_idxs, pad], axis=1) # shape (B, S, N)\n\n print(""token_idxs shape:"", token_idxs.shape)\n action_tokens = self.lam.vq.get_codes(batch[""latent_actions""])\n print(""action_tokens.shape:"", action_tokens.shape) # (B, S, A, D)\n\n # Create a mask that is 1 (True) where we just padded\n # token_idxs shape: (B, S, N), T = original length, S = seq_len\n # mask is True for padded positions (i.e., t >= T)\n mask = (jnp.arange(S)[None, :, None] >= T) # shape (1, S, 1)\n mask = jnp.broadcast_to(mask, (B, S, N)) # shape (B, S, N)\n init_mask = mask.astype(bool)\n\n assert init_mask.shape == (B, S, N), ""Wrong mask shape""\n\n # --- Initialize MaskGIT ---\n init_carry = (\n batch[""rng""],\n init_mask,\n token_idxs,\n 
action_tokens,\n )\n MaskGITLoop = nn.scan(\n MaskGITStepMihir,\n variable_broadcast=""params"",\n split_rngs={""params"": False},\n in_axes=0,\n out_axes=0,\n length=steps,\n )\n\n # --- Run MaskGIT loop ---\n loop_fn = MaskGITLoop(\n dynamics=self.dynamics,\n tokenizer=self.tokenizer,\n temperature=temperature,\n sample_argmax=sample_argmax,\n steps=steps,\n )\n final_carry, _ = loop_fn(init_carry, jnp.arange(steps))\n new_frame_idxs = final_carry[2]\n new_frame_pixels = self.tokenizer.decode(\n new_frame_idxs,",python,selection_command +6030,4510459,"genie.py",3476,1717," # begin potential forloop (from T to S)\n print(""token_idxs shape:"", token_idxs.shape)\n pad_shape = (B, S - T, N)\n pad = jnp.zeros(pad_shape, dtype=token_idxs.dtype)\n token_idxs = jnp.concatenate([token_idxs, pad], axis=1) # shape (B, S, N)\n\n print(""token_idxs shape:"", token_idxs.shape)\n action_tokens = self.lam.vq.get_codes(batch[""latent_actions""])\n print(""action_tokens.shape:"", action_tokens.shape) # (B, S, A, D)\n\n # Create a mask that is 1 (True) where we just padded\n # token_idxs shape: (B, S, N), T = original length, S = seq_len\n # mask is True for padded positions (i.e., t >= T)\n mask = (jnp.arange(S)[None, :, None] >= T) # shape (1, S, 1)\n mask = jnp.broadcast_to(mask, (B, S, N)) # shape (B, S, N)\n init_mask = mask.astype(bool)\n\n assert init_mask.shape == (B, S, N), ""Wrong mask shape""\n\n # --- Initialize MaskGIT ---\n init_carry = (\n batch[""rng""],\n init_mask,\n token_idxs,\n action_tokens,\n )\n MaskGITLoop = nn.scan(\n MaskGITStepMihir,\n variable_broadcast=""params"",\n split_rngs={""params"": False},\n in_axes=0,\n out_axes=0,\n length=steps,\n )\n\n # --- Run MaskGIT loop ---\n loop_fn = MaskGITLoop(\n dynamics=self.dynamics,\n tokenizer=self.tokenizer,\n temperature=temperature,\n sample_argmax=sample_argmax,\n steps=steps,\n )\n final_carry, _ = loop_fn(init_carry, jnp.arange(steps))\n new_frame_idxs = final_carry[2]\n new_frame_pixels = self.tokenizer.decode(",python,selection_command +6031,4510575,"genie.py",3476,1667," # begin potential forloop (from T to S)\n print(""token_idxs shape:"", token_idxs.shape)\n pad_shape = (B, S - T, N)\n pad = jnp.zeros(pad_shape, dtype=token_idxs.dtype)\n token_idxs = jnp.concatenate([token_idxs, pad], axis=1) # shape (B, S, N)\n\n print(""token_idxs shape:"", token_idxs.shape)\n action_tokens = self.lam.vq.get_codes(batch[""latent_actions""])\n print(""action_tokens.shape:"", action_tokens.shape) # (B, S, A, D)\n\n # Create a mask that is 1 (True) where we just padded\n # token_idxs shape: (B, S, N), T = original length, S = seq_len\n # mask is True for padded positions (i.e., t >= T)\n mask = (jnp.arange(S)[None, :, None] >= T) # shape (1, S, 1)\n mask = jnp.broadcast_to(mask, (B, S, N)) # shape (B, S, N)\n init_mask = mask.astype(bool)\n\n assert init_mask.shape == (B, S, N), ""Wrong mask shape""\n\n # --- Initialize MaskGIT ---\n init_carry = (\n batch[""rng""],\n init_mask,\n token_idxs,\n action_tokens,\n )\n MaskGITLoop = nn.scan(\n MaskGITStepMihir,\n variable_broadcast=""params"",\n split_rngs={""params"": False},\n in_axes=0,\n out_axes=0,\n length=steps,\n )\n\n # --- Run MaskGIT loop ---\n loop_fn = MaskGITLoop(\n dynamics=self.dynamics,\n tokenizer=self.tokenizer,\n temperature=temperature,\n sample_argmax=sample_argmax,\n steps=steps,\n )\n final_carry, _ = loop_fn(init_carry, jnp.arange(steps))\n new_frame_idxs = final_carry[2]",python,selection_command +6032,4510715,"genie.py",3476,1627," # begin potential forloop (from T to 
S)\n print(""token_idxs shape:"", token_idxs.shape)\n pad_shape = (B, S - T, N)\n pad = jnp.zeros(pad_shape, dtype=token_idxs.dtype)\n token_idxs = jnp.concatenate([token_idxs, pad], axis=1) # shape (B, S, N)\n\n print(""token_idxs shape:"", token_idxs.shape)\n action_tokens = self.lam.vq.get_codes(batch[""latent_actions""])\n print(""action_tokens.shape:"", action_tokens.shape) # (B, S, A, D)\n\n # Create a mask that is 1 (True) where we just padded\n # token_idxs shape: (B, S, N), T = original length, S = seq_len\n # mask is True for padded positions (i.e., t >= T)\n mask = (jnp.arange(S)[None, :, None] >= T) # shape (1, S, 1)\n mask = jnp.broadcast_to(mask, (B, S, N)) # shape (B, S, N)\n init_mask = mask.astype(bool)\n\n assert init_mask.shape == (B, S, N), ""Wrong mask shape""\n\n # --- Initialize MaskGIT ---\n init_carry = (\n batch[""rng""],\n init_mask,\n token_idxs,\n action_tokens,\n )\n MaskGITLoop = nn.scan(\n MaskGITStepMihir,\n variable_broadcast=""params"",\n split_rngs={""params"": False},\n in_axes=0,\n out_axes=0,\n length=steps,\n )\n\n # --- Run MaskGIT loop ---\n loop_fn = MaskGITLoop(\n dynamics=self.dynamics,\n tokenizer=self.tokenizer,\n temperature=temperature,\n sample_argmax=sample_argmax,\n steps=steps,\n )\n final_carry, _ = loop_fn(init_carry, jnp.arange(steps))",python,selection_command +6033,4510859,"genie.py",3476,1563," # begin potential forloop (from T to S)\n print(""token_idxs shape:"", token_idxs.shape)\n pad_shape = (B, S - T, N)\n pad = jnp.zeros(pad_shape, dtype=token_idxs.dtype)\n token_idxs = jnp.concatenate([token_idxs, pad], axis=1) # shape (B, S, N)\n\n print(""token_idxs shape:"", token_idxs.shape)\n action_tokens = self.lam.vq.get_codes(batch[""latent_actions""])\n print(""action_tokens.shape:"", action_tokens.shape) # (B, S, A, D)\n\n # Create a mask that is 1 (True) where we just padded\n # token_idxs shape: (B, S, N), T = original length, S = seq_len\n # mask is True for padded positions (i.e., t >= T)\n mask = (jnp.arange(S)[None, :, None] >= T) # shape (1, S, 1)\n mask = jnp.broadcast_to(mask, (B, S, N)) # shape (B, S, N)\n init_mask = mask.astype(bool)\n\n assert init_mask.shape == (B, S, N), ""Wrong mask shape""\n\n # --- Initialize MaskGIT ---\n init_carry = (\n batch[""rng""],\n init_mask,\n token_idxs,\n action_tokens,\n )\n MaskGITLoop = nn.scan(\n MaskGITStepMihir,\n variable_broadcast=""params"",\n split_rngs={""params"": False},\n in_axes=0,\n out_axes=0,\n length=steps,\n )\n\n # --- Run MaskGIT loop ---\n loop_fn = MaskGITLoop(\n dynamics=self.dynamics,\n tokenizer=self.tokenizer,\n temperature=temperature,\n sample_argmax=sample_argmax,\n steps=steps,\n )",python,selection_command +6034,4511010,"TERMINAL",0,0,"750",,terminal_output +6035,4511458,"genie.py",3476,1627," # begin potential forloop (from T to S)\n print(""token_idxs shape:"", token_idxs.shape)\n pad_shape = (B, S - T, N)\n pad = jnp.zeros(pad_shape, dtype=token_idxs.dtype)\n token_idxs = jnp.concatenate([token_idxs, pad], axis=1) # shape (B, S, N)\n\n print(""token_idxs shape:"", token_idxs.shape)\n action_tokens = self.lam.vq.get_codes(batch[""latent_actions""])\n print(""action_tokens.shape:"", action_tokens.shape) # (B, S, A, D)\n\n # Create a mask that is 1 (True) where we just padded\n # token_idxs shape: (B, S, N), T = original length, S = seq_len\n # mask is True for padded positions (i.e., t >= T)\n mask = (jnp.arange(S)[None, :, None] >= T) # shape (1, S, 1)\n mask = jnp.broadcast_to(mask, (B, S, N)) # shape (B, S, N)\n init_mask = mask.astype(bool)\n\n 
assert init_mask.shape == (B, S, N), ""Wrong mask shape""\n\n # --- Initialize MaskGIT ---\n init_carry = (\n batch[""rng""],\n init_mask,\n token_idxs,\n action_tokens,\n )\n MaskGITLoop = nn.scan(\n MaskGITStepMihir,\n variable_broadcast=""params"",\n split_rngs={""params"": False},\n in_axes=0,\n out_axes=0,\n length=steps,\n )\n\n # --- Run MaskGIT loop ---\n loop_fn = MaskGITLoop(\n dynamics=self.dynamics,\n tokenizer=self.tokenizer,\n temperature=temperature,\n sample_argmax=sample_argmax,\n steps=steps,\n )\n final_carry, _ = loop_fn(init_carry, jnp.arange(steps))",python,selection_command +6036,4512158,"TERMINAL",0,0,"81",,terminal_output +6037,4513013,"genie.py",3476,1667," # begin potential forloop (from T to S)\n print(""token_idxs shape:"", token_idxs.shape)\n pad_shape = (B, S - T, N)\n pad = jnp.zeros(pad_shape, dtype=token_idxs.dtype)\n token_idxs = jnp.concatenate([token_idxs, pad], axis=1) # shape (B, S, N)\n\n print(""token_idxs shape:"", token_idxs.shape)\n action_tokens = self.lam.vq.get_codes(batch[""latent_actions""])\n print(""action_tokens.shape:"", action_tokens.shape) # (B, S, A, D)\n\n # Create a mask that is 1 (True) where we just padded\n # token_idxs shape: (B, S, N), T = original length, S = seq_len\n # mask is True for padded positions (i.e., t >= T)\n mask = (jnp.arange(S)[None, :, None] >= T) # shape (1, S, 1)\n mask = jnp.broadcast_to(mask, (B, S, N)) # shape (B, S, N)\n init_mask = mask.astype(bool)\n\n assert init_mask.shape == (B, S, N), ""Wrong mask shape""\n\n # --- Initialize MaskGIT ---\n init_carry = (\n batch[""rng""],\n init_mask,\n token_idxs,\n action_tokens,\n )\n MaskGITLoop = nn.scan(\n MaskGITStepMihir,\n variable_broadcast=""params"",\n split_rngs={""params"": False},\n in_axes=0,\n out_axes=0,\n length=steps,\n )\n\n # --- Run MaskGIT loop ---\n loop_fn = MaskGITLoop(\n dynamics=self.dynamics,\n tokenizer=self.tokenizer,\n temperature=temperature,\n sample_argmax=sample_argmax,\n steps=steps,\n )\n final_carry, _ = loop_fn(init_carry, jnp.arange(steps))\n new_frame_idxs = final_carry[2]",python,selection_command +6038,4513105,"TERMINAL",0,0,"93",,terminal_output +6039,4514063,"genie.py",3484,0,"",python,selection_command +6040,4514151,"TERMINAL",0,0,"514",,terminal_output +6041,4515229,"TERMINAL",0,0,"25",,terminal_output +6042,4516253,"TERMINAL",0,0,"36",,terminal_output +6043,4516729,"genie.py",5112,0," ",python,content +6044,4516729,"genie.py",5048,0," ",python,content +6045,4516729,"genie.py",5038,0," ",python,content +6046,4516729,"genie.py",5017,0," ",python,content +6047,4516729,"genie.py",4976,0," ",python,content +6048,4516729,"genie.py",4939,0," ",python,content +6049,4516729,"genie.py",4901,0," ",python,content +6050,4516729,"genie.py",4865,0," ",python,content +6051,4516729,"genie.py",4830,0," ",python,content +6052,4516729,"genie.py",4795,0," ",python,content +6053,4516729,"genie.py",4784,0," ",python,content +6054,4516730,"genie.py",4762,0," ",python,content +6055,4516730,"genie.py",4738,0," ",python,content +6056,4516730,"genie.py",4715,0," ",python,content +6057,4516730,"genie.py",4673,0," ",python,content +6058,4516730,"genie.py",4632,0," ",python,content +6059,4516730,"genie.py",4602,0," ",python,content +6060,4516730,"genie.py",4567,0," ",python,content +6061,4516730,"genie.py",4557,0," ",python,content +6062,4516730,"genie.py",4534,0," ",python,content +6063,4516730,"genie.py",4510,0," ",python,content +6064,4516730,"genie.py",4487,0," ",python,content +6065,4516730,"genie.py",4461,0," ",python,content 
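The genie.py snapshots captured above center on two pieces of logic: padding token_idxs from length T to S along the time axis, and building a boolean mask over the padded positions. A minimal, self-contained sketch of that mask construction follows; the concrete values of B, T, S, and N are assumptions chosen to mirror the shapes printed later in this recording, not values taken from the code itself.

```python
import jax.numpy as jnp

# Illustrative stand-in shapes (assumptions): B = batch, T = original
# sequence length, S = padded length, N = tokens per frame.
B, T, S, N = 1, 5, 10, 920

token_idxs = jnp.zeros((B, T, N), dtype=jnp.int32)

# Pad the token indices from T to S along the time axis.
pad = jnp.zeros((B, S - T, N), dtype=token_idxs.dtype)
token_idxs = jnp.concatenate([token_idxs, pad], axis=1)  # (B, S, N)

# Mask is True exactly at the padded positions, i.e. where t >= T.
mask = jnp.arange(S)[None, :, None] >= T       # (1, S, 1), already bool
init_mask = jnp.broadcast_to(mask, (B, S, N))  # (B, S, N)

assert init_mask.shape == (B, S, N)
assert bool(init_mask[:, T:].all()) and not bool(init_mask[:, :T].any())
```

Note that the `>=` comparison already yields a boolean array, so the `.astype(bool)` seen in the recorded code is a harmless no-op.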
+6066,4516730,"genie.py",4434,0," ",python,content +6067,4516730,"genie.py",4397,0," ",python,content +6068,4516730,"genie.py",4332,0," ",python,content +6069,4516730,"genie.py",4293,0," ",python,content +6070,4516730,"genie.py",4223,0," ",python,content +6071,4516730,"genie.py",4153,0," ",python,content +6072,4516730,"genie.py",4094,0," ",python,content +6073,4516731,"genie.py",4022,0," ",python,content +6074,4516731,"genie.py",3960,0," ",python,content +6075,4516731,"genie.py",3885,0," ",python,content +6076,4516731,"genie.py",3814,0," ",python,content +6077,4516731,"genie.py",3761,0," ",python,content +6078,4516731,"genie.py",3678,0," ",python,content +6079,4516731,"genie.py",3619,0," ",python,content +6080,4516731,"genie.py",3585,0," ",python,content +6081,4516731,"genie.py",3532,0," ",python,content +6082,4516731,"genie.py",3484,0," ",python,content +6083,4517330,"genie.py",3487,0,"",python,selection_command +6084,4517357,"TERMINAL",0,0,"47",,terminal_output +6085,4517765,"genie.py",3539,0,"",python,selection_command +6086,4518334,"TERMINAL",0,0,"58",,terminal_output +6087,4519409,"TERMINAL",0,0,"69",,terminal_output +6088,4519610,"genie.py",5275,0,"",python,selection_mouse +6089,4519762,"genie.py",5272,14,"new_frame_idxs",python,selection_mouse +6090,4520449,"TERMINAL",0,0,"75:00",,terminal_output +6091,4520641,"genie.py",5273,0,"",python,selection_mouse +6092,4521332,"genie.py",5272,0,"",python,selection_mouse +6093,4521490,"TERMINAL",0,0,"81",,terminal_output +6094,4522536,"TERMINAL",0,0,"92",,terminal_output +6095,4523606,"TERMINAL",0,0,"1:003",,terminal_output +6096,4523884,"genie.py",5272,15,"",python,content +6097,4524606,"TERMINAL",0,0,"14",,terminal_output +6098,4524774,"genie.py",5272,0,"t",python,content +6099,4524775,"genie.py",5273,0,"",python,selection_keyboard +6100,4524970,"genie.py",5273,0,"o",python,content +6101,4524971,"genie.py",5274,0,"",python,selection_keyboard +6102,4525674,"TERMINAL",0,0,"25",,terminal_output +6103,4525930,"genie.py",5272,2,"token_idxs",python,content +6104,4526695,"TERMINAL",0,0,"36",,terminal_output +6105,4526909,"genie.py",5282,0," ",python,content +6106,4526910,"genie.py",5283,0,"",python,selection_keyboard +6107,4527311,"genie.py",5282,0,"",python,selection_command +6108,4527741,"TERMINAL",0,0,"47",,terminal_output +6109,4528119,"genie.py",5299,0,"",python,selection_mouse +6110,4528122,"genie.py",5298,0,"",python,selection_command +6111,4528779,"TERMINAL",0,0,"58",,terminal_output +6112,4529029,"genie.py",5299,0,"\n ",python,content +6113,4529830,"TERMINAL",0,0,"69",,terminal_output +6114,4530896,"TERMINAL",0,0,"710",,terminal_output +6115,4531061,"genie.py",5312,0,"T",python,content +6116,4531061,"genie.py",5313,0,"",python,selection_keyboard +6117,4531646,"genie.py",5313,0," ",python,content +6118,4531647,"genie.py",5314,0,"",python,selection_keyboard +6119,4531785,"genie.py",5314,0,"+",python,content +6120,4531786,"genie.py",5315,0,"",python,selection_keyboard +6121,4531905,"TERMINAL",0,0,"81",,terminal_output +6122,4532137,"genie.py",5315,0,"=",python,content +6123,4532138,"genie.py",5316,0,"",python,selection_keyboard +6124,4532455,"genie.py",5316,0,"1",python,content +6125,4532455,"genie.py",5317,0,"",python,selection_keyboard +6126,4532966,"TERMINAL",0,0,"92",,terminal_output +6127,4533470,"genie.py",5316,0,"",python,selection_command +6128,4533914,"genie.py",5388,0,"",python,selection_mouse +6129,4534012,"TERMINAL",0,0,"103",,terminal_output +6130,4534877,"genie.py",5380,0,"",python,selection_mouse 
+6131,4535024,"TERMINAL",0,0,"14",,terminal_output +6132,4535408,"genie.py",5380,14,"",python,content +6133,4536134,"genie.py",5380,0,"t",python,content +6134,4536135,"genie.py",5381,0,"",python,selection_keyboard +6135,4536135,"TERMINAL",0,0,"25",,terminal_output +6136,4536175,"genie.py",5381,0,"o",python,content +6137,4536176,"genie.py",5382,0,"",python,selection_keyboard +6138,4536992,"genie.py",5380,2,"token_idxs",python,content +6139,4537113,"TERMINAL",0,0,"37",,terminal_output +6140,4537455,"genie.py",5389,0,"",python,selection_command +6141,4537607,"genie.py",5339,0,"",python,selection_command +6142,4538029,"genie.py",5316,0,"",python,selection_command +6143,4538162,"TERMINAL",0,0,"58",,terminal_output +6144,4538251,"genie.py",5317,0,"\n ",python,content +6145,4538546,"genie.py",5318,12,"",python,content +6146,4539220,"TERMINAL",0,0,"69",,terminal_output +6147,4540359,"TERMINAL",0,0,"720",,terminal_output +6148,4541351,"TERMINAL",0,0,"81",,terminal_output +6149,4542334,"TERMINAL",0,0,"92",,terminal_output +6150,4543388,"TERMINAL",0,0,"203",,terminal_output +6151,4544516,"TERMINAL",0,0,"14",,terminal_output +6152,4544860,"TERMINAL",0,0,"pwd",,terminal_output +6153,4545390,"TERMINAL",0,0,"sh scripts_horeka/overfit_sample_tiny/sample.sh ",,terminal_output +6154,4545477,"TERMINAL",0,0,"25",,terminal_output +6155,4546510,"TERMINAL",0,0,"36",,terminal_output +6156,4547547,"TERMINAL",0,0,"47",,terminal_output +6157,4548579,"TERMINAL",0,0,"58",,terminal_output +6158,4549624,"TERMINAL",0,0,"69",,terminal_output +6159,4550668,"TERMINAL",0,0,"730",,terminal_output +6160,4551787,"TERMINAL",0,0,"81",,terminal_output +6161,4553323,"TERMINAL",0,0,"\r\n[?2004l\r",,terminal_output +6162,4553462,"TERMINAL",0,0,"Sampling from checkpoint: /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/3309699/genie_1751384516_74500/\r\n",,terminal_output +6163,4554245,"TERMINAL",0,0,"94",,terminal_output +6164,4555268,"TERMINAL",0,0,"325",,terminal_output +6165,4556251,"TERMINAL",0,0,"36",,terminal_output +6166,4556279,"TERMINAL",0,0,"2025-07-03 17:31:33.164776: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +6167,4557294,"TERMINAL",0,0,"47",,terminal_output +6168,4558348,"TERMINAL",0,0,"58",,terminal_output +6169,4559483,"TERMINAL",0,0,"69",,terminal_output +6170,4560498,"TERMINAL",0,0,"2025-07-03 17:31:37.283818: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. 
Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +6171,4560523,"TERMINAL",0,0,"740",,terminal_output +6172,4561515,"TERMINAL",0,0,"81",,terminal_output +6173,4562542,"TERMINAL",0,0,"92",,terminal_output +6174,4563576,"TERMINAL",0,0,"403",,terminal_output +6175,4564625,"TERMINAL",0,0,"14",,terminal_output +6176,4565714,"TERMINAL",0,0,"25",,terminal_output +6177,4566738,"TERMINAL",0,0,"36",,terminal_output +6178,4567864,"TERMINAL",0,0,"47",,terminal_output +6179,4568809,"TERMINAL",0,0,"58",,terminal_output +6180,4569502,"TERMINAL",0,0,"2025-07-03 17:31:46.391272: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +6181,4569910,"TERMINAL",0,0,"69",,terminal_output +6182,4571003,"TERMINAL",0,0,"750",,terminal_output +6183,4572064,"TERMINAL",0,0,"81",,terminal_output +6184,4573018,"TERMINAL",0,0,"92",,terminal_output +6185,4574111,"TERMINAL",0,0,"503",,terminal_output +6186,4575194,"TERMINAL",0,0,"15",,terminal_output +6187,4576271,"TERMINAL",0,0,"36",,terminal_output +6188,4577307,"TERMINAL",0,0,"47",,terminal_output +6189,4577308,"TERMINAL",0,0,"2025-07-03 17:31:54.119231: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +6190,4578297,"TERMINAL",0,0,"58",,terminal_output +6191,4579391,"TERMINAL",0,0,"69",,terminal_output +6192,4580374,"TERMINAL",0,0,"76:00",,terminal_output +6193,4581422,"TERMINAL",0,0,"81",,terminal_output +6194,4582717,"TERMINAL",0,0,"92",,terminal_output +6195,4583562,"TERMINAL",0,0,"2:003",,terminal_output +6196,4584225,"TERMINAL",0,0,"2025-07-03 17:32:00.916424: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +6197,4584724,"TERMINAL",0,0,"14",,terminal_output +6198,4585556,"TERMINAL",0,0,"25",,terminal_output +6199,4586606,"TERMINAL",0,0,"36",,terminal_output +6200,4587657,"TERMINAL",0,0,"47",,terminal_output +6201,4588707,"TERMINAL",0,0,"58",,terminal_output +6202,4589776,"TERMINAL",0,0,"69",,terminal_output +6203,4590396,"TERMINAL",0,0,"2025-07-03 17:32:07.237283: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +6204,4590909,"TERMINAL",0,0,"710",,terminal_output +6205,4591928,"TERMINAL",0,0,"81",,terminal_output +6206,4592980,"TERMINAL",0,0,"92",,terminal_output +6207,4593772,"TERMINAL",0,0,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/orbax/checkpoint/_src/serialization/type_handlers.py:1251: UserWarning: Sharding info not provided when restoring. Populating sharding info from sharding file. 
Please note restoration time will be slightly increased due to reading from file. Note also that this option is unsafe when restoring on a different topology than the checkpoint was saved with.\r\n warnings.warn(\r\n",,terminal_output +6208,4593941,"TERMINAL",0,0,"103",,terminal_output +6209,4594998,"TERMINAL",0,0,"14",,terminal_output +6210,4596140,"TERMINAL",0,0,"25",,terminal_output +6211,4597149,"TERMINAL",0,0,"36",,terminal_output +6212,4598130,"TERMINAL",0,0,"48",,terminal_output +6213,4599187,"TERMINAL",0,0,"69",,terminal_output +6214,4600222,"TERMINAL",0,0,"720",,terminal_output +6215,4601350,"TERMINAL",0,0,"81",,terminal_output +6216,4602066,"TERMINAL",0,0,"2025-07-03 17:32:18.914851: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +6217,4602372,"TERMINAL",0,0,"92",,terminal_output +6218,4603398,"TERMINAL",0,0,"203",,terminal_output +6219,4604420,"TERMINAL",0,0,"14",,terminal_output +6220,4605139,"TERMINAL",0,0,"2025-07-03 17:32:21.942940: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +6221,4605446,"TERMINAL",0,0,"25",,terminal_output +6222,4606475,"TERMINAL",0,0,"36",,terminal_output +6223,4607587,"TERMINAL",0,0,"47",,terminal_output +6224,4608566,"TERMINAL",0,0,"58",,terminal_output +6225,4609616,"TERMINAL",0,0,"69",,terminal_output +6226,4610770,"TERMINAL",0,0,"730",,terminal_output +6227,4611794,"TERMINAL",0,0,"81",,terminal_output +6228,4612819,"TERMINAL",0,0,"92",,terminal_output +6229,4613034,"TERMINAL",0,0,"2025-07-03 17:32:29.935485: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +6230,4613841,"TERMINAL",0,0,"303",,terminal_output +6231,4614726,"TERMINAL",0,0,"token_idxs shape: (1, 1, 920)\r\n",,terminal_output +6232,4614832,"TERMINAL",0,0,"token_idxs shape: (1, 6, 920)\r\n",,terminal_output +6233,4614842,"TERMINAL",0,0,"14",,terminal_output +6234,4614980,"TERMINAL",0,0,"action_tokens.shape: (1, 5, 1, 32)\r\n",,terminal_output +6235,4615146,"TERMINAL",0,0,"mask_expanded.shape: (1, 6, 920, 1)\r\nmask_token.shape: (1, 1, 1, 128)\r\n",,terminal_output +6236,4615889,"TERMINAL",0,0,"25",,terminal_output +6237,4616182,"TERMINAL",0,0,"mask_expanded.shape: (1, 6, 920, 1)\r\nmask_token.shape: (1, 1, 1, 128)\r\n",,terminal_output +6238,4616577,"TERMINAL",0,0,"2025-07-03 17:32:33.475956: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. 
Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +6239,4616913,"TERMINAL",0,0,"36",,terminal_output +6240,4617980,"TERMINAL",0,0,"47",,terminal_output +6241,4619065,"TERMINAL",0,0,"58",,terminal_output +6242,4620089,"TERMINAL",0,0,"69",,terminal_output +6243,4621112,"TERMINAL",0,0,"740",,terminal_output +6244,4622238,"TERMINAL",0,0,"82",,terminal_output +6245,4622252,"TERMINAL",0,0,"token_idxs shape: (1, 6, 920)\r\n",,terminal_output +6246,4622357,"TERMINAL",0,0,"token_idxs shape: (1, 10, 920)\r\naction_tokens.shape: (1, 5, 1, 32)\r\nmask_expanded.shape: (1, 6, 920, 1)\r\nmask_token.shape: (1, 1, 1, 128)\r\n",,terminal_output +6247,4622603,"TERMINAL",0,0,"Traceback (most recent call last):\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/jax/_src/lax/lax.py"", line 218, in _broadcast_shapes_uncached\r\n return _try_broadcast_shapes(*rank_promoted_shapes, name='broadcast_shapes')\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/jax/_src/lax/lax.py"", line 135, in _try_broadcast_shapes\r\n raise TypeError(f'{name} got incompatible shapes for broadcasting: '\r\nTypeError: broadcast_shapes got incompatible shapes for broadcasting: (1, 6, 920, 1), (1, 1, 1, 128), (1, 10, 920, 128).\r\n\r\nThe above exception was the direct cause of the following exception:\r\n\r\nTraceback (most recent call last):\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/jax/_src/lax/lax.py"", line 196, in broadcast_shapes\r\n return _broadcast_shapes_cached(*shapes)\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/jax/_src/util.py"", line 294, in wrapper\r\n return cached(config.trace_context() if trace_context_in_key else _ignore(),\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/jax/_src/util.py"", line 288, in cached\r\n return f(*args, **kwargs)\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/jax/_src/lax/lax.py"", line 202, in _broadcast_shapes_cached\r\n return _broadcast_shapes_uncached(*shapes)\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/jax/_src/lax/lax.py"", line 221, in _broadcast_shapes_uncached\r\n raise ValueError(f""Incompatible shapes for broadcasting: shapes={list(shapes)}"") from err\r\nValueError: Incompatible shapes for broadcasting: shapes=[(1, 6, 920, 1), (1, 1, 1, 128), (1, 10, 920, 128)]\r\n\r\nDuring handling of the above exception, another exception occurred:\r\n\r\nTraceback (most recent call last):\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/jax/_src/lax/lax.py"", line 218, in _broadcast_shapes_uncached\r\n return _try_broadcast_shapes(*rank_promoted_shapes, name='broadcast_shapes')\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/jax/_src/lax/lax.py"", line 135, in _try_broadcast_shapes\r\n raise TypeError(f'{name} got incompatible shapes for broadcasting: '\r\nTypeError: broadcast_shapes got incompatible shapes for broadcasting: (1, 6, 920, 1), (1, 1, 1, 128), (1, 10, 920, 128).\r\n\r\nThe above exception was the 
direct cause of the following exception:\r\n\r\njax.errors.SimplifiedTraceback: For simplicity, JAX has removed its internal frames from the traceback of the following exception. Set JAX_TRACEBACK_FILTERING=off to include these.\r\n\r\nThe above exception was the direct cause of the following exception:\r\n\r\nTraceback (most recent call last):\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/sample.py"", line 157, in <module>\r\n vid = _autoreg_sample_mihir(rng, video_batch, action_batch)\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/sample.py"", line 118, in _autoreg_sample_mihir\r\n generated_vid = genie.apply(\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/genie.py"", line 155, in sample_mihir\r\n final_carry, _ = loop_fn(init_carry, jnp.arange(steps))\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/flax/core/axes_scan.py"", line 165, in scan_fn\r\n _, out_pvals, _ = pe.trace_to_jaxpr_nounits(f_flat, in_pvals)\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/flax/core/axes_scan.py"", line 135, in body_fn\r\n broadcast_out, c, ys = fn(broadcast_in, c, *xs)\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/genie.py"", line 248, in __call__\r\n vid_embed = jnp.where(mask_expanded, mask_token, vid_embed)\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/jax/_src/numpy/lax_numpy.py"", line 2850, in where\r\n return util._where(condition, x, y)\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/jax/_src/numpy/util.py"", line 311, in _where\r\n condition, x_arr, y_arr = _broadcast_arrays(condition, x, y)\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/jax/_src/numpy/util.py"", line 264, in _broadcast_arrays\r\n result_shape = lax.broadcast_shapes(*shapes)\r\nValueError: Incompatible shapes for broadcasting: shapes=[(1, 6, 920, 1), (1, 1, 1, 128), (1, 10, 920, 128)]\r\n",,terminal_output +6248,4623176,"TERMINAL",0,0,"403",,terminal_output +6249,4623992,"TERMINAL",0,0,"]0;tum_cte0515@hkn0401:~/Projects/jafar[?2004h(jafar) [tum_cte0515@hkn0401 jafar]$ ",,terminal_output +6250,4624219,"TERMINAL",0,0,"14",,terminal_output +6251,4625311,"TERMINAL",0,0,"25",,terminal_output +6252,4626335,"TERMINAL",0,0,"36",,terminal_output +6253,4627356,"TERMINAL",0,0,"47",,terminal_output +6254,4628406,"TERMINAL",0,0,"58",,terminal_output +6255,4629509,"TERMINAL",0,0,"69",,terminal_output +6256,4630534,"TERMINAL",0,0,"750",,terminal_output +6257,4631558,"TERMINAL",0,0,"81",,terminal_output +6258,4632565,"TERMINAL",0,0,"92",,terminal_output +6259,4633608,"TERMINAL",0,0,"503",,terminal_output +6260,4634733,"TERMINAL",0,0,"14",,terminal_output +6261,4635691,"TERMINAL",0,0,"25",,terminal_output +6262,4636780,"TERMINAL",0,0,"36",,terminal_output +6263,4637777,"TERMINAL",0,0,"47",,terminal_output +6264,4638930,"TERMINAL",0,0,"58",,terminal_output +6265,4640012,"TERMINAL",0,0,"69",,terminal_output +6266,4640978,"TERMINAL",0,0,"77:00",,terminal_output +6267,4641864,"genie.py",0,0,"",python,tab +6268,4641865,"genie.py",3429,0,"",python,selection_mouse +6269,4641866,"genie.py",3429,5,"shape",python,selection_mouse +6270,4642018,"TERMINAL",0,0,"81",,terminal_output
+6271,4642387,"genie.py",3454,0,"",python,selection_mouse +6272,4642444,"genie.py",3453,0,"",python,selection_command +6273,4642547,"genie.py",3447,7,"seq_len",python,selection_mouse +6274,4642549,"genie.py",3448,6,"eq_len",python,selection_command +6275,4642999,"TERMINAL",0,0,"92",,terminal_output +6276,4643390,"genie.py",3618,0,"",python,selection_mouse +6277,4644031,"genie.py",3614,0,"",python,selection_mouse +6278,4644084,"TERMINAL",0,0,"3:003",,terminal_output +6279,4644572,"genie.py",3620,0,"",python,selection_mouse +6280,4645175,"TERMINAL",0,0,"14",,terminal_output +6281,4646138,"TERMINAL",0,0,"26",,terminal_output +6282,4647267,"TERMINAL",0,0,"47",,terminal_output +6283,4648290,"TERMINAL",0,0,"58",,terminal_output +6284,4649189,"genie.py",3658,0,"",python,selection_command +6285,4649356,"genie.py",3721,0,"",python,selection_command +6286,4649370,"TERMINAL",0,0,"69",,terminal_output +6287,4650412,"TERMINAL",0,0,"710",,terminal_output +6288,4651421,"TERMINAL",0,0,"81",,terminal_output +6289,4651746,"genie.py",3686,85," token_idxs = jnp.concatenate([token_idxs, pad], axis=1) # shape (B, S, N)",python,selection_command +6290,4651981,"genie.py",3623,148," pad = jnp.zeros(pad_shape, dtype=token_idxs.dtype)\n token_idxs = jnp.concatenate([token_idxs, pad], axis=1) # shape (B, S, N)",python,selection_command +6291,4652104,"genie.py",3585,186," pad_shape = (B, S - T, N)\n pad = jnp.zeros(pad_shape, dtype=token_idxs.dtype)\n token_idxs = jnp.concatenate([token_idxs, pad], axis=1) # shape (B, S, N)",python,selection_command +6292,4652248,"genie.py",3528,243," print(""token_idxs shape:"", token_idxs.shape)\n pad_shape = (B, S - T, N)\n pad = jnp.zeros(pad_shape, dtype=token_idxs.dtype)\n token_idxs = jnp.concatenate([token_idxs, pad], axis=1) # shape (B, S, N)",python,selection_command +6293,4652432,"genie.py",3476,295," # begin potential forloop (from T to S)\n print(""token_idxs shape:"", token_idxs.shape)\n pad_shape = (B, S - T, N)\n pad = jnp.zeros(pad_shape, dtype=token_idxs.dtype)\n token_idxs = jnp.concatenate([token_idxs, pad], axis=1) # shape (B, S, N)",python,selection_command +6294,4652483,"TERMINAL",0,0,"92",,terminal_output +6295,4653575,"TERMINAL",0,0,"103",,terminal_output +6296,4654388,"genie.py",3476,296,"",python,content +6297,4654535,"genie.py",3456,0,"",python,selection_command +6298,4654560,"TERMINAL",0,0,"14",,terminal_output +6299,4654747,"genie.py",3455,0,"",python,selection_command +6300,4654866,"genie.py",3435,0,"",python,selection_command +6301,4655592,"TERMINAL",0,0,"25",,terminal_output +6302,4655766,"genie.py",3454,0,"\n # begin potential forloop (from T to S)\n print(""token_idxs shape:"", token_idxs.shape)\n pad_shape = (B, S - T, N)\n pad = jnp.zeros(pad_shape, dtype=token_idxs.dtype)\n token_idxs = jnp.concatenate([token_idxs, pad], axis=1) # shape (B, S, N)",python,content +6303,4655862,"genie.py",3467,0,"",python,selection_command +6304,4656683,"TERMINAL",0,0,"36",,terminal_output +6305,4657152,"genie.py",3455,51," # begin potential forloop (from T to S)",python,selection_command +6306,4657410,"genie.py",3455,108," # begin potential forloop (from T to S)\n print(""token_idxs shape:"", token_idxs.shape)",python,selection_command +6307,4657609,"genie.py",3455,146," # begin potential forloop (from T to S)\n print(""token_idxs shape:"", token_idxs.shape)\n pad_shape = (B, S - T, N)",python,selection_command +6308,4657703,"genie.py",3455,209," # begin potential forloop (from T to S)\n print(""token_idxs shape:"", token_idxs.shape)\n pad_shape = (B, S - T, N)\n 
pad = jnp.zeros(pad_shape, dtype=token_idxs.dtype)",python,selection_command +6309,4657704,"TERMINAL",0,0,"47",,terminal_output +6310,4658050,"genie.py",3455,295," # begin potential forloop (from T to S)\n print(""token_idxs shape:"", token_idxs.shape)\n pad_shape = (B, S - T, N)\n pad = jnp.zeros(pad_shape, dtype=token_idxs.dtype)\n token_idxs = jnp.concatenate([token_idxs, pad], axis=1) # shape (B, S, N)",python,selection_command +6311,4658278,"genie.py",3467,0,"",python,selection_command +6312,4658517,"genie.py",3673,4,"",python,content +6313,4658518,"genie.py",3610,4,"",python,content +6314,4658518,"genie.py",3572,4,"",python,content +6315,4658518,"genie.py",3515,4,"",python,content +6316,4658518,"genie.py",3463,4,"",python,content +6317,4658743,"TERMINAL",0,0,"58",,terminal_output +6318,4658912,"genie.py",3462,0,"",python,selection_command +6319,4659585,"genie.py",3455,48,"",python,content +6320,4659610,"genie.py",3463,0,"",python,selection_command +6321,4659669,"genie.py",3516,0,"",python,selection_command +6322,4659795,"TERMINAL",0,0,"69",,terminal_output +6323,4659824,"genie.py",3550,0,"",python,selection_command +6324,4659959,"genie.py",3609,0,"",python,selection_command +6325,4660106,"genie.py",3683,0,"",python,selection_command +6326,4660233,"genie.py",3692,0,"",python,selection_command +6327,4660381,"genie.py",3704,0,"",python,selection_command +6328,4660802,"genie.py",3704,0,"\n # begin potential forloop (from T to S)",python,content +6329,4660835,"genie.py",3713,0,"",python,selection_command +6330,4660936,"TERMINAL",0,0,"720",,terminal_output +6331,4661883,"genie.py",3713,0," ",python,content +6332,4661891,"TERMINAL",0,0,"81",,terminal_output +6333,4662593,"genie.py",3716,0,"",python,selection_command +6334,4662922,"TERMINAL",0,0,"92",,terminal_output +6335,4663575,"genie.py",3852,0,"",python,selection_mouse +6336,4664017,"TERMINAL",0,0,"203",,terminal_output +6337,4664301,"genie.py",3814,75,"",python,content +6338,4664395,"genie.py",3826,0,"",python,selection_command +6339,4664437,"genie.py",3769,0,"",python,selection_command +6340,4664561,"genie.py",3717,0,"",python,selection_command +6341,4664691,"genie.py",3704,0,"",python,selection_command +6342,4664832,"genie.py",3696,0,"",python,selection_command +6343,4664991,"genie.py",3683,0,"",python,selection_command +6344,4665063,"TERMINAL",0,0,"14",,terminal_output +6345,4665311,"genie.py",3613,0,"",python,selection_command +6346,4666047,"TERMINAL",0,0,"25",,terminal_output +6347,4666717,"genie.py",3682,0,"\n action_tokens = self.lam.vq.get_codes(batch[""latent_actions""])",python,content +6348,4666790,"genie.py",3695,0,"",python,selection_command +6349,4667195,"TERMINAL",0,0,"36",,terminal_output +6350,4667434,"genie.py",3691,4,"",python,content +6351,4667582,"genie.py",3690,0,"",python,selection_command +6352,4668179,"TERMINAL",0,0,"48",,terminal_output +6353,4668627,"genie.py",3912,0,"",python,selection_mouse +6354,4669194,"genie.py",3775,0,"",python,selection_mouse +6355,4669269,"TERMINAL",0,0,"69",,terminal_output +6356,4669828,"genie.py",3775,1,"",python,content +6357,4669858,"genie.py",3787,0,"",python,selection_command +6358,4670224,"TERMINAL",0,0,"730",,terminal_output +6359,4670902,"genie.py",3839,0,"",python,selection_command +6360,4671010,"genie.py",3896,0,"",python,selection_command +6361,4671298,"genie.py",3839,0,"",python,selection_command +6362,4671306,"TERMINAL",0,0,"81",,terminal_output +6363,4671466,"genie.py",3787,0,"",python,selection_command +6364,4672322,"TERMINAL",0,0,"92",,terminal_output 
+6365,4672528,"genie.py",3839,0,"",python,selection_command +6366,4672688,"genie.py",3896,0,"",python,selection_command +6367,4672860,"genie.py",3839,0,"",python,selection_command +6368,4673033,"genie.py",3787,0,"",python,selection_command +6369,4673375,"TERMINAL",0,0,"303",,terminal_output +6370,4673543,"genie.py",3775,52,"",python,content +6371,4673582,"genie.py",3787,0,"",python,selection_command +6372,4673643,"genie.py",3767,0,"",python,selection_command +6373,4673743,"genie.py",3754,0,"",python,selection_command +6374,4674895,"TERMINAL",0,0,"14",,terminal_output +6375,4675536,"genie.py",3754,0,"\n # begin potential forloop (from T to S)",python,content +6376,4675625,"genie.py",3767,0,"",python,selection_command +6377,4675935,"TERMINAL",0,0,"25",,terminal_output +6378,4676512,"genie.py",3763,4,"",python,content +6379,4676826,"genie.py",3762,0,"",python,selection_command +6380,4676999,"TERMINAL",0,0,"36",,terminal_output +6381,4677726,"genie.py",3554,0,"",python,selection_mouse +6382,4678051,"TERMINAL",0,0,"47",,terminal_output +6383,4678313,"genie.py",3613,0,"",python,selection_mouse +6384,4678458,"genie.py",3609,10,"token_idxs",python,selection_mouse +6385,4679172,"TERMINAL",0,0,"58",,terminal_output +6386,4679398,"genie.py",3651,0,"",python,selection_mouse +6387,4680150,"TERMINAL",0,0,"640",,terminal_output +6388,4680447,"genie.py",3571,0,"",python,selection_mouse +6389,4681180,"TERMINAL",0,0,"81",,terminal_output +6390,4682217,"TERMINAL",0,0,"92",,terminal_output +6391,4683263,"TERMINAL",0,0,"403",,terminal_output +6392,4683405,"genie.py",4448,0,"",python,selection_mouse +6393,4684395,"TERMINAL",0,0,"14",,terminal_output +6394,4684672,"genie.py",4269,0,"",python,selection_mouse +6395,4685169,"genie.py",4195,0,"",python,selection_mouse +6396,4685377,"TERMINAL",0,0,"25",,terminal_output +6397,4686547,"TERMINAL",0,0,"36",,terminal_output +6398,4687486,"TERMINAL",0,0,"47",,terminal_output +6399,4688517,"TERMINAL",0,0,"58",,terminal_output +6400,4688666,"TERMINAL",0,0,"sh scripts_horeka/overfit_sample_tiny/sample.sh ",,terminal_output +6401,4689290,"TERMINAL",0,0,"\r\n[?2004l\r",,terminal_output +6402,4689404,"TERMINAL",0,0,"Sampling from checkpoint: /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/3309699/genie_1751384516_74500/\r\n",,terminal_output +6403,4689609,"TERMINAL",0,0,"69",,terminal_output +6404,4690621,"TERMINAL",0,0,"750",,terminal_output +6405,4691675,"TERMINAL",0,0,"81",,terminal_output +6406,4692282,"TERMINAL",0,0,"2025-07-03 17:33:49.168357: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +6407,4692801,"TERMINAL",0,0,"92",,terminal_output +6408,4693818,"TERMINAL",0,0,"503",,terminal_output +6409,4694885,"TERMINAL",0,0,"14",,terminal_output +6410,4695866,"TERMINAL",0,0,"25",,terminal_output +6411,4696493,"TERMINAL",0,0,"2025-07-03 17:33:53.371662: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. 
Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +6412,4696874,"sample.py",0,0,"",python,tab +6413,4697072,"TERMINAL",0,0,"36",,terminal_output +6414,4697951,"TERMINAL",0,0,"47",,terminal_output +6415,4699040,"TERMINAL",0,0,"58",,terminal_output +6416,4700044,"TERMINAL",0,0,"69",,terminal_output +6417,4700970,"genie.py",0,0,"",python,tab +6418,4701153,"TERMINAL",0,0,"78:00",,terminal_output +6419,4702214,"TERMINAL",0,0,"82",,terminal_output +6420,4703273,"TERMINAL",0,0,"4:003",,terminal_output +6421,4704264,"TERMINAL",0,0,"14",,terminal_output +6422,4705285,"TERMINAL",0,0,"25",,terminal_output +6423,4705713,"TERMINAL",0,0,"2025-07-03 17:34:02.614399: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +6424,4706311,"TERMINAL",0,0,"36",,terminal_output +6425,4707376,"TERMINAL",0,0,"47",,terminal_output +6426,4708377,"TERMINAL",0,0,"58",,terminal_output +6427,4709428,"TERMINAL",0,0,"69",,terminal_output +6428,4710519,"TERMINAL",0,0,"710",,terminal_output +6429,4711534,"TERMINAL",0,0,"81",,terminal_output +6430,4712568,"TERMINAL",0,0,"92",,terminal_output +6431,4713610,"TERMINAL",0,0,"103",,terminal_output +6432,4713721,"TERMINAL",0,0,"2025-07-03 17:34:10.621893: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +6433,4714671,"TERMINAL",0,0,"14",,terminal_output +6434,4715731,"TERMINAL",0,0,"25",,terminal_output +6435,4716756,"TERMINAL",0,0,"36",,terminal_output +6436,4717881,"TERMINAL",0,0,"47",,terminal_output +6437,4718906,"TERMINAL",0,0,"58",,terminal_output +6438,4719929,"TERMINAL",0,0,"69",,terminal_output +6439,4720343,"TERMINAL",0,0,"2025-07-03 17:34:17.227869: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +6440,4720954,"TERMINAL",0,0,"720",,terminal_output +6441,4722080,"TERMINAL",0,0,"81",,terminal_output +6442,4723027,"TERMINAL",0,0,"92",,terminal_output +6443,4724074,"TERMINAL",0,0,"203",,terminal_output +6444,4725153,"TERMINAL",0,0,"15",,terminal_output +6445,4726171,"TERMINAL",0,0,"36",,terminal_output +6446,4726895,"TERMINAL",0,0,"2025-07-03 17:34:23.775497: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. 
Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +6447,4727302,"TERMINAL",0,0,"47",,terminal_output +6448,4728270,"TERMINAL",0,0,"58",,terminal_output +6449,4729350,"TERMINAL",0,0,"69",,terminal_output +6450,4730273,"TERMINAL",0,0,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/orbax/checkpoint/_src/serialization/type_handlers.py:1251: UserWarning: Sharding info not provided when restoring. Populating sharding info from sharding file. Please note restoration time will be slightly increased due to reading from file. Note also that this option is unsafe when restoring on a different topology than the checkpoint was saved with.\r\n warnings.warn(\r\n",,terminal_output +6451,4730383,"TERMINAL",0,0,"730",,terminal_output +6452,4731416,"TERMINAL",0,0,"81",,terminal_output +6453,4732468,"TERMINAL",0,0,"92",,terminal_output +6454,4733550,"TERMINAL",0,0,"303",,terminal_output +6455,4734570,"TERMINAL",0,0,"14",,terminal_output +6456,4735610,"TERMINAL",0,0,"25",,terminal_output +6457,4736659,"TERMINAL",0,0,"36",,terminal_output +6458,4737746,"TERMINAL",0,0,"47",,terminal_output +6459,4738570,"TERMINAL",0,0,"2025-07-03 17:34:35.382142: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +6460,4738759,"TERMINAL",0,0,"58",,terminal_output +6461,4739904,"TERMINAL",0,0,"69",,terminal_output +6462,4740922,"TERMINAL",0,0,"740",,terminal_output +6463,4741434,"TERMINAL",0,0,"2025-07-03 17:34:38.298302: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +6464,4741950,"TERMINAL",0,0,"81",,terminal_output +6465,4742979,"TERMINAL",0,0,"92",,terminal_output +6466,4744095,"TERMINAL",0,0,"403",,terminal_output +6467,4745120,"TERMINAL",0,0,"14",,terminal_output +6468,4746097,"TERMINAL",0,0,"25",,terminal_output +6469,4747168,"TERMINAL",0,0,"37",,terminal_output +6470,4748233,"TERMINAL",0,0,"58",,terminal_output +6471,4749319,"TERMINAL",0,0,"69",,terminal_output +6472,4749427,"TERMINAL",0,0,"2025-07-03 17:34:46.311834: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. 
Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +6473,4750342,"TERMINAL",0,0,"750",,terminal_output +6474,4751068,"TERMINAL",0,0,"token_idxs shape: (1, 1, 920)\r\n",,terminal_output +6475,4751367,"TERMINAL",0,0,"token_idxs shape: (1, 6, 920)\r\naction_tokens.shape: (1, 5, 1, 32)\r\n",,terminal_output +6476,4751389,"TERMINAL",0,0,"81",,terminal_output +6477,4751452,"TERMINAL",0,0,"mask_expanded.shape: (1, 6, 920, 1)\r\nmask_token.shape: (1, 1, 1, 128)\r\n",,terminal_output +6478,4752383,"TERMINAL",0,0,"92",,terminal_output +6479,4752523,"TERMINAL",0,0,"mask_expanded.shape: (1, 6, 920, 1)\r\nmask_token.shape: (1, 1, 1, 128)\r\n",,terminal_output +6480,4752903,"TERMINAL",0,0,"2025-07-03 17:34:49.778876: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +6481,4753522,"TERMINAL",0,0,"503",,terminal_output +6482,4754480,"TERMINAL",0,0,"14",,terminal_output +6483,4755571,"TERMINAL",0,0,"25",,terminal_output +6484,4756603,"TERMINAL",0,0,"36",,terminal_output +6485,4757620,"TERMINAL",0,0,"47",,terminal_output +6486,4758742,"TERMINAL",0,0,"58",,terminal_output +6487,4758795,"TERMINAL",0,0,"token_idxs shape: (1, 6, 920)\r\naction_tokens.shape: (1, 5, 1, 32)\r\nmask_expanded.shape: (1, 6, 920, 1)\r\nmask_token.shape: (1, 1, 1, 128)\r\n",,terminal_output +6488,4758940,"TERMINAL",0,0,"mask_expanded.shape: (1, 6, 920, 1)\r\nmask_token.shape: (1, 1, 1, 128)\r\n",,terminal_output +6489,4759709,"TERMINAL",0,0,"69",,terminal_output +6490,4760788,"TERMINAL",0,0,"79:00",,terminal_output +6491,4761812,"TERMINAL",0,0,"81",,terminal_output +6492,4761898,"TERMINAL",0,0,"token_idxs shape: (1, 6, 920)\r\naction_tokens.shape: (1, 5, 1, 32)\r\nmask_expanded.shape: (1, 6, 920, 1)\r\nmask_token.shape: (1, 1, 1, 128)\r\n",,terminal_output +6493,4762049,"TERMINAL",0,0,"mask_expanded.shape: (1, 6, 920, 1)\r\nmask_token.shape: (1, 1, 1, 128)\r\n",,terminal_output +6494,4762986,"TERMINAL",0,0,"92",,terminal_output +6495,4763961,"TERMINAL",0,0,"5:003",,terminal_output +6496,4764987,"TERMINAL",0,0,"14",,terminal_output +6497,4764998,"TERMINAL",0,0,"token_idxs shape: (1, 6, 920)\r\naction_tokens.shape: (1, 5, 1, 32)\r\nmask_expanded.shape: (1, 6, 920, 1)\r\nmask_token.shape: (1, 1, 1, 128)\r\n",,terminal_output +6498,4765155,"TERMINAL",0,0,"mask_expanded.shape: (1, 6, 920, 1)\r\nmask_token.shape: (1, 1, 1, 128)\r\n",,terminal_output +6499,4766039,"TERMINAL",0,0,"25",,terminal_output +6500,4767049,"TERMINAL",0,0,"36",,terminal_output +6501,4768120,"TERMINAL",0,0,"47",,terminal_output +6502,4768120,"TERMINAL",0,0,"token_idxs shape: (1, 6, 920)\r\naction_tokens.shape: (1, 5, 1, 32)\r\nmask_expanded.shape: (1, 6, 920, 1)\r\nmask_token.shape: (1, 1, 1, 128)\r\n",,terminal_output +6503,4768271,"TERMINAL",0,0,"mask_expanded.shape: (1, 6, 920, 1)\r\nmask_token.shape: (1, 1, 1, 128)\r\n",,terminal_output +6504,4769184,"TERMINAL",0,0,"59",,terminal_output +6505,4770208,"TERMINAL",0,0,"710",,terminal_output +6506,4771334,"TERMINAL",0,0,"81",,terminal_output +6507,4772358,"TERMINAL",0,0,"92",,terminal_output +6508,4773385,"TERMINAL",0,0,"103",,terminal_output +6509,4774382,"TERMINAL",0,0,"14",,terminal_output +6510,4775533,"TERMINAL",0,0,"25",,terminal_output 
+6511,4776576,"TERMINAL",0,0,"36",,terminal_output +6512,4777526,"TERMINAL",0,0,"47",,terminal_output +6513,4777588,"TERMINAL",0,0,"SSIM: 0.43240007758140564\r\n",,terminal_output +6514,4778606,"TERMINAL",0,0,"58",,terminal_output +6515,4779772,"TERMINAL",0,0,"]0;tum_cte0515@hkn0401:~/Projects/jafar[?2004h(jafar) [tum_cte0515@hkn0401 jafar]$ ",,terminal_output +6516,4779773,"TERMINAL",0,0,"69",,terminal_output +6517,4780670,"TERMINAL",0,0,"720",,terminal_output +6518,4781756,"TERMINAL",0,0,"81",,terminal_output +6519,4783084,"TERMINAL",0,0,"92",,terminal_output +6520,4783964,"TERMINAL",0,0,"203",,terminal_output +6521,4784979,"TERMINAL",0,0,"14",,terminal_output +6522,4785977,"TERMINAL",0,0,"25",,terminal_output +6523,4786948,"TERMINAL",0,0,"36",,terminal_output +6524,4787993,"TERMINAL",0,0,"47",,terminal_output +6525,4789055,"TERMINAL",0,0,"58",,terminal_output +6526,4790177,"TERMINAL",0,0,"69",,terminal_output +6527,4791201,"TERMINAL",0,0,"731",,terminal_output +6528,4792225,"TERMINAL",0,0,"92",,terminal_output +6529,4793208,"TERMINAL",0,0,"303",,terminal_output +6530,4794272,"TERMINAL",0,0,"14",,terminal_output +6531,4795717,"TERMINAL",0,0,"25",,terminal_output +6532,4796734,"TERMINAL",0,0,"36",,terminal_output +6533,4797980,"TERMINAL",0,0,"47",,terminal_output +6534,4798983,"TERMINAL",0,0,"58",,terminal_output +6535,4800007,"TERMINAL",0,0,"69",,terminal_output +6536,4800998,"TERMINAL",0,0,"740",,terminal_output +6537,4802055,"TERMINAL",0,0,"81",,terminal_output +6538,4803090,"TERMINAL",0,0,"92",,terminal_output +6539,4804205,"TERMINAL",0,0,"404",,terminal_output +6540,4805230,"TERMINAL",0,0,"25",,terminal_output +6541,4806254,"TERMINAL",0,0,"36",,terminal_output +6542,4807278,"TERMINAL",0,0,"47",,terminal_output +6543,4808307,"TERMINAL",0,0,"58",,terminal_output +6544,4809429,"TERMINAL",0,0,"69",,terminal_output +6545,4810452,"TERMINAL",0,0,"750",,terminal_output +6546,4811476,"TERMINAL",0,0,"81",,terminal_output +6547,4812500,"TERMINAL",0,0,"92",,terminal_output +6548,4813628,"TERMINAL",0,0,"503",,terminal_output +6549,4814565,"TERMINAL",0,0,"14",,terminal_output +6550,4815611,"TERMINAL",0,0,"25",,terminal_output +6551,4816662,"TERMINAL",0,0,"36",,terminal_output +6552,4817710,"TERMINAL",0,0,"47",,terminal_output +6553,4818748,"TERMINAL",0,0,"58",,terminal_output +6554,4819814,"TERMINAL",0,0,"69",,terminal_output +6555,4820898,"TERMINAL",0,0,"720:00",,terminal_output +6556,4821878,"TERMINAL",0,0,"81",,terminal_output +6557,4822979,"TERMINAL",0,0,"92",,terminal_output +6558,4823969,"TERMINAL",0,0,"6:003",,terminal_output +6559,4825102,"TERMINAL",0,0,"14",,terminal_output +6560,4826047,"TERMINAL",0,0,"25",,terminal_output +6561,4827144,"TERMINAL",0,0,"36",,terminal_output +6562,4828168,"TERMINAL",0,0,"48",,terminal_output +6563,4829294,"TERMINAL",0,0,"69",,terminal_output +6564,4830321,"TERMINAL",0,0,"710",,terminal_output +6565,4831342,"TERMINAL",0,0,"81",,terminal_output +6566,4832327,"TERMINAL",0,0,"92",,terminal_output +6567,4833390,"TERMINAL",0,0,"103",,terminal_output +6568,4834414,"TERMINAL",0,0,"14",,terminal_output +6569,4835439,"TERMINAL",0,0,"25",,terminal_output +6570,4836578,"TERMINAL",0,0,"36",,terminal_output +6571,4837589,"TERMINAL",0,0,"47",,terminal_output +6572,4838562,"TERMINAL",0,0,"58",,terminal_output +6573,4839638,"TERMINAL",0,0,"69",,terminal_output +6574,4840646,"TERMINAL",0,0,"720",,terminal_output +6575,4841684,"TERMINAL",0,0,"81",,terminal_output +6576,4842811,"TERMINAL",0,0,"92",,terminal_output +6577,4843835,"TERMINAL",0,0,"203",,terminal_output 
+6578,4844859,"TERMINAL",0,0,"14",,terminal_output +6579,4845883,"TERMINAL",0,0,"25",,terminal_output +6580,4846902,"TERMINAL",0,0,"36",,terminal_output +6581,4847994,"TERMINAL",0,0,"47",,terminal_output +6582,4849057,"TERMINAL",0,0,"58",,terminal_output +6583,4850082,"TERMINAL",0,0,"69",,terminal_output +6584,4851094,"TERMINAL",0,0,"730",,terminal_output +6585,4852123,"TERMINAL",0,0,"82",,terminal_output +6586,4853200,"TERMINAL",0,0,"303",,terminal_output +6587,4854281,"TERMINAL",0,0,"14",,terminal_output +6588,4855304,"TERMINAL",0,0,"25",,terminal_output +6589,4856310,"TERMINAL",0,0,"36",,terminal_output +6590,4857395,"TERMINAL",0,0,"47",,terminal_output +6591,4858480,"TERMINAL",0,0,"58",,terminal_output +6592,4859502,"TERMINAL",0,0,"69",,terminal_output +6593,4860539,"TERMINAL",0,0,"740",,terminal_output +6594,4861654,"TERMINAL",0,0,"81",,terminal_output +6595,4862593,"TERMINAL",0,0,"92",,terminal_output +6596,4863638,"TERMINAL",0,0,"403",,terminal_output +6597,4864684,"TERMINAL",0,0,"14",,terminal_output +6598,4865736,"TERMINAL",0,0,"25",,terminal_output +6599,4866829,"TERMINAL",0,0,"36",,terminal_output +6600,4868017,"TERMINAL",0,0,"47",,terminal_output +6601,4869128,"TERMINAL",0,0,"58",,terminal_output +6602,4870151,"TERMINAL",0,0,"650",,terminal_output +6603,4871118,"genie.py",0,0,"",python,tab +6604,4871336,"TERMINAL",0,0,"81",,terminal_output +6605,4872302,"TERMINAL",0,0,"92",,terminal_output +6606,4873246,"TERMINAL",0,0,"503",,terminal_output +6607,4873505,"genie.py",4201,0,"",python,selection_mouse +6608,4874164,"genie.py",4049,0,"",python,selection_mouse +6609,4874307,"TERMINAL",0,0,"14",,terminal_output +6610,4875194,"genie.py",3974,0,"",python,selection_mouse +6611,4875338,"genie.py",3973,6,"Create",python,selection_mouse +6612,4875427,"TERMINAL",0,0,"25",,terminal_output +6613,4875563,"genie.py",3973,76,"Create a mask that is 1 (True) where we just padded\n # token_idxs",python,selection_mouse +6614,4875639,"genie.py",3973,82,"Create a mask that is 1 (True) where we just padded\n # token_idxs shape",python,selection_mouse +6615,4875679,"genie.py",3973,84,"Create a mask that is 1 (True) where we just padded\n # token_idxs shape: ",python,selection_mouse +6616,4875680,"genie.py",3973,88,"Create a mask that is 1 (True) where we just padded\n # token_idxs shape: (B, ",python,selection_mouse +6617,4875720,"genie.py",3973,91,"Create a mask that is 1 (True) where we just padded\n # token_idxs shape: (B, S, ",python,selection_mouse +6618,4875721,"genie.py",3973,94,"Create a mask that is 1 (True) where we just padded\n # token_idxs shape: (B, S, N),",python,selection_mouse +6619,4875757,"genie.py",3973,175,"Create a mask that is 1 (True) where we just padded\n # token_idxs shape: (B, S, N), T = original length, S = seq_len\n # mask is True for padded positions",python,selection_mouse +6620,4875812,"genie.py",3973,176,"Create a mask that is 1 (True) where we just padded\n # token_idxs shape: (B, S, N), T = original length, S = seq_len\n # mask is True for padded positions ",python,selection_mouse +6621,4875864,"genie.py",3973,179,"Create a mask that is 1 (True) where we just padded\n # token_idxs shape: (B, S, N), T = original length, S = seq_len\n # mask is True for padded positions (i.",python,selection_mouse +6622,4875940,"genie.py",3973,185,"Create a mask that is 1 (True) where we just padded\n # token_idxs shape: (B, S, N), T = original length, S = seq_len\n # mask is True for padded positions (i.e., t ",python,selection_mouse +6623,4876034,"genie.py",3973,189,"Create a mask that 
is 1 (True) where we just padded\n # token_idxs shape: (B, S, N), T = original length, S = seq_len\n # mask is True for padded positions (i.e., t >= T",python,selection_mouse +6624,4876095,"genie.py",3973,190,"Create a mask that is 1 (True) where we just padded\n # token_idxs shape: (B, S, N), T = original length, S = seq_len\n # mask is True for padded positions (i.e., t >= T)",python,selection_mouse +6625,4876251,"genie.py",4163,0,"",python,selection_mouse +6626,4876262,"genie.py",4162,0,"",python,selection_command +6627,4876390,"TERMINAL",0,0,"36",,terminal_output +6628,4876641,"genie.py",4163,0,"",python,selection_mouse +6629,4876651,"genie.py",4162,0,"",python,selection_command +6630,4876878,"genie.py",4162,1,")",python,selection_mouse +6631,4876879,"genie.py",4160,2," T",python,selection_mouse +6632,4876879,"genie.py",4152,10,"e., t >= T",python,selection_mouse +6633,4876909,"genie.py",4163,0,"",python,selection_command +6634,4876910,"genie.py",4139,24,"positions (i.e., t >= T)",python,selection_mouse +6635,4876961,"genie.py",4063,100," N), T = original length, S = seq_len\n # mask is True for padded positions (i.e., t >= T)",python,selection_mouse +6636,4876999,"genie.py",4058,105,"B, S, N), T = original length, S = seq_len\n # mask is True for padded positions (i.e., t >= T)",python,selection_mouse +6637,4877037,"genie.py",4050,113,"shape: (B, S, N), T = original length, S = seq_len\n # mask is True for padded positions (i.e., t >= T)",python,selection_mouse +6638,4877104,"genie.py",4039,124,"token_idxs shape: (B, S, N), T = original length, S = seq_len\n # mask is True for padded positions (i.e., t >= T)",python,selection_mouse +6639,4877145,"genie.py",3973,190,"Create a mask that is 1 (True) where we just padded\n # token_idxs shape: (B, S, N), T = original length, S = seq_len\n # mask is True for padded positions (i.e., t >= T)",python,selection_mouse +6640,4877228,"genie.py",3972,191," Create a mask that is 1 (True) where we just padded\n # token_idxs shape: (B, S, N), T = original length, S = seq_len\n # mask is True for padded positions (i.e., t >= T)",python,selection_mouse +6641,4877256,"genie.py",3971,192,"# Create a mask that is 1 (True) where we just padded\n # token_idxs shape: (B, S, N), T = original length, S = seq_len\n # mask is True for padded positions (i.e., t >= T)",python,selection_mouse +6642,4877295,"genie.py",3970,193," # Create a mask that is 1 (True) where we just padded\n # token_idxs shape: (B, S, N), T = original length, S = seq_len\n # mask is True for padded positions (i.e., t >= T)",python,selection_mouse +6643,4877335,"genie.py",3969,194," # Create a mask that is 1 (True) where we just padded\n # token_idxs shape: (B, S, N), T = original length, S = seq_len\n # mask is True for padded positions (i.e., t >= T)",python,selection_mouse +6644,4877365,"genie.py",3968,195," # Create a mask that is 1 (True) where we just padded\n # token_idxs shape: (B, S, N), T = original length, S = seq_len\n # mask is True for padded positions (i.e., t >= T)",python,selection_mouse +6645,4877405,"genie.py",3967,196," # Create a mask that is 1 (True) where we just padded\n # token_idxs shape: (B, S, N), T = original length, S = seq_len\n # mask is True for padded positions (i.e., t >= T)",python,selection_mouse +6646,4877448,"genie.py",3966,197," # Create a mask that is 1 (True) where we just padded\n # token_idxs shape: (B, S, N), T = original length, S = seq_len\n # mask is True for padded positions (i.e., t >= T)",python,selection_mouse 
+6647,4877519,"genie.py",3965,198," # Create a mask that is 1 (True) where we just padded\n # token_idxs shape: (B, S, N), T = original length, S = seq_len\n # mask is True for padded positions (i.e., t >= T)",python,selection_mouse +6648,4877596,"TERMINAL",0,0,"47",,terminal_output +6649,4877598,"genie.py",3964,199," # Create a mask that is 1 (True) where we just padded\n # token_idxs shape: (B, S, N), T = original length, S = seq_len\n # mask is True for padded positions (i.e., t >= T)",python,selection_mouse +6650,4877690,"genie.py",3963,200," # Create a mask that is 1 (True) where we just padded\n # token_idxs shape: (B, S, N), T = original length, S = seq_len\n # mask is True for padded positions (i.e., t >= T)",python,selection_mouse +6651,4877768,"genie.py",3962,201," # Create a mask that is 1 (True) where we just padded\n # token_idxs shape: (B, S, N), T = original length, S = seq_len\n # mask is True for padded positions (i.e., t >= T)",python,selection_mouse +6652,4877846,"genie.py",4028,135," # token_idxs shape: (B, S, N), T = original length, S = seq_len\n # mask is True for padded positions (i.e., t >= T)",python,selection_mouse +6653,4877846,"genie.py",3962,201," # Create a mask that is 1 (True) where we just padded\n # token_idxs shape: (B, S, N), T = original length, S = seq_len\n # mask is True for padded positions (i.e., t >= T)",python,selection_mouse +6654,4878562,"TERMINAL",0,0,"58",,terminal_output +6655,4879369,"genie.py",4026,0,"",python,selection_mouse +6656,4879518,"TERMINAL",0,0,"69",,terminal_output +6657,4880015,"genie.py",3960,0,"",python,selection_mouse +6658,4880159,"genie.py",3959,12," ",python,selection_mouse +6659,4880492,"genie.py",3959,14," # ",python,selection_mouse +6660,4880492,"genie.py",3959,20," # Create",python,selection_mouse +6661,4880492,"genie.py",3959,27," # Create a mask",python,selection_mouse +6662,4880493,"genie.py",3959,32," # Create a mask that",python,selection_mouse +6663,4880561,"genie.py",3959,35," # Create a mask that is",python,selection_mouse +6664,4880562,"genie.py",3959,37," # Create a mask that is 1",python,selection_mouse +6665,4880601,"genie.py",3959,43," # Create a mask that is 1 (True",python,selection_mouse +6666,4880666,"genie.py",3959,112," # Create a mask that is 1 (True) where we just padded\n # token_idxs shape: (B, S, N), T =",python,selection_mouse +6667,4880667,"TERMINAL",0,0,"71:00",,terminal_output +6668,4880691,"genie.py",3959,121," # Create a mask that is 1 (True) where we just padded\n # token_idxs shape: (B, S, N), T = original",python,selection_mouse +6669,4880779,"genie.py",3959,122," # Create a mask that is 1 (True) where we just padded\n # token_idxs shape: (B, S, N), T = original ",python,selection_mouse +6670,4880821,"genie.py",3959,199," # Create a mask that is 1 (True) where we just padded\n # token_idxs shape: (B, S, N), T = original length, S = seq_len\n # mask is True for padded positions (i.e., t ",python,selection_mouse +6671,4880852,"genie.py",3959,201," # Create a mask that is 1 (True) where we just padded\n # token_idxs shape: (B, S, N), T = original length, S = seq_len\n # mask is True for padded positions (i.e., t >=",python,selection_mouse +6672,4880888,"genie.py",3959,203," # Create a mask that is 1 (True) where we just padded\n # token_idxs shape: (B, S, N), T = original length, S = seq_len\n # mask is True for padded positions (i.e., t >= T",python,selection_mouse +6673,4880921,"genie.py",3959,204," # Create a mask that is 1 (True) where we just padded\n # token_idxs shape: (B, S, N), 
T = original length, S = seq_len\n # mask is True for padded positions (i.e., t >= T)",python,selection_mouse +6674,4881298,"genie.py",4163,0,"",python,selection_mouse +6675,4881365,"genie.py",4162,0,"",python,selection_command +6676,4881602,"genie.py",4163,0,"",python,selection_mouse +6677,4881614,"genie.py",4162,0,"",python,selection_command +6678,4881635,"TERMINAL",0,0,"81",,terminal_output +6679,4881970,"genie.py",4162,1,")",python,selection_mouse +6680,4881971,"genie.py",4160,2," T",python,selection_mouse +6681,4881971,"genie.py",4072,90,"original length, S = seq_len\n # mask is True for padded positions (i.e., t >= T",python,selection_mouse +6682,4881971,"genie.py",4067,95," T = original length, S = seq_len\n # mask is True for padded positions (i.e., t >= T",python,selection_mouse +6683,4881972,"genie.py",4061,101,"S, N), T = original length, S = seq_len\n # mask is True for padded positions (i.e., t >= T",python,selection_mouse +6684,4881973,"genie.py",3987,175,"that is 1 (True) where we just padded\n # token_idxs shape: (B, S, N), T = original length, S = seq_len\n # mask is True for padded positions (i.e., t >= T",python,selection_mouse +6685,4881973,"genie.py",3982,180,"mask that is 1 (True) where we just padded\n # token_idxs shape: (B, S, N), T = original length, S = seq_len\n # mask is True for padded positions (i.e., t >= T",python,selection_mouse +6686,4881974,"genie.py",3980,182,"a mask that is 1 (True) where we just padded\n # token_idxs shape: (B, S, N), T = original length, S = seq_len\n # mask is True for padded positions (i.e., t >= T",python,selection_mouse +6687,4882008,"genie.py",4163,0,"",python,selection_command +6688,4882009,"genie.py",3973,190,"Create a mask that is 1 (True) where we just padded\n # token_idxs shape: (B, S, N), T = original length, S = seq_len\n # mask is True for padded positions (i.e., t >= T)",python,selection_mouse +6689,4882059,"genie.py",3958,205,"\n # Create a mask that is 1 (True) where we just padded\n # token_idxs shape: (B, S, N), T = original length, S = seq_len\n # mask is True for padded positions (i.e., t >= T)",python,selection_mouse +6690,4882153,"genie.py",3965,198," # Create a mask that is 1 (True) where we just padded\n # token_idxs shape: (B, S, N), T = original length, S = seq_len\n # mask is True for padded positions (i.e., t >= T)",python,selection_mouse +6691,4882182,"genie.py",3964,199," # Create a mask that is 1 (True) where we just padded\n # token_idxs shape: (B, S, N), T = original length, S = seq_len\n # mask is True for padded positions (i.e., t >= T)",python,selection_mouse +6692,4882207,"genie.py",3962,201," # Create a mask that is 1 (True) where we just padded\n # token_idxs shape: (B, S, N), T = original length, S = seq_len\n # mask is True for padded positions (i.e., t >= T)",python,selection_mouse +6693,4882263,"genie.py",3961,202," # Create a mask that is 1 (True) where we just padded\n # token_idxs shape: (B, S, N), T = original length, S = seq_len\n # mask is True for padded positions (i.e., t >= T)",python,selection_mouse +6694,4882318,"genie.py",3960,203," # Create a mask that is 1 (True) where we just padded\n # token_idxs shape: (B, S, N), T = original length, S = seq_len\n # mask is True for padded positions (i.e., t >= T)",python,selection_mouse +6695,4882361,"genie.py",3959,204," # Create a mask that is 1 (True) where we just padded\n # token_idxs shape: (B, S, N), T = original length, S = seq_len\n # mask is True for padded positions (i.e., t >= T)",python,selection_mouse 
+6696,4882653,"TERMINAL",0,0,"92",,terminal_output +6697,4883774,"TERMINAL",0,0,"7:003",,terminal_output +6698,4884026,"genie.py",3959,0,"",python,selection_mouse +6699,4884575,"genie.py",3959,4," ",python,selection_mouse +6700,4884576,"genie.py",3959,74," # Create a mask that is 1 (True) where we just padded\n ",python,selection_mouse +6701,4884576,"genie.py",3959,80," # Create a mask that is 1 (True) where we just padded\n # ",python,selection_mouse +6702,4884576,"genie.py",3959,88," # Create a mask that is 1 (True) where we just padded\n # token_id",python,selection_mouse +6703,4884610,"genie.py",3959,95," # Create a mask that is 1 (True) where we just padded\n # token_idxs shap",python,selection_mouse +6704,4884652,"genie.py",3959,104," # Create a mask that is 1 (True) where we just padded\n # token_idxs shape: (B, S,",python,selection_mouse +6705,4884695,"genie.py",3959,110," # Create a mask that is 1 (True) where we just padded\n # token_idxs shape: (B, S, N), T",python,selection_mouse +6706,4884733,"genie.py",3959,114," # Create a mask that is 1 (True) where we just padded\n # token_idxs shape: (B, S, N), T = o",python,selection_mouse +6707,4884771,"genie.py",3959,194," # Create a mask that is 1 (True) where we just padded\n # token_idxs shape: (B, S, N), T = original length, S = seq_len\n # mask is True for padded positions (i.e",python,selection_mouse +6708,4884804,"genie.py",3959,197," # Create a mask that is 1 (True) where we just padded\n # token_idxs shape: (B, S, N), T = original length, S = seq_len\n # mask is True for padded positions (i.e., ",python,selection_mouse +6709,4884839,"genie.py",3959,199," # Create a mask that is 1 (True) where we just padded\n # token_idxs shape: (B, S, N), T = original length, S = seq_len\n # mask is True for padded positions (i.e., t ",python,selection_mouse +6710,4884873,"genie.py",3959,200," # Create a mask that is 1 (True) where we just padded\n # token_idxs shape: (B, S, N), T = original length, S = seq_len\n # mask is True for padded positions (i.e., t >",python,selection_mouse +6711,4884907,"genie.py",3959,201," # Create a mask that is 1 (True) where we just padded\n # token_idxs shape: (B, S, N), T = original length, S = seq_len\n # mask is True for padded positions (i.e., t >=",python,selection_mouse +6712,4884909,"TERMINAL",0,0,"14",,terminal_output +6713,4884941,"genie.py",3959,202," # Create a mask that is 1 (True) where we just padded\n # token_idxs shape: (B, S, N), T = original length, S = seq_len\n # mask is True for padded positions (i.e., t >= ",python,selection_mouse +6714,4884993,"genie.py",3959,203," # Create a mask that is 1 (True) where we just padded\n # token_idxs shape: (B, S, N), T = original length, S = seq_len\n # mask is True for padded positions (i.e., t >= T",python,selection_mouse +6715,4885041,"genie.py",3959,204," # Create a mask that is 1 (True) where we just padded\n # token_idxs shape: (B, S, N), T = original length, S = seq_len\n # mask is True for padded positions (i.e., t >= T)",python,selection_mouse +6716,4885483,"genie.py",4163,0,"",python,selection_mouse +6717,4885527,"genie.py",4162,0,"",python,selection_command +6718,4885803,"TERMINAL",0,0,"25",,terminal_output +6719,4885837,"genie.py",4163,0,"",python,selection_mouse +6720,4885849,"genie.py",4162,0,"",python,selection_command +6721,4886145,"genie.py",4162,1,")",python,selection_mouse +6722,4886146,"genie.py",4158,4,">= T",python,selection_mouse +6723,4886147,"genie.py",4069,93," = original length, S = seq_len\n # mask is True for padded positions 
(i.e., t >= T",python,selection_mouse +6724,4886147,"genie.py",3987,175,"that is 1 (True) where we just padded\n # token_idxs shape: (B, S, N), T = original length, S = seq_len\n # mask is True for padded positions (i.e., t >= T",python,selection_mouse +6725,4886148,"genie.py",3973,189,"Create a mask that is 1 (True) where we just padded\n # token_idxs shape: (B, S, N), T = original length, S = seq_len\n # mask is True for padded positions (i.e., t >= T",python,selection_mouse +6726,4886150,"genie.py",4163,0,"",python,selection_command +6727,4886206,"genie.py",3971,192,"# Create a mask that is 1 (True) where we just padded\n # token_idxs shape: (B, S, N), T = original length, S = seq_len\n # mask is True for padded positions (i.e., t >= T)",python,selection_mouse +6728,4886276,"genie.py",3958,205,"\n # Create a mask that is 1 (True) where we just padded\n # token_idxs shape: (B, S, N), T = original length, S = seq_len\n # mask is True for padded positions (i.e., t >= T)",python,selection_mouse +6729,4886531,"genie.py",3960,203," # Create a mask that is 1 (True) where we just padded\n # token_idxs shape: (B, S, N), T = original length, S = seq_len\n # mask is True for padded positions (i.e., t >= T)",python,selection_mouse +6730,4886578,"genie.py",3959,204," # Create a mask that is 1 (True) where we just padded\n # token_idxs shape: (B, S, N), T = original length, S = seq_len\n # mask is True for padded positions (i.e., t >= T)",python,selection_mouse +6731,4886844,"TERMINAL",0,0,"36",,terminal_output +6732,4887273,"genie.py",3959,0,"",python,selection_mouse +6733,4887452,"genie.py",3959,12," ",python,selection_mouse +6734,4887793,"genie.py",3959,20," # Create",python,selection_mouse +6735,4887794,"genie.py",3959,90," # Create a mask that is 1 (True) where we just padded\n # token_idxs",python,selection_mouse +6736,4887794,"genie.py",3959,96," # Create a mask that is 1 (True) where we just padded\n # token_idxs shape",python,selection_mouse +6737,4887794,"genie.py",3959,98," # Create a mask that is 1 (True) where we just padded\n # token_idxs shape: ",python,selection_mouse +6738,4887795,"genie.py",3959,103," # Create a mask that is 1 (True) where we just padded\n # token_idxs shape: (B, S",python,selection_mouse +6739,4887795,"genie.py",3959,106," # Create a mask that is 1 (True) where we just padded\n # token_idxs shape: (B, S, N",python,selection_mouse +6740,4887795,"genie.py",3959,109," # Create a mask that is 1 (True) where we just padded\n # token_idxs shape: (B, S, N), ",python,selection_mouse +6741,4887828,"genie.py",3959,113," # Create a mask that is 1 (True) where we just padded\n # token_idxs shape: (B, S, N), T = ",python,selection_mouse +6742,4887861,"genie.py",3959,121," # Create a mask that is 1 (True) where we just padded\n # token_idxs shape: (B, S, N), T = original",python,selection_mouse +6743,4887930,"genie.py",3959,196," # Create a mask that is 1 (True) where we just padded\n # token_idxs shape: (B, S, N), T = original length, S = seq_len\n # mask is True for padded positions (i.e.,",python,selection_mouse +6744,4887962,"genie.py",3959,198," # Create a mask that is 1 (True) where we just padded\n # token_idxs shape: (B, S, N), T = original length, S = seq_len\n # mask is True for padded positions (i.e., t",python,selection_mouse +6745,4887987,"genie.py",3959,199," # Create a mask that is 1 (True) where we just padded\n # token_idxs shape: (B, S, N), T = original length, S = seq_len\n # mask is True for padded positions (i.e., t ",python,selection_mouse 
+6746,4888022,"genie.py",3959,200," # Create a mask that is 1 (True) where we just padded\n # token_idxs shape: (B, S, N), T = original length, S = seq_len\n # mask is True for padded positions (i.e., t >",python,selection_mouse +6747,4888023,"TERMINAL",0,0,"47",,terminal_output +6748,4888076,"genie.py",3959,202," # Create a mask that is 1 (True) where we just padded\n # token_idxs shape: (B, S, N), T = original length, S = seq_len\n # mask is True for padded positions (i.e., t >= ",python,selection_mouse +6749,4888088,"genie.py",3959,204," # Create a mask that is 1 (True) where we just padded\n # token_idxs shape: (B, S, N), T = original length, S = seq_len\n # mask is True for padded positions (i.e., t >= T)",python,selection_mouse +6750,4888147,"genie.py",3959,269," # Create a mask that is 1 (True) where we just padded\n # token_idxs shape: (B, S, N), T = original length, S = seq_len\n # mask is True for padded positions (i.e., t >= T)\n mask = (jnp.arange(S)[None, :, None] >= T) # shape ",python,selection_mouse +6751,4888870,"genie.py",4228,0,"",python,selection_mouse +6752,4888964,"TERMINAL",0,0,"58",,terminal_output +6753,4889561,"genie.py",4491,0,"",python,selection_mouse +6754,4889583,"genie.py",4490,0,"",python,selection_command +6755,4889982,"TERMINAL",0,0,"69",,terminal_output +6756,4891041,"TERMINAL",0,0,"710",,terminal_output +6757,4892168,"TERMINAL",0,0,"81",,terminal_output +6758,4893217,"TERMINAL",0,0,"93",,terminal_output +6759,4894184,"TERMINAL",0,0,"114",,terminal_output +6760,4895240,"TERMINAL",0,0,"25",,terminal_output +6761,4896367,"TERMINAL",0,0,"36",,terminal_output +6762,4897330,"TERMINAL",0,0,"47",,terminal_output +6763,4898367,"TERMINAL",0,0,"58",,terminal_output +6764,4899438,"TERMINAL",0,0,"69",,terminal_output +6765,4900464,"TERMINAL",0,0,"720",,terminal_output +6766,4901501,"TERMINAL",0,0,"81",,terminal_output +6767,4902613,"TERMINAL",0,0,"92",,terminal_output +6768,4903638,"TERMINAL",0,0,"203",,terminal_output +6769,4904663,"TERMINAL",0,0,"14",,terminal_output +6770,4905663,"TERMINAL",0,0,"25",,terminal_output +6771,4906709,"TERMINAL",0,0,"36",,terminal_output +6772,4907758,"TERMINAL",0,0,"47",,terminal_output +6773,4908860,"TERMINAL",0,0,"58",,terminal_output +6774,4909883,"TERMINAL",0,0,"69",,terminal_output +6775,4911010,"TERMINAL",0,0,"730",,terminal_output +6776,4912034,"TERMINAL",0,0,"81",,terminal_output +6777,4913003,"TERMINAL",0,0,"92",,terminal_output +6778,4914087,"TERMINAL",0,0,"303",,terminal_output +6779,4915209,"TERMINAL",0,0,"14",,terminal_output +6780,4916284,"TERMINAL",0,0,"36",,terminal_output +6781,4917359,"TERMINAL",0,0,"47",,terminal_output +6782,4918394,"TERMINAL",0,0,"58",,terminal_output +6783,4919427,"TERMINAL",0,0,"69",,terminal_output +6784,4920580,"TERMINAL",0,0,"740",,terminal_output +6785,4921557,"TERMINAL",0,0,"81",,terminal_output +6786,4922582,"TERMINAL",0,0,"92",,terminal_output +6787,4923604,"TERMINAL",0,0,"403",,terminal_output +6788,4924670,"TERMINAL",0,0,"14",,terminal_output +6789,4925704,"TERMINAL",0,0,"25",,terminal_output +6790,4926751,"TERMINAL",0,0,"36",,terminal_output +6791,4927805,"TERMINAL",0,0,"47",,terminal_output +6792,4928930,"TERMINAL",0,0,"58",,terminal_output +6793,4929954,"TERMINAL",0,0,"69",,terminal_output +6794,4930981,"TERMINAL",0,0,"750",,terminal_output +6795,4932001,"TERMINAL",0,0,"81",,terminal_output +6796,4933019,"TERMINAL",0,0,"92",,terminal_output +6797,4934153,"TERMINAL",0,0,"503",,terminal_output +6798,4935177,"TERMINAL",0,0,"15",,terminal_output 
+6799,4936157,"TERMINAL",0,0,"36",,terminal_output +6800,4937244,"TERMINAL",0,0,"47",,terminal_output +6801,4938243,"TERMINAL",0,0,"58",,terminal_output +6802,4939381,"TERMINAL",0,0,"69",,terminal_output +6803,4940400,"TERMINAL",0,0,"72:00",,terminal_output +6804,4941423,"TERMINAL",0,0,"81",,terminal_output +6805,4942423,"TERMINAL",0,0,"92",,terminal_output +6806,4943471,"TERMINAL",0,0,"8:003",,terminal_output +6807,4944602,"TERMINAL",0,0,"14",,terminal_output +6808,4945565,"TERMINAL",0,0,"25",,terminal_output +6809,4946647,"TERMINAL",0,0,"36",,terminal_output +6810,4947689,"TERMINAL",0,0,"47",,terminal_output +6811,4948712,"TERMINAL",0,0,"58",,terminal_output +6812,4949764,"TERMINAL",0,0,"69",,terminal_output +6813,4950844,"TERMINAL",0,0,"710",,terminal_output +6814,4951868,"TERMINAL",0,0,"81",,terminal_output +6815,4952994,"TERMINAL",0,0,"92",,terminal_output +6816,4954023,"TERMINAL",0,0,"103",,terminal_output +6817,4955041,"TERMINAL",0,0,"14",,terminal_output +6818,4956067,"TERMINAL",0,0,"25",,terminal_output +6819,4957193,"TERMINAL",0,0,"36",,terminal_output +6820,4958217,"TERMINAL",0,0,"48",,terminal_output +6821,4959243,"TERMINAL",0,0,"69",,terminal_output +6822,4960274,"TERMINAL",0,0,"720",,terminal_output +6823,4960650,"genie.py",0,0,"",python,tab +6824,4961285,"TERMINAL",0,0,"81",,terminal_output +6825,4962357,"TERMINAL",0,0,"92",,terminal_output +6826,4963380,"TERMINAL",0,0,"203",,terminal_output +6827,4964427,"TERMINAL",0,0,"14",,terminal_output +6828,4965488,"TERMINAL",0,0,"25",,terminal_output +6829,4966615,"TERMINAL",0,0,"36",,terminal_output +6830,4967639,"TERMINAL",0,0,"47",,terminal_output +6831,4968662,"TERMINAL",0,0,"58",,terminal_output +6832,4969687,"TERMINAL",0,0,"69",,terminal_output +6833,4970711,"TERMINAL",0,0,"730",,terminal_output +6834,4971701,"TERMINAL",0,0,"81",,terminal_output +6835,4972754,"TERMINAL",0,0,"92",,terminal_output +6836,4973885,"TERMINAL",0,0,"303",,terminal_output +6837,4974847,"TERMINAL",0,0,"14",,terminal_output +6838,4975932,"TERMINAL",0,0,"25",,terminal_output +6839,4976938,"TERMINAL",0,0,"36",,terminal_output +6840,4977994,"TERMINAL",0,0,"47",,terminal_output +6841,4979107,"TERMINAL",0,0,"58",,terminal_output +6842,4980131,"TERMINAL",0,0,"69",,terminal_output +6843,4981156,"TERMINAL",0,0,"741",,terminal_output +6844,4982276,"TERMINAL",0,0,"92",,terminal_output +6845,4983249,"TERMINAL",0,0,"403",,terminal_output +6846,4984331,"TERMINAL",0,0,"14",,terminal_output +6847,4985313,"TERMINAL",0,0,"25",,terminal_output +6848,4986376,"TERMINAL",0,0,"36",,terminal_output +6849,4987306,"genie.py",4177,0,"",python,selection_mouse +6850,4987485,"genie.py",4176,4,"mask",python,selection_mouse +6851,4987576,"TERMINAL",0,0,"47",,terminal_output +6852,4988527,"TERMINAL",0,0,"58",,terminal_output +6853,4989502,"TERMINAL",0,0,"69",,terminal_output +6854,4990575,"TERMINAL",0,0,"750",,terminal_output +6855,4991619,"TERMINAL",0,0,"81",,terminal_output +6856,4992459,"genie.py",8293,0,"",python,selection_mouse +6857,4992586,"genie.py",8291,5,"where",python,selection_mouse +6858,4992684,"TERMINAL",0,0,"92",,terminal_output +6859,4993156,"genie.py",8304,0,"",python,selection_mouse +6860,4993279,"genie.py",8297,13,"mask_expanded",python,selection_mouse +6861,4993684,"TERMINAL",0,0,"503",,terminal_output +6862,4994727,"TERMINAL",0,0,"14",,terminal_output +6863,4994827,"genie.py",8316,0,"",python,selection_mouse +6864,4994981,"genie.py",8312,10,"mask_token",python,selection_mouse +6865,4995773,"TERMINAL",0,0,"25",,terminal_output 
+6866,4996825,"TERMINAL",0,0,"36",,terminal_output +6867,4997980,"TERMINAL",0,0,"47",,terminal_output +6868,4998985,"TERMINAL",0,0,"58",,terminal_output +6869,4999997,"TERMINAL",0,0,"69",,terminal_output +6870,5001002,"TERMINAL",0,0,"73:00",,terminal_output +6871,5002147,"TERMINAL",0,0,"81",,terminal_output +6872,5003093,"TERMINAL",0,0,"92",,terminal_output +6873,5004196,"TERMINAL",0,0,"9:004",,terminal_output +6874,5005220,"TERMINAL",0,0,"25",,terminal_output +6875,5005387,"genie.py",8309,0,"",python,selection_mouse +6876,5006248,"TERMINAL",0,0,"36",,terminal_output +6877,5007327,"TERMINAL",0,0,"47",,terminal_output +6878,5008332,"TERMINAL",0,0,"58",,terminal_output +6879,5009418,"TERMINAL",0,0,"69",,terminal_output +6880,5010424,"TERMINAL",0,0,"710",,terminal_output +6881,5011568,"TERMINAL",0,0,"81",,terminal_output +6882,5012528,"TERMINAL",0,0,"92",,terminal_output +6883,5013618,"TERMINAL",0,0,"103",,terminal_output +6884,5014641,"TERMINAL",0,0,"14",,terminal_output +6885,5015665,"TERMINAL",0,0,"25",,terminal_output +6886,5016706,"TERMINAL",0,0,"36",,terminal_output +6887,5017755,"TERMINAL",0,0,"47",,terminal_output +6888,5018839,"TERMINAL",0,0,"58",,terminal_output +6889,5019861,"TERMINAL",0,0,"69",,terminal_output +6890,5021044,"TERMINAL",0,0,"720",,terminal_output +6891,5022014,"TERMINAL",0,0,"81",,terminal_output +6892,5022993,"TERMINAL",0,0,"92",,terminal_output +6893,5024063,"TERMINAL",0,0,"203",,terminal_output +6894,5025085,"TERMINAL",0,0,"14",,terminal_output +6895,5026120,"TERMINAL",0,0,"26",,terminal_output +6896,5027234,"TERMINAL",0,0,"47",,terminal_output +6897,5028261,"TERMINAL",0,0,"58",,terminal_output +6898,5029285,"TERMINAL",0,0,"69",,terminal_output +6899,5030410,"TERMINAL",0,0,"730",,terminal_output +6900,5031434,"TERMINAL",0,0,"81",,terminal_output +6901,5032405,"TERMINAL",0,0,"92",,terminal_output +6902,5033485,"TERMINAL",0,0,"303",,terminal_output +6903,5034487,"TERMINAL",0,0,"14",,terminal_output +6904,5035531,"TERMINAL",0,0,"25",,terminal_output +6905,5036927,"TERMINAL",0,0,"36",,terminal_output +6906,5037988,"TERMINAL",0,0,"47",,terminal_output +6907,5039114,"TERMINAL",0,0,"58",,terminal_output +6908,5040140,"TERMINAL",0,0,"69",,terminal_output +6909,5041108,"TERMINAL",0,0,"741",,terminal_output +6910,5042187,"TERMINAL",0,0,"92",,terminal_output +6911,5043211,"TERMINAL",0,0,"403",,terminal_output +6912,5044244,"TERMINAL",0,0,"14",,terminal_output +6913,5045293,"TERMINAL",0,0,"25",,terminal_output +6914,5046335,"TERMINAL",0,0,"36",,terminal_output +6915,5047382,"TERMINAL",0,0,"47",,terminal_output +6916,5048425,"TERMINAL",0,0,"58",,terminal_output +6917,5049559,"TERMINAL",0,0,"69",,terminal_output +6918,5050599,"TERMINAL",0,0,"750",,terminal_output +6919,5051608,"TERMINAL",0,0,"81",,terminal_output +6920,5052636,"TERMINAL",0,0,"92",,terminal_output +6921,5053664,"TERMINAL",0,0,"503",,terminal_output +6922,5054713,"TERMINAL",0,0,"14",,terminal_output +6923,5055750,"TERMINAL",0,0,"25",,terminal_output +6924,5056798,"TERMINAL",0,0,"36",,terminal_output +6925,5057854,"TERMINAL",0,0,"47",,terminal_output +6926,5058981,"TERMINAL",0,0,"58",,terminal_output +6927,5060004,"TERMINAL",0,0,"69",,terminal_output +6928,5060975,"TERMINAL",0,0,"74:00",,terminal_output +6929,5062027,"TERMINAL",0,0,"81",,terminal_output +6930,5063054,"TERMINAL",0,0,"92",,terminal_output +6931,5064093,"TERMINAL",0,0,"40:003",,terminal_output +6932,5065227,"TERMINAL",0,0,"15",,terminal_output +6933,5066196,"TERMINAL",0,0,"36",,terminal_output +6934,5067275,"TERMINAL",0,0,"47",,terminal_output 
+6935,5068277,"TERMINAL",0,0,"58",,terminal_output +6936,5069426,"TERMINAL",0,0,"69",,terminal_output +6937,5070457,"TERMINAL",0,0,"710",,terminal_output +6938,5071422,"TERMINAL",0,0,"81",,terminal_output +6939,5072466,"TERMINAL",0,0,"92",,terminal_output +6940,5073521,"TERMINAL",0,0,"103",,terminal_output +6941,5074552,"TERMINAL",0,0,"14",,terminal_output +6942,5075671,"TERMINAL",0,0,"25",,terminal_output +6943,5076699,"TERMINAL",0,0,"36",,terminal_output +6944,5077721,"TERMINAL",0,0,"47",,terminal_output +6945,5078747,"TERMINAL",0,0,"58",,terminal_output +6946,5079762,"TERMINAL",0,0,"69",,terminal_output +6947,5080809,"TERMINAL",0,0,"720",,terminal_output +6948,5081856,"TERMINAL",0,0,"81",,terminal_output +6949,5082893,"TERMINAL",0,0,"92",,terminal_output +6950,5083966,"TERMINAL",0,0,"203",,terminal_output +6951,5084990,"TERMINAL",0,0,"14",,terminal_output +6952,5086117,"TERMINAL",0,0,"25",,terminal_output +6953,5087140,"TERMINAL",0,0,"36",,terminal_output +6954,5088165,"TERMINAL",0,0,"48",,terminal_output +6955,5089191,"TERMINAL",0,0,"69",,terminal_output +6956,5090316,"TERMINAL",0,0,"730",,terminal_output +6957,5091339,"TERMINAL",0,0,"81",,terminal_output +6958,5092362,"TERMINAL",0,0,"92",,terminal_output +6959,5093437,"TERMINAL",0,0,"303",,terminal_output +6960,5094421,"TERMINAL",0,0,"14",,terminal_output +6961,5095537,"TERMINAL",0,0,"25",,terminal_output +6962,5096512,"TERMINAL",0,0,"36",,terminal_output +6963,5097596,"TERMINAL",0,0,"47",,terminal_output +6964,5098653,"TERMINAL",0,0,"58",,terminal_output +6965,5099740,"TERMINAL",0,0,"69",,terminal_output +6966,5100743,"TERMINAL",0,0,"740",,terminal_output +6967,5101755,"TERMINAL",0,0,"81",,terminal_output +6968,5102803,"TERMINAL",0,0,"92",,terminal_output +6969,5103844,"TERMINAL",0,0,"403",,terminal_output +6970,5104889,"TERMINAL",0,0,"14",,terminal_output +6971,5105984,"TERMINAL",0,0,"25",,terminal_output +6972,5107007,"TERMINAL",0,0,"36",,terminal_output +6973,5108022,"TERMINAL",0,0,"47",,terminal_output +6974,5109158,"TERMINAL",0,0,"58",,terminal_output +6975,5110181,"TERMINAL",0,0,"650",,terminal_output +6976,5111205,"TERMINAL",0,0,"81",,terminal_output +6977,5112229,"TERMINAL",0,0,"92",,terminal_output +6978,5113253,"TERMINAL",0,0,"503",,terminal_output +6979,5114331,"TERMINAL",0,0,"14",,terminal_output +6980,5115406,"TERMINAL",0,0,"25",,terminal_output +6981,5116404,"TERMINAL",0,0,"36",,terminal_output +6982,5117416,"TERMINAL",0,0,"47",,terminal_output +6983,5118475,"TERMINAL",0,0,"58",,terminal_output +6984,5119603,"TERMINAL",0,0,"69",,terminal_output +6985,5120626,"TERMINAL",0,0,"75:00",,terminal_output +6986,5121615,"TERMINAL",0,0,"81",,terminal_output +6987,5122674,"TERMINAL",0,0,"92",,terminal_output +6988,5123710,"TERMINAL",0,0,"1:003",,terminal_output +6989,5124753,"TERMINAL",0,0,"14",,terminal_output +6990,5125885,"TERMINAL",0,0,"25",,terminal_output +6991,5126846,"TERMINAL",0,0,"36",,terminal_output +6992,5127909,"TERMINAL",0,0,"47",,terminal_output +6993,5128935,"TERMINAL",0,0,"58",,terminal_output +6994,5130047,"TERMINAL",0,0,"69",,terminal_output +6995,5131071,"TERMINAL",0,0,"710",,terminal_output +6996,5132095,"TERMINAL",0,0,"81",,terminal_output +6997,5133112,"TERMINAL",0,0,"93",,terminal_output +6998,5134245,"TERMINAL",0,0,"114",,terminal_output +6999,5135269,"TERMINAL",0,0,"25",,terminal_output +7000,5136294,"TERMINAL",0,0,"36",,terminal_output +7001,5137409,"TERMINAL",0,0,"47",,terminal_output +7002,5138371,"TERMINAL",0,0,"58",,terminal_output +7003,5139468,"TERMINAL",0,0,"69",,terminal_output 
+7004,5140500,"TERMINAL",0,0,"720",,terminal_output +7005,5141620,"TERMINAL",0,0,"81",,terminal_output +7006,5142643,"TERMINAL",0,0,"92",,terminal_output +7007,5143666,"TERMINAL",0,0,"203",,terminal_output +7008,5144692,"TERMINAL",0,0,"14",,terminal_output +7009,5145716,"TERMINAL",0,0,"25",,terminal_output +7010,5146743,"TERMINAL",0,0,"36",,terminal_output +7011,5147789,"TERMINAL",0,0,"47",,terminal_output +7012,5148835,"TERMINAL",0,0,"58",,terminal_output +7013,5149885,"TERMINAL",0,0,"69",,terminal_output +7014,5151039,"TERMINAL",0,0,"730",,terminal_output +7015,5151975,"TERMINAL",0,0,"81",,terminal_output +7016,5153022,"TERMINAL",0,0,"92",,terminal_output +7017,5154112,"TERMINAL",0,0,"303",,terminal_output +7018,5155135,"TERMINAL",0,0,"15",,terminal_output +7019,5156167,"TERMINAL",0,0,"36",,terminal_output +7020,5157695,"TERMINAL",0,0,"47",,terminal_output +7021,5158728,"TERMINAL",0,0,"58",,terminal_output +7022,5159767,"TERMINAL",0,0,"69",,terminal_output +7023,5160807,"TERMINAL",0,0,"740",,terminal_output +7024,5161851,"TERMINAL",0,0,"81",,terminal_output +7025,5162918,"TERMINAL",0,0,"92",,terminal_output +7026,5163956,"TERMINAL",0,0,"403",,terminal_output +7027,5165068,"TERMINAL",0,0,"14",,terminal_output +7028,5166023,"TERMINAL",0,0,"25",,terminal_output +7029,5167117,"TERMINAL",0,0,"36",,terminal_output +7030,5168141,"TERMINAL",0,0,"48",,terminal_output +7031,5169267,"TERMINAL",0,0,"69",,terminal_output +7032,5170291,"TERMINAL",0,0,"750",,terminal_output +7033,5171315,"TERMINAL",0,0,"81",,terminal_output +7034,5172291,"TERMINAL",0,0,"92",,terminal_output +7035,5173416,"TERMINAL",0,0,"503",,terminal_output +7036,5174488,"TERMINAL",0,0,"14",,terminal_output +7037,5175513,"TERMINAL",0,0,"25",,terminal_output +7038,5176537,"TERMINAL",0,0,"36",,terminal_output +7039,5177561,"TERMINAL",0,0,"47",,terminal_output +7040,5178585,"TERMINAL",0,0,"58",,terminal_output +7041,5179711,"TERMINAL",0,0,"69",,terminal_output +7042,5180736,"TERMINAL",0,0,"76:00",,terminal_output +7043,5181763,"TERMINAL",0,0,"81",,terminal_output +7044,5182785,"TERMINAL",0,0,"92",,terminal_output +7045,5183809,"TERMINAL",0,0,"2:003",,terminal_output +7046,5184838,"TERMINAL",0,0,"14",,terminal_output +7047,5185888,"TERMINAL",0,0,"25",,terminal_output +7048,5186982,"TERMINAL",0,0,"36",,terminal_output +7049,5187995,"TERMINAL",0,0,"47",,terminal_output +7050,5189132,"TERMINAL",0,0,"58",,terminal_output +7051,5190069,"TERMINAL",0,0,"69",,terminal_output +7052,5191188,"TERMINAL",0,0,"711",,terminal_output +7053,5192205,"TERMINAL",0,0,"92",,terminal_output +7054,5193229,"TERMINAL",0,0,"103",,terminal_output +7055,5194355,"TERMINAL",0,0,"14",,terminal_output +7056,5195379,"TERMINAL",0,0,"25",,terminal_output +7057,5196404,"TERMINAL",0,0,"36",,terminal_output +7058,5197372,"TERMINAL",0,0,"47",,terminal_output +7059,5198455,"TERMINAL",0,0,"58",,terminal_output +7060,5199475,"TERMINAL",0,0,"69",,terminal_output +7061,5200505,"TERMINAL",0,0,"720",,terminal_output +7062,5201628,"TERMINAL",0,0,"81",,terminal_output +7063,5202651,"TERMINAL",0,0,"92",,terminal_output +7064,5203674,"TERMINAL",0,0,"203",,terminal_output +7065,5204710,"TERMINAL",0,0,"14",,terminal_output +7066,5205723,"TERMINAL",0,0,"25",,terminal_output +7067,5206774,"TERMINAL",0,0,"36",,terminal_output +7068,5207812,"TERMINAL",0,0,"47",,terminal_output +7069,5208914,"TERMINAL",0,0,"58",,terminal_output +7070,5209920,"TERMINAL",0,0,"69",,terminal_output +7071,5211052,"TERMINAL",0,0,"730",,terminal_output +7072,5211990,"TERMINAL",0,0,"81",,terminal_output 
+7073,5213032,"TERMINAL",0,0,"92",,terminal_output +7074,5214118,"TERMINAL",0,0,"303",,terminal_output +7075,5215144,"TERMINAL",0,0,"15",,terminal_output +7076,5216236,"TERMINAL",0,0,"36",,terminal_output +7077,5217307,"TERMINAL",0,0,"47",,terminal_output +7078,5218272,"TERMINAL",0,0,"58",,terminal_output +7079,5219341,"TERMINAL",0,0,"69",,terminal_output +7080,5220469,"TERMINAL",0,0,"740",,terminal_output +7081,5221493,"TERMINAL",0,0,"81",,terminal_output +7082,5222457,"TERMINAL",0,0,"92",,terminal_output +7083,5223542,"TERMINAL",0,0,"403",,terminal_output +7084,5224563,"TERMINAL",0,0,"14",,terminal_output +7085,5225643,"TERMINAL",0,0,"25",,terminal_output +7086,5226715,"TERMINAL",0,0,"36",,terminal_output +7087,5227684,"TERMINAL",0,0,"47",,terminal_output +7088,5228762,"TERMINAL",0,0,"58",,terminal_output +7089,5229761,"TERMINAL",0,0,"69",,terminal_output +7090,5230810,"TERMINAL",0,0,"750",,terminal_output +7091,5231853,"TERMINAL",0,0,"81",,terminal_output +7092,5232892,"TERMINAL",0,0,"92",,terminal_output +7093,5233933,"TERMINAL",0,0,"503",,terminal_output +7094,5235008,"TERMINAL",0,0,"14",,terminal_output +7095,5236022,"TERMINAL",0,0,"25",,terminal_output +7096,5237161,"TERMINAL",0,0,"36",,terminal_output +7097,5238183,"TERMINAL",0,0,"48",,terminal_output +7098,5239207,"TERMINAL",0,0,"69",,terminal_output +7099,5240234,"TERMINAL",0,0,"77:00",,terminal_output +7100,5241266,"TERMINAL",0,0,"81",,terminal_output +7101,5242271,"TERMINAL",0,0,"92",,terminal_output +7102,5243312,"TERMINAL",0,0,"3:003",,terminal_output +7103,5244364,"TERMINAL",0,0,"14",,terminal_output +7104,5245425,"TERMINAL",0,0,"25",,terminal_output +7105,5246476,"TERMINAL",0,0,"36",,terminal_output +7106,5247515,"TERMINAL",0,0,"47",,terminal_output +7107,5248550,"TERMINAL",0,0,"58",,terminal_output +7108,5249658,"TERMINAL",0,0,"69",,terminal_output +7109,5250675,"TERMINAL",0,0,"710",,terminal_output +7110,5251715,"TERMINAL",0,0,"81",,terminal_output +7111,5252827,"TERMINAL",0,0,"92",,terminal_output +7112,5253785,"TERMINAL",0,0,"103",,terminal_output +7113,5254825,"TERMINAL",0,0,"14",,terminal_output +7114,5255867,"TERMINAL",0,0,"25",,terminal_output +7115,5256922,"TERMINAL",0,0,"36",,terminal_output +7116,5257976,"TERMINAL",0,0,"47",,terminal_output +7117,5259005,"TERMINAL",0,0,"58",,terminal_output +7118,5260095,"TERMINAL",0,0,"69",,terminal_output +7119,5261120,"TERMINAL",0,0,"720",,terminal_output +7120,5262148,"TERMINAL",0,0,"82",,terminal_output +7121,5263271,"TERMINAL",0,0,"203",,terminal_output +7122,5264295,"TERMINAL",0,0,"14",,terminal_output +7123,5265318,"TERMINAL",0,0,"25",,terminal_output +7124,5266343,"TERMINAL",0,0,"36",,terminal_output +7125,5267368,"TERMINAL",0,0,"47",,terminal_output +7126,5268494,"TERMINAL",0,0,"58",,terminal_output +7127,5269519,"TERMINAL",0,0,"69",,terminal_output +7128,5270544,"TERMINAL",0,0,"730",,terminal_output +7129,5271543,"TERMINAL",0,0,"81",,terminal_output +7130,5272590,"TERMINAL",0,0,"92",,terminal_output +7131,5273733,"TERMINAL",0,0,"303",,terminal_output +7132,5274738,"TERMINAL",0,0,"14",,terminal_output +7133,5275437,"scripts_horeka/overfit_sample_tiny/sample.sh",0,0,"",shellscript,tab +7134,5275766,"TERMINAL",0,0,"25",,terminal_output +7135,5276763,"TERMINAL",0,0,"36",,terminal_output +7136,5278427,"TERMINAL",0,0,"48",,terminal_output +7137,5279556,"TERMINAL",0,0,"69",,terminal_output +7138,5280124,"scripts_horeka/overfit_sample_tiny/sample.sh",1855,0,"",shellscript,selection_mouse +7139,5280577,"TERMINAL",0,0,"740",,terminal_output 
+7140,5281225,"scripts_horeka/overfit_sample_tiny/sample.sh",1855,0,"1",shellscript,content +7141,5281226,"scripts_horeka/overfit_sample_tiny/sample.sh",1856,0,"",shellscript,selection_keyboard +7142,5281531,"TERMINAL",0,0,"81",,terminal_output +7143,5281593,"scripts_horeka/overfit_sample_tiny/sample.sh",1855,0,"",shellscript,selection_command +7144,5282615,"TERMINAL",0,0,"92",,terminal_output +7145,5283619,"TERMINAL",0,0,"403",,terminal_output +7146,5284673,"TERMINAL",0,0,"14",,terminal_output +7147,5284904,"TERMINAL",0,0,"sh scripts_horeka/overfit_sample_tiny/sample.sh ",,terminal_output +7148,5285529,"TERMINAL",0,0,"\r\n[?2004l\r",,terminal_output +7149,5285645,"TERMINAL",0,0,"Sampling from checkpoint: /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/3309699/genie_1751384516_74500/\r\n",,terminal_output +7150,5285707,"TERMINAL",0,0,"25",,terminal_output +7151,5286824,"TERMINAL",0,0,"36",,terminal_output +7152,5287849,"TERMINAL",0,0,"47",,terminal_output +7153,5288380,"TERMINAL",0,0,"2025-07-03 17:43:45.279584: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +7154,5288821,"TERMINAL",0,0,"58",,terminal_output +7155,5289871,"TERMINAL",0,0,"69",,terminal_output +7156,5290920,"TERMINAL",0,0,"750",,terminal_output +7157,5291945,"TERMINAL",0,0,"81",,terminal_output +7158,5292994,"TERMINAL",0,0,"92",,terminal_output +7159,5294094,"TERMINAL",0,0,"503",,terminal_output +7160,5295117,"TERMINAL",0,0,"14",,terminal_output +7161,5296176,"TERMINAL",0,0,"26",,terminal_output +7162,5297268,"TERMINAL",0,0,"47",,terminal_output +7163,5298232,"TERMINAL",0,0,"58",,terminal_output +7164,5299316,"TERMINAL",0,0,"69",,terminal_output +7165,5300340,"TERMINAL",0,0,"78:00",,terminal_output +7166,5301363,"TERMINAL",0,0,"81",,terminal_output +7167,5301672,"TERMINAL",0,0,"2025-07-03 17:43:58.529623: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +7168,5302388,"TERMINAL",0,0,"92",,terminal_output +7169,5303515,"TERMINAL",0,0,"4:003",,terminal_output +7170,5304488,"TERMINAL",0,0,"14",,terminal_output +7171,5305539,"TERMINAL",0,0,"25",,terminal_output +7172,5306689,"TERMINAL",0,0,"36",,terminal_output +7173,5307713,"TERMINAL",0,0,"47",,terminal_output +7174,5308749,"TERMINAL",0,0,"58",,terminal_output +7175,5309320,"sample.py",0,0,"",python,tab +7176,5309759,"TERMINAL",0,0,"69",,terminal_output +7177,5310785,"TERMINAL",0,0,"710",,terminal_output +7178,5311792,"TERMINAL",0,0,"81",,terminal_output +7179,5312839,"TERMINAL",0,0,"92",,terminal_output +7180,5313879,"TERMINAL",0,0,"103",,terminal_output +7181,5314984,"TERMINAL",0,0,"14",,terminal_output +7182,5316008,"TERMINAL",0,0,"25",,terminal_output +7183,5317031,"TERMINAL",0,0,"36",,terminal_output +7184,5317412,"TERMINAL",0,0,"2025-07-03 17:44:14.311920: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. 
Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +7185,5318052,"TERMINAL",0,0,"47",,terminal_output +7186,5319182,"TERMINAL",0,0,"58",,terminal_output +7187,5320206,"TERMINAL",0,0,"620",,terminal_output +7188,5321244,"TERMINAL",0,0,"81",,terminal_output +7189,5322254,"TERMINAL",0,0,"92",,terminal_output +7190,5323274,"TERMINAL",0,0,"203",,terminal_output +7191,5324404,"TERMINAL",0,0,"14",,terminal_output +7192,5325122,"TERMINAL",0,0,"2025-07-03 17:44:21.927080: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +7193,5325485,"TERMINAL",0,0,"25",,terminal_output +7194,5326452,"TERMINAL",0,0,"36",,terminal_output +7195,5327438,"TERMINAL",0,0,"47",,terminal_output +7196,5328500,"TERMINAL",0,0,"58",,terminal_output +7197,5328607,"TERMINAL",0,0,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/orbax/checkpoint/_src/serialization/type_handlers.py:1251: UserWarning: Sharding info not provided when restoring. Populating sharding info from sharding file. Please note restoration time will be slightly increased due to reading from file. Note also that this option is unsafe when restoring on a different topology than the checkpoint was saved with.\r\n warnings.warn(\r\n",,terminal_output +7198,5329525,"TERMINAL",0,0,"69",,terminal_output +7199,5330651,"TERMINAL",0,0,"730",,terminal_output +7200,5331674,"TERMINAL",0,0,"81",,terminal_output +7201,5332702,"TERMINAL",0,0,"92",,terminal_output +7202,5333724,"TERMINAL",0,0,"303",,terminal_output +7203,5334850,"TERMINAL",0,0,"14",,terminal_output +7204,5335877,"TERMINAL",0,0,"25",,terminal_output +7205,5336225,"TERMINAL",0,0,"2025-07-03 17:44:33.126907: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +7206,5336827,"TERMINAL",0,0,"36",,terminal_output +7207,5337877,"TERMINAL",0,0,"47",,terminal_output +7208,5338972,"TERMINAL",0,0,"58",,terminal_output +7209,5339083,"TERMINAL",0,0,"2025-07-03 17:44:35.983150: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +7210,5340071,"TERMINAL",0,0,"69",,terminal_output +7211,5341097,"TERMINAL",0,0,"740",,terminal_output +7212,5342121,"TERMINAL",0,0,"81",,terminal_output +7213,5343144,"TERMINAL",0,0,"93",,terminal_output +7214,5344168,"TERMINAL",0,0,"414",,terminal_output +7215,5345294,"TERMINAL",0,0,"25",,terminal_output +7216,5346319,"TERMINAL",0,0,"36",,terminal_output +7217,5347240,"TERMINAL",0,0,"2025-07-03 17:44:44.064905: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. 
Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +7218,5347295,"TERMINAL",0,0,"47",,terminal_output +7219,5348344,"TERMINAL",0,0,"58",,terminal_output +7220,5348782,"TERMINAL",0,0,"token_idxs shape: (1, 1, 920)\r\n",,terminal_output +7221,5349083,"TERMINAL",0,0,"token_idxs shape: (1, 16, 920)\r\naction_tokens.shape: (1, 15, 1, 32)\r\n",,terminal_output +7222,5349148,"TERMINAL",0,0,"mask_expanded.shape: (1, 16, 920, 1)\r\nmask_token.shape: (1, 1, 1, 128)\r\n",,terminal_output +7223,5349397,"TERMINAL",0,0,"69",,terminal_output +7224,5350210,"TERMINAL",0,0,"mask_expanded.shape: (1, 16, 920, 1)\r\nmask_token.shape: (1, 1, 1, 128)\r\n",,terminal_output +7225,5350518,"TERMINAL",0,0,"750",,terminal_output +7226,5350581,"TERMINAL",0,0,"2025-07-03 17:44:47.480598: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +7227,5351541,"TERMINAL",0,0,"81",,terminal_output +7228,5352541,"TERMINAL",0,0,"92",,terminal_output +7229,5353588,"TERMINAL",0,0,"503",,terminal_output +7230,5354715,"TERMINAL",0,0,"14",,terminal_output +7231,5355661,"TERMINAL",0,0,"25",,terminal_output +7232,5356700,"TERMINAL",0,0,"36",,terminal_output +7233,5356991,"TERMINAL",0,0,"token_idxs shape: (1, 16, 920)\r\naction_tokens.shape: (1, 15, 1, 32)\r\nmask_expanded.shape: (1, 16, 920, 1)\r\nmask_token.shape: (1, 1, 1, 128)\r\n",,terminal_output +7234,5357154,"TERMINAL",0,0,"mask_expanded.shape: (1, 16, 920, 1)\r\nmask_token.shape: (1, 1, 1, 128)\r\n",,terminal_output +7235,5357790,"TERMINAL",0,0,"47",,terminal_output +7236,5358812,"TERMINAL",0,0,"58",,terminal_output +7237,5359841,"TERMINAL",0,0,"69",,terminal_output +7238,5360098,"TERMINAL",0,0,"token_idxs shape: (1, 16, 920)\r\naction_tokens.shape: (1, 15, 1, 32)\r\nmask_expanded.shape: (1, 16, 920, 1)\r\nmask_token.shape: (1, 1, 1, 128)\r\n",,terminal_output +7239,5360256,"TERMINAL",0,0,"mask_expanded.shape: (1, 16, 920, 1)\r\nmask_token.shape: (1, 1, 1, 128)\r\n",,terminal_output +7240,5360881,"TERMINAL",0,0,"79:00",,terminal_output +7241,5361930,"TERMINAL",0,0,"81",,terminal_output +7242,5362994,"TERMINAL",0,0,"92",,terminal_output +7243,5363230,"TERMINAL",0,0,"token_idxs shape: (1, 16, 920)\r\naction_tokens.shape: (1, 15, 1, 32)\r\nmask_expanded.shape: (1, 16, 920, 1)\r\nmask_token.shape: (1, 1, 1, 128)\r\n",,terminal_output +7244,5363393,"TERMINAL",0,0,"mask_expanded.shape: (1, 16, 920, 1)\r\nmask_token.shape: (1, 1, 1, 128)\r\n",,terminal_output +7245,5364033,"TERMINAL",0,0,"5:003",,terminal_output +7246,5365160,"TERMINAL",0,0,"14",,terminal_output +7247,5366125,"TERMINAL",0,0,"26",,terminal_output +7248,5366395,"TERMINAL",0,0,"token_idxs shape: (1, 16, 920)\r\naction_tokens.shape: (1, 15, 1, 32)\r\nmask_expanded.shape: (1, 16, 920, 1)\r\nmask_token.shape: (1, 1, 1, 128)\r\n",,terminal_output +7249,5366554,"TERMINAL",0,0,"mask_expanded.shape: (1, 16, 920, 1)\r\nmask_token.shape: (1, 1, 1, 128)\r\n",,terminal_output +7250,5367207,"TERMINAL",0,0,"47",,terminal_output +7251,5368232,"TERMINAL",0,0,"58",,terminal_output +7252,5369256,"TERMINAL",0,0,"69",,terminal_output +7253,5369623,"TERMINAL",0,0,"token_idxs shape: (1, 16, 920)\r\naction_tokens.shape: (1, 15, 1, 32)\r\nmask_expanded.shape: (1, 16, 920, 
1)\r\nmask_token.shape: (1, 1, 1, 128)\r\n",,terminal_output +7254,5369782,"TERMINAL",0,0,"mask_expanded.shape: (1, 16, 920, 1)\r\nmask_token.shape: (1, 1, 1, 128)\r\n",,terminal_output +7255,5370298,"TERMINAL",0,0,"710",,terminal_output +7256,5371407,"TERMINAL",0,0,"81",,terminal_output +7257,5372383,"TERMINAL",0,0,"92",,terminal_output +7258,5372792,"TERMINAL",0,0,"token_idxs shape: (1, 16, 920)\r\naction_tokens.shape: (1, 15, 1, 32)\r\nmask_expanded.shape: (1, 16, 920, 1)\r\nmask_token.shape: (1, 1, 1, 128)\r\n",,terminal_output +7259,5372930,"TERMINAL",0,0,"mask_expanded.shape: (1, 16, 920, 1)\r\nmask_token.shape: (1, 1, 1, 128)\r\n",,terminal_output +7260,5373429,"TERMINAL",0,0,"103",,terminal_output +7261,5374489,"TERMINAL",0,0,"14",,terminal_output +7262,5375605,"TERMINAL",0,0,"25",,terminal_output +7263,5375912,"TERMINAL",0,0,"token_idxs shape: (1, 16, 920)\r\naction_tokens.shape: (1, 15, 1, 32)\r\nmask_expanded.shape: (1, 16, 920, 1)\r\nmask_token.shape: (1, 1, 1, 128)\r\n",,terminal_output +7264,5376061,"TERMINAL",0,0,"mask_expanded.shape: (1, 16, 920, 1)\r\nmask_token.shape: (1, 1, 1, 128)\r\n",,terminal_output +7265,5376629,"TERMINAL",0,0,"36",,terminal_output +7266,5377653,"TERMINAL",0,0,"47",,terminal_output +7267,5378635,"TERMINAL",0,0,"58",,terminal_output +7268,5379094,"TERMINAL",0,0,"token_idxs shape: (1, 16, 920)\r\naction_tokens.shape: (1, 15, 1, 32)\r\nmask_expanded.shape: (1, 16, 920, 1)\r\nmask_token.shape: (1, 1, 1, 128)\r\n",,terminal_output +7269,5379205,"TERMINAL",0,0,"mask_expanded.shape: (1, 16, 920, 1)\r\nmask_token.shape: (1, 1, 1, 128)\r\n",,terminal_output +7270,5379701,"TERMINAL",0,0,"69",,terminal_output +7271,5380732,"TERMINAL",0,0,"720",,terminal_output +7272,5381483,"genie.py",0,0,"",python,tab +7273,5381814,"TERMINAL",0,0,"81",,terminal_output +7274,5382162,"TERMINAL",0,0,"token_idxs shape: (1, 16, 920)\r\naction_tokens.shape: (1, 15, 1, 32)\r\nmask_expanded.shape: (1, 16, 920, 1)\r\nmask_token.shape: (1, 1, 1, 128)\r\n",,terminal_output +7275,5382320,"TERMINAL",0,0,"mask_expanded.shape: (1, 16, 920, 1)\r\nmask_token.shape: (1, 1, 1, 128)\r\n",,terminal_output +7276,5382876,"TERMINAL",0,0,"92",,terminal_output +7277,5383901,"TERMINAL",0,0,"203",,terminal_output +7278,5384925,"TERMINAL",0,0,"14",,terminal_output +7279,5385354,"TERMINAL",0,0,"token_idxs shape: (1, 16, 920)\r\naction_tokens.shape: (1, 15, 1, 32)\r\nmask_expanded.shape: (1, 16, 920, 1)\r\nmask_token.shape: (1, 1, 1, 128)\r\n",,terminal_output +7280,5385487,"TERMINAL",0,0,"mask_expanded.shape: (1, 16, 920, 1)\r\nmask_token.shape: (1, 1, 1, 128)\r\n",,terminal_output +7281,5385963,"TERMINAL",0,0,"25",,terminal_output +7282,5386975,"genie.py",8184,0,"",python,selection_mouse +7283,5387032,"TERMINAL",0,0,"36",,terminal_output +7284,5387656,"genie.py",8234,0,"",python,selection_mouse +7285,5387762,"genie.py",8228,10,"mask_token",python,selection_mouse +7286,5388039,"TERMINAL",0,0,"47",,terminal_output +7287,5388404,"genie.py",8239,0,"",python,selection_mouse +7288,5388515,"TERMINAL",0,0,"token_idxs shape: (1, 16, 920)\r\naction_tokens.shape: (1, 15, 1, 32)\r\nmask_expanded.shape: (1, 16, 920, 1)\r\nmask_token.shape: (1, 1, 1, 128)\r\n",,terminal_output +7289,5388685,"TERMINAL",0,0,"mask_expanded.shape: (1, 16, 920, 1)\r\nmask_token.shape: (1, 1, 1, 128)\r\n",,terminal_output +7290,5389075,"genie.py",8176,0,"",python,selection_mouse +7291,5389145,"TERMINAL",0,0,"58",,terminal_output +7292,5390172,"TERMINAL",0,0,"630",,terminal_output +7293,5391373,"TERMINAL",0,0,"81",,terminal_output 
+7294,5391705,"TERMINAL",0,0,"token_idxs shape: (1, 16, 920)\r\naction_tokens.shape: (1, 15, 1, 32)\r\nmask_expanded.shape: (1, 16, 920, 1)\r\nmask_token.shape: (1, 1, 1, 128)\r\n",,terminal_output +7295,5391947,"genie.py",8179,0,"",python,selection_mouse +7296,5391968,"TERMINAL",0,0,"mask_expanded.shape: (1, 16, 920, 1)\r\nmask_token.shape: (1, 1, 1, 128)\r\n",,terminal_output +7297,5392315,"TERMINAL",0,0,"92",,terminal_output +7298,5392558,"genie.py",8163,0,"",python,selection_mouse +7299,5393281,"TERMINAL",0,0,"303",,terminal_output +7300,5393781,"genie.py",8151,116,"",python,content +7301,5393828,"genie.py",8159,0,"",python,selection_command +7302,5394315,"TERMINAL",0,0,"14",,terminal_output +7303,5394666,"genie.py",8123,0,"",python,selection_command +7304,5394821,"genie.py",8068,0,"",python,selection_command +7305,5394970,"genie.py",8006,0,"",python,selection_command +7306,5395025,"TERMINAL",0,0,"token_idxs shape: (1, 16, 920)\r\naction_tokens.shape: (1, 15, 1, 32)\r\n",,terminal_output +7307,5395063,"TERMINAL",0,0,"mask_expanded.shape: (1, 16, 920, 1)\r\nmask_token.shape: (1, 1, 1, 128)\r\n",,terminal_output +7308,5395108,"genie.py",7937,0,"",python,selection_command +7309,5395166,"TERMINAL",0,0,"mask_expanded.shape: (1, 16, 920, 1)\r\nmask_token.shape: (1, 1, 1, 128)\r\n",,terminal_output +7310,5395238,"genie.py",7858,0,"",python,selection_command +7311,5395371,"TERMINAL",0,0,"25",,terminal_output +7312,5395380,"genie.py",7796,0,"",python,selection_command +7313,5395545,"genie.py",7760,0,"",python,selection_command +7314,5395682,"genie.py",7717,0,"",python,selection_command +7315,5395865,"genie.py",7704,0,"",python,selection_command +7316,5395973,"genie.py",7677,0,"",python,selection_command +7317,5396394,"TERMINAL",0,0,"36",,terminal_output +7318,5397500,"TERMINAL",0,0,"47",,terminal_output +7319,5398236,"TERMINAL",0,0,"token_idxs shape: (1, 16, 920)\r\naction_tokens.shape: (1, 15, 1, 32)\r\nmask_expanded.shape: (1, 16, 920, 1)\r\nmask_token.shape: (1, 1, 1, 128)\r\n",,terminal_output +7320,5398372,"TERMINAL",0,0,"mask_expanded.shape: (1, 16, 920, 1)\r\nmask_token.shape: (1, 1, 1, 128)\r\n",,terminal_output +7321,5398638,"genie.py",8010,0,"",python,selection_mouse +7322,5399264,"TERMINAL",0,0,"58",,terminal_output +7323,5400098,"TERMINAL",0,0,"69",,terminal_output +7324,5401123,"TERMINAL",0,0,"741",,terminal_output +7325,5402229,"TERMINAL",0,0,"92",,terminal_output +7326,5402488,"genie.py",8018,0,"",python,selection_mouse +7327,5403206,"TERMINAL",0,0,"403",,terminal_output +7328,5403576,"genie.py",8144,0,"",python,selection_mouse +7329,5404110,"genie.py",8137,0,"",python,selection_mouse +7330,5404242,"TERMINAL",0,0,"14",,terminal_output +7331,5404287,"genie.py",8135,4,"mask",python,selection_mouse +7332,5404545,"genie.py",8135,5,"mask[",python,selection_mouse +7333,5404546,"genie.py",8135,8,"mask[...",python,selection_mouse +7334,5404546,"genie.py",8135,10,"mask[..., ",python,selection_mouse +7335,5404546,"genie.py",8135,14,"mask[..., None",python,selection_mouse +7336,5404592,"genie.py",8135,15,"mask[..., None]",python,selection_mouse +7337,5404933,"genie.py",8149,0,"",python,selection_mouse +7338,5405285,"TERMINAL",0,0,"25",,terminal_output +7339,5406521,"TERMINAL",0,0,"36",,terminal_output +7340,5407362,"TERMINAL",0,0,"47",,terminal_output +7341,5408024,"TERMINAL",0,0,"SSIM: 0.3250141739845276\r\n",,terminal_output +7342,5408488,"TERMINAL",0,0,"58",,terminal_output +7343,5409502,"TERMINAL",0,0,"69",,terminal_output +7344,5410509,"TERMINAL",0,0,"750",,terminal_output 
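The run above repeatedly prints `mask_expanded.shape: (1, 16, 920, 1)` and `mask_token.shape: (1, 1, 1, 128)`, and the editor selections land on `where`, `mask_expanded`, `mask_token`, and `mask[..., None]`. Those names and shapes suggest the padded positions are overwritten with a learned mask token via `jnp.where`; a hedged sketch under that reading (the surrounding function is assumed, not shown in the log):

```python
# Sketch matching the logged shapes: embeddings (B, S, N, D), mask (B, S, N) bool,
# mask_token (1, 1, 1, D), e.g. (1, 16, 920, 128) with D = 128.
import jax.numpy as jnp

def apply_mask_token(embeddings, mask, mask_token):
    mask_expanded = mask[..., None]  # (B, S, N, 1), broadcasts over the feature dim
    # Padded positions take the learned mask token; real tokens pass through.
    return jnp.where(mask_expanded, mask_token, embeddings)
```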
+7345,5410521,"TERMINAL",0,0,"]0;tum_cte0515@hkn0401:~/Projects/jafar[?2004h(jafar) [tum_cte0515@hkn0401 jafar]$ ",,terminal_output +7346,5411650,"TERMINAL",0,0,"81",,terminal_output +7347,5412675,"TERMINAL",0,0,"92",,terminal_output +7348,5413697,"TERMINAL",0,0,"503",,terminal_output +7349,5414723,"TERMINAL",0,0,"14",,terminal_output +7350,5415749,"TERMINAL",0,0,"25",,terminal_output +7351,5416781,"TERMINAL",0,0,"36",,terminal_output +7352,5417892,"TERMINAL",0,0,"47",,terminal_output +7353,5418928,"TERMINAL",0,0,"58",,terminal_output +7354,5419893,"TERMINAL",0,0,"69",,terminal_output +7355,5420944,"TERMINAL",0,0,"730:00",,terminal_output +7356,5421989,"TERMINAL",0,0,"81",,terminal_output +7357,5423040,"TERMINAL",0,0,"92",,terminal_output +7358,5424078,"TERMINAL",0,0,"6:003",,terminal_output +7359,5425168,"TERMINAL",0,0,"15",,terminal_output +7360,5426196,"TERMINAL",0,0,"36",,terminal_output +7361,5427212,"TERMINAL",0,0,"47",,terminal_output +7362,5428283,"TERMINAL",0,0,"58",,terminal_output +7363,5429366,"TERMINAL",0,0,"69",,terminal_output +7364,5430391,"TERMINAL",0,0,"710",,terminal_output +7365,5431415,"TERMINAL",0,0,"81",,terminal_output +7366,5432441,"TERMINAL",0,0,"92",,terminal_output +7367,5433563,"TERMINAL",0,0,"103",,terminal_output +7368,5434588,"TERMINAL",0,0,"14",,terminal_output +7369,5435612,"TERMINAL",0,0,"25",,terminal_output +7370,5436636,"TERMINAL",0,0,"36",,terminal_output +7371,5437766,"TERMINAL",0,0,"47",,terminal_output +7372,5438168,"genie.py",0,0,"",python,tab +7373,5438793,"TERMINAL",0,0,"58",,terminal_output +7374,5439976,"sample.py",0,0,"",python,tab +7375,5440037,"TERMINAL",0,0,"69",,terminal_output +7376,5440803,"TERMINAL",0,0,"720",,terminal_output +7377,5441849,"TERMINAL",0,0,"81",,terminal_output +7378,5442889,"TERMINAL",0,0,"92",,terminal_output +7379,5443356,"genie.py",0,0,"",python,tab +7380,5443979,"TERMINAL",0,0,"203",,terminal_output +7381,5444995,"TERMINAL",0,0,"14",,terminal_output +7382,5445442,"genie.py",3713,0,"",python,selection_mouse +7383,5446035,"TERMINAL",0,0,"25",,terminal_output +7384,5447430,"TERMINAL",0,0,"37",,terminal_output +7385,5448517,"TERMINAL",0,0,"58",,terminal_output +7386,5449546,"TERMINAL",0,0,"69",,terminal_output +7387,5450647,"TERMINAL",0,0,"730",,terminal_output +7388,5451690,"TERMINAL",0,0,"81",,terminal_output +7389,5452671,"TERMINAL",0,0,"92",,terminal_output +7390,5453738,"TERMINAL",0,0,"303",,terminal_output +7391,5454765,"TERMINAL",0,0,"14",,terminal_output +7392,5455890,"TERMINAL",0,0,"25",,terminal_output +7393,5456852,"TERMINAL",0,0,"36",,terminal_output +7394,5457939,"TERMINAL",0,0,"47",,terminal_output +7395,5458955,"TERMINAL",0,0,"58",,terminal_output +7396,5460004,"TERMINAL",0,0,"69",,terminal_output +7397,5461110,"TERMINAL",0,0,"740",,terminal_output +7398,5462101,"TERMINAL",0,0,"81",,terminal_output +7399,5463158,"TERMINAL",0,0,"403",,terminal_output +7400,5464285,"TERMINAL",0,0,"14",,terminal_output +7401,5465309,"TERMINAL",0,0,"25",,terminal_output +7402,5466333,"TERMINAL",0,0,"36",,terminal_output +7403,5467357,"TERMINAL",0,0,"47",,terminal_output +7404,5468484,"TERMINAL",0,0,"58",,terminal_output +7405,5469507,"TERMINAL",0,0,"69",,terminal_output +7406,5470531,"TERMINAL",0,0,"750",,terminal_output +7407,5471556,"TERMINAL",0,0,"81",,terminal_output +7408,5472580,"TERMINAL",0,0,"92",,terminal_output +7409,5473706,"TERMINAL",0,0,"503",,terminal_output +7410,5473961,"genie.py",3312,0,"",python,selection_mouse +7411,5474121,"genie.py",3309,5,"batch",python,selection_mouse 
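The sampling run ends with `SSIM: 0.3250141739845276` before returning to the shell prompt. How that metric is computed is not visible in the log; one plausible sketch, assuming dm-pix (a common JAX image-metrics library) and frames normalized to [0, 1]:

```python
# Hypothetical reconstruction of the SSIM check; the dm_pix dependency and the
# (T, H, W, C) frame layout are assumptions, not taken from the recording.
import jax.numpy as jnp
import dm_pix as pix

def mean_ssim(pred_frames: jnp.ndarray, gt_frames: jnp.ndarray) -> jnp.ndarray:
    # Per-frame SSIM, averaged over the sequence.
    return jnp.mean(pix.ssim(pred_frames, gt_frames, max_val=1.0))
```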
+7412,5474730,"TERMINAL",0,0,"14",,terminal_output +7413,5475747,"TERMINAL",0,0,"25",,terminal_output +7414,5475839,"genie.py",3389,0,"",python,selection_mouse +7415,5476129,"genie.py",3389,2," (",python,selection_mouse +7416,5476129,"genie.py",3389,4," (B,",python,selection_mouse +7417,5476130,"genie.py",3389,45," (B, T, N)\n B, T, N = token_idxs.shape",python,selection_mouse +7418,5476754,"TERMINAL",0,0,"36",,terminal_output +7419,5477908,"TERMINAL",0,0,"47",,terminal_output +7420,5478927,"TERMINAL",0,0,"58",,terminal_output +7421,5479952,"TERMINAL",0,0,"69",,terminal_output +7422,5480940,"TERMINAL",0,0,"71:00",,terminal_output +7423,5481856,"genie.py",0,0,"",python,tab +7424,5481856,"genie.py",5234,0,"",python,selection_mouse +7425,5481930,"genie.py",5233,0,"",python,selection_command +7426,5482014,"TERMINAL",0,0,"81",,terminal_output +7427,5483025,"TERMINAL",0,0,"92",,terminal_output +7428,5484203,"TERMINAL",0,0,"7:003",,terminal_output +7429,5485168,"TERMINAL",0,0,"15",,terminal_output +7430,5485472,"genie.py",0,0,"",python,tab +7431,5485473,"genie.py",9835,0,"",python,selection_mouse +7432,5485536,"genie.py",9834,0,"",python,selection_command +7433,5486145,"genie.py",9805,0,"",python,selection_mouse +7434,5486219,"TERMINAL",0,0,"36",,terminal_output +7435,5487222,"TERMINAL",0,0,"47",,terminal_output +7436,5487792,"genie.py",9803,0,"",python,selection_mouse +7437,5487804,"genie.py",9802,0,"",python,selection_command +7438,5488255,"TERMINAL",0,0,"58",,terminal_output +7439,5489314,"TERMINAL",0,0,"69",,terminal_output +7440,5490547,"TERMINAL",0,0,"710",,terminal_output +7441,5491401,"TERMINAL",0,0,"81",,terminal_output +7442,5492434,"TERMINAL",0,0,"92",,terminal_output +7443,5493487,"TERMINAL",0,0,"103",,terminal_output +7444,5494536,"TERMINAL",0,0,"14",,terminal_output +7445,5495620,"TERMINAL",0,0,"25",,terminal_output +7446,5496644,"TERMINAL",0,0,"36",,terminal_output +7447,5497716,"TERMINAL",0,0,"47",,terminal_output +7448,5498724,"TERMINAL",0,0,"58",,terminal_output +7449,5499772,"TERMINAL",0,0,"69",,terminal_output +7450,5500841,"TERMINAL",0,0,"720",,terminal_output +7451,5501880,"TERMINAL",0,0,"81",,terminal_output +7452,5503016,"TERMINAL",0,0,"92",,terminal_output +7453,5503967,"TERMINAL",0,0,"203",,terminal_output +7454,5505004,"TERMINAL",0,0,"14",,terminal_output +7455,5506065,"TERMINAL",0,0,"25",,terminal_output +7456,5507104,"TERMINAL",0,0,"37",,terminal_output +7457,5508157,"TERMINAL",0,0,"58",,terminal_output +7458,5509326,"TERMINAL",0,0,"69",,terminal_output +7459,5510294,"TERMINAL",0,0,"730",,terminal_output +7460,5511285,"TERMINAL",0,0,"81",,terminal_output +7461,5512517,"TERMINAL",0,0,"92",,terminal_output +7462,5513381,"TERMINAL",0,0,"303",,terminal_output +7463,5514475,"TERMINAL",0,0,"14",,terminal_output +7464,5515537,"TERMINAL",0,0,"25",,terminal_output +7465,5516561,"TERMINAL",0,0,"36",,terminal_output +7466,5517582,"TERMINAL",0,0,"47",,terminal_output +7467,5518703,"TERMINAL",0,0,"58",,terminal_output +7468,5519743,"TERMINAL",0,0,"69",,terminal_output +7469,5520736,"TERMINAL",0,0,"740",,terminal_output +7470,5521810,"TERMINAL",0,0,"81",,terminal_output +7471,5522888,"TERMINAL",0,0,"92",,terminal_output +7472,5524028,"TERMINAL",0,0,"403",,terminal_output +7473,5524979,"TERMINAL",0,0,"14",,terminal_output +7474,5525983,"TERMINAL",0,0,"25",,terminal_output +7475,5527056,"TERMINAL",0,0,"36",,terminal_output +7476,5528100,"TERMINAL",0,0,"47",,terminal_output +7477,5529158,"TERMINAL",0,0,"59",,terminal_output +7478,5530219,"TERMINAL",0,0,"750",,terminal_output 
+7479,5531232,"TERMINAL",0,0,"81",,terminal_output +7480,5532309,"TERMINAL",0,0,"92",,terminal_output +7481,5533431,"TERMINAL",0,0,"503",,terminal_output +7482,5534469,"TERMINAL",0,0,"14",,terminal_output +7483,5535431,"TERMINAL",0,0,"25",,terminal_output +7484,5536596,"TERMINAL",0,0,"36",,terminal_output +7485,5537533,"TERMINAL",0,0,"47",,terminal_output +7486,5538636,"TERMINAL",0,0,"58",,terminal_output +7487,5539687,"TERMINAL",0,0,"69",,terminal_output +7488,5540701,"TERMINAL",0,0,"72:00",,terminal_output +7489,5541771,"TERMINAL",0,0,"81",,terminal_output +7490,5542785,"TERMINAL",0,0,"92",,terminal_output +7491,5543851,"TERMINAL",0,0,"8:003",,terminal_output +7492,5544864,"TERMINAL",0,0,"14",,terminal_output +7493,5545940,"TERMINAL",0,0,"25",,terminal_output +7494,5546938,"TERMINAL",0,0,"36",,terminal_output +7495,5547986,"TERMINAL",0,0,"47",,terminal_output +7496,5549103,"TERMINAL",0,0,"58",,terminal_output +7497,5550214,"TERMINAL",0,0,"69",,terminal_output +7498,5551107,"TERMINAL",0,0,"711",,terminal_output +7499,5552248,"TERMINAL",0,0,"92",,terminal_output +7500,5553319,"TERMINAL",0,0,"103",,terminal_output +7501,5554297,"TERMINAL",0,0,"14",,terminal_output +7502,5555320,"TERMINAL",0,0,"25",,terminal_output +7503,5556341,"TERMINAL",0,0,"36",,terminal_output +7504,5557384,"TERMINAL",0,0,"47",,terminal_output +7505,5558730,"TERMINAL",0,0,"58",,terminal_output +7506,5559629,"TERMINAL",0,0,"69",,terminal_output +7507,5560579,"TERMINAL",0,0,"720",,terminal_output +7508,5561624,"TERMINAL",0,0,"81",,terminal_output +7509,5562724,"TERMINAL",0,0,"92",,terminal_output +7510,5563832,"TERMINAL",0,0,"203",,terminal_output +7511,5565006,"TERMINAL",0,0,"14",,terminal_output +7512,5566036,"TERMINAL",0,0,"25",,terminal_output +7513,5567085,"TERMINAL",0,0,"36",,terminal_output +7514,5568162,"TERMINAL",0,0,"47",,terminal_output +7515,5568961,"TERMINAL",0,0,"58",,terminal_output +7516,5569996,"TERMINAL",0,0,"69",,terminal_output +7517,5571023,"TERMINAL",0,0,"730",,terminal_output +7518,5572103,"TERMINAL",0,0,"81",,terminal_output +7519,5573163,"TERMINAL",0,0,"93",,terminal_output +7520,5574261,"TERMINAL",0,0,"314",,terminal_output +7521,5575294,"TERMINAL",0,0,"25",,terminal_output +7522,5576317,"TERMINAL",0,0,"36",,terminal_output +7523,5577368,"TERMINAL",0,0,"47",,terminal_output +7524,5578422,"TERMINAL",0,0,"58",,terminal_output +7525,5579454,"TERMINAL",0,0,"69",,terminal_output +7526,5580546,"TERMINAL",0,0,"740",,terminal_output +7527,5581760,"TERMINAL",0,0,"81",,terminal_output +7528,5582617,"TERMINAL",0,0,"92",,terminal_output +7529,5583709,"TERMINAL",0,0,"403",,terminal_output +7530,5584696,"TERMINAL",0,0,"14",,terminal_output +7531,5585734,"TERMINAL",0,0,"25",,terminal_output +7532,5586860,"TERMINAL",0,0,"36",,terminal_output +7533,5587927,"TERMINAL",0,0,"47",,terminal_output +7534,5588865,"TERMINAL",0,0,"58",,terminal_output +7535,5589932,"TERMINAL",0,0,"69",,terminal_output +7536,5590947,"TERMINAL",0,0,"750",,terminal_output +7537,5592081,"TERMINAL",0,0,"81",,terminal_output +7538,5593024,"TERMINAL",0,0,"92",,terminal_output +7539,5594071,"TERMINAL",0,0,"503",,terminal_output +7540,5595153,"TERMINAL",0,0,"15",,terminal_output +7541,5596179,"TERMINAL",0,0,"36",,terminal_output +7542,5597306,"TERMINAL",0,0,"47",,terminal_output +7543,5598268,"TERMINAL",0,0,"58",,terminal_output +7544,5599486,"TERMINAL",0,0,"69",,terminal_output +7545,5600377,"TERMINAL",0,0,"73:00",,terminal_output +7546,5601416,"TERMINAL",0,0,"81",,terminal_output +7547,5602467,"TERMINAL",0,0,"92",,terminal_output 
+7548,5603581,"TERMINAL",0,0,"9:003",,terminal_output +7549,5604536,"TERMINAL",0,0,"14",,terminal_output +7550,5605594,"TERMINAL",0,0,"25",,terminal_output +7551,5606628,"TERMINAL",0,0,"36",,terminal_output +7552,5607680,"TERMINAL",0,0,"47",,terminal_output +7553,5608731,"TERMINAL",0,0,"58",,terminal_output +7554,5609821,"TERMINAL",0,0,"69",,terminal_output +7555,5610434,"sample.py",0,0,"",python,tab +7556,5610869,"TERMINAL",0,0,"710",,terminal_output +7557,5611945,"TERMINAL",0,0,"81",,terminal_output +7558,5612983,"TERMINAL",0,0,"92",,terminal_output +7559,5614112,"genie.py",0,0,"",python,tab +7560,5614249,"TERMINAL",0,0,"103",,terminal_output +7561,5615027,"TERMINAL",0,0,"14",,terminal_output +7562,5616158,"TERMINAL",0,0,"25",,terminal_output +7563,5617181,"TERMINAL",0,0,"36",,terminal_output +7564,5618194,"TERMINAL",0,0,"58",,terminal_output +7565,5619253,"TERMINAL",0,0,"69",,terminal_output +7566,5620254,"genie.py",5287,0,"",python,selection_mouse +7567,5620268,"TERMINAL",0,0,"720",,terminal_output +7568,5620507,"genie.py",5287,2,"T ",python,selection_mouse +7569,5620508,"genie.py",5287,4,"T +=",python,selection_mouse +7570,5620508,"genie.py",5287,5,"T +=1",python,selection_mouse +7571,5621368,"TERMINAL",0,0,"81",,terminal_output +7572,5621792,"genie.py",5289,0,"",python,selection_mouse +7573,5622171,"genie.py",5287,0,"",python,selection_mouse +7574,5622384,"TERMINAL",0,0,"92",,terminal_output +7575,5623411,"TERMINAL",0,0,"203",,terminal_output +7576,5624440,"TERMINAL",0,0,"14",,terminal_output +7577,5625567,"TERMINAL",0,0,"25",,terminal_output +7578,5626599,"TERMINAL",0,0,"36",,terminal_output +7579,5627616,"TERMINAL",0,0,"47",,terminal_output +7580,5628645,"TERMINAL",0,0,"58",,terminal_output +7581,5629691,"TERMINAL",0,0,"69",,terminal_output +7582,5630790,"TERMINAL",0,0,"730",,terminal_output +7583,5631815,"TERMINAL",0,0,"81",,terminal_output +7584,5632837,"TERMINAL",0,0,"92",,terminal_output +7585,5633964,"TERMINAL",0,0,"303",,terminal_output +7586,5634988,"TERMINAL",0,0,"14",,terminal_output +7587,5636012,"TERMINAL",0,0,"25",,terminal_output +7588,5637138,"TERMINAL",0,0,"36",,terminal_output +7589,5638164,"TERMINAL",0,0,"48",,terminal_output +7590,5639188,"TERMINAL",0,0,"69",,terminal_output +7591,5640661,"TERMINAL",0,0,"740",,terminal_output +7592,5641739,"TERMINAL",0,0,"81",,terminal_output +7593,5642736,"TERMINAL",0,0,"92",,terminal_output +7594,5643794,"TERMINAL",0,0,"403",,terminal_output +7595,5644921,"TERMINAL",0,0,"14",,terminal_output +7596,5645945,"TERMINAL",0,0,"25",,terminal_output +7597,5646969,"TERMINAL",0,0,"36",,terminal_output +7598,5647992,"TERMINAL",0,0,"47",,terminal_output +7599,5649012,"TERMINAL",0,0,"58",,terminal_output +7600,5650144,"TERMINAL",0,0,"69",,terminal_output +7601,5651167,"TERMINAL",0,0,"751",,terminal_output +7602,5652191,"TERMINAL",0,0,"92",,terminal_output +7603,5653256,"TERMINAL",0,0,"503",,terminal_output +7604,5654423,"TERMINAL",0,0,"14",,terminal_output +7605,5655365,"TERMINAL",0,0,"25",,terminal_output +7606,5656394,"TERMINAL",0,0,"36",,terminal_output +7607,5657449,"TERMINAL",0,0,"47",,terminal_output +7608,5658437,"TERMINAL",0,0,"58",,terminal_output +7609,5659495,"TERMINAL",0,0,"69",,terminal_output +7610,5660559,"TERMINAL",0,0,"74:00",,terminal_output +7611,5661567,"TERMINAL",0,0,"81",,terminal_output +7612,5662589,"TERMINAL",0,0,"92",,terminal_output +7613,5663702,"TERMINAL",0,0,"50:003",,terminal_output +7614,5664712,"TERMINAL",0,0,"14",,terminal_output +7615,5665719,"TERMINAL",0,0,"25",,terminal_output 
+7616,5666795,"TERMINAL",0,0,"36",,terminal_output +7617,5667857,"TERMINAL",0,0,"47",,terminal_output +7618,5668885,"TERMINAL",0,0,"58",,terminal_output +7619,5669934,"TERMINAL",0,0,"69",,terminal_output +7620,5670979,"TERMINAL",0,0,"710",,terminal_output +7621,5672044,"TERMINAL",0,0,"81",,terminal_output +7622,5673120,"TERMINAL",0,0,"92",,terminal_output +7623,5674149,"TERMINAL",0,0,"103",,terminal_output +7624,5675277,"TERMINAL",0,0,"15",,terminal_output +7625,5676184,"TERMINAL",0,0,"36",,terminal_output +7626,5677280,"TERMINAL",0,0,"47",,terminal_output +7627,5678358,"TERMINAL",0,0,"58",,terminal_output +7628,5679330,"TERMINAL",0,0,"69",,terminal_output +7629,5680374,"TERMINAL",0,0,"720",,terminal_output +7630,5681428,"TERMINAL",0,0,"81",,terminal_output +7631,5682506,"TERMINAL",0,0,"92",,terminal_output +7632,5683510,"TERMINAL",0,0,"203",,terminal_output +7633,5684653,"TERMINAL",0,0,"14",,terminal_output +7634,5685666,"TERMINAL",0,0,"25",,terminal_output +7635,5686700,"TERMINAL",0,0,"36",,terminal_output +7636,5687781,"TERMINAL",0,0,"47",,terminal_output +7637,5688768,"TERMINAL",0,0,"58",,terminal_output +7638,5689801,"TERMINAL",0,0,"69",,terminal_output +7639,5690866,"TERMINAL",0,0,"730",,terminal_output +7640,5691925,"TERMINAL",0,0,"81",,terminal_output +7641,5693006,"TERMINAL",0,0,"92",,terminal_output +7642,5694034,"TERMINAL",0,0,"303",,terminal_output +7643,5695031,"TERMINAL",0,0,"14",,terminal_output +7644,5696084,"TERMINAL",0,0,"25",,terminal_output +7645,5697170,"TERMINAL",0,0,"37",,terminal_output +7646,5698265,"TERMINAL",0,0,"58",,terminal_output +7647,5699221,"TERMINAL",0,0,"69",,terminal_output +7648,5700306,"TERMINAL",0,0,"740",,terminal_output +7649,5701555,"TERMINAL",0,0,"81",,terminal_output +7650,5702528,"TERMINAL",0,0,"92",,terminal_output +7651,5703506,"TERMINAL",0,0,"403",,terminal_output +7652,5704471,"TERMINAL",0,0,"14",,terminal_output +7653,5705527,"TERMINAL",0,0,"25",,terminal_output +7654,5706545,"TERMINAL",0,0,"36",,terminal_output +7655,5707605,"TERMINAL",0,0,"47",,terminal_output +7656,5708719,"TERMINAL",0,0,"58",,terminal_output +7657,5709685,"TERMINAL",0,0,"69",,terminal_output +7658,5710730,"TERMINAL",0,0,"750",,terminal_output +7659,5711807,"TERMINAL",0,0,"81",,terminal_output +7660,5712824,"TERMINAL",0,0,"92",,terminal_output +7661,5713983,"TERMINAL",0,0,"503",,terminal_output +7662,5714963,"TERMINAL",0,0,"14",,terminal_output +7663,5716010,"TERMINAL",0,0,"25",,terminal_output +7664,5717125,"TERMINAL",0,0,"36",,terminal_output +7665,5718080,"TERMINAL",0,0,"47",,terminal_output +7666,5719109,"TERMINAL",0,0,"59",,terminal_output +7667,5720159,"TERMINAL",0,0,"75:00",,terminal_output +7668,5721245,"TERMINAL",0,0,"81",,terminal_output +7669,5722278,"TERMINAL",0,0,"92",,terminal_output +7670,5723320,"TERMINAL",0,0,"1:003",,terminal_output +7671,5724328,"TERMINAL",0,0,"14",,terminal_output +7672,5725433,"TERMINAL",0,0,"25",,terminal_output +7673,5726453,"TERMINAL",0,0,"36",,terminal_output +7674,5727481,"TERMINAL",0,0,"47",,terminal_output +7675,5728583,"TERMINAL",0,0,"58",,terminal_output +7676,5729607,"TERMINAL",0,0,"69",,terminal_output +7677,5730631,"TERMINAL",0,0,"710",,terminal_output +7678,5731656,"TERMINAL",0,0,"81",,terminal_output +7679,5732705,"TERMINAL",0,0,"92",,terminal_output +7680,5733805,"TERMINAL",0,0,"103",,terminal_output +7681,5734834,"TERMINAL",0,0,"14",,terminal_output +7682,5735847,"TERMINAL",0,0,"25",,terminal_output +7683,5736986,"TERMINAL",0,0,"36",,terminal_output +7684,5737994,"TERMINAL",0,0,"47",,terminal_output 
+7685,5739028,"TERMINAL",0,0,"58",,terminal_output +7686,5740105,"TERMINAL",0,0,"69",,terminal_output +7687,5741178,"TERMINAL",0,0,"720",,terminal_output +7688,5742203,"TERMINAL",0,0,"82",,terminal_output +7689,5743226,"TERMINAL",0,0,"203",,terminal_output +7690,5744250,"TERMINAL",0,0,"14",,terminal_output +7691,5745285,"TERMINAL",0,0,"25",,terminal_output +7692,5746328,"TERMINAL",0,0,"36",,terminal_output +7693,5747374,"TERMINAL",0,0,"47",,terminal_output +7694,5748450,"TERMINAL",0,0,"58",,terminal_output +7695,5749472,"TERMINAL",0,0,"69",,terminal_output +7696,5750600,"TERMINAL",0,0,"730",,terminal_output +7697,5751623,"TERMINAL",0,0,"81",,terminal_output +7698,5752668,"TERMINAL",0,0,"92",,terminal_output +7699,5753671,"TERMINAL",0,0,"303",,terminal_output +7700,5755311,"TERMINAL",0,0,"15",,terminal_output +7701,5756311,"TERMINAL",0,0,"36",,terminal_output +7702,5757359,"TERMINAL",0,0,"47",,terminal_output +7703,5758401,"TERMINAL",0,0,"58",,terminal_output +7704,5759445,"TERMINAL",0,0,"69",,terminal_output +7705,5760533,"TERMINAL",0,0,"740",,terminal_output +7706,5761534,"TERMINAL",0,0,"81",,terminal_output +7707,5762588,"TERMINAL",0,0,"92",,terminal_output +7708,5763706,"TERMINAL",0,0,"403",,terminal_output +7709,5764732,"TERMINAL",0,0,"14",,terminal_output +7710,5765755,"TERMINAL",0,0,"25",,terminal_output +7711,5766881,"TERMINAL",0,0,"36",,terminal_output +7712,5767905,"TERMINAL",0,0,"47",,terminal_output +7713,5768865,"TERMINAL",0,0,"58",,terminal_output +7714,5769953,"TERMINAL",0,0,"69",,terminal_output +7715,5770948,"TERMINAL",0,0,"750",,terminal_output +7716,5772001,"TERMINAL",0,0,"81",,terminal_output +7717,5773035,"TERMINAL",0,0,"92",,terminal_output +7718,5774151,"TERMINAL",0,0,"503",,terminal_output +7719,5775176,"TERMINAL",0,0,"15",,terminal_output +7720,5776199,"TERMINAL",0,0,"36",,terminal_output +7721,5777224,"TERMINAL",0,0,"47",,terminal_output +7722,5778263,"TERMINAL",0,0,"58",,terminal_output +7723,5779375,"TERMINAL",0,0,"69",,terminal_output +7724,5780342,"TERMINAL",0,0,"76:00",,terminal_output +7725,5781392,"TERMINAL",0,0,"81",,terminal_output +7726,5782428,"TERMINAL",0,0,"92",,terminal_output +7727,5783572,"TERMINAL",0,0,"2:003",,terminal_output +7728,5784596,"TERMINAL",0,0,"14",,terminal_output +7729,5785572,"TERMINAL",0,0,"25",,terminal_output +7730,5786609,"TERMINAL",0,0,"36",,terminal_output +7731,5787670,"TERMINAL",0,0,"47",,terminal_output +7732,5788795,"TERMINAL",0,0,"58",,terminal_output +7733,5789819,"TERMINAL",0,0,"69",,terminal_output +7734,5790810,"TERMINAL",0,0,"710",,terminal_output +7735,5791891,"TERMINAL",0,0,"81",,terminal_output +7736,5792881,"TERMINAL",0,0,"92",,terminal_output +7737,5793934,"TERMINAL",0,0,"103",,terminal_output +7738,5795041,"TERMINAL",0,0,"14",,terminal_output +7739,5796067,"TERMINAL",0,0,"25",,terminal_output +7740,5797091,"TERMINAL",0,0,"36",,terminal_output +7741,5798217,"TERMINAL",0,0,"48",,terminal_output +7742,5799240,"TERMINAL",0,0,"69",,terminal_output +7743,5800202,"TERMINAL",0,0,"720",,terminal_output +7744,5801288,"TERMINAL",0,0,"81",,terminal_output +7745,5802291,"TERMINAL",0,0,"92",,terminal_output +7746,5803336,"TERMINAL",0,0,"203",,terminal_output +7747,5804384,"TERMINAL",0,0,"14",,terminal_output +7748,5805432,"TERMINAL",0,0,"25",,terminal_output +7749,5806510,"TERMINAL",0,0,"36",,terminal_output +7750,5807532,"TERMINAL",0,0,"47",,terminal_output +7751,5808661,"TERMINAL",0,0,"58",,terminal_output +7752,5809684,"TERMINAL",0,0,"69",,terminal_output +7753,5810708,"TERMINAL",0,0,"730",,terminal_output 
+7754,5811734,"TERMINAL",0,0,"81",,terminal_output +7755,5812757,"TERMINAL",0,0,"92",,terminal_output +7756,5813883,"TERMINAL",0,0,"303",,terminal_output +7757,5814908,"TERMINAL",0,0,"14",,terminal_output +7758,5815931,"TERMINAL",0,0,"25",,terminal_output +7759,5816955,"TERMINAL",0,0,"36",,terminal_output +7760,5817975,"TERMINAL",0,0,"47",,terminal_output +7761,5819106,"TERMINAL",0,0,"58",,terminal_output +7762,5820080,"TERMINAL",0,0,"69",,terminal_output +7763,5821153,"TERMINAL",0,0,"741",,terminal_output +7764,5822167,"TERMINAL",0,0,"92",,terminal_output +7765,5823283,"TERMINAL",0,0,"403",,terminal_output +7766,5824259,"TERMINAL",0,0,"14",,terminal_output +7767,5825328,"TERMINAL",0,0,"25",,terminal_output +7768,5826386,"TERMINAL",0,0,"36",,terminal_output +7769,5827504,"TERMINAL",0,0,"47",,terminal_output +7770,5828528,"TERMINAL",0,0,"58",,terminal_output +7771,5829495,"TERMINAL",0,0,"69",,terminal_output +7772,5830562,"TERMINAL",0,0,"750",,terminal_output +7773,5831606,"TERMINAL",0,0,"81",,terminal_output +7774,5832683,"TERMINAL",0,0,"92",,terminal_output +7775,5833721,"TERMINAL",0,0,"503",,terminal_output +7776,5834816,"TERMINAL",0,0,"14",,terminal_output +7777,5835767,"TERMINAL",0,0,"25",,terminal_output +7778,5836959,"TERMINAL",0,0,"36",,terminal_output +7779,5837873,"TERMINAL",0,0,"47",,terminal_output +7780,5838890,"TERMINAL",0,0,"58",,terminal_output +7781,5839952,"TERMINAL",0,0,"69",,terminal_output +7782,5840999,"TERMINAL",0,0,"77:00",,terminal_output +7783,5842070,"TERMINAL",0,0,"81",,terminal_output +7784,5843130,"TERMINAL",0,0,"92",,terminal_output +7785,5844156,"TERMINAL",0,0,"3:004",,terminal_output +7786,5845206,"TERMINAL",0,0,"25",,terminal_output +7787,5846280,"TERMINAL",0,0,"36",,terminal_output +7788,5847278,"TERMINAL",0,0,"47",,terminal_output +7789,5848348,"TERMINAL",0,0,"58",,terminal_output +7790,5849521,"TERMINAL",0,0,"69",,terminal_output +7791,5850462,"TERMINAL",0,0,"710",,terminal_output +7792,5851497,"TERMINAL",0,0,"81",,terminal_output +7793,5852514,"TERMINAL",0,0,"92",,terminal_output +7794,5853568,"TERMINAL",0,0,"103",,terminal_output +7795,5854595,"TERMINAL",0,0,"14",,terminal_output +7796,5855634,"TERMINAL",0,0,"25",,terminal_output +7797,5856709,"TERMINAL",0,0,"36",,terminal_output +7798,5857734,"TERMINAL",0,0,"47",,terminal_output +7799,5858759,"TERMINAL",0,0,"58",,terminal_output +7800,5859849,"TERMINAL",0,0,"69",,terminal_output +7801,5860891,"TERMINAL",0,0,"720",,terminal_output +7802,5861990,"TERMINAL",0,0,"81",,terminal_output +7803,5862980,"TERMINAL",0,0,"92",,terminal_output +7804,5864060,"TERMINAL",0,0,"203",,terminal_output +7805,5865084,"TERMINAL",0,0,"14",,terminal_output +7806,5866125,"TERMINAL",0,0,"25",,terminal_output +7807,5867142,"TERMINAL",0,0,"37",,terminal_output +7808,5868187,"TERMINAL",0,0,"58",,terminal_output +7809,5869283,"TERMINAL",0,0,"69",,terminal_output +7810,5870307,"TERMINAL",0,0,"730",,terminal_output +7811,5871420,"TERMINAL",0,0,"81",,terminal_output +7812,5872374,"TERMINAL",0,0,"92",,terminal_output +7813,5873482,"TERMINAL",0,0,"303",,terminal_output +7814,5874506,"TERMINAL",0,0,"14",,terminal_output +7815,5875530,"TERMINAL",0,0,"25",,terminal_output +7816,5876568,"TERMINAL",0,0,"36",,terminal_output +7817,5877604,"TERMINAL",0,0,"47",,terminal_output +7818,5878703,"TERMINAL",0,0,"58",,terminal_output +7819,5879699,"TERMINAL",0,0,"69",,terminal_output +7820,5880854,"TERMINAL",0,0,"740",,terminal_output +7821,5881981,"TERMINAL",0,0,"81",,terminal_output +7822,5883084,"TERMINAL",0,0,"92",,terminal_output 
+7823,5884012,"TERMINAL",0,0,"403",,terminal_output +7824,5885155,"TERMINAL",0,0,"14",,terminal_output +7825,5886180,"TERMINAL",0,0,"26",,terminal_output +7826,5887202,"TERMINAL",0,0,"47",,terminal_output +7827,5888201,"TERMINAL",0,0,"58",,terminal_output +7828,5889246,"TERMINAL",0,0,"69",,terminal_output +7829,5890377,"TERMINAL",0,0,"750",,terminal_output +7830,5891345,"TERMINAL",0,0,"81",,terminal_output +7831,5892426,"TERMINAL",0,0,"92",,terminal_output +7832,5893450,"TERMINAL",0,0,"503",,terminal_output +7833,5894577,"TERMINAL",0,0,"14",,terminal_output +7834,5895560,"TERMINAL",0,0,"25",,terminal_output +7835,5896605,"TERMINAL",0,0,"36",,terminal_output +7836,5897654,"TERMINAL",0,0,"47",,terminal_output +7837,5898690,"TERMINAL",0,0,"58",,terminal_output +7838,5899801,"TERMINAL",0,0,"69",,terminal_output +7839,5900825,"TERMINAL",0,0,"78:00",,terminal_output +7840,5901846,"TERMINAL",0,0,"81",,terminal_output +7841,5902876,"TERMINAL",0,0,"92",,terminal_output +7842,5903912,"TERMINAL",0,0,"4:003",,terminal_output +7843,5905021,"TERMINAL",0,0,"14",,terminal_output +7844,5906007,"TERMINAL",0,0,"25",,terminal_output +7845,5907068,"TERMINAL",0,0,"36",,terminal_output +7846,5908085,"TERMINAL",0,0,"47",,terminal_output +7847,5909218,"TERMINAL",0,0,"59",,terminal_output +7848,5910244,"TERMINAL",0,0,"710",,terminal_output +7849,5911268,"TERMINAL",0,0,"81",,terminal_output +7850,5912292,"TERMINAL",0,0,"92",,terminal_output +7851,5913312,"TERMINAL",0,0,"103",,terminal_output +7852,5914441,"TERMINAL",0,0,"14",,terminal_output +7853,5915483,"TERMINAL",0,0,"25",,terminal_output +7854,5916500,"TERMINAL",0,0,"36",,terminal_output +7855,5917523,"TERMINAL",0,0,"47",,terminal_output +7856,5918549,"TERMINAL",0,0,"58",,terminal_output +7857,5919609,"TERMINAL",0,0,"69",,terminal_output +7858,5920645,"TERMINAL",0,0,"720",,terminal_output +7859,5921716,"TERMINAL",0,0,"81",,terminal_output +7860,5922840,"TERMINAL",0,0,"92",,terminal_output +7861,5923793,"TERMINAL",0,0,"203",,terminal_output +7862,5924887,"TERMINAL",0,0,"14",,terminal_output +7863,5925885,"TERMINAL",0,0,"25",,terminal_output +7864,5926956,"TERMINAL",0,0,"36",,terminal_output +7865,5927986,"TERMINAL",0,0,"47",,terminal_output +7866,5929051,"TERMINAL",0,0,"58",,terminal_output +7867,5930070,"TERMINAL",0,0,"69",,terminal_output +7868,5931116,"TERMINAL",0,0,"731",,terminal_output +7869,5932260,"TERMINAL",0,0,"92",,terminal_output +7870,5933233,"TERMINAL",0,0,"303",,terminal_output +7871,5934272,"TERMINAL",0,0,"14",,terminal_output +7872,5935291,"TERMINAL",0,0,"25",,terminal_output +7873,5936473,"TERMINAL",0,0,"36",,terminal_output +7874,5937534,"TERMINAL",0,0,"47",,terminal_output +7875,5938504,"TERMINAL",0,0,"58",,terminal_output +7876,5939551,"TERMINAL",0,0,"69",,terminal_output +7877,5940563,"TERMINAL",0,0,"740",,terminal_output +7878,5941553,"TERMINAL",0,0,"81",,terminal_output +7879,5942713,"TERMINAL",0,0,"92",,terminal_output +7880,5943685,"TERMINAL",0,0,"403",,terminal_output +7881,5944775,"TERMINAL",0,0,"14",,terminal_output +7882,5945790,"TERMINAL",0,0,"25",,terminal_output +7883,5946803,"TERMINAL",0,0,"36",,terminal_output +7884,5947884,"TERMINAL",0,0,"47",,terminal_output +7885,5948960,"TERMINAL",0,0,"58",,terminal_output +7886,5949922,"TERMINAL",0,0,"69",,terminal_output +7887,5950981,"TERMINAL",0,0,"750",,terminal_output +7888,5952014,"TERMINAL",0,0,"81",,terminal_output +7889,5953186,"TERMINAL",0,0,"92",,terminal_output +7890,5954089,"TERMINAL",0,0,"503",,terminal_output +7891,5955227,"TERMINAL",0,0,"15",,terminal_output 
+7892,5956279,"TERMINAL",0,0,"36",,terminal_output +7893,5957233,"TERMINAL",0,0,"47",,terminal_output +7894,5958268,"TERMINAL",0,0,"58",,terminal_output +7895,5959317,"TERMINAL",0,0,"69",,terminal_output +7896,5960420,"TERMINAL",0,0,"79:00",,terminal_output +7897,5961469,"TERMINAL",0,0,"81",,terminal_output +7898,5962502,"TERMINAL",0,0,"92",,terminal_output +7899,5963528,"TERMINAL",0,0,"5:003",,terminal_output +7900,5964580,"TERMINAL",0,0,"14",,terminal_output +7901,5965611,"TERMINAL",0,0,"25",,terminal_output +7902,5966669,"TERMINAL",0,0,"36",,terminal_output +7903,5967711,"TERMINAL",0,0,"47",,terminal_output +7904,5968780,"TERMINAL",0,0,"58",,terminal_output +7905,5969812,"TERMINAL",0,0,"69",,terminal_output +7906,5970862,"TERMINAL",0,0,"710",,terminal_output +7907,5971899,"TERMINAL",0,0,"81",,terminal_output +7908,5973052,"TERMINAL",0,0,"92",,terminal_output +7909,5973973,"TERMINAL",0,0,"103",,terminal_output +7910,5975011,"TERMINAL",0,0,"14",,terminal_output +7911,5976064,"TERMINAL",0,0,"25",,terminal_output +7912,5977096,"TERMINAL",0,0,"36",,terminal_output +7913,5978241,"TERMINAL",0,0,"48",,terminal_output +7914,5979185,"TERMINAL",0,0,"69",,terminal_output +7915,5980287,"TERMINAL",0,0,"720",,terminal_output +7916,5981278,"TERMINAL",0,0,"81",,terminal_output +7917,5982333,"TERMINAL",0,0,"92",,terminal_output +7918,5983364,"TERMINAL",0,0,"203",,terminal_output +7919,5984485,"TERMINAL",0,0,"14",,terminal_output +7920,5985508,"TERMINAL",0,0,"25",,terminal_output +7921,5986580,"TERMINAL",0,0,"36",,terminal_output +7922,5987535,"TERMINAL",0,0,"47",,terminal_output +7923,5988684,"TERMINAL",0,0,"58",,terminal_output +7924,5989630,"TERMINAL",0,0,"69",,terminal_output +7925,5990730,"TERMINAL",0,0,"730",,terminal_output +7926,5991754,"TERMINAL",0,0,"81",,terminal_output +7927,5992775,"TERMINAL",0,0,"92",,terminal_output +7928,5993793,"TERMINAL",0,0,"303",,terminal_output +7929,5994837,"TERMINAL",0,0,"14",,terminal_output +7930,5995953,"TERMINAL",0,0,"25",,terminal_output +7931,5996978,"TERMINAL",0,0,"36",,terminal_output +7932,5997955,"TERMINAL",0,0,"47",,terminal_output +7933,5999000,"TERMINAL",0,0,"58",,terminal_output +7934,6000082,"TERMINAL",0,0,"69",,terminal_output +7935,6001082,"TERMINAL",0,0,"740",,terminal_output +7936,6002611,"TERMINAL",0,0,"82",,terminal_output +7937,6003572,"TERMINAL",0,0,"403",,terminal_output +7938,6004656,"TERMINAL",0,0,"14",,terminal_output +7939,6005691,"TERMINAL",0,0,"25",,terminal_output +7940,6006921,"TERMINAL",0,0,"36",,terminal_output +7941,6007832,"TERMINAL",0,0,"47",,terminal_output +7942,6008856,"TERMINAL",0,0,"58",,terminal_output +7943,6009818,"TERMINAL",0,0,"69",,terminal_output +7944,6010912,"TERMINAL",0,0,"750",,terminal_output +7945,6011921,"TERMINAL",0,0,"81",,terminal_output +7946,6012963,"TERMINAL",0,0,"92",,terminal_output +7947,6014008,"TERMINAL",0,0,"503",,terminal_output +7948,6015057,"TERMINAL",0,0,"14",,terminal_output +7949,6016126,"TERMINAL",0,0,"26",,terminal_output +7950,6017254,"TERMINAL",0,0,"47",,terminal_output +7951,6018277,"TERMINAL",0,0,"58",,terminal_output +7952,6019246,"TERMINAL",0,0,"69",,terminal_output +7953,6020325,"TERMINAL",0,0,"740:00",,terminal_output +7954,6021348,"TERMINAL",0,0,"81",,terminal_output +7955,6022476,"TERMINAL",0,0,"92",,terminal_output +7956,6023499,"TERMINAL",0,0,"6:003",,terminal_output +7957,6024523,"TERMINAL",0,0,"14",,terminal_output +7958,6025520,"TERMINAL",0,0,"25",,terminal_output +7959,6026573,"TERMINAL",0,0,"36",,terminal_output +7960,6027697,"TERMINAL",0,0,"47",,terminal_output 
+7961,6028724,"TERMINAL",0,0,"58",,terminal_output +7962,6029747,"TERMINAL",0,0,"69",,terminal_output +7963,6030769,"TERMINAL",0,0,"710",,terminal_output +7964,6031896,"TERMINAL",0,0,"81",,terminal_output +7965,6032877,"TERMINAL",0,0,"92",,terminal_output +7966,6033946,"TERMINAL",0,0,"103",,terminal_output +7967,6034964,"TERMINAL",0,0,"14",,terminal_output +7968,6036014,"TERMINAL",0,0,"25",,terminal_output +7969,6037052,"TERMINAL",0,0,"36",,terminal_output +7970,6038093,"TERMINAL",0,0,"47",,terminal_output +7971,6039167,"TERMINAL",0,0,"59",,terminal_output +7972,6040191,"TERMINAL",0,0,"720",,terminal_output +7973,6041317,"TERMINAL",0,0,"81",,terminal_output +7974,6042340,"TERMINAL",0,0,"92",,terminal_output +7975,6043348,"TERMINAL",0,0,"203",,terminal_output +7976,6044369,"TERMINAL",0,0,"14",,terminal_output +7977,6045515,"TERMINAL",0,0,"25",,terminal_output +7978,6046540,"TERMINAL",0,0,"36",,terminal_output +7979,6047564,"TERMINAL",0,0,"47",,terminal_output +7980,6048588,"TERMINAL",0,0,"58",,terminal_output +7981,6049611,"TERMINAL",0,0,"69",,terminal_output +7982,6050636,"TERMINAL",0,0,"730",,terminal_output +7983,6051761,"TERMINAL",0,0,"81",,terminal_output +7984,6052788,"TERMINAL",0,0,"92",,terminal_output +7985,6053809,"TERMINAL",0,0,"303",,terminal_output +7986,6054834,"TERMINAL",0,0,"14",,terminal_output +7987,6055858,"TERMINAL",0,0,"25",,terminal_output +7988,6056900,"TERMINAL",0,0,"36",,terminal_output +7989,6057979,"TERMINAL",0,0,"47",,terminal_output +7990,6058984,"TERMINAL",0,0,"58",,terminal_output +7991,6060027,"TERMINAL",0,0,"69",,terminal_output +7992,6061077,"TERMINAL",0,0,"740",,terminal_output +7993,6062121,"TERMINAL",0,0,"82",,terminal_output +7994,6063231,"TERMINAL",0,0,"403",,terminal_output +7995,6064254,"TERMINAL",0,0,"14",,terminal_output +7996,6065278,"TERMINAL",0,0,"25",,terminal_output +7997,6066330,"TERMINAL",0,0,"36",,terminal_output +7998,6067429,"TERMINAL",0,0,"47",,terminal_output +7999,6068427,"TERMINAL",0,0,"58",,terminal_output +8000,6069579,"TERMINAL",0,0,"69",,terminal_output +8001,6070604,"TERMINAL",0,0,"750",,terminal_output +8002,6071569,"TERMINAL",0,0,"81",,terminal_output +8003,6072652,"TERMINAL",0,0,"92",,terminal_output +8004,6073695,"TERMINAL",0,0,"503",,terminal_output +8005,6074707,"TERMINAL",0,0,"14",,terminal_output +8006,6075836,"TERMINAL",0,0,"25",,terminal_output +8007,6076852,"TERMINAL",0,0,"36",,terminal_output +8008,6077840,"TERMINAL",0,0,"47",,terminal_output +8009,6078899,"TERMINAL",0,0,"58",,terminal_output +8010,6079960,"TERMINAL",0,0,"69",,terminal_output +8011,6080994,"TERMINAL",0,0,"71:00",,terminal_output +8012,6082031,"TERMINAL",0,0,"81",,terminal_output +8013,6083074,"TERMINAL",0,0,"92",,terminal_output +8014,6084225,"TERMINAL",0,0,"7:004",,terminal_output +8015,6085172,"TERMINAL",0,0,"25",,terminal_output +8016,6086219,"TERMINAL",0,0,"36",,terminal_output +8017,6087295,"TERMINAL",0,0,"47",,terminal_output +8018,6088311,"TERMINAL",0,0,"58",,terminal_output +8019,6089510,"TERMINAL",0,0,"69",,terminal_output +8020,6090468,"TERMINAL",0,0,"710",,terminal_output +8021,6091493,"TERMINAL",0,0,"81",,terminal_output +8022,6092516,"TERMINAL",0,0,"92",,terminal_output +8023,6093646,"TERMINAL",0,0,"103",,terminal_output +8024,6094623,"TERMINAL",0,0,"14",,terminal_output +8025,6095639,"TERMINAL",0,0,"25",,terminal_output +8026,6096716,"TERMINAL",0,0,"36",,terminal_output +8027,6097713,"TERMINAL",0,0,"47",,terminal_output +8028,6098757,"TERMINAL",0,0,"58",,terminal_output +8029,6099866,"TERMINAL",0,0,"69",,terminal_output 
+8030,6100851,"TERMINAL",0,0,"720",,terminal_output +8031,6101942,"TERMINAL",0,0,"81",,terminal_output +8032,6102962,"TERMINAL",0,0,"92",,terminal_output +8033,6103996,"TERMINAL",0,0,"203",,terminal_output +8034,6105043,"TERMINAL",0,0,"14",,terminal_output +8035,6106097,"TERMINAL",0,0,"25",,terminal_output +8036,6107137,"TERMINAL",0,0,"37",,terminal_output +8037,6108286,"TERMINAL",0,0,"58",,terminal_output +8038,6109310,"TERMINAL",0,0,"69",,terminal_output +8039,6110335,"TERMINAL",0,0,"730",,terminal_output +8040,6111360,"TERMINAL",0,0,"81",,terminal_output +8041,6112431,"TERMINAL",0,0,"92",,terminal_output +8042,6113415,"TERMINAL",0,0,"303",,terminal_output +8043,6114509,"TERMINAL",0,0,"14",,terminal_output +8044,6115523,"TERMINAL",0,0,"25",,terminal_output +8045,6116554,"TERMINAL",0,0,"36",,terminal_output +8046,6117607,"TERMINAL",0,0,"47",,terminal_output +8047,6118785,"TERMINAL",0,0,"58",,terminal_output +8048,6119710,"TERMINAL",0,0,"69",,terminal_output +8049,6120787,"TERMINAL",0,0,"740",,terminal_output +8050,6121814,"TERMINAL",0,0,"81",,terminal_output +8051,6123084,"TERMINAL",0,0,"92",,terminal_output +8052,6124135,"TERMINAL",0,0,"404",,terminal_output +8053,6125166,"TERMINAL",0,0,"25",,terminal_output +8054,6126530,"TERMINAL",0,0,"36",,terminal_output +8055,6127548,"TERMINAL",0,0,"47",,terminal_output +8056,6128597,"TERMINAL",0,0,"58",,terminal_output +8057,6129580,"TERMINAL",0,0,"69",,terminal_output +8058,6130611,"TERMINAL",0,0,"750",,terminal_output +8059,6131710,"TERMINAL",0,0,"81",,terminal_output +8060,6132862,"TERMINAL",0,0,"92",,terminal_output +8061,6133815,"TERMINAL",0,0,"503",,terminal_output +8062,6134851,"TERMINAL",0,0,"14",,terminal_output +8063,6135968,"TERMINAL",0,0,"25",,terminal_output +8064,6136968,"TERMINAL",0,0,"36",,terminal_output +8065,6137982,"TERMINAL",0,0,"47",,terminal_output +8066,6138981,"TERMINAL",0,0,"58",,terminal_output +8067,6140069,"TERMINAL",0,0,"69",,terminal_output +8068,6141165,"TERMINAL",0,0,"72:00",,terminal_output +8069,6142101,"TERMINAL",0,0,"81",,terminal_output +8070,6143151,"TERMINAL",0,0,"8:003",,terminal_output +8071,6144190,"TERMINAL",0,0,"14",,terminal_output +8072,6145232,"TERMINAL",0,0,"25",,terminal_output +8073,6146271,"TERMINAL",0,0,"36",,terminal_output +8074,6147405,"TERMINAL",0,0,"47",,terminal_output +8075,6148440,"TERMINAL",0,0,"58",,terminal_output +8076,6149452,"TERMINAL",0,0,"69",,terminal_output +8077,6150477,"TERMINAL",0,0,"710",,terminal_output +8078,6151493,"TERMINAL",0,0,"81",,terminal_output +8079,6152536,"TERMINAL",0,0,"92",,terminal_output +8080,6153651,"TERMINAL",0,0,"103",,terminal_output +8081,6154675,"TERMINAL",0,0,"14",,terminal_output +8082,6155698,"TERMINAL",0,0,"25",,terminal_output +8083,6156825,"TERMINAL",0,0,"36",,terminal_output +8084,6157850,"TERMINAL",0,0,"47",,terminal_output +8085,6158875,"TERMINAL",0,0,"58",,terminal_output +8086,6159862,"TERMINAL",0,0,"69",,terminal_output +8087,6160921,"TERMINAL",0,0,"720",,terminal_output +8088,6162063,"TERMINAL",0,0,"81",,terminal_output +8089,6162997,"TERMINAL",0,0,"92",,terminal_output +8090,6164096,"TERMINAL",0,0,"203",,terminal_output +8091,6165121,"TERMINAL",0,0,"14",,terminal_output +8092,6166148,"TERMINAL",0,0,"26",,terminal_output +8093,6167181,"TERMINAL",0,0,"47",,terminal_output +8094,6168251,"TERMINAL",0,0,"58",,terminal_output +8095,6169270,"TERMINAL",0,0,"69",,terminal_output +8096,6170342,"TERMINAL",0,0,"730",,terminal_output +8097,6171469,"TERMINAL",0,0,"81",,terminal_output +8098,6172408,"TERMINAL",0,0,"92",,terminal_output 
+8099,6173517,"TERMINAL",0,0,"303",,terminal_output +8100,6174541,"TERMINAL",0,0,"14",,terminal_output +8101,6175565,"TERMINAL",0,0,"25",,terminal_output +8102,6176567,"TERMINAL",0,0,"36",,terminal_output +8103,6177715,"TERMINAL",0,0,"47",,terminal_output +8104,6178740,"TERMINAL",0,0,"58",,terminal_output +8105,6179764,"TERMINAL",0,0,"69",,terminal_output +8106,6180801,"TERMINAL",0,0,"740",,terminal_output +8107,6181811,"TERMINAL",0,0,"81",,terminal_output +8108,6182831,"TERMINAL",0,0,"92",,terminal_output +8109,6183963,"TERMINAL",0,0,"403",,terminal_output +8110,6184985,"TERMINAL",0,0,"14",,terminal_output +8111,6186012,"TERMINAL",0,0,"25",,terminal_output +8112,6187107,"TERMINAL",0,0,"36",,terminal_output +8113,6188102,"TERMINAL",0,0,"47",,terminal_output +8114,6189080,"TERMINAL",0,0,"58",,terminal_output +8115,6189874,"genie.py",3818,0,"",python,selection_mouse +8116,6190130,"TERMINAL",0,0,"650",,terminal_output +8117,6190605,"genie.py",3822,0,"",python,selection_mouse +8118,6190608,"genie.py",3821,0,"",python,selection_command +8119,6191174,"TERMINAL",0,0,"81",,terminal_output +8120,6191484,"genie.py",3802,0,"",python,selection_mouse +8121,6191495,"genie.py",3801,0,"",python,selection_command +8122,6192218,"TERMINAL",0,0,"92",,terminal_output +8123,6192545,"genie.py",3802,0,"\n ",python,content +8124,6192929,"genie.py",3811,0,"i",python,content +8125,6192930,"genie.py",3812,0,"",python,selection_keyboard +8126,6193112,"genie.py",3812,0,"n",python,content +8127,6193113,"genie.py",3813,0,"",python,selection_keyboard +8128,6193190,"genie.py",3813,0,"i",python,content +8129,6193191,"genie.py",3814,0,"",python,selection_keyboard +8130,6193314,"genie.py",3814,0,"t",python,content +8131,6193316,"genie.py",3815,0,"",python,selection_keyboard +8132,6193317,"TERMINAL",0,0,"503",,terminal_output +8133,6193455,"genie.py",3815,0,"i",python,content +8134,6193457,"genie.py",3816,0,"",python,selection_keyboard +8135,6193983,"genie.py",3816,0,"a",python,content +8136,6193983,"genie.py",3817,0,"",python,selection_keyboard +8137,6194090,"genie.py",3817,0,"l",python,content +8138,6194090,"genie.py",3818,0,"",python,selection_keyboard +8139,6194311,"TERMINAL",0,0,"14",,terminal_output +8140,6194373,"genie.py",3818,0,"_",python,content +8141,6194374,"genie.py",3819,0,"",python,selection_keyboard +8142,6194898,"genie.py",3819,0,"T",python,content +8143,6194898,"genie.py",3820,0,"",python,selection_keyboard +8144,6195220,"genie.py",3820,0," ",python,content +8145,6195220,"genie.py",3821,0,"",python,selection_keyboard +8146,6195326,"genie.py",3821,0,"=",python,content +8147,6195326,"genie.py",3822,0,"",python,selection_keyboard +8148,6195430,"genie.py",3822,0," ",python,content +8149,6195430,"genie.py",3823,0,"",python,selection_keyboard +8150,6195430,"TERMINAL",0,0,"25",,terminal_output +8151,6195692,"genie.py",3823,0,"T",python,content +8152,6195693,"genie.py",3824,0,"",python,selection_keyboard +8153,6196242,"genie.py",3823,0,"",python,selection_command +8154,6196405,"genie.py",3843,0,"",python,selection_command +8155,6196480,"TERMINAL",0,0,"36",,terminal_output +8156,6196751,"genie.py",3842,0,"",python,selection_command +8157,6196905,"genie.py",3841,0,"",python,selection_command +8158,6197025,"genie.py",3840,0,"",python,selection_command +8159,6197187,"genie.py",3839,0,"",python,selection_command +8160,6197307,"genie.py",3838,0,"",python,selection_command +8161,6197429,"genie.py",3837,0,"",python,selection_command +8162,6197462,"TERMINAL",0,0,"47",,terminal_output 
+8163,6197606,"genie.py",3836,0,"",python,selection_command +8164,6197733,"genie.py",3835,0,"",python,selection_command +8165,6197885,"genie.py",3834,0,"",python,selection_command +8166,6198049,"genie.py",3833,0,"",python,selection_command +8167,6198366,"genie.py",3833,5,"",python,content +8168,6198502,"TERMINAL",0,0,"58",,terminal_output +8169,6198705,"genie.py",3833,1,"",python,content +8170,6198932,"genie.py",3833,1,"",python,content +8171,6199374,"genie.py",3833,1,"",python,content +8172,6199542,"TERMINAL",0,0,"69",,terminal_output +8173,6199752,"genie.py",3833,1,"",python,content +8174,6200122,"genie.py",3833,2,"",python,content +8175,6200185,"genie.py",3832,0,"",python,selection_command +8176,6200634,"genie.py",3833,0,"",python,selection_command +8177,6200655,"TERMINAL",0,0,"73:00",,terminal_output +8178,6201122,"genie.py",3833,0,"f",python,content +8179,6201123,"genie.py",3834,0,"",python,selection_keyboard +8180,6201207,"genie.py",3834,0,"p",python,content +8181,6201208,"genie.py",3835,0,"",python,selection_keyboard +8182,6201308,"genie.py",3835,0,"r",python,content +8183,6201310,"genie.py",3836,0,"",python,selection_keyboard +8184,6201445,"genie.py",3836,0," ",python,content +8185,6201446,"genie.py",3837,0,"",python,selection_keyboard +8186,6201627,"TERMINAL",0,0,"81",,terminal_output +8187,6202138,"genie.py",3836,1,"",python,content +8188,6202276,"genie.py",3835,1,"",python,content +8189,6202400,"genie.py",3834,1,"",python,content +8190,6202574,"genie.py",3834,0,"p",python,content +8191,6202575,"genie.py",3835,0,"",python,selection_keyboard +8192,6202674,"genie.py",3835,0,"r",python,content +8193,6202675,"genie.py",3836,0,"",python,selection_keyboard +8194,6202683,"TERMINAL",0,0,"92",,terminal_output +8195,6203073,"genie.py",3835,1,"",python,content +8196,6203198,"genie.py",3834,1,"",python,content +8197,6203432,"genie.py",3834,0,"o",python,content +8198,6203433,"genie.py",3835,0,"",python,selection_keyboard +8199,6203461,"genie.py",3835,0,"r",python,content +8200,6203462,"genie.py",3836,0,"",python,selection_keyboard +8201,6203559,"genie.py",3836,0," ",python,content +8202,6203560,"genie.py",3837,0,"",python,selection_keyboard +8203,6203778,"TERMINAL",0,0,"9:003",,terminal_output +8204,6203845,"genie.py",3837,0,"Z",python,content +8205,6203846,"genie.py",3838,0,"",python,selection_keyboard +8206,6204024,"genie.py",3838,0," ",python,content +8207,6204025,"genie.py",3839,0,"",python,selection_keyboard +8208,6204145,"genie.py",3839,0,"i",python,content +8209,6204146,"genie.py",3840,0,"",python,selection_keyboard +8210,6204253,"genie.py",3840,0,"n",python,content +8211,6204253,"genie.py",3841,0,"",python,selection_keyboard +8212,6204530,"genie.py",3840,1,"",python,content +8213,6204790,"genie.py",3839,1,"",python,content +8214,6204918,"genie.py",3838,1,"",python,content +8215,6204981,"genie.py",3837,1,"",python,content +8216,6204996,"TERMINAL",0,0,"14",,terminal_output +8217,6205728,"genie.py",3837,0,"T",python,content +8218,6205729,"genie.py",3838,0,"",python,selection_keyboard +8219,6205805,"genie.py",3838,0," ",python,content +8220,6205806,"genie.py",3839,0,"",python,selection_keyboard +8221,6205846,"TERMINAL",0,0,"25",,terminal_output +8222,6205920,"genie.py",3839,0,"i",python,content +8223,6205921,"genie.py",3840,0,"",python,selection_keyboard +8224,6206079,"genie.py",3840,0,"n",python,content +8225,6206080,"genie.py",3841,0,"",python,selection_keyboard +8226,6206108,"genie.py",3841,0," ",python,content +8227,6206108,"genie.py",3842,0,"",python,selection_keyboard 
+8228,6206410,"genie.py",3842,0,"r",python,content +8229,6206411,"genie.py",3843,0,"",python,selection_keyboard +8230,6206692,"genie.py",3843,0,"a",python,content +8231,6206693,"genie.py",3844,0,"",python,selection_keyboard +8232,6206757,"genie.py",3844,0,"n",python,content +8233,6206758,"genie.py",3845,0,"",python,selection_keyboard +8234,6206926,"genie.py",3845,0,"g",python,content +8235,6206927,"genie.py",3846,0,"",python,selection_keyboard +8236,6207017,"TERMINAL",0,0,"36",,terminal_output +8237,6207296,"genie.py",3846,0,"e",python,content +8238,6207297,"genie.py",3847,0,"",python,selection_keyboard +8239,6207919,"genie.py",3847,0,"()",python,content +8240,6207920,"genie.py",3848,0,"",python,selection_keyboard +8241,6207970,"TERMINAL",0,0,"47",,terminal_output +8242,6208402,"genie.py",3848,0,"T",python,content +8243,6208403,"genie.py",3849,0,"",python,selection_keyboard +8244,6208728,"genie.py",3849,0,",",python,content +8245,6208729,"genie.py",3850,0,"",python,selection_keyboard +8246,6208808,"genie.py",3850,0," ",python,content +8247,6208809,"genie.py",3851,0,"",python,selection_keyboard +8248,6208904,"genie.py",3851,0,"S",python,content +8249,6208905,"genie.py",3852,0,"",python,selection_keyboard +8250,6208995,"TERMINAL",0,0,"58",,terminal_output +8251,6209433,"genie.py",3851,0,"",python,selection_command +8252,6210010,"TERMINAL",0,0,"69",,terminal_output +8253,6210175,"genie.py",3852,0,"",python,selection_command +8254,6210392,"genie.py",3852,0,":",python,content +8255,6210392,"genie.py",3853,0,"",python,selection_keyboard +8256,6210940,"genie.py",3852,1,"",python,content +8257,6211116,"TERMINAL",0,0,"710",,terminal_output +8258,6211431,"genie.py",3851,0,"",python,selection_command +8259,6211905,"genie.py",3852,0,"",python,selection_command +8260,6212167,"genie.py",3852,0,"_",python,content +8261,6212168,"genie.py",3853,0,"",python,selection_keyboard +8262,6212187,"TERMINAL",0,0,"81",,terminal_output +8263,6212672,"genie.py",3852,1,"",python,content +8264,6212842,"genie.py",3851,0,"",python,selection_command +8265,6213169,"TERMINAL",0,0,"103",,terminal_output +8266,6213346,"genie.py",3852,0,"",python,selection_command +8267,6213618,"genie.py",3852,0,":",python,content +8268,6213619,"genie.py",3853,0,"",python,selection_keyboard +8269,6213961,"genie.py",3852,0,"",python,selection_command +8270,6214185,"TERMINAL",0,0,"14",,terminal_output +8271,6214282,"genie.py",3851,0,"",python,selection_command +8272,6215093,"genie.py",3852,0,"",python,selection_command +8273,6215234,"TERMINAL",0,0,"25",,terminal_output +8274,6215909,"genie.py",3851,1,"",python,content +8275,6216307,"TERMINAL",0,0,"36",,terminal_output +8276,6216517,"genie.py",3853,0,"",python,selection_command +8277,6216764,"genie.py",3853,0,":",python,content +8278,6216764,"genie.py",3854,0,"",python,selection_keyboard +8279,6216919,"genie.py",3853,0,"",python,selection_command +8280,6217312,"TERMINAL",0,0,"47",,terminal_output +8281,6218392,"TERMINAL",0,0,"58",,terminal_output +8282,6218866,"genie.py",3852,0,"",python,selection_command +8283,6218991,"genie.py",3851,0,"",python,selection_command +8284,6219401,"TERMINAL",0,0,"69",,terminal_output +8285,6220518,"TERMINAL",0,0,"720",,terminal_output +8286,6220973,"genie.py",3851,1,"S",python,content +8287,6221543,"TERMINAL",0,0,"81",,terminal_output +8288,6222555,"TERMINAL",0,0,"92",,terminal_output +8289,6223619,"TERMINAL",0,0,"203",,terminal_output +8290,6224511,"genie.py",3849,0,"",python,selection_mouse +8291,6224648,"TERMINAL",0,0,"14",,terminal_output 
+8292,6225533,"genie.py",3848,1,"",python,content +8293,6225670,"TERMINAL",0,0,"25",,terminal_output +8294,6225758,"genie.py",3848,0,"i",python,content +8295,6225759,"genie.py",3849,0,"",python,selection_keyboard +8296,6225859,"genie.py",3849,0,"n",python,content +8297,6225861,"genie.py",3850,0,"",python,selection_keyboard +8298,6226764,"TERMINAL",0,0,"36",,terminal_output +8299,6227388,"genie.py",3848,2,"initial_T",python,content +8300,6227777,"TERMINAL",0,0,"47",,terminal_output +8301,6228835,"TERMINAL",0,0,"58",,terminal_output +8302,6229943,"TERMINAL",0,0,"69",,terminal_output +8303,6230962,"TERMINAL",0,0,"730",,terminal_output +8304,6231709,"genie.py",5329,0,"",python,selection_mouse +8305,6231871,"genie.py",5328,1," ",python,selection_mouse +8306,6232006,"genie.py",5315,18," T +=1\n",python,selection_mouse +8307,6232038,"TERMINAL",0,0,"81",,terminal_output +8308,6232500,"genie.py",5315,18,"",python,content +8309,6233032,"TERMINAL",0,0,"92",,terminal_output +8310,6234082,"TERMINAL",0,0,"303",,terminal_output +8311,6235162,"TERMINAL",0,0,"14",,terminal_output +8312,6236159,"TERMINAL",0,0,"26",,terminal_output +8313,6237185,"TERMINAL",0,0,"47",,terminal_output +8314,6237274,"genie.py",5375,0,"",python,selection_mouse +8315,6237928,"genie.py",5315,0,"",python,selection_mouse +8316,6238253,"TERMINAL",0,0,"58",,terminal_output +8317,6238758,"genie.py",5429,0,"",python,selection_mouse +8318,6239289,"TERMINAL",0,0,"69",,terminal_output +8319,6239670,"genie.py",5325,0,"",python,selection_mouse +8320,6239820,"genie.py",5324,16,"new_frame_pixels",python,selection_mouse +8321,6240057,"genie.py",5324,52,"new_frame_pixels = self.tokenizer.decode(\n ",python,selection_mouse +8322,6240057,"genie.py",5324,78,"new_frame_pixels = self.tokenizer.decode(\n token_idxs,\n ",python,selection_mouse +8323,6240058,"genie.py",5324,124,"new_frame_pixels = self.tokenizer.decode(\n token_idxs,\n video_hw=batch[""videos""].shape[2:4],\n )",python,selection_mouse +8324,6240130,"genie.py",5324,156,"new_frame_pixels = self.tokenizer.decode(\n token_idxs,\n video_hw=batch[""videos""].shape[2:4],\n )\n return new_frame_pixels",python,selection_mouse +8325,6240321,"TERMINAL",0,0,"740",,terminal_output +8326,6240546,"genie.py",5467,0,"",python,selection_mouse +8327,6241410,"TERMINAL",0,0,"81",,terminal_output +8328,6242416,"genie.py",5295,0,"",python,selection_mouse +8329,6242477,"TERMINAL",0,0,"92",,terminal_output +8330,6242583,"genie.py",5287,10,"token_idxs",python,selection_mouse +8331,6243944,"TERMINAL",0,0,"403",,terminal_output +8332,6245041,"TERMINAL",0,0,"14",,terminal_output +8333,6246119,"TERMINAL",0,0,"25",,terminal_output +8334,6247102,"TERMINAL",0,0,"36",,terminal_output +8335,6248179,"TERMINAL",0,0,"48",,terminal_output +8336,6249192,"TERMINAL",0,0,"69",,terminal_output +8337,6250221,"TERMINAL",0,0,"750",,terminal_output +8338,6251267,"TERMINAL",0,0,"81",,terminal_output +8339,6252317,"TERMINAL",0,0,"92",,terminal_output +8340,6253365,"TERMINAL",0,0,"503",,terminal_output +8341,6254524,"TERMINAL",0,0,"14",,terminal_output +8342,6255541,"TERMINAL",0,0,"25",,terminal_output +8343,6256505,"TERMINAL",0,0,"36",,terminal_output +8344,6257592,"TERMINAL",0,0,"47",,terminal_output +8345,6258607,"TERMINAL",0,0,"58",,terminal_output +8346,6259738,"TERMINAL",0,0,"69",,terminal_output +8347,6260765,"TERMINAL",0,0,"74:00",,terminal_output +8348,6261751,"TERMINAL",0,0,"81",,terminal_output +8349,6262798,"TERMINAL",0,0,"92",,terminal_output +8350,6263844,"TERMINAL",0,0,"8:00:003",,terminal_output 
+8351,6264963,"TERMINAL",0,0,"14",,terminal_output +8352,6265947,"TERMINAL",0,0,"25",,terminal_output +8353,6266987,"TERMINAL",0,0,"36",,terminal_output +8354,6268034,"TERMINAL",0,0,"47",,terminal_output +8355,6269189,"TERMINAL",0,0,"58",,terminal_output +8356,6270184,"TERMINAL",0,0,"610",,terminal_output +8357,6271210,"TERMINAL",0,0,"81",,terminal_output +8358,6272218,"TERMINAL",0,0,"92",,terminal_output +8359,6273270,"TERMINAL",0,0,"103",,terminal_output +8360,6274312,"TERMINAL",0,0,"14",,terminal_output +8361,6275405,"TERMINAL",0,0,"25",,terminal_output +8362,6276421,"TERMINAL",0,0,"36",,terminal_output +8363,6277454,"TERMINAL",0,0,"47",,terminal_output +8364,6278580,"TERMINAL",0,0,"58",,terminal_output +8365,6279604,"TERMINAL",0,0,"69",,terminal_output +8366,6280629,"TERMINAL",0,0,"720",,terminal_output +8367,6281655,"TERMINAL",0,0,"81",,terminal_output +8368,6282779,"TERMINAL",0,0,"92",,terminal_output +8369,6283806,"TERMINAL",0,0,"203",,terminal_output +8370,6284828,"TERMINAL",0,0,"14",,terminal_output +8371,6285849,"TERMINAL",0,0,"25",,terminal_output +8372,6286979,"TERMINAL",0,0,"36",,terminal_output +8373,6287983,"TERMINAL",0,0,"47",,terminal_output +8374,6289025,"TERMINAL",0,0,"58",,terminal_output +8375,6290049,"TERMINAL",0,0,"69",,terminal_output +8376,6291186,"TERMINAL",0,0,"730",,terminal_output +8377,6292200,"TERMINAL",0,0,"82",,terminal_output +8378,6293204,"TERMINAL",0,0,"303",,terminal_output +8379,6294269,"TERMINAL",0,0,"14",,terminal_output +8380,6295282,"TERMINAL",0,0,"25",,terminal_output +8381,6296335,"TERMINAL",0,0,"36",,terminal_output +8382,6297422,"TERMINAL",0,0,"47",,terminal_output +8383,6298456,"TERMINAL",0,0,"58",,terminal_output +8384,6299573,"TERMINAL",0,0,"69",,terminal_output +8385,6300606,"TERMINAL",0,0,"740",,terminal_output +8386,6301620,"TERMINAL",0,0,"81",,terminal_output +8387,6302647,"TERMINAL",0,0,"92",,terminal_output +8388,6303014,"genie.py",4393,0,"",python,selection_mouse +8389,6303264,"genie.py",4331,62," # shape (B, S, N)\n init_mask = mask.astype(bool)",python,selection_mouse +8390,6303267,"genie.py",4240,153,"ne, :, None] >= T) # shape (1, S, 1)\n mask = jnp.broadcast_to(mask, (B, S, N)) # shape (B, S, N)\n init_mask = mask.astype(bool)",python,selection_mouse +8391,6303267,"genie.py",4229,164,"range(S)[None, :, None] >= T) # shape (1, S, 1)\n mask = jnp.broadcast_to(mask, (B, S, N)) # shape (B, S, N)\n init_mask = mask.astype(bool)",python,selection_mouse +8392,6303299,"genie.py",4160,233,"is True for padded positions (i.e., t >= T)\n mask = (jnp.arange(S)[None, :, None] >= T) # shape (1, S, 1)\n mask = jnp.broadcast_to(mask, (B, S, N)) # shape (B, S, N)\n init_mask = mask.astype(bool)",python,selection_mouse +8393,6303343,"genie.py",4219,174,"k = (jnp.arange(S)[None, :, None] >= T) # shape (1, S, 1)\n mask = jnp.broadcast_to(mask, (B, S, N)) # shape (B, S, N)\n init_mask = mask.astype(bool)",python,selection_mouse +8394,6303370,"genie.py",4216,177,"mask = (jnp.arange(S)[None, :, None] >= T) # shape (1, S, 1)\n mask = jnp.broadcast_to(mask, (B, S, N)) # shape (B, S, N)\n init_mask = mask.astype(bool)",python,selection_mouse +8395,6303399,"genie.py",4214,179," mask = (jnp.arange(S)[None, :, None] >= T) # shape (1, S, 1)\n mask = jnp.broadcast_to(mask, (B, S, N)) # shape (B, S, N)\n init_mask = mask.astype(bool)",python,selection_mouse +8396,6303432,"genie.py",4212,181," mask = (jnp.arange(S)[None, :, None] >= T) # shape (1, S, 1)\n mask = jnp.broadcast_to(mask, (B, S, N)) # shape (B, S, N)\n init_mask = 
mask.astype(bool)",python,selection_mouse +8397,6303462,"genie.py",4211,182," mask = (jnp.arange(S)[None, :, None] >= T) # shape (1, S, 1)\n mask = jnp.broadcast_to(mask, (B, S, N)) # shape (B, S, N)\n init_mask = mask.astype(bool)",python,selection_mouse +8398,6303486,"genie.py",4209,184," mask = (jnp.arange(S)[None, :, None] >= T) # shape (1, S, 1)\n mask = jnp.broadcast_to(mask, (B, S, N)) # shape (B, S, N)\n init_mask = mask.astype(bool)",python,selection_mouse +8399,6303591,"genie.py",4208,185," mask = (jnp.arange(S)[None, :, None] >= T) # shape (1, S, 1)\n mask = jnp.broadcast_to(mask, (B, S, N)) # shape (B, S, N)\n init_mask = mask.astype(bool)",python,selection_mouse +8400,6303660,"genie.py",4207,186," mask = (jnp.arange(S)[None, :, None] >= T) # shape (1, S, 1)\n mask = jnp.broadcast_to(mask, (B, S, N)) # shape (B, S, N)\n init_mask = mask.astype(bool)",python,selection_mouse +8401,6303711,"genie.py",4205,188," mask = (jnp.arange(S)[None, :, None] >= T) # shape (1, S, 1)\n mask = jnp.broadcast_to(mask, (B, S, N)) # shape (B, S, N)\n init_mask = mask.astype(bool)",python,selection_mouse +8402,6303746,"genie.py",4204,189," mask = (jnp.arange(S)[None, :, None] >= T) # shape (1, S, 1)\n mask = jnp.broadcast_to(mask, (B, S, N)) # shape (B, S, N)\n init_mask = mask.astype(bool)",python,selection_mouse +8403,6303836,"TERMINAL",0,0,"403",,terminal_output +8404,6304696,"TERMINAL",0,0,"14",,terminal_output +8405,6305224,"genie.py",4129,0,"",python,selection_mouse +8406,6305820,"TERMINAL",0,0,"25",,terminal_output +8407,6306026,"genie.py",4351,0,"",python,selection_mouse +8408,6306299,"genie.py",4346,5,"S, N)",python,selection_mouse +8409,6306299,"genie.py",4351,42,"\n init_mask = mask.astype(bool)",python,selection_mouse +8410,6306300,"genie.py",4351,13,"\n ",python,selection_mouse +8411,6306300,"genie.py",4351,1,"\n",python,selection_mouse +8412,6306510,"genie.py",4278,73," mask = jnp.broadcast_to(mask, (B, S, N)) # shape (B, S, N)",python,selection_mouse +8413,6306574,"genie.py",4204,147," mask = (jnp.arange(S)[None, :, None] >= T) # shape (1, S, 1)\n mask = jnp.broadcast_to(mask, (B, S, N)) # shape (B, S, N)",python,selection_mouse +8414,6306789,"TERMINAL",0,0,"36",,terminal_output +8415,6307169,"genie.py",4204,147,"",python,content +8416,6307354,"genie.py",4204,0," mask = jnp.zeros((B, S, N), dtype=bool)\n mask = mask.at[:, T, :].set(True)",python,content +8417,6307909,"TERMINAL",0,0,"47",,terminal_output +8418,6308833,"genie.py",4282,3,"ue)",python,selection_mouse +8419,6308834,"genie.py",4272,13," :].set(True)",python,selection_mouse +8420,6308834,"genie.py",4263,22,".at[:, T, :].set(True)",python,selection_mouse +8421,6308927,"genie.py",4214,71," jnp.zeros((B, S, N), dtype=bool)\n mask = mask.at[:, T, :].set(True)",python,selection_mouse +8422,6308928,"genie.py",4211,74,"k = jnp.zeros((B, S, N), dtype=bool)\n mask = mask.at[:, T, :].set(True)",python,selection_mouse +8423,6308948,"genie.py",4210,75,"sk = jnp.zeros((B, S, N), dtype=bool)\n mask = mask.at[:, T, :].set(True)",python,selection_mouse +8424,6308949,"TERMINAL",0,0,"58",,terminal_output +8425,6309197,"genie.py",4209,76,"ask = jnp.zeros((B, S, N), dtype=bool)\n mask = mask.at[:, T, :].set(True)",python,selection_mouse +8426,6309197,"genie.py",4208,77,"mask = jnp.zeros((B, S, N), dtype=bool)\n mask = mask.at[:, T, :].set(True)",python,selection_mouse +8427,6309197,"genie.py",4207,78," mask = jnp.zeros((B, S, N), dtype=bool)\n mask = mask.at[:, T, :].set(True)",python,selection_mouse +8428,6309264,"genie.py",4206,79," mask = 
jnp.zeros((B, S, N), dtype=bool)\n mask = mask.at[:, T, :].set(True)",python,selection_mouse +8429,6309299,"genie.py",4205,80," mask = jnp.zeros((B, S, N), dtype=bool)\n mask = mask.at[:, T, :].set(True)",python,selection_mouse +8430,6309369,"genie.py",4204,81," mask = jnp.zeros((B, S, N), dtype=bool)\n mask = mask.at[:, T, :].set(True)",python,selection_mouse +8431,6309791,"genie.py",4248,4," ",python,content +8432,6309792,"genie.py",4204,4," ",python,content +8433,6309931,"TERMINAL",0,0,"69",,terminal_output +8434,6310226,"genie.py",4252,8," ",python,content +8435,6310227,"genie.py",4204,8," ",python,content +8436,6310973,"TERMINAL",0,0,"750",,terminal_output +8437,6311994,"genie.py",4270,0,"",python,selection_mouse +8438,6311994,"genie.py",4268,4,"mask",python,selection_mouse +8439,6312059,"TERMINAL",0,0,"81",,terminal_output +8440,6313071,"TERMINAL",0,0,"92",,terminal_output +8441,6314114,"TERMINAL",0,0,"504",,terminal_output +8442,6314858,"genie.py",4269,0,"",python,selection_mouse +8443,6315156,"TERMINAL",0,0,"25",,terminal_output +8444,6315761,"genie.py",4268,0,"",python,selection_command +8445,6316265,"TERMINAL",0,0,"36",,terminal_output +8446,6317315,"TERMINAL",0,0,"47",,terminal_output +8447,6317759,"genie.py",4268,0,"i",python,content +8448,6317760,"genie.py",4269,0,"",python,selection_keyboard +8449,6317907,"genie.py",4269,0,"n",python,content +8450,6317908,"genie.py",4270,0,"",python,selection_keyboard +8451,6318038,"genie.py",4270,0,"i",python,content +8452,6318039,"genie.py",4271,0,"",python,selection_keyboard +8453,6318092,"genie.py",4271,0,"t",python,content +8454,6318093,"genie.py",4272,0,"",python,selection_keyboard +8455,6318314,"genie.py",4272,0,"_",python,content +8456,6318315,"genie.py",4273,0,"",python,selection_keyboard +8457,6318316,"TERMINAL",0,0,"58",,terminal_output +8458,6319344,"TERMINAL",0,0,"69",,terminal_output +8459,6319726,"genie.py",4272,0,"",python,selection_command +8460,6320082,"genie.py",4220,0,"",python,selection_command +8461,6320464,"TERMINAL",0,0,"75:00",,terminal_output +8462,6321299,"genie.py",4272,0,"",python,selection_command +8463,6321436,"genie.py",4323,0,"",python,selection_command +8464,6321449,"TERMINAL",0,0,"81",,terminal_output +8465,6321767,"genie.py",4307,42,"",python,content +8466,6322492,"TERMINAL",0,0,"92",,terminal_output +8467,6322837,"genie.py",4306,0,"",python,selection_mouse +8468,6322868,"genie.py",4305,0,"",python,selection_command +8469,6323544,"TERMINAL",0,0,"1:003",,terminal_output +8470,6323956,"genie.py",4221,0,"",python,selection_mouse +8471,6324472,"genie.py",4234,0,"",python,selection_mouse +8472,6324591,"TERMINAL",0,0,"14",,terminal_output +8473,6325705,"TERMINAL",0,0,"25",,terminal_output +8474,6326710,"TERMINAL",0,0,"36",,terminal_output +8475,6327733,"TERMINAL",0,0,"47",,terminal_output +8476,6328083,"genie.py",4255,0,"",python,selection_mouse +8477,6328105,"genie.py",4254,0,"",python,selection_command +8478,6328785,"TERMINAL",0,0,"58",,terminal_output +8479,6329577,"TERMINAL",0,0,"sh scripts_horeka/overfit_sample_tiny/sample.sh ",,terminal_output +8480,6329801,"TERMINAL",0,0,"69",,terminal_output +8481,6330570,"TERMINAL",0,0,"\r\n[?2004l\r",,terminal_output +8482,6330907,"TERMINAL",0,0,"710",,terminal_output +8483,6331014,"TERMINAL",0,0,"Sampling from checkpoint: /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/3309699/genie_1751384516_74500/\r\n",,terminal_output +8484,6331932,"TERMINAL",0,0,"81",,terminal_output +8485,6332983,"TERMINAL",0,0,"92",,terminal_output 
+8486,6334082,"TERMINAL",0,0,"103",,terminal_output +8487,6335109,"TERMINAL",0,0,"14",,terminal_output +8488,6336133,"TERMINAL",0,0,"25",,terminal_output +8489,6337153,"TERMINAL",0,0,"37",,terminal_output +8490,6337565,"TERMINAL",0,0,"2025-07-03 18:01:14.402494: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +8491,6338282,"TERMINAL",0,0,"58",,terminal_output +8492,6339229,"TERMINAL",0,0,"69",,terminal_output +8493,6340255,"TERMINAL",0,0,"720",,terminal_output +8494,6341316,"TERMINAL",0,0,"81",,terminal_output +8495,6342355,"TERMINAL",0,0,"92",,terminal_output +8496,6343405,"TERMINAL",0,0,"203",,terminal_output +8497,6344527,"TERMINAL",0,0,"14",,terminal_output +8498,6345550,"TERMINAL",0,0,"25",,terminal_output +8499,6346575,"TERMINAL",0,0,"36",,terminal_output +8500,6347598,"TERMINAL",0,0,"47",,terminal_output +8501,6348666,"TERMINAL",0,0,"58",,terminal_output +8502,6349748,"TERMINAL",0,0,"69",,terminal_output +8503,6350482,"TERMINAL",0,0,"2025-07-03 18:01:27.382233: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +8504,6350711,"TERMINAL",0,0,"730",,terminal_output +8505,6351762,"TERMINAL",0,0,"81",,terminal_output +8506,6352810,"TERMINAL",0,0,"92",,terminal_output +8507,6353851,"TERMINAL",0,0,"303",,terminal_output +8508,6354973,"TERMINAL",0,0,"14",,terminal_output +8509,6355996,"TERMINAL",0,0,"25",,terminal_output +8510,6356973,"TERMINAL",0,0,"36",,terminal_output +8511,6358013,"TERMINAL",0,0,"47",,terminal_output +8512,6359068,"TERMINAL",0,0,"58",,terminal_output +8513,6360103,"TERMINAL",0,0,"69",,terminal_output +8514,6361147,"TERMINAL",0,0,"841",,terminal_output +8515,6362193,"TERMINAL",0,0,"92",,terminal_output +8516,6363268,"TERMINAL",0,0,"403",,terminal_output +8517,6364458,"TERMINAL",0,0,"14",,terminal_output +8518,6365519,"TERMINAL",0,0,"25",,terminal_output +8519,6366435,"TERMINAL",0,0,"2025-07-03 18:01:43.337769: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +8520,6366549,"TERMINAL",0,0,"36",,terminal_output +8521,6367669,"TERMINAL",0,0,"47",,terminal_output +8522,6368636,"TERMINAL",0,0,"58",,terminal_output +8523,6369677,"TERMINAL",0,0,"69",,terminal_output +8524,6370728,"TERMINAL",0,0,"750",,terminal_output +8525,6371766,"TERMINAL",0,0,"81",,terminal_output +8526,6372832,"TERMINAL",0,0,"92",,terminal_output +8527,6373506,"TERMINAL",0,0,"2025-07-03 18:01:50.392839: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. 
Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +8528,6373916,"TERMINAL",0,0,"503",,terminal_output +8529,6374917,"TERMINAL",0,0,"14",,terminal_output +8530,6375965,"TERMINAL",0,0,"25",,terminal_output +8531,6376993,"TERMINAL",0,0,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/orbax/checkpoint/_src/serialization/type_handlers.py:1251: UserWarning: Sharding info not provided when restoring. Populating sharding info from sharding file. Please note restoration time will be slightly increased due to reading from file. Note also that this option is unsafe when restoring on a different topology than the checkpoint was saved with.\r\n warnings.warn(\r\n",,terminal_output +8532,6377010,"TERMINAL",0,0,"36",,terminal_output +8533,6378054,"TERMINAL",0,0,"47",,terminal_output +8534,6379104,"TERMINAL",0,0,"59",,terminal_output +8535,6380147,"TERMINAL",0,0,"76:00",,terminal_output +8536,6381189,"TERMINAL",0,0,"81",,terminal_output +8537,6382313,"TERMINAL",0,0,"92",,terminal_output +8538,6383278,"TERMINAL",0,0,"2:003",,terminal_output +8539,6384357,"TERMINAL",0,0,"14",,terminal_output +8540,6384839,"TERMINAL",0,0,"2025-07-03 18:02:01.735679: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +8541,6385379,"TERMINAL",0,0,"25",,terminal_output +8542,6386426,"TERMINAL",0,0,"36",,terminal_output +8543,6387480,"TERMINAL",0,0,"47",,terminal_output +8544,6387997,"TERMINAL",0,0,"2025-07-03 18:02:04.865510: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +8545,6388595,"TERMINAL",0,0,"58",,terminal_output +8546,6389576,"TERMINAL",0,0,"69",,terminal_output +8547,6390709,"TERMINAL",0,0,"710",,terminal_output +8548,6391727,"TERMINAL",0,0,"81",,terminal_output +8549,6392757,"TERMINAL",0,0,"92",,terminal_output +8550,6393749,"TERMINAL",0,0,"103",,terminal_output +8551,6394806,"TERMINAL",0,0,"14",,terminal_output +8552,6395846,"TERMINAL",0,0,"25",,terminal_output +8553,6395956,"TERMINAL",0,0,"2025-07-03 18:02:12.814525: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +8554,6396901,"TERMINAL",0,0,"36",,terminal_output +8555,6397518,"TERMINAL",0,0,"token_idxs shape: (1, 1, 920)\r\n",,terminal_output +8556,6397801,"TERMINAL",0,0,"token_idxs shape: (1, 16, 920)\r\naction_tokens.shape: (1, 15, 1, 32)\r\n",,terminal_output +8557,6397980,"TERMINAL",0,0,"47",,terminal_output +8558,6399005,"TERMINAL",0,0,"58",,terminal_output +8559,6399239,"TERMINAL",0,0,"2025-07-03 18:02:16.138797: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. 
Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +8560,6400131,"TERMINAL",0,0,"69",,terminal_output +8561,6401155,"TERMINAL",0,0,"720",,terminal_output +8562,6402179,"TERMINAL",0,0,"82",,terminal_output +8563,6403202,"TERMINAL",0,0,"203",,terminal_output +8564,6404228,"TERMINAL",0,0,"14",,terminal_output +8565,6405355,"TERMINAL",0,0,"25",,terminal_output +8566,6405466,"TERMINAL",0,0,"token_idxs shape: (1, 16, 920)\r\naction_tokens.shape: (1, 15, 1, 32)\r\n",,terminal_output +8567,6406322,"TERMINAL",0,0,"36",,terminal_output +8568,6407443,"TERMINAL",0,0,"47",,terminal_output +8569,6408416,"TERMINAL",0,0,"58",,terminal_output +8570,6408614,"TERMINAL",0,0,"token_idxs shape: (1, 16, 920)\r\naction_tokens.shape: (1, 15, 1, 32)\r\n",,terminal_output +8571,6409457,"TERMINAL",0,0,"69",,terminal_output +8572,6410576,"TERMINAL",0,0,"730",,terminal_output +8573,6411549,"TERMINAL",0,0,"81",,terminal_output +8574,6411814,"TERMINAL",0,0,"token_idxs shape: (1, 16, 920)\r\naction_tokens.shape: (1, 15, 1, 32)\r\n",,terminal_output +8575,6412602,"TERMINAL",0,0,"92",,terminal_output +8576,6413650,"TERMINAL",0,0,"303",,terminal_output +8577,6414775,"TERMINAL",0,0,"14",,terminal_output +8578,6414929,"TERMINAL",0,0,"token_idxs shape: (1, 16, 920)\r\naction_tokens.shape: (1, 15, 1, 32)\r\n",,terminal_output +8579,6415798,"TERMINAL",0,0,"25",,terminal_output +8580,6416792,"TERMINAL",0,0,"36",,terminal_output +8581,6417847,"TERMINAL",0,0,"47",,terminal_output +8582,6418063,"TERMINAL",0,0,"token_idxs shape: (1, 16, 920)\r\naction_tokens.shape: (1, 15, 1, 32)\r\n",,terminal_output +8583,6418973,"TERMINAL",0,0,"58",,terminal_output +8584,6419946,"TERMINAL",0,0,"69",,terminal_output +8585,6421032,"TERMINAL",0,0,"740",,terminal_output +8586,6421330,"TERMINAL",0,0,"token_idxs shape: (1, 16, 920)\r\naction_tokens.shape: (1, 15, 1, 32)\r\n",,terminal_output +8587,6422025,"TERMINAL",0,0,"81",,terminal_output +8588,6423071,"TERMINAL",0,0,"92",,terminal_output +8589,6424195,"TERMINAL",0,0,"404",,terminal_output +8590,6424506,"TERMINAL",0,0,"token_idxs shape: (1, 16, 920)\r\naction_tokens.shape: (1, 15, 1, 32)\r\n",,terminal_output +8591,6425163,"TERMINAL",0,0,"25",,terminal_output +8592,6426253,"TERMINAL",0,0,"36",,terminal_output +8593,6427270,"TERMINAL",0,0,"47",,terminal_output +8594,6427678,"TERMINAL",0,0,"token_idxs shape: (1, 16, 920)\r\naction_tokens.shape: (1, 15, 1, 32)\r\n",,terminal_output +8595,6428312,"TERMINAL",0,0,"58",,terminal_output +8596,6429358,"TERMINAL",0,0,"69",,terminal_output +8597,6430401,"TERMINAL",0,0,"750",,terminal_output +8598,6430851,"TERMINAL",0,0,"token_idxs shape: (1, 16, 920)\r\naction_tokens.shape: (1, 15, 1, 32)\r\n",,terminal_output +8599,6431444,"TERMINAL",0,0,"81",,terminal_output +8600,6432592,"TERMINAL",0,0,"92",,terminal_output +8601,6433617,"TERMINAL",0,0,"503",,terminal_output +8602,6433924,"TERMINAL",0,0,"token_idxs shape: (1, 16, 920)\r\naction_tokens.shape: (1, 15, 1, 32)\r\n",,terminal_output +8603,6434642,"TERMINAL",0,0,"14",,terminal_output +8604,6435665,"TERMINAL",0,0,"25",,terminal_output +8605,6436792,"TERMINAL",0,0,"36",,terminal_output +8606,6437025,"TERMINAL",0,0,"token_idxs shape: (1, 16, 920)\r\naction_tokens.shape: (1, 15, 1, 32)\r\n",,terminal_output +8607,6437814,"TERMINAL",0,0,"47",,terminal_output +8608,6438784,"TERMINAL",0,0,"58",,terminal_output +8609,6439863,"TERMINAL",0,0,"69",,terminal_output +8610,6440170,"TERMINAL",0,0,"token_idxs shape: (1, 16, 
920)\r\naction_tokens.shape: (1, 15, 1, 32)\r\n",,terminal_output +8611,6440887,"TERMINAL",0,0,"77:00",,terminal_output +8612,6442012,"TERMINAL",0,0,"81",,terminal_output +8613,6442983,"TERMINAL",0,0,"92",,terminal_output +8614,6443281,"TERMINAL",0,0,"token_idxs shape: (1, 16, 920)\r\naction_tokens.shape: (1, 15, 1, 32)\r\n",,terminal_output +8615,6444066,"TERMINAL",0,0,"3:003",,terminal_output +8616,6445085,"TERMINAL",0,0,"14",,terminal_output +8617,6446108,"TERMINAL",0,0,"26",,terminal_output +8618,6446621,"TERMINAL",0,0,"token_idxs shape: (1, 16, 920)\r\naction_tokens.shape: (1, 15, 1, 32)\r\n",,terminal_output +8619,6447235,"TERMINAL",0,0,"47",,terminal_output +8620,6448273,"TERMINAL",0,0,"58",,terminal_output +8621,6449249,"TERMINAL",0,0,"69",,terminal_output +8622,6450317,"TERMINAL",0,0,"710",,terminal_output +8623,6451344,"TERMINAL",0,0,"81",,terminal_output +8624,6452391,"TERMINAL",0,0,"92",,terminal_output +8625,6453482,"TERMINAL",0,0,"103",,terminal_output +8626,6454506,"TERMINAL",0,0,"14",,terminal_output +8627,6455628,"TERMINAL",0,0,"25",,terminal_output +8628,6456187,"TERMINAL",0,0,"SSIM: 0.3250141739845276\r\n",,terminal_output +8629,6456656,"TERMINAL",0,0,"36",,terminal_output +8630,6457681,"TERMINAL",0,0,"47",,terminal_output +8631,6458881,"TERMINAL",0,0,"58",,terminal_output +8632,6458893,"TERMINAL",0,0,"]0;tum_cte0515@hkn0401:~/Projects/jafar[?2004h(jafar) [tum_cte0515@hkn0401 jafar]$ ",,terminal_output +8633,6459745,"TERMINAL",0,0,"69",,terminal_output +8634,6460793,"TERMINAL",0,0,"720",,terminal_output +8635,6461866,"TERMINAL",0,0,"81",,terminal_output +8636,6462885,"TERMINAL",0,0,"92",,terminal_output +8637,6463935,"TERMINAL",0,0,"203",,terminal_output +8638,6465055,"TERMINAL",0,0,"14",,terminal_output +8639,6466078,"TERMINAL",0,0,"25",,terminal_output +8640,6467101,"TERMINAL",0,0,"36",,terminal_output +8641,6468227,"TERMINAL",0,0,"48",,terminal_output +8642,6469270,"TERMINAL",0,0,"69",,terminal_output +8643,6470275,"TERMINAL",0,0,"730",,terminal_output +8644,6471301,"TERMINAL",0,0,"81",,terminal_output +8645,6472308,"TERMINAL",0,0,"92",,terminal_output +8646,6473355,"TERMINAL",0,0,"303",,terminal_output +8647,6474402,"TERMINAL",0,0,"14",,terminal_output +8648,6475503,"TERMINAL",0,0,"25",,terminal_output +8649,6476497,"TERMINAL",0,0,"36",,terminal_output +8650,6477649,"TERMINAL",0,0,"47",,terminal_output +8651,6478672,"TERMINAL",0,0,"58",,terminal_output +8652,6480282,"TERMINAL",0,0,"69",,terminal_output +8653,6481170,"TERMINAL",0,0,"740",,terminal_output +8654,6482154,"TERMINAL",0,0,"82",,terminal_output +8655,6482474,"genie.py",0,0,"",python,tab +8656,6482475,"genie.py",8072,0,"",python,selection_mouse +8657,6482629,"genie.py",8070,4,"mask",python,selection_mouse +8658,6483149,"TERMINAL",0,0,"403",,terminal_output +8659,6484309,"TERMINAL",0,0,"14",,terminal_output +8660,6484513,"genie.py",8536,0,"",python,selection_mouse +8661,6485344,"TERMINAL",0,0,"25",,terminal_output +8662,6485481,"genie.py",9132,0,"",python,selection_mouse +8663,6485626,"genie.py",9132,4,"mask",python,selection_mouse +8664,6486477,"TERMINAL",0,0,"36",,terminal_output +8665,6487339,"TERMINAL",0,0,"47",,terminal_output +8666,6488388,"genie.py",9242,0,"",python,selection_mouse +8667,6488430,"TERMINAL",0,0,"58",,terminal_output +8668,6488944,"genie.py",9202,0,"",python,selection_mouse +8669,6489096,"genie.py",9198,5,"where",python,selection_mouse +8670,6489427,"TERMINAL",0,0,"69",,terminal_output +8671,6490445,"genie.py",9626,0,"",python,selection_mouse 
+8672,6490584,"genie.py",9625,4,"mask",python,selection_mouse +8673,6490620,"TERMINAL",0,0,"750",,terminal_output +8674,6491577,"TERMINAL",0,0,"81",,terminal_output +8675,6492602,"TERMINAL",0,0,"92",,terminal_output +8676,6493623,"TERMINAL",0,0,"503",,terminal_output +8677,6494654,"TERMINAL",0,0,"14",,terminal_output +8678,6495774,"TERMINAL",0,0,"25",,terminal_output +8679,6496796,"TERMINAL",0,0,"36",,terminal_output +8680,6497824,"TERMINAL",0,0,"47",,terminal_output +8681,6498887,"TERMINAL",0,0,"58",,terminal_output +8682,6499972,"TERMINAL",0,0,"69",,terminal_output +8683,6501098,"TERMINAL",0,0,"78:00",,terminal_output +8684,6501994,"TERMINAL",0,0,"81",,terminal_output +8685,6503035,"TERMINAL",0,0,"92",,terminal_output +8686,6503188,"genie.py",4307,0,"",python,selection_mouse +8687,6503792,"genie.py",4290,0,"",python,selection_mouse +8688,6504112,"TERMINAL",0,0,"4:003",,terminal_output +8689,6504802,"genie.py",4303,0,"",python,selection_mouse +8690,6505197,"TERMINAL",0,0,"15",,terminal_output +8691,6506199,"TERMINAL",0,0,"36",,terminal_output +8692,6506689,"genie.py",4306,0,"\n init_mask = mask.astype(bool)",python,content +8693,6506731,"genie.py",4319,0,"",python,selection_command +8694,6507229,"TERMINAL",0,0,"47",,terminal_output +8695,6508299,"TERMINAL",0,0,"58",,terminal_output +8696,6509201,"genie.py",4318,0,"",python,selection_command +8697,6509319,"TERMINAL",0,0,"69",,terminal_output +8698,6510264,"genie.py",4307,42,"",python,content +8699,6510430,"genie.py",4256,0,"",python,selection_command +8700,6510510,"TERMINAL",0,0,"710",,terminal_output +8701,6510985,"genie.py",4306,0,"\n ",python,content +8702,6511423,"TERMINAL",0,0,"81",,terminal_output +8703,6511621,"genie.py",4319,0,"b",python,content +8704,6511622,"genie.py",4320,0,"",python,selection_keyboard +8705,6511672,"genie.py",4320,0,"r",python,content +8706,6511673,"genie.py",4321,0,"",python,selection_keyboard +8707,6511920,"genie.py",4321,0,"e",python,content +8708,6511921,"genie.py",4322,0,"",python,selection_keyboard +8709,6512058,"genie.py",4322,0,"a",python,content +8710,6512059,"genie.py",4323,0,"",python,selection_keyboard +8711,6512143,"genie.py",4323,0,"k",python,content +8712,6512143,"genie.py",4324,0,"",python,selection_keyboard +8713,6512509,"TERMINAL",0,0,"92",,terminal_output +8714,6512932,"genie.py",4319,5,"breakpoint",python,content +8715,6513508,"TERMINAL",0,0,"103",,terminal_output +8716,6513682,"genie.py",4329,0,"()",python,content +8717,6513683,"genie.py",4330,0,"",python,selection_keyboard +8718,6513801,"genie.py",4330,1,")",python,content +8719,6513801,"genie.py",4331,0,"",python,selection_keyboard +8720,6513886,"genie.py",4330,0,"",python,selection_command +8721,6514560,"TERMINAL",0,0,"14",,terminal_output +8722,6515640,"TERMINAL",0,0,"25",,terminal_output +8723,6516639,"TERMINAL",0,0,"36",,terminal_output +8724,6517239,"TERMINAL",0,0,"sh scripts_horeka/overfit_sample_tiny/sample.sh ",,terminal_output +8725,6517406,"TERMINAL",0,0,"\r\n[?2004l\r",,terminal_output +8726,6517524,"TERMINAL",0,0,"Sampling from checkpoint: /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/3309699/genie_1751384516_74500/\r\n",,terminal_output +8727,6517689,"TERMINAL",0,0,"47",,terminal_output +8728,6518815,"TERMINAL",0,0,"58",,terminal_output +8729,6519782,"TERMINAL",0,0,"69",,terminal_output +8730,6520862,"TERMINAL",0,0,"2025-07-03 18:04:17.686111: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. 
Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +8731,6520872,"TERMINAL",0,0,"720",,terminal_output +8732,6521867,"TERMINAL",0,0,"81",,terminal_output +8733,6522914,"TERMINAL",0,0,"92",,terminal_output +8734,6524037,"TERMINAL",0,0,"203",,terminal_output +8735,6525061,"TERMINAL",0,0,"14",,terminal_output +8736,6526085,"TERMINAL",0,0,"25",,terminal_output +8737,6527109,"TERMINAL",0,0,"36",,terminal_output +8738,6528135,"TERMINAL",0,0,"48",,terminal_output +8739,6529320,"TERMINAL",0,0,"69",,terminal_output +8740,6530292,"TERMINAL",0,0,"730",,terminal_output +8741,6531310,"TERMINAL",0,0,"81",,terminal_output +8742,6532332,"TERMINAL",0,0,"92",,terminal_output +8743,6533374,"TERMINAL",0,0,"303",,terminal_output +8744,6533664,"TERMINAL",0,0,"2025-07-03 18:04:30.522473: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +8745,6534420,"TERMINAL",0,0,"14",,terminal_output +8746,6535467,"TERMINAL",0,0,"25",,terminal_output +8747,6536514,"TERMINAL",0,0,"36",,terminal_output +8748,6537657,"TERMINAL",0,0,"47",,terminal_output +8749,6538682,"TERMINAL",0,0,"58",,terminal_output +8750,6539705,"TERMINAL",0,0,"69",,terminal_output +8751,6540727,"TERMINAL",0,0,"740",,terminal_output +8752,6541854,"TERMINAL",0,0,"81",,terminal_output +8753,6542820,"TERMINAL",0,0,"92",,terminal_output +8754,6543839,"TERMINAL",0,0,"403",,terminal_output +8755,6544901,"TERMINAL",0,0,"14",,terminal_output +8756,6545926,"TERMINAL",0,0,"25",,terminal_output +8757,6547012,"TERMINAL",0,0,"36",,terminal_output +8758,6548014,"TERMINAL",0,0,"47",,terminal_output +8759,6549124,"TERMINAL",0,0,"58",,terminal_output +8760,6549638,"TERMINAL",0,0,"2025-07-03 18:04:46.446293: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +8761,6550148,"TERMINAL",0,0,"69",,terminal_output +8762,6551172,"TERMINAL",0,0,"851",,terminal_output +8763,6552200,"TERMINAL",0,0,"92",,terminal_output +8764,6553246,"TERMINAL",0,0,"503",,terminal_output +8765,6554347,"TERMINAL",0,0,"14",,terminal_output +8766,6555337,"TERMINAL",0,0,"25",,terminal_output +8767,6556396,"TERMINAL",0,0,"36",,terminal_output +8768,6556813,"TERMINAL",0,0,"2025-07-03 18:04:53.639073: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +8769,6557429,"TERMINAL",0,0,"47",,terminal_output +8770,6558546,"TERMINAL",0,0,"58",,terminal_output +8771,6559570,"TERMINAL",0,0,"69",,terminal_output +8772,6560306,"TERMINAL",0,0,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/orbax/checkpoint/_src/serialization/type_handlers.py:1251: UserWarning: Sharding info not provided when restoring. Populating sharding info from sharding file. 
Please note restoration time will be slightly increased due to reading from file. Note also that this option is unsafe when restoring on a different topology than the checkpoint was saved with.\r\n warnings.warn(\r\n",,terminal_output +8773,6560596,"TERMINAL",0,0,"79:00",,terminal_output +8774,6561610,"TERMINAL",0,0,"81",,terminal_output +8775,6562745,"TERMINAL",0,0,"92",,terminal_output +8776,6563719,"TERMINAL",0,0,"5:003",,terminal_output +8777,6564740,"TERMINAL",0,0,"14",,terminal_output +8778,6565782,"TERMINAL",0,0,"25",,terminal_output +8779,6566829,"TERMINAL",0,0,"36",,terminal_output +8780,6567695,"TERMINAL",0,0,"2025-07-03 18:05:04.584899: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +8781,6567876,"TERMINAL",0,0,"47",,terminal_output +8782,6568954,"TERMINAL",0,0,"58",,terminal_output +8783,6570014,"TERMINAL",0,0,"69",,terminal_output +8784,6570664,"TERMINAL",0,0,"2025-07-03 18:05:07.565297: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +8785,6571073,"TERMINAL",0,0,"710",,terminal_output +8786,6572062,"TERMINAL",0,0,"81",,terminal_output +8787,6573093,"TERMINAL",0,0,"92",,terminal_output +8788,6574133,"TERMINAL",0,0,"104",,terminal_output +8789,6575238,"TERMINAL",0,0,"25",,terminal_output +8790,6576222,"TERMINAL",0,0,"36",,terminal_output +8791,6577284,"TERMINAL",0,0,"47",,terminal_output +8792,6578322,"TERMINAL",0,0,"58",,terminal_output +8793,6578718,"TERMINAL",0,0,"2025-07-03 18:05:15.517896: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. 
Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +8794,6579353,"TERMINAL",0,0,"69",,terminal_output +8795,6580254,"TERMINAL",0,0,"token_idxs shape: (1, 1, 920)\r\n",,terminal_output +8796,6580398,"TERMINAL",0,0,"720",,terminal_output +8797,6580518,"TERMINAL",0,0,"token_idxs shape: (1, 16, 920)\r\naction_tokens.shape: (1, 15, 1, 32)\r\n> /hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/genie.py(129)sample_mihir()\r\n-> assert init_mask.shape == (B, S, N), ""Wrong mask shape""\r\n(Pdb) ",,terminal_output +8798,6581444,"TERMINAL",0,0,"81",,terminal_output +8799,6582494,"TERMINAL",0,0,"92",,terminal_output +8800,6583634,"TERMINAL",0,0,"203",,terminal_output +8801,6584582,"TERMINAL",0,0,"14",,terminal_output +8802,6585627,"TERMINAL",0,0,"25",,terminal_output +8803,6586675,"TERMINAL",0,0,"36",,terminal_output +8804,6587726,"TERMINAL",0,0,"47",,terminal_output +8805,6588856,"TERMINAL",0,0,"58",,terminal_output +8806,6589819,"TERMINAL",0,0,"69",,terminal_output +8807,6590913,"TERMINAL",0,0,"730",,terminal_output +8808,6591928,"TERMINAL",0,0,"81",,terminal_output +8809,6592980,"TERMINAL",0,0,"92",,terminal_output +8810,6594007,"TERMINAL",0,0,"303",,terminal_output +8811,6595063,"TERMINAL",0,0,"14",,terminal_output +8812,6596134,"TERMINAL",0,0,"26",,terminal_output +8813,6597253,"TERMINAL",0,0,"47",,terminal_output +8814,6598290,"TERMINAL",0,0,"58",,terminal_output +8815,6599322,"TERMINAL",0,0,"69",,terminal_output +8816,6600341,"TERMINAL",0,0,"740",,terminal_output +8817,6601350,"TERMINAL",0,0,"81",,terminal_output +8818,6602385,"TERMINAL",0,0,"92",,terminal_output +8819,6602944,"TERMINAL",0,0,"i",,terminal_output +8820,6603004,"TERMINAL",0,0,"[?25ln[?25h",,terminal_output +8821,6603149,"TERMINAL",0,0,"[?25li[?25h",,terminal_output +8822,6603210,"TERMINAL",0,0,"[?25lt[?25h",,terminal_output +8823,6603430,"TERMINAL",0,0,"403",,terminal_output +8824,6604473,"TERMINAL",0,0,"14",,terminal_output +8825,6605509,"TERMINAL",0,0,"[?25l_[?25h",,terminal_output +8826,6605519,"TERMINAL",0,0,"25",,terminal_output +8827,6605812,"TERMINAL",0,0,"[?25lm[?25h",,terminal_output +8828,6605932,"TERMINAL",0,0,"[?25la[?25h[?25ls[?25h",,terminal_output +8829,6606067,"TERMINAL",0,0,"[?25lk[?25h",,terminal_output +8830,6606571,"TERMINAL",0,0,"36",,terminal_output +8831,6607223,"TERMINAL",0,0,"[?25l[[?25h",,terminal_output +8832,6607644,"TERMINAL",0,0,"47",,terminal_output +8833,6608527,"TERMINAL",0,0,"[?25l:[?25h",,terminal_output +8834,6608654,"TERMINAL",0,0,"58",,terminal_output +8835,6609098,"TERMINAL",0,0,"[?25l:\r[?25h",,terminal_output +8836,6609762,"TERMINAL",0,0,"69",,terminal_output +8837,6610137,"TERMINAL",0,0,"[?25l0[?25h",,terminal_output +8838,6610835,"TERMINAL",0,0,"750",,terminal_output +8839,6611352,"TERMINAL",0,0,"[?25l,[?25h",,terminal_output +8840,6611666,"TERMINAL",0,0,"[?25l:[?25h",,terminal_output +8841,6611784,"TERMINAL",0,0,"81",,terminal_output +8842,6612434,"TERMINAL",0,0,"[?25l,[?25h",,terminal_output +8843,6612692,"TERMINAL",0,0,"[?25l0[?25h",,terminal_output +8844,6612830,"TERMINAL",0,0,"92",,terminal_output +8845,6612986,"TERMINAL",0,0,"[?25l][?25h",,terminal_output +8846,6613170,"TERMINAL",0,0,"\r\n",,terminal_output +8847,6613232,"TERMINAL",0,0,"Array([False, True, False, False, False, False, False, False, False,\r\n False, False, False, False, False, False, False], dtype=bool)\r\n(Pdb) ",,terminal_output +8848,6613922,"TERMINAL",0,0,"503",,terminal_output 
+8849,6614991,"TERMINAL",0,0,"14",,terminal_output +8850,6615992,"TERMINAL",0,0,"25",,terminal_output +8851,6617139,"TERMINAL",0,0,"36",,terminal_output +8852,6618061,"TERMINAL",0,0,"47",,terminal_output +8853,6619167,"TERMINAL",0,0,"58",,terminal_output +8854,6620192,"TERMINAL",0,0,"650:00",,terminal_output +8855,6621269,"TERMINAL",0,0,"81",,terminal_output +8856,6622241,"TERMINAL",0,0,"92",,terminal_output +8857,6623275,"TERMINAL",0,0,"6:003",,terminal_output +8858,6624323,"TERMINAL",0,0,"14",,terminal_output +8859,6625375,"TERMINAL",0,0,"25",,terminal_output +8860,6626409,"TERMINAL",0,0,"36",,terminal_output +8861,6627461,"TERMINAL",0,0,"47",,terminal_output +8862,6628591,"TERMINAL",0,0,"58",,terminal_output +8863,6629561,"TERMINAL",0,0,"69",,terminal_output +8864,6630676,"TERMINAL",0,0,"710",,terminal_output +8865,6631715,"TERMINAL",0,0,"81",,terminal_output +8866,6632689,"TERMINAL",0,0,"92",,terminal_output +8867,6633745,"TERMINAL",0,0,"103",,terminal_output +8868,6634836,"TERMINAL",0,0,"14",,terminal_output +8869,6635859,"TERMINAL",0,0,"25",,terminal_output +8870,6636884,"TERMINAL",0,0,"36",,terminal_output +8871,6637863,"TERMINAL",0,0,"47",,terminal_output +8872,6638939,"TERMINAL",0,0,"58",,terminal_output +8873,6640057,"TERMINAL",0,0,"69",,terminal_output +8874,6641081,"TERMINAL",0,0,"720",,terminal_output +8875,6642105,"TERMINAL",0,0,"81",,terminal_output +8876,6643102,"TERMINAL",0,0,"92",,terminal_output +8877,6644257,"TERMINAL",0,0,"214",,terminal_output +8878,6645282,"TERMINAL",0,0,"25",,terminal_output +8879,6646305,"TERMINAL",0,0,"36",,terminal_output +8880,6647329,"TERMINAL",0,0,"47",,terminal_output +8881,6648353,"TERMINAL",0,0,"58",,terminal_output +8882,6649389,"TERMINAL",0,0,"69",,terminal_output +8883,6650438,"TERMINAL",0,0,"730",,terminal_output +8884,6651479,"TERMINAL",0,0,"81",,terminal_output +8885,6652524,"TERMINAL",0,0,"92",,terminal_output +8886,6653572,"TERMINAL",0,0,"303",,terminal_output +8887,6654701,"TERMINAL",0,0,"14",,terminal_output +8888,6655739,"genie.py",0,0,"",python,tab +8889,6655740,"genie.py",4289,0,"",python,selection_mouse +8890,6655822,"TERMINAL",0,0,"25",,terminal_output +8891,6656715,"TERMINAL",0,0,"36",,terminal_output +8892,6657072,"genie.py",4331,0,"",python,selection_mouse +8893,6657084,"genie.py",4330,0,"",python,selection_command +8894,6657772,"TERMINAL",0,0,"47",,terminal_output +8895,6658797,"TERMINAL",0,0,"58",,terminal_output +8896,6659923,"TERMINAL",0,0,"69",,terminal_output +8897,6660946,"TERMINAL",0,0,"740",,terminal_output +8898,6661971,"TERMINAL",0,0,"81",,terminal_output +8899,6662994,"TERMINAL",0,0,"92",,terminal_output +8900,6664121,"TERMINAL",0,0,"403",,terminal_output +8901,6664303,"genie.py",0,0,"",python,tab +8902,6664304,"genie.py",4287,0,"",python,selection_mouse +8903,6664997,"genie.py",4293,0,"",python,selection_mouse +8904,6665091,"TERMINAL",0,0,"14",,terminal_output +8905,6665940,"genie.py",4292,0,"",python,selection_mouse +8906,6666114,"TERMINAL",0,0,"26",,terminal_output +8907,6667155,"TERMINAL",0,0,"47",,terminal_output +8908,6668178,"genie.py",4332,0,"",python,selection_mouse +8909,6668356,"TERMINAL",0,0,"58",,terminal_output +8910,6668665,"genie.py",4290,0,"",python,selection_mouse +8911,6669344,"TERMINAL",0,0,"69",,terminal_output +8912,6670293,"TERMINAL",0,0,"750",,terminal_output +8913,6671392,"TERMINAL",0,0,"81",,terminal_output +8914,6672408,"TERMINAL",0,0,"92",,terminal_output +8915,6673459,"TERMINAL",0,0,"503",,terminal_output +8916,6674477,"TERMINAL",0,0,"14",,terminal_output 
+8917,6675139,"genie.py",4242,0,"",python,selection_mouse +8918,6675592,"TERMINAL",0,0,"25",,terminal_output +8919,6675674,"genie.py",4331,0,"",python,selection_mouse +8920,6675689,"genie.py",4330,0,"",python,selection_command +8921,6676240,"genie.py",4288,0,"",python,selection_mouse +8922,6676568,"TERMINAL",0,0,"36",,terminal_output +8923,6677071,"genie.py",4287,3,"[:,",python,selection_mouse +8924,6677642,"TERMINAL",0,0,"47",,terminal_output +8925,6677691,"genie.py",4291,0,"",python,selection_mouse +8926,6678301,"genie.py",4297,0,"",python,selection_mouse +8927,6678762,"TERMINAL",0,0,"58",,terminal_output +8928,6679568,"genie.py",4331,0,"",python,selection_mouse +8929,6679572,"genie.py",4330,0,"",python,selection_command +8930,6679894,"TERMINAL",0,0,"69",,terminal_output +8931,6680916,"TERMINAL",0,0,"71:00",,terminal_output +8932,6681940,"TERMINAL",0,0,"81",,terminal_output +8933,6682980,"TERMINAL",0,0,"92",,terminal_output +8934,6684092,"TERMINAL",0,0,"7:003",,terminal_output +8935,6685115,"TERMINAL",0,0,"14",,terminal_output +8936,6686084,"TERMINAL",0,0,"25",,terminal_output +8937,6687170,"TERMINAL",0,0,"37",,terminal_output +8938,6688186,"TERMINAL",0,0,"58",,terminal_output +8939,6689313,"TERMINAL",0,0,"69",,terminal_output +8940,6690337,"TERMINAL",0,0,"710",,terminal_output +8941,6691374,"TERMINAL",0,0,"81",,terminal_output +8942,6692390,"TERMINAL",0,0,"92",,terminal_output +8943,6693403,"TERMINAL",0,0,"103",,terminal_output +8944,6694447,"TERMINAL",0,0,"14",,terminal_output +8945,6695503,"TERMINAL",0,0,"25",,terminal_output +8946,6696584,"TERMINAL",0,0,"36",,terminal_output +8947,6697608,"TERMINAL",0,0,"47",,terminal_output +8948,6698736,"TERMINAL",0,0,"58",,terminal_output +8949,6699759,"TERMINAL",0,0,"69",,terminal_output +8950,6699979,"TERMINAL",0,0,"\rinit_mask[0,:,0]",,terminal_output +8951,6700771,"TERMINAL",0,0,"720",,terminal_output +8952,6701819,"TERMINAL",0,0,"81",,terminal_output +8953,6702873,"TERMINAL",0,0,"92",,terminal_output +8954,6703957,"TERMINAL",0,0,"203",,terminal_output +8955,6704207,"TERMINAL",0,0,"[?25l0\r][?25h",,terminal_output +8956,6704313,"TERMINAL",0,0,"\r1]",,terminal_output +8957,6704488,"TERMINAL",0,0,"\r\nArray([False, True, False, False, False, False, False, False, False,\r\n False, False, False, False, False, False, False], dtype=bool)\r\n(Pdb) ",,terminal_output +8958,6704991,"TERMINAL",0,0,"14",,terminal_output +8959,6705096,"TERMINAL",0,0,"\rinit_mask[0,:,1]",,terminal_output +8960,6705772,"TERMINAL",0,0,"[?25l1\r][?25h",,terminal_output +8961,6705879,"TERMINAL",0,0,"\r2]",,terminal_output +8962,6705978,"TERMINAL",0,0,"25",,terminal_output +8963,6705997,"TERMINAL",0,0,"\r\nArray([False, True, False, False, False, False, False, False, False,\r\n False, False, False, False, False, False, False], dtype=bool)\r\n(Pdb) ",,terminal_output +8964,6706383,"TERMINAL",0,0,"\rinit_mask[0,:,2]",,terminal_output +8965,6706724,"TERMINAL",0,0,"[?25l2\r][?25h",,terminal_output +8966,6706884,"TERMINAL",0,0,"\r3]",,terminal_output +8967,6707016,"TERMINAL",0,0,"36",,terminal_output +8968,6707441,"TERMINAL",0,0,"\r\nArray([False, True, False, False, False, False, False, False, False,\r\n False, False, False, False, False, False, False], dtype=bool)\r\n(Pdb) ",,terminal_output +8969,6708004,"TERMINAL",0,0,"\rinit_mask[0,:,3]",,terminal_output +8970,6708098,"TERMINAL",0,0,"47",,terminal_output +8971,6708776,"TERMINAL",0,0,"[?25l[?25h",,terminal_output +8972,6709164,"TERMINAL",0,0,"59",,terminal_output +8973,6709559,"TERMINAL",0,0,"[?25li[?25h",,terminal_output 
+8974,6709755,"TERMINAL",0,0,"[?25l[?25h",,terminal_output +8975,6709930,"TERMINAL",0,0,"[?25l[?25h",,terminal_output +8976,6710204,"TERMINAL",0,0,"730",,terminal_output +8977,6711233,"TERMINAL",0,0,"81",,terminal_output +8978,6712369,"TERMINAL",0,0,"92",,terminal_output +8979,6713290,"TERMINAL",0,0,"303",,terminal_output +8980,6714404,"TERMINAL",0,0,"14",,terminal_output +8981,6715426,"TERMINAL",0,0,"25",,terminal_output +8982,6716422,"TERMINAL",0,0,"36",,terminal_output +8983,6717474,"TERMINAL",0,0,"47",,terminal_output +8984,6717826,"TERMINAL",0,0,"[?25l:\r,3][?25h\r1,3]",,terminal_output +8985,6718179,"TERMINAL",0,0,"[?25l,[?25h",,terminal_output +8986,6718514,"TERMINAL",0,0,"58",,terminal_output +8987,6718578,"TERMINAL",0,0,"3",,terminal_output +8988,6719331,"TERMINAL",0,0,"[?25l3\r][?25h",,terminal_output +8989,6719561,"TERMINAL",0,0,"69",,terminal_output +8990,6720414,"TERMINAL",0,0,"[?25l]\r:][?25h",,terminal_output +8991,6720614,"TERMINAL",0,0,"740",,terminal_output +8992,6720712,"TERMINAL",0,0,"\r\nArray([ True, True, True, True, True, True, True, True, True,\r\n True, True, True, True, True, True, True, True, True,\r\n True, True, True, True, True, True, True, True, True,\r\n True, True, True, True, True, True, True, True, True,\r\n True, True, True, True, True, True, True, True, True,\r\n True, True, True, True, True, True, True, True, True,\r\n True, True, True, True, True, True, True, True, True,\r\n True, True, True, True, True, True, True, True, True,\r\n True, True, True, True, True, True, True, True, True,\r\n True, True, True, True, True, True, True, True, True,\r\n True, True, True, True, True, True, True, True, True,\r\n True, True, True, True, True, True, True, True, True,\r\n True, True, True, True, True, True, True, True, True,\r\n True, True, True, True, True, True, True, True, True,\r\n True, True, True, True, True, True, True, True, True,\r\n True, True, True, True, True, True, True, True, True,\r\n True, True, True, True, True, True, True, True, True,\r\n True, True, True, True, True, True, True, True, True,\r\n True, True, True, True, True, True, True, True, True,\r\n True, True, True, True, True, True, True, True, True,\r\n True, True, True, True, True, True, True, True, True,\r\n True, True, True, True, True, True, True, True, True,\r\n True, True, True, True, True, True, True, True, True,\r\n True, True, True, True, True, True, True, True, True,\r\n True, True, True, True, True, True, True, True, True,\r\n True, True, True, True, True, True, True, True, True,\r\n True, True, True, True, True, True, True, True, True,\r\n True, True, True, True, True, True, True, True, True,\r\n True, True, True, True, True, True, True, True, True,\r\n True, True, True, True, True, True, True, True, True,\r\n True, True, True, True, True, True, True, True, True,\r\n True, True, True, True, True, True, True, True, True,\r\n True, True, True, True, True, True, True, True, True,\r\n True, True, True, True, True, True, True, True, True,\r\n True, True, True, True, True, True, True, True, True,\r\n True, True, True, True, True, True, True, True, True,\r\n True, True, True, True, True, True, True, True, True,\r\n True, True, True, True, True, True, True, True, True,\r\n True, True, True, True, True, True, True, True, True,\r\n True, True, True, True, True, True, True, True, True,\r\n True, True, True, True, True, True, True, True, True,\r\n True, True, True, True, True, True, True, True, True,\r\n True, True, True, True, True, True, True, True, True,\r\n True, True, True, True, 
True, True, True, True, True,\r\n True, True, True, True, True, True, True, True, True,\r\n True, True, True, True, True, True, True, True, True,\r\n True, True, True, True, True, True, True, True, True,\r\n True, True, True, True, True, True, True, True, True,\r\n True, True, True, True, True, True, True, True, True,\r\n True, True, True, True, True, True, True, True, True,\r\n True, True, True, True, True, True, True, True, True,\r\n True, True, True, True, True, True, True, True, True,\r\n True, True, True, True, True, True, True, True, True,\r\n True, True, True, True, True, True, True, True, True,\r\n True, True, True, True, True, True, True, True, True,\r\n True, True, True, True, True, True, True, True, True,\r\n True, True, True, True, True, True, True, True, True,\r\n True, True, True, True, True, True, True, True, True,\r\n True, True, True, True, True, True, True, True, True,\r\n True, True, True, True, True, True, True, True, True,\r\n True, True, True, True, True, True, True, True, True,\r\n True, True, True, True, True, True, True, True, True,\r\n True, True, True, True, True, True, True, True, True,\r\n True, True, True, True, True, True, True, True, True,\r\n True, True, True, True, True, True, True, True, True,\r\n True, True, True, True, True, True, True, True, True,\r\n True, True, True, True, True, True, True, True, True,\r\n True, True, True, True, True, True, True, True, True,\r\n True, True, True, True, True, True, True, True, True,\r\n True, True, True, True, True, True, True, True, True,\r\n True, True, True, True, True, True, True, True, True,\r\n True, True, True, True, True, True, True, True, True,\r\n True, True, True, True, True, True, True, True, True,\r\n True, True, True, True, True, True, True, True, True,\r\n True, True, True, True, True, True, True, True, True,\r\n True, True, True, True, True, True, True, True, True,\r\n True, True, True, True, True, True, True, True, True,\r\n True, True, True, True, True, True, True, True, True,\r\n True, True, True, True, True, True, True, True, True,\r\n True, True, True, True, True, True, True, True, True,\r\n True, True, True, True, True, True, True, True, True,\r\n True, True, True, True, True, True, True, True, True,\r\n True, True, True, True, True, True, True, True, True,\r\n True, True, True, True, True, True, True, True, True,\r\n True, True, True, True, True, True, True, True, True,\r\n True, True, True, True, True, True, True, True, True,\r\n True, True, True, True, True, True, True, True, True,\r\n True, True, True, True, True, True, True, True, True,\r\n True, True, True, True, True, True, True, True, True,\r\n True, True, True, True, True, True, True, True, True,\r\n True, True, True, True, True, True, True, True, True,\r\n True, True, True, True, True, True, True, True, True,\r\n True, True, True, True, True, True, True, True, True,\r\n True, True, True, True, True, True, True, True, True,\r\n True, True, True, True, True, True, True, True, True,\r\n True, True, True, True, True, True, True, True, True,\r\n True, True, True, True, True, True, True, True, True,\r\n True, True, True, True, True, True, True, True, True,\r\n True, True, True, True, True, True, True, True, True,\r\n True, True, True, True, True, True, True, True, True,\r\n True, True, True, True, True, True, True, True, True,\r\n True, True, True, True, True, True, True, True, True,\r\n True, True], dtype=bool)\r\n(Pdb) ",,terminal_output +8993,6721672,"TERMINAL",0,0,"81",,terminal_output +8994,6722866,"TERMINAL",0,0,"92",,terminal_output 
+8995,6723743,"TERMINAL",0,0,"403",,terminal_output +8996,6724845,"TERMINAL",0,0,"14",,terminal_output +8997,6726058,"TERMINAL",0,0,"25",,terminal_output +8998,6727107,"TERMINAL",0,0,"37",,terminal_output +8999,6728226,"TERMINAL",0,0,"58",,terminal_output +9000,6729249,"TERMINAL",0,0,"69",,terminal_output +9001,6730277,"TERMINAL",0,0,"750",,terminal_output +9002,6731400,"TERMINAL",0,0,"81",,terminal_output +9003,6732424,"TERMINAL",0,0,"92",,terminal_output +9004,6733447,"TERMINAL",0,0,"503",,terminal_output +9005,6734472,"TERMINAL",0,0,"14",,terminal_output +9006,6735492,"TERMINAL",0,0,"25",,terminal_output +9007,6736623,"TERMINAL",0,0,"36",,terminal_output +9008,6737590,"TERMINAL",0,0,"47",,terminal_output +9009,6738671,"TERMINAL",0,0,"58",,terminal_output +9010,6739694,"TERMINAL",0,0,"69",,terminal_output +9011,6740822,"TERMINAL",0,0,"72:00",,terminal_output +9012,6741773,"TERMINAL",0,0,"81",,terminal_output +9013,6742870,"TERMINAL",0,0,"92",,terminal_output +9014,6743997,"TERMINAL",0,0,"8:003",,terminal_output +9015,6744954,"TERMINAL",0,0,"14",,terminal_output +9016,6746044,"TERMINAL",0,0,"25",,terminal_output +9017,6747067,"TERMINAL",0,0,"36",,terminal_output +9018,6748097,"TERMINAL",0,0,"47",,terminal_output +9019,6749223,"TERMINAL",0,0,"59",,terminal_output +9020,6750243,"TERMINAL",0,0,"710",,terminal_output +9021,6751266,"TERMINAL",0,0,"81",,terminal_output +9022,6752391,"TERMINAL",0,0,"92",,terminal_output +9023,6752647,"genie.py",0,0,"",python,tab +9024,6752649,"genie.py",4140,0,"",python,selection_mouse +9025,6753173,"genie.py",4255,0,"",python,selection_mouse +9026,6753343,"TERMINAL",0,0,"103",,terminal_output +9027,6754440,"TERMINAL",0,0,"14",,terminal_output +9028,6755408,"TERMINAL",0,0,"25",,terminal_output +9029,6756488,"TERMINAL",0,0,"36",,terminal_output +9030,6757552,"TERMINAL",0,0,"47",,terminal_output +9031,6758533,"TERMINAL",0,0,"58",,terminal_output +9032,6759569,"TERMINAL",0,0,"69",,terminal_output +9033,6760596,"TERMINAL",0,0,"720",,terminal_output +9034,6761708,"TERMINAL",0,0,"81",,terminal_output +9035,6762735,"TERMINAL",0,0,"92",,terminal_output +9036,6763759,"TERMINAL",0,0,"203",,terminal_output +9037,6764766,"TERMINAL",0,0,"14",,terminal_output +9038,6765817,"TERMINAL",0,0,"25",,terminal_output +9039,6766945,"TERMINAL",0,0,"36",,terminal_output +9040,6767904,"TERMINAL",0,0,"47",,terminal_output +9041,6768982,"TERMINAL",0,0,"58",,terminal_output +9042,6770004,"TERMINAL",0,0,"69",,terminal_output +9043,6771051,"TERMINAL",0,0,"730",,terminal_output +9044,6772101,"TERMINAL",0,0,"81",,terminal_output +9045,6773148,"TERMINAL",0,0,"303",,terminal_output +9046,6774305,"TERMINAL",0,0,"14",,terminal_output +9047,6775330,"TERMINAL",0,0,"25",,terminal_output +9048,6776292,"TERMINAL",0,0,"36",,terminal_output +9049,6777389,"TERMINAL",0,0,"47",,terminal_output +9050,6778392,"TERMINAL",0,0,"58",,terminal_output +9051,6779468,"TERMINAL",0,0,"69",,terminal_output +9052,6780335,"genie.py",4198,0,"",python,selection_mouse +9053,6780475,"TERMINAL",0,0,"740",,terminal_output +9054,6781516,"TERMINAL",0,0,"81",,terminal_output +9055,6782564,"TERMINAL",0,0,"92",,terminal_output +9056,6783624,"TERMINAL",0,0,"403",,terminal_output +9057,6783730,"genie.py",4385,0,"",python,selection_mouse +9058,6784276,"genie.py",4331,0,"",python,selection_mouse +9059,6784654,"TERMINAL",0,0,"14",,terminal_output +9060,6786286,"TERMINAL",0,0,"26",,terminal_output +9061,6787311,"TERMINAL",0,0,"47",,terminal_output +9062,6788355,"TERMINAL",0,0,"58",,terminal_output 
+9063,6789461,"TERMINAL",0,0,"69",,terminal_output +9064,6790488,"TERMINAL",0,0,"750",,terminal_output +9065,6791511,"TERMINAL",0,0,"81",,terminal_output +9066,6792544,"TERMINAL",0,0,"92",,terminal_output +9067,6793660,"TERMINAL",0,0,"503",,terminal_output +9068,6794686,"TERMINAL",0,0,"14",,terminal_output +9069,6795708,"TERMINAL",0,0,"25",,terminal_output +9070,6796730,"TERMINAL",0,0,"36",,terminal_output +9071,6797857,"TERMINAL",0,0,"47",,terminal_output +9072,6798880,"TERMINAL",0,0,"58",,terminal_output +9073,6799906,"TERMINAL",0,0,"69",,terminal_output +9074,6800930,"TERMINAL",0,0,"73:00",,terminal_output +9075,6802057,"TERMINAL",0,0,"81",,terminal_output +9076,6803012,"TERMINAL",0,0,"92",,terminal_output +9077,6804104,"TERMINAL",0,0,"9:003",,terminal_output +9078,6805131,"TERMINAL",0,0,"15",,terminal_output +9079,6806152,"TERMINAL",0,0,"36",,terminal_output +9080,6807197,"TERMINAL",0,0,"47",,terminal_output +9081,6808305,"TERMINAL",0,0,"58",,terminal_output +9082,6809327,"TERMINAL",0,0,"69",,terminal_output +9083,6810356,"TERMINAL",0,0,"710",,terminal_output +9084,6811476,"TERMINAL",0,0,"81",,terminal_output +9085,6812439,"TERMINAL",0,0,"92",,terminal_output +9086,6813478,"TERMINAL",0,0,"103",,terminal_output +9087,6814528,"TERMINAL",0,0,"14",,terminal_output +9088,6815578,"TERMINAL",0,0,"25",,terminal_output +9089,6816701,"TERMINAL",0,0,"36",,terminal_output +9090,6817728,"TERMINAL",0,0,"47",,terminal_output +9091,6818757,"TERMINAL",0,0,"58",,terminal_output +9092,6819875,"TERMINAL",0,0,"69",,terminal_output +9093,6820898,"TERMINAL",0,0,"720",,terminal_output +9094,6821861,"TERMINAL",0,0,"81",,terminal_output +9095,6822913,"TERMINAL",0,0,"92",,terminal_output +9096,6823959,"TERMINAL",0,0,"203",,terminal_output +9097,6825004,"TERMINAL",0,0,"14",,terminal_output +9098,6826121,"TERMINAL",0,0,"25",,terminal_output +9099,6827144,"TERMINAL",0,0,"36",,terminal_output +9100,6828168,"TERMINAL",0,0,"58",,terminal_output +9101,6829202,"TERMINAL",0,0,"69",,terminal_output +9102,6830259,"TERMINAL",0,0,"730",,terminal_output +9103,6831343,"TERMINAL",0,0,"81",,terminal_output +9104,6832362,"TERMINAL",0,0,"92",,terminal_output +9105,6833492,"TERMINAL",0,0,"303",,terminal_output +9106,6834451,"TERMINAL",0,0,"14",,terminal_output +9107,6835486,"TERMINAL",0,0,"25",,terminal_output +9108,6836537,"TERMINAL",0,0,"36",,terminal_output +9109,6837578,"TERMINAL",0,0,"47",,terminal_output +9110,6838715,"TERMINAL",0,0,"58",,terminal_output +9111,6839694,"TERMINAL",0,0,"69",,terminal_output +9112,6840733,"TERMINAL",0,0,"740",,terminal_output +9113,6841833,"TERMINAL",0,0,"81",,terminal_output +9114,6842889,"TERMINAL",0,0,"92",,terminal_output +9115,6843971,"TERMINAL",0,0,"403",,terminal_output +9116,6845043,"TERMINAL",0,0,"14",,terminal_output +9117,6846018,"TERMINAL",0,0,"25",,terminal_output +9118,6847011,"TERMINAL",0,0,"36",,terminal_output +9119,6848069,"TERMINAL",0,0,"47",,terminal_output +9120,6849168,"TERMINAL",0,0,"58",,terminal_output +9121,6850185,"TERMINAL",0,0,"750",,terminal_output +9122,6851222,"TERMINAL",0,0,"81",,terminal_output +9123,6851555,"genie.py",0,0,"",python,tab +9124,6851556,"genie.py",3658,0,"",python,selection_mouse +9125,6852201,"genie.py",3981,0,"",python,selection_mouse +9126,6852285,"TERMINAL",0,0,"92",,terminal_output +9127,6853287,"TERMINAL",0,0,"503",,terminal_output +9128,6854341,"TERMINAL",0,0,"14",,terminal_output +9129,6855407,"TERMINAL",0,0,"25",,terminal_output +9130,6855737,"genie.py",3998,0,"",python,selection_mouse 
+9131,6856569,"genie.py",4059,0,"",python,selection_mouse +9132,6856581,"TERMINAL",0,0,"36",,terminal_output +9133,6857486,"TERMINAL",0,0,"47",,terminal_output +9134,6858542,"TERMINAL",0,0,"58",,terminal_output +9135,6859593,"TERMINAL",0,0,"69",,terminal_output +9136,6860636,"TERMINAL",0,0,"74:00",,terminal_output +9137,6861683,"TERMINAL",0,0,"81",,terminal_output +9138,6862781,"TERMINAL",0,0,"92",,terminal_output +9139,6863778,"TERMINAL",0,0,"10:003",,terminal_output +9140,6864828,"TERMINAL",0,0,"14",,terminal_output +9141,6865955,"TERMINAL",0,0,"25",,terminal_output +9142,6866906,"TERMINAL",0,0,"36",,terminal_output +9143,6867993,"TERMINAL",0,0,"47",,terminal_output +9144,6869027,"TERMINAL",0,0,"58",,terminal_output +9145,6870136,"TERMINAL",0,0,"69",,terminal_output +9146,6870741,"genie.py",4298,0,"",python,selection_mouse +9147,6871076,"TERMINAL",0,0,"710",,terminal_output +9148,6871256,"genie.py",4332,0,"",python,selection_mouse +9149,6872201,"TERMINAL",0,0,"82",,terminal_output +9150,6873157,"TERMINAL",0,0,"103",,terminal_output +9151,6874252,"TERMINAL",0,0,"14",,terminal_output +9152,6875262,"TERMINAL",0,0,"25",,terminal_output +9153,6876077,"TERMINAL",0,0,"t",,terminal_output +9154,6876238,"TERMINAL",0,0,"[?25lo[?25h",,terminal_output +9155,6876299,"TERMINAL",0,0,"36",,terminal_output +9156,6876540,"TERMINAL",0,0,"[?25lk[?25h",,terminal_output +9157,6876605,"TERMINAL",0,0,"[?25le[?25h",,terminal_output +9158,6876784,"TERMINAL",0,0,"[?25ln[?25h",,terminal_output +9159,6877424,"TERMINAL",0,0,"47",,terminal_output +9160,6878385,"TERMINAL",0,0,"58",,terminal_output +9161,6879453,"TERMINAL",0,0,"69",,terminal_output +9162,6880474,"TERMINAL",0,0,"720",,terminal_output +9163,6881520,"TERMINAL",0,0,"81",,terminal_output +9164,6882565,"TERMINAL",0,0,"92",,terminal_output +9165,6883569,"TERMINAL",0,0,"\r",,terminal_output +9166,6883623,"TERMINAL",0,0,"203",,terminal_output +9167,6884695,"TERMINAL",0,0,"14",,terminal_output +9168,6885718,"TERMINAL",0,0,"25",,terminal_output +9169,6886751,"TERMINAL",0,0,"36",,terminal_output +9170,6887870,"TERMINAL",0,0,"47",,terminal_output +9171,6888895,"TERMINAL",0,0,"58",,terminal_output +9172,6889917,"TERMINAL",0,0,"69",,terminal_output +9173,6890941,"TERMINAL",0,0,"730",,terminal_output +9174,6892067,"TERMINAL",0,0,"81",,terminal_output +9175,6893028,"TERMINAL",0,0,"92",,terminal_output +9176,6894115,"TERMINAL",0,0,"303",,terminal_output +9177,6895141,"TERMINAL",0,0,"15",,terminal_output +9178,6896167,"TERMINAL",0,0,"36",,terminal_output +9179,6897231,"TERMINAL",0,0,"47",,terminal_output +9180,6898316,"TERMINAL",0,0,"58",,terminal_output +9181,6899300,"TERMINAL",0,0,"69",,terminal_output +9182,6900340,"TERMINAL",0,0,"740",,terminal_output +9183,6901496,"TERMINAL",0,0,"81",,terminal_output +9184,6902511,"TERMINAL",0,0,"92",,terminal_output +9185,6903542,"TERMINAL",0,0,"403",,terminal_output +9186,6904563,"TERMINAL",0,0,"14",,terminal_output +9187,6905554,"TERMINAL",0,0,"25",,terminal_output +9188,6906592,"TERMINAL",0,0,"36",,terminal_output +9189,6907643,"TERMINAL",0,0,"47",,terminal_output +9190,6908747,"TERMINAL",0,0,"58",,terminal_output +9191,6909759,"TERMINAL",0,0,"69",,terminal_output +9192,6910807,"TERMINAL",0,0,"750",,terminal_output +9193,6911837,"TERMINAL",0,0,"81",,terminal_output +9194,6912922,"TERMINAL",0,0,"92",,terminal_output +9195,6913984,"TERMINAL",0,0,"503",,terminal_output +9196,6915005,"TERMINAL",0,0,"14",,terminal_output +9197,6916029,"TERMINAL",0,0,"25",,terminal_output +9198,6917054,"TERMINAL",0,0,"36",,terminal_output 
+9199,6918067,"TERMINAL",0,0,"47",,terminal_output +9200,6919135,"TERMINAL",0,0,"59",,terminal_output +9201,6920280,"TERMINAL",0,0,"75:00",,terminal_output +9202,6921252,"TERMINAL",0,0,"81",,terminal_output +9203,6922297,"TERMINAL",0,0,"92",,terminal_output +9204,6922785,"genie.py",0,0,"",python,tab +9205,6922786,"genie.py",4331,0,"",python,selection_mouse +9206,6923231,"genie.py",4255,0,"",python,selection_mouse +9207,6923359,"TERMINAL",0,0,"1:003",,terminal_output +9208,6924426,"TERMINAL",0,0,"14",,terminal_output +9209,6924728,"genie.py",4329,2,"",python,content +9210,6925082,"genie.py",4319,10,"break",python,content +9211,6925459,"genie.py",4319,5,"",python,content +9212,6925497,"TERMINAL",0,0,"25",,terminal_output +9213,6925776,"genie.py",4306,13,"",python,content +9214,6926102,"genie.py",4307,0," init_mask = mask.astype(bool)\n",python,content +9215,6926490,"genie.py",4306,42,"",python,content +9216,6926502,"TERMINAL",0,0,"36",,terminal_output +9217,6926865,"genie.py",4307,0," init_mask = mask.astype(bool)\n",python,content +9218,6927519,"TERMINAL",0,0,"47",,terminal_output +9219,6927798,"genie.py",4268,5,"",python,content +9220,6928228,"genie.py",4256,12," ",python,content +9221,6928228,"genie.py",4204,12," ",python,content +9222,6928553,"TERMINAL",0,0,"58",,terminal_output +9223,6928810,"genie.py",4252,8," ",python,content +9224,6928811,"genie.py",4204,8," ",python,content +9225,6929173,"genie.py",4204,81,"",python,content +9226,6929632,"genie.py",4204,0," mask = (jnp.arange(S)[None, :, None] >= T) # shape (1, S, 1)\n mask = jnp.broadcast_to(mask, (B, S, N)) # shape (B, S, N)",python,content +9227,6929723,"TERMINAL",0,0,"69",,terminal_output +9228,6930673,"TERMINAL",0,0,"710",,terminal_output +9229,6931266,"genie.py",4342,0,"",python,selection_mouse +9230,6931571,"genie.py",4394,0,"",python,selection_mouse +9231,6931676,"TERMINAL",0,0,"81",,terminal_output +9232,6932250,"genie.py",4393,0,"",python,selection_mouse +9233,6932843,"TERMINAL",0,0,"92",,terminal_output +9234,6933353,"genie.py",4393,0,"\n ",python,content +9235,6933789,"TERMINAL",0,0,"103",,terminal_output +9236,6934820,"TERMINAL",0,0,"14",,terminal_output +9237,6935908,"TERMINAL",0,0,"25",,terminal_output +9238,6936251,"genie.py",4406,0,"t",python,content +9239,6936252,"genie.py",4407,0,"",python,selection_keyboard +9240,6936267,"genie.py",4407,0,"o",python,content +9241,6936268,"genie.py",4408,0,"",python,selection_keyboard +9242,6936351,"genie.py",4408,0,"k",python,content +9243,6936352,"genie.py",4409,0,"",python,selection_keyboard +9244,6936494,"genie.py",4409,0,"e",python,content +9245,6936496,"genie.py",4410,0,"",python,selection_keyboard +9246,6936975,"TERMINAL",0,0,"36",,terminal_output +9247,6937441,"genie.py",4406,4,"token_idxs",python,content +9248,6938012,"TERMINAL",0,0,"47",,terminal_output +9249,6938653,"genie.py",4416,0," ",python,content +9250,6938654,"genie.py",4417,0,"",python,selection_keyboard +9251,6938919,"genie.py",4417,0,"+",python,content +9252,6938919,"genie.py",4418,0,"",python,selection_keyboard +9253,6939014,"TERMINAL",0,0,"58",,terminal_output +9254,6939359,"genie.py",4417,1,"",python,content +9255,6939543,"genie.py",4417,0,"*",python,content +9256,6939544,"genie.py",4418,0,"",python,selection_keyboard +9257,6940073,"TERMINAL",0,0,"69",,terminal_output +9258,6940399,"genie.py",4418,0,"=",python,content +9259,6940399,"genie.py",4419,0,"",python,selection_keyboard +9260,6940770,"genie.py",4419,0," ",python,content +9261,6940771,"genie.py",4420,0,"",python,selection_keyboard 
+9262,6941181,"TERMINAL",0,0,"721",,terminal_output +9263,6941202,"genie.py",4420,0,"~",python,content +9264,6941203,"genie.py",4421,0,"",python,selection_keyboard +9265,6942151,"TERMINAL",0,0,"92",,terminal_output +9266,6942398,"genie.py",4421,0,"i",python,content +9267,6942398,"genie.py",4422,0,"",python,selection_keyboard +9268,6942537,"genie.py",4422,0,"n",python,content +9269,6942538,"genie.py",4423,0,"",python,selection_keyboard +9270,6942674,"genie.py",4423,0,"i",python,content +9271,6942675,"genie.py",4424,0,"",python,selection_keyboard +9272,6943236,"TERMINAL",0,0,"203",,terminal_output +9273,6943532,"genie.py",4421,3,"init_mask",python,content +9274,6944289,"TERMINAL",0,0,"14",,terminal_output +9275,6945316,"TERMINAL",0,0,"25",,terminal_output +9276,6945516,"genie.py",4430,0,"\n ",python,content +9277,6946344,"TERMINAL",0,0,"36",,terminal_output +9278,6946529,"genie.py",4443,0,"b",python,content +9279,6946530,"genie.py",4444,0,"",python,selection_keyboard +9280,6946623,"genie.py",4444,0,"r",python,content +9281,6946624,"genie.py",4445,0,"",python,selection_keyboard +9282,6946841,"genie.py",4445,0,"e",python,content +9283,6946842,"genie.py",4446,0,"",python,selection_keyboard +9284,6947064,"genie.py",4446,0,"a",python,content +9285,6947065,"genie.py",4447,0,"",python,selection_keyboard +9286,6947418,"TERMINAL",0,0,"47",,terminal_output +9287,6947627,"genie.py",4443,4,"breakpoint",python,content +9288,6948296,"genie.py",4453,0,")",python,content +9289,6948296,"genie.py",4454,0,"",python,selection_keyboard +9290,6948479,"TERMINAL",0,0,"58",,terminal_output +9291,6949466,"genie.py",4453,1,"",python,content +9292,6949516,"TERMINAL",0,0,"69",,terminal_output +9293,6949982,"genie.py",4453,0,"()",python,content +9294,6949983,"genie.py",4454,0,"",python,selection_keyboard +9295,6950296,"genie.py",4453,0,"",python,selection_command +9296,6950540,"TERMINAL",0,0,"730",,terminal_output +9297,6951579,"TERMINAL",0,0,"81",,terminal_output +9298,6952109,"TERMINAL",0,0,"^D\r\njax.errors.SimplifiedTraceback: For simplicity, JAX has removed its internal frames from the traceback of the following exception. 
Set JAX_TRACEBACK_FILTERING=off to include these.\r\n\r\nThe above exception was the direct cause of the following exception:\r\n\r\nTraceback (most recent call last):\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/sample.py"", line 157, in \r\n vid = _autoreg_sample_mihir(rng, video_batch, action_batch)\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/sample.py"", line 118, in _autoreg_sample_mihir\r\n generated_vid = genie.apply(\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/genie.py"", line 129, in sample_mihir\r\n breakpoint()\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/genie.py"", line 129, in sample_mihir\r\n breakpoint()\r\n File ""/home/hk-project-p0023960/tum_cte0515/.local/share/uv/python/cpython-3.10.18-linux-x86_64-gnu/lib/python3.10/bdb.py"", line 90, in trace_dispatch\r\n return self.dispatch_line(frame)\r\n File ""/home/hk-project-p0023960/tum_cte0515/.local/share/uv/python/cpython-3.10.18-linux-x86_64-gnu/lib/python3.10/bdb.py"", line 115, in dispatch_line\r\n if self.quitting: raise BdbQuit\r\nbdb.BdbQuit\r\n",,terminal_output +9299,6952619,"TERMINAL",0,0,"92",,terminal_output +9300,6953304,"TERMINAL",0,0,"]0;tum_cte0515@hkn0401:~/Projects/jafar[?2004h(jafar) [tum_cte0515@hkn0401 jafar]$ ",,terminal_output +9301,6953713,"TERMINAL",0,0,"303",,terminal_output +9302,6953766,"TERMINAL",0,0,"sh scripts_horeka/overfit_sample_tiny/sample.sh ",,terminal_output +9303,6954151,"TERMINAL",0,0,"\r\n[?2004l\r",,terminal_output +9304,6954265,"TERMINAL",0,0,"Sampling from checkpoint: /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/3309699/genie_1751384516_74500/\r\n",,terminal_output +9305,6954738,"TERMINAL",0,0,"14",,terminal_output +9306,6955753,"TERMINAL",0,0,"25",,terminal_output +9307,6956802,"TERMINAL",0,0,"36",,terminal_output +9308,6956910,"TERMINAL",0,0,"2025-07-03 18:11:33.810898: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +9309,6957912,"TERMINAL",0,0,"47",,terminal_output +9310,6958894,"TERMINAL",0,0,"58",,terminal_output +9311,6959962,"TERMINAL",0,0,"69",,terminal_output +9312,6961086,"TERMINAL",0,0,"740",,terminal_output +9313,6962110,"TERMINAL",0,0,"81",,terminal_output +9314,6963082,"TERMINAL",0,0,"92",,terminal_output +9315,6964158,"TERMINAL",0,0,"404",,terminal_output +9316,6965184,"TERMINAL",0,0,"25",,terminal_output +9317,6966308,"TERMINAL",0,0,"36",,terminal_output +9318,6967267,"TERMINAL",0,0,"47",,terminal_output +9319,6968307,"TERMINAL",0,0,"58",,terminal_output +9320,6969383,"TERMINAL",0,0,"69",,terminal_output +9321,6969687,"TERMINAL",0,0,"2025-07-03 18:11:46.563168: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. 
Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +9322,6970509,"TERMINAL",0,0,"750",,terminal_output +9323,6971533,"TERMINAL",0,0,"81",,terminal_output +9324,6972559,"TERMINAL",0,0,"92",,terminal_output +9325,6973580,"TERMINAL",0,0,"503",,terminal_output +9326,6974603,"TERMINAL",0,0,"14",,terminal_output +9327,6975631,"TERMINAL",0,0,"25",,terminal_output +9328,6976679,"TERMINAL",0,0,"36",,terminal_output +9329,6977725,"TERMINAL",0,0,"47",,terminal_output +9330,6978801,"TERMINAL",0,0,"58",,terminal_output +9331,6979928,"TERMINAL",0,0,"69",,terminal_output +9332,6980951,"TERMINAL",0,0,"76:00",,terminal_output +9333,6981976,"TERMINAL",0,0,"81",,terminal_output +9334,6982980,"TERMINAL",0,0,"92",,terminal_output +9335,6984126,"TERMINAL",0,0,"2:003",,terminal_output +9336,6985047,"TERMINAL",0,0,"2025-07-03 18:12:01.860313: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +9337,6985101,"TERMINAL",0,0,"14",,terminal_output +9338,6986122,"TERMINAL",0,0,"26",,terminal_output +9339,6987198,"TERMINAL",0,0,"47",,terminal_output +9340,6988325,"TERMINAL",0,0,"58",,terminal_output +9341,6989348,"TERMINAL",0,0,"69",,terminal_output +9342,6990373,"TERMINAL",0,0,"710",,terminal_output +9343,6991398,"TERMINAL",0,0,"81",,terminal_output +9344,6992319,"TERMINAL",0,0,"2025-07-03 18:12:09.215829: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +9345,6992427,"TERMINAL",0,0,"92",,terminal_output +9346,6993547,"TERMINAL",0,0,"103",,terminal_output +9347,6994513,"TERMINAL",0,0,"14",,terminal_output +9348,6995596,"TERMINAL",0,0,"25",,terminal_output +9349,6995904,"TERMINAL",0,0,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/orbax/checkpoint/_src/serialization/type_handlers.py:1251: UserWarning: Sharding info not provided when restoring. Populating sharding info from sharding file. Please note restoration time will be slightly increased due to reading from file. Note also that this option is unsafe when restoring on a different topology than the checkpoint was saved with.\r\n warnings.warn(\r\n",,terminal_output +9350,6996621,"TERMINAL",0,0,"36",,terminal_output +9351,6997657,"TERMINAL",0,0,"47",,terminal_output +9352,6998769,"TERMINAL",0,0,"58",,terminal_output +9353,6999794,"TERMINAL",0,0,"69",,terminal_output +9354,7000799,"TERMINAL",0,0,"720",,terminal_output +9355,7001944,"TERMINAL",0,0,"81",,terminal_output +9356,7002900,"TERMINAL",0,0,"92",,terminal_output +9357,7003584,"TERMINAL",0,0,"2025-07-03 18:12:20.427657: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. 
Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +9358,7003992,"TERMINAL",0,0,"203",,terminal_output +9359,7005022,"TERMINAL",0,0,"14",,terminal_output +9360,7006143,"TERMINAL",0,0,"25",,terminal_output +9361,7006456,"TERMINAL",0,0,"2025-07-03 18:12:23.261276: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +9362,7007166,"TERMINAL",0,0,"36",,terminal_output +9363,7008190,"TERMINAL",0,0,"58",,terminal_output +9364,7009214,"TERMINAL",0,0,"69",,terminal_output +9365,7010341,"TERMINAL",0,0,"730",,terminal_output +9366,7011367,"TERMINAL",0,0,"81",,terminal_output +9367,7012366,"TERMINAL",0,0,"92",,terminal_output +9368,7013399,"TERMINAL",0,0,"303",,terminal_output +9369,7014540,"TERMINAL",0,0,"2025-07-03 18:12:31.352306: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +9370,7014559,"TERMINAL",0,0,"14",,terminal_output +9371,7015569,"TERMINAL",0,0,"25",,terminal_output +9372,7015972,"TERMINAL",0,0,"token_idxs shape: (1, 1, 920)\r\n",,terminal_output +9373,7016201,"TERMINAL",0,0,"token_idxs shape: (1, 16, 920)\r\naction_tokens.shape: (1, 15, 1, 32)\r\n",,terminal_output +9374,7016423,"TERMINAL",0,0,"> /hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/genie.py(131)sample_mihir()\r\n-> assert init_mask.shape == (B, S, N), ""Wrong mask shape""\r\n(Pdb) ",,terminal_output +9375,7016552,"TERMINAL",0,0,"36",,terminal_output +9376,7017601,"TERMINAL",0,0,"47",,terminal_output +9377,7018738,"TERMINAL",0,0,"58",,terminal_output +9378,7019761,"TERMINAL",0,0,"69",,terminal_output +9379,7020786,"TERMINAL",0,0,"740",,terminal_output +9380,7021813,"TERMINAL",0,0,"81",,terminal_output +9381,7022980,"TERMINAL",0,0,"92",,terminal_output +9382,7023960,"TERMINAL",0,0,"403",,terminal_output +9383,7024981,"TERMINAL",0,0,"14",,terminal_output +9384,7026111,"TERMINAL",0,0,"25",,terminal_output +9385,7027135,"TERMINAL",0,0,"36",,terminal_output +9386,7028159,"TERMINAL",0,0,"48",,terminal_output +9387,7029287,"TERMINAL",0,0,"69",,terminal_output +9388,7030245,"TERMINAL",0,0,"750",,terminal_output +9389,7031334,"TERMINAL",0,0,"81",,terminal_output +9390,7032358,"TERMINAL",0,0,"92",,terminal_output +9391,7033393,"TERMINAL",0,0,"503",,terminal_output +9392,7034508,"TERMINAL",0,0,"14",,terminal_output +9393,7035491,"TERMINAL",0,0,"25",,terminal_output +9394,7036580,"TERMINAL",0,0,"36",,terminal_output +9395,7037590,"TERMINAL",0,0,"47",,terminal_output +9396,7038634,"TERMINAL",0,0,"58",,terminal_output +9397,7039685,"TERMINAL",0,0,"69",,terminal_output +9398,7040754,"TERMINAL",0,0,"77:00",,terminal_output +9399,7041839,"TERMINAL",0,0,"81",,terminal_output +9400,7042905,"TERMINAL",0,0,"92",,terminal_output +9401,7043929,"TERMINAL",0,0,"3:003",,terminal_output +9402,7044952,"TERMINAL",0,0,"14",,terminal_output +9403,7045977,"TERMINAL",0,0,"25",,terminal_output +9404,7047103,"TERMINAL",0,0,"36",,terminal_output +9405,7047881,"TERMINAL",0,0,"[?25lto[?25h[?25lo[?25h",,terminal_output 
+9406,7047986,"TERMINAL",0,0,"[?25lk[?25h",,terminal_output +9407,7048156,"TERMINAL",0,0,"[?25le[?25h",,terminal_output +9408,7048156,"TERMINAL",0,0,"47",,terminal_output +9409,7048286,"TERMINAL",0,0,"[?25ln[?25h",,terminal_output +9410,7048579,"TERMINAL",0,0,"[?25l_[?25h",,terminal_output +9411,7048839,"TERMINAL",0,0,"[?25lo[?25h",,terminal_output +9412,7049151,"TERMINAL",0,0,"59",,terminal_output +9413,7049497,"TERMINAL",0,0,"[?25lo\r[?25h",,terminal_output +9414,7049703,"TERMINAL",0,0,"[?25li[?25h",,terminal_output +9415,7049857,"TERMINAL",0,0,"[?25ld[?25h",,terminal_output +9416,7050150,"TERMINAL",0,0,"[?25lx[?25h",,terminal_output +9417,7050209,"TERMINAL",0,0,"710",,terminal_output +9418,7050319,"TERMINAL",0,0,"[?25ls[?25h",,terminal_output +9419,7051302,"TERMINAL",0,0,"81",,terminal_output +9420,7052288,"TERMINAL",0,0,"[?25l.[?25h",,terminal_output +9421,7052289,"TERMINAL",0,0,"92",,terminal_output +9422,7052510,"TERMINAL",0,0,"[?25ls[?25h",,terminal_output +9423,7052753,"TERMINAL",0,0,"[?25la[?25h",,terminal_output +9424,7053107,"TERMINAL",0,0,"[?25la\r[?25h",,terminal_output +9425,7053375,"TERMINAL",0,0,"[?25lh[?25h",,terminal_output +9426,7053375,"TERMINAL",0,0,"103",,terminal_output +9427,7053484,"TERMINAL",0,0,"[?25la[?25h",,terminal_output +9428,7053566,"TERMINAL",0,0,"[?25lp[?25h",,terminal_output +9429,7053676,"TERMINAL",0,0,"[?25le[?25h",,terminal_output +9430,7053739,"TERMINAL",0,0,"\r\n(1, 16, 920)\r\n(Pdb) ",,terminal_output +9431,7054477,"TERMINAL",0,0,"14",,terminal_output +9432,7054529,"TERMINAL",0,0,"\rtoken_idxs.shape",,terminal_output +9433,7055500,"TERMINAL",0,0,"25",,terminal_output +9434,7056274,"TERMINAL",0,0,"[?25le\r[?25h",,terminal_output +9435,7056457,"TERMINAL",0,0,"[?25lp\r[?25h",,terminal_output +9436,7056497,"TERMINAL",0,0,"36",,terminal_output +9437,7056583,"TERMINAL",0,0,"[?25la\r[?25h",,terminal_output +9438,7056690,"TERMINAL",0,0,"[?25lh\r[?25h",,terminal_output +9439,7056843,"TERMINAL",0,0,"[?25ls\r[?25h",,terminal_output +9440,7057184,"TERMINAL",0,0,"[?25l.\r[?25h",,terminal_output +9441,7057549,"TERMINAL",0,0,"47",,terminal_output +9442,7057655,"TERMINAL",0,0,"[",,terminal_output +9443,7058135,"TERMINAL",0,0,"[?25l:[?25h",,terminal_output +9444,7058583,"TERMINAL",0,0,"58",,terminal_output +9445,7059443,"TERMINAL",0,0,"[?25l,[?25h",,terminal_output +9446,7059634,"TERMINAL",0,0,"69",,terminal_output +9447,7060680,"TERMINAL",0,0,"720",,terminal_output +9448,7061240,"TERMINAL",0,0,"[?25l,\r[?25h",,terminal_output +9449,7061347,"TERMINAL",0,0,"[?25l:\r[?25h",,terminal_output +9450,7061758,"TERMINAL",0,0,"81",,terminal_output +9451,7061819,"TERMINAL",0,0,"[?25l0[?25h",,terminal_output +9452,7062574,"TERMINAL",0,0,"[?25l,[?25h",,terminal_output +9453,7062872,"TERMINAL",0,0,"92",,terminal_output +9454,7063773,"TERMINAL",0,0,"[?25l0[?25h",,terminal_output +9455,7063836,"TERMINAL",0,0,"203",,terminal_output +9456,7064350,"TERMINAL",0,0,"[?25l0\r[?25h",,terminal_output +9457,7064932,"TERMINAL",0,0,"14",,terminal_output +9458,7065112,"TERMINAL",0,0,"[?25l:[?25h",,terminal_output +9459,7065417,"TERMINAL",0,0,"[?25l,[?25h",,terminal_output +9460,7065674,"TERMINAL",0,0,"[?25l0[?25h",,terminal_output +9461,7065966,"TERMINAL",0,0,"25",,terminal_output +9462,7065967,"TERMINAL",0,0,"[?25l][?25h",,terminal_output +9463,7066226,"TERMINAL",0,0,"\r\nArray([151, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\r\n 0, 0, 0], dtype=int32)\r\n(Pdb) ",,terminal_output +9464,7066971,"TERMINAL",0,0,"36",,terminal_output +9465,7068021,"TERMINAL",0,0,"47",,terminal_output 
+9466,7069119,"TERMINAL",0,0,"58",,terminal_output +9467,7070143,"TERMINAL",0,0,"630",,terminal_output +9468,7071269,"TERMINAL",0,0,"81",,terminal_output +9469,7071577,"TERMINAL",0,0,"\rtoken_idxs[0,:,0]",,terminal_output +9470,7072324,"TERMINAL",0,0,"92",,terminal_output +9471,7072383,"TERMINAL",0,0,"[?25l0[?25h",,terminal_output +9472,7072649,"TERMINAL",0,0,"[?25l0\r][?25h",,terminal_output +9473,7073258,"TERMINAL",0,0,"303",,terminal_output +9474,7073384,"TERMINAL",0,0,"\r1]",,terminal_output +9475,7073438,"TERMINAL",0,0,"[?25l]\r0][?25h",,terminal_output +9476,7073650,"TERMINAL",0,0,"\r\nArray([287, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\r\n 0, 0, 0], dtype=int32)\r\n(Pdb) ",,terminal_output +9477,7074123,"TERMINAL",0,0,"\rtoken_idxs[0,:,10]",,terminal_output +9478,7074322,"TERMINAL",0,0,"14",,terminal_output +9479,7074860,"TERMINAL",0,0,"[?25l0\r][?25h",,terminal_output +9480,7075221,"TERMINAL",0,0,"\r1]",,terminal_output +9481,7075327,"TERMINAL",0,0,"\r\nArray([287, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\r\n 0, 0, 0], dtype=int32)\r\n(Pdb) ",,terminal_output +9482,7075380,"TERMINAL",0,0,"25",,terminal_output +9483,7075780,"TERMINAL",0,0,"\rtoken_idxs[0,:,11]",,terminal_output +9484,7076421,"TERMINAL",0,0,"36",,terminal_output +9485,7076646,"TERMINAL",0,0,"[?25l1\r][?25h",,terminal_output +9486,7076754,"TERMINAL",0,0,"\r2]",,terminal_output +9487,7076890,"TERMINAL",0,0,"\r\nArray([842, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\r\n 0, 0, 0], dtype=int32)\r\n(Pdb) ",,terminal_output +9488,7077332,"TERMINAL",0,0,"\rtoken_idxs[0,:,12]",,terminal_output +9489,7077479,"TERMINAL",0,0,"47",,terminal_output +9490,7077910,"TERMINAL",0,0,"[?25l2\r][?25h",,terminal_output +9491,7078090,"TERMINAL",0,0,"\r3]",,terminal_output +9492,7078153,"TERMINAL",0,0,"\r\nArray([521, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\r\n 0, 0, 0], dtype=int32)\r\n(Pdb) ",,terminal_output +9493,7078563,"TERMINAL",0,0,"\rtoken_idxs[0,:,13]",,terminal_output +9494,7078563,"TERMINAL",0,0,"58",,terminal_output +9495,7079581,"TERMINAL",0,0,"69",,terminal_output +9496,7080352,"TERMINAL",0,0,"[?25l3\r][?25h",,terminal_output +9497,7080643,"TERMINAL",0,0,"\r4]",,terminal_output +9498,7080679,"TERMINAL",0,0,"740",,terminal_output +9499,7081020,"TERMINAL",0,0,"\r\nArray([694, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\r\n 0, 0, 0], dtype=int32)\r\n(Pdb) ",,terminal_output +9500,7081710,"TERMINAL",0,0,"81",,terminal_output +9501,7081711,"TERMINAL",0,0,"\rtoken_idxs[0,:,14]",,terminal_output +9502,7082243,"TERMINAL",0,0,"\r",,terminal_output +9503,7082724,"TERMINAL",0,0,"92",,terminal_output +9504,7083125,"TERMINAL",0,0,"^D\r\njax.errors.SimplifiedTraceback: For simplicity, JAX has removed its internal frames from the traceback of the following exception. 
Set JAX_TRACEBACK_FILTERING=off to include these.\r\n\r\nThe above exception was the direct cause of the following exception:\r\n\r\nTraceback (most recent call last):\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/sample.py"", line 157, in \r\n vid = _autoreg_sample_mihir(rng, video_batch, action_batch)\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/sample.py"", line 118, in _autoreg_sample_mihir\r\n generated_vid = genie.apply(\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/genie.py"", line 131, in sample_mihir\r\n assert init_mask.shape == (B, S, N), ""Wrong mask shape""\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/genie.py"", line 131, in sample_mihir\r\n assert init_mask.shape == (B, S, N), ""Wrong mask shape""\r\n File ""/home/hk-project-p0023960/tum_cte0515/.local/share/uv/python/cpython-3.10.18-linux-x86_64-gnu/lib/python3.10/bdb.py"", line 90, in trace_dispatch\r\n return self.dispatch_line(frame)\r\n File ""/home/hk-project-p0023960/tum_cte0515/.local/share/uv/python/cpython-3.10.18-linux-x86_64-gnu/lib/python3.10/bdb.py"", line 115, in dispatch_line\r\n if self.quitting: raise BdbQuit\r\nbdb.BdbQuit\r\n",,terminal_output +9505,7083777,"TERMINAL",0,0,"403",,terminal_output +9506,7084392,"TERMINAL",0,0,"]0;tum_cte0515@hkn0401:~/Projects/jafar[?2004h(jafar) [tum_cte0515@hkn0401 jafar]$ ",,terminal_output +9507,7084889,"TERMINAL",0,0,"14",,terminal_output +9508,7085912,"TERMINAL",0,0,"25",,terminal_output +9509,7086037,"genie.py",0,0,"",python,tab +9510,7086038,"genie.py",4455,0,"",python,selection_mouse +9511,7086183,"genie.py",4454,0,"",python,selection_command +9512,7087041,"TERMINAL",0,0,"36",,terminal_output +9513,7087326,"genie.py",4431,25,"",python,content +9514,7088032,"TERMINAL",0,0,"47",,terminal_output +9515,7089021,"TERMINAL",0,0,"58",,terminal_output +9516,7090081,"TERMINAL",0,0,"69",,terminal_output +9517,7090615,"genie.py",4431,0,"\n breakpoint()",python,content +9518,7090694,"genie.py",4444,0,"",python,selection_command +9519,7091105,"genie.py",4431,0,"",python,selection_command +9520,7091160,"TERMINAL",0,0,"751",,terminal_output +9521,7092226,"genie.py",4431,1,"",python,content +9522,7092227,"TERMINAL",0,0,"92",,terminal_output +9523,7092305,"genie.py",4443,0,"",python,selection_command +9524,7093083,"genie.py",4455,0,"\n ",python,content +9525,7093222,"TERMINAL",0,0,"503",,terminal_output +9526,7093555,"genie.py",4456,12,"",python,content +9527,7093862,"genie.py",4431,0,"",python,selection_command +9528,7094009,"genie.py",4394,0,"",python,selection_command +9529,7094156,"genie.py",4352,0,"",python,selection_command +9530,7094302,"genie.py",4278,0,"",python,selection_command +9531,7094314,"TERMINAL",0,0,"14",,terminal_output +9532,7094605,"genie.py",4352,0,"",python,selection_command +9533,7094925,"genie.py",4394,0,"",python,selection_command +9534,7095226,"genie.py",4430,0,"\n ",python,content +9535,7095350,"TERMINAL",0,0,"25",,terminal_output +9536,7096201,"genie.py",4443,0,"p",python,content +9537,7096201,"genie.py",4444,0,"",python,selection_keyboard +9538,7096271,"genie.py",4444,0,"r",python,content +9539,7096272,"genie.py",4445,0,"",python,selection_keyboard +9540,7096408,"genie.py",4445,0,"i",python,content +9541,7096410,"genie.py",4446,0,"",python,selection_keyboard +9542,7096450,"TERMINAL",0,0,"36",,terminal_output +9543,7097479,"TERMINAL",0,0,"47",,terminal_output +9544,7097582,"genie.py",4443,3,"print",python,content 
+9545,7098362,"genie.py",4448,0,"()",python,content +9546,7098363,"genie.py",4449,0,"",python,selection_keyboard +9547,7098495,"TERMINAL",0,0,"58",,terminal_output +9548,7099541,"TERMINAL",0,0,"69",,terminal_output +9549,7100637,"TERMINAL",0,0,"78:00",,terminal_output +9550,7101693,"TERMINAL",0,0,"81",,terminal_output +9551,7102709,"TERMINAL",0,0,"92",,terminal_output +9552,7103034,"genie.py",0,0,"",python,tab +9553,7103035,"genie.py",4450,0,"",python,selection_mouse +9554,7103742,"TERMINAL",0,0,"4:003",,terminal_output +9555,7103990,"genie.py",4448,0,"",python,selection_mouse +9556,7104628,"genie.py",4449,0,"",python,selection_mouse +9557,7104787,"TERMINAL",0,0,"14",,terminal_output +9558,7104939,"genie.py",4449,0,"token_idxs[0,:,10]",python,content +9559,7105762,"genie.py",4468,0,"",python,selection_mouse +9560,7105837,"TERMINAL",0,0,"25",,terminal_output +9561,7106889,"TERMINAL",0,0,"36",,terminal_output +9562,7107006,"genie.py",4465,0,"",python,selection_mouse +9563,7107540,"genie.py",4464,1,"",python,content +9564,7107933,"TERMINAL",0,0,"47",,terminal_output +9565,7108886,"genie.py",4463,0,"",python,selection_command +9566,7108996,"TERMINAL",0,0,"58",,terminal_output +9567,7110046,"TERMINAL",0,0,"69",,terminal_output +9568,7110437,"genie.py",4467,0,"\n print(token_idxs[0,:,0])",python,content +9569,7110506,"genie.py",4480,0,"",python,selection_command +9570,7110864,"genie.py",4481,0,"",python,selection_command +9571,7111100,"TERMINAL",0,0,"710",,terminal_output +9572,7111272,"genie.py",4482,0,"",python,selection_command +9573,7111329,"genie.py",4483,0,"",python,selection_command +9574,7111345,"genie.py",4484,0,"",python,selection_command +9575,7111442,"genie.py",4485,0,"",python,selection_command +9576,7111619,"genie.py",4486,0,"",python,selection_command +9577,7112169,"genie.py",4486,10,"",python,content +9578,7112170,"TERMINAL",0,0,"82",,terminal_output +9579,7112606,"genie.py",4486,0,"i",python,content +9580,7112606,"genie.py",4487,0,"",python,selection_keyboard +9581,7112724,"genie.py",4487,0,"n",python,content +9582,7112726,"genie.py",4488,0,"",python,selection_keyboard +9583,7112885,"genie.py",4488,0,"i",python,content +9584,7112886,"genie.py",4489,0,"",python,selection_keyboard +9585,7112948,"genie.py",4489,0,"t",python,content +9586,7112950,"genie.py",4490,0,"",python,selection_keyboard +9587,7113218,"TERMINAL",0,0,"103",,terminal_output +9588,7113723,"genie.py",4486,4,"init_mask",python,content +9589,7114080,"genie.py",4494,0,"",python,selection_command +9590,7114311,"TERMINAL",0,0,"14",,terminal_output +9591,7114799,"genie.py",4485,0,"",python,selection_mouse +9592,7115272,"genie.py",4521,0,"",python,selection_mouse +9593,7115328,"TERMINAL",0,0,"25",,terminal_output +9594,7115761,"genie.py",4504,25,"",python,content +9595,7116386,"TERMINAL",0,0,"36",,terminal_output +9596,7117143,"genie.py",4503,0,"",python,selection_mouse +9597,7117143,"genie.py",4502,0,"",python,selection_command +9598,7117453,"TERMINAL",0,0,"47",,terminal_output +9599,7117648,"genie.py",4467,0,"",python,selection_mouse +9600,7117691,"genie.py",4466,0,"",python,selection_command +9601,7118573,"genie.py",4503,0,"",python,selection_mouse +9602,7118575,"genie.py",4502,0,"",python,selection_command +9603,7118578,"TERMINAL",0,0,"58",,terminal_output +9604,7118794,"genie.py",4503,0,"",python,selection_mouse +9605,7118805,"genie.py",4502,0,"",python,selection_command +9606,7118909,"genie.py",4502,1,")",python,selection_mouse +9607,7118952,"genie.py",4503,0,"",python,selection_command 
+9608,7118953,"genie.py",4498,5,":,0])",python,selection_mouse +9609,7119001,"genie.py",4443,60,"print(token_idxs[0,:,0])\n print(init_mask[0,:,0])",python,selection_mouse +9610,7119035,"genie.py",4441,62," print(token_idxs[0,:,0])\n print(init_mask[0,:,0])",python,selection_mouse +9611,7119075,"genie.py",4439,64," print(token_idxs[0,:,0])\n print(init_mask[0,:,0])",python,selection_mouse +9612,7119111,"genie.py",4438,65," print(token_idxs[0,:,0])\n print(init_mask[0,:,0])",python,selection_mouse +9613,7119146,"genie.py",4437,66," print(token_idxs[0,:,0])\n print(init_mask[0,:,0])",python,selection_mouse +9614,7119184,"genie.py",4436,67," print(token_idxs[0,:,0])\n print(init_mask[0,:,0])",python,selection_mouse +9615,7119262,"genie.py",4435,68," print(token_idxs[0,:,0])\n print(init_mask[0,:,0])",python,selection_mouse +9616,7119343,"genie.py",4434,69," print(token_idxs[0,:,0])\n print(init_mask[0,:,0])",python,selection_mouse +9617,7119406,"genie.py",4433,70," print(token_idxs[0,:,0])\n print(init_mask[0,:,0])",python,selection_mouse +9618,7119464,"genie.py",4432,71," print(token_idxs[0,:,0])\n print(init_mask[0,:,0])",python,selection_mouse +9619,7119568,"TERMINAL",0,0,"69",,terminal_output +9620,7119594,"genie.py",4431,72," print(token_idxs[0,:,0])\n print(init_mask[0,:,0])",python,selection_mouse +9621,7120301,"genie.py",4443,0,"",python,selection_command +9622,7120580,"TERMINAL",0,0,"720",,terminal_output +9623,7121671,"TERMINAL",0,0,"81",,terminal_output +9624,7122737,"TERMINAL",0,0,"92",,terminal_output +9625,7123734,"TERMINAL",0,0,"203",,terminal_output +9626,7124836,"TERMINAL",0,0,"14",,terminal_output +9627,7125365,"genie.py",4431,0,"",python,selection_command +9628,7125832,"TERMINAL",0,0,"25",,terminal_output +9629,7126709,"genie.py",4431,0," print(""token_idxs[0,:,0]:"", token_idxs[0,:,0])\n",python,content +9630,7126899,"genie.py",4490,0," print(""init_mask[0,:,0]:"", init_mask[0,:,0])\n",python,content +9631,7126902,"genie.py",4547,73,"",python,content +9632,7127040,"TERMINAL",0,0,"36",,terminal_output +9633,7127911,"TERMINAL",0,0,"47",,terminal_output +9634,7128921,"genie.py",4431,0,"",python,selection_command +9635,7129031,"TERMINAL",0,0,"58",,terminal_output +9636,7129432,"genie.py",3906,0,"",python,selection_mouse +9637,7129908,"genie.py",3863,135,"",python,content +9638,7130094,"TERMINAL",0,0,"69",,terminal_output +9639,7130828,"genie.py",3863,1,"",python,content +9640,7130866,"genie.py",3875,0,"",python,selection_command +9641,7131046,"TERMINAL",0,0,"730",,terminal_output +9642,7131114,"genie.py",3941,0,"",python,selection_command +9643,7131654,"genie.py",4017,0,"",python,selection_command +9644,7131719,"genie.py",4080,0,"",python,selection_command +9645,7131720,"genie.py",4154,0,"",python,selection_command +9646,7131733,"genie.py",4228,0,"",python,selection_command +9647,7131734,"genie.py",4270,0,"",python,selection_command +9648,7131894,"genie.py",4307,0,"",python,selection_command +9649,7131894,"genie.py",4366,0,"",python,selection_command +9650,7131985,"genie.py",4411,0,"",python,selection_command +9651,7131986,"genie.py",4424,0,"",python,selection_command +9652,7131986,"genie.py",4480,0,"",python,selection_command +9653,7131987,"genie.py",4493,0,"",python,selection_command +9654,7132082,"genie.py",4534,0,"",python,selection_command +9655,7132083,"genie.py",4561,0,"",python,selection_command +9656,7132083,"genie.py",4591,0,"",python,selection_command +9657,7132122,"genie.py",4618,0,"",python,selection_command 
+9658,7132123,"genie.py",4646,0,"",python,selection_command +9659,7132123,"genie.py",4677,0,"",python,selection_command +9660,7132209,"TERMINAL",0,0,"81",,terminal_output +9661,7133376,"TERMINAL",0,0,"93",,terminal_output +9662,7134181,"TERMINAL",0,0,"314",,terminal_output +9663,7135336,"TERMINAL",0,0,"25",,terminal_output +9664,7136555,"TERMINAL",0,0,"36",,terminal_output +9665,7136724,"TERMINAL",0,0,"sh scripts_horeka/overfit_sample_tiny/sample.sh ",,terminal_output +9666,7137065,"TERMINAL",0,0,"\r\n[?2004l\r",,terminal_output +9667,7137216,"TERMINAL",0,0,"Sampling from checkpoint: /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/3309699/genie_1751384516_74500/\r\n",,terminal_output +9668,7137329,"TERMINAL",0,0,"47",,terminal_output +9669,7138384,"TERMINAL",0,0,"58",,terminal_output +9670,7139470,"TERMINAL",0,0,"69",,terminal_output +9671,7140084,"TERMINAL",0,0,"2025-07-03 18:14:36.871632: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +9672,7140595,"TERMINAL",0,0,"740",,terminal_output +9673,7141619,"TERMINAL",0,0,"81",,terminal_output +9674,7142638,"TERMINAL",0,0,"92",,terminal_output +9675,7143668,"TERMINAL",0,0,"403",,terminal_output +9676,7144920,"TERMINAL",0,0,"14",,terminal_output +9677,7145820,"TERMINAL",0,0,"25",,terminal_output +9678,7146842,"TERMINAL",0,0,"36",,terminal_output +9679,7147830,"TERMINAL",0,0,"47",,terminal_output +9680,7148890,"TERMINAL",0,0,"58",,terminal_output +9681,7149908,"TERMINAL",0,0,"69",,terminal_output +9682,7151053,"TERMINAL",0,0,"750",,terminal_output +9683,7151996,"TERMINAL",0,0,"81",,terminal_output +9684,7152679,"TERMINAL",0,0,"2025-07-03 18:14:49.544673: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +9685,7153059,"TERMINAL",0,0,"92",,terminal_output +9686,7154113,"TERMINAL",0,0,"504",,terminal_output +9687,7155156,"TERMINAL",0,0,"25",,terminal_output +9688,7156194,"TERMINAL",0,0,"36",,terminal_output +9689,7157287,"TERMINAL",0,0,"47",,terminal_output +9690,7158311,"TERMINAL",0,0,"58",,terminal_output +9691,7159437,"TERMINAL",0,0,"69",,terminal_output +9692,7160462,"TERMINAL",0,0,"79:00",,terminal_output +9693,7161439,"TERMINAL",0,0,"81",,terminal_output +9694,7162510,"TERMINAL",0,0,"92",,terminal_output +9695,7163636,"TERMINAL",0,0,"5:003",,terminal_output +9696,7164660,"TERMINAL",0,0,"14",,terminal_output +9697,7165684,"TERMINAL",0,0,"25",,terminal_output +9698,7166669,"TERMINAL",0,0,"36",,terminal_output +9699,7167738,"TERMINAL",0,0,"47",,terminal_output +9700,7168131,"TERMINAL",0,0,"2025-07-03 18:15:05.031641: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. 
Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +9701,7168762,"TERMINAL",0,0,"58",,terminal_output +9702,7169883,"TERMINAL",0,0,"69",,terminal_output +9703,7170906,"TERMINAL",0,0,"710",,terminal_output +9704,7171930,"TERMINAL",0,0,"81",,terminal_output +9705,7172980,"TERMINAL",0,0,"92",,terminal_output +9706,7174006,"TERMINAL",0,0,"103",,terminal_output +9707,7175076,"TERMINAL",0,0,"14",,terminal_output +9708,7175412,"TERMINAL",0,0,"2025-07-03 18:15:12.283407: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +9709,7176130,"TERMINAL",0,0,"26",,terminal_output +9710,7177154,"TERMINAL",0,0,"47",,terminal_output +9711,7178203,"TERMINAL",0,0,"58",,terminal_output +9712,7178850,"TERMINAL",0,0,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/orbax/checkpoint/_src/serialization/type_handlers.py:1251: UserWarning: Sharding info not provided when restoring. Populating sharding info from sharding file. Please note restoration time will be slightly increased due to reading from file. Note also that this option is unsafe when restoring on a different topology than the checkpoint was saved with.\r\n warnings.warn(\r\n",,terminal_output +9713,7179248,"TERMINAL",0,0,"69",,terminal_output +9714,7180330,"TERMINAL",0,0,"720",,terminal_output +9715,7181454,"TERMINAL",0,0,"81",,terminal_output +9716,7182478,"TERMINAL",0,0,"92",,terminal_output +9717,7183503,"TERMINAL",0,0,"203",,terminal_output +9718,7184526,"TERMINAL",0,0,"14",,terminal_output +9719,7185652,"TERMINAL",0,0,"25",,terminal_output +9720,7186472,"TERMINAL",0,0,"2025-07-03 18:15:23.330515: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +9721,7186615,"TERMINAL",0,0,"36",,terminal_output +9722,7187671,"TERMINAL",0,0,"47",,terminal_output +9723,7188827,"TERMINAL",0,0,"58",,terminal_output +9724,7189489,"TERMINAL",0,0,"2025-07-03 18:15:26.385974: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +9725,7189851,"TERMINAL",0,0,"69",,terminal_output +9726,7190875,"TERMINAL",0,0,"730",,terminal_output +9727,7191903,"TERMINAL",0,0,"81",,terminal_output +9728,7192980,"TERMINAL",0,0,"92",,terminal_output +9729,7194049,"TERMINAL",0,0,"303",,terminal_output +9730,7195040,"TERMINAL",0,0,"14",,terminal_output +9731,7196110,"TERMINAL",0,0,"25",,terminal_output +9732,7197154,"TERMINAL",0,0,"37",,terminal_output +9733,7197501,"TERMINAL",0,0,"2025-07-03 18:15:34.371814: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. 
Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +9734,7198247,"TERMINAL",0,0,"58",,terminal_output +9735,7199079,"TERMINAL",0,0,"token_idxs shape: (1, 1, 920)\r\n",,terminal_output +9736,7199247,"TERMINAL",0,0,"69",,terminal_output +9737,7199579,"TERMINAL",0,0,"token_idxs[0,:,0]: [151 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0]\r\ninit_mask[0,:,0]: [False True True True True True True True True True True True\r\n True True True True]\r\n",,terminal_output +9738,7200398,"TERMINAL",0,0,"740",,terminal_output +9739,7201002,"TERMINAL",0,0,"2025-07-03 18:15:37.898980: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +9740,7201348,"TERMINAL",0,0,"81",,terminal_output +9741,7202445,"TERMINAL",0,0,"92",,terminal_output +9742,7203470,"TERMINAL",0,0,"403",,terminal_output +9743,7204596,"TERMINAL",0,0,"14",,terminal_output +9744,7205620,"TERMINAL",0,0,"25",,terminal_output +9745,7206644,"TERMINAL",0,0,"36",,terminal_output +9746,7207275,"TERMINAL",0,0,"token_idxs[0,:,0]: [151 635 0 0 0 0 0 0 0 0 0 0 0 0 0 0]\r\ninit_mask[0,:,0]: [False False True True True True True True True True True True\r\n True True True True]\r\n",,terminal_output +9747,7207670,"TERMINAL",0,0,"47",,terminal_output +9748,7208795,"TERMINAL",0,0,"58",,terminal_output +9749,7209818,"TERMINAL",0,0,"69",,terminal_output +9750,7210467,"TERMINAL",0,0,"token_idxs[0,:,0]: [151 635 635 0 0 0 0 0 0 0 0 0 0 0 0 0]\r\ninit_mask[0,:,0]: [False False False True True True True True True True True True\r\n True True True True]\r\n",,terminal_output +9751,7210798,"TERMINAL",0,0,"750",,terminal_output +9752,7211868,"TERMINAL",0,0,"81",,terminal_output +9753,7212980,"TERMINAL",0,0,"92",,terminal_output +9754,7213848,"TERMINAL",0,0,"token_idxs[0,:,0]: [151 635 635 635 0 0 0 0 0 0 0 0 0 0 0 0]\r\ninit_mask[0,:,0]: [False False False False True True True True True True True True\r\n True True True True]\r\n",,terminal_output +9755,7213948,"TERMINAL",0,0,"503",,terminal_output +9756,7215043,"TERMINAL",0,0,"14",,terminal_output +9757,7216083,"TERMINAL",0,0,"25",,terminal_output +9758,7216901,"TERMINAL",0,0,"token_idxs[0,:,0]: [151 635 635 635 635 0 0 0 0 0 0 0 0 0 0 0]\r\ninit_mask[0,:,0]: [False False False False False True True True True True True True\r\n True True True True]\r\n",,terminal_output +9759,7217093,"TERMINAL",0,0,"36",,terminal_output +9760,7218149,"TERMINAL",0,0,"48",,terminal_output +9761,7219207,"TERMINAL",0,0,"69",,terminal_output +9762,7220013,"TERMINAL",0,0,"token_idxs[0,:,0]: [151 635 635 635 635 635 0 0 0 0 0 0 0 0 0 0]\r\ninit_mask[0,:,0]: [False False False False False False True True True True True True\r\n True True True True]\r\n",,terminal_output +9763,7220247,"TERMINAL",0,0,"71:00:00",,terminal_output +9764,7221298,"TERMINAL",0,0,"81",,terminal_output +9765,7222414,"TERMINAL",0,0,"92",,terminal_output +9766,7223338,"TERMINAL",0,0,"token_idxs[0,:,0]: [151 635 635 635 635 635 635 0 0 0 0 0 0 0 0 0]\r\ninit_mask[0,:,0]: [False False False False False False False True True True True True\r\n True True True True]\r\n",,terminal_output +9767,7223392,"TERMINAL",0,0,"6:003",,terminal_output +9768,7224537,"TERMINAL",0,0,"14",,terminal_output 
+9769,7225735,"TERMINAL",0,0,"25",,terminal_output +9770,7226494,"TERMINAL",0,0,"token_idxs[0,:,0]: [151 635 635 635 635 635 635 635 0 0 0 0 0 0 0 0]\r\ninit_mask[0,:,0]: [False False False False False False False False True True True True\r\n True True True True]\r\n",,terminal_output +9771,7227022,"TERMINAL",0,0,"36",,terminal_output +9772,7228001,"TERMINAL",0,0,"47",,terminal_output +9773,7229052,"TERMINAL",0,0,"58",,terminal_output +9774,7229684,"TERMINAL",0,0,"token_idxs[0,:,0]: [151 635 635 635 635 635 635 635 635 0 0 0 0 0 0 0]\r\ninit_mask[0,:,0]: [False False False False False False False False False True True True\r\n True True True True]\r\n",,terminal_output +9775,7230102,"TERMINAL",0,0,"69",,terminal_output +9776,7231139,"TERMINAL",0,0,"711",,terminal_output +9777,7232180,"TERMINAL",0,0,"92",,terminal_output +9778,7232904,"TERMINAL",0,0,"token_idxs[0,:,0]: [151 635 635 635 635 635 635 635 635 635 0 0 0 0 0 0]\r\ninit_mask[0,:,0]: [False False False False False False False False False False True True\r\n True True True True]\r\n",,terminal_output +9779,7233230,"TERMINAL",0,0,"103",,terminal_output +9780,7234284,"TERMINAL",0,0,"14",,terminal_output +9781,7235420,"TERMINAL",0,0,"25",,terminal_output +9782,7236171,"TERMINAL",0,0,"token_idxs[0,:,0]: [151 635 635 635 635 635 635 635 635 635 635 0 0 0 0 0]\r\ninit_mask[0,:,0]: [False False False False False False False False False False False True\r\n True True True True]\r\nsalloc: Job 3315885 has exceeded its time limit and its allocation has been revoked.\nsrun: Job step aborted: Waiting up to 32 seconds for job step to finish.\r\nslurmstepd: error: *** STEP 3315885.interactive ON hkn0401 CANCELLED AT 2025-07-03T18:16:13 DUE TO TIME LIMIT ***\r\nTerminated\r\n]0;tum_cte0515@hkn0401:~/Projects/jafar[?2004h(jafar) [tum_cte0515@hkn0401 jafar]$ ",,terminal_output +9783,7236546,"TERMINAL",0,0,"3CG6",,terminal_output +9784,7237497,"TERMINAL",0,0,"4",,terminal_output +9785,7238594,"TERMINAL",0,0,"5",,terminal_output +9786,7239721,"TERMINAL",0,0,"6",,terminal_output +9787,7240239,"TERMINAL",0,0,"sh scripts_horeka/overfit_sample_tiny/sample.sh ",,terminal_output +9788,7240744,"TERMINAL",0,0,"7",,terminal_output +9789,7241780,"TERMINAL",0,0,"8",,terminal_output +9790,7242099,"TERMINAL",0,0,"\r",,terminal_output +9791,7242356,"TERMINAL",0,0,"",,terminal_output +9792,7242980,"TERMINAL",0,0,"9",,terminal_output +9793,7243189,"TERMINAL",0,0,"[?2004l\r\r\nexit\r\n",,terminal_output +9794,7243234,"TERMINAL",0,0,"srun: error: hkn0401: task 0: Exited with exit code 143\r\n]0;tum_cte0515@hkn1993:~/Projects/jafar]633;D;143",,terminal_output +9795,7244025,"TERMINAL",0,0,"20",,terminal_output +9796,7244086,"TERMINAL",0,0,"salloc --time=01:00:00 --partition=dev_accelerated --nodes=1 --ntasks-per-node=1 --gres=gpu:1 --cpus-per-task=5",,terminal_command +9797,7244187,"TERMINAL",0,0,"]633;E;2025-07-03 18:16:20 salloc --time=01:00:00 --partition=dev_accelerated --nodes=1 --ntasks-per-node=1 --gres=gpu:1 --cpus-per-task=5 ;dd830a57-5de1-42ee-9f5b-a467117add9f]633;Csalloc: Pending job allocation 3315977\r\nsalloc: job 3315977 queued and waiting for resources\r\n",,terminal_output +9798,7245149,"TERMINAL",0,0,"13315977 dev_accel interact tum_cte0 PD\t0:00\t 1 (Priority)",,terminal_output +9799,7245603,"TERMINAL",0,0,"watch",,terminal_focus +9800,7246123,"TERMINAL",0,0,"2",,terminal_output +9801,7246296,"TERMINAL",0,0,"[?1049l\r[?1l>]0;tum_cte0515@hkn1993:~/Projects/jafar]633;D;0",,terminal_output +9802,7247552,"TERMINAL",0,0,"idling",,terminal_command 
+9803,7247616,"TERMINAL",0,0,"]633;E;2025-07-03 18:16:24 idling;70fb23be-3987-4c7c-890b-2d8e1f267fe1]633;C[?1049h(B[?7hEvery 1.0s: sinfo_t_idlehkn1993.localdomain: Thu Jul 3 18:16:24 2025Partition dev_cpuonly: 12 nodes idle\rPartition cpuonly:\t 6 nodes idle\rPartition dev_accelerated:\t 2 nodes idle\rPartition accelerated:\t 0 nodes idle\rPartition dev_accelerated-h100 :\t 1 nodes idle\rPartition accelerated-h100:\t 0 nodes idle\rPartition large:\t 7 nodes idle",,terminal_output +9804,7248731,"TERMINAL",0,0,"5",,terminal_output +9805,7249756,"TERMINAL",0,0,"6",,terminal_output +9806,7250118,"TERMINAL",0,0,"salloc",,terminal_focus +9807,7250871,"TERMINAL",0,0,"7",,terminal_output +9808,7251753,"TERMINAL",0,0,"watch",,terminal_focus +9809,7251914,"TERMINAL",0,0,"8",,terminal_output +9810,7252927,"TERMINAL",0,0,"9",,terminal_output +9811,7254058,"TERMINAL",0,0,"30",,terminal_output +9812,7254918,"TERMINAL",0,0,"salloc",,terminal_focus +9813,7255026,"TERMINAL",0,0,"17",,terminal_output +9814,7256113,"TERMINAL",0,0,"2",,terminal_output +9815,7257134,"TERMINAL",0,0,"3",,terminal_output +9816,7257392,"TERMINAL",0,0,"^Csalloc: Job allocation 3315977 has been revoked.\r\nsalloc: Job aborted due to signal\r\n]0;tum_cte0515@hkn1993:~/Projects/jafar]633;D;1",,terminal_output +9817,7258154,"TERMINAL",0,0,"5",,terminal_output +9818,7258602,"TERMINAL",0,0,"salloc --time=01:00:00 --partition=dev_accelerated --nodes=1 --ntasks-per-node=1 --gres=gpu:1 --cpus-per-task=5",,terminal_command +9819,7258645,"TERMINAL",0,0,"]633;E;2025-07-03 18:16:35 salloc --time=01:00:00 --partition=dev_accelerated --nodes=1 --ntasks-per-node=1 --gres=gpu:1 --cpus-per-task=5 ;dd830a57-5de1-42ee-9f5b-a467117add9f]633;Csalloc: Pending job allocation 3315979\r\nsalloc: job 3315979 queued and waiting for resources\r\n",,terminal_output +9820,7259196,"TERMINAL",0,0,"6",,terminal_output +9821,7259410,"TERMINAL",0,0,"^Csalloc: Job allocation 3315979 has been revoked.\r\nsalloc: Job aborted due to signal\r\n]0;tum_cte0515@hkn1993:~/Projects/jafar]633;D;1]633;P;Cwd=/home/hk-project-p0023960/tum_cte0515/Projects/jafar",,terminal_output +9822,7260260,"TERMINAL",0,0,"7",,terminal_output +9823,7261285,"TERMINAL",0,0,"8",,terminal_output +9824,7262340,"TERMINAL",0,0,"9",,terminal_output +9825,7263370,"TERMINAL",0,0,"40",,terminal_output +9826,7264515,"TERMINAL",0,0,"1",,terminal_output +9827,7265298,"TERMINAL",0,0,"salloc --time=01:00:00 --partition=dev_accelerated-h100 --nodes=1 --ntasks-per-node=1 --gres=gpu:1 --cpus-per-task=5",,terminal_command +9828,7265349,"TERMINAL",0,0,"]633;E;2025-07-03 18:16:42 salloc --time=01:00:00 --partition=dev_accelerated-h100 --nodes=1 --ntasks-per-node=1 --gres=gpu:1 --cpus-per-task=5 ;dd830a57-5de1-42ee-9f5b-a467117add9f]633;Csalloc: Granted job allocation 3315981\r\n",,terminal_output +9829,7265484,"TERMINAL",0,0,"salloc: Waiting for resource configuration\r\n",,terminal_output +9830,7265484,"TERMINAL",0,0,"20",,terminal_output +9831,7266500,"TERMINAL",0,0,"3",,terminal_output +9832,7267573,"TERMINAL",0,0,"4",,terminal_output +9833,7268601,"TERMINAL",0,0,"5",,terminal_output +9834,7268900,"genie.py",0,0,"",python,tab +9835,7268903,"genie.py",9205,0,"",python,selection_mouse +9836,7269000,"genie.py",9204,0,"",python,selection_command +9837,7269722,"TERMINAL",0,0,"6",,terminal_output +9838,7269878,"genie.py",9240,0,"",python,selection_mouse +9839,7269906,"genie.py",9239,0,"",python,selection_command +9840,7270748,"TERMINAL",0,0,"7",,terminal_output +9841,7271771,"TERMINAL",0,0,"8",,terminal_output 
+9842,7272755,"TERMINAL",0,0,"9",,terminal_output +9843,7273819,"TERMINAL",0,0,"50",,terminal_output +9844,7274839,"TERMINAL",0,0,"1",,terminal_output +9845,7275970,"TERMINAL",0,0,"2",,terminal_output +9846,7276995,"TERMINAL",0,0,"3",,terminal_output +9847,7278059,"TERMINAL",0,0,"4",,terminal_output +9848,7279041,"TERMINAL",0,0,"5",,terminal_output +9849,7280048,"TERMINAL",0,0,"6",,terminal_output +9850,7280900,"genie.py",8229,0,"",python,selection_mouse +9851,7281049,"genie.py",8220,13,"mask_expanded",python,selection_mouse +9852,7281162,"TERMINAL",0,0,"7",,terminal_output +9853,7282145,"TERMINAL",0,0,"8",,terminal_output +9854,7283157,"TERMINAL",0,0,"7:00",,terminal_output +9855,7284198,"TERMINAL",0,0,"1",,terminal_output +9856,7285232,"TERMINAL",0,0,"2",,terminal_output +9857,7286271,"TERMINAL",0,0,"3",,terminal_output +9858,7287016,"genie.py",8258,0,"",python,selection_mouse +9859,7287337,"TERMINAL",0,0,"4",,terminal_output +9860,7288240,"genie.py",8228,0,"",python,selection_mouse +9861,7288366,"TERMINAL",0,0,"513",,terminal_output +9862,7289644,"TERMINAL",0,0,"6",,terminal_output +9863,7290512,"TERMINAL",0,0,"7",,terminal_output +9864,7291476,"TERMINAL",0,0,"8",,terminal_output +9865,7292558,"TERMINAL",0,0,"9",,terminal_output +9866,7293337,"TERMINAL",0,0,"salloc: Prolog hung on node hkn0901\r\n",,terminal_output +9867,7293579,"genie.py",10588,0,"",python,selection_mouse +9868,7293590,"TERMINAL",0,0,"10",,terminal_output +9869,7294202,"genie.py",10486,0,"",python,selection_mouse +9870,7294721,"TERMINAL",0,0,"1",,terminal_output +9871,7294940,"genie.py",10535,0,"",python,selection_mouse +9872,7295092,"genie.py",10531,8,"dynamics",python,selection_mouse +9873,7295733,"TERMINAL",0,0,"2",,terminal_output +9874,7296098,"genie.py",10585,0,"",python,selection_mouse +9875,7296231,"genie.py",10579,9,"vid_embed",python,selection_mouse +9876,7296679,"TERMINAL",0,0,"3",,terminal_output +9877,7297005,"genie.py",10537,0,"",python,selection_mouse +9878,7297143,"genie.py",10531,8,"dynamics",python,selection_mouse +9879,7297788,"TERMINAL",0,0,"4",,terminal_output +9880,7298265,"genie.py",10446,0,"",python,selection_mouse +9881,7298768,"TERMINAL",0,0,"5",,terminal_output +9882,7299037,"genie.py",10494,0,"",python,selection_mouse +9883,7299199,"genie.py",10491,4,"mask",python,selection_mouse +9884,7299830,"TERMINAL",0,0,"6",,terminal_output +9885,7300855,"TERMINAL",0,0,"7",,terminal_output +9886,7301980,"TERMINAL",0,0,"8",,terminal_output +9887,7302980,"TERMINAL",0,0,"9",,terminal_output +9888,7304065,"TERMINAL",0,0,"20",,terminal_output +9889,7304066,"TERMINAL",0,0,"salloc: Nodes hkn0901 are ready for job\r\n",,terminal_output +9890,7304959,"TERMINAL",0,0,"]0;tum_cte0515@hkn0901:~/Projects/jafar[?2004h[tum_cte0515@hkn0901 jafar]$ ",,terminal_output +9891,7305021,"TERMINAL",0,0,"1",,terminal_output +9892,7306108,"TERMINAL",0,0,"2",,terminal_output +9893,7307233,"TERMINAL",0,0,"3",,terminal_output +9894,7308816,"TERMINAL",0,0,"420",,terminal_output +9895,7309702,"TERMINAL",0,0,"6",,terminal_output +9896,7310786,"TERMINAL",0,0,"7",,terminal_output +9897,7311826,"TERMINAL",0,0,"8",,terminal_output +9898,7312836,"TERMINAL",0,0,"9",,terminal_output +9899,7313970,"TERMINAL",0,0,"30",,terminal_output +9900,7314908,"TERMINAL",0,0,"1",,terminal_output +9901,7315951,"TERMINAL",0,0,"2",,terminal_output +9902,7316392,"genie.py",6251,0,"",python,selection_mouse +9903,7316531,"genie.py",6246,9,"ones_like",python,selection_mouse +9904,7317033,"TERMINAL",0,0,"3",,terminal_output 
+9905,7318022,"TERMINAL",0,0,"4",,terminal_output +9906,7319090,"TERMINAL",0,0,"5",,terminal_output +9907,7319170,"genie.py",5530,0,"",python,selection_command +9908,7320103,"TERMINAL",0,0,"6",,terminal_output +9909,7321165,"TERMINAL",0,0,"7",,terminal_output +9910,7322233,"TERMINAL",0,0,"9",,terminal_output +9911,7323214,"TERMINAL",0,0,"40",,terminal_output +9912,7324281,"TERMINAL",0,0,"1",,terminal_output +9913,7325555,"TERMINAL",0,0,"2",,terminal_output +9914,7326352,"TERMINAL",0,0,"3",,terminal_output +9915,7327380,"TERMINAL",0,0,"4",,terminal_output +9916,7328809,"TERMINAL",0,0,"5",,terminal_output +9917,7329834,"TERMINAL",0,0,"6",,terminal_output +9918,7330958,"TERMINAL",0,0,"7",,terminal_output +9919,7331982,"TERMINAL",0,0,"8",,terminal_output +9920,7332979,"TERMINAL",0,0,"9",,terminal_output +9921,7334032,"TERMINAL",0,0,"50",,terminal_output +9922,7335055,"TERMINAL",0,0,"1",,terminal_output +9923,7336079,"TERMINAL",0,0,"2",,terminal_output +9924,7337207,"TERMINAL",0,0,"3",,terminal_output +9925,7338143,"TERMINAL",0,0,"5",,terminal_output +9926,7339188,"TERMINAL",0,0,"6",,terminal_output +9927,7340223,"TERMINAL",0,0,"7",,terminal_output +9928,7341261,"TERMINAL",0,0,"8",,terminal_output +9929,7342327,"TERMINAL",0,0,"9",,terminal_output +9930,7343394,"TERMINAL",0,0,"8:00",,terminal_output +9931,7344476,"TERMINAL",0,0,"1",,terminal_output +9932,7345482,"TERMINAL",0,0,"2",,terminal_output +9933,7346462,"TERMINAL",0,0,"3",,terminal_output +9934,7347548,"TERMINAL",0,0,"4",,terminal_output +9935,7348880,"TERMINAL",0,0,"5",,terminal_output +9936,7349904,"TERMINAL",0,0,"6",,terminal_output +9937,7351029,"TERMINAL",0,0,"7",,terminal_output +9938,7351650,"genie.py",4234,0,"",python,selection_mouse +9939,7351787,"genie.py",4228,9,"init_mask",python,selection_mouse +9940,7351983,"TERMINAL",0,0,"8",,terminal_output +9941,7353022,"TERMINAL",0,0,"9",,terminal_output +9942,7354102,"TERMINAL",0,0,"10",,terminal_output +9943,7355133,"TERMINAL",0,0,"1",,terminal_output +9944,7356160,"TERMINAL",0,0,"33",,terminal_output +9945,7357187,"TERMINAL",0,0,"4",,terminal_output +9946,7358244,"TERMINAL",0,0,"5",,terminal_output +9947,7359325,"TERMINAL",0,0,"6",,terminal_output +9948,7360351,"TERMINAL",0,0,"7",,terminal_output +9949,7361375,"TERMINAL",0,0,"8",,terminal_output +9950,7362406,"TERMINAL",0,0,"9",,terminal_output +9951,7363468,"TERMINAL",0,0,"20",,terminal_output +9952,7364547,"TERMINAL",0,0,"1",,terminal_output +9953,7365545,"TERMINAL",0,0,"2",,terminal_output +9954,7366764,"TERMINAL",0,0,"3",,terminal_output +9955,7367720,"TERMINAL",0,0,"4",,terminal_output +9956,7368745,"TERMINAL",0,0,"52",,terminal_output +9957,7369781,"TERMINAL",0,0,"6",,terminal_output +9958,7370785,"TERMINAL",0,0,"7",,terminal_output +9959,7371914,"TERMINAL",0,0,"8",,terminal_output +9960,7372840,"TERMINAL",0,0,"9",,terminal_output +9961,7373968,"TERMINAL",0,0,"30",,terminal_output +9962,7374991,"TERMINAL",0,0,"1",,terminal_output +9963,7375455,"genie.py",0,0,"",python,tab +9964,7375455,"genie.py",6250,0,"",python,selection_mouse +9965,7375598,"genie.py",6246,9,"ones_like",python,selection_mouse +9966,7375942,"TERMINAL",0,0,"2",,terminal_output +9967,7376158,"genie.py",6260,0,"",python,selection_mouse +9968,7376190,"genie.py",6256,10,"token_idxs",python,selection_mouse +9969,7376957,"genie.py",6250,0,"",python,selection_mouse +9970,7377049,"genie.py",6246,9,"ones_like",python,selection_mouse +9971,7377069,"TERMINAL",0,0,"3",,terminal_output +9972,7378263,"TERMINAL",0,0,"4",,terminal_output 
+9973,7379136,"TERMINAL",0,0,"5",,terminal_output +9974,7380117,"TERMINAL",0,0,"6",,terminal_output +9975,7381135,"TERMINAL",0,0,"7",,terminal_output +9976,7382356,"TERMINAL",0,0,"9",,terminal_output +9977,7383212,"TERMINAL",0,0,"40",,terminal_output +9978,7384251,"TERMINAL",0,0,"1",,terminal_output +9979,7385334,"TERMINAL",0,0,"2",,terminal_output +9980,7386358,"TERMINAL",0,0,"3",,terminal_output +9981,7387385,"TERMINAL",0,0,"4",,terminal_output +9982,7388402,"TERMINAL",0,0,"5",,terminal_output +9983,7389534,"TERMINAL",0,0,"6",,terminal_output +9984,7390557,"TERMINAL",0,0,"7",,terminal_output +9985,7391581,"TERMINAL",0,0,"8",,terminal_output +9986,7392605,"TERMINAL",0,0,"9",,terminal_output +9987,7393629,"TERMINAL",0,0,"50",,terminal_output +9988,7394655,"TERMINAL",0,0,"1",,terminal_output +9989,7395782,"TERMINAL",0,0,"23",,terminal_output +9990,7396803,"TERMINAL",0,0,"3",,terminal_output +9991,7397828,"TERMINAL",0,0,"4",,terminal_output +9992,7398851,"TERMINAL",0,0,"5",,terminal_output +9993,7399876,"TERMINAL",0,0,"6",,terminal_output +9994,7400900,"TERMINAL",0,0,"7",,terminal_output +9995,7401924,"TERMINAL",0,0,"8",,terminal_output +9996,7402932,"TERMINAL",0,0,"9",,terminal_output +9997,7404075,"TERMINAL",0,0,"9:00",,terminal_output +9998,7405099,"TERMINAL",0,0,"1",,terminal_output +9999,7406130,"TERMINAL",0,0,"2",,terminal_output +10000,7407146,"TERMINAL",0,0,"34",,terminal_output +10001,7408138,"TERMINAL",0,0,"4",,terminal_output +10002,7409197,"TERMINAL",0,0,"6",,terminal_output +10003,7410221,"TERMINAL",0,0,"7",,terminal_output +10004,7411260,"TERMINAL",0,0,"8",,terminal_output +10005,7412367,"TERMINAL",0,0,"9",,terminal_output +10006,7413333,"TERMINAL",0,0,"10",,terminal_output +10007,7414365,"TERMINAL",0,0,"1",,terminal_output +10008,7415440,"TERMINAL",0,0,"2",,terminal_output +10009,7416465,"TERMINAL",0,0,"3",,terminal_output +10010,7417621,"TERMINAL",0,0,"4",,terminal_output +10011,7418618,"TERMINAL",0,0,"5",,terminal_output +10012,7419637,"TERMINAL",0,0,"6",,terminal_output +10013,7419822,"genie.py",4274,0,"",python,selection_mouse +10014,7419945,"genie.py",4270,10,"token_idxs",python,selection_mouse +10015,7420898,"TERMINAL",0,0,"7",,terminal_output +10016,7421686,"TERMINAL",0,0,"8",,terminal_output +10017,7422711,"TERMINAL",0,0,"9",,terminal_output +10018,7423736,"TERMINAL",0,0,"20",,terminal_output +10019,7424862,"TERMINAL",0,0,"1",,terminal_output +10020,7425801,"TERMINAL",0,0,"2",,terminal_output +10021,7426921,"TERMINAL",0,0,"3",,terminal_output +10022,7427981,"TERMINAL",0,0,"4",,terminal_output +10023,7428959,"TERMINAL",0,0,"52",,terminal_output +10024,7429982,"TERMINAL",0,0,"6",,terminal_output +10025,7431009,"TERMINAL",0,0,"7",,terminal_output +10026,7432140,"TERMINAL",0,0,"8",,terminal_output +10027,7433092,"TERMINAL",0,0,"9",,terminal_output +10028,7434181,"TERMINAL",0,0,"30",,terminal_output +10029,7435207,"TERMINAL",0,0,"2",,terminal_output +10030,7436215,"TERMINAL",0,0,"3",,terminal_output +10031,7437260,"TERMINAL",0,0,"4",,terminal_output +10032,7438317,"TERMINAL",0,0,"5",,terminal_output +10033,7439329,"TERMINAL",0,0,"6",,terminal_output +10034,7440370,"TERMINAL",0,0,"7",,terminal_output +10035,7441450,"TERMINAL",0,0,"8",,terminal_output +10036,7442476,"TERMINAL",0,0,"9",,terminal_output +10037,7443498,"TERMINAL",0,0,"40",,terminal_output +10038,7444625,"TERMINAL",0,0,"1",,terminal_output +10039,7445566,"TERMINAL",0,0,"2",,terminal_output +10040,7446674,"TERMINAL",0,0,"33",,terminal_output +10041,7447698,"TERMINAL",0,0,"4",,terminal_output 
+10042,7448720,"TERMINAL",0,0,"5",,terminal_output +10043,7449745,"TERMINAL",0,0,"6",,terminal_output +10044,7450873,"TERMINAL",0,0,"7",,terminal_output +10045,7451817,"TERMINAL",0,0,"8",,terminal_output +10046,7452922,"TERMINAL",0,0,"9",,terminal_output +10047,7453945,"TERMINAL",0,0,"50",,terminal_output +10048,7454968,"TERMINAL",0,0,"1",,terminal_output +10049,7455983,"TERMINAL",0,0,"2",,terminal_output +10050,7457119,"TERMINAL",0,0,"3",,terminal_output +10051,7458065,"TERMINAL",0,0,"4",,terminal_output +10052,7459173,"TERMINAL",0,0,"5",,terminal_output +10053,7460200,"TERMINAL",0,0,"7",,terminal_output +10054,7461189,"TERMINAL",0,0,"8",,terminal_output +10055,7462229,"TERMINAL",0,0,"9",,terminal_output +10056,7463310,"TERMINAL",0,0,"20:00",,terminal_output +10057,7464313,"TERMINAL",0,0,"1",,terminal_output +10058,7465356,"TERMINAL",0,0,"2",,terminal_output +10059,7466397,"TERMINAL",0,0,"3",,terminal_output +10060,7467463,"TERMINAL",0,0,"4",,terminal_output +10061,7468488,"TERMINAL",0,0,"5",,terminal_output +10062,7469618,"TERMINAL",0,0,"6",,terminal_output +10063,7470554,"TERMINAL",0,0,"7",,terminal_output +10064,7471658,"TERMINAL",0,0,"8",,terminal_output +10065,7472685,"TERMINAL",0,0,"9",,terminal_output +10066,7473723,"TERMINAL",0,0,"10",,terminal_output +10067,7474726,"TERMINAL",0,0,"1",,terminal_output +10068,7475858,"TERMINAL",0,0,"2",,terminal_output +10069,7476882,"TERMINAL",0,0,"3",,terminal_output +10070,7477908,"TERMINAL",0,0,"4",,terminal_output +10071,7478930,"TERMINAL",0,0,"5",,terminal_output +10072,7479954,"TERMINAL",0,0,"6",,terminal_output +10073,7481080,"TERMINAL",0,0,"7",,terminal_output +10074,7482103,"TERMINAL",0,0,"8",,terminal_output +10075,7483049,"TERMINAL",0,0,"9",,terminal_output +10076,7484151,"TERMINAL",0,0,"20",,terminal_output +10077,7485131,"TERMINAL",0,0,"1",,terminal_output +10078,7486201,"TERMINAL",0,0,"3",,terminal_output +10079,7487217,"TERMINAL",0,0,"4",,terminal_output +10080,7488264,"TERMINAL",0,0,"5",,terminal_output +10081,7489294,"TERMINAL",0,0,"6",,terminal_output +10082,7490332,"TERMINAL",0,0,"7",,terminal_output +10083,7491367,"TERMINAL",0,0,"8",,terminal_output +10084,7492410,"TERMINAL",0,0,"9",,terminal_output +10085,7493472,"TERMINAL",0,0,"30",,terminal_output +10086,7494597,"TERMINAL",0,0,"1",,terminal_output +10087,7495621,"TERMINAL",0,0,"2",,terminal_output +10088,7496583,"TERMINAL",0,0,"3",,terminal_output +10089,7497771,"TERMINAL",0,0,"4",,terminal_output +10090,7498694,"TERMINAL",0,0,"5",,terminal_output +10091,7499402,"genie.py",5353,0,"",python,selection_mouse +10092,7499761,"TERMINAL",0,0,"6",,terminal_output +10093,7500755,"TERMINAL",0,0,"7",,terminal_output +10094,7501493,"genie.py",5333,49," new_frame_pixels = self.tokenizer.decode(",python,selection_command +10095,7501869,"TERMINAL",0,0,"8",,terminal_output +10096,7502100,"genie.py",5333,73," new_frame_pixels = self.tokenizer.decode(\n token_idxs,",python,selection_command +10097,7502225,"genie.py",5333,122," new_frame_pixels = self.tokenizer.decode(\n token_idxs,\n video_hw=batch[""videos""].shape[2:4],",python,selection_command +10098,7502549,"genie.py",5333,132," new_frame_pixels = self.tokenizer.decode(\n token_idxs,\n video_hw=batch[""videos""].shape[2:4],\n )",python,selection_command +10099,7502800,"TERMINAL",0,0,"9",,terminal_output +10100,7503047,"genie.py",5341,0,"",python,selection_command +10101,7503915,"TERMINAL",0,0,"40",,terminal_output +10102,7504941,"TERMINAL",0,0,"1",,terminal_output +10103,7505963,"TERMINAL",0,0,"2",,terminal_output 
+10104,7506047,"genie.py",5464,0,"^",python,content +10105,7506047,"genie.py",5419,0,"^",python,content +10106,7506047,"genie.py",5395,0,"^",python,content +10107,7506047,"genie.py",5341,0,"^",python,content +10108,7506048,"genie.py",5342,0,"",python,selection_keyboard +10109,7506850,"genie.py",5467,1,"",python,content +10110,7506850,"genie.py",5421,1,"",python,content +10111,7506850,"genie.py",5396,1,"",python,content +10112,7506850,"genie.py",5341,1,"",python,content +10113,7506851,"genie.py",5341,0,"",python,selection_keyboard +10114,7506984,"TERMINAL",0,0,"3",,terminal_output +10115,7507516,"genie.py",5340,0,"",python,selection_command +10116,7508019,"TERMINAL",0,0,"4",,terminal_output +10117,7508985,"genie.py",5465,0,"",python,selection_mouse +10118,7508986,"genie.py",5464,0,"",python,selection_command +10119,7509095,"TERMINAL",0,0,"5",,terminal_output +10120,7509615,"genie.py",5406,0,"",python,selection_mouse +10121,7509616,"genie.py",5405,0,"",python,selection_command +10122,7509736,"genie.py",5406,0,"",python,selection_mouse +10123,7509743,"genie.py",5405,0,"",python,selection_command +10124,7510077,"TERMINAL",0,0,"6",,terminal_output +10125,7510296,"genie.py",5432,0,"",python,selection_mouse +10126,7510453,"genie.py",5428,5,"batch",python,selection_mouse +10127,7511119,"TERMINAL",0,0,"7",,terminal_output +10128,7512210,"TERMINAL",0,0,"9",,terminal_output +10129,7513236,"TERMINAL",0,0,"50",,terminal_output +10130,7513529,"genie.py",5449,0,"",python,selection_mouse +10131,7514261,"TERMINAL",0,0,"1",,terminal_output +10132,7514963,"genie.py",5441,0,"",python,selection_mouse +10133,7515104,"genie.py",5435,6,"videos",python,selection_mouse +10134,7515276,"TERMINAL",0,0,"2",,terminal_output +10135,7516319,"genie.py",5452,0,"",python,selection_mouse +10136,7516466,"TERMINAL",0,0,"3",,terminal_output +10137,7517367,"TERMINAL",0,0,"4",,terminal_output +10138,7518309,"genie.py",5451,0,"",python,selection_mouse +10139,7518513,"TERMINAL",0,0,"5",,terminal_output +10140,7518781,"genie.py",5449,0,"",python,selection_mouse +10141,7519684,"TERMINAL",0,0,"6",,terminal_output +10142,7520504,"TERMINAL",0,0,"7",,terminal_output +10143,7520970,"genie.py",5487,0,"",python,selection_mouse +10144,7521512,"TERMINAL",0,0,"8",,terminal_output +10145,7522552,"TERMINAL",0,0,"9",,terminal_output +10146,7523268,"genie.py",5423,0,"",python,selection_mouse +10147,7523629,"TERMINAL",0,0,"1:00",,terminal_output +10148,7524474,"genie.py",5465,0,"",python,selection_mouse +10149,7524501,"genie.py",5464,0,"",python,selection_command +10150,7524631,"TERMINAL",0,0,"1",,terminal_output +10151,7525128,"genie.py",5426,0,"",python,selection_mouse +10152,7525641,"genie.py",5421,0,"",python,selection_mouse +10153,7525694,"TERMINAL",0,0,"2",,terminal_output +10154,7525765,"genie.py",5419,8,"video_hw",python,selection_mouse +10155,7526334,"genie.py",5405,0,"",python,selection_mouse +10156,7526750,"TERMINAL",0,0,"3",,terminal_output +10157,7527776,"TERMINAL",0,0,"4",,terminal_output +10158,7528793,"TERMINAL",0,0,"5",,terminal_output +10159,7529926,"TERMINAL",0,0,"6",,terminal_output +10160,7531001,"TERMINAL",0,0,"7",,terminal_output +10161,7532039,"TERMINAL",0,0,"8",,terminal_output +10162,7532992,"TERMINAL",0,0,"9",,terminal_output +10163,7534020,"TERMINAL",0,0,"10",,terminal_output +10164,7535043,"TERMINAL",0,0,"1",,terminal_output +10165,7536071,"TERMINAL",0,0,"2",,terminal_output +10166,7537107,"TERMINAL",0,0,"3",,terminal_output +10167,7538154,"TERMINAL",0,0,"5",,terminal_output 
+10168,7539188,"TERMINAL",0,0,"6",,terminal_output +10169,7540230,"TERMINAL",0,0,"7",,terminal_output +10170,7541270,"TERMINAL",0,0,"8",,terminal_output +10171,7542311,"TERMINAL",0,0,"9",,terminal_output +10172,7543353,"TERMINAL",0,0,"20",,terminal_output +10173,7544391,"TERMINAL",0,0,"1",,terminal_output +10174,7545437,"TERMINAL",0,0,"2",,terminal_output +10175,7546528,"TERMINAL",0,0,"3",,terminal_output +10176,7547541,"TERMINAL",0,0,"4",,terminal_output +10177,7548544,"TERMINAL",0,0,"5",,terminal_output +10178,7549688,"TERMINAL",0,0,"6",,terminal_output +10179,7550625,"TERMINAL",0,0,"7",,terminal_output +10180,7551669,"TERMINAL",0,0,"8",,terminal_output +10181,7552706,"TERMINAL",0,0,"9",,terminal_output +10182,7553785,"TERMINAL",0,0,"30",,terminal_output +10183,7554809,"TERMINAL",0,0,"1",,terminal_output +10184,7555832,"TERMINAL",0,0,"2",,terminal_output +10185,7556861,"TERMINAL",0,0,"3",,terminal_output +10186,7557913,"TERMINAL",0,0,"4",,terminal_output +10187,7558948,"TERMINAL",0,0,"5",,terminal_output +10188,7559989,"TERMINAL",0,0,"6",,terminal_output +10189,7561030,"TERMINAL",0,0,"7",,terminal_output +10190,7562189,"TERMINAL",0,0,"8",,terminal_output +10191,7563112,"TERMINAL",0,0,"9",,terminal_output +10192,7564230,"TERMINAL",0,0,"41",,terminal_output +10193,7565253,"TERMINAL",0,0,"2",,terminal_output +10194,7566276,"TERMINAL",0,0,"3",,terminal_output +10195,7567281,"TERMINAL",0,0,"4",,terminal_output +10196,7568321,"TERMINAL",0,0,"5",,terminal_output +10197,7569862,"TERMINAL",0,0,"6",,terminal_output +10198,7570807,"TERMINAL",0,0,"7",,terminal_output +10199,7571909,"TERMINAL",0,0,"8",,terminal_output +10200,7572879,"TERMINAL",0,0,"9",,terminal_output +10201,7573959,"TERMINAL",0,0,"50",,terminal_output +10202,7575022,"TERMINAL",0,0,"1",,terminal_output +10203,7576004,"TERMINAL",0,0,"2",,terminal_output +10204,7577132,"TERMINAL",0,0,"3",,terminal_output +10205,7578075,"TERMINAL",0,0,"4",,terminal_output +10206,7579125,"TERMINAL",0,0,"5",,terminal_output +10207,7580204,"TERMINAL",0,0,"7",,terminal_output +10208,7581237,"TERMINAL",0,0,"8",,terminal_output +10209,7582266,"TERMINAL",0,0,"9",,terminal_output +10210,7583290,"TERMINAL",0,0,"2:00",,terminal_output +10211,7584321,"TERMINAL",0,0,"1",,terminal_output +10212,7585376,"TERMINAL",0,0,"2",,terminal_output +10213,7586409,"TERMINAL",0,0,"3",,terminal_output +10214,7587476,"TERMINAL",0,0,"4",,terminal_output +10215,7588499,"TERMINAL",0,0,"5",,terminal_output +10216,7589625,"TERMINAL",0,0,"6",,terminal_output +10217,7590649,"TERMINAL",0,0,"7",,terminal_output +10218,7591674,"TERMINAL",0,0,"8",,terminal_output +10219,7592647,"TERMINAL",0,0,"9",,terminal_output +10220,7593721,"TERMINAL",0,0,"10",,terminal_output +10221,7594725,"TERMINAL",0,0,"1",,terminal_output +10222,7595770,"TERMINAL",0,0,"2",,terminal_output +10223,7596795,"TERMINAL",0,0,"3",,terminal_output +10224,7597921,"TERMINAL",0,0,"4",,terminal_output +10225,7598944,"TERMINAL",0,0,"5",,terminal_output +10226,7599968,"TERMINAL",0,0,"6",,terminal_output +10227,7600992,"TERMINAL",0,0,"7",,terminal_output +10228,7602016,"TERMINAL",0,0,"8",,terminal_output +10229,7603028,"TERMINAL",0,0,"9",,terminal_output +10230,7604166,"TERMINAL",0,0,"20",,terminal_output +10231,7605199,"TERMINAL",0,0,"1",,terminal_output +10232,7606227,"TERMINAL",0,0,"3",,terminal_output +10233,7607243,"TERMINAL",0,0,"4",,terminal_output +10234,7608231,"TERMINAL",0,0,"5",,terminal_output +10235,7609292,"TERMINAL",0,0,"6",,terminal_output +10236,7610316,"TERMINAL",0,0,"7",,terminal_output 
+10237,7611354,"TERMINAL",0,0,"8",,terminal_output +10238,7612390,"TERMINAL",0,0,"9",,terminal_output +10239,7613487,"TERMINAL",0,0,"30",,terminal_output +10240,7614509,"TERMINAL",0,0,"1",,terminal_output +10241,7615534,"TERMINAL",0,0,"2",,terminal_output +10242,7616600,"TERMINAL",0,0,"3",,terminal_output +10243,7617618,"TERMINAL",0,0,"4",,terminal_output +10244,7618710,"TERMINAL",0,0,"5",,terminal_output +10245,7619692,"TERMINAL",0,0,"6",,terminal_output +10246,7620755,"TERMINAL",0,0,"7",,terminal_output +10247,7621882,"TERMINAL",0,0,"8",,terminal_output +10248,7622906,"TERMINAL",0,0,"9",,terminal_output +10249,7623930,"TERMINAL",0,0,"40",,terminal_output +10250,7624954,"TERMINAL",0,0,"1",,terminal_output +10251,7625978,"TERMINAL",0,0,"2",,terminal_output +10252,7627004,"TERMINAL",0,0,"3",,terminal_output +10253,7628023,"TERMINAL",0,0,"4",,terminal_output +10254,7629152,"TERMINAL",0,0,"5",,terminal_output +10255,7630105,"TERMINAL",0,0,"6",,terminal_output +10256,7631150,"TERMINAL",0,0,"8",,terminal_output +10257,7632238,"TERMINAL",0,0,"9",,terminal_output +10258,7633235,"TERMINAL",0,0,"50",,terminal_output +10259,7634176,"genie.py",0,0,"",python,tab +10260,7634178,"genie.py",4788,0,"",python,selection_mouse +10261,7634358,"TERMINAL",0,0,"1",,terminal_output +10262,7634757,"genie.py",4865,0,"",python,selection_mouse +10263,7634763,"genie.py",4864,0,"",python,selection_command +10264,7635308,"TERMINAL",0,0,"2",,terminal_output +10265,7635678,"genie.py",4923,0,"",python,selection_mouse +10266,7635699,"genie.py",4922,0,"",python,selection_command +10267,7636424,"TERMINAL",0,0,"3",,terminal_output +10268,7636539,"genie.py",4893,0,"",python,selection_mouse +10269,7636551,"genie.py",4892,0,"",python,selection_command +10270,7637038,"genie.py",4893,0,"",python,selection_mouse +10271,7637040,"genie.py",4892,0,"",python,selection_command +10272,7637201,"genie.py",4893,0,"",python,selection_mouse +10273,7637210,"genie.py",4892,0,"",python,selection_command +10274,7637388,"TERMINAL",0,0,"4",,terminal_output +10275,7638430,"TERMINAL",0,0,"5",,terminal_output +10276,7639494,"TERMINAL",0,0,"6",,terminal_output +10277,7639793,"genie.py",4339,0,"",python,selection_mouse +10278,7639921,"genie.py",4335,10,"token_idxs",python,selection_mouse +10279,7640518,"TERMINAL",0,0,"7",,terminal_output +10280,7641645,"TERMINAL",0,0,"8",,terminal_output +10281,7642595,"TERMINAL",0,0,"9",,terminal_output +10282,7643694,"TERMINAL",0,0,"3:00",,terminal_output +10283,7644720,"TERMINAL",0,0,"1",,terminal_output +10284,7645721,"TERMINAL",0,0,"2",,terminal_output +10285,7646868,"TERMINAL",0,0,"3",,terminal_output +10286,7647678,"genie.py",5257,0,"",python,selection_mouse +10287,7647807,"genie.py",5253,7,"loop_fn",python,selection_mouse +10288,7647871,"TERMINAL",0,0,"4",,terminal_output +10289,7648581,"genie.py",5223,0,"",python,selection_mouse +10290,7648658,"genie.py",5222,0,"",python,selection_command +10291,7648717,"genie.py",5223,0,"",python,selection_mouse +10292,7648728,"genie.py",5222,0,"",python,selection_command +10293,7648844,"TERMINAL",0,0,"5",,terminal_output +10294,7649261,"genie.py",5264,0,"",python,selection_mouse +10295,7649361,"genie.py",5261,10,"init_carry",python,selection_mouse +10296,7649940,"TERMINAL",0,0,"6",,terminal_output +10297,7650964,"TERMINAL",0,0,"7",,terminal_output +10298,7651989,"TERMINAL",0,0,"8",,terminal_output +10299,7653011,"TERMINAL",0,0,"9",,terminal_output +10300,7654139,"TERMINAL",0,0,"10",,terminal_output +10301,7655093,"TERMINAL",0,0,"1",,terminal_output 
+10302,7655527,"genie.py",5278,0,"",python,selection_mouse +10303,7655685,"genie.py",5277,6,"arange",python,selection_mouse +10304,7656137,"TERMINAL",0,0,"2",,terminal_output +10305,7657181,"TERMINAL",0,0,"4",,terminal_output +10306,7658270,"TERMINAL",0,0,"5",,terminal_output +10307,7659268,"TERMINAL",0,0,"6",,terminal_output +10308,7660313,"TERMINAL",0,0,"7",,terminal_output +10309,7661350,"TERMINAL",0,0,"8",,terminal_output +10310,7662390,"TERMINAL",0,0,"9",,terminal_output +10311,7663456,"TERMINAL",0,0,"20",,terminal_output +10312,7664480,"TERMINAL",0,0,"1",,terminal_output +10313,7665607,"TERMINAL",0,0,"2",,terminal_output +10314,7666631,"TERMINAL",0,0,"3",,terminal_output +10315,7667656,"TERMINAL",0,0,"4",,terminal_output +10316,7668649,"TERMINAL",0,0,"5",,terminal_output +10317,7669690,"TERMINAL",0,0,"6",,terminal_output +10318,7670805,"TERMINAL",0,0,"7",,terminal_output +10319,7671854,"TERMINAL",0,0,"8",,terminal_output +10320,7672837,"TERMINAL",0,0,"9",,terminal_output +10321,7673903,"TERMINAL",0,0,"30",,terminal_output +10322,7674898,"TERMINAL",0,0,"1",,terminal_output +10323,7676052,"TERMINAL",0,0,"2",,terminal_output +10324,7677077,"TERMINAL",0,0,"3",,terminal_output +10325,7678029,"TERMINAL",0,0,"4",,terminal_output +10326,7679124,"TERMINAL",0,0,"5",,terminal_output +10327,7680148,"TERMINAL",0,0,"6",,terminal_output +10328,7681172,"TERMINAL",0,0,"8",,terminal_output +10329,7682218,"TERMINAL",0,0,"9",,terminal_output +10330,7683324,"TERMINAL",0,0,"40",,terminal_output +10331,7684348,"TERMINAL",0,0,"1",,terminal_output +10332,7685325,"TERMINAL",0,0,"2",,terminal_output +10333,7686364,"TERMINAL",0,0,"3",,terminal_output +10334,7687408,"TERMINAL",0,0,"4",,terminal_output +10335,7688450,"TERMINAL",0,0,"5",,terminal_output +10336,7689491,"TERMINAL",0,0,"6",,terminal_output +10337,7691631,"TERMINAL",0,0,"7 9",,terminal_output +10338,7692667,"TERMINAL",0,0,"9",,terminal_output +10339,7693773,"TERMINAL",0,0,"50",,terminal_output +10340,7694792,"TERMINAL",0,0,"1",,terminal_output +10341,7695966,"TERMINAL",0,0,"2",,terminal_output +10342,7696858,"TERMINAL",0,0,"3",,terminal_output +10343,7697890,"TERMINAL",0,0,"4",,terminal_output +10344,7698989,"TERMINAL",0,0,"5",,terminal_output +10345,7699220,"genie.py",7805,0,"",python,selection_mouse +10346,7699342,"genie.py",7795,14,"vid_token_idxs",python,selection_mouse +10347,7699945,"genie.py",7816,0,"",python,selection_mouse +10348,7700001,"TERMINAL",0,0,"6",,terminal_output +10349,7700102,"genie.py",7812,10,"token_idxs",python,selection_mouse +10350,7701000,"TERMINAL",0,0,"7",,terminal_output +10351,7702061,"TERMINAL",0,0,"8",,terminal_output +10352,7703089,"genie.py",7804,0,"",python,selection_mouse +10353,7703225,"genie.py",7795,14,"vid_token_idxs",python,selection_mouse +10354,7703310,"TERMINAL",0,0,"9",,terminal_output +10355,7704480,"TERMINAL",0,0,"4:00",,terminal_output +10356,7705267,"TERMINAL",0,0,"2",,terminal_output +10357,7706201,"TERMINAL",0,0,"3",,terminal_output +10358,7706489,"genie.py",7807,0,"",python,selection_mouse +10359,7706947,"genie.py",7801,0,"",python,selection_mouse +10360,7707303,"TERMINAL",0,0,"4",,terminal_output +10361,7708310,"TERMINAL",0,0,"5",,terminal_output +10362,7709345,"TERMINAL",0,0,"6",,terminal_output +10363,7709562,"genie.py",7800,0,"",python,selection_command +10364,7709689,"genie.py",7799,0,"",python,selection_command +10365,7710198,"genie.py",7798,0,"",python,selection_command +10366,7710225,"genie.py",7797,0,"",python,selection_command +10367,7710309,"genie.py",7796,0,"",python,selection_command 
+10368,7710399,"TERMINAL",0,0,"7",,terminal_output +10369,7710486,"genie.py",7795,0,"",python,selection_command +10370,7711494,"TERMINAL",0,0,"8",,terminal_output +10371,7712092,"genie.py",7795,15,"",python,content +10372,7712363,"genie.py",7795,1,"",python,content +10373,7712471,"TERMINAL",0,0,"9",,terminal_output +10374,7713245,"genie.py",7795,1,"",python,content +10375,7713477,"TERMINAL",0,0,"10",,terminal_output +10376,7713815,"genie.py",7795,10,"",python,content +10377,7713908,"genie.py",7794,0,"",python,selection_command +10378,7714567,"TERMINAL",0,0,"1",,terminal_output +10379,7714919,"genie.py",7787,9,"",python,content +10380,7715003,"genie.py",7795,0,"",python,selection_command +10381,7715589,"TERMINAL",0,0,"2",,terminal_output +10382,7716542,"genie.py",7833,0,"",python,selection_mouse +10383,7716617,"TERMINAL",0,0,"3",,terminal_output +10384,7717066,"genie.py",7833,1,"",python,content +10385,7717241,"genie.py",7833,1,"",python,content +10386,7717389,"genie.py",7833,1,"",python,content +10387,7717560,"genie.py",7833,1,"",python,content +10388,7717647,"TERMINAL",0,0,"4",,terminal_output +10389,7718678,"TERMINAL",0,0,"5",,terminal_output +10390,7719778,"TERMINAL",0,0,"6",,terminal_output +10391,7720751,"TERMINAL",0,0,"7",,terminal_output +10392,7721913,"TERMINAL",0,0,"8",,terminal_output +10393,7722849,"TERMINAL",0,0,"9",,terminal_output +10394,7723874,"TERMINAL",0,0,"20",,terminal_output +10395,7725000,"TERMINAL",0,0,"1",,terminal_output +10396,7726024,"TERMINAL",0,0,"2",,terminal_output +10397,7726997,"TERMINAL",0,0,"3",,terminal_output +10398,7728043,"TERMINAL",0,0,"4",,terminal_output +10399,7729087,"TERMINAL",0,0,"5",,terminal_output +10400,7730222,"TERMINAL",0,0,"6",,terminal_output +10401,7731247,"TERMINAL",0,0,"8",,terminal_output +10402,7732222,"TERMINAL",0,0,"9",,terminal_output +10403,7732901,"genie.py",9750,0,"",python,selection_mouse +10404,7733096,"genie.py",9745,10,"token_idxs",python,selection_mouse +10405,7733257,"TERMINAL",0,0,"30",,terminal_output +10406,7734280,"TERMINAL",0,0,"1",,terminal_output +10407,7734832,"genie.py",7728,0,"",python,selection_mouse +10408,7734989,"genie.py",7722,10,"token_idxs",python,selection_mouse +10409,7735351,"TERMINAL",0,0,"2",,terminal_output +10410,7735933,"genie.py",7835,0,"",python,selection_mouse +10411,7736060,"genie.py",7833,10,"token_idxs",python,selection_mouse +10412,7736362,"TERMINAL",0,0,"3",,terminal_output +10413,7736960,"genie.py",7801,0,"",python,selection_mouse +10414,7737094,"genie.py",7795,9,"vid_embed",python,selection_mouse +10415,7737386,"TERMINAL",0,0,"4",,terminal_output +10416,7738517,"TERMINAL",0,0,"5",,terminal_output +10417,7739457,"TERMINAL",0,0,"6",,terminal_output +10418,7740565,"TERMINAL",0,0,"7",,terminal_output +10419,7741525,"TERMINAL",0,0,"8",,terminal_output +10420,7742613,"TERMINAL",0,0,"9",,terminal_output +10421,7743236,"genie.py",8010,0,"",python,selection_mouse +10422,7743599,"TERMINAL",0,0,"4010",,terminal_output +10423,7744660,"TERMINAL",0,0,"12",,terminal_output +10424,7745690,"TERMINAL",0,0,"2",,terminal_output +10425,7745986,"genie.py",8138,0,"",python,selection_mouse +10426,7746552,"genie.py",8128,0,"",python,selection_mouse +10427,7746704,"TERMINAL",0,0,"3",,terminal_output +10428,7747733,"TERMINAL",0,0,"4",,terminal_output +10429,7748747,"TERMINAL",0,0,"51",,terminal_output +10430,7749883,"TERMINAL",0,0,"6",,terminal_output +10431,7750909,"TERMINAL",0,0,"7",,terminal_output +10432,7751882,"TERMINAL",0,0,"8",,terminal_output +10433,7752920,"TERMINAL",0,0,"9",,terminal_output 
+10434,7753980,"TERMINAL",0,0,"50",,terminal_output +10435,7755118,"TERMINAL",0,0,"1",,terminal_output +10436,7755919,"genie.py",8149,0,"",python,selection_mouse +10437,7755932,"genie.py",8148,0,"",python,selection_command +10438,7756050,"TERMINAL",0,0,"22",,terminal_output +10439,7757153,"TERMINAL",0,0,"3",,terminal_output +10440,7758137,"TERMINAL",0,0,"4",,terminal_output +10441,7759203,"TERMINAL",0,0,"6",,terminal_output +10442,7760204,"TERMINAL",0,0,"7",,terminal_output +10443,7761235,"TERMINAL",0,0,"8",,terminal_output +10444,7762281,"TERMINAL",0,0,"9",,terminal_output +10445,7763351,"TERMINAL",0,0,"5:00",,terminal_output +10446,7764360,"TERMINAL",0,0,"1",,terminal_output +10447,7765425,"TERMINAL",0,0,"2",,terminal_output +10448,7766568,"TERMINAL",0,0,"3",,terminal_output +10449,7767488,"TERMINAL",0,0,"4",,terminal_output +10450,7767545,"genie.py",4370,0,"",python,selection_mouse +10451,7767698,"genie.py",4366,5,"print",python,selection_mouse +10452,7767844,"genie.py",4354,57," print(""init_mask[0,:,0]:"", init_mask[0,:,0])\n",python,selection_mouse +10453,7768622,"TERMINAL",0,0,"5",,terminal_output +10454,7769659,"TERMINAL",0,0,"6",,terminal_output +10455,7770630,"TERMINAL",0,0,"7",,terminal_output +10456,7771656,"TERMINAL",0,0,"8",,terminal_output +10457,7772698,"TERMINAL",0,0,"96",,terminal_output +10458,7773743,"TERMINAL",0,0,"10",,terminal_output +10459,7774868,"TERMINAL",0,0,"1",,terminal_output +10460,7775894,"TERMINAL",0,0,"2",,terminal_output +10461,7776920,"TERMINAL",0,0,"3",,terminal_output +10462,7777979,"TERMINAL",0,0,"4",,terminal_output +10463,7778949,"TERMINAL",0,0,"5",,terminal_output +10464,7779596,"genie.py",8218,0,"",python,selection_mouse +10465,7779997,"TERMINAL",0,0,"6",,terminal_output +10466,7780167,"genie.py",8175,0,"",python,selection_mouse +10467,7781116,"TERMINAL",0,0,"7",,terminal_output +10468,7781550,"genie.py",8176,0,"_",python,content +10469,7781583,"genie.py",8176,0,"",python,selection_command +10470,7782077,"TERMINAL",0,0,"8",,terminal_output +10471,7782719,"genie.py",8176,1,"",python,content +10472,7782772,"genie.py",8175,0,"",python,selection_command +10473,7783122,"TERMINAL",0,0,"9",,terminal_output +10474,7784011,"genie.py",8217,0,"\n ",python,content +10475,7784205,"genie.py",8226,0," print(""init_mask[0,:,0]:"", init_mask[0,:,0])\n",python,content +10476,7784242,"TERMINAL",0,0,"21",,terminal_output +10477,7785213,"TERMINAL",0,0,"2",,terminal_output +10478,7785495,"genie.py",8218,0,"",python,selection_command +10479,7785971,"genie.py",8238,0,"",python,selection_command +10480,7786284,"genie.py",8234,4,"",python,content +10481,7786340,"TERMINAL",0,0,"3",,terminal_output +10482,7786499,"genie.py",8230,4,"",python,content +10483,7786683,"genie.py",8226,4,"",python,content +10484,7787352,"genie.py",8225,0,"",python,selection_command +10485,7787371,"TERMINAL",0,0,"4",,terminal_output +10486,7788001,"genie.py",8256,0,"",python,selection_mouse +10487,7788343,"TERMINAL",0,0,"530",,terminal_output +10488,7788571,"genie.py",8257,0,"",python,selection_command +10489,7788757,"genie.py",8258,0,"",python,selection_command +10490,7789360,"genie.py",8257,1,"",python,content +10491,7789396,"TERMINAL",0,0,"6",,terminal_output +10492,7789511,"genie.py",8256,1,"",python,content +10493,7789621,"genie.py",8255,1,"",python,content +10494,7789786,"genie.py",8254,1,"",python,content +10495,7789925,"genie.py",8253,1,"",python,content +10496,7790468,"TERMINAL",0,0,"72",,terminal_output +10497,7791186,"genie.py",8263,0,"",python,selection_mouse 
+10498,7791514,"TERMINAL",0,0,"8",,terminal_output +10499,7792009,"genie.py",8263,0,",",python,content +10500,7792010,"genie.py",8264,0,"",python,selection_keyboard +10501,7792099,"genie.py",8264,0," ",python,content +10502,7792101,"genie.py",8265,0,"",python,selection_keyboard +10503,7792397,"genie.py",8265,0,"0",python,content +10504,7792398,"genie.py",8266,0,"",python,selection_keyboard +10505,7792550,"TERMINAL",0,0,"9",,terminal_output +10506,7792666,"genie.py",8265,0,"",python,selection_command +10507,7793565,"TERMINAL",0,0,"30",,terminal_output +10508,7794686,"TERMINAL",0,0,"1",,terminal_output +10509,7795632,"TERMINAL",0,0,"2",,terminal_output +10510,7796681,"TERMINAL",0,0,"3",,terminal_output +10511,7797737,"TERMINAL",0,0,"4",,terminal_output +10512,7798771,"TERMINAL",0,0,"5",,terminal_output +10513,7799855,"TERMINAL",0,0,"6",,terminal_output +10514,7799911,"genie.py",4367,0,"",python,selection_mouse +10515,7800090,"genie.py",4366,5,"print",python,selection_mouse +10516,7800604,"genie.py",4311,0,"",python,selection_mouse +10517,7800768,"genie.py",4307,5,"print",python,selection_mouse +10518,7800913,"genie.py",4295,59," print(""token_idxs[0,:,0]:"", token_idxs[0,:,0])\n",python,selection_mouse +10519,7800932,"TERMINAL",0,0,"7",,terminal_output +10520,7802005,"TERMINAL",0,0,"8",,terminal_output +10521,7802996,"TERMINAL",0,0,"9",,terminal_output +10522,7804072,"TERMINAL",0,0,"40",,terminal_output +10523,7805078,"TERMINAL",0,0,"1",,terminal_output +10524,7806100,"TERMINAL",0,0,"2",,terminal_output +10525,7807125,"TERMINAL",0,0,"3",,terminal_output +10526,7808251,"TERMINAL",0,0,"5",,terminal_output +10527,7809195,"TERMINAL",0,0,"6",,terminal_output +10528,7809512,"genie.py",6455,0,"",python,selection_mouse +10529,7809520,"genie.py",6454,0,"",python,selection_command +10530,7810367,"TERMINAL",0,0,"7",,terminal_output +10531,7811323,"TERMINAL",0,0,"8",,terminal_output +10532,7811648,"genie.py",8242,0,"",python,selection_mouse +10533,7812332,"TERMINAL",0,0,"9",,terminal_output +10534,7812575,"genie.py",8268,0,"\n ",python,content +10535,7812751,"genie.py",8277,0," print(""token_idxs[0,:,0]:"", token_idxs[0,:,0])\n",python,content +10536,7813370,"TERMINAL",0,0,"50",,terminal_output +10537,7813822,"genie.py",8289,0,"",python,selection_mouse +10538,7814248,"genie.py",8285,4,"",python,content +10539,7814437,"TERMINAL",0,0,"1",,terminal_output +10540,7814466,"genie.py",8281,4,"",python,content +10541,7814632,"genie.py",8277,4,"",python,content +10542,7815292,"genie.py",8276,0,"",python,selection_command +10543,7815458,"TERMINAL",0,0,"2",,terminal_output +10544,7816211,"genie.py",8310,0,"",python,selection_mouse +10545,7816383,"genie.py",8305,10,"token_idxs",python,selection_mouse +10546,7816509,"TERMINAL",0,0,"3",,terminal_output +10547,7817396,"genie.py",8325,0,"",python,selection_mouse +10548,7817539,"TERMINAL",0,0,"4",,terminal_output +10549,7818595,"TERMINAL",0,0,"5",,terminal_output +10550,7818979,"genie.py",8325,1,"",python,content +10551,7819642,"TERMINAL",0,0,"6",,terminal_output +10552,7820154,"genie.py",8325,1,"",python,content +10553,7820271,"genie.py",8333,0,"",python,selection_command +10554,7820829,"TERMINAL",0,0,"7",,terminal_output +10555,7821569,"genie.py",8265,0,"",python,selection_mouse +10556,7821801,"TERMINAL",0,0,"8",,terminal_output +10557,7822000,"genie.py",8317,0,"",python,selection_mouse +10558,7822579,"genie.py",8318,0,"",python,selection_mouse +10559,7822754,"TERMINAL",0,0,"9",,terminal_output +10560,7823819,"TERMINAL",0,0,"6:00",,terminal_output 
+10561,7824013,"genie.py",8234,0,"",python,selection_mouse +10562,7824846,"TERMINAL",0,0,"1",,terminal_output +10563,7824953,"genie.py",8233,0,"",python,selection_command +10564,7825866,"TERMINAL",0,0,"24",,terminal_output +10565,7826889,"TERMINAL",0,0,"3",,terminal_output +10566,7827980,"TERMINAL",0,0,"4",,terminal_output +10567,7828201,"genie.py",8233,0,"m",python,content +10568,7828201,"genie.py",8234,0,"",python,selection_keyboard +10569,7828305,"genie.py",8234,0,"a",python,content +10570,7828306,"genie.py",8235,0,"",python,selection_keyboard +10571,7828441,"genie.py",8235,0,"s",python,content +10572,7828442,"genie.py",8236,0,"",python,selection_keyboard +10573,7828509,"genie.py",8236,0,"k",python,content +10574,7828510,"genie.py",8237,0,"",python,selection_keyboard +10575,7828848,"genie.py",8237,0,"g",python,content +10576,7828848,"genie.py",8238,0,"",python,selection_keyboard +10577,7829030,"TERMINAL",0,0,"5",,terminal_output +10578,7829278,"genie.py",8238,0,"u",python,content +10579,7829280,"genie.py",8239,0,"",python,selection_keyboard +10580,7829391,"genie.py",8239,0,"t",python,content +10581,7829392,"genie.py",8240,0,"",python,selection_keyboard +10582,7829877,"genie.py",8239,1,"",python,content +10583,7830040,"genie.py",8238,1,"",python,content +10584,7830066,"TERMINAL",0,0,"6",,terminal_output +10585,7830261,"genie.py",8238,0,"i",python,content +10586,7830262,"genie.py",8239,0,"",python,selection_keyboard +10587,7830384,"genie.py",8239,0,"t",python,content +10588,7830385,"genie.py",8240,0,"",python,selection_keyboard +10589,7831115,"TERMINAL",0,0,"75",,terminal_output +10590,7831167,"genie.py",8240,0,"-",python,content +10591,7831168,"genie.py",8241,0,"",python,selection_keyboard +10592,7831650,"genie.py",8300,0,"",python,selection_command +10593,7832130,"TERMINAL",0,0,"8",,terminal_output +10594,7832840,"genie.py",8292,0,"m",python,content +10595,7832840,"genie.py",8293,0,"",python,selection_keyboard +10596,7832897,"genie.py",8293,0,"a",python,content +10597,7832898,"genie.py",8294,0,"",python,selection_keyboard +10598,7833002,"genie.py",8294,0,"s",python,content +10599,7833004,"genie.py",8295,0,"",python,selection_keyboard +10600,7833086,"genie.py",8295,0,"k",python,content +10601,7833087,"genie.py",8296,0,"",python,selection_keyboard +10602,7833190,"TERMINAL",0,0,"10",,terminal_output +10603,7833357,"genie.py",8296,0,"g",python,content +10604,7833358,"genie.py",8297,0,"",python,selection_keyboard +10605,7833485,"genie.py",8297,0,"i",python,content +10606,7833486,"genie.py",8298,0,"",python,selection_keyboard +10607,7833524,"genie.py",8298,0,"t",python,content +10608,7833525,"genie.py",8299,0,"",python,selection_keyboard +10609,7833703,"genie.py",8299,0,"-",python,content +10610,7833703,"genie.py",8300,0,"",python,selection_keyboard +10611,7834140,"genie.py",8299,0,"",python,selection_command +10612,7834265,"TERMINAL",0,0,"1",,terminal_output +10613,7834688,"genie.py",8340,0,"",python,selection_mouse +10614,7835279,"genie.py",8309,0,"",python,selection_mouse +10615,7835345,"TERMINAL",0,0,"2",,terminal_output +10616,7835821,"genie.py",8301,0,"",python,selection_mouse +10617,7835969,"genie.py",8300,10,"token_idxs",python,selection_mouse +10618,7836315,"TERMINAL",0,0,"3",,terminal_output +10619,7836591,"genie.py",8295,0,"",python,selection_mouse +10620,7836725,"genie.py",8292,7,"maskgit",python,selection_mouse +10621,7837255,"genie.py",8238,0,"",python,selection_mouse +10622,7837402,"genie.py",8233,7,"maskgit",python,selection_mouse 
+10623,7837412,"TERMINAL",0,0,"4",,terminal_output +10624,7837973,"genie.py",8304,0,"",python,selection_mouse +10625,7838147,"genie.py",8300,10,"token_idxs",python,selection_mouse +10626,7838476,"TERMINAL",0,0,"5",,terminal_output +10627,7839484,"TERMINAL",0,0,"6",,terminal_output +10628,7839899,"genie.py",8261,0,"",python,selection_mouse +10629,7840494,"TERMINAL",0,0,"7",,terminal_output +10630,7841544,"TERMINAL",0,0,"8",,terminal_output +10631,7842588,"TERMINAL",0,0,"9",,terminal_output +10632,7843683,"TERMINAL",0,0,"20",,terminal_output +10633,7844651,"TERMINAL",0,0,"1",,terminal_output +10634,7845674,"TERMINAL",0,0,"2",,terminal_output +10635,7846756,"TERMINAL",0,0,"3",,terminal_output +10636,7847779,"TERMINAL",0,0,"4",,terminal_output +10637,7848906,"TERMINAL",0,0,"5",,terminal_output +10638,7849930,"TERMINAL",0,0,"630",,terminal_output +10639,7850887,"TERMINAL",0,0,"7",,terminal_output +10640,7851928,"TERMINAL",0,0,"8",,terminal_output +10641,7852979,"TERMINAL",0,0,"9",,terminal_output +10642,7854025,"TERMINAL",0,0,"30",,terminal_output +10643,7855153,"TERMINAL",0,0,"1",,terminal_output +10644,7856166,"TERMINAL",0,0,"2",,terminal_output +10645,7857201,"TERMINAL",0,0,"3",,terminal_output +10646,7858224,"TERMINAL",0,0,"4",,terminal_output +10647,7859161,"TERMINAL",0,0,"6",,terminal_output +10648,7860201,"TERMINAL",0,0,"7",,terminal_output +10649,7861295,"TERMINAL",0,0,"8",,terminal_output +10650,7862284,"TERMINAL",0,0,"9",,terminal_output +10651,7863363,"TERMINAL",0,0,"40",,terminal_output +10652,7864472,"TERMINAL",0,0,"1",,terminal_output +10653,7865392,"TERMINAL",0,0,"2",,terminal_output +10654,7866518,"TERMINAL",0,0,"3",,terminal_output +10655,7867466,"TERMINAL",0,0,"4",,terminal_output +10656,7868567,"TERMINAL",0,0,"5",,terminal_output +10657,7869548,"TERMINAL",0,0,"6",,terminal_output +10658,7870590,"TERMINAL",0,0,"7",,terminal_output +10659,7871621,"TERMINAL",0,0,"8",,terminal_output +10660,7872665,"TERMINAL",0,0,"9",,terminal_output +10661,7873787,"TERMINAL",0,0,"50",,terminal_output +10662,7874735,"TERMINAL",0,0,"1",,terminal_output +10663,7875757,"TERMINAL",0,0,"2",,terminal_output +10664,7876871,"genie.py",9871,0,"",python,selection_mouse +10665,7876907,"TERMINAL",0,0,"3",,terminal_output +10666,7877042,"genie.py",9867,10,"token_idxs",python,selection_mouse +10667,7877966,"TERMINAL",0,0,"4",,terminal_output +10668,7878907,"TERMINAL",0,0,"5",,terminal_output +10669,7879928,"TERMINAL",0,0,"6",,terminal_output +10670,7881104,"TERMINAL",0,0,"7",,terminal_output +10671,7882110,"TERMINAL",0,0,"8",,terminal_output +10672,7883081,"TERMINAL",0,0,"94",,terminal_output +10673,7884131,"TERMINAL",0,0,"7:00",,terminal_output +10674,7885279,"TERMINAL",0,0,"2",,terminal_output +10675,7886185,"TERMINAL",0,0,"3",,terminal_output +10676,7887223,"TERMINAL",0,0,"4",,terminal_output +10677,7888336,"TERMINAL",0,0,"5",,terminal_output +10678,7889353,"TERMINAL",0,0,"6",,terminal_output +10679,7890361,"TERMINAL",0,0,"7",,terminal_output +10680,7891504,"TERMINAL",0,0,"8",,terminal_output +10681,7892528,"TERMINAL",0,0,"9",,terminal_output +10682,7893554,"TERMINAL",0,0,"10",,terminal_output +10683,7894526,"TERMINAL",0,0,"1",,terminal_output +10684,7895812,"TERMINAL",0,0,"2",,terminal_output +10685,7896617,"TERMINAL",0,0,"3",,terminal_output +10686,7897727,"TERMINAL",0,0,"4",,terminal_output +10687,7898775,"TERMINAL",0,0,"5",,terminal_output +10688,7899793,"TERMINAL",0,0,"6",,terminal_output +10689,7900822,"TERMINAL",0,0,"7",,terminal_output +10690,7901541,"genie.py",5329,0,"",python,selection_mouse 
+10691,7901724,"genie.py",5329,1,"2",python,selection_mouse +10692,7901848,"TERMINAL",0,0,"8",,terminal_output +10693,7902872,"TERMINAL",0,0,"9",,terminal_output +10694,7903923,"TERMINAL",0,0,"20",,terminal_output +10695,7905083,"TERMINAL",0,0,"1",,terminal_output +10696,7906195,"TERMINAL",0,0,"2",,terminal_output +10697,7907138,"TERMINAL",0,0,"3",,terminal_output +10698,7908082,"TERMINAL",0,0,"4",,terminal_output +10699,7908243,"genie.py",9870,0,"",python,selection_mouse +10700,7908379,"genie.py",9867,10,"token_idxs",python,selection_mouse +10701,7908990,"genie.py",9924,0,"",python,selection_mouse +10702,7909047,"genie.py",9923,0,"",python,selection_command +10703,7909132,"genie.py",9920,4,"None",python,selection_mouse +10704,7909156,"genie.py",9921,3,"one",python,selection_command +10705,7909157,"TERMINAL",0,0,"53",,terminal_output +10706,7909616,"genie.py",9887,0,"",python,selection_mouse +10707,7909755,"genie.py",9879,13,"action_tokens",python,selection_mouse +10708,7910175,"TERMINAL",0,0,"7",,terminal_output +10709,7911269,"TERMINAL",0,0,"825",,terminal_output +10710,7912252,"TERMINAL",0,0,"9",,terminal_output +10711,7913316,"TERMINAL",0,0,"30",,terminal_output +10712,7914341,"TERMINAL",0,0,"149",,terminal_output +10713,7915472,"TERMINAL",0,0,"2",,terminal_output +10714,7916407,"TERMINAL",0,0,"3",,terminal_output +10715,7917494,"genie.py",9874,0,"",python,selection_mouse +10716,7917526,"TERMINAL",0,0,"4",,terminal_output +10717,7917649,"genie.py",9867,10,"token_idxs",python,selection_mouse +10718,7918543,"TERMINAL",0,0,"5",,terminal_output +10719,7919521,"TERMINAL",0,0,"6",,terminal_output +10720,7920568,"TERMINAL",0,0,"7",,terminal_output +10721,7921613,"TERMINAL",0,0,"8",,terminal_output +10722,7922736,"TERMINAL",0,0,"9",,terminal_output +10723,7923761,"TERMINAL",0,0,"40",,terminal_output +10724,7924786,"TERMINAL",0,0,"1",,terminal_output +10725,7925807,"TERMINAL",0,0,"2",,terminal_output +10726,7926053,"genie.py",9830,0,"",python,selection_mouse +10727,7926834,"TERMINAL",0,0,"3",,terminal_output +10728,7927125,"genie.py",9793,0,"",python,selection_mouse +10729,7927647,"genie.py",9791,0,"",python,selection_mouse +10730,7927797,"genie.py",9785,8,"new_mask",python,selection_mouse +10731,7927892,"TERMINAL",0,0,"4",,terminal_output +10732,7928417,"genie.py",9634,0,"",python,selection_mouse +10733,7928536,"genie.py",9628,11,"sorted_idxs",python,selection_mouse +10734,7928912,"TERMINAL",0,0,"5",,terminal_output +10735,7929103,"genie.py",9790,0,"",python,selection_mouse +10736,7929229,"genie.py",9785,8,"new_mask",python,selection_mouse +10737,7929742,"genie.py",9712,0,"",python,selection_mouse +10738,7929908,"genie.py",9707,14,"mask_update_fn",python,selection_mouse +10739,7929965,"TERMINAL",0,0,"6",,terminal_output +10740,7930414,"genie.py",9634,0,"",python,selection_mouse +10741,7930569,"genie.py",9628,11,"sorted_idxs",python,selection_mouse +10742,7930970,"TERMINAL",0,0,"7",,terminal_output +10743,7931075,"genie.py",9553,0,"",python,selection_mouse +10744,7931226,"genie.py",9547,8,"idx_mask",python,selection_mouse +10745,7932077,"genie.py",9554,0,"",python,selection_mouse +10746,7932099,"TERMINAL",0,0,"8",,terminal_output +10747,7932411,"genie.py",9475,0,"",python,selection_mouse +10748,7932605,"genie.py",9467,19,"num_unmasked_tokens",python,selection_mouse +10749,7933053,"TERMINAL",0,0,"913",,terminal_output +10750,7933268,"genie.py",9633,0,"",python,selection_mouse +10751,7933392,"genie.py",9628,11,"sorted_idxs",python,selection_mouse 
+10752,7934103,"TERMINAL",0,0,"50",,terminal_output +10753,7935139,"TERMINAL",0,0,"1",,terminal_output +10754,7936080,"TERMINAL",0,0,"sh scripts_horeka/overfit_sample_tiny/sample.sh ",,terminal_output +10755,7936189,"TERMINAL",0,0,"3",,terminal_output +10756,7937278,"TERMINAL",0,0,"4",,terminal_output +10757,7937747,"TERMINAL",0,0,"\r",,terminal_output +10758,7938241,"TERMINAL",0,0,"5",,terminal_output +10759,7938366,"TERMINAL",0,0,"[?25lso[?25h[?25lo[?25h",,terminal_output +10760,7938514,"TERMINAL",0,0,"[?25lu[?25h",,terminal_output +10761,7938617,"TERMINAL",0,0,"[?25lr[?25h",,terminal_output +10762,7938777,"TERMINAL",0,0,"[?25lc[?25h",,terminal_output +10763,7938847,"TERMINAL",0,0,"[?25le[?25h",,terminal_output +10764,7938955,"TERMINAL",0,0,"[?25l [?25h",,terminal_output +10765,7939062,"TERMINAL",0,0,"[?25l.[?25h",,terminal_output +10766,7939114,"TERMINAL",0,0,"[?25lv[?25h",,terminal_output +10767,7939264,"TERMINAL",0,0,"6",,terminal_output +10768,7939278,"TERMINAL",0,0,"env/",,terminal_output +10769,7939529,"TERMINAL",0,0,"[?25lb[?25h",,terminal_output +10770,7939648,"TERMINAL",0,0,"in/",,terminal_output +10771,7939886,"TERMINAL",0,0,"[?25lac[?25h",,terminal_output +10772,7940110,"TERMINAL",0,0,"tivate",,terminal_output +10773,7940315,"TERMINAL",0,0,"7",,terminal_output +10774,7940473,"TERMINAL",0,0,"\r\n[?2004l\r]0;tum_cte0515@hkn0901:~/Projects/jafar[?2004h(jafar) [tum_cte0515@hkn0901 jafar]$ ",,terminal_output +10775,7940893,"TERMINAL",0,0,"source .venv/bin/activate",,terminal_output +10776,7940996,"TERMINAL",0,0,"h scripts_horeka/overfit_sample_tiny/sample.sh ",,terminal_output +10777,7941375,"TERMINAL",0,0,"8",,terminal_output +10778,7941556,"TERMINAL",0,0,"\r\n[?2004l\r",,terminal_output +10779,7941677,"TERMINAL",0,0,"Sampling from checkpoint: /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/3309699/genie_1751384516_74500/\r\n",,terminal_output +10780,7942377,"TERMINAL",0,0,"9",,terminal_output +10781,7943425,"TERMINAL",0,0,"8:00",,terminal_output +10782,7944549,"TERMINAL",0,0,"1",,terminal_output +10783,7945575,"TERMINAL",0,0,"2",,terminal_output +10784,7946533,"TERMINAL",0,0,"3",,terminal_output +10785,7947579,"TERMINAL",0,0,"4",,terminal_output +10786,7948620,"TERMINAL",0,0,"5",,terminal_output +10787,7949665,"TERMINAL",0,0,"6",,terminal_output +10788,7950717,"TERMINAL",0,0,"7",,terminal_output +10789,7951819,"TERMINAL",0,0,"8",,terminal_output +10790,7952842,"TERMINAL",0,0,"9",,terminal_output +10791,7953845,"TERMINAL",0,0,"10",,terminal_output +10792,7954891,"TERMINAL",0,0,"1",,terminal_output +10793,7955928,"TERMINAL",0,0,"2",,terminal_output +10794,7957043,"TERMINAL",0,0,"3",,terminal_output +10795,7958043,"TERMINAL",0,0,"4",,terminal_output +10796,7958055,"TERMINAL",0,0,"2025-07-03 18:28:14.950351: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. 
Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +10797,7959059,"TERMINAL",0,0,"5",,terminal_output +10798,7960114,"TERMINAL",0,0,"6",,terminal_output +10799,7961133,"TERMINAL",0,0,"7",,terminal_output +10800,7962183,"TERMINAL",0,0,"9",,terminal_output +10801,7963288,"TERMINAL",0,0,"20",,terminal_output +10802,7964311,"TERMINAL",0,0,"1",,terminal_output +10803,7965337,"TERMINAL",0,0,"2",,terminal_output +10804,7966463,"TERMINAL",0,0,"3",,terminal_output +10805,7967496,"TERMINAL",0,0,"4",,terminal_output +10806,7968511,"TERMINAL",0,0,"5",,terminal_output +10807,7969536,"TERMINAL",0,0,"6",,terminal_output +10808,7970567,"TERMINAL",0,0,"7",,terminal_output +10809,7971072,"TERMINAL",0,0,"2025-07-03 18:28:27.917056: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +10810,7971592,"TERMINAL",0,0,"84",,terminal_output +10811,7972619,"TERMINAL",0,0,"9",,terminal_output +10812,7973733,"TERMINAL",0,0,"30",,terminal_output +10813,7974713,"TERMINAL",0,0,"1",,terminal_output +10814,7975781,"TERMINAL",0,0,"2",,terminal_output +10815,7976763,"TERMINAL",0,0,"3",,terminal_output +10816,7977829,"TERMINAL",0,0,"4",,terminal_output +10817,7978853,"TERMINAL",0,0,"5",,terminal_output +10818,7979879,"TERMINAL",0,0,"65",,terminal_output +10819,7980909,"TERMINAL",0,0,"7",,terminal_output +10820,7981419,"TERMINAL",0,0,"2025-07-03 18:28:38.284651: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +10821,7981954,"TERMINAL",0,0,"8",,terminal_output +10822,7982980,"TERMINAL",0,0,"9",,terminal_output +10823,7984021,"TERMINAL",0,0,"40",,terminal_output +10824,7985099,"TERMINAL",0,0,"1",,terminal_output +10825,7986084,"TERMINAL",0,0,"2",,terminal_output +10826,7987148,"TERMINAL",0,0,"3",,terminal_output +10827,7988173,"TERMINAL",0,0,"5",,terminal_output +10828,7989299,"TERMINAL",0,0,"6",,terminal_output +10829,7990323,"TERMINAL",0,0,"7",,terminal_output +10830,7991347,"TERMINAL",0,0,"8",,terminal_output +10831,7992286,"TERMINAL",0,0,"9",,terminal_output +10832,7993394,"TERMINAL",0,0,"50",,terminal_output +10833,7994116,"TERMINAL",0,0,"2025-07-03 18:28:50.992932: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +10834,7994456,"TERMINAL",0,0,"1",,terminal_output +10835,7995453,"TERMINAL",0,0,"2",,terminal_output +10836,7996466,"TERMINAL",0,0,"3",,terminal_output +10837,7996876,"TERMINAL",0,0,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/orbax/checkpoint/_src/serialization/type_handlers.py:1251: UserWarning: Sharding info not provided when restoring. Populating sharding info from sharding file. Please note restoration time will be slightly increased due to reading from file. 
Note also that this option is unsafe when restoring on a different topology than the checkpoint was saved with.\r\n warnings.warn(\r\n",,terminal_output +10838,7997592,"TERMINAL",0,0,"4",,terminal_output +10839,7998542,"TERMINAL",0,0,"5",,terminal_output +10840,7999568,"TERMINAL",0,0,"6",,terminal_output +10841,8000615,"TERMINAL",0,0,"7",,terminal_output +10842,8001636,"TERMINAL",0,0,"8",,terminal_output +10843,8002676,"TERMINAL",0,0,"9",,terminal_output +10844,8003736,"TERMINAL",0,0,"9:00",,terminal_output +10845,8004762,"TERMINAL",0,0,"1",,terminal_output +10846,8005170,"TERMINAL",0,0,"2025-07-03 18:29:02.003217: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +10847,8005794,"TERMINAL",0,0,"2",,terminal_output +10848,8006912,"TERMINAL",0,0,"3",,terminal_output +10849,8007981,"TERMINAL",0,0,"4",,terminal_output +10850,8008101,"TERMINAL",0,0,"2025-07-03 18:29:05.000978: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +10851,8008958,"TERMINAL",0,0,"5",,terminal_output +10852,8010001,"TERMINAL",0,0,"6",,terminal_output +10853,8011108,"TERMINAL",0,0,"7",,terminal_output +10854,8012058,"TERMINAL",0,0,"8",,terminal_output +10855,8013103,"TERMINAL",0,0,"9",,terminal_output +10856,8014181,"TERMINAL",0,0,"11",,terminal_output +10857,8014594,"TERMINAL",0,0,"2025-07-03 18:29:11.464033: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +10858,8015205,"TERMINAL",0,0,"2",,terminal_output +10859,8016331,"TERMINAL",0,0,"3",,terminal_output +10860,8016741,"TERMINAL",0,0,"2025-07-03 18:29:13.617254: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +10861,8017288,"TERMINAL",0,0,"4",,terminal_output +10862,8018099,"TERMINAL",0,0,"token_idxs shape: (1, 1, 920)\r\n",,terminal_output +10863,8018328,"TERMINAL",0,0,"5",,terminal_output +10864,8018790,"TERMINAL",0,0,"token_idxs[0,:,0]: [151 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0]\r\n",,terminal_output +10865,8018844,"TERMINAL",0,0,"init_mask[0,:,0]: [False True True True True True True True True True True True\r\n True True True True]\r\n",,terminal_output +10866,8019027,"TERMINAL",0,0,"jax.errors.SimplifiedTraceback: For simplicity, JAX has removed its internal frames from the traceback of the following exception. 
Set JAX_TRACEBACK_FILTERING=off to include these.\r\n\r\nThe above exception was the direct cause of the following exception:\r\n\r\nTraceback (most recent call last):\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/sample.py"", line 157, in \r\n vid = _autoreg_sample_mihir(rng, video_batch, action_batch)\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/sample.py"", line 118, in _autoreg_sample_mihir\r\n generated_vid = genie.apply(\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/genie.py"", line 155, in sample_mihir\r\n final_carry, _ = loop_fn(init_carry, jnp.arange(steps))\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/flax/core/axes_scan.py"", line 165, in scan_fn\r\n _, out_pvals, _ = pe.trace_to_jaxpr_nounits(f_flat, in_pvals)\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/flax/core/axes_scan.py"", line 135, in body_fn\r\n broadcast_out, c, ys = fn(broadcast_in, c, *xs)\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/genie.py"", line 245, in __call__\r\n print(""maskgit-init_mask[0,:,0]:"", mask[0,:,0, 0])\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/jax/_src/numpy/array_methods.py"", line 1081, in op\r\n return getattr(self.aval, f""_{name}"")(self, *args)\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/jax/_src/numpy/array_methods.py"", line 656, in _getitem\r\n return indexing.rewriting_take(self, item)\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/jax/_src/numpy/indexing.py"", line 651, in rewriting_take\r\n return internal_gather(arr, dynamic_idx)\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/jax/_src/numpy/indexing.py"", line 660, in _gather\r\n indexer = index_to_gather(np.shape(arr), idx) # shared with _scatter_update\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/jax/_src/numpy/indexing.py"", line 797, in index_to_gather\r\n idx = _canonicalize_tuple_index(len(x_shape), idx)\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/jax/_src/numpy/indexing.py"", line 1116, in _canonicalize_tuple_index\r\n raise IndexError(\r\nIndexError: Too many indices: 3-dimensional array indexed with 4 regular indices.\r\n",,terminal_output +10867,8019404,"TERMINAL",0,0,"6",,terminal_output +10868,8020443,"TERMINAL",0,0,"]0;tum_cte0515@hkn0901:~/Projects/jafar[?2004h(jafar) [tum_cte0515@hkn0901 jafar]$ ",,terminal_output +10869,8020468,"TERMINAL",0,0,"7",,terminal_output +10870,8021562,"TERMINAL",0,0,"8",,terminal_output +10871,8022583,"TERMINAL",0,0,"9",,terminal_output +10872,8023544,"TERMINAL",0,0,"20",,terminal_output +10873,8024626,"TERMINAL",0,0,"1",,terminal_output +10874,8025640,"TERMINAL",0,0,"2",,terminal_output +10875,8026684,"TERMINAL",0,0,"3",,terminal_output +10876,8027721,"TERMINAL",0,0,"4",,terminal_output +10877,8028824,"TERMINAL",0,0,"53",,terminal_output +10878,8029849,"TERMINAL",0,0,"6",,terminal_output +10879,8030872,"TERMINAL",0,0,"7",,terminal_output +10880,8031897,"TERMINAL",0,0,"8",,terminal_output +10881,8032982,"TERMINAL",0,0,"9",,terminal_output 
+10882,8033944,"TERMINAL",0,0,"30",,terminal_output +10883,8035071,"TERMINAL",0,0,"1",,terminal_output +10884,8036111,"TERMINAL",0,0,"2",,terminal_output +10885,8037065,"TERMINAL",0,0,"3",,terminal_output +10886,8038143,"TERMINAL",0,0,"4",,terminal_output +10887,8039168,"TERMINAL",0,0,"6",,terminal_output +10888,8040295,"TERMINAL",0,0,"7",,terminal_output +10889,8041243,"TERMINAL",0,0,"8",,terminal_output +10890,8042342,"TERMINAL",0,0,"9",,terminal_output +10891,8043366,"TERMINAL",0,0,"40",,terminal_output +10892,8044390,"TERMINAL",0,0,"1",,terminal_output +10893,8045417,"TERMINAL",0,0,"2",,terminal_output +10894,8046542,"TERMINAL",0,0,"3",,terminal_output +10895,8047570,"TERMINAL",0,0,"4",,terminal_output +10896,8048523,"TERMINAL",0,0,"5",,terminal_output +10897,8049612,"TERMINAL",0,0,"6",,terminal_output +10898,8050637,"TERMINAL",0,0,"7",,terminal_output +10899,8051652,"TERMINAL",0,0,"8",,terminal_output +10900,8053042,"TERMINAL",0,0,"9",,terminal_output +10901,8054086,"TERMINAL",0,0,"50",,terminal_output +10902,8055142,"TERMINAL",0,0,"1",,terminal_output +10903,8056166,"TERMINAL",0,0,"3",,terminal_output +10904,8057192,"TERMINAL",0,0,"4",,terminal_output +10905,8058222,"TERMINAL",0,0,"5",,terminal_output +10906,8059340,"TERMINAL",0,0,"6",,terminal_output +10907,8060364,"TERMINAL",0,0,"7",,terminal_output +10908,8061388,"TERMINAL",0,0,"8",,terminal_output +10909,8062411,"TERMINAL",0,0,"9",,terminal_output +10910,8063436,"TERMINAL",0,0,"30:00",,terminal_output +10911,8064477,"TERMINAL",0,0,"1",,terminal_output +10912,8065543,"TERMINAL",0,0,"2",,terminal_output +10913,8066614,"TERMINAL",0,0,"3",,terminal_output +10914,8067636,"TERMINAL",0,0,"4",,terminal_output +10915,8068624,"TERMINAL",0,0,"5",,terminal_output +10916,8069662,"TERMINAL",0,0,"6",,terminal_output +10917,8070704,"TERMINAL",0,0,"7",,terminal_output +10918,8071834,"TERMINAL",0,0,"8",,terminal_output +10919,8072859,"TERMINAL",0,0,"9",,terminal_output +10920,8073881,"TERMINAL",0,0,"10",,terminal_output +10921,8074905,"TERMINAL",0,0,"1",,terminal_output +10922,8075929,"TERMINAL",0,0,"2",,terminal_output +10923,8076959,"TERMINAL",0,0,"3",,terminal_output +10924,8077978,"TERMINAL",0,0,"4",,terminal_output +10925,8079018,"TERMINAL",0,0,"5",,terminal_output +10926,8080074,"TERMINAL",0,0,"6",,terminal_output +10927,8081153,"TERMINAL",0,0,"7",,terminal_output +10928,8082176,"TERMINAL",0,0,"8",,terminal_output +10929,8083201,"TERMINAL",0,0,"20",,terminal_output +10930,8084223,"TERMINAL",0,0,"1",,terminal_output +10931,8085350,"TERMINAL",0,0,"2",,terminal_output +10932,8086374,"TERMINAL",0,0,"3",,terminal_output +10933,8087323,"TERMINAL",0,0,"4",,terminal_output +10934,8088366,"TERMINAL",0,0,"5",,terminal_output +10935,8089447,"TERMINAL",0,0,"6",,terminal_output +10936,8090471,"TERMINAL",0,0,"7",,terminal_output +10937,8091494,"TERMINAL",0,0,"8",,terminal_output +10938,8092552,"TERMINAL",0,0,"9",,terminal_output +10939,8093646,"TERMINAL",0,0,"30",,terminal_output +10940,8094617,"TERMINAL",0,0,"1",,terminal_output +10941,8095653,"TERMINAL",0,0,"2",,terminal_output +10942,8096690,"TERMINAL",0,0,"3",,terminal_output +10943,8097742,"TERMINAL",0,0,"4",,terminal_output +10944,8098768,"TERMINAL",0,0,"5",,terminal_output +10945,8099892,"TERMINAL",0,0,"6",,terminal_output +10946,8100835,"TERMINAL",0,0,"7",,terminal_output +10947,8101865,"TERMINAL",0,0,"8",,terminal_output +10948,8102979,"TERMINAL",0,0,"9",,terminal_output +10949,8103958,"TERMINAL",0,0,"40",,terminal_output +10950,8105115,"TERMINAL",0,0,"1",,terminal_output 
+10951,8106139,"TERMINAL",0,0,"2",,terminal_output +10952,8107161,"TERMINAL",0,0,"3",,terminal_output +10953,8108188,"TERMINAL",0,0,"4",,terminal_output +10954,8109210,"TERMINAL",0,0,"6",,terminal_output +10955,8110204,"TERMINAL",0,0,"7",,terminal_output +10956,8111258,"TERMINAL",0,0,"8",,terminal_output +10957,8112269,"TERMINAL",0,0,"9",,terminal_output +10958,8113409,"TERMINAL",0,0,"50",,terminal_output +10959,8114436,"TERMINAL",0,0,"1",,terminal_output +10960,8115462,"TERMINAL",0,0,"2",,terminal_output +10961,8116435,"TERMINAL",0,0,"3",,terminal_output +10962,8117505,"TERMINAL",0,0,"4",,terminal_output +10963,8118528,"TERMINAL",0,0,"5",,terminal_output +10964,8119580,"TERMINAL",0,0,"6",,terminal_output +10965,8120589,"TERMINAL",0,0,"7",,terminal_output +10966,8121633,"TERMINAL",0,0,"8",,terminal_output +10967,8122771,"TERMINAL",0,0,"9",,terminal_output +10968,8123724,"TERMINAL",0,0,"1:00",,terminal_output +10969,8124767,"TERMINAL",0,0,"1",,terminal_output +10970,8125811,"TERMINAL",0,0,"2",,terminal_output +10971,8126925,"TERMINAL",0,0,"3",,terminal_output +10972,8127980,"TERMINAL",0,0,"4",,terminal_output +10973,8128976,"TERMINAL",0,0,"5",,terminal_output +10974,8129998,"TERMINAL",0,0,"6",,terminal_output +10975,8131124,"TERMINAL",0,0,"7",,terminal_output +10976,8132148,"TERMINAL",0,0,"8",,terminal_output +10977,8133105,"TERMINAL",0,0,"9",,terminal_output +10978,8134196,"TERMINAL",0,0,"11",,terminal_output +10979,8135219,"TERMINAL",0,0,"2",,terminal_output +10980,8136244,"TERMINAL",0,0,"3",,terminal_output +10981,8137272,"TERMINAL",0,0,"4",,terminal_output +10982,8138437,"TERMINAL",0,0,"5",,terminal_output +10983,8139436,"TERMINAL",0,0,"6",,terminal_output +10984,8140442,"TERMINAL",0,0,"7",,terminal_output +10985,8141467,"TERMINAL",0,0,"8",,terminal_output +10986,8142452,"TERMINAL",0,0,"9",,terminal_output +10987,8143493,"TERMINAL",0,0,"20",,terminal_output +10988,8144640,"TERMINAL",0,0,"1",,terminal_output +10989,8145576,"TERMINAL",0,0,"2",,terminal_output +10990,8146604,"TERMINAL",0,0,"3",,terminal_output +10991,8147643,"TERMINAL",0,0,"4",,terminal_output +10992,8148685,"TERMINAL",0,0,"5",,terminal_output +10993,8149722,"TERMINAL",0,0,"6",,terminal_output +10994,8150785,"TERMINAL",0,0,"7",,terminal_output +10995,8151912,"TERMINAL",0,0,"8",,terminal_output +10996,8152841,"TERMINAL",0,0,"9",,terminal_output +10997,8154000,"TERMINAL",0,0,"30",,terminal_output +10998,8154985,"TERMINAL",0,0,"1",,terminal_output +10999,8156006,"TERMINAL",0,0,"2",,terminal_output +11000,8156987,"TERMINAL",0,0,"3",,terminal_output +11001,8158031,"TERMINAL",0,0,"4",,terminal_output +11002,8159182,"TERMINAL",0,0,"5",,terminal_output +11003,8160207,"TERMINAL",0,0,"6",,terminal_output +11004,8161230,"TERMINAL",0,0,"8",,terminal_output +11005,8162254,"TERMINAL",0,0,"9",,terminal_output +11006,8163308,"TERMINAL",0,0,"40",,terminal_output +11007,8164302,"TERMINAL",0,0,"1",,terminal_output +11008,8165327,"TERMINAL",0,0,"2",,terminal_output +11009,8166358,"TERMINAL",0,0,"3",,terminal_output +11010,8167397,"TERMINAL",0,0,"4",,terminal_output +11011,8168439,"TERMINAL",0,0,"5",,terminal_output +11012,8169525,"TERMINAL",0,0,"6",,terminal_output +11013,8170515,"TERMINAL",0,0,"7",,terminal_output +11014,8171593,"TERMINAL",0,0,"8",,terminal_output +11015,8172603,"TERMINAL",0,0,"9",,terminal_output +11016,8173638,"TERMINAL",0,0,"50",,terminal_output +11017,8174659,"TERMINAL",0,0,"1",,terminal_output +11018,8175700,"TERMINAL",0,0,"2",,terminal_output +11019,8176744,"TERMINAL",0,0,"3",,terminal_output 
+11020,8177777,"TERMINAL",0,0,"4",,terminal_output +11021,8178843,"TERMINAL",0,0,"5",,terminal_output +11022,8179861,"TERMINAL",0,0,"6",,terminal_output +11023,8180993,"TERMINAL",0,0,"7",,terminal_output +11024,8182017,"TERMINAL",0,0,"8",,terminal_output +11025,8182991,"TERMINAL",0,0,"9",,terminal_output +11026,8184065,"TERMINAL",0,0,"2:00",,terminal_output +11027,8185090,"TERMINAL",0,0,"1",,terminal_output +11028,8186110,"TERMINAL",0,0,"2",,terminal_output +11029,8187240,"TERMINAL",0,0,"4",,terminal_output +11030,8188264,"TERMINAL",0,0,"5",,terminal_output +11031,8189288,"TERMINAL",0,0,"64",,terminal_output +11032,8190312,"TERMINAL",0,0,"7",,terminal_output +11033,8191336,"TERMINAL",0,0,"8",,terminal_output +11034,8192360,"TERMINAL",0,0,"9",,terminal_output +11035,8193379,"TERMINAL",0,0,"10",,terminal_output +11036,8194511,"TERMINAL",0,0,"1",,terminal_output +11037,8195463,"TERMINAL",0,0,"2",,terminal_output +11038,8196557,"TERMINAL",0,0,"3",,terminal_output +11039,8197599,"TERMINAL",0,0,"4",,terminal_output +11040,8198606,"TERMINAL",0,0,"5",,terminal_output +11041,8199632,"TERMINAL",0,0,"6",,terminal_output +11042,8200667,"TERMINAL",0,0,"7",,terminal_output +11043,8201704,"TERMINAL",0,0,"8",,terminal_output +11044,8202736,"TERMINAL",0,0,"9",,terminal_output +11045,8203829,"TERMINAL",0,0,"20",,terminal_output +11046,8204853,"TERMINAL",0,0,"1",,terminal_output +11047,8205876,"TERMINAL",0,0,"2",,terminal_output +11048,8207006,"TERMINAL",0,0,"3",,terminal_output +11049,8207979,"TERMINAL",0,0,"4",,terminal_output +11050,8209052,"TERMINAL",0,0,"53",,terminal_output +11051,8210074,"TERMINAL",0,0,"6",,terminal_output +11052,8211099,"TERMINAL",0,0,"7",,terminal_output +11053,8212210,"TERMINAL",0,0,"8",,terminal_output +11054,8213250,"TERMINAL",0,0,"30",,terminal_output +11055,8214171,"TERMINAL",0,0,"1",,terminal_output +11056,8215297,"TERMINAL",0,0,"2",,terminal_output +11057,8216227,"TERMINAL",0,0,"3",,terminal_output +11058,8217345,"TERMINAL",0,0,"4",,terminal_output +11059,8218370,"TERMINAL",0,0,"5",,terminal_output +11060,8219395,"TERMINAL",0,0,"6",,terminal_output +11061,8220418,"TERMINAL",0,0,"7",,terminal_output +11062,8221545,"TERMINAL",0,0,"8",,terminal_output +11063,8222568,"TERMINAL",0,0,"9",,terminal_output +11064,8223528,"TERMINAL",0,0,"40",,terminal_output +11065,8224622,"TERMINAL",0,0,"1",,terminal_output +11066,8225641,"TERMINAL",0,0,"2",,terminal_output +11067,8226665,"TERMINAL",0,0,"3",,terminal_output +11068,8227689,"TERMINAL",0,0,"4",,terminal_output +11069,8228713,"TERMINAL",0,0,"5",,terminal_output +11070,8229757,"TERMINAL",0,0,"6",,terminal_output +11071,8230797,"TERMINAL",0,0,"7",,terminal_output +11072,8231887,"TERMINAL",0,0,"8",,terminal_output +11073,8232911,"TERMINAL",0,0,"9",,terminal_output +11074,8233935,"TERMINAL",0,0,"50",,terminal_output +11075,8234958,"TERMINAL",0,0,"1",,terminal_output +11076,8235990,"TERMINAL",0,0,"2",,terminal_output +11077,8237033,"TERMINAL",0,0,"3",,terminal_output +11078,8238077,"TERMINAL",0,0,"4",,terminal_output +11079,8239121,"TERMINAL",0,0,"5",,terminal_output +11080,8240180,"TERMINAL",0,0,"7",,terminal_output +11081,8241308,"TERMINAL",0,0,"8",,terminal_output +11082,8242331,"TERMINAL",0,0,"9",,terminal_output +11083,8243270,"TERMINAL",0,0,"3:00",,terminal_output +11084,8244379,"TERMINAL",0,0,"1",,terminal_output +11085,8245426,"TERMINAL",0,0,"2",,terminal_output +11086,8246429,"TERMINAL",0,0,"3",,terminal_output +11087,8247451,"TERMINAL",0,0,"4",,terminal_output +11088,8248465,"TERMINAL",0,0,"54",,terminal_output 
+11089,8249603,"TERMINAL",0,0,"65",,terminal_output +11090,8250632,"TERMINAL",0,0,"7",,terminal_output +11091,8251656,"TERMINAL",0,0,"8",,terminal_output +11092,8252676,"TERMINAL",0,0,"9",,terminal_output +11093,8253699,"TERMINAL",0,0,"10",,terminal_output +11094,8254723,"TERMINAL",0,0,"1",,terminal_output +11095,8255849,"TERMINAL",0,0,"2",,terminal_output +11096,8256795,"TERMINAL",0,0,"3",,terminal_output +11097,8257838,"TERMINAL",0,0,"4",,terminal_output +11098,8258920,"TERMINAL",0,0,"5",,terminal_output +11099,8259920,"TERMINAL",0,0,"6",,terminal_output +11100,8261071,"TERMINAL",0,0,"7",,terminal_output +11101,8262004,"TERMINAL",0,0,"8",,terminal_output +11102,8263037,"TERMINAL",0,0,"9",,terminal_output +11103,8264143,"TERMINAL",0,0,"20",,terminal_output +11104,8265125,"TERMINAL",0,0,"1",,terminal_output +11105,8266191,"TERMINAL",0,0,"3",,terminal_output +11106,8267214,"TERMINAL",0,0,"4",,terminal_output +11107,8268238,"TERMINAL",0,0,"5",,terminal_output +11108,8269366,"TERMINAL",0,0,"63",,terminal_output +11109,8270303,"TERMINAL",0,0,"7",,terminal_output +11110,8271343,"TERMINAL",0,0,"8",,terminal_output +11111,8272376,"TERMINAL",0,0,"9",,terminal_output +11112,8273220,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/genie.py",0,0,"",python,tab +11113,8273506,"TERMINAL",0,0,"30",,terminal_output +11114,8274505,"TERMINAL",0,0,"1",,terminal_output +11115,8275510,"TERMINAL",0,0,"2",,terminal_output +11116,8276597,"TERMINAL",0,0,"3",,terminal_output +11117,8277604,"TERMINAL",0,0,"4",,terminal_output +11118,8278357,"genie.py",0,0,"",python,tab +11119,8278358,"genie.py",8205,0,"",python,selection_mouse +11120,8278661,"TERMINAL",0,0,"5",,terminal_output +11121,8279111,"genie.py",8274,0,"",python,selection_mouse +11122,8279701,"TERMINAL",0,0,"6",,terminal_output +11123,8280342,"genie.py",8273,1,"",python,content +11124,8280706,"TERMINAL",0,0,"7",,terminal_output +11125,8281757,"genie.py",8272,1,"",python,content +11126,8281767,"TERMINAL",0,0,"8",,terminal_output +11127,8281890,"genie.py",8271,1,"",python,content +11128,8282775,"TERMINAL",0,0,"9",,terminal_output +11129,8283815,"TERMINAL",0,0,"sh scripts_horeka/overfit_sample_tiny/sample.sh ",,terminal_output +11130,8283824,"TERMINAL",0,0,"40",,terminal_output +11131,8283984,"TERMINAL",0,0,"\r\n[?2004l\r",,terminal_output +11132,8284090,"TERMINAL",0,0,"Sampling from checkpoint: /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/3309699/genie_1751384516_74500/\r\n",,terminal_output +11133,8284913,"TERMINAL",0,0,"1",,terminal_output +11134,8285970,"TERMINAL",0,0,"2",,terminal_output +11135,8286978,"TERMINAL",0,0,"3",,terminal_output +11136,8286989,"TERMINAL",0,0,"2025-07-03 18:33:43.889271: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. 
Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +11137,8288053,"TERMINAL",0,0,"4",,terminal_output +11138,8289027,"TERMINAL",0,0,"5",,terminal_output +11139,8290049,"TERMINAL",0,0,"6",,terminal_output +11140,8291193,"TERMINAL",0,0,"7",,terminal_output +11141,8292106,"TERMINAL",0,0,"8",,terminal_output +11142,8293224,"TERMINAL",0,0,"50",,terminal_output +11143,8296363,"TERMINAL",0,0,"1 9",,terminal_output +11144,8297423,"TERMINAL",0,0,"4",,terminal_output +11145,8298441,"TERMINAL",0,0,"5",,terminal_output +11146,8299480,"TERMINAL",0,0,"6",,terminal_output +11147,8300013,"TERMINAL",0,0,"2025-07-03 18:33:56.901621: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +11148,8300598,"TERMINAL",0,0,"7",,terminal_output +11149,8301558,"TERMINAL",0,0,"8",,terminal_output +11150,8302648,"TERMINAL",0,0,"9",,terminal_output +11151,8303659,"TERMINAL",0,0,"4:00",,terminal_output +11152,8304684,"TERMINAL",0,0,"1",,terminal_output +11153,8305721,"TERMINAL",0,0,"2",,terminal_output +11154,8306768,"TERMINAL",0,0,"3",,terminal_output +11155,8307808,"TERMINAL",0,0,"4",,terminal_output +11156,8308873,"TERMINAL",0,0,"5",,terminal_output +11157,8309915,"TERMINAL",0,0,"6",,terminal_output +11158,8310430,"TERMINAL",0,0,"2025-07-03 18:34:07.273110: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +11159,8311089,"TERMINAL",0,0,"7",,terminal_output +11160,8312367,"TERMINAL",0,0,"8",,terminal_output +11161,8313121,"genie.py",0,0,"",python,tab +11162,8313122,"genie.py",8254,0,"",python,selection_mouse +11163,8313271,"TERMINAL",0,0,"9",,terminal_output +11164,8314114,"TERMINAL",0,0,"10",,terminal_output +11165,8315145,"TERMINAL",0,0,"1",,terminal_output +11166,8316162,"TERMINAL",0,0,"2",,terminal_output +11167,8317191,"TERMINAL",0,0,"4",,terminal_output +11168,8318312,"TERMINAL",0,0,"5",,terminal_output +11169,8319337,"TERMINAL",0,0,"6",,terminal_output +11170,8320360,"TERMINAL",0,0,"7",,terminal_output +11171,8321386,"TERMINAL",0,0,"8",,terminal_output +11172,8322409,"TERMINAL",0,0,"9",,terminal_output +11173,8323374,"TERMINAL",0,0,"2025-07-03 18:34:20.273275: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +11174,8323395,"TERMINAL",0,0,"20",,terminal_output +11175,8324457,"TERMINAL",0,0,"1",,terminal_output +11176,8325583,"TERMINAL",0,0,"2",,terminal_output +11177,8325994,"TERMINAL",0,0,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/orbax/checkpoint/_src/serialization/type_handlers.py:1251: UserWarning: Sharding info not provided when restoring. Populating sharding info from sharding file. Please note restoration time will be slightly increased due to reading from file. 
Note also that this option is unsafe when restoring on a different topology than the checkpoint was saved with.\r\n warnings.warn(\r\n",,terminal_output +11178,8326608,"TERMINAL",0,0,"3",,terminal_output +11179,8327648,"TERMINAL",0,0,"4",,terminal_output +11180,8328665,"TERMINAL",0,0,"5",,terminal_output +11181,8329680,"TERMINAL",0,0,"6",,terminal_output +11182,8330537,"genie.py",8265,0,"",python,selection_mouse +11183,8330695,"TERMINAL",0,0,"7",,terminal_output +11184,8331738,"TERMINAL",0,0,"8",,terminal_output +11185,8331791,"genie.py",8265,0,"_",python,content +11186,8331792,"genie.py",8266,0,"",python,selection_keyboard +11187,8331921,"genie.py",8266,0,"e",python,content +11188,8331923,"genie.py",8267,0,"",python,selection_keyboard +11189,8332119,"genie.py",8267,0,"x",python,content +11190,8332120,"genie.py",8268,0,"",python,selection_keyboard +11191,8332214,"genie.py",8268,0,"p",python,content +11192,8332214,"genie.py",8269,0,"",python,selection_keyboard +11193,8332340,"genie.py",8269,0,"a",python,content +11194,8332341,"genie.py",8270,0,"",python,selection_keyboard +11195,8332495,"genie.py",8270,0,"n",python,content +11196,8332497,"genie.py",8271,0,"",python,selection_keyboard +11197,8332846,"TERMINAL",0,0,"9",,terminal_output +11198,8332897,"genie.py",8271,0,"d",python,content +11199,8332898,"genie.py",8272,0,"",python,selection_keyboard +11200,8333016,"genie.py",8272,0,"e",python,content +11201,8333017,"genie.py",8273,0,"",python,selection_keyboard +11202,8333146,"genie.py",8273,0,"d",python,content +11203,8333146,"genie.py",8274,0,"",python,selection_keyboard +11204,8333894,"TERMINAL",0,0,"30",,terminal_output +11205,8334131,"TERMINAL",0,0,"2025-07-03 18:34:31.007820: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +11206,8334311,"genie.py",8280,0,"",python,selection_mouse +11207,8334901,"TERMINAL",0,0,"1",,terminal_output +11208,8335355,"genie.py",8280,0,",",python,content +11209,8335356,"genie.py",8281,0,"",python,selection_keyboard +11210,8335431,"genie.py",8281,0," ",python,content +11211,8335431,"genie.py",8282,0,"",python,selection_keyboard +11212,8335912,"genie.py",8282,0,"0",python,content +11213,8335913,"genie.py",8283,0,"",python,selection_keyboard +11214,8335947,"TERMINAL",0,0,"2",,terminal_output +11215,8336098,"genie.py",8282,0,"",python,selection_command +11216,8337070,"TERMINAL",0,0,"3",,terminal_output +11217,8337225,"TERMINAL",0,0,"2025-07-03 18:34:34.121712: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. 
Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +11218,8337990,"TERMINAL",0,0,"4",,terminal_output +11219,8339101,"TERMINAL",0,0,"5",,terminal_output +11220,8340125,"TERMINAL",0,0,"6",,terminal_output +11221,8341152,"TERMINAL",0,0,"7",,terminal_output +11222,8341631,"genie.py",0,0,"",python,tab +11223,8342151,"TERMINAL",0,0,"9",,terminal_output +11224,8342924,"genie.py",8282,1,"",python,content +11225,8343041,"genie.py",8281,0,"",python,selection_command +11226,8343205,"TERMINAL",0,0,"40",,terminal_output +11227,8343420,"genie.py",8280,0,"",python,selection_command +11228,8343818,"TERMINAL",0,0,"2025-07-03 18:34:40.676364: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +11229,8343850,"genie.py",8280,1,"",python,content +11230,8343994,"genie.py",8280,1,"",python,content +11231,8344217,"TERMINAL",0,0,"1",,terminal_output +11232,8344872,"genie.py",8780,0,"",python,selection_mouse +11233,8344905,"genie.py",8779,0,"",python,selection_command +11234,8345262,"TERMINAL",0,0,"2",,terminal_output +11235,8345657,"TERMINAL",0,0,"2025-07-03 18:34:42.559143: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +11236,8346361,"TERMINAL",0,0,"3",,terminal_output +11237,8347191,"TERMINAL",0,0,"token_idxs shape: (1, 1, 920)\r\n",,terminal_output +11238,8347332,"TERMINAL",0,0,"4",,terminal_output +11239,8347909,"TERMINAL",0,0,"token_idxs[0,:,0]: [151 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0]\r\ninit_mask[0,:,0]: [False True True True True True True True True True True True\r\n True True True True]\r\n",,terminal_output +11240,8348008,"TERMINAL",0,0,"maskgit-init_mask[0,:,0]: Traced\r\nmaskgit-token_idxs[0,:,0]: Traced\r\n",,terminal_output +11241,8348420,"TERMINAL",0,0,"5",,terminal_output +11242,8348885,"TERMINAL",0,0,"maskgit-init_mask[0,:,0]: Tracedwith\r\nmaskgit-token_idxs[0,:,0]: Tracedwith\r\n",,terminal_output +11243,8349481,"TERMINAL",0,0,"6",,terminal_output +11244,8350467,"TERMINAL",0,0,"7",,terminal_output +11245,8351594,"TERMINAL",0,0,"8",,terminal_output +11246,8352620,"TERMINAL",0,0,"9",,terminal_output +11247,8353640,"TERMINAL",0,0,"50",,terminal_output +11248,8353851,"TERMINAL",0,0,"token_idxs[0,:,0]: [151 635 0 0 0 0 0 0 0 0 0 0 0 0 0 0]\r\ninit_mask[0,:,0]: [False False True True True True True True True True True True\r\n True True True True]\r\nmaskgit-init_mask[0,:,0]: Traced\r\nmaskgit-token_idxs[0,:,0]: Traced\r\n",,terminal_output +11249,8353916,"TERMINAL",0,0,"maskgit-init_mask[0,:,0]: Tracedwith\r\nmaskgit-token_idxs[0,:,0]: Tracedwith\r\n",,terminal_output +11250,8354608,"genie.py",8346,0,"",python,selection_mouse +11251,8354677,"TERMINAL",0,0,"1",,terminal_output +11252,8355664,"TERMINAL",0,0,"2",,terminal_output +11253,8356254,"genie.py",8283,0,"",python,selection_command +11254,8356714,"TERMINAL",0,0,"3",,terminal_output +11255,8356951,"genie.py",8218,128,"",python,content +11256,8357011,"TERMINAL",0,0,"token_idxs[0,:,0]: [151 635 635 0 0 0 0 0 0 0 0 0 0 0 0 
0]\r\ninit_mask[0,:,0]: [False False False True True True True True True True True True\r\n True True True True]\r\n",,terminal_output +11257,8357067,"TERMINAL",0,0,"maskgit-init_mask[0,:,0]: Traced\r\nmaskgit-token_idxs[0,:,0]: Traced\r\nmaskgit-init_mask[0,:,0]: Tracedwith\r\nmaskgit-token_idxs[0,:,0]: Tracedwith\r\n",,terminal_output +11258,8357777,"TERMINAL",0,0,"4",,terminal_output +11259,8358786,"TERMINAL",0,0,"5",,terminal_output +11260,8359873,"TERMINAL",0,0,"6",,terminal_output +11261,8360027,"TERMINAL",0,0,"token_idxs[0,:,0]: [151 635 635 635 0 0 0 0 0 0 0 0 0 0 0 0]\r\ninit_mask[0,:,0]: [False False False False True True True True True True True True\r\n True True True True]\r\nmaskgit-init_mask[0,:,0]: Traced\r\nmaskgit-token_idxs[0,:,0]: Traced\r\n",,terminal_output +11262,8360162,"TERMINAL",0,0,"umaskgit-init_mask[0,:,0]: Tracedwith\r\nmaskgit-token_idxs[0,:,0]: Tracedwith\r\n",,terminal_output +11263,8360917,"TERMINAL",0,0,"7",,terminal_output +11264,8361252,"genie.py",0,0,"",python,tab +11265,8361253,"genie.py",8040,0,"",python,selection_mouse +11266,8361913,"genie.py",8218,0," print(""maskgit-init_mask[0,:,0]:"", mask_expanded[0,:,0])\n print(""maskgit-token_idxs[0,:,0]:"", token_idxs[0,:,0])\n",python,content +11267,8361914,"TERMINAL",0,0,"8",,terminal_output +11268,8362005,"genie.py",8283,0,"",python,selection_command +11269,8362806,"genie.py",8218,0,"",python,selection_command +11270,8362941,"TERMINAL",0,0,"9",,terminal_output +11271,8363080,"genie.py",8219,0,"",python,selection_command +11272,8363161,"TERMINAL",0,0,"token_idxs[0,:,0]: [151 635 635 635 635 0 0 0 0 0 0 0 0 0 0 0]\r\ninit_mask[0,:,0]: [False False False False False True True True True True True True\r\n True True True True]\r\nmaskgit-init_mask[0,:,0]: Traced\r\nmaskgit-token_idxs[0,:,0]: Traced\r\n",,terminal_output +11273,8363197,"genie.py",8220,0,"",python,selection_command +11274,8363279,"TERMINAL",0,0,"maskgit-init_mask[0,:,0]: Tracedwith\r\nmaskgit-token_idxs[0,:,0]: Tracedwith\r\n",,terminal_output +11275,8363395,"genie.py",8221,0,"",python,selection_command +11276,8363507,"genie.py",8222,0,"",python,selection_command +11277,8363651,"genie.py",8223,0,"",python,selection_command +11278,8363770,"genie.py",8224,0,"",python,selection_command +11279,8363957,"genie.py",8225,0,"",python,selection_command +11280,8363961,"TERMINAL",0,0,"5:00",,terminal_output +11281,8364259,"genie.py",8226,0,"",python,selection_command +11282,8365007,"TERMINAL",0,0,"1",,terminal_output +11283,8365401,"genie.py",8226,1,"p",python,selection_command +11284,8365796,"genie.py",8226,1,"p",python,selection_command +11285,8366036,"TERMINAL",0,0,"2",,terminal_output +11286,8366091,"genie.py",8226,0,"",python,selection_command +11287,8366189,"TERMINAL",0,0,"token_idxs[0,:,0]: [151 635 635 635 635 635 0 0 0 0 0 0 0 0 0 0]\r\ninit_mask[0,:,0]: [False False False False False False True True True True True True\r\n True True True True]\r\n",,terminal_output +11288,8366323,"TERMINAL",0,0,"maskgit-init_mask[0,:,0]: Traced\r\nmaskgit-token_idxs[0,:,0]: Traced\r\n",,terminal_output +11289,8366432,"TERMINAL",0,0,"maskgit-init_mask[0,:,0]: Tracedwith\r\nmaskgit-token_idxs[0,:,0]: Tracedwith\r\n",,terminal_output +11290,8366683,"genie.py",8291,0,"j",python,content +11291,8366683,"genie.py",8226,0,"j",python,content +11292,8366684,"genie.py",8227,0,"",python,selection_keyboard +11293,8366762,"genie.py",8293,0,"a",python,content +11294,8366762,"genie.py",8227,0,"a",python,content +11295,8366764,"genie.py",8228,0,"",python,selection_keyboard 
+11296,8366974,"genie.py",8295,0,"x",python,content +11297,8366975,"genie.py",8228,0,"x",python,content +11298,8366975,"genie.py",8229,0,"",python,selection_keyboard +11299,8367074,"genie.py",8297,0,".",python,content +11300,8367074,"genie.py",8229,0,".",python,content +11301,8367075,"genie.py",8230,0,"",python,selection_keyboard +11302,8367133,"TERMINAL",0,0,"3",,terminal_output +11303,8367501,"genie.py",8299,0,"d",python,content +11304,8367502,"genie.py",8230,0,"d",python,content +11305,8367502,"genie.py",8231,0,"",python,selection_keyboard +11306,8367725,"genie.py",8301,0,"e",python,content +11307,8367726,"genie.py",8231,0,"e",python,content +11308,8367726,"genie.py",8232,0,"",python,selection_keyboard +11309,8367797,"genie.py",8303,0,"b",python,content +11310,8367797,"genie.py",8232,0,"b",python,content +11311,8367798,"genie.py",8233,0,"",python,selection_keyboard +11312,8367888,"genie.py",8305,0,"u",python,content +11313,8367888,"genie.py",8233,0,"u",python,content +11314,8367889,"genie.py",8234,0,"",python,selection_keyboard +11315,8368039,"genie.py",8307,0,"g",python,content +11316,8368040,"genie.py",8234,0,"g",python,content +11317,8368040,"genie.py",8235,0,"",python,selection_keyboard +11318,8368152,"genie.py",8309,0,".",python,content +11319,8368152,"genie.py",8235,0,".",python,content +11320,8368153,"genie.py",8236,0,"",python,selection_keyboard +11321,8368165,"TERMINAL",0,0,"4",,terminal_output +11322,8368602,"genie.py",8235,0,"",python,selection_command +11323,8369183,"TERMINAL",0,0,"6",,terminal_output +11324,8369570,"TERMINAL",0,0,"token_idxs[0,:,0]: [151 635 635 635 635 635 635 0 0 0 0 0 0 0 0 0]\r\ninit_mask[0,:,0]: [False False False False False False False True True True True True\r\n True True True True]\r\nmaskgit-init_mask[0,:,0]: Traced\r\nmaskgit-token_idxs[0,:,0]: Traced\r\nmaskgit-init_mask[0,:,0]: Tracedwith\r\nmaskgit-token_idxs[0,:,0]: Tracedwith\r\n",,terminal_output +11325,8369651,"TERMINAL",0,0,"^C",,terminal_output +11326,8369923,"TERMINAL",0,0,"^C",,terminal_output +11327,8370152,"TERMINAL",0,0,"^C",,terminal_output +11328,8370216,"TERMINAL",0,0,"7",,terminal_output +11329,8371290,"TERMINAL",0,0,"8",,terminal_output +11330,8371343,"TERMINAL",0,0,"^C",,terminal_output +11331,8371676,"TERMINAL",0,0,"^C",,terminal_output +11332,8372382,"TERMINAL",0,0,"9",,terminal_output +11333,8372439,"TERMINAL",0,0,"Traceback (most recent call last):\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/sample.py"", line 157, in \r\n vid = _autoreg_sample_mihir(rng, video_batch, action_batch)\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/sample.py"", line 118, in _autoreg_sample_mihir\r\n generated_vid = genie.apply(\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/jax/_src/traceback_util.py"", line 182, in reraise_with_filtered_traceback\r\n return fun(*args, **kwargs)\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/flax/linen/module.py"", line 2240, in apply\r\n",,terminal_output +11334,8372603,"TERMINAL",0,0," return apply(\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/flax/core/scope.py"", line 1079, in wrapper\r\n y = fn(root, *args, **kwargs)\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/flax/linen/module.py"", line 3022, in scope_fn\r\n return 
fn(module.clone(parent=scope, _deep_clone=True), *args, **kwargs)\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/flax/linen/module.py"", line 699, in wrapped_module_method\r\n return self._call_wrapped_method(fun, args, kwargs)\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/flax/linen/module.py"", line 1216, in _call_wrapped_method\r\n y = run_fun(self, *args, **kwargs)\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/genie.py"", line 155, in sample_mihir\r\n final_carry, _ = loop_fn(init_carry, jnp.arange(steps))\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/flax/linen/transforms.py"", line 360, in wrapped_fn\r\n ret = trafo_fn(module_scopes, *args, **kwargs)\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/flax/core/lift.py"", line 328, in wrapper\r\n y, out_variable_groups_xs_t = fn(\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/flax/core/lift.py"", line 1031, in inner\r\n broadcast_vars, (carry_vars, c), (ys, scan_vars) = scanned(\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/flax/core/axes_scan.py"", line 179, in scan_fn\r\n c, ys = lax.scan(\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/jax/_src/traceback_util.py"", line 182, in reraise_with_filtered_traceback\r\n return fun(*args, **kwargs)\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/jax/_src/lax/control_flow/loops.py"", line 355, in scan\r\n out = scan_p.bind(*consts, *in_flat,\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/jax/_src/core.py"", line 531, in bind\r\n return self._true_bind(*args, **params)\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/jax/_src/core.py"", line 551, in _true_bind\r\n return self.bind_with_trace(prev_trace, args, params)\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/jax/_src/core.py"", line 556, in bind_with_trace\r\n return trace.process_primitive(self, args, params)\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/jax/_src/core.py"", line 1060, in process_primitive\r\n return primitive.impl(*args, **params)\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/jax/_src/dispatch.py"", line 88, in apply_primitive\r\n outs = fun(*args)\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/jax/_src/traceback_util.py"", line 182, in reraise_with_filtered_traceback\r\n return fun(*args, **kwargs)\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/jax/_src/pjit.py"", line 334, in cache_miss\r\n executable, pgle_profiler) = _python_pjit_helper(fun, jit_info, *args, **kwargs)\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/jax/_src/pjit.py"", line 195, in 
_python_pjit_helper\r\n out_flat, compiled, profiler = _pjit_call_impl_python(*args_flat, **p.params)\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/jax/_src/pjit.py"", line 1862, in _pjit_call_impl_python\r\n ).compile()\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/jax/_src/interpreters/pxla.py"", line 2467, in compile\r\n executable = UnloadedMeshExecutable.from_hlo(\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/jax/_src/interpreters/pxla.py"", line 3009, in from_hlo\r\n xla_executable = _cached_compilation(\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/jax/_src/interpreters/pxla.py"", line 2800, in _cached_compilation\r\n xla_executable = compiler.compile_or_get_cached(\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/jax/_src/compiler.py"", line 447, in compile_or_get_cached\r\n return _compile_and_write_cache(\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/jax/_src/compiler.py"", line 719, in _compile_and_write_cache\r\n executable = backend_compile(\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/jax/_src/profiler.py"", line 354, in wrapper\r\n",,terminal_output +11335,8372654,"TERMINAL",0,0," return func(*args, **kwargs)\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/jax/_src/compiler.py"", line 335, in backend_compile\r\n return backend.compile(\r\nKeyboardInterrupt\r\n",,terminal_output +11336,8373405,"TERMINAL",0,0,"10",,terminal_output +11337,8373792,"TERMINAL",0,0,"^C",,terminal_output +11338,8373951,"TERMINAL",0,0,"\r\n]0;tum_cte0515@hkn0901:~/Projects/jafar[?2004h(jafar) [tum_cte0515@hkn0901 jafar]$ ",,terminal_output +11339,8374427,"TERMINAL",0,0,"1",,terminal_output +11340,8374440,"TERMINAL",0,0,"sh scripts_horeka/overfit_sample_tiny/sample.sh ",,terminal_output +11341,8374578,"TERMINAL",0,0,"\r\n[?2004l\r",,terminal_output +11342,8374705,"TERMINAL",0,0,"Sampling from checkpoint: /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/3309699/genie_1751384516_74500/\r\n",,terminal_output +11343,8375453,"TERMINAL",0,0,"213",,terminal_output +11344,8376503,"TERMINAL",0,0,"3",,terminal_output +11345,8377458,"TERMINAL",0,0,"4",,terminal_output +11346,8377553,"TERMINAL",0,0,"2025-07-03 18:35:14.418289: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. 
Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +11347,8378503,"TERMINAL",0,0,"5",,terminal_output +11348,8379550,"TERMINAL",0,0,"6",,terminal_output +11349,8380582,"TERMINAL",0,0,"7",,terminal_output +11350,8381699,"TERMINAL",0,0,"8",,terminal_output +11351,8382724,"TERMINAL",0,0,"9",,terminal_output +11352,8383747,"TERMINAL",0,0,"20",,terminal_output +11353,8384770,"TERMINAL",0,0,"1",,terminal_output +11354,8385795,"TERMINAL",0,0,"2",,terminal_output +11355,8386928,"TERMINAL",0,0,"3",,terminal_output +11356,8387873,"TERMINAL",0,0,"4",,terminal_output +11357,8388921,"TERMINAL",0,0,"5",,terminal_output +11358,8389687,"TERMINAL",0,0,"2025-07-03 18:35:26.519964: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +11359,8389969,"TERMINAL",0,0,"6",,terminal_output +11360,8390992,"TERMINAL",0,0,"7",,terminal_output +11361,8392022,"TERMINAL",0,0,"8",,terminal_output +11362,8393051,"TERMINAL",0,0,"9",,terminal_output +11363,8394196,"TERMINAL",0,0,"30",,terminal_output +11364,8395198,"TERMINAL",0,0,"1",,terminal_output +11365,8396201,"TERMINAL",0,0,"3",,terminal_output +11366,8397264,"TERMINAL",0,0,"4",,terminal_output +11367,8398293,"TERMINAL",0,0,"5",,terminal_output +11368,8399311,"TERMINAL",0,0,"6",,terminal_output +11369,8399619,"TERMINAL",0,0,"2025-07-03 18:35:36.450189: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +11370,8400336,"TERMINAL",0,0,"7",,terminal_output +11371,8401463,"TERMINAL",0,0,"8",,terminal_output +11372,8402486,"TERMINAL",0,0,"9",,terminal_output +11373,8403458,"TERMINAL",0,0,"40",,terminal_output +11374,8404497,"TERMINAL",0,0,"1",,terminal_output +11375,8405559,"TERMINAL",0,0,"2",,terminal_output +11376,8406685,"TERMINAL",0,0,"3",,terminal_output +11377,8407608,"TERMINAL",0,0,"4",,terminal_output +11378,8408733,"TERMINAL",0,0,"5",,terminal_output +11379,8409701,"TERMINAL",0,0,"6",,terminal_output +11380,8410799,"TERMINAL",0,0,"7",,terminal_output +11381,8411677,"TERMINAL",0,0,"2025-07-03 18:35:48.577734: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +11382,8411795,"TERMINAL",0,0,"8",,terminal_output +11383,8412853,"TERMINAL",0,0,"94",,terminal_output +11384,8413955,"TERMINAL",0,0,"50",,terminal_output +11385,8414366,"TERMINAL",0,0,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/orbax/checkpoint/_src/serialization/type_handlers.py:1251: UserWarning: Sharding info not provided when restoring. Populating sharding info from sharding file. Please note restoration time will be slightly increased due to reading from file. 
Note also that this option is unsafe when restoring on a different topology than the checkpoint was saved with.\r\n warnings.warn(\r\n",,terminal_output +11386,8414888,"TERMINAL",0,0,"1",,terminal_output +11387,8415921,"TERMINAL",0,0,"2",,terminal_output +11388,8416973,"TERMINAL",0,0,"33",,terminal_output +11389,8418015,"TERMINAL",0,0,"4",,terminal_output +11390,8419076,"TERMINAL",0,0,"5",,terminal_output +11391,8420099,"TERMINAL",0,0,"6",,terminal_output +11392,8421123,"TERMINAL",0,0,"7",,terminal_output +11393,8421876,"TERMINAL",0,0,"2025-07-03 18:35:58.696730: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +11394,8422250,"TERMINAL",0,0,"9",,terminal_output +11395,8423273,"TERMINAL",0,0,"6:00",,terminal_output +11396,8424287,"TERMINAL",0,0,"1",,terminal_output +11397,8424792,"TERMINAL",0,0,"2025-07-03 18:36:01.578567: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +11398,8425366,"TERMINAL",0,0,"2",,terminal_output +11399,8426345,"TERMINAL",0,0,"3",,terminal_output +11400,8427377,"TERMINAL",0,0,"4",,terminal_output +11401,8428396,"TERMINAL",0,0,"5",,terminal_output +11402,8429530,"TERMINAL",0,0,"6",,terminal_output +11403,8430570,"TERMINAL",0,0,"7",,terminal_output +11404,8431180,"TERMINAL",0,0,"2025-07-03 18:36:07.964892: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +11405,8431594,"TERMINAL",0,0,"8",,terminal_output +11406,8432593,"TERMINAL",0,0,"9",,terminal_output +11407,8433117,"TERMINAL",0,0,"2025-07-03 18:36:10.002561: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +11408,8433616,"TERMINAL",0,0,"10",,terminal_output +11409,8434436,"TERMINAL",0,0,"token_idxs shape: (1, 1, 920)\r\n",,terminal_output +11410,8434632,"TERMINAL",0,0,"1",,terminal_output +11411,8434956,"TERMINAL",0,0,"token_idxs[0,:,0]: [151 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0]\r\n",,terminal_output +11412,8435018,"TERMINAL",0,0,"init_mask[0,:,0]: [False True True True True True True True True True True True\r\n True True True True]\r\n",,terminal_output +11413,8435149,"TERMINAL",0,0,"jax.errors.SimplifiedTraceback: For simplicity, JAX has removed its internal frames from the traceback of the following exception. 
Set JAX_TRACEBACK_FILTERING=off to include these.\r\n\r\nThe above exception was the direct cause of the following exception:\r\n\r\nTraceback (most recent call last):\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/sample.py"", line 157, in \r\n vid = _autoreg_sample_mihir(rng, video_batch, action_batch)\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/sample.py"", line 118, in _autoreg_sample_mihir\r\n generated_vid = genie.apply(\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/genie.py"", line 155, in sample_mihir\r\n final_carry, _ = loop_fn(init_carry, jnp.arange(steps))\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/flax/core/axes_scan.py"", line 165, in scan_fn\r\n _, out_pvals, _ = pe.trace_to_jaxpr_nounits(f_flat, in_pvals)\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/flax/core/axes_scan.py"", line 135, in body_fn\r\n broadcast_out, c, ys = fn(broadcast_in, c, *xs)\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/genie.py"", line 245, in __call__\r\n jax.debug.print(""maskgit-init_mask[0,:,0]:"", mask_expanded[0,:,0])\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/jax/_src/debugging.py"", line 406, in debug_print\r\n formatter.format(fmt, *args, **kwargs)\r\n File ""/home/hk-project-p0023960/tum_cte0515/.local/share/uv/python/cpython-3.10.18-linux-x86_64-gnu/lib/python3.10/string.py"", line 161, in format\r\n return self.vformat(format_string, args, kwargs)\r\n File ""/home/hk-project-p0023960/tum_cte0515/.local/share/uv/python/cpython-3.10.18-linux-x86_64-gnu/lib/python3.10/string.py"", line 166, in vformat\r\n self.check_unused_args(used_args, args, kwargs)\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/jax/_src/debugging.py"", line 353, in check_unused_args\r\n raise ValueError(\r\nValueError: Unused positional arguments to `jax.debug.print`: [Traced]\r\n",,terminal_output +11414,8435768,"TERMINAL",0,0,"2",,terminal_output +11415,8436501,"TERMINAL",0,0,"]0;tum_cte0515@hkn0901:~/Projects/jafar[?2004h(jafar) [tum_cte0515@hkn0901 jafar]$ ",,terminal_output +11416,8436802,"TERMINAL",0,0,"3",,terminal_output +11417,8437747,"TERMINAL",0,0,"4",,terminal_output +11418,8438991,"TERMINAL",0,0,"5",,terminal_output +11419,8439871,"TERMINAL",0,0,"6",,terminal_output +11420,8440889,"TERMINAL",0,0,"7",,terminal_output +11421,8441913,"TERMINAL",0,0,"8",,terminal_output +11422,8442982,"TERMINAL",0,0,"9",,terminal_output +11423,8443972,"TERMINAL",0,0,"20",,terminal_output +11424,8445018,"TERMINAL",0,0,"1",,terminal_output +11425,8446061,"TERMINAL",0,0,"2",,terminal_output +11426,8447098,"TERMINAL",0,0,"3",,terminal_output +11427,8448158,"TERMINAL",0,0,"4",,terminal_output +11428,8449181,"TERMINAL",0,0,"6",,terminal_output +11429,8450207,"TERMINAL",0,0,"7",,terminal_output +11430,8451235,"TERMINAL",0,0,"8",,terminal_output +11431,8452364,"TERMINAL",0,0,"9",,terminal_output +11432,8453383,"TERMINAL",0,0,"30",,terminal_output +11433,8454406,"TERMINAL",0,0,"1",,terminal_output +11434,8455410,"TERMINAL",0,0,"2",,terminal_output +11435,8456458,"TERMINAL",0,0,"3",,terminal_output +11436,8457590,"TERMINAL",0,0,"4",,terminal_output +11437,8458591,"TERMINAL",0,0,"5",,terminal_output +11438,8459627,"TERMINAL",0,0,"6",,terminal_output 
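The ValueError above pins down the crash: jax.debug.print uses str.format semantics, so a format string with no "{}" placeholder leaves its positional argument unused, which jax.debug.print rejects (the unused argument is reported as "[Traced]" because it is a tracer inside the scan; the earlier "Traced"/"Tracedwith" lines at row 11324 look like a prior variant baking the tracer's repr in at trace time). A minimal sketch of the failure and the fix, with the array shape assumed from the logged 16-entry slice:

    import jax
    import jax.numpy as jnp

    mask_expanded = jnp.zeros((1, 16, 1), dtype=bool)  # stand-in for the traced mask

    # Raises ValueError("Unused positional arguments ...") -- no '{}' field:
    # jax.debug.print("maskgit-init_mask[0,:,0]:", mask_expanded[0, :, 0])

    # Works: the traced value is formatted into the '{}' slot at runtime.
    jax.debug.print("maskgit-init_mask[0,:,0]: {}", mask_expanded[0, :, 0])

The fix is typed in just below (rows 11461-11481): a space plus "{}" appended inside each of the two format strings. As the traceback header notes, rerunning with JAX_TRACEBACK_FILTERING=off would expose the filtered internal frames as well.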
+11439,8460614,"TERMINAL",0,0,"7",,terminal_output +11440,8461675,"TERMINAL",0,0,"8",,terminal_output +11441,8462679,"TERMINAL",0,0,"9",,terminal_output +11442,8463720,"TERMINAL",0,0,"40",,terminal_output +11443,8464764,"TERMINAL",0,0,"1",,terminal_output +11444,8465883,"TERMINAL",0,0,"2",,terminal_output +11445,8466843,"TERMINAL",0,0,"3",,terminal_output +11446,8467922,"TERMINAL",0,0,"4",,terminal_output +11447,8468915,"TERMINAL",0,0,"5",,terminal_output +11448,8469939,"TERMINAL",0,0,"6",,terminal_output +11449,8470985,"TERMINAL",0,0,"7",,terminal_output +11450,8472035,"TERMINAL",0,0,"8",,terminal_output +11451,8473085,"TERMINAL",0,0,"9",,terminal_output +11452,8473533,"genie.py",0,0,"",python,tab +11453,8473533,"genie.py",8366,0,"",python,selection_mouse +11454,8473924,"genie.py",8316,0,"",python,selection_mouse +11455,8474141,"TERMINAL",0,0,"50",,terminal_output +11456,8475174,"TERMINAL",0,0,"2",,terminal_output +11457,8475214,"genie.py",8343,0,"",python,selection_mouse +11458,8476002,"genie.py",8344,0,"",python,selection_mouse +11459,8476213,"TERMINAL",0,0,"3",,terminal_output +11460,8477366,"TERMINAL",0,0,"4",,terminal_output +11461,8477447,"genie.py",8344,0," ",python,content +11462,8477448,"genie.py",8345,0,"",python,selection_keyboard +11463,8477751,"genie.py",8345,0,"{}",python,content +11464,8477752,"genie.py",8346,0,"",python,selection_keyboard +11465,8477996,"genie.py",8346,1,"}",python,content +11466,8477997,"genie.py",8347,0,"",python,selection_keyboard +11467,8478338,"TERMINAL",0,0,"54",,terminal_output +11468,8479343,"TERMINAL",0,0,"6",,terminal_output +11469,8479699,"genie.py",8346,0,"",python,selection_command +11470,8479913,"genie.py",8271,0,"",python,selection_command +11471,8480198,"genie.py",8270,0,"",python,selection_command +11472,8480499,"genie.py",8269,0,"",python,selection_command +11473,8480500,"TERMINAL",0,0,"7",,terminal_output +11474,8480532,"genie.py",8268,0,"",python,selection_command +11475,8481014,"genie.py",8268,0," ",python,content +11476,8481015,"genie.py",8269,0,"",python,selection_keyboard +11477,8481459,"TERMINAL",0,0,"8",,terminal_output +11478,8481833,"genie.py",8269,0,"{}",python,content +11479,8481833,"genie.py",8270,0,"",python,selection_keyboard +11480,8482196,"genie.py",8270,1,"}",python,content +11481,8482196,"genie.py",8271,0,"",python,selection_keyboard +11482,8482482,"TERMINAL",0,0,"9",,terminal_output +11483,8482537,"genie.py",8270,0,"",python,selection_command +11484,8483528,"TERMINAL",0,0,"7:00",,terminal_output +11485,8484613,"TERMINAL",0,0,"1",,terminal_output +11486,8485651,"TERMINAL",0,0,"2",,terminal_output +11487,8486666,"TERMINAL",0,0,"3",,terminal_output +11488,8487787,"TERMINAL",0,0,"4",,terminal_output +11489,8488778,"TERMINAL",0,0,"5",,terminal_output +11490,8489836,"TERMINAL",0,0,"6",,terminal_output +11491,8490962,"TERMINAL",0,0,"7",,terminal_output +11492,8491986,"TERMINAL",0,0,"8",,terminal_output +11493,8492979,"TERMINAL",0,0,"9",,terminal_output +11494,8493965,"TERMINAL",0,0,"10",,terminal_output +11495,8495008,"TERMINAL",0,0,"1",,terminal_output +11496,8496083,"TERMINAL",0,0,"2",,terminal_output +11497,8497110,"TERMINAL",0,0,"3",,terminal_output +11498,8497462,"TERMINAL",0,0,"sh scripts_horeka/overfit_sample_tiny/sample.sh ",,terminal_output +11499,8497652,"TERMINAL",0,0,"\r\n[?2004l\r",,terminal_output +11500,8497767,"TERMINAL",0,0,"Sampling from checkpoint: /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/3309699/genie_1751384516_74500/\r\n",,terminal_output 
+11501,8498232,"TERMINAL",0,0,"5",,terminal_output +11502,8499272,"TERMINAL",0,0,"6",,terminal_output +11503,8500202,"TERMINAL",0,0,"7",,terminal_output +11504,8500611,"TERMINAL",0,0,"2025-07-03 18:37:17.513304: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +11505,8501305,"TERMINAL",0,0,"8",,terminal_output +11506,8502328,"TERMINAL",0,0,"9",,terminal_output +11507,8503352,"TERMINAL",0,0,"20",,terminal_output +11508,8504376,"TERMINAL",0,0,"1",,terminal_output +11509,8505399,"TERMINAL",0,0,"2",,terminal_output +11510,8506527,"TERMINAL",0,0,"3",,terminal_output +11511,8507550,"TERMINAL",0,0,"45",,terminal_output +11512,8508512,"TERMINAL",0,0,"5",,terminal_output +11513,8509598,"TERMINAL",0,0,"64",,terminal_output +11514,8510567,"TERMINAL",0,0,"7",,terminal_output +11515,8511648,"TERMINAL",0,0,"8",,terminal_output +11516,8512662,"TERMINAL",0,0,"9",,terminal_output +11517,8513797,"TERMINAL",0,0,"30",,terminal_output +11518,8513850,"TERMINAL",0,0,"2025-07-03 18:37:30.740200: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +11519,8514821,"TERMINAL",0,0,"1",,terminal_output +11520,8515845,"TERMINAL",0,0,"2",,terminal_output +11521,8516819,"TERMINAL",0,0,"3",,terminal_output +11522,8517894,"TERMINAL",0,0,"4",,terminal_output +11523,8519019,"TERMINAL",0,0,"5",,terminal_output +11524,8520021,"TERMINAL",0,0,"6",,terminal_output +11525,8521064,"TERMINAL",0,0,"7",,terminal_output +11526,8522047,"TERMINAL",0,0,"8",,terminal_output +11527,8523093,"TERMINAL",0,0,"9",,terminal_output +11528,8523663,"sample.py",0,0,"",python,tab +11529,8524131,"TERMINAL",0,0,"40",,terminal_output +11530,8524352,"TERMINAL",0,0,"2025-07-03 18:37:41.252681: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +11531,8525266,"TERMINAL",0,0,"2",,terminal_output +11532,8526290,"TERMINAL",0,0,"3",,terminal_output +11533,8527313,"TERMINAL",0,0,"4",,terminal_output +11534,8528337,"TERMINAL",0,0,"5",,terminal_output +11535,8529364,"TERMINAL",0,0,"6",,terminal_output +11536,8530384,"TERMINAL",0,0,"7",,terminal_output +11537,8531546,"TERMINAL",0,0,"8",,terminal_output +11538,8532669,"TERMINAL",0,0,"9",,terminal_output +11539,8533561,"TERMINAL",0,0,"50",,terminal_output +11540,8534585,"TERMINAL",0,0,"1",,terminal_output +11541,8535723,"TERMINAL",0,0,"2",,terminal_output +11542,8536349,"genie.py",0,0,"",python,tab +11543,8536718,"TERMINAL",0,0,"3",,terminal_output +11544,8537350,"TERMINAL",0,0,"2025-07-03 18:37:54.160944: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. 
Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +11545,8537703,"TERMINAL",0,0,"4",,terminal_output +11546,8538783,"TERMINAL",0,0,"5",,terminal_output +11547,8539807,"TERMINAL",0,0,"6",,terminal_output +11548,8539859,"TERMINAL",0,0,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/orbax/checkpoint/_src/serialization/type_handlers.py:1251: UserWarning: Sharding info not provided when restoring. Populating sharding info from sharding file. Please note restoration time will be slightly increased due to reading from file. Note also that this option is unsafe when restoring on a different topology than the checkpoint was saved with.\r\n warnings.warn(\r\n",,terminal_output +11549,8540832,"TERMINAL",0,0,"7",,terminal_output +11550,8541957,"TERMINAL",0,0,"8",,terminal_output +11551,8542980,"TERMINAL",0,0,"9",,terminal_output +11552,8544005,"TERMINAL",0,0,"8:00",,terminal_output +11553,8544986,"TERMINAL",0,0,"1",,terminal_output +11554,8546054,"TERMINAL",0,0,"2",,terminal_output +11555,8547077,"TERMINAL",0,0,"3",,terminal_output +11556,8548098,"genie.py",9439,0,"",python,selection_mouse +11557,8548148,"TERMINAL",0,0,"4",,terminal_output +11558,8548238,"genie.py",9428,18,"sampled_token_idxs",python,selection_mouse +11559,8548288,"TERMINAL",0,0,"2025-07-03 18:38:05.093620: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +11560,8549164,"TERMINAL",0,0,"6",,terminal_output +11561,8550263,"TERMINAL",0,0,"7",,terminal_output +11562,8551026,"genie.py",9424,0,"",python,selection_mouse +11563,8551170,"genie.py",9422,4,"mask",python,selection_mouse +11564,8551268,"TERMINAL",0,0,"2025-07-03 18:38:08.144564: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +11565,8551268,"TERMINAL",0,0,"8",,terminal_output +11566,8552300,"TERMINAL",0,0,"9",,terminal_output +11567,8553428,"TERMINAL",0,0,"10",,terminal_output +11568,8554450,"TERMINAL",0,0,"1",,terminal_output +11569,8555474,"TERMINAL",0,0,"2",,terminal_output +11570,8556498,"TERMINAL",0,0,"3",,terminal_output +11571,8557515,"TERMINAL",0,0,"4",,terminal_output +11572,8557830,"TERMINAL",0,0,"2025-07-03 18:38:14.697205: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +11573,8558650,"TERMINAL",0,0,"5",,terminal_output +11574,8559673,"TERMINAL",0,0,"6",,terminal_output +11575,8559875,"TERMINAL",0,0,"2025-07-03 18:38:16.776261: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. 
Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +11576,8560697,"TERMINAL",0,0,"7",,terminal_output +11577,8561420,"TERMINAL",0,0,"token_idxs shape: (1, 1, 920)\r\n",,terminal_output +11578,8561733,"TERMINAL",0,0,"8",,terminal_output +11579,8562060,"TERMINAL",0,0,"token_idxs[0,:,0]: [151 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0]\r\ninit_mask[0,:,0]: [False True True True True True True True True True True True\r\n True True True True]\r\n",,terminal_output +11580,8562754,"TERMINAL",0,0,"9",,terminal_output +11581,8563787,"TERMINAL",0,0,"20",,terminal_output +11582,8564895,"TERMINAL",0,0,"1",,terminal_output +11583,8565918,"TERMINAL",0,0,"2",,terminal_output +11584,8566955,"TERMINAL",0,0,"3",,terminal_output +11585,8567864,"TERMINAL",0,0,"maskgit-init_mask[0,:,0]: [[False]\r\n [ True]\r\n [ True]\r\n [ True]\r\n [ True]\r\n [ True]\r\n [ True]\r\n [ True]\r\n [ True]\r\n [ True]\r\n [ True]\r\n [ True]\r\n [ True]\r\n [ True]\r\n [ True]\r\n [ True]]\r\nmaskgit-token_idxs[0,:,0]: [151 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0]\r\ntoken_idxs[0,:,0]: [151 635 0 0 0 0 0 0 0 0 0 0 0 0 0 0]\r\ninit_mask[0,:,0]: [False False True True True True True True True True True True\r\n True True True True]\r\n",,terminal_output +11586,8567967,"TERMINAL",0,0,"4",,terminal_output +11587,8568992,"TERMINAL",0,0,"5",,terminal_output +11588,8570019,"TERMINAL",0,0,"6",,terminal_output +11589,8570738,"TERMINAL",0,0,"maskgit-init_mask[0,:,0]: [[False]\r\n [False]\r\n [ True]\r\n [ True]\r\n [ True]\r\n [ True]\r\n [ True]\r\n [ True]\r\n [ True]\r\n [ True]\r\n [ True]\r\n [ True]\r\n [ True]\r\n [ True]\r\n [ True]\r\n [ True]]\r\nmaskgit-token_idxs[0,:,0]: [151 635 0 0 0 0 0 0 0 0 0 0 0 0 0 0]\r\ntoken_idxs[0,:,0]: [151 635 635 0 0 0 0 0 0 0 0 0 0 0 0 0]\r\ninit_mask[0,:,0]: [False False False True True True True True True True True True\r\n True True True True]\r\n",,terminal_output +11590,8571062,"TERMINAL",0,0,"7",,terminal_output +11591,8572088,"TERMINAL",0,0,"8",,terminal_output +11592,8573134,"TERMINAL",0,0,"9",,terminal_output +11593,8573701,"TERMINAL",0,0,"maskgit-init_mask[0,:,0]: [[False]\r\n [False]\r\n [False]\r\n [ True]\r\n [ True]\r\n [ True]\r\n [ True]\r\n [ True]\r\n [ True]\r\n [ True]\r\n [ True]\r\n [ True]\r\n [ True]\r\n [ True]\r\n [ True]\r\n [ True]]\r\nmaskgit-token_idxs[0,:,0]: [151 635 635 0 0 0 0 0 0 0 0 0 0 0 0 0]\r\ntoken_idxs[0,:,0]: [151 635 635 635 0 0 0 0 0 0 0 0 0 0 0 0]\r\ninit_mask[0,:,0]: [False False False False True True True True True True True True\r\n True True True True]\r\n",,terminal_output +11594,8574214,"TERMINAL",0,0,"31",,terminal_output +11595,8575255,"TERMINAL",0,0,"2",,terminal_output +11596,8576252,"TERMINAL",0,0,"3",,terminal_output +11597,8576671,"TERMINAL",0,0,"maskgit-init_mask[0,:,0]: [[False]\r\n [False]\r\n [False]\r\n [False]\r\n [ True]\r\n [ True]\r\n [ True]\r\n [ True]\r\n [ True]\r\n [ True]\r\n [ True]\r\n [ True]\r\n [ True]\r\n [ True]\r\n [ True]\r\n [ True]]\r\nmaskgit-token_idxs[0,:,0]: [151 635 635 635 0 0 0 0 0 0 0 0 0 0 0 0]\r\ntoken_idxs[0,:,0]: [151 635 635 635 635 0 0 0 0 0 0 0 0 0 0 0]\r\ninit_mask[0,:,0]: [False False False False False True True True True True True True\r\n True True True True]\r\n",,terminal_output +11598,8577388,"TERMINAL",0,0,"4",,terminal_output +11599,8578413,"TERMINAL",0,0,"5",,terminal_output +11600,8579436,"TERMINAL",0,0,"6",,terminal_output +11601,8579547,"TERMINAL",0,0,"maskgit-init_mask[0,:,0]: [[False]\r\n 
[False]\r\n [False]\r\n [False]\r\n [False]\r\n [ True]\r\n [ True]\r\n [ True]\r\n [ True]\r\n [ True]\r\n [ True]\r\n [ True]\r\n [ True]\r\n [ True]\r\n [ True]\r\n [ True]]\r\nmaskgit-token_idxs[0,:,0]: [151 635 635 635 635 0 0 0 0 0 0 0 0 0 0 0]\r\ntoken_idxs[0,:,0]: [151 635 635 635 635 635 0 0 0 0 0 0 0 0 0 0]\r\ninit_mask[0,:,0]: [False False False False False False True True True True True True\r\n True True True True]\r\n",,terminal_output +11602,8580459,"TERMINAL",0,0,"7",,terminal_output +11603,8581484,"TERMINAL",0,0,"8",,terminal_output +11604,8582508,"TERMINAL",0,0,"9",,terminal_output +11605,8582635,"TERMINAL",0,0,"maskgit-init_mask[0,:,0]: [[False]\r\n [False]\r\n [False]\r\n [False]\r\n [False]\r\n [False]\r\n [ True]\r\n [ True]\r\n [ True]\r\n [ True]\r\n [ True]\r\n [ True]\r\n [ True]\r\n [ True]\r\n [ True]\r\n [ True]]\r\nmaskgit-token_idxs[0,:,0]: [151 635 635 635 635 635 0 0 0 0 0 0 0 0 0 0]\r\ntoken_idxs[0,:,0]: [151 635 635 635 635 635 635 0 0 0 0 0 0 0 0 0]\r\ninit_mask[0,:,0]: [False False False False False False False True True True True True\r\n True True True True]\r\n",,terminal_output +11606,8583540,"TERMINAL",0,0,"40",,terminal_output +11607,8584659,"TERMINAL",0,0,"1",,terminal_output +11608,8585583,"TERMINAL",0,0,"maskgit-init_mask[0,:,0]: [[False]\r\n [False]\r\n [False]\r\n [False]\r\n [False]\r\n [False]\r\n [False]\r\n [ True]\r\n [ True]\r\n [ True]\r\n [ True]\r\n [ True]\r\n [ True]\r\n [ True]\r\n [ True]\r\n [ True]]\r\nmaskgit-token_idxs[0,:,0]: [151 635 635 635 635 635 635 0 0 0 0 0 0 0 0 0]\r\ntoken_idxs[0,:,0]: [151 635 635 635 635 635 635 635 0 0 0 0 0 0 0 0]\r\ninit_mask[0,:,0]: [False False False False False False False False True True True True\r\n True True True True]\r\n",,terminal_output +11609,8585637,"TERMINAL",0,0,"2",,terminal_output +11610,8586707,"TERMINAL",0,0,"3",,terminal_output +11611,8587731,"TERMINAL",0,0,"4",,terminal_output +11612,8588419,"TERMINAL",0,0,"maskgit-init_mask[0,:,0]: [[False]\r\n [False]\r\n [False]\r\n [False]\r\n [False]\r\n [False]\r\n [False]\r\n [False]\r\n [ True]\r\n [ True]\r\n [ True]\r\n [ True]\r\n [ True]\r\n [ True]\r\n [ True]\r\n [ True]]\r\nmaskgit-token_idxs[0,:,0]: [151 635 635 635 635 635 635 635 0 0 0 0 0 0 0 0]\r\ntoken_idxs[0,:,0]: [151 635 635 635 635 635 635 635 635 0 0 0 0 0 0 0]\r\ninit_mask[0,:,0]: [False False False False False False False False False True True True\r\n True True True True]\r\n",,terminal_output +11613,8588755,"TERMINAL",0,0,"5",,terminal_output +11614,8589881,"TERMINAL",0,0,"6",,terminal_output +11615,8590906,"TERMINAL",0,0,"7",,terminal_output +11616,8591417,"TERMINAL",0,0,"maskgit-init_mask[0,:,0]: [[False]\r\n [False]\r\n [False]\r\n [False]\r\n [False]\r\n [False]\r\n [False]\r\n [False]\r\n [False]\r\n [ True]\r\n [ True]\r\n [ True]\r\n [ True]\r\n [ True]\r\n [ True]\r\n [ True]]\r\nmaskgit-token_idxs[0,:,0]: [151 635 635 635 635 635 635 635 635 0 0 0 0 0 0 0]\r\ntoken_idxs[0,:,0]: [151 635 635 635 635 635 635 635 635 635 0 0 0 0 0 0]\r\ninit_mask[0,:,0]: [False False False False False False False False False False True True\r\n True True True True]\r\n",,terminal_output +11617,8591930,"TERMINAL",0,0,"8",,terminal_output +11618,8592988,"TERMINAL",0,0,"9",,terminal_output +11619,8593982,"TERMINAL",0,0,"50",,terminal_output +11620,8594284,"TERMINAL",0,0,"maskgit-init_mask[0,:,0]: [[False]\r\n [False]\r\n [False]\r\n [False]\r\n [False]\r\n [False]\r\n [False]\r\n [False]\r\n [False]\r\n [False]\r\n [ True]\r\n [ True]\r\n [ True]\r\n [ True]\r\n [ True]\r\n 
[ True]]\r\nmaskgit-token_idxs[0,:,0]: [151 635 635 635 635 635 635 635 635 635 0 0 0 0 0 0]\r\ntoken_idxs[0,:,0]: [151 635 635 635 635 635 635 635 635 635 635 0 0 0 0 0]\r\ninit_mask[0,:,0]: [False False False False False False False False False False False True\r\n True True True True]\r\n",,terminal_output +11621,8594991,"TERMINAL",0,0,"1",,terminal_output +11622,8596036,"TERMINAL",0,0,"2",,terminal_output +11623,8597079,"TERMINAL",0,0,"3",,terminal_output +11624,8597229,"TERMINAL",0,0,"maskgit-init_mask[0,:,0]: [[False]\r\n [False]\r\n [False]\r\n [False]\r\n [False]\r\n [False]\r\n [False]\r\n [False]\r\n [False]\r\n [False]\r\n [False]\r\n [ True]\r\n [ True]\r\n [ True]\r\n [ True]\r\n [ True]]\r\nmaskgit-token_idxs[0,:,0]: [151 635 635 635 635 635 635 635 635 635 635 0 0 0 0 0]\r\ntoken_idxs[0,:,0]: [151 635 635 635 635 635 635 635 635 635 635 635 0 0 0 0]\r\ninit_mask[0,:,0]: [False False False False False False False False False False False False\r\n True True True True]\r\n",,terminal_output +11625,8598134,"TERMINAL",0,0,"4",,terminal_output +11626,8599199,"TERMINAL",0,0,"6",,terminal_output +11627,8600138,"TERMINAL",0,0,"maskgit-init_mask[0,:,0]: [[False]\r\n [False]\r\n [False]\r\n [False]\r\n [False]\r\n [False]\r\n [False]\r\n [False]\r\n [False]\r\n [False]\r\n [False]\r\n [False]\r\n [ True]\r\n [ True]\r\n [ True]\r\n [ True]]\r\nmaskgit-token_idxs[0,:,0]: [151 635 635 635 635 635 635 635 635 635 635 635 0 0 0 0]\r\ntoken_idxs[0,:,0]: [151 635 635 635 635 635 635 635 635 635 635 635 635 0 0 0]\r\ninit_mask[0,:,0]: [False False False False False False False False False False False False\r\n False True True True]\r\n",,terminal_output +11628,8600254,"TERMINAL",0,0,"7",,terminal_output +11629,8601250,"TERMINAL",0,0,"8",,terminal_output +11630,8602374,"TERMINAL",0,0,"9",,terminal_output +11631,8603200,"TERMINAL",0,0,"maskgit-init_mask[0,:,0]: [[False]\r\n [False]\r\n [False]\r\n [False]\r\n [False]\r\n [False]\r\n [False]\r\n [False]\r\n [False]\r\n [False]\r\n [False]\r\n [False]\r\n [False]\r\n [ True]\r\n [ True]\r\n [ True]]\r\nmaskgit-token_idxs[0,:,0]: [151 635 635 635 635 635 635 635 635 635 635 635 635 0 0 0]\r\ntoken_idxs[0,:,0]: [151 635 635 635 635 635 635 635 635 635 635 635 635 635 0 0]\r\ninit_mask[0,:,0]: [False False False False False False False False False False False False\r\n False False True True]\r\n",,terminal_output +11632,8603331,"TERMINAL",0,0,"9:00",,terminal_output +11633,8604356,"TERMINAL",0,0,"1",,terminal_output +11634,8605547,"TERMINAL",0,0,"2",,terminal_output +11635,8606077,"TERMINAL",0,0,"maskgit-init_mask[0,:,0]: [[False]\r\n [False]\r\n [False]\r\n [False]\r\n [False]\r\n [False]\r\n [False]\r\n [False]\r\n [False]\r\n [False]\r\n [False]\r\n [False]\r\n [False]\r\n [False]\r\n [ True]\r\n [ True]]\r\nmaskgit-token_idxs[0,:,0]: [151 635 635 635 635 635 635 635 635 635 635 635 635 635 0 0]\r\ntoken_idxs[0,:,0]: [151 635 635 635 635 635 635 635 635 635 635 635 635 635 635 0]\r\ninit_mask[0,:,0]: [False False False False False False False False False False False False\r\n False False False True]\r\n",,terminal_output +11636,8606419,"TERMINAL",0,0,"3",,terminal_output +11637,8607495,"TERMINAL",0,0,"4",,terminal_output +11638,8608497,"TERMINAL",0,0,"5",,terminal_output +11639,8609030,"TERMINAL",0,0,"maskgit-init_mask[0,:,0]: [[False]\r\n [False]\r\n [False]\r\n [False]\r\n [False]\r\n [False]\r\n [False]\r\n [False]\r\n [False]\r\n [False]\r\n [False]\r\n [False]\r\n [False]\r\n [False]\r\n [False]\r\n [ True]]\r\nmaskgit-token_idxs[0,:,0]: 
[151 635 635 635 635 635 635 635 635 635 635 635 635 635 635 0]\r\n",,terminal_output +11640,8609542,"TERMINAL",0,0,"6",,terminal_output +11641,8610662,"TERMINAL",0,0,"72",,terminal_output +11642,8611706,"TERMINAL",0,0,"8",,terminal_output +11643,8612819,"TERMINAL",0,0,"9",,terminal_output +11644,8613844,"TERMINAL",0,0,"10",,terminal_output +11645,8614868,"TERMINAL",0,0,"1",,terminal_output +11646,8615861,"TERMINAL",0,0,"2",,terminal_output +11647,8616917,"TERMINAL",0,0,"3",,terminal_output +11648,8618019,"TERMINAL",0,0,"45",,terminal_output +11649,8618020,"TERMINAL",0,0,"SSIM: 0.32501327991485596\r\n",,terminal_output +11650,8619067,"TERMINAL",0,0,"5",,terminal_output +11651,8620091,"TERMINAL",0,0,"6",,terminal_output +11652,8620729,"TERMINAL",0,0,"]0;tum_cte0515@hkn0901:~/Projects/jafar[?2004h(jafar) [tum_cte0515@hkn0901 jafar]$ ",,terminal_output +11653,8621057,"TERMINAL",0,0,"7",,terminal_output +11654,8622068,"TERMINAL",0,0,"8",,terminal_output +11655,8623090,"TERMINAL",0,0,"9",,terminal_output +11656,8624130,"TERMINAL",0,0,"20",,terminal_output +11657,8625180,"TERMINAL",0,0,"2",,terminal_output +11658,8626221,"TERMINAL",0,0,"3",,terminal_output +11659,8627270,"TERMINAL",0,0,"4",,terminal_output +11660,8628340,"TERMINAL",0,0,"5",,terminal_output +11661,8629379,"TERMINAL",0,0,"64",,terminal_output +11662,8630406,"TERMINAL",0,0,"7",,terminal_output +11663,8631456,"TERMINAL",0,0,"8",,terminal_output +11664,8632582,"TERMINAL",0,0,"9",,terminal_output +11665,8633525,"TERMINAL",0,0,"30",,terminal_output +11666,8634631,"TERMINAL",0,0,"1",,terminal_output +11667,8635609,"TERMINAL",0,0,"2",,terminal_output +11668,8636655,"TERMINAL",0,0,"3",,terminal_output +11669,8637698,"TERMINAL",0,0,"4",,terminal_output +11670,8638739,"TERMINAL",0,0,"5",,terminal_output +11671,8639792,"TERMINAL",0,0,"6",,terminal_output +11672,8640886,"TERMINAL",0,0,"7",,terminal_output +11673,8641857,"TERMINAL",0,0,"826",,terminal_output +11674,8642891,"TERMINAL",0,0,"9",,terminal_output +11675,8643950,"TERMINAL",0,0,"40",,terminal_output +11676,8645075,"TERMINAL",0,0,"1",,terminal_output +11677,8646025,"TERMINAL",0,0,"2",,terminal_output +11678,8647049,"TERMINAL",0,0,"3",,terminal_output +11679,8648095,"TERMINAL",0,0,"4",,terminal_output +11680,8649135,"TERMINAL",0,0,"5",,terminal_output +11681,8650159,"TERMINAL",0,0,"7",,terminal_output +11682,8651204,"TERMINAL",0,0,"8",,terminal_output +11683,8652271,"TERMINAL",0,0,"9",,terminal_output +11684,8653371,"TERMINAL",0,0,"50",,terminal_output +11685,8654336,"TERMINAL",0,0,"1",,terminal_output +11686,8655417,"TERMINAL",0,0,"2",,terminal_output +11687,8656427,"TERMINAL",0,0,"3",,terminal_output +11688,8658202,"TERMINAL",0,0,"413",,terminal_output +11689,8659244,"TERMINAL",0,0,"6",,terminal_output +11690,8660288,"TERMINAL",0,0,"7",,terminal_output +11691,8661337,"TERMINAL",0,0,"8",,terminal_output +11692,8662365,"TERMINAL",0,0,"9",,terminal_output +11693,8663408,"TERMINAL",0,0,"40:00",,terminal_output +11694,8664531,"TERMINAL",0,0,"1",,terminal_output +11695,8665487,"TERMINAL",0,0,"2",,terminal_output +11696,8666524,"TERMINAL",0,0,"3",,terminal_output +11697,8667603,"TERMINAL",0,0,"4",,terminal_output +11698,8668633,"TERMINAL",0,0,"5",,terminal_output +11699,8669652,"TERMINAL",0,0,"6",,terminal_output +11700,8670679,"TERMINAL",0,0,"7",,terminal_output +11701,8671803,"TERMINAL",0,0,"8",,terminal_output +11702,8672770,"TERMINAL",0,0,"9",,terminal_output +11703,8673883,"TERMINAL",0,0,"10",,terminal_output +11704,8674847,"TERMINAL",0,0,"1",,terminal_output 
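With the placeholders in place, the rerun completes. The staircase of prints above is the MaskGIT-style schedule made visible: each scan iteration logs the mask and tokens before its update, one more position flips from True (masked) to False (decoded), and the newly decoded slot receives the sampled token (635 throughout in this overfit checkpoint, after the 151 prefix token); once the grid is filled the script reports SSIM: 0.32501... (row 11649). A toy reproduction of just the logged 16-entry slice, with the real sampler stubbed to the constant 635 (the actual loop is loop_fn in genie.py's sample_mihir, scanned over the full (1, 1, 920) token grid):

    import jax
    import jax.numpy as jnp

    def decode_step(carry, step):
        token_idxs, mask = carry
        pos = step + 1                            # slot 0 holds the prefix token 151
        token_idxs = token_idxs.at[pos].set(635)  # stub for the real MaskGIT sampler
        mask = mask.at[pos].set(False)            # exactly one slot unmasked per step
        return (token_idxs, mask), None

    tokens0 = jnp.zeros(16, dtype=jnp.int32).at[0].set(151)
    mask0 = jnp.ones(16, dtype=bool).at[0].set(False)
    (tokens, mask), _ = jax.lax.scan(decode_step, (tokens0, mask0), jnp.arange(15))
    # tokens -> [151, 635, ..., 635]; mask -> all False, matching the last print.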
+11705,8675896,"TERMINAL",0,0,"2",,terminal_output +11706,8677025,"TERMINAL",0,0,"3",,terminal_output +11707,8677977,"TERMINAL",0,0,"4",,terminal_output +11708,8679041,"TERMINAL",0,0,"5",,terminal_output +11709,8680041,"TERMINAL",0,0,"6",,terminal_output +11710,8681062,"TERMINAL",0,0,"7",,terminal_output +11711,8682083,"TERMINAL",0,0,"825",,terminal_output +11712,8683125,"TERMINAL",0,0,"9",,terminal_output +11713,8684161,"TERMINAL",0,0,"21",,terminal_output +11714,8685218,"TERMINAL",0,0,"2",,terminal_output +11715,8686349,"TERMINAL",0,0,"337",,terminal_output +11716,8687266,"TERMINAL",0,0,"4",,terminal_output +11717,8688390,"TERMINAL",0,0,"5",,terminal_output +11718,8689352,"TERMINAL",0,0,"6",,terminal_output +11719,8690404,"TERMINAL",0,0,"7",,terminal_output +11720,8691465,"TERMINAL",0,0,"8",,terminal_output +11721,8692590,"TERMINAL",0,0,"9",,terminal_output +11722,8693540,"TERMINAL",0,0,"30",,terminal_output +11723,8694644,"TERMINAL",0,0,"1",,terminal_output +11724,8695662,"TERMINAL",0,0,"2",,terminal_output +11725,8696649,"TERMINAL",0,0,"3",,terminal_output +11726,8697710,"TERMINAL",0,0,"4",,terminal_output +11727,8698823,"TERMINAL",0,0,"5",,terminal_output +11728,8699861,"TERMINAL",0,0,"6",,terminal_output +11729,8700802,"TERMINAL",0,0,"7",,terminal_output +11730,8701909,"TERMINAL",0,0,"8",,terminal_output +11731,8702976,"TERMINAL",0,0,"9",,terminal_output +11732,8703942,"TERMINAL",0,0,"40",,terminal_output +11733,8704972,"TERMINAL",0,0,"1",,terminal_output +11734,8706042,"TERMINAL",0,0,"2",,terminal_output +11735,8707130,"TERMINAL",0,0,"3",,terminal_output +11736,8708099,"TERMINAL",0,0,"4",,terminal_output +11737,8709141,"TERMINAL",0,0,"6",,terminal_output +11738,8710183,"TERMINAL",0,0,"7",,terminal_output +11739,8711360,"TERMINAL",0,0,"8",,terminal_output +11740,8712268,"TERMINAL",0,0,"9",,terminal_output +11741,8713474,"TERMINAL",0,0,"50",,terminal_output +11742,8714352,"TERMINAL",0,0,"1",,terminal_output +11743,8715419,"TERMINAL",0,0,"2",,terminal_output +11744,8716438,"TERMINAL",0,0,"3",,terminal_output +11745,8717576,"TERMINAL",0,0,"4",,terminal_output +11746,8718601,"TERMINAL",0,0,"5",,terminal_output +11747,8719624,"TERMINAL",0,0,"6",,terminal_output +11748,8720607,"TERMINAL",0,0,"7",,terminal_output +11749,8721605,"TERMINAL",0,0,"8",,terminal_output +11750,8722695,"TERMINAL",0,0,"9",,terminal_output +11751,8723308,"genie.py",0,0,"",python,tab +11752,8723309,"genie.py",3859,0,"",python,selection_mouse +11753,8723677,"TERMINAL",0,0,"1:00",,terminal_output +11754,8724344,"genie.py",3823,0,"",python,selection_command +11755,8724743,"TERMINAL",0,0,"1",,terminal_output +11756,8725882,"TERMINAL",0,0,"2",,terminal_output +11757,8726734,"genie.py",3860,0,"",python,selection_mouse +11758,8726916,"TERMINAL",0,0,"3",,terminal_output +11759,8727834,"TERMINAL",0,0,"4",,terminal_output +11760,8728030,"genie.py",3860,0,"+",python,content +11761,8728030,"genie.py",3861,0,"",python,selection_keyboard +11762,8728117,"genie.py",3861,0,"1",python,content +11763,8728118,"genie.py",3862,0,"",python,selection_keyboard +11764,8728484,"genie.py",3861,0,"",python,selection_command +11765,8728879,"TERMINAL",0,0,"5",,terminal_output +11766,8729597,"genie.py",3815,0,"",python,selection_mouse +11767,8729713,"genie.py",3811,9,"initial_T",python,selection_mouse +11768,8729976,"TERMINAL",0,0,"6",,terminal_output +11769,8730994,"TERMINAL",0,0,"7",,terminal_output +11770,8732117,"TERMINAL",0,0,"8",,terminal_output +11771,8733048,"TERMINAL",0,0,"9",,terminal_output 
+11772,8734170,"TERMINAL",0,0,"10",,terminal_output +11773,8735126,"TERMINAL",0,0,"1",,terminal_output +11774,8736171,"TERMINAL",0,0,"3",,terminal_output +11775,8737212,"TERMINAL",0,0,"4",,terminal_output +11776,8738288,"TERMINAL",0,0,"5",,terminal_output +11777,8739388,"TERMINAL",0,0,"6",,terminal_output +11778,8740413,"TERMINAL",0,0,"7",,terminal_output +11779,8741436,"TERMINAL",0,0,"8",,terminal_output +11780,8742503,"TERMINAL",0,0,"9",,terminal_output +11781,8743484,"TERMINAL",0,0,"20",,terminal_output +11782,8744543,"TERMINAL",0,0,"1",,terminal_output +11783,8745093,"TERMINAL",0,0,"sh scripts_horeka/overfit_sample_tiny/sample.sh ",,terminal_output +11784,8745636,"TERMINAL",0,0,"2",,terminal_output +11785,8746571,"TERMINAL",0,0,"3",,terminal_output +11786,8747681,"TERMINAL",0,0,"4",,terminal_output +11787,8748706,"TERMINAL",0,0,"5",,terminal_output +11788,8749730,"TERMINAL",0,0,"6",,terminal_output +11789,8750072,"TERMINAL",0,0,"\r\n[?2004l\r",,terminal_output +11790,8750135,"TERMINAL",0,0,"Sampling from checkpoint: /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/3309699/genie_1751384516_74500/\r\n",,terminal_output +11791,8750753,"TERMINAL",0,0,"7",,terminal_output +11792,8751779,"TERMINAL",0,0,"8",,terminal_output +11793,8752781,"TERMINAL",0,0,"9",,terminal_output +11794,8753028,"TERMINAL",0,0,"2025-07-03 18:41:29.924943: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +11795,8753928,"TERMINAL",0,0,"30",,terminal_output +11796,8754953,"TERMINAL",0,0,"1",,terminal_output +11797,8755977,"TERMINAL",0,0,"2",,terminal_output +11798,8757001,"TERMINAL",0,0,"3",,terminal_output +11799,8757994,"TERMINAL",0,0,"4",,terminal_output +11800,8759055,"TERMINAL",0,0,"51",,terminal_output +11801,8760065,"TERMINAL",0,0,"6",,terminal_output +11802,8761098,"TERMINAL",0,0,"7",,terminal_output +11803,8762121,"TERMINAL",0,0,"8",,terminal_output +11804,8763145,"TERMINAL",0,0,"40",,terminal_output +11805,8764178,"TERMINAL",0,0,"1",,terminal_output +11806,8765245,"TERMINAL",0,0,"2",,terminal_output +11807,8766122,"TERMINAL",0,0,"2025-07-03 18:41:43.018198: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +11808,8766243,"TERMINAL",0,0,"3",,terminal_output +11809,8767283,"TERMINAL",0,0,"4",,terminal_output +11810,8768369,"TERMINAL",0,0,"5",,terminal_output +11811,8769391,"TERMINAL",0,0,"6",,terminal_output +11812,8770416,"TERMINAL",0,0,"7",,terminal_output +11813,8771542,"TERMINAL",0,0,"8",,terminal_output +11814,8772565,"TERMINAL",0,0,"9",,terminal_output +11815,8773591,"TERMINAL",0,0,"50",,terminal_output +11816,8774575,"TERMINAL",0,0,"1",,terminal_output +11817,8775637,"TERMINAL",0,0,"2",,terminal_output +11818,8776457,"TERMINAL",0,0,"2025-07-03 18:41:53.322878: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. 
Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +11819,8776657,"TERMINAL",0,0,"3",,terminal_output +11820,8777788,"TERMINAL",0,0,"4",,terminal_output +11821,8778813,"TERMINAL",0,0,"5130",,terminal_output +11822,8779835,"TERMINAL",0,0,"6",,terminal_output +11823,8780860,"TERMINAL",0,0,"7",,terminal_output +11824,8781884,"TERMINAL",0,0,"8",,terminal_output +11825,8782908,"TERMINAL",0,0,"94",,terminal_output +11826,8783932,"TERMINAL",0,0,"2:00",,terminal_output +11827,8784956,"TERMINAL",0,0,"1",,terminal_output +11828,8786082,"TERMINAL",0,0,"2",,terminal_output +11829,8787107,"TERMINAL",0,0,"3",,terminal_output +11830,8788045,"TERMINAL",0,0,"4",,terminal_output +11831,8789064,"TERMINAL",0,0,"5",,terminal_output +11832,8789665,"TERMINAL",0,0,"2025-07-03 18:42:06.554680: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +11833,8790179,"TERMINAL",0,0,"6",,terminal_output +11834,8791122,"TERMINAL",0,0,"7",,terminal_output +11835,8792151,"TERMINAL",0,0,"9",,terminal_output +11836,8792300,"TERMINAL",0,0,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/orbax/checkpoint/_src/serialization/type_handlers.py:1251: UserWarning: Sharding info not provided when restoring. Populating sharding info from sharding file. Please note restoration time will be slightly increased due to reading from file. Note also that this option is unsafe when restoring on a different topology than the checkpoint was saved with.\r\n warnings.warn(\r\n",,terminal_output +11837,8793182,"TERMINAL",0,0,"10",,terminal_output +11838,8794219,"TERMINAL",0,0,"1",,terminal_output +11839,8795299,"TERMINAL",0,0,"2",,terminal_output +11840,8796322,"TERMINAL",0,0,"3",,terminal_output +11841,8797331,"TERMINAL",0,0,"4",,terminal_output +11842,8798413,"TERMINAL",0,0,"5",,terminal_output +11843,8799496,"TERMINAL",0,0,"6",,terminal_output +11844,8800507,"TERMINAL",0,0,"7",,terminal_output +11845,8801043,"TERMINAL",0,0,"2025-07-03 18:42:17.878498: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +11846,8801546,"TERMINAL",0,0,"8",,terminal_output +11847,8802569,"TERMINAL",0,0,"9",,terminal_output +11848,8803586,"TERMINAL",0,0,"20",,terminal_output +11849,8803963,"TERMINAL",0,0,"2025-07-03 18:42:20.864694: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. 
Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +11850,8804719,"TERMINAL",0,0,"1",,terminal_output +11851,8805658,"TERMINAL",0,0,"2",,terminal_output +11852,8806751,"TERMINAL",0,0,"3",,terminal_output +11853,8807791,"TERMINAL",0,0,"4",,terminal_output +11854,8808759,"TERMINAL",0,0,"5",,terminal_output +11855,8809786,"TERMINAL",0,0,"63",,terminal_output +11856,8810453,"TERMINAL",0,0,"2025-07-03 18:42:27.322133: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +11857,8810862,"TERMINAL",0,0,"7",,terminal_output +11858,8811887,"TERMINAL",0,0,"8",,terminal_output +11859,8812583,"TERMINAL",0,0,"2025-07-03 18:42:29.483435: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +11860,8812904,"TERMINAL",0,0,"9",,terminal_output +11861,8814039,"TERMINAL",0,0,"30",,terminal_output +11862,8814039,"TERMINAL",0,0,"token_idxs shape: (1, 1, 920)\r\n",,terminal_output +11863,8814653,"TERMINAL",0,0,"token_idxs[0,:,0]: [151 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0]\r\n",,terminal_output +11864,8814706,"TERMINAL",0,0,"init_mask[0,:,0]: [False True True True True True True True True True True True\r\n True True True True]\r\n",,terminal_output +11865,8815075,"TERMINAL",0,0,"1",,terminal_output +11866,8816107,"TERMINAL",0,0,"2",,terminal_output +11867,8817081,"TERMINAL",0,0,"3",,terminal_output +11868,8818160,"TERMINAL",0,0,"4",,terminal_output +11869,8819158,"TERMINAL",0,0,"6",,terminal_output +11870,8820182,"TERMINAL",0,0,"maskgit-init_mask[0,:,0]: [[False]\r\n [ True]\r\n [ True]\r\n [ True]\r\n [ True]\r\n [ True]\r\n [ True]\r\n [ True]\r\n [ True]\r\n [ True]\r\n [ True]\r\n [ True]\r\n [ True]\r\n [ True]\r\n [ True]\r\n [ True]]\r\nmaskgit-token_idxs[0,:,0]: [151 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0]\r\ntoken_idxs[0,:,0]: [151 635 0 0 0 0 0 0 0 0 0 0 0 0 0 0]\r\ninit_mask[0,:,0]: [False False True True True True True True True True True True\r\n True True True True]\r\n",,terminal_output +11871,8820211,"TERMINAL",0,0,"7",,terminal_output +11872,8821233,"TERMINAL",0,0,"8",,terminal_output +11873,8822333,"TERMINAL",0,0,"9",,terminal_output +11874,8822978,"TERMINAL",0,0,"maskgit-init_mask[0,:,0]: [[False]\r\n [False]\r\n [ True]\r\n [ True]\r\n [ True]\r\n [ True]\r\n [ True]\r\n [ True]\r\n [ True]\r\n [ True]\r\n [ True]\r\n [ True]\r\n [ True]\r\n [ True]\r\n [ True]\r\n [ True]]\r\nmaskgit-token_idxs[0,:,0]: [151 635 0 0 0 0 0 0 0 0 0 0 0 0 0 0]\r\ntoken_idxs[0,:,0]: [151 635 635 0 0 0 0 0 0 0 0 0 0 0 0 0]\r\ninit_mask[0,:,0]: [False False False True True True True True True True True True\r\n True True True True]\r\n",,terminal_output +11875,8823356,"TERMINAL",0,0,"40",,terminal_output +11876,8824382,"TERMINAL",0,0,"1",,terminal_output +11877,8825404,"TERMINAL",0,0,"2",,terminal_output +11878,8825547,"TERMINAL",0,0,"maskgit-init_mask[0,:,0]: [[False]\r\n [False]\r\n [False]\r\n [ True]\r\n [ True]\r\n [ True]\r\n [ True]\r\n [ True]\r\n [ True]\r\n [ True]\r\n [ True]\r\n [ True]\r\n [ True]\r\n [ True]\r\n [ 
True]\r\n [ True]]\r\nmaskgit-token_idxs[0,:,0]: [151 635 635 0 0 0 0 0 0 0 0 0 0 0 0 0]\r\ntoken_idxs[0,:,0]: [151 635 635 635 0 0 0 0 0 0 0 0 0 0 0 0]\r\ninit_mask[0,:,0]: [False False False False True True True True True True True True\r\n True True True True]\r\n",,terminal_output +11879,8826531,"TERMINAL",0,0,"3",,terminal_output +11880,8826839,"TERMINAL",0,0,"^C",,terminal_output +11881,8827019,"TERMINAL",0,0,"^C",,terminal_output +11882,8827198,"TERMINAL",0,0,"^C",,terminal_output +11883,8827556,"TERMINAL",0,0,"4",,terminal_output +11884,8828242,"TERMINAL",0,0,"Traceback (most recent call last):\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/sample.py"", line 157, in \r\n vid = _autoreg_sample_mihir(rng, video_batch, action_batch)\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/sample.py"", line 118, in _autoreg_sample_mihir\r\n generated_vid = genie.apply(\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/jax/_src/traceback_util.py"", line 182, in reraise_with_filtered_traceback\r\n return fun(*args, **kwargs)\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/flax/linen/module.py"", line 2240, in apply\r\n return apply(\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/flax/core/scope.py"", line 1079, in wrapper\r\n y = fn(root, *args, **kwargs)\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/flax/linen/module.py"", line 3022, in scope_fn\r\n return fn(module.clone(parent=scope, _deep_clone=True), *args, **kwargs)\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/flax/linen/module.py"", line 699, in wrapped_module_method\r\n return self._call_wrapped_method(fun, args, kwargs)\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/flax/linen/module.py"", line 1216, in _call_wrapped_method\r\n y = run_fun(self, *args, **kwargs)\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/genie.py"", line 155, in sample_mihir\r\n final_carry, _ = loop_fn(init_carry, jnp.arange(steps))\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/flax/linen/transforms.py"", line 360, in wrapped_fn\r\n ret = trafo_fn(module_scopes, *args, **kwargs)\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/flax/core/lift.py"", line 328, in wrapper\r\n y, out_variable_groups_xs_t = fn(\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/flax/core/lift.py"", line 1031, in inner\r\n broadcast_vars, (carry_vars, c), (ys, scan_vars) = scanned(\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/flax/core/axes_scan.py"", line 179, in scan_fn\r\n c, ys = lax.scan(\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/jax/_src/traceback_util.py"", line 182, in reraise_with_filtered_traceback\r\n return fun(*args, **kwargs)\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/jax/_src/lax/control_flow/loops.py"", line 355, in 
scan\r\n out = scan_p.bind(*consts, *in_flat,\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/jax/_src/core.py"", line 531, in bind\r\n return self._true_bind(*args, **params)\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/jax/_src/core.py"", line 551, in _true_bind\r\n return self.bind_with_trace(prev_trace, args, params)\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/jax/_src/core.py"", line 556, in bind_with_trace\r\n return trace.process_primitive(self, args, params)\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/jax/_src/core.py"", line 1060, in process_primitive\r\n return primitive.impl(*args, **params)\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/jax/_src/dispatch.py"", line 88, in apply_primitive\r\n outs = fun(*args)\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/jax/_src/traceback_util.py"", line 182, in reraise_with_filtered_traceback\r\n return fun(*args, **kwargs)\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/jax/_src/pjit.py"", line 334, in cache_miss\r\n executable, pgle_profiler) = _python_pjit_helper(fun, jit_info, *args, **kwargs)\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/jax/_src/pjit.py"", line 195, in _python_pjit_helper\r\n out_flat, compiled, profiler = _pjit_call_impl_python(*args_flat, **p.params)\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/jax/_src/pjit.py"", line 1862, in _pjit_call_impl_python\r\n ).compile()\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/jax/_src/interpreters/pxla.py"", line 2467, in compile\r\n executable = UnloadedMeshExecutable.from_hlo(\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/jax/_src/interpreters/pxla.py"", line 3009, in from_hlo\r\n xla_executable = _cached_compilation(\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/jax/_src/interpreters/pxla.py"", line 2800, in _cached_compilation\r\n xla_executable = compiler.compile_or_get_cached(\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/jax/_src/compiler.py"", line 447, in compile_or_get_cached\r\n return _compile_and_write_cache(\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/jax/_src/compiler.py"", line 719, in _compile_and_write_cache\r\n executable = backend_compile(\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/jax/_src/profiler.py"", line 354, in wrapper\r\n return func(*args, **kwargs)\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/jax/_src/compiler.py"", line 326, in backend_compile\r\n return backend.compile(\r\nKeyboardInterrupt\r\n",,terminal_output +11885,8828580,"TERMINAL",0,0,"5",,terminal_output 
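Both aborted runs die in the same place: the ^C tracebacks end inside backend_compile -> backend.compile(...), so the interrupts land during XLA compilation rather than during sampling, which is why each edit-and-rerun cycle burns minutes of timer ticks before any model output appears. One option the session does not use: JAX's persistent compilation cache lets an unchanged program skip backend_compile on the next invocation, though edits to a jax.debug.print format string change the traced program and still force a fresh compile. The cache directory below is illustrative:

    import jax

    # Persist compiled XLA executables across processes so identical
    # reruns of sample.sh reuse them instead of recompiling.
    jax.config.update("jax_compilation_cache_dir", "/tmp/jax_cache")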
+11886,8829584,"TERMINAL",0,0,"\r\n]0;tum_cte0515@hkn0901:~/Projects/jafar[?2004h(jafar) [tum_cte0515@hkn0901 jafar]$ ",,terminal_output +11887,8829626,"TERMINAL",0,0,"6",,terminal_output +11888,8830620,"TERMINAL",0,0,"7",,terminal_output +11889,8831706,"TERMINAL",0,0,"8",,terminal_output +11890,8832859,"TERMINAL",0,0,"9",,terminal_output +11891,8833827,"TERMINAL",0,0,"50",,terminal_output +11892,8834825,"TERMINAL",0,0,"1",,terminal_output +11893,8835848,"TERMINAL",0,0,"2",,terminal_output +11894,8836881,"TERMINAL",0,0,"3",,terminal_output +11895,8837979,"TERMINAL",0,0,"44",,terminal_output +11896,8839025,"TERMINAL",0,0,"5",,terminal_output +11897,8840002,"TERMINAL",0,0,"6",,terminal_output +11898,8841119,"TERMINAL",0,0,"7",,terminal_output +11899,8842198,"TERMINAL",0,0,"8",,terminal_output +11900,8843149,"TERMINAL",0,0,"3:00",,terminal_output +11901,8844202,"TERMINAL",0,0,"1",,terminal_output +11902,8845221,"TERMINAL",0,0,"2",,terminal_output +11903,8846260,"TERMINAL",0,0,"3",,terminal_output +11904,8847319,"TERMINAL",0,0,"4",,terminal_output +11905,8848453,"TERMINAL",0,0,"5",,terminal_output +11906,8849469,"TERMINAL",0,0,"6",,terminal_output +11907,8850493,"TERMINAL",0,0,"7",,terminal_output +11908,8851518,"TERMINAL",0,0,"8",,terminal_output +11909,8852504,"TERMINAL",0,0,"9",,terminal_output +11910,8853563,"TERMINAL",0,0,"10",,terminal_output +11911,8854692,"TERMINAL",0,0,"1",,terminal_output +11912,8855715,"TERMINAL",0,0,"2",,terminal_output +11913,8856739,"TERMINAL",0,0,"3",,terminal_output +11914,8857690,"TERMINAL",0,0,"4",,terminal_output +11915,8858727,"TERMINAL",0,0,"5",,terminal_output +11916,8859812,"TERMINAL",0,0,"6",,terminal_output +11917,8860837,"TERMINAL",0,0,"7",,terminal_output +11918,8861725,"genie.py",0,0,"",python,tab +11919,8861726,"genie.py",4635,0,"",python,selection_mouse +11920,8861860,"genie.py",4634,0,"",python,selection_command +11921,8861946,"TERMINAL",0,0,"8",,terminal_output +11922,8862882,"TERMINAL",0,0,"9",,terminal_output +11923,8864028,"TERMINAL",0,0,"20",,terminal_output +11924,8865033,"TERMINAL",0,0,"1",,terminal_output +11925,8866057,"TERMINAL",0,0,"2",,terminal_output +11926,8867084,"TERMINAL",0,0,"3",,terminal_output +11927,8868056,"TERMINAL",0,0,"4",,terminal_output +11928,8869284,"TERMINAL",0,0,"5",,terminal_output +11929,8870147,"TERMINAL",0,0,"70",,terminal_output +11930,8870724,"genie.py",8245,0,"",python,selection_mouse +11931,8871201,"TERMINAL",0,0,"8",,terminal_output +11932,8872040,"genie.py",8220,154,"",python,content +11933,8872234,"TERMINAL",0,0,"9",,terminal_output +11934,8872499,"genie.py",8177,0,"",python,selection_command +11935,8873284,"TERMINAL",0,0,"30",,terminal_output +11936,8874309,"TERMINAL",0,0,"1",,terminal_output +11937,8875376,"TERMINAL",0,0,"2",,terminal_output +11938,8876505,"TERMINAL",0,0,"3",,terminal_output +11939,8877269,"genie.py",8215,0,"",python,selection_mouse +11940,8877404,"genie.py",8209,9,"vid_embed",python,selection_mouse +11941,8877476,"TERMINAL",0,0,"4",,terminal_output +11942,8878491,"TERMINAL",0,0,"5",,terminal_output +11943,8879574,"TERMINAL",0,0,"6",,terminal_output +11944,8880569,"TERMINAL",0,0,"7",,terminal_output +11945,8881623,"TERMINAL",0,0,"8",,terminal_output +11946,8882267,"genie.py",7841,0,"",python,selection_mouse +11947,8882432,"genie.py",7835,10,"token_idxs",python,selection_mouse +11948,8882644,"TERMINAL",0,0,"9",,terminal_output +11949,8883773,"TERMINAL",0,0,"40",,terminal_output +11950,8884797,"TERMINAL",0,0,"1",,terminal_output +11951,8885767,"TERMINAL",0,0,"2",,terminal_output 
+11952,8886846,"TERMINAL",0,0,"3",,terminal_output +11953,8887111,"genie.py",7699,0,"",python,selection_mouse +11954,8887313,"genie.py",7697,4,"step",python,selection_mouse +11955,8887981,"TERMINAL",0,0,"4",,terminal_output +11956,8888487,"genie.py",7842,0,"",python,selection_mouse +11957,8888584,"genie.py",7835,10,"token_idxs",python,selection_mouse +11958,8888914,"TERMINAL",0,0,"5",,terminal_output +11959,8889953,"TERMINAL",0,0,"6",,terminal_output +11960,8891044,"TERMINAL",0,0,"7",,terminal_output +11961,8892069,"TERMINAL",0,0,"8",,terminal_output +11962,8893089,"TERMINAL",0,0,"9",,terminal_output +11963,8894234,"TERMINAL",0,0,"50",,terminal_output +11964,8895159,"TERMINAL",0,0,"2",,terminal_output +11965,8896199,"TERMINAL",0,0,"3",,terminal_output +11966,8897249,"TERMINAL",0,0,"4",,terminal_output +11967,8898267,"TERMINAL",0,0,"5",,terminal_output +11968,8899348,"TERMINAL",0,0,"60",,terminal_output +11969,8900362,"TERMINAL",0,0,"7",,terminal_output +11970,8901385,"TERMINAL",0,0,"8",,terminal_output +11971,8902389,"TERMINAL",0,0,"9",,terminal_output +11972,8903474,"TERMINAL",0,0,"4:00",,terminal_output +11973,8904716,"TERMINAL",0,0,"1",,terminal_output +11974,8905529,"TERMINAL",0,0,"2",,terminal_output +11975,8906609,"TERMINAL",0,0,"3",,terminal_output +11976,8907582,"TERMINAL",0,0,"4",,terminal_output +11977,8908656,"TERMINAL",0,0,"5",,terminal_output +11978,8909681,"TERMINAL",0,0,"6",,terminal_output +11979,8910715,"TERMINAL",0,0,"7",,terminal_output +11980,8911831,"TERMINAL",0,0,"8",,terminal_output +11981,8912796,"TERMINAL",0,0,"9",,terminal_output +11982,8913881,"TERMINAL",0,0,"10",,terminal_output +11983,8914903,"TERMINAL",0,0,"1",,terminal_output +11984,8915877,"TERMINAL",0,0,"2",,terminal_output +11985,8916956,"TERMINAL",0,0,"3",,terminal_output +11986,8917991,"TERMINAL",0,0,"4",,terminal_output +11987,8918992,"TERMINAL",0,0,"5",,terminal_output +11988,8920126,"TERMINAL",0,0,"6",,terminal_output +11989,8921159,"TERMINAL",0,0,"7",,terminal_output +11990,8922174,"TERMINAL",0,0,"8",,terminal_output +11991,8923198,"TERMINAL",0,0,"20",,terminal_output +11992,8924222,"TERMINAL",0,0,"1",,terminal_output +11993,8925245,"TERMINAL",0,0,"2",,terminal_output +11994,8926301,"TERMINAL",0,0,"3",,terminal_output +11995,8927329,"TERMINAL",0,0,"4",,terminal_output +11996,8928419,"TERMINAL",0,0,"5",,terminal_output +11997,8929444,"TERMINAL",0,0,"6",,terminal_output +11998,8930472,"TERMINAL",0,0,"7",,terminal_output +11999,8931594,"TERMINAL",0,0,"8",,terminal_output +12000,8932564,"TERMINAL",0,0,"9",,terminal_output +12001,8933642,"TERMINAL",0,0,"30",,terminal_output +12002,8934666,"TERMINAL",0,0,"1",,terminal_output +12003,8935793,"TERMINAL",0,0,"2",,terminal_output +12004,8936735,"TERMINAL",0,0,"3",,terminal_output +12005,8937841,"TERMINAL",0,0,"4",,terminal_output +12006,8938814,"TERMINAL",0,0,"5",,terminal_output +12007,8939888,"TERMINAL",0,0,"6",,terminal_output +12008,8941016,"TERMINAL",0,0,"7",,terminal_output +12009,8942038,"TERMINAL",0,0,"8",,terminal_output +12010,8942991,"TERMINAL",0,0,"9",,terminal_output +12011,8944087,"TERMINAL",0,0,"40",,terminal_output +12012,8945127,"TERMINAL",0,0,"1",,terminal_output +12013,8945635,"genie.py",8283,0,"",python,selection_mouse +12014,8946130,"TERMINAL",0,0,"2",,terminal_output +12015,8946633,"genie.py",8220,0," jax.debug.print(""maskgit-init_mask[0,:,0]: {}"", mask_expanded[0,:,0])\n jax.debug.print(""maskgit-token_idxs[0,:,0]: {}"", token_idxs[0,:,0])\n",python,content +12016,8946690,"genie.py",8245,0,"",python,selection_command 
+12017,8947131,"TERMINAL",0,0,"3",,terminal_output +12018,8948199,"TERMINAL",0,0,"5",,terminal_output +12019,8949208,"TERMINAL",0,0,"61",,terminal_output +12020,8949483,"genie.py",8220,78,"",python,content +12021,8949556,"genie.py",8228,0,"",python,selection_command +12022,8950264,"TERMINAL",0,0,"7",,terminal_output +12023,8951296,"TERMINAL",0,0,"8",,terminal_output +12024,8952340,"TERMINAL",0,0,"9",,terminal_output +12025,8952453,"genie.py",8220,76,"",python,content +12026,8952631,"genie.py",8221,0,"",python,selection_command +12027,8953108,"genie.py",8258,0,"",python,selection_command +12028,8953299,"genie.py",8317,0,"",python,selection_command +12029,8953300,"genie.py",8391,0,"",python,selection_command +12030,8953362,"genie.py",8464,0,"",python,selection_command +12031,8953363,"genie.py",8526,0,"",python,selection_command +12032,8953363,"genie.py",8602,0,"",python,selection_command +12033,8953363,"genie.py",8603,0,"",python,selection_command +12034,8953401,"genie.py",8655,0,"",python,selection_command +12035,8953472,"TERMINAL",0,0,"50",,terminal_output +12036,8953821,"genie.py",8686,0,"",python,selection_command +12037,8954089,"genie.py",8753,0,"",python,selection_command +12038,8954245,"genie.py",8767,0,"",python,selection_command +12039,8954432,"genie.py",8813,0,"",python,selection_command +12040,8954443,"TERMINAL",0,0,"1",,terminal_output +12041,8954597,"genie.py",8857,0,"",python,selection_command +12042,8954789,"genie.py",8897,0,"",python,selection_command +12043,8954956,"genie.py",8948,0,"",python,selection_command +12044,8955145,"genie.py",9008,0,"",python,selection_command +12045,8955362,"genie.py",9022,0,"",python,selection_command +12046,8955490,"TERMINAL",0,0,"2",,terminal_output +12047,8955586,"genie.py",9080,0,"",python,selection_command +12048,8955939,"genie.py",9168,0,"",python,selection_command +12049,8956245,"genie.py",9203,0,"",python,selection_command +12050,8956492,"genie.py",9239,0,"",python,selection_command +12051,8956587,"TERMINAL",0,0,"3",,terminal_output +12052,8957023,"genie.py",9307,0,"\n jax.debug.print(""maskgit-token_idxs[0,:,0]: {}"", token_idxs[0,:,0])",python,content +12053,8957062,"genie.py",9316,0,"",python,selection_command +12054,8957579,"TERMINAL",0,0,"42",,terminal_output +12055,8958601,"TERMINAL",0,0,"5",,terminal_output +12056,8959653,"TERMINAL",0,0,"6",,terminal_output +12057,8960693,"TERMINAL",0,0,"7",,terminal_output +12058,8961735,"TERMINAL",0,0,"8",,terminal_output +12059,8962766,"TERMINAL",0,0,"9",,terminal_output +12060,8963851,"TERMINAL",0,0,"5:00",,terminal_output +12061,8964843,"TERMINAL",0,0,"1",,terminal_output +12062,8965903,"TERMINAL",0,0,"2",,terminal_output +12063,8966927,"TERMINAL",0,0,"3",,terminal_output +12064,8967978,"TERMINAL",0,0,"4",,terminal_output +12065,8969086,"TERMINAL",0,0,"5",,terminal_output +12066,8970098,"TERMINAL",0,0,"6",,terminal_output +12067,8971066,"TERMINAL",0,0,"7",,terminal_output +12068,8972145,"TERMINAL",0,0,"8",,terminal_output +12069,8973170,"TERMINAL",0,0,"10",,terminal_output +12070,8974298,"TERMINAL",0,0,"1",,terminal_output +12071,8975238,"TERMINAL",0,0,"2",,terminal_output +12072,8976284,"TERMINAL",0,0,"3",,terminal_output +12073,8977337,"TERMINAL",0,0,"4",,terminal_output +12074,8978555,"TERMINAL",0,0,"5",,terminal_output +12075,8979472,"TERMINAL",0,0,"6",,terminal_output +12076,8979535,"TERMINAL",0,0,"sh scripts_horeka/overfit_sample_tiny/sample.sh ",,terminal_output +12077,8980509,"TERMINAL",0,0,"7",,terminal_output +12078,8980561,"TERMINAL",0,0,"\r\n[?2004l\r",,terminal_output 
+12079,8980674,"TERMINAL",0,0,"Sampling from checkpoint: /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/3309699/genie_1751384516_74500/\r\n",,terminal_output +12080,8981566,"TERMINAL",0,0,"8",,terminal_output +12081,8982536,"TERMINAL",0,0,"9",,terminal_output +12082,8983247,"TERMINAL",0,0,"^C",,terminal_output +12083,8983312,"TERMINAL",0,0,"Traceback (most recent call last):\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/sample.py"", line 86, in \r\n params = genie.init(_rng, dummy_inputs)\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/jax/_src/traceback_util.py"", line 182, in reraise_with_filtered_traceback\r\n return fun(*args, **kwargs)\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/flax/linen/module.py"", line 2452, in init\r\n _, v_out = self.init_with_output(\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/jax/_src/traceback_util.py"", line 182, in reraise_with_filtered_traceback\r\n return fun(*args, **kwargs)\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/flax/linen/module.py"", line 2304, in init_with_output\r\n return init_with_output(\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/flax/core/scope.py"", line 1115, in wrapper\r\n return apply(fn, mutable=mutable, flags=init_flags)(\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/flax/core/scope.py"", line 1079, in wrapper\r\n y = fn(root, *args, **kwargs)\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/flax/linen/module.py"", line 3093, in scope_fn\r\n return fn(module.clone(parent=scope, _deep_clone=True), *args, **kwargs)\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/flax/linen/module.py"", line 699, in wrapped_module_method\r\n return self._call_wrapped_method(fun, args, kwargs)\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/flax/linen/module.py"", line 1216, in _call_wrapped_method\r\n y = run_fun(self, *args, **kwargs)\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/genie.py"", line 75, in __call__\r\n tokenizer_outputs = self.tokenizer.vq_encode(batch[""videos""], training=False)\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/flax/linen/module.py"", line 699, in wrapped_module_method\r\n return self._call_wrapped_method(fun, args, kwargs)\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/flax/linen/module.py"", line 1216, in _call_wrapped_method\r\n y = run_fun(self, *args, **kwargs)\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/models/tokenizer.py"", line 57, in vq_encode\r\n x = self.encoder(x) # (B, T, N, E)\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/flax/linen/module.py"", line 699, in wrapped_module_method\r\n return self._call_wrapped_method(fun, args, kwargs)\r\n File 
""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/flax/linen/module.py"", line 1216, in _call_wrapped_method\r\n y = run_fun(self, *args, **kwargs)\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/utils/nn.py"", line 79, in __call__\r\n x = nn.Sequential(\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/flax/linen/module.py"", line 699, in wrapped_module_method\r\n return self._call_wrapped_method(fun, args, kwargs)\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/flax/linen/module.py"", line 1216, in _call_wrapped_method\r\n y = run_fun(self, *args, **kwargs)\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/flax/linen/combinators.py"", line 113, in __call__\r\n outputs = layer(outputs)\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/flax/linen/module.py"", line 699, in wrapped_module_method\r\n return self._call_wrapped_method(fun, args, kwargs)\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/flax/linen/module.py"", line 1216, in _call_wrapped_method\r\n y = run_fun(self, *args, **kwargs)\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/flax/linen/linear.py"", line 263, in __call__\r\n kernel = self.param(\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/flax/linen/module.py"", line 1877, in param\r\n v = self.scope.param(name, init_fn, *init_args, unbox=unbox, **init_kwargs)\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/flax/core/scope.py"", line 968, in param\r\n value = init_fn(self.make_rng('params'), *init_args, **init_kwargs)\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/jax/_src/nn/initializers.py"", line 334, in init\r\n stddev = jnp.sqrt(variance) / jnp.array(.87962566103423978, dtype)\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/jax/_src/numpy/array_methods.py"", line 583, in deferring_binary_op\r\n return binary_op(*args)\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/jax/_src/traceback_util.py"", line 182, in reraise_with_filtered_traceback\r\n return fun(*args, **kwargs)\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/jax/_src/pjit.py"", line 334, in cache_miss\r\n executable, pgle_profiler) = _python_pjit_helper(fun, jit_info, *args, **kwargs)\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/jax/_src/pjit.py"", line 195, in _python_pjit_helper\r\n out_flat, compiled, profiler = _pjit_call_impl_python(*args_flat, **p.params)\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/jax/_src/pjit.py"", line 1862, in _pjit_call_impl_python\r\n ).compile()\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/jax/_src/interpreters/pxla.py"", line 2467, in compile\r\n 
executable = UnloadedMeshExecutable.from_hlo(\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/jax/_src/interpreters/pxla.py"", line 3009, in from_hlo\r\n xla_executable = _cached_compilation(\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/jax/_src/interpreters/pxla.py"", line 2800, in _cached_compilation\r\n xla_executable = compiler.compile_or_get_cached(\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/jax/_src/compiler.py"", line 447, in compile_or_get_cached\r\n return _compile_and_write_cache(\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/jax/_src/compiler.py"", line 719, in _compile_and_write_cache\r\n executable = backend_compile(\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/jax/_src/profiler.py"", line 354, in wrapper\r\n return func(*args, **kwargs)\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/jax/_src/compiler.py"", line 335, in backend_compile\r\n return backend.compile(\r\nKeyboardInterrupt\r\n",,terminal_output +12084,8983511,"TERMINAL",0,0,"^C",,terminal_output +12085,8983606,"TERMINAL",0,0,"20",,terminal_output +12086,8983625,"TERMINAL",0,0,"^C",,terminal_output +12087,8983825,"TERMINAL",0,0,"\r\n]0;tum_cte0515@hkn0901:~/Projects/jafar[?2004h(jafar) [tum_cte0515@hkn0901 jafar]$ ",,terminal_output +12088,8984627,"TERMINAL",0,0,"1",,terminal_output +12089,8985765,"TERMINAL",0,0,"2",,terminal_output +12090,8986912,"TERMINAL",0,0,"3",,terminal_output +12091,8987805,"TERMINAL",0,0,"4",,terminal_output +12092,8988837,"TERMINAL",0,0,"5",,terminal_output +12093,8989017,"genie.py",0,0,"",python,tab +12094,8989018,"genie.py",3860,0,"",python,selection_mouse +12095,8989872,"TERMINAL",0,0,"60",,terminal_output +12096,8990987,"TERMINAL",0,0,"7",,terminal_output +12097,8991905,"genie.py",3860,1,"",python,content +12098,8991988,"TERMINAL",0,0,"8",,terminal_output +12099,8992022,"genie.py",3860,1,"",python,content +12100,8993001,"TERMINAL",0,0,"9",,terminal_output +12101,8994019,"TERMINAL",0,0,"30",,terminal_output +12102,8994246,"TERMINAL",0,0,"sh scripts_horeka/overfit_sample_tiny/sample.sh ",,terminal_output +12103,8994758,"TERMINAL",0,0,"\r\n[?2004l\r",,terminal_output +12104,8994878,"TERMINAL",0,0,"Sampling from checkpoint: /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/3309699/genie_1751384516_74500/\r\n",,terminal_output +12105,8995062,"TERMINAL",0,0,"1",,terminal_output +12106,8996164,"TERMINAL",0,0,"2",,terminal_output +12107,8997149,"TERMINAL",0,0,"4",,terminal_output +12108,8997790,"TERMINAL",0,0,"2025-07-03 18:45:34.690320: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. 
Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +12109,8998257,"TERMINAL",0,0,"5",,terminal_output +12110,8999282,"TERMINAL",0,0,"6",,terminal_output +12111,9000288,"TERMINAL",0,0,"7",,terminal_output +12112,9001336,"TERMINAL",0,0,"8",,terminal_output +12113,9002378,"TERMINAL",0,0,"9",,terminal_output +12114,9003414,"TERMINAL",0,0,"40",,terminal_output +12115,9004504,"TERMINAL",0,0,"1",,terminal_output +12116,9005529,"TERMINAL",0,0,"2",,terminal_output +12117,9006546,"TERMINAL",0,0,"3",,terminal_output +12118,9007679,"TERMINAL",0,0,"4",,terminal_output +12119,9008703,"TERMINAL",0,0,"5",,terminal_output +12120,9009727,"TERMINAL",0,0,"6",,terminal_output +12121,9010751,"TERMINAL",0,0,"7",,terminal_output +12122,9011000,"TERMINAL",0,0,"2025-07-03 18:45:47.877703: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +12123,9011776,"TERMINAL",0,0,"8",,terminal_output +12124,9012798,"TERMINAL",0,0,"9",,terminal_output +12125,9014001,"TERMINAL",0,0,"50",,terminal_output +12126,9014950,"TERMINAL",0,0,"1",,terminal_output +12127,9016035,"TERMINAL",0,0,"2",,terminal_output +12128,9017060,"TERMINAL",0,0,"3",,terminal_output +12129,9017997,"TERMINAL",0,0,"4",,terminal_output +12130,9019050,"TERMINAL",0,0,"5",,terminal_output +12131,9020070,"TERMINAL",0,0,"6",,terminal_output +12132,9021094,"TERMINAL",0,0,"7",,terminal_output +12133,9021516,"TERMINAL",0,0,"2025-07-03 18:45:58.305177: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +12134,9022115,"TERMINAL",0,0,"8",,terminal_output +12135,9023133,"TERMINAL",0,0,"9",,terminal_output +12136,9024267,"TERMINAL",0,0,"6:01",,terminal_output +12137,9025297,"TERMINAL",0,0,"2",,terminal_output +12138,9026261,"TERMINAL",0,0,"3",,terminal_output +12139,9027291,"TERMINAL",0,0,"4",,terminal_output +12140,9028369,"TERMINAL",0,0,"5",,terminal_output +12141,9029384,"TERMINAL",0,0,"6",,terminal_output +12142,9030435,"TERMINAL",0,0,"7",,terminal_output +12143,9031446,"TERMINAL",0,0,"8",,terminal_output +12144,9032462,"TERMINAL",0,0,"9",,terminal_output +12145,9033511,"TERMINAL",0,0,"10",,terminal_output +12146,9034418,"TERMINAL",0,0,"2025-07-03 18:46:11.319188: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +12147,9034542,"TERMINAL",0,0,"1",,terminal_output +12148,9035637,"TERMINAL",0,0,"21",,terminal_output +12149,9036657,"TERMINAL",0,0,"3",,terminal_output +12150,9036977,"TERMINAL",0,0,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/orbax/checkpoint/_src/serialization/type_handlers.py:1251: UserWarning: Sharding info not provided when restoring. Populating sharding info from sharding file. 
Please note restoration time will be slightly increased due to reading from file. Note also that this option is unsafe when restoring on a different topology than the checkpoint was saved with.\r\n warnings.warn(\r\n",,terminal_output +12151,9037785,"TERMINAL",0,0,"4",,terminal_output +12152,9038727,"TERMINAL",0,0,"5",,terminal_output +12153,9039833,"TERMINAL",0,0,"6",,terminal_output +12154,9040857,"TERMINAL",0,0,"72",,terminal_output +12155,9041862,"TERMINAL",0,0,"8",,terminal_output +12156,9042905,"TERMINAL",0,0,"9",,terminal_output +12157,9043904,"TERMINAL",0,0,"20",,terminal_output +12158,9045056,"TERMINAL",0,0,"1",,terminal_output +12159,9045260,"TERMINAL",0,0,"2025-07-03 18:46:22.162139: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +12160,9045990,"TERMINAL",0,0,"2",,terminal_output +12161,9047102,"TERMINAL",0,0,"3",,terminal_output +12162,9048071,"TERMINAL",0,0,"4",,terminal_output +12163,9048404,"TERMINAL",0,0,"2025-07-03 18:46:25.280657: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +12164,9049151,"TERMINAL",0,0,"5",,terminal_output +12165,9050175,"TERMINAL",0,0,"70",,terminal_output +12166,9051199,"TERMINAL",0,0,"8",,terminal_output +12167,9052326,"TERMINAL",0,0,"9",,terminal_output +12168,9053280,"TERMINAL",0,0,"30",,terminal_output +12169,9054327,"TERMINAL",0,0,"1",,terminal_output +12170,9054887,"TERMINAL",0,0,"2025-07-03 18:46:31.714132: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +12171,9055367,"TERMINAL",0,0,"2",,terminal_output +12172,9056387,"TERMINAL",0,0,"3",,terminal_output +12173,9056679,"TERMINAL",0,0,"2025-07-03 18:46:33.579475: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. 
Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +12174,9057446,"TERMINAL",0,0,"4",,terminal_output +12175,9058021,"TERMINAL",0,0,"token_idxs shape: (1, 1, 920)\r\n",,terminal_output +12176,9058512,"TERMINAL",0,0,"5",,terminal_output +12177,9058688,"TERMINAL",0,0,"token_idxs[0,:,0]: [151 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0]\r\n",,terminal_output +12178,9058750,"TERMINAL",0,0,"init_mask[0,:,0]: [False True True True True True True True True True True True\r\n True True True True]\r\n",,terminal_output +12179,9059596,"TERMINAL",0,0,"6",,terminal_output +12180,9060621,"TERMINAL",0,0,"7",,terminal_output +12181,9061645,"TERMINAL",0,0,"8",,terminal_output +12182,9062668,"TERMINAL",0,0,"9",,terminal_output +12183,9063830,"TERMINAL",0,0,"40",,terminal_output +12184,9064667,"TERMINAL",0,0,"maskgit-token_idxs[0,:,0]: [151 635 635 635 635 635 635 635 635 635 635 635 635 635 635 635]\r\ntoken_idxs[0,:,0]: [151 635 0 0 0 0 0 0 0 0 0 0 0 0 0 0]\r\ninit_mask[0,:,0]: [False False True True True True True True True True True True\r\n True True True True]\r\n",,terminal_output +12185,9064730,"TERMINAL",0,0,"1",,terminal_output +12186,9066407,"TERMINAL",0,0,"21",,terminal_output +12187,9067481,"TERMINAL",0,0,"41",,terminal_output +12188,9067534,"TERMINAL",0,0,"maskgit-token_idxs[0,:,0]: [151 635 635 635 635 635 635 635 635 635 635 635 635 635 635 635]\r\ntoken_idxs[0,:,0]: [151 635 635 0 0 0 0 0 0 0 0 0 0 0 0 0]\r\ninit_mask[0,:,0]: [False False False True True True True True True True True True\r\n True True True True]\r\n",,terminal_output +12189,9068482,"TERMINAL",0,0,"5",,terminal_output +12190,9069631,"TERMINAL",0,0,"6",,terminal_output +12191,9070451,"TERMINAL",0,0,"maskgit-token_idxs[0,:,0]: [151 635 635 635 635 635 635 635 635 635 635 635 635 635 635 635]\r\ntoken_idxs[0,:,0]: [151 635 635 635 0 0 0 0 0 0 0 0 0 0 0 0]\r\ninit_mask[0,:,0]: [False False False False True True True True True True True True\r\n True True True True]\r\n",,terminal_output +12192,9070573,"TERMINAL",0,0,"7",,terminal_output +12193,9071680,"TERMINAL",0,0,"8",,terminal_output +12194,9072704,"TERMINAL",0,0,"9",,terminal_output +12195,9073318,"TERMINAL",0,0,"maskgit-token_idxs[0,:,0]: [151 635 635 635 635 635 635 635 635 635 635 635 635 635 635 635]\r\ntoken_idxs[0,:,0]: [151 635 635 635 635 0 0 0 0 0 0 0 0 0 0 0]\r\ninit_mask[0,:,0]: [False False False False False True True True True True True True\r\n True True True True]\r\n",,terminal_output +12196,9073676,"TERMINAL",0,0,"50",,terminal_output +12197,9074752,"TERMINAL",0,0,"1",,terminal_output +12198,9075749,"TERMINAL",0,0,"2",,terminal_output +12199,9076308,"TERMINAL",0,0,"maskgit-token_idxs[0,:,0]: [151 635 635 635 635 635 635 635 635 635 635 635 635 635 635 635]\r\ntoken_idxs[0,:,0]: [151 635 635 635 635 635 0 0 0 0 0 0 0 0 0 0]\r\ninit_mask[0,:,0]: [False False False False False False True True True True True True\r\n True True True True]\r\n",,terminal_output +12200,9076794,"TERMINAL",0,0,"3",,terminal_output +12201,9077949,"TERMINAL",0,0,"4",,terminal_output +12202,9078950,"TERMINAL",0,0,"5",,terminal_output +12203,9079258,"TERMINAL",0,0,"maskgit-token_idxs[0,:,0]: [151 635 635 635 635 635 635 635 635 635 635 635 635 635 635 635]\r\ntoken_idxs[0,:,0]: [151 635 635 635 635 635 635 0 0 0 0 0 0 0 0 0]\r\ninit_mask[0,:,0]: [False False False False False False False True True True True True\r\n True True True True]\r\n",,terminal_output 
+12204,9079974,"TERMINAL",0,0,"6",,terminal_output +12205,9080998,"TERMINAL",0,0,"7",,terminal_output +12206,9082035,"TERMINAL",0,0,"8",,terminal_output +12207,9082122,"TERMINAL",0,0,"maskgit-token_idxs[0,:,0]: [151 635 635 635 635 635 635 635 635 635 635 635 635 635 635 635]\r\ntoken_idxs[0,:,0]: [151 635 635 635 635 635 635 635 0 0 0 0 0 0 0 0]\r\ninit_mask[0,:,0]: [False False False False False False False False True True True True\r\n True True True True]\r\n",,terminal_output +12208,9083016,"TERMINAL",0,0,"9",,terminal_output +12209,9084069,"TERMINAL",0,0,"7:00",,terminal_output +12210,9085095,"TERMINAL",0,0,"maskgit-token_idxs[0,:,0]: [151 635 635 635 635 635 635 635 635 635 635 635 635 635 635 635]\r\ntoken_idxs[0,:,0]: [151 635 635 635 635 635 635 635 635 0 0 0 0 0 0 0]\r\ninit_mask[0,:,0]: [False False False False False False False False False True True True\r\n True True True True]\r\n",,terminal_output +12211,9085096,"TERMINAL",0,0,"1",,terminal_output +12212,9086101,"TERMINAL",0,0,"2",,terminal_output +12213,9087245,"TERMINAL",0,0,"3",,terminal_output +12214,9087978,"TERMINAL",0,0,"maskgit-token_idxs[0,:,0]: [151 635 635 635 635 635 635 635 635 635 635 635 635 635 635 635]\r\ntoken_idxs[0,:,0]: [151 635 635 635 635 635 635 635 635 635 0 0 0 0 0 0]\r\ninit_mask[0,:,0]: [False False False False False False False False False False True True\r\n True True True True]\r\n",,terminal_output +12215,9088174,"TERMINAL",0,0,"5",,terminal_output +12216,9089294,"TERMINAL",0,0,"6",,terminal_output +12217,9090289,"TERMINAL",0,0,"7",,terminal_output +12218,9090832,"TERMINAL",0,0,"maskgit-token_idxs[0,:,0]: [151 635 635 635 635 635 635 635 635 635 635 635 635 635 635 635]\r\ntoken_idxs[0,:,0]: [151 635 635 635 635 635 635 635 635 635 635 0 0 0 0 0]\r\ninit_mask[0,:,0]: [False False False False False False False False False False False True\r\n True True True True]\r\n",,terminal_output +12219,9091354,"TERMINAL",0,0,"8",,terminal_output +12220,9092343,"TERMINAL",0,0,"9",,terminal_output +12221,9093367,"TERMINAL",0,0,"10",,terminal_output +12222,9093696,"TERMINAL",0,0,"maskgit-token_idxs[0,:,0]: [151 635 635 635 635 635 635 635 635 635 635 635 635 635 635 635]\r\ntoken_idxs[0,:,0]: [151 635 635 635 635 635 635 635 635 635 635 635 0 0 0 0]\r\ninit_mask[0,:,0]: [False False False False False False False False False False False False\r\n True True True True]\r\n",,terminal_output +12223,9094410,"TERMINAL",0,0,"1",,terminal_output +12224,9095439,"TERMINAL",0,0,"2",,terminal_output +12225,9096563,"TERMINAL",0,0,"3",,terminal_output +12226,9096572,"TERMINAL",0,0,"maskgit-token_idxs[0,:,0]: [151 635 635 635 635 635 635 635 635 635 635 635 635 635 635 635]\r\ntoken_idxs[0,:,0]: [151 635 635 635 635 635 635 635 635 635 635 635 635 0 0 0]\r\ninit_mask[0,:,0]: [False False False False False False False False False False False False\r\n False True True True]\r\n",,terminal_output +12227,9097678,"TERMINAL",0,0,"4",,terminal_output +12228,9098555,"TERMINAL",0,0,"5",,terminal_output +12229,9099635,"TERMINAL",0,0,"maskgit-token_idxs[0,:,0]: [151 635 635 635 635 635 635 635 635 635 635 635 635 635 635 635]\r\ntoken_idxs[0,:,0]: [151 635 635 635 635 635 635 635 635 635 635 635 635 635 0 0]\r\ninit_mask[0,:,0]: [False False False False False False False False False False False False\r\n False False True True]\r\n",,terminal_output +12230,9099635,"TERMINAL",0,0,"6",,terminal_output +12231,9100635,"TERMINAL",0,0,"7",,terminal_output +12232,9101812,"TERMINAL",0,0,"8",,terminal_output 
+12233,9102505,"TERMINAL",0,0,"maskgit-token_idxs[0,:,0]: [151 635 635 635 635 635 635 635 635 635 635 635 635 635 635 635]\r\ntoken_idxs[0,:,0]: [151 635 635 635 635 635 635 635 635 635 635 635 635 635 635 0]\r\ninit_mask[0,:,0]: [False False False False False False False False False False False False\r\n False False False True]\r\n",,terminal_output +12234,9102810,"TERMINAL",0,0,"9",,terminal_output +12235,9103674,"genie.py",0,0,"",python,tab +12236,9103677,"genie.py",9341,0,"",python,selection_mouse +12237,9103788,"genie.py",9339,10,"token_idxs",python,selection_mouse +12238,9103851,"TERMINAL",0,0,"20",,terminal_output +12239,9104714,"genie.py",9366,0,"",python,selection_mouse +12240,9104924,"genie.py",9363,10,"token_idxs",python,selection_mouse +12241,9104925,"TERMINAL",0,0,"1",,terminal_output +12242,9105350,"TERMINAL",0,0,"maskgit-token_idxs[0,:,0]: [151 635 635 635 635 635 635 635 635 635 635 635 635 635 635 635]\r\n",,terminal_output +12243,9105882,"TERMINAL",0,0,"2",,terminal_output +12244,9106902,"TERMINAL",0,0,"3",,terminal_output +12245,9107995,"TERMINAL",0,0,"4",,terminal_output +12246,9109056,"TERMINAL",0,0,"5",,terminal_output +12247,9110080,"TERMINAL",0,0,"60",,terminal_output +12248,9111104,"TERMINAL",0,0,"7",,terminal_output +12249,9112271,"TERMINAL",0,0,"8",,terminal_output +12250,9113145,"TERMINAL",0,0,"SSIM: 0.32501327991485596\r\n",,terminal_output +12251,9113208,"TERMINAL",0,0,"30",,terminal_output +12252,9114208,"TERMINAL",0,0,"1",,terminal_output +12253,9115308,"TERMINAL",0,0,"2",,terminal_output +12254,9115375,"TERMINAL",0,0,"]0;tum_cte0515@hkn0901:~/Projects/jafar[?2004h(jafar) [tum_cte0515@hkn0901 jafar]$ ",,terminal_output +12255,9116301,"TERMINAL",0,0,"3",,terminal_output +12256,9117351,"TERMINAL",0,0,"4",,terminal_output +12257,9118413,"TERMINAL",0,0,"5",,terminal_output +12258,9119501,"TERMINAL",0,0,"6",,terminal_output +12259,9120471,"TERMINAL",0,0,"7",,terminal_output +12260,9121549,"TERMINAL",0,0,"8",,terminal_output +12261,9122573,"TERMINAL",0,0,"9",,terminal_output +12262,9123597,"TERMINAL",0,0,"40",,terminal_output +12263,9124723,"TERMINAL",0,0,"1",,terminal_output +12264,9125748,"TERMINAL",0,0,"2",,terminal_output +12265,9126721,"TERMINAL",0,0,"3",,terminal_output +12266,9127797,"TERMINAL",0,0,"4",,terminal_output +12267,9128863,"genie.py",0,0,"",python,tab +12268,9128864,"genie.py",8854,0,"",python,selection_mouse +12269,9129013,"genie.py",8853,0,"",python,selection_command +12270,9129036,"TERMINAL",0,0,"5",,terminal_output +12271,9129945,"TERMINAL",0,0,"6",,terminal_output +12272,9130977,"TERMINAL",0,0,"7",,terminal_output +12273,9131284,"genie.py",9324,0,"",python,selection_command +12274,9131952,"genie.py",4366,0,"",python,selection_command +12275,9132005,"TERMINAL",0,0,"8",,terminal_output +12276,9132995,"TERMINAL",0,0,"9",,terminal_output +12277,9133277,"genie.py",4354,56," print(""init_mask[0,:,0]:"", init_mask[0,:,0])",python,selection_command +12278,9133479,"genie.py",4295,115," print(""token_idxs[0,:,0]:"", token_idxs[0,:,0])\n print(""init_mask[0,:,0]:"", init_mask[0,:,0])",python,selection_command +12279,9133722,"genie.py",4307,0,"",python,selection_command +12280,9133959,"TERMINAL",0,0,"50",,terminal_output +12281,9134591,"genie.py",4366,0,"#",python,content +12282,9134591,"genie.py",4307,0,"#",python,content +12283,9134592,"genie.py",4308,0,"",python,selection_keyboard +12284,9134731,"genie.py",4307,0,"",python,selection_command +12285,9134863,"genie.py",4308,0,"",python,selection_command 
+12286,9134996,"TERMINAL",0,0,"11",,terminal_output +12287,9135916,"genie.py",4368,0,"",python,selection_command +12288,9136029,"TERMINAL",0,0,"2",,terminal_output +12289,9136331,"genie.py",9326,0,"",python,selection_command +12290,9137074,"TERMINAL",0,0,"3",,terminal_output +12291,9138138,"TERMINAL",0,0,"4",,terminal_output +12292,9139148,"TERMINAL",0,0,"6",,terminal_output +12293,9139914,"genie.py",9272,0,"",python,selection_mouse +12294,9140070,"genie.py",9270,4,"mask",python,selection_mouse +12295,9140176,"TERMINAL",0,0,"70",,terminal_output +12296,9140877,"genie.py",9285,0,"",python,selection_mouse +12297,9141010,"genie.py",9276,18,"sampled_token_idxs",python,selection_mouse +12298,9141219,"TERMINAL",0,0,"8",,terminal_output +12299,9142267,"TERMINAL",0,0,"9",,terminal_output +12300,9143367,"TERMINAL",0,0,"8:00",,terminal_output +12301,9143795,"genie.py",9292,0,"",python,selection_mouse +12302,9143796,"genie.py",9276,18,"sampled_token_idxs",python,selection_mouse +12303,9144350,"TERMINAL",0,0,"1",,terminal_output +12304,9145390,"TERMINAL",0,0,"2",,terminal_output +12305,9145950,"genie.py",9299,0,"",python,selection_mouse +12306,9146082,"genie.py",9296,10,"token_idxs",python,selection_mouse +12307,9146535,"TERMINAL",0,0,"3",,terminal_output +12308,9147515,"TERMINAL",0,0,"4",,terminal_output +12309,9148506,"TERMINAL",0,0,"5",,terminal_output +12310,9149695,"TERMINAL",0,0,"6",,terminal_output +12311,9150868,"TERMINAL",0,0,"7",,terminal_output +12312,9151656,"TERMINAL",0,0,"8",,terminal_output +12313,9152692,"TERMINAL",0,0,"9",,terminal_output +12314,9153729,"TERMINAL",0,0,"10",,terminal_output +12315,9154830,"TERMINAL",0,0,"1",,terminal_output +12316,9155854,"TERMINAL",0,0,"2",,terminal_output +12317,9156878,"TERMINAL",0,0,"3",,terminal_output +12318,9157992,"TERMINAL",0,0,"4",,terminal_output +12319,9159028,"TERMINAL",0,0,"5",,terminal_output +12320,9160053,"TERMINAL",0,0,"6",,terminal_output +12321,9161076,"TERMINAL",0,0,"7",,terminal_output +12322,9162100,"TERMINAL",0,0,"8",,terminal_output +12323,9163132,"TERMINAL",0,0,"9",,terminal_output +12324,9164250,"TERMINAL",0,0,"21",,terminal_output +12325,9165213,"TERMINAL",0,0,"2",,terminal_output +12326,9166275,"TERMINAL",0,0,"31",,terminal_output +12327,9167324,"TERMINAL",0,0,"4",,terminal_output +12328,9168361,"TERMINAL",0,0,"5",,terminal_output +12329,9169366,"TERMINAL",0,0,"60",,terminal_output +12330,9170406,"TERMINAL",0,0,"7",,terminal_output +12331,9171453,"TERMINAL",0,0,"8",,terminal_output +12332,9172494,"TERMINAL",0,0,"91",,terminal_output +12333,9173835,"TERMINAL",0,0,"30",,terminal_output +12334,9174628,"TERMINAL",0,0,"1",,terminal_output +12335,9175718,"TERMINAL",0,0,"2",,terminal_output +12336,9176680,"TERMINAL",0,0,"3",,terminal_output +12337,9177769,"TERMINAL",0,0,"4",,terminal_output +12338,9178793,"TERMINAL",0,0,"5",,terminal_output +12339,9179825,"TERMINAL",0,0,"6",,terminal_output +12340,9180903,"TERMINAL",0,0,"7",,terminal_output +12341,9182022,"TERMINAL",0,0,"8",,terminal_output +12342,9182978,"TERMINAL",0,0,"92",,terminal_output +12343,9184116,"TERMINAL",0,0,"40",,terminal_output +12344,9185140,"TERMINAL",0,0,"1",,terminal_output +12345,9186164,"TERMINAL",0,0,"2",,terminal_output +12346,9187189,"TERMINAL",0,0,"3",,terminal_output +12347,9188178,"TERMINAL",0,0,"5",,terminal_output +12348,9189236,"TERMINAL",0,0,"6",,terminal_output +12349,9190365,"TERMINAL",0,0,"7",,terminal_output +12350,9191315,"TERMINAL",0,0,"8",,terminal_output +12351,9192346,"TERMINAL",0,0,"9",,terminal_output 
+12352,9193415,"TERMINAL",0,0,"50",,terminal_output +12353,9194439,"TERMINAL",0,0,"1",,terminal_output +12354,9195585,"TERMINAL",0,0,"2",,terminal_output +12355,9196612,"TERMINAL",0,0,"3",,terminal_output +12356,9197635,"TERMINAL",0,0,"4",,terminal_output +12357,9198657,"TERMINAL",0,0,"5",,terminal_output +12358,9199682,"TERMINAL",0,0,"6",,terminal_output +12359,9200706,"TERMINAL",0,0,"7",,terminal_output +12360,9201832,"TERMINAL",0,0,"8",,terminal_output +12361,9202856,"TERMINAL",0,0,"9",,terminal_output +12362,9203887,"TERMINAL",0,0,"9:00",,terminal_output +12363,9204911,"TERMINAL",0,0,"13",,terminal_output +12364,9205901,"TERMINAL",0,0,"2",,terminal_output +12365,9206952,"TERMINAL",0,0,"3",,terminal_output +12366,9207977,"TERMINAL",0,0,"4",,terminal_output +12367,9209005,"TERMINAL",0,0,"5",,terminal_output +12368,9210127,"TERMINAL",0,0,"6",,terminal_output +12369,9211150,"TERMINAL",0,0,"7",,terminal_output +12370,9212175,"TERMINAL",0,0,"8",,terminal_output +12371,9213151,"TERMINAL",0,0,"10",,terminal_output +12372,9214223,"TERMINAL",0,0,"1",,terminal_output +12373,9215246,"TERMINAL",0,0,"2",,terminal_output +12374,9216288,"TERMINAL",0,0,"3",,terminal_output +12375,9217306,"TERMINAL",0,0,"4",,terminal_output +12376,9218347,"TERMINAL",0,0,"5",,terminal_output +12377,9219365,"TERMINAL",0,0,"6",,terminal_output +12378,9220399,"TERMINAL",0,0,"7",,terminal_output +12379,9221439,"TERMINAL",0,0,"8",,terminal_output +12380,9222516,"TERMINAL",0,0,"9",,terminal_output +12381,9223507,"TERMINAL",0,0,"20",,terminal_output +12382,9224565,"TERMINAL",0,0,"1",,terminal_output +12383,9225600,"TERMINAL",0,0,"2",,terminal_output +12384,9226613,"TERMINAL",0,0,"3",,terminal_output +12385,9227052,"genie.py",0,0,"",python,tab +12386,9227053,"genie.py",9384,0,"",python,selection_mouse +12387,9227305,"genie.py",9374,0,"",python,selection_mouse +12388,9227684,"TERMINAL",0,0,"4",,terminal_output +12389,9228250,"genie.py",9308,76,"",python,content +12390,9228412,"genie.py",9239,0,"",python,selection_command +12391,9228481,"genie.py",9203,0,"",python,selection_command +12392,9228698,"TERMINAL",0,0,"5",,terminal_output +12393,9229070,"genie.py",9238,0,"\n jax.debug.print(""maskgit-token_idxs[0,:,0]: {}"", token_idxs[0,:,0])",python,content +12394,9229114,"genie.py",9247,0,"",python,selection_command +12395,9229747,"TERMINAL",0,0,"60",,terminal_output +12396,9230651,"genie.py",9372,0,"",python,selection_mouse +12397,9230795,"TERMINAL",0,0,"7",,terminal_output +12398,9231780,"genie.py",9289,0,"",python,selection_mouse +12399,9231840,"TERMINAL",0,0,"8",,terminal_output +12400,9232328,"genie.py",9239,76,"",python,content +12401,9232402,"genie.py",9247,0,"",python,selection_command +12402,9232647,"genie.py",9307,0,"\n jax.debug.print(""maskgit-token_idxs[0,:,0]: {}"", token_idxs[0,:,0])",python,content +12403,9232658,"genie.py",9316,0,"",python,selection_command +12404,9232902,"TERMINAL",0,0,"9",,terminal_output +12405,9233877,"genie.py",9247,0,"",python,selection_command +12406,9233923,"TERMINAL",0,0,"30",,terminal_output +12407,9234040,"genie.py",9211,0,"",python,selection_command +12408,9234319,"genie.py",9238,0,"\n jax.debug.print(""maskgit-token_idxs[0,:,0]: {}"", token_idxs[0,:,0])",python,content +12409,9234378,"genie.py",9247,0,"",python,selection_command +12410,9234755,"genie.py",9248,0,"",python,selection_command +12411,9234933,"genie.py",9249,0,"",python,selection_command +12412,9234972,"TERMINAL",0,0,"1",,terminal_output +12413,9235067,"genie.py",9250,0,"",python,selection_command 
+12414,9235560,"genie.py",9251,0,"",python,selection_command +12415,9235650,"genie.py",9252,0,"",python,selection_command +12416,9235751,"genie.py",9253,0,"",python,selection_command +12417,9235752,"genie.py",9254,0,"",python,selection_command +12418,9235752,"genie.py",9255,0,"",python,selection_command +12419,9235821,"genie.py",9256,0,"",python,selection_command +12420,9235821,"genie.py",9257,0,"",python,selection_command +12421,9235822,"genie.py",9258,0,"",python,selection_command +12422,9235880,"genie.py",9259,0,"",python,selection_command +12423,9235880,"genie.py",9260,0,"",python,selection_command +12424,9235914,"genie.py",9261,0,"",python,selection_command +12425,9235914,"genie.py",9262,0,"",python,selection_command +12426,9235989,"TERMINAL",0,0,"2",,terminal_output +12427,9236001,"genie.py",9263,0,"",python,selection_command +12428,9236186,"genie.py",9264,0,"",python,selection_command +12429,9236918,"genie.py",9265,0,"",python,selection_command +12430,9237019,"TERMINAL",0,0,"3",,terminal_output +12431,9237098,"genie.py",9266,0,"",python,selection_command +12432,9237218,"genie.py",9267,0,"",python,selection_command +12433,9237362,"genie.py",9268,0,"",python,selection_command +12434,9237500,"genie.py",9269,0,"",python,selection_command +12435,9237640,"genie.py",9270,0,"",python,selection_command +12436,9237801,"genie.py",9271,0,"",python,selection_command +12437,9237919,"genie.py",9272,0,"",python,selection_command +12438,9238052,"TERMINAL",0,0,"4",,terminal_output +12439,9238456,"genie.py",9272,0,"s",python,content +12440,9238457,"genie.py",9273,0,"",python,selection_keyboard +12441,9238623,"genie.py",9273,0,"a",python,content +12442,9238624,"genie.py",9274,0,"",python,selection_keyboard +12443,9238714,"genie.py",9274,0,"m",python,content +12444,9238714,"genie.py",9275,0,"",python,selection_keyboard +12445,9238876,"genie.py",9275,0,"p",python,content +12446,9238877,"genie.py",9276,0,"",python,selection_keyboard +12447,9238938,"genie.py",9276,0,"l",python,content +12448,9238940,"genie.py",9277,0,"",python,selection_keyboard +12449,9239006,"genie.py",9277,0,"e",python,content +12450,9239007,"genie.py",9278,0,"",python,selection_keyboard +12451,9239104,"TERMINAL",0,0,"5",,terminal_output +12452,9239159,"genie.py",9278,0,"-",python,content +12453,9239160,"genie.py",9279,0,"",python,selection_keyboard +12454,9239644,"genie.py",9278,1,"",python,content +12455,9239847,"genie.py",9278,0,"d",python,content +12456,9239849,"genie.py",9279,0,"",python,selection_keyboard +12457,9239918,"genie.py",9279,0,"-",python,content +12458,9239919,"genie.py",9280,0,"",python,selection_keyboard +12459,9240135,"TERMINAL",0,0,"6",,terminal_output +12460,9240243,"genie.py",9279,0,"",python,selection_command +12461,9241333,"TERMINAL",0,0,"8",,terminal_output +12462,9241905,"TERMINAL",0,0,"sh scripts_horeka/overfit_sample_tiny/sample.sh ",,terminal_output +12463,9242301,"TERMINAL",0,0,"9",,terminal_output +12464,9242651,"TERMINAL",0,0,"\r\n[?2004l\r",,terminal_output +12465,9242652,"TERMINAL",0,0,"Sampling from checkpoint: /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/3309699/genie_1751384516_74500/\r\n",,terminal_output +12466,9243285,"TERMINAL",0,0,"40",,terminal_output +12467,9244063,"genie.py",0,0,"",python,tab +12468,9244063,"genie.py",9498,0,"",python,selection_mouse +12469,9244170,"genie.py",9497,0,"",python,selection_command +12470,9244323,"TERMINAL",0,0,"1",,terminal_output +12471,9244586,"genie.py",9609,0,"",python,selection_mouse 
+12472,9245174,"genie.py",9597,0,"",python,selection_mouse +12473,9245346,"TERMINAL",0,0,"2",,terminal_output +12474,9245563,"TERMINAL",0,0,"2025-07-03 18:49:42.459979: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +12475,9246020,"genie.py",9506,0,"",python,selection_mouse +12476,9246240,"genie.py",9506,81," num_unmasked_tokens = jnp.round(N * (1.0 - unmasked_ratio)).astype(int)\n ",python,selection_mouse +12477,9246240,"genie.py",9506,167," num_unmasked_tokens = jnp.round(N * (1.0 - unmasked_ratio)).astype(int)\n idx_mask = jnp.arange(final_token_probs.shape[-1]) > num_unmasked_tokens\n sorte",python,selection_mouse +12478,9246241,"genie.py",9506,255," num_unmasked_tokens = jnp.round(N * (1.0 - unmasked_ratio)).astype(int)\n idx_mask = jnp.arange(final_token_probs.shape[-1]) > num_unmasked_tokens\n sorted_idxs = jnp.argsort(final_token_probs, axis=-1, descending=True)\n mask_update_fn",python,selection_mouse +12479,9246241,"genie.py",9506,343," num_unmasked_tokens = jnp.round(N * (1.0 - unmasked_ratio)).astype(int)\n idx_mask = jnp.arange(final_token_probs.shape[-1]) > num_unmasked_tokens\n sorted_idxs = jnp.argsort(final_token_probs, axis=-1, descending=True)\n mask_update_fn = jax.vmap(lambda msk, ids: msk.at[ids].set(idx_mask))\n new_mask = mask_update_f",python,selection_mouse +12480,9246266,"genie.py",9506,349," num_unmasked_tokens = jnp.round(N * (1.0 - unmasked_ratio)).astype(int)\n idx_mask = jnp.arange(final_token_probs.shape[-1]) > num_unmasked_tokens\n sorted_idxs = jnp.argsort(final_token_probs, axis=-1, descending=True)\n mask_update_fn = jax.vmap(lambda msk, ids: msk.at[ids].set(idx_mask))\n new_mask = mask_update_fn(mask",python,selection_mouse +12481,9246312,"genie.py",9506,357," num_unmasked_tokens = jnp.round(N * (1.0 - unmasked_ratio)).astype(int)\n idx_mask = jnp.arange(final_token_probs.shape[-1]) > num_unmasked_tokens\n sorted_idxs = jnp.argsort(final_token_probs, axis=-1, descending=True)\n mask_update_fn = jax.vmap(lambda msk, ids: msk.at[ids].set(idx_mask))\n new_mask = mask_update_fn(mask, sorted",python,selection_mouse +12482,9246348,"genie.py",9506,363," num_unmasked_tokens = jnp.round(N * (1.0 - unmasked_ratio)).astype(int)\n idx_mask = jnp.arange(final_token_probs.shape[-1]) > num_unmasked_tokens\n sorted_idxs = jnp.argsort(final_token_probs, axis=-1, descending=True)\n mask_update_fn = jax.vmap(lambda msk, ids: msk.at[ids].set(idx_mask))\n new_mask = mask_update_fn(mask, sorted_idxs)",python,selection_mouse +12483,9246465,"TERMINAL",0,0,"3",,terminal_output +12484,9247432,"TERMINAL",0,0,"4",,terminal_output +12485,9248476,"TERMINAL",0,0,"5",,terminal_output +12486,9249551,"TERMINAL",0,0,"6",,terminal_output +12487,9250575,"TERMINAL",0,0,"7",,terminal_output +12488,9251702,"TERMINAL",0,0,"8",,terminal_output +12489,9252725,"TERMINAL",0,0,"9",,terminal_output +12490,9253777,"TERMINAL",0,0,"50",,terminal_output +12491,9254773,"TERMINAL",0,0,"1",,terminal_output +12492,9255796,"TERMINAL",0,0,"2",,terminal_output +12493,9256821,"TERMINAL",0,0,"3",,terminal_output +12494,9257981,"TERMINAL",0,0,"4",,terminal_output +12495,9258501,"TERMINAL",0,0,"2025-07-03 18:49:55.401505: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match 
the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +12496,9258869,"TERMINAL",0,0,"5",,terminal_output +12497,9260302,"TERMINAL",0,0,"6",,terminal_output +12498,9261328,"TERMINAL",0,0,"8",,terminal_output +12499,9262350,"TERMINAL",0,0,"9",,terminal_output +12500,9263422,"TERMINAL",0,0,"50:00",,terminal_output +12501,9263939,"genie.py",7840,0,"",python,selection_mouse +12502,9264053,"genie.py",7835,10,"token_idxs",python,selection_mouse +12503,9264434,"TERMINAL",0,0,"1",,terminal_output +12504,9265476,"TERMINAL",0,0,"2",,terminal_output +12505,9266480,"TERMINAL",0,0,"3",,terminal_output +12506,9267296,"genie.py",7800,0,"",python,selection_mouse +12507,9267461,"genie.py",7797,9,"vid_embed",python,selection_mouse +12508,9267565,"TERMINAL",0,0,"4",,terminal_output +12509,9268598,"TERMINAL",0,0,"5",,terminal_output +12510,9269019,"TERMINAL",0,0,"2025-07-03 18:50:05.831658: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +12511,9269620,"TERMINAL",0,0,"6",,terminal_output +12512,9269713,"genie.py",8214,0,"",python,selection_mouse +12513,9269897,"genie.py",8209,9,"vid_embed",python,selection_mouse +12514,9270646,"TERMINAL",0,0,"72",,terminal_output +12515,9271135,"genie.py",8192,0,"",python,selection_mouse +12516,9271285,"genie.py",8182,13,"mask_expanded",python,selection_mouse +12517,9271628,"TERMINAL",0,0,"8",,terminal_output +12518,9272536,"genie.py",8202,0,"",python,selection_mouse +12519,9272700,"TERMINAL",0,0,"9",,terminal_output +12520,9272761,"genie.py",8197,10,"mask_token",python,selection_mouse +12521,9273200,"genie.py",8216,0,"",python,selection_mouse +12522,9273370,"genie.py",8209,9,"vid_embed",python,selection_mouse +12523,9273746,"TERMINAL",0,0,"10",,terminal_output +12524,9274064,"genie.py",8164,0,"",python,selection_mouse +12525,9274192,"genie.py",8160,9,"vid_embed",python,selection_mouse +12526,9274843,"TERMINAL",0,0,"1",,terminal_output +12527,9275868,"TERMINAL",0,0,"2",,terminal_output +12528,9276892,"TERMINAL",0,0,"3",,terminal_output +12529,9277886,"TERMINAL",0,0,"4",,terminal_output +12530,9278919,"TERMINAL",0,0,"5",,terminal_output +12531,9280069,"TERMINAL",0,0,"6",,terminal_output +12532,9281091,"TERMINAL",0,0,"7",,terminal_output +12533,9282012,"TERMINAL",0,0,"2025-07-03 18:50:18.895593: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +12534,9282075,"TERMINAL",0,0,"8",,terminal_output +12535,9283138,"TERMINAL",0,0,"9",,terminal_output +12536,9284163,"TERMINAL",0,0,"21",,terminal_output +12537,9284675,"TERMINAL",0,0,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/orbax/checkpoint/_src/serialization/type_handlers.py:1251: UserWarning: Sharding info not provided when restoring. Populating sharding info from sharding file. Please note restoration time will be slightly increased due to reading from file. 
Note also that this option is unsafe when restoring on a different topology than the checkpoint was saved with.\r\n warnings.warn(\r\n",,terminal_output +12538,9285185,"TERMINAL",0,0,"2",,terminal_output +12539,9286324,"TERMINAL",0,0,"3",,terminal_output +12540,9287269,"TERMINAL",0,0,"4",,terminal_output +12541,9288326,"TERMINAL",0,0,"5",,terminal_output +12542,9289385,"TERMINAL",0,0,"6",,terminal_output +12543,9290410,"TERMINAL",0,0,"7",,terminal_output +12544,9291436,"TERMINAL",0,0,"8",,terminal_output +12545,9292486,"TERMINAL",0,0,"9",,terminal_output +12546,9293304,"TERMINAL",0,0,"2025-07-03 18:50:30.173103: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +12547,9293583,"TERMINAL",0,0,"30",,terminal_output +12548,9294580,"TERMINAL",0,0,"1",,terminal_output +12549,9295544,"genie.py",8584,0,"",python,selection_mouse +12550,9295654,"TERMINAL",0,0,"2",,terminal_output +12551,9295720,"genie.py",8581,4,")[:,",python,selection_mouse +12552,9295875,"genie.py",8526,76," final_logits = self.dynamics.dynamics(vid_embed)[:, -1] / step_temp\n",python,selection_mouse +12553,9296245,"TERMINAL",0,0,"2025-07-03 18:50:33.146945: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +12554,9296704,"TERMINAL",0,0,"3",,terminal_output +12555,9296786,"genie.py",8574,0,"",python,selection_mouse +12556,9297412,"genie.py",8544,0,"",python,selection_mouse +12557,9297586,"genie.py",8534,12,"final_logits",python,selection_mouse +12558,9297738,"TERMINAL",0,0,"4",,terminal_output +12559,9298804,"TERMINAL",0,0,"5",,terminal_output +12560,9299794,"genie.py",8575,0,"",python,selection_mouse +12561,9299893,"TERMINAL",0,0,"6",,terminal_output +12562,9300868,"TERMINAL",0,0,"7",,terminal_output +12563,9301816,"genie.py",8602,0,"",python,selection_mouse +12564,9301901,"TERMINAL",0,0,"8",,terminal_output +12565,9302769,"TERMINAL",0,0,"2025-07-03 18:50:39.671446: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +12566,9302991,"TERMINAL",0,0,"9",,terminal_output +12567,9304028,"TERMINAL",0,0,"40",,terminal_output +12568,9304950,"TERMINAL",0,0,"2025-07-03 18:50:41.767967: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. 
Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +12569,9305058,"TERMINAL",0,0,"1",,terminal_output +12570,9306178,"TERMINAL",0,0,"2",,terminal_output +12571,9306286,"TERMINAL",0,0,"token_idxs shape: (1, 1, 920)\r\n",,terminal_output +12572,9307202,"TERMINAL",0,0,"3",,terminal_output +12573,9308178,"TERMINAL",0,0,"5",,terminal_output +12574,9308673,"genie.py",8578,0,"",python,selection_mouse +12575,9309250,"TERMINAL",0,0,"6",,terminal_output +12576,9310275,"TERMINAL",0,0,"7",,terminal_output +12577,9311402,"TERMINAL",0,0,"8",,terminal_output +12578,9312322,"TERMINAL",0,0,"maskgit-sampled-token_idxs[0,:,0]: [151 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0]\r\nmaskgit-token_idxs[0,:,0]: [151 635 635 635 635 635 635 635 635 635 635 635 635 635 635 635]\r\n",,terminal_output +12579,9312348,"TERMINAL",0,0,"9",,terminal_output +12580,9313425,"TERMINAL",0,0,"50",,terminal_output +12581,9314439,"TERMINAL",0,0,"1",,terminal_output +12582,9314985,"TERMINAL",0,0,"maskgit-sampled-token_idxs[0,:,0]: [151 635 0 0 0 0 0 0 0 0 0 0 0 0 0 0]\r\nmaskgit-token_idxs[0,:,0]: [151 635 635 635 635 635 635 635 635 635 635 635 635 635 635 635]\r\n",,terminal_output +12583,9315484,"TERMINAL",0,0,"2",,terminal_output +12584,9316626,"TERMINAL",0,0,"3",,terminal_output +12585,9317654,"TERMINAL",0,0,"4",,terminal_output +12586,9317654,"TERMINAL",0,0,"maskgit-sampled-token_idxs[0,:,0]: [151 635 635 0 0 0 0 0 0 0 0 0 0 0 0 0]\r\nmaskgit-token_idxs[0,:,0]: [151 635 635 635 635 635 635 635 635 635 635 635 635 635 635 635]\r\n",,terminal_output +12587,9318672,"TERMINAL",0,0,"5",,terminal_output +12588,9319661,"TERMINAL",0,0,"6",,terminal_output +12589,9320331,"TERMINAL",0,0,"maskgit-sampled-token_idxs[0,:,0]: [151 635 635 635 0 0 0 0 0 0 0 0 0 0 0 0]\r\nmaskgit-token_idxs[0,:,0]: [151 635 635 635 635 635 635 635 635 635 635 635 635 635 635 635]\r\n",,terminal_output +12590,9320719,"TERMINAL",0,0,"7",,terminal_output +12591,9321748,"TERMINAL",0,0,"8",,terminal_output +12592,9322869,"TERMINAL",0,0,"9",,terminal_output +12593,9322924,"TERMINAL",0,0,"maskgit-sampled-token_idxs[0,:,0]: [151 635 635 635 635 0 0 0 0 0 0 0 0 0 0 0]\r\nmaskgit-token_idxs[0,:,0]: [151 635 635 635 635 635 635 635 635 635 635 635 635 635 635 635]\r\n",,terminal_output +12594,9323895,"TERMINAL",0,0,"1:00",,terminal_output +12595,9324919,"TERMINAL",0,0,"1",,terminal_output +12596,9325718,"TERMINAL",0,0,"maskgit-sampled-token_idxs[0,:,0]: [151 635 635 635 635 635 0 0 0 0 0 0 0 0 0 0]\r\nmaskgit-token_idxs[0,:,0]: [151 635 635 635 635 635 635 635 635 635 635 635 635 635 635 635]\r\n",,terminal_output +12597,9325889,"TERMINAL",0,0,"2",,terminal_output +12598,9326967,"TERMINAL",0,0,"3",,terminal_output +12599,9328005,"TERMINAL",0,0,"4",,terminal_output +12600,9328401,"TERMINAL",0,0,"maskgit-sampled-token_idxs[0,:,0]: [151 635 635 635 635 635 635 0 0 0 0 0 0 0 0 0]\r\nmaskgit-token_idxs[0,:,0]: [151 635 635 635 635 635 635 635 635 635 635 635 635 635 635 635]\r\n",,terminal_output +12601,9329116,"TERMINAL",0,0,"5",,terminal_output +12602,9330150,"TERMINAL",0,0,"6",,terminal_output +12603,9331165,"TERMINAL",0,0,"maskgit-sampled-token_idxs[0,:,0]: [151 635 635 635 635 635 635 635 0 0 0 0 0 0 0 0]\r\nmaskgit-token_idxs[0,:,0]: [151 635 635 635 635 635 635 635 635 635 635 635 635 635 635 635]\r\n",,terminal_output +12604,9331166,"TERMINAL",0,0,"7",,terminal_output +12605,9332203,"TERMINAL",0,0,"9",,terminal_output +12606,9333327,"TERMINAL",0,0,"10",,terminal_output 
+12607,9333894,"TERMINAL",0,0,"maskgit-sampled-token_idxs[0,:,0]: [151 635 635 635 635 635 635 635 635 0 0 0 0 0 0 0]\r\nmaskgit-token_idxs[0,:,0]: [151 635 635 635 635 635 635 635 635 635 635 635 635 635 635 635]\r\n",,terminal_output +12608,9334348,"TERMINAL",0,0,"1",,terminal_output +12609,9335363,"TERMINAL",0,0,"2",,terminal_output +12610,9336398,"TERMINAL",0,0,"3",,terminal_output +12611,9336519,"TERMINAL",0,0,"maskgit-sampled-token_idxs[0,:,0]: [151 635 635 635 635 635 635 635 635 635 0 0 0 0 0 0]\r\nmaskgit-token_idxs[0,:,0]: [151 635 635 635 635 635 635 635 635 635 635 635 635 635 635 635]\r\n",,terminal_output +12612,9337414,"TERMINAL",0,0,"4",,terminal_output +12613,9338428,"TERMINAL",0,0,"5",,terminal_output +12614,9339255,"TERMINAL",0,0,"maskgit-sampled-token_idxs[0,:,0]: [151 635 635 635 635 635 635 635 635 635 635 0 0 0 0 0]\r\nmaskgit-token_idxs[0,:,0]: [151 635 635 635 635 635 635 635 635 635 635 635 635 635 635 635]\r\n",,terminal_output +12615,9339441,"TERMINAL",0,0,"6",,terminal_output +12616,9340486,"TERMINAL",0,0,"7",,terminal_output +12617,9341539,"TERMINAL",0,0,"8",,terminal_output +12618,9341917,"TERMINAL",0,0,"maskgit-sampled-token_idxs[0,:,0]: [151 635 635 635 635 635 635 635 635 635 635 635 0 0 0 0]\r\nmaskgit-token_idxs[0,:,0]: [151 635 635 635 635 635 635 635 635 635 635 635 635 635 635 635]\r\n",,terminal_output +12619,9342633,"TERMINAL",0,0,"9",,terminal_output +12620,9343658,"TERMINAL",0,0,"20",,terminal_output +12621,9344705,"TERMINAL",0,0,"1",,terminal_output +12622,9344744,"TERMINAL",0,0,"maskgit-sampled-token_idxs[0,:,0]: [151 635 635 635 635 635 635 635 635 635 635 635 635 0 0 0]\r\nmaskgit-token_idxs[0,:,0]: [151 635 635 635 635 635 635 635 635 635 635 635 635 635 635 635]\r\n",,terminal_output +12623,9345526,"genie.py",9354,0,"",python,selection_mouse +12624,9345695,"TERMINAL",0,0,"2",,terminal_output +12625,9346129,"genie.py",9367,0,"",python,selection_mouse +12626,9346301,"genie.py",9360,18,"sampled_token_idxs",python,selection_mouse +12627,9346737,"TERMINAL",0,0,"3",,terminal_output +12628,9347168,"genie.py",9385,0,"",python,selection_mouse +12629,9347320,"genie.py",9380,10,"token_idxs",python,selection_mouse +12630,9347419,"TERMINAL",0,0,"maskgit-sampled-token_idxs[0,:,0]: [151 635 635 635 635 635 635 635 635 635 635 635 635 635 0 0]\r\nmaskgit-token_idxs[0,:,0]: [151 635 635 635 635 635 635 635 635 635 635 635 635 635 635 635]\r\n",,terminal_output +12631,9347856,"TERMINAL",0,0,"4",,terminal_output +12632,9348880,"TERMINAL",0,0,"5",,terminal_output +12633,9349913,"TERMINAL",0,0,"600",,terminal_output +12634,9350085,"TERMINAL",0,0,"maskgit-sampled-token_idxs[0,:,0]: [151 635 635 635 635 635 635 635 635 635 635 635 635 635 635 0]\r\nmaskgit-token_idxs[0,:,0]: [151 635 635 635 635 635 635 635 635 635 635 635 635 635 635 635]\r\n",,terminal_output +12635,9350928,"TERMINAL",0,0,"7",,terminal_output +12636,9352065,"TERMINAL",0,0,"8",,terminal_output +12637,9352991,"TERMINAL",0,0,"9",,terminal_output +12638,9354103,"TERMINAL",0,0,"30",,terminal_output +12639,9355127,"TERMINAL",0,0,"1",,terminal_output +12640,9356162,"TERMINAL",0,0,"2",,terminal_output +12641,9357176,"TERMINAL",0,0,"3",,terminal_output +12642,9357851,"TERMINAL",0,0,"SSIM: 0.3250111937522888\r\n",,terminal_output +12643,9358199,"TERMINAL",0,0,"5",,terminal_output +12644,9359337,"TERMINAL",0,0,"6",,terminal_output +12645,9360263,"TERMINAL",0,0,"]0;tum_cte0515@hkn0901:~/Projects/jafar[?2004h(jafar) [tum_cte0515@hkn0901 jafar]$ ",,terminal_output 
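The interleaved "maskgit-sampled-token_idxs" / "maskgit-token_idxs" prints above show MaskGIT-style iterative decoding at work: one additional token position is committed per step (the zeros are still-masked slots), the committed prefix matches the target run of 635s, and the pass ends with the "SSIM: 0.3250..." line. Below is a minimal sketch of such a decode loop; the function name, the confidence rule, and the one-position-per-step schedule are illustrative assumptions, not the recorded genie.py implementation.

```python
# Minimal MaskGIT-style decode loop, mirroring the "maskgit-*" prints above.
# All names and the one-position-per-step schedule are assumptions.
import numpy as np

rng = np.random.default_rng(0)

def maskgit_decode(logits_fn, seq_len, steps):
    """Commit the most confident still-masked position on each step."""
    tokens = np.zeros(seq_len, dtype=np.int64)    # 0 = still masked, as in the log
    committed = np.zeros(seq_len, dtype=bool)
    for _ in range(steps):
        logits = logits_fn(tokens)                # (seq_len, vocab)
        probs = np.exp(logits - logits.max(-1, keepdims=True))
        probs /= probs.sum(-1, keepdims=True)
        sampled = np.array([rng.choice(len(p), p=p) for p in probs])
        conf = probs[np.arange(seq_len), sampled]
        conf[committed] = -np.inf                 # never revisit committed slots
        pick = int(conf.argmax())
        tokens[pick] = sampled[pick]
        committed[pick] = True
    return tokens

# Dummy model that strongly prefers token 635, mimicking the recorded run.
vocab = 1024
print(maskgit_decode(lambda t: np.tile(np.eye(vocab)[635] * 8.0, (16, 1)), 16, 16))
```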
+12646,9360283,"TERMINAL",0,0,"7",,terminal_output +12647,9361381,"TERMINAL",0,0,"8",,terminal_output +12648,9362405,"TERMINAL",0,0,"9",,terminal_output +12649,9363432,"TERMINAL",0,0,"40",,terminal_output +12650,9364447,"TERMINAL",0,0,"1",,terminal_output +12651,9364727,"genie.py",8587,0,"",python,selection_mouse +12652,9365480,"TERMINAL",0,0,"2",,terminal_output +12653,9366526,"TERMINAL",0,0,"3",,terminal_output +12654,9366938,"genie.py",8588,0,"",python,selection_mouse +12655,9367618,"TERMINAL",0,0,"4",,terminal_output +12656,9368015,"genie.py",8602,0,"",python,selection_mouse +12657,9368647,"genie.py",8581,0,"",python,selection_mouse +12658,9368660,"TERMINAL",0,0,"5",,terminal_output +12659,9369630,"genie.py",8582,0,"",python,selection_mouse +12660,9369683,"TERMINAL",0,0,"6",,terminal_output +12661,9369921,"genie.py",8582,1,"[",python,selection_mouse +12662,9369976,"genie.py",8582,2,"[:",python,selection_mouse +12663,9370058,"genie.py",8582,3,"[:,",python,selection_mouse +12664,9370059,"genie.py",8582,4,"[:, ",python,selection_mouse +12665,9370123,"genie.py",8582,5,"[:, -",python,selection_mouse +12666,9370193,"genie.py",8582,6,"[:, -1",python,selection_mouse +12667,9370421,"genie.py",8582,7,"[:, -1]",python,selection_mouse +12668,9370691,"TERMINAL",0,0,"7",,terminal_output +12669,9371640,"genie.py",8582,7,"",python,content +12670,9371776,"TERMINAL",0,0,"822",,terminal_output +12671,9372774,"TERMINAL",0,0,"9",,terminal_output +12672,9373757,"genie.py",8540,0,"",python,selection_mouse +12673,9373843,"TERMINAL",0,0,"50",,terminal_output +12674,9373951,"genie.py",8534,12,"final_logits",python,selection_mouse +12675,9374890,"TERMINAL",0,0,"1",,terminal_output +12676,9375914,"TERMINAL",0,0,"2",,terminal_output +12677,9376938,"TERMINAL",0,0,"3",,terminal_output +12678,9378029,"TERMINAL",0,0,"4",,terminal_output +12679,9379089,"TERMINAL",0,0,"5",,terminal_output +12680,9380113,"TERMINAL",0,0,"6",,terminal_output +12681,9381161,"TERMINAL",0,0,"8",,terminal_output +12682,9382246,"TERMINAL",0,0,"9",,terminal_output +12683,9383287,"TERMINAL",0,0,"2:00",,terminal_output +12684,9384312,"TERMINAL",0,0,"1",,terminal_output +12685,9385334,"TERMINAL",0,0,"2",,terminal_output +12686,9386360,"TERMINAL",0,0,"3",,terminal_output +12687,9387485,"TERMINAL",0,0,"4",,terminal_output +12688,9388457,"TERMINAL",0,0,"5",,terminal_output +12689,9389535,"TERMINAL",0,0,"6",,terminal_output +12690,9390562,"TERMINAL",0,0,"7",,terminal_output +12691,9391549,"TERMINAL",0,0,"8",,terminal_output +12692,9392594,"TERMINAL",0,0,"9",,terminal_output +12693,9393732,"TERMINAL",0,0,"10",,terminal_output +12694,9394456,"genie.py",9059,0,"",python,selection_mouse +12695,9394552,"genie.py",9053,6,"lambda",python,selection_mouse +12696,9394707,"TERMINAL",0,0,"1",,terminal_output +12697,9395406,"genie.py",9129,0,"",python,selection_mouse +12698,9395522,"genie.py",9126,12,"final_logits",python,selection_mouse +12699,9395725,"TERMINAL",0,0,"2",,terminal_output +12700,9396803,"TERMINAL",0,0,"3",,terminal_output +12701,9397028,"genie.py",9092,0,"",python,selection_mouse +12702,9397143,"genie.py",9081,17,"final_token_probs",python,selection_mouse +12703,9397792,"TERMINAL",0,0,"4",,terminal_output +12704,9398853,"TERMINAL",0,0,"5",,terminal_output +12705,9399980,"TERMINAL",0,0,"6",,terminal_output +12706,9400940,"genie.py",9180,0,"",python,selection_mouse +12707,9401017,"TERMINAL",0,0,"7",,terminal_output +12708,9401065,"genie.py",9169,17,"final_token_probs",python,selection_mouse +12709,9401936,"genie.py",9036,0,"",python,selection_mouse 
+12710,9402085,"TERMINAL",0,0,"8",,terminal_output +12711,9402497,"genie.py",9094,0,"",python,selection_mouse +12712,9402683,"genie.py",9081,17,"final_token_probs",python,selection_mouse +12713,9403011,"TERMINAL",0,0,"9",,terminal_output +12714,9403281,"genie.py",9182,0,"",python,selection_mouse +12715,9403426,"genie.py",9169,17,"final_token_probs",python,selection_mouse +12716,9404075,"TERMINAL",0,0,"20",,terminal_output +12717,9405098,"TERMINAL",0,0,"1",,terminal_output +12718,9406188,"TERMINAL",0,0,"2",,terminal_output +12719,9406416,"genie.py",8704,0,"",python,selection_mouse +12720,9406554,"genie.py",8691,18,"sampled_token_idxs",python,selection_mouse +12721,9407249,"TERMINAL",0,0,"4",,terminal_output +12722,9408273,"TERMINAL",0,0,"5",,terminal_output +12723,9409312,"TERMINAL",0,0,"6",,terminal_output +12724,9410306,"TERMINAL",0,0,"7",,terminal_output +12725,9411361,"TERMINAL",0,0,"8",,terminal_output +12726,9412405,"TERMINAL",0,0,"9",,terminal_output +12727,9413513,"TERMINAL",0,0,"30",,terminal_output +12728,9414549,"TERMINAL",0,0,"1",,terminal_output +12729,9415601,"TERMINAL",0,0,"2",,terminal_output +12730,9416566,"TERMINAL",0,0,"3",,terminal_output +12731,9417608,"TERMINAL",0,0,"4",,terminal_output +12732,9418718,"TERMINAL",0,0,"5",,terminal_output +12733,9419011,"TERMINAL",0,0,"sh scripts_horeka/overfit_sample_tiny/sample.sh ",,terminal_output +12734,9419742,"TERMINAL",0,0,"6",,terminal_output +12735,9420151,"TERMINAL",0,0,"\r\n[?2004l\r",,terminal_output +12736,9420270,"TERMINAL",0,0,"Sampling from checkpoint: /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/3309699/genie_1751384516_74500/\r\n",,terminal_output +12737,9420765,"TERMINAL",0,0,"7",,terminal_output +12738,9421791,"TERMINAL",0,0,"8",,terminal_output +12739,9422825,"TERMINAL",0,0,"9",,terminal_output +12740,9423225,"TERMINAL",0,0,"2025-07-03 18:52:40.051877: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +12741,9423874,"TERMINAL",0,0,"40",,terminal_output +12742,9424965,"TERMINAL",0,0,"1",,terminal_output +12743,9425989,"TERMINAL",0,0,"2",,terminal_output +12744,9427117,"TERMINAL",0,0,"3",,terminal_output +12745,9428046,"TERMINAL",0,0,"4",,terminal_output +12746,9429163,"TERMINAL",0,0,"5",,terminal_output +12747,9430145,"TERMINAL",0,0,"6",,terminal_output +12748,9431212,"TERMINAL",0,0,"8",,terminal_output +12749,9432235,"TERMINAL",0,0,"9",,terminal_output +12750,9433361,"TERMINAL",0,0,"50",,terminal_output +12751,9434385,"TERMINAL",0,0,"1",,terminal_output +12752,9435342,"TERMINAL",0,0,"23",,terminal_output +12753,9435819,"TERMINAL",0,0,"2025-07-03 18:52:52.663024: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. 
Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +12754,9436434,"TERMINAL",0,0,"3",,terminal_output +12755,9437457,"TERMINAL",0,0,"4",,terminal_output +12756,9438487,"TERMINAL",0,0,"5",,terminal_output +12757,9439523,"TERMINAL",0,0,"6",,terminal_output +12758,9440572,"TERMINAL",0,0,"7",,terminal_output +12759,9441612,"TERMINAL",0,0,"8",,terminal_output +12760,9442659,"TERMINAL",0,0,"9",,terminal_output +12761,9443705,"TERMINAL",0,0,"3:00",,terminal_output +12762,9444831,"TERMINAL",0,0,"1",,terminal_output +12763,9445780,"TERMINAL",0,0,"2",,terminal_output +12764,9446099,"TERMINAL",0,0,"2025-07-03 18:53:02.994746: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +12765,9446880,"TERMINAL",0,0,"3",,terminal_output +12766,9447903,"TERMINAL",0,0,"4",,terminal_output +12767,9448927,"TERMINAL",0,0,"5",,terminal_output +12768,9449966,"TERMINAL",0,0,"6",,terminal_output +12769,9450974,"TERMINAL",0,0,"7",,terminal_output +12770,9452109,"TERMINAL",0,0,"8",,terminal_output +12771,9453034,"TERMINAL",0,0,"9",,terminal_output +12772,9454149,"TERMINAL",0,0,"10",,terminal_output +12773,9455174,"TERMINAL",0,0,"1",,terminal_output +12774,9456198,"TERMINAL",0,0,"3",,terminal_output +12775,9457221,"TERMINAL",0,0,"4",,terminal_output +12776,9458245,"TERMINAL",0,0,"5",,terminal_output +12777,9459269,"TERMINAL",0,0,"6",,terminal_output +12778,9459270,"TERMINAL",0,0,"2025-07-03 18:53:16.163394: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +12779,9460396,"TERMINAL",0,0,"7",,terminal_output +12780,9461419,"TERMINAL",0,0,"8",,terminal_output +12781,9461915,"TERMINAL",0,0,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/orbax/checkpoint/_src/serialization/type_handlers.py:1251: UserWarning: Sharding info not provided when restoring. Populating sharding info from sharding file. Please note restoration time will be slightly increased due to reading from file. Note also that this option is unsafe when restoring on a different topology than the checkpoint was saved with.\r\n warnings.warn(\r\n",,terminal_output +12782,9462444,"TERMINAL",0,0,"9",,terminal_output +12783,9463511,"TERMINAL",0,0,"20",,terminal_output +12784,9464459,"TERMINAL",0,0,"1",,terminal_output +12785,9465534,"TERMINAL",0,0,"2",,terminal_output +12786,9466548,"TERMINAL",0,0,"3",,terminal_output +12787,9467584,"TERMINAL",0,0,"47",,terminal_output +12788,9468630,"TERMINAL",0,0,"5",,terminal_output +12789,9469657,"TERMINAL",0,0,"66",,terminal_output +12790,9470285,"TERMINAL",0,0,"2025-07-03 18:53:27.185734: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. 
Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +12791,9470738,"TERMINAL",0,0,"7",,terminal_output +12792,9471761,"TERMINAL",0,0,"8",,terminal_output +12793,9472803,"TERMINAL",0,0,"9",,terminal_output +12794,9473465,"TERMINAL",0,0,"2025-07-03 18:53:30.325635: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +12795,9473913,"TERMINAL",0,0,"30",,terminal_output +12796,9474936,"TERMINAL",0,0,"1",,terminal_output +12797,9475960,"TERMINAL",0,0,"2",,terminal_output +12798,9476952,"TERMINAL",0,0,"3",,terminal_output +12799,9477976,"TERMINAL",0,0,"4",,terminal_output +12800,9479033,"TERMINAL",0,0,"5",,terminal_output +12801,9480057,"TERMINAL",0,0,"2025-07-03 18:53:36.888042: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +12802,9480057,"TERMINAL",0,0,"6",,terminal_output +12803,9481082,"TERMINAL",0,0,"7",,terminal_output +12804,9481899,"TERMINAL",0,0,"2025-07-03 18:53:38.786130: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +12805,9482206,"TERMINAL",0,0,"8",,terminal_output +12806,9483233,"TERMINAL",0,0,"40",,terminal_output +12807,9483233,"TERMINAL",0,0,"token_idxs shape: (1, 1, 920)\r\n",,terminal_output +12808,9484255,"TERMINAL",0,0,"1",,terminal_output +12809,9484640,"TERMINAL",0,0,"Traceback (most recent call last):\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/jax/_src/lax/lax.py"", line 218, in _broadcast_shapes_uncached\r\n return _try_broadcast_shapes(*rank_promoted_shapes, name='broadcast_shapes')\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/jax/_src/lax/lax.py"", line 135, in _try_broadcast_shapes\r\n raise TypeError(f'{name} got incompatible shapes for broadcasting: '\r\nTypeError: broadcast_shapes got incompatible shapes for broadcasting: (1, 16, 920, 1024), (1, 1, 16, 920).\r\n\r\nThe above exception was the direct cause of the following exception:\r\n\r\nTraceback (most recent call last):\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/jax/_src/lax/lax.py"", line 196, in broadcast_shapes\r\n return _broadcast_shapes_cached(*shapes)\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/jax/_src/util.py"", line 294, in wrapper\r\n return cached(config.trace_context() if trace_context_in_key else _ignore(),\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/jax/_src/util.py"", line 288, in cached\r\n return f(*args, **kwargs)\r\n File 
""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/jax/_src/lax/lax.py"", line 202, in _broadcast_shapes_cached\r\n return _broadcast_shapes_uncached(*shapes)\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/jax/_src/lax/lax.py"", line 221, in _broadcast_shapes_uncached\r\n raise ValueError(f""Incompatible shapes for broadcasting: shapes={list(shapes)}"") from err\r\nValueError: Incompatible shapes for broadcasting: shapes=[(1, 16, 920, 1024), (1, 16, 920)]\r\n\r\nDuring handling of the above exception, another exception occurred:\r\n\r\nTraceback (most recent call last):\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/jax/_src/lax/lax.py"", line 218, in _broadcast_shapes_uncached\r\n return _try_broadcast_shapes(*rank_promoted_shapes, name='broadcast_shapes')\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/jax/_src/lax/lax.py"", line 135, in _try_broadcast_shapes\r\n raise TypeError(f'{name} got incompatible shapes for broadcasting: '\r\nTypeError: broadcast_shapes got incompatible shapes for broadcasting: (1, 16, 920, 1024), (1, 1, 16, 920).\r\n\r\nThe above exception was the direct cause of the following exception:\r\n\r\njax.errors.SimplifiedTraceback: For simplicity, JAX has removed its internal frames from the traceback of the following exception. Set JAX_TRACEBACK_FILTERING=off to include these.\r\n\r\nThe above exception was the direct cause of the following exception:\r\n\r\nTraceback (most recent call last):\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/sample.py"", line 157, in \r\n vid = _autoreg_sample_mihir(rng, video_batch, action_batch)\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/sample.py"", line 118, in _autoreg_sample_mihir\r\n generated_vid = genie.apply(\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/genie.py"", line 155, in sample_mihir\r\n final_carry, _ = loop_fn(init_carry, jnp.arange(steps))\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/flax/core/axes_scan.py"", line 165, in scan_fn\r\n _, out_pvals, _ = pe.trace_to_jaxpr_nounits(f_flat, in_pvals)\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/flax/core/axes_scan.py"", line 135, in body_fn\r\n broadcast_out, c, ys = fn(broadcast_in, c, *xs)\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/genie.py"", line 265, in __call__\r\n final_token_probs += ~mask\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/jax/_src/numpy/array_methods.py"", line 1081, in op\r\n return getattr(self.aval, f""_{name}"")(self, *args)\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/jax/_src/numpy/array_methods.py"", line 583, in deferring_binary_op\r\n return binary_op(*args)\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/jax/_src/numpy/ufunc_api.py"", line 180, in __call__\r\n return call(*args)\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/jax/_src/numpy/ufuncs.py"", line 1234, in add\r\n x, y = 
promote_args(""add"", x, y)\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/jax/_src/numpy/util.py"", line 228, in promote_args\r\n return promote_shapes(fun_name, *promote_dtypes(*args))\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/jax/_src/numpy/util.py"", line 64, in promote_shapes\r\n result_rank = len(lax.broadcast_shapes(*shapes))\r\nValueError: Incompatible shapes for broadcasting: shapes=[(1, 16, 920, 1024), (1, 16, 920)]\r\n",,terminal_output +12810,9485280,"TERMINAL",0,0,"2",,terminal_output +12811,9485907,"TERMINAL",0,0,"]0;tum_cte0515@hkn0901:~/Projects/jafar[?2004h(jafar) [tum_cte0515@hkn0901 jafar]$ ",,terminal_output +12812,9486314,"TERMINAL",0,0,"338",,terminal_output +12813,9487370,"TERMINAL",0,0,"4",,terminal_output +12814,9488367,"TERMINAL",0,0,"5",,terminal_output +12815,9489478,"TERMINAL",0,0,"6",,terminal_output +12816,9490503,"TERMINAL",0,0,"7",,terminal_output +12817,9491534,"TERMINAL",0,0,"8",,terminal_output +12818,9492555,"TERMINAL",0,0,"9",,terminal_output +12819,9493562,"TERMINAL",0,0,"50",,terminal_output +12820,9494598,"TERMINAL",0,0,"1",,terminal_output +12821,9495635,"TERMINAL",0,0,"2",,terminal_output +12822,9496663,"TERMINAL",0,0,"350",,terminal_output +12823,9497703,"TERMINAL",0,0,"4",,terminal_output +12824,9498798,"TERMINAL",0,0,"5",,terminal_output +12825,9499819,"TERMINAL",0,0,"6",,terminal_output +12826,9501152,"TERMINAL",0,0,"738",,terminal_output +12827,9502175,"TERMINAL",0,0,"8",,terminal_output +12828,9503199,"TERMINAL",0,0,"4:00",,terminal_output +12829,9504172,"TERMINAL",0,0,"1",,terminal_output +12830,9505248,"TERMINAL",0,0,"2",,terminal_output +12831,9506255,"TERMINAL",0,0,"3",,terminal_output +12832,9507300,"TERMINAL",0,0,"4",,terminal_output +12833,9508422,"TERMINAL",0,0,"5",,terminal_output +12834,9509445,"TERMINAL",0,0,"6",,terminal_output +12835,9510434,"TERMINAL",0,0,"7",,terminal_output +12836,9511494,"TERMINAL",0,0,"8",,terminal_output +12837,9512619,"TERMINAL",0,0,"9",,terminal_output +12838,9513544,"TERMINAL",0,0,"10",,terminal_output +12839,9514574,"TERMINAL",0,0,"1",,terminal_output +12840,9515613,"TERMINAL",0,0,"2",,terminal_output +12841,9516661,"TERMINAL",0,0,"3",,terminal_output +12842,9517711,"TERMINAL",0,0,"4",,terminal_output +12843,9518764,"TERMINAL",0,0,"5",,terminal_output +12844,9519825,"TERMINAL",0,0,"6",,terminal_output +12845,9520914,"TERMINAL",0,0,"7",,terminal_output +12846,9521894,"TERMINAL",0,0,"8",,terminal_output +12847,9522987,"TERMINAL",0,0,"9",,terminal_output +12848,9523978,"TERMINAL",0,0,"20",,terminal_output +12849,9525113,"TERMINAL",0,0,"19",,terminal_output +12850,9526137,"TERMINAL",0,0,"2",,terminal_output +12851,9527143,"TERMINAL",0,0,"3",,terminal_output +12852,9528140,"TERMINAL",0,0,"5",,terminal_output +12853,9529159,"TERMINAL",0,0,"6",,terminal_output +12854,9530233,"TERMINAL",0,0,"7",,terminal_output +12855,9531255,"TERMINAL",0,0,"8",,terminal_output +12856,9532281,"TERMINAL",0,0,"9",,terminal_output +12857,9533286,"TERMINAL",0,0,"30",,terminal_output +12858,9533634,"genie.py",0,0,"",python,tab +12859,9533635,"genie.py",9571,0,"",python,selection_mouse +12860,9533696,"genie.py",9570,0,"",python,selection_command +12861,9534366,"genie.py",8525,0,"",python,selection_mouse +12862,9534366,"genie.py",8524,0,"",python,selection_command +12863,9534400,"TERMINAL",0,0,"1",,terminal_output +12864,9535330,"genie.py",8582,0,"[:, -1]",python,content 
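The traceback captured above pins the crash to `final_token_probs += ~mask` in genie.py's `__call__`: `final_token_probs` carries a trailing vocabulary axis, shape (1, 16, 920, 1024), while `~mask` is (1, 16, 920). JAX right-aligns shapes when broadcasting, so the mask is rank-promoted to (1, 1, 16, 920), and 920 cannot match 1024, exactly as the error reports. The edit at sequence 12864 separately restores the `[:, -1]` last-frame slice on `final_logits` that had been deleted at sequence 12669. A minimal sketch of the failure and two standard fixes follows; the shapes come from the log, but the surrounding setup is an assumption:

```python
# Reproduces the recorded broadcast failure and shows two standard fixes.
# Shapes are taken from the traceback; the setup itself is illustrative.
import jax.numpy as jnp

B, T, N, V = 1, 16, 920, 1024            # batch, frames, tokens, vocab
final_token_probs = jnp.zeros((B, T, N, V))
mask = jnp.ones((B, T, N), dtype=bool)

# final_token_probs += ~mask             # ValueError: (B,T,N,V) vs (B,T,N)

# Fix 1: give the mask an explicit trailing vocab axis so it broadcasts.
probs_fix1 = final_token_probs + (~mask)[..., None]

# Fix 2: reduce the probs to the sampled token first; then shapes agree.
sampled_token_idxs = jnp.zeros((B, T, N), dtype=jnp.int32)
per_token_probs = jnp.take_along_axis(
    final_token_probs, sampled_token_idxs[..., None], axis=-1
).squeeze(-1)                            # (B, T, N), same shape as ~mask
probs_fix2 = per_token_probs + ~mask
```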
+12865,9535426,"TERMINAL",0,0,"2",,terminal_output +12866,9536436,"genie.py",8602,0,"",python,selection_mouse +12867,9536462,"TERMINAL",0,0,"3",,terminal_output +12868,9537360,"TERMINAL",0,0,"watch",,terminal_focus +12869,9537457,"TERMINAL",0,0,"4",,terminal_output +12870,9540351,"TERMINAL",0,0,"git status",,terminal_command +12871,9540434,"TERMINAL",0,0,"]633;E;2025-07-03 18:54:37 git status;70fb23be-3987-4c7c-890b-2d8e1f267fe1]633;COn branch fix-sampling\r\nChanges not staged for commit:\r\n (use ""git add ..."" to update what will be committed)\r\n (use ""git restore ..."" to discard changes in working directory)\r\n\tmodified: genie.py\r\n\tmodified: sample.py\r\n\r\nUntracked files:\r\n (use ""git add ..."" to include in what will be committed)\r\n\tdata_tfrecord_duplicated/\r\n\tdata_tfrecords/\r\n\tlogs/\r\n\tread_tf_record.py\r\n\trequirements-franz.txt\r\n\tscripts_cremers/\r\n\tscripts_horeka/\r\n\tslurm-3309772.out\r\n\tslurm/\r\n\tutils/visualizer.py\r\n\r\nno changes added to commit (use ""git add"" and/or ""git commit -a"")\r\n]0;tum_cte0515@hkn1993:~/Projects/jafar]633;D;0",,terminal_output +12872,9548896,"TERMINAL",0,0,"git add genie.py sample.py",,terminal_command +12873,9557980,"TERMINAL",0,0,"git status",,terminal_command +12874,9558022,"TERMINAL",0,0,"]633;E;2025-07-03 18:54:54 git status;70fb23be-3987-4c7c-890b-2d8e1f267fe1]633;COn branch fix-sampling\r\nChanges to be committed:\r\n (use ""git restore --staged ..."" to unstage)\r\n\tmodified: genie.py\r\n\tmodified: sample.py\r\n\r\nUntracked files:\r\n (use ""git add ..."" to include in what will be committed)\r\n\tdata_tfrecord_duplicated/\r\n\tdata_tfrecords/\r\n\tlogs/\r\n\tread_tf_record.py\r\n\trequirements-franz.txt\r\n\tscripts_cremers/\r\n\tscripts_horeka/\r\n\tslurm-3309772.out\r\n\tslurm/\r\n\tutils/visualizer.py\r\n\r\n]0;tum_cte0515@hkn1993:~/Projects/jafar]633;D;0]633;P;Cwd=/home/hk-project-p0023960/tum_cte0515/Projects/jafar",,terminal_output +12875,9563880,"TERMINAL",0,0,"git checkout main",,terminal_command +12876,9563958,"TERMINAL",0,0,"]633;E;2025-07-03 18:55:00 git checkout main;70fb23be-3987-4c7c-890b-2d8e1f267fe1]633;CM\tgenie.py\r\nM\tsample.py\r\nSwitched to branch 'main'\r\nYour branch is up to date with 'origin/main'.\r\n]0;tum_cte0515@hkn1993:~/Projects/jafar]633;D;0",,terminal_output +12877,9567095,"genie.py",0,0,"Switched from branch 'fix-sampling' to 'main'",python,git_branch_checkout +12878,9580753,"TERMINAL",0,0,"git checkout runner",,terminal_command +12879,9580817,"TERMINAL",0,0,"]633;E;2025-07-03 18:55:17 git checkout runner;70fb23be-3987-4c7c-890b-2d8e1f267fe1]633;CM\tgenie.py\r\nM\tsample.py\r\nSwitched to branch 'runner'\r\n",,terminal_output +12880,9582089,"genie.py",0,0,"Switched from branch 'main' to 'runner'",python,git_branch_checkout +12881,9594358,"scripts_horeka/batchsize_scaling/adjusted_lr/tester.sh",0,0,"#!/usr/bin/env bash\n\nmodule unload mpi/openmpi/5.0\nmodule unload devel/cuda/12.4\nsource .venv/bin/activate\n\ntf_records_dir=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data/open_ai_minecraft_tfrecord\nws_dir='/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/'\n\njob_name=""debug""\nslurm_job_id=""0000""\n\nCHECKPOINT_DIR=$ws_dir/checkpoints/$job_name/$slurm_job_id\nmkdir -p $CHECKPOINT_DIR\n\nenv | grep SLURM\n\nsrun python train_tokenizer.py \\n --ckpt_dir $CHECKPOINT_DIR \\n --batch_size=48 \\n --min_lr=1e-4 \\n --max_lr=1e-4 \\n --log_image_interval=10 \\n --log \\n --name=tokenizer-batch-size-scaling-1-node-$slurm_job_id \\n --tags tokenizer 
batch-size-scaling 1-node debug \\n --entity instant-uv \\n --project jafar \\n --data_dir $tf_records_dir",shellscript,tab +12882,9602197,"scripts_horeka/batchsize_scaling/adjusted_lr/run_sbatch.sh",0,0,"",shellscript,tab +12883,9636391,"TERMINAL",0,0,"cd scripts_horeka/batchsize_scaling/adjusted_lr/",,terminal_command +12884,9636892,"TERMINAL",0,0,"ls",,terminal_command +12885,9638166,"TERMINAL",0,0,"pwd",,terminal_command +12886,9638225,"TERMINAL",0,0,"]633;E;2025-07-03 18:56:15 pwd;70fb23be-3987-4c7c-890b-2d8e1f267fe1]633;C/home/hk-project-p0023960/tum_cte0515/Projects/jafar/scripts_horeka/batchsize_scaling/adjusted_lr\r\n]0;tum_cte0515@hkn1993:~/Projects/jafar/scripts_horeka/batchsize_scaling/adjusted_lr]633;D;0",,terminal_output +12887,9664630,"scripts_horeka/batchsize_scaling/adjusted_lr/run_sbatch.sh",0,0,"sbatch /home/hk-project-p0023960/tum_cte0515/Projects/jafar/scripts_horeka/batchsize_scaling/adjusted_lr/1node.sbatch\n\n",shellscript,content +12888,9664767,"scripts_horeka/batchsize_scaling/adjusted_lr/run_sbatch.sh",118,0,"sbatch /home/hk-project-p0023960/tum_cte0515/Projects/jafar/scripts_horeka/batchsize_scaling/adjusted_lr/2node.sbatch\n",shellscript,content +12889,9665042,"scripts_horeka/batchsize_scaling/adjusted_lr/run_sbatch.sh",236,0,"sbatch /home/hk-project-p0023960/tum_cte0515/Projects/jafar/scripts_horeka/batchsize_scaling/adjusted_lr/3node.sbatch\n",shellscript,content +12890,9665372,"scripts_horeka/batchsize_scaling/adjusted_lr/run_sbatch.sh",354,0,"sbatch /home/hk-project-p0023960/tum_cte0515/Projects/jafar/scripts_horeka/batchsize_scaling/adjusted_lr/4node.sbatch\n",shellscript,content +12891,9665375,"scripts_horeka/batchsize_scaling/adjusted_lr/run_sbatch.sh",472,1,"",shellscript,content +12892,9670052,"scripts_horeka/batchsize_scaling/adjusted_lr/train_tokenizer_1_nodes.sbatch",0,0,"#!/usr/bin/env bash\n\n#SBATCH --nodes=1\n#SBATCH --ntasks-per-node=4\n#SBATCH --time=15:00:00\n#SBATCH --partition=accelerated\n#SBATCH --cpus-per-task=5\n#SBATCH --gres=gpu:4\n#SBATCH --output=logs/logs_training/%x_%j.log\n#SBATCH --error=logs/logs_training/%x_%j.log\n#SBATCH --job-name=train_tokenizer_batch_size_scaling_1_node\n\n# Log the sbatch script\ncat $0\n\nmodule unload mpi/openmpi/5.0\nmodule unload devel/cuda/12.4\nsource .venv/bin/activate\n\ntf_records_dir=hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data/open_ai_minecraft_tfrecord\nws_dir='/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/'\n\njob_name=$SLURM_JOB_NAME\nslurm_job_id=$SLURM_JOB_ID\n\nCHECKPOINT_DIR=$ws_dir/checkpoints/$job_name_$slurm_job_id\nmkdir -p $CHECKPOINT_DIR\n\nenv | grep SLURM\n\nsrun python train_tokenizer.py \\n --ckpt_dir $CHECKPOINT_DIR \\n --batch_size=48 \\n --min_lr=3e-4 \\n --max_lr=3e-4 \\n --log_image_interval=250 \\n --log \\n --name=tokenizer-batch-size-scaling-1-node-$slurm_job_id \\n --tags tokenizer batch-size-scaling 1-node \\n --entity instant-uv \\n --project jafar \\n --data_dir $tf_records_dir\n",shellscript,tab +12893,9673698,"scripts_horeka/batchsize_scaling/adjusted_lr/run_sbatch.sh",0,0,"",shellscript,tab +12894,9674576,"scripts_horeka/batchsize_scaling/adjusted_lr/run_sbatch.sh",117,0,"",shellscript,selection_mouse +12895,9674577,"scripts_horeka/batchsize_scaling/adjusted_lr/run_sbatch.sh",116,0,"",shellscript,selection_command +12896,9674781,"scripts_horeka/batchsize_scaling/adjusted_lr/run_sbatch.sh",116,1,"h",shellscript,selection_mouse 
+12897,9674781,"scripts_horeka/batchsize_scaling/adjusted_lr/run_sbatch.sh",111,5,"sbatc",shellscript,selection_mouse +12898,9674782,"scripts_horeka/batchsize_scaling/adjusted_lr/run_sbatch.sh",116,82,"h\nsbatch /home/hk-project-p0023960/tum_cte0515/Projects/jafar/scripts_horeka/batch",shellscript,selection_mouse +12899,9674782,"scripts_horeka/batchsize_scaling/adjusted_lr/run_sbatch.sh",116,161,"h\nsbatch /home/hk-project-p0023960/tum_cte0515/Projects/jafar/scripts_horeka/batchsize_scaling/adjusted_lr/2node.sbatch\nsbatch /home/hk-project-p0023960/tum_cte0",shellscript,selection_mouse +12900,9674782,"scripts_horeka/batchsize_scaling/adjusted_lr/run_sbatch.sh",116,144,"h\nsbatch /home/hk-project-p0023960/tum_cte0515/Projects/jafar/scripts_horeka/batchsize_scaling/adjusted_lr/2node.sbatch\nsbatch /home/hk-project-",shellscript,selection_mouse +12901,9674805,"scripts_horeka/batchsize_scaling/adjusted_lr/run_sbatch.sh",117,0,"",shellscript,selection_command +12902,9674805,"scripts_horeka/batchsize_scaling/adjusted_lr/run_sbatch.sh",117,137,"\nsbatch /home/hk-project-p0023960/tum_cte0515/Projects/jafar/scripts_horeka/batchsize_scaling/adjusted_lr/2node.sbatch\nsbatch /home/hk-pr",shellscript,selection_mouse +12903,9674846,"scripts_horeka/batchsize_scaling/adjusted_lr/run_sbatch.sh",117,133,"\nsbatch /home/hk-project-p0023960/tum_cte0515/Projects/jafar/scripts_horeka/batchsize_scaling/adjusted_lr/2node.sbatch\nsbatch /home/h",shellscript,selection_mouse +12904,9674897,"scripts_horeka/batchsize_scaling/adjusted_lr/run_sbatch.sh",117,132,"\nsbatch /home/hk-project-p0023960/tum_cte0515/Projects/jafar/scripts_horeka/batchsize_scaling/adjusted_lr/2node.sbatch\nsbatch /home/",shellscript,selection_mouse +12905,9674988,"scripts_horeka/batchsize_scaling/adjusted_lr/run_sbatch.sh",117,131,"\nsbatch /home/hk-project-p0023960/tum_cte0515/Projects/jafar/scripts_horeka/batchsize_scaling/adjusted_lr/2node.sbatch\nsbatch /home",shellscript,selection_mouse +12906,9675031,"scripts_horeka/batchsize_scaling/adjusted_lr/run_sbatch.sh",117,130,"\nsbatch /home/hk-project-p0023960/tum_cte0515/Projects/jafar/scripts_horeka/batchsize_scaling/adjusted_lr/2node.sbatch\nsbatch /hom",shellscript,selection_mouse +12907,9675058,"scripts_horeka/batchsize_scaling/adjusted_lr/run_sbatch.sh",117,129,"\nsbatch /home/hk-project-p0023960/tum_cte0515/Projects/jafar/scripts_horeka/batchsize_scaling/adjusted_lr/2node.sbatch\nsbatch /ho",shellscript,selection_mouse +12908,9675080,"scripts_horeka/batchsize_scaling/adjusted_lr/run_sbatch.sh",117,9,"\nsbatch /",shellscript,selection_mouse +12909,9675105,"scripts_horeka/batchsize_scaling/adjusted_lr/run_sbatch.sh",117,8,"\nsbatch ",shellscript,selection_mouse +12910,9675136,"scripts_horeka/batchsize_scaling/adjusted_lr/run_sbatch.sh",117,7,"\nsbatch",shellscript,selection_mouse +12911,9675163,"scripts_horeka/batchsize_scaling/adjusted_lr/run_sbatch.sh",117,6,"\nsbatc",shellscript,selection_mouse +12912,9675187,"scripts_horeka/batchsize_scaling/adjusted_lr/run_sbatch.sh",4,113,"ch /home/hk-project-p0023960/tum_cte0515/Projects/jafar/scripts_horeka/batchsize_scaling/adjusted_lr/1node.sbatch",shellscript,selection_mouse +12913,9675451,"scripts_horeka/batchsize_scaling/adjusted_lr/run_sbatch.sh",5,112,"h /home/hk-project-p0023960/tum_cte0515/Projects/jafar/scripts_horeka/batchsize_scaling/adjusted_lr/1node.sbatch",shellscript,selection_mouse +12914,9675538,"scripts_horeka/batchsize_scaling/adjusted_lr/run_sbatch.sh",6,111," 
/home/hk-project-p0023960/tum_cte0515/Projects/jafar/scripts_horeka/batchsize_scaling/adjusted_lr/1node.sbatch",shellscript,selection_mouse +12915,9675708,"scripts_horeka/batchsize_scaling/adjusted_lr/run_sbatch.sh",7,110,"/home/hk-project-p0023960/tum_cte0515/Projects/jafar/scripts_horeka/batchsize_scaling/adjusted_lr/1node.sbatch",shellscript,selection_mouse +12916,9676469,"scripts_horeka/batchsize_scaling/adjusted_lr/run_sbatch.sh",7,110,"",shellscript,content +12917,9676504,"scripts_horeka/batchsize_scaling/adjusted_lr/run_sbatch.sh",6,0,"",shellscript,selection_command +12918,9677084,"scripts_horeka/batchsize_scaling/adjusted_lr/run_sbatch.sh",0,7,"",shellscript,content +12919,9678341,"scripts_horeka/batchsize_scaling/adjusted_lr/run_sbatch.sh",0,0,"a",shellscript,content +12920,9678342,"scripts_horeka/batchsize_scaling/adjusted_lr/run_sbatch.sh",1,0,"",shellscript,selection_keyboard +12921,9678406,"scripts_horeka/batchsize_scaling/adjusted_lr/run_sbatch.sh",1,0,"b",shellscript,content +12922,9678407,"scripts_horeka/batchsize_scaling/adjusted_lr/run_sbatch.sh",2,0,"",shellscript,selection_keyboard +12923,9678587,"scripts_horeka/batchsize_scaling/adjusted_lr/run_sbatch.sh",2,0,"c",shellscript,content +12924,9678588,"scripts_horeka/batchsize_scaling/adjusted_lr/run_sbatch.sh",3,0,"",shellscript,selection_keyboard +12925,9678623,"scripts_horeka/batchsize_scaling/adjusted_lr/run_sbatch.sh",3,0,"t",shellscript,content +12926,9678624,"scripts_horeka/batchsize_scaling/adjusted_lr/run_sbatch.sh",4,0,"",shellscript,selection_keyboard +12927,9679103,"scripts_horeka/batchsize_scaling/adjusted_lr/run_sbatch.sh",4,0,"h",shellscript,content +12928,9679104,"scripts_horeka/batchsize_scaling/adjusted_lr/run_sbatch.sh",5,0,"",shellscript,selection_keyboard +12929,9679535,"scripts_horeka/batchsize_scaling/adjusted_lr/run_sbatch.sh",4,1,"",shellscript,content +12930,9679736,"scripts_horeka/batchsize_scaling/adjusted_lr/run_sbatch.sh",3,1,"",shellscript,content +12931,9679754,"scripts_horeka/batchsize_scaling/adjusted_lr/run_sbatch.sh",2,1,"",shellscript,content +12932,9679891,"scripts_horeka/batchsize_scaling/adjusted_lr/run_sbatch.sh",1,1,"",shellscript,content +12933,9680026,"scripts_horeka/batchsize_scaling/adjusted_lr/run_sbatch.sh",0,1,"",shellscript,content +12934,9680236,"scripts_horeka/batchsize_scaling/adjusted_lr/run_sbatch.sh",0,0,"s",shellscript,content +12935,9680237,"scripts_horeka/batchsize_scaling/adjusted_lr/run_sbatch.sh",1,0,"",shellscript,selection_keyboard +12936,9680340,"scripts_horeka/batchsize_scaling/adjusted_lr/run_sbatch.sh",1,0,"b",shellscript,content +12937,9680341,"scripts_horeka/batchsize_scaling/adjusted_lr/run_sbatch.sh",2,0,"",shellscript,selection_keyboard +12938,9680513,"scripts_horeka/batchsize_scaling/adjusted_lr/run_sbatch.sh",2,0,"a",shellscript,content +12939,9680513,"scripts_horeka/batchsize_scaling/adjusted_lr/run_sbatch.sh",3,0,"",shellscript,selection_keyboard +12940,9680621,"scripts_horeka/batchsize_scaling/adjusted_lr/run_sbatch.sh",3,0,"t",shellscript,content +12941,9680622,"scripts_horeka/batchsize_scaling/adjusted_lr/run_sbatch.sh",4,0,"",shellscript,selection_keyboard +12942,9680757,"scripts_horeka/batchsize_scaling/adjusted_lr/run_sbatch.sh",4,0,"c",shellscript,content +12943,9680758,"scripts_horeka/batchsize_scaling/adjusted_lr/run_sbatch.sh",5,0,"",shellscript,selection_keyboard +12944,9680854,"scripts_horeka/batchsize_scaling/adjusted_lr/run_sbatch.sh",5,0,"h",shellscript,content 
+12945,9680855,"scripts_horeka/batchsize_scaling/adjusted_lr/run_sbatch.sh",6,0,"",shellscript,selection_keyboard +12946,9680950,"scripts_horeka/batchsize_scaling/adjusted_lr/run_sbatch.sh",6,0," ",shellscript,content +12947,9680951,"scripts_horeka/batchsize_scaling/adjusted_lr/run_sbatch.sh",7,0,"",shellscript,selection_keyboard +12948,9681167,"scripts_horeka/batchsize_scaling/adjusted_lr/run_sbatch.sh",7,0,"/home/hk-project-p0023960/tum_cte0515/Projects/jafar/scripts_horeka/batchsize_scaling/adjusted_lr/train_tokenizer_1_nodes.sbatch",shellscript,content +12949,9682555,"scripts_horeka/batchsize_scaling/adjusted_lr/run_sbatch.sh",490,0,"",shellscript,selection_mouse +12950,9682915,"scripts_horeka/batchsize_scaling/adjusted_lr/run_sbatch.sh",487,3,"ch\n",shellscript,selection_mouse +12951,9682916,"scripts_horeka/batchsize_scaling/adjusted_lr/run_sbatch.sh",489,1,"\n",shellscript,selection_mouse +12952,9683371,"scripts_horeka/batchsize_scaling/adjusted_lr/run_sbatch.sh",489,0,"",shellscript,selection_mouse +12953,9684056,"scripts_horeka/batchsize_scaling/adjusted_lr/run_sbatch.sh",481,8,"e.sbatch",shellscript,selection_mouse +12954,9684057,"scripts_horeka/batchsize_scaling/adjusted_lr/run_sbatch.sh",461,28,"ing/adjusted_lr/4node.sbatch",shellscript,selection_mouse +12955,9684057,"scripts_horeka/batchsize_scaling/adjusted_lr/run_sbatch.sh",318,171,"pts_horeka/batchsize_scaling/adjusted_lr/3node.sbatch\nsbatch /home/hk-project-p0023960/tum_cte0515/Projects/jafar/scripts_horeka/batchsize_scaling/adjusted_lr/4node.sbatch",shellscript,selection_mouse +12956,9684057,"scripts_horeka/batchsize_scaling/adjusted_lr/run_sbatch.sh",301,188,"ojects/jafar/scripts_horeka/batchsize_scaling/adjusted_lr/3node.sbatch\nsbatch /home/hk-project-p0023960/tum_cte0515/Projects/jafar/scripts_horeka/batchsize_scaling/adjusted_lr/4node.sbatch",shellscript,selection_mouse +12957,9684160,"scripts_horeka/batchsize_scaling/adjusted_lr/run_sbatch.sh",292,197,"te0515/Projects/jafar/scripts_horeka/batchsize_scaling/adjusted_lr/3node.sbatch\nsbatch /home/hk-project-p0023960/tum_cte0515/Projects/jafar/scripts_horeka/batchsize_scaling/adjusted_lr/4node.sbatch",shellscript,selection_mouse +12958,9684161,"scripts_horeka/batchsize_scaling/adjusted_lr/run_sbatch.sh",287,202,"tum_cte0515/Projects/jafar/scripts_horeka/batchsize_scaling/adjusted_lr/3node.sbatch\nsbatch /home/hk-project-p0023960/tum_cte0515/Projects/jafar/scripts_horeka/batchsize_scaling/adjusted_lr/4node.sbatch",shellscript,selection_mouse +12959,9684161,"scripts_horeka/batchsize_scaling/adjusted_lr/run_sbatch.sh",285,204,"0/tum_cte0515/Projects/jafar/scripts_horeka/batchsize_scaling/adjusted_lr/3node.sbatch\nsbatch /home/hk-project-p0023960/tum_cte0515/Projects/jafar/scripts_horeka/batchsize_scaling/adjusted_lr/4node.sbatch",shellscript,selection_mouse +12960,9684161,"scripts_horeka/batchsize_scaling/adjusted_lr/run_sbatch.sh",283,206,"960/tum_cte0515/Projects/jafar/scripts_horeka/batchsize_scaling/adjusted_lr/3node.sbatch\nsbatch /home/hk-project-p0023960/tum_cte0515/Projects/jafar/scripts_horeka/batchsize_scaling/adjusted_lr/4node.sbatch",shellscript,selection_mouse +12961,9684162,"scripts_horeka/batchsize_scaling/adjusted_lr/run_sbatch.sh",282,207,"3960/tum_cte0515/Projects/jafar/scripts_horeka/batchsize_scaling/adjusted_lr/3node.sbatch\nsbatch /home/hk-project-p0023960/tum_cte0515/Projects/jafar/scripts_horeka/batchsize_scaling/adjusted_lr/4node.sbatch",shellscript,selection_mouse 
+12962,9684218,"scripts_horeka/batchsize_scaling/adjusted_lr/run_sbatch.sh",281,208,"23960/tum_cte0515/Projects/jafar/scripts_horeka/batchsize_scaling/adjusted_lr/3node.sbatch\nsbatch /home/hk-project-p0023960/tum_cte0515/Projects/jafar/scripts_horeka/batchsize_scaling/adjusted_lr/4node.sbatch",shellscript,selection_mouse +12963,9684229,"scripts_horeka/batchsize_scaling/adjusted_lr/run_sbatch.sh",280,209,"023960/tum_cte0515/Projects/jafar/scripts_horeka/batchsize_scaling/adjusted_lr/3node.sbatch\nsbatch /home/hk-project-p0023960/tum_cte0515/Projects/jafar/scripts_horeka/batchsize_scaling/adjusted_lr/4node.sbatch",shellscript,selection_mouse +12964,9684260,"scripts_horeka/batchsize_scaling/adjusted_lr/run_sbatch.sh",279,210,"0023960/tum_cte0515/Projects/jafar/scripts_horeka/batchsize_scaling/adjusted_lr/3node.sbatch\nsbatch /home/hk-project-p0023960/tum_cte0515/Projects/jafar/scripts_horeka/batchsize_scaling/adjusted_lr/4node.sbatch",shellscript,selection_mouse +12965,9684318,"scripts_horeka/batchsize_scaling/adjusted_lr/run_sbatch.sh",278,211,"p0023960/tum_cte0515/Projects/jafar/scripts_horeka/batchsize_scaling/adjusted_lr/3node.sbatch\nsbatch /home/hk-project-p0023960/tum_cte0515/Projects/jafar/scripts_horeka/batchsize_scaling/adjusted_lr/4node.sbatch",shellscript,selection_mouse +12966,9684318,"scripts_horeka/batchsize_scaling/adjusted_lr/run_sbatch.sh",276,213,"t-p0023960/tum_cte0515/Projects/jafar/scripts_horeka/batchsize_scaling/adjusted_lr/3node.sbatch\nsbatch /home/hk-project-p0023960/tum_cte0515/Projects/jafar/scripts_horeka/batchsize_scaling/adjusted_lr/4node.sbatch",shellscript,selection_mouse +12967,9684335,"scripts_horeka/batchsize_scaling/adjusted_lr/run_sbatch.sh",274,215,"ect-p0023960/tum_cte0515/Projects/jafar/scripts_horeka/batchsize_scaling/adjusted_lr/3node.sbatch\nsbatch /home/hk-project-p0023960/tum_cte0515/Projects/jafar/scripts_horeka/batchsize_scaling/adjusted_lr/4node.sbatch",shellscript,selection_mouse +12968,9684357,"scripts_horeka/batchsize_scaling/adjusted_lr/run_sbatch.sh",272,217,"oject-p0023960/tum_cte0515/Projects/jafar/scripts_horeka/batchsize_scaling/adjusted_lr/3node.sbatch\nsbatch /home/hk-project-p0023960/tum_cte0515/Projects/jafar/scripts_horeka/batchsize_scaling/adjusted_lr/4node.sbatch",shellscript,selection_mouse +12969,9684380,"scripts_horeka/batchsize_scaling/adjusted_lr/run_sbatch.sh",150,339,"k-project-p0023960/tum_cte0515/Projects/jafar/scripts_horeka/batchsize_scaling/adjusted_lr/2node.sbatch\nsbatch /home/hk-project-p0023960/tum_cte0515/Projects/jafar/scripts_horeka/batchsize_scaling/adjusted_lr/3node.sbatch\nsbatch /home/hk-project-p0023960/tum_cte0515/Projects/jafar/scripts_horeka/batchsize_scaling/adjusted_lr/4node.sbatch",shellscript,selection_mouse +12970,9684404,"scripts_horeka/batchsize_scaling/adjusted_lr/run_sbatch.sh",147,342,"e/hk-project-p0023960/tum_cte0515/Projects/jafar/scripts_horeka/batchsize_scaling/adjusted_lr/2node.sbatch\nsbatch /home/hk-project-p0023960/tum_cte0515/Projects/jafar/scripts_horeka/batchsize_scaling/adjusted_lr/3node.sbatch\nsbatch /home/hk-project-p0023960/tum_cte0515/Projects/jafar/scripts_horeka/batchsize_scaling/adjusted_lr/4node.sbatch",shellscript,selection_mouse +12971,9684458,"scripts_horeka/batchsize_scaling/adjusted_lr/run_sbatch.sh",144,345,"home/hk-project-p0023960/tum_cte0515/Projects/jafar/scripts_horeka/batchsize_scaling/adjusted_lr/2node.sbatch\nsbatch /home/hk-project-p0023960/tum_cte0515/Projects/jafar/scripts_horeka/batchsize_scaling/adjusted_lr/3node.sbatch\nsbatch 
/home/hk-project-p0023960/tum_cte0515/Projects/jafar/scripts_horeka/batchsize_scaling/adjusted_lr/4node.sbatch",shellscript,selection_mouse +12972,9684458,"scripts_horeka/batchsize_scaling/adjusted_lr/run_sbatch.sh",142,347," /home/hk-project-p0023960/tum_cte0515/Projects/jafar/scripts_horeka/batchsize_scaling/adjusted_lr/2node.sbatch\nsbatch /home/hk-project-p0023960/tum_cte0515/Projects/jafar/scripts_horeka/batchsize_scaling/adjusted_lr/3node.sbatch\nsbatch /home/hk-project-p0023960/tum_cte0515/Projects/jafar/scripts_horeka/batchsize_scaling/adjusted_lr/4node.sbatch",shellscript,selection_mouse +12973,9684468,"scripts_horeka/batchsize_scaling/adjusted_lr/run_sbatch.sh",259,230,"h /home/hk-project-p0023960/tum_cte0515/Projects/jafar/scripts_horeka/batchsize_scaling/adjusted_lr/3node.sbatch\nsbatch /home/hk-project-p0023960/tum_cte0515/Projects/jafar/scripts_horeka/batchsize_scaling/adjusted_lr/4node.sbatch",shellscript,selection_mouse +12974,9684493,"scripts_horeka/batchsize_scaling/adjusted_lr/run_sbatch.sh",258,231,"ch /home/hk-project-p0023960/tum_cte0515/Projects/jafar/scripts_horeka/batchsize_scaling/adjusted_lr/3node.sbatch\nsbatch /home/hk-project-p0023960/tum_cte0515/Projects/jafar/scripts_horeka/batchsize_scaling/adjusted_lr/4node.sbatch",shellscript,selection_mouse +12975,9684547,"scripts_horeka/batchsize_scaling/adjusted_lr/run_sbatch.sh",256,233,"atch /home/hk-project-p0023960/tum_cte0515/Projects/jafar/scripts_horeka/batchsize_scaling/adjusted_lr/3node.sbatch\nsbatch /home/hk-project-p0023960/tum_cte0515/Projects/jafar/scripts_horeka/batchsize_scaling/adjusted_lr/4node.sbatch",shellscript,selection_mouse +12976,9684559,"scripts_horeka/batchsize_scaling/adjusted_lr/run_sbatch.sh",255,234,"batch /home/hk-project-p0023960/tum_cte0515/Projects/jafar/scripts_horeka/batchsize_scaling/adjusted_lr/3node.sbatch\nsbatch /home/hk-project-p0023960/tum_cte0515/Projects/jafar/scripts_horeka/batchsize_scaling/adjusted_lr/4node.sbatch",shellscript,selection_mouse +12977,9684584,"scripts_horeka/batchsize_scaling/adjusted_lr/run_sbatch.sh",254,235,"sbatch /home/hk-project-p0023960/tum_cte0515/Projects/jafar/scripts_horeka/batchsize_scaling/adjusted_lr/3node.sbatch\nsbatch /home/hk-project-p0023960/tum_cte0515/Projects/jafar/scripts_horeka/batchsize_scaling/adjusted_lr/4node.sbatch",shellscript,selection_mouse +12978,9684882,"scripts_horeka/batchsize_scaling/adjusted_lr/run_sbatch.sh",136,353,"sbatch /home/hk-project-p0023960/tum_cte0515/Projects/jafar/scripts_horeka/batchsize_scaling/adjusted_lr/2node.sbatch\nsbatch /home/hk-project-p0023960/tum_cte0515/Projects/jafar/scripts_horeka/batchsize_scaling/adjusted_lr/3node.sbatch\nsbatch /home/hk-project-p0023960/tum_cte0515/Projects/jafar/scripts_horeka/batchsize_scaling/adjusted_lr/4node.sbatch",shellscript,selection_mouse +12979,9685563,"scripts_horeka/batchsize_scaling/adjusted_lr/run_sbatch.sh",136,353,"",shellscript,content +12980,9687288,"scripts_horeka/batchsize_scaling/adjusted_lr/run_sbatch.sh",135,1,"",shellscript,content +12981,9687521,"scripts_horeka/batchsize_scaling/adjusted_lr/run_sbatch.sh",134,0,"",shellscript,selection_command +12982,9688592,"scripts_horeka/batchsize_scaling/adjusted_lr/run_sbatch.sh",135,0,"\nsbatch /home/hk-project-p0023960/tum_cte0515/Projects/jafar/scripts_horeka/batchsize_scaling/adjusted_lr/train_tokenizer_1_nodes.sbatch",shellscript,content +12983,9688631,"scripts_horeka/batchsize_scaling/adjusted_lr/run_sbatch.sh",136,0,"",shellscript,selection_command 
+12984,9693813,"scripts_horeka/batchsize_scaling/adjusted_lr/run_sbatch.sh",143,0,"",shellscript,selection_mouse +12985,9694957,"scripts_horeka/batchsize_scaling/adjusted_lr/run_sbatch.sh",271,0,"",shellscript,selection_mouse +12986,9694967,"scripts_horeka/batchsize_scaling/adjusted_lr/run_sbatch.sh",270,0,"",shellscript,selection_command +12987,9695175,"scripts_horeka/batchsize_scaling/adjusted_lr/run_sbatch.sh",268,2,"tc",shellscript,selection_mouse +12988,9695176,"scripts_horeka/batchsize_scaling/adjusted_lr/run_sbatch.sh",121,149,"1_nodes.sbatch\nsbatch /home/hk-project-p0023960/tum_cte0515/Projects/jafar/scripts_horeka/batchsize_scaling/adjusted_lr/train_tokenizer_1_nodes.sbatc",shellscript,selection_mouse +12989,9695176,"scripts_horeka/batchsize_scaling/adjusted_lr/run_sbatch.sh",106,164,"rain_tokenizer_1_nodes.sbatch\nsbatch /home/hk-project-p0023960/tum_cte0515/Projects/jafar/scripts_horeka/batchsize_scaling/adjusted_lr/train_tokenizer_1_nodes.sbatc",shellscript,selection_mouse +12990,9695176,"scripts_horeka/batchsize_scaling/adjusted_lr/run_sbatch.sh",91,179,"g/adjusted_lr/train_tokenizer_1_nodes.sbatch\nsbatch /home/hk-project-p0023960/tum_cte0515/Projects/jafar/scripts_horeka/batchsize_scaling/adjusted_lr/train_tokenizer_1_nodes.sbatc",shellscript,selection_mouse +12991,9695177,"scripts_horeka/batchsize_scaling/adjusted_lr/run_sbatch.sh",80,190,"size_scaling/adjusted_lr/train_tokenizer_1_nodes.sbatch\nsbatch /home/hk-project-p0023960/tum_cte0515/Projects/jafar/scripts_horeka/batchsize_scaling/adjusted_lr/train_tokenizer_1_nodes.sbatc",shellscript,selection_mouse +12992,9695205,"scripts_horeka/batchsize_scaling/adjusted_lr/run_sbatch.sh",268,3,"tch",shellscript,selection_command +12993,9695206,"scripts_horeka/batchsize_scaling/adjusted_lr/run_sbatch.sh",62,209,"ripts_horeka/batchsize_scaling/adjusted_lr/train_tokenizer_1_nodes.sbatch\nsbatch /home/hk-project-p0023960/tum_cte0515/Projects/jafar/scripts_horeka/batchsize_scaling/adjusted_lr/train_tokenizer_1_nodes.sbatch",shellscript,selection_mouse +12994,9695249,"scripts_horeka/batchsize_scaling/adjusted_lr/run_sbatch.sh",50,221,"cts/jafar/scripts_horeka/batchsize_scaling/adjusted_lr/train_tokenizer_1_nodes.sbatch\nsbatch /home/hk-project-p0023960/tum_cte0515/Projects/jafar/scripts_horeka/batchsize_scaling/adjusted_lr/train_tokenizer_1_nodes.sbatch",shellscript,selection_mouse +12995,9695282,"scripts_horeka/batchsize_scaling/adjusted_lr/run_sbatch.sh",44,227,"/Projects/jafar/scripts_horeka/batchsize_scaling/adjusted_lr/train_tokenizer_1_nodes.sbatch\nsbatch /home/hk-project-p0023960/tum_cte0515/Projects/jafar/scripts_horeka/batchsize_scaling/adjusted_lr/train_tokenizer_1_nodes.sbatch",shellscript,selection_mouse +12996,9695312,"scripts_horeka/batchsize_scaling/adjusted_lr/run_sbatch.sh",178,93,"15/Projects/jafar/scripts_horeka/batchsize_scaling/adjusted_lr/train_tokenizer_1_nodes.sbatch",shellscript,selection_mouse +12997,9695345,"scripts_horeka/batchsize_scaling/adjusted_lr/run_sbatch.sh",175,96,"e0515/Projects/jafar/scripts_horeka/batchsize_scaling/adjusted_lr/train_tokenizer_1_nodes.sbatch",shellscript,selection_mouse +12998,9695375,"scripts_horeka/batchsize_scaling/adjusted_lr/run_sbatch.sh",172,99,"_cte0515/Projects/jafar/scripts_horeka/batchsize_scaling/adjusted_lr/train_tokenizer_1_nodes.sbatch",shellscript,selection_mouse 
+12999,9695407,"scripts_horeka/batchsize_scaling/adjusted_lr/run_sbatch.sh",169,102,"tum_cte0515/Projects/jafar/scripts_horeka/batchsize_scaling/adjusted_lr/train_tokenizer_1_nodes.sbatch",shellscript,selection_mouse +13000,9695446,"scripts_horeka/batchsize_scaling/adjusted_lr/run_sbatch.sh",165,106,"960/tum_cte0515/Projects/jafar/scripts_horeka/batchsize_scaling/adjusted_lr/train_tokenizer_1_nodes.sbatch",shellscript,selection_mouse +13001,9695513,"scripts_horeka/batchsize_scaling/adjusted_lr/run_sbatch.sh",271,1,"\n",shellscript,selection_mouse +13002,9695915,"scripts_horeka/batchsize_scaling/adjusted_lr/run_sbatch.sh",147,124,"e/hk-project-p0023960/tum_cte0515/Projects/jafar/scripts_horeka/batchsize_scaling/adjusted_lr/train_tokenizer_1_nodes.sbatch",shellscript,selection_mouse +13003,9696050,"scripts_horeka/batchsize_scaling/adjusted_lr/run_sbatch.sh",146,125,"me/hk-project-p0023960/tum_cte0515/Projects/jafar/scripts_horeka/batchsize_scaling/adjusted_lr/train_tokenizer_1_nodes.sbatch",shellscript,selection_mouse +13004,9696087,"scripts_horeka/batchsize_scaling/adjusted_lr/run_sbatch.sh",145,126,"ome/hk-project-p0023960/tum_cte0515/Projects/jafar/scripts_horeka/batchsize_scaling/adjusted_lr/train_tokenizer_1_nodes.sbatch",shellscript,selection_mouse +13005,9696135,"scripts_horeka/batchsize_scaling/adjusted_lr/run_sbatch.sh",144,127,"home/hk-project-p0023960/tum_cte0515/Projects/jafar/scripts_horeka/batchsize_scaling/adjusted_lr/train_tokenizer_1_nodes.sbatch",shellscript,selection_mouse +13006,9696395,"scripts_horeka/batchsize_scaling/adjusted_lr/run_sbatch.sh",143,128,"/home/hk-project-p0023960/tum_cte0515/Projects/jafar/scripts_horeka/batchsize_scaling/adjusted_lr/train_tokenizer_1_nodes.sbatch",shellscript,selection_mouse +13007,9697244,"scripts_horeka/batchsize_scaling/adjusted_lr/run_sbatch.sh",143,128,"",shellscript,content +13008,9697309,"scripts_horeka/batchsize_scaling/adjusted_lr/run_sbatch.sh",142,0,"",shellscript,selection_command +13009,9697608,"scripts_horeka/batchsize_scaling/adjusted_lr/run_sbatch.sh",143,0,"",shellscript,selection_command +13010,9698089,"scripts_horeka/batchsize_scaling/adjusted_lr/run_sbatch.sh",143,0,"/home/hk-project-p0023960/tum_cte0515/Projects/jafar/scripts_horeka/batchsize_scaling/adjusted_lr/train_tokenizer_2_nodes.sbatch",shellscript,content +13011,9703759,"scripts_horeka/batchsize_scaling/adjusted_lr/run_sbatch.sh",272,0,"",shellscript,selection_mouse +13012,9704846,"scripts_horeka/batchsize_scaling/adjusted_lr/run_sbatch.sh",272,0,"s",shellscript,content +13013,9704848,"scripts_horeka/batchsize_scaling/adjusted_lr/run_sbatch.sh",273,0,"",shellscript,selection_keyboard +13014,9704960,"scripts_horeka/batchsize_scaling/adjusted_lr/run_sbatch.sh",273,0,"b",shellscript,content +13015,9704961,"scripts_horeka/batchsize_scaling/adjusted_lr/run_sbatch.sh",274,0,"",shellscript,selection_keyboard +13016,9705061,"scripts_horeka/batchsize_scaling/adjusted_lr/run_sbatch.sh",274,0,"a",shellscript,content +13017,9705062,"scripts_horeka/batchsize_scaling/adjusted_lr/run_sbatch.sh",275,0,"",shellscript,selection_keyboard +13018,9705157,"scripts_horeka/batchsize_scaling/adjusted_lr/run_sbatch.sh",275,0,"t",shellscript,content +13019,9705157,"scripts_horeka/batchsize_scaling/adjusted_lr/run_sbatch.sh",276,0,"",shellscript,selection_keyboard +13020,9705244,"scripts_horeka/batchsize_scaling/adjusted_lr/run_sbatch.sh",276,0,"c",shellscript,content +13021,9705245,"scripts_horeka/batchsize_scaling/adjusted_lr/run_sbatch.sh",277,0,"",shellscript,selection_keyboard 
+13022,9705340,"scripts_horeka/batchsize_scaling/adjusted_lr/run_sbatch.sh",277,0,"h",shellscript,content +13023,9705341,"scripts_horeka/batchsize_scaling/adjusted_lr/run_sbatch.sh",278,0,"",shellscript,selection_keyboard +13024,9705400,"scripts_horeka/batchsize_scaling/adjusted_lr/run_sbatch.sh",278,0," ",shellscript,content +13025,9705401,"scripts_horeka/batchsize_scaling/adjusted_lr/run_sbatch.sh",279,0,"",shellscript,selection_keyboard +13026,9705618,"scripts_horeka/batchsize_scaling/adjusted_lr/run_sbatch.sh",279,0,"/home/hk-project-p0023960/tum_cte0515/Projects/jafar/scripts_horeka/batchsize_scaling/adjusted_lr/train_tokenizer_4_nodes.sbatch",shellscript,content +13027,9709037,"scripts_horeka/batchsize_scaling/adjusted_lr/train_tokenizer_1_nodes.sbatch",0,0,"",shellscript,tab +13028,9709880,"scripts_horeka/batchsize_scaling/adjusted_lr/train_tokenizer_1_nodes.sbatch",854,0,"",shellscript,selection_mouse +13029,9709881,"scripts_horeka/batchsize_scaling/adjusted_lr/train_tokenizer_1_nodes.sbatch",853,0,"",shellscript,selection_command +13030,9717231,"scripts_horeka/batchsize_scaling/adjusted_lr/train_tokenizer_2_nodes.sbatch",0,0,"#!/usr/bin/env bash\n\n#SBATCH --nodes=2\n#SBATCH --ntasks-per-node=4\n#SBATCH --time=05:00:00\n#SBATCH --partition=accelerated\n#SBATCH --cpus-per-task=5\n#SBATCH --gres=gpu:4\n#SBATCH --output=logs/logs_training/%x_%j.log\n#SBATCH --error=logs/logs_training/%x_%j.log\n#SBATCH --job-name=train_tokenizer_batch_size_scaling_2_node\n#SBATCH --mem=100G\n\n# Log the sbatch script\ncat $0\n\nmodule unload mpi/openmpi/5.0\nmodule unload devel/cuda/12.4\nsource .venv/bin/activate\n\ntf_records_dir=hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data/open_ai_minecraft_tfrecord\nws_dir='/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/'\n\njob_name=$SLURM_JOB_NAME\nslurm_job_id=$SLURM_JOB_ID\n\nCHECKPOINT_DIR=$ws_dir/checkpoints/$job_name_$slurm_job_id\nmkdir -p $CHECKPOINT_DIR\n\nenv | grep SLURM\n\nsrun python train_tokenizer.py \\n --ckpt_dir $CHECKPOINT_DIR \\n --batch_size=96 \\n --min_lr=4.24e-4 \\n --max_lr=4.24e-4 \\n --log_image_interval=250 \\n --log \\n --name=tokenizer-batch-size-scaling-2-node-$slurm_job_id \\n --tags tokenizer batch-size-scaling 2-node \\n --entity instant-uv \\n --project jafar \\n --data_dir $tf_records_dir\n",shellscript,tab +13031,9719301,"scripts_horeka/batchsize_scaling/adjusted_lr/train_tokenizer_4_nodes.sbatch",0,0,"#!/usr/bin/env bash\n\n#SBATCH --nodes=4\n#SBATCH --ntasks-per-node=4\n#SBATCH --time=05:00:00\n#SBATCH --partition=accelerated\n#SBATCH --cpus-per-task=5\n#SBATCH --gres=gpu:4\n#SBATCH --output=logs/logs_training/%x_%j.log\n#SBATCH --error=logs/logs_training/%x_%j.log\n#SBATCH --job-name=train_tokenizer_batch_size_scaling_4_node\n#SBATCH --mem=100G\n\n# Log the sbatch script\ncat $0\n\nmodule unload mpi/openmpi/5.0\nmodule unload devel/cuda/12.4\nsource .venv/bin/activate\n\ntf_records_dir=hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data/open_ai_minecraft_tfrecord\nws_dir='/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/'\n\njob_name=$SLURM_JOB_NAME\nslurm_job_id=$SLURM_JOB_ID\n\nCHECKPOINT_DIR=$ws_dir/checkpoints/$job_name_$slurm_job_id\nmkdir -p $CHECKPOINT_DIR\n\nenv | grep SLURM\n\nsrun python train_tokenizer.py \\n --ckpt_dir $CHECKPOINT_DIR \\n --batch_size=192 \\n --min_lr=6.00e-4 \\n --max_lr=6.00e-4 \\n --log_image_interval=250 \\n --log \\n --name=tokenizer-batch-size-scaling-4-node-$slurm_job_id \\n --tags tokenizer batch-size-scaling 4-node \\n --entity instant-uv \\n 
--project jafar \\n --data_dir $tf_records_dir\n",shellscript,tab +13032,9757893,"TERMINAL",0,0,"\r(jafar) [tum_cte0515@hkn0901 jafar]$ ",,terminal_output +13033,9777973,"scripts_horeka/batchsize_scaling/adjusted_lr/train_tokenizer_4_nodes.sbatch",1123,0,"",shellscript,selection_mouse +13034,9777991,"scripts_horeka/batchsize_scaling/adjusted_lr/train_tokenizer_4_nodes.sbatch",1122,0,"",shellscript,selection_command +13035,9778969,"scripts_horeka/batchsize_scaling/adjusted_lr/train_tokenizer_4_nodes.sbatch",963,0,"",shellscript,selection_mouse +13036,9778982,"scripts_horeka/batchsize_scaling/adjusted_lr/train_tokenizer_4_nodes.sbatch",962,0,"",shellscript,selection_command +13037,9779742,"scripts_horeka/batchsize_scaling/adjusted_lr/train_tokenizer_4_nodes.sbatch",920,0,"",shellscript,selection_mouse +13038,9779744,"scripts_horeka/batchsize_scaling/adjusted_lr/train_tokenizer_4_nodes.sbatch",919,0,"",shellscript,selection_command +13039,9781078,"scripts_horeka/batchsize_scaling/adjusted_lr/train_tokenizer_4_nodes.sbatch",890,0,"",shellscript,selection_mouse +13040,9813811,"TERMINAL",0,0,"srun",,terminal_focus +13041,9814460,"TERMINAL",0,0,"s",,terminal_output +13042,9814863,"TERMINAL",0,0,"",,terminal_output +13043,9815105,"TERMINAL",0,0,"l",,terminal_output +13044,9815167,"TERMINAL",0,0,"s",,terminal_output +13045,9815301,"TERMINAL",0,0," ",,terminal_output +13046,9815771,"TERMINAL",0,0,"l",,terminal_output +13047,9816341,"TERMINAL",0,0,"",,terminal_output +13048,9816486,"TERMINAL",0,0,"",,terminal_output +13049,9817111,"TERMINAL",0,0,"[?25l [?25h",,terminal_output +13050,9817173,"TERMINAL",0,0,"[?25l.[?25h",,terminal_output +13051,9817351,"TERMINAL",0,0,"[?25l.[?25h",,terminal_output +13052,9817764,"TERMINAL",0,0,"[?25l/[?25h",,terminal_output +13053,9817998,"TERMINAL",0,0,"[?25ll[?25h",,terminal_output +13054,9819330,"TERMINAL",0,0,"[?25lj[?25h",,terminal_output +13055,9819498,"TERMINAL",0,0,"afar",,terminal_output +13056,9820759,"TERMINAL",0,0,"[?25l_[?25h",,terminal_output +13057,9820877,"TERMINAL",0,0,"jobs/",,terminal_output +13058,9821352,"TERMINAL",0,0,"",,terminal_output +13059,9822307,"TERMINAL",0,0,"[?25ll[?25h",,terminal_output +13060,9822472,"TERMINAL",0,0,"[?25lo[?25h",,terminal_output +13061,9822581,"TERMINAL",0,0,"gs/",,terminal_output +13062,9822977,"TERMINAL",0,0,"logs_training/",,terminal_output +13063,9823127,"TERMINAL",0,0,"train_",,terminal_output +13064,9823857,"TERMINAL",0,0,"",,terminal_output +13065,9824124,"TERMINAL",0,0,"\r\ntrain_dynamics_minecraft_overfit_sample_110M_3296571.log train_dynamics_minecraft_overfit_sample_tiny_3307619.log\r\ntrain_dynamics_minecraft_overfit_sample_12M_3297577.log train_dynamics_minecraft_overfit_sample_tiny_3310436.log\r\ntrain_dynamics_minecraft_overfit_sample_12M_3299066.log train_dynamics_minecraft_overfit_sample_tiny_smol_lr_3301026.log\r\ntrain_dynamics_minecraft_overfit_sample_1.5M_3297569.log train_dynamics_minecraft_overfit_sample_tiny_smol_lr_3301031.log\r\ntrain_dynamics_minecraft_overfit_sample_1.5M_3299063.log train_lam_minecraft_overfit_sample_3299069.log\r\ntrain_dynamics_minecraft_overfit_sample_180M_3296573.log train_lam_minecraft_overfit_sample_3299259.log\r\ntrain_dynamics_minecraft_overfit_sample_18M_3297578.log train_lam_minecraft_overfit_sample_3309663.log\r\ntrain_dynamics_minecraft_overfit_sample_18M_3299062.log train_lam_minecraft_overfit_sample_3311672.log\r\ntrain_dynamics_minecraft_overfit_sample_270M_3296574.log 
train_tokenizer_batch_size_scaling_1_node_3294600.log\r\ntrain_dynamics_minecraft_overfit_sample_3.5M_3297575.log train_tokenizer_batch_size_scaling_1_node_3313570.log\r\ntrain_dynamics_minecraft_overfit_sample_36M_3296500.log train_tokenizer_batch_size_scaling_2_node_3294601.log\r\ntrain_dynamics_minecraft_overfit_sample_36M_3296502.log train_tokenizer_batch_size_scaling_2_node_3313571.log\r\ntrain_dynamics_minecraft_overfit_sample_36M_3296540.log train_tokenizer_batch_size_scaling_4_node_3294602.log\r\ntrain_dynamics_minecraft_overfit_sample_500M_3296575.log train_tokenizer_batch_size_scaling_4_node_3313572.log\r\ntrain_dynamics_minecraft_overfit_sample_6M_3297576.log train_tokenizer_batch_size_scaling_8_node_3294603.log\r\ntrain_dynamics_minecraft_overfit_sample_6M_3299065.log train_tokenizer_minecraft_overfit_batch_3311671.log\r\ntrain_dynamics_minecraft_overfit_sample_tiny_3301025.log train_tokenizer_minecraft_overfit_sample_3299016.log\r\ntrain_dynamics_minecraft_overfit_sample_tiny_3301027.log train_tokenizer_minecraft_overfit_sample_3299068.log\r\ntrain_dynamics_minecraft_overfit_sample_tiny_3301029.log train_tokenizer_minecraft_overfit_sample_3299258.log\r\ntrain_dynamics_minecraft_overfit_sample_tiny_3301030.log train_tokenizer_minecraft_overfit_sample_3299272.log\r\ntrain_dynamics_minecraft_overfit_sample_tiny_3307618.log train_tokenizer_minecraft_overfit_sample_3309662.log\r\n(jafar) [tum_cte0515@hkn0901 jafar]$ ls ../jafar_jobs/logs/logs_training/train_",,terminal_output +13066,9838184,"TERMINAL",0,0,"[?25lt[?25h",,terminal_output +13067,9838269,"TERMINAL",0,0,"okenizer_",,terminal_output +13068,9838616,"TERMINAL",0,0,"[?25lb[?25h",,terminal_output +13069,9838680,"TERMINAL",0,0,"atch_size_scaling_",,terminal_output +13070,9840850,"TERMINAL",0,0,"[?25l1[?25h",,terminal_output +13071,9841057,"TERMINAL",0,0,"_node_3",,terminal_output +13072,9844647,"TERMINAL",0,0,"[?25l2[?25h",,terminal_output +13073,9845132,"TERMINAL",0,0,"[?25l3[?25h",,terminal_output +13074,9845375,"TERMINAL",0,0,"13570.log ",,terminal_output +13075,9847655,"TERMINAL",0,0,"\r\n[?2004l\r../jafar_jobs/logs/logs_training/train_tokenizer_batch_size_scaling_1_node_3313570.log\r\n]0;tum_cte0515@hkn0901:~/Projects/jafar[?2004h(jafar) [tum_cte0515@hkn0901 jafar]$ ",,terminal_output +13076,9848764,"/home/hk-project-p0023960/tum_cte0515/Projects/jafar_jobs/logs/logs_training/train_tokenizer_batch_size_scaling_1_node_3313570.log",0,0,"#!/usr/bin/env bash\n\n#SBATCH --nodes=1\n#SBATCH --ntasks-per-node=4\n#SBATCH --time=15:00:00\n#SBATCH --partition=accelerated\n#SBATCH --cpus-per-task=5\n#SBATCH --gres=gpu:4\n#SBATCH --output=logs/logs_training/%x_%j.log\n#SBATCH --error=logs/logs_training/%x_%j.log\n#SBATCH --job-name=train_tokenizer_batch_size_scaling_1_node\n\n# Log the sbatch script\ncat $0\n\nmodule unload mpi/openmpi/5.0\nmodule unload devel/cuda/12.4\nsource .venv/bin/activate\n\ntf_records_dir=hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data/open_ai_minecraft_tfrecord\nws_dir='/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/'\n\njob_name=$SLURM_JOB_NAME\nslurm_job_id=$SLURM_JOB_ID\n\nCHECKPOINT_DIR=$ws_dir/checkpoints/$job_name_$slurm_job_id\nmkdir -p $CHECKPOINT_DIR\n\nenv | grep SLURM\n\nsrun python train_tokenizer.py \\n --ckpt_dir $CHECKPOINT_DIR \\n --batch_size=48 \\n --min_lr=3e-4 \\n --max_lr=3e-4 \\n --log_image_interval=250 \\n --log \\n --name=tokenizer-batch-size-scaling-1-node-$slurm_job_id \\n --tags tokenizer batch-size-scaling 1-node \\n --entity instant-uv \\n --project jafar 
\\n --data_dir $tf_records_dir\n/var/spool/slurmd/job3313570/slurm_script: line 18: .venv/bin/activate: No such file or directory\nSLURM_JOB_USER=tum_cte0515\nSLURM_TASKS_PER_NODE=4\nSLURM_JOB_UID=999226\nSLURM_TASK_PID=1989439\nSLURM_JOB_GPUS=0,1,2,3\nSLURM_LOCALID=0\nSLURM_SUBMIT_DIR=/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar_jobs\nSLURMD_NODENAME=hkn0512\nSLURM_JOB_START_TIME=1751511828\nSLURM_CLUSTER_NAME=hk\nSLURM_JOB_END_TIME=1751565828\nSLURM_CPUS_ON_NODE=24\nSLURM_JOB_CPUS_PER_NODE=24\nSLURM_GPUS_ON_NODE=4\nSLURM_GTIDS=0\nSLURM_JOB_PARTITION=accelerated\nSLURM_TRES_PER_TASK=cpu=5\nSLURM_OOM_KILL_STEP=0\nSLURM_JOB_NUM_NODES=1\nSLURM_JOBID=3313570\nSLURM_JOB_QOS=normal\nSLURM_PROCID=0\nSLURM_CPUS_PER_TASK=5\nSLURM_NTASKS=4\nSLURM_TOPOLOGY_ADDR=hkibb.hkibbi1.hkibbi1e10.hkn0512\nSLURM_TOPOLOGY_ADDR_PATTERN=switch.switch.switch.node\nSLURM_SCRIPT_CONTEXT=prolog_task\nSLURM_NODELIST=hkn0512\nSLURM_JOB_ACCOUNT=hk-project-p0023960\nSLURM_PRIO_PROCESS=0\nSLURM_NPROCS=4\nSLURM_NNODES=1\nSLURM_SUBMIT_HOST=hkn1991.localdomain\nSLURM_JOB_ID=3313570\nSLURM_NODEID=0\nSLURM_CONF=/etc/slurm/slurm.conf\nSLURM_JOB_NAME=train_tokenizer_batch_size_scaling_1_node\nSLURM_NTASKS_PER_NODE=4\nSLURM_JOB_GID=502226\nSLURM_JOB_NODELIST=hkn0512\nGpuFreq=control_disabled\n2025-07-03 05:04:39.228831: E external/local_xla/xla/stream_executor/cuda/cuda_fft.cc:467] Unable to register cuFFT factory: Attempting to register factory for plugin cuFFT when one has already been registered\n2025-07-03 05:04:39.228830: E external/local_xla/xla/stream_executor/cuda/cuda_fft.cc:467] Unable to register cuFFT factory: Attempting to register factory for plugin cuFFT when one has already been registered\n2025-07-03 05:04:39.228828: E external/local_xla/xla/stream_executor/cuda/cuda_fft.cc:467] Unable to register cuFFT factory: Attempting to register factory for plugin cuFFT when one has already been registered\n2025-07-03 05:04:39.228830: E external/local_xla/xla/stream_executor/cuda/cuda_fft.cc:467] Unable to register cuFFT factory: Attempting to register factory for plugin cuFFT when one has already been registered\nWARNING: All log messages before absl::InitializeLog() is called are written to STDERR\nE0000 00:00:1751511879.311058 1989542 cuda_dnn.cc:8579] Unable to register cuDNN factory: Attempting to register factory for plugin cuDNN when one has already been registered\nWARNING: All log messages before absl::InitializeLog() is called are written to STDERR\nE0000 00:00:1751511879.311221 1989540 cuda_dnn.cc:8579] Unable to register cuDNN factory: Attempting to register factory for plugin cuDNN when one has already been registered\nWARNING: All log messages before absl::InitializeLog() is called are written to STDERR\nE0000 00:00:1751511879.311363 1989541 cuda_dnn.cc:8579] Unable to register cuDNN factory: Attempting to register factory for plugin cuDNN when one has already been registered\nWARNING: All log messages before absl::InitializeLog() is called are written to STDERR\nE0000 00:00:1751511879.311333 1989543 cuda_dnn.cc:8579] Unable to register cuDNN factory: Attempting to register factory for plugin cuDNN when one has already been registered\nE0000 00:00:1751511879.354914 1989540 cuda_blas.cc:1407] Unable to register cuBLAS factory: Attempting to register factory for plugin cuBLAS when one has already been registered\nE0000 00:00:1751511879.354954 1989541 cuda_blas.cc:1407] Unable to register cuBLAS factory: Attempting to register factory for plugin cuBLAS when one has already been 
registered\nE0000 00:00:1751511879.354924 1989542 cuda_blas.cc:1407] Unable to register cuBLAS factory: Attempting to register factory for plugin cuBLAS when one has already been registered\nE0000 00:00:1751511879.354940 1989543 cuda_blas.cc:1407] Unable to register cuBLAS factory: Attempting to register factory for plugin cuBLAS when one has already been registered\nW0000 00:00:1751511879.676045 1989540 computation_placer.cc:177] computation placer already registered. Please check linkage and avoid linking the same target more than once.\nW0000 00:00:1751511879.676076 1989540 computation_placer.cc:177] computation placer already registered. Please check linkage and avoid linking the same target more than once.\nW0000 00:00:1751511879.676078 1989540 computation_placer.cc:177] computation placer already registered. Please check linkage and avoid linking the same target more than once.\nW0000 00:00:1751511879.676080 1989540 computation_placer.cc:177] computation placer already registered. Please check linkage and avoid linking the same target more than once.\nW0000 00:00:1751511879.676047 1989541 computation_placer.cc:177] computation placer already registered. Please check linkage and avoid linking the same target more than once.\nW0000 00:00:1751511879.676081 1989541 computation_placer.cc:177] computation placer already registered. Please check linkage and avoid linking the same target more than once.\nW0000 00:00:1751511879.676084 1989541 computation_placer.cc:177] computation placer already registered. Please check linkage and avoid linking the same target more than once.\nW0000 00:00:1751511879.676085 1989541 computation_placer.cc:177] computation placer already registered. Please check linkage and avoid linking the same target more than once.\nW0000 00:00:1751511879.676048 1989542 computation_placer.cc:177] computation placer already registered. Please check linkage and avoid linking the same target more than once.\nW0000 00:00:1751511879.676085 1989542 computation_placer.cc:177] computation placer already registered. Please check linkage and avoid linking the same target more than once.\nW0000 00:00:1751511879.676087 1989542 computation_placer.cc:177] computation placer already registered. Please check linkage and avoid linking the same target more than once.\nW0000 00:00:1751511879.676089 1989542 computation_placer.cc:177] computation placer already registered. Please check linkage and avoid linking the same target more than once.\nW0000 00:00:1751511879.676050 1989543 computation_placer.cc:177] computation placer already registered. Please check linkage and avoid linking the same target more than once.\nW0000 00:00:1751511879.676086 1989543 computation_placer.cc:177] computation placer already registered. Please check linkage and avoid linking the same target more than once.\nW0000 00:00:1751511879.676088 1989543 computation_placer.cc:177] computation placer already registered. Please check linkage and avoid linking the same target more than once.\nW0000 00:00:1751511879.676089 1989543 computation_placer.cc:177] computation placer already registered. Please check linkage and avoid linking the same target more than once.\nW0000 00:00:1751511910.121814 1989542 gpu_device.cc:2341] Cannot dlopen some GPU libraries. Please make sure the missing libraries mentioned above are installed properly if you would like to use GPU. 
Follow the guide at https://www.tensorflow.org/install/gpu for how to download and setup the required libraries for your platform.\nSkipping registering GPU devices...\nW0000 00:00:1751511910.121818 1989543 gpu_device.cc:2341] Cannot dlopen some GPU libraries. Please make sure the missing libraries mentioned above are installed properly if you would like to use GPU. Follow the guide at https://www.tensorflow.org/install/gpu for how to download and setup the required libraries for your platform.\nSkipping registering GPU devices...\nW0000 00:00:1751511910.121838 1989540 gpu_device.cc:2341] Cannot dlopen some GPU libraries. Please make sure the missing libraries mentioned above are installed properly if you would like to use GPU. Follow the guide at https://www.tensorflow.org/install/gpu for how to download and setup the required libraries for your platform.\nSkipping registering GPU devices...\nW0000 00:00:1751511910.121987 1989541 gpu_device.cc:2341] Cannot dlopen some GPU libraries. Please make sure the missing libraries mentioned above are installed properly if you would like to use GPU. Follow the guide at https://www.tensorflow.org/install/gpu for how to download and setup the required libraries for your platform.\nSkipping registering GPU devices...\nwandb: Currently logged in as: mihir-mahajan2002 (instant-uv) to https://api.wandb.ai. Use `wandb login --relogin` to force relogin\nwandb: Tracking run with wandb version 0.19.11\nwandb: Run data is saved locally in /hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar_jobs/wandb/run-20250703_050542-9ohe3ypl\nwandb: Run `wandb offline` to turn off syncing.\nwandb: Syncing run tokenizer-batch-size-scaling-1-node-3313570\nwandb: ⭐️ View project at https://wandb.ai/instant-uv/jafar\nwandb: 🚀 View run at https://wandb.ai/instant-uv/jafar/runs/9ohe3ypl\nRunning on 4 devices.\nCounting all components: ['encoder', 'vq', 'decoder']\nParameter counts:\n{'encoder': 18978432, 'vq': 32768, 'decoder': 18978416, 'total': 37989616}\nRunning on 4 devices.\nCounting all components: ['encoder', 'vq', 'decoder']\nParameter counts:\n{'encoder': 18978432, 'vq': 32768, 'decoder': 18978416, 'total': 37989616}\nTraceback (most recent call last):\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar_jobs/train_tokenizer.py"", line 213, in \nTraceback (most recent call last):\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar_jobs/train_tokenizer.py"", line 213, in \n for x in os.listdir(args.data_dir)\nFileNotFoundError: [Errno 2] No such file or directory: 'hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data/open_ai_minecraft_tfrecord'\n for x in os.listdir(args.data_dir)\nFileNotFoundError: [Errno 2] No such file or directory: 'hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data/open_ai_minecraft_tfrecord'\nRunning on 4 devices.\nCounting all components: ['encoder', 'vq', 'decoder']\nParameter counts:\n{'encoder': 18978432, 'vq': 32768, 'decoder': 18978416, 'total': 37989616}\nTraceback (most recent call last):\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar_jobs/train_tokenizer.py"", line 213, in \n for x in os.listdir(args.data_dir)\nFileNotFoundError: [Errno 2] No such file or directory: 'hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data/open_ai_minecraft_tfrecord'\nRunning on 4 devices.\nCounting all components: ['encoder', 'vq', 'decoder']\nParameter counts:\n{'encoder': 18978432, 'vq': 32768, 'decoder': 18978416, 'total': 37989616}\nTraceback 
(most recent call last):\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar_jobs/train_tokenizer.py"", line 213, in \n for x in os.listdir(args.data_dir)\nFileNotFoundError: [Errno 2] No such file or directory: 'hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data/open_ai_minecraft_tfrecord'\nwandb: \nwandb: 🚀 View run tokenizer-batch-size-scaling-1-node-3313570 at: https://wandb.ai/instant-uv/jafar/runs/9ohe3ypl\nwandb: Find logs at: ../../../../../hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar_jobs/wandb/run-20250703_050542-9ohe3ypl/logs\nsrun: error: hkn0512: tasks 0,2-3: Exited with exit code 1\nsrun: error: hkn0512: task 1: Exited with exit code 1\n\n============================= JOB FEEDBACK =============================\n\nJob ID: 3313570\nCluster: hk\nUser/Group: tum_cte0515/hk-project-p0023960\nAccount: hk-project-p0023960\nState: FAILED (exit code 1)\nPartition: accelerated\nNodes: 1\nCores per node: 24\nNodelist: hkn0512\nCPU Utilized: 00:05:01\nCPU Efficiency: 10.11% of 00:49:36 core-walltime\nJob Wall-clock time: 00:02:04\nStarttime: Thu Jul 3 05:03:48 2025\nEndtime: Thu Jul 3 05:05:52 2025\nMemory Utilized: 6.66 GB (estimated maximum)\nMemory Efficiency: 0.00% of 0.00 MB (0.00 MB/node)\nEnergy Consumed: 128622 Joule / 35.7283333333333 Watthours\nAverage node power draw: 1037.27419354839 Watt\n",log,tab +13077,9856520,"/home/hk-project-p0023960/tum_cte0515/Projects/jafar_jobs/logs/logs_training/train_tokenizer_batch_size_scaling_1_node_3313570.log",10716,0,"",log,selection_mouse +13078,9856681,"/home/hk-project-p0023960/tum_cte0515/Projects/jafar_jobs/logs/logs_training/train_tokenizer_batch_size_scaling_1_node_3313570.log",10715,4,"work",log,selection_mouse +13079,9856923,"/home/hk-project-p0023960/tum_cte0515/Projects/jafar_jobs/logs/logs_training/train_tokenizer_batch_size_scaling_1_node_3313570.log",10715,14,"work/workspace",log,selection_mouse +13080,9856924,"/home/hk-project-p0023960/tum_cte0515/Projects/jafar_jobs/logs/logs_training/train_tokenizer_batch_size_scaling_1_node_3313570.log",10715,22,"work/workspace/scratch",log,selection_mouse +13081,9856924,"/home/hk-project-p0023960/tum_cte0515/Projects/jafar_jobs/logs/logs_training/train_tokenizer_batch_size_scaling_1_node_3313570.log",10715,34,"work/workspace/scratch/tum_ind3695",log,selection_mouse +13082,9856960,"/home/hk-project-p0023960/tum_cte0515/Projects/jafar_jobs/logs/logs_training/train_tokenizer_batch_size_scaling_1_node_3313570.log",10715,49,"work/workspace/scratch/tum_ind3695-jafa_ws_shared",log,selection_mouse +13083,9857045,"/home/hk-project-p0023960/tum_cte0515/Projects/jafar_jobs/logs/logs_training/train_tokenizer_batch_size_scaling_1_node_3313570.log",10715,50,"work/workspace/scratch/tum_ind3695-jafa_ws_shared/",log,selection_mouse +13084,9857045,"/home/hk-project-p0023960/tum_cte0515/Projects/jafar_jobs/logs/logs_training/train_tokenizer_batch_size_scaling_1_node_3313570.log",10715,54,"work/workspace/scratch/tum_ind3695-jafa_ws_shared/data",log,selection_mouse +13085,9857101,"/home/hk-project-p0023960/tum_cte0515/Projects/jafar_jobs/logs/logs_training/train_tokenizer_batch_size_scaling_1_node_3313570.log",10715,55,"work/workspace/scratch/tum_ind3695-jafa_ws_shared/data/",log,selection_mouse +13086,9857116,"/home/hk-project-p0023960/tum_cte0515/Projects/jafar_jobs/logs/logs_training/train_tokenizer_batch_size_scaling_1_node_3313570.log",10715,81,"work/workspace/scratch/tum_ind3695-jafa_ws_shared/data/open_ai_minecraft_tfrecord",log,selection_mouse 
+13087,9857903,"/home/hk-project-p0023960/tum_cte0515/Projects/jafar_jobs/logs/logs_training/train_tokenizer_batch_size_scaling_1_node_3313570.log",10789,0,"",log,selection_mouse +13088,9858050,"/home/hk-project-p0023960/tum_cte0515/Projects/jafar_jobs/logs/logs_training/train_tokenizer_batch_size_scaling_1_node_3313570.log",10770,26,"open_ai_minecraft_tfrecord",log,selection_mouse +13089,9858263,"/home/hk-project-p0023960/tum_cte0515/Projects/jafar_jobs/logs/logs_training/train_tokenizer_batch_size_scaling_1_node_3313570.log",10770,49,"open_ai_minecraft_tfrecord'\nRunning on 4 devices.",log,selection_mouse +13090,9858699,"/home/hk-project-p0023960/tum_cte0515/Projects/jafar_jobs/logs/logs_training/train_tokenizer_batch_size_scaling_1_node_3313570.log",10720,76,"workspace/scratch/tum_ind3695-jafa_ws_shared/data/open_ai_minecraft_tfrecord",log,selection_mouse +13091,9858699,"/home/hk-project-p0023960/tum_cte0515/Projects/jafar_jobs/logs/logs_training/train_tokenizer_batch_size_scaling_1_node_3313570.log",10719,77,"/workspace/scratch/tum_ind3695-jafa_ws_shared/data/open_ai_minecraft_tfrecord",log,selection_mouse +13092,9858700,"/home/hk-project-p0023960/tum_cte0515/Projects/jafar_jobs/logs/logs_training/train_tokenizer_batch_size_scaling_1_node_3313570.log",10715,81,"work/workspace/scratch/tum_ind3695-jafa_ws_shared/data/open_ai_minecraft_tfrecord",log,selection_mouse +13093,9858830,"/home/hk-project-p0023960/tum_cte0515/Projects/jafar_jobs/logs/logs_training/train_tokenizer_batch_size_scaling_1_node_3313570.log",10714,82,"/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data/open_ai_minecraft_tfrecord",log,selection_mouse +13094,9858875,"/home/hk-project-p0023960/tum_cte0515/Projects/jafar_jobs/logs/logs_training/train_tokenizer_batch_size_scaling_1_node_3313570.log",10710,86,"hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data/open_ai_minecraft_tfrecord",log,selection_mouse +13095,9861271,"scripts_horeka/batchsize_scaling/adjusted_lr/train_tokenizer_4_nodes.sbatch",0,0,"",shellscript,tab +13096,9861272,"scripts_horeka/batchsize_scaling/adjusted_lr/train_tokenizer_4_nodes.sbatch",476,0,"",shellscript,selection_mouse +13097,9862622,"scripts_horeka/batchsize_scaling/adjusted_lr/train_tokenizer_4_nodes.sbatch",476,0,"/",shellscript,content +13098,9862623,"scripts_horeka/batchsize_scaling/adjusted_lr/train_tokenizer_4_nodes.sbatch",477,0,"",shellscript,selection_keyboard +13099,9864696,"scripts_horeka/batchsize_scaling/adjusted_lr/train_tokenizer_2_nodes.sbatch",0,0,"",shellscript,tab +13100,9866493,"scripts_horeka/batchsize_scaling/adjusted_lr/train_tokenizer_2_nodes.sbatch",476,0,"",shellscript,selection_mouse +13101,9867852,"scripts_horeka/batchsize_scaling/adjusted_lr/train_tokenizer_2_nodes.sbatch",476,0,"/",shellscript,content +13102,9867853,"scripts_horeka/batchsize_scaling/adjusted_lr/train_tokenizer_2_nodes.sbatch",477,0,"",shellscript,selection_keyboard +13103,9869926,"scripts_horeka/batchsize_scaling/adjusted_lr/train_tokenizer_1_nodes.sbatch",0,0,"",shellscript,tab +13104,9871339,"scripts_horeka/batchsize_scaling/adjusted_lr/train_tokenizer_1_nodes.sbatch",457,0,"",shellscript,selection_mouse +13105,9872746,"scripts_horeka/batchsize_scaling/adjusted_lr/train_tokenizer_1_nodes.sbatch",457,0,"/",shellscript,content +13106,9872748,"scripts_horeka/batchsize_scaling/adjusted_lr/train_tokenizer_1_nodes.sbatch",458,0,"",shellscript,selection_keyboard +13107,9873676,"scripts_horeka/batchsize_scaling/adjusted_lr/train_tokenizer_1_nodes.sbatch",457,0,"",shellscript,selection_command 
+13108,9874904,"scripts_horeka/batchsize_scaling/adjusted_lr/train_tokenizer_1_nodes.sbatch",698,0,"",shellscript,selection_mouse +13109,9875514,"scripts_horeka/batchsize_scaling/adjusted_lr/train_tokenizer_1_nodes.sbatch",689,0,"",shellscript,selection_mouse +13110,9876895,"scripts_horeka/batchsize_scaling/adjusted_lr/train_tokenizer_1_nodes.sbatch",610,0,"",shellscript,selection_mouse +13111,9876923,"scripts_horeka/batchsize_scaling/adjusted_lr/train_tokenizer_1_nodes.sbatch",609,0,"",shellscript,selection_command +13112,9877227,"scripts_horeka/batchsize_scaling/adjusted_lr/train_tokenizer_1_nodes.sbatch",606,3,"ed/",shellscript,selection_mouse +13113,9877228,"scripts_horeka/batchsize_scaling/adjusted_lr/train_tokenizer_1_nodes.sbatch",602,7,"shared/",shellscript,selection_mouse +13114,9877228,"scripts_horeka/batchsize_scaling/adjusted_lr/train_tokenizer_1_nodes.sbatch",598,11,"_ws_shared/",shellscript,selection_mouse +13115,9877228,"scripts_horeka/batchsize_scaling/adjusted_lr/train_tokenizer_1_nodes.sbatch",593,16,"-jafa_ws_shared/",shellscript,selection_mouse +13116,9877228,"scripts_horeka/batchsize_scaling/adjusted_lr/train_tokenizer_1_nodes.sbatch",589,20,"3695-jafa_ws_shared/",shellscript,selection_mouse +13117,9877228,"scripts_horeka/batchsize_scaling/adjusted_lr/train_tokenizer_1_nodes.sbatch",584,25,"m_ind3695-jafa_ws_shared/",shellscript,selection_mouse +13118,9877229,"scripts_horeka/batchsize_scaling/adjusted_lr/train_tokenizer_1_nodes.sbatch",581,28,"/tum_ind3695-jafa_ws_shared/",shellscript,selection_mouse +13119,9877229,"scripts_horeka/batchsize_scaling/adjusted_lr/train_tokenizer_1_nodes.sbatch",577,32,"atch/tum_ind3695-jafa_ws_shared/",shellscript,selection_mouse +13120,9877254,"scripts_horeka/batchsize_scaling/adjusted_lr/train_tokenizer_1_nodes.sbatch",606,4,"ed/'",shellscript,selection_command +13121,9877255,"scripts_horeka/batchsize_scaling/adjusted_lr/train_tokenizer_1_nodes.sbatch",571,39,"ce/scratch/tum_ind3695-jafa_ws_shared/'",shellscript,selection_mouse +13122,9877305,"scripts_horeka/batchsize_scaling/adjusted_lr/train_tokenizer_1_nodes.sbatch",566,44,"rkspace/scratch/tum_ind3695-jafa_ws_shared/'",shellscript,selection_mouse +13123,9877337,"scripts_horeka/batchsize_scaling/adjusted_lr/train_tokenizer_1_nodes.sbatch",563,47,"/workspace/scratch/tum_ind3695-jafa_ws_shared/'",shellscript,selection_mouse +13124,9877368,"scripts_horeka/batchsize_scaling/adjusted_lr/train_tokenizer_1_nodes.sbatch",562,48,"k/workspace/scratch/tum_ind3695-jafa_ws_shared/'",shellscript,selection_mouse +13125,9877406,"scripts_horeka/batchsize_scaling/adjusted_lr/train_tokenizer_1_nodes.sbatch",561,49,"rk/workspace/scratch/tum_ind3695-jafa_ws_shared/'",shellscript,selection_mouse +13126,9877430,"scripts_horeka/batchsize_scaling/adjusted_lr/train_tokenizer_1_nodes.sbatch",560,50,"ork/workspace/scratch/tum_ind3695-jafa_ws_shared/'",shellscript,selection_mouse +13127,9877464,"scripts_horeka/batchsize_scaling/adjusted_lr/train_tokenizer_1_nodes.sbatch",559,51,"work/workspace/scratch/tum_ind3695-jafa_ws_shared/'",shellscript,selection_mouse +13128,9877496,"scripts_horeka/batchsize_scaling/adjusted_lr/train_tokenizer_1_nodes.sbatch",558,52,"/work/workspace/scratch/tum_ind3695-jafa_ws_shared/'",shellscript,selection_mouse +13129,9877527,"scripts_horeka/batchsize_scaling/adjusted_lr/train_tokenizer_1_nodes.sbatch",557,53,"s/work/workspace/scratch/tum_ind3695-jafa_ws_shared/'",shellscript,selection_mouse 
+13130,9877563,"scripts_horeka/batchsize_scaling/adjusted_lr/train_tokenizer_1_nodes.sbatch",555,55,"kfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/'",shellscript,selection_mouse +13131,9877615,"scripts_horeka/batchsize_scaling/adjusted_lr/train_tokenizer_1_nodes.sbatch",554,56,"hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/'",shellscript,selection_mouse +13132,9878004,"scripts_horeka/batchsize_scaling/adjusted_lr/train_tokenizer_1_nodes.sbatch",554,0,"",shellscript,selection_mouse +13133,9878873,"TERMINAL",0,0,"bash",,terminal_focus +13134,9882913,"TERMINAL",0,0,"srun",,terminal_focus +13135,9884384,"TERMINAL",0,0,"[?25lc[?25h",,terminal_output +13136,9884445,"TERMINAL",0,0,"[?25ld[?25h",,terminal_output +13137,9884699,"TERMINAL",0,0,"[?25l [?25h[?25l.[?25h",,terminal_output +13138,9884852,"TERMINAL",0,0,"[?25l.[?25h",,terminal_output +13139,9885036,"TERMINAL",0,0,"\r\n[?2004l\r]0;tum_cte0515@hkn0901:~/Projects[?2004h(jafar) [tum_cte0515@hkn0901 Projects]$ ",,terminal_output +13140,9885377,"TERMINAL",0,0,"[?25ll[?25h",,terminal_output +13141,9885443,"TERMINAL",0,0,"[?25ls[?25h",,terminal_output +13142,9885599,"TERMINAL",0,0,"\r\n[?2004l\rjafar jafar_jobs\r\n]0;tum_cte0515@hkn0901:~/Projects[?2004h(jafar) [tum_cte0515@hkn0901 Projects]$ ",,terminal_output +13143,9886354,"TERMINAL",0,0,"[?25lm[?25h",,terminal_output +13144,9886491,"TERMINAL",0,0,"[?25lk[?25h",,terminal_output +13145,9886556,"TERMINAL",0,0,"[?25ld[?25h",,terminal_output +13146,9886687,"TERMINAL",0,0,"[?25li[?25h",,terminal_output +13147,9886750,"TERMINAL",0,0,"[?25lr[?25h",,terminal_output +13148,9886813,"TERMINAL",0,0,"[?25l [?25h",,terminal_output +13149,9887594,"TERMINAL",0,0,"[?25lc[?25h",,terminal_output +13150,9887656,"TERMINAL",0,0,"[?25lh[?25h",,terminal_output +13151,9888611,"TERMINAL",0,0,"[?25le[?25h",,terminal_output +13152,9888806,"TERMINAL",0,0,"[?25lc[?25h[?25lk[?25h",,terminal_output +13153,9889062,"TERMINAL",0,0,"[?25lp[?25h",,terminal_output +13154,9889201,"TERMINAL",0,0,"[?25lo[?25h",,terminal_output +13155,9889513,"TERMINAL",0,0,"[?25lin[?25h[?25lt[?25h",,terminal_output +13156,9889741,"TERMINAL",0,0,"[?25ls[?25h",,terminal_output +13157,9892185,"TERMINAL",0,0,"\r\n[?2004l\r]0;tum_cte0515@hkn0901:~/Projects[?2004h(jafar) [tum_cte0515@hkn0901 Projects]$ ",,terminal_output +13158,9892462,"TERMINAL",0,0,"[?25ll[?25h",,terminal_output +13159,9892619,"TERMINAL",0,0,"[?25ls[?25h\r\n[?2004l\rcheckpoints jafar jafar_jobs\r\n]0;tum_cte0515@hkn0901:~/Projects[?2004h(jafar) [tum_cte0515@hkn0901 Projects]$ ",,terminal_output +13160,9894473,"TERMINAL",0,0,"[?25lc[?25h",,terminal_output +13161,9894535,"TERMINAL",0,0,"[?25ld[?25h",,terminal_output +13162,9894717,"TERMINAL",0,0,"[?25l [?25h",,terminal_output +13163,9895225,"TERMINAL",0,0,"[?25lc[?25h[?25lh[?25h",,terminal_output +13164,9895648,"TERMINAL",0,0,"eckpoints/",,terminal_output +13165,9896038,"TERMINAL",0,0,"\r\n[?2004l\r]0;tum_cte0515@hkn0901:~/Projects/checkpoints[?2004h(jafar) [tum_cte0515@hkn0901 checkpoints]$ ",,terminal_output +13166,9896562,"TERMINAL",0,0,"[?25lp[?25h",,terminal_output +13167,9896629,"TERMINAL",0,0,"[?25lw[?25h",,terminal_output +13168,9896732,"TERMINAL",0,0,"[?25ld[?25h",,terminal_output +13169,9896859,"TERMINAL",0,0,"\r\n[?2004l\r/home/hk-project-p0023960/tum_cte0515/Projects/checkpoints\r\n]0;tum_cte0515@hkn0901:~/Projects/checkpoints[?2004h(jafar) [tum_cte0515@hkn0901 checkpoints]$ ",,terminal_output +13170,9900751,"scripts_horeka/batchsize_scaling/adjusted_lr/train_tokenizer_1_nodes.sbatch",0,0,"",shellscript,tab 
+13171,9900752,"scripts_horeka/batchsize_scaling/adjusted_lr/train_tokenizer_1_nodes.sbatch",664,0,"",shellscript,selection_mouse +13172,9901860,"scripts_horeka/batchsize_scaling/adjusted_lr/train_tokenizer_1_nodes.sbatch",680,0,"",shellscript,selection_mouse +13173,9902084,"scripts_horeka/batchsize_scaling/adjusted_lr/train_tokenizer_1_nodes.sbatch",680,1,"$",shellscript,selection_mouse +13174,9902084,"scripts_horeka/batchsize_scaling/adjusted_lr/train_tokenizer_1_nodes.sbatch",680,4,"$ws_",shellscript,selection_mouse +13175,9902084,"scripts_horeka/batchsize_scaling/adjusted_lr/train_tokenizer_1_nodes.sbatch",680,6,"$ws_di",shellscript,selection_mouse +13176,9902168,"scripts_horeka/batchsize_scaling/adjusted_lr/train_tokenizer_1_nodes.sbatch",680,8,"$ws_dir/",shellscript,selection_mouse +13177,9902168,"scripts_horeka/batchsize_scaling/adjusted_lr/train_tokenizer_1_nodes.sbatch",680,9,"$ws_dir/c",shellscript,selection_mouse +13178,9902168,"scripts_horeka/batchsize_scaling/adjusted_lr/train_tokenizer_1_nodes.sbatch",680,11,"$ws_dir/che",shellscript,selection_mouse +13179,9902203,"scripts_horeka/batchsize_scaling/adjusted_lr/train_tokenizer_1_nodes.sbatch",680,12,"$ws_dir/chec",shellscript,selection_mouse +13180,9902229,"scripts_horeka/batchsize_scaling/adjusted_lr/train_tokenizer_1_nodes.sbatch",680,13,"$ws_dir/check",shellscript,selection_mouse +13181,9902252,"scripts_horeka/batchsize_scaling/adjusted_lr/train_tokenizer_1_nodes.sbatch",680,14,"$ws_dir/checkp",shellscript,selection_mouse +13182,9902283,"scripts_horeka/batchsize_scaling/adjusted_lr/train_tokenizer_1_nodes.sbatch",680,15,"$ws_dir/checkpo",shellscript,selection_mouse +13183,9902315,"scripts_horeka/batchsize_scaling/adjusted_lr/train_tokenizer_1_nodes.sbatch",680,16,"$ws_dir/checkpoi",shellscript,selection_mouse +13184,9902344,"scripts_horeka/batchsize_scaling/adjusted_lr/train_tokenizer_1_nodes.sbatch",680,17,"$ws_dir/checkpoin",shellscript,selection_mouse +13185,9902375,"scripts_horeka/batchsize_scaling/adjusted_lr/train_tokenizer_1_nodes.sbatch",680,18,"$ws_dir/checkpoint",shellscript,selection_mouse +13186,9902524,"scripts_horeka/batchsize_scaling/adjusted_lr/train_tokenizer_1_nodes.sbatch",680,19,"$ws_dir/checkpoints",shellscript,selection_mouse +13187,9903618,"scripts_horeka/batchsize_scaling/adjusted_lr/train_tokenizer_1_nodes.sbatch",680,19,"",shellscript,content +13188,9904312,"scripts_horeka/batchsize_scaling/adjusted_lr/train_tokenizer_1_nodes.sbatch",680,0,"/home/hk-project-p0023960/tum_cte0515/Projects/checkpoints",shellscript,content +13189,9908070,"scripts_horeka/batchsize_scaling/adjusted_lr/train_tokenizer_1_nodes.sbatch",749,0,"",shellscript,selection_mouse +13190,9908653,"scripts_horeka/batchsize_scaling/adjusted_lr/train_tokenizer_1_nodes.sbatch",748,1,"",shellscript,content +13191,9909186,"scripts_horeka/batchsize_scaling/adjusted_lr/train_tokenizer_1_nodes.sbatch",748,0,"/",shellscript,content +13192,9909187,"scripts_horeka/batchsize_scaling/adjusted_lr/train_tokenizer_1_nodes.sbatch",749,0,"",shellscript,selection_keyboard +13193,9910634,"scripts_horeka/batchsize_scaling/adjusted_lr/train_tokenizer_1_nodes.sbatch",745,0,"",shellscript,selection_mouse +13194,9910803,"scripts_horeka/batchsize_scaling/adjusted_lr/train_tokenizer_1_nodes.sbatch",740,8,"job_name",shellscript,selection_mouse +13195,9911659,"scripts_horeka/batchsize_scaling/adjusted_lr/train_tokenizer_1_nodes.sbatch",725,0,"",shellscript,selection_mouse 
+13196,9914045,"scripts_horeka/batchsize_scaling/adjusted_lr/train_tokenizer_2_nodes.sbatch",0,0,"",shellscript,tab +13197,9915618,"scripts_horeka/batchsize_scaling/adjusted_lr/train_tokenizer_2_nodes.sbatch",699,0,"",shellscript,selection_mouse +13198,9915814,"scripts_horeka/batchsize_scaling/adjusted_lr/train_tokenizer_2_nodes.sbatch",699,2,"$w",shellscript,selection_mouse +13199,9915815,"scripts_horeka/batchsize_scaling/adjusted_lr/train_tokenizer_2_nodes.sbatch",699,5,"$ws_d",shellscript,selection_mouse +13200,9915815,"scripts_horeka/batchsize_scaling/adjusted_lr/train_tokenizer_2_nodes.sbatch",699,7,"$ws_dir",shellscript,selection_mouse +13201,9915815,"scripts_horeka/batchsize_scaling/adjusted_lr/train_tokenizer_2_nodes.sbatch",699,9,"$ws_dir/c",shellscript,selection_mouse +13202,9915815,"scripts_horeka/batchsize_scaling/adjusted_lr/train_tokenizer_2_nodes.sbatch",699,11,"$ws_dir/che",shellscript,selection_mouse +13203,9915815,"scripts_horeka/batchsize_scaling/adjusted_lr/train_tokenizer_2_nodes.sbatch",699,13,"$ws_dir/check",shellscript,selection_mouse +13204,9915904,"scripts_horeka/batchsize_scaling/adjusted_lr/train_tokenizer_2_nodes.sbatch",699,14,"$ws_dir/checkp",shellscript,selection_mouse +13205,9915953,"scripts_horeka/batchsize_scaling/adjusted_lr/train_tokenizer_2_nodes.sbatch",699,15,"$ws_dir/checkpo",shellscript,selection_mouse +13206,9916008,"scripts_horeka/batchsize_scaling/adjusted_lr/train_tokenizer_2_nodes.sbatch",699,16,"$ws_dir/checkpoi",shellscript,selection_mouse +13207,9916061,"scripts_horeka/batchsize_scaling/adjusted_lr/train_tokenizer_2_nodes.sbatch",699,17,"$ws_dir/checkpoin",shellscript,selection_mouse +13208,9916124,"scripts_horeka/batchsize_scaling/adjusted_lr/train_tokenizer_2_nodes.sbatch",699,18,"$ws_dir/checkpoint",shellscript,selection_mouse +13209,9916279,"scripts_horeka/batchsize_scaling/adjusted_lr/train_tokenizer_2_nodes.sbatch",699,19,"$ws_dir/checkpoints",shellscript,selection_mouse +13210,9916860,"scripts_horeka/batchsize_scaling/adjusted_lr/train_tokenizer_2_nodes.sbatch",699,19,"",shellscript,content +13211,9917472,"scripts_horeka/batchsize_scaling/adjusted_lr/train_tokenizer_2_nodes.sbatch",699,0,"/home/hk-project-p0023960/tum_cte0515/Projects/checkpoints",shellscript,content +13212,9918964,"scripts_horeka/batchsize_scaling/adjusted_lr/train_tokenizer_2_nodes.sbatch",768,0,"",shellscript,selection_mouse +13213,9919273,"scripts_horeka/batchsize_scaling/adjusted_lr/train_tokenizer_2_nodes.sbatch",767,1,"",shellscript,content +13214,9919929,"scripts_horeka/batchsize_scaling/adjusted_lr/train_tokenizer_2_nodes.sbatch",767,0,"/",shellscript,content +13215,9919929,"scripts_horeka/batchsize_scaling/adjusted_lr/train_tokenizer_2_nodes.sbatch",768,0,"",shellscript,selection_keyboard +13216,9921727,"scripts_horeka/batchsize_scaling/adjusted_lr/train_tokenizer_4_nodes.sbatch",0,0,"",shellscript,tab +13217,9922826,"scripts_horeka/batchsize_scaling/adjusted_lr/train_tokenizer_4_nodes.sbatch",699,0,"",shellscript,selection_mouse +13218,9922946,"scripts_horeka/batchsize_scaling/adjusted_lr/train_tokenizer_4_nodes.sbatch",699,1,"$",shellscript,selection_mouse +13219,9922947,"scripts_horeka/batchsize_scaling/adjusted_lr/train_tokenizer_4_nodes.sbatch",699,2,"$w",shellscript,selection_mouse +13220,9922947,"scripts_horeka/batchsize_scaling/adjusted_lr/train_tokenizer_4_nodes.sbatch",699,3,"$ws",shellscript,selection_mouse +13221,9922947,"scripts_horeka/batchsize_scaling/adjusted_lr/train_tokenizer_4_nodes.sbatch",699,4,"$ws_",shellscript,selection_mouse 
+13222,9922984,"scripts_horeka/batchsize_scaling/adjusted_lr/train_tokenizer_4_nodes.sbatch",699,5,"$ws_d",shellscript,selection_mouse +13223,9923042,"scripts_horeka/batchsize_scaling/adjusted_lr/train_tokenizer_4_nodes.sbatch",699,6,"$ws_di",shellscript,selection_mouse +13224,9923073,"scripts_horeka/batchsize_scaling/adjusted_lr/train_tokenizer_4_nodes.sbatch",699,7,"$ws_dir",shellscript,selection_mouse +13225,9923647,"scripts_horeka/batchsize_scaling/adjusted_lr/train_tokenizer_4_nodes.sbatch",699,7,"",shellscript,content +13226,9924076,"scripts_horeka/batchsize_scaling/adjusted_lr/train_tokenizer_4_nodes.sbatch",699,0,"/home/hk-project-p0023960/tum_cte0515/Projects/checkpoints",shellscript,content +13227,9925581,"scripts_horeka/batchsize_scaling/adjusted_lr/train_tokenizer_4_nodes.sbatch",757,1,"",shellscript,content +13228,9925761,"scripts_horeka/batchsize_scaling/adjusted_lr/train_tokenizer_4_nodes.sbatch",757,1,"",shellscript,content +13229,9925892,"scripts_horeka/batchsize_scaling/adjusted_lr/train_tokenizer_4_nodes.sbatch",757,1,"",shellscript,content +13230,9926004,"scripts_horeka/batchsize_scaling/adjusted_lr/train_tokenizer_4_nodes.sbatch",757,1,"",shellscript,content +13231,9926131,"scripts_horeka/batchsize_scaling/adjusted_lr/train_tokenizer_4_nodes.sbatch",757,1,"",shellscript,content +13232,9926250,"scripts_horeka/batchsize_scaling/adjusted_lr/train_tokenizer_4_nodes.sbatch",757,1,"",shellscript,content +13233,9926378,"scripts_horeka/batchsize_scaling/adjusted_lr/train_tokenizer_4_nodes.sbatch",757,1,"",shellscript,content +13234,9926505,"scripts_horeka/batchsize_scaling/adjusted_lr/train_tokenizer_4_nodes.sbatch",757,1,"",shellscript,content +13235,9926636,"scripts_horeka/batchsize_scaling/adjusted_lr/train_tokenizer_4_nodes.sbatch",757,1,"",shellscript,content +13236,9926764,"scripts_horeka/batchsize_scaling/adjusted_lr/train_tokenizer_4_nodes.sbatch",757,1,"",shellscript,content +13237,9926962,"scripts_horeka/batchsize_scaling/adjusted_lr/train_tokenizer_4_nodes.sbatch",757,1,"",shellscript,content +13238,9927220,"scripts_horeka/batchsize_scaling/adjusted_lr/train_tokenizer_4_nodes.sbatch",757,1,"",shellscript,content +13239,9929857,"scripts_horeka/batchsize_scaling/adjusted_lr/train_tokenizer_4_nodes.sbatch",768,0,"",shellscript,selection_mouse +13240,9930221,"scripts_horeka/batchsize_scaling/adjusted_lr/train_tokenizer_4_nodes.sbatch",767,1,"",shellscript,content +13241,9930817,"scripts_horeka/batchsize_scaling/adjusted_lr/train_tokenizer_4_nodes.sbatch",767,0,"/",shellscript,content +13242,9930818,"scripts_horeka/batchsize_scaling/adjusted_lr/train_tokenizer_4_nodes.sbatch",768,0,"",shellscript,selection_keyboard +13243,9955802,"scripts_horeka/modelsize_scaling/tokenizer/train_tokenizer_37M.sbatch",0,0,"#!/usr/bin/env bash\n\n#SBATCH --nodes=2\n#SBATCH --ntasks-per-node=4\n#SBATCH --time=15:00:00\n#SBATCH --partition=accelerated\n#SBATCH --cpus-per-task=5\n#SBATCH --gres=gpu:4\n#SBATCH --output=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir/%x_%j.log\n#SBATCH --error=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir/%x_%j.log\n#SBATCH --job-name=train_tokenizer_model_size_scaling_37M\n\n# Log the sbatch script\ncat $0\n\nmodule unload mpi/openmpi/5.0\nmodule unload devel/cuda/12.4\nsource 
.venv/bin/activate\n\ntf_records_dir=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data/open_ai_minecraft_tfrecord\nws_dir='/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/'\n\njob_name=$SLURM_JOB_NAME\nslurm_job_id=$SLURM_JOB_ID\n\nCHECKPOINT_DIR=$ws_dir/checkpoints/$job_name_$slurm_job_id\nmkdir -p $CHECKPOINT_DIR\n\nenv | grep SLURM\n\nsrun python train_tokenizer.py \\n --ckpt_dir $CHECKPOINT_DIR \\n --batch_size=96 \\n --min_lr=1e-4 \\n --max_lr=1e-4 \\n --log_image_interval=500 \\n --log_checkpoint_interval=500 \\n --log \\n --name=tokenizer-model-size-scaling-38M-$slurm_job_id \\n --tags tokenizer model-size-scaling 38M \\n --entity instant-uv \\n --project jafar \\n --data_dir $tf_records_dir\n",shellscript,tab +13244,9958384,"scripts_horeka/modelsize_scaling/tokenizer/train_tokenizer_37M.sbatch",783,0,"",shellscript,selection_mouse +13245,9958659,"scripts_horeka/modelsize_scaling/tokenizer/train_tokenizer_37M.sbatch",783,1,"$",shellscript,selection_mouse +13246,9958660,"scripts_horeka/modelsize_scaling/tokenizer/train_tokenizer_37M.sbatch",783,2,"$w",shellscript,selection_mouse +13247,9958727,"scripts_horeka/modelsize_scaling/tokenizer/train_tokenizer_37M.sbatch",783,3,"$ws",shellscript,selection_mouse +13248,9958727,"scripts_horeka/modelsize_scaling/tokenizer/train_tokenizer_37M.sbatch",783,4,"$ws_",shellscript,selection_mouse +13249,9958785,"scripts_horeka/modelsize_scaling/tokenizer/train_tokenizer_37M.sbatch",783,5,"$ws_d",shellscript,selection_mouse +13250,9958786,"scripts_horeka/modelsize_scaling/tokenizer/train_tokenizer_37M.sbatch",783,6,"$ws_di",shellscript,selection_mouse +13251,9958806,"scripts_horeka/modelsize_scaling/tokenizer/train_tokenizer_37M.sbatch",783,7,"$ws_dir",shellscript,selection_mouse +13252,9958838,"scripts_horeka/modelsize_scaling/tokenizer/train_tokenizer_37M.sbatch",783,8,"$ws_dir/",shellscript,selection_mouse +13253,9958869,"scripts_horeka/modelsize_scaling/tokenizer/train_tokenizer_37M.sbatch",783,10,"$ws_dir/ch",shellscript,selection_mouse +13254,9958903,"scripts_horeka/modelsize_scaling/tokenizer/train_tokenizer_37M.sbatch",783,11,"$ws_dir/che",shellscript,selection_mouse +13255,9958935,"scripts_horeka/modelsize_scaling/tokenizer/train_tokenizer_37M.sbatch",783,13,"$ws_dir/check",shellscript,selection_mouse +13256,9958966,"scripts_horeka/modelsize_scaling/tokenizer/train_tokenizer_37M.sbatch",767,16,"\nCHECKPOINT_DIR=",shellscript,selection_mouse +13257,9959412,"scripts_horeka/modelsize_scaling/tokenizer/train_tokenizer_37M.sbatch",783,22,"$ws_dir/checkpoints/$j",shellscript,selection_mouse +13258,9959563,"scripts_horeka/modelsize_scaling/tokenizer/train_tokenizer_37M.sbatch",783,21,"$ws_dir/checkpoints/$",shellscript,selection_mouse +13259,9959833,"scripts_horeka/modelsize_scaling/tokenizer/train_tokenizer_37M.sbatch",783,20,"$ws_dir/checkpoints/",shellscript,selection_mouse +13260,9962221,"scripts_horeka/modelsize_scaling/tokenizer/train_tokenizer_37M.sbatch",783,20,"",shellscript,content +13261,9963245,"scripts_horeka/modelsize_scaling/tokenizer/train_tokenizer_37M.sbatch",783,0,"/home/hk-project-p0023960/tum_cte0515/Projects/checkpoints",shellscript,content +13262,9964569,"scripts_horeka/modelsize_scaling/tokenizer/train_tokenizer_37M.sbatch",841,0,"/",shellscript,content +13263,9964570,"scripts_horeka/modelsize_scaling/tokenizer/train_tokenizer_37M.sbatch",842,0,"",shellscript,selection_keyboard +13264,9966600,"scripts_horeka/modelsize_scaling/tokenizer/train_tokenizer_80M.sbatch",0,0,"#!/usr/bin/env bash\n\n#SBATCH 
--nodes=4\n#SBATCH --ntasks-per-node=4\n#SBATCH --time=15:00:00\n#SBATCH --partition=accelerated\n#SBATCH --cpus-per-task=5\n#SBATCH --gres=gpu:4\n#SBATCH --output=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir/%x_%j.log\n#SBATCH --error=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir/%x_%j.log\n#SBATCH --job-name=train_tokenizer_model_size_scaling_80M\n\n# Log the sbatch script\ncat $0\n\nmodule unload mpi/openmpi/5.0\nmodule unload devel/cuda/12.4\nsource .venv/bin/activate\n\ntf_records_dir=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data/open_ai_minecraft_tfrecord\nws_dir='/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/'\n\njob_name=$SLURM_JOB_NAME\nslurm_job_id=$SLURM_JOB_ID\n\nCHECKPOINT_DIR=$ws_dir/checkpoints/$job_name_$slurm_job_id\nmkdir -p $CHECKPOINT_DIR\n\nenv | grep SLURM\n\nsrun python train_tokenizer.py \\n --ckpt_dir $CHECKPOINT_DIR \\n --batch_size=96 \\n --min_lr=1e-4 \\n --max_lr=1e-4 \\n --log_image_interval=500 \\n --log_checkpoint_interval=500 \\n --log \\n --name=tokenizer-modelsize-L1-$slurm_job_id \\n --tags tokenizer model-size-scaling L1 80M \\n --entity instant-uv \\n --project jafar \\n --data_dir $tf_records_dir \\n --model_dim=768 \\n --num_blocks=12 \\n --num_heads=12 \\n --latent_dim=64 \\n --num_latents=2048\n",shellscript,tab +13265,9976380,"scripts_horeka/modelsize_scaling/tokenizer/train_tokenizer_80M.sbatch",783,0,"",shellscript,selection_mouse +13266,9976557,"scripts_horeka/modelsize_scaling/tokenizer/train_tokenizer_80M.sbatch",783,1,"$",shellscript,selection_mouse +13267,9976558,"scripts_horeka/modelsize_scaling/tokenizer/train_tokenizer_80M.sbatch",783,4,"$ws_",shellscript,selection_mouse +13268,9976558,"scripts_horeka/modelsize_scaling/tokenizer/train_tokenizer_80M.sbatch",783,6,"$ws_di",shellscript,selection_mouse +13269,9976558,"scripts_horeka/modelsize_scaling/tokenizer/train_tokenizer_80M.sbatch",783,8,"$ws_dir/",shellscript,selection_mouse +13270,9976650,"scripts_horeka/modelsize_scaling/tokenizer/train_tokenizer_80M.sbatch",783,9,"$ws_dir/c",shellscript,selection_mouse +13271,9976703,"scripts_horeka/modelsize_scaling/tokenizer/train_tokenizer_80M.sbatch",783,10,"$ws_dir/ch",shellscript,selection_mouse +13272,9976740,"scripts_horeka/modelsize_scaling/tokenizer/train_tokenizer_80M.sbatch",783,11,"$ws_dir/che",shellscript,selection_mouse +13273,9976769,"scripts_horeka/modelsize_scaling/tokenizer/train_tokenizer_80M.sbatch",783,12,"$ws_dir/chec",shellscript,selection_mouse +13274,9976808,"scripts_horeka/modelsize_scaling/tokenizer/train_tokenizer_80M.sbatch",783,13,"$ws_dir/check",shellscript,selection_mouse +13275,9976846,"scripts_horeka/modelsize_scaling/tokenizer/train_tokenizer_80M.sbatch",783,15,"$ws_dir/checkpo",shellscript,selection_mouse +13276,9976884,"scripts_horeka/modelsize_scaling/tokenizer/train_tokenizer_80M.sbatch",783,16,"$ws_dir/checkpoi",shellscript,selection_mouse +13277,9976922,"scripts_horeka/modelsize_scaling/tokenizer/train_tokenizer_80M.sbatch",783,17,"$ws_dir/checkpoin",shellscript,selection_mouse +13278,9976959,"scripts_horeka/modelsize_scaling/tokenizer/train_tokenizer_80M.sbatch",783,18,"$ws_dir/checkpoint",shellscript,selection_mouse +13279,9976997,"scripts_horeka/modelsize_scaling/tokenizer/train_tokenizer_80M.sbatch",783,19,"$ws_dir/checkpoints",shellscript,selection_mouse +13280,9977123,"scripts_horeka/modelsize_scaling/tokenizer/train_tokenizer_80M.sbatch",783,20,"$ws_dir/checkpoints/",shellscript,selection_mouse 
+13281,9977887,"scripts_horeka/modelsize_scaling/tokenizer/train_tokenizer_80M.sbatch",783,20,"",shellscript,content +13282,9978138,"scripts_horeka/modelsize_scaling/tokenizer/train_tokenizer_80M.sbatch",783,0,"i",shellscript,content +13283,9978139,"scripts_horeka/modelsize_scaling/tokenizer/train_tokenizer_80M.sbatch",784,0,"",shellscript,selection_keyboard +13284,9979838,"scripts_horeka/modelsize_scaling/tokenizer/train_tokenizer_80M.sbatch",783,1,"",shellscript,content +13285,9983582,"scripts_horeka/modelsize_scaling/tokenizer/train_tokenizer_80M.sbatch",783,0,"/home/hk-project-p0023960/tum_cte0515/Projects/checkpoints",shellscript,content +13286,9985339,"scripts_horeka/modelsize_scaling/tokenizer/train_tokenizer_80M.sbatch",841,0,"/",shellscript,content +13287,9985340,"scripts_horeka/modelsize_scaling/tokenizer/train_tokenizer_80M.sbatch",842,0,"",shellscript,selection_keyboard +13288,9987904,"scripts_horeka/modelsize_scaling/tokenizer/train_tokenizer_140M.sbatch",0,0,"#!/usr/bin/env bash\n\n#SBATCH --nodes=8\n#SBATCH --ntasks-per-node=4\n#SBATCH --time=15:00:00\n#SBATCH --partition=accelerated\n#SBATCH --cpus-per-task=5\n#SBATCH --gres=gpu:4\n#SBATCH --output=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir/%x_%j.log\n#SBATCH --error=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir/%x_%j.log\n#SBATCH --job-name=train_tokenizer_model_size_scaling_140M\n\n# Log the sbatch script\ncat $0\n\nmodule unload mpi/openmpi/5.0\nmodule unload devel/cuda/12.4\nsource .venv/bin/activate\n\ntf_records_dir=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data/open_ai_minecraft_tfrecord\nws_dir='/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/'\n\njob_name=$SLURM_JOB_NAME\nslurm_job_id=$SLURM_JOB_ID\n\nCHECKPOINT_DIR=$ws_dir/checkpoints/$job_name_$slurm_job_id\nmkdir -p $CHECKPOINT_DIR\n\nenv | grep SLURM\n\nsrun python train_tokenizer.py \\n --ckpt_dir $CHECKPOINT_DIR \\n --batch_size=96 \\n --min_lr=1e-4 \\n --max_lr=1e-4 \\n --log_image_interval=500 \\n --log_checkpoint_interval=500 \\n --log \\n --name=tokenizer-modelsize-L2-$slurm_job_id \\n --tags tokenizer model-size-scaling L1 80M \\n --entity instant-uv \\n --project jafar \\n --data_dir $tf_records_dir \\n --model_dim=768 \\n --num_blocks=12 \\n --num_heads=12 \\n --latent_dim=64 \\n --num_latents=2048\n",shellscript,tab +13289,9989282,"scripts_horeka/modelsize_scaling/tokenizer/train_tokenizer_140M.sbatch",784,0,"",shellscript,selection_mouse +13290,9989444,"scripts_horeka/modelsize_scaling/tokenizer/train_tokenizer_140M.sbatch",784,4,"$ws_",shellscript,selection_mouse +13291,9989445,"scripts_horeka/modelsize_scaling/tokenizer/train_tokenizer_140M.sbatch",784,6,"$ws_di",shellscript,selection_mouse +13292,9989445,"scripts_horeka/modelsize_scaling/tokenizer/train_tokenizer_140M.sbatch",784,8,"$ws_dir/",shellscript,selection_mouse +13293,9989445,"scripts_horeka/modelsize_scaling/tokenizer/train_tokenizer_140M.sbatch",784,10,"$ws_dir/ch",shellscript,selection_mouse +13294,9989486,"scripts_horeka/modelsize_scaling/tokenizer/train_tokenizer_140M.sbatch",784,11,"$ws_dir/che",shellscript,selection_mouse +13295,9989517,"scripts_horeka/modelsize_scaling/tokenizer/train_tokenizer_140M.sbatch",784,12,"$ws_dir/chec",shellscript,selection_mouse +13296,9989664,"scripts_horeka/modelsize_scaling/tokenizer/train_tokenizer_140M.sbatch",784,13,"$ws_dir/check",shellscript,selection_mouse 
+13297,9989665,"scripts_horeka/modelsize_scaling/tokenizer/train_tokenizer_140M.sbatch",784,14,"$ws_dir/checkp",shellscript,selection_mouse +13298,9989665,"scripts_horeka/modelsize_scaling/tokenizer/train_tokenizer_140M.sbatch",784,15,"$ws_dir/checkpo",shellscript,selection_mouse +13299,9989755,"scripts_horeka/modelsize_scaling/tokenizer/train_tokenizer_140M.sbatch",784,16,"$ws_dir/checkpoi",shellscript,selection_mouse +13300,9989782,"scripts_horeka/modelsize_scaling/tokenizer/train_tokenizer_140M.sbatch",784,17,"$ws_dir/checkpoin",shellscript,selection_mouse +13301,9989827,"scripts_horeka/modelsize_scaling/tokenizer/train_tokenizer_140M.sbatch",784,18,"$ws_dir/checkpoint",shellscript,selection_mouse +13302,9989927,"scripts_horeka/modelsize_scaling/tokenizer/train_tokenizer_140M.sbatch",784,19,"$ws_dir/checkpoints",shellscript,selection_mouse +13303,9990951,"scripts_horeka/modelsize_scaling/tokenizer/train_tokenizer_140M.sbatch",784,19,"",shellscript,content +13304,9991918,"scripts_horeka/modelsize_scaling/tokenizer/train_tokenizer_140M.sbatch",784,1,"",shellscript,content +13305,9992227,"scripts_horeka/modelsize_scaling/tokenizer/train_tokenizer_140M.sbatch",784,0,"/home/hk-project-p0023960/tum_cte0515/Projects/checkpoints",shellscript,content +13306,9993954,"scripts_horeka/modelsize_scaling/tokenizer/train_tokenizer_140M.sbatch",842,0,"/",shellscript,content +13307,9993955,"scripts_horeka/modelsize_scaling/tokenizer/train_tokenizer_140M.sbatch",843,0,"",shellscript,selection_keyboard +13308,9995662,"scripts_horeka/modelsize_scaling/tokenizer/train_tokenizer_200M.sbatch",0,0,"#!/usr/bin/env bash\n\n#SBATCH --nodes=12\n#SBATCH --ntasks-per-node=4\n#SBATCH --time=15:00:00\n#SBATCH --partition=accelerated\n#SBATCH --cpus-per-task=5\n#SBATCH --gres=gpu:4\n#SBATCH --output=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir/%x_%j.log\n#SBATCH --error=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir/%x_%j.log\n#SBATCH --job-name=train_tokenizer_model_size_scaling_200M\n\n# Log the sbatch script\ncat $0\n\nmodule unload mpi/openmpi/5.0\nmodule unload devel/cuda/12.4\nsource .venv/bin/activate\n\ntf_records_dir=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data/open_ai_minecraft_tfrecord\nws_dir='/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/'\n\njob_name=$SLURM_JOB_NAME\nslurm_job_id=$SLURM_JOB_ID\n\nCHECKPOINT_DIR=$ws_dir/checkpoints/$job_name_$slurm_job_id\nmkdir -p $CHECKPOINT_DIR\n\nenv | grep SLURM\n\nsrun python train_tokenizer.py \\n --ckpt_dir $CHECKPOINT_DIR \\n --batch_size=96 \\n --min_lr=1e-4 \\n --max_lr=1e-4 \\n --log_image_interval=500 \\n --log_checkpoint_interval=500 \\n --log \\n --name=tokenizer-modelsize-L3-$slurm_job_id \\n --tags tokenizer model-size-scaling L3 200M \\n --entity instant-uv \\n --project jafar \\n --data_dir $tf_records_dir \\n --model_dim=1152 \\n --num_blocks=16 \\n --num_heads=16 \\n --latent_dim=128 \\n --num_latents=4096\n",shellscript,tab +13309,9999459,"scripts_horeka/modelsize_scaling/tokenizer/train_tokenizer_200M.sbatch",785,0,"",shellscript,selection_mouse +13310,9999643,"scripts_horeka/modelsize_scaling/tokenizer/train_tokenizer_200M.sbatch",785,2,"$w",shellscript,selection_mouse +13311,9999643,"scripts_horeka/modelsize_scaling/tokenizer/train_tokenizer_200M.sbatch",785,5,"$ws_d",shellscript,selection_mouse +13312,9999644,"scripts_horeka/modelsize_scaling/tokenizer/train_tokenizer_200M.sbatch",785,7,"$ws_dir",shellscript,selection_mouse 
+13313,9999644,"scripts_horeka/modelsize_scaling/tokenizer/train_tokenizer_200M.sbatch",785,9,"$ws_dir/c",shellscript,selection_mouse +13314,9999644,"scripts_horeka/modelsize_scaling/tokenizer/train_tokenizer_200M.sbatch",785,11,"$ws_dir/che",shellscript,selection_mouse +13315,9999644,"scripts_horeka/modelsize_scaling/tokenizer/train_tokenizer_200M.sbatch",785,12,"$ws_dir/chec",shellscript,selection_mouse +13316,9999679,"scripts_horeka/modelsize_scaling/tokenizer/train_tokenizer_200M.sbatch",785,13,"$ws_dir/check",shellscript,selection_mouse +13317,9999715,"scripts_horeka/modelsize_scaling/tokenizer/train_tokenizer_200M.sbatch",785,14,"$ws_dir/checkp",shellscript,selection_mouse +13318,9999744,"scripts_horeka/modelsize_scaling/tokenizer/train_tokenizer_200M.sbatch",785,15,"$ws_dir/checkpo",shellscript,selection_mouse +13319,9999777,"scripts_horeka/modelsize_scaling/tokenizer/train_tokenizer_200M.sbatch",785,17,"$ws_dir/checkpoin",shellscript,selection_mouse +13320,9999810,"scripts_horeka/modelsize_scaling/tokenizer/train_tokenizer_200M.sbatch",785,18,"$ws_dir/checkpoint",shellscript,selection_mouse +13321,9999841,"scripts_horeka/modelsize_scaling/tokenizer/train_tokenizer_200M.sbatch",785,19,"$ws_dir/checkpoints",shellscript,selection_mouse +13322,9999873,"scripts_horeka/modelsize_scaling/tokenizer/train_tokenizer_200M.sbatch",785,21,"$ws_dir/checkpoints/$",shellscript,selection_mouse +13323,9999906,"scripts_horeka/modelsize_scaling/tokenizer/train_tokenizer_200M.sbatch",785,22,"$ws_dir/checkpoints/$j",shellscript,selection_mouse +13324,9999939,"scripts_horeka/modelsize_scaling/tokenizer/train_tokenizer_200M.sbatch",785,23,"$ws_dir/checkpoints/$jo",shellscript,selection_mouse +13325,9999995,"scripts_horeka/modelsize_scaling/tokenizer/train_tokenizer_200M.sbatch",785,24,"$ws_dir/checkpoints/$job",shellscript,selection_mouse +13326,10000006,"scripts_horeka/modelsize_scaling/tokenizer/train_tokenizer_200M.sbatch",785,26,"$ws_dir/checkpoints/$job_n",shellscript,selection_mouse +13327,10000058,"scripts_horeka/modelsize_scaling/tokenizer/train_tokenizer_200M.sbatch",785,27,"$ws_dir/checkpoints/$job_na",shellscript,selection_mouse +13328,10000089,"scripts_horeka/modelsize_scaling/tokenizer/train_tokenizer_200M.sbatch",785,28,"$ws_dir/checkpoints/$job_nam",shellscript,selection_mouse +13329,10000115,"scripts_horeka/modelsize_scaling/tokenizer/train_tokenizer_200M.sbatch",785,29,"$ws_dir/checkpoints/$job_name",shellscript,selection_mouse +13330,10000149,"scripts_horeka/modelsize_scaling/tokenizer/train_tokenizer_200M.sbatch",785,30,"$ws_dir/checkpoints/$job_name_",shellscript,selection_mouse +13331,10000698,"scripts_horeka/modelsize_scaling/tokenizer/train_tokenizer_200M.sbatch",785,27,"$ws_dir/checkpoints/$job_na",shellscript,selection_mouse +13332,10000727,"scripts_horeka/modelsize_scaling/tokenizer/train_tokenizer_200M.sbatch",785,25,"$ws_dir/checkpoints/$job_",shellscript,selection_mouse +13333,10000752,"scripts_horeka/modelsize_scaling/tokenizer/train_tokenizer_200M.sbatch",785,24,"$ws_dir/checkpoints/$job",shellscript,selection_mouse +13334,10000784,"scripts_horeka/modelsize_scaling/tokenizer/train_tokenizer_200M.sbatch",785,23,"$ws_dir/checkpoints/$jo",shellscript,selection_mouse +13335,10000979,"scripts_horeka/modelsize_scaling/tokenizer/train_tokenizer_200M.sbatch",785,22,"$ws_dir/checkpoints/$j",shellscript,selection_mouse +13336,10001129,"scripts_horeka/modelsize_scaling/tokenizer/train_tokenizer_200M.sbatch",785,21,"$ws_dir/checkpoints/$",shellscript,selection_mouse 
+13337,10001171,"scripts_horeka/modelsize_scaling/tokenizer/train_tokenizer_200M.sbatch",785,20,"$ws_dir/checkpoints/",shellscript,selection_mouse +13338,10002059,"scripts_horeka/modelsize_scaling/tokenizer/train_tokenizer_200M.sbatch",785,20,"",shellscript,content +13339,10003033,"scripts_horeka/modelsize_scaling/tokenizer/train_tokenizer_200M.sbatch",785,0,"/home/hk-project-p0023960/tum_cte0515/Projects/checkpoints",shellscript,content +13340,10004232,"scripts_horeka/modelsize_scaling/tokenizer/train_tokenizer_200M.sbatch",843,0,"/",shellscript,content +13341,10004233,"scripts_horeka/modelsize_scaling/tokenizer/train_tokenizer_200M.sbatch",844,0,"",shellscript,selection_keyboard +13342,10005939,"scripts_horeka/modelsize_scaling/tokenizer/train_tokenizer_200M.sbatch",853,1,"",shellscript,content +13343,10006703,"scripts_horeka/modelsize_scaling/tokenizer/train_tokenizer_200M.sbatch",853,0,"/",shellscript,content +13344,10006703,"scripts_horeka/modelsize_scaling/tokenizer/train_tokenizer_200M.sbatch",854,0,"",shellscript,selection_keyboard +13345,10007892,"scripts_horeka/modelsize_scaling/tokenizer/train_tokenizer_200M.sbatch",849,0,"",shellscript,selection_mouse +13346,10008050,"scripts_horeka/modelsize_scaling/tokenizer/train_tokenizer_200M.sbatch",845,8,"job_name",shellscript,selection_mouse +13347,10008856,"scripts_horeka/modelsize_scaling/tokenizer/train_tokenizer_200M.sbatch",835,0,"",shellscript,selection_mouse +13348,10011392,"scripts_horeka/modelsize_scaling/tokenizer/train_tokenizer_140M.sbatch",0,0,"",shellscript,tab +13349,10012612,"scripts_horeka/modelsize_scaling/tokenizer/train_tokenizer_200M.sbatch",0,0,"",shellscript,tab +13350,10013841,"scripts_horeka/modelsize_scaling/tokenizer/train_tokenizer_140M.sbatch",0,0,"",shellscript,tab +13351,10015039,"scripts_horeka/modelsize_scaling/tokenizer/train_tokenizer_140M.sbatch",852,0,"",shellscript,selection_mouse +13352,10015967,"scripts_horeka/modelsize_scaling/tokenizer/train_tokenizer_140M.sbatch",852,1,"",shellscript,content +13353,10017823,"scripts_horeka/modelsize_scaling/tokenizer/train_tokenizer_140M.sbatch",852,0,"/",shellscript,content +13354,10017825,"scripts_horeka/modelsize_scaling/tokenizer/train_tokenizer_140M.sbatch",853,0,"",shellscript,selection_keyboard +13355,10019396,"scripts_horeka/modelsize_scaling/tokenizer/train_tokenizer_80M.sbatch",0,0,"",shellscript,tab +13356,10020868,"scripts_horeka/modelsize_scaling/tokenizer/train_tokenizer_80M.sbatch",852,0,"",shellscript,selection_mouse +13357,10021732,"scripts_horeka/modelsize_scaling/tokenizer/train_tokenizer_80M.sbatch",851,1,"",shellscript,content +13358,10022368,"scripts_horeka/modelsize_scaling/tokenizer/train_tokenizer_80M.sbatch",851,0,"/",shellscript,content +13359,10022369,"scripts_horeka/modelsize_scaling/tokenizer/train_tokenizer_80M.sbatch",852,0,"",shellscript,selection_keyboard +13360,10023981,"scripts_horeka/modelsize_scaling/tokenizer/train_tokenizer_37M.sbatch",0,0,"",shellscript,tab +13361,10025566,"scripts_horeka/modelsize_scaling/tokenizer/train_tokenizer_37M.sbatch",851,0,"",shellscript,selection_mouse +13362,10026416,"scripts_horeka/modelsize_scaling/tokenizer/train_tokenizer_37M.sbatch",851,1,"",shellscript,content +13363,10027072,"scripts_horeka/modelsize_scaling/tokenizer/train_tokenizer_37M.sbatch",851,0,"/",shellscript,content +13364,10027072,"scripts_horeka/modelsize_scaling/tokenizer/train_tokenizer_37M.sbatch",852,0,"",shellscript,selection_keyboard 
+13365,10030457,"scripts_horeka/modelsize_scaling/tokenizer/train_tokenizer_80M.sbatch",0,0,"",shellscript,tab +13366,10032628,"scripts_horeka/modelsize_scaling/tokenizer/train_tokenizer_37M.sbatch",0,0,"",shellscript,tab +13367,10034050,"scripts_horeka/modelsize_scaling/tokenizer/train_tokenizer_80M.sbatch",0,0,"",shellscript,tab +13368,10035218,"scripts_horeka/modelsize_scaling/tokenizer/train_tokenizer_80M.sbatch",1149,0,"",shellscript,selection_mouse +13369,10036098,"scripts_horeka/modelsize_scaling/tokenizer/train_tokenizer_80M.sbatch",1150,0,"",shellscript,selection_command +13370,10036291,"scripts_horeka/modelsize_scaling/tokenizer/train_tokenizer_80M.sbatch",1149,1,"",shellscript,content +13371,10036393,"scripts_horeka/modelsize_scaling/tokenizer/train_tokenizer_80M.sbatch",1148,1,"",shellscript,content +13372,10037184,"scripts_horeka/modelsize_scaling/tokenizer/train_tokenizer_80M.sbatch",1148,0,"8",shellscript,content +13373,10037185,"scripts_horeka/modelsize_scaling/tokenizer/train_tokenizer_80M.sbatch",1149,0,"",shellscript,selection_keyboard +13374,10037268,"scripts_horeka/modelsize_scaling/tokenizer/train_tokenizer_80M.sbatch",1149,0,"0",shellscript,content +13375,10037269,"scripts_horeka/modelsize_scaling/tokenizer/train_tokenizer_80M.sbatch",1150,0,"",shellscript,selection_keyboard +13376,10038757,"scripts_horeka/modelsize_scaling/tokenizer/train_tokenizer_80M.sbatch",1150,0,"M",shellscript,content +13377,10038757,"scripts_horeka/modelsize_scaling/tokenizer/train_tokenizer_80M.sbatch",1151,0,"",shellscript,selection_keyboard +13378,10040297,"scripts_horeka/modelsize_scaling/tokenizer/train_tokenizer_140M.sbatch",0,0,"",shellscript,tab +13379,10041616,"scripts_horeka/modelsize_scaling/tokenizer/train_tokenizer_140M.sbatch",1151,0,"",shellscript,selection_mouse +13380,10042148,"scripts_horeka/modelsize_scaling/tokenizer/train_tokenizer_140M.sbatch",1150,1,"",shellscript,content +13381,10042267,"scripts_horeka/modelsize_scaling/tokenizer/train_tokenizer_140M.sbatch",1149,1,"",shellscript,content +13382,10042816,"scripts_horeka/modelsize_scaling/tokenizer/train_tokenizer_140M.sbatch",1149,0,"1",shellscript,content +13383,10042817,"scripts_horeka/modelsize_scaling/tokenizer/train_tokenizer_140M.sbatch",1150,0,"",shellscript,selection_keyboard +13384,10043064,"scripts_horeka/modelsize_scaling/tokenizer/train_tokenizer_140M.sbatch",1150,0,"4",shellscript,content +13385,10043065,"scripts_horeka/modelsize_scaling/tokenizer/train_tokenizer_140M.sbatch",1151,0,"",shellscript,selection_keyboard +13386,10043176,"scripts_horeka/modelsize_scaling/tokenizer/train_tokenizer_140M.sbatch",1151,0,"0",shellscript,content +13387,10043177,"scripts_horeka/modelsize_scaling/tokenizer/train_tokenizer_140M.sbatch",1152,0,"",shellscript,selection_keyboard +13388,10043839,"scripts_horeka/modelsize_scaling/tokenizer/train_tokenizer_140M.sbatch",1152,0,"M",shellscript,content +13389,10043840,"scripts_horeka/modelsize_scaling/tokenizer/train_tokenizer_140M.sbatch",1153,0,"",shellscript,selection_keyboard +13390,10045078,"scripts_horeka/modelsize_scaling/tokenizer/train_tokenizer_200M.sbatch",0,0,"",shellscript,tab +13391,10045877,"scripts_horeka/modelsize_scaling/tokenizer/train_tokenizer_200M.sbatch",1152,0,"",shellscript,selection_mouse +13392,10046485,"scripts_horeka/modelsize_scaling/tokenizer/train_tokenizer_200M.sbatch",1151,1,"",shellscript,content +13393,10046601,"scripts_horeka/modelsize_scaling/tokenizer/train_tokenizer_200M.sbatch",1150,1,"",shellscript,content 
+13394,10046857,"scripts_horeka/modelsize_scaling/tokenizer/train_tokenizer_200M.sbatch",1150,0,"2",shellscript,content +13395,10046857,"scripts_horeka/modelsize_scaling/tokenizer/train_tokenizer_200M.sbatch",1151,0,"",shellscript,selection_keyboard +13396,10046970,"scripts_horeka/modelsize_scaling/tokenizer/train_tokenizer_200M.sbatch",1151,0,"0",shellscript,content +13397,10046970,"scripts_horeka/modelsize_scaling/tokenizer/train_tokenizer_200M.sbatch",1152,0,"",shellscript,selection_keyboard +13398,10047408,"scripts_horeka/modelsize_scaling/tokenizer/train_tokenizer_200M.sbatch",1152,0,"0",shellscript,content +13399,10047409,"scripts_horeka/modelsize_scaling/tokenizer/train_tokenizer_200M.sbatch",1153,0,"",shellscript,selection_keyboard +13400,10048339,"scripts_horeka/modelsize_scaling/tokenizer/train_tokenizer_200M.sbatch",1153,0,"M",shellscript,content +13401,10048340,"scripts_horeka/modelsize_scaling/tokenizer/train_tokenizer_200M.sbatch",1154,0,"",shellscript,selection_keyboard +13402,10049092,"scripts_horeka/modelsize_scaling/tokenizer/train_tokenizer_200M.sbatch",1324,0,"",shellscript,selection_mouse +13403,10049606,"scripts_horeka/modelsize_scaling/tokenizer/train_tokenizer_200M.sbatch",1414,0,"",shellscript,selection_mouse +13404,10050172,"scripts_horeka/modelsize_scaling/tokenizer/train_tokenizer_200M.sbatch",1324,0,"",shellscript,selection_mouse +13405,10050912,"scripts_horeka/modelsize_scaling/tokenizer/train_tokenizer_200M.sbatch",1413,0,"",shellscript,selection_mouse +13406,10082735,"TERMINAL",0,0,"[?25ls[?25h",,terminal_output +13407,10084232,"TERMINAL",0,0,"[?25ly[?25h",,terminal_output +13408,10084362,"TERMINAL",0,0,"[?25ln[?25h",,terminal_output +13409,10084468,"TERMINAL",0,0,"[?25lc[?25h",,terminal_output +13410,10084804,"TERMINAL",0,0,"[?25l-[?25h",,terminal_output +13411,10084987,"TERMINAL",0,0,"[?25lr[?25h",,terminal_output +13412,10085094,"TERMINAL",0,0,"[?25lu[?25h",,terminal_output +13413,10085158,"TERMINAL",0,0,"[?25ln[?25h",,terminal_output +13414,10085334,"TERMINAL",0,0,"[?25ln[?25h",,terminal_output +13415,10085397,"TERMINAL",0,0,"[?25le[?25h",,terminal_output +13416,10085459,"TERMINAL",0,0,"[?25lr[?25h",,terminal_output +13417,10085697,"TERMINAL",0,0,"\r\n[?2004l\rsending incremental file list\r\n",,terminal_output +13418,10086067,"TERMINAL",0,0,"./\r\n.gitignore\r\ngeneration_1751556914.7367506.gif\r\ngeneration_1751557545.2100096.gif\r\ngeneration_1751558593.4171152.gif\r\ngeneration_1751560755.4123495.gif\r\ngeneration_1751561250.3749754.gif\r\ngeneration_1751561495.0908976.gif\r\ngenie.py\r\nsample.py\r\ntrain_dynamics.py\r\ntrain_lam.py\r\ntrain_tokenizer.py\r\n",,terminal_output 
+13419,10086184,"TERMINAL",0,0,"frames/frame_000.png\r\nframes/frame_001.png\r\nframes/frame_002.png\r\ngifs/\r\ngifs/generation_1751544626.3417823.gif\r\ngifs/generation_1751544917.2474952.gif\r\ngifs/generation_1751545102.3212328.gif\r\ngifs/generation_1751545233.5287852.gif\r\ngifs/generation_1751546045.9054332.gif\r\ngifs/generation_1751556520.405701.gif\r\nmodels/\r\nmodels/dynamics.py\r\nscripts_horeka/batchsize_scaling/adjusted_lr/\r\nscripts_horeka/batchsize_scaling/adjusted_lr/run_sbatch.sh\r\nscripts_horeka/batchsize_scaling/adjusted_lr/train_tokenizer_1_nodes.sbatch\r\nscripts_horeka/batchsize_scaling/adjusted_lr/train_tokenizer_2_nodes.sbatch\r\nscripts_horeka/batchsize_scaling/adjusted_lr/train_tokenizer_4_nodes.sbatch\r\nscripts_horeka/modelsize_scaling/tokenizer/\r\nscripts_horeka/modelsize_scaling/tokenizer/tester.sh\r\nscripts_horeka/modelsize_scaling/tokenizer/train_tokenizer_140M.sbatch\r\nscripts_horeka/modelsize_scaling/tokenizer/train_tokenizer_200M.sbatch\r\nscripts_horeka/modelsize_scaling/tokenizer/train_tokenizer_37M.sbatch\r\nscripts_horeka/modelsize_scaling/tokenizer/train_tokenizer_80M.sbatch\r\nscripts_horeka/overfit_sample_tiny/sample.sh\r\nutils/\r\nutils/dataloader.py\r\n",,terminal_output +13420,10086320,"TERMINAL",0,0,"\r\nsent 2,613,380 bytes received 739 bytes 1,742,746.00 bytes/sec\r\ntotal size is 83,803,210 speedup is 32.06\r\n]0;tum_cte0515@hkn0901:~/Projects/checkpoints[?2004h(jafar) [tum_cte0515@hkn0901 checkpoints]$ ",,terminal_output +13421,10086797,"TERMINAL",0,0,"[?25lr[?25h",,terminal_output +13422,10086917,"TERMINAL",0,0,"[?25lu[?25h",,terminal_output +13423,10086970,"TERMINAL",0,0,"[?25ln[?25h",,terminal_output +13424,10087159,"TERMINAL",0,0,"[?25ln[?25h",,terminal_output +13425,10087252,"TERMINAL",0,0,"[?25le[?25h[?25lr[?25h",,terminal_output +13426,10087461,"TERMINAL",0,0,"\r\n[?2004l\r]0;tum_cte0515@hkn0901:~/Projects/jafar_jobs[?2004h(jafar) [tum_cte0515@hkn0901 jafar_jobs]$ ",,terminal_output +13427,10092279,"TERMINAL",0,0,"[?25ls[?25h",,terminal_output +13428,10092425,"TERMINAL",0,0,"[?25lb[?25h",,terminal_output +13429,10092535,"TERMINAL",0,0,"[?25la[?25h",,terminal_output +13430,10092660,"TERMINAL",0,0,"[?25lt[?25h",,terminal_output +13431,10092741,"TERMINAL",0,0,"[?25lc[?25h",,terminal_output +13432,10092856,"TERMINAL",0,0,"[?25lh[?25h",,terminal_output +13433,10092977,"TERMINAL",0,0,"[?25l [?25h",,terminal_output +13434,10093261,"TERMINAL",0,0,"scripts_horeka/batchsize_scaling/adjusted_lr/train_tokenizer_1_nodes.sbatch",,terminal_output +13435,10100542,"TERMINAL",0,0,"\rscripts_horeka/batchsize_scaling/adjusted_lr/train_tokenizer_1_nodes.sbatch\r\n[?2004l\rSubmitted batch job 3316016\r\n]0;tum_cte0515@hkn0901:~/Projects/jafar_jobs[?2004h(jafar) [tum_cte0515@hkn0901 jafar_jobs]$ ",,terminal_output +13436,10100926,"TERMINAL",0,0,"sbatch scripts_horeka/batchsize_scaling/adjusted_lr/train_tokenizer_1_nodes.sbatch",,terminal_output +13437,10102591,"TERMINAL",0,0,"[?25l2[?25h",,terminal_output +13438,10103254,"TERMINAL",0,0,"_nodes.sbatch",,terminal_output +13439,10103647,"TERMINAL",0,0,"\r\n[?2004l\r",,terminal_output +13440,10103934,"TERMINAL",0,0,"^[[A",,terminal_output +13441,10104055,"TERMINAL",0,0,"Submitted batch job 3316017\r\n]0;tum_cte0515@hkn0901:~/Projects/jafar_jobs[?2004h(jafar) [tum_cte0515@hkn0901 jafar_jobs]$ sbatch scripts_horeka/batchsize_scaling/adjusted_lr/train_tokenizer_2_nodes.sbatch",,terminal_output +13442,10106631,"TERMINAL",0,0,"[?25l4[?25h",,terminal_output 
+13443,10106841,"TERMINAL",0,0,"_nodes.sbatch",,terminal_output +13444,10107391,"TERMINAL",0,0,"\r\n[?2004l\r",,terminal_output +13445,10107458,"TERMINAL",0,0,"Submitted batch job 3316018\r\n]0;tum_cte0515@hkn0901:~/Projects/jafar_jobs[?2004h(jafar) [tum_cte0515@hkn0901 jafar_jobs]$ ",,terminal_output +13446,10110997,"TERMINAL",0,0,"[?25ls[?25h",,terminal_output +13447,10111136,"TERMINAL",0,0,"[?25lb[?25h",,terminal_output +13448,10111189,"TERMINAL",0,0,"[?25la[?25h",,terminal_output +13449,10111356,"TERMINAL",0,0,"[?25lt[?25h",,terminal_output +13450,10111421,"TERMINAL",0,0,"[?25lc[?25h",,terminal_output +13451,10111534,"TERMINAL",0,0,"[?25lh[?25h",,terminal_output +13452,10111671,"TERMINAL",0,0,"[?25l [?25h",,terminal_output +13453,10112021,"TERMINAL",0,0,"scripts_horeka/modelsize_scaling/tokenizer/train_tokenizer_140M.sbatch",,terminal_output +13454,10113161,"TERMINAL",0,0,"\rscripts_horeka/modelsize_scaling/tokenizer/train_tokenizer_140M.sbatch\r\n[?2004l\rSubmitted batch job 3316019\r\n]0;tum_cte0515@hkn0901:~/Projects/jafar_jobs[?2004h(jafar) [tum_cte0515@hkn0901 jafar_jobs]$ ",,terminal_output +13455,10113650,"TERMINAL",0,0,"sbatch scripts_horeka/modelsize_scaling/tokenizer/train_tokenizer_140M.sbatch",,terminal_output +13456,10115851,"TERMINAL",0,0,"[?25l2[?25h",,terminal_output +13457,10116047,"TERMINAL",0,0,"00M.sbatch",,terminal_output +13458,10119497,"TERMINAL",0,0,"\r\n[?2004l\rSubmitted batch job 3316020\r\n]0;tum_cte0515@hkn0901:~/Projects/jafar_jobs[?2004h(jafar) [tum_cte0515@hkn0901 jafar_jobs]$ ",,terminal_output +13459,10119949,"TERMINAL",0,0,"sbatch scripts_horeka/modelsize_scaling/tokenizer/train_tokenizer_200M.sbatch",,terminal_output +13460,10121664,"TERMINAL",0,0,"[?25l3[?25h",,terminal_output +13461,10121874,"TERMINAL",0,0,"7M.sbatch",,terminal_output +13462,10122742,"TERMINAL",0,0,"\r\n[?2004l\rSubmitted batch job 3316022\r\n]0;tum_cte0515@hkn0901:~/Projects/jafar_jobs[?2004h(jafar) [tum_cte0515@hkn0901 jafar_jobs]$ ",,terminal_output +13463,10123076,"TERMINAL",0,0,"sbatch scripts_horeka/modelsize_scaling/tokenizer/train_tokenizer_37M.sbatch",,terminal_output +13464,10125540,"TERMINAL",0,0,"[?25l8[?25h",,terminal_output +13465,10125646,"TERMINAL",0,0,"0M.sbatch",,terminal_output +13466,10126195,"TERMINAL",0,0,"\r\n[?2004l\rSubmitted batch job 3316026\r\n]0;tum_cte0515@hkn0901:~/Projects/jafar_jobs[?2004h(jafar) [tum_cte0515@hkn0901 jafar_jobs]$ ",,terminal_output +13467,10127092,"TERMINAL",0,0,"[?25lq[?25h",,terminal_output +13468,10127198,"TERMINAL",0,0,"[?25lu[?25h",,terminal_output +13469,10127259,"TERMINAL",0,0,"[?25le[?25h",,terminal_output +13470,10127326,"TERMINAL",0,0,"[?25lu[?25h",,terminal_output +13471,10127436,"TERMINAL",0,0,"[?25le[?25h",,terminal_output +13472,10127583,"TERMINAL",0,0,"\r\n[?2004l\r[?1049h(B[?7hEvery 1.0s: squeue --mehkn0901.localdomain: Thu Jul 3 19:04:24 2025JOBID PARTITION NAME USER ST\tTIME NODES NODELIST(REASON)3316016 accelerat train_to tum_cte0 PD\t0:00\t 1 (Priority)3316026 accelerat train_to tum_cte0 PD\t0:00\t 4 (Priority)3316022 accelerat train_to tum_cte0 PD\t0:00\t 2 (Priority)3316020 accelerat train_to tum_cte0 PD\t0:00 12 (Priority)3316019 accelerat train_to tum_cte0 PD\t0:00\t 8 (Priority)3316018 accelerat train_to tum_cte0 PD\t0:00\t 4 (Priority)3316017 accelerat train_to tum_cte0 PD\t0:00\t 2 (Priority)3315981 dev_accel interact tum_cte0 R47:42\t 1 hkn0901",,terminal_output +13473,10128565,"TERMINAL",0,0,"53",,terminal_output +13474,10129589,"TERMINAL",0,0,"64",,terminal_output 
+13475,10130610,"TERMINAL",0,0,"[?1049l\r[?1l>]0;tum_cte0515@hkn0901:~/Projects/jafar_jobs[?2004h(jafar) [tum_cte0515@hkn0901 jafar_jobs]$ ",,terminal_output +13476,10151818,"sample.py",0,0,"from dataclasses import dataclass\nimport time\nimport os\n\nimport dm_pix as pix\nimport einops\nimport jax\nimport jax.numpy as jnp\nimport numpy as np\nfrom orbax.checkpoint import PyTreeCheckpointer\nfrom PIL import Image, ImageDraw\nimport tyro\n\nfrom genie import Genie\n# from utils.dataloader import get_dataloader\n\n\n@dataclass\nclass Args:\n # Experiment\n seed: int = 0\n seq_len: int = 16\n image_channels: int = 3\n image_height: int = 90\n image_width: int = 160\n data_dir: str = ""data/coinrun_episodes""\n checkpoint: str = """"\n # Sampling\n batch_size: int = 1\n maskgit_steps: int = 25\n temperature: float = 1.0\n sample_argmax: bool = True\n start_frame: int = 0\n # Tokenizer checkpoint\n tokenizer_dim: int = 512\n latent_patch_dim: int = 32\n num_patch_latents: int = 1024\n patch_size: int = 4\n tokenizer_num_blocks: int = 8\n tokenizer_num_heads: int = 8\n # LAM checkpoint\n lam_dim: int = 512\n latent_action_dim: int = 32\n num_latent_actions: int = 6\n lam_patch_size: int = 16\n lam_num_blocks: int = 8\n lam_num_heads: int = 8\n # Dynamics checkpoint\n dyna_dim: int = 512\n dyna_num_blocks: int = 12\n dyna_num_heads: int = 8\n\n\nargs = tyro.cli(Args)\nrng = jax.random.PRNGKey(args.seed)\n\n# --- Load Genie checkpoint ---\ngenie = Genie(\n # Tokenizer\n in_dim=args.image_channels,\n tokenizer_dim=args.tokenizer_dim,\n latent_patch_dim=args.latent_patch_dim,\n num_patch_latents=args.num_patch_latents,\n patch_size=args.patch_size,\n tokenizer_num_blocks=args.tokenizer_num_blocks,\n tokenizer_num_heads=args.tokenizer_num_heads,\n # LAM\n lam_dim=args.lam_dim,\n latent_action_dim=args.latent_action_dim,\n num_latent_actions=args.num_latent_actions,\n lam_patch_size=args.lam_patch_size,\n lam_num_blocks=args.lam_num_blocks,\n lam_num_heads=args.lam_num_heads,\n # Dynamics\n dyna_dim=args.dyna_dim,\n dyna_num_blocks=args.dyna_num_blocks,\n dyna_num_heads=args.dyna_num_heads,\n)\nrng, _rng = jax.random.split(rng)\nimage_shape = (args.image_height, args.image_width, args.image_channels)\ndummy_inputs = dict(\n videos=jnp.zeros((args.batch_size, args.seq_len, *image_shape), dtype=jnp.float32),\n mask_rng=_rng,\n)\nrng, _rng = jax.random.split(rng)\nparams = genie.init(_rng, dummy_inputs)\nckpt = PyTreeCheckpointer().restore(args.checkpoint)[""model""][""params""][""params""]\nparams[""params""].update(ckpt)\n\n\n# --- Define autoregressive sampling loop ---\ndef _autoreg_sample(rng, video_batch, action_batch):\n vid = video_batch[:, : args.start_frame + 1]\n for frame_idx in range(args.start_frame + 1, args.seq_len):\n # --- Sample next frame ---\n print(""Frame"", frame_idx)\n start_time = time.time()\n rng, _rng = jax.random.split(rng)\n batch = dict(videos=vid, latent_actions=action_batch[:, :frame_idx], rng=_rng)\n new_frame = genie.apply(\n params,\n batch,\n args.maskgit_steps,\n args.temperature,\n args.sample_argmax,\n method=Genie.sample,\n )\n elapsed = time.time() - start_time\n print(f""Frame {frame_idx} sampling took {elapsed:.3f} seconds"")\n vid = jnp.concatenate([vid, new_frame], axis=1)\n return vid\n\n# --- Define autoregressive sampling loop ---\ndef _autoreg_sample_mihir(rng, video_batch, action_batch):\n vid = video_batch[:, : args.start_frame + 1]\n rng, _rng = jax.random.split(rng)\n batch = dict(videos=vid, latent_actions=action_batch, rng=_rng)\n generated_vid = 
genie.apply(\n params,\n batch,\n args.seq_len,\n args.maskgit_steps,\n args.temperature,\n args.sample_argmax,\n method=Genie.sample_mihir,\n )\n return generated_vid\n\n\n\n# --- Get video + latent actions ---\n# tfrecord_files = [\n # os.path.join(args.data_dir, x)\n # for x in os.listdir(args.data_dir)\n # if x.endswith("".tfrecord"")\n# ]\n# dataloader = get_dataloader(\n # tfrecord_files,\n # args.seq_len,\n # args.batch_size,\n # args.image_height,\n # args.image_width,\n # args.image_channels,\n # seed=args.seed,\n# )\n# video_batch = next(iter(dataloader))\nvideo_batch = np.load(""overfit_dir/single_sample_corner.npy"")\n# Get latent actions from first video only\nfirst_video = video_batch[:1,:args.seq_len]\nbatch = dict(videos=first_video)\naction_batch = genie.apply(params, batch, False, method=Genie.vq_encode)\naction_batch = action_batch.reshape(1, args.seq_len - 1, 1)\n# Use actions from first video for all videos\naction_batch = jnp.repeat(action_batch, video_batch.shape[0], axis=0)\n\n# --- Sample + evaluate video ---\nvid = _autoreg_sample_mihir(rng, video_batch, action_batch)\ngt = video_batch[:, : vid.shape[1]].clip(0, 1).reshape(-1, *video_batch.shape[2:])\nrecon = vid.clip(0, 1).reshape(-1, *vid.shape[2:])\nssim = pix.ssim(gt[:, args.start_frame + 1 :], recon[:, args.start_frame + 1 :]).mean()\nprint(f""SSIM: {ssim}"")\n\n# --- Construct video ---\nfirst_true = (video_batch[0:1] * 255).astype(np.uint8)\nfirst_pred = (vid[0:1] * 255).astype(np.uint8)\nfirst_video_comparison = np.zeros((2, *vid.shape[1:5]), dtype=np.uint8)\nfirst_video_comparison[0] = first_true[:, : vid.shape[1]]\nfirst_video_comparison[1] = first_pred\n# For other videos, only show generated video\nother_preds = (vid[1:] * 255).astype(np.uint8)\nall_frames = np.concatenate([first_video_comparison, other_preds], axis=0)\nflat_vid = einops.rearrange(all_frames, ""n t h w c -> t h (n w) c"")\n\n# --- Save video ---\nimgs = [Image.fromarray(img) for img in flat_vid]\n# Write actions on each frame\nfor img, action in zip(imgs[1:], action_batch[0, :, 0]):\n d = ImageDraw.Draw(img)\n d.text((2, 2), f""{action}"", fill=255)\nimgs[0].save(\n f""generation_{time.time()}.gif"",\n save_all=True,\n append_images=imgs[1:],\n duration=250,\n loop=0,\n)\n",python,tab +13477,10152501,"sample.py",3727,0,"",python,selection_mouse +13478,10152532,"sample.py",3726,0,"",python,selection_command +13479,10153020,"sample.py",3759,0,"",python,selection_mouse +13480,10153527,"sample.py",3760,0,"",python,selection_mouse +13481,10154292,"sample.py",3759,0,"",python,selection_command +13482,10154424,"sample.py",3734,0,"",python,selection_command +13483,10154509,"sample.py",3759,0,"",python,selection_command +13484,10154644,"sample.py",3734,0,"",python,selection_command +13485,10154666,"sample.py",3759,0,"",python,selection_command +13486,10154870,"sample.py",3760,0,"",python,selection_command +13487,10155057,"sample.py",3761,0,"",python,selection_command +13488,10155180,"sample.py",3760,0,"",python,selection_command +13489,10155498,"sample.py",3759,0,"",python,selection_command +13490,10155762,"sample.py",3760,0,"",python,selection_command +13491,10155969,"sample.py",3761,0,"",python,selection_command +13492,10156139,"sample.py",3760,0,"",python,selection_command +13493,10156393,"sample.py",3759,0,"",python,selection_command +13494,10156495,"sample.py",3760,0,"",python,selection_command +13495,10156669,"sample.py",3759,0,"",python,selection_command +13496,10156675,"sample.py",3760,0,"",python,selection_command 
+13497,10156800,"sample.py",3759,0,"",python,selection_command +13498,10156894,"sample.py",3760,0,"",python,selection_command +13499,10157496,"sample.py",3761,0,"",python,selection_command +13500,10157622,"sample.py",3760,0,"",python,selection_command +13501,10157697,"sample.py",3761,0,"",python,selection_command +13502,10157830,"sample.py",3760,0,"",python,selection_command +13503,10157944,"sample.py",3761,0,"",python,selection_command +13504,10158049,"sample.py",3760,0,"",python,selection_command +13505,10158235,"sample.py",3759,0,"",python,selection_command +13506,10158458,"sample.py",3760,0,"",python,selection_command +13507,10158688,"sample.py",3761,0,"",python,selection_command +13508,10158770,"sample.py",3760,0,"",python,selection_command +13509,10158827,"sample.py",3761,0,"",python,selection_command +13510,10158918,"sample.py",3760,0,"",python,selection_command +13511,10159042,"sample.py",3761,0,"",python,selection_command +13512,10159152,"sample.py",3760,0,"",python,selection_command +13513,10159345,"sample.py",3759,0,"",python,selection_command +13514,10159427,"sample.py",3760,0,"",python,selection_command +13515,10159555,"sample.py",3759,0,"",python,selection_command +13516,10159640,"sample.py",3760,0,"",python,selection_command +13517,10159772,"sample.py",3759,0,"",python,selection_command +13518,10159829,"sample.py",3760,0,"",python,selection_command +13519,10159933,"sample.py",3759,0,"",python,selection_command +13520,10160042,"sample.py",3760,0,"",python,selection_command +13521,10160143,"sample.py",3759,0,"",python,selection_command +13522,10160262,"sample.py",3760,0,"",python,selection_command +13523,10160350,"sample.py",3759,0,"",python,selection_command +13524,10160446,"sample.py",3760,0,"",python,selection_command +13525,10160554,"sample.py",3759,0,"",python,selection_command +13526,10160620,"sample.py",3760,0,"",python,selection_command +13527,10160785,"sample.py",3759,0,"",python,selection_command +13528,10160844,"sample.py",3760,0,"",python,selection_command +13529,10160957,"sample.py",3759,0,"",python,selection_command +13530,10161049,"sample.py",3760,0,"",python,selection_command +13531,10161159,"sample.py",3759,0,"",python,selection_command +13532,10161295,"sample.py",3760,0,"",python,selection_command +13533,10161378,"sample.py",3759,0,"",python,selection_command +13534,10161473,"sample.py",3760,0,"",python,selection_command +13535,10161567,"sample.py",3759,0,"",python,selection_command +13536,10161694,"sample.py",3760,0,"",python,selection_command +13537,10161756,"sample.py",3759,0,"",python,selection_command +13538,10161858,"sample.py",3760,0,"",python,selection_command +13539,10161989,"sample.py",3759,0,"",python,selection_command +13540,10162173,"sample.py",3760,0,"",python,selection_command +13541,10162180,"sample.py",3759,0,"",python,selection_command +13542,10162228,"sample.py",3760,0,"",python,selection_command +13543,10162418,"sample.py",3761,0,"",python,selection_command +13544,10162615,"sample.py",3762,0,"",python,selection_command +13545,10162770,"sample.py",3761,0,"",python,selection_command +13546,10162948,"sample.py",3760,0,"",python,selection_command +13547,10163127,"sample.py",3759,0,"",python,selection_command +13548,10165191,"genie.py",0,0,"from typing import Dict, Any\n\nimport optax\nimport jax\nimport jax.numpy as jnp\nimport flax.linen as nn\nfrom jax import NamedSharding\nfrom flax.training.train_state import TrainState\nfrom flax.training import orbax_utils\nfrom orbax.checkpoint import PyTreeCheckpointer\n\nfrom models.dynamics import 
DynamicsMaskGIT\nfrom models.lam import LatentActionModel\nfrom models.tokenizer import TokenizerVQVAE\n\n\nclass Genie(nn.Module):\n """"""Genie model""""""\n\n # --- Tokenizer ---\n in_dim: int\n tokenizer_dim: int\n latent_patch_dim: int\n num_patch_latents: int\n patch_size: int\n tokenizer_num_blocks: int\n tokenizer_num_heads: int\n # --- LAM ---\n lam_dim: int\n latent_action_dim: int\n num_latent_actions: int\n lam_patch_size: int\n lam_num_blocks: int\n lam_num_heads: int\n # --- Dynamics ---\n dyna_dim: int\n dyna_num_blocks: int\n dyna_num_heads: int\n dropout: float = 0.0\n mask_limit: float = 0.0\n\n def setup(self):\n self.tokenizer = TokenizerVQVAE(\n in_dim=self.in_dim,\n model_dim=self.tokenizer_dim,\n latent_dim=self.latent_patch_dim,\n num_latents=self.num_patch_latents,\n patch_size=self.patch_size,\n num_blocks=self.tokenizer_num_blocks,\n num_heads=self.tokenizer_num_heads,\n dropout=0.0,\n codebook_dropout=0.0,\n )\n self.lam = LatentActionModel(\n in_dim=self.in_dim,\n model_dim=self.lam_dim,\n latent_dim=self.latent_patch_dim,\n num_latents=self.num_latent_actions,\n patch_size=self.lam_patch_size,\n num_blocks=self.lam_num_blocks,\n num_heads=self.lam_num_heads,\n dropout=0.0,\n codebook_dropout=0.0,\n )\n self.dynamics = DynamicsMaskGIT(\n model_dim=self.dyna_dim,\n num_latents=self.num_patch_latents,\n num_blocks=self.dyna_num_blocks,\n num_heads=self.dyna_num_heads,\n dropout=self.dropout,\n mask_limit=self.mask_limit,\n )\n\n def __call__(self, batch: Dict[str, Any], training: bool = True) -> Dict[str, Any]:\n tokenizer_outputs = self.tokenizer.vq_encode(batch[""videos""], training=False)\n lam_outputs = self.lam.vq_encode(batch[""videos""], training=False)\n outputs = dict(\n video_tokens=jax.lax.stop_gradient(tokenizer_outputs[""indices""]),\n latent_actions=jax.lax.stop_gradient(lam_outputs[""z_q""]),\n )\n outputs[""mask_rng""] = batch[""mask_rng""]\n dyna_outputs = self.dynamics(outputs, training)\n outputs.update(dyna_outputs)\n mle_indices = jnp.argmax(outputs[""token_logits""], axis=-1)\n outputs[""recon""] = self.tokenizer.decode(\n mle_indices, batch[""videos""].shape[2:4]\n )\n return outputs\n\n @nn.compact\n def sample_mihir(\n self,\n batch: Dict[str, Any],\n seq_len: int,\n steps: int,\n temperature: float,\n sample_argmax: bool,\n ) -> Any:\n # B == batch_size\n # T == num_frames (input)\n # N == num_patches\n # S == seq_len\n # A == action_space\n # D == latent_dim\n # --- Encode videos and actions ---\n tokenizer_out = self.tokenizer.vq_encode(batch[""videos""], training=False)\n token_idxs = tokenizer_out[""indices""] # (B, T, N)\n B, T, N = token_idxs.shape\n S = seq_len\n print(""token_idxs shape:"", token_idxs.shape)\n pad_shape = (B, S - T, N)\n pad = jnp.zeros(pad_shape, dtype=token_idxs.dtype)\n token_idxs = jnp.concatenate([token_idxs, pad], axis=1) # shape (B, S, N)\n action_tokens = self.lam.vq.get_codes(batch[""latent_actions""])\n\n # begin potential forloop (from T to S)\n initial_T = T\n for T in range(initial_T, S):\n # Create a mask that is 1 (True) where we just padded\n # token_idxs shape: (B, S, N), T = original length, S = seq_len\n # mask is True for padded positions (i.e., t >= T)\n mask = (jnp.arange(S)[None, :, None] >= T) # shape (1, S, 1)\n mask = jnp.broadcast_to(mask, (B, S, N)) # shape (B, S, N)\n init_mask = mask.astype(bool)\n token_idxs *= ~init_mask\n #print(""token_idxs[0,:,0]:"", token_idxs[0,:,0])\n #print(""init_mask[0,:,0]:"", init_mask[0,:,0])\n\n assert init_mask.shape == (B, S, N), ""Wrong mask 
shape""\n\n # --- Initialize MaskGIT ---\n init_carry = (\n batch[""rng""],\n init_mask,\n token_idxs,\n action_tokens,\n )\n MaskGITLoop = nn.scan(\n MaskGITStepMihir,\n variable_broadcast=""params"",\n split_rngs={""params"": False},\n in_axes=0,\n out_axes=0,\n length=steps,\n )\n\n # --- Run MaskGIT loop ---\n loop_fn = MaskGITLoop(\n dynamics=self.dynamics,\n tokenizer=self.tokenizer,\n temperature=temperature,\n sample_argmax=sample_argmax,\n steps=steps,\n )\n final_carry, _ = loop_fn(init_carry, jnp.arange(steps))\n token_idxs = final_carry[2]\n\n new_frame_pixels = self.tokenizer.decode(\n token_idxs,\n video_hw=batch[""videos""].shape[2:4],\n )\n return new_frame_pixels\n\n @nn.compact\n def sample(\n self,\n batch: Dict[str, Any],\n steps: int = 25,\n temperature: int = 1,\n sample_argmax: bool = False,\n ) -> Any:\n # B == batch_size\n # T == num_frames (input)\n # N == num_patches\n # S == seq_len\n # A == action_space\n # D == latent_dim\n # --- Encode videos and actions ---\n tokenizer_out = self.tokenizer.vq_encode(batch[""videos""], training=False)\n token_idxs = tokenizer_out[""indices""]# (B, T, N)\n new_frame_idxs = jnp.zeros_like(token_idxs)[:, 0] # (B, N) \n action_tokens = self.lam.vq.get_codes(batch[""latent_actions""]) # (B, S, A, D)\n\n # --- Initialize MaskGIT ---\n init_mask = jnp.ones_like(token_idxs, dtype=bool)[:, 0] # (B, N)\n init_carry = (\n batch[""rng""],\n new_frame_idxs,\n init_mask,\n token_idxs,\n action_tokens,\n )\n MaskGITLoop = nn.scan(\n MaskGITStep,\n variable_broadcast=""params"",\n split_rngs={""params"": False},\n in_axes=0,\n out_axes=0,\n length=steps,\n )\n\n # --- Run MaskGIT loop ---\n loop_fn = MaskGITLoop(\n dynamics=self.dynamics,\n tokenizer=self.tokenizer,\n temperature=temperature,\n sample_argmax=sample_argmax,\n steps=steps,\n )\n final_carry, _ = loop_fn(init_carry, jnp.arange(steps))\n new_frame_idxs = final_carry[1]\n new_frame_pixels = self.tokenizer.decode(\n jnp.expand_dims(new_frame_idxs, 1),\n video_hw=batch[""videos""].shape[2:4],\n )\n return new_frame_pixels\n\n def vq_encode(self, batch, training) -> Dict[str, Any]:\n # --- Preprocess videos ---\n lam_output = self.lam.vq_encode(batch[""videos""], training=training)\n return lam_output[""indices""]\n\n\nclass MaskGITStepMihir(nn.Module):\n dynamics: nn.Module\n tokenizer: nn.Module\n temperature: float\n sample_argmax: bool\n steps: int\n\n @nn.compact\n def __call__(self, carry, x):\n rng, mask, token_idxs, action_tokens = carry\n step = x\n B, S, N = token_idxs.shape[:3]\n\n # --- Construct + encode video ---\n vid_embed = self.dynamics.patch_embed(token_idxs)\n # Mask vid_embed: set to mask_token where mask==1, else keep vid_embed\n # mask: (B, S, N), vid_embed: (B, S, N, D), mask_token: (D,)\n mask_token = self.dynamics.mask_token # (1,1, 1, D,)\n # Expand mask to (B, S, N, 1) for broadcasting\n mask_expanded = mask[..., None]\n vid_embed = jnp.where(mask_expanded, mask_token, vid_embed)\n\n # --- Predict transition ---\n act_embed = self.dynamics.action_up(action_tokens)\n vid_embed += jnp.pad(act_embed, ((0, 0), (1, 0), (0, 0), (0, 0)))\n unmasked_ratio = jnp.cos(jnp.pi * (step + 1) / (self.steps * 2))\n step_temp = self.temperature * (1.0 - unmasked_ratio)\n final_logits = self.dynamics.dynamics(vid_embed)[:, -1] / step_temp\n\n # --- Sample new tokens for final frame ---\n if self.sample_argmax:\n sampled_token_idxs = jnp.argmax(final_logits, axis=-1)\n else:\n rng, _rng = jax.random.split(rng)\n sampled_token_idxs = jnp.where(\n step == self.steps - 1,\n 
jnp.argmax(final_logits, axis=-1),\n jax.random.categorical(_rng, final_logits),\n )\n gather_fn = jax.vmap(jax.vmap(lambda x, y: x[y]))\n final_token_probs = gather_fn(jax.nn.softmax(final_logits), sampled_token_idxs)\n final_token_probs += ~mask\n # Update masked tokens only\n jax.debug.print(""maskgit-sampled-token_idxs[0,:,0]: {}"", token_idxs[0,:,0])\n token_idxs = jnp.where(mask, sampled_token_idxs, token_idxs)\n jax.debug.print(""maskgit-token_idxs[0,:,0]: {}"", token_idxs[0,:,0])\n\n # --- Update mask ---\n num_unmasked_tokens = jnp.round(N * (1.0 - unmasked_ratio)).astype(int)\n idx_mask = jnp.arange(final_token_probs.shape[-1]) > num_unmasked_tokens\n sorted_idxs = jnp.argsort(final_token_probs, axis=-1, descending=True)\n mask_update_fn = jax.vmap(lambda msk, ids: msk.at[ids].set(idx_mask))\n new_mask = mask_update_fn(mask, sorted_idxs)\n\n new_carry = (rng, new_mask, token_idxs, action_tokens)\n return new_carry, None\n\n\nclass MaskGITStep(nn.Module):\n dynamics: nn.Module\n tokenizer: nn.Module\n temperature: float\n sample_argmax: bool\n steps: int\n\n @nn.compact\n def __call__(self, carry, x):\n rng, final_token_idxs, mask, token_idxs, action_tokens = carry\n step = x\n B, T, N = token_idxs.shape[:3]\n\n # --- Construct + encode video ---\n vid_token_idxs = jnp.concatenate(\n (token_idxs, jnp.expand_dims(final_token_idxs, 1)), axis=1\n ) # (B, T+1, N)\n vid_embed = self.dynamics.patch_embed(vid_token_idxs) # (B, T+1, N, D)\n curr_masked_frame = jnp.where(\n jnp.expand_dims(mask, -1), # (B, N, 1)\n self.dynamics.mask_token[0], # (B, 1, D)\n vid_embed[:, -1], # (B, N, D)\n ) # (B, N, D)\n vid_embed = vid_embed.at[:, -1].set(curr_masked_frame)\n\n # --- Predict transition ---\n act_embed = self.dynamics.action_up(action_tokens)\n vid_embed += jnp.pad(act_embed, ((0, 0), (1, 0), (0, 0), (0, 0)))\n unmasked_ratio = jnp.cos(jnp.pi * (step + 1) / (self.steps * 2))\n step_temp = self.temperature * (1.0 - unmasked_ratio)\n final_logits = self.dynamics.dynamics(vid_embed)[:, -1] / step_temp\n\n # --- Sample new tokens for final frame ---\n if self.sample_argmax:\n sampled_token_idxs = jnp.argmax(final_logits, axis=-1)\n else:\n rng, _rng = jax.random.split(rng)\n sampled_token_idxs = jnp.where(\n step == self.steps - 1,\n jnp.argmax(final_logits, axis=-1),\n jax.random.categorical(_rng, final_logits),\n )\n gather_fn = jax.vmap(jax.vmap(lambda x, y: x[y]))\n final_token_probs = gather_fn(jax.nn.softmax(final_logits), sampled_token_idxs)\n final_token_probs += ~mask\n # Update masked tokens only\n new_token_idxs = jnp.where(mask, sampled_token_idxs, final_token_idxs)\n\n # --- Update mask ---\n num_unmasked_tokens = jnp.round(N * (1.0 - unmasked_ratio)).astype(int)\n idx_mask = jnp.arange(final_token_probs.shape[-1]) > num_unmasked_tokens\n sorted_idxs = jnp.argsort(final_token_probs, axis=-1, descending=True)\n mask_update_fn = jax.vmap(lambda msk, ids: msk.at[ids].set(idx_mask))\n new_mask = mask_update_fn(mask, sorted_idxs)\n\n new_carry = (rng, new_token_idxs, new_mask, token_idxs, action_tokens)\n return new_carry, None\n\n\ndef restore_genie_components(\n train_state: TrainState,\n sharding: NamedSharding,\n inputs: Dict[str, jax.Array],\n rng: jax.Array,\n args,\n):\n """"""Restore pre-trained Genie components""""""\n rng, _rng = jax.random.split(rng)\n\n dummy_tokenizer = TokenizerVQVAE(\n in_dim=args.image_channels,\n model_dim=args.tokenizer_dim,\n latent_dim=args.latent_patch_dim,\n num_latents=args.num_patch_latents,\n patch_size=args.patch_size,\n 
num_blocks=args.tokenizer_num_blocks,\n num_heads=args.tokenizer_num_heads,\n dropout=args.dropout,\n codebook_dropout=args.dropout,\n )\n dummy_lam = LatentActionModel(\n in_dim=args.image_channels,\n model_dim=args.lam_dim,\n latent_dim=args.latent_patch_dim,\n num_latents=args.num_latent_actions,\n patch_size=args.lam_patch_size,\n num_blocks=args.lam_num_blocks,\n num_heads=args.lam_num_heads,\n dropout=args.dropout,\n codebook_dropout=args.dropout,\n )\n tokenizer_init_params = dummy_tokenizer.init(_rng, inputs)\n lam_init_params = dummy_lam.init(_rng, inputs)\n\n # dummy values since we only use tx to initialize the dummy train states\n dummy_tx = optax.adamw(\n learning_rate=optax.constant_schedule(args.max_lr),\n b1=0.9,\n b2=0.9,\n weight_decay=1e-4,\n )\n\n dummy_tokenizer_train_state = TrainState.create(\n apply_fn=dummy_tokenizer.apply, params=tokenizer_init_params, tx=dummy_tx\n )\n dummy_lam_train_state = TrainState.create(\n apply_fn=dummy_lam.apply, params=lam_init_params, tx=dummy_tx\n )\n\n def create_abstract_sharded_pytree(pytree_template, sharding_spec):\n """"""Replaces arrays in a pytree with ShapeDtypeStructs having the given sharding.""""""\n\n def map_fn(leaf_template):\n if hasattr(leaf_template, ""shape"") and hasattr(leaf_template, ""dtype""):\n return jax.ShapeDtypeStruct(\n leaf_template.shape, leaf_template.dtype, sharding=sharding_spec\n )\n return leaf_template\n\n return jax.tree_util.tree_map(map_fn, pytree_template)\n\n abstract_sharded_tokenizer_state = create_abstract_sharded_pytree(\n dummy_tokenizer_train_state, sharding\n )\n abstract_sharded_lam_state = create_abstract_sharded_pytree(\n dummy_lam_train_state, sharding\n )\n\n tokenizer_restore_target = {""model"": abstract_sharded_tokenizer_state}\n lam_restore_target = {""model"": abstract_sharded_lam_state}\n\n tokenizer_restore_args = orbax_utils.restore_args_from_target(\n tokenizer_restore_target\n )\n lam_restore_args = orbax_utils.restore_args_from_target(lam_restore_target)\n\n restored_tokenizer_params = (\n PyTreeCheckpointer()\n .restore(\n args.tokenizer_checkpoint,\n item=tokenizer_restore_target,\n restore_args=tokenizer_restore_args,\n )[""model""]\n .params[""params""]\n )\n restored_lam_params = (\n PyTreeCheckpointer()\n .restore(\n args.lam_checkpoint, item=lam_restore_target, restore_args=lam_restore_args\n )[""model""]\n .params[""params""]\n )\n # Genie does not initialize all LAM modules, thus we omit those extra modules during restoration\n # (f.srambical) FIXME: Currently, this is a small HBM memory crunch since the LAM's decoder is loaded into HBM and immediately dicarded.\n # A workaround would be to restore to host memory first, and only move the weights to HBM after pruning the decoder\n restored_lam_params = {\n k: v\n for k, v in restored_lam_params.items()\n if k in train_state.params[""params""][""lam""]\n }\n\n train_state.params[""params""][""tokenizer""].update(restored_tokenizer_params)\n train_state.params[""params""][""lam""].update(restored_lam_params)\n\n return train_state\n",python,tab +13549,10167269,"genie.py",8584,0,"",python,selection_mouse +13550,10167805,"genie.py",8566,0,"",python,selection_mouse +13551,10168869,"genie.py",8602,0,"",python,selection_mouse +13552,10169342,"genie.py",8496,0,"",python,selection_mouse +13553,10169926,"genie.py",8558,0,"",python,selection_mouse +13554,10173894,"genie.py",8601,0,"\n ",python,content +13555,10174383,"genie.py",8610,0,"j",python,content +13556,10174384,"genie.py",8611,0,"",python,selection_keyboard 
+13557,10174479,"genie.py",8611,0,"a",python,content +13558,10174480,"genie.py",8612,0,"",python,selection_keyboard +13559,10174685,"genie.py",8612,0,"x",python,content +13560,10174686,"genie.py",8613,0,"",python,selection_keyboard +13561,10174799,"genie.py",8613,0,".",python,content +13562,10174800,"genie.py",8614,0,"",python,selection_keyboard +13563,10175134,"genie.py",8614,0,"d",python,content +13564,10175135,"genie.py",8615,0,"",python,selection_keyboard +13565,10175252,"genie.py",8615,0,"b",python,content +13566,10175253,"genie.py",8616,0,"",python,selection_keyboard +13567,10175332,"genie.py",8616,0,"e",python,content +13568,10175333,"genie.py",8617,0,"",python,selection_keyboard +13569,10175464,"genie.py",8617,0,"u",python,content +13570,10175465,"genie.py",8618,0,"",python,selection_keyboard +13571,10175899,"genie.py",8617,1,"",python,content +13572,10176001,"genie.py",8616,1,"",python,content +13573,10176132,"genie.py",8615,1,"",python,content +13574,10176326,"genie.py",8614,1,"",python,content +13575,10176860,"genie.py",8613,1,"",python,content +13576,10176952,"genie.py",8610,3,"",python,content +13577,10178334,"TERMINAL",0,0,"[?25lc[?25h",,terminal_output +13578,10178704,"TERMINAL",0,0,"[?25ld[?25h",,terminal_output +13579,10178880,"TERMINAL",0,0,"[?25l [?25h",,terminal_output +13580,10178941,"TERMINAL",0,0,"[?25l.[?25h",,terminal_output +13581,10179104,"TERMINAL",0,0,"[?25l.[?25h",,terminal_output +13582,10180077,"TERMINAL",0,0,"[?25l/[?25h",,terminal_output +13583,10181183,"TERMINAL",0,0,"[?25lj[?25h",,terminal_output +13584,10181364,"TERMINAL",0,0,"afar",,terminal_output +13585,10182124,"TERMINAL",0,0,"\r\n[?2004l\r]0;tum_cte0515@hkn0901:~/Projects/jafar[?2004h(jafar) [tum_cte0515@hkn0901 jafar]$ ",,terminal_output +13586,10183861,"TERMINAL",0,0,"[?25lg[?25h[?25li[?25h",,terminal_output +13587,10183948,"TERMINAL",0,0,"[?25lt[?25h",,terminal_output +13588,10184051,"TERMINAL",0,0,"[?25l [?25h",,terminal_output +13589,10184173,"TERMINAL",0,0,"[?25lc[?25h",,terminal_output +13590,10184236,"TERMINAL",0,0,"[?25lh[?25h",,terminal_output +13591,10184296,"TERMINAL",0,0,"[?25le[?25h",,terminal_output +13592,10184434,"TERMINAL",0,0,"[?25lc[?25h[?25lk[?25h",,terminal_output +13593,10184602,"TERMINAL",0,0,"[?25lo[?25h",,terminal_output +13594,10184722,"TERMINAL",0,0,"[?25lu[?25h",,terminal_output +13595,10184794,"TERMINAL",0,0,"[?25lt[?25h",,terminal_output +13596,10184853,"TERMINAL",0,0,"[?25l [?25h",,terminal_output +13597,10185520,"TERMINAL",0,0,"[?25lf[?25h[?25li[?25h",,terminal_output +13598,10186297,"TERMINAL",0,0,"[?25lx[?25h",,terminal_output +13599,10186472,"TERMINAL",0,0,"[?25l-[?25h",,terminal_output +13600,10186615,"TERMINAL",0,0,"[?25ls[?25h",,terminal_output +13601,10186823,"TERMINAL",0,0,"[?25la[?25h",,terminal_output +13602,10187220,"TERMINAL",0,0,"[?25lm[?25h",,terminal_output +13603,10187412,"TERMINAL",0,0,"[?25lp[?25h[?25ll[?25h",,terminal_output +13604,10187714,"TERMINAL",0,0,"[?25li[?25h[?25ln[?25h",,terminal_output +13605,10187842,"TERMINAL",0,0,"[?25lg[?25h",,terminal_output +13606,10188183,"TERMINAL",0,0,"\r\n[?2004l\r",,terminal_output +13607,10188318,"TERMINAL",0,0,"M\tgenie.py\r\nM\tsample.py\r\nSwitched to branch 'fix-sampling'\r\n]0;tum_cte0515@hkn0901:~/Projects/jafar[?2004h(jafar) [tum_cte0515@hkn0901 jafar]$ ",,terminal_output +13608,10192444,"",0,0,"Switched from branch 'runner' to 'fix-sampling'",,git_branch_checkout +13609,10198787,"TERMINAL",0,0,"[?25lq[?25h",,terminal_output +13610,10198877,"TERMINAL",0,0,"[?25lu[?25h",,terminal_output 
+13611,10199022,"TERMINAL",0,0,"[?25le[?25h[?25lu[?25h",,terminal_output +13612,10199147,"TERMINAL",0,0,"[?25le[?25h",,terminal_output +13613,10199258,"TERMINAL",0,0,"\r\n[?2004l\r[?1049h(B[?7hEvery 1.0s: squeue --mehkn0901.localdomain: Thu Jul 3 19:05:36 2025JOBID PARTITION NAME USER ST\tTIME NODES NODELIST(REASON)3316016 accelerat train_to tum_cte0 PD\t0:00\t 1 (Priority)3316026 accelerat train_to tum_cte0 PD\t0:00\t 4 (Priority)3316022 accelerat train_to tum_cte0 PD\t0:00\t 2 (Priority)3316020 accelerat train_to tum_cte0 PD\t0:00 12 (Priority)3316019 accelerat train_to tum_cte0 PD\t0:00\t 8 (Priority)3316018 accelerat train_to tum_cte0 PD\t0:00\t 4 (Priority)3316017 accelerat train_to tum_cte0 PD\t0:00\t 2 (Priority)3315981 dev_accel interact tum_cte0 R48:54\t 1 hkn0901",,terminal_output +13614,10200347,"TERMINAL",0,0,"75",,terminal_output +13615,10201372,"TERMINAL",0,0,"86",,terminal_output +13616,10202395,"TERMINAL",0,0,"97",,terminal_output +13617,10203353,"TERMINAL",0,0,"408",,terminal_output +13618,10204245,"TERMINAL",0,0,"[?1049l\r[?1l>]0;tum_cte0515@hkn0901:~/Projects/jafar[?2004h(jafar) [tum_cte0515@hkn0901 jafar]$ ",,terminal_output +13619,10204808,"TERMINAL",0,0,"[?25lg[?25h",,terminal_output +13620,10204871,"TERMINAL",0,0,"[?25li[?25h",,terminal_output +13621,10204933,"TERMINAL",0,0,"[?25lt[?25h",,terminal_output +13622,10205130,"TERMINAL",0,0,"[?25l [?25h",,terminal_output +13623,10205825,"TERMINAL",0,0,"[?25ld[?25h",,terminal_output +13624,10205930,"TERMINAL",0,0,"[?25li[?25h[?25lf[?25h",,terminal_output +13625,10206136,"TERMINAL",0,0,"[?25lf[?25h",,terminal_output +13626,10206199,"TERMINAL",0,0,"[?25l [?25h",,terminal_output +13627,10206485,"TERMINAL",0,0,"\r\n[?2004l\r[?1h=\rdiff --git a/genie.py b/genie.py\r\nindex f06a64c..67b1db9 100644\r\n--- a/genie.py\r\n+++ b/genie.py\r\n@@ -249,6 +249,7 @@ class MaskGITStepMihir(nn.Module):\r\n unmasked_ratio = jnp.cos(jnp.pi * (step + 1) / (self.steps * 2))\r\n step_temp = self.temperature * (1.0 - unmasked_ratio)\r\n final_logits = self.dynamics.dynamics(vid_embed)[:, -1] / step_temp\r\n+ \r\n \r\n # --- Sample new tokens for final frame ---\r\n if self.sample_argmax:\r\n\r[?1l>]0;tum_cte0515@hkn0901:~/Projects/jafar[?2004h(jafar) [tum_cte0515@hkn0901 jafar]$ ",,terminal_output +13628,10208007,"TERMINAL",0,0,"[?25lj[?25h",,terminal_output +13629,10246290,"TERMINAL",0,0,"git diff ",,terminal_output +13630,10246431,"TERMINAL",0,0,"queue",,terminal_output +13631,10246644,"TERMINAL",0,0,"git checkout fix-sampling",,terminal_output +13632,10246800,"TERMINAL",0,0,"cd ../jafar",,terminal_output +13633,10246958,"TERMINAL",0,0,"queue",,terminal_output +13634,10247090,"TERMINAL",0,0,"sbatch scripts_horeka/modelsize_scaling/tokenizer/train_tokenizer_80M.sbatch",,terminal_output +13635,10247230,"TERMINAL",0,0,"37M.sbatch",,terminal_output +13636,10247383,"TERMINAL",0,0,"200M.sbatch",,terminal_output +13637,10248064,"TERMINAL",0,0,"140M.sbatch",,terminal_output +13638,10248261,"TERMINAL",0,0,"batchsize_scaling/adjusted_lr/train_tokenizer_4_nodes.sbatch",,terminal_output +13639,10248420,"TERMINAL",0,0,"2_nodes.sbatch",,terminal_output +13640,10248551,"TERMINAL",0,0,"1_nodes.sbatch",,terminal_output +13641,10248700,"TERMINAL",0,0,"\rrunner",,terminal_output +13642,10248854,"TERMINAL",0,0,"sync-runner",,terminal_output +13643,10249264,"TERMINAL",0,0,"pwd",,terminal_output +13644,10249633,"TERMINAL",0,0,"cd checkpoints/",,terminal_output +13645,10250027,"TERMINAL",0,0,"ls",,terminal_output +13646,10250297,"TERMINAL",0,0,"mkdir 
checkpoints",,terminal_output +13647,10250484,"TERMINAL",0,0,"ls",,terminal_output +13648,10250881,"TERMINAL",0,0,"cd ..",,terminal_output +13649,10251255,"TERMINAL",0,0,"ls ../jafar_jobs/logs/logs_training/train_tokenizer_batch_size_scaling_1_node_3313570.log ",,terminal_output +13650,10251539,"TERMINAL",0,0,"\rsh scripts_horeka/overfit_sample_tiny/sample.sh",,terminal_output +13651,10261567,"genie.py",0,0,"from typing import Dict, Any\n\nimport optax\nimport jax\nimport jax.numpy as jnp\nimport flax.linen as nn\nfrom jax import NamedSharding\nfrom flax.training.train_state import TrainState\nfrom flax.training import orbax_utils\nfrom orbax.checkpoint import PyTreeCheckpointer\n\nfrom models.dynamics import DynamicsMaskGIT\nfrom models.lam import LatentActionModel\nfrom models.tokenizer import TokenizerVQVAE\n\n\nclass Genie(nn.Module):\n """"""Genie model""""""\n\n # --- Tokenizer ---\n in_dim: int\n tokenizer_dim: int\n latent_patch_dim: int\n num_patch_latents: int\n patch_size: int\n tokenizer_num_blocks: int\n tokenizer_num_heads: int\n # --- LAM ---\n lam_dim: int\n latent_action_dim: int\n num_latent_actions: int\n lam_patch_size: int\n lam_num_blocks: int\n lam_num_heads: int\n # --- Dynamics ---\n dyna_dim: int\n dyna_num_blocks: int\n dyna_num_heads: int\n dropout: float = 0.0\n mask_limit: float = 0.0\n\n def setup(self):\n self.tokenizer = TokenizerVQVAE(\n in_dim=self.in_dim,\n model_dim=self.tokenizer_dim,\n latent_dim=self.latent_patch_dim,\n num_latents=self.num_patch_latents,\n patch_size=self.patch_size,\n num_blocks=self.tokenizer_num_blocks,\n num_heads=self.tokenizer_num_heads,\n dropout=0.0,\n codebook_dropout=0.0,\n )\n self.lam = LatentActionModel(\n in_dim=self.in_dim,\n model_dim=self.lam_dim,\n latent_dim=self.latent_patch_dim,\n num_latents=self.num_latent_actions,\n patch_size=self.lam_patch_size,\n num_blocks=self.lam_num_blocks,\n num_heads=self.lam_num_heads,\n dropout=0.0,\n codebook_dropout=0.0,\n )\n self.dynamics = DynamicsMaskGIT(\n model_dim=self.dyna_dim,\n num_latents=self.num_patch_latents,\n num_blocks=self.dyna_num_blocks,\n num_heads=self.dyna_num_heads,\n dropout=self.dropout,\n mask_limit=self.mask_limit,\n )\n\n def __call__(self, batch: Dict[str, Any], training: bool = True) -> Dict[str, Any]:\n tokenizer_outputs = self.tokenizer.vq_encode(batch[""videos""], training=False)\n lam_outputs = self.lam.vq_encode(batch[""videos""], training=False)\n outputs = dict(\n video_tokens=jax.lax.stop_gradient(tokenizer_outputs[""indices""]),\n latent_actions=jax.lax.stop_gradient(lam_outputs[""z_q""]),\n )\n outputs[""mask_rng""] = batch[""mask_rng""]\n dyna_outputs = self.dynamics(outputs, training)\n outputs.update(dyna_outputs)\n mle_indices = jnp.argmax(outputs[""token_logits""], axis=-1)\n outputs[""recon""] = self.tokenizer.decode(\n mle_indices, batch[""videos""].shape[2:4]\n )\n return outputs\n\n @nn.compact\n def sample_mihir(\n self,\n batch: Dict[str, Any],\n seq_len: int,\n steps: int,\n temperature: float,\n sample_argmax: bool,\n ) -> Any:\n # B == batch_size\n # T == num_frames (input)\n # N == num_patches\n # S == seq_len\n # A == action_space\n # D == latent_dim\n # --- Encode videos and actions ---\n tokenizer_out = self.tokenizer.vq_encode(batch[""videos""], training=False)\n token_idxs = tokenizer_out[""indices""] # (B, T, N)\n B, T, N = token_idxs.shape\n S = seq_len\n print(""token_idxs shape:"", token_idxs.shape)\n pad_shape = (B, S - T, N)\n pad = jnp.zeros(pad_shape, dtype=token_idxs.dtype)\n token_idxs = 
jnp.concatenate([token_idxs, pad], axis=1) # shape (B, S, N)\n action_tokens = self.lam.vq.get_codes(batch[""latent_actions""])\n\n # begin potential forloop (from T to S)\n initial_T = T\n for T in range(initial_T, S):\n # Create a mask that is 1 (True) where we just padded\n # token_idxs shape: (B, S, N), T = original length, S = seq_len\n # mask is True for padded positions (i.e., t >= T)\n mask = (jnp.arange(S)[None, :, None] >= T) # shape (1, S, 1)\n mask = jnp.broadcast_to(mask, (B, S, N)) # shape (B, S, N)\n init_mask = mask.astype(bool)\n token_idxs *= ~init_mask\n #print(""token_idxs[0,:,0]:"", token_idxs[0,:,0])\n #print(""init_mask[0,:,0]:"", init_mask[0,:,0])\n\n assert init_mask.shape == (B, S, N), ""Wrong mask shape""\n\n # --- Initialize MaskGIT ---\n init_carry = (\n batch[""rng""],\n init_mask,\n token_idxs,\n action_tokens,\n )\n MaskGITLoop = nn.scan(\n MaskGITStepMihir,\n variable_broadcast=""params"",\n split_rngs={""params"": False},\n in_axes=0,\n out_axes=0,\n length=steps,\n )\n\n # --- Run MaskGIT loop ---\n loop_fn = MaskGITLoop(\n dynamics=self.dynamics,\n tokenizer=self.tokenizer,\n temperature=temperature,\n sample_argmax=sample_argmax,\n steps=steps,\n )\n final_carry, _ = loop_fn(init_carry, jnp.arange(steps))\n token_idxs = final_carry[2]\n\n new_frame_pixels = self.tokenizer.decode(\n token_idxs,\n video_hw=batch[""videos""].shape[2:4],\n )\n return new_frame_pixels\n\n @nn.compact\n def sample(\n self,\n batch: Dict[str, Any],\n steps: int = 25,\n temperature: int = 1,\n sample_argmax: bool = False,\n ) -> Any:\n # B == batch_size\n # T == num_frames (input)\n # N == num_patches\n # S == seq_len\n # A == action_space\n # D == latent_dim\n # --- Encode videos and actions ---\n tokenizer_out = self.tokenizer.vq_encode(batch[""videos""], training=False)\n token_idxs = tokenizer_out[""indices""]# (B, T, N)\n new_frame_idxs = jnp.zeros_like(token_idxs)[:, 0] # (B, N) \n action_tokens = self.lam.vq.get_codes(batch[""latent_actions""]) # (B, S, A, D)\n\n # --- Initialize MaskGIT ---\n init_mask = jnp.ones_like(token_idxs, dtype=bool)[:, 0] # (B, N)\n init_carry = (\n batch[""rng""],\n new_frame_idxs,\n init_mask,\n token_idxs,\n action_tokens,\n )\n MaskGITLoop = nn.scan(\n MaskGITStep,\n variable_broadcast=""params"",\n split_rngs={""params"": False},\n in_axes=0,\n out_axes=0,\n length=steps,\n )\n\n # --- Run MaskGIT loop ---\n loop_fn = MaskGITLoop(\n dynamics=self.dynamics,\n tokenizer=self.tokenizer,\n temperature=temperature,\n sample_argmax=sample_argmax,\n steps=steps,\n )\n final_carry, _ = loop_fn(init_carry, jnp.arange(steps))\n new_frame_idxs = final_carry[1]\n new_frame_pixels = self.tokenizer.decode(\n jnp.expand_dims(new_frame_idxs, 1),\n video_hw=batch[""videos""].shape[2:4],\n )\n return new_frame_pixels\n\n def vq_encode(self, batch, training) -> Dict[str, Any]:\n # --- Preprocess videos ---\n lam_output = self.lam.vq_encode(batch[""videos""], training=training)\n return lam_output[""indices""]\n\n\nclass MaskGITStepMihir(nn.Module):\n dynamics: nn.Module\n tokenizer: nn.Module\n temperature: float\n sample_argmax: bool\n steps: int\n\n @nn.compact\n def __call__(self, carry, x):\n rng, mask, token_idxs, action_tokens = carry\n step = x\n B, S, N = token_idxs.shape[:3]\n\n # --- Construct + encode video ---\n vid_embed = self.dynamics.patch_embed(token_idxs)\n # Mask vid_embed: set to mask_token where mask==1, else keep vid_embed\n # mask: (B, S, N), vid_embed: (B, S, N, D), mask_token: (D,)\n mask_token = self.dynamics.mask_token # (1,1, 1, 
D,)\n # Expand mask to (B, S, N, 1) for broadcasting\n mask_expanded = mask[..., None]\n vid_embed = jnp.where(mask_expanded, mask_token, vid_embed)\n\n # --- Predict transition ---\n act_embed = self.dynamics.action_up(action_tokens)\n vid_embed += jnp.pad(act_embed, ((0, 0), (1, 0), (0, 0), (0, 0)))\n unmasked_ratio = jnp.cos(jnp.pi * (step + 1) / (self.steps * 2))\n step_temp = self.temperature * (1.0 - unmasked_ratio)\n final_logits = self.dynamics.dynamics(vid_embed)[:, -1] / step_temp\n \n\n # --- Sample new tokens for final frame ---\n if self.sample_argmax:\n sampled_token_idxs = jnp.argmax(final_logits, axis=-1)\n else:\n rng, _rng = jax.random.split(rng)\n sampled_token_idxs = jnp.where(\n step == self.steps - 1,\n jnp.argmax(final_logits, axis=-1),\n jax.random.categorical(_rng, final_logits),\n )\n gather_fn = jax.vmap(jax.vmap(lambda x, y: x[y]))\n final_token_probs = gather_fn(jax.nn.softmax(final_logits), sampled_token_idxs)\n final_token_probs += ~mask\n # Update masked tokens only\n jax.debug.print(""maskgit-sampled-token_idxs[0,:,0]: {}"", token_idxs[0,:,0])\n token_idxs = jnp.where(mask, sampled_token_idxs, token_idxs)\n jax.debug.print(""maskgit-token_idxs[0,:,0]: {}"", token_idxs[0,:,0])\n\n # --- Update mask ---\n num_unmasked_tokens = jnp.round(N * (1.0 - unmasked_ratio)).astype(int)\n idx_mask = jnp.arange(final_token_probs.shape[-1]) > num_unmasked_tokens\n sorted_idxs = jnp.argsort(final_token_probs, axis=-1, descending=True)\n mask_update_fn = jax.vmap(lambda msk, ids: msk.at[ids].set(idx_mask))\n new_mask = mask_update_fn(mask, sorted_idxs)\n\n new_carry = (rng, new_mask, token_idxs, action_tokens)\n return new_carry, None\n\n\nclass MaskGITStep(nn.Module):\n dynamics: nn.Module\n tokenizer: nn.Module\n temperature: float\n sample_argmax: bool\n steps: int\n\n @nn.compact\n def __call__(self, carry, x):\n rng, final_token_idxs, mask, token_idxs, action_tokens = carry\n step = x\n B, T, N = token_idxs.shape[:3]\n\n # --- Construct + encode video ---\n vid_token_idxs = jnp.concatenate(\n (token_idxs, jnp.expand_dims(final_token_idxs, 1)), axis=1\n ) # (B, T+1, N)\n vid_embed = self.dynamics.patch_embed(vid_token_idxs) # (B, T+1, N, D)\n curr_masked_frame = jnp.where(\n jnp.expand_dims(mask, -1), # (B, N, 1)\n self.dynamics.mask_token[0], # (B, 1, D)\n vid_embed[:, -1], # (B, N, D)\n ) # (B, N, D)\n vid_embed = vid_embed.at[:, -1].set(curr_masked_frame)\n\n # --- Predict transition ---\n act_embed = self.dynamics.action_up(action_tokens)\n vid_embed += jnp.pad(act_embed, ((0, 0), (1, 0), (0, 0), (0, 0)))\n unmasked_ratio = jnp.cos(jnp.pi * (step + 1) / (self.steps * 2))\n step_temp = self.temperature * (1.0 - unmasked_ratio)\n final_logits = self.dynamics.dynamics(vid_embed)[:, -1] / step_temp\n\n # --- Sample new tokens for final frame ---\n if self.sample_argmax:\n sampled_token_idxs = jnp.argmax(final_logits, axis=-1)\n else:\n rng, _rng = jax.random.split(rng)\n sampled_token_idxs = jnp.where(\n step == self.steps - 1,\n jnp.argmax(final_logits, axis=-1),\n jax.random.categorical(_rng, final_logits),\n )\n gather_fn = jax.vmap(jax.vmap(lambda x, y: x[y]))\n final_token_probs = gather_fn(jax.nn.softmax(final_logits), sampled_token_idxs)\n final_token_probs += ~mask\n # Update masked tokens only\n new_token_idxs = jnp.where(mask, sampled_token_idxs, final_token_idxs)\n\n # --- Update mask ---\n num_unmasked_tokens = jnp.round(N * (1.0 - unmasked_ratio)).astype(int)\n idx_mask = jnp.arange(final_token_probs.shape[-1]) > num_unmasked_tokens\n sorted_idxs = 
jnp.argsort(final_token_probs, axis=-1, descending=True)\n mask_update_fn = jax.vmap(lambda msk, ids: msk.at[ids].set(idx_mask))\n new_mask = mask_update_fn(mask, sorted_idxs)\n\n new_carry = (rng, new_token_idxs, new_mask, token_idxs, action_tokens)\n return new_carry, None\n\n\ndef restore_genie_components(\n train_state: TrainState,\n sharding: NamedSharding,\n inputs: Dict[str, jax.Array],\n rng: jax.Array,\n args,\n):\n """"""Restore pre-trained Genie components""""""\n rng, _rng = jax.random.split(rng)\n\n dummy_tokenizer = TokenizerVQVAE(\n in_dim=args.image_channels,\n model_dim=args.tokenizer_dim,\n latent_dim=args.latent_patch_dim,\n num_latents=args.num_patch_latents,\n patch_size=args.patch_size,\n num_blocks=args.tokenizer_num_blocks,\n num_heads=args.tokenizer_num_heads,\n dropout=args.dropout,\n codebook_dropout=args.dropout,\n )\n dummy_lam = LatentActionModel(\n in_dim=args.image_channels,\n model_dim=args.lam_dim,\n latent_dim=args.latent_patch_dim,\n num_latents=args.num_latent_actions,\n patch_size=args.lam_patch_size,\n num_blocks=args.lam_num_blocks,\n num_heads=args.lam_num_heads,\n dropout=args.dropout,\n codebook_dropout=args.dropout,\n )\n tokenizer_init_params = dummy_tokenizer.init(_rng, inputs)\n lam_init_params = dummy_lam.init(_rng, inputs)\n\n # dummy values since we only use tx to initialize the dummy train states\n dummy_tx = optax.adamw(\n learning_rate=optax.constant_schedule(args.max_lr),\n b1=0.9,\n b2=0.9,\n weight_decay=1e-4,\n )\n\n dummy_tokenizer_train_state = TrainState.create(\n apply_fn=dummy_tokenizer.apply, params=tokenizer_init_params, tx=dummy_tx\n )\n dummy_lam_train_state = TrainState.create(\n apply_fn=dummy_lam.apply, params=lam_init_params, tx=dummy_tx\n )\n\n def create_abstract_sharded_pytree(pytree_template, sharding_spec):\n """"""Replaces arrays in a pytree with ShapeDtypeStructs having the given sharding.""""""\n\n def map_fn(leaf_template):\n if hasattr(leaf_template, ""shape"") and hasattr(leaf_template, ""dtype""):\n return jax.ShapeDtypeStruct(\n leaf_template.shape, leaf_template.dtype, sharding=sharding_spec\n )\n return leaf_template\n\n return jax.tree_util.tree_map(map_fn, pytree_template)\n\n abstract_sharded_tokenizer_state = create_abstract_sharded_pytree(\n dummy_tokenizer_train_state, sharding\n )\n abstract_sharded_lam_state = create_abstract_sharded_pytree(\n dummy_lam_train_state, sharding\n )\n\n tokenizer_restore_target = {""model"": abstract_sharded_tokenizer_state}\n lam_restore_target = {""model"": abstract_sharded_lam_state}\n\n tokenizer_restore_args = orbax_utils.restore_args_from_target(\n tokenizer_restore_target\n )\n lam_restore_args = orbax_utils.restore_args_from_target(lam_restore_target)\n\n restored_tokenizer_params = (\n PyTreeCheckpointer()\n .restore(\n args.tokenizer_checkpoint,\n item=tokenizer_restore_target,\n restore_args=tokenizer_restore_args,\n )[""model""]\n .params[""params""]\n )\n restored_lam_params = (\n PyTreeCheckpointer()\n .restore(\n args.lam_checkpoint, item=lam_restore_target, restore_args=lam_restore_args\n )[""model""]\n .params[""params""]\n )\n # Genie does not initialize all LAM modules, thus we omit those extra modules during restoration\n # (f.srambical) FIXME: Currently, this is a small HBM memory crunch since the LAM's decoder is loaded into HBM and immediately discarded.\n # A workaround would be to restore to host memory first, and only move the weights to HBM after pruning the decoder\n restored_lam_params = {\n k: v\n for k, v in restored_lam_params.items()\n 
if k in train_state.params[""params""][""lam""]\n }\n\n train_state.params[""params""][""tokenizer""].update(restored_tokenizer_params)\n train_state.params[""params""][""lam""].update(restored_lam_params)\n\n return train_state\n",python,tab +13652,10261570,"genie.py",8151,0,"",python,selection_mouse +13653,10261989,"genie.py",8525,0,"",python,selection_mouse +13654,10262608,"genie.py",8310,0,"",python,selection_mouse +13655,10263299,"genie.py",8610,0,"",python,selection_mouse +13656,10264275,"genie.py",8610,0,"j",python,content +13657,10264276,"genie.py",8611,0,"",python,selection_keyboard +13658,10264322,"genie.py",8611,0,"a",python,content +13659,10264323,"genie.py",8612,0,"",python,selection_keyboard +13660,10264489,"genie.py",8612,0,"x",python,content +13661,10264490,"genie.py",8613,0,"",python,selection_keyboard +13662,10264605,"genie.py",8613,0,".",python,content +13663,10264606,"genie.py",8614,0,"",python,selection_keyboard +13664,10264842,"genie.py",8614,0,"d",python,content +13665,10264843,"genie.py",8615,0,"",python,selection_keyboard +13666,10264987,"genie.py",8615,0,"e",python,content +13667,10264988,"genie.py",8616,0,"",python,selection_keyboard +13668,10265041,"genie.py",8616,0,"b",python,content +13669,10265042,"genie.py",8617,0,"",python,selection_keyboard +13670,10265162,"genie.py",8617,0,"u",python,content +13671,10265163,"genie.py",8618,0,"",python,selection_keyboard +13672,10265271,"genie.py",8618,0,"g",python,content +13673,10265272,"genie.py",8619,0,"",python,selection_keyboard +13674,10265401,"genie.py",8619,0,".",python,content +13675,10265402,"genie.py",8620,0,"",python,selection_keyboard +13676,10265625,"genie.py",8620,0,"b",python,content +13677,10265627,"genie.py",8621,0,"",python,selection_keyboard +13678,10265687,"genie.py",8621,0,"r",python,content +13679,10265689,"genie.py",8622,0,"",python,selection_keyboard +13680,10265860,"genie.py",8622,0,"e",python,content +13681,10265861,"genie.py",8623,0,"",python,selection_keyboard +13682,10265996,"genie.py",8623,0,"a",python,content +13683,10265998,"genie.py",8624,0,"",python,selection_keyboard +13684,10266282,"genie.py",8620,4,"breakpoint",python,content +13685,10266918,"genie.py",8630,0,"()",python,content +13686,10266919,"genie.py",8631,0,"",python,selection_keyboard +13687,10267043,"genie.py",8631,1,")",python,content +13688,10267043,"genie.py",8632,0,"",python,selection_keyboard +13689,10268965,"TERMINAL",0,0,"\r\n[?2004l\r",,terminal_output +13690,10269108,"TERMINAL",0,0,"Sampling from checkpoint: /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/3309699/genie_1751384516_74500/\r\n",,terminal_output +13691,10272028,"TERMINAL",0,0,"2025-07-03 19:06:48.871601: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +13692,10285144,"TERMINAL",0,0,"2025-07-03 19:07:01.995969: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. 
Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +13693,10295478,"TERMINAL",0,0,"2025-07-03 19:07:12.343518: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +13694,10302913,"genie.py",0,0,"",python,tab +13695,10302914,"genie.py",8614,0,"",python,selection_mouse +13696,10303074,"genie.py",8614,5,"debug",python,selection_mouse +13697,10303699,"genie.py",8540,0,"",python,selection_mouse +13698,10303822,"genie.py",8534,12,"final_logits",python,selection_mouse +13699,10308314,"TERMINAL",0,0,"2025-07-03 19:07:25.215770: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +13700,10310939,"TERMINAL",0,0,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/orbax/checkpoint/_src/serialization/type_handlers.py:1251: UserWarning: Sharding info not provided when restoring. Populating sharding info from sharding file. Please note restoration time will be slightly increased due to reading from file. Note also that this option is unsafe when restoring on a different topology than the checkpoint was saved with.\r\n warnings.warn(\r\n",,terminal_output +13701,10319349,"TERMINAL",0,0,"2025-07-03 19:07:36.193946: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +13702,10322514,"TERMINAL",0,0,"2025-07-03 19:07:39.314268: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +13703,10329065,"TERMINAL",0,0,"2025-07-03 19:07:45.880024: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +13704,10331320,"TERMINAL",0,0,"2025-07-03 19:07:48.123638: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. 
Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +13705,10332665,"TERMINAL",0,0,"token_idxs shape: (1, 1, 920)\r\n",,terminal_output +13706,10339112,"TERMINAL",0,0,"maskgit-sampled-token_idxs[0,:,0]: [151 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0]\r\nEntering jdb:\r\n(jdb) ",,terminal_output +13707,10340034,"genie.py",9084,0,"",python,selection_mouse +13708,10340598,"genie.py",9141,0,"",python,selection_mouse +13709,10341831,"genie.py",9140,0,"",python,selection_command +13710,10341948,"genie.py",9198,0,"\n ",python,content +13711,10342459,"genie.py",9207,0,"j",python,content +13712,10342460,"genie.py",9208,0,"",python,selection_keyboard +13713,10342537,"genie.py",9208,0,"a",python,content +13714,10342538,"genie.py",9209,0,"",python,selection_keyboard +13715,10342699,"genie.py",9209,0,"x",python,content +13716,10342700,"genie.py",9210,0,"",python,selection_keyboard +13717,10342836,"genie.py",9210,0,".",python,content +13718,10342837,"genie.py",9211,0,"",python,selection_keyboard +13719,10343023,"genie.py",9211,0,"d",python,content +13720,10343023,"genie.py",9212,0,"",python,selection_keyboard +13721,10343229,"genie.py",9212,0,"e",python,content +13722,10343229,"genie.py",9213,0,"",python,selection_keyboard +13723,10343344,"genie.py",9213,0,"b",python,content +13724,10343345,"genie.py",9214,0,"",python,selection_keyboard +13725,10343481,"genie.py",9214,0,"u",python,content +13726,10343481,"genie.py",9215,0,"",python,selection_keyboard +13727,10343613,"genie.py",9215,0,"g",python,content +13728,10343615,"genie.py",9216,0,"",python,selection_keyboard +13729,10344352,"genie.py",9216,0,".",python,content +13730,10344352,"genie.py",9217,0,"",python,selection_keyboard +13731,10344899,"genie.py",9217,0,"b",python,content +13732,10344900,"genie.py",9218,0,"",python,selection_keyboard +13733,10344935,"genie.py",9218,0,"r",python,content +13734,10344936,"genie.py",9219,0,"",python,selection_keyboard +13735,10345118,"genie.py",9219,0,"e",python,content +13736,10345122,"genie.py",9220,0,"",python,selection_keyboard +13737,10345431,"genie.py",9217,3,"breakpoint",python,content +13738,10346290,"genie.py",9227,0,"()",python,content +13739,10346290,"genie.py",9228,0,"",python,selection_keyboard +13740,10346349,"genie.py",9228,1,")",python,content +13741,10346350,"genie.py",9229,0,"",python,selection_keyboard +13742,10346526,"genie.py",9228,0,"",python,selection_command +13743,10350533,"genie.py",8619,0,"",python,selection_mouse +13744,10351029,"genie.py",8547,0,"",python,selection_mouse +13745,10352834,"genie.py",8601,0,"\n final_logits = self.dynamics.dynamics(vid_embed)[:, -1] / step_temp",python,content +13746,10352905,"genie.py",8610,0,"",python,selection_command +13747,10353640,"genie.py",8611,0,"",python,selection_command +13748,10354028,"genie.py",8612,0,"",python,selection_command +13749,10354079,"genie.py",8613,0,"",python,selection_command +13750,10354079,"genie.py",8614,0,"",python,selection_command +13751,10354141,"genie.py",8615,0,"",python,selection_command +13752,10354142,"genie.py",8616,0,"",python,selection_command +13753,10354173,"genie.py",8617,0,"",python,selection_command +13754,10354202,"genie.py",8618,0,"",python,selection_command +13755,10354259,"genie.py",8619,0,"",python,selection_command +13756,10354320,"genie.py",8620,0,"",python,selection_command +13757,10354321,"genie.py",8621,0,"",python,selection_command +13758,10354334,"genie.py",8622,0,"",python,selection_command 
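Both MaskGIT step modules recorded above share one cosine masking schedule: unmasked_ratio = cos(pi * (step + 1) / (2 * steps)) decays from near 1 to exactly 0, so round(N * (1 - unmasked_ratio)) tokens are committed per iteration and every one of the N tokens is fixed on the final step. A minimal standalone sketch of that schedule (steps and N are illustrative values; N = 920 matches the "token_idxs shape: (1, 1, 920)" printout in the log):

import jax.numpy as jnp

steps, N = 25, 920  # N = patch tokens per frame
for step in range(steps):
    unmasked_ratio = jnp.cos(jnp.pi * (step + 1) / (steps * 2))
    num_unmasked_tokens = jnp.round(N * (1.0 - unmasked_ratio)).astype(int)
    print(step, int(num_unmasked_tokens))  # climbs from ~2 at step 0 to 920 at the last step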
+13759,10355173,"genie.py",8622,0,"_",python,content +13760,10355174,"genie.py",8623,0,"",python,selection_keyboard +13761,10355428,"genie.py",8623,0,"t",python,content +13762,10355429,"genie.py",8624,0,"",python,selection_keyboard +13763,10355508,"genie.py",8624,0,"m",python,content +13764,10355509,"genie.py",8625,0,"",python,selection_keyboard +13765,10355650,"genie.py",8625,0,"p",python,content +13766,10355651,"genie.py",8626,0,"",python,selection_keyboard +13767,10356161,"genie.py",8625,0,"",python,selection_command +13768,10356303,"genie.py",8626,0,"",python,selection_command +13769,10356796,"genie.py",8627,0,"",python,selection_command +13770,10356833,"genie.py",8628,0,"",python,selection_command +13771,10356897,"genie.py",8629,0,"",python,selection_command +13772,10356897,"genie.py",8630,0,"",python,selection_command +13773,10356992,"genie.py",8631,0,"",python,selection_command +13774,10357072,"genie.py",8632,0,"",python,selection_command +13775,10357073,"genie.py",8633,0,"",python,selection_command +13776,10357073,"genie.py",8634,0,"",python,selection_command +13777,10357113,"genie.py",8635,0,"",python,selection_command +13778,10357114,"genie.py",8636,0,"",python,selection_command +13779,10357149,"genie.py",8637,0,"",python,selection_command +13780,10357151,"genie.py",8638,0,"",python,selection_command +13781,10357205,"genie.py",8639,0,"",python,selection_command +13782,10357239,"genie.py",8640,0,"",python,selection_command +13783,10357240,"genie.py",8641,0,"",python,selection_command +13784,10357359,"genie.py",8642,0,"",python,selection_command +13785,10357360,"genie.py",8643,0,"",python,selection_command +13786,10357360,"genie.py",8644,0,"",python,selection_command +13787,10357397,"genie.py",8645,0,"",python,selection_command +13788,10357398,"genie.py",8646,0,"",python,selection_command +13789,10357472,"genie.py",8647,0,"",python,selection_command +13790,10357473,"genie.py",8648,0,"",python,selection_command +13791,10357510,"genie.py",8649,0,"",python,selection_command +13792,10357511,"genie.py",8650,0,"",python,selection_command +13793,10357538,"genie.py",8651,0,"",python,selection_command +13794,10357627,"genie.py",8652,0,"",python,selection_command +13795,10357627,"genie.py",8653,0,"",python,selection_command +13796,10357685,"genie.py",8654,0,"",python,selection_command +13797,10357686,"genie.py",8655,0,"",python,selection_command +13798,10357721,"genie.py",8656,0,"",python,selection_command +13799,10357722,"genie.py",8657,0,"",python,selection_command +13800,10357758,"genie.py",8658,0,"",python,selection_command +13801,10357759,"genie.py",8659,0,"",python,selection_command +13802,10357894,"genie.py",8660,0,"",python,selection_command +13803,10358044,"genie.py",8661,0,"",python,selection_command +13804,10358210,"genie.py",8662,0,"",python,selection_command +13805,10358673,"genie.py",8662,4,"",python,content +13806,10359102,"genie.py",8662,1,"",python,content +13807,10359254,"genie.py",8662,1,"",python,content +13808,10359420,"genie.py",8662,1,"",python,content +13809,10361487,"TERMINAL",0,0,"[?25ll[?25h",,terminal_output +13810,10361668,"TERMINAL",0,0,"\r\n> /hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/genie.py(252)\r\n # --- Predict transition ---\r\n act_embed = self.dynamics.action_up(action_tokens)\r\n vid_embed += jnp.pad(act_embed, ((0, 0), (1, 0), (0, 0), (0, 0)))\r\n unmasked_ratio = jnp.cos(jnp.pi * (step + 1) / (self.steps * 2))\r\n step_temp = self.temperature * (1.0 - unmasked_ratio)\r\n final_logits = self.dynamics.dynamics(vid_embed)[:, -1] / 
step_temp\r\n-> jax.debug.breakpoint()\r\n \r\n # --- Sample new tokens for final frame ---\r\n if self.sample_argmax:\r\n sampled_token_idxs = jnp.argmax(final_logits, axis=-1)\r\n else:\r\n(jdb) ",,terminal_output +13811,10365639,"TERMINAL",0,0,"[?25lf[?25h",,terminal_output +13812,10365639,"TERMINAL",0,0,"[?25li[?25h",,terminal_output +13813,10365720,"TERMINAL",0,0,"[?25ln[?25h",,terminal_output +13814,10365772,"TERMINAL",0,0,"[?25la[?25h",,terminal_output +13815,10365916,"TERMINAL",0,0,"[?25ll[?25h",,terminal_output +13816,10366290,"TERMINAL",0,0,"[?25l_[?25h",,terminal_output +13817,10366540,"TERMINAL",0,0,"[?25ll[?25h",,terminal_output +13818,10366740,"TERMINAL",0,0,"[?25lo[?25h",,terminal_output +13819,10366872,"TERMINAL",0,0,"[?25lg[?25h",,terminal_output +13820,10366932,"TERMINAL",0,0,"[?25li[?25h",,terminal_output +13821,10367512,"TERMINAL",0,0,"[?25lt[?25h",,terminal_output +13822,10367725,"TERMINAL",0,0,"[?25ls[?25h[?25l.[?25h",,terminal_output +13823,10368199,"TERMINAL",0,0,"[?25ls[?25h[?25lh[?25h",,terminal_output +13824,10368348,"TERMINAL",0,0,"[?25la[?25h",,terminal_output +13825,10368419,"TERMINAL",0,0,"[?25lp[?25h",,terminal_output +13826,10368527,"TERMINAL",0,0,"[?25le[?25h",,terminal_output +13827,10368614,"TERMINAL",0,0,"\r\n(1, 920, 1024)\r\n(jdb) ",,terminal_output +13828,10384808,"TERMINAL",0,0,"[?25lm[?25h[?25la[?25h",,terminal_output +13829,10384943,"TERMINAL",0,0,"[?25ls[?25h",,terminal_output +13830,10385002,"TERMINAL",0,0,"[?25lk[?25h",,terminal_output +13831,10385437,"TERMINAL",0,0,"[?25l.[?25h",,terminal_output +13832,10385498,"TERMINAL",0,0,"[?25ls[?25h",,terminal_output +13833,10385716,"TERMINAL",0,0,"[?25lh[?25h",,terminal_output +13834,10385827,"TERMINAL",0,0,"[?25la[?25h",,terminal_output +13835,10385895,"TERMINAL",0,0,"[?25lp[?25h",,terminal_output +13836,10386000,"TERMINAL",0,0,"[?25le[?25h",,terminal_output +13837,10386073,"TERMINAL",0,0,"\r\n(1, 16, 920)\r\n(jdb) ",,terminal_output +13838,10398070,"genie.py",0,0,"",python,tab +13839,10398070,"genie.py",8700,0,"",python,selection_mouse +13840,10399921,"genie.py",8813,0,"",python,selection_mouse +13841,10400758,"genie.py",8698,0,"",python,selection_mouse +13842,10401766,"genie.py",8706,0,"",python,selection_command +13843,10401923,"genie.py",8730,0,"",python,selection_command +13844,10402087,"genie.py",8782,0,"",python,selection_command +13845,10402228,"genie.py",8813,0,"",python,selection_command +13846,10402362,"genie.py",8869,0,"",python,selection_command +13847,10402512,"genie.py",8894,0,"",python,selection_command +13848,10402666,"genie.py",8940,0,"",python,selection_command +13849,10402826,"genie.py",8984,0,"",python,selection_command +13850,10402977,"genie.py",9024,0,"",python,selection_command +13851,10403088,"genie.py",9075,0,"",python,selection_command +13852,10403230,"genie.py",9124,0,"",python,selection_command +13853,10403381,"genie.py",9149,0,"",python,selection_command +13854,10403863,"genie.py",9124,0,"",python,selection_command +13855,10404073,"genie.py",9075,0,"",python,selection_command +13856,10404289,"genie.py",9024,0,"",python,selection_command +13857,10404406,"genie.py",9075,0,"",python,selection_command +13858,10404534,"genie.py",9024,0,"",python,selection_command +13859,10404612,"genie.py",9075,0,"",python,selection_command +13860,10404772,"genie.py",9024,0,"",python,selection_command +13861,10404833,"genie.py",9075,0,"",python,selection_command +13862,10404978,"genie.py",9024,0,"",python,selection_command +13863,10405045,"genie.py",9075,0,"",python,selection_command 
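jax.debug.breakpoint(), typed into genie.py in the edits above, is JAX's breakpoint for traced code: unlike Python's built-in breakpoint(), it also fires inside jit- and scan-transformed functions, pausing execution at the (jdb) prompt where frame values can be evaluated directly (the session above inspects final_logits.shape and mask.shape this way). A minimal sketch, independent of the recorded model:

import jax
import jax.numpy as jnp

@jax.jit
def f(x):
    y = jnp.sin(x)
    jax.debug.breakpoint()  # execution pauses here at runtime with a (jdb) prompt
    return 2 * y

f(jnp.arange(4.0))

At the prompt, c continues and q quits; as the traceback later in this log shows, quitting raises SystemExit inside the debug callback, which surfaces as a CpuCallback error and aborts the run.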
+13864,10405237,"genie.py",9024,0,"",python,selection_command +13865,10405357,"genie.py",9075,0,"",python,selection_command +13866,10405397,"genie.py",9024,0,"",python,selection_command +13867,10405476,"genie.py",9075,0,"",python,selection_command +13868,10405621,"genie.py",9024,0,"",python,selection_command +13869,10405761,"genie.py",9075,0,"",python,selection_command +13870,10405847,"genie.py",9024,0,"",python,selection_command +13871,10405981,"genie.py",9075,0,"",python,selection_command +13872,10406210,"genie.py",9124,0,"",python,selection_command +13873,10437867,"genie.py",0,0,"",python,tab +13874,10437868,"genie.py",9208,0,"",python,selection_mouse +13875,10440323,"genie.py",9245,0,"",python,selection_mouse +13876,10440492,"genie.py",9237,12,"final_logits",python,selection_mouse +13877,10441764,"genie.py",9253,0,"",python,selection_mouse +13878,10441916,"genie.py",9252,18,"sampled_token_idxs",python,selection_mouse +13879,10442916,"genie.py",9205,0,"",python,selection_mouse +13880,10444261,"genie.py",9271,0,"\n final_token_probs = gather_fn(jax.nn.softmax(final_logits), sampled_token_idxs)",python,content +13881,10444272,"genie.py",9280,0,"",python,selection_command +13882,10444528,"genie.py",9281,0,"",python,selection_command +13883,10445031,"genie.py",9282,0,"",python,selection_command +13884,10445066,"genie.py",9283,0,"",python,selection_command +13885,10445100,"genie.py",9284,0,"",python,selection_command +13886,10445133,"genie.py",9285,0,"",python,selection_command +13887,10445163,"genie.py",9286,0,"",python,selection_command +13888,10445171,"genie.py",9287,0,"",python,selection_command +13889,10445241,"genie.py",9288,0,"",python,selection_command +13890,10445294,"genie.py",9289,0,"",python,selection_command +13891,10445332,"genie.py",9290,0,"",python,selection_command +13892,10445332,"genie.py",9291,0,"",python,selection_command +13893,10445343,"genie.py",9292,0,"",python,selection_command +13894,10445464,"genie.py",9293,0,"",python,selection_command +13895,10445605,"genie.py",9294,0,"",python,selection_command +13896,10445752,"genie.py",9295,0,"",python,selection_command +13897,10445901,"genie.py",9296,0,"",python,selection_command +13898,10446016,"genie.py",9297,0,"",python,selection_command +13899,10446566,"genie.py",9297,0,"_",python,content +13900,10446567,"genie.py",9298,0,"",python,selection_keyboard +13901,10446939,"genie.py",9298,0,"t",python,content +13902,10446939,"genie.py",9299,0,"",python,selection_keyboard +13903,10446971,"genie.py",9299,0,"m",python,content +13904,10446971,"genie.py",9300,0,"",python,selection_keyboard +13905,10447077,"genie.py",9300,0,"p",python,content +13906,10447078,"genie.py",9301,0,"",python,selection_keyboard +13907,10447484,"genie.py",9301,0,"^",python,content +13908,10447484,"genie.py",9302,0,"",python,selection_keyboard +13909,10448149,"genie.py",9301,1,"",python,content +13910,10448149,"genie.py",9301,0,"",python,selection_keyboard +13911,10448371,"genie.py",9300,0,"",python,selection_command +13912,10449005,"genie.py",9363,0,"",python,selection_command +13913,10449148,"genie.py",9362,0,"",python,selection_command +13914,10452487,"genie.py",9342,0,"",python,selection_command +13915,10452672,"genie.py",9341,0,"",python,selection_command +13916,10453161,"genie.py",9341,0,"_",python,content +13917,10453162,"genie.py",9342,0,"",python,selection_keyboard +13918,10453485,"genie.py",9342,0,"t",python,content +13919,10453485,"genie.py",9343,0,"",python,selection_keyboard +13920,10453571,"genie.py",9343,0,"m",python,content 
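The line duplicated and given a _tmp suffix here uses the double-vmap gather idiom from the step modules: jax.vmap(jax.vmap(lambda x, y: x[y])) looks up, for every (batch, position) pair, the softmax probability assigned to the sampled token id. A self-contained toy example (shapes are illustrative, not the recorded ones):

import jax
import jax.numpy as jnp

gather_fn = jax.vmap(jax.vmap(lambda x, y: x[y]))
probs = jnp.array([[[0.7, 0.2, 0.1], [0.1, 0.8, 0.1]]])  # (B=1, N=2, V=3)
ids = jnp.array([[0, 1]])                                # (B=1, N=2)
print(gather_fn(probs, ids))                             # [[0.7 0.8]]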
+13921,10453571,"genie.py",9344,0,"",python,selection_keyboard +13922,10453694,"genie.py",9344,0,"p",python,content +13923,10453694,"genie.py",9345,0,"",python,selection_keyboard +13924,10453920,"genie.py",9344,0,"",python,selection_command +13925,10454209,"genie.py",9256,0,"",python,selection_command +13926,10454376,"genie.py",9182,0,"",python,selection_command +13927,10454526,"genie.py",9124,0,"",python,selection_command +13928,10454646,"genie.py",9110,0,"",python,selection_command +13929,10454802,"genie.py",9050,0,"",python,selection_command +13930,10454949,"genie.py",8999,0,"",python,selection_command +13931,10455093,"genie.py",8959,0,"",python,selection_command +13932,10455219,"genie.py",8915,0,"",python,selection_command +13933,10455349,"genie.py",8869,0,"",python,selection_command +13934,10455511,"genie.py",8855,0,"",python,selection_command +13935,10456113,"genie.py",8856,0,"\n sampled_token_idxs = jnp.argmax(final_logits, axis=-1)",python,content +13936,10456152,"genie.py",8869,0,"",python,selection_command +13937,10456573,"genie.py",8870,0,"",python,selection_command +13938,10457056,"genie.py",8871,0,"",python,selection_command +13939,10457254,"genie.py",8872,0,"",python,selection_command +13940,10457255,"genie.py",8873,0,"",python,selection_command +13941,10457255,"genie.py",8874,0,"",python,selection_command +13942,10457255,"genie.py",8875,0,"",python,selection_command +13943,10457255,"genie.py",8876,0,"",python,selection_command +13944,10457282,"genie.py",8877,0,"",python,selection_command +13945,10457283,"genie.py",8878,0,"",python,selection_command +13946,10457317,"genie.py",8879,0,"",python,selection_command +13947,10457354,"genie.py",8880,0,"",python,selection_command +13948,10457355,"genie.py",8881,0,"",python,selection_command +13949,10457410,"genie.py",8882,0,"",python,selection_command +13950,10457421,"genie.py",8883,0,"",python,selection_command +13951,10457476,"genie.py",8884,0,"",python,selection_command +13952,10457548,"genie.py",8885,0,"",python,selection_command +13953,10457549,"genie.py",8886,0,"",python,selection_command +13954,10457577,"genie.py",8887,0,"",python,selection_command +13955,10458479,"genie.py",8887,0,"_",python,content +13956,10458480,"genie.py",8888,0,"",python,selection_keyboard +13957,10458703,"genie.py",8888,0,"t",python,content +13958,10458704,"genie.py",8889,0,"",python,selection_keyboard +13959,10458820,"genie.py",8889,0,"m",python,content +13960,10458821,"genie.py",8890,0,"",python,selection_keyboard +13961,10458914,"genie.py",8890,0,"p",python,content +13962,10458915,"genie.py",8891,0,"",python,selection_keyboard +13963,10459573,"genie.py",8890,0,"",python,selection_command +13964,10459856,"genie.py",8891,0,"",python,selection_command +13965,10460310,"genie.py",8892,0,"",python,selection_command +13966,10460369,"genie.py",8893,0,"",python,selection_command +13967,10460426,"genie.py",8894,0,"",python,selection_command +13968,10460427,"genie.py",8895,0,"",python,selection_command +13969,10460491,"genie.py",8896,0,"",python,selection_command +13970,10460542,"genie.py",8897,0,"",python,selection_command +13971,10460543,"genie.py",8898,0,"",python,selection_command +13972,10460608,"genie.py",8899,0,"",python,selection_command +13973,10460610,"genie.py",8900,0,"",python,selection_command +13974,10460846,"genie.py",8901,0,"",python,selection_command +13975,10460847,"genie.py",8902,0,"",python,selection_command +13976,10461063,"genie.py",8903,0,"",python,selection_command +13977,10461064,"genie.py",8904,0,"",python,selection_command 
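The copy made here is the greedy branch of the sampler. In both step modules, iterations before the last draw token ids from jax.random.categorical, while the final iteration (and the sample_argmax path) commits to jnp.argmax, so the frame is deterministic once the mask is fully resolved. A standalone sketch of the two branches on toy logits:

import jax
import jax.numpy as jnp

rng = jax.random.PRNGKey(0)
logits = jnp.array([[2.0, 0.5, 0.1]])
greedy = jnp.argmax(logits, axis=-1)              # used on the last MaskGIT step
stochastic = jax.random.categorical(rng, logits)  # used on earlier steps
print(greedy, stochastic)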
+13978,10461064,"genie.py",8905,0,"",python,selection_command +13979,10461065,"genie.py",8906,0,"",python,selection_command +13980,10461065,"genie.py",8907,0,"",python,selection_command +13981,10461066,"genie.py",8908,0,"",python,selection_command +13982,10461067,"genie.py",8909,0,"",python,selection_command +13983,10461067,"genie.py",8910,0,"",python,selection_command +13984,10461115,"genie.py",8911,0,"",python,selection_command +13985,10461116,"genie.py",8912,0,"",python,selection_command +13986,10461117,"genie.py",8913,0,"",python,selection_command +13987,10461117,"genie.py",8914,0,"",python,selection_command +13988,10461118,"genie.py",8915,0,"",python,selection_command +13989,10461119,"genie.py",8916,0,"",python,selection_command +13990,10461119,"genie.py",8917,0,"",python,selection_command +13991,10461290,"genie.py",8918,0,"",python,selection_command +13992,10462173,"genie.py",8917,0,"",python,selection_command +13993,10462837,"genie.py",8917,0,"_",python,content +13994,10462838,"genie.py",8918,0,"",python,selection_keyboard +13995,10463551,"genie.py",8918,0,"t",python,content +13996,10463551,"genie.py",8919,0,"",python,selection_keyboard +13997,10464127,"genie.py",8905,14,"final_logits_tmp",python,content +13998,10464418,"genie.py",8920,0,"",python,selection_command +13999,10469285,"genie.py",8945,0,"",python,selection_mouse +14000,10469307,"genie.py",8944,0,"",python,selection_command +14001,10469456,"genie.py",8945,0,"",python,selection_mouse +14002,10469457,"genie.py",8944,0,"",python,selection_command +14003,10469933,"genie.py",8880,0,"",python,selection_mouse +14004,10470103,"genie.py",8869,22,"sampled_token_idxs_tmp",python,selection_mouse +14005,10472765,"genie.py",9440,0,"",python,selection_mouse +14006,10474319,"genie.py",9441,0,"",python,selection_command +14007,10474550,"genie.py",9441,0,"_",python,content +14008,10474550,"genie.py",9442,0,"",python,selection_keyboard +14009,10474798,"genie.py",9442,0,"t",python,content +14010,10474799,"genie.py",9443,0,"",python,selection_keyboard +14011,10474886,"genie.py",9443,0,"m",python,content +14012,10474887,"genie.py",9444,0,"",python,selection_keyboard +14013,10475025,"genie.py",9444,0,"p",python,content +14014,10475026,"genie.py",9445,0,"",python,selection_keyboard +14015,10475128,"genie.py",9444,0,"",python,selection_command +14016,10479514,"TERMINAL",0,0,"^DERROR:2025-07-03 19:10:16,337:jax._src.debugging:96: jax.debug.callback failed\r\nTraceback (most recent call last):\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/jax/_src/debugging.py"", line 94, in debug_callback_impl\r\n callback(*args)\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/jax/_src/debugging.py"", line 334, in _flat_callback\r\n callback(*args, **kwargs)\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/jax/_src/debugger/core.py"", line 220, in _breakpoint_callback\r\n debugger(frames, thread_id, **kwargs)\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/jax/_src/debugger/cli_debugger.py"", line 167, in run_debugger\r\n CliDebugger(frames, thread_id, **kwargs).run()\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/jax/_src/debugger/cli_debugger.py"", line 160, in run\r\n self.cmdloop()\r\n File 
""/home/hk-project-p0023960/tum_cte0515/.local/share/uv/python/cpython-3.10.18-linux-x86_64-gnu/lib/python3.10/cmd.py"", line 138, in cmdloop\r\n stop = self.onecmd(line)\r\n File ""/home/hk-project-p0023960/tum_cte0515/.local/share/uv/python/cpython-3.10.18-linux-x86_64-gnu/lib/python3.10/cmd.py"", line 217, in onecmd\r\n return func(arg)\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/jax/_src/debugger/cli_debugger.py"", line 146, in do_quit\r\n sys.exit(0)\r\nSystemExit: 0\r\nERROR:jax._src.debugging:jax.debug.callback failed\r\nTraceback (most recent call last):\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/jax/_src/debugging.py"", line 94, in debug_callback_impl\r\n callback(*args)\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/jax/_src/debugging.py"", line 334, in _flat_callback\r\n callback(*args, **kwargs)\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/jax/_src/debugger/core.py"", line 220, in _breakpoint_callback\r\n debugger(frames, thread_id, **kwargs)\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/jax/_src/debugger/cli_debugger.py"", line 167, in run_debugger\r\n CliDebugger(frames, thread_id, **kwargs).run()\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/jax/_src/debugger/cli_debugger.py"", line 160, in run\r\n self.cmdloop()\r\n File ""/home/hk-project-p0023960/tum_cte0515/.local/share/uv/python/cpython-3.10.18-linux-x86_64-gnu/lib/python3.10/cmd.py"", line 138, in cmdloop\r\n stop = self.onecmd(line)\r\n File ""/home/hk-project-p0023960/tum_cte0515/.local/share/uv/python/cpython-3.10.18-linux-x86_64-gnu/lib/python3.10/cmd.py"", line 217, in onecmd\r\n return func(arg)\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/jax/_src/debugger/cli_debugger.py"", line 146, in do_quit\r\n sys.exit(0)\r\nSystemExit: 0\r\nE0703 19:10:16.365191 4004769 pjrt_stream_executor_client.cc:2917] Execution of replica 0 failed: INTERNAL: CpuCallback error calling callback: Traceback (most recent call last):\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/sample.py"", line 157, in \r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/sample.py"", line 118, in _autoreg_sample_mihir\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/jax/_src/traceback_util.py"", line 182, in reraise_with_filtered_traceback\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/flax/linen/module.py"", line 2240, in apply\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/flax/core/scope.py"", line 1079, in wrapper\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/flax/linen/module.py"", line 3022, in scope_fn\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/flax/linen/module.py"", line 699, in wrapped_module_method\r\n File 
""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/flax/linen/module.py"", line 1216, in _call_wrapped_method\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/genie.py"", line 155, in sample_mihir\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/flax/linen/transforms.py"", line 360, in wrapped_fn\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/flax/core/lift.py"", line 328, in wrapper\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/flax/core/lift.py"", line 1031, in inner\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/flax/core/axes_scan.py"", line 179, in scan_fn\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/jax/_src/traceback_util.py"", line 182, in reraise_with_filtered_traceback\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/jax/_src/lax/control_flow/loops.py"", line 355, in scan\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/jax/_src/core.py"", line 531, in bind\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/jax/_src/core.py"", line 551, in _true_bind\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/jax/_src/core.py"", line 556, in bind_with_trace\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/jax/_src/core.py"", line 1060, in process_primitive\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/jax/_src/dispatch.py"", line 88, in apply_primitive\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/jax/_src/traceback_util.py"", line 182, in reraise_with_filtered_traceback\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/jax/_src/pjit.py"", line 334, in cache_miss\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/jax/_src/pjit.py"", line 195, in _python_pjit_helper\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/jax/_src/pjit.py"", line 1884, in _pjit_call_impl_python\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/jax/_src/profiler.py"", line 354, in wrapper\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/jax/_src/interpreters/pxla.py"", line 1297, in __call__\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/jax/_src/callback.py"", line 782, in _wrapped_callback\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/jax/_src/debugging.py"", line 200, in _callback\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/jax/_src/debugging.py"", line 97, in 
debug_callback_impl\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/jax/_src/debugging.py"", line 334, in _flat_callback\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/jax/_src/debugger/core.py"", line 220, in _breakpoint_callback\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/jax/_src/debugger/cli_debugger.py"", line 167, in run_debugger\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/jax/_src/debugger/cli_debugger.py"", line 162, in run\r\n File ""/home/hk-project-p0023960/tum_cte0515/.local/share/uv/python/cpython-3.10.18-linux-x86_64-gnu/lib/python3.10/cmd.py"", line 145, in cmdloop\r\n File ""/home/hk-project-p0023960/tum_cte0515/.local/share/uv/python/cpython-3.10.18-linux-x86_64-gnu/lib/python3.10/cmd.py"", line 217, in onecmd\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/jax/_src/debugger/cli_debugger.py"", line 146, in do_quit\r\nSystemExit: 0\r\njax.errors.SimplifiedTraceback: For simplicity, JAX has removed its internal frames from the traceback of the following exception. Set JAX_TRACEBACK_FILTERING=off to include these.\r\n\r\nThe above exception was the direct cause of the following exception:\r\n\r\nTraceback (most recent call last):\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/sample.py"", line 157, in \r\n vid = _autoreg_sample_mihir(rng, video_batch, action_batch)\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/sample.py"", line 118, in _autoreg_sample_mihir\r\n generated_vid = genie.apply(\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/genie.py"", line 155, in sample_mihir\r\n final_carry, _ = loop_fn(init_carry, jnp.arange(steps))\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/flax/core/axes_scan.py"", line 179, in scan_fn\r\n c, ys = lax.scan(\r\njaxlib._jax.XlaRuntimeError: INTERNAL: CpuCallback error calling callback: Traceback (most recent call last):\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/sample.py"", line 157, in \r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/sample.py"", line 118, in _autoreg_sample_mihir\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/jax/_src/traceback_util.py"", line 182, in reraise_with_filtered_traceback\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/flax/linen/module.py"", line 2240, in apply\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/flax/core/scope.py"", line 1079, in wrapper\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/flax/linen/module.py"", line 3022, in scope_fn\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/flax/linen/module.py"", line 699, in wrapped_module_method\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/flax/linen/module.py"", line 1216, in _call_wrapped_method\r\n File 
""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/genie.py"", line 155, in sample_mihir\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/flax/linen/transforms.py"", line 360, in wrapped_fn\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/flax/core/lift.py"", line 328, in wrapper\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/flax/core/lift.py"", line 1031, in inner\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/flax/core/axes_scan.py"", line 179, in scan_fn\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/jax/_src/traceback_util.py"", line 182, in reraise_with_filtered_traceback\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/jax/_src/lax/control_flow/loops.py"", line 355, in scan\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/jax/_src/core.py"", line 531, in bind\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/jax/_src/core.py"", line 551, in _true_bind\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/jax/_src/core.py"", line 556, in bind_with_trace\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/jax/_src/core.py"", line 1060, in process_primitive\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/jax/_src/dispatch.py"", line 88, in apply_primitive\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/jax/_src/traceback_util.py"", line 182, in reraise_with_filtered_traceback\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/jax/_src/pjit.py"", line 334, in cache_miss\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/jax/_src/pjit.py"", line 195, in _python_pjit_helper\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/jax/_src/pjit.py"", line 1884, in _pjit_call_impl_python\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/jax/_src/profiler.py"", line 354, in wrapper\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/jax/_src/interpreters/pxla.py"", line 1297, in __call__\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/jax/_src/callback.py"", line 782, in _wrapped_callback\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/jax/_src/debugging.py"", line 200, in _callback\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/jax/_src/debugging.py"", line 97, in debug_callback_impl\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/jax/_src/debugging.py"", line 334, in _flat_callback\r\n 
File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/jax/_src/debugger/core.py"", line 220, in _breakpoint_callback\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/jax/_src/debugger/cli_debugger.py"", line 167, in run_debugger\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/jax/_src/debugger/cli_debugger.py"", line 162, in run\r\n File ""/home/hk-project-p0023960/tum_cte0515/.local/share/uv/python/cpython-3.10.18-linux-x86_64-gnu/lib/python3.10/cmd.py"", line 145, in cmdloop\r\n File ""/home/hk-project-p0023960/tum_cte0515/.local/share/uv/python/cpython-3.10.18-linux-x86_64-gnu/lib/python3.10/cmd.py"", line 217, in onecmd\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/jax/_src/debugger/cli_debugger.py"", line 146, in do_quit\r\nSystemExit: 0\r\n",,terminal_output +14017,10480837,"TERMINAL",0,0,"]0;tum_cte0515@hkn0901:~/Projects/jafar[?2004h(jafar) [tum_cte0515@hkn0901 jafar]$ ",,terminal_output +14018,10481257,"TERMINAL",0,0,"sh scripts_horeka/overfit_sample_tiny/sample.sh ",,terminal_output +14019,10481377,"TERMINAL",0,0,"\r\n[?2004l\r",,terminal_output +14020,10481524,"TERMINAL",0,0,"Sampling from checkpoint: /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/3309699/genie_1751384516_74500/\r\n",,terminal_output +14021,10484317,"TERMINAL",0,0,"2025-07-03 19:10:21.190801: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +14022,10485668,"TERMINAL",0,0,"bash",,terminal_focus +14023,10486652,"TERMINAL",0,0,"queue",,terminal_command +14024,10486711,"TERMINAL",0,0,"]633;E;2025-07-03 19:10:23 queue;70fb23be-3987-4c7c-890b-2d8e1f267fe1]633;C[?1049h(B[?7hEvery 1.0s: squeue --mehkn1993.localdomain: Thu Jul 3 19:10:23 2025JOBID PARTITION NAME USER ST\tTIME NODES NODELIST(REASON)3316017 accelerat train_to tum_cte0 PD\t0:00\t 2 (Priority)3316016 accelerat train_to tum_cte0 PD\t0:00\t 1 (Priority)3316026 accelerat train_to tum_cte0 PD\t0:00\t 4 (Priority)3316022 accelerat train_to tum_cte0 PD\t0:00\t 2 (Priority)3316020 accelerat train_to tum_cte0 PD\t0:00 12 (Priority)3316019 accelerat train_to tum_cte0 PD\t0:00\t 8 (Priority)3316018 accelerat train_to tum_cte0 PD\t0:00\t 4 (Priority)3315981 dev_accel interact tum_cte0 R53:41\t 1 hkn0901",,terminal_output +14025,10487797,"TERMINAL",0,0,"42",,terminal_output +14026,10488811,"TERMINAL",0,0,"53",,terminal_output +14027,10489851,"TERMINAL",0,0,"64",,terminal_output +14028,10490882,"TERMINAL",0,0,"75",,terminal_output +14029,10491927,"TERMINAL",0,0,"86",,terminal_output +14030,10492973,"TERMINAL",0,0,"97",,terminal_output +14031,10494033,"TERMINAL",0,0,"[?1049l\r[?1l>]0;tum_cte0515@hkn1993:~/Projects/jafar/scripts_horeka/batchsize_scaling/adjusted_lr]633;D;0",,terminal_output +14032,10495203,"TERMINAL",0,0,"idling",,terminal_command +14033,10495269,"TERMINAL",0,0,"]633;E;2025-07-03 19:10:32 idling;70fb23be-3987-4c7c-890b-2d8e1f267fe1]633;C[?1049h(B[?7hEvery 1.0s: sinfo_t_idlehkn1993.localdomain: Thu Jul 3 19:10:32 2025Partition dev_cpuonly: 10 nodes idle\rPartition cpuonly: 67 nodes idle\rPartition dev_accelerated:\t 0 nodes idle\rPartition 
accelerated:\t 0 nodes idle\rPartition dev_accelerated-h100 :\t 0 nodes idle\rPartition accelerated-h100:\t 0 nodes idle\rPartition large:\t 7 nodes idle",,terminal_output +14034,10496258,"TERMINAL",0,0,"3\t",,terminal_output +14035,10496468,"TERMINAL",0,0,"2025-07-03 19:10:33.370942: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +14036,10497311,"TERMINAL",0,0,"4\t",,terminal_output +14037,10498349,"TERMINAL",0,0,"5\t",,terminal_output +14038,10499389,"TERMINAL",0,0,"6\t",,terminal_output +14039,10500485,"TERMINAL",0,0,"7\t",,terminal_output +14040,10501509,"TERMINAL",0,0,"8\t",,terminal_output +14041,10502533,"TERMINAL",0,0,"9\t",,terminal_output +14042,10503558,"TERMINAL",0,0,"40\t",,terminal_output +14043,10504684,"TERMINAL",0,0,"1\t",,terminal_output +14044,10505629,"TERMINAL",0,0,"22",,terminal_output +14045,10506322,"TERMINAL",0,0,"2025-07-03 19:10:43.161817: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +14046,10506673,"TERMINAL",0,0,"3\t",,terminal_output +14047,10507755,"TERMINAL",0,0,"4\t",,terminal_output +14048,10508781,"TERMINAL",0,0,"5\t",,terminal_output +14049,10509907,"TERMINAL",0,0,"6\t",,terminal_output +14050,10510843,"TERMINAL",0,0,"7\t",,terminal_output +14051,10511876,"TERMINAL",0,0,"8\t",,terminal_output +14052,10512924,"TERMINAL",0,0,"9\t",,terminal_output +14053,10513973,"TERMINAL",0,0,"50\t",,terminal_output +14054,10515016,"TERMINAL",0,0,"1\t",,terminal_output +14055,10516152,"TERMINAL",0,0,"2\t",,terminal_output +14056,10517178,"TERMINAL",0,0,"3\t",,terminal_output +14057,10518055,"TERMINAL",0,0,"2025-07-03 19:10:54.952641: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +14058,10518114,"TERMINAL",0,0,"4\t",,terminal_output +14059,10519225,"TERMINAL",0,0,"6\t",,terminal_output +14060,10520248,"TERMINAL",0,0,"7\t",,terminal_output +14061,10520659,"TERMINAL",0,0,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/orbax/checkpoint/_src/serialization/type_handlers.py:1251: UserWarning: Sharding info not provided when restoring. Populating sharding info from sharding file. Please note restoration time will be slightly increased due to reading from file. 
Note also that this option is unsafe when restoring on a different topology than the checkpoint was saved with.\r\n warnings.warn(\r\n",,terminal_output +14062,10521219,"TERMINAL",0,0,"8\t",,terminal_output +14063,10522300,"TERMINAL",0,0,"9\t",,terminal_output +14064,10523323,"TERMINAL",0,0,"1:00\t",,terminal_output +14065,10524446,"TERMINAL",0,0,"1\t",,terminal_output +14066,10525478,"TERMINAL",0,0,"2\t",,terminal_output +14067,10526421,"TERMINAL",0,0,"3\t",,terminal_output +14068,10527520,"TERMINAL",0,0,"4\t",,terminal_output +14069,10528062,"TERMINAL",0,0,"2025-07-03 19:11:04.963260: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +14070,10528545,"TERMINAL",0,0,"5\t",,terminal_output +14071,10529568,"TERMINAL",0,0,"6\t",,terminal_output +14072,10530591,"TERMINAL",0,0,"7\t",,terminal_output +14073,10530997,"TERMINAL",0,0,"2025-07-03 19:11:07.899797: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +14074,10531615,"TERMINAL",0,0,"8\t",,terminal_output +14075,10532742,"TERMINAL",0,0,"9\t",,terminal_output +14076,10533765,"TERMINAL",0,0,"10\t",,terminal_output +14077,10534790,"TERMINAL",0,0,"1\t",,terminal_output +14078,10535826,"TERMINAL",0,0,"2\t",,terminal_output +14079,10536843,"TERMINAL",0,0,"3\t",,terminal_output +14080,10537380,"TERMINAL",0,0,"2025-07-03 19:11:14.283277: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +14081,10537863,"TERMINAL",0,0,"4\t",,terminal_output +14082,10538887,"TERMINAL",0,0,"5\t",,terminal_output +14083,10539501,"TERMINAL",0,0,"2025-07-03 19:11:16.345869: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. 
Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +14084,10539934,"TERMINAL",0,0,"6\t",,terminal_output +14085,10540840,"TERMINAL",0,0,"token_idxs shape: (1, 1, 920)\r\n",,terminal_output +14086,10540975,"TERMINAL",0,0,"7\t",,terminal_output +14087,10542076,"TERMINAL",0,0,"8\t",,terminal_output +14088,10543062,"TERMINAL",0,0,"9\t",,terminal_output +14089,10544119,"TERMINAL",0,0,"20\t",,terminal_output +14090,10545144,"TERMINAL",0,0,"2\t",,terminal_output +14091,10546259,"TERMINAL",0,0,"3\t",,terminal_output +14092,10547284,"TERMINAL",0,0,"4\t",,terminal_output +14093,10547824,"TERMINAL",0,0,"maskgit-sampled-token_idxs[0,:,0]: [151 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0]\r\nEntering jdb:\r\n(jdb) ",,terminal_output +14094,10548307,"TERMINAL",0,0,"5\t",,terminal_output +14095,10549265,"TERMINAL",0,0,"6\t",,terminal_output +14096,10550356,"TERMINAL",0,0,"7\t",,terminal_output +14097,10551379,"TERMINAL",0,0,"8\t",,terminal_output +14098,10552378,"TERMINAL",0,0,"9\t",,terminal_output +14099,10553427,"TERMINAL",0,0,"30\t",,terminal_output +14100,10554461,"TERMINAL",0,0,"1\t",,terminal_output +14101,10555508,"TERMINAL",0,0,"2\t",,terminal_output +14102,10556613,"TERMINAL",0,0,"3\t",,terminal_output +14103,10557626,"TERMINAL",0,0,"4\t",,terminal_output +14104,10558617,"TERMINAL",0,0,"5\t",,terminal_output +14105,10559674,"TERMINAL",0,0,"6\t",,terminal_output +14106,10560800,"TERMINAL",0,0,"7\t",,terminal_output +14107,10561833,"TERMINAL",0,0,"8\t",,terminal_output +14108,10562854,"TERMINAL",0,0,"9\t",,terminal_output +14109,10563877,"TERMINAL",0,0,"40\t",,terminal_output +14110,10564907,"TERMINAL",0,0,"1\t",,terminal_output +14111,10565921,"TERMINAL",0,0,"2\t",,terminal_output +14112,10566945,"TERMINAL",0,0,"3\t",,terminal_output +14113,10567983,"TERMINAL",0,0,"4\t",,terminal_output +14114,10569033,"TERMINAL",0,0,"5\t",,terminal_output +14115,10569935,"TERMINAL",0,0,"srun",,terminal_focus +14116,10570051,"TERMINAL",0,0,"6\t",,terminal_output +14117,10570650,"TERMINAL",0,0,"[?25ll[?25h",,terminal_output +14118,10571008,"TERMINAL",0,0,"\r\n> /hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/genie.py(269)\r\n jnp.argmax(final_logits, axis=-1),\r\n jax.random.categorical(_rng, final_logits),\r\n )\r\n gather_fn = jax.vmap(jax.vmap(lambda x, y: x[y]))\r\n final_token_probs = gather_fn(jax.nn.softmax(final_logits), sampled_token_idxs)\r\n final_token_probs_tmp = gather_fn(jax.nn.softmax(final_logits_tmp), sampled_token_idxs_tmp)\r\n-> jax.debug.breakpoint()\r\n final_token_probs += ~mask\r\n # Update masked tokens only\r\n jax.debug.print(""maskgit-sampled-token_idxs[0,:,0]: {}"", token_idxs[0,:,0])\r\n token_idxs = jnp.where(mask, sampled_token_idxs, token_idxs)\r\n jax.debug.print(""maskgit-token_idxs[0,:,0]: {}"", token_idxs[0,:,0])\r\n(jdb) ",,terminal_output +14119,10571114,"TERMINAL",0,0,"7\t",,terminal_output +14120,10572169,"TERMINAL",0,0,"8\t",,terminal_output +14121,10573191,"TERMINAL",0,0,"50\t",,terminal_output +14122,10574318,"TERMINAL",0,0,"1\t",,terminal_output +14123,10575341,"TERMINAL",0,0,"2\t",,terminal_output +14124,10576365,"TERMINAL",0,0,"3\t",,terminal_output +14125,10577389,"TERMINAL",0,0,"4\t",,terminal_output +14126,10578387,"TERMINAL",0,0,"5\t",,terminal_output +14127,10579436,"TERMINAL",0,0,"6\t",,terminal_output +14128,10580563,"TERMINAL",0,0,"7\t",,terminal_output +14129,10581588,"TERMINAL",0,0,"8\t",,terminal_output 
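[annotation] The "(jdb)" prompt replayed in the rows above comes from a jax.debug.breakpoint() placed inside the scanned sampling step of genie.py (the listing shows "-> jax.debug.breakpoint()", and the later tracebacks confirm it runs under lax.scan via flax's axes_scan). A minimal, self-contained sketch of that pattern — the step function and toy shapes here are stand-ins, not the actual genie.py sampler:

    import jax
    import jax.numpy as jnp

    def sample_step(carry, _):
        logits = carry
        # Pauses execution and drops into the CLI debugger, exactly as the
        # "(jdb)" prompt above does; you can then inspect traced values.
        jax.debug.breakpoint()
        return carry, jnp.argmax(logits, axis=-1)

    init = jnp.zeros((1, 920, 1024))  # hypothetical toy shape
    _, sampled = jax.lax.scan(sample_step, init, None, length=2)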
+14130,10582544,"TERMINAL",0,0,"9\t",,terminal_output +14131,10583572,"TERMINAL",0,0,"2:00\t",,terminal_output +14132,10584613,"TERMINAL",0,0,"1\t",,terminal_output +14133,10585841,"TERMINAL",0,0,"[?25lf[?25h",,terminal_output +14134,10585894,"TERMINAL",0,0,"[?25li[?25h",,terminal_output +14135,10585990,"TERMINAL",0,0,"2\t",,terminal_output +14136,10586024,"TERMINAL",0,0,"[?25ln[?25h",,terminal_output +14137,10586157,"TERMINAL",0,0,"[?25la[?25h",,terminal_output +14138,10586454,"TERMINAL",0,0,"[?25ll[?25h",,terminal_output +14139,10586818,"TERMINAL",0,0,"[?25l_[?25h",,terminal_output +14140,10587031,"TERMINAL",0,0,"3\t",,terminal_output +14141,10587115,"TERMINAL",0,0,"[?25ll[?25h",,terminal_output +14142,10587296,"TERMINAL",0,0,"[?25lo[?25h",,terminal_output +14143,10587420,"TERMINAL",0,0,"[?25lg[?25h",,terminal_output +14144,10587617,"TERMINAL",0,0,"[?25li[?25h",,terminal_output +14145,10587763,"TERMINAL",0,0,"[?25lt[?25h",,terminal_output +14146,10587992,"TERMINAL",0,0,"[?25ls[?25h",,terminal_output +14147,10588063,"TERMINAL",0,0,"4\t",,terminal_output +14148,10588183,"TERMINAL",0,0,"[?25l.[?25h",,terminal_output +14149,10588520,"TERMINAL",0,0,"[?25l.\r[?25h",,terminal_output +14150,10589115,"TERMINAL",0,0,"5\t",,terminal_output +14151,10589175,"TERMINAL",0,0,"[?25l.[?25h",,terminal_output +14152,10589658,"TERMINAL",0,0,"[?25ls[?25h[?25lh[?25h",,terminal_output +14153,10589779,"TERMINAL",0,0,"[?25la[?25h",,terminal_output +14154,10589939,"TERMINAL",0,0,"[?25lp[?25h[?25le[?25h",,terminal_output +14155,10590002,"TERMINAL",0,0,"\r\n(1, 920, 1024)\r\n(jdb) ",,terminal_output +14156,10590131,"TERMINAL",0,0,"6\t",,terminal_output +14157,10590510,"TERMINAL",0,0,"\rfinal_logits.shape",,terminal_output +14158,10591176,"TERMINAL",0,0,"8\t",,terminal_output +14159,10592149,"TERMINAL",0,0,"[?25l.\r[1@_.[?25h",,terminal_output +14160,10592208,"TERMINAL",0,0,"9\t",,terminal_output +14161,10592471,"TERMINAL",0,0,"[?25ls.\r[1@t.[?25h",,terminal_output +14162,10592523,"TERMINAL",0,0,"\r[1@m.",,terminal_output +14163,10592673,"TERMINAL",0,0,"[?25l.\r[1@p.[?25h",,terminal_output +14164,10593127,"TERMINAL",0,0,"\r\n(1, 16, 920, 1024)\r\n(jdb) ",,terminal_output +14165,10593259,"TERMINAL",0,0,"10\t",,terminal_output +14166,10594388,"TERMINAL",0,0,"1\t",,terminal_output +14167,10595412,"TERMINAL",0,0,"2\t",,terminal_output +14168,10596435,"TERMINAL",0,0,"3\t",,terminal_output +14169,10597459,"TERMINAL",0,0,"4\t",,terminal_output +14170,10598485,"TERMINAL",0,0,"5\t",,terminal_output +14171,10599508,"TERMINAL",0,0,"6\t",,terminal_output +14172,10600634,"TERMINAL",0,0,"7\t",,terminal_output +14173,10601658,"TERMINAL",0,0,"8\t",,terminal_output +14174,10602682,"TERMINAL",0,0,"9\t",,terminal_output +14175,10603707,"TERMINAL",0,0,"20\t",,terminal_output +14176,10604730,"TERMINAL",0,0,"1\t",,terminal_output +14177,10605753,"TERMINAL",0,0,"2\t",,terminal_output +14178,10606767,"TERMINAL",0,0,"3\t",,terminal_output +14179,10607802,"TERMINAL",0,0,"4\t",,terminal_output +14180,10608929,"TERMINAL",0,0,"5\t",,terminal_output +14181,10609954,"TERMINAL",0,0,"6\t",,terminal_output +14182,10610917,"TERMINAL",0,0,"7\t",,terminal_output +14183,10611951,"TERMINAL",0,0,"8\t",,terminal_output +14184,10613002,"TERMINAL",0,0,"9\t",,terminal_output +14185,10614041,"TERMINAL",0,0,"30\t",,terminal_output +14186,10615177,"TERMINAL",0,0,"1\t",,terminal_output +14187,10616165,"TERMINAL",0,0,"2\t",,terminal_output +14188,10617187,"TERMINAL",0,0,"[?25ls[?25h",,terminal_output +14189,10617187,"TERMINAL",0,0,"4\t",,terminal_output 
+14190,10617346,"TERMINAL",0,0,"[?25la[?25h",,terminal_output +14191,10617408,"TERMINAL",0,0,"[?25lm[?25h",,terminal_output +14192,10617551,"TERMINAL",0,0,"[?25lp[?25h",,terminal_output +14193,10617637,"TERMINAL",0,0,"[?25ll[?25h",,terminal_output +14194,10617749,"TERMINAL",0,0,"[?25le[?25h",,terminal_output +14195,10617857,"TERMINAL",0,0,"[?25ld[?25h",,terminal_output +14196,10618237,"TERMINAL",0,0,"5\t",,terminal_output +14197,10618252,"TERMINAL",0,0,"[?25l_[?25h",,terminal_output +14198,10618598,"TERMINAL",0,0,"[?25lt[?25h",,terminal_output +14199,10618688,"TERMINAL",0,0,"[?25lo[?25h",,terminal_output +14200,10618780,"TERMINAL",0,0,"[?25lk[?25h",,terminal_output +14201,10619004,"TERMINAL",0,0,"[?25le[?25h[?25ln[?25h",,terminal_output +14202,10619278,"TERMINAL",0,0,"6\t",,terminal_output +14203,10619341,"TERMINAL",0,0,"[?25l_[?25h",,terminal_output +14204,10619597,"TERMINAL",0,0,"[?25li[?25h",,terminal_output +14205,10619769,"TERMINAL",0,0,"[?25ld[?25h",,terminal_output +14206,10620398,"TERMINAL",0,0,"7\t",,terminal_output +14207,10620778,"TERMINAL",0,0,"[?25lx[?25h",,terminal_output +14208,10620841,"TERMINAL",0,0,"[?25ls[?25h",,terminal_output +14209,10621019,"TERMINAL",0,0,"[?25l.[?25h",,terminal_output +14210,10621244,"TERMINAL",0,0,"[?25ls[?25h",,terminal_output +14211,10621351,"TERMINAL",0,0,"8\t",,terminal_output +14212,10621509,"TERMINAL",0,0,"[?25la[?25h",,terminal_output +14213,10621785,"TERMINAL",0,0,"[?25la\r[?25h",,terminal_output +14214,10622009,"TERMINAL",0,0,"[?25lh[?25h",,terminal_output +14215,10622088,"TERMINAL",0,0,"[?25la[?25h",,terminal_output +14216,10622220,"TERMINAL",0,0,"[?25lo[?25h[?25le[?25h",,terminal_output +14217,10622446,"TERMINAL",0,0,"9\t",,terminal_output +14218,10622785,"TERMINAL",0,0,"[?25le\r[?25h",,terminal_output +14219,10622904,"TERMINAL",0,0,"[?25lo\r[?25h",,terminal_output +14220,10623001,"TERMINAL",0,0,"[?25lo[?25h",,terminal_output +14221,10623097,"TERMINAL",0,0,"[?25le[?25h",,terminal_output +14222,10623161,"TERMINAL",0,0,"\r\n*** AttributeError: 'jaxlib._jax.ArrayImpl' object has no attribute 'shaoe'\r\n(jdb) ",,terminal_output +14223,10623469,"TERMINAL",0,0,"40\t",,terminal_output +14224,10623879,"TERMINAL",0,0,"watch",,terminal_focus +14225,10624175,"TERMINAL",0,0,"[?1049l\r[?1l>]0;tum_cte0515@hkn1993:~/Projects/jafar/scripts_horeka/batchsize_scaling/adjusted_lr]633;D;0",,terminal_output +14226,10626053,"TERMINAL",0,0,"queue",,terminal_command +14227,10626108,"TERMINAL",0,0,"]633;E;2025-07-03 19:12:42 queue;70fb23be-3987-4c7c-890b-2d8e1f267fe1]633;C[?1049h(B[?7hEvery 1.0s: squeue --mehkn1993.localdomain: Thu Jul 3 19:12:42 2025JOBID PARTITION NAME USER ST\tTIME NODES NODELIST(REASON)3316017 accelerat train_to tum_cte0 PD\t0:00\t 2 (Priority)3316016 accelerat train_to tum_cte0 PD\t0:00\t 1 (Priority)3316026 accelerat train_to tum_cte0 PD\t0:00\t 4 (Priority)3316022 accelerat train_to tum_cte0 PD\t0:00\t 2 (Priority)3316020 accelerat train_to tum_cte0 PD\t0:00 12 (Priority)3316019 accelerat train_to tum_cte0 PD\t0:00\t 8 (Priority)3316018 accelerat train_to tum_cte0 PD\t0:00\t 4 (Priority)3315981 dev_accel interact tum_cte0 R56:00\t 1 hkn0901",,terminal_output +14228,10627044,"TERMINAL",0,0,"srun",,terminal_focus +14229,10627145,"TERMINAL",0,0,"32",,terminal_output +14230,10627787,"TERMINAL",0,0,"\rsampled_token_idxs.shaoe",,terminal_output +14231,10628199,"TERMINAL",0,0,"53",,terminal_output +14232,10628215,"TERMINAL",0,0,"[?25l[?25h",,terminal_output +14233,10628448,"TERMINAL",0,0,"[?25l[?25h",,terminal_output 
+14234,10628764,"TERMINAL",0,0,"[?25l[?25h",,terminal_output +14235,10629242,"TERMINAL",0,0,"64",,terminal_output +14236,10629617,"TERMINAL",0,0,"[?25lo\re[?25h",,terminal_output +14237,10630050,"TERMINAL",0,0,"[?25le\rpe[?25h",,terminal_output +14238,10630191,"TERMINAL",0,0,"\r\n(1, 920)\r\n(jdb) ",,terminal_output +14239,10630301,"TERMINAL",0,0,"75",,terminal_output +14240,10630900,"TERMINAL",0,0,"\rsampled_token_idxs.shape",,terminal_output +14241,10631321,"TERMINAL",0,0,"86",,terminal_output +14242,10632363,"TERMINAL",0,0,"97",,terminal_output +14243,10633271,"TERMINAL",0,0,"[?25l.\r[1@_.[?25h",,terminal_output +14244,10633397,"TERMINAL",0,0,"508",,terminal_output +14245,10633616,"TERMINAL",0,0,"[?25ls.\r[1@t.[?25h\r[1@m.",,terminal_output +14246,10633831,"TERMINAL",0,0,"[?25l.\r[1@p.[?25h",,terminal_output +14247,10634439,"TERMINAL",0,0,"\r\n(1, 16, 920)\r\n(jdb) ",,terminal_output +14248,10634466,"TERMINAL",0,0,"19",,terminal_output +14249,10635485,"TERMINAL",0,0,"210",,terminal_output +14250,10636577,"TERMINAL",0,0,"31",,terminal_output +14251,10637602,"TERMINAL",0,0,"42",,terminal_output +14252,10638727,"TERMINAL",0,0,"53",,terminal_output +14253,10639657,"TERMINAL",0,0,"64",,terminal_output +14254,10640775,"TERMINAL",0,0,"75",,terminal_output +14255,10641762,"TERMINAL",0,0,"86",,terminal_output +14256,10642823,"TERMINAL",0,0,"97",,terminal_output +14257,10643873,"TERMINAL",0,0,"3:008",,terminal_output +14258,10644937,"TERMINAL",0,0,"19",,terminal_output +14259,10645837,"genie.py",0,0,"",python,tab +14260,10646019,"TERMINAL",0,0,"220",,terminal_output +14261,10646790,"genie.py",9654,0,"",python,selection_mouse +14262,10646985,"TERMINAL",0,0,"31",,terminal_output +14263,10647914,"genie.py",9212,0,"",python,selection_mouse +14264,10647931,"genie.py",9209,9,"gather_fn",python,selection_mouse +14265,10648040,"TERMINAL",0,0,"42",,terminal_output +14266,10649145,"genie.py",9277,0,"",python,selection_mouse +14267,10649235,"TERMINAL",0,0,"53",,terminal_output +14268,10649302,"genie.py",9267,17,"final_token_probs",python,selection_mouse +14269,10650195,"TERMINAL",0,0,"65",,terminal_output +14270,10651221,"TERMINAL",0,0,"86",,terminal_output +14271,10651523,"genie.py",9277,0,"",python,selection_mouse +14272,10651970,"genie.py",9267,17,"final_token_probs",python,selection_mouse +14273,10652244,"TERMINAL",0,0,"97",,terminal_output +14274,10653313,"TERMINAL",0,0,"108",,terminal_output +14275,10654306,"TERMINAL",0,0,"19",,terminal_output +14276,10654815,"TERMINAL",0,0,"[?25lfinal_token_probs[?25h",,terminal_output +14277,10655151,"TERMINAL",0,0,"[?25l,[?25h",,terminal_output +14278,10655376,"TERMINAL",0,0,"[?25ls[?25h",,terminal_output +14279,10655376,"TERMINAL",0,0,"230",,terminal_output +14280,10655881,"TERMINAL",0,0,"[?25ls\r[?25h",,terminal_output +14281,10655943,"TERMINAL",0,0,"[?25l,\r[?25h",,terminal_output +14282,10656187,"TERMINAL",0,0,"[?25l.[?25h",,terminal_output +14283,10656339,"TERMINAL",0,0,"[?25ls[?25h",,terminal_output +14284,10656414,"TERMINAL",0,0,"31",,terminal_output +14285,10656451,"TERMINAL",0,0,"[?25lh[?25h",,terminal_output +14286,10656607,"TERMINAL",0,0,"[?25la[?25h",,terminal_output +14287,10656668,"TERMINAL",0,0,"[?25lp[?25h",,terminal_output +14288,10656785,"TERMINAL",0,0,"[?25le[?25h",,terminal_output +14289,10656847,"TERMINAL",0,0,"\r\n(1, 920)\r\n(jdb) ",,terminal_output +14290,10657467,"TERMINAL",0,0,"42",,terminal_output +14291,10658491,"TERMINAL",0,0,"53",,terminal_output +14292,10659617,"TERMINAL",0,0,"64",,terminal_output 
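[annotation] The shape checks typed at the (jdb) prompt above establish final_logits as (1, 920, 1024), sampled_token_idxs as (1, 920), and the double-vmap gather's output final_token_probs as (1, 920). A standalone sketch of that gather, using dummy zero arrays in place of the real sampler state:

    import jax
    import jax.numpy as jnp

    B, T, V = 1, 920, 1024  # batch, token positions, vocabulary (per the jdb shapes)
    final_logits = jnp.zeros((B, T, V))
    sampled_token_idxs = jnp.zeros((B, T), dtype=jnp.int32)

    # Inner lambda sees x as a (V,) probability row and y as a scalar index;
    # the two vmaps lift it over the position and batch axes.
    gather_fn = jax.vmap(jax.vmap(lambda x, y: x[y]))
    final_token_probs = gather_fn(jax.nn.softmax(final_logits), sampled_token_idxs)
    assert final_token_probs.shape == (B, T)  # (1, 920), matching the jdb output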
+14293,10660574,"TERMINAL",0,0,"75",,terminal_output +14294,10660776,"genie.py",0,0,"",python,tab +14295,10660778,"genie.py",9372,0,"",python,selection_mouse +14296,10660900,"genie.py",9355,21,"final_token_probs_tmp",python,selection_mouse +14297,10661692,"TERMINAL",0,0,"86",,terminal_output +14298,10662669,"TERMINAL",0,0,"97",,terminal_output +14299,10663882,"TERMINAL",0,0,"208",,terminal_output +14300,10664503,"TERMINAL",0,0,"[?25lfinal_token_probs_tmp[?25h",,terminal_output +14301,10664758,"TERMINAL",0,0,"[?25l.[?25h",,terminal_output +14302,10664818,"TERMINAL",0,0,"19",,terminal_output +14303,10664913,"TERMINAL",0,0,"[?25ls[?25h",,terminal_output +14304,10665075,"TERMINAL",0,0,"[?25lh[?25h",,terminal_output +14305,10665138,"TERMINAL",0,0,"[?25la[?25h",,terminal_output +14306,10665870,"TERMINAL",0,0,"[?25lp[?25h",,terminal_output +14307,10665937,"TERMINAL",0,0,"240",,terminal_output +14308,10665999,"TERMINAL",0,0,"[?25le[?25h",,terminal_output +14309,10666174,"TERMINAL",0,0,"\r\n(1, 16, 920, 1024)\r\n(jdb) ",,terminal_output +14310,10666824,"TERMINAL",0,0,"[?25lm[?25h",,terminal_output +14311,10666924,"TERMINAL",0,0,"[?25la[?25h",,terminal_output +14312,10667034,"TERMINAL",0,0,"[?25ls[?25h",,terminal_output +14313,10667081,"TERMINAL",0,0,"[?25lk[?25h",,terminal_output +14314,10667095,"TERMINAL",0,0,"31",,terminal_output +14315,10667277,"TERMINAL",0,0,"[?25l.[?25h",,terminal_output +14316,10667501,"TERMINAL",0,0,"[?25ls[?25h",,terminal_output +14317,10667553,"TERMINAL",0,0,"[?25lh[?25h",,terminal_output +14318,10667619,"TERMINAL",0,0,"[?25la[?25h",,terminal_output +14319,10667847,"TERMINAL",0,0,"[?25lp[?25h",,terminal_output +14320,10668004,"TERMINAL",0,0,"[?25le[?25h",,terminal_output +14321,10668131,"TERMINAL",0,0,"\r\n(1, 16, 920)\r\n(jdb) ",,terminal_output +14322,10668132,"TERMINAL",0,0,"42",,terminal_output +14323,10669243,"TERMINAL",0,0,"64",,terminal_output +14324,10670267,"TERMINAL",0,0,"75",,terminal_output +14325,10671290,"TERMINAL",0,0,"86",,terminal_output +14326,10672315,"TERMINAL",0,0,"97",,terminal_output +14327,10673312,"TERMINAL",0,0,"308",,terminal_output +14328,10674465,"TERMINAL",0,0,"19",,terminal_output +14329,10675490,"TERMINAL",0,0,"250",,terminal_output +14330,10676451,"TERMINAL",0,0,"31",,terminal_output +14331,10677540,"TERMINAL",0,0,"42",,terminal_output +14332,10678651,"genie.py",0,0,"",python,tab +14333,10678651,"genie.py",9500,0,"",python,selection_mouse +14334,10678813,"genie.py",9486,17,"final_token_probs",python,selection_mouse +14335,10678815,"TERMINAL",0,0,"53",,terminal_output +14336,10679625,"TERMINAL",0,0,"64",,terminal_output +14337,10680711,"TERMINAL",0,0,"75",,terminal_output +14338,10681736,"TERMINAL",0,0,"86",,terminal_output +14339,10682775,"TERMINAL",0,0,"97",,terminal_output +14340,10683818,"TERMINAL",0,0,"408",,terminal_output +14341,10684962,"TERMINAL",0,0,"19",,terminal_output +14342,10685968,"TERMINAL",0,0,"27:00",,terminal_output +14343,10686987,"TERMINAL",0,0,"31",,terminal_output +14344,10687991,"TERMINAL",0,0,"42",,terminal_output +14345,10689035,"TERMINAL",0,0,"53",,terminal_output +14346,10690084,"TERMINAL",0,0,"64",,terminal_output +14347,10691156,"TERMINAL",0,0,"76",,terminal_output +14348,10692283,"TERMINAL",0,0,"97",,terminal_output +14349,10693308,"TERMINAL",0,0,"508",,terminal_output +14350,10694331,"TERMINAL",0,0,"19",,terminal_output +14351,10695407,"TERMINAL",0,0,"210",,terminal_output +14352,10696481,"TERMINAL",0,0,"31",,terminal_output +14353,10697506,"TERMINAL",0,0,"42",,terminal_output 
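[annotation] The prints above expose the bug being chased: final_token_probs_tmp comes back as (1, 16, 920, 1024) because the two-level gather_fn cannot absorb the extra step axis of final_logits_tmp (1, 16, 920, 1024) and sampled_token_idxs_tmp (1, 16, 920) — after two vmaps the inner x[y] fancy-indexes a (920, 1024) matrix with a (920,) index vector instead of picking one entry per row. The genie.py edits recorded below add a third vmap; a minimal sketch of the fix with dummy data:

    import jax
    import jax.numpy as jnp

    B, S, T, V = 1, 16, 920, 1024  # batch, sampling steps, positions, vocabulary
    final_logits_tmp = jnp.zeros((B, S, T, V))
    sampled_token_idxs_tmp = jnp.zeros((B, S, T), dtype=jnp.int32)

    # Three vmaps peel off the batch, step, and position axes, so the innermost
    # x[y] indexes a single (V,) row with a scalar.
    gather_fn_tmp = jax.vmap(jax.vmap(jax.vmap(lambda x, y: x[y])))
    final_token_probs_tmp = gather_fn_tmp(
        jax.nn.softmax(final_logits_tmp), sampled_token_idxs_tmp
    )
    assert final_token_probs_tmp.shape == (B, S, T)  # (1, 16, 920) as intended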
+14354,10698472,"TERMINAL",0,0,"53",,terminal_output +14355,10699553,"TERMINAL",0,0,"64",,terminal_output +14356,10700550,"TERMINAL",0,0,"75",,terminal_output +14357,10701596,"TERMINAL",0,0,"86",,terminal_output +14358,10702729,"TERMINAL",0,0,"97",,terminal_output +14359,10703752,"TERMINAL",0,0,"4:008",,terminal_output +14360,10704775,"TERMINAL",0,0,"19",,terminal_output +14361,10705816,"TERMINAL",0,0,"220",,terminal_output +14362,10706941,"TERMINAL",0,0,"31",,terminal_output +14363,10707910,"TERMINAL",0,0,"42",,terminal_output +14364,10708415,"genie.py",0,0,"",python,tab +14365,10708416,"genie.py",9372,0,"",python,selection_mouse +14366,10708523,"genie.py",9355,21,"final_token_probs_tmp",python,selection_mouse +14367,10708979,"TERMINAL",0,0,"53",,terminal_output +14368,10709271,"genie.py",9369,0,"",python,selection_mouse +14369,10709272,"genie.py",9355,21,"final_token_probs_tmp",python,selection_mouse +14370,10709902,"genie.py",9368,0,"",python,selection_mouse +14371,10709903,"genie.py",9355,21,"final_token_probs_tmp",python,selection_mouse +14372,10709964,"TERMINAL",0,0,"64",,terminal_output +14373,10710649,"genie.py",9280,0,"",python,selection_mouse +14374,10710807,"genie.py",9267,17,"final_token_probs",python,selection_mouse +14375,10711000,"TERMINAL",0,0,"75",,terminal_output +14376,10711599,"genie.py",9370,0,"",python,selection_mouse +14377,10712033,"TERMINAL",0,0,"86",,terminal_output +14378,10712416,"genie.py",9221,0,"",python,selection_mouse +14379,10713078,"TERMINAL",0,0,"97",,terminal_output +14380,10714007,"genie.py",9221,0,"j",python,content +14381,10714007,"genie.py",9222,0,"",python,selection_keyboard +14382,10714088,"genie.py",9222,0,"a",python,content +14383,10714089,"genie.py",9223,0,"",python,selection_keyboard +14384,10714174,"TERMINAL",0,0,"109",,terminal_output +14385,10714270,"genie.py",9223,0,"x",python,content +14386,10714271,"genie.py",9224,0,"",python,selection_keyboard +14387,10714424,"genie.py",9224,0,".",python,content +14388,10714424,"genie.py",9225,0,"",python,selection_keyboard +14389,10715178,"TERMINAL",0,0,"230",,terminal_output +14390,10715803,"genie.py",9225,0,"v",python,content +14391,10715804,"genie.py",9226,0,"",python,selection_keyboard +14392,10715934,"genie.py",9226,0,"m",python,content +14393,10715934,"genie.py",9227,0,"",python,selection_keyboard +14394,10716207,"genie.py",9227,0,"a",python,content +14395,10716208,"genie.py",9228,0,"",python,selection_keyboard +14396,10716324,"genie.py",9228,0,"p",python,content +14397,10716325,"genie.py",9229,0,"",python,selection_keyboard +14398,10716449,"TERMINAL",0,0,"31",,terminal_output +14399,10717196,"genie.py",9229,0,"(",python,content +14400,10717197,"genie.py",9230,0,"",python,selection_keyboard +14401,10717313,"TERMINAL",0,0,"42",,terminal_output +14402,10718329,"TERMINAL",0,0,"53",,terminal_output +14403,10718492,"genie.py",9267,0,"",python,selection_mouse +14404,10719250,"genie.py",9267,0,")",python,content +14405,10719251,"genie.py",9268,0,"",python,selection_keyboard +14406,10719406,"TERMINAL",0,0,"64",,terminal_output +14407,10720443,"TERMINAL",0,0,"75",,terminal_output +14408,10721570,"TERMINAL",0,0,"86",,terminal_output +14409,10722539,"TERMINAL",0,0,"97",,terminal_output +14410,10723586,"TERMINAL",0,0,"208",,terminal_output +14411,10724642,"TERMINAL",0,0,"19",,terminal_output +14412,10725769,"TERMINAL",0,0,"240",,terminal_output +14413,10726793,"TERMINAL",0,0,"31",,terminal_output +14414,10727817,"TERMINAL",0,0,"42",,terminal_output +14415,10728841,"TERMINAL",0,0,"53",,terminal_output 
+14416,10729967,"TERMINAL",0,0,"64",,terminal_output +14417,10730991,"TERMINAL",0,0,"75",,terminal_output +14418,10731996,"TERMINAL",0,0,"86",,terminal_output +14419,10733039,"TERMINAL",0,0,"97",,terminal_output +14420,10734083,"TERMINAL",0,0,"308",,terminal_output +14421,10735135,"TERMINAL",0,0,"150",,terminal_output +14422,10736214,"TERMINAL",0,0,"31",,terminal_output +14423,10737232,"TERMINAL",0,0,"42",,terminal_output +14424,10738261,"TERMINAL",0,0,"53",,terminal_output +14425,10739388,"TERMINAL",0,0,"64",,terminal_output +14426,10740412,"TERMINAL",0,0,"75",,terminal_output +14427,10741436,"TERMINAL",0,0,"86",,terminal_output +14428,10742562,"TERMINAL",0,0,"97",,terminal_output +14429,10743587,"TERMINAL",0,0,"408",,terminal_output +14430,10744611,"TERMINAL",0,0,"19",,terminal_output +14431,10745636,"TERMINAL",0,0,"28:00",,terminal_output +14432,10746658,"TERMINAL",0,0,"31",,terminal_output +14433,10747785,"TERMINAL",0,0,"42",,terminal_output +14434,10748809,"TERMINAL",0,0,"53",,terminal_output +14435,10749833,"TERMINAL",0,0,"64",,terminal_output +14436,10750857,"TERMINAL",0,0,"75",,terminal_output +14437,10751880,"TERMINAL",0,0,"86",,terminal_output +14438,10752913,"TERMINAL",0,0,"97",,terminal_output +14439,10753930,"TERMINAL",0,0,"508",,terminal_output +14440,10754954,"TERMINAL",0,0,"19",,terminal_output +14441,10755995,"TERMINAL",0,0,"210",,terminal_output +14442,10757049,"TERMINAL",0,0,"31",,terminal_output +14443,10758096,"TERMINAL",0,0,"42",,terminal_output +14444,10758628,"genie.py",0,0,"",python,tab +14445,10758629,"genie.py",9200,0,"",python,selection_mouse +14446,10759046,"genie.py",9262,0,"",python,selection_mouse +14447,10759163,"TERMINAL",0,0,"54",,terminal_output +14448,10760198,"TERMINAL",0,0,"75",,terminal_output +14449,10760488,"genie.py",9262,0,"u",python,content +14450,10760489,"genie.py",9263,0,"",python,selection_keyboard +14451,10761143,"genie.py",9263,0,"u",python,content +14452,10761144,"genie.py",9264,0,"",python,selection_keyboard +14453,10761263,"TERMINAL",0,0,"86",,terminal_output +14454,10762121,"genie.py",9263,1,"",python,content +14455,10762226,"genie.py",9262,1,"",python,content +14456,10762305,"TERMINAL",0,0,"97",,terminal_output +14457,10762759,"genie.py",9261,0,"",python,selection_command +14458,10762974,"genie.py",9265,1,"",python,content +14459,10763013,"genie.py",9239,9,"",python,content +14460,10763026,"genie.py",9221,0,"",python,selection_command +14461,10763367,"TERMINAL",0,0,"5:008",,terminal_output +14462,10764478,"genie.py",9258,0,"\n gather_fn = jax.vmap(jax.vmap(lambda x, y: x[y]))",python,content +14463,10764479,"TERMINAL",0,0,"19",,terminal_output +14464,10764526,"genie.py",9267,0,"",python,selection_command +14465,10764836,"genie.py",9268,0,"",python,selection_command +14466,10765311,"genie.py",9269,0,"",python,selection_command +14467,10765365,"genie.py",9270,0,"",python,selection_command +14468,10765408,"genie.py",9271,0,"",python,selection_command +14469,10765578,"genie.py",9272,0,"",python,selection_command +14470,10765579,"genie.py",9273,0,"",python,selection_command +14471,10765580,"genie.py",9274,0,"",python,selection_command +14472,10765581,"TERMINAL",0,0,"220",,terminal_output +14473,10765582,"genie.py",9275,0,"",python,selection_command +14474,10765765,"genie.py",9276,0,"",python,selection_command +14475,10766304,"genie.py",9276,0,"_",python,content +14476,10766304,"genie.py",9277,0,"",python,selection_keyboard +14477,10766482,"TERMINAL",0,0,"31",,terminal_output +14478,10766638,"genie.py",9277,0,"t",python,content 
+14479,10766639,"genie.py",9278,0,"",python,selection_keyboard +14480,10766702,"genie.py",9278,0,"m",python,content +14481,10766703,"genie.py",9279,0,"",python,selection_keyboard +14482,10766847,"genie.py",9279,0,"p",python,content +14483,10766848,"genie.py",9280,0,"",python,selection_keyboard +14484,10767137,"genie.py",9279,0,"",python,selection_command +14485,10767299,"genie.py",9280,0,"",python,selection_command +14486,10767524,"TERMINAL",0,0,"42",,terminal_output +14487,10767939,"genie.py",9281,0,"",python,selection_command +14488,10768055,"genie.py",9282,0,"",python,selection_command +14489,10768576,"TERMINAL",0,0,"53",,terminal_output +14490,10769038,"genie.py",9283,0,"",python,selection_command +14491,10769369,"genie.py",9283,0,"j",python,content +14492,10769370,"genie.py",9284,0,"",python,selection_keyboard +14493,10769458,"genie.py",9284,0,"a",python,content +14494,10769459,"genie.py",9285,0,"",python,selection_keyboard +14495,10769594,"genie.py",9285,0,"x",python,content +14496,10769595,"genie.py",9286,0,"",python,selection_keyboard +14497,10769645,"TERMINAL",0,0,"63316017 accelerat train_to tum_cte0 PD\t0:00\t 2 (Priority)3316016 accelerat train_to tum_cte0 PD\t0:00\t 1 (Priority)4",,terminal_output +14498,10769693,"genie.py",9286,0,".",python,content +14499,10769694,"genie.py",9287,0,"",python,selection_keyboard +14500,10770236,"genie.py",9287,0,"v",python,content +14501,10770237,"genie.py",9288,0,"",python,selection_keyboard +14502,10770357,"genie.py",9288,0,"m",python,content +14503,10770357,"genie.py",9289,0,"",python,selection_keyboard +14504,10770732,"genie.py",9289,0,"p",python,content +14505,10770733,"genie.py",9290,0,"",python,selection_keyboard +14506,10770786,"TERMINAL",0,0,"75",,terminal_output +14507,10771059,"genie.py",9289,1,"",python,content +14508,10771180,"genie.py",9289,0,"a",python,content +14509,10771181,"genie.py",9290,0,"",python,selection_keyboard +14510,10771277,"genie.py",9290,0,"p",python,content +14511,10771278,"genie.py",9291,0,"",python,selection_keyboard +14512,10771635,"genie.py",9290,0,"",python,selection_command +14513,10771714,"TERMINAL",0,0,"86",,terminal_output +14514,10772779,"TERMINAL",0,0,"97",,terminal_output +14515,10772975,"TERMINAL",0,0,"^DERROR:2025-07-03 19:15:09,796:jax._src.debugging:96: jax.debug.callback failed\r\nTraceback (most recent call last):\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/jax/_src/debugging.py"", line 94, in debug_callback_impl\r\n callback(*args)\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/jax/_src/debugging.py"", line 334, in _flat_callback\r\n callback(*args, **kwargs)\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/jax/_src/debugger/core.py"", line 220, in _breakpoint_callback\r\n debugger(frames, thread_id, **kwargs)\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/jax/_src/debugger/cli_debugger.py"", line 167, in run_debugger\r\n CliDebugger(frames, thread_id, **kwargs).run()\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/jax/_src/debugger/cli_debugger.py"", line 160, in run\r\n self.cmdloop()\r\n File ""/home/hk-project-p0023960/tum_cte0515/.local/share/uv/python/cpython-3.10.18-linux-x86_64-gnu/lib/python3.10/cmd.py"", line 138, in cmdloop\r\n stop = self.onecmd(line)\r\n File 
""/home/hk-project-p0023960/tum_cte0515/.local/share/uv/python/cpython-3.10.18-linux-x86_64-gnu/lib/python3.10/cmd.py"", line 217, in onecmd\r\n return func(arg)\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/jax/_src/debugger/cli_debugger.py"", line 146, in do_quit\r\n sys.exit(0)\r\nSystemExit: 0\r\nERROR:jax._src.debugging:jax.debug.callback failed\r\nTraceback (most recent call last):\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/jax/_src/debugging.py"", line 94, in debug_callback_impl\r\n callback(*args)\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/jax/_src/debugging.py"", line 334, in _flat_callback\r\n callback(*args, **kwargs)\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/jax/_src/debugger/core.py"", line 220, in _breakpoint_callback\r\n debugger(frames, thread_id, **kwargs)\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/jax/_src/debugger/cli_debugger.py"", line 167, in run_debugger\r\n CliDebugger(frames, thread_id, **kwargs).run()\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/jax/_src/debugger/cli_debugger.py"", line 160, in run\r\n self.cmdloop()\r\n File ""/home/hk-project-p0023960/tum_cte0515/.local/share/uv/python/cpython-3.10.18-linux-x86_64-gnu/lib/python3.10/cmd.py"", line 138, in cmdloop\r\n stop = self.onecmd(line)\r\n File ""/home/hk-project-p0023960/tum_cte0515/.local/share/uv/python/cpython-3.10.18-linux-x86_64-gnu/lib/python3.10/cmd.py"", line 217, in onecmd\r\n return func(arg)\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/jax/_src/debugger/cli_debugger.py"", line 146, in do_quit\r\n sys.exit(0)\r\nSystemExit: 0\r\nE0703 19:15:09.799778 4006432 pjrt_stream_executor_client.cc:2917] Execution of replica 0 failed: INTERNAL: CpuCallback error calling callback: Traceback (most recent call last):\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/sample.py"", line 157, in \r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/sample.py"", line 118, in _autoreg_sample_mihir\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/jax/_src/traceback_util.py"", line 182, in reraise_with_filtered_traceback\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/flax/linen/module.py"", line 2240, in apply\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/flax/core/scope.py"", line 1079, in wrapper\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/flax/linen/module.py"", line 3022, in scope_fn\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/flax/linen/module.py"", line 699, in wrapped_module_method\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/flax/linen/module.py"", line 1216, in _call_wrapped_method\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/genie.py"", line 155, in sample_mihir\r\n 
File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/flax/linen/transforms.py"", line 360, in wrapped_fn\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/flax/core/lift.py"", line 328, in wrapper\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/flax/core/lift.py"", line 1031, in inner\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/flax/core/axes_scan.py"", line 179, in scan_fn\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/jax/_src/traceback_util.py"", line 182, in reraise_with_filtered_traceback\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/jax/_src/lax/control_flow/loops.py"", line 355, in scan\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/jax/_src/core.py"", line 531, in bind\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/jax/_src/core.py"", line 551, in _true_bind\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/jax/_src/core.py"", line 556, in bind_with_trace\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/jax/_src/core.py"", line 1060, in process_primitive\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/jax/_src/dispatch.py"", line 88, in apply_primitive\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/jax/_src/traceback_util.py"", line 182, in reraise_with_filtered_traceback\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/jax/_src/pjit.py"", line 334, in cache_miss\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/jax/_src/pjit.py"", line 195, in _python_pjit_helper\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/jax/_src/pjit.py"", line 1884, in _pjit_call_impl_python\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/jax/_src/profiler.py"", line 354, in wrapper\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/jax/_src/interpreters/pxla.py"", line 1297, in __call__\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/jax/_src/callback.py"", line 782, in _wrapped_callback\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/jax/_src/debugging.py"", line 200, in _callback\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/jax/_src/debugging.py"", line 97, in debug_callback_impl\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/jax/_src/debugging.py"", line 334, in _flat_callback\r\n File 
""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/jax/_src/debugger/core.py"", line 220, in _breakpoint_callback\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/jax/_src/debugger/cli_debugger.py"", line 167, in run_debugger\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/jax/_src/debugger/cli_debugger.py"", line 162, in run\r\n File ""/home/hk-project-p0023960/tum_cte0515/.local/share/uv/python/cpython-3.10.18-linux-x86_64-gnu/lib/python3.10/cmd.py"", line 145, in cmdloop\r\n File ""/home/hk-project-p0023960/tum_cte0515/.local/share/uv/python/cpython-3.10.18-linux-x86_64-gnu/lib/python3.10/cmd.py"", line 217, in onecmd\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/jax/_src/debugger/cli_debugger.py"", line 146, in do_quit\r\nSystemExit: 0\r\njax.errors.SimplifiedTraceback: For simplicity, JAX has removed its internal frames from the traceback of the following exception. Set JAX_TRACEBACK_FILTERING=off to include these.\r\n\r\nThe above exception was the direct cause of the following exception:\r\n\r\nTraceback (most recent call last):\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/sample.py"", line 157, in \r\n vid = _autoreg_sample_mihir(rng, video_batch, action_batch)\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/sample.py"", line 118, in _autoreg_sample_mihir\r\n generated_vid = genie.apply(\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/genie.py"", line 155, in sample_mihir\r\n final_carry, _ = loop_fn(init_carry, jnp.arange(steps))\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/flax/core/axes_scan.py"", line 179, in scan_fn\r\n c, ys = lax.scan(\r\njaxlib._jax.XlaRuntimeError: INTERNAL: CpuCallback error calling callback: Traceback (most recent call last):\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/sample.py"", line 157, in \r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/sample.py"", line 118, in _autoreg_sample_mihir\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/jax/_src/traceback_util.py"", line 182, in reraise_with_filtered_traceback\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/flax/linen/module.py"", line 2240, in apply\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/flax/core/scope.py"", line 1079, in wrapper\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/flax/linen/module.py"", line 3022, in scope_fn\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/flax/linen/module.py"", line 699, in wrapped_module_method\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/flax/linen/module.py"", line 1216, in _call_wrapped_method\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/genie.py"", line 155, in sample_mihir\r\n File 
""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/flax/linen/transforms.py"", line 360, in wrapped_fn\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/flax/core/lift.py"", line 328, in wrapper\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/flax/core/lift.py"", line 1031, in inner\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/flax/core/axes_scan.py"", line 179, in scan_fn\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/jax/_src/traceback_util.py"", line 182, in reraise_with_filtered_traceback\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/jax/_src/lax/control_flow/loops.py"", line 355, in scan\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/jax/_src/core.py"", line 531, in bind\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/jax/_src/core.py"", line 551, in _true_bind\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/jax/_src/core.py"", line 556, in bind_with_trace\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/jax/_src/core.py"", line 1060, in process_primitive\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/jax/_src/dispatch.py"", line 88, in apply_primitive\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/jax/_src/traceback_util.py"", line 182, in reraise_with_filtered_traceback\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/jax/_src/pjit.py"", line 334, in cache_miss\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/jax/_src/pjit.py"", line 195, in _python_pjit_helper\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/jax/_src/pjit.py"", line 1884, in _pjit_call_impl_python\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/jax/_src/profiler.py"", line 354, in wrapper\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/jax/_src/interpreters/pxla.py"", line 1297, in __call__\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/jax/_src/callback.py"", line 782, in _wrapped_callback\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/jax/_src/debugging.py"", line 200, in _callback\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/jax/_src/debugging.py"", line 97, in debug_callback_impl\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/jax/_src/debugging.py"", line 334, in _flat_callback\r\n File 
""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/jax/_src/debugger/core.py"", line 220, in _breakpoint_callback\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/jax/_src/debugger/cli_debugger.py"", line 167, in run_debugger\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/jax/_src/debugger/cli_debugger.py"", line 162, in run\r\n File ""/home/hk-project-p0023960/tum_cte0515/.local/share/uv/python/cpython-3.10.18-linux-x86_64-gnu/lib/python3.10/cmd.py"", line 145, in cmdloop\r\n File ""/home/hk-project-p0023960/tum_cte0515/.local/share/uv/python/cpython-3.10.18-linux-x86_64-gnu/lib/python3.10/cmd.py"", line 217, in onecmd\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/jax/_src/debugger/cli_debugger.py"", line 146, in do_quit\r\nSystemExit: 0\r\n",,terminal_output +14516,10773918,"genie.py",0,0,"",python,tab +14517,10773919,"genie.py",9291,0,"",python,selection_mouse +14518,10774022,"TERMINAL",0,0,"108",,terminal_output +14519,10774354,"TERMINAL",0,0,"]0;tum_cte0515@hkn0901:~/Projects/jafar[?2004h(jafar) [tum_cte0515@hkn0901 jafar]$ ",,terminal_output +14520,10774899,"TERMINAL",0,0,"19",,terminal_output +14521,10775865,"TERMINAL",0,0,"230",,terminal_output +14522,10776007,"genie.py",9291,0,"(",python,content +14523,10776008,"genie.py",9292,0,"",python,selection_keyboard +14524,10776941,"genie.py",9329,0,"",python,selection_mouse +14525,10777003,"TERMINAL",0,0,"31",,terminal_output +14526,10777620,"genie.py",9329,0,")",python,content +14527,10777621,"genie.py",9330,0,"",python,selection_keyboard +14528,10777979,"TERMINAL",0,0,"42",,terminal_output +14529,10779016,"TERMINAL",0,0,"53",,terminal_output +14530,10779904,"genie.py",9461,0,"",python,selection_mouse +14531,10780034,"TERMINAL",0,0,"64",,terminal_output +14532,10780995,"genie.py",9460,0,"",python,selection_command +14533,10781086,"TERMINAL",0,0,"75",,terminal_output +14534,10781317,"genie.py",9460,0,"_",python,content +14535,10781317,"genie.py",9461,0,"",python,selection_keyboard +14536,10781703,"genie.py",9461,0,"t",python,content +14537,10781703,"genie.py",9462,0,"",python,selection_keyboard +14538,10781771,"genie.py",9462,0,"m",python,content +14539,10781771,"genie.py",9463,0,"",python,selection_keyboard +14540,10782013,"genie.py",9463,0,"p",python,content +14541,10782013,"genie.py",9464,0,"",python,selection_keyboard +14542,10782187,"TERMINAL",0,0,"87",,terminal_output +14543,10783347,"TERMINAL",0,0,"208",,terminal_output +14544,10783696,"TERMINAL",0,0,"sh scripts_horeka/overfit_sample_tiny/sample.sh ",,terminal_output +14545,10784230,"TERMINAL",0,0,"19",,terminal_output +14546,10784359,"TERMINAL",0,0,"\r\n[?2004l\r",,terminal_output +14547,10784495,"TERMINAL",0,0,"Sampling from checkpoint: /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/3309699/genie_1751384516_74500/\r\n",,terminal_output +14548,10785354,"TERMINAL",0,0,"240",,terminal_output +14549,10786343,"TERMINAL",0,0,"31",,terminal_output +14550,10787413,"TERMINAL",0,0,"42",,terminal_output +14551,10787423,"TERMINAL",0,0,"2025-07-03 19:15:24.296054: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. 
Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +14552,10787822,"genie.py",0,0,"",python,tab +14553,10787835,"genie.py",8698,0,"",python,selection_mouse +14554,10787934,"genie.py",8697,0,"",python,selection_command +14555,10788432,"TERMINAL",0,0,"53",,terminal_output +14556,10788927,"genie.py",8675,31,"",python,content +14557,10789441,"TERMINAL",0,0,"64",,terminal_output +14558,10790526,"TERMINAL",0,0,"75",,terminal_output +14559,10791612,"TERMINAL",0,0,"86",,terminal_output +14560,10792554,"TERMINAL",0,0,"97",,terminal_output +14561,10793592,"TERMINAL",0,0,"308",,terminal_output +14562,10794644,"TERMINAL",0,0,"19",,terminal_output +14563,10795708,"TERMINAL",0,0,"250",,terminal_output +14564,10796741,"TERMINAL",0,0,"31",,terminal_output +14565,10797875,"TERMINAL",0,0,"42",,terminal_output +14566,10798882,"TERMINAL",0,0,"53",,terminal_output +14567,10799889,"TERMINAL",0,0,"64",,terminal_output +14568,10800625,"TERMINAL",0,0,"2025-07-03 19:15:37.441980: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +14569,10800940,"TERMINAL",0,0,"75",,terminal_output +14570,10802064,"TERMINAL",0,0,"86",,terminal_output +14571,10803036,"TERMINAL",0,0,"97",,terminal_output +14572,10804092,"TERMINAL",0,0,"408",,terminal_output +14573,10805232,"TERMINAL",0,0,"19:00",,terminal_output +14574,10806256,"TERMINAL",0,0,"31",,terminal_output +14575,10807208,"TERMINAL",0,0,"42",,terminal_output +14576,10808339,"TERMINAL",0,0,"53",,terminal_output +14577,10809301,"TERMINAL",0,0,"64",,terminal_output +14578,10810345,"TERMINAL",0,0,"75",,terminal_output +14579,10810968,"TERMINAL",0,0,"2025-07-03 19:15:47.848001: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +14580,10811479,"TERMINAL",0,0,"86",,terminal_output +14581,10812502,"TERMINAL",0,0,"97",,terminal_output +14582,10813526,"TERMINAL",0,0,"508",,terminal_output +14583,10814562,"TERMINAL",0,0,"19",,terminal_output +14584,10815583,"TERMINAL",0,0,"210",,terminal_output +14585,10816701,"TERMINAL",0,0,"31",,terminal_output +14586,10817725,"TERMINAL",0,0,"42",,terminal_output +14587,10818851,"TERMINAL",0,0,"53",,terminal_output +14588,10819875,"TERMINAL",0,0,"64",,terminal_output +14589,10820899,"TERMINAL",0,0,"75",,terminal_output +14590,10821923,"TERMINAL",0,0,"86",,terminal_output +14591,10822991,"TERMINAL",0,0,"97",,terminal_output +14592,10824073,"TERMINAL",0,0,"6:008",,terminal_output +14593,10824073,"TERMINAL",0,0,"2025-07-03 19:16:00.945205: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. 
Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +14594,10825098,"TERMINAL",0,0,"19",,terminal_output +14595,10826141,"TERMINAL",0,0,"221",,terminal_output +14596,10826741,"TERMINAL",0,0,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/orbax/checkpoint/_src/serialization/type_handlers.py:1251: UserWarning: Sharding info not provided when restoring. Populating sharding info from sharding file. Please note restoration time will be slightly increased due to reading from file. Note also that this option is unsafe when restoring on a different topology than the checkpoint was saved with.\r\n warnings.warn(\r\n",,terminal_output +14597,10827256,"TERMINAL",0,0,"461722",,terminal_output +14598,10828284,"TERMINAL",0,0,"53",,terminal_output +14599,10829244,"TERMINAL",0,0,"64",,terminal_output +14600,10830293,"TERMINAL",0,0,"75",,terminal_output +14601,10831346,"TERMINAL",0,0,"86",,terminal_output +14602,10832471,"TERMINAL",0,0,"97",,terminal_output +14603,10833496,"TERMINAL",0,0,"108",,terminal_output +14604,10834512,"TERMINAL",0,0,"19",,terminal_output +14605,10835036,"TERMINAL",0,0,"2025-07-03 19:16:11.937268: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +14606,10835645,"TERMINAL",0,0,"230",,terminal_output +14607,10836669,"TERMINAL",0,0,"31",,terminal_output +14608,10837694,"TERMINAL",0,0,"42",,terminal_output +14609,10838156,"TERMINAL",0,0,"2025-07-03 19:16:15.049581: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +14610,10838717,"TERMINAL",0,0,"53",,terminal_output +14611,10839732,"TERMINAL",0,0,"64",,terminal_output +14612,10840868,"TERMINAL",0,0,"75",,terminal_output +14613,10841891,"TERMINAL",0,0,"86",,terminal_output +14614,10842859,"TERMINAL",0,0,"97",,terminal_output +14615,10843890,"TERMINAL",0,0,"208",,terminal_output +14616,10844657,"TERMINAL",0,0,"2025-07-03 19:16:21.510669: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +14617,10844964,"TERMINAL",0,0,"19",,terminal_output +14618,10845982,"TERMINAL",0,0,"240",,terminal_output +14619,10846612,"TERMINAL",0,0,"2025-07-03 19:16:23.419100: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. 
Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +14620,10847208,"TERMINAL",0,0,"31",,terminal_output +14621,10847977,"TERMINAL",0,0,"token_idxs shape: (1, 1, 920)\r\n",,terminal_output +14622,10848086,"TERMINAL",0,0,"42",,terminal_output +14623,10849180,"TERMINAL",0,0,"54",,terminal_output +14624,10850187,"TERMINAL",0,0,"75",,terminal_output +14625,10851223,"TERMINAL",0,0,"86",,terminal_output +14626,10852263,"TERMINAL",0,0,"97",,terminal_output +14627,10853311,"TERMINAL",0,0,"308",,terminal_output +14628,10854357,"TERMINAL",0,0,"19",,terminal_output +14629,10855010,"TERMINAL",0,0,"maskgit-sampled-token_idxs[0,:,0]: [151 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0]\r\nEntering jdb:\r\n(jdb) ",,terminal_output +14630,10855512,"TERMINAL",0,0,"250",,terminal_output +14631,10856461,"TERMINAL",0,0,"31",,terminal_output +14632,10857580,"TERMINAL",0,0,"42",,terminal_output +14633,10858103,"TERMINAL",0,0,"[?25ll[?25h",,terminal_output +14634,10858166,"TERMINAL",0,0,"\r\n> /hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/genie.py(270)\r\n )\r\n gather_fn = jax.vmap(jax.vmap(lambda x, y: x[y]))\r\n gather_fn_tmp = jax.vmap(jax.vmap(jax.vmap(lambda x, y: x[y])))\r\n final_token_probs = gather_fn(jax.nn.softmax(final_logits), sampled_token_idxs)\r\n final_token_probs_tmp = gather_fn_tmp(jax.nn.softmax(final_logits_tmp), sampled_token_idxs_tmp)\r\n jax.debug.breakpoint()\r\n-> final_token_probs += ~mask\r\n # Update masked tokens only\r\n jax.debug.print(""maskgit-sampled-token_idxs[0,:,0]: {}"", token_idxs[0,:,0])\r\n token_idxs = jnp.where(mask, sampled_token_idxs, token_idxs)\r\n jax.debug.print(""maskgit-token_idxs[0,:,0]: {}"", token_idxs[0,:,0])\r\n \r\n(jdb) ",,terminal_output +14635,10858582,"TERMINAL",0,0,"53",,terminal_output +14636,10859650,"TERMINAL",0,0,"64",,terminal_output +14637,10860734,"TERMINAL",0,0,"75",,terminal_output +14638,10861758,"TERMINAL",0,0,"86",,terminal_output +14639,10862764,"TERMINAL",0,0,"97",,terminal_output +14640,10863805,"TERMINAL",0,0,"408",,terminal_output +14641,10864239,"TERMINAL",0,0,"[?25lf[?25h[?25li[?25h",,terminal_output +14642,10864376,"TERMINAL",0,0,"[?25ln[?25h",,terminal_output +14643,10864458,"TERMINAL",0,0,"[?25la[?25h",,terminal_output +14644,10864564,"TERMINAL",0,0,"[?25ll[?25h",,terminal_output +14645,10864828,"TERMINAL",0,0,"[?25l_[?25h",,terminal_output +14646,10864838,"TERMINAL",0,0,"19",,terminal_output +14647,10865070,"TERMINAL",0,0,"[?25lt[?25h",,terminal_output +14648,10865231,"TERMINAL",0,0,"[?25lk[?25h",,terminal_output +14649,10865571,"TERMINAL",0,0,"[?25lk\r[?25h",,terminal_output +14650,10865724,"TERMINAL",0,0,"[?25lp[?25h",,terminal_output +14651,10865811,"TERMINAL",0,0,"[?25ll[?25h",,terminal_output +14652,10865865,"TERMINAL",0,0,"21:00:00",,terminal_output +14653,10865874,"TERMINAL",0,0,"[?25le[?25h",,terminal_output +14654,10866092,"TERMINAL",0,0,"[?25ln[?25h",,terminal_output +14655,10866454,"TERMINAL",0,0,"[?25ln\r[?25h",,terminal_output +14656,10866610,"TERMINAL",0,0,"[?25le\r[?25h",,terminal_output +14657,10866732,"TERMINAL",0,0,"[?25ll\r[?25h",,terminal_output +14658,10866841,"TERMINAL",0,0,"[?25lp\r[?25h",,terminal_output +14659,10866953,"TERMINAL",0,0,"31",,terminal_output +14660,10867015,"TERMINAL",0,0,"[?25lo[?25h",,terminal_output +14661,10867078,"TERMINAL",0,0,"[?25lk[?25h",,terminal_output +14662,10867151,"TERMINAL",0,0,"salloc: Job 3315981 has exceeded its time limit and its allocation has been 
revoked.\nsrun: Job step aborted: Waiting up to 32 seconds for job step to finish.\r\nslurmstepd: error: *** STEP 3315981.interactive ON hkn0901 CANCELLED AT 2025-07-03T19:16:44 DUE TO TIME LIMIT ***\r\nTerminated\r\n]0;tum_cte0515@hkn0901:~/Projects/jafar[?2004h(jafar) [tum_cte0515@hkn0901 jafar]$ ",,terminal_output +14663,10867992,"TERMINAL",0,0,"4CG2",,terminal_output +14664,10869027,"TERMINAL",0,0,"5\t",,terminal_output +14665,10870155,"TERMINAL",0,0,"6\t",,terminal_output +14666,10871092,"TERMINAL",0,0,"7\t",,terminal_output +14667,10871457,"genie.py",0,0,"",python,tab +14668,10871458,"genie.py",9415,0,"",python,selection_mouse +14669,10872140,"TERMINAL",0,0,"8\t",,terminal_output +14670,10873227,"TERMINAL",0,0,"50\t",,terminal_output +14671,10873791,"genie.py",9189,0,"",python,selection_mouse +14672,10874251,"TERMINAL",0,0,"1\t",,terminal_output +14673,10874377,"genie.py",9326,0,"",python,selection_mouse +14674,10875295,"TERMINAL",0,0,"2\t",,terminal_output +14675,10876362,"TERMINAL",0,0,"3\t",,terminal_output +14676,10876510,"genie.py",9387,0,"\n ",python,content +14677,10876747,"genie.py",9396,0,"p",python,content +14678,10876748,"genie.py",9397,0,"",python,selection_keyboard +14679,10877408,"TERMINAL",0,0,"4\t",,terminal_output +14680,10877509,"genie.py",9396,1,"",python,content +14681,10878461,"TERMINAL",0,0,"5\t",,terminal_output +14682,10879575,"TERMINAL",0,0,"6\t",,terminal_output +14683,10879664,"genie.py",9396,0,"p",python,content +14684,10879665,"genie.py",9397,0,"",python,selection_keyboard +14685,10880178,"genie.py",9397,0,"r",python,content +14686,10880180,"genie.py",9398,0,"",python,selection_keyboard +14687,10880580,"TERMINAL",0,0,"7\t",,terminal_output +14688,10880941,"genie.py",9397,1,"",python,content +14689,10881065,"genie.py",9396,1,"",python,content +14690,10881609,"TERMINAL",0,0,"8\t",,terminal_output +14691,10881810,"genie.py",9396,0,"j",python,content +14692,10881811,"genie.py",9397,0,"",python,selection_keyboard +14693,10881967,"genie.py",9397,0,"a",python,content +14694,10881968,"genie.py",9398,0,"",python,selection_keyboard +14695,10882141,"genie.py",9398,0,"x",python,content +14696,10882142,"genie.py",9399,0,"",python,selection_keyboard +14697,10882661,"TERMINAL",0,0,"9\t",,terminal_output +14698,10883312,"genie.py",9399,0,".",python,content +14699,10883313,"genie.py",9400,0,"",python,selection_keyboard +14700,10883706,"TERMINAL",0,0,"7:00\t",,terminal_output +14701,10884121,"genie.py",9400,0,"d",python,content +14702,10884134,"genie.py",9401,0,"",python,selection_keyboard +14703,10884324,"genie.py",9401,0,"e",python,content +14704,10884324,"genie.py",9402,0,"",python,selection_keyboard +14705,10884380,"genie.py",9402,0,"b",python,content +14706,10884380,"genie.py",9403,0,"",python,selection_keyboard +14707,10884504,"genie.py",9403,0,"u",python,content +14708,10884505,"genie.py",9404,0,"",python,selection_keyboard +14709,10884647,"genie.py",9404,0,"g",python,content +14710,10884648,"genie.py",9405,0,"",python,selection_keyboard +14711,10884757,"TERMINAL",0,0,"1\t",,terminal_output +14712,10884813,"genie.py",9405,0,".",python,content +14713,10884814,"genie.py",9406,0,"",python,selection_keyboard +14714,10885160,"genie.py",9406,0,"p",python,content +14715,10885161,"genie.py",9407,0,"",python,selection_keyboard +14716,10885340,"genie.py",9407,0,"r",python,content +14717,10885341,"genie.py",9408,0,"",python,selection_keyboard +14718,10885429,"genie.py",9408,0,"i",python,content +14719,10885430,"genie.py",9409,0,"",python,selection_keyboard 
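[annotation] The long ERROR/SystemExit traceback replayed above is the side effect of quitting jdb while jax.debug.breakpoint() is running under lax.scan: the CLI debugger's do_quit calls sys.exit(0), the resulting SystemExit escapes through the debug callback, and XLA surfaces it as "INTERNAL: CpuCallback error" (jaxlib._jax.XlaRuntimeError) rather than a clean exit. A tiny reproduction sketch, assuming you type q at the prompt:

    import jax
    import jax.numpy as jnp

    def step(carry, _):
        jax.debug.breakpoint()  # typing `q` here raises SystemExit inside the callback
        return carry + 1, carry

    # Quitting the debugger aborts the scan with an XlaRuntimeError
    # (INTERNAL: CpuCallback error), as in the traceback above.
    jax.lax.scan(step, jnp.int32(0), None, length=4)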
+14720,10885513,"genie.py",9409,0,"n",python,content +14721,10885514,"genie.py",9410,0,"",python,selection_keyboard +14722,10885597,"genie.py",9410,0,"t",python,content +14723,10885598,"genie.py",9411,0,"",python,selection_keyboard +14724,10885817,"TERMINAL",0,0,"2\t",,terminal_output +14725,10886453,"genie.py",9411,0,"()",python,content +14726,10886454,"genie.py",9412,0,"",python,selection_keyboard +14727,10886821,"TERMINAL",0,0,"3\t",,terminal_output +14728,10887875,"TERMINAL",0,0,"4\t",,terminal_output +14729,10889001,"TERMINAL",0,0,"5\t",,terminal_output +14730,10889655,"genie.py",9636,0,"",python,selection_mouse +14731,10889841,"genie.py",9632,5,"debug",python,selection_mouse +14732,10890031,"TERMINAL",0,0,"6\t",,terminal_output +14733,10890925,"genie.py",9405,0,"",python,selection_mouse +14734,10891036,"genie.py",9400,5,"debug",python,selection_mouse +14735,10891049,"TERMINAL",0,0,"7\t",,terminal_output +14736,10891184,"genie.py",9388,26," jax.debug.print()\n",python,selection_mouse +14737,10891504,"genie.py",9388,26,"",python,content +14738,10892169,"TERMINAL",0,0,"8\t",,terminal_output +14739,10892921,"genie.py",9300,0,"",python,selection_command +14740,10893092,"genie.py",9228,0,"",python,selection_command +14741,10893116,"TERMINAL",0,0,"9\t",,terminal_output +14742,10894170,"genie.py",9300,0,"",python,selection_command +14743,10894231,"TERMINAL",0,0,"11\t",,terminal_output +14744,10895245,"TERMINAL",0,0,"2\t",,terminal_output +14745,10895914,"genie.py",9300,1," ",python,selection_command +14746,10896251,"TERMINAL",0,0,"3\t",,terminal_output +14747,10896489,"genie.py",9300,1," ",python,selection_command +14748,10896895,"TERMINAL",0,0,"srun: error: hkn0901: task 0: Killed\r\n]0;tum_cte0515@hkn1993:~/Projects/jafar]633;D;137]633;P;Cwd=/home/hk-project-p0023960/tum_cte0515/Projects/jafar",,terminal_output +14749,10897402,"TERMINAL",0,0,"4\t",,terminal_output +14750,10897456,"genie.py",9388,0,"",python,selection_command +14751,10897771,"genie.py",9300,0,"",python,selection_command +14752,10898355,"TERMINAL",0,0,"5\t",,terminal_output +14753,10898772,"genie.py",9300,87," final_token_probs = gather_fn(jax.nn.softmax(final_logits), sampled_token_idxs)",python,selection_command +14754,10899433,"TERMINAL",0,0,"6\t",,terminal_output +14755,10900431,"TERMINAL",0,0,"7\t",,terminal_output +14756,10900627,"TERMINAL",0,0,"salloc --time=01:00:00 --partition=dev_accelerated-h100 --nodes=1 --ntasks-per-node=1 --gres=gpu:1 --cpus-per-task=5",,terminal_command +14757,10900680,"TERMINAL",0,0,"]633;E;2025-07-03 19:17:17 salloc --time=01:00:00 --partition=dev_accelerated-h100 --nodes=1 --ntasks-per-node=1 --gres=gpu:1 --cpus-per-task=5 ;dd830a57-5de1-42ee-9f5b-a467117add9f]633;Csalloc: Pending job allocation 3316038\r\nsalloc: job 3316038 queued and waiting for resources\r\n",,terminal_output +14758,10901489,"TERMINAL",0,0,"83316038 dev_accel interact tum_cte0 PD\t0:00\t 1 (Resources)",,terminal_output +14759,10902523,"TERMINAL",0,0,"9\t",,terminal_output +14760,10902835,"TERMINAL",0,0,"watch",,terminal_focus +14761,10903363,"TERMINAL",0,0,"[?1049l\r[?1l>]0;tum_cte0515@hkn1993:~/Projects/jafar/scripts_horeka/batchsize_scaling/adjusted_lr]633;D;0",,terminal_output +14762,10905510,"TERMINAL",0,0,"idling",,terminal_command +14763,10905564,"TERMINAL",0,0,"]633;E;2025-07-03 19:17:22 idling;70fb23be-3987-4c7c-890b-2d8e1f267fe1]633;C[?1049h(B[?7hEvery 1.0s: sinfo_t_idlehkn1993.localdomain: Thu Jul 3 19:17:22 2025Partition dev_cpuonly: 11 nodes idle\rPartition cpuonly: 67 nodes idle\rPartition 
dev_accelerated:\t 1 nodes idle\rPartition accelerated:\t 0 nodes idle\rPartition dev_accelerated-h100 :\t 1 nodes idle\rPartition accelerated-h100:\t 0 nodes idle\rPartition large:\t 7 nodes idle",,terminal_output +14764,10906608,"TERMINAL",0,0,"3\t",,terminal_output +14765,10907373,"TERMINAL",0,0,"salloc",,terminal_focus +14766,10907737,"TERMINAL",0,0,"4\t",,terminal_output +14767,10908692,"TERMINAL",0,0,"5\t",,terminal_output +14768,10909785,"TERMINAL",0,0,"6\t",,terminal_output +14769,10910218,"TERMINAL",0,0,"salloc: job 3316038 has been allocated resources\r\nsalloc: Granted job allocation 3316038\r\nsalloc: Waiting for resource configuration\r\n",,terminal_output +14770,10910808,"TERMINAL",0,0,"70",,terminal_output +14771,10911833,"TERMINAL",0,0,"8\t",,terminal_output +14772,10912022,"genie.py",0,0,"",python,tab +14773,10912496,"genie.py",9408,0,"",python,selection_mouse +14774,10912831,"TERMINAL",0,0,"9\t",,terminal_output +14775,10913312,"genie.py",9520,0,"",python,selection_mouse +14776,10913981,"TERMINAL",0,0,"30\t",,terminal_output +14777,10914259,"genie.py",9514,0,"",python,selection_mouse +14778,10914397,"genie.py",9510,10,"breakpoint",python,selection_mouse +14779,10914946,"genie.py",9503,0,"",python,selection_mouse +14780,10915035,"TERMINAL",0,0,"1\t",,terminal_output +14781,10915438,"genie.py",9408,0,"",python,selection_mouse +14782,10915955,"TERMINAL",0,0,"2\t",,terminal_output +14783,10915974,"genie.py",9518,0,"",python,selection_mouse +14784,10916976,"TERMINAL",0,0,"3\t",,terminal_output +14785,10918024,"TERMINAL",0,0,"4\t",,terminal_output +14786,10918617,"genie.py",9522,0,"",python,selection_mouse +14787,10918643,"genie.py",9521,0,"",python,selection_command +14788,10919061,"TERMINAL",0,0,"5\t",,terminal_output +14789,10919877,"genie.py",9622,0,"",python,selection_mouse +14790,10920112,"TERMINAL",0,0,"6\t",,terminal_output +14791,10921125,"genie.py",9321,0,"",python,selection_mouse +14792,10921176,"TERMINAL",0,0,"8\t",,terminal_output +14793,10922209,"TERMINAL",0,0,"9\t",,terminal_output +14794,10922473,"genie.py",9300,87," final_token_probs = gather_fn(jax.nn.softmax(final_logits), sampled_token_idxs)",python,selection_command +14795,10922660,"genie.py",9228,159," gather_fn_tmp = jax.vmap(jax.vmap(jax.vmap(lambda x, y: x[y])))\n final_token_probs = gather_fn(jax.nn.softmax(final_logits), sampled_token_idxs)",python,selection_command +14796,10923394,"genie.py",9236,0,"",python,selection_command +14797,10923434,"TERMINAL",0,0,"40\t",,terminal_output +14798,10924279,"TERMINAL",0,0,"1\t",,terminal_output +14799,10925302,"TERMINAL",0,0,"2\t",,terminal_output +14800,10926430,"TERMINAL",0,0,"3\t",,terminal_output +14801,10927390,"TERMINAL",0,0,"4\t",,terminal_output +14802,10928210,"genie.py",9249,0,"",python,selection_command +14803,10928431,"TERMINAL",0,0,"5\t",,terminal_output +14804,10928972,"genie.py",9319,0,"",python,selection_mouse +14805,10929477,"TERMINAL",0,0,"6\t",,terminal_output +14806,10929638,"genie.py",9300,87," final_token_probs = gather_fn(jax.nn.softmax(final_logits), sampled_token_idxs)",python,selection_command +14807,10929851,"genie.py",9300,191," final_token_probs = gather_fn(jax.nn.softmax(final_logits), sampled_token_idxs)\n final_token_probs_tmp = gather_fn_tmp(jax.nn.softmax(final_logits_tmp), sampled_token_idxs_tmp)",python,selection_command +14808,10930571,"TERMINAL",0,0,"7\t",,terminal_output +14809,10930992,"genie.py",9308,0,"",python,selection_command +14810,10931593,"TERMINAL",0,0,"8\t",,terminal_output 
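The keystrokes above type a jax.debug.print call into genie.py (and immediately revise it). As a sketch of the technique being set up here: jax.debug.print survives jit and lax.scan because its arguments travel through a host callback, which is also why the shape tuples echoed later in this log appear as weak-typed int32 Arrays rather than plain ints. The toy shapes below are assumptions taken from the session's later output:

import jax
import jax.numpy as jnp

@jax.jit
def inspect(final_token_probs, final_token_probs_tmp):
    # printed from inside jit: each Python int in the shape tuple is staged
    # through the callback and echoed as Array(..., dtype=int32, weak_type=True)
    jax.debug.print("final_token_probs shape: {}", final_token_probs.shape)
    jax.debug.print("final_token_probs_tmp shape: {}", final_token_probs_tmp.shape)
    return final_token_probs

inspect(jnp.zeros((1, 920)), jnp.zeros((1, 16, 920)))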
+14811,10932725,"TERMINAL",0,0,"9\t",,terminal_output +14812,10933615,"TERMINAL",0,0,"50\t",,terminal_output +14813,10934696,"TERMINAL",0,0,"1\t",,terminal_output +14814,10935662,"TERMINAL",0,0,"2\t",,terminal_output +14815,10936675,"TERMINAL",0,0,"3\t",,terminal_output +14816,10937701,"genie.py",9300,0,"",python,selection_command +14817,10937748,"TERMINAL",0,0,"4\t",,terminal_output +14818,10938113,"TERMINAL",0,0,"salloc: Prolog hung on node hkn0901\r\n",,terminal_output +14819,10938752,"TERMINAL",0,0,"5\t",,terminal_output +14820,10939807,"TERMINAL",0,0,"6\t",,terminal_output +14821,10940564,"genie.py",9492,0," jax.debug.print(""final_token_probs shape: {}"", final_token_probs.shape)\n",python,content +14822,10940705,"genie.py",9572,0," jax.debug.print(""final_token_probs_tmp shape: {}"", final_token_probs_tmp.shape)\n",python,content +14823,10940846,"TERMINAL",0,0,"7\t",,terminal_output +14824,10941911,"TERMINAL",0,0,"8\t",,terminal_output +14825,10943101,"TERMINAL",0,0,"9\t",,terminal_output +14826,10943990,"TERMINAL",0,0,"8:00\t",,terminal_output +14827,10944610,"genie.py",8783,0,"",python,selection_mouse +14828,10945009,"TERMINAL",0,0,"1\t",,terminal_output +14829,10945425,"genie.py",8759,66," sampled_token_idxs = jnp.argmax(final_logits, axis=-1)",python,selection_command +14830,10945669,"genie.py",8759,141," sampled_token_idxs = jnp.argmax(final_logits, axis=-1) sampled_token_idxs_tmp = jnp.argmax(final_logits_tmp, axis=-1)",python,content +14831,10945716,"genie.py",8825,0,"",python,selection_command +14832,10946057,"TERMINAL",0,0,"2\t",,terminal_output +14833,10946803,"genie.py",8825,0,"\n ",python,content +14834,10946897,"genie.py",8824,0,"",python,selection_command +14835,10948447,"genie.py",8759,66," sampled_token_idxs = jnp.argmax(final_logits, axis=-1)",python,selection_command +14836,10948634,"TERMINAL",0,0,"36",,terminal_output +14837,10948701,"genie.py",8759,141," sampled_token_idxs = jnp.argmax(final_logits, axis=-1)\n sampled_token_idxs_tmp = jnp.argmax(final_logits_tmp, axis=-1)",python,selection_command +14838,10948788,"TERMINAL",0,0,"salloc: Nodes hkn0901 are ready for job\r\n",,terminal_output +14839,10949636,"genie.py",8771,0,"",python,selection_command +14840,10949659,"TERMINAL",0,0,"]0;tum_cte0515@hkn0901:~/Projects/jafar[?2004h[tum_cte0515@hkn0901 jafar]$ ",,terminal_output +14841,10949733,"TERMINAL",0,0,"6\t",,terminal_output +14842,10950745,"TERMINAL",0,0,"7\t",,terminal_output +14843,10950824,"genie.py",8759,0,"",python,selection_command +14844,10951689,"genie.py",8550,0,"",python,selection_mouse +14845,10951791,"TERMINAL",0,0,"8\t",,terminal_output +14846,10952781,"genie.py",8901,0," jax.debug.print(""sampled_token_idxs shape: {}"", sampled_token_idxs.shape)\n",python,content +14847,10952971,"genie.py",8987,0," jax.debug.print(""sampled_token_idxs_tmp shape: {}"", sampled_token_idxs_tmp.shape)\n",python,content +14848,10952995,"genie.py",8526,75," final_logits = self.dynamics.dynamics(vid_embed)[:, -1] / step_temp",python,selection_command +14849,10952996,"TERMINAL",0,0,"9\t",,terminal_output +14850,10953079,"genie.py",8526,148," final_logits = self.dynamics.dynamics(vid_embed)[:, -1] / step_temp\n final_logits_tmp = self.dynamics.dynamics(vid_embed) / step_temp",python,selection_command +14851,10953820,"genie.py",8534,0,"",python,selection_command +14852,10953955,"TERMINAL",0,0,"10\t",,terminal_output +14853,10954800,"genie.py",8526,0,"",python,selection_command +14854,10954887,"TERMINAL",0,0,"1\t",,terminal_output 
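The edits above duplicate the logits line: the original keeps only the last frame ([:, -1]), while the _tmp copy keeps every frame. A sketch of the two views under the layout implied by the shapes printed afterwards (B=1 batch, T=16 frames, N=920 tokens, V=1024 vocabulary; the (B, T, N, V) layout is an assumption consistent with those prints):

import jax.numpy as jnp

def logits_views(all_logits, step_temp=1.0):
    # all_logits: (B, T, N, V), e.g. the output of self.dynamics.dynamics(vid_embed)
    final_logits = all_logits[:, -1] / step_temp   # (B, N, V): last frame only
    final_logits_tmp = all_logits / step_temp      # (B, T, N, V): every frame
    return final_logits, final_logits_tmp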
+14855,10955920,"TERMINAL",0,0,"2\t",,terminal_output +14856,10956816,"genie.py",8675,0," jax.debug.print(""final_logits shape: {}"", final_logits.shape)\n",python,content +14857,10956980,"genie.py",8745,0," jax.debug.print(""final_logits_tmp shape: {}"", final_logits_tmp.shape)\n",python,content +14858,10957114,"TERMINAL",0,0,"3\t",,terminal_output +14859,10958019,"TERMINAL",0,0,"4\t",,terminal_output +14860,10959147,"TERMINAL",0,0,"5\t",,terminal_output +14861,10960098,"TERMINAL",0,0,"6\t",,terminal_output +14862,10960163,"TERMINAL",0,0,"python",,terminal_output +14863,10960598,"TERMINAL",0,0,"",,terminal_output +14864,10961166,"TERMINAL",0,0,"8\t",,terminal_output +14865,10961219,"TERMINAL",0,0,"[?25lo[?25h",,terminal_output +14866,10961377,"TERMINAL",0,0,"[?25ls[?25h",,terminal_output +14867,10961941,"TERMINAL",0,0,"[?25ls[?25h",,terminal_output +14868,10962041,"TERMINAL",0,0,"[?25lo[?25h[?25lu[?25h",,terminal_output +14869,10962158,"TERMINAL",0,0,"[?25lr[?25h",,terminal_output +14870,10962229,"TERMINAL",0,0,"9\t",,terminal_output +14871,10962326,"TERMINAL",0,0,"[?25lc[?25h",,terminal_output +14872,10962512,"TERMINAL",0,0,"[?25le[?25h[?25l [?25h",,terminal_output +14873,10962660,"TERMINAL",0,0,"[?25l.[?25h",,terminal_output +14874,10962771,"TERMINAL",0,0,"[?25lv[?25h",,terminal_output +14875,10963136,"TERMINAL",0,0,"env/",,terminal_output +14876,10963240,"TERMINAL",0,0,"20\t",,terminal_output +14877,10963486,"TERMINAL",0,0,"[?25lb[?25h",,terminal_output +14878,10963852,"TERMINAL",0,0,"in/",,terminal_output +14879,10964257,"TERMINAL",0,0,"[?25lc[?25h",,terminal_output +14880,10964315,"TERMINAL",0,0,"1\t",,terminal_output +14881,10964722,"TERMINAL",0,0,"[?25la[?25h[?25lc[?25h",,terminal_output +14882,10964944,"TERMINAL",0,0,"tivate",,terminal_output +14883,10965387,"TERMINAL",0,0,"2\t",,terminal_output +14884,10965620,"TERMINAL",0,0,"\r\n[?2004l\r]0;tum_cte0515@hkn0901:~/Projects/jafar[?2004h(jafar) [tum_cte0515@hkn0901 jafar]$ ",,terminal_output +14885,10965949,"TERMINAL",0,0,"source .venv/bin/activate",,terminal_output +14886,10966116,"TERMINAL",0,0,"python",,terminal_output +14887,10966415,"TERMINAL",0,0,"3\t",,terminal_output +14888,10966471,"TERMINAL",0,0,"import numpy as np",,terminal_output +14889,10967022,"TERMINAL",0,0,"source .venv/bin/activate",,terminal_output +14890,10967200,"TERMINAL",0,0,"python",,terminal_output +14891,10967359,"TERMINAL",0,0,"queue",,terminal_output +14892,10967422,"TERMINAL",0,0,"4\t",,terminal_output +14893,10967851,"TERMINAL",0,0,"scancel 3313564",,terminal_output +14894,10967997,"TERMINAL",0,0,"queue",,terminal_output +14895,10968313,"TERMINAL",0,0,"idling",,terminal_output +14896,10968416,"TERMINAL",0,0,"queue",,terminal_output +14897,10968478,"TERMINAL",0,0,"5\t",,terminal_output +14898,10968568,"TERMINAL",0,0,"idling",,terminal_output +14899,10968954,"TERMINAL",0,0,"salloc --time=03:30:00 --partition=accelerated --nodes=2 --ntasks-per-node=5 --gres=gpu:4 --cpus-per-task=10 ",,terminal_output +14900,10969136,"TERMINAL",0,0,"\r/bin/python3 /hkfs/home/project/hk-project-p0023960/tum_cte0515/.cursor-server/extensions/ms-python.python-2024.12.3-linux-x64/python_files/printEnvVariablesToFile.py /hkfs/home/project/hk-project-p0023960/tum_cte0515/.cursor-server/extensions/ms-python.python-2024.12.3-linux-x64/python_files/deactivate/bash/envVars.txt",,terminal_output +14901,10969499,"TERMINAL",0,0,"6\t",,terminal_output +14902,10969831,"TERMINAL",0,0,"salloc --time=01:00:00 --partition=dev_accelerated --nodes=1 --ntasks-per-node=1 --gres=gpu:1 
--cpus-per-task=5\r\n\r\r\n\r",,terminal_output +14903,10970444,"TERMINAL",0,0,"\r3",,terminal_output +14904,10970547,"TERMINAL",0,0,"7\t",,terminal_output +14905,10970611,"TERMINAL",0,0,"\r",,terminal_output +14906,10971085,"TERMINAL",0,0,"\rh scripts_horeka/overfit_sample_tiny/sample.sh ",,terminal_output +14907,10971619,"TERMINAL",0,0,"8\t",,terminal_output +14908,10972252,"TERMINAL",0,0,"[?25l[?2004l\r[?25h",,terminal_output +14909,10972399,"TERMINAL",0,0,"Sampling from checkpoint: /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/3309699/genie_1751384516_74500/\r\n",,terminal_output +14910,10972642,"TERMINAL",0,0,"9\t",,terminal_output +14911,10973681,"TERMINAL",0,0,"30\t",,terminal_output +14912,10974809,"TERMINAL",0,0,"1\t",,terminal_output +14913,10975320,"TERMINAL",0,0,"2025-07-03 19:18:32.190727: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +14914,10975833,"TERMINAL",0,0,"2\t",,terminal_output +14915,10976809,"TERMINAL",0,0,"3\t",,terminal_output +14916,10977883,"TERMINAL",0,0,"4\t",,terminal_output +14917,10978905,"TERMINAL",0,0,"5\t",,terminal_output +14918,10979932,"TERMINAL",0,0,"6\t",,terminal_output +14919,10981055,"TERMINAL",0,0,"7\t",,terminal_output +14920,10982079,"TERMINAL",0,0,"8\t",,terminal_output +14921,10983041,"TERMINAL",0,0,"9\t",,terminal_output +14922,10984128,"TERMINAL",0,0,"40\t",,terminal_output +14923,10985151,"TERMINAL",0,0,"1\t",,terminal_output +14924,10986278,"TERMINAL",0,0,"3\t",,terminal_output +14925,10987273,"TERMINAL",0,0,"4\t",,terminal_output +14926,10988223,"TERMINAL",0,0,"2025-07-03 19:18:45.058221: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +14927,10988330,"TERMINAL",0,0,"5\t",,terminal_output +14928,10989452,"TERMINAL",0,0,"6\t",,terminal_output +14929,10990476,"TERMINAL",0,0,"7\t",,terminal_output +14930,10991499,"TERMINAL",0,0,"8\t",,terminal_output +14931,10992524,"TERMINAL",0,0,"9\t",,terminal_output +14932,10993551,"TERMINAL",0,0,"50\t",,terminal_output +14933,10994573,"TERMINAL",0,0,"1\t",,terminal_output +14934,10995603,"TERMINAL",0,0,"2\t",,terminal_output +14935,10996635,"TERMINAL",0,0,"3\t",,terminal_output +14936,10997683,"TERMINAL",0,0,"4\t",,terminal_output +14937,10998122,"TERMINAL",0,0,"2025-07-03 19:18:55.023332: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. 
Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +14938,10998771,"TERMINAL",0,0,"5\t",,terminal_output +14939,10999795,"TERMINAL",0,0,"6\t",,terminal_output +14940,11000818,"TERMINAL",0,0,"7\t",,terminal_output +14941,11001856,"TERMINAL",0,0,"8\t",,terminal_output +14942,11002876,"TERMINAL",0,0,"9\t",,terminal_output +14943,11003993,"TERMINAL",0,0,"9:00\t",,terminal_output +14944,11005017,"TERMINAL",0,0,"1\t",,terminal_output +14945,11005978,"TERMINAL",0,0,"2\t",,terminal_output +14946,11007065,"TERMINAL",0,0,"3\t",,terminal_output +14947,11008069,"TERMINAL",0,0,"4\t",,terminal_output +14948,11009114,"TERMINAL",0,0,"5\t",,terminal_output +14949,11010258,"TERMINAL",0,0,"7\t",,terminal_output +14950,11011264,"TERMINAL",0,0,"8\t",,terminal_output +14951,11011274,"TERMINAL",0,0,"2025-07-03 19:19:08.103906: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +14952,11012288,"TERMINAL",0,0,"9\t",,terminal_output +14953,11013312,"TERMINAL",0,0,"10\t",,terminal_output +14954,11014029,"TERMINAL",0,0,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/orbax/checkpoint/_src/serialization/type_handlers.py:1251: UserWarning: Sharding info not provided when restoring. Populating sharding info from sharding file. Please note restoration time will be slightly increased due to reading from file. Note also that this option is unsafe when restoring on a different topology than the checkpoint was saved with.\r\n warnings.warn(\r\n",,terminal_output +14955,11014368,"TERMINAL",0,0,"1\t",,terminal_output +14956,11015360,"TERMINAL",0,0,"2\t",,terminal_output +14957,11016384,"TERMINAL",0,0,"3\t",,terminal_output +14958,11017408,"TERMINAL",0,0,"4\t",,terminal_output +14959,11018406,"TERMINAL",0,0,"5\t",,terminal_output +14960,11019566,"TERMINAL",0,0,"6\t",,terminal_output +14961,11020587,"TERMINAL",0,0,"7\t",,terminal_output +14962,11021614,"TERMINAL",0,0,"8\t",,terminal_output +14963,11022156,"TERMINAL",0,0,"2025-07-03 19:19:19.058935: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +14964,11022631,"TERMINAL",0,0,"9\t",,terminal_output +14965,11023623,"TERMINAL",0,0,"20\t",,terminal_output +14966,11024664,"TERMINAL",0,0,"17",,terminal_output +14967,11025293,"TERMINAL",0,0,"2025-07-03 19:19:22.094907: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. 
Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +14968,11025702,"TERMINAL",0,0,"2\t",,terminal_output +14969,11026747,"TERMINAL",0,0,"3\t",,terminal_output +14970,11027799,"TERMINAL",0,0,"4\t",,terminal_output +14971,11028878,"TERMINAL",0,0,"5\t",,terminal_output +14972,11029901,"TERMINAL",0,0,"6\t",,terminal_output +14973,11031027,"TERMINAL",0,0,"76",,terminal_output +14974,11031847,"TERMINAL",0,0,"2025-07-03 19:19:28.728005: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +14975,11031972,"TERMINAL",0,0,"8\t",,terminal_output +14976,11033019,"TERMINAL",0,0,"9\t",,terminal_output +14977,11033712,"TERMINAL",0,0,"2025-07-03 19:19:30.613538: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +14978,11034099,"TERMINAL",0,0,"30\t",,terminal_output +14979,11035123,"TERMINAL",0,0,"1\t",,terminal_output +14980,11035124,"TERMINAL",0,0,"token_idxs shape: (1, 1, 920)\r\n",,terminal_output +14981,11036161,"TERMINAL",0,0,"2\t",,terminal_output +14982,11036583,"TERMINAL",0,0,"final_logits shape: (Array(1, dtype=int32, weak_type=True), Array(920, dtype=int32, weak_type=True), Array(1024, dtype=int32, weak_type=True))\r\nfinal_logits_tmp shape: (Array(1, dtype=int32, weak_type=True), Array(16, dtype=int32, weak_type=True), Array(920, dtype=int32, weak_type=True), Array(1024, dtype=int32, weak_type=True))\r\nsampled_token_idxs shape: (Array(1, dtype=int32, weak_type=True), Array(920, dtype=int32, weak_type=True))\r\nsampled_token_idxs_tmp shape: (Array(1, dtype=int32, weak_type=True), Array(16, dtype=int32, weak_type=True), Array(920, dtype=int32, weak_type=True))\r\n",,terminal_output +14983,11036689,"TERMINAL",0,0,"final_token_probs shape: (Array(1, dtype=int32, weak_type=True), Array(920, dtype=int32, weak_type=True))\r\nfinal_token_probs_tmp shape: (Array(1, dtype=int32, weak_type=True), Array(16, dtype=int32, weak_type=True), Array(920, dtype=int32, weak_type=True))\r\n",,terminal_output +14984,11037274,"TERMINAL",0,0,"4\t",,terminal_output +14985,11038239,"TERMINAL",0,0,"5\t",,terminal_output +14986,11039268,"TERMINAL",0,0,"6\t",,terminal_output +14987,11040376,"TERMINAL",0,0,"7\t",,terminal_output +14988,11041370,"TERMINAL",0,0,"8\t",,terminal_output +14989,11042304,"TERMINAL",0,0,"final_token_probs shape: (Array(1, dtype=int32), Array(920, dtype=int32))\r\nsampled_token_idxs_tmp shape: (Array(1, dtype=int32), Array(16, dtype=int32), Array(920, dtype=int32))\r\nsampled_token_idxs shape: (Array(1, dtype=int32), Array(920, dtype=int32))\r\nfinal_logits_tmp shape: (Array(1, dtype=int32), Array(16, dtype=int32), Array(920, dtype=int32), Array(1024, dtype=int32))\r\nfinal_logits shape: (Array(1, dtype=int32), Array(920, dtype=int32), Array(1024, dtype=int32))\r\nfinal_token_probs_tmp shape: (Array(1, dtype=int32), Array(16, dtype=int32), Array(920, dtype=int32))\r\nmaskgit-sampled-token_idxs[0,:,0]: [151 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0]\r\nEntering jdb:\r\n(jdb) ",,terminal_output 
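The two maskgit prints above show the fill pattern step by step: after the first pass only position 0 holds a committed token (151), and after continuing, the freshly sampled 635s land in the remaining masked slots. A sketch of the update step as it appears in the jdb source listing (mask is the boolean map of still-unfilled positions; shapes are assumptions):

import jax.numpy as jnp

def maskgit_update(token_idxs, sampled_token_idxs, final_token_probs, mask):
    # committed positions receive +1 pseudo-confidence (~mask is 0/1), so they
    # are never selected for resampling in later refinement steps
    final_token_probs = final_token_probs + ~mask
    # write newly sampled tokens only where the mask is still set
    token_idxs = jnp.where(mask, sampled_token_idxs, token_idxs)
    return token_idxs, final_token_probs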
+14990,11042409,"TERMINAL",0,0,"9\t",,terminal_output +14991,11043459,"TERMINAL",0,0,"40\t",,terminal_output +14992,11044494,"TERMINAL",0,0,"1\t",,terminal_output +14993,11045577,"TERMINAL",0,0,"2\t",,terminal_output +14994,11046591,"TERMINAL",0,0,"3\t",,terminal_output +14995,11047616,"TERMINAL",0,0,"4\t",,terminal_output +14996,11048634,"TERMINAL",0,0,"5\t",,terminal_output +14997,11049676,"TERMINAL",0,0,"6\t",,terminal_output +14998,11050718,"TERMINAL",0,0,"7\t",,terminal_output +14999,11051814,"TERMINAL",0,0,"8\t",,terminal_output +15000,11052838,"TERMINAL",0,0,"9\t",,terminal_output +15001,11053863,"TERMINAL",0,0,"50\t",,terminal_output +15002,11054990,"TERMINAL",0,0,"1\t",,terminal_output +15003,11056012,"TERMINAL",0,0,"2\t",,terminal_output +15004,11057202,"genie.py",0,0,"",python,tab +15005,11057204,"genie.py",9739,0,"",python,selection_mouse +15006,11057204,"genie.py",9724,21,"final_token_probs_tmp",python,selection_mouse +15007,11057386,"TERMINAL",0,0,"3\t",,terminal_output +15008,11057885,"genie.py",9645,0,"",python,selection_mouse +15009,11058121,"genie.py",9636,17,"final_token_probs",python,selection_mouse +15010,11058122,"TERMINAL",0,0,"4\t",,terminal_output +15011,11059084,"TERMINAL",0,0,"5\t",,terminal_output +15012,11060112,"TERMINAL",0,0,"6\t",,terminal_output +15013,11061166,"TERMINAL",0,0,"8\t",,terminal_output +15014,11061680,"TERMINAL",0,0,"final_token_probs",,terminal_output +15015,11062055,"TERMINAL",0,0,"[?25l.[?25h",,terminal_output +15016,11062119,"TERMINAL",0,0,"[?25ls[?25h",,terminal_output +15017,11062219,"TERMINAL",0,0,"9\t",,terminal_output +15018,11062282,"TERMINAL",0,0,"[?25lh[?25h",,terminal_output +15019,11062440,"TERMINAL",0,0,"[?25la[?25h",,terminal_output +15020,11062502,"TERMINAL",0,0,"[?25lp[?25h",,terminal_output +15021,11062630,"TERMINAL",0,0,"[?25le[?25h",,terminal_output +15022,11062756,"TERMINAL",0,0,"\r\n(1, 920)\r\n(jdb) ",,terminal_output +15023,11063225,"TERMINAL",0,0,"20:00\t",,terminal_output +15024,11063320,"TERMINAL",0,0,"\rfinal_token_probs.shape",,terminal_output +15025,11064308,"TERMINAL",0,0,"1\t",,terminal_output +15026,11064756,"TERMINAL",0,0,"\r",,terminal_output +15027,11065312,"TERMINAL",0,0,"27",,terminal_output +15028,11066341,"TERMINAL",0,0,"3\t",,terminal_output +15029,11066931,"TERMINAL",0,0,"[?25l.\r[1@_.[?25h",,terminal_output +15030,11067346,"TERMINAL",0,0,"[?25ls.\r[1@t.\r[1@m.[?25h",,terminal_output +15031,11067373,"TERMINAL",0,0,"4\t",,terminal_output +15032,11067408,"TERMINAL",0,0,"[?25l.\r[1@p.[?25h",,terminal_output +15033,11067996,"TERMINAL",0,0,"\r\n(1, 16, 920)\r\n(jdb) ",,terminal_output +15034,11068407,"TERMINAL",0,0,"5\t",,terminal_output +15035,11069531,"TERMINAL",0,0,"66",,terminal_output +15036,11069690,"TERMINAL",0,0,"\rfinal_token_probs_tmp.shape",,terminal_output +15037,11070140,"TERMINAL",0,0,"[?25le\r[?25h",,terminal_output +15038,11070271,"TERMINAL",0,0,"[?25lp\r[?25h",,terminal_output +15039,11070419,"TERMINAL",0,0,"[?25la\r[?25h",,terminal_output +15040,11070527,"TERMINAL",0,0,"7\t",,terminal_output +15041,11070535,"TERMINAL",0,0,"[?25lh\r[?25h",,terminal_output +15042,11070650,"TERMINAL",0,0,"[?25ls\r[?25h",,terminal_output +15043,11071098,"TERMINAL",0,0,"[?25l.\r[?25h",,terminal_output +15044,11071536,"TERMINAL",0,0,"8\t",,terminal_output +15045,11071732,"TERMINAL",0,0,"[?25l [?25h",,terminal_output +15046,11072609,"TERMINAL",0,0,"9\t",,terminal_output +15047,11073442,"TERMINAL",0,0,"[?25l+[?25h",,terminal_output +15048,11073615,"TERMINAL",0,0,"10\t",,terminal_output 
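The (jdb) prompt above accepts ordinary Python expressions, and the session evaluates final_token_probs.shape and final_token_probs_tmp.shape interactively. A minimal sketch of dropping into that debugger from jitted code; jax.debug.breakpoint is the call visible in the earlier source listing, and the toy shape is an assumption:

import jax
import jax.numpy as jnp

@jax.jit
def f(final_token_probs):
    # pauses execution and opens the (jdb) prompt; expressions such as
    # final_token_probs.shape can be evaluated there, and c continues
    jax.debug.breakpoint()
    return final_token_probs * 2

f(jnp.ones((1, 920)))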
+15049,11073976,"TERMINAL",0,0,"[?25l=[?25h",,terminal_output +15050,11074204,"TERMINAL",0,0,"[?25l [?25h",,terminal_output +15051,11074665,"TERMINAL",0,0,"~",,terminal_output +15052,11074685,"TERMINAL",0,0,"1\t",,terminal_output +15053,11075123,"TERMINAL",0,0,"[?25lm[?25h",,terminal_output +15054,11075184,"TERMINAL",0,0,"[?25la[?25h",,terminal_output +15055,11075252,"TERMINAL",0,0,"[?25ls[?25h",,terminal_output +15056,11075314,"TERMINAL",0,0,"[?25lk[?25h",,terminal_output +15057,11075700,"TERMINAL",0,0,"2\t",,terminal_output +15058,11075805,"TERMINAL",0,0,"\r\n*** SyntaxError: invalid syntax\r\n(jdb) ",,terminal_output +15059,11076744,"TERMINAL",0,0,"3\t",,terminal_output +15060,11077824,"TERMINAL",0,0,"4\t",,terminal_output +15061,11078849,"TERMINAL",0,0,"5\t",,terminal_output +15062,11079868,"TERMINAL",0,0,"6\t",,terminal_output +15063,11081000,"TERMINAL",0,0,"7\t",,terminal_output +15064,11081354,"TERMINAL",0,0,"[?25lc[?25h",,terminal_output +15065,11081599,"TERMINAL",0,0,"\r\nmaskgit-token_idxs[0,:,0]: [151 635 635 635 635 635 635 635 635 635 635 635 635 635 635 635]\r\n",,terminal_output +15066,11081835,"TERMINAL",0,0,"final_logits shape: (Array(1, dtype=int32, weak_type=True), Array(920, dtype=int32, weak_type=True), Array(1024, dtype=int32, weak_type=True))\r\nfinal_logits_tmp shape: (Array(1, dtype=int32, weak_type=True), Array(16, dtype=int32, weak_type=True), Array(920, dtype=int32, weak_type=True), Array(1024, dtype=int32, weak_type=True))\r\nsampled_token_idxs shape: (Array(1, dtype=int32, weak_type=True), Array(920, dtype=int32, weak_type=True))\r\nsampled_token_idxs_tmp shape: (Array(1, dtype=int32, weak_type=True), Array(16, dtype=int32, weak_type=True), Array(920, dtype=int32, weak_type=True))\r\nfinal_token_probs shape: (Array(1, dtype=int32, weak_type=True), Array(920, dtype=int32, weak_type=True))\r\nfinal_token_probs_tmp shape: (Array(1, dtype=int32, weak_type=True), Array(16, dtype=int32, weak_type=True), Array(920, dtype=int32, weak_type=True))\r\n",,terminal_output +15067,11081969,"TERMINAL",0,0,"8\t",,terminal_output +15068,11083017,"TERMINAL",0,0,"9\t",,terminal_output +15069,11084072,"TERMINAL",0,0,"20\t",,terminal_output +15070,11085106,"TERMINAL",0,0,"1\t",,terminal_output +15071,11085223,"TERMINAL",0,0,"final_token_probs shape: (Array(1, dtype=int32), Array(920, dtype=int32))\r\nsampled_token_idxs_tmp shape: (Array(1, dtype=int32), Array(16, dtype=int32), Array(920, dtype=int32))\r\nsampled_token_idxs shape: (Array(1, dtype=int32), Array(920, dtype=int32))\r\nfinal_logits_tmp shape: (Array(1, dtype=int32), Array(16, dtype=int32), Array(920, dtype=int32), Array(1024, dtype=int32))\r\nfinal_logits shape: (Array(1, dtype=int32), Array(920, dtype=int32), Array(1024, dtype=int32))\r\nfinal_token_probs_tmp shape: (Array(1, dtype=int32), Array(16, dtype=int32), Array(920, dtype=int32))\r\nmaskgit-sampled-token_idxs[0,:,0]: [151 635 0 0 0 0 0 0 0 0 0 0 0 0 0 0]\r\nEntering jdb:\r\n(jdb) ",,terminal_output +15072,11086221,"TERMINAL",0,0,"3\t",,terminal_output +15073,11087411,"genie.py",0,0,"",python,tab +15074,11087412,"genie.py",10005,0,"",python,selection_mouse +15075,11087413,"TERMINAL",0,0,"47",,terminal_output +15076,11088269,"TERMINAL",0,0,"5\t",,terminal_output +15077,11089303,"TERMINAL",0,0,"6\t",,terminal_output +15078,11090321,"TERMINAL",0,0,"76",,terminal_output +15079,11091152,"genie.py",9988,31,"",python,content +15080,11091209,"genie.py",9996,0,"",python,selection_command +15081,11091351,"TERMINAL",0,0,"8\t",,terminal_output 
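Row 15079 above deletes 31 characters at offset 9988 and illustrates the schema used throughout this log: every content row is a splice of the file buffer, given as (RangeOffset, RangeLength, Text). A sketch of replaying such rows (the helper name is hypothetical):

def apply_change(buffer: str, range_offset: int, range_length: int, text: str) -> str:
    # remove range_length characters starting at range_offset,
    # then insert text in their place
    return buffer[:range_offset] + text + buffer[range_offset + range_length:]

# e.g. row 15079 removes a 31-character span and inserts nothing:
# buffer = apply_change(buffer, 9988, 31, "")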
+15082,11091602,"genie.py",9908,0,"",python,selection_command +15083,11092096,"genie.py",9828,0,"",python,selection_command +15084,11092155,"genie.py",9724,0,"",python,selection_command +15085,11092189,"genie.py",9636,0,"",python,selection_command +15086,11092273,"genie.py",9564,0,"",python,selection_command +15087,11092274,"genie.py",9506,0,"",python,selection_command +15088,11092296,"genie.py",9492,0,"",python,selection_command +15089,11092297,"genie.py",9432,0,"",python,selection_command +15090,11092427,"genie.py",9381,0,"",python,selection_command +15091,11092428,"genie.py",9341,0,"",python,selection_command +15092,11092523,"genie.py",9297,0,"",python,selection_command +15093,11092524,"genie.py",9251,0,"",python,selection_command +15094,11092524,"genie.py",9237,0,"",python,selection_command +15095,11092614,"genie.py",9143,0,"",python,selection_command +15096,11092615,"genie.py",9057,0,"",python,selection_command +15097,11092615,"genie.py",8982,0,"",python,selection_command +15098,11092663,"genie.py",8915,0,"",python,selection_command +15099,11092664,"genie.py",8884,0,"",python,selection_command +15100,11092665,"genie.py",8832,0,"",python,selection_command +15101,11092679,"TERMINAL",0,0,"9\t",,terminal_output +15102,11092832,"genie.py",8823,0,"",python,selection_command +15103,11092956,"genie.py",8753,0,"",python,selection_command +15104,11093092,"genie.py",8683,0,"",python,selection_command +15105,11093228,"genie.py",8610,0,"",python,selection_command +15106,11093405,"TERMINAL",0,0,"30\t",,terminal_output +15107,11093957,"genie.py",8683,0,"",python,selection_command +15108,11094443,"TERMINAL",0,0,"1\t",,terminal_output +15109,11095540,"TERMINAL",0,0,"2\t",,terminal_output +15110,11095696,"genie.py",8675,69," jax.debug.print(""final_logits shape: {}"", final_logits.shape)",python,selection_command +15111,11095949,"genie.py",8675,147," jax.debug.print(""final_logits shape: {}"", final_logits.shape)\n jax.debug.print(""final_logits_tmp shape: {}"", final_logits_tmp.shape)",python,selection_command +15112,11096238,"genie.py",8683,0,"",python,selection_command +15113,11096564,"TERMINAL",0,0,"3\t",,terminal_output +15114,11097001,"genie.py",8753,0,"#",python,content +15115,11097002,"genie.py",8683,0,"#",python,content +15116,11097002,"genie.py",8684,0,"",python,selection_keyboard +15117,11097082,"genie.py",8755,0," ",python,content +15118,11097082,"genie.py",8684,0," ",python,content +15119,11097083,"genie.py",8685,0,"",python,selection_keyboard +15120,11097594,"genie.py",8684,0,"",python,selection_command +15121,11097611,"TERMINAL",0,0,"4\t",,terminal_output +15122,11097968,"genie.py",8756,0,"",python,selection_command +15123,11098084,"genie.py",8827,0,"",python,selection_command +15124,11098241,"genie.py",8837,0,"",python,selection_command +15125,11098381,"genie.py",8889,0,"",python,selection_command +15126,11098505,"genie.py",8920,0,"",python,selection_command +15127,11098637,"genie.py",8987,0,"",python,selection_command +15128,11098652,"TERMINAL",0,0,"57",,terminal_output +15129,11098791,"genie.py",9062,0,"",python,selection_command +15130,11099732,"genie.py",10017,0,"",python,selection_mouse +15131,11099743,"TERMINAL",0,0,"6\t",,terminal_output +15132,11099919,"genie.py",10017,1," ",python,selection_command +15133,11099919,"genie.py",10000,17,"final_token_probs",python,selection_mouse +15134,11100123,"genie.py",10000,18,"final_token_probs ",python,selection_command +15135,11100694,"genie.py",10000,18,"final_token_probs ",python,selection_command 
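The edits above disable the shape prints by inserting "# " at the start of each line. A hedged alternative sketch: gate the prints behind a module-level flag so they can be switched off without touching call sites (the flag and helper names are hypothetical). Because the flag is a plain Python constant, the branch is resolved at trace time and disabled prints add nothing to the jitted computation:

import jax

DEBUG_SHAPES = False  # flip to True to re-enable the shape prints

def dbg_print(fmt, *args):
    # no-op when the flag is off; otherwise forwards to jax.debug.print
    if DEBUG_SHAPES:
        jax.debug.print(fmt, *args)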
+15136,11100736,"TERMINAL",0,0,"7\t",,terminal_output +15137,11101016,"genie.py",10017,0,"",python,selection_command +15138,11101234,"genie.py",9929,0,"",python,selection_command +15139,11101786,"TERMINAL",0,0,"8\t",,terminal_output +15140,11101883,"genie.py",9904,87," jax.debug.print(""final_token_probs_tmp shape: {}"", final_token_probs_tmp.shape)",python,selection_command +15141,11102053,"genie.py",9824,167," jax.debug.print(""final_token_probs shape: {}"", final_token_probs.shape)\n jax.debug.print(""final_token_probs_tmp shape: {}"", final_token_probs_tmp.shape)",python,selection_command +15142,11102814,"TERMINAL",0,0,"9\t",,terminal_output +15143,11103840,"TERMINAL",0,0,"40\t",,terminal_output +15144,11104276,"genie.py",9832,0,"",python,selection_command +15145,11104870,"TERMINAL",0,0,"1\t",,terminal_output +15146,11105107,"genie.py",9912,0,"#",python,content +15147,11105108,"genie.py",9832,0,"#",python,content +15148,11105108,"genie.py",9833,0,"",python,selection_keyboard +15149,11105223,"genie.py",9914,0," ",python,content +15150,11105224,"genie.py",9833,0," ",python,content +15151,11105225,"genie.py",9834,0,"",python,selection_keyboard +15152,11105633,"genie.py",9833,0,"",python,selection_command +15153,11105728,"genie.py",9729,0,"",python,selection_command +15154,11105930,"TERMINAL",0,0,"2\t",,terminal_output +15155,11106239,"genie.py",9641,0,"",python,selection_command +15156,11106287,"genie.py",9569,0,"",python,selection_command +15157,11106299,"genie.py",9511,0,"",python,selection_command +15158,11106376,"genie.py",9497,0,"",python,selection_command +15159,11106377,"genie.py",9437,0,"",python,selection_command +15160,11106407,"genie.py",9386,0,"",python,selection_command +15161,11106461,"genie.py",9346,0,"",python,selection_command +15162,11106624,"genie.py",9302,0,"",python,selection_command +15163,11106752,"genie.py",9256,0,"",python,selection_command +15164,11106901,"genie.py",9242,0,"",python,selection_command +15165,11106962,"TERMINAL",0,0,"3\t",,terminal_output +15166,11107039,"genie.py",9148,0,"",python,selection_command +15167,11107164,"genie.py",9062,0,"",python,selection_command +15168,11107813,"genie.py",9053,85," jax.debug.print(""sampled_token_idxs shape: {}"", sampled_token_idxs.shape)",python,selection_command +15169,11107995,"genie.py",9053,179," jax.debug.print(""sampled_token_idxs shape: {}"", sampled_token_idxs.shape)\n jax.debug.print(""sampled_token_idxs_tmp shape: {}"", sampled_token_idxs_tmp.shape)",python,selection_command +15170,11108026,"TERMINAL",0,0,"4\t",,terminal_output +15171,11108465,"genie.py",9065,0,"",python,selection_command +15172,11109056,"TERMINAL",0,0,"5\t",,terminal_output +15173,11109134,"genie.py",9151,0,"#",python,content +15174,11109135,"genie.py",9065,0,"#",python,content +15175,11109135,"genie.py",9066,0,"",python,selection_keyboard +15176,11109216,"genie.py",9153,0," ",python,content +15177,11109216,"genie.py",9066,0," ",python,content +15178,11109216,"genie.py",9067,0,"",python,selection_keyboard +15179,11109513,"genie.py",9066,0,"",python,selection_command +15180,11109728,"genie.py",9154,0,"",python,selection_command +15181,11110098,"TERMINAL",0,0,"6\t",,terminal_output +15182,11110168,"genie.py",9249,0,"",python,selection_command +15183,11110346,"genie.py",9264,0,"",python,selection_command +15184,11110347,"genie.py",9310,0,"",python,selection_command +15185,11110347,"genie.py",9354,0,"",python,selection_command +15186,11110441,"genie.py",9394,0,"",python,selection_command 
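The sampled_token_idxs prints being commented out here trace values produced by a greedy argmax over the vocabulary axis. A sketch contrasting that with temperature-scaled stochastic sampling: the greedy branch is the one visible in this session, while the categorical branch is an assumption suggested by the step_temp divisor (the session divides logits by step_temp before sampling, so raw logits are expected here):

import jax
import jax.numpy as jnp

def sample_tokens(rng, raw_logits, step_temp=1.0, greedy=True):
    if greedy:
        # deterministic: matches jnp.argmax(final_logits, axis=-1) above
        return jnp.argmax(raw_logits, axis=-1)
    # stochastic: one draw per token from the temperature-scaled categorical
    return jax.random.categorical(rng, raw_logits / step_temp, axis=-1)

idxs = sample_tokens(jax.random.PRNGKey(0), jnp.zeros((1, 920, 1024)))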
+15187,11110441,"genie.py",9445,0,"",python,selection_command +15188,11110442,"genie.py",9504,0,"",python,selection_command +15189,11110572,"genie.py",9519,0,"",python,selection_command +15190,11110573,"genie.py",9577,0,"",python,selection_command +15191,11110573,"genie.py",9649,0,"",python,selection_command +15192,11110683,"genie.py",9737,0,"",python,selection_command +15193,11110684,"genie.py",9841,0,"",python,selection_command +15194,11110684,"genie.py",9923,0,"",python,selection_command +15195,11110684,"genie.py",10013,0,"",python,selection_command +15196,11110719,"genie.py",10048,0,"",python,selection_command +15197,11110720,"genie.py",10084,0,"",python,selection_command +15198,11110721,"genie.py",10168,0,"",python,selection_command +15199,11110799,"genie.py",10237,0,"",python,selection_command +15200,11110941,"genie.py",10300,0,"",python,selection_command +15201,11111132,"genie.py",10237,0,"",python,selection_command +15202,11111142,"TERMINAL",0,0,"7\t",,terminal_output +15203,11111285,"genie.py",10168,0,"",python,selection_command +15204,11111455,"genie.py",10084,0,"",python,selection_command +15205,11111834,"genie.py",10079,0,"",python,selection_command +15206,11112179,"TERMINAL",0,0,"9\t",,terminal_output +15207,11112547,"genie.py",10079,0,"#",python,content +15208,11112547,"genie.py",10080,0,"",python,selection_keyboard +15209,11112676,"genie.py",10080,0," ",python,content +15210,11112677,"genie.py",10081,0,"",python,selection_keyboard +15211,11113047,"genie.py",10080,0,"",python,selection_command +15212,11113199,"genie.py",10166,0,"",python,selection_command +15213,11113243,"TERMINAL",0,0,"50\t",,terminal_output +15214,11113322,"genie.py",10235,0,"",python,selection_command +15215,11113646,"genie.py",10234,0,"",python,selection_command +15216,11114002,"genie.py",10234,0," ",python,content +15217,11114003,"genie.py",10235,0,"",python,selection_keyboard +15218,11114286,"TERMINAL",0,0,"1\t",,terminal_output +15219,11114377,"genie.py",10234,1,"",python,content +15220,11114814,"genie.py",10234,0,"#",python,content +15221,11114815,"genie.py",10235,0,"",python,selection_keyboard +15222,11114925,"genie.py",10235,0," ",python,content +15223,11114926,"genie.py",10236,0,"",python,selection_keyboard +15224,11115124,"genie.py",10235,0,"",python,selection_command +15225,11115308,"TERMINAL",0,0,"2\t",,terminal_output +15226,11116171,"genie.py",10058,0,"",python,selection_mouse +15227,11116353,"TERMINAL",0,0,"3\t",,terminal_output +15228,11116756,"genie.py",10034,0,"",python,selection_mouse +15229,11116810,"genie.py",10033,0,"",python,selection_command +15230,11117253,"genie.py",10121,0,"",python,selection_mouse +15231,11117397,"TERMINAL",0,0,"4\t",,terminal_output +15232,11118173,"genie.py",9853,0,"",python,selection_mouse +15233,11118451,"TERMINAL",0,0,"5\t",,terminal_output +15234,11118660,"genie.py",9750,0,"",python,selection_mouse +15235,11119476,"TERMINAL",0,0,"6\t",,terminal_output +15236,11119753,"genie.py",9749,0,"",python,selection_mouse +15237,11120525,"TERMINAL",0,0,"7\t",,terminal_output +15238,11120685,"genie.py",9660,0,"",python,selection_mouse +15239,11121552,"TERMINAL",0,0,"8\t",,terminal_output +15240,11122269,"genie.py",9661,0,"",python,selection_command +15241,11122597,"TERMINAL",0,0,"9\t",,terminal_output +15242,11123700,"TERMINAL",0,0,"1:00\t",,terminal_output +15243,11124231,"genie.py",9661,0,"_",python,content +15244,11124232,"genie.py",9662,0,"",python,selection_keyboard +15245,11124481,"genie.py",9662,0,"o",python,content 
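The rename to final_token_probs_old begun above keeps the sliced and full-sequence variants distinguishable. One plausible use of the pair, sketched under the shapes printed earlier ((1, 920) versus (1, 16, 920)); the function name and tolerance are hypothetical:

import jax.numpy as jnp

def last_frame_matches(probs_sliced, probs_full, atol=1e-6):
    # probs_sliced: (B, N) from the [:, -1] logits slice;
    # probs_full:   (B, T, N) from the un-sliced logits path
    return jnp.allclose(probs_sliced, probs_full[:, -1], atol=atol)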
+15246,11124482,"genie.py",9663,0,"",python,selection_keyboard +15247,11124612,"genie.py",9663,0,"l",python,content +15248,11124613,"genie.py",9664,0,"",python,selection_keyboard +15249,11124690,"genie.py",9664,0,"d",python,content +15250,11124691,"genie.py",9665,0,"",python,selection_keyboard +15251,11124755,"TERMINAL",0,0,"1\t",,terminal_output +15252,11125014,"genie.py",9664,0,"",python,selection_command +15253,11125227,"genie.py",9756,0,"",python,selection_command +15254,11125541,"genie.py",9755,0,"",python,selection_command +15255,11125685,"genie.py",9754,0,"",python,selection_command +15256,11125747,"TERMINAL",0,0,"2\t",,terminal_output +15257,11125888,"genie.py",9753,0,"",python,selection_command +15258,11126772,"TERMINAL",0,0,"3\t",,terminal_output +15259,11127524,"genie.py",9753,5,"",python,content +15260,11127843,"TERMINAL",0,0,"4\t",,terminal_output +15261,11128853,"TERMINAL",0,0,"5\t",,terminal_output +15262,11129123,"genie.py",9745,0,"",python,selection_mouse +15263,11129272,"genie.py",9736,17,"final_token_probs",python,selection_mouse +15264,11129898,"TERMINAL",0,0,"6\t",,terminal_output +15265,11130587,"genie.py",10019,0,"",python,selection_mouse +15266,11130686,"genie.py",10007,17,"final_token_probs",python,selection_mouse +15267,11130940,"TERMINAL",0,0,"7\t",,terminal_output +15268,11131986,"TERMINAL",0,0,"8\t",,terminal_output +15269,11132319,"genie.py",8937,0,"",python,selection_mouse +15270,11133012,"TERMINAL",0,0,"9\t",,terminal_output +15271,11134145,"TERMINAL",0,0,"10\t",,terminal_output +15272,11134870,"genie.py",9754,0,"",python,selection_mouse +15273,11135169,"TERMINAL",0,0,"1\t",,terminal_output +15274,11135376,"genie.py",9659,0,"",python,selection_mouse +15275,11136193,"TERMINAL",0,0,"3\t",,terminal_output +15276,11137222,"TERMINAL",0,0,"4\t",,terminal_output +15277,11138396,"TERMINAL",0,0,"5\t",,terminal_output +15278,11139290,"TERMINAL",0,0,"6\t",,terminal_output +15279,11139463,"TERMINAL",0,0,"",,terminal_output +15280,11140362,"TERMINAL",0,0,"7\t",,terminal_output +15281,11141415,"TERMINAL",0,0,"8\t",,terminal_output +15282,11142438,"TERMINAL",0,0,"9\t",,terminal_output +15283,11143466,"TERMINAL",0,0,"20\t",,terminal_output +15284,11144495,"TERMINAL",0,0,"1\t",,terminal_output +15285,11145529,"TERMINAL",0,0,"2\t",,terminal_output +15286,11146473,"TERMINAL",0,0,"^C",,terminal_output +15287,11146591,"TERMINAL",0,0,"3\t",,terminal_output +15288,11146611,"TERMINAL",0,0,"--KeyboardInterrupt--\r\nEntering jdb:\r\n(jdb) ",,terminal_output +15289,11147301,"TERMINAL",0,0,"^DERROR:2025-07-03 19:21:24,115:jax._src.debugging:96: jax.debug.callback failed\r\nTraceback (most recent call last):\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/jax/_src/debugging.py"", line 94, in debug_callback_impl\r\n callback(*args)\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/jax/_src/debugging.py"", line 334, in _flat_callback\r\n callback(*args, **kwargs)\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/jax/_src/debugger/core.py"", line 220, in _breakpoint_callback\r\n debugger(frames, thread_id, **kwargs)\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/jax/_src/debugger/cli_debugger.py"", line 167, in run_debugger\r\n CliDebugger(frames, thread_id, **kwargs).run()\r\n File 
""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/jax/_src/debugger/cli_debugger.py"", line 160, in run\r\n self.cmdloop()\r\n File ""/home/hk-project-p0023960/tum_cte0515/.local/share/uv/python/cpython-3.10.18-linux-x86_64-gnu/lib/python3.10/cmd.py"", line 138, in cmdloop\r\n stop = self.onecmd(line)\r\n File ""/home/hk-project-p0023960/tum_cte0515/.local/share/uv/python/cpython-3.10.18-linux-x86_64-gnu/lib/python3.10/cmd.py"", line 217, in onecmd\r\n return func(arg)\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/jax/_src/debugger/cli_debugger.py"", line 146, in do_quit\r\n sys.exit(0)\r\nSystemExit: 0\r\nERROR:jax._src.debugging:jax.debug.callback failed\r\nTraceback (most recent call last):\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/jax/_src/debugging.py"", line 94, in debug_callback_impl\r\n callback(*args)\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/jax/_src/debugging.py"", line 334, in _flat_callback\r\n callback(*args, **kwargs)\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/jax/_src/debugger/core.py"", line 220, in _breakpoint_callback\r\n debugger(frames, thread_id, **kwargs)\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/jax/_src/debugger/cli_debugger.py"", line 167, in run_debugger\r\n CliDebugger(frames, thread_id, **kwargs).run()\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/jax/_src/debugger/cli_debugger.py"", line 160, in run\r\n self.cmdloop()\r\n File ""/home/hk-project-p0023960/tum_cte0515/.local/share/uv/python/cpython-3.10.18-linux-x86_64-gnu/lib/python3.10/cmd.py"", line 138, in cmdloop\r\n stop = self.onecmd(line)\r\n File ""/home/hk-project-p0023960/tum_cte0515/.local/share/uv/python/cpython-3.10.18-linux-x86_64-gnu/lib/python3.10/cmd.py"", line 217, in onecmd\r\n return func(arg)\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/jax/_src/debugger/cli_debugger.py"", line 146, in do_quit\r\n sys.exit(0)\r\nSystemExit: 0\r\nE0703 19:21:24.118260 4014349 pjrt_stream_executor_client.cc:2917] Execution of replica 0 failed: INTERNAL: CpuCallback error calling callback: Traceback (most recent call last):\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/sample.py"", line 157, in \r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/sample.py"", line 118, in _autoreg_sample_mihir\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/jax/_src/traceback_util.py"", line 182, in reraise_with_filtered_traceback\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/flax/linen/module.py"", line 2240, in apply\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/flax/core/scope.py"", line 1079, in wrapper\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/flax/linen/module.py"", line 3022, in scope_fn\r\n File 
""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/flax/linen/module.py"", line 699, in wrapped_module_method\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/flax/linen/module.py"", line 1216, in _call_wrapped_method\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/genie.py"", line 155, in sample_mihir\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/flax/linen/transforms.py"", line 360, in wrapped_fn\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/flax/core/lift.py"", line 328, in wrapper\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/flax/core/lift.py"", line 1031, in inner\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/flax/core/axes_scan.py"", line 179, in scan_fn\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/jax/_src/traceback_util.py"", line 182, in reraise_with_filtered_traceback\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/jax/_src/lax/control_flow/loops.py"", line 355, in scan\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/jax/_src/core.py"", line 531, in bind\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/jax/_src/core.py"", line 551, in _true_bind\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/jax/_src/core.py"", line 556, in bind_with_trace\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/jax/_src/core.py"", line 1060, in process_primitive\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/jax/_src/dispatch.py"", line 88, in apply_primitive\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/jax/_src/traceback_util.py"", line 182, in reraise_with_filtered_traceback\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/jax/_src/pjit.py"", line 334, in cache_miss\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/jax/_src/pjit.py"", line 195, in _python_pjit_helper\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/jax/_src/pjit.py"", line 1884, in _pjit_call_impl_python\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/jax/_src/profiler.py"", line 354, in wrapper\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/jax/_src/interpreters/pxla.py"", line 1297, in __call__\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/jax/_src/callback.py"", line 782, in _wrapped_callback\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/jax/_src/debugging.py"", line 200, in 
_callback\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/jax/_src/debugging.py"", line 97, in debug_callback_impl\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/jax/_src/debugging.py"", line 334, in _flat_callback\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/jax/_src/debugger/core.py"", line 220, in _breakpoint_callback\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/jax/_src/debugger/cli_debugger.py"", line 167, in run_debugger\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/jax/_src/debugger/cli_debugger.py"", line 162, in run\r\n File ""/home/hk-project-p0023960/tum_cte0515/.local/share/uv/python/cpython-3.10.18-linux-x86_64-gnu/lib/python3.10/cmd.py"", line 145, in cmdloop\r\n File ""/home/hk-project-p0023960/tum_cte0515/.local/share/uv/python/cpython-3.10.18-linux-x86_64-gnu/lib/python3.10/cmd.py"", line 217, in onecmd\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/jax/_src/debugger/cli_debugger.py"", line 146, in do_quit\r\nSystemExit: 0\r\njax.errors.SimplifiedTraceback: For simplicity, JAX has removed its internal frames from the traceback of the following exception. Set JAX_TRACEBACK_FILTERING=off to include these.\r\n\r\nThe above exception was the direct cause of the following exception:\r\n\r\nTraceback (most recent call last):\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/sample.py"", line 157, in \r\n vid = _autoreg_sample_mihir(rng, video_batch, action_batch)\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/sample.py"", line 118, in _autoreg_sample_mihir\r\n generated_vid = genie.apply(\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/genie.py"", line 155, in sample_mihir\r\n final_carry, _ = loop_fn(init_carry, jnp.arange(steps))\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/flax/core/axes_scan.py"", line 179, in scan_fn\r\n c, ys = lax.scan(\r\njaxlib._jax.XlaRuntimeError: INTERNAL: CpuCallback error calling callback: Traceback (most recent call last):\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/sample.py"", line 157, in \r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/sample.py"", line 118, in _autoreg_sample_mihir\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/jax/_src/traceback_util.py"", line 182, in reraise_with_filtered_traceback\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/flax/linen/module.py"", line 2240, in apply\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/flax/core/scope.py"", line 1079, in wrapper\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/flax/linen/module.py"", line 3022, in scope_fn\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/flax/linen/module.py"", line 699, in wrapped_module_method\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/flax/linen/module.py"", line 1216, in _call_wrapped_method\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/genie.py"", line 155, in sample_mihir\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/flax/linen/transforms.py"", line 360, in wrapped_fn\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/flax/core/lift.py"", line 328, in wrapper\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/flax/core/lift.py"", line 1031, in inner\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/flax/core/axes_scan.py"", line 179, in scan_fn\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/jax/_src/traceback_util.py"", line 182, in reraise_with_filtered_traceback\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/jax/_src/lax/control_flow/loops.py"", line 355, in scan\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/jax/_src/core.py"", line 531, in bind\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/jax/_src/core.py"", line 551, in _true_bind\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/jax/_src/core.py"", line 556, in bind_with_trace\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/jax/_src/core.py"", line 1060, in process_primitive\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/jax/_src/dispatch.py"", line 88, in apply_primitive\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/jax/_src/traceback_util.py"", line 182, in reraise_with_filtered_traceback\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/jax/_src/pjit.py"", line 334, in cache_miss\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/jax/_src/pjit.py"", line 195, in _python_pjit_helper\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/jax/_src/pjit.py"", line 1884, in _pjit_call_impl_python\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/jax/_src/profiler.py"", line 354, in wrapper\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/jax/_src/interpreters/pxla.py"", line 1297, in __call__\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/jax/_src/callback.py"", line 782, in _wrapped_callback\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/jax/_src/debugging.py"", line 200, in _callback\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/jax/_src/debugging.py"", line 97, in debug_callback_impl\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/jax/_src/debugging.py"", line 334, in _flat_callback\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/jax/_src/debugger/core.py"", line 220, in _breakpoint_callback\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/jax/_src/debugger/cli_debugger.py"", line 167, in run_debugger\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/jax/_src/debugger/cli_debugger.py"", line 162, in run\r\n File ""/home/hk-project-p0023960/tum_cte0515/.local/share/uv/python/cpython-3.10.18-linux-x86_64-gnu/lib/python3.10/cmd.py"", line 145, in cmdloop\r\n File ""/home/hk-project-p0023960/tum_cte0515/.local/share/uv/python/cpython-3.10.18-linux-x86_64-gnu/lib/python3.10/cmd.py"", line 217, in onecmd\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/jax/_src/debugger/cli_debugger.py"", line 146, in do_quit\r\nSystemExit: 0\r\n",,terminal_output
+15290,11147661,"TERMINAL",0,0,"4\t",,terminal_output +15291,11148587,"TERMINAL",0,0,"]0;tum_cte0515@hkn0901:~/Projects/jafar[?2004h(jafar) [tum_cte0515@hkn0901 jafar]$ ",,terminal_output +15292,11148683,"TERMINAL",0,0,"5\t",,terminal_output +15293,11149050,"TERMINAL",0,0,"sh scripts_horeka/overfit_sample_tiny/sample.sh ",,terminal_output +15294,11149712,"TERMINAL",0,0,"6\t",,terminal_output +15295,11150874,"TERMINAL",0,0,"7\t",,terminal_output +15296,11151868,"genie.py",0,0,"",python,tab +15297,11151869,"genie.py",6596,0,"",python,selection_mouse +15298,11151919,"genie.py",6595,0,"",python,selection_command +15299,11151920,"TERMINAL",0,0,"86",,terminal_output +15300,11152812,"TERMINAL",0,0,"9\t",,terminal_output +15301,11153845,"TERMINAL",0,0,"30\t",,terminal_output +15302,11154885,"TERMINAL",0,0,"1\t",,terminal_output +15303,11155925,"TERMINAL",0,0,"2\t",,terminal_output +15304,11156969,"TERMINAL",0,0,"3\t",,terminal_output +15305,11157311,"TERMINAL",0,0,"\r\n[?2004l\r",,terminal_output +15306,11157424,"TERMINAL",0,0,"Sampling from checkpoint: /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/3309699/genie_1751384516_74500/\r\n",,terminal_output +15307,11158009,"TERMINAL",0,0,"4\t",,terminal_output +15308,11159132,"TERMINAL",0,0,"5\t",,terminal_output +15309,11160102,"TERMINAL",0,0,"6\t",,terminal_output +15310,11160461,"TERMINAL",0,0,"2025-07-03 19:21:37.288294: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output
+15311,11161179,"TERMINAL",0,0,"7\t",,terminal_output +15312,11162186,"TERMINAL",0,0,"9\t",,terminal_output +15313,11163227,"TERMINAL",0,0,"40\t",,terminal_output +15314,11164354,"TERMINAL",0,0,"1\t",,terminal_output +15315,11165377,"TERMINAL",0,0,"2\t",,terminal_output +15316,11166402,"TERMINAL",0,0,"3\t",,terminal_output +15317,11167426,"TERMINAL",0,0,"4\t",,terminal_output +15318,11168494,"TERMINAL",0,0,"5\t",,terminal_output +15319,11169474,"TERMINAL",0,0,"6\t",,terminal_output +15320,11170600,"TERMINAL",0,0,"7\t",,terminal_output +15321,11171629,"TERMINAL",0,0,"8\t",,terminal_output +15322,11172648,"TERMINAL",0,0,"9\t",,terminal_output +15323,11173365,"TERMINAL",0,0,"2025-07-03 19:21:50.167085: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +15324,11173625,"TERMINAL",0,0,"50\t",,terminal_output +15325,11174696,"TERMINAL",0,0,"1\t",,terminal_output +15326,11175721,"TERMINAL",0,0,"2\t",,terminal_output +15327,11176715,"TERMINAL",0,0,"3\t",,terminal_output +15328,11177768,"TERMINAL",0,0,"4\t",,terminal_output +15329,11178895,"TERMINAL",0,0,"5\t",,terminal_output +15330,11179831,"TERMINAL",0,0,"6\t",,terminal_output +15331,11180873,"TERMINAL",0,0,"7\t",,terminal_output +15332,11181922,"TERMINAL",0,0,"8\t",,terminal_output +15333,11182976,"TERMINAL",0,0,"9\t",,terminal_output +15334,11183509,"TERMINAL",0,0,"2025-07-03 19:22:00.403607: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +15335,11184000,"TERMINAL",0,0,"2:00\t",,terminal_output +15336,11185049,"TERMINAL",0,0,"1\t",,terminal_output +15337,11186081,"TERMINAL",0,0,"2\t",,terminal_output +15338,11187157,"TERMINAL",0,0,"3\t",,terminal_output +15339,11188214,"TERMINAL",0,0,"5\t",,terminal_output +15340,11189648,"TERMINAL",0,0,"6\t",,terminal_output +15341,11190774,"TERMINAL",0,0,"7\t",,terminal_output +15342,11191806,"TERMINAL",0,0,"8\t",,terminal_output +15343,11192827,"TERMINAL",0,0,"9\t",,terminal_output +15344,11193845,"TERMINAL",0,0,"10\t",,terminal_output +15345,11194873,"TERMINAL",0,0,"1\t",,terminal_output +15346,11195898,"TERMINAL",0,0,"2\t",,terminal_output +15347,11196621,"TERMINAL",0,0,"2025-07-03 19:22:13.486005: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +15348,11196935,"TERMINAL",0,0,"3\t",,terminal_output +15349,11197980,"TERMINAL",0,0,"4\t",,terminal_output +15350,11199027,"TERMINAL",0,0,"5\t",,terminal_output +15351,11199137,"TERMINAL",0,0,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/orbax/checkpoint/_src/serialization/type_handlers.py:1251: UserWarning: Sharding info not provided when restoring. Populating sharding info from sharding file. Please note restoration time will be slightly increased due to reading from file. Note also that this option is unsafe when restoring on a different topology than the checkpoint was saved with.\r\n warnings.warn(\r\n",,terminal_output
+15352,11200091,"TERMINAL",0,0,"6\t",,terminal_output +15353,11201116,"TERMINAL",0,0,"7\t",,terminal_output +15354,11202242,"TERMINAL",0,0,"9\t",,terminal_output +15355,11203266,"TERMINAL",0,0,"20\t",,terminal_output +15356,11204205,"TERMINAL",0,0,"17",,terminal_output +15357,11205314,"TERMINAL",0,0,"2\t",,terminal_output +15358,11206339,"TERMINAL",0,0,"3\t",,terminal_output +15359,11207362,"TERMINAL",0,0,"4\t",,terminal_output +15360,11207373,"TERMINAL",0,0,"2025-07-03 19:22:24.239497: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +15361,11208386,"TERMINAL",0,0,"5\t",,terminal_output +15362,11209513,"TERMINAL",0,0,"6\t",,terminal_output +15363,11210537,"TERMINAL",0,0,"76",,terminal_output +15364,11210547,"TERMINAL",0,0,"2025-07-03 19:22:27.382758: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +15365,11211488,"TERMINAL",0,0,"8\t",,terminal_output +15366,11212604,"TERMINAL",0,0,"9\t",,terminal_output +15367,11213608,"TERMINAL",0,0,"30\t",,terminal_output +15368,11214607,"TERMINAL",0,0,"1\t",,terminal_output +15369,11215673,"TERMINAL",0,0,"2\t",,terminal_output +15370,11216685,"TERMINAL",0,0,"3\t",,terminal_output +15371,11217090,"TERMINAL",0,0,"2025-07-03 19:22:33.913354: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +15372,11217819,"TERMINAL",0,0,"4\t",,terminal_output +15373,11218832,"TERMINAL",0,0,"5\t",,terminal_output +15374,11219204,"TERMINAL",0,0,"2025-07-03 19:22:36.046624: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output
+15375,11219826,"TERMINAL",0,0,"6\t",,terminal_output +15376,11220441,"genie.py",0,0,"",python,tab +15377,11220442,"genie.py",5973,0,"",python,selection_mouse +15378,11220533,"genie.py",5972,0,"",python,selection_command +15379,11220616,"TERMINAL",0,0,"token_idxs shape: (1, 1, 920)\r\n",,terminal_output +15380,11220850,"TERMINAL",0,0,"7\t",,terminal_output +15381,11221883,"TERMINAL",0,0,"8\t",,terminal_output +15382,11222928,"TERMINAL",0,0,"9\t",,terminal_output +15383,11223971,"TERMINAL",0,0,"40\t",,terminal_output +15384,11225017,"TERMINAL",0,0,"1\t",,terminal_output +15385,11226060,"TERMINAL",0,0,"2\t",,terminal_output +15386,11227346,"TERMINAL",0,0,"3\t",,terminal_output +15387,11228157,"TERMINAL",0,0,"5\t",,terminal_output +15388,11229212,"TERMINAL",0,0,"6\t",,terminal_output +15389,11230247,"TERMINAL",0,0,"7\t",,terminal_output +15390,11231323,"TERMINAL",0,0,"8\t",,terminal_output +15391,11232348,"TERMINAL",0,0,"9\t",,terminal_output +15392,11233476,"TERMINAL",0,0,"50\t",,terminal_output +15393,11234499,"TERMINAL",0,0,"1\t",,terminal_output +15394,11235522,"TERMINAL",0,0,"2\t",,terminal_output +15395,11236547,"TERMINAL",0,0,"3\t",,terminal_output +15396,11237534,"TERMINAL",0,0,"4\t",,terminal_output +15397,11238596,"TERMINAL",0,0,"5\t",,terminal_output +15398,11239747,"TERMINAL",0,0,"6\t",,terminal_output +15399,11240745,"TERMINAL",0,0,"7\t",,terminal_output +15400,11241769,"TERMINAL",0,0,"8\t",,terminal_output +15401,11242796,"TERMINAL",0,0,"9\t",,terminal_output +15402,11243919,"TERMINAL",0,0,"3:00\t",,terminal_output +15403,11244864,"TERMINAL",0,0,"1\t",,terminal_output +15404,11245907,"TERMINAL",0,0,"2\t",,terminal_output +15405,11246955,"TERMINAL",0,0,"3\t",,terminal_output +15406,11248002,"TERMINAL",0,0,"4\t",,terminal_output +15407,11249049,"TERMINAL",0,0,"5\t",,terminal_output +15408,11250167,"TERMINAL",0,0,"6\t",,terminal_output +15409,11251125,"TERMINAL",0,0,"7\t",,terminal_output +15410,11252214,"TERMINAL",0,0,"9\t",,terminal_output +15411,11253237,"TERMINAL",0,0,"10\t",,terminal_output +15412,11254261,"TERMINAL",0,0,"17",,terminal_output +15413,11255298,"TERMINAL",0,0,"2\t",,terminal_output +15414,11256335,"TERMINAL",0,0,"3\t",,terminal_output +15415,11257437,"TERMINAL",0,0,"4\t",,terminal_output +15416,11258468,"TERMINAL",0,0,"5\t",,terminal_output +15417,11259484,"TERMINAL",0,0,"6\t",,terminal_output +15418,11260508,"TERMINAL",0,0,"7\t",,terminal_output +15419,11261533,"TERMINAL",0,0,"88",,terminal_output +15420,11262661,"TERMINAL",0,0,"973",,terminal_output +15421,11263617,"TERMINAL",0,0,"20\t",,terminal_output +15422,11264655,"TERMINAL",0,0,"1\t",,terminal_output +15423,11265731,"TERMINAL",0,0,"2\t",,terminal_output +15424,11266742,"TERMINAL",0,0,"3\t",,terminal_output +15425,11267881,"TERMINAL",0,0,"4\t",,terminal_output +15426,11268905,"TERMINAL",0,0,"5\t",,terminal_output +15427,11269868,"TERMINAL",0,0,"6\t",,terminal_output +15428,11270914,"TERMINAL",0,0,"71",,terminal_output +15429,11271957,"TERMINAL",0,0,"8\t",,terminal_output +15430,11272998,"TERMINAL",0,0,"9\t",,terminal_output +15431,11274029,"TERMINAL",0,0,"30\t",,terminal_output +15432,11275075,"TERMINAL",0,0,"1\t",,terminal_output +15433,11276164,"TERMINAL",0,0,"2\t",,terminal_output +15434,11277200,"TERMINAL",0,0,"4\t",,terminal_output +15435,11278194,"TERMINAL",0,0,"5\t",,terminal_output +15436,11279249,"TERMINAL",0,0,"6\t",,terminal_output
+15437,11280376,"TERMINAL",0,0,"7\t",,terminal_output +15438,11281305,"TERMINAL",0,0,"8\t",,terminal_output +15439,11282352,"TERMINAL",0,0,"9\t",,terminal_output +15440,11283446,"TERMINAL",0,0,"40\t",,terminal_output +15441,11284470,"TERMINAL",0,0,"1\t",,terminal_output +15442,11285494,"TERMINAL",0,0,"2\t",,terminal_output +15443,11286527,"TERMINAL",0,0,"3\t",,terminal_output +15444,11287766,"TERMINAL",0,0,"4\t",,terminal_output +15445,11287947,"TERMINAL",0,0,"SSIM: 0.3250111937522888\r\n",,terminal_output +15446,11288618,"TERMINAL",0,0,"5\t",,terminal_output +15447,11289793,"TERMINAL",0,0,"6\t",,terminal_output +15448,11290456,"TERMINAL",0,0,"]0;tum_cte0515@hkn0901:~/Projects/jafar[?2004h(jafar) [tum_cte0515@hkn0901 jafar]$ ",,terminal_output +15449,11290757,"TERMINAL",0,0,"7\t",,terminal_output +15450,11291928,"TERMINAL",0,0,"8\t",,terminal_output +15451,11292876,"TERMINAL",0,0,"9\t",,terminal_output +15452,11293860,"TERMINAL",0,0,"50\t",,terminal_output +15453,11294990,"genie.py",0,0,"",python,tab +15454,11294991,"genie.py",3862,0,"",python,selection_mouse +15455,11295089,"genie.py",3861,0,"",python,selection_command +15456,11295090,"TERMINAL",0,0,"1\t",,terminal_output +15457,11296042,"TERMINAL",0,0,"2\t",,terminal_output +15458,11296824,"genie.py",3862,0,"\n ",python,content +15459,11296985,"TERMINAL",0,0,"3\t",,terminal_output +15460,11298009,"TERMINAL",0,0,"4\t",,terminal_output +15461,11298316,"genie.py",3875,0,"p",python,content +15462,11298317,"genie.py",3876,0,"",python,selection_keyboard +15463,11298423,"genie.py",3876,0,"r",python,content +15464,11298424,"genie.py",3877,0,"",python,selection_keyboard +15465,11298533,"genie.py",3877,0,"i",python,content +15466,11298534,"genie.py",3878,0,"",python,selection_keyboard +15467,11298559,"genie.py",3878,0,"n",python,content +15468,11298560,"genie.py",3879,0,"",python,selection_keyboard +15469,11298640,"genie.py",3879,0,"t",python,content +15470,11298641,"genie.py",3880,0,"",python,selection_keyboard +15471,11299234,"TERMINAL",0,0,"5\t",,terminal_output +15472,11299454,"genie.py",3880,0,"()",python,content +15473,11299454,"genie.py",3881,0,"",python,selection_keyboard +15474,11300055,"genie.py",3881,0,"""""",python,content +15475,11300056,"genie.py",3882,0,"",python,selection_keyboard +15476,11300126,"TERMINAL",0,0,"6\t",,terminal_output +15477,11300274,"genie.py",3882,0,"s",python,content +15478,11300275,"genie.py",3883,0,"",python,selection_keyboard +15479,11300400,"genie.py",3883,0,"a",python,content +15480,11300400,"genie.py",3884,0,"",python,selection_keyboard +15481,11300495,"genie.py",3884,0,"m",python,content +15482,11300495,"genie.py",3885,0,"",python,selection_keyboard +15483,11300666,"genie.py",3885,0,"p",python,content +15484,11300666,"genie.py",3886,0,"",python,selection_keyboard +15485,11300739,"genie.py",3886,0,"l",python,content +15486,11300740,"genie.py",3887,0,"",python,selection_keyboard +15487,11300973,"genie.py",3887,0,"i",python,content +15488,11300973,"genie.py",3888,0,"",python,selection_keyboard +15489,11301051,"genie.py",3888,0,"n",python,content +15490,11301051,"genie.py",3889,0,"",python,selection_keyboard +15491,11301122,"genie.py",3889,0,"g",python,content +15492,11301122,"genie.py",3890,0,"",python,selection_keyboard +15493,11301157,"TERMINAL",0,0,"7\t",,terminal_output +15494,11301303,"genie.py",3890,0," ",python,content +15495,11301304,"genie.py",3891,0,"",python,selection_keyboard +15496,11302165,"TERMINAL",0,0,"9\t",,terminal_output +15497,11302541,"genie.py",3891,0,"f",python,content 
+15498,11302542,"genie.py",3892,0,"",python,selection_keyboard +15499,11302649,"genie.py",3892,0,"r",python,content +15500,11302650,"genie.py",3893,0,"",python,selection_keyboard +15501,11302842,"genie.py",3893,0,"a",python,content +15502,11302843,"genie.py",3894,0,"",python,selection_keyboard +15503,11302977,"genie.py",3894,0,"m",python,content +15504,11302978,"genie.py",3895,0,"",python,selection_keyboard +15505,11303086,"genie.py",3895,0,"e",python,content +15506,11303087,"genie.py",3896,0,"",python,selection_keyboard +15507,11303189,"TERMINAL",0,0,"4:00\t",,terminal_output +15508,11304249,"TERMINAL",0,0,"1\t",,terminal_output +15509,11305360,"TERMINAL",0,0,"2\t",,terminal_output +15510,11306296,"genie.py",3881,0,"",python,selection_mouse +15511,11306347,"TERMINAL",0,0,"3\t",,terminal_output +15512,11306709,"genie.py",3881,0,"f",python,content +15513,11306710,"genie.py",3882,0,"",python,selection_keyboard +15514,11307407,"TERMINAL",0,0,"4\t",,terminal_output +15515,11307612,"genie.py",3897,0,"",python,selection_mouse +15516,11308116,"genie.py",3897,0," ",python,content +15517,11308117,"genie.py",3898,0,"",python,selection_keyboard +15518,11308397,"TERMINAL",0,0,"5\t",,terminal_output +15519,11309260,"genie.py",3898,0,"{}",python,content +15520,11309261,"genie.py",3899,0,"",python,selection_keyboard +15521,11309466,"TERMINAL",0,0,"6\t",,terminal_output +15522,11309695,"genie.py",3899,0,"T",python,content +15523,11309695,"genie.py",3900,0,"",python,selection_keyboard +15524,11310432,"genie.py",3899,0,"",python,selection_command +15525,11310503,"TERMINAL",0,0,"766",,terminal_output +15526,11311607,"TERMINAL",0,0,"8\t",,terminal_output +15527,11312651,"TERMINAL",0,0,"9\t",,terminal_output +15528,11313747,"TERMINAL",0,0,"10\t",,terminal_output +15529,11314861,"TERMINAL",0,0,"1\t",,terminal_output +15530,11315804,"TERMINAL",0,0,"2\t",,terminal_output +15531,11316830,"TERMINAL",0,0,"3\t",,terminal_output +15532,11317853,"TERMINAL",0,0,"4\t",,terminal_output +15533,11318890,"TERMINAL",0,0,"5\t",,terminal_output +15534,11319907,"TERMINAL",0,0,"6\t",,terminal_output +15535,11320927,"TERMINAL",0,0,"7\t",,terminal_output +15536,11322009,"TERMINAL",0,0,"8\t",,terminal_output +15537,11323002,"TERMINAL",0,0,"9\t",,terminal_output +15538,11324039,"TERMINAL",0,0,"20\t",,terminal_output +15539,11325124,"TERMINAL",0,0,"1\t",,terminal_output +15540,11326120,"TERMINAL",0,0,"2\t",,terminal_output +15541,11327172,"TERMINAL",0,0,"4\t",,terminal_output +15542,11328298,"TERMINAL",0,0,"5\t",,terminal_output +15543,11329322,"TERMINAL",0,0,"6\t",,terminal_output +15544,11330346,"TERMINAL",0,0,"7\t",,terminal_output +15545,11331370,"TERMINAL",0,0,"8\t",,terminal_output +15546,11332364,"TERMINAL",0,0,"9\t",,terminal_output +15547,11333463,"TERMINAL",0,0,"30\t",,terminal_output +15548,11334545,"TERMINAL",0,0,"1\t",,terminal_output +15549,11335582,"TERMINAL",0,0,"2\t",,terminal_output +15550,11336578,"TERMINAL",0,0,"3\t",,terminal_output +15551,11337617,"TERMINAL",0,0,"4\t",,terminal_output +15552,11338602,"TERMINAL",0,0,"5\t",,terminal_output +15553,11339665,"TERMINAL",0,0,"6\t",,terminal_output +15554,11340742,"TERMINAL",0,0,"7\t",,terminal_output +15555,11341595,"genie.py",0,0,"",python,tab +15556,11341851,"TERMINAL",0,0,"8\t",,terminal_output +15557,11342893,"TERMINAL",0,0,"9\t",,terminal_output +15558,11343904,"TERMINAL",0,0,"40\t",,terminal_output +15559,11345135,"TERMINAL",0,0,"1\t",,terminal_output +15560,11345913,"TERMINAL",0,0,"23",,terminal_output +15561,11346935,"TERMINAL",0,0,"3\t",,terminal_output 
+15562,11347976,"TERMINAL",0,0,"4\t",,terminal_output +15563,11349009,"TERMINAL",0,0,"5\t",,terminal_output +15564,11349713,"genie.py",10120,0,"",python,selection_mouse +15565,11350109,"TERMINAL",0,0,"6\t",,terminal_output +15566,11351010,"genie.py",10119,0,"",python,selection_mouse +15567,11351101,"TERMINAL",0,0,"7\t",,terminal_output +15568,11351788,"genie.py",10119,1,"",python,content +15569,11351892,"genie.py",10119,1,"",python,content +15570,11352157,"TERMINAL",0,0,"9\t",,terminal_output +15571,11352892,"genie.py",10203,0,"",python,selection_command +15572,11353025,"genie.py",10272,0,"",python,selection_command +15573,11353218,"TERMINAL",0,0,"50\t",,terminal_output +15574,11353848,"genie.py",10272,1,"",python,content +15575,11353948,"genie.py",10272,1,"",python,content +15576,11354271,"TERMINAL",0,0,"1\t",,terminal_output +15577,11355412,"TERMINAL",0,0,"2\t",,terminal_output +15578,11356095,"TERMINAL",0,0,"sh scripts_horeka/overfit_sample_tiny/sample.sh ",,terminal_output +15579,11356355,"TERMINAL",0,0,"3\t",,terminal_output +15580,11357200,"TERMINAL",0,0,"\r\n[?2004l\r",,terminal_output +15581,11357324,"TERMINAL",0,0,"Sampling from checkpoint: /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/3309699/genie_1751384516_74500/\r\n",,terminal_output +15582,11357385,"TERMINAL",0,0,"4\t",,terminal_output +15583,11358447,"TERMINAL",0,0,"5\t",,terminal_output +15584,11359523,"TERMINAL",0,0,"6\t",,terminal_output +15585,11360247,"TERMINAL",0,0,"2025-07-03 19:24:57.095025: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +15586,11360555,"TERMINAL",0,0,"7\t",,terminal_output +15587,11361565,"TERMINAL",0,0,"8\t",,terminal_output +15588,11362614,"TERMINAL",0,0,"9\t",,terminal_output +15589,11363617,"TERMINAL",0,0,"5:00\t",,terminal_output +15590,11364754,"TERMINAL",0,0,"1\t",,terminal_output +15591,11365703,"TERMINAL",0,0,"2\t",,terminal_output +15592,11366768,"TERMINAL",0,0,"3\t",,terminal_output +15593,11367825,"TERMINAL",0,0,"4\t",,terminal_output +15594,11368951,"TERMINAL",0,0,"5\t",,terminal_output +15595,11369980,"TERMINAL",0,0,"6\t",,terminal_output +15596,11371002,"TERMINAL",0,0,"7\t",,terminal_output +15597,11371985,"TERMINAL",0,0,"8\t",,terminal_output +15598,11373010,"TERMINAL",0,0,"9\t",,terminal_output +15599,11373355,"TERMINAL",0,0,"2025-07-03 19:25:10.180518: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. 
Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +15600,11374078,"TERMINAL",0,0,"10\t",,terminal_output +15601,11375198,"TERMINAL",0,0,"1\t",,terminal_output +15602,11376138,"TERMINAL",0,0,"3\t",,terminal_output +15603,11377247,"TERMINAL",0,0,"4\t",,terminal_output +15604,11378270,"TERMINAL",0,0,"5\t",,terminal_output +15605,11379295,"TERMINAL",0,0,"6\t",,terminal_output +15606,11380318,"TERMINAL",0,0,"7\t",,terminal_output +15607,11381444,"TERMINAL",0,0,"8\t",,terminal_output +15608,11382393,"TERMINAL",0,0,"9\t",,terminal_output +15609,11383441,"TERMINAL",0,0,"20\t",,terminal_output +15610,11383697,"TERMINAL",0,0,"2025-07-03 19:25:20.583743: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +15611,11384516,"TERMINAL",0,0,"1\t",,terminal_output +15612,11385519,"TERMINAL",0,0,"2\t",,terminal_output +15613,11386565,"TERMINAL",0,0,"3\t",,terminal_output +15614,11387588,"TERMINAL",0,0,"4\t",,terminal_output +15615,11388715,"TERMINAL",0,0,"5\t",,terminal_output +15616,11389739,"TERMINAL",0,0,"6\t",,terminal_output +15617,11390763,"TERMINAL",0,0,"7711",,terminal_output +15618,11391788,"TERMINAL",0,0,"8\t",,terminal_output +15619,11392807,"TERMINAL",0,0,"9\t",,terminal_output +15620,11393943,"TERMINAL",0,0,"30\t",,terminal_output +15621,11394962,"TERMINAL",0,0,"1\t",,terminal_output +15622,11395986,"TERMINAL",0,0,"2\t",,terminal_output +15623,11396751,"TERMINAL",0,0,"2025-07-03 19:25:33.652271: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +15624,11396944,"TERMINAL",0,0,"3\t",,terminal_output +15625,11397975,"TERMINAL",0,0,"4\t",,terminal_output +15626,11399019,"TERMINAL",0,0,"5\t",,terminal_output +15627,11399468,"TERMINAL",0,0,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/orbax/checkpoint/_src/serialization/type_handlers.py:1251: UserWarning: Sharding info not provided when restoring. Populating sharding info from sharding file. Please note restoration time will be slightly increased due to reading from file. Note also that this option is unsafe when restoring on a different topology than the checkpoint was saved with.\r\n warnings.warn(\r\n",,terminal_output +15628,11400141,"TERMINAL",0,0,"6\t",,terminal_output +15629,11401110,"TERMINAL",0,0,"7\t",,terminal_output +15630,11402262,"TERMINAL",0,0,"9\t",,terminal_output +15631,11403256,"TERMINAL",0,0,"40\t",,terminal_output +15632,11404280,"TERMINAL",0,0,"1\t",,terminal_output +15633,11405304,"TERMINAL",0,0,"2\t",,terminal_output +15634,11406328,"TERMINAL",0,0,"3\t",,terminal_output +15635,11407352,"TERMINAL",0,0,"4\t",,terminal_output +15636,11407701,"TERMINAL",0,0,"2025-07-03 19:25:44.604002: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. 
Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +15637,11408480,"TERMINAL",0,0,"5\t",,terminal_output +15638,11409502,"TERMINAL",0,0,"6\t",,terminal_output +15639,11410527,"TERMINAL",0,0,"7\t",,terminal_output +15640,11410937,"TERMINAL",0,0,"2025-07-03 19:25:47.815093: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +15641,11411551,"TERMINAL",0,0,"8\t",,terminal_output +15642,11412677,"TERMINAL",0,0,"9\t",,terminal_output +15643,11413621,"TERMINAL",0,0,"50\t",,terminal_output +15644,11414724,"TERMINAL",0,0,"1\t",,terminal_output +15645,11415749,"TERMINAL",0,0,"2\t",,terminal_output +15646,11416749,"TERMINAL",0,0,"3\t",,terminal_output +15647,11417446,"TERMINAL",0,0,"2025-07-03 19:25:54.348020: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +15648,11417900,"TERMINAL",0,0,"4\t",,terminal_output +15649,11418923,"TERMINAL",0,0,"5\t",,terminal_output +15650,11419436,"TERMINAL",0,0,"2025-07-03 19:25:56.255279: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +15651,11419957,"TERMINAL",0,0,"6\t",,terminal_output +15652,11420869,"TERMINAL",0,0,"token_idxs shape: (1, 1, 920)\r\n",,terminal_output +15653,11420929,"TERMINAL",0,0,"7\t",,terminal_output +15654,11421095,"TERMINAL",0,0,"sampling frame 1\r\n",,terminal_output +15655,11421997,"TERMINAL",0,0,"8\t",,terminal_output +15656,11422992,"TERMINAL",0,0,"9\t",,terminal_output +15657,11424005,"TERMINAL",0,0,"6:00\t",,terminal_output +15658,11425046,"TERMINAL",0,0,"1\t",,terminal_output +15659,11426099,"TERMINAL",0,0,"2\t",,terminal_output +15660,11427128,"TERMINAL",0,0,"3\t",,terminal_output +15661,11427730,"TERMINAL",0,0,"maskgit-sampled-token_idxs[0,:,0]: [151 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0]\r\nmaskgit-token_idxs[0,:,0]: [151 635 635 635 635 635 635 635 635 635 635 635 635 635 635 635]\r\nsampling frame 2\r\n",,terminal_output +15662,11428243,"TERMINAL",0,0,"5\t",,terminal_output +15663,11429196,"TERMINAL",0,0,"6\t",,terminal_output +15664,11430701,"TERMINAL",0,0,"766",,terminal_output +15665,11431246,"TERMINAL",0,0,"maskgit-sampled-token_idxs[0,:,0]: [151 635 0 0 0 0 0 0 0 0 0 0 0 0 0 0]\r\nmaskgit-token_idxs[0,:,0]: [151 635 635 635 635 635 635 635 635 635 635 635 635 635 635 635]\r\nsampling frame 3\r\n",,terminal_output +15666,11431724,"TERMINAL",0,0,"8\t",,terminal_output +15667,11432748,"TERMINAL",0,0,"9\t",,terminal_output +15668,11433772,"TERMINAL",0,0,"10\t",,terminal_output +15669,11434898,"TERMINAL",0,0,"1\t",,terminal_output +15670,11434968,"TERMINAL",0,0,"maskgit-sampled-token_idxs[0,:,0]: [151 635 635 0 0 0 0 0 0 0 0 0 0 0 0 0]\r\nmaskgit-token_idxs[0,:,0]: [151 635 635 635 635 635 635 635 635 635 635 635 635 635 635 635]\r\nsampling frame 4\r\n",,terminal_output 
+15671,11435940,"TERMINAL",0,0,"2\t",,terminal_output +15672,11436898,"TERMINAL",0,0,"3\t",,terminal_output +15673,11437983,"TERMINAL",0,0,"4\t",,terminal_output +15674,11438548,"TERMINAL",0,0,"maskgit-sampled-token_idxs[0,:,0]: [151 635 635 635 0 0 0 0 0 0 0 0 0 0 0 0]\r\nmaskgit-token_idxs[0,:,0]: [151 635 635 635 635 635 635 635 635 635 635 635 635 635 635 635]\r\nsampling frame 5\r\n",,terminal_output +15675,11438981,"TERMINAL",0,0,"5\t",,terminal_output +15676,11440036,"TERMINAL",0,0,"6\t",,terminal_output +15677,11441083,"TERMINAL",0,0,"7\t",,terminal_output +15678,11442105,"TERMINAL",0,0,"maskgit-sampled-token_idxs[0,:,0]: [151 635 635 635 635 0 0 0 0 0 0 0 0 0 0 0]\r\nmaskgit-token_idxs[0,:,0]: [151 635 635 635 635 635 635 635 635 635 635 635 635 635 635 635]\r\nsampling frame 6\r\n",,terminal_output +15679,11442105,"TERMINAL",0,0,"8\t",,terminal_output +15680,11443192,"TERMINAL",0,0,"20\t",,terminal_output +15681,11444217,"TERMINAL",0,0,"1\t",,terminal_output +15682,11445240,"TERMINAL",0,0,"2\t",,terminal_output +15683,11445605,"TERMINAL",0,0,"maskgit-sampled-token_idxs[0,:,0]: [151 635 635 635 635 635 0 0 0 0 0 0 0 0 0 0]\r\nmaskgit-token_idxs[0,:,0]: [151 635 635 635 635 635 635 635 635 635 635 635 635 635 635 635]\r\nsampling frame 7\r\n",,terminal_output +15684,11446264,"TERMINAL",0,0,"3\t",,terminal_output +15685,11447391,"TERMINAL",0,0,"4\t",,terminal_output +15686,11448417,"TERMINAL",0,0,"5\t",,terminal_output +15687,11449336,"TERMINAL",0,0,"maskgit-sampled-token_idxs[0,:,0]: [151 635 635 635 635 635 635 0 0 0 0 0 0 0 0 0]\r\nmaskgit-token_idxs[0,:,0]: [151 635 635 635 635 635 635 635 635 635 635 635 635 635 635 635]\r\nsampling frame 8\r\n",,terminal_output +15688,11449400,"TERMINAL",0,0,"6\t",,terminal_output +15689,11450600,"TERMINAL",0,0,"7\t",,terminal_output +15690,11451544,"TERMINAL",0,0,"8\t",,terminal_output +15691,11452594,"TERMINAL",0,0,"9\t",,terminal_output +15692,11452794,"TERMINAL",0,0,"maskgit-sampled-token_idxs[0,:,0]: [151 635 635 635 635 635 635 635 0 0 0 0 0 0 0 0]\r\nmaskgit-token_idxs[0,:,0]: [151 635 635 635 635 635 635 635 635 635 635 635 635 635 635 635]\r\nsampling frame 9\r\n",,terminal_output +15693,11453566,"TERMINAL",0,0,"30\t",,terminal_output +15694,11454661,"TERMINAL",0,0,"1\t",,terminal_output +15695,11455649,"TERMINAL",0,0,"2\t",,terminal_output +15696,11456373,"TERMINAL",0,0,"maskgit-sampled-token_idxs[0,:,0]: [151 635 635 635 635 635 635 635 635 0 0 0 0 0 0 0]\r\nmaskgit-token_idxs[0,:,0]: [151 635 635 635 635 635 635 635 635 635 635 635 635 635 635 635]\r\nsampling frame 10\r\n",,terminal_output +15697,11456681,"TERMINAL",0,0,"3\t",,terminal_output +15698,11457734,"TERMINAL",0,0,"4\t",,terminal_output +15699,11458859,"TERMINAL",0,0,"5\t",,terminal_output +15700,11459884,"TERMINAL",0,0,"6\t",,terminal_output +15701,11459952,"TERMINAL",0,0,"maskgit-sampled-token_idxs[0,:,0]: [151 635 635 635 635 635 635 635 635 635 0 0 0 0 0 0]\r\nmaskgit-token_idxs[0,:,0]: [151 635 635 635 635 635 635 635 635 635 635 635 635 635 635 635]\r\nsampling frame 11\r\n",,terminal_output +15702,11460909,"TERMINAL",0,0,"7\t",,terminal_output +15703,11461939,"TERMINAL",0,0,"8\t",,terminal_output +15704,11462964,"TERMINAL",0,0,"9\t",,terminal_output +15705,11463617,"TERMINAL",0,0,"maskgit-sampled-token_idxs[0,:,0]: [151 635 635 635 635 635 635 635 635 635 635 0 0 0 0 0]\r\nmaskgit-token_idxs[0,:,0]: [151 635 635 635 635 635 635 635 635 635 635 635 635 635 635 635]\r\nsampling frame 12\r\n",,terminal_output +15706,11464002,"TERMINAL",0,0,"40\t",,terminal_output 
+15707,11465046,"TERMINAL",0,0,"1\t",,terminal_output +15708,11466087,"TERMINAL",0,0,"2\t",,terminal_output +15709,11467125,"TERMINAL",0,0,"3\t",,terminal_output +15710,11467185,"TERMINAL",0,0,"maskgit-sampled-token_idxs[0,:,0]: [151 635 635 635 635 635 635 635 635 635 635 635 0 0 0 0]\r\nmaskgit-token_idxs[0,:,0]: [151 635 635 635 635 635 635 635 635 635 635 635 635 635 635 635]\r\nsampling frame 13\r\n",,terminal_output +15711,11468197,"TERMINAL",0,0,"5\t",,terminal_output +15712,11469209,"TERMINAL",0,0,"6\t",,terminal_output +15713,11470337,"TERMINAL",0,0,"7\t",,terminal_output +15714,11470739,"TERMINAL",0,0,"maskgit-sampled-token_idxs[0,:,0]: [151 635 635 635 635 635 635 635 635 635 635 635 635 0 0 0]\r\nmaskgit-token_idxs[0,:,0]: [151 635 635 635 635 635 635 635 635 635 635 635 635 635 635 635]\r\nsampling frame 14\r\n",,terminal_output +15715,11471352,"TERMINAL",0,0,"8\t",,terminal_output +15716,11472377,"TERMINAL",0,0,"9\t",,terminal_output +15717,11473401,"TERMINAL",0,0,"50\t",,terminal_output +15718,11474323,"TERMINAL",0,0,"maskgit-sampled-token_idxs[0,:,0]: [151 635 635 635 635 635 635 635 635 635 635 635 635 635 0 0]\r\nmaskgit-token_idxs[0,:,0]: [151 635 635 635 635 635 635 635 635 635 635 635 635 635 635 635]\r\nsampling frame 15\r\n",,terminal_output +15719,11474430,"TERMINAL",0,0,"1\t",,terminal_output +15720,11475551,"TERMINAL",0,0,"2\t",,terminal_output +15721,11476575,"TERMINAL",0,0,"3\t",,terminal_output +15722,11477598,"TERMINAL",0,0,"4\t",,terminal_output +15723,11477803,"TERMINAL",0,0,"maskgit-sampled-token_idxs[0,:,0]: [151 635 635 635 635 635 635 635 635 635 635 635 635 635 635 0]\r\nmaskgit-token_idxs[0,:,0]: [151 635 635 635 635 635 635 635 635 635 635 635 635 635 635 635]\r\n",,terminal_output +15724,11478585,"TERMINAL",0,0,"5\t",,terminal_output +15725,11479647,"TERMINAL",0,0,"6\t",,terminal_output +15726,11480773,"TERMINAL",0,0,"73",,terminal_output +15727,11481799,"TERMINAL",0,0,"8\t",,terminal_output +15728,11482822,"TERMINAL",0,0,"9\t",,terminal_output +15729,11483790,"TERMINAL",0,0,"7:00\t",,terminal_output +15730,11484870,"TERMINAL",0,0,"1\t",,terminal_output +15731,11485717,"TERMINAL",0,0,"SSIM: 0.32501327991485596\r\n",,terminal_output +15732,11485859,"TERMINAL",0,0,"2\t",,terminal_output +15733,11486918,"TERMINAL",0,0,"3\t",,terminal_output +15734,11488000,"TERMINAL",0,0,"4\t",,terminal_output +15735,11488127,"TERMINAL",0,0,"]0;tum_cte0515@hkn0901:~/Projects/jafar[?2004h(jafar) [tum_cte0515@hkn0901 jafar]$ ",,terminal_output +15736,11489068,"TERMINAL",0,0,"5\t",,terminal_output +15737,11490021,"TERMINAL",0,0,"6\t",,terminal_output +15738,11491060,"TERMINAL",0,0,"7\t",,terminal_output +15739,11492101,"TERMINAL",0,0,"8\t",,terminal_output +15740,11493141,"TERMINAL",0,0,"10\t",,terminal_output +15741,11494290,"TERMINAL",0,0,"1\t",,terminal_output +15742,11495216,"TERMINAL",0,0,"2\t",,terminal_output +15743,11496340,"TERMINAL",0,0,"3\t",,terminal_output +15744,11497395,"TERMINAL",0,0,"4\t",,terminal_output +15745,11498387,"TERMINAL",0,0,"5\t",,terminal_output +15746,11499410,"TERMINAL",0,0,"6\t",,terminal_output +15747,11500434,"TERMINAL",0,0,"7\t",,terminal_output +15748,11501458,"TERMINAL",0,0,"8\t",,terminal_output +15749,11502460,"TERMINAL",0,0,"9\t",,terminal_output +15750,11503490,"TERMINAL",0,0,"20\t",,terminal_output +15751,11504531,"TERMINAL",0,0,"1\t",,terminal_output +15752,11505658,"TERMINAL",0,0,"271",,terminal_output +15753,11506607,"TERMINAL",0,0,"3\t",,terminal_output +15754,11507706,"TERMINAL",0,0,"4\t",,terminal_output 
+15755,11508730,"TERMINAL",0,0,"5\t",,terminal_output +15756,11509754,"TERMINAL",0,0,"6\t",,terminal_output +15757,11510777,"TERMINAL",0,0,"7\t",,terminal_output +15758,11511801,"TERMINAL",0,0,"8\t",,terminal_output +15759,11512822,"TERMINAL",0,0,"9\t",,terminal_output +15760,11513969,"TERMINAL",0,0,"30\t",,terminal_output +15761,11514910,"TERMINAL",0,0,"1\t",,terminal_output +15762,11515970,"TERMINAL",0,0,"2\t",,terminal_output +15763,11516991,"TERMINAL",0,0,"3\t",,terminal_output +15764,11518031,"TERMINAL",0,0,"4\t",,terminal_output +15765,11519083,"TERMINAL",0,0,"5\t",,terminal_output +15766,11520114,"TERMINAL",0,0,"6\t",,terminal_output +15767,11521156,"TERMINAL",0,0,"8\t",,terminal_output +15768,11522198,"TERMINAL",0,0,"9\t",,terminal_output +15769,11523271,"TERMINAL",0,0,"40\t",,terminal_output +15770,11524299,"TERMINAL",0,0,"1\t",,terminal_output +15771,11525327,"TERMINAL",0,0,"2\t",,terminal_output +15772,11526445,"TERMINAL",0,0,"3\t",,terminal_output +15773,11527470,"TERMINAL",0,0,"4\t",,terminal_output +15774,11528492,"TERMINAL",0,0,"5\t",,terminal_output +15775,11529517,"TERMINAL",0,0,"62",,terminal_output +15776,11530644,"TERMINAL",0,0,"7\t",,terminal_output +15777,11531592,"TERMINAL",0,0,"8\t",,terminal_output +15778,11532692,"TERMINAL",0,0,"9\t",,terminal_output +15779,11533674,"TERMINAL",0,0,"50\t",,terminal_output +15780,11534738,"TERMINAL",0,0,"1\t",,terminal_output +15781,11535866,"TERMINAL",0,0,"2\t",,terminal_output +15782,11536804,"TERMINAL",0,0,"3\t",,terminal_output +15783,11537912,"TERMINAL",0,0,"4\t",,terminal_output +15784,11538938,"TERMINAL",0,0,"5\t",,terminal_output +15785,11539932,"TERMINAL",0,0,"6\t",,terminal_output +15786,11541089,"TERMINAL",0,0,"7\t",,terminal_output +15787,11542025,"TERMINAL",0,0,"8\t",,terminal_output +15788,11543118,"TERMINAL",0,0,"9\t",,terminal_output +15789,11544113,"TERMINAL",0,0,"8:00\t",,terminal_output +15790,11545156,"TERMINAL",0,0,"2\t",,terminal_output +15791,11546204,"TERMINAL",0,0,"3\t",,terminal_output +15792,11547335,"TERMINAL",0,0,"4\t",,terminal_output +15793,11548359,"TERMINAL",0,0,"5\t",,terminal_output +15794,11549382,"TERMINAL",0,0,"6\t",,terminal_output +15795,11550406,"TERMINAL",0,0,"7\t",,terminal_output +15796,11551432,"TERMINAL",0,0,"866",,terminal_output +15797,11552558,"TERMINAL",0,0,"9\t",,terminal_output +15798,11553520,"TERMINAL",0,0,"10\t",,terminal_output +15799,11554606,"TERMINAL",0,0,"1\t",,terminal_output +15800,11555628,"TERMINAL",0,0,"2\t",,terminal_output +15801,11556654,"TERMINAL",0,0,"3\t",,terminal_output +15802,11557779,"TERMINAL",0,0,"4\t",,terminal_output +15803,11558804,"TERMINAL",0,0,"5\t",,terminal_output +15804,11559809,"TERMINAL",0,0,"6\t",,terminal_output +15805,11560852,"TERMINAL",0,0,"7\t",,terminal_output +15806,11561857,"TERMINAL",0,0,"8\t",,terminal_output +15807,11562900,"TERMINAL",0,0,"9\t",,terminal_output +15808,11563956,"TERMINAL",0,0,"20\t",,terminal_output +15809,11564964,"TERMINAL",0,0,"1\t",,terminal_output +15810,11566075,"TERMINAL",0,0,"2\t",,terminal_output +15811,11567099,"TERMINAL",0,0,"3\t",,terminal_output +15812,11568091,"TERMINAL",0,0,"4\t",,terminal_output +15813,11569134,"TERMINAL",0,0,"5\t",,terminal_output +15814,11570179,"TERMINAL",0,0,"7\t",,terminal_output +15815,11571220,"TERMINAL",0,0,"8\t",,terminal_output +15816,11572321,"TERMINAL",0,0,"9\t",,terminal_output +15817,11573345,"TERMINAL",0,0,"30\t",,terminal_output +15818,11574368,"TERMINAL",0,0,"1\t",,terminal_output +15819,11575396,"TERMINAL",0,0,"2\t",,terminal_output 
+15820,11576427,"TERMINAL",0,0,"3\t",,terminal_output +15821,11577543,"TERMINAL",0,0,"4\t",,terminal_output +15822,11578511,"TERMINAL",0,0,"5\t",,terminal_output +15823,11579591,"TERMINAL",0,0,"6\t",,terminal_output +15824,11580606,"TERMINAL",0,0,"7\t",,terminal_output +15825,11581741,"TERMINAL",0,0,"8\t",,terminal_output +15826,11582697,"TERMINAL",0,0,"9\t",,terminal_output +15827,11583801,"TERMINAL",0,0,"40\t",,terminal_output +15828,11584813,"TERMINAL",0,0,"1\t",,terminal_output +15829,11585940,"TERMINAL",0,0,"2\t",,terminal_output +15830,11586964,"TERMINAL",0,0,"3\t",,terminal_output +15831,11587923,"TERMINAL",0,0,"4\t",,terminal_output +15832,11589012,"TERMINAL",0,0,"5\t",,terminal_output +15833,11590038,"TERMINAL",0,0,"67",,terminal_output +15834,11591062,"TERMINAL",0,0,"7\t",,terminal_output +15835,11592094,"TERMINAL",0,0,"8\t",,terminal_output +15836,11593141,"TERMINAL",0,0,"9\t",,terminal_output +15837,11594172,"TERMINAL",0,0,"51\t",,terminal_output +15838,11595259,"TERMINAL",0,0,"2712",,terminal_output +15839,11596250,"TERMINAL",0,0,"3\t",,terminal_output +15840,11597307,"TERMINAL",0,0,"4\t",,terminal_output +15841,11598434,"TERMINAL",0,0,"5\t",,terminal_output +15842,11599390,"TERMINAL",0,0,"6\t",,terminal_output +15843,11600481,"TERMINAL",0,0,"7\t",,terminal_output +15844,11601505,"TERMINAL",0,0,"8\t",,terminal_output +15845,11602642,"TERMINAL",0,0,"9\t",,terminal_output +15846,11603566,"TERMINAL",0,0,"9:00\t",,terminal_output +15847,11604679,"TERMINAL",0,0,"1\t",,terminal_output +15848,11605652,"TERMINAL",0,0,"2\t",,terminal_output +15849,11606728,"TERMINAL",0,0,"3\t",,terminal_output +15850,11607724,"TERMINAL",0,0,"4\t",,terminal_output +15851,11608761,"TERMINAL",0,0,"5\t",,terminal_output +15852,11609901,"TERMINAL",0,0,"6\t",,terminal_output +15853,11610926,"TERMINAL",0,0,"7\t",,terminal_output +15854,11611950,"TERMINAL",0,0,"8\t",,terminal_output +15855,11612974,"TERMINAL",0,0,"9\t",,terminal_output +15856,11614013,"TERMINAL",0,0,"10\t",,terminal_output +15857,11615033,"TERMINAL",0,0,"1\t",,terminal_output +15858,11616064,"TERMINAL",0,0,"2\t",,terminal_output +15859,11617108,"TERMINAL",0,0,"3\t",,terminal_output +15860,11618162,"TERMINAL",0,0,"5\t",,terminal_output +15861,11619220,"TERMINAL",0,0,"6\t",,terminal_output +15862,11620348,"TERMINAL",0,0,"7\t",,terminal_output +15863,11621371,"TERMINAL",0,0,"8\t",,terminal_output +15864,11622395,"TERMINAL",0,0,"9\t",,terminal_output +15865,11623419,"TERMINAL",0,0,"20\t",,terminal_output +15866,11624459,"TERMINAL",0,0,"16",,terminal_output +15867,11625468,"TERMINAL",0,0,"2\t",,terminal_output +15868,11626490,"TERMINAL",0,0,"3\t",,terminal_output +15869,11627618,"TERMINAL",0,0,"4\t",,terminal_output +15870,11628547,"TERMINAL",0,0,"5\t",,terminal_output +15871,11629666,"TERMINAL",0,0,"6\t",,terminal_output +15872,11630690,"TERMINAL",0,0,"75",,terminal_output +15873,11631714,"TERMINAL",0,0,"8\t",,terminal_output +15874,11632737,"TERMINAL",0,0,"9\t",,terminal_output +15875,11633752,"TERMINAL",0,0,"30\t",,terminal_output +15876,11634785,"TERMINAL",0,0,"1\t",,terminal_output +15877,11635912,"TERMINAL",0,0,"2\t",,terminal_output +15878,11636857,"TERMINAL",0,0,"3\t",,terminal_output +15879,11637968,"TERMINAL",0,0,"4\t",,terminal_output +15880,11638983,"TERMINAL",0,0,"5\t",,terminal_output +15881,11640012,"TERMINAL",0,0,"6\t",,terminal_output +15882,11641032,"TERMINAL",0,0,"7\t",,terminal_output +15883,11642070,"TERMINAL",0,0,"8\t",,terminal_output +15884,11643117,"TERMINAL",0,0,"9\t",,terminal_output 
+15885,11644156,"TERMINAL",0,0,"41\t",,terminal_output +15886,11645178,"TERMINAL",0,0,"2\t",,terminal_output +15887,11646281,"TERMINAL",0,0,"3\t",,terminal_output +15888,11647279,"TERMINAL",0,0,"4\t",,terminal_output +15889,11648405,"TERMINAL",0,0,"5\t",,terminal_output +15890,11649430,"TERMINAL",0,0,"6\t",,terminal_output +15891,11650452,"TERMINAL",0,0,"7\t",,terminal_output +15892,11651477,"TERMINAL",0,0,"8\t",,terminal_output +15893,11652501,"TERMINAL",0,0,"9\t",,terminal_output +15894,11653520,"TERMINAL",0,0,"50\t",,terminal_output +15895,11654640,"TERMINAL",0,0,"1\t",,terminal_output +15896,11655588,"TERMINAL",0,0,"2\t",,terminal_output +15897,11656699,"TERMINAL",0,0,"3\t",,terminal_output +15898,11657723,"TERMINAL",0,0,"4\t",,terminal_output +15899,11658720,"TERMINAL",0,0,"5\t",,terminal_output +15900,11659767,"TERMINAL",0,0,"6\t",,terminal_output +15901,11660898,"TERMINAL",0,0,"7\t",,terminal_output +15902,11661852,"TERMINAL",0,0,"8\t",,terminal_output +15903,11662979,"TERMINAL",0,0,"9\t",,terminal_output +15904,11663969,"TERMINAL",0,0,"30:00\t",,terminal_output +15905,11664995,"TERMINAL",0,0,"1\t",,terminal_output +15906,11666036,"TERMINAL",0,0,"2\t",,terminal_output +15907,11667063,"TERMINAL",0,0,"3\t",,terminal_output +15908,11668113,"TERMINAL",0,0,"4\t",,terminal_output +15909,11669150,"TERMINAL",0,0,"6\t",,terminal_output +15910,11670217,"TERMINAL",0,0,"7\t",,terminal_output +15911,11671757,"TERMINAL",0,0,"86910",,terminal_output +15912,11672880,"TERMINAL",0,0,"9\t",,terminal_output +15913,11673820,"TERMINAL",0,0,"10\t",,terminal_output +15914,11674874,"TERMINAL",0,0,"1\t",,terminal_output +15915,11675953,"TERMINAL",0,0,"2\t",,terminal_output +15916,11676957,"TERMINAL",0,0,"3\t",,terminal_output +15917,11677993,"TERMINAL",0,0,"4\t",,terminal_output +15918,11679043,"TERMINAL",0,0,"5\t",,terminal_output +15919,11680097,"TERMINAL",0,0,"6\t",,terminal_output +15920,11681121,"TERMINAL",0,0,"7\t",,terminal_output +15921,11682182,"TERMINAL",0,0,"9\t",,terminal_output +15922,11683215,"TERMINAL",0,0,"20\t",,terminal_output +15923,11684262,"TERMINAL",0,0,"1\t",,terminal_output +15924,11685372,"TERMINAL",0,0,"2\t",,terminal_output +15925,11686396,"TERMINAL",0,0,"3\t",,terminal_output +15926,11687420,"TERMINAL",0,0,"4\t",,terminal_output +15927,11688445,"TERMINAL",0,0,"5\t",,terminal_output +15928,11689571,"TERMINAL",0,0,"6\t",,terminal_output +15929,11690595,"TERMINAL",0,0,"7\t",,terminal_output +15930,11691618,"TERMINAL",0,0,"8\t",,terminal_output +15931,11692588,"TERMINAL",0,0,"9\t",,terminal_output +15932,11693629,"TERMINAL",0,0,"30\t",,terminal_output +15933,11694689,"TERMINAL",0,0,"1\t",,terminal_output +15934,11695817,"TERMINAL",0,0,"2\t",,terminal_output +15935,11696740,"TERMINAL",0,0,"3\t",,terminal_output +15936,11697865,"TERMINAL",0,0,"4\t",,terminal_output +15937,11698889,"TERMINAL",0,0,"5\t",,terminal_output +15938,11699862,"TERMINAL",0,0,"6\t",,terminal_output +15939,11700916,"TERMINAL",0,0,"7\t",,terminal_output +15940,11701961,"TERMINAL",0,0,"8\t",,terminal_output +15941,11702996,"TERMINAL",0,0,"9\t",,terminal_output +15942,11704113,"TERMINAL",0,0,"40\t",,terminal_output +15943,11705137,"TERMINAL",0,0,"1\t",,terminal_output +15944,11706160,"TERMINAL",0,0,"2\t",,terminal_output +15945,11707131,"TERMINAL",0,0,"3\t",,terminal_output +15946,11708172,"TERMINAL",0,0,"5\t",,terminal_output +15947,11709197,"TERMINAL",0,0,"6\t",,terminal_output +15948,11710227,"TERMINAL",0,0,"7\t",,terminal_output +15949,11711274,"TERMINAL",0,0,"8\t",,terminal_output 
+15950,11712405,"TERMINAL",0,0,"9\t",,terminal_output +15951,11713430,"TERMINAL",0,0,"50\t",,terminal_output +15952,11714454,"TERMINAL",0,0,"1\t",,terminal_output +15953,11715478,"TERMINAL",0,0,"2\t",,terminal_output +15954,11716501,"TERMINAL",0,0,"3\t",,terminal_output +15955,11717524,"TERMINAL",0,0,"4\t",,terminal_output +15956,11718570,"TERMINAL",0,0,"5\t",,terminal_output +15957,11719677,"TERMINAL",0,0,"6\t",,terminal_output +15958,11720700,"TERMINAL",0,0,"7\t",,terminal_output +15959,11721694,"TERMINAL",0,0,"8\t",,terminal_output +15960,11722748,"TERMINAL",0,0,"9\t",,terminal_output +15961,11723875,"TERMINAL",0,0,"1:00\t",,terminal_output +15962,11724899,"TERMINAL",0,0,"1\t",,terminal_output +15963,11725863,"TERMINAL",0,0,"2\t",,terminal_output +15964,11726947,"TERMINAL",0,0,"3\t",,terminal_output +15965,11727991,"TERMINAL",0,0,"4\t",,terminal_output +15966,11729096,"TERMINAL",0,0,"5\t",,terminal_output +15967,11730122,"TERMINAL",0,0,"6\t",,terminal_output +15968,11731147,"TERMINAL",0,0,"7\t",,terminal_output +15969,11732171,"TERMINAL",0,0,"8\t",,terminal_output +15970,11733188,"TERMINAL",0,0,"10\t",,terminal_output +15971,11734227,"TERMINAL",0,0,"1\t",,terminal_output +15972,11735268,"TERMINAL",0,0,"2\t",,terminal_output +15973,11736321,"TERMINAL",0,0,"3\t",,terminal_output +15974,11737392,"TERMINAL",0,0,"43",,terminal_output +15975,11738457,"TERMINAL",0,0,"5\t",,terminal_output +15976,11739442,"TERMINAL",0,0,"6\t",,terminal_output +15977,11740567,"TERMINAL",0,0,"7\t",,terminal_output +15978,11741591,"TERMINAL",0,0,"8\t",,terminal_output +15979,11742564,"TERMINAL",0,0,"9\t",,terminal_output +15980,11743608,"TERMINAL",0,0,"20\t",,terminal_output +15981,11744663,"TERMINAL",0,0,"1\t",,terminal_output +15982,11745789,"TERMINAL",0,0,"274",,terminal_output +15983,11746814,"TERMINAL",0,0,"3\t",,terminal_output +15984,11747837,"TERMINAL",0,0,"4\t",,terminal_output +15985,11748860,"TERMINAL",0,0,"5\t",,terminal_output +15986,11749885,"TERMINAL",0,0,"6\t",,terminal_output +15987,11750910,"TERMINAL",0,0,"7\t",,terminal_output +15988,11752035,"TERMINAL",0,0,"8\t",,terminal_output +15989,11753095,"TERMINAL",0,0,"9\t",,terminal_output +15990,11754248,"TERMINAL",0,0,"30\t",,terminal_output +15991,11755107,"TERMINAL",0,0,"1\t",,terminal_output +15992,11756132,"TERMINAL",0,0,"2\t",,terminal_output +15993,11757160,"TERMINAL",0,0,"4\t",,terminal_output +15994,11758189,"TERMINAL",0,0,"5\t",,terminal_output +15995,11759219,"TERMINAL",0,0,"6\t",,terminal_output +15996,11760261,"TERMINAL",0,0,"7\t",,terminal_output +15997,11761331,"TERMINAL",0,0,"8\t",,terminal_output +15998,11762377,"TERMINAL",0,0,"9\t",,terminal_output +15999,11763402,"TERMINAL",0,0,"40\t",,terminal_output +16000,11764414,"TERMINAL",0,0,"1\t",,terminal_output +16001,11765552,"TERMINAL",0,0,"2\t",,terminal_output +16002,11766576,"TERMINAL",0,0,"3\t",,terminal_output +16003,11767600,"TERMINAL",0,0,"4\t",,terminal_output +16004,11768591,"TERMINAL",0,0,"5\t",,terminal_output +16005,11769650,"TERMINAL",0,0,"6\t",,terminal_output +16006,11770673,"TERMINAL",0,0,"7\t",,terminal_output +16007,11771798,"TERMINAL",0,0,"8\t",,terminal_output +16008,11772823,"TERMINAL",0,0,"9\t",,terminal_output +16009,11773846,"TERMINAL",0,0,"50\t",,terminal_output +16010,11774849,"TERMINAL",0,0,"1\t",,terminal_output +16011,11775895,"TERMINAL",0,0,"2\t",,terminal_output +16012,11777021,"TERMINAL",0,0,"3\t",,terminal_output +16013,11777978,"TERMINAL",0,0,"4\t",,terminal_output +16014,11779080,"TERMINAL",0,0,"5\t",,terminal_output 
+16015,11780093,"TERMINAL",0,0,"6\t",,terminal_output +16016,11781118,"TERMINAL",0,0,"7\t",,terminal_output +16017,11782155,"TERMINAL",0,0,"9\t",,terminal_output +16018,11783202,"TERMINAL",0,0,"2:005",,terminal_output +16019,11784291,"TERMINAL",0,0,"1\t",,terminal_output +16020,11785292,"TERMINAL",0,0,"2\t",,terminal_output +16021,11786326,"TERMINAL",0,0,"3\t",,terminal_output +16022,11787384,"TERMINAL",0,0,"4\t",,terminal_output +16023,11788490,"TERMINAL",0,0,"5\t",,terminal_output +16024,11789514,"TERMINAL",0,0,"6\t",,terminal_output +16025,11790537,"TERMINAL",0,0,"7\t",,terminal_output +16026,11791525,"TERMINAL",0,0,"8\t",,terminal_output +16027,11792586,"TERMINAL",0,0,"969",,terminal_output +16028,11793610,"TERMINAL",0,0,"10\t",,terminal_output +16029,11794737,"TERMINAL",0,0,"1\t",,terminal_output +16030,11795760,"TERMINAL",0,0,"2\t",,terminal_output +16031,11796784,"TERMINAL",0,0,"3\t",,terminal_output +16032,11797752,"TERMINAL",0,0,"4\t",,terminal_output +16033,11798832,"TERMINAL",0,0,"5\t",,terminal_output +16034,11799869,"TERMINAL",0,0,"6\t",,terminal_output +16035,11800983,"TERMINAL",0,0,"7\t",,terminal_output +16036,11801931,"TERMINAL",0,0,"8\t",,terminal_output +16037,11802992,"TERMINAL",0,0,"9\t",,terminal_output +16038,11804055,"TERMINAL",0,0,"20\t",,terminal_output +16039,11805090,"TERMINAL",0,0,"1\t",,terminal_output +16040,11806104,"TERMINAL",0,0,"2\t",,terminal_output +16041,11807154,"TERMINAL",0,0,"3\t",,terminal_output +16042,11808170,"TERMINAL",0,0,"5\t",,terminal_output +16043,11809213,"TERMINAL",0,0,"6\t",,terminal_output +16044,11810261,"TERMINAL",0,0,"7\t",,terminal_output +16045,11811291,"TERMINAL",0,0,"8\t",,terminal_output +16046,11812359,"TERMINAL",0,0,"9\t",,terminal_output +16047,11813382,"TERMINAL",0,0,"30\t",,terminal_output +16048,11814499,"TERMINAL",0,0,"1\t",,terminal_output +16049,11815524,"TERMINAL",0,0,"2\t",,terminal_output +16050,11816507,"TERMINAL",0,0,"3\t",,terminal_output +16051,11817578,"TERMINAL",0,0,"4\t",,terminal_output +16052,11818596,"TERMINAL",0,0,"5\t",,terminal_output +16053,11819722,"TERMINAL",0,0,"6\t",,terminal_output +16054,11820747,"TERMINAL",0,0,"7\t",,terminal_output +16055,11821770,"TERMINAL",0,0,"8\t",,terminal_output +16056,11822796,"TERMINAL",0,0,"9\t",,terminal_output +16057,11823920,"TERMINAL",0,0,"40\t",,terminal_output +16058,11824945,"TERMINAL",0,0,"1\t",,terminal_output +16059,11825908,"TERMINAL",0,0,"2\t",,terminal_output +16060,11826952,"TERMINAL",0,0,"3\t",,terminal_output +16061,11827977,"TERMINAL",0,0,"4\t",,terminal_output +16062,11829041,"TERMINAL",0,0,"5\t",,terminal_output +16063,11830065,"TERMINAL",0,0,"6\t",,terminal_output +16064,11831192,"TERMINAL",0,0,"7\t",,terminal_output +16065,11832217,"TERMINAL",0,0,"8\t",,terminal_output +16066,11833185,"TERMINAL",0,0,"50\t",,terminal_output +16067,11834229,"TERMINAL",0,0,"1\t",,terminal_output +16068,11835272,"TERMINAL",0,0,"2\t",,terminal_output +16069,11836315,"TERMINAL",0,0,"3\t",,terminal_output +16070,11837381,"TERMINAL",0,0,"4\t",,terminal_output +16071,11838461,"TERMINAL",0,0,"5\t",,terminal_output +16072,11839485,"TERMINAL",0,0,"6\t",,terminal_output +16073,11840529,"TERMINAL",0,0,"7\t",,terminal_output +16074,11841644,"TERMINAL",0,0,"8\t",,terminal_output +16075,11842617,"TERMINAL",0,0,"9\t",,terminal_output +16076,11843622,"TERMINAL",0,0,"3:00\t",,terminal_output +16077,11844708,"TERMINAL",0,0,"1\t",,terminal_output +16078,11845711,"TERMINAL",0,0,"2\t",,terminal_output +16079,11846756,"TERMINAL",0,0,"3\t",,terminal_output 
+16080,11847780,"TERMINAL",0,0,"4\t",,terminal_output +16081,11848907,"TERMINAL",0,0,"5\t",,terminal_output +16082,11849932,"TERMINAL",0,0,"6\t",,terminal_output +16083,11850955,"TERMINAL",0,0,"7\t",,terminal_output +16084,11851979,"TERMINAL",0,0,"8\t",,terminal_output +16085,11852979,"TERMINAL",0,0,"9\t",,terminal_output +16086,11854027,"TERMINAL",0,0,"10\t",,terminal_output +16087,11855153,"TERMINAL",0,0,"1\t",,terminal_output +16088,11856178,"TERMINAL",0,0,"2\t",,terminal_output +16089,11857202,"TERMINAL",0,0,"3\t",,terminal_output +16090,11858185,"TERMINAL",0,0,"574",,terminal_output +16091,11859228,"TERMINAL",0,0,"6\t",,terminal_output +16092,11860259,"TERMINAL",0,0,"7\t",,terminal_output +16093,11861298,"TERMINAL",0,0,"8\t",,terminal_output +16094,11862352,"TERMINAL",0,0,"9\t",,terminal_output +16095,11863448,"TERMINAL",0,0,"20\t",,terminal_output +16096,11864472,"TERMINAL",0,0,"1\t",,terminal_output +16097,11865598,"TERMINAL",0,0,"2\t",,terminal_output +16098,11866521,"TERMINAL",0,0,"3\t",,terminal_output +16099,11867562,"TERMINAL",0,0,"4\t",,terminal_output +16100,11868605,"TERMINAL",0,0,"5\t",,terminal_output +16101,11869695,"TERMINAL",0,0,"6\t",,terminal_output +16102,11870718,"TERMINAL",0,0,"7\t",,terminal_output +16103,11871721,"TERMINAL",0,0,"8\t",,terminal_output +16104,11872768,"TERMINAL",0,0,"9\t",,terminal_output +16105,11873893,"TERMINAL",0,0,"30\t",,terminal_output +16106,11874917,"TERMINAL",0,0,"1\t",,terminal_output +16107,11875941,"TERMINAL",0,0,"2\t",,terminal_output +16108,11876982,"TERMINAL",0,0,"3\t",,terminal_output +16109,11877988,"TERMINAL",0,0,"4\t",,terminal_output +16110,11879129,"TERMINAL",0,0,"51",,terminal_output +16111,11880073,"TERMINAL",0,0,"6\t",,terminal_output +16112,11881163,"TERMINAL",0,0,"7\t",,terminal_output +16113,11882187,"TERMINAL",0,0,"9\t",,terminal_output +16114,11883205,"TERMINAL",0,0,"40\t",,terminal_output +16115,11884240,"TERMINAL",0,0,"1\t",,terminal_output +16116,11885285,"TERMINAL",0,0,"2\t",,terminal_output +16117,11886327,"TERMINAL",0,0,"3\t",,terminal_output +16118,11887410,"TERMINAL",0,0,"45",,terminal_output +16119,11888477,"TERMINAL",0,0,"5\t",,terminal_output +16120,11889561,"TERMINAL",0,0,"6\t",,terminal_output +16121,11890584,"TERMINAL",0,0,"7\t",,terminal_output +16122,11891608,"TERMINAL",0,0,"8\t",,terminal_output +16123,11892632,"TERMINAL",0,0,"9\t",,terminal_output +16124,11893630,"TERMINAL",0,0,"50\t",,terminal_output +16125,11894682,"TERMINAL",0,0,"1\t",,terminal_output +16126,11895807,"TERMINAL",0,0,"2\t",,terminal_output +16127,11896831,"TERMINAL",0,0,"3\t",,terminal_output +16128,11897780,"TERMINAL",0,0,"4\t",,terminal_output +16129,11898878,"TERMINAL",0,0,"5\t",,terminal_output +16130,11899902,"TERMINAL",0,0,"6\t",,terminal_output +16131,11900906,"TERMINAL",0,0,"7\t",,terminal_output +16132,11902054,"TERMINAL",0,0,"8\t",,terminal_output +16133,11902992,"TERMINAL",0,0,"9\t",,terminal_output +16134,11904101,"TERMINAL",0,0,"4:00\t",,terminal_output +16135,11905105,"TERMINAL",0,0,"1\t",,terminal_output +16136,11906104,"TERMINAL",0,0,"2\t",,terminal_output +16137,11907182,"TERMINAL",0,0,"3\t",,terminal_output +16138,11908198,"TERMINAL",0,0,"56",,terminal_output +16139,11909230,"TERMINAL",0,0,"6\t",,terminal_output +16140,11910270,"TERMINAL",0,0,"7\t",,terminal_output +16141,11911309,"TERMINAL",0,0,"8\t",,terminal_output +16142,11912993,"TERMINAL",0,0,"9690",,terminal_output +16143,11914034,"TERMINAL",0,0,"10\t",,terminal_output +16144,11915058,"TERMINAL",0,0,"1\t",,terminal_output 
+16145,11916082,"TERMINAL",0,0,"2\t",,terminal_output +16146,11917209,"TERMINAL",0,0,"3\t",,terminal_output +16147,11918233,"TERMINAL",0,0,"5\t",,terminal_output +16148,11919257,"TERMINAL",0,0,"6\t",,terminal_output +16149,11920227,"TERMINAL",0,0,"7\t",,terminal_output +16150,11921266,"TERMINAL",0,0,"8\t",,terminal_output +16151,11922307,"TERMINAL",0,0,"9\t",,terminal_output +16152,11923338,"TERMINAL",0,0,"20\t",,terminal_output +16153,11924479,"TERMINAL",0,0,"1\t",,terminal_output +16154,11925503,"TERMINAL",0,0,"2\t",,terminal_output +16155,11926479,"TERMINAL",0,0,"3\t",,terminal_output +16156,11927551,"TERMINAL",0,0,"4\t",,terminal_output +16157,11928559,"TERMINAL",0,0,"5\t",,terminal_output +16158,11929604,"TERMINAL",0,0,"6\t",,terminal_output +16159,11930727,"TERMINAL",0,0,"7\t",,terminal_output +16160,11931750,"TERMINAL",0,0,"8\t",,terminal_output +16161,11932739,"TERMINAL",0,0,"9\t",,terminal_output +16162,11933797,"TERMINAL",0,0,"30\t",,terminal_output +16163,11934924,"TERMINAL",0,0,"1\t",,terminal_output +16164,11935948,"TERMINAL",0,0,"2\t",,terminal_output +16165,11936972,"TERMINAL",0,0,"3\t",,terminal_output +16166,11937999,"TERMINAL",0,0,"4\t",,terminal_output +16167,11939020,"TERMINAL",0,0,"5\t",,terminal_output +16168,11940159,"TERMINAL",0,0,"6\t",,terminal_output +16169,11941079,"TERMINAL",0,0,"7\t",,terminal_output +16170,11942194,"TERMINAL",0,0,"8\t",,terminal_output +16171,11943218,"TERMINAL",0,0,"40\t",,terminal_output +16172,11944244,"TERMINAL",0,0,"1\t",,terminal_output +16173,11945248,"TERMINAL",0,0,"2\t",,terminal_output +16174,11946296,"TERMINAL",0,0,"3\t",,terminal_output +16175,11947341,"TERMINAL",0,0,"4\t",,terminal_output +16176,11948441,"TERMINAL",0,0,"5\t",,terminal_output +16177,11949465,"TERMINAL",0,0,"6\t",,terminal_output +16178,11950489,"TERMINAL",0,0,"7\t",,terminal_output +16179,11951513,"TERMINAL",0,0,"8\t",,terminal_output +16180,11952536,"TERMINAL",0,0,"9\t",,terminal_output +16181,11953573,"TERMINAL",0,0,"50\t",,terminal_output +16182,11954687,"TERMINAL",0,0,"1\t",,terminal_output +16183,11955654,"TERMINAL",0,0,"2\t",,terminal_output +16184,11956735,"TERMINAL",0,0,"3\t",,terminal_output +16185,11957759,"TERMINAL",0,0,"4\t",,terminal_output +16186,11958750,"TERMINAL",0,0,"5\t",,terminal_output +16187,11959807,"TERMINAL",0,0,"6\t",,terminal_output +16188,11960832,"TERMINAL",0,0,"7\t",,terminal_output +16189,11961857,"TERMINAL",0,0,"8\t",,terminal_output +16190,11962888,"TERMINAL",0,0,"9\t",,terminal_output +16191,11964006,"TERMINAL",0,0,"5:00\t",,terminal_output +16192,11965043,"TERMINAL",0,0,"1\t",,terminal_output +16193,11966053,"TERMINAL",0,0,"2\t",,terminal_output +16194,11967057,"TERMINAL",0,0,"3\t",,terminal_output +16195,11968205,"TERMINAL",0,0,"4\t",,terminal_output +16196,11969229,"TERMINAL",0,0,"6\t",,terminal_output +16197,11970254,"TERMINAL",0,0,"7\t",,terminal_output +16198,11971278,"TERMINAL",0,0,"8\t",,terminal_output +16199,11972271,"TERMINAL",0,0,"9\t",,terminal_output +16200,11973315,"TERMINAL",0,0,"10\t",,terminal_output +16201,11974355,"TERMINAL",0,0,"1\t",,terminal_output +16202,11975419,"TERMINAL",0,0,"2\t",,terminal_output +16203,11976499,"TERMINAL",0,0,"374",,terminal_output +16204,11977521,"TERMINAL",0,0,"4\t",,terminal_output +16205,11978543,"TERMINAL",0,0,"5\t",,terminal_output +16206,11979673,"TERMINAL",0,0,"6\t",,terminal_output +16207,11980697,"TERMINAL",0,0,"7\t",,terminal_output +16208,11981671,"TERMINAL",0,0,"8\t",,terminal_output +16209,11982745,"TERMINAL",0,0,"9\t",,terminal_output 
+16210,11983769,"TERMINAL",0,0,"20\t",,terminal_output +16211,11984895,"TERMINAL",0,0,"15",,terminal_output +16212,11985426,"genie.py",0,0,"",python,tab +16213,11985427,"genie.py",10868,0,"",python,selection_mouse +16214,11985513,"genie.py",10867,0,"",python,selection_command +16215,11985833,"TERMINAL",0,0,"2\t",,terminal_output +16216,11986004,"genie.py",10317,0,"",python,selection_mouse +16217,11986874,"TERMINAL",0,0,"3\t",,terminal_output +16218,11987964,"TERMINAL",0,0,"4\t",,terminal_output +16219,11988952,"TERMINAL",0,0,"5\t",,terminal_output +16220,11990015,"TERMINAL",0,0,"6\t",,terminal_output +16221,11991142,"TERMINAL",0,0,"74",,terminal_output +16222,11992180,"TERMINAL",0,0,"8\t",,terminal_output +16223,11993123,"TERMINAL",0,0,"9\t",,terminal_output +16224,11994149,"TERMINAL",0,0,"31\t",,terminal_output +16225,11995239,"TERMINAL",0,0,"2\t",,terminal_output +16226,11996244,"TERMINAL",0,0,"3\t",,terminal_output +16227,11997271,"TERMINAL",0,0,"4\t",,terminal_output +16228,11998339,"TERMINAL",0,0,"5\t",,terminal_output +16229,11999366,"TERMINAL",0,0,"6\t",,terminal_output +16230,12000395,"TERMINAL",0,0,"7\t",,terminal_output +16231,12001435,"TERMINAL",0,0,"8\t",,terminal_output +16232,12002509,"TERMINAL",0,0,"9\t",,terminal_output +16233,12003513,"TERMINAL",0,0,"40\t",,terminal_output +16234,12004632,"TERMINAL",0,0,"1\t",,terminal_output +16235,12005683,"TERMINAL",0,0,"2\t",,terminal_output +16236,12006707,"TERMINAL",0,0,"3\t",,terminal_output +16237,12007731,"TERMINAL",0,0,"4\t",,terminal_output +16238,12008755,"TERMINAL",0,0,"5\t",,terminal_output +16239,12009882,"TERMINAL",0,0,"6\t",,terminal_output +16240,12010940,"TERMINAL",0,0,"7\t",,terminal_output +16241,12011929,"TERMINAL",0,0,"8\t",,terminal_output +16242,12012560,"genie.py",0,0,"",python,tab +16243,12012561,"genie.py",10072,0,"",python,selection_mouse +16244,12012687,"genie.py",10070,4,"mask",python,selection_mouse +16245,12012912,"TERMINAL",0,0,"9\t",,terminal_output +16246,12013738,"genie.py",10059,0,"",python,selection_mouse +16247,12013900,"genie.py",10048,17,"final_token_probs",python,selection_mouse +16248,12013987,"TERMINAL",0,0,"50\t",,terminal_output +16249,12014481,"genie.py",10072,0,"",python,selection_mouse +16250,12014632,"genie.py",10070,4,"mask",python,selection_mouse +16251,12015001,"TERMINAL",0,0,"1\t",,terminal_output +16252,12016128,"TERMINAL",0,0,"2\t",,terminal_output +16253,12017151,"TERMINAL",0,0,"3\t",,terminal_output +16254,12017257,"genie.py",10061,0,"",python,selection_mouse +16255,12017429,"genie.py",10048,17,"final_token_probs",python,selection_mouse +16256,12018121,"TERMINAL",0,0,"4\t",,terminal_output +16257,12019051,"genie.py",10070,0,"",python,selection_mouse +16258,12019213,"TERMINAL",0,0,"6\t",,terminal_output +16259,12020207,"TERMINAL",0,0,"7\t",,terminal_output +16260,12020572,"genie.py",10073,0,"",python,selection_mouse +16261,12020731,"genie.py",10070,4,"mask",python,selection_mouse +16262,12021256,"TERMINAL",0,0,"8\t",,terminal_output +16263,12022018,"genie.py",9799,0,"",python,selection_mouse +16264,12022331,"TERMINAL",0,0,"95",,terminal_output +16265,12022676,"genie.py",9979,0,"",python,selection_mouse +16266,12023278,"genie.py",9882,0,"",python,selection_mouse +16267,12023407,"TERMINAL",0,0,"6:00\t",,terminal_output +16268,12023604,"genie.py",9882,5,"debug",python,selection_mouse +16269,12024417,"genie.py",9291,0,"",python,selection_mouse +16270,12024449,"genie.py",9290,0,"",python,selection_command +16271,12024475,"TERMINAL",0,0,"1\t",,terminal_output 
+16272,12025549,"TERMINAL",0,0,"2\t",,terminal_output +16273,12026086,"genie.py",10495,0,"",python,selection_mouse +16274,12026499,"TERMINAL",0,0,"3\t",,terminal_output +16275,12026728,"genie.py",10739,0,"",python,selection_mouse +16276,12027597,"TERMINAL",0,0,"4\t",,terminal_output +16277,12028621,"TERMINAL",0,0,"5\t",,terminal_output +16278,12029645,"TERMINAL",0,0,"6\t",,terminal_output +16279,12030772,"TERMINAL",0,0,"7\t",,terminal_output +16280,12030875,"genie.py",8868,0,"",python,selection_mouse +16281,12031638,"genie.py",8840,0,"",python,selection_mouse +16282,12031723,"TERMINAL",0,0,"8\t",,terminal_output +16283,12032678,"genie.py",8694,0,"",python,selection_mouse +16284,12032789,"TERMINAL",0,0,"9\t",,terminal_output +16285,12033478,"genie.py",8583,0,"",python,selection_mouse +16286,12033798,"genie.py",8575,12,"final_logits",python,selection_mouse +16287,12033869,"TERMINAL",0,0,"101",,terminal_output +16288,12034887,"TERMINAL",0,0,"1\t",,terminal_output +16289,12035995,"TERMINAL",0,0,"2\t",,terminal_output +16290,12036943,"TERMINAL",0,0,"3\t",,terminal_output +16291,12037992,"TERMINAL",0,0,"4\t",,terminal_output +16292,12039041,"TERMINAL",0,0,"5\t",,terminal_output +16293,12040090,"TERMINAL",0,0,"6\t",,terminal_output +16294,12041212,"TERMINAL",0,0,"7\t",,terminal_output +16295,12042244,"TERMINAL",0,0,"9\t",,terminal_output +16296,12042741,"genie.py",9790,0,"",python,selection_mouse +16297,12042876,"genie.py",9777,17,"final_token_probs",python,selection_mouse +16298,12043188,"TERMINAL",0,0,"20\t",,terminal_output +16299,12044313,"TERMINAL",0,0,"1\t",,terminal_output +16300,12045313,"TERMINAL",0,0,"2\t",,terminal_output +16301,12046321,"TERMINAL",0,0,"3\t",,terminal_output +16302,12047370,"TERMINAL",0,0,"4\t",,terminal_output +16303,12048486,"TERMINAL",0,0,"5\t",,terminal_output +16304,12049511,"TERMINAL",0,0,"6\t",,terminal_output +16305,12050535,"TERMINAL",0,0,"74",,terminal_output +16306,12051666,"TERMINAL",0,0,"8\t",,terminal_output +16307,12052607,"TERMINAL",0,0,"9\t",,terminal_output +16308,12053653,"TERMINAL",0,0,"30\t",,terminal_output +16309,12054698,"TERMINAL",0,0,"1\t",,terminal_output +16310,12055972,"TERMINAL",0,0,"2\t",,terminal_output +16311,12056803,"TERMINAL",0,0,"3\t",,terminal_output +16312,12058021,"TERMINAL",0,0,"4\t",,terminal_output +16313,12058942,"TERMINAL",0,0,"5\t",,terminal_output +16314,12059956,"TERMINAL",0,0,"6\t",,terminal_output +16315,12060998,"TERMINAL",0,0,"7\t",,terminal_output +16316,12062107,"TERMINAL",0,0,"8\t",,terminal_output +16317,12063042,"TERMINAL",0,0,"9\t",,terminal_output +16318,12064154,"TERMINAL",0,0,"40\t",,terminal_output +16319,12065179,"TERMINAL",0,0,"15",,terminal_output +16320,12066202,"TERMINAL",0,0,"3\t",,terminal_output +16321,12067243,"TERMINAL",0,0,"4\t",,terminal_output +16322,12068260,"TERMINAL",0,0,"5\t",,terminal_output +16323,12069288,"TERMINAL",0,0,"6\t",,terminal_output +16324,12070333,"TERMINAL",0,0,"7\t",,terminal_output +16325,12071362,"TERMINAL",0,0,"8\t",,terminal_output +16326,12072382,"TERMINAL",0,0,"96",,terminal_output +16327,12073473,"TERMINAL",0,0,"50\t",,terminal_output +16328,12074474,"TERMINAL",0,0,"1\t",,terminal_output +16329,12075623,"TERMINAL",0,0,"2\t",,terminal_output +16330,12076648,"TERMINAL",0,0,"3\t",,terminal_output +16331,12077590,"TERMINAL",0,0,"4\t",,terminal_output +16332,12078625,"TERMINAL",0,0,"5\t",,terminal_output +16333,12079719,"TERMINAL",0,0,"6\t",,terminal_output +16334,12080743,"TERMINAL",0,0,"7\t",,terminal_output +16335,12081768,"TERMINAL",0,0,"8\t",,terminal_output 
+16336,12082893,"TERMINAL",0,0,"9\t",,terminal_output +16337,12083921,"TERMINAL",0,0,"7:00\t",,terminal_output +16338,12085045,"TERMINAL",0,0,"1\t",,terminal_output +16339,12086068,"TERMINAL",0,0,"2\t",,terminal_output +16340,12086942,"TERMINAL",0,0,"37",,terminal_output +16341,12087991,"TERMINAL",0,0,"4\t",,terminal_output +16342,12089141,"TERMINAL",0,0,"5\t",,terminal_output +16343,12090165,"TERMINAL",0,0,"6\t",,terminal_output +16344,12091188,"TERMINAL",0,0,"7\t",,terminal_output +16345,12092213,"TERMINAL",0,0,"9\t",,terminal_output +16346,12093245,"TERMINAL",0,0,"10\t",,terminal_output +16347,12094259,"TERMINAL",0,0,"1\t",,terminal_output +16348,12095292,"TERMINAL",0,0,"2\t",,terminal_output +16349,12096309,"TERMINAL",0,0,"3\t",,terminal_output +16350,12097353,"TERMINAL",0,0,"4\t",,terminal_output +16351,12098403,"TERMINAL",0,0,"5\t",,terminal_output +16352,12099446,"TERMINAL",0,0,"6\t",,terminal_output +16353,12100483,"TERMINAL",0,0,"7\t",,terminal_output +16354,12101531,"TERMINAL",0,0,"8\t",,terminal_output +16355,12102574,"TERMINAL",0,0,"9\t",,terminal_output +16356,12103593,"TERMINAL",0,0,"20\t",,terminal_output +16357,12104705,"TERMINAL",0,0,"1\t",,terminal_output +16358,12105730,"TERMINAL",0,0,"2\t",,terminal_output +16359,12106733,"TERMINAL",0,0,"3\t",,terminal_output +16360,12107616,"genie.py",9040,0,"",python,selection_mouse +16361,12107784,"genie.py",9031,22,"sampled_token_idxs_tmp",python,selection_mouse +16362,12107805,"TERMINAL",0,0,"43",,terminal_output +16363,12108902,"TERMINAL",0,0,"5\t",,terminal_output +16364,12109927,"TERMINAL",0,0,"6\t",,terminal_output +16365,12110951,"TERMINAL",0,0,"74",,terminal_output +16366,12111976,"TERMINAL",0,0,"8\t",,terminal_output +16367,12113001,"TERMINAL",0,0,"9\t",,terminal_output +16368,12114036,"TERMINAL",0,0,"30\t",,terminal_output +16369,12114098,"genie.py",10781,0,"",python,selection_mouse +16370,12114259,"genie.py",10779,10,"token_idxs",python,selection_mouse +16371,12115150,"TERMINAL",0,0,"1\t",,terminal_output +16372,12116109,"TERMINAL",0,0,"2\t",,terminal_output +16373,12116918,"genie.py",10306,0,"",python,selection_mouse +16374,12117128,"TERMINAL",0,0,"3\t",,terminal_output +16375,12117452,"genie.py",10239,0,"",python,selection_mouse +16376,12117603,"genie.py",10232,18,"sampled_token_idxs",python,selection_mouse +16377,12118222,"TERMINAL",0,0,"5\t",,terminal_output +16378,12119262,"TERMINAL",0,0,"6\t",,terminal_output +16379,12120259,"TERMINAL",0,0,"7\t",,terminal_output +16380,12121304,"TERMINAL",0,0,"8\t",,terminal_output +16381,12122331,"TERMINAL",0,0,"9\t",,terminal_output +16382,12122865,"genie.py",8982,0,"",python,selection_mouse +16383,12123379,"TERMINAL",0,0,"40\t",,terminal_output +16384,12124404,"TERMINAL",0,0,"1\t",,terminal_output +16385,12125238,"genie.py",8982,0,"_",python,content +16386,12125238,"genie.py",8983,0,"",python,selection_keyboard +16387,12125498,"TERMINAL",0,0,"2\t",,terminal_output +16388,12125671,"genie.py",8983,0,"o",python,content +16389,12125672,"genie.py",8984,0,"",python,selection_keyboard +16390,12125800,"genie.py",8984,0,"l",python,content +16391,12125801,"genie.py",8985,0,"",python,selection_keyboard +16392,12125975,"genie.py",8985,0,"d",python,content +16393,12125976,"genie.py",8986,0,"",python,selection_keyboard +16394,12126419,"genie.py",8985,0,"",python,selection_command +16395,12126488,"TERMINAL",0,0,"3\t",,terminal_output +16396,12126626,"genie.py",9056,0,"",python,selection_command +16397,12126980,"genie.py",9055,0,"",python,selection_command 
+16398,12127158,"genie.py",9054,0,"",python,selection_command +16399,12127294,"genie.py",9053,0,"",python,selection_command +16400,12127554,"genie.py",9053,1,"",python,content +16401,12127555,"TERMINAL",0,0,"4\t",,terminal_output +16402,12127724,"genie.py",9053,0,"s",python,content +16403,12127725,"genie.py",9054,0,"",python,selection_keyboard +16404,12127880,"genie.py",9054,0,"s",python,content +16405,12127881,"genie.py",9055,0,"",python,selection_keyboard +16406,12128511,"genie.py",9054,1,"",python,content +16407,12128624,"TERMINAL",0,0,"5\t",,terminal_output +16408,12128870,"genie.py",9053,1,"",python,content +16409,12128987,"genie.py",9052,1,"",python,content +16410,12129476,"genie.py",9052,1,"",python,content +16411,12129642,"genie.py",9052,1,"",python,content +16412,12129654,"TERMINAL",0,0,"6\t",,terminal_output +16413,12130031,"genie.py",9052,1,"",python,content +16414,12130159,"genie.py",9052,0,"s",python,content +16415,12130160,"genie.py",9053,0,"",python,selection_keyboard +16416,12130687,"TERMINAL",0,0,"7\t",,terminal_output +16417,12131518,"genie.py",10074,0,"",python,selection_mouse +16418,12131740,"TERMINAL",0,0,"8\t",,terminal_output +16419,12132328,"genie.py",9850,0,"",python,selection_mouse +16420,12132756,"TERMINAL",0,0,"9\t",,terminal_output +16421,12133021,"genie.py",9863,0,"",python,selection_mouse +16422,12133937,"TERMINAL",0,0,"50\t",,terminal_output +16423,12134307,"genie.py",9862,0,"",python,selection_mouse +16424,12134915,"TERMINAL",0,0,"1\t",,terminal_output +16425,12135230,"genie.py",9862,4,"",python,content +16426,12135919,"TERMINAL",0,0,"2\t",,terminal_output +16427,12136078,"genie.py",9767,0,"",python,selection_mouse +16428,12136923,"TERMINAL",0,0,"3\t",,terminal_output +16429,12137467,"genie.py",9767,0,"_",python,content +16430,12137468,"genie.py",9768,0,"",python,selection_keyboard +16431,12137840,"genie.py",9768,0,"o",python,content +16432,12137841,"genie.py",9769,0,"",python,selection_keyboard +16433,12138006,"genie.py",9769,0,"l",python,content +16434,12138006,"genie.py",9770,0,"",python,selection_keyboard +16435,12138007,"TERMINAL",0,0,"4\t",,terminal_output +16436,12138074,"genie.py",9770,0,"d",python,content +16437,12138075,"genie.py",9771,0,"",python,selection_keyboard +16438,12138684,"genie.py",9770,0,"",python,selection_command +16439,12139033,"TERMINAL",0,0,"5\t",,terminal_output +16440,12140136,"TERMINAL",0,0,"65",,terminal_output +16441,12141184,"TERMINAL",0,0,"7\t",,terminal_output +16442,12141570,"genie.py",8587,0,"",python,selection_mouse +16443,12142167,"TERMINAL",0,0,"8\t",,terminal_output +16444,12142892,"genie.py",8587,0,"_",python,content +16445,12142893,"genie.py",8588,0,"",python,selection_keyboard +16446,12143110,"genie.py",8588,0,"o",python,content +16447,12143110,"genie.py",8589,0,"",python,selection_keyboard +16448,12143212,"TERMINAL",0,0,"8:00\t",,terminal_output +16449,12143239,"genie.py",8589,0,"l",python,content +16450,12143240,"genie.py",8590,0,"",python,selection_keyboard +16451,12143357,"genie.py",8590,0,"d",python,content +16452,12143358,"genie.py",8591,0,"",python,selection_keyboard +16453,12143853,"genie.py",8590,0,"",python,selection_command +16454,12144018,"genie.py",8670,0,"",python,selection_command +16455,12144208,"TERMINAL",0,0,"1\t",,terminal_output +16456,12144318,"genie.py",8669,0,"",python,selection_command +16457,12144466,"genie.py",8668,0,"",python,selection_command +16458,12144682,"genie.py",8667,0,"",python,selection_command +16459,12144942,"genie.py",8667,5,"",python,content 
+16460,12145253,"TERMINAL",0,0,"2\t",,terminal_output +16461,12146302,"TERMINAL",0,0,"3\t",,terminal_output +16462,12146488,"genie.py",9011,0,"",python,selection_mouse +16463,12147337,"TERMINAL",0,0,"4\t",,terminal_output +16464,12148276,"genie.py",9011,0,"_",python,content +16465,12148277,"genie.py",9012,0,"",python,selection_keyboard +16466,12148387,"TERMINAL",0,0,"5\t",,terminal_output +16467,12148640,"genie.py",9012,0,"o",python,content +16468,12148641,"genie.py",9013,0,"",python,selection_keyboard +16469,12148869,"genie.py",9013,0,"l",python,content +16470,12148870,"genie.py",9014,0,"",python,selection_keyboard +16471,12148961,"genie.py",9014,0,"d",python,content +16472,12148961,"genie.py",9015,0,"",python,selection_keyboard +16473,12149365,"genie.py",9014,0,"",python,selection_command +16474,12149418,"TERMINAL",0,0,"6\t",,terminal_output +16475,12149495,"genie.py",9089,0,"",python,selection_command +16476,12149696,"genie.py",9088,0,"",python,selection_command +16477,12149938,"genie.py",9087,0,"",python,selection_command +16478,12150322,"genie.py",9086,0,"",python,selection_command +16479,12150470,"genie.py",9085,0,"",python,selection_command +16480,12150471,"TERMINAL",0,0,"7\t",,terminal_output +16481,12150604,"genie.py",9084,0,"",python,selection_command +16482,12150748,"genie.py",9083,0,"",python,selection_command +16483,12150879,"genie.py",9082,0,"",python,selection_command +16484,12151177,"genie.py",9082,4,"",python,content +16485,12151526,"TERMINAL",0,0,"8\t",,terminal_output +16486,12152568,"TERMINAL",0,0,"9\t",,terminal_output +16487,12153350,"genie.py",9841,0,"",python,selection_mouse +16488,12154169,"TERMINAL",0,0,"104",,terminal_output +16489,12154613,"genie.py",9840,0,"",python,selection_command +16490,12155081,"genie.py",9840,4,"",python,content +16491,12155203,"TERMINAL",0,0,"2\t",,terminal_output +16492,12155585,"genie.py",9744,0,"",python,selection_command +16493,12155918,"genie.py",9745,0,"",python,selection_command +16494,12156217,"TERMINAL",0,0,"3\t",,terminal_output +16495,12156492,"genie.py",9745,0,"_",python,content +16496,12156493,"genie.py",9746,0,"",python,selection_keyboard +16497,12156761,"genie.py",9746,0,"o",python,content +16498,12156762,"genie.py",9747,0,"",python,selection_keyboard +16499,12156862,"genie.py",9747,0,"l",python,content +16500,12156863,"genie.py",9748,0,"",python,selection_keyboard +16501,12156956,"genie.py",9748,0,"d",python,content +16502,12156956,"genie.py",9749,0,"",python,selection_keyboard +16503,12157264,"genie.py",9748,0,"",python,selection_command +16504,12157359,"TERMINAL",0,0,"4\t",,terminal_output +16505,12158314,"TERMINAL",0,0,"5\t",,terminal_output +16506,12159373,"TERMINAL",0,0,"6\t",,terminal_output +16507,12160179,"genie.py",9801,0,"",python,selection_mouse +16508,12160386,"TERMINAL",0,0,"7\t",,terminal_output +16509,12160993,"genie.py",9801,0," ",python,content +16510,12160995,"genie.py",9802,0,"",python,selection_keyboard +16511,12161383,"genie.py",9801,0,"",python,selection_command +16512,12161475,"TERMINAL",0,0,"8\t",,terminal_output +16513,12162462,"TERMINAL",0,0,"9\t",,terminal_output +16514,12162524,"TERMINAL",0,0,"sh scripts_horeka/overfit_sample_tiny/sample.sh ",,terminal_output +16515,12163050,"TERMINAL",0,0,"\r\n[?2004l\r",,terminal_output +16516,12163205,"TERMINAL",0,0,"Sampling from checkpoint: /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/3309699/genie_1751384516_74500/\r\n",,terminal_output +16517,12163509,"TERMINAL",0,0,"20\t",,terminal_output 
+16518,12164554,"TERMINAL",0,0,"1\t",,terminal_output +16519,12165635,"TERMINAL",0,0,"25",,terminal_output +16520,12166146,"TERMINAL",0,0,"2025-07-03 19:38:23.007115: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +16521,12166625,"TERMINAL",0,0,"3\t",,terminal_output +16522,12167685,"TERMINAL",0,0,"4\t",,terminal_output +16523,12168808,"TERMINAL",0,0,"5\t",,terminal_output +16524,12169841,"TERMINAL",0,0,"6\t",,terminal_output +16525,12170857,"TERMINAL",0,0,"74",,terminal_output +16526,12171881,"TERMINAL",0,0,"8\t",,terminal_output +16527,12172905,"TERMINAL",0,0,"9\t",,terminal_output +16528,12173929,"TERMINAL",0,0,"30\t",,terminal_output +16529,12174953,"TERMINAL",0,0,"1\t",,terminal_output +16530,12176062,"TERMINAL",0,0,"2\t",,terminal_output +16531,12177034,"TERMINAL",0,0,"3\t",,terminal_output +16532,12177282,"genie.py",0,0,"",python,tab +16533,12177283,"genie.py",10176,0,"",python,selection_mouse +16534,12178090,"TERMINAL",0,0,"4\t",,terminal_output +16535,12178182,"genie.py",10176,0,"s",python,content +16536,12178182,"genie.py",10177,0,"",python,selection_keyboard +16537,12178397,"genie.py",10177,0,"a",python,content +16538,12178398,"genie.py",10178,0,"",python,selection_keyboard +16539,12178512,"genie.py",10178,0,"m",python,content +16540,12178513,"genie.py",10179,0,"",python,selection_keyboard +16541,12178608,"genie.py",10179,0,"p",python,content +16542,12178609,"genie.py",10180,0,"",python,selection_keyboard +16543,12178707,"genie.py",10180,0,"l",python,content +16544,12178709,"genie.py",10181,0,"",python,selection_keyboard +16545,12178769,"genie.py",10181,0,"e",python,content +16546,12178770,"genie.py",10182,0,"",python,selection_keyboard +16547,12178870,"genie.py",10182,0,"d",python,content +16548,12178871,"genie.py",10183,0,"",python,selection_keyboard +16549,12178906,"TERMINAL",0,0,"2025-07-03 19:38:35.695989: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +16550,12179084,"genie.py",10183,0,"_",python,content +16551,12179085,"genie.py",10184,0,"",python,selection_keyboard +16552,12179191,"TERMINAL",0,0,"5\t",,terminal_output +16553,12179522,"genie.py",10183,0,"",python,selection_command +16554,12180183,"TERMINAL",0,0,"7\t",,terminal_output +16555,12181361,"TERMINAL",0,0,"8\t",,terminal_output +16556,12182325,"TERMINAL",0,0,"9\t",,terminal_output +16557,12183348,"TERMINAL",0,0,"40\t",,terminal_output +16558,12184373,"TERMINAL",0,0,"1\t",,terminal_output +16559,12185383,"TERMINAL",0,0,"2\t",,terminal_output +16560,12186426,"TERMINAL",0,0,"3\t",,terminal_output +16561,12187460,"TERMINAL",0,0,"4\t",,terminal_output +16562,12188498,"TERMINAL",0,0,"5\t",,terminal_output +16563,12189088,"TERMINAL",0,0,"2025-07-03 19:38:45.945285: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. 
Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +16564,12189538,"TERMINAL",0,0,"62",,terminal_output +16565,12190620,"TERMINAL",0,0,"7\t",,terminal_output +16566,12191643,"TERMINAL",0,0,"81",,terminal_output +16567,12192770,"TERMINAL",0,0,"9\t",,terminal_output +16568,12193726,"TERMINAL",0,0,"50\t",,terminal_output +16569,12194818,"TERMINAL",0,0,"1\t",,terminal_output +16570,12195860,"TERMINAL",0,0,"2\t",,terminal_output +16571,12196866,"TERMINAL",0,0,"3\t",,terminal_output +16572,12198002,"TERMINAL",0,0,"4\t",,terminal_output +16573,12199017,"TERMINAL",0,0,"5\t",,terminal_output +16574,12200041,"TERMINAL",0,0,"6\t",,terminal_output +16575,12201065,"TERMINAL",0,0,"7\t",,terminal_output +16576,12201885,"TERMINAL",0,0,"2025-07-03 19:38:58.756165: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +16577,12202087,"TERMINAL",0,0,"8\t",,terminal_output +16578,12203193,"TERMINAL",0,0,"9\t",,terminal_output +16579,12204240,"TERMINAL",0,0,"9:01\t",,terminal_output +16580,12204439,"TERMINAL",0,0,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/orbax/checkpoint/_src/serialization/type_handlers.py:1251: UserWarning: Sharding info not provided when restoring. Populating sharding info from sharding file. Please note restoration time will be slightly increased due to reading from file. Note also that this option is unsafe when restoring on a different topology than the checkpoint was saved with.\r\n warnings.warn(\r\n",,terminal_output +16581,12205263,"TERMINAL",0,0,"2\t",,terminal_output +16582,12206252,"TERMINAL",0,0,"3\t",,terminal_output +16583,12207332,"TERMINAL",0,0,"41",,terminal_output +16584,12208348,"TERMINAL",0,0,"5\t",,terminal_output +16585,12209391,"TERMINAL",0,0,"6\t",,terminal_output +16586,12210436,"TERMINAL",0,0,"7\t",,terminal_output +16587,12211481,"TERMINAL",0,0,"8\t",,terminal_output +16588,12212534,"TERMINAL",0,0,"9\t",,terminal_output +16589,12212640,"TERMINAL",0,0,"2025-07-03 19:39:09.522064: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +16590,12213660,"TERMINAL",0,0,"10\t",,terminal_output +16591,12214625,"TERMINAL",0,0,"12",,terminal_output +16592,12215708,"TERMINAL",0,0,"2025-07-03 19:39:12.530420: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. 
Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +16593,12215709,"TERMINAL",0,0,"2\t",,terminal_output +16594,12216732,"TERMINAL",0,0,"3\t",,terminal_output +16595,12217757,"TERMINAL",0,0,"4\t",,terminal_output +16596,12218253,"TERMINAL",0,0,"watch",,terminal_focus +16597,12218783,"TERMINAL",0,0,"5\t",,terminal_output +16598,12219907,"TERMINAL",0,0,"61",,terminal_output +16599,12220932,"TERMINAL",0,0,"7\t",,terminal_output +16600,12221955,"TERMINAL",0,0,"8\t",,terminal_output +16601,12221971,"TERMINAL",0,0,"2025-07-03 19:39:18.873032: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +16602,12222979,"TERMINAL",0,0,"9\t",,terminal_output +16603,12223965,"TERMINAL",0,0,"2025-07-03 19:39:20.867790: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +16604,12223979,"TERMINAL",0,0,"20\t",,terminal_output +16605,12225027,"TERMINAL",0,0,"1\t",,terminal_output +16606,12225458,"TERMINAL",0,0,"token_idxs shape: (1, 1, 920)\r\n",,terminal_output +16607,12225846,"TERMINAL",0,0,"sampling frame 1\r\n",,terminal_output +16608,12226050,"TERMINAL",0,0,"2\t",,terminal_output +16609,12227178,"TERMINAL",0,0,"3\t",,terminal_output +16610,12228135,"TERMINAL",0,0,"4\t",,terminal_output +16611,12229224,"TERMINAL",0,0,"6\t",,terminal_output +16612,12230218,"TERMINAL",0,0,"750",,terminal_output +16613,12231272,"TERMINAL",0,0,"8\t",,terminal_output +16614,12231832,"TERMINAL",0,0,"maskgit-sampled-token_idxs[0,:,0]: [151 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0]\r\nmaskgit-token_idxs[0,:,0]: [151 854 852 842 151 447 220 583 447 246 325 273 825 825 102 635]\r\nsampling frame 2\r\n",,terminal_output +16615,12232306,"TERMINAL",0,0,"9\t",,terminal_output +16616,12233341,"TERMINAL",0,0,"30\t",,terminal_output +16617,12234395,"TERMINAL",0,0,"1\t",,terminal_output +16618,12234754,"TERMINAL",0,0,"maskgit-sampled-token_idxs[0,:,0]: [151 854 0 0 0 0 0 0 0 0 0 0 0 0 0 0]\r\nmaskgit-token_idxs[0,:,0]: [151 854 852 842 151 447 220 583 447 246 325 273 825 825 102 635]\r\nsampling frame 3\r\n",,terminal_output +16619,12235427,"TERMINAL",0,0,"2\t",,terminal_output +16620,12236488,"TERMINAL",0,0,"3\t",,terminal_output +16621,12237521,"TERMINAL",0,0,"4\t",,terminal_output +16622,12237724,"TERMINAL",0,0,"maskgit-sampled-token_idxs[0,:,0]: [151 854 852 0 0 0 0 0 0 0 0 0 0 0 0 0]\r\nmaskgit-token_idxs[0,:,0]: [151 854 852 842 151 447 220 583 447 246 325 273 825 825 102 635]\r\nsampling frame 4\r\n",,terminal_output +16623,12238551,"TERMINAL",0,0,"5\t",,terminal_output +16624,12239670,"TERMINAL",0,0,"6\t",,terminal_output +16625,12240695,"TERMINAL",0,0,"76",,terminal_output +16626,12240695,"TERMINAL",0,0,"maskgit-sampled-token_idxs[0,:,0]: [151 854 852 842 0 0 0 0 0 0 0 0 0 0 0 0]\r\nmaskgit-token_idxs[0,:,0]: [151 854 852 842 151 447 220 583 447 246 325 273 825 825 102 635]\r\nsampling frame 5\r\n",,terminal_output +16627,12241718,"TERMINAL",0,0,"8\t",,terminal_output +16628,12242741,"TERMINAL",0,0,"9\t",,terminal_output 
+16629,12243579,"TERMINAL",0,0,"maskgit-sampled-token_idxs[0,:,0]: [151 854 852 842 151 0 0 0 0 0 0 0 0 0 0 0]\r\nmaskgit-token_idxs[0,:,0]: [151 854 852 842 151 447 220 583 447 246 325 273 825 825 102 635]\r\nsampling frame 6\r\n",,terminal_output +16630,12243724,"TERMINAL",0,0,"40\t",,terminal_output +16631,12244769,"TERMINAL",0,0,"1\t",,terminal_output +16632,12245813,"TERMINAL",0,0,"27",,terminal_output +16633,12246444,"TERMINAL",0,0,"maskgit-sampled-token_idxs[0,:,0]: [151 854 852 842 151 447 0 0 0 0 0 0 0 0 0 0]\r\nmaskgit-token_idxs[0,:,0]: [151 854 852 842 151 447 220 583 447 246 325 273 825 825 102 635]\r\nsampling frame 7\r\n",,terminal_output +16634,12246841,"TERMINAL",0,0,"3\t",,terminal_output +16635,12247882,"TERMINAL",0,0,"4\t",,terminal_output +16636,12248988,"TERMINAL",0,0,"5\t",,terminal_output +16637,12249398,"TERMINAL",0,0,"maskgit-sampled-token_idxs[0,:,0]: [151 854 852 842 151 447 220 0 0 0 0 0 0 0 0 0]\r\nmaskgit-token_idxs[0,:,0]: [151 854 852 842 151 447 220 583 447 246 325 273 825 825 102 635]\r\nsampling frame 8\r\n",,terminal_output +16638,12250013,"TERMINAL",0,0,"68",,terminal_output +16639,12251036,"TERMINAL",0,0,"7\t",,terminal_output +16640,12252061,"TERMINAL",0,0,"8\t",,terminal_output +16641,12252256,"TERMINAL",0,0,"maskgit-sampled-token_idxs[0,:,0]: [151 854 852 842 151 447 220 583 0 0 0 0 0 0 0 0]\r\nmaskgit-token_idxs[0,:,0]: [151 854 852 842 151 447 220 583 447 246 325 273 825 825 102 635]\r\nsampling frame 9\r\n",,terminal_output +16642,12253085,"TERMINAL",0,0,"9\t",,terminal_output +16643,12254210,"TERMINAL",0,0,"50\t",,terminal_output +16644,12255234,"TERMINAL",0,0,"29",,terminal_output +16645,12255235,"TERMINAL",0,0,"maskgit-sampled-token_idxs[0,:,0]: [151 854 852 842 151 447 220 583 447 0 0 0 0 0 0 0]\r\nmaskgit-token_idxs[0,:,0]: [151 854 852 842 151 447 220 583 447 246 325 273 825 825 102 635]\r\nsampling frame 10\r\n",,terminal_output +16646,12256260,"TERMINAL",0,0,"3\t",,terminal_output +16647,12257222,"TERMINAL",0,0,"4\t",,terminal_output +16648,12258043,"TERMINAL",0,0,"maskgit-sampled-token_idxs[0,:,0]: [151 854 852 842 151 447 220 583 447 246 0 0 0 0 0 0]\r\nmaskgit-token_idxs[0,:,0]: [151 854 852 842 151 447 220 583 447 246 325 273 825 825 102 635]\r\nsampling frame 11\r\n",,terminal_output +16649,12258322,"TERMINAL",0,0,"5\t",,terminal_output +16650,12259339,"TERMINAL",0,0,"680",,terminal_output +16651,12260358,"TERMINAL",0,0,"7\t",,terminal_output +16652,12261046,"TERMINAL",0,0,"maskgit-sampled-token_idxs[0,:,0]: [151 854 852 842 151 447 220 583 447 246 325 0 0 0 0 0]\r\nmaskgit-token_idxs[0,:,0]: [151 854 852 842 151 447 220 583 447 246 325 273 825 825 102 635]\r\nsampling frame 12\r\n",,terminal_output +16653,12261393,"TERMINAL",0,0,"8\t",,terminal_output +16654,12262426,"TERMINAL",0,0,"9\t",,terminal_output +16655,12263464,"TERMINAL",0,0,"40:00\t",,terminal_output +16656,12264144,"TERMINAL",0,0,"maskgit-sampled-token_idxs[0,:,0]: [151 854 852 842 151 447 220 583 447 246 325 273 0 0 0 0]\r\nmaskgit-token_idxs[0,:,0]: [151 854 852 842 151 447 220 583 447 246 325 273 825 825 102 635]\r\nsampling frame 13\r\n",,terminal_output +16657,12264553,"TERMINAL",0,0,"1\t",,terminal_output +16658,12265548,"TERMINAL",0,0,"2\t",,terminal_output +16659,12266601,"TERMINAL",0,0,"3\t",,terminal_output +16660,12266959,"TERMINAL",0,0,"maskgit-sampled-token_idxs[0,:,0]: [151 854 852 842 151 447 220 583 447 246 325 273 825 0 0 0]\r\nmaskgit-token_idxs[0,:,0]: [151 854 852 842 151 447 220 583 447 246 325 273 825 825 102 635]\r\nsampling frame 
14\r\n",,terminal_output +16661,12267638,"TERMINAL",0,0,"4\t",,terminal_output +16662,12268686,"TERMINAL",0,0,"5\t",,terminal_output +16663,12269776,"TERMINAL",0,0,"6\t",,terminal_output +16664,12269883,"TERMINAL",0,0,"maskgit-sampled-token_idxs[0,:,0]: [151 854 852 842 151 447 220 583 447 246 325 273 825 825 0 0]\r\nmaskgit-token_idxs[0,:,0]: [151 854 852 842 151 447 220 583 447 246 325 273 825 825 102 635]\r\nsampling frame 15\r\n",,terminal_output +16665,12270809,"TERMINAL",0,0,"7\t",,terminal_output +16666,12271824,"TERMINAL",0,0,"8\t",,terminal_output +16667,12272848,"TERMINAL",0,0,"maskgit-sampled-token_idxs[0,:,0]: [151 854 852 842 151 447 220 583 447 246 325 273 825 825 102 0]\r\nmaskgit-token_idxs[0,:,0]: [151 854 852 842 151 447 220 583 447 246 325 273 825 825 102 635]\r\n",,terminal_output +16668,12272848,"TERMINAL",0,0,"9\t",,terminal_output +16669,12273975,"TERMINAL",0,0,"10\t",,terminal_output +16670,12274916,"TERMINAL",0,0,"175",,terminal_output +16671,12275994,"TERMINAL",0,0,"2\t",,terminal_output +16672,12277046,"TERMINAL",0,0,"3\t",,terminal_output +16673,12278035,"TERMINAL",0,0,"4\t",,terminal_output +16674,12279095,"TERMINAL",0,0,"5\t",,terminal_output +16675,12280222,"TERMINAL",0,0,"6\t",,terminal_output +16676,12280733,"TERMINAL",0,0,"SSIM: 0.9997086524963379\r\n",,terminal_output +16677,12281245,"TERMINAL",0,0,"8\t",,terminal_output +16678,12282237,"TERMINAL",0,0,"9\t",,terminal_output +16679,12283012,"TERMINAL",0,0,"]0;tum_cte0515@hkn0901:~/Projects/jafar[?2004h(jafar) [tum_cte0515@hkn0901 jafar]$ ",,terminal_output +16680,12283367,"TERMINAL",0,0,"20\t",,terminal_output +16681,12284335,"TERMINAL",0,0,"1\t",,terminal_output +16682,12285376,"TERMINAL",0,0,"2\t",,terminal_output +16683,12286362,"TERMINAL",0,0,"3\t",,terminal_output +16684,12287405,"TERMINAL",0,0,"4\t",,terminal_output +16685,12288450,"TERMINAL",0,0,"5\t",,terminal_output +16686,12289493,"TERMINAL",0,0,"6\t",,terminal_output +16687,12290547,"TERMINAL",0,0,"7\t",,terminal_output +16688,12291690,"TERMINAL",0,0,"8\t",,terminal_output +16689,12292621,"TERMINAL",0,0,"9\t",,terminal_output +16690,12293694,"TERMINAL",0,0,"30\t",,terminal_output +16691,12294017,"TERMINAL",0,0,"srun",,terminal_focus +16692,12294687,"TERMINAL",0,0,"1\t",,terminal_output +16693,12295780,"TERMINAL",0,0,"2\t",,terminal_output +16694,12296767,"TERMINAL",0,0,"3\t",,terminal_output +16695,12297834,"TERMINAL",0,0,"4\t",,terminal_output +16696,12298784,"genie.py",0,0,"",python,tab +16697,12298785,"genie.py",10129,0,"",python,selection_mouse +16698,12298933,"TERMINAL",0,0,"5\t",,terminal_output +16699,12299792,"genie.py",10111,92,"",python,content +16700,12299908,"genie.py",10119,0,"",python,selection_command +16701,12299979,"TERMINAL",0,0,"6\t",,terminal_output +16702,12300116,"genie.py",10188,0,"",python,selection_command +16703,12300652,"genie.py",10180,76,"",python,content +16704,12300772,"genie.py",10111,0,"",python,selection_command +16705,12300906,"genie.py",10075,0,"",python,selection_command +16706,12300967,"TERMINAL",0,0,"7\t",,terminal_output +16707,12301065,"genie.py",10040,0,"",python,selection_command +16708,12301189,"genie.py",9950,0,"",python,selection_command +16709,12301318,"genie.py",9868,0,"",python,selection_command +16710,12301443,"genie.py",9776,0,"",python,selection_command +16711,12301576,"genie.py",9676,0,"",python,selection_command +16712,12301708,"genie.py",9604,0,"",python,selection_command +16713,12301833,"genie.py",9546,0,"",python,selection_command 
+16714,12301985,"genie.py",9532,0,"",python,selection_command +16715,12301997,"TERMINAL",0,0,"8\t",,terminal_output +16716,12302408,"genie.py",9546,0,"",python,selection_command +16717,12302562,"genie.py",9604,0,"",python,selection_command +16718,12302730,"genie.py",9676,0,"",python,selection_command +16719,12303023,"TERMINAL",0,0,"9\t",,terminal_output +16720,12304060,"TERMINAL",0,0,"40\t",,terminal_output +16721,12305348,"genie.py",7786,0,"",python,selection_mouse +16722,12306255,"TERMINAL",0,0,"16",,terminal_output +16723,12307379,"TERMINAL",0,0,"45",,terminal_output +16724,12307880,"genie.py",8725,0,"",python,selection_command +16725,12308389,"genie.py",8797,0,"",python,selection_command +16726,12308413,"TERMINAL",0,0,"5\t",,terminal_output +16727,12308535,"genie.py",9107,0,"",python,selection_command +16728,12309046,"genie.py",9195,0,"",python,selection_command +16729,12309330,"TERMINAL",0,0,"6\t",,terminal_output +16730,12309586,"genie.py",9878,0,"",python,selection_command +16731,12310162,"genie.py",9960,0,"",python,selection_command +16732,12310325,"genie.py",8725,0,"",python,selection_command +16733,12310408,"TERMINAL",0,0,"7\t",,terminal_output +16734,12310930,"genie.py",8797,0,"",python,selection_command +16735,12311071,"genie.py",9107,0,"",python,selection_command +16736,12311226,"genie.py",9195,0,"",python,selection_command +16737,12311349,"genie.py",9878,0,"",python,selection_command +16738,12311456,"TERMINAL",0,0,"8\t",,terminal_output +16739,12312526,"TERMINAL",0,0,"9\t",,terminal_output +16740,12313487,"TERMINAL",0,0,"50\t",,terminal_output +16741,12314526,"TERMINAL",0,0,"1\t",,terminal_output +16742,12315715,"TERMINAL",0,0,"2\t",,terminal_output +16743,12316603,"TERMINAL",0,0,"3\t",,terminal_output +16744,12317709,"TERMINAL",0,0,"4\t",,terminal_output +16745,12318663,"TERMINAL",0,0,"5\t",,terminal_output +16746,12319747,"TERMINAL",0,0,"6\t",,terminal_output +16747,12320772,"TERMINAL",0,0,"7\t",,terminal_output +16748,12321770,"TERMINAL",0,0,"8\t",,terminal_output +16749,12322923,"TERMINAL",0,0,"9\t",,terminal_output +16750,12323947,"TERMINAL",0,0,"1:00\t",,terminal_output +16751,12324970,"TERMINAL",0,0,"1\t",,terminal_output +16752,12325995,"TERMINAL",0,0,"2\t",,terminal_output +16753,12327019,"TERMINAL",0,0,"3\t",,terminal_output +16754,12328039,"TERMINAL",0,0,"4\t",,terminal_output +16755,12329114,"TERMINAL",0,0,"5\t",,terminal_output +16756,12330125,"TERMINAL",0,0,"6\t",,terminal_output +16757,12331218,"TERMINAL",0,0,"8\t",,terminal_output +16758,12332193,"TERMINAL",0,0,"9\t",,terminal_output +16759,12333264,"TERMINAL",0,0,"10\t",,terminal_output +16760,12334289,"TERMINAL",0,0,"1\t",,terminal_output +16761,12335416,"TERMINAL",0,0,"2\t",,terminal_output +16762,12336446,"TERMINAL",0,0,"3\t",,terminal_output +16763,12337464,"TERMINAL",0,0,"4\t",,terminal_output +16764,12338458,"TERMINAL",0,0,"5\t",,terminal_output +16765,12339481,"TERMINAL",0,0,"6\t",,terminal_output +16766,12340520,"TERMINAL",0,0,"7\t",,terminal_output +16767,12341561,"TERMINAL",0,0,"8\t",,terminal_output +16768,12342686,"TERMINAL",0,0,"9\t",,terminal_output +16769,12343711,"TERMINAL",0,0,"20\t",,terminal_output +16770,12344734,"TERMINAL",0,0,"1\t",,terminal_output +16771,12345758,"TERMINAL",0,0,"2\t",,terminal_output +16772,12345973,"TERMINAL",0,0,"[?25lc[?25h",,terminal_output +16773,12346036,"TERMINAL",0,0,"[?25ll[?25h",,terminal_output +16774,12346181,"TERMINAL",0,0,"[?25le[?25h",,terminal_output +16775,12346326,"TERMINAL",0,0,"[?25la[?25h",,terminal_output 
+16776,12346489,"TERMINAL",0,0,"[?25lr[?25h",,terminal_output +16777,12346712,"TERMINAL",0,0,"\r\n[?2004l\r]0;tum_cte0515@hkn0901:~/Projects/jafar[?2004h(jafar) [tum_cte0515@hkn0901 jafar]$ ",,terminal_output +16778,12346754,"TERMINAL",0,0,"3\t",,terminal_output +16779,12347648,"TERMINAL",0,0,"watch",,terminal_focus +16780,12347792,"TERMINAL",0,0,"4\t",,terminal_output +16781,12348436,"TERMINAL",0,0,"[?1049l\r[?1l>]0;tum_cte0515@hkn1993:~/Projects/jafar/scripts_horeka/batchsize_scaling/adjusted_lr]633;D;0]633;P;Cwd=/home/hk-project-p0023960/tum_cte0515/Projects/jafar/scripts_horeka/batchsize_scaling/adjusted_lr",,terminal_output +16782,12349329,"TERMINAL",0,0,"queue",,terminal_command +16783,12349419,"TERMINAL",0,0,"]633;E;2025-07-03 19:41:26 queue;70fb23be-3987-4c7c-890b-2d8e1f267fe1]633;C[?1049h(B[?7hEvery 1.0s: squeue --mehkn1993.localdomain: Thu Jul 3 19:41:26 2025JOBID PARTITION NAME USER ST\tTIME NODES NODELIST(REASON)3316026 accelerat train_to tum_cte0 PD\t0:00\t 4 (Priority)3316022 accelerat train_to tum_cte0 PD\t0:00\t 2 (Priority)3316020 accelerat train_to tum_cte0 PD\t0:00 12 (Priority)3316019 accelerat train_to tum_cte0 PD\t0:00\t 8 (Priority)3316018 accelerat train_to tum_cte0 PD\t0:00\t 4 (Priority)3316016 accelerat train_to tum_cte0 PD\t0:00\t 1 (Priority)3316017 accelerat train_to tum_cte0 PD\t0:00\t 2 (Priority)3316038 dev_accel interact tum_cte0 R23:59\t 1 hkn0901",,terminal_output +16784,12350280,"TERMINAL",0,0,"[?1049l\r[?1l>]0;tum_cte0515@hkn1993:~/Projects/jafar/scripts_horeka/batchsize_scaling/adjusted_lr]633;D;0",,terminal_output +16785,12350878,"TERMINAL",0,0,"srun",,terminal_focus +16786,12361376,"TERMINAL",0,0,"clear",,terminal_output +16787,12362136,"TERMINAL",0,0,"sh scripts_horeka/overfit_sample_tiny/sample.sh ",,terminal_output +16788,12362999,"TERMINAL",0,0,"[?25l[?2004l\r[?25h",,terminal_output +16789,12363137,"TERMINAL",0,0,"Sampling from checkpoint: /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/3309699/genie_1751384516_74500/\r\n",,terminal_output +16790,12366137,"TERMINAL",0,0,"2025-07-03 19:41:42.968899: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +16791,12379038,"TERMINAL",0,0,"2025-07-03 19:41:55.877408: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +16792,12389446,"TERMINAL",0,0,"2025-07-03 19:42:06.346672: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +16793,12402489,"TERMINAL",0,0,"2025-07-03 19:42:19.338794: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. 
Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +16794,12405151,"TERMINAL",0,0,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/orbax/checkpoint/_src/serialization/type_handlers.py:1251: UserWarning: Sharding info not provided when restoring. Populating sharding info from sharding file. Please note restoration time will be slightly increased due to reading from file. Note also that this option is unsafe when restoring on a different topology than the checkpoint was saved with.\r\n warnings.warn(\r\n",,terminal_output +16795,12413482,"TERMINAL",0,0,"2025-07-03 19:42:30.382368: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +16796,12416680,"TERMINAL",0,0,"2025-07-03 19:42:33.580346: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +16797,12423277,"TERMINAL",0,0,"2025-07-03 19:42:40.101290: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +16798,12425325,"TERMINAL",0,0,"2025-07-03 19:42:42.222223: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. 
Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +16799,12426972,"TERMINAL",0,0,"token_idxs shape: (1, 1, 920)\r\n",,terminal_output +16800,12427191,"TERMINAL",0,0,"sampling frame 1\r\n",,terminal_output +16801,12433753,"TERMINAL",0,0,"sampling frame 2\r\n",,terminal_output +16802,12437307,"TERMINAL",0,0,"sampling frame 3\r\n",,terminal_output +16803,12440862,"TERMINAL",0,0,"sampling frame 4\r\n",,terminal_output +16804,12444475,"TERMINAL",0,0,"sampling frame 5\r\n",,terminal_output +16805,12447982,"TERMINAL",0,0,"sampling frame 6\r\n",,terminal_output +16806,12451334,"TERMINAL",0,0,"sampling frame 7\r\n",,terminal_output +16807,12454918,"TERMINAL",0,0,"sampling frame 8\r\n",,terminal_output +16808,12458263,"TERMINAL",0,0,"sampling frame 9\r\n",,terminal_output +16809,12461881,"TERMINAL",0,0,"sampling frame 10\r\n",,terminal_output +16810,12465363,"TERMINAL",0,0,"sampling frame 11\r\n",,terminal_output +16811,12468844,"TERMINAL",0,0,"sampling frame 12\r\n",,terminal_output +16812,12472521,"TERMINAL",0,0,"sampling frame 13\r\n",,terminal_output +16813,12476115,"TERMINAL",0,0,"sampling frame 14\r\n",,terminal_output +16814,12479598,"TERMINAL",0,0,"sampling frame 15\r\n",,terminal_output +16815,12491158,"TERMINAL",0,0,"SSIM: 0.9997087121009827\r\n",,terminal_output +16816,12493462,"TERMINAL",0,0,"]0;tum_cte0515@hkn0901:~/Projects/jafar[?2004h(jafar) [tum_cte0515@hkn0901 jafar]$ ",,terminal_output +16817,12514682,"TERMINAL",0,0,"c",,terminal_output +16818,12514761,"TERMINAL",0,0,"[?25ll[?25h",,terminal_output +16819,12514825,"TERMINAL",0,0,"[?25le[?25h",,terminal_output +16820,12514948,"TERMINAL",0,0,"[?25la[?25h",,terminal_output +16821,12515296,"TERMINAL",0,0,"[?25lr[?25h",,terminal_output +16822,12515417,"TERMINAL",0,0,"[?25l[?2004l\r]0;tum_cte0515@hkn0901:~/Projects/jafar[?2004h(jafar) [tum_cte0515@hkn0901 jafar]$ [?25h",,terminal_output +16823,12518879,"TERMINAL",0,0,"\r(jafar) [tum_cte0515@hkn0901 jafar]$ \r(jafar) [tum_cte0515@hkn0901 jafar]$ ",,terminal_output +16824,12521009,"TERMINAL",0,0,"#",,terminal_output +16825,12521267,"TERMINAL",0,0,"[?25l [?25h",,terminal_output +16826,12521606,"TERMINAL",0,0,"[?25le[?25h",,terminal_output +16827,12522046,"TERMINAL",0,0,"[?25ls[?25h",,terminal_output +16828,12522163,"TERMINAL",0,0,"[?25la[?25h",,terminal_output +16829,12522339,"TERMINAL",0,0,"[?25lm[?25h",,terminal_output +16830,12522849,"TERMINAL",0,0,"[?25lp[?25h",,terminal_output +16831,12522976,"TERMINAL",0,0,"[?25ll[?25h",,terminal_output +16832,12523251,"TERMINAL",0,0,"[?25li[?25h",,terminal_output +16833,12523357,"TERMINAL",0,0,"[?25lg[?25h",,terminal_output +16834,12523858,"TERMINAL",0,0,"[?25ln[?25h",,terminal_output +16835,12523911,"TERMINAL",0,0,"[?25lg[?25h",,terminal_output +16836,12523973,"TERMINAL",0,0,"[?25l [?25h",,terminal_output +16837,12524174,"TERMINAL",0,0,"[?25lf[?25h",,terminal_output +16838,12524257,"TERMINAL",0,0,"[?25lu[?25h",,terminal_output +16839,12524363,"TERMINAL",0,0,"[?25ln[?25h",,terminal_output +16840,12524728,"TERMINAL",0,0,"[?25lk[?25h",,terminal_output +16841,12524905,"TERMINAL",0,0,"[?25lt[?25h",,terminal_output +16842,12524968,"TERMINAL",0,0,"[?25li[?25h",,terminal_output +16843,12525164,"TERMINAL",0,0,"[?25lo[?25h",,terminal_output +16844,12525329,"TERMINAL",0,0,"[?25ln[?25h",,terminal_output +16845,12525463,"TERMINAL",0,0,"[?25li[?25h[?25le[?25h",,terminal_output +16846,12525626,"TERMINAL",0,0,"[?25lr[?25h",,terminal_output 
+16847,12525830,"TERMINAL",0,0,"[?25lt[?25h",,terminal_output +16848,12526060,"TERMINAL",0,0,"[?25l![?25h",,terminal_output +16849,12526362,"TERMINAL",0,0,"[?25l[?2004l\r]0;tum_cte0515@hkn0901:~/Projects/jafar[?2004h(jafar) [tum_cte0515@hkn0901 jafar]$ [?25h",,terminal_output +16850,12770024,"genie.py",0,0,"",python,tab +16851,12784689,"sample.py",0,0,"from dataclasses import dataclass\nimport time\nimport os\n\nimport dm_pix as pix\nimport einops\nimport jax\nimport jax.numpy as jnp\nimport numpy as np\nfrom orbax.checkpoint import PyTreeCheckpointer\nfrom PIL import Image, ImageDraw\nimport tyro\n\nfrom genie import Genie\n# from utils.dataloader import get_dataloader\n\n\n@dataclass\nclass Args:\n # Experiment\n seed: int = 0\n seq_len: int = 16\n image_channels: int = 3\n image_height: int = 90\n image_width: int = 160\n data_dir: str = ""data/coinrun_episodes""\n checkpoint: str = """"\n # Sampling\n batch_size: int = 1\n maskgit_steps: int = 25\n temperature: float = 1.0\n sample_argmax: bool = True\n start_frame: int = 0\n # Tokenizer checkpoint\n tokenizer_dim: int = 512\n latent_patch_dim: int = 32\n num_patch_latents: int = 1024\n patch_size: int = 4\n tokenizer_num_blocks: int = 8\n tokenizer_num_heads: int = 8\n # LAM checkpoint\n lam_dim: int = 512\n latent_action_dim: int = 32\n num_latent_actions: int = 6\n lam_patch_size: int = 16\n lam_num_blocks: int = 8\n lam_num_heads: int = 8\n # Dynamics checkpoint\n dyna_dim: int = 512\n dyna_num_blocks: int = 12\n dyna_num_heads: int = 8\n\n\nargs = tyro.cli(Args)\nrng = jax.random.PRNGKey(args.seed)\n\n# --- Load Genie checkpoint ---\ngenie = Genie(\n # Tokenizer\n in_dim=args.image_channels,\n tokenizer_dim=args.tokenizer_dim,\n latent_patch_dim=args.latent_patch_dim,\n num_patch_latents=args.num_patch_latents,\n patch_size=args.patch_size,\n tokenizer_num_blocks=args.tokenizer_num_blocks,\n tokenizer_num_heads=args.tokenizer_num_heads,\n # LAM\n lam_dim=args.lam_dim,\n latent_action_dim=args.latent_action_dim,\n num_latent_actions=args.num_latent_actions,\n lam_patch_size=args.lam_patch_size,\n lam_num_blocks=args.lam_num_blocks,\n lam_num_heads=args.lam_num_heads,\n # Dynamics\n dyna_dim=args.dyna_dim,\n dyna_num_blocks=args.dyna_num_blocks,\n dyna_num_heads=args.dyna_num_heads,\n)\nrng, _rng = jax.random.split(rng)\nimage_shape = (args.image_height, args.image_width, args.image_channels)\ndummy_inputs = dict(\n videos=jnp.zeros((args.batch_size, args.seq_len, *image_shape), dtype=jnp.float32),\n mask_rng=_rng,\n)\nrng, _rng = jax.random.split(rng)\nparams = genie.init(_rng, dummy_inputs)\nckpt = PyTreeCheckpointer().restore(args.checkpoint)[""model""][""params""][""params""]\nparams[""params""].update(ckpt)\n\n\n# --- Define autoregressive sampling loop ---\ndef _autoreg_sample(rng, video_batch, action_batch):\n vid = video_batch[:, : args.start_frame + 1]\n for frame_idx in range(args.start_frame + 1, args.seq_len):\n # --- Sample next frame ---\n print(""Frame"", frame_idx)\n start_time = time.time()\n rng, _rng = jax.random.split(rng)\n batch = dict(videos=vid, latent_actions=action_batch[:, :frame_idx], rng=_rng)\n new_frame = genie.apply(\n params,\n batch,\n args.maskgit_steps,\n args.temperature,\n args.sample_argmax,\n method=Genie.sample,\n )\n elapsed = time.time() - start_time\n print(f""Frame {frame_idx} sampling took {elapsed:.3f} seconds"")\n vid = jnp.concatenate([vid, new_frame], axis=1)\n return vid\n\n# --- Define autoregressive sampling loop ---\ndef _autoreg_sample_mihir(rng, video_batch, action_batch):\n 
vid = video_batch[:, : args.start_frame + 1]\n rng, _rng = jax.random.split(rng)\n batch = dict(videos=vid, latent_actions=action_batch, rng=_rng)\n generated_vid = genie.apply(\n params,\n batch,\n args.seq_len,\n args.maskgit_steps,\n args.temperature,\n args.sample_argmax,\n method=Genie.sample_mihir,\n )\n return generated_vid\n\n\n\n# --- Get video + latent actions ---\n# tfrecord_files = [\n # os.path.join(args.data_dir, x)\n # for x in os.listdir(args.data_dir)\n # if x.endswith("".tfrecord"")\n# ]\n# dataloader = get_dataloader(\n # tfrecord_files,\n # args.seq_len,\n # args.batch_size,\n # args.image_height,\n # args.image_width,\n # args.image_channels,\n # seed=args.seed,\n# )\n# video_batch = next(iter(dataloader))\nvideo_batch = np.load(""overfit_dir/single_sample_corner.npy"")\n# Get latent actions from first video only\nfirst_video = video_batch[:1,:args.seq_len]\nbatch = dict(videos=first_video)\naction_batch = genie.apply(params, batch, False, method=Genie.vq_encode)\naction_batch = action_batch.reshape(1, args.seq_len - 1, 1)\n# Use actions from first video for all videos\naction_batch = jnp.repeat(action_batch, video_batch.shape[0], axis=0)\n\n# --- Sample + evaluate video ---\nvid = _autoreg_sample_mihir(rng, video_batch, action_batch)\ngt = video_batch[:, : vid.shape[1]].clip(0, 1).reshape(-1, *video_batch.shape[2:])\nrecon = vid.clip(0, 1).reshape(-1, *vid.shape[2:])\nssim = pix.ssim(gt[:, args.start_frame + 1 :], recon[:, args.start_frame + 1 :]).mean()\nprint(f""SSIM: {ssim}"")\n\n# --- Construct video ---\nfirst_true = (video_batch[0:1] * 255).astype(np.uint8)\nfirst_pred = (vid[0:1] * 255).astype(np.uint8)\nfirst_video_comparison = np.zeros((2, *vid.shape[1:5]), dtype=np.uint8)\nfirst_video_comparison[0] = first_true[:, : vid.shape[1]]\nfirst_video_comparison[1] = first_pred\n# For other videos, only show generated video\nother_preds = (vid[1:] * 255).astype(np.uint8)\nall_frames = np.concatenate([first_video_comparison, other_preds], axis=0)\nflat_vid = einops.rearrange(all_frames, ""n t h w c -> t h (n w) c"")\n\n# --- Save video ---\nimgs = [Image.fromarray(img) for img in flat_vid]\n# Write actions on each frame\nfor img, action in zip(imgs[1:], action_batch[0, :, 0]):\n d = ImageDraw.Draw(img)\n d.text((2, 2), f""{action}"", fill=255)\nimgs[0].save(\n f""generation_{time.time()}.gif"",\n save_all=True,\n append_images=imgs[1:],\n duration=250,\n loop=0,\n)\n",python,tab diff --git a/927a8af5474e5654810c00ce2e09fd2de87d3e5722f33fa1090d867db114e403/crowd-code-bfc58ec4-bb8b-4c95-acb7-22cdc47c7cc81759255316787-2025_09_30-20.02.40.828/source.csv b/927a8af5474e5654810c00ce2e09fd2de87d3e5722f33fa1090d867db114e403/crowd-code-bfc58ec4-bb8b-4c95-acb7-22cdc47c7cc81759255316787-2025_09_30-20.02.40.828/source.csv new file mode 100644 index 0000000000000000000000000000000000000000..58f7c0d72ca1361e8a3c4c8396308a49bb1d0b78 --- /dev/null +++ b/927a8af5474e5654810c00ce2e09fd2de87d3e5722f33fa1090d867db114e403/crowd-code-bfc58ec4-bb8b-4c95-acb7-22cdc47c7cc81759255316787-2025_09_30-20.02.40.828/source.csv @@ -0,0 +1,1152 @@ +Sequence,Time,File,RangeOffset,RangeLength,Text,Language,Type +1,4,"slurm/jobs/alfred/berlin/coinrun/w_val/lam_jasmine_default.sbatch",0,0,"#!/usr/bin/env bash\n\n#SBATCH --nodes=1\n#SBATCH --ntasks-per-node=1 \n#SBATCH --gres=gpu:1\n#SBATCH --time=1-00:00:00\n#SBATCH --cpus-per-task=5\n#SBATCH --output=/fast/project/HFMI_SynergyUnit/jafar_ws/logs/ali/coinrun/lam/%x_%j.log\n#SBATCH 
--error=/fast/project/HFMI_SynergyUnit/jafar_ws/logs/ali/coinrun/lam/%x_%j.log\n#SBATCH --job-name=lam_coinrun_jasmine_default\n#SBATCH --requeue\n#SBATCH --signal=b:usr1@300 # 5 min before timeout\n\n# --- signal trap to requeue job before timeout ---\nrequeue_job() {\n echo ""[$(date)] caught sigusr1 (timeout warning), requeueing slurm job $SLURM_JOB_ID...""\n # optional: trigger checkpoint saving here\n # e.g., touch $checkpoint_dir/requeue_trigger\n scontrol requeue $SLURM_JOB_ID\n exit 0\n}\n\ntrap requeue_job sigusr1\n\n# set checkpoint flag based on restart count\nrestart_count=$(scontrol show job $SLURM_JOB_ID | grep -o 'Restarts=[0-9]*' | cut -d'=' -f2)\n\nif [ $restart_count -eq 0 ]; then\n restore_ckpt_flag=""--no-restore-ckpt""\nelse\n restore_ckpt_flag=""--restore-ckpt""\nfi\n\n# Log the sbatch script\ncat $0\n\nsource .venv/bin/activate\n\njob_name=$SLURM_JOB_NAME\nslurm_job_id=$SLURM_JOB_ID\n\ntags=""coinrun with_val lam baseline 500m_dataset""\n\narray_records_dir=""/fast/project/HFMI_SynergyUnit/jafar_ws/data/coinrun/array_records_500m""\n\nCHECKPOINT_DIR=""/fast/project/HFMI_SynergyUnit/jafar_ws/checkpoints/coinrun/lam/${job_name}_${slurm_job_id}""\nmkdir -p $CHECKPOINT_DIR\n\nenv | grep SLURM\n\nsrun python jasmine/train_lam_full_prec.py \\n --save_ckpt \\n $restore_ckpt_flag \\n --wandb_id $SLURM_JOB_ID \\n --ckpt_dir $CHECKPOINT_DIR \\n --batch_size=36 \\n --image_height=64 \\n --image_width=64 \\n --lr_schedule cos \\n --init_lr=3e-6 \\n --max_lr=3e-5 \\n --decay_end=0 \\n --log_image_interval=1000 \\n --log_checkpoint_interval=1000 \\n --val_interval=1000 \\n --log \\n --name=""${job_name}_${slurm_job_id}"" \\n --tags $tags \\n --entity instant-uv \\n --project jafar \\n --no_use_flash_attention \\n --data_dir=""${array_records_dir}/train"" \\n --val_data_dir=""${array_records_dir}/val"" &\n\nchild_pid=$!\n\nwait $child_pid\n\n\n",shellscript,tab +2,562,"extension-output-pdoom-org.crowd-code-#1-crowd-code",0,0,"8:02:40 PM [info] Activating crowd-code\n8:02:40 PM [info] Recording started\n8:02:40 PM [info] Initializing git provider using file system watchers...\n",Log,tab +3,773,"extension-output-pdoom-org.crowd-code-#1-crowd-code",150,0,"8:02:41 PM [info] Git repository found\n8:02:41 PM [info] Git provider initialized successfully\n8:02:41 PM [info] Initial git state: [object Object]\n",Log,content +4,21733,"slurm/jobs/alfred/berlin/coinrun/w_val/lam_jasmine_default.sbatch",0,0,"",shellscript,tab +5,22383,"TERMINAL",0,0,"bash",,terminal_focus +6,27552,"TERMINAL",0,0,"cd slurm/",,terminal_command +7,27594,"TERMINAL",0,0,"]633;C]0;tum_cte0515@hkn1990:~/Projects/jasmine/slurm",,terminal_output +8,29231,"TERMINAL",0,0,"git pull",,terminal_command +9,29247,"TERMINAL",0,0,"]633;C",,terminal_output +10,31138,"TERMINAL",0,0,"remote: Enumerating objects: 116, done.\r\nremote: Counting objects: 0% (1/116)\rremote: Counting objects: 1% (2/116)\rremote: Counting objects: 2% (3/116)\rremote: Counting objects: 3% (4/116)\rremote: Counting objects: 4% (5/116)\rremote: Counting objects: 5% (6/116)\rremote: Counting objects: 6% (7/116)\rremote: Counting objects: 7% (9/116)\rremote: Counting objects: 8% (10/116)\rremote: Counting objects: 9% (11/116)\rremote: Counting objects: 10% (12/116)\rremote: Counting objects: 11% (13/116)\rremote: Counting objects: 12% (14/116)\rremote: Counting objects: 13% (16/116)\rremote: Counting objects: 14% (17/116)\rremote: Counting objects: 15% (18/116)\rremote: Counting objects: 16% (19/116)\rremote: Counting objects: 17% (20/116)\rremote: Counting 
objects: 100% (116/116), done.\r\n",,terminal_output
+11,31241,"TERMINAL",0,0,"remote: Compressing objects: 100% (60/60), done.\r\nremote: Total 116 (delta 65), reused 103 (delta 52), pack-reused 0 (from 0)\r\n",,terminal_output
+12,31464,"TERMINAL",0,0,"Receiving objects: 100% (116/116), 13.47 KiB | 255.00 KiB/s, done.\r\nResolving deltas: 100% (65/65), completed with 11 local objects.\r\n",,terminal_output
+13,31757,"TERMINAL",0,0,"From github.com:p-doom/slurm\r\n 8ba64f0..cf160ad main -> origin/main\r\n",,terminal_output
+14,31927,"TERMINAL",0,0,"Updating 8ba64f0..cf160ad\r\n",,terminal_output
+15,32351,"TERMINAL",0,0,"Fast-forward\r\n",,terminal_output
+16,32452,"TERMINAL",0,0," .../berlin/dataset_investigation/gen_1.sbatch | 35 ++++++++\r\n .../berlin/dataset_investigation/gen_2.sbatch | 35 ++++++++\r\n .../test_exposure_bias/dynamics_causal.sbatch | 81 +++++++++++++++++++\r\n .../dynamics_causal_no_noise.sbatch | 82 +++++++++++++++++++\r\n ...c_from_80k_nan_invest_requeue_every2h.sbatch | 2 +-\r\n ...from_80k_nan_invest_requeue_every_10h.sbatch | 2 +-\r\n .../test_exposure_bias/dynamics_maskgit.sbatch | 81 +++++++++++++++++++\r\n .../dynamics_maskgit_no_noise.sbatch | 82 +++++++++++++++++++\r\n .../dynamics_maskgit_no_noise_from_main.sbatch | 81 +++++++++++++++++++\r\n ...skgit_no_noise_from_main_w_full_frame.sbatch | 82 +++++++++++++++++++\r\n .../test_exposure_bias/sample_180k.sbatch | 28 +++++++\r\n .../test_exposure_bias/sample_causal.sbatch | 27 +++++++\r\n .../sample_causal_no_noise.sbatch | 27 +++++++\r\n .../test_exposure_bias/sample_maskgit.sbatch | 27 +++++++\r\n .../sample_maskgit_no_noise.sbatch | 27 +++++++\r\n .../test_exposure_bias_climber/sample.sbatch | 27 +++++++\r\n .../coinrun/sample/causal/sample_causal.sh | 27 +++++++\r\n .../sample/causal/sample_noised_causal.sh | 27 +++++++\r\n .../sample_dynamics_from_140k_tokenizer.sh | 30 +++++++\r\n ...ple_dynamics_from_fully_trained_tokenizer.sh | 30 +++++++\r\n dev/franz/placeholder | 0\r\n .../berlin/coinrun/1m_steps/sample.sbatch | 28 +++++++\r\n .../overfitting_test/500k_dataset/sample.sbatch | 27 +++++++\r\n .../overfitting_test/500m_dataset/sample.sbatch | 27 +++++++\r\n .../500m_dataset_climber/sample.sbatch | 27 +++++++\r\n .../workshop/jafar_default/dynamics.sbatch | 84 ++++++++++++++++++++\r\n .../workshop/jasmine_default/dynamics.sbatch | 84 ++++++++++++++++++++\r\n .../lam_full_precision_nan_invest.sbatch | 79 ++++++++++++++++++\r\n .../lam_mixed_precision_nan_invest.sbatch | 79 ++++++++++++++++++\r\n ...d_precision_nan_invest_jafar_cos_init.sbatch | 79 ++++++++++++++++++\r\n ...vest_jafar_cos_init_w_flash_attention.sbatch | 78 ++++++++++++++++++\r\n ...recision_nan_invest_w_flash_attention.sbatch | 78 ++++++++++++++++++\r\n .../franz/berlin/atari/data_generation/alien.sh | 16 ++++\r\n .../berlin/atari/data_generation/amidar.sh | 16 ++++\r\n .../berlin/atari/data_generation/assault.sh | 16 ++++\r\n .../berlin/atari/data_generation/asterix.sh | 16 ++++\r\n
.../berlin/atari/data_generation/bank_heist.sh | 16 ++++\r\n .../berlin/atari/data_generation/battle_zone.sh | 16 ++++\r\n .../berlin/atari/data_generation/boxing.sh | 16 ++++\r\n .../berlin/atari/data_generation/breakout.sh | 16 ++++\r\n .../atari/data_generation/chopper_command.sh | 16 ++++\r\n .../atari/data_generation/crazy_climber.sh | 16 ++++\r\n .../atari/data_generation/demon_attack.sh | 16 ++++\r\n jobs/franz/berlin/atari/data_generation/pong.sh | 16 ++++\r\n .../berlin/atari/data_generation/spawner.sh | 12 +++\r\n .../atari_breakout_data_gen.sh | 0\r\n .../atari_breakout_v4_data_gen.sh | 13 +++\r\n .../tokenizer/atari_breakout_tokenizer.sbatch | 52 ++++++++++++\r\n .../mila_submission/coinrun_dynamics_base.sh | 71 +++++++++++++++++\r\n .../coinrun/mila_submission/coinrun_lam_base.sh | 64 +++++++++++++++\r\n .../coinrun_lam_no_flash_attention.sh | 65 +++++++++++++++\r\n .../mila_submission/coinrun_tokenizer_base.sh | 64 +++++++++++++++\r\n 52 files changed, 2041 insertions(+), 2 deletions(-)\r\n create mode 100644 dev/alfred/berlin/dataset_investigation/gen_1.sbatch\r\n create mode 100644 dev/alfred/berlin/dataset_investigation/gen_2.sbatch\r\n create mode 100644 dev/alfred/berlin/test_exposure_bias/dynamics_causal.sbatch\r\n create mode 100644 dev/alfred/berlin/test_exposure_bias/dynamics_causal_no_noise.sbatch\r\n create mode 100644 dev/alfred/berlin/test_exposure_bias/dynamics_maskgit.sbatch\r\n create mode 100644 dev/alfred/berlin/test_exposure_bias/dynamics_maskgit_no_noise.sbatch\r\n create mode 100644 dev/alfred/berlin/test_exposure_bias/dynamics_maskgit_no_noise_from_main.sbatch\r\n create mode 100644 dev/alfred/berlin/test_exposure_bias/dynamics_maskgit_no_noise_from_main_w_full_frame.sbatch\r\n create mode 100644 dev/alfred/berlin/test_exposure_bias/sample_180k.sbatch\r\n create mode 100644 dev/alfred/berlin/test_exposure_bias/sample_causal.sbatch\r\n create mode 100644 dev/alfred/berlin/test_exposure_bias/sample_causal_no_noise.sbatch\r\n create mode 100644 dev/alfred/berlin/test_exposure_bias/sample_maskgit.sbatch\r\n create mode 100644 dev/alfred/berlin/test_exposure_bias/sample_maskgit_no_noise.sbatch\r\n create mode 100644 dev/alfred/berlin/test_exposure_bias_climber/sample.sbatch\r\n create mode 100644 dev/franz/berlin/coinrun/sample/causal/sample_causal.sh\r\n create mode 100644 dev/franz/berlin/coinrun/sample/causal/sample_noised_causal.sh\r\n create mode 100644 dev/franz/berlin/coinrun/sample/maskgit/sample_dynamics_from_140k_tokenizer.sh\r\n create mode 100644 dev/franz/berlin/coinrun/sample/maskgit/sample_dynamics_from_fully_trained_tokenizer.sh\r\n delete mode 100644 dev/franz/placeholder\r\n create mode 100644 jobs/alfred/berlin/coinrun/1m_steps/sample.sbatch\r\n create mode 100644 jobs/alfred/berlin/coinrun/overfitting_test/500k_dataset/sample.sbatch\r\n create mode 100644 jobs/alfred/berlin/coinrun/overfitting_test/500m_dataset/sample.sbatch\r\n create mode 100644 jobs/alfred/berlin/coinrun/overfitting_test/500m_dataset_climber/sample.sbatch\r\n create mode 100644 jobs/alfred/berlin/workshop/jafar_default/dynamics.sbatch\r\n create mode 100644 jobs/alfred/berlin/workshop/jasmine_default/dynamics.sbatch\r\n create mode 100644 jobs/alfred/berlin/workshop/lam_mixed_precision_nan/lam_full_precision_nan_invest.sbatch\r\n create mode 100644 jobs/alfred/berlin/workshop/lam_mixed_precision_nan/lam_mixed_precision_nan_invest.sbatch\r\n create mode 100644 jobs/alfred/berlin/workshop/lam_mixed_precision_nan/lam_mixed_precision_nan_invest_jafar_cos_init.sbatch\r\n 
create mode 100644 jobs/alfred/berlin/workshop/lam_mixed_precision_nan/lam_mixed_precision_nan_invest_jafar_cos_init_w_flash_attention.sbatch\r\n create mode 100644 jobs/alfred/berlin/workshop/lam_mixed_precision_nan/lam_mixed_precision_nan_invest_w_flash_attention.sbatch\r\n create mode 100644 jobs/franz/berlin/atari/data_generation/alien.sh\r\n create mode 100644 jobs/franz/berlin/atari/data_generation/amidar.sh\r\n create mode 100644 jobs/franz/berlin/atari/data_generation/assault.sh\r\n create mode 100644 jobs/franz/berlin/atari/data_generation/asterix.sh\r\n create mode 100644 jobs/franz/berlin/atari/data_generation/bank_heist.sh\r\n create mode 100644 jobs/franz/berlin/atari/data_generation/battle_zone.sh\r\n create mode 100644 jobs/franz/berlin/atari/data_generation/boxing.sh\r\n create mode 100644 jobs/franz/berlin/atari/data_generation/breakout.sh\r\n create mode 100644 jobs/franz/berlin/atari/data_generation/chopper_command.sh\r\n create mode 100644 jobs/franz/berlin/atari/data_generation/crazy_climber.sh\r\n create mode 100644 jobs/franz/berlin/atari/data_generation/demon_attack.sh\r\n create mode 100644 jobs/franz/berlin/atari/data_generation/pong.sh\r\n create mode 100644 jobs/franz/berlin/atari/data_generation/spawner.sh\r\n rename jobs/franz/berlin/atari/{ => legacy_cleanrl_data_generation}/atari_breakout_data_gen.sh (100%)\r\n create mode 100644 jobs/franz/berlin/atari/legacy_cleanrl_data_generation/atari_breakout_v4_data_gen.sh\r\n create mode 100644 jobs/franz/berlin/atari/tokenizer/atari_breakout_tokenizer.sbatch\r\n create mode 100644 jobs/franz/berlin/coinrun/mila_submission/coinrun_dynamics_base.sh\r\n create mode 100644 jobs/franz/berlin/coinrun/mila_submission/coinrun_lam_base.sh\r\n create mode 100644 jobs/franz/berlin/coinrun/mila_submission/coinrun_lam_no_flash_attention.sh\r\n create mode 100644 jobs/franz/berlin/coinrun/mila_submission/coinrun_tokenizer_base.sh\r\n]0;tum_cte0515@hkn1990:~/Projects/jasmine/slurm",,terminal_output +17,36377,"slurm/jobs/franz/berlin/coinrun/mila_submission/coinrun_lam_base.sh",0,0,"#!/usr/bin/env bash\n\n#SBATCH --nodes=1\n#SBATCH --ntasks-per-node=1\n#SBATCH --gres=gpu:1\n#SBATCH --time=24:00:00\n#SBATCH --cpus-per-task=8\n#SBATCH --output=/fast/project/HFMI_SynergyUnit/jafar_ws/logs/franz/coinrun/lam/%x_%j.log\n#SBATCH --error=/fast/project/HFMI_SynergyUnit/jafar_ws/logs/franz/coinrun/lam/%x_%j.log\n#SBATCH --job-name=lam_coinrun_mila_submission\n#SBATCH --requeue\n#SBATCH --signal=b:usr1@300 # 5 min before timeout\n\n# --- signal trap to requeue job before timeout ---\nrequeue_job() {\n echo ""[$(date)] caught sigusr1 (timeout warning), requeueing slurm job $SLURM_JOB_ID...""\n # optional: trigger checkpoint saving here\n # e.g., touch $checkpoint_dir/requeue_trigger\n scontrol requeue $SLURM_JOB_ID\n exit 0\n}\n\ntrap requeue_job sigusr1\n\n# set checkpoint flag based on restart count\nrestart_count=$(scontrol show job $SLURM_JOB_ID | grep -o 'Restarts=[0-9]*' | cut -d'=' -f2)\n\nif [ $restart_count -eq 0 ]; then\n restore_ckpt_flag=""--no-restore-ckpt""\nelse\n restore_ckpt_flag=""--restore-ckpt""\nfi\n\n# Log the sbatch script\ncat $0\n\nsource .venv/bin/activate\n\njob_name=$SLURM_JOB_NAME\nslurm_job_id=$SLURM_JOB_ID\n\ntags=""coinrun lam 500m_dataset mila_submission""\n\narray_records_dir=""/fast/project/HFMI_SynergyUnit/jafar_ws/data/coinrun/array_records_500m_seed_w_increment""\nCHECKPOINT_DIR=""/fast/project/HFMI_SynergyUnit/jafar_ws/checkpoints/coinrun/lam/${job_name}_${slurm_job_id}""\nmkdir -p $CHECKPOINT_DIR\n\nenv 
| grep SLURM\n\nsrun python jasmine/train_lam.py \\n --save_ckpt \\n $restore_ckpt_flag \\n --wandb_id $SLURM_JOB_ID \\n --ckpt_dir $CHECKPOINT_DIR \\n --name=""${job_name}_${slurm_job_id}"" \\n --tags $tags \\n --entity instant-uv \\n --project jafar \\n --data_dir=""${array_records_dir}/train"" \\n --val_data_dir=""${array_records_dir}/val"" &\n\nchild_pid=$!\n\nwait $child_pid\n",shellscript,tab +18,40390,"slurm/jobs/alfred/berlin/coinrun/w_val/lam_jasmine_default.sbatch",0,0,"",shellscript,tab +19,42856,"slurm/jobs/franz/berlin/coinrun/mila_submission/coinrun_dynamics_base.sh",0,0,"#!/usr/bin/env bash\n\n#SBATCH --nodes=1\n#SBATCH --ntasks-per-node=1\n#SBATCH --time=24:00:00\n#SBATCH --cpus-per-task=8\n#SBATCH --gres=gpu:1\n#SBATCH --output=/fast/project/HFMI_SynergyUnit/jafar_ws/logs/franz/coinrun/dynamics/%x_%j.log\n#SBATCH --error=/fast/project/HFMI_SynergyUnit/jafar_ws/logs/franz/coinrun/dynamics/%x_%j.log\n#SBATCH --job-name=dynamics_coinrun_mila_submission\n#SBATCH --requeue\n#SBATCH --signal=b:usr1@300 # 5 min before timeout\n\n# --- signal trap to requeue job before timeout ---\nrequeue_job() {\n echo ""[$(date)] caught sigusr1 (timeout warning), requeueing slurm job $SLURM_JOB_ID...""\n # optional: trigger checkpoint saving here\n # e.g., touch $checkpoint_dir/requeue_trigger\n scontrol requeue $SLURM_JOB_ID\n exit 0\n}\n\ntrap requeue_job sigusr1\n\n# set checkpoint flag based on restart count\nrestart_count=$(scontrol show job $SLURM_JOB_ID | grep -o 'Restarts=[0-9]*' | cut -d'=' -f2)\n\nif [ $restart_count -eq 0 ]; then\n restore_ckpt_flag=""--no-restore-ckpt""\nelse\n restore_ckpt_flag=""--restore-ckpt""\nfi\n\n\n\n# Log the sbatch script\ncat $0\n\nsource .venv/bin/activate\n\njob_name=$SLURM_JOB_NAME\nslurm_job_id=$SLURM_JOB_ID\n\ntags=""coinrun dynamics 500m_dataset mila_submission""\n\narray_records_dir=""/fast/project/HFMI_SynergyUnit/jafar_ws/data/coinrun/array_records_500m_seed_w_increment""\ntokenizer_ckpt_dir=""/fast/project/HFMI_SynergyUnit/jafar_ws/checkpoints/coinrun/tokenizer/tokenizer_coinrun_500m_dataset_29490/""\nlam_ckpt_dir=""/fast/project/HFMI_SynergyUnit/jafar_ws/checkpoints/coinrun/lam/lam_coinrun_500m_dataset_29489""\nCHECKPOINT_DIR=""/fast/project/HFMI_SynergyUnit/jafar_ws/checkpoints/coinrun/dynamics/${job_name}/${slurm_job_id}""\nmkdir -p $CHECKPOINT_DIR\n\nenv | grep SLURM\n\n\nsrun python jasmine/train_dynamics.py \\n --save_ckpt \\n $restore_ckpt_flag \\n --wandb_id $SLURM_JOB_ID \\n --ckpt_dir $CHECKPOINT_DIR \\n --name=""${job_name}_${slurm_job_id}"" \\n --tags ${tags} \\n --entity instant-uv \\n --project jafar \\n --tokenizer_checkpoint=""${tokenizer_ckpt_dir}"" \\n --val_data_dir=""${array_records_dir}/val"" \\n --data_dir=""${array_records_dir}/train"" &\n\nchild_pid=$!\n\nwait $child_pid\n\n",shellscript,tab +20,183225,"slurm/jobs/franz/berlin/coinrun/mila_submission/coinrun_dynamics_base.sh",0,0,"",shellscript,tab +21,183229,"slurm/jobs/franz/berlin/coinrun/mila_submission/coinrun_dynamics_base.sh",1212,0,"",shellscript,selection_mouse +22,210940,"slurm/jobs/franz/berlin/coinrun/mila_submission/coinrun_dynamics_base.sh",0,0,"",shellscript,tab +23,222858,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_base.sh",0,0,"#!/usr/bin/env bash\n\n#SBATCH --nodes=1\n#SBATCH --ntasks-per-node=1\n#SBATCH --time=24:00:00\n#SBATCH --cpus-per-task=8\n#SBATCH --gres=gpu:1\n#SBATCH --output=/fast/project/HFMI_SynergyUnit/jafar_ws/logs/franz/coinrun/dynamics/%x_%j.log\n#SBATCH 
--error=/fast/project/HFMI_SynergyUnit/jafar_ws/logs/franz/coinrun/dynamics/%x_%j.log\n#SBATCH --job-name=dynamics_coinrun_mila_submission\n#SBATCH --requeue\n#SBATCH --signal=b:usr1@300 # 5 min before timeout\n\n# --- signal trap to requeue job before timeout ---\nrequeue_job() {\n echo ""[$(date)] caught sigusr1 (timeout warning), requeueing slurm job $SLURM_JOB_ID...""\n # optional: trigger checkpoint saving here\n # e.g., touch $checkpoint_dir/requeue_trigger\n scontrol requeue $SLURM_JOB_ID\n exit 0\n}\n\ntrap requeue_job sigusr1\n\n# set checkpoint flag based on restart count\nrestart_count=$(scontrol show job $SLURM_JOB_ID | grep -o 'Restarts=[0-9]*' | cut -d'=' -f2)\n\nif [ $restart_count -eq 0 ]; then\n restore_ckpt_flag=""--no-restore-ckpt""\nelse\n restore_ckpt_flag=""--restore-ckpt""\nfi\n\n\n\n# Log the sbatch script\ncat $0\n\nsource .venv/bin/activate\n\njob_name=$SLURM_JOB_NAME\nslurm_job_id=$SLURM_JOB_ID\n\ntags=""coinrun dynamics 500m_dataset mila_submission""\n\narray_records_dir=""/fast/project/HFMI_SynergyUnit/jafar_ws/data/coinrun/array_records_500m_seed_w_increment""\ntokenizer_ckpt_dir=""/fast/project/HFMI_SynergyUnit/jafar_ws/checkpoints/coinrun/tokenizer/tokenizer_coinrun_500m_dataset_29490/""\nlam_ckpt_dir=""/fast/project/HFMI_SynergyUnit/jafar_ws/checkpoints/coinrun/lam/lam_coinrun_500m_dataset_29489""\nCHECKPOINT_DIR=""/fast/project/HFMI_SynergyUnit/jafar_ws/checkpoints/coinrun/dynamics/${job_name}/${slurm_job_id}""\nmkdir -p $CHECKPOINT_DIR\n\nenv | grep SLURM\n\n\nsrun python jasmine/train_dynamics.py \\n --save_ckpt \\n $restore_ckpt_flag \\n --wandb_id $SLURM_JOB_ID \\n --ckpt_dir $CHECKPOINT_DIR \\n --name=""${job_name}_${slurm_job_id}"" \\n --tags ${tags} \\n --entity instant-uv \\n --project jafar \\n --tokenizer_checkpoint=""${tokenizer_ckpt_dir}"" \\n --val_data_dir=""${array_records_dir}/val"" \\n --data_dir=""${array_records_dir}/train"" &\n\nchild_pid=$!\n\nwait $child_pid\n\n",shellscript,tab +24,231024,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_ffn_dim_ablation.sh",0,0,"#!/usr/bin/env bash\n\n#SBATCH --nodes=1\n#SBATCH --ntasks-per-node=1\n#SBATCH --time=24:00:00\n#SBATCH --cpus-per-task=8\n#SBATCH --gres=gpu:1\n#SBATCH --output=/fast/project/HFMI_SynergyUnit/jafar_ws/logs/franz/coinrun/dynamics/%x_%j.log\n#SBATCH --error=/fast/project/HFMI_SynergyUnit/jafar_ws/logs/franz/coinrun/dynamics/%x_%j.log\n#SBATCH --job-name=dynamics_coinrun_mila_submission\n#SBATCH --requeue\n#SBATCH --signal=b:usr1@300 # 5 min before timeout\n\n# --- signal trap to requeue job before timeout ---\nrequeue_job() {\n echo ""[$(date)] caught sigusr1 (timeout warning), requeueing slurm job $SLURM_JOB_ID...""\n # optional: trigger checkpoint saving here\n # e.g., touch $checkpoint_dir/requeue_trigger\n scontrol requeue $SLURM_JOB_ID\n exit 0\n}\n\ntrap requeue_job sigusr1\n\n# set checkpoint flag based on restart count\nrestart_count=$(scontrol show job $SLURM_JOB_ID | grep -o 'Restarts=[0-9]*' | cut -d'=' -f2)\n\nif [ $restart_count -eq 0 ]; then\n restore_ckpt_flag=""--no-restore-ckpt""\nelse\n restore_ckpt_flag=""--restore-ckpt""\nfi\n\n\n\n# Log the sbatch script\ncat $0\n\nsource .venv/bin/activate\n\njob_name=$SLURM_JOB_NAME\nslurm_job_id=$SLURM_JOB_ID\n\ntags=""coinrun dynamics 500m_dataset 
mila_submission""\n\narray_records_dir=""/fast/project/HFMI_SynergyUnit/jafar_ws/data/coinrun/array_records_500m_seed_w_increment""\ntokenizer_ckpt_dir=""/fast/project/HFMI_SynergyUnit/jafar_ws/checkpoints/coinrun/tokenizer/tokenizer_coinrun_500m_dataset_29490/""\nlam_ckpt_dir=""/fast/project/HFMI_SynergyUnit/jafar_ws/checkpoints/coinrun/lam/lam_coinrun_500m_dataset_29489""\nCHECKPOINT_DIR=""/fast/project/HFMI_SynergyUnit/jafar_ws/checkpoints/coinrun/dynamics/${job_name}/${slurm_job_id}""\nmkdir -p $CHECKPOINT_DIR\n\nenv | grep SLURM\n\n\nsrun python jasmine/train_dynamics.py \\n --save_ckpt \\n $restore_ckpt_flag \\n --wandb_id $SLURM_JOB_ID \\n --ckpt_dir $CHECKPOINT_DIR \\n --name=""${job_name}_${slurm_job_id}"" \\n --tags ${tags} \\n --entity instant-uv \\n --project jafar \\n --tokenizer_checkpoint=""${tokenizer_ckpt_dir}"" \\n --val_data_dir=""${array_records_dir}/val"" \\n --data_dir=""${array_records_dir}/train"" &\n\nchild_pid=$!\n\nwait $child_pid\n\n",shellscript,tab +25,240704,"/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir/preprocess/coinrun/generate_coinrun_chunked_500m_3533473.log",0,0,"python: can't open file '/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/input_pipeline/generate_coinrun_dataset.py': [Errno 2] No such file or directory\n\n============================= JOB FEEDBACK =============================\n\nJob ID: 3533473\nCluster: hk\nUser/Group: tum_cte0515/hk-project-p0023960\nAccount: hk-project-p0023960\nState: FAILED (exit code 2)\nPartition: large\nNodes: 1\nCores per node: 16\nNodelist: hkn1901\nCPU Utilized: 00:00:00\nCPU Efficiency: 0.00% of 00:07:12 core-walltime\nJob Wall-clock time: 00:00:27\nStarttime: Tue Sep 30 15:23:50 2025\nEndtime: Tue Sep 30 15:24:17 2025\nMemory Utilized: 10.55 MB\nMemory Efficiency: 0.00% of 423.53 GB (26.47 GB/core)\nEnergy Consumed: 5089 Joule / 1.41361111111111 Watthours\nAverage node power draw: 188.481481481481 Watt\n",log,tab +26,242594,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_ffn_dim_ablation.sh",0,0,"",shellscript,tab +27,249364,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_ffn_dim_ablation.sh",127,0,"",shellscript,selection_mouse +28,250244,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_ffn_dim_ablation.sh",513,0,"",shellscript,selection_command +29,251485,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_ffn_dim_ablation.sh",755,0,"",shellscript,selection_mouse +30,262555,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_ffn_dim_ablation.sh",1956,0,"",shellscript,selection_mouse +31,262558,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_ffn_dim_ablation.sh",1955,0,"",shellscript,selection_command +32,263486,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_ffn_dim_ablation.sh",1978,0,"",shellscript,selection_mouse +33,263518,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_ffn_dim_ablation.sh",1977,0,"",shellscript,selection_command +34,265874,"jasmine/train_dynamics.py",0,0,"import os\n\n\nos.environ.setdefault(""XLA_PYTHON_CLIENT_MEM_FRACTION"", ""0.98"")\n\nfrom dataclasses import dataclass, field\nimport itertools\nfrom typing import cast, Optional\n\nimport einops\nfrom jax.sharding import Mesh, PartitionSpec, NamedSharding\nfrom jax.experimental.mesh_utils import create_device_mesh\nimport optax\nimport orbax.checkpoint as ocp\nimport numpy as 
np\nimport dm_pix as pix\nimport jax\nimport jax.numpy as jnp\nimport tyro\nimport wandb\nimport grain\nimport flax.nnx as nnx\n\nfrom genie import Genie, restore_genie_components\nfrom utils.dataloader import get_dataloader\nfrom utils.train_utils import (\n get_lr_schedule,\n count_parameters_by_component,\n print_mem_stats,\n print_compiled_memory_stats,\n print_compiled_cost_analysis,\n)\n\n\n@dataclass\nclass Args:\n # Experiment\n num_steps: int = 200_000\n seed: int = 0\n seq_len: int = 16\n image_channels: int = 3\n image_height: int = 64\n image_width: int = 64\n data_dir: str = """"\n save_ckpt: bool = False\n restore_ckpt: bool = False\n # Optimization\n batch_size: int = 36\n init_lr: float = 0.0\n max_lr: float = 3e-5\n decay_end: float = 0.0\n wsd_decay_steps: int = (\n 20_000 # NOTE: wsd_decay_steps will only be used when using a wsd-schedule\n )\n warmup_steps: int = 5000\n lr_schedule: str = ""wsd"" # supported options: wsd, cos\n # Tokenizer\n tokenizer_dim: int = 512\n tokenizer_ffn_dim: int = 2048\n latent_patch_dim: int = 32\n num_patch_latents: int = 1024\n patch_size: int = 16\n tokenizer_num_blocks: int = 4\n tokenizer_num_heads: int = 8\n tokenizer_checkpoint: str = """"\n # LAM\n lam_dim: int = 512\n lam_ffn_dim: int = 2048\n latent_action_dim: int = 32\n num_actions: int = 6\n lam_patch_size: int = 16\n lam_num_blocks: int = 4\n lam_num_heads: int = 8\n lam_checkpoint: str = """"\n # Dynamics\n dyna_type: str = ""maskgit"" # supported options: maskgit, causal\n dyna_dim: int = 512\n dyna_ffn_dim: int = 2048\n dyna_num_blocks: int = 6\n dyna_num_heads: int = 8\n dropout: float = 0.0\n mask_limit: float = 0.5\n z_loss_weight: float = 0.0\n param_dtype = jnp.float32\n dtype = jnp.bfloat16\n use_flash_attention: bool = True\n use_gt_actions: bool = False\n # Logging\n log: bool = True\n entity: str = """"\n project: str = """"\n name: str = ""train_dynamics""\n tags: list[str] = field(default_factory=lambda: [""dynamics""])\n log_interval: int = 50\n log_image_interval: int = 1000\n ckpt_dir: str = """"\n log_checkpoint_interval: int = 5000\n log_checkpoint_keep_period: int = 20_000\n log_gradients: bool = False\n val_data_dir: str = """"\n val_interval: int = 20_000\n val_steps: int = 50\n eval_full_frame: bool = True\n val_maskgit_steps: int = 25\n val_temperature: float = 1\n val_sample_argmax: bool = False\n wandb_id: str = """"\n\n\ndef build_model(args: Args, rng: jax.Array) -> tuple[Genie, jax.Array]:\n rng, _rng = jax.random.split(rng)\n rngs = nnx.Rngs(_rng)\n genie = Genie(\n # Tokenizer\n in_dim=args.image_channels,\n tokenizer_dim=args.tokenizer_dim,\n tokenizer_ffn_dim=args.tokenizer_ffn_dim,\n latent_patch_dim=args.latent_patch_dim,\n num_patch_latents=args.num_patch_latents,\n patch_size=args.patch_size,\n tokenizer_num_blocks=args.tokenizer_num_blocks,\n tokenizer_num_heads=args.tokenizer_num_heads,\n # LAM\n lam_dim=args.lam_dim,\n lam_ffn_dim=args.lam_ffn_dim,\n latent_action_dim=args.latent_action_dim,\n num_actions=args.num_actions,\n lam_patch_size=args.lam_patch_size,\n lam_num_blocks=args.lam_num_blocks,\n lam_num_heads=args.lam_num_heads,\n lam_co_train=not args.lam_checkpoint,\n use_gt_actions=args.use_gt_actions,\n # Dynamics\n dyna_type=args.dyna_type,\n dyna_dim=args.dyna_dim,\n dyna_ffn_dim=args.dyna_ffn_dim,\n dyna_num_blocks=args.dyna_num_blocks,\n dyna_num_heads=args.dyna_num_heads,\n dropout=args.dropout,\n mask_limit=args.mask_limit,\n param_dtype=args.param_dtype,\n dtype=args.dtype,\n use_flash_attention=args.use_flash_attention,\n 
decode=False,\n rngs=rngs,\n )\n if args.use_gt_actions:\n assert (\n not args.lam_checkpoint\n ), ""Cannot use LAM when using ground-truth actions.""\n else:\n assert genie.lam is not None\n del genie.lam.decoder\n return genie, rng\n\n\ndef build_optimizer(genie: Genie, args: Args) -> nnx.ModelAndOptimizer:\n lr_schedule = get_lr_schedule(\n args.lr_schedule,\n args.init_lr,\n args.max_lr,\n args.decay_end,\n args.num_steps,\n args.warmup_steps,\n args.wsd_decay_steps,\n )\n tx = optax.adamw(\n learning_rate=lr_schedule,\n b1=0.9,\n b2=0.9,\n weight_decay=1e-4,\n mu_dtype=args.param_dtype, # moments in full precision\n )\n optimizer = nnx.ModelAndOptimizer(genie, tx)\n return optimizer\n\n\ndef build_mesh_and_sharding(\n num_devices: int,\n) -> tuple[Mesh, NamedSharding, NamedSharding, NamedSharding]:\n device_mesh_arr = create_device_mesh((num_devices,))\n mesh = Mesh(devices=device_mesh_arr, axis_names=(""data"",))\n replicated_sharding = NamedSharding(mesh, PartitionSpec())\n videos_sharding = NamedSharding(mesh, PartitionSpec(""data"", None, None, None, None))\n actions_sharding = NamedSharding(mesh, PartitionSpec(""data"", None))\n return mesh, replicated_sharding, videos_sharding, actions_sharding\n\n\ndef shard_optimizer_states(\n optimizer: nnx.ModelAndOptimizer, replicated_sharding: NamedSharding\n) -> None:\n model_state = nnx.state(optimizer.model)\n model_sharded_state = jax.lax.with_sharding_constraint(\n model_state, replicated_sharding\n )\n nnx.update(optimizer.model, model_sharded_state)\n optimizer_state = nnx.state(optimizer, nnx.optimizer.OptState)\n optimizer_sharded_state = jax.lax.with_sharding_constraint(\n optimizer_state, replicated_sharding\n )\n nnx.update(optimizer, optimizer_sharded_state)\n\n\ndef build_dataloader(args: Args, data_dir: str) -> grain.DataLoaderIterator:\n image_shape = (args.image_height, args.image_width, args.image_channels)\n array_record_files = [\n os.path.join(data_dir, x)\n for x in os.listdir(data_dir)\n if x.endswith("".array_record"")\n ]\n grain_dataloader = get_dataloader(\n array_record_files,\n args.seq_len,\n # NOTE: We deliberately pass the global batch size\n # The dataloader shards the dataset across all processes\n args.batch_size,\n *image_shape,\n num_workers=8,\n prefetch_buffer_size=1,\n seed=args.seed,\n )\n initial_state = grain_dataloader._create_initial_state()\n grain_iterator = grain.DataLoaderIterator(grain_dataloader, initial_state)\n return grain_iterator\n\n\ndef build_checkpoint_manager(args: Args) -> Optional[ocp.CheckpointManager]:\n if args.restore_ckpt or args.save_ckpt:\n handler_registry = ocp.handlers.DefaultCheckpointHandlerRegistry()\n handler_registry.add(\n ""model_state"", ocp.args.PyTreeSave, ocp.handlers.PyTreeCheckpointHandler\n )\n handler_registry.add(\n ""model_state"", ocp.args.PyTreeRestore, ocp.handlers.PyTreeCheckpointHandler\n )\n handler_registry.add(\n ""train_dataloader_state"",\n grain.checkpoint.CheckpointSave,\n cast(ocp.handlers.CheckpointHandler, grain.checkpoint.CheckpointHandler),\n )\n handler_registry.add(\n ""train_dataloader_state"",\n grain.checkpoint.CheckpointRestore,\n cast(ocp.handlers.CheckpointHandler, grain.checkpoint.CheckpointHandler),\n )\n if args.val_data_dir:\n handler_registry.add(\n ""val_dataloader_state"",\n grain.checkpoint.CheckpointSave,\n cast(\n ocp.handlers.CheckpointHandler, grain.checkpoint.CheckpointHandler\n ),\n )\n handler_registry.add(\n ""val_dataloader_state"",\n grain.checkpoint.CheckpointRestore,\n cast(\n ocp.handlers.CheckpointHandler, 
grain.checkpoint.CheckpointHandler\n ),\n )\n checkpoint_options = ocp.CheckpointManagerOptions(\n save_interval_steps=args.log_checkpoint_interval,\n max_to_keep=3,\n keep_period=args.log_checkpoint_keep_period,\n step_format_fixed_length=6,\n cleanup_tmp_directories=True,\n )\n checkpoint_manager = ocp.CheckpointManager(\n args.ckpt_dir,\n options=checkpoint_options,\n handler_registry=handler_registry,\n )\n return checkpoint_manager\n else:\n return None\n\n\ndef restore_or_initialize_components(\n args: Args,\n checkpoint_manager: Optional[ocp.CheckpointManager],\n optimizer: nnx.ModelAndOptimizer,\n train_iterator: grain.DataLoaderIterator,\n rng: jax.Array,\n replicated_sharding: NamedSharding,\n val_iterator: Optional[grain.DataLoaderIterator],\n restore_step: Optional[int] = None,\n) -> tuple[\n int,\n nnx.ModelAndOptimizer,\n grain.DataLoaderIterator,\n grain.DataLoaderIterator,\n jax.Array,\n]:\n step = 0\n if checkpoint_manager and restore_step is None:\n restore_step = checkpoint_manager.latest_step()\n if args.restore_ckpt:\n assert checkpoint_manager is not None\n abstract_optimizer = nnx.eval_shape(lambda: optimizer)\n abstract_optimizer_state = nnx.state(abstract_optimizer)\n if val_iterator:\n restore_args = ocp.args.Composite(\n model_state=ocp.args.PyTreeRestore(abstract_optimizer_state), # type: ignore\n train_dataloader_state=grain.checkpoint.CheckpointRestore(train_iterator), # type: ignore\n val_dataloader_state=grain.checkpoint.CheckpointRestore(val_iterator), # type: ignore\n )\n else:\n restore_args = ocp.args.Composite(\n model_state=ocp.args.PyTreeRestore(abstract_optimizer_state), # type: ignore\n train_dataloader_state=grain.checkpoint.CheckpointRestore(train_iterator), # type: ignore\n )\n restored = checkpoint_manager.restore(\n checkpoint_manager.latest_step(), args=restore_args\n )\n restored_optimizer_state = restored[""model_state""]\n nnx.update(optimizer, restored_optimizer_state)\n train_iterator = restored[""train_dataloader_state""]\n if val_iterator:\n val_iterator = restored[""val_dataloader_state""]\n step = checkpoint_manager.latest_step() or 0\n print(f""Restored dataloader and model state from step {step}"")\n else:\n # Restore from pre-trained tokenizer (and LAM)\n rng, _rng = jax.random.split(rng)\n optimizer = restore_genie_components(optimizer, replicated_sharding, _rng, args)\n return step, optimizer, train_iterator, val_iterator, rng\n\n\ndef _calculate_top_k_accuracy(\n token_logits_BTNV: jax.Array,\n video_tokens_BTN: jax.Array,\n mask_BTN: jax.Array,\n k: int,\n) -> jax.Array:\n _, topk_indices_BTNK = jax.lax.top_k(token_logits_BTNV, k)\n topk_correct = jnp.any(\n topk_indices_BTNK == video_tokens_BTN[..., jnp.newaxis], axis=-1\n )\n topk_acc = (mask_BTN * topk_correct).sum() / mask_BTN.sum()\n return topk_acc\n\n\ndef _calculate_step_metrics(\n outputs: dict[str, jax.Array],\n gt: jax.Array,\n num_actions: int,\n num_patch_latents: int,\n) -> tuple[jax.Array, dict]:\n mask_BTN = outputs[""mask""]\n outputs[""token_logits""] = outputs[""token_logits""].astype(jnp.float32)\n ce_loss = optax.softmax_cross_entropy_with_integer_labels(\n outputs[""token_logits""], outputs[""video_tokens""]\n )\n ce_loss = (mask_BTN * ce_loss).sum() / mask_BTN.sum()\n z_val = jax.nn.logsumexp(outputs[""token_logits""], axis=-1)\n z_loss_metric = (mask_BTN * (z_val**2)).sum() / mask_BTN.sum()\n\n masked_token_top_1_acc = _calculate_top_k_accuracy(\n outputs[""token_logits""], outputs[""video_tokens""], mask_BTN, 1\n )\n masked_token_top_2_acc = 
_calculate_top_k_accuracy(\n outputs[""token_logits""], outputs[""video_tokens""], mask_BTN, 2\n )\n masked_token_top_5_acc = _calculate_top_k_accuracy(\n outputs[""token_logits""], outputs[""video_tokens""], mask_BTN, 5\n )\n masked_token_top_16_acc = _calculate_top_k_accuracy(\n outputs[""token_logits""], outputs[""video_tokens""], mask_BTN, 16\n )\n\n select_probs = jax.nn.softmax(outputs[""token_logits""])\n gt_val = gt.clip(0, 1).reshape(-1, *gt.shape[2:])\n recon = outputs[""recon""].clip(0, 1).reshape(-1, *outputs[""recon""].shape[2:])\n psnr = jnp.asarray(pix.psnr(gt_val, recon)).mean()\n ssim = jnp.asarray(pix.ssim(gt_val, recon)).mean()\n _, index_counts_tokenizer = jnp.unique_counts(\n jnp.ravel(outputs[""video_tokens""]),\n size=num_patch_latents,\n fill_value=0,\n )\n codebook_usage_tokenizer = (index_counts_tokenizer != 0).mean()\n metrics = dict(\n cross_entropy_loss=ce_loss,\n masked_token_top1_accuracy=masked_token_top_1_acc,\n masked_token_top2_accuracy=masked_token_top_2_acc,\n masked_token_top5_accuracy=masked_token_top_5_acc,\n masked_token_top16_accuracy=masked_token_top_16_acc,\n select_logit=outputs[""token_logits""].max(-1).mean(),\n select_p=select_probs.max(-1).mean(),\n entropy=jax.scipy.special.entr(select_probs).sum(-1).mean(),\n z_loss=z_loss_metric,\n psnr=psnr,\n ssim=ssim,\n codebook_usage_tokenizer=codebook_usage_tokenizer,\n )\n if ""lam_indices"" in outputs.keys():\n _, index_counts_lam = jnp.unique_counts(\n jnp.ravel(outputs[""lam_indices""]),\n size=num_actions,\n fill_value=0,\n )\n codebook_usage_lam = (index_counts_lam != 0).mean()\n metrics[""codebook_usage_lam""] = codebook_usage_lam\n return ce_loss, metrics\n\n\ndef main(args: Args) -> None:\n jax.distributed.initialize()\n num_devices = jax.device_count()\n if num_devices == 0:\n raise ValueError(""No JAX devices found."")\n print(f""Running on {num_devices} devices."")\n\n if args.batch_size % num_devices != 0:\n raise ValueError(\n f""Global batch size {args.batch_size} must be divisible by ""\n f""number of devices {num_devices}.""\n )\n\n rng = jax.random.key(args.seed)\n\n # --- Initialize model ---\n genie, rng = build_model(args, rng)\n _, params, _ = nnx.split(genie, nnx.Param, ...)\n param_counts = count_parameters_by_component(params)\n\n if args.log and jax.process_index() == 0:\n wandb_init_kwargs = {\n ""entity"": args.entity,\n ""project"": args.project,\n ""name"": args.name,\n ""tags"": args.tags,\n ""group"": ""debug"",\n ""config"": args,\n }\n\n if args.wandb_id:\n wandb_init_kwargs.update(\n {\n ""id"": args.wandb_id,\n ""resume"": ""allow"",\n }\n )\n wandb.init(**wandb_init_kwargs)\n\n wandb.config.update({""model_param_count"": param_counts})\n\n print(""Parameter counts:"")\n print(param_counts)\n\n # --- Initialize optimizer ---\n optimizer = build_optimizer(genie, args)\n del genie\n\n # FIXME: switch to create_hybrid_device_mesh for runs spanning multiple nodes\n _, replicated_sharding, videos_sharding, actions_sharding = build_mesh_and_sharding(\n num_devices\n )\n\n shard_optimizer_states(optimizer, replicated_sharding)\n\n # --- Initialize checkpoint manager ---\n checkpoint_manager = build_checkpoint_manager(args)\n\n # --- Create DataLoaderIterator from dataloader ---\n train_iterator = build_dataloader(args, args.data_dir)\n val_iterator = None\n if args.val_data_dir:\n val_iterator = build_dataloader(args, args.val_data_dir)\n\n # --- Restore checkpoint ---\n step, optimizer, train_iterator, val_iterator, rng = (\n restore_or_initialize_components(\n args,\n 
checkpoint_manager,\n optimizer,\n train_iterator,\n rng,\n replicated_sharding,\n val_iterator,\n )\n )\n\n # --- Define loss and train step (close over args) ---\n def dynamics_loss_fn(\n model: Genie,\n inputs: dict,\n ) -> tuple[jax.Array, tuple[jax.Array, dict]]:\n gt = jnp.asarray(inputs[""videos""], dtype=jnp.float32) / 255.0\n inputs[""videos""] = gt.astype(args.dtype)\n outputs = model(inputs)\n ce_loss, metrics = _calculate_step_metrics(\n outputs, gt, args.num_actions, args.num_patch_latents\n )\n z_loss = metrics[""z_loss""]\n total_loss = ce_loss + args.z_loss_weight * z_loss\n metrics[""total_loss""] = total_loss\n return total_loss, (outputs[""recon""], metrics)\n\n @nnx.jit(donate_argnums=0)\n def train_step(\n optimizer: nnx.ModelAndOptimizer, inputs: dict\n ) -> tuple[jax.Array, jax.Array, dict]:\n def loss_fn(model: Genie) -> tuple[jax.Array, tuple[jax.Array, dict]]:\n model.train()\n return dynamics_loss_fn(model, inputs)\n\n (loss, (recon, metrics)), grads = nnx.value_and_grad(loss_fn, has_aux=True)(\n optimizer.model\n )\n optimizer.update(grads)\n if args.log_gradients:\n metrics[""gradients_std/""] = jax.tree.map(\n lambda x: x.std(), grads[""params""][""dynamics""]\n )\n return loss, recon, metrics\n\n @nnx.jit\n def val_step(genie: Genie, inputs: dict) -> dict:\n """"""Evaluate model and compute metrics""""""\n genie.eval()\n gt = jnp.asarray(inputs[""videos""], dtype=jnp.float32) / 255.0\n (loss, (recon, metrics)) = dynamics_loss_fn(genie, inputs)\n val_output = {""loss"": loss, ""recon"": recon, ""metrics"": metrics}\n\n # --- Evaluate full frame prediction (sampling) ---\n if args.eval_full_frame:\n inputs[""videos""] = gt.astype(args.dtype)\n tokenizer_outputs = genie.tokenizer.vq_encode(\n inputs[""videos""], training=False\n )\n tokens_full_frame = tokenizer_outputs[""indices""]\n lam_indices_E = None\n if not args.use_gt_actions:\n lam_indices_E = genie.vq_encode(inputs, training=False)\n inputs[""latent_actions""] = lam_indices_E\n inputs[""videos""] = inputs[""videos""][\n :, :-1\n ] # remove last frame for generation\n recon_full_frame, logits_full_frame = genie.sample(\n inputs,\n args.seq_len,\n args.val_temperature,\n args.val_sample_argmax,\n args.val_maskgit_steps,\n )\n # Calculate metrics for the last frame only\n step_outputs = {\n ""recon"": recon_full_frame[:, -1],\n ""token_logits"": logits_full_frame[:, -1],\n ""video_tokens"": tokens_full_frame[:, -1],\n ""mask"": jnp.ones_like(tokens_full_frame[:, -1]),\n }\n if lam_indices_E is not None:\n lam_indices_B = lam_indices_E.reshape((-1, args.seq_len - 1))[:, -1]\n step_outputs[""lam_indices""] = lam_indices_B\n\n loss_full_frame, metrics_full_frame = _calculate_step_metrics(\n step_outputs, gt[:, -1], args.num_actions, args.num_patch_latents\n )\n val_output.update(\n {\n ""loss_full_frame"": loss_full_frame,\n ""recon_full_frame"": recon_full_frame,\n ""metrics_full_frame"": metrics_full_frame,\n }\n )\n return val_output\n\n def calculate_validation_metrics(val_dataloader, genie, rng):\n step = 0\n loss_per_step = []\n metrics_per_step = []\n loss_full_frame_per_step = []\n metrics_full_frame_per_step = []\n batch = None\n recon = None\n recon_full_frame = None\n for batch in val_dataloader:\n rng, _rng_mask = jax.random.split(rng, 2)\n batch[""rng""] = _rng_mask\n val_outputs = val_step(genie, batch)\n loss_per_step.append(val_outputs[""loss""])\n metrics_per_step.append(val_outputs[""metrics""])\n recon = val_outputs[""recon""]\n if args.eval_full_frame:\n 
loss_full_frame_per_step.append(val_outputs[""loss_full_frame""])\n metrics_full_frame_per_step.append(val_outputs[""metrics_full_frame""])\n recon_full_frame = val_outputs[""recon_full_frame""]\n step += 1\n if step > args.val_steps:\n break\n\n if step < args.val_steps:\n print(\n f""Warning: Your validation dataset is too small to make val_steps many steps. Made {step} steps, expected {args.val_steps}""\n )\n\n val_metrics = {\n f""val_{key}"": np.mean([float(m[key]) for m in metrics_per_step])\n for key in metrics_per_step[0].keys()\n }\n val_metrics[""val_loss""] = np.mean(loss_per_step)\n if args.eval_full_frame:\n val_metrics_full_frame = {\n f""val_full_frame_{key}"": np.mean(\n [float(m[key]) for m in metrics_full_frame_per_step]\n )\n for key in metrics_full_frame_per_step[0].keys()\n }\n val_metrics.update(val_metrics_full_frame)\n val_metrics[""val_full_frame_loss""] = np.mean(loss_full_frame_per_step)\n return val_metrics, batch, recon, recon_full_frame\n\n # --- TRAIN LOOP ---\n dataloader_train = (\n {\n ""videos"": jax.make_array_from_process_local_data(\n videos_sharding, local_data=elem[""videos""]\n ),\n ""actions"": (\n jax.make_array_from_process_local_data(\n actions_sharding, elem[""actions""]\n )\n if args.use_gt_actions\n else None\n ),\n }\n for elem in train_iterator\n )\n dataloader_val = None\n if val_iterator:\n dataloader_val = (\n {\n ""videos"": jax.make_array_from_process_local_data(\n videos_sharding, elem[""videos""]\n ),\n ""actions"": (\n jax.make_array_from_process_local_data(\n actions_sharding, elem[""actions""]\n )\n if args.use_gt_actions\n else None\n ),\n }\n for elem in val_iterator\n )\n if jax.process_index() == 0:\n first_batch = next(dataloader_train)\n first_batch[""rng""] = rng # type: ignore\n compiled = train_step.lower(optimizer, first_batch).compile()\n print_compiled_memory_stats(compiled.memory_analysis())\n print_compiled_cost_analysis(compiled.cost_analysis())\n # Do not skip the first batch during training\n dataloader_train = itertools.chain([first_batch], dataloader_train)\n print(f""Starting training from step {step}..."")\n first_step = step\n while step < args.num_steps:\n for batch in dataloader_train:\n # --- Train step ---\n rng, _rng_mask = jax.random.split(rng, 2)\n batch[""rng""] = _rng_mask\n loss, recon, metrics = train_step(optimizer, batch)\n if step == first_step:\n print_mem_stats(""After params initialized"")\n step += 1\n\n # --- Validation loss ---\n val_results = {}\n if dataloader_val and step % args.val_interval == 0:\n rng, _rng_mask_val = jax.random.split(rng, 2)\n print(""Calculating validation metrics..."")\n val_metrics, val_gt_batch, val_recon, val_recon_full_frame = (\n calculate_validation_metrics(\n dataloader_val, optimizer.model, _rng_mask_val\n )\n )\n print(f""Step {step}, validation loss: {val_metrics['val_loss']}"")\n val_results = {\n ""metrics"": val_metrics,\n ""gt_batch"": val_gt_batch,\n ""recon"": val_recon,\n ""full_frame"": val_recon_full_frame,\n }\n\n # --- Logging ---\n if args.log:\n if step % args.log_interval == 0 and jax.process_index() == 0:\n log_dict = {""loss"": loss, ""step"": step, **metrics}\n if val_results:\n log_dict.update(val_results[""metrics""])\n wandb.log(log_dict)\n if step % args.log_image_interval == 0:\n gt_seq = batch[""videos""][0].astype(jnp.float32) / 255.0\n recon_seq = recon[0].clip(0, 1)\n comparison_seq = jnp.concatenate((gt_seq, recon_seq), axis=1)\n comparison_seq = einops.rearrange(\n comparison_seq * 255, ""t h w c -> h (t w) c""\n )\n if 
val_results:\n val_results[""gt_seq_val""] = (\n val_results[""gt_batch""][""videos""][0].astype(jnp.float32)\n / 255.0\n )\n val_results[""recon_seq_val""] = val_results[""recon""][0].clip(\n 0, 1\n )\n val_comparison_seq = jnp.concatenate(\n (val_results[""gt_seq_val""], val_results[""recon_seq_val""]),\n axis=1,\n )\n val_results[""val_comparison_seq""] = einops.rearrange(\n val_comparison_seq * 255, ""t h w c -> h (t w) c""\n )\n if args.eval_full_frame:\n val_results[""full_frame_seq_val""] = val_results[\n ""full_frame""\n ][0].clip(0, 1)\n val_results[""val_full_frame_comparison_seq""] = (\n jnp.concatenate(\n (\n val_results[""gt_seq_val""],\n val_results[""full_frame_seq_val""],\n ),\n axis=1,\n )\n )\n val_results[""val_full_frame_comparison_seq""] = (\n einops.rearrange(\n val_results[""val_full_frame_comparison_seq""] * 255,\n ""t h w c -> h (t w) c"",\n )\n )\n # NOTE: Process-dependent control flow deliberately happens\n # after indexing operation since it must not contain code\n # sections that lead to cross-accelerator communication.\n if jax.process_index() == 0:\n log_images = dict(\n image=wandb.Image(np.asarray(gt_seq[args.seq_len - 1])),\n recon=wandb.Image(np.asarray(recon_seq[args.seq_len - 1])),\n true_vs_recon=wandb.Image(\n np.asarray(comparison_seq.astype(np.uint8))\n ),\n )\n if val_results:\n log_images.update(\n dict(\n val_image=wandb.Image(\n np.asarray(\n val_results[""gt_seq_val""][args.seq_len - 1]\n )\n ),\n val_recon=wandb.Image(\n np.asarray(\n val_results[""recon_seq_val""][\n args.seq_len - 1\n ]\n )\n ),\n val_true_vs_recon=wandb.Image(\n np.asarray(\n val_results[""val_comparison_seq""].astype(\n np.uint8\n )\n )\n ),\n )\n )\n if args.eval_full_frame:\n log_images.update(\n dict(\n val_full_frame=wandb.Image(\n np.asarray(\n val_results[""full_frame_seq_val""][\n args.seq_len - 1\n ]\n )\n ),\n val_true_vs_full_frame=wandb.Image(\n np.asarray(\n val_results[\n ""val_full_frame_comparison_seq""\n ].astype(np.uint8)\n )\n ),\n )\n )\n wandb.log(log_images)\n # --- Checkpointing ---\n if args.save_ckpt and step % args.log_checkpoint_interval == 0:\n assert checkpoint_manager is not None\n optimizer_state = nnx.state(optimizer)\n if val_iterator:\n ckpt_manager_args = ocp.args.Composite(\n model_state=ocp.args.PyTreeSave(optimizer_state), # type: ignore\n train_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n train_iterator # type: ignore\n ),\n val_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n val_iterator # type: ignore\n ),\n )\n else:\n ckpt_manager_args = ocp.args.Composite(\n model_state=ocp.args.PyTreeSave(optimizer_state), # type: ignore\n train_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n train_iterator # type: ignore\n ),\n )\n checkpoint_manager.save(step, args=ckpt_manager_args)\n print(f""Saved checkpoint at step {step}"")\n if step >= args.num_steps:\n break\n\n if checkpoint_manager:\n checkpoint_manager.close()\n\n\nif __name__ == ""__main__"":\n args = tyro.cli(Args)\n main(args)\n",python,tab +35,285570,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_ffn_dim_ablation.sh",0,0,"",shellscript,tab +36,285571,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_ffn_dim_ablation.sh",2005,0,"",shellscript,selection_mouse +37,287315,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_ffn_dim_ablation.sh",1930,0,"",shellscript,selection_mouse 
+38,287335,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_ffn_dim_ablation.sh",1929,0,"",shellscript,selection_command +39,288167,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_ffn_dim_ablation.sh",1777,0,"",shellscript,selection_mouse +40,288168,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_ffn_dim_ablation.sh",1776,0,"",shellscript,selection_command +41,288780,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_ffn_dim_ablation.sh",1777,0,"",shellscript,selection_mouse +42,288794,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_ffn_dim_ablation.sh",1776,0,"",shellscript,selection_command +43,289625,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_ffn_dim_ablation.sh",1749,0,"",shellscript,selection_mouse +44,306981,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_ffn_dim_ablation.sh",1759,0,"\n",shellscript,content +45,308010,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_ffn_dim_ablation.sh",1760,0," ",shellscript,content +46,309076,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_ffn_dim_ablation.sh",1764,0,"-",shellscript,content +47,309078,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_ffn_dim_ablation.sh",1765,0,"",shellscript,selection_keyboard +48,309416,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_ffn_dim_ablation.sh",1765,0,"-",shellscript,content +49,309417,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_ffn_dim_ablation.sh",1766,0,"",shellscript,selection_keyboard +50,311781,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_ffn_dim_ablation.sh",1766,0,"d",shellscript,content +51,311783,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_ffn_dim_ablation.sh",1767,0,"",shellscript,selection_keyboard +52,311953,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_ffn_dim_ablation.sh",1767,0,"y",shellscript,content +53,311954,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_ffn_dim_ablation.sh",1768,0,"",shellscript,selection_keyboard +54,312167,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_ffn_dim_ablation.sh",1768,0,"n",shellscript,content +55,312168,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_ffn_dim_ablation.sh",1769,0,"",shellscript,selection_keyboard +56,312627,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_ffn_dim_ablation.sh",1769,0,"a",shellscript,content +57,312628,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_ffn_dim_ablation.sh",1770,0,"",shellscript,selection_keyboard +58,312864,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_ffn_dim_ablation.sh",1770,0,"_",shellscript,content +59,312865,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_ffn_dim_ablation.sh",1771,0,"",shellscript,selection_keyboard +60,313197,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_ffn_dim_ablation.sh",1771,0,"f",shellscript,content +61,313198,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_ffn_dim_ablation.sh",1772,0,"",shellscript,selection_keyboard 
+62,313340,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_ffn_dim_ablation.sh",1772,0,"f",shellscript,content +63,313341,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_ffn_dim_ablation.sh",1773,0,"",shellscript,selection_keyboard +64,313441,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_ffn_dim_ablation.sh",1773,0,"n",shellscript,content +65,313442,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_ffn_dim_ablation.sh",1774,0,"",shellscript,selection_keyboard +66,313813,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_ffn_dim_ablation.sh",1774,0,"_",shellscript,content +67,313814,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_ffn_dim_ablation.sh",1775,0,"",shellscript,selection_keyboard +68,314114,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_ffn_dim_ablation.sh",1775,0,"d",shellscript,content +69,314115,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_ffn_dim_ablation.sh",1776,0,"",shellscript,selection_keyboard +70,314241,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_ffn_dim_ablation.sh",1776,0,"i",shellscript,content +71,314242,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_ffn_dim_ablation.sh",1777,0,"",shellscript,selection_keyboard +72,314296,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_ffn_dim_ablation.sh",1777,0,"m",shellscript,content +73,314296,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_ffn_dim_ablation.sh",1778,0,"",shellscript,selection_keyboard +74,318076,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_ffn_dim_ablation.sh",1778,0,"=",shellscript,content +75,318078,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_ffn_dim_ablation.sh",1779,0,"",shellscript,selection_keyboard +76,318821,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_ffn_dim_ablation.sh",1779,0,"5",shellscript,content +77,318822,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_ffn_dim_ablation.sh",1780,0,"",shellscript,selection_keyboard +78,318890,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_ffn_dim_ablation.sh",1780,0,"1",shellscript,content +79,318891,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_ffn_dim_ablation.sh",1781,0,"",shellscript,selection_keyboard +80,318989,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_ffn_dim_ablation.sh",1781,0,"2",shellscript,content +81,318989,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_ffn_dim_ablation.sh",1782,0,"",shellscript,selection_keyboard +82,320212,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_ffn_dim_ablation.sh",1782,0," ",shellscript,content +83,320213,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_ffn_dim_ablation.sh",1783,0,"",shellscript,selection_keyboard +84,320436,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_ffn_dim_ablation.sh",1783,0,"\",shellscript,content +85,320437,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_ffn_dim_ablation.sh",1784,0,"",shellscript,selection_keyboard 
+86,320713,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_ffn_dim_ablation.sh",1784,0,"\n ",shellscript,content +87,324313,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_ffn_dim_ablation.sh",1789,0,"-",shellscript,content +88,324314,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_ffn_dim_ablation.sh",1790,0,"",shellscript,selection_keyboard +89,324471,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_ffn_dim_ablation.sh",1790,0,"-",shellscript,content +90,324472,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_ffn_dim_ablation.sh",1791,0,"",shellscript,selection_keyboard +91,325028,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_ffn_dim_ablation.sh",1791,0,"d",shellscript,content +92,325029,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_ffn_dim_ablation.sh",1792,0,"",shellscript,selection_keyboard +93,325263,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_ffn_dim_ablation.sh",1792,0,"y",shellscript,content +94,325264,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_ffn_dim_ablation.sh",1793,0,"",shellscript,selection_keyboard +95,325375,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_ffn_dim_ablation.sh",1793,0,"n",shellscript,content +96,325376,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_ffn_dim_ablation.sh",1794,0,"",shellscript,selection_keyboard +97,325510,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_ffn_dim_ablation.sh",1794,0,"a",shellscript,content +98,325511,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_ffn_dim_ablation.sh",1795,0,"",shellscript,selection_keyboard +99,326233,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_ffn_dim_ablation.sh",1795,0,"_",shellscript,content +100,326234,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_ffn_dim_ablation.sh",1796,0,"",shellscript,selection_keyboard +101,326588,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_ffn_dim_ablation.sh",1796,0,"n",shellscript,content +102,326589,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_ffn_dim_ablation.sh",1797,0,"",shellscript,selection_keyboard +103,326864,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_ffn_dim_ablation.sh",1797,0,"m",shellscript,content +104,326865,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_ffn_dim_ablation.sh",1798,0,"",shellscript,selection_keyboard +105,327324,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_ffn_dim_ablation.sh",1797,1,"",shellscript,content +106,327643,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_ffn_dim_ablation.sh",1797,0,"u",shellscript,content +107,327644,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_ffn_dim_ablation.sh",1798,0,"",shellscript,selection_keyboard +108,327739,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_ffn_dim_ablation.sh",1798,0,"m",shellscript,content +109,327740,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_ffn_dim_ablation.sh",1799,0,"",shellscript,selection_keyboard 
+110,328083,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_ffn_dim_ablation.sh",1799,0,"_",shellscript,content +111,328084,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_ffn_dim_ablation.sh",1800,0,"",shellscript,selection_keyboard +112,328486,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_ffn_dim_ablation.sh",1800,0,"b",shellscript,content +113,328487,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_ffn_dim_ablation.sh",1801,0,"",shellscript,selection_keyboard +114,328642,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_ffn_dim_ablation.sh",1801,0,"l",shellscript,content +115,328642,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_ffn_dim_ablation.sh",1802,0,"",shellscript,selection_keyboard +116,328802,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_ffn_dim_ablation.sh",1802,0,"o",shellscript,content +117,328803,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_ffn_dim_ablation.sh",1803,0,"",shellscript,selection_keyboard +118,328879,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_ffn_dim_ablation.sh",1803,0,"c",shellscript,content +119,328880,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_ffn_dim_ablation.sh",1804,0,"",shellscript,selection_keyboard +120,328995,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_ffn_dim_ablation.sh",1804,0,"k",shellscript,content +121,328996,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_ffn_dim_ablation.sh",1805,0,"",shellscript,selection_keyboard +122,329076,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_ffn_dim_ablation.sh",1805,0,"s",shellscript,content +123,329077,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_ffn_dim_ablation.sh",1806,0,"",shellscript,selection_keyboard +124,331395,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_ffn_dim_ablation.sh",1806,0," ",shellscript,content +125,331396,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_ffn_dim_ablation.sh",1807,0,"",shellscript,selection_keyboard +126,331862,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_ffn_dim_ablation.sh",1806,1,"",shellscript,content +127,332488,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_ffn_dim_ablation.sh",1806,0,"=",shellscript,content +128,332489,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_ffn_dim_ablation.sh",1807,0,"",shellscript,selection_keyboard +129,334350,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_ffn_dim_ablation.sh",1807,0,"1",shellscript,content +130,334351,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_ffn_dim_ablation.sh",1808,0,"",shellscript,selection_keyboard +131,334459,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_ffn_dim_ablation.sh",1808,0,"2",shellscript,content +132,334460,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_ffn_dim_ablation.sh",1809,0,"",shellscript,selection_keyboard +133,334905,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_ffn_dim_ablation.sh",1809,0," ",shellscript,content 
+134,334906,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_ffn_dim_ablation.sh",1810,0,"",shellscript,selection_keyboard +135,335135,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_ffn_dim_ablation.sh",1810,0,"\",shellscript,content +136,335136,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_ffn_dim_ablation.sh",1811,0,"",shellscript,selection_keyboard +137,355534,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_ffn_dim_ablation.sh",361,0,"",shellscript,selection_mouse +138,356210,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_ffn_dim_ablation.sh",378,0,"",shellscript,selection_mouse +139,357798,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_ffn_dim_ablation.sh",378,0,"_",shellscript,content +140,357799,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_ffn_dim_ablation.sh",379,0,"",shellscript,selection_keyboard +141,358122,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_ffn_dim_ablation.sh",379,0,"f",shellscript,content +142,358123,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_ffn_dim_ablation.sh",380,0,"",shellscript,selection_keyboard +143,358255,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_ffn_dim_ablation.sh",380,0,"f",shellscript,content +144,358256,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_ffn_dim_ablation.sh",381,0,"",shellscript,selection_keyboard +145,358348,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_ffn_dim_ablation.sh",381,0,"n",shellscript,content +146,358349,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_ffn_dim_ablation.sh",382,0,"",shellscript,selection_keyboard +147,358873,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_ffn_dim_ablation.sh",382,0,"_",shellscript,content +148,358874,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_ffn_dim_ablation.sh",383,0,"",shellscript,selection_keyboard +149,359896,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_ffn_dim_ablation.sh",383,0,"d",shellscript,content +150,359897,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_ffn_dim_ablation.sh",384,0,"",shellscript,selection_keyboard +151,360003,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_ffn_dim_ablation.sh",384,0,"i",shellscript,content +152,360004,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_ffn_dim_ablation.sh",385,0,"",shellscript,selection_keyboard +153,360004,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_ffn_dim_ablation.sh",385,0,"m",shellscript,content +154,360005,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_ffn_dim_ablation.sh",386,0,"",shellscript,selection_keyboard +155,360364,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_ffn_dim_ablation.sh",386,0,"_",shellscript,content +156,360365,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_ffn_dim_ablation.sh",387,0,"",shellscript,selection_keyboard +157,360653,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_ffn_dim_ablation.sh",387,0,"a",shellscript,content 
+158,360654,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_ffn_dim_ablation.sh",388,0,"",shellscript,selection_keyboard +159,360790,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_ffn_dim_ablation.sh",388,0,"b",shellscript,content +160,360791,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_ffn_dim_ablation.sh",389,0,"",shellscript,selection_keyboard +161,360961,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_ffn_dim_ablation.sh",389,0,"l",shellscript,content +162,360962,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_ffn_dim_ablation.sh",390,0,"",shellscript,selection_keyboard +163,361051,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_ffn_dim_ablation.sh",390,0,"a",shellscript,content +164,361052,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_ffn_dim_ablation.sh",391,0,"",shellscript,selection_keyboard +165,361228,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_ffn_dim_ablation.sh",391,0,"t",shellscript,content +166,361229,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_ffn_dim_ablation.sh",392,0,"",shellscript,selection_keyboard +167,361331,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_ffn_dim_ablation.sh",392,0,"i",shellscript,content +168,361332,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_ffn_dim_ablation.sh",393,0,"",shellscript,selection_keyboard +169,361413,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_ffn_dim_ablation.sh",393,0,"o",shellscript,content +170,361414,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_ffn_dim_ablation.sh",394,0,"",shellscript,selection_keyboard +171,361541,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_ffn_dim_ablation.sh",394,0,"n",shellscript,content +172,361542,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_ffn_dim_ablation.sh",395,0,"",shellscript,selection_keyboard +173,362690,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_ffn_dim_ablation.sh",273,0,"",shellscript,selection_mouse +174,363413,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_ffn_dim_ablation.sh",363,0,"",shellscript,selection_mouse +175,365286,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_ffn_dim_ablation.sh",413,0,"",shellscript,selection_mouse +176,366130,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_ffn_dim_ablation.sh",395,0,"",shellscript,selection_mouse +177,367558,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_ffn_dim_ablation.sh",413,0,"",shellscript,selection_mouse +178,369835,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_ffn_dim_ablation.sh",347,0,"",shellscript,selection_mouse +179,371253,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_ffn_dim_ablation.sh",353,0,"",shellscript,selection_mouse +180,371836,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_ffn_dim_ablation.sh",413,0,"",shellscript,selection_mouse +181,381131,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_ffn_dim_ablation.sh",1227,0,"",shellscript,selection_mouse 
+182,382022,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_ffn_dim_ablation.sh",1227,0," ",shellscript,content +183,382023,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_ffn_dim_ablation.sh",1228,0,"",shellscript,selection_keyboard +184,382208,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_ffn_dim_ablation.sh",1228,0,"f",shellscript,content +185,382209,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_ffn_dim_ablation.sh",1229,0,"",shellscript,selection_keyboard +186,382421,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_ffn_dim_ablation.sh",1229,0,"f",shellscript,content +187,382421,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_ffn_dim_ablation.sh",1230,0,"",shellscript,selection_keyboard +188,382573,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_ffn_dim_ablation.sh",1230,0,"n",shellscript,content +189,382574,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_ffn_dim_ablation.sh",1231,0,"",shellscript,selection_keyboard +190,382918,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_ffn_dim_ablation.sh",1231,0,"_",shellscript,content +191,382919,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_ffn_dim_ablation.sh",1232,0,"",shellscript,selection_keyboard +192,383221,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_ffn_dim_ablation.sh",1232,0,"d",shellscript,content +193,383222,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_ffn_dim_ablation.sh",1233,0,"",shellscript,selection_keyboard +194,383323,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_ffn_dim_ablation.sh",1233,0,"i",shellscript,content +195,383324,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_ffn_dim_ablation.sh",1234,0,"",shellscript,selection_keyboard +196,383381,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_ffn_dim_ablation.sh",1234,0,"m",shellscript,content +197,383382,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_ffn_dim_ablation.sh",1235,0,"",shellscript,selection_keyboard +198,383707,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_ffn_dim_ablation.sh",1235,0,"_",shellscript,content +199,383708,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_ffn_dim_ablation.sh",1236,0,"",shellscript,selection_keyboard +200,383951,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_ffn_dim_ablation.sh",1236,0,"a",shellscript,content +201,383952,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_ffn_dim_ablation.sh",1237,0,"",shellscript,selection_keyboard +202,384069,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_ffn_dim_ablation.sh",1237,0,"b",shellscript,content +203,384070,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_ffn_dim_ablation.sh",1238,0,"",shellscript,selection_keyboard +204,384268,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_ffn_dim_ablation.sh",1238,0,"l",shellscript,content +205,384269,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_ffn_dim_ablation.sh",1239,0,"",shellscript,selection_keyboard 
+206,384348,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_ffn_dim_ablation.sh",1239,0,"a",shellscript,content +207,384349,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_ffn_dim_ablation.sh",1240,0,"",shellscript,selection_keyboard +208,384573,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_ffn_dim_ablation.sh",1240,0,"t",shellscript,content +209,384574,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_ffn_dim_ablation.sh",1241,0,"",shellscript,selection_keyboard +210,384672,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_ffn_dim_ablation.sh",1241,0,"i",shellscript,content +211,384673,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_ffn_dim_ablation.sh",1242,0,"",shellscript,selection_keyboard +212,384747,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_ffn_dim_ablation.sh",1242,0,"o",shellscript,content +213,384748,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_ffn_dim_ablation.sh",1243,0,"",shellscript,selection_keyboard +214,384905,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_ffn_dim_ablation.sh",1243,0,"n",shellscript,content +215,384906,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_ffn_dim_ablation.sh",1244,0,"",shellscript,selection_keyboard +216,392992,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_ffn_dim_ablation.sh",1887,0,"",shellscript,selection_mouse +217,393817,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_ffn_dim_ablation.sh",1936,0,"",shellscript,selection_mouse +218,395103,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_ffn_dim_ablation.sh",1869,0,"",shellscript,selection_mouse +219,401927,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_ffn_dim_ablation.sh",1778,0,"",shellscript,selection_mouse +220,402547,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_ffn_dim_ablation.sh",1863,0,"",shellscript,selection_mouse +221,403189,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_ffn_dim_ablation.sh",1815,0,"",shellscript,selection_mouse +222,479501,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_ffn_dim_ablation.sh",1227,0,"",shellscript,selection_mouse +223,480317,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_ffn_dim_ablation.sh",1227,0," ",shellscript,content +224,480319,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_ffn_dim_ablation.sh",1228,0,"",shellscript,selection_keyboard +225,480466,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_ffn_dim_ablation.sh",1228,0,"a",shellscript,content +226,480467,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_ffn_dim_ablation.sh",1229,0,"",shellscript,selection_keyboard +227,480536,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_ffn_dim_ablation.sh",1229,0,"b",shellscript,content +228,480537,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_ffn_dim_ablation.sh",1230,0,"",shellscript,selection_keyboard +229,480731,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_ffn_dim_ablation.sh",1230,0,"l",shellscript,content 
+230,480732,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_ffn_dim_ablation.sh",1231,0,"",shellscript,selection_keyboard +231,480790,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_ffn_dim_ablation.sh",1231,0,"a",shellscript,content +232,480791,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_ffn_dim_ablation.sh",1232,0,"",shellscript,selection_keyboard +233,480979,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_ffn_dim_ablation.sh",1232,0,"t",shellscript,content +234,480980,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_ffn_dim_ablation.sh",1233,0,"",shellscript,selection_keyboard +235,481117,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_ffn_dim_ablation.sh",1233,0,"i",shellscript,content +236,481118,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_ffn_dim_ablation.sh",1234,0,"",shellscript,selection_keyboard +237,481141,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_ffn_dim_ablation.sh",1234,0,"o",shellscript,content +238,481142,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_ffn_dim_ablation.sh",1235,0,"",shellscript,selection_keyboard +239,481284,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_ffn_dim_ablation.sh",1235,0,"n",shellscript,content +240,481285,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_ffn_dim_ablation.sh",1236,0,"",shellscript,selection_keyboard +241,482184,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_ffn_dim_ablation.sh",1228,8,"",shellscript,content +242,483350,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_ffn_dim_ablation.sh",1227,1,"",shellscript,content +243,485057,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_ffn_dim_ablation.sh",1243,0,"",shellscript,selection_command +244,485552,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_ffn_dim_ablation.sh",1242,0,"",shellscript,selection_command +245,485577,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_ffn_dim_ablation.sh",1241,0,"",shellscript,selection_command +246,485621,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_ffn_dim_ablation.sh",1240,0,"",shellscript,selection_command +247,485664,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_ffn_dim_ablation.sh",1239,0,"",shellscript,selection_command +248,485666,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_ffn_dim_ablation.sh",1238,0,"",shellscript,selection_command +249,485714,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_ffn_dim_ablation.sh",1237,0,"",shellscript,selection_command +250,485757,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_ffn_dim_ablation.sh",1236,0,"",shellscript,selection_command +251,485766,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_ffn_dim_ablation.sh",1235,0,"",shellscript,selection_command +252,486103,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_ffn_dim_ablation.sh",1236,0,"",shellscript,selection_command +253,486298,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_ffn_dim_ablation.sh",1235,1,"",shellscript,content 
+254,486435,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_ffn_dim_ablation.sh",1235,0," ",shellscript,content +255,486436,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_ffn_dim_ablation.sh",1236,0,"",shellscript,selection_keyboard +256,487696,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_ffn_dim_ablation.sh",1147,0,"",shellscript,selection_mouse +257,488221,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_ffn_dim_ablation.sh",1246,0,"",shellscript,selection_mouse +258,505043,"slurm/jobs/franz/berlin/coinrun/mila_submission/coinrun_dynamics_base.sh",0,0,"",shellscript,tab +259,507597,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_base.sh",0,0,"",shellscript,tab +260,514459,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_gt_actions.sh",0,0,"#!/usr/bin/env bash\n\n#SBATCH --nodes=1\n#SBATCH --ntasks-per-node=1\n#SBATCH --time=24:00:00\n#SBATCH --cpus-per-task=8\n#SBATCH --gres=gpu:1\n#SBATCH --output=/fast/project/HFMI_SynergyUnit/jafar_ws/logs/franz/coinrun/dynamics/%x_%j.log\n#SBATCH --error=/fast/project/HFMI_SynergyUnit/jafar_ws/logs/franz/coinrun/dynamics/%x_%j.log\n#SBATCH --job-name=dynamics_coinrun_mila_submission\n#SBATCH --requeue\n#SBATCH --signal=b:usr1@300 # 5 min before timeout\n\n# --- signal trap to requeue job before timeout ---\nrequeue_job() {\n echo ""[$(date)] caught sigusr1 (timeout warning), requeueing slurm job $SLURM_JOB_ID...""\n # optional: trigger checkpoint saving here\n # e.g., touch $checkpoint_dir/requeue_trigger\n scontrol requeue $SLURM_JOB_ID\n exit 0\n}\n\ntrap requeue_job sigusr1\n\n# set checkpoint flag based on restart count\nrestart_count=$(scontrol show job $SLURM_JOB_ID | grep -o 'Restarts=[0-9]*' | cut -d'=' -f2)\n\nif [ $restart_count -eq 0 ]; then\n restore_ckpt_flag=""--no-restore-ckpt""\nelse\n restore_ckpt_flag=""--restore-ckpt""\nfi\n\n\n\n# Log the sbatch script\ncat $0\n\nsource .venv/bin/activate\n\njob_name=$SLURM_JOB_NAME\nslurm_job_id=$SLURM_JOB_ID\n\ntags=""coinrun dynamics 500m_dataset mila_submission""\n\narray_records_dir=""/fast/project/HFMI_SynergyUnit/jafar_ws/data/coinrun/array_records_500m_seed_w_increment""\ntokenizer_ckpt_dir=""/fast/project/HFMI_SynergyUnit/jafar_ws/checkpoints/coinrun/tokenizer/tokenizer_coinrun_500m_dataset_29490/""\nlam_ckpt_dir=""/fast/project/HFMI_SynergyUnit/jafar_ws/checkpoints/coinrun/lam/lam_coinrun_500m_dataset_29489""\nCHECKPOINT_DIR=""/fast/project/HFMI_SynergyUnit/jafar_ws/checkpoints/coinrun/dynamics/${job_name}/${slurm_job_id}""\nmkdir -p $CHECKPOINT_DIR\n\nenv | grep SLURM\n\n\nsrun python jasmine/train_dynamics.py \\n --save_ckpt \\n $restore_ckpt_flag \\n --wandb_id $SLURM_JOB_ID \\n --ckpt_dir $CHECKPOINT_DIR \\n --name=""${job_name}_${slurm_job_id}"" \\n --tags ${tags} \\n --entity instant-uv \\n --project jafar \\n --tokenizer_checkpoint=""${tokenizer_ckpt_dir}"" \\n --val_data_dir=""${array_records_dir}/val"" \\n --data_dir=""${array_records_dir}/train"" &\n\nchild_pid=$!\n\nwait $child_pid\n\n",shellscript,tab +261,516838,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_gt_actions.sh",378,0,"",shellscript,selection_mouse +262,516885,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_gt_actions.sh",377,0,"",shellscript,selection_command 
+263,517219,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_gt_actions.sh",378,0,"",shellscript,selection_command +264,518006,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_gt_actions.sh",378,0,"_",shellscript,content +265,518007,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_gt_actions.sh",379,0,"",shellscript,selection_keyboard +266,518242,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_gt_actions.sh",379,0,"g",shellscript,content +267,518243,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_gt_actions.sh",380,0,"",shellscript,selection_keyboard +268,518355,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_gt_actions.sh",380,0,"t",shellscript,content +269,518356,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_gt_actions.sh",381,0,"",shellscript,selection_keyboard +270,518546,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_gt_actions.sh",381,0,"_",shellscript,content +271,518546,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_gt_actions.sh",382,0,"",shellscript,selection_keyboard +272,519258,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_gt_actions.sh",382,0,"a",shellscript,content +273,519259,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_gt_actions.sh",383,0,"",shellscript,selection_keyboard +274,519449,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_gt_actions.sh",383,0,"c",shellscript,content +275,519450,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_gt_actions.sh",384,0,"",shellscript,selection_keyboard +276,519674,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_gt_actions.sh",384,0,"t",shellscript,content +277,519675,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_gt_actions.sh",385,0,"",shellscript,selection_keyboard +278,519796,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_gt_actions.sh",385,0,"i",shellscript,content +279,519796,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_gt_actions.sh",386,0,"",shellscript,selection_keyboard +280,519858,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_gt_actions.sh",386,0,"o",shellscript,content +281,519859,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_gt_actions.sh",387,0,"",shellscript,selection_keyboard +282,520025,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_gt_actions.sh",387,0,"n",shellscript,content +283,520026,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_gt_actions.sh",388,0,"",shellscript,selection_keyboard +284,520082,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_gt_actions.sh",388,0,"s",shellscript,content +285,520083,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_gt_actions.sh",389,0,"",shellscript,selection_keyboard +286,522873,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_gt_actions.sh",1788,0,"",shellscript,selection_mouse +287,523294,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_gt_actions.sh",1787,0,"",shellscript,selection_command 
+288,523743,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_gt_actions.sh",1747,0,"",shellscript,selection_command +289,524831,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_gt_actions.sh",1770,0,"\n",shellscript,content +290,525116,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_gt_actions.sh",1771,0,"-",shellscript,content +291,525116,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_gt_actions.sh",1772,0,"",shellscript,selection_keyboard +292,525238,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_gt_actions.sh",1772,0,"-",shellscript,content +293,525239,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_gt_actions.sh",1773,0,"",shellscript,selection_keyboard +294,525527,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_gt_actions.sh",1773,0,"u",shellscript,content +295,525528,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_gt_actions.sh",1774,0,"",shellscript,selection_keyboard +296,525928,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_gt_actions.sh",1773,1,"",shellscript,content +297,526064,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_gt_actions.sh",1772,1,"",shellscript,content +298,526211,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_gt_actions.sh",1771,1,"",shellscript,content +299,526370,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_gt_actions.sh",1771,0," ",shellscript,content +300,526824,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_gt_actions.sh",1775,0,"-",shellscript,content +301,526825,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_gt_actions.sh",1776,0,"",shellscript,selection_keyboard +302,526975,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_gt_actions.sh",1776,0,"-",shellscript,content +303,526976,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_gt_actions.sh",1777,0,"",shellscript,selection_keyboard +304,527209,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_gt_actions.sh",1777,0,"u",shellscript,content +305,527210,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_gt_actions.sh",1778,0,"",shellscript,selection_keyboard +306,527301,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_gt_actions.sh",1778,0,"s",shellscript,content +307,527302,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_gt_actions.sh",1779,0,"",shellscript,selection_keyboard +308,527444,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_gt_actions.sh",1779,0,"e",shellscript,content +309,527445,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_gt_actions.sh",1780,0,"",shellscript,selection_keyboard +310,527614,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_gt_actions.sh",1780,0,"_",shellscript,content +311,527615,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_gt_actions.sh",1781,0,"",shellscript,selection_keyboard +312,527816,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_gt_actions.sh",1781,0,"g",shellscript,content 
+313,527816,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_gt_actions.sh",1782,0,"",shellscript,selection_keyboard +314,528297,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_gt_actions.sh",1782,0,"t",shellscript,content +315,528298,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_gt_actions.sh",1783,0,"",shellscript,selection_keyboard +316,528503,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_gt_actions.sh",1783,0,"_",shellscript,content +317,528504,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_gt_actions.sh",1784,0,"",shellscript,selection_keyboard +318,528616,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_gt_actions.sh",1784,0,"a",shellscript,content +319,528617,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_gt_actions.sh",1785,0,"",shellscript,selection_keyboard +320,528725,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_gt_actions.sh",1785,0,"c",shellscript,content +321,528726,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_gt_actions.sh",1786,0,"",shellscript,selection_keyboard +322,528894,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_gt_actions.sh",1786,0,"t",shellscript,content +323,528895,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_gt_actions.sh",1787,0,"",shellscript,selection_keyboard +324,528997,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_gt_actions.sh",1787,0,"i",shellscript,content +325,528998,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_gt_actions.sh",1788,0,"",shellscript,selection_keyboard +326,529097,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_gt_actions.sh",1788,0,"o",shellscript,content +327,529098,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_gt_actions.sh",1789,0,"",shellscript,selection_keyboard +328,529259,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_gt_actions.sh",1789,0,"n",shellscript,content +329,529260,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_gt_actions.sh",1790,0,"",shellscript,selection_keyboard +330,529412,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_gt_actions.sh",1790,0,"s",shellscript,content +331,529412,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_gt_actions.sh",1791,0,"",shellscript,selection_keyboard +332,529718,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_gt_actions.sh",1791,0," ",shellscript,content +333,529719,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_gt_actions.sh",1792,0,"",shellscript,selection_keyboard +334,529897,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_gt_actions.sh",1792,0,"\",shellscript,content +335,529898,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_gt_actions.sh",1793,0,"",shellscript,selection_keyboard +336,530190,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_gt_actions.sh",1792,0,"",shellscript,selection_command +337,533592,"jasmine/train_dynamics.py",0,0,"",python,tab +338,533593,"jasmine/train_dynamics.py",2245,0,"",python,selection_mouse 
+339,533594,"jasmine/train_dynamics.py",2235,14,"use_gt_actions",python,selection_mouse +340,537559,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_gt_actions.sh",0,0,"",shellscript,tab +341,537560,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_gt_actions.sh",1221,0,"",shellscript,selection_mouse +342,539088,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_gt_actions.sh",1221,0," ",shellscript,content +343,539089,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_gt_actions.sh",1222,0,"",shellscript,selection_keyboard +344,539301,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_gt_actions.sh",1222,0,"a",shellscript,content +345,539302,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_gt_actions.sh",1223,0,"",shellscript,selection_keyboard +346,539441,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_gt_actions.sh",1223,0,"b",shellscript,content +347,539442,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_gt_actions.sh",1224,0,"",shellscript,selection_keyboard +348,539588,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_gt_actions.sh",1224,0,"l",shellscript,content +349,539589,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_gt_actions.sh",1225,0,"",shellscript,selection_keyboard +350,539632,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_gt_actions.sh",1225,0,"a",shellscript,content +351,539632,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_gt_actions.sh",1226,0,"",shellscript,selection_keyboard +352,539828,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_gt_actions.sh",1226,0,"t",shellscript,content +353,539828,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_gt_actions.sh",1227,0,"",shellscript,selection_keyboard +354,539933,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_gt_actions.sh",1227,0,"i",shellscript,content +355,539934,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_gt_actions.sh",1228,0,"",shellscript,selection_keyboard +356,540005,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_gt_actions.sh",1228,0,"o",shellscript,content +357,540006,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_gt_actions.sh",1229,0,"",shellscript,selection_keyboard +358,540122,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_gt_actions.sh",1229,0,"n",shellscript,content +359,540123,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_gt_actions.sh",1230,0,"",shellscript,selection_keyboard +360,540199,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_gt_actions.sh",1230,0," ",shellscript,content +361,540199,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_gt_actions.sh",1231,0,"",shellscript,selection_keyboard +362,540894,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_gt_actions.sh",1231,0,"g",shellscript,content +363,540895,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_gt_actions.sh",1232,0,"",shellscript,selection_keyboard +364,541061,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_gt_actions.sh",1232,0,"t",shellscript,content 
+365,541062,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_gt_actions.sh",1233,0,"",shellscript,selection_keyboard +366,541500,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_gt_actions.sh",1233,0,"-",shellscript,content +367,541501,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_gt_actions.sh",1234,0,"",shellscript,selection_keyboard +368,541639,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_gt_actions.sh",1234,0,"a",shellscript,content +369,541639,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_gt_actions.sh",1235,0,"",shellscript,selection_keyboard +370,541763,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_gt_actions.sh",1235,0,"c",shellscript,content +371,541764,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_gt_actions.sh",1236,0,"",shellscript,selection_keyboard +372,541970,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_gt_actions.sh",1236,0,"t",shellscript,content +373,541971,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_gt_actions.sh",1237,0,"",shellscript,selection_keyboard +374,542072,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_gt_actions.sh",1237,0,"i",shellscript,content +375,542072,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_gt_actions.sh",1238,0,"",shellscript,selection_keyboard +376,542170,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_gt_actions.sh",1238,0,"o",shellscript,content +377,542171,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_gt_actions.sh",1239,0,"",shellscript,selection_keyboard +378,542298,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_gt_actions.sh",1239,0,"n",shellscript,content +379,542299,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_gt_actions.sh",1240,0,"",shellscript,selection_keyboard +380,542369,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_gt_actions.sh",1240,0,"s",shellscript,content +381,542369,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_gt_actions.sh",1241,0,"",shellscript,selection_keyboard +382,543411,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_gt_actions.sh",1225,0,"",shellscript,selection_mouse +383,570963,"slurm/jobs/franz/berlin/coinrun/mila_submission/coinrun_dynamics_base.sh",0,0,"",shellscript,tab +384,574101,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_base.sh",0,0,"",shellscript,tab +385,583244,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_causal.sh",0,0,"#!/usr/bin/env bash\n\n#SBATCH --nodes=1\n#SBATCH --ntasks-per-node=1\n#SBATCH --time=24:00:00\n#SBATCH --cpus-per-task=8\n#SBATCH --gres=gpu:1\n#SBATCH --output=/fast/project/HFMI_SynergyUnit/jafar_ws/logs/franz/coinrun/dynamics/%x_%j.log\n#SBATCH --error=/fast/project/HFMI_SynergyUnit/jafar_ws/logs/franz/coinrun/dynamics/%x_%j.log\n#SBATCH --job-name=dynamics_coinrun_mila_submission\n#SBATCH --requeue\n#SBATCH --signal=b:usr1@300 # 5 min before timeout\n\n# --- signal trap to requeue job before timeout ---\nrequeue_job() {\n echo ""[$(date)] caught sigusr1 (timeout warning), requeueing slurm job $SLURM_JOB_ID...""\n # optional: trigger checkpoint saving here\n # e.g., touch 
$checkpoint_dir/requeue_trigger\n scontrol requeue $SLURM_JOB_ID\n exit 0\n}\n\ntrap requeue_job sigusr1\n\n# set checkpoint flag based on restart count\nrestart_count=$(scontrol show job $SLURM_JOB_ID | grep -o 'Restarts=[0-9]*' | cut -d'=' -f2)\n\nif [ $restart_count -eq 0 ]; then\n restore_ckpt_flag=""--no-restore-ckpt""\nelse\n restore_ckpt_flag=""--restore-ckpt""\nfi\n\n\n\n# Log the sbatch script\ncat $0\n\nsource .venv/bin/activate\n\njob_name=$SLURM_JOB_NAME\nslurm_job_id=$SLURM_JOB_ID\n\ntags=""coinrun dynamics 500m_dataset mila_submission""\n\narray_records_dir=""/fast/project/HFMI_SynergyUnit/jafar_ws/data/coinrun/array_records_500m_seed_w_increment""\ntokenizer_ckpt_dir=""/fast/project/HFMI_SynergyUnit/jafar_ws/checkpoints/coinrun/tokenizer/tokenizer_coinrun_500m_dataset_29490/""\nlam_ckpt_dir=""/fast/project/HFMI_SynergyUnit/jafar_ws/checkpoints/coinrun/lam/lam_coinrun_500m_dataset_29489""\nCHECKPOINT_DIR=""/fast/project/HFMI_SynergyUnit/jafar_ws/checkpoints/coinrun/dynamics/${job_name}/${slurm_job_id}""\nmkdir -p $CHECKPOINT_DIR\n\nenv | grep SLURM\n\n\nsrun python jasmine/train_dynamics.py \\n --save_ckpt \\n $restore_ckpt_flag \\n --wandb_id $SLURM_JOB_ID \\n --ckpt_dir $CHECKPOINT_DIR \\n --name=""${job_name}_${slurm_job_id}"" \\n --tags ${tags} \\n --entity instant-uv \\n --project jafar \\n --tokenizer_checkpoint=""${tokenizer_ckpt_dir}"" \\n --val_data_dir=""${array_records_dir}/val"" \\n --data_dir=""${array_records_dir}/train"" &\n\nchild_pid=$!\n\nwait $child_pid\n\n",shellscript,tab +386,585429,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_causal.sh",374,0,"",shellscript,selection_mouse +387,585837,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_causal.sh",378,0,"",shellscript,selection_command +388,586375,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_causal.sh",378,0,"_",shellscript,content +389,586376,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_causal.sh",379,0,"",shellscript,selection_keyboard +390,586673,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_causal.sh",379,0,"c",shellscript,content +391,586674,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_causal.sh",380,0,"",shellscript,selection_keyboard +392,586823,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_causal.sh",380,0,"a",shellscript,content +393,586824,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_causal.sh",381,0,"",shellscript,selection_keyboard +394,586923,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_causal.sh",381,0,"u",shellscript,content +395,586924,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_causal.sh",382,0,"",shellscript,selection_keyboard +396,587026,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_causal.sh",382,0,"s",shellscript,content +397,587027,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_causal.sh",383,0,"",shellscript,selection_keyboard +398,587164,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_causal.sh",383,0,"a",shellscript,content +399,587165,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_causal.sh",384,0,"",shellscript,selection_keyboard +400,587210,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_causal.sh",384,0,"l",shellscript,content 
+401,587211,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_causal.sh",385,0,"",shellscript,selection_keyboard +402,591138,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_causal.sh",1963,0,"",shellscript,selection_mouse +403,592726,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_causal.sh",1766,0,"",shellscript,selection_mouse +404,593057,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_causal.sh",1766,0,"\n",shellscript,content +405,593566,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_causal.sh",1767,0," ",shellscript,content +406,593944,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_causal.sh",1771,0,"-",shellscript,content +407,593945,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_causal.sh",1772,0,"",shellscript,selection_keyboard +408,594087,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_causal.sh",1772,0,"-",shellscript,content +409,594088,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_causal.sh",1773,0,"",shellscript,selection_keyboard +410,595143,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_causal.sh",1773,0,"d",shellscript,content +411,595145,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_causal.sh",1774,0,"",shellscript,selection_keyboard +412,595684,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_causal.sh",1774,0,"y",shellscript,content +413,595686,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_causal.sh",1775,0,"",shellscript,selection_keyboard +414,595800,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_causal.sh",1775,0," ",shellscript,content +415,595800,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_causal.sh",1776,0,"",shellscript,selection_keyboard +416,596007,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_causal.sh",1776,0,"a",shellscript,content +417,596008,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_causal.sh",1777,0,"",shellscript,selection_keyboard +418,596358,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_causal.sh",2176,0,"",shellscript,selection_keyboard +419,597777,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_causal.sh",1777,0,"",shellscript,selection_mouse +420,598328,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_causal.sh",1776,1,"",shellscript,content +421,598462,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_causal.sh",1775,1,"",shellscript,content +422,598590,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_causal.sh",1774,1,"",shellscript,content +423,599235,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_causal.sh",1774,0,"y",shellscript,content +424,599236,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_causal.sh",1775,0,"",shellscript,selection_keyboard +425,599308,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_causal.sh",1775,0,"n",shellscript,content +426,599309,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_causal.sh",1776,0,"",shellscript,selection_keyboard 
+427,599443,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_causal.sh",1776,0,"a",shellscript,content +428,599444,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_causal.sh",1777,0,"",shellscript,selection_keyboard +429,599653,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_causal.sh",1777,0,"_",shellscript,content +430,599654,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_causal.sh",1778,0,"",shellscript,selection_keyboard +431,599993,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_causal.sh",1778,0,"t",shellscript,content +432,599994,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_causal.sh",1779,0,"",shellscript,selection_keyboard +433,600246,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_causal.sh",1779,0,"y",shellscript,content +434,600247,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_causal.sh",1780,0,"",shellscript,selection_keyboard +435,600416,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_causal.sh",1780,0,"p",shellscript,content +436,600417,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_causal.sh",1781,0,"",shellscript,selection_keyboard +437,600531,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_causal.sh",1781,0,"e",shellscript,content +438,600532,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_causal.sh",1782,0,"",shellscript,selection_keyboard +439,600873,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_causal.sh",1782,0,"=",shellscript,content +440,600874,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_causal.sh",1783,0,"",shellscript,selection_keyboard +441,601559,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_causal.sh",1783,0,"c",shellscript,content +442,601560,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_causal.sh",1784,0,"",shellscript,selection_keyboard +443,601689,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_causal.sh",1784,0,"a",shellscript,content +444,601690,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_causal.sh",1785,0,"",shellscript,selection_keyboard +445,601795,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_causal.sh",1785,0,"u",shellscript,content +446,601796,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_causal.sh",1786,0,"",shellscript,selection_keyboard +447,601901,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_causal.sh",1786,0,"s",shellscript,content +448,601902,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_causal.sh",1787,0,"",shellscript,selection_keyboard +449,602025,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_causal.sh",1787,0,"a",shellscript,content +450,602026,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_causal.sh",1788,0,"",shellscript,selection_keyboard +451,602136,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_causal.sh",1788,0,"l",shellscript,content +452,602136,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_causal.sh",1789,0,"",shellscript,selection_keyboard 
+453,602278,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_causal.sh",1789,0," ",shellscript,content +454,602279,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_causal.sh",1790,0,"",shellscript,selection_keyboard +455,602485,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_causal.sh",1790,0,"\",shellscript,content +456,602486,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_causal.sh",1791,0,"",shellscript,selection_keyboard +457,602761,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_causal.sh",1790,0,"",shellscript,selection_command +458,606878,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_causal.sh",1217,0,"",shellscript,selection_mouse +459,607781,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_causal.sh",1217,0," ",shellscript,content +460,607782,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_causal.sh",1218,0,"",shellscript,selection_keyboard +461,608698,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_causal.sh",1218,0,"a",shellscript,content +462,608700,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_causal.sh",1219,0,"",shellscript,selection_keyboard +463,608852,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_causal.sh",1219,0,"b",shellscript,content +464,608853,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_causal.sh",1220,0,"",shellscript,selection_keyboard +465,609112,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_causal.sh",1220,0,"l",shellscript,content +466,609113,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_causal.sh",1221,0,"",shellscript,selection_keyboard +467,609217,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_causal.sh",1221,0,"a",shellscript,content +468,609217,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_causal.sh",1222,0,"",shellscript,selection_keyboard +469,609441,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_causal.sh",1222,0,"t",shellscript,content +470,609441,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_causal.sh",1223,0,"",shellscript,selection_keyboard +471,609532,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_causal.sh",1223,0,"i",shellscript,content +472,609533,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_causal.sh",1224,0,"",shellscript,selection_keyboard +473,609625,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_causal.sh",1224,0,"o",shellscript,content +474,609626,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_causal.sh",1225,0,"",shellscript,selection_keyboard +475,609763,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_causal.sh",1225,0,"n",shellscript,content +476,609764,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_causal.sh",1226,0,"",shellscript,selection_keyboard +477,609933,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_causal.sh",1226,0," ",shellscript,content +478,609934,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_causal.sh",1227,0,"",shellscript,selection_keyboard 
+479,610062,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_causal.sh",1227,0,"c",shellscript,content +480,610063,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_causal.sh",1228,0,"",shellscript,selection_keyboard +481,610198,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_causal.sh",1228,0,"a",shellscript,content +482,610199,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_causal.sh",1229,0,"",shellscript,selection_keyboard +483,610296,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_causal.sh",1229,0,"u",shellscript,content +484,610297,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_causal.sh",1230,0,"",shellscript,selection_keyboard +485,610394,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_causal.sh",1230,0,"s",shellscript,content +486,610395,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_causal.sh",1231,0,"",shellscript,selection_keyboard +487,610514,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_causal.sh",1231,0,"a",shellscript,content +488,610515,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_causal.sh",1232,0,"",shellscript,selection_keyboard +489,610593,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_causal.sh",1232,0,"l",shellscript,content +490,610594,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_causal.sh",1233,0,"",shellscript,selection_keyboard +491,611236,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_causal.sh",1232,0,"",shellscript,selection_command +492,640374,"slurm/jobs/franz/berlin/coinrun/mila_submission/coinrun_dynamics_base.sh",0,0,"",shellscript,tab +493,642923,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_base.sh",0,0,"",shellscript,tab +494,654968,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_no_cotraining.sh",0,0,"#!/usr/bin/env bash\n\n#SBATCH --nodes=1\n#SBATCH --ntasks-per-node=1\n#SBATCH --time=24:00:00\n#SBATCH --cpus-per-task=8\n#SBATCH --gres=gpu:1\n#SBATCH --output=/fast/project/HFMI_SynergyUnit/jafar_ws/logs/franz/coinrun/dynamics/%x_%j.log\n#SBATCH --error=/fast/project/HFMI_SynergyUnit/jafar_ws/logs/franz/coinrun/dynamics/%x_%j.log\n#SBATCH --job-name=dynamics_coinrun_mila_submission\n#SBATCH --requeue\n#SBATCH --signal=b:usr1@300 # 5 min before timeout\n\n# --- signal trap to requeue job before timeout ---\nrequeue_job() {\n echo ""[$(date)] caught sigusr1 (timeout warning), requeueing slurm job $SLURM_JOB_ID...""\n # optional: trigger checkpoint saving here\n # e.g., touch $checkpoint_dir/requeue_trigger\n scontrol requeue $SLURM_JOB_ID\n exit 0\n}\n\ntrap requeue_job sigusr1\n\n# set checkpoint flag based on restart count\nrestart_count=$(scontrol show job $SLURM_JOB_ID | grep -o 'Restarts=[0-9]*' | cut -d'=' -f2)\n\nif [ $restart_count -eq 0 ]; then\n restore_ckpt_flag=""--no-restore-ckpt""\nelse\n restore_ckpt_flag=""--restore-ckpt""\nfi\n\n\n\n# Log the sbatch script\ncat $0\n\nsource .venv/bin/activate\n\njob_name=$SLURM_JOB_NAME\nslurm_job_id=$SLURM_JOB_ID\n\ntags=""coinrun dynamics 500m_dataset 
mila_submission""\n\narray_records_dir=""/fast/project/HFMI_SynergyUnit/jafar_ws/data/coinrun/array_records_500m_seed_w_increment""\ntokenizer_ckpt_dir=""/fast/project/HFMI_SynergyUnit/jafar_ws/checkpoints/coinrun/tokenizer/tokenizer_coinrun_500m_dataset_29490/""\nlam_ckpt_dir=""/fast/project/HFMI_SynergyUnit/jafar_ws/checkpoints/coinrun/lam/lam_coinrun_500m_dataset_29489""\nCHECKPOINT_DIR=""/fast/project/HFMI_SynergyUnit/jafar_ws/checkpoints/coinrun/dynamics/${job_name}/${slurm_job_id}""\nmkdir -p $CHECKPOINT_DIR\n\nenv | grep SLURM\n\n\nsrun python jasmine/train_dynamics.py \\n --save_ckpt \\n $restore_ckpt_flag \\n --wandb_id $SLURM_JOB_ID \\n --ckpt_dir $CHECKPOINT_DIR \\n --name=""${job_name}_${slurm_job_id}"" \\n --tags ${tags} \\n --entity instant-uv \\n --project jafar \\n --tokenizer_checkpoint=""${tokenizer_ckpt_dir}"" \\n --val_data_dir=""${array_records_dir}/val"" \\n --data_dir=""${array_records_dir}/train"" &\n\nchild_pid=$!\n\nwait $child_pid\n\n",shellscript,tab +495,663342,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_no_cotraining.sh",378,0,"",shellscript,selection_mouse +496,663356,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_no_cotraining.sh",377,0,"",shellscript,selection_command +497,663740,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_no_cotraining.sh",378,0,"",shellscript,selection_command +498,664046,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_no_cotraining.sh",378,0,"_",shellscript,content +499,664048,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_no_cotraining.sh",379,0,"",shellscript,selection_keyboard +500,664704,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_no_cotraining.sh",379,0,"n",shellscript,content +501,664705,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_no_cotraining.sh",380,0,"",shellscript,selection_keyboard +502,664873,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_no_cotraining.sh",380,0,"o",shellscript,content +503,664874,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_no_cotraining.sh",381,0,"",shellscript,selection_keyboard +504,665230,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_no_cotraining.sh",381,0,"_",shellscript,content +505,665231,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_no_cotraining.sh",382,0,"",shellscript,selection_keyboard +506,665573,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_no_cotraining.sh",382,0,"c",shellscript,content +507,665574,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_no_cotraining.sh",383,0,"",shellscript,selection_keyboard +508,667204,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_no_cotraining.sh",382,1,"",shellscript,content +509,667326,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_no_cotraining.sh",381,1,"",shellscript,content +510,667474,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_no_cotraining.sh",380,1,"",shellscript,content +511,667585,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_no_cotraining.sh",379,1,"",shellscript,content +512,667792,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_no_cotraining.sh",379,0,"n",shellscript,content 
+513,667793,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_no_cotraining.sh",380,0,"",shellscript,selection_keyboard +514,667934,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_no_cotraining.sh",380,0,"o",shellscript,content +515,667935,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_no_cotraining.sh",381,0,"",shellscript,selection_keyboard +516,668282,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_no_cotraining.sh",381,0,"_",shellscript,content +517,668283,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_no_cotraining.sh",382,0,"",shellscript,selection_keyboard +518,668550,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_no_cotraining.sh",382,0,"c",shellscript,content +519,668551,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_no_cotraining.sh",383,0,"",shellscript,selection_keyboard +520,668636,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_no_cotraining.sh",383,0,"o",shellscript,content +521,668637,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_no_cotraining.sh",384,0,"",shellscript,selection_keyboard +522,669021,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_no_cotraining.sh",384,0,"t",shellscript,content +523,669021,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_no_cotraining.sh",385,0,"",shellscript,selection_keyboard +524,669317,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_no_cotraining.sh",385,0,"r",shellscript,content +525,669318,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_no_cotraining.sh",386,0,"",shellscript,selection_keyboard +526,669532,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_no_cotraining.sh",386,0,"a",shellscript,content +527,669533,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_no_cotraining.sh",387,0,"",shellscript,selection_keyboard +528,669559,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_no_cotraining.sh",387,0,"i",shellscript,content +529,669560,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_no_cotraining.sh",388,0,"",shellscript,selection_keyboard +530,669628,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_no_cotraining.sh",388,0,"n",shellscript,content +531,669629,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_no_cotraining.sh",389,0,"",shellscript,selection_keyboard +532,669760,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_no_cotraining.sh",389,0,"i",shellscript,content +533,669761,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_no_cotraining.sh",390,0,"",shellscript,selection_keyboard +534,669793,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_no_cotraining.sh",390,0,"n",shellscript,content +535,669794,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_no_cotraining.sh",391,0,"",shellscript,selection_keyboard +536,669845,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_no_cotraining.sh",391,0,"g",shellscript,content +537,669845,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_no_cotraining.sh",392,0,"",shellscript,selection_keyboard 
+538,675930,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_no_cotraining.sh",1357,0,"",shellscript,selection_mouse +539,676560,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_no_cotraining.sh",1242,0,"",shellscript,selection_mouse +540,677275,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_no_cotraining.sh",1479,0,"",shellscript,selection_mouse +541,678513,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_no_cotraining.sh",1472,0,"",shellscript,selection_mouse +542,678801,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_no_cotraining.sh",1465,12,"lam_ckpt_dir",shellscript,selection_mouse +543,680996,"TERMINAL",0,0,"bash",,terminal_focus +544,682397,"TERMINAL",0,0,"cd slurm/",,terminal_command +545,683742,"TERMINAL",0,0,"git pull",,terminal_command +546,683791,"TERMINAL",0,0,"]633;C",,terminal_output +547,685357,"TERMINAL",0,0,"Already up to date.\r\n]0;tum_cte0515@hkn1990:~/Projects/jasmine/slurm",,terminal_output +548,687448,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_no_cotraining.sh",1471,0,"",shellscript,selection_mouse +549,726607,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_no_cotraining.sh",1478,0,"",shellscript,selection_mouse +550,726688,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_no_cotraining.sh",1478,1,"""",shellscript,selection_mouse +551,726688,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_no_cotraining.sh",1478,5,"""/fas",shellscript,selection_mouse +552,726689,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_no_cotraining.sh",1478,9,"""/fast/pr",shellscript,selection_mouse +553,726689,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_no_cotraining.sh",1478,11,"""/fast/proj",shellscript,selection_mouse +554,726728,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_no_cotraining.sh",1478,16,"""/fast/project/H",shellscript,selection_mouse +555,726733,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_no_cotraining.sh",1478,22,"""/fast/project/HFMI_Sy",shellscript,selection_mouse +556,726773,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_no_cotraining.sh",1478,28,"""/fast/project/HFMI_SynergyU",shellscript,selection_mouse +557,726804,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_no_cotraining.sh",1478,33,"""/fast/project/HFMI_SynergyUnit/j",shellscript,selection_mouse +558,726805,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_no_cotraining.sh",1478,43,"""/fast/project/HFMI_SynergyUnit/jafar_ws/ch",shellscript,selection_mouse +559,726805,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_no_cotraining.sh",1395,83,"/checkpoints/coinrun/tokenizer/tokenizer_coinrun_500m_dataset_29490/""\nlam_ckpt_dir=",shellscript,selection_mouse +560,726806,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_no_cotraining.sh",1397,81,"heckpoints/coinrun/tokenizer/tokenizer_coinrun_500m_dataset_29490/""\nlam_ckpt_dir=",shellscript,selection_mouse +561,726862,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_no_cotraining.sh",1399,79,"ckpoints/coinrun/tokenizer/tokenizer_coinrun_500m_dataset_29490/""\nlam_ckpt_dir=",shellscript,selection_mouse 
+562,726863,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_no_cotraining.sh",1401,77,"points/coinrun/tokenizer/tokenizer_coinrun_500m_dataset_29490/""\nlam_ckpt_dir=",shellscript,selection_mouse +563,726863,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_no_cotraining.sh",1402,76,"oints/coinrun/tokenizer/tokenizer_coinrun_500m_dataset_29490/""\nlam_ckpt_dir=",shellscript,selection_mouse +564,726896,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_no_cotraining.sh",1404,74,"nts/coinrun/tokenizer/tokenizer_coinrun_500m_dataset_29490/""\nlam_ckpt_dir=",shellscript,selection_mouse +565,726896,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_no_cotraining.sh",1406,72,"s/coinrun/tokenizer/tokenizer_coinrun_500m_dataset_29490/""\nlam_ckpt_dir=",shellscript,selection_mouse +566,726937,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_no_cotraining.sh",1407,71,"/coinrun/tokenizer/tokenizer_coinrun_500m_dataset_29490/""\nlam_ckpt_dir=",shellscript,selection_mouse +567,726938,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_no_cotraining.sh",1408,70,"coinrun/tokenizer/tokenizer_coinrun_500m_dataset_29490/""\nlam_ckpt_dir=",shellscript,selection_mouse +568,726981,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_no_cotraining.sh",1409,69,"oinrun/tokenizer/tokenizer_coinrun_500m_dataset_29490/""\nlam_ckpt_dir=",shellscript,selection_mouse +569,726981,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_no_cotraining.sh",1410,68,"inrun/tokenizer/tokenizer_coinrun_500m_dataset_29490/""\nlam_ckpt_dir=",shellscript,selection_mouse +570,726982,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_no_cotraining.sh",1411,67,"nrun/tokenizer/tokenizer_coinrun_500m_dataset_29490/""\nlam_ckpt_dir=",shellscript,selection_mouse +571,727022,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_no_cotraining.sh",1412,66,"run/tokenizer/tokenizer_coinrun_500m_dataset_29490/""\nlam_ckpt_dir=",shellscript,selection_mouse +572,727022,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_no_cotraining.sh",1413,65,"un/tokenizer/tokenizer_coinrun_500m_dataset_29490/""\nlam_ckpt_dir=",shellscript,selection_mouse +573,727066,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_no_cotraining.sh",1464,14,"\nlam_ckpt_dir=",shellscript,selection_mouse +574,727330,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_no_cotraining.sh",1478,96,"""/fast/project/HFMI_SynergyUnit/jafar_ws/checkpoints/coinrun/lam/lam_coinrun_500m_dataset_29489""",shellscript,selection_mouse +575,728082,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_no_cotraining.sh",1478,96,"",shellscript,content +576,729269,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_no_cotraining.sh",1478,0,"""",shellscript,content +577,729270,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_no_cotraining.sh",1479,0,"",shellscript,selection_keyboard +578,730041,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_no_cotraining.sh",1479,0,"/fast/project/HFMI_SynergyUnit/jafar_ws/checkpoints/coinrun/lam/lam_coinrun_mila_submission_no_flash_attention_29738",shellscript,content 
+579,730575,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_no_cotraining.sh",1595,0,"""",shellscript,content +580,730576,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_no_cotraining.sh",1596,0,"",shellscript,selection_keyboard +581,732750,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_no_cotraining.sh",1471,0,"",shellscript,selection_mouse +582,732873,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_no_cotraining.sh",1465,12,"lam_ckpt_dir",shellscript,selection_mouse +583,734210,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_no_cotraining.sh",1476,0,"",shellscript,selection_command +584,734745,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_no_cotraining.sh",2067,0,"",shellscript,selection_mouse +585,734759,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_no_cotraining.sh",2066,0,"",shellscript,selection_command +586,736085,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_no_cotraining.sh",2067,0,"\n --tokenizer_checkpoint=""${tokenizer_ckpt_dir}"" \",shellscript,content +587,736095,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_no_cotraining.sh",2072,0,"",shellscript,selection_command +588,736478,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_no_cotraining.sh",2073,0,"",shellscript,selection_command +589,736695,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_no_cotraining.sh",2074,0,"",shellscript,selection_command +590,737219,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_no_cotraining.sh",2074,20,"",shellscript,content +591,738308,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_no_cotraining.sh",2074,0,"tokenizer_checkpoint",shellscript,content +592,739269,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_no_cotraining.sh",2074,1,"",shellscript,content +593,739406,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_no_cotraining.sh",2074,1,"",shellscript,content +594,739534,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_no_cotraining.sh",2074,1,"",shellscript,content +595,739675,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_no_cotraining.sh",2074,1,"",shellscript,content +596,739800,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_no_cotraining.sh",2074,1,"",shellscript,content +597,739926,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_no_cotraining.sh",2074,1,"",shellscript,content +598,740062,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_no_cotraining.sh",2074,1,"",shellscript,content +599,740197,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_no_cotraining.sh",2074,1,"",shellscript,content +600,740592,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_no_cotraining.sh",2074,1,"",shellscript,content +601,741231,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_no_cotraining.sh",2074,0,"l",shellscript,content +602,741232,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_no_cotraining.sh",2075,0,"",shellscript,selection_keyboard 
+603,741355,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_no_cotraining.sh",2075,0,"a",shellscript,content +604,741356,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_no_cotraining.sh",2076,0,"",shellscript,selection_keyboard +605,741416,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_no_cotraining.sh",2076,0,"m",shellscript,content +606,741417,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_no_cotraining.sh",2077,0,"",shellscript,selection_keyboard +607,743311,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_no_cotraining.sh",2109,0,"",shellscript,selection_command +608,744782,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_no_cotraining.sh",2092,1,"",shellscript,content +609,744921,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_no_cotraining.sh",2092,1,"",shellscript,content +610,745069,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_no_cotraining.sh",2092,1,"",shellscript,content +611,745205,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_no_cotraining.sh",2092,1,"",shellscript,content +612,745337,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_no_cotraining.sh",2092,1,"",shellscript,content +613,745467,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_no_cotraining.sh",2092,1,"",shellscript,content +614,745610,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_no_cotraining.sh",2092,1,"",shellscript,content +615,745750,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_no_cotraining.sh",2092,1,"",shellscript,content +616,746048,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_no_cotraining.sh",2092,1,"",shellscript,content +617,746599,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_no_cotraining.sh",2092,0,"l",shellscript,content +618,746600,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_no_cotraining.sh",2093,0,"",shellscript,selection_keyboard +619,746717,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_no_cotraining.sh",2093,0,"a",shellscript,content +620,746718,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_no_cotraining.sh",2094,0,"",shellscript,selection_keyboard +621,746780,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_no_cotraining.sh",2094,0,"m",shellscript,content +622,746781,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_no_cotraining.sh",2095,0,"",shellscript,selection_keyboard +623,751541,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_no_cotraining.sh",1596,0,"",shellscript,selection_mouse +624,752579,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_no_cotraining.sh",1595,0,"",shellscript,selection_command +625,753150,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_no_cotraining.sh",1595,0,"/",shellscript,content +626,753151,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_no_cotraining.sh",1596,0,"",shellscript,selection_keyboard +627,757947,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_no_cotraining.sh",2083,0,"",shellscript,selection_mouse 
+628,758096,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_no_cotraining.sh",2075,14,"lam_checkpoint",shellscript,selection_mouse +629,758238,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_no_cotraining.sh",2069,41," --lam_checkpoint=""${lam_ckpt_dir}"" \\n",shellscript,selection_mouse +630,759095,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_no_cotraining.sh",2095,0,"",shellscript,selection_mouse +631,759096,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_no_cotraining.sh",2093,12,"lam_ckpt_dir",shellscript,selection_mouse +632,763593,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_no_cotraining.sh",1171,0,"",shellscript,selection_command +633,765198,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_no_cotraining.sh",1224,0,"",shellscript,selection_command +634,767091,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_no_cotraining.sh",1224,0," ",shellscript,content +635,767093,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_no_cotraining.sh",1225,0,"",shellscript,selection_keyboard +636,768006,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_no_cotraining.sh",1225,0,"a",shellscript,content +637,768007,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_no_cotraining.sh",1226,0,"",shellscript,selection_keyboard +638,768074,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_no_cotraining.sh",1226,0,"b",shellscript,content +639,768074,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_no_cotraining.sh",1227,0,"",shellscript,selection_keyboard +640,768269,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_no_cotraining.sh",1227,0,"l",shellscript,content +641,768270,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_no_cotraining.sh",1228,0,"",shellscript,selection_keyboard +642,768392,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_no_cotraining.sh",1228,0,"a",shellscript,content +643,768393,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_no_cotraining.sh",1229,0,"",shellscript,selection_keyboard +644,768531,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_no_cotraining.sh",1229,0,"t",shellscript,content +645,768532,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_no_cotraining.sh",1230,0,"",shellscript,selection_keyboard +646,768588,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_no_cotraining.sh",1230,0,"i",shellscript,content +647,768589,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_no_cotraining.sh",1231,0,"",shellscript,selection_keyboard +648,768683,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_no_cotraining.sh",1231,0,"o",shellscript,content +649,768684,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_no_cotraining.sh",1232,0,"",shellscript,selection_keyboard +650,768806,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_no_cotraining.sh",1232,0,"n",shellscript,content +651,768807,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_no_cotraining.sh",1233,0,"",shellscript,selection_keyboard 
+652,768944,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_no_cotraining.sh",1233,0," ",shellscript,content +653,768945,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_no_cotraining.sh",1234,0,"",shellscript,selection_keyboard +654,769155,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_no_cotraining.sh",1234,0,"n",shellscript,content +655,769156,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_no_cotraining.sh",1235,0,"",shellscript,selection_keyboard +656,769316,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_no_cotraining.sh",1235,0,"o",shellscript,content +657,769317,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_no_cotraining.sh",1236,0,"",shellscript,selection_keyboard +658,769647,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_no_cotraining.sh",1236,0,"_",shellscript,content +659,769648,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_no_cotraining.sh",1237,0,"",shellscript,selection_keyboard +660,769827,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_no_cotraining.sh",1237,0,"c",shellscript,content +661,769828,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_no_cotraining.sh",1238,0,"",shellscript,selection_keyboard +662,769955,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_no_cotraining.sh",1238,0,"o",shellscript,content +663,769956,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_no_cotraining.sh",1239,0,"",shellscript,selection_keyboard +664,770234,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_no_cotraining.sh",1239,0,"t",shellscript,content +665,770234,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_no_cotraining.sh",1240,0,"",shellscript,selection_keyboard +666,771071,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_no_cotraining.sh",1239,1,"",shellscript,content +667,771196,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_no_cotraining.sh",1238,1,"",shellscript,content +668,771341,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_no_cotraining.sh",1237,1,"",shellscript,content +669,771790,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_no_cotraining.sh",1236,1,"",shellscript,content +670,772685,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_no_cotraining.sh",1236,0,"-",shellscript,content +671,772686,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_no_cotraining.sh",1237,0,"",shellscript,selection_keyboard +672,773048,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_no_cotraining.sh",1237,0,"c",shellscript,content +673,773049,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_no_cotraining.sh",1238,0,"",shellscript,selection_keyboard +674,773130,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_no_cotraining.sh",1238,0,"o",shellscript,content +675,773131,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_no_cotraining.sh",1239,0,"",shellscript,selection_keyboard +676,773480,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_no_cotraining.sh",1239,0,"t",shellscript,content 
+677,773481,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_no_cotraining.sh",1240,0,"",shellscript,selection_keyboard +678,773612,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_no_cotraining.sh",1240,0,"r",shellscript,content +679,773613,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_no_cotraining.sh",1241,0,"",shellscript,selection_keyboard +680,773817,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_no_cotraining.sh",1241,0,"a",shellscript,content +681,773818,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_no_cotraining.sh",1242,0,"",shellscript,selection_keyboard +682,773919,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_no_cotraining.sh",1242,0,"i",shellscript,content +683,773919,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_no_cotraining.sh",1243,0,"",shellscript,selection_keyboard +684,773979,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_no_cotraining.sh",1243,0,"n",shellscript,content +685,773980,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_no_cotraining.sh",1244,0,"",shellscript,selection_keyboard +686,774085,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_no_cotraining.sh",1244,0,"i",shellscript,content +687,774086,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_no_cotraining.sh",1245,0,"",shellscript,selection_keyboard +688,774137,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_no_cotraining.sh",1245,0,"n",shellscript,content +689,774137,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_no_cotraining.sh",1246,0,"",shellscript,selection_keyboard +690,774209,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_no_cotraining.sh",1246,0,"g",shellscript,content +691,774210,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_no_cotraining.sh",1247,0,"",shellscript,selection_keyboard +692,815686,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_no_cotraining.sh",1379,0,"",shellscript,selection_mouse +693,816001,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_no_cotraining.sh",1379,1,"/",shellscript,selection_mouse +694,816002,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_no_cotraining.sh",1379,6,"/fast/",shellscript,selection_mouse +695,816002,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_no_cotraining.sh",1379,9,"/fast/pro",shellscript,selection_mouse +696,816002,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_no_cotraining.sh",1379,13,"/fast/project",shellscript,selection_mouse +697,816003,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_no_cotraining.sh",1379,15,"/fast/project/H",shellscript,selection_mouse +698,816025,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_no_cotraining.sh",1379,18,"/fast/project/HFMI",shellscript,selection_mouse +699,816040,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_no_cotraining.sh",1379,23,"/fast/project/HFMI_Syne",shellscript,selection_mouse +700,816058,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_no_cotraining.sh",1379,32,"/fast/project/HFMI_SynergyUnit/j",shellscript,selection_mouse 
+701,816087,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_no_cotraining.sh",1307,72,"s/data/coinrun/array_records_500m_seed_w_increment""\ntokenizer_ckpt_dir=""",shellscript,selection_mouse +702,816088,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_no_cotraining.sh",1310,69,"ata/coinrun/array_records_500m_seed_w_increment""\ntokenizer_ckpt_dir=""",shellscript,selection_mouse +703,816131,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_no_cotraining.sh",1312,67,"a/coinrun/array_records_500m_seed_w_increment""\ntokenizer_ckpt_dir=""",shellscript,selection_mouse +704,816131,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_no_cotraining.sh",1314,65,"coinrun/array_records_500m_seed_w_increment""\ntokenizer_ckpt_dir=""",shellscript,selection_mouse +705,816174,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_no_cotraining.sh",1379,46,"/fast/project/HFMI_SynergyUnit/jafar_ws/checkp",shellscript,selection_mouse +706,816175,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_no_cotraining.sh",1379,48,"/fast/project/HFMI_SynergyUnit/jafar_ws/checkpoi",shellscript,selection_mouse +707,816175,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_no_cotraining.sh",1379,52,"/fast/project/HFMI_SynergyUnit/jafar_ws/checkpoints/",shellscript,selection_mouse +708,816187,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_no_cotraining.sh",1379,53,"/fast/project/HFMI_SynergyUnit/jafar_ws/checkpoints/c",shellscript,selection_mouse +709,816209,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_no_cotraining.sh",1379,54,"/fast/project/HFMI_SynergyUnit/jafar_ws/checkpoints/co",shellscript,selection_mouse +710,816231,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_no_cotraining.sh",1379,56,"/fast/project/HFMI_SynergyUnit/jafar_ws/checkpoints/coin",shellscript,selection_mouse +711,816253,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_no_cotraining.sh",1379,57,"/fast/project/HFMI_SynergyUnit/jafar_ws/checkpoints/coinr",shellscript,selection_mouse +712,816255,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_no_cotraining.sh",1379,108,"/fast/project/HFMI_SynergyUnit/jafar_ws/checkpoints/coinrun/tokenizer/tokenizer_coinrun_500m_dataset_29490/""",shellscript,selection_mouse +713,816287,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_no_cotraining.sh",1358,21,"\ntokenizer_ckpt_dir=""",shellscript,selection_mouse +714,816385,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_no_cotraining.sh",1379,108,"/fast/project/HFMI_SynergyUnit/jafar_ws/checkpoints/coinrun/tokenizer/tokenizer_coinrun_500m_dataset_29490/""",shellscript,selection_mouse +715,817411,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_no_cotraining.sh",1379,107,"/fast/project/HFMI_SynergyUnit/jafar_ws/checkpoints/coinrun/tokenizer/tokenizer_coinrun_500m_dataset_29490/",shellscript,selection_mouse +716,817463,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_no_cotraining.sh",1379,106,"/fast/project/HFMI_SynergyUnit/jafar_ws/checkpoints/coinrun/tokenizer/tokenizer_coinrun_500m_dataset_29490",shellscript,selection_mouse 
+717,818759,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_no_cotraining.sh",1379,106,"",shellscript,content +718,819698,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_no_cotraining.sh",1379,0,"/fast/project/HFMI_SynergyUnit/jafar_ws/checkpoints/coinrun/tokenizer/tokenizer_coinrun_mila_submission_29736",shellscript,content +719,825885,"slurm/jobs/franz/berlin/coinrun/mila_submission/coinrun_dynamics_base.sh",0,0,"",shellscript,tab +720,828801,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_causal.sh",0,0,"",shellscript,tab +721,832133,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_causal.sh",1365,0,"",shellscript,selection_mouse +722,832290,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_causal.sh",1365,1,"/",shellscript,selection_mouse +723,832308,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_causal.sh",1365,4,"/fas",shellscript,selection_mouse +724,832355,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_causal.sh",1365,7,"/fast/p",shellscript,selection_mouse +725,832439,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_causal.sh",1365,12,"/fast/projec",shellscript,selection_mouse +726,832439,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_causal.sh",1365,15,"/fast/project/H",shellscript,selection_mouse +727,832440,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_causal.sh",1365,19,"/fast/project/HFMI_",shellscript,selection_mouse +728,832440,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_causal.sh",1365,152,"/fast/project/HFMI_SynergyUnit/jafar_ws/checkpoints/coinrun/tokenizer/tokenizer_coinrun_500m_dataset_29490/""\nlam_ckpt_dir=""/fast/project/HFMI_SynergyUni",shellscript,selection_mouse +729,832442,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_causal.sh",1365,154,"/fast/project/HFMI_SynergyUnit/jafar_ws/checkpoints/coinrun/tokenizer/tokenizer_coinrun_500m_dataset_29490/""\nlam_ckpt_dir=""/fast/project/HFMI_SynergyUnit/",shellscript,selection_mouse +730,832483,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_causal.sh",1365,157,"/fast/project/HFMI_SynergyUnit/jafar_ws/checkpoints/coinrun/tokenizer/tokenizer_coinrun_500m_dataset_29490/""\nlam_ckpt_dir=""/fast/project/HFMI_SynergyUnit/jaf",shellscript,selection_mouse +731,832492,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_causal.sh",1365,159,"/fast/project/HFMI_SynergyUnit/jafar_ws/checkpoints/coinrun/tokenizer/tokenizer_coinrun_500m_dataset_29490/""\nlam_ckpt_dir=""/fast/project/HFMI_SynergyUnit/jafar",shellscript,selection_mouse +732,832493,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_causal.sh",1365,160,"/fast/project/HFMI_SynergyUnit/jafar_ws/checkpoints/coinrun/tokenizer/tokenizer_coinrun_500m_dataset_29490/""\nlam_ckpt_dir=""/fast/project/HFMI_SynergyUnit/jafar_",shellscript,selection_mouse +733,832507,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_causal.sh",1365,162,"/fast/project/HFMI_SynergyUnit/jafar_ws/checkpoints/coinrun/tokenizer/tokenizer_coinrun_500m_dataset_29490/""\nlam_ckpt_dir=""/fast/project/HFMI_SynergyUnit/jafar_ws",shellscript,selection_mouse 
+734,832534,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_causal.sh",1365,163,"/fast/project/HFMI_SynergyUnit/jafar_ws/checkpoints/coinrun/tokenizer/tokenizer_coinrun_500m_dataset_29490/""\nlam_ckpt_dir=""/fast/project/HFMI_SynergyUnit/jafar_ws/",shellscript,selection_mouse +735,832545,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_causal.sh",1365,165,"/fast/project/HFMI_SynergyUnit/jafar_ws/checkpoints/coinrun/tokenizer/tokenizer_coinrun_500m_dataset_29490/""\nlam_ckpt_dir=""/fast/project/HFMI_SynergyUnit/jafar_ws/ch",shellscript,selection_mouse +736,832581,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_causal.sh",1365,166,"/fast/project/HFMI_SynergyUnit/jafar_ws/checkpoints/coinrun/tokenizer/tokenizer_coinrun_500m_dataset_29490/""\nlam_ckpt_dir=""/fast/project/HFMI_SynergyUnit/jafar_ws/che",shellscript,selection_mouse +737,832582,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_causal.sh",1365,167,"/fast/project/HFMI_SynergyUnit/jafar_ws/checkpoints/coinrun/tokenizer/tokenizer_coinrun_500m_dataset_29490/""\nlam_ckpt_dir=""/fast/project/HFMI_SynergyUnit/jafar_ws/chec",shellscript,selection_mouse +738,832592,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_causal.sh",1365,168,"/fast/project/HFMI_SynergyUnit/jafar_ws/checkpoints/coinrun/tokenizer/tokenizer_coinrun_500m_dataset_29490/""\nlam_ckpt_dir=""/fast/project/HFMI_SynergyUnit/jafar_ws/check",shellscript,selection_mouse +739,832635,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_causal.sh",1365,170,"/fast/project/HFMI_SynergyUnit/jafar_ws/checkpoints/coinrun/tokenizer/tokenizer_coinrun_500m_dataset_29490/""\nlam_ckpt_dir=""/fast/project/HFMI_SynergyUnit/jafar_ws/checkpo",shellscript,selection_mouse +740,832636,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_causal.sh",1365,172,"/fast/project/HFMI_SynergyUnit/jafar_ws/checkpoints/coinrun/tokenizer/tokenizer_coinrun_500m_dataset_29490/""\nlam_ckpt_dir=""/fast/project/HFMI_SynergyUnit/jafar_ws/checkpoin",shellscript,selection_mouse +741,832649,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_causal.sh",1365,174,"/fast/project/HFMI_SynergyUnit/jafar_ws/checkpoints/coinrun/tokenizer/tokenizer_coinrun_500m_dataset_29490/""\nlam_ckpt_dir=""/fast/project/HFMI_SynergyUnit/jafar_ws/checkpoints",shellscript,selection_mouse +742,832694,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_causal.sh",1365,175,"/fast/project/HFMI_SynergyUnit/jafar_ws/checkpoints/coinrun/tokenizer/tokenizer_coinrun_500m_dataset_29490/""\nlam_ckpt_dir=""/fast/project/HFMI_SynergyUnit/jafar_ws/checkpoints/",shellscript,selection_mouse +743,832694,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_causal.sh",1365,177,"/fast/project/HFMI_SynergyUnit/jafar_ws/checkpoints/coinrun/tokenizer/tokenizer_coinrun_500m_dataset_29490/""\nlam_ckpt_dir=""/fast/project/HFMI_SynergyUnit/jafar_ws/checkpoints/co",shellscript,selection_mouse +744,832695,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_causal.sh",1365,178,"/fast/project/HFMI_SynergyUnit/jafar_ws/checkpoints/coinrun/tokenizer/tokenizer_coinrun_500m_dataset_29490/""\nlam_ckpt_dir=""/fast/project/HFMI_SynergyUnit/jafar_ws/checkpoints/coi",shellscript,selection_mouse 
+745,832709,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_causal.sh",1365,181,"/fast/project/HFMI_SynergyUnit/jafar_ws/checkpoints/coinrun/tokenizer/tokenizer_coinrun_500m_dataset_29490/""\nlam_ckpt_dir=""/fast/project/HFMI_SynergyUnit/jafar_ws/checkpoints/coinru",shellscript,selection_mouse +746,832746,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_causal.sh",1365,183,"/fast/project/HFMI_SynergyUnit/jafar_ws/checkpoints/coinrun/tokenizer/tokenizer_coinrun_500m_dataset_29490/""\nlam_ckpt_dir=""/fast/project/HFMI_SynergyUnit/jafar_ws/checkpoints/coinrun/",shellscript,selection_mouse +747,832746,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_causal.sh",1365,55,"/fast/project/HFMI_SynergyUnit/jafar_ws/checkpoints/coi",shellscript,selection_mouse +748,832760,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_causal.sh",1365,56,"/fast/project/HFMI_SynergyUnit/jafar_ws/checkpoints/coin",shellscript,selection_mouse +749,832803,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_causal.sh",1365,57,"/fast/project/HFMI_SynergyUnit/jafar_ws/checkpoints/coinr",shellscript,selection_mouse +750,832809,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_causal.sh",1365,108,"/fast/project/HFMI_SynergyUnit/jafar_ws/checkpoints/coinrun/tokenizer/tokenizer_coinrun_500m_dataset_29490/""",shellscript,selection_mouse +751,832910,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_causal.sh",1344,21,"\ntokenizer_ckpt_dir=""",shellscript,selection_mouse +752,833285,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_causal.sh",1365,108,"/fast/project/HFMI_SynergyUnit/jafar_ws/checkpoints/coinrun/tokenizer/tokenizer_coinrun_500m_dataset_29490/""",shellscript,selection_mouse +753,833393,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_causal.sh",1365,218,"/fast/project/HFMI_SynergyUnit/jafar_ws/checkpoints/coinrun/tokenizer/tokenizer_coinrun_500m_dataset_29490/""\nlam_ckpt_dir=""/fast/project/HFMI_SynergyUnit/jafar_ws/checkpoints/coinrun/lam/lam_coinrun_500m_dataset_29489""",shellscript,selection_mouse +754,833759,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_causal.sh",1365,108,"/fast/project/HFMI_SynergyUnit/jafar_ws/checkpoints/coinrun/tokenizer/tokenizer_coinrun_500m_dataset_29490/""",shellscript,selection_mouse +755,835275,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_causal.sh",1365,107,"/fast/project/HFMI_SynergyUnit/jafar_ws/checkpoints/coinrun/tokenizer/tokenizer_coinrun_500m_dataset_29490/",shellscript,selection_mouse +756,837332,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_causal.sh",1365,107,"",shellscript,content +757,838259,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_causal.sh",1365,0,"/fast/project/HFMI_SynergyUnit/jafar_ws/checkpoints/coinrun/tokenizer/tokenizer_coinrun_mila_submission_29736",shellscript,content +758,839534,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_causal.sh",1474,0,"/",shellscript,content +759,839535,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_causal.sh",1475,0,"",shellscript,selection_keyboard +760,841616,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_gt_actions.sh",0,0,"",shellscript,tab 
+761,843516,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_gt_actions.sh",1373,0,"",shellscript,selection_mouse +762,843661,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_gt_actions.sh",1373,2,"/f",shellscript,selection_mouse +763,843666,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_gt_actions.sh",1373,7,"/fast/p",shellscript,selection_mouse +764,843666,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_gt_actions.sh",1373,12,"/fast/projec",shellscript,selection_mouse +765,843677,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_gt_actions.sh",1373,150,"/fast/project/HFMI_SynergyUnit/jafar_ws/checkpoints/coinrun/tokenizer/tokenizer_coinrun_500m_dataset_29490/""\nlam_ckpt_dir=""/fast/project/HFMI_SynergyU",shellscript,selection_mouse +766,843709,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_gt_actions.sh",1373,27,"/fast/project/HFMI_SynergyU",shellscript,selection_mouse +767,843710,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_gt_actions.sh",1373,31,"/fast/project/HFMI_SynergyUnit/",shellscript,selection_mouse +768,843728,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_gt_actions.sh",1373,33,"/fast/project/HFMI_SynergyUnit/ja",shellscript,selection_mouse +769,843753,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_gt_actions.sh",1373,37,"/fast/project/HFMI_SynergyUnit/jafar_",shellscript,selection_mouse +770,843754,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_gt_actions.sh",1373,46,"/fast/project/HFMI_SynergyUnit/jafar_ws/checkp",shellscript,selection_mouse +771,843771,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_gt_actions.sh",1373,48,"/fast/project/HFMI_SynergyUnit/jafar_ws/checkpoi",shellscript,selection_mouse +772,843797,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_gt_actions.sh",1373,50,"/fast/project/HFMI_SynergyUnit/jafar_ws/checkpoint",shellscript,selection_mouse +773,843814,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_gt_actions.sh",1373,51,"/fast/project/HFMI_SynergyUnit/jafar_ws/checkpoints",shellscript,selection_mouse +774,843838,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_gt_actions.sh",1373,181,"/fast/project/HFMI_SynergyUnit/jafar_ws/checkpoints/coinrun/tokenizer/tokenizer_coinrun_500m_dataset_29490/""\nlam_ckpt_dir=""/fast/project/HFMI_SynergyUnit/jafar_ws/checkpoints/coinru",shellscript,selection_mouse +775,843856,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_gt_actions.sh",1373,183,"/fast/project/HFMI_SynergyUnit/jafar_ws/checkpoints/coinrun/tokenizer/tokenizer_coinrun_500m_dataset_29490/""\nlam_ckpt_dir=""/fast/project/HFMI_SynergyUnit/jafar_ws/checkpoints/coinrun/",shellscript,selection_mouse +776,843856,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_gt_actions.sh",1373,184,"/fast/project/HFMI_SynergyUnit/jafar_ws/checkpoints/coinrun/tokenizer/tokenizer_coinrun_500m_dataset_29490/""\nlam_ckpt_dir=""/fast/project/HFMI_SynergyUnit/jafar_ws/checkpoints/coinrun/l",shellscript,selection_mouse 
+777,843879,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_gt_actions.sh",1373,186,"/fast/project/HFMI_SynergyUnit/jafar_ws/checkpoints/coinrun/tokenizer/tokenizer_coinrun_500m_dataset_29490/""\nlam_ckpt_dir=""/fast/project/HFMI_SynergyUnit/jafar_ws/checkpoints/coinrun/lam",shellscript,selection_mouse +778,843899,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_gt_actions.sh",1373,218,"/fast/project/HFMI_SynergyUnit/jafar_ws/checkpoints/coinrun/tokenizer/tokenizer_coinrun_500m_dataset_29490/""\nlam_ckpt_dir=""/fast/project/HFMI_SynergyUnit/jafar_ws/checkpoints/coinrun/lam/lam_coinrun_500m_dataset_29489""",shellscript,selection_mouse +779,844037,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_gt_actions.sh",1373,108,"/fast/project/HFMI_SynergyUnit/jafar_ws/checkpoints/coinrun/tokenizer/tokenizer_coinrun_500m_dataset_29490/""",shellscript,selection_mouse +780,845273,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_gt_actions.sh",1373,107,"/fast/project/HFMI_SynergyUnit/jafar_ws/checkpoints/coinrun/tokenizer/tokenizer_coinrun_500m_dataset_29490/",shellscript,selection_mouse +781,845344,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_gt_actions.sh",1373,106,"/fast/project/HFMI_SynergyUnit/jafar_ws/checkpoints/coinrun/tokenizer/tokenizer_coinrun_500m_dataset_29490",shellscript,selection_mouse +782,846124,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_gt_actions.sh",1373,106,"",shellscript,content +783,846599,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_gt_actions.sh",1373,0,"/fast/project/HFMI_SynergyUnit/jafar_ws/checkpoints/coinrun/tokenizer/tokenizer_coinrun_mila_submission_29736",shellscript,content +784,854295,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_ffn_dim_ablation.sh",0,0,"",shellscript,tab +785,857977,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_causal.sh",0,0,"",shellscript,tab +786,860306,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_ffn_dim_ablation.sh",0,0,"",shellscript,tab +787,865135,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_ffn_dim_ablation.sh",1482,0,"",shellscript,selection_mouse +788,865373,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_ffn_dim_ablation.sh",1481,1,"0",shellscript,selection_mouse +789,865423,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_ffn_dim_ablation.sh",1480,2,"90",shellscript,selection_mouse +790,865423,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_ffn_dim_ablation.sh",1479,3,"490",shellscript,selection_mouse +791,865459,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_ffn_dim_ablation.sh",1477,5,"29490",shellscript,selection_mouse +792,865474,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_ffn_dim_ablation.sh",1474,8,"et_29490",shellscript,selection_mouse +793,865477,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_ffn_dim_ablation.sh",1465,17,"00m_dataset_29490",shellscript,selection_mouse +794,865503,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_ffn_dim_ablation.sh",1459,23,"nrun_500m_dataset_29490",shellscript,selection_mouse 
+795,865541,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_ffn_dim_ablation.sh",1337,145,"_seed_w_increment""\ntokenizer_ckpt_dir=""/fast/project/HFMI_SynergyUnit/jafar_ws/checkpoints/coinrun/tokenizer/tokenizer_coinrun_500m_dataset_29490",shellscript,selection_mouse +796,865541,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_ffn_dim_ablation.sh",1331,151,"s_500m_seed_w_increment""\ntokenizer_ckpt_dir=""/fast/project/HFMI_SynergyUnit/jafar_ws/checkpoints/coinrun/tokenizer/tokenizer_coinrun_500m_dataset_29490",shellscript,selection_mouse +797,865542,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_ffn_dim_ablation.sh",1328,154,"ords_500m_seed_w_increment""\ntokenizer_ckpt_dir=""/fast/project/HFMI_SynergyUnit/jafar_ws/checkpoints/coinrun/tokenizer/tokenizer_coinrun_500m_dataset_29490",shellscript,selection_mouse +798,865583,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_ffn_dim_ablation.sh",1316,166,"un/array_records_500m_seed_w_increment""\ntokenizer_ckpt_dir=""/fast/project/HFMI_SynergyUnit/jafar_ws/checkpoints/coinrun/tokenizer/tokenizer_coinrun_500m_dataset_29490",shellscript,selection_mouse +799,865639,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_ffn_dim_ablation.sh",1313,169,"inrun/array_records_500m_seed_w_increment""\ntokenizer_ckpt_dir=""/fast/project/HFMI_SynergyUnit/jafar_ws/checkpoints/coinrun/tokenizer/tokenizer_coinrun_500m_dataset_29490",shellscript,selection_mouse +800,865640,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_ffn_dim_ablation.sh",1312,170,"oinrun/array_records_500m_seed_w_increment""\ntokenizer_ckpt_dir=""/fast/project/HFMI_SynergyUnit/jafar_ws/checkpoints/coinrun/tokenizer/tokenizer_coinrun_500m_dataset_29490",shellscript,selection_mouse +801,865662,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_ffn_dim_ablation.sh",1309,173,"a/coinrun/array_records_500m_seed_w_increment""\ntokenizer_ckpt_dir=""/fast/project/HFMI_SynergyUnit/jafar_ws/checkpoints/coinrun/tokenizer/tokenizer_coinrun_500m_dataset_29490",shellscript,selection_mouse +802,865662,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_ffn_dim_ablation.sh",1308,174,"ta/coinrun/array_records_500m_seed_w_increment""\ntokenizer_ckpt_dir=""/fast/project/HFMI_SynergyUnit/jafar_ws/checkpoints/coinrun/tokenizer/tokenizer_coinrun_500m_dataset_29490",shellscript,selection_mouse +803,865683,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_ffn_dim_ablation.sh",1307,175,"ata/coinrun/array_records_500m_seed_w_increment""\ntokenizer_ckpt_dir=""/fast/project/HFMI_SynergyUnit/jafar_ws/checkpoints/coinrun/tokenizer/tokenizer_coinrun_500m_dataset_29490",shellscript,selection_mouse +804,865697,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_ffn_dim_ablation.sh",1305,177,"/data/coinrun/array_records_500m_seed_w_increment""\ntokenizer_ckpt_dir=""/fast/project/HFMI_SynergyUnit/jafar_ws/checkpoints/coinrun/tokenizer/tokenizer_coinrun_500m_dataset_29490",shellscript,selection_mouse +805,865733,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_ffn_dim_ablation.sh",1304,178,"s/data/coinrun/array_records_500m_seed_w_increment""\ntokenizer_ckpt_dir=""/fast/project/HFMI_SynergyUnit/jafar_ws/checkpoints/coinrun/tokenizer/tokenizer_coinrun_500m_dataset_29490",shellscript,selection_mouse 
+806,865743,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_ffn_dim_ablation.sh",1303,179,"ws/data/coinrun/array_records_500m_seed_w_increment""\ntokenizer_ckpt_dir=""/fast/project/HFMI_SynergyUnit/jafar_ws/checkpoints/coinrun/tokenizer/tokenizer_coinrun_500m_dataset_29490",shellscript,selection_mouse +807,865780,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_ffn_dim_ablation.sh",1302,180,"_ws/data/coinrun/array_records_500m_seed_w_increment""\ntokenizer_ckpt_dir=""/fast/project/HFMI_SynergyUnit/jafar_ws/checkpoints/coinrun/tokenizer/tokenizer_coinrun_500m_dataset_29490",shellscript,selection_mouse +808,865801,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_ffn_dim_ablation.sh",1301,181,"r_ws/data/coinrun/array_records_500m_seed_w_increment""\ntokenizer_ckpt_dir=""/fast/project/HFMI_SynergyUnit/jafar_ws/checkpoints/coinrun/tokenizer/tokenizer_coinrun_500m_dataset_29490",shellscript,selection_mouse +809,865817,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_ffn_dim_ablation.sh",1247,235,"array_records_dir=""/fast/project/HFMI_SynergyUnit/jafar_ws/data/coinrun/array_records_500m_seed_w_increment""\ntokenizer_ckpt_dir=""/fast/project/HFMI_SynergyUnit/jafar_ws/checkpoints/coinrun/tokenizer/tokenizer_coinrun_500m_dataset_29490",shellscript,selection_mouse +810,865930,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_ffn_dim_ablation.sh",1356,126,"tokenizer_ckpt_dir=""/fast/project/HFMI_SynergyUnit/jafar_ws/checkpoints/coinrun/tokenizer/tokenizer_coinrun_500m_dataset_29490",shellscript,selection_mouse +811,866355,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_ffn_dim_ablation.sh",1360,122,"nizer_ckpt_dir=""/fast/project/HFMI_SynergyUnit/jafar_ws/checkpoints/coinrun/tokenizer/tokenizer_coinrun_500m_dataset_29490",shellscript,selection_mouse +812,866373,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_ffn_dim_ablation.sh",1362,120,"zer_ckpt_dir=""/fast/project/HFMI_SynergyUnit/jafar_ws/checkpoints/coinrun/tokenizer/tokenizer_coinrun_500m_dataset_29490",shellscript,selection_mouse +813,866390,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_ffn_dim_ablation.sh",1365,117,"_ckpt_dir=""/fast/project/HFMI_SynergyUnit/jafar_ws/checkpoints/coinrun/tokenizer/tokenizer_coinrun_500m_dataset_29490",shellscript,selection_mouse +814,866424,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_ffn_dim_ablation.sh",1260,222,"_dir=""/fast/project/HFMI_SynergyUnit/jafar_ws/data/coinrun/array_records_500m_seed_w_increment""\ntokenizer_ckpt_dir=""/fast/project/HFMI_SynergyUnit/jafar_ws/checkpoints/coinrun/tokenizer/tokenizer_coinrun_500m_dataset_29490",shellscript,selection_mouse +815,866434,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_ffn_dim_ablation.sh",1262,220,"ir=""/fast/project/HFMI_SynergyUnit/jafar_ws/data/coinrun/array_records_500m_seed_w_increment""\ntokenizer_ckpt_dir=""/fast/project/HFMI_SynergyUnit/jafar_ws/checkpoints/coinrun/tokenizer/tokenizer_coinrun_500m_dataset_29490",shellscript,selection_mouse 
+816,866475,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_ffn_dim_ablation.sh",1263,219,"r=""/fast/project/HFMI_SynergyUnit/jafar_ws/data/coinrun/array_records_500m_seed_w_increment""\ntokenizer_ckpt_dir=""/fast/project/HFMI_SynergyUnit/jafar_ws/checkpoints/coinrun/tokenizer/tokenizer_coinrun_500m_dataset_29490",shellscript,selection_mouse +817,866476,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_ffn_dim_ablation.sh",1265,217,"""/fast/project/HFMI_SynergyUnit/jafar_ws/data/coinrun/array_records_500m_seed_w_increment""\ntokenizer_ckpt_dir=""/fast/project/HFMI_SynergyUnit/jafar_ws/checkpoints/coinrun/tokenizer/tokenizer_coinrun_500m_dataset_29490",shellscript,selection_mouse +818,866495,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_ffn_dim_ablation.sh",1266,216,"/fast/project/HFMI_SynergyUnit/jafar_ws/data/coinrun/array_records_500m_seed_w_increment""\ntokenizer_ckpt_dir=""/fast/project/HFMI_SynergyUnit/jafar_ws/checkpoints/coinrun/tokenizer/tokenizer_coinrun_500m_dataset_29490",shellscript,selection_mouse +819,866495,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_ffn_dim_ablation.sh",1267,215,"fast/project/HFMI_SynergyUnit/jafar_ws/data/coinrun/array_records_500m_seed_w_increment""\ntokenizer_ckpt_dir=""/fast/project/HFMI_SynergyUnit/jafar_ws/checkpoints/coinrun/tokenizer/tokenizer_coinrun_500m_dataset_29490",shellscript,selection_mouse +820,866540,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_ffn_dim_ablation.sh",1268,214,"ast/project/HFMI_SynergyUnit/jafar_ws/data/coinrun/array_records_500m_seed_w_increment""\ntokenizer_ckpt_dir=""/fast/project/HFMI_SynergyUnit/jafar_ws/checkpoints/coinrun/tokenizer/tokenizer_coinrun_500m_dataset_29490",shellscript,selection_mouse +821,866579,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_ffn_dim_ablation.sh",1269,213,"st/project/HFMI_SynergyUnit/jafar_ws/data/coinrun/array_records_500m_seed_w_increment""\ntokenizer_ckpt_dir=""/fast/project/HFMI_SynergyUnit/jafar_ws/checkpoints/coinrun/tokenizer/tokenizer_coinrun_500m_dataset_29490",shellscript,selection_mouse +822,866623,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_ffn_dim_ablation.sh",1378,104,"ast/project/HFMI_SynergyUnit/jafar_ws/checkpoints/coinrun/tokenizer/tokenizer_coinrun_500m_dataset_29490",shellscript,selection_mouse +823,866623,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_ffn_dim_ablation.sh",1379,103,"st/project/HFMI_SynergyUnit/jafar_ws/checkpoints/coinrun/tokenizer/tokenizer_coinrun_500m_dataset_29490",shellscript,selection_mouse +824,866661,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_ffn_dim_ablation.sh",1380,102,"t/project/HFMI_SynergyUnit/jafar_ws/checkpoints/coinrun/tokenizer/tokenizer_coinrun_500m_dataset_29490",shellscript,selection_mouse +825,866854,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_ffn_dim_ablation.sh",1379,103,"st/project/HFMI_SynergyUnit/jafar_ws/checkpoints/coinrun/tokenizer/tokenizer_coinrun_500m_dataset_29490",shellscript,selection_mouse +826,866934,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_ffn_dim_ablation.sh",1378,104,"ast/project/HFMI_SynergyUnit/jafar_ws/checkpoints/coinrun/tokenizer/tokenizer_coinrun_500m_dataset_29490",shellscript,selection_mouse 
+827,867175,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_ffn_dim_ablation.sh",1377,105,"fast/project/HFMI_SynergyUnit/jafar_ws/checkpoints/coinrun/tokenizer/tokenizer_coinrun_500m_dataset_29490",shellscript,selection_mouse +828,867668,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_ffn_dim_ablation.sh",1376,106,"/fast/project/HFMI_SynergyUnit/jafar_ws/checkpoints/coinrun/tokenizer/tokenizer_coinrun_500m_dataset_29490",shellscript,selection_mouse +829,868450,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_ffn_dim_ablation.sh",1376,106,"",shellscript,content +830,868625,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_ffn_dim_ablation.sh",1376,0,"/fast/project/HFMI_SynergyUnit/jafar_ws/checkpoints/coinrun/tokenizer/tokenizer_coinrun_mila_submission_29736",shellscript,content +831,871278,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_gt_actions.sh",0,0,"",shellscript,tab +832,874728,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_no_cotraining.sh",0,0,"",shellscript,tab +833,882098,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_gt_actions.sh",0,0,"",shellscript,tab +834,882285,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_gt_actions.sh",1816,0,"",shellscript,selection_mouse +835,882327,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_gt_actions.sh",1815,0,"",shellscript,selection_command +836,883581,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_gt_actions.sh",1492,0,"",shellscript,selection_mouse +837,884574,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_gt_actions.sh",1485,110,"",shellscript,content +838,886554,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_ffn_dim_ablation.sh",0,0,"",shellscript,tab +839,887972,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_ffn_dim_ablation.sh",1495,0,"",shellscript,selection_mouse +840,889307,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_ffn_dim_ablation.sh",1494,0,"",shellscript,selection_command +841,889924,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_ffn_dim_ablation.sh",1488,110,"",shellscript,content +842,892107,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_causal.sh",0,0,"",shellscript,tab +843,893657,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_causal.sh",1482,0,"",shellscript,selection_mouse +844,894251,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_causal.sh",1477,110,"",shellscript,content +845,901183,"TERMINAL",0,0,"git branch",,terminal_command +846,901234,"TERMINAL",0,0,"]633;C[?1h=\r* main\r\n\r[?1l>]0;tum_cte0515@hkn1990:~/Projects/jasmine/slurm",,terminal_output +847,905187,"TERMINAL",0,0,"git pull",,terminal_command +848,905260,"TERMINAL",0,0,"]633;C",,terminal_output +849,906656,"TERMINAL",0,0,"Already up to date.\r\n]0;tum_cte0515@hkn1990:~/Projects/jasmine/slurm",,terminal_output +850,917457,"TERMINAL",0,0,"git status",,terminal_command +851,917503,"TERMINAL",0,0,"]633;C",,terminal_output +852,917535,"TERMINAL",0,0,"On branch main\r\nYour branch is up to date with 'origin/main'.\r\n\r\nChanges not staged for commit:\r\n (use ""git add ..."" to update what will be committed)\r\n (use ""git restore ..."" to discard changes 
in working directory)\r\n\tmodified: jobs/mihir/horeka/breakout/noise_schedule_runs/full-prec/train_dyn_single_gpu.sh\r\n\tmodified: jobs/mihir/horeka/coinrun/default_runs/train_lam_default.sh\r\n\tmodified: jobs/mihir/horeka/coinrun/default_runs/train_tokenizer_default.sh\r\n\tmodified: jobs/mihir/horeka/preprocessing/coinrun_chunked.sh\r\n\r\nUntracked files:\r\n (use ""git add ..."" to include in what will be committed)\r\n\tjobs/franz/berlin/coinrun/mila_submission/ablations/\r\n\tjobs/mihir/horeka/breakout/noise_schedule_runs/causal/\r\n\tjobs/mihir/horeka/coinrun/ablations/\r\n\tjobs/mihir/horeka/coinrun/default_runs/train_dyn_default.sh\r\n\tjobs/mihir/horeka/minecraft/\r\n\tjobs/mihir/horeka/preprocessing/coinrun_chunked_500m.sh\r\n\tjobs/mihir/horeka/preprocessing/doom_chunked.sh\r\n\r\nno changes added to commit (use ""git add"" and/or ""git commit -a"")\r\n]0;tum_cte0515@hkn1990:~/Projects/jasmine/slurm",,terminal_output +853,928481,"TERMINAL",0,0,"git add jobs/",,terminal_command +854,928528,"TERMINAL",0,0,"]633;C",,terminal_output +855,928837,"TERMINAL",0,0,"g",,terminal_output +856,928890,"TERMINAL",0,0,"i",,terminal_output +857,928943,"TERMINAL",0,0,"t",,terminal_output +858,928987,"TERMINAL",0,0," ]0;tum_cte0515@hkn1990:~/Projects/jasmine/slurm",,terminal_output +859,937120,"TERMINAL",0,0,"git commit -am ""added mila submission ablations""",,terminal_command +860,937169,"TERMINAL",0,0,"]633;C",,terminal_output +861,937439,"TERMINAL",0,0,"[main 556e3da] added mila submission ablations\r\n 25 files changed, 1428 insertions(+), 15 deletions(-)\r\n create mode 100644 jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_causal.sh\r\n create mode 100644 jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_ffn_dim_ablation.sh\r\n create mode 100644 jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_gt_actions.sh\r\n create mode 100644 jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_no_cotraining.sh\r\n create mode 100644 jobs/mihir/horeka/breakout/noise_schedule_runs/causal/train_dyn_single_gpu._sh\r\n create mode 100644 jobs/mihir/horeka/breakout/noise_schedule_runs/causal/train_dyn_single_gpu_50k.sh\r\n create mode 100644 jobs/mihir/horeka/breakout/noise_schedule_runs/causal/train_dyn_single_gpu_50k_no_noise_aug.sh\r\n create mode 100644 jobs/mihir/horeka/breakout/noise_schedule_runs/causal/train_dyn_single_gpu_gt_actions._sh\r\n create mode 100644 jobs/mihir/horeka/breakout/noise_schedule_runs/causal/train_dyn_single_gpu_gt_actions_50k.sh\r\n create mode 100644 jobs/mihir/horeka/breakout/noise_schedule_runs/causal/train_dyn_single_gpu_gt_actions_smaller_lr_50k._sh\r\n create mode 100644 jobs/mihir/horeka/breakout/noise_schedule_runs/causal/train_dyn_single_gpu_no_noise_aug._sh\r\n create mode 100644 jobs/mihir/horeka/breakout/noise_schedule_runs/causal/train_dyn_single_gpu_smaller_lr_50k._sh\r\n create mode 100644 jobs/mihir/horeka/coinrun/ablations/train_dyn_default-grain-ablation.sh\r\n create mode 100644 jobs/mihir/horeka/coinrun/ablations/train_dyn_default-no-noise-main.sh\r\n create mode 100644 jobs/mihir/horeka/coinrun/ablations/train_dyn_default-no-noise.sh\r\n create mode 100644 jobs/mihir/horeka/coinrun/ablations/train_dyn_default-sqrt-ablation.sh\r\n create mode 100644 jobs/mihir/horeka/coinrun/default_runs/train_dyn_default.sh\r\n create mode 100644 jobs/mihir/horeka/minecraft/default_runs/train_lam_8_nodes.sbatch\r\n create mode 100644 
jobs/mihir/horeka/minecraft/default_runs/train_tokenizer_8_nodes.sbatch\r\n create mode 100644 jobs/mihir/horeka/preprocessing/coinrun_chunked_500m.sh\r\n create mode 100644 jobs/mihir/horeka/preprocessing/doom_chunked.sh\r\n]0;tum_cte0515@hkn1990:~/Projects/jasmine/slurm",,terminal_output +862,939374,"TERMINAL",0,0,"git push",,terminal_command +863,939434,"TERMINAL",0,0,"]633;C",,terminal_output +864,940706,"TERMINAL",0,0,"Enumerating objects: 63, done.\r\nCounting objects: 1% (1/63)\rCounting objects: 3% (2/63)\rCounting objects: 4% (3/63)\rCounting objects: 6% (4/63)\rCounting objects: 7% (5/63)\rCounting objects: 9% (6/63)\rCounting objects: 11% (7/63)\rCounting objects: 12% (8/63)\rCounting objects: 14% (9/63)\rCounting objects: 15% (10/63)\rCounting objects: 17% (11/63)\rCounting objects: 19% (12/63)\rCounting objects: 20% (13/63)\rCounting objects: 22% (14/63)\rCounting objects: 23% (15/63)\rCounting objects: 25% (16/63)\rCounting objects: 26% (17/63)\rCounting objects: 28% (18/63)\rCounting objects: 30% (19/63)\rCounting objects: 31% (20/63)\rCounting objects: 33% (21/63)\rCounting objects: 34% (22/63)\rCounting objects: 36% (23/63)\rCounting objects: 38% (24/63)\rCounting objects: 39% (25/63)\rCounting objects: 41% (26/63)\rCounting objects: 42% (27/63)\rCounting objects: 44% (28/63)\rCounting objects: 46% (29/63)\rCounting objects: 47% (30/63)\rCounting objects: 49% (31/63)\rCounting objects: 50% (32/63)\rCounting objects: 52% (33/63)\rCounting objects: 53% (34/63)\rCounting objects: 55% (35/63)\rCounting objects: 57% (36/63)\rCounting objects: 58% (37/63)\rCounting objects: 60% (38/63)\rCounting objects: 61% (39/63)\rCounting objects: 63% (40/63)\rCounting objects: 65% (41/63)\rCounting objects: 66% (42/63)\rCounting objects: 68% (43/63)\rCounting objects: 69% (44/63)\rCounting objects: 71% (45/63)\rCounting objects: 73% (46/63)\rCounting objects: 74% (47/63)\rCounting objects: 76% (48/63)\rCounting objects: 77% (49/63)\rCounting objects: 79% (50/63)\rCounting objects: 80% (51/63)\rCounting objects: 82% (52/63)\rCounting objects: 84% (53/63)\rCounting objects: 85% (54/63)\rCounting objects: 87% (55/63)\rCounting objects: 88% (56/63)\rCounting objects: 90% (57/63)\rCounting objects: 92% (58/63)\rCounting objects: 93% (59/63)\rCounting objects: 95% (60/63)\rCounting objects: 96% (61/63)\rCounting objects: 98% (62/63)\rCounting objects: 100% (63/63)\rCounting objects: 100% (63/63), done.\r\nDelta compression using up to 152 threads\r\nCompressing objects: 2% (1/44)\rCompressing objects: 4% (2/44)\rCompressing objects: 6% (3/44)\rCompressing objects: 9% (4/44)\rCompressing objects: 11% (5/44)\rCompressing objects: 13% (6/44)\rCompressing objects: 15% (7/44)\rCompressing objects: 18% (8/44)\rCompressing objects: 20% (9/44)\rCompressing objects: 22% (10/44)\rCompressing objects: 25% (11/44)\rCompressing objects: 27% (12/44)\rCompressing objects: 29% (13/44)\rCompressing objects: 31% (14/44)\rCompressing objects: 34% (15/44)\rCompressing objects: 36% (16/44)\rCompressing objects: 38% (17/44)\rCompressing objects: 40% (18/44)\rCompressing objects: 43% (19/44)\rCompressing objects: 45% (20/44)\rCompressing objects: 47% (21/44)\rCompressing objects: 50% (22/44)\rCompressing objects: 52% (23/44)\rCompressing objects: 54% (24/44)\rCompressing objects: 56% (25/44)\rCompressing objects: 59% (26/44)\rCompressing objects: 61% (27/44)\rCompressing objects: 63% (28/44)\rCompressing objects: 65% (29/44)\rCompressing objects: 68% (30/44)\rCompressing objects: 70% (31/44)\rCompressing objects: 72% 
(32/44)\rCompressing objects: 75% (33/44)\rCompressing objects: 77% (34/44)\rCompressing objects: 79% (35/44)\rCompressing objects: 81% (36/44)\rCompressing objects: 84% (37/44)\rCompressing objects: 86% (38/44)\rCompressing objects: 88% (39/44)\rCompressing objects: 90% (40/44)\rCompressing objects: 93% (41/44)\rCompressing objects: 95% (42/44)\rCompressing objects: 97% (43/44)\rCompressing objects: 100% (44/44)\rCompressing objects: 100% (44/44), done.\r\nWriting objects: 2% (1/45)\rWriting objects: 4% (2/45)\rWriting objects: 6% (3/45)\rWriting objects: 8% (4/45)\rWriting objects: 11% (5/45)\rWriting objects: 13% (6/45)\rWriting objects: 15% (7/45)\rWriting objects: 17% (8/45)\rWriting objects: 33% (15/45)\rWriting objects: 35% (16/45)\rWriting objects: 37% (17/45)\rWriting objects: 40% (18/45)\rWriting objects: 42% (19/45)\rWriting objects: 44% (20/45)\rWriting objects: 46% (21/45)\rWriting objects: 48% (22/45)\rWriting objects: 53% (24/45)\rWriting objects: 55% (25/45)\rWriting objects: 57% (26/45)\rWriting objects: 60% (27/45)\rWriting objects: 62% (28/45)\rWriting objects: 64% (29/45)\rWriting objects: 66% (30/45)\rWriting objects: 68% (31/45)\rWriting objects: 71% (32/45)\rWriting objects: 73% (33/45)\rWriting objects: 75% (34/45)\rWriting objects: 77% (35/45)\rWriting objects: 80% (36/45)\rWriting objects: 82% (37/45)\rWriting objects: 84% (38/45)\rWriting objects: 86% (39/45)\rWriting objects: 88% (40/45)\rWriting objects: 91% (41/45)\rWriting objects: 93% (42/45)\rWriting objects: 95% (43/45)\rWriting objects: 97% (44/45)\rWriting objects: 100% (45/45)\rWriting objects: 100% (45/45), 6.87 KiB | 1.15 MiB/s, done.\r\nTotal 45 (delta 32), reused 0 (delta 0), pack-reused 0\r\n",,terminal_output +865,940894,"TERMINAL",0,0,"remote: Resolving deltas: 0% (0/32)\rremote: Resolving deltas: 3% (1/32)\rremote: Resolving deltas: 6% (2/32)\rremote: Resolving deltas: 9% (3/32)\rremote: Resolving deltas: 12% (4/32)\rremote: Resolving deltas: 15% (5/32)\rremote: Resolving deltas: 18% (6/32)\rremote: Resolving deltas: 21% (7/32)\rremote: Resolving deltas: 25% (8/32)\rremote: Resolving deltas: 28% (9/32)\rremote: Resolving deltas: 31% (10/32)\rremote: Resolving deltas: 34% (11/32)\rremote: Resolving deltas: 37% (12/32)\rremote: Resolving deltas: 40% (13/32)\rremote: Resolving deltas: 43% (14/32)\rremote: Resolving deltas: 46% (15/32)\rremote: Resolving deltas: 50% (16/32)\rremote: Resolving deltas: 53% (17/32)\rremote: Resolving deltas: 56% (18/32)\rremote: Resolving deltas: 59% (19/32)\rremote: Resolving deltas: 62% (20/32)\rremote: Resolving deltas: 65% (21/32)\rremote: Resolving deltas: 68% (22/32)\rremote: Resolving deltas: 71% (23/32)\rremote: Resolving deltas: 75% (24/32)\rremote: Resolving deltas: 78% (25/32)\rremote: Resolving deltas: 81% (26/32)\rremote: Resolving deltas: 84% (27/32)\rremote: Resolving deltas: 87% (28/32)\rremote: Resolving deltas: 90% (29/32)\rremote: Resolving deltas: 93% (30/32)\rremote: Resolving deltas: 96% (31/32)\rremote: Resolving deltas: 100% (32/32)\rremote: Resolving deltas: 100% (32/32), completed with 11 local objects.\r\n",,terminal_output +866,940979,"TERMINAL",0,0,"To github.com:p-doom/slurm.git\r\n cf160ad..556e3da main -> main\r\n]0;tum_cte0515@hkn1990:~/Projects/jasmine/slurm",,terminal_output +867,973163,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_ffn_dim_ablation.sh",0,0,"",shellscript,tab +868,995527,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_gt_actions.sh",0,0,"",shellscript,tab 
+869,1002293,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_no_cotraining.sh",0,0,"",shellscript,tab +870,1020043,"TERMINAL",0,0,"git diff",,terminal_command +871,1020087,"TERMINAL",0,0,"]633;C[?1h=\r\r[?1l>]0;tum_cte0515@hkn1990:~/Projects/jasmine/slurm",,terminal_output +872,1168285,"slurm/jobs/franz/berlin/coinrun/mila_submission/coinrun_dynamics_base.sh",0,0,"",shellscript,tab +873,1216310,"TERMINAL",0,0,"cd ..",,terminal_command +874,1322330,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_ffn_dim_ablation.sh",0,0,"",shellscript,tab +875,1350311,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_gt_actions.sh",0,0,"",shellscript,tab +876,1369702,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_no_cotraining.sh",0,0,"",shellscript,tab +877,1530953,"slurm/jobs/franz/berlin/coinrun/mila_submission/coinrun_dynamics_base.sh",0,0,"",shellscript,tab +878,1536353,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_base.sh",0,0,"",shellscript,tab +879,1580461,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_no_flash_attn.sh",0,0,"#!/usr/bin/env bash\n\n#SBATCH --nodes=1\n#SBATCH --ntasks-per-node=1\n#SBATCH --time=24:00:00\n#SBATCH --cpus-per-task=8\n#SBATCH --gres=gpu:1\n#SBATCH --output=/fast/project/HFMI_SynergyUnit/jafar_ws/logs/franz/coinrun/dynamics/%x_%j.log\n#SBATCH --error=/fast/project/HFMI_SynergyUnit/jafar_ws/logs/franz/coinrun/dynamics/%x_%j.log\n#SBATCH --job-name=dynamics_coinrun_mila_submission\n#SBATCH --requeue\n#SBATCH --signal=b:usr1@300 # 5 min before timeout\n\n# --- signal trap to requeue job before timeout ---\nrequeue_job() {\n echo ""[$(date)] caught sigusr1 (timeout warning), requeueing slurm job $SLURM_JOB_ID...""\n # optional: trigger checkpoint saving here\n # e.g., touch $checkpoint_dir/requeue_trigger\n scontrol requeue $SLURM_JOB_ID\n exit 0\n}\n\ntrap requeue_job sigusr1\n\n# set checkpoint flag based on restart count\nrestart_count=$(scontrol show job $SLURM_JOB_ID | grep -o 'Restarts=[0-9]*' | cut -d'=' -f2)\n\nif [ $restart_count -eq 0 ]; then\n restore_ckpt_flag=""--no-restore-ckpt""\nelse\n restore_ckpt_flag=""--restore-ckpt""\nfi\n\n\n\n# Log the sbatch script\ncat $0\n\nsource .venv/bin/activate\n\njob_name=$SLURM_JOB_NAME\nslurm_job_id=$SLURM_JOB_ID\n\ntags=""coinrun dynamics 500m_dataset mila_submission""\n\narray_records_dir=""/fast/project/HFMI_SynergyUnit/jafar_ws/data/coinrun/array_records_500m_seed_w_increment""\ntokenizer_ckpt_dir=""/fast/project/HFMI_SynergyUnit/jafar_ws/checkpoints/coinrun/tokenizer/tokenizer_coinrun_500m_dataset_29490/""\nlam_ckpt_dir=""/fast/project/HFMI_SynergyUnit/jafar_ws/checkpoints/coinrun/lam/lam_coinrun_500m_dataset_29489""\nCHECKPOINT_DIR=""/fast/project/HFMI_SynergyUnit/jafar_ws/checkpoints/coinrun/dynamics/${job_name}/${slurm_job_id}""\nmkdir -p $CHECKPOINT_DIR\n\nenv | grep SLURM\n\n\nsrun python jasmine/train_dynamics.py \\n --save_ckpt \\n $restore_ckpt_flag \\n --wandb_id $SLURM_JOB_ID \\n --ckpt_dir $CHECKPOINT_DIR \\n --name=""${job_name}_${slurm_job_id}"" \\n --tags ${tags} \\n --entity instant-uv \\n --project jafar \\n --tokenizer_checkpoint=""${tokenizer_ckpt_dir}"" \\n --val_data_dir=""${array_records_dir}/val"" \\n --data_dir=""${array_records_dir}/train"" &\n\nchild_pid=$!\n\nwait $child_pid\n\n",shellscript,tab 
+880,1583411,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_no_flash_attn.sh",378,0,"",shellscript,selection_mouse +881,1583427,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_no_flash_attn.sh",377,0,"",shellscript,selection_command +882,1584593,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_no_flash_attn.sh",378,0,"",shellscript,selection_command +883,1584821,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_no_flash_attn.sh",378,0,"_",shellscript,content +884,1584823,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_no_flash_attn.sh",379,0,"",shellscript,selection_keyboard +885,1585175,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_no_flash_attn.sh",379,0,"n",shellscript,content +886,1585176,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_no_flash_attn.sh",380,0,"",shellscript,selection_keyboard +887,1585347,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_no_flash_attn.sh",380,0,"o",shellscript,content +888,1585348,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_no_flash_attn.sh",381,0,"",shellscript,selection_keyboard +889,1585645,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_no_flash_attn.sh",381,0,"_",shellscript,content +890,1585646,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_no_flash_attn.sh",382,0,"",shellscript,selection_keyboard +891,1585893,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_no_flash_attn.sh",382,0,"f",shellscript,content +892,1585893,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_no_flash_attn.sh",383,0,"",shellscript,selection_keyboard +893,1586006,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_no_flash_attn.sh",383,0,"l",shellscript,content +894,1586006,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_no_flash_attn.sh",384,0,"",shellscript,selection_keyboard +895,1586104,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_no_flash_attn.sh",384,0,"a",shellscript,content +896,1586105,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_no_flash_attn.sh",385,0,"",shellscript,selection_keyboard +897,1586192,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_no_flash_attn.sh",385,0,"s",shellscript,content +898,1586193,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_no_flash_attn.sh",386,0,"",shellscript,selection_keyboard +899,1586335,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_no_flash_attn.sh",386,0,"h",shellscript,content +900,1586336,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_no_flash_attn.sh",387,0,"",shellscript,selection_keyboard +901,1586694,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_no_flash_attn.sh",387,0,"_",shellscript,content +902,1586695,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_no_flash_attn.sh",388,0,"",shellscript,selection_keyboard +903,1587178,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_no_flash_attn.sh",388,0,"a",shellscript,content 
+904,1587179,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_no_flash_attn.sh",389,0,"",shellscript,selection_keyboard +905,1587366,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_no_flash_attn.sh",389,0,"t",shellscript,content +906,1587367,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_no_flash_attn.sh",390,0,"",shellscript,selection_keyboard +907,1587550,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_no_flash_attn.sh",390,0,"t",shellscript,content +908,1587551,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_no_flash_attn.sh",391,0,"",shellscript,selection_keyboard +909,1587594,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_no_flash_attn.sh",391,0,"n",shellscript,content +910,1587594,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_no_flash_attn.sh",392,0,"",shellscript,selection_keyboard +911,1589715,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_no_flash_attn.sh",1224,0,"",shellscript,selection_mouse +912,1590063,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_no_flash_attn.sh",1224,0," ",shellscript,content +913,1590064,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_no_flash_attn.sh",1225,0,"",shellscript,selection_keyboard +914,1590272,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_no_flash_attn.sh",1225,0,"a",shellscript,content +915,1590273,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_no_flash_attn.sh",1226,0,"",shellscript,selection_keyboard +916,1590387,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_no_flash_attn.sh",1226,0,"b",shellscript,content +917,1590388,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_no_flash_attn.sh",1227,0,"",shellscript,selection_keyboard +918,1590531,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_no_flash_attn.sh",1227,0,"l",shellscript,content +919,1590532,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_no_flash_attn.sh",1228,0,"",shellscript,selection_keyboard +920,1590629,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_no_flash_attn.sh",1228,0,"a",shellscript,content +921,1590630,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_no_flash_attn.sh",1229,0,"",shellscript,selection_keyboard +922,1590835,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_no_flash_attn.sh",1229,0,"t",shellscript,content +923,1590836,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_no_flash_attn.sh",1230,0,"",shellscript,selection_keyboard +924,1590899,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_no_flash_attn.sh",1230,0,"i",shellscript,content +925,1590900,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_no_flash_attn.sh",1231,0,"",shellscript,selection_keyboard +926,1590958,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_no_flash_attn.sh",1231,0,"o",shellscript,content +927,1590959,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_no_flash_attn.sh",1232,0,"",shellscript,selection_keyboard 
+928,1591142,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_no_flash_attn.sh",1232,0,"n",shellscript,content +929,1591143,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_no_flash_attn.sh",1233,0,"",shellscript,selection_keyboard +930,1591256,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_no_flash_attn.sh",1233,0," ",shellscript,content +931,1591256,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_no_flash_attn.sh",1234,0,"",shellscript,selection_keyboard +932,1591331,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_no_flash_attn.sh",1234,0,"n",shellscript,content +933,1591332,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_no_flash_attn.sh",1235,0,"",shellscript,selection_keyboard +934,1591486,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_no_flash_attn.sh",1235,0,"o",shellscript,content +935,1591487,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_no_flash_attn.sh",1236,0,"",shellscript,selection_keyboard +936,1592388,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_no_flash_attn.sh",1236,0,"-",shellscript,content +937,1592389,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_no_flash_attn.sh",1237,0,"",shellscript,selection_keyboard +938,1592536,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_no_flash_attn.sh",1237,0,"f",shellscript,content +939,1592537,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_no_flash_attn.sh",1238,0,"",shellscript,selection_keyboard +940,1592668,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_no_flash_attn.sh",1238,0,"l",shellscript,content +941,1592669,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_no_flash_attn.sh",1239,0,"",shellscript,selection_keyboard +942,1592743,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_no_flash_attn.sh",1239,0,"a",shellscript,content +943,1592744,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_no_flash_attn.sh",1240,0,"",shellscript,selection_keyboard +944,1592823,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_no_flash_attn.sh",1240,0,"s",shellscript,content +945,1592824,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_no_flash_attn.sh",1241,0,"",shellscript,selection_keyboard +946,1592966,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_no_flash_attn.sh",1241,0,"h",shellscript,content +947,1592967,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_no_flash_attn.sh",1242,0,"",shellscript,selection_keyboard +948,1594089,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_no_flash_attn.sh",1242,0,"-",shellscript,content +949,1594090,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_no_flash_attn.sh",1243,0,"",shellscript,selection_keyboard +950,1594243,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_no_flash_attn.sh",1243,0,"a",shellscript,content +951,1594244,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_no_flash_attn.sh",1244,0,"",shellscript,selection_keyboard 
+952,1594501,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_no_flash_attn.sh",1244,0,"t",shellscript,content +953,1594502,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_no_flash_attn.sh",1245,0,"",shellscript,selection_keyboard +954,1594666,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_no_flash_attn.sh",1245,0,"t",shellscript,content +955,1594667,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_no_flash_attn.sh",1246,0,"",shellscript,selection_keyboard +956,1594717,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_no_flash_attn.sh",1246,0,"h",shellscript,content +957,1594718,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_no_flash_attn.sh",1247,0,"",shellscript,selection_keyboard +958,1595156,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_no_flash_attn.sh",1247,0,"n",shellscript,content +959,1595157,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_no_flash_attn.sh",1248,0,"",shellscript,selection_keyboard +960,1595657,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_no_flash_attn.sh",1247,1,"",shellscript,content +961,1595792,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_no_flash_attn.sh",1246,1,"",shellscript,content +962,1595992,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_no_flash_attn.sh",1246,0,"n",shellscript,content +963,1595993,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_no_flash_attn.sh",1247,0,"",shellscript,selection_keyboard +964,1601097,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_no_flash_attn.sh",1796,0,"",shellscript,selection_mouse +965,1601491,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_no_flash_attn.sh",1796,0,"\n",shellscript,content +966,1602630,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_no_flash_attn.sh",1797,0,"-",shellscript,content +967,1602631,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_no_flash_attn.sh",1798,0,"",shellscript,selection_keyboard +968,1603107,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_no_flash_attn.sh",1797,1,"",shellscript,content +969,1603276,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_no_flash_attn.sh",1797,0," ",shellscript,content +970,1603610,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_no_flash_attn.sh",1801,0,"-",shellscript,content +971,1603611,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_no_flash_attn.sh",1802,0,"",shellscript,selection_keyboard +972,1603797,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_no_flash_attn.sh",1802,0,"-",shellscript,content +973,1603798,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_no_flash_attn.sh",1803,0,"",shellscript,selection_keyboard +974,1607300,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_no_flash_attn.sh",1803,0,"n",shellscript,content +975,1607302,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_no_flash_attn.sh",1804,0,"",shellscript,selection_keyboard +976,1607597,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_no_flash_attn.sh",1804,0,"o",shellscript,content 
+977,1607598,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_no_flash_attn.sh",1805,0,"",shellscript,selection_keyboard +978,1608123,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_no_flash_attn.sh",1805,0,"-",shellscript,content +979,1608124,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_no_flash_attn.sh",1806,0,"",shellscript,selection_keyboard +980,1608426,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_no_flash_attn.sh",1806,0,"u",shellscript,content +981,1608427,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_no_flash_attn.sh",1807,0,"",shellscript,selection_keyboard +982,1608496,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_no_flash_attn.sh",1807,0,"s",shellscript,content +983,1608497,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_no_flash_attn.sh",1808,0,"",shellscript,selection_keyboard +984,1608711,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_no_flash_attn.sh",1808,0,"e",shellscript,content +985,1608712,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_no_flash_attn.sh",1809,0,"",shellscript,selection_keyboard +986,1608847,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_no_flash_attn.sh",1809,0,"-",shellscript,content +987,1608848,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_no_flash_attn.sh",1810,0,"",shellscript,selection_keyboard +988,1609182,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_no_flash_attn.sh",1810,0,"f",shellscript,content +989,1609183,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_no_flash_attn.sh",1811,0,"",shellscript,selection_keyboard +990,1609485,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_no_flash_attn.sh",1811,0,"l",shellscript,content +991,1609486,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_no_flash_attn.sh",1812,0,"",shellscript,selection_keyboard +992,1609593,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_no_flash_attn.sh",1812,0,"a",shellscript,content +993,1609594,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_no_flash_attn.sh",1813,0,"",shellscript,selection_keyboard +994,1609653,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_no_flash_attn.sh",1813,0,"s",shellscript,content +995,1609653,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_no_flash_attn.sh",1814,0,"",shellscript,selection_keyboard +996,1609815,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_no_flash_attn.sh",1814,0,"h",shellscript,content +997,1609816,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_no_flash_attn.sh",1815,0,"",shellscript,selection_keyboard +998,1610413,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_no_flash_attn.sh",1815,0,"-",shellscript,content +999,1610414,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_no_flash_attn.sh",1816,0,"",shellscript,selection_keyboard +1000,1610596,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_no_flash_attn.sh",1816,0,"a",shellscript,content 
+1001,1610597,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_no_flash_attn.sh",1817,0,"",shellscript,selection_keyboard +1002,1610774,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_no_flash_attn.sh",1817,0,"t",shellscript,content +1003,1610774,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_no_flash_attn.sh",1818,0,"",shellscript,selection_keyboard +1004,1610892,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_no_flash_attn.sh",1818,0,"t",shellscript,content +1005,1610893,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_no_flash_attn.sh",1819,0,"",shellscript,selection_keyboard +1006,1611030,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_no_flash_attn.sh",1819,0,"e",shellscript,content +1007,1611031,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_no_flash_attn.sh",1820,0,"",shellscript,selection_keyboard +1008,1611139,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_no_flash_attn.sh",1820,0,"n",shellscript,content +1009,1611140,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_no_flash_attn.sh",1821,0,"",shellscript,selection_keyboard +1010,1611333,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_no_flash_attn.sh",1821,0,"t",shellscript,content +1011,1611334,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_no_flash_attn.sh",1822,0,"",shellscript,selection_keyboard +1012,1611407,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_no_flash_attn.sh",1822,0,"i",shellscript,content +1013,1611408,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_no_flash_attn.sh",1823,0,"",shellscript,selection_keyboard +1014,1611517,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_no_flash_attn.sh",1823,0,"o",shellscript,content +1015,1611518,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_no_flash_attn.sh",1824,0,"",shellscript,selection_keyboard +1016,1611639,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_no_flash_attn.sh",1824,0,"n",shellscript,content +1017,1611640,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_no_flash_attn.sh",1825,0,"",shellscript,selection_keyboard +1018,1612586,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_no_flash_attn.sh",1825,0," ",shellscript,content +1019,1612587,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_no_flash_attn.sh",1826,0,"",shellscript,selection_keyboard +1020,1612808,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_no_flash_attn.sh",1826,0,"\",shellscript,content +1021,1612808,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_no_flash_attn.sh",1827,0,"",shellscript,selection_keyboard +1022,1613646,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_no_flash_attn.sh",1826,0,"",shellscript,selection_command +1023,1627610,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_no_flash_attn.sh",1511,0,"",shellscript,selection_mouse +1024,1628794,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_no_flash_attn.sh",1488,110,"",shellscript,content 
+1025,1630266,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_no_cotraining.sh",0,0,"",shellscript,tab +1026,1632549,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_no_cotraining.sh",1380,0,"",shellscript,selection_mouse +1027,1634156,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_no_cotraining.sh",1379,0,"",shellscript,selection_mouse +1028,1634307,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_no_cotraining.sh",1379,1,"/",shellscript,selection_mouse +1029,1634307,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_no_cotraining.sh",1379,3,"/fa",shellscript,selection_mouse +1030,1634308,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_no_cotraining.sh",1379,6,"/fast/",shellscript,selection_mouse +1031,1634308,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_no_cotraining.sh",1379,9,"/fast/pro",shellscript,selection_mouse +1032,1634308,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_no_cotraining.sh",1379,11,"/fast/proje",shellscript,selection_mouse +1033,1634337,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_no_cotraining.sh",1379,15,"/fast/project/H",shellscript,selection_mouse +1034,1634338,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_no_cotraining.sh",1379,16,"/fast/project/HF",shellscript,selection_mouse +1035,1634354,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_no_cotraining.sh",1379,22,"/fast/project/HFMI_Syn",shellscript,selection_mouse +1036,1634370,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_no_cotraining.sh",1379,24,"/fast/project/HFMI_Syner",shellscript,selection_mouse +1037,1634399,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_no_cotraining.sh",1379,27,"/fast/project/HFMI_SynergyU",shellscript,selection_mouse +1038,1634425,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_no_cotraining.sh",1379,30,"/fast/project/HFMI_SynergyUnit",shellscript,selection_mouse +1039,1634467,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_no_cotraining.sh",1379,31,"/fast/project/HFMI_SynergyUnit/",shellscript,selection_mouse +1040,1634476,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_no_cotraining.sh",1379,33,"/fast/project/HFMI_SynergyUnit/ja",shellscript,selection_mouse +1041,1634476,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_no_cotraining.sh",1379,35,"/fast/project/HFMI_SynergyUnit/jafa",shellscript,selection_mouse +1042,1634506,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_no_cotraining.sh",1379,37,"/fast/project/HFMI_SynergyUnit/jafar_",shellscript,selection_mouse +1043,1634532,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_no_cotraining.sh",1379,40,"/fast/project/HFMI_SynergyUnit/jafar_ws/",shellscript,selection_mouse +1044,1634533,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_no_cotraining.sh",1379,41,"/fast/project/HFMI_SynergyUnit/jafar_ws/c",shellscript,selection_mouse +1045,1634541,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_no_cotraining.sh",1379,43,"/fast/project/HFMI_SynergyUnit/jafar_ws/che",shellscript,selection_mouse 
+1046,1634554,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_no_cotraining.sh",1379,45,"/fast/project/HFMI_SynergyUnit/jafar_ws/check",shellscript,selection_mouse +1047,1634588,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_no_cotraining.sh",1379,48,"/fast/project/HFMI_SynergyUnit/jafar_ws/checkpoi",shellscript,selection_mouse +1048,1634589,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_no_cotraining.sh",1379,49,"/fast/project/HFMI_SynergyUnit/jafar_ws/checkpoin",shellscript,selection_mouse +1049,1634603,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_no_cotraining.sh",1379,50,"/fast/project/HFMI_SynergyUnit/jafar_ws/checkpoint",shellscript,selection_mouse +1050,1634656,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_no_cotraining.sh",1379,51,"/fast/project/HFMI_SynergyUnit/jafar_ws/checkpoints",shellscript,selection_mouse +1051,1634657,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_no_cotraining.sh",1379,53,"/fast/project/HFMI_SynergyUnit/jafar_ws/checkpoints/c",shellscript,selection_mouse +1052,1634657,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_no_cotraining.sh",1379,54,"/fast/project/HFMI_SynergyUnit/jafar_ws/checkpoints/co",shellscript,selection_mouse +1053,1634703,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_no_cotraining.sh",1379,56,"/fast/project/HFMI_SynergyUnit/jafar_ws/checkpoints/coin",shellscript,selection_mouse +1054,1634704,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_no_cotraining.sh",1379,57,"/fast/project/HFMI_SynergyUnit/jafar_ws/checkpoints/coinr",shellscript,selection_mouse +1055,1634704,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_no_cotraining.sh",1379,111,"/fast/project/HFMI_SynergyUnit/jafar_ws/checkpoints/coinrun/tokenizer/tokenizer_coinrun_mila_submission_29736/""",shellscript,selection_mouse +1056,1635385,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_no_cotraining.sh",1379,244,"/fast/project/HFMI_SynergyUnit/jafar_ws/checkpoints/coinrun/tokenizer/tokenizer_coinrun_mila_submission_29736/""\nlam_ckpt_dir=""/fast/project/HFMI_SynergyUnit/jafar_ws/checkpoints/coinrun/lam/lam_coinrun_mila_submission_no_flash_attention_29738/""",shellscript,selection_mouse +1057,1635583,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_no_cotraining.sh",1379,243,"/fast/project/HFMI_SynergyUnit/jafar_ws/checkpoints/coinrun/tokenizer/tokenizer_coinrun_mila_submission_29736/""\nlam_ckpt_dir=""/fast/project/HFMI_SynergyUnit/jafar_ws/checkpoints/coinrun/lam/lam_coinrun_mila_submission_no_flash_attention_29738/",shellscript,selection_mouse +1058,1635604,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_no_cotraining.sh",1379,241,"/fast/project/HFMI_SynergyUnit/jafar_ws/checkpoints/coinrun/tokenizer/tokenizer_coinrun_mila_submission_29736/""\nlam_ckpt_dir=""/fast/project/HFMI_SynergyUnit/jafar_ws/checkpoints/coinrun/lam/lam_coinrun_mila_submission_no_flash_attention_2973",shellscript,selection_mouse 
+1059,1635632,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_no_cotraining.sh",1379,240,"/fast/project/HFMI_SynergyUnit/jafar_ws/checkpoints/coinrun/tokenizer/tokenizer_coinrun_mila_submission_29736/""\nlam_ckpt_dir=""/fast/project/HFMI_SynergyUnit/jafar_ws/checkpoints/coinrun/lam/lam_coinrun_mila_submission_no_flash_attention_297",shellscript,selection_mouse +1060,1635699,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_no_cotraining.sh",1379,239,"/fast/project/HFMI_SynergyUnit/jafar_ws/checkpoints/coinrun/tokenizer/tokenizer_coinrun_mila_submission_29736/""\nlam_ckpt_dir=""/fast/project/HFMI_SynergyUnit/jafar_ws/checkpoints/coinrun/lam/lam_coinrun_mila_submission_no_flash_attention_29",shellscript,selection_mouse +1061,1635815,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_no_cotraining.sh",1379,240,"/fast/project/HFMI_SynergyUnit/jafar_ws/checkpoints/coinrun/tokenizer/tokenizer_coinrun_mila_submission_29736/""\nlam_ckpt_dir=""/fast/project/HFMI_SynergyUnit/jafar_ws/checkpoints/coinrun/lam/lam_coinrun_mila_submission_no_flash_attention_297",shellscript,selection_mouse +1062,1635860,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_no_cotraining.sh",1379,108,"/fast/project/HFMI_SynergyUnit/jafar_ws/checkpoints/coinrun/tokenizer/tokenizer_coinrun_mila_submission_2973",shellscript,selection_mouse +1063,1635959,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_no_cotraining.sh",1379,109,"/fast/project/HFMI_SynergyUnit/jafar_ws/checkpoints/coinrun/tokenizer/tokenizer_coinrun_mila_submission_29736",shellscript,selection_mouse +1064,1636071,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_no_cotraining.sh",1379,110,"/fast/project/HFMI_SynergyUnit/jafar_ws/checkpoints/coinrun/tokenizer/tokenizer_coinrun_mila_submission_29736/",shellscript,selection_mouse +1065,1640859,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_no_flash_attn.sh",0,0,"",shellscript,tab +1066,1642082,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_no_flash_attn.sh",1377,0,"",shellscript,selection_mouse +1067,1644009,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_no_flash_attn.sh",1379,0,"",shellscript,selection_mouse +1068,1644137,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_no_flash_attn.sh",1379,2,"/f",shellscript,selection_mouse +1069,1644155,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_no_flash_attn.sh",1379,5,"/fast",shellscript,selection_mouse +1070,1644170,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_no_flash_attn.sh",1379,10,"/fast/proj",shellscript,selection_mouse +1071,1644186,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_no_flash_attn.sh",1379,14,"/fast/project/",shellscript,selection_mouse +1072,1644211,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_no_flash_attn.sh",1379,21,"/fast/project/HFMI_Sy",shellscript,selection_mouse +1073,1644253,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_no_flash_attn.sh",1379,31,"/fast/project/HFMI_SynergyUnit/",shellscript,selection_mouse +1074,1644254,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_no_flash_attn.sh",1379,35,"/fast/project/HFMI_SynergyUnit/jafa",shellscript,selection_mouse 
+1075,1644256,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_no_flash_attn.sh",1379,42,"/fast/project/HFMI_SynergyUnit/jafar_ws/ch",shellscript,selection_mouse +1076,1644299,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_no_flash_attn.sh",1379,175,"/fast/project/HFMI_SynergyUnit/jafar_ws/checkpoints/coinrun/tokenizer/tokenizer_coinrun_500m_dataset_29490/""\nCHECKPOINT_DIR=""/fast/project/HFMI_SynergyUnit/jafar_ws/checkpoint",shellscript,selection_mouse +1077,1644333,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_no_flash_attn.sh",1379,178,"/fast/project/HFMI_SynergyUnit/jafar_ws/checkpoints/coinrun/tokenizer/tokenizer_coinrun_500m_dataset_29490/""\nCHECKPOINT_DIR=""/fast/project/HFMI_SynergyUnit/jafar_ws/checkpoints/c",shellscript,selection_mouse +1078,1644341,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_no_flash_attn.sh",1379,181,"/fast/project/HFMI_SynergyUnit/jafar_ws/checkpoints/coinrun/tokenizer/tokenizer_coinrun_500m_dataset_29490/""\nCHECKPOINT_DIR=""/fast/project/HFMI_SynergyUnit/jafar_ws/checkpoints/coin",shellscript,selection_mouse +1079,1644341,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_no_flash_attn.sh",1379,184,"/fast/project/HFMI_SynergyUnit/jafar_ws/checkpoints/coinrun/tokenizer/tokenizer_coinrun_500m_dataset_29490/""\nCHECKPOINT_DIR=""/fast/project/HFMI_SynergyUnit/jafar_ws/checkpoints/coinrun",shellscript,selection_mouse +1080,1644382,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_no_flash_attn.sh",1379,186,"/fast/project/HFMI_SynergyUnit/jafar_ws/checkpoints/coinrun/tokenizer/tokenizer_coinrun_500m_dataset_29490/""\nCHECKPOINT_DIR=""/fast/project/HFMI_SynergyUnit/jafar_ws/checkpoints/coinrun/d",shellscript,selection_mouse +1081,1644388,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_no_flash_attn.sh",1379,187,"/fast/project/HFMI_SynergyUnit/jafar_ws/checkpoints/coinrun/tokenizer/tokenizer_coinrun_500m_dataset_29490/""\nCHECKPOINT_DIR=""/fast/project/HFMI_SynergyUnit/jafar_ws/checkpoints/coinrun/dy",shellscript,selection_mouse +1082,1644389,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_no_flash_attn.sh",1379,222,"/fast/project/HFMI_SynergyUnit/jafar_ws/checkpoints/coinrun/tokenizer/tokenizer_coinrun_500m_dataset_29490/""\nCHECKPOINT_DIR=""/fast/project/HFMI_SynergyUnit/jafar_ws/checkpoints/coinrun/dynamics/${job_name}/${slurm_job_id}""",shellscript,selection_mouse +1083,1644598,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_no_flash_attn.sh",1379,108,"/fast/project/HFMI_SynergyUnit/jafar_ws/checkpoints/coinrun/tokenizer/tokenizer_coinrun_500m_dataset_29490/""",shellscript,selection_mouse +1084,1645055,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_no_flash_attn.sh",1379,107,"/fast/project/HFMI_SynergyUnit/jafar_ws/checkpoints/coinrun/tokenizer/tokenizer_coinrun_500m_dataset_29490/",shellscript,selection_mouse +1085,1645089,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_no_flash_attn.sh",1379,106,"/fast/project/HFMI_SynergyUnit/jafar_ws/checkpoints/coinrun/tokenizer/tokenizer_coinrun_500m_dataset_29490",shellscript,selection_mouse 
+1086,1645139,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_no_flash_attn.sh",1379,105,"/fast/project/HFMI_SynergyUnit/jafar_ws/checkpoints/coinrun/tokenizer/tokenizer_coinrun_500m_dataset_2949",shellscript,selection_mouse +1087,1645481,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_no_flash_attn.sh",1379,106,"/fast/project/HFMI_SynergyUnit/jafar_ws/checkpoints/coinrun/tokenizer/tokenizer_coinrun_500m_dataset_29490",shellscript,selection_mouse +1088,1646594,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_no_flash_attn.sh",1379,107,"/fast/project/HFMI_SynergyUnit/jafar_ws/checkpoints/coinrun/tokenizer/tokenizer_coinrun_500m_dataset_29490/",shellscript,selection_mouse +1089,1647451,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_no_flash_attn.sh",1379,107,"",shellscript,content +1090,1648049,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_no_flash_attn.sh",1379,0,"/fast/project/HFMI_SynergyUnit/jafar_ws/checkpoints/coinrun/tokenizer/tokenizer_coinrun_mila_submission_29736/",shellscript,content +1091,1650395,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_no_flash_attn.sh",1630,0,"",shellscript,selection_mouse +1092,1664797,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_no_flash_attn copy.sh",0,0,"#!/usr/bin/env bash\n\n#SBATCH --nodes=1\n#SBATCH --ntasks-per-node=1\n#SBATCH --time=24:00:00\n#SBATCH --cpus-per-task=8\n#SBATCH --gres=gpu:1\n#SBATCH --output=/fast/project/HFMI_SynergyUnit/jafar_ws/logs/franz/coinrun/dynamics/%x_%j.log\n#SBATCH --error=/fast/project/HFMI_SynergyUnit/jafar_ws/logs/franz/coinrun/dynamics/%x_%j.log\n#SBATCH --job-name=dynamics_coinrun_mila_submission_no_flash_attn\n#SBATCH --requeue\n#SBATCH --signal=b:usr1@300 # 5 min before timeout\n\n# --- signal trap to requeue job before timeout ---\nrequeue_job() {\n echo ""[$(date)] caught sigusr1 (timeout warning), requeueing slurm job $SLURM_JOB_ID...""\n # optional: trigger checkpoint saving here\n # e.g., touch $checkpoint_dir/requeue_trigger\n scontrol requeue $SLURM_JOB_ID\n exit 0\n}\n\ntrap requeue_job sigusr1\n\n# set checkpoint flag based on restart count\nrestart_count=$(scontrol show job $SLURM_JOB_ID | grep -o 'Restarts=[0-9]*' | cut -d'=' -f2)\n\nif [ $restart_count -eq 0 ]; then\n restore_ckpt_flag=""--no-restore-ckpt""\nelse\n restore_ckpt_flag=""--restore-ckpt""\nfi\n\n\n\n# Log the sbatch script\ncat $0\n\nsource .venv/bin/activate\n\njob_name=$SLURM_JOB_NAME\nslurm_job_id=$SLURM_JOB_ID\n\ntags=""coinrun dynamics 500m_dataset mila_submission ablation no-flash-attn""\n\narray_records_dir=""/fast/project/HFMI_SynergyUnit/jafar_ws/data/coinrun/array_records_500m_seed_w_increment""\ntokenizer_ckpt_dir=""/fast/project/HFMI_SynergyUnit/jafar_ws/checkpoints/coinrun/tokenizer/tokenizer_coinrun_mila_submission_29736/""\nCHECKPOINT_DIR=""/fast/project/HFMI_SynergyUnit/jafar_ws/checkpoints/coinrun/dynamics/${job_name}/${slurm_job_id}""\nmkdir -p $CHECKPOINT_DIR\n\nenv | grep SLURM\n\n\nsrun python jasmine/train_dynamics.py \\n --no-use-flash-attention \\n --save_ckpt \\n $restore_ckpt_flag \\n --wandb_id $SLURM_JOB_ID \\n --ckpt_dir $CHECKPOINT_DIR \\n --name=""${job_name}_${slurm_job_id}"" \\n --tags ${tags} \\n --entity instant-uv \\n --project jafar \\n --tokenizer_checkpoint=""${tokenizer_ckpt_dir}"" \\n --val_data_dir=""${array_records_dir}/val"" \\n --data_dir=""${array_records_dir}/train"" &\n\nchild_pid=$!\n\nwait 
$child_pid\n\n",shellscript,tab +1093,1674388,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_no_flash_attn_no_mixed_prec.sh",0,0,"#!/usr/bin/env bash\n\n#SBATCH --nodes=1\n#SBATCH --ntasks-per-node=1\n#SBATCH --time=24:00:00\n#SBATCH --cpus-per-task=8\n#SBATCH --gres=gpu:1\n#SBATCH --output=/fast/project/HFMI_SynergyUnit/jafar_ws/logs/franz/coinrun/dynamics/%x_%j.log\n#SBATCH --error=/fast/project/HFMI_SynergyUnit/jafar_ws/logs/franz/coinrun/dynamics/%x_%j.log\n#SBATCH --job-name=dynamics_coinrun_mila_submission_no_flash_attn\n#SBATCH --requeue\n#SBATCH --signal=b:usr1@300 # 5 min before timeout\n\n# --- signal trap to requeue job before timeout ---\nrequeue_job() {\n echo ""[$(date)] caught sigusr1 (timeout warning), requeueing slurm job $SLURM_JOB_ID...""\n # optional: trigger checkpoint saving here\n # e.g., touch $checkpoint_dir/requeue_trigger\n scontrol requeue $SLURM_JOB_ID\n exit 0\n}\n\ntrap requeue_job sigusr1\n\n# set checkpoint flag based on restart count\nrestart_count=$(scontrol show job $SLURM_JOB_ID | grep -o 'Restarts=[0-9]*' | cut -d'=' -f2)\n\nif [ $restart_count -eq 0 ]; then\n restore_ckpt_flag=""--no-restore-ckpt""\nelse\n restore_ckpt_flag=""--restore-ckpt""\nfi\n\n\n\n# Log the sbatch script\ncat $0\n\nsource .venv/bin/activate\n\njob_name=$SLURM_JOB_NAME\nslurm_job_id=$SLURM_JOB_ID\n\ntags=""coinrun dynamics 500m_dataset mila_submission ablation no-flash-attn""\n\narray_records_dir=""/fast/project/HFMI_SynergyUnit/jafar_ws/data/coinrun/array_records_500m_seed_w_increment""\ntokenizer_ckpt_dir=""/fast/project/HFMI_SynergyUnit/jafar_ws/checkpoints/coinrun/tokenizer/tokenizer_coinrun_mila_submission_29736/""\nCHECKPOINT_DIR=""/fast/project/HFMI_SynergyUnit/jafar_ws/checkpoints/coinrun/dynamics/${job_name}/${slurm_job_id}""\nmkdir -p $CHECKPOINT_DIR\n\nenv | grep SLURM\n\n\nsrun python jasmine/train_dynamics.py \\n --no-use-flash-attention \\n --save_ckpt \\n $restore_ckpt_flag \\n --wandb_id $SLURM_JOB_ID \\n --ckpt_dir $CHECKPOINT_DIR \\n --name=""${job_name}_${slurm_job_id}"" \\n --tags ${tags} \\n --entity instant-uv \\n --project jafar \\n --tokenizer_checkpoint=""${tokenizer_ckpt_dir}"" \\n --val_data_dir=""${array_records_dir}/val"" \\n --data_dir=""${array_records_dir}/train"" &\n\nchild_pid=$!\n\nwait $child_pid\n\n",shellscript,tab +1094,1675953,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_no_flash_attn_no_mixed_prec.sh",392,0,"",shellscript,selection_mouse +1095,1675969,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_no_flash_attn_no_mixed_prec.sh",391,0,"",shellscript,selection_command +1096,1676325,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_no_flash_attn_no_mixed_prec.sh",392,0,"",shellscript,selection_command +1097,1676718,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_no_flash_attn_no_mixed_prec.sh",392,0,"_",shellscript,content +1098,1676719,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_no_flash_attn_no_mixed_prec.sh",393,0,"",shellscript,selection_keyboard +1099,1677008,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_no_flash_attn_no_mixed_prec.sh",393,0,"n",shellscript,content +1100,1677009,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_no_flash_attn_no_mixed_prec.sh",394,0,"",shellscript,selection_keyboard 
+1101,1677151,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_no_flash_attn_no_mixed_prec.sh",394,0,"o",shellscript,content +1102,1677152,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_no_flash_attn_no_mixed_prec.sh",395,0,"",shellscript,selection_keyboard +1103,1677479,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_no_flash_attn_no_mixed_prec.sh",395,0,"_",shellscript,content +1104,1677480,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_no_flash_attn_no_mixed_prec.sh",396,0,"",shellscript,selection_keyboard +1105,1677816,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_no_flash_attn_no_mixed_prec.sh",396,0,"m",shellscript,content +1106,1677817,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_no_flash_attn_no_mixed_prec.sh",397,0,"",shellscript,selection_keyboard +1107,1677912,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_no_flash_attn_no_mixed_prec.sh",397,0,"i",shellscript,content +1108,1677913,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_no_flash_attn_no_mixed_prec.sh",398,0,"",shellscript,selection_keyboard +1109,1678062,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_no_flash_attn_no_mixed_prec.sh",398,0,"x",shellscript,content +1110,1678063,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_no_flash_attn_no_mixed_prec.sh",399,0,"",shellscript,selection_keyboard +1111,1678312,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_no_flash_attn_no_mixed_prec.sh",399,0,"e",shellscript,content +1112,1678312,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_no_flash_attn_no_mixed_prec.sh",400,0,"",shellscript,selection_keyboard +1113,1678422,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_no_flash_attn_no_mixed_prec.sh",400,0,"d",shellscript,content +1114,1678423,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_no_flash_attn_no_mixed_prec.sh",401,0,"",shellscript,selection_keyboard +1115,1678560,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_no_flash_attn_no_mixed_prec.sh",401,0,"_",shellscript,content +1116,1678561,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_no_flash_attn_no_mixed_prec.sh",402,0,"",shellscript,selection_keyboard +1117,1678819,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_no_flash_attn_no_mixed_prec.sh",402,0,"p",shellscript,content +1118,1678820,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_no_flash_attn_no_mixed_prec.sh",403,0,"",shellscript,selection_keyboard +1119,1678956,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_no_flash_attn_no_mixed_prec.sh",403,0,"r",shellscript,content +1120,1678957,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_no_flash_attn_no_mixed_prec.sh",404,0,"",shellscript,selection_keyboard +1121,1679098,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_no_flash_attn_no_mixed_prec.sh",404,0,"e",shellscript,content +1122,1679099,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_no_flash_attn_no_mixed_prec.sh",405,0,"",shellscript,selection_keyboard 
+1123,1679189,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_no_flash_attn_no_mixed_prec.sh",405,0,"c",shellscript,content +1124,1679189,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_no_flash_attn_no_mixed_prec.sh",406,0,"",shellscript,selection_keyboard +1125,1683181,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_no_flash_attn_no_mixed_prec.sh",1734,0,"",shellscript,selection_mouse +1126,1708370,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_no_flash_attn.sh",0,0,"",shellscript,tab +1127,1828516,"TERMINAL",0,0,"cd slurm/",,terminal_command +1128,1832570,"TERMINAL",0,0,"git add .",,terminal_command +1129,1832610,"TERMINAL",0,0,"]633;C]0;tum_cte0515@hkn1990:~/Projects/jasmine/slurm",,terminal_output +1130,1843712,"TERMINAL",0,0,"git commit -am added flash attn ablation""\n^C",,terminal_command +1131,1851409,"TERMINAL",0,0,"git commit -am ""added flash attn ablation""",,terminal_command +1132,1851463,"TERMINAL",0,0,"]633;C",,terminal_output +1133,1851531,"TERMINAL",0,0,"[main ff91b86] added flash attn ablation\r\n 1 file changed, 71 insertions(+)\r\n create mode 100644 jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_no_flash_attn.sh\r\n]0;tum_cte0515@hkn1990:~/Projects/jasmine/slurm",,terminal_output +1134,1853954,"TERMINAL",0,0,"git push",,terminal_command +1135,1853989,"TERMINAL",0,0,"]633;C",,terminal_output +1136,1855187,"TERMINAL",0,0,"Enumerating objects: 16, done.\r\nCounting objects: 6% (1/16)\rCounting objects: 12% (2/16)\rCounting objects: 18% (3/16)\rCounting objects: 25% (4/16)\rCounting objects: 31% (5/16)\rCounting objects: 37% (6/16)\rCounting objects: 43% (7/16)\rCounting objects: 50% (8/16)\rCounting objects: 56% (9/16)\rCounting objects: 62% (10/16)\rCounting objects: 68% (11/16)\rCounting objects: 75% (12/16)\rCounting objects: 81% (13/16)\rCounting objects: 87% (14/16)\rCounting objects: 93% (15/16)\rCounting objects: 100% (16/16)\rCounting objects: 100% (16/16), done.\r\nDelta compression using up to 152 threads\r\nCompressing objects: 11% (1/9)\rCompressing objects: 22% (2/9)\rCompressing objects: 33% (3/9)\rCompressing objects: 44% (4/9)\rCompressing objects: 55% (5/9)\rCompressing objects: 66% (6/9)\rCompressing objects: 77% (7/9)\rCompressing objects: 88% (8/9)\rCompressing objects: 100% (9/9)\rCompressing objects: 100% (9/9), done.\r\nWriting objects: 11% (1/9)\rWriting objects: 22% (2/9)\rWriting objects: 33% (3/9)\rWriting objects: 44% (4/9)\rWriting objects: 55% (5/9)\rWriting objects: 66% (6/9)\rWriting objects: 77% (7/9)\rWriting objects: 88% (8/9)\rWriting objects: 100% (9/9)\rWriting objects: 100% (9/9), 1.67 KiB | 853.00 KiB/s, done.\r\nTotal 9 (delta 4), reused 0 (delta 0), pack-reused 0\r\n",,terminal_output +1137,1855330,"TERMINAL",0,0,"remote: Resolving deltas: 0% (0/4)\rremote: Resolving deltas: 25% (1/4)\rremote: Resolving deltas: 50% (2/4)\rremote: Resolving deltas: 75% (3/4)\rremote: Resolving deltas: 100% (4/4)\rremote: Resolving deltas: 100% (4/4), completed with 4 local objects.\r\n",,terminal_output +1138,1855442,"TERMINAL",0,0,"To github.com:p-doom/slurm.git\r\n 556e3da..ff91b86 main -> main\r\n]0;tum_cte0515@hkn1990:~/Projects/jasmine/slurm",,terminal_output +1139,1884840,"TERMINAL",0,0,"python",,terminal_command +1140,1884921,"TERMINAL",0,0,"]633;CPython 3.9.18 (main, Sep 4 2025, 00:00:00) \r\n[GCC 11.4.1 20231218 (Red Hat 11.4.1-3)] on linux\r\nType ""help"", ""copyright"", ""credits"" or ""license"" for 
more information.\r\n>>> ",,terminal_output +1141,1886632,"TERMINAL",0,0,"1",,terminal_output +1142,1886719,"TERMINAL",0,0,"6",,terminal_output +1143,1887407,"TERMINAL",0,0,"0",,terminal_output +1144,1888091,"TERMINAL",0,0,"*",,terminal_output +1145,1888598,"TERMINAL",0,0,"",,terminal_output +1146,1889224,"TERMINAL",0,0,"/",,terminal_output +1147,1889757,"TERMINAL",0,0,"60",,terminal_output +1148,1890076,"TERMINAL",0,0,"\r\n2.6666666666666665\r\n>>> ",,terminal_output +1149,1990423,"TERMINAL",0,0,"\r\n",,terminal_output +1150,1990446,"TERMINAL",0,0,"]0;tum_cte0515@hkn1990:~/Projects/jasmine/slurm",,terminal_output +1151,12901450,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_no_flash_attn.sh",0,0,"Switched from branch 'change-default-to-wsd' to 'seeding-data-generation'",shellscript,git_branch_checkout diff --git a/927a8af5474e5654810c00ce2e09fd2de87d3e5722f33fa1090d867db114e403/crowd-code-d33d9128-8aa8-4382-a7f1-61cc99198a8e1750839147762-2025_06_25-10.21.30.519/source.csv b/927a8af5474e5654810c00ce2e09fd2de87d3e5722f33fa1090d867db114e403/crowd-code-d33d9128-8aa8-4382-a7f1-61cc99198a8e1750839147762-2025_06_25-10.21.30.519/source.csv new file mode 100644 index 0000000000000000000000000000000000000000..e11f23f3da1b43662230fa8493498b542c476f25 --- /dev/null +++ b/927a8af5474e5654810c00ce2e09fd2de87d3e5722f33fa1090d867db114e403/crowd-code-d33d9128-8aa8-4382-a7f1-61cc99198a8e1750839147762-2025_06_25-10.21.30.519/source.csv @@ -0,0 +1,178 @@ +Sequence,Time,File,RangeOffset,RangeLength,Text,Language,Type +2,2413,"TERMINAL",0,0,"queue",,terminal_command +3,2443,"TERMINAL",0,0,"[?25l[?2004l\r]633;E;watch -n1 squeue --me;895d5730-3b47-4a5d-840c-5d137f58d793]633;C[?25h",,terminal_output +4,8529,"TERMINAL",0,0,"squeue --me",,terminal_command +5,8573,"TERMINAL",0,0,"[?25l[?2004l\r]633;E;squeue --me;895d5730-3b47-4a5d-840c-5d137f58d793]633;C[?25h",,terminal_output +6,17811,"TERMINAL",0,0,"time squeue --me",,terminal_command +7,17844,"TERMINAL",0,0,"[?25l[?2004l\r]633;E;squeue --me;895d5730-3b47-4a5d-840c-5d137f58d793]633;C[?25h",,terminal_output +8,37681,"utils/dataloader_new.py",0,0,"import functools\nimport jax\n\nimport tensorflow as tf\n\n# reserve GPU memory for JAX only if tensorflow is built with GPU support\ntry:\n tf.config.experimental.set_visible_devices([], ""GPU"")\nexcept tf.errors.NotFoundError:\n pass\n\n\n# --- TensorFlow function for processing: slicing, normalization ---\ndef _tf_process_episode(episode_tensor, seq_len, image_h, image_w, image_c):\n """"""\n Processes a raw episode tensor in TensorFlow.\n Takes a full episode, extracts a random sequence, and normalizes it.\n Args:\n episode_tensor: A TensorFlow tensor representing a full video episode.\n Expected shape: (dynamic_length, image_h, image_w, image_c)\n Expected dtype: e.g., tf.uint8 (raw pixel values)\n seq_len: The desired length of the sub-sequence to extract.\n image_h: The height of each frame.\n image_w: The width of each frame.\n image_c: The number of channels in each frame.\n Returns:\n A TensorFlow tensor representing the processed video sequence.\n Shape: (seq_len, image_h, image_w, image_c)\n Dtype: tf.float32 (normalized pixel values)\n """"""\n current_episode_len = tf.shape(episode_tensor)[0]\n\n max_start_idx = current_episode_len - seq_len\n\n start_idx = tf.random.uniform(\n shape=(), minval=0, maxval=max_start_idx + 1, dtype=tf.int32\n )\n\n seq = episode_tensor[start_idx : start_idx + seq_len]\n\n seq = tf.cast(seq, tf.float32) / 255.0\n\n # Ensure the final shape is 
statically known for batching.\n # tf.reshape is robust, but tf.ensure_shape or set_shape can also be used if confident.\n processed_sequence = tf.reshape(seq, [seq_len, image_h, image_w, image_c])\n\n return processed_sequence\n\n\ndef _parse_tfrecord_fn(example_proto, image_h, image_w, image_c):\n feature_description = {\n ""height"": tf.io.FixedLenFeature([], tf.int64),\n ""width"": tf.io.FixedLenFeature([], tf.int64),\n ""channels"": tf.io.FixedLenFeature([], tf.int64),\n ""sequence_length"": tf.io.FixedLenFeature([], tf.int64),\n ""raw_video"": tf.io.FixedLenFeature([], tf.string),\n }\n example = tf.io.parse_single_example(example_proto, feature_description)\n\n video_shape = (example[""sequence_length""], image_h, image_w, image_c)\n\n episode_tensor = tf.io.decode_raw(example[""raw_video""], out_type=tf.uint8)\n episode_tensor = tf.reshape(episode_tensor, video_shape)\n\n episode_tensor = tf.ensure_shape(episode_tensor, [None, image_h, image_w, image_c])\n return episode_tensor\n\n\ndef get_dataloader(\n tfrecord_paths: list[str], # List of TFRecord file paths\n seq_len: int,\n global_batch_size: int,\n image_h: int,\n image_w: int,\n image_c: int,\n shuffle_buffer_size: int = 10,\n num_parallel_calls: int = tf.data.AUTOTUNE,\n seed: int = 42,\n):\n """"""\n Creates a tf.data.Dataset pipeline from TFRecord files.\n """"""\n if not tfrecord_paths:\n raise ValueError(""tfrecord_paths list cannot be empty."")\n\n process_id = jax.process_index()\n num_processes = jax.process_count()\n\n assert (\n global_batch_size % num_processes == 0\n ), ""Global batch size {global_batch_size} \\n must be divisible by the number of JAX processes {num_processes} for proper sharding.""\n per_process_batch_size = global_batch_size // num_processes\n\n # Create a dataset of just the paths (filenames)\n path_dataset = tf.data.Dataset.from_tensor_slices(tfrecord_paths)\n breakpoint()\n\n dataset = tf.data.TFRecordDataset(\n tfrecord_paths, num_parallel_reads=tf.data.AUTOTUNE\n )\n\n dataset = dataset.shard(num_shards=num_processes, index=process_id)\n\n # (f.srambical) NOTE: For TFRecords, it's often good to have a large shuffle buffer.\n if shuffle_buffer_size > 0:\n dataset = dataset.shuffle(\n buffer_size=shuffle_buffer_size, seed=seed, reshuffle_each_iteration=True\n )\n parse_fn = functools.partial(\n _parse_tfrecord_fn, image_h=image_h, image_w=image_w, image_c=image_c\n )\n dataset = dataset.map(parse_fn, num_parallel_calls=num_parallel_calls)\n\n tf_process_fn = functools.partial(\n _tf_process_episode,\n seq_len=seq_len,\n image_h=image_h,\n image_w=image_w,\n image_c=image_c,\n )\n dataset = dataset.map(tf_process_fn, num_parallel_calls=num_parallel_calls)\n\n dataset = dataset.repeat(None)\n dataset = dataset.batch(per_process_batch_size, drop_remainder=True)\n dataset = dataset.prefetch(tf.data.AUTOTUNE)\n\n return dataset.as_numpy_iterator()\n",python,tab +9,37682,"utils/dataloader_new.py",3487,0,"",python,selection_mouse +10,38347,"utils/dataloader_new.py",3657,0,"",python,selection_mouse +11,39063,"utils/dataloader_new.py",3526,0,"",python,selection_mouse +12,39065,"utils/dataloader_new.py",3525,0,"",python,selection_command +13,39637,"utils/dataloader_new.py",3487,0,"",python,selection_mouse +14,99817,"utils/dataloader_new.py",4112,0,"",python,selection_mouse +15,100375,"utils/dataloader_new.py",3544,0,"",python,selection_mouse +16,100858,"utils/dataloader_new.py",3487,0,"",python,selection_mouse +17,149529,"utils/dataloader_new.py",4205,0,"",python,selection_mouse 
+18,149531,"utils/dataloader_new.py",4204,0,"",python,selection_command +19,150257,"utils/dataloader.py",0,0,"import functools\nimport jax\n\nimport tensorflow as tf\n\n# reserve GPU memory for JAX only if tensorflow is built with GPU support\ntry:\n tf.config.experimental.set_visible_devices([], ""GPU"")\nexcept tf.errors.NotFoundError:\n pass\n\n\n# --- TensorFlow function for processing: slicing, normalization ---\ndef _tf_process_episode(episode_tensor, seq_len, image_h, image_w, image_c):\n """"""\n Processes a raw episode tensor in TensorFlow.\n Takes a full episode, extracts a random sequence, and normalizes it.\n Args:\n episode_tensor: A TensorFlow tensor representing a full video episode.\n Expected shape: (dynamic_length, image_h, image_w, image_c)\n Expected dtype: e.g., tf.uint8 (raw pixel values)\n seq_len: The desired length of the sub-sequence to extract.\n image_h: The height of each frame.\n image_w: The width of each frame.\n image_c: The number of channels in each frame.\n Returns:\n A TensorFlow tensor representing the processed video sequence.\n Shape: (seq_len, image_h, image_w, image_c)\n Dtype: tf.float32 (normalized pixel values)\n """"""\n current_episode_len = tf.shape(episode_tensor)[0]\n\n max_start_idx = current_episode_len - seq_len\n\n start_idx = tf.random.uniform(\n shape=(), minval=0, maxval=max_start_idx + 1, dtype=tf.int32\n )\n\n seq = episode_tensor[start_idx : start_idx + seq_len]\n\n seq = tf.cast(seq, tf.float32) / 255.0\n\n # Ensure the final shape is statically known for batching.\n # tf.reshape is robust, but tf.ensure_shape or set_shape can also be used if confident.\n processed_sequence = tf.reshape(seq, [seq_len, image_h, image_w, image_c])\n\n return processed_sequence\n\n\ndef _parse_tfrecord_fn(example_proto, image_h, image_w, image_c):\n feature_description = {\n ""height"": tf.io.FixedLenFeature([], tf.int64),\n ""width"": tf.io.FixedLenFeature([], tf.int64),\n ""channels"": tf.io.FixedLenFeature([], tf.int64),\n ""sequence_length"": tf.io.FixedLenFeature([], tf.int64),\n ""raw_video"": tf.io.FixedLenFeature([], tf.string),\n }\n example = tf.io.parse_single_example(example_proto, feature_description)\n\n video_shape = (example[""sequence_length""], image_h, image_w, image_c)\n\n episode_tensor = tf.io.decode_raw(example[""raw_video""], out_type=tf.uint8)\n episode_tensor = tf.reshape(episode_tensor, video_shape)\n\n episode_tensor = tf.ensure_shape(episode_tensor, [None, image_h, image_w, image_c])\n return episode_tensor\n\n\ndef get_dataloader(\n tfrecord_paths: list[str], # List of TFRecord file paths\n seq_len: int,\n global_batch_size: int,\n image_h: int,\n image_w: int,\n image_c: int,\n shuffle_buffer_size: int = 10,\n num_parallel_calls: int = tf.data.AUTOTUNE,\n seed: int = 42,\n):\n """"""\n Creates a tf.data.Dataset pipeline from TFRecord files.\n """"""\n if not tfrecord_paths:\n raise ValueError(""tfrecord_paths list cannot be empty."")\n\n process_id = jax.process_index()\n num_processes = jax.process_count()\n\n assert (\n global_batch_size % num_processes == 0\n ), ""Global batch size {global_batch_size} \\n must be divisible by the number of JAX processes {num_processes} for proper sharding.""\n per_process_batch_size = global_batch_size // num_processes\n\n dataset = tf.data.TFRecordDataset(\n tfrecord_paths, num_parallel_reads=tf.data.AUTOTUNE\n )\n\n dataset = dataset.shard(num_shards=num_processes, index=process_id)\n\n # (f.srambical) NOTE: For TFRecords, it's often good to have a large shuffle buffer.\n if 
shuffle_buffer_size > 0:\n dataset = dataset.shuffle(\n buffer_size=shuffle_buffer_size, seed=seed, reshuffle_each_iteration=True\n )\n parse_fn = functools.partial(\n _parse_tfrecord_fn, image_h=image_h, image_w=image_w, image_c=image_c\n )\n dataset = dataset.map(parse_fn, num_parallel_calls=num_parallel_calls)\n\n tf_process_fn = functools.partial(\n _tf_process_episode,\n seq_len=seq_len,\n image_h=image_h,\n image_w=image_w,\n image_c=image_c,\n )\n dataset = dataset.map(tf_process_fn, num_parallel_calls=num_parallel_calls)\n\n dataset = dataset.repeat(None)\n dataset = dataset.batch(per_process_batch_size, drop_remainder=True)\n dataset = dataset.prefetch(tf.data.AUTOTUNE)\n\n return dataset.as_numpy_iterator()\n",python,tab +20,153641,"TERMINAL",0,0,"time squeue --me",,terminal_command +21,153696,"TERMINAL",0,0,"[?25l[?2004l\r]633;E;squeue --me;895d5730-3b47-4a5d-840c-5d137f58d793]633;C[?25h",,terminal_output +22,220337,"TERMINAL",0,0,"time squeue --me",,terminal_command +23,220388,"TERMINAL",0,0,"[?25l[?2004l\r]633;E;squeue --me;895d5730-3b47-4a5d-840c-5d137f58d793]633;C[?25h",,terminal_output +24,275710,"TERMINAL",0,0,"sbatch scripts/train_tokenizer_overfit_sample.sbatch",,terminal_command +25,275761,"TERMINAL",0,0,"[?25l\r]633;A(jafar) ]0;mahajanm@atcremers51: ~/Projects/jafarmahajanm@atcremers51:~/Projects/jafar$ ]633;Bsbatch scripts/train_tokenizer_overfit_sample.sbatch\r\n[?2004l\r]633;E;sbatch scripts/train_tokenizer_overfit_sample.sbatch;895d5730-3b47-4a5d-840c-5d137f58d793]633;C[?25h",,terminal_output +26,412543,"utils/dataloader.py",0,0,"",python,tab +27,413008,"utils/dataloader_new.py",0,0,"",python,tab +28,413008,"utils/dataloader_new.py",3500,0,"",python,selection_mouse +29,413622,"utils/dataloader_new.py",3487,0,"",python,selection_mouse +30,416146,"utils/dataloader.py",0,0,"",python,tab +31,416147,"utils/dataloader.py",2110,0,"",python,selection_mouse +32,425788,".gitignore",0,0,"*.pyc\n*.npy\n*.png\n*.gif\n\nwandb_key\ncheckpoints/\nwandb/\n__pycache__/\n*ckpt",ignore,tab +33,425789,".gitignore",68,0,"",ignore,selection_command +34,427457,".gitignore",73,0,"",ignore,selection_mouse +35,427465,".gitignore",72,0,"",ignore,selection_command +36,428954,".gitignore",73,0,"\n",ignore,content +37,431180,".gitignore",74,0,"s",ignore,content +38,431181,".gitignore",75,0,"",ignore,selection_keyboard +39,431239,".gitignore",75,0,"l",ignore,content +40,431240,".gitignore",76,0,"",ignore,selection_keyboard +41,431493,".gitignore",76,0,"u",ignore,content +42,431494,".gitignore",77,0,"",ignore,selection_keyboard +43,431628,".gitignore",77,0,"r",ignore,content +44,431629,".gitignore",78,0,"",ignore,selection_keyboard +45,431813,".gitignore",78,0,"m",ignore,content +46,431814,".gitignore",79,0,"",ignore,selection_keyboard +47,432911,".gitignore",79,0,"*",ignore,content +48,432913,".gitignore",80,0,"",ignore,selection_keyboard +49,433224,".gitignore",80,0,".",ignore,content +50,433226,".gitignore",81,0,"",ignore,selection_keyboard +51,433442,".gitignore",81,0,"o",ignore,content +52,433443,".gitignore",82,0,"",ignore,selection_keyboard +53,433656,".gitignore",82,0,"u",ignore,content +54,433657,".gitignore",83,0,"",ignore,selection_keyboard +55,433856,".gitignore",83,0,"t",ignore,content +56,433857,".gitignore",84,0,"",ignore,selection_keyboard +57,433984,".gitignore",84,0,"\n",ignore,content +58,438435,".gitignore",85,0,"d",ignore,content +59,438437,".gitignore",86,0,"",ignore,selection_keyboard +60,438616,".gitignore",86,0,"a",ignore,content 
+61,438617,".gitignore",87,0,"",ignore,selection_keyboard +62,438716,".gitignore",87,0,"t",ignore,content +63,438718,".gitignore",88,0,"",ignore,selection_keyboard +64,438807,".gitignore",88,0,"a",ignore,content +65,438808,".gitignore",89,0,"",ignore,selection_keyboard +66,439038,".gitignore",89,0,"\n",ignore,content +67,439447,".gitignore",90,0,"d",ignore,content +68,439448,".gitignore",91,0,"",ignore,selection_keyboard +69,439656,".gitignore",91,0,"a",ignore,content +70,439657,".gitignore",92,0,"",ignore,selection_keyboard +71,439748,".gitignore",92,0,"t",ignore,content +72,439749,".gitignore",93,0,"",ignore,selection_keyboard +73,439893,".gitignore",93,0,"a",ignore,content +74,439893,".gitignore",94,0,"",ignore,selection_keyboard +75,440360,".gitignore",94,0,"_",ignore,content +76,440361,".gitignore",95,0,"",ignore,selection_keyboard +77,441127,".gitignore",95,0,"t",ignore,content +78,441128,".gitignore",96,0,"",ignore,selection_keyboard +79,441589,".gitignore",96,0,"f",ignore,content +80,441589,".gitignore",97,0,"",ignore,selection_keyboard +81,442445,".gitignore",97,0,"r",ignore,content +82,442446,".gitignore",98,0,"",ignore,selection_keyboard +83,442647,".gitignore",98,0,"e",ignore,content +84,442647,".gitignore",99,0,"",ignore,selection_keyboard +85,442812,".gitignore",99,0,"c",ignore,content +86,442813,".gitignore",100,0,"",ignore,selection_keyboard +87,442939,".gitignore",100,0,"o",ignore,content +88,442940,".gitignore",101,0,"",ignore,selection_keyboard +89,443066,".gitignore",101,0,"r",ignore,content +90,443067,".gitignore",102,0,"",ignore,selection_keyboard +91,443268,".gitignore",102,0,"d",ignore,content +92,443269,".gitignore",103,0,"",ignore,selection_keyboard +93,443390,".gitignore",103,0,"s",ignore,content +94,443391,".gitignore",104,0,"",ignore,selection_keyboard +95,445534,".gitignore",104,0,"\n",ignore,content +96,448327,".gitignore",105,0,"l",ignore,content +97,448329,".gitignore",106,0,"",ignore,selection_keyboard +98,448514,".gitignore",106,0,"o",ignore,content +99,448516,".gitignore",107,0,"",ignore,selection_keyboard +100,449427,".gitignore",107,0,"g",ignore,content +101,449429,".gitignore",108,0,"",ignore,selection_keyboard +102,449520,".gitignore",108,0,"s",ignore,content +103,449523,".gitignore",109,0,"",ignore,selection_keyboard +104,450133,".gitignore",109,0,"\n",ignore,content +105,459064,".gitignore",110,0,"s",ignore,content +106,459066,".gitignore",111,0,"",ignore,selection_keyboard +107,459282,".gitignore",111,0,"c",ignore,content +108,459283,".gitignore",112,0,"",ignore,selection_keyboard +109,459506,".gitignore",112,0,"r",ignore,content +110,459507,".gitignore",113,0,"",ignore,selection_keyboard +111,459696,".gitignore",113,0,"u",ignore,content +112,459697,".gitignore",114,0,"",ignore,selection_keyboard +113,460010,".gitignore",114,0,"t",ignore,content +114,460011,".gitignore",115,0,"",ignore,selection_keyboard +115,460236,".gitignore",114,1,"",ignore,content +116,460364,".gitignore",113,1,"",ignore,content +117,460529,".gitignore",113,0,"i",ignore,content +118,460531,".gitignore",114,0,"",ignore,selection_keyboard +119,460757,".gitignore",114,0,"p",ignore,content +120,460758,".gitignore",115,0,"",ignore,selection_keyboard +121,460904,".gitignore",115,0,"t",ignore,content +122,460905,".gitignore",116,0,"",ignore,selection_keyboard +123,461098,".gitignore",116,0,"s",ignore,content +124,461099,".gitignore",117,0,"",ignore,selection_keyboard +125,468232,"utils/dataloader.py",0,0,"",python,tab 
+126,468284,"utils/dataloader.py",2726,0,"",python,selection_command +127,472265,".gitignore",0,0,"",ignore,tab +128,946611,"scripts/batch_sizes.md",0,0,"## Batchsizes for jafar with Minecraft dataset\n\n### Tokenizer\n| GPU VRAM | Batch Size | Learning Rate |\n|----------|------------|---------------|\n| - | 1 | 4.3e-5 | \n| 12 GB | 6 | 1e-4 | \n| 24 GB | 12 | 1.5e-4 | ?? \n| 40 GB | 12 | |\n| 48 GB | 24 | | ?? why does this work on cremers\n| 80 GB | 48 | 3e-4 |\n\n### LAM\n| GPU VRAM | Batch Size | Learning Rate |\n|----------|------------|---------------|\n| - | 1 | 5e-6/5e-7 | \n| 12 GB | 6 | | \n| 24 GB | 12 | |\n| 48 GB | 24 | |\n| - | 36 | 3e-5/3e-6 |\n| 80 GB | 48 | |\n",markdown,tab +129,965032,"scripts/train_tokenizer_overfit_batch.sbatch",0,0,"#!/bin/bash\n#SBATCH --nodes=1\n#SBATCH --ntasks-per-node=1\n#SBATCH --time=00:30:00\n#SBATCH --cpus-per-task=5\n#SBATCH --gres=gpu:1,VRAM:24G\n#SBATCH --mem=50G\n#SBATCH --output=logs/logs_training/%x_%j.log\n#SBATCH --error=logs/logs_training/%x_%j.log\n#SBATCH --job-name=train_tokenizer_minecraft_overfit_batch\n\n# Log the sbatch script\ncat $0\n\ntf_records_dir=""/storage/user/mahajanm/Projects/world-modeling/knoms_tfrecords_500/""\nws_dir='/storage/user/mahajanm/Projects/world-modeling'\njob_name=$SLURM_JOB_NAME\nslurm_job_id=$SLURM_JOB_ID\n\nCHECKPOINT_DIR=$ws_dir/checkpoints/$job_name_$slurm_job_id\nmkdir -p $CHECKPOINT_DIR\n\nenv | grep SLURM\n\nSLURM_STEP_NODELIST=$SLURM_NODELIST python train_tokenizer.py \\n --ckpt_dir $CHECKPOINT_DIR \\n --batch_size=1 \\n --min_lr=1e-4 \\n --max_lr=1e-4 \\n --log_image_interval=3 \\n --log \\n --entity instant-uv \\n --project jafar \\n --data_dir $tf_records_dir\n",shellscript,tab +130,1334999,"scripts/batch_sizes.md",0,0,"",markdown,tab +131,1767929,"TERMINAL",0,0,"queue",,terminal_command +132,1767986,"TERMINAL",0,0,"\r\n[?2004l\r]633;E;watch -n1 squeue --me;895d5730-3b47-4a5d-840c-5d137f58d793]633;C",,terminal_output +133,1805007,"TERMINAL",0,0,"[?1049h(B[?7h]4;8;rgb:54/54/54\]4;9;rgb:FF/54/54\]4;10;rgb:54/FF/54\]4;11;rgb:FF/FF/54\]4;12;rgb:54/54/FF\]4;13;rgb:FF/54/FF\]4;14;rgb:54/FF/FF\(BEvery 1.0s: squeue --meatcremers51: Wed Jun 25 10:50:58 2025slurm_load_jobs error: Slurm backup controller in standby mode(B",,terminal_output +134,1829578,".gitignore",0,0,"",ignore,tab +135,1830784,".gitignore",98,19,"ecords\nlogs\nscripts",ignore,selection_mouse +136,1830831,".gitignore",73,44,"\nslurm*.out\ndata\ndata_tfrecords\nlogs\nscripts",ignore,selection_mouse +137,1830832,".gitignore",50,67,"ndb/\n__pycache__/\n*ckpt\nslurm*.out\ndata\ndata_tfrecords\nlogs\nscripts",ignore,selection_mouse +138,1830833,".gitignore",25,92,"wandb_key\ncheckpoints/\nwandb/\n__pycache__/\n*ckpt\nslurm*.out\ndata\ndata_tfrecords\nlogs\nscripts",ignore,selection_mouse +139,1830844,".gitignore",24,93,"\nwandb_key\ncheckpoints/\nwandb/\n__pycache__/\n*ckpt\nslurm*.out\ndata\ndata_tfrecords\nlogs\nscripts",ignore,selection_mouse +140,1830891,".gitignore",18,99,"*.gif\n\nwandb_key\ncheckpoints/\nwandb/\n__pycache__/\n*ckpt\nslurm*.out\ndata\ndata_tfrecords\nlogs\nscripts",ignore,selection_mouse +141,1830892,".gitignore",12,105,"*.png\n*.gif\n\nwandb_key\ncheckpoints/\nwandb/\n__pycache__/\n*ckpt\nslurm*.out\ndata\ndata_tfrecords\nlogs\nscripts",ignore,selection_mouse +142,1830896,".gitignore",6,111,"*.npy\n*.png\n*.gif\n\nwandb_key\ncheckpoints/\nwandb/\n__pycache__/\n*ckpt\nslurm*.out\ndata\ndata_tfrecords\nlogs\nscripts",ignore,selection_mouse 
+143,1830945,".gitignore",1,116,".pyc\n*.npy\n*.png\n*.gif\n\nwandb_key\ncheckpoints/\nwandb/\n__pycache__/\n*ckpt\nslurm*.out\ndata\ndata_tfrecords\nlogs\nscripts",ignore,selection_mouse +144,1830992,".gitignore",0,117,"*.pyc\n*.npy\n*.png\n*.gif\n\nwandb_key\ncheckpoints/\nwandb/\n__pycache__/\n*ckpt\nslurm*.out\ndata\ndata_tfrecords\nlogs\nscripts",ignore,selection_mouse +145,1832428,".gitignore",89,0,"",ignore,selection_mouse +146,1833143,".gitignore",117,0,"",ignore,selection_mouse +147,1833305,".gitignore",109,8,"\nscripts",ignore,selection_mouse +148,1833330,".gitignore",94,23,"_tfrecords\nlogs\nscripts",ignore,selection_mouse +149,1833380,".gitignore",88,29,"a\ndata_tfrecords\nlogs\nscripts",ignore,selection_mouse +150,1833381,".gitignore",76,41,"urm*.out\ndata\ndata_tfrecords\nlogs\nscripts",ignore,selection_mouse +151,1833431,".gitignore",75,42,"lurm*.out\ndata\ndata_tfrecords\nlogs\nscripts",ignore,selection_mouse +152,1833531,".gitignore",69,48,"ckpt\nslurm*.out\ndata\ndata_tfrecords\nlogs\nscripts",ignore,selection_mouse +153,1833581,".gitignore",68,49,"*ckpt\nslurm*.out\ndata\ndata_tfrecords\nlogs\nscripts",ignore,selection_mouse +154,2047870,"utils/dataloader_new.py",0,0,"",python,tab +155,2047872,"utils/dataloader_new.py",3023,0,"",python,selection_mouse +156,2053549,"utils/dataloader.py",0,0,"",python,tab +157,2053567,"utils/dataloader.py",2726,0,"",python,selection_command +158,2060763,"utils/dataloader.py",0,0,"",python,tab +159,2062569,"utils/dataloader_new.py",0,0,"",python,tab +160,2065700,"train_tokenizer.py",0,0,"from dataclasses import dataclass\nimport os\nimport time\n\nimport einops\nfrom flax.training import orbax_utils\nfrom flax.training.train_state import TrainState\nfrom jax.sharding import Mesh, PartitionSpec, NamedSharding\nfrom jax.experimental.mesh_utils import create_device_mesh\nimport optax\nimport orbax\nfrom orbax.checkpoint import PyTreeCheckpointer\nimport numpy as np\nimport dm_pix as pix\nimport jax\nimport jax.numpy as jnp\nimport tyro\nimport wandb\n\nfrom models.tokenizer import TokenizerVQVAE\nfrom utils.dataloader import get_dataloader\n\nts = int(time.time())\n\n\n@dataclass\nclass Args:\n # Experiment\n num_steps: int = 300_000\n seed: int = 0\n seq_len: int = 16\n image_channels: int = 3\n image_height: int = 90\n image_width: int = 160\n data_dir: str = ""data_tfrecords/coinrun""\n checkpoint: str = """"\n # Optimization\n vq_beta: float = 0.25\n batch_size: int = 48\n min_lr: float = 3e-4\n max_lr: float = 3e-4\n warmup_steps: int = 10000\n # Tokenizer\n model_dim: int = 512\n latent_dim: int = 32\n num_latents: int = 1024\n patch_size: int = 4\n num_blocks: int = 8\n num_heads: int = 8\n dropout: float = 0.0\n codebook_dropout: float = 0.01\n # Logging\n log: bool = False\n entity: str = """"\n project: str = """"\n log_interval: int = 5\n log_image_interval: int = 250\n ckpt_dir: str = """"\n log_checkpoint_interval: int = 10000\n log_gradients: bool = False\n\n\nargs = tyro.cli(Args)\n\n\ndef tokenizer_loss_fn(params, state, inputs):\n # --- Compute loss ---\n outputs = state.apply_fn(\n params,\n inputs,\n training=True,\n rngs={""params"": inputs[""rng""], ""dropout"": inputs[""dropout_rng""]},\n )\n mse = jnp.square(inputs[""videos""] - outputs[""recon""]).mean()\n q_loss = jnp.square(jax.lax.stop_gradient(outputs[""emb""]) - outputs[""z""]).mean()\n commitment_loss = jnp.square(\n outputs[""emb""] - jax.lax.stop_gradient(outputs[""z""])\n ).mean()\n loss = mse + q_loss + args.vq_beta * commitment_loss\n\n # --- Compute 
validation metrics ---\n gt = inputs[""videos""].clip(0, 1).reshape(-1, *inputs[""videos""].shape[2:])\n recon = outputs[""recon""].clip(0, 1).reshape(-1, *outputs[""recon""].shape[2:])\n psnr = pix.psnr(gt, recon).mean()\n ssim = pix.ssim(gt, recon).mean()\n _, index_counts = jnp.unique_counts(\n jnp.ravel(outputs[""indices""]), size=args.num_latents, fill_value=0\n )\n codebook_usage = (index_counts != 0).mean()\n metrics = dict(\n loss=loss,\n mse=mse,\n q_loss=q_loss,\n commitment_loss=commitment_loss,\n psnr=psnr,\n ssim=ssim,\n codebook_usage=codebook_usage,\n )\n return loss, (outputs[""recon""], metrics)\n\n\n@jax.jit\ndef train_step(state, inputs):\n grad_fn = jax.value_and_grad(tokenizer_loss_fn, has_aux=True, allow_int=True)\n (loss, (recon, metrics)), grads = grad_fn(state.params, state, inputs)\n state = state.apply_gradients(grads=grads)\n if args.log_gradients:\n metrics[""encoder_gradients_std/""] = jax.tree.map(\n lambda x: x.std(), grads[""params""][""encoder""]\n )\n metrics[""vq_gradients_std/""] = jax.tree.map(\n lambda x: x.std(), grads[""params""][""vq""]\n )\n metrics[""decoder_gradients_std/""] = jax.tree.map(\n lambda x: x.std(), grads[""params""][""decoder""]\n )\n return state, loss, recon, metrics\n\n\nif __name__ == ""__main__"":\n jax.distributed.initialize()\n num_devices = jax.device_count()\n if num_devices == 0:\n raise ValueError(""No JAX devices found."")\n print(f""Running on {num_devices} devices."")\n\n if args.batch_size % num_devices != 0:\n raise ValueError(\n f""Global batch size {args.batch_size} must be divisible by ""\n f""number of devices {num_devices}.""\n )\n\n per_device_batch_size_for_init = args.batch_size // num_devices\n\n rng = jax.random.PRNGKey(args.seed)\n if args.log and jax.process_index() == 0:\n wandb.init(entity=args.entity, project=args.project, group=""debug"", config=args)\n\n # --- Initialize model ---\n tokenizer = TokenizerVQVAE(\n in_dim=args.image_channels,\n model_dim=args.model_dim,\n latent_dim=args.latent_dim,\n num_latents=args.num_latents,\n patch_size=args.patch_size,\n num_blocks=args.num_blocks,\n num_heads=args.num_heads,\n dropout=args.dropout,\n codebook_dropout=args.codebook_dropout,\n )\n rng, _rng = jax.random.split(rng)\n image_shape = (args.image_height, args.image_width, args.image_channels)\n inputs = dict(\n videos=jnp.zeros(\n (per_device_batch_size_for_init, args.seq_len, *image_shape),\n dtype=jnp.float32,\n ),\n )\n init_params = tokenizer.init(_rng, inputs)\n\n # --- Initialize optimizer ---\n lr_schedule = optax.warmup_cosine_decay_schedule(\n args.min_lr, args.max_lr, args.warmup_steps, args.num_steps\n )\n tx = optax.adamw(learning_rate=lr_schedule, b1=0.9, b2=0.9, weight_decay=1e-4)\n train_state = TrainState.create(apply_fn=tokenizer.apply, params=init_params, tx=tx)\n\n # FIXME: switch to create_hybrid_device_mesh for runs spanning multiple nodes\n device_mesh_arr = create_device_mesh((num_devices,))\n mesh = Mesh(devices=device_mesh_arr, axis_names=(""data"",))\n\n replicated_sharding = NamedSharding(mesh, PartitionSpec())\n train_state = jax.device_put(train_state, replicated_sharding)\n\n # --- Load checkpoint ---\n step = 0\n if args.checkpoint:\n restore_target = {""model"": train_state}\n restore_args = orbax_utils.restore_args_from_target(restore_target)\n train_state.params[""params""].update(\n PyTreeCheckpointer()\n .restore(args.checkpoint, item=restore_target, restore_args=restore_args)[\n ""model""\n ]\n .params[""params""]\n )\n # Assume checkpoint is of the form 
tokenizer__\n step += int(args.checkpoint.split(""_"")[-1])\n\n # --- TRAIN LOOP ---\n tfrecord_files = [\n os.path.join(args.data_dir, x)\n for x in os.listdir(args.data_dir)\n if x.endswith("".tfrecord"")\n ]\n dataloader = get_dataloader(\n # NOTE: We deliberately pass the global batch size\n # The dataloader shards the dataset across all processes\n tfrecord_files,\n args.seq_len,\n args.batch_size,\n *image_shape,\n )\n print(f""Starting training from step {step}..."")\n while step < args.num_steps:\n # for videos in dataloader:\n npy_path = ""overfit_dir/single_sample_corner.npy""\n videos = np.load(npy_path)\n print(""batch shape: "", videos.shape)\n while(True):\n # --- Train step ---\n rng, _rng, _rng_dropout = jax.random.split(rng, 3)\n\n videos_sharding = NamedSharding(\n mesh, PartitionSpec(""data"", None, None, None, None)\n )\n videos = jax.make_array_from_process_local_data(videos_sharding, videos)\n\n inputs = dict(videos=videos, rng=_rng, dropout_rng=_rng_dropout)\n start_time = time.time()\n train_state, loss, recon, metrics = train_step(train_state, inputs)\n elapsed_time = (time.time() - start_time) * 1000\n print(f""Step {step}, loss: {loss}, step time: {elapsed_time}ms"")\n step += 1\n\n # --- Logging ---\n if args.log:\n if step % args.log_interval == 0 and jax.process_index() == 0:\n wandb.log(\n {\n ""loss"": loss,\n ""step"": step,\n ""step_time_ms"": elapsed_time,\n **metrics,\n }\n )\n if step % args.log_image_interval == 0:\n gt_seq = inputs[""videos""][0]\n recon_seq = recon[0].clip(0, 1)\n comparison_seq = jnp.concatenate((gt_seq, recon_seq), axis=1)\n comparison_seq = einops.rearrange(\n comparison_seq * 255, ""t h w c -> h (t w) c""\n )\n # NOTE: Process-dependent control flow deliberately happens\n # after indexing operation since it must not contain code\n # sections that lead to cross-accelerator communication.\n if jax.process_index() == 0:\n log_images = dict(\n image=wandb.Image(np.asarray(gt_seq[0])),\n recon=wandb.Image(np.asarray(recon_seq[0])),\n true_vs_recon=wandb.Image(\n np.asarray(comparison_seq.astype(np.uint8))\n ),\n )\n wandb.log(log_images)\n if step % args.log_checkpoint_interval == 0:\n ckpt = {""model"": train_state}\n orbax_checkpointer = orbax.checkpoint.PyTreeCheckpointer()\n save_args = orbax_utils.save_args_from_target(ckpt)\n orbax_checkpointer.save(\n os.path.join(os.getcwd(), args.ckpt_dir, f""tokenizer_{ts}_{step}""),\n ckpt,\n save_args=save_args,\n )\n if step >= args.num_steps:\n break\n",python,tab +161,2065786,"train_tokenizer.py",6527,0,"",python,selection_command +162,2067915,"utils/dataloader_new.py",0,0,"",python,tab +163,2077118,"train_tokenizer.py",0,0,"",python,tab +164,2091508,"train_tokenizer.py",6721,0,"",python,selection_mouse +165,2091509,"train_tokenizer.py",6720,0,"",python,selection_command +166,2091620,"train_tokenizer.py",6720,1,":",python,selection_mouse +167,2091620,"train_tokenizer.py",6721,0,"",python,selection_command +168,2091634,"train_tokenizer.py",6679,42,"ape: "", videos.shape)\n while(True):",python,selection_mouse +169,2091648,"train_tokenizer.py",6676,45," shape: "", videos.shape)\n while(True):",python,selection_mouse +170,2091698,"train_tokenizer.py",6673,48,"tch shape: "", videos.shape)\n while(True):",python,selection_mouse +171,2091699,"train_tokenizer.py",6632,89,"eos = np.load(npy_path)\n print(""batch shape: "", videos.shape)\n while(True):",python,selection_mouse +172,2091699,"train_tokenizer.py",6629,92,"videos = np.load(npy_path)\n print(""batch shape: "", videos.shape)\n 
while(True):",python,selection_mouse +173,2091748,"train_tokenizer.py",6625,96," videos = np.load(npy_path)\n print(""batch shape: "", videos.shape)\n while(True):",python,selection_mouse +174,2091749,"train_tokenizer.py",6624,97," videos = np.load(npy_path)\n print(""batch shape: "", videos.shape)\n while(True):",python,selection_mouse +175,2091749,"train_tokenizer.py",6564,157," npy_path = ""overfit_dir/single_sample_corner.npy""\n videos = np.load(npy_path)\n print(""batch shape: "", videos.shape)\n while(True):",python,selection_mouse +176,2091800,"train_tokenizer.py",6563,158," npy_path = ""overfit_dir/single_sample_corner.npy""\n videos = np.load(npy_path)\n print(""batch shape: "", videos.shape)\n while(True):",python,selection_mouse +177,2091939,"train_tokenizer.py",6527,194," # for videos in dataloader:\n npy_path = ""overfit_dir/single_sample_corner.npy""\n videos = np.load(npy_path)\n print(""batch shape: "", videos.shape)\n while(True):",python,selection_mouse +178,2212633,"train_tokenizer.py",6818,0,"",python,selection_mouse diff --git a/927a8af5474e5654810c00ce2e09fd2de87d3e5722f33fa1090d867db114e403/crowd-code-f4829211-7733-466c-a3b6-7433cf5dda121753358379439-2025_07_24-14.00.14.771/source.csv b/927a8af5474e5654810c00ce2e09fd2de87d3e5722f33fa1090d867db114e403/crowd-code-f4829211-7733-466c-a3b6-7433cf5dda121753358379439-2025_07_24-14.00.14.771/source.csv new file mode 100644 index 0000000000000000000000000000000000000000..77a2c0b303993361cb033baf4bcdc533ae43013e --- /dev/null +++ b/927a8af5474e5654810c00ce2e09fd2de87d3e5722f33fa1090d867db114e403/crowd-code-f4829211-7733-466c-a3b6-7433cf5dda121753358379439-2025_07_24-14.00.14.771/source.csv @@ -0,0 +1,7923 @@ +Sequence,Time,File,RangeOffset,RangeLength,Text,Language,Type +2,356,"extension-output-pdoom-org.crowd-code-#1-crowd-code",0,0,"2:00:14 PM [info] Activating crowd-code\n2:00:14 PM [info] Recording started\n2:00:14 PM [info] Initializing git provider using file system watchers...\n2:00:14 PM [info] Git repository found\n2:00:14 PM [info] Git provider initialized successfully\n",Log,tab +3,416,"extension-output-pdoom-org.crowd-code-#1-crowd-code",245,0,"2:00:15 PM [info] Initial git state: [object Object]\n",Log,content +4,45476,"TERMINAL",0,0,"source .venv/bin/activate",,terminal_command +5,45504,"TERMINAL",0,0,"]633;E;2025-07-24 14:01:00 source .venv/bin/activate;406cfb31-2341-454a-afa8-cae7781806b2]633;C]0;tum_cte0515@hkn1991:~/Projects/jafar]633;D;0",,terminal_output +6,47131,"slurm/dev/mihir/horeka/causal_fit_modelsizes/train_dynamics_new_arch_80M.sbatch",0,0,"#!/usr/bin/env bash\n\n#SBATCH --nodes=1\n#SBATCH --ntasks-per-node=1\n#SBATCH --time=00:10:00\n#SBATCH --partition=dev_accelerated\n#SBATCH --cpus-per-task=5\n#SBATCH --gres=gpu:1\n#SBATCH --output=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir/%x_%j.log\n#SBATCH --error=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir/%x_%j.log\n#SBATCH --job-name=train_dyn_causal\n\n# Log the sbatch script\ncat $0\n\nmodule unload mpi/openmpi/5.0\nmodule unload devel/cuda/12.4\nsource .venv/bin/activate\n\narray_records_dir=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_new/open_ai_minecraft_arrayrecords_chunked\n\njob_name=$SLURM_JOB_NAME\nslurm_job_id=$SLURM_JOB_ID\n\nCHECKPOINT_DIR=$ws_dir/checkpoints/$job_name/$slurm_job_id\nmkdir -p $CHECKPOINT_DIR\n\ntokenizer_ckpt_dir=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data/checkpoints/train_tokenizer_lr_sweep_1e-4_larger_ffn/\n\nenv | grep SLURM\n\nsrun 
python train_dynamics.py \\n --save_ckpt \\n --num_steps=50 \\n --warmup_steps=0 \\n --wsd_decay_steps=0 \\n --ckpt_dir $CHECKPOINT_DIR \\n --batch_size=12 \\n --init_lr=1e-3 \\n --max_lr=1e-3 \\n --log_image_interval=100 \\n --log \\n --log_checkpoint_interval=100 \\n --name=dynamics-causal-80M-$slurm_job_id \\n --tags dynamics causal 80M \\n --entity instant-uv \\n --project jafar \\n --tokenizer_checkpoint=$tokenizer_ckpt_dir \\n --data_dir $array_records_dir\n ",shellscript,tab +7,47976,"slurm/dev/mihir/horeka/causal_fit_modelsizes/train_dynamics_new_arch_80M.sbatch",972,0,"",shellscript,selection_mouse +8,47978,"slurm/dev/mihir/horeka/causal_fit_modelsizes/train_dynamics_new_arch_80M.sbatch",971,0,"",shellscript,selection_command +9,48498,"slurm/dev/mihir/horeka/causal_fit_modelsizes/train_dynamics_new_arch_80M.sbatch",922,0,"",shellscript,selection_mouse +10,52629,"TERMINAL",0,0,"runner-2",,terminal_command +11,52657,"TERMINAL",0,0,"]633;E;2025-07-24 14:01:07 runner-2;406cfb31-2341-454a-afa8-cae7781806b2]633;C]0;tum_cte0515@hkn1991:~/Projects/jafar_jobs_2]633;D;0",,terminal_output +12,55720,"TERMINAL",0,0,"sync-runner-2",,terminal_command +13,55753,"TERMINAL",0,0,"]633;E;2025-07-24 14:01:10 sync-runner-2;406cfb31-2341-454a-afa8-cae7781806b2]633;Csending incremental file list\r\n",,terminal_output +14,56037,"TERMINAL",0,0,"./\r\n",,terminal_output +15,56966,"TERMINAL",0,0,"\r\nsent 24,558 bytes received 148 bytes 16,470.67 bytes/sec\r\ntotal size is 185,127,089 speedup is 7,493.20\r\n]0;tum_cte0515@hkn1991:~/Projects/jafar_jobs_2]633;D;0",,terminal_output +16,59411,"slurm/dev/mihir/horeka/causal_fit_modelsizes/train_dynamics_new_arch_80M.sbatch",0,0,"",shellscript,tab +17,60996,"slurm/dev/mihir/horeka/causal_fit_modelsizes/train_dynamics_new_arch_80M.sbatch",888,0,"",shellscript,selection_mouse +18,61124,"slurm/dev/mihir/horeka/causal_fit_modelsizes/train_dynamics_new_arch_80M.sbatch",880,27,"train_tokenizer_lr_sweep_1e",shellscript,selection_mouse +19,61271,"slurm/dev/mihir/horeka/causal_fit_modelsizes/train_dynamics_new_arch_80M.sbatch",788,134,"tokenizer_ckpt_dir=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data/checkpoints/train_tokenizer_lr_sweep_1e-4_larger_ffn/\n",shellscript,selection_mouse +20,70250,"slurm/dev/mihir/horeka/causal_fit_modelsizes/train_dynamics_new_arch_80M.sbatch",702,0,"",shellscript,selection_mouse +21,81658,"TERMINAL",0,0,"git log",,terminal_command +22,84971,"TERMINAL",0,0,"cd ..",,terminal_command +23,85024,"TERMINAL",0,0,"]633;E;2025-07-24 14:01:39 cd ..;406cfb31-2341-454a-afa8-cae7781806b2]633;C]0;tum_cte0515@hkn1991:~/Projects]633;D;0]633;P;Cwd=/home/hk-project-p0023960/tum_cte0515/Projects",,terminal_output +24,85842,"TERMINAL",0,0,"cd jafar",,terminal_command +25,87171,"TERMINAL",0,0,"git log",,terminal_command +26,87253,"TERMINAL",0,0,"]633;E;2025-07-24 14:01:41 git log;406cfb31-2341-454a-afa8-cae7781806b2]633;C[?1h=\rcommit 23a915871996191fb40f3bf8ef0d91d6066d37d9 (HEAD -> new-arch-sampling)\r\nMerge: 3777e6d e296267\r\nAuthor: Mihir Mahajan \r\nDate: Thu Jul 24 12:55:21 2025 +0200\r\n\r\n merged main into here\r\n\r\ncommit 3777e6dc7cc8a5d6274aa33de39412daa1f67358\r\nAuthor: Mihir Mahajan \r\nDate: Thu Jul 24 12:49:16 2025 +0200\r\n\r\n uncommented all the commented out code\r\n\r\ncommit e296267fd3bdd400b720d8d6568b1583eda675dd (origin/main, origin/HEAD, main)\r\nMerge: 14e6c02 bff38fa\r\nAuthor: mihir <78321484+maharajamihir@users.noreply.github.com>\r\nDate: Thu Jul 24 11:09:41 2025 +0200\r\n\r\n Merge pull request #95 from 
p-doom/fix-dtype-hint-warning\r\n \r\n chore: infer dtype type annotation (fix warning)\r\n\r\ncommit 14e6c02749f08f8465e5cc9f0fc757a6d5f52c7e\r\nAuthor: mihir <78321484+maharajamihir@users.noreply.github.com>\r\nDate: Thu Jul 24 11:05:37 2025 +0200\r\n\r\n fix: removed dataloader from restore genie component args (#102)\r\n\r\ncommit c1ea8b738f284348612a460482cd29f537544222\r\nAuthor: mihir <78321484+maharajamihir@users.noreply.github.com>\r\nDate: Thu Jul 24 11:04:18 2025 +0200\r\n\r\n feat: different maskprob per sample for maskgit training (#101)\r\n \r\n * feat: different maskprob per sample for maskgit training\r\n \r\n Co-authored-by: emergenz \r\n\r\ncommit 52cd7c0b40c5337371b45cbf1bfc79191cb7f9e9 (origin/new-arch-sampling)\r\nAuthor: Mihir Mahajan \r\nDate: Wed Jul 23 11:50:06 2025 +0200\r\n\r\n sync dev branch\r\n\r\ncommit f8d891ef074e46273af808b111141d1448d421a0\r\nAuthor: Alfred Nguyen <85162596+avocadoali@users.noreply.github.com>\r\nDate: Tue Jul 22 17:33:30 2025 +0200\r\n:",,terminal_output +27,88942,"TERMINAL",0,0,"\r\r:",,terminal_output +28,89267,"TERMINAL",0,0,"\r\r:",,terminal_output +29,89471,"TERMINAL",0,0,"\r\r:",,terminal_output +30,89649,"TERMINAL",0,0,"\r\r:",,terminal_output +31,89831,"TERMINAL",0,0,"\r\r:",,terminal_output +32,89930,"TERMINAL",0,0,"\r\r:",,terminal_output +33,90287,"TERMINAL",0,0,"\r\r:",,terminal_output +34,90453,"TERMINAL",0,0,"\r\r:",,terminal_output +35,93808,"TERMINAL",0,0,"\r\r\n:",,terminal_output +36,94627,"TERMINAL",0,0,"\r chore: run pre-commit on codebase (#100)\r\n:\r\r\n:\rcommit b2196a70f95136aaf4663976f585611c3b518fa9\r\n:\rAuthor: Mihir Mahajan \r\n:\rDate: Tue Jul 22 17:18:59 2025 +0200\r\n:\r\r\n:\r add some more hacky shit\r\n:\r\r\n:\rcommit 8cbd77ee852d7c760d0882d41b0588450bb08a64\r\n:\rAuthor: Mihir Mahajan \r\n:\rDate: Tue Jul 22 11:56:17 2025 +0200\r\n:\r\r\n:\r loaded npy file in int8 to emulate dataloader\r\n:",,terminal_output +37,94906,"TERMINAL",0,0,"\r\r\n:",,terminal_output +38,95717,"TERMINAL",0,0,"\rcommit 93eb1b19c8f960525a0b26234483a61b250ea441\r\n:\rAuthor: Mihir Mahajan \r\n:\rDate: Mon Jul 21 18:53:16 2025 +0200\r\n:\r\r\n:\r reverted scaling of input images\r\n:\r\r\n:\rcommit 8c88319cc4b92fde56e4e4c5781cb81761b431c3\r\n:\rAuthor: Mihir Mahajan \r\n:\rDate: Mon Jul 21 17:37:16 2025 +0200\r\n:\r\r\n:\r reverted back sampling from overfit run\r\n:\r\r\n:\rcommit 371f8a9c06dabcf0972d90da3716a0c131808a51\r\n:",,terminal_output +39,95784,"TERMINAL",0,0,"\rAuthor: Franz Srambical <79149449+emergenz@users.noreply.github.com>\r\n:",,terminal_output +40,96468,"TERMINAL",0,0,"\rDate: Mon Jul 21 17:33:28 2025 +0200\r\n:",,terminal_output +41,97031,"TERMINAL",0,0,"\r\r\n:\r feat: distinct `ffn_dim` (#90)\r\n:\r \r\n:\r * feat: use flash attention\r\n:",,terminal_output +42,97167,"TERMINAL",0,0,"\r \r\n:",,terminal_output +43,97408,"TERMINAL",0,0,"\r * fix: rearrange qkv for jax.nn.dot_product_attention\r\n:",,terminal_output +44,97960,"TERMINAL",0,0,"\r \r\n:\r * chore: bump required jax version due to upstream bugfix\r\n:\r \r\n:",,terminal_output +45,98133,"TERMINAL",0,0,"\r * feat: pad seq_len to multiple of 4 as per cudnn FA requirement\r\n:",,terminal_output +46,98569,"TERMINAL",0,0,"\r \r\n:",,terminal_output +47,98807,"TERMINAL",0,0,"\r * feat: use ffn_dim=4x model_dim for increased arithmetic intensity\r\n:",,terminal_output +48,98996,"TERMINAL",0,0,"\r\r\n:",,terminal_output +49,99151,"TERMINAL",0,0,"\rcommit e0471d7876c01f8311010f5ee3233ca5bc5b2d27\r\n:",,terminal_output +50,99366,"TERMINAL",0,0,"\rAuthor: 
Mihir Mahajan \r\n:",,terminal_output +51,99487,"TERMINAL",0,0,"\rDate: Mon Jul 21 16:35:35 2025 +0200\r\n:",,terminal_output +52,99719,"TERMINAL",0,0,"\r\r\n:",,terminal_output +53,99899,"TERMINAL",0,0,"\r sampling on overfitting run works partially; still some artifacts when sampling 2 frames\r\n:",,terminal_output +54,100075,"TERMINAL",0,0,"\r\r\n:",,terminal_output +55,100388,"TERMINAL",0,0,"\rcommit 452028feacb27808efafb4e71be0d312fc1af6b5\r\n:",,terminal_output +56,100647,"TERMINAL",0,0,"\rAuthor: Mihir Mahajan \r\n:",,terminal_output +57,100886,"TERMINAL",0,0,"\rDate: Mon Jul 21 15:40:13 2025 +0200\r\n:",,terminal_output +58,104918,"TERMINAL",0,0,"\r[?1l>]0;tum_cte0515@hkn1991:~/Projects/jafar]633;D;0",,terminal_output +59,107842,"models/dynamics.py",0,0,"from typing import Dict, Any\n\nimport jax\nimport jax.numpy as jnp\nimport flax.linen as nn\nimport einops\n\nfrom utils.nn import STTransformer\n\n\nclass DynamicsMaskGIT(nn.Module):\n """"""MaskGIT dynamics model""""""\n\n model_dim: int\n ffn_dim: int\n num_latents: int\n num_blocks: int\n num_heads: int\n dropout: float\n mask_limit: float\n param_dtype: jnp.dtype\n dtype: jnp.dtype\n use_flash_attention: bool\n\n def setup(self):\n self.dynamics = STTransformer(\n self.model_dim,\n self.ffn_dim,\n self.num_latents,\n self.num_blocks,\n self.num_heads,\n self.dropout,\n self.param_dtype,\n self.dtype,\n spatial_bert=True,\n use_flash_attention=self.use_flash_attention,\n )\n self.patch_embed = nn.Embed(self.num_latents, self.model_dim)\n self.mask_token = self.param(\n ""mask_token"",\n nn.initializers.lecun_uniform(),\n (1, 1, 1, self.model_dim),\n )\n self.action_up = nn.Dense(\n self.model_dim,\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n )\n\n def __call__(self, batch: Dict[str, Any], training: bool = True) -> Dict[str, Any]:\n # --- Mask videos ---\n vid_embed = self.patch_embed(batch[""video_tokens""])\n if training:\n batch_size = vid_embed.shape[0]\n _rng_prob, *_rngs_mask = jax.random.split(batch[""mask_rng""], batch_size + 1)\n mask_prob = jax.random.uniform(\n _rng_prob, shape=(batch_size,), minval=self.mask_limit\n )\n per_sample_shape = vid_embed.shape[1:-1]\n mask = jax.vmap(\n lambda rng, prob: jax.random.bernoulli(rng, prob, per_sample_shape),\n in_axes=(0, 0),\n )(jnp.asarray(_rngs_mask), mask_prob)\n mask = mask.at[:, 0].set(False)\n vid_embed = jnp.where(jnp.expand_dims(mask, -1), self.mask_token, vid_embed)\n else:\n mask = None\n\n # --- Predict transition ---\n act_embed = self.action_up(batch[""latent_actions""])\n vid_embed += jnp.pad(act_embed, ((0, 0), (1, 0), (0, 0), (0, 0)))\n logits = self.dynamics(vid_embed)\n return dict(token_logits=logits, mask=mask)\n\n\nclass DynamicsAutoregressive(nn.Module):\n """"""Autoregressive (causal) dynamics model""""""\n\n model_dim: int\n ffn_dim: int\n num_latents: int\n num_blocks: int\n num_heads: int\n dropout: float\n param_dtype: jnp.dtype\n dtype: jnp.dtype\n use_flash_attention: bool\n\n def setup(self):\n self.dynamics = STTransformer(\n self.model_dim,\n self.ffn_dim,\n self.num_latents,\n self.num_blocks,\n self.num_heads,\n self.dropout,\n self.param_dtype,\n self.dtype,\n self.use_flash_attention,\n spatial_bert=False,\n )\n self.patch_embed = nn.Embed(self.num_latents, self.model_dim)\n self.action_up = nn.Dense(\n self.model_dim,\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n )\n\n def __call__(self, batch: Dict[str, Any], training: bool = True) -> Dict[str, Any]:\n vid_embed = self.patch_embed(batch[""video_tokens""])\n act_embed = 
self.action_up(batch[""latent_actions""])\n vid_embed += jnp.pad(act_embed, ((0, 0), (1, 0), (0, 0), (0, 0)))\n # vid_embed_padded = jnp.pad(vid_embed, ((0, 0), (1, 0), (1, 0), (0, 0)))\n # logits = self.dynamics(vid_embed_padded)[:, :-1, :-1]\n vid_embed_padded = jnp.pad(vid_embed, ((0, 0), (0, 0), (1, 0), (0, 0)))\n\n # FIXME mihir: HACK\n # rng1, _rng = jax.random.split(batch[""mask_rng""])\n # noise = jax.random.normal(_rng, vid_embed_padded.shape)\n # logits = self.dynamics(noise)[:, :, :-1]\n\n rng1, _rng = jax.random.split(batch[""mask_rng""])\n noise = 0.25 * jax.random.normal(_rng, vid_embed_padded.shape)\n logits = self.dynamics(vid_embed_padded + noise)[:, :, :-1]\n\n # logits = self.dynamics(vid_embed_padded)[:, :, :-1]\n\n mask = jnp.ones(vid_embed.shape[:-1])\n return dict(token_logits=logits, mask=mask)\n",python,tab +60,112301,"slurm/dev/mihir/horeka/causal_fit_modelsizes/train_dynamics_new_arch_80M.sbatch",0,0,"",shellscript,tab +61,115045,"slurm/dev/mihir/horeka/causal_fit_modelsizes/train_dynamics_new_arch_255M.sbatch",0,0,"#!/usr/bin/env bash\n\n#SBATCH --nodes=1\n#SBATCH --ntasks-per-node=1\n#SBATCH --time=00:10:00\n#SBATCH --partition=dev_accelerated\n#SBATCH --cpus-per-task=5\n#SBATCH --gres=gpu:1\n#SBATCH --output=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir/%x_%j.log\n#SBATCH --error=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir/%x_%j.log\n#SBATCH --job-name=train_dyn_causal_255M\n\n# Log the sbatch script\ncat $0\n\nmodule unload mpi/openmpi/5.0\nmodule unload devel/cuda/12.4\nsource .venv/bin/activate\n\narray_records_dir=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_new/open_ai_minecraft_arrayrecords_chunked\n\njob_name=$SLURM_JOB_NAME\nslurm_job_id=$SLURM_JOB_ID\n\nCHECKPOINT_DIR=$ws_dir/checkpoints/$job_name/$slurm_job_id\nmkdir -p $CHECKPOINT_DIR\n\ntokenizer_ckpt_dir=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data/checkpoints/train_tokenizer_lr_sweep_1e-4_larger_ffn/\n\nenv | grep SLURM\n\nsrun python train_dynamics.py \\n --save_ckpt \\n --num_steps=50 \\n --warmup_steps=0 \\n --wsd_decay_steps=0 \\n --ckpt_dir $CHECKPOINT_DIR \\n --batch_size=12 \\n --init_lr=1e-3 \\n --max_lr=1e-3 \\n --log_image_interval=100 \\n --log \\n --log_checkpoint_interval=100 \\n --name=dynamics-causal-255M-$slurm_job_id \\n --tags dynamics causal 255M \\n --entity instant-uv \\n --project jafar \\n --tokenizer_checkpoint=$tokenizer_ckpt_dir \\n --data_dir $array_records_dir \\n --dyna_dim=1024 \\n --dyna_num_blocks=16 \\n --dyna_num_heads=16 \\n --dyna_ffn_dim=4096",shellscript,tab +62,122740,"slurm/dev/mihir/horeka/causal_fit_modelsizes/train_dynamics_new_arch_180M.sbatch",0,0,"#!/usr/bin/env bash\n\n#SBATCH --nodes=1\n#SBATCH --ntasks-per-node=1\n#SBATCH --time=00:10:00\n#SBATCH --partition=dev_accelerated\n#SBATCH --cpus-per-task=5\n#SBATCH --gres=gpu:1\n#SBATCH --output=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir/%x_%j.log\n#SBATCH --error=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir/%x_%j.log\n#SBATCH --job-name=train_dyn_causal_180M\n\n# Log the sbatch script\ncat $0\n\nmodule unload mpi/openmpi/5.0\nmodule unload devel/cuda/12.4\nsource .venv/bin/activate\n\narray_records_dir=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_new/open_ai_minecraft_arrayrecords_chunked\n\njob_name=$SLURM_JOB_NAME\nslurm_job_id=$SLURM_JOB_ID\n\nCHECKPOINT_DIR=$ws_dir/checkpoints/$job_name/$slurm_job_id\nmkdir -p 
$CHECKPOINT_DIR\n\ntokenizer_ckpt_dir=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data/checkpoints/train_tokenizer_lr_sweep_1e-4_larger_ffn/\n\nenv | grep SLURM\n\nsrun python train_dynamics.py \\n --save_ckpt \\n --num_steps=50 \\n --warmup_steps=0 \\n --wsd_decay_steps=0 \\n --ckpt_dir $CHECKPOINT_DIR \\n --batch_size=12 \\n --init_lr=1e-3 \\n --max_lr=1e-3 \\n --log_image_interval=100 \\n --log \\n --log_checkpoint_interval=100 \\n --name=dynamics-causal-180M-$slurm_job_id \\n --tags dynamics causal 180M \\n --entity instant-uv \\n --project jafar \\n --tokenizer_checkpoint=$tokenizer_ckpt_dir \\n --data_dir $array_records_dir \\n --dyna_dim=768 \\n --dyna_num_blocks=16 \\n --dyna_num_heads=12 \\n --dyna_ffn_dim=3072",shellscript,tab +63,123992,"slurm/dev/mihir/horeka/causal_fit_modelsizes/train_dynamics_new_arch_255M.sbatch",0,0,"",shellscript,tab +64,124570,"slurm/dev/mihir/horeka/causal_fit_modelsizes/train_dynamics_new_arch_500M.sbatch",0,0,"#!/usr/bin/env bash\n\n#SBATCH --nodes=1\n#SBATCH --ntasks-per-node=1\n#SBATCH --time=00:10:00\n#SBATCH --partition=dev_accelerated\n#SBATCH --cpus-per-task=5\n#SBATCH --gres=gpu:1\n#SBATCH --output=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir/%x_%j.log\n#SBATCH --error=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir/%x_%j.log\n#SBATCH --job-name=train_dyn_causal_500M\n\n# Log the sbatch script\ncat $0\n\nmodule unload mpi/openmpi/5.0\nmodule unload devel/cuda/12.4\nsource .venv/bin/activate\n\narray_records_dir=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_new/open_ai_minecraft_arrayrecords_chunked\n\njob_name=$SLURM_JOB_NAME\nslurm_job_id=$SLURM_JOB_ID\n\nCHECKPOINT_DIR=$ws_dir/checkpoints/$job_name/$slurm_job_id\nmkdir -p $CHECKPOINT_DIR\n\ntokenizer_ckpt_dir=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data/checkpoints/train_tokenizer_lr_sweep_1e-4_larger_ffn/\n\nenv | grep SLURM\n\nsrun python train_dynamics.py \\n --save_ckpt \\n --num_steps=50 \\n --warmup_steps=0 \\n --wsd_decay_steps=0 \\n --ckpt_dir $CHECKPOINT_DIR \\n --batch_size=12 \\n --init_lr=1e-3 \\n --max_lr=1e-3 \\n --log_image_interval=100 \\n --log \\n --log_checkpoint_interval=100 \\n --name=dynamics-causal-500M-$slurm_job_id \\n --tags dynamics causal 500M \\n --entity instant-uv \\n --project jafar \\n --tokenizer_checkpoint=$tokenizer_ckpt_dir \\n --data_dir $array_records_dir \\n --dyna_dim=1536 \\n --dyna_num_blocks=24 \\n --dyna_num_heads=24 \\n --dyna_ffn_dim=6144",shellscript,tab +65,125866,"slurm/dev/mihir/horeka/causal_fit_modelsizes/train_dynamics_new_arch_356M.sbatch",0,0,"#!/usr/bin/env bash\n\n#SBATCH --nodes=1\n#SBATCH --ntasks-per-node=1\n#SBATCH --time=00:10:00\n#SBATCH --partition=dev_accelerated\n#SBATCH --cpus-per-task=5\n#SBATCH --gres=gpu:1\n#SBATCH --output=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir/%x_%j.log\n#SBATCH --error=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir/%x_%j.log\n#SBATCH --job-name=train_dyn_causal_356M\n\n# Log the sbatch script\ncat $0\n\nmodule unload mpi/openmpi/5.0\nmodule unload devel/cuda/12.4\nsource .venv/bin/activate\n\narray_records_dir=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_new/open_ai_minecraft_arrayrecords_chunked\n\njob_name=$SLURM_JOB_NAME\nslurm_job_id=$SLURM_JOB_ID\n\nCHECKPOINT_DIR=$ws_dir/checkpoints/$job_name/$slurm_job_id\nmkdir -p 
$CHECKPOINT_DIR\n\ntokenizer_ckpt_dir=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data/checkpoints/train_tokenizer_lr_sweep_1e-4_larger_ffn/\n\nenv | grep SLURM\n\nsrun python train_dynamics.py \\n --save_ckpt \\n --num_steps=50 \\n --warmup_steps=0 \\n --wsd_decay_steps=0 \\n --ckpt_dir $CHECKPOINT_DIR \\n --batch_size=12 \\n --init_lr=1e-3 \\n --max_lr=1e-3 \\n --log_image_interval=100 \\n --log \\n --log_checkpoint_interval=100 \\n --name=dynamics-causal-356M-$slurm_job_id \\n --tags dynamics causal 356M \\n --entity instant-uv \\n --project jafar \\n --tokenizer_checkpoint=$tokenizer_ckpt_dir \\n --data_dir $array_records_dir \\n --dyna_dim=1024 \\n --dyna_num_blocks=24 \\n --dyna_num_heads=16 \\n --dyna_ffn_dim=4096",shellscript,tab +66,131570,"train_dynamics.py",0,0,"from dataclasses import dataclass, field\nimport os\n\nimport einops\nfrom flax.training.train_state import TrainState\nfrom jax.sharding import Mesh, PartitionSpec, NamedSharding\nfrom jax.experimental.mesh_utils import create_device_mesh\nimport optax\nimport orbax.checkpoint as ocp\nimport numpy as np\nimport dm_pix as pix\nimport jax\nimport jax.numpy as jnp\nimport tyro\nimport wandb\nimport grain\n\nfrom genie import Genie, restore_genie_components\nfrom utils.dataloader import get_dataloader\nfrom utils.lr_utils import get_lr_schedule\nfrom utils.parameter_utils import count_parameters_by_component\n\n\n\n@dataclass\nclass Args:\n # Experiment\n num_steps: int = 200_000\n seed: int = 0\n seq_len: int = 16\n image_channels: int = 3\n image_height: int = 90\n image_width: int = 160\n data_dir: str = """"\n save_ckpt: bool = False\n restore_ckpt: bool = False\n # Optimization\n batch_size: int = 36\n init_lr: float = 0.0\n max_lr: float = 3e-5\n decay_end: float = 0.0\n wsd_decay_steps: int = (\n 10000 # NOTE: wsd_decay_steps will only be used when using a wsd-schedule\n )\n wsd_decay_steps: int = (\n 10000 # NOTE: wsd_decay_steps will only be used when using a wsd-schedule\n )\n warmup_steps: int = 5000\n lr_schedule: str = ""wsd"" # supported options: wsd, cos\n # Tokenizer\n tokenizer_dim: int = 512\n tokenizer_ffn_dim: int = 2048\n latent_patch_dim: int = 32\n num_patch_latents: int = 1024\n patch_size: int = 4\n tokenizer_num_blocks: int = 4\n tokenizer_num_heads: int = 8\n tokenizer_checkpoint: str = """"\n # LAM\n lam_dim: int = 512\n lam_ffn_dim: int = 2048\n latent_action_dim: int = 32\n num_latent_actions: int = 6\n lam_patch_size: int = 16\n lam_num_blocks: int = 4\n lam_num_heads: int = 8\n lam_checkpoint: str = """"\n # Dynamics\n dyna_dim: int = 512\n dyna_ffn_dim: int = 2048\n dyna_num_blocks: int = 6\n dyna_num_heads: int = 8\n dropout: float = 0.0\n mask_limit: float = 0.5\n use_maskgit: bool = False\n param_dtype: jnp.dtype = jnp.float32\n dtype: jnp.dtype = jnp.bfloat16\n use_flash_attention: bool = True\n # Logging\n log: bool = False\n entity: str = """"\n project: str = """"\n name: str = ""train_dynamics""\n tags: list[str] = field(default_factory=lambda: [""dynamics""])\n log_interval: int = 5\n log_image_interval: int = 250\n ckpt_dir: str = """"\n log_checkpoint_interval: int = 25000\n log_checkpoint_keep_period: int = 20000\n log_gradients: bool = False\n wandb_id: str = """"\n\n\nargs = tyro.cli(Args)\n\n\ndef dynamics_loss_fn(params, state, inputs):\n """"""Compute masked dynamics loss""""""\n inputs[""videos""] = inputs[""videos""].astype(args.dtype) / 255.0\n outputs = state.apply_fn(\n params,\n inputs,\n training=True,\n rngs={""params"": inputs[""rng""], ""dropout"": 
inputs[""dropout_rng""]},\n )\n mask = outputs[""mask""]\n outputs[""token_logits""] = outputs[""token_logits""].astype(jnp.float32)\n outputs[""recon""] = outputs[""recon""].astype(jnp.float32)\n logits = outputs[""token_logits""]\n targets = outputs[""video_tokens""]\n\n # if not args.use_maskgit:\n # logits = outputs[""token_logits""][:, :, :-1]\n # targets = outputs[""video_tokens""][:, :, 1:]\n # mask = outputs[""mask""][:, :, 1:]\n\n ce_loss = optax.softmax_cross_entropy_with_integer_labels(logits, targets)\n ce_loss = (mask * ce_loss).sum() / mask.sum()\n acc = logits.argmax(-1) == targets\n acc = (mask * acc).sum() / mask.sum()\n select_probs = jax.nn.softmax(logits)\n gt = inputs[""videos""].clip(0, 1).reshape(-1, *inputs[""videos""].shape[2:])\n recon = outputs[""recon""].clip(0, 1).reshape(-1, *outputs[""recon""].shape[2:])\n psnr = pix.psnr(gt, recon).mean() # type: ignore\n ssim = pix.ssim(gt, recon).mean() # type: ignore\n psnr = pix.psnr(gt, recon).mean() # type: ignore\n ssim = pix.ssim(gt, recon).mean() # type: ignore\n _, index_counts_lam = jnp.unique_counts(\n jnp.ravel(outputs[""lam_indices""]), size=args.num_latent_actions, fill_value=0\n )\n _, index_counts_tokenizer = jnp.unique_counts(\n jnp.ravel(outputs[""video_tokens""]), size=args.num_patch_latents, fill_value=0\n )\n codebook_usage_lam = (index_counts_lam != 0).mean()\n codebook_usage_tokenizer = (index_counts_tokenizer != 0).mean()\n metrics = dict(\n cross_entropy_loss=ce_loss,\n masked_token_accuracy=acc,\n select_logit=logits.max(-1).mean(),\n select_p=select_probs.max(-1).mean(),\n entropy=jax.scipy.special.entr(select_probs).sum(-1).mean(),\n psnr=psnr,\n ssim=ssim,\n codebook_usage_lam=codebook_usage_lam,\n codebook_usage_tokenizer=codebook_usage_tokenizer,\n )\n return ce_loss, (outputs[""recon""], metrics)\n\n\n@jax.jit\ndef train_step(state, inputs):\n """"""Update state and compute metrics""""""\n grad_fn = jax.value_and_grad(dynamics_loss_fn, has_aux=True, allow_int=True)\n (loss, (recon, metrics)), grads = grad_fn(state.params, state, inputs)\n state = state.apply_gradients(grads=grads)\n if args.log_gradients:\n metrics[""gradients_std/""] = jax.tree.map(\n lambda x: x.std(), grads[""params""][""dynamics""]\n )\n return state, loss, recon, metrics\n\n\nif __name__ == ""__main__"":\n jax.distributed.initialize()\n num_devices = jax.device_count()\n if num_devices == 0:\n raise ValueError(""No JAX devices found."")\n print(f""Running on {num_devices} devices."")\n\n if args.batch_size % num_devices != 0:\n raise ValueError(\n f""Global batch size {args.batch_size} must be divisible by ""\n f""number of devices {num_devices}.""\n )\n\n per_device_batch_size_for_init = args.batch_size // num_devices\n\n rng = jax.random.PRNGKey(args.seed)\n\n # --- Initialize model ---\n genie = Genie(\n # Tokenizer\n in_dim=args.image_channels,\n tokenizer_dim=args.tokenizer_dim,\n tokenizer_ffn_dim=args.tokenizer_ffn_dim,\n latent_patch_dim=args.latent_patch_dim,\n num_patch_latents=args.num_patch_latents,\n patch_size=args.patch_size,\n tokenizer_num_blocks=args.tokenizer_num_blocks,\n tokenizer_num_heads=args.tokenizer_num_heads,\n # LAM\n lam_dim=args.lam_dim,\n lam_ffn_dim=args.lam_ffn_dim,\n latent_action_dim=args.latent_action_dim,\n num_latent_actions=args.num_latent_actions,\n lam_patch_size=args.lam_patch_size,\n lam_num_blocks=args.lam_num_blocks,\n lam_num_heads=args.lam_num_heads,\n lam_co_train=not args.lam_checkpoint,\n # Dynamics\n dyna_dim=args.dyna_dim,\n dyna_ffn_dim=args.dyna_ffn_dim,\n 
dyna_num_blocks=args.dyna_num_blocks,\n dyna_num_heads=args.dyna_num_heads,\n dropout=args.dropout,\n mask_limit=args.mask_limit,\n use_maskgit=args.use_maskgit,\n param_dtype=args.param_dtype,\n dtype=args.dtype,\n use_flash_attention=args.use_flash_attention,\n )\n rng, _rng = jax.random.split(rng)\n image_shape = (args.image_height, args.image_width, args.image_channels)\n dummy_inputs = dict(\n videos=jnp.zeros(\n (per_device_batch_size_for_init, args.seq_len, *image_shape),\n dtype=args.dtype,\n ),\n action=jnp.zeros(\n (per_device_batch_size_for_init, args.seq_len), dtype=args.dtype\n ),\n mask_rng=_rng,\n )\n rng, _rng = jax.random.split(rng)\n init_params = genie.init(_rng, dummy_inputs)\n\n param_counts = count_parameters_by_component(init_params)\n\n if args.log and jax.process_index() == 0:\n wandb_init_kwargs = {\n ""entity"": args.entity,\n ""project"": args.project,\n ""name"": args.name,\n ""tags"": args.tags,\n ""group"": ""debug"",\n ""config"": args,\n }\n\n if args.wandb_id:\n wandb_init_kwargs.update(\n {\n ""id"": args.wandb_id,\n ""resume"": ""allow"",\n }\n )\n wandb.init(**wandb_init_kwargs)\n\n wandb.config.update({""model_param_count"": param_counts})\n\n print(""Parameter counts:"")\n print(param_counts)\n\n # --- Initialize optimizer ---\n lr_schedule = get_lr_schedule(\n args.lr_schedule,\n args.init_lr,\n args.max_lr,\n args.decay_end,\n args.num_steps,\n args.warmup_steps,\n args.wsd_decay_steps,\n )\n tx = optax.adamw(\n learning_rate=lr_schedule,\n b1=0.9,\n b2=0.9,\n weight_decay=1e-4,\n mu_dtype=args.dtype,\n )\n train_state = TrainState.create(apply_fn=genie.apply, params=init_params, tx=tx)\n\n device_mesh_arr = create_device_mesh((num_devices,))\n mesh = Mesh(devices=device_mesh_arr, axis_names=(""data"",))\n\n replicated_sharding = NamedSharding(mesh, PartitionSpec())\n videos_sharding = NamedSharding(mesh, PartitionSpec(""data"", None, None, None, None))\n train_state = jax.device_put(train_state, replicated_sharding)\n\n # --- Initialize checkpoint manager ---\n step = 0\n handler_registry = ocp.handlers.DefaultCheckpointHandlerRegistry()\n handler_registry.add(\n ""model_state"", ocp.args.StandardSave, ocp.handlers.StandardCheckpointHandler\n )\n handler_registry.add(\n ""model_state"", ocp.args.StandardRestore, ocp.handlers.StandardCheckpointHandler\n )\n handler_registry.add(""dataloader_state"", grain.checkpoint.CheckpointSave, grain.checkpoint.CheckpointHandler) # type: ignore\n handler_registry.add(""dataloader_state"", grain.checkpoint.CheckpointRestore, grain.checkpoint.CheckpointHandler) # type: ignore\n\n checkpoint_options = ocp.CheckpointManagerOptions(\n save_interval_steps=args.log_checkpoint_interval,\n max_to_keep=3,\n keep_period=args.log_checkpoint_keep_period,\n step_format_fixed_length=6,\n cleanup_tmp_directories=True,\n )\n\n checkpoint_manager = ocp.CheckpointManager(\n args.ckpt_dir,\n options=checkpoint_options,\n handler_registry=handler_registry,\n )\n\n # --- Create DataLoaderIterator from dataloader ---\n array_record_files = [\n os.path.join(args.data_dir, x)\n for x in os.listdir(args.data_dir)\n if x.endswith("".array_record"")\n ]\n grain_dataloader = get_dataloader(\n array_record_files,\n args.seq_len,\n # NOTE: We deliberately pass the global batch size\n # The dataloader shards the dataset across all processes\n args.batch_size,\n *image_shape,\n num_workers=8,\n prefetch_buffer_size=1,\n seed=args.seed,\n )\n initial_state = grain_dataloader._create_initial_state()\n grain_iterator = 
grain.DataLoaderIterator(grain_dataloader, initial_state)\n\n # --- Restore checkpoint ---\n if args.restore_ckpt:\n # Restore full dynamics model\n abstract_train_state = jax.tree_util.tree_map(\n ocp.utils.to_shape_dtype_struct, train_state\n )\n restored = checkpoint_manager.restore(\n checkpoint_manager.latest_step(),\n args=ocp.args.Composite(\n model_state=ocp.args.StandardRestore(abstract_train_state),\n dataloader_state=grain.checkpoint.CheckpointRestore(grain_iterator),\n ),\n )\n train_state = restored[""model_state""]\n grain_iterator = restored[""dataloader_state""]\n step = checkpoint_manager.latest_step() or 0\n print(f""Restored dataloader and model state from step {step}"")\n else:\n # Restore from pre-trained tokenizer (and LAM)\n train_state = restore_genie_components(\n train_state, replicated_sharding, dummy_inputs, rng, args\n )\n\n # --- TRAIN LOOP ---\n dataloader = (jax.make_array_from_process_local_data(videos_sharding, elem) for elem in grain_iterator) # type: ignore\n while step < args.num_steps:\n for videos in dataloader:\n # for i in range(videos.shape[0]):\n # video_i = videos[i:i+1] # shape (1, T, H, W, C)\n # np.save(f""overfit_dir/oai_sample_seed69_{i}.npy"", video_i)\n # jax.debug.breakpoint()\n # videos = np.load(""overfit_dir/oai_sample_seed69_1.npy"") # *255.\n # videos = videos.astype(np.uint8)\n # videos = jax.make_array_from_process_local_data(videos_sharding, videos)\n # while True:\n # --- Train step ---\n rng, _rng, _rng_dropout, _rng_mask = jax.random.split(rng, 4)\n\n inputs = dict(\n videos=videos,\n rng=_rng,\n dropout_rng=_rng_dropout,\n mask_rng=_rng_mask,\n )\n train_state, loss, recon, metrics = train_step(train_state, inputs)\n metrics[""lr""] = lr_schedule(step)\n print(f""Step {step}, loss: {loss}"")\n step += 1\n\n # --- Logging ---\n if args.log:\n if step % args.log_interval == 0 and jax.process_index() == 0:\n wandb.log(\n {\n ""loss"": loss,\n ""step"": step,\n **metrics,\n }\n )\n if step % args.log_image_interval == 0:\n gt_seq = inputs[""videos""][0].astype(jnp.float32) / 255.0\n recon_seq = recon[0].clip(0, 1)\n comparison_seq = jnp.concatenate((gt_seq, recon_seq), axis=1)\n comparison_seq = einops.rearrange(\n comparison_seq * 255, ""t h w c -> h (t w) c""\n )\n if jax.process_index() == 0:\n log_images = dict(\n image=wandb.Image(np.asarray(gt_seq[args.seq_len - 1])),\n recon=wandb.Image(np.asarray(recon_seq[args.seq_len - 1])),\n true_vs_recon=wandb.Image(\n np.asarray(comparison_seq.astype(np.uint8))\n ),\n )\n wandb.log(log_images)\n # --- Checkpointing ---\n if args.save_ckpt and step % args.log_checkpoint_interval == 0:\n checkpoint_manager.save(\n step,\n args=ocp.args.Composite(\n model_state=ocp.args.StandardSave(train_state),\n dataloader_state=grain.checkpoint.CheckpointSave(\n grain_iterator\n ),\n ),\n )\n print(f""Saved checkpoint at step {step}"")\n if step >= args.num_steps:\n break\n\n checkpoint_manager.close()\n",python,tab +67,135709,"train_dynamics.py",1188,0,"",python,selection_mouse +68,136207,"train_dynamics.py",1225,0,"",python,selection_mouse +69,136222,"train_dynamics.py",1224,0,"",python,selection_command +70,137208,"train_dynamics.py",1137,89,"",python,content +71,137230,"train_dynamics.py",1141,0,"",python,selection_command +72,137433,"train_dynamics.py",1112,0,"",python,selection_command +73,137987,"train_dynamics.py",1108,29,"",python,content +74,137990,"train_dynamics.py",1112,0,"",python,selection_command +75,138166,"train_dynamics.py",1106,0,"",python,selection_command 
+76,138390,"train_dynamics.py",1112,0,"",python,selection_command +77,138700,"train_dynamics.py",1141,0,"",python,selection_command +78,139004,"train_dynamics.py",1201,0,"",python,selection_command +79,139517,"train_dynamics.py",1217,0,"",python,selection_command +80,139755,"train_dynamics.py",1246,0,"",python,selection_command +81,140244,"train_dynamics.py",1280,0,"",python,selection_command +82,140272,"train_dynamics.py",1311,0,"",python,selection_command +83,140298,"train_dynamics.py",1345,0,"",python,selection_command +84,140377,"train_dynamics.py",1369,0,"",python,selection_command +85,140424,"train_dynamics.py",1403,0,"",python,selection_command +86,140426,"train_dynamics.py",1436,0,"",python,selection_command +87,140464,"train_dynamics.py",1471,0,"",python,selection_command +88,140481,"train_dynamics.py",1481,0,"",python,selection_command +89,140482,"train_dynamics.py",1504,0,"",python,selection_command +90,140543,"train_dynamics.py",1532,0,"",python,selection_command +91,140545,"train_dynamics.py",1564,0,"",python,selection_command +92,140608,"train_dynamics.py",1596,0,"",python,selection_command +93,141478,"train_dynamics.py",1625,0,"",python,selection_command +94,141980,"train_dynamics.py",1653,0,"",python,selection_command +95,141995,"train_dynamics.py",1680,0,"",python,selection_command +96,142030,"train_dynamics.py",1709,0,"",python,selection_command +97,142046,"train_dynamics.py",1724,0,"",python,selection_command +98,142092,"train_dynamics.py",1748,0,"",python,selection_command +99,142111,"train_dynamics.py",1777,0,"",python,selection_command +100,142137,"train_dynamics.py",1806,0,"",python,selection_command +101,142207,"train_dynamics.py",1834,0,"",python,selection_command +102,142208,"train_dynamics.py",1859,0,"",python,selection_command +103,142237,"train_dynamics.py",1887,0,"",python,selection_command +104,142263,"train_dynamics.py",1917,0,"",python,selection_command +105,142294,"train_dynamics.py",1958,0,"",python,selection_command +106,142354,"train_dynamics.py",1994,0,"",python,selection_command +107,142355,"train_dynamics.py",2031,0,"",python,selection_command +108,142397,"train_dynamics.py",2045,0,"",python,selection_command +109,142431,"train_dynamics.py",2067,0,"",python,selection_command +110,142462,"train_dynamics.py",2088,0,"",python,selection_command +111,143419,"train_dynamics.py",2110,0,"",python,selection_command +112,143920,"train_dynamics.py",2143,0,"",python,selection_command +113,143944,"train_dynamics.py",2209,0,"",python,selection_command +114,143988,"train_dynamics.py",2235,0,"",python,selection_command +115,144001,"train_dynamics.py",2269,0,"",python,selection_command +116,144032,"train_dynamics.py",2292,0,"",python,selection_command +117,144252,"train_dynamics.py",2333,0,"",python,selection_command +118,144644,"train_dynamics.py",2377,0,"",python,selection_command +119,144871,"train_dynamics.py",2333,0,"",python,selection_command +120,145364,"train_dynamics.py",2292,0,"",python,selection_command +121,145430,"train_dynamics.py",2269,0,"",python,selection_command +122,145431,"train_dynamics.py",2235,0,"",python,selection_command +123,145438,"train_dynamics.py",2209,0,"",python,selection_command +124,145486,"train_dynamics.py",2143,0,"",python,selection_command +125,145506,"train_dynamics.py",2110,0,"",python,selection_command +126,145556,"train_dynamics.py",2088,0,"",python,selection_command +127,145557,"train_dynamics.py",2067,0,"",python,selection_command +128,145593,"train_dynamics.py",2045,0,"",python,selection_command 
+129,145616,"train_dynamics.py",2031,0,"",python,selection_command +130,145694,"train_dynamics.py",1994,0,"",python,selection_command +131,145700,"train_dynamics.py",1958,0,"",python,selection_command +132,145706,"train_dynamics.py",1917,0,"",python,selection_command +133,145735,"train_dynamics.py",1887,0,"",python,selection_command +134,145779,"train_dynamics.py",1859,0,"",python,selection_command +135,145801,"train_dynamics.py",1834,0,"",python,selection_command +136,145840,"train_dynamics.py",1806,0,"",python,selection_command +137,145875,"train_dynamics.py",1777,0,"",python,selection_command +138,145893,"train_dynamics.py",1748,0,"",python,selection_command +139,145942,"train_dynamics.py",1724,0,"",python,selection_command +140,145958,"train_dynamics.py",1709,0,"",python,selection_command +141,146032,"train_dynamics.py",1680,0,"",python,selection_command +142,146037,"train_dynamics.py",1653,0,"",python,selection_command +143,146040,"train_dynamics.py",1625,0,"",python,selection_command +144,146077,"train_dynamics.py",1596,0,"",python,selection_command +145,146145,"train_dynamics.py",1564,0,"",python,selection_command +146,146146,"train_dynamics.py",1532,0,"",python,selection_command +147,146159,"train_dynamics.py",1504,0,"",python,selection_command +148,146198,"train_dynamics.py",1481,0,"",python,selection_command +149,146272,"train_dynamics.py",1471,0,"",python,selection_command +150,146272,"train_dynamics.py",1436,0,"",python,selection_command +151,146289,"train_dynamics.py",1403,0,"",python,selection_command +152,146314,"train_dynamics.py",1369,0,"",python,selection_command +153,146389,"train_dynamics.py",1345,0,"",python,selection_command +154,146391,"train_dynamics.py",1311,0,"",python,selection_command +155,146407,"train_dynamics.py",1280,0,"",python,selection_command +156,146443,"train_dynamics.py",1246,0,"",python,selection_command +157,146602,"train_dynamics.py",1217,0,"",python,selection_command +158,147136,"train_dynamics.py",1201,0,"",python,selection_command +159,147137,"train_dynamics.py",1141,0,"",python,selection_command +160,147171,"train_dynamics.py",1112,0,"",python,selection_command +161,147200,"train_dynamics.py",1106,0,"",python,selection_command +162,147230,"train_dynamics.py",1023,0,"",python,selection_command +163,147308,"train_dynamics.py",994,0,"",python,selection_command +164,147326,"train_dynamics.py",967,0,"",python,selection_command +165,147338,"train_dynamics.py",942,0,"",python,selection_command +166,147346,"train_dynamics.py",917,0,"",python,selection_command +167,147418,"train_dynamics.py",892,0,"",python,selection_command +168,147419,"train_dynamics.py",873,0,"",python,selection_command +169,147441,"train_dynamics.py",842,0,"",python,selection_command +170,147553,"train_dynamics.py",873,0,"",python,selection_command +171,148052,"train_dynamics.py",892,0,"",python,selection_command +172,148064,"train_dynamics.py",917,0,"",python,selection_command +173,148099,"train_dynamics.py",942,0,"",python,selection_command +174,148173,"train_dynamics.py",967,0,"",python,selection_command +175,148176,"train_dynamics.py",994,0,"",python,selection_command +176,148186,"train_dynamics.py",1023,0,"",python,selection_command +177,148219,"train_dynamics.py",1106,0,"",python,selection_command +178,148279,"train_dynamics.py",1112,0,"",python,selection_command +179,148280,"train_dynamics.py",1141,0,"",python,selection_command +180,148308,"train_dynamics.py",1201,0,"",python,selection_command +181,148367,"train_dynamics.py",1217,0,"",python,selection_command 
+182,148513,"train_dynamics.py",1246,0,"",python,selection_command +183,151575,"train_dynamics.py",1263,0,"",python,selection_command +184,151818,"train_dynamics.py",1246,0,"",python,selection_command +185,152042,"train_dynamics.py",1263,0,"",python,selection_command +186,152146,"train_dynamics.py",1246,0,"",python,selection_command +187,152299,"train_dynamics.py",1263,0,"",python,selection_command +188,152407,"train_dynamics.py",1246,0,"",python,selection_command +189,152568,"train_dynamics.py",1263,0,"",python,selection_command +190,152684,"train_dynamics.py",1246,0,"",python,selection_command +191,152843,"train_dynamics.py",1263,0,"",python,selection_command +192,152953,"train_dynamics.py",1246,0,"",python,selection_command +193,153088,"train_dynamics.py",1263,0,"",python,selection_command +194,153205,"train_dynamics.py",1246,0,"",python,selection_command +195,153319,"train_dynamics.py",1263,0,"",python,selection_command +196,153463,"train_dynamics.py",1246,0,"",python,selection_command +197,153565,"train_dynamics.py",1263,0,"",python,selection_command +198,153669,"train_dynamics.py",1246,0,"",python,selection_command +199,153785,"train_dynamics.py",1263,0,"",python,selection_command +200,153896,"train_dynamics.py",1246,0,"",python,selection_command +201,153990,"train_dynamics.py",1263,0,"",python,selection_command +202,154131,"train_dynamics.py",1246,0,"",python,selection_command +203,154201,"train_dynamics.py",1263,0,"",python,selection_command +204,154365,"train_dynamics.py",1246,0,"",python,selection_command +205,154423,"train_dynamics.py",1263,0,"",python,selection_command +206,154535,"train_dynamics.py",1246,0,"",python,selection_command +207,154608,"train_dynamics.py",1263,0,"",python,selection_command +208,154777,"train_dynamics.py",1246,0,"",python,selection_command +209,154847,"train_dynamics.py",1263,0,"",python,selection_command +210,154950,"train_dynamics.py",1246,0,"",python,selection_command +211,155028,"train_dynamics.py",1263,0,"",python,selection_command +212,155147,"train_dynamics.py",1246,0,"",python,selection_command +213,155261,"train_dynamics.py",1263,0,"",python,selection_command +214,155323,"train_dynamics.py",1246,0,"",python,selection_command +215,155466,"train_dynamics.py",1263,0,"",python,selection_command +216,155522,"train_dynamics.py",1246,0,"",python,selection_command +217,155715,"train_dynamics.py",1263,0,"",python,selection_command +218,155732,"train_dynamics.py",1246,0,"",python,selection_command +219,155948,"train_dynamics.py",1238,0,"",python,selection_command +220,155992,"train_dynamics.py",1246,0,"",python,selection_command +221,156222,"train_dynamics.py",1238,0,"",python,selection_command +222,156388,"train_dynamics.py",1246,0,"",python,selection_command +223,156565,"train_dynamics.py",1238,0,"",python,selection_command +224,156942,"train_dynamics.py",1236,0,"",python,selection_command +225,157390,"train_dynamics.py",1238,0,"",python,selection_command +226,157688,"train_dynamics.py",1246,0,"",python,selection_command +227,158159,"train_dynamics.py",1238,0,"",python,selection_command +228,158541,"train_dynamics.py",1246,0,"",python,selection_command +229,158770,"train_dynamics.py",1263,0,"",python,selection_command +230,159045,"train_dynamics.py",1246,0,"",python,selection_command +231,159249,"train_dynamics.py",1263,0,"",python,selection_command +232,159353,"train_dynamics.py",1246,0,"",python,selection_command +233,159549,"train_dynamics.py",1263,0,"",python,selection_command +234,159637,"train_dynamics.py",1246,0,"",python,selection_command 
+235,181225,"TERMINAL",0,0,"idling",,terminal_command +236,181276,"TERMINAL",0,0,"]633;E;2025-07-24 14:03:15 idling;406cfb31-2341-454a-afa8-cae7781806b2]633;C",,terminal_output +237,181341,"TERMINAL",0,0,"[?1049h(B[?7hEvery 1.0s: sinfo_t_idlehkn1991.localdomain: Thu Jul 24 14:03:15 2025Partition dev_cpuonly:\t 3 nodes idle\rPartition cpuonly: 27 nodes idle\rPartition dev_accelerated:\t 0 nodes idle\rPartition accelerated:\t 1 nodes idle\rPartition dev_accelerated-h100 :\t 0 nodes idle\rPartition accelerated-h100:\t 1 nodes idle\rPartition large:\t 7 nodes idle",,terminal_output +238,182354,"TERMINAL",0,0,"7",,terminal_output +239,183403,"TERMINAL",0,0,"8",,terminal_output +240,183854,"TERMINAL",0,0,"[?1049l\r[?1l>]0;tum_cte0515@hkn1991:~/Projects/jafar]633;D;0",,terminal_output +241,185271,"TERMINAL",0,0,"idling",,terminal_command +242,185328,"TERMINAL",0,0,"]633;E;2025-07-24 14:03:19 idling;406cfb31-2341-454a-afa8-cae7781806b2]633;C[?1049h(B[?7hEvery 1.0s: sinfo_t_idlehkn1991.localdomain: Thu Jul 24 14:03:19 2025Partition dev_cpuonly:\t 3 nodes idle\rPartition cpuonly: 27 nodes idle\rPartition dev_accelerated:\t 0 nodes idle\rPartition accelerated:\t 1 nodes idle\rPartition dev_accelerated-h100 :\t 0 nodes idle\rPartition accelerated-h100:\t 1 nodes idle\rPartition large:\t 7 nodes idle",,terminal_output +243,186362,"TERMINAL",0,0,"21",,terminal_output +244,187408,"TERMINAL",0,0,"2",,terminal_output +245,187898,"TERMINAL",0,0,"[?1049l\r[?1l>]0;tum_cte0515@hkn1991:~/Projects/jafar]633;D;0",,terminal_output +246,205780,"TERMINAL",0,0,"salloc --time=01:00:00 --partition=dev_accelerated --nodes=1 --ntasks-per-node=1 --gres=gpu:1 --cpus-per-task=5",,terminal_command +247,205845,"TERMINAL",0,0,"]633;E;2025-07-24 14:03:40 salloc --time=01:00:00 --partition=dev_accelerated --nodes=1 --ntasks-per-node=1 --gres=gpu:1 --cpus-per-task=5;406cfb31-2341-454a-afa8-cae7781806b2]633;Csalloc: Pending job allocation 3373090\r\nsalloc: job 3373090 queued and waiting for resources\r\n",,terminal_output +248,237348,"TERMINAL",0,0,"salloc: job 3373090 has been allocated resources\r\nsalloc: Granted job allocation 3373090\r\n",,terminal_output +249,237462,"TERMINAL",0,0,"salloc: Waiting for resource configuration\r\n",,terminal_output +250,264504,"TERMINAL",0,0,"salloc: Nodes hkn0403 are ready for job\r\n",,terminal_output +251,265282,"TERMINAL",0,0,"]0;tum_cte0515@hkn0403:~/Projects/jafar[?2004h[tum_cte0515@hkn0403 jafar]$ ",,terminal_output +252,307553,"TERMINAL",0,0,"s",,terminal_output +253,307715,"TERMINAL",0,0,"[?25lo[?25h",,terminal_output +254,307779,"TERMINAL",0,0,"[?25lu[?25h[?25lr[?25h",,terminal_output +255,308062,"TERMINAL",0,0,"[?25lc[?25h",,terminal_output +256,308176,"TERMINAL",0,0,"[?25le[?25h",,terminal_output +257,308281,"TERMINAL",0,0,"[?25l [?25h",,terminal_output +258,308362,"TERMINAL",0,0,"[?25l.[?25h",,terminal_output +259,308474,"TERMINAL",0,0,"[?25lv[?25h",,terminal_output +260,308628,"TERMINAL",0,0,"env/",,terminal_output +261,308751,"TERMINAL",0,0,"[?25lb[?25h",,terminal_output +262,308864,"TERMINAL",0,0,"in/",,terminal_output +263,309164,"TERMINAL",0,0,"[?25lac[?25h",,terminal_output +264,309391,"TERMINAL",0,0,"tivate",,terminal_output +265,309741,"TERMINAL",0,0,"\r\n[?2004l\r]0;tum_cte0515@hkn0403:~/Projects/jafar[?2004h(jafar) [tum_cte0515@hkn0403 jafar]$ ",,terminal_output +266,310614,"TERMINAL",0,0,"[?25lqu[?25h[?25lu[?25h",,terminal_output +267,310814,"TERMINAL",0,0,"[?25leu[?25h",,terminal_output +268,311057,"TERMINAL",0,0,"[?25le[?25h\r\n[?2004l\r[?1049h(B[?7hEvery 1.0s: squeue 
--mehkn0403.localdomain: Thu Jul 24 14:05:25 2025JOBID PARTITION NAME USER ST\tTIME NODES NODELIST(REASON)3372631 accelerat train_dy tum_cte0 PD\t0:00\t 2 (Resources)3371237 accelerat train_dy tum_cte0 R 13:31:10\t 8 hkn[0618,0625-0626,0628-0631,0634]3371238 accelerat train_dy tum_cte0 R 13:31:10\t 2 hkn[0706,0710]3372629 accelerat train_dy tum_cte0 R37:36\t 8 hkn[0410,0429,0520,0607,0610,0810,0814,0817]3373090 dev_accel interact tum_cte0 R\t1:13\t 1 hkn0403",,terminal_output +269,311676,"TERMINAL",0,0,"[?1049l\r[?1l>]0;tum_cte0515@hkn0403:~/Projects/jafar[?2004h(jafar) [tum_cte0515@hkn0403 jafar]$ ",,terminal_output +270,312658,"TERMINAL",0,0,"[?25lsm[?25h[?25lm[?25h",,terminal_output +271,312849,"TERMINAL",0,0,"[?25li[?25h",,terminal_output +272,313833,"TERMINAL",0,0,"",,terminal_output +273,315492,"TERMINAL",0,0,"bash",,terminal_focus +274,316444,"TERMINAL",0,0,"srun",,terminal_focus +275,320972,"slurm/dev/mihir/horeka/causal_fit_modelsizes/train_dynamics_new_arch_80M.sbatch",0,0,"",shellscript,tab +276,326462,"TERMINAL",0,0,"[?25ls[?25h",,terminal_output +277,326526,"TERMINAL",0,0,"[?25lh[?25h",,terminal_output +278,326649,"TERMINAL",0,0,"[?25l [?25h",,terminal_output +279,327210,"TERMINAL",0,0,"slurm/dev/mihir/horeka/causal_fit_modelsizes/train_dynamics_new_arch_80M.sbatch",,terminal_output +280,327551,"TERMINAL",0,0,"\rslurm/dev/mihir/horeka/causal_fit_modelsizes/train_dynamics_new_arch_80M.sbatch\r\n[?2004l\r#!/usr/bin/env bash\r\n\r\n#SBATCH --nodes=1\r\n#SBATCH --ntasks-per-node=1\r\n#SBATCH --time=00:10:00\r\n#SBATCH --partition=dev_accelerated\r\n#SBATCH --cpus-per-task=5\r\n#SBATCH --gres=gpu:1\r\n#SBATCH --output=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir/%x_%j.log\r\n#SBATCH --error=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir/%x_%j.log\r\n#SBATCH --job-name=train_dyn_causal\r\n\r\n# Log the sbatch script\r\ncat $0\r\n\r\nmodule unload mpi/openmpi/5.0\r\nmodule unload devel/cuda/12.4\r\nsource .venv/bin/activate\r\n\r\narray_records_dir=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_new/open_ai_minecraft_arrayrecords_chunked\r\n\r\njob_name=$SLURM_JOB_NAME\r\nslurm_job_id=$SLURM_JOB_ID\r\n\r\nCHECKPOINT_DIR=$ws_dir/checkpoints/$job_name/$slurm_job_id\r\nmkdir -p $CHECKPOINT_DIR\r\n\r\ntokenizer_ckpt_dir=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data/checkpoints/train_tokenizer_lr_sweep_1e-4_larger_ffn/\r\n\r\nenv | grep SLURM\r\n\r\nsrun python train_dynamics.py \\r\n --save_ckpt \\r\n --num_steps=50 \\r\n --warmup_steps=0 \\r\n --wsd_decay_steps=0 \\r\n --ckpt_dir $CHECKPOINT_DIR \\r\n --batch_size=12 \\r\n --init_lr=1e-3 \\r\n --max_lr=1e-3 \\r\n --log_image_interval=100 \\r\n --log \\r\n --log_checkpoint_interval=100 \\r\n --name=dynamics-causal-80M-$slurm_job_id \\r\n --tags dynamics causal 80M \\r\n --entity instant-uv \\r\n --project jafar \\r\n --tokenizer_checkpoint=$tokenizer_ckpt_dir \\r\n --data_dir $array_records_dir\r\n ",,terminal_output 
+281,327742,"TERMINAL",0,0,"SLURM_STEP_NUM_TASKS=1\r\nSLURM_JOB_USER=tum_cte0515\r\nSLURM_TASKS_PER_NODE=1\r\nSLURM_JOB_UID=999226\r\nSLURM_TASK_PID=183739\r\nSLURM_JOB_GPUS=2\r\nSLURM_LOCALID=0\r\nSLURM_SUBMIT_DIR=/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar\r\nSLURMD_NODENAME=hkn0403\r\nSLURM_JOB_START_TIME=1753358652\r\nSLURM_STEP_NODELIST=hkn0403\r\nSLURM_CLUSTER_NAME=hk\r\nSLURM_JOB_END_TIME=1753362252\r\nSLURM_PMI2_SRUN_PORT=41205\r\nSLURM_CPUS_ON_NODE=6\r\nSLURM_JOB_CPUS_PER_NODE=6\r\nSLURM_GPUS_ON_NODE=1\r\nSLURM_GTIDS=0\r\nSLURM_JOB_PARTITION=dev_accelerated\r\nSLURM_TRES_PER_TASK=cpu=5\r\nSLURM_OOM_KILL_STEP=0\r\nSLURM_JOB_NUM_NODES=1\r\nSLURM_STEPID=4294967290\r\nSLURM_JOBID=3373090\r\nSLURM_PTY_PORT=34297\r\nSLURM_JOB_QOS=normal\r\nSLURM_LAUNCH_NODE_IPADDR=10.0.7.199\r\nSLURM_PTY_WIN_ROW=48\r\nSLURM_PMI2_PROC_MAPPING=(vector,(0,1,1))\r\nSLURMD_DEBUG=2\r\nSLURM_PROCID=0\r\nSLURM_CPUS_PER_TASK=5\r\nSLURM_NTASKS=1\r\nSLURM_TOPOLOGY_ADDR=hkibb.hkibbi1.hkibbi1e11.hkn0403\r\nSLURM_TOPOLOGY_ADDR_PATTERN=switch.switch.switch.node\r\nSLURM_SRUN_COMM_HOST=10.0.7.199\r\nSLURM_SCRIPT_CONTEXT=prolog_task\r\nSLURM_PTY_WIN_COL=200\r\nSLURM_NODELIST=hkn0403\r\nSLURM_SRUN_COMM_PORT=34497\r\nSLURM_STEP_ID=4294967290\r\nSLURM_JOB_ACCOUNT=hk-project-p0023960\r\nSLURM_PRIO_PROCESS=0\r\nSLURM_NPROCS=1\r\nSLURM_NNODES=1\r\nSLURM_SUBMIT_HOST=hkn1991.localdomain\r\nSLURM_JOB_ID=3373090\r\nSLURM_NODEID=0\r\nSLURM_STEP_NUM_NODES=1\r\nSLURM_STEP_TASKS_PER_NODE=1\r\nSLURM_MPI_TYPE=pmi2\r\nSLURM_PMI2_STEP_NODES=hkn0403\r\nSLURM_CONF=/etc/slurm/slurm.conf\r\nSLURM_JOB_NAME=interactive\r\nSLURM_NTASKS_PER_NODE=1\r\nSLURM_STEP_LAUNCHER_PORT=34497\r\nSLURM_JOB_GID=502226\r\nSLURM_JOB_NODELIST=hkn0403\r\n",,terminal_output +282,327861,"TERMINAL",0,0,"GpuFreq=control_disabled\r\n",,terminal_output +283,339931,"TERMINAL",0,0,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/tyro/_parsers.py:347: UserWarning: The field `param-dtype` is annotated with type ``, but the default value `` has type ``. We'll try to handle this gracefully, but it may cause unexpected behavior.\r\n warnings.warn(message)\r\n/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/tyro/_parsers.py:347: UserWarning: The field `dtype` is annotated with type ``, but the default value `` has type ``. We'll try to handle this gracefully, but it may cause unexpected behavior.\r\n warnings.warn(message)\r\n",,terminal_output +284,342178,"TERMINAL",0,0,"2025-07-24 14:05:56.858198: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +285,370625,"TERMINAL",0,0,"2025-07-24 14:06:25.320110: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +286,384924,"TERMINAL",0,0,"2025-07-24 14:06:39.616117: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. 
Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +287,387867,"TERMINAL",0,0,"wandb: Currently logged in as: mihir-mahajan2002 (instant-uv) to https://api.wandb.ai. Use `wandb login --relogin` to force relogin\r\n",,terminal_output +288,388634,"TERMINAL",0,0,"wandb: Tracking run with wandb version 0.19.11\r\nwandb: Run data is saved locally in /hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/wandb/run-20250724_140642-c1zgpbe1\r\nwandb: Run `wandb offline` to turn off syncing.\r\nwandb: Syncing run dynamics-causal-80M-3373090\r\nwandb: ⭐️ View project at https://wandb.ai/instant-uv/jafar\r\nwandb: 🚀 View run at https://wandb.ai/instant-uv/jafar/runs/c1zgpbe1\r\n",,terminal_output +289,424106,"TERMINAL",0,0,"WARNING:absl:Missing metrics for step 51000\r\nERROR:absl:File /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data/checkpoints/train_tokenizer_lr_sweep_1e-4_larger_ffn/051000/metrics/metrics not found.\r\nWARNING:absl:Missing metrics for step 40000\r\nERROR:absl:File /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data/checkpoints/train_tokenizer_lr_sweep_1e-4_larger_ffn/040000/metrics/metrics not found.\r\nWARNING:absl:Missing metrics for step 52000\r\nERROR:absl:File /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data/checkpoints/train_tokenizer_lr_sweep_1e-4_larger_ffn/052000/metrics/metrics not found.\r\nWARNING:absl:Missing metrics for step 53000\r\nERROR:absl:File /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data/checkpoints/train_tokenizer_lr_sweep_1e-4_larger_ffn/053000/metrics/metrics not found.\r\nWARNING:absl:Missing metrics for step 20000\r\nERROR:absl:File /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data/checkpoints/train_tokenizer_lr_sweep_1e-4_larger_ffn/020000/metrics/metrics not found.\r\n",,terminal_output +290,425247,"TERMINAL",0,0,"Running on 1 devices.\r\nCounting all components: ['tokenizer', 'lam', 'dynamics']\r\nParameter counts:\r\n{'tokenizer': 34275568, 'lam': 17492448, 'dynamics': 26818048, 'total': 78586064}\r\n",,terminal_output +291,425375,"TERMINAL",0,0,"Traceback (most recent call last):\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py"", line 341, in \r\n train_state = restore_genie_components(\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/genie.py"", line 405, in restore_genie_components\r\n restored_tokenizer = tokenizer_checkpoint_manager.restore(\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/orbax/checkpoint/checkpoint_manager.py"", line 1608, in restore\r\n restored = self._checkpointer.restore(restore_directory, args=args)\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/orbax/checkpoint/_src/checkpointers/async_checkpointer.py"", line 558, in restore\r\n return super().restore(directory, *args, **kwargs)\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/orbax/checkpoint/_src/checkpointers/checkpointer.py"", line 300, in restore\r\n restored = self._restore(directory, args=ckpt_args)\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/orbax/checkpoint/_src/checkpointers/checkpointer.py"", line 319, in _restore\r\n return self._handler.restore(directory, 
args=args)\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/orbax/checkpoint/_src/handlers/composite_checkpoint_handler.py"", line 837, in restore\r\n restored[item_name] = handler.restore(\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/orbax/checkpoint/_src/handlers/standard_checkpoint_handler.py"", line 246, in restore\r\n return self._impl.restore(\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/orbax/checkpoint/_src/handlers/pytree_checkpoint_handler.py"", line 796, in restore\r\n return self._handler_impl.restore(directory, args=args)\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/orbax/checkpoint/_src/handlers/base_pytree_checkpoint_handler.py"", line 737, in restore\r\n raise ValueError(\r\nValueError: User-provided restore item and on-disk value metadata tree structures do not match: {'params': {'params': {'decoder': {'Dense_2': Diff(lhs={'bias': ShapeDtypeStruct(shape=(48,), dtype=float32, sharding=NamedSharding(mesh=Mesh('data': 1, axis_types=(Auto,)), spec=PartitionSpec(), memory_kind=device)), 'kernel': ShapeDtypeStruct(shape=(512, 48), dtype=float32, sharding=NamedSharding(mesh=Mesh('data': 1, axis_types=(Auto,)), spec=PartitionSpec(), memory_kind=device))}, rhs=None)}, 'encoder': {'Dense_2': Diff(lhs={'bias': ShapeDtypeStruct(shape=(32,), dtype=float32, sharding=NamedSharding(mesh=Mesh('data': 1, axis_types=(Auto,)), spec=PartitionSpec(), memory_kind=device)), 'kernel': ShapeDtypeStruct(shape=(512, 32), dtype=float32, sharding=NamedSharding(mesh=Mesh('data': 1, axis_types=(Auto,)), spec=PartitionSpec(), memory_kind=device))}, rhs=None)}}}, 'opt_state': [{'mu': {'params': {'decoder': {'Dense_2': Diff(lhs={'bias': ShapeDtypeStruct(shape=(48,), dtype=float32, sharding=NamedSharding(mesh=Mesh('data': 1, axis_types=(Auto,)), spec=PartitionSpec(), memory_kind=device)), 'kernel': ShapeDtypeStruct(shape=(512, 48), dtype=float32, sharding=NamedSharding(mesh=Mesh('data': 1, axis_types=(Auto,)), spec=PartitionSpec(), memory_kind=device))}, rhs=None)}, 'encoder': {'Dense_2': Diff(lhs={'bias': ShapeDtypeStruct(shape=(32,), dtype=float32, sharding=NamedSharding(mesh=Mesh('data': 1, axis_types=(Auto,)), spec=PartitionSpec(), memory_kind=device)), 'kernel': ShapeDtypeStruct(shape=(512, 32), dtype=float32, sharding=NamedSharding(mesh=Mesh('data': 1, axis_types=(Auto,)), spec=PartitionSpec(), memory_kind=device))}, rhs=None)}}}, 'nu': {'params': {'decoder': {'Dense_2': Diff(lhs={'bias': ShapeDtypeStruct(shape=(48,), dtype=float32, sharding=NamedSharding(mesh=Mesh('data': 1, axis_types=(Auto,)), spec=PartitionSpec(), memory_kind=device)), 'kernel': ShapeDtypeStruct(shape=(512, 48), dtype=float32, sharding=NamedSharding(mesh=Mesh('data': 1, axis_types=(Auto,)), spec=PartitionSpec(), memory_kind=device))}, rhs=None)}, 'encoder': {'Dense_2': Diff(lhs={'bias': ShapeDtypeStruct(shape=(32,), dtype=float32, sharding=NamedSharding(mesh=Mesh('data': 1, axis_types=(Auto,)), spec=PartitionSpec(), memory_kind=device)), 'kernel': ShapeDtypeStruct(shape=(512, 32), dtype=float32, sharding=NamedSharding(mesh=Mesh('data': 1, axis_types=(Auto,)), spec=PartitionSpec(), memory_kind=device))}, rhs=None)}}}}, None, None]}\r\n",,terminal_output +292,426661,"TERMINAL",0,0,"wandb: \r\nwandb: 🚀 View run dynamics-causal-80M-3373090 at: 
https://wandb.ai/instant-uv/jafar/runs/c1zgpbe1\r\nwandb: Find logs at: ../../../../../hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/wandb/run-20250724_140642-c1zgpbe1/logs\r\n",,terminal_output +293,427989,"TERMINAL",0,0,"srun: error: hkn0403: task 0: Exited with exit code 1\r\n]0;tum_cte0515@hkn0403:~/Projects/jafar[?2004h(jafar) [tum_cte0515@hkn0403 jafar]$ ",,terminal_output +294,464418,"train_dynamics.py",0,0,"",python,tab +295,533646,"train_dynamics.py",1364,0,"",python,selection_mouse +296,533647,"train_dynamics.py",1363,0,"",python,selection_command +297,534423,"train_dynamics.py",1329,0,"",python,selection_command +298,534640,"train_dynamics.py",1363,0,"",python,selection_command +299,534798,"train_dynamics.py",1387,0,"",python,selection_command +300,534963,"train_dynamics.py",1421,0,"",python,selection_command +301,535459,"train_dynamics.py",1454,0,"",python,selection_command +302,535503,"train_dynamics.py",1475,0,"",python,selection_command +303,535518,"train_dynamics.py",1498,0,"",python,selection_command +304,535549,"train_dynamics.py",1522,0,"",python,selection_command +305,535607,"train_dynamics.py",1550,0,"",python,selection_command +306,535608,"train_dynamics.py",1582,0,"",python,selection_command +307,535627,"train_dynamics.py",1614,0,"",python,selection_command +308,535779,"train_dynamics.py",1643,0,"",python,selection_command +309,536277,"train_dynamics.py",1614,0,"",python,selection_command +310,536769,"train_dynamics.py",1582,0,"",python,selection_command +311,536792,"train_dynamics.py",1550,0,"",python,selection_command +312,536948,"train_dynamics.py",1522,0,"",python,selection_command +313,537184,"train_dynamics.py",1498,0,"",python,selection_command +314,537303,"train_dynamics.py",1522,0,"",python,selection_command +315,537776,"train_dynamics.py",1550,0,"",python,selection_command +316,537853,"train_dynamics.py",1582,0,"",python,selection_command +317,537854,"train_dynamics.py",1614,0,"",python,selection_command +318,537863,"train_dynamics.py",1643,0,"",python,selection_command +319,537890,"train_dynamics.py",1671,0,"",python,selection_command +320,537925,"train_dynamics.py",1698,0,"",python,selection_command +321,537963,"train_dynamics.py",1718,0,"",python,selection_command +322,537979,"train_dynamics.py",1742,0,"",python,selection_command +323,538013,"train_dynamics.py",1766,0,"",python,selection_command +324,538081,"train_dynamics.py",1795,0,"",python,selection_command +325,538218,"train_dynamics.py",1824,0,"",python,selection_command +326,538391,"train_dynamics.py",1852,0,"",python,selection_command +327,542375,"slurm/dev/mihir/horeka/causal_fit_modelsizes/train_dynamics_new_arch_500M.sbatch",0,0,"",shellscript,tab +328,543386,"slurm/dev/mihir/horeka/causal_fit_modelsizes/train_dynamics_new_arch_500M.sbatch",927,0,"",shellscript,selection_mouse +329,543954,"slurm/dev/mihir/horeka/causal_fit_modelsizes/train_dynamics_new_arch_500M.sbatch",920,0,"",shellscript,selection_mouse +330,544106,"slurm/dev/mihir/horeka/causal_fit_modelsizes/train_dynamics_new_arch_500M.sbatch",913,12,"4_larger_ffn",shellscript,selection_mouse +331,544245,"slurm/dev/mihir/horeka/causal_fit_modelsizes/train_dynamics_new_arch_500M.sbatch",793,134,"tokenizer_ckpt_dir=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data/checkpoints/train_tokenizer_lr_sweep_1e-4_larger_ffn/\n",shellscript,selection_mouse +332,544863,"slurm/dev/mihir/horeka/causal_fit_modelsizes/train_dynamics_new_arch_500M.sbatch",920,0,"",shellscript,selection_mouse 
+333,544864,"slurm/dev/mihir/horeka/causal_fit_modelsizes/train_dynamics_new_arch_500M.sbatch",913,12,"4_larger_ffn",shellscript,selection_mouse +334,545029,"slurm/dev/mihir/horeka/causal_fit_modelsizes/train_dynamics_new_arch_500M.sbatch",793,134,"tokenizer_ckpt_dir=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data/checkpoints/train_tokenizer_lr_sweep_1e-4_larger_ffn/\n",shellscript,selection_mouse +335,547760,"slurm/dev/mihir/horeka/causal_fit_modelsizes/train_dynamics_new_arch_80M.sbatch",0,0,"",shellscript,tab +336,549120,"slurm/dev/mihir/horeka/causal_fit_modelsizes/train_dynamics_new_arch_80M.sbatch",877,0,"",shellscript,selection_mouse +337,549272,"slurm/dev/mihir/horeka/causal_fit_modelsizes/train_dynamics_new_arch_80M.sbatch",868,11,"checkpoints",shellscript,selection_mouse +338,549959,"slurm/dev/mihir/horeka/causal_fit_modelsizes/train_dynamics_new_arch_80M.sbatch",877,0,"",shellscript,selection_mouse +339,550311,"slurm/dev/mihir/horeka/causal_fit_modelsizes/train_dynamics_new_arch_80M.sbatch",868,11,"checkpoints",shellscript,selection_mouse +340,550480,"slurm/dev/mihir/horeka/causal_fit_modelsizes/train_dynamics_new_arch_80M.sbatch",788,134,"tokenizer_ckpt_dir=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data/checkpoints/train_tokenizer_lr_sweep_1e-4_larger_ffn/\n",shellscript,selection_mouse +341,551657,"slurm/dev/mihir/horeka/causal_fit_modelsizes/train_dynamics_new_arch_80M.sbatch",1399,0,"",shellscript,selection_mouse +342,551831,"slurm/dev/mihir/horeka/causal_fit_modelsizes/train_dynamics_new_arch_80M.sbatch",1392,18,"tokenizer_ckpt_dir",shellscript,selection_mouse +343,555814,"slurm/dev/mihir/horeka/causal_fit_modelsizes/train_dynamics_new_arch_80M.sbatch",787,0,"",shellscript,selection_mouse +344,556282,"slurm/dev/mihir/horeka/causal_fit_modelsizes/train_dynamics_new_arch_80M.sbatch",787,1,"\n",shellscript,selection_mouse +345,556801,"slurm/dev/mihir/horeka/causal_fit_modelsizes/train_dynamics_new_arch_80M.sbatch",827,0,"",shellscript,selection_mouse +346,556956,"slurm/dev/mihir/horeka/causal_fit_modelsizes/train_dynamics_new_arch_80M.sbatch",818,9,"workspace",shellscript,selection_mouse +347,557098,"slurm/dev/mihir/horeka/causal_fit_modelsizes/train_dynamics_new_arch_80M.sbatch",788,134,"tokenizer_ckpt_dir=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data/checkpoints/train_tokenizer_lr_sweep_1e-4_larger_ffn/\n",shellscript,selection_mouse +348,557637,"slurm/dev/mihir/horeka/causal_fit_modelsizes/train_dynamics_new_arch_80M.sbatch",827,0,"",shellscript,selection_mouse +349,598882,"TERMINAL",0,0,"bash",,terminal_focus +350,603179,"train_dynamics.py",0,0,"",python,tab +351,617290,"slurm/dev/mihir/horeka/causal_fit_modelsizes/train_dynamics_new_arch_80M.sbatch",0,0,"",shellscript,tab +352,638983,"TERMINAL",0,0,"bash",,terminal_focus +353,640131,"TERMINAL",0,0,"srun",,terminal_focus +354,641376,"TERMINAL",0,0,"[?25lgi[?25h[?25li[?25h",,terminal_output +355,641527,"TERMINAL",0,0,"[?25lt[?25h",,terminal_output +356,646388,"TERMINAL",0,0,"[?25ld[?25h",,terminal_output +357,646497,"TERMINAL",0,0,"[?25li[?25h",,terminal_output +358,646740,"TERMINAL",0,0,"[?25lf[?25h",,terminal_output +359,647396,"TERMINAL",0,0,"[?25l [?25h",,terminal_output +360,647584,"TERMINAL",0,0,"[?25li[?25h",,terminal_output +361,647773,"TERMINAL",0,0,"[?25ld[?25h",,terminal_output +362,647839,"TERMINAL",0,0,"[?25lf[?25h",,terminal_output +363,649009,"TERMINAL",0,0,"[?25ld[?25h",,terminal_output +364,649104,"TERMINAL",0,0,"[?25li[?25h",,terminal_output 
+365,649170,"TERMINAL",0,0,"[?25lf[?25h",,terminal_output +366,649412,"TERMINAL",0,0,"[?25lf[?25h[?25l [?25h",,terminal_output +367,649576,"TERMINAL",0,0,"[?25lm[?25h",,terminal_output +368,649644,"TERMINAL",0,0,"[?25la[?25h",,terminal_output +369,649711,"TERMINAL",0,0,"[?25li[?25h",,terminal_output +370,649779,"TERMINAL",0,0,"[?25ln[?25h",,terminal_output +371,649955,"TERMINAL",0,0,"\r\n[?2004l\r[?1h=\r",,terminal_output +372,650020,"TERMINAL",0,0,"diff --git a/genie.py b/genie.py\r\nindex 825e181..ed933ff 100644\r\n--- a/genie.py\r\n+++ b/genie.py\r\n@@ -7,7 +7,7 @@ import flax.linen as nn\r\n from flax.training.train_state import TrainState\r\n import orbax.checkpoint as ocp\r\n \r\n-from models.dynamics import DynamicsMaskGIT\r\n+from models.dynamics import DynamicsMaskGIT, DynamicsAutoregressive\r\n from models.lam import LatentActionModel\r\n from models.tokenizer import TokenizerVQVAE\r\n \r\n@@ -40,6 +40,7 @@ class Genie(nn.Module):\r\n dyna_ffn_dim: int\r\n dyna_num_blocks: int\r\n dyna_num_heads: int\r\n+ use_maskgit: bool\r\n param_dtype: jnp.dtype\r\n dtype: jnp.dtype\r\n use_flash_attention: bool\r\n@@ -77,18 +78,32 @@ class Genie(nn.Module):\r\n dtype=self.dtype,\r\n use_flash_attention=self.use_flash_attention,\r\n )\r\n- self.dynamics = DynamicsMaskGIT(\r\n- model_dim=self.dyna_dim,\r\n- ffn_dim=self.dyna_ffn_dim,\r\n- num_latents=self.num_patch_latents,\r\n- num_blocks=self.dyna_num_blocks,\r\n- num_heads=self.dyna_num_heads,\r\n- dropout=self.dropout,\r\n- mask_limit=self.mask_limit,\r\n- param_dtype=self.param_dtype,\r\n- dtype=self.dtype,\r\n- use_flash_attention=self.use_flash_attention,\r\n- )\r\n+\r\n+ if self.use_maskgit:\r\n+ self.dynamics = DynamicsMaskGIT(\r\n+ model_dim=self.dyna_dim,\r\n+ ffn_dim=self.dyna_ffn_dim,\r\n+ num_latents=self.num_patch_latents,\r\n+ num_blocks=self.dyna_num_blocks,\r\n+ num_heads=self.dyna_num_heads,\r\n+ dropout=self.dropout,\r\n+ mask_limit=self.mask_limit,\r\n:",,terminal_output +373,654373,"TERMINAL",0,0,"\r+ param_dtype=self.param_dtype,\r\n:",,terminal_output +374,654610,"TERMINAL",0,0,"\r+ dtype=self.dtype,\r\n:",,terminal_output +375,656126,"TERMINAL",0,0,"\r+ use_flash_attention=self.use_flash_attention,\r\n:\r+ )\r\n:\r+ else:\r\n:\r+ self.dynamics = DynamicsAutoregressive(\r\n:\r+ model_dim=self.dyna_dim,\r\n:\r+ ffn_dim=self.dyna_ffn_dim,\r\n:\r+ num_latents=self.num_patch_latents,\r\n:\r+ num_blocks=self.dyna_num_blocks,\r\n:\r+ num_heads=self.dyna_num_heads,\r\n:\r+ dropout=self.dropout,\r\n:\r+ param_dtype=self.param_dtype,\r\n:\r+ dtype=self.dtype,\r\n:\r+ use_flash_attention=self.use_flash_attention,\r\n:\r+ )\r\n:\r \r\n:\r def __call__(self, batch: Dict[str, Any], training: bool = True) -> Dict[str, Any]:\r\n:\r tokenizer_outputs = self.tokenizer.vq_encode(batch[""videos""], training=False)\r\n:\r@@ -112,8 +127,82 @@ class Genie(nn.Module):\r\n:\r outputs[""lam_indices""] = lam_outputs[""indices""]\r\n:\r return outputs\r\n:\r \r\n:\r+ def sample_causal(\r\n:\r+ self,\r\n:\r+ batch: Dict[str, Any],\r\n:\r+ seq_len: int,\r\n:\r+ temperature: float = 1,\r\n:\r+ sample_argmax: bool = False,\r\n:\r+ ):\r\n:\r+ """"""\r\n:\r+ Autoregressively samples up to `seq_len` future frames using the causal transformer backend.\r\n:\r+\r\n:\r+ - Input frames are tokenized once.\r\n:\r+ - Future frames are generated one at a time, each conditioned on all previous frames.\r\n:\r+ - All frames are detokenized in a single pass at the end.\r\n:\r+\r\n:\r+ Args:\r\n:",,terminal_output +376,656852,"TERMINAL",0,0,"\r+ batch: Dict with at 
least ""videos"" (B, T, H, W, C)\r\n:\r+ seq_len: total number of frames to generate (including context)\r\n:\r+ temperature: sampling temperature\r\n:\r+ sample_argmax: if True, use argmax instead of sampling\r\n:\r+\r\n:\r+ Returns:\r\n:\r+ Generated video frames (B, seq_len, H, W, C)\r\n:\r+ """"""\r\n:\r+ # --- Encode context frames ---\r\n:\r+ tokenizer_out = self.tokenizer.vq_encode(batch[""videos""], training=False)\r\n:\r+ token_idxs = tokenizer_out[""indices""] # (B, T, N)\r\n:\r+ B, T, N = token_idxs.shape\r\n:\r+\r\n:\r+ # jax.debug.print(""token_idxs shape: {}"", token_idxs.shape)\r\n:\r+ # --- Prepare initial token sequence ---\r\n:\r+ # Pad with zeros for future frames\r\n:\r+ pad_shape = (B, seq_len - T, N)\r\n:\r+ token_idxs_full = jnp.concatenate(\r\n:\r+ [token_idxs, jnp.zeros(pad_shape, dtype=token_idxs.dtype)], axis=1\r\n:\r+ ) # (B, seq_len, N)\r\n:\r+\r\n:\r+ # --- Prepare latent actions ---\r\n:\r+ action_tokens = self.lam.vq.get_codes(batch[""latent_actions""]) # (B, S-1, )\r\n:\r+ # --- Autoregressive generation loop ---\r\n:\r+ rng = batch[""rng""]\r\n:",,terminal_output +377,656982,"TERMINAL",0,0,"\r+ for t in range(T, seq_len):\r\n:",,terminal_output +378,657478,"TERMINAL",0,0,"\r+ for n in range(N):\r\n:",,terminal_output +379,658035,"TERMINAL",0,0,"\r+ jax.debug.print(""Sampling token {} from frame {}"", n, t)\r\n:\r+ dyna_inputs = {\r\n:\r+ ""video_tokens"": token_idxs_full,\r\n:\r+ ""latent_actions"": action_tokens,\r\n:\r+ }\r\n:",,terminal_output +380,658275,"TERMINAL",0,0,"\r+ # jax.debug.print(""token_idxs_full 0: {}"", token_idxs_full[0,:,0])\r\n:\r+ dyna_outputs = self.dynamics(dyna_inputs, training=False)\r\n:\r+ # # dyna_outputs[""token_logits""]: (B, t, N, vocab_size)\r\n:\r+ # # We want the logits for the last time step (frame t-1 predicting t)\r\n:\r+ # jax.debug.breakpoint()\r\n:\r+ next_token_logits = dyna_outputs[""token_logits""][:, t, n, :].astype(\r\n:\r+ jnp.float32\r\n:",,terminal_output +381,658893,"TERMINAL",0,0,"\r+ ) # (B, 1, vocab_size)\r\n:\r+\r\n:\r+ # Sample or argmax for each patch\r\n:\r+ if sample_argmax:\r\n:\r+ next_token = jnp.argmax(next_token_logits, axis=-1) # (B, 1)\r\n:\r+ else:\r\n:\r+ rng, step_rng = jax.random.split(rng)\r\n:\r+ next_token = jax.random.categorical(\r\n:\r+ step_rng, next_token_logits / temperature, axis=-1\r\n:\r+ ) # (B, 1)\r\n:\r+\r\n:\r+ # Insert the generated tokens into the sequence\r\n:\r+ token_idxs_full = token_idxs_full.at[:, t, n].set(next_token)\r\n:\r+\r\n:\r+ # --- Decode all tokens at once at the end ---\r\n:\r+ final_frames = self.tokenizer.decode(\r\n:\r+ token_idxs_full, video_hw=batch[""videos""].shape[2:4]\r\n:\r+ )\r\n:\r+ return final_frames\r\n:\r+\r\n:\r @nn.compact\r\n:\r- def sample(\r\n:",,terminal_output +382,659177,"TERMINAL",0,0,"\r+ def sample_maskgit(\r\n:",,terminal_output +383,659380,"TERMINAL",0,0,"\r self,\r\n:",,terminal_output +384,659654,"TERMINAL",0,0,"\r batch: Dict[str, Any],\r\n:",,terminal_output +385,659779,"TERMINAL",0,0,"\r seq_len: int,\r\n:",,terminal_output +386,662588,"TERMINAL",0,0,"\r[?1l>]0;tum_cte0515@hkn0403:~/Projects/jafar[?2004h(jafar) [tum_cte0515@hkn0403 jafar]$ ",,terminal_output +387,696331,"train_dynamics.py",0,0,"",python,tab +388,703551,"train_dynamics.py",1264,0,"",python,selection_mouse +389,704217,"train_dynamics.py",1303,0,"",python,selection_mouse +390,707110,"train_dynamics.py",1391,0,"",python,selection_mouse +391,729146,"models/dynamics.py",0,0,"",python,tab 
+392,737901,"slurm/jobs/mihir/horeka/mask_prob_fix/train_dynamics_8_nodes.sbatch",0,0,"#!/usr/bin/env bash\n\n#SBATCH --nodes=8\n#SBATCH --ntasks-per-node=4\n#SBATCH --time=48:00:00\n#SBATCH --partition=accelerated\n#SBATCH --cpus-per-task=5\n#SBATCH --gres=gpu:4\n#SBATCH --output=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir/maskgit-maskprob-fix/dynamics-cotraining/%x_%j.log\n#SBATCH --error=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir/maskgit-maskprob-fix/dynamics-cotraining/%x_%j.log\n#SBATCH --job-name=train_dynamics_maskprob_fix_8_node\n#SBATCH --requeue\n#SBATCH --signal=b:usr1@300 # 5 min before timeout\n\n# --- signal trap to requeue job before timeout ---\nrequeue_job() {\n echo ""[$(date)] caught sigusr1 (timeout warning), requeueing slurm job $SLURM_JOB_ID...""\n # optional: trigger checkpoint saving here\n # e.g., touch $checkpoint_dir/requeue_trigger\n scontrol requeue $SLURM_JOB_ID\n exit 0\n}\n\ntrap requeue_job sigusr1\n\n# set checkpoint flag based on restart count\nrestart_count=$(scontrol show job $SLURM_JOB_ID | grep -o 'Restarts=[0-9]*' | cut -d'=' -f2)\n\nif [ $restart_count -eq 0 ]; then\n restore_ckpt_flag=""--no-restore-ckpt""\nelse\n restore_ckpt_flag=""--restore-ckpt""\nfi\n\n\n\n# Log the sbatch script\ncat $0\n\nmodule unload mpi/openmpi/5.0\nmodule unload devel/cuda/12.4\nsource .venv/bin/activate\n\narray_records_dir=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_new/open_ai_minecraft_arrayrecords_chunked\n\njob_name=$SLURM_JOB_NAME\nslurm_job_id=$SLURM_JOB_ID\n\nCHECKPOINT_DIR=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/maskgit-maskprob-fix/$job_name/$slurm_job_id\nmkdir -p $CHECKPOINT_DIR\n\n# tokenizer with the new structure supporting larger ffn_dim\ntokenizer_ckpt_dir=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data/checkpoints/train_tokenizer_lr_sweep_1e-4_larger_ffn/\n\nenv | grep SLURM\n\nsrun python train_dynamics.py \\n --save_ckpt \\n $restore_ckpt_flag \\n --wandb_id $SLURM_JOB_ID \\n --ckpt_dir $CHECKPOINT_DIR \\n --batch_size=384 \\n --init_lr=0 \\n --max_lr=8e-5 \\n --log_image_interval=1000 \\n --log \\n --log_checkpoint_interval=1000 \\n --name=dynamics-maskprob-fix-8-node-$slurm_job_id \\n --tags dynamics maskprob-fix 8-node \\n --entity instant-uv \\n --project jafar \\n --dyna_dim=1024 \\n --dyna_num_blocks=16 \\n --dyna_num_heads=16 \\n --dyna_ffn_dim=4096 \\n --tokenizer_checkpoint=$tokenizer_ckpt_dir \\n --data_dir $array_records_dir &\n\nchild_pid=$!\n\nwait $child_pid\n",shellscript,tab +393,739550,"slurm/jobs/mihir/horeka/mask_prob_fix/train_dynamics_8_nodes.sbatch",1730,0,"",shellscript,selection_mouse +394,739551,"slurm/jobs/mihir/horeka/mask_prob_fix/train_dynamics_8_nodes.sbatch",1729,11,"tum_ind3695",shellscript,selection_mouse +395,739710,"slurm/jobs/mihir/horeka/mask_prob_fix/train_dynamics_8_nodes.sbatch",1681,134,"tokenizer_ckpt_dir=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data/checkpoints/train_tokenizer_lr_sweep_1e-4_larger_ffn/\n",shellscript,selection_mouse +396,744712,"slurm/jobs/mihir/horeka/mask_prob_fix/train_dynamics_8_nodes.sbatch",1689,0,"",shellscript,selection_mouse +397,744822,"slurm/jobs/mihir/horeka/mask_prob_fix/train_dynamics_8_nodes.sbatch",1681,18,"tokenizer_ckpt_dir",shellscript,selection_mouse +398,745788,"slurm/dev/mihir/horeka/causal_fit_modelsizes/train_dynamics_new_arch_80M.sbatch",0,0,"",shellscript,tab 
+399,747526,"slurm/dev/mihir/horeka/causal_fit_modelsizes/train_dynamics_new_arch_80M.sbatch",921,0,"\n",shellscript,content +400,747965,"slurm/dev/mihir/horeka/causal_fit_modelsizes/train_dynamics_new_arch_80M.sbatch",922,0,"tokenizer_ckpt_dir=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data/checkpoints/train_tokenizer_lr_sweep_1e-4_larger_ffn/\n",shellscript,content +401,748919,"slurm/dev/mihir/horeka/causal_fit_modelsizes/train_dynamics_new_arch_80M.sbatch",922,0,"",shellscript,selection_command +402,749119,"slurm/dev/mihir/horeka/causal_fit_modelsizes/train_dynamics_new_arch_80M.sbatch",788,0,"",shellscript,selection_command +403,751497,"slurm/dev/mihir/horeka/causal_fit_modelsizes/train_dynamics_new_arch_80M.sbatch",788,134,"",shellscript,content +404,752710,"slurm/dev/mihir/horeka/causal_fit_modelsizes/train_dynamics_new_arch_80M.sbatch",922,0,"",shellscript,selection_command +405,753077,"slurm/dev/mihir/horeka/causal_fit_modelsizes/train_dynamics_new_arch_80M.sbatch",922,1,"",shellscript,content +406,761937,"slurm/dev/mihir/horeka/causal_fit_modelsizes/train_dynamics_new_arch_80M.sbatch",1127,0,"",shellscript,selection_mouse +407,762082,"slurm/dev/mihir/horeka/causal_fit_modelsizes/train_dynamics_new_arch_80M.sbatch",1122,7,"init_lr",shellscript,selection_mouse +408,762251,"slurm/dev/mihir/horeka/causal_fit_modelsizes/train_dynamics_new_arch_80M.sbatch",1116,21," --init_lr=1e-3 \\n",shellscript,selection_mouse +409,762877,"slurm/dev/mihir/horeka/causal_fit_modelsizes/train_dynamics_new_arch_80M.sbatch",1022,0,"",shellscript,selection_mouse +410,763017,"slurm/dev/mihir/horeka/causal_fit_modelsizes/train_dynamics_new_arch_80M.sbatch",1018,12,"warmup_steps",shellscript,selection_mouse +411,763296,"slurm/dev/mihir/horeka/causal_fit_modelsizes/train_dynamics_new_arch_80M.sbatch",1018,38,"warmup_steps=0 \\n --wsd_decay_steps",shellscript,selection_mouse +412,763315,"slurm/dev/mihir/horeka/causal_fit_modelsizes/train_dynamics_new_arch_80M.sbatch",1018,57,"warmup_steps=0 \\n --wsd_decay_steps=0 \\n --ckpt_dir",shellscript,selection_mouse +413,763332,"slurm/dev/mihir/horeka/causal_fit_modelsizes/train_dynamics_new_arch_80M.sbatch",1018,92,"warmup_steps=0 \\n --wsd_decay_steps=0 \\n --ckpt_dir $CHECKPOINT_DIR \\n --batch_size",shellscript,selection_mouse +414,763372,"slurm/dev/mihir/horeka/causal_fit_modelsizes/train_dynamics_new_arch_80M.sbatch",1018,111,"warmup_steps=0 \\n --wsd_decay_steps=0 \\n --ckpt_dir $CHECKPOINT_DIR \\n --batch_size=12 \\n --init_lr",shellscript,selection_mouse +415,763373,"slurm/dev/mihir/horeka/causal_fit_modelsizes/train_dynamics_new_arch_80M.sbatch",1018,131,"warmup_steps=0 \\n --wsd_decay_steps=0 \\n --ckpt_dir $CHECKPOINT_DIR \\n --batch_size=12 \\n --init_lr=1e-3 \\n --max_lr",shellscript,selection_mouse +416,763431,"slurm/dev/mihir/horeka/causal_fit_modelsizes/train_dynamics_new_arch_80M.sbatch",1018,163,"warmup_steps=0 \\n --wsd_decay_steps=0 \\n --ckpt_dir $CHECKPOINT_DIR \\n --batch_size=12 \\n --init_lr=1e-3 \\n --max_lr=1e-3 \\n --log_image_interval",shellscript,selection_mouse +417,763432,"slurm/dev/mihir/horeka/causal_fit_modelsizes/train_dynamics_new_arch_80M.sbatch",1018,181,"warmup_steps=0 \\n --wsd_decay_steps=0 \\n --ckpt_dir $CHECKPOINT_DIR \\n --batch_size=12 \\n --init_lr=1e-3 \\n --max_lr=1e-3 \\n --log_image_interval=100 \\n --log \",shellscript,selection_mouse +418,763441,"slurm/dev/mihir/horeka/causal_fit_modelsizes/train_dynamics_new_arch_80M.sbatch",1018,211,"warmup_steps=0 \\n --wsd_decay_steps=0 \\n --ckpt_dir 
$CHECKPOINT_DIR \\n --batch_size=12 \\n --init_lr=1e-3 \\n --max_lr=1e-3 \\n --log_image_interval=100 \\n --log \\n --log_checkpoint_interval",shellscript,selection_mouse +419,763504,"slurm/dev/mihir/horeka/causal_fit_modelsizes/train_dynamics_new_arch_80M.sbatch",1018,229,"warmup_steps=0 \\n --wsd_decay_steps=0 \\n --ckpt_dir $CHECKPOINT_DIR \\n --batch_size=12 \\n --init_lr=1e-3 \\n --max_lr=1e-3 \\n --log_image_interval=100 \\n --log \\n --log_checkpoint_interval=100 \\n --name=",shellscript,selection_mouse +420,763525,"slurm/dev/mihir/horeka/causal_fit_modelsizes/train_dynamics_new_arch_80M.sbatch",1018,276,"warmup_steps=0 \\n --wsd_decay_steps=0 \\n --ckpt_dir $CHECKPOINT_DIR \\n --batch_size=12 \\n --init_lr=1e-3 \\n --max_lr=1e-3 \\n --log_image_interval=100 \\n --log \\n --log_checkpoint_interval=100 \\n --name=dynamics-causal-80M-$slurm_job_id \\n --tags ",shellscript,selection_mouse +421,763592,"slurm/dev/mihir/horeka/causal_fit_modelsizes/train_dynamics_new_arch_80M.sbatch",1018,310,"warmup_steps=0 \\n --wsd_decay_steps=0 \\n --ckpt_dir $CHECKPOINT_DIR \\n --batch_size=12 \\n --init_lr=1e-3 \\n --max_lr=1e-3 \\n --log_image_interval=100 \\n --log \\n --log_checkpoint_interval=100 \\n --name=dynamics-causal-80M-$slurm_job_id \\n --tags dynamics causal 80M \\n --entity",shellscript,selection_mouse +422,763650,"slurm/dev/mihir/horeka/causal_fit_modelsizes/train_dynamics_new_arch_80M.sbatch",1018,337,"warmup_steps=0 \\n --wsd_decay_steps=0 \\n --ckpt_dir $CHECKPOINT_DIR \\n --batch_size=12 \\n --init_lr=1e-3 \\n --max_lr=1e-3 \\n --log_image_interval=100 \\n --log \\n --log_checkpoint_interval=100 \\n --name=dynamics-causal-80M-$slurm_job_id \\n --tags dynamics causal 80M \\n --entity instant-uv \\n --project",shellscript,selection_mouse +423,764055,"slurm/dev/mihir/horeka/causal_fit_modelsizes/train_dynamics_new_arch_80M.sbatch",1353,0,"",shellscript,selection_mouse +424,764056,"slurm/dev/mihir/horeka/causal_fit_modelsizes/train_dynamics_new_arch_80M.sbatch",1348,7,"project",shellscript,selection_mouse +425,764197,"slurm/dev/mihir/horeka/causal_fit_modelsizes/train_dynamics_new_arch_80M.sbatch",1342,22," --project jafar \\n",shellscript,selection_mouse +426,764437,"slurm/dev/mihir/horeka/causal_fit_modelsizes/train_dynamics_new_arch_80M.sbatch",1316,48," --entity instant-uv \\n --project jafar \\n",shellscript,selection_mouse +427,764438,"slurm/dev/mihir/horeka/causal_fit_modelsizes/train_dynamics_new_arch_80M.sbatch",1283,81," --tags dynamics causal 80M \\n --entity instant-uv \\n --project jafar \\n",shellscript,selection_mouse +428,764445,"slurm/dev/mihir/horeka/causal_fit_modelsizes/train_dynamics_new_arch_80M.sbatch",1236,128," --name=dynamics-causal-80M-$slurm_job_id \\n --tags dynamics causal 80M \\n --entity instant-uv \\n --project jafar \\n",shellscript,selection_mouse +429,764467,"slurm/dev/mihir/horeka/causal_fit_modelsizes/train_dynamics_new_arch_80M.sbatch",1200,164," --log_checkpoint_interval=100 \\n --name=dynamics-causal-80M-$slurm_job_id \\n --tags dynamics causal 80M \\n --entity instant-uv \\n --project jafar \\n",shellscript,selection_mouse +430,764528,"slurm/dev/mihir/horeka/causal_fit_modelsizes/train_dynamics_new_arch_80M.sbatch",1188,176," --log \\n --log_checkpoint_interval=100 \\n --name=dynamics-causal-80M-$slurm_job_id \\n --tags dynamics causal 80M \\n --entity instant-uv \\n --project jafar \\n",shellscript,selection_mouse +431,764529,"slurm/dev/mihir/horeka/causal_fit_modelsizes/train_dynamics_new_arch_80M.sbatch",1157,207," --log_image_interval=100 
\\n --log \\n --log_checkpoint_interval=100 \\n --name=dynamics-causal-80M-$slurm_job_id \\n --tags dynamics causal 80M \\n --entity instant-uv \\n --project jafar \\n",shellscript,selection_mouse +432,764529,"slurm/dev/mihir/horeka/causal_fit_modelsizes/train_dynamics_new_arch_80M.sbatch",1137,227," --max_lr=1e-3 \\n --log_image_interval=100 \\n --log \\n --log_checkpoint_interval=100 \\n --name=dynamics-causal-80M-$slurm_job_id \\n --tags dynamics causal 80M \\n --entity instant-uv \\n --project jafar \\n",shellscript,selection_mouse +433,764547,"slurm/dev/mihir/horeka/causal_fit_modelsizes/train_dynamics_new_arch_80M.sbatch",1116,248," --init_lr=1e-3 \\n --max_lr=1e-3 \\n --log_image_interval=100 \\n --log \\n --log_checkpoint_interval=100 \\n --name=dynamics-causal-80M-$slurm_job_id \\n --tags dynamics causal 80M \\n --entity instant-uv \\n --project jafar \\n",shellscript,selection_mouse +434,764604,"slurm/dev/mihir/horeka/causal_fit_modelsizes/train_dynamics_new_arch_80M.sbatch",1094,270," --batch_size=12 \\n --init_lr=1e-3 \\n --max_lr=1e-3 \\n --log_image_interval=100 \\n --log \\n --log_checkpoint_interval=100 \\n --name=dynamics-causal-80M-$slurm_job_id \\n --tags dynamics causal 80M \\n --entity instant-uv \\n --project jafar \\n",shellscript,selection_mouse +435,764675,"slurm/dev/mihir/horeka/causal_fit_modelsizes/train_dynamics_new_arch_80M.sbatch",1061,303," --ckpt_dir $CHECKPOINT_DIR \\n --batch_size=12 \\n --init_lr=1e-3 \\n --max_lr=1e-3 \\n --log_image_interval=100 \\n --log \\n --log_checkpoint_interval=100 \\n --name=dynamics-causal-80M-$slurm_job_id \\n --tags dynamics causal 80M \\n --entity instant-uv \\n --project jafar \\n",shellscript,selection_mouse +436,764737,"slurm/dev/mihir/horeka/causal_fit_modelsizes/train_dynamics_new_arch_80M.sbatch",1035,329," --wsd_decay_steps=0 \\n --ckpt_dir $CHECKPOINT_DIR \\n --batch_size=12 \\n --init_lr=1e-3 \\n --max_lr=1e-3 \\n --log_image_interval=100 \\n --log \\n --log_checkpoint_interval=100 \\n --name=dynamics-causal-80M-$slurm_job_id \\n --tags dynamics causal 80M \\n --entity instant-uv \\n --project jafar \\n",shellscript,selection_mouse +437,764799,"slurm/dev/mihir/horeka/causal_fit_modelsizes/train_dynamics_new_arch_80M.sbatch",1012,352," --warmup_steps=0 \\n --wsd_decay_steps=0 \\n --ckpt_dir $CHECKPOINT_DIR \\n --batch_size=12 \\n --init_lr=1e-3 \\n --max_lr=1e-3 \\n --log_image_interval=100 \\n --log \\n --log_checkpoint_interval=100 \\n --name=dynamics-causal-80M-$slurm_job_id \\n --tags dynamics causal 80M \\n --entity instant-uv \\n --project jafar \\n",shellscript,selection_mouse +438,764814,"slurm/dev/mihir/horeka/causal_fit_modelsizes/train_dynamics_new_arch_80M.sbatch",991,373," --num_steps=50 \\n --warmup_steps=0 \\n --wsd_decay_steps=0 \\n --ckpt_dir $CHECKPOINT_DIR \\n --batch_size=12 \\n --init_lr=1e-3 \\n --max_lr=1e-3 \\n --log_image_interval=100 \\n --log \\n --log_checkpoint_interval=100 \\n --name=dynamics-causal-80M-$slurm_job_id \\n --tags dynamics causal 80M \\n --entity instant-uv \\n --project jafar \\n",shellscript,selection_mouse +439,764870,"slurm/dev/mihir/horeka/causal_fit_modelsizes/train_dynamics_new_arch_80M.sbatch",973,391," --save_ckpt \\n --num_steps=50 \\n --warmup_steps=0 \\n --wsd_decay_steps=0 \\n --ckpt_dir $CHECKPOINT_DIR \\n --batch_size=12 \\n --init_lr=1e-3 \\n --max_lr=1e-3 \\n --log_image_interval=100 \\n --log \\n --log_checkpoint_interval=100 \\n --name=dynamics-causal-80M-$slurm_job_id \\n --tags dynamics causal 80M \\n --entity instant-uv \\n --project jafar 
\\n",shellscript,selection_mouse +440,764936,"slurm/dev/mihir/horeka/causal_fit_modelsizes/train_dynamics_new_arch_80M.sbatch",941,423,"srun python train_dynamics.py \\n --save_ckpt \\n --num_steps=50 \\n --warmup_steps=0 \\n --wsd_decay_steps=0 \\n --ckpt_dir $CHECKPOINT_DIR \\n --batch_size=12 \\n --init_lr=1e-3 \\n --max_lr=1e-3 \\n --log_image_interval=100 \\n --log \\n --log_checkpoint_interval=100 \\n --name=dynamics-causal-80M-$slurm_job_id \\n --tags dynamics causal 80M \\n --entity instant-uv \\n --project jafar \\n",shellscript,selection_mouse +441,765256,"slurm/dev/mihir/horeka/causal_fit_modelsizes/train_dynamics_new_arch_80M.sbatch",954,0,"",shellscript,selection_mouse +442,765256,"slurm/dev/mihir/horeka/causal_fit_modelsizes/train_dynamics_new_arch_80M.sbatch",953,14,"train_dynamics",shellscript,selection_mouse +443,765420,"slurm/dev/mihir/horeka/causal_fit_modelsizes/train_dynamics_new_arch_80M.sbatch",941,32,"srun python train_dynamics.py \\n",shellscript,selection_mouse +444,765605,"slurm/dev/mihir/horeka/causal_fit_modelsizes/train_dynamics_new_arch_80M.sbatch",941,50,"srun python train_dynamics.py \\n --save_ckpt \\n",shellscript,selection_mouse +445,765628,"slurm/dev/mihir/horeka/causal_fit_modelsizes/train_dynamics_new_arch_80M.sbatch",941,94,"srun python train_dynamics.py \\n --save_ckpt \\n --num_steps=50 \\n --warmup_steps=0 \\n",shellscript,selection_mouse +446,765654,"slurm/dev/mihir/horeka/causal_fit_modelsizes/train_dynamics_new_arch_80M.sbatch",941,120,"srun python train_dynamics.py \\n --save_ckpt \\n --num_steps=50 \\n --warmup_steps=0 \\n --wsd_decay_steps=0 \\n",shellscript,selection_mouse +447,765677,"slurm/dev/mihir/horeka/causal_fit_modelsizes/train_dynamics_new_arch_80M.sbatch",941,175,"srun python train_dynamics.py \\n --save_ckpt \\n --num_steps=50 \\n --warmup_steps=0 \\n --wsd_decay_steps=0 \\n --ckpt_dir $CHECKPOINT_DIR \\n --batch_size=12 \\n",shellscript,selection_mouse +448,765695,"slurm/dev/mihir/horeka/causal_fit_modelsizes/train_dynamics_new_arch_80M.sbatch",941,196,"srun python train_dynamics.py \\n --save_ckpt \\n --num_steps=50 \\n --warmup_steps=0 \\n --wsd_decay_steps=0 \\n --ckpt_dir $CHECKPOINT_DIR \\n --batch_size=12 \\n --init_lr=1e-3 \\n",shellscript,selection_mouse +449,765711,"slurm/dev/mihir/horeka/causal_fit_modelsizes/train_dynamics_new_arch_80M.sbatch",941,247,"srun python train_dynamics.py \\n --save_ckpt \\n --num_steps=50 \\n --warmup_steps=0 \\n --wsd_decay_steps=0 \\n --ckpt_dir $CHECKPOINT_DIR \\n --batch_size=12 \\n --init_lr=1e-3 \\n --max_lr=1e-3 \\n --log_image_interval=100 \\n",shellscript,selection_mouse +450,765771,"slurm/dev/mihir/horeka/causal_fit_modelsizes/train_dynamics_new_arch_80M.sbatch",941,295,"srun python train_dynamics.py \\n --save_ckpt \\n --num_steps=50 \\n --warmup_steps=0 \\n --wsd_decay_steps=0 \\n --ckpt_dir $CHECKPOINT_DIR \\n --batch_size=12 \\n --init_lr=1e-3 \\n --max_lr=1e-3 \\n --log_image_interval=100 \\n --log \\n --log_checkpoint_interval=100 \\n",shellscript,selection_mouse +451,765771,"slurm/dev/mihir/horeka/causal_fit_modelsizes/train_dynamics_new_arch_80M.sbatch",941,342,"srun python train_dynamics.py \\n --save_ckpt \\n --num_steps=50 \\n --warmup_steps=0 \\n --wsd_decay_steps=0 \\n --ckpt_dir $CHECKPOINT_DIR \\n --batch_size=12 \\n --init_lr=1e-3 \\n --max_lr=1e-3 \\n --log_image_interval=100 \\n --log \\n --log_checkpoint_interval=100 \\n --name=dynamics-causal-80M-$slurm_job_id \\n",shellscript,selection_mouse 
+452,765781,"slurm/dev/mihir/horeka/causal_fit_modelsizes/train_dynamics_new_arch_80M.sbatch",941,375,"srun python train_dynamics.py \\n --save_ckpt \\n --num_steps=50 \\n --warmup_steps=0 \\n --wsd_decay_steps=0 \\n --ckpt_dir $CHECKPOINT_DIR \\n --batch_size=12 \\n --init_lr=1e-3 \\n --max_lr=1e-3 \\n --log_image_interval=100 \\n --log \\n --log_checkpoint_interval=100 \\n --name=dynamics-causal-80M-$slurm_job_id \\n --tags dynamics causal 80M \\n",shellscript,selection_mouse +453,765837,"slurm/dev/mihir/horeka/causal_fit_modelsizes/train_dynamics_new_arch_80M.sbatch",941,401,"srun python train_dynamics.py \\n --save_ckpt \\n --num_steps=50 \\n --warmup_steps=0 \\n --wsd_decay_steps=0 \\n --ckpt_dir $CHECKPOINT_DIR \\n --batch_size=12 \\n --init_lr=1e-3 \\n --max_lr=1e-3 \\n --log_image_interval=100 \\n --log \\n --log_checkpoint_interval=100 \\n --name=dynamics-causal-80M-$slurm_job_id \\n --tags dynamics causal 80M \\n --entity instant-uv \\n",shellscript,selection_mouse +454,765843,"slurm/dev/mihir/horeka/causal_fit_modelsizes/train_dynamics_new_arch_80M.sbatch",941,423,"srun python train_dynamics.py \\n --save_ckpt \\n --num_steps=50 \\n --warmup_steps=0 \\n --wsd_decay_steps=0 \\n --ckpt_dir $CHECKPOINT_DIR \\n --batch_size=12 \\n --init_lr=1e-3 \\n --max_lr=1e-3 \\n --log_image_interval=100 \\n --log \\n --log_checkpoint_interval=100 \\n --name=dynamics-causal-80M-$slurm_job_id \\n --tags dynamics causal 80M \\n --entity instant-uv \\n --project jafar \\n",shellscript,selection_mouse +455,765905,"slurm/dev/mihir/horeka/causal_fit_modelsizes/train_dynamics_new_arch_80M.sbatch",941,472,"srun python train_dynamics.py \\n --save_ckpt \\n --num_steps=50 \\n --warmup_steps=0 \\n --wsd_decay_steps=0 \\n --ckpt_dir $CHECKPOINT_DIR \\n --batch_size=12 \\n --init_lr=1e-3 \\n --max_lr=1e-3 \\n --log_image_interval=100 \\n --log \\n --log_checkpoint_interval=100 \\n --name=dynamics-causal-80M-$slurm_job_id \\n --tags dynamics causal 80M \\n --entity instant-uv \\n --project jafar \\n --tokenizer_checkpoint=$tokenizer_ckpt_dir \\n",shellscript,selection_mouse +456,766279,"slurm/dev/mihir/horeka/causal_fit_modelsizes/train_dynamics_new_arch_80M.sbatch",1377,0,"",shellscript,selection_mouse +457,768612,"TERMINAL",0,0,"bash",,terminal_focus +458,771233,"TERMINAL",0,0,"srun",,terminal_focus +459,772452,"TERMINAL",0,0,"slurm/dev/mihir/horeka/causal_fit_modelsizes/train_dynamics_new_arch_80M.sbatch",,terminal_output +460,772818,"TERMINAL",0,0,"\rslurm/dev/mihir/horeka/causal_fit_modelsizes/train_dynamics_new_arch_80M.sbatch\r\n[?2004l\rbash: slurm/dev/mihir/horeka/causal_fit_modelsizes/train_dynamics_new_arch_80M.sbatch: Permission denied\r\n]0;tum_cte0515@hkn0403:~/Projects/jafar[?2004h(jafar) [tum_cte0515@hkn0403 jafar]$ ",,terminal_output +461,774358,"TERMINAL",0,0,"slurm/dev/mihir/horeka/causal_fit_modelsizes/train_dynamics_new_arch_80M.sbatch",,terminal_output +462,775129,"TERMINAL",0,0,"[?25l[?25h",,terminal_output +463,776127,"TERMINAL",0,0,"slurm/dev/mihir/horeka/causal_fit_modelsizes/train_dynamics_new_arch_80M.sbatch\rhslurm/dev/mihir/horeka/causal_fit_modelsizes/train_dynamics_new_arch_80M.sbatch\r",,terminal_output +464,776197,"TERMINAL",0,0," slurm/dev/mihir/horeka/causal_fit_modelsizes/train_dynamics_new_arch_80M.sbatch\r",,terminal_output +465,776444,"TERMINAL",0,0,"\r\n[?2004l\r#!/usr/bin/env bash\r\n\r\n#SBATCH --nodes=1\r\n#SBATCH --ntasks-per-node=1\r\n#SBATCH --time=00:10:00\r\n#SBATCH --partition=dev_accelerated\r\n#SBATCH --cpus-per-task=5\r\n#SBATCH --gres=gpu:1\r\n#SBATCH 
--output=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir/%x_%j.log\r\n#SBATCH --error=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir/%x_%j.log\r\n#SBATCH --job-name=train_dyn_causal\r\n\r\n# Log the sbatch script\r\ncat $0\r\n\r\nmodule unload mpi/openmpi/5.0\r\nmodule unload devel/cuda/12.4\r\nsource .venv/bin/activate\r\n\r\narray_records_dir=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_new/open_ai_minecraft_arrayrecords_chunked\r\n\r\njob_name=$SLURM_JOB_NAME\r\nslurm_job_id=$SLURM_JOB_ID\r\n\r\nCHECKPOINT_DIR=$ws_dir/checkpoints/$job_name/$slurm_job_id\r\nmkdir -p $CHECKPOINT_DIR\r\n\r\ntokenizer_ckpt_dir=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data/checkpoints/train_tokenizer_lr_sweep_1e-4_larger_ffn/\r\n\r\nenv | grep SLURM\r\n\r\nsrun python train_dynamics.py \\r\n --save_ckpt \\r\n --num_steps=50 \\r\n --warmup_steps=0 \\r\n --wsd_decay_steps=0 \\r\n --ckpt_dir $CHECKPOINT_DIR \\r\n --batch_size=12 \\r\n --init_lr=1e-3 \\r\n --max_lr=1e-3 \\r\n --log_image_interval=100 \\r\n --log \\r\n --log_checkpoint_interval=100 \\r\n --name=dynamics-causal-80M-$slurm_job_id \\r\n --tags dynamics causal 80M \\r\n --entity instant-uv \\r\n --project jafar \\r\n --tokenizer_checkpoint=$tokenizer_ckpt_dir \\r\n --data_dir $array_records_dir\r\n ",,terminal_output +466,776799,"TERMINAL",0,0,"SLURM_STEP_NUM_TASKS=1\r\nSLURM_JOB_USER=tum_cte0515\r\nSLURM_TASKS_PER_NODE=1\r\nSLURM_JOB_UID=999226\r\nSLURM_TASK_PID=183739\r\nSLURM_JOB_GPUS=2\r\nSLURM_LOCALID=0\r\nSLURM_SUBMIT_DIR=/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar\r\nSLURMD_NODENAME=hkn0403\r\nSLURM_JOB_START_TIME=1753358652\r\nSLURM_STEP_NODELIST=hkn0403\r\nSLURM_CLUSTER_NAME=hk\r\nSLURM_JOB_END_TIME=1753362252\r\nSLURM_PMI2_SRUN_PORT=41205\r\nSLURM_CPUS_ON_NODE=6\r\nSLURM_JOB_CPUS_PER_NODE=6\r\nSLURM_GPUS_ON_NODE=1\r\nSLURM_GTIDS=0\r\nSLURM_JOB_PARTITION=dev_accelerated\r\nSLURM_TRES_PER_TASK=cpu=5\r\nSLURM_OOM_KILL_STEP=0\r\nSLURM_JOB_NUM_NODES=1\r\nSLURM_STEPID=4294967290\r\nSLURM_JOBID=3373090\r\nSLURM_PTY_PORT=34297\r\nSLURM_JOB_QOS=normal\r\nSLURM_LAUNCH_NODE_IPADDR=10.0.7.199\r\nSLURM_PTY_WIN_ROW=48\r\nSLURM_PMI2_PROC_MAPPING=(vector,(0,1,1))\r\nSLURMD_DEBUG=2\r\nSLURM_PROCID=0\r\nSLURM_CPUS_PER_TASK=5\r\nSLURM_NTASKS=1\r\nSLURM_TOPOLOGY_ADDR=hkibb.hkibbi1.hkibbi1e11.hkn0403\r\nSLURM_TOPOLOGY_ADDR_PATTERN=switch.switch.switch.node\r\nSLURM_SRUN_COMM_HOST=10.0.7.199\r\nSLURM_SCRIPT_CONTEXT=prolog_task\r\nSLURM_PTY_WIN_COL=200\r\nSLURM_NODELIST=hkn0403\r\nSLURM_SRUN_COMM_PORT=34497\r\nSLURM_STEP_ID=4294967290\r\nSLURM_JOB_ACCOUNT=hk-project-p0023960\r\nSLURM_PRIO_PROCESS=0\r\nSLURM_NPROCS=1\r\nSLURM_NNODES=1\r\nSLURM_SUBMIT_HOST=hkn1991.localdomain\r\nSLURM_JOB_ID=3373090\r\nSLURM_NODEID=0\r\nSLURM_STEP_NUM_NODES=1\r\nSLURM_STEP_TASKS_PER_NODE=1\r\nSLURM_MPI_TYPE=pmi2\r\nSLURM_PMI2_STEP_NODES=hkn0403\r\nSLURM_CONF=/etc/slurm/slurm.conf\r\nSLURM_JOB_NAME=interactive\r\nSLURM_NTASKS_PER_NODE=1\r\nSLURM_STEP_LAUNCHER_PORT=34497\r\nSLURM_JOB_GID=502226\r\nSLURM_JOB_NODELIST=hkn0403\r\nGpuFreq=control_disabled\r\n",,terminal_output +467,784969,"TERMINAL",0,0,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/tyro/_parsers.py:347: UserWarning: The field `param-dtype` is annotated with type ``, but the default value `` has type ``. 
We'll try to handle this gracefully, but it may cause unexpected behavior.\r\n warnings.warn(message)\r\n/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/tyro/_parsers.py:347: UserWarning: The field `dtype` is annotated with type ``, but the default value `` has type ``. We'll try to handle this gracefully, but it may cause unexpected behavior.\r\n warnings.warn(message)\r\n",,terminal_output +468,787096,"TERMINAL",0,0,"2025-07-24 14:13:21.790989: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +469,815372,"TERMINAL",0,0,"2025-07-24 14:13:50.038839: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +470,829554,"TERMINAL",0,0,"2025-07-24 14:14:04.237485: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +471,832676,"TERMINAL",0,0,"wandb: Currently logged in as: mihir-mahajan2002 (instant-uv) to https://api.wandb.ai. Use `wandb login --relogin` to force relogin\r\n",,terminal_output +472,833187,"TERMINAL",0,0,"wandb: Tracking run with wandb version 0.19.11\r\nwandb: Run data is saved locally in /hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/wandb/run-20250724_141407-3in2idw1\r\nwandb: Run `wandb offline` to turn off syncing.\r\nwandb: Syncing run dynamics-causal-80M-3373090\r\nwandb: ⭐️ View project at https://wandb.ai/instant-uv/jafar\r\nwandb: 🚀 View run at https://wandb.ai/instant-uv/jafar/runs/3in2idw1\r\n",,terminal_output +473,868909,"TERMINAL",0,0,"WARNING:absl:Missing metrics for step 52000\r\nERROR:absl:File /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data/checkpoints/train_tokenizer_lr_sweep_1e-4_larger_ffn/052000/metrics/metrics not found.\r\nWARNING:absl:Missing metrics for step 40000\r\nERROR:absl:File /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data/checkpoints/train_tokenizer_lr_sweep_1e-4_larger_ffn/040000/metrics/metrics not found.\r\nWARNING:absl:Missing metrics for step 53000\r\nERROR:absl:File /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data/checkpoints/train_tokenizer_lr_sweep_1e-4_larger_ffn/053000/metrics/metrics not found.\r\nWARNING:absl:Missing metrics for step 51000\r\nERROR:absl:File /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data/checkpoints/train_tokenizer_lr_sweep_1e-4_larger_ffn/051000/metrics/metrics not found.\r\nWARNING:absl:Missing metrics for step 20000\r\nERROR:absl:File /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data/checkpoints/train_tokenizer_lr_sweep_1e-4_larger_ffn/020000/metrics/metrics not found.\r\n",,terminal_output +474,869252,"slurm/jobs/mihir/horeka/mask_prob_fix/train_dynamics_8_nodes.sbatch",0,0,"",shellscript,tab +475,869956,"TERMINAL",0,0,"Running on 1 devices.\r\nCounting all components: ['tokenizer', 'lam', 'dynamics']\r\nParameter 
counts:\r\n{'tokenizer': 34275568, 'lam': 17492448, 'dynamics': 26818048, 'total': 78586064}\r\n",,terminal_output +476,870032,"TERMINAL",0,0,"Traceback (most recent call last):\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py"", line 341, in \r\n train_state = restore_genie_components(\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/genie.py"", line 405, in restore_genie_components\r\n restored_tokenizer = tokenizer_checkpoint_manager.restore(\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/orbax/checkpoint/checkpoint_manager.py"", line 1608, in restore\r\n restored = self._checkpointer.restore(restore_directory, args=args)\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/orbax/checkpoint/_src/checkpointers/async_checkpointer.py"", line 558, in restore\r\n return super().restore(directory, *args, **kwargs)\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/orbax/checkpoint/_src/checkpointers/checkpointer.py"", line 300, in restore\r\n restored = self._restore(directory, args=ckpt_args)\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/orbax/checkpoint/_src/checkpointers/checkpointer.py"", line 319, in _restore\r\n return self._handler.restore(directory, args=args)\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/orbax/checkpoint/_src/handlers/composite_checkpoint_handler.py"", line 837, in restore\r\n restored[item_name] = handler.restore(\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/orbax/checkpoint/_src/handlers/standard_checkpoint_handler.py"", line 246, in restore\r\n return self._impl.restore(\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/orbax/checkpoint/_src/handlers/pytree_checkpoint_handler.py"", line 796, in restore\r\n return self._handler_impl.restore(directory, args=args)\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/orbax/checkpoint/_src/handlers/base_pytree_checkpoint_handler.py"", line 737, in restore\r\n raise ValueError(\r\nValueError: User-provided restore item and on-disk value metadata tree structures do not match: {'params': {'params': {'decoder': {'Dense_2': Diff(lhs={'bias': ShapeDtypeStruct(shape=(48,), dtype=float32, sharding=NamedSharding(mesh=Mesh('data': 1, axis_types=(Auto,)), spec=PartitionSpec(), memory_kind=device)), 'kernel': ShapeDtypeStruct(shape=(512, 48), dtype=float32, sharding=NamedSharding(mesh=Mesh('data': 1, axis_types=(Auto,)), spec=PartitionSpec(), memory_kind=device))}, rhs=None)}, 'encoder': {'Dense_2': Diff(lhs={'bias': ShapeDtypeStruct(shape=(32,), dtype=float32, sharding=NamedSharding(mesh=Mesh('data': 1, axis_types=(Auto,)), spec=PartitionSpec(), memory_kind=device)), 'kernel': ShapeDtypeStruct(shape=(512, 32), dtype=float32, sharding=NamedSharding(mesh=Mesh('data': 1, axis_types=(Auto,)), spec=PartitionSpec(), memory_kind=device))}, rhs=None)}}}, 'opt_state': [{'mu': {'params': {'decoder': {'Dense_2': Diff(lhs={'bias': ShapeDtypeStruct(shape=(48,), dtype=float32, sharding=NamedSharding(mesh=Mesh('data': 1, axis_types=(Auto,)), spec=PartitionSpec(), 
memory_kind=device)), 'kernel': ShapeDtypeStruct(shape=(512, 48), dtype=float32, sharding=NamedSharding(mesh=Mesh('data': 1, axis_types=(Auto,)), spec=PartitionSpec(), memory_kind=device))}, rhs=None)}, 'encoder': {'Dense_2': Diff(lhs={'bias': ShapeDtypeStruct(shape=(32,), dtype=float32, sharding=NamedSharding(mesh=Mesh('data': 1, axis_types=(Auto,)), spec=PartitionSpec(), memory_kind=device)), 'kernel': ShapeDtypeStruct(shape=(512, 32), dtype=float32, sharding=NamedSharding(mesh=Mesh('data': 1, axis_types=(Auto,)), spec=PartitionSpec(), memory_kind=device))}, rhs=None)}}}, 'nu': {'params': {'decoder': {'Dense_2': Diff(lhs={'bias': ShapeDtypeStruct(shape=(48,), dtype=float32, sharding=NamedSharding(mesh=Mesh('data': 1, axis_types=(Auto,)), spec=PartitionSpec(), memory_kind=device)), 'kernel': ShapeDtypeStruct(shape=(512, 48), dtype=float32, sharding=NamedSharding(mesh=Mesh('data': 1, axis_types=(Auto,)), spec=PartitionSpec(), memory_kind=device))}, rhs=None)}, 'encoder': {'Dense_2': Diff(lhs={'bias': ShapeDtypeStruct(shape=(32,), dtype=float32, sharding=NamedSharding(mesh=Mesh('data': 1, axis_types=(Auto,)), spec=PartitionSpec(), memory_kind=device)), 'kernel': ShapeDtypeStruct(shape=(512, 32), dtype=float32, sharding=NamedSharding(mesh=Mesh('data': 1, axis_types=(Auto,)), spec=PartitionSpec(), memory_kind=device))}, rhs=None)}}}}, None, None]}\r\n",,terminal_output +477,872821,"TERMINAL",0,0,"wandb: \r\nwandb: 🚀 View run dynamics-causal-80M-3373090 at: https://wandb.ai/instant-uv/jafar/runs/3in2idw1\r\nwandb: Find logs at: ../../../../../hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/wandb/run-20250724_141407-3in2idw1/logs\r\n",,terminal_output +478,874357,"TERMINAL",0,0,"srun: error: hkn0403: task 0: Exited with exit code 1\r\n]0;tum_cte0515@hkn0403:~/Projects/jafar[?2004h(jafar) [tum_cte0515@hkn0403 jafar]$ ",,terminal_output +479,883243,"slurm/jobs/mihir/horeka/mask_prob_fix/train_dynamics_8_nodes_small_model.sbatch",0,0,"#!/usr/bin/env bash\n\n#SBATCH --nodes=8\n#SBATCH --ntasks-per-node=4\n#SBATCH --time=48:00:00\n#SBATCH --partition=accelerated\n#SBATCH --cpus-per-task=5\n#SBATCH --gres=gpu:4\n#SBATCH --output=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir/maskgit-maskprob-fix/dynamics-cotraining/%x_%j.log\n#SBATCH --error=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir/maskgit-maskprob-fix/dynamics-cotraining/%x_%j.log\n#SBATCH --job-name=train_dynamics_maskprob_fix_8_node_80M\n#SBATCH --requeue\n#SBATCH --signal=b:usr1@300 # 5 min before timeout\n\n# --- signal trap to requeue job before timeout ---\nrequeue_job() {\n echo ""[$(date)] caught sigusr1 (timeout warning), requeueing slurm job $SLURM_JOB_ID...""\n # optional: trigger checkpoint saving here\n # e.g., touch $checkpoint_dir/requeue_trigger\n scontrol requeue $SLURM_JOB_ID\n exit 0\n}\n\ntrap requeue_job sigusr1\n\n# set checkpoint flag based on restart count\nrestart_count=$(scontrol show job $SLURM_JOB_ID | grep -o 'Restarts=[0-9]*' | cut -d'=' -f2)\n\nif [ $restart_count -eq 0 ]; then\n restore_ckpt_flag=""--no-restore-ckpt""\nelse\n restore_ckpt_flag=""--restore-ckpt""\nfi\n\n\n\n# Log the sbatch script\ncat $0\n\nmodule unload mpi/openmpi/5.0\nmodule unload devel/cuda/12.4\nsource 
.venv/bin/activate\n\narray_records_dir=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_new/open_ai_minecraft_arrayrecords_chunked\n\njob_name=$SLURM_JOB_NAME\nslurm_job_id=$SLURM_JOB_ID\n\nCHECKPOINT_DIR=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/maskgit-maskprob-fix/$job_name/$slurm_job_id\nmkdir -p $CHECKPOINT_DIR\n\n# tokenizer with the new structure supporting larger ffn_dim\ntokenizer_ckpt_dir=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data/checkpoints/train_tokenizer_lr_sweep_1e-4_larger_ffn/\n\nenv | grep SLURM\n\nsrun python train_dynamics.py \\n --num_steps=100000 \\n --save_ckpt \\n $restore_ckpt_flag \\n --wandb_id $SLURM_JOB_ID \\n --ckpt_dir $CHECKPOINT_DIR \\n --batch_size=384 \\n --init_lr=0 \\n --max_lr=8e-5 \\n --log_image_interval=1000 \\n --log \\n --log_checkpoint_interval=1000 \\n --name=dynamics-maskprob-fix-8-node-80M-$slurm_job_id \\n --tags dynamics maskprob-fix 8-node 80M \\n --entity instant-uv \\n --project jafar \\n --tokenizer_checkpoint=$tokenizer_ckpt_dir \\n --data_dir $array_records_dir &\n\nchild_pid=$!\n\nwait $child_pid\n",shellscript,tab +480,888251,"slurm/dev/mihir/horeka/causal_fit_modelsizes/train_dynamics_new_arch_80M.sbatch",0,0,"",shellscript,tab +481,890486,"genie.py",0,0,"from typing import Dict, Any\n\nimport optax\nimport jax\nimport jax.numpy as jnp\nimport flax.linen as nn\nfrom flax.training.train_state import TrainState\nimport orbax.checkpoint as ocp\n\nfrom models.dynamics import DynamicsMaskGIT, DynamicsAutoregressive\nfrom models.lam import LatentActionModel\nfrom models.tokenizer import TokenizerVQVAE\n\nimport grain\n\n\nclass Genie(nn.Module):\n """"""Genie model""""""\n\n # --- Tokenizer ---\n in_dim: int\n tokenizer_dim: int\n tokenizer_ffn_dim: int\n latent_patch_dim: int\n num_patch_latents: int\n patch_size: int\n tokenizer_num_blocks: int\n tokenizer_num_heads: int\n # --- LAM ---\n lam_dim: int\n lam_ffn_dim: int\n latent_action_dim: int\n num_latent_actions: int\n lam_patch_size: int\n lam_num_blocks: int\n lam_num_heads: int\n lam_co_train: bool\n # --- Dynamics ---\n dyna_dim: int\n dyna_ffn_dim: int\n dyna_num_blocks: int\n dyna_num_heads: int\n use_maskgit: bool\n param_dtype: jnp.dtype\n dtype: jnp.dtype\n use_flash_attention: bool\n dropout: float = 0.0\n mask_limit: float = 0.0\n\n def setup(self):\n self.tokenizer = TokenizerVQVAE(\n in_dim=self.in_dim,\n model_dim=self.tokenizer_dim,\n ffn_dim=self.tokenizer_ffn_dim,\n latent_dim=self.latent_patch_dim,\n num_latents=self.num_patch_latents,\n patch_size=self.patch_size,\n num_blocks=self.tokenizer_num_blocks,\n num_heads=self.tokenizer_num_heads,\n dropout=0.0,\n codebook_dropout=0.0,\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n use_flash_attention=self.use_flash_attention,\n )\n self.lam = LatentActionModel(\n in_dim=self.in_dim,\n model_dim=self.lam_dim,\n ffn_dim=self.lam_ffn_dim,\n latent_dim=self.latent_patch_dim,\n num_latents=self.num_latent_actions,\n patch_size=self.lam_patch_size,\n num_blocks=self.lam_num_blocks,\n num_heads=self.lam_num_heads,\n dropout=0.0,\n codebook_dropout=0.0,\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n use_flash_attention=self.use_flash_attention,\n )\n\n if self.use_maskgit:\n self.dynamics = DynamicsMaskGIT(\n model_dim=self.dyna_dim,\n ffn_dim=self.dyna_ffn_dim,\n num_latents=self.num_patch_latents,\n num_blocks=self.dyna_num_blocks,\n num_heads=self.dyna_num_heads,\n dropout=self.dropout,\n mask_limit=self.mask_limit,\n param_dtype=self.param_dtype,\n 
dtype=self.dtype,\n use_flash_attention=self.use_flash_attention,\n )\n else:\n self.dynamics = DynamicsAutoregressive(\n model_dim=self.dyna_dim,\n ffn_dim=self.dyna_ffn_dim,\n num_latents=self.num_patch_latents,\n num_blocks=self.dyna_num_blocks,\n num_heads=self.dyna_num_heads,\n dropout=self.dropout,\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n use_flash_attention=self.use_flash_attention,\n )\n\n def __call__(self, batch: Dict[str, Any], training: bool = True) -> Dict[str, Any]:\n tokenizer_outputs = self.tokenizer.vq_encode(batch[""videos""], training=False)\n lam_outputs = self.lam.vq_encode(batch[""videos""], training=False)\n latent_actions = jax.lax.cond(\n self.lam_co_train,\n lambda: lam_outputs[""z_q""],\n lambda: jax.lax.stop_gradient(lam_outputs[""z_q""]),\n )\n outputs = dict(\n video_tokens=jax.lax.stop_gradient(tokenizer_outputs[""indices""]),\n latent_actions=latent_actions,\n )\n outputs[""mask_rng""] = batch[""mask_rng""]\n dyna_outputs = self.dynamics(outputs, training)\n outputs.update(dyna_outputs)\n mle_indices = jnp.argmax(outputs[""token_logits""], axis=-1)\n outputs[""recon""] = self.tokenizer.decode(\n mle_indices, batch[""videos""].shape[2:4]\n )\n outputs[""lam_indices""] = lam_outputs[""indices""]\n return outputs\n\n def sample_causal(\n self,\n batch: Dict[str, Any],\n seq_len: int,\n temperature: float = 1,\n sample_argmax: bool = False,\n ):\n """"""\n Autoregressively samples up to `seq_len` future frames using the causal transformer backend.\n\n - Input frames are tokenized once.\n - Future frames are generated one at a time, each conditioned on all previous frames.\n - All frames are detokenized in a single pass at the end.\n\n Args:\n batch: Dict with at least ""videos"" (B, T, H, W, C)\n seq_len: total number of frames to generate (including context)\n temperature: sampling temperature\n sample_argmax: if True, use argmax instead of sampling\n\n Returns:\n Generated video frames (B, seq_len, H, W, C)\n """"""\n # --- Encode context frames ---\n tokenizer_out = self.tokenizer.vq_encode(batch[""videos""], training=False)\n token_idxs = tokenizer_out[""indices""] # (B, T, N)\n B, T, N = token_idxs.shape\n\n # jax.debug.print(""token_idxs shape: {}"", token_idxs.shape)\n # --- Prepare initial token sequence ---\n # Pad with zeros for future frames\n pad_shape = (B, seq_len - T, N)\n token_idxs_full = jnp.concatenate(\n [token_idxs, jnp.zeros(pad_shape, dtype=token_idxs.dtype)], axis=1\n ) # (B, seq_len, N)\n\n # --- Prepare latent actions ---\n action_tokens = self.lam.vq.get_codes(batch[""latent_actions""]) # (B, S-1, )\n # --- Autoregressive generation loop ---\n rng = batch[""rng""]\n for t in range(T, seq_len):\n for n in range(N):\n jax.debug.print(""Sampling token {} from frame {}"", n, t)\n dyna_inputs = {\n ""video_tokens"": token_idxs_full,\n ""latent_actions"": action_tokens,\n }\n # jax.debug.print(""token_idxs_full 0: {}"", token_idxs_full[0,:,0])\n dyna_outputs = self.dynamics(dyna_inputs, training=False)\n # # dyna_outputs[""token_logits""]: (B, t, N, vocab_size)\n # # We want the logits for the last time step (frame t-1 predicting t)\n # jax.debug.breakpoint()\n next_token_logits = dyna_outputs[""token_logits""][:, t, n, :].astype(\n jnp.float32\n ) # (B, 1, vocab_size)\n\n # Sample or argmax for each patch\n if sample_argmax:\n next_token = jnp.argmax(next_token_logits, axis=-1) # (B, 1)\n else:\n rng, step_rng = jax.random.split(rng)\n next_token = jax.random.categorical(\n step_rng, next_token_logits / temperature, axis=-1\n ) 
# (B, 1)\n\n # Insert the generated tokens into the sequence\n token_idxs_full = token_idxs_full.at[:, t, n].set(next_token)\n\n # --- Decode all tokens at once at the end ---\n final_frames = self.tokenizer.decode(\n token_idxs_full, video_hw=batch[""videos""].shape[2:4]\n )\n return final_frames\n\n @nn.compact\n def sample_maskgit(\n self,\n batch: Dict[str, Any],\n seq_len: int,\n steps: int = 25,\n temperature: float = 1,\n sample_argmax: bool = False,\n ) -> Any:\n """"""\n Autoregressively samples up to `seq_len` future frames, following Figure 8 of the paper.\n\n - Input frames are tokenized once.\n - Future frames are generated autoregressively in token space.\n - All frames are detokenized in a single pass.\n\n Note:\n - For interactive or step-wise sampling, detokenization should occur after each action.\n - To maintain consistent tensor shapes across timesteps, all current and future frames are decoded at every step.\n - Temporal causal structure is preserved by\n a) reapplying the mask before each decoding step.\n b) a temporal causal mask is applied within each ST-transformer block.\n\n Dimension keys:\n B: batch size\n T: number of input (conditioning) frames\n N: patches per frame\n S: sequence length\n A: action space\n D: model latent dimension\n """"""\n # --- Encode videos and actions ---\n tokenizer_out = self.tokenizer.vq_encode(batch[""videos""], training=False)\n token_idxs = tokenizer_out[""indices""] # (B, T, N)\n B, T, N = token_idxs.shape\n pad_shape = (B, seq_len - T, N)\n pad = jnp.zeros(pad_shape, dtype=token_idxs.dtype)\n token_idxs = jnp.concatenate([token_idxs, pad], axis=1) # (B, S, N)\n action_tokens = self.lam.vq.get_codes(batch[""latent_actions""])\n\n MaskGITLoop = nn.scan(\n MaskGITStep,\n variable_broadcast=""params"",\n split_rngs={""params"": False},\n in_axes=0,\n out_axes=0,\n length=steps,\n )\n\n loop_fn = MaskGITLoop(\n dynamics=self.dynamics,\n tokenizer=self.tokenizer,\n temperature=temperature,\n sample_argmax=sample_argmax,\n steps=steps,\n )\n\n def generation_step_fn(carry, step_t):\n rng, current_token_idxs = carry\n rng, step_rng = jax.random.split(rng)\n\n # Mask current and future frames (i.e., t >= step_t)\n mask = jnp.arange(seq_len) >= step_t # (S,)\n mask = jnp.broadcast_to(mask[None, :, None], (B, seq_len, N)) # (B, S, N)\n mask = mask.astype(bool)\n masked_token_idxs = current_token_idxs * ~mask\n\n # --- Initialize and run MaskGIT loop ---\n init_carry_maskgit = (\n step_rng,\n masked_token_idxs,\n mask,\n action_tokens,\n )\n final_carry_maskgit, _ = loop_fn(init_carry_maskgit, jnp.arange(steps))\n updated_token_idxs = final_carry_maskgit[1]\n new_carry = (rng, updated_token_idxs)\n return new_carry, None\n\n # --- Run the autoregressive generation using scan ---\n initial_carry = (batch[""rng""], token_idxs)\n timesteps_to_scan = jnp.arange(T, seq_len)\n final_carry, _ = jax.lax.scan(\n generation_step_fn, initial_carry, timesteps_to_scan\n )\n final_token_idxs = final_carry[1]\n\n # --- Decode all tokens at once at the end ---\n final_frames = self.tokenizer.decode(\n final_token_idxs,\n video_hw=batch[""videos""].shape[2:4],\n )\n return final_frames\n\n def vq_encode(self, batch, training) -> Dict[str, Any]:\n # --- Preprocess videos ---\n lam_output = self.lam.vq_encode(batch[""videos""], training=training)\n return lam_output[""indices""]\n\n\nclass MaskGITStep(nn.Module):\n dynamics: nn.Module\n tokenizer: nn.Module\n temperature: float\n sample_argmax: bool\n steps: int\n\n @nn.compact\n def __call__(self, carry, 
x):\n rng, token_idxs, mask, action_tokens = carry\n step = x\n N = token_idxs.shape[2]\n\n # --- Construct + encode video ---\n vid_embed = self.dynamics.patch_embed(token_idxs) # (B, S, N, D)\n mask_token = self.dynamics.mask_token # (1, 1, 1, D,)\n mask_expanded = mask[..., None] # (B, S, N, 1)\n vid_embed = jnp.where(mask_expanded, mask_token, vid_embed)\n\n # --- Predict transition ---\n act_embed = self.dynamics.action_up(action_tokens)\n vid_embed += jnp.pad(act_embed, ((0, 0), (1, 0), (0, 0), (0, 0)))\n unmasked_ratio = jnp.cos(jnp.pi * (step + 1) / (self.steps * 2))\n step_temp = self.temperature * (1.0 - unmasked_ratio)\n final_logits = self.dynamics.dynamics(vid_embed) / step_temp\n\n # --- Sample new tokens for final frame ---\n if self.sample_argmax:\n sampled_token_idxs = jnp.argmax(final_logits, axis=-1)\n else:\n rng, _rng = jax.random.split(rng)\n sampled_token_idxs = jax.random.categorical(_rng, final_logits)\n gather_fn = jax.vmap(jax.vmap(jax.vmap(lambda x, y: x[y])))\n final_token_probs = gather_fn(jax.nn.softmax(final_logits), sampled_token_idxs)\n final_token_probs += ~mask\n # Update masked tokens only\n token_idxs = jnp.where(mask, sampled_token_idxs, token_idxs)\n\n # --- Update mask ---\n num_unmasked_tokens = jnp.round(N * (1.0 - unmasked_ratio)).astype(int)\n idx_mask = jnp.arange(final_token_probs.shape[-1]) > num_unmasked_tokens\n sorted_idxs = jnp.argsort(final_token_probs, axis=-1, descending=True)\n mask_update_fn = jax.vmap(lambda msk, ids: msk.at[ids].set(idx_mask))\n new_mask = mask_update_fn(mask, sorted_idxs)\n\n new_carry = (rng, token_idxs, new_mask, action_tokens)\n return new_carry, None\n\n\ndef restore_genie_components(\n train_state: TrainState,\n sharding: jax.sharding.NamedSharding,\n inputs: Dict[str, jax.Array],\n rng: jax.Array,\n args,\n):\n """"""Restore pre-trained Genie components""""""\n rng, _rng = jax.random.split(rng)\n\n # dummy values since we only use tx to initialize the dummy train states\n dummy_tx = optax.adamw(\n learning_rate=optax.constant_schedule(args.max_lr),\n b1=0.9,\n b2=0.9,\n weight_decay=1e-4,\n )\n handler_registry = ocp.handlers.DefaultCheckpointHandlerRegistry()\n handler_registry.add(\n ""model_state"", ocp.args.StandardRestore, ocp.handlers.StandardCheckpointHandler\n )\n\n checkpoint_options = ocp.CheckpointManagerOptions(\n step_format_fixed_length=6,\n )\n tokenizer_checkpoint_manager = ocp.CheckpointManager(\n directory=args.tokenizer_checkpoint,\n options=checkpoint_options,\n handler_registry=handler_registry,\n )\n dummy_tokenizer = TokenizerVQVAE(\n in_dim=args.image_channels,\n model_dim=args.tokenizer_dim,\n ffn_dim=args.tokenizer_ffn_dim,\n latent_dim=args.latent_patch_dim,\n num_latents=args.num_patch_latents,\n patch_size=args.patch_size,\n num_blocks=args.tokenizer_num_blocks,\n num_heads=args.tokenizer_num_heads,\n dropout=args.dropout,\n codebook_dropout=args.dropout,\n param_dtype=args.param_dtype,\n dtype=args.dtype,\n use_flash_attention=args.use_flash_attention,\n )\n tokenizer_init_params = dummy_tokenizer.init(_rng, inputs)\n dummy_tokenizer_train_state = TrainState.create(\n apply_fn=dummy_tokenizer.apply, params=tokenizer_init_params, tx=dummy_tx\n )\n abstract_sharded_tokenizer_state = _create_abstract_sharded_pytree(\n dummy_tokenizer_train_state, sharding\n )\n restored_tokenizer = tokenizer_checkpoint_manager.restore(\n step=tokenizer_checkpoint_manager.latest_step(),\n args=ocp.args.Composite(\n model_state=ocp.args.StandardRestore(abstract_sharded_tokenizer_state),\n ),\n 
)[""model_state""]\n    restored_tokenizer_params = restored_tokenizer.params[""params""]\n    train_state.params[""params""][""tokenizer""].update(restored_tokenizer_params)\n    tokenizer_checkpoint_manager.close()\n\n    if args.lam_checkpoint:\n        lam_checkpoint_manager = ocp.CheckpointManager(\n            directory=args.lam_checkpoint,\n            options=checkpoint_options,\n            handler_registry=handler_registry,\n        )\n        dummy_lam = LatentActionModel(\n            in_dim=args.image_channels,\n            model_dim=args.lam_dim,\n            ffn_dim=args.lam_ffn_dim,\n            latent_dim=args.latent_patch_dim,\n            num_latents=args.num_latent_actions,\n            patch_size=args.lam_patch_size,\n            num_blocks=args.lam_num_blocks,\n            num_heads=args.lam_num_heads,\n            dropout=args.dropout,\n            codebook_dropout=args.dropout,\n            param_dtype=args.param_dtype,\n            dtype=args.dtype,\n            use_flash_attention=args.use_flash_attention,\n        )\n        lam_init_params = dummy_lam.init(_rng, inputs)\n        dummy_lam_train_state = TrainState.create(\n            apply_fn=dummy_lam.apply, params=lam_init_params, tx=dummy_tx\n        )\n        abstract_sharded_lam_state = _create_abstract_sharded_pytree(\n            dummy_lam_train_state, sharding\n        )\n        restored_lam = lam_checkpoint_manager.restore(\n            step=lam_checkpoint_manager.latest_step(),\n            args=ocp.args.Composite(\n                model_state=ocp.args.StandardRestore(abstract_sharded_lam_state),\n            ),\n        )[""model_state""]\n        restored_lam_params = restored_lam.params[""params""]\n        # Genie does not initialize all LAM modules, thus we omit those extra modules during restoration\n        # (f.srambical) FIXME: Currently, this is a small HBM memory crunch since the LAM's decoder is loaded into HBM and immediately discarded.\n        # A workaround would be to restore to host memory first, and only move the weights to HBM after pruning the decoder\n        restored_lam_params = {\n            k: v\n            for k, v in restored_lam_params.items()\n            if k in train_state.params[""params""][""lam""]\n        }\n        train_state.params[""params""][""lam""].update(restored_lam_params)\n        lam_checkpoint_manager.close()\n\n    return train_state\n\n\ndef _create_abstract_sharded_pytree(pytree_template, sharding_spec):\n    """"""Replaces arrays in a pytree with ShapeDtypeStructs having the given sharding.""""""\n\n    def map_fn(leaf_template):\n        if hasattr(leaf_template, ""shape"") and hasattr(leaf_template, ""dtype""):\n            return jax.ShapeDtypeStruct(\n                leaf_template.shape, leaf_template.dtype, sharding=sharding_spec\n            )\n        return leaf_template\n\n    return jax.tree_util.tree_map(map_fn, pytree_template)\n",python,tab
+482,890951,"models/dynamics.py",0,0,"",python,tab
+483,892770,"train_dynamics.py",0,0,"",python,tab
+484,1129257,"train_tokenizer.py",0,0,"from dataclasses import dataclass, field\nimport os\n\nimport einops\nfrom flax.training import orbax_utils\nfrom flax.training.train_state import TrainState\nfrom jax.sharding import Mesh, PartitionSpec, NamedSharding\nfrom jax.experimental.mesh_utils import create_device_mesh\nimport optax\nimport orbax.checkpoint as ocp\nimport numpy as np\nimport dm_pix as pix\nimport jax\nimport jax.numpy as jnp\nimport tyro\nimport wandb\nimport grain\n\nfrom models.tokenizer import TokenizerVQVAE\nfrom utils.dataloader import get_dataloader\nfrom utils.lr_utils import get_lr_schedule\nfrom utils.parameter_utils import count_parameters_by_component\n\n\n@dataclass\nclass Args:\n    # Experiment\n    num_steps: int = 300_000\n    seed: int = 0\n    seq_len: int = 16\n    image_channels: int = 3\n    image_height: int = 90\n    image_width: int = 160\n    data_dir: str = """"\n    save_ckpt: bool = False\n    restore_ckpt: bool = False\n    # Optimization\n    vq_beta: float = 0.25\n    
batch_size: int = 48\n init_lr: float = 0.0\n max_lr: float = 3e-4\n decay_end: float = 0.0\n wsd_decay_steps: int = (\n 20000 # NOTE: wsd_decay_steps will only be used when using a wsd-schedule\n )\n lr_schedule: str = ""wsd"" # supported options: wsd, cos\n warmup_steps: int = 10000\n # Tokenizer\n model_dim: int = 512\n ffn_dim: int = 2048\n latent_dim: int = 32\n num_latents: int = 1024\n patch_size: int = 4\n num_blocks: int = 4\n num_heads: int = 8\n dropout: float = 0.0\n codebook_dropout: float = 0.01\n param_dtype = jnp.float32\n dtype = jnp.bfloat16\n # Logging\n log: bool = False\n entity: str = """"\n project: str = """"\n name: str = ""train_tokenizer""\n tags: list[str] = field(default_factory=lambda: [""tokenizer""])\n log_interval: int = 5\n log_image_interval: int = 250\n ckpt_dir: str = """"\n log_checkpoint_interval: int = 10000\n log_checkpoint_keep_period: int = 20000\n log_gradients: bool = False\n wandb_id: str = """"\n use_flash_attention: bool = True\n\n\nargs = tyro.cli(Args)\n\n\ndef tokenizer_loss_fn(params, state, inputs):\n # --- Compute loss ---\n # FIXME (f.srambical): Can we even do native int8 training without casting the video at all?\n # FIXME (f.srambical): If the tokenizer is the reason for the dynamics model being memory-bound,\n # should we at least train the tokenizer natively in int8?\n inputs[""videos""] = inputs[""videos""].astype(args.dtype) / 255.0\n outputs = state.apply_fn(\n params,\n inputs,\n training=True,\n rngs={""params"": inputs[""rng""], ""dropout"": inputs[""dropout_rng""]},\n )\n mse = jnp.square(inputs[""videos""] - outputs[""recon""]).mean()\n q_loss = jnp.square(jax.lax.stop_gradient(outputs[""emb""]) - outputs[""z""]).mean()\n commitment_loss = jnp.square(\n outputs[""emb""] - jax.lax.stop_gradient(outputs[""z""])\n ).mean()\n loss = mse + q_loss + args.vq_beta * commitment_loss\n\n # --- Compute validation metrics ---\n gt = inputs[""videos""].clip(0, 1).reshape(-1, *inputs[""videos""].shape[2:])\n recon = outputs[""recon""].clip(0, 1).reshape(-1, *outputs[""recon""].shape[2:])\n psnr = pix.psnr(gt, recon).mean() # type: ignore\n ssim = pix.ssim(gt, recon).mean() # type: ignore\n _, index_counts = jnp.unique_counts(\n jnp.ravel(outputs[""indices""]), size=args.num_latents, fill_value=0\n )\n codebook_usage = (index_counts != 0).mean()\n metrics = dict(\n loss=loss,\n mse=mse,\n q_loss=q_loss,\n commitment_loss=commitment_loss,\n psnr=psnr,\n ssim=ssim,\n codebook_usage=codebook_usage,\n )\n return loss, (outputs[""recon""], metrics)\n\n\n@jax.jit\ndef train_step(state, inputs):\n grad_fn = jax.value_and_grad(tokenizer_loss_fn, has_aux=True, allow_int=True)\n (loss, (recon, metrics)), grads = grad_fn(state.params, state, inputs)\n state = state.apply_gradients(grads=grads)\n if args.log_gradients:\n metrics[""encoder_gradients_std/""] = jax.tree.map(\n lambda x: x.std(), grads[""params""][""encoder""]\n )\n metrics[""vq_gradients_std/""] = jax.tree.map(\n lambda x: x.std(), grads[""params""][""vq""]\n )\n metrics[""decoder_gradients_std/""] = jax.tree.map(\n lambda x: x.std(), grads[""params""][""decoder""]\n )\n return state, loss, recon, metrics\n\n\nif __name__ == ""__main__"":\n jax.distributed.initialize()\n num_devices = jax.device_count()\n if num_devices == 0:\n raise ValueError(""No JAX devices found."")\n print(f""Running on {num_devices} devices."")\n\n if args.batch_size % num_devices != 0:\n raise ValueError(\n f""Global batch size {args.batch_size} must be divisible by ""\n f""number of devices {num_devices}.""\n 
)\n\n per_device_batch_size_for_init = args.batch_size // num_devices\n\n rng = jax.random.PRNGKey(args.seed)\n\n # --- Initialize model ---\n tokenizer = TokenizerVQVAE(\n in_dim=args.image_channels,\n model_dim=args.model_dim,\n ffn_dim=args.ffn_dim,\n latent_dim=args.latent_dim,\n num_latents=args.num_latents,\n patch_size=args.patch_size,\n num_blocks=args.num_blocks,\n num_heads=args.num_heads,\n dropout=args.dropout,\n codebook_dropout=args.codebook_dropout,\n param_dtype=args.param_dtype,\n dtype=args.dtype,\n use_flash_attention=args.use_flash_attention,\n )\n rng, _rng = jax.random.split(rng)\n image_shape = (args.image_height, args.image_width, args.image_channels)\n inputs = dict(\n videos=jnp.zeros(\n (per_device_batch_size_for_init, args.seq_len, *image_shape),\n dtype=args.dtype,\n ),\n )\n init_params = tokenizer.init(_rng, inputs)\n\n param_counts = count_parameters_by_component(init_params)\n\n if args.log and jax.process_index() == 0:\n wandb_init_kwargs = {\n ""entity"": args.entity,\n ""project"": args.project,\n ""name"": args.name,\n ""tags"": args.tags,\n ""group"": ""debug"",\n ""config"": args,\n }\n\n if args.wandb_id:\n wandb_init_kwargs.update(\n {\n ""id"": args.wandb_id,\n ""resume"": ""allow"",\n }\n )\n wandb.init(**wandb_init_kwargs)\n\n wandb.config.update({""model_param_count"": param_counts})\n\n print(""Parameter counts:"")\n print(param_counts)\n\n # --- Initialize optimizer ---\n lr_schedule = get_lr_schedule(\n args.lr_schedule,\n args.init_lr,\n args.max_lr,\n args.decay_end,\n args.num_steps,\n args.warmup_steps,\n args.wsd_decay_steps,\n )\n tx = optax.adamw(\n learning_rate=lr_schedule,\n b1=0.9,\n b2=0.9,\n weight_decay=1e-4,\n mu_dtype=args.dtype,\n )\n train_state = TrainState.create(apply_fn=tokenizer.apply, params=init_params, tx=tx)\n\n # FIXME: switch to create_hybrid_device_mesh for runs spanning multiple nodes\n device_mesh_arr = create_device_mesh((num_devices,))\n mesh = Mesh(devices=device_mesh_arr, axis_names=(""data"",))\n\n replicated_sharding = NamedSharding(mesh, PartitionSpec())\n videos_sharding = NamedSharding(mesh, PartitionSpec(""data"", None, None, None, None))\n train_state = jax.device_put(train_state, replicated_sharding)\n\n # --- Initialize checkpoint manager ---\n step = 0\n handler_registry = ocp.handlers.DefaultCheckpointHandlerRegistry()\n handler_registry.add(\n ""model_state"", ocp.args.StandardSave, ocp.handlers.StandardCheckpointHandler\n )\n handler_registry.add(\n ""model_state"", ocp.args.StandardRestore, ocp.handlers.StandardCheckpointHandler\n )\n handler_registry.add(""dataloader_state"", grain.checkpoint.CheckpointSave, grain.checkpoint.CheckpointHandler) # type: ignore\n handler_registry.add(""dataloader_state"", grain.checkpoint.CheckpointRestore, grain.checkpoint.CheckpointHandler) # type: ignore\n\n checkpoint_options = ocp.CheckpointManagerOptions(\n save_interval_steps=args.log_checkpoint_interval,\n max_to_keep=3,\n keep_period=args.log_checkpoint_keep_period,\n step_format_fixed_length=6,\n cleanup_tmp_directories=True,\n )\n\n checkpoint_manager = ocp.CheckpointManager(\n args.ckpt_dir,\n options=checkpoint_options,\n handler_registry=handler_registry,\n )\n\n # --- Create DataLoaderIterator from dataloader ---\n array_record_files = [\n os.path.join(args.data_dir, x)\n for x in os.listdir(args.data_dir)\n if x.endswith("".array_record"")\n ]\n grain_dataloader = get_dataloader(\n array_record_files,\n args.seq_len,\n # NOTE: We deliberately pass the global batch size\n # The dataloader shards 
the dataset across all processes\n args.batch_size,\n *image_shape,\n num_workers=8,\n prefetch_buffer_size=1,\n seed=args.seed,\n )\n initial_state = grain_dataloader._create_initial_state()\n grain_iterator = grain.DataLoaderIterator(grain_dataloader, initial_state)\n\n # --- Restore checkpoint ---\n if args.restore_ckpt:\n abstract_train_state = jax.tree_util.tree_map(\n ocp.utils.to_shape_dtype_struct, train_state\n )\n restored = checkpoint_manager.restore(\n checkpoint_manager.latest_step(),\n args=ocp.args.Composite(\n model_state=ocp.args.StandardRestore(abstract_train_state),\n dataloader_state=grain.checkpoint.CheckpointRestore(grain_iterator),\n ),\n )\n train_state = restored[""model_state""]\n grain_iterator = restored[""dataloader_state""]\n step = checkpoint_manager.latest_step() or 0\n print(f""Restored dataloader and model state from step {step}"")\n\n # --- TRAIN LOOP ---\n dataloader = (jax.make_array_from_process_local_data(videos_sharding, elem) for elem in grain_iterator) # type: ignore\n print(f""Starting training from step {step}..."")\n while step < args.num_steps:\n for videos in dataloader:\n # --- Train step ---\n rng, _rng, _rng_dropout = jax.random.split(rng, 3)\n\n inputs = dict(videos=videos, rng=_rng, dropout_rng=_rng_dropout)\n train_state, loss, recon, metrics = train_step(train_state, inputs)\n metrics[""lr""] = lr_schedule(step)\n print(f""Step {step}, loss: {loss}"")\n step += 1\n\n # --- Logging ---\n if args.log:\n if step % args.log_interval == 0 and jax.process_index() == 0:\n wandb.log(\n {\n ""loss"": loss,\n ""step"": step,\n **metrics,\n }\n )\n if step % args.log_image_interval == 0:\n gt_seq = inputs[""videos""][0].astype(jnp.float32) / 255.0\n recon_seq = recon[0].clip(0, 1)\n comparison_seq = jnp.concatenate((gt_seq, recon_seq), axis=1)\n comparison_seq = einops.rearrange(\n comparison_seq * 255, ""t h w c -> h (t w) c""\n )\n # NOTE: Process-dependent control flow deliberately happens\n # after indexing operation since it must not contain code\n # sections that lead to cross-accelerator communication.\n if jax.process_index() == 0:\n log_images = dict(\n image=wandb.Image(np.asarray(gt_seq[0])),\n recon=wandb.Image(np.asarray(recon_seq[0])),\n true_vs_recon=wandb.Image(\n np.asarray(comparison_seq.astype(np.uint8))\n ),\n )\n wandb.log(log_images)\n # --- Checkpointing ---\n if args.save_ckpt and step % args.log_checkpoint_interval == 0:\n checkpoint_manager.save(\n step,\n args=ocp.args.Composite(\n model_state=ocp.args.StandardSave(train_state),\n dataloader_state=grain.checkpoint.CheckpointSave(\n grain_iterator\n ),\n ),\n )\n print(f""Saved checkpoint at step {step}"")\n if step >= args.num_steps:\n break\n\n checkpoint_manager.close()\n",python,tab +485,1133425,"train_tokenizer.py",4904,0,"",python,selection_mouse +486,1133784,"models/tokenizer.py",0,0,"from typing import Dict, Any, Tuple\n\nimport flax.linen as nn\nimport jax.numpy as jnp\n\nfrom utils.preprocess import patchify, unpatchify\nfrom utils.nn import STTransformer, VectorQuantizer\n\n\nclass TokenizerVQVAE(nn.Module):\n """"""ST-ViVit VQ-VAE""""""\n\n in_dim: int\n model_dim: int\n ffn_dim: int\n latent_dim: int\n num_latents: int\n patch_size: int\n num_blocks: int\n num_heads: int\n dropout: float\n codebook_dropout: float\n param_dtype: jnp.dtype\n dtype: jnp.dtype\n use_flash_attention: bool\n\n def setup(self):\n self.encoder = STTransformer(\n self.model_dim,\n self.ffn_dim,\n self.latent_dim,\n self.num_blocks,\n self.num_heads,\n self.dropout,\n 
self.param_dtype,\n self.dtype,\n use_flash_attention=self.use_flash_attention,\n )\n self.vq = VectorQuantizer(\n self.latent_dim,\n self.num_latents,\n self.codebook_dropout,\n )\n self.out_dim = self.in_dim * self.patch_size**2\n self.decoder = STTransformer(\n self.model_dim,\n self.ffn_dim,\n self.out_dim,\n self.num_blocks,\n self.num_heads,\n self.dropout,\n self.param_dtype,\n self.dtype,\n use_flash_attention=self.use_flash_attention,\n )\n\n def __call__(self, batch: Dict[str, Any], training: bool = True) -> Dict[str, Any]:\n H, W = batch[""videos""].shape[2:4]\n outputs = self.vq_encode(batch[""videos""], training)\n recon = self.decoder(outputs[""z_q""]) # (B, T, H_down * W_down, C)\n recon = recon.astype(jnp.float32)\n recon = nn.sigmoid(recon)\n recon = recon.astype(self.dtype)\n outputs[""recon""] = unpatchify(recon, self.patch_size, H, W)\n return outputs\n\n def vq_encode(self, videos: Any, training: bool = True) -> Dict[str, Any]:\n # --- Preprocess + encode ---\n B, T = videos.shape[:2]\n x = patchify(videos, self.patch_size)\n N = x.shape[2]\n x = self.encoder(x) # (B, T, N, E)\n\n # --- Vector quantize ---\n x = x.reshape(B * T * N, self.latent_dim)\n z_q, z, emb, indices = self.vq(x, training)\n z_q = z_q.reshape(B, T, N, self.latent_dim)\n indices = indices.reshape(B, T, N)\n return dict(z_q=z_q, z=z, emb=emb, indices=indices)\n\n def decode(self, indices: Any, video_hw: Tuple[int, int]):\n z = self.vq.codebook[indices]\n recon = self.decoder(z)\n recon = recon.astype(jnp.float32)\n recon = nn.sigmoid(recon)\n recon = recon.astype(self.dtype)\n return unpatchify(recon, self.patch_size, *video_hw)\n",python,tab +487,1138454,"train_tokenizer.py",0,0,"",python,tab +488,1140856,"models/tokenizer.py",0,0,"",python,tab +489,1144770,"models/tokenizer.py",579,0,"",python,selection_mouse +490,1145076,"utils/nn.py",0,0,"import math\nfrom typing import Tuple\n\nfrom flax import linen as nn\nimport jax\nimport jax.numpy as jnp\nimport einops\n\n\nclass PositionalEncoding(nn.Module):\n """"""https://uvadlc-notebooks.readthedocs.io/en/latest/tutorial_notebooks/JAX/tutorial6/Transformers_and_MHAttention.html""""""\n\n d_model: int # Hidden dimensionality of the input.\n max_len: int = 5000 # Maximum length of a sequence to expect.\n\n def setup(self):\n # Create matrix of [SeqLen, HiddenDim] representing the positional encoding for max_len inputs\n self.pe = jnp.zeros((self.max_len, self.d_model))\n position = jnp.arange(0, self.max_len, dtype=jnp.float32)[:, None]\n div_term = jnp.exp(\n jnp.arange(0, self.d_model, 2) * (-math.log(10000.0) / self.d_model)\n )\n self.pe = self.pe.at[:, 0::2].set(jnp.sin(position * div_term))\n self.pe = self.pe.at[:, 1::2].set(jnp.cos(position * div_term))\n\n def __call__(self, x):\n x = x + self.pe[: x.shape[2]]\n return x\n\n\n# class STBlock2(nn.Module):\n# dim: int\n# num_heads: int\n# dropout: float\n# param_dtype: jnp.dtype\n# dtype: jnp.dtype\n\n# @nn.remat\n# @nn.compact\n# def __call__(self, x: jax.Array) -> jax.Array:\n# # --- Spatial attention ---\n# z = PositionalEncoding(self.dim)(x)\n# z = nn.LayerNorm(\n# param_dtype=self.param_dtype,\n# dtype=self.dtype,\n# )(z)\n# causal_mask = jnp.tri(z.shape[-2])\n# z = nn.MultiHeadAttention(\n# num_heads=self.num_heads,\n# qkv_features=self.dim,\n# dropout_rate=self.dropout,\n# param_dtype=self.param_dtype,\n# dtype=self.dtype,\n# )(z, mask=causal_mask)\n# x = x + z\n\n# # --- Temporal attention ---\n# x = x.swapaxes(1, 2)\n# z = PositionalEncoding(self.dim)(x)\n# z = nn.LayerNorm(\n# 
param_dtype=self.param_dtype,\n# dtype=self.dtype,\n# )(z)\n# causal_mask = jnp.tri(z.shape[-2])\n# z = nn.MultiHeadAttention(\n# num_heads=self.num_heads,\n# qkv_features=self.dim,\n# dropout_rate=self.dropout,\n# param_dtype=self.param_dtype,\n# dtype=self.dtype,\n# )(z, mask=causal_mask)\n# x = x + z\n# x = x.swapaxes(1, 2)\n\n# # --- Feedforward ---\n# z = nn.LayerNorm(\n# param_dtype=self.param_dtype,\n# dtype=self.dtype,\n# )(x)\n# # FIXME (f.srambical): Here, the attention hidden dimension is the same as the FFN's. Usually, FFN hidden dimension is 4x model_dim\n# z = nn.Dense(\n# self.dim,\n# param_dtype=self.param_dtype,\n# dtype=self.dtype,\n# )(z)\n# z = nn.gelu(z)\n# x = x + z\n\n# return x\n\n# class CausalTransformer(nn.Module):\n# model_dim: int\n# out_dim: int\n# num_blocks: int\n# num_heads: int\n# dropout: float\n# param_dtype: jnp.dtype\n# dtype: jnp.dtype\n\n# @nn.compact\n# def __call__(self, x: jax.Array) -> jax.Array:\n# # Input projection and normalization\n# x = nn.Sequential(\n# [\n# nn.LayerNorm(\n# param_dtype=self.param_dtype,\n# dtype=self.dtype,\n# ),\n# nn.Dense(self.model_dim,\n# param_dtype=self.param_dtype,\n# dtype=self.dtype,\n# ),\n# nn.LayerNorm(\n# param_dtype=self.param_dtype,\n# dtype=self.dtype,\n# ),\n# ]\n# )(x)\n# # Causal transformer blocks\n# for _ in range(self.num_blocks):\n# x = STBlock2(\n# dim=self.model_dim,\n# num_heads=self.num_heads,\n# dropout=self.dropout,\n# param_dtype=self.param_dtype,\n# dtype=self.dtype,\n# )(x)\n\n# # Output projection\n# x = nn.Dense(\n# self.out_dim,\n# param_dtype=self.param_dtype,\n# dtype=self.dtype,\n# )(x)\n# return x # (B, T, E)\n\n\nclass STBlock(nn.Module):\n dim: int\n ffn_dim: int\n num_heads: int\n dropout: float\n param_dtype: jnp.dtype\n dtype: jnp.dtype\n use_flash_attention: bool\n spatial_bert: bool = True\n\n @nn.remat\n @nn.compact\n def __call__(self, x: jax.Array) -> jax.Array:\n # --- Spatial attention ---\n z = PositionalEncoding(self.dim)(x)\n z = nn.LayerNorm(\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n )(z)\n spatial_causal_mask = None if self.spatial_bert else jnp.tri(z.shape[-2])\n z = nn.MultiHeadAttention(\n num_heads=self.num_heads,\n qkv_features=self.dim,\n dropout_rate=self.dropout,\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n attention_fn=_create_flash_attention_fn(\n self.use_flash_attention, is_causal=not self.spatial_bert\n ),\n # decode=True\n )(z, mask=spatial_causal_mask)\n x = x + z\n\n # --- Temporal attention ---\n x = x.swapaxes(1, 2)\n z = PositionalEncoding(self.dim)(x)\n z = nn.LayerNorm(\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n )(z)\n causal_mask = jnp.tri(z.shape[-2])\n z = nn.MultiHeadAttention(\n num_heads=self.num_heads,\n qkv_features=self.dim,\n dropout_rate=self.dropout,\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n attention_fn=_create_flash_attention_fn(\n self.use_flash_attention, is_causal=True\n ),\n # decode=True\n # FIXME (f.srambical): check whether we should still pass the mask if we set is_causal=True\n )(z, mask=causal_mask)\n x = x + z\n x = x.swapaxes(1, 2)\n\n # --- Feedforward ---\n z = nn.LayerNorm(\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n )(x)\n z = nn.Dense(\n self.ffn_dim,\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n )(z)\n z = nn.gelu(z)\n z = nn.Dense(\n self.dim,\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n )(z)\n x = x + z\n\n return x\n\n\nclass STTransformer(nn.Module):\n model_dim: int\n ffn_dim: int\n out_dim: int\n num_blocks: int\n num_heads: int\n 
dropout: float\n param_dtype: jnp.dtype\n dtype: jnp.dtype\n use_flash_attention: bool\n spatial_bert: bool = True\n\n @nn.compact\n def __call__(self, x: jax.Array) -> jax.Array:\n x = nn.Sequential(\n [\n nn.LayerNorm(\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n ),\n nn.Dense(\n self.model_dim,\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n ),\n nn.Dense(\n self.model_dim,\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n ),\n nn.LayerNorm(\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n ),\n ]\n )(x)\n for _ in range(self.num_blocks):\n x = STBlock(\n dim=self.model_dim,\n ffn_dim=self.ffn_dim,\n num_heads=self.num_heads,\n dropout=self.dropout,\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n spatial_bert=self.spatial_bert,\n use_flash_attention=self.use_flash_attention,\n )(x)\n x = nn.Dense(\n self.out_dim,\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n )(x)\n return x # (B, T, E)\n\n\ndef normalize(x):\n return x / (jnp.linalg.norm(x, ord=2, axis=-1, keepdims=True) + 1e-8)\n\n\nclass VectorQuantizer(nn.Module):\n latent_dim: int\n num_latents: int\n dropout: float\n\n def setup(self):\n self.codebook = normalize(\n self.param(\n ""codebook"",\n nn.initializers.lecun_uniform(),\n (self.num_latents, self.latent_dim),\n )\n )\n self.drop = nn.Dropout(self.dropout, deterministic=False)\n\n def __call__(\n self, x: jax.Array, training: bool\n ) -> Tuple[jax.Array, jax.Array, jax.Array, jax.Array]:\n # --- Compute distances ---\n x = normalize(x)\n codebook = normalize(self.codebook)\n distance = -jnp.matmul(x, codebook.T)\n if training:\n dropout_key = self.make_rng(""dropout"")\n distance = self.drop(distance, rng=dropout_key)\n\n # --- Get indices and embeddings ---\n indices = jnp.argmin(distance, axis=-1)\n z = self.codebook[indices]\n\n # --- Straight through estimator ---\n z_q = x + jax.lax.stop_gradient(z - x)\n return z_q, z, x, indices\n\n def get_codes(self, indices: jax.Array):\n return self.codebook[indices]\n\n\ndef _create_flash_attention_fn(use_flash_attention: bool, is_causal: bool):\n """"""\n Create an attention function that uses flash attention if enabled.\n\n Flax MultiHeadAttention provides tensors with shape (batch..., length, num_heads, head_dim)\n jax.nn.dot_product_attention expects (batch, length, num_heads, head_dim).\n\n We need to reshape to ensure compatibility. cuDNN's flash attention additionally\n requires a sequence length that is a multiple of 4. We pad the sequence length to the nearest\n multiple of 4 and mask accordingly.\n """"""\n\n def attention_fn(query, key, value, bias=None, mask=None, **kwargs):\n implementation = ""cudnn"" if use_flash_attention else None\n\n def _rearrange(x):\n return einops.rearrange(x, ""... l h d -> (...) 
l h d"")\n\n def _pad(x):\n return jnp.pad(x, ((0, 0), (0, pad_size), (0, 0), (0, 0)))\n\n def _fuse_masks(mask: jax.Array, attention_mask: jax.Array) -> jax.Array:\n mask_bool = mask.astype(jnp.bool_)\n expanded_mask = jnp.pad(\n mask_bool, ((0, pad_size), (0, pad_size)), constant_values=False\n )\n return jnp.logical_and(attention_mask, expanded_mask)\n\n original_shape = query.shape\n original_seq_len = query.shape[-3]\n\n # Pad to nearest multiple of 4\n target_seq_len = ((original_seq_len + 3) // 4) * 4\n pad_size = target_seq_len - original_seq_len\n\n query_4d = _pad(_rearrange(query))\n key_4d = _pad(_rearrange(key))\n value_4d = _pad(_rearrange(value))\n\n attention_mask = jnp.ones((target_seq_len, target_seq_len), dtype=jnp.bool_)\n attention_mask = attention_mask.at[original_seq_len:, :].set(False)\n attention_mask = attention_mask.at[:, original_seq_len:].set(False)\n\n mask_4d = (\n _fuse_masks(mask, attention_mask) if mask is not None else attention_mask\n )\n mask_4d = mask_4d[jnp.newaxis, jnp.newaxis, :, :] # (1, 1, seq_len, seq_len)\n\n bias_4d = _pad(_rearrange(bias)) if bias is not None else None\n\n output_4d = jax.nn.dot_product_attention(\n query=query_4d,\n key=key_4d,\n value=value_4d,\n bias=bias_4d,\n mask=mask_4d,\n implementation=implementation,\n is_causal=is_causal,\n **kwargs\n )\n return output_4d[..., :original_seq_len, :, :].reshape(original_shape)\n\n return attention_fn\n",python,tab +491,1152681,"utils/nn.py",6226,0,"",python,selection_mouse +492,1152688,"utils/nn.py",6225,0,"",python,selection_command +493,1152814,"utils/nn.py",6225,1,",",python,selection_mouse +494,1152835,"utils/nn.py",6226,0,"",python,selection_command +495,1152846,"utils/nn.py",6186,40," dtype=self.dtype,\n ),",python,selection_mouse +496,1152858,"utils/nn.py",6184,42," dtype=self.dtype,\n ),",python,selection_mouse +497,1152878,"utils/nn.py",6131,95," param_dtype=self.param_dtype,\n dtype=self.dtype,\n ),",python,selection_mouse +498,1152912,"utils/nn.py",6091,135," self.model_dim,\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n ),",python,selection_mouse +499,1152913,"utils/nn.py",6063,163," nn.Dense(\n self.model_dim,\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n ),",python,selection_mouse +500,1152970,"utils/nn.py",6042,184," ),\n nn.Dense(\n self.model_dim,\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n ),",python,selection_mouse +501,1152970,"utils/nn.py",6041,185," ),\n nn.Dense(\n self.model_dim,\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n ),",python,selection_mouse +502,1152974,"utils/nn.py",6040,186," ),\n nn.Dense(\n self.model_dim,\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n ),",python,selection_mouse +503,1152974,"utils/nn.py",6039,187," ),\n nn.Dense(\n self.model_dim,\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n ),",python,selection_mouse +504,1153283,"utils/nn.py",6058,168," nn.Dense(\n self.model_dim,\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n ),",python,selection_mouse +505,1154303,"utils/nn.py",6058,168,"",python,content +506,1156218,"utils/nn.py",6058,1,"",python,content +507,1156237,"utils/nn.py",6074,0,"",python,selection_command +508,1199597,"TERMINAL",0,0,"sh slurm/dev/mihir/horeka/causal_fit_modelsizes/train_dynamics_new_arch_80M.sbatch",,terminal_output +509,1200342,"TERMINAL",0,0,"\r\n[?2004l\r#!/usr/bin/env bash\r\n\r\n#SBATCH --nodes=1\r\n#SBATCH --ntasks-per-node=1\r\n#SBATCH --time=00:10:00\r\n#SBATCH --partition=dev_accelerated\r\n#SBATCH --cpus-per-task=5\r\n#SBATCH --gres=gpu:1\r\n#SBATCH 
--output=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir/%x_%j.log\r\n#SBATCH --error=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir/%x_%j.log\r\n#SBATCH --job-name=train_dyn_causal\r\n\r\n# Log the sbatch script\r\ncat $0\r\n\r\nmodule unload mpi/openmpi/5.0\r\nmodule unload devel/cuda/12.4\r\nsource .venv/bin/activate\r\n\r\narray_records_dir=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_new/open_ai_minecraft_arrayrecords_chunked\r\n\r\njob_name=$SLURM_JOB_NAME\r\nslurm_job_id=$SLURM_JOB_ID\r\n\r\nCHECKPOINT_DIR=$ws_dir/checkpoints/$job_name/$slurm_job_id\r\nmkdir -p $CHECKPOINT_DIR\r\n\r\ntokenizer_ckpt_dir=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data/checkpoints/train_tokenizer_lr_sweep_1e-4_larger_ffn/\r\n\r\nenv | grep SLURM\r\n\r\nsrun python train_dynamics.py \\r\n --save_ckpt \\r\n --num_steps=50 \\r\n --warmup_steps=0 \\r\n --wsd_decay_steps=0 \\r\n --ckpt_dir $CHECKPOINT_DIR \\r\n --batch_size=12 \\r\n --init_lr=1e-3 \\r\n --max_lr=1e-3 \\r\n --log_image_interval=100 \\r\n --log \\r\n --log_checkpoint_interval=100 \\r\n --name=dynamics-causal-80M-$slurm_job_id \\r\n --tags dynamics causal 80M \\r\n --entity instant-uv \\r\n --project jafar \\r\n --tokenizer_checkpoint=$tokenizer_ckpt_dir \\r\n --data_dir $array_records_dir\r\n ",,terminal_output +510,1200475,"TERMINAL",0,0,"SLURM_STEP_NUM_TASKS=1\r\nSLURM_JOB_USER=tum_cte0515\r\nSLURM_TASKS_PER_NODE=1\r\nSLURM_JOB_UID=999226\r\nSLURM_TASK_PID=183739\r\nSLURM_JOB_GPUS=2\r\nSLURM_LOCALID=0\r\nSLURM_SUBMIT_DIR=/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar\r\nSLURMD_NODENAME=hkn0403\r\nSLURM_JOB_START_TIME=1753358652\r\nSLURM_STEP_NODELIST=hkn0403\r\nSLURM_CLUSTER_NAME=hk\r\nSLURM_JOB_END_TIME=1753362252\r\nSLURM_PMI2_SRUN_PORT=41205\r\nSLURM_CPUS_ON_NODE=6\r\nSLURM_JOB_CPUS_PER_NODE=6\r\nSLURM_GPUS_ON_NODE=1\r\nSLURM_GTIDS=0\r\nSLURM_JOB_PARTITION=dev_accelerated\r\nSLURM_TRES_PER_TASK=cpu=5\r\nSLURM_OOM_KILL_STEP=0\r\nSLURM_JOB_NUM_NODES=1\r\nSLURM_STEPID=4294967290\r\nSLURM_JOBID=3373090\r\nSLURM_PTY_PORT=34297\r\nSLURM_JOB_QOS=normal\r\nSLURM_LAUNCH_NODE_IPADDR=10.0.7.199\r\nSLURM_PTY_WIN_ROW=48\r\nSLURM_PMI2_PROC_MAPPING=(vector,(0,1,1))\r\nSLURMD_DEBUG=2\r\nSLURM_PROCID=0\r\nSLURM_CPUS_PER_TASK=5\r\nSLURM_NTASKS=1\r\nSLURM_TOPOLOGY_ADDR=hkibb.hkibbi1.hkibbi1e11.hkn0403\r\nSLURM_TOPOLOGY_ADDR_PATTERN=switch.switch.switch.node\r\nSLURM_SRUN_COMM_HOST=10.0.7.199\r\nSLURM_SCRIPT_CONTEXT=prolog_task\r\nSLURM_PTY_WIN_COL=200\r\nSLURM_NODELIST=hkn0403\r\nSLURM_SRUN_COMM_PORT=34497\r\nSLURM_STEP_ID=4294967290\r\nSLURM_JOB_ACCOUNT=hk-project-p0023960\r\nSLURM_PRIO_PROCESS=0\r\nSLURM_NPROCS=1\r\nSLURM_NNODES=1\r\nSLURM_SUBMIT_HOST=hkn1991.localdomain\r\nSLURM_JOB_ID=3373090\r\nSLURM_NODEID=0\r\nSLURM_STEP_NUM_NODES=1\r\nSLURM_STEP_TASKS_PER_NODE=1\r\nSLURM_MPI_TYPE=pmi2\r\nSLURM_PMI2_STEP_NODES=hkn0403\r\nSLURM_CONF=/etc/slurm/slurm.conf\r\nSLURM_JOB_NAME=interactive\r\nSLURM_NTASKS_PER_NODE=1\r\nSLURM_STEP_LAUNCHER_PORT=34497\r\nSLURM_JOB_GID=502226\r\nSLURM_JOB_NODELIST=hkn0403\r\n",,terminal_output +511,1200620,"TERMINAL",0,0,"GpuFreq=control_disabled\r\n",,terminal_output +512,1205311,"TERMINAL",0,0,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/tyro/_parsers.py:347: UserWarning: The field `param-dtype` is annotated with type ``, but the default value `` has type ``. 
We'll try to handle this gracefully, but it may cause unexpected behavior.\r\n warnings.warn(message)\r\n/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/tyro/_parsers.py:347: UserWarning: The field `dtype` is annotated with type ``, but the default value `` has type ``. We'll try to handle this gracefully, but it may cause unexpected behavior.\r\n warnings.warn(message)\r\n",,terminal_output +513,1207464,"TERMINAL",0,0,"2025-07-24 14:20:22.141837: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +514,1233722,"TERMINAL",0,0,"2025-07-24 14:20:48.416728: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +515,1248117,"TERMINAL",0,0,"2025-07-24 14:21:02.735489: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +516,1251044,"TERMINAL",0,0,"wandb: Currently logged in as: mihir-mahajan2002 (instant-uv) to https://api.wandb.ai. Use `wandb login --relogin` to force relogin\r\n",,terminal_output +517,1251699,"TERMINAL",0,0,"wandb: Tracking run with wandb version 0.19.11\r\nwandb: Run data is saved locally in /hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/wandb/run-20250724_142105-r7tf07yh\r\nwandb: Run `wandb offline` to turn off syncing.\r\nwandb: Syncing run dynamics-causal-80M-3373090\r\nwandb: ⭐️ View project at https://wandb.ai/instant-uv/jafar\r\nwandb: 🚀 View run at https://wandb.ai/instant-uv/jafar/runs/r7tf07yh\r\n",,terminal_output +518,1286925,"TERMINAL",0,0,"WARNING:absl:Missing metrics for step 40000\r\nERROR:absl:File /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data/checkpoints/train_tokenizer_lr_sweep_1e-4_larger_ffn/040000/metrics/metrics not found.\r\nWARNING:absl:Missing metrics for step 51000\r\nERROR:absl:File /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data/checkpoints/train_tokenizer_lr_sweep_1e-4_larger_ffn/051000/metrics/metrics not found.\r\nWARNING:absl:Missing metrics for step 53000\r\nERROR:absl:File /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data/checkpoints/train_tokenizer_lr_sweep_1e-4_larger_ffn/053000/metrics/metrics not found.\r\nWARNING:absl:Missing metrics for step 20000\r\nERROR:absl:File /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data/checkpoints/train_tokenizer_lr_sweep_1e-4_larger_ffn/020000/metrics/metrics not found.\r\nWARNING:absl:Missing metrics for step 52000\r\nERROR:absl:File /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data/checkpoints/train_tokenizer_lr_sweep_1e-4_larger_ffn/052000/metrics/metrics not found.\r\n",,terminal_output +519,1288899,"TERMINAL",0,0,"Running on 1 devices.\r\nCounting all components: ['tokenizer', 'lam', 'dynamics']\r\nParameter counts:\r\n{'tokenizer': 33750256, 'lam': 17229792, 'dynamics': 26555392, 'total': 77535440}\r\n",,terminal_output 
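The parameter-count printout above comes from `count_parameters_by_component` (imported from `utils.parameter_utils`, whose implementation is not captured in this recording). A minimal sketch, assuming the top-level keys of the Flax params dict are the component names shown in the log ('tokenizer', 'lam', 'dynamics'), of how such per-component counts can be derived:

```python
import jax


def count_parameters_by_component(params):
    # Sum array sizes under each top-level key of a Flax params pytree.
    counts = {
        name: sum(int(leaf.size) for leaf in jax.tree_util.tree_leaves(subtree))
        for name, subtree in params["params"].items()
    }
    counts["total"] = sum(counts.values())
    return counts
```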
+520,1294632,"TERMINAL",0,0,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/tyro/_parsers.py:347: UserWarning: The field `param-dtype` is annotated with type ``, but the default value `` has type ``. We'll try to handle this gracefully, but it may cause unexpected behavior.\r\n warnings.warn(message)\r\n/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/tyro/_parsers.py:347: UserWarning: The field `dtype` is annotated with type ``, but the default value `` has type ``. We'll try to handle this gracefully, but it may cause unexpected behavior.\r\n warnings.warn(message)\r\n/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/tyro/_parsers.py:347: UserWarning: The field `param-dtype` is annotated with type ``, but the default value `` has type ``. We'll try to handle this gracefully, but it may cause unexpected behavior.\r\n warnings.warn(message)\r\n/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/tyro/_parsers.py:347: UserWarning: The field `dtype` is annotated with type ``, but the default value `` has type ``. We'll try to handle this gracefully, but it may cause unexpected behavior.\r\n warnings.warn(message)\r\n/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/tyro/_parsers.py:347: UserWarning: The field `param-dtype` is annotated with type ``, but the default value `` has type ``. We'll try to handle this gracefully, but it may cause unexpected behavior.\r\n warnings.warn(message)\r\n/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/tyro/_parsers.py:347: UserWarning: The field `dtype` is annotated with type ``, but the default value `` has type ``. We'll try to handle this gracefully, but it may cause unexpected behavior.\r\n warnings.warn(message)\r\n/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/tyro/_parsers.py:347: UserWarning: The field `param-dtype` is annotated with type ``, but the default value `` has type ``. We'll try to handle this gracefully, but it may cause unexpected behavior.\r\n warnings.warn(message)\r\n/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/tyro/_parsers.py:347: UserWarning: The field `dtype` is annotated with type ``, but the default value `` has type ``. We'll try to handle this gracefully, but it may cause unexpected behavior.\r\n warnings.warn(message)\r\n/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/tyro/_parsers.py:347: UserWarning: The field `param-dtype` is annotated with type ``, but the default value `` has type ``. We'll try to handle this gracefully, but it may cause unexpected behavior.\r\n warnings.warn(message)\r\n/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/tyro/_parsers.py:347: UserWarning: The field `dtype` is annotated with type ``, but the default value `` has type ``. We'll try to handle this gracefully, but it may cause unexpected behavior.\r\n warnings.warn(message)\r\n/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/tyro/_parsers.py:347: UserWarning: The field `param-dtype` is annotated with type ``, but the default value `` has type ``. 
We'll try to handle this gracefully, but it may cause unexpected behavior.\r\n warnings.warn(message)\r\n/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/tyro/_parsers.py:347: UserWarning: The field `param-dtype` is annotated with type ``, but the default value `` has type ``. We'll try to handle this gracefully, but it may cause unexpected behavior.\r\n warnings.warn(message)\r\n/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/tyro/_parsers.py:347: UserWarning: The field `dtype` is annotated with type ``, but the default value `` has type ``. We'll try to handle this gracefully, but it may cause unexpected behavior.\r\n warnings.warn(message)\r\n/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/tyro/_parsers.py:347: UserWarning: The field `dtype` is annotated with type ``, but the default value `` has type ``. We'll try to handle this gracefully, but it may cause unexpected behavior.\r\n warnings.warn(message)\r\n/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/tyro/_parsers.py:347: UserWarning: The field `param-dtype` is annotated with type ``, but the default value `` has type ``. We'll try to handle this gracefully, but it may cause unexpected behavior.\r\n warnings.warn(message)\r\n/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/tyro/_parsers.py:347: UserWarning: The field `dtype` is annotated with type ``, but the default value `` has type ``. We'll try to handle this gracefully, but it may cause unexpected behavior.\r\n warnings.warn(message)\r\n",,terminal_output +521,1314219,"TERMINAL",0,0,"2025-07-24 14:22:08.916412: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n2025-07-24 14:22:08.917265: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n2025-07-24 14:22:08.917845: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n2025-07-24 14:22:08.919105: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n2025-07-24 14:22:08.919135: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n2025-07-24 14:22:08.920597: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. 
Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +522,1410612,"TERMINAL",0,0,"Step 0, loss: 11.8727388381958\r\nStep 1, loss: 15.830284118652344\r\nStep 2, loss: 22.655609130859375\r\nStep 3, loss: 23.859970092773438\r\nStep 4, loss: 44.27670669555664\r\nStep 5, loss: 38.93070983886719\r\nStep 6, loss: 45.07175827026367\r\nStep 7, loss: 41.26808166503906\r\nStep 8, loss: 45.98561096191406\r\nStep 9, loss: 43.021121978759766\r\nStep 10, loss: 42.70923614501953\r\nStep 11, loss: 39.24930953979492\r\nStep 12, loss: 33.725624084472656\r\nStep 13, loss: 27.885536193847656\r\nStep 14, loss: 21.74414825439453\r\nStep 15, loss: 44.88984680175781\r\nStep 16, loss: 24.769031524658203\r\nStep 17, loss: 24.597986221313477\r\nStep 18, loss: 23.3189754486084\r\nStep 19, loss: 24.931982040405273\r\nStep 20, loss: 23.06072998046875\r\nStep 21, loss: 22.930727005004883\r\nStep 22, loss: 22.029626846313477\r\nStep 23, loss: 20.314973831176758\r\nStep 24, loss: 21.3248291015625\r\nStep 25, loss: 20.23626708984375\r\nStep 26, loss: 18.27555274963379\r\nStep 27, loss: 18.35011863708496\r\nStep 28, loss: 18.325387954711914\r\nStep 29, loss: 17.791641235351562\r\nStep 30, loss: 21.81378936767578\r\nStep 31, loss: 22.260051727294922\r\nStep 32, loss: 22.091415405273438\r\nStep 33, loss: 23.420978546142578\r\nStep 34, loss: 22.005355834960938\r\nStep 35, loss: 23.171815872192383\r\nStep 36, loss: 21.17890739440918\r\nStep 37, loss: 21.611591339111328\r\nStep 38, loss: 20.51702880859375\r\nStep 39, loss: 19.992984771728516\r\nStep 40, loss: 18.040985107421875\r\nStep 41, loss: 18.60979652404785\r\nStep 42, loss: 24.150550842285156\r\nStep 43, loss: 27.271183013916016\r\nStep 44, loss: 23.882659912109375\r\nStep 45, loss: 22.942594528198242\r\nStep 46, loss: 21.828369140625\r\nStep 47, loss: 20.006765365600586\r\nStep 48, loss: 18.48887825012207\r\nStep 49, loss: 18.007871627807617\r\n",,terminal_output +523,1410854,"TERMINAL",0,0,"Filtering out episode with length 13, which is shorter than the requested sequence length 16.\r\n",,terminal_output +524,1410906,"TERMINAL",0,0,"Filtering out episode with length 1, which is shorter than the requested sequence length 16.\r\nFiltering out episode with length 8, which is shorter than the requested sequence length 16.\r\n",,terminal_output +525,1414120,"TERMINAL",0,0,"wandb: \r\nwandb: 🚀 View run dynamics-causal-80M-3373090 at: https://wandb.ai/instant-uv/jafar/runs/r7tf07yh\r\nwandb: Find logs at: ../../../../../hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/wandb/run-20250724_142105-r7tf07yh/logs\r\n",,terminal_output +526,1416123,"TERMINAL",0,0,"/home/hk-project-p0023960/tum_cte0515/.local/share/uv/python/cpython-3.10.18-linux-x86_64-gnu/lib/python3.10/multiprocessing/resource_tracker.py:224: UserWarning: resource_tracker: There appear to be 10 leaked shared_memory objects to clean up at shutdown\r\n warnings.warn('resource_tracker: There appear to be %d '\r\n",,terminal_output +527,1416466,"TERMINAL",0,0,"]0;tum_cte0515@hkn0403:~/Projects/jafar[?2004h(jafar) [tum_cte0515@hkn0403 jafar]$ ",,terminal_output +528,1480522,"TERMINAL",0,0,"s",,terminal_output +529,1480843,"TERMINAL",0,0,"y",,terminal_output +530,1481074,"TERMINAL",0,0,"n",,terminal_output +531,1481298,"TERMINAL",0,0,"c",,terminal_output +532,1481410,"TERMINAL",0,0,"-",,terminal_output +533,1481650,"TERMINAL",0,0,"r",,terminal_output +534,1481785,"TERMINAL",0,0,"u",,terminal_output 
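The repeated tyro `UserWarning`s above are triggered by the `param_dtype`/`dtype` attributes of the `Args` dataclass, which carry class-level defaults (`jnp.float32`, `jnp.bfloat16`) without CLI-friendly annotations. A hedged sketch of one way to silence them, assuming string-valued fields resolved to jnp dtypes after parsing (not verified against this tyro version):

```python
from dataclasses import dataclass

import jax.numpy as jnp
import tyro


@dataclass
class DtypeArgs:
    # Parse dtypes as plain strings so tyro has a concrete, annotated field type.
    param_dtype: str = "float32"
    dtype: str = "bfloat16"


args = tyro.cli(DtypeArgs)
# Resolve the names to actual jnp dtypes after CLI parsing,
# e.g. getattr(jnp, "bfloat16") -> jnp.bfloat16.
param_dtype = getattr(jnp, args.param_dtype)
dtype = getattr(jnp, args.dtype)
```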
+535,1481885,"TERMINAL",0,0,"n",,terminal_output +536,1482017,"TERMINAL",0,0,"n",,terminal_output +537,1482082,"TERMINAL",0,0,"e",,terminal_output +538,1482148,"TERMINAL",0,0,"r",,terminal_output +539,1482309,"TERMINAL",0,0,"[?25l-[?25h",,terminal_output +540,1482697,"TERMINAL",0,0,"[?25l2[?25h",,terminal_output +541,1483378,"TERMINAL",0,0,"\r\n[?2004l\rsending incremental file list\r\n",,terminal_output +542,1486167,"TERMINAL",0,0,"train_dynamics.py\r\n",,terminal_output +543,1487137,"TERMINAL",0,0,"slurm/dev/mihir/horeka/causal_fit_modelsizes/train_dynamics_new_arch_80M.sbatch\r\nutils/nn.py\r\n\r\nsent 51,484 bytes received 214 bytes 14,770.86 bytes/sec\r\ntotal size is 185,126,802 speedup is 3,580.93\r\n]0;tum_cte0515@hkn0403:~/Projects/jafar[?2004h(jafar) [tum_cte0515@hkn0403 jafar]$ ",,terminal_output +544,1487949,"TERMINAL",0,0,"r",,terminal_output +545,1488015,"TERMINAL",0,0,"[?25lu[?25h",,terminal_output +546,1488196,"TERMINAL",0,0,"[?25ln[?25h",,terminal_output +547,1488373,"TERMINAL",0,0,"[?25ln[?25h[?25le[?25h",,terminal_output +548,1488439,"TERMINAL",0,0,"[?25lr[?25h",,terminal_output +549,1489034,"TERMINAL",0,0,"[?25l-[?25h",,terminal_output +550,1489459,"TERMINAL",0,0,"[?25l2[?25h",,terminal_output +551,1489895,"TERMINAL",0,0,"\r\n[?2004l\r]0;tum_cte0515@hkn0403:~/Projects/jafar_jobs_2[?2004h(jafar) [tum_cte0515@hkn0403 jafar_jobs_2]$ ",,terminal_output +552,1491672,"TERMINAL",0,0,"q",,terminal_output +553,1491866,"TERMINAL",0,0,"[?25lu[?25h",,terminal_output +554,1491932,"TERMINAL",0,0,"[?25le[?25h",,terminal_output +555,1492084,"TERMINAL",0,0,"[?25lu[?25h[?25le[?25h",,terminal_output +556,1492296,"TERMINAL",0,0,"\r\n[?2004l\r[?1049h(B[?7hEvery 1.0s: squeue --mehkn0403.localdomain: Thu Jul 24 14:25:06 2025JOBID PARTITION NAME USER ST\tTIME NODES NODELIST(REASON)3371237 accelerat train_dy tum_cte0 R 13:50:51\t 8 hkn[0618,0625-0626,0628-0631,0634]3371238 accelerat train_dy tum_cte0 R 13:50:51\t 2 hkn[0706,0710]3372631 accelerat train_dy tum_cte0 R11:54\t 2 hkn[0515,0622]3372629 accelerat train_dy tum_cte0 R57:17\t 8 hkn[0410,0429,0520,0607,0610,0810,0814,0817]3373090 dev_accel interact tum_cte0 R20:54\t 1 hkn0403",,terminal_output +557,1493369,"TERMINAL",0,0,"722585",,terminal_output +558,1494386,"TERMINAL",0,0,"833696",,terminal_output +559,1495325,"TERMINAL",0,0,"9558218",,terminal_output +560,1496335,"TERMINAL",0,0,"1166929",,terminal_output +561,1497357,"TERMINAL",0,0,"2772:0031:00",,terminal_output +562,1498381,"TERMINAL",0,0,"388141",,terminal_output +563,1499392,"TERMINAL",0,0,"499252",,terminal_output +564,1500432,"TERMINAL",0,0,"51:001:00363",,terminal_output +565,1501076,"TERMINAL",0,0,"[?1049l\r[?1l>]0;tum_cte0515@hkn0403:~/Projects/jafar_jobs_2[?2004h(jafar) [tum_cte0515@hkn0403 jafar_jobs_2]$ ",,terminal_output +566,1516373,"TERMINAL",0,0,"\r(reverse-i-search)`': ",,terminal_output +567,1516679,"TERMINAL",0,0,"s': sync-runner-2",,terminal_output +568,1516744,"TERMINAL",0,0,"[?25lsb': sh slurm/dev/mihir/horeka/causal_fit_modelsizes/train_dynamics_new_arch_80M.sbatch[?25h",,terminal_output +569,1516902,"TERMINAL",0,0,"[?25ls\ra': sh slurm/dev/mihir/horeka/causal_fit_modelsizes/train_dynamics_new_arch_80M.sbatch[?25h",,terminal_output +570,1517017,"TERMINAL",0,0,"[?25ls\rt': sh slurm/dev/mihir/horeka/causal_fit_modelsizes/train_dynamics_new_arch_80M.sbatch[?25h",,terminal_output +571,1517409,"TERMINAL",0,0,"[?25lbs\rc': sh slurm/dev/mihir/horeka/causal_fit_modelsizes/train_dynamics_new_arch_80M.sbatch[?25h",,terminal_output +572,1517480,"TERMINAL",0,0,"\rh': sh 
slurm/dev/mihir/horeka/causal_fit_modelsizes/train_dynamics_new_arch_80M.sbatch",,terminal_output +573,1519103,"TERMINAL",0,0,"\rlurm/dev/mihir/horeka/causal_fit_modelsizes/train_dynamics_new_arch_80M.sbatch",,terminal_output +574,1519760,"TERMINAL",0,0,"\rh slurm/dev/mihir/horeka/causal_fit_modelsizes/train_dynamics_new_arch_80M.sbatch",,terminal_output +575,1520704,"TERMINAL",0,0,"\rbatch slurm/dev/mihir/horeka/causal_fit_modelsizes/train_dynamics_new_arch_356M.sbatch",,terminal_output +576,1523328,"TERMINAL",0,0,"\rsbatch slurm/dev/mihir/horeka/causal_fit_modelsizes/train_dynamics_new_arch_356M.sbatch\r",,terminal_output +577,1524066,"TERMINAL",0,0,"\r",,terminal_output +578,1525067,"TERMINAL",0,0,"sbatch slurm/dev/mihir/horeka/causal_fit_modelsizes/train_dynamics_new_arch_356M.sbatch",,terminal_output +579,1525543,"TERMINAL",0,0,"\rsbatch slurm/dev/mihir/horeka/causal_fit_modelsizes/train_dynamics_new_arch_356M.sbatch\r",,terminal_output +580,1526007,"TERMINAL",0,0,"sbatch slurm/dev/mihir/horeka/causal_fit_modelsizes/train_dynamics_new_arch_255M.sbatch",,terminal_output +581,1526571,"TERMINAL",0,0,"\rsbatch slurm/dev/mihir/horeka/causal_fit_modelsizes/train_dynamics_new_arch_255M.sbatch\r",,terminal_output +582,1527012,"TERMINAL",0,0,"sbatch slurm/dev/mihir/horeka/causal_fit_modelsizes/train_dynamics_new_arch_180M.sbatch",,terminal_output +583,1528948,"TERMINAL",0,0,"\rjafar) [tum_cte0515@hkn0403 jafar_jobs_2]$ sbatch slurm/dev/mihir/horeka/causal_fit_modelsizes/train_dynamics_new_arch_180M.sbatch\r\n[?2004l\rSubmitted batch job 3373107\r\n]0;tum_cte0515@hkn0403:~/Projects/jafar_jobs_2[?2004h(jafar) [tum_cte0515@hkn0403 jafar_jobs_2]$ ",,terminal_output +584,1529489,"TERMINAL",0,0,"sbatch slurm/dev/mihir/horeka/causal_fit_modelsizes/train_dynamics_new_arch_180M.sbatch",,terminal_output +585,1532950,"TERMINAL",0,0,"2",,terminal_output +586,1533192,"TERMINAL",0,0,"55M.sbatch",,terminal_output +587,1534942,"TERMINAL",0,0,"\r\n[?2004l\rSubmitted batch job 3373108\r\n]0;tum_cte0515@hkn0403:~/Projects/jafar_jobs_2[?2004h(jafar) [tum_cte0515@hkn0403 jafar_jobs_2]$ ",,terminal_output +588,1535987,"TERMINAL",0,0,"sbatch slurm/dev/mihir/horeka/causal_fit_modelsizes/train_dynamics_new_arch_255M.sbatch",,terminal_output +589,1538606,"TERMINAL",0,0,"3",,terminal_output +590,1538900,"TERMINAL",0,0,"56M.sbatch",,terminal_output +591,1539459,"TERMINAL",0,0,"\r\n[?2004l\rSubmitted batch job 3373109\r\n]0;tum_cte0515@hkn0403:~/Projects/jafar_jobs_2[?2004h(jafar) [tum_cte0515@hkn0403 jafar_jobs_2]$ ",,terminal_output +592,1540840,"TERMINAL",0,0,"sbatch slurm/dev/mihir/horeka/causal_fit_modelsizes/train_dynamics_new_arch_356M.sbatch",,terminal_output +593,1542953,"TERMINAL",0,0,"5",,terminal_output +594,1543292,"TERMINAL",0,0,"00M.sbatch",,terminal_output +595,1543691,"TERMINAL",0,0,"\r\n[?2004l\rsbatch: error: QOSMaxSubmitJobPerUserLimit\r\nsbatch: error: Batch job submission failed: Job violates accounting/QOS policy (job submit limit, user's size and/or time limits)\r\n]0;tum_cte0515@hkn0403:~/Projects/jafar_jobs_2[?2004h(jafar) [tum_cte0515@hkn0403 jafar_jobs_2]$ ",,terminal_output +596,1546490,"TERMINAL",0,0,"s",,terminal_output +597,1546587,"TERMINAL",0,0,"[?25lm[?25h",,terminal_output +598,1546736,"TERMINAL",0,0,"[?25li[?25h",,terminal_output +599,1547185,"TERMINAL",0,0,"\r\n[?2004l\r",,terminal_output +600,1547542,"TERMINAL",0,0,"[?1049h(B[?7hEvery 1.0s: nvidia-smihkn0403.localdomain: Thu Jul 24 14:26:01 2025Thu Jul 24 14:26:01 
2025\r+-----------------------------------------------------------------------------------------+\r| NVIDIA-SMI 570.133.20Driver Version: 570.133.20 CUDA Version: 12.8 |\r|-----------------------------------------+------------------------+----------------------+\r| GPU NamePersistence-M | Bus-IdDisp.A | Volatile Uncorr. ECC |\r| Fan Temp PerfPwr:Usage/Cap |Memory-Usage | GPU-Util Compute M. |\r|||MIG M. |\r|=========================================+========================+======================|\r| 0 NVIDIA A100-SXM4-40GBOn | 00000000:CA:00.0 Off |0 |\r| N/A 45C P053W / 300W |\t 29MiB / 40960MiB |\t 0%\t Default |\r|||Disabled |\r+-----------------------------------------+------------------------+----------------------+\r+-----------------------------------------------------------------------------------------+\r| Processes:|\r| GPU GI CIPID Type Process nameGPU Memory |\r|ID IDUsage\t |\r|=========================================================================================|\r| 0 N/A N/A2546G /usr/libexec/Xorg17MiB |\r+-----------------------------------------------------------------------------------------+",,terminal_output +601,1548188,"TERMINAL",0,0,"[?1049l\r[?1l>]0;tum_cte0515@hkn0403:~/Projects/jafar_jobs_2[?2004h(jafar) [tum_cte0515@hkn0403 jafar_jobs_2]$ ",,terminal_output +602,1548722,"TERMINAL",0,0,"[?2004l\r\r\nexit\r\nsalloc: Relinquishing job allocation 3373090\r\nsalloc: Job allocation 3373090 has been revoked.\r\n]0;tum_cte0515@hkn1991:~/Projects/jafar]633;D;0",,terminal_output +603,1555665,"TERMINAL",0,0,"runner-2",,terminal_command +604,1559417,"TERMINAL",0,0,"sbatch slurm/dev/mihir/horeka/causal_fit_modelsizes/train_dynamics_new_arch_500M.sbatch",,terminal_command +605,1559444,"TERMINAL",0,0,"]633;E;2025-07-24 14:26:14 sbatch slurm/dev/mihir/horeka/causal_fit_modelsizes/train_dynamics_new_arch_500M.sbatch;406cfb31-2341-454a-afa8-cae7781806b2]633;CSubmitted batch job 3373110\r\n]0;tum_cte0515@hkn1991:~/Projects/jafar_jobs_2]633;D;0",,terminal_output +606,1562305,"TERMINAL",0,0,"queue",,terminal_command +607,1562355,"TERMINAL",0,0,"]633;E;2025-07-24 14:26:17 queue;406cfb31-2341-454a-afa8-cae7781806b2]633;C",,terminal_output +608,1562421,"TERMINAL",0,0,"[?1049h(B[?7hEvery 1.0s: squeue --mehkn1991.localdomain: Thu Jul 24 14:26:17 2025JOBID PARTITION NAME USER ST\tTIME NODES NODELIST(REASON)3371237 accelerat train_dy tum_cte0 R 13:52:02\t 8 hkn[0618,0625-0626,0628-0631,0634]3371238 accelerat train_dy tum_cte0 R 13:52:02\t 2 hkn[0706,0710]3372631 accelerat train_dy tum_cte0 R13:05\t 2 hkn[0515,0622]3372629 accelerat train_dy tum_cte0 R58:28\t 8 hkn[0410,0429,0520,0607,0610,0810,0814,0817]3373110 dev_accel train_dy tum_cte0 PD\t0:00\t 1 (Priority)3373109 dev_accel train_dy tum_cte0 PD\t0:00\t 1 (QOSMaxJobsPerUserLimit)3373108 dev_accel train_dy tum_cte0 PD\t0:00\t 1 (QOSMaxJobsPerUserLimit)3373107 dev_accel train_dy tum_cte0 PD\t0:00\t 1 (Resources)",,terminal_output +609,1563511,"TERMINAL",0,0,"83369",,terminal_output +610,1564494,"TERMINAL",0,0,"944730",,terminal_output +611,1565558,"TERMINAL",0,0,"205581",,terminal_output +612,1566584,"TERMINAL",0,0,"16692",,terminal_output +613,1567706,"TERMINAL",0,0,"277103",,terminal_output +614,1568745,"TERMINAL",0,0,"38814",,terminal_output +615,1569738,"TERMINAL",0,0,"49925",,terminal_output +616,1570884,"TERMINAL",0,0,"5101036",,terminal_output +617,1571832,"TERMINAL",0,0,"61147",,terminal_output +618,1572888,"TERMINAL",0,0,"72258",,terminal_output +619,1573937,"TERMINAL",0,0,"83369",,terminal_output 
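In the session above, `sbatch` fails with `QOSMaxSubmitJobPerUserLimit` once the per-user submit quota is full, and further jobs sit in `PD` with `QOSMaxJobsPerUserLimit`. A small sketch (assuming only the stock `sbatch` CLI, as used here) of retrying a submission until a slot frees up:

```python
import subprocess
import time


def submit_with_retry(sbatch_script: str, wait_s: int = 60) -> str:
    """Submit a batch script, backing off while the QOS submit limit is hit."""
    while True:
        proc = subprocess.run(
            ["sbatch", sbatch_script], capture_output=True, text=True
        )
        if proc.returncode == 0:
            return proc.stdout.strip()  # e.g. "Submitted batch job 3373110"
        if "QOSMaxSubmitJobPerUserLimit" in proc.stderr:
            time.sleep(wait_s)  # per-user submit quota full; wait and retry
            continue
        raise RuntimeError(proc.stderr.strip())
```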
+620,1574994,"TERMINAL",0,0,"944740",,terminal_output +621,1575146,"TERMINAL",0,0,"[?1049l\r[?1l>]0;tum_cte0515@hkn1991:~/Projects/jafar_jobs_2]633;D;0",,terminal_output +622,1796671,"TERMINAL",0,0,"queue",,terminal_command +623,1796743,"TERMINAL",0,0,"]633;E;2025-07-24 14:30:11 queue;406cfb31-2341-454a-afa8-cae7781806b2]633;C[?1049h(B[?7hEvery 1.0s: squeue --mehkn1991.localdomain: Thu Jul 24 14:30:11 2025JOBID PARTITION NAME USER ST\tTIME NODES NODELIST(REASON)3371237 accelerat train_dy tum_cte0 R 13:55:56\t 8 hkn[0618,0625-0626,0628-0631,0634]3371238 accelerat train_dy tum_cte0 R 13:55:56\t 2 hkn[0706,0710]3372631 accelerat train_dy tum_cte0 R16:59\t 2 hkn[0515,0622]3372629 accelerat train_dy tum_cte0 R 1:02:22\t 8 hkn[0410,0429,0520,0607,0610,0810,0814,0817]3373110 dev_accel train_dy tum_cte0 PD\t0:00\t 1 (QOSMaxJobsPerUserLimit)3373109 dev_accel train_dy tum_cte0 PD\t0:00\t 1 (QOSMaxJobsPerUserLimit)3373108 dev_accel train_dy tum_cte0 PD\t0:00\t 1 (QOSMaxJobsPerUserLimit)3373107 dev_accel train_dy tum_cte0 R\t3:14\t 1 hkn0403",,terminal_output +624,1797797,"TERMINAL",0,0,"2777:0035",,terminal_output +625,1798931,"TERMINAL",0,0,"388146",,terminal_output +626,1799952,"TERMINAL",0,0,"499257",,terminal_output +627,1800842,"TERMINAL",0,0,"[?1049l\r[?1l>]0;tum_cte0515@hkn1991:~/Projects/jafar_jobs_2]633;D;0",,terminal_output +628,1990793,"TERMINAL",0,0,"queue",,terminal_command +629,1990839,"TERMINAL",0,0,"]633;E;2025-07-24 14:33:25 queue;406cfb31-2341-454a-afa8-cae7781806b2]633;C",,terminal_output +630,1990918,"TERMINAL",0,0,"[?1049h(B[?7hEvery 1.0s: squeue --mehkn1991.localdomain: Thu Jul 24 14:33:25 2025JOBID PARTITION NAME USER ST\tTIME NODES NODELIST(REASON)3371237 accelerat train_dy tum_cte0 R 13:59:10\t 8 hkn[0618,0625-0626,0628-0631,0634]3371238 accelerat train_dy tum_cte0 R 13:59:10\t 2 hkn[0706,0710]3372631 accelerat train_dy tum_cte0 R20:13\t 2 hkn[0515,0622]3372629 accelerat train_dy tum_cte0 R 1:05:36\t 8 hkn[0410,0429,0520,0607,0610,0810,0814,0817]3373110 dev_accel train_dy tum_cte0 PD\t0:00\t 1 (QOSMaxJobsPerUserLimit)3373109 dev_accel train_dy tum_cte0 PD\t0:00\t 1 (QOSMaxJobsPerUserLimit)3373108 dev_accel train_dy tum_cte0 PD\t0:00\t 1 (QOSMaxJobsPerUserLimit)",,terminal_output +631,1991957,"TERMINAL",0,0,"61147",,terminal_output +632,1992958,"TERMINAL",0,0,"72258",,terminal_output +633,1994000,"TERMINAL",0,0,"83369",,terminal_output +634,1995132,"TERMINAL",0,0,"944740",,terminal_output +635,1996160,"TERMINAL",0,0,"305581",,terminal_output +636,1997138,"TERMINAL",0,0,"16692",,terminal_output +637,1998183,"TERMINAL",0,0,"277203",,terminal_output +638,1999234,"TERMINAL",0,0,"38814",,terminal_output +639,2000276,"TERMINAL",0,0,"49925",,terminal_output +640,2001377,"TERMINAL",0,0,"5212147",,terminal_output +641,2002363,"TERMINAL",0,0,"72258",,terminal_output +642,2003405,"TERMINAL",0,0,"83369",,terminal_output +643,2003715,"TERMINAL",0,0,"[?1049l\r[?1l>]0;tum_cte0515@hkn1991:~/Projects/jafar_jobs_2]633;D;0",,terminal_output +644,2006189,"TERMINAL",0,0,"queue",,terminal_command +645,2006239,"TERMINAL",0,0,"]633;E;2025-07-24 14:33:40 queue;406cfb31-2341-454a-afa8-cae7781806b2]633;C[?1049h(B[?7hEvery 1.0s: squeue --mehkn1991.localdomain: Thu Jul 24 14:33:40 2025JOBID PARTITION NAME USER ST\tTIME NODES NODELIST(REASON)3371237 accelerat train_dy tum_cte0 R 13:59:25\t 8 hkn[0618,0625-0626,0628-0631,0634]3371238 accelerat train_dy tum_cte0 R 13:59:25\t 2 hkn[0706,0710]3372631 accelerat train_dy tum_cte0 R20:28\t 2 hkn[0515,0622]3372629 accelerat train_dy tum_cte0 R 1:05:51\t 8 
hkn[0410,0429,0520,0607,0610,0810,0814,0817]3373110 dev_accel train_dy tum_cte0 PD\t0:00\t 1 (QOSMaxJobsPerUserLimit)3373109 dev_accel train_dy tum_cte0 PD\t0:00\t 1 (QOSMaxJobsPerUserLimit)3373108 dev_accel train_dy tum_cte0 PD\t0:00\t 1 (QOSMaxJobsPerUserLimit)",,terminal_output +646,2007315,"TERMINAL",0,0,"16692",,terminal_output +647,2008340,"TERMINAL",0,0,"288314",,terminal_output +648,2009381,"TERMINAL",0,0,"49925",,terminal_output +649,2010490,"TERMINAL",0,0,"5303036",,terminal_output +650,2011529,"TERMINAL",0,0,"61147",,terminal_output +651,2011590,"TERMINAL",0,0,"[?1049l\r[?1l>]0;tum_cte0515@hkn1991:~/Projects/jafar_jobs_2]633;D;0",,terminal_output +652,2014237,"TERMINAL",0,0,"idling",,terminal_command +653,2014293,"TERMINAL",0,0,"]633;E;2025-07-24 14:33:48 idling;406cfb31-2341-454a-afa8-cae7781806b2]633;C[?1049h(B[?7hEvery 1.0s: sinfo_t_idlehkn1991.localdomain: Thu Jul 24 14:33:48 2025Partition dev_cpuonly:\t 9 nodes idle\rPartition cpuonly:\t 4 nodes idle\rPartition dev_accelerated:\t 2 nodes idle\rPartition accelerated:\t 0 nodes idle\rPartition dev_accelerated-h100 :\t 0 nodes idle\rPartition accelerated-h100:\t 1 nodes idle\rPartition large:\t 7 nodes idle",,terminal_output +654,2015334,"TERMINAL",0,0,"9",,terminal_output +655,2016430,"TERMINAL",0,0,"51",,terminal_output +656,2017452,"TERMINAL",0,0,"2",,terminal_output +657,2018459,"TERMINAL",0,0,"3",,terminal_output +658,2019495,"TERMINAL",0,0,"4",,terminal_output +659,2020629,"TERMINAL",0,0,"5",,terminal_output +660,2021574,"TERMINAL",0,0,"6",,terminal_output +661,2022674,"TERMINAL",0,0,"7",,terminal_output +662,2023701,"TERMINAL",0,0,"8",,terminal_output +663,2024723,"TERMINAL",0,0,"9",,terminal_output +664,2025849,"TERMINAL",0,0,"4:00",,terminal_output +665,2026148,"TERMINAL",0,0,"[?1049l\r[?1l>]0;tum_cte0515@hkn1991:~/Projects/jafar_jobs_2]633;D;0",,terminal_output +666,2398135,"slurm/jobs/mihir/horeka/mask_prob_fix/train_dynamics_2_nodes.sbatch",0,0,"#!/usr/bin/env bash\n\n#SBATCH --nodes=2\n#SBATCH --ntasks-per-node=4\n#SBATCH --time=48:00:00\n#SBATCH --partition=accelerated\n#SBATCH --cpus-per-task=5\n#SBATCH --gres=gpu:4\n#SBATCH --output=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir/maskgit-maskprob-fix/dynamics-cotraining/%x_%j.log\n#SBATCH --error=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir/maskgit-maskprob-fix/dynamics-cotraining/%x_%j.log\n#SBATCH --job-name=train_dynamics_maskprob_fix_2_node\n#SBATCH --requeue\n#SBATCH --signal=b:usr1@300 # 5 min before timeout\n\n# --- signal trap to requeue job before timeout ---\nrequeue_job() {\n echo ""[$(date)] caught sigusr1 (timeout warning), requeueing slurm job $SLURM_JOB_ID...""\n # optional: trigger checkpoint saving here\n # e.g., touch $checkpoint_dir/requeue_trigger\n scontrol requeue $SLURM_JOB_ID\n exit 0\n}\n\ntrap requeue_job sigusr1\n\n# set checkpoint flag based on restart count\nrestart_count=$(scontrol show job $SLURM_JOB_ID | grep -o 'Restarts=[0-9]*' | cut -d'=' -f2)\n\nif [ $restart_count -eq 0 ]; then\n restore_ckpt_flag=""--no-restore-ckpt""\nelse\n restore_ckpt_flag=""--restore-ckpt""\nfi\n\n\n\n# Log the sbatch script\ncat $0\n\nmodule unload mpi/openmpi/5.0\nmodule unload devel/cuda/12.4\nsource 
.venv/bin/activate\n\narray_records_dir=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_new/open_ai_minecraft_arrayrecords_chunked\n\njob_name=$SLURM_JOB_NAME\nslurm_job_id=$SLURM_JOB_ID\n\nCHECKPOINT_DIR=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/maskgit-maskprob-fix/$job_name/$slurm_job_id\nmkdir -p $CHECKPOINT_DIR\n\n# tokenizer with the new structure supporting larger ffn_dim\ntokenizer_ckpt_dir=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data/checkpoints/train_tokenizer_lr_sweep_1e-4_larger_ffn/\n\nenv | grep SLURM\n\nsrun python train_dynamics.py \\n --save_ckpt \\n $restore_ckpt_flag \\n --wandb_id $SLURM_JOB_ID \\n --ckpt_dir $CHECKPOINT_DIR \\n --batch_size=96 \\n --init_lr=0 \\n --max_lr=2e-5 \\n --log_image_interval=1000 \\n --log \\n --log_checkpoint_interval=1000 \\n --name=dynamics-maskprob-fix-2-node-$slurm_job_id \\n --tags dynamics maskprob-fix 2-node \\n --entity instant-uv \\n --project jafar \\n --dyna_dim=1024 \\n --dyna_num_blocks=16 \\n --dyna_num_heads=16 \\n --dyna_ffn_dim=4096 \\n --tokenizer_checkpoint=$tokenizer_ckpt_dir \\n --data_dir $array_records_dir &\n\nchild_pid=$!\n\nwait $child_pid\n",shellscript,tab +667,2401591,"slurm/jobs/mihir/horeka/causal_big_runs/train_dynamics_2_nodes.sbatch",0,0,"#!/usr/bin/env bash\n\n#SBATCH --nodes=2\n#SBATCH --ntasks-per-node=4\n#SBATCH --time=48:00:00\n#SBATCH --partition=accelerated\n#SBATCH --cpus-per-task=5\n#SBATCH --gres=gpu:4\n#SBATCH --output=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir/maskgit-maskprob-fix/dynamics-cotraining/%x_%j.log\n#SBATCH --error=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir/maskgit-maskprob-fix/dynamics-cotraining/%x_%j.log\n#SBATCH --job-name=train_dynamics_maskprob_fix_2_node\n#SBATCH --requeue\n#SBATCH --signal=b:usr1@300 # 5 min before timeout\n\n# --- signal trap to requeue job before timeout ---\nrequeue_job() {\n echo ""[$(date)] caught sigusr1 (timeout warning), requeueing slurm job $SLURM_JOB_ID...""\n # optional: trigger checkpoint saving here\n # e.g., touch $checkpoint_dir/requeue_trigger\n scontrol requeue $SLURM_JOB_ID\n exit 0\n}\n\ntrap requeue_job sigusr1\n\n# set checkpoint flag based on restart count\nrestart_count=$(scontrol show job $SLURM_JOB_ID | grep -o 'Restarts=[0-9]*' | cut -d'=' -f2)\n\nif [ $restart_count -eq 0 ]; then\n restore_ckpt_flag=""--no-restore-ckpt""\nelse\n restore_ckpt_flag=""--restore-ckpt""\nfi\n\n\n\n# Log the sbatch script\ncat $0\n\nmodule unload mpi/openmpi/5.0\nmodule unload devel/cuda/12.4\nsource .venv/bin/activate\n\narray_records_dir=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_new/open_ai_minecraft_arrayrecords_chunked\n\njob_name=$SLURM_JOB_NAME\nslurm_job_id=$SLURM_JOB_ID\n\nCHECKPOINT_DIR=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/maskgit-maskprob-fix/$job_name/$slurm_job_id\nmkdir -p $CHECKPOINT_DIR\n\n# tokenizer with the new structure supporting larger ffn_dim\ntokenizer_ckpt_dir=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data/checkpoints/train_tokenizer_lr_sweep_1e-4_larger_ffn/\n\nenv | grep SLURM\n\nsrun python train_dynamics.py \\n --save_ckpt \\n $restore_ckpt_flag \\n --wandb_id $SLURM_JOB_ID \\n --ckpt_dir $CHECKPOINT_DIR \\n --batch_size=96 \\n --init_lr=0 \\n --max_lr=2e-5 \\n --log_image_interval=1000 \\n --log \\n --log_checkpoint_interval=1000 \\n --name=dynamics-maskprob-fix-2-node-$slurm_job_id \\n --tags dynamics maskprob-fix 2-node \\n --entity instant-uv \\n 
--project jafar \\n --dyna_dim=1024 \\n --dyna_num_blocks=16 \\n --dyna_num_heads=16 \\n --dyna_ffn_dim=4096 \\n --tokenizer_checkpoint=$tokenizer_ckpt_dir \\n --data_dir $array_records_dir &\n\nchild_pid=$!\n\nwait $child_pid\n",shellscript,tab +668,2406298,"slurm/jobs/mihir/horeka/causal_big_runs/train_dynamics_2_nodes.sbatch",482,0,"",shellscript,selection_mouse +669,2407149,"slurm/jobs/mihir/horeka/causal_big_runs/train_dynamics_2_nodes.sbatch",347,0,"",shellscript,selection_mouse +670,2407712,"slurm/jobs/mihir/horeka/causal_big_runs/train_dynamics_2_nodes.sbatch",485,0,"",shellscript,selection_mouse +671,2409644,"slurm/jobs/mihir/horeka/causal_big_runs/train_dynamics_2_nodes.sbatch",553,1,"m",shellscript,selection_command +672,2409793,"slurm/jobs/mihir/horeka/causal_big_runs/train_dynamics_2_nodes.sbatch",1549,2,"ma",shellscript,selection_command +673,2409914,"slurm/jobs/mihir/horeka/causal_big_runs/train_dynamics_2_nodes.sbatch",1549,3,"mas",shellscript,selection_command +674,2410058,"slurm/jobs/mihir/horeka/causal_big_runs/train_dynamics_2_nodes.sbatch",1549,4,"mask",shellscript,selection_command +675,2410469,"slurm/jobs/mihir/horeka/causal_big_runs/train_dynamics_2_nodes.sbatch",1557,5,"maskp",shellscript,selection_command +676,2410841,"slurm/jobs/mihir/horeka/causal_big_runs/train_dynamics_2_nodes.sbatch",1557,6,"maskpr",shellscript,selection_command +677,2411911,"slurm/jobs/mihir/horeka/causal_big_runs/train_dynamics_2_nodes.sbatch",1557,7,"maskpro",shellscript,selection_command +678,2412375,"slurm/jobs/mihir/horeka/causal_big_runs/train_dynamics_2_nodes.sbatch",1557,8,"maskprob",shellscript,selection_command +679,2424035,"slurm/jobs/mihir/horeka/causal_big_runs/train_dynamics_2_nodes.sbatch",483,9,"maskprob_",shellscript,selection_command +680,2424266,"slurm/jobs/mihir/horeka/causal_big_runs/train_dynamics_2_nodes.sbatch",483,10,"maskprob_f",shellscript,selection_command +681,2424423,"slurm/jobs/mihir/horeka/causal_big_runs/train_dynamics_2_nodes.sbatch",483,11,"maskprob_fi",shellscript,selection_command +682,2424607,"slurm/jobs/mihir/horeka/causal_big_runs/train_dynamics_2_nodes.sbatch",483,12,"maskprob_fix",shellscript,selection_command +683,2454072,"slurm/jobs/mihir/horeka/causal_big_runs/train_dynamics_2_nodes.sbatch",483,12,"causal",shellscript,content +684,2456793,"slurm/jobs/mihir/horeka/causal_big_runs/train_dynamics_2_nodes.sbatch",1551,12,"maskprob-fix",shellscript,selection_command +685,2460264,"slurm/jobs/mihir/horeka/causal_big_runs/train_dynamics_2_nodes.sbatch",1551,12,"causal",shellscript,content +686,2460269,"slurm/jobs/mihir/horeka/causal_big_runs/train_dynamics_2_nodes.sbatch",2122,12,"maskprob-fix",shellscript,selection_command +687,2460639,"slurm/jobs/mihir/horeka/causal_big_runs/train_dynamics_2_nodes.sbatch",2122,12,"causal",shellscript,content +688,2460643,"slurm/jobs/mihir/horeka/causal_big_runs/train_dynamics_2_nodes.sbatch",2172,12,"maskprob-fix",shellscript,selection_command +689,2460986,"slurm/jobs/mihir/horeka/causal_big_runs/train_dynamics_2_nodes.sbatch",2172,12,"causal",shellscript,content +690,2460989,"slurm/jobs/mihir/horeka/causal_big_runs/train_dynamics_2_nodes.sbatch",267,12,"maskprob-fix",shellscript,selection_command +691,2462784,"slurm/jobs/mihir/horeka/causal_big_runs/train_dynamics_2_nodes.sbatch",267,12,"causal",shellscript,content +692,2462787,"slurm/jobs/mihir/horeka/causal_big_runs/train_dynamics_2_nodes.sbatch",400,12,"maskprob-fix",shellscript,selection_command 
+693,2462976,"slurm/jobs/mihir/horeka/causal_big_runs/train_dynamics_2_nodes.sbatch",400,12,"causal",shellscript,content +694,2465317,"slurm/jobs/mihir/horeka/causal_big_runs/train_dynamics_2_nodes.sbatch",266,0,"",shellscript,selection_mouse +695,2469639,"slurm/jobs/mihir/horeka/causal_big_runs/train_dynamics_2_nodes.sbatch",278,1,"m",shellscript,selection_command +696,2469754,"slurm/jobs/mihir/horeka/causal_big_runs/train_dynamics_2_nodes.sbatch",392,2,"ma",shellscript,selection_command +697,2469860,"slurm/jobs/mihir/horeka/causal_big_runs/train_dynamics_2_nodes.sbatch",392,3,"mas",shellscript,selection_command +698,2469916,"slurm/jobs/mihir/horeka/causal_big_runs/train_dynamics_2_nodes.sbatch",392,4,"mask",shellscript,selection_command +699,2470207,"slurm/jobs/mihir/horeka/causal_big_runs/train_dynamics_2_nodes.sbatch",392,5,"maskg",shellscript,selection_command +700,2470403,"slurm/jobs/mihir/horeka/causal_big_runs/train_dynamics_2_nodes.sbatch",392,6,"maskgi",shellscript,selection_command +701,2470533,"slurm/jobs/mihir/horeka/causal_big_runs/train_dynamics_2_nodes.sbatch",392,7,"maskgit",shellscript,selection_command +702,2472155,"slurm/jobs/mihir/horeka/causal_big_runs/train_dynamics_2_nodes.sbatch",392,7,"causal",shellscript,content +703,2472159,"slurm/jobs/mihir/horeka/causal_big_runs/train_dynamics_2_nodes.sbatch",1530,7,"maskgit",shellscript,selection_command +704,2473515,"slurm/jobs/mihir/horeka/causal_big_runs/train_dynamics_2_nodes.sbatch",1530,7,"causal",shellscript,content +705,2473519,"slurm/jobs/mihir/horeka/causal_big_runs/train_dynamics_2_nodes.sbatch",259,7,"maskgit",shellscript,selection_command +706,2474101,"slurm/jobs/mihir/horeka/causal_big_runs/train_dynamics_2_nodes.sbatch",259,7,"causal",shellscript,content +707,2476636,"slurm/jobs/mihir/horeka/causal_big_runs/train_dynamics_2_nodes.sbatch",1535,0,"",shellscript,selection_mouse +708,2476773,"slurm/jobs/mihir/horeka/causal_big_runs/train_dynamics_2_nodes.sbatch",1529,6,"causal",shellscript,selection_mouse +709,2477840,"slurm/jobs/mihir/horeka/causal_big_runs/train_dynamics_2_nodes.sbatch",1529,6,"",shellscript,content +710,2478484,"slurm/jobs/mihir/horeka/causal_big_runs/train_dynamics_2_nodes.sbatch",1529,1,"",shellscript,content +711,2480270,"slurm/jobs/mihir/horeka/causal_big_runs/train_dynamics_2_nodes.sbatch",394,0,"",shellscript,selection_mouse +712,2480427,"slurm/jobs/mihir/horeka/causal_big_runs/train_dynamics_2_nodes.sbatch",391,6,"causal",shellscript,selection_mouse +713,2480791,"slurm/jobs/mihir/horeka/causal_big_runs/train_dynamics_2_nodes.sbatch",391,6,"",shellscript,content +714,2481096,"slurm/jobs/mihir/horeka/causal_big_runs/train_dynamics_2_nodes.sbatch",391,1,"",shellscript,content +715,2482257,"slurm/jobs/mihir/horeka/causal_big_runs/train_dynamics_2_nodes.sbatch",262,0,"",shellscript,selection_mouse +716,2482410,"slurm/jobs/mihir/horeka/causal_big_runs/train_dynamics_2_nodes.sbatch",259,6,"causal",shellscript,selection_mouse +717,2482567,"slurm/jobs/mihir/horeka/causal_big_runs/train_dynamics_2_nodes.sbatch",259,6,"",shellscript,content +718,2483023,"slurm/jobs/mihir/horeka/causal_big_runs/train_dynamics_2_nodes.sbatch",259,1,"",shellscript,content +719,2679355,"slurm/dev/mihir/horeka/causal_fit_modelsizes/train_dynamics_new_arch_255M.sbatch",0,0,"",shellscript,tab +720,2681087,"slurm/dev/mihir/horeka/causal_fit_modelsizes/train_dynamics_new_arch_255M.sbatch",1554,0,"",shellscript,selection_mouse 
+721,2681089,"slurm/dev/mihir/horeka/causal_fit_modelsizes/train_dynamics_new_arch_255M.sbatch",1553,0,"",shellscript,selection_command +722,2681231,"slurm/dev/mihir/horeka/causal_fit_modelsizes/train_dynamics_new_arch_255M.sbatch",1553,1,"6",shellscript,selection_mouse +723,2681232,"slurm/dev/mihir/horeka/causal_fit_modelsizes/train_dynamics_new_arch_255M.sbatch",1554,0,"",shellscript,selection_command +724,2681252,"slurm/dev/mihir/horeka/causal_fit_modelsizes/train_dynamics_new_arch_255M.sbatch",1553,1,"6",shellscript,selection_mouse +725,2681321,"slurm/dev/mihir/horeka/causal_fit_modelsizes/train_dynamics_new_arch_255M.sbatch",1549,5,"=4096",shellscript,selection_mouse +726,2681322,"slurm/dev/mihir/horeka/causal_fit_modelsizes/train_dynamics_new_arch_255M.sbatch",1521,33,"eads=16 \\n --dyna_ffn_dim=4096",shellscript,selection_mouse +727,2681322,"slurm/dev/mihir/horeka/causal_fit_modelsizes/train_dynamics_new_arch_255M.sbatch",1517,37,"um_heads=16 \\n --dyna_ffn_dim=4096",shellscript,selection_mouse +728,2681324,"slurm/dev/mihir/horeka/causal_fit_modelsizes/train_dynamics_new_arch_255M.sbatch",1515,39,"_num_heads=16 \\n --dyna_ffn_dim=4096",shellscript,selection_mouse +729,2681337,"slurm/dev/mihir/horeka/causal_fit_modelsizes/train_dynamics_new_arch_255M.sbatch",1513,41,"na_num_heads=16 \\n --dyna_ffn_dim=4096",shellscript,selection_mouse +730,2681352,"slurm/dev/mihir/horeka/causal_fit_modelsizes/train_dynamics_new_arch_255M.sbatch",1512,42,"yna_num_heads=16 \\n --dyna_ffn_dim=4096",shellscript,selection_mouse +731,2681419,"slurm/dev/mihir/horeka/causal_fit_modelsizes/train_dynamics_new_arch_255M.sbatch",1484,70,"dyna_num_blocks=16 \\n --dyna_num_heads=16 \\n --dyna_ffn_dim=4096",shellscript,selection_mouse +732,2681419,"slurm/dev/mihir/horeka/causal_fit_modelsizes/train_dynamics_new_arch_255M.sbatch",1483,71,"-dyna_num_blocks=16 \\n --dyna_num_heads=16 \\n --dyna_ffn_dim=4096",shellscript,selection_mouse +733,2681420,"slurm/dev/mihir/horeka/causal_fit_modelsizes/train_dynamics_new_arch_255M.sbatch",1482,72,"--dyna_num_blocks=16 \\n --dyna_num_heads=16 \\n --dyna_ffn_dim=4096",shellscript,selection_mouse +734,2681423,"slurm/dev/mihir/horeka/causal_fit_modelsizes/train_dynamics_new_arch_255M.sbatch",1481,73," --dyna_num_blocks=16 \\n --dyna_num_heads=16 \\n --dyna_ffn_dim=4096",shellscript,selection_mouse +735,2681450,"slurm/dev/mihir/horeka/causal_fit_modelsizes/train_dynamics_new_arch_255M.sbatch",1480,74," --dyna_num_blocks=16 \\n --dyna_num_heads=16 \\n --dyna_ffn_dim=4096",shellscript,selection_mouse +736,2681497,"slurm/dev/mihir/horeka/causal_fit_modelsizes/train_dynamics_new_arch_255M.sbatch",1479,75," --dyna_num_blocks=16 \\n --dyna_num_heads=16 \\n --dyna_ffn_dim=4096",shellscript,selection_mouse +737,2681552,"slurm/dev/mihir/horeka/causal_fit_modelsizes/train_dynamics_new_arch_255M.sbatch",1478,76," --dyna_num_blocks=16 \\n --dyna_num_heads=16 \\n --dyna_ffn_dim=4096",shellscript,selection_mouse +738,2684702,"slurm/dev/mihir/horeka/causal_fit_modelsizes/train_dynamics_new_arch_255M.sbatch",1551,0,"",shellscript,selection_mouse +739,2685028,"slurm/dev/mihir/horeka/causal_fit_modelsizes/train_dynamics_new_arch_255M.sbatch",1554,0,"",shellscript,selection_mouse +740,2685043,"slurm/dev/mihir/horeka/causal_fit_modelsizes/train_dynamics_new_arch_255M.sbatch",1553,0,"",shellscript,selection_command +741,2685156,"slurm/dev/mihir/horeka/causal_fit_modelsizes/train_dynamics_new_arch_255M.sbatch",1553,1,"6",shellscript,selection_mouse 
+742,2685157,"slurm/dev/mihir/horeka/causal_fit_modelsizes/train_dynamics_new_arch_255M.sbatch",1554,0,"",shellscript,selection_command +743,2685176,"slurm/dev/mihir/horeka/causal_fit_modelsizes/train_dynamics_new_arch_255M.sbatch",1552,2,"96",shellscript,selection_mouse +744,2685200,"slurm/dev/mihir/horeka/causal_fit_modelsizes/train_dynamics_new_arch_255M.sbatch",1521,33,"eads=16 \\n --dyna_ffn_dim=4096",shellscript,selection_mouse +745,2685214,"slurm/dev/mihir/horeka/causal_fit_modelsizes/train_dynamics_new_arch_255M.sbatch",1518,36,"m_heads=16 \\n --dyna_ffn_dim=4096",shellscript,selection_mouse +746,2685234,"slurm/dev/mihir/horeka/causal_fit_modelsizes/train_dynamics_new_arch_255M.sbatch",1485,69,"yna_num_blocks=16 \\n --dyna_num_heads=16 \\n --dyna_ffn_dim=4096",shellscript,selection_mouse +747,2685260,"slurm/dev/mihir/horeka/causal_fit_modelsizes/train_dynamics_new_arch_255M.sbatch",1483,71,"-dyna_num_blocks=16 \\n --dyna_num_heads=16 \\n --dyna_ffn_dim=4096",shellscript,selection_mouse +748,2685276,"slurm/dev/mihir/horeka/causal_fit_modelsizes/train_dynamics_new_arch_255M.sbatch",1480,74," --dyna_num_blocks=16 \\n --dyna_num_heads=16 \\n --dyna_ffn_dim=4096",shellscript,selection_mouse +749,2685277,"slurm/dev/mihir/horeka/causal_fit_modelsizes/train_dynamics_new_arch_255M.sbatch",1478,76," --dyna_num_blocks=16 \\n --dyna_num_heads=16 \\n --dyna_ffn_dim=4096",shellscript,selection_mouse +750,2685332,"slurm/dev/mihir/horeka/causal_fit_modelsizes/train_dynamics_new_arch_255M.sbatch",1456,98," --dyna_dim=1024 \\n --dyna_num_blocks=16 \\n --dyna_num_heads=16 \\n --dyna_ffn_dim=4096",shellscript,selection_mouse +751,2689068,"slurm/jobs/mihir/horeka/causal_big_runs/train_dynamics_2_nodes.sbatch",0,0,"",shellscript,tab +752,2690871,"slurm/jobs/mihir/horeka/causal_big_runs/train_dynamics_2_nodes.sbatch",2028,0,"",shellscript,selection_mouse +753,2690873,"slurm/jobs/mihir/horeka/causal_big_runs/train_dynamics_2_nodes.sbatch",2027,0,"",shellscript,selection_command +754,2691500,"slurm/jobs/mihir/horeka/causal_big_runs/train_dynamics_2_nodes.sbatch",2221,0,"",shellscript,selection_mouse +755,2691500,"slurm/jobs/mihir/horeka/causal_big_runs/train_dynamics_2_nodes.sbatch",2220,0,"",shellscript,selection_command +756,2693580,"slurm/jobs/mihir/horeka/causal_big_runs/train_dynamics_2_nodes.sbatch",2300,0,"",shellscript,selection_mouse +757,2693586,"slurm/jobs/mihir/horeka/causal_big_runs/train_dynamics_2_nodes.sbatch",2299,0,"",shellscript,selection_command +758,2693749,"slurm/jobs/mihir/horeka/causal_big_runs/train_dynamics_2_nodes.sbatch",2299,1,"\",shellscript,selection_mouse +759,2693751,"slurm/jobs/mihir/horeka/causal_big_runs/train_dynamics_2_nodes.sbatch",2300,0,"",shellscript,selection_command +760,2693782,"slurm/jobs/mihir/horeka/causal_big_runs/train_dynamics_2_nodes.sbatch",2298,2," \",shellscript,selection_mouse +761,2693805,"slurm/jobs/mihir/horeka/causal_big_runs/train_dynamics_2_nodes.sbatch",2296,4,"96 \",shellscript,selection_mouse +762,2693872,"slurm/jobs/mihir/horeka/causal_big_runs/train_dynamics_2_nodes.sbatch",2267,33,"ds=16 \\n --dyna_ffn_dim=4096 \",shellscript,selection_mouse +763,2693873,"slurm/jobs/mihir/horeka/causal_big_runs/train_dynamics_2_nodes.sbatch",2232,68,"_num_blocks=16 \\n --dyna_num_heads=16 \\n --dyna_ffn_dim=4096 \",shellscript,selection_mouse +764,2693891,"slurm/jobs/mihir/horeka/causal_big_runs/train_dynamics_2_nodes.sbatch",2228,72,"dyna_num_blocks=16 \\n --dyna_num_heads=16 \\n --dyna_ffn_dim=4096 \",shellscript,selection_mouse 
+765,2693892,"slurm/jobs/mihir/horeka/causal_big_runs/train_dynamics_2_nodes.sbatch",2204,96,"--dyna_dim=1024 \\n --dyna_num_blocks=16 \\n --dyna_num_heads=16 \\n --dyna_ffn_dim=4096 \",shellscript,selection_mouse +766,2693899,"slurm/jobs/mihir/horeka/causal_big_runs/train_dynamics_2_nodes.sbatch",2203,97," --dyna_dim=1024 \\n --dyna_num_blocks=16 \\n --dyna_num_heads=16 \\n --dyna_ffn_dim=4096 \",shellscript,selection_mouse +767,2693914,"slurm/jobs/mihir/horeka/causal_big_runs/train_dynamics_2_nodes.sbatch",2201,99," --dyna_dim=1024 \\n --dyna_num_blocks=16 \\n --dyna_num_heads=16 \\n --dyna_ffn_dim=4096 \",shellscript,selection_mouse +768,2693969,"slurm/jobs/mihir/horeka/causal_big_runs/train_dynamics_2_nodes.sbatch",2200,100," --dyna_dim=1024 \\n --dyna_num_blocks=16 \\n --dyna_num_heads=16 \\n --dyna_ffn_dim=4096 \",shellscript,selection_mouse +769,2696359,"slurm/jobs/mihir/horeka/causal_big_runs/train_dynamics_2_nodes.sbatch",2259,0,"",shellscript,selection_mouse +770,2698368,"slurm/dev/mihir/horeka/causal_fit_modelsizes/train_dynamics_new_arch_255M.sbatch",0,0,"",shellscript,tab +771,2704058,"slurm/jobs/mihir/horeka/causal_big_runs/train_dynamics_2_nodes.sbatch",0,0,"",shellscript,tab +772,3089510,"slurm/jobs/mihir/horeka/causal_big_runs/train_dynamics_2_nodes.sbatch",2300,0,"",shellscript,selection_mouse +773,3089525,"slurm/jobs/mihir/horeka/causal_big_runs/train_dynamics_2_nodes.sbatch",2299,0,"",shellscript,selection_command +774,3099817,"TERMINAL",0,0,"queue",,terminal_command +775,3099871,"TERMINAL",0,0,"]633;E;2025-07-24 14:51:54 queue;406cfb31-2341-454a-afa8-cae7781806b2]633;C",,terminal_output +776,3099920,"TERMINAL",0,0,"[?1049h(B[?7hEvery 1.0s: squeue --mehkn1991.localdomain: Thu Jul 24 14:51:54 2025JOBID PARTITION NAME USER ST\tTIME NODES NODELIST(REASON)3371237 accelerat train_dy tum_cte0 R 14:17:39\t 8 hkn[0618,0625-0626,0628-0631,0634]3371238 accelerat train_dy tum_cte0 R 14:17:39\t 2 hkn[0706,0710]3372631 accelerat train_dy tum_cte0 R38:42\t 2 hkn[0515,0622]3372629 accelerat train_dy tum_cte0 R 1:24:05\t 8 hkn[0410,0429,0520,0607,0610,0810,0814,0817]3373110 dev_accel train_dy tum_cte0 R\t4:53\t 1 hkn0401",,terminal_output +777,3100959,"TERMINAL",0,0,"54040364",,terminal_output +778,3102091,"TERMINAL",0,0,"611475",,terminal_output +779,3102229,"TERMINAL",0,0,"[?1049l\r[?1l>]0;tum_cte0515@hkn1991:~/Projects/jafar_jobs_2]633;D;0",,terminal_output +780,3104238,"slurm/jobs/mihir/horeka/causal_big_runs/train_dynamics_2_nodes.sbatch",0,0,"",shellscript,tab +781,3113616,"slurm/jobs/mihir/horeka/causal_big_runs/train_dynamics_2_nodes.sbatch",0,0,"",shellscript,tab +782,3118101,"slurm/dev/mihir/horeka/causal_fit_modelsizes/train_dynamics_new_arch_356M.sbatch",0,0,"",shellscript,tab +783,3120222,"slurm/dev/mihir/horeka/causal_fit_modelsizes/train_dynamics_new_arch_356M.sbatch",0,0,"",shellscript,tab +784,3141634,"slurm/dev/mihir/horeka/causal_fit_modelsizes/train_dynamics_new_arch_356M.sbatch",988,0,"",shellscript,selection_mouse +785,3143761,"slurm/jobs/mihir/horeka/causal_big_runs/train_dynamics_2_nodes.sbatch",0,0,"",shellscript,tab +786,3143762,"slurm/jobs/mihir/horeka/causal_big_runs/train_dynamics_2_nodes.sbatch",1831,0,"",shellscript,selection_mouse +787,3143833,"slurm/jobs/mihir/horeka/causal_big_runs/train_dynamics_2_nodes.sbatch",1824,9,"save_ckpt",shellscript,selection_mouse +788,3145580,"slurm/jobs/mihir/horeka/causal_big_runs/train_dynamics_2_nodes.sbatch",1872,0,"",shellscript,selection_mouse 
+789,3145760,"slurm/jobs/mihir/horeka/causal_big_runs/train_dynamics_2_nodes.sbatch",1867,8,"wandb_id",shellscript,selection_mouse +790,3147047,"slurm/jobs/mihir/horeka/causal_big_runs/train_dynamics_2_nodes.sbatch",1872,0,"",shellscript,selection_mouse +791,3147048,"slurm/jobs/mihir/horeka/causal_big_runs/train_dynamics_2_nodes.sbatch",1867,8,"wandb_id",shellscript,selection_mouse +792,3147702,"slurm/jobs/mihir/horeka/causal_big_runs/train_dynamics_2_nodes.sbatch",1848,0,"",shellscript,selection_mouse +793,3147852,"slurm/jobs/mihir/horeka/causal_big_runs/train_dynamics_2_nodes.sbatch",1841,17,"restore_ckpt_flag",shellscript,selection_mouse +794,3149603,"slurm/jobs/mihir/horeka/causal_big_runs/train_dynamics_2_nodes.sbatch",1974,0,"",shellscript,selection_mouse +795,3149724,"slurm/jobs/mihir/horeka/causal_big_runs/train_dynamics_2_nodes.sbatch",1971,6,"max_lr",shellscript,selection_mouse +796,3150341,"slurm/jobs/mihir/horeka/causal_big_runs/train_dynamics_2_nodes.sbatch",1959,0,"",shellscript,selection_mouse +797,3150506,"slurm/jobs/mihir/horeka/causal_big_runs/train_dynamics_2_nodes.sbatch",1953,7,"init_lr",shellscript,selection_mouse +798,3151227,"slurm/jobs/mihir/horeka/causal_big_runs/train_dynamics_2_nodes.sbatch",1938,0,"",shellscript,selection_mouse +799,3151380,"slurm/jobs/mihir/horeka/causal_big_runs/train_dynamics_2_nodes.sbatch",1931,10,"batch_size",shellscript,selection_mouse +800,3173630,"slurm/dev/mihir/horeka/causal_fit_modelsizes/train_dynamics_new_arch_255M.sbatch",0,0,"",shellscript,tab +801,3175426,"slurm/dev/mihir/horeka/causal_fit_modelsizes/train_dynamics_new_arch_255M.sbatch",0,0,"",shellscript,tab +802,3473156,"slurm/jobs/mihir/horeka/causal_big_runs/train_dynamics_2_nodes.sbatch",0,0,"",shellscript,tab +803,3473157,"slurm/jobs/mihir/horeka/causal_big_runs/train_dynamics_2_nodes.sbatch",1946,0,"",shellscript,selection_mouse +804,3473174,"slurm/jobs/mihir/horeka/causal_big_runs/train_dynamics_2_nodes.sbatch",1945,0,"",shellscript,selection_command +805,3473766,"slurm/dev/mihir/horeka/causal_fit_modelsizes/train_dynamics_new_arch_255M.sbatch",0,0,"",shellscript,tab +806,3473769,"slurm/dev/mihir/horeka/causal_fit_modelsizes/train_dynamics_new_arch_255M.sbatch",1554,0,"",shellscript,selection_mouse +807,3473784,"slurm/dev/mihir/horeka/causal_fit_modelsizes/train_dynamics_new_arch_255M.sbatch",1553,0,"",shellscript,selection_command +808,3572682,"slurm/dev/mihir/horeka/causal_fit_modelsizes/train_dynamics_new_arch_255M.sbatch",1554,0,"",shellscript,selection_mouse +809,3572697,"slurm/dev/mihir/horeka/causal_fit_modelsizes/train_dynamics_new_arch_255M.sbatch",1553,0,"",shellscript,selection_command +810,3619047,"slurm/dev/mihir/horeka/causal_fit_modelsizes/train_dynamics_new_arch_255M.sbatch",1554,0,"",shellscript,selection_mouse +811,3619064,"slurm/dev/mihir/horeka/causal_fit_modelsizes/train_dynamics_new_arch_255M.sbatch",1553,0,"",shellscript,selection_command +812,3619648,"slurm/dev/mihir/horeka/causal_fit_modelsizes/train_dynamics_new_arch_255M.sbatch",1554,0,"",shellscript,selection_mouse +813,3619665,"slurm/dev/mihir/horeka/causal_fit_modelsizes/train_dynamics_new_arch_255M.sbatch",1553,0,"",shellscript,selection_command +814,3620698,"slurm/jobs/mihir/horeka/causal_big_runs/train_dynamics_2_nodes.sbatch",0,0,"",shellscript,tab +815,3620699,"slurm/jobs/mihir/horeka/causal_big_runs/train_dynamics_2_nodes.sbatch",2300,0,"",shellscript,selection_mouse 
+816,3620711,"slurm/jobs/mihir/horeka/causal_big_runs/train_dynamics_2_nodes.sbatch",2299,0,"",shellscript,selection_command +817,3660593,"slurm/jobs/mihir/horeka/causal_big_runs/train_dynamics_2_nodes.sbatch",2417,0,"",shellscript,selection_mouse +818,3660775,"slurm/jobs/mihir/horeka/causal_big_runs/train_dynamics_2_nodes.sbatch",2416,1,"\n",shellscript,selection_mouse +819,3660785,"slurm/jobs/mihir/horeka/causal_big_runs/train_dynamics_2_nodes.sbatch",2400,17,"\nwait $child_pid\n",shellscript,selection_mouse +820,3660811,"slurm/jobs/mihir/horeka/causal_big_runs/train_dynamics_2_nodes.sbatch",2296,121,"96 \\n --tokenizer_checkpoint=$tokenizer_ckpt_dir \\n --data_dir $array_records_dir &\n\nchild_pid=$!\n\nwait $child_pid\n",shellscript,selection_mouse +821,3660812,"slurm/jobs/mihir/horeka/causal_big_runs/train_dynamics_2_nodes.sbatch",2168,249,"tant-uv \\n --project jafar \\n --dyna_dim=1024 \\n --dyna_num_blocks=16 \\n --dyna_num_heads=16 \\n --dyna_ffn_dim=4096 \\n --tokenizer_checkpoint=$tokenizer_ckpt_dir \\n --data_dir $array_records_dir &\n\nchild_pid=$!\n\nwait $child_pid\n",shellscript,selection_mouse +822,3660866,"slurm/jobs/mihir/horeka/causal_big_runs/train_dynamics_2_nodes.sbatch",1901,516,"t_dir $CHECKPOINT_DIR \\n --batch_size=96 \\n --init_lr=0 \\n --max_lr=2e-5 \\n --log_image_interval=1000 \\n --log \\n --log_checkpoint_interval=1000 \\n --name=dynamics-causal-2-node-$slurm_job_id \\n --tags dynamics causal 2-node \\n --entity instant-uv \\n --project jafar \\n --dyna_dim=1024 \\n --dyna_num_blocks=16 \\n --dyna_num_heads=16 \\n --dyna_ffn_dim=4096 \\n --tokenizer_checkpoint=$tokenizer_ckpt_dir \\n --data_dir $array_records_dir &\n\nchild_pid=$!\n\nwait $child_pid\n",shellscript,selection_mouse +823,3660873,"slurm/jobs/mihir/horeka/causal_big_runs/train_dynamics_2_nodes.sbatch",1161,1256,"cat $0\n\nmodule unload mpi/openmpi/5.0\nmodule unload devel/cuda/12.4\nsource .venv/bin/activate\n\narray_records_dir=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_new/open_ai_minecraft_arrayrecords_chunked\n\njob_name=$SLURM_JOB_NAME\nslurm_job_id=$SLURM_JOB_ID\n\nCHECKPOINT_DIR=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/causal/$job_name/$slurm_job_id\nmkdir -p $CHECKPOINT_DIR\n\n# tokenizer with the new structure supporting larger ffn_dim\ntokenizer_ckpt_dir=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data/checkpoints/train_tokenizer_lr_sweep_1e-4_larger_ffn/\n\nenv | grep SLURM\n\nsrun python train_dynamics.py \\n --save_ckpt \\n $restore_ckpt_flag \\n --wandb_id $SLURM_JOB_ID \\n --ckpt_dir $CHECKPOINT_DIR \\n --batch_size=96 \\n --init_lr=0 \\n --max_lr=2e-5 \\n --log_image_interval=1000 \\n --log \\n --log_checkpoint_interval=1000 \\n --name=dynamics-causal-2-node-$slurm_job_id \\n --tags dynamics causal 2-node \\n --entity instant-uv \\n --project jafar \\n --dyna_dim=1024 \\n --dyna_num_blocks=16 \\n --dyna_num_heads=16 \\n --dyna_ffn_dim=4096 \\n --tokenizer_checkpoint=$tokenizer_ckpt_dir \\n --data_dir $array_records_dir &\n\nchild_pid=$!\n\nwait $child_pid\n",shellscript,selection_mouse +824,3660873,"slurm/jobs/mihir/horeka/causal_big_runs/train_dynamics_2_nodes.sbatch",1131,1286,"fi\n\n\n\n# Log the sbatch script\ncat $0\n\nmodule unload mpi/openmpi/5.0\nmodule unload devel/cuda/12.4\nsource 
.venv/bin/activate\n\narray_records_dir=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_new/open_ai_minecraft_arrayrecords_chunked\n\njob_name=$SLURM_JOB_NAME\nslurm_job_id=$SLURM_JOB_ID\n\nCHECKPOINT_DIR=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/causal/$job_name/$slurm_job_id\nmkdir -p $CHECKPOINT_DIR\n\n# tokenizer with the new structure supporting larger ffn_dim\ntokenizer_ckpt_dir=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data/checkpoints/train_tokenizer_lr_sweep_1e-4_larger_ffn/\n\nenv | grep SLURM\n\nsrun python train_dynamics.py \\n --save_ckpt \\n $restore_ckpt_flag \\n --wandb_id $SLURM_JOB_ID \\n --ckpt_dir $CHECKPOINT_DIR \\n --batch_size=96 \\n --init_lr=0 \\n --max_lr=2e-5 \\n --log_image_interval=1000 \\n --log \\n --log_checkpoint_interval=1000 \\n --name=dynamics-causal-2-node-$slurm_job_id \\n --tags dynamics causal 2-node \\n --entity instant-uv \\n --project jafar \\n --dyna_dim=1024 \\n --dyna_num_blocks=16 \\n --dyna_num_heads=16 \\n --dyna_ffn_dim=4096 \\n --tokenizer_checkpoint=$tokenizer_ckpt_dir \\n --data_dir $array_records_dir &\n\nchild_pid=$!\n\nwait $child_pid\n",shellscript,selection_mouse +825,3660883,"slurm/jobs/mihir/horeka/causal_big_runs/train_dynamics_2_nodes.sbatch",871,1546,"\n# set checkpoint flag based on restart count\nrestart_count=$(scontrol show job $SLURM_JOB_ID | grep -o 'Restarts=[0-9]*' | cut -d'=' -f2)\n\nif [ $restart_count -eq 0 ]; then\n restore_ckpt_flag=""--no-restore-ckpt""\nelse\n restore_ckpt_flag=""--restore-ckpt""\nfi\n\n\n\n# Log the sbatch script\ncat $0\n\nmodule unload mpi/openmpi/5.0\nmodule unload devel/cuda/12.4\nsource .venv/bin/activate\n\narray_records_dir=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_new/open_ai_minecraft_arrayrecords_chunked\n\njob_name=$SLURM_JOB_NAME\nslurm_job_id=$SLURM_JOB_ID\n\nCHECKPOINT_DIR=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/causal/$job_name/$slurm_job_id\nmkdir -p $CHECKPOINT_DIR\n\n# tokenizer with the new structure supporting larger ffn_dim\ntokenizer_ckpt_dir=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data/checkpoints/train_tokenizer_lr_sweep_1e-4_larger_ffn/\n\nenv | grep SLURM\n\nsrun python train_dynamics.py \\n --save_ckpt \\n $restore_ckpt_flag \\n --wandb_id $SLURM_JOB_ID \\n --ckpt_dir $CHECKPOINT_DIR \\n --batch_size=96 \\n --init_lr=0 \\n --max_lr=2e-5 \\n --log_image_interval=1000 \\n --log \\n --log_checkpoint_interval=1000 \\n --name=dynamics-causal-2-node-$slurm_job_id \\n --tags dynamics causal 2-node \\n --entity instant-uv \\n --project jafar \\n --dyna_dim=1024 \\n --dyna_num_blocks=16 \\n --dyna_num_heads=16 \\n --dyna_ffn_dim=4096 \\n --tokenizer_checkpoint=$tokenizer_ckpt_dir \\n --data_dir $array_records_dir &\n\nchild_pid=$!\n\nwait $child_pid\n",shellscript,selection_mouse +826,3660899,"slurm/jobs/mihir/horeka/causal_big_runs/train_dynamics_2_nodes.sbatch",539,1878,"# --- signal trap to requeue job before timeout ---\nrequeue_job() {\n echo ""[$(date)] caught sigusr1 (timeout warning), requeueing slurm job $SLURM_JOB_ID...""\n # optional: trigger checkpoint saving here\n # e.g., touch $checkpoint_dir/requeue_trigger\n scontrol requeue $SLURM_JOB_ID\n exit 0\n}\n\ntrap requeue_job sigusr1\n\n# set checkpoint flag based on restart count\nrestart_count=$(scontrol show job $SLURM_JOB_ID | grep -o 'Restarts=[0-9]*' | cut -d'=' -f2)\n\nif [ $restart_count -eq 0 ]; then\n restore_ckpt_flag=""--no-restore-ckpt""\nelse\n 
restore_ckpt_flag=""--restore-ckpt""\nfi\n\n\n\n# Log the sbatch script\ncat $0\n\nmodule unload mpi/openmpi/5.0\nmodule unload devel/cuda/12.4\nsource .venv/bin/activate\n\narray_records_dir=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_new/open_ai_minecraft_arrayrecords_chunked\n\njob_name=$SLURM_JOB_NAME\nslurm_job_id=$SLURM_JOB_ID\n\nCHECKPOINT_DIR=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/causal/$job_name/$slurm_job_id\nmkdir -p $CHECKPOINT_DIR\n\n# tokenizer with the new structure supporting larger ffn_dim\ntokenizer_ckpt_dir=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data/checkpoints/train_tokenizer_lr_sweep_1e-4_larger_ffn/\n\nenv | grep SLURM\n\nsrun python train_dynamics.py \\n --save_ckpt \\n $restore_ckpt_flag \\n --wandb_id $SLURM_JOB_ID \\n --ckpt_dir $CHECKPOINT_DIR \\n --batch_size=96 \\n --init_lr=0 \\n --max_lr=2e-5 \\n --log_image_interval=1000 \\n --log \\n --log_checkpoint_interval=1000 \\n --name=dynamics-causal-2-node-$slurm_job_id \\n --tags dynamics causal 2-node \\n --entity instant-uv \\n --project jafar \\n --dyna_dim=1024 \\n --dyna_num_blocks=16 \\n --dyna_num_heads=16 \\n --dyna_ffn_dim=4096 \\n --tokenizer_checkpoint=$tokenizer_ckpt_dir \\n --data_dir $array_records_dir &\n\nchild_pid=$!\n\nwait $child_pid\n",shellscript,selection_mouse +827,3660957,"slurm/jobs/mihir/horeka/causal_big_runs/train_dynamics_2_nodes.sbatch",67,2350,"#SBATCH --time=48:00:00\n#SBATCH --partition=accelerated\n#SBATCH --cpus-per-task=5\n#SBATCH --gres=gpu:4\n#SBATCH --output=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir/causal/dynamics-cotraining/%x_%j.log\n#SBATCH --error=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir/causal/dynamics-cotraining/%x_%j.log\n#SBATCH --job-name=train_dynamics_causal_2_node\n#SBATCH --requeue\n#SBATCH --signal=b:usr1@300 # 5 min before timeout\n\n# --- signal trap to requeue job before timeout ---\nrequeue_job() {\n echo ""[$(date)] caught sigusr1 (timeout warning), requeueing slurm job $SLURM_JOB_ID...""\n # optional: trigger checkpoint saving here\n # e.g., touch $checkpoint_dir/requeue_trigger\n scontrol requeue $SLURM_JOB_ID\n exit 0\n}\n\ntrap requeue_job sigusr1\n\n# set checkpoint flag based on restart count\nrestart_count=$(scontrol show job $SLURM_JOB_ID | grep -o 'Restarts=[0-9]*' | cut -d'=' -f2)\n\nif [ $restart_count -eq 0 ]; then\n restore_ckpt_flag=""--no-restore-ckpt""\nelse\n restore_ckpt_flag=""--restore-ckpt""\nfi\n\n\n\n# Log the sbatch script\ncat $0\n\nmodule unload mpi/openmpi/5.0\nmodule unload devel/cuda/12.4\nsource .venv/bin/activate\n\narray_records_dir=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_new/open_ai_minecraft_arrayrecords_chunked\n\njob_name=$SLURM_JOB_NAME\nslurm_job_id=$SLURM_JOB_ID\n\nCHECKPOINT_DIR=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/causal/$job_name/$slurm_job_id\nmkdir -p $CHECKPOINT_DIR\n\n# tokenizer with the new structure supporting larger ffn_dim\ntokenizer_ckpt_dir=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data/checkpoints/train_tokenizer_lr_sweep_1e-4_larger_ffn/\n\nenv | grep SLURM\n\nsrun python train_dynamics.py \\n --save_ckpt \\n $restore_ckpt_flag \\n --wandb_id $SLURM_JOB_ID \\n --ckpt_dir $CHECKPOINT_DIR \\n --batch_size=96 \\n --init_lr=0 \\n --max_lr=2e-5 \\n --log_image_interval=1000 \\n --log \\n --log_checkpoint_interval=1000 \\n --name=dynamics-causal-2-node-$slurm_job_id \\n --tags dynamics causal 2-node \\n --entity 
instant-uv \\n --project jafar \\n --dyna_dim=1024 \\n --dyna_num_blocks=16 \\n --dyna_num_heads=16 \\n --dyna_ffn_dim=4096 \\n --tokenizer_checkpoint=$tokenizer_ckpt_dir \\n --data_dir $array_records_dir &\n\nchild_pid=$!\n\nwait $child_pid\n",shellscript,selection_mouse +828,3660958,"slurm/jobs/mihir/horeka/causal_big_runs/train_dynamics_2_nodes.sbatch",0,2417,"#!/usr/bin/env bash\n\n#SBATCH --nodes=2\n#SBATCH --ntasks-per-node=4\n#SBATCH --time=48:00:00\n#SBATCH --partition=accelerated\n#SBATCH --cpus-per-task=5\n#SBATCH --gres=gpu:4\n#SBATCH --output=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir/causal/dynamics-cotraining/%x_%j.log\n#SBATCH --error=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir/causal/dynamics-cotraining/%x_%j.log\n#SBATCH --job-name=train_dynamics_causal_2_node\n#SBATCH --requeue\n#SBATCH --signal=b:usr1@300 # 5 min before timeout\n\n# --- signal trap to requeue job before timeout ---\nrequeue_job() {\n echo ""[$(date)] caught sigusr1 (timeout warning), requeueing slurm job $SLURM_JOB_ID...""\n # optional: trigger checkpoint saving here\n # e.g., touch $checkpoint_dir/requeue_trigger\n scontrol requeue $SLURM_JOB_ID\n exit 0\n}\n\ntrap requeue_job sigusr1\n\n# set checkpoint flag based on restart count\nrestart_count=$(scontrol show job $SLURM_JOB_ID | grep -o 'Restarts=[0-9]*' | cut -d'=' -f2)\n\nif [ $restart_count -eq 0 ]; then\n restore_ckpt_flag=""--no-restore-ckpt""\nelse\n restore_ckpt_flag=""--restore-ckpt""\nfi\n\n\n\n# Log the sbatch script\ncat $0\n\nmodule unload mpi/openmpi/5.0\nmodule unload devel/cuda/12.4\nsource .venv/bin/activate\n\narray_records_dir=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_new/open_ai_minecraft_arrayrecords_chunked\n\njob_name=$SLURM_JOB_NAME\nslurm_job_id=$SLURM_JOB_ID\n\nCHECKPOINT_DIR=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/causal/$job_name/$slurm_job_id\nmkdir -p $CHECKPOINT_DIR\n\n# tokenizer with the new structure supporting larger ffn_dim\ntokenizer_ckpt_dir=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data/checkpoints/train_tokenizer_lr_sweep_1e-4_larger_ffn/\n\nenv | grep SLURM\n\nsrun python train_dynamics.py \\n --save_ckpt \\n $restore_ckpt_flag \\n --wandb_id $SLURM_JOB_ID \\n --ckpt_dir $CHECKPOINT_DIR \\n --batch_size=96 \\n --init_lr=0 \\n --max_lr=2e-5 \\n --log_image_interval=1000 \\n --log \\n --log_checkpoint_interval=1000 \\n --name=dynamics-causal-2-node-$slurm_job_id \\n --tags dynamics causal 2-node \\n --entity instant-uv \\n --project jafar \\n --dyna_dim=1024 \\n --dyna_num_blocks=16 \\n --dyna_num_heads=16 \\n --dyna_ffn_dim=4096 \\n --tokenizer_checkpoint=$tokenizer_ckpt_dir \\n --data_dir $array_records_dir &\n\nchild_pid=$!\n\nwait $child_pid\n",shellscript,selection_mouse +829,3672779,"slurm/dev/mihir/horeka/causal_fit_modelsizes/train_dynamics_new_arch_255M.sbatch",0,0,"",shellscript,tab +830,3672780,"slurm/dev/mihir/horeka/causal_fit_modelsizes/train_dynamics_new_arch_255M.sbatch",977,0,"",shellscript,selection_mouse +831,3672807,"slurm/dev/mihir/horeka/causal_fit_modelsizes/train_dynamics_new_arch_255M.sbatch",976,0,"",shellscript,selection_command +832,3676925,"slurm/dev/mihir/horeka/causal_fit_modelsizes/train_dynamics_new_arch_255M.sbatch",1554,0,"",shellscript,selection_mouse +833,3676939,"slurm/dev/mihir/horeka/causal_fit_modelsizes/train_dynamics_new_arch_255M.sbatch",1553,0,"",shellscript,selection_command 
+834,3677091,"slurm/dev/mihir/horeka/causal_fit_modelsizes/train_dynamics_new_arch_255M.sbatch",1553,1,"6",shellscript,selection_mouse +835,3677096,"slurm/dev/mihir/horeka/causal_fit_modelsizes/train_dynamics_new_arch_255M.sbatch",1554,0,"",shellscript,selection_command +836,3677136,"slurm/dev/mihir/horeka/causal_fit_modelsizes/train_dynamics_new_arch_255M.sbatch",1189,365,"0 \\n --log \\n --log_checkpoint_interval=100 \\n --name=dynamics-causal-255M-$slurm_job_id \\n --tags dynamics causal 255M \\n --entity instant-uv \\n --project jafar \\n --tokenizer_checkpoint=$tokenizer_ckpt_dir \\n --data_dir $array_records_dir \\n --dyna_dim=1024 \\n --dyna_num_blocks=16 \\n --dyna_num_heads=16 \\n --dyna_ffn_dim=4096",shellscript,selection_mouse +837,3677165,"slurm/dev/mihir/horeka/causal_fit_modelsizes/train_dynamics_new_arch_255M.sbatch",927,627,"\nenv | grep SLURM\n\nsrun python train_dynamics.py \\n --save_ckpt \\n --num_steps=50 \\n --warmup_steps=0 \\n --wsd_decay_steps=0 \\n --ckpt_dir $CHECKPOINT_DIR \\n --batch_size=12 \\n --init_lr=1e-3 \\n --max_lr=1e-3 \\n --log_image_interval=100 \\n --log \\n --log_checkpoint_interval=100 \\n --name=dynamics-causal-255M-$slurm_job_id \\n --tags dynamics causal 255M \\n --entity instant-uv \\n --project jafar \\n --tokenizer_checkpoint=$tokenizer_ckpt_dir \\n --data_dir $array_records_dir \\n --dyna_dim=1024 \\n --dyna_num_blocks=16 \\n --dyna_num_heads=16 \\n --dyna_ffn_dim=4096",shellscript,selection_mouse +838,3677179,"slurm/dev/mihir/horeka/causal_fit_modelsizes/train_dynamics_new_arch_255M.sbatch",531,1023,"\narray_records_dir=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_new/open_ai_minecraft_arrayrecords_chunked\n\njob_name=$SLURM_JOB_NAME\nslurm_job_id=$SLURM_JOB_ID\n\nCHECKPOINT_DIR=$ws_dir/checkpoints/$job_name/$slurm_job_id\nmkdir -p $CHECKPOINT_DIR\n\ntokenizer_ckpt_dir=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data/checkpoints/train_tokenizer_lr_sweep_1e-4_larger_ffn/\n\nenv | grep SLURM\n\nsrun python train_dynamics.py \\n --save_ckpt \\n --num_steps=50 \\n --warmup_steps=0 \\n --wsd_decay_steps=0 \\n --ckpt_dir $CHECKPOINT_DIR \\n --batch_size=12 \\n --init_lr=1e-3 \\n --max_lr=1e-3 \\n --log_image_interval=100 \\n --log \\n --log_checkpoint_interval=100 \\n --name=dynamics-causal-255M-$slurm_job_id \\n --tags dynamics causal 255M \\n --entity instant-uv \\n --project jafar \\n --tokenizer_checkpoint=$tokenizer_ckpt_dir \\n --data_dir $array_records_dir \\n --dyna_dim=1024 \\n --dyna_num_blocks=16 \\n --dyna_num_heads=16 \\n --dyna_ffn_dim=4096",shellscript,selection_mouse +839,3677195,"slurm/dev/mihir/horeka/causal_fit_modelsizes/train_dynamics_new_arch_255M.sbatch",174,1380,"#SBATCH --output=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir/%x_%j.log\n#SBATCH --error=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir/%x_%j.log\n#SBATCH --job-name=train_dyn_causal_255M\n\n# Log the sbatch script\ncat $0\n\nmodule unload mpi/openmpi/5.0\nmodule unload devel/cuda/12.4\nsource .venv/bin/activate\n\narray_records_dir=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_new/open_ai_minecraft_arrayrecords_chunked\n\njob_name=$SLURM_JOB_NAME\nslurm_job_id=$SLURM_JOB_ID\n\nCHECKPOINT_DIR=$ws_dir/checkpoints/$job_name/$slurm_job_id\nmkdir -p $CHECKPOINT_DIR\n\ntokenizer_ckpt_dir=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data/checkpoints/train_tokenizer_lr_sweep_1e-4_larger_ffn/\n\nenv | grep SLURM\n\nsrun python train_dynamics.py \\n --save_ckpt \\n 
--num_steps=50 \\n --warmup_steps=0 \\n --wsd_decay_steps=0 \\n --ckpt_dir $CHECKPOINT_DIR \\n --batch_size=12 \\n --init_lr=1e-3 \\n --max_lr=1e-3 \\n --log_image_interval=100 \\n --log \\n --log_checkpoint_interval=100 \\n --name=dynamics-causal-255M-$slurm_job_id \\n --tags dynamics causal 255M \\n --entity instant-uv \\n --project jafar \\n --tokenizer_checkpoint=$tokenizer_ckpt_dir \\n --data_dir $array_records_dir \\n --dyna_dim=1024 \\n --dyna_num_blocks=16 \\n --dyna_num_heads=16 \\n --dyna_ffn_dim=4096",shellscript,selection_mouse +840,3677249,"slurm/dev/mihir/horeka/causal_fit_modelsizes/train_dynamics_new_arch_255M.sbatch",0,1554,"#!/usr/bin/env bash\n\n#SBATCH --nodes=1\n#SBATCH --ntasks-per-node=1\n#SBATCH --time=00:10:00\n#SBATCH --partition=dev_accelerated\n#SBATCH --cpus-per-task=5\n#SBATCH --gres=gpu:1\n#SBATCH --output=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir/%x_%j.log\n#SBATCH --error=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir/%x_%j.log\n#SBATCH --job-name=train_dyn_causal_255M\n\n# Log the sbatch script\ncat $0\n\nmodule unload mpi/openmpi/5.0\nmodule unload devel/cuda/12.4\nsource .venv/bin/activate\n\narray_records_dir=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_new/open_ai_minecraft_arrayrecords_chunked\n\njob_name=$SLURM_JOB_NAME\nslurm_job_id=$SLURM_JOB_ID\n\nCHECKPOINT_DIR=$ws_dir/checkpoints/$job_name/$slurm_job_id\nmkdir -p $CHECKPOINT_DIR\n\ntokenizer_ckpt_dir=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data/checkpoints/train_tokenizer_lr_sweep_1e-4_larger_ffn/\n\nenv | grep SLURM\n\nsrun python train_dynamics.py \\n --save_ckpt \\n --num_steps=50 \\n --warmup_steps=0 \\n --wsd_decay_steps=0 \\n --ckpt_dir $CHECKPOINT_DIR \\n --batch_size=12 \\n --init_lr=1e-3 \\n --max_lr=1e-3 \\n --log_image_interval=100 \\n --log \\n --log_checkpoint_interval=100 \\n --name=dynamics-causal-255M-$slurm_job_id \\n --tags dynamics causal 255M \\n --entity instant-uv \\n --project jafar \\n --tokenizer_checkpoint=$tokenizer_ckpt_dir \\n --data_dir $array_records_dir \\n --dyna_dim=1024 \\n --dyna_num_blocks=16 \\n --dyna_num_heads=16 \\n --dyna_ffn_dim=4096",shellscript,selection_mouse +841,3732604,"slurm/dev/mihir/horeka/causal_fit_modelsizes/train_dynamics_new_arch_255M.sbatch",313,0,"",shellscript,selection_mouse +842,3733795,"slurm/dev/mihir/horeka/causal_fit_modelsizes/train_dynamics_new_arch_255M.sbatch",312,0,"",shellscript,selection_mouse +843,3735042,"slurm/jobs/mihir/horeka/causal_big_runs/train_dynamics_2_nodes.sbatch",0,0,"",shellscript,tab +844,3735044,"slurm/jobs/mihir/horeka/causal_big_runs/train_dynamics_2_nodes.sbatch",468,0,"",shellscript,selection_mouse +845,3735059,"slurm/jobs/mihir/horeka/causal_big_runs/train_dynamics_2_nodes.sbatch",467,0,"",shellscript,selection_command +846,3735661,"slurm/jobs/mihir/horeka/causal_big_runs/train_dynamics_2_nodes.sbatch",395,0,"",shellscript,selection_mouse +847,3897148,"slurm/jobs/mihir/horeka/causal_big_runs/train_dynamics_2_nodes copy.sbatch",0,0,"#!/usr/bin/env bash\n\n#SBATCH --nodes=2\n#SBATCH --ntasks-per-node=4\n#SBATCH --time=48:00:00\n#SBATCH --partition=accelerated\n#SBATCH --cpus-per-task=5\n#SBATCH --gres=gpu:4\n#SBATCH --output=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir/causal/dynamics-cotraining/%x_%j.log\n#SBATCH --error=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir/causal/dynamics-cotraining/%x_%j.log\n#SBATCH --job-name=train_dynamics_causal_2_node\n#SBATCH 
--requeue\n#SBATCH --signal=b:usr1@300 # 5 min before timeout\n\n# --- signal trap to requeue job before timeout ---\nrequeue_job() {\n echo ""[$(date)] caught sigusr1 (timeout warning), requeueing slurm job $SLURM_JOB_ID...""\n # optional: trigger checkpoint saving here\n # e.g., touch $checkpoint_dir/requeue_trigger\n scontrol requeue $SLURM_JOB_ID\n exit 0\n}\n\ntrap requeue_job sigusr1\n\n# set checkpoint flag based on restart count\nrestart_count=$(scontrol show job $SLURM_JOB_ID | grep -o 'Restarts=[0-9]*' | cut -d'=' -f2)\n\nif [ $restart_count -eq 0 ]; then\n restore_ckpt_flag=""--no-restore-ckpt""\nelse\n restore_ckpt_flag=""--restore-ckpt""\nfi\n\n\n\n# Log the sbatch script\ncat $0\n\nmodule unload mpi/openmpi/5.0\nmodule unload devel/cuda/12.4\nsource .venv/bin/activate\n\narray_records_dir=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_new/open_ai_minecraft_arrayrecords_chunked\n\njob_name=$SLURM_JOB_NAME\nslurm_job_id=$SLURM_JOB_ID\n\nCHECKPOINT_DIR=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/causal/$job_name/$slurm_job_id\nmkdir -p $CHECKPOINT_DIR\n\n# tokenizer with the new structure supporting larger ffn_dim\ntokenizer_ckpt_dir=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data/checkpoints/train_tokenizer_lr_sweep_1e-4_larger_ffn/\n\nenv | grep SLURM\n\nsrun python train_dynamics.py \\n --save_ckpt \\n $restore_ckpt_flag \\n --wandb_id $SLURM_JOB_ID \\n --ckpt_dir $CHECKPOINT_DIR \\n --batch_size=96 \\n --init_lr=0 \\n --max_lr=2e-5 \\n --log_image_interval=1000 \\n --log \\n --log_checkpoint_interval=1000 \\n --name=dynamics-causal-2-node-$slurm_job_id \\n --tags dynamics causal 2-node \\n --entity instant-uv \\n --project jafar \\n --dyna_dim=1024 \\n --dyna_num_blocks=16 \\n --dyna_num_heads=16 \\n --dyna_ffn_dim=4096 \\n --tokenizer_checkpoint=$tokenizer_ckpt_dir \\n --data_dir $array_records_dir &\n\nchild_pid=$!\n\nwait $child_pid\n",shellscript,tab +848,3898879,"slurm/jobs/mihir/horeka/causal_big_runs/train_dynamics_2_nodes copy.sbatch",2417,0,"",shellscript,selection_mouse +849,3899023,"slurm/jobs/mihir/horeka/causal_big_runs/train_dynamics_2_nodes copy.sbatch",2228,189,"dyna_num_blocks=16 \\n --dyna_num_heads=16 \\n --dyna_ffn_dim=4096 \\n --tokenizer_checkpoint=$tokenizer_ckpt_dir \\n --data_dir $array_records_dir &\n\nchild_pid=$!\n\nwait $child_pid\n",shellscript,selection_mouse +850,3899046,"slurm/jobs/mihir/horeka/causal_big_runs/train_dynamics_2_nodes copy.sbatch",1925,492," --batch_size=96 \\n --init_lr=0 \\n --max_lr=2e-5 \\n --log_image_interval=1000 \\n --log \\n --log_checkpoint_interval=1000 \\n --name=dynamics-causal-2-node-$slurm_job_id \\n --tags dynamics causal 2-node \\n --entity instant-uv \\n --project jafar \\n --dyna_dim=1024 \\n --dyna_num_blocks=16 \\n --dyna_num_heads=16 \\n --dyna_ffn_dim=4096 \\n --tokenizer_checkpoint=$tokenizer_ckpt_dir \\n --data_dir $array_records_dir &\n\nchild_pid=$!\n\nwait $child_pid\n",shellscript,selection_mouse +851,3899107,"slurm/jobs/mihir/horeka/causal_big_runs/train_dynamics_2_nodes copy.sbatch",1256,1161,"array_records_dir=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_new/open_ai_minecraft_arrayrecords_chunked\n\njob_name=$SLURM_JOB_NAME\nslurm_job_id=$SLURM_JOB_ID\n\nCHECKPOINT_DIR=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/causal/$job_name/$slurm_job_id\nmkdir -p $CHECKPOINT_DIR\n\n# tokenizer with the new structure supporting larger 
ffn_dim\ntokenizer_ckpt_dir=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data/checkpoints/train_tokenizer_lr_sweep_1e-4_larger_ffn/\n\nenv | grep SLURM\n\nsrun python train_dynamics.py \\n --save_ckpt \\n $restore_ckpt_flag \\n --wandb_id $SLURM_JOB_ID \\n --ckpt_dir $CHECKPOINT_DIR \\n --batch_size=96 \\n --init_lr=0 \\n --max_lr=2e-5 \\n --log_image_interval=1000 \\n --log \\n --log_checkpoint_interval=1000 \\n --name=dynamics-causal-2-node-$slurm_job_id \\n --tags dynamics causal 2-node \\n --entity instant-uv \\n --project jafar \\n --dyna_dim=1024 \\n --dyna_num_blocks=16 \\n --dyna_num_heads=16 \\n --dyna_ffn_dim=4096 \\n --tokenizer_checkpoint=$tokenizer_ckpt_dir \\n --data_dir $array_records_dir &\n\nchild_pid=$!\n\nwait $child_pid\n",shellscript,selection_mouse +852,3899107,"slurm/jobs/mihir/horeka/causal_big_runs/train_dynamics_2_nodes copy.sbatch",1199,1218,"module unload devel/cuda/12.4\nsource .venv/bin/activate\n\narray_records_dir=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_new/open_ai_minecraft_arrayrecords_chunked\n\njob_name=$SLURM_JOB_NAME\nslurm_job_id=$SLURM_JOB_ID\n\nCHECKPOINT_DIR=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/causal/$job_name/$slurm_job_id\nmkdir -p $CHECKPOINT_DIR\n\n# tokenizer with the new structure supporting larger ffn_dim\ntokenizer_ckpt_dir=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data/checkpoints/train_tokenizer_lr_sweep_1e-4_larger_ffn/\n\nenv | grep SLURM\n\nsrun python train_dynamics.py \\n --save_ckpt \\n $restore_ckpt_flag \\n --wandb_id $SLURM_JOB_ID \\n --ckpt_dir $CHECKPOINT_DIR \\n --batch_size=96 \\n --init_lr=0 \\n --max_lr=2e-5 \\n --log_image_interval=1000 \\n --log \\n --log_checkpoint_interval=1000 \\n --name=dynamics-causal-2-node-$slurm_job_id \\n --tags dynamics causal 2-node \\n --entity instant-uv \\n --project jafar \\n --dyna_dim=1024 \\n --dyna_num_blocks=16 \\n --dyna_num_heads=16 \\n --dyna_ffn_dim=4096 \\n --tokenizer_checkpoint=$tokenizer_ckpt_dir \\n --data_dir $array_records_dir &\n\nchild_pid=$!\n\nwait $child_pid\n",shellscript,selection_mouse +853,3899163,"slurm/jobs/mihir/horeka/causal_big_runs/train_dynamics_2_nodes copy.sbatch",1087,1330,"else\n restore_ckpt_flag=""--restore-ckpt""\nfi\n\n\n\n# Log the sbatch script\ncat $0\n\nmodule unload mpi/openmpi/5.0\nmodule unload devel/cuda/12.4\nsource .venv/bin/activate\n\narray_records_dir=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_new/open_ai_minecraft_arrayrecords_chunked\n\njob_name=$SLURM_JOB_NAME\nslurm_job_id=$SLURM_JOB_ID\n\nCHECKPOINT_DIR=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/causal/$job_name/$slurm_job_id\nmkdir -p $CHECKPOINT_DIR\n\n# tokenizer with the new structure supporting larger ffn_dim\ntokenizer_ckpt_dir=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data/checkpoints/train_tokenizer_lr_sweep_1e-4_larger_ffn/\n\nenv | grep SLURM\n\nsrun python train_dynamics.py \\n --save_ckpt \\n $restore_ckpt_flag \\n --wandb_id $SLURM_JOB_ID \\n --ckpt_dir $CHECKPOINT_DIR \\n --batch_size=96 \\n --init_lr=0 \\n --max_lr=2e-5 \\n --log_image_interval=1000 \\n --log \\n --log_checkpoint_interval=1000 \\n --name=dynamics-causal-2-node-$slurm_job_id \\n --tags dynamics causal 2-node \\n --entity instant-uv \\n --project jafar \\n --dyna_dim=1024 \\n --dyna_num_blocks=16 \\n --dyna_num_heads=16 \\n --dyna_ffn_dim=4096 \\n --tokenizer_checkpoint=$tokenizer_ckpt_dir \\n --data_dir $array_records_dir &\n\nchild_pid=$!\n\nwait 
$child_pid\n",shellscript,selection_mouse +854,3899164,"slurm/jobs/mihir/horeka/causal_big_runs/train_dynamics_2_nodes copy.sbatch",843,1574,"}\n\ntrap requeue_job sigusr1\n\n# set checkpoint flag based on restart count\nrestart_count=$(scontrol show job $SLURM_JOB_ID | grep -o 'Restarts=[0-9]*' | cut -d'=' -f2)\n\nif [ $restart_count -eq 0 ]; then\n restore_ckpt_flag=""--no-restore-ckpt""\nelse\n restore_ckpt_flag=""--restore-ckpt""\nfi\n\n\n\n# Log the sbatch script\ncat $0\n\nmodule unload mpi/openmpi/5.0\nmodule unload devel/cuda/12.4\nsource .venv/bin/activate\n\narray_records_dir=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_new/open_ai_minecraft_arrayrecords_chunked\n\njob_name=$SLURM_JOB_NAME\nslurm_job_id=$SLURM_JOB_ID\n\nCHECKPOINT_DIR=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/causal/$job_name/$slurm_job_id\nmkdir -p $CHECKPOINT_DIR\n\n# tokenizer with the new structure supporting larger ffn_dim\ntokenizer_ckpt_dir=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data/checkpoints/train_tokenizer_lr_sweep_1e-4_larger_ffn/\n\nenv | grep SLURM\n\nsrun python train_dynamics.py \\n --save_ckpt \\n $restore_ckpt_flag \\n --wandb_id $SLURM_JOB_ID \\n --ckpt_dir $CHECKPOINT_DIR \\n --batch_size=96 \\n --init_lr=0 \\n --max_lr=2e-5 \\n --log_image_interval=1000 \\n --log \\n --log_checkpoint_interval=1000 \\n --name=dynamics-causal-2-node-$slurm_job_id \\n --tags dynamics causal 2-node \\n --entity instant-uv \\n --project jafar \\n --dyna_dim=1024 \\n --dyna_num_blocks=16 \\n --dyna_num_heads=16 \\n --dyna_ffn_dim=4096 \\n --tokenizer_checkpoint=$tokenizer_ckpt_dir \\n --data_dir $array_records_dir &\n\nchild_pid=$!\n\nwait $child_pid\n",shellscript,selection_mouse +855,3899165,"slurm/jobs/mihir/horeka/causal_big_runs/train_dynamics_2_nodes copy.sbatch",487,1930,"#SBATCH --signal=b:usr1@300 # 5 min before timeout\n\n# --- signal trap to requeue job before timeout ---\nrequeue_job() {\n echo ""[$(date)] caught sigusr1 (timeout warning), requeueing slurm job $SLURM_JOB_ID...""\n # optional: trigger checkpoint saving here\n # e.g., touch $checkpoint_dir/requeue_trigger\n scontrol requeue $SLURM_JOB_ID\n exit 0\n}\n\ntrap requeue_job sigusr1\n\n# set checkpoint flag based on restart count\nrestart_count=$(scontrol show job $SLURM_JOB_ID | grep -o 'Restarts=[0-9]*' | cut -d'=' -f2)\n\nif [ $restart_count -eq 0 ]; then\n restore_ckpt_flag=""--no-restore-ckpt""\nelse\n restore_ckpt_flag=""--restore-ckpt""\nfi\n\n\n\n# Log the sbatch script\ncat $0\n\nmodule unload mpi/openmpi/5.0\nmodule unload devel/cuda/12.4\nsource .venv/bin/activate\n\narray_records_dir=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_new/open_ai_minecraft_arrayrecords_chunked\n\njob_name=$SLURM_JOB_NAME\nslurm_job_id=$SLURM_JOB_ID\n\nCHECKPOINT_DIR=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/causal/$job_name/$slurm_job_id\nmkdir -p $CHECKPOINT_DIR\n\n# tokenizer with the new structure supporting larger ffn_dim\ntokenizer_ckpt_dir=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data/checkpoints/train_tokenizer_lr_sweep_1e-4_larger_ffn/\n\nenv | grep SLURM\n\nsrun python train_dynamics.py \\n --save_ckpt \\n $restore_ckpt_flag \\n --wandb_id $SLURM_JOB_ID \\n --ckpt_dir $CHECKPOINT_DIR \\n --batch_size=96 \\n --init_lr=0 \\n --max_lr=2e-5 \\n --log_image_interval=1000 \\n --log \\n --log_checkpoint_interval=1000 \\n --name=dynamics-causal-2-node-$slurm_job_id \\n --tags dynamics causal 2-node \\n --entity instant-uv \\n --project 
jafar \\n --dyna_dim=1024 \\n --dyna_num_blocks=16 \\n --dyna_num_heads=16 \\n --dyna_ffn_dim=4096 \\n --tokenizer_checkpoint=$tokenizer_ckpt_dir \\n --data_dir $array_records_dir &\n\nchild_pid=$!\n\nwait $child_pid\n",shellscript,selection_mouse +856,3899165,"slurm/jobs/mihir/horeka/causal_big_runs/train_dynamics_2_nodes copy.sbatch",0,2417,"#!/usr/bin/env bash\n\n#SBATCH --nodes=2\n#SBATCH --ntasks-per-node=4\n#SBATCH --time=48:00:00\n#SBATCH --partition=accelerated\n#SBATCH --cpus-per-task=5\n#SBATCH --gres=gpu:4\n#SBATCH --output=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir/causal/dynamics-cotraining/%x_%j.log\n#SBATCH --error=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir/causal/dynamics-cotraining/%x_%j.log\n#SBATCH --job-name=train_dynamics_causal_2_node\n#SBATCH --requeue\n#SBATCH --signal=b:usr1@300 # 5 min before timeout\n\n# --- signal trap to requeue job before timeout ---\nrequeue_job() {\n echo ""[$(date)] caught sigusr1 (timeout warning), requeueing slurm job $SLURM_JOB_ID...""\n # optional: trigger checkpoint saving here\n # e.g., touch $checkpoint_dir/requeue_trigger\n scontrol requeue $SLURM_JOB_ID\n exit 0\n}\n\ntrap requeue_job sigusr1\n\n# set checkpoint flag based on restart count\nrestart_count=$(scontrol show job $SLURM_JOB_ID | grep -o 'Restarts=[0-9]*' | cut -d'=' -f2)\n\nif [ $restart_count -eq 0 ]; then\n restore_ckpt_flag=""--no-restore-ckpt""\nelse\n restore_ckpt_flag=""--restore-ckpt""\nfi\n\n\n\n# Log the sbatch script\ncat $0\n\nmodule unload mpi/openmpi/5.0\nmodule unload devel/cuda/12.4\nsource .venv/bin/activate\n\narray_records_dir=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_new/open_ai_minecraft_arrayrecords_chunked\n\njob_name=$SLURM_JOB_NAME\nslurm_job_id=$SLURM_JOB_ID\n\nCHECKPOINT_DIR=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/causal/$job_name/$slurm_job_id\nmkdir -p $CHECKPOINT_DIR\n\n# tokenizer with the new structure supporting larger ffn_dim\ntokenizer_ckpt_dir=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data/checkpoints/train_tokenizer_lr_sweep_1e-4_larger_ffn/\n\nenv | grep SLURM\n\nsrun python train_dynamics.py \\n --save_ckpt \\n $restore_ckpt_flag \\n --wandb_id $SLURM_JOB_ID \\n --ckpt_dir $CHECKPOINT_DIR \\n --batch_size=96 \\n --init_lr=0 \\n --max_lr=2e-5 \\n --log_image_interval=1000 \\n --log \\n --log_checkpoint_interval=1000 \\n --name=dynamics-causal-2-node-$slurm_job_id \\n --tags dynamics causal 2-node \\n --entity instant-uv \\n --project jafar \\n --dyna_dim=1024 \\n --dyna_num_blocks=16 \\n --dyna_num_heads=16 \\n --dyna_ffn_dim=4096 \\n --tokenizer_checkpoint=$tokenizer_ckpt_dir \\n --data_dir $array_records_dir &\n\nchild_pid=$!\n\nwait $child_pid\n",shellscript,selection_mouse +857,3900118,"slurm/jobs/mihir/horeka/causal_big_runs/train_dynamics_2_nodes copy.sbatch",0,0,"",shellscript,selection_command +858,3917503,"slurm/jobs/mihir/horeka/causal_big_runs/train_dynamics_2_nodes copy.sbatch",21,0,"#SBATCH --nodes=8\n",shellscript,content +859,3917710,"slurm/jobs/mihir/horeka/causal_big_runs/train_dynamics_2_nodes copy.sbatch",39,18,"",shellscript,content +860,3919402,"slurm/jobs/mihir/horeka/causal_big_runs/train_dynamics_2_nodes copy.sbatch",421,0,"#SBATCH --job-name=train_dynamics_causal_8_node\n",shellscript,content +861,3919520,"slurm/jobs/mihir/horeka/causal_big_runs/train_dynamics_2_nodes copy.sbatch",469,48,"",shellscript,content +862,3927362,"slurm/jobs/mihir/horeka/causal_big_runs/train_dynamics_2_nodes 
copy.sbatch",1925,0," --batch_size=384 \\n",shellscript,content +863,3927912,"slurm/jobs/mihir/horeka/causal_big_runs/train_dynamics_2_nodes copy.sbatch",1948,22,"",shellscript,content +864,3931816,"slurm/jobs/mihir/horeka/causal_big_runs/train_dynamics_2_nodes copy.sbatch",2067,0," --name=dynamics-causal-8-node-$slurm_job_id \\n",shellscript,content +865,3932234,"slurm/jobs/mihir/horeka/causal_big_runs/train_dynamics_2_nodes copy.sbatch",2117,0," --tags dynamics causal 8-node \\n",shellscript,content +866,3932454,"slurm/jobs/mihir/horeka/causal_big_runs/train_dynamics_2_nodes copy.sbatch",2153,86,"",shellscript,content +867,4018976,"slurm/jobs/mihir/horeka/causal_big_runs/train_dynamics_8_nodes.sbatch",0,0,"#!/usr/bin/env bash\n\n#SBATCH --nodes=8\n#SBATCH --ntasks-per-node=4\n#SBATCH --time=48:00:00\n#SBATCH --partition=accelerated\n#SBATCH --cpus-per-task=5\n#SBATCH --gres=gpu:4\n#SBATCH --output=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir/causal/dynamics-cotraining/%x_%j.log\n#SBATCH --error=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir/causal/dynamics-cotraining/%x_%j.log\n#SBATCH --job-name=train_dynamics_causal_8_node\n#SBATCH --requeue\n#SBATCH --signal=b:usr1@300 # 5 min before timeout\n\n# --- signal trap to requeue job before timeout ---\nrequeue_job() {\n echo ""[$(date)] caught sigusr1 (timeout warning), requeueing slurm job $SLURM_JOB_ID...""\n # optional: trigger checkpoint saving here\n # e.g., touch $checkpoint_dir/requeue_trigger\n scontrol requeue $SLURM_JOB_ID\n exit 0\n}\n\ntrap requeue_job sigusr1\n\n# set checkpoint flag based on restart count\nrestart_count=$(scontrol show job $SLURM_JOB_ID | grep -o 'Restarts=[0-9]*' | cut -d'=' -f2)\n\nif [ $restart_count -eq 0 ]; then\n restore_ckpt_flag=""--no-restore-ckpt""\nelse\n restore_ckpt_flag=""--restore-ckpt""\nfi\n\n\n\n# Log the sbatch script\ncat $0\n\nmodule unload mpi/openmpi/5.0\nmodule unload devel/cuda/12.4\nsource .venv/bin/activate\n\narray_records_dir=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_new/open_ai_minecraft_arrayrecords_chunked\n\njob_name=$SLURM_JOB_NAME\nslurm_job_id=$SLURM_JOB_ID\n\nCHECKPOINT_DIR=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/causal/$job_name/$slurm_job_id\nmkdir -p $CHECKPOINT_DIR\n\n# tokenizer with the new structure supporting larger ffn_dim\ntokenizer_ckpt_dir=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data/checkpoints/train_tokenizer_lr_sweep_1e-4_larger_ffn/\n\nenv | grep SLURM\n\nsrun python train_dynamics.py \\n --save_ckpt \\n $restore_ckpt_flag \\n --wandb_id $SLURM_JOB_ID \\n --ckpt_dir $CHECKPOINT_DIR \\n --batch_size=384 \\n --init_lr=0 \\n --max_lr=2e-5 \\n --log_image_interval=1000 \\n --log \\n --log_checkpoint_interval=1000 \\n --name=dynamics-causal-8-node-$slurm_job_id \\n --tags dynamics causal 8-node \\n --entity instant-uv \\n --project jafar \\n --dyna_dim=1024 \\n --dyna_num_blocks=16 \\n --dyna_num_heads=16 \\n --dyna_ffn_dim=4096 \\n --tokenizer_checkpoint=$tokenizer_ckpt_dir \\n --data_dir $array_records_dir &\n\nchild_pid=$!\n\nwait $child_pid\n",shellscript,tab +868,4136246,"slurm/jobs/mihir/horeka/causal_big_runs/train_dynamics_8_nodes.sbatch",2301,0,"",shellscript,selection_mouse +869,4136261,"slurm/jobs/mihir/horeka/causal_big_runs/train_dynamics_8_nodes.sbatch",2300,0,"",shellscript,selection_command +870,4177535,"slurm/jobs/mihir/horeka/causal_big_runs/train_dynamics_8_nodes.sbatch",2249,0,"",shellscript,selection_mouse 
+871,4177538,"slurm/jobs/mihir/horeka/causal_big_runs/train_dynamics_8_nodes.sbatch",2248,0,"",shellscript,selection_command +872,4178096,"slurm/jobs/mihir/horeka/causal_big_runs/train_dynamics_8_nodes.sbatch",2301,0,"",shellscript,selection_mouse +873,4178099,"slurm/jobs/mihir/horeka/causal_big_runs/train_dynamics_8_nodes.sbatch",2300,0,"",shellscript,selection_command +874,4178700,"slurm/jobs/mihir/horeka/causal_big_runs/train_dynamics_8_nodes.sbatch",2275,0,"",shellscript,selection_mouse +875,4178702,"slurm/jobs/mihir/horeka/causal_big_runs/train_dynamics_8_nodes.sbatch",2274,0,"",shellscript,selection_command +876,4179170,"slurm/jobs/mihir/horeka/causal_big_runs/train_dynamics_8_nodes.sbatch",2249,0,"",shellscript,selection_mouse +877,4179172,"slurm/jobs/mihir/horeka/causal_big_runs/train_dynamics_8_nodes.sbatch",2248,0,"",shellscript,selection_command +878,4179839,"slurm/jobs/mihir/horeka/causal_big_runs/train_dynamics_8_nodes.sbatch",2222,0,"",shellscript,selection_mouse +879,4179840,"slurm/jobs/mihir/horeka/causal_big_runs/train_dynamics_8_nodes.sbatch",2221,0,"",shellscript,selection_command +880,4180230,"slurm/jobs/mihir/horeka/causal_big_runs/train_dynamics_8_nodes.sbatch",2222,0,"",shellscript,selection_mouse +881,4180232,"slurm/jobs/mihir/horeka/causal_big_runs/train_dynamics_8_nodes.sbatch",2221,0,"",shellscript,selection_command +882,4180391,"slurm/jobs/mihir/horeka/causal_big_runs/train_dynamics_8_nodes.sbatch",2201,22," --dyna_dim=1024 \\n",shellscript,selection_mouse +883,4180393,"slurm/jobs/mihir/horeka/causal_big_runs/train_dynamics_8_nodes.sbatch",2202,21," --dyna_dim=1024 \\n",shellscript,selection_command +884,4180950,"slurm/jobs/mihir/horeka/causal_big_runs/train_dynamics_8_nodes.sbatch",2200,0,"",shellscript,selection_mouse +885,4180961,"slurm/jobs/mihir/horeka/causal_big_runs/train_dynamics_8_nodes.sbatch",2199,0,"",shellscript,selection_command +886,4186689,"slurm/jobs/mihir/horeka/causal_big_runs/train_dynamics_8_nodes.sbatch",2200,0,"",shellscript,selection_mouse +887,4186708,"slurm/jobs/mihir/horeka/causal_big_runs/train_dynamics_8_nodes.sbatch",2199,0,"",shellscript,selection_command +888,4187449,"slurm/jobs/mihir/horeka/causal_big_runs/train_dynamics_8_nodes.sbatch",2222,0,"",shellscript,selection_mouse +889,4187449,"slurm/jobs/mihir/horeka/causal_big_runs/train_dynamics_8_nodes.sbatch",2221,0,"",shellscript,selection_command +890,4188352,"slurm/jobs/mihir/horeka/causal_big_runs/train_dynamics_8_nodes.sbatch",2350,0,"",shellscript,selection_mouse +891,4188353,"slurm/jobs/mihir/horeka/causal_big_runs/train_dynamics_8_nodes.sbatch",2349,0,"",shellscript,selection_command +892,4718986,"slurm/jobs/mihir/horeka/causal_big_runs/train_dynamics_8_nodes.sbatch",0,0,"",shellscript,tab +893,4718989,"slurm/jobs/mihir/horeka/causal_big_runs/train_dynamics_8_nodes.sbatch",1009,0,"",shellscript,selection_mouse +894,4719008,"slurm/jobs/mihir/horeka/causal_big_runs/train_dynamics_8_nodes.sbatch",1008,0,"",shellscript,selection_command +895,4721390,"TERMINAL",0,0,"queue",,terminal_command +896,4721470,"TERMINAL",0,0,"]633;E;2025-07-24 15:18:56 queue;406cfb31-2341-454a-afa8-cae7781806b2]633;C[?1049h(B[?7hEvery 1.0s: squeue --mehkn1991.localdomain: Thu Jul 24 15:18:56 2025JOBID PARTITION NAME USER ST\tTIME NODES NODELIST(REASON)3371237 accelerat train_dy tum_cte0 R 14:44:41\t 8 hkn[0618,0625-0626,0628-0631,0634]3371238 accelerat train_dy tum_cte0 R 14:44:41\t 2 hkn[0706,0710]3372631 accelerat train_dy tum_cte0 R 1:05:44\t 2 hkn[0515,0622]3372629 accelerat train_dy tum_cte0 R 
1:51:07\t 8 hkn[0410,0429,0520,0607,0610,0810,0814,0817]",,terminal_output +897,4722510,"TERMINAL",0,0,"72258",,terminal_output +898,4723618,"TERMINAL",0,0,"83369",,terminal_output +899,4724681,"TERMINAL",0,0,"944710",,terminal_output +900,4725660,"TERMINAL",0,0,"9:005581",,terminal_output +901,4726778,"TERMINAL",0,0,"16692",,terminal_output +902,4727726,"TERMINAL",0,0,"277503",,terminal_output +903,4727970,"TERMINAL",0,0,"[?1049l\r[?1l>]0;tum_cte0515@hkn1991:~/Projects/jafar_jobs_2]633;D;0",,terminal_output +904,4740081,"TERMINAL",0,0,"sync-runner-2",,terminal_command +905,4740124,"TERMINAL",0,0,"]633;E;2025-07-24 15:19:14 sync-runner-2;406cfb31-2341-454a-afa8-cae7781806b2]633;Csending incremental file list\r\n",,terminal_output +906,4741105,"TERMINAL",0,0,"bash",,terminal_focus +907,4742528,"TERMINAL",0,0,"git branch",,terminal_command +908,4742578,"TERMINAL",0,0,"]633;E;2025-07-24 15:19:17 git branch;63badae8-90b1-4579-970f-d00997b22bed]633;C[?1h=\r",,terminal_output +909,4742711,"TERMINAL",0,0," add-wandb-name-and-tags\r\n causal-transformer-dynamics-model\r\n convert-to-jax-array-in-iter\r\n correct-batched-sampling\r\n dev\r\n dont-let-tf-see-gpu\r\n feat/explicit-image-dims\r\n fix-sampling\r\n grad-norm-log-and-clip\r\n grain-dataloader\r\n logging-variants\r\n lr-schedules\r\n main\r\n maskgit-different-maskprob-per-sample\r\n:",,terminal_output +910,4744038,"TERMINAL",0,0,"\r metrics-logging-for-dynamics-model\r\n:",,terminal_output +911,4744201,"TERMINAL",0,0,"\r monkey-patch\r\n:",,terminal_output +912,4744365,"TERMINAL",0,0,"\r* new-arch-sampling\r\n:",,terminal_output +913,4744488,"TERMINAL",0,0,"\r preprocess_video\r\n:",,terminal_output +914,4744597,"TERMINAL",0,0,"\r refactor-tmp\r\n:",,terminal_output +915,4744747,"TERMINAL",0,0,"\r revised-dataloader\r\n:",,terminal_output +916,4744853,"TERMINAL",0,0,"\r runner\r\n:",,terminal_output +917,4745031,"TERMINAL",0,0,"\r runner-grain\r\n:",,terminal_output +918,4745158,"TERMINAL",0,0,"\r sample-from-different-topologies\r\n:",,terminal_output +919,4745532,"TERMINAL",0,0,"\r[?1l>]0;tum_cte0515@hkn1991:~/Projects/jafar]633;D;0",,terminal_output +920,4745774,"TERMINAL",0,0,"slurm/jobs/mihir/horeka/\r\nslurm/jobs/mihir/horeka/causal_big_runs/\r\nslurm/jobs/mihir/horeka/causal_big_runs/train_dynamics_2_nodes.sbatch\r\nslurm/jobs/mihir/horeka/causal_big_runs/train_dynamics_8_nodes.sbatch\r\n",,terminal_output +921,4745865,"TERMINAL",0,0,"\r\nsent 29,610 bytes received 192 bytes 4,584.92 bytes/sec\r\ntotal size is 185,131,637 speedup is 6,212.05\r\n]0;tum_cte0515@hkn1991:~/Projects/jafar_jobs_2]633;D;0",,terminal_output +922,4750180,"TERMINAL",0,0,"bash",,terminal_focus +923,4767178,"TERMINAL",0,0,"sbatch slurm/jobs/mihir/horeka/causal_big_runs/train_dynamics_2_nodes.sbatch",,terminal_command +924,4767239,"TERMINAL",0,0,"]633;E;2025-07-24 15:19:41 sbatch slurm/jobs/mihir/horeka/causal_big_runs/train_dynamics_2_nodes.sbatch;406cfb31-2341-454a-afa8-cae7781806b2]633;CSubmitted batch job 3373205\r\n]0;tum_cte0515@hkn1991:~/Projects/jafar_jobs_2]633;D;0",,terminal_output +925,4772380,"TERMINAL",0,0,"sbatch slurm/jobs/mihir/horeka/causal_big_runs/train_dynamics_8_nodes.sbatch",,terminal_command +926,4772394,"TERMINAL",0,0,"]633;E;2025-07-24 15:19:47 sbatch slurm/jobs/mihir/horeka/causal_big_runs/train_dynamics_8_nodes.sbatch;406cfb31-2341-454a-afa8-cae7781806b2]633;CSubmitted batch job 3373207\r\n]0;tum_cte0515@hkn1991:~/Projects/jafar_jobs_2]633;D;0",,terminal_output +927,4773680,"TERMINAL",0,0,"queue",,terminal_command 
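Both sbatch files pair `#SBATCH --signal=b:usr1@300` with `trap requeue_job sigusr1`, so Slurm delivers SIGUSR1 five minutes before timeout and the job requeues itself via `scontrol requeue`; on each launch they then derive the checkpoint flag from the job's `Restarts=` counter. A hedged Python re-implementation of that restart-count check (a hypothetical standalone helper mirroring the recorded `scontrol show job | grep -o | cut` pipeline, not code from the repository):

```python
# Hypothetical re-implementation of the recorded restart-count check:
# restart_count=$(scontrol show job $SLURM_JOB_ID | grep -o 'Restarts=[0-9]*' | cut -d'=' -f2)
import re
import subprocess

def restore_flag(job_id: str) -> str:
    """Return the checkpoint flag the sbatch scripts pass to train_dynamics.py."""
    out = subprocess.run(
        ["scontrol", "show", "job", job_id],
        capture_output=True, text=True, check=True,
    ).stdout
    match = re.search(r"Restarts=(\d+)", out)
    restarts = int(match.group(1)) if match else 0
    # First launch trains from scratch; any requeued run restores the checkpoint.
    return "--no-restore-ckpt" if restarts == 0 else "--restore-ckpt"
```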
+928,4773730,"TERMINAL",0,0,"]633;E;2025-07-24 15:19:48 queue;406cfb31-2341-454a-afa8-cae7781806b2]633;C",,terminal_output +929,4773808,"TERMINAL",0,0,"[?1049h(B[?7hEvery 1.0s: squeue --mehkn1991.localdomain: Thu Jul 24 15:19:48 2025JOBID PARTITION NAME USER ST\tTIME NODES NODELIST(REASON)3373207 accelerat train_dy tum_cte0 PD\t0:00\t 8 (Priority)3373205 accelerat train_dy tum_cte0 PD\t0:00\t 2 (Priority)3371237 accelerat train_dy tum_cte0 R 14:45:33\t 8 hkn[0618,0625-0626,0628-0631,0634]3371238 accelerat train_dy tum_cte0 R 14:45:33\t 2 hkn[0706,0710]3372631 accelerat train_dy tum_cte0 R 1:06:36\t 2 hkn[0515,0622]3372629 accelerat train_dy tum_cte0 R 1:51:59\t 8 hkn[0410,0429,0520,0607,0610,0810,0814,0817]",,terminal_output +930,4774838,"TERMINAL",0,0,"94472:00",,terminal_output +931,4775881,"TERMINAL",0,0,"505581",,terminal_output +932,4776927,"TERMINAL",0,0,"16692",,terminal_output +933,4777971,"TERMINAL",0,0,"277403",,terminal_output +934,4779089,"TERMINAL",0,0,"38814",,terminal_output +935,4780128,"TERMINAL",0,0,"49925",,terminal_output +936,4781157,"TERMINAL",0,0,"5404036",,terminal_output +937,4782178,"TERMINAL",0,0,"61147",,terminal_output +938,4783302,"TERMINAL",0,0,"72258",,terminal_output +939,4784250,"TERMINAL",0,0,"83369",,terminal_output +940,4785292,"TERMINAL",0,0,"944710",,terminal_output +941,4786376,"TERMINAL",0,0,"20:006692",,terminal_output +942,4787397,"TERMINAL",0,0,"277503",,terminal_output +943,4788178,"TERMINAL",0,0,"bash",,terminal_focus +944,4788445,"TERMINAL",0,0,"38814",,terminal_output +945,4789487,"TERMINAL",0,0,"49925",,terminal_output +946,4790529,"TERMINAL",0,0,"5505036",,terminal_output +947,4791575,"TERMINAL",0,0,"61147",,terminal_output +948,4792621,"TERMINAL",0,0,"72258",,terminal_output +949,4793665,"TERMINAL",0,0,"83369",,terminal_output +950,4794729,"TERMINAL",0,0,"944720",,terminal_output +951,4795759,"TERMINAL",0,0,"105581",,terminal_output +952,4796845,"TERMINAL",0,0,"16692",,terminal_output +953,4797858,"TERMINAL",0,0,"2777:003",,terminal_output +954,4798906,"TERMINAL",0,0,"38814",,terminal_output +955,4799947,"TERMINAL",0,0,"49925",,terminal_output +956,4800999,"TERMINAL",0,0,"56:006:0036",,terminal_output +957,4802044,"TERMINAL",0,0,"61147",,terminal_output +958,4803092,"TERMINAL",0,0,"72258",,terminal_output +959,4804199,"TERMINAL",0,0,"83369",,terminal_output +960,4805185,"TERMINAL",0,0,"944730",,terminal_output +961,4806235,"TERMINAL",0,0,"205581",,terminal_output +962,4807367,"TERMINAL",0,0,"16692",,terminal_output +963,4808332,"TERMINAL",0,0,"288114",,terminal_output +964,4809379,"TERMINAL",0,0,"49925",,terminal_output +965,4810440,"TERMINAL",0,0,"5101036",,terminal_output +966,4811469,"TERMINAL",0,0,"61147",,terminal_output +967,4812588,"TERMINAL",0,0,"72258",,terminal_output +968,4813615,"TERMINAL",0,0,"83369",,terminal_output +969,4814682,"TERMINAL",0,0,"944740",,terminal_output +970,4815663,"TERMINAL",0,0,"305581",,terminal_output +971,4816794,"TERMINAL",0,0,"16692",,terminal_output +972,4817817,"TERMINAL",0,0,"277203",,terminal_output +973,4818777,"TERMINAL",0,0,"38814",,terminal_output +974,4819965,"TERMINAL",0,0,"49925",,terminal_output +975,4820890,"TERMINAL",0,0,"5202036",,terminal_output +976,4821923,"TERMINAL",0,0,"61147",,terminal_output +977,4822962,"TERMINAL",0,0,"72258",,terminal_output +978,4824014,"TERMINAL",0,0,"83369",,terminal_output +979,4825087,"TERMINAL",0,0,"944750",,terminal_output +980,4826127,"TERMINAL",0,0,"405581",,terminal_output +981,4827155,"TERMINAL",0,0,"16692",,terminal_output 
+982,4828263,"TERMINAL",0,0,"277303",,terminal_output +983,4829281,"TERMINAL",0,0,"38814",,terminal_output +984,4830285,"TERMINAL",0,0,"49925",,terminal_output +985,4831433,"TERMINAL",0,0,"5313147",,terminal_output +986,4832380,"TERMINAL",0,0,"72258",,terminal_output +987,4833479,"TERMINAL",0,0,"83369",,terminal_output +988,4834469,"TERMINAL",0,0,"94473:00",,terminal_output +989,4835512,"TERMINAL",0,0,"505581",,terminal_output +990,4836558,"TERMINAL",0,0,"16692",,terminal_output +991,4837677,"TERMINAL",0,0,"277403",,terminal_output +992,4838653,"TERMINAL",0,0,"38814",,terminal_output +993,4839696,"TERMINAL",0,0,"49925",,terminal_output +994,4840740,"TERMINAL",0,0,"5404036",,terminal_output +995,4841824,"TERMINAL",0,0,"61147",,terminal_output +996,4842928,"TERMINAL",0,0,"72258",,terminal_output +997,4843874,"TERMINAL",0,0,"83369",,terminal_output +998,4844949,"TERMINAL",0,0,"944710",,terminal_output +999,4845949,"TERMINAL",0,0,"1:005581",,terminal_output +1000,4846992,"TERMINAL",0,0,"16692",,terminal_output +1001,4848042,"TERMINAL",0,0,"277503",,terminal_output +1002,4849078,"TERMINAL",0,0,"38814",,terminal_output +1003,4850172,"TERMINAL",0,0,"49925",,terminal_output +1004,4851159,"TERMINAL",0,0,"5505036",,terminal_output +1005,4852221,"TERMINAL",0,0,"61147",,terminal_output +1006,4853247,"TERMINAL",0,0,"72258",,terminal_output +1007,4854293,"TERMINAL",0,0,"83369",,terminal_output +1008,4855392,"TERMINAL",0,0,"955821",,terminal_output +1009,4856416,"TERMINAL",0,0,"116692",,terminal_output +1010,4857443,"TERMINAL",0,0,"2778:003",,terminal_output +1011,4858461,"TERMINAL",0,0,"38814",,terminal_output +1012,4859505,"TERMINAL",0,0,"49925",,terminal_output +1013,4860617,"TERMINAL",0,0,"57:007:0036",,terminal_output +1014,4861643,"TERMINAL",0,0,"61147",,terminal_output +1015,4862620,"TERMINAL",0,0,"72258",,terminal_output +1016,4863689,"TERMINAL",0,0,"83369",,terminal_output +1017,4864707,"TERMINAL",0,0,"944730",,terminal_output +1018,4865753,"TERMINAL",0,0,"205581",,terminal_output +1019,4866801,"TERMINAL",0,0,"16692",,terminal_output +1020,4867886,"TERMINAL",0,0,"277103",,terminal_output +1021,4868881,"TERMINAL",0,0,"38814",,terminal_output +1022,4869919,"TERMINAL",0,0,"49925",,terminal_output +1023,4870960,"TERMINAL",0,0,"5101036",,terminal_output +1024,4872000,"TERMINAL",0,0,"61147",,terminal_output +1025,4873047,"TERMINAL",0,0,"72258",,terminal_output +1026,4874094,"TERMINAL",0,0,"83369",,terminal_output +1027,4875162,"TERMINAL",0,0,"944740",,terminal_output +1028,4876184,"TERMINAL",0,0,"305581",,terminal_output +1029,4877307,"TERMINAL",0,0,"16692",,terminal_output +1030,4878331,"TERMINAL",0,0,"277203",,terminal_output +1031,4879369,"TERMINAL",0,0,"39925",,terminal_output +1032,4880383,"TERMINAL",0,0,"5202036",,terminal_output +1033,4881508,"TERMINAL",0,0,"61147",,terminal_output +1034,4882527,"TERMINAL",0,0,"72258",,terminal_output +1035,4883480,"TERMINAL",0,0,"83369",,terminal_output +1036,4884587,"TERMINAL",0,0,"944750",,terminal_output +1037,4885578,"TERMINAL",0,0,"405581",,terminal_output +1038,4886623,"TERMINAL",0,0,"16692",,terminal_output +1039,4887752,"TERMINAL",0,0,"277303",,terminal_output +1040,4888703,"TERMINAL",0,0,"38814",,terminal_output +1041,4889750,"TERMINAL",0,0,"49925",,terminal_output +1042,4890782,"TERMINAL",0,0,"5303036",,terminal_output +1043,4891828,"TERMINAL",0,0,"61147",,terminal_output +1044,4892985,"TERMINAL",0,0,"72258",,terminal_output +1045,4893906,"TERMINAL",0,0,"83369",,terminal_output +1046,4894946,"TERMINAL",0,0,"94474:00",,terminal_output 
+1047,4895988,"TERMINAL",0,0,"505581",,terminal_output +1048,4897036,"TERMINAL",0,0,"16692",,terminal_output +1049,4898070,"TERMINAL",0,0,"277403",,terminal_output +1050,4899120,"TERMINAL",0,0,"38814",,terminal_output +1051,4900245,"TERMINAL",0,0,"49925",,terminal_output +1052,4901207,"TERMINAL",0,0,"5404036",,terminal_output +1053,4902249,"TERMINAL",0,0,"61147",,terminal_output +1054,4902925,"TERMINAL",0,0,"watch",,terminal_focus +1055,4903298,"TERMINAL",0,0,"72258",,terminal_output +1056,4904369,"TERMINAL",0,0,"844710",,terminal_output +1057,4905424,"TERMINAL",0,0,"2:005581",,terminal_output +1058,4906421,"TERMINAL",0,0,"16692",,terminal_output +1059,4907517,"TERMINAL",0,0,"277503",,terminal_output +1060,4908546,"TERMINAL",0,0,"38814",,terminal_output +1061,4909591,"TERMINAL",0,0,"49925",,terminal_output +1062,4910617,"TERMINAL",0,0,"5505036",,terminal_output +1063,4911671,"TERMINAL",0,0,"61147",,terminal_output +1064,4912739,"TERMINAL",0,0,"72258",,terminal_output +1065,4913764,"TERMINAL",0,0,"83369",,terminal_output +1066,4914808,"TERMINAL",0,0,"944720",,terminal_output +1067,4915928,"TERMINAL",0,0,"105581",,terminal_output +1068,4916944,"TERMINAL",0,0,"16692",,terminal_output +1069,4917946,"TERMINAL",0,0,"2779:003",,terminal_output +1070,4918989,"TERMINAL",0,0,"38814",,terminal_output +1071,4920114,"TERMINAL",0,0,"49925",,terminal_output +1072,4921143,"TERMINAL",0,0,"58:008:0036",,terminal_output +1073,4922159,"TERMINAL",0,0,"61147",,terminal_output +1074,4923202,"TERMINAL",0,0,"72258",,terminal_output +1075,4924247,"TERMINAL",0,0,"83369",,terminal_output +1076,4925333,"TERMINAL",0,0,"944730",,terminal_output +1077,4926359,"TERMINAL",0,0,"216692",,terminal_output +1078,4927482,"TERMINAL",0,0,"277103",,terminal_output +1079,4928535,"TERMINAL",0,0,"38814",,terminal_output +1080,4929517,"TERMINAL",0,0,"49925",,terminal_output +1081,4930574,"TERMINAL",0,0,"5101036",,terminal_output +1082,4931685,"TERMINAL",0,0,"61147",,terminal_output +1083,4932719,"TERMINAL",0,0,"72258",,terminal_output +1084,4933833,"TERMINAL",0,0,"83369",,terminal_output +1085,4934800,"TERMINAL",0,0,"944740",,terminal_output +1086,4935883,"TERMINAL",0,0,"305581",,terminal_output +1087,4936898,"TERMINAL",0,0,"16692",,terminal_output +1088,4937950,"TERMINAL",0,0,"277203",,terminal_output +1089,4938992,"TERMINAL",0,0,"38814",,terminal_output +1090,4940044,"TERMINAL",0,0,"49925",,terminal_output +1091,4941091,"TERMINAL",0,0,"5202036",,terminal_output +1092,4942230,"TERMINAL",0,0,"61147",,terminal_output +1093,4943242,"TERMINAL",0,0,"72258",,terminal_output +1094,4944231,"TERMINAL",0,0,"83369",,terminal_output +1095,4945282,"TERMINAL",0,0,"944750",,terminal_output +1096,4946429,"TERMINAL",0,0,"406692",,terminal_output +1097,4947456,"TERMINAL",0,0,"277303",,terminal_output +1098,4948990,"TERMINAL",0,0,"38814",,terminal_output +1099,4949963,"TERMINAL",0,0,"49925",,terminal_output +1100,4950985,"TERMINAL",0,0,"5303036",,terminal_output +1101,4952056,"TERMINAL",0,0,"61147",,terminal_output +1102,4953107,"TERMINAL",0,0,"72258",,terminal_output +1103,4954214,"TERMINAL",0,0,"83369",,terminal_output +1104,4955191,"TERMINAL",0,0,"94475:00",,terminal_output +1105,4955573,"slurm/jobs/mihir/horeka/causal_big_runs/train_dynamics_2_nodes.sbatch",0,0,"",shellscript,tab +1106,4956268,"TERMINAL",0,0,"505581",,terminal_output +1107,4957388,"TERMINAL",0,0,"16692",,terminal_output +1108,4958356,"TERMINAL",0,0,"288414",,terminal_output +1109,4959448,"TERMINAL",0,0,"49925",,terminal_output 
+1110,4959886,"slurm/jobs/mihir/horeka/causal_big_runs/train_dynamics_8_nodes.sbatch",0,0,"",shellscript,tab +1111,4960557,"TERMINAL",0,0,"5404036",,terminal_output +1112,4961589,"TERMINAL",0,0,"61147",,terminal_output +1113,4962234,"slurm/jobs/mihir/horeka/causal_big_runs/train_dynamics_8_nodes.sbatch",1979,0,"",shellscript,selection_mouse +1114,4962643,"TERMINAL",0,0,"72258",,terminal_output +1115,4963411,"slurm/jobs/mihir/horeka/causal_big_runs/train_dynamics_8_nodes.sbatch",1979,1,"8",shellscript,content +1116,4963697,"TERMINAL",0,0,"83369",,terminal_output +1117,4964744,"TERMINAL",0,0,"944710",,terminal_output +1118,4965799,"TERMINAL",0,0,"3:005581",,terminal_output +1119,4966850,"TERMINAL",0,0,"16692",,terminal_output +1120,4967932,"TERMINAL",0,0,"277503",,terminal_output +1121,4968952,"TERMINAL",0,0,"38814",,terminal_output +1122,4969999,"TERMINAL",0,0,"49925",,terminal_output +1123,4970049,"TERMINAL",0,0,"[?1049l\r[?1l>]0;tum_cte0515@hkn1991:~/Projects/jafar_jobs_2]633;D;0",,terminal_output +1124,4975386,"TERMINAL",0,0,"scancel 3373207",,terminal_command +1125,4975400,"TERMINAL",0,0,"]633;E;2025-07-24 15:23:10 scancel 3373207;406cfb31-2341-454a-afa8-cae7781806b2]633;C]0;tum_cte0515@hkn1991:~/Projects/jafar_jobs_2]633;D;0",,terminal_output +1126,4978731,"TERMINAL",0,0,"sync-runner-2",,terminal_command +1127,4978779,"TERMINAL",0,0,"]633;E;2025-07-24 15:23:13 sync-runner-2;406cfb31-2341-454a-afa8-cae7781806b2]633;Csending incremental file list\r\n",,terminal_output +1128,4985410,"TERMINAL",0,0,"slurm/jobs/mihir/horeka/causal_big_runs/train_dynamics_8_nodes.sbatch\r\n\r\nsent 27,144 bytes received 167 bytes 3,641.47 bytes/sec\r\ntotal size is 185,131,637 speedup is 6,778.65\r\n]0;tum_cte0515@hkn1991:~/Projects/jafar_jobs_2]633;D;0",,terminal_output +1129,4989365,"TERMINAL",0,0,"sbatch slurm/jobs/mihir/horeka/causal_big_runs/train_dynamics_8_nodes.sbatch",,terminal_command +1130,4989408,"TERMINAL",0,0,"]633;E;2025-07-24 15:23:24 sbatch slurm/jobs/mihir/horeka/causal_big_runs/train_dynamics_8_nodes.sbatch;406cfb31-2341-454a-afa8-cae7781806b2]633;CSubmitted batch job 3373213\r\n]0;tum_cte0515@hkn1991:~/Projects/jafar_jobs_2]633;D;0",,terminal_output +1131,4991642,"TERMINAL",0,0,"queue",,terminal_command +1132,4991690,"TERMINAL",0,0,"]633;E;2025-07-24 15:23:26 queue;406cfb31-2341-454a-afa8-cae7781806b2]633;C",,terminal_output +1133,4991758,"TERMINAL",0,0,"[?1049h(B[?7hEvery 1.0s: squeue --mehkn1991.localdomain: Thu Jul 24 15:23:26 2025JOBID PARTITION NAME USER ST\tTIME NODES NODELIST(REASON)3373205 accelerat train_dy tum_cte0 PD\t0:00\t 2 (Priority)3373213 accelerat train_dy tum_cte0 PD\t0:00\t 8 (Priority)3371237 accelerat train_dy tum_cte0 R 14:49:11\t 8 hkn[0618,0625-0626,0628-0631,0634]3371238 accelerat train_dy tum_cte0 R 14:49:11\t 2 hkn[0706,0710]3372631 accelerat train_dy tum_cte0 R 1:10:14\t 2 hkn[0515,0622]3372629 accelerat train_dy tum_cte0 R 1:55:37\t 8 hkn[0410,0429,0520,0607,0610,0810,0814,0817]",,terminal_output +1134,4992771,"TERMINAL",0,0,"72258",,terminal_output +1135,4993816,"TERMINAL",0,0,"83369",,terminal_output +1136,4994970,"TERMINAL",0,0,"944740",,terminal_output +1137,4995915,"TERMINAL",0,0,"305581",,terminal_output +1138,4997017,"TERMINAL",0,0,"16692",,terminal_output +1139,4998009,"TERMINAL",0,0,"277203",,terminal_output +1140,4999064,"TERMINAL",0,0,"38814",,terminal_output +1141,5000193,"TERMINAL",0,0,"49925",,terminal_output +1142,5001213,"TERMINAL",0,0,"5202036",,terminal_output +1143,5002263,"TERMINAL",0,0,"61147",,terminal_output 
+1144,5003319,"TERMINAL",0,0,"73369",,terminal_output +1145,5004390,"TERMINAL",0,0,"944750",,terminal_output +1146,5005516,"TERMINAL",0,0,"405581",,terminal_output +1147,5006521,"TERMINAL",0,0,"16692",,terminal_output +1148,5007669,"TERMINAL",0,0,"277303",,terminal_output +1149,5008692,"TERMINAL",0,0,"38814",,terminal_output +1150,5009664,"TERMINAL",0,0,"49925",,terminal_output +1151,5010740,"TERMINAL",0,0,"5303036",,terminal_output +1152,5011817,"TERMINAL",0,0,"61147",,terminal_output +1153,5012923,"TERMINAL",0,0,"72258",,terminal_output +1154,5013847,"TERMINAL",0,0,"83369",,terminal_output +1155,5014918,"TERMINAL",0,0,"94476:00",,terminal_output +1156,5015954,"TERMINAL",0,0,"505581",,terminal_output +1157,5017006,"TERMINAL",0,0,"16692",,terminal_output +1158,5018048,"TERMINAL",0,0,"277403",,terminal_output +1159,5019135,"TERMINAL",0,0,"38814",,terminal_output +1160,5020158,"TERMINAL",0,0,"49925",,terminal_output +1161,5021225,"TERMINAL",0,0,"5404036",,terminal_output +1162,5022309,"TERMINAL",0,0,"61147",,terminal_output +1163,5023337,"TERMINAL",0,0,"73369",,terminal_output +1164,5024384,"TERMINAL",0,0,"944710",,terminal_output +1165,5025088,"TERMINAL",0,0,"[?1049l\r[?1l>]0;tum_cte0515@hkn1991:~/Projects/jafar_jobs_2]633;D;0",,terminal_output +1166,5029837,"TERMINAL",0,0,"cd ../jafar",,terminal_command +1167,5030488,"TERMINAL",0,0,"ls",,terminal_command +1168,5030539,"TERMINAL",0,0,"]633;E;2025-07-24 15:24:05 ls;406cfb31-2341-454a-afa8-cae7781806b2]633;C",,terminal_output +1169,5030742,"TERMINAL",0,0,"data frames LICENSE README.md scripts_horeka utils\r\ndebug generate_dataset.py logs read_tf_record.py slurm wandb\r\ndiff.diff generation_1753196800.0453017.gif models requirements-franz.txt tests weekend-job-requeuer.sh\r\ndiff.log genie.py overfit_dir requirements.txt train_dynamics.py weekend-job-starter.sh\r\nframe-knoms.png gifs overfit_dir.zip sample.py train_lam.py\r\nframe.png input_pipeline __pycache__ scripts_cremers train_tokenizer.py\r\n]0;tum_cte0515@hkn1991:~/Projects/jafar]633;D;0",,terminal_output +1170,5189434,"slurm/jobs/mihir/horeka/causal_big_runs/train_dynamics_2_nodes.sbatch",0,0,"",shellscript,tab +1171,5193272,"slurm/jobs/mihir/horeka/causal_big_runs/train_dynamics_8_nodes.sbatch",0,0,"",shellscript,tab +1172,5195030,"slurm/jobs/mihir/horeka/causal_big_runs/train_dynamics_2_nodes.sbatch",0,0,"",shellscript,tab +1173,5195908,"slurm/jobs/mihir/horeka/causal_big_runs/train_dynamics_2_nodes.sbatch",844,0,"",shellscript,selection_mouse +1174,5195913,"slurm/jobs/mihir/horeka/causal_big_runs/train_dynamics_2_nodes.sbatch",843,0,"",shellscript,selection_command +1175,5196430,"slurm/jobs/mihir/horeka/causal_big_runs/train_dynamics_2_nodes.sbatch",845,0,"",shellscript,selection_mouse +1176,5197469,"slurm/jobs/mihir/horeka/causal_big_runs/train_dynamics_8_nodes.sbatch",0,0,"",shellscript,tab +1177,5198567,"slurm/jobs/mihir/horeka/causal_big_runs/train_dynamics_8_nodes.sbatch",845,0,"",shellscript,selection_mouse +1178,5199814,"slurm/jobs/mihir/horeka/causal_big_runs/train_dynamics_2_nodes.sbatch",0,0,"",shellscript,tab +1179,5200810,"slurm/jobs/mihir/horeka/causal_big_runs/train_dynamics_8_nodes.sbatch",0,0,"",shellscript,tab +1180,5201592,"slurm/jobs/mihir/horeka/causal_big_runs/train_dynamics_2_nodes.sbatch",0,0,"",shellscript,tab +1181,5202167,"slurm/jobs/mihir/horeka/causal_big_runs/train_dynamics_8_nodes.sbatch",0,0,"",shellscript,tab +1182,5202785,"slurm/jobs/mihir/horeka/causal_big_runs/train_dynamics_2_nodes.sbatch",0,0,"",shellscript,tab 
+1183,5203368,"slurm/jobs/mihir/horeka/causal_big_runs/train_dynamics_8_nodes.sbatch",0,0,"",shellscript,tab +1184,5204330,"slurm/jobs/mihir/horeka/causal_big_runs/train_dynamics_2_nodes.sbatch",0,0,"",shellscript,tab +1185,5204880,"slurm/jobs/mihir/horeka/causal_big_runs/train_dynamics_8_nodes.sbatch",0,0,"",shellscript,tab +1186,5209800,"TERMINAL",0,0,"bash",,terminal_focus +1187,5432413,"TERMINAL",0,0,"bash",,terminal_focus +1188,5442874,"train_dynamics.py",0,0,"",python,tab +1189,5711074,"TERMINAL",0,0,"bash",,terminal_focus +1190,5713480,"TERMINAL",0,0,"queue",,terminal_command +1191,5713566,"TERMINAL",0,0,"]633;E;2025-07-24 15:35:28 queue;adbf53fe-397b-40d3-9339-94ea79afad56]633;C[?1049h(B[?7hEvery 1.0s: squeue --mehkn1991.localdomain: Thu Jul 24 15:35:28 2025JOBID PARTITION NAME USER ST\tTIME NODES NODELIST(REASON)3373205 accelerat train_dy tum_cte0 PD\t0:00\t 2 (Priority)3373213 accelerat train_dy tum_cte0 PD\t0:00\t 8 (Priority)3371237 accelerat train_dy tum_cte0 R 15:01:13\t 8 hkn[0618,0625-0626,0628-0631,0634]3371238 accelerat train_dy tum_cte0 R 15:01:13\t 2 hkn[0706,0710]3372631 accelerat train_dy tum_cte0 R 1:22:16\t 2 hkn[0515,0622]3372629 accelerat train_dy tum_cte0 R 2:07:39\t 8 hkn[0410,0429,0520,0607,0610,0810,0814,0817]",,terminal_output +1192,5714644,"TERMINAL",0,0,"944740",,terminal_output +1193,5715649,"TERMINAL",0,0,"305581",,terminal_output +1194,5716792,"TERMINAL",0,0,"16692",,terminal_output +1195,5717740,"TERMINAL",0,0,"277203",,terminal_output +1196,5718845,"TERMINAL",0,0,"38814",,terminal_output +1197,5719869,"TERMINAL",0,0,"49925",,terminal_output +1198,5720898,"TERMINAL",0,0,"5202036",,terminal_output +1199,5721922,"TERMINAL",0,0,"61147",,terminal_output +1200,5722966,"TERMINAL",0,0,"72258",,terminal_output +1201,5724015,"TERMINAL",0,0,"83369",,terminal_output +1202,5725095,"TERMINAL",0,0,"944750",,terminal_output +1203,5726099,"TERMINAL",0,0,"405581",,terminal_output +1204,5727237,"TERMINAL",0,0,"16692",,terminal_output +1205,5728185,"TERMINAL",0,0,"277303",,terminal_output +1206,5729285,"TERMINAL",0,0,"38814",,terminal_output +1207,5730315,"TERMINAL",0,0,"49925",,terminal_output +1208,5731437,"TERMINAL",0,0,"5313147",,terminal_output +1209,5732382,"TERMINAL",0,0,"72258",,terminal_output +1210,5733425,"TERMINAL",0,0,"83369",,terminal_output +1211,5734473,"TERMINAL",0,0,"94478:00",,terminal_output +1212,5735522,"TERMINAL",0,0,"505581",,terminal_output +1213,5736559,"TERMINAL",0,0,"16692",,terminal_output +1214,5737610,"TERMINAL",0,0,"277403",,terminal_output +1215,5738712,"TERMINAL",0,0,"38814",,terminal_output +1216,5739703,"TERMINAL",0,0,"49925",,terminal_output +1217,5740781,"TERMINAL",0,0,"5404036",,terminal_output +1218,5741788,"TERMINAL",0,0,"61147",,terminal_output +1219,5742830,"TERMINAL",0,0,"72258",,terminal_output +1220,5743925,"TERMINAL",0,0,"83369",,terminal_output +1221,5744923,"TERMINAL",0,0,"944710",,terminal_output +1222,5746080,"TERMINAL",0,0,"6:005581",,terminal_output +1223,5747028,"TERMINAL",0,0,"16692",,terminal_output +1224,5748132,"TERMINAL",0,0,"277503",,terminal_output +1225,5749152,"TERMINAL",0,0,"38814",,terminal_output +1226,5750181,"TERMINAL",0,0,"49925",,terminal_output +1227,5751301,"TERMINAL",0,0,"5505036",,terminal_output +1228,5752359,"TERMINAL",0,0,"61147",,terminal_output +1229,5753298,"TERMINAL",0,0,"72258",,terminal_output +1230,5754401,"TERMINAL",0,0,"844720",,terminal_output +1231,5755410,"TERMINAL",0,0,"105581",,terminal_output +1232,5756451,"TERMINAL",0,0,"16692",,terminal_output 
+1233,5757191,"TERMINAL",0,0,"bash",,terminal_focus +1234,5757508,"TERMINAL",0,0,"2773:003",,terminal_output +1235,5758561,"TERMINAL",0,0,"38814",,terminal_output +1236,5759738,"TERMINAL",0,0,"49925",,terminal_output +1237,5760720,"TERMINAL",0,0,"52:002:0036",,terminal_output +1238,5761750,"TERMINAL",0,0,"61147",,terminal_output +1239,5762756,"TERMINAL",0,0,"72258",,terminal_output +1240,5763786,"TERMINAL",0,0,"git status",,terminal_command +1241,5763838,"TERMINAL",0,0,"]633;E;2025-07-24 15:36:18 git status;406cfb31-2341-454a-afa8-cae7781806b2]633;C",,terminal_output +1242,5763839,"TERMINAL",0,0,"83369",,terminal_output +1243,5764330,"TERMINAL",0,0,"On branch new-arch-sampling\r\n",,terminal_output +1244,5764332,"TERMINAL",0,0,"Your branch is ahead of 'origin/new-arch-sampling' by 8 commits.\r\n (use ""git push"" to publish your local commits)\r\n\r\nChanges not staged for commit:\r\n (use ""git add ..."" to update what will be committed)\r\n (use ""git restore ..."" to discard changes in working directory)\r\n\tmodified: models/dynamics.py\r\n\tmodified: train_dynamics.py\r\n\tmodified: utils/nn.py\r\n\r\nUntracked files:\r\n (use ""git add ..."" to include in what will be committed)\r\n\tdiff.diff\r\n\tdiff.log\r\n\tlogs/\r\n\toverfit_dir.zip\r\n\tread_tf_record.py\r\n\trequirements-franz.txt\r\n\tscripts_cremers/\r\n\tscripts_horeka/\r\n\tslurm/\r\n\tutils/logger_bak.py\r\n\tutils/visualizer.py\r\n\tweekend-job-requeuer.sh\r\n\tweekend-job-starter.sh\r\n\r\nno changes added to commit (use ""git add"" and/or ""git commit -a"")\r\n]0;tum_cte0515@hkn1991:~/Projects/jafar]633;D;0",,terminal_output +1245,5764871,"TERMINAL",0,0,"944730",,terminal_output +1246,5765939,"TERMINAL",0,0,"205581",,terminal_output +1247,5766970,"TERMINAL",0,0,"16692",,terminal_output +1248,5767998,"TERMINAL",0,0,"277103",,terminal_output +1249,5769053,"TERMINAL",0,0,"38814",,terminal_output +1250,5770146,"TERMINAL",0,0,"49925",,terminal_output +1251,5771147,"TERMINAL",0,0,"5101036",,terminal_output +1252,5772280,"TERMINAL",0,0,"61147",,terminal_output +1253,5773324,"TERMINAL",0,0,"72258",,terminal_output +1254,5774379,"TERMINAL",0,0,"83369",,terminal_output +1255,5775396,"TERMINAL",0,0,"955841",,terminal_output +1256,5776402,"TERMINAL",0,0,"316692",,terminal_output +1257,5777447,"TERMINAL",0,0,"277203",,terminal_output +1258,5778490,"TERMINAL",0,0,"38814",,terminal_output +1259,5778815,"TERMINAL",0,0,"git commit -am ""final hacky dev branch""",,terminal_command +1260,5778865,"TERMINAL",0,0,"]633;E;2025-07-24 15:36:33 git commit -am ""final hacky dev branch"";406cfb31-2341-454a-afa8-cae7781806b2]633;C",,terminal_output +1261,5779068,"TERMINAL",0,0,"[new-arch-sampling 91ecb1f] final hacky dev branch\r\n 3 files changed, 1 insertion(+), 9 deletions(-)\r\n]0;tum_cte0515@hkn1991:~/Projects/jafar]633;D;0",,terminal_output +1262,5779577,"TERMINAL",0,0,"49925",,terminal_output +1263,5780576,"TERMINAL",0,0,"5202036",,terminal_output +1264,5781626,"TERMINAL",0,0,"61147",,terminal_output +1265,5781861,"TERMINAL",0,0,"git push",,terminal_command +1266,5781915,"TERMINAL",0,0,"]633;E;2025-07-24 15:36:36 git push;406cfb31-2341-454a-afa8-cae7781806b2]633;C",,terminal_output +1267,5782694,"TERMINAL",0,0,"72258",,terminal_output +1268,5783280,"TERMINAL",0,0,"Enumerating objects: 38, done.\r\nCounting objects: 2% (1/38)\rCounting objects: 5% (2/38)\rCounting objects: 7% (3/38)\rCounting objects: 10% (4/38)\rCounting objects: 13% (5/38)\rCounting objects: 15% (6/38)\rCounting objects: 18% (7/38)\rCounting objects: 21% (8/38)\rCounting 
objects: 23% (9/38)\rCounting objects: 26% (10/38)\rCounting objects: 28% (11/38)\rCounting objects: 31% (12/38)\rCounting objects: 34% (13/38)\rCounting objects: 36% (14/38)\rCounting objects: 39% (15/38)\rCounting objects: 42% (16/38)\rCounting objects: 44% (17/38)\rCounting objects: 47% (18/38)\rCounting objects: 50% (19/38)\rCounting objects: 52% (20/38)\rCounting objects: 55% (21/38)\rCounting objects: 57% (22/38)\rCounting objects: 60% (23/38)\rCounting objects: 63% (24/38)\rCounting objects: 65% (25/38)\rCounting objects: 68% (26/38)\rCounting objects: 71% (27/38)\rCounting objects: 73% (28/38)\rCounting objects: 76% (29/38)\rCounting objects: 78% (30/38)\rCounting objects: 81% (31/38)\rCounting objects: 84% (32/38)\rCounting objects: 86% (33/38)\rCounting objects: 89% (34/38)\rCounting objects: 92% (35/38)\rCounting objects: 94% (36/38)\rCounting objects: 97% (37/38)\rCounting objects: 100% (38/38)\rCounting objects: 100% (38/38), done.\r\nDelta compression using up to 152 threads\r\nCompressing objects: 5% (1/20)\rCompressing objects: 10% (2/20)\rCompressing objects: 15% (3/20)\rCompressing objects: 20% (4/20)\rCompressing objects: 25% (5/20)\rCompressing objects: 30% (6/20)\rCompressing objects: 35% (7/20)\rCompressing objects: 40% (8/20)\rCompressing objects: 45% (9/20)\rCompressing objects: 50% (10/20)\rCompressing objects: 55% (11/20)\rCompressing objects: 60% (12/20)\rCompressing objects: 65% (13/20)\rCompressing objects: 70% (14/20)\rCompressing objects: 75% (15/20)\rCompressing objects: 80% (16/20)\rCompressing objects: 85% (17/20)\rCompressing objects: 90% (18/20)\rCompressing objects: 95% (19/20)\rCompressing objects: 100% (20/20)\rCompressing objects: 100% (20/20), done.\r\nWriting objects: 5% (1/20)\rWriting objects: 10% (2/20)\rWriting objects: 15% (3/20)\rWriting objects: 20% (4/20)\rWriting objects: 25% (5/20)\rWriting objects: 30% (6/20)\rWriting objects: 35% (7/20)\rWriting objects: 40% (8/20)\rWriting objects: 45% (9/20)\rWriting objects: 55% (11/20)\rWriting objects: 60% (12/20)\rWriting objects: 70% (14/20)\rWriting objects: 75% (15/20)\rWriting objects: 80% (16/20)\rWriting objects: 85% (17/20)\rWriting objects: 90% (18/20)\rWriting objects: 95% (19/20)\rWriting objects: 100% (20/20)\rWriting objects: 100% (20/20), 2.63 KiB | 674.00 KiB/s, done.\r\nTotal 20 (delta 15), reused 0 (delta 0), pack-reused 0\r\n",,terminal_output +1269,5783387,"TERMINAL",0,0,"remote: Resolving deltas: 0% (0/15)\rremote: Resolving deltas: 6% (1/15)\rremote: Resolving deltas: 13% (2/15)\rremote: Resolving deltas: 20% (3/15)\rremote: Resolving deltas: 26% (4/15)\rremote: Resolving deltas: 33% (5/15)\rremote: Resolving deltas: 40% (6/15)\rremote: Resolving deltas: 46% (7/15)\rremote: Resolving deltas: 53% (8/15)\rremote: Resolving deltas: 60% (9/15)\rremote: Resolving deltas: 66% (10/15)\rremote: Resolving deltas: 73% (11/15)\rremote: Resolving deltas: 80% (12/15)\rremote: Resolving deltas: 86% (13/15)\rremote: Resolving deltas: 93% (14/15)\rremote: Resolving deltas: 100% (15/15)\rremote: Resolving deltas: 100% (15/15), completed with 9 local objects.\r\n",,terminal_output +1270,5783677,"TERMINAL",0,0,"To github.com:p-doom/jafar.git\r\n 52cd7c0..91ecb1f new-arch-sampling -> new-arch-sampling\r\n]0;tum_cte0515@hkn1991:~/Projects/jafar]633;D;0]633;P;Cwd=/home/hk-project-p0023960/tum_cte0515/Projects/jafar",,terminal_output +1271,5783758,"TERMINAL",0,0,"83369",,terminal_output +1272,5784788,"TERMINAL",0,0,"944750",,terminal_output +1273,5785824,"TERMINAL",0,0,"405581",,terminal_output 
+1274,5786885,"TERMINAL",0,0,"16692",,terminal_output +1275,5787911,"TERMINAL",0,0,"277303",,terminal_output +1276,5788989,"TERMINAL",0,0,"38814",,terminal_output +1277,5790039,"TERMINAL",0,0,"49925",,terminal_output +1278,5791085,"TERMINAL",0,0,"5303036",,terminal_output +1279,5792107,"TERMINAL",0,0,"61147",,terminal_output +1280,5793141,"TERMINAL",0,0,"72258",,terminal_output +1281,5794240,"TERMINAL",0,0,"83369",,terminal_output +1282,5795250,"TERMINAL",0,0,"94479:00",,terminal_output +1283,5796373,"TERMINAL",0,0,"505581",,terminal_output +1284,5797340,"TERMINAL",0,0,"177403",,terminal_output +1285,5798426,"TERMINAL",0,0,"38814",,terminal_output +1286,5799438,"TERMINAL",0,0,"49925",,terminal_output +1287,5800468,"TERMINAL",0,0,"5404036",,terminal_output +1288,5801517,"TERMINAL",0,0,"61147",,terminal_output +1289,5802560,"TERMINAL",0,0,"72258",,terminal_output +1290,5803620,"TERMINAL",0,0,"83369",,terminal_output +1291,5804694,"TERMINAL",0,0,"944710",,terminal_output +1292,5805700,"TERMINAL",0,0,"7:005581",,terminal_output +1293,5806787,"TERMINAL",0,0,"16692",,terminal_output +1294,5807811,"TERMINAL",0,0,"277503",,terminal_output +1295,5808873,"TERMINAL",0,0,"38814",,terminal_output +1296,5809903,"TERMINAL",0,0,"49925",,terminal_output +1297,5811001,"TERMINAL",0,0,"5505036",,terminal_output +1298,5811996,"TERMINAL",0,0,"61147",,terminal_output +1299,5812524,"TERMINAL",0,0,"watch",,terminal_focus +1300,5813049,"TERMINAL",0,0,"72258",,terminal_output +1301,5814178,"TERMINAL",0,0,"83369",,terminal_output +1302,5815132,"TERMINAL",0,0,"944720",,terminal_output +1303,5816329,"TERMINAL",0,0,"106692",,terminal_output +1304,5817357,"TERMINAL",0,0,"2774:003",,terminal_output +1305,5818412,"TERMINAL",0,0,"38814",,terminal_output +1306,5819445,"TERMINAL",0,0,"49925",,terminal_output +1307,5820493,"TERMINAL",0,0,"53:003:0036",,terminal_output +1308,5820912,"TERMINAL",0,0,"[?1049l\r[?1l>]0;tum_cte0515@hkn1991:~/Projects/jafar]633;D;0",,terminal_output +1309,5826049,"TERMINAL",0,0,"source .venv/bin/activate",,terminal_command +1310,5826078,"TERMINAL",0,0,"]633;E;2025-07-24 15:37:20 source .venv/bin/activate;adbf53fe-397b-40d3-9339-94ea79afad56]633;C]0;tum_cte0515@hkn1991:~/Projects/jafar]633;D;0",,terminal_output +1311,5827009,"TERMINAL",0,0,"bash",,terminal_focus +1312,5829000,"TERMINAL",0,0,"smi",,terminal_command +1313,5829051,"TERMINAL",0,0,"]633;E;2025-07-24 15:37:23 smi;406cfb31-2341-454a-afa8-cae7781806b2]633;C[?1049h(B[?7hEvery 1.0s: nvidia-smihkn1991.localdomain: Thu Jul 24 15:37:23 2025sh: line 1: nvidia-smi: command not found",,terminal_output +1314,5829738,"TERMINAL",0,0,"[?1049l\r[?1l>]0;tum_cte0515@hkn1991:~/Projects/jafar]633;D;0",,terminal_output +1315,5833696,"TERMINAL",0,0,"git checkout main",,terminal_command +1316,5833747,"TERMINAL",0,0,"]633;E;2025-07-24 15:37:28 git checkout main;406cfb31-2341-454a-afa8-cae7781806b2]633;C",,terminal_output +1317,5833854,"TERMINAL",0,0,"Switched to branch 'main'\r\nYour branch is up to date with 'origin/main'.\r\n]0;tum_cte0515@hkn1991:~/Projects/jafar]633;D;0",,terminal_output +1318,5835781,"",0,0,"Switched from branch 'new-arch-sampling' to 'main'",,git_branch_checkout +1319,5836506,"TERMINAL",0,0,"git pull",,terminal_command +1320,5836558,"TERMINAL",0,0,"]633;E;2025-07-24 15:37:31 git pull;406cfb31-2341-454a-afa8-cae7781806b2]633;C",,terminal_output +1321,5838384,"TERMINAL",0,0,"remote: Enumerating objects: 13, done.\r\nremote: Counting objects: 7% (1/13)\rremote: Counting objects: 15% (2/13)\rremote: Counting objects: 23% (3/13)\rremote: 
Counting objects: 30% (4/13)\rremote: Counting objects: 38% (5/13)\rremote: Counting objects: 46% (6/13)\rremote: Counting objects: 53% (7/13)\rremote: Counting objects: 61% (8/13)\rremote: Counting objects: 69% (9/13)\rremote: Counting objects: 76% (10/13)\rremote: Counting objects: 84% (11/13)\rremote: Counting objects: 92% (12/13)\rremote: Counting objects: 100% (13/13)\rremote: Counting objects: 100% (13/13), done.\r\nremote: Compressing objects: 12% (1/8)\rremote: Compressing objects: 25% (2/8)\rremote: Compressing objects: 37% (3/8)\rremote: Compressing objects: 50% (4/8)\rremote: Compressing objects: 62% (5/8)\rremote: Compressing objects: 75% (6/8)\rremote: Compressing objects: 87% (7/8)\rremote: Compressing objects: 100% (8/8)\rremote: Compressing objects: 100% (8/8), done.\r\nremote: Total 13 (delta 5), reused 13 (delta 5), pack-reused 0 (from 0)\r\nUnpacking objects: 7% (1/13)\rUnpacking objects: 15% (2/13)\rUnpacking objects: 23% (3/13)\r",,terminal_output +1322,5838492,"TERMINAL",0,0,"Unpacking objects: 30% (4/13)\rUnpacking objects: 38% (5/13)\rUnpacking objects: 46% (6/13)\rUnpacking objects: 53% (7/13)\rUnpacking objects: 61% (8/13)\rUnpacking objects: 69% (9/13)\rUnpacking objects: 76% (10/13)\rUnpacking objects: 84% (11/13)\rUnpacking objects: 92% (12/13)\rUnpacking objects: 100% (13/13)\rUnpacking objects: 100% (13/13), 1.75 KiB | 14.00 KiB/s, done.\r\n",,terminal_output +1323,5838657,"TERMINAL",0,0,"From github.com:p-doom/jafar\r\n bbeba53..444c585 dev-dynamics_action_ablation_gt_actions -> origin/dev-dynamics_action_ablation_gt_actions\r\n 3020c5c..c179a45 feat/actions_in_dummy_data -> origin/feat/actions_in_dummy_data\r\n",,terminal_output +1324,5838729,"TERMINAL",0,0,"Already up to date.\r\n]0;tum_cte0515@hkn1991:~/Projects/jafar]633;D;0",,terminal_output +1325,6058078,"TERMINAL",0,0,"git checkout -b ""causal-st-transformer""",,terminal_command +1326,6058117,"TERMINAL",0,0,"]633;E;2025-07-24 15:41:12 git checkout -b ""causal-st-transformer"";406cfb31-2341-454a-afa8-cae7781806b2]633;C",,terminal_output +1327,6058131,"TERMINAL",0,0,"Switched to a new branch 'causal-st-transformer'\r\n]0;tum_cte0515@hkn1991:~/Projects/jafar]633;D;0",,terminal_output +1328,6060797,"",0,0,"Switched from branch 'main' to 'causal-st-transformer'",,git_branch_checkout +1329,6063346,"TERMINAL",0,0,"git status",,terminal_command +1330,6063389,"TERMINAL",0,0,"]633;E;2025-07-24 15:41:18 git status;406cfb31-2341-454a-afa8-cae7781806b2]633;COn branch causal-st-transformer\r\nUntracked files:\r\n (use ""git add ..."" to include in what will be committed)\r\n\tdiff.diff\r\n\tdiff.log\r\n\tlogs/\r\n\toverfit_dir.zip\r\n\tread_tf_record.py\r\n\trequirements-franz.txt\r\n\tscripts_cremers/\r\n\tscripts_horeka/\r\n\tslurm/\r\n\tutils/logger_bak.py\r\n\tutils/visualizer.py\r\n\tweekend-job-requeuer.sh\r\n\tweekend-job-starter.sh\r\n\r\nnothing added to commit but untracked files present (use ""git add"" to track)\r\n]0;tum_cte0515@hkn1991:~/Projects/jafar]633;D;0",,terminal_output +1331,6075666,"utils/nn.py",0,0,"import math\nfrom typing import Tuple\n\nfrom flax import linen as nn\nimport jax\nimport jax.numpy as jnp\nimport einops\n\n\nclass PositionalEncoding(nn.Module):\n """"""https://uvadlc-notebooks.readthedocs.io/en/latest/tutorial_notebooks/JAX/tutorial6/Transformers_and_MHAttention.html""""""\n\n d_model: int # Hidden dimensionality of the input.\n max_len: int = 5000 # Maximum length of a sequence to expect.\n\n def setup(self):\n # Create matrix of [SeqLen, HiddenDim] representing the 
positional encoding for max_len inputs\n self.pe = jnp.zeros((self.max_len, self.d_model))\n position = jnp.arange(0, self.max_len, dtype=jnp.float32)[:, None]\n div_term = jnp.exp(\n jnp.arange(0, self.d_model, 2) * (-math.log(10000.0) / self.d_model)\n )\n self.pe = self.pe.at[:, 0::2].set(jnp.sin(position * div_term))\n self.pe = self.pe.at[:, 1::2].set(jnp.cos(position * div_term))\n\n def __call__(self, x):\n x = x + self.pe[: x.shape[2]]\n return x\n\n\nclass STBlock(nn.Module):\n dim: int\n ffn_dim: int\n num_heads: int\n dropout: float\n param_dtype: jnp.dtype\n dtype: jnp.dtype\n use_flash_attention: bool\n\n @nn.remat\n @nn.compact\n def __call__(self, x: jax.Array) -> jax.Array:\n # --- Spatial attention ---\n z = PositionalEncoding(self.dim)(x)\n z = nn.LayerNorm(\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n )(z)\n z = nn.MultiHeadAttention(\n num_heads=self.num_heads,\n qkv_features=self.dim,\n dropout_rate=self.dropout,\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n attention_fn=_create_flash_attention_fn(\n self.use_flash_attention, is_causal=False\n ),\n )(z)\n x = x + z\n\n # --- Temporal attention ---\n x = x.swapaxes(1, 2)\n z = PositionalEncoding(self.dim)(x)\n z = nn.LayerNorm(\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n )(z)\n causal_mask = jnp.tri(z.shape[-2])\n z = nn.MultiHeadAttention(\n num_heads=self.num_heads,\n qkv_features=self.dim,\n dropout_rate=self.dropout,\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n attention_fn=_create_flash_attention_fn(\n self.use_flash_attention, is_causal=True\n ),\n # FIXME (f.srambical): check whether we should still pass the mask if we set is_causal=True\n )(z, mask=causal_mask)\n x = x + z\n x = x.swapaxes(1, 2)\n\n # --- Feedforward ---\n z = nn.LayerNorm(\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n )(x)\n z = nn.Dense(\n self.ffn_dim,\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n )(z)\n z = nn.gelu(z)\n z = nn.Dense(\n self.dim,\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n )(z)\n x = x + z\n\n return x\n\n\nclass STTransformer(nn.Module):\n model_dim: int\n ffn_dim: int\n out_dim: int\n num_blocks: int\n num_heads: int\n dropout: float\n param_dtype: jnp.dtype\n dtype: jnp.dtype\n use_flash_attention: bool\n\n @nn.compact\n def __call__(self, x: jax.Array) -> jax.Array:\n x = nn.Sequential(\n [\n nn.LayerNorm(\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n ),\n nn.Dense(\n self.model_dim,\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n ),\n nn.LayerNorm(\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n ),\n ]\n )(x)\n for _ in range(self.num_blocks):\n x = STBlock(\n dim=self.model_dim,\n ffn_dim=self.ffn_dim,\n num_heads=self.num_heads,\n dropout=self.dropout,\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n use_flash_attention=self.use_flash_attention,\n )(x)\n x = nn.Dense(\n self.out_dim,\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n )(x)\n return x # (B, T, E)\n\n\ndef normalize(x):\n return x / (jnp.linalg.norm(x, ord=2, axis=-1, keepdims=True) + 1e-8)\n\n\nclass VectorQuantizer(nn.Module):\n latent_dim: int\n num_latents: int\n dropout: float\n\n def setup(self):\n self.codebook = normalize(\n self.param(\n ""codebook"",\n nn.initializers.lecun_uniform(),\n (self.num_latents, self.latent_dim),\n )\n )\n self.drop = nn.Dropout(self.dropout, deterministic=False)\n\n def __call__(\n self, x: jax.Array, training: bool\n ) -> Tuple[jax.Array, jax.Array, jax.Array, jax.Array]:\n # --- Compute distances ---\n x = normalize(x)\n 
codebook = normalize(self.codebook)\n distance = -jnp.matmul(x, codebook.T)\n if training:\n dropout_key = self.make_rng(""dropout"")\n distance = self.drop(distance, rng=dropout_key)\n\n # --- Get indices and embeddings ---\n indices = jnp.argmin(distance, axis=-1)\n z = self.codebook[indices]\n\n # --- Straight through estimator ---\n z_q = x + jax.lax.stop_gradient(z - x)\n return z_q, z, x, indices\n\n def get_codes(self, indices: jax.Array):\n return self.codebook[indices]\n\n\ndef _create_flash_attention_fn(use_flash_attention: bool, is_causal: bool):\n """"""\n Create an attention function that uses flash attention if enabled.\n\n Flax MultiHeadAttention provides tensors with shape (batch..., length, num_heads, head_dim)\n jax.nn.dot_product_attention expects (batch, length, num_heads, head_dim).\n\n We need to reshape to ensure compatibility. cuDNN's flash attention additionally\n requires a sequence length that is a multiple of 4. We pad the sequence length to the nearest\n multiple of 4 and mask accordingly.\n """"""\n\n def attention_fn(query, key, value, bias=None, mask=None, **kwargs):\n implementation = ""cudnn"" if use_flash_attention else None\n\n def _rearrange(x):\n return einops.rearrange(x, ""... l h d -> (...) l h d"")\n\n def _pad(x):\n return jnp.pad(x, ((0, 0), (0, pad_size), (0, 0), (0, 0)))\n\n def _fuse_masks(mask: jax.Array, attention_mask: jax.Array) -> jax.Array:\n mask_bool = mask.astype(jnp.bool_)\n expanded_mask = jnp.pad(\n mask_bool, ((0, pad_size), (0, pad_size)), constant_values=False\n )\n return jnp.logical_and(attention_mask, expanded_mask)\n\n original_shape = query.shape\n original_seq_len = query.shape[-3]\n\n # Pad to nearest multiple of 4\n target_seq_len = ((original_seq_len + 3) // 4) * 4\n pad_size = target_seq_len - original_seq_len\n\n query_4d = _pad(_rearrange(query))\n key_4d = _pad(_rearrange(key))\n value_4d = _pad(_rearrange(value))\n\n attention_mask = jnp.ones((target_seq_len, target_seq_len), dtype=jnp.bool_)\n attention_mask = attention_mask.at[original_seq_len:, :].set(False)\n attention_mask = attention_mask.at[:, original_seq_len:].set(False)\n\n mask_4d = (\n _fuse_masks(mask, attention_mask) if mask is not None else attention_mask\n )\n mask_4d = mask_4d[jnp.newaxis, jnp.newaxis, :, :] # (1, 1, seq_len, seq_len)\n\n bias_4d = _pad(_rearrange(bias)) if bias is not None else None\n\n output_4d = jax.nn.dot_product_attention(\n query=query_4d,\n key=key_4d,\n value=value_4d,\n bias=bias_4d,\n mask=mask_4d,\n implementation=implementation,\n is_causal=is_causal,\n **kwargs\n )\n return output_4d[..., :original_seq_len, :, :].reshape(original_shape)\n\n return attention_fn\n",python,tab +1332,6081369,"utils/nn.py",3101,0,"",python,selection_mouse +1333,6081518,"utils/nn.py",3095,13,"STTransformer",python,selection_mouse +1334,6093764,"utils/nn.py",3108,0,"",python,selection_command +1335,6093859,"utils/nn.py",3095,0,"Bidirectional",python,content +1336,6131982,"utils/nn.py",1046,0,"",python,selection_mouse +1337,6132122,"utils/nn.py",1046,1," ",python,selection_mouse +1338,6132612,"utils/nn.py",1021,0,"",python,selection_mouse +1339,6132743,"utils/nn.py",1018,7,"STBlock",python,selection_mouse +1340,6139310,"utils/nn.py",1025,0,"",python,selection_command +1341,6139327,"utils/nn.py",3959,0,"Bidirectional",python,content +1342,6139328,"utils/nn.py",1018,0,"Bidirectional",python,content +1343,6155968,"utils/nn.py",3257,0,"",python,selection_mouse +1344,6155970,"utils/nn.py",3256,0,"",python,selection_command 
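The `utils/nn.py` buffer captured above pads every sequence to a multiple of 4 before calling `jax.nn.dot_product_attention` with `implementation="cudnn"`, and fuses that padding mask with the caller's causal mask (`jnp.tri` over the unpadded temporal length). A minimal sketch that exercises just the padding and mask-fusion arithmetic from the recorded `_create_flash_attention_fn`, under the same shape conventions:

```python
# Minimal sketch of the pad-to-multiple-of-4 + mask fusion from the recorded utils/nn.py.
import jax.numpy as jnp

def padded_len(seq_len: int) -> int:
    # cuDNN flash attention requires a sequence length divisible by 4.
    return ((seq_len + 3) // 4) * 4

seq_len = 7
target = padded_len(seq_len)          # 8
pad = target - seq_len                # 1

# Padding mask: padded positions must neither attend nor be attended to.
attention_mask = jnp.ones((target, target), dtype=jnp.bool_)
attention_mask = attention_mask.at[seq_len:, :].set(False)
attention_mask = attention_mask.at[:, seq_len:].set(False)

# Fuse with the temporal causal mask over the real tokens,
# exactly as _fuse_masks does before the jax.nn.dot_product_attention call.
causal = jnp.tri(seq_len).astype(jnp.bool_)
causal_padded = jnp.pad(causal, ((0, pad), (0, pad)), constant_values=False)
fused = jnp.logical_and(attention_mask, causal_padded)

assert bool(fused[2, 1]) and not bool(fused[1, 2])                    # causal among real tokens
assert not fused[seq_len:, :].any() and not fused[:, seq_len:].any()  # padding masked out
```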
+1345,6156639,"utils/nn.py",3335,0,"",python,selection_mouse +1346,6156641,"utils/nn.py",3334,0,"",python,selection_command +1347,6165744,"utils/nn.py",3643,0,"",python,selection_mouse +1348,6165746,"utils/nn.py",3642,0,"",python,selection_command +1349,6248436,"utils/nn.py",2549,0,"",python,selection_mouse +1350,6268022,"utils/nn.py",1050,0,"",python,selection_command +1351,6269640,"utils/nn.py",4471,0,"",python,selection_mouse +1352,6269807,"utils/nn.py",4470,1,"\n",python,selection_mouse +1353,6269867,"utils/nn.py",3104,1367,"ass BidirectionalSTTransformer(nn.Module):\n model_dim: int\n ffn_dim: int\n out_dim: int\n num_blocks: int\n num_heads: int\n dropout: float\n param_dtype: jnp.dtype\n dtype: jnp.dtype\n use_flash_attention: bool\n\n @nn.compact\n def __call__(self, x: jax.Array) -> jax.Array:\n x = nn.Sequential(\n [\n nn.LayerNorm(\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n ),\n nn.Dense(\n self.model_dim,\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n ),\n nn.LayerNorm(\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n ),\n ]\n )(x)\n for _ in range(self.num_blocks):\n x = BidirectionalSTBlock(\n dim=self.model_dim,\n ffn_dim=self.ffn_dim,\n num_heads=self.num_heads,\n dropout=self.dropout,\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n use_flash_attention=self.use_flash_attention,\n )(x)\n x = nn.Dense(\n self.out_dim,\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n )(x)\n return x # (B, T, E)\n\n",python,selection_mouse +1354,6269867,"utils/nn.py",3103,1368,"lass BidirectionalSTTransformer(nn.Module):\n model_dim: int\n ffn_dim: int\n out_dim: int\n num_blocks: int\n num_heads: int\n dropout: float\n param_dtype: jnp.dtype\n dtype: jnp.dtype\n use_flash_attention: bool\n\n @nn.compact\n def __call__(self, x: jax.Array) -> jax.Array:\n x = nn.Sequential(\n [\n nn.LayerNorm(\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n ),\n nn.Dense(\n self.model_dim,\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n ),\n nn.LayerNorm(\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n ),\n ]\n )(x)\n for _ in range(self.num_blocks):\n x = BidirectionalSTBlock(\n dim=self.model_dim,\n ffn_dim=self.ffn_dim,\n num_heads=self.num_heads,\n dropout=self.dropout,\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n use_flash_attention=self.use_flash_attention,\n )(x)\n x = nn.Dense(\n self.out_dim,\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n )(x)\n return x # (B, T, E)\n\n",python,selection_mouse +1355,6269871,"utils/nn.py",3101,1370,"\nclass BidirectionalSTTransformer(nn.Module):\n model_dim: int\n ffn_dim: int\n out_dim: int\n num_blocks: int\n num_heads: int\n dropout: float\n param_dtype: jnp.dtype\n dtype: jnp.dtype\n use_flash_attention: bool\n\n @nn.compact\n def __call__(self, x: jax.Array) -> jax.Array:\n x = nn.Sequential(\n [\n nn.LayerNorm(\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n ),\n nn.Dense(\n self.model_dim,\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n ),\n nn.LayerNorm(\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n ),\n ]\n )(x)\n for _ in range(self.num_blocks):\n x = BidirectionalSTBlock(\n dim=self.model_dim,\n ffn_dim=self.ffn_dim,\n num_heads=self.num_heads,\n dropout=self.dropout,\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n use_flash_attention=self.use_flash_attention,\n )(x)\n x = nn.Dense(\n self.out_dim,\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n )(x)\n return x # (B, T, E)\n\n",python,selection_mouse +1356,6269936,"utils/nn.py",3100,1371,"\n\nclass 
BidirectionalSTTransformer(nn.Module):\n model_dim: int\n ffn_dim: int\n out_dim: int\n num_blocks: int\n num_heads: int\n dropout: float\n param_dtype: jnp.dtype\n dtype: jnp.dtype\n use_flash_attention: bool\n\n @nn.compact\n def __call__(self, x: jax.Array) -> jax.Array:\n x = nn.Sequential(\n [\n nn.LayerNorm(\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n ),\n nn.Dense(\n self.model_dim,\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n ),\n nn.LayerNorm(\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n ),\n ]\n )(x)\n for _ in range(self.num_blocks):\n x = BidirectionalSTBlock(\n dim=self.model_dim,\n ffn_dim=self.ffn_dim,\n num_heads=self.num_heads,\n dropout=self.dropout,\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n use_flash_attention=self.use_flash_attention,\n )(x)\n x = nn.Dense(\n self.out_dim,\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n )(x)\n return x # (B, T, E)\n\n",python,selection_mouse +1357,6270004,"utils/nn.py",1012,3459,"class BidirectionalSTBlock(nn.Module):\n dim: int\n ffn_dim: int\n num_heads: int\n dropout: float\n param_dtype: jnp.dtype\n dtype: jnp.dtype\n use_flash_attention: bool\n\n @nn.remat\n @nn.compact\n def __call__(self, x: jax.Array) -> jax.Array:\n # --- Spatial attention ---\n z = PositionalEncoding(self.dim)(x)\n z = nn.LayerNorm(\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n )(z)\n z = nn.MultiHeadAttention(\n num_heads=self.num_heads,\n qkv_features=self.dim,\n dropout_rate=self.dropout,\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n attention_fn=_create_flash_attention_fn(\n self.use_flash_attention, is_causal=False\n ),\n )(z)\n x = x + z\n\n # --- Temporal attention ---\n x = x.swapaxes(1, 2)\n z = PositionalEncoding(self.dim)(x)\n z = nn.LayerNorm(\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n )(z)\n causal_mask = jnp.tri(z.shape[-2])\n z = nn.MultiHeadAttention(\n num_heads=self.num_heads,\n qkv_features=self.dim,\n dropout_rate=self.dropout,\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n attention_fn=_create_flash_attention_fn(\n self.use_flash_attention, is_causal=True\n ),\n # FIXME (f.srambical): check whether we should still pass the mask if we set is_causal=True\n )(z, mask=causal_mask)\n x = x + z\n x = x.swapaxes(1, 2)\n\n # --- Feedforward ---\n z = nn.LayerNorm(\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n )(x)\n z = nn.Dense(\n self.ffn_dim,\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n )(z)\n z = nn.gelu(z)\n z = nn.Dense(\n self.dim,\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n )(z)\n x = x + z\n\n return x\n\n\nclass BidirectionalSTTransformer(nn.Module):\n model_dim: int\n ffn_dim: int\n out_dim: int\n num_blocks: int\n num_heads: int\n dropout: float\n param_dtype: jnp.dtype\n dtype: jnp.dtype\n use_flash_attention: bool\n\n @nn.compact\n def __call__(self, x: jax.Array) -> jax.Array:\n x = nn.Sequential(\n [\n nn.LayerNorm(\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n ),\n nn.Dense(\n self.model_dim,\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n ),\n nn.LayerNorm(\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n ),\n ]\n )(x)\n for _ in range(self.num_blocks):\n x = BidirectionalSTBlock(\n dim=self.model_dim,\n ffn_dim=self.ffn_dim,\n num_heads=self.num_heads,\n dropout=self.dropout,\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n use_flash_attention=self.use_flash_attention,\n )(x)\n x = nn.Dense(\n self.out_dim,\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n )(x)\n return x # (B, T, 
E)\n\n",python,selection_mouse +1358,6270121,"utils/nn.py",1011,3460,"\nclass BidirectionalSTBlock(nn.Module):\n dim: int\n ffn_dim: int\n num_heads: int\n dropout: float\n param_dtype: jnp.dtype\n dtype: jnp.dtype\n use_flash_attention: bool\n\n @nn.remat\n @nn.compact\n def __call__(self, x: jax.Array) -> jax.Array:\n # --- Spatial attention ---\n z = PositionalEncoding(self.dim)(x)\n z = nn.LayerNorm(\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n )(z)\n z = nn.MultiHeadAttention(\n num_heads=self.num_heads,\n qkv_features=self.dim,\n dropout_rate=self.dropout,\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n attention_fn=_create_flash_attention_fn(\n self.use_flash_attention, is_causal=False\n ),\n )(z)\n x = x + z\n\n # --- Temporal attention ---\n x = x.swapaxes(1, 2)\n z = PositionalEncoding(self.dim)(x)\n z = nn.LayerNorm(\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n )(z)\n causal_mask = jnp.tri(z.shape[-2])\n z = nn.MultiHeadAttention(\n num_heads=self.num_heads,\n qkv_features=self.dim,\n dropout_rate=self.dropout,\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n attention_fn=_create_flash_attention_fn(\n self.use_flash_attention, is_causal=True\n ),\n # FIXME (f.srambical): check whether we should still pass the mask if we set is_causal=True\n )(z, mask=causal_mask)\n x = x + z\n x = x.swapaxes(1, 2)\n\n # --- Feedforward ---\n z = nn.LayerNorm(\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n )(x)\n z = nn.Dense(\n self.ffn_dim,\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n )(z)\n z = nn.gelu(z)\n z = nn.Dense(\n self.dim,\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n )(z)\n x = x + z\n\n return x\n\n\nclass BidirectionalSTTransformer(nn.Module):\n model_dim: int\n ffn_dim: int\n out_dim: int\n num_blocks: int\n num_heads: int\n dropout: float\n param_dtype: jnp.dtype\n dtype: jnp.dtype\n use_flash_attention: bool\n\n @nn.compact\n def __call__(self, x: jax.Array) -> jax.Array:\n x = nn.Sequential(\n [\n nn.LayerNorm(\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n ),\n nn.Dense(\n self.model_dim,\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n ),\n nn.LayerNorm(\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n ),\n ]\n )(x)\n for _ in range(self.num_blocks):\n x = BidirectionalSTBlock(\n dim=self.model_dim,\n ffn_dim=self.ffn_dim,\n num_heads=self.num_heads,\n dropout=self.dropout,\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n use_flash_attention=self.use_flash_attention,\n )(x)\n x = nn.Dense(\n self.out_dim,\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n )(x)\n return x # (B, T, E)\n\n",python,selection_mouse +1359,6271654,"utils/nn.py",1011,0,"",python,selection_command +1360,6272973,"utils/nn.py",1011,0,"\nclass BidirectionalSTBlock(nn.Module):\n dim: int\n ffn_dim: int\n num_heads: int\n dropout: float\n param_dtype: jnp.dtype\n dtype: jnp.dtype\n use_flash_attention: bool\n\n @nn.remat\n @nn.compact\n def __call__(self, x: jax.Array) -> jax.Array:\n # --- Spatial attention ---\n z = PositionalEncoding(self.dim)(x)\n z = nn.LayerNorm(\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n )(z)\n z = nn.MultiHeadAttention(\n num_heads=self.num_heads,\n qkv_features=self.dim,\n dropout_rate=self.dropout,\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n attention_fn=_create_flash_attention_fn(\n self.use_flash_attention, is_causal=False\n ),\n )(z)\n x = x + z\n\n # --- Temporal attention ---\n x = x.swapaxes(1, 2)\n z = PositionalEncoding(self.dim)(x)\n z = nn.LayerNorm(\n 
param_dtype=self.param_dtype,\n dtype=self.dtype,\n )(z)\n causal_mask = jnp.tri(z.shape[-2])\n z = nn.MultiHeadAttention(\n num_heads=self.num_heads,\n qkv_features=self.dim,\n dropout_rate=self.dropout,\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n attention_fn=_create_flash_attention_fn(\n self.use_flash_attention, is_causal=True\n ),\n # FIXME (f.srambical): check whether we should still pass the mask if we set is_causal=True\n )(z, mask=causal_mask)\n x = x + z\n x = x.swapaxes(1, 2)\n\n # --- Feedforward ---\n z = nn.LayerNorm(\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n )(x)\n z = nn.Dense(\n self.ffn_dim,\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n )(z)\n z = nn.gelu(z)\n z = nn.Dense(\n self.dim,\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n )(z)\n x = x + z\n\n return x\n\n\nclass BidirectionalSTTransformer(nn.Module):\n model_dim: int\n ffn_dim: int\n out_dim: int\n num_blocks: int\n num_heads: int\n dropout: float\n param_dtype: jnp.dtype\n dtype: jnp.dtype\n use_flash_attention: bool\n\n @nn.compact\n def __call__(self, x: jax.Array) -> jax.Array:\n x = nn.Sequential(\n [\n nn.LayerNorm(\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n ),\n nn.Dense(\n self.model_dim,\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n ),\n nn.LayerNorm(\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n ),\n ]\n )(x)\n for _ in range(self.num_blocks):\n x = BidirectionalSTBlock(\n dim=self.model_dim,\n ffn_dim=self.ffn_dim,\n num_heads=self.num_heads,\n dropout=self.dropout,\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n use_flash_attention=self.use_flash_attention,\n )(x)\n x = nn.Dense(\n self.out_dim,\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n )(x)\n return x # (B, T, E)\n\n\n",python,content +1361,6272984,"utils/nn.py",1011,0,"",python,selection_command +1362,6274095,"utils/nn.py",1012,0,"",python,selection_command +1363,6274559,"utils/nn.py",1013,0,"",python,selection_command +1364,6274796,"utils/nn.py",1014,0,"",python,selection_command +1365,6274966,"utils/nn.py",1015,0,"",python,selection_command +1366,6275102,"utils/nn.py",1016,0,"",python,selection_command +1367,6275245,"utils/nn.py",1017,0,"",python,selection_command +1368,6275401,"utils/nn.py",1018,0,"",python,selection_command +1369,6276066,"utils/nn.py",1018,1,"",python,content +1370,6276264,"utils/nn.py",1018,1,"",python,content +1371,6276485,"utils/nn.py",1018,1,"",python,content +1372,6276622,"utils/nn.py",1018,1,"",python,content +1373,6276804,"utils/nn.py",1018,1,"",python,content +1374,6277008,"utils/nn.py",1018,1,"",python,content +1375,6277372,"utils/nn.py",1018,1,"",python,content +1376,6277563,"utils/nn.py",1018,1,"",python,content +1377,6277742,"utils/nn.py",1018,1,"",python,content +1378,6277937,"utils/nn.py",1018,1,"",python,content +1379,6278117,"utils/nn.py",1018,1,"",python,content +1380,6278336,"utils/nn.py",1018,1,"",python,content +1381,6278528,"utils/nn.py",1018,1,"",python,content +1382,6279341,"utils/nn.py",1018,0,"C",python,content +1383,6279343,"utils/nn.py",1019,0,"",python,selection_keyboard +1384,6279419,"utils/nn.py",1019,0,"a",python,content +1385,6279420,"utils/nn.py",1020,0,"",python,selection_keyboard +1386,6279544,"utils/nn.py",1020,0,"u",python,content +1387,6279545,"utils/nn.py",1021,0,"",python,selection_keyboard +1388,6279733,"utils/nn.py",1021,0,"s",python,content +1389,6279734,"utils/nn.py",1022,0,"",python,selection_keyboard +1390,6279805,"utils/nn.py",1022,0,"a",python,content 
+1391,6279806,"utils/nn.py",1023,0,"",python,selection_keyboard +1392,6279901,"utils/nn.py",1023,0,"l",python,content +1393,6279902,"utils/nn.py",1024,0,"",python,selection_keyboard +1394,6280306,"utils/nn.py",1023,0,"",python,selection_command +1395,6282319,"utils/nn.py",3094,0,"",python,selection_mouse +1396,6283480,"utils/nn.py",3101,0,"",python,selection_mouse +1397,6284705,"utils/nn.py",3101,1,"",python,content +1398,6284861,"utils/nn.py",3101,1,"",python,content +1399,6285030,"utils/nn.py",3101,1,"",python,content +1400,6285212,"utils/nn.py",3101,1,"",python,content +1401,6285381,"utils/nn.py",3101,1,"",python,content +1402,6285566,"utils/nn.py",3101,1,"",python,content +1403,6285752,"utils/nn.py",3101,1,"",python,content +1404,6285933,"utils/nn.py",3101,1,"",python,content +1405,6286136,"utils/nn.py",3101,1,"",python,content +1406,6286322,"utils/nn.py",3101,1,"",python,content +1407,6286524,"utils/nn.py",3101,1,"",python,content +1408,6286717,"utils/nn.py",3101,1,"",python,content +1409,6286938,"utils/nn.py",3101,1,"",python,content +1410,6287424,"utils/nn.py",3101,0,"C",python,content +1411,6287427,"utils/nn.py",3102,0,"",python,selection_keyboard +1412,6287607,"utils/nn.py",3102,0,"a",python,content +1413,6287608,"utils/nn.py",3103,0,"",python,selection_keyboard +1414,6287711,"utils/nn.py",3103,0,"u",python,content +1415,6287712,"utils/nn.py",3104,0,"",python,selection_keyboard +1416,6287828,"utils/nn.py",3104,0,"s",python,content +1417,6287829,"utils/nn.py",3105,0,"",python,selection_keyboard +1418,6287998,"utils/nn.py",3105,0,"a",python,content +1419,6288001,"utils/nn.py",3106,0,"",python,selection_keyboard +1420,6288054,"utils/nn.py",3106,0,"l",python,content +1421,6288055,"utils/nn.py",3107,0,"",python,selection_keyboard +1422,6288509,"utils/nn.py",3106,0,"",python,selection_command +1423,6295003,"utils/nn.py",4458,0,"",python,selection_mouse +1424,6295940,"utils/nn.py",4458,1,"",python,content +1425,6296076,"utils/nn.py",4457,0,"",python,selection_command +1426,6296710,"utils/nn.py",4457,1,"",python,content +1427,6298336,"utils/nn.py",4457,0,"\n",python,content +1428,6298358,"utils/nn.py",4457,0,"",python,selection_command +1429,6302669,"utils/nn.py",3958,0,"",python,selection_mouse +1430,6303414,"utils/nn.py",3958,1,"",python,content +1431,6303638,"utils/nn.py",3958,1,"",python,content +1432,6303812,"utils/nn.py",3958,1,"",python,content +1433,6304289,"utils/nn.py",3958,1,"",python,content +1434,6304466,"utils/nn.py",3958,1,"",python,content +1435,6304650,"utils/nn.py",3958,1,"",python,content +1436,6304819,"utils/nn.py",3958,1,"",python,content +1437,6305009,"utils/nn.py",3958,1,"",python,content +1438,6305222,"utils/nn.py",3958,1,"",python,content +1439,6305411,"utils/nn.py",3958,1,"",python,content +1440,6305638,"utils/nn.py",3958,1,"",python,content +1441,6305782,"utils/nn.py",3958,1,"",python,content +1442,6305999,"utils/nn.py",3958,1,"",python,content +1443,6306581,"utils/nn.py",3958,0,"C",python,content +1444,6306583,"utils/nn.py",3959,0,"",python,selection_keyboard +1445,6306759,"utils/nn.py",3959,0,"a",python,content +1446,6306762,"utils/nn.py",3960,0,"",python,selection_keyboard +1447,6306849,"utils/nn.py",3960,0,"u",python,content +1448,6306851,"utils/nn.py",3961,0,"",python,selection_keyboard +1449,6306967,"utils/nn.py",3961,0,"s",python,content +1450,6306969,"utils/nn.py",3962,0,"",python,selection_keyboard +1451,6307107,"utils/nn.py",3962,0,"a",python,content +1452,6307109,"utils/nn.py",3963,0,"",python,selection_keyboard 
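Annotation: the keystrokes above duplicate the Bidirectional block/transformer pair and rename the copies to CausalSTBlock and CausalSTTransformer. The difference the rename sets up is the attention mask; a toy sketch (assumed example, not the recorded module) contrasting the two:

import jax.numpy as jnp

seq_len = 4
causal_mask = jnp.tri(seq_len)        # 1.0 where query i may attend key j (j <= i)
bidirectional_mask = jnp.ones((seq_len, seq_len))

# Query at position 1 sees keys 0..1 under the causal mask, all keys otherwise.
assert causal_mask[1].tolist() == [1.0, 1.0, 0.0, 0.0]
assert bidirectional_mask[1].tolist() == [1.0, 1.0, 1.0, 1.0]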
+1453,6307186,"utils/nn.py",3963,0,"l",python,content +1454,6307187,"utils/nn.py",3964,0,"",python,selection_keyboard +1455,6307532,"utils/nn.py",3963,0,"",python,selection_command +1456,6313684,"utils/nn.py",4206,0,"",python,selection_mouse +1457,6313700,"utils/nn.py",4205,0,"",python,selection_command +1458,6314224,"utils/nn.py",4241,0,"",python,selection_mouse +1459,6316251,"utils/nn.py",3132,0,"",python,selection_command +1460,6321009,"utils/nn.py",1787,0,"",python,selection_mouse +1461,6322136,"utils/nn.py",1787,5,"",python,content +1462,6322157,"utils/nn.py",1786,0,"",python,selection_command +1463,6323146,"utils/nn.py",1787,0,"",python,selection_command +1464,6323755,"utils/nn.py",1787,0,"T",python,content +1465,6323756,"utils/nn.py",1788,0,"",python,selection_keyboard +1466,6323927,"utils/nn.py",1788,0,"r",python,content +1467,6323929,"utils/nn.py",1789,0,"",python,selection_keyboard +1468,6324013,"utils/nn.py",1789,0,"u",python,content +1469,6324015,"utils/nn.py",1790,0,"",python,selection_keyboard +1470,6324092,"utils/nn.py",1790,0,"e",python,content +1471,6324093,"utils/nn.py",1791,0,"",python,selection_keyboard +1472,6324375,"utils/nn.py",1790,0,"",python,selection_command +1473,6324957,"utils/nn.py",1805,0,"",python,selection_command +1474,6325108,"utils/nn.py",1818,0,"",python,selection_command +1475,6326158,"utils/nn.py",1818,0,"m",python,content +1476,6326159,"utils/nn.py",1819,0,"",python,selection_keyboard +1477,6326285,"utils/nn.py",1819,0," ",python,content +1478,6326287,"utils/nn.py",1820,0,"",python,selection_keyboard +1479,6326865,"utils/nn.py",1819,1,"",python,content +1480,6327229,"utils/nn.py",1818,1,"",python,content +1481,6327687,"utils/nn.py",1818,0,",",python,content +1482,6327688,"utils/nn.py",1819,0,"",python,selection_keyboard +1483,6327753,"utils/nn.py",1819,0," ",python,content +1484,6327754,"utils/nn.py",1820,0,"",python,selection_keyboard +1485,6327903,"utils/nn.py",1820,0,"m",python,content +1486,6327905,"utils/nn.py",1821,0,"",python,selection_keyboard +1487,6328125,"utils/nn.py",1821,0,"a",python,content +1488,6328127,"utils/nn.py",1822,0,"",python,selection_keyboard +1489,6328291,"utils/nn.py",1822,0,"s",python,content +1490,6328293,"utils/nn.py",1823,0,"",python,selection_keyboard +1491,6328380,"utils/nn.py",1823,0,"k",python,content +1492,6328381,"utils/nn.py",1824,0,"",python,selection_keyboard +1493,6329815,"utils/nn.py",1823,0,"",python,selection_command +1494,6332092,"utils/nn.py",2578,0,"",python,selection_mouse +1495,6332917,"utils/nn.py",2082,0,"",python,selection_mouse +1496,6333448,"utils/nn.py",2074,0,"",python,selection_command +1497,6334066,"utils/nn.py",2074,0,"t",python,content +1498,6334068,"utils/nn.py",2075,0,"",python,selection_keyboard +1499,6334263,"utils/nn.py",2075,0,"e",python,content +1500,6334264,"utils/nn.py",2076,0,"",python,selection_keyboard +1501,6334368,"utils/nn.py",2076,0,"m",python,content +1502,6334369,"utils/nn.py",2077,0,"",python,selection_keyboard +1503,6334500,"utils/nn.py",2077,0,"p",python,content +1504,6334501,"utils/nn.py",2078,0,"",python,selection_keyboard +1505,6334591,"utils/nn.py",2078,0,"o",python,content +1506,6334592,"utils/nn.py",2079,0,"",python,selection_keyboard +1507,6334890,"utils/nn.py",2079,0,"r",python,content +1508,6334891,"utils/nn.py",2080,0,"",python,selection_keyboard +1509,6335058,"utils/nn.py",2080,0,"a",python,content +1510,6335059,"utils/nn.py",2081,0,"",python,selection_keyboard +1511,6335074,"utils/nn.py",2081,0,"l",python,content 
+1512,6335075,"utils/nn.py",2082,0,"",python,selection_keyboard +1513,6335468,"utils/nn.py",2082,0,"_",python,content +1514,6335469,"utils/nn.py",2083,0,"",python,selection_keyboard +1515,6335820,"utils/nn.py",2082,0,"",python,selection_command +1516,6336483,"utils/nn.py",2064,0,"",python,selection_command +1517,6336974,"utils/nn.py",2039,0,"",python,selection_command +1518,6337041,"utils/nn.py",1997,0,"",python,selection_command +1519,6337042,"utils/nn.py",1971,0,"",python,selection_command +1520,6337099,"utils/nn.py",1927,0,"",python,selection_command +1521,6337100,"utils/nn.py",1898,0,"",python,selection_command +1522,6337117,"utils/nn.py",1861,0,"",python,selection_command +1523,6337147,"utils/nn.py",1844,0,"",python,selection_command +1524,6337209,"utils/nn.py",1842,0,"",python,selection_command +1525,6337212,"utils/nn.py",1823,0,"",python,selection_command +1526,6337230,"utils/nn.py",1805,0,"",python,selection_command +1527,6337263,"utils/nn.py",1751,0,"",python,selection_command +1528,6337297,"utils/nn.py",1698,0,"",python,selection_command +1529,6337366,"utils/nn.py",1668,0,"",python,selection_command +1530,6337367,"utils/nn.py",1626,0,"",python,selection_command +1531,6337398,"utils/nn.py",1587,0,"",python,selection_command +1532,6337435,"utils/nn.py",1552,0,"",python,selection_command +1533,6337449,"utils/nn.py",1514,0,"",python,selection_command +1534,6337477,"utils/nn.py",1479,0,"",python,selection_command +1535,6337737,"utils/nn.py",1461,0,"",python,selection_command +1536,6338073,"utils/nn.py",1462,0,"\n temporal_causal_mask = jnp.tri(z.shape[-2])",python,content +1537,6338095,"utils/nn.py",1471,0,"",python,selection_command +1538,6339139,"utils/nn.py",1471,1,"",python,content +1539,6339318,"utils/nn.py",1471,1,"",python,content +1540,6339474,"utils/nn.py",1471,1,"",python,content +1541,6339616,"utils/nn.py",1471,1,"",python,content +1542,6339819,"utils/nn.py",1471,1,"",python,content +1543,6340007,"utils/nn.py",1471,1,"",python,content +1544,6340189,"utils/nn.py",1471,1,"",python,content +1545,6340409,"utils/nn.py",1471,1,"",python,content +1546,6341336,"utils/nn.py",1471,0,"s",python,content +1547,6341337,"utils/nn.py",1472,0,"",python,selection_keyboard +1548,6341432,"utils/nn.py",1472,0,"p",python,content +1549,6341433,"utils/nn.py",1473,0,"",python,selection_keyboard +1550,6341578,"utils/nn.py",1473,0,"a",python,content +1551,6341578,"utils/nn.py",1474,0,"",python,selection_keyboard +1552,6341781,"utils/nn.py",1474,0,"t",python,content +1553,6341783,"utils/nn.py",1475,0,"",python,selection_keyboard +1554,6341863,"utils/nn.py",1475,0,"i",python,content +1555,6341865,"utils/nn.py",1476,0,"",python,selection_keyboard +1556,6342024,"utils/nn.py",1476,0,"a",python,content +1557,6342026,"utils/nn.py",1477,0,"",python,selection_keyboard +1558,6342066,"utils/nn.py",1477,0,"l",python,content +1559,6342067,"utils/nn.py",1478,0,"",python,selection_keyboard +1560,6342375,"utils/nn.py",1477,0,"",python,selection_command +1561,6343034,"utils/nn.py",1528,0,"",python,selection_command +1562,6343176,"utils/nn.py",1563,0,"",python,selection_command +1563,6343347,"utils/nn.py",1601,0,"",python,selection_command +1564,6343621,"utils/nn.py",1636,0,"",python,selection_command +1565,6343785,"utils/nn.py",1675,0,"",python,selection_command +1566,6343930,"utils/nn.py",1717,0,"",python,selection_command +1567,6344091,"utils/nn.py",1747,0,"",python,selection_command +1568,6344241,"utils/nn.py",1800,0,"",python,selection_command +1569,6344385,"utils/nn.py",1856,0,"",python,selection_command 
+1570,6344552,"utils/nn.py",1872,0,"",python,selection_command +1571,6344781,"utils/nn.py",1873,0,"",python,selection_command +1572,6344944,"utils/nn.py",1874,0,"",python,selection_command +1573,6345101,"utils/nn.py",1875,0,"",python,selection_command +1574,6346023,"utils/nn.py",1875,0,"=",python,content +1575,6346024,"utils/nn.py",1876,0,"",python,selection_keyboard +1576,6346311,"utils/nn.py",1876,0,"s",python,content +1577,6346312,"utils/nn.py",1877,0,"",python,selection_keyboard +1578,6346394,"utils/nn.py",1877,0,"p",python,content +1579,6346395,"utils/nn.py",1878,0,"",python,selection_keyboard +1580,6346517,"utils/nn.py",1878,0,"a",python,content +1581,6346519,"utils/nn.py",1879,0,"",python,selection_keyboard +1582,6348461,"utils/nn.py",1876,3,"spatial_causal_mask",python,content +1583,6349108,"utils/nn.py",1894,0,"",python,selection_command +1584,6349273,"utils/nn.py",1913,0,"",python,selection_command +1585,6349492,"utils/nn.py",1915,0,"",python,selection_command +1586,6350008,"utils/nn.py",1951,0,"",python,selection_command +1587,6350009,"utils/nn.py",1980,0,"",python,selection_command +1588,6350036,"utils/nn.py",2018,0,"",python,selection_command +1589,6350063,"utils/nn.py",2050,0,"",python,selection_command +1590,6350095,"utils/nn.py",2088,0,"",python,selection_command +1591,6350152,"utils/nn.py",2122,0,"",python,selection_command +1592,6350157,"utils/nn.py",2135,0,"",python,selection_command +1593,6350219,"utils/nn.py",2173,0,"",python,selection_command +1594,6350221,"utils/nn.py",2222,0,"",python,selection_command +1595,6350285,"utils/nn.py",2260,0,"",python,selection_command +1596,6350287,"utils/nn.py",2295,0,"",python,selection_command +1597,6350305,"utils/nn.py",2333,0,"",python,selection_command +1598,6350408,"utils/nn.py",2372,0,"",python,selection_command +1599,6350575,"utils/nn.py",2406,0,"",python,selection_command +1600,6350729,"utils/nn.py",2444,0,"",python,selection_command +1601,6350894,"utils/nn.py",2497,0,"",python,selection_command +1602,6351049,"utils/nn.py",2531,0,"",python,selection_command +1603,6351196,"utils/nn.py",2569,0,"",python,selection_command +1604,6351340,"utils/nn.py",2666,0,"",python,selection_command +1605,6353043,"utils/nn.py",2655,0,"t",python,content +1606,6353044,"utils/nn.py",2656,0,"",python,selection_keyboard +1607,6354537,"utils/nn.py",2655,1,"temporal_causal_mask",python,content +1608,6355360,"utils/nn.py",2675,11,"",python,content +1609,6355808,"utils/nn.py",2674,0,"",python,selection_command +1610,6393045,"utils/nn.py",1043,0,"",python,selection_command +1611,6530454,"TERMINAL",0,0,"bash",,terminal_focus +1612,6531661,"TERMINAL",0,0,"queue",,terminal_command +1613,6531750,"TERMINAL",0,0,"]633;E;2025-07-24 15:49:06 queue;adbf53fe-397b-40d3-9339-94ea79afad56]633;C[?1049h(B[?7hEvery 1.0s: squeue --mehkn1991.localdomain: Thu Jul 24 15:49:06 2025JOBID PARTITION NAME USER ST\tTIME NODES NODELIST(REASON)3373205 accelerat train_dy tum_cte0 PD\t0:00\t 2 (Priority)3373213 accelerat train_dy tum_cte0 PD\t0:00\t 8 (Priority)3371237 accelerat train_dy tum_cte0 R 15:14:51\t 8 hkn[0618,0625-0626,0628-0631,0634]3371238 accelerat train_dy tum_cte0 R 15:14:51\t 2 hkn[0706,0710]3372631 accelerat train_dy tum_cte0 R 1:35:54\t 2 hkn[0515,0622]3372629 accelerat train_dy tum_cte0 R 2:21:17\t 8 hkn[0410,0429,0520,0607,0610,0810,0814,0817]",,terminal_output +1614,6532844,"TERMINAL",0,0,"72258",,terminal_output +1615,6532880,"TERMINAL",0,0,"[?1049l\r[?1l>]0;tum_cte0515@hkn1991:~/Projects/jafar]633;D;0",,terminal_output 
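Annotation: the edits recorded above introduce spatial_causal_mask and rename causal_mask to temporal_causal_mask, so the causal block masks both attention axes. A minimal sketch of the axis handling (assumed toy shapes): attention first runs over the patch axis N, then the tensor is transposed so the same machinery runs over time T.

import jax.numpy as jnp

B, T, N, E = 2, 3, 5, 8
x = jnp.zeros((B, T, N, E))

spatial_causal_mask = jnp.tri(x.shape[-2])      # (N, N) over patches
x_t = x.swapaxes(1, 2)                          # (B, N, T, E)
temporal_causal_mask = jnp.tri(x_t.shape[-2])   # (T, T) over frames

assert spatial_causal_mask.shape == (N, N)
assert temporal_causal_mask.shape == (T, T)
assert x_t.swapaxes(1, 2).shape == (B, T, N, E)  # transposing back restores layout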
+1616,6534488,"TERMINAL",0,0,"bash",,terminal_focus +1617,6537988,"TERMINAL",0,0,"git status",,terminal_command +1618,6538026,"TERMINAL",0,0,"]633;E;2025-07-24 15:49:12 git status;406cfb31-2341-454a-afa8-cae7781806b2]633;C",,terminal_output +1619,6538129,"TERMINAL",0,0,"On branch causal-st-transformer\r\nChanges not staged for commit:\r\n (use ""git add ..."" to update what will be committed)\r\n (use ""git restore ..."" to discard changes in working directory)\r\n\tmodified: models/dynamics.py\r\n\tmodified: models/lam.py\r\n\tmodified: models/tokenizer.py\r\n\tmodified: utils/nn.py\r\n\r\nUntracked files:\r\n (use ""git add ..."" to include in what will be committed)\r\n\tdiff.diff\r\n\tdiff.log\r\n\tlogs/\r\n\toverfit_dir.zip\r\n\tread_tf_record.py\r\n\trequirements-franz.txt\r\n\tscripts_cremers/\r\n\tscripts_horeka/\r\n\tslurm/\r\n\tutils/logger_bak.py\r\n\tutils/visualizer.py\r\n\tweekend-job-requeuer.sh\r\n\tweekend-job-starter.sh\r\n\r\nno changes added to commit (use ""git add"" and/or ""git commit -a"")\r\n]0;tum_cte0515@hkn1991:~/Projects/jafar]633;D;0",,terminal_output +1620,6547000,"TERMINAL",0,0,"git diff monkey-patch",,terminal_command +1621,6547054,"TERMINAL",0,0,"]633;E;2025-07-24 15:49:21 git diff monkey-patch ;406cfb31-2341-454a-afa8-cae7781806b2]633;C[?1h=\r",,terminal_output +1622,6547142,"TERMINAL",0,0,"diff --git a/genie.py b/genie.py\r\nindex f591a73..825e181 100644\r\n--- a/genie.py\r\n+++ b/genie.py\r\n@@ -6,15 +6,11 @@ import jax.numpy as jnp\r\n import flax.linen as nn\r\n from flax.training.train_state import TrainState\r\n import orbax.checkpoint as ocp\r\n-from jax import NamedSharding\r\n-from orbax.checkpoint import PyTreeCheckpointer\r\n-from flax.training import orbax_utils\r\n \r\n from models.dynamics import DynamicsMaskGIT\r\n from models.lam import LatentActionModel\r\n from models.tokenizer import TokenizerVQVAE\r\n \r\n-import os\r\n import grain\r\n \r\n \r\n@@ -24,6 +20,7 @@ class Genie(nn.Module):\r\n # --- Tokenizer ---\r\n in_dim: int\r\n tokenizer_dim: int\r\n+ tokenizer_ffn_dim: int\r\n latent_patch_dim: int\r\n num_patch_latents: int\r\n patch_size: int\r\n@@ -31,6 +28,7 @@ class Genie(nn.Module):\r\n tokenizer_num_heads: int\r\n # --- LAM ---\r\n lam_dim: int\r\n+ lam_ffn_dim: int\r\n latent_action_dim: int\r\n num_latent_actions: int\r\n lam_patch_size: int\r\n@@ -39,8 +37,12 @@ class Genie(nn.Module):\r\n lam_co_train: bool\r\n # --- Dynamics ---\r\n dyna_dim: int\r\n+ dyna_ffn_dim: int\r\n dyna_num_blocks: int\r\n dyna_num_heads: int\r\n+ param_dtype: jnp.dtype\r\n+ dtype: jnp.dtype\r\n+ use_flash_attention: bool\r\n dropout: float = 0.0\r\n:",,terminal_output +1623,6548258,"TERMINAL",0,0,"diff --git a/genie.py b/genie.py\r\nindex f591a73..825e181 100644\r\n--- a/genie.py\r\n+++ b/genie.py\r\n@@ -6,15 +6,11 @@ import jax.numpy as jnp\r\n import flax.linen as nn\r\n from flax.training.train_state import TrainState\r\n import orbax.checkpoint as ocp\r\n-from jax import NamedSharding\r\n-from orbax.checkpoint import PyTreeCheckpointer\r\n-from flax.training import orbax_utils\r\n \r\n from models.dynamics import DynamicsMaskGIT\r\n from models.lam import LatentActionModel\r\n from models.tokenizer import TokenizerVQVAE\r\n \r\n-import os\r\n import grain\r\n \r\n \r\n@@ -24,6 +20,7 @@ class Genie(nn.Module):\r\n # --- Tokenizer ---\r\n in_dim: int\r\n tokenizer_dim: int\r\n+ tokenizer_ffn_dim: int\r\n latent_patch_dim: int\r\n num_patch_latents: int\r\n patch_size: int\r\n@@ -31,6 +28,7 @@ class Genie(nn.Module):\r\n tokenizer_num_heads: 
int\r\n # --- LAM ---\r\n lam_dim: int\r\n+ lam_ffn_dim: int\r\n latent_action_dim: int\r\n num_latent_actions: int\r\n lam_patch_size: int\r\n@@ -39,8 +37,12 @@ class Genie(nn.Module):\r\n lam_co_train: bool\r\n # --- Dynamics ---\r\n dyna_dim: int\r\n+ dyna_ffn_dim: int\r\n dyna_num_blocks: int\r\n dyna_num_heads: int\r\n+ param_dtype: jnp.dtype\r\n+ dtype: jnp.dtype\r\n+ use_flash_attention: bool\r\n dropout: float = 0.0\r\n:",,terminal_output +1624,6549487,"TERMINAL",0,0,"\r[?1l>\r\n]0;tum_cte0515@hkn1991:~/Projects/jafar]633;D;130",,terminal_output +1625,6554541,"TERMINAL",0,0,"git diff",,terminal_command +1626,6554638,"TERMINAL",0,0,"]633;E;2025-07-24 15:49:29 git diff;406cfb31-2341-454a-afa8-cae7781806b2]633;C[?1h=\rdiff --git a/models/dynamics.py b/models/dynamics.py\r\nindex bcef59c..c9c57c6 100644\r\n--- a/models/dynamics.py\r\n+++ b/models/dynamics.py\r\n@@ -4,7 +4,7 @@ import jax\r\n import jax.numpy as jnp\r\n import flax.linen as nn\r\n \r\n-from utils.nn import STTransformer\r\n+from utils.nn import BidirectionalSTTransformer\r\n \r\n \r\n class DynamicsMaskGIT(nn.Module):\r\n@@ -22,7 +22,7 @@ class DynamicsMaskGIT(nn.Module):\r\n use_flash_attention: bool\r\n \r\n def setup(self):\r\n- self.dynamics = STTransformer(\r\n+ self.dynamics = BidirectionalSTTransformer(\r\n self.model_dim,\r\n self.ffn_dim,\r\n self.num_latents,\r\ndiff --git a/models/lam.py b/models/lam.py\r\nindex 1c98276..a06fb61 100644\r\n--- a/models/lam.py\r\n+++ b/models/lam.py\r\n@@ -4,7 +4,7 @@ import jax.numpy as jnp\r\n import flax.linen as nn\r\n \r\n from utils.preprocess import patchify, unpatchify\r\n-from utils.nn import STTransformer, VectorQuantizer\r\n+from utils.nn import BidirectionalSTTransformer, VectorQuantizer\r\n \r\n \r\n class LatentActionModel(nn.Module):\r\n@@ -26,7 +26,7 @@ class LatentActionModel(nn.Module):\r\n \r\n def setup(self):\r\n self.patch_token_dim = self.in_dim * self.patch_size**2\r\n- self.encoder = STTransformer(\r\n+ self.encoder = BidirectionalSTTransformer(\r\n self.model_dim,\r\n self.ffn_dim,\r\n self.latent_dim,\r\n@@ -57,7 +57,7 @@ class LatentActionModel(nn.Module):\r\n param_dtype=self.param_dtype,\r\n dtype=self.dtype,\r\n:",,terminal_output +1627,6555369,"TERMINAL",0,0,"\r )\r\n:",,terminal_output +1628,6556512,"TERMINAL",0,0,"\r- self.decoder = STTransformer(\r\n:\r+ self.decoder = BidirectionalSTTransformer(\r\n:\r self.model_dim,\r\n:\r self.ffn_dim,\r\n:\r self.patch_token_dim,\r\n:\rdiff --git a/models/tokenizer.py b/models/tokenizer.py\r\n:\rindex 123c953..6a8dddb 100644\r\n:\r--- a/models/tokenizer.py\r\n:\r+++ b/models/tokenizer.py\r\n:\r@@ -4,7 +4,7 @@ import flax.linen as nn\r\n:\r import jax.numpy as jnp\r\n:\r \r\n:\r from utils.preprocess import patchify, unpatchify\r\n:\r-from utils.nn import STTransformer, VectorQuantizer\r\n:\r+from utils.nn import BidirectionalSTTransformer, VectorQuantizer\r\n:\r \r\n:\r \r\n:\r class TokenizerVQVAE(nn.Module):\r\n:\r@@ -25,7 +25,7 @@ class TokenizerVQVAE(nn.Module):\r\n:\r use_flash_attention: bool\r\n:\r \r\n:\r def setup(self):\r\n:\r- self.encoder = STTransformer(\r\n:\r+ self.encoder = BidirectionalSTTransformer(\r\n:",,terminal_output +1629,6556859,"TERMINAL",0,0,"\r self.model_dim,\r\n:\r self.ffn_dim,\r\n:\r self.latent_dim,\r\n:\r@@ -42,7 +42,7 @@ class TokenizerVQVAE(nn.Module):\r\n:\r self.codebook_dropout,\r\n:\r )\r\n:\r self.out_dim = self.in_dim * self.patch_size**2\r\n:\r- self.decoder = STTransformer(\r\n:\r+ self.decoder = BidirectionalSTTransformer(\r\n:\r self.model_dim,\r\n:\r 
self.ffn_dim,\r\n:",,terminal_output +1630,6556949,"TERMINAL",0,0,"\r[?1l>]0;tum_cte0515@hkn1991:~/Projects/jafar]633;D;0",,terminal_output +1631,6569237,"TERMINAL",0,0,"git commit -am ""implemented causal ST transformer""",,terminal_command +1632,6569277,"TERMINAL",0,0,"]633;E;2025-07-24 15:49:43 git commit -am ""implemented causal ST transformer"";406cfb31-2341-454a-afa8-cae7781806b2]633;C",,terminal_output +1633,6569472,"TERMINAL",0,0,"[causal-st-transformer d8c55c3] implemented causal ST transformer\r\n 4 files changed, 133 insertions(+), 11 deletions(-)\r\n]0;tum_cte0515@hkn1991:~/Projects/jafar]633;D;0]633;P;Cwd=/home/hk-project-p0023960/tum_cte0515/Projects/jafar",,terminal_output +1634,6577012,"TERMINAL",0,0,"git checkout new-arch-sampling",,terminal_command +1635,6577057,"TERMINAL",0,0,"]633;E;2025-07-24 15:49:51 git checkout new-arch-sampling;406cfb31-2341-454a-afa8-cae7781806b2]633;C",,terminal_output +1636,6577087,"TERMINAL",0,0,"Switched to branch 'new-arch-sampling'\r\nYour branch is up to date with 'origin/new-arch-sampling'.\r\n]0;tum_cte0515@hkn1991:~/Projects/jafar]633;D;0",,terminal_output +1637,6580826,"",0,0,"Switched from branch 'causal-st-transformer' to 'new-arch-sampling'",,git_branch_checkout +1638,6583252,"models/lam.py",0,0,"from typing import Dict, Any\n\nimport jax.numpy as jnp\nimport flax.linen as nn\n\nfrom utils.preprocess import patchify, unpatchify\nfrom utils.nn import STTransformer, VectorQuantizer\n\n\nclass LatentActionModel(nn.Module):\n """"""Latent Action ST-ViVit VQ-VAE""""""\n\n in_dim: int\n model_dim: int\n ffn_dim: int\n latent_dim: int\n num_latents: int\n patch_size: int\n num_blocks: int\n num_heads: int\n dropout: float\n codebook_dropout: float\n param_dtype: jnp.dtype\n dtype: jnp.dtype\n use_flash_attention: bool\n\n def setup(self):\n self.patch_token_dim = self.in_dim * self.patch_size**2\n self.encoder = STTransformer(\n self.model_dim,\n self.ffn_dim,\n self.latent_dim,\n self.num_blocks,\n self.num_heads,\n self.dropout,\n self.param_dtype,\n self.dtype,\n use_flash_attention=self.use_flash_attention,\n )\n self.action_in = self.param(\n ""action_in"",\n nn.initializers.lecun_uniform(),\n (1, 1, 1, self.patch_token_dim),\n )\n self.vq = VectorQuantizer(\n self.latent_dim,\n self.num_latents,\n self.codebook_dropout,\n )\n self.patch_up = nn.Dense(\n self.model_dim,\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n )\n self.action_up = nn.Dense(\n self.model_dim,\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n )\n self.decoder = STTransformer(\n self.model_dim,\n self.ffn_dim,\n self.patch_token_dim,\n self.num_blocks,\n self.num_heads,\n self.dropout,\n self.param_dtype,\n self.dtype,\n use_flash_attention=self.use_flash_attention,\n )\n\n def __call__(self, batch: Dict[str, Any], training: bool = True) -> Dict[str, Any]:\n # --- Encode + VQ ---\n H, W = batch[""videos""].shape[2:4]\n outputs = self.vq_encode(batch[""videos""], training)\n video_action_patches = self.action_up(outputs[""z_q""]) + self.patch_up(\n outputs[""patches""][:, :-1]\n )\n del outputs[""patches""]\n\n # --- Decode ---\n video_recon = self.decoder(video_action_patches)\n video_recon = video_recon.astype(jnp.float32)\n video_recon = nn.sigmoid(video_recon)\n video_recon = video_recon.astype(self.dtype)\n outputs[""recon""] = unpatchify(video_recon, self.patch_size, H, W)\n return outputs\n\n def vq_encode(self, videos: Any, training: bool = True) -> Dict[str, Any]:\n # --- Preprocess videos ---\n B, T = videos.shape[:2]\n patches = patchify(videos, 
self.patch_size)\n action_pad = jnp.broadcast_to(self.action_in, (B, T, 1, self.patch_token_dim))\n # FIXME mihir do this the other way around\n padded_patches = jnp.concatenate((patches, action_pad), axis=2)\n\n # --- Encode ---\n z = self.encoder(padded_patches) # (B, T, N, E)\n # Get latent action for all future frames\n z = z[:, 1:, 0] # (B, T-1, E)\n\n # --- Vector quantize ---\n z = z.reshape(B * (T - 1), self.latent_dim)\n z_q, z, emb, indices = self.vq(z, training)\n z_q = z_q.reshape(B, T - 1, 1, self.latent_dim)\n return dict(patches=patches, z_q=z_q, z=z, emb=emb, indices=indices)\n",python,tab +1639,6585941,"models/lam.py",3437,0,"",python,selection_mouse +1640,6587020,"models/lam.py",2964,0,"",python,selection_mouse +1641,6587780,"models/lam.py",2963,0,"",python,selection_command +1642,6588439,"models/lam.py",2963,7,"",python,content +1643,6589147,"models/lam.py",2963,1,"",python,content +1644,6589328,"models/lam.py",2963,1,"",python,content +1645,6592066,"models/lam.py",2973,0,",",python,content +1646,6592068,"models/lam.py",2974,0,"",python,selection_keyboard +1647,6592146,"models/lam.py",2974,0," ",python,content +1648,6592147,"models/lam.py",2975,0,"",python,selection_keyboard +1649,6592335,"models/lam.py",2975,0,"p",python,content +1650,6592336,"models/lam.py",2976,0,"",python,selection_keyboard +1651,6592440,"models/lam.py",2976,0,"a",python,content +1652,6592442,"models/lam.py",2977,0,"",python,selection_keyboard +1653,6592630,"models/lam.py",2977,0,"t",python,content +1654,6592632,"models/lam.py",2978,0,"",python,selection_keyboard +1655,6592825,"models/lam.py",2978,0,"c",python,content +1656,6592826,"models/lam.py",2979,0,"",python,selection_keyboard +1657,6592893,"models/lam.py",2979,0,"h",python,content +1658,6592895,"models/lam.py",2980,0,"",python,selection_keyboard +1659,6593058,"models/lam.py",2980,0,"e",python,content +1660,6593060,"models/lam.py",2981,0,"",python,selection_keyboard +1661,6593203,"models/lam.py",2981,0,"s",python,content +1662,6593203,"models/lam.py",2982,0,"",python,selection_keyboard +1663,6593417,"models/lam.py",2981,0,"",python,selection_command +1664,6597870,"models/lam.py",2973,9,"",python,content +1665,6597875,"models/lam.py",2963,0,", ",python,content +1666,6597878,"models/lam.py",2963,0,"",python,selection_command +1667,6598326,"models/lam.py",2963,0,"patches",python,content +1668,6598343,"models/lam.py",2963,0,"",python,selection_command +1669,6602015,"models/lam.py",2963,7,"",python,content +1670,6602332,"models/lam.py",2963,2,"",python,content +1671,6602332,"models/lam.py",2973,0,", patches",python,content +1672,6625091,"models/lam.py",2973,9,"",python,content +1673,6625093,"models/lam.py",2963,0,", ",python,content +1674,6625096,"models/lam.py",2963,0,"",python,selection_command +1675,6625552,"models/lam.py",2963,0,"patches",python,content +1676,6625571,"models/lam.py",2963,0,"",python,selection_command +1677,6668763,"TERMINAL",0,0,"git checkout causal-st-transformer",,terminal_command +1678,6668813,"TERMINAL",0,0,"]633;E;2025-07-24 15:51:23 git checkout causal-st-transformer;406cfb31-2341-454a-afa8-cae7781806b2]633;C",,terminal_output +1679,6668867,"TERMINAL",0,0,"Switched to branch 'causal-st-transformer'\r\n]0;tum_cte0515@hkn1991:~/Projects/jafar]633;D;0",,terminal_output +1680,6670835,"",0,0,"Switched from branch 'new-arch-sampling' to 'causal-st-transformer'",,git_branch_checkout +1681,6671495,"TERMINAL",0,0,"git branch",,terminal_command +1682,6671512,"TERMINAL",0,0,"]633;E;2025-07-24 15:51:26 git 
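Annotation: the lam.py edits above (on the new-arch-sampling branch) flip the concatenation so the action token precedes the patch tokens. A toy sketch of the resulting layout (assumed shapes, not the recorded module): with the action token prepended, the latent action is read from index 0 of the patch axis after encoding.

import jax.numpy as jnp

B, T, N, D = 2, 4, 6, 16
patches = jnp.ones((B, T, N, D))
action_pad = jnp.zeros((B, T, 1, D))             # broadcasted action_in token

padded = jnp.concatenate((action_pad, patches), axis=2)  # (B, T, N + 1, D)
assert padded.shape == (B, T, N + 1, D)
assert float(padded[0, 0, 0, 0]) == 0.0          # action token sits at index 0
# z = encoder(padded)[:, 1:, 0] would then select it for frames 1..T-1.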
branch;406cfb31-2341-454a-afa8-cae7781806b2]633;C[?1h=\r add-wandb-name-and-tags\r\n* causal-st-transformer\r\n causal-transformer-dynamics-model\r\n convert-to-jax-array-in-iter\r\n correct-batched-sampling\r\n dev\r\n dont-let-tf-see-gpu\r\n feat/explicit-image-dims\r\n fix-sampling\r\n grad-norm-log-and-clip\r\n grain-dataloader\r\n logging-variants\r\n lr-schedules\r\n main\r\n maskgit-different-maskprob-per-sample\r\n metrics-logging-for-dynamics-model\r\n monkey-patch\r\n new-arch-sampling\r\n preprocess_video\r\n refactor-tmp\r\n revised-dataloader\r\n runner\r\n runner-grain\r\n sample-from-different-topologies\r\n speedup-tfrecord-preprocessing\r\n tmp\r\n\r[?1l>]0;tum_cte0515@hkn1991:~/Projects/jafar]633;D;0",,terminal_output +1683,6713314,"TERMINAL",0,0,"bash",,terminal_focus +1684,6714298,"models/lam.py",0,0,"from typing import Dict, Any\n\nimport jax.numpy as jnp\nimport flax.linen as nn\n\nfrom utils.preprocess import patchify, unpatchify\nfrom utils.nn import BidirectionalSTTransformer, VectorQuantizer\n\n\nclass LatentActionModel(nn.Module):\n """"""Latent Action ST-ViVit VQ-VAE""""""\n\n in_dim: int\n model_dim: int\n ffn_dim: int\n latent_dim: int\n num_latents: int\n patch_size: int\n num_blocks: int\n num_heads: int\n dropout: float\n codebook_dropout: float\n param_dtype: jnp.dtype\n dtype: jnp.dtype\n use_flash_attention: bool\n\n def setup(self):\n self.patch_token_dim = self.in_dim * self.patch_size**2\n self.encoder = BidirectionalSTTransformer(\n self.model_dim,\n self.ffn_dim,\n self.latent_dim,\n self.num_blocks,\n self.num_heads,\n self.dropout,\n self.param_dtype,\n self.dtype,\n use_flash_attention=self.use_flash_attention,\n )\n self.action_in = self.param(\n ""action_in"",\n nn.initializers.lecun_uniform(),\n (1, 1, 1, self.patch_token_dim),\n )\n self.vq = VectorQuantizer(\n self.latent_dim,\n self.num_latents,\n self.codebook_dropout,\n )\n self.patch_up = nn.Dense(\n self.model_dim,\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n )\n self.action_up = nn.Dense(\n self.model_dim,\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n )\n self.decoder = BidirectionalSTTransformer(\n self.model_dim,\n self.ffn_dim,\n self.patch_token_dim,\n self.num_blocks,\n self.num_heads,\n self.dropout,\n self.param_dtype,\n self.dtype,\n use_flash_attention=self.use_flash_attention,\n )\n\n def __call__(self, batch: Dict[str, Any], training: bool = True) -> Dict[str, Any]:\n # --- Encode + VQ ---\n H, W = batch[""videos""].shape[2:4]\n outputs = self.vq_encode(batch[""videos""], training)\n video_action_patches = self.action_up(outputs[""z_q""]) + self.patch_up(\n outputs[""patches""][:, :-1]\n )\n del outputs[""patches""]\n\n # --- Decode ---\n video_recon = self.decoder(video_action_patches)\n video_recon = video_recon.astype(jnp.float32)\n video_recon = nn.sigmoid(video_recon)\n video_recon = video_recon.astype(self.dtype)\n outputs[""recon""] = unpatchify(video_recon, self.patch_size, H, W)\n return outputs\n\n def vq_encode(self, videos: Any, training: bool = True) -> Dict[str, Any]:\n # --- Preprocess videos ---\n B, T = videos.shape[:2]\n patches = patchify(videos, self.patch_size)\n action_pad = jnp.broadcast_to(self.action_in, (B, T, 1, self.patch_token_dim))\n padded_patches = jnp.concatenate((action_pad, patches), axis=2)\n\n # --- Encode ---\n z = self.encoder(padded_patches) # (B, T, N, E)\n # Get latent action for all future frames\n z = z[:, 1:, 0] # (B, T-1, E)\n\n # --- Vector quantize ---\n z = z.reshape(B * (T - 1), self.latent_dim)\n z_q, z, 
emb, indices = self.vq(z, training)\n z_q = z_q.reshape(B, T - 1, 1, self.latent_dim)\n return dict(patches=patches, z_q=z_q, z=z, emb=emb, indices=indices)\n",python,tab +1685,6728407,"models/lam.py",2947,0,"",python,selection_mouse +1686,6728544,"models/lam.py",2938,11,"concatenate",python,selection_mouse +1687,6729752,"models/lam.py",2926,0,"",python,selection_mouse +1688,6729909,"models/lam.py",2917,14,"padded_patches",python,selection_mouse +1689,6730713,"models/lam.py",3098,0,"",python,selection_mouse +1690,6730878,"models/lam.py",3096,3,"all",python,selection_mouse +1691,6731375,"models/lam.py",3040,0,"",python,selection_mouse +1692,6731549,"models/lam.py",3032,14,"padded_patches",python,selection_mouse +1693,6732162,"models/lam.py",3029,0,"",python,selection_mouse +1694,6732304,"models/lam.py",3024,7,"encoder",python,selection_mouse +1695,6732805,"models/lam.py",3036,0,"",python,selection_mouse +1696,6732971,"models/lam.py",3032,14,"padded_patches",python,selection_mouse +1697,6735701,"models/lam.py",3003,0,"",python,selection_mouse +1698,6736216,"models/lam.py",3028,0,"",python,selection_mouse +1699,6766299,"models/lam.py",3038,0,"",python,selection_mouse +1700,6766459,"models/lam.py",3032,14,"padded_patches",python,selection_mouse +1701,6766591,"models/lam.py",3007,57," z = self.encoder(padded_patches) # (B, T, N, E)\n",python,selection_mouse +1702,6768183,"models/lam.py",2945,0,"",python,selection_mouse +1703,6768322,"models/lam.py",2938,11,"concatenate",python,selection_mouse +1704,6768457,"models/lam.py",2909,72," padded_patches = jnp.concatenate((action_pad, patches), axis=2)\n",python,selection_mouse +1705,6793272,"models/lam.py",3006,0,"",python,selection_mouse +1706,6793290,"models/lam.py",3005,0,"",python,selection_command +1707,6794262,"models/lam.py",2628,0,"",python,selection_mouse +1708,6794396,"models/lam.py",2627,3,"def",python,selection_mouse +1709,6794583,"models/lam.py",2627,158,"def vq_encode(self, videos: Any, training: bool = True) -> Dict[str, Any]:\n # --- Preprocess videos ---\n B, T = videos.shape[:2]\n patches",python,selection_mouse +1710,6794605,"models/lam.py",2627,354,"def vq_encode(self, videos: Any, training: bool = True) -> Dict[str, Any]:\n # --- Preprocess videos ---\n B, T = videos.shape[:2]\n patches = patchify(videos, self.patch_size)\n action_pad = jnp.broadcast_to(self.action_in, (B, T, 1, self.patch_token_dim))\n padded_patches = jnp.concatenate((action_pad, patches), axis=2)\n",python,selection_mouse +1711,6794624,"models/lam.py",2627,516,"def vq_encode(self, videos: Any, training: bool = True) -> Dict[str, Any]:\n # --- Preprocess videos ---\n B, T = videos.shape[:2]\n patches = patchify(videos, self.patch_size)\n action_pad = jnp.broadcast_to(self.action_in, (B, T, 1, self.patch_token_dim))\n padded_patches = jnp.concatenate((action_pad, patches), axis=2)\n\n # --- Encode ---\n z = self.encoder(padded_patches) # (B, T, N, E)\n # Get latent action for all future frames\n z = z[:, 1:, 0] # (B",python,selection_mouse +1712,6794641,"models/lam.py",2627,560,"def vq_encode(self, videos: Any, training: bool = True) -> Dict[str, Any]:\n # --- Preprocess videos ---\n B, T = videos.shape[:2]\n patches = patchify(videos, self.patch_size)\n action_pad = jnp.broadcast_to(self.action_in, (B, T, 1, self.patch_token_dim))\n padded_patches = jnp.concatenate((action_pad, patches), axis=2)\n\n # --- Encode ---\n z = self.encoder(padded_patches) # (B, T, N, E)\n # Get latent action for all future frames\n z = z[:, 1:, 0] # (B, T-1, E)\n\n # --- Vector 
quantize ---",python,selection_mouse +1713,6794659,"models/lam.py",2627,663,"def vq_encode(self, videos: Any, training: bool = True) -> Dict[str, Any]:\n # --- Preprocess videos ---\n B, T = videos.shape[:2]\n patches = patchify(videos, self.patch_size)\n action_pad = jnp.broadcast_to(self.action_in, (B, T, 1, self.patch_token_dim))\n padded_patches = jnp.concatenate((action_pad, patches), axis=2)\n\n # --- Encode ---\n z = self.encoder(padded_patches) # (B, T, N, E)\n # Get latent action for all future frames\n z = z[:, 1:, 0] # (B, T-1, E)\n\n # --- Vector quantize ---\n z = z.reshape(B * (T - 1), self.latent_dim)\n z_q, z, emb, indices = self.vq(z, training",python,selection_mouse +1714,6794676,"models/lam.py",2627,719,"def vq_encode(self, videos: Any, training: bool = True) -> Dict[str, Any]:\n # --- Preprocess videos ---\n B, T = videos.shape[:2]\n patches = patchify(videos, self.patch_size)\n action_pad = jnp.broadcast_to(self.action_in, (B, T, 1, self.patch_token_dim))\n padded_patches = jnp.concatenate((action_pad, patches), axis=2)\n\n # --- Encode ---\n z = self.encoder(padded_patches) # (B, T, N, E)\n # Get latent action for all future frames\n z = z[:, 1:, 0] # (B, T-1, E)\n\n # --- Vector quantize ---\n z = z.reshape(B * (T - 1), self.latent_dim)\n z_q, z, emb, indices = self.vq(z, training)\n z_q = z_q.reshape(B, T - 1, 1, self.latent_dim",python,selection_mouse +1715,6794692,"models/lam.py",2627,775,"def vq_encode(self, videos: Any, training: bool = True) -> Dict[str, Any]:\n # --- Preprocess videos ---\n B, T = videos.shape[:2]\n patches = patchify(videos, self.patch_size)\n action_pad = jnp.broadcast_to(self.action_in, (B, T, 1, self.patch_token_dim))\n padded_patches = jnp.concatenate((action_pad, patches), axis=2)\n\n # --- Encode ---\n z = self.encoder(padded_patches) # (B, T, N, E)\n # Get latent action for all future frames\n z = z[:, 1:, 0] # (B, T-1, E)\n\n # --- Vector quantize ---\n z = z.reshape(B * (T - 1), self.latent_dim)\n z_q, z, emb, indices = self.vq(z, training)\n z_q = z_q.reshape(B, T - 1, 1, self.latent_dim)\n return dict(patches=patches, z_q=z_q, z=z, emb",python,selection_mouse +1716,6794758,"models/lam.py",2627,779,"def vq_encode(self, videos: Any, training: bool = True) -> Dict[str, Any]:\n # --- Preprocess videos ---\n B, T = videos.shape[:2]\n patches = patchify(videos, self.patch_size)\n action_pad = jnp.broadcast_to(self.action_in, (B, T, 1, self.patch_token_dim))\n padded_patches = jnp.concatenate((action_pad, patches), axis=2)\n\n # --- Encode ---\n z = self.encoder(padded_patches) # (B, T, N, E)\n # Get latent action for all future frames\n z = z[:, 1:, 0] # (B, T-1, E)\n\n # --- Vector quantize ---\n z = z.reshape(B * (T - 1), self.latent_dim)\n z_q, z, emb, indices = self.vq(z, training)\n z_q = z_q.reshape(B, T - 1, 1, self.latent_dim)\n return dict(patches=patches, z_q=z_q, z=z, emb=emb",python,selection_mouse +1717,6794758,"models/lam.py",2627,798,"def vq_encode(self, videos: Any, training: bool = True) -> Dict[str, Any]:\n # --- Preprocess videos ---\n B, T = videos.shape[:2]\n patches = patchify(videos, self.patch_size)\n action_pad = jnp.broadcast_to(self.action_in, (B, T, 1, self.patch_token_dim))\n padded_patches = jnp.concatenate((action_pad, patches), axis=2)\n\n # --- Encode ---\n z = self.encoder(padded_patches) # (B, T, N, E)\n # Get latent action for all future frames\n z = z[:, 1:, 0] # (B, T-1, E)\n\n # --- Vector quantize ---\n z = z.reshape(B * (T - 1), self.latent_dim)\n z_q, z, emb, indices = self.vq(z, training)\n z_q 
= z_q.reshape(B, T - 1, 1, self.latent_dim)\n return dict(patches=patches, z_q=z_q, z=z, emb=emb, indices=indices)\n",python,selection_mouse +1718,6795407,"models/lam.py",2627,797,"def vq_encode(self, videos: Any, training: bool = True) -> Dict[str, Any]:\n # --- Preprocess videos ---\n B, T = videos.shape[:2]\n patches = patchify(videos, self.patch_size)\n action_pad = jnp.broadcast_to(self.action_in, (B, T, 1, self.patch_token_dim))\n padded_patches = jnp.concatenate((action_pad, patches), axis=2)\n\n # --- Encode ---\n z = self.encoder(padded_patches) # (B, T, N, E)\n # Get latent action for all future frames\n z = z[:, 1:, 0] # (B, T-1, E)\n\n # --- Vector quantize ---\n z = z.reshape(B * (T - 1), self.latent_dim)\n z_q, z, emb, indices = self.vq(z, training)\n z_q = z_q.reshape(B, T - 1, 1, self.latent_dim)\n return dict(patches=patches, z_q=z_q, z=z, emb=emb, indices=indices)",python,selection_mouse +1719,6803144,"models/lam.py",2856,0,"",python,selection_mouse +1720,6803145,"models/lam.py",2847,12,"broadcast_to",python,selection_mouse +1721,6803305,"models/lam.py",2822,87," action_pad = jnp.broadcast_to(self.action_in, (B, T, 1, self.patch_token_dim))\n",python,selection_mouse +1722,6804277,"models/lam.py",2945,0,"",python,selection_mouse +1723,6804421,"models/lam.py",2938,11,"concatenate",python,selection_mouse +1724,6804570,"models/lam.py",2909,72," padded_patches = jnp.concatenate((action_pad, patches), axis=2)\n",python,selection_mouse +1725,7027645,"models/lam.py",2821,0,"",python,selection_mouse +1726,7027648,"models/lam.py",2820,0,"",python,selection_command +1727,7028166,"models/lam.py",2966,0,"",python,selection_mouse +1728,7028724,"models/lam.py",2868,0,"",python,selection_mouse +1729,7029215,"models/lam.py",2981,0,"",python,selection_mouse +1730,7029984,"models/lam.py",2909,0,"",python,selection_command +1731,7030282,"models/lam.py",2910,0,"",python,selection_command +1732,7030785,"models/lam.py",2911,0,"",python,selection_command +1733,7030796,"models/lam.py",2912,0,"",python,selection_command +1734,7030854,"models/lam.py",2913,0,"",python,selection_command +1735,7030864,"models/lam.py",2914,0,"",python,selection_command +1736,7030928,"models/lam.py",2915,0,"",python,selection_command +1737,7030928,"models/lam.py",2916,0,"",python,selection_command +1738,7031001,"models/lam.py",2917,0,"",python,selection_command +1739,7031002,"models/lam.py",2918,0,"",python,selection_command +1740,7031008,"models/lam.py",2919,0,"",python,selection_command +1741,7031061,"models/lam.py",2920,0,"",python,selection_command +1742,7031078,"models/lam.py",2921,0,"",python,selection_command +1743,7031111,"models/lam.py",2922,0,"",python,selection_command +1744,7031132,"models/lam.py",2923,0,"",python,selection_command +1745,7031165,"models/lam.py",2924,0,"",python,selection_command +1746,7031194,"models/lam.py",2925,0,"",python,selection_command +1747,7031226,"models/lam.py",2926,0,"",python,selection_command +1748,7031298,"models/lam.py",2927,0,"",python,selection_command +1749,7031299,"models/lam.py",2928,0,"",python,selection_command +1750,7031308,"models/lam.py",2929,0,"",python,selection_command +1751,7031346,"models/lam.py",2930,0,"",python,selection_command +1752,7031379,"models/lam.py",2931,0,"",python,selection_command +1753,7031422,"models/lam.py",2932,0,"",python,selection_command +1754,7031439,"models/lam.py",2933,0,"",python,selection_command +1755,7031498,"models/lam.py",2934,0,"",python,selection_command +1756,7031499,"models/lam.py",2935,0,"",python,selection_command 
+1757,7031520,"models/lam.py",2936,0,"",python,selection_command +1758,7031562,"models/lam.py",2937,0,"",python,selection_command +1759,7031594,"models/lam.py",2938,0,"",python,selection_command +1760,7031660,"models/lam.py",2939,0,"",python,selection_command +1761,7031681,"models/lam.py",2940,0,"",python,selection_command +1762,7031696,"models/lam.py",2941,0,"",python,selection_command +1763,7031722,"models/lam.py",2942,0,"",python,selection_command +1764,7031755,"models/lam.py",2943,0,"",python,selection_command +1765,7031802,"models/lam.py",2944,0,"",python,selection_command +1766,7031838,"models/lam.py",2945,0,"",python,selection_command +1767,7031856,"models/lam.py",2946,0,"",python,selection_command +1768,7031869,"models/lam.py",2947,0,"",python,selection_command +1769,7031894,"models/lam.py",2948,0,"",python,selection_command +1770,7031936,"models/lam.py",2949,0,"",python,selection_command +1771,7031968,"models/lam.py",2950,0,"",python,selection_command +1772,7032150,"models/lam.py",2951,0,"",python,selection_command +1773,7032365,"models/lam.py",2952,0,"",python,selection_command +1774,7033291,"models/lam.py",2952,9,"",python,content +1775,7033957,"models/lam.py",2951,1,"",python,content +1776,7035175,"models/lam.py",2951,1,"",python,content +1777,7035344,"models/lam.py",2951,1,"",python,content +1778,7037596,"models/lam.py",2958,0,",",python,content +1779,7037598,"models/lam.py",2959,0,"",python,selection_keyboard +1780,7037683,"models/lam.py",2959,0," ",python,content +1781,7037684,"models/lam.py",2960,0,"",python,selection_keyboard +1782,7037947,"models/lam.py",2960,0,"c",python,content +1783,7037948,"models/lam.py",2961,0,"",python,selection_keyboard +1784,7038067,"models/lam.py",2961,0,"a",python,content +1785,7038068,"models/lam.py",2962,0,"",python,selection_keyboard +1786,7038569,"models/lam.py",2961,1,"",python,content +1787,7038696,"models/lam.py",2960,1,"",python,content +1788,7038774,"models/lam.py",2960,0,"a",python,content +1789,7038774,"models/lam.py",2961,0,"",python,selection_keyboard +1790,7038848,"models/lam.py",2961,0,"c",python,content +1791,7038849,"models/lam.py",2962,0,"",python,selection_keyboard +1792,7039472,"models/lam.py",2960,2,"action_pad",python,content +1793,7065922,"models/lam.py",2964,0,"",python,selection_mouse +1794,7066102,"models/lam.py",2963,0,"",python,selection_command +1795,7070097,"models/lam.py",2981,0,"",python,selection_command +1796,7070292,"models/lam.py",3005,0,"",python,selection_command +1797,7070461,"models/lam.py",3061,0,"",python,selection_command +1798,7070650,"models/lam.py",3112,0,"",python,selection_command +1799,7070849,"models/lam.py",3151,0,"",python,selection_command +1800,7076585,"models/lam.py",3153,0,"",python,selection_command +1801,7076757,"models/lam.py",3186,0,"",python,selection_command +1802,7077042,"models/lam.py",3238,0,"",python,selection_command +1803,7150678,"models/lam.py",2878,0,"",python,selection_mouse +1804,7150826,"models/lam.py",2877,1,"B",python,selection_mouse +1805,7151716,"models/lam.py",2978,0,"",python,selection_mouse +1806,7151816,"models/lam.py",2978,1,"2",python,selection_mouse +1807,7153221,"models/lam.py",3081,0,"",python,selection_mouse +1808,7156354,"models/lam.py",3136,0,"",python,selection_mouse +1809,7157958,"models/lam.py",3134,0,"",python,selection_mouse +1810,7158493,"models/lam.py",3135,0,"",python,selection_mouse +1811,7178133,"models/lam.py",3152,0,"",python,selection_mouse +1812,7178134,"models/lam.py",3151,0,"",python,selection_command 
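Annotation: back on the causal branch, the edits above move action_pad to the end of the concatenation, and the comment typed just below marks the extraction index for a change to -1. A plausible reading, sketched here with assumed toy shapes: under a lower-triangular spatial mask only the last position can attend to every patch in a frame, so the action token must come last and be read from index -1.

import jax.numpy as jnp

B, T, N, D = 2, 4, 6, 16
patches = jnp.ones((B, T, N, D))
action_pad = jnp.zeros((B, T, 1, D))

padded = jnp.concatenate((patches, action_pad), axis=2)  # action token last
spatial_causal_mask = jnp.tri(N + 1)
# The final row of the mask is all ones: the action token sees every patch.
assert spatial_causal_mask[-1].tolist() == [1.0] * (N + 1)
# z = encoder(padded)[:, 1:, -1] would then extract the latent action.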
+1813,7181479,"models/lam.py",3152,0,"",python,selection_command +1814,7181606,"models/lam.py",3152,0," ",python,content +1815,7181609,"models/lam.py",3153,0,"",python,selection_keyboard +1816,7183112,"models/lam.py",3153,0,"<",python,content +1817,7183113,"models/lam.py",3154,0,"",python,selection_keyboard +1818,7183301,"models/lam.py",3154,0,"-",python,content +1819,7183302,"models/lam.py",3155,0,"",python,selection_keyboard +1820,7183497,"models/lam.py",3155,0,"-",python,content +1821,7183498,"models/lam.py",3156,0,"",python,selection_keyboard +1822,7183637,"models/lam.py",3156,0,"-",python,content +1823,7183638,"models/lam.py",3157,0,"",python,selection_keyboard +1824,7183931,"models/lam.py",3157,0," ",python,content +1825,7183932,"models/lam.py",3158,0,"",python,selection_keyboard +1826,7184211,"models/lam.py",3158,0,"c",python,content +1827,7184212,"models/lam.py",3159,0,"",python,selection_keyboard +1828,7184366,"models/lam.py",3159,0,"h",python,content +1829,7184367,"models/lam.py",3160,0,"",python,selection_keyboard +1830,7184437,"models/lam.py",3160,0,"a",python,content +1831,7184438,"models/lam.py",3161,0,"",python,selection_keyboard +1832,7184572,"models/lam.py",3161,0,"n",python,content +1833,7184573,"models/lam.py",3162,0,"",python,selection_keyboard +1834,7184724,"models/lam.py",3162,0,"g",python,content +1835,7184725,"models/lam.py",3163,0,"",python,selection_keyboard +1836,7184806,"models/lam.py",3163,0,"e",python,content +1837,7184807,"models/lam.py",3164,0,"",python,selection_keyboard +1838,7184903,"models/lam.py",3164,0," ",python,content +1839,7184904,"models/lam.py",3165,0,"",python,selection_keyboard +1840,7185022,"models/lam.py",3165,0,"t",python,content +1841,7185024,"models/lam.py",3166,0,"",python,selection_keyboard +1842,7185184,"models/lam.py",3166,0,"h",python,content +1843,7185185,"models/lam.py",3167,0,"",python,selection_keyboard +1844,7185199,"models/lam.py",3167,0,"i",python,content +1845,7185200,"models/lam.py",3168,0,"",python,selection_keyboard +1846,7185250,"models/lam.py",3168,0,"s",python,content +1847,7185251,"models/lam.py",3169,0,"",python,selection_keyboard +1848,7185389,"models/lam.py",3169,0," ",python,content +1849,7185390,"models/lam.py",3170,0,"",python,selection_keyboard +1850,7185589,"models/lam.py",3170,0,"t",python,content +1851,7185589,"models/lam.py",3171,0,"",python,selection_keyboard +1852,7185590,"models/lam.py",3171,0,"o",python,content +1853,7185591,"models/lam.py",3172,0,"",python,selection_keyboard +1854,7185630,"models/lam.py",3172,0," ",python,content +1855,7185631,"models/lam.py",3173,0,"",python,selection_keyboard +1856,7187210,"models/lam.py",3173,0,"-",python,content +1857,7187212,"models/lam.py",3174,0,"",python,selection_keyboard +1858,7187276,"models/lam.py",3174,0,"1",python,content +1859,7187277,"models/lam.py",3175,0,"",python,selection_keyboard +1860,7187432,"models/lam.py",3175,0," ",python,content +1861,7187433,"models/lam.py",3176,0,"",python,selection_keyboard +1862,7295873,"models/lam.py",2980,0,"",python,selection_mouse +1863,7297059,"models/lam.py",3113,0,"",python,selection_mouse +1864,7298477,"models/lam.py",3028,0,"",python,selection_mouse +1865,7307356,"models/lam.py",668,0,"",python,selection_mouse +1866,7307719,"utils/nn.py",0,0,"import math\nfrom typing import Tuple\n\nfrom flax import linen as nn\nimport jax\nimport jax.numpy as jnp\nimport einops\n\n\nclass PositionalEncoding(nn.Module):\n 
""""""https://uvadlc-notebooks.readthedocs.io/en/latest/tutorial_notebooks/JAX/tutorial6/Transformers_and_MHAttention.html""""""\n\n d_model: int # Hidden dimensionality of the input.\n max_len: int = 5000 # Maximum length of a sequence to expect.\n\n def setup(self):\n # Create matrix of [SeqLen, HiddenDim] representing the positional encoding for max_len inputs\n self.pe = jnp.zeros((self.max_len, self.d_model))\n position = jnp.arange(0, self.max_len, dtype=jnp.float32)[:, None]\n div_term = jnp.exp(\n jnp.arange(0, self.d_model, 2) * (-math.log(10000.0) / self.d_model)\n )\n self.pe = self.pe.at[:, 0::2].set(jnp.sin(position * div_term))\n self.pe = self.pe.at[:, 1::2].set(jnp.cos(position * div_term))\n\n def __call__(self, x):\n x = x + self.pe[: x.shape[2]]\n return x\n\n\nclass CausalSTBlock(nn.Module):\n dim: int\n ffn_dim: int\n num_heads: int\n dropout: float\n param_dtype: jnp.dtype\n dtype: jnp.dtype\n use_flash_attention: bool\n\n @nn.remat\n @nn.compact\n def __call__(self, x: jax.Array) -> jax.Array:\n # --- Spatial attention ---\n z = PositionalEncoding(self.dim)(x)\n z = nn.LayerNorm(\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n )(z)\n spatial_causal_mask = jnp.tri(z.shape[-2])\n z = nn.MultiHeadAttention(\n num_heads=self.num_heads,\n qkv_features=self.dim,\n dropout_rate=self.dropout,\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n attention_fn=_create_flash_attention_fn(\n self.use_flash_attention, is_causal=True\n ),\n )(z, mask=spatial_causal_mask)\n x = x + z\n\n # --- Temporal attention ---\n x = x.swapaxes(1, 2)\n z = PositionalEncoding(self.dim)(x)\n z = nn.LayerNorm(\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n )(z)\n temporal_causal_mask = jnp.tri(z.shape[-2])\n z = nn.MultiHeadAttention(\n num_heads=self.num_heads,\n qkv_features=self.dim,\n dropout_rate=self.dropout,\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n attention_fn=_create_flash_attention_fn(\n self.use_flash_attention, is_causal=True\n ),\n # FIXME (f.srambical): check whether we should still pass the mask if we set is_causal=True\n )(z, mask=temporal_causal_mask)\n x = x + z\n x = x.swapaxes(1, 2)\n\n # --- Feedforward ---\n z = nn.LayerNorm(\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n )(x)\n z = nn.Dense(\n self.ffn_dim,\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n )(z)\n z = nn.gelu(z)\n z = nn.Dense(\n self.dim,\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n )(z)\n x = x + z\n\n return x\n\n\nclass CausalSTTransformer(nn.Module):\n model_dim: int\n ffn_dim: int\n out_dim: int\n num_blocks: int\n num_heads: int\n dropout: float\n param_dtype: jnp.dtype\n dtype: jnp.dtype\n use_flash_attention: bool\n\n @nn.compact\n def __call__(self, x: jax.Array) -> jax.Array:\n x = nn.Sequential(\n [\n nn.LayerNorm(\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n ),\n nn.Dense(\n self.model_dim,\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n ),\n nn.LayerNorm(\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n ),\n ]\n )(x)\n for _ in range(self.num_blocks):\n x = CausalSTBlock(\n dim=self.model_dim,\n ffn_dim=self.ffn_dim,\n num_heads=self.num_heads,\n dropout=self.dropout,\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n use_flash_attention=self.use_flash_attention,\n )(x)\n x = nn.Dense(\n self.out_dim,\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n )(x)\n return x # (B, T, E)\n\n\nclass BidirectionalSTBlock(nn.Module):\n dim: int\n ffn_dim: int\n num_heads: int\n dropout: float\n param_dtype: jnp.dtype\n dtype: 
jnp.dtype\n use_flash_attention: bool\n\n @nn.remat\n @nn.compact\n def __call__(self, x: jax.Array) -> jax.Array:\n # --- Spatial attention ---\n z = PositionalEncoding(self.dim)(x)\n z = nn.LayerNorm(\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n )(z)\n z = nn.MultiHeadAttention(\n num_heads=self.num_heads,\n qkv_features=self.dim,\n dropout_rate=self.dropout,\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n attention_fn=_create_flash_attention_fn(\n self.use_flash_attention, is_causal=False\n ),\n )(z)\n x = x + z\n\n # --- Temporal attention ---\n x = x.swapaxes(1, 2)\n z = PositionalEncoding(self.dim)(x)\n z = nn.LayerNorm(\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n )(z)\n causal_mask = jnp.tri(z.shape[-2])\n z = nn.MultiHeadAttention(\n num_heads=self.num_heads,\n qkv_features=self.dim,\n dropout_rate=self.dropout,\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n attention_fn=_create_flash_attention_fn(\n self.use_flash_attention, is_causal=True\n ),\n # FIXME (f.srambical): check whether we should still pass the mask if we set is_causal=True\n )(z, mask=causal_mask)\n x = x + z\n x = x.swapaxes(1, 2)\n\n # --- Feedforward ---\n z = nn.LayerNorm(\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n )(x)\n z = nn.Dense(\n self.ffn_dim,\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n )(z)\n z = nn.gelu(z)\n z = nn.Dense(\n self.dim,\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n )(z)\n x = x + z\n\n return x\n\n\nclass BidirectionalSTTransformer(nn.Module):\n model_dim: int\n ffn_dim: int\n out_dim: int\n num_blocks: int\n num_heads: int\n dropout: float\n param_dtype: jnp.dtype\n dtype: jnp.dtype\n use_flash_attention: bool\n\n @nn.compact\n def __call__(self, x: jax.Array) -> jax.Array:\n x = nn.Sequential(\n [\n nn.LayerNorm(\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n ),\n nn.Dense(\n self.model_dim,\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n ),\n nn.LayerNorm(\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n ),\n ]\n )(x)\n for _ in range(self.num_blocks):\n x = BidirectionalSTBlock(\n dim=self.model_dim,\n ffn_dim=self.ffn_dim,\n num_heads=self.num_heads,\n dropout=self.dropout,\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n use_flash_attention=self.use_flash_attention,\n )(x)\n x = nn.Dense(\n self.out_dim,\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n )(x)\n return x # (B, T, E)\n\n\ndef normalize(x):\n return x / (jnp.linalg.norm(x, ord=2, axis=-1, keepdims=True) + 1e-8)\n\n\nclass VectorQuantizer(nn.Module):\n latent_dim: int\n num_latents: int\n dropout: float\n\n def setup(self):\n self.codebook = normalize(\n self.param(\n ""codebook"",\n nn.initializers.lecun_uniform(),\n (self.num_latents, self.latent_dim),\n )\n )\n self.drop = nn.Dropout(self.dropout, deterministic=False)\n\n def __call__(\n self, x: jax.Array, training: bool\n ) -> Tuple[jax.Array, jax.Array, jax.Array, jax.Array]:\n # --- Compute distances ---\n x = normalize(x)\n codebook = normalize(self.codebook)\n distance = -jnp.matmul(x, codebook.T)\n if training:\n dropout_key = self.make_rng(""dropout"")\n distance = self.drop(distance, rng=dropout_key)\n\n # --- Get indices and embeddings ---\n indices = jnp.argmin(distance, axis=-1)\n z = self.codebook[indices]\n\n # --- Straight through estimator ---\n z_q = x + jax.lax.stop_gradient(z - x)\n return z_q, z, x, indices\n\n def get_codes(self, indices: jax.Array):\n return self.codebook[indices]\n\n\ndef _create_flash_attention_fn(use_flash_attention: bool, is_causal: 
bool):\n """"""\n Create an attention function that uses flash attention if enabled.\n\n Flax MultiHeadAttention provides tensors with shape (batch..., length, num_heads, head_dim)\n jax.nn.dot_product_attention expects (batch, length, num_heads, head_dim).\n\n We need to reshape to ensure compatibility. cuDNN's flash attention additionally\n requires a sequence length that is a multiple of 4. We pad the sequence length to the nearest\n multiple of 4 and mask accordingly.\n """"""\n\n def attention_fn(query, key, value, bias=None, mask=None, **kwargs):\n implementation = ""cudnn"" if use_flash_attention else None\n\n def _rearrange(x):\n return einops.rearrange(x, ""... l h d -> (...) l h d"")\n\n def _pad(x):\n return jnp.pad(x, ((0, 0), (0, pad_size), (0, 0), (0, 0)))\n\n def _fuse_masks(mask: jax.Array, attention_mask: jax.Array) -> jax.Array:\n mask_bool = mask.astype(jnp.bool_)\n expanded_mask = jnp.pad(\n mask_bool, ((0, pad_size), (0, pad_size)), constant_values=False\n )\n return jnp.logical_and(attention_mask, expanded_mask)\n\n original_shape = query.shape\n original_seq_len = query.shape[-3]\n\n # Pad to nearest multiple of 4\n target_seq_len = ((original_seq_len + 3) // 4) * 4\n pad_size = target_seq_len - original_seq_len\n\n query_4d = _pad(_rearrange(query))\n key_4d = _pad(_rearrange(key))\n value_4d = _pad(_rearrange(value))\n\n attention_mask = jnp.ones((target_seq_len, target_seq_len), dtype=jnp.bool_)\n attention_mask = attention_mask.at[original_seq_len:, :].set(False)\n attention_mask = attention_mask.at[:, original_seq_len:].set(False)\n\n mask_4d = (\n _fuse_masks(mask, attention_mask) if mask is not None else attention_mask\n )\n mask_4d = mask_4d[jnp.newaxis, jnp.newaxis, :, :] # (1, 1, seq_len, seq_len)\n\n bias_4d = _pad(_rearrange(bias)) if bias is not None else None\n\n output_4d = jax.nn.dot_product_attention(\n query=query_4d,\n key=key_4d,\n value=value_4d,\n bias=bias_4d,\n mask=mask_4d,\n implementation=implementation,\n is_causal=is_causal,\n **kwargs\n )\n return output_4d[..., :original_seq_len, :, :].reshape(original_shape)\n\n return attention_fn\n",python,tab +1867,7334042,"utils/nn.py",7516,0,"",python,selection_mouse +1868,7339037,"utils/nn.py",5032,0,"",python,selection_mouse +1869,7339586,"utils/nn.py",5030,0,"",python,selection_mouse +1870,7339725,"utils/nn.py",5018,18,"MultiHeadAttention",python,selection_mouse +1871,7341010,"utils/nn.py",5261,0,"",python,selection_mouse +1872,7341186,"utils/nn.py",5247,26,"_create_flash_attention_fn",python,selection_mouse +1873,7342256,"utils/nn.py",5260,0,"",python,selection_mouse +1874,7342796,"utils/nn.py",5247,26,"_create_flash_attention_fn",python,selection_mouse +1875,7346043,"utils/nn.py",5260,0,"",python,selection_mouse +1876,7356549,"utils/nn.py",10080,0,"",python,selection_mouse +1877,7356704,"utils/nn.py",10080,3,"jnp",python,selection_mouse +1878,7357279,"utils/nn.py",10055,0,"",python,selection_mouse +1879,7357428,"utils/nn.py",10052,4,"_pad",python,selection_mouse +1880,7363727,"utils/nn.py",11642,0,"",python,selection_mouse +1881,7364493,"utils/nn.py",11621,0,"",python,selection_mouse +1882,7364496,"utils/nn.py",11620,0,"",python,selection_command +1883,7366686,"utils/nn.py",11696,0,"",python,selection_mouse +1884,7366832,"utils/nn.py",11685,14,"original_shape",python,selection_mouse +1885,7367030,"utils/nn.py",11684,15,"(original_shape",python,selection_mouse +1886,7367065,"utils/nn.py",11621,78,"\n return output_4d[..., :original_seq_len, :, :].reshape(original_shape",python,selection_mouse 
+1887,7367563,"utils/nn.py",11637,62,"output_4d[..., :original_seq_len, :, :].reshape(original_shape",python,selection_mouse +1888,7369517,"utils/nn.py",11701,0,"",python,selection_mouse +1889,7369835,"utils/nn.py",11685,16,"original_shape)\n",python,selection_mouse +1890,7369890,"utils/nn.py",11677,24,"reshape(original_shape)\n",python,selection_mouse +1891,7369930,"utils/nn.py",11621,80,"\n return output_4d[..., :original_seq_len, :, :].reshape(original_shape)\n",python,selection_mouse +1892,7370062,"utils/nn.py",11653,48,"original_seq_len, :, :].reshape(original_shape)\n",python,selection_mouse +1893,7370080,"utils/nn.py",11651,50," :original_seq_len, :, :].reshape(original_shape)\n",python,selection_mouse +1894,7370096,"utils/nn.py",11649,52,"., :original_seq_len, :, :].reshape(original_shape)\n",python,selection_mouse +1895,7370113,"utils/nn.py",11648,53,".., :original_seq_len, :, :].reshape(original_shape)\n",python,selection_mouse +1896,7370129,"utils/nn.py",11646,55,"[..., :original_seq_len, :, :].reshape(original_shape)\n",python,selection_mouse +1897,7370145,"utils/nn.py",11637,64,"output_4d[..., :original_seq_len, :, :].reshape(original_shape)\n",python,selection_mouse +1898,7370277,"utils/nn.py",11636,65," output_4d[..., :original_seq_len, :, :].reshape(original_shape)\n",python,selection_mouse +1899,7370334,"utils/nn.py",11630,71,"return output_4d[..., :original_seq_len, :, :].reshape(original_shape)\n",python,selection_mouse +1900,7370789,"utils/nn.py",11633,0,"",python,selection_mouse +1901,7370789,"utils/nn.py",11630,6,"return",python,selection_mouse +1902,7371001,"utils/nn.py",11630,7,"return ",python,selection_mouse +1903,7371016,"utils/nn.py",11630,16,"return output_4d",python,selection_mouse +1904,7371082,"utils/nn.py",11630,19,"return output_4d[..",python,selection_mouse +1905,7371082,"utils/nn.py",11630,21,"return output_4d[...,",python,selection_mouse +1906,7371096,"utils/nn.py",11630,39,"return output_4d[..., :original_seq_len",python,selection_mouse +1907,7371197,"utils/nn.py",11630,71,"return output_4d[..., :original_seq_len, :, :].reshape(original_shape)\n",python,selection_mouse +1908,7371553,"utils/nn.py",11630,69,"return output_4d[..., :original_seq_len, :, :].reshape(original_shape",python,selection_mouse +1909,7372048,"utils/nn.py",11699,0,"",python,selection_mouse +1910,7372049,"utils/nn.py",11685,14,"original_shape",python,selection_mouse +1911,7372307,"utils/nn.py",11677,22,"reshape(original_shape",python,selection_mouse +1912,7372311,"utils/nn.py",11672,27,", :].reshape(original_shape",python,selection_mouse +1913,7372334,"utils/nn.py",11669,30,", :, :].reshape(original_shape",python,selection_mouse +1914,7372397,"utils/nn.py",11653,46,"original_seq_len, :, :].reshape(original_shape",python,selection_mouse +1915,7372464,"utils/nn.py",11652,47,":original_seq_len, :, :].reshape(original_shape",python,selection_mouse +1916,7372470,"utils/nn.py",11649,50,"., :original_seq_len, :, :].reshape(original_shape",python,selection_mouse +1917,7372482,"utils/nn.py",11648,51,".., :original_seq_len, :, :].reshape(original_shape",python,selection_mouse +1918,7372507,"utils/nn.py",11646,53,"[..., :original_seq_len, :, :].reshape(original_shape",python,selection_mouse +1919,7372567,"utils/nn.py",11637,62,"output_4d[..., :original_seq_len, :, :].reshape(original_shape",python,selection_mouse +1920,7372628,"utils/nn.py",11636,63," output_4d[..., :original_seq_len, :, :].reshape(original_shape",python,selection_mouse +1921,7372629,"utils/nn.py",11630,69,"return 
output_4d[..., :original_seq_len, :, :].reshape(original_shape",python,selection_mouse +1922,7373100,"utils/nn.py",11631,0,"",python,selection_mouse +1923,7373100,"utils/nn.py",11630,6,"return",python,selection_mouse +1924,7384231,"utils/nn.py",11488,0,"",python,selection_mouse +1925,7384250,"utils/nn.py",11487,0,"",python,selection_command +1926,7384406,"utils/nn.py",11488,0,"",python,selection_mouse +1927,7384408,"utils/nn.py",11487,0,"",python,selection_command +1928,7384756,"utils/nn.py",11463,26," bias=bias_4d,\n",python,selection_mouse +1929,7384775,"utils/nn.py",11464,25," bias=bias_4d,\n",python,selection_command +1930,7388553,"utils/nn.py",11701,0,"",python,selection_mouse +1931,7389685,"utils/nn.py",11621,0,"",python,selection_mouse +1932,7389701,"utils/nn.py",11620,0,"",python,selection_command +1933,7422193,"utils/nn.py",11665,0,"",python,selection_mouse +1934,7422324,"utils/nn.py",11653,16,"original_seq_len",python,selection_mouse +1935,7433944,"models/lam.py",0,0,"",python,tab +1936,7438208,"models/lam.py",3135,0,"",python,selection_mouse +1937,7440985,"models/lam.py",3135,1,"",python,content +1938,7441700,"models/lam.py",3135,0,"-",python,content +1939,7441701,"models/lam.py",3136,0,"",python,selection_keyboard +1940,7441761,"models/lam.py",3136,0,"1",python,content +1941,7441762,"models/lam.py",3137,0,"",python,selection_keyboard +1942,7442322,"models/lam.py",3136,0,"",python,selection_command +1943,7443178,"models/lam.py",3177,0,"",python,selection_command +1944,7443852,"models/lam.py",3175,2,"",python,content +1945,7444012,"models/lam.py",3174,1,"",python,content +1946,7444155,"models/lam.py",3171,3,"",python,content +1947,7444302,"models/lam.py",3166,5,"",python,content +1948,7444491,"models/lam.py",3159,7,"",python,content +1949,7444796,"models/lam.py",3154,5,"",python,content +1950,7445528,"models/lam.py",3153,1,"",python,content +1951,7477747,"TERMINAL",0,0,"queue",,terminal_command +1952,7477792,"TERMINAL",0,0,"]633;E;2025-07-24 16:04:52 queue;adbf53fe-397b-40d3-9339-94ea79afad56]633;C",,terminal_output +1953,7477862,"TERMINAL",0,0,"[?1049h(B[?7hEvery 1.0s: squeue --mehkn1991.localdomain: Thu Jul 24 16:04:52 2025JOBID PARTITION NAME USER ST\tTIME NODES NODELIST(REASON)3373205 accelerat train_dy tum_cte0 PD\t0:00\t 2 (Priority)3373213 accelerat train_dy tum_cte0 PD\t0:00\t 8 (Priority)3371237 accelerat train_dy tum_cte0 R 15:30:37\t 8 hkn[0618,0625-0626,0628-0631,0634]3371238 accelerat train_dy tum_cte0 R 15:30:37\t 2 hkn[0706,0710]3372631 accelerat train_dy tum_cte0 R 1:51:40\t 2 hkn[0515,0622]3372629 accelerat train_dy tum_cte0 R 2:37:03\t 8 hkn[0410,0429,0520,0607,0610,0810,0814,0817]",,terminal_output +1954,7478884,"TERMINAL",0,0,"38814",,terminal_output +1955,7480040,"TERMINAL",0,0,"49925",,terminal_output +1956,7480702,"TERMINAL",0,0,"[?1049l\r[?1l>]0;tum_cte0515@hkn1991:~/Projects/jafar]633;D;0",,terminal_output +1957,7483055,"TERMINAL",0,0,"scancel 3373205",,terminal_command +1958,7483070,"TERMINAL",0,0,"]633;E;2025-07-24 16:04:57 scancel 3373205;adbf53fe-397b-40d3-9339-94ea79afad56]633;C]0;tum_cte0515@hkn1991:~/Projects/jafar]633;D;0]633;P;Cwd=/home/hk-project-p0023960/tum_cte0515/Projects/jafar",,terminal_output +1959,7483815,"TERMINAL",0,0,"queue",,terminal_command +1960,7483913,"TERMINAL",0,0,"]633;E;2025-07-24 16:04:58 queue;adbf53fe-397b-40d3-9339-94ea79afad56]633;C[?1049h(B[?7hEvery 1.0s: squeue --mehkn1991.localdomain: Thu Jul 24 16:04:58 2025JOBID PARTITION NAME USER ST\tTIME NODES NODELIST(REASON)3373213 accelerat train_dy tum_cte0 PD\t0:00\t 8 
(Priority)3371237 accelerat train_dy tum_cte0 R 15:30:43\t 8 hkn[0618,0625-0626,0628-0631,0634]3371238 accelerat train_dy tum_cte0 R 15:30:43\t 2 hkn[0706,0710]3372631 accelerat train_dy tum_cte0 R 1:51:46\t 2 hkn[0515,0622]3372629 accelerat train_dy tum_cte0 R 2:37:09\t 8 hkn[0410,0429,0520,0607,0610,0810,0814,0817]",,terminal_output +1961,7484956,"TERMINAL",0,0,"944710",,terminal_output +1962,7486121,"TERMINAL",0,0,"5:005581",,terminal_output +1963,7486877,"TERMINAL",0,0,"[?1049l\r[?1l>]0;tum_cte0515@hkn1991:~/Projects/jafar]633;D;0",,terminal_output +1964,7489124,"TERMINAL",0,0,"scancel 3373213",,terminal_command +1965,7489125,"TERMINAL",0,0,"]633;E;2025-07-24 16:05:03 scancel 3373213;adbf53fe-397b-40d3-9339-94ea79afad56]633;C]0;tum_cte0515@hkn1991:~/Projects/jafar]633;D;0]633;P;Cwd=/home/hk-project-p0023960/tum_cte0515/Projects/jafar",,terminal_output +1966,7496650,"models/lam.py",0,0,"",python,tab +1967,7504246,"models/lam.py",3032,0,"",python,selection_mouse +1968,7504842,"models/lam.py",3182,0,"",python,selection_mouse +1969,7507073,"models/lam.py",3181,0,"",python,selection_command +1970,7557078,"TERMINAL",0,0,"git status",,terminal_command +1971,7557111,"TERMINAL",0,0,"]633;E;2025-07-24 16:06:11 git status;adbf53fe-397b-40d3-9339-94ea79afad56]633;COn branch causal-st-transformer\r\nChanges not staged for commit:\r\n (use ""git add ..."" to update what will be committed)\r\n (use ""git restore ..."" to discard changes in working directory)\r\n\tmodified: models/lam.py\r\n\r\nUntracked files:\r\n (use ""git add ..."" to include in what will be committed)\r\n\tdiff.diff\r\n\tdiff.log\r\n\tlogs/\r\n\toverfit_dir.zip\r\n\tread_tf_record.py\r\n\trequirements-franz.txt\r\n\tscripts_cremers/\r\n\tscripts_horeka/\r\n\tslurm/\r\n\tutils/logger_bak.py\r\n\tutils/visualizer.py\r\n\tweekend-job-requeuer.sh\r\n\tweekend-job-starter.sh\r\n\r\nno changes added to commit (use ""git add"" and/or ""git commit -a"")\r\n]0;tum_cte0515@hkn1991:~/Projects/jafar]633;D;0",,terminal_output +1972,7568534,"models/lam.py",0,0,"",python,tab +1973,7599949,"TERMINAL",0,0,"git commit -am ""put action pad at the end of the sequence to also support the causal transformer""",,terminal_command +1974,7600009,"TERMINAL",0,0,"]633;E;2025-07-24 16:06:54 git commit -am ""put action pad at the end of the sequence to also support the causal transformer"";adbf53fe-397b-40d3-9339-94ea79afad56]633;C",,terminal_output +1975,7600139,"TERMINAL",0,0,"[causal-st-transformer be1e14f] put action pad at the end of the sequence to also support the causal transformer\r\n 1 file changed, 2 insertions(+), 2 deletions(-)\r\n]0;tum_cte0515@hkn1991:~/Projects/jafar]633;D;0",,terminal_output +1976,7617968,"TERMINAL",0,0,"git checkout new-arch-sampling",,terminal_command +1977,7618020,"TERMINAL",0,0,"]633;E;2025-07-24 16:07:12 git checkout new-arch-sampling;adbf53fe-397b-40d3-9339-94ea79afad56]633;C",,terminal_output +1978,7618038,"TERMINAL",0,0,"Switched to branch 'new-arch-sampling'\r\nYour branch is up to date with 'origin/new-arch-sampling'.\r\n]0;tum_cte0515@hkn1991:~/Projects/jafar]633;D;0",,terminal_output +1979,7620954,"",0,0,"Switched from branch 'causal-st-transformer' to 'new-arch-sampling'",,git_branch_checkout +1980,7622006,"models/lam.py",0,0,"from typing import Dict, Any\n\nimport jax.numpy as jnp\nimport flax.linen as nn\n\nfrom utils.preprocess import patchify, unpatchify\nfrom utils.nn import STTransformer, VectorQuantizer\n\n\nclass LatentActionModel(nn.Module):\n """"""Latent Action ST-ViVit VQ-VAE""""""\n\n in_dim: int\n 
model_dim: int\n ffn_dim: int\n latent_dim: int\n num_latents: int\n patch_size: int\n num_blocks: int\n num_heads: int\n dropout: float\n codebook_dropout: float\n param_dtype: jnp.dtype\n dtype: jnp.dtype\n use_flash_attention: bool\n\n def setup(self):\n self.patch_token_dim = self.in_dim * self.patch_size**2\n self.encoder = STTransformer(\n self.model_dim,\n self.ffn_dim,\n self.latent_dim,\n self.num_blocks,\n self.num_heads,\n self.dropout,\n self.param_dtype,\n self.dtype,\n use_flash_attention=self.use_flash_attention,\n )\n self.action_in = self.param(\n ""action_in"",\n nn.initializers.lecun_uniform(),\n (1, 1, 1, self.patch_token_dim),\n )\n self.vq = VectorQuantizer(\n self.latent_dim,\n self.num_latents,\n self.codebook_dropout,\n )\n self.patch_up = nn.Dense(\n self.model_dim,\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n )\n self.action_up = nn.Dense(\n self.model_dim,\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n )\n self.decoder = STTransformer(\n self.model_dim,\n self.ffn_dim,\n self.patch_token_dim,\n self.num_blocks,\n self.num_heads,\n self.dropout,\n self.param_dtype,\n self.dtype,\n use_flash_attention=self.use_flash_attention,\n )\n\n def __call__(self, batch: Dict[str, Any], training: bool = True) -> Dict[str, Any]:\n # --- Encode + VQ ---\n H, W = batch[""videos""].shape[2:4]\n outputs = self.vq_encode(batch[""videos""], training)\n video_action_patches = self.action_up(outputs[""z_q""]) + self.patch_up(\n outputs[""patches""][:, :-1]\n )\n del outputs[""patches""]\n\n # --- Decode ---\n video_recon = self.decoder(video_action_patches)\n video_recon = video_recon.astype(jnp.float32)\n video_recon = nn.sigmoid(video_recon)\n video_recon = video_recon.astype(self.dtype)\n outputs[""recon""] = unpatchify(video_recon, self.patch_size, H, W)\n return outputs\n\n def vq_encode(self, videos: Any, training: bool = True) -> Dict[str, Any]:\n # --- Preprocess videos ---\n B, T = videos.shape[:2]\n patches = patchify(videos, self.patch_size)\n action_pad = jnp.broadcast_to(self.action_in, (B, T, 1, self.patch_token_dim))\n # FIXME mihir do this the other way around\n padded_patches = jnp.concatenate((patches, action_pad), axis=2)\n\n # --- Encode ---\n z = self.encoder(padded_patches) # (B, T, N, E)\n # Get latent action for all future frames\n z = z[:, 1:, 0] # (B, T-1, E)\n\n # --- Vector quantize ---\n z = z.reshape(B * (T - 1), self.latent_dim)\n z_q, z, emb, indices = self.vq(z, training)\n z_q = z_q.reshape(B, T - 1, 1, self.latent_dim)\n return dict(patches=patches, z_q=z_q, z=z, emb=emb, indices=indices)\n",python,tab +1981,7624604,"models/lam.py",3147,0,"",python,selection_mouse +1982,7626852,"models/lam.py",3147,1,"-",python,content +1983,7628614,"models/lam.py",3148,0,"",python,selection_command +1984,7629716,"models/lam.py",3148,0,"1",python,content +1985,7629718,"models/lam.py",3149,0,"",python,selection_keyboard +1986,7633649,"TERMINAL",0,0,"git status",,terminal_command +1987,7633656,"TERMINAL",0,0,"]633;E;2025-07-24 16:07:28 git status;adbf53fe-397b-40d3-9339-94ea79afad56]633;C",,terminal_output +1988,7633722,"TERMINAL",0,0,"On branch new-arch-sampling\r\nYour branch is up to date with 'origin/new-arch-sampling'.\r\n\r\nChanges not staged for commit:\r\n (use ""git add ..."" to update what will be committed)\r\n (use ""git restore ..."" to discard changes in working directory)\r\n\tmodified: models/lam.py\r\n\r\nUntracked files:\r\n (use ""git add ..."" to include in what will be 
committed)\r\n\tdiff.diff\r\n\tdiff.log\r\n\tlogs/\r\n\toverfit_dir.zip\r\n\tread_tf_record.py\r\n\trequirements-franz.txt\r\n\tscripts_cremers/\r\n\tscripts_horeka/\r\n\tslurm/\r\n\tutils/logger_bak.py\r\n\tutils/visualizer.py\r\n\tweekend-job-requeuer.sh\r\n\tweekend-job-starter.sh\r\n\r\nno changes added to commit (use ""git add"" and/or ""git commit -a"")\r\n]0;tum_cte0515@hkn1991:~/Projects/jafar]633;D;0",,terminal_output +1989,7653618,"TERMINAL",0,0,"git commit -am ""hotfix: supervise the action-pad token""",,terminal_command +1990,7653690,"TERMINAL",0,0,"]633;E;2025-07-24 16:07:48 git commit -am ""hotfix: supervise the action-pad token"";adbf53fe-397b-40d3-9339-94ea79afad56]633;C[new-arch-sampling 5e1cd9b] hotfix: supervise the action-pad token\r\n 1 file changed, 1 insertion(+), 1 deletion(-)\r\n]0;tum_cte0515@hkn1991:~/Projects/jafar]633;D;0",,terminal_output +1991,7657399,"models/lam.py",0,0,"",python,tab +1992,7674573,"models/lam.py",2546,0,"",python,selection_mouse +1993,7676513,"models/lam.py",2773,0,"",python,selection_mouse +1994,7676678,"models/lam.py",2771,10,"patch_size",python,selection_mouse +1995,7681523,"models/lam.py",2978,0,"",python,selection_mouse +1996,7681653,"models/lam.py",2972,10,"action_pad",python,selection_mouse +1997,7684352,"models/lam.py",2832,0,"",python,selection_mouse +1998,7684458,"models/lam.py",2826,9,"action_in",python,selection_mouse +1999,7702358,"models/lam.py",1538,0,"",python,selection_mouse +2000,7702552,"models/lam.py",1512,26,"type=self.dtype,\n )",python,selection_mouse +2001,7702566,"models/lam.py",1465,73," param_dtype=self.param_dtype,\n dtype=self.dtype,\n )",python,selection_mouse +2002,7702582,"models/lam.py",1462,76," param_dtype=self.param_dtype,\n dtype=self.dtype,\n )",python,selection_mouse +2003,7702609,"models/lam.py",1431,107," self.model_dim,\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n )",python,selection_mouse +2004,7702627,"models/lam.py",1429,109," self.model_dim,\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n )",python,selection_mouse +2005,7702643,"models/lam.py",1394,144," self.action_up = nn.Dense(\n self.model_dim,\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n )",python,selection_mouse +2006,7703276,"models/lam.py",1394,144,"",python,content +2007,7703707,"models/lam.py",1393,1,"",python,content +2008,7728284,"models/lam.py",1393,0,"\n",python,content +2009,7728689,"models/lam.py",1394,0," self.action_up = nn.Dense(\n self.model_dim,\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n )",python,content +2010,7729455,"models/lam.py",1570,0,"",python,selection_mouse +2011,7730065,"models/lam.py",1538,0,"",python,selection_mouse +2012,7782720,"models/lam.py",2463,0,"",python,selection_mouse +2013,7784036,"models/lam.py",2275,0,"",python,selection_mouse +2014,7785645,"models/lam.py",1556,0,"",python,selection_mouse +2015,7785790,"models/lam.py",1552,7,"decoder",python,selection_mouse +2016,7787644,"models/lam.py",2373,0,"",python,selection_mouse +2017,7787800,"models/lam.py",2367,6,"astype",python,selection_mouse +2018,7789151,"models/lam.py",2320,0,"",python,selection_mouse +2019,7789347,"models/lam.py",2311,20,"video_action_patches",python,selection_mouse +2020,7812526,"TERMINAL",0,0,"git push",,terminal_command +2021,7812576,"TERMINAL",0,0,"]633;E;2025-07-24 16:10:27 git push;adbf53fe-397b-40d3-9339-94ea79afad56]633;C",,terminal_output +2022,7814187,"TERMINAL",0,0,"Enumerating objects: 7, done.\r\nCounting objects: 14% (1/7)\rCounting objects: 28% (2/7)\rCounting objects: 42% 
(3/7)\rCounting objects: 57% (4/7)\rCounting objects: 71% (5/7)\rCounting objects: 85% (6/7)\rCounting objects: 100% (7/7)\rCounting objects: 100% (7/7), done.\r\nDelta compression using up to 152 threads\r\nCompressing objects: 25% (1/4)\rCompressing objects: 50% (2/4)\rCompressing objects: 75% (3/4)\rCompressing objects: 100% (4/4)\rCompressing objects: 100% (4/4), done.\r\nWriting objects: 25% (1/4)\rWriting objects: 50% (2/4)\rWriting objects: 75% (3/4)\rWriting objects: 100% (4/4)\rWriting objects: 100% (4/4), 434 bytes | 434.00 KiB/s, done.\r\nTotal 4 (delta 2), reused 0 (delta 0), pack-reused 0\r\n",,terminal_output +2023,7814282,"TERMINAL",0,0,"remote: Resolving deltas: 0% (0/2)\rremote: Resolving deltas: 50% (1/2)\rremote: Resolving deltas: 100% (2/2)\rremote: Resolving deltas: 100% (2/2), completed with 2 local objects.\r\n",,terminal_output +2024,7814459,"TERMINAL",0,0,"To github.com:p-doom/jafar.git\r\n 91ecb1f..5e1cd9b new-arch-sampling -> new-arch-sampling\r\n",,terminal_output +2025,7814492,"TERMINAL",0,0,"]0;tum_cte0515@hkn1991:~/Projects/jafar]633;D;0",,terminal_output +2026,7817403,"TERMINAL",0,0,"runner-2",,terminal_command +2027,7819711,"TERMINAL",0,0,"sync-runner-2",,terminal_command +2028,7819763,"TERMINAL",0,0,"]633;E;2025-07-24 16:10:34 sync-runner-2;adbf53fe-397b-40d3-9339-94ea79afad56]633;Csending incremental file list\r\n",,terminal_output +2029,7825641,"TERMINAL",0,0,"./\r\ngenie.py\r\nsample.py\r\ntrain_dynamics.py\r\n",,terminal_output +2030,7827901,"TERMINAL",0,0,"models/\r\nmodels/dynamics.py\r\nmodels/lam.py\r\nmodels/tokenizer.py\r\nutils/\r\nutils/nn.py\r\n",,terminal_output +2031,7827997,"TERMINAL",0,0,"\r\nsent 86,140 bytes received 298 bytes 10,169.18 bytes/sec\r\ntotal size is 185,131,638 speedup is 2,141.79\r\n]0;tum_cte0515@hkn1991:~/Projects/jafar_jobs_2]633;D;0",,terminal_output +2032,7839539,"models/lam.py",0,0,"",python,tab +2033,7848502,"/home/hk-project-p0023960/tum_cte0515/Projects/jafar_jobs_2/train_dynamics.py",0,0,"from dataclasses import dataclass, field\nimport os\n\nimport einops\nfrom flax.training.train_state import TrainState\nfrom jax.sharding import Mesh, PartitionSpec, NamedSharding\nfrom jax.experimental.mesh_utils import create_device_mesh\nimport optax\nimport orbax.checkpoint as ocp\nimport numpy as np\nimport dm_pix as pix\nimport jax\nimport jax.numpy as jnp\nimport tyro\nimport wandb\nimport grain\n\nfrom genie import Genie, restore_genie_components\nfrom utils.dataloader import get_dataloader\nfrom utils.lr_utils import get_lr_schedule\nfrom utils.parameter_utils import count_parameters_by_component\n\n\n\n@dataclass\nclass Args:\n # Experiment\n num_steps: int = 200_000\n seed: int = 0\n seq_len: int = 16\n image_channels: int = 3\n image_height: int = 90\n image_width: int = 160\n data_dir: str = """"\n save_ckpt: bool = False\n restore_ckpt: bool = False\n # Optimization\n batch_size: int = 36\n init_lr: float = 0.0\n max_lr: float = 3e-5\n decay_end: float = 0.0\n wsd_decay_steps: int = (\n 10000 # NOTE: wsd_decay_steps will only be used when using a wsd-schedule\n )\n warmup_steps: int = 5000\n lr_schedule: str = ""wsd"" # supported options: wsd, cos\n # Tokenizer\n tokenizer_dim: int = 512\n tokenizer_ffn_dim: int = 2048\n latent_patch_dim: int = 32\n num_patch_latents: int = 1024\n patch_size: int = 4\n tokenizer_num_blocks: int = 4\n tokenizer_num_heads: int = 8\n tokenizer_checkpoint: str = """"\n # LAM\n lam_dim: int = 512\n lam_ffn_dim: int = 2048\n latent_action_dim: int = 32\n num_latent_actions: int = 6\n 
lam_patch_size: int = 16\n lam_num_blocks: int = 4\n lam_num_heads: int = 8\n lam_checkpoint: str = """"\n # Dynamics\n dyna_dim: int = 512\n dyna_ffn_dim: int = 2048\n dyna_num_blocks: int = 6\n dyna_num_heads: int = 8\n dropout: float = 0.0\n mask_limit: float = 0.5\n use_maskgit: bool = False\n param_dtype: jnp.dtype = jnp.float32\n dtype: jnp.dtype = jnp.bfloat16\n use_flash_attention: bool = True\n # Logging\n log: bool = False\n entity: str = """"\n project: str = """"\n name: str = ""train_dynamics""\n tags: list[str] = field(default_factory=lambda: [""dynamics""])\n log_interval: int = 5\n log_image_interval: int = 250\n ckpt_dir: str = """"\n log_checkpoint_interval: int = 25000\n log_checkpoint_keep_period: int = 20000\n log_gradients: bool = False\n wandb_id: str = """"\n\n\nargs = tyro.cli(Args)\n\n\ndef dynamics_loss_fn(params, state, inputs):\n """"""Compute masked dynamics loss""""""\n inputs[""videos""] = inputs[""videos""].astype(args.dtype) / 255.0\n outputs = state.apply_fn(\n params,\n inputs,\n training=True,\n rngs={""params"": inputs[""rng""], ""dropout"": inputs[""dropout_rng""]},\n )\n mask = outputs[""mask""]\n outputs[""token_logits""] = outputs[""token_logits""].astype(jnp.float32)\n outputs[""recon""] = outputs[""recon""].astype(jnp.float32)\n logits = outputs[""token_logits""]\n targets = outputs[""video_tokens""]\n\n # if not args.use_maskgit:\n # logits = outputs[""token_logits""][:, :, :-1]\n # targets = outputs[""video_tokens""][:, :, 1:]\n # mask = outputs[""mask""][:, :, 1:]\n\n ce_loss = optax.softmax_cross_entropy_with_integer_labels(logits, targets)\n ce_loss = (mask * ce_loss).sum() / mask.sum()\n acc = logits.argmax(-1) == targets\n acc = (mask * acc).sum() / mask.sum()\n select_probs = jax.nn.softmax(logits)\n gt = inputs[""videos""].clip(0, 1).reshape(-1, *inputs[""videos""].shape[2:])\n recon = outputs[""recon""].clip(0, 1).reshape(-1, *outputs[""recon""].shape[2:])\n psnr = pix.psnr(gt, recon).mean() # type: ignore\n ssim = pix.ssim(gt, recon).mean() # type: ignore\n psnr = pix.psnr(gt, recon).mean() # type: ignore\n ssim = pix.ssim(gt, recon).mean() # type: ignore\n _, index_counts_lam = jnp.unique_counts(\n jnp.ravel(outputs[""lam_indices""]), size=args.num_latent_actions, fill_value=0\n )\n _, index_counts_tokenizer = jnp.unique_counts(\n jnp.ravel(outputs[""video_tokens""]), size=args.num_patch_latents, fill_value=0\n )\n codebook_usage_lam = (index_counts_lam != 0).mean()\n codebook_usage_tokenizer = (index_counts_tokenizer != 0).mean()\n metrics = dict(\n cross_entropy_loss=ce_loss,\n masked_token_accuracy=acc,\n select_logit=logits.max(-1).mean(),\n select_p=select_probs.max(-1).mean(),\n entropy=jax.scipy.special.entr(select_probs).sum(-1).mean(),\n psnr=psnr,\n ssim=ssim,\n codebook_usage_lam=codebook_usage_lam,\n codebook_usage_tokenizer=codebook_usage_tokenizer,\n )\n return ce_loss, (outputs[""recon""], metrics)\n\n\n@jax.jit\ndef train_step(state, inputs):\n """"""Update state and compute metrics""""""\n grad_fn = jax.value_and_grad(dynamics_loss_fn, has_aux=True, allow_int=True)\n (loss, (recon, metrics)), grads = grad_fn(state.params, state, inputs)\n state = state.apply_gradients(grads=grads)\n if args.log_gradients:\n metrics[""gradients_std/""] = jax.tree.map(\n lambda x: x.std(), grads[""params""][""dynamics""]\n )\n return state, loss, recon, metrics\n\n\nif __name__ == ""__main__"":\n jax.distributed.initialize()\n num_devices = jax.device_count()\n if num_devices == 0:\n raise ValueError(""No JAX devices found."")\n 
print(f""Running on {num_devices} devices."")\n\n if args.batch_size % num_devices != 0:\n raise ValueError(\n f""Global batch size {args.batch_size} must be divisible by ""\n f""number of devices {num_devices}.""\n )\n\n per_device_batch_size_for_init = args.batch_size // num_devices\n\n rng = jax.random.PRNGKey(args.seed)\n\n # --- Initialize model ---\n genie = Genie(\n # Tokenizer\n in_dim=args.image_channels,\n tokenizer_dim=args.tokenizer_dim,\n tokenizer_ffn_dim=args.tokenizer_ffn_dim,\n latent_patch_dim=args.latent_patch_dim,\n num_patch_latents=args.num_patch_latents,\n patch_size=args.patch_size,\n tokenizer_num_blocks=args.tokenizer_num_blocks,\n tokenizer_num_heads=args.tokenizer_num_heads,\n # LAM\n lam_dim=args.lam_dim,\n lam_ffn_dim=args.lam_ffn_dim,\n latent_action_dim=args.latent_action_dim,\n num_latent_actions=args.num_latent_actions,\n lam_patch_size=args.lam_patch_size,\n lam_num_blocks=args.lam_num_blocks,\n lam_num_heads=args.lam_num_heads,\n lam_co_train=not args.lam_checkpoint,\n # Dynamics\n dyna_dim=args.dyna_dim,\n dyna_ffn_dim=args.dyna_ffn_dim,\n dyna_num_blocks=args.dyna_num_blocks,\n dyna_num_heads=args.dyna_num_heads,\n dropout=args.dropout,\n mask_limit=args.mask_limit,\n use_maskgit=args.use_maskgit,\n param_dtype=args.param_dtype,\n dtype=args.dtype,\n use_flash_attention=args.use_flash_attention,\n )\n rng, _rng = jax.random.split(rng)\n image_shape = (args.image_height, args.image_width, args.image_channels)\n dummy_inputs = dict(\n videos=jnp.zeros(\n (per_device_batch_size_for_init, args.seq_len, *image_shape),\n dtype=args.dtype,\n ),\n action=jnp.zeros(\n (per_device_batch_size_for_init, args.seq_len), dtype=args.dtype\n ),\n mask_rng=_rng,\n )\n rng, _rng = jax.random.split(rng)\n init_params = genie.init(_rng, dummy_inputs)\n\n param_counts = count_parameters_by_component(init_params)\n\n if args.log and jax.process_index() == 0:\n wandb_init_kwargs = {\n ""entity"": args.entity,\n ""project"": args.project,\n ""name"": args.name,\n ""tags"": args.tags,\n ""group"": ""debug"",\n ""config"": args,\n }\n\n if args.wandb_id:\n wandb_init_kwargs.update(\n {\n ""id"": args.wandb_id,\n ""resume"": ""allow"",\n }\n )\n wandb.init(**wandb_init_kwargs)\n\n wandb.config.update({""model_param_count"": param_counts})\n\n print(""Parameter counts:"")\n print(param_counts)\n\n # --- Initialize optimizer ---\n lr_schedule = get_lr_schedule(\n args.lr_schedule,\n args.init_lr,\n args.max_lr,\n args.decay_end,\n args.num_steps,\n args.warmup_steps,\n args.wsd_decay_steps,\n )\n tx = optax.adamw(\n learning_rate=lr_schedule,\n b1=0.9,\n b2=0.9,\n weight_decay=1e-4,\n mu_dtype=args.dtype,\n )\n train_state = TrainState.create(apply_fn=genie.apply, params=init_params, tx=tx)\n\n device_mesh_arr = create_device_mesh((num_devices,))\n mesh = Mesh(devices=device_mesh_arr, axis_names=(""data"",))\n\n replicated_sharding = NamedSharding(mesh, PartitionSpec())\n videos_sharding = NamedSharding(mesh, PartitionSpec(""data"", None, None, None, None))\n train_state = jax.device_put(train_state, replicated_sharding)\n\n # --- Initialize checkpoint manager ---\n step = 0\n handler_registry = ocp.handlers.DefaultCheckpointHandlerRegistry()\n handler_registry.add(\n ""model_state"", ocp.args.StandardSave, ocp.handlers.StandardCheckpointHandler\n )\n handler_registry.add(\n ""model_state"", ocp.args.StandardRestore, ocp.handlers.StandardCheckpointHandler\n )\n handler_registry.add(""dataloader_state"", grain.checkpoint.CheckpointSave, grain.checkpoint.CheckpointHandler) # type: 
ignore\n handler_registry.add(""dataloader_state"", grain.checkpoint.CheckpointRestore, grain.checkpoint.CheckpointHandler) # type: ignore\n\n checkpoint_options = ocp.CheckpointManagerOptions(\n save_interval_steps=args.log_checkpoint_interval,\n max_to_keep=3,\n keep_period=args.log_checkpoint_keep_period,\n step_format_fixed_length=6,\n cleanup_tmp_directories=True,\n )\n\n checkpoint_manager = ocp.CheckpointManager(\n args.ckpt_dir,\n options=checkpoint_options,\n handler_registry=handler_registry,\n )\n\n # --- Create DataLoaderIterator from dataloader ---\n array_record_files = [\n os.path.join(args.data_dir, x)\n for x in os.listdir(args.data_dir)\n if x.endswith("".array_record"")\n ]\n grain_dataloader = get_dataloader(\n array_record_files,\n args.seq_len,\n # NOTE: We deliberately pass the global batch size\n # The dataloader shards the dataset across all processes\n args.batch_size,\n *image_shape,\n num_workers=8,\n prefetch_buffer_size=1,\n seed=args.seed,\n )\n initial_state = grain_dataloader._create_initial_state()\n grain_iterator = grain.DataLoaderIterator(grain_dataloader, initial_state)\n\n # --- Restore checkpoint ---\n if args.restore_ckpt:\n # Restore full dynamics model\n abstract_train_state = jax.tree_util.tree_map(\n ocp.utils.to_shape_dtype_struct, train_state\n )\n restored = checkpoint_manager.restore(\n checkpoint_manager.latest_step(),\n args=ocp.args.Composite(\n model_state=ocp.args.StandardRestore(abstract_train_state),\n dataloader_state=grain.checkpoint.CheckpointRestore(grain_iterator),\n ),\n )\n train_state = restored[""model_state""]\n grain_iterator = restored[""dataloader_state""]\n step = checkpoint_manager.latest_step() or 0\n print(f""Restored dataloader and model state from step {step}"")\n else:\n # Restore from pre-trained tokenizer (and LAM)\n train_state = restore_genie_components(\n train_state, replicated_sharding, dummy_inputs, rng, args\n )\n\n # --- TRAIN LOOP ---\n dataloader = (jax.make_array_from_process_local_data(videos_sharding, elem) for elem in grain_iterator) # type: ignore\n while step < args.num_steps:\n for videos in dataloader:\n # for i in range(videos.shape[0]):\n # video_i = videos[i:i+1] # shape (1, T, H, W, C)\n # np.save(f""overfit_dir/oai_sample_seed69_{i}.npy"", video_i)\n # jax.debug.breakpoint()\n # videos = np.load(""overfit_dir/oai_sample_seed69_1.npy"") # *255.\n # videos = videos.astype(np.uint8)\n # videos = jax.make_array_from_process_local_data(videos_sharding, videos)\n # while True:\n # --- Train step ---\n rng, _rng, _rng_dropout, _rng_mask = jax.random.split(rng, 4)\n\n inputs = dict(\n videos=videos,\n rng=_rng,\n dropout_rng=_rng_dropout,\n mask_rng=_rng_mask,\n )\n train_state, loss, recon, metrics = train_step(train_state, inputs)\n metrics[""lr""] = lr_schedule(step)\n print(f""Step {step}, loss: {loss}"")\n step += 1\n\n # --- Logging ---\n if args.log:\n if step % args.log_interval == 0 and jax.process_index() == 0:\n wandb.log(\n {\n ""loss"": loss,\n ""step"": step,\n **metrics,\n }\n )\n if step % args.log_image_interval == 0:\n gt_seq = inputs[""videos""][0].astype(jnp.float32) / 255.0\n recon_seq = recon[0].clip(0, 1)\n comparison_seq = jnp.concatenate((gt_seq, recon_seq), axis=1)\n comparison_seq = einops.rearrange(\n comparison_seq * 255, ""t h w c -> h (t w) c""\n )\n if jax.process_index() == 0:\n log_images = dict(\n image=wandb.Image(np.asarray(gt_seq[args.seq_len - 1])),\n recon=wandb.Image(np.asarray(recon_seq[args.seq_len - 1])),\n true_vs_recon=wandb.Image(\n 
np.asarray(comparison_seq.astype(np.uint8))\n ),\n )\n wandb.log(log_images)\n # --- Checkpointing ---\n if args.save_ckpt and step % args.log_checkpoint_interval == 0:\n checkpoint_manager.save(\n step,\n args=ocp.args.Composite(\n model_state=ocp.args.StandardSave(train_state),\n dataloader_state=grain.checkpoint.CheckpointSave(\n grain_iterator\n ),\n ),\n )\n print(f""Saved checkpoint at step {step}"")\n if step >= args.num_steps:\n break\n\n checkpoint_manager.close()\n",python,tab +2034,7896588,"slurm/jobs/mihir/horeka/causal_big_runs/train_dynamics_2_nodes.sbatch",0,0,"#!/usr/bin/env bash\n\n#SBATCH --nodes=2\n#SBATCH --ntasks-per-node=4\n#SBATCH --time=48:00:00\n#SBATCH --partition=accelerated\n#SBATCH --cpus-per-task=5\n#SBATCH --gres=gpu:4\n#SBATCH --output=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir/causal/dynamics-cotraining/%x_%j.log\n#SBATCH --error=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir/causal/dynamics-cotraining/%x_%j.log\n#SBATCH --job-name=train_dynamics_causal_2_node\n#SBATCH --requeue\n#SBATCH --signal=b:usr1@300 # 5 min before timeout\n\n# --- signal trap to requeue job before timeout ---\nrequeue_job() {\n echo ""[$(date)] caught sigusr1 (timeout warning), requeueing slurm job $SLURM_JOB_ID...""\n # optional: trigger checkpoint saving here\n # e.g., touch $checkpoint_dir/requeue_trigger\n scontrol requeue $SLURM_JOB_ID\n exit 0\n}\n\ntrap requeue_job sigusr1\n\n# set checkpoint flag based on restart count\nrestart_count=$(scontrol show job $SLURM_JOB_ID | grep -o 'Restarts=[0-9]*' | cut -d'=' -f2)\n\nif [ $restart_count -eq 0 ]; then\n restore_ckpt_flag=""--no-restore-ckpt""\nelse\n restore_ckpt_flag=""--restore-ckpt""\nfi\n\n\n\n# Log the sbatch script\ncat $0\n\nmodule unload mpi/openmpi/5.0\nmodule unload devel/cuda/12.4\nsource .venv/bin/activate\n\narray_records_dir=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_new/open_ai_minecraft_arrayrecords_chunked\n\njob_name=$SLURM_JOB_NAME\nslurm_job_id=$SLURM_JOB_ID\n\nCHECKPOINT_DIR=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/causal/$job_name/$slurm_job_id\nmkdir -p $CHECKPOINT_DIR\n\n# tokenizer with the new structure supporting larger ffn_dim\ntokenizer_ckpt_dir=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data/checkpoints/train_tokenizer_lr_sweep_1e-4_larger_ffn/\n\nenv | grep SLURM\n\nsrun python train_dynamics.py \\n --save_ckpt \\n $restore_ckpt_flag \\n --wandb_id $SLURM_JOB_ID \\n --ckpt_dir $CHECKPOINT_DIR \\n --batch_size=96 \\n --init_lr=0 \\n --max_lr=2e-5 \\n --log_image_interval=1000 \\n --log \\n --log_checkpoint_interval=1000 \\n --name=dynamics-causal-2-node-$slurm_job_id \\n --tags dynamics causal 2-node \\n --entity instant-uv \\n --project jafar \\n --dyna_dim=1024 \\n --dyna_num_blocks=16 \\n --dyna_num_heads=16 \\n --dyna_ffn_dim=4096 \\n --tokenizer_checkpoint=$tokenizer_ckpt_dir \\n --data_dir $array_records_dir &\n\nchild_pid=$!\n\nwait $child_pid\n",shellscript,tab +2035,7902690,"TERMINAL",0,0,"sbatch slurm/jobs/mihir/horeka/causal_big_runs/train_dynamics_2_nodes.sbatch",,terminal_command +2036,7902732,"TERMINAL",0,0,"]633;E;2025-07-24 16:11:57 sbatch slurm/jobs/mihir/horeka/causal_big_runs/train_dynamics_2_nodes.sbatch;adbf53fe-397b-40d3-9339-94ea79afad56]633;C",,terminal_output +2037,7902810,"TERMINAL",0,0,"Submitted batch job 3373276\r\n]0;tum_cte0515@hkn1991:~/Projects/jafar_jobs_2]633;D;0",,terminal_output +2038,7905679,"TERMINAL",0,0,"sbatch 
slurm/jobs/mihir/horeka/causal_big_runs/train_dynamics_8_nodes.sbatch",,terminal_command +2039,7905702,"TERMINAL",0,0,"]633;E;2025-07-24 16:12:00 sbatch slurm/jobs/mihir/horeka/causal_big_runs/train_dynamics_8_nodes.sbatch;adbf53fe-397b-40d3-9339-94ea79afad56]633;CSubmitted batch job 3373277\r\n]0;tum_cte0515@hkn1991:~/Projects/jafar_jobs_2]633;D;0",,terminal_output +2040,7907896,"TERMINAL",0,0,"cd ..",,terminal_command +2041,7907916,"TERMINAL",0,0,"]633;E;2025-07-24 16:12:02 cd ..;adbf53fe-397b-40d3-9339-94ea79afad56]633;C]0;tum_cte0515@hkn1991:~/Projects]633;D;0",,terminal_output +2042,7910338,"TERMINAL",0,0,"cd jafar",,terminal_command +2043,7913302,"TERMINAL",0,0,"git status",,terminal_command +2044,7913349,"TERMINAL",0,0,"]633;E;2025-07-24 16:12:07 git status;adbf53fe-397b-40d3-9339-94ea79afad56]633;C",,terminal_output +2045,7913381,"TERMINAL",0,0,"On branch new-arch-sampling\r\nYour branch is up to date with 'origin/new-arch-sampling'.\r\n\r\nUntracked files:\r\n (use ""git add ..."" to include in what will be committed)\r\n\tdiff.diff\r\n\tdiff.log\r\n\tlogs/\r\n\toverfit_dir.zip\r\n\tread_tf_record.py\r\n\trequirements-franz.txt\r\n\tscripts_cremers/\r\n\tscripts_horeka/\r\n\tslurm/\r\n\tutils/logger_bak.py\r\n\tutils/visualizer.py\r\n\tweekend-job-requeuer.sh\r\n\tweekend-job-starter.sh\r\n\r\nnothing added to commit but untracked files present (use ""git add"" to track)\r\n]0;tum_cte0515@hkn1991:~/Projects/jafar]633;D;0",,terminal_output +2046,7920717,"TERMINAL",0,0,"bash",,terminal_focus +2047,7922658,"TERMINAL",0,0,"git branch",,terminal_command +2048,7922698,"TERMINAL",0,0,"]633;E;2025-07-24 16:12:17 git branch;63badae8-90b1-4579-970f-d00997b22bed]633;C[?1h=\r add-wandb-name-and-tags\r\n causal-st-transformer\r\n causal-transformer-dynamics-model\r\n convert-to-jax-array-in-iter\r\n correct-batched-sampling\r\n dev\r\n dont-let-tf-see-gpu\r\n feat/explicit-image-dims\r\n fix-sampling\r\n grad-norm-log-and-clip\r\n grain-dataloader\r\n logging-variants\r\n lr-schedules\r\n main\r\n:",,terminal_output +2049,7923603,"TERMINAL",0,0,"\r maskgit-different-maskprob-per-sample\r\n:",,terminal_output +2050,7923834,"TERMINAL",0,0,"\r metrics-logging-for-dynamics-model\r\n:",,terminal_output +2051,7924282,"TERMINAL",0,0,"\rM causal-st-transformer\r\n\r:",,terminal_output +2052,7924512,"TERMINAL",0,0,"\rM add-wandb-name-and-tags\r\n\r:",,terminal_output +2053,7924627,"TERMINAL",0,0,"\r\r:",,terminal_output +2054,7926836,"TERMINAL",0,0,"bash",,terminal_focus +2055,7927813,"TERMINAL",0,0,"git checkout causal-st-transformer",,terminal_command +2056,7927856,"TERMINAL",0,0,"]633;E;2025-07-24 16:12:22 git checkout causal-st-transformer;adbf53fe-397b-40d3-9339-94ea79afad56]633;C",,terminal_output +2057,7928024,"TERMINAL",0,0,"Switched to branch 'causal-st-transformer'\r\n]0;tum_cte0515@hkn1991:~/Projects/jafar]633;D;0",,terminal_output +2058,7931001,"",0,0,"Switched from branch 'new-arch-sampling' to 'causal-st-transformer'",,git_branch_checkout +2059,7935991,"TERMINAL",0,0,"git branch",,terminal_command +2060,7936082,"TERMINAL",0,0,"]633;E;2025-07-24 16:12:30 git branch;adbf53fe-397b-40d3-9339-94ea79afad56]633;C[?1h=\r add-wandb-name-and-tags\r\n* causal-st-transformer\r\n causal-transformer-dynamics-model\r\n convert-to-jax-array-in-iter\r\n correct-batched-sampling\r\n dev\r\n dont-let-tf-see-gpu\r\n feat/explicit-image-dims\r\n fix-sampling\r\n grad-norm-log-and-clip\r\n grain-dataloader\r\n logging-variants\r\n lr-schedules\r\n main\r\n maskgit-different-maskprob-per-sample\r\n 
metrics-logging-for-dynamics-model\r\n monkey-patch\r\n new-arch-sampling\r\n preprocess_video\r\n refactor-tmp\r\n revised-dataloader\r\n runner\r\n runner-grain\r\n sample-from-different-topologies\r\n speedup-tfrecord-preprocessing\r\n tmp\r\n\r[?1l>]0;tum_cte0515@hkn1991:~/Projects/jafar]633;D;0",,terminal_output +2061,7969199,"models/lam.py",0,0,"from typing import Dict, Any\n\nimport jax.numpy as jnp\nimport flax.linen as nn\n\nfrom utils.preprocess import patchify, unpatchify\nfrom utils.nn import BidirectionalSTTransformer, VectorQuantizer\n\n\nclass LatentActionModel(nn.Module):\n """"""Latent Action ST-ViVit VQ-VAE""""""\n\n in_dim: int\n model_dim: int\n ffn_dim: int\n latent_dim: int\n num_latents: int\n patch_size: int\n num_blocks: int\n num_heads: int\n dropout: float\n codebook_dropout: float\n param_dtype: jnp.dtype\n dtype: jnp.dtype\n use_flash_attention: bool\n\n def setup(self):\n self.patch_token_dim = self.in_dim * self.patch_size**2\n self.encoder = BidirectionalSTTransformer(\n self.model_dim,\n self.ffn_dim,\n self.latent_dim,\n self.num_blocks,\n self.num_heads,\n self.dropout,\n self.param_dtype,\n self.dtype,\n use_flash_attention=self.use_flash_attention,\n )\n self.action_in = self.param(\n ""action_in"",\n nn.initializers.lecun_uniform(),\n (1, 1, 1, self.patch_token_dim),\n )\n self.vq = VectorQuantizer(\n self.latent_dim,\n self.num_latents,\n self.codebook_dropout,\n )\n self.patch_up = nn.Dense(\n self.model_dim,\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n )\n self.action_up = nn.Dense(\n self.model_dim,\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n )\n self.decoder = BidirectionalSTTransformer(\n self.model_dim,\n self.ffn_dim,\n self.patch_token_dim,\n self.num_blocks,\n self.num_heads,\n self.dropout,\n self.param_dtype,\n self.dtype,\n use_flash_attention=self.use_flash_attention,\n )\n\n def __call__(self, batch: Dict[str, Any], training: bool = True) -> Dict[str, Any]:\n # --- Encode + VQ ---\n H, W = batch[""videos""].shape[2:4]\n outputs = self.vq_encode(batch[""videos""], training)\n video_action_patches = self.action_up(outputs[""z_q""]) + self.patch_up(\n outputs[""patches""][:, :-1]\n )\n del outputs[""patches""]\n\n # --- Decode ---\n video_recon = self.decoder(video_action_patches)\n video_recon = video_recon.astype(jnp.float32)\n video_recon = nn.sigmoid(video_recon)\n video_recon = video_recon.astype(self.dtype)\n outputs[""recon""] = unpatchify(video_recon, self.patch_size, H, W)\n return outputs\n\n def vq_encode(self, videos: Any, training: bool = True) -> Dict[str, Any]:\n # --- Preprocess videos ---\n B, T = videos.shape[:2]\n patches = patchify(videos, self.patch_size)\n action_pad = jnp.broadcast_to(self.action_in, (B, T, 1, self.patch_token_dim))\n padded_patches = jnp.concatenate((patches, action_pad), axis=2)\n\n # --- Encode ---\n z = self.encoder(padded_patches) # (B, T, N, E)\n # Get latent action for all future frames\n z = z[:, 1:, -1] # (B, T-1, E)\n\n # --- Vector quantize ---\n z = z.reshape(B * (T - 1), self.latent_dim)\n z_q, z, emb, indices = self.vq(z, training)\n z_q = z_q.reshape(B, T - 1, 1, self.latent_dim)\n return dict(patches=patches, z_q=z_q, z=z, emb=emb, indices=indices)\n",python,tab +2062,7975743,"TERMINAL",0,0,"git",,terminal_focus +2063,7976544,"TERMINAL",0,0,"\r[?1l>]0;tum_cte0515@hkn1991:~/Projects/jafar]633;D;0",,terminal_output +2064,7977487,"TERMINAL",0,0,"bash",,terminal_focus +2065,7979716,"TERMINAL",0,0,"git branch",,terminal_command 
+2066,7979751,"TERMINAL",0,0,"]633;E;2025-07-24 16:13:14 git branch;adbf53fe-397b-40d3-9339-94ea79afad56]633;C[?1h=\r add-wandb-name-and-tags\r\n* causal-st-transformer\r\n causal-transformer-dynamics-model\r\n convert-to-jax-array-in-iter\r\n correct-batched-sampling\r\n dev\r\n dont-let-tf-see-gpu\r\n feat/explicit-image-dims\r\n fix-sampling\r\n grad-norm-log-and-clip\r\n grain-dataloader\r\n logging-variants\r\n lr-schedules\r\n main\r\n maskgit-different-maskprob-per-sample\r\n metrics-logging-for-dynamics-model\r\n monkey-patch\r\n new-arch-sampling\r\n preprocess_video\r\n refactor-tmp\r\n revised-dataloader\r\n runner\r\n runner-grain\r\n sample-from-different-topologies\r\n speedup-tfrecord-preprocessing\r\n tmp\r\n",,terminal_output +2067,7979786,"TERMINAL",0,0,"\r[?1l>]0;tum_cte0515@hkn1991:~/Projects/jafar]633;D;0",,terminal_output +2068,7989200,"TERMINAL",0,0,"git checkout new-arch-sampling",,terminal_command +2069,7989251,"TERMINAL",0,0,"]633;E;2025-07-24 16:13:23 git checkout new-arch-sampling;adbf53fe-397b-40d3-9339-94ea79afad56]633;C",,terminal_output +2070,7989324,"TERMINAL",0,0,"Switched to branch 'new-arch-sampling'\r\nYour branch is up to date with 'origin/new-arch-sampling'.\r\n]0;tum_cte0515@hkn1991:~/Projects/jafar]633;D;0",,terminal_output +2071,7991001,"",0,0,"Switched from branch 'causal-st-transformer' to 'new-arch-sampling'",,git_branch_checkout +2072,7998845,"TERMINAL",0,0,"salloc --time=01:00:00 --partition=dev_accelerated --nodes=1 --ntasks-per-node=1 --gres=gpu:1 --cpus-per-task=5",,terminal_command +2073,7998880,"TERMINAL",0,0,"]633;E;2025-07-24 16:13:33 salloc --time=01:00:00 --partition=dev_accelerated --nodes=1 --ntasks-per-node=1 --gres=gpu:1 --cpus-per-task=5;adbf53fe-397b-40d3-9339-94ea79afad56]633;Csalloc: Pending job allocation 3373279\r\nsalloc: job 3373279 queued and waiting for resources\r\n",,terminal_output +2074,8000294,"TERMINAL",0,0,"bash",,terminal_focus +2075,8001437,"TERMINAL",0,0,"idling",,terminal_command +2076,8001497,"TERMINAL",0,0,"]633;E;2025-07-24 16:13:36 idling;63badae8-90b1-4579-970f-d00997b22bed]633;C[?1049h(B[?7hEvery 1.0s: sinfo_t_idlehkn1991.localdomain: Thu Jul 24 16:13:36 2025Partition dev_cpuonly:\t 9 nodes idle\rPartition cpuonly: 10 nodes idle\rPartition dev_accelerated:\t 0 nodes idle\rPartition accelerated:\t 0 nodes idle\rPartition dev_accelerated-h100 :\t 1 nodes idle\rPartition accelerated-h100:\t 0 nodes idle\rPartition large:\t 7 nodes idle",,terminal_output +2077,8002594,"TERMINAL",0,0,"7\t ",,terminal_output +2078,8003616,"TERMINAL",0,0,"8\t ",,terminal_output +2079,8004614,"TERMINAL",0,0,"9\t ",,terminal_output +2080,8005419,"TERMINAL",0,0,"salloc",,terminal_focus +2081,8005634,"TERMINAL",0,0,"40\t ",,terminal_output +2082,8005949,"TERMINAL",0,0,"^Csalloc: Job allocation 3373279 has been revoked.\r\nsalloc: Job aborted due to signal\r\n]0;tum_cte0515@hkn1991:~/Projects/jafar]633;D;1",,terminal_output +2083,8006673,"TERMINAL",0,0,"1\t ",,terminal_output +2084,8007711,"TERMINAL",0,0,"2\t ",,terminal_output +2085,8008737,"TERMINAL",0,0,"3\t ",,terminal_output +2086,8009778,"TERMINAL",0,0,"4\t ",,terminal_output +2087,8010343,"TERMINAL",0,0,"salloc --time=01:00:00 --partition=dev_accelerated-h100 --nodes=1 --ntasks-per-node=1 --gres=gpu:1 --cpus-per-task=5",,terminal_command +2088,8010422,"TERMINAL",0,0,"]633;E;2025-07-24 16:13:45 salloc --time=01:00:00 --partition=dev_accelerated-h100 --nodes=1 --ntasks-per-node=1 --gres=gpu:1 --cpus-per-task=5;adbf53fe-397b-40d3-9339-94ea79afad56]633;Csalloc: Granted job 
allocation 3373280\r\n",,terminal_output +2089,8010529,"TERMINAL",0,0,"salloc: Waiting for resource configuration\r\n",,terminal_output +2090,8010889,"TERMINAL",0,0,"50",,terminal_output +2091,8011857,"TERMINAL",0,0,"6\t ",,terminal_output +2092,8012933,"TERMINAL",0,0,"7\t ",,terminal_output +2093,8013959,"TERMINAL",0,0,"8\t ",,terminal_output +2094,8014982,"TERMINAL",0,0,"9\t ",,terminal_output +2095,8016024,"TERMINAL",0,0,"50\t ",,terminal_output +2096,8017057,"TERMINAL",0,0,"1\t ",,terminal_output +2097,8018102,"TERMINAL",0,0,"2\t ",,terminal_output +2098,8019179,"TERMINAL",0,0,"3\t ",,terminal_output +2099,8020177,"TERMINAL",0,0,"41",,terminal_output +2100,8021221,"TERMINAL",0,0,"5\t ",,terminal_output +2101,8022302,"TERMINAL",0,0,"6\t ",,terminal_output +2102,8023382,"TERMINAL",0,0,"7\t ",,terminal_output +2103,8024365,"TERMINAL",0,0,"9\t ",,terminal_output +2104,8025426,"TERMINAL",0,0,"4:00\t ",,terminal_output +2105,8026435,"TERMINAL",0,0,"1\t ",,terminal_output +2106,8027478,"TERMINAL",0,0,"2\t ",,terminal_output +2107,8028520,"TERMINAL",0,0,"3\t ",,terminal_output +2108,8029622,"TERMINAL",0,0,"4\t ",,terminal_output +2109,8030647,"TERMINAL",0,0,"5\t ",,terminal_output +2110,8031672,"TERMINAL",0,0,"6\t ",,terminal_output +2111,8032797,"TERMINAL",0,0,"70",,terminal_output +2112,8033825,"TERMINAL",0,0,"8\t ",,terminal_output +2113,8034784,"TERMINAL",0,0,"9\t ",,terminal_output +2114,8035868,"TERMINAL",0,0,"10\t ",,terminal_output +2115,8036857,"TERMINAL",0,0,"1\t ",,terminal_output +2116,8037930,"TERMINAL",0,0,"2\t ",,terminal_output +2117,8038406,"TERMINAL",0,0,"salloc: Prolog hung on node hkn0901\r\n",,terminal_output +2118,8038938,"TERMINAL",0,0,"3\t ",,terminal_output +2119,8039974,"TERMINAL",0,0,"4\t ",,terminal_output +2120,8041019,"TERMINAL",0,0,"5\t ",,terminal_output +2121,8042056,"TERMINAL",0,0,"6\t ",,terminal_output +2122,8043099,"TERMINAL",0,0,"7\t ",,terminal_output +2123,8044140,"TERMINAL",0,0,"8\t ",,terminal_output +2124,8045290,"TERMINAL",0,0,"9\t ",,terminal_output +2125,8046336,"TERMINAL",0,0,"20\t ",,terminal_output +2126,8047301,"TERMINAL",0,0,"1\t ",,terminal_output +2127,8048368,"TERMINAL",0,0,"2\t ",,terminal_output +2128,8049088,"TERMINAL",0,0,"salloc: Nodes hkn0901 are ready for job\r\n",,terminal_output +2129,8049372,"TERMINAL",0,0,"4\t ",,terminal_output +2130,8049898,"TERMINAL",0,0,"]0;tum_cte0515@hkn0901:~/Projects/jafar[?2004h[tum_cte0515@hkn0901 jafar]$ ",,terminal_output +2131,8050419,"TERMINAL",0,0,"5\t ",,terminal_output +2132,8051443,"TERMINAL",0,0,"6\t ",,terminal_output +2133,8052469,"slurm/dev/mihir/horeka/yolo-runs/train_dynamics_new_arch_speedrun.sbatch",0,0,"#!/usr/bin/env bash\n\n#SBATCH --nodes=2\n#SBATCH --ntasks-per-node=4\n#SBATCH --time=48:00:00\n#SBATCH --partition=accelerated\n#SBATCH --cpus-per-task=5\n#SBATCH --gres=gpu:4\n#SBATCH --output=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir/%x_%j.log\n#SBATCH --error=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir/%x_%j.log\n#SBATCH --job-name=train_dyn_yolorun_new_arch\n\n# Log the sbatch script\ncat $0\n\nmodule unload mpi/openmpi/5.0\nmodule unload devel/cuda/12.4\n# source .venv/bin/activate\n\narray_records_dir=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_new/open_ai_minecraft_arrayrecords_chunked\n\njob_name=$SLURM_JOB_NAME\nslurm_job_id=$SLURM_JOB_ID\n\nCHECKPOINT_DIR=$ws_dir/checkpoints/$job_name/shift-spatial/$slurm_job_id\nmkdir -p 
$CHECKPOINT_DIR\n\ntokenizer_ckpt_dir=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/big-runs/tokenizer-lr-scaling/train_tokenizer_lr_sweep_1e-4\n\nenv | grep SLURM\n\nsrun python train_dynamics.py \\n --save_ckpt \\n --num_steps=1000 \\n --warmup_steps=0 \\n --wsd_decay_steps=0 \\n --ckpt_dir $CHECKPOINT_DIR \\n --batch_size=8 \\n --init_lr=1e-3 \\n --max_lr=1e-3 \\n --log_image_interval=100 \\n --log \\n --log_checkpoint_interval=100 \\n --name=dynamics-new-arch-speedrun-shift-temporal-$slurm_job_id \\n --tags dynamics \\n --entity instant-uv \\n --project jafar \\n --tokenizer_checkpoint=$tokenizer_ckpt_dir \\n --data_dir $array_records_dir \\n --dyna_dim=128 \\n --dyna_num_blocks=2 \\n --dyna_num_heads=4\n ",shellscript,tab +2134,8052524,"TERMINAL",0,0,"7\t ",,terminal_output +2135,8053518,"TERMINAL",0,0,"8\t ",,terminal_output +2136,8053578,"slurm/dev/mihir/horeka/yolo-runs/train_dynamics_new_arch_speedrun.sbatch",1219,0,"",shellscript,selection_mouse +2137,8054571,"slurm/dev/mihir/horeka/yolo-runs/train_dynamics_new_arch_speedrun.sbatch",1565,0,"",shellscript,selection_mouse +2138,8054576,"slurm/dev/mihir/horeka/yolo-runs/train_dynamics_new_arch_speedrun.sbatch",1564,0,"",shellscript,selection_command +2139,8054609,"TERMINAL",0,0,"9\t ",,terminal_output +2140,8055632,"TERMINAL",0,0,"30\t ",,terminal_output +2141,8056659,"TERMINAL",0,0,"1\t ",,terminal_output +2142,8057683,"TERMINAL",0,0,"2\t ",,terminal_output +2143,8058805,"TERMINAL",0,0,"3\t ",,terminal_output +2144,8059772,"TERMINAL",0,0,"4\t ",,terminal_output +2145,8060819,"TERMINAL",0,0,"5\t ",,terminal_output +2146,8061880,"TERMINAL",0,0,"6\t ",,terminal_output +2147,8062887,"TERMINAL",0,0,"71",,terminal_output +2148,8063346,"train_dynamics.py",0,0,"from dataclasses import dataclass, field\nimport os\n\nimport einops\nfrom flax.training.train_state import TrainState\nfrom jax.sharding import Mesh, PartitionSpec, NamedSharding\nfrom jax.experimental.mesh_utils import create_device_mesh\nimport optax\nimport orbax.checkpoint as ocp\nimport numpy as np\nimport dm_pix as pix\nimport jax\nimport jax.numpy as jnp\nimport tyro\nimport wandb\nimport grain\n\nfrom genie import Genie, restore_genie_components\nfrom utils.dataloader import get_dataloader\nfrom utils.lr_utils import get_lr_schedule\nfrom utils.parameter_utils import count_parameters_by_component\n\n\n\n@dataclass\nclass Args:\n # Experiment\n num_steps: int = 200_000\n seed: int = 0\n seq_len: int = 16\n image_channels: int = 3\n image_height: int = 90\n image_width: int = 160\n data_dir: str = """"\n save_ckpt: bool = False\n restore_ckpt: bool = False\n # Optimization\n batch_size: int = 36\n init_lr: float = 0.0\n max_lr: float = 3e-5\n decay_end: float = 0.0\n wsd_decay_steps: int = (\n 10000 # NOTE: wsd_decay_steps will only be used when using a wsd-schedule\n )\n warmup_steps: int = 5000\n lr_schedule: str = ""wsd"" # supported options: wsd, cos\n # Tokenizer\n tokenizer_dim: int = 512\n tokenizer_ffn_dim: int = 2048\n latent_patch_dim: int = 32\n num_patch_latents: int = 1024\n patch_size: int = 4\n tokenizer_num_blocks: int = 4\n tokenizer_num_heads: int = 8\n tokenizer_checkpoint: str = """"\n # LAM\n lam_dim: int = 512\n lam_ffn_dim: int = 2048\n latent_action_dim: int = 32\n num_latent_actions: int = 6\n lam_patch_size: int = 16\n lam_num_blocks: int = 4\n lam_num_heads: int = 8\n lam_checkpoint: str = """"\n # Dynamics\n dyna_dim: int = 512\n dyna_ffn_dim: int = 2048\n dyna_num_blocks: int = 6\n dyna_num_heads: int = 8\n dropout: float = 
0.0\n mask_limit: float = 0.5\n use_maskgit: bool = False\n param_dtype: jnp.dtype = jnp.float32\n dtype: jnp.dtype = jnp.bfloat16\n use_flash_attention: bool = True\n # Logging\n log: bool = False\n entity: str = """"\n project: str = """"\n name: str = ""train_dynamics""\n tags: list[str] = field(default_factory=lambda: [""dynamics""])\n log_interval: int = 5\n log_image_interval: int = 250\n ckpt_dir: str = """"\n log_checkpoint_interval: int = 25000\n log_checkpoint_keep_period: int = 20000\n log_gradients: bool = False\n wandb_id: str = """"\n\n\nargs = tyro.cli(Args)\n\n\ndef dynamics_loss_fn(params, state, inputs):\n """"""Compute masked dynamics loss""""""\n inputs[""videos""] = inputs[""videos""].astype(args.dtype) / 255.0\n outputs = state.apply_fn(\n params,\n inputs,\n training=True,\n rngs={""params"": inputs[""rng""], ""dropout"": inputs[""dropout_rng""]},\n )\n mask = outputs[""mask""]\n outputs[""token_logits""] = outputs[""token_logits""].astype(jnp.float32)\n outputs[""recon""] = outputs[""recon""].astype(jnp.float32)\n logits = outputs[""token_logits""]\n targets = outputs[""video_tokens""]\n\n # if not args.use_maskgit:\n # logits = outputs[""token_logits""][:, :, :-1]\n # targets = outputs[""video_tokens""][:, :, 1:]\n # mask = outputs[""mask""][:, :, 1:]\n\n ce_loss = optax.softmax_cross_entropy_with_integer_labels(logits, targets)\n ce_loss = (mask * ce_loss).sum() / mask.sum()\n acc = logits.argmax(-1) == targets\n acc = (mask * acc).sum() / mask.sum()\n select_probs = jax.nn.softmax(logits)\n gt = inputs[""videos""].clip(0, 1).reshape(-1, *inputs[""videos""].shape[2:])\n recon = outputs[""recon""].clip(0, 1).reshape(-1, *outputs[""recon""].shape[2:])\n psnr = pix.psnr(gt, recon).mean() # type: ignore\n ssim = pix.ssim(gt, recon).mean() # type: ignore\n psnr = pix.psnr(gt, recon).mean() # type: ignore\n ssim = pix.ssim(gt, recon).mean() # type: ignore\n _, index_counts_lam = jnp.unique_counts(\n jnp.ravel(outputs[""lam_indices""]), size=args.num_latent_actions, fill_value=0\n )\n _, index_counts_tokenizer = jnp.unique_counts(\n jnp.ravel(outputs[""video_tokens""]), size=args.num_patch_latents, fill_value=0\n )\n codebook_usage_lam = (index_counts_lam != 0).mean()\n codebook_usage_tokenizer = (index_counts_tokenizer != 0).mean()\n metrics = dict(\n cross_entropy_loss=ce_loss,\n masked_token_accuracy=acc,\n select_logit=logits.max(-1).mean(),\n select_p=select_probs.max(-1).mean(),\n entropy=jax.scipy.special.entr(select_probs).sum(-1).mean(),\n psnr=psnr,\n ssim=ssim,\n codebook_usage_lam=codebook_usage_lam,\n codebook_usage_tokenizer=codebook_usage_tokenizer,\n )\n return ce_loss, (outputs[""recon""], metrics)\n\n\n@jax.jit\ndef train_step(state, inputs):\n """"""Update state and compute metrics""""""\n grad_fn = jax.value_and_grad(dynamics_loss_fn, has_aux=True, allow_int=True)\n (loss, (recon, metrics)), grads = grad_fn(state.params, state, inputs)\n state = state.apply_gradients(grads=grads)\n if args.log_gradients:\n metrics[""gradients_std/""] = jax.tree.map(\n lambda x: x.std(), grads[""params""][""dynamics""]\n )\n return state, loss, recon, metrics\n\n\nif __name__ == ""__main__"":\n jax.distributed.initialize()\n num_devices = jax.device_count()\n if num_devices == 0:\n raise ValueError(""No JAX devices found."")\n print(f""Running on {num_devices} devices."")\n\n if args.batch_size % num_devices != 0:\n raise ValueError(\n f""Global batch size {args.batch_size} must be divisible by ""\n f""number of devices {num_devices}.""\n )\n\n 
per_device_batch_size_for_init = args.batch_size // num_devices\n\n rng = jax.random.PRNGKey(args.seed)\n\n # --- Initialize model ---\n genie = Genie(\n # Tokenizer\n in_dim=args.image_channels,\n tokenizer_dim=args.tokenizer_dim,\n tokenizer_ffn_dim=args.tokenizer_ffn_dim,\n latent_patch_dim=args.latent_patch_dim,\n num_patch_latents=args.num_patch_latents,\n patch_size=args.patch_size,\n tokenizer_num_blocks=args.tokenizer_num_blocks,\n tokenizer_num_heads=args.tokenizer_num_heads,\n # LAM\n lam_dim=args.lam_dim,\n lam_ffn_dim=args.lam_ffn_dim,\n latent_action_dim=args.latent_action_dim,\n num_latent_actions=args.num_latent_actions,\n lam_patch_size=args.lam_patch_size,\n lam_num_blocks=args.lam_num_blocks,\n lam_num_heads=args.lam_num_heads,\n lam_co_train=not args.lam_checkpoint,\n # Dynamics\n dyna_dim=args.dyna_dim,\n dyna_ffn_dim=args.dyna_ffn_dim,\n dyna_num_blocks=args.dyna_num_blocks,\n dyna_num_heads=args.dyna_num_heads,\n dropout=args.dropout,\n mask_limit=args.mask_limit,\n use_maskgit=args.use_maskgit,\n param_dtype=args.param_dtype,\n dtype=args.dtype,\n use_flash_attention=args.use_flash_attention,\n )\n rng, _rng = jax.random.split(rng)\n image_shape = (args.image_height, args.image_width, args.image_channels)\n dummy_inputs = dict(\n videos=jnp.zeros(\n (per_device_batch_size_for_init, args.seq_len, *image_shape),\n dtype=args.dtype,\n ),\n action=jnp.zeros(\n (per_device_batch_size_for_init, args.seq_len), dtype=args.dtype\n ),\n mask_rng=_rng,\n )\n rng, _rng = jax.random.split(rng)\n init_params = genie.init(_rng, dummy_inputs)\n\n param_counts = count_parameters_by_component(init_params)\n\n if args.log and jax.process_index() == 0:\n wandb_init_kwargs = {\n ""entity"": args.entity,\n ""project"": args.project,\n ""name"": args.name,\n ""tags"": args.tags,\n ""group"": ""debug"",\n ""config"": args,\n }\n\n if args.wandb_id:\n wandb_init_kwargs.update(\n {\n ""id"": args.wandb_id,\n ""resume"": ""allow"",\n }\n )\n wandb.init(**wandb_init_kwargs)\n\n wandb.config.update({""model_param_count"": param_counts})\n\n print(""Parameter counts:"")\n print(param_counts)\n\n # --- Initialize optimizer ---\n lr_schedule = get_lr_schedule(\n args.lr_schedule,\n args.init_lr,\n args.max_lr,\n args.decay_end,\n args.num_steps,\n args.warmup_steps,\n args.wsd_decay_steps,\n )\n tx = optax.adamw(\n learning_rate=lr_schedule,\n b1=0.9,\n b2=0.9,\n weight_decay=1e-4,\n mu_dtype=args.dtype,\n )\n train_state = TrainState.create(apply_fn=genie.apply, params=init_params, tx=tx)\n\n device_mesh_arr = create_device_mesh((num_devices,))\n mesh = Mesh(devices=device_mesh_arr, axis_names=(""data"",))\n\n replicated_sharding = NamedSharding(mesh, PartitionSpec())\n videos_sharding = NamedSharding(mesh, PartitionSpec(""data"", None, None, None, None))\n train_state = jax.device_put(train_state, replicated_sharding)\n\n # --- Initialize checkpoint manager ---\n step = 0\n handler_registry = ocp.handlers.DefaultCheckpointHandlerRegistry()\n handler_registry.add(\n ""model_state"", ocp.args.StandardSave, ocp.handlers.StandardCheckpointHandler\n )\n handler_registry.add(\n ""model_state"", ocp.args.StandardRestore, ocp.handlers.StandardCheckpointHandler\n )\n handler_registry.add(""dataloader_state"", grain.checkpoint.CheckpointSave, grain.checkpoint.CheckpointHandler) # type: ignore\n handler_registry.add(""dataloader_state"", grain.checkpoint.CheckpointRestore, grain.checkpoint.CheckpointHandler) # type: ignore\n\n checkpoint_options = ocp.CheckpointManagerOptions(\n 
save_interval_steps=args.log_checkpoint_interval,\n max_to_keep=3,\n keep_period=args.log_checkpoint_keep_period,\n step_format_fixed_length=6,\n cleanup_tmp_directories=True,\n )\n\n checkpoint_manager = ocp.CheckpointManager(\n args.ckpt_dir,\n options=checkpoint_options,\n handler_registry=handler_registry,\n )\n\n # --- Create DataLoaderIterator from dataloader ---\n array_record_files = [\n os.path.join(args.data_dir, x)\n for x in os.listdir(args.data_dir)\n if x.endswith("".array_record"")\n ]\n grain_dataloader = get_dataloader(\n array_record_files,\n args.seq_len,\n # NOTE: We deliberately pass the global batch size\n # The dataloader shards the dataset across all processes\n args.batch_size,\n *image_shape,\n num_workers=8,\n prefetch_buffer_size=1,\n seed=args.seed,\n )\n initial_state = grain_dataloader._create_initial_state()\n grain_iterator = grain.DataLoaderIterator(grain_dataloader, initial_state)\n\n # --- Restore checkpoint ---\n if args.restore_ckpt:\n # Restore full dynamics model\n abstract_train_state = jax.tree_util.tree_map(\n ocp.utils.to_shape_dtype_struct, train_state\n )\n restored = checkpoint_manager.restore(\n checkpoint_manager.latest_step(),\n args=ocp.args.Composite(\n model_state=ocp.args.StandardRestore(abstract_train_state),\n dataloader_state=grain.checkpoint.CheckpointRestore(grain_iterator),\n ),\n )\n train_state = restored[""model_state""]\n grain_iterator = restored[""dataloader_state""]\n step = checkpoint_manager.latest_step() or 0\n print(f""Restored dataloader and model state from step {step}"")\n else:\n # Restore from pre-trained tokenizer (and LAM)\n train_state = restore_genie_components(\n train_state, replicated_sharding, dummy_inputs, rng, args\n )\n\n # --- TRAIN LOOP ---\n dataloader = (jax.make_array_from_process_local_data(videos_sharding, elem) for elem in grain_iterator) # type: ignore\n while step < args.num_steps:\n for videos in dataloader:\n # for i in range(videos.shape[0]):\n # video_i = videos[i:i+1] # shape (1, T, H, W, C)\n # np.save(f""overfit_dir/oai_sample_seed69_{i}.npy"", video_i)\n # jax.debug.breakpoint()\n # videos = np.load(""overfit_dir/oai_sample_seed69_1.npy"") # *255.\n # videos = videos.astype(np.uint8)\n # videos = jax.make_array_from_process_local_data(videos_sharding, videos)\n # while True:\n # --- Train step ---\n rng, _rng, _rng_dropout, _rng_mask = jax.random.split(rng, 4)\n\n inputs = dict(\n videos=videos,\n rng=_rng,\n dropout_rng=_rng_dropout,\n mask_rng=_rng_mask,\n )\n train_state, loss, recon, metrics = train_step(train_state, inputs)\n metrics[""lr""] = lr_schedule(step)\n print(f""Step {step}, loss: {loss}"")\n step += 1\n\n # --- Logging ---\n if args.log:\n if step % args.log_interval == 0 and jax.process_index() == 0:\n wandb.log(\n {\n ""loss"": loss,\n ""step"": step,\n **metrics,\n }\n )\n if step % args.log_image_interval == 0:\n gt_seq = inputs[""videos""][0].astype(jnp.float32) / 255.0\n recon_seq = recon[0].clip(0, 1)\n comparison_seq = jnp.concatenate((gt_seq, recon_seq), axis=1)\n comparison_seq = einops.rearrange(\n comparison_seq * 255, ""t h w c -> h (t w) c""\n )\n if jax.process_index() == 0:\n log_images = dict(\n image=wandb.Image(np.asarray(gt_seq[args.seq_len - 1])),\n recon=wandb.Image(np.asarray(recon_seq[args.seq_len - 1])),\n true_vs_recon=wandb.Image(\n np.asarray(comparison_seq.astype(np.uint8))\n ),\n )\n wandb.log(log_images)\n # --- Checkpointing ---\n if args.save_ckpt and step % args.log_checkpoint_interval == 0:\n checkpoint_manager.save(\n step,\n 
args=ocp.args.Composite(\n model_state=ocp.args.StandardSave(train_state),\n dataloader_state=grain.checkpoint.CheckpointSave(\n grain_iterator\n ),\n ),\n )\n print(f""Saved checkpoint at step {step}"")\n if step >= args.num_steps:\n break\n\n checkpoint_manager.close()\n",python,tab +2149,8063970,"TERMINAL",0,0,"8\t ",,terminal_output +2150,8064987,"TERMINAL",0,0,"9\t ",,terminal_output +2151,8066016,"TERMINAL",0,0,"40\t ",,terminal_output +2152,8067061,"TERMINAL",0,0,"1\t ",,terminal_output +2153,8068095,"TERMINAL",0,0,"2\t ",,terminal_output +2154,8068762,"train_dynamics.py",12134,0,"",python,selection_mouse +2155,8069136,"TERMINAL",0,0,"3\t ",,terminal_output +2156,8069240,"train_dynamics.py",12249,0,"",python,selection_mouse +2157,8069255,"train_dynamics.py",12248,0,"",python,selection_command +2158,8070179,"TERMINAL",0,0,"4\t ",,terminal_output +2159,8070326,"train_dynamics.py",12228,21," # while True:",python,selection_command +2160,8070553,"train_dynamics.py",12145,104," # videos = jax.make_array_from_process_local_data(videos_sharding, videos)\n # while True:",python,selection_command +2161,8070699,"train_dynamics.py",12102,147," # videos = videos.astype(np.uint8)\n # videos = jax.make_array_from_process_local_data(videos_sharding, videos)\n # while True:",python,selection_command +2162,8070850,"train_dynamics.py",12027,222," # videos = np.load(""overfit_dir/oai_sample_seed69_1.npy"") # *255.\n # videos = videos.astype(np.uint8)\n # videos = jax.make_array_from_process_local_data(videos_sharding, videos)\n # while True:",python,selection_command +2163,8071022,"train_dynamics.py",11994,255," # jax.debug.breakpoint()\n # videos = np.load(""overfit_dir/oai_sample_seed69_1.npy"") # *255.\n # videos = videos.astype(np.uint8)\n # videos = jax.make_array_from_process_local_data(videos_sharding, videos)\n # while True:",python,selection_command +2164,8071207,"train_dynamics.py",11925,324," # np.save(f""overfit_dir/oai_sample_seed69_{i}.npy"", video_i)\n # jax.debug.breakpoint()\n # videos = np.load(""overfit_dir/oai_sample_seed69_1.npy"") # *255.\n # videos = videos.astype(np.uint8)\n # videos = jax.make_array_from_process_local_data(videos_sharding, videos)\n # while True:",python,selection_command +2165,8071265,"TERMINAL",0,0,"5\t ",,terminal_output +2166,8071326,"train_dynamics.py",11866,383," # video_i = videos[i:i+1] # shape (1, T, H, W, C)\n # np.save(f""overfit_dir/oai_sample_seed69_{i}.npy"", video_i)\n # jax.debug.breakpoint()\n # videos = np.load(""overfit_dir/oai_sample_seed69_1.npy"") # *255.\n # videos = videos.astype(np.uint8)\n # videos = jax.make_array_from_process_local_data(videos_sharding, videos)\n # while True:",python,selection_command +2167,8071485,"train_dynamics.py",11823,426," # for i in range(videos.shape[0]):\n # video_i = videos[i:i+1] # shape (1, T, H, W, C)\n # np.save(f""overfit_dir/oai_sample_seed69_{i}.npy"", video_i)\n # jax.debug.breakpoint()\n # videos = np.load(""overfit_dir/oai_sample_seed69_1.npy"") # *255.\n # videos = videos.astype(np.uint8)\n # videos = jax.make_array_from_process_local_data(videos_sharding, videos)\n # while True:",python,selection_command +2168,8072367,"TERMINAL",0,0,"6\t ",,terminal_output +2169,8073348,"TERMINAL",0,0,"7\t ",,terminal_output +2170,8074173,"train_dynamics.py",11866,383," # video_i = videos[i:i+1] # shape (1, T, H, W, C)\n # np.save(f""overfit_dir/oai_sample_seed69_{i}.npy"", video_i)\n # jax.debug.breakpoint()\n # videos = np.load(""overfit_dir/oai_sample_seed69_1.npy"") # *255.\n # videos = 
videos.astype(np.uint8)\n # videos = jax.make_array_from_process_local_data(videos_sharding, videos)\n # while True:",python,selection_command +2171,8074351,"train_dynamics.py",11925,324," # np.save(f""overfit_dir/oai_sample_seed69_{i}.npy"", video_i)\n # jax.debug.breakpoint()\n # videos = np.load(""overfit_dir/oai_sample_seed69_1.npy"") # *255.\n # videos = videos.astype(np.uint8)\n # videos = jax.make_array_from_process_local_data(videos_sharding, videos)\n # while True:",python,selection_command +2172,8074352,"TERMINAL",0,0,"8\t ",,terminal_output +2173,8074515,"train_dynamics.py",11994,255," # jax.debug.breakpoint()\n # videos = np.load(""overfit_dir/oai_sample_seed69_1.npy"") # *255.\n # videos = videos.astype(np.uint8)\n # videos = jax.make_array_from_process_local_data(videos_sharding, videos)\n # while True:",python,selection_command +2174,8074873,"train_dynamics.py",12027,222," # videos = np.load(""overfit_dir/oai_sample_seed69_1.npy"") # *255.\n # videos = videos.astype(np.uint8)\n # videos = jax.make_array_from_process_local_data(videos_sharding, videos)\n # while True:",python,selection_command +2175,8075010,"train_dynamics.py",11994,255," # jax.debug.breakpoint()\n # videos = np.load(""overfit_dir/oai_sample_seed69_1.npy"") # *255.\n # videos = videos.astype(np.uint8)\n # videos = jax.make_array_from_process_local_data(videos_sharding, videos)\n # while True:",python,selection_command +2176,8075184,"train_dynamics.py",11925,324," # np.save(f""overfit_dir/oai_sample_seed69_{i}.npy"", video_i)\n # jax.debug.breakpoint()\n # videos = np.load(""overfit_dir/oai_sample_seed69_1.npy"") # *255.\n # videos = videos.astype(np.uint8)\n # videos = jax.make_array_from_process_local_data(videos_sharding, videos)\n # while True:",python,selection_command +2177,8075346,"train_dynamics.py",11866,383," # video_i = videos[i:i+1] # shape (1, T, H, W, C)\n # np.save(f""overfit_dir/oai_sample_seed69_{i}.npy"", video_i)\n # jax.debug.breakpoint()\n # videos = np.load(""overfit_dir/oai_sample_seed69_1.npy"") # *255.\n # videos = videos.astype(np.uint8)\n # videos = jax.make_array_from_process_local_data(videos_sharding, videos)\n # while True:",python,selection_command +2178,8075383,"TERMINAL",0,0,"50\t ",,terminal_output +2179,8075495,"train_dynamics.py",11823,426," # for i in range(videos.shape[0]):\n # video_i = videos[i:i+1] # shape (1, T, H, W, C)\n # np.save(f""overfit_dir/oai_sample_seed69_{i}.npy"", video_i)\n # jax.debug.breakpoint()\n # videos = np.load(""overfit_dir/oai_sample_seed69_1.npy"") # *255.\n # videos = videos.astype(np.uint8)\n # videos = jax.make_array_from_process_local_data(videos_sharding, videos)\n # while True:",python,selection_command +2180,8075960,"train_dynamics.py",11843,0,"",python,selection_command +2181,8076092,"train_dynamics.py",11809,0,"",python,selection_command +2182,8076250,"train_dynamics.py",11776,0,"",python,selection_command +2183,8076406,"TERMINAL",0,0,"1\t ",,terminal_output +2184,8076553,"train_dynamics.py",11809,0,"",python,selection_command +2185,8076942,"train_dynamics.py",11797,0,"",python,selection_command +2186,8077567,"train_dynamics.py",11797,0,"#",python,content +2187,8077568,"train_dynamics.py",11798,0,"",python,selection_keyboard +2188,8077568,"TERMINAL",0,0,"2\t ",,terminal_output +2189,8077650,"train_dynamics.py",11798,0," ",python,content +2190,8077651,"train_dynamics.py",11799,0,"",python,selection_keyboard +2191,8077981,"train_dynamics.py",11798,0,"",python,selection_command 
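The commented-out block being toggled in train_dynamics.py above is a single-sample overfitting check: save one video from a batch, then feed that same sample every step and verify the loss collapses. A minimal sketch of the idea in plain NumPy, assuming the shapes and the overfit_dir path visible in the recording (illustrative only, not the author's exact code):

import numpy as np

def save_single_samples(videos: np.ndarray, out_dir: str = "overfit_dir") -> None:
    # Persist each batch element separately so a later run can overfit on one.
    for i in range(videos.shape[0]):
        video_i = videos[i : i + 1]  # shape (1, T, H, W, C)
        np.save(f"{out_dir}/oai_sample_seed69_{i}.npy", video_i)

def load_single_sample(path: str = "overfit_dir/oai_sample_seed69_1.npy") -> np.ndarray:
    # uint8 frames in [0, 255]; the training loop rescales by /255 before the loss.
    return np.load(path).astype(np.uint8)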
+2192,8078095,"train_dynamics.py",11834,0,"",python,selection_command +2193,8078259,"train_dynamics.py",11877,0,"",python,selection_command +2194,8078392,"train_dynamics.py",11936,0,"",python,selection_command +2195,8078486,"TERMINAL",0,0,"3\t ",,terminal_output +2196,8078527,"train_dynamics.py",12005,0,"",python,selection_command +2197,8078697,"train_dynamics.py",12038,0,"",python,selection_command +2198,8078842,"train_dynamics.py",12113,0,"",python,selection_command +2199,8078985,"train_dynamics.py",12156,0,"",python,selection_command +2200,8079179,"train_dynamics.py",12239,0,"",python,selection_command +2201,8079525,"TERMINAL",0,0,"4\t ",,terminal_output +2202,8080565,"TERMINAL",0,0,"5\t ",,terminal_output +2203,8081023,"train_dynamics.py",12230,21," # while True:",python,selection_command +2204,8081283,"train_dynamics.py",12147,104," # videos = jax.make_array_from_process_local_data(videos_sharding, videos)\n # while True:",python,selection_command +2205,8081447,"train_dynamics.py",12104,147," # videos = videos.astype(np.uint8)\n # videos = jax.make_array_from_process_local_data(videos_sharding, videos)\n # while True:",python,selection_command +2206,8081609,"TERMINAL",0,0,"6\t ",,terminal_output +2207,8082619,"train_dynamics.py",12029,222," # videos = np.load(""overfit_dir/oai_sample_seed69_1.npy"") # *255.\n # videos = videos.astype(np.uint8)\n # videos = jax.make_array_from_process_local_data(videos_sharding, videos)\n # while True:",python,selection_command +2208,8082654,"TERMINAL",0,0,"7\t ",,terminal_output +2209,8082866,"train_dynamics.py",12037,0,"",python,selection_command +2210,8083345,"train_dynamics.py",12238,1,"",python,content +2211,8083346,"train_dynamics.py",12155,1,"",python,content +2212,8083346,"train_dynamics.py",12112,1,"",python,content +2213,8083346,"train_dynamics.py",12037,1,"",python,content +2214,8083456,"train_dynamics.py",12235,1,"",python,content +2215,8083456,"train_dynamics.py",12153,1,"",python,content +2216,8083456,"train_dynamics.py",12111,1,"",python,content +2217,8083456,"train_dynamics.py",12037,1,"",python,content +2218,8083671,"train_dynamics.py",12036,0,"",python,selection_command +2219,8083706,"TERMINAL",0,0,"8\t ",,terminal_output +2220,8084107,"train_dynamics.py",12109,0,"",python,selection_command +2221,8084367,"train_dynamics.py",12110,0,"",python,selection_command +2222,8084750,"TERMINAL",0,0,"9\t ",,terminal_output +2223,8085262,"train_dynamics.py",12110,0,"#",python,content +2224,8085263,"train_dynamics.py",12111,0,"",python,selection_keyboard +2225,8085315,"train_dynamics.py",12111,0," ",python,content +2226,8085316,"train_dynamics.py",12112,0,"",python,selection_keyboard +2227,8085800,"train_dynamics.py",12111,0,"",python,selection_command +2228,8085805,"TERMINAL",0,0,"5:00\t ",,terminal_output +2229,8085910,"train_dynamics.py",12038,0,"",python,selection_command +2230,8086047,"train_dynamics.py",12005,0,"",python,selection_command +2231,8086840,"TERMINAL",0,0,"1\t ",,terminal_output +2232,8087888,"TERMINAL",0,0,"2\t ",,terminal_output +2233,8088912,"TERMINAL",0,0,"3\t ",,terminal_output +2234,8089794,"train_dynamics.py",12316,0,"",python,selection_mouse +2235,8089947,"TERMINAL",0,0,"4\t ",,terminal_output +2236,8090381,"train_dynamics.py",12024,0,"",python,selection_mouse +2237,8090878,"train_dynamics.py",12057,0,"",python,selection_mouse +2238,8091001,"train_dynamics.py",12055,11,"overfit_dir",python,selection_mouse +2239,8091013,"TERMINAL",0,0,"5\t ",,terminal_output +2240,8091230,"train_dynamics.py",12029,73," videos = 
np.load(""overfit_dir/oai_sample_seed69_1.npy"") # *255.\n",python,selection_mouse +2241,8091810,"train_dynamics.py",12062,0,"",python,selection_mouse +2242,8091811,"train_dynamics.py",12055,11,"overfit_dir",python,selection_mouse +2243,8092097,"TERMINAL",0,0,"6\t ",,terminal_output +2244,8092334,"train_dynamics.py",12076,0,"",python,selection_mouse +2245,8092494,"train_dynamics.py",12067,19,"oai_sample_seed69_1",python,selection_mouse +2246,8093126,"TERMINAL",0,0,"7\t ",,terminal_output +2247,8093318,"train_dynamics.py",12173,0,"",python,selection_mouse +2248,8094094,"TERMINAL",0,0,"8\t ",,terminal_output +2249,8094665,"train_dynamics.py",12166,34,"make_array_from_process_local_data",python,selection_mouse +2250,8095129,"TERMINAL",0,0,"9\t ",,terminal_output +2251,8096174,"TERMINAL",0,0,"10\t ",,terminal_output +2252,8097206,"TERMINAL",0,0,"1\t ",,terminal_output +2253,8098241,"TERMINAL",0,0,"2\t ",,terminal_output +2254,8099281,"TERMINAL",0,0,"32",,terminal_output +2255,8100381,"TERMINAL",0,0,"4\t ",,terminal_output +2256,8101404,"TERMINAL",0,0,"6\t ",,terminal_output +2257,8102398,"TERMINAL",0,0,"7\t ",,terminal_output +2258,8103454,"TERMINAL",0,0,"8\t ",,terminal_output +2259,8104474,"TERMINAL",0,0,"9\t ",,terminal_output +2260,8105483,"slurm/dev/mihir/horeka/yolo-runs/train_dynamics_new_arch_speedrun.sbatch",0,0,"",shellscript,tab +2261,8105556,"TERMINAL",0,0,"20\t ",,terminal_output +2262,8106630,"TERMINAL",0,0,"1\t ",,terminal_output +2263,8107656,"TERMINAL",0,0,"2\t ",,terminal_output +2264,8108676,"TERMINAL",0,0,"3\t ",,terminal_output +2265,8109716,"TERMINAL",0,0,"4\t ",,terminal_output +2266,8109872,"train_dynamics.py",0,0,"",python,tab +2267,8110777,"slurm/dev/mihir/horeka/yolo-runs/train_dynamics_new_arch_speedrun.sbatch",0,0,"",shellscript,tab +2268,8110801,"TERMINAL",0,0,"5\t ",,terminal_output +2269,8111847,"TERMINAL",0,0,"6\t ",,terminal_output +2270,8112210,"slurm/dev/mihir/horeka/yolo-runs/train_dynamics_new_arch_speedrun.sbatch",1149,0,"",shellscript,selection_mouse +2271,8112793,"TERMINAL",0,0,"7\t ",,terminal_output +2272,8113119,"slurm/dev/mihir/horeka/yolo-runs/train_dynamics_new_arch_speedrun.sbatch",1148,1,"",shellscript,content +2273,8113222,"slurm/dev/mihir/horeka/yolo-runs/train_dynamics_new_arch_speedrun.sbatch",1148,0,"1",shellscript,content +2274,8113223,"slurm/dev/mihir/horeka/yolo-runs/train_dynamics_new_arch_speedrun.sbatch",1149,0,"",shellscript,selection_keyboard +2275,8113834,"TERMINAL",0,0,"8\t ",,terminal_output +2276,8114862,"TERMINAL",0,0,"9\t ",,terminal_output +2277,8115949,"TERMINAL",0,0,"30\t ",,terminal_output +2278,8116900,"train_dynamics.py",0,0,"",python,tab +2279,8116978,"TERMINAL",0,0,"1\t ",,terminal_output +2280,8118018,"TERMINAL",0,0,"2\t ",,terminal_output +2281,8119023,"TERMINAL",0,0,"3\t ",,terminal_output +2282,8120057,"TERMINAL",0,0,"4\t ",,terminal_output +2283,8121101,"TERMINAL",0,0,"5\t ",,terminal_output +2284,8122151,"TERMINAL",0,0,"6\t ",,terminal_output +2285,8123217,"TERMINAL",0,0,"7\t ",,terminal_output +2286,8124215,"TERMINAL",0,0,"8\t ",,terminal_output +2287,8125263,"TERMINAL",0,0,"9\t ",,terminal_output +2288,8126390,"TERMINAL",0,0,"40\t ",,terminal_output +2289,8127364,"TERMINAL",0,0,"1\t ",,terminal_output +2290,8128455,"TERMINAL",0,0,"3\t ",,terminal_output +2291,8129406,"TERMINAL",0,0,"4\t ",,terminal_output +2292,8130448,"TERMINAL",0,0,"5\t ",,terminal_output +2293,8131512,"TERMINAL",0,0,"6\t ",,terminal_output +2294,8132527,"TERMINAL",0,0,"7\t ",,terminal_output +2295,8133663,"TERMINAL",0,0,"8\t 
",,terminal_output +2296,8134610,"TERMINAL",0,0,"9\t ",,terminal_output +2297,8135811,"TERMINAL",0,0,"50\t ",,terminal_output +2298,8136694,"TERMINAL",0,0,"1\t ",,terminal_output +2299,8137859,"TERMINAL",0,0,"2\t ",,terminal_output +2300,8138773,"TERMINAL",0,0,"3\t ",,terminal_output +2301,8139949,"TERMINAL",0,0,"4\t ",,terminal_output +2302,8140862,"TERMINAL",0,0,"5\t ",,terminal_output +2303,8141903,"TERMINAL",0,0,"6\t ",,terminal_output +2304,8142994,"TERMINAL",0,0,"7\t ",,terminal_output +2305,8144007,"TERMINAL",0,0,"8\t ",,terminal_output +2306,8145034,"TERMINAL",0,0,"9\t ",,terminal_output +2307,8146082,"TERMINAL",0,0,"6:00\t ",,terminal_output +2308,8147109,"TERMINAL",0,0,"1\t ",,terminal_output +2309,8148160,"TERMINAL",0,0,"2\t ",,terminal_output +2310,8149227,"TERMINAL",0,0,"3\t ",,terminal_output +2311,8150235,"TERMINAL",0,0,"4\t ",,terminal_output +2312,8151377,"TERMINAL",0,0,"5\t ",,terminal_output +2313,8152312,"TERMINAL",0,0,"6\t ",,terminal_output +2314,8153352,"TERMINAL",0,0,"8\t ",,terminal_output +2315,8154405,"TERMINAL",0,0,"bash",,terminal_focus +2316,8154420,"TERMINAL",0,0,"9\t ",,terminal_output +2317,8155367,"TERMINAL",0,0,"srun",,terminal_focus +2318,8155481,"TERMINAL",0,0,"10\t ",,terminal_output +2319,8155959,"slurm/dev/mihir/horeka/yolo-runs/train_dynamics_new_arch_speedrun.sbatch",0,0,"",shellscript,tab +2320,8156486,"TERMINAL",0,0,"1\t ",,terminal_output +2321,8157531,"TERMINAL",0,0,"2\t ",,terminal_output +2322,8158649,"TERMINAL",0,0,"3\t ",,terminal_output +2323,8159510,"slurm/dev/mihir/horeka/yolo-runs/train_dynamics_new_arch_speedrun.sbatch",1066,0,"",shellscript,selection_mouse +2324,8159603,"TERMINAL",0,0,"4\t ",,terminal_output +2325,8160017,"slurm/dev/mihir/horeka/yolo-runs/train_dynamics_new_arch_speedrun.sbatch",1043,0,"",shellscript,selection_mouse +2326,8160647,"TERMINAL",0,0,"5\t ",,terminal_output +2327,8161726,"TERMINAL",0,0,"6\t ",,terminal_output +2328,8162835,"TERMINAL",0,0,"7\t ",,terminal_output +2329,8163952,"TERMINAL",0,0,"8\t ",,terminal_output +2330,8164805,"TERMINAL",0,0,"9\t ",,terminal_output +2331,8165871,"TERMINAL",0,0,"20\t ",,terminal_output +2332,8166976,"TERMINAL",0,0,"1\t ",,terminal_output +2333,8168017,"TERMINAL",0,0,"2\t ",,terminal_output +2334,8168969,"TERMINAL",0,0,"3\t ",,terminal_output +2335,8170020,"TERMINAL",0,0,"4\t ",,terminal_output +2336,8171045,"TERMINAL",0,0,"5\t ",,terminal_output +2337,8171561,"slurm/dev/mihir/horeka/overfit_sample/causal/dynamics_overfit_sample.sbatch",0,0,"#!/usr/bin/env bash\n\n#SBATCH --nodes=1\n#SBATCH --ntasks-per-node=1\n#SBATCH --time=48:00:00\n#SBATCH --partition=accelerated\n#SBATCH --cpus-per-task=5\n#SBATCH --gres=gpu:1\n#SBATCH --output=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir/yoloruns/%x_%j.log\n#SBATCH --error=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir/yoloruns/%x_%j.log\n#SBATCH --job-name=train_dynamics_overfit_sample_causal\n\n# Log the sbatch script\ncat $0\n\nmodule unload mpi/openmpi/5.0\nmodule unload devel/cuda/12.4\n# source .venv/bin/activate\n\narray_records_dir=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_new/open_ai_minecraft_arrayrecords_chunked\n\njob_name=$SLURM_JOB_NAME\nslurm_job_id=$SLURM_JOB_ID\n\nCHECKPOINT_DIR=$ws_dir/checkpoints/causal/overfit-seed69-1/$job_name/$slurm_job_id\nmkdir -p $CHECKPOINT_DIR\n\ntokenizer_ckpt_dir=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/big-runs/tokenizer-lr-scaling/train_tokenizer_lr_sweep_1e-4\n\nenv | grep SLURM\n\nsrun 
python train_dynamics.py \\n --save_ckpt \\n --seed=69 \\n --num_steps=2000 \\n --warmup_steps=0 \\n --wsd_decay_steps=0 \\n --ckpt_dir $CHECKPOINT_DIR \\n --batch_size=12 \\n --init_lr=1e-4 \\n --max_lr=1e-4 \\n --log_image_interval=1000 \\n --log \\n --log_checkpoint_interval=1000 \\n --name=dynamics-causal-overfit-$slurm_job_id \\n --tags dynamics causal overfit \\n --entity instant-uv \\n --project jafar \\n --tokenizer_checkpoint=$tokenizer_ckpt_dir \\n --data_dir $array_records_dir \\n --dyna_dim=128 \\n --dyna_num_blocks=2 \\n --dyna_num_heads=4\n ",shellscript,tab +2338,8172077,"TERMINAL",0,0,"6\t ",,terminal_output +2339,8172558,"slurm/dev/mihir/horeka/overfit_sample/causal/dynamics_overfit_sample.sbatch",1012,0,"",shellscript,selection_mouse +2340,8172579,"slurm/dev/mihir/horeka/overfit_sample/causal/dynamics_overfit_sample.sbatch",1011,0,"",shellscript,selection_command +2341,8173128,"TERMINAL",0,0,"7\t ",,terminal_output +2342,8173208,"slurm/dev/mihir/horeka/overfit_sample/causal/dynamics_overfit_sample.sbatch",1079,0,"",shellscript,selection_mouse +2343,8173225,"slurm/dev/mihir/horeka/overfit_sample/causal/dynamics_overfit_sample.sbatch",1078,0,"",shellscript,selection_command +2344,8174168,"TERMINAL",0,0,"8\t ",,terminal_output +2345,8174641,"slurm/dev/mihir/horeka/overfit_sample/causal/dynamics_overfit_sample.sbatch",1094,0,"",shellscript,selection_command +2346,8174828,"slurm/dev/mihir/horeka/overfit_sample/causal/dynamics_overfit_sample.sbatch",1117,0,"",shellscript,selection_command +2347,8174988,"slurm/dev/mihir/horeka/overfit_sample/causal/dynamics_overfit_sample.sbatch",1140,0,"",shellscript,selection_command +2348,8175117,"slurm/dev/mihir/horeka/overfit_sample/causal/dynamics_overfit_sample.sbatch",1166,0,"",shellscript,selection_command +2349,8175205,"TERMINAL",0,0,"9\t ",,terminal_output +2350,8175259,"slurm/dev/mihir/horeka/overfit_sample/causal/dynamics_overfit_sample.sbatch",1199,0,"",shellscript,selection_command +2351,8175504,"slurm/dev/mihir/horeka/overfit_sample/causal/dynamics_overfit_sample.sbatch",1200,0,"",shellscript,selection_command +2352,8175675,"slurm/dev/mihir/horeka/overfit_sample/causal/dynamics_overfit_sample.sbatch",1201,0,"",shellscript,selection_command +2353,8175801,"slurm/dev/mihir/horeka/overfit_sample/causal/dynamics_overfit_sample.sbatch",1202,0,"",shellscript,selection_command +2354,8175947,"slurm/dev/mihir/horeka/overfit_sample/causal/dynamics_overfit_sample.sbatch",1203,0,"",shellscript,selection_command +2355,8176093,"slurm/dev/mihir/horeka/overfit_sample/causal/dynamics_overfit_sample.sbatch",1204,0,"",shellscript,selection_command +2356,8176254,"TERMINAL",0,0,"30\t ",,terminal_output +2357,8176464,"slurm/dev/mihir/horeka/overfit_sample/causal/dynamics_overfit_sample.sbatch",1203,1,"",shellscript,content +2358,8176583,"slurm/dev/mihir/horeka/overfit_sample/causal/dynamics_overfit_sample.sbatch",1202,1,"",shellscript,content +2359,8176899,"slurm/dev/mihir/horeka/overfit_sample/causal/dynamics_overfit_sample.sbatch",1202,0,"1",shellscript,content +2360,8176900,"slurm/dev/mihir/horeka/overfit_sample/causal/dynamics_overfit_sample.sbatch",1203,0,"",shellscript,selection_keyboard +2361,8177310,"TERMINAL",0,0,"1\t ",,terminal_output +2362,8177719,"slurm/dev/mihir/horeka/overfit_sample/causal/dynamics_overfit_sample.sbatch",1202,0,"",shellscript,selection_command +2363,8178328,"TERMINAL",0,0,"2\t ",,terminal_output +2364,8179414,"TERMINAL",0,0,"4\t ",,terminal_output +2365,8180463,"TERMINAL",0,0,"5\t ",,terminal_output 
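The --flags passed to train_dynamics.py in these sbatch scripts are generated from the Args dataclass by tyro, so editing a flag such as --batch_size in the script is equivalent to overriding the dataclass default. A minimal sketch of the pattern with a few illustrative fields (not the full Args class):

from dataclasses import dataclass
import tyro

@dataclass
class Args:
    num_steps: int = 200_000
    batch_size: int = 36
    init_lr: float = 0.0
    max_lr: float = 3e-5
    ckpt_dir: str = ""

# e.g. python train_dynamics.py --num_steps=2000 --batch_size=1 --max_lr=1e-4
args = tyro.cli(Args)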
+2366,8181443,"TERMINAL",0,0,"6\t ",,terminal_output +2367,8182507,"TERMINAL",0,0,"7\t ",,terminal_output +2368,8183541,"slurm/dev/mihir/horeka/overfit_sample/causal/dynamics_overfit_sample.sbatch",0,0,"",shellscript,tab +2369,8183560,"TERMINAL",0,0,"8\t ",,terminal_output +2370,8184558,"TERMINAL",0,0,"9\t ",,terminal_output +2371,8185606,"TERMINAL",0,0,"40\t ",,terminal_output +2372,8186705,"TERMINAL",0,0,"1\t ",,terminal_output +2373,8187734,"TERMINAL",0,0,"2\t ",,terminal_output +2374,8188755,"TERMINAL",0,0,"3\t ",,terminal_output +2375,8189781,"TERMINAL",0,0,"4\t ",,terminal_output +2376,8190869,"TERMINAL",0,0,"5\t ",,terminal_output +2377,8191846,"TERMINAL",0,0,"6\t ",,terminal_output +2378,8192891,"TERMINAL",0,0,"7\t ",,terminal_output +2379,8193977,"TERMINAL",0,0,"8\t ",,terminal_output +2380,8195004,"TERMINAL",0,0,"9\t ",,terminal_output +2381,8196029,"TERMINAL",0,0,"50\t ",,terminal_output +2382,8197061,"TERMINAL",0,0,"1\t ",,terminal_output +2383,8198094,"TERMINAL",0,0,"2\t ",,terminal_output +2384,8199136,"TERMINAL",0,0,"3\t ",,terminal_output +2385,8200223,"TERMINAL",0,0,"4\t ",,terminal_output +2386,8200257,"slurm/dev/mihir/horeka/overfit_sample/causal/dynamics_overfit_sample.sbatch",1272,0,"",shellscript,selection_mouse +2387,8201215,"TERMINAL",0,0,"5\t ",,terminal_output +2388,8201476,"slurm/dev/mihir/horeka/overfit_sample/causal/dynamics_overfit_sample.sbatch",1274,0,"",shellscript,selection_mouse +2389,8202182,"slurm/dev/mihir/horeka/overfit_sample/causal/dynamics_overfit_sample.sbatch",1274,1,"",shellscript,content +2390,8202264,"TERMINAL",0,0,"6\t ",,terminal_output +2391,8203296,"TERMINAL",0,0,"7\t ",,terminal_output +2392,8204384,"TERMINAL",0,0,"8\t ",,terminal_output +2393,8205444,"TERMINAL",0,0,"7:00\t ",,terminal_output +2394,8206469,"TERMINAL",0,0,"1\t ",,terminal_output +2395,8207494,"TERMINAL",0,0,"2\t ",,terminal_output +2396,8208516,"TERMINAL",0,0,"3\t ",,terminal_output +2397,8209574,"TERMINAL",0,0,"4\t ",,terminal_output +2398,8210670,"TERMINAL",0,0,"5\t ",,terminal_output +2399,8211691,"TERMINAL",0,0,"6\t ",,terminal_output +2400,8212663,"TERMINAL",0,0,"7\t ",,terminal_output +2401,8212966,"train_dynamics.py",0,0,"",python,tab +2402,8213714,"TERMINAL",0,0,"8\t ",,terminal_output +2403,8214811,"TERMINAL",0,0,"9\t ",,terminal_output +2404,8214965,"slurm/dev/mihir/horeka/overfit_sample/causal/dynamics_overfit_sample.sbatch",0,0,"",shellscript,tab +2405,8215889,"TERMINAL",0,0,"10\t ",,terminal_output +2406,8216832,"TERMINAL",0,0,"1\t ",,terminal_output +2407,8217869,"TERMINAL",0,0,"2\t ",,terminal_output +2408,8218917,"TERMINAL",0,0,"3\t ",,terminal_output +2409,8219950,"TERMINAL",0,0,"4\t ",,terminal_output +2410,8220773,"TERMINAL",0,0,"s",,terminal_output +2411,8220898,"TERMINAL",0,0,"[?25lo[?25h",,terminal_output +2412,8221013,"TERMINAL",0,0,"[?25lu[?25h",,terminal_output +2413,8221013,"TERMINAL",0,0,"5\t ",,terminal_output +2414,8221065,"TERMINAL",0,0,"[?25lr[?25h",,terminal_output +2415,8221297,"TERMINAL",0,0,"[?25lc[?25h",,terminal_output +2416,8221410,"TERMINAL",0,0,"[?25le[?25h",,terminal_output +2417,8221486,"TERMINAL",0,0,"[?25l [?25h",,terminal_output +2418,8221594,"TERMINAL",0,0,"[?25l.[?25h",,terminal_output +2419,8221672,"TERMINAL",0,0,"[?25lv[?25h",,terminal_output +2420,8221781,"TERMINAL",0,0,"env/",,terminal_output +2421,8222034,"TERMINAL",0,0,"[?25lb[?25h",,terminal_output +2422,8222047,"TERMINAL",0,0,"6\t ",,terminal_output +2423,8222103,"TERMINAL",0,0,"in/",,terminal_output +2424,8222411,"TERMINAL",0,0,"[?25la[?25h",,terminal_output 
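Each "+N,..." row in this file is one recorded event: a sequence number, a millisecond offset from the recording start, the affected file (or TERMINAL), a range offset and length, the inserted text, a language id, and an event type such as terminal_output or selection_command. A minimal sketch for reading such rows back, assuming only the column layout visible in the rows themselves:

import csv

def iter_events(path: str):
    # Yields (sequence, time_ms, file, offset, length, text, language, kind).
    with open(path, newline="") as f:
        reader = csv.reader(f)
        next(reader)  # skip the header row
        for seq, time_ms, fname, offset, length, text, lang, kind in reader:
            yield int(seq), int(time_ms), fname, int(offset), int(length), text, lang, kind

# e.g. print just the recorded terminal stream:
# for event in iter_events("source.csv"):
#     if event[-1] == "terminal_output":
#         print(event[5], end="")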
+2425,8222487,"TERMINAL",0,0,"[?25lc[?25h",,terminal_output +2426,8222822,"TERMINAL",0,0,"tivate",,terminal_output +2427,8223081,"TERMINAL",0,0,"7\t ",,terminal_output +2428,8223187,"TERMINAL",0,0,"\r\n[?2004l\r]0;tum_cte0515@hkn0901:~/Projects/jafar[?2004h(jafar) [tum_cte0515@hkn0901 jafar]$ ",,terminal_output +2429,8223545,"TERMINAL",0,0,"[?25lsh[?25h[?25lh[?25h",,terminal_output +2430,8223691,"TERMINAL",0,0,"[?25l [?25h",,terminal_output +2431,8224045,"TERMINAL",0,0,"slurm/dev/mihir/horeka/overfit_sample/causal/dynamics_overfit_sample.sbatch",,terminal_output +2432,8224107,"TERMINAL",0,0,"8\t ",,terminal_output +2433,8224977,"TERMINAL",0,0,"\rslurm/dev/mihir/horeka/overfit_sample/causal/dynamics_overfit_sample.sbatch\r\n[?2004l\r#!/usr/bin/env bash\r\n\r\n#SBATCH --nodes=1\r\n#SBATCH --ntasks-per-node=1\r\n#SBATCH --time=48:00:00\r\n#SBATCH --partition=accelerated\r\n#SBATCH --cpus-per-task=5\r\n#SBATCH --gres=gpu:1\r\n#SBATCH --output=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir/yoloruns/%x_%j.log\r\n#SBATCH --error=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir/yoloruns/%x_%j.log\r\n#SBATCH --job-name=train_dynamics_overfit_sample_causal\r\n\r\n# Log the sbatch script\r\ncat $0\r\n\r\nmodule unload mpi/openmpi/5.0\r\nmodule unload devel/cuda/12.4\r\n# source .venv/bin/activate\r\n\r\narray_records_dir=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_new/open_ai_minecraft_arrayrecords_chunked\r\n\r\njob_name=$SLURM_JOB_NAME\r\nslurm_job_id=$SLURM_JOB_ID\r\n\r\nCHECKPOINT_DIR=$ws_dir/checkpoints/causal/overfit-seed69-1/$job_name/$slurm_job_id\r\nmkdir -p $CHECKPOINT_DIR\r\n\r\ntokenizer_ckpt_dir=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/big-runs/tokenizer-lr-scaling/train_tokenizer_lr_sweep_1e-4\r\n\r\nenv | grep SLURM\r\n\r\nsrun python train_dynamics.py \\r\n --save_ckpt \\r\n --seed=69 \\r\n --num_steps=2000 \\r\n --warmup_steps=0 \\r\n --wsd_decay_steps=0 \\r\n --ckpt_dir $CHECKPOINT_DIR \\r\n --batch_size=1 \\r\n --init_lr=1e-4 \\r\n --max_lr=1e-4 \\r\n --log_image_interval=100 \\r\n --log \\r\n --log_checkpoint_interval=1000 \\r\n --name=dynamics-causal-overfit-$slurm_job_id \\r\n --tags dynamics causal overfit \\r\n --entity instant-uv \\r\n --project jafar \\r\n --tokenizer_checkpoint=$tokenizer_ckpt_dir \\r\n --data_dir $array_records_dir \\r\n --dyna_dim=128 \\r\n --dyna_num_blocks=2 \\r\n --dyna_num_heads=4\r\n ",,terminal_output +2434,8225155,"TERMINAL",0,0,"91",,terminal_output 
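The SLURM_* dump printed below comes from the `env | grep SLURM` line in the sbatch script; on SLURM clusters, jax.distributed.initialize() in train_dynamics.py can derive the coordinator address, process count, and process id from these same variables. A minimal sketch of reading a few of them by hand, for illustration only:

import os

def slurm_proc_info() -> tuple[int, int, str]:
    # Values such as SLURM_NTASKS=1 and SLURM_PROCID=0 appear in the dump below.
    num_procs = int(os.environ.get("SLURM_NTASKS", "1"))
    proc_id = int(os.environ.get("SLURM_PROCID", "0"))
    nodelist = os.environ.get("SLURM_JOB_NODELIST", "localhost")
    return num_procs, proc_id, nodelist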
+2435,8225171,"TERMINAL",0,0,"SLURM_STEP_NUM_TASKS=1\r\nSLURM_JOB_USER=tum_cte0515\r\nSLURM_TASKS_PER_NODE=1\r\nSLURM_JOB_UID=999226\r\nSLURM_TASK_PID=436020\r\nSLURM_JOB_GPUS=0\r\nSLURM_LOCALID=0\r\nSLURM_SUBMIT_DIR=/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar\r\nSLURMD_NODENAME=hkn0901\r\nSLURM_JOB_START_TIME=1753366425\r\nSLURM_STEP_NODELIST=hkn0901\r\nSLURM_CLUSTER_NAME=hk\r\nSLURM_JOB_END_TIME=1753370025\r\nSLURM_PMI2_SRUN_PORT=34243\r\nSLURM_CPUS_ON_NODE=6\r\nSLURM_JOB_CPUS_PER_NODE=6\r\nSLURM_GPUS_ON_NODE=1\r\nSLURM_GTIDS=0\r\nSLURM_JOB_PARTITION=dev_accelerated-h100\r\nSLURM_TRES_PER_TASK=cpu=5\r\nSLURM_OOM_KILL_STEP=0\r\nSLURM_JOB_NUM_NODES=1\r\nSLURM_STEPID=4294967290\r\nSLURM_JOBID=3373280\r\nSLURM_PTY_PORT=42147\r\nSLURM_JOB_QOS=normal\r\nSLURM_LAUNCH_NODE_IPADDR=10.0.7.199\r\nSLURM_PTY_WIN_ROW=48\r\nSLURM_PMI2_PROC_MAPPING=(vector,(0,1,1))\r\nSLURMD_DEBUG=2\r\nSLURM_PROCID=0\r\nSLURM_CPUS_PER_TASK=5\r\nSLURM_NTASKS=1\r\nSLURM_TOPOLOGY_ADDR=hkibb.hkibbi4.hkibbi4e1.hkn0901\r\nSLURM_TOPOLOGY_ADDR_PATTERN=switch.switch.switch.node\r\nSLURM_SRUN_COMM_HOST=10.0.7.199\r\nSLURM_SCRIPT_CONTEXT=prolog_task\r\nSLURM_PTY_WIN_COL=194\r\nSLURM_NODELIST=hkn0901\r\nSLURM_SRUN_COMM_PORT=46593\r\nSLURM_STEP_ID=4294967290\r\nSLURM_JOB_ACCOUNT=hk-project-p0023960\r\nSLURM_PRIO_PROCESS=0\r\nSLURM_NPROCS=1\r\nSLURM_NNODES=1\r\nSLURM_SUBMIT_HOST=hkn1991.localdomain\r\nSLURM_JOB_ID=3373280\r\nSLURM_NODEID=0\r\nSLURM_STEP_NUM_NODES=1\r\nSLURM_STEP_TASKS_PER_NODE=1\r\nSLURM_MPI_TYPE=pmi2\r\nSLURM_PMI2_STEP_NODES=hkn0901\r\nSLURM_CONF=/etc/slurm/slurm.conf\r\nSLURM_JOB_NAME=interactive\r\nSLURM_NTASKS_PER_NODE=1\r\nSLURM_STEP_LAUNCHER_PORT=46593\r\nSLURM_JOB_GID=502226\r\nSLURM_JOB_NODELIST=hkn0901\r\n",,terminal_output +2436,8225336,"TERMINAL",0,0,"GpuFreq=control_disabled\r\n",,terminal_output +2437,8226234,"TERMINAL",0,0,"20\t ",,terminal_output +2438,8227259,"TERMINAL",0,0,"1\t ",,terminal_output +2439,8228282,"TERMINAL",0,0,"2\t ",,terminal_output +2440,8229379,"TERMINAL",0,0,"3\t ",,terminal_output +2441,8230432,"TERMINAL",0,0,"5\t ",,terminal_output +2442,8231457,"TERMINAL",0,0,"6\t ",,terminal_output +2443,8232482,"TERMINAL",0,0,"7\t ",,terminal_output +2444,8233472,"TERMINAL",0,0,"8\t ",,terminal_output +2445,8234535,"TERMINAL",0,0,"9\t ",,terminal_output +2446,8235543,"TERMINAL",0,0,"30\t ",,terminal_output +2447,8236588,"TERMINAL",0,0,"1\t ",,terminal_output +2448,8237703,"TERMINAL",0,0,"2\t ",,terminal_output +2449,8238670,"TERMINAL",0,0,"3\t ",,terminal_output +2450,8239716,"TERMINAL",0,0,"4\t ",,terminal_output +2451,8240774,"TERMINAL",0,0,"5\t ",,terminal_output +2452,8241697,"TERMINAL",0,0,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/tyro/_parsers.py:347: UserWarning: The field `param-dtype` is annotated with type ``, but the default value `` has type ``. We'll try to handle this gracefully, but it may cause unexpected behavior.\r\n warnings.warn(message)\r\n/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/tyro/_parsers.py:347: UserWarning: The field `dtype` is annotated with type ``, but the default value `` has type ``. 
We'll try to handle this gracefully, but it may cause unexpected behavior.\r\n warnings.warn(message)\r\n",,terminal_output +2453,8241808,"TERMINAL",0,0,"6\t ",,terminal_output +2454,8242832,"TERMINAL",0,0,"7\t ",,terminal_output +2455,8243880,"TERMINAL",0,0,"8\t ",,terminal_output +2456,8244389,"TERMINAL",0,0,"2025-07-24 16:17:39.084860: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +2457,8244976,"TERMINAL",0,0,"9\t ",,terminal_output +2458,8246010,"TERMINAL",0,0,"40\t ",,terminal_output +2459,8247042,"TERMINAL",0,0,"1\t ",,terminal_output +2460,8248032,"TERMINAL",0,0,"2\t ",,terminal_output +2461,8249076,"TERMINAL",0,0,"3\t ",,terminal_output +2462,8250113,"TERMINAL",0,0,"4\t ",,terminal_output +2463,8251154,"TERMINAL",0,0,"5\t ",,terminal_output +2464,8252193,"TERMINAL",0,0,"6\t ",,terminal_output +2465,8253267,"TERMINAL",0,0,"7\t ",,terminal_output +2466,8254291,"TERMINAL",0,0,"2025-07-24 16:17:48.944838: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +2467,8254307,"TERMINAL",0,0,"8\t ",,terminal_output +2468,8255325,"TERMINAL",0,0,"9\t ",,terminal_output +2469,8256444,"TERMINAL",0,0,"51\t ",,terminal_output +2470,8257398,"TERMINAL",0,0,"2\t ",,terminal_output +2471,8258439,"TERMINAL",0,0,"3\t ",,terminal_output +2472,8259479,"TERMINAL",0,0,"4\t ",,terminal_output +2473,8260538,"TERMINAL",0,0,"5\t ",,terminal_output +2474,8261558,"TERMINAL",0,0,"6\t ",,terminal_output +2475,8262602,"TERMINAL",0,0,"7\t ",,terminal_output +2476,8263649,"TERMINAL",0,0,"8\t ",,terminal_output +2477,8264736,"TERMINAL",0,0,"9\t ",,terminal_output +2478,8265761,"TERMINAL",0,0,"8:00\t ",,terminal_output +2479,8266789,"TERMINAL",0,0,"1\t ",,terminal_output +2480,8267909,"TERMINAL",0,0,"2\t ",,terminal_output +2481,8268935,"TERMINAL",0,0,"3\t ",,terminal_output +2482,8269890,"TERMINAL",0,0,"4\t ",,terminal_output +2483,8270982,"TERMINAL",0,0,"5\t ",,terminal_output +2484,8272018,"TERMINAL",0,0,"6\t ",,terminal_output +2485,8272197,"TERMINAL",0,0,"2025-07-24 16:18:06.889281: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +2486,8273047,"TERMINAL",0,0,"7\t ",,terminal_output +2487,8274162,"TERMINAL",0,0,"8\t ",,terminal_output +2488,8274435,"TERMINAL",0,0,"wandb: Currently logged in as: mihir-mahajan2002 (instant-uv) to https://api.wandb.ai. 
Use `wandb login --relogin` to force relogin\r\n",,terminal_output +2489,8275097,"TERMINAL",0,0,"9\t ",,terminal_output +2490,8275207,"TERMINAL",0,0,"wandb: Tracking run with wandb version 0.19.11\r\nwandb: Run data is saved locally in /hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/wandb/run-20250724_161809-wt6urmv2\r\nwandb: Run `wandb offline` to turn off syncing.\r\nwandb: Syncing run dynamics-causal-overfit-3373280\r\nwandb: ⭐️ View project at https://wandb.ai/instant-uv/jafar\r\nwandb: 🚀 View run at https://wandb.ai/instant-uv/jafar/runs/wt6urmv2\r\n",,terminal_output +2491,8276131,"TERMINAL",0,0,"10\t ",,terminal_output +2492,8277179,"TERMINAL",0,0,"1\t ",,terminal_output +2493,8278219,"TERMINAL",0,0,"2\t ",,terminal_output +2494,8279280,"TERMINAL",0,0,"30",,terminal_output +2495,8280171,"train_dynamics.py",0,0,"",python,tab +2496,8280352,"TERMINAL",0,0,"4\t ",,terminal_output +2497,8281337,"TERMINAL",0,0,"5\t ",,terminal_output +2498,8282382,"TERMINAL",0,0,"7\t ",,terminal_output +2499,8283428,"TERMINAL",0,0,"8\t ",,terminal_output +2500,8284461,"TERMINAL",0,0,"9\t ",,terminal_output +2501,8285499,"TERMINAL",0,0,"20\t ",,terminal_output +2502,8286550,"TERMINAL",0,0,"1\t ",,terminal_output +2503,8287580,"TERMINAL",0,0,"2\t ",,terminal_output +2504,8288614,"TERMINAL",0,0,"3\t ",,terminal_output +2505,8289652,"TERMINAL",0,0,"4\t ",,terminal_output +2506,8290748,"TERMINAL",0,0,"5\t ",,terminal_output +2507,8291770,"TERMINAL",0,0,"6\t ",,terminal_output +2508,8292785,"TERMINAL",0,0,"7\t ",,terminal_output +2509,8293823,"TERMINAL",0,0,"8\t ",,terminal_output +2510,8294857,"TERMINAL",0,0,"9\t ",,terminal_output +2511,8295970,"TERMINAL",0,0,"30\t ",,terminal_output +2512,8296519,"slurm/dev/mihir/horeka/overfit_sample/causal/dynamics_overfit_sample.sbatch",0,0,"",shellscript,tab +2513,8296929,"TERMINAL",0,0,"1\t ",,terminal_output +2514,8297967,"TERMINAL",0,0,"2\t ",,terminal_output +2515,8298776,"slurm/dev/mihir/horeka/overfit_sample/causal/dynamics_overfit_sample.sbatch",1201,0,"",shellscript,selection_mouse +2516,8298897,"slurm/dev/mihir/horeka/overfit_sample/causal/dynamics_overfit_sample.sbatch",1191,10,"batch_size",shellscript,selection_mouse +2517,8299013,"TERMINAL",0,0,"3\t ",,terminal_output +2518,8300065,"TERMINAL",0,0,"4\t ",,terminal_output +2519,8301092,"TERMINAL",0,0,"5\t ",,terminal_output +2520,8302118,"TERMINAL",0,0,"6\t ",,terminal_output +2521,8303161,"TERMINAL",0,0,"7\t ",,terminal_output +2522,8304262,"TERMINAL",0,0,"8\t ",,terminal_output +2523,8305252,"TERMINAL",0,0,"9\t ",,terminal_output +2524,8306311,"TERMINAL",0,0,"40\t ",,terminal_output +2525,8307345,"TERMINAL",0,0,"1\t ",,terminal_output +2526,8308351,"TERMINAL",0,0,"3\t ",,terminal_output +2527,8309392,"TERMINAL",0,0,"4\t ",,terminal_output +2528,8310270,"TERMINAL",0,0,"WARNING:absl:Missing metrics for step 80000\r\nERROR:absl:File /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/big-runs/tokenizer-lr-scaling/train_tokenizer_lr_sweep_1e-4/080000/metrics/metrics not found.\r\nWARNING:absl:Missing metrics for step 140000\r\nERROR:absl:File /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/big-runs/tokenizer-lr-scaling/train_tokenizer_lr_sweep_1e-4/140000/metrics/metrics not found.\r\nWARNING:absl:Missing metrics for step 40000\r\nERROR:absl:File /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/big-runs/tokenizer-lr-scaling/train_tokenizer_lr_sweep_1e-4/040000/metrics/metrics not found.\r\nWARNING:absl:Missing metrics for step 
144000\r\nERROR:absl:File /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/big-runs/tokenizer-lr-scaling/train_tokenizer_lr_sweep_1e-4/144000/metrics/metrics not found.\r\nWARNING:absl:Missing metrics for step 146000\r\nERROR:absl:File /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/big-runs/tokenizer-lr-scaling/train_tokenizer_lr_sweep_1e-4/146000/metrics/metrics not found.\r\nWARNING:absl:Missing metrics for step 120000\r\nERROR:absl:File /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/big-runs/tokenizer-lr-scaling/train_tokenizer_lr_sweep_1e-4/120000/metrics/metrics not found.\r\nWARNING:absl:Missing metrics for step 145000\r\nERROR:absl:File /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/big-runs/tokenizer-lr-scaling/train_tokenizer_lr_sweep_1e-4/145000/metrics/metrics not found.\r\nWARNING:absl:Missing metrics for step 60000\r\nERROR:absl:File /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/big-runs/tokenizer-lr-scaling/train_tokenizer_lr_sweep_1e-4/060000/metrics/metrics not found.\r\nWARNING:absl:Missing metrics for step 20000\r\nERROR:absl:File /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/big-runs/tokenizer-lr-scaling/train_tokenizer_lr_sweep_1e-4/020000/metrics/metrics not found.\r\nWARNING:absl:Missing metrics for step 100000\r\nERROR:absl:File /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/big-runs/tokenizer-lr-scaling/train_tokenizer_lr_sweep_1e-4/100000/metrics/metrics not found.\r\n",,terminal_output +2529,8310425,"TERMINAL",0,0,"5\t ",,terminal_output +2530,8311022,"TERMINAL",0,0,"Running on 1 devices.\r\nCounting all components: ['tokenizer', 'lam', 'dynamics']\r\nParameter counts:\r\n{'tokenizer': 33750256, 'lam': 17229792, 'dynamics': 1603072, 'total': 52583120}\r\n",,terminal_output +2531,8311110,"TERMINAL",0,0,"Traceback (most recent call last):\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py"", line 341, in <module>\r\n train_state = restore_genie_components(\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/genie.py"", line 405, in restore_genie_components\r\n restored_tokenizer = tokenizer_checkpoint_manager.restore(\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/orbax/checkpoint/checkpoint_manager.py"", line 1608, in restore\r\n restored = self._checkpointer.restore(restore_directory, args=args)\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/orbax/checkpoint/_src/checkpointers/async_checkpointer.py"", line 558, in restore\r\n return super().restore(directory, *args, **kwargs)\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/orbax/checkpoint/_src/checkpointers/checkpointer.py"", line 300, in restore\r\n restored = self._restore(directory, args=ckpt_args)\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/orbax/checkpoint/_src/checkpointers/checkpointer.py"", line 319, in _restore\r\n return self._handler.restore(directory, args=args)\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/orbax/checkpoint/_src/handlers/composite_checkpoint_handler.py"", line 837, in restore\r\n restored[item_name] = handler.restore(\r\n File 
""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/orbax/checkpoint/_src/handlers/standard_checkpoint_handler.py"", line 246, in restore\r\n return self._impl.restore(\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/orbax/checkpoint/_src/handlers/pytree_checkpoint_handler.py"", line 796, in restore\r\n return self._handler_impl.restore(directory, args=args)\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/orbax/checkpoint/_src/handlers/base_pytree_checkpoint_handler.py"", line 737, in restore\r\n raise ValueError(\r\nValueError: User-provided restore item and on-disk value metadata tree structures do not match: {'params': {'params': {'decoder': {'STBlock_0': {'Dense_1': Diff(lhs={'bias': ShapeDtypeStruct(shape=(512,), dtype=float32, sharding=NamedSharding(mesh=Mesh('data': 1, axis_types=(Auto,)), spec=PartitionSpec(), memory_kind=device)), 'kernel': ShapeDtypeStruct(shape=(2048, 512), dtype=float32, sharding=NamedSharding(mesh=Mesh('data': 1, axis_types=(Auto,)), spec=PartitionSpec(), memory_kind=device))}, rhs=None)}, 'STBlock_1': {'Dense_1': Diff(lhs={'bias': ShapeDtypeStruct(shape=(512,), dtype=float32, sharding=NamedSharding(mesh=Mesh('data': 1, axis_types=(Auto,)), spec=PartitionSpec(), memory_kind=device)), 'kernel': ShapeDtypeStruct(shape=(2048, 512), dtype=float32, sharding=NamedSharding(mesh=Mesh('data': 1, axis_types=(Auto,)), spec=PartitionSpec(), memory_kind=device))}, rhs=None)}, 'STBlock_2': {'Dense_1': Diff(lhs={'bias': ShapeDtypeStruct(shape=(512,), dtype=float32, sharding=NamedSharding(mesh=Mesh('data': 1, axis_types=(Auto,)), spec=PartitionSpec(), memory_kind=device)), 'kernel': ShapeDtypeStruct(shape=(2048, 512), dtype=float32, sharding=NamedSharding(mesh=Mesh('data': 1, axis_types=(Auto,)), spec=PartitionSpec(), memory_kind=device))}, rhs=None)}, 'STBlock_3': {'Dense_1': Diff(lhs={'bias': ShapeDtypeStruct(shape=(512,), dtype=float32, sharding=NamedSharding(mesh=Mesh('data': 1, axis_types=(Auto,)), spec=PartitionSpec(), memory_kind=device)), 'kernel': ShapeDtypeStruct(shape=(2048, 512), dtype=float32, sharding=NamedSharding(mesh=Mesh('data': 1, axis_types=(Auto,)), spec=PartitionSpec(), memory_kind=device))}, rhs=None)}, 'STBlock_4': Diff(lhs=None, rhs={'Dense_0': {'bias': ValueMetadataEntry(value_type='jax.Array', skip_deserialize=False, write_shape=(64,)), 'kernel': ValueMetadataEntry(value_type='jax.Array', skip_deserialize=False, write_shape=(64, 512))}, 'LayerNorm_0': {'bias': ValueMetadataEntry(value_type='jax.Array', skip_deserialize=False, write_shape=(64,)), 'scale': ValueMetadataEntry(value_type='jax.Array', skip_deserialize=False, write_shape=(64,))}, 'LayerNorm_1': {'bias': ValueMetadataEntry(value_type='jax.Array', skip_deserialize=False, write_shape=(64,)), 'scale': ValueMetadataEntry(value_type='jax.Array', skip_deserialize=False, write_shape=(64,))}, 'LayerNorm_2': {'bias': ValueMetadataEntry(value_type='jax.Array', skip_deserialize=False, write_shape=(64,)), 'scale': ValueMetadataEntry(value_type='jax.Array', skip_deserialize=False, write_shape=(64,))}, 'MultiHeadAttention_0': {'key': {'bias': ValueMetadataEntry(value_type='jax.Array', skip_deserialize=False, write_shape=(1, 64)), 'kernel': ValueMetadataEntry(value_type='jax.Array', skip_deserialize=False, write_shape=(64, 8, 64))}, 'out': {'bias': ValueMetadataEntry(value_type='jax.Array', skip_deserialize=False, 
write_shape=(64,)), 'kernel': ValueMetadataEntry(value_type='jax.Array', skip_deserialize=False, write_shape=(1, 64, 512))}, 'query': {'bias': ValueMetadataEntry(value_type='jax.Array', skip_deserialize=False, write_shape=(1, 64)), 'kernel': ValueMetadataEntry(value_type='jax.Array', skip_deserialize=False, write_shape=(64, 8, 64))}, 'value': {'bias': ValueMetadataEntry(value_type='jax.Array', skip_deserialize=False, write_shape=(1, 64)), 'kernel': ValueMetadataEntry(value_type='jax.Array', skip_deserialize=False, write_shape=(64, 8, 64))}}, 'MultiHeadAttention_1': {'key': {'bias': ValueMetadataEntry(value_type='jax.Array', skip_deserialize=False, write_shape=(1, 64)), 'kernel': ValueMetadataEntry(value_type='jax.Array', skip_deserialize=False, write_shape=(64, 8, 64))}, 'out': {'bias': ValueMetadataEntry(value_type='jax.Array', skip_deserialize=False, write_shape=(64,)), 'kernel': ValueMetadataEntry(value_type='jax.Array', skip_deserialize=False, write_shape=(1, 64, 512))}, 'query': {'bias': ValueMetadataEntry(value_type='jax.Array', skip_deserialize=False, write_shape=(1, 64)), 'kernel': ValueMetadataEntry(value_type='jax.Array', skip_deserialize=False, write_shape=(64, 8, 64))}, 'value': {'bias': ValueMetadataEntry(value_type='jax.Array', skip_deserialize=False, write_shape=(1, 64)), 'kernel': ValueMetadataEntry(value_type='jax.Array', skip_deserialize=False, write_shape=(64, 8, 64))}}}), 'STBlock_5': Diff(lhs=None, rhs={'Dense_0': {'bias': ValueMetadataEntry(value_type='jax.Array', skip_deserialize=False, write_shape=(64,)), 'kernel': ValueMetadataEntry(value_type='jax.Array', skip_deserialize=False, write_shape=(64, 512))}, 'LayerNorm_0': {'bias': ValueMetadataEntry(value_type='jax.Array', skip_deserialize=False, write_shape=(64,)), 'scale': ValueMetadataEntry(value_type='jax.Array', skip_deserialize=False, write_shape=(64,))}, 'LayerNorm_1': {'bias': ValueMetadataEntry(value_type='jax.Array', skip_deserialize=False, write_shape=(64,)), 'scale': ValueMetadataEntry(value_type='jax.Array', skip_deserialize=False, write_shape=(64,))}, 'LayerNorm_2': {'bias': ValueMetadataEntry(value_type='jax.Array', skip_deserialize=False, write_shape=(64,)), 'scale': ValueMetadataEntry(value_type='jax.Array', skip_deserialize=False, write_shape=(64,))}, 'MultiHeadAttention_0': {'key': {'bias': ValueMetadataEntry(value_type='jax.Array', skip_deserialize=False, write_shape=(1, 64)), 'kernel': ValueMetadataEntry(value_type='jax.Array', skip_deserialize=False, write_shape=(64, 8, 64))}, 'out': {'bias': ValueMetadataEntry(value_type='jax.Array', skip_deserialize=False, write_shape=(64,)), 'kernel': ValueMetadataEntry(value_type='jax.Array', skip_deserialize=False, write_shape=(1, 64, 512))}, 'query': {'bias': ValueMetadataEntry(value_type='jax.Array', skip_deserialize=False, write_shape=(1, 64)), 'kernel': ValueMetadataEntry(value_type='jax.Array', skip_deserialize=False, write_shape=(64, 8, 64))}, 'value': {'bias': ValueMetadataEntry(value_type='jax.Array', skip_deserialize=False, write_shape=(1, 64)), 'kernel': ValueMetadataEntry(value_type='jax.Array', skip_deserialize=False, write_shape=(64, 8, 64))}}, 'MultiHeadAttention_1': {'key': {'bias': ValueMetadataEntry(value_type='jax.Array', skip_deserialize=False, write_shape=(1, 64)), 'kernel': ValueMetadataEntry(value_type='jax.Array', skip_deserialize=False, write_shape=(64, 8, 64))}, 'out': {'bias': ValueMetadataEntry(value_type='jax.Array', skip_deserialize=False, write_shape=(64,)), 'kernel': ValueMetadataEntry(value_type='jax.Array', 
skip_deserialize=False, write_shape=(1, 64, 512))}, 'query': {'bias': ValueMetadataEntry(value_type='jax.Array', skip_deserialize=False, write_shape=(1, 64)), 'kernel': ValueMetadataEntry(value_type='jax.Array', skip_deserialize=False, write_shape=(64, 8, 64))}, 'value': {'bias': ValueMetadataEntry(value_type='jax.Array', skip_deserialize=False, write_shape=(1, 64)), 'kernel': ValueMetadataEntry(value_type='jax.Array', skip_deserialize=False, write_shape=(64, 8, 64))}}}), 'STBlock_6': Diff(lhs=None, rhs={'Dense_0': {'bias': ValueMetadataEntry(value_type='jax.Array', skip_deserialize=False, write_shape=(64,)), 'kernel': ValueMetadataEntry(value_type='jax.Array', skip_deserialize=False, write_shape=(64, 512))}, 'LayerNorm_0': {'bias': ValueMetadataEntry(value_type='jax.Array', skip_deserialize=False, write_shape=(64,)), 'scale': ValueMetadataEntry(value_type='jax.Array', skip_deserialize=False, write_shape=(64,))}, 'LayerNorm_1': {'bias': ValueMetadataEntry(value_type='jax.Array', skip_deserialize=False, write_shape=(64,)), 'scale': ValueMetadataEntry(value_type='jax.Array', skip_deserialize=False, write_shape=(64,))}, 'LayerNorm_2': {'bias': ValueMetadataEntry(value_type='jax.Array', skip_deserialize=False, write_shape=(64,)), 'scale': ValueMetadataEntry(value_type='jax.Array', skip_deserialize=False, write_shape=(64,))}, 'MultiHeadAttention_0': {'key': {'bias': ValueMetadataEntry(value_type='jax.Array', skip_deserialize=False, write_shape=(1, 64)), 'kernel': ValueMetadataEntry(value_type='jax.Array', skip_deserialize=False, write_shape=(64, 8, 64))}, 'out': {'bias': ValueMetadataEntry(value_type='jax.Array', skip_deserialize=False, write_shape=(64,)), 'kernel': ValueMetadataEntry(value_type='jax.Array', skip_deserialize=False, write_shape=(1, 64, 512))}, 'query': {'bias': ValueMetadataEntry(value_type='jax.Array', skip_deserialize=False, write_shape=(1, 64)), 'kernel': ValueMetadataEntry(value_type='jax.Array', skip_deserialize=False, write_shape=(64, 8, 64))}, 'value': {'bias': ValueMetadataEntry(value_type='jax.Array', skip_deserialize=False, write_shape=(1, 64)), 'kernel': ValueMetadataEntry(value_type='jax.Array', skip_deserialize=False, write_shape=(64, 8, 64))}}, 'MultiHeadAttention_1': {'key': {'bias': ValueMetadataEntry(value_type='jax.Array', skip_deserialize=False, write_shape=(1, 64)), 'kernel': ValueMetadataEntry(value_type='jax.Array', skip_deserialize=False, write_shape=(64, 8, 64))}, 'out': {'bias': ValueMetadataEntry(value_type='jax.Array', skip_deserialize=False, write_shape=(64,)), 'kernel': ValueMetadataEntry(value_type='jax.Array', skip_deserialize=False, write_shape=(1, 64, 512))}, 'query': {'bias': ValueMetadataEntry(value_type='jax.Array', skip_deserialize=False, write_shape=(1, 64)), 'kernel': ValueMetadataEntry(value_type='jax.Array', skip_deserialize=False, write_shape=(64, 8, 64))}, 'value': {'bias': ValueMetadataEntry(value_type='jax.Array', skip_deserialize=False, write_shape=(1, 64)), 'kernel': ValueMetadataEntry(value_type='jax.Array', skip_deserialize=False, write_shape=(64, 8, 64))}}}), 'STBlock_7': Diff(lhs=None, rhs={'Dense_0': {'bias': ValueMetadataEntry(value_type='jax.Array', skip_deserialize=False, write_shape=(64,)), 'kernel': ValueMetadataEntry(value_type='jax.Array', skip_deserialize=False, write_shape=(64, 512))}, 'LayerNorm_0': {'bias': ValueMetadataEntry(value_type='jax.Array', skip_deserialize=False, write_shape=(64,)), 'scale': ValueMetadataEntry(value_type='jax.Array', skip_deserialize=False, write_shape=(64,))}, 'LayerNorm_1': {'bias': 
ValueMetadataEntry(value_type='jax.Array', skip_deserialize=False, write_shape=(64,)), 'scale': ValueMetadataEntry(value_type='jax.Array', skip_deserialize=False, write_shape=(64,))}, 'LayerNorm_2': {'bias': ValueMetadataEntry(value_type='jax.Array', skip_deserialize=False, write_shape=(64,)), 'scale': ValueMetadataEntry(value_type='jax.Array', skip_deserialize=False, write_shape=(64,))}, 'MultiHeadAttention_0': {'key': {'bias': ValueMetadataEntry(value_type='jax.Array', skip_deserialize=False, write_shape=(1, 64)), 'kernel': ValueMetadataEntry(value_type='jax.Array', skip_deserialize=False, write_shape=(64, 8, 64))}, 'out': {'bias': ValueMetadataEntry(value_type='jax.Array', skip_deserialize=False, write_shape=(64,)), 'kernel': ValueMetadataEntry(value_type='jax.Array', skip_deserialize=False, write_shape=(1, 64, 512))}, 'query': {'bias': ValueMetadataEntry(value_type='jax.Array', skip_deserialize=False, write_shape=(1, 64)), 'kernel': ValueMetadataEntry(value_type='jax.Array', skip_deserialize=False, write_shape=(64, 8, 64))}, 'value': {'bias': ValueMetadataEntry(value_type='jax.Array', skip_deserialize=False, write_shape=(1, 64)), 'kernel': ValueMetadataEntry(value_type='jax.Array', skip_deserialize=False, write_shape=(64, 8, 64))}}, 'MultiHeadAttention_1': {'key': {'bias': ValueMetadataEntry(value_type='jax.Array', skip_deserialize=False, write_shape=(1, 64)), 'kernel': ValueMetadataEntry(value_type='jax.Array', skip_deserialize=False, write_shape=(64, 8, 64))}, 'out': {'bias': ValueMetadataEntry(value_type='jax.Array', skip_deserialize=False, write_shape=(64,)), 'kernel': ValueMetadataEntry(value_type='jax.Array', skip_deserialize=False, write_shape=(1, 64, 512))}, 'query': {'bias': ValueMetadataEntry(value_type='jax.Array', skip_deserialize=False, write_shape=(1, 64)), 'kernel': ValueMetadataEntry(value_type='jax.Array', skip_deserialize=False, write_shape=(64, 8, 64))}, 'value': {'bias': ValueMetadataEntry(value_type='jax.Array', skip_deserialize=False, write_shape=(1, 64)), 'kernel': ValueMetadataEntry(value_type='jax.Array', skip_deserialize=False, write_shape=(64, 8, 64))}}})}, 'encoder': {'STBlock_0': {'Dense_1': Diff(lhs={'bias': ShapeDtypeStruct(shape=(512,), dtype=float32, sharding=NamedSharding(mesh=Mesh('data': 1, axis_types=(Auto,)), spec=PartitionSpec(), memory_kind=device)), 'kernel': ShapeDtypeStruct(shape=(2048, 512), dtype=float32, sharding=NamedSharding(mesh=Mesh('data': 1, axis_types=(Auto,)), spec=PartitionSpec(), memory_kind=device))}, rhs=None)}, 'STBlock_1': {'Dense_1': Diff(lhs={'bias': ShapeDtypeStruct(shape=(512,), dtype=float32, sharding=NamedSharding(mesh=Mesh('data': 1, axis_types=(Auto,)), spec=PartitionSpec(), memory_kind=device)), 'kernel': ShapeDtypeStruct(shape=(2048, 512), dtype=float32, sharding=NamedSharding(mesh=Mesh('data': 1, axis_types=(Auto,)), spec=PartitionSpec(), memory_kind=device))}, rhs=None)}, 'STBlock_2': {'Dense_1': Diff(lhs={'bias': ShapeDtypeStruct(shape=(512,), dtype=float32, sharding=NamedSharding(mesh=Mesh('data': 1, axis_types=(Auto,)), spec=PartitionSpec(), memory_kind=device)), 'kernel': ShapeDtypeStruct(shape=(2048, 512), dtype=float32, sharding=NamedSharding(mesh=Mesh('data': 1, axis_types=(Auto,)), spec=PartitionSpec(), memory_kind=device))}, rhs=None)}, 'STBlock_3': {'Dense_1': Diff(lhs={'bias': ShapeDtypeStruct(shape=(512,), dtype=float32, sharding=NamedSharding(mesh=Mesh('data': 1, axis_types=(Auto,)), spec=PartitionSpec(), memory_kind=device)), 'kernel': ShapeDtypeStruct(shape=(2048, 512), dtype=float32, 
sharding=NamedSharding(mesh=Mesh('data': 1, axis_types=(Auto,)), spec=PartitionSpec(), memory_kind=device))}, rhs=None)}, 'STBlock_4': Diff(lhs=None, rhs={'Dense_0': {'bias': ValueMetadataEntry(value_type='jax.Array', skip_deserialize=False, write_shape=(64,)), 'kernel': ValueMetadataEntry(value_type='jax.Array', skip_deserialize=False, write_shape=(64, 512))}, 'LayerNorm_0': {'bias': ValueMetadataEntry(value_type='jax.Array', skip_deserialize=False, write_shape=(64,)), 'scale': ValueMetadataEntry(value_type='jax.Array', skip_deserialize=False, write_shape=(64,))}, 'LayerNorm_1': {'bias': ValueMetadataEntry(value_type='jax.Array', skip_deserialize=False, write_shape=(64,)), 'scale': ValueMetadataEntry(value_type='jax.Array', skip_deserialize=False, write_shape=(64,))}, 'LayerNorm_2': {'bias': ValueMetadataEntry(value_type='jax.Array', skip_deserialize=False, write_shape=(64,)), 'scale': ValueMetadataEntry(value_type='jax.Array', skip_deserialize=False, write_shape=(64,))}, 'MultiHeadAttention_0': {'key': {'bias': ValueMetadataEntry(value_type='jax.Array', skip_deserialize=False, write_shape=(1, 64)), 'kernel': ValueMetadataEntry(value_type='jax.Array', skip_deserialize=False, write_shape=(64, 8, 64))}, 'out': {'bias': ValueMetadataEntry(value_type='jax.Array', skip_deserialize=False, write_shape=(64,)), 'kernel': ValueMetadataEntry(value_type='jax.Array', skip_deserialize=False, write_shape=(1, 64, 512))}, 'query': {'bias': ValueMetadataEntry(value_type='jax.Array', skip_deserialize=False, write_shape=(1, 64)), 'kernel': ValueMetadataEntry(value_type='jax.Array', skip_deserialize=False, write_shape=(64, 8, 64))}, 'value': {'bias': ValueMetadataEntry(value_type='jax.Array', skip_deserialize=False, write_shape=(1, 64)), 'kernel': ValueMetadataEntry(value_type='jax.Array', skip_deserialize=False, write_shape=(64, 8, 64))}}, 'MultiHeadAttention_1': {'key': {'bias': ValueMetadataEntry(value_type='jax.Array', skip_deserialize=False, write_shape=(1, 64)), 'kernel': ValueMetadataEntry(value_type='jax.Array', skip_deserialize=False, write_shape=(64, 8, 64))}, 'out': {'bias': ValueMetadataEntry(value_type='jax.Array', skip_deserialize=False, write_shape=(64,)), 'kernel': ValueMetadataEntry(value_type='jax.Array', skip_deserialize=False, write_shape=(1, 64, 512))}, 'query': {'bias': ValueMetadataEntry(value_type='jax.Array', skip_deserialize=False, write_shape=(1, 64)), 'kernel': ValueMetadataEntry(value_type='jax.Array', skip_deserialize=False, write_shape=(64, 8, 64))}, 'value': {'bias': ValueMetadataEntry(value_type='jax.Array', skip_deserialize=False, write_shape=(1, 64)), 'kernel': ValueMetadataEntry(value_type='jax.Array', skip_deserialize=False, write_shape=(64, 8, 64))}}}), 'STBlock_5': Diff(lhs=None, rhs={'Dense_0': {'bias': ValueMetadataEntry(value_type='jax.Array', skip_deserialize=False, write_shape=(64,)), 'kernel': ValueMetadataEntry(value_type='jax.Array', skip_deserialize=False, write_shape=(64, 512))}, 'LayerNorm_0': {'bias': ValueMetadataEntry(value_type='jax.Array', skip_deserialize=False, write_shape=(64,)), 'scale': ValueMetadataEntry(value_type='jax.Array', skip_deserialize=False, write_shape=(64,))}, 'LayerNorm_1': {'bias': ValueMetadataEntry(value_type='jax.Array', skip_deserialize=False, write_shape=(64,)), 'scale': ValueMetadataEntry(value_type='jax.Array', skip_deserialize=False, write_shape=(64,))}, 'LayerNorm_2': {'bias': ValueMetadataEntry(value_type='jax.Array', skip_deserialize=False, write_shape=(64,)), 'scale': ValueMetadataEntry(value_type='jax.Array', 
skip_deserialize=False, write_shape=(64,))}, 'MultiHeadAttention_0': {'key': {'bias': ValueMetadataEntry(value_type='jax.Array', skip_deserialize=False, write_shape=(1, 64)), 'kernel': ValueMetadataEntry(value_type='jax.Array', skip_deserialize=False, write_shape=(64, 8, 64))}, 'out': {'bias': ValueMetadataEntry(value_type='jax.Array', skip_deserialize=False, write_shape=(64,)), 'kernel': ValueMetadataEntry(value_type='jax.Array', skip_deserialize=False, write_shape=(1, 64, 512))}, 'query': {'bias': ValueMetadataEntry(value_type='jax.Array', skip_deserialize=False, write_shape=(1, 64)), 'kernel': ValueMetadataEntry(value_type='jax.Array', skip_deserialize=False, write_shape=(64, 8, 64))}, 'value': {'bias': ValueMetadataEntry(value_type='jax.Array', skip_deserialize=False, write_shape=(1, 64)), 'kernel': ValueMetadataEntry(value_type='jax.Array', skip_deserialize=False, write_shape=(64, 8, 64))}}, 'MultiHeadAttention_1': {'key': {'bias': ValueMetadataEntry(value_type='jax.Array', skip_deserialize=False, write_shape=(1, 64)), 'kernel': ValueMetadataEntry(value_type='jax.Array', skip_deserialize=False, write_shape=(64, 8, 64))}, 'out': {'bias': ValueMetadataEntry(value_type='jax.Array', skip_deserialize=False, write_shape=(64,)), 'kernel': ValueMetadataEntry(value_type='jax.Array', skip_deserialize=False, write_shape=(1, 64, 512))}, 'query': {'bias': ValueMetadataEntry(value_type='jax.Array', skip_deserialize=False, write_shape=(1, 64)), 'kernel': ValueMetadataEntry(value_type='jax.Array', skip_deserialize=False, write_shape=(64, 8, 64))}, 'value': {'bias': ValueMetadataEntry(value_type='jax.Array', skip_deserialize=False, write_shape=(1, 64)), 'kernel': ValueMetadataEntry(value_type='jax.Array', skip_deserialize=False, write_shape=(64, 8, 64))}}}), 'STBlock_6': Diff(lhs=None, rhs={'Dense_0': {'bias': ValueMetadataEntry(value_type='jax.Array', skip_deserialize=False, write_shape=(64,)), 'kernel': ValueMetadataEntry(value_type='jax.Array', skip_deserialize=False, write_shape=(64, 512))}, 'LayerNorm_0': {'bias': ValueMetadataEntry(value_type='jax.Array', skip_deserialize=False, write_shape=(64,)), 'scale': ValueMetadataEntry(value_type='jax.Array', skip_deserialize=False, write_shape=(64,))}, 'LayerNorm_1': {'bias': ValueMetadataEntry(value_type='jax.Array', skip_deserialize=False, write_shape=(64,)), 'scale': ValueMetadataEntry(value_type='jax.Array', skip_deserialize=False, write_shape=(64,))}, 'LayerNorm_2': {'bias': ValueMetadataEntry(value_type='jax.Array', skip_deserialize=False, write_shape=(64,)), 'scale': ValueMetadataEntry(value_type='jax.Array', skip_deserialize=False, write_shape=(64,))}, 'MultiHeadAttention_0': {'key': {'bias': ValueMetadataEntry(value_type='jax.Array', skip_deserialize=False, write_shape=(1, 64)), 'kernel': ValueMetadataEntry(value_type='jax.Array', skip_deserialize=False, write_shape=(64, 8, 64))}, 'out': {'bias': ValueMetadataEntry(value_type='jax.Array', skip_deserialize=False, write_shape=(64,)), 'kernel': ValueMetadataEntry(value_type='jax.Array', skip_deserialize=False, write_shape=(1, 64, 512))}, 'query': {'bias': ValueMetadataEntry(value_type='jax.Array', skip_deserialize=False, write_shape=(1, 64)), 'kernel': ValueMetadataEntry(value_type='jax.Array', skip_deserialize=False, write_shape=(64, 8, 64))}, 'value': {'bias': ValueMetadataEntry(value_type='jax.Array', skip_deserialize=False, write_shape=(1, 64)), 'kernel': ValueMetadataEntry(value_type='jax.Array', skip_deserialize=False, write_shape=(64, 8, 64))}}, 'MultiHeadAttention_1': {'key': {'bias': 
ValueMetadataEntry(value_type='jax.Array', skip_deserialize=False, write_shape=(1, 64)), 'kernel': ValueMetadataEntry(value_type='jax.Array', skip_deserialize=False, write_shape=(64, 8, 64))}, 'out': {'bias': ValueMetadataEntry(value_type='jax.Array', skip_deserialize=False, write_shape=(64,)), 'kernel': ValueMetadataEntry(value_type='jax.Array', skip_deserialize=False, write_shape=(1, 64, 512))}, 'query': {'bias': ValueMetadataEntry(value_type='jax.Array', skip_deserialize=False, write_shape=(1, 64)), 'kernel': ValueMetadataEntry(value_type='jax.Array', skip_deserialize=False, write_shape=(64, 8, 64))}, 'value': {'bias': ValueMetadataEntry(value_type='jax.Array', skip_deserialize=False, write_shape=(1, 64)), 'kernel': ValueMetadataEntry(value_type='jax.Array', skip_deserialize=False, write_shape=(64, 8, 64))}}}), 'STBlock_7': Diff(lhs=None, rhs={'Dense_0': {'bias': ValueMetadataEntry(value_type='jax.Array', skip_deserialize=False, write_shape=(64,)), 'kernel': ValueMetadataEntry(value_type='jax.Array', skip_deserialize=False, write_shape=(64, 512))}, 'LayerNorm_0': {'bias': ValueMetadataEntry(value_type='jax.Array', skip_deserialize=False, write_shape=(64,)), 'scale': ValueMetadataEntry(value_type='jax.Array', skip_deserialize=False, write_shape=(64,))}, 'LayerNorm_1': {'bias': ValueMetadataEntry(value_type='jax.Array', skip_deserialize=False, write_shape=(64,)), 'scale': ValueMetadataEntry(value_type='jax.Array', skip_deserialize=False, write_shape=(64,))}, 'LayerNorm_2': {'bias': ValueMetadataEntry(value_type='jax.Array', skip_deserialize=False, write_shape=(64,)), 'scale': ValueMetadataEntry(value_type='jax.Array', skip_deserialize=False, write_shape=(64,))}, 'MultiHeadAttention_0': {'key': {'bias': ValueMetadataEntry(value_type='jax.Array', skip_deserialize=False, write_shape=(1, 64)), 'kernel': ValueMetadataEntry(value_type='jax.Array', skip_deserialize=False, write_shape=(64, 8, 64))}, 'out': {'bias': ValueMetadataEntry(value_type='jax.Array', skip_deserialize=False, write_shape=(64,)), 'kernel': ValueMetadataEntry(value_type='jax.Array', skip_deserialize=False, write_shape=(1, 64, 512))}, 'query': {'bias': ValueMetadataEntry(value_type='jax.Array', skip_deserialize=False, write_shape=(1, 64)), 'kernel': ValueMetadataEntry(value_type='jax.Array', skip_deserialize=False, write_shape=(64, 8, 64))}, 'value': {'bias': ValueMetadataEntry(value_type='jax.Array', skip_deserialize=False, write_shape=(1, 64)), 'kernel': ValueMetadataEntry(value_type='jax.Array', skip_deserialize=False, write_shape=(64, 8, 64))}}, 'MultiHeadAttention_1': {'key': {'bias': ValueMetadataEntry(value_type='jax.Array', skip_deserialize=False, write_shape=(1, 64)), 'kernel': ValueMetadataEntry(value_type='jax.Array', skip_deserialize=False, write_shape=(64, 8, 64))}, 'out': {'bias': ValueMetadataEntry(value_type='jax.Array', skip_deserialize=False, write_shape=(64,)), 'kernel': ValueMetadataEntry(value_type='jax.Array', skip_deserialize=False, write_shape=(1, 64, 512))}, 'query': {'bias': ValueMetadataEntry(value_type='jax.Array', skip_deserialize=False, write_shape=(1, 64)), 'kernel': ValueMetadataEntry(value_type='jax.Array', skip_deserialize=False, write_shape=(64, 8, 64))}, 'value': {'bias': ValueMetadataEntry(value_type='jax.Array', skip_deserialize=False, write_shape=(1, 64)), 'kernel': ValueMetadataEntry(value_type='jax.Array', skip_deserialize=False, write_shape=(64, 8, 64))}}})}}}, 'opt_state': [{'mu': {'params': {'decoder': {'STBlock_0': {'Dense_1': Diff(lhs={'bias': ShapeDtypeStruct(shape=(512,), 
dtype=float32, sharding=NamedSharding(mesh=Mesh('data': 1, axis_types=(Auto,)), spec=PartitionSpec(), memory_kind=device)), 'kernel': ShapeDtypeStruct(shape=(2048, 512), dtype=float32, sharding=NamedSharding(mesh=Mesh('data': 1, axis_types=(Auto,)), spec=PartitionSpec(), memory_kind=device))}, rhs=None)}, 'STBlock_1': {'Dense_1': Diff(lhs={'bias': ShapeDtypeStruct(shape=(512,), dtype=float32, sharding=NamedSharding(mesh=Mesh('data': 1, axis_types=(Auto,)), spec=PartitionSpec(), memory_kind=device)), 'kernel': ShapeDtypeStruct(shape=(2048, 512), dtype=float32, sharding=NamedSharding(mesh=Mesh('data': 1, axis_types=(Auto,)), spec=PartitionSpec(), memory_kind=device))}, rhs=None)}, 'STBlock_2': {'Dense_1': Diff(lhs={'bias': ShapeDtypeStruct(shape=(512,), dtype=float32, sharding=NamedSharding(mesh=Mesh('data': 1, axis_types=(Auto,)), spec=PartitionSpec(), memory_kind=device)), 'kernel': ShapeDtypeStruct(shape=(2048, 512), dtype=float32, sharding=NamedSharding(mesh=Mesh('data': 1, axis_types=(Auto,)), spec=PartitionSpec(), memory_kind=device))}, rhs=None)}, 'STBlock_3': {'Dense_1': Diff(lhs={'bias': ShapeDtypeStruct(shape=(512,), dtype=float32, sharding=NamedSharding(mesh=Mesh('data': 1, axis_types=(Auto,)), spec=PartitionSpec(), memory_kind=device)), 'kernel': ShapeDtypeStruct(shape=(2048, 512), dtype=float32, sharding=NamedSharding(mesh=Mesh('data': 1, axis_types=(Auto,)), spec=PartitionSpec(), memory_kind=device))}, rhs=None)}, 'STBlock_4': Diff(lhs=None, rhs={'Dense_0': {'bias': ValueMetadataEntry(value_type='jax.Array', skip_deserialize=False, write_shape=(64,)), 'kernel': ValueMetadataEntry(value_type='jax.Array', skip_deserialize=False, write_shape=(64, 512))}, 'LayerNorm_0': {'bias': ValueMetadataEntry(value_type='jax.Array', skip_deserialize=False, write_shape=(64,)), 'scale': ValueMetadataEntry(value_type='jax.Array', skip_deserialize=False, write_shape=(64,))}, 'LayerNorm_1': {'bias': ValueMetadataEntry(value_type='jax.Array', skip_deserialize=False, write_shape=(64,)), 'scale': ValueMetadataEntry(value_type='jax.Array', skip_deserialize=False, write_shape=(64,))}, 'LayerNorm_2': {'bias': ValueMetadataEntry(value_type='jax.Array', skip_deserialize=False, write_shape=(64,)), 'scale': ValueMetadataEntry(value_type='jax.Array', skip_deserialize=False, write_shape=(64,))}, 'MultiHeadAttention_0': {'key': {'bias': ValueMetadataEntry(value_type='jax.Array', skip_deserialize=False, write_shape=(1, 64)), 'kernel': ValueMetadataEntry(value_type='jax.Array', skip_deserialize=False, write_shape=(64, 8, 64))}, 'out': {'bias': ValueMetadataEntry(value_type='jax.Array', skip_deserialize=False, write_shape=(64,)), 'kernel': ValueMetadataEntry(value_type='jax.Array', skip_deserialize=False, write_shape=(1, 64, 512))}, 'query': {'bias': ValueMetadataEntry(value_type='jax.Array', skip_deserialize=False, write_shape=(1, 64)), 'kernel': ValueMetadataEntry(value_type='jax.Array', skip_deserialize=False, write_shape=(64, 8, 64))}, 'value': {'bias': ValueMetadataEntry(value_type='jax.Array', skip_deserialize=False, write_shape=(1, 64)), 'kernel': ValueMetadataEntry(value_type='jax.Array', skip_deserialize=False, write_shape=(64, 8, 64))}}, 'MultiHeadAttention_1': {'key': {'bias': ValueMetadataEntry(value_type='jax.Array', skip_deserialize=False, write_shape=(1, 64)), 'kernel': ValueMetadataEntry(value_type='jax.Array', skip_deserialize=False, write_shape=(64, 8, 64))}, 'out': {'bias': ValueMetadataEntry(value_type='jax.Array', skip_deserialize=False, write_shape=(64,)), 'kernel': 
ValueMetadataEntry(value_type='jax.Array', skip_deserialize=False, write_shape=(1, 64, 512))}, 'query': {'bias': ValueMetadataEntry(value_type='jax.Array', skip_deserialize=False, write_shape=(1, 64)), 'kernel': ValueMetadataEntry(value_type='jax.Array', skip_deserialize=False, write_shape=(64, 8, 64))}, 'value': {'bias': ValueMetadataEntry(value_type='jax.Array', skip_deserialize=False, write_shape=(1, 64)), 'kernel': ValueMetadataEntry(value_type='jax.Array', skip_deserialize=False, write_shape=(64, 8, 64))}}}), 'STBlock_5': Diff(lhs=None, rhs={'Dense_0': {'bias': ValueMetadataEntry(value_type='jax.Array', skip_deserialize=False, write_shape=(64,)), 'kernel': ValueMetadataEntry(value_type='jax.Array', skip_deserialize=False, write_shape=(64, 512))}, 'LayerNorm_0': {'bias': ValueMetadataEntry(value_type='jax.Array', skip_deserialize=False, write_shape=(64,)), 'scale': ValueMetadataEntry(value_type='jax.Array', skip_deserialize=False, write_shape=(64,))}, 'LayerNorm_1': {'bias': ValueMetadataEntry(value_type='jax.Array', skip_deserialize=False, write_shape=(64,)), 'scale': ValueMetadataEntry(value_type='jax.Array', skip_deserialize=False, write_shape=(64,))}, 'LayerNorm_2': {'bias': ValueMetadataEntry(value_type='jax.Array', skip_deserialize=False, write_shape=(64,)), 'scale': ValueMetadataEntry(value_type='jax.Array', skip_deserialize=False, write_shape=(64,))}, 'MultiHeadAttention_0': {'key': {'bias': ValueMetadataEntry(value_type='jax.Array', skip_deserialize=False, write_shape=(1, 64)), 'kernel': ValueMetadataEntry(value_type='jax.Array', skip_deserialize=False, write_shape=(64, 8, 64))}, 'out': {'bias': ValueMetadataEntry(value_type='jax.Array', skip_deserialize=False, write_shape=(64,)), 'kernel': ValueMetadataEntry(value_type='jax.Array', skip_deserialize=False, write_shape=(1, 64, 512))}, 'query': {'bias': ValueMetadataEntry(value_type='jax.Array', skip_deserialize=False, write_shape=(1, 64)), 'kernel': ValueMetadataEntry(value_type='jax.Array', skip_deserialize=False, write_shape=(64, 8, 64))}, 'value': {'bias': ValueMetadataEntry(value_type='jax.Array', skip_deserialize=False, write_shape=(1, 64)), 'kernel': ValueMetadataEntry(value_type='jax.Array', skip_deserialize=False, write_shape=(64, 8, 64))}}, 'MultiHeadAttention_1': {'key': {'bias': ValueMetadataEntry(value_type='jax.Array', skip_deserialize=False, write_shape=(1, 64)), 'kernel': ValueMetadataEntry(value_type='jax.Array', skip_deserialize=False, write_shape=(64, 8, 64))}, 'out': {'bias': ValueMetadataEntry(value_type='jax.Array', skip_deserialize=False, write_shape=(64,)), 'kernel': ValueMetadataEntry(value_type='jax.Array', skip_deserialize=False, write_shape=(1, 64, 512))}, 'query': {'bias': ValueMetadataEntry(value_type='jax.Array', skip_deserialize=False, write_shape=(1, 64)), 'kernel': ValueMetadataEntry(value_type='jax.Array', skip_deserialize=False, write_shape=(64, 8, 64))}, 'value': {'bias': ValueMetadataEntry(value_type='jax.Array', skip_deserialize=False, write_shape=(1, 64)), 'kernel': ValueMetadataEntry(value_type='jax.Array', skip_deserialize=False, write_shape=(64, 8, 64))}}}), 'STBlock_6': Diff(lhs=None, rhs={'Dense_0': {'bias': ValueMetadataEntry(value_type='jax.Array', skip_deserialize=False, write_shape=(64,)), 'kernel': ValueMetadataEntry(value_type='jax.Array', skip_deserialize=False, write_shape=(64, 512))}, 'LayerNorm_0': {'bias': ValueMetadataEntry(value_type='jax.Array', skip_deserialize=False, write_shape=(64,)), 'scale': ValueMetadataEntry(value_type='jax.Array', skip_deserialize=False, 
write_shape=(64,))}, 'LayerNorm_1': {'bias': ValueMetadataEntry(value_type='jax.Array', skip_deserialize=False, write_shape=(64,)), 'scale': ValueMetadataEntry(value_type='jax.Array', skip_deserialize=False, write_shape=(64,))}, 'LayerNorm_2': {'bias': ValueMetadataEntry(value_type='jax.Array', skip_deserialize=False, write_shape=(64,)), 'scale': ValueMetadataEntry(value_type='jax.Array', skip_deserialize=False, write_shape=(64,))}, 'MultiHeadAttention_0': {'key': {'bias': ValueMetadataEntry(value_type='jax.Array', skip_deserialize=False, write_shape=(1, 64)), 'kernel': ValueMetadataEntry(value_type='jax.Array', skip_deserialize=False, write_shape=(64, 8, 64))}, 'out': {'bias': ValueMetadataEntry(value_type='jax.Array', skip_deserialize=False, write_shape=(64,)), 'kernel': ValueMetadataEntry(value_type='jax.Array', skip_deserialize=False, write_shape=(1, 64, 512))}, 'query': {'bias': ValueMetadataEntry(value_type='jax.Array', skip_deserialize=False, write_shape=(1, 64)), 'kernel': ValueMetadataEntry(value_type='jax.Array', skip_deserialize=False, write_shape=(64, 8, 64))}, 'value': {'bias': ValueMetadataEntry(value_type='jax.Array', skip_deserialize=False, write_shape=(1, 64)), 'kernel': ValueMetadataEntry(value_type='jax.Array', skip_deserialize=False, write_shape=(64, 8, 64))}}, 'MultiHeadAttention_1': {'key': {'bias': ValueMetadataEntry(value_type='jax.Array', skip_deserialize=False, write_shape=(1, 64)), 'kernel': ValueMetadataEntry(value_type='jax.Array', skip_deserialize=False, write_shape=(64, 8, 64))}, 'out': {'bias': ValueMetadataEntry(value_type='jax.Array', skip_deserialize=False, write_shape=(64,)), 'kernel': ValueMetadataEntry(value_type='jax.Array', skip_deserialize=False, write_shape=(1, 64, 512))}, 'query': {'bias': ValueMetadataEntry(value_type='jax.Array', skip_deserialize=False, write_shape=(1, 64)), 'kernel': ValueMetadataEntry(value_type='jax.Array', skip_deserialize=False, write_shape=(64, 8, 64))}, 'value': {'bias': ValueMetadataEntry(value_type='jax.Array', skip_deserialize=False, write_shape=(1, 64)), 'kernel': ValueMetadataEntry(value_type='jax.Array', skip_deserialize=False, write_shape=(64, 8, 64))}}}), 'STBlock_7': Diff(lhs=None, rhs={'Dense_0': {'bias': ValueMetadataEntry(value_type='jax.Array', skip_deserialize=False, write_shape=(64,)), 'kernel': ValueMetadataEntry(value_type='jax.Array', skip_deserialize=False, write_shape=(64, 512))}, 'LayerNorm_0': {'bias': ValueMetadataEntry(value_type='jax.Array', skip_deserialize=False, write_shape=(64,)), 'scale': ValueMetadataEntry(value_type='jax.Array', skip_deserialize=False, write_shape=(64,))}, 'LayerNorm_1': {'bias': ValueMetadataEntry(value_type='jax.Array', skip_deserialize=False, write_shape=(64,)), 'scale': ValueMetadataEntry(value_type='jax.Array', skip_deserialize=False, write_shape=(64,))}, 'LayerNorm_2': {'bias': ValueMetadataEntry(value_type='jax.Array', skip_deserialize=False, write_shape=(64,)), 'scale': ValueMetadataEntry(value_type='jax.Array', skip_deserialize=False, write_shape=(64,))}, 'MultiHeadAttention_0': {'key': {'bias': ValueMetadataEntry(value_type='jax.Array', skip_deserialize=False, write_shape=(1, 64)), 'kernel': ValueMetadataEntry(value_type='jax.Array', skip_deserialize=False, write_shape=(64, 8, 64))}, 'out': {'bias': ValueMetadataEntry(value_type='jax.Array', skip_deserialize=False, write_shape=(64,)), 'kernel': ValueMetadataEntry(value_type='jax.Array', skip_deserialize=False, write_shape=(1, 64, 512))}, 'query': {'bias': ValueMetadataEntry(value_type='jax.Array', 
skip_deserialize=False, write_shape=(1, 64)), 'kernel': ValueMetadataEntry(value_type='jax.Array', skip_deserialize=False, write_shape=(64, 8, 64))}, 'value': {'bias': ValueMetadataEntry(value_type='jax.Array', skip_deserialize=False, write_shape=(1, 64)), 'kernel': ValueMetadataEntry(value_type='jax.Array', skip_deserialize=False, write_shape=(64, 8, 64))}}, 'MultiHeadAttention_1': {'key': {'bias': ValueMetadataEntry(value_type='jax.Array', skip_deserialize=False, write_shape=(1, 64)), 'kernel': ValueMetadataEntry(value_type='jax.Array', skip_deserialize=False, write_shape=(64, 8, 64))}, 'out': {'bias': ValueMetadataEntry(value_type='jax.Array', skip_deserialize=False, write_shape=(64,)), 'kernel': ValueMetadataEntry(value_type='jax.Array', skip_deserialize=False, write_shape=(1, 64, 512))}, 'query': {'bias': ValueMetadataEntry(value_type='jax.Array', skip_deserialize=False, write_shape=(1, 64)), 'kernel': ValueMetadataEntry(value_type='jax.Array', skip_deserialize=False, write_shape=(64, 8, 64))}, 'value': {'bias': ValueMetadataEntry(value_type='jax.Array', skip_deserialize=False, write_shape=(1, 64)), 'kernel': ValueMetadataEntry(value_type='jax.Array', skip_deserialize=False, write_shape=(64, 8, 64))}}})}, 'encoder': {'STBlock_0': {'Dense_1': Diff(lhs={'bias': ShapeDtypeStruct(shape=(512,), dtype=float32, sharding=NamedSharding(mesh=Mesh('data': 1, axis_types=(Auto,)), spec=PartitionSpec(), memory_kind=device)), 'kernel': ShapeDtypeStruct(shape=(2048, 512), dtype=float32, sharding=NamedSharding(mesh=Mesh('data': 1, axis_types=(Auto,)), spec=PartitionSpec(), memory_kind=device))}, rhs=None)}, 'STBlock_1': {'Dense_1': Diff(lhs={'bias': ShapeDtypeStruct(shape=(512,), dtype=float32, sharding=NamedSharding(mesh=Mesh('data': 1, axis_types=(Auto,)), spec=PartitionSpec(), memory_kind=device)), 'kernel': ShapeDtypeStruct(shape=(2048, 512), dtype=float32, sharding=NamedSharding(mesh=Mesh('data': 1, axis_types=(Auto,)), spec=PartitionSpec(), memory_kind=device))}, rhs=None)}, 'STBlock_2': {'Dense_1': Diff(lhs={'bias': ShapeDtypeStruct(shape=(512,), dtype=float32, sharding=NamedSharding(mesh=Mesh('data': 1, axis_types=(Auto,)), spec=PartitionSpec(), memory_kind=device)), 'kernel': ShapeDtypeStruct(shape=(2048, 512), dtype=float32, sharding=NamedSharding(mesh=Mesh('data': 1, axis_types=(Auto,)), spec=PartitionSpec(), memory_kind=device))}, rhs=None)}, 'STBlock_3': {'Dense_1': Diff(lhs={'bias': ShapeDtypeStruct(shape=(512,), dtype=float32, sharding=NamedSharding(mesh=Mesh('data': 1, axis_types=(Auto,)), spec=PartitionSpec(), memory_kind=device)), 'kernel': ShapeDtypeStruct(shape=(2048, 512), dtype=float32, sharding=NamedSharding(mesh=Mesh('data': 1, axis_types=(Auto,)), spec=PartitionSpec(), memory_kind=device))}, rhs=None)}, 'STBlock_4': Diff(lhs=None, rhs={'Dense_0': {'bias': ValueMetadataEntry(value_type='jax.Array', skip_deserialize=False, write_shape=(64,)), 'kernel': ValueMetadataEntry(value_type='jax.Array', skip_deserialize=False, write_shape=(64, 512))}, 'LayerNorm_0': {'bias': ValueMetadataEntry(value_type='jax.Array', skip_deserialize=False, write_shape=(64,)), 'scale': ValueMetadataEntry(value_type='jax.Array', skip_deserialize=False, write_shape=(64,))}, 'LayerNorm_1': {'bias': ValueMetadataEntry(value_type='jax.Array', skip_deserialize=False, write_shape=(64,)), 'scale': ValueMetadataEntry(value_type='jax.Array', skip_deserialize=False, write_shape=(64,))}, 'LayerNorm_2': {'bias': ValueMetadataEntry(value_type='jax.Array', skip_deserialize=False, write_shape=(64,)), 'scale': 
ValueMetadataEntry(value_type='jax.Array', skip_deserialize=False, write_shape=(64,))}, 'MultiHeadAttention_0': {'key': {'bias': ValueMetadataEntry(value_type='jax.Array', skip_deserialize=False, write_shape=(1, 64)), 'kernel': ValueMetadataEntry(value_type='jax.Array', skip_deserialize=False, write_shape=(64, 8, 64))}, 'out': {'bias': ValueMetadataEntry(value_type='jax.Array', skip_deserialize=False, write_shape=(64,)), 'kernel': ValueMetadataEntry(value_type='jax.Array', skip_deserialize=False, write_shape=(1, 64, 512))}, 'query': {'bias': ValueMetadataEntry(value_type='jax.Array', skip_deserialize=False, write_shape=(1, 64)), 'kernel': ValueMetadataEntry(value_type='jax.Array', skip_deserialize=False, write_shape=(64, 8, 64))}, 'value': {'bias': ValueMetadataEntry(value_type='jax.Array', skip_deserialize=False, write_shape=(1, 64)), 'kernel': ValueMetadataEntry(value_type='jax.Array', skip_deserialize=False, write_shape=(64, 8, 64))}}, 'MultiHeadAttention_1': {'key': {'bias': ValueMetadataEntry(value_type='jax.Array', skip_deserialize=False, write_shape=(1, 64)), 'kernel': ValueMetadataEntry(value_type='jax.Array', skip_deserialize=False, write_shape=(64, 8, 64))}, 'out': {'bias': ValueMetadataEntry(value_type='jax.Array', skip_deserialize=False, write_shape=(64,)), 'kernel': ValueMetadataEntry(value_type='jax.Array', skip_deserialize=False, write_shape=(1, 64, 512))}, 'query': {'bias': ValueMetadataEntry(value_type='jax.Array', skip_deserialize=False, write_shape=(1, 64)), 'kernel': ValueMetadataEntry(value_type='jax.Array', skip_deserialize=False, write_shape=(64, 8, 64))}, 'value': {'bias': ValueMetadataEntry(value_type='jax.Array', skip_deserialize=False, write_shape=(1, 64)), 'kernel': ValueMetadataEntry(value_type='jax.Array', skip_deserialize=False, write_shape=(64, 8, 64))}}}), 'STBlock_5': Diff(lhs=None, rhs={'Dense_0': {'bias': ValueMetadataEntry(value_type='jax.Array', skip_deserialize=False, write_shape=(64,)), 'kernel': ValueMetadataEntry(value_type='jax.Array', skip_deserialize=False, write_shape=(64, 512))}, 'LayerNorm_0': {'bias': ValueMetadataEntry(value_type='jax.Array', skip_deserialize=False, write_shape=(64,)), 'scale': ValueMetadataEntry(value_type='jax.Array', skip_deserialize=False, write_shape=(64,))}, 'LayerNorm_1': {'bias': ValueMetadataEntry(value_type='jax.Array', skip_deserialize=False, write_shape=(64,)), 'scale': ValueMetadataEntry(value_type='jax.Array', skip_deserialize=False, write_shape=(64,))}, 'LayerNorm_2': {'bias': ValueMetadataEntry(value_type='jax.Array', skip_deserialize=False, write_shape=(64,)), 'scale': ValueMetadataEntry(value_type='jax.Array', skip_deserialize=False, write_shape=(64,))}, 'MultiHeadAttention_0': {'key': {'bias': ValueMetadataEntry(value_type='jax.Array', skip_deserialize=False, write_shape=(1, 64)), 'kernel': ValueMetadataEntry(value_type='jax.Array', skip_deserialize=False, write_shape=(64, 8, 64))}, 'out': {'bias': ValueMetadataEntry(value_type='jax.Array', skip_deserialize=False, write_shape=(64,)), 'kernel': ValueMetadataEntry(value_type='jax.Array', skip_deserialize=False, write_shape=(1, 64, 512))}, 'query': {'bias': ValueMetadataEntry(value_type='jax.Array', skip_deserialize=False, write_shape=(1, 64)), 'kernel': ValueMetadataEntry(value_type='jax.Array', skip_deserialize=False, write_shape=(64, 8, 64))}, 'value': {'bias': ValueMetadataEntry(value_type='jax.Array', skip_deserialize=False, write_shape=(1, 64)), 'kernel': ValueMetadataEntry(value_type='jax.Array', skip_deserialize=False, write_shape=(64, 8, 64))}}, 
'MultiHeadAttention_1': {'key': {'bias': ValueMetadataEntry(value_type='jax.Array', skip_deserialize=False, write_shape=(1, 64)), 'kernel': ValueMetadataEntry(value_type='jax.Array', skip_deserialize=False, write_shape=(64, 8, 64))}, 'out': {'bias': ValueMetadataEntry(value_type='jax.Array', skip_deserialize=False, write_shape=(64,)), 'kernel': ValueMetadataEntry(value_type='jax.Array', skip_deserialize=False, write_shape=(1, 64, 512))}, 'query': {'bias': ValueMetadataEntry(value_type='jax.Array', skip_deserialize=False, write_shape=(1, 64)), 'kernel': ValueMetadataEntry(value_type='jax.Array', skip_deserialize=False, write_shape=(64, 8, 64))}, 'value': {'bias': ValueMetadataEntry(value_type='jax.Array', skip_deserialize=False, write_shape=(1, 64)), 'kernel': ValueMetadataEntry(value_type='jax.Array', skip_deserialize=False, write_shape=(64, 8, 64))}}}), 'STBlock_6': Diff(lhs=None, rhs={'Dense_0': {'bias': ValueMetadataEntry(value_type='jax.Array', skip_deserialize=False, write_shape=(64,)), 'kernel': ValueMetadataEntry(value_type='jax.Array', skip_deserialize=False, write_shape=(64, 512))}, 'LayerNorm_0': {'bias': ValueMetadataEntry(value_type='jax.Array', skip_deserialize=False, write_shape=(64,)), 'scale': ValueMetadataEntry(value_type='jax.Array', skip_deserialize=False, write_shape=(64,))}, 'LayerNorm_1': {'bias': ValueMetadataEntry(value_type='jax.Array', skip_deserialize=False, write_shape=(64,)), 'scale': ValueMetadataEntry(value_type='jax.Array', skip_deserialize=False, write_shape=(64,))}, 'LayerNorm_2': {'bias': ValueMetadataEntry(value_type='jax.Array', skip_deserialize=False, write_shape=(64,)), 'scale': ValueMetadataEntry(value_type='jax.Array', skip_deserialize=False, write_shape=(64,))}, 'MultiHeadAttention_0': {'key': {'bias': ValueMetadataEntry(value_type='jax.Array', skip_deserialize=False, write_shape=(1, 64)), 'kernel': ValueMetadataEntry(value_type='jax.Array', skip_deserialize=False, write_shape=(64, 8, 64))}, 'out': {'bias': ValueMetadataEntry(value_type='jax.Array', skip_deserialize=False, write_shape=(64,)), 'kernel': ValueMetadataEntry(value_type='jax.Array', skip_deserialize=False, write_shape=(1, 64, 512))}, 'query': {'bias': ValueMetadataEntry(value_type='jax.Array', skip_deserialize=False, write_shape=(1, 64)), 'kernel': ValueMetadataEntry(value_type='jax.Array', skip_deserialize=False, write_shape=(64, 8, 64))}, 'value': {'bias': ValueMetadataEntry(value_type='jax.Array', skip_deserialize=False, write_shape=(1, 64)), 'kernel': ValueMetadataEntry(value_type='jax.Array', skip_deserialize=False, write_shape=(64, 8, 64))}}, 'MultiHeadAttention_1': {'key': {'bias': ValueMetadataEntry(value_type='jax.Array', skip_deserialize=False, write_shape=(1, 64)), 'kernel': ValueMetadataEntry(value_type='jax.Array', skip_deserialize=False, write_shape=(64, 8, 64))}, 'out': {'bias': ValueMetadataEntry(value_type='jax.Array', skip_deserialize=False, write_shape=(64,)), 'kernel': ValueMetadataEntry(value_type='jax.Array', skip_deserialize=False, write_shape=(1, 64, 512))}, 'query': {'bias': ValueMetadataEntry(value_type='jax.Array', skip_deserialize=False, write_shape=(1, 64)), 'kernel': ValueMetadataEntry(value_type='jax.Array', skip_deserialize=False, write_shape=(64, 8, 64))}, 'value': {'bias': ValueMetadataEntry(value_type='jax.Array', skip_deserialize=False, write_shape=(1, 64)), 'kernel': ValueMetadataEntry(value_type='jax.Array', skip_deserialize=False, write_shape=(64, 8, 64))}}}), 'STBlock_7': Diff(lhs=None, rhs={'Dense_0': {'bias': 
ValueMetadataEntry(value_type='jax.Array', skip_deserialize=False, write_shape=(64,)), 'kernel': ValueMetadataEntry(value_type='jax.Array', skip_deserialize=False, write_shape=(64, 512))}, 'LayerNorm_0': {'bias': ValueMetadataEntry(value_type='jax.Array', skip_deserialize=False, write_shape=(64,)), 'scale': ValueMetadataEntry(value_type='jax.Array', skip_deserialize=False, write_shape=(64,))}, 'LayerNorm_1': {'bias': ValueMetadataEntry(value_type='jax.Array', skip_deserialize=False, write_shape=(64,)), 'scale': ValueMetadataEntry(value_type='jax.Array', skip_deserialize=False, write_shape=(64,))}, 'LayerNorm_2': {'bias': ValueMetadataEntry(value_type='jax.Array', skip_deserialize=False, write_shape=(64,)), 'scale': ValueMetadataEntry(value_type='jax.Array', skip_deserialize=False, write_shape=(64,))}, 'MultiHeadAttention_0': {'key': {'bias': ValueMetadataEntry(value_type='jax.Array', skip_deserialize=False, write_shape=(1, 64)), 'kernel': ValueMetadataEntry(value_type='jax.Array', skip_deserialize=False, write_shape=(64, 8, 64))}, 'out': {'bias': ValueMetadataEntry(value_type='jax.Array', skip_deserialize=False, write_shape=(64,)), 'kernel': ValueMetadataEntry(value_type='jax.Array', skip_deserialize=False, write_shape=(1, 64, 512))}, 'query': {'bias': ValueMetadataEntry(value_type='jax.Array', skip_deserialize=False, write_shape=(1, 64)), 'kernel': ValueMetadataEntry(value_type='jax.Array', skip_deserialize=False, write_shape=(64, 8, 64))}, 'value': {'bias': ValueMetadataEntry(value_type='jax.Array', skip_deserialize=False, write_shape=(1, 64)), 'kernel': ValueMetadataEntry(value_type='jax.Array', skip_deserialize=False, write_shape=(64, 8, 64))}}, 'MultiHeadAttention_1': {'key': {'bias': ValueMetadataEntry(value_type='jax.Array', skip_deserialize=False, write_shape=(1, 64)), 'kernel': ValueMetadataEntry(value_type='jax.Array', skip_deserialize=False, write_shape=(64, 8, 64))}, 'out': {'bias': ValueMetadataEntry(value_type='jax.Array', skip_deserialize=False, write_shape=(64,)), 'kernel': ValueMetadataEntry(value_type='jax.Array', skip_deserialize=False, write_shape=(1, 64, 512))}, 'query': {'bias': ValueMetadataEntry(value_type='jax.Array', skip_deserialize=False, write_shape=(1, 64)), 'kernel': ValueMetadataEntry(value_type='jax.Array', skip_deserialize=False, write_shape=(64, 8, 64))}, 'value': {'bias': ValueMetadataEntry(value_type='jax.Array', skip_deserialize=False, write_shape=(1, 64)), 'kernel': ValueMetadataEntry(value_type='jax.Array', skip_deserialize=False, write_shape=(64, 8, 64))}}})}}}, 'nu': {'params': {'decoder': {'STBlock_0': {'Dense_1': Diff(lhs={'bias': ShapeDtypeStruct(shape=(512,), dtype=float32, sharding=NamedSharding(mesh=Mesh('data': 1, axis_types=(Auto,)), spec=PartitionSpec(), memory_kind=device)), 'kernel': ShapeDtypeStruct(shape=(2048, 512), dtype=float32, sharding=NamedSharding(mesh=Mesh('data': 1, axis_types=(Auto,)), spec=PartitionSpec(), memory_kind=device))}, rhs=None)}, 'STBlock_1': {'Dense_1': Diff(lhs={'bias': ShapeDtypeStruct(shape=(512,), dtype=float32, sharding=NamedSharding(mesh=Mesh('data': 1, axis_types=(Auto,)), spec=PartitionSpec(), memory_kind=device)), 'kernel': ShapeDtypeStruct(shape=(2048, 512), dtype=float32, sharding=NamedSharding(mesh=Mesh('data': 1, axis_types=(Auto,)), spec=PartitionSpec(), memory_kind=device))}, rhs=None)}, 'STBlock_2': {'Dense_1': Diff(lhs={'bias': ShapeDtypeStruct(shape=(512,), dtype=float32, sharding=NamedSharding(mesh=Mesh('data': 1, axis_types=(Auto,)), spec=PartitionSpec(), memory_kind=device)), 'kernel': 
ShapeDtypeStruct(shape=(2048, 512), dtype=float32, sharding=NamedSharding(mesh=Mesh('data': 1, axis_types=(Auto,)), spec=PartitionSpec(), memory_kind=device))}, rhs=None)}, 'STBlock_3': {'Dense_1': Diff(lhs={'bias': ShapeDtypeStruct(shape=(512,), dtype=float32, sharding=NamedSharding(mesh=Mesh('data': 1, axis_types=(Auto,)), spec=PartitionSpec(), memory_kind=device)), 'kernel': ShapeDtypeStruct(shape=(2048, 512), dtype=float32, sharding=NamedSharding(mesh=Mesh('data': 1, axis_types=(Auto,)), spec=PartitionSpec(), memory_kind=device))}, rhs=None)}, 'STBlock_4': Diff(lhs=None, rhs={'Dense_0': {'bias': ValueMetadataEntry(value_type='jax.Array', skip_deserialize=False, write_shape=(64,)), 'kernel': ValueMetadataEntry(value_type='jax.Array', skip_deserialize=False, write_shape=(64, 512))}, 'LayerNorm_0': {'bias': ValueMetadataEntry(value_type='jax.Array', skip_deserialize=False, write_shape=(64,)), 'scale': ValueMetadataEntry(value_type='jax.Array', skip_deserialize=False, write_shape=(64,))}, 'LayerNorm_1': {'bias': ValueMetadataEntry(value_type='jax.Array', skip_deserialize=False, write_shape=(64,)), 'scale': ValueMetadataEntry(value_type='jax.Array', skip_deserialize=False, write_shape=(64,))}, 'LayerNorm_2': {'bias': ValueMetadataEntry(value_type='jax.Array', skip_deserialize=False, write_shape=(64,)), 'scale': ValueMetadataEntry(value_type='jax.Array', skip_deserialize=False, write_shape=(64,))}, 'MultiHeadAttention_0': {'key': {'bias': ValueMetadataEntry(value_type='jax.Array', skip_deserialize=False, write_shape=(1, 64)), 'kernel': ValueMetadataEntry(value_type='jax.Array', skip_deserialize=False, write_shape=(64, 8, 64))}, 'out': {'bias': ValueMetadataEntry(value_type='jax.Array', skip_deserialize=False, write_shape=(64,)), 'kernel': ValueMetadataEntry(value_type='jax.Array', skip_deserialize=False, write_shape=(1, 64, 512))}, 'query': {'bias': ValueMetadataEntry(value_type='jax.Array', skip_deserialize=False, write_shape=(1, 64)), 'kernel': ValueMetadataEntry(value_type='jax.Array', skip_deserialize=False, write_shape=(64, 8, 64))}, 'value': {'bias': ValueMetadataEntry(value_type='jax.Array', skip_deserialize=False, write_shape=(1, 64)), 'kernel': ValueMetadataEntry(value_type='jax.Array', skip_deserialize=False, write_shape=(64, 8, 64))}}, 'MultiHeadAttention_1': {'key': {'bias': ValueMetadataEntry(value_type='jax.Array', skip_deserialize=False, write_shape=(1, 64)), 'kernel': ValueMetadataEntry(value_type='jax.Array', skip_deserialize=False, write_shape=(64, 8, 64))}, 'out': {'bias': ValueMetadataEntry(value_type='jax.Array', skip_deserialize=False, write_shape=(64,)), 'kernel': ValueMetadataEntry(value_type='jax.Array', skip_deserialize=False, write_shape=(1, 64, 512))}, 'query': {'bias': ValueMetadataEntry(value_type='jax.Array', skip_deserialize=False, write_shape=(1, 64)), 'kernel': ValueMetadataEntry(value_type='jax.Array', skip_deserialize=False, write_shape=(64, 8, 64))}, 'value': {'bias': ValueMetadataEntry(value_type='jax.Array', skip_deserialize=False, write_shape=(1, 64)), 'kernel': ValueMetadataEntry(value_type='jax.Array', skip_deserialize=False, write_shape=(64, 8, 64))}}}), 'STBlock_5': Diff(lhs=None, rhs={'Dense_0': {'bias': ValueMetadataEntry(value_type='jax.Array', skip_deserialize=False, write_shape=(64,)), 'kernel': ValueMetadataEntry(value_type='jax.Array', skip_deserialize=False, write_shape=(64, 512))}, 'LayerNorm_0': {'bias': ValueMetadataEntry(value_type='jax.Array', skip_deserialize=False, write_shape=(64,)), 'scale': 
ValueMetadataEntry(value_type='jax.Array', skip_deserialize=False, write_shape=(64,))}, 'LayerNorm_1': {'bias': ValueMetadataEntry(value_type='jax.Array', skip_deserialize=False, write_shape=(64,)), 'scale': ValueMetadataEntry(value_type='jax.Array', skip_deserialize=False, write_shape=(64,))}, 'LayerNorm_2': {'bias': ValueMetadataEntry(value_type='jax.Array', skip_deserialize=False, write_shape=(64,)), 'scale': ValueMetadataEntry(value_type='jax.Array', skip_deserialize=False, write_shape=(64,))}, 'MultiHeadAttention_0': {'key': {'bias': ValueMetadataEntry(value_type='jax.Array', skip_deserialize=False, write_shape=(1, 64)), 'kernel': ValueMetadataEntry(value_type='jax.Array', skip_deserialize=False, write_shape=(64, 8, 64))}, 'out': {'bias': ValueMetadataEntry(value_type='jax.Array', skip_deserialize=False, write_shape=(64,)), 'kernel': ValueMetadataEntry(value_type='jax.Array', skip_deserialize=False, write_shape=(1, 64, 512))}, 'query': {'bias': ValueMetadataEntry(value_type='jax.Array', skip_deserialize=False, write_shape=(1, 64)), 'kernel': ValueMetadataEntry(value_type='jax.Array', skip_deserialize=False, write_shape=(64, 8, 64))}, 'value': {'bias': ValueMetadataEntry(value_type='jax.Array', skip_deserialize=False, write_shape=(1, 64)), 'kernel': ValueMetadataEntry(value_type='jax.Array', skip_deserialize=False, write_shape=(64, 8, 64))}}, 'MultiHeadAttention_1': {'key': {'bias': ValueMetadataEntry(value_type='jax.Array', skip_deserialize=False, write_shape=(1, 64)), 'kernel': ValueMetadataEntry(value_type='jax.Array', skip_deserialize=False, write_shape=(64, 8, 64))}, 'out': {'bias': ValueMetadataEntry(value_type='jax.Array', skip_deserialize=False, write_shape=(64,)), 'kernel': ValueMetadataEntry(value_type='jax.Array', skip_deserialize=False, write_shape=(1, 64, 512))}, 'query': {'bias': ValueMetadataEntry(value_type='jax.Array', skip_deserialize=False, write_shape=(1, 64)), 'kernel': ValueMetadataEntry(value_type='jax.Array', skip_deserialize=False, write_shape=(64, 8, 64))}, 'value': {'bias': ValueMetadataEntry(value_type='jax.Array', skip_deserialize=False, write_shape=(1, 64)), 'kernel': ValueMetadataEntry(value_type='jax.Array', skip_deserialize=False, write_shape=(64, 8, 64))}}}), 'STBlock_6': Diff(lhs=None, rhs={'Dense_0': {'bias': ValueMetadataEntry(value_type='jax.Array', skip_deserialize=False, write_shape=(64,)), 'kernel': ValueMetadataEntry(value_type='jax.Array', skip_deserialize=False, write_shape=(64, 512))}, 'LayerNorm_0': {'bias': ValueMetadataEntry(value_type='jax.Array', skip_deserialize=False, write_shape=(64,)), 'scale': ValueMetadataEntry(value_type='jax.Array', skip_deserialize=False, write_shape=(64,))}, 'LayerNorm_1': {'bias': ValueMetadataEntry(value_type='jax.Array', skip_deserialize=False, write_shape=(64,)), 'scale': ValueMetadataEntry(value_type='jax.Array', skip_deserialize=False, write_shape=(64,))}, 'LayerNorm_2': {'bias': ValueMetadataEntry(value_type='jax.Array', skip_deserialize=False, write_shape=(64,)), 'scale': ValueMetadataEntry(value_type='jax.Array', skip_deserialize=False, write_shape=(64,))}, 'MultiHeadAttention_0': {'key': {'bias': ValueMetadataEntry(value_type='jax.Array', skip_deserialize=False, write_shape=(1, 64)), 'kernel': ValueMetadataEntry(value_type='jax.Array', skip_deserialize=False, write_shape=(64, 8, 64))}, 'out': {'bias': ValueMetadataEntry(value_type='jax.Array', skip_deserialize=False, write_shape=(64,)), 'kernel': ValueMetadataEntry(value_type='jax.Array', skip_deserialize=False, write_shape=(1, 64, 512))}, 'query': 
{'bias': ValueMetadataEntry(value_type='jax.Array', skip_deserialize=False, write_shape=(1, 64)), 'kernel': ValueMetadataEntry(value_type='jax.Array', skip_deserialize=False, write_shape=(64, 8, 64))}, 'value': {'bias': ValueMetadataEntry(value_type='jax.Array', skip_deserialize=False, write_shape=(1, 64)), 'kernel': ValueMetadataEntry(value_type='jax.Array', skip_deserialize=False, write_shape=(64, 8, 64))}}, 'MultiHeadAttention_1': {'key': {'bias': ValueMetadataEntry(value_type='jax.Array', skip_deserialize=False, write_shape=(1, 64)), 'kernel': ValueMetadataEntry(value_type='jax.Array', skip_deserialize=False, write_shape=(64, 8, 64))}, 'out': {'bias': ValueMetadataEntry(value_type='jax.Array', skip_deserialize=False, write_shape=(64,)), 'kernel': ValueMetadataEntry(value_type='jax.Array', skip_deserialize=False, write_shape=(1, 64, 512))}, 'query': {'bias': ValueMetadataEntry(value_type='jax.Array', skip_deserialize=False, write_shape=(1, 64)), 'kernel': ValueMetadataEntry(value_type='jax.Array', skip_deserialize=False, write_shape=(64, 8, 64))}, 'value': {'bias': ValueMetadataEntry(value_type='jax.Array', skip_deserialize=False, write_shape=(1, 64)), 'kernel': ValueMetadataEntry(value_type='jax.Array', skip_deserialize=False, write_shape=(64, 8, 64))}}}), 'STBlock_7': Diff(lhs=None, rhs={'Dense_0': {'bias': ValueMetadataEntry(value_type='jax.Array', skip_deserialize=False, write_shape=(64,)), 'kernel': ValueMetadataEntry(value_type='jax.Array', skip_deserialize=False, write_shape=(64, 512))}, 'LayerNorm_0': {'bias': ValueMetadataEntry(value_type='jax.Array', skip_deserialize=False, write_shape=(64,)), 'scale': ValueMetadataEntry(value_type='jax.Array', skip_deserialize=False, write_shape=(64,))}, 'LayerNorm_1': {'bias': ValueMetadataEntry(value_type='jax.Array', skip_deserialize=False, write_shape=(64,)), 'scale': ValueMetadataEntry(value_type='jax.Array', skip_deserialize=False, write_shape=(64,))}, 'LayerNorm_2': {'bias': ValueMetadataEntry(value_type='jax.Array', skip_deserialize=False, write_shape=(64,)), 'scale': ValueMetadataEntry(value_type='jax.Array', skip_deserialize=False, write_shape=(64,))}, 'MultiHeadAttention_0': {'key': {'bias': ValueMetadataEntry(value_type='jax.Array', skip_deserialize=False, write_shape=(1, 64)), 'kernel': ValueMetadataEntry(value_type='jax.Array', skip_deserialize=False, write_shape=(64, 8, 64))}, 'out': {'bias': ValueMetadataEntry(value_type='jax.Array', skip_deserialize=False, write_shape=(64,)), 'kernel': ValueMetadataEntry(value_type='jax.Array', skip_deserialize=False, write_shape=(1, 64, 512))}, 'query': {'bias': ValueMetadataEntry(value_type='jax.Array', skip_deserialize=False, write_shape=(1, 64)), 'kernel': ValueMetadataEntry(value_type='jax.Array', skip_deserialize=False, write_shape=(64, 8, 64))}, 'value': {'bias': ValueMetadataEntry(value_type='jax.Array', skip_deserialize=False, write_shape=(1, 64)), 'kernel': ValueMetadataEntry(value_type='jax.Array', skip_deserialize=False, write_shape=(64, 8, 64))}}, 'MultiHeadAttention_1': {'key': {'bias': ValueMetadataEntry(value_type='jax.Array', skip_deserialize=False, write_shape=(1, 64)), 'kernel': ValueMetadataEntry(value_type='jax.Array', skip_deserialize=False, write_shape=(64, 8, 64))}, 'out': {'bias': ValueMetadataEntry(value_type='jax.Array', skip_deserialize=False, write_shape=(64,)), 'kernel': ValueMetadataEntry(value_type='jax.Array', skip_deserialize=False, write_shape=(1, 64, 512))}, 'query': {'bias': ValueMetadataEntry(value_type='jax.Array', skip_deserialize=False, write_shape=(1, 
64)), 'kernel': ValueMetadataEntry(value_type='jax.Array', skip_deserialize=False, write_shape=(64, 8, 64))}, 'value': {'bias': ValueMetadataEntry(value_type='jax.Array', skip_deserialize=False, write_shape=(1, 64)), 'kernel': ValueMetadataEntry(value_type='jax.Array', skip_deserialize=False, write_shape=(64, 8, 64))}}})}, 'encoder': {'STBlock_0': {'Dense_1': Diff(lhs={'bias': ShapeDtypeStruct(shape=(512,), dtype=float32, sharding=NamedSharding(mesh=Mesh('data': 1, axis_types=(Auto,)), spec=PartitionSpec(), memory_kind=device)), 'kernel': ShapeDtypeStruct(shape=(2048, 512), dtype=float32, sharding=NamedSharding(mesh=Mesh('data': 1, axis_types=(Auto,)), spec=PartitionSpec(), memory_kind=device))}, rhs=None)}, 'STBlock_1': {'Dense_1': Diff(lhs={'bias': ShapeDtypeStruct(shape=(512,), dtype=float32, sharding=NamedSharding(mesh=Mesh('data': 1, axis_types=(Auto,)), spec=PartitionSpec(), memory_kind=device)), 'kernel': ShapeDtypeStruct(shape=(2048, 512), dtype=float32, sharding=NamedSharding(mesh=Mesh('data': 1, axis_types=(Auto,)), spec=PartitionSpec(), memory_kind=device))}, rhs=None)}, 'STBlock_2': {'Dense_1': Diff(lhs={'bias': ShapeDtypeStruct(shape=(512,), dtype=float32, sharding=NamedSharding(mesh=Mesh('data': 1, axis_types=(Auto,)), spec=PartitionSpec(), memory_kind=device)), 'kernel': ShapeDtypeStruct(shape=(2048, 512), dtype=float32, sharding=NamedSharding(mesh=Mesh('data': 1, axis_types=(Auto,)), spec=PartitionSpec(), memory_kind=device))}, rhs=None)}, 'STBlock_3': {'Dense_1': Diff(lhs={'bias': ShapeDtypeStruct(shape=(512,), dtype=float32, sharding=NamedSharding(mesh=Mesh('data': 1, axis_types=(Auto,)), spec=PartitionSpec(), memory_kind=device)), 'kernel': ShapeDtypeStruct(shape=(2048, 512), dtype=float32, sharding=NamedSharding(mesh=Mesh('data': 1, axis_types=(Auto,)), spec=PartitionSpec(), memory_kind=device))}, rhs=None)}, 'STBlock_4': Diff(lhs=None, rhs={'Dense_0': {'bias': ValueMetadataEntry(value_type='jax.Array', skip_deserialize=False, write_shape=(64,)), 'kernel': ValueMetadataEntry(value_type='jax.Array', skip_deserialize=False, write_shape=(64, 512))}, 'LayerNorm_0': {'bias': ValueMetadataEntry(value_type='jax.Array', skip_deserialize=False, write_shape=(64,)), 'scale': ValueMetadataEntry(value_type='jax.Array', skip_deserialize=False, write_shape=(64,))}, 'LayerNorm_1': {'bias': ValueMetadataEntry(value_type='jax.Array', skip_deserialize=False, write_shape=(64,)), 'scale': ValueMetadataEntry(value_type='jax.Array', skip_deserialize=False, write_shape=(64,))}, 'LayerNorm_2': {'bias': ValueMetadataEntry(value_type='jax.Array', skip_deserialize=False, write_shape=(64,)), 'scale': ValueMetadataEntry(value_type='jax.Array', skip_deserialize=False, write_shape=(64,))}, 'MultiHeadAttention_0': {'key': {'bias': ValueMetadataEntry(value_type='jax.Array', skip_deserialize=False, write_shape=(1, 64)), 'kernel': ValueMetadataEntry(value_type='jax.Array', skip_deserialize=False, write_shape=(64, 8, 64))}, 'out': {'bias': ValueMetadataEntry(value_type='jax.Array', skip_deserialize=False, write_shape=(64,)), 'kernel': ValueMetadataEntry(value_type='jax.Array', skip_deserialize=False, write_shape=(1, 64, 512))}, 'query': {'bias': ValueMetadataEntry(value_type='jax.Array', skip_deserialize=False, write_shape=(1, 64)), 'kernel': ValueMetadataEntry(value_type='jax.Array', skip_deserialize=False, write_shape=(64, 8, 64))}, 'value': {'bias': ValueMetadataEntry(value_type='jax.Array', skip_deserialize=False, write_shape=(1, 64)), 'kernel': ValueMetadataEntry(value_type='jax.Array', 
skip_deserialize=False, write_shape=(64, 8, 64))}}, 'MultiHeadAttention_1': {'key': {'bias': ValueMetadataEntry(value_type='jax.Array', skip_deserialize=False, write_shape=(1, 64)), 'kernel': ValueMetadataEntry(value_type='jax.Array', skip_deserialize=False, write_shape=(64, 8, 64))}, 'out': {'bias': ValueMetadataEntry(value_type='jax.Array', skip_deserialize=False, write_shape=(64,)), 'kernel': ValueMetadataEntry(value_type='jax.Array', skip_deserialize=False, write_shape=(1, 64, 512))}, 'query': {'bias': ValueMetadataEntry(value_type='jax.Array', skip_deserialize=False, write_shape=(1, 64)), 'kernel': ValueMetadataEntry(value_type='jax.Array', skip_deserialize=False, write_shape=(64, 8, 64))}, 'value': {'bias': ValueMetadataEntry(value_type='jax.Array', skip_deserialize=False, write_shape=(1, 64)), 'kernel': ValueMetadataEntry(value_type='jax.Array', skip_deserialize=False, write_shape=(64, 8, 64))}}}), 'STBlock_5': Diff(lhs=None, rhs={'Dense_0': {'bias': ValueMetadataEntry(value_type='jax.Array', skip_deserialize=False, write_shape=(64,)), 'kernel': ValueMetadataEntry(value_type='jax.Array', skip_deserialize=False, write_shape=(64, 512))}, 'LayerNorm_0': {'bias': ValueMetadataEntry(value_type='jax.Array', skip_deserialize=False, write_shape=(64,)), 'scale': ValueMetadataEntry(value_type='jax.Array', skip_deserialize=False, write_shape=(64,))}, 'LayerNorm_1': {'bias': ValueMetadataEntry(value_type='jax.Array', skip_deserialize=False, write_shape=(64,)), 'scale': ValueMetadataEntry(value_type='jax.Array', skip_deserialize=False, write_shape=(64,))}, 'LayerNorm_2': {'bias': ValueMetadataEntry(value_type='jax.Array', skip_deserialize=False, write_shape=(64,)), 'scale': ValueMetadataEntry(value_type='jax.Array', skip_deserialize=False, write_shape=(64,))}, 'MultiHeadAttention_0': {'key': {'bias': ValueMetadataEntry(value_type='jax.Array', skip_deserialize=False, write_shape=(1, 64)), 'kernel': ValueMetadataEntry(value_type='jax.Array', skip_deserialize=False, write_shape=(64, 8, 64))}, 'out': {'bias': ValueMetadataEntry(value_type='jax.Array', skip_deserialize=False, write_shape=(64,)), 'kernel': ValueMetadataEntry(value_type='jax.Array', skip_deserialize=False, write_shape=(1, 64, 512))}, 'query': {'bias': ValueMetadataEntry(value_type='jax.Array', skip_deserialize=False, write_shape=(1, 64)), 'kernel': ValueMetadataEntry(value_type='jax.Array', skip_deserialize=False, write_shape=(64, 8, 64))}, 'value': {'bias': ValueMetadataEntry(value_type='jax.Array', skip_deserialize=False, write_shape=(1, 64)), 'kernel': ValueMetadataEntry(value_type='jax.Array', skip_deserialize=False, write_shape=(64, 8, 64))}}, 'MultiHeadAttention_1': {'key': {'bias': ValueMetadataEntry(value_type='jax.Array', skip_deserialize=False, write_shape=(1, 64)), 'kernel': ValueMetadataEntry(value_type='jax.Array', skip_deserialize=False, write_shape=(64, 8, 64))}, 'out': {'bias': ValueMetadataEntry(value_type='jax.Array', skip_deserialize=False, write_shape=(64,)), 'kernel': ValueMetadataEntry(value_type='jax.Array', skip_deserialize=False, write_shape=(1, 64, 512))}, 'query': {'bias': ValueMetadataEntry(value_type='jax.Array', skip_deserialize=False, write_shape=(1, 64)), 'kernel': ValueMetadataEntry(value_type='jax.Array', skip_deserialize=False, write_shape=(64, 8, 64))}, 'value': {'bias': ValueMetadataEntry(value_type='jax.Array', skip_deserialize=False, write_shape=(1, 64)), 'kernel': ValueMetadataEntry(value_type='jax.Array', skip_deserialize=False, write_shape=(64, 8, 64))}}}), 'STBlock_6': Diff(lhs=None, 
rhs={'Dense_0': {'bias': ValueMetadataEntry(value_type='jax.Array', skip_deserialize=False, write_shape=(64,)), 'kernel': ValueMetadataEntry(value_type='jax.Array', skip_deserialize=False, write_shape=(64, 512))}, 'LayerNorm_0': {'bias': ValueMetadataEntry(value_type='jax.Array', skip_deserialize=False, write_shape=(64,)), 'scale': ValueMetadataEntry(value_type='jax.Array', skip_deserialize=False, write_shape=(64,))}, 'LayerNorm_1': {'bias': ValueMetadataEntry(value_type='jax.Array', skip_deserialize=False, write_shape=(64,)), 'scale': ValueMetadataEntry(value_type='jax.Array', skip_deserialize=False, write_shape=(64,))}, 'LayerNorm_2': {'bias': ValueMetadataEntry(value_type='jax.Array', skip_deserialize=False, write_shape=(64,)), 'scale': ValueMetadataEntry(value_type='jax.Array', skip_deserialize=False, write_shape=(64,))}, 'MultiHeadAttention_0': {'key': {'bias': ValueMetadataEntry(value_type='jax.Array', skip_deserialize=False, write_shape=(1, 64)), 'kernel': ValueMetadataEntry(value_type='jax.Array', skip_deserialize=False, write_shape=(64, 8, 64))}, 'out': {'bias': ValueMetadataEntry(value_type='jax.Array', skip_deserialize=False, write_shape=(64,)), 'kernel': ValueMetadataEntry(value_type='jax.Array', skip_deserialize=False, write_shape=(1, 64, 512))}, 'query': {'bias': ValueMetadataEntry(value_type='jax.Array', skip_deserialize=False, write_shape=(1, 64)), 'kernel': ValueMetadataEntry(value_type='jax.Array', skip_deserialize=False, write_shape=(64, 8, 64))}, 'value': {'bias': ValueMetadataEntry(value_type='jax.Array', skip_deserialize=False, write_shape=(1, 64)), 'kernel': ValueMetadataEntry(value_type='jax.Array', skip_deserialize=False, write_shape=(64, 8, 64))}}, 'MultiHeadAttention_1': {'key': {'bias': ValueMetadataEntry(value_type='jax.Array', skip_deserialize=False, write_shape=(1, 64)), 'kernel': ValueMetadataEntry(value_type='jax.Array', skip_deserialize=False, write_shape=(64, 8, 64))}, 'out': {'bias': ValueMetadataEntry(value_type='jax.Array', skip_deserialize=False, write_shape=(64,)), 'kernel': ValueMetadataEntry(value_type='jax.Array', skip_deserialize=False, write_shape=(1, 64, 512))}, 'query': {'bias': ValueMetadataEntry(value_type='jax.Array', skip_deserialize=False, write_shape=(1, 64)), 'kernel': ValueMetadataEntry(value_type='jax.Array', skip_deserialize=False, write_shape=(64, 8, 64))}, 'value': {'bias': ValueMetadataEntry(value_type='jax.Array', skip_deserialize=False, write_shape=(1, 64)), 'kernel': ValueMetadataEntry(value_type='jax.Array', skip_deserialize=False, write_shape=(64, 8, 64))}}}), 'STBlock_7': Diff(lhs=None, rhs={'Dense_0': {'bias': ValueMetadataEntry(value_type='jax.Array', skip_deserialize=False, write_shape=(64,)), 'kernel': ValueMetadataEntry(value_type='jax.Array', skip_deserialize=False, write_shape=(64, 512))}, 'LayerNorm_0': {'bias': ValueMetadataEntry(value_type='jax.Array', skip_deserialize=False, write_shape=(64,)), 'scale': ValueMetadataEntry(value_type='jax.Array', skip_deserialize=False, write_shape=(64,))}, 'LayerNorm_1': {'bias': ValueMetadataEntry(value_type='jax.Array', skip_deserialize=False, write_shape=(64,)), 'scale': ValueMetadataEntry(value_type='jax.Array', skip_deserialize=False, write_shape=(64,))}, 'LayerNorm_2': {'bias': ValueMetadataEntry(value_type='jax.Array', skip_deserialize=False, write_shape=(64,)), 'scale': ValueMetadataEntry(value_type='jax.Array', skip_deserialize=False, write_shape=(64,))}, 'MultiHeadAttention_0': {'key': {'bias': ValueMetadataEntry(value_type='jax.Array', skip_deserialize=False, 
write_shape=(1, 64)), 'kernel': ValueMetadataEntry(value_type='jax.Array', skip_deserialize=False, write_shape=(64, 8, 64))}, 'out': {'bias': ValueMetadataEntry(value_type='jax.Array', skip_deserialize=False, write_shape=(64,)), 'kernel': ValueMetadataEntry(value_type='jax.Array', skip_deserialize=False, write_shape=(1, 64, 512))}, 'query': {'bias': ValueMetadataEntry(value_type='jax.Array', skip_deserialize=False, write_shape=(1, 64)), 'kernel': ValueMetadataEntry(value_type='jax.Array', skip_deserialize=False, write_shape=(64, 8, 64))}, 'value': {'bias': ValueMetadataEntry(value_type='jax.Array', skip_deserialize=False, write_shape=(1, 64)), 'kernel': ValueMetadataEntry(value_type='jax.Array', skip_deserialize=False, write_shape=(64, 8, 64))}}, 'MultiHeadAttention_1': {'key': {'bias': ValueMetadataEntry(value_type='jax.Array', skip_deserialize=False, write_shape=(1, 64)), 'kernel': ValueMetadataEntry(value_type='jax.Array', skip_deserialize=False, write_shape=(64, 8, 64))}, 'out': {'bias': ValueMetadataEntry(value_type='jax.Array', skip_deserialize=False, write_shape=(64,)), 'kernel': ValueMetadataEntry(value_type='jax.Array', skip_deserialize=False, write_shape=(1, 64, 512))}, 'query': {'bias': ValueMetadataEntry(value_type='jax.Array', skip_deserialize=False, write_shape=(1, 64)), 'kernel': ValueMetadataEntry(value_type='jax.Array', skip_deserialize=False, write_shape=(64, 8, 64))}, 'value': {'bias': ValueMetadataEntry(value_type='jax.Array', skip_deserialize=False, write_shape=(1, 64)), 'kernel': ValueMetadataEntry(value_type='jax.Array', skip_deserialize=False, write_shape=(64, 8, 64))}}})}}}}, None, None]}\r\n",,terminal_output +2532,8311538,"TERMINAL",0,0,"6\t ",,terminal_output +2533,8312453,"TERMINAL",0,0,"wandb: \r\nwandb: 🚀 View run dynamics-causal-overfit-3373280 at: https://wandb.ai/instant-uv/jafar/runs/wt6urmv2\r\nwandb: Find logs at: ../../../../../hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/wandb/run-20250724_161809-wt6urmv2/logs\r\n",,terminal_output +2534,8312518,"TERMINAL",0,0,"7\t ",,terminal_output +2535,8313583,"TERMINAL",0,0,"8\t ",,terminal_output +2536,8314147,"TERMINAL",0,0,"srun: error: hkn0901: task 0: Exited with exit code 1\r\n]0;tum_cte0515@hkn0901:~/Projects/jafar[?2004h(jafar) [tum_cte0515@hkn0901 jafar]$ ",,terminal_output +2537,8314611,"TERMINAL",0,0,"9\t ",,terminal_output +2538,8315619,"TERMINAL",0,0,"50\t ",,terminal_output +2539,8316658,"TERMINAL",0,0,"1\t ",,terminal_output +2540,8317699,"TERMINAL",0,0,"2\t ",,terminal_output +2541,8318735,"TERMINAL",0,0,"3\t ",,terminal_output +2542,8319781,"TERMINAL",0,0,"4\t ",,terminal_output +2543,8320828,"TERMINAL",0,0,"5\t ",,terminal_output +2544,8321866,"TERMINAL",0,0,"6\t ",,terminal_output +2545,8322898,"TERMINAL",0,0,"7\t ",,terminal_output +2546,8323925,"TERMINAL",0,0,"8\t ",,terminal_output +2547,8325066,"TERMINAL",0,0,"9\t ",,terminal_output +2548,8326086,"TERMINAL",0,0,"9:00\t ",,terminal_output +2549,8327039,"TERMINAL",0,0,"1\t ",,terminal_output +2550,8328128,"TERMINAL",0,0,"2\t ",,terminal_output +2551,8329117,"TERMINAL",0,0,"3\t ",,terminal_output +2552,8330154,"TERMINAL",0,0,"4\t ",,terminal_output +2553,8331198,"TERMINAL",0,0,"5\t ",,terminal_output +2554,8332238,"TERMINAL",0,0,"6\t ",,terminal_output +2555,8333273,"TERMINAL",0,0,"7\t ",,terminal_output +2556,8334327,"TERMINAL",0,0,"8\t ",,terminal_output +2557,8335356,"TERMINAL",0,0,"10\t ",,terminal_output +2558,8336302,"train_dynamics.py",0,0,"",python,tab +2559,8336398,"TERMINAL",0,0,"1\t ",,terminal_output 
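The Diff(lhs=..., rhs=...) wall above is Orbax comparing the abstract restore target (ShapeDtypeStruct, lhs) against the checkpoint's saved metadata (ValueMetadataEntry, rhs): the run tried to restore a checkpoint whose block count and FFN width do not match the model being built, and the srun step exited with code 1. A minimal sketch, assuming toy pytrees (names and shapes are illustrative, not taken from train_dynamics.py), of surfacing such a mismatch before attempting the restore:

import jax
import jax.numpy as jnp

def diff_pytrees(saved, target):
    # Flatten both trees into {key_path: leaf} maps so keys present on only
    # one side, and per-leaf shape mismatches, can be reported up front,
    # mirroring what Orbax prints as Diff(lhs=..., rhs=...).
    saved_flat = dict(jax.tree_util.tree_flatten_with_path(saved)[0])
    target_flat = dict(jax.tree_util.tree_flatten_with_path(target)[0])
    only_in_ckpt = sorted(map(str, saved_flat.keys() - target_flat.keys()))
    only_in_target = sorted(map(str, target_flat.keys() - saved_flat.keys()))
    shape_mismatches = {
        str(k): (saved_flat[k].shape, target_flat[k].shape)
        for k in saved_flat.keys() & target_flat.keys()
        if saved_flat[k].shape != target_flat[k].shape
    }
    return only_in_ckpt, only_in_target, shape_mismatches

# Toy example: checkpoint carries an extra block, target expects a wider kernel.
saved = {"STBlock_0": {"kernel": jnp.zeros((64, 512))},
         "STBlock_4": {"kernel": jnp.zeros((64, 512))}}
target = {"STBlock_0": {"kernel": jnp.zeros((2048, 512))}}
print(diff_pytrees(saved, target))

Running the same check on jax.eval_shape outputs keeps it cheap, since no weights are materialized.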
+2560,8337455,"TERMINAL",0,0,"2\t ",,terminal_output +2561,8338586,"TERMINAL",0,0,"3\t ",,terminal_output +2562,8339410,"slurm/dev/mihir/horeka/overfit_sample/causal/dynamics_overfit_sample.sbatch",0,0,"",shellscript,tab +2563,8339564,"TERMINAL",0,0,"4\t ",,terminal_output +2564,8340563,"TERMINAL",0,0,"5\t ",,terminal_output +2565,8341319,"slurm/dev/mihir/horeka/overfit_sample/causal/dynamics_overfit_sample.sbatch",995,0,"",shellscript,selection_mouse +2566,8341637,"TERMINAL",0,0,"6\t ",,terminal_output +2567,8341809,"slurm/dev/mihir/horeka/overfit_sample/causal/dynamics_overfit_sample.sbatch",990,0,"",shellscript,selection_mouse +2568,8342663,"TERMINAL",0,0,"7\t ",,terminal_output +2569,8343789,"TERMINAL",0,0,"810",,terminal_output +2570,8344804,"TERMINAL",0,0,"9\t ",,terminal_output +2571,8345764,"TERMINAL",0,0,"20\t ",,terminal_output +2572,8346809,"TERMINAL",0,0,"1\t ",,terminal_output +2573,8347844,"TERMINAL",0,0,"2\t ",,terminal_output +2574,8348906,"TERMINAL",0,0,"3\t ",,terminal_output +2575,8350042,"TERMINAL",0,0,"4\t ",,terminal_output +2576,8350891,"slurm/dev/mihir/horeka/causal_fit_modelsizes/train_dynamics_new_arch_255M.sbatch",0,0,"#!/usr/bin/env bash\n\n#SBATCH --nodes=1\n#SBATCH --ntasks-per-node=1\n#SBATCH --time=00:10:00\n#SBATCH --partition=dev_accelerated\n#SBATCH --cpus-per-task=5\n#SBATCH --gres=gpu:1\n#SBATCH --output=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir/%x_%j.log\n#SBATCH --error=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir/%x_%j.log\n#SBATCH --job-name=train_dyn_causal_255M\n\n# Log the sbatch script\ncat $0\n\nmodule unload mpi/openmpi/5.0\nmodule unload devel/cuda/12.4\nsource .venv/bin/activate\n\narray_records_dir=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_new/open_ai_minecraft_arrayrecords_chunked\n\njob_name=$SLURM_JOB_NAME\nslurm_job_id=$SLURM_JOB_ID\n\nCHECKPOINT_DIR=$ws_dir/checkpoints/$job_name/$slurm_job_id\nmkdir -p $CHECKPOINT_DIR\n\ntokenizer_ckpt_dir=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data/checkpoints/train_tokenizer_lr_sweep_1e-4_larger_ffn/\n\nenv | grep SLURM\n\nsrun python train_dynamics.py \\n --save_ckpt \\n --num_steps=50 \\n --warmup_steps=0 \\n --wsd_decay_steps=0 \\n --ckpt_dir $CHECKPOINT_DIR \\n --batch_size=12 \\n --init_lr=1e-3 \\n --max_lr=1e-3 \\n --log_image_interval=100 \\n --log \\n --log_checkpoint_interval=100 \\n --name=dynamics-causal-255M-$slurm_job_id \\n --tags dynamics causal 255M \\n --entity instant-uv \\n --project jafar \\n --tokenizer_checkpoint=$tokenizer_ckpt_dir \\n --data_dir $array_records_dir \\n --dyna_dim=1024 \\n --dyna_num_blocks=16 \\n --dyna_num_heads=16 \\n --dyna_ffn_dim=4096",shellscript,tab +2577,8351574,"TERMINAL",0,0,"5\t ",,terminal_output +2578,8351704,"slurm/dev/mihir/horeka/causal_fit_modelsizes/train_dynamics_new_arch_255M.sbatch",870,0,"",shellscript,selection_mouse +2579,8351819,"slurm/dev/mihir/horeka/causal_fit_modelsizes/train_dynamics_new_arch_255M.sbatch",868,4,"data",shellscript,selection_mouse +2580,8351966,"slurm/dev/mihir/horeka/causal_fit_modelsizes/train_dynamics_new_arch_255M.sbatch",793,134,"tokenizer_ckpt_dir=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data/checkpoints/train_tokenizer_lr_sweep_1e-4_larger_ffn/\n",shellscript,selection_mouse +2581,8352583,"TERMINAL",0,0,"7\t ",,terminal_output +2582,8353621,"TERMINAL",0,0,"8\t ",,terminal_output +2583,8354181,"slurm/dev/mihir/horeka/overfit_sample/causal/dynamics_overfit_sample.sbatch",0,0,"",shellscript,tab 
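The sbatch above drives train_dynamics.py entirely through CLI flags, and the tyro warnings later in this log (from tyro/_parsers.py) indicate a dataclass-plus-tyro CLI. A minimal sketch of that pattern; the Args fields here are assumptions that mirror the sbatch flags, not the script's actual config class:

from dataclasses import dataclass
import tyro

@dataclass
class Args:
    # Field names mirror the flags passed in the sbatch scripts above.
    seed: int = 0
    num_steps: int = 2000
    batch_size: int = 1
    init_lr: float = 1e-4
    max_lr: float = 1e-4
    dyna_dim: int = 128
    dyna_num_blocks: int = 2
    dyna_num_heads: int = 4
    ckpt_dir: str = ""
    tokenizer_checkpoint: str = ""

if __name__ == "__main__":
    args = tyro.cli(Args)  # parses e.g. --num-steps / --dyna-dim from argv
    print(args)

The `param-dtype` UserWarnings seen further down are tyro's complaint when a field's annotation and its default value disagree in type; they are benign here.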
+2584,8354662,"TERMINAL",0,0,"9\t ",,terminal_output +2585,8355476,"slurm/dev/mihir/horeka/overfit_sample/causal/dynamics_overfit_sample.sbatch",994,0,"\n",shellscript,content +2586,8355717,"slurm/dev/mihir/horeka/overfit_sample/causal/dynamics_overfit_sample.sbatch",995,0,"tokenizer_ckpt_dir=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data/checkpoints/train_tokenizer_lr_sweep_1e-4_larger_ffn/\n",shellscript,content +2587,8355736,"TERMINAL",0,0,"30\t ",,terminal_output +2588,8356332,"slurm/dev/mihir/horeka/overfit_sample/causal/dynamics_overfit_sample.sbatch",995,0,"",shellscript,selection_command +2589,8356498,"slurm/dev/mihir/horeka/overfit_sample/causal/dynamics_overfit_sample.sbatch",848,0,"",shellscript,selection_command +2590,8356759,"TERMINAL",0,0,"12",,terminal_output +2591,8356984,"slurm/dev/mihir/horeka/overfit_sample/causal/dynamics_overfit_sample.sbatch",848,147,"",shellscript,content +2592,8357788,"TERMINAL",0,0,"2\t ",,terminal_output +2593,8358835,"TERMINAL",0,0,"3\t ",,terminal_output +2594,8359271,"TERMINAL",0,0,"sh slurm/dev/mihir/horeka/overfit_sample/causal/dynamics_overfit_sample.sbatch",,terminal_output +2595,8359854,"TERMINAL",0,0,"\r\n[?2004l\r#!/usr/bin/env bash\r\n\r\n#SBATCH --nodes=1\r\n#SBATCH --ntasks-per-node=1\r\n#SBATCH --time=48:00:00\r\n#SBATCH --partition=accelerated\r\n#SBATCH --cpus-per-task=5\r\n#SBATCH --gres=gpu:1\r\n#SBATCH --output=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir/yoloruns/%x_%j.log\r\n#SBATCH --error=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir/yoloruns/%x_%j.log\r\n#SBATCH --job-name=train_dynamics_overfit_sample_causal\r\n\r\n# Log the sbatch script\r\ncat $0\r\n\r\nmodule unload mpi/openmpi/5.0\r\nmodule unload devel/cuda/12.4\r\n# source .venv/bin/activate\r\n\r\narray_records_dir=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_new/open_ai_minecraft_arrayrecords_chunked\r\n\r\njob_name=$SLURM_JOB_NAME\r\nslurm_job_id=$SLURM_JOB_ID\r\n\r\nCHECKPOINT_DIR=$ws_dir/checkpoints/causal/overfit-seed69-1/$job_name/$slurm_job_id\r\nmkdir -p $CHECKPOINT_DIR\r\n\r\ntokenizer_ckpt_dir=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data/checkpoints/train_tokenizer_lr_sweep_1e-4_larger_ffn/\r\n\r\n\r\nenv | grep SLURM\r\n\r\nsrun python train_dynamics.py \\r\n --save_ckpt \\r\n --seed=69 \\r\n --num_steps=2000 \\r\n --warmup_steps=0 \\r\n --wsd_decay_steps=0 \\r\n --ckpt_dir $CHECKPOINT_DIR \\r\n --batch_size=1 \\r\n --init_lr=1e-4 \\r\n --max_lr=1e-4 \\r\n --log_image_interval=100 \\r\n --log \\r\n --log_checkpoint_interval=1000 \\r\n --name=dynamics-causal-overfit-$slurm_job_id \\r\n --tags dynamics causal overfit \\r\n --entity instant-uv \\r\n --project jafar \\r\n --tokenizer_checkpoint=$tokenizer_ckpt_dir \\r\n --data_dir $array_records_dir \\r\n --dyna_dim=128 \\r\n --dyna_num_blocks=2 \\r\n --dyna_num_heads=4\r\n ",,terminal_output +2596,8359884,"TERMINAL",0,0,"4\t ",,terminal_output 
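The script echoes `env | grep SLURM` and builds its checkpoint path and wandb run name from $SLURM_JOB_NAME and $SLURM_JOB_ID; because it was launched with `sh` inside an interactive allocation, SLURM_JOB_NAME stays "interactive" in the env dump that follows. A small sketch, assuming only the standard SLURM variables, of reading the same values from Python with fallbacks for non-sbatch runs:

import os

# Fall back to sane defaults when running outside sbatch, as in this session.
job_id = os.environ.get("SLURM_JOB_ID", "local")
job_name = os.environ.get("SLURM_JOB_NAME", "interactive")
n_tasks = int(os.environ.get("SLURM_NTASKS", "1"))

run_name = f"dynamics-causal-overfit-{job_id}"  # matches --name in the script
print(run_name, job_name, n_tasks)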
+2597,8360180,"TERMINAL",0,0,"SLURM_STEP_NUM_TASKS=1\r\nSLURM_JOB_USER=tum_cte0515\r\nSLURM_TASKS_PER_NODE=1\r\nSLURM_JOB_UID=999226\r\nSLURM_TASK_PID=436020\r\nSLURM_JOB_GPUS=0\r\nSLURM_LOCALID=0\r\nSLURM_SUBMIT_DIR=/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar\r\nSLURMD_NODENAME=hkn0901\r\nSLURM_JOB_START_TIME=1753366425\r\nSLURM_STEP_NODELIST=hkn0901\r\nSLURM_CLUSTER_NAME=hk\r\nSLURM_JOB_END_TIME=1753370025\r\nSLURM_PMI2_SRUN_PORT=34243\r\nSLURM_CPUS_ON_NODE=6\r\nSLURM_JOB_CPUS_PER_NODE=6\r\nSLURM_GPUS_ON_NODE=1\r\nSLURM_GTIDS=0\r\nSLURM_JOB_PARTITION=dev_accelerated-h100\r\nSLURM_TRES_PER_TASK=cpu=5\r\nSLURM_OOM_KILL_STEP=0\r\nSLURM_JOB_NUM_NODES=1\r\nSLURM_STEPID=4294967290\r\nSLURM_JOBID=3373280\r\nSLURM_PTY_PORT=42147\r\nSLURM_JOB_QOS=normal\r\nSLURM_LAUNCH_NODE_IPADDR=10.0.7.199\r\nSLURM_PTY_WIN_ROW=48\r\nSLURM_PMI2_PROC_MAPPING=(vector,(0,1,1))\r\nSLURMD_DEBUG=2\r\nSLURM_PROCID=0\r\nSLURM_CPUS_PER_TASK=5\r\nSLURM_NTASKS=1\r\nSLURM_TOPOLOGY_ADDR=hkibb.hkibbi4.hkibbi4e1.hkn0901\r\nSLURM_TOPOLOGY_ADDR_PATTERN=switch.switch.switch.node\r\nSLURM_SRUN_COMM_HOST=10.0.7.199\r\nSLURM_SCRIPT_CONTEXT=prolog_task\r\nSLURM_PTY_WIN_COL=194\r\nSLURM_NODELIST=hkn0901\r\nSLURM_SRUN_COMM_PORT=46593\r\nSLURM_STEP_ID=4294967290\r\nSLURM_JOB_ACCOUNT=hk-project-p0023960\r\nSLURM_PRIO_PROCESS=0\r\nSLURM_NPROCS=1\r\nSLURM_NNODES=1\r\nSLURM_SUBMIT_HOST=hkn1991.localdomain\r\nSLURM_JOB_ID=3373280\r\nSLURM_NODEID=0\r\nSLURM_STEP_NUM_NODES=1\r\nSLURM_STEP_TASKS_PER_NODE=1\r\nSLURM_MPI_TYPE=pmi2\r\nSLURM_PMI2_STEP_NODES=hkn0901\r\nSLURM_CONF=/etc/slurm/slurm.conf\r\nSLURM_JOB_NAME=interactive\r\nSLURM_NTASKS_PER_NODE=1\r\nSLURM_STEP_LAUNCHER_PORT=46593\r\nSLURM_JOB_GID=502226\r\nSLURM_JOB_NODELIST=hkn0901\r\n",,terminal_output +2598,8360295,"TERMINAL",0,0,"GpuFreq=control_disabled\r\n",,terminal_output +2599,8360992,"TERMINAL",0,0,"5\t ",,terminal_output +2600,8362025,"TERMINAL",0,0,"6\t ",,terminal_output +2601,8363044,"TERMINAL",0,0,"7\t ",,terminal_output +2602,8364064,"TERMINAL",0,0,"8\t ",,terminal_output +2603,8365123,"TERMINAL",0,0,"9\t ",,terminal_output +2604,8366128,"TERMINAL",0,0,"40\t ",,terminal_output +2605,8366421,"TERMINAL",0,0,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/tyro/_parsers.py:347: UserWarning: The field `param-dtype` is annotated with type ``, but the default value `` has type ``. We'll try to handle this gracefully, but it may cause unexpected behavior.\r\n warnings.warn(message)\r\n/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/tyro/_parsers.py:347: UserWarning: The field `dtype` is annotated with type ``, but the default value `` has type ``. We'll try to handle this gracefully, but it may cause unexpected behavior.\r\n warnings.warn(message)\r\n",,terminal_output +2606,8367174,"TERMINAL",0,0,"1\t ",,terminal_output +2607,8368207,"TERMINAL",0,0,"2\t ",,terminal_output +2608,8368469,"TERMINAL",0,0,"2025-07-24 16:19:43.135967: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. 
Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +2609,8369244,"TERMINAL",0,0,"3\t ",,terminal_output +2610,8370294,"TERMINAL",0,0,"4\t ",,terminal_output +2611,8371340,"TERMINAL",0,0,"5\t ",,terminal_output +2612,8372375,"TERMINAL",0,0,"7\t ",,terminal_output +2613,8373483,"TERMINAL",0,0,"8\t ",,terminal_output +2614,8374457,"TERMINAL",0,0,"9\t ",,terminal_output +2615,8375537,"TERMINAL",0,0,"50\t ",,terminal_output +2616,8376543,"TERMINAL",0,0,"1\t ",,terminal_output +2617,8377684,"TERMINAL",0,0,"2\t ",,terminal_output +2618,8377735,"TERMINAL",0,0,"2025-07-24 16:19:52.431581: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +2619,8378620,"TERMINAL",0,0,"3\t ",,terminal_output +2620,8379664,"TERMINAL",0,0,"4\t ",,terminal_output +2621,8380697,"TERMINAL",0,0,"5\t ",,terminal_output +2622,8381731,"TERMINAL",0,0,"6\t ",,terminal_output +2623,8382776,"TERMINAL",0,0,"7\t ",,terminal_output +2624,8383820,"TERMINAL",0,0,"8\t ",,terminal_output +2625,8384858,"TERMINAL",0,0,"9\t ",,terminal_output +2626,8385902,"TERMINAL",0,0,"20:00\t ",,terminal_output +2627,8386941,"TERMINAL",0,0,"1\t ",,terminal_output +2628,8388024,"TERMINAL",0,0,"2\t ",,terminal_output +2629,8389052,"TERMINAL",0,0,"3\t ",,terminal_output +2630,8390177,"TERMINAL",0,0,"4\t ",,terminal_output +2631,8391200,"TERMINAL",0,0,"5\t ",,terminal_output +2632,8392240,"TERMINAL",0,0,"6\t ",,terminal_output +2633,8393199,"TERMINAL",0,0,"7\t ",,terminal_output +2634,8394334,"TERMINAL",0,0,"8\t ",,terminal_output +2635,8395279,"TERMINAL",0,0,"9\t ",,terminal_output +2636,8396421,"TERMINAL",0,0,"10\t ",,terminal_output +2637,8396472,"TERMINAL",0,0,"2025-07-24 16:20:11.146240: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +2638,8397357,"TERMINAL",0,0,"2\t ",,terminal_output +2639,8398478,"TERMINAL",0,0,"3\t ",,terminal_output +2640,8398781,"TERMINAL",0,0,"wandb: Currently logged in as: mihir-mahajan2002 (instant-uv) to https://api.wandb.ai. 
Use `wandb login --relogin` to force relogin\r\n",,terminal_output +2641,8399258,"TERMINAL",0,0,"wandb: Tracking run with wandb version 0.19.11\r\nwandb: Run data is saved locally in /hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/wandb/run-20250724_162013-m0b20qmd\r\nwandb: Run `wandb offline` to turn off syncing.\r\nwandb: Syncing run dynamics-causal-overfit-3373280\r\nwandb: ⭐️ View project at https://wandb.ai/instant-uv/jafar\r\nwandb: 🚀 View run at https://wandb.ai/instant-uv/jafar/runs/m0b20qmd\r\n",,terminal_output +2642,8399442,"TERMINAL",0,0,"4\t ",,terminal_output +2643,8400478,"TERMINAL",0,0,"5\t ",,terminal_output +2644,8401519,"TERMINAL",0,0,"6\t ",,terminal_output +2645,8402566,"TERMINAL",0,0,"7\t ",,terminal_output +2646,8403592,"TERMINAL",0,0,"8\t ",,terminal_output +2647,8404718,"TERMINAL",0,0,"9\t ",,terminal_output +2648,8405742,"TERMINAL",0,0,"20\t ",,terminal_output +2649,8406766,"TERMINAL",0,0,"1\t ",,terminal_output +2650,8407799,"TERMINAL",0,0,"2\t ",,terminal_output +2651,8408786,"TERMINAL",0,0,"3\t ",,terminal_output +2652,8409824,"TERMINAL",0,0,"4\t ",,terminal_output +2653,8410857,"TERMINAL",0,0,"5\t ",,terminal_output +2654,8411988,"TERMINAL",0,0,"6\t ",,terminal_output +2655,8412935,"TERMINAL",0,0,"7\t ",,terminal_output +2656,8414036,"TERMINAL",0,0,"8\t ",,terminal_output +2657,8415059,"TERMINAL",0,0,"9\t ",,terminal_output +2658,8416057,"TERMINAL",0,0,"30\t ",,terminal_output +2659,8417102,"TERMINAL",0,0,"1\t ",,terminal_output +2660,8418232,"TERMINAL",0,0,"2\t ",,terminal_output +2661,8419260,"TERMINAL",0,0,"3\t ",,terminal_output +2662,8420214,"TERMINAL",0,0,"4\t ",,terminal_output +2663,8421256,"TERMINAL",0,0,"5\t ",,terminal_output +2664,8422302,"TERMINAL",0,0,"6\t ",,terminal_output +2665,8423333,"TERMINAL",0,0,"7\t ",,terminal_output +2666,8424375,"TERMINAL",0,0,"9\t ",,terminal_output +2667,8425506,"TERMINAL",0,0,"40\t ",,terminal_output +2668,8426456,"TERMINAL",0,0,"1\t ",,terminal_output +2669,8427494,"TERMINAL",0,0,"2\t ",,terminal_output +2670,8428532,"TERMINAL",0,0,"3\t ",,terminal_output +2671,8429608,"TERMINAL",0,0,"4\t ",,terminal_output +2672,8430615,"TERMINAL",0,0,"5\t ",,terminal_output +2673,8431660,"TERMINAL",0,0,"6\t ",,terminal_output +2674,8432778,"TERMINAL",0,0,"7\t ",,terminal_output +2675,8433799,"TERMINAL",0,0,"8\t ",,terminal_output +2676,8434110,"TERMINAL",0,0,"WARNING:absl:Missing metrics for step 51000\r\nERROR:absl:File /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data/checkpoints/train_tokenizer_lr_sweep_1e-4_larger_ffn/051000/metrics/metrics not found.\r\nWARNING:absl:Missing metrics for step 40000\r\nERROR:absl:File /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data/checkpoints/train_tokenizer_lr_sweep_1e-4_larger_ffn/040000/metrics/metrics not found.\r\nWARNING:absl:Missing metrics for step 53000\r\nERROR:absl:File /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data/checkpoints/train_tokenizer_lr_sweep_1e-4_larger_ffn/053000/metrics/metrics not found.\r\nWARNING:absl:Missing metrics for step 52000\r\nERROR:absl:File /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data/checkpoints/train_tokenizer_lr_sweep_1e-4_larger_ffn/052000/metrics/metrics not found.\r\nWARNING:absl:Missing metrics for step 20000\r\nERROR:absl:File /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data/checkpoints/train_tokenizer_lr_sweep_1e-4_larger_ffn/020000/metrics/metrics not found.\r\n",,terminal_output +2677,8434824,"TERMINAL",0,0,"9\t ",,terminal_output 
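The absl warnings above come from tokenizer checkpoint steps whose `<step>/metrics/metrics` file is missing; the restore still proceeds, but step selection by metric cannot see those steps. A defensive sketch (pure filesystem walk, with the directory layout taken from the error messages themselves) that lists which steps actually carry metrics before picking one:

from pathlib import Path

ckpt_root = Path(
    "/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data/"
    "checkpoints/train_tokenizer_lr_sweep_1e-4_larger_ffn"
)

if ckpt_root.exists():
    # Step directories are zero-padded integers, e.g. 051000.
    steps_with_metrics = sorted(
        int(p.name)
        for p in ckpt_root.iterdir()
        if p.is_dir() and p.name.isdigit() and (p / "metrics" / "metrics").exists()
    )
    print("steps with metrics:", steps_with_metrics)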
+2678,8435847,"TERMINAL",0,0,"50\t ",,terminal_output +2679,8436872,"TERMINAL",0,0,"1\t ",,terminal_output +2680,8437899,"TERMINAL",0,0,"2\t ",,terminal_output +2681,8438941,"TERMINAL",0,0,"3\t ",,terminal_output +2682,8440046,"TERMINAL",0,0,"4\t ",,terminal_output +2683,8441028,"TERMINAL",0,0,"5\t ",,terminal_output +2684,8441994,"TERMINAL",0,0,"2025-07-24 16:20:56.632105: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n2025-07-24 16:20:56.632522: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n2025-07-24 16:20:56.634386: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n2025-07-24 16:20:56.634401: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n2025-07-24 16:20:56.635148: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. 
Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +2685,8442105,"TERMINAL",0,0,"6\t ",,terminal_output +2686,8443118,"TERMINAL",0,0,"7\t ",,terminal_output +2687,8444137,"TERMINAL",0,0,"8\t ",,terminal_output +2688,8445274,"TERMINAL",0,0,"9\t ",,terminal_output +2689,8446221,"TERMINAL",0,0,"1:00\t ",,terminal_output +2690,8447252,"TERMINAL",0,0,"1\t ",,terminal_output +2691,8448292,"TERMINAL",0,0,"2\t ",,terminal_output +2692,8449408,"TERMINAL",0,0,"3\t ",,terminal_output +2693,8450375,"TERMINAL",0,0,"5\t ",,terminal_output +2694,8451417,"TERMINAL",0,0,"6\t ",,terminal_output +2695,8452455,"TERMINAL",0,0,"7\t ",,terminal_output +2696,8453493,"TERMINAL",0,0,"8\t ",,terminal_output +2697,8454528,"TERMINAL",0,0,"9\t ",,terminal_output +2698,8455569,"TERMINAL",0,0,"10\t ",,terminal_output +2699,8456601,"TERMINAL",0,0,"1\t ",,terminal_output +2700,8457642,"TERMINAL",0,0,"2\t ",,terminal_output +2701,8458786,"TERMINAL",0,0,"3\t ",,terminal_output +2702,8459815,"TERMINAL",0,0,"4\t ",,terminal_output +2703,8460757,"TERMINAL",0,0,"5\t ",,terminal_output +2704,8461862,"TERMINAL",0,0,"6\t ",,terminal_output +2705,8462837,"TERMINAL",0,0,"7\t ",,terminal_output +2706,8463869,"TERMINAL",0,0,"8\t ",,terminal_output +2707,8464924,"TERMINAL",0,0,"9\t ",,terminal_output +2708,8465962,"TERMINAL",0,0,"20\t ",,terminal_output +2709,8467083,"TERMINAL",0,0,"1\t ",,terminal_output +2710,8468111,"TERMINAL",0,0,"2\t ",,terminal_output +2711,8469131,"TERMINAL",0,0,"3\t ",,terminal_output +2712,8470201,"TERMINAL",0,0,"4\t ",,terminal_output +2713,8471187,"TERMINAL",0,0,"5\t ",,terminal_output +2714,8472197,"TERMINAL",0,0,"6\t ",,terminal_output +2715,8473239,"TERMINAL",0,0,"7\t ",,terminal_output +2716,8474281,"TERMINAL",0,0,"8\t ",,terminal_output +2717,8475323,"TERMINAL",0,0,"9\t ",,terminal_output +2718,8476362,"TERMINAL",0,0,"31\t ",,terminal_output +2719,8477404,"TERMINAL",0,0,"2\t ",,terminal_output +2720,8478554,"TERMINAL",0,0,"3\t ",,terminal_output +2721,8479487,"TERMINAL",0,0,"4\t ",,terminal_output +2722,8480362,"TERMINAL",0,0,"Running on 1 devices.\r\nCounting all components: ['tokenizer', 'lam', 'dynamics']\r\nParameter counts:\r\n{'tokenizer': 33750256, 'lam': 17229792, 'dynamics': 1603072, 'total': 52583120}\r\nStep 0, loss: 8.674175262451172\r\nStep 1, loss: 8.668670654296875\r\nStep 2, loss: 8.574606895446777\r\nStep 3, loss: 8.406307220458984\r\nStep 4, loss: 8.203619956970215\r\nStep 5, loss: 8.023378372192383\r\nStep 6, loss: 7.9560441970825195\r\nStep 7, loss: 7.877461910247803\r\nStep 8, loss: 7.824573040008545\r\nStep 9, loss: 7.750797748565674\r\nStep 10, loss: 7.717311382293701\r\nStep 11, loss: 7.669999599456787\r\nStep 12, loss: 7.609342098236084\r\nStep 13, loss: 7.595484256744385\r\nStep 14, loss: 7.5520453453063965\r\nStep 15, loss: 7.526183128356934\r\nStep 16, loss: 7.513381004333496\r\nStep 17, loss: 7.4649200439453125\r\nStep 18, loss: 7.451118469238281\r\nStep 19, loss: 7.427468776702881\r\nStep 20, loss: 7.402746677398682\r\nStep 21, loss: 7.3866047859191895\r\nStep 22, loss: 7.363571643829346\r\nStep 23, loss: 7.353758811950684\r\nStep 24, loss: 7.326560020446777\r\nStep 25, loss: 7.302070140838623\r\nStep 26, loss: 7.278361797332764\r\nStep 27, loss: 7.26531457901001\r\nStep 28, loss: 7.2429609298706055\r\nStep 29, loss: 7.1981916427612305\r\nStep 30, loss: 7.189955711364746\r\nStep 31, loss: 7.158105850219727\r\nStep 32, loss: 7.129963397979736\r\nStep 33, loss: 
7.115097999572754\r\nStep 34, loss: 7.098100662231445\r\nStep 35, loss: 7.075397968292236\r\nStep 36, loss: 7.052320957183838\r\nStep 37, loss: 7.009749889373779\r\nStep 38, loss: 7.015416622161865\r\nStep 39, loss: 6.986695289611816\r\nStep 40, loss: 6.965033054351807\r\nStep 41, loss: 6.954548358917236\r\nStep 42, loss: 6.92303466796875\r\nStep 43, loss: 6.8957061767578125\r\nStep 44, loss: 6.902023792266846\r\nStep 45, loss: 6.87942361831665\r\nStep 46, loss: 6.846747398376465\r\nStep 47, loss: 6.839104175567627\r\nStep 48, loss: 6.831884860992432\r\nStep 49, loss: 6.798049449920654\r\nStep 50, loss: 6.789340496063232\r\nStep 51, loss: 6.766947269439697\r\nStep 52, loss: 6.748626232147217\r\nStep 53, loss: 6.728170394897461\r\nStep 54, loss: 6.715014457702637\r\nStep 55, loss: 6.7036895751953125\r\nStep 56, loss: 6.684207916259766\r\nStep 57, loss: 6.669750690460205\r\nStep 58, loss: 6.655115127563477\r\nStep 59, loss: 6.640089511871338\r\nStep 60, loss: 6.622304439544678\r\nStep 61, loss: 6.614370346069336\r\nStep 62, loss: 6.5929365158081055\r\nStep 63, loss: 6.564897537231445\r\nStep 64, loss: 6.55553674697876\r\nStep 65, loss: 6.545957565307617\r\nStep 66, loss: 6.525360584259033\r\nStep 67, loss: 6.5073065757751465\r\nStep 68, loss: 6.503278732299805\r\nStep 69, loss: 6.485255718231201\r\nStep 70, loss: 6.478335857391357\r\nStep 71, loss: 6.460400581359863\r\nStep 72, loss: 6.444827079772949\r\nStep 73, loss: 6.4275431632995605\r\nStep 74, loss: 6.418704509735107\r\nStep 75, loss: 6.39842414855957\r\nStep 76, loss: 6.393788814544678\r\nStep 77, loss: 6.38428258895874\r\nStep 78, loss: 6.378833770751953\r\nStep 79, loss: 6.351770877838135\r\nStep 80, loss: 6.343893527984619\r\nStep 81, loss: 6.33534574508667\r\nStep 82, loss: 6.316616058349609\r\nStep 83, loss: 6.306394100189209\r\nStep 84, loss: 6.2900824546813965\r\nStep 85, loss: 6.280661582946777\r\nStep 86, loss: 6.256990432739258\r\nStep 87, loss: 6.258281230926514\r\nStep 88, loss: 6.248733043670654\r\nStep 89, loss: 6.229138374328613\r\nStep 90, loss: 6.213196277618408\r\nStep 91, loss: 6.208481788635254\r\nStep 92, loss: 6.1952619552612305\r\nStep 93, loss: 6.18841028213501\r\nStep 94, loss: 6.172695159912109\r\nStep 95, loss: 6.166540622711182\r\nStep 96, loss: 6.154155254364014\r\nStep 97, loss: 6.1471357345581055\r\nStep 98, loss: 6.13299036026001\r\nStep 99, loss: 6.128570079803467\r\nStep 100, loss: 6.121886730194092\r\nStep 101, loss: 6.1068315505981445\r\nStep 102, loss: 6.106794357299805\r\nStep 103, loss: 6.093207359313965\r\nStep 104, loss: 6.074648857116699\r\nStep 105, loss: 6.072917938232422\r\nStep 106, loss: 6.067320823669434\r\nStep 107, loss: 6.0497331619262695\r\nStep 108, loss: 6.042314529418945\r\nStep 109, loss: 6.038609981536865\r\nStep 110, loss: 6.02829647064209\r\nStep 111, loss: 6.021379470825195\r\nStep 112, loss: 6.0132551193237305\r\nStep 113, loss: 6.013428211212158\r\nStep 114, loss: 5.99829626083374\r\nStep 115, loss: 5.9912309646606445\r\nStep 116, loss: 5.97770881652832\r\nStep 117, loss: 5.977640151977539\r\nStep 118, loss: 5.965694904327393\r\nStep 119, loss: 5.956897258758545\r\nStep 120, loss: 5.9596991539001465\r\nStep 121, loss: 5.947717666625977\r\nStep 122, loss: 5.938053607940674\r\nStep 123, loss: 5.929181098937988\r\nStep 124, loss: 5.923599720001221\r\nStep 125, loss: 5.919835567474365\r\nStep 126, loss: 5.9050164222717285\r\nStep 127, loss: 5.902151584625244\r\nStep 128, loss: 5.893804550170898\r\nStep 129, loss: 5.8920769691467285\r\nStep 130, loss: 5.887382984161377\r\nStep 
131, loss: 5.877043724060059\r\nStep 132, loss: 5.879669189453125\r\nStep 133, loss: 5.866776466369629\r\nStep 134, loss: 5.8618316650390625\r\nStep 135, loss: 5.852167129516602\r\nStep 136, loss: 5.84889030456543\r\nStep 137, loss: 5.838191509246826\r\nStep 138, loss: 5.8331804275512695\r\nStep 139, loss: 5.829548358917236\r\nStep 140, loss: 5.83687686920166\r\nStep 141, loss: 5.811373710632324\r\nStep 142, loss: 5.806796550750732\r\nStep 143, loss: 5.798097610473633\r\nStep 144, loss: 5.805070877075195\r\nStep 145, loss: 5.821244239807129\r\nStep 146, loss: 5.7903852462768555\r\nStep 147, loss: 5.781966209411621\r\nStep 148, loss: 5.772430896759033\r\nStep 149, loss: 5.770227432250977\r\nStep 150, loss: 5.763456344604492\r\nStep 151, loss: 5.7645978927612305\r\nStep 152, loss: 5.7851386070251465\r\nStep 153, loss: 5.770552158355713\r\nStep 154, loss: 5.748880863189697\r\nStep 155, loss: 5.737582683563232\r\nStep 156, loss: 5.737066268920898\r\nStep 157, loss: 5.726796627044678\r\nStep 158, loss: 5.71999979019165\r\nStep 159, loss: 5.715255260467529\r\nStep 160, loss: 5.709979057312012\r\nStep 161, loss: 5.707757949829102\r\nStep 162, loss: 5.70421838760376\r\nStep 163, loss: 5.703351020812988\r\nStep 164, loss: 5.705657482147217\r\nStep 165, loss: 5.690601348876953\r\nStep 166, loss: 5.678736209869385\r\nStep 167, loss: 5.673732280731201\r\nStep 168, loss: 5.671648979187012\r\nStep 169, loss: 5.6711201667785645\r\nStep 170, loss: 5.665239334106445\r\nStep 171, loss: 5.658088684082031\r\nStep 172, loss: 5.6495537757873535\r\nStep 173, loss: 5.649227142333984\r\nStep 174, loss: 5.644308090209961\r\nStep 175, loss: 5.643322467803955\r\nStep 176, loss: 5.6302361488342285\r\nStep 177, loss: 5.630355358123779\r\nStep 178, loss: 5.618348121643066\r\nStep 179, loss: 5.613123893737793\r\nStep 180, loss: 5.634921073913574\r\nStep 181, loss: 5.60896110534668\r\nStep 182, loss: 5.609810829162598\r\nStep 183, loss: 5.5972981452941895\r\nStep 184, loss: 5.598633766174316\r\nStep 185, loss: 5.584133148193359\r\nStep 186, loss: 5.58749532699585\r\nStep 187, loss: 5.575747966766357\r\nStep 188, loss: 5.575098514556885\r\nStep 189, loss: 5.562448978424072\r\nStep 190, loss: 5.565751075744629\r\nStep 191, loss: 5.559344291687012\r\nStep 192, loss: 5.546035289764404\r\nStep 193, loss: 5.556068420410156\r\nStep 194, loss: 5.541036128997803\r\nStep 195, loss: 5.5301032066345215\r\nStep 196, loss: 5.526306629180908\r\nStep 197, loss: 5.519721984863281\r\nStep 198, loss: 5.537485122680664\r\nStep 199, loss: 5.525476455688477\r\nStep 200, loss: 5.509968280792236\r\nStep 201, loss: 5.507684707641602\r\nStep 202, loss: 5.500143051147461\r\nStep 203, loss: 5.498023986816406\r\nStep 204, loss: 5.49220085144043\r\nStep 205, loss: 5.487483978271484\r\nStep 206, loss: 5.476466655731201\r\nStep 207, loss: 5.471756935119629\r\nStep 208, loss: 5.474320411682129\r\nStep 209, loss: 5.469831466674805\r\nStep 210, loss: 5.459320545196533\r\nStep 211, loss: 5.4613728523254395\r\nStep 212, loss: 5.452045440673828\r\nStep 213, loss: 5.444828033447266\r\nStep 214, loss: 5.441102504730225\r\nStep 215, loss: 5.444048881530762\r\nStep 216, loss: 5.440446376800537\r\nStep 217, loss: 5.430531024932861\r\nStep 218, loss: 5.422695159912109\r\nStep 219, loss: 5.421215057373047\r\nStep 220, loss: 5.419504642486572\r\nStep 221, loss: 5.399036884307861\r\nStep 222, loss: 5.412620544433594\r\nStep 223, loss: 5.397492408752441\r\nStep 224, loss: 5.3947224617004395\r\nStep 225, loss: 5.400577068328857\r\nStep 226, loss: 
5.389159679412842\r\nStep 227, loss: 5.384253025054932\r\nStep 228, loss: 5.3743133544921875\r\nStep 229, loss: 5.395868301391602\r\nStep 230, loss: 5.370507717132568\r\nStep 231, loss: 5.358748435974121\r\nStep 232, loss: 5.3537774085998535\r\nStep 233, loss: 5.347180366516113\r\nStep 234, loss: 5.351746559143066\r\nStep 235, loss: 5.34335470199585\r\nStep 236, loss: 5.3399529457092285\r\nStep 237, loss: 5.333498001098633\r\n",,terminal_output +2723,8480527,"TERMINAL",0,0,"5\t ",,terminal_output +2724,8481574,"TERMINAL",0,0,"6\t ",,terminal_output +2725,8482647,"TERMINAL",0,0,"7\t ",,terminal_output +2726,8483650,"TERMINAL",0,0,"8\t ",,terminal_output +2727,8484797,"TERMINAL",0,0,"9\t ",,terminal_output +2728,8485597,"TERMINAL",0,0,"Step 238, loss: 5.329776763916016\r\nStep 239, loss: 5.325694561004639\r\nStep 240, loss: 5.327670574188232\r\nStep 241, loss: 5.316344738006592\r\nStep 242, loss: 5.3163323402404785\r\nStep 243, loss: 5.307192325592041\r\nStep 244, loss: 5.307132720947266\r\nStep 245, loss: 5.300736904144287\r\nStep 246, loss: 5.317860126495361\r\nStep 247, loss: 5.290336608886719\r\nStep 248, loss: 5.279738903045654\r\nStep 249, loss: 5.27883768081665\r\nStep 250, loss: 5.283246994018555\r\nStep 251, loss: 5.27727746963501\r\nStep 252, loss: 5.2701239585876465\r\nStep 253, loss: 5.267218112945557\r\nStep 254, loss: 5.258137226104736\r\nStep 255, loss: 5.248106479644775\r\nStep 256, loss: 5.242778301239014\r\nStep 257, loss: 5.248732566833496\r\nStep 258, loss: 5.25508975982666\r\nStep 259, loss: 5.232602596282959\r\nStep 260, loss: 5.224575042724609\r\nStep 261, loss: 5.227688312530518\r\nStep 262, loss: 5.215153694152832\r\nStep 263, loss: 5.218138694763184\r\nStep 264, loss: 5.217498302459717\r\nStep 265, loss: 5.215456008911133\r\nStep 266, loss: 5.205094814300537\r\nStep 267, loss: 5.195743083953857\r\nStep 268, loss: 5.185235500335693\r\nStep 269, loss: 5.1816205978393555\r\nStep 270, loss: 5.176819324493408\r\nStep 271, loss: 5.170180797576904\r\nStep 272, loss: 5.203779697418213\r\nStep 273, loss: 5.16251802444458\r\nStep 274, loss: 5.156702518463135\r\nStep 275, loss: 5.152243137359619\r\nStep 276, loss: 5.148741722106934\r\nStep 277, loss: 5.152563095092773\r\nStep 278, loss: 5.14493989944458\r\nStep 279, loss: 5.131824016571045\r\nStep 280, loss: 5.130490303039551\r\nStep 281, loss: 5.120980262756348\r\nStep 282, loss: 5.117804050445557\r\nStep 283, loss: 5.1373701095581055\r\nStep 284, loss: 5.119662284851074\r\nStep 285, loss: 5.1121015548706055\r\nStep 286, loss: 5.101524353027344\r\nStep 287, loss: 5.125157356262207\r\nStep 288, loss: 5.09190034866333\r\nStep 289, loss: 5.102667331695557\r\nStep 290, loss: 5.090808868408203\r\nStep 291, loss: 5.084634780883789\r\nStep 292, loss: 5.084208011627197\r\nStep 293, loss: 5.079932689666748\r\nStep 294, loss: 5.067696571350098\r\nStep 295, loss: 5.104025363922119\r\nStep 296, loss: 5.062191486358643\r\nStep 297, loss: 5.079304218292236\r\nStep 298, loss: 5.0550079345703125\r\nStep 299, loss: 5.050905704498291\r\nStep 300, loss: 5.053891181945801\r\nStep 301, loss: 5.0422444343566895\r\nStep 302, loss: 5.041848659515381\r\nStep 303, loss: 5.02760124206543\r\nStep 304, loss: 5.0279622077941895\r\nStep 305, loss: 5.022447109222412\r\nStep 306, loss: 5.023200511932373\r\nStep 307, loss: 5.014390468597412\r\nStep 308, loss: 5.010059356689453\r\nStep 309, loss: 5.008781433105469\r\nStep 310, loss: 5.002236843109131\r\nStep 311, loss: 5.0024824142456055\r\nStep 312, loss: 4.996912002563477\r\nStep 313, loss: 
5.012515068054199\r\nStep 314, loss: 4.982471466064453\r\nStep 315, loss: 4.987527847290039\r\nStep 316, loss: 4.979400634765625\r\nStep 317, loss: 4.969332695007324\r\nStep 318, loss: 4.95458984375\r\nStep 319, loss: 4.952398300170898\r\nStep 320, loss: 4.951416969299316\r\nStep 321, loss: 4.947865009307861\r\nStep 322, loss: 4.945812702178955\r\nStep 323, loss: 4.939417839050293\r\nStep 324, loss: 4.957915306091309\r\nStep 325, loss: 4.929662227630615\r\nStep 326, loss: 4.927073955535889\r\nStep 327, loss: 4.922922611236572\r\nStep 328, loss: 4.912182331085205\r\nStep 329, loss: 4.900724411010742\r\nStep 330, loss: 4.911631107330322\r\nStep 331, loss: 4.9068989753723145\r\nStep 332, loss: 4.900992393493652\r\nStep 333, loss: 4.9312424659729\r\nStep 334, loss: 4.882149696350098\r\nStep 335, loss: 4.892817497253418\r\nStep 336, loss: 4.880038738250732\r\nStep 337, loss: 4.873488426208496\r\nStep 338, loss: 4.863410472869873\r\nStep 339, loss: 4.865196704864502\r\nStep 340, loss: 4.861401081085205\r\nStep 341, loss: 4.856941223144531\r\nStep 342, loss: 4.84530782699585\r\nStep 343, loss: 4.847756385803223\r\nStep 344, loss: 4.881572246551514\r\nStep 345, loss: 4.8741841316223145\r\nStep 346, loss: 4.841855049133301\r\nStep 347, loss: 4.840559959411621\r\nStep 348, loss: 4.838076591491699\r\nStep 349, loss: 4.828072547912598\r\nStep 350, loss: 4.8201446533203125\r\nStep 351, loss: 4.810567855834961\r\nStep 352, loss: 4.8092360496521\r\nStep 353, loss: 4.809211254119873\r\nStep 354, loss: 4.804335594177246\r\nStep 355, loss: 4.802347660064697\r\nStep 356, loss: 4.794157981872559\r\nStep 357, loss: 4.787291049957275\r\nStep 358, loss: 4.78963565826416\r\nStep 359, loss: 4.787293910980225\r\nStep 360, loss: 4.769772529602051\r\nStep 361, loss: 4.772130012512207\r\nStep 362, loss: 4.795124053955078\r\nStep 363, loss: 4.792688369750977\r\nStep 364, loss: 4.756403923034668\r\nStep 365, loss: 4.760043621063232\r\nStep 366, loss: 4.754371166229248\r\nStep 367, loss: 4.739218235015869\r\nStep 368, loss: 4.756822109222412\r\nStep 369, loss: 4.731760501861572\r\nStep 370, loss: 4.728282928466797\r\nStep 371, loss: 4.722902297973633\r\nStep 372, loss: 4.718770980834961\r\nStep 373, loss: 4.715792655944824\r\nStep 374, loss: 4.725358963012695\r\nStep 375, loss: 4.701916694641113\r\nStep 376, loss: 4.702508449554443\r\nStep 377, loss: 4.7011637687683105\r\nStep 378, loss: 4.686628818511963\r\nStep 379, loss: 4.695858955383301\r\nStep 380, loss: 4.690489768981934\r\nStep 381, loss: 4.687510967254639\r\nStep 382, loss: 4.688262462615967\r\nStep 383, loss: 4.682755947113037\r\nStep 384, loss: 4.693565845489502\r\nStep 385, loss: 4.682764530181885\r\nStep 386, loss: 4.660360813140869\r\nStep 387, loss: 4.662494659423828\r\nStep 388, loss: 4.647305011749268\r\nStep 389, loss: 4.655887603759766\r\nStep 390, loss: 4.6393656730651855\r\nStep 391, loss: 4.634350299835205\r\nStep 392, loss: 4.66060209274292\r\nStep 393, loss: 4.639071464538574\r\nStep 394, loss: 4.6243743896484375\r\nStep 395, loss: 4.621366500854492\r\nStep 396, loss: 4.629249572753906\r\nStep 397, loss: 4.623109817504883\r\nStep 398, loss: 4.623954772949219\r\nStep 399, loss: 4.612716197967529\r\nStep 400, loss: 4.609229564666748\r\nStep 401, loss: 4.62969970703125\r\nStep 402, loss: 4.623652458190918\r\nStep 403, loss: 4.596227645874023\r\nStep 404, loss: 4.596382141113281\r\nStep 405, loss: 4.590897560119629\r\nStep 406, loss: 4.574085712432861\r\nStep 407, loss: 4.579984188079834\r\nStep 408, loss: 4.569983005523682\r\nStep 409, loss: 
4.563737869262695\r\nStep 410, loss: 4.560784339904785\r\nStep 411, loss: 4.547574520111084\r\nStep 412, loss: 4.553075313568115\r\nStep 413, loss: 4.565923690795898\r\nStep 414, loss: 4.545528411865234\r\nStep 415, loss: 4.542304039001465\r\nStep 416, loss: 4.532782077789307\r\nStep 417, loss: 4.5360188484191895\r\nStep 418, loss: 4.519146919250488\r\nStep 419, loss: 4.532624244689941\r\nStep 420, loss: 4.522458076477051\r\nStep 421, loss: 4.5140156745910645\r\nStep 422, loss: 4.516432762145996\r\nStep 423, loss: 4.508829116821289\r\nStep 424, loss: 4.513571739196777\r\nStep 425, loss: 4.504456996917725\r\nStep 426, loss: 4.5098958015441895\r\nStep 427, loss: 4.505548000335693\r\nStep 428, loss: 4.518496990203857\r\nStep 429, loss: 4.484988212585449\r\nStep 430, loss: 4.493155002593994\r\nStep 431, loss: 4.482624053955078\r\nStep 432, loss: 4.4716010093688965\r\nStep 433, loss: 4.477433681488037\r\nStep 434, loss: 4.4725341796875\r\nStep 435, loss: 4.472967147827148\r\nStep 436, loss: 4.462329864501953\r\nStep 437, loss: 4.488564491271973\r\nStep 438, loss: 4.453413486480713\r\nStep 439, loss: 4.458971977233887\r\nStep 440, loss: 4.440028667449951\r\nStep 441, loss: 4.438808917999268\r\nStep 442, loss: 4.437032699584961\r\nStep 443, loss: 4.4303879737854\r\nStep 444, loss: 4.418288230895996\r\nStep 445, loss: 4.443573474884033\r\nStep 446, loss: 4.420436859130859\r\nStep 447, loss: 4.408929347991943\r\nStep 448, loss: 4.422056198120117\r\nStep 449, loss: 4.40527868270874\r\nStep 450, loss: 4.432740211486816\r\nStep 451, loss: 4.398776531219482\r\nStep 452, loss: 4.3976030349731445\r\nStep 453, loss: 4.38792085647583\r\nStep 454, loss: 4.3737382888793945\r\nStep 455, loss: 4.3746185302734375\r\nStep 456, loss: 4.3898444175720215\r\nStep 457, loss: 4.379148483276367\r\nStep 458, loss: 4.381988048553467\r\nStep 459, loss: 4.3760504722595215\r\nStep 460, loss: 4.366771697998047\r\nStep 461, loss: 4.369098663330078\r\nStep 462, loss: 4.35321044921875\r\nStep 463, loss: 4.350816249847412\r\nStep 464, loss: 4.3716888427734375\r\nStep 465, loss: 4.345888614654541\r\nStep 466, loss: 4.343746662139893\r\nStep 467, loss: 4.327823162078857\r\nStep 468, loss: 4.3292107582092285\r\nStep 469, loss: 4.3385796546936035\r\nStep 470, loss: 4.325247764587402\r\nStep 471, loss: 4.324270725250244\r\nStep 472, loss: 4.338938236236572\r\nStep 473, loss: 4.321204662322998\r\nStep 474, loss: 4.2982964515686035\r\nStep 475, loss: 4.322055339813232\r\nStep 476, loss: 4.307989120483398\r\nStep 477, loss: 4.287895202636719\r\n",,terminal_output +2729,8485748,"TERMINAL",0,0,"40\t ",,terminal_output +2730,8486842,"TERMINAL",0,0,"1\t ",,terminal_output +2731,8487868,"TERMINAL",0,0,"2\t ",,terminal_output +2732,8488892,"TERMINAL",0,0,"3\t ",,terminal_output +2733,8489911,"TERMINAL",0,0,"4\t ",,terminal_output +2734,8490951,"TERMINAL",0,0,"5\t ",,terminal_output +2735,8491321,"TERMINAL",0,0,"Step 478, loss: 4.291557788848877\r\nStep 479, loss: 4.303369522094727\r\nStep 480, loss: 4.284358501434326\r\nStep 481, loss: 4.280134201049805\r\nStep 482, loss: 4.273751735687256\r\nStep 483, loss: 4.262834072113037\r\nStep 484, loss: 4.266302585601807\r\nStep 485, loss: 4.25850772857666\r\nStep 486, loss: 4.270524501800537\r\nStep 487, loss: 4.2795000076293945\r\nStep 488, loss: 4.2553582191467285\r\nStep 489, loss: 4.2427802085876465\r\nStep 490, loss: 4.237804889678955\r\nStep 491, loss: 4.253228664398193\r\nStep 492, loss: 4.25295352935791\r\nStep 493, loss: 4.235777378082275\r\nStep 494, loss: 4.228907585144043\r\nStep 495, 
loss: 4.233348846435547\r\nStep 496, loss: 4.2213640213012695\r\nStep 497, loss: 4.2308502197265625\r\nStep 498, loss: 4.202448844909668\r\nStep 499, loss: 4.219991683959961\r\nStep 500, loss: 4.23018741607666\r\nStep 501, loss: 4.233881950378418\r\nStep 502, loss: 4.194858551025391\r\nStep 503, loss: 4.187500476837158\r\nStep 504, loss: 4.185210227966309\r\nStep 505, loss: 4.183363914489746\r\nStep 506, loss: 4.208914756774902\r\nStep 507, loss: 4.171588897705078\r\nStep 508, loss: 4.187896728515625\r\nStep 509, loss: 4.1709885597229\r\nStep 510, loss: 4.171844959259033\r\nStep 511, loss: 4.160091400146484\r\nStep 512, loss: 4.158123016357422\r\nStep 513, loss: 4.143055438995361\r\nStep 514, loss: 4.1494669914245605\r\nStep 515, loss: 4.159925937652588\r\nStep 516, loss: 4.149556636810303\r\nStep 517, loss: 4.1368560791015625\r\nStep 518, loss: 4.1339240074157715\r\nStep 519, loss: 4.160516262054443\r\nStep 520, loss: 4.123724937438965\r\nStep 521, loss: 4.119964599609375\r\nStep 522, loss: 4.113111972808838\r\nStep 523, loss: 4.128612518310547\r\nStep 524, loss: 4.106175899505615\r\nStep 525, loss: 4.103194713592529\r\nStep 526, loss: 4.1034722328186035\r\nStep 527, loss: 4.125179767608643\r\nStep 528, loss: 4.094694137573242\r\nStep 529, loss: 4.091785430908203\r\nStep 530, loss: 4.088562488555908\r\nStep 531, loss: 4.081974983215332\r\nStep 532, loss: 4.092736721038818\r\nStep 533, loss: 4.070742130279541\r\nStep 534, loss: 4.072827339172363\r\nStep 535, loss: 4.073520660400391\r\nStep 536, loss: 4.060091018676758\r\nStep 537, loss: 4.090692520141602\r\nStep 538, loss: 4.055988788604736\r\nStep 539, loss: 4.0873332023620605\r\nStep 540, loss: 4.050271987915039\r\nStep 541, loss: 4.04437780380249\r\nStep 542, loss: 4.047336101531982\r\nStep 543, loss: 4.043661594390869\r\nStep 544, loss: 4.0426106452941895\r\nStep 545, loss: 4.038684844970703\r\nStep 546, loss: 4.0309553146362305\r\nStep 547, loss: 4.031425476074219\r\nStep 548, loss: 4.028134346008301\r\nStep 549, loss: 4.0077924728393555\r\nStep 550, loss: 4.023715019226074\r\nStep 551, loss: 4.040329933166504\r\nStep 552, loss: 4.016040802001953\r\nStep 553, loss: 4.008286952972412\r\nStep 554, loss: 4.030107498168945\r\nStep 555, loss: 3.9926788806915283\r\nStep 556, loss: 4.002552032470703\r\nStep 557, loss: 3.9922361373901367\r\nStep 558, loss: 3.985311269760132\r\nStep 559, loss: 3.9913032054901123\r\nStep 560, loss: 3.9678568840026855\r\nStep 561, loss: 3.9776554107666016\r\nStep 562, loss: 3.9681506156921387\r\nStep 563, loss: 3.97571063041687\r\nStep 564, loss: 3.9810261726379395\r\nStep 565, loss: 3.9587368965148926\r\nStep 566, loss: 3.9550623893737793\r\nStep 567, loss: 3.950706720352173\r\nStep 568, loss: 3.9796910285949707\r\nStep 569, loss: 3.9481232166290283\r\nStep 570, loss: 3.9469964504241943\r\nStep 571, loss: 3.943410634994507\r\nStep 572, loss: 3.9447550773620605\r\nStep 573, loss: 3.9368999004364014\r\nStep 574, loss: 3.9621260166168213\r\nStep 575, loss: 3.9364113807678223\r\nStep 576, loss: 3.9214630126953125\r\nStep 577, loss: 3.926889419555664\r\nStep 578, loss: 3.9164459705352783\r\nStep 579, loss: 3.9093456268310547\r\nStep 580, loss: 3.9123921394348145\r\nStep 581, loss: 3.899747610092163\r\nStep 582, loss: 3.9039306640625\r\nStep 583, loss: 3.916299343109131\r\nStep 584, loss: 3.9223179817199707\r\nStep 585, loss: 3.902918577194214\r\nStep 586, loss: 3.9014790058135986\r\nStep 587, loss: 3.8822004795074463\r\nStep 588, loss: 3.872868299484253\r\nStep 589, loss: 3.878551959991455\r\nStep 590, loss: 
3.8796308040618896\r\nStep 591, loss: 3.8758299350738525\r\nStep 592, loss: 3.863330364227295\r\nStep 593, loss: 3.869246244430542\r\nStep 594, loss: 3.8703792095184326\r\nStep 595, loss: 3.8841264247894287\r\nStep 596, loss: 3.8570775985717773\r\nStep 597, loss: 3.8569018840789795\r\nStep 598, loss: 3.8635075092315674\r\nStep 599, loss: 3.8484597206115723\r\nStep 600, loss: 3.8387413024902344\r\nStep 601, loss: 3.9414875507354736\r\nStep 602, loss: 3.8301095962524414\r\nStep 603, loss: 3.8200337886810303\r\nStep 604, loss: 3.8218162059783936\r\nStep 605, loss: 3.8195745944976807\r\nStep 606, loss: 3.8210251331329346\r\nStep 607, loss: 3.8104372024536133\r\nStep 608, loss: 3.8043277263641357\r\nStep 609, loss: 3.8310530185699463\r\nStep 610, loss: 3.8158864974975586\r\nStep 611, loss: 3.812917709350586\r\nStep 612, loss: 3.8086154460906982\r\nStep 613, loss: 3.8331305980682373\r\nStep 614, loss: 3.8240408897399902\r\nStep 615, loss: 3.803926706314087\r\nStep 616, loss: 3.782737970352173\r\nStep 617, loss: 3.776934862136841\r\nStep 618, loss: 3.784985065460205\r\nStep 619, loss: 3.7640604972839355\r\nStep 620, loss: 3.7729036808013916\r\nStep 621, loss: 3.7885355949401855\r\nStep 622, loss: 3.7648441791534424\r\nStep 623, loss: 3.7585103511810303\r\nStep 624, loss: 3.7470829486846924\r\nStep 625, loss: 3.749617099761963\r\nStep 626, loss: 3.7380528450012207\r\nStep 627, loss: 3.7525107860565186\r\nStep 628, loss: 3.7466468811035156\r\nStep 629, loss: 3.731041431427002\r\nStep 630, loss: 3.7277638912200928\r\nStep 631, loss: 3.73300838470459\r\nStep 632, loss: 3.7333614826202393\r\nStep 633, loss: 3.7236740589141846\r\nStep 634, loss: 3.716989278793335\r\nStep 635, loss: 3.715615749359131\r\nStep 636, loss: 3.716261148452759\r\nStep 637, loss: 3.7572038173675537\r\nStep 638, loss: 3.705672025680542\r\nStep 639, loss: 3.704786777496338\r\nStep 640, loss: 3.6950793266296387\r\nStep 641, loss: 3.6976401805877686\r\nStep 642, loss: 3.6893296241760254\r\nStep 643, loss: 3.6906139850616455\r\nStep 644, loss: 3.70758056640625\r\nStep 645, loss: 3.6795530319213867\r\nStep 646, loss: 3.6958439350128174\r\nStep 647, loss: 3.6663942337036133\r\nStep 648, loss: 3.6652891635894775\r\nStep 649, loss: 3.679201126098633\r\nStep 650, loss: 3.6864233016967773\r\nStep 651, loss: 3.7003464698791504\r\nStep 652, loss: 3.6455352306365967\r\nStep 653, loss: 3.6554441452026367\r\nStep 654, loss: 3.6419262886047363\r\nStep 655, loss: 3.6498284339904785\r\nStep 656, loss: 3.6521315574645996\r\nStep 657, loss: 3.641070604324341\r\nStep 658, loss: 3.639512300491333\r\nStep 659, loss: 3.6741573810577393\r\nStep 660, loss: 3.6378190517425537\r\nStep 661, loss: 3.6181962490081787\r\nStep 662, loss: 3.622748851776123\r\nStep 663, loss: 3.625574827194214\r\nStep 664, loss: 3.626727819442749\r\nStep 665, loss: 3.6200222969055176\r\nStep 666, loss: 3.6169145107269287\r\nStep 667, loss: 3.6133029460906982\r\nStep 668, loss: 3.6126859188079834\r\nStep 669, loss: 3.6147332191467285\r\nStep 670, loss: 3.6025888919830322\r\nStep 671, loss: 3.6002163887023926\r\nStep 672, loss: 3.5915215015411377\r\nStep 673, loss: 3.6523265838623047\r\nStep 674, loss: 3.5867717266082764\r\nStep 675, loss: 3.6193196773529053\r\nStep 676, loss: 3.577577829360962\r\nStep 677, loss: 3.5739431381225586\r\nStep 678, loss: 3.5880415439605713\r\nStep 679, loss: 3.57814359664917\r\nStep 680, loss: 3.5703814029693604\r\nStep 681, loss: 3.5590782165527344\r\nStep 682, loss: 3.5630245208740234\r\nStep 683, loss: 3.5535824298858643\r\nStep 684, loss: 
3.5547027587890625\r\nStep 685, loss: 3.5535778999328613\r\nStep 686, loss: 3.5393097400665283\r\nStep 687, loss: 3.5849297046661377\r\nStep 688, loss: 3.5366761684417725\r\nStep 689, loss: 3.5378153324127197\r\nStep 690, loss: 3.5349464416503906\r\nStep 691, loss: 3.5566041469573975\r\nStep 692, loss: 3.521968364715576\r\nStep 693, loss: 3.5204508304595947\r\nStep 694, loss: 3.5220260620117188\r\nStep 695, loss: 3.5268638134002686\r\nStep 696, loss: 3.515597343444824\r\nStep 697, loss: 3.514417886734009\r\nStep 698, loss: 3.5207905769348145\r\nStep 699, loss: 3.505972146987915\r\nStep 700, loss: 3.5332233905792236\r\nStep 701, loss: 3.4886462688446045\r\nStep 702, loss: 3.4924867153167725\r\nStep 703, loss: 3.4870705604553223\r\nStep 704, loss: 3.4824371337890625\r\nStep 705, loss: 3.490525484085083\r\nStep 706, loss: 3.4747982025146484\r\nStep 707, loss: 3.469162940979004\r\nStep 708, loss: 3.517742395401001\r\nStep 709, loss: 3.4621756076812744\r\nStep 710, loss: 3.4713621139526367\r\nStep 711, loss: 3.4638373851776123\r\nStep 712, loss: 3.464184522628784\r\nStep 713, loss: 3.454157590866089\r\nStep 714, loss: 3.4510250091552734\r\n",,terminal_output +2736,8491992,"TERMINAL",0,0,"6\t ",,terminal_output +2737,8493030,"TERMINAL",0,0,"7\t ",,terminal_output +2738,8494076,"TERMINAL",0,0,"8\t ",,terminal_output +2739,8495140,"TERMINAL",0,0,"9\t ",,terminal_output +2740,8496164,"TERMINAL",0,0,"50\t ",,terminal_output +2741,8496573,"TERMINAL",0,0,"Step 715, loss: 3.452220916748047\r\nStep 716, loss: 3.4543521404266357\r\nStep 717, loss: 3.4401297569274902\r\nStep 718, loss: 3.4588747024536133\r\nStep 719, loss: 3.4418153762817383\r\nStep 720, loss: 3.4302871227264404\r\nStep 721, loss: 3.43442440032959\r\nStep 722, loss: 3.4214484691619873\r\nStep 723, loss: 3.424147367477417\r\nStep 724, loss: 3.4198801517486572\r\nStep 725, loss: 3.423811435699463\r\nStep 726, loss: 3.518169403076172\r\nStep 727, loss: 3.425844192504883\r\nStep 728, loss: 3.414850950241089\r\nStep 729, loss: 3.415541410446167\r\nStep 730, loss: 3.3948476314544678\r\nStep 731, loss: 3.3966877460479736\r\nStep 732, loss: 3.387463331222534\r\nStep 733, loss: 3.3963286876678467\r\nStep 734, loss: 3.414280652999878\r\nStep 735, loss: 3.3901820182800293\r\nStep 736, loss: 3.381934404373169\r\nStep 737, loss: 3.3804521560668945\r\nStep 738, loss: 3.3740427494049072\r\nStep 739, loss: 3.3813936710357666\r\nStep 740, loss: 3.3687186241149902\r\nStep 741, loss: 3.3711202144622803\r\nStep 742, loss: 3.363731861114502\r\nStep 743, loss: 3.4069690704345703\r\nStep 744, loss: 3.367016553878784\r\nStep 745, loss: 3.3706047534942627\r\nStep 746, loss: 3.3629446029663086\r\nStep 747, loss: 3.348632574081421\r\nStep 748, loss: 3.347393751144409\r\nStep 749, loss: 3.3421542644500732\r\nStep 750, loss: 3.3762547969818115\r\nStep 751, loss: 3.3374600410461426\r\nStep 752, loss: 3.3341639041900635\r\nStep 753, loss: 3.3226077556610107\r\nStep 754, loss: 3.334002733230591\r\nStep 755, loss: 3.330939531326294\r\nStep 756, loss: 3.3116793632507324\r\nStep 757, loss: 3.3203723430633545\r\nStep 758, loss: 3.3131892681121826\r\nStep 759, loss: 3.3163468837738037\r\nStep 760, loss: 3.31977915763855\r\nStep 761, loss: 3.319287061691284\r\nStep 762, loss: 3.3112828731536865\r\nStep 763, loss: 3.3017032146453857\r\nStep 764, loss: 3.3030855655670166\r\nStep 765, loss: 3.3005826473236084\r\nStep 766, loss: 3.2833948135375977\r\nStep 767, loss: 3.31762433052063\r\nStep 768, loss: 3.3161461353302\r\nStep 769, loss: 3.2816319465637207\r\nStep 770, loss: 
3.279048442840576\r\nStep 771, loss: 3.282259941101074\r\nStep 772, loss: 3.2699270248413086\r\nStep 773, loss: 3.3801984786987305\r\nStep 774, loss: 3.278341293334961\r\nStep 775, loss: 3.264591693878174\r\nStep 776, loss: 3.2670063972473145\r\nStep 777, loss: 3.259495496749878\r\nStep 778, loss: 3.2550528049468994\r\nStep 779, loss: 3.2642087936401367\r\nStep 780, loss: 3.2564923763275146\r\nStep 781, loss: 3.29453182220459\r\nStep 782, loss: 3.26042103767395\r\nStep 783, loss: 3.2422492504119873\r\nStep 784, loss: 3.234773874282837\r\nStep 785, loss: 3.2554168701171875\r\nStep 786, loss: 3.224392890930176\r\nStep 787, loss: 3.2376708984375\r\nStep 788, loss: 3.2290592193603516\r\nStep 789, loss: 3.2345049381256104\r\nStep 790, loss: 3.2150702476501465\r\nStep 791, loss: 3.221007823944092\r\nStep 792, loss: 3.2198750972747803\r\nStep 793, loss: 3.202873706817627\r\nStep 794, loss: 3.2122392654418945\r\nStep 795, loss: 3.207622766494751\r\nStep 796, loss: 3.2292377948760986\r\nStep 797, loss: 3.199307918548584\r\nStep 798, loss: 3.2041327953338623\r\nStep 799, loss: 3.188765287399292\r\nStep 800, loss: 3.1878135204315186\r\nStep 801, loss: 3.1993231773376465\r\nStep 802, loss: 3.190980911254883\r\nStep 803, loss: 3.1727101802825928\r\nStep 804, loss: 3.1880736351013184\r\nStep 805, loss: 3.2113866806030273\r\nStep 806, loss: 3.1747515201568604\r\nStep 807, loss: 3.169316530227661\r\nStep 808, loss: 3.2133655548095703\r\nStep 809, loss: 3.1789679527282715\r\nStep 810, loss: 3.1660187244415283\r\nStep 811, loss: 3.1661770343780518\r\nStep 812, loss: 3.1586477756500244\r\nStep 813, loss: 3.149427890777588\r\nStep 814, loss: 3.1351287364959717\r\nStep 815, loss: 3.1585564613342285\r\nStep 816, loss: 3.1409056186676025\r\nStep 817, loss: 3.1312355995178223\r\nStep 818, loss: 3.142853021621704\r\nStep 819, loss: 3.236429452896118\r\nStep 820, loss: 3.125951051712036\r\nStep 821, loss: 3.1240015029907227\r\nStep 822, loss: 3.1334896087646484\r\nStep 823, loss: 3.1242992877960205\r\nStep 824, loss: 3.1140527725219727\r\nStep 825, loss: 3.1125872135162354\r\nStep 826, loss: 3.1097989082336426\r\nStep 827, loss: 3.1181437969207764\r\nStep 828, loss: 3.115785598754883\r\nStep 829, loss: 3.1401212215423584\r\nStep 830, loss: 3.0986244678497314\r\nStep 831, loss: 3.1018221378326416\r\nStep 832, loss: 3.1023480892181396\r\nStep 833, loss: 3.0971338748931885\r\nStep 834, loss: 3.0878708362579346\r\nStep 835, loss: 3.0745716094970703\r\nStep 836, loss: 3.0882630348205566\r\nStep 837, loss: 3.0805399417877197\r\nStep 838, loss: 3.076524257659912\r\nStep 839, loss: 3.0642926692962646\r\nStep 840, loss: 3.0727648735046387\r\nStep 841, loss: 3.062732219696045\r\nStep 842, loss: 3.0638973712921143\r\nStep 843, loss: 3.082012176513672\r\nStep 844, loss: 3.049668312072754\r\nStep 845, loss: 3.052675485610962\r\nStep 846, loss: 3.058013439178467\r\nStep 847, loss: 3.056062936782837\r\nStep 848, loss: 3.055447816848755\r\nStep 849, loss: 3.0441277027130127\r\nStep 850, loss: 3.0437002182006836\r\nStep 851, loss: 3.0349948406219482\r\nStep 852, loss: 3.0318353176116943\r\nStep 853, loss: 3.031405448913574\r\nStep 854, loss: 3.02474308013916\r\nStep 855, loss: 3.1087799072265625\r\nStep 856, loss: 3.0305161476135254\r\nStep 857, loss: 3.011469602584839\r\nStep 858, loss: 3.023132085800171\r\nStep 859, loss: 3.016911268234253\r\nStep 860, loss: 3.0080299377441406\r\nStep 861, loss: 3.004092216491699\r\nStep 862, loss: 3.0056533813476562\r\nStep 863, loss: 3.004362106323242\r\nStep 864, loss: 
3.0223615169525146\r\nStep 865, loss: 3.0556511878967285\r\nStep 866, loss: 3.001739263534546\r\nStep 867, loss: 2.987731456756592\r\nStep 868, loss: 2.997671604156494\r\nStep 869, loss: 2.997199296951294\r\nStep 870, loss: 2.9838430881500244\r\nStep 871, loss: 2.9726381301879883\r\nStep 872, loss: 2.968327045440674\r\nStep 873, loss: 2.9676589965820312\r\nStep 874, loss: 2.978487253189087\r\nStep 875, loss: 2.9653780460357666\r\nStep 876, loss: 2.9618821144104004\r\nStep 877, loss: 2.973937511444092\r\nStep 878, loss: 2.9771201610565186\r\nStep 879, loss: 3.0112006664276123\r\nStep 880, loss: 2.950934410095215\r\nStep 881, loss: 2.945429563522339\r\nStep 882, loss: 2.9530489444732666\r\nStep 883, loss: 2.958892345428467\r\nStep 884, loss: 2.9561400413513184\r\nStep 885, loss: 2.9463260173797607\r\nStep 886, loss: 2.9454152584075928\r\nStep 887, loss: 2.9372642040252686\r\nStep 888, loss: 2.9195969104766846\r\nStep 889, loss: 2.9215469360351562\r\nStep 890, loss: 2.9272406101226807\r\nStep 891, loss: 2.926670789718628\r\nStep 892, loss: 2.970978021621704\r\nStep 893, loss: 2.9142518043518066\r\nStep 894, loss: 3.0036213397979736\r\nStep 895, loss: 2.917707681655884\r\nStep 896, loss: 2.9085793495178223\r\nStep 897, loss: 2.9003405570983887\r\nStep 898, loss: 2.9152815341949463\r\nStep 899, loss: 2.8969173431396484\r\nStep 900, loss: 2.8934483528137207\r\nStep 901, loss: 2.897230386734009\r\nStep 902, loss: 2.891411781311035\r\nStep 903, loss: 2.881835699081421\r\nStep 904, loss: 2.881769895553589\r\nStep 905, loss: 2.8909554481506348\r\nStep 906, loss: 2.8742079734802246\r\nStep 907, loss: 2.882643938064575\r\nStep 908, loss: 2.873570680618286\r\nStep 909, loss: 2.9156081676483154\r\nStep 910, loss: 2.8585634231567383\r\nStep 911, loss: 2.8727359771728516\r\nStep 912, loss: 2.86765193939209\r\nStep 913, loss: 2.8567380905151367\r\nStep 914, loss: 2.859903573989868\r\nStep 915, loss: 2.856867551803589\r\nStep 916, loss: 2.8600616455078125\r\nStep 917, loss: 2.857943534851074\r\nStep 918, loss: 2.864003896713257\r\nStep 919, loss: 2.8395440578460693\r\nStep 920, loss: 2.8472559452056885\r\nStep 921, loss: 2.844433546066284\r\nStep 922, loss: 2.8204987049102783\r\nStep 923, loss: 2.9240589141845703\r\nStep 924, loss: 2.828986644744873\r\nStep 925, loss: 2.8238143920898438\r\nStep 926, loss: 2.818108320236206\r\nStep 927, loss: 2.8409006595611572\r\nStep 928, loss: 2.8181207180023193\r\nStep 929, loss: 2.849177122116089\r\nStep 930, loss: 2.8151674270629883\r\nStep 931, loss: 2.8135766983032227\r\nStep 932, loss: 2.7869365215301514\r\nStep 933, loss: 2.806673288345337\r\nStep 934, loss: 2.8063879013061523\r\nStep 935, loss: 2.8030834197998047\r\nStep 936, loss: 2.799488067626953\r\nStep 937, loss: 2.798468589782715\r\nStep 938, loss: 2.791680097579956\r\nStep 939, loss: 2.7892632484436035\r\nStep 940, loss: 2.7725791931152344\r\nStep 941, loss: 2.774951457977295\r\nStep 942, loss: 2.7889492511749268\r\nStep 943, loss: 2.7836148738861084\r\nStep 944, loss: 2.7702794075012207\r\nStep 945, loss: 2.775383710861206\r\nStep 946, loss: 2.7768595218658447\r\nStep 947, loss: 2.7534244060516357\r\nStep 948, loss: 2.8602499961853027\r\nStep 949, loss: 2.763651132583618\r\nStep 950, loss: 2.7593631744384766\r\nStep 951, loss: 2.7507669925689697\r\n",,terminal_output +2742,8497215,"TERMINAL",0,0,"1\t ",,terminal_output +2743,8498311,"TERMINAL",0,0,"2\t ",,terminal_output +2744,8499285,"TERMINAL",0,0,"3\t ",,terminal_output +2745,8500323,"TERMINAL",0,0,"4\t ",,terminal_output 
+2746,8501370,"TERMINAL",0,0,"6\t ",,terminal_output +2747,8502402,"TERMINAL",0,0,"7\t ",,terminal_output +2748,8503440,"TERMINAL",0,0,"8\t ",,terminal_output +2749,8503662,"TERMINAL",0,0,"Step 952, loss: 2.7997846603393555\r\nStep 953, loss: 2.749717950820923\r\nStep 954, loss: 2.750847578048706\r\nStep 955, loss: 2.741530418395996\r\nStep 956, loss: 2.7366607189178467\r\nStep 957, loss: 2.736522674560547\r\nStep 958, loss: 2.7298853397369385\r\nStep 959, loss: 2.7132301330566406\r\nStep 960, loss: 2.7249341011047363\r\nStep 961, loss: 2.72369122505188\r\nStep 962, loss: 2.7298743724823\r\nStep 963, loss: 2.7240216732025146\r\nStep 964, loss: 2.705383062362671\r\nStep 965, loss: 2.7190845012664795\r\nStep 966, loss: 2.7148654460906982\r\nStep 967, loss: 2.7052414417266846\r\nStep 968, loss: 2.7082836627960205\r\nStep 969, loss: 2.7021052837371826\r\nStep 970, loss: 2.7550175189971924\r\nStep 971, loss: 2.70028018951416\r\nStep 972, loss: 2.6888058185577393\r\nStep 973, loss: 2.7499876022338867\r\nStep 974, loss: 2.692136764526367\r\nStep 975, loss: 2.681094169616699\r\nStep 976, loss: 2.690741539001465\r\nStep 977, loss: 2.683913230895996\r\nStep 978, loss: 2.670283794403076\r\nStep 979, loss: 2.6750693321228027\r\nStep 980, loss: 2.669175863265991\r\nStep 981, loss: 2.67159104347229\r\nStep 982, loss: 2.667241096496582\r\nStep 983, loss: 2.6635067462921143\r\nStep 984, loss: 2.7399728298187256\r\nStep 985, loss: 2.6606640815734863\r\nStep 986, loss: 2.6579418182373047\r\nStep 987, loss: 2.6670823097229004\r\nStep 988, loss: 2.6498467922210693\r\nStep 989, loss: 2.6475841999053955\r\nStep 990, loss: 2.644059658050537\r\nStep 991, loss: 2.6469709873199463\r\nStep 992, loss: 2.6704254150390625\r\nStep 993, loss: 2.6296868324279785\r\nStep 994, loss: 2.6361887454986572\r\nStep 995, loss: 2.635483980178833\r\nStep 996, loss: 2.6377995014190674\r\nStep 997, loss: 2.6278982162475586\r\nStep 998, loss: 2.612067222595215\r\nStep 999, loss: 2.6224284172058105\r\nSaved checkpoint at step 1000\r\nStep 1000, loss: 2.62579607963562\r\nStep 1001, loss: 2.6142592430114746\r\nStep 1002, loss: 2.6130053997039795\r\nStep 1003, loss: 2.621941328048706\r\nStep 1004, loss: 2.5983779430389404\r\nStep 1005, loss: 2.5932180881500244\r\nStep 1006, loss: 2.6793057918548584\r\nStep 1007, loss: 2.5861830711364746\r\nStep 1008, loss: 2.5982282161712646\r\nStep 1009, loss: 2.5986690521240234\r\nStep 1010, loss: 2.592897415161133\r\nStep 1011, loss: 2.6270201206207275\r\nStep 1012, loss: 2.575294256210327\r\nStep 1013, loss: 2.5738370418548584\r\nStep 1014, loss: 2.5717062950134277\r\nStep 1015, loss: 2.575685501098633\r\nStep 1016, loss: 2.5576577186584473\r\nStep 1017, loss: 2.5726943016052246\r\nStep 1018, loss: 2.5550880432128906\r\nStep 1019, loss: 2.559271812438965\r\nStep 1020, loss: 2.5648744106292725\r\nStep 1021, loss: 2.560307025909424\r\nStep 1022, loss: 2.556380033493042\r\nStep 1023, loss: 2.5482420921325684\r\nStep 1024, loss: 2.562523603439331\r\nStep 1025, loss: 2.542625665664673\r\nStep 1026, loss: 2.5477395057678223\r\nStep 1027, loss: 2.54327654838562\r\nStep 1028, loss: 2.5357701778411865\r\nStep 1029, loss: 2.6014912128448486\r\nStep 1030, loss: 2.5485808849334717\r\nStep 1031, loss: 2.6083528995513916\r\nStep 1032, loss: 2.538877248764038\r\nStep 1033, loss: 2.5305094718933105\r\nStep 1034, loss: 2.521597146987915\r\nStep 1035, loss: 2.5235745906829834\r\nStep 1036, loss: 2.5177366733551025\r\nStep 1037, loss: 2.5254006385803223\r\nStep 1038, loss: 2.515406847000122\r\nStep 1039, loss: 
2.5182547569274902\r\nStep 1040, loss: 2.509831428527832\r\nStep 1041, loss: 2.513935089111328\r\nStep 1042, loss: 2.5049304962158203\r\nStep 1043, loss: 2.5068488121032715\r\nStep 1044, loss: 2.498164653778076\r\nStep 1045, loss: 2.5039994716644287\r\nStep 1046, loss: 2.4909534454345703\r\nStep 1047, loss: 2.5006160736083984\r\nStep 1048, loss: 2.4848861694335938\r\nStep 1049, loss: 2.4995434284210205\r\nStep 1050, loss: 2.4851491451263428\r\nStep 1051, loss: 2.481431484222412\r\nStep 1052, loss: 2.476990222930908\r\nStep 1053, loss: 2.489863157272339\r\nStep 1054, loss: 2.4728827476501465\r\nStep 1055, loss: 2.4773941040039062\r\nStep 1056, loss: 2.603182792663574\r\nStep 1057, loss: 2.472409725189209\r\nStep 1058, loss: 2.4692673683166504\r\nStep 1059, loss: 2.469338893890381\r\nStep 1060, loss: 2.4681406021118164\r\nStep 1061, loss: 2.454946994781494\r\nStep 1062, loss: 2.4572036266326904\r\nStep 1063, loss: 2.4434049129486084\r\nStep 1064, loss: 2.4564218521118164\r\nStep 1065, loss: 2.448364734649658\r\nStep 1066, loss: 2.4403538703918457\r\nStep 1067, loss: 2.4466307163238525\r\nStep 1068, loss: 2.4433815479278564\r\nStep 1069, loss: 2.429745674133301\r\nStep 1070, loss: 2.4526727199554443\r\nStep 1071, loss: 2.4344351291656494\r\nStep 1072, loss: 2.4326441287994385\r\nStep 1073, loss: 2.4236114025115967\r\nStep 1074, loss: 2.4122354984283447\r\nStep 1075, loss: 2.4771924018859863\r\nStep 1076, loss: 2.498227834701538\r\nStep 1077, loss: 2.4192721843719482\r\nStep 1078, loss: 2.4159693717956543\r\nStep 1079, loss: 2.4000253677368164\r\nStep 1080, loss: 2.416328191757202\r\nStep 1081, loss: 2.4022104740142822\r\nStep 1082, loss: 2.4049606323242188\r\nStep 1083, loss: 2.386958599090576\r\nStep 1084, loss: 2.3954551219940186\r\nStep 1085, loss: 2.4132778644561768\r\nStep 1086, loss: 2.3958170413970947\r\nStep 1087, loss: 2.3938417434692383\r\nStep 1088, loss: 2.387744665145874\r\nStep 1089, loss: 2.4022481441497803\r\nStep 1090, loss: 2.383063316345215\r\nStep 1091, loss: 2.398932933807373\r\nStep 1092, loss: 2.379110097885132\r\nStep 1093, loss: 2.375033378601074\r\nStep 1094, loss: 2.3729465007781982\r\nStep 1095, loss: 2.3676857948303223\r\nStep 1096, loss: 2.3638761043548584\r\nStep 1097, loss: 2.4473748207092285\r\nStep 1098, loss: 2.3690202236175537\r\nStep 1099, loss: 2.358485460281372\r\nStep 1100, loss: 2.356776475906372\r\nStep 1101, loss: 2.350909471511841\r\nStep 1102, loss: 2.404448986053467\r\nStep 1103, loss: 2.3379507064819336\r\nStep 1104, loss: 2.3353428840637207\r\nStep 1105, loss: 2.3378913402557373\r\nStep 1106, loss: 2.3418872356414795\r\nStep 1107, loss: 2.3386192321777344\r\nStep 1108, loss: 2.33823823928833\r\nStep 1109, loss: 2.321782112121582\r\nStep 1110, loss: 2.34047794342041\r\nStep 1111, loss: 2.3273746967315674\r\nStep 1112, loss: 2.3197503089904785\r\nStep 1113, loss: 2.328583240509033\r\nStep 1114, loss: 2.328977346420288\r\nStep 1115, loss: 2.315302848815918\r\nStep 1116, loss: 2.3709681034088135\r\nStep 1117, loss: 2.313246011734009\r\nStep 1118, loss: 2.300600051879883\r\nStep 1119, loss: 2.307025194168091\r\nStep 1120, loss: 2.3173232078552246\r\nStep 1121, loss: 2.3903775215148926\r\nStep 1122, loss: 2.3028621673583984\r\nStep 1123, loss: 2.2924814224243164\r\nStep 1124, loss: 2.2834792137145996\r\nStep 1125, loss: 2.294154405593872\r\nStep 1126, loss: 2.2841897010803223\r\nStep 1127, loss: 2.2806098461151123\r\nStep 1128, loss: 2.284492254257202\r\nStep 1129, loss: 2.2780585289001465\r\nStep 1130, loss: 2.2713053226470947\r\nStep 1131, loss: 
2.2769761085510254\r\nStep 1132, loss: 2.2864818572998047\r\nStep 1133, loss: 2.278355598449707\r\nStep 1134, loss: 2.2651398181915283\r\nStep 1135, loss: 2.2637624740600586\r\nStep 1136, loss: 2.2588934898376465\r\nStep 1137, loss: 2.2615604400634766\r\nStep 1138, loss: 2.2630980014801025\r\nStep 1139, loss: 2.2496793270111084\r\nStep 1140, loss: 2.3454906940460205\r\nStep 1141, loss: 2.2572708129882812\r\nStep 1142, loss: 2.2550759315490723\r\nStep 1143, loss: 2.251065731048584\r\nStep 1144, loss: 2.255527973175049\r\nStep 1145, loss: 2.2423408031463623\r\nStep 1146, loss: 2.2226648330688477\r\nStep 1147, loss: 2.2402567863464355\r\nStep 1148, loss: 2.2910008430480957\r\nStep 1149, loss: 2.2345499992370605\r\nStep 1150, loss: 2.2178263664245605\r\nStep 1151, loss: 2.226121187210083\r\nStep 1152, loss: 2.2218902111053467\r\nStep 1153, loss: 2.2164080142974854\r\nStep 1154, loss: 2.2198078632354736\r\nStep 1155, loss: 2.211597204208374\r\nStep 1156, loss: 2.2182602882385254\r\nStep 1157, loss: 2.205652952194214\r\nStep 1158, loss: 2.2017228603363037\r\nStep 1159, loss: 2.2078235149383545\r\nStep 1160, loss: 2.287843704223633\r\nStep 1161, loss: 2.1985435485839844\r\nStep 1162, loss: 2.2022032737731934\r\nStep 1163, loss: 2.219254732131958\r\nStep 1164, loss: 2.2051830291748047\r\nStep 1165, loss: 2.202455997467041\r\nStep 1166, loss: 2.1989641189575195\r\nStep 1167, loss: 2.1881930828094482\r\nStep 1168, loss: 2.1798622608184814\r\nStep 1169, loss: 2.1968541145324707\r\nStep 1170, loss: 2.1728620529174805\r\nStep 1171, loss: 2.1800458431243896\r\nStep 1172, loss: 2.1649703979492188\r\nStep 1173, loss: 2.2279467582702637\r\nStep 1174, loss: 2.1635286808013916\r\nStep 1175, loss: 2.176901340484619\r\nStep 1176, loss: 2.2295639514923096\r\nStep 1177, loss: 2.1644554138183594\r\nStep 1178, loss: 2.153655529022217\r\nStep 1179, loss: 2.1481971740722656\r\nStep 1180, loss: 2.1492602825164795\r\nStep 1181, loss: 2.1686105728149414\r\n",,terminal_output +2750,8504479,"TERMINAL",0,0,"9\t ",,terminal_output +2751,8505586,"TERMINAL",0,0,"2:00\t ",,terminal_output +2752,8506563,"TERMINAL",0,0,"1\t ",,terminal_output +2753,8507632,"TERMINAL",0,0,"2\t ",,terminal_output +2754,8508655,"TERMINAL",0,0,"3\t ",,terminal_output +2755,8509366,"TERMINAL",0,0,"Step 1182, loss: 2.1461148262023926\r\nStep 1183, loss: 2.1600279808044434\r\nStep 1184, loss: 2.1556637287139893\r\nStep 1185, loss: 2.141895055770874\r\nStep 1186, loss: 2.154371976852417\r\nStep 1187, loss: 2.132333993911743\r\nStep 1188, loss: 2.1390228271484375\r\nStep 1189, loss: 2.142042398452759\r\nStep 1190, loss: 2.1255698204040527\r\nStep 1191, loss: 2.1219329833984375\r\nStep 1192, loss: 2.1284923553466797\r\nStep 1193, loss: 2.1251230239868164\r\nStep 1194, loss: 2.1107993125915527\r\nStep 1195, loss: 2.1147079467773438\r\nStep 1196, loss: 2.201138973236084\r\nStep 1197, loss: 2.1207451820373535\r\nStep 1198, loss: 2.1102819442749023\r\nStep 1199, loss: 2.1237576007843018\r\nStep 1200, loss: 2.10891056060791\r\nStep 1201, loss: 2.1017425060272217\r\nStep 1202, loss: 2.1208627223968506\r\nStep 1203, loss: 2.1583030223846436\r\nStep 1204, loss: 2.101498603820801\r\nStep 1205, loss: 2.0986955165863037\r\nStep 1206, loss: 2.101982831954956\r\nStep 1207, loss: 2.086480140686035\r\nStep 1208, loss: 2.090900182723999\r\nStep 1209, loss: 2.069870710372925\r\nStep 1210, loss: 2.0723464488983154\r\nStep 1211, loss: 2.0820512771606445\r\nStep 1212, loss: 2.071640729904175\r\nStep 1213, loss: 2.064291477203369\r\nStep 1214, loss: 
2.0691721439361572\r\nStep 1215, loss: 2.0789217948913574\r\nStep 1216, loss: 2.156344175338745\r\nStep 1217, loss: 2.1549630165100098\r\nStep 1218, loss: 2.0526905059814453\r\nStep 1219, loss: 2.061573028564453\r\nStep 1220, loss: 2.0697855949401855\r\nStep 1221, loss: 2.0418360233306885\r\nStep 1222, loss: 2.0521113872528076\r\nStep 1223, loss: 2.059547185897827\r\nStep 1224, loss: 2.0515077114105225\r\nStep 1225, loss: 2.0492050647735596\r\nStep 1226, loss: 2.0445451736450195\r\nStep 1227, loss: 2.0470144748687744\r\nStep 1228, loss: 2.031655788421631\r\nStep 1229, loss: 2.0292928218841553\r\nStep 1230, loss: 2.0304853916168213\r\nStep 1231, loss: 2.027062177658081\r\nStep 1232, loss: 2.0224075317382812\r\nStep 1233, loss: 2.027792453765869\r\nStep 1234, loss: 2.008570432662964\r\nStep 1235, loss: 2.0298564434051514\r\nStep 1236, loss: 2.0227930545806885\r\nStep 1237, loss: 2.0208287239074707\r\nStep 1238, loss: 2.0750374794006348\r\nStep 1239, loss: 2.016650915145874\r\nStep 1240, loss: 2.0115444660186768\r\nStep 1241, loss: 2.0091710090637207\r\nStep 1242, loss: 2.0846128463745117\r\nStep 1243, loss: 1.9931142330169678\r\nStep 1244, loss: 2.0046510696411133\r\nStep 1245, loss: 2.0004141330718994\r\nStep 1246, loss: 1.9958750009536743\r\nStep 1247, loss: 2.0064306259155273\r\nStep 1248, loss: 1.9932303428649902\r\nStep 1249, loss: 1.9862507581710815\r\nStep 1250, loss: 1.9930009841918945\r\nStep 1251, loss: 1.9790695905685425\r\nStep 1252, loss: 1.9822263717651367\r\nStep 1253, loss: 1.9782899618148804\r\nStep 1254, loss: 1.9581910371780396\r\nStep 1255, loss: 1.9748982191085815\r\nStep 1256, loss: 1.9635794162750244\r\nStep 1257, loss: 1.96467125415802\r\nStep 1258, loss: 2.0502288341522217\r\nStep 1259, loss: 1.9645037651062012\r\nStep 1260, loss: 1.9566267728805542\r\nStep 1261, loss: 1.9610122442245483\r\nStep 1262, loss: 1.9572957754135132\r\nStep 1263, loss: 1.947699785232544\r\nStep 1264, loss: 1.9536083936691284\r\nStep 1265, loss: 1.9439918994903564\r\nStep 1266, loss: 1.9391252994537354\r\nStep 1267, loss: 1.9465703964233398\r\nStep 1268, loss: 1.9381437301635742\r\nStep 1269, loss: 1.9451885223388672\r\nStep 1270, loss: 1.9362822771072388\r\nStep 1271, loss: 1.942479133605957\r\nStep 1272, loss: 2.0647544860839844\r\nStep 1273, loss: 1.9217901229858398\r\nStep 1274, loss: 1.918097972869873\r\nStep 1275, loss: 1.9207983016967773\r\nStep 1276, loss: 1.9112340211868286\r\nStep 1277, loss: 1.9173628091812134\r\nStep 1278, loss: 1.905621886253357\r\nStep 1279, loss: 1.9168869256973267\r\nStep 1280, loss: 1.9029619693756104\r\nStep 1281, loss: 1.9050939083099365\r\nStep 1282, loss: 1.8982113599777222\r\nStep 1283, loss: 1.9047168493270874\r\nStep 1284, loss: 1.8921551704406738\r\nStep 1285, loss: 1.905043125152588\r\nStep 1286, loss: 1.8943544626235962\r\nStep 1287, loss: 1.9059925079345703\r\nStep 1288, loss: 1.8885000944137573\r\nStep 1289, loss: 1.9689003229141235\r\nStep 1290, loss: 1.8825818300247192\r\nStep 1291, loss: 1.8828750848770142\r\nStep 1292, loss: 1.8741462230682373\r\nStep 1293, loss: 1.8820903301239014\r\nStep 1294, loss: 2.0371758937835693\r\nStep 1295, loss: 1.8813563585281372\r\nStep 1296, loss: 1.881657600402832\r\nStep 1297, loss: 1.8824207782745361\r\nStep 1298, loss: 1.865430235862732\r\nStep 1299, loss: 1.8692209720611572\r\nStep 1300, loss: 1.888992190361023\r\nStep 1301, loss: 1.8660868406295776\r\nStep 1302, loss: 1.8594869375228882\r\nStep 1303, loss: 1.8729610443115234\r\nStep 1304, loss: 1.8504626750946045\r\nStep 1305, loss: 
1.8565737009048462\r\nStep 1306, loss: 1.8532053232192993\r\nStep 1307, loss: 1.9306327104568481\r\nStep 1308, loss: 1.936278223991394\r\nStep 1309, loss: 1.8516786098480225\r\nStep 1310, loss: 1.8422890901565552\r\nStep 1311, loss: 1.8456766605377197\r\nStep 1312, loss: 1.8394432067871094\r\nStep 1313, loss: 1.8354990482330322\r\nStep 1314, loss: 1.8273975849151611\r\nStep 1315, loss: 1.8948763608932495\r\nStep 1316, loss: 1.8393481969833374\r\nStep 1317, loss: 1.8186174631118774\r\nStep 1318, loss: 1.8183033466339111\r\nStep 1319, loss: 1.8423881530761719\r\nStep 1320, loss: 1.8336788415908813\r\nStep 1321, loss: 1.8008756637573242\r\nStep 1322, loss: 1.8221098184585571\r\nStep 1323, loss: 1.8055483102798462\r\nStep 1324, loss: 1.7986773252487183\r\nStep 1325, loss: 1.7975739240646362\r\nStep 1326, loss: 1.8053267002105713\r\nStep 1327, loss: 1.8110829591751099\r\nStep 1328, loss: 1.7973594665527344\r\nStep 1329, loss: 1.808693766593933\r\nStep 1330, loss: 1.8743174076080322\r\nStep 1331, loss: 1.888297438621521\r\nStep 1332, loss: 1.7995697259902954\r\nStep 1333, loss: 1.789908766746521\r\nStep 1334, loss: 1.7872180938720703\r\nStep 1335, loss: 1.7820018529891968\r\nStep 1336, loss: 1.7814911603927612\r\nStep 1337, loss: 1.7844513654708862\r\nStep 1338, loss: 1.7761837244033813\r\nStep 1339, loss: 1.7736401557922363\r\nStep 1340, loss: 1.7778724431991577\r\nStep 1341, loss: 1.7751766443252563\r\nStep 1342, loss: 1.7702853679656982\r\nStep 1343, loss: 1.762118935585022\r\nStep 1344, loss: 1.7645577192306519\r\nStep 1345, loss: 1.8347842693328857\r\nStep 1346, loss: 1.7554749250411987\r\nStep 1347, loss: 1.7580913305282593\r\nStep 1348, loss: 1.758215069770813\r\nStep 1349, loss: 1.7455673217773438\r\nStep 1350, loss: 1.7405400276184082\r\nStep 1351, loss: 1.7645169496536255\r\nStep 1352, loss: 1.7346869707107544\r\nStep 1353, loss: 1.746416449546814\r\nStep 1354, loss: 1.7319488525390625\r\nStep 1355, loss: 1.8256840705871582\r\nStep 1356, loss: 1.7397595643997192\r\nStep 1357, loss: 1.7273389101028442\r\nStep 1358, loss: 1.729748249053955\r\nStep 1359, loss: 1.9667606353759766\r\nStep 1360, loss: 1.7322371006011963\r\nStep 1361, loss: 1.7358412742614746\r\nStep 1362, loss: 1.746429204940796\r\nStep 1363, loss: 1.72173273563385\r\nStep 1364, loss: 1.7080377340316772\r\nStep 1365, loss: 1.7217028141021729\r\nStep 1366, loss: 1.7017738819122314\r\nStep 1367, loss: 1.715039610862732\r\nStep 1368, loss: 1.7089952230453491\r\nStep 1369, loss: 1.6952428817749023\r\nStep 1370, loss: 1.702636957168579\r\nStep 1371, loss: 1.6982178688049316\r\nStep 1372, loss: 1.697209119796753\r\nStep 1373, loss: 1.6975524425506592\r\nStep 1374, loss: 1.6937358379364014\r\nStep 1375, loss: 1.7868443727493286\r\nStep 1376, loss: 1.6945550441741943\r\nStep 1377, loss: 1.6829622983932495\r\nStep 1378, loss: 1.6818268299102783\r\nStep 1379, loss: 1.6838221549987793\r\nStep 1380, loss: 1.6824897527694702\r\nStep 1381, loss: 1.6901605129241943\r\nStep 1382, loss: 1.6796891689300537\r\nStep 1383, loss: 1.6655418872833252\r\nStep 1384, loss: 1.6727032661437988\r\nStep 1385, loss: 1.7277752161026\r\nStep 1386, loss: 1.6652811765670776\r\nStep 1387, loss: 1.669694185256958\r\nStep 1388, loss: 1.6503432989120483\r\nStep 1389, loss: 1.6672589778900146\r\nStep 1390, loss: 1.7476345300674438\r\nStep 1391, loss: 1.6562339067459106\r\nStep 1392, loss: 1.6573721170425415\r\nStep 1393, loss: 1.6555349826812744\r\nStep 1394, loss: 1.6563104391098022\r\nStep 1395, loss: 1.6501855850219727\r\nStep 1396, loss: 
1.642444133758545\r\nStep 1397, loss: 1.649664282798767\r\nStep 1398, loss: 1.6437225341796875\r\nStep 1399, loss: 1.6301758289337158\r\nStep 1400, loss: 1.6375468969345093\r\nStep 1401, loss: 1.7201212644577026\r\nStep 1402, loss: 1.6339973211288452\r\nStep 1403, loss: 1.6354479789733887\r\nStep 1404, loss: 1.6254457235336304\r\nStep 1405, loss: 1.6238367557525635\r\nStep 1406, loss: 1.6269700527191162\r\nStep 1407, loss: 1.6271783113479614\r\nStep 1408, loss: 1.6236388683319092\r\nStep 1409, loss: 1.6247005462646484\r\nStep 1410, loss: 1.703533411026001\r\n",,terminal_output +2756,8509784,"TERMINAL",0,0,"4\t ",,terminal_output +2757,8510805,"TERMINAL",0,0,"5\t ",,terminal_output +2758,8511757,"TERMINAL",0,0,"6\t ",,terminal_output +2759,8512855,"TERMINAL",0,0,"7\t ",,terminal_output +2760,8513836,"TERMINAL",0,0,"8\t ",,terminal_output +2761,8514382,"TERMINAL",0,0,"Step 1411, loss: 1.6301096677780151\r\nStep 1412, loss: 1.6130642890930176\r\nStep 1413, loss: 1.6092276573181152\r\nStep 1414, loss: 1.6037098169326782\r\nStep 1415, loss: 1.603817105293274\r\nStep 1416, loss: 1.6079381704330444\r\nStep 1417, loss: 1.6011780500411987\r\nStep 1418, loss: 1.6056395769119263\r\nStep 1419, loss: 1.7999637126922607\r\nStep 1420, loss: 1.6055976152420044\r\nStep 1421, loss: 1.5836765766143799\r\nStep 1422, loss: 1.6017389297485352\r\nStep 1423, loss: 1.591356635093689\r\nStep 1424, loss: 1.5818482637405396\r\nStep 1425, loss: 1.5787668228149414\r\nStep 1426, loss: 1.576428771018982\r\nStep 1427, loss: 1.5723565816879272\r\nStep 1428, loss: 1.5732758045196533\r\nStep 1429, loss: 1.6748530864715576\r\nStep 1430, loss: 1.5899357795715332\r\nStep 1431, loss: 1.5756182670593262\r\nStep 1432, loss: 1.5761698484420776\r\nStep 1433, loss: 1.5761581659317017\r\nStep 1434, loss: 1.5672630071640015\r\nStep 1435, loss: 1.5699366331100464\r\nStep 1436, loss: 1.5601105690002441\r\nStep 1437, loss: 1.5489978790283203\r\nStep 1438, loss: 1.6438257694244385\r\nStep 1439, loss: 1.5596591234207153\r\nStep 1440, loss: 1.5541038513183594\r\nStep 1441, loss: 1.5488420724868774\r\nStep 1442, loss: 1.5376554727554321\r\nStep 1443, loss: 1.5341160297393799\r\nStep 1444, loss: 1.5546623468399048\r\nStep 1445, loss: 1.5510375499725342\r\nStep 1446, loss: 1.5432322025299072\r\nStep 1447, loss: 1.5296382904052734\r\nStep 1448, loss: 1.5401533842086792\r\nStep 1449, loss: 1.5440949201583862\r\nStep 1450, loss: 1.5351777076721191\r\nStep 1451, loss: 1.5238182544708252\r\nStep 1452, loss: 1.5309386253356934\r\nStep 1453, loss: 1.610527753829956\r\nStep 1454, loss: 1.604359745979309\r\nStep 1455, loss: 1.5357221364974976\r\nStep 1456, loss: 1.5338026285171509\r\nStep 1457, loss: 1.5150361061096191\r\nStep 1458, loss: 1.51834237575531\r\nStep 1459, loss: 1.5178166627883911\r\nStep 1460, loss: 1.5000675916671753\r\nStep 1461, loss: 1.5194787979125977\r\nStep 1462, loss: 1.5067546367645264\r\nStep 1463, loss: 1.5742841958999634\r\nStep 1464, loss: 1.5128026008605957\r\nStep 1465, loss: 1.501591444015503\r\nStep 1466, loss: 1.490031123161316\r\nStep 1467, loss: 1.5030615329742432\r\nStep 1468, loss: 1.4928689002990723\r\nStep 1469, loss: 1.4987040758132935\r\nStep 1470, loss: 1.488141655921936\r\nStep 1471, loss: 1.4874686002731323\r\nStep 1472, loss: 1.4864723682403564\r\nStep 1473, loss: 1.4927301406860352\r\nStep 1474, loss: 1.4774378538131714\r\nStep 1475, loss: 1.5741008520126343\r\nStep 1476, loss: 1.472409963607788\r\nStep 1477, loss: 1.476475477218628\r\nStep 1478, loss: 1.4730345010757446\r\nStep 1479, loss: 
1.4733737707138062\r\nStep 1480, loss: 1.4773321151733398\r\nStep 1481, loss: 1.457149863243103\r\nStep 1482, loss: 1.5374374389648438\r\nStep 1483, loss: 1.4580304622650146\r\nStep 1484, loss: 1.4590259790420532\r\nStep 1485, loss: 1.4561288356781006\r\nStep 1486, loss: 1.436525583267212\r\nStep 1487, loss: 1.4561225175857544\r\nStep 1488, loss: 1.450204610824585\r\nStep 1489, loss: 1.4471312761306763\r\nStep 1490, loss: 1.445505142211914\r\nStep 1491, loss: 1.5281968116760254\r\nStep 1492, loss: 1.4531079530715942\r\nStep 1493, loss: 1.4511549472808838\r\nStep 1494, loss: 1.449227213859558\r\nStep 1495, loss: 1.4398713111877441\r\nStep 1496, loss: 1.4332908391952515\r\nStep 1497, loss: 1.4444303512573242\r\nStep 1498, loss: 1.4376487731933594\r\nStep 1499, loss: 1.4225661754608154\r\nStep 1500, loss: 1.4354257583618164\r\nStep 1501, loss: 1.4264285564422607\r\nStep 1502, loss: 1.426544427871704\r\nStep 1503, loss: 1.4864002466201782\r\nStep 1504, loss: 1.4232275485992432\r\nStep 1505, loss: 1.4190514087677002\r\nStep 1506, loss: 1.5054543018341064\r\nStep 1507, loss: 1.4134894609451294\r\nStep 1508, loss: 1.4175636768341064\r\nStep 1509, loss: 1.4135597944259644\r\nStep 1510, loss: 1.410888433456421\r\nStep 1511, loss: 1.4072614908218384\r\nStep 1512, loss: 1.4007594585418701\r\nStep 1513, loss: 1.3992942571640015\r\nStep 1514, loss: 1.4014194011688232\r\nStep 1515, loss: 1.4040285348892212\r\nStep 1516, loss: 1.404921054840088\r\nStep 1517, loss: 1.4780999422073364\r\nStep 1518, loss: 1.4005719423294067\r\nStep 1519, loss: 1.4027119874954224\r\nStep 1520, loss: 1.3840972185134888\r\nStep 1521, loss: 1.3999054431915283\r\nStep 1522, loss: 1.3829114437103271\r\nStep 1523, loss: 1.3781347274780273\r\nStep 1524, loss: 1.3861167430877686\r\nStep 1525, loss: 1.3856284618377686\r\nStep 1526, loss: 1.3774737119674683\r\nStep 1527, loss: 1.4650448560714722\r\nStep 1528, loss: 1.3910046815872192\r\nStep 1529, loss: 1.3655250072479248\r\nStep 1530, loss: 1.3686193227767944\r\nStep 1531, loss: 1.3717372417449951\r\nStep 1532, loss: 1.4888373613357544\r\nStep 1533, loss: 1.378780722618103\r\nStep 1534, loss: 1.3632453680038452\r\nStep 1535, loss: 1.3715457916259766\r\nStep 1536, loss: 1.3572782278060913\r\nStep 1537, loss: 1.3693796396255493\r\nStep 1538, loss: 1.3438113927841187\r\nStep 1539, loss: 1.4275165796279907\r\nStep 1540, loss: 1.3487389087677002\r\nStep 1541, loss: 1.3630285263061523\r\nStep 1542, loss: 1.341016411781311\r\nStep 1543, loss: 1.3332781791687012\r\nStep 1544, loss: 1.354549527168274\r\nStep 1545, loss: 1.3339176177978516\r\nStep 1546, loss: 1.3414068222045898\r\nStep 1547, loss: 1.424445390701294\r\nStep 1548, loss: 1.3479233980178833\r\nStep 1549, loss: 1.4191590547561646\r\nStep 1550, loss: 1.3339534997940063\r\nStep 1551, loss: 1.3304798603057861\r\nStep 1552, loss: 1.3272526264190674\r\nStep 1553, loss: 1.3289159536361694\r\nStep 1554, loss: 1.320695161819458\r\nStep 1555, loss: 1.3121440410614014\r\nStep 1556, loss: 1.3139842748641968\r\nStep 1557, loss: 1.323675513267517\r\nStep 1558, loss: 1.3093068599700928\r\nStep 1559, loss: 1.312829613685608\r\nStep 1560, loss: 1.304625391960144\r\nStep 1561, loss: 1.3153128623962402\r\nStep 1562, loss: 1.375686764717102\r\nStep 1563, loss: 1.3967183828353882\r\nStep 1564, loss: 1.3024230003356934\r\nStep 1565, loss: 1.307420253753662\r\nStep 1566, loss: 1.2930610179901123\r\nStep 1567, loss: 1.2858902215957642\r\nStep 1568, loss: 1.2863813638687134\r\nStep 1569, loss: 1.292761206626892\r\nStep 1570, loss: 
1.2857739925384521\r\nStep 1571, loss: 1.3024885654449463\r\nStep 1572, loss: 1.2859091758728027\r\nStep 1573, loss: 1.287813425064087\r\nStep 1574, loss: 1.2958333492279053\r\nStep 1575, loss: 1.286295771598816\r\nStep 1576, loss: 1.28789484500885\r\nStep 1577, loss: 1.285409927368164\r\nStep 1578, loss: 1.2670167684555054\r\nStep 1579, loss: 1.2764036655426025\r\nStep 1580, loss: 1.3580801486968994\r\nStep 1581, loss: 1.2754911184310913\r\nStep 1582, loss: 1.2680541276931763\r\nStep 1583, loss: 1.2632626295089722\r\nStep 1584, loss: 1.26119065284729\r\nStep 1585, loss: 1.3526592254638672\r\nStep 1586, loss: 1.2600555419921875\r\nStep 1587, loss: 1.251711368560791\r\nStep 1588, loss: 1.2515569925308228\r\nStep 1589, loss: 1.254250407218933\r\nStep 1590, loss: 1.251937985420227\r\nStep 1591, loss: 1.2647521495819092\r\nStep 1592, loss: 1.244464635848999\r\nStep 1593, loss: 1.2474932670593262\r\nStep 1594, loss: 1.2387888431549072\r\nStep 1595, loss: 1.2395362854003906\r\nStep 1596, loss: 1.2336413860321045\r\nStep 1597, loss: 1.2488036155700684\r\nStep 1598, loss: 1.232612133026123\r\nStep 1599, loss: 1.3339787721633911\r\nStep 1600, loss: 1.2437795400619507\r\nStep 1601, loss: 1.241505742073059\r\nStep 1602, loss: 1.2510954141616821\r\nStep 1603, loss: 1.2282265424728394\r\nStep 1604, loss: 1.2275598049163818\r\nStep 1605, loss: 1.2225043773651123\r\nStep 1606, loss: 1.225530743598938\r\nStep 1607, loss: 1.3794116973876953\r\nStep 1608, loss: 1.2991845607757568\r\nStep 1609, loss: 1.215093970298767\r\nStep 1610, loss: 1.223601222038269\r\nStep 1611, loss: 1.2187005281448364\r\nStep 1612, loss: 1.2128242254257202\r\nStep 1613, loss: 1.2048320770263672\r\nStep 1614, loss: 1.205438494682312\r\nStep 1615, loss: 1.2013064622879028\r\nStep 1616, loss: 1.2171531915664673\r\nStep 1617, loss: 1.1983642578125\r\nStep 1618, loss: 1.1944153308868408\r\nStep 1619, loss: 1.2019575834274292\r\nStep 1620, loss: 1.197701096534729\r\nStep 1621, loss: 1.197914481163025\r\nStep 1622, loss: 1.1877881288528442\r\nStep 1623, loss: 1.1885789632797241\r\nStep 1624, loss: 1.1786848306655884\r\nStep 1625, loss: 1.196977138519287\r\nStep 1626, loss: 1.1844444274902344\r\nStep 1627, loss: 1.1829357147216797\r\nStep 1628, loss: 1.259438157081604\r\nStep 1629, loss: 1.1964062452316284\r\nStep 1630, loss: 1.1895968914031982\r\nStep 1631, loss: 1.1769605875015259\r\nStep 1632, loss: 1.1790674924850464\r\nStep 1633, loss: 1.1833016872406006\r\nStep 1634, loss: 1.1699211597442627\r\nStep 1635, loss: 1.175435185432434\r\nStep 1636, loss: 1.2554397583007812\r\nStep 1637, loss: 1.1660076379776\r\nStep 1638, loss: 1.1537678241729736\r\nStep 1639, loss: 1.159993052482605\r\n",,terminal_output +2762,8515008,"TERMINAL",0,0,"9\t ",,terminal_output +2763,8515919,"TERMINAL",0,0,"10\t ",,terminal_output +2764,8516959,"TERMINAL",0,0,"1\t ",,terminal_output +2765,8518079,"TERMINAL",0,0,"2\t ",,terminal_output +2766,8519097,"TERMINAL",0,0,"3\t ",,terminal_output +2767,8519818,"TERMINAL",0,0,"Step 1640, loss: 1.16188645362854\r\nStep 1641, loss: 1.1617001295089722\r\nStep 1642, loss: 1.1660685539245605\r\nStep 1643, loss: 1.1515240669250488\r\nStep 1644, loss: 1.2942557334899902\r\nStep 1645, loss: 1.1563798189163208\r\nStep 1646, loss: 1.15823495388031\r\nStep 1647, loss: 1.2331572771072388\r\nStep 1648, loss: 1.159799575805664\r\nStep 1649, loss: 1.1453050374984741\r\nStep 1650, loss: 1.1473041772842407\r\nStep 1651, loss: 1.1352790594100952\r\nStep 1652, loss: 1.1427582502365112\r\nStep 1653, loss: 1.1417266130447388\r\nStep 1654, 
loss: 1.1373786926269531\r\nStep 1655, loss: 1.1396502256393433\r\nStep 1656, loss: 1.132238745689392\r\nStep 1657, loss: 1.206689476966858\r\nStep 1658, loss: 1.1298949718475342\r\nStep 1659, loss: 1.1243247985839844\r\nStep 1660, loss: 1.1257169246673584\r\nStep 1661, loss: 1.1291242837905884\r\nStep 1662, loss: 1.1243059635162354\r\nStep 1663, loss: 1.125014305114746\r\nStep 1664, loss: 1.1184085607528687\r\nStep 1665, loss: 1.112244963645935\r\nStep 1666, loss: 1.2100229263305664\r\nStep 1667, loss: 1.1099988222122192\r\nStep 1668, loss: 1.1053754091262817\r\nStep 1669, loss: 1.112585425376892\r\nStep 1670, loss: 1.1120953559875488\r\nStep 1671, loss: 1.1163967847824097\r\nStep 1672, loss: 1.1045782566070557\r\nStep 1673, loss: 1.1035889387130737\r\nStep 1674, loss: 1.0884079933166504\r\nStep 1675, loss: 1.0976665019989014\r\nStep 1676, loss: 1.1028984785079956\r\nStep 1677, loss: 1.0994230508804321\r\nStep 1678, loss: 1.179951786994934\r\nStep 1679, loss: 1.102056622505188\r\nStep 1680, loss: 1.1782464981079102\r\nStep 1681, loss: 1.0960696935653687\r\nStep 1682, loss: 1.0902549028396606\r\nStep 1683, loss: 1.0749911069869995\r\nStep 1684, loss: 1.0821412801742554\r\nStep 1685, loss: 1.1616007089614868\r\nStep 1686, loss: 1.0806243419647217\r\nStep 1687, loss: 1.0811423063278198\r\nStep 1688, loss: 1.072160005569458\r\nStep 1689, loss: 1.0743541717529297\r\nStep 1690, loss: 1.071553111076355\r\nStep 1691, loss: 1.0711830854415894\r\nStep 1692, loss: 1.0691405534744263\r\nStep 1693, loss: 1.0669519901275635\r\nStep 1694, loss: 1.064153790473938\r\nStep 1695, loss: 1.0688326358795166\r\nStep 1696, loss: 1.066558837890625\r\nStep 1697, loss: 1.0645402669906616\r\nStep 1698, loss: 1.1433250904083252\r\nStep 1699, loss: 1.0526514053344727\r\nStep 1700, loss: 1.058968424797058\r\nStep 1701, loss: 1.0554354190826416\r\nStep 1702, loss: 1.0452711582183838\r\nStep 1703, loss: 1.0468705892562866\r\nStep 1704, loss: 1.053083062171936\r\nStep 1705, loss: 1.0493947267532349\r\nStep 1706, loss: 1.050018548965454\r\nStep 1707, loss: 1.0493272542953491\r\nStep 1708, loss: 1.1242414712905884\r\nStep 1709, loss: 1.0462677478790283\r\nStep 1710, loss: 1.0410798788070679\r\nStep 1711, loss: 1.0404365062713623\r\nStep 1712, loss: 1.0407153367996216\r\nStep 1713, loss: 1.0504106283187866\r\nStep 1714, loss: 1.0450489521026611\r\nStep 1715, loss: 1.0389525890350342\r\nStep 1716, loss: 1.035369634628296\r\nStep 1717, loss: 1.1106311082839966\r\nStep 1718, loss: 1.0205765962600708\r\nStep 1719, loss: 1.020915150642395\r\nStep 1720, loss: 1.0261667966842651\r\nStep 1721, loss: 1.02860689163208\r\nStep 1722, loss: 1.026590347290039\r\nStep 1723, loss: 1.0135035514831543\r\nStep 1724, loss: 1.0193556547164917\r\nStep 1725, loss: 1.1022531986236572\r\nStep 1726, loss: 1.0191341638565063\r\nStep 1727, loss: 0.9970090985298157\r\nStep 1728, loss: 1.0147827863693237\r\nStep 1729, loss: 1.0010994672775269\r\nStep 1730, loss: 1.0109883546829224\r\nStep 1731, loss: 1.007982850074768\r\nStep 1732, loss: 0.9973250031471252\r\nStep 1733, loss: 1.003853678703308\r\nStep 1734, loss: 0.9943264126777649\r\nStep 1735, loss: 1.075078010559082\r\nStep 1736, loss: 1.0013368129730225\r\nStep 1737, loss: 0.9942210912704468\r\nStep 1738, loss: 0.9873697757720947\r\nStep 1739, loss: 0.9878790378570557\r\nStep 1740, loss: 0.9873767495155334\r\nStep 1741, loss: 0.9858866333961487\r\nStep 1742, loss: 0.9831498861312866\r\nStep 1743, loss: 0.9867510795593262\r\nStep 1744, loss: 0.9937174320220947\r\nStep 1745, loss: 
1.06839120388031\r\nStep 1746, loss: 0.9822352528572083\r\nStep 1747, loss: 0.9883080124855042\r\nStep 1748, loss: 0.9837055802345276\r\nStep 1749, loss: 0.9824689626693726\r\nStep 1750, loss: 0.9730369448661804\r\nStep 1751, loss: 0.961717963218689\r\nStep 1752, loss: 0.9807946681976318\r\nStep 1753, loss: 1.055015206336975\r\nStep 1754, loss: 0.9663900136947632\r\nStep 1755, loss: 0.9610560536384583\r\nStep 1756, loss: 0.9671326279640198\r\nStep 1757, loss: 0.9667078256607056\r\nStep 1758, loss: 0.9718667268753052\r\nStep 1759, loss: 0.96468585729599\r\nStep 1760, loss: 0.9569977521896362\r\nStep 1761, loss: 0.9598689675331116\r\nStep 1762, loss: 0.9549834132194519\r\nStep 1763, loss: 1.0304561853408813\r\nStep 1764, loss: 1.0234159231185913\r\nStep 1765, loss: 0.9658213257789612\r\nStep 1766, loss: 0.9492391347885132\r\nStep 1767, loss: 0.9482895731925964\r\nStep 1768, loss: 0.9441653490066528\r\nStep 1769, loss: 0.9502098560333252\r\nStep 1770, loss: 0.9408199787139893\r\nStep 1771, loss: 0.9322059750556946\r\nStep 1772, loss: 1.0253742933273315\r\nStep 1773, loss: 0.9367683529853821\r\nStep 1774, loss: 0.9439451098442078\r\nStep 1775, loss: 0.9404140710830688\r\nStep 1776, loss: 0.9286357760429382\r\nStep 1777, loss: 0.9354848861694336\r\nStep 1778, loss: 0.9333691000938416\r\nStep 1779, loss: 0.9363636374473572\r\nStep 1780, loss: 0.9266244173049927\r\nStep 1781, loss: 0.9221014380455017\r\nStep 1782, loss: 0.9280737638473511\r\nStep 1783, loss: 0.9276670813560486\r\nStep 1784, loss: 1.0047643184661865\r\nStep 1785, loss: 1.0002391338348389\r\nStep 1786, loss: 0.9214962720870972\r\nStep 1787, loss: 0.9227359890937805\r\nStep 1788, loss: 0.9258524775505066\r\nStep 1789, loss: 0.9055101871490479\r\nStep 1790, loss: 0.9243072271347046\r\nStep 1791, loss: 0.9110667109489441\r\nStep 1792, loss: 0.9193739891052246\r\nStep 1793, loss: 0.9028067588806152\r\nStep 1794, loss: 0.8949940204620361\r\nStep 1795, loss: 0.8925043344497681\r\nStep 1796, loss: 0.9095222353935242\r\nStep 1797, loss: 0.9108695387840271\r\nStep 1798, loss: 0.8937553763389587\r\nStep 1799, loss: 0.9031857848167419\r\nStep 1800, loss: 0.9741659760475159\r\nStep 1801, loss: 0.9724459052085876\r\nStep 1802, loss: 0.8993737101554871\r\nStep 1803, loss: 0.892319917678833\r\nStep 1804, loss: 0.8964664340019226\r\nStep 1805, loss: 0.9020946621894836\r\nStep 1806, loss: 0.8872877955436707\r\nStep 1807, loss: 0.8872864842414856\r\nStep 1808, loss: 1.033977746963501\r\nStep 1809, loss: 0.9031466245651245\r\nStep 1810, loss: 0.8912979364395142\r\nStep 1811, loss: 0.8873443007469177\r\nStep 1812, loss: 0.891738772392273\r\nStep 1813, loss: 0.8863613605499268\r\nStep 1814, loss: 0.876929759979248\r\nStep 1815, loss: 0.8748744130134583\r\nStep 1816, loss: 0.8836431503295898\r\nStep 1817, loss: 0.8863758444786072\r\nStep 1818, loss: 0.9610229134559631\r\nStep 1819, loss: 0.8731045722961426\r\nStep 1820, loss: 0.8790981769561768\r\nStep 1821, loss: 0.8726531863212585\r\nStep 1822, loss: 0.9493061304092407\r\nStep 1823, loss: 0.8738576769828796\r\nStep 1824, loss: 0.8615004420280457\r\nStep 1825, loss: 0.8646396994590759\r\nStep 1826, loss: 0.8719663619995117\r\nStep 1827, loss: 0.8776518106460571\r\nStep 1828, loss: 0.8638440370559692\r\nStep 1829, loss: 0.8523248434066772\r\nStep 1830, loss: 0.9386703968048096\r\nStep 1831, loss: 0.8585842251777649\r\nStep 1832, loss: 0.8557928204536438\r\nStep 1833, loss: 0.8497077822685242\r\nStep 1834, loss: 0.8476681113243103\r\nStep 1835, loss: 0.8426429033279419\r\nStep 1836, loss: 
0.8445639610290527\r\nStep 1837, loss: 0.8446393609046936\r\nStep 1838, loss: 0.8436642289161682\r\nStep 1839, loss: 0.9207665324211121\r\nStep 1840, loss: 0.8381456732749939\r\nStep 1841, loss: 0.8304082155227661\r\nStep 1842, loss: 0.8341837525367737\r\nStep 1843, loss: 0.8270362019538879\r\nStep 1844, loss: 0.8447906970977783\r\nStep 1845, loss: 0.8396472930908203\r\nStep 1846, loss: 0.8294242024421692\r\nStep 1847, loss: 0.8340530395507812\r\nStep 1848, loss: 0.8288896083831787\r\nStep 1849, loss: 0.9030379056930542\r\nStep 1850, loss: 0.8340115547180176\r\nStep 1851, loss: 0.8363310098648071\r\nStep 1852, loss: 0.8187428116798401\r\nStep 1853, loss: 0.8207972049713135\r\nStep 1854, loss: 0.9109382629394531\r\nStep 1855, loss: 0.8185655474662781\r\nStep 1856, loss: 0.8224308490753174\r\nStep 1857, loss: 0.8155465722084045\r\nStep 1858, loss: 0.8161537051200867\r\nStep 1859, loss: 0.8095765113830566\r\nStep 1860, loss: 0.8121069669723511\r\nStep 1861, loss: 0.8157718181610107\r\nStep 1862, loss: 0.8160235285758972\r\nStep 1863, loss: 0.7959022521972656\r\nStep 1864, loss: 0.8078964352607727\r\nStep 1865, loss: 0.7932807207107544\r\nStep 1866, loss: 0.8773398399353027\r\nStep 1867, loss: 0.8793342709541321\r\n",,terminal_output +2768,8520071,"TERMINAL",0,0,"4\t ",,terminal_output +2769,8521140,"TERMINAL",0,0,"5\t ",,terminal_output +2770,8522148,"TERMINAL",0,0,"6\t ",,terminal_output +2771,8523296,"TERMINAL",0,0,"7\t ",,terminal_output +2772,8524236,"TERMINAL",0,0,"8\t ",,terminal_output +2773,8524431,"TERMINAL",0,0,"Step 1868, loss: 0.7981368899345398\r\nStep 1869, loss: 0.8031560182571411\r\nStep 1870, loss: 0.7991654872894287\r\nStep 1871, loss: 0.7934608459472656\r\nStep 1872, loss: 0.7941319346427917\r\nStep 1873, loss: 0.7923650741577148\r\nStep 1874, loss: 0.7928857803344727\r\nStep 1875, loss: 0.7974361777305603\r\nStep 1876, loss: 0.7948752641677856\r\nStep 1877, loss: 0.7961939573287964\r\nStep 1878, loss: 0.7869864702224731\r\nStep 1879, loss: 0.7842578291893005\r\nStep 1880, loss: 0.773926317691803\r\nStep 1881, loss: 0.7847232222557068\r\nStep 1882, loss: 0.9144803881645203\r\nStep 1883, loss: 0.8040279150009155\r\nStep 1884, loss: 0.7828233242034912\r\nStep 1885, loss: 0.7945436835289001\r\nStep 1886, loss: 0.8621106743812561\r\nStep 1887, loss: 0.7775026559829712\r\nStep 1888, loss: 0.784569263458252\r\nStep 1889, loss: 0.7666864395141602\r\nStep 1890, loss: 0.7719170451164246\r\nStep 1891, loss: 0.8560346364974976\r\nStep 1892, loss: 0.760599672794342\r\nStep 1893, loss: 0.773205041885376\r\nStep 1894, loss: 0.7572276592254639\r\nStep 1895, loss: 0.7601724863052368\r\nStep 1896, loss: 0.7538647055625916\r\nStep 1897, loss: 0.7672840356826782\r\nStep 1898, loss: 0.7663994431495667\r\nStep 1899, loss: 0.8310291767120361\r\nStep 1900, loss: 0.7617446184158325\r\nStep 1901, loss: 0.7666364908218384\r\nStep 1902, loss: 0.7489709854125977\r\nStep 1903, loss: 0.7522549629211426\r\nStep 1904, loss: 0.7493504881858826\r\nStep 1905, loss: 0.7512446641921997\r\nStep 1906, loss: 0.7397961020469666\r\nStep 1907, loss: 0.7468775510787964\r\nStep 1908, loss: 0.8270103931427002\r\nStep 1909, loss: 0.7473761439323425\r\nStep 1910, loss: 0.7430338263511658\r\nStep 1911, loss: 0.7422906160354614\r\nStep 1912, loss: 0.7487986087799072\r\nStep 1913, loss: 0.74245285987854\r\nStep 1914, loss: 0.7500817775726318\r\nStep 1915, loss: 0.7305583953857422\r\nStep 1916, loss: 0.8184480667114258\r\nStep 1917, loss: 0.7366307377815247\r\nStep 1918, loss: 0.7283251881599426\r\nStep 1919, loss: 
0.7355674505233765\r\nStep 1920, loss: 0.7345721125602722\r\nStep 1921, loss: 0.735842227935791\r\nStep 1922, loss: 0.7205818295478821\r\nStep 1923, loss: 0.7915335297584534\r\nStep 1924, loss: 0.721540093421936\r\nStep 1925, loss: 0.7249675393104553\r\nStep 1926, loss: 0.7162864804267883\r\nStep 1927, loss: 0.7191547155380249\r\nStep 1928, loss: 0.7099131345748901\r\nStep 1929, loss: 0.7215911746025085\r\nStep 1930, loss: 0.7078325152397156\r\nStep 1931, loss: 0.713541567325592\r\nStep 1932, loss: 0.7203308939933777\r\nStep 1933, loss: 0.7944936752319336\r\nStep 1934, loss: 0.7038966417312622\r\nStep 1935, loss: 0.7144477367401123\r\nStep 1936, loss: 0.7128229141235352\r\nStep 1937, loss: 0.7161839008331299\r\nStep 1938, loss: 0.6972056031227112\r\nStep 1939, loss: 0.6995698809623718\r\nStep 1940, loss: 0.7018144130706787\r\nStep 1941, loss: 0.7027522325515747\r\nStep 1942, loss: 0.7105857133865356\r\nStep 1943, loss: 0.7753967642784119\r\nStep 1944, loss: 0.6974099278450012\r\nStep 1945, loss: 0.6973146200180054\r\nStep 1946, loss: 0.6958182454109192\r\nStep 1947, loss: 0.7015774846076965\r\nStep 1948, loss: 0.6844379305839539\r\nStep 1949, loss: 0.6881991028785706\r\nStep 1950, loss: 0.6953204274177551\r\nStep 1951, loss: 0.7724865078926086\r\nStep 1952, loss: 0.6942313313484192\r\nStep 1953, loss: 0.6836058497428894\r\nStep 1954, loss: 0.6872352361679077\r\nStep 1955, loss: 0.6987436413764954\r\nStep 1956, loss: 0.6896398067474365\r\nStep 1957, loss: 0.8182898759841919\r\nStep 1958, loss: 0.6869841814041138\r\nStep 1959, loss: 0.7014235258102417\r\nStep 1960, loss: 0.687301516532898\r\nStep 1961, loss: 0.6879339814186096\r\nStep 1962, loss: 0.6789869070053101\r\nStep 1963, loss: 0.7701600790023804\r\nStep 1964, loss: 0.756460428237915\r\nStep 1965, loss: 0.6754180192947388\r\nStep 1966, loss: 0.6796339750289917\r\nStep 1967, loss: 0.6762710809707642\r\nStep 1968, loss: 0.667529821395874\r\nStep 1969, loss: 0.6704533100128174\r\nStep 1970, loss: 0.6733936071395874\r\nStep 1971, loss: 0.6689598560333252\r\nStep 1972, loss: 0.666325569152832\r\nStep 1973, loss: 0.6654549837112427\r\nStep 1974, loss: 0.6578724980354309\r\nStep 1975, loss: 0.6567274332046509\r\nStep 1976, loss: 0.6601003408432007\r\nStep 1977, loss: 0.653091549873352\r\nStep 1978, loss: 0.6605668067932129\r\nStep 1979, loss: 0.7348406910896301\r\nStep 1980, loss: 0.7300178408622742\r\nStep 1981, loss: 0.6610791683197021\r\nStep 1982, loss: 0.6593403220176697\r\nStep 1983, loss: 0.6569502353668213\r\nStep 1984, loss: 0.652950644493103\r\nStep 1985, loss: 0.6472553610801697\r\nStep 1986, loss: 0.6440768837928772\r\nStep 1987, loss: 0.6428524851799011\r\nStep 1988, loss: 0.6394181847572327\r\nStep 1989, loss: 0.6457924842834473\r\nStep 1990, loss: 0.6497907638549805\r\nStep 1991, loss: 0.6447709202766418\r\nStep 1992, loss: 0.6412797570228577\r\nStep 1993, loss: 0.6263863444328308\r\nStep 1994, loss: 0.6338142156600952\r\nStep 1995, loss: 0.631901741027832\r\nStep 1996, loss: 0.7199881672859192\r\nStep 1997, loss: 0.6356508731842041\r\nStep 1998, loss: 0.6325950026512146\r\nStep 1999, loss: 0.6363793015480042\r\nSaved checkpoint at step 2000\r\n",,terminal_output +2774,8525274,"TERMINAL",0,0,"9\t ",,terminal_output +2775,8525950,"TERMINAL",0,0,"wandb: \r\nwandb: 🚀 View run dynamics-causal-overfit-3373280 at: https://wandb.ai/instant-uv/jafar/runs/m0b20qmd\r\nwandb: Find logs at: ../../../../../hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/wandb/run-20250724_162013-m0b20qmd/logs\r\n",,terminal_output 
+2776,8526306,"TERMINAL",0,0,"20\t ",,terminal_output +2777,8527349,"TERMINAL",0,0,"2\t ",,terminal_output +2778,8528222,"TERMINAL",0,0,"]0;tum_cte0515@hkn0901:~/Projects/jafar[?2004h(jafar) [tum_cte0515@hkn0901 jafar]$ ",,terminal_output +2779,8528388,"TERMINAL",0,0,"3\t ",,terminal_output +2780,8529426,"TERMINAL",0,0,"4\t ",,terminal_output +2781,8530566,"TERMINAL",0,0,"5\t ",,terminal_output +2782,8531593,"TERMINAL",0,0,"6\t ",,terminal_output +2783,8532556,"TERMINAL",0,0,"7\t ",,terminal_output +2784,8533596,"TERMINAL",0,0,"8\t ",,terminal_output +2785,8534664,"TERMINAL",0,0,"9\t ",,terminal_output +2786,8535695,"TERMINAL",0,0,"30\t ",,terminal_output +2787,8536818,"TERMINAL",0,0,"1\t ",,terminal_output +2788,8537837,"TERMINAL",0,0,"2\t ",,terminal_output +2789,8538862,"TERMINAL",0,0,"3\t ",,terminal_output +2790,8539849,"TERMINAL",0,0,"4\t ",,terminal_output +2791,8540912,"TERMINAL",0,0,"5\t ",,terminal_output +2792,8541938,"TERMINAL",0,0,"6\t ",,terminal_output +2793,8543064,"TERMINAL",0,0,"7\t ",,terminal_output +2794,8544010,"TERMINAL",0,0,"8\t ",,terminal_output +2795,8545046,"TERMINAL",0,0,"9\t ",,terminal_output +2796,8546131,"TERMINAL",0,0,"40\t ",,terminal_output +2797,8547157,"TERMINAL",0,0,"1\t ",,terminal_output +2798,8548175,"TERMINAL",0,0,"2\t ",,terminal_output +2799,8549236,"TERMINAL",0,0,"3\t ",,terminal_output +2800,8550250,"TERMINAL",0,0,"4\t ",,terminal_output +2801,8551502,"TERMINAL",0,0,"5\t ",,terminal_output +2802,8552335,"TERMINAL",0,0,"6\t ",,terminal_output +2803,8553370,"TERMINAL",0,0,"8\t ",,terminal_output +2804,8554408,"TERMINAL",0,0,"9\t ",,terminal_output +2805,8555455,"TERMINAL",0,0,"50\t ",,terminal_output +2806,8556584,"TERMINAL",0,0,"1\t ",,terminal_output +2807,8557606,"TERMINAL",0,0,"2\t ",,terminal_output +2808,8558628,"TERMINAL",0,0,"3\t ",,terminal_output +2809,8559653,"TERMINAL",0,0,"4\t ",,terminal_output +2810,8560662,"TERMINAL",0,0,"5\t ",,terminal_output +2811,8561694,"TERMINAL",0,0,"6\t ",,terminal_output +2812,8562828,"TERMINAL",0,0,"7\t ",,terminal_output +2813,8563849,"TERMINAL",0,0,"8\t ",,terminal_output +2814,8564809,"TERMINAL",0,0,"9\t ",,terminal_output +2815,8565848,"TERMINAL",0,0,"3:00\t ",,terminal_output +2816,8566922,"TERMINAL",0,0,"1\t ",,terminal_output +2817,8567948,"TERMINAL",0,0,"2\t ",,terminal_output +2818,8568971,"TERMINAL",0,0,"3\t ",,terminal_output +2819,8570102,"TERMINAL",0,0,"4\t ",,terminal_output +2820,8571122,"TERMINAL",0,0,"5\t ",,terminal_output +2821,8572146,"TERMINAL",0,0,"6\t ",,terminal_output +2822,8573132,"TERMINAL",0,0,"7\t ",,terminal_output +2823,8574204,"TERMINAL",0,0,"8\t ",,terminal_output +2824,8575327,"TERMINAL",0,0,"9\t ",,terminal_output +2825,8576256,"TERMINAL",0,0,"10\t ",,terminal_output +2826,8577288,"TERMINAL",0,0,"1\t ",,terminal_output +2827,8578332,"TERMINAL",0,0,"2\t ",,terminal_output +2828,8579373,"TERMINAL",0,0,"4\t ",,terminal_output +2829,8580407,"TERMINAL",0,0,"5\t ",,terminal_output +2830,8581444,"TERMINAL",0,0,"6\t ",,terminal_output +2831,8582600,"TERMINAL",0,0,"7\t ",,terminal_output +2832,8583521,"TERMINAL",0,0,"8\t ",,terminal_output +2833,8584634,"TERMINAL",0,0,"9\t ",,terminal_output +2834,8585606,"TERMINAL",0,0,"20\t ",,terminal_output +2835,8586649,"TERMINAL",0,0,"1\t ",,terminal_output +2836,8587710,"TERMINAL",0,0,"2 4",,terminal_output +2837,8588742,"TERMINAL",0,0,"3\t ",,terminal_output +2838,8589799,"TERMINAL",0,0,"4\t ",,terminal_output +2839,8590890,"TERMINAL",0,0,"5\t ",,terminal_output +2840,8591847,"TERMINAL",0,0,"6\t ",,terminal_output 
+2841,8592938,"TERMINAL",0,0,"7\t ",,terminal_output +2842,8593955,"TERMINAL",0,0,"8\t ",,terminal_output +2843,8595081,"TERMINAL",0,0,"9\t ",,terminal_output +2844,8596108,"TERMINAL",0,0,"30\t ",,terminal_output +2845,8597130,"TERMINAL",0,0,"1\t ",,terminal_output +2846,8598154,"TERMINAL",0,0,"2\t ",,terminal_output +2847,8599177,"TERMINAL",0,0,"3\t ",,terminal_output +2848,8600159,"TERMINAL",0,0,"4\t ",,terminal_output +2849,8601239,"TERMINAL",0,0,"5\t ",,terminal_output +2850,8602352,"TERMINAL",0,0,"6\t ",,terminal_output +2851,8603304,"TERMINAL",0,0,"7\t ",,terminal_output +2852,8604362,"TERMINAL",0,0,"8\t ",,terminal_output +2853,8605377,"TERMINAL",0,0,"40\t ",,terminal_output +2854,8606422,"TERMINAL",0,0,"1\t ",,terminal_output +2855,8607472,"TERMINAL",0,0,"2\t ",,terminal_output +2856,8608502,"TERMINAL",0,0,"3\t ",,terminal_output +2857,8609624,"TERMINAL",0,0,"4\t ",,terminal_output +2858,8610648,"TERMINAL",0,0,"5\t ",,terminal_output +2859,8611627,"TERMINAL",0,0,"6\t ",,terminal_output +2860,8612658,"TERMINAL",0,0,"7\t ",,terminal_output +2861,8613705,"TERMINAL",0,0,"8\t ",,terminal_output +2862,8614742,"TERMINAL",0,0,"9\t ",,terminal_output +2863,8615785,"TERMINAL",0,0,"50\t ",,terminal_output +2864,8616830,"TERMINAL",0,0,"1\t ",,terminal_output +2865,8617864,"TERMINAL",0,0,"2\t ",,terminal_output +2866,8618906,"TERMINAL",0,0,"3\t ",,terminal_output +2867,8619965,"TERMINAL",0,0,"4\t ",,terminal_output +2868,8620991,"TERMINAL",0,0,"5\t ",,terminal_output +2869,8622117,"TERMINAL",0,0,"6\t ",,terminal_output +2870,8623137,"TERMINAL",0,0,"7\t ",,terminal_output +2871,8624165,"TERMINAL",0,0,"8\t ",,terminal_output +2872,8625289,"TERMINAL",0,0,"9\t ",,terminal_output +2873,8626231,"TERMINAL",0,0,"4:00\t ",,terminal_output +2874,8627235,"TERMINAL",0,0,"1\t ",,terminal_output +2875,8628278,"TERMINAL",0,0,"2\t ",,terminal_output +2876,8629365,"TERMINAL",0,0,"3\t ",,terminal_output +2877,8630357,"TERMINAL",0,0,"5\t ",,terminal_output +2878,8631394,"TERMINAL",0,0,"6\t ",,terminal_output +2879,8632438,"TERMINAL",0,0,"7\t ",,terminal_output +2880,8633476,"TERMINAL",0,0,"8\t ",,terminal_output +2881,8634609,"TERMINAL",0,0,"9\t ",,terminal_output +2882,8635632,"TERMINAL",0,0,"10\t ",,terminal_output +2883,8636660,"TERMINAL",0,0,"1\t ",,terminal_output +2884,8637635,"TERMINAL",0,0,"2\t ",,terminal_output +2885,8638677,"TERMINAL",0,0,"3\t ",,terminal_output +2886,8639726,"TERMINAL",0,0,"4\t ",,terminal_output +2887,8640855,"TERMINAL",0,0,"5\t ",,terminal_output +2888,8641876,"TERMINAL",0,0,"6\t ",,terminal_output +2889,8642904,"TERMINAL",0,0,"7\t ",,terminal_output +2890,8643884,"TERMINAL",0,0,"8\t ",,terminal_output +2891,8644925,"TERMINAL",0,0,"9\t ",,terminal_output +2892,8646000,"TERMINAL",0,0,"20\t ",,terminal_output +2893,8647104,"TERMINAL",0,0,"1\t ",,terminal_output +2894,8648048,"TERMINAL",0,0,"2\t ",,terminal_output +2895,8649094,"TERMINAL",0,0,"3\t ",,terminal_output +2896,8650182,"TERMINAL",0,0,"4\t ",,terminal_output +2897,8651195,"TERMINAL",0,0,"5\t ",,terminal_output +2898,8652223,"TERMINAL",0,0,"6\t ",,terminal_output +2899,8653348,"TERMINAL",0,0,"7\t ",,terminal_output +2900,8654299,"TERMINAL",0,0,"8\t ",,terminal_output +2901,8655337,"TERMINAL",0,0,"9\t ",,terminal_output +2902,8656388,"TERMINAL",0,0,"31\t ",,terminal_output +2903,8657421,"TERMINAL",0,0,"2\t ",,terminal_output +2904,8658461,"TERMINAL",0,0,"3\t ",,terminal_output +2905,8659498,"TERMINAL",0,0,"4\t ",,terminal_output +2906,8660547,"TERMINAL",0,0,"5\t ",,terminal_output +2907,8661642,"TERMINAL",0,0,"6\t 
",,terminal_output +2908,8662621,"TERMINAL",0,0,"7\t ",,terminal_output +2909,8663662,"TERMINAL",0,0,"8\t ",,terminal_output +2910,8664715,"TERMINAL",0,0,"9\t ",,terminal_output +2911,8665844,"TERMINAL",0,0,"40\t ",,terminal_output +2912,8666867,"TERMINAL",0,0,"1\t ",,terminal_output +2913,8667894,"TERMINAL",0,0,"2\t ",,terminal_output +2914,8668912,"TERMINAL",0,0,"3\t ",,terminal_output +2915,8669887,"TERMINAL",0,0,"4\t ",,terminal_output +2916,8670980,"TERMINAL",0,0,"5\t ",,terminal_output +2917,8671989,"TERMINAL",0,0,"6\t ",,terminal_output +2918,8673111,"TERMINAL",0,0,"7\t ",,terminal_output +2919,8674137,"TERMINAL",0,0,"8\t ",,terminal_output +2920,8675089,"TERMINAL",0,0,"9\t ",,terminal_output +2921,8676183,"TERMINAL",0,0,"50\t ",,terminal_output +2922,8677207,"TERMINAL",0,0,"1\t ",,terminal_output +2923,8678233,"TERMINAL",0,0,"2\t ",,terminal_output +2924,8679291,"TERMINAL",0,0,"3\t ",,terminal_output +2925,8680381,"TERMINAL",0,0,"4\t ",,terminal_output +2926,8681329,"TERMINAL",0,0,"5\t ",,terminal_output +2927,8682361,"TERMINAL",0,0,"7\t ",,terminal_output +2928,8683401,"TERMINAL",0,0,"8\t ",,terminal_output +2929,8684445,"TERMINAL",0,0,"9\t ",,terminal_output +2930,8685504,"TERMINAL",0,0,"5:00\t ",,terminal_output +2931,8686526,"TERMINAL",0,0,"1\t ",,terminal_output +2932,8687568,"TERMINAL",0,0,"2\t ",,terminal_output +2933,8688606,"TERMINAL",0,0,"3\t ",,terminal_output +2934,8689656,"TERMINAL",0,0,"4\t ",,terminal_output +2935,8690725,"TERMINAL",0,0,"5\t ",,terminal_output +2936,8691723,"TERMINAL",0,0,"6\t ",,terminal_output +2937,8692763,"TERMINAL",0,0,"7\t ",,terminal_output +2938,8693899,"TERMINAL",0,0,"8\t ",,terminal_output +2939,8694839,"TERMINAL",0,0,"9\t ",,terminal_output +2940,8695876,"TERMINAL",0,0,"10\t ",,terminal_output +2941,8697037,"TERMINAL",0,0,"1\t ",,terminal_output +2942,8697954,"TERMINAL",0,0,"2\t ",,terminal_output +2943,8698994,"TERMINAL",0,0,"3\t ",,terminal_output +2944,8700073,"TERMINAL",0,0,"4\t ",,terminal_output +2945,8701170,"TERMINAL",0,0,"5\t ",,terminal_output +2946,8702110,"TERMINAL",0,0,"6\t ",,terminal_output +2947,8703221,"TERMINAL",0,0,"7\t ",,terminal_output +2948,8704244,"TERMINAL",0,0,"8\t ",,terminal_output +2949,8705268,"TERMINAL",0,0,"9\t ",,terminal_output +2950,8706303,"TERMINAL",0,0,"20\t ",,terminal_output +2951,8707313,"TERMINAL",0,0,"1\t ",,terminal_output +2952,8708343,"TERMINAL",0,0,"2\t ",,terminal_output +2953,8709385,"TERMINAL",0,0,"4\t ",,terminal_output +2954,8710424,"TERMINAL",0,0,"5\t ",,terminal_output +2955,8711463,"TERMINAL",0,0,"6\t ",,terminal_output +2956,8712538,"TERMINAL",0,0,"7\t ",,terminal_output +2957,8713562,"TERMINAL",0,0,"8\t ",,terminal_output +2958,8714577,"TERMINAL",0,0,"9\t ",,terminal_output +2959,8715616,"TERMINAL",0,0,"30\t ",,terminal_output +2960,8716655,"TERMINAL",0,0,"1\t ",,terminal_output +2961,8717760,"TERMINAL",0,0,"2\t ",,terminal_output +2962,8718739,"TERMINAL",0,0,"3\t ",,terminal_output +2963,8719808,"TERMINAL",0,0,"4\t ",,terminal_output +2964,8720834,"TERMINAL",0,0,"5\t ",,terminal_output +2965,8721956,"TERMINAL",0,0,"6\t ",,terminal_output +2966,8722981,"TERMINAL",0,0,"7\t ",,terminal_output +2967,8723935,"TERMINAL",0,0,"8\t ",,terminal_output +2968,8725031,"TERMINAL",0,0,"9\t ",,terminal_output +2969,8726017,"TERMINAL",0,0,"40\t ",,terminal_output +2970,8727051,"TERMINAL",0,0,"1\t ",,terminal_output +2971,8728108,"TERMINAL",0,0,"2\t ",,terminal_output +2972,8729139,"TERMINAL",0,0,"3\t ",,terminal_output +2973,8730255,"TERMINAL",0,0,"4\t ",,terminal_output 
+2974,8731221,"TERMINAL",0,0,"5\t ",,terminal_output +2975,8732265,"TERMINAL",0,0,"6\t ",,terminal_output +2976,8733338,"TERMINAL",0,0,"7\t ",,terminal_output +2977,8734339,"TERMINAL",0,0,"8\t ",,terminal_output +2978,8735377,"TERMINAL",0,0,"50\t ",,terminal_output +2979,8736415,"TERMINAL",0,0,"1\t ",,terminal_output +2980,8737453,"TERMINAL",0,0,"2\t ",,terminal_output +2981,8738494,"TERMINAL",0,0,"3\t ",,terminal_output +2982,8739537,"TERMINAL",0,0,"4\t ",,terminal_output +2983,8740595,"TERMINAL",0,0,"5\t ",,terminal_output +2984,8741614,"TERMINAL",0,0,"6\t ",,terminal_output +2985,8742651,"TERMINAL",0,0,"7\t ",,terminal_output +2986,8743698,"TERMINAL",0,0,"8\t ",,terminal_output +2987,8744795,"TERMINAL",0,0,"9\t ",,terminal_output +2988,8745774,"TERMINAL",0,0,"6:00\t ",,terminal_output +2989,8747561,"TERMINAL",0,0,"11",,terminal_output +2990,8748584,"TERMINAL",0,0,"3\t ",,terminal_output +2991,8749707,"TERMINAL",0,0,"4\t ",,terminal_output +2992,8750660,"TERMINAL",0,0,"5\t ",,terminal_output +2993,8751762,"TERMINAL",0,0,"6\t ",,terminal_output +2994,8752781,"TERMINAL",0,0,"7\t ",,terminal_output +2995,8753784,"TERMINAL",0,0,"87",,terminal_output +2996,8754822,"TERMINAL",0,0,"9\t ",,terminal_output +2997,8755954,"TERMINAL",0,0,"10\t ",,terminal_output +2998,8756974,"TERMINAL",0,0,"1\t ",,terminal_output +2999,8758004,"TERMINAL",0,0,"2\t ",,terminal_output +3000,8759028,"TERMINAL",0,0,"3\t ",,terminal_output +3001,8760049,"TERMINAL",0,0,"46",,terminal_output +3002,8761075,"TERMINAL",0,0,"5\t ",,terminal_output +3003,8762201,"TERMINAL",0,0,"6\t ",,terminal_output +3004,8763227,"TERMINAL",0,0,"7\t ",,terminal_output +3005,8764254,"TERMINAL",0,0,"8\t ",,terminal_output +3006,8765271,"TERMINAL",0,0,"9\t ",,terminal_output +3007,8766310,"TERMINAL",0,0,"200",,terminal_output +3008,8767315,"TERMINAL",0,0,"1\t ",,terminal_output +3009,8768349,"TERMINAL",0,0,"3\t ",,terminal_output +3010,8769387,"TERMINAL",0,0,"4\t ",,terminal_output +3011,8770431,"TERMINAL",0,0,"5\t ",,terminal_output +3012,8771469,"TERMINAL",0,0,"6\t ",,terminal_output +3013,8772554,"TERMINAL",0,0,"7\t ",,terminal_output +3014,8773558,"TERMINAL",0,0,"8\t ",,terminal_output +3015,8774601,"TERMINAL",0,0,"9\t ",,terminal_output +3016,8775719,"TERMINAL",0,0,"30\t ",,terminal_output +3017,8776682,"TERMINAL",0,0,"1\t ",,terminal_output +3018,8777768,"TERMINAL",0,0,"2\t ",,terminal_output +3019,8778791,"TERMINAL",0,0,"3\t ",,terminal_output +3020,8779816,"TERMINAL",0,0,"4\t ",,terminal_output +3021,8780941,"TERMINAL",0,0,"5\t ",,terminal_output +3022,8781968,"TERMINAL",0,0,"6\t ",,terminal_output +3023,8782989,"TERMINAL",0,0,"7\t ",,terminal_output +3024,8783969,"TERMINAL",0,0,"8\t ",,terminal_output +3025,8785038,"TERMINAL",0,0,"9\t ",,terminal_output +3026,8786060,"TERMINAL",0,0,"40\t ",,terminal_output +3027,8787093,"TERMINAL",0,0,"1\t ",,terminal_output +3028,8788211,"TERMINAL",0,0,"2\t ",,terminal_output +3029,8789174,"TERMINAL",0,0,"3\t ",,terminal_output +3030,8790216,"TERMINAL",0,0,"4\t ",,terminal_output +3031,8791284,"TERMINAL",0,0,"5\t ",,terminal_output +3032,8792349,"TERMINAL",0,0,"6\t ",,terminal_output +3033,8793433,"TERMINAL",0,0,"7\t ",,terminal_output +3034,8794384,"TERMINAL",0,0,"9\t ",,terminal_output +3035,8795415,"TERMINAL",0,0,"50\t ",,terminal_output +3036,8796454,"TERMINAL",0,0,"1\t ",,terminal_output +3037,8797498,"TERMINAL",0,0,"2\t ",,terminal_output +3038,8798551,"TERMINAL",0,0,"3\t ",,terminal_output +3039,8799580,"TERMINAL",0,0,"4\t ",,terminal_output +3040,8800705,"TERMINAL",0,0,"5\t ",,terminal_output 
+3041,8801664,"TERMINAL",0,0,"6\t ",,terminal_output +3042,8802751,"TERMINAL",0,0,"7\t ",,terminal_output +3043,8803775,"TERMINAL",0,0,"8\t ",,terminal_output +3044,8804841,"TERMINAL",0,0,"9\t ",,terminal_output +3045,8805927,"TERMINAL",0,0,"7:00\t ",,terminal_output +3046,8806864,"TERMINAL",0,0,"1\t ",,terminal_output +3047,8807902,"TERMINAL",0,0,"2\t ",,terminal_output +3048,8809000,"TERMINAL",0,0,"3\t ",,terminal_output +3049,8810023,"TERMINAL",0,0,"4\t ",,terminal_output +3050,8811026,"TERMINAL",0,0,"5\t ",,terminal_output +3051,8812171,"TERMINAL",0,0,"6\t ",,terminal_output +3052,8813201,"TERMINAL",0,0,"7\t ",,terminal_output +3053,8814220,"TERMINAL",0,0,"8\t ",,terminal_output +3054,8815195,"TERMINAL",0,0,"9\t ",,terminal_output +3055,8816233,"TERMINAL",0,0,"10\t ",,terminal_output +3056,8817276,"TERMINAL",0,0,"1\t ",,terminal_output +3057,8818437,"TERMINAL",0,0,"2\t ",,terminal_output +3058,8819358,"TERMINAL",0,0,"4\t ",,terminal_output +3059,8820399,"TERMINAL",0,0,"5\t ",,terminal_output +3060,8821447,"TERMINAL",0,0,"6\t ",,terminal_output +3061,8822482,"TERMINAL",0,0,"7\t ",,terminal_output +3062,8823521,"TERMINAL",0,0,"8\t ",,terminal_output +3063,8824569,"TERMINAL",0,0,"9\t ",,terminal_output +3064,8825691,"TERMINAL",0,0,"20\t ",,terminal_output +3065,8826714,"TERMINAL",0,0,"1\t ",,terminal_output +3066,8827740,"TERMINAL",0,0,"2\t ",,terminal_output +3067,8828766,"TERMINAL",0,0,"3\t ",,terminal_output +3068,8829784,"TERMINAL",0,0,"4\t ",,terminal_output +3069,8830829,"TERMINAL",0,0,"5\t ",,terminal_output +3070,8831934,"TERMINAL",0,0,"6\t ",,terminal_output +3071,8832961,"TERMINAL",0,0,"7\t ",,terminal_output +3072,8833993,"TERMINAL",0,0,"8\t ",,terminal_output +3073,8834973,"TERMINAL",0,0,"9\t ",,terminal_output +3074,8836034,"TERMINAL",0,0,"30\t ",,terminal_output +3075,8837061,"TERMINAL",0,0,"1\t ",,terminal_output +3076,8838096,"TERMINAL",0,0,"2\t ",,terminal_output +3077,8839206,"TERMINAL",0,0,"3\t ",,terminal_output +3078,8840229,"TERMINAL",0,0,"4\t ",,terminal_output +3079,8841254,"TERMINAL",0,0,"5\t ",,terminal_output +3080,8842279,"TERMINAL",0,0,"6\t ",,terminal_output +3081,8843298,"TERMINAL",0,0,"7\t ",,terminal_output +3082,8844368,"TERMINAL",0,0,"8\t ",,terminal_output +3083,8845373,"TERMINAL",0,0,"40\t ",,terminal_output +3084,8846407,"TERMINAL",0,0,"1\t ",,terminal_output +3085,8847449,"TERMINAL",0,0,"2\t ",,terminal_output +3086,8848476,"TERMINAL",0,0,"3\t ",,terminal_output +3087,8849516,"TERMINAL",0,0,"4\t ",,terminal_output +3088,8850554,"TERMINAL",0,0,"5\t ",,terminal_output +3089,8851591,"TERMINAL",0,0,"6\t ",,terminal_output +3090,8852723,"TERMINAL",0,0,"7\t ",,terminal_output +3091,8853674,"TERMINAL",0,0,"8\t ",,terminal_output +3092,8854776,"TERMINAL",0,0,"9\t ",,terminal_output +3093,8855803,"TERMINAL",0,0,"50\t ",,terminal_output +3094,8856818,"TERMINAL",0,0,"1\t ",,terminal_output +3095,8857843,"TERMINAL",0,0,"2\t ",,terminal_output +3096,8858968,"TERMINAL",0,0,"3\t ",,terminal_output +3097,8859925,"TERMINAL",0,0,"4\t ",,terminal_output +3098,8861018,"TERMINAL",0,0,"5\t ",,terminal_output +3099,8862047,"TERMINAL",0,0,"6\t ",,terminal_output +3100,8863044,"TERMINAL",0,0,"7\t ",,terminal_output +3101,8864089,"TERMINAL",0,0,"8\t ",,terminal_output +3102,8865218,"TERMINAL",0,0,"9\t ",,terminal_output +3103,8866239,"TERMINAL",0,0,"8:00\t ",,terminal_output +3104,8867266,"TERMINAL",0,0,"1\t ",,terminal_output +3105,8868300,"TERMINAL",0,0,"2\t ",,terminal_output +3106,8869278,"TERMINAL",0,0,"3\t ",,terminal_output +3107,8870327,"TERMINAL",0,0,"4\t 
",,terminal_output +3108,8871366,"TERMINAL",0,0,"6\t ",,terminal_output +3109,8872405,"TERMINAL",0,0,"7\t ",,terminal_output +3110,8873451,"TERMINAL",0,0,"8\t ",,terminal_output +3111,8874495,"TERMINAL",0,0,"9\t ",,terminal_output +3112,8875557,"TERMINAL",0,0,"10\t ",,terminal_output +3113,8876685,"TERMINAL",0,0,"1\t ",,terminal_output +3114,8877632,"TERMINAL",0,0,"2\t ",,terminal_output +3115,8878658,"TERMINAL",0,0,"3\t ",,terminal_output +3116,8879762,"TERMINAL",0,0,"4\t ",,terminal_output +3117,8880785,"TERMINAL",0,0,"5\t ",,terminal_output +3118,8881809,"TERMINAL",0,0,"6\t ",,terminal_output +3119,8882929,"TERMINAL",0,0,"7\t ",,terminal_output +3120,8883862,"TERMINAL",0,0,"8\t ",,terminal_output +3121,8884903,"TERMINAL",0,0,"9\t ",,terminal_output +3122,8886001,"TERMINAL",0,0,"20\t ",,terminal_output +3123,8887027,"TERMINAL",0,0,"1\t ",,terminal_output +3124,8888048,"TERMINAL",0,0,"2\t ",,terminal_output +3125,8889076,"TERMINAL",0,0,"3\t ",,terminal_output +3126,8890098,"TERMINAL",0,0,"4\t ",,terminal_output +3127,8891226,"TERMINAL",0,0,"5\t ",,terminal_output +3128,8892167,"TERMINAL",0,0,"6\t ",,terminal_output +3129,8893212,"TERMINAL",0,0,"7\t ",,terminal_output +3130,8894300,"TERMINAL",0,0,"8\t ",,terminal_output +3131,8895298,"TERMINAL",0,0,"9\t ",,terminal_output +3132,8896367,"TERMINAL",0,0,"30\t ",,terminal_output +3133,8897374,"TERMINAL",0,0,"2\t ",,terminal_output +3134,8898407,"TERMINAL",0,0,"3\t ",,terminal_output +3135,8899448,"TERMINAL",0,0,"4\t ",,terminal_output +3136,8900483,"TERMINAL",0,0,"5\t ",,terminal_output +3137,8901524,"TERMINAL",0,0,"6\t ",,terminal_output +3138,8902590,"TERMINAL",0,0,"7\t ",,terminal_output +3139,8903605,"TERMINAL",0,0,"8\t ",,terminal_output +3140,8904747,"TERMINAL",0,0,"9\t ",,terminal_output +3141,8905763,"TERMINAL",0,0,"40\t ",,terminal_output +3142,8906758,"TERMINAL",0,0,"1\t ",,terminal_output +3143,8907816,"TERMINAL",0,0,"2\t ",,terminal_output +3144,8908134,"TERMINAL",0,0,"sh slurm/dev/mihir/horeka/overfit_sample/causal/dynamics_overfit_sample.sbatch",,terminal_output +3145,8908839,"TERMINAL",0,0,"3\t ",,terminal_output +3146,8909846,"TERMINAL",0,0,"4\t ",,terminal_output +3147,8910518,"slurm/dev/mihir/horeka/causal_fit_modelsizes/train_dynamics_new_arch_255M.sbatch",0,0,"",shellscript,tab +3148,8910887,"TERMINAL",0,0,"5\t ",,terminal_output +3149,8910949,"slurm/dev/mihir/horeka/overfit_sample/causal/dynamics_overfit_sample.sbatch",0,0,"",shellscript,tab +3150,8912011,"TERMINAL",0,0,"6\t ",,terminal_output +3151,8912736,"slurm/dev/mihir/horeka/overfit_sample/causal/dynamics_overfit_sample.sbatch",1085,0,"",shellscript,selection_mouse +3152,8912965,"TERMINAL",0,0,"7\t ",,terminal_output +3153,8913914,"slurm/dev/mihir/horeka/overfit_sample/causal/dynamics_overfit_sample.sbatch",1084,1,"",shellscript,content +3154,8914002,"TERMINAL",0,0,"8\t ",,terminal_output +3155,8914134,"slurm/dev/mihir/horeka/overfit_sample/causal/dynamics_overfit_sample.sbatch",1084,0,"5",shellscript,content +3156,8914135,"slurm/dev/mihir/horeka/overfit_sample/causal/dynamics_overfit_sample.sbatch",1085,0,"",shellscript,selection_keyboard +3157,8915083,"TERMINAL",0,0,"9\t ",,terminal_output +3158,8916086,"TERMINAL",0,0,"50\t ",,terminal_output +3159,8917172,"TERMINAL",0,0,"1\t ",,terminal_output +3160,8917172,"TERMINAL",0,0,"\r\n[?2004l\r#!/usr/bin/env bash\r\n\r\n#SBATCH --nodes=1\r\n#SBATCH --ntasks-per-node=1\r\n#SBATCH --time=48:00:00\r\n#SBATCH --partition=accelerated\r\n#SBATCH --cpus-per-task=5\r\n#SBATCH --gres=gpu:1\r\n#SBATCH 
--output=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir/yoloruns/%x_%j.log\r\n#SBATCH --error=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir/yoloruns/%x_%j.log\r\n#SBATCH --job-name=train_dynamics_overfit_sample_causal\r\n\r\n# Log the sbatch script\r\ncat $0\r\n\r\nmodule unload mpi/openmpi/5.0\r\nmodule unload devel/cuda/12.4\r\n# source .venv/bin/activate\r\n\r\narray_records_dir=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_new/open_ai_minecraft_arrayrecords_chunked\r\n\r\njob_name=$SLURM_JOB_NAME\r\nslurm_job_id=$SLURM_JOB_ID\r\n\r\nCHECKPOINT_DIR=$ws_dir/checkpoints/causal/overfit-seed69-1/$job_name/$slurm_job_id\r\nmkdir -p $CHECKPOINT_DIR\r\n\r\ntokenizer_ckpt_dir=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data/checkpoints/train_tokenizer_lr_sweep_1e-4_larger_ffn/\r\n\r\n\r\nenv | grep SLURM\r\n\r\nsrun python train_dynamics.py \\r\n --save_ckpt \\r\n --seed=69 \\r\n --num_steps=5000 \\r\n --warmup_steps=0 \\r\n --wsd_decay_steps=0 \\r\n --ckpt_dir $CHECKPOINT_DIR \\r\n --batch_size=1 \\r\n --init_lr=1e-4 \\r\n --max_lr=1e-4 \\r\n --log_image_interval=100 \\r\n --log \\r\n --log_checkpoint_interval=1000 \\r\n --name=dynamics-causal-overfit-$slurm_job_id \\r\n --tags dynamics causal overfit \\r\n --entity instant-uv \\r\n --project jafar \\r\n --tokenizer_checkpoint=$tokenizer_ckpt_dir \\r\n --data_dir $array_records_dir \\r\n --dyna_dim=128 \\r\n --dyna_num_blocks=2 \\r\n --dyna_num_heads=4\r\n ",,terminal_output +3161,8917286,"TERMINAL",0,0,"SLURM_STEP_NUM_TASKS=1\r\nSLURM_JOB_USER=tum_cte0515\r\nSLURM_TASKS_PER_NODE=1\r\nSLURM_JOB_UID=999226\r\nSLURM_TASK_PID=436020\r\nSLURM_JOB_GPUS=0\r\nSLURM_LOCALID=0\r\nSLURM_SUBMIT_DIR=/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar\r\nSLURMD_NODENAME=hkn0901\r\nSLURM_JOB_START_TIME=1753366425\r\nSLURM_STEP_NODELIST=hkn0901\r\nSLURM_CLUSTER_NAME=hk\r\nSLURM_JOB_END_TIME=1753370025\r\nSLURM_PMI2_SRUN_PORT=34243\r\nSLURM_CPUS_ON_NODE=6\r\nSLURM_JOB_CPUS_PER_NODE=6\r\nSLURM_GPUS_ON_NODE=1\r\nSLURM_GTIDS=0\r\nSLURM_JOB_PARTITION=dev_accelerated-h100\r\nSLURM_TRES_PER_TASK=cpu=5\r\nSLURM_OOM_KILL_STEP=0\r\nSLURM_JOB_NUM_NODES=1\r\nSLURM_STEPID=4294967290\r\nSLURM_JOBID=3373280\r\nSLURM_PTY_PORT=42147\r\nSLURM_JOB_QOS=normal\r\nSLURM_LAUNCH_NODE_IPADDR=10.0.7.199\r\nSLURM_PTY_WIN_ROW=48\r\nSLURM_PMI2_PROC_MAPPING=(vector,(0,1,1))\r\nSLURMD_DEBUG=2\r\nSLURM_PROCID=0\r\nSLURM_CPUS_PER_TASK=5\r\nSLURM_NTASKS=1\r\nSLURM_TOPOLOGY_ADDR=hkibb.hkibbi4.hkibbi4e1.hkn0901\r\nSLURM_TOPOLOGY_ADDR_PATTERN=switch.switch.switch.node\r\nSLURM_SRUN_COMM_HOST=10.0.7.199\r\nSLURM_SCRIPT_CONTEXT=prolog_task\r\nSLURM_PTY_WIN_COL=194\r\nSLURM_NODELIST=hkn0901\r\nSLURM_SRUN_COMM_PORT=46593\r\nSLURM_STEP_ID=4294967290\r\nSLURM_JOB_ACCOUNT=hk-project-p0023960\r\nSLURM_PRIO_PROCESS=0\r\nSLURM_NPROCS=1\r\nSLURM_NNODES=1\r\nSLURM_SUBMIT_HOST=hkn1991.localdomain\r\nSLURM_JOB_ID=3373280\r\nSLURM_NODEID=0\r\nSLURM_STEP_NUM_NODES=1\r\nSLURM_STEP_TASKS_PER_NODE=1\r\nSLURM_MPI_TYPE=pmi2\r\nSLURM_PMI2_STEP_NODES=hkn0901\r\nSLURM_CONF=/etc/slurm/slurm.conf\r\nSLURM_JOB_NAME=interactive\r\nSLURM_NTASKS_PER_NODE=1\r\nSLURM_STEP_LAUNCHER_PORT=46593\r\nSLURM_JOB_GID=502226\r\nSLURM_JOB_NODELIST=hkn0901\r\n",,terminal_output +3162,8917473,"TERMINAL",0,0,"GpuFreq=control_disabled\r\n",,terminal_output +3163,8918173,"TERMINAL",0,0,"2\t ",,terminal_output +3164,8919284,"TERMINAL",0,0,"3\t ",,terminal_output +3165,8920322,"TERMINAL",0,0,"4\t ",,terminal_output 
+3166,8921229,"TERMINAL",0,0,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/tyro/_parsers.py:347: UserWarning: The field `param-dtype` is annotated with type ``, but the default value `` has type ``. We'll try to handle this gracefully, but it may cause unexpected behavior.\r\n warnings.warn(message)\r\n/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/tyro/_parsers.py:347: UserWarning: The field `dtype` is annotated with type ``, but the default value `` has type ``. We'll try to handle this gracefully, but it may cause unexpected behavior.\r\n warnings.warn(message)\r\n",,terminal_output +3167,8921336,"TERMINAL",0,0,"5\t ",,terminal_output +3168,8922368,"TERMINAL",0,0,"6\t ",,terminal_output +3169,8923518,"TERMINAL",0,0,"8\t ",,terminal_output +3170,8923539,"TERMINAL",0,0,"2025-07-24 16:28:58.209485: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +3171,8924425,"TERMINAL",0,0,"9\t ",,terminal_output +3172,8925469,"TERMINAL",0,0,"9:00\t ",,terminal_output +3173,8926509,"TERMINAL",0,0,"1\t ",,terminal_output +3174,8927582,"TERMINAL",0,0,"2\t ",,terminal_output +3175,8928603,"TERMINAL",0,0,"3\t ",,terminal_output +3176,8929634,"TERMINAL",0,0,"4\t ",,terminal_output +3177,8930750,"TERMINAL",0,0,"5\t ",,terminal_output +3178,8931774,"TERMINAL",0,0,"6\t ",,terminal_output +3179,8932622,"TERMINAL",0,0,"2025-07-24 16:29:07.291841: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. 
Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +3180,8932776,"TERMINAL",0,0,"7\t ",,terminal_output +3181,8933787,"TERMINAL",0,0,"8\t ",,terminal_output +3182,8934849,"TERMINAL",0,0,"9\t ",,terminal_output +3183,8935593,"models/lam.py",0,0,"from typing import Dict, Any\n\nimport jax.numpy as jnp\nimport flax.linen as nn\n\nfrom utils.preprocess import patchify, unpatchify\nfrom utils.nn import STTransformer, VectorQuantizer\n\n\nclass LatentActionModel(nn.Module):\n """"""Latent Action ST-ViVit VQ-VAE""""""\n\n in_dim: int\n model_dim: int\n ffn_dim: int\n latent_dim: int\n num_latents: int\n patch_size: int\n num_blocks: int\n num_heads: int\n dropout: float\n codebook_dropout: float\n param_dtype: jnp.dtype\n dtype: jnp.dtype\n use_flash_attention: bool\n\n def setup(self):\n self.patch_token_dim = self.in_dim * self.patch_size**2\n self.encoder = STTransformer(\n self.model_dim,\n self.ffn_dim,\n self.latent_dim,\n self.num_blocks,\n self.num_heads,\n self.dropout,\n self.param_dtype,\n self.dtype,\n use_flash_attention=self.use_flash_attention,\n )\n self.action_in = self.param(\n ""action_in"",\n nn.initializers.lecun_uniform(),\n (1, 1, 1, self.patch_token_dim),\n )\n self.vq = VectorQuantizer(\n self.latent_dim,\n self.num_latents,\n self.codebook_dropout,\n )\n self.patch_up = nn.Dense(\n self.model_dim,\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n )\n self.action_up = nn.Dense(\n self.model_dim,\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n )\n self.decoder = STTransformer(\n self.model_dim,\n self.ffn_dim,\n self.patch_token_dim,\n self.num_blocks,\n self.num_heads,\n self.dropout,\n self.param_dtype,\n self.dtype,\n use_flash_attention=self.use_flash_attention,\n )\n\n def __call__(self, batch: Dict[str, Any], training: bool = True) -> Dict[str, Any]:\n # --- Encode + VQ ---\n H, W = batch[""videos""].shape[2:4]\n outputs = self.vq_encode(batch[""videos""], training)\n video_action_patches = self.action_up(outputs[""z_q""]) + self.patch_up(\n outputs[""patches""][:, :-1]\n )\n del outputs[""patches""]\n\n # --- Decode ---\n video_recon = self.decoder(video_action_patches)\n video_recon = video_recon.astype(jnp.float32)\n video_recon = nn.sigmoid(video_recon)\n video_recon = video_recon.astype(self.dtype)\n outputs[""recon""] = unpatchify(video_recon, self.patch_size, H, W)\n return outputs\n\n def vq_encode(self, videos: Any, training: bool = True) -> Dict[str, Any]:\n # --- Preprocess videos ---\n B, T = videos.shape[:2]\n patches = patchify(videos, self.patch_size)\n action_pad = jnp.broadcast_to(self.action_in, (B, T, 1, self.patch_token_dim))\n # FIXME mihir do this the other way around\n padded_patches = jnp.concatenate((patches, action_pad), axis=2)\n\n # --- Encode ---\n z = self.encoder(padded_patches) # (B, T, N, E)\n # Get latent action for all future frames\n z = z[:, 1:, -1] # (B, T-1, E)\n\n # --- Vector quantize ---\n z = z.reshape(B * (T - 1), self.latent_dim)\n z_q, z, emb, indices = self.vq(z, training)\n z_q = z_q.reshape(B, T - 1, 1, self.latent_dim)\n return dict(patches=patches, z_q=z_q, z=z, emb=emb, indices=indices)\n",python,tab +3184,8935876,"TERMINAL",0,0,"10\t ",,terminal_output +3185,8936972,"TERMINAL",0,0,"1\t ",,terminal_output +3186,8937952,"TERMINAL",0,0,"2\t ",,terminal_output +3187,8938994,"TERMINAL",0,0,"3\t ",,terminal_output +3188,8940077,"TERMINAL",0,0,"4\t ",,terminal_output +3189,8941098,"TERMINAL",0,0,"5\t 
",,terminal_output +3190,8942119,"TERMINAL",0,0,"6\t ",,terminal_output +3191,8943159,"TERMINAL",0,0,"7\t ",,terminal_output +3192,8944274,"TERMINAL",0,0,"8\t ",,terminal_output +3193,8945241,"TERMINAL",0,0,"9\t ",,terminal_output +3194,8946317,"TERMINAL",0,0,"20\t ",,terminal_output +3195,8947385,"TERMINAL",0,0,"1\t ",,terminal_output +3196,8948518,"train_dynamics.py",0,0,"",python,tab +3197,8948572,"TERMINAL",0,0,"3\t ",,terminal_output +3198,8949409,"TERMINAL",0,0,"4\t ",,terminal_output +3199,8950364,"TERMINAL",0,0,"2025-07-24 16:29:25.043827: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +3200,8950490,"TERMINAL",0,0,"5\t ",,terminal_output +3201,8951485,"TERMINAL",0,0,"6\t ",,terminal_output +3202,8952520,"TERMINAL",0,0,"7\t ",,terminal_output +3203,8952588,"TERMINAL",0,0,"wandb: Currently logged in as: mihir-mahajan2002 (instant-uv) to https://api.wandb.ai. Use `wandb login --relogin` to force relogin\r\n",,terminal_output +3204,8953289,"TERMINAL",0,0,"wandb: Tracking run with wandb version 0.19.11\r\nwandb: Run data is saved locally in /hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/wandb/run-20250724_162927-e16cv2fa\r\nwandb: Run `wandb offline` to turn off syncing.\r\nwandb: Syncing run dynamics-causal-overfit-3373280\r\nwandb: ⭐️ View project at https://wandb.ai/instant-uv/jafar\r\nwandb: 🚀 View run at https://wandb.ai/instant-uv/jafar/runs/e16cv2fa\r\n",,terminal_output +3205,8953565,"TERMINAL",0,0,"86",,terminal_output +3206,8954638,"TERMINAL",0,0,"9\t ",,terminal_output +3207,8955429,"TERMINAL",0,0,"WARNING:absl:Missing metrics for step 1000\r\nERROR:absl:File /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/causal/overfit-seed69-1/interactive/3373280/001000/metrics/metrics not found.\r\nWARNING:absl:Missing metrics for step 2000\r\nERROR:absl:File /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/causal/overfit-seed69-1/interactive/3373280/002000/metrics/metrics not found.\r\n",,terminal_output +3208,8955642,"TERMINAL",0,0,"30\t ",,terminal_output +3209,8956760,"TERMINAL",0,0,"1\t ",,terminal_output +3210,8957743,"TERMINAL",0,0,"2\t ",,terminal_output +3211,8958767,"TERMINAL",0,0,"3\t ",,terminal_output +3212,8959874,"TERMINAL",0,0,"4\t ",,terminal_output +3213,8960864,"TERMINAL",0,0,"5\t ",,terminal_output +3214,8961986,"TERMINAL",0,0,"6\t ",,terminal_output +3215,8962097,"models/lam.py",0,0,"",python,tab +3216,8962935,"TERMINAL",0,0,"7\t ",,terminal_output +3217,8963967,"TERMINAL",0,0,"8\t ",,terminal_output +3218,8965059,"TERMINAL",0,0,"9\t ",,terminal_output +3219,8966084,"TERMINAL",0,0,"40\t ",,terminal_output +3220,8967075,"TERMINAL",0,0,"15",,terminal_output +3221,8968119,"TERMINAL",0,0,"2\t ",,terminal_output +3222,8969158,"TERMINAL",0,0,"3\t ",,terminal_output +3223,8970277,"TERMINAL",0,0,"4\t ",,terminal_output +3224,8971252,"TERMINAL",0,0,"5\t ",,terminal_output +3225,8972277,"TERMINAL",0,0,"6\t ",,terminal_output +3226,8973352,"TERMINAL",0,0,"7\t ",,terminal_output +3227,8974399,"TERMINAL",0,0,"9\t ",,terminal_output +3228,8975400,"TERMINAL",0,0,"50\t ",,terminal_output +3229,8976432,"TERMINAL",0,0,"1\t ",,terminal_output +3230,8977473,"TERMINAL",0,0,"2\t ",,terminal_output +3231,8978580,"TERMINAL",0,0,"3\t ",,terminal_output 
[rows 3232-3239: stopwatch ticks, condensed]
+3240,8987489,"TERMINAL",0,0,"WARNING:absl:Missing metrics for step 52000\r\nERROR:absl:File /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data/checkpoints/train_tokenizer_lr_sweep_1e-4_larger_ffn/052000/metrics/metrics not found.\r\nWARNING:absl:Missing metrics for step 53000\r\nERROR:absl:File /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data/checkpoints/train_tokenizer_lr_sweep_1e-4_larger_ffn/053000/metrics/metrics not found.\r\nWARNING:absl:Missing metrics for step 51000\r\nERROR:absl:File /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data/checkpoints/train_tokenizer_lr_sweep_1e-4_larger_ffn/051000/metrics/metrics not found.\r\nWARNING:absl:Missing metrics for step 40000\r\nERROR:absl:File /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data/checkpoints/train_tokenizer_lr_sweep_1e-4_larger_ffn/040000/metrics/metrics not found.\r\nWARNING:absl:Missing metrics for step 20000\r\nERROR:absl:File /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data/checkpoints/train_tokenizer_lr_sweep_1e-4_larger_ffn/020000/metrics/metrics not found.\r\n",,terminal_output
[rows 3241-3248: stopwatch ticks, condensed]
+3249,8995201,"TERMINAL",0,0,"2025-07-24 16:30:09.866091: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n2025-07-24 16:30:09.866519: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n2025-07-24 16:30:09.868404: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n2025-07-24 16:30:09.868419: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n2025-07-24 16:30:09.869168: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints.
Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output
[rows 3250-3281: stopwatch ticks, condensed]
+3282,9029084,"TERMINAL",0,0,"Running on 1 devices.\r\nCounting all components: ['tokenizer', 'lam', 'dynamics']\r\nParameter counts:\r\n{'tokenizer': 33750256, 'lam': 17229792, 'dynamics': 1603072, 'total': 52583120}\r\nStep 0, loss: 8.674175262451172\r\nStep 1, loss: 8.668670654296875\r\nStep 2, loss: 8.574604988098145\r\nStep 3, loss: 8.406447410583496\r\nStep 4, loss: 8.203638076782227\r\nStep 5, loss: 8.023332595825195\r\nStep 6, loss: 7.9559502601623535\r\nStep 7, loss: 7.877413749694824\r\nStep 8, loss: 7.824586868286133\r\nStep 9, loss: 7.750744819641113\r\nStep 10, loss: 7.717357158660889\r\nStep 11, loss: 7.670135498046875\r\nStep 12, loss: 7.609409332275391\r\nStep 13, loss: 7.5954365730285645\r\nStep 14, loss: 7.551942348480225\r\nStep 15, loss: 7.526211738586426\r\nStep 16, loss: 7.5134429931640625\r\nStep 17, loss: 7.464913368225098\r\nStep 18, loss: 7.451257705688477\r\nStep 19, loss: 7.427399158477783\r\nStep 20, loss: 7.402737617492676\r\nStep 21, loss: 7.386663436889648\r\nStep 22, loss: 7.363436222076416\r\nStep 23, loss: 7.353790760040283\r\nStep 24, loss: 7.326400279998779\r\nStep 25, loss: 7.30203104019165\r\nStep 26, loss: 7.278313159942627\r\nStep 27, loss: 7.2651567459106445\r\nStep 28, loss: 7.24293851852417\r\nStep 29, loss: 7.198199272155762\r\nStep 30, loss: 7.190091609954834\r\nStep 31, loss: 7.158174514770508\r\nStep 32, loss: 7.129848480224609\r\nStep 33, loss: 7.11536979675293\r\nStep 34, loss: 7.098193168640137\r\nStep 35, loss: 7.075440406799316\r\nStep 36, loss: 7.052376747131348\r\nStep 37, loss: 7.009790897369385\r\nStep 38, loss: 7.0155439376831055\r\nStep 39, loss: 6.986753463745117\r\nStep 40, loss: 6.964963436126709\r\nStep 
41, loss: 6.954635143280029\r\nStep 42, loss: 6.922980308532715\r\nStep 43, loss: 6.895662307739258\r\nStep 44, loss: 6.902007102966309\r\nStep 45, loss: 6.879478454589844\r\nStep 46, loss: 6.846864700317383\r\nStep 47, loss: 6.839035511016846\r\nStep 48, loss: 6.83180046081543\r\nStep 49, loss: 6.798099517822266\r\nStep 50, loss: 6.789432048797607\r\nStep 51, loss: 6.766775131225586\r\nStep 52, loss: 6.748760223388672\r\nStep 53, loss: 6.728295803070068\r\nStep 54, loss: 6.7151875495910645\r\nStep 55, loss: 6.703790664672852\r\nStep 56, loss: 6.684081554412842\r\nStep 57, loss: 6.669717311859131\r\nStep 58, loss: 6.655113697052002\r\nStep 59, loss: 6.640040874481201\r\nStep 60, loss: 6.622447967529297\r\nStep 61, loss: 6.614474773406982\r\nStep 62, loss: 6.592879295349121\r\nStep 63, loss: 6.564940452575684\r\nStep 64, loss: 6.555487155914307\r\nStep 65, loss: 6.545904636383057\r\nStep 66, loss: 6.525449752807617\r\nStep 67, loss: 6.507303714752197\r\nStep 68, loss: 6.5033793449401855\r\nStep 69, loss: 6.485145568847656\r\nStep 70, loss: 6.4783759117126465\r\nStep 71, loss: 6.460371494293213\r\nStep 72, loss: 6.444807052612305\r\nStep 73, loss: 6.427517414093018\r\nStep 74, loss: 6.418549537658691\r\nStep 75, loss: 6.398351192474365\r\nStep 76, loss: 6.393786907196045\r\nStep 77, loss: 6.384186744689941\r\nStep 78, loss: 6.378942966461182\r\nStep 79, loss: 6.351716995239258\r\nStep 80, loss: 6.343936443328857\r\nStep 81, loss: 6.335512638092041\r\nStep 82, loss: 6.316738128662109\r\nStep 83, loss: 6.306550025939941\r\nStep 84, loss: 6.290176868438721\r\nStep 85, loss: 6.2806715965271\r\nStep 86, loss: 6.256862163543701\r\nStep 87, loss: 6.258372783660889\r\nStep 88, loss: 6.248790740966797\r\nStep 89, loss: 6.229119300842285\r\nStep 90, loss: 6.2132439613342285\r\nStep 91, loss: 6.208461761474609\r\nStep 92, loss: 6.195178985595703\r\nStep 93, loss: 6.188503742218018\r\nStep 94, loss: 6.172586917877197\r\nStep 95, loss: 6.166547775268555\r\nStep 96, loss: 6.154397010803223\r\nStep 97, loss: 6.1473588943481445\r\nStep 98, loss: 6.132950305938721\r\nStep 99, loss: 6.128696918487549\r\nStep 100, loss: 6.1219048500061035\r\nStep 101, loss: 6.106919288635254\r\nStep 102, loss: 6.106714725494385\r\nStep 103, loss: 6.09316873550415\r\nStep 104, loss: 6.074863910675049\r\nStep 105, loss: 6.072986125946045\r\nStep 106, loss: 6.0674357414245605\r\nStep 107, loss: 6.049893379211426\r\nStep 108, loss: 6.042277812957764\r\nStep 109, loss: 6.038697242736816\r\nStep 110, loss: 6.028180122375488\r\nStep 111, loss: 6.021430015563965\r\nStep 112, loss: 6.013563632965088\r\nStep 113, loss: 6.013474464416504\r\nStep 114, loss: 5.998447418212891\r\nStep 115, loss: 5.991537570953369\r\nStep 116, loss: 5.97799825668335\r\nStep 117, loss: 5.977831840515137\r\nStep 118, loss: 5.965835094451904\r\nStep 119, loss: 5.957108497619629\r\nStep 120, loss: 5.959916114807129\r\nStep 121, loss: 5.94785213470459\r\nStep 122, loss: 5.9382805824279785\r\nStep 123, loss: 5.929416656494141\r\nStep 124, loss: 5.923861980438232\r\nStep 125, loss: 5.919905662536621\r\nStep 126, loss: 5.905158519744873\r\nStep 127, loss: 5.902342319488525\r\nStep 128, loss: 5.893922805786133\r\nStep 129, loss: 5.892301559448242\r\nStep 130, loss: 5.887509346008301\r\nStep 131, loss: 5.877335071563721\r\nStep 132, loss: 5.879994869232178\r\nStep 133, loss: 5.8668622970581055\r\nStep 134, loss: 5.862127304077148\r\nStep 135, loss: 5.852532386779785\r\nStep 136, loss: 5.849078178405762\r\nStep 137, loss: 5.8382673263549805\r\nStep 138, loss: 
5.833400249481201\r\nStep 139, loss: 5.829483509063721\r\nStep 140, loss: 5.836887359619141\r\nStep 141, loss: 5.811771392822266\r\nStep 142, loss: 5.806736946105957\r\nStep 143, loss: 5.798159599304199\r\nStep 144, loss: 5.805481433868408\r\nStep 145, loss: 5.821232318878174\r\nStep 146, loss: 5.790335655212402\r\nStep 147, loss: 5.782346725463867\r\nStep 148, loss: 5.772625923156738\r\nStep 149, loss: 5.770407676696777\r\nStep 150, loss: 5.763734817504883\r\nStep 151, loss: 5.7650322914123535\r\nStep 152, loss: 5.785005569458008\r\nStep 153, loss: 5.770752906799316\r\nStep 154, loss: 5.749398231506348\r\nStep 155, loss: 5.737312316894531\r\nStep 156, loss: 5.73727560043335\r\nStep 157, loss: 5.727344989776611\r\nStep 158, loss: 5.720314025878906\r\nStep 159, loss: 5.715433120727539\r\nStep 160, loss: 5.7099690437316895\r\nStep 161, loss: 5.708059787750244\r\nStep 162, loss: 5.704171180725098\r\nStep 163, loss: 5.703192710876465\r\nStep 164, loss: 5.706217288970947\r\nStep 165, loss: 5.690544128417969\r\nStep 166, loss: 5.678704738616943\r\nStep 167, loss: 5.674036502838135\r\nStep 168, loss: 5.671774864196777\r\nStep 169, loss: 5.671382427215576\r\nStep 170, loss: 5.6651082038879395\r\nStep 171, loss: 5.658195495605469\r\nStep 172, loss: 5.649429798126221\r\nStep 173, loss: 5.649345874786377\r\nStep 174, loss: 5.644377708435059\r\nStep 175, loss: 5.643950939178467\r\nStep 176, loss: 5.630153179168701\r\nStep 177, loss: 5.630523681640625\r\nStep 178, loss: 5.618230819702148\r\nStep 179, loss: 5.613284111022949\r\nStep 180, loss: 5.634624004364014\r\nStep 181, loss: 5.608833312988281\r\nStep 182, loss: 5.609475612640381\r\nStep 183, loss: 5.597111225128174\r\nStep 184, loss: 5.59860897064209\r\nStep 185, loss: 5.5839691162109375\r\nStep 186, loss: 5.586994647979736\r\nStep 187, loss: 5.5755391120910645\r\nStep 188, loss: 5.574591159820557\r\nStep 189, loss: 5.562205791473389\r\nStep 190, loss: 5.566235065460205\r\nStep 191, loss: 5.560031890869141\r\nStep 192, loss: 5.546197891235352\r\nStep 193, loss: 5.556899070739746\r\nStep 194, loss: 5.540331840515137\r\nStep 195, loss: 5.530277252197266\r\nStep 196, loss: 5.526039123535156\r\nStep 197, loss: 5.519392967224121\r\nStep 198, loss: 5.53727912902832\r\nStep 199, loss: 5.525381565093994\r\nStep 200, loss: 5.510133743286133\r\nStep 201, loss: 5.50672721862793\r\nStep 202, loss: 5.500278949737549\r\nStep 203, loss: 5.497273921966553\r\nStep 204, loss: 5.491747856140137\r\nStep 205, loss: 5.487244129180908\r\nStep 206, loss: 5.475861549377441\r\nStep 207, loss: 5.472113609313965\r\nStep 208, loss: 5.474236488342285\r\nStep 209, loss: 5.469985485076904\r\nStep 210, loss: 5.459021091461182\r\nStep 211, loss: 5.46146821975708\r\nStep 212, loss: 5.451879978179932\r\nStep 213, loss: 5.4452433586120605\r\nStep 214, loss: 5.440789699554443\r\nStep 215, loss: 5.44459342956543\r\nStep 216, loss: 5.4394989013671875\r\nStep 217, loss: 5.431442737579346\r\nStep 218, loss: 5.421899318695068\r\nStep 219, loss: 5.422116756439209\r\nStep 220, loss: 5.419055461883545\r\nStep 221, loss: 5.398890495300293\r\nStep 222, loss: 5.413272857666016\r\nStep 223, loss: 5.396838665008545\r\nStep 224, loss: 5.395040512084961\r\nStep 225, loss: 5.400702476501465\r\nStep 226, loss: 5.389471054077148\r\nStep 227, loss: 5.384243011474609\r\nStep 228, loss: 5.3896355628967285\r\nStep 229, loss: 5.381005764007568\r\nStep 230, loss: 5.3701372146606445\r\nStep 231, loss: 5.3595075607299805\r\nStep 232, loss: 5.35382604598999\r\nStep 233, loss: 5.347311019897461\r\nStep 234, loss: 
5.352721214294434\r\nStep 235, loss: 5.343062877655029\r\nStep 236, loss: 5.340795516967773\r\nStep 237, loss: 5.334693908691406\r\n",,terminal_output +3283,9029487,"TERMINAL",0,0,"4\t ",,terminal_output +3284,9030521,"TERMINAL",0,0,"5\t ",,terminal_output +3285,9031618,"TERMINAL",0,0,"6\t ",,terminal_output +3286,9032644,"TERMINAL",0,0,"7\t ",,terminal_output +3287,9033772,"TERMINAL",0,0,"8\t ",,terminal_output +3288,9034693,"TERMINAL",0,0,"Step 238, loss: 5.329957008361816\r\nStep 239, loss: 5.327491283416748\r\nStep 240, loss: 5.326003551483154\r\nStep 241, loss: 5.3174357414245605\r\nStep 242, loss: 5.315793514251709\r\nStep 243, loss: 5.30813455581665\r\nStep 244, loss: 5.309324264526367\r\nStep 245, loss: 5.301596164703369\r\nStep 246, loss: 5.2856316566467285\r\nStep 247, loss: 5.32129430770874\r\nStep 248, loss: 5.280337810516357\r\nStep 249, loss: 5.278120994567871\r\nStep 250, loss: 5.284300327301025\r\nStep 251, loss: 5.27766227722168\r\nStep 252, loss: 5.272886276245117\r\nStep 253, loss: 5.266940116882324\r\nStep 254, loss: 5.260121822357178\r\nStep 255, loss: 5.249101161956787\r\nStep 256, loss: 5.245233535766602\r\nStep 257, loss: 5.249699592590332\r\nStep 258, loss: 5.258293151855469\r\nStep 259, loss: 5.232039928436279\r\nStep 260, loss: 5.2281270027160645\r\nStep 261, loss: 5.228611469268799\r\nStep 262, loss: 5.220524787902832\r\nStep 263, loss: 5.217748641967773\r\nStep 264, loss: 5.22506856918335\r\nStep 265, loss: 5.210155487060547\r\nStep 266, loss: 5.209973335266113\r\nStep 267, loss: 5.1942644119262695\r\nStep 268, loss: 5.189417362213135\r\nStep 269, loss: 5.184019088745117\r\nStep 270, loss: 5.182864665985107\r\nStep 271, loss: 5.170227527618408\r\nStep 272, loss: 5.175075531005859\r\nStep 273, loss: 5.197876930236816\r\nStep 274, loss: 5.1592888832092285\r\nStep 275, loss: 5.158276081085205\r\nStep 276, loss: 5.151877403259277\r\nStep 277, loss: 5.162196159362793\r\nStep 278, loss: 5.149541854858398\r\nStep 279, loss: 5.138991355895996\r\nStep 280, loss: 5.133667945861816\r\nStep 281, loss: 5.124479293823242\r\nStep 282, loss: 5.12913703918457\r\nStep 283, loss: 5.1357598304748535\r\nStep 284, loss: 5.126632213592529\r\nStep 285, loss: 5.110583782196045\r\nStep 286, loss: 5.109063625335693\r\nStep 287, loss: 5.1196489334106445\r\nStep 288, loss: 5.098417282104492\r\nStep 289, loss: 5.097197532653809\r\nStep 290, loss: 5.092851638793945\r\nStep 291, loss: 5.086235523223877\r\nStep 292, loss: 5.085799217224121\r\nStep 293, loss: 5.080358028411865\r\nStep 294, loss: 5.069177627563477\r\nStep 295, loss: 5.104903221130371\r\nStep 296, loss: 5.078104496002197\r\nStep 297, loss: 5.061412811279297\r\nStep 298, loss: 5.06168794631958\r\nStep 299, loss: 5.052941799163818\r\nStep 300, loss: 5.05249547958374\r\nStep 301, loss: 5.044524669647217\r\nStep 302, loss: 5.041052341461182\r\nStep 303, loss: 5.029895305633545\r\nStep 304, loss: 5.0298848152160645\r\nStep 305, loss: 5.023686408996582\r\nStep 306, loss: 5.025036811828613\r\nStep 307, loss: 5.0169854164123535\r\nStep 308, loss: 5.013189315795898\r\nStep 309, loss: 5.009254455566406\r\nStep 310, loss: 5.004693031311035\r\nStep 311, loss: 5.004340171813965\r\nStep 312, loss: 4.994485855102539\r\nStep 313, loss: 5.0150675773620605\r\nStep 314, loss: 5.01185417175293\r\nStep 315, loss: 4.9907755851745605\r\nStep 316, loss: 4.978274345397949\r\nStep 317, loss: 4.971336364746094\r\nStep 318, loss: 4.9563751220703125\r\nStep 319, loss: 4.956000804901123\r\nStep 320, loss: 4.950499534606934\r\nStep 321, loss: 
4.952856540679932\r\nStep 322, loss: 4.94676399230957\r\nStep 323, loss: 4.942606449127197\r\nStep 324, loss: 4.957691192626953\r\nStep 325, loss: 4.933017253875732\r\nStep 326, loss: 4.927815914154053\r\nStep 327, loss: 4.924768447875977\r\nStep 328, loss: 4.913426399230957\r\nStep 329, loss: 4.9052042961120605\r\nStep 330, loss: 4.911599159240723\r\nStep 331, loss: 4.905434608459473\r\nStep 332, loss: 4.903126239776611\r\nStep 333, loss: 4.902874946594238\r\nStep 334, loss: 4.882922649383545\r\nStep 335, loss: 4.893468856811523\r\nStep 336, loss: 4.880719184875488\r\nStep 337, loss: 4.876325607299805\r\nStep 338, loss: 4.866249084472656\r\nStep 339, loss: 4.8952155113220215\r\nStep 340, loss: 4.866044044494629\r\nStep 341, loss: 4.858565807342529\r\nStep 342, loss: 4.8488054275512695\r\nStep 343, loss: 4.848433971405029\r\nStep 344, loss: 4.876358509063721\r\nStep 345, loss: 4.857144832611084\r\nStep 346, loss: 4.844009876251221\r\nStep 347, loss: 4.841489791870117\r\nStep 348, loss: 4.842748641967773\r\nStep 349, loss: 4.8255696296691895\r\nStep 350, loss: 4.823897361755371\r\nStep 351, loss: 4.8137946128845215\r\nStep 352, loss: 4.809632778167725\r\nStep 353, loss: 4.810863018035889\r\nStep 354, loss: 4.806398868560791\r\nStep 355, loss: 4.82789421081543\r\nStep 356, loss: 4.796984672546387\r\nStep 357, loss: 4.7887115478515625\r\nStep 358, loss: 4.790220737457275\r\nStep 359, loss: 4.78953218460083\r\nStep 360, loss: 4.7696051597595215\r\nStep 361, loss: 4.7737908363342285\r\nStep 362, loss: 4.768120288848877\r\nStep 363, loss: 4.766651153564453\r\nStep 364, loss: 4.757592678070068\r\nStep 365, loss: 4.76331901550293\r\nStep 366, loss: 4.775140762329102\r\nStep 367, loss: 4.742700099945068\r\nStep 368, loss: 4.743465900421143\r\nStep 369, loss: 4.733345985412598\r\nStep 370, loss: 4.736976146697998\r\nStep 371, loss: 4.722981929779053\r\nStep 372, loss: 4.747642993927002\r\nStep 373, loss: 4.717221736907959\r\nStep 374, loss: 4.72883939743042\r\nStep 375, loss: 4.704955101013184\r\nStep 376, loss: 4.704885959625244\r\nStep 377, loss: 4.704352855682373\r\nStep 378, loss: 4.692562103271484\r\nStep 379, loss: 4.6947855949401855\r\nStep 380, loss: 4.692863464355469\r\nStep 381, loss: 4.691582202911377\r\nStep 382, loss: 4.6886091232299805\r\nStep 383, loss: 4.684060573577881\r\nStep 384, loss: 4.680605411529541\r\nStep 385, loss: 4.686710357666016\r\nStep 386, loss: 4.680304527282715\r\nStep 387, loss: 4.665266513824463\r\nStep 388, loss: 4.654778957366943\r\nStep 389, loss: 4.659664630889893\r\nStep 390, loss: 4.642502784729004\r\nStep 391, loss: 4.637691020965576\r\nStep 392, loss: 4.641426086425781\r\nStep 393, loss: 4.6408796310424805\r\nStep 394, loss: 4.628902912139893\r\nStep 395, loss: 4.650570392608643\r\nStep 396, loss: 4.637500762939453\r\nStep 397, loss: 4.627742767333984\r\nStep 398, loss: 4.629310607910156\r\nStep 399, loss: 4.618185043334961\r\nStep 400, loss: 4.616523742675781\r\nStep 401, loss: 4.607940673828125\r\nStep 402, loss: 4.605892181396484\r\nStep 403, loss: 4.622358798980713\r\nStep 404, loss: 4.600706577301025\r\nStep 405, loss: 4.593591690063477\r\nStep 406, loss: 4.5770039558410645\r\nStep 407, loss: 4.586292743682861\r\nStep 408, loss: 4.576440811157227\r\nStep 409, loss: 4.564899921417236\r\nStep 410, loss: 4.562723636627197\r\nStep 411, loss: 4.551421165466309\r\nStep 412, loss: 4.558500289916992\r\nStep 413, loss: 4.568382740020752\r\nStep 414, loss: 4.547826290130615\r\nStep 415, loss: 4.5493011474609375\r\nStep 416, loss: 4.537762641906738\r\nStep 417, 
loss: 4.538228511810303\r\nStep 418, loss: 4.524601936340332\r\nStep 419, loss: 4.537634372711182\r\nStep 420, loss: 4.521946907043457\r\nStep 421, loss: 4.517554759979248\r\nStep 422, loss: 4.520114421844482\r\nStep 423, loss: 4.515247821807861\r\nStep 424, loss: 4.514492511749268\r\nStep 425, loss: 4.510465145111084\r\nStep 426, loss: 4.535045146942139\r\nStep 427, loss: 4.491199970245361\r\nStep 428, loss: 4.494980812072754\r\nStep 429, loss: 4.488420009613037\r\nStep 430, loss: 4.517583847045898\r\nStep 431, loss: 4.488105773925781\r\nStep 432, loss: 4.47454833984375\r\nStep 433, loss: 4.482129096984863\r\nStep 434, loss: 4.47510290145874\r\nStep 435, loss: 4.477695465087891\r\nStep 436, loss: 4.465596675872803\r\nStep 437, loss: 4.465971946716309\r\nStep 438, loss: 4.483786106109619\r\nStep 439, loss: 4.46391487121582\r\nStep 440, loss: 4.442028045654297\r\nStep 441, loss: 4.443118095397949\r\nStep 442, loss: 4.440573215484619\r\nStep 443, loss: 4.432990074157715\r\nStep 444, loss: 4.422598838806152\r\nStep 445, loss: 4.430192470550537\r\nStep 446, loss: 4.448519706726074\r\nStep 447, loss: 4.4124345779418945\r\nStep 448, loss: 4.445275783538818\r\nStep 449, loss: 4.407471656799316\r\nStep 450, loss: 4.413254737854004\r\nStep 451, loss: 4.401000022888184\r\nStep 452, loss: 4.400097370147705\r\nStep 453, loss: 4.3922438621521\r\nStep 454, loss: 4.3765339851379395\r\nStep 455, loss: 4.376810550689697\r\nStep 456, loss: 4.393165111541748\r\nStep 457, loss: 4.3800787925720215\r\nStep 458, loss: 4.363004684448242\r\nStep 459, loss: 4.3820624351501465\r\nStep 460, loss: 4.36853551864624\r\nStep 461, loss: 4.373488903045654\r\nStep 462, loss: 4.402946949005127\r\nStep 463, loss: 4.378390312194824\r\nStep 464, loss: 4.35142707824707\r\nStep 465, loss: 4.350417137145996\r\nStep 466, loss: 4.347110748291016\r\nStep 467, loss: 4.331931114196777\r\nStep 468, loss: 4.333713531494141\r\nStep 469, loss: 4.34257173538208\r\nStep 470, loss: 4.32764196395874\r\nStep 471, loss: 4.328501224517822\r\nStep 472, loss: 4.318349838256836\r\nStep 473, loss: 4.323432445526123\r\nStep 474, loss: 4.302000045776367\r\nStep 475, loss: 4.306225776672363\r\nStep 476, loss: 4.311520099639893\r\nStep 477, loss: 4.291138172149658\r\n",,terminal_output +3289,9034709,"TERMINAL",0,0,"9\t ",,terminal_output +3290,9035719,"TERMINAL",0,0,"50\t ",,terminal_output +3291,9036840,"TERMINAL",0,0,"1\t ",,terminal_output +3292,9037862,"TERMINAL",0,0,"2\t ",,terminal_output +3293,9038890,"TERMINAL",0,0,"3\t ",,terminal_output +3294,9039881,"TERMINAL",0,0,"4\t ",,terminal_output +3295,9040429,"TERMINAL",0,0,"Step 478, loss: 4.314309597015381\r\nStep 479, loss: 4.326601982116699\r\nStep 480, loss: 4.290407180786133\r\nStep 481, loss: 4.282851219177246\r\nStep 482, loss: 4.277844429016113\r\nStep 483, loss: 4.267113208770752\r\nStep 484, loss: 4.270807266235352\r\nStep 485, loss: 4.285476207733154\r\nStep 486, loss: 4.273164749145508\r\nStep 487, loss: 4.260318279266357\r\nStep 488, loss: 4.259771823883057\r\nStep 489, loss: 4.246504783630371\r\nStep 490, loss: 4.242241859436035\r\nStep 491, loss: 4.255734920501709\r\nStep 492, loss: 4.235747814178467\r\nStep 493, loss: 4.238321304321289\r\nStep 494, loss: 4.23118257522583\r\nStep 495, loss: 4.237157821655273\r\nStep 496, loss: 4.224691867828369\r\nStep 497, loss: 4.257751941680908\r\nStep 498, loss: 4.206289291381836\r\nStep 499, loss: 4.222168445587158\r\nStep 500, loss: 4.212905406951904\r\nStep 501, loss: 4.211968898773193\r\nStep 502, loss: 4.221551895141602\r\nStep 503, loss: 
4.190032005310059\r\nStep 504, loss: 4.188676834106445\r\nStep 505, loss: 4.188112735748291\r\nStep 506, loss: 4.192103862762451\r\nStep 507, loss: 4.174623489379883\r\nStep 508, loss: 4.190797805786133\r\nStep 509, loss: 4.173737525939941\r\nStep 510, loss: 4.198996543884277\r\nStep 511, loss: 4.165807723999023\r\nStep 512, loss: 4.16094970703125\r\nStep 513, loss: 4.148315906524658\r\nStep 514, loss: 4.153014183044434\r\nStep 515, loss: 4.1647796630859375\r\nStep 516, loss: 4.152920722961426\r\nStep 517, loss: 4.141209602355957\r\nStep 518, loss: 4.135449409484863\r\nStep 519, loss: 4.16567325592041\r\nStep 520, loss: 4.127534866333008\r\nStep 521, loss: 4.12393045425415\r\nStep 522, loss: 4.115723609924316\r\nStep 523, loss: 4.13126277923584\r\nStep 524, loss: 4.111210346221924\r\nStep 525, loss: 4.104122638702393\r\nStep 526, loss: 4.109250545501709\r\nStep 527, loss: 4.127070426940918\r\nStep 528, loss: 4.099292278289795\r\nStep 529, loss: 4.096903324127197\r\nStep 530, loss: 4.0936102867126465\r\nStep 531, loss: 4.086356163024902\r\nStep 532, loss: 4.098677635192871\r\nStep 533, loss: 4.072143077850342\r\nStep 534, loss: 4.078601360321045\r\nStep 535, loss: 4.074349880218506\r\nStep 536, loss: 4.0652594566345215\r\nStep 537, loss: 4.090595245361328\r\nStep 538, loss: 4.062178611755371\r\nStep 539, loss: 4.063747406005859\r\nStep 540, loss: 4.057856559753418\r\nStep 541, loss: 4.045647144317627\r\nStep 542, loss: 4.076050281524658\r\nStep 543, loss: 4.043745517730713\r\nStep 544, loss: 4.048328399658203\r\nStep 545, loss: 4.037389755249023\r\nStep 546, loss: 4.037398338317871\r\nStep 547, loss: 4.03232479095459\r\nStep 548, loss: 4.033538341522217\r\nStep 549, loss: 4.011363983154297\r\nStep 550, loss: 4.024710178375244\r\nStep 551, loss: 4.020174503326416\r\nStep 552, loss: 4.016357898712158\r\nStep 553, loss: 4.012247562408447\r\nStep 554, loss: 4.0308756828308105\r\nStep 555, loss: 3.9944472312927246\r\nStep 556, loss: 4.002566814422607\r\nStep 557, loss: 3.9964582920074463\r\nStep 558, loss: 3.9873955249786377\r\nStep 559, loss: 4.018288612365723\r\nStep 560, loss: 3.9719834327697754\r\nStep 561, loss: 3.978053092956543\r\nStep 562, loss: 3.9708244800567627\r\nStep 563, loss: 3.979743480682373\r\nStep 564, loss: 3.9815804958343506\r\nStep 565, loss: 3.9609739780426025\r\nStep 566, loss: 3.9583261013031006\r\nStep 567, loss: 3.952641248703003\r\nStep 568, loss: 3.954911231994629\r\nStep 569, loss: 3.9726226329803467\r\nStep 570, loss: 3.948828935623169\r\nStep 571, loss: 3.945371150970459\r\nStep 572, loss: 3.9462060928344727\r\nStep 573, loss: 3.938913106918335\r\nStep 574, loss: 3.941828489303589\r\nStep 575, loss: 3.9351422786712646\r\nStep 576, loss: 3.9510412216186523\r\nStep 577, loss: 3.927370309829712\r\nStep 578, loss: 3.9175424575805664\r\nStep 579, loss: 3.912045478820801\r\nStep 580, loss: 3.9112837314605713\r\nStep 581, loss: 3.900571346282959\r\nStep 582, loss: 3.9072000980377197\r\nStep 583, loss: 3.889176845550537\r\nStep 584, loss: 3.8997855186462402\r\nStep 585, loss: 3.9246644973754883\r\nStep 586, loss: 3.9035208225250244\r\nStep 587, loss: 3.8814516067504883\r\nStep 588, loss: 3.8724162578582764\r\nStep 589, loss: 3.8815596103668213\r\nStep 590, loss: 3.8769497871398926\r\nStep 591, loss: 3.876450777053833\r\nStep 592, loss: 3.8641340732574463\r\nStep 593, loss: 3.867258071899414\r\nStep 594, loss: 3.8706564903259277\r\nStep 595, loss: 3.8888463973999023\r\nStep 596, loss: 3.85465407371521\r\nStep 597, loss: 3.858907699584961\r\nStep 598, loss: 
3.837369680404663\r\nStep 599, loss: 3.8734798431396484\r\nStep 600, loss: 3.839829683303833\r\nStep 601, loss: 3.8525009155273438\r\nStep 602, loss: 3.8282861709594727\r\nStep 603, loss: 3.8215436935424805\r\nStep 604, loss: 3.8238518238067627\r\nStep 605, loss: 3.8206372261047363\r\nStep 606, loss: 3.821307897567749\r\nStep 607, loss: 3.8110625743865967\r\nStep 608, loss: 3.8322155475616455\r\nStep 609, loss: 3.8554508686065674\r\nStep 610, loss: 3.8200318813323975\r\nStep 611, loss: 3.8106765747070312\r\nStep 612, loss: 3.8077502250671387\r\nStep 613, loss: 3.8086133003234863\r\nStep 614, loss: 3.7949867248535156\r\nStep 615, loss: 3.8060169219970703\r\nStep 616, loss: 3.78348708152771\r\nStep 617, loss: 3.774686098098755\r\nStep 618, loss: 3.7849783897399902\r\nStep 619, loss: 3.7647829055786133\r\nStep 620, loss: 3.797109365463257\r\nStep 621, loss: 3.7594316005706787\r\nStep 622, loss: 3.763437032699585\r\nStep 623, loss: 3.757110118865967\r\nStep 624, loss: 3.746163845062256\r\nStep 625, loss: 3.747652292251587\r\nStep 626, loss: 3.7395496368408203\r\nStep 627, loss: 3.7519023418426514\r\nStep 628, loss: 3.7464494705200195\r\nStep 629, loss: 3.732440233230591\r\nStep 630, loss: 3.7304317951202393\r\nStep 631, loss: 3.733001470565796\r\nStep 632, loss: 3.7347540855407715\r\nStep 633, loss: 3.7235419750213623\r\nStep 634, loss: 3.717325210571289\r\nStep 635, loss: 3.7430996894836426\r\nStep 636, loss: 3.7174947261810303\r\nStep 637, loss: 3.7316136360168457\r\nStep 638, loss: 3.7072579860687256\r\nStep 639, loss: 3.7047579288482666\r\nStep 640, loss: 3.6957626342773438\r\nStep 641, loss: 3.696861982345581\r\nStep 642, loss: 3.688952922821045\r\nStep 643, loss: 3.6914167404174805\r\nStep 644, loss: 3.7036588191986084\r\nStep 645, loss: 3.677731513977051\r\nStep 646, loss: 3.6923255920410156\r\nStep 647, loss: 3.6663036346435547\r\nStep 648, loss: 3.662778377532959\r\nStep 649, loss: 3.6769049167633057\r\nStep 650, loss: 3.661261796951294\r\nStep 651, loss: 3.6712427139282227\r\nStep 652, loss: 3.6775758266448975\r\nStep 653, loss: 3.6525919437408447\r\nStep 654, loss: 3.643244981765747\r\nStep 655, loss: 3.6773970127105713\r\nStep 656, loss: 3.648023843765259\r\nStep 657, loss: 3.6396985054016113\r\nStep 658, loss: 3.638261079788208\r\nStep 659, loss: 3.642169237136841\r\nStep 660, loss: 3.6328351497650146\r\nStep 661, loss: 3.618271589279175\r\nStep 662, loss: 3.619826078414917\r\nStep 663, loss: 3.6203789710998535\r\nStep 664, loss: 3.626800537109375\r\nStep 665, loss: 3.646486282348633\r\nStep 666, loss: 3.613283395767212\r\nStep 667, loss: 3.612022876739502\r\nStep 668, loss: 3.612678050994873\r\nStep 669, loss: 3.6105027198791504\r\nStep 670, loss: 3.600921154022217\r\nStep 671, loss: 3.659362554550171\r\nStep 672, loss: 3.587954044342041\r\nStep 673, loss: 3.5877671241760254\r\nStep 674, loss: 3.5853867530822754\r\nStep 675, loss: 3.5882909297943115\r\nStep 676, loss: 3.573401927947998\r\nStep 677, loss: 3.5738282203674316\r\nStep 678, loss: 3.5834217071533203\r\nStep 679, loss: 3.5788910388946533\r\nStep 680, loss: 3.565194845199585\r\nStep 681, loss: 3.5582921504974365\r\nStep 682, loss: 3.558091402053833\r\nStep 683, loss: 3.5502562522888184\r\nStep 684, loss: 3.5794517993927\r\nStep 685, loss: 3.5509228706359863\r\nStep 686, loss: 3.5397703647613525\r\nStep 687, loss: 3.5540502071380615\r\nStep 688, loss: 3.5620830059051514\r\nStep 689, loss: 3.5329854488372803\r\nStep 690, loss: 3.538100481033325\r\nStep 691, loss: 3.525324821472168\r\nStep 692, loss: 
3.521021842956543\r\nStep 693, loss: 3.5201730728149414\r\nStep 694, loss: 3.5187795162200928\r\nStep 695, loss: 3.528630256652832\r\nStep 696, loss: 3.5150933265686035\r\nStep 697, loss: 3.509812116622925\r\nStep 698, loss: 3.518571615219116\r\nStep 699, loss: 3.5009477138519287\r\nStep 700, loss: 3.532482147216797\r\nStep 701, loss: 3.4867610931396484\r\nStep 702, loss: 3.4880757331848145\r\nStep 703, loss: 3.4876437187194824\r\nStep 704, loss: 3.484063148498535\r\nStep 705, loss: 3.5179061889648438\r\nStep 706, loss: 3.477332353591919\r\nStep 707, loss: 3.4678215980529785\r\nStep 708, loss: 3.4823379516601562\r\nStep 709, loss: 3.4607625007629395\r\nStep 710, loss: 3.467406749725342\r\nStep 711, loss: 3.462925672531128\r\nStep 712, loss: 3.4629709720611572\r\nStep 713, loss: 3.4512975215911865\r\nStep 714, loss: 3.4514760971069336\r\nStep 715, loss: 3.4484856128692627\r\n",,terminal_output +3296,9040917,"TERMINAL",0,0,"5\t ",,terminal_output +3297,9041951,"TERMINAL",0,0,"6\t ",,terminal_output +3298,9043005,"TERMINAL",0,0,"7\t ",,terminal_output +3299,9044109,"TERMINAL",0,0,"8\t ",,terminal_output +3300,9045123,"TERMINAL",0,0,"9\t ",,terminal_output +3301,9045578,"TERMINAL",0,0,"Step 716, loss: 3.4827418327331543\r\nStep 717, loss: 3.4390041828155518\r\nStep 718, loss: 3.4290707111358643\r\nStep 719, loss: 3.440384864807129\r\nStep 720, loss: 3.4281837940216064\r\nStep 721, loss: 3.431058168411255\r\nStep 722, loss: 3.420827627182007\r\nStep 723, loss: 3.423978805541992\r\nStep 724, loss: 3.4170196056365967\r\nStep 725, loss: 3.422020196914673\r\nStep 726, loss: 3.4185116291046143\r\nStep 727, loss: 3.5309736728668213\r\nStep 728, loss: 3.4139482975006104\r\nStep 729, loss: 3.414008140563965\r\nStep 730, loss: 3.3956668376922607\r\nStep 731, loss: 3.393888473510742\r\nStep 732, loss: 3.3907995223999023\r\nStep 733, loss: 3.3955178260803223\r\nStep 734, loss: 3.386759042739868\r\nStep 735, loss: 3.39005970954895\r\nStep 736, loss: 3.381746768951416\r\nStep 737, loss: 3.3846046924591064\r\nStep 738, loss: 3.377262830734253\r\nStep 739, loss: 3.3793249130249023\r\nStep 740, loss: 3.4027421474456787\r\nStep 741, loss: 3.4033026695251465\r\nStep 742, loss: 3.368133544921875\r\nStep 743, loss: 3.372753858566284\r\nStep 744, loss: 3.3654427528381348\r\nStep 745, loss: 3.3667449951171875\r\nStep 746, loss: 3.3653838634490967\r\nStep 747, loss: 3.3426952362060547\r\nStep 748, loss: 3.343393087387085\r\nStep 749, loss: 3.3432047367095947\r\nStep 750, loss: 3.3484272956848145\r\nStep 751, loss: 3.3318066596984863\r\nStep 752, loss: 3.3350818157196045\r\nStep 753, loss: 3.317584276199341\r\nStep 754, loss: 3.33187198638916\r\nStep 755, loss: 3.3292386531829834\r\nStep 756, loss: 3.3088386058807373\r\nStep 757, loss: 3.3191659450531006\r\nStep 758, loss: 3.3127615451812744\r\nStep 759, loss: 3.313042163848877\r\nStep 760, loss: 3.319514751434326\r\nStep 761, loss: 3.317599296569824\r\nStep 762, loss: 3.3391637802124023\r\nStep 763, loss: 3.3018765449523926\r\nStep 764, loss: 3.302367925643921\r\nStep 765, loss: 3.2980265617370605\r\nStep 766, loss: 3.2809202671051025\r\nStep 767, loss: 3.2893714904785156\r\nStep 768, loss: 3.2820847034454346\r\nStep 769, loss: 3.2791385650634766\r\nStep 770, loss: 3.2791662216186523\r\nStep 771, loss: 3.282317638397217\r\nStep 772, loss: 3.2676658630371094\r\nStep 773, loss: 3.2851243019104004\r\nStep 774, loss: 3.3804306983947754\r\nStep 775, loss: 3.2656960487365723\r\nStep 776, loss: 3.263430595397949\r\nStep 777, loss: 3.2605700492858887\r\nStep 778, loss: 
3.254237174987793\r\nStep 779, loss: 3.2638156414031982\r\nStep 780, loss: 3.252349853515625\r\nStep 781, loss: 3.2560296058654785\r\nStep 782, loss: 3.2577083110809326\r\nStep 783, loss: 3.2790071964263916\r\nStep 784, loss: 3.235039234161377\r\nStep 785, loss: 3.2558913230895996\r\nStep 786, loss: 3.224114418029785\r\nStep 787, loss: 3.240689277648926\r\nStep 788, loss: 3.227062463760376\r\nStep 789, loss: 3.234194755554199\r\nStep 790, loss: 3.2171621322631836\r\nStep 791, loss: 3.217768430709839\r\nStep 792, loss: 3.2179272174835205\r\nStep 793, loss: 3.205587148666382\r\nStep 794, loss: 3.2119531631469727\r\nStep 795, loss: 3.2051801681518555\r\nStep 796, loss: 3.23117733001709\r\nStep 797, loss: 3.196369171142578\r\nStep 798, loss: 3.2024753093719482\r\nStep 799, loss: 3.1872072219848633\r\nStep 800, loss: 3.18768572807312\r\nStep 801, loss: 3.1950759887695312\r\nStep 802, loss: 3.187474489212036\r\nStep 803, loss: 3.1672749519348145\r\nStep 804, loss: 3.184466600418091\r\nStep 805, loss: 3.2121145725250244\r\nStep 806, loss: 3.1720833778381348\r\nStep 807, loss: 3.166978359222412\r\nStep 808, loss: 3.1691746711730957\r\nStep 809, loss: 3.171675443649292\r\nStep 810, loss: 3.1650638580322266\r\nStep 811, loss: 3.1593499183654785\r\nStep 812, loss: 3.156541347503662\r\nStep 813, loss: 3.1482303142547607\r\nStep 814, loss: 3.1323773860931396\r\nStep 815, loss: 3.1541030406951904\r\nStep 816, loss: 3.1437785625457764\r\nStep 817, loss: 3.1282708644866943\r\nStep 818, loss: 3.238917112350464\r\nStep 819, loss: 3.147490978240967\r\nStep 820, loss: 3.123344898223877\r\nStep 821, loss: 3.1265556812286377\r\nStep 822, loss: 3.1331381797790527\r\nStep 823, loss: 3.1258838176727295\r\nStep 824, loss: 3.1142003536224365\r\nStep 825, loss: 3.1121456623077393\r\nStep 826, loss: 3.1076133251190186\r\nStep 827, loss: 3.1192715167999268\r\nStep 828, loss: 3.1162354946136475\r\nStep 829, loss: 3.1066477298736572\r\nStep 830, loss: 3.097081422805786\r\nStep 831, loss: 3.0981380939483643\r\nStep 832, loss: 3.1022324562072754\r\nStep 833, loss: 3.092754602432251\r\nStep 834, loss: 3.0865120887756348\r\nStep 835, loss: 3.072474241256714\r\nStep 836, loss: 3.088092803955078\r\nStep 837, loss: 3.11157488822937\r\nStep 838, loss: 3.0781538486480713\r\nStep 839, loss: 3.0604407787323\r\nStep 840, loss: 3.071019411087036\r\nStep 841, loss: 3.0607845783233643\r\nStep 842, loss: 3.0602893829345703\r\nStep 843, loss: 3.0491433143615723\r\nStep 844, loss: 3.0461509227752686\r\nStep 845, loss: 3.0509390830993652\r\nStep 846, loss: 3.1001129150390625\r\nStep 847, loss: 3.0538034439086914\r\nStep 848, loss: 3.0552620887756348\r\nStep 849, loss: 3.04366397857666\r\nStep 850, loss: 3.0392954349517822\r\nStep 851, loss: 3.033743381500244\r\nStep 852, loss: 3.0288753509521484\r\nStep 853, loss: 3.025601625442505\r\nStep 854, loss: 3.0251235961914062\r\nStep 855, loss: 3.016740560531616\r\nStep 856, loss: 3.025286912918091\r\nStep 857, loss: 3.1249094009399414\r\nStep 858, loss: 3.0250582695007324\r\nStep 859, loss: 3.0110082626342773\r\nStep 860, loss: 3.012786388397217\r\nStep 861, loss: 3.0033740997314453\r\nStep 862, loss: 3.039743185043335\r\nStep 863, loss: 3.0048141479492188\r\nStep 864, loss: 2.9907033443450928\r\nStep 865, loss: 3.0088939666748047\r\nStep 866, loss: 3.0023276805877686\r\nStep 867, loss: 2.984252691268921\r\nStep 868, loss: 2.9909415245056152\r\nStep 869, loss: 2.9956510066986084\r\nStep 870, loss: 2.97843861579895\r\nStep 871, loss: 2.964433193206787\r\nStep 872, loss: 2.9643633365631104\r\nStep 
873, loss: 2.96313738822937\r\nStep 874, loss: 2.97294282913208\r\nStep 875, loss: 2.9973020553588867\r\nStep 876, loss: 2.9597437381744385\r\nStep 877, loss: 2.9685614109039307\r\nStep 878, loss: 2.973392963409424\r\nStep 879, loss: 2.959679126739502\r\nStep 880, loss: 2.9465479850769043\r\nStep 881, loss: 2.9416134357452393\r\nStep 882, loss: 2.9525461196899414\r\nStep 883, loss: 2.9581754207611084\r\nStep 884, loss: 2.9527337551116943\r\nStep 885, loss: 2.9462881088256836\r\nStep 886, loss: 2.9453673362731934\r\nStep 887, loss: 2.935330629348755\r\nStep 888, loss: 2.9222521781921387\r\nStep 889, loss: 2.918553590774536\r\nStep 890, loss: 2.9276626110076904\r\nStep 891, loss: 2.9258244037628174\r\nStep 892, loss: 2.936419725418091\r\nStep 893, loss: 2.914576292037964\r\nStep 894, loss: 2.914613723754883\r\nStep 895, loss: 2.910684108734131\r\nStep 896, loss: 2.9436793327331543\r\nStep 897, loss: 2.9689412117004395\r\nStep 898, loss: 2.906726360321045\r\nStep 899, loss: 2.8930859565734863\r\nStep 900, loss: 2.890533685684204\r\nStep 901, loss: 2.890345335006714\r\nStep 902, loss: 2.887587070465088\r\nStep 903, loss: 2.8759307861328125\r\nStep 904, loss: 2.878283977508545\r\nStep 905, loss: 2.8857903480529785\r\nStep 906, loss: 2.8710453510284424\r\nStep 907, loss: 2.8798508644104004\r\nStep 908, loss: 2.8657538890838623\r\nStep 909, loss: 2.9267420768737793\r\nStep 910, loss: 2.856778383255005\r\nStep 911, loss: 2.8660941123962402\r\nStep 912, loss: 2.8639955520629883\r\nStep 913, loss: 2.8533971309661865\r\nStep 914, loss: 2.853278636932373\r\nStep 915, loss: 2.8546555042266846\r\nStep 916, loss: 2.8575448989868164\r\nStep 917, loss: 2.8519699573516846\r\nStep 918, loss: 2.8615152835845947\r\nStep 919, loss: 2.8357956409454346\r\nStep 920, loss: 2.8447961807250977\r\nStep 921, loss: 2.8402888774871826\r\nStep 922, loss: 2.8600261211395264\r\nStep 923, loss: 2.829059600830078\r\nStep 924, loss: 2.823862075805664\r\nStep 925, loss: 2.81754994392395\r\nStep 926, loss: 2.812476634979248\r\nStep 927, loss: 2.8375916481018066\r\nStep 928, loss: 2.8082704544067383\r\nStep 929, loss: 2.809657335281372\r\nStep 930, loss: 2.810702323913574\r\nStep 931, loss: 2.805044412612915\r\nStep 932, loss: 2.7827587127685547\r\nStep 933, loss: 2.8048272132873535\r\nStep 934, loss: 2.9192752838134766\r\nStep 935, loss: 2.796173572540283\r\nStep 936, loss: 2.79939603805542\r\nStep 937, loss: 2.7985594272613525\r\nStep 938, loss: 2.7874011993408203\r\nStep 939, loss: 2.788325071334839\r\nStep 940, loss: 2.7677526473999023\r\nStep 941, loss: 2.7750816345214844\r\nStep 942, loss: 2.8279623985290527\r\nStep 943, loss: 2.7795581817626953\r\nStep 944, loss: 2.769453525543213\r\nStep 945, loss: 2.77130389213562\r\nStep 946, loss: 2.772047996520996\r\nStep 947, loss: 2.7506046295166016\r\nStep 948, loss: 2.7733211517333984\r\nStep 949, loss: 2.7584316730499268\r\nStep 950, loss: 2.755765199661255\r\nStep 951, loss: 2.749742031097412\r\nStep 952, loss: 2.7595608234405518\r\n",,terminal_output +3302,9046161,"TERMINAL",0,0,"1:00\t ",,terminal_output +3303,9047182,"TERMINAL",0,0,"1\t ",,terminal_output +3304,9048203,"TERMINAL",0,0,"2\t ",,terminal_output +3305,9049363,"TERMINAL",0,0,"3\t ",,terminal_output +3306,9050285,"TERMINAL",0,0,"4\t ",,terminal_output +3307,9050922,"TERMINAL",0,0,"Step 953, loss: 2.742349147796631\r\nStep 954, loss: 2.746356725692749\r\nStep 955, loss: 2.741588592529297\r\nStep 956, loss: 2.731782913208008\r\nStep 957, loss: 2.733583450317383\r\nStep 958, loss: 2.7271320819854736\r\nStep 959, loss: 
2.709921360015869\r\nStep 960, loss: 2.721615791320801\r\nStep 961, loss: 2.719254493713379\r\nStep 962, loss: 2.777717113494873\r\nStep 963, loss: 2.721083641052246\r\nStep 964, loss: 2.70217227935791\r\nStep 965, loss: 2.713838577270508\r\nStep 966, loss: 2.7135937213897705\r\nStep 967, loss: 2.6995253562927246\r\nStep 968, loss: 2.6986958980560303\r\nStep 969, loss: 2.6995882987976074\r\nStep 970, loss: 2.6980905532836914\r\nStep 971, loss: 2.6911463737487793\r\nStep 972, loss: 2.6869988441467285\r\nStep 973, loss: 2.6903786659240723\r\nStep 974, loss: 2.6884610652923584\r\nStep 975, loss: 2.679518222808838\r\nStep 976, loss: 2.681838035583496\r\nStep 977, loss: 2.6821365356445312\r\nStep 978, loss: 2.6673073768615723\r\nStep 979, loss: 2.7218384742736816\r\nStep 980, loss: 2.671125888824463\r\nStep 981, loss: 2.6694273948669434\r\nStep 982, loss: 2.664454698562622\r\nStep 983, loss: 2.659787178039551\r\nStep 984, loss: 2.6561341285705566\r\nStep 985, loss: 2.6568479537963867\r\nStep 986, loss: 2.6544830799102783\r\nStep 987, loss: 2.6629281044006348\r\nStep 988, loss: 2.6952061653137207\r\nStep 989, loss: 2.6449668407440186\r\nStep 990, loss: 2.6382699012756348\r\nStep 991, loss: 2.6439027786254883\r\nStep 992, loss: 2.626304864883423\r\nStep 993, loss: 2.742353677749634\r\nStep 994, loss: 2.629061222076416\r\nStep 995, loss: 2.6364548206329346\r\nStep 996, loss: 2.6356842517852783\r\nStep 997, loss: 2.622459650039673\r\nStep 998, loss: 2.6129472255706787\r\nStep 999, loss: 2.620009660720825\r\nSaved checkpoint at step 1000\r\nStep 1000, loss: 2.6216213703155518\r\nStep 1001, loss: 2.6105775833129883\r\nStep 1002, loss: 2.6111576557159424\r\nStep 1003, loss: 2.61885929107666\r\nStep 1004, loss: 2.59566593170166\r\nStep 1005, loss: 2.5903689861297607\r\nStep 1006, loss: 2.593644618988037\r\nStep 1007, loss: 2.5824172496795654\r\nStep 1008, loss: 2.5947213172912598\r\nStep 1009, loss: 2.5978355407714844\r\nStep 1010, loss: 2.635629892349243\r\nStep 1011, loss: 2.5856571197509766\r\nStep 1012, loss: 2.5726919174194336\r\nStep 1013, loss: 2.5726046562194824\r\nStep 1014, loss: 2.5686261653900146\r\nStep 1015, loss: 2.574437379837036\r\nStep 1016, loss: 2.5538694858551025\r\nStep 1017, loss: 2.5713987350463867\r\nStep 1018, loss: 2.5504117012023926\r\nStep 1019, loss: 2.5570900440216064\r\nStep 1020, loss: 2.562052011489868\r\nStep 1021, loss: 2.5580921173095703\r\nStep 1022, loss: 2.5556936264038086\r\nStep 1023, loss: 2.5444748401641846\r\nStep 1024, loss: 2.6686503887176514\r\nStep 1025, loss: 2.5448951721191406\r\nStep 1026, loss: 2.5443711280822754\r\nStep 1027, loss: 2.5448999404907227\r\nStep 1028, loss: 2.5338544845581055\r\nStep 1029, loss: 2.5461249351501465\r\nStep 1030, loss: 2.5406837463378906\r\nStep 1031, loss: 2.5263988971710205\r\nStep 1032, loss: 2.531461477279663\r\nStep 1033, loss: 2.587472915649414\r\nStep 1034, loss: 2.5165743827819824\r\nStep 1035, loss: 2.527808666229248\r\nStep 1036, loss: 2.511593818664551\r\nStep 1037, loss: 2.525951385498047\r\nStep 1038, loss: 2.5127522945404053\r\nStep 1039, loss: 2.5163276195526123\r\nStep 1040, loss: 2.5083043575286865\r\nStep 1041, loss: 2.512036085128784\r\nStep 1042, loss: 2.501476287841797\r\nStep 1043, loss: 2.5073680877685547\r\nStep 1044, loss: 2.498732566833496\r\nStep 1045, loss: 2.5017025470733643\r\nStep 1046, loss: 2.4906890392303467\r\nStep 1047, loss: 2.4975924491882324\r\nStep 1048, loss: 2.486015558242798\r\nStep 1049, loss: 2.49591064453125\r\nStep 1050, loss: 2.4842474460601807\r\nStep 1051, loss: 
2.4810588359832764\r\nStep 1052, loss: 2.4747955799102783\r\nStep 1053, loss: 2.4896254539489746\r\nStep 1054, loss: 2.471397638320923\r\nStep 1055, loss: 2.472452163696289\r\nStep 1056, loss: 2.4728217124938965\r\nStep 1057, loss: 2.4676969051361084\r\nStep 1058, loss: 2.466958522796631\r\nStep 1059, loss: 2.466485023498535\r\nStep 1060, loss: 2.4620413780212402\r\nStep 1061, loss: 2.499037742614746\r\nStep 1062, loss: 2.557554244995117\r\nStep 1063, loss: 2.4471476078033447\r\nStep 1064, loss: 2.4544639587402344\r\nStep 1065, loss: 2.442636728286743\r\nStep 1066, loss: 2.4383647441864014\r\nStep 1067, loss: 2.440136432647705\r\nStep 1068, loss: 2.4416046142578125\r\nStep 1069, loss: 2.4260339736938477\r\nStep 1070, loss: 2.4463207721710205\r\nStep 1071, loss: 2.42936372756958\r\nStep 1072, loss: 2.430643320083618\r\nStep 1073, loss: 2.41681170463562\r\nStep 1074, loss: 2.4098222255706787\r\nStep 1075, loss: 2.4174420833587646\r\nStep 1076, loss: 2.416834831237793\r\nStep 1077, loss: 2.415316581726074\r\nStep 1078, loss: 2.4147231578826904\r\nStep 1079, loss: 2.394000291824341\r\nStep 1080, loss: 2.4144704341888428\r\nStep 1081, loss: 2.400419235229492\r\nStep 1082, loss: 2.398200750350952\r\nStep 1083, loss: 2.383587598800659\r\nStep 1084, loss: 2.3896636962890625\r\nStep 1085, loss: 2.4636313915252686\r\nStep 1086, loss: 2.394101858139038\r\nStep 1087, loss: 2.384946584701538\r\nStep 1088, loss: 2.3842861652374268\r\nStep 1089, loss: 2.403897285461426\r\nStep 1090, loss: 2.377153158187866\r\nStep 1091, loss: 2.404075860977173\r\nStep 1092, loss: 2.3752641677856445\r\nStep 1093, loss: 2.37152099609375\r\nStep 1094, loss: 2.3721303939819336\r\nStep 1095, loss: 2.47298264503479\r\nStep 1096, loss: 2.3569188117980957\r\nStep 1097, loss: 2.364516019821167\r\nStep 1098, loss: 2.3627402782440186\r\nStep 1099, loss: 2.353154182434082\r\nStep 1100, loss: 2.361208438873291\r\nStep 1101, loss: 2.3500382900238037\r\nStep 1102, loss: 2.3534746170043945\r\nStep 1103, loss: 2.340796947479248\r\nStep 1104, loss: 2.332432746887207\r\nStep 1105, loss: 2.3366761207580566\r\nStep 1106, loss: 2.3414840698242188\r\nStep 1107, loss: 2.333902597427368\r\nStep 1108, loss: 2.3373029232025146\r\nStep 1109, loss: 2.316862106323242\r\nStep 1110, loss: 2.338226556777954\r\nStep 1111, loss: 2.3261780738830566\r\nStep 1112, loss: 2.4391627311706543\r\nStep 1113, loss: 2.339341640472412\r\nStep 1114, loss: 2.332104444503784\r\nStep 1115, loss: 2.3156185150146484\r\nStep 1116, loss: 2.3147614002227783\r\nStep 1117, loss: 2.312528371810913\r\nStep 1118, loss: 2.306088447570801\r\nStep 1119, loss: 2.304727792739868\r\nStep 1120, loss: 2.318479299545288\r\nStep 1121, loss: 2.3019163608551025\r\nStep 1122, loss: 2.304661989212036\r\nStep 1123, loss: 2.3470988273620605\r\nStep 1124, loss: 2.278986930847168\r\nStep 1125, loss: 2.293384313583374\r\nStep 1126, loss: 2.2854766845703125\r\nStep 1127, loss: 2.2783753871917725\r\nStep 1128, loss: 2.281419515609741\r\nStep 1129, loss: 2.278022527694702\r\nStep 1130, loss: 2.2705724239349365\r\nStep 1131, loss: 2.278308153152466\r\nStep 1132, loss: 2.3867623805999756\r\nStep 1133, loss: 2.2763733863830566\r\nStep 1134, loss: 2.2659802436828613\r\nStep 1135, loss: 2.2658274173736572\r\nStep 1136, loss: 2.25758957862854\r\nStep 1137, loss: 2.2622270584106445\r\nStep 1138, loss: 2.2624399662017822\r\nStep 1139, loss: 2.2535877227783203\r\nStep 1140, loss: 2.2609024047851562\r\nStep 1141, loss: 2.254535675048828\r\nStep 1142, loss: 2.2517387866973877\r\nStep 1143, loss: 
2.2507941722869873\r\nStep 1144, loss: 2.2497096061706543\r\nStep 1145, loss: 2.2435216903686523\r\nStep 1146, loss: 2.2226362228393555\r\nStep 1147, loss: 2.236966848373413\r\nStep 1148, loss: 2.2328531742095947\r\nStep 1149, loss: 2.233680248260498\r\nStep 1150, loss: 2.2185568809509277\r\nStep 1151, loss: 2.277336835861206\r\nStep 1152, loss: 2.222127676010132\r\nStep 1153, loss: 2.2182562351226807\r\nStep 1154, loss: 2.2189831733703613\r\nStep 1155, loss: 2.2112057209014893\r\nStep 1156, loss: 2.218982458114624\r\nStep 1157, loss: 2.207132577896118\r\nStep 1158, loss: 2.204676628112793\r\nStep 1159, loss: 2.3155758380889893\r\nStep 1160, loss: 2.2093307971954346\r\nStep 1161, loss: 2.1955912113189697\r\nStep 1162, loss: 2.206028938293457\r\nStep 1163, loss: 2.2200088500976562\r\nStep 1164, loss: 2.203850746154785\r\nStep 1165, loss: 2.203896999359131\r\nStep 1166, loss: 2.198291540145874\r\nStep 1167, loss: 2.191648483276367\r\nStep 1168, loss: 2.1738104820251465\r\nStep 1169, loss: 2.200791835784912\r\nStep 1170, loss: 2.175122022628784\r\nStep 1171, loss: 2.171715497970581\r\nStep 1172, loss: 2.1692087650299072\r\nStep 1173, loss: 2.1736412048339844\r\nStep 1174, loss: 2.1525280475616455\r\nStep 1175, loss: 2.179588794708252\r\nStep 1176, loss: 2.1529009342193604\r\nStep 1177, loss: 2.156433582305908\r\nStep 1178, loss: 2.1528096199035645\r\nStep 1179, loss: 2.15285062789917\r\nStep 1180, loss: 2.1464555263519287\r\nStep 1181, loss: 2.1670961380004883\r\nStep 1182, loss: 2.21094012260437\r\nStep 1183, loss: 2.1532933712005615\r\n",,terminal_output +3308,9051326,"TERMINAL",0,0,"5\t ",,terminal_output +3309,9052364,"TERMINAL",0,0,"7\t ",,terminal_output +3310,9053447,"TERMINAL",0,0,"8\t ",,terminal_output +3311,9054443,"TERMINAL",0,0,"9\t ",,terminal_output +3312,9055489,"TERMINAL",0,0,"10\t ",,terminal_output +3313,9056403,"TERMINAL",0,0,"Step 1184, loss: 2.2514052391052246\r\nStep 1185, loss: 2.147404193878174\r\nStep 1186, loss: 2.1515114307403564\r\nStep 1187, loss: 2.1290485858917236\r\nStep 1188, loss: 2.1460909843444824\r\nStep 1189, loss: 2.140225887298584\r\nStep 1190, loss: 2.1221625804901123\r\nStep 1191, loss: 2.125298500061035\r\nStep 1192, loss: 2.129639148712158\r\nStep 1193, loss: 2.1247873306274414\r\nStep 1194, loss: 2.1112961769104004\r\nStep 1195, loss: 2.113997220993042\r\nStep 1196, loss: 2.1140453815460205\r\nStep 1197, loss: 2.117347002029419\r\nStep 1198, loss: 2.115858316421509\r\nStep 1199, loss: 2.1222898960113525\r\nStep 1200, loss: 2.109649658203125\r\nStep 1201, loss: 2.102611780166626\r\nStep 1202, loss: 2.1225717067718506\r\nStep 1203, loss: 2.0998215675354004\r\nStep 1204, loss: 2.0982844829559326\r\nStep 1205, loss: 2.1056876182556152\r\nStep 1206, loss: 2.0925838947296143\r\nStep 1207, loss: 2.0876612663269043\r\nStep 1208, loss: 2.091925859451294\r\nStep 1209, loss: 2.0659148693084717\r\nStep 1210, loss: 2.0733702182769775\r\nStep 1211, loss: 2.1821141242980957\r\nStep 1212, loss: 2.074366331100464\r\nStep 1213, loss: 2.0650079250335693\r\nStep 1214, loss: 2.069375991821289\r\nStep 1215, loss: 2.079472541809082\r\nStep 1216, loss: 2.0701870918273926\r\nStep 1217, loss: 2.06697940826416\r\nStep 1218, loss: 2.0540292263031006\r\nStep 1219, loss: 2.0589005947113037\r\nStep 1220, loss: 2.0649616718292236\r\nStep 1221, loss: 2.0437276363372803\r\nStep 1222, loss: 2.0482170581817627\r\nStep 1223, loss: 2.119849920272827\r\nStep 1224, loss: 2.049506664276123\r\nStep 1225, loss: 2.047489643096924\r\nStep 1226, loss: 2.044463634490967\r\nStep 1227, loss: 
2.046253204345703\r\nStep 1228, loss: 2.031317949295044\r\nStep 1229, loss: 2.0302340984344482\r\nStep 1230, loss: 2.0289366245269775\r\nStep 1231, loss: 2.026320457458496\r\nStep 1232, loss: 2.0249838829040527\r\nStep 1233, loss: 2.025986671447754\r\nStep 1234, loss: 2.007997989654541\r\nStep 1235, loss: 2.029738187789917\r\nStep 1236, loss: 2.0218770503997803\r\nStep 1237, loss: 2.124213695526123\r\nStep 1238, loss: 2.021282434463501\r\nStep 1239, loss: 2.013103723526001\r\nStep 1240, loss: 2.0152406692504883\r\nStep 1241, loss: 2.0092873573303223\r\nStep 1242, loss: 2.0007762908935547\r\nStep 1243, loss: 1.9925522804260254\r\nStep 1244, loss: 2.0079152584075928\r\nStep 1245, loss: 1.9972655773162842\r\nStep 1246, loss: 1.9972015619277954\r\nStep 1247, loss: 2.008730411529541\r\nStep 1248, loss: 1.9950155019760132\r\nStep 1249, loss: 1.9845744371414185\r\nStep 1250, loss: 1.9956821203231812\r\nStep 1251, loss: 1.9749215841293335\r\nStep 1252, loss: 1.9830570220947266\r\nStep 1253, loss: 1.9774562120437622\r\nStep 1254, loss: 1.9599684476852417\r\nStep 1255, loss: 1.9761961698532104\r\nStep 1256, loss: 1.9614485502243042\r\nStep 1257, loss: 1.9683395624160767\r\nStep 1258, loss: 2.1874966621398926\r\nStep 1259, loss: 1.9649968147277832\r\nStep 1260, loss: 1.9574106931686401\r\nStep 1261, loss: 1.9695760011672974\r\nStep 1262, loss: 1.9560608863830566\r\nStep 1263, loss: 1.9517951011657715\r\nStep 1264, loss: 1.9498109817504883\r\nStep 1265, loss: 1.9482011795043945\r\nStep 1266, loss: 1.943386197090149\r\nStep 1267, loss: 1.9461164474487305\r\nStep 1268, loss: 1.9416450262069702\r\nStep 1269, loss: 2.005887031555176\r\nStep 1270, loss: 1.9338712692260742\r\nStep 1271, loss: 1.9459421634674072\r\nStep 1272, loss: 1.9193332195281982\r\nStep 1273, loss: 1.920301079750061\r\nStep 1274, loss: 1.9174983501434326\r\nStep 1275, loss: 1.9198366403579712\r\nStep 1276, loss: 1.9116724729537964\r\nStep 1277, loss: 1.9140080213546753\r\nStep 1278, loss: 1.907044529914856\r\nStep 1279, loss: 1.9155265092849731\r\nStep 1280, loss: 2.007279634475708\r\nStep 1281, loss: 1.9103505611419678\r\nStep 1282, loss: 1.8940449953079224\r\nStep 1283, loss: 1.9111343622207642\r\nStep 1284, loss: 1.8931865692138672\r\nStep 1285, loss: 1.905342936515808\r\nStep 1286, loss: 1.895726203918457\r\nStep 1287, loss: 1.9009449481964111\r\nStep 1288, loss: 1.890424370765686\r\nStep 1289, loss: 1.8810595273971558\r\nStep 1290, loss: 1.8850408792495728\r\nStep 1291, loss: 1.8803645372390747\r\nStep 1292, loss: 1.8774715662002563\r\nStep 1293, loss: 1.8799711465835571\r\nStep 1294, loss: 1.8985449075698853\r\nStep 1295, loss: 1.8698173761367798\r\nStep 1296, loss: 1.8767036199569702\r\nStep 1297, loss: 1.9764724969863892\r\nStep 1298, loss: 1.8609592914581299\r\nStep 1299, loss: 1.8631672859191895\r\nStep 1300, loss: 1.8785043954849243\r\nStep 1301, loss: 1.925824522972107\r\nStep 1302, loss: 1.8569217920303345\r\nStep 1303, loss: 1.8573517799377441\r\nStep 1304, loss: 1.8556374311447144\r\nStep 1305, loss: 1.8467161655426025\r\nStep 1306, loss: 1.8473272323608398\r\nStep 1307, loss: 1.8498902320861816\r\nStep 1308, loss: 1.8460050821304321\r\nStep 1309, loss: 1.847381591796875\r\nStep 1310, loss: 1.8385757207870483\r\nStep 1311, loss: 1.8457484245300293\r\nStep 1312, loss: 1.8370357751846313\r\nStep 1313, loss: 1.8352510929107666\r\nStep 1314, loss: 1.82010817527771\r\nStep 1315, loss: 1.8383429050445557\r\nStep 1316, loss: 1.8350903987884521\r\nStep 1317, loss: 1.8154176473617554\r\nStep 1318, loss: 1.9146051406860352\r\nStep 
1319, loss: 1.8427915573120117\r\nStep 1320, loss: 1.8298358917236328\r\nStep 1321, loss: 1.8012243509292603\r\nStep 1322, loss: 1.8204755783081055\r\nStep 1323, loss: 1.8084367513656616\r\nStep 1324, loss: 1.7954695224761963\r\nStep 1325, loss: 1.7977830171585083\r\nStep 1326, loss: 1.8029056787490845\r\nStep 1327, loss: 1.8041409254074097\r\nStep 1328, loss: 1.7989355325698853\r\nStep 1329, loss: 1.8043099641799927\r\nStep 1330, loss: 1.7887426614761353\r\nStep 1331, loss: 1.7985695600509644\r\nStep 1332, loss: 1.7984368801116943\r\nStep 1333, loss: 1.7844665050506592\r\nStep 1334, loss: 1.7774420976638794\r\nStep 1335, loss: 1.779616355895996\r\nStep 1336, loss: 1.775593638420105\r\nStep 1337, loss: 1.7803224325180054\r\nStep 1338, loss: 1.7738412618637085\r\nStep 1339, loss: 1.8789796829223633\r\nStep 1340, loss: 1.7750718593597412\r\nStep 1341, loss: 1.772544264793396\r\nStep 1342, loss: 1.7678182125091553\r\nStep 1343, loss: 1.7600904703140259\r\nStep 1344, loss: 1.761574625968933\r\nStep 1345, loss: 1.7705440521240234\r\nStep 1346, loss: 1.8197379112243652\r\nStep 1347, loss: 1.7583271265029907\r\nStep 1348, loss: 1.7595548629760742\r\nStep 1349, loss: 1.740911841392517\r\nStep 1350, loss: 1.7404437065124512\r\nStep 1351, loss: 1.7632251977920532\r\nStep 1352, loss: 1.7320446968078613\r\nStep 1353, loss: 1.7447603940963745\r\nStep 1354, loss: 1.7305363416671753\r\nStep 1355, loss: 1.7363570928573608\r\nStep 1356, loss: 1.7361942529678345\r\nStep 1357, loss: 1.724202275276184\r\nStep 1358, loss: 1.7295283079147339\r\nStep 1359, loss: 1.7388250827789307\r\nStep 1360, loss: 1.9576064348220825\r\nStep 1361, loss: 1.7379661798477173\r\nStep 1362, loss: 1.7354979515075684\r\nStep 1363, loss: 1.7152379751205444\r\nStep 1364, loss: 1.703550100326538\r\nStep 1365, loss: 1.716672658920288\r\nStep 1366, loss: 1.6985822916030884\r\nStep 1367, loss: 1.8208657503128052\r\nStep 1368, loss: 1.706235647201538\r\nStep 1369, loss: 1.6950387954711914\r\nStep 1370, loss: 1.7006388902664185\r\nStep 1371, loss: 1.6974055767059326\r\nStep 1372, loss: 1.6985304355621338\r\nStep 1373, loss: 1.6945011615753174\r\nStep 1374, loss: 1.6912798881530762\r\nStep 1375, loss: 1.6981420516967773\r\nStep 1376, loss: 1.6931859254837036\r\nStep 1377, loss: 1.6817814111709595\r\nStep 1378, loss: 1.6780462265014648\r\nStep 1379, loss: 1.6835598945617676\r\nStep 1380, loss: 1.6775177717208862\r\nStep 1381, loss: 1.6888843774795532\r\nStep 1382, loss: 1.6742277145385742\r\nStep 1383, loss: 1.6638822555541992\r\nStep 1384, loss: 1.6693477630615234\r\nStep 1385, loss: 1.6563372611999512\r\nStep 1386, loss: 1.663394808769226\r\nStep 1387, loss: 1.7685822248458862\r\nStep 1388, loss: 1.6520717144012451\r\nStep 1389, loss: 1.6660429239273071\r\nStep 1390, loss: 1.6538387537002563\r\nStep 1391, loss: 1.655356764793396\r\nStep 1392, loss: 1.6537481546401978\r\nStep 1393, loss: 1.6541476249694824\r\nStep 1394, loss: 1.6573584079742432\r\nStep 1395, loss: 1.7131267786026\r\nStep 1396, loss: 1.6445809602737427\r\nStep 1397, loss: 1.6492037773132324\r\nStep 1398, loss: 1.6457405090332031\r\nStep 1399, loss: 1.6298296451568604\r\nStep 1400, loss: 1.642886757850647\r\nStep 1401, loss: 1.6311414241790771\r\nStep 1402, loss: 1.6331113576889038\r\nStep 1403, loss: 1.636167049407959\r\nStep 1404, loss: 1.6243436336517334\r\nStep 1405, loss: 1.6220959424972534\r\nStep 1406, loss: 1.6238009929656982\r\nStep 1407, loss: 1.6229277849197388\r\nStep 1408, loss: 1.7241841554641724\r\nStep 1409, loss: 1.6209965944290161\r\nStep 1410, loss: 
1.6202497482299805\r\nStep 1411, loss: 1.6238616704940796\r\nStep 1412, loss: 1.6161155700683594\r\n",,terminal_output +3314,9056517,"TERMINAL",0,0,"1\t ",,terminal_output +3315,9057555,"TERMINAL",0,0,"2\t ",,terminal_output +3316,9058597,"TERMINAL",0,0,"3\t ",,terminal_output +3317,9059677,"TERMINAL",0,0,"4\t ",,terminal_output +3318,9060682,"TERMINAL",0,0,"5\t ",,terminal_output +3319,9061425,"TERMINAL",0,0,"Step 1413, loss: 1.6087722778320312\r\nStep 1414, loss: 1.6026263236999512\r\nStep 1415, loss: 1.6023073196411133\r\nStep 1416, loss: 1.6062771081924438\r\nStep 1417, loss: 1.598672866821289\r\nStep 1418, loss: 1.7310802936553955\r\nStep 1419, loss: 1.612272024154663\r\nStep 1420, loss: 1.5961713790893555\r\nStep 1421, loss: 1.5863245725631714\r\nStep 1422, loss: 1.6004047393798828\r\nStep 1423, loss: 1.591257095336914\r\nStep 1424, loss: 1.5884228944778442\r\nStep 1425, loss: 1.5790733098983765\r\nStep 1426, loss: 1.574141502380371\r\nStep 1427, loss: 1.5734047889709473\r\nStep 1428, loss: 1.574835181236267\r\nStep 1429, loss: 1.583267331123352\r\nStep 1430, loss: 1.6945130825042725\r\nStep 1431, loss: 1.5749009847640991\r\nStep 1432, loss: 1.5777760744094849\r\nStep 1433, loss: 1.5713144540786743\r\nStep 1434, loss: 1.5640742778778076\r\nStep 1435, loss: 1.5654590129852295\r\nStep 1436, loss: 1.5578243732452393\r\nStep 1437, loss: 1.542662501335144\r\nStep 1438, loss: 1.5535138845443726\r\nStep 1439, loss: 1.5589416027069092\r\nStep 1440, loss: 1.5524581670761108\r\nStep 1441, loss: 1.5460255146026611\r\nStep 1442, loss: 1.5374630689620972\r\nStep 1443, loss: 1.532065987586975\r\nStep 1444, loss: 1.5541841983795166\r\nStep 1445, loss: 1.6170743703842163\r\nStep 1446, loss: 1.6459671258926392\r\nStep 1447, loss: 1.5326168537139893\r\nStep 1448, loss: 1.5393948554992676\r\nStep 1449, loss: 1.5401291847229004\r\nStep 1450, loss: 1.535132884979248\r\nStep 1451, loss: 1.5218124389648438\r\nStep 1452, loss: 1.530403971672058\r\nStep 1453, loss: 1.529153823852539\r\nStep 1454, loss: 1.5178229808807373\r\nStep 1455, loss: 1.5303112268447876\r\nStep 1456, loss: 1.5316901206970215\r\nStep 1457, loss: 1.517547845840454\r\nStep 1458, loss: 1.5143694877624512\r\nStep 1459, loss: 1.5137922763824463\r\nStep 1460, loss: 1.5040327310562134\r\nStep 1461, loss: 1.5191434621810913\r\nStep 1462, loss: 1.5034468173980713\r\nStep 1463, loss: 1.5051584243774414\r\nStep 1464, loss: 1.5107675790786743\r\nStep 1465, loss: 1.6048098802566528\r\nStep 1466, loss: 1.4903076887130737\r\nStep 1467, loss: 1.5073039531707764\r\nStep 1468, loss: 1.4899312257766724\r\nStep 1469, loss: 1.5010255575180054\r\nStep 1470, loss: 1.4886444807052612\r\nStep 1471, loss: 1.4858546257019043\r\nStep 1472, loss: 1.4880692958831787\r\nStep 1473, loss: 1.4909329414367676\r\nStep 1474, loss: 1.4800207614898682\r\nStep 1475, loss: 1.491972804069519\r\nStep 1476, loss: 1.468887448310852\r\nStep 1477, loss: 1.4766310453414917\r\nStep 1478, loss: 1.4732328653335571\r\nStep 1479, loss: 1.473517656326294\r\nStep 1480, loss: 1.4727214574813843\r\nStep 1481, loss: 1.5573270320892334\r\nStep 1482, loss: 1.4519988298416138\r\nStep 1483, loss: 1.4561915397644043\r\nStep 1484, loss: 1.4576293230056763\r\nStep 1485, loss: 1.458225131034851\r\nStep 1486, loss: 1.431461215019226\r\nStep 1487, loss: 1.4518665075302124\r\nStep 1488, loss: 1.5197923183441162\r\nStep 1489, loss: 1.4439533948898315\r\nStep 1490, loss: 1.4444804191589355\r\nStep 1491, loss: 1.4452991485595703\r\nStep 1492, loss: 1.4471299648284912\r\nStep 1493, loss: 
1.4499481916427612\r\nStep 1494, loss: 1.4476739168167114\r\nStep 1495, loss: 1.437790870666504\r\nStep 1496, loss: 1.4314683675765991\r\nStep 1497, loss: 1.4401185512542725\r\nStep 1498, loss: 1.5386220216751099\r\nStep 1499, loss: 1.4194409847259521\r\nStep 1500, loss: 1.430286169052124\r\nStep 1501, loss: 1.4212864637374878\r\nStep 1502, loss: 1.4255434274673462\r\nStep 1503, loss: 1.4163250923156738\r\nStep 1504, loss: 1.4189941883087158\r\nStep 1505, loss: 1.4191075563430786\r\nStep 1506, loss: 1.4198188781738281\r\nStep 1507, loss: 1.4087233543395996\r\nStep 1508, loss: 1.4143967628479004\r\nStep 1509, loss: 1.411943793296814\r\nStep 1510, loss: 1.4053165912628174\r\nStep 1511, loss: 1.4058213233947754\r\nStep 1512, loss: 1.401185154914856\r\nStep 1513, loss: 1.3930326700210571\r\nStep 1514, loss: 1.3995357751846313\r\nStep 1515, loss: 1.4017797708511353\r\nStep 1516, loss: 1.3985692262649536\r\nStep 1517, loss: 1.3948734998703003\r\nStep 1518, loss: 1.5002323389053345\r\nStep 1519, loss: 1.39754319190979\r\nStep 1520, loss: 1.3826816082000732\r\nStep 1521, loss: 1.3997584581375122\r\nStep 1522, loss: 1.3802518844604492\r\nStep 1523, loss: 1.3776522874832153\r\nStep 1524, loss: 1.3849002122879028\r\nStep 1525, loss: 1.382143259048462\r\nStep 1526, loss: 1.3733817338943481\r\nStep 1527, loss: 1.3814204931259155\r\nStep 1528, loss: 1.3813207149505615\r\nStep 1529, loss: 1.3624072074890137\r\nStep 1530, loss: 1.3689574003219604\r\nStep 1531, loss: 1.362549901008606\r\nStep 1532, loss: 1.3567017316818237\r\nStep 1533, loss: 1.4658576250076294\r\nStep 1534, loss: 1.36507248878479\r\nStep 1535, loss: 1.3603605031967163\r\nStep 1536, loss: 1.355003833770752\r\nStep 1537, loss: 1.3554668426513672\r\nStep 1538, loss: 1.3460973501205444\r\nStep 1539, loss: 1.3404535055160522\r\nStep 1540, loss: 1.3402961492538452\r\nStep 1541, loss: 1.3555190563201904\r\nStep 1542, loss: 1.4194215536117554\r\nStep 1543, loss: 1.3318098783493042\r\nStep 1544, loss: 1.3488194942474365\r\nStep 1545, loss: 1.3311128616333008\r\nStep 1546, loss: 1.3387782573699951\r\nStep 1547, loss: 1.332859992980957\r\nStep 1548, loss: 1.3430042266845703\r\nStep 1549, loss: 1.3436697721481323\r\nStep 1550, loss: 1.3270114660263062\r\nStep 1551, loss: 1.3200368881225586\r\nStep 1552, loss: 1.4296122789382935\r\nStep 1553, loss: 1.3234409093856812\r\nStep 1554, loss: 1.3141577243804932\r\nStep 1555, loss: 1.3103095293045044\r\nStep 1556, loss: 1.3106328248977661\r\nStep 1557, loss: 1.3190805912017822\r\nStep 1558, loss: 1.3069589138031006\r\nStep 1559, loss: 1.3118096590042114\r\nStep 1560, loss: 1.3010789155960083\r\nStep 1561, loss: 1.3145140409469604\r\nStep 1562, loss: 1.2916896343231201\r\nStep 1563, loss: 1.3053261041641235\r\nStep 1564, loss: 1.3066322803497314\r\nStep 1565, loss: 1.3060895204544067\r\nStep 1566, loss: 1.2932778596878052\r\nStep 1567, loss: 1.2862190008163452\r\nStep 1568, loss: 1.2841360569000244\r\nStep 1569, loss: 1.2925876379013062\r\nStep 1570, loss: 1.2841483354568481\r\nStep 1571, loss: 1.2947267293930054\r\nStep 1572, loss: 1.2864670753479004\r\nStep 1573, loss: 1.3850587606430054\r\nStep 1574, loss: 1.2943183183670044\r\nStep 1575, loss: 1.2886611223220825\r\nStep 1576, loss: 1.2827823162078857\r\nStep 1577, loss: 1.2831140756607056\r\nStep 1578, loss: 1.2647260427474976\r\nStep 1579, loss: 1.2725751399993896\r\nStep 1580, loss: 1.2677335739135742\r\nStep 1581, loss: 1.2706165313720703\r\nStep 1582, loss: 1.2657322883605957\r\nStep 1583, loss: 1.2562412023544312\r\nStep 1584, loss: 
1.257921814918518\r\nStep 1585, loss: 1.26457941532135\r\nStep 1586, loss: 1.2576831579208374\r\nStep 1587, loss: 1.2457044124603271\r\nStep 1588, loss: 1.247615098953247\r\nStep 1589, loss: 1.3508491516113281\r\nStep 1590, loss: 1.3532425165176392\r\nStep 1591, loss: 1.2567756175994873\r\nStep 1592, loss: 1.2447798252105713\r\nStep 1593, loss: 1.2440370321273804\r\nStep 1594, loss: 1.2341337203979492\r\nStep 1595, loss: 1.2381149530410767\r\nStep 1596, loss: 1.2266567945480347\r\nStep 1597, loss: 1.2467401027679443\r\nStep 1598, loss: 1.2268397808074951\r\nStep 1599, loss: 1.2440836429595947\r\nStep 1600, loss: 1.2371234893798828\r\nStep 1601, loss: 1.2363048791885376\r\nStep 1602, loss: 1.2486282587051392\r\nStep 1603, loss: 1.2202116250991821\r\nStep 1604, loss: 1.2271302938461304\r\nStep 1605, loss: 1.2202308177947998\r\nStep 1606, loss: 1.2211642265319824\r\nStep 1607, loss: 1.2193094491958618\r\nStep 1608, loss: 1.2126164436340332\r\nStep 1609, loss: 1.2117056846618652\r\nStep 1610, loss: 1.2179136276245117\r\nStep 1611, loss: 1.2158995866775513\r\nStep 1612, loss: 1.209560751914978\r\nStep 1613, loss: 1.203174352645874\r\nStep 1614, loss: 1.1996241807937622\r\nStep 1615, loss: 1.199424147605896\r\nStep 1616, loss: 1.2108306884765625\r\nStep 1617, loss: 1.2969918251037598\r\nStep 1618, loss: 1.195237159729004\r\nStep 1619, loss: 1.199118971824646\r\nStep 1620, loss: 1.1971514225006104\r\nStep 1621, loss: 1.1957592964172363\r\nStep 1622, loss: 1.1836928129196167\r\nStep 1623, loss: 1.186079978942871\r\nStep 1624, loss: 1.1756891012191772\r\nStep 1625, loss: 1.1933389902114868\r\nStep 1626, loss: 1.1811907291412354\r\nStep 1627, loss: 1.177831768989563\r\nStep 1628, loss: 1.1701387166976929\r\nStep 1629, loss: 1.1908719539642334\r\nStep 1630, loss: 1.1843504905700684\r\nStep 1631, loss: 1.1708614826202393\r\nStep 1632, loss: 1.1714924573898315\r\nStep 1633, loss: 1.2788830995559692\r\nStep 1634, loss: 1.172526240348816\r\nStep 1635, loss: 1.168342113494873\r\nStep 1636, loss: 1.1739929914474487\r\nStep 1637, loss: 1.158413290977478\r\nStep 1638, loss: 1.1512421369552612\r\nStep 1639, loss: 1.1585488319396973\r\nStep 1640, loss: 1.1585663557052612\r\n",,terminal_output +3320,9061827,"TERMINAL",0,0,"6\t ",,terminal_output +3321,9062849,"TERMINAL",0,0,"7\t ",,terminal_output +3322,9063875,"TERMINAL",0,0,"8\t ",,terminal_output +3323,9064848,"TERMINAL",0,0,"9\t ",,terminal_output +3324,9065951,"TERMINAL",0,0,"20\t ",,terminal_output +3325,9066756,"TERMINAL",0,0,"Step 1641, loss: 1.1575082540512085\r\nStep 1642, loss: 1.1623197793960571\r\nStep 1643, loss: 1.1490757465362549\r\nStep 1644, loss: 1.145233392715454\r\nStep 1645, loss: 1.1396836042404175\r\nStep 1646, loss: 1.1457051038742065\r\nStep 1647, loss: 1.1572554111480713\r\nStep 1648, loss: 1.2759195566177368\r\nStep 1649, loss: 1.1453298330307007\r\nStep 1650, loss: 1.1437950134277344\r\nStep 1651, loss: 1.1348096132278442\r\nStep 1652, loss: 1.1407928466796875\r\nStep 1653, loss: 1.1393671035766602\r\nStep 1654, loss: 1.1337950229644775\r\nStep 1655, loss: 1.2426669597625732\r\nStep 1656, loss: 1.1291465759277344\r\nStep 1657, loss: 1.1153610944747925\r\nStep 1658, loss: 1.1227707862854004\r\nStep 1659, loss: 1.1255298852920532\r\nStep 1660, loss: 1.118461012840271\r\nStep 1661, loss: 1.1216777563095093\r\nStep 1662, loss: 1.1200631856918335\r\nStep 1663, loss: 1.118934154510498\r\nStep 1664, loss: 1.1120585203170776\r\nStep 1665, loss: 1.1082597970962524\r\nStep 1666, loss: 1.1189956665039062\r\nStep 1667, loss: 
1.1025173664093018\r\nStep 1668, loss: 1.0999367237091064\r\nStep 1669, loss: 1.2114607095718384\r\nStep 1670, loss: 1.1073620319366455\r\nStep 1671, loss: 1.1064236164093018\r\nStep 1672, loss: 1.1020593643188477\r\nStep 1673, loss: 1.0943208932876587\r\nStep 1674, loss: 1.0835918188095093\r\nStep 1675, loss: 1.0953891277313232\r\nStep 1676, loss: 1.0933635234832764\r\nStep 1677, loss: 1.0920809507369995\r\nStep 1678, loss: 1.0938409566879272\r\nStep 1679, loss: 1.1751666069030762\r\nStep 1680, loss: 1.0918182134628296\r\nStep 1681, loss: 1.0905475616455078\r\nStep 1682, loss: 1.1852188110351562\r\nStep 1683, loss: 1.073499321937561\r\nStep 1684, loss: 1.0783085823059082\r\nStep 1685, loss: 1.0718263387680054\r\nStep 1686, loss: 1.0742924213409424\r\nStep 1687, loss: 1.0737119913101196\r\nStep 1688, loss: 1.0668221712112427\r\nStep 1689, loss: 1.0683817863464355\r\nStep 1690, loss: 1.0658117532730103\r\nStep 1691, loss: 1.0660737752914429\r\nStep 1692, loss: 1.0616272687911987\r\nStep 1693, loss: 1.067017674446106\r\nStep 1694, loss: 1.057036280632019\r\nStep 1695, loss: 1.0613422393798828\r\nStep 1696, loss: 1.066922903060913\r\nStep 1697, loss: 1.0565149784088135\r\nStep 1698, loss: 1.1640924215316772\r\nStep 1699, loss: 1.047710657119751\r\nStep 1700, loss: 1.0517164468765259\r\nStep 1701, loss: 1.054610013961792\r\nStep 1702, loss: 1.0416030883789062\r\nStep 1703, loss: 1.0418587923049927\r\nStep 1704, loss: 1.0473809242248535\r\nStep 1705, loss: 1.0443155765533447\r\nStep 1706, loss: 1.0442721843719482\r\nStep 1707, loss: 1.0429277420043945\r\nStep 1708, loss: 1.0399023294448853\r\nStep 1709, loss: 1.038165807723999\r\nStep 1710, loss: 1.0345982313156128\r\nStep 1711, loss: 1.0352866649627686\r\nStep 1712, loss: 1.0346559286117554\r\nStep 1713, loss: 1.2773017883300781\r\nStep 1714, loss: 1.045367956161499\r\nStep 1715, loss: 1.0411508083343506\r\nStep 1716, loss: 1.0367709398269653\r\nStep 1717, loss: 1.0227259397506714\r\nStep 1718, loss: 1.0185867547988892\r\nStep 1719, loss: 1.014642596244812\r\nStep 1720, loss: 1.0197434425354004\r\nStep 1721, loss: 1.026314377784729\r\nStep 1722, loss: 1.0181000232696533\r\nStep 1723, loss: 1.1157170534133911\r\nStep 1724, loss: 1.0141574144363403\r\nStep 1725, loss: 1.0174330472946167\r\nStep 1726, loss: 1.0138992071151733\r\nStep 1727, loss: 0.9918619394302368\r\nStep 1728, loss: 1.0107418298721313\r\nStep 1729, loss: 0.9952065944671631\r\nStep 1730, loss: 1.0064640045166016\r\nStep 1731, loss: 1.0054000616073608\r\nStep 1732, loss: 0.9933749437332153\r\nStep 1733, loss: 1.0018892288208008\r\nStep 1734, loss: 0.9894110560417175\r\nStep 1735, loss: 0.9878826141357422\r\nStep 1736, loss: 1.0006074905395508\r\nStep 1737, loss: 0.9889177083969116\r\nStep 1738, loss: 1.0929206609725952\r\nStep 1739, loss: 0.9878960847854614\r\nStep 1740, loss: 0.9838866591453552\r\nStep 1741, loss: 0.9833965301513672\r\nStep 1742, loss: 0.9826670289039612\r\nStep 1743, loss: 0.9810364842414856\r\nStep 1744, loss: 0.9878240823745728\r\nStep 1745, loss: 0.9814496636390686\r\nStep 1746, loss: 0.9778909683227539\r\nStep 1747, loss: 0.9853712916374207\r\nStep 1748, loss: 0.9782893657684326\r\nStep 1749, loss: 0.9824138879776001\r\nStep 1750, loss: 0.9626322388648987\r\nStep 1751, loss: 1.063523530960083\r\nStep 1752, loss: 0.9761264324188232\r\nStep 1753, loss: 0.9648455381393433\r\nStep 1754, loss: 0.9596285820007324\r\nStep 1755, loss: 0.9551953077316284\r\nStep 1756, loss: 0.9639601111412048\r\nStep 1757, loss: 0.9567497968673706\r\nStep 1758, loss: 
1.1254595518112183\r\nStep 1759, loss: 0.9770886898040771\r\nStep 1760, loss: 0.9580786228179932\r\nStep 1761, loss: 0.9613488912582397\r\nStep 1762, loss: 0.9647582769393921\r\nStep 1763, loss: 0.9487085342407227\r\nStep 1764, loss: 0.9428144693374634\r\nStep 1765, loss: 0.9568584561347961\r\nStep 1766, loss: 0.952717661857605\r\nStep 1767, loss: 0.9403550624847412\r\nStep 1768, loss: 1.0372955799102783\r\nStep 1769, loss: 0.9473881721496582\r\nStep 1770, loss: 0.9364010095596313\r\nStep 1771, loss: 0.9271436929702759\r\nStep 1772, loss: 0.9434659481048584\r\nStep 1773, loss: 0.9356627464294434\r\nStep 1774, loss: 0.9386414885520935\r\nStep 1775, loss: 0.9320327043533325\r\nStep 1776, loss: 0.9249137043952942\r\nStep 1777, loss: 0.9340690970420837\r\nStep 1778, loss: 0.9273337721824646\r\nStep 1779, loss: 0.9277576208114624\r\nStep 1780, loss: 0.9223271608352661\r\nStep 1781, loss: 0.9189529418945312\r\nStep 1782, loss: 0.915803074836731\r\nStep 1783, loss: 1.028090238571167\r\nStep 1784, loss: 0.9180362820625305\r\nStep 1785, loss: 0.9141557216644287\r\nStep 1786, loss: 0.9120405912399292\r\nStep 1787, loss: 0.9194408655166626\r\nStep 1788, loss: 0.9220506548881531\r\nStep 1789, loss: 0.8990026116371155\r\nStep 1790, loss: 0.9154289960861206\r\nStep 1791, loss: 0.9112024903297424\r\nStep 1792, loss: 0.9121266603469849\r\nStep 1793, loss: 0.8971863985061646\r\nStep 1794, loss: 0.8919577598571777\r\nStep 1795, loss: 0.8868368864059448\r\nStep 1796, loss: 0.9041306972503662\r\nStep 1797, loss: 0.9064286947250366\r\nStep 1798, loss: 0.8876680135726929\r\nStep 1799, loss: 0.9016667604446411\r\nStep 1800, loss: 0.8877162933349609\r\nStep 1801, loss: 0.9951990246772766\r\nStep 1802, loss: 0.8920632600784302\r\nStep 1803, loss: 0.8897810578346252\r\nStep 1804, loss: 0.8883768916130066\r\nStep 1805, loss: 0.8987430334091187\r\nStep 1806, loss: 0.8828654885292053\r\nStep 1807, loss: 0.8838382363319397\r\nStep 1808, loss: 0.8885656595230103\r\nStep 1809, loss: 0.8862701058387756\r\nStep 1810, loss: 0.8820780515670776\r\nStep 1811, loss: 0.8712248206138611\r\nStep 1812, loss: 0.8799262642860413\r\nStep 1813, loss: 0.8782516717910767\r\nStep 1814, loss: 0.9712745547294617\r\nStep 1815, loss: 0.8668939471244812\r\nStep 1816, loss: 0.8768219351768494\r\nStep 1817, loss: 0.876340389251709\r\nStep 1818, loss: 0.876940131187439\r\nStep 1819, loss: 0.8597384095191956\r\nStep 1820, loss: 0.8755766153335571\r\nStep 1821, loss: 0.8613888621330261\r\nStep 1822, loss: 0.8674113750457764\r\nStep 1823, loss: 0.8636924028396606\r\nStep 1824, loss: 0.8523051738739014\r\nStep 1825, loss: 0.8587470054626465\r\nStep 1826, loss: 0.8656541705131531\r\nStep 1827, loss: 0.8688967227935791\r\nStep 1828, loss: 0.8599430918693542\r\nStep 1829, loss: 0.8434479832649231\r\nStep 1830, loss: 0.8520375490188599\r\nStep 1831, loss: 0.9511395692825317\r\nStep 1832, loss: 0.8548341989517212\r\nStep 1833, loss: 0.8453274369239807\r\nStep 1834, loss: 0.8406016230583191\r\nStep 1835, loss: 0.8386405110359192\r\nStep 1836, loss: 0.8389255404472351\r\nStep 1837, loss: 0.8428724408149719\r\nStep 1838, loss: 0.8377772569656372\r\nStep 1839, loss: 0.8427668809890747\r\nStep 1840, loss: 0.8336983919143677\r\nStep 1841, loss: 0.9654953479766846\r\nStep 1842, loss: 0.8485478162765503\r\nStep 1843, loss: 0.8341248631477356\r\nStep 1844, loss: 0.8480097651481628\r\nStep 1845, loss: 0.837020993232727\r\nStep 1846, loss: 0.8379396796226501\r\nStep 1847, loss: 0.8319339156150818\r\nStep 1848, loss: 0.9217866063117981\r\nStep 1849, loss: 
0.8317819237709045\r\nStep 1850, loss: 0.8330875635147095\r\nStep 1851, loss: 0.832604169845581\r\nStep 1852, loss: 0.8178485035896301\r\nStep 1853, loss: 0.8140307068824768\r\nStep 1854, loss: 0.8318454027175903\r\nStep 1855, loss: 0.8156373500823975\r\nStep 1856, loss: 0.8187299966812134\r\nStep 1857, loss: 0.8146570324897766\r\nStep 1858, loss: 0.8142596483230591\r\nStep 1859, loss: 0.803876519203186\r\nStep 1860, loss: 0.9086700677871704\r\nStep 1861, loss: 0.8158981204032898\r\nStep 1862, loss: 0.8103066682815552\r\nStep 1863, loss: 0.7962865233421326\r\nStep 1864, loss: 0.801090657711029\r\nStep 1865, loss: 0.7913706302642822\r\nStep 1866, loss: 0.7950624823570251\r\nStep 1867, loss: 0.7908596992492676\r\nStep 1868, loss: 0.7982431054115295\r\n",,terminal_output +3326,9066931,"TERMINAL",0,0,"1\t ",,terminal_output +3327,9067957,"TERMINAL",0,0,"2\t ",,terminal_output +3328,9069093,"TERMINAL",0,0,"3\t ",,terminal_output +3329,9070041,"TERMINAL",0,0,"4\t ",,terminal_output +3330,9071141,"TERMINAL",0,0,"5\t ",,terminal_output +3331,9071657,"TERMINAL",0,0,"Step 1869, loss: 0.8013994097709656\r\nStep 1870, loss: 0.7943992018699646\r\nStep 1871, loss: 0.7964866161346436\r\nStep 1872, loss: 0.7887362837791443\r\nStep 1873, loss: 0.7904248833656311\r\nStep 1874, loss: 0.8990494012832642\r\nStep 1875, loss: 0.7985700964927673\r\nStep 1876, loss: 0.7889529466629028\r\nStep 1877, loss: 0.7965757846832275\r\nStep 1878, loss: 0.7859654426574707\r\nStep 1879, loss: 0.7801169157028198\r\nStep 1880, loss: 0.775775134563446\r\nStep 1881, loss: 0.7798635363578796\r\nStep 1882, loss: 0.7717139720916748\r\nStep 1883, loss: 0.78225177526474\r\nStep 1884, loss: 0.7686631679534912\r\nStep 1885, loss: 0.7760396599769592\r\nStep 1886, loss: 0.7769378423690796\r\nStep 1887, loss: 0.7714008688926697\r\nStep 1888, loss: 0.7722853422164917\r\nStep 1889, loss: 0.8671911954879761\r\nStep 1890, loss: 0.7611005306243896\r\nStep 1891, loss: 0.7675574421882629\r\nStep 1892, loss: 0.7573772668838501\r\nStep 1893, loss: 0.7656370401382446\r\nStep 1894, loss: 0.7487317323684692\r\nStep 1895, loss: 0.7542077898979187\r\nStep 1896, loss: 0.7497702836990356\r\nStep 1897, loss: 0.7576917409896851\r\nStep 1898, loss: 0.7604444622993469\r\nStep 1899, loss: 0.7518671154975891\r\nStep 1900, loss: 0.7523936629295349\r\nStep 1901, loss: 0.7576996684074402\r\nStep 1902, loss: 0.7424939870834351\r\nStep 1903, loss: 0.8545480370521545\r\nStep 1904, loss: 0.7409409284591675\r\nStep 1905, loss: 0.7449516654014587\r\nStep 1906, loss: 0.7388997077941895\r\nStep 1907, loss: 0.7408303618431091\r\nStep 1908, loss: 0.7415260076522827\r\nStep 1909, loss: 0.7380179762840271\r\nStep 1910, loss: 0.7374407649040222\r\nStep 1911, loss: 0.7323340177536011\r\nStep 1912, loss: 0.7414637804031372\r\nStep 1913, loss: 0.7349995970726013\r\nStep 1914, loss: 0.7430143356323242\r\nStep 1915, loss: 0.7260279059410095\r\nStep 1916, loss: 0.7375624179840088\r\nStep 1917, loss: 0.7269505262374878\r\nStep 1918, loss: 0.7256373763084412\r\nStep 1919, loss: 0.7313711047172546\r\nStep 1920, loss: 0.9093896746635437\r\nStep 1921, loss: 0.7346041202545166\r\nStep 1922, loss: 0.7225201725959778\r\nStep 1923, loss: 0.7107158303260803\r\nStep 1924, loss: 0.7150481939315796\r\nStep 1925, loss: 0.7252671718597412\r\nStep 1926, loss: 0.7118439674377441\r\nStep 1927, loss: 0.7165244221687317\r\nStep 1928, loss: 0.7088948488235474\r\nStep 1929, loss: 0.72013920545578\r\nStep 1930, loss: 0.7062170505523682\r\nStep 1931, loss: 0.7096347212791443\r\nStep 1932, loss: 
0.7208948731422424\r\nStep 1933, loss: 0.7157233953475952\r\nStep 1934, loss: 0.6992756128311157\r\nStep 1935, loss: 0.7098996043205261\r\nStep 1936, loss: 0.8454945683479309\r\nStep 1937, loss: 0.7339466214179993\r\nStep 1938, loss: 0.7015736103057861\r\nStep 1939, loss: 0.8106749057769775\r\nStep 1940, loss: 0.7038298845291138\r\nStep 1941, loss: 0.7139207124710083\r\nStep 1942, loss: 0.7084845304489136\r\nStep 1943, loss: 0.6959096193313599\r\nStep 1944, loss: 0.7009268999099731\r\nStep 1945, loss: 0.6987021565437317\r\nStep 1946, loss: 0.6886452436447144\r\nStep 1947, loss: 0.7015690803527832\r\nStep 1948, loss: 0.6846963167190552\r\nStep 1949, loss: 0.6853471994400024\r\nStep 1950, loss: 0.6912521123886108\r\nStep 1951, loss: 0.68869948387146\r\nStep 1952, loss: 0.6884112358093262\r\nStep 1953, loss: 0.6754599809646606\r\nStep 1954, loss: 0.6828229427337646\r\nStep 1955, loss: 0.6892924308776855\r\nStep 1956, loss: 0.7855614423751831\r\nStep 1957, loss: 0.6837025284767151\r\nStep 1958, loss: 0.6705043911933899\r\nStep 1959, loss: 0.6884928941726685\r\nStep 1960, loss: 0.6750643253326416\r\nStep 1961, loss: 0.6799752712249756\r\nStep 1962, loss: 0.6649171710014343\r\nStep 1963, loss: 0.6792445778846741\r\nStep 1964, loss: 0.6725846529006958\r\nStep 1965, loss: 0.6655964255332947\r\nStep 1966, loss: 0.670922040939331\r\nStep 1967, loss: 0.673560619354248\r\nStep 1968, loss: 0.6575588583946228\r\nStep 1969, loss: 0.6685384511947632\r\nStep 1970, loss: 0.6653204560279846\r\nStep 1971, loss: 0.6696574687957764\r\nStep 1972, loss: 0.7669012546539307\r\nStep 1973, loss: 0.6599910259246826\r\nStep 1974, loss: 0.6570128798484802\r\nStep 1975, loss: 0.6516786217689514\r\nStep 1976, loss: 0.6540130376815796\r\nStep 1977, loss: 0.6510182619094849\r\nStep 1978, loss: 0.6531619429588318\r\nStep 1979, loss: 0.6556141376495361\r\nStep 1980, loss: 0.6483604311943054\r\nStep 1981, loss: 0.6505267024040222\r\nStep 1982, loss: 0.6507347226142883\r\nStep 1983, loss: 0.6492764353752136\r\nStep 1984, loss: 0.6426064372062683\r\nStep 1985, loss: 0.6391268372535706\r\nStep 1986, loss: 0.6384880542755127\r\nStep 1987, loss: 0.636154055595398\r\nStep 1988, loss: 0.7327672243118286\r\nStep 1989, loss: 0.6370413899421692\r\nStep 1990, loss: 0.6439313888549805\r\nStep 1991, loss: 0.6394111514091492\r\nStep 1992, loss: 0.6375075578689575\r\nStep 1993, loss: 0.6265296936035156\r\nStep 1994, loss: 0.6309429407119751\r\nStep 1995, loss: 0.6326918005943298\r\nStep 1996, loss: 0.6395416259765625\r\nStep 1997, loss: 0.6266369819641113\r\nStep 1998, loss: 0.6305047869682312\r\nStep 1999, loss: 0.6290154457092285\r\nSaved checkpoint at step 2000\r\nStep 2000, loss: 0.6319228410720825\r\nStep 2001, loss: 0.6194838285446167\r\nStep 2002, loss: 0.6199934482574463\r\nStep 2003, loss: 0.6252915263175964\r\nStep 2004, loss: 0.7864132523536682\r\nStep 2005, loss: 0.6273344159126282\r\nStep 2006, loss: 0.6178197264671326\r\nStep 2007, loss: 0.6311671137809753\r\nStep 2008, loss: 0.6205786466598511\r\nStep 2009, loss: 0.6215342879295349\r\nStep 2010, loss: 0.6068145036697388\r\nStep 2011, loss: 0.6172385811805725\r\nStep 2012, loss: 0.6100578308105469\r\nStep 2013, loss: 0.6121529936790466\r\nStep 2014, loss: 0.6144748330116272\r\nStep 2015, loss: 0.6064121723175049\r\nStep 2016, loss: 0.6134046912193298\r\nStep 2017, loss: 0.6149330139160156\r\nStep 2018, loss: 0.6091203689575195\r\nStep 2019, loss: 0.6103823184967041\r\nStep 2020, loss: 0.61045902967453\r\nStep 2021, loss: 0.8372605443000793\r\nStep 2022, loss: 
0.6210668087005615\r\nStep 2023, loss: 0.6092928647994995\r\nStep 2024, loss: 0.6066732406616211\r\nStep 2025, loss: 0.5997593402862549\r\nStep 2026, loss: 0.6039242744445801\r\nStep 2027, loss: 0.5953725576400757\r\nStep 2028, loss: 0.6040375828742981\r\nStep 2029, loss: 0.6001499891281128\r\nStep 2030, loss: 0.5930587649345398\r\nStep 2031, loss: 0.5993248820304871\r\nStep 2032, loss: 0.5977038741111755\r\nStep 2033, loss: 0.6974673271179199\r\nStep 2034, loss: 0.5890640616416931\r\nStep 2035, loss: 0.5961368680000305\r\nStep 2036, loss: 0.5953733921051025\r\nStep 2037, loss: 0.585129976272583\r\nStep 2038, loss: 0.5957796573638916\r\nStep 2039, loss: 0.5824058055877686\r\nStep 2040, loss: 0.5829987525939941\r\nStep 2041, loss: 0.5853310227394104\r\nStep 2042, loss: 0.5873395800590515\r\nStep 2043, loss: 0.5758510231971741\r\nStep 2044, loss: 0.5773819088935852\r\nStep 2045, loss: 0.5731393098831177\r\nStep 2046, loss: 0.6647065281867981\r\nStep 2047, loss: 0.5828980803489685\r\nStep 2048, loss: 0.5727710127830505\r\nStep 2049, loss: 0.5759400725364685\r\nStep 2050, loss: 0.5705195069313049\r\nStep 2051, loss: 0.5682947039604187\r\nStep 2052, loss: 0.5711899399757385\r\nStep 2053, loss: 0.5765246152877808\r\nStep 2054, loss: 0.5740693807601929\r\nStep 2055, loss: 0.5681224465370178\r\nStep 2056, loss: 0.5632190108299255\r\nStep 2057, loss: 0.5696794986724854\r\nStep 2058, loss: 0.5608694553375244\r\nStep 2059, loss: 0.5680816173553467\r\nStep 2060, loss: 0.6688662767410278\r\nStep 2061, loss: 0.5686497092247009\r\nStep 2062, loss: 0.5587794780731201\r\nStep 2063, loss: 0.5658949017524719\r\nStep 2064, loss: 0.5724101066589355\r\nStep 2065, loss: 0.5634804964065552\r\nStep 2066, loss: 0.556942880153656\r\nStep 2067, loss: 0.5587777495384216\r\nStep 2068, loss: 0.5545395016670227\r\nStep 2069, loss: 0.5536628365516663\r\nStep 2070, loss: 0.5527036786079407\r\nStep 2071, loss: 0.5559666752815247\r\nStep 2072, loss: 0.5528866052627563\r\nStep 2073, loss: 0.5499931573867798\r\nStep 2074, loss: 0.5496010780334473\r\nStep 2075, loss: 0.5506083965301514\r\nStep 2076, loss: 0.6515548229217529\r\nStep 2077, loss: 0.546619176864624\r\nStep 2078, loss: 0.5433439016342163\r\nStep 2079, loss: 0.530807375907898\r\nStep 2080, loss: 0.5434479713439941\r\nStep 2081, loss: 0.5402997136116028\r\nStep 2082, loss: 0.5515939593315125\r\nStep 2083, loss: 0.5477591156959534\r\nStep 2084, loss: 0.5466631650924683\r\nStep 2085, loss: 0.5343933701515198\r\nStep 2086, loss: 0.5378848910331726\r\nStep 2087, loss: 0.5318010449409485\r\nStep 2088, loss: 0.5338581204414368\r\nStep 2089, loss: 0.5445492267608643\r\nStep 2090, loss: 0.5263606309890747\r\nStep 2091, loss: 0.5245886445045471\r\nStep 2092, loss: 0.6188791990280151\r\nStep 2093, loss: 0.5256460905075073\r\nStep 2094, loss: 0.5209018588066101\r\nStep 2095, loss: 0.5272752642631531\r\n",,terminal_output +3332,9072123,"TERMINAL",0,0,"6\t ",,terminal_output +3333,9073164,"TERMINAL",0,0,"7\t ",,terminal_output +3334,9074217,"TERMINAL",0,0,"8\t ",,terminal_output +3335,9075342,"TERMINAL",0,0,"9\t ",,terminal_output +3336,9076274,"TERMINAL",0,0,"30\t ",,terminal_output +3337,9077083,"TERMINAL",0,0,"Step 2096, loss: 0.5212636590003967\r\nStep 2097, loss: 0.5278089046478271\r\nStep 2098, loss: 0.5258370041847229\r\nStep 2099, loss: 0.5277998447418213\r\nStep 2100, loss: 0.6488233208656311\r\nStep 2101, loss: 0.5364575982093811\r\nStep 2102, loss: 0.5376482009887695\r\nStep 2103, loss: 0.5207223892211914\r\nStep 2104, loss: 0.5308751463890076\r\nStep 2105, loss: 
0.5147538185119629\r\nStep 2106, loss: 0.5260673761367798\r\nStep 2107, loss: 0.5147553086280823\r\nStep 2108, loss: 0.5168204307556152\r\nStep 2109, loss: 0.5083422660827637\r\nStep 2110, loss: 0.5098488330841064\r\nStep 2111, loss: 0.5158275365829468\r\nStep 2112, loss: 0.5113270282745361\r\nStep 2113, loss: 0.6170437335968018\r\nStep 2114, loss: 0.5087074637413025\r\nStep 2115, loss: 0.5100061893463135\r\nStep 2116, loss: 0.515216052532196\r\nStep 2117, loss: 0.5012286305427551\r\nStep 2118, loss: 0.5050649046897888\r\nStep 2119, loss: 0.5078508257865906\r\nStep 2120, loss: 0.502376914024353\r\nStep 2121, loss: 0.5077912211418152\r\nStep 2122, loss: 0.5032449960708618\r\nStep 2123, loss: 0.49574849009513855\r\nStep 2124, loss: 0.4862176477909088\r\nStep 2125, loss: 0.5957979559898376\r\nStep 2126, loss: 0.5001740455627441\r\nStep 2127, loss: 0.49359288811683655\r\nStep 2128, loss: 0.49512845277786255\r\nStep 2129, loss: 0.49303925037384033\r\nStep 2130, loss: 0.49276262521743774\r\nStep 2131, loss: 0.4916940927505493\r\nStep 2132, loss: 0.4868279993534088\r\nStep 2133, loss: 0.48150238394737244\r\nStep 2134, loss: 0.4891718626022339\r\nStep 2135, loss: 0.4835034906864166\r\nStep 2136, loss: 0.4893358051776886\r\nStep 2137, loss: 0.47884175181388855\r\nStep 2138, loss: 0.4791523814201355\r\nStep 2139, loss: 0.4819422662258148\r\nStep 2140, loss: 0.5761228799819946\r\nStep 2141, loss: 0.47981172800064087\r\nStep 2142, loss: 0.4805384576320648\r\nStep 2143, loss: 0.47608256340026855\r\nStep 2144, loss: 0.47663164138793945\r\nStep 2145, loss: 0.47635191679000854\r\nStep 2146, loss: 0.4741176962852478\r\nStep 2147, loss: 0.471931517124176\r\nStep 2148, loss: 0.47936582565307617\r\nStep 2149, loss: 0.4796225428581238\r\nStep 2150, loss: 0.4803331792354584\r\nStep 2151, loss: 0.46910783648490906\r\nStep 2152, loss: 0.47940436005592346\r\nStep 2153, loss: 0.4673788547515869\r\nStep 2154, loss: 0.468284010887146\r\nStep 2155, loss: 0.46488890051841736\r\nStep 2156, loss: 0.4716155230998993\r\nStep 2157, loss: 0.5599209666252136\r\nStep 2158, loss: 0.48053115606307983\r\nStep 2159, loss: 0.46765685081481934\r\nStep 2160, loss: 0.4609626233577728\r\nStep 2161, loss: 0.4626825749874115\r\nStep 2162, loss: 0.4679463803768158\r\nStep 2163, loss: 0.45684894919395447\r\nStep 2164, loss: 0.4565001428127289\r\nStep 2165, loss: 0.46116912364959717\r\nStep 2166, loss: 0.4547851085662842\r\nStep 2167, loss: 0.4591623544692993\r\nStep 2168, loss: 0.44205763936042786\r\nStep 2169, loss: 0.46447935700416565\r\nStep 2170, loss: 0.45087510347366333\r\nStep 2171, loss: 0.4438648819923401\r\nStep 2172, loss: 0.4573356807231903\r\nStep 2173, loss: 0.6683803796768188\r\nStep 2174, loss: 0.4656687080860138\r\nStep 2175, loss: 0.4606297016143799\r\nStep 2176, loss: 0.45375940203666687\r\nStep 2177, loss: 0.44863179326057434\r\nStep 2178, loss: 0.45998552441596985\r\nStep 2179, loss: 0.4552098512649536\r\nStep 2180, loss: 0.45171457529067993\r\nStep 2181, loss: 0.4566428065299988\r\nStep 2182, loss: 0.4382454454898834\r\nStep 2183, loss: 0.43773987889289856\r\nStep 2184, loss: 0.4449123740196228\r\nStep 2185, loss: 0.44087713956832886\r\nStep 2186, loss: 0.4387533962726593\r\nStep 2187, loss: 0.5313928127288818\r\nStep 2188, loss: 0.4321961998939514\r\nStep 2189, loss: 0.4368981719017029\r\nStep 2190, loss: 0.43014538288116455\r\nStep 2191, loss: 0.4281185567378998\r\nStep 2192, loss: 0.43787682056427\r\nStep 2193, loss: 0.4271772801876068\r\nStep 2194, loss: 0.4357897639274597\r\nStep 2195, loss: 
0.43802332878112793\r\nStep 2196, loss: 0.43183228373527527\r\nStep 2197, loss: 0.43118926882743835\r\nStep 2198, loss: 0.4292874336242676\r\nStep 2199, loss: 0.4392535090446472\r\nStep 2200, loss: 0.4270024299621582\r\nStep 2201, loss: 0.43025389313697815\r\nStep 2202, loss: 0.43268051743507385\r\nStep 2203, loss: 0.5129120945930481\r\nStep 2204, loss: 0.4270171821117401\r\nStep 2205, loss: 0.4192140996456146\r\nStep 2206, loss: 0.41680800914764404\r\nStep 2207, loss: 0.4158876836299896\r\nStep 2208, loss: 0.4207972586154938\r\nStep 2209, loss: 0.4207616448402405\r\nStep 2210, loss: 0.4151919484138489\r\nStep 2211, loss: 0.4171488285064697\r\nStep 2212, loss: 0.4194885194301605\r\nStep 2213, loss: 0.4129771590232849\r\nStep 2214, loss: 0.4190998673439026\r\nStep 2215, loss: 0.41907957196235657\r\nStep 2216, loss: 0.41032934188842773\r\nStep 2217, loss: 0.4204437732696533\r\nStep 2218, loss: 0.40859562158584595\r\nStep 2219, loss: 0.4980238974094391\r\nStep 2220, loss: 0.41910886764526367\r\nStep 2221, loss: 0.41219618916511536\r\nStep 2222, loss: 0.4117208421230316\r\nStep 2223, loss: 0.41333794593811035\r\nStep 2224, loss: 0.4129675626754761\r\nStep 2225, loss: 0.40091776847839355\r\nStep 2226, loss: 0.41056257486343384\r\nStep 2227, loss: 0.4141392111778259\r\nStep 2228, loss: 0.4045460522174835\r\nStep 2229, loss: 0.39880332350730896\r\nStep 2230, loss: 0.4045920670032501\r\nStep 2231, loss: 0.4065803289413452\r\nStep 2232, loss: 0.4055696427822113\r\nStep 2233, loss: 0.40113529562950134\r\nStep 2234, loss: 0.4059527814388275\r\nStep 2235, loss: 0.4052909314632416\r\nStep 2236, loss: 0.39830639958381653\r\nStep 2237, loss: 0.49375131726264954\r\nStep 2238, loss: 0.4010924994945526\r\nStep 2239, loss: 0.39894533157348633\r\nStep 2240, loss: 0.39669832587242126\r\nStep 2241, loss: 0.3950706720352173\r\nStep 2242, loss: 0.40025651454925537\r\nStep 2243, loss: 0.39517953991889954\r\nStep 2244, loss: 0.39792153239250183\r\nStep 2245, loss: 0.39419618248939514\r\nStep 2246, loss: 0.3843322694301605\r\nStep 2247, loss: 0.3885559141635895\r\nStep 2248, loss: 0.38453638553619385\r\nStep 2249, loss: 0.38751882314682007\r\nStep 2250, loss: 0.3934539258480072\r\nStep 2251, loss: 0.38690078258514404\r\nStep 2252, loss: 0.3984197676181793\r\nStep 2253, loss: 0.47288036346435547\r\nStep 2254, loss: 0.3879437744617462\r\nStep 2255, loss: 0.38433852791786194\r\nStep 2256, loss: 0.3830108344554901\r\nStep 2257, loss: 0.3923860192298889\r\nStep 2258, loss: 0.37548089027404785\r\nStep 2259, loss: 0.3731031119823456\r\nStep 2260, loss: 0.3781687319278717\r\nStep 2261, loss: 0.3753912150859833\r\nStep 2262, loss: 0.3757671117782593\r\nStep 2263, loss: 0.3803831934928894\r\nStep 2264, loss: 0.37603095173835754\r\nStep 2265, loss: 0.38197657465934753\r\nStep 2266, loss: 0.3790059983730316\r\nStep 2267, loss: 0.37630966305732727\r\nStep 2268, loss: 0.3760080933570862\r\nStep 2269, loss: 0.37508484721183777\r\nStep 2270, loss: 0.45927029848098755\r\nStep 2271, loss: 0.3767974078655243\r\nStep 2272, loss: 0.3692813217639923\r\nStep 2273, loss: 0.3767513036727905\r\nStep 2274, loss: 0.37130147218704224\r\nStep 2275, loss: 0.3733128607273102\r\nStep 2276, loss: 0.36976340413093567\r\nStep 2277, loss: 0.37786638736724854\r\nStep 2278, loss: 0.362106591463089\r\nStep 2279, loss: 0.37079453468322754\r\nStep 2280, loss: 0.363018274307251\r\nStep 2281, loss: 0.4810994267463684\r\nStep 2282, loss: 0.3863673210144043\r\nStep 2283, loss: 0.37113410234451294\r\nStep 2284, loss: 0.3746160566806793\r\nStep 2285, loss: 
0.36919263005256653\r\nStep 2286, loss: 0.36635369062423706\r\nStep 2287, loss: 0.3752707839012146\r\nStep 2288, loss: 0.4464479684829712\r\nStep 2289, loss: 0.36326345801353455\r\nStep 2290, loss: 0.35734349489212036\r\nStep 2291, loss: 0.35312554240226746\r\nStep 2292, loss: 0.36095765233039856\r\nStep 2293, loss: 0.3593858778476715\r\nStep 2294, loss: 0.35560330748558044\r\nStep 2295, loss: 0.34909507632255554\r\nStep 2296, loss: 0.3544866740703583\r\nStep 2297, loss: 0.3569851219654083\r\nStep 2298, loss: 0.3554278612136841\r\nStep 2299, loss: 0.35207194089889526\r\nStep 2300, loss: 0.4324997365474701\r\nStep 2301, loss: 0.35332679748535156\r\nStep 2302, loss: 0.35623249411582947\r\nStep 2303, loss: 0.34783515334129333\r\nStep 2304, loss: 0.3446216583251953\r\nStep 2305, loss: 0.3529545068740845\r\nStep 2306, loss: 0.3474853038787842\r\nStep 2307, loss: 0.3459276854991913\r\nStep 2308, loss: 0.3498739004135132\r\nStep 2309, loss: 0.34094202518463135\r\nStep 2310, loss: 0.3492644429206848\r\nStep 2311, loss: 0.3467921018600464\r\nStep 2312, loss: 0.343462735414505\r\nStep 2313, loss: 0.33885112404823303\r\nStep 2314, loss: 0.33825334906578064\r\nStep 2315, loss: 0.3396425247192383\r\nStep 2316, loss: 0.33893468976020813\r\nStep 2317, loss: 0.3364556133747101\r\nStep 2318, loss: 0.42035937309265137\r\nStep 2319, loss: 0.34193581342697144\r\nStep 2320, loss: 0.34168219566345215\r\n",,terminal_output +3338,9077396,"TERMINAL",0,0,"1\t ",,terminal_output +3339,9078363,"TERMINAL",0,0,"3\t ",,terminal_output +3340,9079398,"TERMINAL",0,0,"4\t ",,terminal_output +3341,9080442,"TERMINAL",0,0,"5\t ",,terminal_output +3342,9081590,"TERMINAL",0,0,"6\t ",,terminal_output +3343,9082006,"TERMINAL",0,0,"Step 2321, loss: 0.34748852252960205\r\nStep 2322, loss: 0.3351535201072693\r\nStep 2323, loss: 0.3436218798160553\r\nStep 2324, loss: 0.34407466650009155\r\nStep 2325, loss: 0.33610495924949646\r\nStep 2326, loss: 0.3361627757549286\r\nStep 2327, loss: 0.33895155787467957\r\nStep 2328, loss: 0.33496925234794617\r\nStep 2329, loss: 0.3347941040992737\r\nStep 2330, loss: 0.33629879355430603\r\nStep 2331, loss: 0.3281131088733673\r\nStep 2332, loss: 0.3319389820098877\r\nStep 2333, loss: 0.33244460821151733\r\nStep 2334, loss: 0.41621413826942444\r\nStep 2335, loss: 0.328920841217041\r\nStep 2336, loss: 0.329171746969223\r\nStep 2337, loss: 0.3316762447357178\r\nStep 2338, loss: 0.33075010776519775\r\nStep 2339, loss: 0.3206152617931366\r\nStep 2340, loss: 0.33042535185813904\r\nStep 2341, loss: 0.3294980227947235\r\nStep 2342, loss: 0.32066476345062256\r\nStep 2343, loss: 0.31800323724746704\r\nStep 2344, loss: 0.32396551966667175\r\nStep 2345, loss: 0.32245951890945435\r\nStep 2346, loss: 0.31761887669563293\r\nStep 2347, loss: 0.32075512409210205\r\nStep 2348, loss: 0.3229900598526001\r\nStep 2349, loss: 0.32015398144721985\r\nStep 2350, loss: 0.40883833169937134\r\nStep 2351, loss: 0.32135042548179626\r\nStep 2352, loss: 0.32287824153900146\r\nStep 2353, loss: 0.3127676248550415\r\nStep 2354, loss: 0.31523168087005615\r\nStep 2355, loss: 0.32015034556388855\r\nStep 2356, loss: 0.4256949722766876\r\nStep 2357, loss: 0.3313594460487366\r\nStep 2358, loss: 0.3240962624549866\r\nStep 2359, loss: 0.31529396772384644\r\nStep 2360, loss: 0.3224615454673767\r\nStep 2361, loss: 0.3212662935256958\r\nStep 2362, loss: 0.31334006786346436\r\nStep 2363, loss: 0.31596922874450684\r\nStep 2364, loss: 0.3114641010761261\r\nStep 2365, loss: 0.3204165995121002\r\nStep 2366, loss: 0.305770605802536\r\nStep 2367, loss: 
0.3052954375743866\r\nStep 2368, loss: 0.30890998244285583\r\nStep 2369, loss: 0.3838890790939331\r\nStep 2370, loss: 0.30801668763160706\r\nStep 2371, loss: 0.30524954199790955\r\nStep 2372, loss: 0.3128449618816376\r\nStep 2373, loss: 0.30953916907310486\r\nStep 2374, loss: 0.2954831123352051\r\nStep 2375, loss: 0.30087417364120483\r\nStep 2376, loss: 0.3040201663970947\r\nStep 2377, loss: 0.2976074516773224\r\nStep 2378, loss: 0.29578670859336853\r\nStep 2379, loss: 0.2988859713077545\r\nStep 2380, loss: 0.29430949687957764\r\nStep 2381, loss: 0.2975861132144928\r\nStep 2382, loss: 0.2960820198059082\r\nStep 2383, loss: 0.29525962471961975\r\nStep 2384, loss: 0.29557013511657715\r\nStep 2385, loss: 0.3753916025161743\r\nStep 2386, loss: 0.28352048993110657\r\nStep 2387, loss: 0.29542505741119385\r\nStep 2388, loss: 0.2953447103500366\r\nStep 2389, loss: 0.2964453399181366\r\nStep 2390, loss: 0.2905368208885193\r\nStep 2391, loss: 0.3002040684223175\r\nStep 2392, loss: 0.2978457808494568\r\nStep 2393, loss: 0.2897167503833771\r\nStep 2394, loss: 0.2949385941028595\r\nStep 2395, loss: 0.2871512174606323\r\nStep 2396, loss: 0.2948032319545746\r\nStep 2397, loss: 0.28860315680503845\r\nStep 2398, loss: 0.2896987199783325\r\nStep 2399, loss: 0.2873709201812744\r\nStep 2400, loss: 0.2872734069824219\r\nStep 2401, loss: 0.28753602504730225\r\nStep 2402, loss: 0.36587661504745483\r\nStep 2403, loss: 0.28679630160331726\r\nStep 2404, loss: 0.2861040532588959\r\nStep 2405, loss: 0.2852390706539154\r\nStep 2406, loss: 0.28040459752082825\r\nStep 2407, loss: 0.2846375107765198\r\nStep 2408, loss: 0.27991223335266113\r\nStep 2409, loss: 0.2780855596065521\r\nStep 2410, loss: 0.28594571352005005\r\nStep 2411, loss: 0.2752811014652252\r\nStep 2412, loss: 0.2758810818195343\r\nStep 2413, loss: 0.27777647972106934\r\nStep 2414, loss: 0.27848711609840393\r\nStep 2415, loss: 0.27738943696022034\r\nStep 2416, loss: 0.280342161655426\r\nStep 2417, loss: 0.3576899468898773\r\nStep 2418, loss: 0.2772153913974762\r\nStep 2419, loss: 0.28241291642189026\r\nStep 2420, loss: 0.2769932746887207\r\nStep 2421, loss: 0.2776236832141876\r\nStep 2422, loss: 0.27697086334228516\r\nStep 2423, loss: 0.2742988169193268\r\nStep 2424, loss: 0.278632253408432\r\nStep 2425, loss: 0.27337175607681274\r\nStep 2426, loss: 0.27046409249305725\r\nStep 2427, loss: 0.2757404148578644\r\nStep 2428, loss: 0.2705932557582855\r\nStep 2429, loss: 0.2767161428928375\r\nStep 2430, loss: 0.5101476907730103\r\nStep 2431, loss: 0.27727630734443665\r\nStep 2432, loss: 0.2692122757434845\r\nStep 2433, loss: 0.27408885955810547\r\nStep 2434, loss: 0.2733619213104248\r\nStep 2435, loss: 0.2824040949344635\r\nStep 2436, loss: 0.2599729597568512\r\nStep 2437, loss: 0.2756614089012146\r\nStep 2438, loss: 0.3492714762687683\r\nStep 2439, loss: 0.2647572159767151\r\nStep 2440, loss: 0.2691785991191864\r\nStep 2441, loss: 0.2624354660511017\r\nStep 2442, loss: 0.2668564021587372\r\nStep 2443, loss: 0.25754573941230774\r\nStep 2444, loss: 0.26434415578842163\r\nStep 2445, loss: 0.2666802704334259\r\nStep 2446, loss: 0.27049127221107483\r\nStep 2447, loss: 0.26592740416526794\r\nStep 2448, loss: 0.26696309447288513\r\nStep 2449, loss: 0.25768670439720154\r\nStep 2450, loss: 0.2610768973827362\r\nStep 2451, loss: 0.2597978413105011\r\nStep 2452, loss: 0.2625565528869629\r\nStep 2453, loss: 0.26174697279930115\r\nStep 2454, loss: 0.25764939188957214\r\nStep 2455, loss: 0.25803661346435547\r\nStep 2456, loss: 0.2626917064189911\r\nStep 2457, loss: 
0.33977413177490234\r\nStep 2458, loss: 0.2540980577468872\r\nStep 2459, loss: 0.25510576367378235\r\nStep 2460, loss: 0.2590373158454895\r\nStep 2461, loss: 0.252870112657547\r\nStep 2462, loss: 0.2538451850414276\r\nStep 2463, loss: 0.2514367997646332\r\nStep 2464, loss: 0.2521975338459015\r\nStep 2465, loss: 0.24651870131492615\r\nStep 2466, loss: 0.2557665705680847\r\nStep 2467, loss: 0.25300830602645874\r\nStep 2468, loss: 0.24715769290924072\r\nStep 2469, loss: 0.2508199214935303\r\nStep 2470, loss: 0.2480732798576355\r\nStep 2471, loss: 0.24930962920188904\r\nStep 2472, loss: 0.25018852949142456\r\nStep 2473, loss: 0.24696186184883118\r\nStep 2474, loss: 0.2487451136112213\r\nStep 2475, loss: 0.24914410710334778\r\nStep 2476, loss: 0.323481947183609\r\nStep 2477, loss: 0.2461281716823578\r\nStep 2478, loss: 0.2413129210472107\r\nStep 2479, loss: 0.249737948179245\r\nStep 2480, loss: 0.24487949907779694\r\nStep 2481, loss: 0.23913051187992096\r\nStep 2482, loss: 0.24318009614944458\r\nStep 2483, loss: 0.24069182574748993\r\nStep 2484, loss: 0.23900924623012543\r\nStep 2485, loss: 0.24818991124629974\r\nStep 2486, loss: 0.23762230575084686\r\nStep 2487, loss: 0.24297109246253967\r\nStep 2488, loss: 0.24584054946899414\r\nStep 2489, loss: 0.24478642642498016\r\nStep 2490, loss: 0.23704954981803894\r\nStep 2491, loss: 0.23608098924160004\r\nStep 2492, loss: 0.3129092752933502\r\nStep 2493, loss: 0.23863020539283752\r\nStep 2494, loss: 0.24086515605449677\r\nStep 2495, loss: 0.23634691536426544\r\nStep 2496, loss: 0.2355962097644806\r\nStep 2497, loss: 0.2374601662158966\r\nStep 2498, loss: 0.23668569326400757\r\nStep 2499, loss: 0.23628289997577667\r\nStep 2500, loss: 0.23387545347213745\r\nStep 2501, loss: 0.23473934829235077\r\nStep 2502, loss: 0.22971905767917633\r\nStep 2503, loss: 0.23384438455104828\r\nStep 2504, loss: 0.22863338887691498\r\nStep 2505, loss: 0.23169486224651337\r\nStep 2506, loss: 0.22807873785495758\r\nStep 2507, loss: 0.2330167293548584\r\nStep 2508, loss: 0.3016507625579834\r\nStep 2509, loss: 0.225376158952713\r\nStep 2510, loss: 0.23221832513809204\r\nStep 2511, loss: 0.22739194333553314\r\nStep 2512, loss: 0.22929517924785614\r\nStep 2513, loss: 0.23101623356342316\r\nStep 2514, loss: 0.22787362337112427\r\nStep 2515, loss: 0.22529947757720947\r\nStep 2516, loss: 0.2324385643005371\r\nStep 2517, loss: 0.22370584309101105\r\nStep 2518, loss: 0.22221489250659943\r\nStep 2519, loss: 0.22735708951950073\r\nStep 2520, loss: 0.22793355584144592\r\nStep 2521, loss: 0.22090241312980652\r\nStep 2522, loss: 0.22481724619865417\r\nStep 2523, loss: 0.22290410101413727\r\nStep 2524, loss: 0.23127752542495728\r\nStep 2525, loss: 0.22206811606884003\r\nStep 2526, loss: 0.22751915454864502\r\nStep 2527, loss: 0.22633400559425354\r\nStep 2528, loss: 0.21965722739696503\r\nStep 2529, loss: 0.2174047827720642\r\nStep 2530, loss: 0.2894614338874817\r\nStep 2531, loss: 0.2178904265165329\r\nStep 2532, loss: 0.21862071752548218\r\nStep 2533, loss: 0.2153322696685791\r\nStep 2534, loss: 0.21768000721931458\r\nStep 2535, loss: 0.2161618322134018\r\nStep 2536, loss: 0.22249065339565277\r\nStep 2537, loss: 0.21888211369514465\r\nStep 2538, loss: 0.21098043024539948\r\nStep 2539, loss: 0.2141132652759552\r\nStep 2540, loss: 0.21615377068519592\r\nStep 2541, loss: 0.21049641072750092\r\nStep 2542, loss: 0.21619829535484314\r\nStep 2543, loss: 0.2137487232685089\r\nStep 2544, loss: 0.21337515115737915\r\n",,terminal_output +3344,9082520,"TERMINAL",0,0,"7\t ",,terminal_output 
+3345,9083560,"TERMINAL",0,0,"8\t ",,terminal_output +3346,9084866,"TERMINAL",0,0,"9\t ",,terminal_output +3347,9085791,"TERMINAL",0,0,"40\t ",,terminal_output +3348,9086967,"TERMINAL",0,0,"1\t ",,terminal_output +3349,9087450,"TERMINAL",0,0,"Step 2545, loss: 0.218998521566391\r\nStep 2546, loss: 0.3107515871524811\r\nStep 2547, loss: 0.22405172884464264\r\nStep 2548, loss: 0.21334095299243927\r\nStep 2549, loss: 0.2212003618478775\r\nStep 2550, loss: 0.2207515388727188\r\nStep 2551, loss: 0.21538092195987701\r\nStep 2552, loss: 0.29355910420417786\r\nStep 2553, loss: 0.2139543890953064\r\nStep 2554, loss: 0.21076732873916626\r\nStep 2555, loss: 0.21256500482559204\r\nStep 2556, loss: 0.21066352725028992\r\nStep 2557, loss: 0.2056945264339447\r\nStep 2558, loss: 0.21078351140022278\r\nStep 2559, loss: 0.20221185684204102\r\nStep 2560, loss: 0.20634253323078156\r\nStep 2561, loss: 0.2029818445444107\r\nStep 2562, loss: 0.2004687637090683\r\nStep 2563, loss: 0.20679715275764465\r\nStep 2564, loss: 0.20659048855304718\r\nStep 2565, loss: 0.20322971045970917\r\nStep 2566, loss: 0.20758254826068878\r\nStep 2567, loss: 0.2716931998729706\r\nStep 2568, loss: 0.203929141163826\r\nStep 2569, loss: 0.19963447749614716\r\nStep 2570, loss: 0.20243635773658752\r\nStep 2571, loss: 0.20013506710529327\r\nStep 2572, loss: 0.20554699003696442\r\nStep 2573, loss: 0.20230622589588165\r\nStep 2574, loss: 0.1998426616191864\r\nStep 2575, loss: 0.2047874629497528\r\nStep 2576, loss: 0.1968940794467926\r\nStep 2577, loss: 0.20542356371879578\r\nStep 2578, loss: 0.19544872641563416\r\nStep 2579, loss: 0.19908468425273895\r\nStep 2580, loss: 0.2063470482826233\r\nStep 2581, loss: 0.19664837419986725\r\nStep 2582, loss: 0.27220937609672546\r\nStep 2583, loss: 0.2000007927417755\r\nStep 2584, loss: 0.20015771687030792\r\nStep 2585, loss: 0.1975429654121399\r\nStep 2586, loss: 0.20245367288589478\r\nStep 2587, loss: 0.19780445098876953\r\nStep 2588, loss: 0.19453763961791992\r\nStep 2589, loss: 0.19208118319511414\r\nStep 2590, loss: 0.1985636055469513\r\nStep 2591, loss: 0.1965634822845459\r\nStep 2592, loss: 0.19761094450950623\r\nStep 2593, loss: 0.3838554620742798\r\nStep 2594, loss: 0.20028316974639893\r\nStep 2595, loss: 0.20625728368759155\r\nStep 2596, loss: 0.1987794190645218\r\nStep 2597, loss: 0.19568082690238953\r\nStep 2598, loss: 0.2019376903772354\r\nStep 2599, loss: 0.19595691561698914\r\nStep 2600, loss: 0.19297726452350616\r\nStep 2601, loss: 0.19388319551944733\r\nStep 2602, loss: 0.1863039880990982\r\nStep 2603, loss: 0.1905117928981781\r\nStep 2604, loss: 0.26289263367652893\r\nStep 2605, loss: 0.1933479607105255\r\nStep 2606, loss: 0.19315071403980255\r\nStep 2607, loss: 0.19154353439807892\r\nStep 2608, loss: 0.18642160296440125\r\nStep 2609, loss: 0.18885108828544617\r\nStep 2610, loss: 0.1952131688594818\r\nStep 2611, loss: 0.18669427931308746\r\nStep 2612, loss: 0.19148728251457214\r\nStep 2613, loss: 0.18862488865852356\r\nStep 2614, loss: 0.1892232745885849\r\nStep 2615, loss: 0.18826207518577576\r\nStep 2616, loss: 0.1845317929983139\r\nStep 2617, loss: 0.18932288885116577\r\nStep 2618, loss: 0.18318885564804077\r\nStep 2619, loss: 0.18445701897144318\r\nStep 2620, loss: 0.18388666212558746\r\nStep 2621, loss: 0.18307000398635864\r\nStep 2622, loss: 0.18177630007266998\r\nStep 2623, loss: 0.1851452738046646\r\nStep 2624, loss: 0.1849570870399475\r\nStep 2625, loss: 0.2514554262161255\r\nStep 2626, loss: 0.1838679015636444\r\nStep 2627, loss: 0.18270964920520782\r\nStep 2628, loss: 
0.17955714464187622\r\nStep 2629, loss: 0.17932625114917755\r\nStep 2630, loss: 0.1836833506822586\r\nStep 2631, loss: 0.18201929330825806\r\nStep 2632, loss: 0.1824944168329239\r\nStep 2633, loss: 0.18114401400089264\r\nStep 2634, loss: 0.1752663254737854\r\nStep 2635, loss: 0.1802068054676056\r\nStep 2636, loss: 0.18084681034088135\r\nStep 2637, loss: 0.18239200115203857\r\nStep 2638, loss: 0.1843176782131195\r\nStep 2639, loss: 0.17518778145313263\r\nStep 2640, loss: 0.18306730687618256\r\nStep 2641, loss: 0.17444011569023132\r\nStep 2642, loss: 0.1853867471218109\r\nStep 2643, loss: 0.17697368562221527\r\nStep 2644, loss: 0.1754462718963623\r\nStep 2645, loss: 0.24099892377853394\r\nStep 2646, loss: 0.17741067707538605\r\nStep 2647, loss: 0.17537270486354828\r\nStep 2648, loss: 0.18034952878952026\r\nStep 2649, loss: 0.17162415385246277\r\nStep 2650, loss: 0.17360641062259674\r\nStep 2651, loss: 0.17421022057533264\r\nStep 2652, loss: 0.17367437481880188\r\nStep 2653, loss: 0.1728464812040329\r\nStep 2654, loss: 0.16759948432445526\r\nStep 2655, loss: 0.17002446949481964\r\nStep 2656, loss: 0.1711575984954834\r\nStep 2657, loss: 0.17385539412498474\r\nStep 2658, loss: 0.17503099143505096\r\nStep 2659, loss: 0.17347975075244904\r\nStep 2660, loss: 0.16903138160705566\r\nStep 2661, loss: 0.17012228071689606\r\nStep 2662, loss: 0.17641568183898926\r\nStep 2663, loss: 0.17127108573913574\r\nStep 2664, loss: 0.17309866845607758\r\nStep 2665, loss: 0.17024224996566772\r\nStep 2666, loss: 0.2300070971250534\r\nStep 2667, loss: 0.1694054901599884\r\nStep 2668, loss: 0.1658623367547989\r\nStep 2669, loss: 0.17407691478729248\r\nStep 2670, loss: 0.1680699586868286\r\nStep 2671, loss: 0.1646224856376648\r\nStep 2672, loss: 0.1654883623123169\r\nStep 2673, loss: 0.16762042045593262\r\nStep 2674, loss: 0.16303664445877075\r\nStep 2675, loss: 0.16055575013160706\r\nStep 2676, loss: 0.16457080841064453\r\nStep 2677, loss: 0.1600349396467209\r\nStep 2678, loss: 0.1603669822216034\r\nStep 2679, loss: 0.159470796585083\r\nStep 2680, loss: 0.164365753531456\r\nStep 2681, loss: 0.1620679497718811\r\nStep 2682, loss: 0.26054003834724426\r\nStep 2683, loss: 0.1816864311695099\r\nStep 2684, loss: 0.166695699095726\r\nStep 2685, loss: 0.1727498471736908\r\nStep 2686, loss: 0.16671636700630188\r\nStep 2687, loss: 0.17055831849575043\r\nStep 2688, loss: 0.2323097139596939\r\nStep 2689, loss: 0.15928280353546143\r\nStep 2690, loss: 0.16581931710243225\r\nStep 2691, loss: 0.16533595323562622\r\nStep 2692, loss: 0.158277690410614\r\nStep 2693, loss: 0.1636834740638733\r\nStep 2694, loss: 0.16307266056537628\r\nStep 2695, loss: 0.1559557318687439\r\nStep 2696, loss: 0.1587933450937271\r\nStep 2697, loss: 0.16012638807296753\r\nStep 2698, loss: 0.15730267763137817\r\nStep 2699, loss: 0.1559479832649231\r\nStep 2700, loss: 0.16060203313827515\r\nStep 2701, loss: 0.1594872921705246\r\nStep 2702, loss: 0.15287631750106812\r\nStep 2703, loss: 0.1558436006307602\r\nStep 2704, loss: 0.15621685981750488\r\nStep 2705, loss: 0.15385226905345917\r\nStep 2706, loss: 0.15702366828918457\r\nStep 2707, loss: 0.15623125433921814\r\nStep 2708, loss: 0.15340711176395416\r\nStep 2709, loss: 0.15472252666950226\r\nStep 2710, loss: 0.21461939811706543\r\nStep 2711, loss: 0.16046473383903503\r\nStep 2712, loss: 0.15144656598567963\r\nStep 2713, loss: 0.15615834295749664\r\nStep 2714, loss: 0.1574038565158844\r\nStep 2715, loss: 0.1503535658121109\r\nStep 2716, loss: 0.15948867797851562\r\nStep 2717, loss: 0.15283459424972534\r\nStep 
2718, loss: 0.1543479859828949\r\nStep 2719, loss: 0.15150593221187592\r\nStep 2720, loss: 0.14888182282447815\r\nStep 2721, loss: 0.15620870888233185\r\nStep 2722, loss: 0.15049560368061066\r\nStep 2723, loss: 0.15171007812023163\r\nStep 2724, loss: 0.1500089019536972\r\nStep 2725, loss: 0.1535365730524063\r\nStep 2726, loss: 0.1482682079076767\r\nStep 2727, loss: 0.15227562189102173\r\nStep 2728, loss: 0.15096211433410645\r\nStep 2729, loss: 0.2004634588956833\r\nStep 2730, loss: 0.14824776351451874\r\nStep 2731, loss: 0.14682604372501373\r\nStep 2732, loss: 0.1449105590581894\r\nStep 2733, loss: 0.1489359587430954\r\nStep 2734, loss: 0.14770005643367767\r\nStep 2735, loss: 0.14419366419315338\r\nStep 2736, loss: 0.14376255869865417\r\nStep 2737, loss: 0.1460242122411728\r\nStep 2738, loss: 0.14742383360862732\r\nStep 2739, loss: 0.14229270815849304\r\nStep 2740, loss: 0.3006083369255066\r\nStep 2741, loss: 0.15344876050949097\r\nStep 2742, loss: 0.1485162079334259\r\nStep 2743, loss: 0.1499262899160385\r\nStep 2744, loss: 0.1539427638053894\r\nStep 2745, loss: 0.14264464378356934\r\nStep 2746, loss: 0.1475958526134491\r\nStep 2747, loss: 0.1452566236257553\r\nStep 2748, loss: 0.1393509954214096\r\nStep 2749, loss: 0.145334392786026\r\nStep 2750, loss: 0.14780500531196594\r\nStep 2751, loss: 0.1467030793428421\r\nStep 2752, loss: 0.142316535115242\r\nStep 2753, loss: 0.1431470364332199\r\nStep 2754, loss: 0.14070285856723785\r\nStep 2755, loss: 0.19746723771095276\r\nStep 2756, loss: 0.14182987809181213\r\nStep 2757, loss: 0.14297327399253845\r\nStep 2758, loss: 0.13955402374267578\r\nStep 2759, loss: 0.1401270478963852\r\nStep 2760, loss: 0.13741305470466614\r\nStep 2761, loss: 0.14051969349384308\r\nStep 2762, loss: 0.14000824093818665\r\nStep 2763, loss: 0.13964654505252838\r\nStep 2764, loss: 0.13965412974357605\r\nStep 2765, loss: 0.1357763260602951\r\nStep 2766, loss: 0.135432630777359\r\nStep 2767, loss: 0.1367768496274948\r\nStep 2768, loss: 0.13556599617004395\r\n",,terminal_output +3350,9087944,"TERMINAL",0,0,"2\t ",,terminal_output +3351,9089373,"TERMINAL",0,0,"3\t ",,terminal_output +3352,9090219,"TERMINAL",0,0,"4\t ",,terminal_output +3353,9091138,"TERMINAL",0,0,"5\t ",,terminal_output +3354,9092131,"TERMINAL",0,0,"6\t ",,terminal_output +3355,9092452,"TERMINAL",0,0,"Step 2769, loss: 0.13474515080451965\r\nStep 2770, loss: 0.14245811104774475\r\nStep 2771, loss: 0.13590773940086365\r\nStep 2772, loss: 0.13502290844917297\r\nStep 2773, loss: 0.13343803584575653\r\nStep 2774, loss: 0.13162213563919067\r\nStep 2775, loss: 0.1350700855255127\r\nStep 2776, loss: 0.18472649157047272\r\nStep 2777, loss: 0.13361407816410065\r\nStep 2778, loss: 0.13704831898212433\r\nStep 2779, loss: 0.1314212530851364\r\nStep 2780, loss: 0.14007245004177094\r\nStep 2781, loss: 0.1303289234638214\r\nStep 2782, loss: 0.13171444833278656\r\nStep 2783, loss: 0.1335948258638382\r\nStep 2784, loss: 0.12965239584445953\r\nStep 2785, loss: 0.1359083205461502\r\nStep 2786, loss: 0.1319267749786377\r\nStep 2787, loss: 0.12981903553009033\r\nStep 2788, loss: 0.13313236832618713\r\nStep 2789, loss: 0.13398264348506927\r\nStep 2790, loss: 0.13002009689807892\r\nStep 2791, loss: 0.12988105416297913\r\nStep 2792, loss: 0.13046510517597198\r\nStep 2793, loss: 0.1326470673084259\r\nStep 2794, loss: 0.13624157011508942\r\nStep 2795, loss: 0.12907792627811432\r\nStep 2796, loss: 0.12782873213291168\r\nStep 2797, loss: 0.12677910923957825\r\nStep 2798, loss: 0.13145147264003754\r\nStep 2799, loss: 
0.12849874794483185\r\nStep 2800, loss: 0.129723459482193\r\nStep 2801, loss: 0.12807387113571167\r\nStep 2802, loss: 0.1783583015203476\r\nStep 2803, loss: 0.126010000705719\r\nStep 2804, loss: 0.12873931229114532\r\nStep 2805, loss: 0.12628446519374847\r\nStep 2806, loss: 0.12688934803009033\r\nStep 2807, loss: 0.12840792536735535\r\nStep 2808, loss: 0.129326730966568\r\nStep 2809, loss: 0.1272476464509964\r\nStep 2810, loss: 0.1245802640914917\r\nStep 2811, loss: 0.12090212851762772\r\nStep 2812, loss: 0.12552517652511597\r\nStep 2813, loss: 0.12342054396867752\r\nStep 2814, loss: 0.12354676425457001\r\nStep 2815, loss: 0.12546291947364807\r\nStep 2816, loss: 0.12303179502487183\r\nStep 2817, loss: 0.12322347611188889\r\nStep 2818, loss: 0.1754073053598404\r\nStep 2819, loss: 0.1211557388305664\r\nStep 2820, loss: 0.12396363914012909\r\nStep 2821, loss: 0.12460434436798096\r\nStep 2822, loss: 0.12110162526369095\r\nStep 2823, loss: 0.12148319184780121\r\nStep 2824, loss: 0.1206493079662323\r\nStep 2825, loss: 0.1216040849685669\r\nStep 2826, loss: 0.12360496073961258\r\nStep 2827, loss: 0.12076213210821152\r\nStep 2828, loss: 0.12389180809259415\r\nStep 2829, loss: 0.12415226548910141\r\nStep 2830, loss: 0.11854974180459976\r\nStep 2831, loss: 0.12313053011894226\r\nStep 2832, loss: 0.1215645968914032\r\nStep 2833, loss: 0.11832661181688309\r\nStep 2834, loss: 0.12699459493160248\r\nStep 2835, loss: 0.11632285267114639\r\nStep 2836, loss: 0.12670151889324188\r\nStep 2837, loss: 0.11649885773658752\r\nStep 2838, loss: 0.12175322324037552\r\nStep 2839, loss: 0.12188029289245605\r\nStep 2840, loss: 0.11497671157121658\r\nStep 2841, loss: 0.1145145371556282\r\nStep 2842, loss: 0.11756515502929688\r\nStep 2843, loss: 0.16645823419094086\r\nStep 2844, loss: 0.11610690504312515\r\nStep 2845, loss: 0.1170087456703186\r\nStep 2846, loss: 0.11843397468328476\r\nStep 2847, loss: 0.11935465782880783\r\nStep 2848, loss: 0.11435224860906601\r\nStep 2849, loss: 0.12143212556838989\r\nStep 2850, loss: 0.11347649991512299\r\nStep 2851, loss: 0.11675555258989334\r\nStep 2852, loss: 0.1142343059182167\r\nStep 2853, loss: 0.11485597491264343\r\nStep 2854, loss: 0.11072693020105362\r\nStep 2855, loss: 0.11642646044492722\r\nStep 2856, loss: 0.11308379471302032\r\nStep 2857, loss: 0.1122380942106247\r\nStep 2858, loss: 0.1104704737663269\r\nStep 2859, loss: 0.11091209948062897\r\nStep 2860, loss: 0.1183941587805748\r\nStep 2861, loss: 0.11667563021183014\r\nStep 2862, loss: 0.11501607298851013\r\nStep 2863, loss: 0.11160507798194885\r\nStep 2864, loss: 0.11380808800458908\r\nStep 2865, loss: 0.1109670028090477\r\nStep 2866, loss: 0.16740813851356506\r\nStep 2867, loss: 0.1081925556063652\r\nStep 2868, loss: 0.10815372318029404\r\nStep 2869, loss: 0.11156371980905533\r\nStep 2870, loss: 0.11068320274353027\r\nStep 2871, loss: 0.11361780017614365\r\nStep 2872, loss: 0.11141522228717804\r\nStep 2873, loss: 0.11063200235366821\r\nStep 2874, loss: 0.11354666203260422\r\nStep 2875, loss: 0.10643061995506287\r\nStep 2876, loss: 0.1074114590883255\r\nStep 2877, loss: 0.1070839911699295\r\nStep 2878, loss: 0.25455427169799805\r\nStep 2879, loss: 0.11339960247278214\r\nStep 2880, loss: 0.11268091201782227\r\nStep 2881, loss: 0.11267434805631638\r\nStep 2882, loss: 0.11117702722549438\r\nStep 2883, loss: 0.11667473614215851\r\nStep 2884, loss: 0.11336171627044678\r\nStep 2885, loss: 0.10872949659824371\r\nStep 2886, loss: 0.11100053042173386\r\nStep 2887, loss: 0.10934709757566452\r\nStep 2888, loss: 
0.1042356938123703\r\nStep 2889, loss: 0.10918685048818588\r\nStep 2890, loss: 0.10435599088668823\r\nStep 2891, loss: 0.11239615082740784\r\nStep 2892, loss: 0.10249648988246918\r\nStep 2893, loss: 0.15736354887485504\r\nStep 2894, loss: 0.1070178970694542\r\nStep 2895, loss: 0.10825078934431076\r\nStep 2896, loss: 0.10436254739761353\r\nStep 2897, loss: 0.1072196513414383\r\nStep 2898, loss: 0.10536140203475952\r\nStep 2899, loss: 0.10319503396749496\r\nStep 2900, loss: 0.1046992763876915\r\nStep 2901, loss: 0.10460510104894638\r\nStep 2902, loss: 0.10561521351337433\r\nStep 2903, loss: 0.10396751016378403\r\nStep 2904, loss: 0.10459179431200027\r\nStep 2905, loss: 0.10427229106426239\r\nStep 2906, loss: 0.10636860877275467\r\nStep 2907, loss: 0.10344163328409195\r\nStep 2908, loss: 0.10216843336820602\r\nStep 2909, loss: 0.1045258417725563\r\nStep 2910, loss: 0.10332290828227997\r\nStep 2911, loss: 0.10067888349294662\r\nStep 2912, loss: 0.10021629929542542\r\nStep 2913, loss: 0.09824515134096146\r\nStep 2914, loss: 0.10066545754671097\r\nStep 2915, loss: 0.10272091627120972\r\nStep 2916, loss: 0.09918460994958878\r\nStep 2917, loss: 0.10235756635665894\r\nStep 2918, loss: 0.10189453512430191\r\nStep 2919, loss: 0.09939735382795334\r\nStep 2920, loss: 0.14808772504329681\r\nStep 2921, loss: 0.09975174069404602\r\nStep 2922, loss: 0.10219382494688034\r\nStep 2923, loss: 0.0988132432103157\r\nStep 2924, loss: 0.10202455520629883\r\nStep 2925, loss: 0.10113901644945145\r\nStep 2926, loss: 0.10092946887016296\r\nStep 2927, loss: 0.0996236503124237\r\nStep 2928, loss: 0.09989912062883377\r\nStep 2929, loss: 0.09855642914772034\r\nStep 2930, loss: 0.09984336048364639\r\nStep 2931, loss: 0.09811293333768845\r\nStep 2932, loss: 0.09661681205034256\r\nStep 2933, loss: 0.09647028893232346\r\nStep 2934, loss: 0.09685108065605164\r\nStep 2935, loss: 0.09931203722953796\r\nStep 2936, loss: 0.09584257751703262\r\nStep 2937, loss: 0.09736188501119614\r\nStep 2938, loss: 0.09926505386829376\r\nStep 2939, loss: 0.09788083285093307\r\nStep 2940, loss: 0.10053090006113052\r\nStep 2941, loss: 0.09537439793348312\r\nStep 2942, loss: 0.09667686372995377\r\nStep 2943, loss: 0.09771540760993958\r\nStep 2944, loss: 0.1439654380083084\r\nStep 2945, loss: 0.0949772521853447\r\nStep 2946, loss: 0.09736022353172302\r\nStep 2947, loss: 0.09769288450479507\r\nStep 2948, loss: 0.09527776390314102\r\nStep 2949, loss: 0.0954955518245697\r\nStep 2950, loss: 0.09811817109584808\r\nStep 2951, loss: 0.09690964967012405\r\nStep 2952, loss: 0.09606757014989853\r\nStep 2953, loss: 0.09554625302553177\r\nStep 2954, loss: 0.09280466288328171\r\nStep 2955, loss: 0.09367058426141739\r\nStep 2956, loss: 0.0935298353433609\r\nStep 2957, loss: 0.09211385250091553\r\nStep 2958, loss: 0.09268838167190552\r\nStep 2959, loss: 0.09295836836099625\r\nStep 2960, loss: 0.09405751526355743\r\nStep 2961, loss: 0.09295959025621414\r\nStep 2962, loss: 0.0934198871254921\r\nStep 2963, loss: 0.09278053045272827\r\nStep 2964, loss: 0.09334910660982132\r\nStep 2965, loss: 0.09032595902681351\r\nStep 2966, loss: 0.0900634303689003\r\nStep 2967, loss: 0.09238541126251221\r\nStep 2968, loss: 0.09271823614835739\r\nStep 2969, loss: 0.08940634876489639\r\nStep 2970, loss: 0.08745851367712021\r\nStep 2971, loss: 0.09251789003610611\r\nStep 2972, loss: 0.13232353329658508\r\nStep 2973, loss: 0.09196684509515762\r\nStep 2974, loss: 0.08918841183185577\r\nStep 2975, loss: 0.08907028287649155\r\nStep 2976, loss: 0.09165249019861221\r\nStep 2977, loss: 
0.09075402468442917\r\nStep 2978, loss: 0.08808129280805588\r\nStep 2979, loss: 0.0893024429678917\r\nStep 2980, loss: 0.08655640482902527\r\nStep 2981, loss: 0.08558279275894165\r\nStep 2982, loss: 0.09002291411161423\r\nStep 2983, loss: 0.08788325637578964\r\nStep 2984, loss: 0.08763756603002548\r\nStep 2985, loss: 0.09106053411960602\r\nStep 2986, loss: 0.08869151026010513\r\nStep 2987, loss: 0.26440125703811646\r\nStep 2988, loss: 0.1006244644522667\r\nStep 2989, loss: 0.09331979602575302\r\nStep 2990, loss: 0.08935396373271942\r\n",,terminal_output +3356,9093143,"TERMINAL",0,0,"7\t ",,terminal_output +3357,9094186,"TERMINAL",0,0,"8\t ",,terminal_output +3358,9095312,"TERMINAL",0,0,"9\t ",,terminal_output +3359,9096347,"TERMINAL",0,0,"50\t ",,terminal_output +3360,9099124,"TERMINAL",0,0,"1\t 2\t ",,terminal_output +3361,9099148,"TERMINAL",0,0,"Step 2991, loss: 0.0956280380487442\r\nStep 2992, loss: 0.09134281426668167\r\nStep 2993, loss: 0.09259893745183945\r\nStep 2994, loss: 0.08952240645885468\r\nStep 2995, loss: 0.08651509881019592\r\nStep 2996, loss: 0.08631240576505661\r\nStep 2997, loss: 0.08853261917829514\r\nStep 2998, loss: 0.08656709641218185\r\nStep 2999, loss: 0.0869423896074295\r\nSaved checkpoint at step 3000\r\nStep 3000, loss: 0.0873226523399353\r\nStep 3001, loss: 0.08207868039608002\r\nStep 3002, loss: 0.08509781211614609\r\nStep 3003, loss: 0.08474276959896088\r\nStep 3004, loss: 0.08507923036813736\r\nStep 3005, loss: 0.08149100095033646\r\nStep 3006, loss: 0.08779460191726685\r\nStep 3007, loss: 0.0863519236445427\r\nStep 3008, loss: 0.0862816646695137\r\nStep 3009, loss: 0.1324681043624878\r\nStep 3010, loss: 0.08564279973506927\r\nStep 3011, loss: 0.08301039040088654\r\nStep 3012, loss: 0.08393842726945877\r\nStep 3013, loss: 0.0834483727812767\r\nStep 3014, loss: 0.08835902065038681\r\nStep 3015, loss: 0.08812805265188217\r\nStep 3016, loss: 0.08375655114650726\r\nStep 3017, loss: 0.08291415125131607\r\nStep 3018, loss: 0.08442284911870956\r\nStep 3019, loss: 0.08220171183347702\r\nStep 3020, loss: 0.08226146548986435\r\nStep 3021, loss: 0.07994375377893448\r\nStep 3022, loss: 0.08522839844226837\r\nStep 3023, loss: 0.08413679152727127\r\nStep 3024, loss: 0.08144091069698334\r\nStep 3025, loss: 0.08354175835847855\r\nStep 3026, loss: 0.08094575256109238\r\nStep 3027, loss: 0.07991644740104675\r\nStep 3028, loss: 0.12658415734767914\r\nStep 3029, loss: 0.08366823196411133\r\nStep 3030, loss: 0.07923261821269989\r\nStep 3031, loss: 0.07960126549005508\r\nStep 3032, loss: 0.08018472790718079\r\nStep 3033, loss: 0.08367331326007843\r\nStep 3034, loss: 0.08064261078834534\r\nStep 3035, loss: 0.08269140124320984\r\nStep 3036, loss: 0.08230448514223099\r\nStep 3037, loss: 0.0811023935675621\r\nStep 3038, loss: 0.08091333508491516\r\nStep 3039, loss: 0.07684571295976639\r\nStep 3040, loss: 0.08093606680631638\r\nStep 3041, loss: 0.08096811175346375\r\nStep 3042, loss: 0.08042449504137039\r\nStep 3043, loss: 0.07479952275753021\r\nStep 3044, loss: 0.07744520157575607\r\nStep 3045, loss: 0.07776162028312683\r\nStep 3046, loss: 0.078578881919384\r\nStep 3047, loss: 0.07740219682455063\r\nStep 3048, loss: 0.07886476814746857\r\nStep 3049, loss: 0.07765565812587738\r\nStep 3050, loss: 0.07795851677656174\r\nStep 3051, loss: 0.07667602598667145\r\nStep 3052, loss: 0.07694864273071289\r\nStep 3053, loss: 0.07477542757987976\r\nStep 3054, loss: 0.11743529886007309\r\nStep 3055, loss: 0.07861344516277313\r\nStep 3056, loss: 0.07860493659973145\r\nStep 3057, loss: 
0.07905102521181107\r\nStep 3058, loss: 0.07491487264633179\r\nStep 3059, loss: 0.07248539477586746\r\nStep 3060, loss: 0.07802750915288925\r\nStep 3061, loss: 0.07653548568487167\r\nStep 3062, loss: 0.07816291600465775\r\nStep 3063, loss: 0.07527301460504532\r\nStep 3064, loss: 0.07696671038866043\r\nStep 3065, loss: 0.07639384269714355\r\nStep 3066, loss: 0.20111782848834991\r\nStep 3067, loss: 0.0808001458644867\r\nStep 3068, loss: 0.07824928313493729\r\nStep 3069, loss: 0.0739564299583435\r\nStep 3070, loss: 0.0774633139371872\r\nStep 3071, loss: 0.07601785659790039\r\nStep 3072, loss: 0.07745999842882156\r\nStep 3073, loss: 0.076262928545475\r\nStep 3074, loss: 0.07738218456506729\r\nStep 3075, loss: 0.07149919867515564\r\nStep 3076, loss: 0.07513806968927383\r\nStep 3077, loss: 0.07229255884885788\r\nStep 3078, loss: 0.07241454720497131\r\nStep 3079, loss: 0.1142365112900734\r\nStep 3080, loss: 0.0746806412935257\r\nStep 3081, loss: 0.07430607825517654\r\nStep 3082, loss: 0.07409930974245071\r\nStep 3083, loss: 0.07297547161579132\r\nStep 3084, loss: 0.07342661917209625\r\nStep 3085, loss: 0.07324425876140594\r\nStep 3086, loss: 0.07120143622159958\r\nStep 3087, loss: 0.07322373241186142\r\nStep 3088, loss: 0.07137955725193024\r\nStep 3089, loss: 0.0710524320602417\r\nStep 3090, loss: 0.07120450586080551\r\nStep 3091, loss: 0.0737251415848732\r\nStep 3092, loss: 0.07629604637622833\r\nStep 3093, loss: 0.07555455714464188\r\nStep 3094, loss: 0.0697251483798027\r\nStep 3095, loss: 0.07033316791057587\r\nStep 3096, loss: 0.07290702313184738\r\nStep 3097, loss: 0.07022829353809357\r\nStep 3098, loss: 0.07014597952365875\r\nStep 3099, loss: 0.10638628900051117\r\nStep 3100, loss: 0.0699419230222702\r\nStep 3101, loss: 0.07182814180850983\r\nStep 3102, loss: 0.06880008429288864\r\nStep 3103, loss: 0.07250417023897171\r\nStep 3104, loss: 0.06943300366401672\r\nStep 3105, loss: 0.06878499686717987\r\nStep 3106, loss: 0.06778137385845184\r\nStep 3107, loss: 0.07066758722066879\r\nStep 3108, loss: 0.07054787129163742\r\nStep 3109, loss: 0.06755678355693817\r\nStep 3110, loss: 0.07075701653957367\r\nStep 3111, loss: 0.06815066933631897\r\nStep 3112, loss: 0.06852865219116211\r\nStep 3113, loss: 0.06809967011213303\r\nStep 3114, loss: 0.06857427954673767\r\nStep 3115, loss: 0.06872540712356567\r\nStep 3116, loss: 0.06927100569009781\r\nStep 3117, loss: 0.06905078887939453\r\nStep 3118, loss: 0.06697221100330353\r\nStep 3119, loss: 0.0709301233291626\r\nStep 3120, loss: 0.10873201489448547\r\nStep 3121, loss: 0.06794550269842148\r\nStep 3122, loss: 0.06916769593954086\r\nStep 3123, loss: 0.06535112857818604\r\nStep 3124, loss: 0.06943682581186295\r\nStep 3125, loss: 0.0698738619685173\r\nStep 3126, loss: 0.06301078200340271\r\nStep 3127, loss: 0.06748499721288681\r\nStep 3128, loss: 0.07099015265703201\r\nStep 3129, loss: 0.06623625010251999\r\nStep 3130, loss: 0.06511345505714417\r\nStep 3131, loss: 0.06980625540018082\r\nStep 3132, loss: 0.06630862504243851\r\nStep 3133, loss: 0.06487499177455902\r\nStep 3134, loss: 0.06643340736627579\r\nStep 3135, loss: 0.06609878689050674\r\nStep 3136, loss: 0.06487806886434555\r\nStep 3137, loss: 0.06425928324460983\r\nStep 3138, loss: 0.06932999938726425\r\nStep 3139, loss: 0.06645256280899048\r\nStep 3140, loss: 0.06427039206027985\r\nStep 3141, loss: 0.06274417787790298\r\nStep 3142, loss: 0.06816675513982773\r\nStep 3143, loss: 0.06659865379333496\r\nStep 3144, loss: 0.06493861228227615\r\nStep 3145, loss: 0.0660671815276146\r\nStep 3146, loss: 
0.06659567356109619\r\nStep 3147, loss: 0.06346439570188522\r\nStep 3148, loss: 0.06483909487724304\r\nStep 3149, loss: 0.06330514699220657\r\nStep 3150, loss: 0.09809108823537827\r\nStep 3151, loss: 0.06638631224632263\r\nStep 3152, loss: 0.06238057464361191\r\nStep 3153, loss: 0.06500415503978729\r\nStep 3154, loss: 0.1965978890657425\r\nStep 3155, loss: 0.07311005145311356\r\nStep 3156, loss: 0.06582766026258469\r\nStep 3157, loss: 0.06594140082597733\r\nStep 3158, loss: 0.06546702235937119\r\nStep 3159, loss: 0.06558580696582794\r\nStep 3160, loss: 0.06606799364089966\r\nStep 3161, loss: 0.06669744104146957\r\nStep 3162, loss: 0.06372974067926407\r\nStep 3163, loss: 0.06541121751070023\r\nStep 3164, loss: 0.06338110566139221\r\nStep 3165, loss: 0.06650203466415405\r\nStep 3166, loss: 0.06439904123544693\r\nStep 3167, loss: 0.06557881087064743\r\nStep 3168, loss: 0.06331733614206314\r\nStep 3169, loss: 0.060233425348997116\r\nStep 3170, loss: 0.0631810799241066\r\nStep 3171, loss: 0.06255178153514862\r\nStep 3172, loss: 0.0623803474009037\r\nStep 3173, loss: 0.06119173765182495\r\nStep 3174, loss: 0.06015175208449364\r\nStep 3175, loss: 0.059398241341114044\r\nStep 3176, loss: 0.062234748154878616\r\nStep 3177, loss: 0.061565518379211426\r\nStep 3178, loss: 0.060517288744449615\r\nStep 3179, loss: 0.09279201179742813\r\nStep 3180, loss: 0.06110085919499397\r\nStep 3181, loss: 0.06254827231168747\r\nStep 3182, loss: 0.06130925938487053\r\nStep 3183, loss: 0.06371305137872696\r\nStep 3184, loss: 0.05873087793588638\r\nStep 3185, loss: 0.05865961313247681\r\nStep 3186, loss: 0.060317493975162506\r\nStep 3187, loss: 0.05791023373603821\r\nStep 3188, loss: 0.06099190562963486\r\nStep 3189, loss: 0.060167472809553146\r\nStep 3190, loss: 0.059756480157375336\r\nStep 3191, loss: 0.0580904521048069\r\nStep 3192, loss: 0.05907192826271057\r\nStep 3193, loss: 0.05654436722397804\r\nStep 3194, loss: 0.05754789337515831\r\nStep 3195, loss: 0.0585041269659996\r\nStep 3196, loss: 0.058339282870292664\r\nStep 3197, loss: 0.08897547423839569\r\nStep 3198, loss: 0.060440171509981155\r\nStep 3199, loss: 0.05845474451780319\r\nStep 3200, loss: 0.058192718774080276\r\nStep 3201, loss: 0.06076592206954956\r\nStep 3202, loss: 0.05979914963245392\r\nStep 3203, loss: 0.057154323905706406\r\nStep 3204, loss: 0.05667528882622719\r\nStep 3205, loss: 0.05839042738080025\r\nStep 3206, loss: 0.05914296582341194\r\nStep 3207, loss: 0.055390845984220505\r\nStep 3208, loss: 0.05756857246160507\r\nStep 3209, loss: 0.056128017604351044\r\nStep 3210, loss: 0.1625520884990692\r\nStep 3211, loss: 0.05808952823281288\r\n",,terminal_output +3362,9099365,"TERMINAL",0,0,"4\t ",,terminal_output +3363,9100400,"TERMINAL",0,0,"5\t ",,terminal_output +3364,9101481,"TERMINAL",0,0,"6\t ",,terminal_output +3365,9102480,"TERMINAL",0,0,"7\t ",,terminal_output +3366,9103516,"TERMINAL",0,0,"8\t ",,terminal_output +3367,9104298,"TERMINAL",0,0,"Step 3212, loss: 0.05832064896821976\r\nStep 3213, loss: 0.05769883468747139\r\nStep 3214, loss: 0.059861842542886734\r\nStep 3215, loss: 0.057267870754003525\r\nStep 3216, loss: 0.05800454691052437\r\nStep 3217, loss: 0.05579710006713867\r\nStep 3218, loss: 0.05746519938111305\r\nStep 3219, loss: 0.05377852916717529\r\nStep 3220, loss: 0.056686051189899445\r\nStep 3221, loss: 0.05587702617049217\r\nStep 3222, loss: 0.05684326961636543\r\nStep 3223, loss: 0.05674344673752785\r\nStep 3224, loss: 0.055038128048181534\r\nStep 3225, loss: 0.05528365820646286\r\nStep 3226, loss: 0.0542423278093338\r\nStep 
3227, loss: 0.057136133313179016\r\nStep 3228, loss: 0.05453464016318321\r\nStep 3229, loss: 0.08953656256198883\r\nStep 3230, loss: 0.05396486073732376\r\nStep 3231, loss: 0.05436496436595917\r\nStep 3232, loss: 0.05558545142412186\r\nStep 3233, loss: 0.05576304718852043\r\nStep 3234, loss: 0.05644258111715317\r\nStep 3235, loss: 0.05584530532360077\r\nStep 3236, loss: 0.055214762687683105\r\nStep 3237, loss: 0.05288456380367279\r\nStep 3238, loss: 0.0566420741379261\r\nStep 3239, loss: 0.054025422781705856\r\nStep 3240, loss: 0.05384869500994682\r\nStep 3241, loss: 0.05450214445590973\r\nStep 3242, loss: 0.05359950289130211\r\nStep 3243, loss: 0.053460974246263504\r\nStep 3244, loss: 0.05189570039510727\r\nStep 3245, loss: 0.05424777790904045\r\nStep 3246, loss: 0.054285887628793716\r\nStep 3247, loss: 0.0531151108443737\r\nStep 3248, loss: 0.05201657861471176\r\nStep 3249, loss: 0.05087141692638397\r\nStep 3250, loss: 0.05352502688765526\r\nStep 3251, loss: 0.05551053211092949\r\nStep 3252, loss: 0.05224023014307022\r\nStep 3253, loss: 0.05060356482863426\r\nStep 3254, loss: 0.07989537715911865\r\nStep 3255, loss: 0.05066172033548355\r\nStep 3256, loss: 0.053637586534023285\r\nStep 3257, loss: 0.050742704421281815\r\nStep 3258, loss: 0.05202189087867737\r\nStep 3259, loss: 0.04965377226471901\r\nStep 3260, loss: 0.050770070403814316\r\nStep 3261, loss: 0.050200484693050385\r\nStep 3262, loss: 0.052070166915655136\r\nStep 3263, loss: 0.05168815702199936\r\nStep 3264, loss: 0.05037873610854149\r\nStep 3265, loss: 0.051624421030282974\r\nStep 3266, loss: 0.05101696774363518\r\nStep 3267, loss: 0.0490303635597229\r\nStep 3268, loss: 0.0491749569773674\r\nStep 3269, loss: 0.05095120519399643\r\nStep 3270, loss: 0.05332716926932335\r\nStep 3271, loss: 0.05064411088824272\r\nStep 3272, loss: 0.05058116838335991\r\nStep 3273, loss: 0.052315931767225266\r\nStep 3274, loss: 0.04929981008172035\r\nStep 3275, loss: 0.049816764891147614\r\nStep 3276, loss: 0.05090722441673279\r\nStep 3277, loss: 0.049352843314409256\r\nStep 3278, loss: 0.05285279452800751\r\nStep 3279, loss: 0.07942207902669907\r\nStep 3280, loss: 0.05143360421061516\r\nStep 3281, loss: 0.051481567323207855\r\nStep 3282, loss: 0.04864417389035225\r\nStep 3283, loss: 0.04943187162280083\r\nStep 3284, loss: 0.050279900431632996\r\nStep 3285, loss: 0.04909113794565201\r\nStep 3286, loss: 0.04936693236231804\r\nStep 3287, loss: 0.04834282025694847\r\nStep 3288, loss: 0.0475020669400692\r\nStep 3289, loss: 0.05122774466872215\r\nStep 3290, loss: 0.04966406151652336\r\nStep 3291, loss: 0.04823443293571472\r\nStep 3292, loss: 0.05012001097202301\r\nStep 3293, loss: 0.04947802796959877\r\nStep 3294, loss: 0.04870982468128204\r\nStep 3295, loss: 0.04755045473575592\r\nStep 3296, loss: 0.04894164204597473\r\nStep 3297, loss: 0.05067967250943184\r\nStep 3298, loss: 0.04694777727127075\r\nStep 3299, loss: 0.047671008855104446\r\nStep 3300, loss: 0.047766510397195816\r\nStep 3301, loss: 0.04777185618877411\r\nStep 3302, loss: 0.04952886328101158\r\nStep 3303, loss: 0.04979202151298523\r\nStep 3304, loss: 0.046805739402770996\r\nStep 3305, loss: 0.05017581954598427\r\nStep 3306, loss: 0.04764731228351593\r\nStep 3307, loss: 0.2441316843032837\r\nStep 3308, loss: 0.05802742391824722\r\nStep 3309, loss: 0.05246606096625328\r\nStep 3310, loss: 0.05082080885767937\r\nStep 3311, loss: 0.05217905342578888\r\nStep 3312, loss: 0.05071390047669411\r\nStep 3313, loss: 0.050005555152893066\r\nStep 3314, loss: 0.08583664149045944\r\nStep 3315, loss: 
0.05041912570595741\r\nStep 3316, loss: 0.05085643753409386\r\nStep 3317, loss: 0.0496281236410141\r\nStep 3318, loss: 0.04781681299209595\r\nStep 3319, loss: 0.04782308265566826\r\nStep 3320, loss: 0.050894491374492645\r\nStep 3321, loss: 0.0485604964196682\r\nStep 3322, loss: 0.04809077829122543\r\nStep 3323, loss: 0.0468401275575161\r\nStep 3324, loss: 0.04684402421116829\r\nStep 3325, loss: 0.04747867211699486\r\nStep 3326, loss: 0.047474268823862076\r\nStep 3327, loss: 0.046364616602659225\r\nStep 3328, loss: 0.04942375048995018\r\nStep 3329, loss: 0.045870065689086914\r\nStep 3330, loss: 0.045340646058321\r\nStep 3331, loss: 0.046662312000989914\r\nStep 3332, loss: 0.14116668701171875\r\nStep 3333, loss: 0.048559948801994324\r\nStep 3334, loss: 0.04748588055372238\r\nStep 3335, loss: 0.04580283164978027\r\nStep 3336, loss: 0.04742236062884331\r\nStep 3337, loss: 0.04640723392367363\r\nStep 3338, loss: 0.04773583263158798\r\nStep 3339, loss: 0.049041662365198135\r\nStep 3340, loss: 0.046660590916872025\r\nStep 3341, loss: 0.04774235188961029\r\nStep 3342, loss: 0.04348111152648926\r\nStep 3343, loss: 0.04465216025710106\r\nStep 3344, loss: 0.04427590221166611\r\nStep 3345, loss: 0.04119537025690079\r\nStep 3346, loss: 0.04263775795698166\r\nStep 3347, loss: 0.044305164366960526\r\nStep 3348, loss: 0.06914208829402924\r\nStep 3349, loss: 0.04364136978983879\r\nStep 3350, loss: 0.044999152421951294\r\nStep 3351, loss: 0.043789640069007874\r\nStep 3352, loss: 0.044655557721853256\r\nStep 3353, loss: 0.04377240687608719\r\nStep 3354, loss: 0.0428854338824749\r\nStep 3355, loss: 0.0451858751475811\r\nStep 3356, loss: 0.045278824865818024\r\nStep 3357, loss: 0.043234821408987045\r\nStep 3358, loss: 0.044691797345876694\r\nStep 3359, loss: 0.04163692519068718\r\nStep 3360, loss: 0.04221754148602486\r\nStep 3361, loss: 0.04393233358860016\r\nStep 3362, loss: 0.04222525656223297\r\nStep 3363, loss: 0.04334297776222229\r\nStep 3364, loss: 0.04364399239420891\r\nStep 3365, loss: 0.045856233686208725\r\nStep 3366, loss: 0.04372597485780716\r\nStep 3367, loss: 0.0408308245241642\r\nStep 3368, loss: 0.044682908803224564\r\nStep 3369, loss: 0.04370421916246414\r\nStep 3370, loss: 0.06756030023097992\r\nStep 3371, loss: 0.04436945542693138\r\nStep 3372, loss: 0.04274505376815796\r\nStep 3373, loss: 0.04290309548377991\r\nStep 3374, loss: 0.04229236766695976\r\nStep 3375, loss: 0.041368018835783005\r\nStep 3376, loss: 0.04266831651329994\r\nStep 3377, loss: 0.041287221014499664\r\nStep 3378, loss: 0.04230622574687004\r\nStep 3379, loss: 0.04132816940546036\r\nStep 3380, loss: 0.04161227494478226\r\nStep 3381, loss: 0.043041061609983444\r\nStep 3382, loss: 0.04044061154127121\r\nStep 3383, loss: 0.04198218509554863\r\nStep 3384, loss: 0.04021931439638138\r\nStep 3385, loss: 0.041612088680267334\r\nStep 3386, loss: 0.04116472974419594\r\nStep 3387, loss: 0.040337178856134415\r\nStep 3388, loss: 0.041271988302469254\r\nStep 3389, loss: 0.041910771280527115\r\nStep 3390, loss: 0.040361009538173676\r\nStep 3391, loss: 0.04096471890807152\r\nStep 3392, loss: 0.04138355329632759\r\nStep 3393, loss: 0.04260611534118652\r\nStep 3394, loss: 0.040638942271471024\r\nStep 3395, loss: 0.040577132254838943\r\nStep 3396, loss: 0.06535308063030243\r\nStep 3397, loss: 0.04127484932541847\r\nStep 3398, loss: 0.040958747267723083\r\nStep 3399, loss: 0.03928395360708237\r\nStep 3400, loss: 0.0424082949757576\r\nStep 3401, loss: 0.041385162621736526\r\nStep 3402, loss: 0.039765067398548126\r\nStep 3403, loss: 
0.03881251439452171\r\nStep 3404, loss: 0.038562431931495667\r\nStep 3405, loss: 0.0403621569275856\r\nStep 3406, loss: 0.040982745587825775\r\nStep 3407, loss: 0.038916587829589844\r\nStep 3408, loss: 0.040192294865846634\r\nStep 3409, loss: 0.03729875758290291\r\nStep 3410, loss: 0.03839842602610588\r\nStep 3411, loss: 0.038141995668411255\r\nStep 3412, loss: 0.0393497496843338\r\nStep 3413, loss: 0.03838728740811348\r\nStep 3414, loss: 0.03770462051033974\r\nStep 3415, loss: 0.04159405454993248\r\nStep 3416, loss: 0.037313781678676605\r\nStep 3417, loss: 0.03827657178044319\r\nStep 3418, loss: 0.039451949298381805\r\nStep 3419, loss: 0.03842557594180107\r\nStep 3420, loss: 0.03871513530611992\r\nStep 3421, loss: 0.03979453444480896\r\nStep 3422, loss: 0.03717903420329094\r\nStep 3423, loss: 0.036536894738674164\r\nStep 3424, loss: 0.03847592696547508\r\nStep 3425, loss: 0.03848285600543022\r\nStep 3426, loss: 0.03772340342402458\r\nStep 3427, loss: 0.03900793194770813\r\nStep 3428, loss: 0.19340519607067108\r\nStep 3429, loss: 0.04581746086478233\r\nStep 3430, loss: 0.044562928378582\r\nStep 3431, loss: 0.04159374162554741\r\n",,terminal_output +3368,9104561,"TERMINAL",0,0,"9\t ",,terminal_output +3369,9105599,"TERMINAL",0,0,"2:00\t ",,terminal_output +3370,9106648,"TERMINAL",0,0,"1\t ",,terminal_output +3371,9107682,"TERMINAL",0,0,"2\t ",,terminal_output +3372,9108731,"TERMINAL",0,0,"3\t ",,terminal_output +3373,9109139,"TERMINAL",0,0,"Step 3432, loss: 0.04308610409498215\r\nStep 3433, loss: 0.044399213045835495\r\nStep 3434, loss: 0.04065559804439545\r\nStep 3435, loss: 0.03927004337310791\r\nStep 3436, loss: 0.04103703051805496\r\nStep 3437, loss: 0.03795251250267029\r\nStep 3438, loss: 0.03861311078071594\r\nStep 3439, loss: 0.03898017480969429\r\nStep 3440, loss: 0.038518015295267105\r\nStep 3441, loss: 0.03593932092189789\r\nStep 3442, loss: 0.03846292197704315\r\nStep 3443, loss: 0.0379217267036438\r\nStep 3444, loss: 0.036305978894233704\r\nStep 3445, loss: 0.037563737481832504\r\nStep 3446, loss: 0.03716007620096207\r\nStep 3447, loss: 0.03719327226281166\r\nStep 3448, loss: 0.037651125341653824\r\nStep 3449, loss: 0.037274543195962906\r\nStep 3450, loss: 0.08759822696447372\r\nStep 3451, loss: 0.03944433107972145\r\nStep 3452, loss: 0.03840644657611847\r\nStep 3453, loss: 0.036020923405885696\r\nStep 3454, loss: 0.041071195155382156\r\nStep 3455, loss: 0.03884885087609291\r\nStep 3456, loss: 0.03491251543164253\r\nStep 3457, loss: 0.036502860486507416\r\nStep 3458, loss: 0.1416502147912979\r\nStep 3459, loss: 0.03949859365820885\r\nStep 3460, loss: 0.03853130340576172\r\nStep 3461, loss: 0.040991201996803284\r\nStep 3462, loss: 0.03817218169569969\r\nStep 3463, loss: 0.036294374614953995\r\nStep 3464, loss: 0.037563521414995193\r\nStep 3465, loss: 0.03497340902686119\r\nStep 3466, loss: 0.03774375095963478\r\nStep 3467, loss: 0.03727244585752487\r\nStep 3468, loss: 0.03773335739970207\r\nStep 3469, loss: 0.03587649390101433\r\nStep 3470, loss: 0.036802347749471664\r\nStep 3471, loss: 0.037289515137672424\r\nStep 3472, loss: 0.03529837727546692\r\nStep 3473, loss: 0.03601137921214104\r\nStep 3474, loss: 0.034268684685230255\r\nStep 3475, loss: 0.03475920483469963\r\nStep 3476, loss: 0.034997887909412384\r\nStep 3477, loss: 0.03556222841143608\r\nStep 3478, loss: 0.03516329079866409\r\nStep 3479, loss: 0.03476493060588837\r\nStep 3480, loss: 0.03481031209230423\r\nStep 3481, loss: 0.11494488269090652\r\nStep 3482, loss: 0.036035116761922836\r\nStep 3483, loss: 
0.034411050379276276\r\nStep 3484, loss: 0.034939996898174286\r\nStep 3485, loss: 0.037620410323143005\r\nStep 3486, loss: 0.03537745028734207\r\nStep 3487, loss: 0.03434715420007706\r\nStep 3488, loss: 0.03457966446876526\r\nStep 3489, loss: 0.033046551048755646\r\nStep 3490, loss: 0.03495078533887863\r\nStep 3491, loss: 0.03586428239941597\r\nStep 3492, loss: 0.033997535705566406\r\nStep 3493, loss: 0.033651866018772125\r\nStep 3494, loss: 0.03317544236779213\r\nStep 3495, loss: 0.033858250826597214\r\nStep 3496, loss: 0.033493101596832275\r\nStep 3497, loss: 0.03397738188505173\r\nStep 3498, loss: 0.03396168351173401\r\nStep 3499, loss: 0.03514862060546875\r\nStep 3500, loss: 0.03395482897758484\r\nStep 3501, loss: 0.03484037145972252\r\nStep 3502, loss: 0.03383760526776314\r\nStep 3503, loss: 0.05804015323519707\r\nStep 3504, loss: 0.0341717004776001\r\nStep 3505, loss: 0.03297252580523491\r\nStep 3506, loss: 0.03412457928061485\r\nStep 3507, loss: 0.033137883991003036\r\nStep 3508, loss: 0.03350658342242241\r\nStep 3509, loss: 0.03426491469144821\r\nStep 3510, loss: 0.034333519637584686\r\nStep 3511, loss: 0.031599681824445724\r\nStep 3512, loss: 0.03339041769504547\r\nStep 3513, loss: 0.03183675929903984\r\nStep 3514, loss: 0.03246445581316948\r\nStep 3515, loss: 0.032460689544677734\r\nStep 3516, loss: 0.031990308314561844\r\nStep 3517, loss: 0.032539643347263336\r\nStep 3518, loss: 0.03127250075340271\r\nStep 3519, loss: 0.03253863379359245\r\nStep 3520, loss: 0.03277866914868355\r\nStep 3521, loss: 0.03163193166255951\r\nStep 3522, loss: 0.0568869523704052\r\nStep 3523, loss: 0.035408295691013336\r\nStep 3524, loss: 0.034723155200481415\r\nStep 3525, loss: 0.03409053757786751\r\nStep 3526, loss: 0.03219780698418617\r\nStep 3527, loss: 0.03487257659435272\r\nStep 3528, loss: 0.031866393983364105\r\nStep 3529, loss: 0.031005874276161194\r\nStep 3530, loss: 0.033232431858778\r\nStep 3531, loss: 0.0336323156952858\r\nStep 3532, loss: 0.029411453753709793\r\nStep 3533, loss: 0.03155900537967682\r\nStep 3534, loss: 0.03226080164313316\r\nStep 3535, loss: 0.03396743908524513\r\nStep 3536, loss: 0.03156247362494469\r\nStep 3537, loss: 0.030360490083694458\r\nStep 3538, loss: 0.03147176653146744\r\nStep 3539, loss: 0.03179658576846123\r\nStep 3540, loss: 0.030563395470380783\r\nStep 3541, loss: 0.029596921056509018\r\nStep 3542, loss: 0.03058614395558834\r\nStep 3543, loss: 0.03105500340461731\r\nStep 3544, loss: 0.030400486662983894\r\nStep 3545, loss: 0.03171136975288391\r\nStep 3546, loss: 0.05050944909453392\r\nStep 3547, loss: 0.030314259231090546\r\nStep 3548, loss: 0.030795840546488762\r\nStep 3549, loss: 0.03046933002769947\r\nStep 3550, loss: 0.029864437878131866\r\nStep 3551, loss: 0.02964220568537712\r\nStep 3552, loss: 0.030396489426493645\r\nStep 3553, loss: 0.030990006402134895\r\nStep 3554, loss: 0.02927776426076889\r\nStep 3555, loss: 0.029331248253583908\r\nStep 3556, loss: 0.03055214136838913\r\nStep 3557, loss: 0.029614442959427834\r\nStep 3558, loss: 0.03123422898352146\r\nStep 3559, loss: 0.029256047680974007\r\nStep 3560, loss: 0.030997158959507942\r\nStep 3561, loss: 0.03126826882362366\r\nStep 3562, loss: 0.029868589714169502\r\nStep 3563, loss: 0.027842270210385323\r\nStep 3564, loss: 0.029812240973114967\r\nStep 3565, loss: 0.030136968940496445\r\nStep 3566, loss: 0.02990925870835781\r\nStep 3567, loss: 0.02901807799935341\r\nStep 3568, loss: 0.02910797856748104\r\nStep 3569, loss: 0.029415378347039223\r\nStep 3570, loss: 0.030453307554125786\r\nStep 3571, loss: 
0.029732519760727882\r\nStep 3572, loss: 0.02916395664215088\r\nStep 3573, loss: 0.02817610464990139\r\nStep 3574, loss: 0.028578005731105804\r\nStep 3575, loss: 0.02926570177078247\r\nStep 3576, loss: 0.028069499880075455\r\nStep 3577, loss: 0.02946426346898079\r\nStep 3578, loss: 0.0289425291121006\r\nStep 3579, loss: 0.027616865932941437\r\nStep 3580, loss: 0.048997826874256134\r\nStep 3581, loss: 0.03073946200311184\r\nStep 3582, loss: 0.029509223997592926\r\nStep 3583, loss: 0.029979007318615913\r\nStep 3584, loss: 0.030070040374994278\r\nStep 3585, loss: 0.029943369328975677\r\nStep 3586, loss: 0.029903260990977287\r\nStep 3587, loss: 0.02792501263320446\r\nStep 3588, loss: 0.030644038692116737\r\nStep 3589, loss: 0.028502190485596657\r\nStep 3590, loss: 0.16893966495990753\r\nStep 3591, loss: 0.0364992693066597\r\nStep 3592, loss: 0.03535819426178932\r\nStep 3593, loss: 0.0340292751789093\r\nStep 3594, loss: 0.032831449061632156\r\nStep 3595, loss: 0.03273937478661537\r\nStep 3596, loss: 0.030818399041891098\r\nStep 3597, loss: 0.031848158687353134\r\nStep 3598, loss: 0.030160151422023773\r\nStep 3599, loss: 0.03145298361778259\r\nStep 3600, loss: 0.030700132250785828\r\nStep 3601, loss: 0.03005976974964142\r\nStep 3602, loss: 0.0295945443212986\r\nStep 3603, loss: 0.030989130958914757\r\nStep 3604, loss: 0.030865859240293503\r\nStep 3605, loss: 0.027859607711434364\r\nStep 3606, loss: 0.03072681650519371\r\nStep 3607, loss: 0.028118440881371498\r\nStep 3608, loss: 0.028751196339726448\r\nStep 3609, loss: 0.029555033892393112\r\nStep 3610, loss: 0.02776816114783287\r\nStep 3611, loss: 0.029847249388694763\r\nStep 3612, loss: 0.03015071526169777\r\nStep 3613, loss: 0.027638433501124382\r\nStep 3614, loss: 0.02720467746257782\r\nStep 3615, loss: 0.026853106915950775\r\nStep 3616, loss: 0.04919356107711792\r\nStep 3617, loss: 0.028085241094231606\r\nStep 3618, loss: 0.03016926348209381\r\nStep 3619, loss: 0.0285806804895401\r\nStep 3620, loss: 0.02843702770769596\r\nStep 3621, loss: 0.029505601152777672\r\nStep 3622, loss: 0.028534626588225365\r\nStep 3623, loss: 0.02681834250688553\r\nStep 3624, loss: 0.09924015402793884\r\nStep 3625, loss: 0.02946704998612404\r\nStep 3626, loss: 0.028698816895484924\r\nStep 3627, loss: 0.027128592133522034\r\nStep 3628, loss: 0.02927878312766552\r\nStep 3629, loss: 0.02873973362147808\r\nStep 3630, loss: 0.026406705379486084\r\nStep 3631, loss: 0.02712506242096424\r\nStep 3632, loss: 0.02763264626264572\r\nStep 3633, loss: 0.02776567079126835\r\nStep 3634, loss: 0.028388088569045067\r\nStep 3635, loss: 0.028342852368950844\r\nStep 3636, loss: 0.027896270155906677\r\nStep 3637, loss: 0.027406686916947365\r\nStep 3638, loss: 0.028278321027755737\r\nStep 3639, loss: 0.02645583264529705\r\nStep 3640, loss: 0.027751823887228966\r\nStep 3641, loss: 0.02659590169787407\r\nStep 3642, loss: 0.02576674520969391\r\nStep 3643, loss: 0.026595521718263626\r\nStep 3644, loss: 0.026189623400568962\r\nStep 3645, loss: 0.02397475391626358\r\nStep 3646, loss: 0.02548178657889366\r\nStep 3647, loss: 0.02518901228904724\r\nStep 3648, loss: 0.02570245787501335\r\nStep 3649, loss: 0.051438912749290466\r\nStep 3650, loss: 0.02728925459086895\r\n",,terminal_output +3374,9109768,"TERMINAL",0,0,"4\t ",,terminal_output +3375,9110876,"TERMINAL",0,0,"5\t ",,terminal_output +3376,9111860,"TERMINAL",0,0,"6\t ",,terminal_output +3377,9112902,"TERMINAL",0,0,"7\t ",,terminal_output +3378,9113957,"TERMINAL",0,0,"8\t ",,terminal_output +3379,9114246,"TERMINAL",0,0,"Step 3651, loss: 
0.028334828093647957\r\nStep 3652, loss: 0.02487134002149105\r\nStep 3653, loss: 0.027086518704891205\r\nStep 3654, loss: 0.02706354297697544\r\nStep 3655, loss: 0.027023693546652794\r\nStep 3656, loss: 0.026338543742895126\r\nStep 3657, loss: 0.025019129738211632\r\nStep 3658, loss: 0.026726283133029938\r\nStep 3659, loss: 0.02608250267803669\r\nStep 3660, loss: 0.0267773550003767\r\nStep 3661, loss: 0.0257435105741024\r\nStep 3662, loss: 0.026385392993688583\r\nStep 3663, loss: 0.026497839018702507\r\nStep 3664, loss: 0.025242246687412262\r\nStep 3665, loss: 0.026700468733906746\r\nStep 3666, loss: 0.024985726922750473\r\nStep 3667, loss: 0.025945058092474937\r\nStep 3668, loss: 0.02483956888318062\r\nStep 3669, loss: 0.023531224578619003\r\nStep 3670, loss: 0.023248206824064255\r\nStep 3671, loss: 0.02482711896300316\r\nStep 3672, loss: 0.024343781173229218\r\nStep 3673, loss: 0.02608741633594036\r\nStep 3674, loss: 0.025482870638370514\r\nStep 3675, loss: 0.023550931364297867\r\nStep 3676, loss: 0.025216948240995407\r\nStep 3677, loss: 0.024734189733862877\r\nStep 3678, loss: 0.024760855361819267\r\nStep 3679, loss: 0.02529851533472538\r\nStep 3680, loss: 0.02507028914988041\r\nStep 3681, loss: 0.024105992168188095\r\nStep 3682, loss: 0.024735908955335617\r\nStep 3683, loss: 0.023490967229008675\r\nStep 3684, loss: 0.024568159133195877\r\nStep 3685, loss: 0.02393294870853424\r\nStep 3686, loss: 0.02483331225812435\r\nStep 3687, loss: 0.025757895782589912\r\nStep 3688, loss: 0.02452910505235195\r\nStep 3689, loss: 0.024236125871539116\r\nStep 3690, loss: 0.02320277690887451\r\nStep 3691, loss: 0.0227409265935421\r\nStep 3692, loss: 0.046187423169612885\r\nStep 3693, loss: 0.025432046502828598\r\nStep 3694, loss: 0.022947851568460464\r\nStep 3695, loss: 0.02427680976688862\r\nStep 3696, loss: 0.024091370403766632\r\nStep 3697, loss: 0.023958222940564156\r\nStep 3698, loss: 0.024586455896496773\r\nStep 3699, loss: 0.023110071197152138\r\nStep 3700, loss: 0.0243836659938097\r\nStep 3701, loss: 0.024208951741456985\r\nStep 3702, loss: 0.023205801844596863\r\nStep 3703, loss: 0.02383112721145153\r\nStep 3704, loss: 0.023588113486766815\r\nStep 3705, loss: 0.02339736744761467\r\nStep 3706, loss: 0.023969288915395737\r\nStep 3707, loss: 0.024079807102680206\r\nStep 3708, loss: 0.022954633459448814\r\nStep 3709, loss: 0.023989202454686165\r\nStep 3710, loss: 0.023037664592266083\r\nStep 3711, loss: 0.022812046110630035\r\nStep 3712, loss: 0.02273709699511528\r\nStep 3713, loss: 0.02345089614391327\r\nStep 3714, loss: 0.023054396733641624\r\nStep 3715, loss: 0.022729814052581787\r\nStep 3716, loss: 0.02258145995438099\r\nStep 3717, loss: 0.021160058677196503\r\nStep 3718, loss: 0.024025071412324905\r\nStep 3719, loss: 0.022920183837413788\r\nStep 3720, loss: 0.022942334413528442\r\nStep 3721, loss: 0.02316516451537609\r\nStep 3722, loss: 0.02296331897377968\r\nStep 3723, loss: 0.022470353171229362\r\nStep 3724, loss: 0.02304951846599579\r\nStep 3725, loss: 0.02299954742193222\r\nStep 3726, loss: 0.022316735237836838\r\nStep 3727, loss: 0.02233114279806614\r\nStep 3728, loss: 0.022843174636363983\r\nStep 3729, loss: 0.02310406044125557\r\nStep 3730, loss: 0.022195884957909584\r\nStep 3731, loss: 0.10228093713521957\r\nStep 3732, loss: 0.026203328743577003\r\nStep 3733, loss: 0.02580675110220909\r\nStep 3734, loss: 0.02482028678059578\r\nStep 3735, loss: 0.0267544686794281\r\nStep 3736, loss: 0.02517342008650303\r\nStep 3737, loss: 0.024104900658130646\r\nStep 3738, loss: 
0.02246914990246296\r\nStep 3739, loss: 0.02414797805249691\r\nStep 3740, loss: 0.023503409698605537\r\nStep 3741, loss: 0.025519490242004395\r\nStep 3742, loss: 0.021705862134695053\r\nStep 3743, loss: 0.04084693640470505\r\nStep 3744, loss: 0.023356452584266663\r\nStep 3745, loss: 0.023802250623703003\r\nStep 3746, loss: 0.021154871210455894\r\nStep 3747, loss: 0.022765714675188065\r\nStep 3748, loss: 0.022339578717947006\r\nStep 3749, loss: 0.02211933396756649\r\nStep 3750, loss: 0.023511478677392006\r\nStep 3751, loss: 0.021727383136749268\r\nStep 3752, loss: 0.022830547764897346\r\nStep 3753, loss: 0.021958285942673683\r\nStep 3754, loss: 0.020925546064972878\r\nStep 3755, loss: 0.021910790354013443\r\nStep 3756, loss: 0.022489365190267563\r\nStep 3757, loss: 0.021735306829214096\r\nStep 3758, loss: 0.021106284111738205\r\nStep 3759, loss: 0.02079482562839985\r\nStep 3760, loss: 0.01971210166811943\r\nStep 3761, loss: 0.021641142666339874\r\nStep 3762, loss: 0.02172497846186161\r\nStep 3763, loss: 0.020532850176095963\r\nStep 3764, loss: 0.02179768495261669\r\nStep 3765, loss: 0.1200207844376564\r\nStep 3766, loss: 0.02776232734322548\r\nStep 3767, loss: 0.025019101798534393\r\nStep 3768, loss: 0.02437841147184372\r\nStep 3769, loss: 0.025359490886330605\r\nStep 3770, loss: 0.022247740998864174\r\nStep 3771, loss: 0.025500286370515823\r\nStep 3772, loss: 0.023579753935337067\r\nStep 3773, loss: 0.02472677081823349\r\nStep 3774, loss: 0.02198728173971176\r\nStep 3775, loss: 0.022887496277689934\r\nStep 3776, loss: 0.022508317604660988\r\nStep 3777, loss: 0.0210369061678648\r\nStep 3778, loss: 0.022911565378308296\r\nStep 3779, loss: 0.020484451204538345\r\nStep 3780, loss: 0.020841849967837334\r\nStep 3781, loss: 0.08532189577817917\r\nStep 3782, loss: 0.023757850751280785\r\nStep 3783, loss: 0.022070005536079407\r\nStep 3784, loss: 0.022491155192255974\r\nStep 3785, loss: 0.021963613107800484\r\nStep 3786, loss: 0.022095447406172752\r\nStep 3787, loss: 0.022246215492486954\r\nStep 3788, loss: 0.022394021973013878\r\nStep 3789, loss: 0.021966667845845222\r\nStep 3790, loss: 0.020667660981416702\r\nStep 3791, loss: 0.021948659792542458\r\nStep 3792, loss: 0.021098975092172623\r\nStep 3793, loss: 0.020428992807865143\r\nStep 3794, loss: 0.021966947242617607\r\nStep 3795, loss: 0.021192632615566254\r\nStep 3796, loss: 0.02137923426926136\r\nStep 3797, loss: 0.020158089697360992\r\nStep 3798, loss: 0.021067297086119652\r\nStep 3799, loss: 0.022214481607079506\r\nStep 3800, loss: 0.01918908953666687\r\nStep 3801, loss: 0.019781198352575302\r\nStep 3802, loss: 0.020276829600334167\r\nStep 3803, loss: 0.019484398886561394\r\nStep 3804, loss: 0.019670823588967323\r\nStep 3805, loss: 0.020180504769086838\r\nStep 3806, loss: 0.02083706296980381\r\nStep 3807, loss: 0.02015862613916397\r\nStep 3808, loss: 0.020467791706323624\r\nStep 3809, loss: 0.04053831472992897\r\nStep 3810, loss: 0.021590575575828552\r\nStep 3811, loss: 0.02088666521012783\r\nStep 3812, loss: 0.0204891636967659\r\nStep 3813, loss: 0.020458118990063667\r\nStep 3814, loss: 0.02008834481239319\r\nStep 3815, loss: 0.02027169242501259\r\nStep 3816, loss: 0.019541021436452866\r\nStep 3817, loss: 0.02041810378432274\r\nStep 3818, loss: 0.01984037458896637\r\nStep 3819, loss: 0.020417751744389534\r\nStep 3820, loss: 0.019263245165348053\r\nStep 3821, loss: 0.018861936405301094\r\nStep 3822, loss: 0.019726261496543884\r\nStep 3823, loss: 0.01888410374522209\r\nStep 3824, loss: 0.02029787003993988\r\nStep 3825, loss: 
0.019404707476496696\r\nStep 3826, loss: 0.019714927300810814\r\nStep 3827, loss: 0.019065523520112038\r\nStep 3828, loss: 0.020134683698415756\r\nStep 3829, loss: 0.019146718084812164\r\nStep 3830, loss: 0.03298630192875862\r\nStep 3831, loss: 0.019153358414769173\r\nStep 3832, loss: 0.019237414002418518\r\nStep 3833, loss: 0.02012726478278637\r\nStep 3834, loss: 0.020257441326975822\r\nStep 3835, loss: 0.019056973978877068\r\nStep 3836, loss: 0.01811029016971588\r\nStep 3837, loss: 0.020778987556695938\r\nStep 3838, loss: 0.019088968634605408\r\nStep 3839, loss: 0.018671059980988503\r\nStep 3840, loss: 0.019150815904140472\r\nStep 3841, loss: 0.019113192334771156\r\nStep 3842, loss: 0.019299721345305443\r\nStep 3843, loss: 0.01930643990635872\r\nStep 3844, loss: 0.019060920923948288\r\nStep 3845, loss: 0.018240682780742645\r\nStep 3846, loss: 0.018769903108477592\r\nStep 3847, loss: 0.020005855709314346\r\nStep 3848, loss: 0.018349245190620422\r\nStep 3849, loss: 0.017768269404768944\r\nStep 3850, loss: 0.018634893000125885\r\nStep 3851, loss: 0.01893470250070095\r\nStep 3852, loss: 0.019014041870832443\r\nStep 3853, loss: 0.018795626237988472\r\nStep 3854, loss: 0.01938609592616558\r\nStep 3855, loss: 0.019351337105035782\r\nStep 3856, loss: 0.07870817184448242\r\nStep 3857, loss: 0.021529097110033035\r\nStep 3858, loss: 0.022173618897795677\r\nStep 3859, loss: 0.02001330628991127\r\nStep 3860, loss: 0.021669192239642143\r\nStep 3861, loss: 0.020930690690875053\r\nStep 3862, loss: 0.02009555697441101\r\nStep 3863, loss: 0.018998561426997185\r\nStep 3864, loss: 0.01904110237956047\r\nStep 3865, loss: 0.01965821161866188\r\nStep 3866, loss: 0.01816377602517605\r\nStep 3867, loss: 0.01950272172689438\r\n",,terminal_output +3380,9115073,"TERMINAL",0,0,"9\t ",,terminal_output +3381,9116099,"TERMINAL",0,0,"10\t ",,terminal_output +3382,9117071,"TERMINAL",0,0,"1\t ",,terminal_output +3383,9118114,"TERMINAL",0,0,"2\t ",,terminal_output +3384,9119149,"TERMINAL",0,0,"31",,terminal_output +3385,9120199,"TERMINAL",0,0,"4\t ",,terminal_output +3386,9121021,"TERMINAL",0,0,"Step 3868, loss: 0.018057113513350487\r\nStep 3869, loss: 0.01851329393684864\r\nStep 3870, loss: 0.018983663991093636\r\nStep 3871, loss: 0.020102694630622864\r\nStep 3872, loss: 0.017815979197621346\r\nStep 3873, loss: 0.01921229436993599\r\nStep 3874, loss: 0.02084028720855713\r\nStep 3875, loss: 0.030849678441882133\r\nStep 3876, loss: 0.019380705431103706\r\nStep 3877, loss: 0.019148970022797585\r\nStep 3878, loss: 0.01738712564110756\r\nStep 3879, loss: 0.019346702843904495\r\nStep 3880, loss: 0.017557824030518532\r\nStep 3881, loss: 0.017439518123865128\r\nStep 3882, loss: 0.017496058717370033\r\nStep 3883, loss: 0.017755338922142982\r\nStep 3884, loss: 0.017452966421842575\r\nStep 3885, loss: 0.017717162147164345\r\nStep 3886, loss: 0.01778961531817913\r\nStep 3887, loss: 0.017280831933021545\r\nStep 3888, loss: 0.016866501420736313\r\nStep 3889, loss: 0.017825346440076828\r\nStep 3890, loss: 0.017966311424970627\r\nStep 3891, loss: 0.01689964346587658\r\nStep 3892, loss: 0.01652616076171398\r\nStep 3893, loss: 0.01844245009124279\r\nStep 3894, loss: 0.017998788505792618\r\nStep 3895, loss: 0.01843164674937725\r\nStep 3896, loss: 0.017725368961691856\r\nStep 3897, loss: 0.01779956743121147\r\nStep 3898, loss: 0.018224047496914864\r\nStep 3899, loss: 0.028182601556181908\r\nStep 3900, loss: 0.01830369606614113\r\nStep 3901, loss: 0.017704883590340614\r\nStep 3902, loss: 0.016845427453517914\r\nStep 3903, loss: 
0.01726483926177025\r\nStep 3904, loss: 0.0177324078977108\r\nStep 3905, loss: 0.01815040595829487\r\nStep 3906, loss: 0.01685219630599022\r\nStep 3907, loss: 0.016866112127900124\r\nStep 3908, loss: 0.016308117657899857\r\nStep 3909, loss: 0.016719553619623184\r\nStep 3910, loss: 0.016874076798558235\r\nStep 3911, loss: 0.017670949921011925\r\nStep 3912, loss: 0.017387935891747475\r\nStep 3913, loss: 0.016733165830373764\r\nStep 3914, loss: 0.017693940550088882\r\nStep 3915, loss: 0.018037281930446625\r\nStep 3916, loss: 0.016043346375226974\r\nStep 3917, loss: 0.01571171171963215\r\nStep 3918, loss: 0.01713871955871582\r\nStep 3919, loss: 0.015621045604348183\r\nStep 3920, loss: 0.06899445503950119\r\nStep 3921, loss: 0.016914522275328636\r\nStep 3922, loss: 0.017940746620297432\r\nStep 3923, loss: 0.018271489068865776\r\nStep 3924, loss: 0.017246300354599953\r\nStep 3925, loss: 0.01876147650182247\r\nStep 3926, loss: 0.01818498596549034\r\nStep 3927, loss: 0.01707269810140133\r\nStep 3928, loss: 0.01682434044778347\r\nStep 3929, loss: 0.01830344647169113\r\nStep 3930, loss: 0.018120042979717255\r\nStep 3931, loss: 0.017498530447483063\r\nStep 3932, loss: 0.01782725751399994\r\nStep 3933, loss: 0.017478976398706436\r\nStep 3934, loss: 0.01636580564081669\r\nStep 3935, loss: 0.015773748978972435\r\nStep 3936, loss: 0.017572762444615364\r\nStep 3937, loss: 0.015887930989265442\r\nStep 3938, loss: 0.01606769487261772\r\nStep 3939, loss: 0.01534466352313757\r\nStep 3940, loss: 0.0160346869379282\r\nStep 3941, loss: 0.01835886389017105\r\nStep 3942, loss: 0.03111274354159832\r\nStep 3943, loss: 0.01780993677675724\r\nStep 3944, loss: 0.016434138640761375\r\nStep 3945, loss: 0.017641635611653328\r\nStep 3946, loss: 0.0163868498057127\r\nStep 3947, loss: 0.017293361946940422\r\nStep 3948, loss: 0.01701580360531807\r\nStep 3949, loss: 0.016107728704810143\r\nStep 3950, loss: 0.01590787246823311\r\nStep 3951, loss: 0.01629703678190708\r\nStep 3952, loss: 0.0171827245503664\r\nStep 3953, loss: 0.015078633092343807\r\nStep 3954, loss: 0.015835002064704895\r\nStep 3955, loss: 0.016070568934082985\r\nStep 3956, loss: 0.015506432391703129\r\nStep 3957, loss: 0.015109878033399582\r\nStep 3958, loss: 0.014826263301074505\r\nStep 3959, loss: 0.01698330231010914\r\nStep 3960, loss: 0.014703911729156971\r\nStep 3961, loss: 0.015949057415127754\r\nStep 3962, loss: 0.016760481521487236\r\nStep 3963, loss: 0.014693118631839752\r\nStep 3964, loss: 0.01498363260179758\r\nStep 3965, loss: 0.015111027285456657\r\nStep 3966, loss: 0.014967251569032669\r\nStep 3967, loss: 0.015367282554507256\r\nStep 3968, loss: 0.015672115609049797\r\nStep 3969, loss: 0.014235804788768291\r\nStep 3970, loss: 0.014802767895162106\r\nStep 3971, loss: 0.01522475853562355\r\nStep 3972, loss: 0.015396034345030785\r\nStep 3973, loss: 0.015292656607925892\r\nStep 3974, loss: 0.014453450217843056\r\nStep 3975, loss: 0.016109077259898186\r\nStep 3976, loss: 0.015578724443912506\r\nStep 3977, loss: 0.01555931381881237\r\nStep 3978, loss: 0.015576975420117378\r\nStep 3979, loss: 0.015024309977889061\r\nStep 3980, loss: 0.014834034256637096\r\nStep 3981, loss: 0.1054786890745163\r\nStep 3982, loss: 0.019762655720114708\r\nStep 3983, loss: 0.020656220614910126\r\nStep 3984, loss: 0.016884852200746536\r\nStep 3985, loss: 0.017482148483395576\r\nStep 3986, loss: 0.018197933211922646\r\nStep 3987, loss: 0.01710725761950016\r\nStep 3988, loss: 0.016598885878920555\r\nStep 3989, loss: 0.016742216423153877\r\nStep 3990, loss: 
0.017240574583411217\r\nStep 3991, loss: 0.016805795952677727\r\nStep 3992, loss: 0.017580445855855942\r\nStep 3993, loss: 0.017371278256177902\r\nStep 3994, loss: 0.01521320827305317\r\nStep 3995, loss: 0.01619414985179901\r\nStep 3996, loss: 0.01675819791853428\r\nStep 3997, loss: 0.0155946621671319\r\nStep 3998, loss: 0.01442532055079937\r\nStep 3999, loss: 0.015736836940050125\r\nSaved checkpoint at step 4000\r\nStep 4000, loss: 0.014978994615375996\r\nStep 4001, loss: 0.01350949052721262\r\nStep 4002, loss: 0.015663644298911095\r\nStep 4003, loss: 0.0151066267862916\r\nStep 4004, loss: 0.015209559351205826\r\nStep 4005, loss: 0.015233522281050682\r\nStep 4006, loss: 0.06990201771259308\r\nStep 4007, loss: 0.01540255919098854\r\nStep 4008, loss: 0.018036261200904846\r\nStep 4009, loss: 0.014927800744771957\r\nStep 4010, loss: 0.015441618859767914\r\nStep 4011, loss: 0.016217757016420364\r\nStep 4012, loss: 0.015274959616363049\r\nStep 4013, loss: 0.016000671312212944\r\nStep 4014, loss: 0.016407396644353867\r\nStep 4015, loss: 0.014897782355546951\r\nStep 4016, loss: 0.014432741329073906\r\nStep 4017, loss: 0.01455174945294857\r\nStep 4018, loss: 0.013854792341589928\r\nStep 4019, loss: 0.01612439937889576\r\nStep 4020, loss: 0.014364087022840977\r\nStep 4021, loss: 0.014828051440417767\r\nStep 4022, loss: 0.01574166864156723\r\nStep 4023, loss: 0.014638375490903854\r\nStep 4024, loss: 0.014645270071923733\r\nStep 4025, loss: 0.014795362949371338\r\nStep 4026, loss: 0.013924747705459595\r\nStep 4027, loss: 0.016139542683959007\r\nStep 4028, loss: 0.0142601253464818\r\nStep 4029, loss: 0.015648959204554558\r\nStep 4030, loss: 0.034524980932474136\r\nStep 4031, loss: 0.01455420721322298\r\nStep 4032, loss: 0.014673581346869469\r\nStep 4033, loss: 0.014849516563117504\r\nStep 4034, loss: 0.015722429379820824\r\nStep 4035, loss: 0.014644728042185307\r\nStep 4036, loss: 0.014765871688723564\r\nStep 4037, loss: 0.016191421076655388\r\nStep 4038, loss: 0.015572288073599339\r\nStep 4039, loss: 0.015109594911336899\r\nStep 4040, loss: 0.0144657539203763\r\nStep 4041, loss: 0.014692082069814205\r\nStep 4042, loss: 0.01413112971931696\r\nStep 4043, loss: 0.014414873905479908\r\nStep 4044, loss: 0.013828950934112072\r\nStep 4045, loss: 0.014397912658751011\r\nStep 4046, loss: 0.014059938490390778\r\nStep 4047, loss: 0.013976836577057838\r\nStep 4048, loss: 0.013212688267230988\r\nStep 4049, loss: 0.01455154363065958\r\nStep 4050, loss: 0.014074915088713169\r\nStep 4051, loss: 0.014048022218048573\r\nStep 4052, loss: 0.012955300509929657\r\nStep 4053, loss: 0.012770050205290318\r\nStep 4054, loss: 0.013761764392256737\r\nStep 4055, loss: 0.01399045530706644\r\nStep 4056, loss: 0.014020390808582306\r\nStep 4057, loss: 0.12378262728452682\r\nStep 4058, loss: 0.01787720061838627\r\nStep 4059, loss: 0.019501492381095886\r\nStep 4060, loss: 0.017514988780021667\r\nStep 4061, loss: 0.016821786761283875\r\nStep 4062, loss: 0.018278026953339577\r\nStep 4063, loss: 0.01604950800538063\r\nStep 4064, loss: 0.015553129836916924\r\nStep 4065, loss: 0.016828903928399086\r\nStep 4066, loss: 0.015474068000912666\r\nStep 4067, loss: 0.013337639160454273\r\nStep 4068, loss: 0.015234486199915409\r\nStep 4069, loss: 0.014985531568527222\r\nStep 4070, loss: 0.014075882732868195\r\nStep 4071, loss: 0.014454466290771961\r\nStep 4072, loss: 0.014369016513228416\r\nStep 4073, loss: 0.016297610476613045\r\nStep 4074, loss: 0.013721270486712456\r\nStep 4075, loss: 0.01445820089429617\r\nStep 4076, loss: 
0.028317969292402267\r\nStep 4077, loss: 0.01385942567139864\r\nStep 4078, loss: 0.014621893875300884\r\nStep 4079, loss: 0.01414393913000822\r\nStep 4080, loss: 0.01305434014648199\r\nStep 4081, loss: 0.014388257637619972\r\nStep 4082, loss: 0.01487482339143753\r\nStep 4083, loss: 0.01338122971355915\r\n",,terminal_output +3387,9121228,"TERMINAL",0,0,"5\t ",,terminal_output +3388,9122266,"TERMINAL",0,0,"6\t ",,terminal_output +3389,9123308,"TERMINAL",0,0,"7\t ",,terminal_output +3390,9124378,"TERMINAL",0,0,"9\t ",,terminal_output +3391,9125445,"TERMINAL",0,0,"20\t ",,terminal_output +3392,9125946,"TERMINAL",0,0,"Step 4084, loss: 0.013647761195898056\r\nStep 4085, loss: 0.013557234779000282\r\nStep 4086, loss: 0.012892537750303745\r\nStep 4087, loss: 0.014498423784971237\r\nStep 4088, loss: 0.013405936770141125\r\nStep 4089, loss: 0.013739029876887798\r\nStep 4090, loss: 0.012789439409971237\r\nStep 4091, loss: 0.012355412356555462\r\nStep 4092, loss: 0.014280816540122032\r\nStep 4093, loss: 0.013438201509416103\r\nStep 4094, loss: 0.012919191271066666\r\nStep 4095, loss: 0.013147051446139812\r\nStep 4096, loss: 0.012846636585891247\r\nStep 4097, loss: 0.013038207776844501\r\nStep 4098, loss: 0.012878784909844398\r\nStep 4099, loss: 0.012602110393345356\r\nStep 4100, loss: 0.012430982664227486\r\nStep 4101, loss: 0.01383944507688284\r\nStep 4102, loss: 0.012724500149488449\r\nStep 4103, loss: 0.013415267691016197\r\nStep 4104, loss: 0.08705765008926392\r\nStep 4105, loss: 0.01924111507833004\r\nStep 4106, loss: 0.015796059742569923\r\nStep 4107, loss: 0.01567787677049637\r\nStep 4108, loss: 0.016538769006729126\r\nStep 4109, loss: 0.015567131340503693\r\nStep 4110, loss: 0.015586449764668941\r\nStep 4111, loss: 0.017318328842520714\r\nStep 4112, loss: 0.015216917730867863\r\nStep 4113, loss: 0.013777747750282288\r\nStep 4114, loss: 0.013802791945636272\r\nStep 4115, loss: 0.014951197430491447\r\nStep 4116, loss: 0.014748438261449337\r\nStep 4117, loss: 0.013592025265097618\r\nStep 4118, loss: 0.014677378349006176\r\nStep 4119, loss: 0.013791517354547977\r\nStep 4120, loss: 0.013148820027709007\r\nStep 4121, loss: 0.013457400724291801\r\nStep 4122, loss: 0.013439354486763477\r\nStep 4123, loss: 0.01447175070643425\r\nStep 4124, loss: 0.013915927149355412\r\nStep 4125, loss: 0.013417772017419338\r\nStep 4126, loss: 0.01340765506029129\r\nStep 4127, loss: 0.012822123244404793\r\nStep 4128, loss: 0.012746944092214108\r\nStep 4129, loss: 0.012494883500039577\r\nStep 4130, loss: 0.012358193285763264\r\nStep 4131, loss: 0.011441702954471111\r\nStep 4132, loss: 0.023700479418039322\r\nStep 4133, loss: 0.012992024421691895\r\nStep 4134, loss: 0.013122420758008957\r\nStep 4135, loss: 0.012848050333559513\r\nStep 4136, loss: 0.01219344325363636\r\nStep 4137, loss: 0.013120791874825954\r\nStep 4138, loss: 0.012127194553613663\r\nStep 4139, loss: 0.012712471187114716\r\nStep 4140, loss: 0.012332689948379993\r\nStep 4141, loss: 0.012535358779132366\r\nStep 4142, loss: 0.012355539947748184\r\nStep 4143, loss: 0.05515044555068016\r\nStep 4144, loss: 0.013229972682893276\r\nStep 4145, loss: 0.015904642641544342\r\nStep 4146, loss: 0.014220756478607655\r\nStep 4147, loss: 0.01284827385097742\r\nStep 4148, loss: 0.012334334664046764\r\nStep 4149, loss: 0.014037185348570347\r\nStep 4150, loss: 0.01312832161784172\r\nStep 4151, loss: 0.01288183405995369\r\nStep 4152, loss: 0.012846671044826508\r\nStep 4153, loss: 0.012812787666916847\r\nStep 4154, loss: 0.013192091137170792\r\nStep 4155, loss: 
0.012668943032622337\r\nStep 4156, loss: 0.011926342733204365\r\nStep 4157, loss: 0.013535658828914165\r\nStep 4158, loss: 0.012836514040827751\r\nStep 4159, loss: 0.012178744189441204\r\nStep 4160, loss: 0.013018987141549587\r\nStep 4161, loss: 0.011507530696690083\r\nStep 4162, loss: 0.011716826818883419\r\nStep 4163, loss: 0.012918432243168354\r\nStep 4164, loss: 0.013555578887462616\r\nStep 4165, loss: 0.01049735490232706\r\nStep 4166, loss: 0.01127096638083458\r\nStep 4167, loss: 0.01302402000874281\r\nStep 4168, loss: 0.011396745219826698\r\nStep 4169, loss: 0.010875759646296501\r\nStep 4170, loss: 0.022005422040820122\r\nStep 4171, loss: 0.012183627113699913\r\nStep 4172, loss: 0.012897401116788387\r\nStep 4173, loss: 0.012155596166849136\r\nStep 4174, loss: 0.013260330073535442\r\nStep 4175, loss: 0.013140833005309105\r\nStep 4176, loss: 0.012051515281200409\r\nStep 4177, loss: 0.011759251356124878\r\nStep 4178, loss: 0.012612035498023033\r\nStep 4179, loss: 0.012157085351645947\r\nStep 4180, loss: 0.012269699946045876\r\nStep 4181, loss: 0.011169406585395336\r\nStep 4182, loss: 0.011929405853152275\r\nStep 4183, loss: 0.04924629628658295\r\nStep 4184, loss: 0.012261311523616314\r\nStep 4185, loss: 0.012281027622520924\r\nStep 4186, loss: 0.012578189373016357\r\nStep 4187, loss: 0.011966856196522713\r\nStep 4188, loss: 0.01278387475758791\r\nStep 4189, loss: 0.014300057664513588\r\nStep 4190, loss: 0.011573493480682373\r\nStep 4191, loss: 0.012561308220028877\r\nStep 4192, loss: 0.011506683193147182\r\nStep 4193, loss: 0.0116963442414999\r\nStep 4194, loss: 0.012416107580065727\r\nStep 4195, loss: 0.01130929309874773\r\nStep 4196, loss: 0.010526101104915142\r\nStep 4197, loss: 0.011137580499053001\r\nStep 4198, loss: 0.010779651813209057\r\nStep 4199, loss: 0.011334607377648354\r\nStep 4200, loss: 0.011336908675730228\r\nStep 4201, loss: 0.01217751856893301\r\nStep 4202, loss: 0.011977161280810833\r\nStep 4203, loss: 0.012118098326027393\r\nStep 4204, loss: 0.01118409913033247\r\nStep 4205, loss: 0.011471361853182316\r\nStep 4206, loss: 0.010213148780167103\r\nStep 4207, loss: 0.011469198390841484\r\nStep 4208, loss: 0.010940607637166977\r\nStep 4209, loss: 0.010394479148089886\r\nStep 4210, loss: 0.011531916446983814\r\nStep 4211, loss: 0.011409779079258442\r\nStep 4212, loss: 0.019526436924934387\r\nStep 4213, loss: 0.011148988269269466\r\nStep 4214, loss: 0.011027118191123009\r\nStep 4215, loss: 0.011265452951192856\r\nStep 4216, loss: 0.011262431740760803\r\nStep 4217, loss: 0.011662586592137814\r\nStep 4218, loss: 0.01075836457312107\r\nStep 4219, loss: 0.010629253461956978\r\nStep 4220, loss: 0.010953408665955067\r\nStep 4221, loss: 0.010356210172176361\r\nStep 4222, loss: 0.010837755165994167\r\nStep 4223, loss: 0.10503073781728745\r\nStep 4224, loss: 0.013653023168444633\r\nStep 4225, loss: 0.017026884481310844\r\nStep 4226, loss: 0.01309831254184246\r\nStep 4227, loss: 0.013962573371827602\r\nStep 4228, loss: 0.013569675385951996\r\nStep 4229, loss: 0.013646744191646576\r\nStep 4230, loss: 0.01376678328961134\r\nStep 4231, loss: 0.013051138259470463\r\nStep 4232, loss: 0.0125248609110713\r\nStep 4233, loss: 0.012615323066711426\r\nStep 4234, loss: 0.012017251923680305\r\nStep 4235, loss: 0.012126816436648369\r\nStep 4236, loss: 0.01204416248947382\r\nStep 4237, loss: 0.01215906161814928\r\nStep 4238, loss: 0.012050937861204147\r\nStep 4239, loss: 0.011842858977615833\r\nStep 4240, loss: 0.012179854325950146\r\nStep 4241, loss: 0.01107859704643488\r\nStep 4242, loss: 
0.010550417937338352\r\nStep 4243, loss: 0.010755104944109917\r\nStep 4244, loss: 0.011100679636001587\r\nStep 4245, loss: 0.011367841623723507\r\nStep 4246, loss: 0.010390589945018291\r\nStep 4247, loss: 0.010516967624425888\r\nStep 4248, loss: 0.010444052517414093\r\nStep 4249, loss: 0.010595629923045635\r\nStep 4250, loss: 0.011193829588592052\r\nStep 4251, loss: 0.010981695726513863\r\nStep 4252, loss: 0.010785296559333801\r\nStep 4253, loss: 0.010315400548279285\r\nStep 4254, loss: 0.010605568997561932\r\nStep 4255, loss: 0.009897508658468723\r\nStep 4256, loss: 0.009826657362282276\r\nStep 4257, loss: 0.010362563654780388\r\nStep 4258, loss: 0.009922700934112072\r\nStep 4259, loss: 0.009963075630366802\r\nStep 4260, loss: 0.009692572988569736\r\nStep 4261, loss: 0.010322407819330692\r\nStep 4262, loss: 0.01125026773661375\r\nStep 4263, loss: 0.010861258022487164\r\nStep 4264, loss: 0.06215044483542442\r\nStep 4265, loss: 0.015211138874292374\r\nStep 4266, loss: 0.014028197154402733\r\nStep 4267, loss: 0.012349462136626244\r\nStep 4268, loss: 0.012841904535889626\r\nStep 4269, loss: 0.012721526436507702\r\nStep 4270, loss: 0.013152928091585636\r\nStep 4271, loss: 0.0133522292599082\r\nStep 4272, loss: 0.011681442148983479\r\nStep 4273, loss: 0.011735484004020691\r\nStep 4274, loss: 0.011405106633901596\r\nStep 4275, loss: 0.011746257543563843\r\nStep 4276, loss: 0.010779104195535183\r\nStep 4277, loss: 0.011212904006242752\r\nStep 4278, loss: 0.011084883473813534\r\nStep 4279, loss: 0.010924042202532291\r\nStep 4280, loss: 0.010603208094835281\r\nStep 4281, loss: 0.011079097166657448\r\nStep 4282, loss: 0.010328654199838638\r\nStep 4283, loss: 0.010455445386469364\r\nStep 4284, loss: 0.010220889933407307\r\nStep 4285, loss: 0.010504329577088356\r\nStep 4286, loss: 0.010781816206872463\r\nStep 4287, loss: 0.009753752499818802\r\nStep 4288, loss: 0.01050315797328949\r\nStep 4289, loss: 0.010225783102214336\r\nStep 4290, loss: 0.010062476620078087\r\nStep 4291, loss: 0.010032027959823608\r\nStep 4292, loss: 0.010326205752789974\r\nStep 4293, loss: 0.009709661826491356\r\nStep 4294, loss: 0.009865629486739635\r\nStep 4295, loss: 0.009581847116351128\r\nStep 4296, loss: 0.00938871968537569\r\nStep 4297, loss: 0.04417472705245018\r\nStep 4298, loss: 0.011650724336504936\r\nStep 4299, loss: 0.011014871299266815\r\n",,terminal_output +3393,9126458,"TERMINAL",0,0,"1\t ",,terminal_output +3394,9127466,"TERMINAL",0,0,"2\t ",,terminal_output +3395,9128499,"TERMINAL",0,0,"3\t ",,terminal_output +3396,9129548,"TERMINAL",0,0,"4\t ",,terminal_output +3397,9130585,"TERMINAL",0,0,"5\t ",,terminal_output +3398,9131155,"TERMINAL",0,0,"Step 4300, loss: 0.01094388123601675\r\nStep 4301, loss: 0.010841275565326214\r\nStep 4302, loss: 0.010467597283422947\r\nStep 4303, loss: 0.01089168805629015\r\nStep 4304, loss: 0.010576288215816021\r\nStep 4305, loss: 0.010340850800275803\r\nStep 4306, loss: 0.011010119691491127\r\nStep 4307, loss: 0.010651879943907261\r\nStep 4308, loss: 0.009470284916460514\r\nStep 4309, loss: 0.01104652974754572\r\nStep 4310, loss: 0.00992704089730978\r\nStep 4311, loss: 0.01014870684593916\r\nStep 4312, loss: 0.010688550770282745\r\nStep 4313, loss: 0.010370722971856594\r\nStep 4314, loss: 0.010753079317510128\r\nStep 4315, loss: 0.01001750212162733\r\nStep 4316, loss: 0.009226047433912754\r\nStep 4317, loss: 0.009950347244739532\r\nStep 4318, loss: 0.06679243594408035\r\nStep 4319, loss: 0.013260002247989178\r\nStep 4320, loss: 0.012107057496905327\r\nStep 4321, loss: 
0.011737008579075336\r\nStep 4322, loss: 0.01221543364226818\r\nStep 4323, loss: 0.01011100597679615\r\nStep 4324, loss: 0.010904135182499886\r\nStep 4325, loss: 0.01178079191595316\r\nStep 4326, loss: 0.010691068135201931\r\nStep 4327, loss: 0.011167249642312527\r\nStep 4328, loss: 0.011948603205382824\r\nStep 4329, loss: 0.00971419271081686\r\nStep 4330, loss: 0.010803719982504845\r\nStep 4331, loss: 0.009349199943244457\r\nStep 4332, loss: 0.010005808435380459\r\nStep 4333, loss: 0.01093033142387867\r\nStep 4334, loss: 0.009494361467659473\r\nStep 4335, loss: 0.009950228966772556\r\nStep 4336, loss: 0.009711489081382751\r\nStep 4337, loss: 0.010667541064321995\r\nStep 4338, loss: 0.008936027064919472\r\nStep 4339, loss: 0.009220884181559086\r\nStep 4340, loss: 0.04461270943284035\r\nStep 4341, loss: 0.010379008948802948\r\nStep 4342, loss: 0.010132179595530033\r\nStep 4343, loss: 0.011075579561293125\r\nStep 4344, loss: 0.010139857418835163\r\nStep 4345, loss: 0.010713580995798111\r\nStep 4346, loss: 0.010909301228821278\r\nStep 4347, loss: 0.01058953907340765\r\nStep 4348, loss: 0.00976360123604536\r\nStep 4349, loss: 0.009613841772079468\r\nStep 4350, loss: 0.010064191184937954\r\nStep 4351, loss: 0.008837740868330002\r\nStep 4352, loss: 0.009819095022976398\r\nStep 4353, loss: 0.008736494928598404\r\nStep 4354, loss: 0.01007346622645855\r\nStep 4355, loss: 0.009320879355072975\r\nStep 4356, loss: 0.010128756985068321\r\nStep 4357, loss: 0.009114525280892849\r\nStep 4358, loss: 0.009437255561351776\r\nStep 4359, loss: 0.00978883821517229\r\nStep 4360, loss: 0.008994552306830883\r\nStep 4361, loss: 0.009625940583646297\r\nStep 4362, loss: 0.0100493049249053\r\nStep 4363, loss: 0.009306089021265507\r\nStep 4364, loss: 0.008694639429450035\r\nStep 4365, loss: 0.009484241716563702\r\nStep 4366, loss: 0.008734948001801968\r\nStep 4367, loss: 0.009290032088756561\r\nStep 4368, loss: 0.009199975058436394\r\nStep 4369, loss: 0.009367046877741814\r\nStep 4370, loss: 0.009892427362501621\r\nStep 4371, loss: 0.008267453871667385\r\nStep 4372, loss: 0.008259602822363377\r\nStep 4373, loss: 0.009366174228489399\r\nStep 4374, loss: 0.009139161556959152\r\nStep 4375, loss: 0.01923862099647522\r\nStep 4376, loss: 0.008897071704268456\r\nStep 4377, loss: 0.009794154204428196\r\nStep 4378, loss: 0.008810743689537048\r\nStep 4379, loss: 0.00932932086288929\r\nStep 4380, loss: 0.00989687442779541\r\nStep 4381, loss: 0.008872219361364841\r\nStep 4382, loss: 0.010150064714252949\r\nStep 4383, loss: 0.009300422854721546\r\nStep 4384, loss: 0.00855589285492897\r\nStep 4385, loss: 0.009650591760873795\r\nStep 4386, loss: 0.00879052933305502\r\nStep 4387, loss: 0.0090401079505682\r\nStep 4388, loss: 0.009263088926672935\r\nStep 4389, loss: 0.009673908352851868\r\nStep 4390, loss: 0.009188777767121792\r\nStep 4391, loss: 0.00923454575240612\r\nStep 4392, loss: 0.00900141429156065\r\nStep 4393, loss: 0.009166090749204159\r\nStep 4394, loss: 0.009551163762807846\r\nStep 4395, loss: 0.009244362823665142\r\nStep 4396, loss: 0.00940397847443819\r\nStep 4397, loss: 0.008732778951525688\r\nStep 4398, loss: 0.009475933387875557\r\nStep 4399, loss: 0.009422725066542625\r\nStep 4400, loss: 0.009498882107436657\r\nStep 4401, loss: 0.008950240910053253\r\nStep 4402, loss: 0.008469443768262863\r\nStep 4403, loss: 0.008715381845831871\r\nStep 4404, loss: 0.009109852835536003\r\nStep 4405, loss: 0.007971420884132385\r\nStep 4406, loss: 0.008251383900642395\r\nStep 4407, loss: 0.015410464257001877\r\nStep 4408, loss: 
0.008129381574690342\r\nStep 4409, loss: 0.00965066347271204\r\nStep 4410, loss: 0.008543414995074272\r\nStep 4411, loss: 0.009962373413145542\r\nStep 4412, loss: 0.008957518264651299\r\nStep 4413, loss: 0.009727656841278076\r\nStep 4414, loss: 0.008413618430495262\r\nStep 4415, loss: 0.00889347679913044\r\nStep 4416, loss: 0.010404843837022781\r\nStep 4417, loss: 0.034141506999731064\r\nStep 4418, loss: 0.009430053643882275\r\nStep 4419, loss: 0.008852940052747726\r\nStep 4420, loss: 0.009349439293146133\r\nStep 4421, loss: 0.00985705479979515\r\nStep 4422, loss: 0.008617023006081581\r\nStep 4423, loss: 0.009584581479430199\r\nStep 4424, loss: 0.00960939098149538\r\nStep 4425, loss: 0.009284677915275097\r\nStep 4426, loss: 0.008872546255588531\r\nStep 4427, loss: 0.009094329550862312\r\nStep 4428, loss: 0.008349227719008923\r\nStep 4429, loss: 0.008059888146817684\r\nStep 4430, loss: 0.009107757359743118\r\nStep 4431, loss: 0.00888796616345644\r\nStep 4432, loss: 0.008351033553481102\r\nStep 4433, loss: 0.008959508500993252\r\nStep 4434, loss: 0.008561855182051659\r\nStep 4435, loss: 0.007526794448494911\r\nStep 4436, loss: 0.008504703640937805\r\nStep 4437, loss: 0.015955112874507904\r\nStep 4438, loss: 0.009210259653627872\r\nStep 4439, loss: 0.009552763774991035\r\nStep 4440, loss: 0.008598376996815205\r\nStep 4441, loss: 0.008877485990524292\r\nStep 4442, loss: 0.009368352591991425\r\nStep 4443, loss: 0.0086085619404912\r\nStep 4444, loss: 0.009638580493628979\r\nStep 4445, loss: 0.008571461774408817\r\nStep 4446, loss: 0.007934309542179108\r\nStep 4447, loss: 0.00914657674729824\r\nStep 4448, loss: 0.00926777720451355\r\nStep 4449, loss: 0.008398504927754402\r\nStep 4450, loss: 0.008711177855730057\r\nStep 4451, loss: 0.00842150766402483\r\nStep 4452, loss: 0.008534669876098633\r\nStep 4453, loss: 0.11826895922422409\r\nStep 4454, loss: 0.012645799666643143\r\nStep 4455, loss: 0.016903147101402283\r\nStep 4456, loss: 0.013748147524893284\r\nStep 4457, loss: 0.011603373102843761\r\nStep 4458, loss: 0.011454824358224869\r\nStep 4459, loss: 0.01163946557790041\r\nStep 4460, loss: 0.011476482264697552\r\nStep 4461, loss: 0.009616270661354065\r\nStep 4462, loss: 0.010945914313197136\r\nStep 4463, loss: 0.011043296195566654\r\nStep 4464, loss: 0.010378443636000156\r\nStep 4465, loss: 0.010142991319298744\r\nStep 4466, loss: 0.010219104588031769\r\nStep 4467, loss: 0.009330475702881813\r\nStep 4468, loss: 0.009238062426447868\r\nStep 4469, loss: 0.009848857298493385\r\nStep 4470, loss: 0.008679784834384918\r\nStep 4471, loss: 0.009102696552872658\r\nStep 4472, loss: 0.008392101153731346\r\nStep 4473, loss: 0.00883318018168211\r\nStep 4474, loss: 0.008469972759485245\r\nStep 4475, loss: 0.007656603120267391\r\nStep 4476, loss: 0.00860583409667015\r\nStep 4477, loss: 0.008737090043723583\r\nStep 4478, loss: 0.008395551703870296\r\nStep 4479, loss: 0.008323505520820618\r\nStep 4480, loss: 0.007873290218412876\r\nStep 4481, loss: 0.008260599337518215\r\nStep 4482, loss: 0.006815871223807335\r\nStep 4483, loss: 0.007488789968192577\r\nStep 4484, loss: 0.008016416803002357\r\nStep 4485, loss: 0.008147278800606728\r\nStep 4486, loss: 0.008729509077966213\r\nStep 4487, loss: 0.008468440733850002\r\nStep 4488, loss: 0.007626519538462162\r\nStep 4489, loss: 0.008837196044623852\r\nStep 4490, loss: 0.007422168739140034\r\nStep 4491, loss: 0.007622703444212675\r\nStep 4492, loss: 0.008083797991275787\r\nStep 4493, loss: 0.008377614431083202\r\nStep 4494, loss: 0.046843692660331726\r\nStep 4495, loss: 
0.00954405590891838\r\nStep 4496, loss: 0.009803306311368942\r\nStep 4497, loss: 0.008854730054736137\r\nStep 4498, loss: 0.009039578028023243\r\nStep 4499, loss: 0.010022873058915138\r\nStep 4500, loss: 0.008478157222270966\r\nStep 4501, loss: 0.008473712019622326\r\nStep 4502, loss: 0.009741858579218388\r\nStep 4503, loss: 0.009505843743681908\r\nStep 4504, loss: 0.008934899233281612\r\nStep 4505, loss: 0.008591562509536743\r\nStep 4506, loss: 0.008976887911558151\r\nStep 4507, loss: 0.008321345783770084\r\nStep 4508, loss: 0.008137596771121025\r\nStep 4509, loss: 0.008593492209911346\r\nStep 4510, loss: 0.008376594632863998\r\nStep 4511, loss: 0.007946901954710484\r\nStep 4512, loss: 0.008318513631820679\r\nStep 4513, loss: 0.007954434491693974\r\nStep 4514, loss: 0.008442102000117302\r\nStep 4515, loss: 0.00853508897125721\r\n",,terminal_output +3399,9131661,"TERMINAL",0,0,"6\t ",,terminal_output +3400,9132692,"TERMINAL",0,0,"7\t ",,terminal_output +3401,9133712,"TERMINAL",0,0,"8\t ",,terminal_output +3402,9134837,"TERMINAL",0,0,"9\t ",,terminal_output +3403,9135790,"TERMINAL",0,0,"30\t ",,terminal_output +3404,9136172,"TERMINAL",0,0,"Step 4516, loss: 0.007827158086001873\r\nStep 4517, loss: 0.008252101019024849\r\nStep 4518, loss: 0.00784272514283657\r\nStep 4519, loss: 0.007943606935441494\r\nStep 4520, loss: 0.007772058714181185\r\nStep 4521, loss: 0.0069958982057869434\r\nStep 4522, loss: 0.007473240606486797\r\nStep 4523, loss: 0.008243395946919918\r\nStep 4524, loss: 0.07034891098737717\r\nStep 4525, loss: 0.009591075591742992\r\nStep 4526, loss: 0.010382002219557762\r\nStep 4527, loss: 0.009009272791445255\r\nStep 4528, loss: 0.010070078074932098\r\nStep 4529, loss: 0.008625155314803123\r\nStep 4530, loss: 0.008942599408328533\r\nStep 4531, loss: 0.009290354326367378\r\nStep 4532, loss: 0.010400067083537579\r\nStep 4533, loss: 0.009209948591887951\r\nStep 4534, loss: 0.008022109977900982\r\nStep 4535, loss: 0.00848532933741808\r\nStep 4536, loss: 0.008678572252392769\r\nStep 4537, loss: 0.008930189535021782\r\nStep 4538, loss: 0.008077939972281456\r\nStep 4539, loss: 0.008482731878757477\r\nStep 4540, loss: 0.007980043068528175\r\nStep 4541, loss: 0.008627544157207012\r\nStep 4542, loss: 0.008214844390749931\r\nStep 4543, loss: 0.0071898954920470715\r\nStep 4544, loss: 0.008118478581309319\r\nStep 4545, loss: 0.008261986076831818\r\nStep 4546, loss: 0.007593881338834763\r\nStep 4547, loss: 0.007071455474942923\r\nStep 4548, loss: 0.04941859468817711\r\nStep 4549, loss: 0.00969657115638256\r\nStep 4550, loss: 0.011716010980308056\r\nStep 4551, loss: 0.008388761430978775\r\nStep 4552, loss: 0.009407463483512402\r\nStep 4553, loss: 0.009812012314796448\r\nStep 4554, loss: 0.009459915570914745\r\nStep 4555, loss: 0.007857552729547024\r\nStep 4556, loss: 0.008996051736176014\r\nStep 4557, loss: 0.008897245861589909\r\nStep 4558, loss: 0.008689029142260551\r\nStep 4559, loss: 0.008472131565213203\r\nStep 4560, loss: 0.007370975334197283\r\nStep 4561, loss: 0.008178063668310642\r\nStep 4562, loss: 0.0083761652931571\r\nStep 4563, loss: 0.00909749697893858\r\nStep 4564, loss: 0.0075969151221215725\r\nStep 4565, loss: 0.007595236878842115\r\nStep 4566, loss: 0.008072023279964924\r\nStep 4567, loss: 0.007579537574201822\r\nStep 4568, loss: 0.00814406294375658\r\nStep 4569, loss: 0.007910138927400112\r\nStep 4570, loss: 0.007609030231833458\r\nStep 4571, loss: 0.0072767287492752075\r\nStep 4572, loss: 0.00747049693018198\r\nStep 4573, loss: 0.00715521490201354\r\nStep 4574, loss: 
0.007397943641990423\r\nStep 4575, loss: 0.00736143859103322\r\nStep 4576, loss: 0.007040026597678661\r\nStep 4577, loss: 0.00722730765119195\r\nStep 4578, loss: 0.0076812212355434895\r\nStep 4579, loss: 0.007533787749707699\r\nStep 4580, loss: 0.007176912855356932\r\nStep 4581, loss: 0.007232558913528919\r\nStep 4582, loss: 0.00640919478610158\r\nStep 4583, loss: 0.007915674708783627\r\nStep 4584, loss: 0.006808759644627571\r\nStep 4585, loss: 0.007067925296723843\r\nStep 4586, loss: 0.007066499441862106\r\nStep 4587, loss: 0.00728720985352993\r\nStep 4588, loss: 0.006928115151822567\r\nStep 4589, loss: 0.0079794405028224\r\nStep 4590, loss: 0.014408291317522526\r\nStep 4591, loss: 0.007808622904121876\r\nStep 4592, loss: 0.0072889169678092\r\nStep 4593, loss: 0.007536920718848705\r\nStep 4594, loss: 0.007154306396842003\r\nStep 4595, loss: 0.006624353118240833\r\nStep 4596, loss: 0.007845219224691391\r\nStep 4597, loss: 0.006545964162796736\r\nStep 4598, loss: 0.006787335034459829\r\nStep 4599, loss: 0.007351522333920002\r\nStep 4600, loss: 0.006942004431039095\r\nStep 4601, loss: 0.006984065752476454\r\nStep 4602, loss: 0.007259671110659838\r\nStep 4603, loss: 0.007909554056823254\r\nStep 4604, loss: 0.006465391721576452\r\nStep 4605, loss: 0.0071635679341852665\r\nStep 4606, loss: 0.007740146014839411\r\nStep 4607, loss: 0.007698934990912676\r\nStep 4608, loss: 0.007098096888512373\r\nStep 4609, loss: 0.00708272447809577\r\nStep 4610, loss: 0.006773571018129587\r\nStep 4611, loss: 0.007857807911932468\r\nStep 4612, loss: 0.007078870199620724\r\nStep 4613, loss: 0.007064076606184244\r\nStep 4614, loss: 0.00645044120028615\r\nStep 4615, loss: 0.054741475731134415\r\nStep 4616, loss: 0.011133571155369282\r\nStep 4617, loss: 0.009710630401968956\r\nStep 4618, loss: 0.008660740219056606\r\nStep 4619, loss: 0.009161937981843948\r\nStep 4620, loss: 0.009734109975397587\r\nStep 4621, loss: 0.008729536086320877\r\nStep 4622, loss: 0.007669113576412201\r\nStep 4623, loss: 0.008041223511099815\r\nStep 4624, loss: 0.008359239436686039\r\nStep 4625, loss: 0.008641033433377743\r\nStep 4626, loss: 0.007620137184858322\r\nStep 4627, loss: 0.0071550896391272545\r\nStep 4628, loss: 0.007200367283076048\r\nStep 4629, loss: 0.006880548782646656\r\nStep 4630, loss: 0.006885170936584473\r\nStep 4631, loss: 0.007866381667554379\r\nStep 4632, loss: 0.006993405520915985\r\nStep 4633, loss: 0.006754640024155378\r\nStep 4634, loss: 0.007296635303646326\r\nStep 4635, loss: 0.006554959807544947\r\nStep 4636, loss: 0.007214185316115618\r\nStep 4637, loss: 0.007779165171086788\r\nStep 4638, loss: 0.0074515980668365955\r\nStep 4639, loss: 0.0074689509347081184\r\nStep 4640, loss: 0.006389694754034281\r\nStep 4641, loss: 0.006711344700306654\r\nStep 4642, loss: 0.007776737678796053\r\nStep 4643, loss: 0.013403257355093956\r\nStep 4644, loss: 0.007204935420304537\r\nStep 4645, loss: 0.0064925625920295715\r\nStep 4646, loss: 0.007163857575505972\r\nStep 4647, loss: 0.007319738622754812\r\nStep 4648, loss: 0.006747567560523748\r\nStep 4649, loss: 0.006664274260401726\r\nStep 4650, loss: 0.007195955608040094\r\nStep 4651, loss: 0.00726632634177804\r\nStep 4652, loss: 0.006847159471362829\r\nStep 4653, loss: 0.006502178963273764\r\nStep 4654, loss: 0.006081903353333473\r\nStep 4655, loss: 0.006518619135022163\r\nStep 4656, loss: 0.007410451769828796\r\nStep 4657, loss: 0.006441586650907993\r\nStep 4658, loss: 0.006310069467872381\r\nStep 4659, loss: 0.006364046595990658\r\nStep 4660, loss: 0.00614100880920887\r\nStep 4661, 
loss: 0.006639826577156782\r\nStep 4662, loss: 0.006453441455960274\r\nStep 4663, loss: 0.005846189334988594\r\nStep 4664, loss: 0.0067642079666256905\r\nStep 4665, loss: 0.0069414847530424595\r\nStep 4666, loss: 0.006398964673280716\r\nStep 4667, loss: 0.10480092465877533\r\nStep 4668, loss: 0.009549511596560478\r\nStep 4669, loss: 0.013274678029119968\r\nStep 4670, loss: 0.010898989625275135\r\nStep 4671, loss: 0.010315855965018272\r\nStep 4672, loss: 0.008859237655997276\r\nStep 4673, loss: 0.009251906536519527\r\nStep 4674, loss: 0.00845828466117382\r\nStep 4675, loss: 0.009228135459125042\r\nStep 4676, loss: 0.00937372911721468\r\nStep 4677, loss: 0.008906206116080284\r\nStep 4678, loss: 0.007288120221346617\r\nStep 4679, loss: 0.008323125541210175\r\nStep 4680, loss: 0.008432010188698769\r\nStep 4681, loss: 0.008138690143823624\r\nStep 4682, loss: 0.008423589169979095\r\nStep 4683, loss: 0.007387998513877392\r\nStep 4684, loss: 0.00803211610764265\r\nStep 4685, loss: 0.006886165589094162\r\nStep 4686, loss: 0.007520708255469799\r\nStep 4687, loss: 0.006918833125382662\r\nStep 4688, loss: 0.006980055943131447\r\nStep 4689, loss: 0.006990681402385235\r\nStep 4690, loss: 0.008025637827813625\r\nStep 4691, loss: 0.006295525003224611\r\nStep 4692, loss: 0.006793064530938864\r\nStep 4693, loss: 0.006621075794100761\r\nStep 4694, loss: 0.0063217440620064735\r\nStep 4695, loss: 0.007383252028375864\r\nStep 4696, loss: 0.005807626061141491\r\nStep 4697, loss: 0.007410222198814154\r\nStep 4698, loss: 0.0064991251565515995\r\nStep 4699, loss: 0.006885174661874771\r\nStep 4700, loss: 0.006409277208149433\r\nStep 4701, loss: 0.04370998218655586\r\nStep 4702, loss: 0.007556254975497723\r\nStep 4703, loss: 0.0077939569018781185\r\nStep 4704, loss: 0.007109604310244322\r\nStep 4705, loss: 0.007473812438547611\r\nStep 4706, loss: 0.0071991742588579655\r\nStep 4707, loss: 0.0070585343055427074\r\nStep 4708, loss: 0.006525248754769564\r\nStep 4709, loss: 0.006701778154820204\r\nStep 4710, loss: 0.006227018311619759\r\nStep 4711, loss: 0.006583311129361391\r\nStep 4712, loss: 0.007047806400805712\r\nStep 4713, loss: 0.006347761023789644\r\nStep 4714, loss: 0.006592501420527697\r\nStep 4715, loss: 0.006503872573375702\r\nStep 4716, loss: 0.006225551012903452\r\nStep 4717, loss: 0.006433920934796333\r\nStep 4718, loss: 0.006537551060318947\r\nStep 4719, loss: 0.00618960615247488\r\nStep 4720, loss: 0.006011580117046833\r\nStep 4721, loss: 0.005724464077502489\r\nStep 4722, loss: 0.006208931561559439\r\nStep 4723, loss: 0.0059891087003052235\r\nStep 4724, loss: 0.01203017495572567\r\nStep 4725, loss: 0.006050888914614916\r\nStep 4726, loss: 0.006233376916497946\r\nStep 4727, loss: 0.006099883001297712\r\nStep 4728, loss: 0.006356202531605959\r\nStep 4729, loss: 0.005452923942357302\r\nStep 4730, loss: 0.006568403448909521\r\n",,terminal_output +3405,9136835,"TERMINAL",0,0,"1\t ",,terminal_output +3406,9137881,"TERMINAL",0,0,"2\t ",,terminal_output +3407,9138913,"TERMINAL",0,0,"3\t ",,terminal_output +3408,9139949,"TERMINAL",0,0,"4\t ",,terminal_output +3409,9140896,"TERMINAL",0,0,"Step 4731, loss: 0.006646790076047182\r\nStep 4732, loss: 0.005402614362537861\r\nStep 4733, loss: 0.006884960923343897\r\nStep 4734, loss: 0.00584762915968895\r\nStep 4735, loss: 0.00609193928539753\r\nStep 4736, loss: 0.0060718911699950695\r\nStep 4737, loss: 0.006416186224669218\r\nStep 4738, loss: 0.0074608949944376945\r\nStep 4739, loss: 0.005764755420386791\r\nStep 4740, loss: 0.005930108483880758\r\nStep 4741, loss: 
0.006117431912571192\r\nStep 4742, loss: 0.006474332883954048\r\nStep 4743, loss: 0.0062047927640378475\r\nStep 4744, loss: 0.006782562471926212\r\nStep 4745, loss: 0.0055806320160627365\r\nStep 4746, loss: 0.007232477888464928\r\nStep 4747, loss: 0.005725050810724497\r\nStep 4748, loss: 0.005536283832043409\r\nStep 4749, loss: 0.005725905764847994\r\nStep 4750, loss: 0.0059896716848015785\r\nStep 4751, loss: 0.005759075284004211\r\nStep 4752, loss: 0.006669177208095789\r\nStep 4753, loss: 0.006056720856577158\r\nStep 4754, loss: 0.005719984881579876\r\nStep 4755, loss: 0.0057792579755187035\r\nStep 4756, loss: 0.0056014033034443855\r\nStep 4757, loss: 0.00569904362782836\r\nStep 4758, loss: 0.005596814677119255\r\nStep 4759, loss: 0.010339096188545227\r\nStep 4760, loss: 0.005947936326265335\r\nStep 4761, loss: 0.007004945073276758\r\nStep 4762, loss: 0.005836648400872946\r\nStep 4763, loss: 0.005901855882257223\r\nStep 4764, loss: 0.026786789298057556\r\nStep 4765, loss: 0.006470948923379183\r\nStep 4766, loss: 0.007520357146859169\r\nStep 4767, loss: 0.0067893448285758495\r\nStep 4768, loss: 0.006169081199914217\r\nStep 4769, loss: 0.007196540012955666\r\nStep 4770, loss: 0.0061227064579725266\r\nStep 4771, loss: 0.00677054189145565\r\nStep 4772, loss: 0.007026775740087032\r\nStep 4773, loss: 0.0072579882107675076\r\nStep 4774, loss: 0.006141262128949165\r\nStep 4775, loss: 0.006150166504085064\r\nStep 4776, loss: 0.005813925061374903\r\nStep 4777, loss: 0.006009889300912619\r\nStep 4778, loss: 0.006006908603012562\r\nStep 4779, loss: 0.005812566261738539\r\nStep 4780, loss: 0.006587814074009657\r\nStep 4781, loss: 0.005897661205381155\r\nStep 4782, loss: 0.005879389587789774\r\nStep 4783, loss: 0.005612005013972521\r\nStep 4784, loss: 0.005432428326457739\r\nStep 4785, loss: 0.0060360729694366455\r\nStep 4786, loss: 0.005749671254307032\r\nStep 4787, loss: 0.005824186373502016\r\nStep 4788, loss: 0.005406937096267939\r\nStep 4789, loss: 0.005957499146461487\r\nStep 4790, loss: 0.00602043978869915\r\nStep 4791, loss: 0.005357834976166487\r\nStep 4792, loss: 0.005404195282608271\r\nStep 4793, loss: 0.005560103803873062\r\nStep 4794, loss: 0.006295606028288603\r\nStep 4795, loss: 0.005663574207574129\r\nStep 4796, loss: 0.0057547143660485744\r\nStep 4797, loss: 0.005486560985445976\r\nStep 4798, loss: 0.010174158029258251\r\nStep 4799, loss: 0.006961663253605366\r\nStep 4800, loss: 0.005307388491928577\r\nStep 4801, loss: 0.005912215914577246\r\nStep 4802, loss: 0.005847307853400707\r\nStep 4803, loss: 0.005654031876474619\r\nStep 4804, loss: 0.005548585671931505\r\nStep 4805, loss: 0.005818683188408613\r\nStep 4806, loss: 0.005086465273052454\r\nStep 4807, loss: 0.005312164779752493\r\nStep 4808, loss: 0.005903809797018766\r\nStep 4809, loss: 0.005422497633844614\r\nStep 4810, loss: 0.005606365390121937\r\nStep 4811, loss: 0.005998474545776844\r\nStep 4812, loss: 0.005044770892709494\r\nStep 4813, loss: 0.036257367581129074\r\nStep 4814, loss: 0.00640405947342515\r\nStep 4815, loss: 0.007685661781579256\r\nStep 4816, loss: 0.006774339359253645\r\nStep 4817, loss: 0.00664234533905983\r\nStep 4818, loss: 0.007679594215005636\r\nStep 4819, loss: 0.00680972496047616\r\nStep 4820, loss: 0.006281869485974312\r\nStep 4821, loss: 0.006494977045804262\r\nStep 4822, loss: 0.006515692453831434\r\nStep 4823, loss: 0.0057895248755812645\r\nStep 4824, loss: 0.006214712746441364\r\nStep 4825, loss: 0.006481457967311144\r\nStep 4826, loss: 0.00669147027656436\r\nStep 4827, loss: 
0.005898481700569391\r\nStep 4828, loss: 0.005200749263167381\r\nStep 4829, loss: 0.005567776504904032\r\nStep 4830, loss: 0.005803801119327545\r\nStep 4831, loss: 0.005350097082555294\r\nStep 4832, loss: 0.005347341299057007\r\nStep 4833, loss: 0.00552641786634922\r\nStep 4834, loss: 0.005797607824206352\r\nStep 4835, loss: 0.005691238678991795\r\nStep 4836, loss: 0.005500253289937973\r\nStep 4837, loss: 0.005575117655098438\r\nStep 4838, loss: 0.005139597225934267\r\nStep 4839, loss: 0.005559158977121115\r\nStep 4840, loss: 0.0052828132174909115\r\nStep 4841, loss: 0.005240078549832106\r\nStep 4842, loss: 0.0052784583531320095\r\nStep 4843, loss: 0.005736676976084709\r\nStep 4844, loss: 0.005748939700424671\r\nStep 4845, loss: 0.005316141061484814\r\nStep 4846, loss: 0.005334431771188974\r\nStep 4847, loss: 0.0051979864947497845\r\nStep 4848, loss: 0.004970429930835962\r\nStep 4849, loss: 0.005459110252559185\r\nStep 4850, loss: 0.005335149355232716\r\nStep 4851, loss: 0.0052140201441943645\r\nStep 4852, loss: 0.005500827915966511\r\nStep 4853, loss: 0.005804415326565504\r\nStep 4854, loss: 0.0053718904964625835\r\nStep 4855, loss: 0.005104833282530308\r\nStep 4856, loss: 0.005263764411211014\r\nStep 4857, loss: 0.0058345128782093525\r\nStep 4858, loss: 0.004940391052514315\r\nStep 4859, loss: 0.005764033179730177\r\nStep 4860, loss: 0.015443912707269192\r\nStep 4861, loss: 0.005291355773806572\r\nStep 4862, loss: 0.007535794749855995\r\nStep 4863, loss: 0.005771464668214321\r\nStep 4864, loss: 0.005777350161224604\r\nStep 4865, loss: 0.005778122693300247\r\nStep 4866, loss: 0.006170514039695263\r\nStep 4867, loss: 0.005990097764879465\r\nStep 4868, loss: 0.005194007884711027\r\nStep 4869, loss: 0.023770315572619438\r\nStep 4870, loss: 0.0066542294807732105\r\nStep 4871, loss: 0.006565498653799295\r\nStep 4872, loss: 0.006243457552045584\r\nStep 4873, loss: 0.00606156513094902\r\nStep 4874, loss: 0.0053949481807649136\r\nStep 4875, loss: 0.005687876604497433\r\nStep 4876, loss: 0.005612609907984734\r\nStep 4877, loss: 0.0053953626193106174\r\nStep 4878, loss: 0.005586483981460333\r\nStep 4879, loss: 0.005728156305849552\r\nStep 4880, loss: 0.005444736685603857\r\nStep 4881, loss: 0.005356884095817804\r\nStep 4882, loss: 0.005229991395026445\r\nStep 4883, loss: 0.0048040831461548805\r\nStep 4884, loss: 0.005360550247132778\r\nStep 4885, loss: 0.005138203967362642\r\nStep 4886, loss: 0.005536452867090702\r\nStep 4887, loss: 0.005315105430781841\r\nStep 4888, loss: 0.005060574505478144\r\nStep 4889, loss: 0.005260727368295193\r\nStep 4890, loss: 0.005674872547388077\r\nStep 4891, loss: 0.004850773606449366\r\nStep 4892, loss: 0.005306530743837357\r\nStep 4893, loss: 0.004888845141977072\r\nStep 4894, loss: 0.0054971869103610516\r\nStep 4895, loss: 0.005302990321069956\r\nStep 4896, loss: 0.004884423688054085\r\nStep 4897, loss: 0.005048538092523813\r\nStep 4898, loss: 0.005435232073068619\r\nStep 4899, loss: 0.0052335914224386215\r\nStep 4900, loss: 0.005565229337662458\r\nStep 4901, loss: 0.004784215707331896\r\nStep 4902, loss: 0.004880912136286497\r\nStep 4903, loss: 0.005391599610447884\r\nStep 4904, loss: 0.004687035456299782\r\nStep 4905, loss: 0.005178045015782118\r\nStep 4906, loss: 0.005309754516929388\r\nStep 4907, loss: 0.004760514944791794\r\nStep 4908, loss: 0.004657996818423271\r\nStep 4909, loss: 0.022381288930773735\r\nStep 4910, loss: 0.006188845727592707\r\nStep 4911, loss: 0.006206941325217485\r\nStep 4912, loss: 0.005661572329699993\r\nStep 4913, loss: 
0.005991788115352392\r\nStep 4914, loss: 0.005674122367054224\r\nStep 4915, loss: 0.005498602520674467\r\nStep 4916, loss: 0.0057196407578885555\r\nStep 4917, loss: 0.006286053452640772\r\nStep 4918, loss: 0.005335143301635981\r\nStep 4919, loss: 0.004891484975814819\r\nStep 4920, loss: 0.005513020791113377\r\nStep 4921, loss: 0.0054752761498093605\r\nStep 4922, loss: 0.005098676774650812\r\nStep 4923, loss: 0.005471029318869114\r\nStep 4924, loss: 0.005355270113795996\r\nStep 4925, loss: 0.004996477626264095\r\nStep 4926, loss: 0.005322715733200312\r\nStep 4927, loss: 0.005791025701910257\r\nStep 4928, loss: 0.009914207272231579\r\nStep 4929, loss: 0.0049238563515245914\r\nStep 4930, loss: 0.00521229999139905\r\nStep 4931, loss: 0.005202068015933037\r\nStep 4932, loss: 0.004605222959071398\r\nStep 4933, loss: 0.004825378302484751\r\nStep 4934, loss: 0.005207952111959457\r\nStep 4935, loss: 0.005003258120268583\r\nStep 4936, loss: 0.00508869206532836\r\nStep 4937, loss: 0.00491059897467494\r\nStep 4938, loss: 0.005049880128353834\r\nStep 4939, loss: 0.005015672650188208\r\nStep 4940, loss: 0.004355961456894875\r\nStep 4941, loss: 0.004589059855788946\r\nStep 4942, loss: 0.004722372628748417\r\nStep 4943, loss: 0.004797738511115313\r\nStep 4944, loss: 0.005038179457187653\r\nStep 4945, loss: 0.004360721912235022\r\n",,terminal_output +3410,9141005,"TERMINAL",0,0,"5\t ",,terminal_output +3411,9142110,"TERMINAL",0,0,"6\t ",,terminal_output +3412,9143134,"TERMINAL",0,0,"7\t ",,terminal_output +3413,9143668,"TERMINAL",0,0,"Step 4946, loss: 0.005286063998937607\r\nStep 4947, loss: 0.004564414732158184\r\nStep 4948, loss: 0.004649275913834572\r\nStep 4949, loss: 0.04045431688427925\r\nStep 4950, loss: 0.005610855296254158\r\nStep 4951, loss: 0.006699411664158106\r\nStep 4952, loss: 0.005946597084403038\r\nStep 4953, loss: 0.0055699036456644535\r\nStep 4954, loss: 0.006369486916810274\r\nStep 4955, loss: 0.00508836330845952\r\nStep 4956, loss: 0.005167901050299406\r\nStep 4957, loss: 0.005324522033333778\r\nStep 4958, loss: 0.005367602221667767\r\nStep 4959, loss: 0.005652112420648336\r\nStep 4960, loss: 0.004874355625361204\r\nStep 4961, loss: 0.0050912098959088326\r\nStep 4962, loss: 0.005076706875115633\r\nStep 4963, loss: 0.0054290699772536755\r\nStep 4964, loss: 0.004904395435005426\r\nStep 4965, loss: 0.00516925984993577\r\nStep 4966, loss: 0.0048340181820094585\r\nStep 4967, loss: 0.004699536599218845\r\nStep 4968, loss: 0.004638390149921179\r\nStep 4969, loss: 0.004800521768629551\r\nStep 4970, loss: 0.004868716932833195\r\nStep 4971, loss: 0.005201829131692648\r\nStep 4972, loss: 0.005259054712951183\r\nStep 4973, loss: 0.005394445266574621\r\nStep 4974, loss: 0.06227367743849754\r\nStep 4975, loss: 0.005434964783489704\r\nStep 4976, loss: 0.00865940097719431\r\nStep 4977, loss: 0.00824497640132904\r\nStep 4978, loss: 0.007162779103964567\r\nStep 4979, loss: 0.00615165289491415\r\nStep 4980, loss: 0.006588071119040251\r\nStep 4981, loss: 0.0066680749878287315\r\nStep 4982, loss: 0.006091525312513113\r\nStep 4983, loss: 0.005982535425573587\r\nStep 4984, loss: 0.006216664798557758\r\nStep 4985, loss: 0.005954488180577755\r\nStep 4986, loss: 0.005452719517052174\r\nStep 4987, loss: 0.0051263622008264065\r\nStep 4988, loss: 0.005849945824593306\r\nStep 4989, loss: 0.005699094384908676\r\nStep 4990, loss: 0.005506658926606178\r\nStep 4991, loss: 0.005618578288704157\r\nStep 4992, loss: 0.004800291266292334\r\nStep 4993, loss: 0.004436502233147621\r\nStep 4994, loss: 
0.005041114054620266\r\nStep 4995, loss: 0.004977700766175985\r\nStep 4996, loss: 0.004654908087104559\r\nStep 4997, loss: 0.004672203678637743\r\nStep 4998, loss: 0.004410748835653067\r\nStep 4999, loss: 0.004278508946299553\r\nSaved checkpoint at step 5000\r\n",,terminal_output +3414,9144159,"TERMINAL",0,0,"8\t ",,terminal_output +3415,9145178,"TERMINAL",0,0,"9\t ",,terminal_output +3416,9145696,"TERMINAL",0,0,"wandb: \r\nwandb: 🚀 View run dynamics-causal-overfit-3373280 at: https://wandb.ai/instant-uv/jafar/runs/e16cv2fa\r\nwandb: Find logs at: ../../../../../hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/wandb/run-20250724_162927-e16cv2fa/logs\r\n",,terminal_output +3417,9146204,"TERMINAL",0,0,"40\t ",,terminal_output +3418,9147333,"TERMINAL",0,0,"1\t ",,terminal_output +3419,9147542,"TERMINAL",0,0,"]0;tum_cte0515@hkn0901:~/Projects/jafar[?2004h(jafar) [tum_cte0515@hkn0901 jafar]$ ",,terminal_output +3420,9148356,"TERMINAL",0,0,"2\t ",,terminal_output +3421,9149313,"TERMINAL",0,0,"3\t ",,terminal_output +3422,9150402,"TERMINAL",0,0,"5\t ",,terminal_output +3423,9151383,"TERMINAL",0,0,"6\t ",,terminal_output +3424,9152460,"TERMINAL",0,0,"7\t ",,terminal_output +3425,9153491,"TERMINAL",0,0,"8\t ",,terminal_output +3426,9154502,"TERMINAL",0,0,"9\t ",,terminal_output +3427,9155540,"TERMINAL",0,0,"50\t ",,terminal_output +3428,9156584,"TERMINAL",0,0,"1\t ",,terminal_output +3429,9157617,"TERMINAL",0,0,"2\t ",,terminal_output +3430,9158657,"TERMINAL",0,0,"3\t ",,terminal_output +3431,9159697,"TERMINAL",0,0,"4\t ",,terminal_output +3432,9160736,"TERMINAL",0,0,"5\t ",,terminal_output +3433,9161777,"TERMINAL",0,0,"62",,terminal_output +3434,9162815,"TERMINAL",0,0,"7\t ",,terminal_output +3435,9163865,"TERMINAL",0,0,"8\t ",,terminal_output +3436,9164903,"TERMINAL",0,0,"9\t ",,terminal_output +3437,9165934,"TERMINAL",0,0,"3:00\t ",,terminal_output +3438,9166970,"TERMINAL",0,0,"1\t ",,terminal_output +3439,9168019,"TERMINAL",0,0,"2\t ",,terminal_output +3440,9169149,"TERMINAL",0,0,"3\t ",,terminal_output +3441,9170171,"TERMINAL",0,0,"4\t ",,terminal_output +3442,9171190,"TERMINAL",0,0,"5\t ",,terminal_output +3443,9172219,"TERMINAL",0,0,"6\t ",,terminal_output +3444,9173237,"TERMINAL",0,0,"7\t ",,terminal_output +3445,9174261,"TERMINAL",0,0,"8\t ",,terminal_output +3446,9175295,"TERMINAL",0,0,"9\t ",,terminal_output +3447,9176412,"TERMINAL",0,0,"10\t ",,terminal_output +3448,9177392,"TERMINAL",0,0,"2\t ",,terminal_output +3449,9177563,"models/dynamics.py",0,0,"from typing import Dict, Any\n\nimport jax\nimport jax.numpy as jnp\nimport flax.linen as nn\nimport einops\n\nfrom utils.nn import STTransformer\n\n\nclass DynamicsMaskGIT(nn.Module):\n """"""MaskGIT dynamics model""""""\n\n model_dim: int\n ffn_dim: int\n num_latents: int\n num_blocks: int\n num_heads: int\n dropout: float\n mask_limit: float\n param_dtype: jnp.dtype\n dtype: jnp.dtype\n use_flash_attention: bool\n\n def setup(self):\n self.dynamics = STTransformer(\n self.model_dim,\n self.ffn_dim,\n self.num_latents,\n self.num_blocks,\n self.num_heads,\n self.dropout,\n self.param_dtype,\n self.dtype,\n spatial_bert=True,\n use_flash_attention=self.use_flash_attention,\n )\n self.patch_embed = nn.Embed(self.num_latents, self.model_dim)\n self.mask_token = self.param(\n ""mask_token"",\n nn.initializers.lecun_uniform(),\n (1, 1, 1, self.model_dim),\n )\n self.action_up = nn.Dense(\n self.model_dim,\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n )\n\n def __call__(self, batch: Dict[str, Any], training: bool = 
True) -> Dict[str, Any]:\n # --- Mask videos ---\n vid_embed = self.patch_embed(batch[""video_tokens""])\n if training:\n batch_size = vid_embed.shape[0]\n _rng_prob, *_rngs_mask = jax.random.split(batch[""mask_rng""], batch_size + 1)\n mask_prob = jax.random.uniform(\n _rng_prob, shape=(batch_size,), minval=self.mask_limit\n )\n per_sample_shape = vid_embed.shape[1:-1]\n mask = jax.vmap(\n lambda rng, prob: jax.random.bernoulli(rng, prob, per_sample_shape),\n in_axes=(0, 0),\n )(jnp.asarray(_rngs_mask), mask_prob)\n mask = mask.at[:, 0].set(False)\n vid_embed = jnp.where(jnp.expand_dims(mask, -1), self.mask_token, vid_embed)\n else:\n mask = None\n\n # --- Predict transition ---\n act_embed = self.action_up(batch[""latent_actions""])\n vid_embed += jnp.pad(act_embed, ((0, 0), (1, 0), (0, 0), (0, 0)))\n logits = self.dynamics(vid_embed)\n return dict(token_logits=logits, mask=mask)\n\n\nclass DynamicsAutoregressive(nn.Module):\n """"""Autoregressive (causal) dynamics model""""""\n\n model_dim: int\n ffn_dim: int\n num_latents: int\n num_blocks: int\n num_heads: int\n dropout: float\n param_dtype: jnp.dtype\n dtype: jnp.dtype\n use_flash_attention: bool\n\n def setup(self):\n self.dynamics = STTransformer(\n self.model_dim,\n self.ffn_dim,\n self.num_latents,\n self.num_blocks,\n self.num_heads,\n self.dropout,\n self.param_dtype,\n self.dtype,\n self.use_flash_attention,\n spatial_bert=False,\n )\n self.patch_embed = nn.Embed(self.num_latents, self.model_dim)\n self.action_up = nn.Dense(\n self.model_dim,\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n )\n\n def __call__(self, batch: Dict[str, Any], training: bool = True) -> Dict[str, Any]:\n vid_embed = self.patch_embed(batch[""video_tokens""])\n act_embed = self.action_up(batch[""latent_actions""])\n vid_embed += jnp.pad(act_embed, ((0, 0), (1, 0), (0, 0), (0, 0)))\n # vid_embed_padded = jnp.pad(vid_embed, ((0, 0), (1, 0), (1, 0), (0, 0)))\n # logits = self.dynamics(vid_embed_padded)[:, :-1, :-1]\n vid_embed_padded = jnp.pad(vid_embed, ((0, 0), (0, 0), (1, 0), (0, 0)))\n\n # FIXME mihir: HACK\n # rng1, _rng = jax.random.split(batch[""mask_rng""])\n # noise = jax.random.normal(_rng, vid_embed_padded.shape)\n # logits = self.dynamics(noise)[:, :, :-1]\n\n rng1, _rng = jax.random.split(batch[""mask_rng""])\n noise = 0.25 * jax.random.normal(_rng, vid_embed_padded.shape)\n logits = self.dynamics(vid_embed_padded + noise)[:, :, :-1]\n\n # logits = self.dynamics(vid_embed_padded)[:, :, :-1]\n\n mask = jnp.ones(vid_embed.shape[:-1])\n return dict(token_logits=logits, mask=mask)\n",python,tab +3450,9178426,"TERMINAL",0,0,"3\t ",,terminal_output +3451,9179468,"TERMINAL",0,0,"4\t ",,terminal_output +3452,9180514,"TERMINAL",0,0,"5\t ",,terminal_output +3453,9181539,"TERMINAL",0,0,"6\t ",,terminal_output +3454,9182576,"TERMINAL",0,0,"7\t ",,terminal_output +3455,9183611,"TERMINAL",0,0,"8\t ",,terminal_output +3456,9184708,"TERMINAL",0,0,"9\t ",,terminal_output +3457,9185731,"TERMINAL",0,0,"20\t ",,terminal_output +3458,9186753,"TERMINAL",0,0,"1\t ",,terminal_output +3459,9187881,"TERMINAL",0,0,"2\t ",,terminal_output +3460,9188808,"TERMINAL",0,0,"3\t ",,terminal_output +3461,9190237,"TERMINAL",0,0,"41",,terminal_output +3462,9191264,"TERMINAL",0,0,"5\t ",,terminal_output +3463,9192285,"TERMINAL",0,0,"6\t ",,terminal_output +3464,9193309,"TERMINAL",0,0,"7\t ",,terminal_output +3465,9194362,"TERMINAL",0,0,"9\t ",,terminal_output +3466,9195463,"TERMINAL",0,0,"30\t ",,terminal_output +3467,9196482,"TERMINAL",0,0,"1\t ",,terminal_output 
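Note on the models/dynamics.py snapshot captured above: DynamicsMaskGIT draws one masking probability per sample (uniform in [mask_limit, 1)), never masks the first frame, and swaps masked positions for a learned mask token, while DynamicsAutoregressive currently adds 0.25-scaled Gaussian noise to the padded embeddings (flagged "# FIXME mihir: HACK" in the file). A minimal, self-contained sketch of the masking step follows; the helper name and toy shapes are illustrative, not part of the recorded file:

import jax
import jax.numpy as jnp

def make_maskgit_mask(rng, vid_embed, mask_token, mask_limit=0.5):
    # Per-sample Bernoulli masking, mirroring DynamicsMaskGIT.__call__ above.
    batch_size = vid_embed.shape[0]
    rng_prob, *rngs_mask = jax.random.split(rng, batch_size + 1)
    # One masking probability per sample, uniform in [mask_limit, 1).
    mask_prob = jax.random.uniform(rng_prob, shape=(batch_size,), minval=mask_limit)
    per_sample_shape = vid_embed.shape[1:-1]  # (frames, tokens) grid
    mask = jax.vmap(
        lambda r, p: jax.random.bernoulli(r, p, per_sample_shape),
        in_axes=(0, 0),
    )(jnp.asarray(rngs_mask), mask_prob)
    mask = mask.at[:, 0].set(False)  # never mask the first frame
    masked = jnp.where(jnp.expand_dims(mask, -1), mask_token, vid_embed)
    return masked, mask

# Smoke test with hypothetical shapes (batch=2, frames=4, tokens=9, dim=8):
rng = jax.random.PRNGKey(0)
vid_embed = jnp.zeros((2, 4, 9, 8))
mask_token = jnp.ones((1, 1, 1, 8))
masked, mask = make_maskgit_mask(rng, vid_embed, mask_token)
assert masked.shape == vid_embed.shape and not bool(mask[:, 0].any())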
+3468,9197464,"TERMINAL",0,0,"2\t ",,terminal_output +3469,9198504,"TERMINAL",0,0,"3\t ",,terminal_output +3470,9199555,"TERMINAL",0,0,"4\t ",,terminal_output +3471,9200591,"TERMINAL",0,0,"5\t ",,terminal_output +3472,9201660,"TERMINAL",0,0,"6\t ",,terminal_output +3473,9202683,"TERMINAL",0,0,"7\t ",,terminal_output +3474,9203725,"TERMINAL",0,0,"8\t ",,terminal_output +3475,9203733,"TERMINAL",0,0,"watch",,terminal_focus +3476,9204175,"TERMINAL",0,0,"[?1049l\r[?1l>]0;tum_cte0515@hkn1991:~/Projects/jafar]633;D;0",,terminal_output +3477,9207040,"TERMINAL",0,0,"cd $ws_dir",,terminal_command +3478,9207393,"TERMINAL",0,0,"ls",,terminal_command +3479,9207424,"TERMINAL",0,0,"]633;E;2025-07-24 16:33:42 ls;63badae8-90b1-4579-970f-d00997b22bed]633;C",,terminal_output +3480,9207500,"TERMINAL",0,0,"checkpoints count_items.sh data data_new huggingface logs possibly_corrupt_files_in_this_workspace.txt scripts\r\n]0;tum_cte0515@hkn1991:/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared]633;D;0",,terminal_output +3481,9209929,"TERMINAL",0,0,"cd checkpoints/",,terminal_command +3482,9210231,"TERMINAL",0,0,"ls",,terminal_command +3483,9210282,"TERMINAL",0,0,"]633;E;2025-07-24 16:33:44 ls;63badae8-90b1-4579-970f-d00997b22bed]633;C",,terminal_output +3484,9210456,"TERMINAL",0,0,"0000 3290439 3292331 3294600 3296575 3297671 3299068 3300663 3307618 3313562 causal lam_ckpt_dir train_dyn_causal_255M wrap\r\n3290283 3290440 3292332 3294601 3297569 3297693 3299069 3300672 3307619 3313563 checkpoints_alfred lam_main_test train_dyn_causal_356M\r\n3290284 3291405 3292333 3294602 3297575 3297706 3299258 3301025 3309662 3313564 coinrun maskgit-maskprob-fix train_dyn_causal_500M\r\n3290295 3292213 3292334 3294603 3297576 3297727 3299259 3301026 3309663 3313565 debug tokenizer train_dyn_new_arch-bugfixed-spatial-shift\r\n3290296 3292221 3292335 3296502 3297577 3299016 3299272 3301027 3309699 3313570 dyn tokenizer_ckpt_dir train_dyn_new_arch-bugfixed-temporal-shift\r\n3290366 3292258 3292336 3296540 3297578 3299062 3299579 3301029 3310436 3313571 dynamics_ckpt_dir train_dynamics_lr_schedule_const train_dyn_yolorun_new_arch\r\n3290367 3292328 3292337 3296571 3297582 3299063 3300233 3301030 3310437 3313572 interactive train_dynamics_lr_schedule_cos train_lam_minecraft_overfit_sample\r\n3290391 3292329 3292338 3296573 3297586 3299065 3300290 3301031 3311671 3316022 lam train_dynamics_lr_schedule_wsd train_tokenizer_batch_size_scaling_16_node\r\n3290392 3292330 3292339 3296574 3297606 3299066 3300658 3306801 3311672 big-runs lam-1-action train_dyn_causal_180M train_tokenizer_minecraft_overfit_sample\r\n]0;tum_cte0515@hkn1991:/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints]633;D;0",,terminal_output +3485,9218146,"TERMINAL",0,0,"cd causal/",,terminal_command +3486,9218612,"TERMINAL",0,0,"ls",,terminal_command +3487,9222642,"slurm/dev/mihir/horeka/overfit_sample/causal/dynamics_overfit_sample.sbatch",0,0,"",shellscript,tab +3488,9229395,"TERMINAL",0,0,"srun",,terminal_focus +3489,9231122,"TERMINAL",0,0,"bash",,terminal_focus +3490,9231513,"TERMINAL",0,0,"cd overfit-seed69-1/",,terminal_command +3491,9231891,"TERMINAL",0,0,"ls",,terminal_command +3492,9232942,"TERMINAL",0,0,"cd interactive/",,terminal_command +3493,9233240,"TERMINAL",0,0,"ls",,terminal_command +3494,9235245,"TERMINAL",0,0,"srun",,terminal_focus +3495,9236920,"TERMINAL",0,0,"q",,terminal_output +3496,9237303,"TERMINAL",0,0,"[?25lu[?25h[?25le[?25h[?25lu[?25h",,terminal_output +3497,9237437,"TERMINAL",0,0,"[?25le[?25h",,terminal_output 
+3498,9237550,"TERMINAL",0,0,"\r\n[?2004l\r[?1049h(B[?7hEvery 1.0s: squeue --me\thkn0901.localdomain: Thu Jul 24 16:34:12 2025\r\nJOBID PARTITION NAME USER ST\tTIME NODES NODELIST(REASON)\r\n3373277 accelerat train_dy tum_cte0 PD\t0:00\t 8 (Priority)\r\n3373276 accelerat train_dy tum_cte0 PD\t0:00\t 2 (Priority)\r\n3371237 accelerat train_dy tum_cte0 R 15:59:57\t 8 hkn[0618,0625-0626,0628-0631,0634]\r\n3371238 accelerat train_dy tum_cte0 R 15:59:57\t 2 hkn[0706,0710]\r\n3372631 accelerat train_dy tum_cte0 R 2:21:00\t 2 hkn[0515,0622]\r\n3372629 accelerat train_dy tum_cte0 R 3:06:23\t 8 hkn[0410,0429,0520,0607,0610,0810,0814,0817]\r\n3373280 dev_accel interact tum_cte0 R\t20:27\t 1 hkn0901",,terminal_output +3499,9238572,"TERMINAL",0,0,"388148",,terminal_output +3500,9239586,"TERMINAL",0,0,"499259",,terminal_output +3501,9240232,"TERMINAL",0,0,"[?1049l\r[?1l>]0;tum_cte0515@hkn0901:~/Projects/jafar[?2004h(jafar) [tum_cte0515@hkn0901 jafar]$ ",,terminal_output +3502,9240833,"TERMINAL",0,0,"bash",,terminal_focus +3503,9244037,"TERMINAL",0,0,"cd 3373280/",,terminal_command +3504,9244357,"TERMINAL",0,0,"ls",,terminal_command +3505,9244382,"TERMINAL",0,0,"]633;E;2025-07-24 16:34:19 ls;63badae8-90b1-4579-970f-d00997b22bed]633;C003000 004000 005000\r\n]0;tum_cte0515@hkn1991:/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/causal/overfit-seed69-1/interactive/3373280]633;D;0",,terminal_output +3506,9247623,"TERMINAL",0,0,"pwd",,terminal_command +3507,9247634,"TERMINAL",0,0,"]633;E;2025-07-24 16:34:22 pwd;63badae8-90b1-4579-970f-d00997b22bed]633;C/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/causal/overfit-seed69-1/interactive/3373280\r\n]0;tum_cte0515@hkn1991:/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/causal/overfit-seed69-1/interactive/3373280]633;D;0",,terminal_output +3508,9249921,"TERMINAL",0,0,"srun",,terminal_focus +3509,9250787,"TERMINAL",0,0,"\r(reverse-i-search)`': ",,terminal_output +3510,9251076,"TERMINAL",0,0,"[?25ls': sh slurm/dev/mihir/horeka/overfit_sample/causal/dynamics_overfit_sample.sbatch[?25h",,terminal_output +3511,9251246,"TERMINAL",0,0,"[?25ls\ra': sh slurm/dev/mihir/horeka/overfit_sample/causal/dynamics_overfit_sample.sbatch[?25h",,terminal_output +3512,9251423,"TERMINAL",0,0,"[?25ls\rm': sh slurm/dev/mihir/horeka/overfit_sample/causal/dynamics_overfit_sample.sbatch[?25h",,terminal_output +3513,9251693,"TERMINAL",0,0,"[?25las\rp': sh slurm/dev/mihir/horeka/overfit_sample/causal/dynamics_overfit_sample.sbatch[?25h\rl': sh slurm/dev/mihir/horeka/overfit_sample/causal/dynamics_overfit_sample.sbatch",,terminal_output +3514,9251933,"TERMINAL",0,0,"[?25las\ri': git checkout new-arch-sampling[?25h",,terminal_output +3515,9251986,"TERMINAL",0,0,"n': git checkout new-arch-sampling",,terminal_output +3516,9252128,"TERMINAL",0,0,"[?25lsg': git checkout new-arch-sampling[?25h",,terminal_output +3517,9253852,"TERMINAL",0,0,"[?25ls.': sh slurm/jobs/mihir/horeka/yolo-runs/sampling.sh /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/train_dyn_new_arch-bugfixed-spatial-shift/3359343[?25h",,terminal_output +3518,9255423,"TERMINAL",0,0,"\rjafar) [tum_cte0515@hkn0901 jafar]$ sh slurm/jobs/mihir/horeka/yolo-runs/sampling.sh /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/train_dyn_new_arch-bugfixed-spatial-shift/3359343",,terminal_output +3519,9255889,"TERMINAL",0,0,"",,terminal_output +3520,9256075,"TERMINAL",0,0,"",,terminal_output +3521,9257039,"TERMINAL",0,0,"\r\n\r",,terminal_output
+3522,9257708,"TERMINAL",0,0,"\r\n\r",,terminal_output +3523,9258425,"TERMINAL",0,0,"/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/causal/overfit-seed69-1/interactive/3373280",,terminal_output +3524,9258723,"TERMINAL",0,0,"/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/causal/overfit-seed69-1/interactive/3373280\r\n[?2004l\rsh: slurm/jobs/mihir/horeka/yolo-runs/sampling.sh: No such file or directory\r\n]0;tum_cte0515@hkn0901:~/Projects/jafar[?2004h(jafar) [tum_cte0515@hkn0901 jafar]$ ",,terminal_output +3525,9259989,"TERMINAL",0,0,"sh slurm/jobs/mihir/horeka/yolo-runs/sampling.sh /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/causal/overfit-seed69-1/interactive/3373280",,terminal_output +3526,9261255,"TERMINAL",0,0,"",,terminal_output +3527,9261505,"TERMINAL",0,0,"",,terminal_output +3528,9261674,"TERMINAL",0,0,"",,terminal_output +3529,9262138,"TERMINAL",0,0,"",,terminal_output +3530,9262836,"TERMINAL",0,0,"",,terminal_output +3531,9263197,"TERMINAL",0,0,"",,terminal_output +3532,9266013,"TERMINAL",0,0,"[?25ls/sampling.sh /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/causal/overfit-seed69-1/interactive/3373280[?25h",,terminal_output +3533,9266774,"TERMINAL",0,0,"[?25lun/sampling.sh /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/causal/overfit-seed69-1/interactive/3373280[?25h[?25lu/sampling.sh /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/causal/overfit-seed69-1/interactive/3373280 \r[?25h[?25lr\r\n\r[?25h[?25l-[?25h[?25lo[?25h[?25ll[?25h[?25lo[?25h[?25ly[?25h[?25l/[?25h",,terminal_output +3534,9267164,"TERMINAL",0,0,"[?25lk[?25h",,terminal_output +3535,9267968,"TERMINAL",0,0,"[?25lore[?25h[?25l/h[?25h[?25l/[?25h[?25lr[?25h[?25li[?25h[?25lh[?25h[?25li[?25h[?25lm[?25h[?25l/[?25h[?25ls[?25h",,terminal_output +3536,9268121,"TERMINAL",0,0,"[?25lo[?25h",,terminal_output +3537,9268336,"TERMINAL",0,0,"[?25lj[?25h",,terminal_output +3538,9268620,"TERMINAL",0,0,"[1@d",,terminal_output +3539,9268751,"TERMINAL",0,0,"[2@ev",,terminal_output +3540,9269373,"TERMINAL",0,0,"\r\ndev/\r\n(jafar) [tum_cte0515@hkn0901 jafar]$ sh slurm/dev/sampling.sh /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/causal/overfit-seed69-1/interactive/3373280\r",,terminal_output +3541,9271395,"TERMINAL",0,0,"",,terminal_output +3542,9271948,"TERMINAL",0,0,"",,terminal_output +3543,9272197,"TERMINAL",0,0,"\r\nalfred/ franz/ mihir/ \r\n(jafar) [tum_cte0515@hkn0901 jafar]$ sh slurm/dev/sampling.sh /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/causal/overfit-seed69-1/interactive/3373280\r",,terminal_output +3544,9273312,"TERMINAL",0,0,"[1@m",,terminal_output +3545,9273464,"TERMINAL",0,0,"[5@ihir/",,terminal_output +3546,9274122,"TERMINAL",0,0,"",,terminal_output +3547,9274229,"TERMINAL",0,0,"\r\ncremers/ horeka/ \r\n(jafar) [tum_cte0515@hkn0901 jafar]$ sh slurm/dev/mihir/sampling.sh /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/causal/overfit-seed69-1/interactive/3373280\r",,terminal_output +3548,9275184,"TERMINAL",0,0,"[?25ls[1@h[?25h",,terminal_output +3549,9275536,"TERMINAL",0,0,"[6@oreka/",,terminal_output +3550,9275723,"TERMINAL",0,0,"",,terminal_output +3551,9276122,"TERMINAL",0,0,"\r\ncausal_fit_modelsizes/ overfit_batch_tiny/ overfit_sample_tiny/ train_dynamics.sh train_tokenizer.sh \r\noverfit_batch/ overfit_sample/ sync_runner.sh train_lam.sh yolo-runs/ \r\n(jafar) [tum_cte0515@hkn0901 jafar]$ sh slurm/dev/mihir/horeka/sampling.sh 
/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/causal/overfit-seed69-1/interactive/3373280\r",,terminal_output +3552,9280406,"TERMINAL",0,0,"[?25ls[1@y[?25h",,terminal_output +3553,9280672,"TERMINAL",0,0,"olo-runs/sampling.sh /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/causal/overfit-seed69-1/interactive/3373280",,terminal_output +3554,9281209,"TERMINAL",0,0,"",,terminal_output +3555,9281406,"TERMINAL",0,0,"\r\n\r\r\nsampling.sh tester.sh \r\n(jafar) [tum_cte0515@hkn0901 jafar]$ sh slurm/dev/mihir/horeka/yolo-runs/sampling.sh /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/causal/overfit-seed69-1/interactive/3373280",,terminal_output +3556,9282537,"TERMINAL",0,0,"[?25l\r\r\n[?2004l\r[?25h\r\n# Log the sbatch script\r\ncat $0\r\n\r\nmodule unload mpi/openmpi/5.0\r\nmodule unload devel/cuda/12.4\r\n# source .venv/bin/activate\r\n\r\narray_records_dir=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_new/open_ai_minecraft_arrayrecords_chunked\r\n\r\njob_name=$SLURM_JOB_NAME\r\nslurm_job_id=$SLURM_JOB_ID\r\n\r\nCHECKPOINT_DIR=$ws_dir/checkpoints/$job_name/$slurm_job_id\r\nmkdir -p $CHECKPOINT_DIR\r\n\r\ntokenizer_ckpt_dir=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/big-runs/tokenizer-lr-scaling/train_tokenizer_lr_sweep_1e-4\r\ndynamics_ckpt_dir=$1\r\necho $dynamics_ckpt_dir\r\n\r\nenv | grep SLURM\r\n\r\nsrun python sample.py \\r\n --checkpoint $dynamics_ckpt_dir \\r\n --dyna_dim=128 \\r\n --dyna_num_blocks=2 \\r\n --dyna_num_heads=4 \\r\n --seq_len=2 \\r\n --start_frame=0 \\r\n --data_dir $array_records_dir\r\n\r\n# srun python sample.py \\r\n # --checkpoint $dynamics_ckpt_dir \\r\n # --start_frame=0 \\r\n # --batch_size=12 \\r\n # --seq_len=2 \\r\n # --data_dir $array_records_dir\r\n",,terminal_output 
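Annotation: the script echoed above (sequence 3556) is how sampling is launched here: the first positional argument becomes dynamics_ckpt_dir, and the --dyna_dim/--dyna_num_blocks/--dyna_num_heads flags presumably have to match the architecture of the checkpoint being restored (a small 128-dim, 2-block, 4-head model, consistent with the overfit runs in this session). The invocation quoted from the recording:

sh slurm/dev/mihir/horeka/yolo-runs/sampling.sh /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/causal/overfit-seed69-1/interactive/3373280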
+3557,9282687,"TERMINAL",0,0,"/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/causal/overfit-seed69-1/interactive/3373280\r\nSLURM_STEP_NUM_TASKS=1\r\nSLURM_JOB_USER=tum_cte0515\r\nSLURM_TASKS_PER_NODE=1\r\nSLURM_JOB_UID=999226\r\nSLURM_TASK_PID=436020\r\nSLURM_JOB_GPUS=0\r\nSLURM_LOCALID=0\r\nSLURM_SUBMIT_DIR=/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar\r\nSLURMD_NODENAME=hkn0901\r\nSLURM_JOB_START_TIME=1753366425\r\nSLURM_STEP_NODELIST=hkn0901\r\nSLURM_CLUSTER_NAME=hk\r\nSLURM_JOB_END_TIME=1753370025\r\nSLURM_PMI2_SRUN_PORT=34243\r\nSLURM_CPUS_ON_NODE=6\r\nSLURM_JOB_CPUS_PER_NODE=6\r\nSLURM_GPUS_ON_NODE=1\r\nSLURM_GTIDS=0\r\nSLURM_JOB_PARTITION=dev_accelerated-h100\r\nSLURM_TRES_PER_TASK=cpu=5\r\nSLURM_OOM_KILL_STEP=0\r\nSLURM_JOB_NUM_NODES=1\r\nSLURM_STEPID=4294967290\r\nSLURM_JOBID=3373280\r\nSLURM_PTY_PORT=42147\r\nSLURM_JOB_QOS=normal\r\nSLURM_LAUNCH_NODE_IPADDR=10.0.7.199\r\nSLURM_PTY_WIN_ROW=48\r\nSLURM_PMI2_PROC_MAPPING=(vector,(0,1,1))\r\nSLURMD_DEBUG=2\r\nSLURM_PROCID=0\r\nSLURM_CPUS_PER_TASK=5\r\nSLURM_NTASKS=1\r\nSLURM_TOPOLOGY_ADDR=hkibb.hkibbi4.hkibbi4e1.hkn0901\r\nSLURM_TOPOLOGY_ADDR_PATTERN=switch.switch.switch.node\r\nSLURM_SRUN_COMM_HOST=10.0.7.199\r\nSLURM_SCRIPT_CONTEXT=prolog_task\r\nSLURM_PTY_WIN_COL=194\r\nSLURM_NODELIST=hkn0901\r\nSLURM_SRUN_COMM_PORT=46593\r\nSLURM_STEP_ID=4294967290\r\nSLURM_JOB_ACCOUNT=hk-project-p0023960\r\nSLURM_PRIO_PROCESS=0\r\nSLURM_NPROCS=1\r\nSLURM_NNODES=1\r\nSLURM_SUBMIT_HOST=hkn1991.localdomain\r\nSLURM_JOB_ID=3373280\r\nSLURM_NODEID=0\r\nSLURM_STEP_NUM_NODES=1\r\nSLURM_STEP_TASKS_PER_NODE=1\r\nSLURM_MPI_TYPE=pmi2\r\nSLURM_PMI2_STEP_NODES=hkn0901\r\nSLURM_CONF=/etc/slurm/slurm.conf\r\nSLURM_JOB_NAME=interactive\r\nSLURM_NTASKS_PER_NODE=1\r\nSLURM_STEP_LAUNCHER_PORT=46593\r\nSLURM_JOB_GID=502226\r\nSLURM_JOB_NODELIST=hkn0901\r\n",,terminal_output +3558,9282869,"TERMINAL",0,0,"GpuFreq=control_disabled\r\n",,terminal_output +3559,9287932,"TERMINAL",0,0,"2025-07-24 16:35:02.577207: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +3560,9289929,"models/dynamics.py",0,0,"",python,tab +3561,9291553,"models/dynamics.py",4251,0,"",python,selection_mouse +3562,9296223,"TERMINAL",0,0,"2025-07-24 16:35:10.899910: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +3563,9302638,"TERMINAL",0,0,"2025-07-24 16:35:17.332751: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +3564,9306359,"TERMINAL",0,0,"2025-07-24 16:35:20.977184: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. 
Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +3565,9308927,"TERMINAL",0,0,"2025-07-24 16:35:23.529970: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +3566,9310252,"TERMINAL",0,0,"2025-07-24 16:35:24.941362: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +3567,9311669,"TERMINAL",0,0,"2025-07-24 16:35:26.354529: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +3568,9312611,"TERMINAL",0,0,"2025-07-24 16:35:27.297975: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +3569,9316394,"TERMINAL",0,0,"2025-07-24 16:35:31.088134: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +3570,9318853,"TERMINAL",0,0,"WARNING:absl:Missing metrics for step 4000\r\nERROR:absl:File /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/causal/overfit-seed69-1/interactive/3373280/004000/metrics/metrics not found.\r\nWARNING:absl:Missing metrics for step 5000\r\nERROR:absl:File /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/causal/overfit-seed69-1/interactive/3373280/005000/metrics/metrics not found.\r\nWARNING:absl:Missing metrics for step 3000\r\nERROR:absl:File /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/causal/overfit-seed69-1/interactive/3373280/003000/metrics/metrics not found.\r\n",,terminal_output +3571,9325307,"TERMINAL",0,0,"2025-07-24 16:35:39.932705: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +3572,9331043,"TERMINAL",0,0,"2025-07-24 16:35:45.651220: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. 
Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +3573,9332884,"TERMINAL",0,0,"autoreg sampling...\r\nSampling token 0 from frame 1\r\nTraceback (most recent call last):\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/sample.py"", line 189, in \r\n vid = _autoreg_sample(rng, video_batch, action_batch)\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/sample.py"", line 150, in _autoreg_sample\r\n generated_vid = sampling_fn(params, batch)\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/flax/core/scope.py"", line 1079, in wrapper\r\n y = fn(root, *args, **kwargs)\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/flax/linen/module.py"", line 3022, in scope_fn\r\n return fn(module.clone(parent=scope, _deep_clone=True), *args, **kwargs)\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/sample.py"", line 141, in _sampling_wrapper\r\n return module.sample_causal(batch, args.seq_len, args.temperature, args.sample_argmax)\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/flax/linen/module.py"", line 699, in wrapped_module_method\r\n return self._call_wrapped_method(fun, args, kwargs)\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/flax/linen/module.py"", line 1216, in _call_wrapped_method\r\n y = run_fun(self, *args, **kwargs)\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/genie.py"", line 178, in sample_causal\r\n dyna_outputs = self.dynamics(dyna_inputs, training=False)\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/flax/linen/module.py"", line 699, in wrapped_module_method\r\n return self._call_wrapped_method(fun, args, kwargs)\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/flax/linen/module.py"", line 1216, in _call_wrapped_method\r\n y = run_fun(self, *args, **kwargs)\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/models/dynamics.py"", line 122, in __call__\r\n rng1, _rng = jax.random.split(batch[""mask_rng""])\r\nKeyError: 'mask_rng'\r\n",,terminal_output +3574,9334334,"TERMINAL",0,0,"srun: error: hkn0901: task 0: Exited with exit code 1\r\n]0;tum_cte0515@hkn0901:~/Projects/jafar[?2004h(jafar) [tum_cte0515@hkn0901 jafar]$ ",,terminal_output +3575,9346045,"models/dynamics.py",0,0,"",python,tab +3576,9346889,"models/dynamics.py",4071,0,"",python,selection_mouse +3577,9348396,"models/dynamics.py",4032,67," logits = self.dynamics(vid_embed_padded + noise)[:, :, :-1]",python,selection_command +3578,9348632,"models/dynamics.py",3961,138," noise = 0.25 * jax.random.normal(_rng, vid_embed_padded.shape)\n logits = self.dynamics(vid_embed_padded + noise)[:, :, :-1]",python,selection_command +3579,9348775,"models/dynamics.py",3904,195," rng1, _rng = jax.random.split(batch[""mask_rng""])\n noise = 0.25 * jax.random.normal(_rng, vid_embed_padded.shape)\n logits = self.dynamics(vid_embed_padded + noise)[:, :, :-1]",python,selection_command +3580,9349016,"models/dynamics.py",3912,0,"",python,selection_command +3581,9349720,"models/dynamics.py",4040,0,"#",python,content 
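Annotation: the traceback at sequence 3573 pinpoints the crash: DynamicsAutoregressive.__call__ reads batch["mask_rng"] unconditionally for the FIXME noise hack, but genie.py's sample_causal builds its batch without that key, hence the KeyError. The recording continues with those lines being commented out; a defensive variant would instead gate the noise on the key being present. An illustrative sketch under that assumption, not the project's code:

import jax
import jax.numpy as jnp

def maybe_add_training_noise(vid_embed_padded, batch, training, scale=0.25):
    # Apply the optional noise hack only when an RNG key was actually provided.
    rng = batch.get("mask_rng")
    if not training or rng is None:
        return vid_embed_padded
    _, sub = jax.random.split(rng)
    return vid_embed_padded + scale * jax.random.normal(sub, vid_embed_padded.shape)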
+3582,9349720,"models/dynamics.py",3969,0,"#",python,content +3583,9349720,"models/dynamics.py",3912,0,"#",python,content +3584,9349722,"models/dynamics.py",3913,0,"",python,selection_keyboard +3585,9349788,"models/dynamics.py",4043,0," ",python,content +3586,9349789,"models/dynamics.py",3971,0," ",python,content +3587,9349789,"models/dynamics.py",3913,0," ",python,content +3588,9349789,"models/dynamics.py",3914,0,"",python,selection_keyboard +3589,9350060,"models/dynamics.py",3913,0,"",python,selection_command +3590,9350280,"models/dynamics.py",3972,0,"",python,selection_command +3591,9350442,"models/dynamics.py",4045,0,"",python,selection_command +3592,9350577,"models/dynamics.py",4106,0,"",python,selection_command +3593,9350708,"models/dynamics.py",4116,0,"",python,selection_command +3594,9351056,"models/dynamics.py",4115,0,"",python,selection_command +3595,9352979,"models/dynamics.py",4115,1,"",python,content +3596,9353132,"models/dynamics.py",4115,1,"",python,content +3597,9356137,"TERMINAL",0,0,"sh slurm/dev/mihir/horeka/yolo-runs/sampling.sh /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/causal/overfit-seed69-1/interactive/3373280",,terminal_output +3598,9356641,"TERMINAL",0,0,"\r\n[?2004l\r\r\n[cat of the sbatch script, identical to the output at sequence 3556, omitted]\r\n",,terminal_output
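Annotation: the edits at sequences 3581-3596 comment out the three noise-hack lines and re-activate the plain forward pass, so the relevant part of DynamicsAutoregressive.__call__ presumably now reads as below (reconstructed from the recorded offsets); the re-run that follows no longer touches batch["mask_rng"] and sampling proceeds:

        # rng1, _rng = jax.random.split(batch["mask_rng"])
        # noise = 0.25 * jax.random.normal(_rng, vid_embed_padded.shape)
        # logits = self.dynamics(vid_embed_padded + noise)[:, :, :-1]
        logits = self.dynamics(vid_embed_padded)[:, :, :-1]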
+3599,9356763,"TERMINAL",0,0,"/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/causal/overfit-seed69-1/interactive/3373280\r\n[SLURM environment dump identical to the one at sequence 3557 omitted]\r\n",,terminal_output +3600,9356934,"TERMINAL",0,0,"GpuFreq=control_disabled\r\n",,terminal_output +3601,9357554,"models/dynamics.py",0,0,"",python,tab +3602,9359690,"TERMINAL",0,0,"2025-07-24 16:36:14.383610: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +3603,9367977,"TERMINAL",0,0,"2025-07-24 16:36:22.671523: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +3604,9374045,"TERMINAL",0,0,"2025-07-24 16:36:28.666587: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +3605,9376436,"models/lam.py",0,0,"",python,tab +3606,9377221,"TERMINAL",0,0,"2025-07-24 16:36:31.859469: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. 
Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +3607,9378950,"models/lam.py",3147,0,"",python,selection_mouse +3608,9379589,"TERMINAL",0,0,"2025-07-24 16:36:34.248156: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +3609,9379796,"models/lam.py",3147,1,"",python,content +3610,9379945,"models/lam.py",3147,1,"",python,content +3611,9380894,"TERMINAL",0,0,"2025-07-24 16:36:35.587151: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +3612,9380906,"models/lam.py",3147,0,"0",python,content +3613,9380907,"models/lam.py",3148,0,"",python,selection_keyboard +3614,9381009,"models/lam.py",3147,0,"",python,selection_command +3615,9381184,"models/lam.py",3097,0,"",python,selection_command +3616,9381344,"models/lam.py",3040,0,"",python,selection_command +3617,9381485,"models/lam.py",3015,0,"",python,selection_command +3618,9381602,"models/lam.py",2993,0,"",python,selection_command +3619,9382022,"models/lam.py",2942,0,"",python,selection_command +3620,9382186,"models/lam.py",2943,0,"",python,selection_command +3621,9382286,"TERMINAL",0,0,"2025-07-24 16:36:36.961190: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +3622,9382665,"models/lam.py",2944,0,"",python,selection_command +3623,9382735,"models/lam.py",2945,0,"",python,selection_command +3624,9382736,"models/lam.py",2946,0,"",python,selection_command +3625,9382739,"models/lam.py",2947,0,"",python,selection_command +3626,9382773,"models/lam.py",2948,0,"",python,selection_command +3627,9382806,"models/lam.py",2949,0,"",python,selection_command +3628,9382840,"models/lam.py",2950,0,"",python,selection_command +3629,9382871,"models/lam.py",2951,0,"",python,selection_command +3630,9382903,"models/lam.py",2952,0,"",python,selection_command +3631,9382927,"models/lam.py",2953,0,"",python,selection_command +3632,9382966,"models/lam.py",2954,0,"",python,selection_command +3633,9383018,"models/lam.py",2955,0,"",python,selection_command +3634,9383019,"models/lam.py",2956,0,"",python,selection_command +3635,9383052,"models/lam.py",2957,0,"",python,selection_command +3636,9383083,"models/lam.py",2958,0,"",python,selection_command +3637,9383110,"models/lam.py",2959,0,"",python,selection_command +3638,9383138,"models/lam.py",2960,0,"",python,selection_command +3639,9383179,"models/lam.py",2961,0,"",python,selection_command +3640,9383197,"models/lam.py",2962,0,"",python,selection_command +3641,9383240,"TERMINAL",0,0,"2025-07-24 16:36:37.874991: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. 
Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +3642,9383258,"models/lam.py",2963,0,"",python,selection_command +3643,9383364,"models/lam.py",2964,0,"",python,selection_command +3644,9383759,"models/lam.py",2963,0,"",python,selection_command +3645,9384350,"models/lam.py",2963,7,"",python,content +3646,9385746,"models/lam.py",2963,0,"a",python,content +3647,9385747,"models/lam.py",2964,0,"",python,selection_keyboard +3648,9385849,"models/lam.py",2964,0,"c",python,content +3649,9385849,"models/lam.py",2965,0,"",python,selection_keyboard +3650,9386432,"models/lam.py",2963,2,"action_pad",python,content +3651,9386811,"TERMINAL",0,0,"2025-07-24 16:36:41.471091: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +3652,9386974,"models/lam.py",2972,0,"",python,selection_command +3653,9387173,"models/lam.py",2973,0,"",python,selection_command +3654,9387321,"models/lam.py",2974,0,"",python,selection_command +3655,9387494,"models/lam.py",2975,0,"",python,selection_command +3656,9387780,"models/lam.py",2975,10,"",python,content +3657,9388104,"models/lam.py",2975,0,"p",python,content +3658,9388105,"models/lam.py",2976,0,"",python,selection_keyboard +3659,9388181,"models/lam.py",2976,0,"a",python,content +3660,9388182,"models/lam.py",2977,0,"",python,selection_keyboard +3661,9388869,"models/lam.py",2975,2,"patches",python,content +3662,9388988,"models/lam.py",2981,0,"",python,selection_command +3663,9389102,"TERMINAL",0,0,"WARNING:absl:Missing metrics for step 4000\r\nERROR:absl:File /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/causal/overfit-seed69-1/interactive/3373280/004000/metrics/metrics not found.\r\nWARNING:absl:Missing metrics for step 5000\r\nERROR:absl:File /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/causal/overfit-seed69-1/interactive/3373280/005000/metrics/metrics not found.\r\nWARNING:absl:Missing metrics for step 3000\r\nERROR:absl:File /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/causal/overfit-seed69-1/interactive/3373280/003000/metrics/metrics not found.\r\n",,terminal_output +3664,9390958,"models/lam.py",2993,0,"",python,selection_mouse +3665,9391476,"models/lam.py",2960,0,"",python,selection_mouse +3666,9394892,"TERMINAL",0,0,"2025-07-24 16:36:49.589113: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. 
Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +3667,9396604,"TERMINAL",0,0,"bash",,terminal_focus +3668,9398947,"TERMINAL",0,0,"git status",,terminal_command +3669,9398963,"TERMINAL",0,0,"]633;E;2025-07-24 16:36:53 git status;406cfb31-2341-454a-afa8-cae7781806b2]633;COn branch new-arch-sampling\r\nYour branch is up to date with 'origin/new-arch-sampling'.\r\n\r\nChanges not staged for commit:\r\n (use ""git add ..."" to update what will be committed)\r\n (use ""git restore ..."" to discard changes in working directory)\r\n\tmodified: models/dynamics.py\r\n\tmodified: models/lam.py\r\n\tmodified: train_dynamics.py\r\n\r\nUntracked files:\r\n (use ""git add ..."" to include in what will be committed)\r\n\tdiff.diff\r\n\tdiff.log\r\n\tlogs/\r\n\toverfit_dir.zip\r\n\tread_tf_record.py\r\n\trequirements-franz.txt\r\n\tscripts_cremers/\r\n\tscripts_horeka/\r\n\tslurm/\r\n\tutils/logger_bak.py\r\n\tutils/visualizer.py\r\n\tweekend-job-requeuer.sh\r\n\tweekend-job-starter.sh\r\n\r\nno changes added to commit (use ""git add"" and/or ""git commit -a"")\r\n]0;tum_cte0515@hkn1991:~/Projects/jafar]633;D;0",,terminal_output +3670,9400351,"TERMINAL",0,0,"2025-07-24 16:36:54.974076: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +3671,9420318,"TERMINAL",0,0,"git commit -am ""revert back to prev action-pad, patches order in lam""",,terminal_command +3672,9420364,"TERMINAL",0,0,"]633;E;2025-07-24 16:37:15 git commit -am ""revert back to prev action-pad, patches order in lam"";406cfb31-2341-454a-afa8-cae7781806b2]633;C",,terminal_output +3673,9420696,"TERMINAL",0,0,"[new-arch-sampling c1986fe] revert back to prev action-pad, patches order in lam\r\n 3 files changed, 10 insertions(+), 10 deletions(-)\r\n]0;tum_cte0515@hkn1991:~/Projects/jafar]633;D;0",,terminal_output +3674,9424385,"TERMINAL",0,0,"srun",,terminal_focus +3675,9440206,"TERMINAL",0,0,"autoreg sampling...\r\nSampling token 0 from frame 1\r\nSampling token 1 from frame 1\r\nSampling token 2 from frame 1\r\nSampling token 3 from frame 1\r\nSampling token 4 from frame 1\r\nSampling token 5 from frame 1\r\nSampling token 6 from frame 1\r\nSampling token 7 from frame 1\r\nSampling token 8 from frame 1\r\nSampling token 9 from frame 1\r\nSampling token 10 from frame 1\r\nSampling token 11 from frame 1\r\nSampling token 12 from frame 1\r\nSampling token 13 from frame 1\r\nSampling token 14 from frame 1\r\nSampling token 15 from frame 1\r\nSampling token 16 from frame 1\r\nSampling token 17 from frame 1\r\nSampling token 18 from frame 1\r\nSampling token 19 from frame 1\r\nSampling token 20 from frame 1\r\nSampling token 21 from frame 1\r\nSampling token 22 from frame 1\r\nSampling token 23 from frame 1\r\nSampling token 24 from frame 1\r\nSampling token 25 from frame 1\r\nSampling token 26 from frame 1\r\nSampling token 27 from frame 1\r\nSampling token 28 from frame 1\r\nSampling token 29 from frame 1\r\nSampling token 30 from frame 1\r\nSampling token 31 from frame 1\r\nSampling token 32 from frame 1\r\nSampling token 33 from frame 1\r\nSampling token 34 from frame 1\r\nSampling token 35 from frame 1\r\nSampling token 36 from frame 1\r\nSampling token 37 from frame 1\r\nSampling token 
38 from frame 1\r\nSampling token 39 from frame 1\r\n[... one line per token, Sampling token 40 through 256 from frame 1, elided ...]\r\nSampling token 257 from frame 1\r\n",,terminal_output +3676,9473887,"TERMINAL",0,0,"Sampling token 258 from frame 1\r\n[... Sampling token 259 through 512 from frame 1 elided ...]\r\nSampling token 513 from frame 1\r\n",,terminal_output +3677,9507847,"TERMINAL",0,0,"Sampling token 514 from frame 1\r\n[... Sampling token 515 through 768 from frame 1 elided ...]\r\nSampling token 769 from frame 1\r\n",,terminal_output +3678,9533382,"TERMINAL",0,0,"Sampling token 770 from frame 1\r\n[... Sampling token 771 through 846 from frame 1 elided ...]\r\nSampling token 847 from frame
1\r\nSampling token 848 from frame 1\r\nSampling token 849 from frame 1\r\nSampling token 850 from frame 1\r\nSampling token 851 from frame 1\r\nSampling token 852 from frame 1\r\nSampling token 853 from frame 1\r\nSampling token 854 from frame 1\r\nSampling token 855 from frame 1\r\nSampling token 856 from frame 1\r\nSampling token 857 from frame 1\r\nSampling token 858 from frame 1\r\nSampling token 859 from frame 1\r\nSampling token 860 from frame 1\r\nSampling token 861 from frame 1\r\nSampling token 862 from frame 1\r\nSampling token 863 from frame 1\r\nSampling token 864 from frame 1\r\nSampling token 865 from frame 1\r\nSampling token 866 from frame 1\r\nSampling token 867 from frame 1\r\nSampling token 868 from frame 1\r\nSampling token 869 from frame 1\r\nSampling token 870 from frame 1\r\nSampling token 871 from frame 1\r\nSampling token 872 from frame 1\r\nSampling token 873 from frame 1\r\nSampling token 874 from frame 1\r\nSampling token 875 from frame 1\r\nSampling token 876 from frame 1\r\nSampling token 877 from frame 1\r\nSampling token 878 from frame 1\r\nSampling token 879 from frame 1\r\nSampling token 880 from frame 1\r\nSampling token 881 from frame 1\r\nSampling token 882 from frame 1\r\nSampling token 883 from frame 1\r\nSampling token 884 from frame 1\r\nSampling token 885 from frame 1\r\nSampling token 886 from frame 1\r\nSampling token 887 from frame 1\r\nSampling token 888 from frame 1\r\nSampling token 889 from frame 1\r\nSampling token 890 from frame 1\r\nSampling token 891 from frame 1\r\nSampling token 892 from frame 1\r\nSampling token 893 from frame 1\r\nSampling token 894 from frame 1\r\nSampling token 895 from frame 1\r\nSampling token 896 from frame 1\r\nSampling token 897 from frame 1\r\nSampling token 898 from frame 1\r\nSampling token 899 from frame 1\r\nSampling token 900 from frame 1\r\nSampling token 901 from frame 1\r\nSampling token 902 from frame 1\r\nSampling token 903 from frame 1\r\nSampling token 904 from frame 1\r\nSampling token 905 from frame 1\r\nSampling token 906 from frame 1\r\nSampling token 907 from frame 1\r\nSampling token 908 from frame 1\r\nSampling token 909 from frame 1\r\nSampling token 910 from frame 1\r\nSampling token 911 from frame 1\r\nSampling token 912 from frame 1\r\nSampling token 913 from frame 1\r\nSampling token 914 from frame 1\r\nSampling token 915 from frame 1\r\nSampling token 916 from frame 1\r\nSampling token 917 from frame 1\r\nSampling token 918 from frame 1\r\nSampling token 919 from frame 1\r\nautoreg sampling done. 
calculating ssim and saving video\r\nSSIM: 0.8565104007720947\r\n",,terminal_output
+3679,9534966,"TERMINAL",0,0,"]0;tum_cte0515@hkn0901:~/Projects/jafar[?2004h(jafar) [tum_cte0515@hkn0901 jafar]$ ",,terminal_output
+3680,9588749,"TERMINAL",0,0,"sh slurm/dev/mihir/horeka/yolo-runs/sampling.sh /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/causal/overfit-seed69-1/interactive/3373280",,terminal_output
+3681,9591171,"slurm/dev/mihir/horeka/yolo-runs/sampling.sh",0,0,"\n# Log the sbatch script\ncat $0\n\nmodule unload mpi/openmpi/5.0\nmodule unload devel/cuda/12.4\n# source .venv/bin/activate\n\narray_records_dir=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_new/open_ai_minecraft_arrayrecords_chunked\n\njob_name=$SLURM_JOB_NAME\nslurm_job_id=$SLURM_JOB_ID\n\nCHECKPOINT_DIR=$ws_dir/checkpoints/$job_name/$slurm_job_id\nmkdir -p $CHECKPOINT_DIR\n\ntokenizer_ckpt_dir=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/big-runs/tokenizer-lr-scaling/train_tokenizer_lr_sweep_1e-4\ndynamics_ckpt_dir=$1\necho $dynamics_ckpt_dir\n\nenv | grep SLURM\n\nsrun python sample.py \\n --checkpoint $dynamics_ckpt_dir \\n --dyna_dim=128 \\n --dyna_num_blocks=2 \\n --dyna_num_heads=4 \\n --seq_len=2 \\n --start_frame=0 \\n --data_dir $array_records_dir\n\n# srun python sample.py \\n # --checkpoint $dynamics_ckpt_dir \\n # --start_frame=0 \\n # --batch_size=12 \\n # --seq_len=2 \\n # --data_dir $array_records_dir\n",shellscript,tab
+3682,9593263,"slurm/dev/mihir/horeka/yolo-runs/sampling.sh",743,0,"",shellscript,selection_mouse
+3683,9594191,"slurm/dev/mihir/horeka/yolo-runs/sampling.sh",742,1,"",shellscript,content
+3684,9594602,"slurm/dev/mihir/horeka/yolo-runs/sampling.sh",742,0,"5",shellscript,content
+3685,9594603,"slurm/dev/mihir/horeka/yolo-runs/sampling.sh",743,0,"",shellscript,selection_keyboard
+3686,9595094,"slurm/dev/mihir/horeka/yolo-runs/sampling.sh",742,1,"",shellscript,content
+3687,9595324,"slurm/dev/mihir/horeka/yolo-runs/sampling.sh",742,0,"1",shellscript,content
+3688,9595324,"slurm/dev/mihir/horeka/yolo-runs/sampling.sh",743,0,"",shellscript,selection_keyboard
+3689,9596224,"slurm/dev/mihir/horeka/yolo-runs/sampling.sh",742,1,"",shellscript,content
+3690,9597672,"slurm/dev/mihir/horeka/yolo-runs/sampling.sh",742,0,"4",shellscript,content
+3691,9597673,"slurm/dev/mihir/horeka/yolo-runs/sampling.sh",743,0,"",shellscript,selection_keyboard
+3692,9600585,"TERMINAL",0,0,"\r\n[?2004l\r\r\n[... cat $0 echoes the sampling.sh script above verbatim, now with --seq_len=4 ...]\r\n",,terminal_output
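The run above finishes by scoring the autoregressively sampled video against ground truth (the "SSIM: 0.8565..." line), and the keystrokes at offset 742 of sampling.sh change --seq_len=2 to --seq_len=4 before the script is re-run. As a minimal sketch of how such a mean-SSIM number could be produced, assuming (T, H, W, C) float frames normalized to [0, 1]; gt_video and recon_video are hypothetical names and sample.py's actual metric code is not part of this recording:

# Hedged sketch: mean per-frame SSIM for a sampled video. The function
# and argument names are assumptions, not sample.py's real code.
import numpy as np
from skimage.metrics import structural_similarity as ssim

def mean_video_ssim(gt_video: np.ndarray, recon_video: np.ndarray) -> float:
    # Average SSIM over frame pairs, e.g. printed as "SSIM: 0.8565..."
    scores = [
        ssim(gt, recon, channel_axis=-1, data_range=1.0)
        for gt, recon in zip(gt_video, recon_video)
    ]
    return float(np.mean(scores))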
+3693,9600763,"TERMINAL",0,0,"/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/causal/overfit-seed69-1/interactive/3373280\r\nSLURM_STEP_NUM_TASKS=1\r\nSLURM_JOB_USER=tum_cte0515\r\nSLURM_TASKS_PER_NODE=1\r\nSLURM_JOB_UID=999226\r\nSLURM_TASK_PID=436020\r\nSLURM_JOB_GPUS=0\r\nSLURM_LOCALID=0\r\nSLURM_SUBMIT_DIR=/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar\r\nSLURMD_NODENAME=hkn0901\r\nSLURM_JOB_START_TIME=1753366425\r\nSLURM_STEP_NODELIST=hkn0901\r\nSLURM_CLUSTER_NAME=hk\r\nSLURM_JOB_END_TIME=1753370025\r\nSLURM_PMI2_SRUN_PORT=34243\r\nSLURM_CPUS_ON_NODE=6\r\nSLURM_JOB_CPUS_PER_NODE=6\r\nSLURM_GPUS_ON_NODE=1\r\nSLURM_GTIDS=0\r\nSLURM_JOB_PARTITION=dev_accelerated-h100\r\nSLURM_TRES_PER_TASK=cpu=5\r\nSLURM_OOM_KILL_STEP=0\r\nSLURM_JOB_NUM_NODES=1\r\nSLURM_STEPID=4294967290\r\nSLURM_JOBID=3373280\r\nSLURM_PTY_PORT=42147\r\nSLURM_JOB_QOS=normal\r\nSLURM_LAUNCH_NODE_IPADDR=10.0.7.199\r\nSLURM_PTY_WIN_ROW=48\r\nSLURM_PMI2_PROC_MAPPING=(vector,(0,1,1))\r\nSLURMD_DEBUG=2\r\nSLURM_PROCID=0\r\nSLURM_CPUS_PER_TASK=5\r\nSLURM_NTASKS=1\r\nSLURM_TOPOLOGY_ADDR=hkibb.hkibbi4.hkibbi4e1.hkn0901\r\nSLURM_TOPOLOGY_ADDR_PATTERN=switch.switch.switch.node\r\nSLURM_SRUN_COMM_HOST=10.0.7.199\r\nSLURM_SCRIPT_CONTEXT=prolog_task\r\nSLURM_PTY_WIN_COL=194\r\nSLURM_NODELIST=hkn0901\r\nSLURM_SRUN_COMM_PORT=46593\r\nSLURM_STEP_ID=4294967290\r\nSLURM_JOB_ACCOUNT=hk-project-p0023960\r\nSLURM_PRIO_PROCESS=0\r\nSLURM_NPROCS=1\r\nSLURM_NNODES=1\r\nSLURM_SUBMIT_HOST=hkn1991.localdomain\r\nSLURM_JOB_ID=3373280\r\nSLURM_NODEID=0\r\nSLURM_STEP_NUM_NODES=1\r\nSLURM_STEP_TASKS_PER_NODE=1\r\nSLURM_MPI_TYPE=pmi2\r\nSLURM_PMI2_STEP_NODES=hkn0901\r\nSLURM_CONF=/etc/slurm/slurm.conf\r\nSLURM_JOB_NAME=interactive\r\nSLURM_NTASKS_PER_NODE=1\r\nSLURM_STEP_LAUNCHER_PORT=46593\r\nSLURM_JOB_GID=502226\r\nSLURM_JOB_NODELIST=hkn0901\r\n",,terminal_output +3694,9600878,"TERMINAL",0,0,"GpuFreq=control_disabled\r\n",,terminal_output +3695,9603717,"TERMINAL",0,0,"2025-07-24 16:40:18.300468: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +3696,9612243,"TERMINAL",0,0,"2025-07-24 16:40:26.932349: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +3697,9619006,"TERMINAL",0,0,"2025-07-24 16:40:33.613015: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +3698,9629588,"TERMINAL",0,0,"2025-07-24 16:40:44.285206: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. 
Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output
+3699,9632098,"TERMINAL",0,0,"WARNING:absl:Missing metrics for step 4000\r\nERROR:absl:File /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/causal/overfit-seed69-1/interactive/3373280/004000/metrics/metrics not found.\r\nWARNING:absl:Missing metrics for step 5000\r\nERROR:absl:File /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/causal/overfit-seed69-1/interactive/3373280/005000/metrics/metrics not found.\r\nWARNING:absl:Missing metrics for step 3000\r\nERROR:absl:File /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/causal/overfit-seed69-1/interactive/3373280/003000/metrics/metrics not found.\r\n",,terminal_output
+3700,9638392,"TERMINAL",0,0,"2025-07-24 16:40:53.087778: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output
+3701,9644377,"TERMINAL",0,0,"2025-07-24 16:40:58.955263: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output
+3702,9684615,"TERMINAL",0,0,"autoreg sampling...\r\nSampling token 0 from frame 1\r\n[... Sampling token 1-256 from frame 1 elided ...]\r\nSampling token 257 from frame 1\r\n",,terminal_output
+3703,9719719,"TERMINAL",0,0,"Sampling token 258 from frame 1\r\n[... Sampling token 259-512 from frame 1 elided ...]\r\nSampling token 513 from frame 1\r\n",,terminal_output
+3704,9720027,"TERMINAL",0,0,"bash",,terminal_focus
+3705,9722625,"slurm/dev/mihir/horeka/overfit_sample/causal/dynamics_overfit_sample.sbatch",0,0,"",shellscript,tab
+3706,9727448,"slurm/dev/mihir/horeka/overfit_sample/causal/dynamics_overfit_sample.sbatch",83,0,"",shellscript,selection_mouse
+3707,9728325,"slurm/dev/mihir/horeka/overfit_sample/causal/dynamics_overfit_sample.sbatch",84,0,"",shellscript,selection_command
+3708,9728645,"slurm/dev/mihir/horeka/overfit_sample/causal/dynamics_overfit_sample.sbatch",83,1,"",shellscript,content
+3709,9728833,"slurm/dev/mihir/horeka/overfit_sample/causal/dynamics_overfit_sample.sbatch",82,1,"",shellscript,content
+3710,9728968,"slurm/dev/mihir/horeka/overfit_sample/causal/dynamics_overfit_sample.sbatch",82,0,"1",shellscript,content
+3711,9728969,"slurm/dev/mihir/horeka/overfit_sample/causal/dynamics_overfit_sample.sbatch",83,0,"",shellscript,selection_keyboard
+3712,9729790,"slurm/dev/mihir/horeka/overfit_sample/causal/dynamics_overfit_sample.sbatch",106,0,"",shellscript,selection_command
+3713,9730303,"slurm/dev/mihir/horeka/overfit_sample/causal/dynamics_overfit_sample.sbatch",83,0,"",shellscript,selection_command
+3714,9730522,"slurm/dev/mihir/horeka/overfit_sample/causal/dynamics_overfit_sample.sbatch",82,0,"",shellscript,selection_command
+3715,9731326,"slurm/dev/mihir/horeka/overfit_sample/causal/dynamics_overfit_sample.sbatch",82,0,"0",shellscript,content
+3716,9731327,"slurm/dev/mihir/horeka/overfit_sample/causal/dynamics_overfit_sample.sbatch",83,0,"",shellscript,selection_keyboard
+3717,9733792,"models/dynamics.py",0,0,"",python,tab
+3718,9742859,"TERMINAL",0,0,"srun",,terminal_focus
+3719,9744218,"TERMINAL",0,0,"bash",,terminal_focus
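Before the sampler's progress output resumes below, note the shape of the loop behind the "Sampling token i from frame f" lines that dominate this recording: the log shows token indices 0 through 919, i.e. 920 latent-token slots filled one position at a time for each generated frame. A minimal sketch under those assumptions follows; dynamics_apply, tokens, and the array layout are hypothetical stand-ins, since sample.py's real decode loop is not part of this recording:

# Hedged sketch of a per-token autoregressive decode matching the log.
import jax
import jax.numpy as jnp

TOKENS_PER_FRAME = 920  # inferred from token indices 0..919 in the log

def sample_frame(rng, dynamics_apply, tokens: jnp.ndarray, frame_idx: int):
    # Fill one frame's token slots left to right, re-running the model
    # each step and sampling the next latent token from its logits.
    for i in range(TOKENS_PER_FRAME):
        print(f"Sampling token {i} from frame {frame_idx}")
        rng, step_rng = jax.random.split(rng)
        logits = dynamics_apply(tokens)              # (frames, tokens, vocab)
        next_tok = jax.random.categorical(step_rng, logits[frame_idx, i])
        tokens = tokens.at[frame_idx, i].set(next_tok)
    return rng, tokens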
+3720,9754361,"TERMINAL",0,0,"Sampling token 514 from frame 1\r\nSampling token 515 from frame 1\r\nSampling token 516 from frame 1\r\nSampling token 517 from frame 1\r\nSampling token 518 from frame 1\r\nSampling token 519 from frame 1\r\nSampling token 520 from frame 1\r\nSampling token 521 from frame 1\r\nSampling token 522 from frame 1\r\nSampling token 523 from frame 1\r\nSampling token 524 from frame 1\r\nSampling token 525 from frame 1\r\nSampling token 526 from frame 1\r\nSampling token 527 from frame 1\r\nSampling token 528 from frame 1\r\nSampling token 529 from frame 1\r\nSampling token 530 from frame 1\r\nSampling token 531 from frame 1\r\nSampling token 532 from frame 1\r\nSampling token 533 from frame 1\r\nSampling token 534 from frame 1\r\nSampling token 535 from frame 1\r\nSampling token 536 from frame 1\r\nSampling token 537 from frame 1\r\nSampling token 538 from frame 1\r\nSampling token 539 from frame 1\r\nSampling token 540 from frame 1\r\nSampling token 541 from frame 1\r\nSampling token 542 from frame 1\r\nSampling token 543 from frame 1\r\nSampling token 544 from frame 1\r\nSampling token 545 from frame 1\r\nSampling token 546 from frame 1\r\nSampling token 547 from frame 1\r\nSampling token 548 from frame 1\r\nSampling token 549 from frame 1\r\nSampling token 550 from frame 1\r\nSampling token 551 from frame 1\r\nSampling token 552 from frame 1\r\nSampling token 553 from frame 1\r\nSampling token 554 from frame 1\r\nSampling token 555 from frame 1\r\nSampling token 556 from frame 1\r\nSampling token 557 from frame 1\r\nSampling token 558 from frame 1\r\nSampling token 559 from frame 1\r\nSampling token 560 from frame 1\r\nSampling token 561 from frame 1\r\nSampling token 562 from frame 1\r\nSampling token 563 from frame 1\r\nSampling token 564 from frame 1\r\nSampling token 565 from frame 1\r\nSampling token 566 from frame 1\r\nSampling token 567 from frame 1\r\nSampling token 568 from frame 1\r\nSampling token 569 from frame 1\r\nSampling token 570 from frame 1\r\nSampling token 571 from frame 1\r\nSampling token 572 from frame 1\r\nSampling token 573 from frame 1\r\nSampling token 574 from frame 1\r\nSampling token 575 from frame 1\r\nSampling token 576 from frame 1\r\nSampling token 577 from frame 1\r\nSampling token 578 from frame 1\r\nSampling token 579 from frame 1\r\nSampling token 580 from frame 1\r\nSampling token 581 from frame 1\r\nSampling token 582 from frame 1\r\nSampling token 583 from frame 1\r\nSampling token 584 from frame 1\r\nSampling token 585 from frame 1\r\nSampling token 586 from frame 1\r\nSampling token 587 from frame 1\r\nSampling token 588 from frame 1\r\nSampling token 589 from frame 1\r\nSampling token 590 from frame 1\r\nSampling token 591 from frame 1\r\nSampling token 592 from frame 1\r\nSampling token 593 from frame 1\r\nSampling token 594 from frame 1\r\nSampling token 595 from frame 1\r\nSampling token 596 from frame 1\r\nSampling token 597 from frame 1\r\nSampling token 598 from frame 1\r\nSampling token 599 from frame 1\r\nSampling token 600 from frame 1\r\nSampling token 601 from frame 1\r\nSampling token 602 from frame 1\r\nSampling token 603 from frame 1\r\nSampling token 604 from frame 1\r\nSampling token 605 from frame 1\r\nSampling token 606 from frame 1\r\nSampling token 607 from frame 1\r\nSampling token 608 from frame 1\r\nSampling token 609 from frame 1\r\nSampling token 610 from frame 1\r\nSampling token 611 from frame 1\r\nSampling token 612 from frame 1\r\nSampling token 613 from frame 1\r\nSampling token 614 from 
frame 1\r\nSampling token 615 from frame 1\r\nSampling token 616 from frame 1\r\nSampling token 617 from frame 1\r\nSampling token 618 from frame 1\r\nSampling token 619 from frame 1\r\nSampling token 620 from frame 1\r\nSampling token 621 from frame 1\r\nSampling token 622 from frame 1\r\nSampling token 623 from frame 1\r\nSampling token 624 from frame 1\r\nSampling token 625 from frame 1\r\nSampling token 626 from frame 1\r\nSampling token 627 from frame 1\r\nSampling token 628 from frame 1\r\nSampling token 629 from frame 1\r\nSampling token 630 from frame 1\r\nSampling token 631 from frame 1\r\nSampling token 632 from frame 1\r\nSampling token 633 from frame 1\r\nSampling token 634 from frame 1\r\nSampling token 635 from frame 1\r\nSampling token 636 from frame 1\r\nSampling token 637 from frame 1\r\nSampling token 638 from frame 1\r\nSampling token 639 from frame 1\r\nSampling token 640 from frame 1\r\nSampling token 641 from frame 1\r\nSampling token 642 from frame 1\r\nSampling token 643 from frame 1\r\nSampling token 644 from frame 1\r\nSampling token 645 from frame 1\r\nSampling token 646 from frame 1\r\nSampling token 647 from frame 1\r\nSampling token 648 from frame 1\r\nSampling token 649 from frame 1\r\nSampling token 650 from frame 1\r\nSampling token 651 from frame 1\r\nSampling token 652 from frame 1\r\nSampling token 653 from frame 1\r\nSampling token 654 from frame 1\r\nSampling token 655 from frame 1\r\nSampling token 656 from frame 1\r\nSampling token 657 from frame 1\r\nSampling token 658 from frame 1\r\nSampling token 659 from frame 1\r\nSampling token 660 from frame 1\r\nSampling token 661 from frame 1\r\nSampling token 662 from frame 1\r\nSampling token 663 from frame 1\r\nSampling token 664 from frame 1\r\nSampling token 665 from frame 1\r\nSampling token 666 from frame 1\r\nSampling token 667 from frame 1\r\nSampling token 668 from frame 1\r\nSampling token 669 from frame 1\r\nSampling token 670 from frame 1\r\nSampling token 671 from frame 1\r\nSampling token 672 from frame 1\r\nSampling token 673 from frame 1\r\nSampling token 674 from frame 1\r\nSampling token 675 from frame 1\r\nSampling token 676 from frame 1\r\nSampling token 677 from frame 1\r\nSampling token 678 from frame 1\r\nSampling token 679 from frame 1\r\nSampling token 680 from frame 1\r\nSampling token 681 from frame 1\r\nSampling token 682 from frame 1\r\nSampling token 683 from frame 1\r\nSampling token 684 from frame 1\r\nSampling token 685 from frame 1\r\nSampling token 686 from frame 1\r\nSampling token 687 from frame 1\r\nSampling token 688 from frame 1\r\nSampling token 689 from frame 1\r\nSampling token 690 from frame 1\r\nSampling token 691 from frame 1\r\nSampling token 692 from frame 1\r\nSampling token 693 from frame 1\r\nSampling token 694 from frame 1\r\nSampling token 695 from frame 1\r\nSampling token 696 from frame 1\r\nSampling token 697 from frame 1\r\nSampling token 698 from frame 1\r\nSampling token 699 from frame 1\r\nSampling token 700 from frame 1\r\nSampling token 701 from frame 1\r\nSampling token 702 from frame 1\r\nSampling token 703 from frame 1\r\nSampling token 704 from frame 1\r\nSampling token 705 from frame 1\r\nSampling token 706 from frame 1\r\nSampling token 707 from frame 1\r\nSampling token 708 from frame 1\r\nSampling token 709 from frame 1\r\nSampling token 710 from frame 1\r\nSampling token 711 from frame 1\r\nSampling token 712 from frame 1\r\nSampling token 713 from frame 1\r\nSampling token 714 from frame 1\r\nSampling token 715 from frame 1\r\nSampling 
token 716 from frame 1\r\nSampling token 717 from frame 1\r\nSampling token 718 from frame 1\r\nSampling token 719 from frame 1\r\nSampling token 720 from frame 1\r\nSampling token 721 from frame 1\r\nSampling token 722 from frame 1\r\nSampling token 723 from frame 1\r\nSampling token 724 from frame 1\r\nSampling token 725 from frame 1\r\nSampling token 726 from frame 1\r\nSampling token 727 from frame 1\r\nSampling token 728 from frame 1\r\nSampling token 729 from frame 1\r\nSampling token 730 from frame 1\r\nSampling token 731 from frame 1\r\nSampling token 732 from frame 1\r\nSampling token 733 from frame 1\r\nSampling token 734 from frame 1\r\nSampling token 735 from frame 1\r\nSampling token 736 from frame 1\r\nSampling token 737 from frame 1\r\nSampling token 738 from frame 1\r\nSampling token 739 from frame 1\r\nSampling token 740 from frame 1\r\nSampling token 741 from frame 1\r\nSampling token 742 from frame 1\r\nSampling token 743 from frame 1\r\nSampling token 744 from frame 1\r\nSampling token 745 from frame 1\r\nSampling token 746 from frame 1\r\nSampling token 747 from frame 1\r\nSampling token 748 from frame 1\r\nSampling token 749 from frame 1\r\nSampling token 750 from frame 1\r\nSampling token 751 from frame 1\r\nSampling token 752 from frame 1\r\nSampling token 753 from frame 1\r\nSampling token 754 from frame 1\r\nSampling token 755 from frame 1\r\nSampling token 756 from frame 1\r\nSampling token 757 from frame 1\r\nSampling token 758 from frame 1\r\nSampling token 759 from frame 1\r\nSampling token 760 from frame 1\r\nSampling token 761 from frame 1\r\nSampling token 762 from frame 1\r\nSampling token 763 from frame 1\r\nSampling token 764 from frame 1\r\nSampling token 765 from frame 1\r\nSampling token 766 from frame 1\r\nSampling token 767 from frame 1\r\nSampling token 768 from frame 1\r\nSampling token 769 from frame 1\r\n",,terminal_output +3721,9788365,"TERMINAL",0,0,"Sampling token 770 from frame 1\r\nSampling token 771 from frame 1\r\nSampling token 772 from frame 1\r\nSampling token 773 from frame 1\r\nSampling token 774 from frame 1\r\nSampling token 775 from frame 1\r\nSampling token 776 from frame 1\r\nSampling token 777 from frame 1\r\nSampling token 778 from frame 1\r\nSampling token 779 from frame 1\r\nSampling token 780 from frame 1\r\nSampling token 781 from frame 1\r\nSampling token 782 from frame 1\r\nSampling token 783 from frame 1\r\nSampling token 784 from frame 1\r\nSampling token 785 from frame 1\r\nSampling token 786 from frame 1\r\nSampling token 787 from frame 1\r\nSampling token 788 from frame 1\r\nSampling token 789 from frame 1\r\nSampling token 790 from frame 1\r\nSampling token 791 from frame 1\r\nSampling token 792 from frame 1\r\nSampling token 793 from frame 1\r\nSampling token 794 from frame 1\r\nSampling token 795 from frame 1\r\nSampling token 796 from frame 1\r\nSampling token 797 from frame 1\r\nSampling token 798 from frame 1\r\nSampling token 799 from frame 1\r\nSampling token 800 from frame 1\r\nSampling token 801 from frame 1\r\nSampling token 802 from frame 1\r\nSampling token 803 from frame 1\r\nSampling token 804 from frame 1\r\nSampling token 805 from frame 1\r\nSampling token 806 from frame 1\r\nSampling token 807 from frame 1\r\nSampling token 808 from frame 1\r\nSampling token 809 from frame 1\r\nSampling token 810 from frame 1\r\nSampling token 811 from frame 1\r\nSampling token 812 from frame 1\r\nSampling token 813 from frame 1\r\nSampling token 814 from frame 1\r\nSampling token 815 from frame 1\r\nSampling token 
816 from frame 1\r\nSampling token 817 from frame 1\r\nSampling token 818 from frame 1\r\nSampling token 819 from frame 1\r\nSampling token 820 from frame 1\r\nSampling token 821 from frame 1\r\nSampling token 822 from frame 1\r\nSampling token 823 from frame 1\r\nSampling token 824 from frame 1\r\nSampling token 825 from frame 1\r\nSampling token 826 from frame 1\r\nSampling token 827 from frame 1\r\nSampling token 828 from frame 1\r\nSampling token 829 from frame 1\r\nSampling token 830 from frame 1\r\nSampling token 831 from frame 1\r\nSampling token 832 from frame 1\r\nSampling token 833 from frame 1\r\nSampling token 834 from frame 1\r\nSampling token 835 from frame 1\r\nSampling token 836 from frame 1\r\nSampling token 837 from frame 1\r\nSampling token 838 from frame 1\r\nSampling token 839 from frame 1\r\nSampling token 840 from frame 1\r\nSampling token 841 from frame 1\r\nSampling token 842 from frame 1\r\nSampling token 843 from frame 1\r\nSampling token 844 from frame 1\r\nSampling token 845 from frame 1\r\nSampling token 846 from frame 1\r\nSampling token 847 from frame 1\r\nSampling token 848 from frame 1\r\nSampling token 849 from frame 1\r\nSampling token 850 from frame 1\r\nSampling token 851 from frame 1\r\nSampling token 852 from frame 1\r\nSampling token 853 from frame 1\r\nSampling token 854 from frame 1\r\nSampling token 855 from frame 1\r\nSampling token 856 from frame 1\r\nSampling token 857 from frame 1\r\nSampling token 858 from frame 1\r\nSampling token 859 from frame 1\r\nSampling token 860 from frame 1\r\nSampling token 861 from frame 1\r\nSampling token 862 from frame 1\r\nSampling token 863 from frame 1\r\nSampling token 864 from frame 1\r\nSampling token 865 from frame 1\r\nSampling token 866 from frame 1\r\nSampling token 867 from frame 1\r\nSampling token 868 from frame 1\r\nSampling token 869 from frame 1\r\nSampling token 870 from frame 1\r\nSampling token 871 from frame 1\r\nSampling token 872 from frame 1\r\nSampling token 873 from frame 1\r\nSampling token 874 from frame 1\r\nSampling token 875 from frame 1\r\nSampling token 876 from frame 1\r\nSampling token 877 from frame 1\r\nSampling token 878 from frame 1\r\nSampling token 879 from frame 1\r\nSampling token 880 from frame 1\r\nSampling token 881 from frame 1\r\nSampling token 882 from frame 1\r\nSampling token 883 from frame 1\r\nSampling token 884 from frame 1\r\nSampling token 885 from frame 1\r\nSampling token 886 from frame 1\r\nSampling token 887 from frame 1\r\nSampling token 888 from frame 1\r\nSampling token 889 from frame 1\r\nSampling token 890 from frame 1\r\nSampling token 891 from frame 1\r\nSampling token 892 from frame 1\r\nSampling token 893 from frame 1\r\nSampling token 894 from frame 1\r\nSampling token 895 from frame 1\r\nSampling token 896 from frame 1\r\nSampling token 897 from frame 1\r\nSampling token 898 from frame 1\r\nSampling token 899 from frame 1\r\nSampling token 900 from frame 1\r\nSampling token 901 from frame 1\r\nSampling token 902 from frame 1\r\nSampling token 903 from frame 1\r\nSampling token 904 from frame 1\r\nSampling token 905 from frame 1\r\nSampling token 906 from frame 1\r\nSampling token 907 from frame 1\r\nSampling token 908 from frame 1\r\nSampling token 909 from frame 1\r\nSampling token 910 from frame 1\r\nSampling token 911 from frame 1\r\nSampling token 912 from frame 1\r\nSampling token 913 from frame 1\r\nSampling token 914 from frame 1\r\nSampling token 915 from frame 1\r\nSampling token 916 from frame 1\r\nSampling token 917 from frame 
1\r\nSampling token 918 from frame 1\r\nSampling token 919 from frame 1\r\nSampling token 0 from frame 2\r\nSampling token 1 from frame 2\r\nSampling token 2 from frame 2\r\nSampling token 3 from frame 2\r\nSampling token 4 from frame 2\r\nSampling token 5 from frame 2\r\nSampling token 6 from frame 2\r\nSampling token 7 from frame 2\r\nSampling token 8 from frame 2\r\nSampling token 9 from frame 2\r\nSampling token 10 from frame 2\r\nSampling token 11 from frame 2\r\nSampling token 12 from frame 2\r\nSampling token 13 from frame 2\r\nSampling token 14 from frame 2\r\nSampling token 15 from frame 2\r\nSampling token 16 from frame 2\r\nSampling token 17 from frame 2\r\nSampling token 18 from frame 2\r\nSampling token 19 from frame 2\r\nSampling token 20 from frame 2\r\nSampling token 21 from frame 2\r\nSampling token 22 from frame 2\r\nSampling token 23 from frame 2\r\nSampling token 24 from frame 2\r\nSampling token 25 from frame 2\r\nSampling token 26 from frame 2\r\nSampling token 27 from frame 2\r\nSampling token 28 from frame 2\r\nSampling token 29 from frame 2\r\nSampling token 30 from frame 2\r\nSampling token 31 from frame 2\r\nSampling token 32 from frame 2\r\nSampling token 33 from frame 2\r\nSampling token 34 from frame 2\r\nSampling token 35 from frame 2\r\nSampling token 36 from frame 2\r\nSampling token 37 from frame 2\r\nSampling token 38 from frame 2\r\nSampling token 39 from frame 2\r\nSampling token 40 from frame 2\r\nSampling token 41 from frame 2\r\nSampling token 42 from frame 2\r\nSampling token 43 from frame 2\r\nSampling token 44 from frame 2\r\nSampling token 45 from frame 2\r\nSampling token 46 from frame 2\r\nSampling token 47 from frame 2\r\nSampling token 48 from frame 2\r\nSampling token 49 from frame 2\r\nSampling token 50 from frame 2\r\nSampling token 51 from frame 2\r\nSampling token 52 from frame 2\r\nSampling token 53 from frame 2\r\nSampling token 54 from frame 2\r\nSampling token 55 from frame 2\r\nSampling token 56 from frame 2\r\nSampling token 57 from frame 2\r\nSampling token 58 from frame 2\r\nSampling token 59 from frame 2\r\nSampling token 60 from frame 2\r\nSampling token 61 from frame 2\r\nSampling token 62 from frame 2\r\nSampling token 63 from frame 2\r\nSampling token 64 from frame 2\r\nSampling token 65 from frame 2\r\nSampling token 66 from frame 2\r\nSampling token 67 from frame 2\r\nSampling token 68 from frame 2\r\nSampling token 69 from frame 2\r\nSampling token 70 from frame 2\r\nSampling token 71 from frame 2\r\nSampling token 72 from frame 2\r\nSampling token 73 from frame 2\r\nSampling token 74 from frame 2\r\nSampling token 75 from frame 2\r\nSampling token 76 from frame 2\r\nSampling token 77 from frame 2\r\nSampling token 78 from frame 2\r\nSampling token 79 from frame 2\r\nSampling token 80 from frame 2\r\nSampling token 81 from frame 2\r\nSampling token 82 from frame 2\r\nSampling token 83 from frame 2\r\nSampling token 84 from frame 2\r\nSampling token 85 from frame 2\r\nSampling token 86 from frame 2\r\nSampling token 87 from frame 2\r\nSampling token 88 from frame 2\r\nSampling token 89 from frame 2\r\nSampling token 90 from frame 2\r\nSampling token 91 from frame 2\r\nSampling token 92 from frame 2\r\nSampling token 93 from frame 2\r\nSampling token 94 from frame 2\r\nSampling token 95 from frame 2\r\nSampling token 96 from frame 2\r\nSampling token 97 from frame 2\r\nSampling token 98 from frame 2\r\nSampling token 99 from frame 2\r\nSampling token 100 from frame 2\r\nSampling token 101 from frame 2\r\nSampling token 102 
from frame 2\r\nSampling token 103 from frame 2\r\nSampling token 104 from frame 2\r\nSampling token 105 from frame 2\r\nSampling token 106 from frame 2\r\nSampling token 107 from frame 2\r\nSampling token 108 from frame 2\r\n",,terminal_output +3722,9801892,"TERMINAL",0,0,"srun",,terminal_focus +3723,9821855,"TERMINAL",0,0,"Sampling token 109 from frame 2\r\nSampling token 110 from frame 2\r\nSampling token 111 from frame 2\r\nSampling token 112 from frame 2\r\nSampling token 113 from frame 2\r\nSampling token 114 from frame 2\r\nSampling token 115 from frame 2\r\nSampling token 116 from frame 2\r\nSampling token 117 from frame 2\r\nSampling token 118 from frame 2\r\nSampling token 119 from frame 2\r\nSampling token 120 from frame 2\r\nSampling token 121 from frame 2\r\nSampling token 122 from frame 2\r\nSampling token 123 from frame 2\r\nSampling token 124 from frame 2\r\nSampling token 125 from frame 2\r\nSampling token 126 from frame 2\r\nSampling token 127 from frame 2\r\nSampling token 128 from frame 2\r\nSampling token 129 from frame 2\r\nSampling token 130 from frame 2\r\nSampling token 131 from frame 2\r\nSampling token 132 from frame 2\r\nSampling token 133 from frame 2\r\nSampling token 134 from frame 2\r\nSampling token 135 from frame 2\r\nSampling token 136 from frame 2\r\nSampling token 137 from frame 2\r\nSampling token 138 from frame 2\r\nSampling token 139 from frame 2\r\nSampling token 140 from frame 2\r\nSampling token 141 from frame 2\r\nSampling token 142 from frame 2\r\nSampling token 143 from frame 2\r\nSampling token 144 from frame 2\r\nSampling token 145 from frame 2\r\nSampling token 146 from frame 2\r\nSampling token 147 from frame 2\r\nSampling token 148 from frame 2\r\nSampling token 149 from frame 2\r\nSampling token 150 from frame 2\r\nSampling token 151 from frame 2\r\nSampling token 152 from frame 2\r\nSampling token 153 from frame 2\r\nSampling token 154 from frame 2\r\nSampling token 155 from frame 2\r\nSampling token 156 from frame 2\r\nSampling token 157 from frame 2\r\nSampling token 158 from frame 2\r\nSampling token 159 from frame 2\r\nSampling token 160 from frame 2\r\nSampling token 161 from frame 2\r\nSampling token 162 from frame 2\r\nSampling token 163 from frame 2\r\nSampling token 164 from frame 2\r\nSampling token 165 from frame 2\r\nSampling token 166 from frame 2\r\nSampling token 167 from frame 2\r\nSampling token 168 from frame 2\r\nSampling token 169 from frame 2\r\nSampling token 170 from frame 2\r\nSampling token 171 from frame 2\r\nSampling token 172 from frame 2\r\nSampling token 173 from frame 2\r\nSampling token 174 from frame 2\r\nSampling token 175 from frame 2\r\nSampling token 176 from frame 2\r\nSampling token 177 from frame 2\r\nSampling token 178 from frame 2\r\nSampling token 179 from frame 2\r\nSampling token 180 from frame 2\r\nSampling token 181 from frame 2\r\nSampling token 182 from frame 2\r\nSampling token 183 from frame 2\r\nSampling token 184 from frame 2\r\nSampling token 185 from frame 2\r\nSampling token 186 from frame 2\r\nSampling token 187 from frame 2\r\nSampling token 188 from frame 2\r\nSampling token 189 from frame 2\r\nSampling token 190 from frame 2\r\nSampling token 191 from frame 2\r\nSampling token 192 from frame 2\r\nSampling token 193 from frame 2\r\nSampling token 194 from frame 2\r\nSampling token 195 from frame 2\r\nSampling token 196 from frame 2\r\nSampling token 197 from frame 2\r\nSampling token 198 from frame 2\r\nSampling token 199 from frame 2\r\nSampling token 200 from frame 
2\r\nSampling token 201 from frame 2\r\nSampling token 202 from frame 2\r\nSampling token 203 from frame 2\r\nSampling token 204 from frame 2\r\nSampling token 205 from frame 2\r\nSampling token 206 from frame 2\r\nSampling token 207 from frame 2\r\nSampling token 208 from frame 2\r\nSampling token 209 from frame 2\r\nSampling token 210 from frame 2\r\nSampling token 211 from frame 2\r\nSampling token 212 from frame 2\r\nSampling token 213 from frame 2\r\nSampling token 214 from frame 2\r\nSampling token 215 from frame 2\r\nSampling token 216 from frame 2\r\nSampling token 217 from frame 2\r\nSampling token 218 from frame 2\r\nSampling token 219 from frame 2\r\nSampling token 220 from frame 2\r\nSampling token 221 from frame 2\r\nSampling token 222 from frame 2\r\nSampling token 223 from frame 2\r\nSampling token 224 from frame 2\r\nSampling token 225 from frame 2\r\nSampling token 226 from frame 2\r\nSampling token 227 from frame 2\r\nSampling token 228 from frame 2\r\nSampling token 229 from frame 2\r\nSampling token 230 from frame 2\r\nSampling token 231 from frame 2\r\nSampling token 232 from frame 2\r\nSampling token 233 from frame 2\r\nSampling token 234 from frame 2\r\nSampling token 235 from frame 2\r\nSampling token 236 from frame 2\r\nSampling token 237 from frame 2\r\nSampling token 238 from frame 2\r\nSampling token 239 from frame 2\r\nSampling token 240 from frame 2\r\nSampling token 241 from frame 2\r\nSampling token 242 from frame 2\r\nSampling token 243 from frame 2\r\nSampling token 244 from frame 2\r\nSampling token 245 from frame 2\r\nSampling token 246 from frame 2\r\nSampling token 247 from frame 2\r\nSampling token 248 from frame 2\r\nSampling token 249 from frame 2\r\nSampling token 250 from frame 2\r\nSampling token 251 from frame 2\r\nSampling token 252 from frame 2\r\nSampling token 253 from frame 2\r\nSampling token 254 from frame 2\r\nSampling token 255 from frame 2\r\nSampling token 256 from frame 2\r\nSampling token 257 from frame 2\r\nSampling token 258 from frame 2\r\nSampling token 259 from frame 2\r\nSampling token 260 from frame 2\r\nSampling token 261 from frame 2\r\nSampling token 262 from frame 2\r\nSampling token 263 from frame 2\r\nSampling token 264 from frame 2\r\nSampling token 265 from frame 2\r\nSampling token 266 from frame 2\r\nSampling token 267 from frame 2\r\nSampling token 268 from frame 2\r\nSampling token 269 from frame 2\r\nSampling token 270 from frame 2\r\nSampling token 271 from frame 2\r\nSampling token 272 from frame 2\r\nSampling token 273 from frame 2\r\nSampling token 274 from frame 2\r\nSampling token 275 from frame 2\r\nSampling token 276 from frame 2\r\nSampling token 277 from frame 2\r\nSampling token 278 from frame 2\r\nSampling token 279 from frame 2\r\nSampling token 280 from frame 2\r\nSampling token 281 from frame 2\r\nSampling token 282 from frame 2\r\nSampling token 283 from frame 2\r\nSampling token 284 from frame 2\r\nSampling token 285 from frame 2\r\nSampling token 286 from frame 2\r\nSampling token 287 from frame 2\r\nSampling token 288 from frame 2\r\nSampling token 289 from frame 2\r\nSampling token 290 from frame 2\r\nSampling token 291 from frame 2\r\nSampling token 292 from frame 2\r\nSampling token 293 from frame 2\r\nSampling token 294 from frame 2\r\nSampling token 295 from frame 2\r\nSampling token 296 from frame 2\r\nSampling token 297 from frame 2\r\nSampling token 298 from frame 2\r\nSampling token 299 from frame 2\r\nSampling token 300 from frame 2\r\nSampling token 301 from frame 2\r\nSampling token 
302 from frame 2\r\nSampling token 303 from frame 2\r\nSampling token 304 from frame 2\r\nSampling token 305 from frame 2\r\nSampling token 306 from frame 2\r\nSampling token 307 from frame 2\r\nSampling token 308 from frame 2\r\nSampling token 309 from frame 2\r\nSampling token 310 from frame 2\r\nSampling token 311 from frame 2\r\nSampling token 312 from frame 2\r\nSampling token 313 from frame 2\r\nSampling token 314 from frame 2\r\nSampling token 315 from frame 2\r\nSampling token 316 from frame 2\r\nSampling token 317 from frame 2\r\nSampling token 318 from frame 2\r\nSampling token 319 from frame 2\r\nSampling token 320 from frame 2\r\nSampling token 321 from frame 2\r\nSampling token 322 from frame 2\r\nSampling token 323 from frame 2\r\nSampling token 324 from frame 2\r\nSampling token 325 from frame 2\r\nSampling token 326 from frame 2\r\nSampling token 327 from frame 2\r\nSampling token 328 from frame 2\r\nSampling token 329 from frame 2\r\nSampling token 330 from frame 2\r\nSampling token 331 from frame 2\r\nSampling token 332 from frame 2\r\nSampling token 333 from frame 2\r\nSampling token 334 from frame 2\r\nSampling token 335 from frame 2\r\nSampling token 336 from frame 2\r\nSampling token 337 from frame 2\r\nSampling token 338 from frame 2\r\nSampling token 339 from frame 2\r\nSampling token 340 from frame 2\r\nSampling token 341 from frame 2\r\nSampling token 342 from frame 2\r\nSampling token 343 from frame 2\r\nSampling token 344 from frame 2\r\nSampling token 345 from frame 2\r\nSampling token 346 from frame 2\r\nSampling token 347 from frame 2\r\nSampling token 348 from frame 2\r\nSampling token 349 from frame 2\r\nSampling token 350 from frame 2\r\nSampling token 351 from frame 2\r\nSampling token 352 from frame 2\r\nSampling token 353 from frame 2\r\nSampling token 354 from frame 2\r\nSampling token 355 from frame 2\r\nSampling token 356 from frame 2\r\nSampling token 357 from frame 2\r\nSampling token 358 from frame 2\r\nSampling token 359 from frame 2\r\nSampling token 360 from frame 2\r\nSampling token 361 from frame 2\r\nSampling token 362 from frame 2\r\nSampling token 363 from frame 2\r\nSampling token 364 from frame 2\r\n",,terminal_output +3724,9855282,"TERMINAL",0,0,"Sampling token 365 from frame 2\r\nSampling token 366 from frame 2\r\nSampling token 367 from frame 2\r\nSampling token 368 from frame 2\r\nSampling token 369 from frame 2\r\nSampling token 370 from frame 2\r\nSampling token 371 from frame 2\r\nSampling token 372 from frame 2\r\nSampling token 373 from frame 2\r\nSampling token 374 from frame 2\r\nSampling token 375 from frame 2\r\nSampling token 376 from frame 2\r\nSampling token 377 from frame 2\r\nSampling token 378 from frame 2\r\nSampling token 379 from frame 2\r\nSampling token 380 from frame 2\r\nSampling token 381 from frame 2\r\nSampling token 382 from frame 2\r\nSampling token 383 from frame 2\r\nSampling token 384 from frame 2\r\nSampling token 385 from frame 2\r\nSampling token 386 from frame 2\r\nSampling token 387 from frame 2\r\nSampling token 388 from frame 2\r\nSampling token 389 from frame 2\r\nSampling token 390 from frame 2\r\nSampling token 391 from frame 2\r\nSampling token 392 from frame 2\r\nSampling token 393 from frame 2\r\nSampling token 394 from frame 2\r\nSampling token 395 from frame 2\r\nSampling token 396 from frame 2\r\nSampling token 397 from frame 2\r\nSampling token 398 from frame 2\r\nSampling token 399 from frame 2\r\nSampling token 400 from frame 2\r\nSampling token 401 from frame 2\r\nSampling token 402 
from frame 2\r\nSampling token 403 from frame 2\r\nSampling token 404 from frame 2\r\nSampling token 405 from frame 2\r\nSampling token 406 from frame 2\r\nSampling token 407 from frame 2\r\nSampling token 408 from frame 2\r\nSampling token 409 from frame 2\r\nSampling token 410 from frame 2\r\nSampling token 411 from frame 2\r\nSampling token 412 from frame 2\r\nSampling token 413 from frame 2\r\nSampling token 414 from frame 2\r\nSampling token 415 from frame 2\r\nSampling token 416 from frame 2\r\nSampling token 417 from frame 2\r\nSampling token 418 from frame 2\r\nSampling token 419 from frame 2\r\nSampling token 420 from frame 2\r\nSampling token 421 from frame 2\r\nSampling token 422 from frame 2\r\nSampling token 423 from frame 2\r\nSampling token 424 from frame 2\r\nSampling token 425 from frame 2\r\nSampling token 426 from frame 2\r\nSampling token 427 from frame 2\r\nSampling token 428 from frame 2\r\nSampling token 429 from frame 2\r\nSampling token 430 from frame 2\r\nSampling token 431 from frame 2\r\nSampling token 432 from frame 2\r\nSampling token 433 from frame 2\r\nSampling token 434 from frame 2\r\nSampling token 435 from frame 2\r\nSampling token 436 from frame 2\r\nSampling token 437 from frame 2\r\nSampling token 438 from frame 2\r\nSampling token 439 from frame 2\r\nSampling token 440 from frame 2\r\nSampling token 441 from frame 2\r\nSampling token 442 from frame 2\r\nSampling token 443 from frame 2\r\nSampling token 444 from frame 2\r\nSampling token 445 from frame 2\r\nSampling token 446 from frame 2\r\nSampling token 447 from frame 2\r\nSampling token 448 from frame 2\r\nSampling token 449 from frame 2\r\nSampling token 450 from frame 2\r\nSampling token 451 from frame 2\r\nSampling token 452 from frame 2\r\nSampling token 453 from frame 2\r\nSampling token 454 from frame 2\r\nSampling token 455 from frame 2\r\nSampling token 456 from frame 2\r\nSampling token 457 from frame 2\r\nSampling token 458 from frame 2\r\nSampling token 459 from frame 2\r\nSampling token 460 from frame 2\r\nSampling token 461 from frame 2\r\nSampling token 462 from frame 2\r\nSampling token 463 from frame 2\r\nSampling token 464 from frame 2\r\nSampling token 465 from frame 2\r\nSampling token 466 from frame 2\r\nSampling token 467 from frame 2\r\nSampling token 468 from frame 2\r\nSampling token 469 from frame 2\r\nSampling token 470 from frame 2\r\nSampling token 471 from frame 2\r\nSampling token 472 from frame 2\r\nSampling token 473 from frame 2\r\nSampling token 474 from frame 2\r\nSampling token 475 from frame 2\r\nSampling token 476 from frame 2\r\nSampling token 477 from frame 2\r\nSampling token 478 from frame 2\r\nSampling token 479 from frame 2\r\nSampling token 480 from frame 2\r\nSampling token 481 from frame 2\r\nSampling token 482 from frame 2\r\nSampling token 483 from frame 2\r\nSampling token 484 from frame 2\r\nSampling token 485 from frame 2\r\nSampling token 486 from frame 2\r\nSampling token 487 from frame 2\r\nSampling token 488 from frame 2\r\nSampling token 489 from frame 2\r\nSampling token 490 from frame 2\r\nSampling token 491 from frame 2\r\nSampling token 492 from frame 2\r\nSampling token 493 from frame 2\r\nSampling token 494 from frame 2\r\nSampling token 495 from frame 2\r\nSampling token 496 from frame 2\r\nSampling token 497 from frame 2\r\nSampling token 498 from frame 2\r\nSampling token 499 from frame 2\r\nSampling token 500 from frame 2\r\nSampling token 501 from frame 2\r\nSampling token 502 from frame 2\r\nSampling token 503 from frame 
2\r\nSampling token 504 from frame 2\r\nSampling token 505 from frame 2\r\nSampling token 506 from frame 2\r\nSampling token 507 from frame 2\r\nSampling token 508 from frame 2\r\nSampling token 509 from frame 2\r\nSampling token 510 from frame 2\r\nSampling token 511 from frame 2\r\nSampling token 512 from frame 2\r\nSampling token 513 from frame 2\r\nSampling token 514 from frame 2\r\nSampling token 515 from frame 2\r\nSampling token 516 from frame 2\r\nSampling token 517 from frame 2\r\nSampling token 518 from frame 2\r\nSampling token 519 from frame 2\r\nSampling token 520 from frame 2\r\nSampling token 521 from frame 2\r\nSampling token 522 from frame 2\r\nSampling token 523 from frame 2\r\nSampling token 524 from frame 2\r\nSampling token 525 from frame 2\r\nSampling token 526 from frame 2\r\nSampling token 527 from frame 2\r\nSampling token 528 from frame 2\r\nSampling token 529 from frame 2\r\nSampling token 530 from frame 2\r\nSampling token 531 from frame 2\r\nSampling token 532 from frame 2\r\nSampling token 533 from frame 2\r\nSampling token 534 from frame 2\r\nSampling token 535 from frame 2\r\nSampling token 536 from frame 2\r\nSampling token 537 from frame 2\r\nSampling token 538 from frame 2\r\nSampling token 539 from frame 2\r\nSampling token 540 from frame 2\r\nSampling token 541 from frame 2\r\nSampling token 542 from frame 2\r\nSampling token 543 from frame 2\r\nSampling token 544 from frame 2\r\nSampling token 545 from frame 2\r\nSampling token 546 from frame 2\r\nSampling token 547 from frame 2\r\nSampling token 548 from frame 2\r\nSampling token 549 from frame 2\r\nSampling token 550 from frame 2\r\nSampling token 551 from frame 2\r\nSampling token 552 from frame 2\r\nSampling token 553 from frame 2\r\nSampling token 554 from frame 2\r\nSampling token 555 from frame 2\r\nSampling token 556 from frame 2\r\nSampling token 557 from frame 2\r\nSampling token 558 from frame 2\r\nSampling token 559 from frame 2\r\nSampling token 560 from frame 2\r\nSampling token 561 from frame 2\r\nSampling token 562 from frame 2\r\nSampling token 563 from frame 2\r\nSampling token 564 from frame 2\r\nSampling token 565 from frame 2\r\nSampling token 566 from frame 2\r\nSampling token 567 from frame 2\r\nSampling token 568 from frame 2\r\nSampling token 569 from frame 2\r\nSampling token 570 from frame 2\r\nSampling token 571 from frame 2\r\nSampling token 572 from frame 2\r\nSampling token 573 from frame 2\r\nSampling token 574 from frame 2\r\nSampling token 575 from frame 2\r\nSampling token 576 from frame 2\r\nSampling token 577 from frame 2\r\nSampling token 578 from frame 2\r\nSampling token 579 from frame 2\r\nSampling token 580 from frame 2\r\nSampling token 581 from frame 2\r\nSampling token 582 from frame 2\r\nSampling token 583 from frame 2\r\nSampling token 584 from frame 2\r\nSampling token 585 from frame 2\r\nSampling token 586 from frame 2\r\nSampling token 587 from frame 2\r\nSampling token 588 from frame 2\r\nSampling token 589 from frame 2\r\nSampling token 590 from frame 2\r\nSampling token 591 from frame 2\r\nSampling token 592 from frame 2\r\nSampling token 593 from frame 2\r\nSampling token 594 from frame 2\r\nSampling token 595 from frame 2\r\nSampling token 596 from frame 2\r\nSampling token 597 from frame 2\r\nSampling token 598 from frame 2\r\nSampling token 599 from frame 2\r\nSampling token 600 from frame 2\r\nSampling token 601 from frame 2\r\nSampling token 602 from frame 2\r\nSampling token 603 from frame 2\r\nSampling token 604 from frame 2\r\nSampling token 
605 from frame 2\r\nSampling token 606 from frame 2\r\nSampling token 607 from frame 2\r\nSampling token 608 from frame 2\r\nSampling token 609 from frame 2\r\nSampling token 610 from frame 2\r\nSampling token 611 from frame 2\r\nSampling token 612 from frame 2\r\nSampling token 613 from frame 2\r\nSampling token 614 from frame 2\r\nSampling token 615 from frame 2\r\nSampling token 616 from frame 2\r\nSampling token 617 from frame 2\r\nSampling token 618 from frame 2\r\nSampling token 619 from frame 2\r\nSampling token 620 from frame 2\r\n",,terminal_output +3725,9888719,"TERMINAL",0,0,"Sampling token 621 from frame 2\r\nSampling token 622 from frame 2\r\nSampling token 623 from frame 2\r\nSampling token 624 from frame 2\r\nSampling token 625 from frame 2\r\nSampling token 626 from frame 2\r\nSampling token 627 from frame 2\r\nSampling token 628 from frame 2\r\nSampling token 629 from frame 2\r\nSampling token 630 from frame 2\r\nSampling token 631 from frame 2\r\nSampling token 632 from frame 2\r\nSampling token 633 from frame 2\r\nSampling token 634 from frame 2\r\nSampling token 635 from frame 2\r\nSampling token 636 from frame 2\r\nSampling token 637 from frame 2\r\nSampling token 638 from frame 2\r\nSampling token 639 from frame 2\r\nSampling token 640 from frame 2\r\nSampling token 641 from frame 2\r\nSampling token 642 from frame 2\r\nSampling token 643 from frame 2\r\nSampling token 644 from frame 2\r\nSampling token 645 from frame 2\r\nSampling token 646 from frame 2\r\nSampling token 647 from frame 2\r\nSampling token 648 from frame 2\r\nSampling token 649 from frame 2\r\nSampling token 650 from frame 2\r\nSampling token 651 from frame 2\r\nSampling token 652 from frame 2\r\nSampling token 653 from frame 2\r\nSampling token 654 from frame 2\r\nSampling token 655 from frame 2\r\nSampling token 656 from frame 2\r\nSampling token 657 from frame 2\r\nSampling token 658 from frame 2\r\nSampling token 659 from frame 2\r\nSampling token 660 from frame 2\r\nSampling token 661 from frame 2\r\nSampling token 662 from frame 2\r\nSampling token 663 from frame 2\r\nSampling token 664 from frame 2\r\nSampling token 665 from frame 2\r\nSampling token 666 from frame 2\r\nSampling token 667 from frame 2\r\nSampling token 668 from frame 2\r\nSampling token 669 from frame 2\r\nSampling token 670 from frame 2\r\nSampling token 671 from frame 2\r\nSampling token 672 from frame 2\r\nSampling token 673 from frame 2\r\nSampling token 674 from frame 2\r\nSampling token 675 from frame 2\r\nSampling token 676 from frame 2\r\nSampling token 677 from frame 2\r\nSampling token 678 from frame 2\r\nSampling token 679 from frame 2\r\nSampling token 680 from frame 2\r\nSampling token 681 from frame 2\r\nSampling token 682 from frame 2\r\nSampling token 683 from frame 2\r\nSampling token 684 from frame 2\r\nSampling token 685 from frame 2\r\nSampling token 686 from frame 2\r\nSampling token 687 from frame 2\r\nSampling token 688 from frame 2\r\nSampling token 689 from frame 2\r\nSampling token 690 from frame 2\r\nSampling token 691 from frame 2\r\nSampling token 692 from frame 2\r\nSampling token 693 from frame 2\r\nSampling token 694 from frame 2\r\nSampling token 695 from frame 2\r\nSampling token 696 from frame 2\r\nSampling token 697 from frame 2\r\nSampling token 698 from frame 2\r\nSampling token 699 from frame 2\r\nSampling token 700 from frame 2\r\nSampling token 701 from frame 2\r\nSampling token 702 from frame 2\r\nSampling token 703 from frame 2\r\nSampling token 704 from frame 2\r\nSampling token 705 
from frame 2\r\nSampling token 706 from frame 2\r\nSampling token 707 from frame 2\r\nSampling token 708 from frame 2\r\nSampling token 709 from frame 2\r\nSampling token 710 from frame 2\r\nSampling token 711 from frame 2\r\nSampling token 712 from frame 2\r\nSampling token 713 from frame 2\r\nSampling token 714 from frame 2\r\nSampling token 715 from frame 2\r\nSampling token 716 from frame 2\r\nSampling token 717 from frame 2\r\nSampling token 718 from frame 2\r\nSampling token 719 from frame 2\r\nSampling token 720 from frame 2\r\nSampling token 721 from frame 2\r\nSampling token 722 from frame 2\r\nSampling token 723 from frame 2\r\nSampling token 724 from frame 2\r\nSampling token 725 from frame 2\r\nSampling token 726 from frame 2\r\nSampling token 727 from frame 2\r\nSampling token 728 from frame 2\r\nSampling token 729 from frame 2\r\nSampling token 730 from frame 2\r\nSampling token 731 from frame 2\r\nSampling token 732 from frame 2\r\nSampling token 733 from frame 2\r\nSampling token 734 from frame 2\r\nSampling token 735 from frame 2\r\nSampling token 736 from frame 2\r\nSampling token 737 from frame 2\r\nSampling token 738 from frame 2\r\nSampling token 739 from frame 2\r\nSampling token 740 from frame 2\r\nSampling token 741 from frame 2\r\nSampling token 742 from frame 2\r\nSampling token 743 from frame 2\r\nSampling token 744 from frame 2\r\nSampling token 745 from frame 2\r\nSampling token 746 from frame 2\r\nSampling token 747 from frame 2\r\nSampling token 748 from frame 2\r\nSampling token 749 from frame 2\r\nSampling token 750 from frame 2\r\nSampling token 751 from frame 2\r\nSampling token 752 from frame 2\r\nSampling token 753 from frame 2\r\nSampling token 754 from frame 2\r\nSampling token 755 from frame 2\r\nSampling token 756 from frame 2\r\nSampling token 757 from frame 2\r\nSampling token 758 from frame 2\r\nSampling token 759 from frame 2\r\nSampling token 760 from frame 2\r\nSampling token 761 from frame 2\r\nSampling token 762 from frame 2\r\nSampling token 763 from frame 2\r\nSampling token 764 from frame 2\r\nSampling token 765 from frame 2\r\nSampling token 766 from frame 2\r\nSampling token 767 from frame 2\r\nSampling token 768 from frame 2\r\nSampling token 769 from frame 2\r\nSampling token 770 from frame 2\r\nSampling token 771 from frame 2\r\nSampling token 772 from frame 2\r\nSampling token 773 from frame 2\r\nSampling token 774 from frame 2\r\nSampling token 775 from frame 2\r\nSampling token 776 from frame 2\r\nSampling token 777 from frame 2\r\nSampling token 778 from frame 2\r\nSampling token 779 from frame 2\r\nSampling token 780 from frame 2\r\nSampling token 781 from frame 2\r\nSampling token 782 from frame 2\r\nSampling token 783 from frame 2\r\nSampling token 784 from frame 2\r\nSampling token 785 from frame 2\r\nSampling token 786 from frame 2\r\nSampling token 787 from frame 2\r\nSampling token 788 from frame 2\r\nSampling token 789 from frame 2\r\nSampling token 790 from frame 2\r\nSampling token 791 from frame 2\r\nSampling token 792 from frame 2\r\nSampling token 793 from frame 2\r\nSampling token 794 from frame 2\r\nSampling token 795 from frame 2\r\nSampling token 796 from frame 2\r\nSampling token 797 from frame 2\r\nSampling token 798 from frame 2\r\nSampling token 799 from frame 2\r\nSampling token 800 from frame 2\r\nSampling token 801 from frame 2\r\nSampling token 802 from frame 2\r\nSampling token 803 from frame 2\r\nSampling token 804 from frame 2\r\nSampling token 805 from frame 2\r\nSampling token 806 from frame 
2\r\nSampling token 807 from frame 2\r\nSampling token 808 from frame 2\r\nSampling token 809 from frame 2\r\nSampling token 810 from frame 2\r\nSampling token 811 from frame 2\r\nSampling token 812 from frame 2\r\nSampling token 813 from frame 2\r\nSampling token 814 from frame 2\r\nSampling token 815 from frame 2\r\nSampling token 816 from frame 2\r\nSampling token 817 from frame 2\r\nSampling token 818 from frame 2\r\nSampling token 819 from frame 2\r\nSampling token 820 from frame 2\r\nSampling token 821 from frame 2\r\nSampling token 822 from frame 2\r\nSampling token 823 from frame 2\r\nSampling token 824 from frame 2\r\nSampling token 825 from frame 2\r\nSampling token 826 from frame 2\r\nSampling token 827 from frame 2\r\nSampling token 828 from frame 2\r\nSampling token 829 from frame 2\r\nSampling token 830 from frame 2\r\nSampling token 831 from frame 2\r\nSampling token 832 from frame 2\r\nSampling token 833 from frame 2\r\nSampling token 834 from frame 2\r\nSampling token 835 from frame 2\r\nSampling token 836 from frame 2\r\nSampling token 837 from frame 2\r\nSampling token 838 from frame 2\r\nSampling token 839 from frame 2\r\nSampling token 840 from frame 2\r\nSampling token 841 from frame 2\r\nSampling token 842 from frame 2\r\nSampling token 843 from frame 2\r\nSampling token 844 from frame 2\r\nSampling token 845 from frame 2\r\nSampling token 846 from frame 2\r\nSampling token 847 from frame 2\r\nSampling token 848 from frame 2\r\nSampling token 849 from frame 2\r\nSampling token 850 from frame 2\r\nSampling token 851 from frame 2\r\nSampling token 852 from frame 2\r\nSampling token 853 from frame 2\r\nSampling token 854 from frame 2\r\nSampling token 855 from frame 2\r\nSampling token 856 from frame 2\r\nSampling token 857 from frame 2\r\nSampling token 858 from frame 2\r\nSampling token 859 from frame 2\r\nSampling token 860 from frame 2\r\nSampling token 861 from frame 2\r\nSampling token 862 from frame 2\r\nSampling token 863 from frame 2\r\nSampling token 864 from frame 2\r\nSampling token 865 from frame 2\r\nSampling token 866 from frame 2\r\nSampling token 867 from frame 2\r\nSampling token 868 from frame 2\r\nSampling token 869 from frame 2\r\nSampling token 870 from frame 2\r\nSampling token 871 from frame 2\r\nSampling token 872 from frame 2\r\nSampling token 873 from frame 2\r\nSampling token 874 from frame 2\r\nSampling token 875 from frame 2\r\nSampling token 876 from frame 2\r\n",,terminal_output +3726,9922555,"TERMINAL",0,0,"Sampling token 877 from frame 2\r\nSampling token 878 from frame 2\r\nSampling token 879 from frame 2\r\nSampling token 880 from frame 2\r\nSampling token 881 from frame 2\r\nSampling token 882 from frame 2\r\nSampling token 883 from frame 2\r\nSampling token 884 from frame 2\r\nSampling token 885 from frame 2\r\nSampling token 886 from frame 2\r\nSampling token 887 from frame 2\r\nSampling token 888 from frame 2\r\nSampling token 889 from frame 2\r\nSampling token 890 from frame 2\r\nSampling token 891 from frame 2\r\nSampling token 892 from frame 2\r\nSampling token 893 from frame 2\r\nSampling token 894 from frame 2\r\nSampling token 895 from frame 2\r\nSampling token 896 from frame 2\r\nSampling token 897 from frame 2\r\nSampling token 898 from frame 2\r\nSampling token 899 from frame 2\r\nSampling token 900 from frame 2\r\nSampling token 901 from frame 2\r\nSampling token 902 from frame 2\r\nSampling token 903 from frame 2\r\nSampling token 904 from frame 2\r\nSampling token 905 from frame 2\r\nSampling token 906 from frame 
2\r\nSampling token 907 from frame 2\r\nSampling token 908 from frame 2\r\nSampling token 909 from frame 2\r\nSampling token 910 from frame 2\r\nSampling token 911 from frame 2\r\nSampling token 912 from frame 2\r\nSampling token 913 from frame 2\r\nSampling token 914 from frame 2\r\nSampling token 915 from frame 2\r\nSampling token 916 from frame 2\r\nSampling token 917 from frame 2\r\nSampling token 918 from frame 2\r\nSampling token 919 from frame 2\r\nSampling token 0 from frame 3\r\nSampling token 1 from frame 3\r\nSampling token 2 from frame 3\r\nSampling token 3 from frame 3\r\nSampling token 4 from frame 3\r\nSampling token 5 from frame 3\r\nSampling token 6 from frame 3\r\nSampling token 7 from frame 3\r\nSampling token 8 from frame 3\r\nSampling token 9 from frame 3\r\nSampling token 10 from frame 3\r\nSampling token 11 from frame 3\r\nSampling token 12 from frame 3\r\nSampling token 13 from frame 3\r\nSampling token 14 from frame 3\r\nSampling token 15 from frame 3\r\nSampling token 16 from frame 3\r\nSampling token 17 from frame 3\r\nSampling token 18 from frame 3\r\nSampling token 19 from frame 3\r\nSampling token 20 from frame 3\r\nSampling token 21 from frame 3\r\nSampling token 22 from frame 3\r\nSampling token 23 from frame 3\r\nSampling token 24 from frame 3\r\nSampling token 25 from frame 3\r\nSampling token 26 from frame 3\r\nSampling token 27 from frame 3\r\nSampling token 28 from frame 3\r\nSampling token 29 from frame 3\r\nSampling token 30 from frame 3\r\nSampling token 31 from frame 3\r\nSampling token 32 from frame 3\r\nSampling token 33 from frame 3\r\nSampling token 34 from frame 3\r\nSampling token 35 from frame 3\r\nSampling token 36 from frame 3\r\nSampling token 37 from frame 3\r\nSampling token 38 from frame 3\r\nSampling token 39 from frame 3\r\nSampling token 40 from frame 3\r\nSampling token 41 from frame 3\r\nSampling token 42 from frame 3\r\nSampling token 43 from frame 3\r\nSampling token 44 from frame 3\r\nSampling token 45 from frame 3\r\nSampling token 46 from frame 3\r\nSampling token 47 from frame 3\r\nSampling token 48 from frame 3\r\nSampling token 49 from frame 3\r\nSampling token 50 from frame 3\r\nSampling token 51 from frame 3\r\nSampling token 52 from frame 3\r\nSampling token 53 from frame 3\r\nSampling token 54 from frame 3\r\nSampling token 55 from frame 3\r\nSampling token 56 from frame 3\r\nSampling token 57 from frame 3\r\nSampling token 58 from frame 3\r\nSampling token 59 from frame 3\r\nSampling token 60 from frame 3\r\nSampling token 61 from frame 3\r\nSampling token 62 from frame 3\r\nSampling token 63 from frame 3\r\nSampling token 64 from frame 3\r\nSampling token 65 from frame 3\r\nSampling token 66 from frame 3\r\nSampling token 67 from frame 3\r\nSampling token 68 from frame 3\r\nSampling token 69 from frame 3\r\nSampling token 70 from frame 3\r\nSampling token 71 from frame 3\r\nSampling token 72 from frame 3\r\nSampling token 73 from frame 3\r\nSampling token 74 from frame 3\r\nSampling token 75 from frame 3\r\nSampling token 76 from frame 3\r\nSampling token 77 from frame 3\r\nSampling token 78 from frame 3\r\nSampling token 79 from frame 3\r\nSampling token 80 from frame 3\r\nSampling token 81 from frame 3\r\nSampling token 82 from frame 3\r\nSampling token 83 from frame 3\r\nSampling token 84 from frame 3\r\nSampling token 85 from frame 3\r\nSampling token 86 from frame 3\r\nSampling token 87 from frame 3\r\nSampling token 88 from frame 3\r\nSampling token 89 from frame 3\r\nSampling token 90 from frame 3\r\nSampling 
token 91 from frame 3\r\nSampling token 92 from frame 3\r\nSampling token 93 from frame 3\r\nSampling token 94 from frame 3\r\nSampling token 95 from frame 3\r\nSampling token 96 from frame 3\r\nSampling token 97 from frame 3\r\nSampling token 98 from frame 3\r\nSampling token 99 from frame 3\r\nSampling token 100 from frame 3\r\nSampling token 101 from frame 3\r\nSampling token 102 from frame 3\r\nSampling token 103 from frame 3\r\nSampling token 104 from frame 3\r\nSampling token 105 from frame 3\r\nSampling token 106 from frame 3\r\nSampling token 107 from frame 3\r\nSampling token 108 from frame 3\r\nSampling token 109 from frame 3\r\nSampling token 110 from frame 3\r\nSampling token 111 from frame 3\r\nSampling token 112 from frame 3\r\nSampling token 113 from frame 3\r\nSampling token 114 from frame 3\r\nSampling token 115 from frame 3\r\nSampling token 116 from frame 3\r\nSampling token 117 from frame 3\r\nSampling token 118 from frame 3\r\nSampling token 119 from frame 3\r\nSampling token 120 from frame 3\r\nSampling token 121 from frame 3\r\nSampling token 122 from frame 3\r\nSampling token 123 from frame 3\r\nSampling token 124 from frame 3\r\nSampling token 125 from frame 3\r\nSampling token 126 from frame 3\r\nSampling token 127 from frame 3\r\nSampling token 128 from frame 3\r\nSampling token 129 from frame 3\r\nSampling token 130 from frame 3\r\nSampling token 131 from frame 3\r\nSampling token 132 from frame 3\r\nSampling token 133 from frame 3\r\nSampling token 134 from frame 3\r\nSampling token 135 from frame 3\r\nSampling token 136 from frame 3\r\nSampling token 137 from frame 3\r\nSampling token 138 from frame 3\r\nSampling token 139 from frame 3\r\nSampling token 140 from frame 3\r\nSampling token 141 from frame 3\r\nSampling token 142 from frame 3\r\nSampling token 143 from frame 3\r\nSampling token 144 from frame 3\r\nSampling token 145 from frame 3\r\nSampling token 146 from frame 3\r\nSampling token 147 from frame 3\r\nSampling token 148 from frame 3\r\nSampling token 149 from frame 3\r\nSampling token 150 from frame 3\r\nSampling token 151 from frame 3\r\nSampling token 152 from frame 3\r\nSampling token 153 from frame 3\r\nSampling token 154 from frame 3\r\nSampling token 155 from frame 3\r\nSampling token 156 from frame 3\r\nSampling token 157 from frame 3\r\nSampling token 158 from frame 3\r\nSampling token 159 from frame 3\r\nSampling token 160 from frame 3\r\nSampling token 161 from frame 3\r\nSampling token 162 from frame 3\r\nSampling token 163 from frame 3\r\nSampling token 164 from frame 3\r\nSampling token 165 from frame 3\r\nSampling token 166 from frame 3\r\nSampling token 167 from frame 3\r\nSampling token 168 from frame 3\r\nSampling token 169 from frame 3\r\nSampling token 170 from frame 3\r\nSampling token 171 from frame 3\r\nSampling token 172 from frame 3\r\nSampling token 173 from frame 3\r\nSampling token 174 from frame 3\r\nSampling token 175 from frame 3\r\nSampling token 176 from frame 3\r\nSampling token 177 from frame 3\r\nSampling token 178 from frame 3\r\nSampling token 179 from frame 3\r\nSampling token 180 from frame 3\r\nSampling token 181 from frame 3\r\nSampling token 182 from frame 3\r\nSampling token 183 from frame 3\r\nSampling token 184 from frame 3\r\nSampling token 185 from frame 3\r\nSampling token 186 from frame 3\r\nSampling token 187 from frame 3\r\nSampling token 188 from frame 3\r\nSampling token 189 from frame 3\r\nSampling token 190 from frame 3\r\nSampling token 191 from frame 3\r\nSampling token 192 from frame 
3\r\nSampling token 193 from frame 3\r\nSampling token 194 from frame 3\r\nSampling token 195 from frame 3\r\nSampling token 196 from frame 3\r\nSampling token 197 from frame 3\r\nSampling token 198 from frame 3\r\nSampling token 199 from frame 3\r\nSampling token 200 from frame 3\r\nSampling token 201 from frame 3\r\nSampling token 202 from frame 3\r\nSampling token 203 from frame 3\r\nSampling token 204 from frame 3\r\nSampling token 205 from frame 3\r\nSampling token 206 from frame 3\r\nSampling token 207 from frame 3\r\nSampling token 208 from frame 3\r\nSampling token 209 from frame 3\r\nSampling token 210 from frame 3\r\nSampling token 211 from frame 3\r\nSampling token 212 from frame 3\r\nSampling token 213 from frame 3\r\nSampling token 214 from frame 3\r\nSampling token 215 from frame 3\r\n",,terminal_output +3727,9931208,"sample.py",0,0,"from dataclasses import dataclass\nfrom typing import Optional\nimport time\nimport os\n\nimport dm_pix as pix\nimport einops\nimport jax\nimport jax.numpy as jnp\nimport flax.linen as nn\nimport numpy as np\nfrom flax.training.train_state import TrainState\nimport grain\nimport orbax.checkpoint as ocp\nimport optax\nfrom PIL import Image, ImageDraw\nimport tyro\n\nfrom genie import Genie\nfrom utils.dataloader import get_dataloader\n\n\n@dataclass\nclass Args:\n # Experiment\n seed: int = 0\n seq_len: int = 16\n image_channels: int = 3\n image_height: int = 90\n image_width: int = 160\n data_dir: str = ""data/coinrun_episodes""\n checkpoint: str = """"\n checkpoint_step: Optional[int] = None\n # Sampling\n batch_size: int = 1\n maskgit_steps: int = 25\n temperature: float = 1.0\n sample_argmax: bool = True\n start_frame: int = 0\n # Tokenizer checkpoint\n tokenizer_dim: int = 512\n tokenizer_ffn_dim: int = 2048\n latent_patch_dim: int = 32\n num_patch_latents: int = 1024\n patch_size: int = 4\n tokenizer_num_blocks: int = 4\n tokenizer_num_heads: int = 8\n # LAM checkpoint\n lam_dim: int = 512\n lam_ffn_dim: int = 2048\n latent_action_dim: int = 32\n num_latent_actions: int = 6\n lam_patch_size: int = 16\n lam_num_blocks: int = 4\n lam_num_heads: int = 8\n lam_co_train: bool = True\n # Dynamics checkpoint\n dyna_dim: int = 512\n dyna_ffn_dim: int = 2048\n dyna_num_blocks: int = 6\n dyna_num_heads: int = 8\n param_dtype = jnp.float32\n dtype = jnp.bfloat16\n use_flash_attention: bool = True\n\n\nargs = tyro.cli(Args)\nrng = jax.random.PRNGKey(args.seed)\n\n# --- Load Genie checkpoint ---\ngenie = Genie(\n # Tokenizer\n in_dim=args.image_channels,\n tokenizer_dim=args.tokenizer_dim,\n tokenizer_ffn_dim=args.tokenizer_ffn_dim,\n latent_patch_dim=args.latent_patch_dim,\n num_patch_latents=args.num_patch_latents,\n patch_size=args.patch_size,\n tokenizer_num_blocks=args.tokenizer_num_blocks,\n tokenizer_num_heads=args.tokenizer_num_heads,\n # LAM\n lam_dim=args.lam_dim,\n lam_ffn_dim=args.lam_ffn_dim,\n latent_action_dim=args.latent_action_dim,\n num_latent_actions=args.num_latent_actions,\n lam_patch_size=args.lam_patch_size,\n lam_num_blocks=args.lam_num_blocks,\n lam_num_heads=args.lam_num_heads,\n lam_co_train=args.lam_co_train,\n # Dynamics\n dyna_dim=args.dyna_dim,\n dyna_ffn_dim=args.dyna_ffn_dim,\n dyna_num_blocks=args.dyna_num_blocks,\n dyna_num_heads=args.dyna_num_heads,\n use_maskgit=False,\n param_dtype=args.param_dtype,\n dtype=args.dtype,\n use_flash_attention=args.use_flash_attention,\n)\nrng, _rng = jax.random.split(rng)\nimage_shape = (args.image_height, args.image_width, args.image_channels)\ndummy_inputs = dict(\n 
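# (editorial note, not in the original recording) Only the shapes of this dummy batch matter: genie.init builds a parameter pytree whose values are replaced by the restored checkpoint below.\n    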
videos=jnp.zeros((args.batch_size, args.seq_len, *image_shape), dtype=jnp.float32),\n mask_rng=_rng,\n)\nrng, _rng = jax.random.split(rng)\nparams = genie.init(_rng, dummy_inputs)\n\ndummy_train_state = TrainState.create(\n apply_fn=genie.apply,\n params=params,\n tx=optax.adamw(\n optax.warmup_cosine_decay_schedule(\n 0, 0, 1, 2 # dummy values\n )\n ), \n)\nhandler_registry = ocp.handlers.DefaultCheckpointHandlerRegistry()\nhandler_registry.add('model_state', ocp.args.StandardRestore, ocp.handlers.StandardCheckpointHandler)\ncheckpoint_manager = ocp.CheckpointManager(\n args.checkpoint,\n options=ocp.CheckpointManagerOptions(step_format_fixed_length=6),\n handler_registry=handler_registry\n)\nabstract_train_state = jax.tree_util.tree_map(\n ocp.utils.to_shape_dtype_struct, dummy_train_state\n)\n\nrestored = checkpoint_manager.restore(\n args.checkpoint_step or checkpoint_manager.latest_step(),\n args=ocp.args.Composite(\n model_state=ocp.args.StandardRestore(abstract_train_state),\n ),\n)\nrestored_train_state = restored[""model_state""]\nparams = restored_train_state.params\n\n\ndef _sampling_wrapper(module, batch):\n # return module.sample_maskgit(batch, args.seq_len, args.maskgit_steps, args.temperature, args.sample_argmax)\n return module.sample_causal(batch, args.seq_len, args.temperature, args.sample_argmax)\n\n# --- Define autoregressive sampling loop ---\ndef _autoreg_sample(rng, video_batch, action_batch):\n vid = video_batch[:, : args.start_frame + 1]\n # sampling_fn = jax.jit(nn.apply(_sampling_wrapper, genie)) \n sampling_fn = nn.apply(_sampling_wrapper, genie)\n rng, _rng = jax.random.split(rng)\n batch = dict(videos=vid, latent_actions=action_batch, rng=_rng)\n generated_vid = sampling_fn(params, batch)\n return generated_vid\n\ndef _get_dataloader_iterator():\n array_record_files = [\n os.path.join(args.data_dir, x)\n for x in os.listdir(args.data_dir)\n if x.endswith("".array_record"")\n ]\n grain_dataloader = get_dataloader(\n array_record_files,\n args.seq_len,\n # NOTE: We deliberately pass the global batch size\n # The dataloader shards the dataset across all processes\n args.batch_size,\n *image_shape,\n num_workers=0,\n prefetch_buffer_size=1,\n seed=args.seed,\n )\n initial_state = grain_dataloader._create_initial_state()\n grain_iterator = grain.DataLoaderIterator(grain_dataloader, initial_state)\n return grain_iterator\n\n# --- Get video + latent actions ---\n# grain_iterator = _get_dataloader_iterator()\n# video_batch = next(grain_iterator)\n# video_batch = np.load(""overfit_dir/single_sample_corner.npy"")\nvideo_batch = np.load(""overfit_dir/oai_sample_seed69_1.npy"") # *255.\n\n\nvideo_batch = video_batch.astype(args.dtype) / 255.0\n# Get latent actions for all videos in the batch\nbatch = dict(videos=video_batch[:,:args.seq_len])\naction_batch = genie.apply(params, batch, False, method=Genie.vq_encode)\naction_batch = action_batch.reshape(video_batch.shape[0], args.seq_len - 1, 1)\n\n# --- Sample + evaluate video ---\nprint(""autoreg sampling..."")\nvid = _autoreg_sample(rng, video_batch, action_batch)\nprint(""autoreg sampling done. 
calculating ssim and saving video"")\ngt = video_batch[:, : vid.shape[1]].clip(0, 1).reshape(-1, *video_batch.shape[2:])\nrecon = vid.clip(0, 1).reshape(-1, *vid.shape[2:])\nssim = pix.ssim(gt[:, args.start_frame + 1 :], recon[:, args.start_frame + 1 :]).mean()\nprint(f""SSIM: {ssim}"")\n\n# --- Construct video ---\ntrue_videos = (video_batch * 255).astype(np.uint8)\npred_videos = (vid * 255).astype(np.uint8)\nvideo_comparison = np.zeros((2, *vid.shape), dtype=np.uint8)\nvideo_comparison[0] = true_videos[:, : args.seq_len]\nvideo_comparison[1] = pred_videos\nframes = einops.rearrange(video_comparison, ""n b t h w c -> t (b h) (n w) c"")\n\n# --- Save video ---\nimgs = [Image.fromarray(img) for img in frames]\n# Write actions on each frame, on each row (i.e., for each video in the batch, on the GT row)\nfor t, img in enumerate(imgs[1:]):\n d = ImageDraw.Draw(img)\n for row in range(action_batch.shape[0]):\n action = action_batch[row, t, 0]\n y_offset = row * video_batch.shape[2] + 2\n d.text((2, y_offset), f""{action}"", fill=255)\nimgs[0].save(\n f""generation_{time.time()}.gif"",\n save_all=True,\n append_images=imgs[1:],\n duration=250,\n loop=0,\n)\n",python,tab +3728,9938912,"sample.py",5722,0,"",python,selection_mouse +3729,9939079,"sample.py",5718,5,"apply",python,selection_mouse +3730,9945066,"sample.py",5763,0,"",python,selection_mouse +3731,9945334,"genie.py",0,0,"from typing import Dict, Any\n\nimport optax\nimport jax\nimport jax.numpy as jnp\nimport flax.linen as nn\nfrom flax.training.train_state import TrainState\nimport orbax.checkpoint as ocp\n\nfrom models.dynamics import DynamicsMaskGIT, DynamicsAutoregressive\nfrom models.lam import LatentActionModel\nfrom models.tokenizer import TokenizerVQVAE\n\nimport grain\n\n\nclass Genie(nn.Module):\n """"""Genie model""""""\n\n # --- Tokenizer ---\n in_dim: int\n tokenizer_dim: int\n tokenizer_ffn_dim: int\n latent_patch_dim: int\n num_patch_latents: int\n patch_size: int\n tokenizer_num_blocks: int\n tokenizer_num_heads: int\n # --- LAM ---\n lam_dim: int\n lam_ffn_dim: int\n latent_action_dim: int\n num_latent_actions: int\n lam_patch_size: int\n lam_num_blocks: int\n lam_num_heads: int\n lam_co_train: bool\n # --- Dynamics ---\n dyna_dim: int\n dyna_ffn_dim: int\n dyna_num_blocks: int\n dyna_num_heads: int\n use_maskgit: bool\n param_dtype: jnp.dtype\n dtype: jnp.dtype\n use_flash_attention: bool\n dropout: float = 0.0\n mask_limit: float = 0.0\n\n def setup(self):\n self.tokenizer = TokenizerVQVAE(\n in_dim=self.in_dim,\n model_dim=self.tokenizer_dim,\n ffn_dim=self.tokenizer_ffn_dim,\n latent_dim=self.latent_patch_dim,\n num_latents=self.num_patch_latents,\n patch_size=self.patch_size,\n num_blocks=self.tokenizer_num_blocks,\n num_heads=self.tokenizer_num_heads,\n dropout=0.0,\n codebook_dropout=0.0,\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n use_flash_attention=self.use_flash_attention,\n )\n self.lam = LatentActionModel(\n in_dim=self.in_dim,\n model_dim=self.lam_dim,\n ffn_dim=self.lam_ffn_dim,\n latent_dim=self.latent_patch_dim,\n num_latents=self.num_latent_actions,\n patch_size=self.lam_patch_size,\n num_blocks=self.lam_num_blocks,\n num_heads=self.lam_num_heads,\n dropout=0.0,\n codebook_dropout=0.0,\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n use_flash_attention=self.use_flash_attention,\n )\n\n if self.use_maskgit:\n self.dynamics = DynamicsMaskGIT(\n model_dim=self.dyna_dim,\n ffn_dim=self.dyna_ffn_dim,\n num_latents=self.num_patch_latents,\n num_blocks=self.dyna_num_blocks,\n 
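# (editorial note, not in the original recording) use_maskgit switches the dynamics backend; sample.py passes use_maskgit=False, so the DynamicsAutoregressive branch below is the one exercised here, and its sample_causal loop emits the 'Sampling token <n> from frame <t>' lines seen in the terminal output via jax.debug.print.\n                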
num_heads=self.dyna_num_heads,\n dropout=self.dropout,\n mask_limit=self.mask_limit,\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n use_flash_attention=self.use_flash_attention,\n )\n else:\n self.dynamics = DynamicsAutoregressive(\n model_dim=self.dyna_dim,\n ffn_dim=self.dyna_ffn_dim,\n num_latents=self.num_patch_latents,\n num_blocks=self.dyna_num_blocks,\n num_heads=self.dyna_num_heads,\n dropout=self.dropout,\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n use_flash_attention=self.use_flash_attention,\n )\n\n def __call__(self, batch: Dict[str, Any], training: bool = True) -> Dict[str, Any]:\n tokenizer_outputs = self.tokenizer.vq_encode(batch[""videos""], training=False)\n lam_outputs = self.lam.vq_encode(batch[""videos""], training=False)\n latent_actions = jax.lax.cond(\n self.lam_co_train,\n lambda: lam_outputs[""z_q""],\n lambda: jax.lax.stop_gradient(lam_outputs[""z_q""]),\n )\n outputs = dict(\n video_tokens=jax.lax.stop_gradient(tokenizer_outputs[""indices""]),\n latent_actions=latent_actions,\n )\n outputs[""mask_rng""] = batch[""mask_rng""]\n dyna_outputs = self.dynamics(outputs, training)\n outputs.update(dyna_outputs)\n mle_indices = jnp.argmax(outputs[""token_logits""], axis=-1)\n outputs[""recon""] = self.tokenizer.decode(\n mle_indices, batch[""videos""].shape[2:4]\n )\n outputs[""lam_indices""] = lam_outputs[""indices""]\n return outputs\n\n def sample_causal(\n self,\n batch: Dict[str, Any],\n seq_len: int,\n temperature: float = 1,\n sample_argmax: bool = False,\n ):\n """"""\n Autoregressively samples up to `seq_len` future frames using the causal transformer backend.\n\n - Input frames are tokenized once.\n - Future frames are generated one at a time, each conditioned on all previous frames.\n - All frames are detokenized in a single pass at the end.\n\n Args:\n batch: Dict with at least ""videos"" (B, T, H, W, C)\n seq_len: total number of frames to generate (including context)\n temperature: sampling temperature\n sample_argmax: if True, use argmax instead of sampling\n\n Returns:\n Generated video frames (B, seq_len, H, W, C)\n """"""\n # --- Encode context frames ---\n tokenizer_out = self.tokenizer.vq_encode(batch[""videos""], training=False)\n token_idxs = tokenizer_out[""indices""] # (B, T, N)\n B, T, N = token_idxs.shape\n\n # jax.debug.print(""token_idxs shape: {}"", token_idxs.shape)\n # --- Prepare initial token sequence ---\n # Pad with zeros for future frames\n pad_shape = (B, seq_len - T, N)\n token_idxs_full = jnp.concatenate(\n [token_idxs, jnp.zeros(pad_shape, dtype=token_idxs.dtype)], axis=1\n ) # (B, seq_len, N)\n\n # --- Prepare latent actions ---\n action_tokens = self.lam.vq.get_codes(batch[""latent_actions""]) # (B, S-1, )\n # --- Autoregressive generation loop ---\n rng = batch[""rng""]\n for t in range(T, seq_len):\n for n in range(N):\n jax.debug.print(""Sampling token {} from frame {}"", n, t)\n dyna_inputs = {\n ""video_tokens"": token_idxs_full,\n ""latent_actions"": action_tokens,\n }\n # jax.debug.print(""token_idxs_full 0: {}"", token_idxs_full[0,:,0])\n dyna_outputs = self.dynamics(dyna_inputs, training=False)\n # # dyna_outputs[""token_logits""]: (B, t, N, vocab_size)\n # # We want the logits for the last time step (frame t-1 predicting t)\n # jax.debug.breakpoint()\n next_token_logits = dyna_outputs[""token_logits""][:, t, n, :].astype(\n jnp.float32\n ) # (B, 1, vocab_size)\n\n # Sample or argmax for each patch\n if sample_argmax:\n next_token = jnp.argmax(next_token_logits, axis=-1) # (B, 1)\n else:\n rng, step_rng 
= jax.random.split(rng)\n next_token = jax.random.categorical(\n step_rng, next_token_logits / temperature, axis=-1\n ) # (B, 1)\n\n # Insert the generated tokens into the sequence\n token_idxs_full = token_idxs_full.at[:, t, n].set(next_token)\n\n # --- Decode all tokens at once at the end ---\n final_frames = self.tokenizer.decode(\n token_idxs_full, video_hw=batch[""videos""].shape[2:4]\n )\n return final_frames\n\n @nn.compact\n def sample_maskgit(\n self,\n batch: Dict[str, Any],\n seq_len: int,\n steps: int = 25,\n temperature: float = 1,\n sample_argmax: bool = False,\n ) -> Any:\n """"""\n Autoregressively samples up to `seq_len` future frames, following Figure 8 of the paper.\n\n - Input frames are tokenized once.\n - Future frames are generated autoregressively in token space.\n - All frames are detokenized in a single pass.\n\n Note:\n - For interactive or step-wise sampling, detokenization should occur after each action.\n - To maintain consistent tensor shapes across timesteps, all current and future frames are decoded at every step.\n - Temporal causal structure is preserved by\n a) reapplying the mask before each decoding step.\n b) a temporal causal mask is applied within each ST-transformer block.\n\n Dimension keys:\n B: batch size\n T: number of input (conditioning) frames\n N: patches per frame\n S: sequence length\n A: action space\n D: model latent dimension\n """"""\n # --- Encode videos and actions ---\n tokenizer_out = self.tokenizer.vq_encode(batch[""videos""], training=False)\n token_idxs = tokenizer_out[""indices""] # (B, T, N)\n B, T, N = token_idxs.shape\n pad_shape = (B, seq_len - T, N)\n pad = jnp.zeros(pad_shape, dtype=token_idxs.dtype)\n token_idxs = jnp.concatenate([token_idxs, pad], axis=1) # (B, S, N)\n action_tokens = self.lam.vq.get_codes(batch[""latent_actions""])\n\n MaskGITLoop = nn.scan(\n MaskGITStep,\n variable_broadcast=""params"",\n split_rngs={""params"": False},\n in_axes=0,\n out_axes=0,\n length=steps,\n )\n\n loop_fn = MaskGITLoop(\n dynamics=self.dynamics,\n tokenizer=self.tokenizer,\n temperature=temperature,\n sample_argmax=sample_argmax,\n steps=steps,\n )\n\n def generation_step_fn(carry, step_t):\n rng, current_token_idxs = carry\n rng, step_rng = jax.random.split(rng)\n\n # Mask current and future frames (i.e., t >= step_t)\n mask = jnp.arange(seq_len) >= step_t # (S,)\n mask = jnp.broadcast_to(mask[None, :, None], (B, seq_len, N)) # (B, S, N)\n mask = mask.astype(bool)\n masked_token_idxs = current_token_idxs * ~mask\n\n # --- Initialize and run MaskGIT loop ---\n init_carry_maskgit = (\n step_rng,\n masked_token_idxs,\n mask,\n action_tokens,\n )\n final_carry_maskgit, _ = loop_fn(init_carry_maskgit, jnp.arange(steps))\n updated_token_idxs = final_carry_maskgit[1]\n new_carry = (rng, updated_token_idxs)\n return new_carry, None\n\n # --- Run the autoregressive generation using scan ---\n initial_carry = (batch[""rng""], token_idxs)\n timesteps_to_scan = jnp.arange(T, seq_len)\n final_carry, _ = jax.lax.scan(\n generation_step_fn, initial_carry, timesteps_to_scan\n )\n final_token_idxs = final_carry[1]\n\n # --- Decode all tokens at once at the end ---\n final_frames = self.tokenizer.decode(\n final_token_idxs,\n video_hw=batch[""videos""].shape[2:4],\n )\n return final_frames\n\n def vq_encode(self, batch, training) -> Dict[str, Any]:\n # --- Preprocess videos ---\n lam_output = self.lam.vq_encode(batch[""videos""], training=training)\n return lam_output[""indices""]\n\n\nclass MaskGITStep(nn.Module):\n dynamics: nn.Module\n 
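# (editorial note, not in the original recording) MaskGITStep performs a single confidence-based refinement step; sample_maskgit wraps it in nn.scan and runs it 'steps' times per generated frame.\n    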
tokenizer: nn.Module\n temperature: float\n sample_argmax: bool\n steps: int\n\n @nn.compact\n def __call__(self, carry, x):\n rng, token_idxs, mask, action_tokens = carry\n step = x\n N = token_idxs.shape[2]\n\n # --- Construct + encode video ---\n vid_embed = self.dynamics.patch_embed(token_idxs) # (B, S, N, D)\n mask_token = self.dynamics.mask_token # (1, 1, 1, D,)\n mask_expanded = mask[..., None] # (B, S, N, 1)\n vid_embed = jnp.where(mask_expanded, mask_token, vid_embed)\n\n # --- Predict transition ---\n act_embed = self.dynamics.action_up(action_tokens)\n vid_embed += jnp.pad(act_embed, ((0, 0), (1, 0), (0, 0), (0, 0)))\n unmasked_ratio = jnp.cos(jnp.pi * (step + 1) / (self.steps * 2))\n step_temp = self.temperature * (1.0 - unmasked_ratio)\n final_logits = self.dynamics.dynamics(vid_embed) / step_temp\n\n # --- Sample new tokens for final frame ---\n if self.sample_argmax:\n sampled_token_idxs = jnp.argmax(final_logits, axis=-1)\n else:\n rng, _rng = jax.random.split(rng)\n sampled_token_idxs = jax.random.categorical(_rng, final_logits)\n gather_fn = jax.vmap(jax.vmap(jax.vmap(lambda x, y: x[y])))\n final_token_probs = gather_fn(jax.nn.softmax(final_logits), sampled_token_idxs)\n final_token_probs += ~mask\n # Update masked tokens only\n token_idxs = jnp.where(mask, sampled_token_idxs, token_idxs)\n\n # --- Update mask ---\n num_unmasked_tokens = jnp.round(N * (1.0 - unmasked_ratio)).astype(int)\n idx_mask = jnp.arange(final_token_probs.shape[-1]) > num_unmasked_tokens\n sorted_idxs = jnp.argsort(final_token_probs, axis=-1, descending=True)\n mask_update_fn = jax.vmap(lambda msk, ids: msk.at[ids].set(idx_mask))\n new_mask = mask_update_fn(mask, sorted_idxs)\n\n new_carry = (rng, token_idxs, new_mask, action_tokens)\n return new_carry, None\n\n\ndef restore_genie_components(\n train_state: TrainState,\n sharding: jax.sharding.NamedSharding,\n inputs: Dict[str, jax.Array],\n rng: jax.Array,\n args,\n):\n """"""Restore pre-trained Genie components""""""\n rng, _rng = jax.random.split(rng)\n\n # dummy values since we only use tx to initialize the dummy train states\n dummy_tx = optax.adamw(\n learning_rate=optax.constant_schedule(args.max_lr),\n b1=0.9,\n b2=0.9,\n weight_decay=1e-4,\n )\n handler_registry = ocp.handlers.DefaultCheckpointHandlerRegistry()\n handler_registry.add(\n ""model_state"", ocp.args.StandardRestore, ocp.handlers.StandardCheckpointHandler\n )\n\n checkpoint_options = ocp.CheckpointManagerOptions(\n step_format_fixed_length=6,\n )\n tokenizer_checkpoint_manager = ocp.CheckpointManager(\n directory=args.tokenizer_checkpoint,\n options=checkpoint_options,\n handler_registry=handler_registry,\n )\n dummy_tokenizer = TokenizerVQVAE(\n in_dim=args.image_channels,\n model_dim=args.tokenizer_dim,\n ffn_dim=args.tokenizer_ffn_dim,\n latent_dim=args.latent_patch_dim,\n num_latents=args.num_patch_latents,\n patch_size=args.patch_size,\n num_blocks=args.tokenizer_num_blocks,\n num_heads=args.tokenizer_num_heads,\n dropout=args.dropout,\n codebook_dropout=args.dropout,\n param_dtype=args.param_dtype,\n dtype=args.dtype,\n use_flash_attention=args.use_flash_attention,\n )\n tokenizer_init_params = dummy_tokenizer.init(_rng, inputs)\n dummy_tokenizer_train_state = TrainState.create(\n apply_fn=dummy_tokenizer.apply, params=tokenizer_init_params, tx=dummy_tx\n )\n abstract_sharded_tokenizer_state = _create_abstract_sharded_pytree(\n dummy_tokenizer_train_state, sharding\n )\n restored_tokenizer = tokenizer_checkpoint_manager.restore(\n 
step=tokenizer_checkpoint_manager.latest_step(),\n args=ocp.args.Composite(\n model_state=ocp.args.StandardRestore(abstract_sharded_tokenizer_state),\n ),\n )[""model_state""]\n restored_tokenizer_params = restored_tokenizer.params[""params""]\n train_state.params[""params""][""tokenizer""].update(restored_tokenizer_params)\n tokenizer_checkpoint_manager.close()\n\n if args.lam_checkpoint:\n lam_checkpoint_manager = ocp.CheckpointManager(\n directory=args.lam_checkpoint,\n options=checkpoint_options,\n handler_registry=handler_registry,\n )\n dummy_lam = LatentActionModel(\n in_dim=args.image_channels,\n model_dim=args.lam_dim,\n ffn_dim=args.lam_ffn_dim,\n latent_dim=args.latent_patch_dim,\n num_latents=args.num_latent_actions,\n patch_size=args.lam_patch_size,\n num_blocks=args.lam_num_blocks,\n num_heads=args.lam_num_heads,\n dropout=args.dropout,\n codebook_dropout=args.dropout,\n param_dtype=args.param_dtype,\n dtype=args.dtype,\n use_flash_attention=args.use_flash_attention,\n )\n lam_init_params = dummy_lam.init(_rng, inputs)\n dummy_lam_train_state = TrainState.create(\n apply_fn=dummy_lam.apply, params=lam_init_params, tx=dummy_tx\n )\n abstract_sharded_lam_state = _create_abstract_sharded_pytree(\n dummy_lam_train_state, sharding\n )\n restored_lam = lam_checkpoint_manager.restore(\n step=lam_checkpoint_manager.latest_step(),\n args=ocp.args.Composite(\n model_state=ocp.args.StandardRestore(abstract_sharded_lam_state),\n ),\n )[""model_state""]\n restored_lam_params = restored_lam.params[""params""]\n # Genie does not initialize all LAM modules, thus we omit those extra modules during restoration\n # (f.srambical) FIXME: Currently, this is a small HBM memory crunch since the LAM's decoder is loaded into HBM and immediately dicarded.\n # A workaround would be to restore to host memory first, and only move the weights to HBM after pruning the decoder\n restored_lam_params = {\n k: v\n for k, v in restored_lam_params.items()\n if k in train_state.params[""params""][""lam""]\n }\n train_state.params[""params""][""lam""].update(restored_lam_params)\n lam_checkpoint_manager.close()\n\n return train_state\n\n\ndef _create_abstract_sharded_pytree(pytree_template, sharding_spec):\n """"""Replaces arrays in a pytree with ShapeDtypeStructs having the given sharding.""""""\n\n def map_fn(leaf_template):\n if hasattr(leaf_template, ""shape"") and hasattr(leaf_template, ""dtype""):\n return jax.ShapeDtypeStruct(\n leaf_template.shape, leaf_template.dtype, sharding=sharding_spec\n )\n return leaf_template\n\n return jax.tree_util.tree_map(map_fn, pytree_template)\n",python,tab +3732,9949166,"genie.py",11114,0,"",python,selection_mouse +3733,9952522,"genie.py",11117,0,"",python,selection_mouse +3734,9955986,"TERMINAL",0,0,"Sampling token 216 from frame 3\r\nSampling token 217 from frame 3\r\nSampling token 218 from frame 3\r\nSampling token 219 from frame 3\r\nSampling token 220 from frame 3\r\nSampling token 221 from frame 3\r\nSampling token 222 from frame 3\r\nSampling token 223 from frame 3\r\nSampling token 224 from frame 3\r\nSampling token 225 from frame 3\r\nSampling token 226 from frame 3\r\nSampling token 227 from frame 3\r\nSampling token 228 from frame 3\r\nSampling token 229 from frame 3\r\nSampling token 230 from frame 3\r\nSampling token 231 from frame 3\r\nSampling token 232 from frame 3\r\nSampling token 233 from frame 3\r\nSampling token 234 from frame 3\r\nSampling token 235 from frame 3\r\nSampling token 236 from frame 3\r\nSampling token 237 from frame 3\r\nSampling token 
238 from frame 3\r\n[... repetitive terminal output condensed: 'Sampling token <n> from frame 3' for tokens 239-440 ...]\r\nSampling token 
441 from frame 3\r\nSampling token 442 from frame 3\r\nSampling token 443 from frame 3\r\nSampling token 444 from frame 3\r\nSampling token 445 from frame 3\r\nSampling token 446 from frame 3\r\nSampling token 447 from frame 3\r\nSampling token 448 from frame 3\r\nSampling token 449 from frame 3\r\nSampling token 450 from frame 3\r\nSampling token 451 from frame 3\r\nSampling token 452 from frame 3\r\nSampling token 453 from frame 3\r\nSampling token 454 from frame 3\r\nSampling token 455 from frame 3\r\nSampling token 456 from frame 3\r\nSampling token 457 from frame 3\r\nSampling token 458 from frame 3\r\nSampling token 459 from frame 3\r\nSampling token 460 from frame 3\r\nSampling token 461 from frame 3\r\nSampling token 462 from frame 3\r\nSampling token 463 from frame 3\r\nSampling token 464 from frame 3\r\nSampling token 465 from frame 3\r\nSampling token 466 from frame 3\r\nSampling token 467 from frame 3\r\nSampling token 468 from frame 3\r\nSampling token 469 from frame 3\r\nSampling token 470 from frame 3\r\nSampling token 471 from frame 3\r\n",,terminal_output +3735,9956206,"genie.py",10999,0,"",python,selection_mouse +3736,9956531,"sample.py",0,0,"",python,tab +3737,9957823,"sample.py",5762,0,"",python,selection_mouse +3738,9958020,"genie.py",0,0,"",python,tab +3739,9961663,"genie.py",11114,0,"",python,selection_mouse +3740,9978074,"models/lam.py",0,0,"",python,tab +3741,9981008,"models/lam.py",2596,0,"",python,selection_mouse +3742,9989614,"TERMINAL",0,0,"Sampling token 472 from frame 3\r\nSampling token 473 from frame 3\r\nSampling token 474 from frame 3\r\nSampling token 475 from frame 3\r\nSampling token 476 from frame 3\r\nSampling token 477 from frame 3\r\nSampling token 478 from frame 3\r\nSampling token 479 from frame 3\r\nSampling token 480 from frame 3\r\nSampling token 481 from frame 3\r\nSampling token 482 from frame 3\r\nSampling token 483 from frame 3\r\nSampling token 484 from frame 3\r\nSampling token 485 from frame 3\r\nSampling token 486 from frame 3\r\nSampling token 487 from frame 3\r\nSampling token 488 from frame 3\r\nSampling token 489 from frame 3\r\nSampling token 490 from frame 3\r\nSampling token 491 from frame 3\r\nSampling token 492 from frame 3\r\nSampling token 493 from frame 3\r\nSampling token 494 from frame 3\r\nSampling token 495 from frame 3\r\nSampling token 496 from frame 3\r\nSampling token 497 from frame 3\r\nSampling token 498 from frame 3\r\nSampling token 499 from frame 3\r\nSampling token 500 from frame 3\r\nSampling token 501 from frame 3\r\nSampling token 502 from frame 3\r\nSampling token 503 from frame 3\r\nSampling token 504 from frame 3\r\nSampling token 505 from frame 3\r\nSampling token 506 from frame 3\r\nSampling token 507 from frame 3\r\nSampling token 508 from frame 3\r\nSampling token 509 from frame 3\r\nSampling token 510 from frame 3\r\nSampling token 511 from frame 3\r\nSampling token 512 from frame 3\r\nSampling token 513 from frame 3\r\nSampling token 514 from frame 3\r\nSampling token 515 from frame 3\r\nSampling token 516 from frame 3\r\nSampling token 517 from frame 3\r\nSampling token 518 from frame 3\r\nSampling token 519 from frame 3\r\nSampling token 520 from frame 3\r\nSampling token 521 from frame 3\r\nSampling token 522 from frame 3\r\nSampling token 523 from frame 3\r\nSampling token 524 from frame 3\r\nSampling token 525 from frame 3\r\nSampling token 526 from frame 3\r\nSampling token 527 from frame 3\r\nSampling token 528 from frame 3\r\nSampling token 529 from frame 3\r\nSampling token 530 from frame 
3\r\nSampling token 531 from frame 3\r\nSampling token 532 from frame 3\r\nSampling token 533 from frame 3\r\nSampling token 534 from frame 3\r\nSampling token 535 from frame 3\r\nSampling token 536 from frame 3\r\nSampling token 537 from frame 3\r\nSampling token 538 from frame 3\r\nSampling token 539 from frame 3\r\nSampling token 540 from frame 3\r\nSampling token 541 from frame 3\r\nSampling token 542 from frame 3\r\nSampling token 543 from frame 3\r\nSampling token 544 from frame 3\r\nSampling token 545 from frame 3\r\nSampling token 546 from frame 3\r\nSampling token 547 from frame 3\r\nSampling token 548 from frame 3\r\nSampling token 549 from frame 3\r\nSampling token 550 from frame 3\r\nSampling token 551 from frame 3\r\nSampling token 552 from frame 3\r\nSampling token 553 from frame 3\r\nSampling token 554 from frame 3\r\nSampling token 555 from frame 3\r\nSampling token 556 from frame 3\r\nSampling token 557 from frame 3\r\nSampling token 558 from frame 3\r\nSampling token 559 from frame 3\r\nSampling token 560 from frame 3\r\nSampling token 561 from frame 3\r\nSampling token 562 from frame 3\r\nSampling token 563 from frame 3\r\nSampling token 564 from frame 3\r\nSampling token 565 from frame 3\r\nSampling token 566 from frame 3\r\nSampling token 567 from frame 3\r\nSampling token 568 from frame 3\r\nSampling token 569 from frame 3\r\nSampling token 570 from frame 3\r\nSampling token 571 from frame 3\r\nSampling token 572 from frame 3\r\nSampling token 573 from frame 3\r\nSampling token 574 from frame 3\r\nSampling token 575 from frame 3\r\nSampling token 576 from frame 3\r\nSampling token 577 from frame 3\r\nSampling token 578 from frame 3\r\nSampling token 579 from frame 3\r\nSampling token 580 from frame 3\r\nSampling token 581 from frame 3\r\nSampling token 582 from frame 3\r\nSampling token 583 from frame 3\r\nSampling token 584 from frame 3\r\nSampling token 585 from frame 3\r\nSampling token 586 from frame 3\r\nSampling token 587 from frame 3\r\nSampling token 588 from frame 3\r\nSampling token 589 from frame 3\r\nSampling token 590 from frame 3\r\nSampling token 591 from frame 3\r\nSampling token 592 from frame 3\r\nSampling token 593 from frame 3\r\nSampling token 594 from frame 3\r\nSampling token 595 from frame 3\r\nSampling token 596 from frame 3\r\nSampling token 597 from frame 3\r\nSampling token 598 from frame 3\r\nSampling token 599 from frame 3\r\nSampling token 600 from frame 3\r\nSampling token 601 from frame 3\r\nSampling token 602 from frame 3\r\nSampling token 603 from frame 3\r\nSampling token 604 from frame 3\r\nSampling token 605 from frame 3\r\nSampling token 606 from frame 3\r\nSampling token 607 from frame 3\r\nSampling token 608 from frame 3\r\nSampling token 609 from frame 3\r\nSampling token 610 from frame 3\r\nSampling token 611 from frame 3\r\nSampling token 612 from frame 3\r\nSampling token 613 from frame 3\r\nSampling token 614 from frame 3\r\nSampling token 615 from frame 3\r\nSampling token 616 from frame 3\r\nSampling token 617 from frame 3\r\nSampling token 618 from frame 3\r\nSampling token 619 from frame 3\r\nSampling token 620 from frame 3\r\nSampling token 621 from frame 3\r\nSampling token 622 from frame 3\r\nSampling token 623 from frame 3\r\nSampling token 624 from frame 3\r\nSampling token 625 from frame 3\r\nSampling token 626 from frame 3\r\nSampling token 627 from frame 3\r\nSampling token 628 from frame 3\r\nSampling token 629 from frame 3\r\nSampling token 630 from frame 3\r\nSampling token 631 from frame 3\r\nSampling token 
632 from frame 3\r\nSampling token 633 from frame 3\r\nSampling token 634 from frame 3\r\nSampling token 635 from frame 3\r\nSampling token 636 from frame 3\r\nSampling token 637 from frame 3\r\nSampling token 638 from frame 3\r\nSampling token 639 from frame 3\r\nSampling token 640 from frame 3\r\nSampling token 641 from frame 3\r\nSampling token 642 from frame 3\r\nSampling token 643 from frame 3\r\nSampling token 644 from frame 3\r\nSampling token 645 from frame 3\r\nSampling token 646 from frame 3\r\nSampling token 647 from frame 3\r\nSampling token 648 from frame 3\r\nSampling token 649 from frame 3\r\nSampling token 650 from frame 3\r\nSampling token 651 from frame 3\r\nSampling token 652 from frame 3\r\nSampling token 653 from frame 3\r\nSampling token 654 from frame 3\r\nSampling token 655 from frame 3\r\nSampling token 656 from frame 3\r\nSampling token 657 from frame 3\r\nSampling token 658 from frame 3\r\nSampling token 659 from frame 3\r\nSampling token 660 from frame 3\r\nSampling token 661 from frame 3\r\nSampling token 662 from frame 3\r\nSampling token 663 from frame 3\r\nSampling token 664 from frame 3\r\nSampling token 665 from frame 3\r\nSampling token 666 from frame 3\r\nSampling token 667 from frame 3\r\nSampling token 668 from frame 3\r\nSampling token 669 from frame 3\r\nSampling token 670 from frame 3\r\nSampling token 671 from frame 3\r\nSampling token 672 from frame 3\r\nSampling token 673 from frame 3\r\nSampling token 674 from frame 3\r\nSampling token 675 from frame 3\r\nSampling token 676 from frame 3\r\nSampling token 677 from frame 3\r\nSampling token 678 from frame 3\r\nSampling token 679 from frame 3\r\nSampling token 680 from frame 3\r\nSampling token 681 from frame 3\r\nSampling token 682 from frame 3\r\nSampling token 683 from frame 3\r\nSampling token 684 from frame 3\r\nSampling token 685 from frame 3\r\nSampling token 686 from frame 3\r\nSampling token 687 from frame 3\r\nSampling token 688 from frame 3\r\nSampling token 689 from frame 3\r\nSampling token 690 from frame 3\r\nSampling token 691 from frame 3\r\nSampling token 692 from frame 3\r\nSampling token 693 from frame 3\r\nSampling token 694 from frame 3\r\nSampling token 695 from frame 3\r\nSampling token 696 from frame 3\r\nSampling token 697 from frame 3\r\nSampling token 698 from frame 3\r\nSampling token 699 from frame 3\r\nSampling token 700 from frame 3\r\nSampling token 701 from frame 3\r\nSampling token 702 from frame 3\r\nSampling token 703 from frame 3\r\nSampling token 704 from frame 3\r\nSampling token 705 from frame 3\r\nSampling token 706 from frame 3\r\nSampling token 707 from frame 3\r\nSampling token 708 from frame 3\r\nSampling token 709 from frame 3\r\nSampling token 710 from frame 3\r\nSampling token 711 from frame 3\r\nSampling token 712 from frame 3\r\nSampling token 713 from frame 3\r\nSampling token 714 from frame 3\r\nSampling token 715 from frame 3\r\nSampling token 716 from frame 3\r\nSampling token 717 from frame 3\r\nSampling token 718 from frame 3\r\nSampling token 719 from frame 3\r\nSampling token 720 from frame 3\r\nSampling token 721 from frame 3\r\nSampling token 722 from frame 3\r\nSampling token 723 from frame 3\r\nSampling token 724 from frame 3\r\nSampling token 725 from frame 3\r\nSampling token 726 from frame 3\r\nSampling token 727 from frame 3\r\n",,terminal_output +3743,10020304,"TERMINAL",0,0,"Sampling token 728 from frame 3\r\nSampling token 729 from frame 3\r\nSampling token 730 from frame 3\r\nSampling token 731 from frame 3\r\nSampling token 732 
from frame 3\r\nSampling token 733 from frame 3\r\nSampling token 734 from frame 3\r\nSampling token 735 from frame 3\r\nSampling token 736 from frame 3\r\nSampling token 737 from frame 3\r\nSampling token 738 from frame 3\r\nSampling token 739 from frame 3\r\nSampling token 740 from frame 3\r\nSampling token 741 from frame 3\r\nSampling token 742 from frame 3\r\nSampling token 743 from frame 3\r\nSampling token 744 from frame 3\r\nSampling token 745 from frame 3\r\nSampling token 746 from frame 3\r\nSampling token 747 from frame 3\r\nSampling token 748 from frame 3\r\nSampling token 749 from frame 3\r\nSampling token 750 from frame 3\r\nSampling token 751 from frame 3\r\nSampling token 752 from frame 3\r\nSampling token 753 from frame 3\r\nSampling token 754 from frame 3\r\nSampling token 755 from frame 3\r\nSampling token 756 from frame 3\r\nSampling token 757 from frame 3\r\nSampling token 758 from frame 3\r\nSampling token 759 from frame 3\r\nSampling token 760 from frame 3\r\nSampling token 761 from frame 3\r\nSampling token 762 from frame 3\r\nSampling token 763 from frame 3\r\nSampling token 764 from frame 3\r\nSampling token 765 from frame 3\r\nSampling token 766 from frame 3\r\nSampling token 767 from frame 3\r\nSampling token 768 from frame 3\r\nSampling token 769 from frame 3\r\nSampling token 770 from frame 3\r\nSampling token 771 from frame 3\r\nSampling token 772 from frame 3\r\nSampling token 773 from frame 3\r\nSampling token 774 from frame 3\r\nSampling token 775 from frame 3\r\nSampling token 776 from frame 3\r\nSampling token 777 from frame 3\r\nSampling token 778 from frame 3\r\nSampling token 779 from frame 3\r\nSampling token 780 from frame 3\r\nSampling token 781 from frame 3\r\nSampling token 782 from frame 3\r\nSampling token 783 from frame 3\r\nSampling token 784 from frame 3\r\nSampling token 785 from frame 3\r\nSampling token 786 from frame 3\r\nSampling token 787 from frame 3\r\nSampling token 788 from frame 3\r\nSampling token 789 from frame 3\r\nSampling token 790 from frame 3\r\nSampling token 791 from frame 3\r\nSampling token 792 from frame 3\r\nSampling token 793 from frame 3\r\nSampling token 794 from frame 3\r\nSampling token 795 from frame 3\r\nSampling token 796 from frame 3\r\nSampling token 797 from frame 3\r\nSampling token 798 from frame 3\r\nSampling token 799 from frame 3\r\nSampling token 800 from frame 3\r\nSampling token 801 from frame 3\r\nSampling token 802 from frame 3\r\nSampling token 803 from frame 3\r\nSampling token 804 from frame 3\r\nSampling token 805 from frame 3\r\nSampling token 806 from frame 3\r\nSampling token 807 from frame 3\r\nSampling token 808 from frame 3\r\nSampling token 809 from frame 3\r\nSampling token 810 from frame 3\r\nSampling token 811 from frame 3\r\nSampling token 812 from frame 3\r\nSampling token 813 from frame 3\r\nSampling token 814 from frame 3\r\nSampling token 815 from frame 3\r\nSampling token 816 from frame 3\r\nSampling token 817 from frame 3\r\nSampling token 818 from frame 3\r\nSampling token 819 from frame 3\r\nSampling token 820 from frame 3\r\nSampling token 821 from frame 3\r\nSampling token 822 from frame 3\r\nSampling token 823 from frame 3\r\nSampling token 824 from frame 3\r\nSampling token 825 from frame 3\r\nSampling token 826 from frame 3\r\nSampling token 827 from frame 3\r\nSampling token 828 from frame 3\r\nSampling token 829 from frame 3\r\nSampling token 830 from frame 3\r\nSampling token 831 from frame 3\r\nSampling token 832 from frame 3\r\nSampling token 833 from frame 
3\r\nSampling token 834 from frame 3\r\nSampling token 835 from frame 3\r\nSampling token 836 from frame 3\r\nSampling token 837 from frame 3\r\nSampling token 838 from frame 3\r\nSampling token 839 from frame 3\r\nSampling token 840 from frame 3\r\nSampling token 841 from frame 3\r\nSampling token 842 from frame 3\r\nSampling token 843 from frame 3\r\nSampling token 844 from frame 3\r\nSampling token 845 from frame 3\r\nSampling token 846 from frame 3\r\nSampling token 847 from frame 3\r\nSampling token 848 from frame 3\r\nSampling token 849 from frame 3\r\nSampling token 850 from frame 3\r\nSampling token 851 from frame 3\r\nSampling token 852 from frame 3\r\nSampling token 853 from frame 3\r\nSampling token 854 from frame 3\r\nSampling token 855 from frame 3\r\nSampling token 856 from frame 3\r\nSampling token 857 from frame 3\r\nSampling token 858 from frame 3\r\nSampling token 859 from frame 3\r\nSampling token 860 from frame 3\r\nSampling token 861 from frame 3\r\nSampling token 862 from frame 3\r\nSampling token 863 from frame 3\r\nSampling token 864 from frame 3\r\nSampling token 865 from frame 3\r\nSampling token 866 from frame 3\r\nSampling token 867 from frame 3\r\nSampling token 868 from frame 3\r\nSampling token 869 from frame 3\r\nSampling token 870 from frame 3\r\nSampling token 871 from frame 3\r\nSampling token 872 from frame 3\r\nSampling token 873 from frame 3\r\nSampling token 874 from frame 3\r\nSampling token 875 from frame 3\r\nSampling token 876 from frame 3\r\nSampling token 877 from frame 3\r\nSampling token 878 from frame 3\r\nSampling token 879 from frame 3\r\nSampling token 880 from frame 3\r\nSampling token 881 from frame 3\r\nSampling token 882 from frame 3\r\nSampling token 883 from frame 3\r\nSampling token 884 from frame 3\r\nSampling token 885 from frame 3\r\nSampling token 886 from frame 3\r\nSampling token 887 from frame 3\r\nSampling token 888 from frame 3\r\nSampling token 889 from frame 3\r\nSampling token 890 from frame 3\r\nSampling token 891 from frame 3\r\nSampling token 892 from frame 3\r\nSampling token 893 from frame 3\r\nSampling token 894 from frame 3\r\nSampling token 895 from frame 3\r\nSampling token 896 from frame 3\r\nSampling token 897 from frame 3\r\nSampling token 898 from frame 3\r\nSampling token 899 from frame 3\r\nSampling token 900 from frame 3\r\nSampling token 901 from frame 3\r\nSampling token 902 from frame 3\r\nSampling token 903 from frame 3\r\nSampling token 904 from frame 3\r\nSampling token 905 from frame 3\r\nSampling token 906 from frame 3\r\nSampling token 907 from frame 3\r\nSampling token 908 from frame 3\r\nSampling token 909 from frame 3\r\nSampling token 910 from frame 3\r\nSampling token 911 from frame 3\r\nSampling token 912 from frame 3\r\nSampling token 913 from frame 3\r\nSampling token 914 from frame 3\r\nSampling token 915 from frame 3\r\nSampling token 916 from frame 3\r\nSampling token 917 from frame 3\r\nSampling token 918 from frame 3\r\nSampling token 919 from frame 3\r\nautoreg sampling done. 
calculating ssim and saving video\r\nSSIM: 0.860548734664917\r\n",,terminal_output +3744,10022519,"TERMINAL",0,0,"]0;tum_cte0515@hkn0901:~/Projects/jafar[?2004h(jafar) [tum_cte0515@hkn0901 jafar]$ ",,terminal_output +3745,10062981,"TERMINAL",0,0,"sh slurm/dev/mihir/horeka/yolo-runs/sampling.sh /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/causal/overfit-seed69-1/interactive/3373280",,terminal_output +3746,10064174,"TERMINAL",0,0,"jobs/mihir/horeka/yolo-runs/sampling.sh /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/causal/overfit-seed69-1/interactive/3373280",,terminal_output +3747,10065019,"TERMINAL",0,0,"queue\r\n\r",,terminal_output +3748,10065436,"TERMINAL",0,0,"sh slurm/dev/mihir/horeka/overfit_sample/causal/dynamics_overfit_sample.sbatch",,terminal_output +3749,10070018,"models/dynamics.py",0,0,"",python,tab +3750,10080315,"models/dynamics.py",4130,0,"",python,selection_mouse +3751,10080432,"models/dynamics.py",4129,8,"dynamics",python,selection_mouse +3752,10080638,"models/dynamics.py",4129,25,"dynamics(vid_embed_padded",python,selection_mouse +3753,10080676,"models/dynamics.py",4129,26,"dynamics(vid_embed_padded)",python,selection_mouse +3754,10080676,"models/dynamics.py",4129,27,"dynamics(vid_embed_padded)[",python,selection_mouse +3755,10080739,"models/dynamics.py",4129,29,"dynamics(vid_embed_padded)[:,",python,selection_mouse +3756,10080739,"models/dynamics.py",4129,31,"dynamics(vid_embed_padded)[:, :",python,selection_mouse +3757,10080740,"models/dynamics.py",4129,33,"dynamics(vid_embed_padded)[:, :, ",python,selection_mouse +3758,10080741,"models/dynamics.py",4129,35,"dynamics(vid_embed_padded)[:, :, :-",python,selection_mouse +3759,10080756,"models/dynamics.py",4129,36,"dynamics(vid_embed_padded)[:, :, :-1",python,selection_mouse +3760,10080783,"models/dynamics.py",4129,37,"dynamics(vid_embed_padded)[:, :, :-1]",python,selection_mouse +3761,10085681,"TERMINAL",0,0,"[?25l[?2004l\r[?25h",,terminal_output +3762,10085748,"TERMINAL",0,0,"#!/usr/bin/env bash\r\n\r\n#SBATCH --nodes=1\r\n#SBATCH --ntasks-per-node=1\r\n#SBATCH --time=01:00:00\r\n#SBATCH --partition=accelerated\r\n#SBATCH --cpus-per-task=5\r\n#SBATCH --gres=gpu:1\r\n#SBATCH --output=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir/yoloruns/%x_%j.log\r\n#SBATCH --error=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir/yoloruns/%x_%j.log\r\n#SBATCH --job-name=train_dynamics_overfit_sample_causal\r\n\r\n# Log the sbatch script\r\ncat $0\r\n\r\nmodule unload mpi/openmpi/5.0\r\nmodule unload devel/cuda/12.4\r\n# source .venv/bin/activate\r\n\r\narray_records_dir=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_new/open_ai_minecraft_arrayrecords_chunked\r\n\r\njob_name=$SLURM_JOB_NAME\r\nslurm_job_id=$SLURM_JOB_ID\r\n\r\nCHECKPOINT_DIR=$ws_dir/checkpoints/causal/overfit-seed69-1/$job_name/$slurm_job_id\r\nmkdir -p $CHECKPOINT_DIR\r\n\r\ntokenizer_ckpt_dir=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data/checkpoints/train_tokenizer_lr_sweep_1e-4_larger_ffn/\r\n\r\n\r\nenv | grep SLURM\r\n\r\nsrun python train_dynamics.py \\r\n --save_ckpt \\r\n --seed=69 \\r\n --num_steps=5000 \\r\n --warmup_steps=0 \\r\n --wsd_decay_steps=0 \\r\n --ckpt_dir $CHECKPOINT_DIR \\r\n --batch_size=1 \\r\n --init_lr=1e-4 \\r\n --max_lr=1e-4 \\r\n --log_image_interval=100 \\r\n --log \\r\n --log_checkpoint_interval=1000 \\r\n --name=dynamics-causal-overfit-$slurm_job_id \\r\n --tags dynamics causal overfit \\r\n --entity instant-uv \\r\n 
--project jafar \\r\n --tokenizer_checkpoint=$tokenizer_ckpt_dir \\r\n --data_dir $array_records_dir \\r\n --dyna_dim=128 \\r\n --dyna_num_blocks=2 \\r\n --dyna_num_heads=4\r\n ",,terminal_output +3763,10085898,"TERMINAL",0,0,"SLURM_STEP_NUM_TASKS=1\r\nSLURM_JOB_USER=tum_cte0515\r\nSLURM_TASKS_PER_NODE=1\r\nSLURM_JOB_UID=999226\r\nSLURM_TASK_PID=436020\r\nSLURM_JOB_GPUS=0\r\nSLURM_LOCALID=0\r\nSLURM_SUBMIT_DIR=/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar\r\nSLURMD_NODENAME=hkn0901\r\nSLURM_JOB_START_TIME=1753366425\r\nSLURM_STEP_NODELIST=hkn0901\r\nSLURM_CLUSTER_NAME=hk\r\nSLURM_JOB_END_TIME=1753370025\r\nSLURM_PMI2_SRUN_PORT=34243\r\nSLURM_CPUS_ON_NODE=6\r\nSLURM_JOB_CPUS_PER_NODE=6\r\nSLURM_GPUS_ON_NODE=1\r\nSLURM_GTIDS=0\r\nSLURM_JOB_PARTITION=dev_accelerated-h100\r\nSLURM_TRES_PER_TASK=cpu=5\r\nSLURM_OOM_KILL_STEP=0\r\nSLURM_JOB_NUM_NODES=1\r\nSLURM_STEPID=4294967290\r\nSLURM_JOBID=3373280\r\nSLURM_PTY_PORT=42147\r\nSLURM_JOB_QOS=normal\r\nSLURM_LAUNCH_NODE_IPADDR=10.0.7.199\r\nSLURM_PTY_WIN_ROW=48\r\nSLURM_PMI2_PROC_MAPPING=(vector,(0,1,1))\r\nSLURMD_DEBUG=2\r\nSLURM_PROCID=0\r\nSLURM_CPUS_PER_TASK=5\r\nSLURM_NTASKS=1\r\nSLURM_TOPOLOGY_ADDR=hkibb.hkibbi4.hkibbi4e1.hkn0901\r\nSLURM_TOPOLOGY_ADDR_PATTERN=switch.switch.switch.node\r\nSLURM_SRUN_COMM_HOST=10.0.7.199\r\nSLURM_SCRIPT_CONTEXT=prolog_task\r\nSLURM_PTY_WIN_COL=194\r\nSLURM_NODELIST=hkn0901\r\nSLURM_SRUN_COMM_PORT=46593\r\nSLURM_STEP_ID=4294967290\r\nSLURM_JOB_ACCOUNT=hk-project-p0023960\r\nSLURM_PRIO_PROCESS=0\r\nSLURM_NPROCS=1\r\nSLURM_NNODES=1\r\nSLURM_SUBMIT_HOST=hkn1991.localdomain\r\nSLURM_JOB_ID=3373280\r\nSLURM_NODEID=0\r\nSLURM_STEP_NUM_NODES=1\r\nSLURM_STEP_TASKS_PER_NODE=1\r\nSLURM_MPI_TYPE=pmi2\r\nSLURM_PMI2_STEP_NODES=hkn0901\r\nSLURM_CONF=/etc/slurm/slurm.conf\r\nSLURM_JOB_NAME=interactive\r\nSLURM_NTASKS_PER_NODE=1\r\nSLURM_STEP_LAUNCHER_PORT=46593\r\nSLURM_JOB_GID=502226\r\nSLURM_JOB_NODELIST=hkn0901\r\n",,terminal_output +3764,10086086,"TERMINAL",0,0,"GpuFreq=control_disabled\r\n",,terminal_output +3765,10088044,"TERMINAL",0,0,"^Csrun: interrupt (one more within 1 sec to abort)\r\nsrun: StepId=3373280.6 task 0: running\r\n",,terminal_output +3766,10088212,"TERMINAL",0,0,"^Csrun: sending Ctrl-C to StepId=3373280.6\r\nsrun: forcing job termination\r\nTraceback (most recent call last):\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py"", line 15, in \r\n import wandb\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/wandb/__init__.py"", line 22, in \r\nsrun: Job step aborted: Waiting up to 32 seconds for job step to finish.\r\nslurmstepd: error: *** STEP 3373280.6 ON hkn0901 CANCELLED AT 2025-07-24T16:48:22 ***\r\n]0;tum_cte0515@hkn0901:~/Projects/jafar[?2004h(jafar) [tum_cte0515@hkn0901 jafar]$ ",,terminal_output +3767,10089559,"slurm/dev/mihir/horeka/overfit_sample/causal/dynamics_overfit_sample.sbatch",0,0,"",shellscript,tab +3768,10094923,"slurm/dev/mihir/horeka/overfit_sample/causal/dynamics_overfit_sample.sbatch",796,0,"",shellscript,selection_mouse +3769,10095932,"slurm/dev/mihir/horeka/overfit_sample/causal/dynamics_overfit_sample.sbatch",798,0,"",shellscript,selection_mouse +3770,10096889,"slurm/dev/mihir/horeka/overfit_sample/causal/dynamics_overfit_sample.sbatch",797,0,"",shellscript,selection_command +3771,10098330,"slurm/dev/mihir/horeka/overfit_sample/causal/dynamics_overfit_sample.sbatch",797,0,"-",shellscript,content 
+3772,10098331,"slurm/dev/mihir/horeka/overfit_sample/causal/dynamics_overfit_sample.sbatch",798,0,"",shellscript,selection_keyboard +3773,10099441,"slurm/dev/mihir/horeka/overfit_sample/causal/dynamics_overfit_sample.sbatch",798,0,"n",shellscript,content +3774,10099442,"slurm/dev/mihir/horeka/overfit_sample/causal/dynamics_overfit_sample.sbatch",799,0,"",shellscript,selection_keyboard +3775,10099620,"slurm/dev/mihir/horeka/overfit_sample/causal/dynamics_overfit_sample.sbatch",799,0,"o",shellscript,content +3776,10099620,"slurm/dev/mihir/horeka/overfit_sample/causal/dynamics_overfit_sample.sbatch",800,0,"",shellscript,selection_keyboard +3777,10099984,"slurm/dev/mihir/horeka/overfit_sample/causal/dynamics_overfit_sample.sbatch",800,0,"n",shellscript,content +3778,10099985,"slurm/dev/mihir/horeka/overfit_sample/causal/dynamics_overfit_sample.sbatch",801,0,"",shellscript,selection_keyboard +3779,10100171,"slurm/dev/mihir/horeka/overfit_sample/causal/dynamics_overfit_sample.sbatch",801,0,"o",shellscript,content +3780,10100172,"slurm/dev/mihir/horeka/overfit_sample/causal/dynamics_overfit_sample.sbatch",802,0,"",shellscript,selection_keyboard +3781,10100291,"slurm/dev/mihir/horeka/overfit_sample/causal/dynamics_overfit_sample.sbatch",802,0,"u",shellscript,content +3782,10100291,"slurm/dev/mihir/horeka/overfit_sample/causal/dynamics_overfit_sample.sbatch",803,0,"",shellscript,selection_keyboard +3783,10100446,"slurm/dev/mihir/horeka/overfit_sample/causal/dynamics_overfit_sample.sbatch",803,0,"s",shellscript,content +3784,10100447,"slurm/dev/mihir/horeka/overfit_sample/causal/dynamics_overfit_sample.sbatch",804,0,"",shellscript,selection_keyboard +3785,10100632,"slurm/dev/mihir/horeka/overfit_sample/causal/dynamics_overfit_sample.sbatch",804,0,"e",shellscript,content +3786,10100633,"slurm/dev/mihir/horeka/overfit_sample/causal/dynamics_overfit_sample.sbatch",805,0,"",shellscript,selection_keyboard +3787,10100862,"slurm/dev/mihir/horeka/overfit_sample/causal/dynamics_overfit_sample.sbatch",804,1,"",shellscript,content +3788,10101005,"slurm/dev/mihir/horeka/overfit_sample/causal/dynamics_overfit_sample.sbatch",803,1,"",shellscript,content +3789,10101114,"slurm/dev/mihir/horeka/overfit_sample/causal/dynamics_overfit_sample.sbatch",802,1,"",shellscript,content +3790,10101237,"slurm/dev/mihir/horeka/overfit_sample/causal/dynamics_overfit_sample.sbatch",801,1,"",shellscript,content +3791,10101366,"slurm/dev/mihir/horeka/overfit_sample/causal/dynamics_overfit_sample.sbatch",800,1,"",shellscript,content +3792,10101497,"slurm/dev/mihir/horeka/overfit_sample/causal/dynamics_overfit_sample.sbatch",799,1,"",shellscript,content +3793,10102116,"slurm/dev/mihir/horeka/overfit_sample/causal/dynamics_overfit_sample.sbatch",799,0,"o",shellscript,content +3794,10102117,"slurm/dev/mihir/horeka/overfit_sample/causal/dynamics_overfit_sample.sbatch",800,0,"",shellscript,selection_keyboard +3795,10103005,"slurm/dev/mihir/horeka/overfit_sample/causal/dynamics_overfit_sample.sbatch",800,0,"-",shellscript,content +3796,10103006,"slurm/dev/mihir/horeka/overfit_sample/causal/dynamics_overfit_sample.sbatch",801,0,"",shellscript,selection_keyboard +3797,10103243,"slurm/dev/mihir/horeka/overfit_sample/causal/dynamics_overfit_sample.sbatch",801,0,"n",shellscript,content +3798,10103244,"slurm/dev/mihir/horeka/overfit_sample/causal/dynamics_overfit_sample.sbatch",802,0,"",shellscript,selection_keyboard +3799,10103406,"slurm/dev/mihir/horeka/overfit_sample/causal/dynamics_overfit_sample.sbatch",802,0,"o",shellscript,content 
+3800,10103407,"slurm/dev/mihir/horeka/overfit_sample/causal/dynamics_overfit_sample.sbatch",803,0,"",shellscript,selection_keyboard +3801,10103520,"slurm/dev/mihir/horeka/overfit_sample/causal/dynamics_overfit_sample.sbatch",803,0,"i",shellscript,content +3802,10103521,"slurm/dev/mihir/horeka/overfit_sample/causal/dynamics_overfit_sample.sbatch",804,0,"",shellscript,selection_keyboard +3803,10103587,"slurm/dev/mihir/horeka/overfit_sample/causal/dynamics_overfit_sample.sbatch",804,0,"s",shellscript,content +3804,10103588,"slurm/dev/mihir/horeka/overfit_sample/causal/dynamics_overfit_sample.sbatch",805,0,"",shellscript,selection_keyboard +3805,10103759,"slurm/dev/mihir/horeka/overfit_sample/causal/dynamics_overfit_sample.sbatch",805,0,"e",shellscript,content +3806,10103760,"slurm/dev/mihir/horeka/overfit_sample/causal/dynamics_overfit_sample.sbatch",806,0,"",shellscript,selection_keyboard +3807,10109384,"slurm/dev/mihir/horeka/overfit_sample/causal/dynamics_overfit_sample.sbatch",1359,0,"",shellscript,selection_mouse +3808,10110000,"slurm/dev/mihir/horeka/overfit_sample/causal/dynamics_overfit_sample.sbatch",1359,0,"n",shellscript,content +3809,10110001,"slurm/dev/mihir/horeka/overfit_sample/causal/dynamics_overfit_sample.sbatch",1360,0,"",shellscript,selection_keyboard +3810,10110207,"slurm/dev/mihir/horeka/overfit_sample/causal/dynamics_overfit_sample.sbatch",1360,0,"o",shellscript,content +3811,10110208,"slurm/dev/mihir/horeka/overfit_sample/causal/dynamics_overfit_sample.sbatch",1361,0,"",shellscript,selection_keyboard +3812,10110438,"slurm/dev/mihir/horeka/overfit_sample/causal/dynamics_overfit_sample.sbatch",1361,0,"-",shellscript,content +3813,10110438,"slurm/dev/mihir/horeka/overfit_sample/causal/dynamics_overfit_sample.sbatch",1362,0,"",shellscript,selection_keyboard +3814,10111198,"slurm/dev/mihir/horeka/overfit_sample/causal/dynamics_overfit_sample.sbatch",1362,0,"n",shellscript,content +3815,10111198,"slurm/dev/mihir/horeka/overfit_sample/causal/dynamics_overfit_sample.sbatch",1363,0,"",shellscript,selection_keyboard +3816,10111662,"slurm/dev/mihir/horeka/overfit_sample/causal/dynamics_overfit_sample.sbatch",1363,0,"o",shellscript,content +3817,10111663,"slurm/dev/mihir/horeka/overfit_sample/causal/dynamics_overfit_sample.sbatch",1364,0,"",shellscript,selection_keyboard +3818,10111775,"slurm/dev/mihir/horeka/overfit_sample/causal/dynamics_overfit_sample.sbatch",1364,0,"i",shellscript,content +3819,10111775,"slurm/dev/mihir/horeka/overfit_sample/causal/dynamics_overfit_sample.sbatch",1365,0,"",shellscript,selection_keyboard +3820,10111838,"slurm/dev/mihir/horeka/overfit_sample/causal/dynamics_overfit_sample.sbatch",1365,0,"s",shellscript,content +3821,10111838,"slurm/dev/mihir/horeka/overfit_sample/causal/dynamics_overfit_sample.sbatch",1366,0,"",shellscript,selection_keyboard +3822,10112020,"slurm/dev/mihir/horeka/overfit_sample/causal/dynamics_overfit_sample.sbatch",1366,0,"e",shellscript,content +3823,10112021,"slurm/dev/mihir/horeka/overfit_sample/causal/dynamics_overfit_sample.sbatch",1367,0,"",shellscript,selection_keyboard +3824,10114774,"slurm/dev/mihir/horeka/yolo-runs/sampling.sh",0,0,"",shellscript,tab +3825,10116457,"TERMINAL",0,0,"sh slurm/dev/mihir/horeka/overfit_sample/causal/dynamics_overfit_sample.sbatch",,terminal_output +3826,10117190,"TERMINAL",0,0,"\r\n[?2004l\r#!/usr/bin/env bash\r\n\r\n#SBATCH --nodes=1\r\n#SBATCH --ntasks-per-node=1\r\n#SBATCH --time=01:00:00\r\n#SBATCH --partition=accelerated\r\n#SBATCH --cpus-per-task=5\r\n#SBATCH --gres=gpu:1\r\n#SBATCH 
--output=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir/yoloruns/%x_%j.log\r\n#SBATCH --error=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir/yoloruns/%x_%j.log\r\n#SBATCH --job-name=train_dynamics_overfit_sample_causal\r\n\r\n# Log the sbatch script\r\ncat $0\r\n\r\nmodule unload mpi/openmpi/5.0\r\nmodule unload devel/cuda/12.4\r\n# source .venv/bin/activate\r\n\r\narray_records_dir=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_new/open_ai_minecraft_arrayrecords_chunked\r\n\r\njob_name=$SLURM_JOB_NAME\r\nslurm_job_id=$SLURM_JOB_ID\r\n\r\nCHECKPOINT_DIR=$ws_dir/checkpoints/causal/overfit-seed69-1-no-noise/$job_name/$slurm_job_id\r\nmkdir -p $CHECKPOINT_DIR\r\n\r\ntokenizer_ckpt_dir=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data/checkpoints/train_tokenizer_lr_sweep_1e-4_larger_ffn/\r\n\r\n\r\nenv | grep SLURM\r\n\r\nsrun python train_dynamics.py \\r\n --save_ckpt \\r\n --seed=69 \\r\n --num_steps=5000 \\r\n --warmup_steps=0 \\r\n --wsd_decay_steps=0 \\r\n --ckpt_dir $CHECKPOINT_DIR \\r\n --batch_size=1 \\r\n --init_lr=1e-4 \\r\n --max_lr=1e-4 \\r\n --log_image_interval=100 \\r\n --log \\r\n --log_checkpoint_interval=1000 \\r\n --name=dynamics-causal-overfit-no-noise$slurm_job_id \\r\n --tags dynamics causal overfit \\r\n --entity instant-uv \\r\n --project jafar \\r\n --tokenizer_checkpoint=$tokenizer_ckpt_dir \\r\n --data_dir $array_records_dir \\r\n --dyna_dim=128 \\r\n --dyna_num_blocks=2 \\r\n --dyna_num_heads=4\r\n ",,terminal_output +3827,10117380,"TERMINAL",0,0,"SLURM_STEP_NUM_TASKS=1\r\nSLURM_JOB_USER=tum_cte0515\r\nSLURM_TASKS_PER_NODE=1\r\nSLURM_JOB_UID=999226\r\nSLURM_TASK_PID=436020\r\nSLURM_JOB_GPUS=0\r\nSLURM_LOCALID=0\r\nSLURM_SUBMIT_DIR=/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar\r\nSLURMD_NODENAME=hkn0901\r\nSLURM_JOB_START_TIME=1753366425\r\nSLURM_STEP_NODELIST=hkn0901\r\nSLURM_CLUSTER_NAME=hk\r\nSLURM_JOB_END_TIME=1753370025\r\nSLURM_PMI2_SRUN_PORT=34243\r\nSLURM_CPUS_ON_NODE=6\r\nSLURM_JOB_CPUS_PER_NODE=6\r\nSLURM_GPUS_ON_NODE=1\r\nSLURM_GTIDS=0\r\nSLURM_JOB_PARTITION=dev_accelerated-h100\r\nSLURM_TRES_PER_TASK=cpu=5\r\nSLURM_OOM_KILL_STEP=0\r\nSLURM_JOB_NUM_NODES=1\r\nSLURM_STEPID=4294967290\r\nSLURM_JOBID=3373280\r\nSLURM_PTY_PORT=42147\r\nSLURM_JOB_QOS=normal\r\nSLURM_LAUNCH_NODE_IPADDR=10.0.7.199\r\nSLURM_PTY_WIN_ROW=48\r\nSLURM_PMI2_PROC_MAPPING=(vector,(0,1,1))\r\nSLURMD_DEBUG=2\r\nSLURM_PROCID=0\r\nSLURM_CPUS_PER_TASK=5\r\nSLURM_NTASKS=1\r\nSLURM_TOPOLOGY_ADDR=hkibb.hkibbi4.hkibbi4e1.hkn0901\r\nSLURM_TOPOLOGY_ADDR_PATTERN=switch.switch.switch.node\r\nSLURM_SRUN_COMM_HOST=10.0.7.199\r\nSLURM_SCRIPT_CONTEXT=prolog_task\r\nSLURM_PTY_WIN_COL=194\r\nSLURM_NODELIST=hkn0901\r\nSLURM_SRUN_COMM_PORT=46593\r\nSLURM_STEP_ID=4294967290\r\nSLURM_JOB_ACCOUNT=hk-project-p0023960\r\nSLURM_PRIO_PROCESS=0\r\nSLURM_NPROCS=1\r\nSLURM_NNODES=1\r\nSLURM_SUBMIT_HOST=hkn1991.localdomain\r\nSLURM_JOB_ID=3373280\r\nSLURM_NODEID=0\r\nSLURM_STEP_NUM_NODES=1\r\nSLURM_STEP_TASKS_PER_NODE=1\r\nSLURM_MPI_TYPE=pmi2\r\nSLURM_PMI2_STEP_NODES=hkn0901\r\nSLURM_CONF=/etc/slurm/slurm.conf\r\nSLURM_JOB_NAME=interactive\r\nSLURM_NTASKS_PER_NODE=1\r\nSLURM_STEP_LAUNCHER_PORT=46593\r\nSLURM_JOB_GID=502226\r\nSLURM_JOB_NODELIST=hkn0901\r\n",,terminal_output +3828,10117552,"TERMINAL",0,0,"GpuFreq=control_disabled\r\n",,terminal_output +3829,10118437,"TERMINAL",0,0,"bash",,terminal_focus +3830,10119633,"TERMINAL",0,0,"queue",,terminal_command +3831,10119686,"TERMINAL",0,0,"]633;E;2025-07-24 16:48:54 
queue;63badae8-90b1-4579-970f-d00997b22bed]633;C[?1049h(B[?7hEvery 1.0s: squeue --mehkn1991.localdomain: Thu Jul 24 16:48:54 2025JOBID PARTITION NAME USER ST\tTIME NODES NODELIST(REASON)3373277 accelerat train_dy tum_cte0 PD\t0:00\t 8 (Priority)3373276 accelerat train_dy tum_cte0 PD\t0:00\t 2 (Priority)3371237 accelerat train_dy tum_cte0 R 16:14:39\t 8 hkn[0618,0625-0626,0628-0631,0634]3371238 accelerat train_dy tum_cte0 R 16:14:39\t 2 hkn[0706,0710]3372631 accelerat train_dy tum_cte0 R 2:35:42\t 2 hkn[0515,0622]3372629 accelerat train_dy tum_cte0 R 3:21:05\t 8 hkn[0410,0429,0520,0607,0610,0810,0814,0817]3373280 dev_accel interact tum_cte0 R35:09\t 1 hkn0901",,terminal_output
+3832,10119796,"TERMINAL",0,0,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/tyro/_parsers.py:347: UserWarning: The field `param-dtype` is annotated with type ``, but the default value `` has type ``. We'll try to handle this gracefully, but it may cause unexpected behavior.\r\n warnings.warn(message)\r\n/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/tyro/_parsers.py:347: UserWarning: The field `dtype` is annotated with type ``, but the default value `` has type ``. We'll try to handle this gracefully, but it may cause unexpected behavior.\r\n warnings.warn(message)\r\n",,terminal_output
+3833,10120758,"TERMINAL",0,0,"540403610",,terminal_output
+3834,10121785,"TERMINAL",0,0,"611471",,terminal_output
+3835,10121897,"TERMINAL",0,0,"2025-07-24 16:48:56.552705: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output
+3836,10122905,"TERMINAL",0,0,"722582",,terminal_output
+3837,10123886,"TERMINAL",0,0,"833693",,terminal_output
+3838,10124918,"TERMINAL",0,0,"9447104",,terminal_output
+3839,10125612,"TERMINAL",0,0,"Every 1.0s: squeue --mehkn1991.localdomain: Thu Jul 24 16:49:00 2025JOBID PARTITION NAME USER ST\tTIME NODES NODELIST(REASON)3373277 accelerat train_dy tum_cte0 PD\t0:00\t 8 (Priority)3373276 accelerat train_dy tum_cte0 PD\t0:00\t 2 (Priority)3371237 accelerat train_dy tum_cte0 R 16:14:45\t 8 hkn[0618,0625-0626,0628-0631,0634]3371238 accelerat train_dy tum_cte0 R 16:14:45\t 2 hkn[0706,0710]3372631 accelerat train_dy tum_cte0 R 2:35:48\t 2 hkn[0515,0622]3372629 accelerat train_dy tum_cte0 R 3:21:11\t 8 hkn[0410,0429,0520,0607,0610,0810,0814,0817]3373280 dev_accel interact tum_cte0 R35:15\t 1 hkn0901",,terminal_output
+3840,10125658,"TERMINAL",0,0,"[... nine near-duplicate squeue refresh frames (16:49:00-16:49:02) elided ...]",,terminal_output
+3849,10129046,"TERMINAL",0,0,"388148",,terminal_output
+3850,10130078,"TERMINAL",0,0,"499259",,terminal_output
+3851,10131116,"TERMINAL",0,0,"550503620",,terminal_output
+3852,10131417,"TERMINAL",0,0,"2025-07-24 16:49:06.057578: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. 
Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +3853,10132162,"TERMINAL",0,0,"611471",,terminal_output +3854,10133187,"TERMINAL",0,0,"[?1049l\r[?1l>]0;tum_cte0515@hkn1991:/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/causal/overfit-seed69-1/interactive/3373280]633;D;0",,terminal_output +3855,10135492,"TERMINAL",0,0,"fqueue",,terminal_command +3856,10135574,"TERMINAL",0,0,"]633;E;2025-07-24 16:49:10 fqueue;63badae8-90b1-4579-970f-d00997b22bed]633;C[?1049h(B[?7hEvery 1.0s: squeue -o ""%.10i %.16P %.30j %.8u %.8T %.10M %.9l %.6D %R""hkn1991.localdomain: Thu Jul 24 16:49:10 2025JOBIDPARTITIONNAME USER STATE\t TIME TIME_LIMI NODES NODELIST(REASON)3373277\taccelerated train_dynamics_causal_8_node tum_cte0 PENDING\t 0:00 2-00:00:00\t8 (Priority)3373276\taccelerated train_dynamics_causal_2_node tum_cte0 PENDING\t 0:00 2-00:00:00\t2 (Priority)3371237\taccelerated train_dynamics_maskprob_fix_8_ tum_cte0 RUNNING 16:14:55 2-00:00:00\t8 hkn[0618,0625-0626,0628-0631,0634]3371238\taccelerated train_dynamics_maskprob_fix_2_ tum_cte0 RUNNING 16:14:55 2-00:00:00\t2 hkn[0706,0710]3372631\taccelerated train_dynamics_maskprob_fix_2_ tum_cte0 RUNNING 2:35:58 2-00:00:00\t2 hkn[0515,0622]3372629\taccelerated train_dynamics_maskprob_fix_8_ tum_cte0 RUNNING 3:21:21 2-00:00:00\t8 hkn[0410,0429,0520,0607,0610,0810,0814,0817]3373280 dev_accelerated-interactive tum_cte0 RUNNING\t 35:25 1:00:001 hkn0901",,terminal_output +3857,10136627,"TERMINAL",0,0,"166926",,terminal_output +3858,10137665,"TERMINAL",0,0,"2776:0037",,terminal_output +3859,10138690,"TERMINAL",0,0,"388148",,terminal_output +3860,10139702,"TERMINAL",0,0,"499259",,terminal_output +3861,10140000,"TERMINAL",0,0,"bash",,terminal_focus +3862,10140827,"TERMINAL",0,0,"55:005:003630",,terminal_output +3863,10141778,"TERMINAL",0,0,"611471",,terminal_output +3864,10142860,"TERMINAL",0,0,"722582",,terminal_output +3865,10143903,"TERMINAL",0,0,"833693",,terminal_output +3866,10144928,"TERMINAL",0,0,"9447304",,terminal_output +3867,10145082,"TERMINAL",0,0,"scancel 3373277",,terminal_command +3868,10145099,"TERMINAL",0,0,"]633;E;2025-07-24 16:49:19 scancel 3373277;406cfb31-2341-454a-afa8-cae7781806b2]633;C]0;tum_cte0515@hkn1991:~/Projects/jafar]633;D;0]633;P;Cwd=/home/hk-project-p0023960/tum_cte0515/Projects/jafar",,terminal_output +3869,10145947,"TERMINAL",0,0,"\r2055815",,terminal_output +3870,10146984,"TERMINAL",0,0,"166926",,terminal_output +3871,10147998,"TERMINAL",0,0,"2771037",,terminal_output +3872,10149019,"TERMINAL",0,0,"388148",,terminal_output +3873,10149940,"TERMINAL",0,0,"2025-07-24 16:49:24.526735: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. 
Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +3874,10150035,"TERMINAL",0,0,"499259",,terminal_output +3875,10150219,"TERMINAL",0,0,"scancel 3373276",,terminal_command +3876,10150240,"TERMINAL",0,0,"]633;E;2025-07-24 16:49:24 scancel 3373276;406cfb31-2341-454a-afa8-cae7781806b2]633;C",,terminal_output +3877,10151070,"TERMINAL",0,0,"\r51237train_dynamics_maskprob_fix_8_RUNN16:15:18 hkn[0618,0625-0626,0628-0631,0634]82102706,0710]2631 2:36:13515,06222983:21:368410,0429,0520,0607,0610,0810,0814,0817]3280 dev_accelerated-interactive 35:40 1:00:00 1 hkn0901",,terminal_output +3878,10152102,"TERMINAL",0,0,"611471",,terminal_output +3879,10152120,"TERMINAL",0,0,"wandb: Currently logged in as: mihir-mahajan2002 (instant-uv) to https://api.wandb.ai. Use `wandb login --relogin` to force relogin\r\n",,terminal_output +3880,10152809,"TERMINAL",0,0,"wandb: Tracking run with wandb version 0.19.11\r\nwandb: Run data is saved locally in /hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/wandb/run-20250724_164926-gqc4dswd\r\nwandb: Run `wandb offline` to turn off syncing.\r\nwandb: Syncing run dynamics-causal-overfit-no-noise3373280\r\nwandb: ⭐️ View project at https://wandb.ai/instant-uv/jafar\r\nwandb: 🚀 View run at https://wandb.ai/instant-uv/jafar/runs/gqc4dswd\r\n",,terminal_output +3881,10153138,"TERMINAL",0,0,"722582",,terminal_output +3882,10154364,"TERMINAL",0,0,"srun",,terminal_focus +3883,10154766,"TERMINAL",0,0,"8447404",,terminal_output +3884,10155780,"TERMINAL",0,0,"3055815",,terminal_output +3885,10156807,"TERMINAL",0,0,"166926",,terminal_output +3886,10157843,"TERMINAL",0,0,"2772037",,terminal_output +3887,10158958,"TERMINAL",0,0,"388148",,terminal_output +3888,10159914,"TERMINAL",0,0,"499259",,terminal_output +3889,10160999,"TERMINAL",0,0,"520203650",,terminal_output +3890,10162028,"TERMINAL",0,0,"611471",,terminal_output +3891,10163006,"TERMINAL",0,0,"722582",,terminal_output +3892,10164075,"TERMINAL",0,0,"833693",,terminal_output +3893,10165074,"TERMINAL",0,0,"9447504",,terminal_output +3894,10166120,"TERMINAL",0,0,"4055815",,terminal_output +3895,10167139,"TERMINAL",0,0,"166926",,terminal_output +3896,10168270,"TERMINAL",0,0,"2773037",,terminal_output +3897,10169299,"TERMINAL",0,0,"388148",,terminal_output +3898,10170322,"TERMINAL",0,0,"499259",,terminal_output +3899,10171286,"TERMINAL",0,0,"53030366:00",,terminal_output +3900,10172365,"TERMINAL",0,0,"622582",,terminal_output +3901,10173390,"TERMINAL",0,0,"833693",,terminal_output +3902,10174382,"TERMINAL",0,0,"94472:004",,terminal_output +3903,10175416,"TERMINAL",0,0,"5055815",,terminal_output +3904,10176458,"TERMINAL",0,0,"166926",,terminal_output +3905,10177486,"TERMINAL",0,0,"2774037",,terminal_output +3906,10178613,"TERMINAL",0,0,"388148",,terminal_output +3907,10179639,"TERMINAL",0,0,"499259",,terminal_output +3908,10180661,"TERMINAL",0,0,"540403610",,terminal_output +3909,10181687,"TERMINAL",0,0,"611471",,terminal_output +3910,10182713,"TERMINAL",0,0,"722582",,terminal_output +3911,10183735,"TERMINAL",0,0,"833693",,terminal_output +3912,10184758,"TERMINAL",0,0,"9447104",,terminal_output +3913,10185771,"TERMINAL",0,0,"50:0055815",,terminal_output +3914,10186808,"TERMINAL",0,0,"166926",,terminal_output +3915,10186920,"TERMINAL",0,0,"WARNING:absl:Missing metrics for step 40000\r\nERROR:absl:File 
/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data/checkpoints/train_tokenizer_lr_sweep_1e-4_larger_ffn/040000/metrics/metrics not found.\r\nWARNING:absl:Missing metrics for step 53000\r\nERROR:absl:File /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data/checkpoints/train_tokenizer_lr_sweep_1e-4_larger_ffn/053000/metrics/metrics not found.\r\nWARNING:absl:Missing metrics for step 52000\r\nERROR:absl:File /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data/checkpoints/train_tokenizer_lr_sweep_1e-4_larger_ffn/052000/metrics/metrics not found.\r\nWARNING:absl:Missing metrics for step 51000\r\nERROR:absl:File /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data/checkpoints/train_tokenizer_lr_sweep_1e-4_larger_ffn/051000/metrics/metrics not found.\r\nWARNING:absl:Missing metrics for step 20000\r\nERROR:absl:File /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data/checkpoints/train_tokenizer_lr_sweep_1e-4_larger_ffn/020000/metrics/metrics not found.\r\n",,terminal_output +3916,10187930,"TERMINAL",0,0,"2775037",,terminal_output +3917,10188955,"TERMINAL",0,0,"388148",,terminal_output +3918,10189910,"TERMINAL",0,0,"499259",,terminal_output +3919,10190948,"TERMINAL",0,0,"550503620",,terminal_output +3920,10192008,"TERMINAL",0,0,"611471",,terminal_output +3921,10193053,"TERMINAL",0,0,"722582",,terminal_output +3922,10194077,"TERMINAL",0,0,"833693",,terminal_output +3923,10195034,"TERMINAL",0,0,"2025-07-24 16:50:09.728698: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n2025-07-24 16:50:09.729164: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n2025-07-24 16:50:09.731107: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n2025-07-24 16:50:09.731126: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n2025-07-24 16:50:09.731912: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. 
Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +3924,10195113,"TERMINAL",0,0,"9447204",,terminal_output +3925,10196123,"TERMINAL",0,0,"1055815",,terminal_output +3926,10197156,"TERMINAL",0,0,"166926",,terminal_output +3927,10198192,"TERMINAL",0,0,"2777:0037",,terminal_output +3928,10199228,"TERMINAL",0,0,"388148",,terminal_output +3929,10200263,"TERMINAL",0,0,"499259",,terminal_output +3930,10201292,"TERMINAL",0,0,"56:006:003630",,terminal_output +3931,10202329,"TERMINAL",0,0,"622582",,terminal_output +3932,10203397,"TERMINAL",0,0,"833693",,terminal_output +3933,10204397,"TERMINAL",0,0,"9447304",,terminal_output +3934,10205426,"TERMINAL",0,0,"2055815",,terminal_output +3935,10206627,"TERMINAL",0,0,"166926",,terminal_output +3936,10207499,"TERMINAL",0,0,"2771037",,terminal_output +3937,10208620,"TERMINAL",0,0,"388148",,terminal_output +3938,10209566,"TERMINAL",0,0,"499259",,terminal_output +3939,10210596,"TERMINAL",0,0,"510103640",,terminal_output +3940,10211632,"TERMINAL",0,0,"611471",,terminal_output +3941,10212671,"TERMINAL",0,0,"722582",,terminal_output +3942,10213695,"TERMINAL",0,0,"833693",,terminal_output +3943,10214761,"TERMINAL",0,0,"9447404",,terminal_output +3944,10215760,"TERMINAL",0,0,"3055815",,terminal_output +3945,10216806,"TERMINAL",0,0,"166926",,terminal_output +3946,10217829,"TERMINAL",0,0,"2772037",,terminal_output +3947,10218961,"TERMINAL",0,0,"388148",,terminal_output +3948,10219903,"TERMINAL",0,0,"499259",,terminal_output +3949,10220937,"TERMINAL",0,0,"520203650",,terminal_output +3950,10222031,"TERMINAL",0,0,"611471",,terminal_output +3951,10223053,"TERMINAL",0,0,"722582",,terminal_output +3952,10224085,"TERMINAL",0,0,"833693",,terminal_output +3953,10225079,"TERMINAL",0,0,"9447504",,terminal_output +3954,10226113,"TERMINAL",0,0,"4055815",,terminal_output +3955,10227145,"TERMINAL",0,0,"166926",,terminal_output +3956,10228278,"TERMINAL",0,0,"2773037",,terminal_output +3957,10229211,"TERMINAL",0,0,"388148",,terminal_output +3958,10229419,"TERMINAL",0,0,"Running on 1 devices.\r\nCounting all components: ['tokenizer', 'lam', 'dynamics']\r\nParameter counts:\r\n{'tokenizer': 33750256, 'lam': 17229792, 'dynamics': 1603072, 'total': 52583120}\r\nStep 0, loss: 8.970947265625\r\nStep 1, loss: 8.920546531677246\r\nStep 2, loss: 8.609445571899414\r\nStep 3, loss: 8.226251602172852\r\nStep 4, loss: 8.90990924835205\r\nStep 5, loss: 8.682178497314453\r\nStep 6, loss: 7.778209686279297\r\nStep 7, loss: 7.734697341918945\r\nStep 8, loss: 7.636839389801025\r\nStep 9, loss: 7.535136699676514\r\nStep 10, loss: 7.442539691925049\r\nStep 11, loss: 7.3670430183410645\r\nStep 12, loss: 7.308379173278809\r\nStep 13, loss: 7.26580810546875\r\nStep 14, loss: 7.232280254364014\r\nStep 15, loss: 7.201952934265137\r\nStep 16, loss: 7.169801712036133\r\nStep 17, loss: 7.138280868530273\r\nStep 18, loss: 7.107698917388916\r\nStep 19, loss: 7.08099365234375\r\nStep 20, loss: 7.056901931762695\r\nStep 21, loss: 7.035549163818359\r\nStep 22, loss: 7.015260696411133\r\nStep 23, loss: 6.995325088500977\r\nStep 24, loss: 6.975472927093506\r\nStep 25, loss: 6.9559526443481445\r\nStep 26, loss: 6.9368743896484375\r\nStep 27, loss: 6.918121337890625\r\nStep 28, loss: 6.899341106414795\r\nStep 29, loss: 6.880728721618652\r\nStep 30, loss: 6.861785411834717\r\nStep 31, loss: 6.8434858322143555\r\nStep 32, loss: 6.825204849243164\r\nStep 33, loss: 6.807171821594238\r\nStep 34, loss: 
6.788354396820068\r\nStep 35, loss: 6.769634246826172\r\nStep 36, loss: 6.751794815063477\r\nStep 37, loss: 6.735315799713135\r\nStep 38, loss: 6.7190423011779785\r\nStep 39, loss: 6.702373027801514\r\nStep 40, loss: 6.684907913208008\r\nStep 41, loss: 6.6677422523498535\r\nStep 42, loss: 6.651083469390869\r\nStep 43, loss: 6.634381294250488\r\nStep 44, loss: 6.617884159088135\r\nStep 45, loss: 6.60109281539917\r\nStep 46, loss: 6.584770679473877\r\nStep 47, loss: 6.5678510665893555\r\nStep 48, loss: 6.55124568939209\r\nStep 49, loss: 6.53401517868042\r\nStep 50, loss: 6.516782760620117\r\nStep 51, loss: 6.499182224273682\r\nStep 52, loss: 6.481422424316406\r\nStep 53, loss: 6.4634528160095215\r\nStep 54, loss: 6.444797515869141\r\nStep 55, loss: 6.425774097442627\r\nStep 56, loss: 6.407233715057373\r\nStep 57, loss: 6.388137340545654\r\nStep 58, loss: 6.3688812255859375\r\nStep 59, loss: 6.348977565765381\r\nStep 60, loss: 6.328503608703613\r\nStep 61, loss: 6.308149337768555\r\nStep 62, loss: 6.2876877784729\r\nStep 63, loss: 6.26708459854126\r\nStep 64, loss: 6.24594259262085\r\nStep 65, loss: 6.224304676055908\r\nStep 66, loss: 6.20258092880249\r\nStep 67, loss: 6.180483341217041\r\nStep 68, loss: 6.158143520355225\r\nStep 69, loss: 6.135705471038818\r\nStep 70, loss: 6.112542629241943\r\nStep 71, loss: 6.089583396911621\r\nStep 72, loss: 6.066068649291992\r\nStep 73, loss: 6.042571544647217\r\nStep 74, loss: 6.018616199493408\r\nStep 75, loss: 5.994479179382324\r\nStep 76, loss: 5.9700703620910645\r\nStep 77, loss: 5.945438861846924\r\nStep 78, loss: 5.920638561248779\r\nStep 79, loss: 5.895765781402588\r\nStep 80, loss: 5.869575500488281\r\nStep 81, loss: 5.844583511352539\r\nStep 82, loss: 5.819002151489258\r\nStep 83, loss: 5.793623924255371\r\nStep 84, loss: 7.228024005889893\r\nStep 85, loss: 7.073573589324951\r\nStep 86, loss: 6.900302886962891\r\nStep 87, loss: 6.734625339508057\r\nStep 88, loss: 6.587531089782715\r\nStep 89, loss: 6.463722229003906\r\nStep 90, loss: 6.361710071563721\r\nStep 91, loss: 6.2792744636535645\r\nStep 92, loss: 6.213166236877441\r\nStep 93, loss: 6.163778305053711\r\nStep 94, loss: 6.7910871505737305\r\nStep 95, loss: 6.134883880615234\r\nStep 96, loss: 6.093475818634033\r\nStep 97, loss: 6.07018518447876\r\nStep 98, loss: 6.23915433883667\r\nStep 99, loss: 6.106692314147949\r\nStep 100, loss: 6.003565311431885\r\nStep 101, loss: 5.846973419189453\r\nStep 102, loss: 5.662746906280518\r\nStep 103, loss: 5.643380641937256\r\nStep 104, loss: 5.6271586418151855\r\nStep 105, loss: 5.75154972076416\r\nStep 106, loss: 5.946041107177734\r\nStep 107, loss: 5.948188781738281\r\nStep 108, loss: 5.909734725952148\r\nStep 109, loss: 5.875985145568848\r\nStep 110, loss: 5.872694492340088\r\nStep 111, loss: 5.821035861968994\r\nStep 112, loss: 5.812377452850342\r\nStep 113, loss: 5.649721622467041\r\nStep 114, loss: 5.580887794494629\r\nStep 115, loss: 5.906697750091553\r\nStep 116, loss: 5.8124098777771\r\nStep 117, loss: 5.7715582847595215\r\nStep 118, loss: 5.762455463409424\r\nStep 119, loss: 5.8029656410217285\r\nStep 120, loss: 5.71148157119751\r\nStep 121, loss: 5.689708709716797\r\nStep 122, loss: 5.833056926727295\r\nStep 123, loss: 5.765722751617432\r\nStep 124, loss: 5.652393341064453\r\nStep 125, loss: 5.625919342041016\r\nStep 126, loss: 5.674338340759277\r\nStep 127, loss: 5.636416912078857\r\nStep 128, loss: 5.678064346313477\r\nStep 129, loss: 5.563101768493652\r\nStep 130, loss: 5.528877258300781\r\nStep 131, loss: 5.501719951629639\r\nStep 132, 
loss: 5.52341365814209\r\nStep 133, loss: 5.51692008972168\r\nStep 134, loss: 5.527283668518066\r\nStep 135, loss: 5.446900367736816\r\nStep 136, loss: 5.409412384033203\r\nStep 137, loss: 5.4295549392700195\r\nStep 138, loss: 5.423775672912598\r\nStep 139, loss: 5.407618999481201\r\nStep 140, loss: 5.339876174926758\r\nStep 141, loss: 5.420855522155762\r\nStep 142, loss: 5.326288223266602\r\nStep 143, loss: 5.365077972412109\r\nStep 144, loss: 5.416205883026123\r\nStep 145, loss: 5.257021427154541\r\nStep 146, loss: 5.383056163787842\r\nStep 147, loss: 5.3420820236206055\r\nStep 148, loss: 5.317862510681152\r\nStep 149, loss: 5.224394798278809\r\nStep 150, loss: 5.208554267883301\r\nStep 151, loss: 5.3093109130859375\r\nStep 152, loss: 5.177433967590332\r\nStep 153, loss: 5.318246841430664\r\nStep 154, loss: 5.236791610717773\r\nStep 155, loss: 5.1228790283203125\r\nStep 156, loss: 5.159187316894531\r\nStep 157, loss: 5.210168361663818\r\nStep 158, loss: 5.189594268798828\r\nStep 159, loss: 5.091365814208984\r\nStep 160, loss: 5.060897350311279\r\nStep 161, loss: 5.130372524261475\r\nStep 162, loss: 5.140561103820801\r\nStep 163, loss: 5.112198352813721\r\nStep 164, loss: 5.103490352630615\r\nStep 165, loss: 5.091122627258301\r\nStep 166, loss: 5.0626540184021\r\nStep 167, loss: 5.010951995849609\r\nStep 168, loss: 5.018534183502197\r\nStep 169, loss: 5.228350639343262\r\nStep 170, loss: 5.058743000030518\r\nStep 171, loss: 5.020871162414551\r\nStep 172, loss: 4.998404502868652\r\nStep 173, loss: 4.984234809875488\r\nStep 174, loss: 4.9419450759887695\r\nStep 175, loss: 4.9393439292907715\r\nStep 176, loss: 4.966088771820068\r\nStep 177, loss: 4.880429267883301\r\nStep 178, loss: 4.858409404754639\r\nStep 179, loss: 4.882241249084473\r\nStep 180, loss: 5.034587383270264\r\nStep 181, loss: 4.924006462097168\r\nStep 182, loss: 4.828492164611816\r\nStep 183, loss: 4.80021858215332\r\nStep 184, loss: 4.936275959014893\r\nStep 185, loss: 4.916489124298096\r\nStep 186, loss: 4.8523430824279785\r\nStep 187, loss: 4.83500337600708\r\nStep 188, loss: 4.77897310256958\r\nStep 189, loss: 4.745491981506348\r\nStep 190, loss: 5.023622035980225\r\nStep 191, loss: 4.995785713195801\r\nStep 192, loss: 4.937142372131348\r\nStep 193, loss: 4.864293098449707\r\nStep 194, loss: 4.821167945861816\r\nStep 195, loss: 4.857395172119141\r\nStep 196, loss: 4.811708927154541\r\nStep 197, loss: 4.715112686157227\r\nStep 198, loss: 4.688528537750244\r\nStep 199, loss: 4.657063007354736\r\nStep 200, loss: 4.644535064697266\r\nStep 201, loss: 4.787344455718994\r\nStep 202, loss: 4.793875694274902\r\nStep 203, loss: 4.748939514160156\r\nStep 204, loss: 4.896622180938721\r\nStep 205, loss: 4.757108688354492\r\nStep 206, loss: 4.707309246063232\r\nStep 207, loss: 4.850205898284912\r\nStep 208, loss: 4.783185005187988\r\nStep 209, loss: 4.66239595413208\r\nStep 210, loss: 4.644133567810059\r\nStep 211, loss: 4.655364513397217\r\nStep 212, loss: 4.646564960479736\r\nStep 213, loss: 4.645313739776611\r\nStep 214, loss: 4.695089817047119\r\nStep 215, loss: 4.616159915924072\r\nStep 216, loss: 4.571486473083496\r\nStep 217, loss: 4.556234836578369\r\nStep 218, loss: 4.635240077972412\r\nStep 219, loss: 4.61767578125\r\nStep 220, loss: 4.598281383514404\r\nStep 221, loss: 4.518854141235352\r\nStep 222, loss: 4.532962799072266\r\nStep 223, loss: 4.575425624847412\r\nStep 224, loss: 4.7765607833862305\r\nStep 225, loss: 4.724386215209961\r\nStep 226, loss: 4.544772148132324\r\nStep 227, loss: 4.558614253997803\r\nStep 228, loss: 
4.622288227081299\r\nStep 229, loss: 4.593726634979248\r\nStep 230, loss: 4.532766342163086\r\nStep 231, loss: 4.459880828857422\r\nStep 232, loss: 4.447173595428467\r\nStep 233, loss: 4.465651512145996\r\nStep 234, loss: 4.436201095581055\r\nStep 235, loss: 4.392554759979248\r\nStep 236, loss: 4.373525619506836\r\nStep 237, loss: 4.354145050048828\r\nStep 238, loss: 4.604005336761475\r\n",,terminal_output +3959,10230324,"TERMINAL",0,0,"499259",,terminal_output +3960,10231276,"TERMINAL",0,0,"53030367:00",,terminal_output +3961,10232377,"TERMINAL",0,0,"622582",,terminal_output +3962,10233402,"TERMINAL",0,0,"833693",,terminal_output +3963,10234381,"TERMINAL",0,0,"94473:004",,terminal_output +3964,10235041,"TERMINAL",0,0,"Step 239, loss: 4.5637030601501465\r\nStep 240, loss: 4.449256420135498\r\nStep 241, loss: 4.472448825836182\r\nStep 242, loss: 4.465570449829102\r\nStep 243, loss: 4.380570411682129\r\nStep 244, loss: 4.407931327819824\r\nStep 245, loss: 4.324913024902344\r\nStep 246, loss: 4.948151588439941\r\nStep 247, loss: 4.917622089385986\r\nStep 248, loss: 4.974345684051514\r\nStep 249, loss: 5.26238489151001\r\nStep 250, loss: 5.1873626708984375\r\nStep 251, loss: 4.785111427307129\r\nStep 252, loss: 4.715816020965576\r\nStep 253, loss: 4.326463222503662\r\nStep 254, loss: 4.268327236175537\r\nStep 255, loss: 4.240422248840332\r\nStep 256, loss: 4.236913681030273\r\nStep 257, loss: 4.235619068145752\r\nStep 258, loss: 4.274286270141602\r\nStep 259, loss: 4.561218738555908\r\nStep 260, loss: 4.603343963623047\r\nStep 261, loss: 4.54098653793335\r\nStep 262, loss: 4.4940924644470215\r\nStep 263, loss: 4.4457502365112305\r\nStep 264, loss: 4.321335792541504\r\nStep 265, loss: 4.4671101570129395\r\nStep 266, loss: 4.381819725036621\r\nStep 267, loss: 4.3429789543151855\r\nStep 268, loss: 4.431487083435059\r\nStep 269, loss: 4.370826244354248\r\nStep 270, loss: 4.327877044677734\r\nStep 271, loss: 4.275437831878662\r\nStep 272, loss: 4.217920303344727\r\nStep 273, loss: 4.479426860809326\r\nStep 274, loss: 4.487495422363281\r\nStep 275, loss: 4.205310344696045\r\nStep 276, loss: 4.191920280456543\r\nStep 277, loss: 4.264395236968994\r\nStep 278, loss: 4.257936954498291\r\nStep 279, loss: 4.427878379821777\r\nStep 280, loss: 4.2576823234558105\r\nStep 281, loss: 4.175642967224121\r\nStep 282, loss: 4.156636714935303\r\nStep 283, loss: 4.310987949371338\r\nStep 284, loss: 4.223696708679199\r\nStep 285, loss: 4.140011787414551\r\nStep 286, loss: 4.132030487060547\r\nStep 287, loss: 4.112862586975098\r\nStep 288, loss: 4.1739182472229\r\nStep 289, loss: 4.174359321594238\r\nStep 290, loss: 4.232516288757324\r\nStep 291, loss: 4.336156368255615\r\nStep 292, loss: 4.39429235458374\r\nStep 293, loss: 4.353558540344238\r\nStep 294, loss: 4.170259952545166\r\nStep 295, loss: 4.129907131195068\r\nStep 296, loss: 4.286856174468994\r\nStep 297, loss: 4.10078763961792\r\nStep 298, loss: 4.061574935913086\r\nStep 299, loss: 3.972627639770508\r\nStep 300, loss: 4.008823871612549\r\nStep 301, loss: 4.047136306762695\r\nStep 302, loss: 4.028026580810547\r\nStep 303, loss: 3.97763991355896\r\nStep 304, loss: 4.047784328460693\r\nStep 305, loss: 3.9618794918060303\r\nStep 306, loss: 3.9945099353790283\r\nStep 307, loss: 4.017402172088623\r\nStep 308, loss: 3.9693164825439453\r\nStep 309, loss: 3.9679670333862305\r\nStep 310, loss: 3.9535627365112305\r\nStep 311, loss: 3.9334938526153564\r\nStep 312, loss: 3.975756883621216\r\nStep 313, loss: 3.96964430809021\r\nStep 314, loss: 3.889720916748047\r\nStep 
315, loss: 3.8499557971954346\r\nStep 316, loss: 3.8592917919158936\r\nStep 317, loss: 3.8856468200683594\r\nStep 318, loss: 3.9554975032806396\r\nStep 319, loss: 3.872201681137085\r\nStep 320, loss: 3.853358268737793\r\nStep 321, loss: 3.8328702449798584\r\nStep 322, loss: 3.8372671604156494\r\nStep 323, loss: 3.823647975921631\r\nStep 324, loss: 3.8535239696502686\r\nStep 325, loss: 3.8253018856048584\r\nStep 326, loss: 3.7945735454559326\r\nStep 327, loss: 3.784423828125\r\nStep 328, loss: 3.78865647315979\r\nStep 329, loss: 3.7692291736602783\r\nStep 330, loss: 3.8148951530456543\r\nStep 331, loss: 3.8439924716949463\r\nStep 332, loss: 3.8190317153930664\r\nStep 333, loss: 3.7166974544525146\r\nStep 334, loss: 3.7122080326080322\r\nStep 335, loss: 3.7320024967193604\r\nStep 336, loss: 3.7224502563476562\r\nStep 337, loss: 3.746755838394165\r\nStep 338, loss: 3.808483600616455\r\nStep 339, loss: 3.7154974937438965\r\nStep 340, loss: 3.6312057971954346\r\nStep 341, loss: 3.6377363204956055\r\nStep 342, loss: 3.605238676071167\r\nStep 343, loss: 3.5857272148132324\r\nStep 344, loss: 3.5967864990234375\r\nStep 345, loss: 3.6781888008117676\r\nStep 346, loss: 3.6223700046539307\r\nStep 347, loss: 3.592221736907959\r\nStep 348, loss: 3.6423370838165283\r\nStep 349, loss: 3.6522772312164307\r\nStep 350, loss: 3.5942366123199463\r\nStep 351, loss: 3.5495307445526123\r\nStep 352, loss: 3.5418012142181396\r\nStep 353, loss: 3.703362226486206\r\nStep 354, loss: 3.5873258113861084\r\nStep 355, loss: 3.5269298553466797\r\nStep 356, loss: 3.4913136959075928\r\nStep 357, loss: 3.468705415725708\r\nStep 358, loss: 3.4383411407470703\r\nStep 359, loss: 3.4752721786499023\r\nStep 360, loss: 3.5524861812591553\r\nStep 361, loss: 3.550297498703003\r\nStep 362, loss: 3.519166946411133\r\nStep 363, loss: 3.4665334224700928\r\nStep 364, loss: 3.4734697341918945\r\nStep 365, loss: 3.4336705207824707\r\nStep 366, loss: 3.435974597930908\r\nStep 367, loss: 3.418926477432251\r\nStep 368, loss: 3.404459238052368\r\nStep 369, loss: 3.4348771572113037\r\nStep 370, loss: 3.396491050720215\r\nStep 371, loss: 3.3846092224121094\r\nStep 372, loss: 3.3657338619232178\r\nStep 373, loss: 3.4035909175872803\r\nStep 374, loss: 3.429360866546631\r\nStep 375, loss: 3.3553566932678223\r\nStep 376, loss: 3.338202714920044\r\nStep 377, loss: 3.323160409927368\r\nStep 378, loss: 3.314152956008911\r\nStep 379, loss: 3.3369483947753906\r\nStep 380, loss: 3.3325724601745605\r\nStep 381, loss: 3.3309035301208496\r\nStep 382, loss: 3.3175337314605713\r\nStep 383, loss: 3.280341148376465\r\nStep 384, loss: 3.257169008255005\r\nStep 385, loss: 3.2781755924224854\r\nStep 386, loss: 3.2503669261932373\r\nStep 387, loss: 3.223836898803711\r\nStep 388, loss: 3.2086308002471924\r\nStep 389, loss: 3.1995737552642822\r\nStep 390, loss: 3.2221310138702393\r\nStep 391, loss: 3.3142306804656982\r\nStep 392, loss: 3.2467944622039795\r\nStep 393, loss: 3.208667755126953\r\nStep 394, loss: 3.2665445804595947\r\nStep 395, loss: 3.1671597957611084\r\nStep 396, loss: 3.182051181793213\r\nStep 397, loss: 3.2156248092651367\r\nStep 398, loss: 3.240788698196411\r\nStep 399, loss: 3.2169623374938965\r\nStep 400, loss: 3.2174761295318604\r\nStep 401, loss: 3.1909642219543457\r\nStep 402, loss: 3.172123670578003\r\nStep 403, loss: 3.1112565994262695\r\nStep 404, loss: 3.149679660797119\r\nStep 405, loss: 3.1837329864501953\r\nStep 406, loss: 3.233766794204712\r\nStep 407, loss: 3.1735355854034424\r\nStep 408, loss: 3.1419215202331543\r\nStep 409, loss: 
3.0836498737335205\r\nStep 410, loss: 3.0581068992614746\r\nStep 411, loss: 3.0390405654907227\r\nStep 412, loss: 3.041393280029297\r\nStep 413, loss: 3.030224561691284\r\nStep 414, loss: 3.0158557891845703\r\nStep 415, loss: 3.007946491241455\r\nStep 416, loss: 3.0150701999664307\r\nStep 417, loss: 2.991776466369629\r\nStep 418, loss: 2.989900827407837\r\nStep 419, loss: 3.020813465118408\r\nStep 420, loss: 3.048997402191162\r\nStep 421, loss: 3.0070793628692627\r\nStep 422, loss: 3.02970290184021\r\nStep 423, loss: 3.022357940673828\r\nStep 424, loss: 2.9924750328063965\r\nStep 425, loss: 2.975386142730713\r\nStep 426, loss: 2.933770179748535\r\nStep 427, loss: 2.918210744857788\r\nStep 428, loss: 2.9129858016967773\r\nStep 429, loss: 2.9357681274414062\r\nStep 430, loss: 2.900171995162964\r\nStep 431, loss: 2.868572950363159\r\nStep 432, loss: 2.8696093559265137\r\nStep 433, loss: 2.8585011959075928\r\nStep 434, loss: 2.8542368412017822\r\nStep 435, loss: 2.8781497478485107\r\nStep 436, loss: 2.929823160171509\r\nStep 437, loss: 2.9732887744903564\r\nStep 438, loss: 2.942631959915161\r\nStep 439, loss: 2.881298542022705\r\nStep 440, loss: 2.8917765617370605\r\nStep 441, loss: 2.8619256019592285\r\nStep 442, loss: 2.8414323329925537\r\nStep 443, loss: 2.856564521789551\r\nStep 444, loss: 2.803628444671631\r\nStep 445, loss: 2.8273823261260986\r\nStep 446, loss: 2.8048508167266846\r\nStep 447, loss: 2.784576177597046\r\nStep 448, loss: 2.7716739177703857\r\nStep 449, loss: 2.831552505493164\r\nStep 450, loss: 2.8269503116607666\r\nStep 451, loss: 2.801234483718872\r\nStep 452, loss: 2.7844390869140625\r\nStep 453, loss: 2.823249578475952\r\nStep 454, loss: 2.7646706104278564\r\nStep 455, loss: 2.723184108734131\r\nStep 456, loss: 2.7008702754974365\r\nStep 457, loss: 2.6986124515533447\r\nStep 458, loss: 2.6883809566497803\r\nStep 459, loss: 2.6711626052856445\r\nStep 460, loss: 2.713655710220337\r\nStep 461, loss: 2.7099480628967285\r\nStep 462, loss: 2.704972267150879\r\nStep 463, loss: 2.7037296295166016\r\nStep 464, loss: 2.6825311183929443\r\nStep 465, loss: 2.6992881298065186\r\nStep 466, loss: 2.6633429527282715\r\nStep 467, loss: 2.6517701148986816\r\nStep 468, loss: 2.638716220855713\r\nStep 469, loss: 2.6271300315856934\r\nStep 470, loss: 2.6783318519592285\r\nStep 471, loss: 2.633916139602661\r\nStep 472, loss: 2.608354091644287\r\nStep 473, loss: 2.6293163299560547\r\nStep 474, loss: 2.5793991088867188\r\nStep 475, loss: 2.5533437728881836\r\n",,terminal_output +3965,10235414,"TERMINAL",0,0,"5055815",,terminal_output +3966,10236473,"TERMINAL",0,0,"166926",,terminal_output +3967,10237477,"TERMINAL",0,0,"2774037",,terminal_output +3968,10238517,"TERMINAL",0,0,"388148",,terminal_output +3969,10239537,"TERMINAL",0,0,"499259",,terminal_output +3970,10240671,"TERMINAL",0,0,"540403610",,terminal_output +3971,10240686,"TERMINAL",0,0,"Step 476, loss: 2.5922770500183105\r\nStep 477, loss: 2.5859947204589844\r\nStep 478, loss: 2.5449955463409424\r\nStep 479, loss: 2.5689854621887207\r\nStep 480, loss: 2.5318350791931152\r\nStep 481, loss: 2.5170466899871826\r\nStep 482, loss: 2.51200795173645\r\nStep 483, loss: 2.5405194759368896\r\nStep 484, loss: 2.536344528198242\r\nStep 485, loss: 2.5559394359588623\r\nStep 486, loss: 2.5291733741760254\r\nStep 487, loss: 2.4909286499023438\r\nStep 488, loss: 2.5190250873565674\r\nStep 489, loss: 2.4980108737945557\r\nStep 490, loss: 2.4472930431365967\r\nStep 491, loss: 2.470956325531006\r\nStep 492, loss: 2.4470112323760986\r\nStep 493, loss: 
2.4093196392059326\r\nStep 494, loss: 2.4876291751861572\r\nStep 495, loss: 2.466196298599243\r\nStep 496, loss: 2.4645817279815674\r\nStep 497, loss: 2.4705545902252197\r\nStep 498, loss: 2.4379940032958984\r\nStep 499, loss: 2.4188151359558105\r\nStep 500, loss: 2.439887046813965\r\nStep 501, loss: 2.415673017501831\r\nStep 502, loss: 2.362355947494507\r\nStep 503, loss: 2.360138177871704\r\nStep 504, loss: 2.363220691680908\r\nStep 505, loss: 2.3614871501922607\r\nStep 506, loss: 2.344993829727173\r\nStep 507, loss: 2.3374128341674805\r\nStep 508, loss: 2.3545124530792236\r\nStep 509, loss: 2.3491435050964355\r\nStep 510, loss: 2.339677095413208\r\nStep 511, loss: 2.324512243270874\r\nStep 512, loss: 2.3097379207611084\r\nStep 513, loss: 2.317112445831299\r\nStep 514, loss: 2.3556830883026123\r\nStep 515, loss: 2.318377733230591\r\nStep 516, loss: 2.29813814163208\r\nStep 517, loss: 2.2969565391540527\r\nStep 518, loss: 2.277107000350952\r\nStep 519, loss: 2.3069605827331543\r\nStep 520, loss: 2.310940742492676\r\nStep 521, loss: 2.2752037048339844\r\nStep 522, loss: 2.270472526550293\r\nStep 523, loss: 2.2575714588165283\r\nStep 524, loss: 2.2906904220581055\r\nStep 525, loss: 2.2530734539031982\r\nStep 526, loss: 2.222980499267578\r\nStep 527, loss: 2.2204065322875977\r\nStep 528, loss: 2.2236790657043457\r\nStep 529, loss: 2.1973726749420166\r\nStep 530, loss: 2.181072473526001\r\nStep 531, loss: 2.232959032058716\r\nStep 532, loss: 2.216320514678955\r\nStep 533, loss: 2.199305772781372\r\nStep 534, loss: 2.182588815689087\r\nStep 535, loss: 2.168705701828003\r\nStep 536, loss: 2.1546521186828613\r\nStep 537, loss: 2.1420719623565674\r\nStep 538, loss: 2.1542654037475586\r\nStep 539, loss: 2.1295528411865234\r\nStep 540, loss: 2.1076786518096924\r\nStep 541, loss: 2.1135811805725098\r\nStep 542, loss: 2.087582588195801\r\nStep 543, loss: 2.116379976272583\r\nStep 544, loss: 2.0895705223083496\r\nStep 545, loss: 2.099026679992676\r\nStep 546, loss: 2.058218240737915\r\nStep 547, loss: 2.046268939971924\r\nStep 548, loss: 2.0339033603668213\r\nStep 549, loss: 2.0074844360351562\r\nStep 550, loss: 1.9966540336608887\r\nStep 551, loss: 1.990099310874939\r\nStep 552, loss: 2.0043985843658447\r\nStep 553, loss: 2.010704517364502\r\nStep 554, loss: 2.0291848182678223\r\nStep 555, loss: 2.0369608402252197\r\nStep 556, loss: 2.0173635482788086\r\nStep 557, loss: 1.9921727180480957\r\nStep 558, loss: 1.9518043994903564\r\nStep 559, loss: 1.9438753128051758\r\nStep 560, loss: 1.9416176080703735\r\nStep 561, loss: 1.9372470378875732\r\nStep 562, loss: 1.9543579816818237\r\nStep 563, loss: 1.9718092679977417\r\nStep 564, loss: 1.9605412483215332\r\nStep 565, loss: 1.9306827783584595\r\nStep 566, loss: 1.9324830770492554\r\nStep 567, loss: 1.8972344398498535\r\nStep 568, loss: 1.8605456352233887\r\nStep 569, loss: 1.8721150159835815\r\nStep 570, loss: 1.8564603328704834\r\nStep 571, loss: 1.8354642391204834\r\nStep 572, loss: 1.8508400917053223\r\nStep 573, loss: 1.8212889432907104\r\nStep 574, loss: 1.8120074272155762\r\nStep 575, loss: 1.7969613075256348\r\nStep 576, loss: 1.8112797737121582\r\nStep 577, loss: 1.8248268365859985\r\nStep 578, loss: 1.7854175567626953\r\nStep 579, loss: 1.8023117780685425\r\nStep 580, loss: 1.7691149711608887\r\nStep 581, loss: 1.7661339044570923\r\nStep 582, loss: 1.7452317476272583\r\nStep 583, loss: 1.7968330383300781\r\nStep 584, loss: 1.7628419399261475\r\nStep 585, loss: 1.7360541820526123\r\nStep 586, loss: 1.7456283569335938\r\nStep 587, loss: 
1.748909592628479\r\nStep 588, loss: 1.7458723783493042\r\nStep 589, loss: 1.7367409467697144\r\nStep 590, loss: 1.7192716598510742\r\nStep 591, loss: 1.7170348167419434\r\nStep 592, loss: 1.7020292282104492\r\nStep 593, loss: 1.6742652654647827\r\nStep 594, loss: 1.6958400011062622\r\nStep 595, loss: 1.6533998250961304\r\nStep 596, loss: 1.671288013458252\r\nStep 597, loss: 1.6642144918441772\r\nStep 598, loss: 1.6314775943756104\r\nStep 599, loss: 1.6347264051437378\r\nStep 600, loss: 1.6106704473495483\r\nStep 601, loss: 1.611275553703308\r\nStep 602, loss: 1.6072139739990234\r\nStep 603, loss: 1.6027125120162964\r\nStep 604, loss: 1.5692474842071533\r\nStep 605, loss: 1.5731924772262573\r\nStep 606, loss: 1.6050331592559814\r\nStep 607, loss: 1.5819165706634521\r\nStep 608, loss: 1.5690124034881592\r\nStep 609, loss: 1.5902926921844482\r\nStep 610, loss: 1.5882354974746704\r\nStep 611, loss: 1.6092051267623901\r\nStep 612, loss: 1.5884971618652344\r\nStep 613, loss: 1.5382025241851807\r\nStep 614, loss: 1.5302447080612183\r\nStep 615, loss: 1.4934755563735962\r\nStep 616, loss: 1.4922901391983032\r\nStep 617, loss: 1.4783998727798462\r\nStep 618, loss: 1.4760500192642212\r\nStep 619, loss: 1.5128107070922852\r\nStep 620, loss: 1.4944809675216675\r\nStep 621, loss: 1.4848226308822632\r\nStep 622, loss: 1.4362648725509644\r\nStep 623, loss: 1.4164607524871826\r\nStep 624, loss: 1.4163892269134521\r\nStep 625, loss: 1.4048373699188232\r\nStep 626, loss: 1.3994402885437012\r\nStep 627, loss: 1.4372225999832153\r\nStep 628, loss: 1.447267770767212\r\nStep 629, loss: 1.4274860620498657\r\nStep 630, loss: 1.4226666688919067\r\nStep 631, loss: 1.4106159210205078\r\nStep 632, loss: 1.3884767293930054\r\nStep 633, loss: 1.348196029663086\r\nStep 634, loss: 1.329584002494812\r\nStep 635, loss: 1.3296535015106201\r\nStep 636, loss: 1.3550177812576294\r\nStep 637, loss: 1.3371622562408447\r\nStep 638, loss: 1.3128892183303833\r\nStep 639, loss: 1.3009154796600342\r\nStep 640, loss: 1.284874677658081\r\nStep 641, loss: 1.288753867149353\r\nStep 642, loss: 1.276092529296875\r\nStep 643, loss: 1.2975842952728271\r\nStep 644, loss: 1.2805390357971191\r\nStep 645, loss: 1.242788553237915\r\nStep 646, loss: 1.2235305309295654\r\nStep 647, loss: 1.2258915901184082\r\nStep 648, loss: 1.2279022932052612\r\nStep 649, loss: 1.2076776027679443\r\nStep 650, loss: 1.202594518661499\r\nStep 651, loss: 1.1962299346923828\r\nStep 652, loss: 1.2255059480667114\r\nStep 653, loss: 1.2125623226165771\r\nStep 654, loss: 1.174932837486267\r\nStep 655, loss: 1.1622493267059326\r\nStep 656, loss: 1.1643519401550293\r\nStep 657, loss: 1.1593643426895142\r\nStep 658, loss: 1.1500595808029175\r\nStep 659, loss: 1.1415513753890991\r\nStep 660, loss: 1.1277979612350464\r\nStep 661, loss: 1.1046319007873535\r\nStep 662, loss: 1.0972893238067627\r\nStep 663, loss: 1.0772416591644287\r\nStep 664, loss: 1.0841724872589111\r\nStep 665, loss: 1.0654568672180176\r\nStep 666, loss: 1.063889980316162\r\nStep 667, loss: 1.0538586378097534\r\nStep 668, loss: 1.0437517166137695\r\nStep 669, loss: 1.02790105342865\r\nStep 670, loss: 1.0222210884094238\r\nStep 671, loss: 1.0471938848495483\r\nStep 672, loss: 1.0333141088485718\r\nStep 673, loss: 1.0188857316970825\r\nStep 674, loss: 0.9948674440383911\r\nStep 675, loss: 0.9911745190620422\r\nStep 676, loss: 0.9932522773742676\r\nStep 677, loss: 0.9801518321037292\r\nStep 678, loss: 0.9683449864387512\r\nStep 679, loss: 0.9637853503227234\r\nStep 680, loss: 0.9505601525306702\r\nStep 681, 
loss: 0.9507589936256409\r\nStep 682, loss: 0.9420446753501892\r\nStep 683, loss: 0.9219518899917603\r\nStep 684, loss: 0.9906125068664551\r\nStep 685, loss: 0.9951944351196289\r\nStep 686, loss: 1.0090900659561157\r\nStep 687, loss: 0.9764349460601807\r\nStep 688, loss: 0.9089835286140442\r\nStep 689, loss: 0.9163581728935242\r\nStep 690, loss: 0.9068987369537354\r\nStep 691, loss: 0.8822569251060486\r\nStep 692, loss: 0.890515148639679\r\nStep 693, loss: 0.9034362435340881\r\nStep 694, loss: 0.9005548357963562\r\nStep 695, loss: 0.8736074566841125\r\nStep 696, loss: 0.853632926940918\r\nStep 697, loss: 0.8574893474578857\r\nStep 698, loss: 0.8504339456558228\r\nStep 699, loss: 0.8269093036651611\r\nStep 700, loss: 0.7931914925575256\r\nStep 701, loss: 0.7824216485023499\r\nStep 702, loss: 0.7820708751678467\r\nStep 703, loss: 0.7821744680404663\r\nStep 704, loss: 0.7965542078018188\r\nStep 705, loss: 0.8186914920806885\r\nStep 706, loss: 0.8268790245056152\r\nStep 707, loss: 0.800636351108551\r\nStep 708, loss: 0.7833888530731201\r\nStep 709, loss: 0.7581707239151001\r\nStep 710, loss: 0.7467963099479675\r\n",,terminal_output +3972,10241693,"TERMINAL",0,0,"611471",,terminal_output +3973,10242637,"TERMINAL",0,0,"722582",,terminal_output +3974,10243743,"TERMINAL",0,0,"833693",,terminal_output +3975,10244697,"TERMINAL",0,0,"9447104",,terminal_output +3976,10245783,"TERMINAL",0,0,"1:0055815",,terminal_output +3977,10245795,"TERMINAL",0,0,"Step 711, loss: 0.7404515743255615\r\nStep 712, loss: 0.7433110475540161\r\nStep 713, loss: 0.7411110997200012\r\nStep 714, loss: 0.7319995164871216\r\nStep 715, loss: 0.7087826132774353\r\nStep 716, loss: 0.6854697465896606\r\nStep 717, loss: 0.6847699880599976\r\nStep 718, loss: 0.6785175800323486\r\nStep 719, loss: 0.6772394180297852\r\nStep 720, loss: 0.6731926798820496\r\nStep 721, loss: 0.6580355167388916\r\nStep 722, loss: 0.6638725996017456\r\nStep 723, loss: 0.6652817130088806\r\nStep 724, loss: 0.6525187492370605\r\nStep 725, loss: 0.6395428776741028\r\nStep 726, loss: 0.6310827136039734\r\nStep 727, loss: 0.6112037897109985\r\nStep 728, loss: 0.6103946566581726\r\nStep 729, loss: 0.5957101583480835\r\nStep 730, loss: 0.6150814890861511\r\nStep 731, loss: 0.6007071733474731\r\nStep 732, loss: 0.5969282984733582\r\nStep 733, loss: 0.587446928024292\r\nStep 734, loss: 0.6061014533042908\r\nStep 735, loss: 0.5837782025337219\r\nStep 736, loss: 0.5819242000579834\r\nStep 737, loss: 0.5989861488342285\r\nStep 738, loss: 0.5900041460990906\r\nStep 739, loss: 0.5551652312278748\r\nStep 740, loss: 0.5609185099601746\r\nStep 741, loss: 0.5693771839141846\r\nStep 742, loss: 0.5427417755126953\r\nStep 743, loss: 0.5224369168281555\r\nStep 744, loss: 0.5191894173622131\r\nStep 745, loss: 0.5475836992263794\r\nStep 746, loss: 0.5312250852584839\r\nStep 747, loss: 0.5301179885864258\r\nStep 748, loss: 0.5154416561126709\r\nStep 749, loss: 0.5178009867668152\r\nStep 750, loss: 0.4998522698879242\r\nStep 751, loss: 0.4668436348438263\r\nStep 752, loss: 0.4647647738456726\r\nStep 753, loss: 0.4573141932487488\r\nStep 754, loss: 0.48311764001846313\r\nStep 755, loss: 0.4741423428058624\r\nStep 756, loss: 0.4833276569843292\r\nStep 757, loss: 0.46180570125579834\r\nStep 758, loss: 0.43895766139030457\r\nStep 759, loss: 0.4341837465763092\r\nStep 760, loss: 0.4884434640407562\r\nStep 761, loss: 0.4717116355895996\r\nStep 762, loss: 0.48386839032173157\r\nStep 763, loss: 0.47023633122444153\r\nStep 764, loss: 0.4098624289035797\r\nStep 765, loss: 
0.4346941113471985\r\nStep 766, loss: 0.41230136156082153\r\nStep 767, loss: 0.40493646264076233\r\nStep 768, loss: 0.41751155257225037\r\nStep 769, loss: 0.4120519459247589\r\nStep 770, loss: 0.3863734304904938\r\nStep 771, loss: 0.3697560131549835\r\nStep 772, loss: 0.3637886643409729\r\nStep 773, loss: 0.3797786235809326\r\nStep 774, loss: 0.3733832836151123\r\nStep 775, loss: 0.3808313012123108\r\nStep 776, loss: 0.3563409447669983\r\nStep 777, loss: 0.37619632482528687\r\nStep 778, loss: 0.3615008294582367\r\nStep 779, loss: 0.3779735565185547\r\nStep 780, loss: 0.35746803879737854\r\nStep 781, loss: 0.41540756821632385\r\nStep 782, loss: 0.35794568061828613\r\nStep 783, loss: 0.3643234670162201\r\nStep 784, loss: 0.3403375446796417\r\nStep 785, loss: 0.32716643810272217\r\nStep 786, loss: 0.3108535408973694\r\nStep 787, loss: 0.3228452503681183\r\nStep 788, loss: 0.3172426223754883\r\nStep 789, loss: 0.31375929713249207\r\nStep 790, loss: 0.3084295988082886\r\nStep 791, loss: 0.2883882522583008\r\nStep 792, loss: 0.28428158164024353\r\nStep 793, loss: 0.27955952286720276\r\nStep 794, loss: 0.2931940257549286\r\nStep 795, loss: 0.2868936061859131\r\nStep 796, loss: 0.28269630670547485\r\nStep 797, loss: 0.26792994141578674\r\nStep 798, loss: 0.26364561915397644\r\nStep 799, loss: 0.2601555585861206\r\nStep 800, loss: 0.25530681014060974\r\nStep 801, loss: 0.2568022608757019\r\nStep 802, loss: 0.2556838095188141\r\nStep 803, loss: 0.2518971562385559\r\nStep 804, loss: 0.2570902109146118\r\nStep 805, loss: 0.2695840299129486\r\nStep 806, loss: 0.2491655945777893\r\nStep 807, loss: 0.24772363901138306\r\nStep 808, loss: 0.24061834812164307\r\nStep 809, loss: 0.2747490406036377\r\nStep 810, loss: 0.24327321350574493\r\nStep 811, loss: 0.23902109265327454\r\nStep 812, loss: 0.2294478863477707\r\nStep 813, loss: 0.21354219317436218\r\nStep 814, loss: 0.2098231315612793\r\nStep 815, loss: 0.20612794160842896\r\nStep 816, loss: 0.20908185839653015\r\nStep 817, loss: 0.20280671119689941\r\nStep 818, loss: 0.21490372717380524\r\nStep 819, loss: 0.20761024951934814\r\nStep 820, loss: 0.2015463411808014\r\nStep 821, loss: 0.21196702122688293\r\nStep 822, loss: 0.20794734358787537\r\nStep 823, loss: 0.2260844111442566\r\nStep 824, loss: 0.1830148696899414\r\nStep 825, loss: 0.17962349951267242\r\nStep 826, loss: 0.17648640275001526\r\nStep 827, loss: 0.23497800529003143\r\nStep 828, loss: 0.19614319503307343\r\nStep 829, loss: 0.17477500438690186\r\nStep 830, loss: 0.17617879807949066\r\nStep 831, loss: 0.17500044405460358\r\nStep 832, loss: 0.17124944925308228\r\nStep 833, loss: 0.16205240786075592\r\nStep 834, loss: 0.1585911214351654\r\nStep 835, loss: 0.1726345717906952\r\nStep 836, loss: 0.15381154417991638\r\nStep 837, loss: 0.1520201563835144\r\nStep 838, loss: 0.1747182011604309\r\nStep 839, loss: 0.17547009885311127\r\nStep 840, loss: 0.147268608212471\r\nStep 841, loss: 0.14504201710224152\r\nStep 842, loss: 0.1689002513885498\r\nStep 843, loss: 0.15494778752326965\r\nStep 844, loss: 0.16339769959449768\r\nStep 845, loss: 0.15063931047916412\r\nStep 846, loss: 0.14036855101585388\r\nStep 847, loss: 0.15067587792873383\r\nStep 848, loss: 0.13696813583374023\r\nStep 849, loss: 0.13474787771701813\r\nStep 850, loss: 0.12885133922100067\r\nStep 851, loss: 0.12684619426727295\r\nStep 852, loss: 0.125933438539505\r\nStep 853, loss: 0.12238112092018127\r\nStep 854, loss: 0.1375637948513031\r\nStep 855, loss: 0.1228318065404892\r\nStep 856, loss: 0.1197589784860611\r\nStep 857, loss: 
0.11805064231157303\r\nStep 858, loss: 0.1131906509399414\r\nStep 859, loss: 0.12476013600826263\r\nStep 860, loss: 0.11117453873157501\r\nStep 861, loss: 0.11001545935869217\r\nStep 862, loss: 0.1079629585146904\r\nStep 863, loss: 0.1057603657245636\r\nStep 864, loss: 0.10417944192886353\r\nStep 865, loss: 0.10193388164043427\r\nStep 866, loss: 0.1230032742023468\r\nStep 867, loss: 0.1299131065607071\r\nStep 868, loss: 0.10914790630340576\r\nStep 869, loss: 0.13612103462219238\r\nStep 870, loss: 0.09698144346475601\r\nStep 871, loss: 0.09842216968536377\r\nStep 872, loss: 0.09724414348602295\r\nStep 873, loss: 0.09597203880548477\r\nStep 874, loss: 0.10698544979095459\r\nStep 875, loss: 0.08956354856491089\r\nStep 876, loss: 0.10197731107473373\r\nStep 877, loss: 0.09931118041276932\r\nStep 878, loss: 0.08819886296987534\r\nStep 879, loss: 0.08487988263368607\r\nStep 880, loss: 0.08451003581285477\r\nStep 881, loss: 0.08281484246253967\r\nStep 882, loss: 0.08071973919868469\r\nStep 883, loss: 0.079765185713768\r\nStep 884, loss: 0.07840663194656372\r\nStep 885, loss: 0.08089761435985565\r\nStep 886, loss: 0.08767294883728027\r\nStep 887, loss: 0.0762748047709465\r\nStep 888, loss: 0.07508894801139832\r\nStep 889, loss: 0.08493935316801071\r\nStep 890, loss: 0.07465986162424088\r\nStep 891, loss: 0.07024923712015152\r\nStep 892, loss: 0.07924096286296844\r\nStep 893, loss: 0.06897784769535065\r\nStep 894, loss: 0.07554306834936142\r\nStep 895, loss: 0.06646846979856491\r\nStep 896, loss: 0.06510603427886963\r\nStep 897, loss: 0.06381846219301224\r\nStep 898, loss: 0.07052825391292572\r\nStep 899, loss: 0.06569892913103104\r\nStep 900, loss: 0.06331194937229156\r\nStep 901, loss: 0.07115252315998077\r\nStep 902, loss: 0.06186007335782051\r\nStep 903, loss: 0.0588848851621151\r\nStep 904, loss: 0.06524310261011124\r\nStep 905, loss: 0.058394331485033035\r\nStep 906, loss: 0.05692083388566971\r\nStep 907, loss: 0.05545900762081146\r\nStep 908, loss: 0.05351553112268448\r\nStep 909, loss: 0.052563972771167755\r\nStep 910, loss: 0.08552777767181396\r\nStep 911, loss: 0.0523286834359169\r\nStep 912, loss: 0.050864581018686295\r\nStep 913, loss: 0.050570327788591385\r\nStep 914, loss: 0.049219824373722076\r\nStep 915, loss: 0.05005386844277382\r\nStep 916, loss: 0.06505641341209412\r\nStep 917, loss: 0.04723892733454704\r\nStep 918, loss: 0.05927626043558121\r\nStep 919, loss: 0.04570925980806351\r\nStep 920, loss: 0.04605754092335701\r\nStep 921, loss: 0.05879546329379082\r\nStep 922, loss: 0.04731404036283493\r\nStep 923, loss: 0.044035013765096664\r\nStep 924, loss: 0.04678099974989891\r\nStep 925, loss: 0.04499737173318863\r\nStep 926, loss: 0.043447915464639664\r\nStep 927, loss: 0.042789217084646225\r\nStep 928, loss: 0.041794393211603165\r\nStep 929, loss: 0.04545453190803528\r\nStep 930, loss: 0.03886225074529648\r\nStep 931, loss: 0.03873983398079872\r\nStep 932, loss: 0.04008324071764946\r\nStep 933, loss: 0.03849207982420921\r\nStep 934, loss: 0.04502829164266586\r\nStep 935, loss: 0.03682100027799606\r\nStep 936, loss: 0.03566832095384598\r\nStep 937, loss: 0.0432451069355011\r\nStep 938, loss: 0.03421296551823616\r\nStep 939, loss: 0.03401118144392967\r\nStep 940, loss: 0.03317558392882347\r\n",,terminal_output +3978,10246763,"TERMINAL",0,0,"166926",,terminal_output +3979,10247837,"TERMINAL",0,0,"2775037",,terminal_output +3980,10248831,"TERMINAL",0,0,"388148",,terminal_output +3981,10249925,"TERMINAL",0,0,"499259",,terminal_output 
+3982,10250903,"TERMINAL",0,0,"550503620",,terminal_output +3983,10251930,"TERMINAL",0,0,"611471",,terminal_output +3984,10252549,"TERMINAL",0,0,"Step 941, loss: 0.03320300579071045\r\nStep 942, loss: 0.03180057182908058\r\nStep 943, loss: 0.03320383280515671\r\nStep 944, loss: 0.03229479491710663\r\nStep 945, loss: 0.03222789242863655\r\nStep 946, loss: 0.030682384967803955\r\nStep 947, loss: 0.03559429943561554\r\nStep 948, loss: 0.028923319652676582\r\nStep 949, loss: 0.028650464490056038\r\nStep 950, loss: 0.03714907169342041\r\nStep 951, loss: 0.02760033868253231\r\nStep 952, loss: 0.02723766677081585\r\nStep 953, loss: 0.02660478837788105\r\nStep 954, loss: 0.03299904242157936\r\nStep 955, loss: 0.02568550780415535\r\nStep 956, loss: 0.025251705199480057\r\nStep 957, loss: 0.02705927938222885\r\nStep 958, loss: 0.026003140956163406\r\nStep 959, loss: 0.02541077509522438\r\nStep 960, loss: 0.024666227400302887\r\nStep 961, loss: 0.024299774318933487\r\nStep 962, loss: 0.023684661835432053\r\nStep 963, loss: 0.023142116144299507\r\nStep 964, loss: 0.03462466225028038\r\nStep 965, loss: 0.022357970476150513\r\nStep 966, loss: 0.021566569805145264\r\nStep 967, loss: 0.021558649837970734\r\nStep 968, loss: 0.02114396169781685\r\nStep 969, loss: 0.02066788077354431\r\nStep 970, loss: 0.020398251712322235\r\nStep 971, loss: 0.019796844571828842\r\nStep 972, loss: 0.01953483372926712\r\nStep 973, loss: 0.019054366275668144\r\nStep 974, loss: 0.0266432948410511\r\nStep 975, loss: 0.018535075709223747\r\nStep 976, loss: 0.018331585451960564\r\nStep 977, loss: 0.01806262508034706\r\nStep 978, loss: 0.019588181748986244\r\nStep 979, loss: 0.019033480435609818\r\nStep 980, loss: 0.049851737916469574\r\nStep 981, loss: 0.021269120275974274\r\nStep 982, loss: 0.020556457340717316\r\nStep 983, loss: 0.02808617614209652\r\nStep 984, loss: 0.033234670758247375\r\nStep 985, loss: 0.018352577462792397\r\nStep 986, loss: 0.018823953345417976\r\nStep 987, loss: 0.0179374348372221\r\nStep 988, loss: 0.017573969438672066\r\nStep 989, loss: 0.017447618767619133\r\nStep 990, loss: 0.022464821115136147\r\nStep 991, loss: 0.016676437109708786\r\nStep 992, loss: 0.025236546993255615\r\nStep 993, loss: 0.016446389257907867\r\nStep 994, loss: 0.01631222851574421\r\nStep 995, loss: 0.05366140976548195\r\nStep 996, loss: 0.01618782803416252\r\nStep 997, loss: 0.016207970678806305\r\nStep 998, loss: 0.016014859080314636\r\nStep 999, loss: 0.022341212257742882\r\nSaved checkpoint at step 1000\r\nStep 1000, loss: 0.015828358009457588\r\nStep 1001, loss: 0.015525592491030693\r\nStep 1002, loss: 0.015379820950329304\r\nStep 1003, loss: 0.015287468209862709\r\nStep 1004, loss: 0.014796593226492405\r\nStep 1005, loss: 0.014535403810441494\r\nStep 1006, loss: 0.014204232022166252\r\nStep 1007, loss: 0.013924009166657925\r\nStep 1008, loss: 0.013743076473474503\r\nStep 1009, loss: 0.013362854719161987\r\nStep 1010, loss: 0.0131221329793334\r\nStep 1011, loss: 0.01710394211113453\r\nStep 1012, loss: 0.01271453034132719\r\nStep 1013, loss: 0.015354047529399395\r\nStep 1014, loss: 0.013232833705842495\r\nStep 1015, loss: 0.013061088509857655\r\nStep 1016, loss: 0.012092644348740578\r\nStep 1017, loss: 0.011986360885202885\r\nStep 1018, loss: 0.011641327291727066\r\nStep 1019, loss: 0.01154222059994936\r\nStep 1020, loss: 0.011337468400597572\r\nStep 1021, loss: 0.011102018877863884\r\nStep 1022, loss: 0.014655749313533306\r\nStep 1023, loss: 0.0108388876542449\r\nStep 1024, loss: 0.010713684372603893\r\nStep 1025, loss: 
0.01053725928068161\r\nStep 1026, loss: 0.01206805370748043\r\nStep 1027, loss: 0.010255163535475731\r\nStep 1028, loss: 0.010039013810455799\r\nStep 1029, loss: 0.009861321188509464\r\nStep 1030, loss: 0.011578609235584736\r\nStep 1031, loss: 0.011051530949771404\r\nStep 1032, loss: 0.010593333281576633\r\nStep 1033, loss: 0.009481477551162243\r\nStep 1034, loss: 0.009368252009153366\r\nStep 1035, loss: 0.00993108469992876\r\nStep 1036, loss: 0.00904438178986311\r\nStep 1037, loss: 0.00940465647727251\r\nStep 1038, loss: 0.013418743386864662\r\nStep 1039, loss: 0.0087147681042552\r\nStep 1040, loss: 0.009092297405004501\r\nStep 1041, loss: 0.009092241525650024\r\nStep 1042, loss: 0.008814767934381962\r\nStep 1043, loss: 0.008687478490173817\r\nStep 1044, loss: 0.008133027702569962\r\nStep 1045, loss: 0.008978178724646568\r\nStep 1046, loss: 0.007871409878134727\r\nStep 1047, loss: 0.007661527954041958\r\nStep 1048, loss: 0.007559893187135458\r\nStep 1049, loss: 0.007376710884273052\r\nStep 1050, loss: 0.007237425539642572\r\nStep 1051, loss: 0.007097762543708086\r\nStep 1052, loss: 0.0069678304716944695\r\nStep 1053, loss: 0.006851419806480408\r\nStep 1054, loss: 0.006689512636512518\r\nStep 1055, loss: 0.007105756551027298\r\nStep 1056, loss: 0.00644281879067421\r\nStep 1057, loss: 0.0063878814689815044\r\nStep 1058, loss: 0.006251988932490349\r\nStep 1059, loss: 0.007358951959758997\r\nStep 1060, loss: 0.006174338050186634\r\nStep 1061, loss: 0.006082137580960989\r\nStep 1062, loss: 0.00928892195224762\r\nStep 1063, loss: 0.006338069681078196\r\nStep 1064, loss: 0.006141272373497486\r\nStep 1065, loss: 0.006075251381844282\r\nStep 1066, loss: 0.005937688518315554\r\nStep 1067, loss: 0.005612172186374664\r\nStep 1068, loss: 0.005531447473913431\r\nStep 1069, loss: 0.005409047473222017\r\nStep 1070, loss: 0.005233587697148323\r\nStep 1071, loss: 0.011071580462157726\r\nStep 1072, loss: 0.005308290012180805\r\nStep 1073, loss: 0.005375281907618046\r\nStep 1074, loss: 0.0053206742741167545\r\nStep 1075, loss: 0.00535355182364583\r\nStep 1076, loss: 0.005089277401566505\r\nStep 1077, loss: 0.009105080738663673\r\nStep 1078, loss: 0.012944189831614494\r\nStep 1079, loss: 0.005115482024848461\r\nStep 1080, loss: 0.012197226285934448\r\nStep 1081, loss: 0.005301341414451599\r\nStep 1082, loss: 0.005812445189803839\r\nStep 1083, loss: 0.005699486006051302\r\nStep 1084, loss: 0.005267654545605183\r\nStep 1085, loss: 0.0052531687542796135\r\nStep 1086, loss: 0.0052793314680457115\r\nStep 1087, loss: 0.005035040900111198\r\nStep 1088, loss: 0.004919357597827911\r\nStep 1089, loss: 0.0048035504296422005\r\nStep 1090, loss: 0.004771131090819836\r\nStep 1091, loss: 0.00470375269651413\r\nStep 1092, loss: 0.004584727808833122\r\nStep 1093, loss: 0.005627168342471123\r\nStep 1094, loss: 0.005246071144938469\r\nStep 1095, loss: 0.004434830509126186\r\nStep 1096, loss: 0.004363803658634424\r\nStep 1097, loss: 0.004353197757154703\r\nStep 1098, loss: 0.004257232882082462\r\nStep 1099, loss: 0.004120055120438337\r\nStep 1100, loss: 0.0040968325920403\r\nStep 1101, loss: 0.003925526514649391\r\nStep 1102, loss: 0.003822720842435956\r\nStep 1103, loss: 0.0038108674343675375\r\nStep 1104, loss: 0.0037715614307671785\r\nStep 1105, loss: 0.003723063273355365\r\nStep 1106, loss: 0.0035913344472646713\r\nStep 1107, loss: 0.0035269821528345346\r\nStep 1108, loss: 0.0035260014701634645\r\nStep 1109, loss: 0.003510719398036599\r\nStep 1110, loss: 0.003421252127736807\r\nStep 1111, loss: 0.0033146333880722523\r\nStep 
1112, loss: 0.0032320264726877213\r\nStep 1113, loss: 0.003219448495656252\r\nStep 1114, loss: 0.03074469417333603\r\nStep 1115, loss: 0.003665517782792449\r\nStep 1116, loss: 0.007890637032687664\r\nStep 1117, loss: 0.0042461915872991085\r\nStep 1118, loss: 0.004197265952825546\r\nStep 1119, loss: 0.0046842326410114765\r\nStep 1120, loss: 0.004299357533454895\r\nStep 1121, loss: 0.00395174277946353\r\nStep 1122, loss: 0.0038931332528591156\r\nStep 1123, loss: 0.004009998869150877\r\nStep 1124, loss: 0.003884211415424943\r\nStep 1125, loss: 0.0037235484924167395\r\nStep 1126, loss: 0.0036960793659090996\r\nStep 1127, loss: 0.0036067869514226913\r\nStep 1128, loss: 0.003477462800219655\r\nStep 1129, loss: 0.0034106746315956116\r\nStep 1130, loss: 0.0031882720068097115\r\nStep 1131, loss: 0.0031897209119051695\r\nStep 1132, loss: 0.0031853539403527975\r\nStep 1133, loss: 0.0030727714765816927\r\nStep 1134, loss: 0.0030238451436161995\r\nStep 1135, loss: 0.006077311933040619\r\nStep 1136, loss: 0.0029569854959845543\r\nStep 1137, loss: 0.005402021575719118\r\nStep 1138, loss: 0.0034207834396511316\r\nStep 1139, loss: 0.003130064345896244\r\nStep 1140, loss: 0.0031656173523515463\r\nStep 1141, loss: 0.006086445413529873\r\nStep 1142, loss: 0.0028442353941500187\r\nStep 1143, loss: 0.003405526280403137\r\nStep 1144, loss: 0.00332318851724267\r\nStep 1145, loss: 0.060350678861141205\r\nStep 1146, loss: 0.003583916462957859\r\nStep 1147, loss: 0.004974186886101961\r\nStep 1148, loss: 0.006221821531653404\r\nStep 1149, loss: 0.004792613442987204\r\nStep 1150, loss: 0.0040451837703585625\r\nStep 1151, loss: 0.004145503044128418\r\nStep 1152, loss: 0.00437559699639678\r\nStep 1153, loss: 0.004304835572838783\r\nStep 1154, loss: 0.005956144537776709\r\nStep 1155, loss: 0.004972143564373255\r\nStep 1156, loss: 0.0035692083183676004\r\n",,terminal_output +3985,10252963,"TERMINAL",0,0,"722582",,terminal_output +3986,10254086,"TERMINAL",0,0,"833693",,terminal_output +3987,10255025,"TERMINAL",0,0,"9447204",,terminal_output +3988,10256131,"TERMINAL",0,0,"1055815",,terminal_output +3989,10257091,"TERMINAL",0,0,"166926",,terminal_output +3990,10257254,"TERMINAL",0,0,"Step 1157, loss: 0.009453811682760715\r\nStep 1158, loss: 0.004439024720340967\r\nStep 1159, loss: 0.004126052837818861\r\nStep 1160, loss: 0.003965871874243021\r\nStep 1161, loss: 0.005927540827542543\r\nStep 1162, loss: 0.003754551289603114\r\nStep 1163, loss: 0.0037275059148669243\r\nStep 1164, loss: 0.0034225410781800747\r\nStep 1165, loss: 0.003284421283751726\r\nStep 1166, loss: 0.0035541823599487543\r\nStep 1167, loss: 0.0033771460875868797\r\nStep 1168, loss: 0.003240420948714018\r\nStep 1169, loss: 0.003122822381556034\r\nStep 1170, loss: 0.0030949849169701338\r\nStep 1171, loss: 0.002929531503468752\r\nStep 1172, loss: 0.0028029694221913815\r\nStep 1173, loss: 0.005524154752492905\r\nStep 1174, loss: 0.002722891280427575\r\nStep 1175, loss: 0.0027829152531921864\r\nStep 1176, loss: 0.002800429705530405\r\nStep 1177, loss: 0.0025757974945008755\r\nStep 1178, loss: 0.0024661000352352858\r\nStep 1179, loss: 0.0024292331654578447\r\nStep 1180, loss: 0.0024115657433867455\r\nStep 1181, loss: 0.0023155694361776114\r\nStep 1182, loss: 0.002328607952222228\r\nStep 1183, loss: 0.002190664876252413\r\nStep 1184, loss: 0.0021650337148457766\r\nStep 1185, loss: 0.00207401136867702\r\nStep 1186, loss: 0.002069698879495263\r\nStep 1187, loss: 0.00199085078202188\r\nStep 1188, loss: 0.0019881976768374443\r\nStep 1189, loss: 
0.001974540064111352\r\nStep 1190, loss: 0.0019028055248782039\r\nStep 1191, loss: 0.001830581109970808\r\nStep 1192, loss: 0.0018835781374946237\r\nStep 1193, loss: 0.0019056870369240642\r\nStep 1194, loss: 0.0017897700890898705\r\nStep 1195, loss: 0.001902200747281313\r\nStep 1196, loss: 0.0016318447887897491\r\nStep 1197, loss: 0.001792577444575727\r\nStep 1198, loss: 0.0016770199872553349\r\nStep 1199, loss: 0.0016742270672693849\r\nStep 1200, loss: 0.0015989940147846937\r\nStep 1201, loss: 0.00171887397300452\r\nStep 1202, loss: 0.0016591245075687766\r\nStep 1203, loss: 0.00161453103646636\r\nStep 1204, loss: 0.0015880254795774817\r\nStep 1205, loss: 0.0014996180543676019\r\nStep 1206, loss: 0.0015366656007245183\r\nStep 1207, loss: 0.0013832860859110951\r\nStep 1208, loss: 0.0013857854064553976\r\nStep 1209, loss: 0.0013087906409054995\r\nStep 1210, loss: 0.001365733565762639\r\nStep 1211, loss: 0.0012400042032822967\r\nStep 1212, loss: 0.0012889460194855928\r\nStep 1213, loss: 0.0011606643674895167\r\nStep 1214, loss: 0.0012047605123370886\r\nStep 1215, loss: 0.0011371920118108392\r\nStep 1216, loss: 0.0011665609199553728\r\nStep 1217, loss: 0.0012541580945253372\r\nStep 1218, loss: 0.00129297177772969\r\nStep 1219, loss: 0.0018056221306324005\r\nStep 1220, loss: 0.0012926810886710882\r\nStep 1221, loss: 0.0011666922364383936\r\nStep 1222, loss: 0.0013414009008556604\r\nStep 1223, loss: 0.0010808436200022697\r\nStep 1224, loss: 0.001166044850833714\r\nStep 1225, loss: 0.0010994296753779054\r\nStep 1226, loss: 0.0010204404825344682\r\nStep 1227, loss: 0.001092427410185337\r\nStep 1228, loss: 0.0009716851636767387\r\nStep 1229, loss: 0.0034809852950274944\r\nStep 1230, loss: 0.0010873322607949376\r\nStep 1231, loss: 0.0012494366383180022\r\nStep 1232, loss: 0.0012357048690319061\r\nStep 1233, loss: 0.0037054040003567934\r\nStep 1234, loss: 0.001144198700785637\r\nStep 1235, loss: 0.001375085092149675\r\nStep 1236, loss: 0.0014252291293814778\r\nStep 1237, loss: 0.0012052410747855902\r\nStep 1238, loss: 0.001324283191934228\r\nStep 1239, loss: 0.0011412387248128653\r\nStep 1240, loss: 0.0011270769173279405\r\nStep 1241, loss: 0.0012745909625664353\r\nStep 1242, loss: 0.0009474050020799041\r\nStep 1243, loss: 0.0010649763280525804\r\nStep 1244, loss: 0.001201355131343007\r\nStep 1245, loss: 0.0009781140834093094\r\nStep 1246, loss: 0.0010777043644338846\r\nStep 1247, loss: 0.0009470614604651928\r\nStep 1248, loss: 0.0010520732030272484\r\nStep 1249, loss: 0.000895460310857743\r\nStep 1250, loss: 0.0010274804662913084\r\nStep 1251, loss: 0.000828832620754838\r\nStep 1252, loss: 0.0010634834179654717\r\nStep 1253, loss: 0.0007716504042036831\r\nStep 1254, loss: 0.001093612750992179\r\nStep 1255, loss: 0.0008078006212599576\r\nStep 1256, loss: 0.0007786978385411203\r\nStep 1257, loss: 0.0008665542700327933\r\nStep 1258, loss: 0.0007435258012264967\r\nStep 1259, loss: 0.05610882118344307\r\nStep 1260, loss: 0.001413110294379294\r\nStep 1261, loss: 0.003084757598116994\r\nStep 1262, loss: 0.0033558430150151253\r\nStep 1263, loss: 0.013002480380237103\r\nStep 1264, loss: 0.00560012785717845\r\nStep 1265, loss: 0.00405077775940299\r\nStep 1266, loss: 0.001944085699506104\r\nStep 1267, loss: 0.002340753562748432\r\nStep 1268, loss: 0.009403497911989689\r\nStep 1269, loss: 0.005616793874651194\r\nStep 1270, loss: 0.004217856097966433\r\nStep 1271, loss: 0.0024946611374616623\r\nStep 1272, loss: 0.0024861288256943226\r\nStep 1273, loss: 0.0024672793224453926\r\nStep 1274, loss: 
0.00249247788451612\r\nStep 1275, loss: 0.0024645428638905287\r\nStep 1276, loss: 0.0036284734960645437\r\nStep 1277, loss: 0.001978483749553561\r\nStep 1278, loss: 0.0017614661483094096\r\nStep 1279, loss: 0.046765126287937164\r\nStep 1280, loss: 0.0016873965505510569\r\nStep 1281, loss: 0.002141313161700964\r\nStep 1282, loss: 0.00247763330116868\r\nStep 1283, loss: 0.002602791879326105\r\nStep 1284, loss: 0.0023695954587310553\r\nStep 1285, loss: 0.0022296092938631773\r\nStep 1286, loss: 0.002177230082452297\r\nStep 1287, loss: 0.002213072497397661\r\nStep 1288, loss: 0.0020559849217534065\r\nStep 1289, loss: 0.0018539141165092587\r\nStep 1290, loss: 0.0016555169131606817\r\nStep 1291, loss: 0.001606798148714006\r\nStep 1292, loss: 0.0015573420096188784\r\nStep 1293, loss: 0.0014621714362874627\r\nStep 1294, loss: 0.0013923757942393422\r\nStep 1295, loss: 0.0012811663327738643\r\nStep 1296, loss: 0.001220914302393794\r\nStep 1297, loss: 0.0011849742149934173\r\nStep 1298, loss: 0.0011355298338457942\r\nStep 1299, loss: 0.0010801083408296108\r\nStep 1300, loss: 0.0010177787626162171\r\nStep 1301, loss: 0.0009757789084687829\r\nStep 1302, loss: 0.0009528608061373234\r\nStep 1303, loss: 0.0009305774583481252\r\nStep 1304, loss: 0.0008943412103690207\r\nStep 1305, loss: 0.000859374413266778\r\nStep 1306, loss: 0.0008385927649214864\r\nStep 1307, loss: 0.0008123667794279754\r\nStep 1308, loss: 0.0007865212392061949\r\nStep 1309, loss: 0.0007696707034483552\r\nStep 1310, loss: 0.0007388395606540143\r\nStep 1311, loss: 0.0007185518625192344\r\nStep 1312, loss: 0.0006962428451515734\r\nStep 1313, loss: 0.0006773843197152019\r\nStep 1314, loss: 0.0006726984865963459\r\nStep 1315, loss: 0.0006560353212989867\r\nStep 1316, loss: 0.0006355398218147457\r\nStep 1317, loss: 0.000619059894233942\r\nStep 1318, loss: 0.000605825218372047\r\nStep 1319, loss: 0.0005942154093645513\r\nStep 1320, loss: 0.0006806032615713775\r\nStep 1321, loss: 0.0006547445664182305\r\nStep 1322, loss: 0.0006329899188131094\r\nStep 1323, loss: 0.0006233081221580505\r\nStep 1324, loss: 0.0006065640482120216\r\nStep 1325, loss: 0.0005851107416674495\r\nStep 1326, loss: 0.0005734475562348962\r\nStep 1327, loss: 0.0005543713923543692\r\nStep 1328, loss: 0.0005466091679409146\r\nStep 1329, loss: 0.000530689605511725\r\nStep 1330, loss: 0.0005187761271372437\r\nStep 1331, loss: 0.00047866764361970127\r\nStep 1332, loss: 0.0004623129789251834\r\nStep 1333, loss: 0.00045461891568265855\r\nStep 1334, loss: 0.0004448550462257117\r\nStep 1335, loss: 0.00043924825149588287\r\nStep 1336, loss: 0.0004216445377096534\r\nStep 1337, loss: 0.0007286698091775179\r\nStep 1338, loss: 0.00043263399857096374\r\nStep 1339, loss: 0.0005599958240054548\r\nStep 1340, loss: 0.00044182929559610784\r\nStep 1341, loss: 0.00045846038847230375\r\nStep 1342, loss: 0.0004731607623398304\r\nStep 1343, loss: 0.00044007512042298913\r\nStep 1344, loss: 0.0020394199527800083\r\nStep 1345, loss: 0.000528888136614114\r\nStep 1346, loss: 0.0009464968461543322\r\nStep 1347, loss: 0.0013955736067146063\r\nStep 1348, loss: 0.0006005383911542594\r\nStep 1349, loss: 0.0016563227400183678\r\nStep 1350, loss: 0.0007560704834759235\r\nStep 1351, loss: 0.0010102743981406093\r\nStep 1352, loss: 0.0012386329472064972\r\nStep 1353, loss: 0.0006215822068043053\r\nStep 1354, loss: 0.0012176878517493606\r\nStep 1355, loss: 0.0007928594131954014\r\nStep 1356, loss: 0.0005486184963956475\r\nStep 1357, loss: 0.0006415073876269162\r\nStep 1358, loss: 0.029512308537960052\r\nStep 1359, 
loss: 0.0007954449392855167\r\nStep 1360, loss: 0.0010961861116811633\r\nStep 1361, loss: 0.0017603440210223198\r\nStep 1362, loss: 0.001689511933363974\r\nStep 1363, loss: 0.0015611121198162436\r\nStep 1364, loss: 0.0013763300376012921\r\nStep 1365, loss: 0.0010887270327657461\r\nStep 1366, loss: 0.0010264882585033774\r\nStep 1367, loss: 0.0011090137995779514\r\n",,terminal_output +3991,10258128,"TERMINAL",0,0,"2778:0037",,terminal_output +3992,10259160,"TERMINAL",0,0,"388148",,terminal_output +3993,10260227,"TERMINAL",0,0,"499259",,terminal_output +3994,10261234,"TERMINAL",0,0,"57:007:003630",,terminal_output +3995,10262274,"TERMINAL",0,0,"611471",,terminal_output +3996,10262298,"TERMINAL",0,0,"Step 1368, loss: 0.0011074594222009182\r\nStep 1369, loss: 0.0010926576796919107\r\nStep 1370, loss: 0.0009071550448425114\r\nStep 1371, loss: 0.0008900828543119133\r\nStep 1372, loss: 0.0008594805840402842\r\nStep 1373, loss: 0.0009115600259974599\r\nStep 1374, loss: 0.0007595343049615622\r\nStep 1375, loss: 0.0007304175524041057\r\nStep 1376, loss: 0.0006985944346524775\r\nStep 1377, loss: 0.0006334728095680475\r\nStep 1378, loss: 0.0006186477839946747\r\nStep 1379, loss: 0.0006304123671725392\r\nStep 1380, loss: 0.0005869638407602906\r\nStep 1381, loss: 0.0005492761265486479\r\nStep 1382, loss: 0.002253533573821187\r\nStep 1383, loss: 0.0005924470606260002\r\nStep 1384, loss: 0.0006227222038432956\r\nStep 1385, loss: 0.0007074562599882483\r\nStep 1386, loss: 0.0006518728332594037\r\nStep 1387, loss: 0.0006532156839966774\r\nStep 1388, loss: 0.000589173985645175\r\nStep 1389, loss: 0.0005513163632713258\r\nStep 1390, loss: 0.0005319053889252245\r\nStep 1391, loss: 0.0004688258050009608\r\nStep 1392, loss: 0.00045380755909718573\r\nStep 1393, loss: 0.000448236329248175\r\nStep 1394, loss: 0.028494497761130333\r\nStep 1395, loss: 0.0005771876312792301\r\nStep 1396, loss: 0.0012208509724587202\r\nStep 1397, loss: 0.0017514026258140802\r\nStep 1398, loss: 0.007882940582931042\r\nStep 1399, loss: 0.001126057468354702\r\nStep 1400, loss: 0.0013576340861618519\r\nStep 1401, loss: 0.0017145685851573944\r\nStep 1402, loss: 0.0016140140360221267\r\nStep 1403, loss: 0.0013032410060986876\r\nStep 1404, loss: 0.001165500609204173\r\nStep 1405, loss: 0.001242465223185718\r\nStep 1406, loss: 0.0014748655958101153\r\nStep 1407, loss: 0.0012741737300530076\r\nStep 1408, loss: 0.002458993112668395\r\nStep 1409, loss: 0.0015553898410871625\r\nStep 1410, loss: 0.0014237065333873034\r\nStep 1411, loss: 0.0013657138915732503\r\nStep 1412, loss: 0.0010474800365045667\r\nStep 1413, loss: 0.0009634423186071217\r\nStep 1414, loss: 0.0009064226760528982\r\nStep 1415, loss: 0.0008193192770704627\r\nStep 1416, loss: 0.0028502524364739656\r\nStep 1417, loss: 0.0020023768302053213\r\nStep 1418, loss: 0.0016320846043527126\r\nStep 1419, loss: 0.000981396995484829\r\nStep 1420, loss: 0.0008563208393752575\r\nStep 1421, loss: 0.0008591639925725758\r\nStep 1422, loss: 0.0008746315143071115\r\nStep 1423, loss: 0.0008850239682942629\r\nStep 1424, loss: 0.0007708186749368906\r\nStep 1425, loss: 0.0006476452690549195\r\nStep 1426, loss: 0.0006339136743918061\r\nStep 1427, loss: 0.0006916666752658784\r\nStep 1428, loss: 0.0005819801590405405\r\nStep 1429, loss: 0.0005497413803823292\r\nStep 1430, loss: 0.0005287456442601979\r\nStep 1431, loss: 0.0005469867028295994\r\nStep 1432, loss: 0.0005166024784557521\r\nStep 1433, loss: 0.00047110262676142156\r\nStep 1434, loss: 0.0004507103585638106\r\nStep 1435, loss: 
0.0008231032988987863\r\nStep 1436, loss: 0.0004632983764167875\r\nStep 1437, loss: 0.0006129360408522189\r\nStep 1438, loss: 0.00043670955346897244\r\nStep 1439, loss: 0.0004320635343901813\r\nStep 1440, loss: 0.0004437877796590328\r\nStep 1441, loss: 0.0005155206890776753\r\nStep 1442, loss: 0.00041012876317836344\r\nStep 1443, loss: 0.00041262913146056235\r\nStep 1444, loss: 0.00040674599586054683\r\nStep 1445, loss: 0.00036401901161298156\r\nStep 1446, loss: 0.0007841542246751487\r\nStep 1447, loss: 0.0005912288324907422\r\nStep 1448, loss: 0.0005528558976948261\r\nStep 1449, loss: 0.0018749231239780784\r\nStep 1450, loss: 0.0005223797052167356\r\nStep 1451, loss: 0.0005766231915913522\r\nStep 1452, loss: 0.0004544567782431841\r\nStep 1453, loss: 0.003056332701817155\r\nStep 1454, loss: 0.000443034601630643\r\nStep 1455, loss: 0.0006930390372872353\r\nStep 1456, loss: 0.0008635955164209008\r\nStep 1457, loss: 0.0006043880130164325\r\nStep 1458, loss: 0.0005893757333979011\r\nStep 1459, loss: 0.012311081402003765\r\nStep 1460, loss: 0.0005977295222692192\r\nStep 1461, loss: 0.0008476003422401845\r\nStep 1462, loss: 0.001404370879754424\r\nStep 1463, loss: 0.001280946540646255\r\nStep 1464, loss: 0.0009813904762268066\r\nStep 1465, loss: 0.00095661252271384\r\nStep 1466, loss: 0.000885726185515523\r\nStep 1467, loss: 0.0008634365512989461\r\nStep 1468, loss: 0.0008857549401000142\r\nStep 1469, loss: 0.0008310646517202258\r\nStep 1470, loss: 0.0007380308234132826\r\nStep 1471, loss: 0.0006719074444845319\r\nStep 1472, loss: 0.06750363856554031\r\nStep 1473, loss: 0.0007875399314798415\r\nStep 1474, loss: 0.0012133880518376827\r\nStep 1475, loss: 0.0018603777280077338\r\nStep 1476, loss: 0.0019211858743801713\r\nStep 1477, loss: 0.0017761730123311281\r\nStep 1478, loss: 0.001531879766844213\r\nStep 1479, loss: 0.0029157144017517567\r\nStep 1480, loss: 0.0020077938679605722\r\nStep 1481, loss: 0.03638847917318344\r\nStep 1482, loss: 0.0016685088630765676\r\nStep 1483, loss: 0.0019492218270897865\r\nStep 1484, loss: 0.0011913274647668004\r\nStep 1485, loss: 0.00118555489461869\r\nStep 1486, loss: 0.0013806956121698022\r\nStep 1487, loss: 0.0014944373397156596\r\nStep 1488, loss: 0.0014495063805952668\r\nStep 1489, loss: 0.001301493844948709\r\nStep 1490, loss: 0.0011947558959946036\r\nStep 1491, loss: 0.0010922410292550921\r\nStep 1492, loss: 0.0010197004303336143\r\nStep 1493, loss: 0.000998463248834014\r\nStep 1494, loss: 0.0009982915362343192\r\nStep 1495, loss: 0.036205265671014786\r\nStep 1496, loss: 0.0011671558022499084\r\nStep 1497, loss: 0.001866146340034902\r\nStep 1498, loss: 0.00211983360350132\r\nStep 1499, loss: 0.00170407525729388\r\nStep 1500, loss: 0.0015833366196602583\r\nStep 1501, loss: 0.0016625452553853393\r\nStep 1502, loss: 0.0015102936886250973\r\nStep 1503, loss: 0.0013425078941509128\r\nStep 1504, loss: 0.0012177088065072894\r\nStep 1505, loss: 0.0010857249144464731\r\nStep 1506, loss: 0.0011572898365557194\r\nStep 1507, loss: 0.02178576961159706\r\nStep 1508, loss: 0.0009568842360749841\r\nStep 1509, loss: 0.0014234223635867238\r\nStep 1510, loss: 0.001808395143598318\r\nStep 1511, loss: 0.0015243840171024203\r\nStep 1512, loss: 0.0011861753882840276\r\nStep 1513, loss: 0.001024300348944962\r\nStep 1514, loss: 0.001105484669096768\r\nStep 1515, loss: 0.0011492028133943677\r\nStep 1516, loss: 0.0010411172406747937\r\nStep 1517, loss: 0.0008577213156968355\r\nStep 1518, loss: 0.0007933923625387251\r\nStep 1519, loss: 0.0007428395911119878\r\nStep 1520, loss: 
0.0007377112633548677\r\nStep 1521, loss: 0.0007536556222476065\r\nStep 1522, loss: 0.0007425736985169351\r\nStep 1523, loss: 0.042960379272699356\r\nStep 1524, loss: 0.000716435257345438\r\nStep 1525, loss: 0.0010328657226637006\r\nStep 1526, loss: 0.0017887731082737446\r\nStep 1527, loss: 0.0017816154286265373\r\nStep 1528, loss: 0.001556413248181343\r\nStep 1529, loss: 0.001406555064022541\r\nStep 1530, loss: 0.0013135542394593358\r\nStep 1531, loss: 0.0013320731231942773\r\nStep 1532, loss: 0.0014238704461604357\r\nStep 1533, loss: 0.0012688294518738985\r\nStep 1534, loss: 0.001128756906837225\r\nStep 1535, loss: 0.0009731824975460768\r\nStep 1536, loss: 0.00083410635124892\r\nStep 1537, loss: 0.0007781594758853316\r\nStep 1538, loss: 0.0007368938531726599\r\nStep 1539, loss: 0.0006672416930086911\r\nStep 1540, loss: 0.0006123943021520972\r\nStep 1541, loss: 0.001165450899861753\r\nStep 1542, loss: 0.0008637415012344718\r\nStep 1543, loss: 0.0005599034484475851\r\nStep 1544, loss: 0.0005411891033872962\r\nStep 1545, loss: 0.0005236004362814128\r\nStep 1546, loss: 0.0005156907136552036\r\nStep 1547, loss: 0.0004924568347632885\r\nStep 1548, loss: 0.0004764057230204344\r\nStep 1549, loss: 0.00047942090895958245\r\nStep 1550, loss: 0.00047063009697012603\r\nStep 1551, loss: 0.000425913865910843\r\nStep 1552, loss: 0.00039117695996537805\r\nStep 1553, loss: 0.0003661190567072481\r\nStep 1554, loss: 0.0003598508483264595\r\nStep 1555, loss: 0.0003541858750395477\r\nStep 1556, loss: 0.0003405913303140551\r\nStep 1557, loss: 0.00033277421607635915\r\nStep 1558, loss: 0.0003221884835511446\r\nStep 1559, loss: 0.00031256547663360834\r\nStep 1560, loss: 0.00030012516072019935\r\nStep 1561, loss: 0.00028577051125466824\r\nStep 1562, loss: 0.00027486486942507327\r\nStep 1563, loss: 0.00026455349870957434\r\nStep 1564, loss: 0.00025810091756284237\r\nStep 1565, loss: 0.00025055493460968137\r\nStep 1566, loss: 0.0002441505203023553\r\nStep 1567, loss: 0.0009501844178885221\r\nStep 1568, loss: 0.00037128987605683506\r\nStep 1569, loss: 0.00029394717421382666\r\nStep 1570, loss: 0.0003663085517473519\r\nStep 1571, loss: 0.0002867307630367577\r\nStep 1572, loss: 0.0003088466473855078\r\nStep 1573, loss: 0.0003369543410371989\r\nStep 1574, loss: 0.0003097335866186768\r\nStep 1575, loss: 0.0004313751996960491\r\nStep 1576, loss: 0.0002847264113370329\r\nStep 1577, loss: 0.0006306413561105728\r\n",,terminal_output +3997,10263300,"TERMINAL",0,0,"722582",,terminal_output +3998,10264361,"TERMINAL",0,0,"8447304",,terminal_output +3999,10265448,"TERMINAL",0,0,"2055815",,terminal_output +4000,10266392,"TERMINAL",0,0,"166926",,terminal_output +4001,10267305,"TERMINAL",0,0,"Step 1578, loss: 0.00025331455981358886\r\nStep 1579, loss: 0.0004614870995283127\r\nStep 1580, loss: 0.0002679667086340487\r\nStep 1581, loss: 0.0004020341148134321\r\nStep 1582, loss: 0.00024263450177386403\r\nStep 1583, loss: 0.0003172088472638279\r\nStep 1584, loss: 0.00024944101460278034\r\nStep 1585, loss: 0.0002243486960651353\r\nStep 1586, loss: 0.0002193859254475683\r\nStep 1587, loss: 0.00022483422071672976\r\nStep 1588, loss: 0.0002029601891990751\r\nStep 1589, loss: 0.00020516787481028587\r\nStep 1590, loss: 0.00020912126637995243\r\nStep 1591, loss: 0.00019181071547791362\r\nStep 1592, loss: 0.00018526334315538406\r\nStep 1593, loss: 0.00018770732276607305\r\nStep 1594, loss: 0.00017422877135686576\r\nStep 1595, loss: 0.00017114875663537532\r\nStep 1596, loss: 0.0001695354876574129\r\nStep 1597, loss: 
0.00016364890325348824\r\nStep 1598, loss: 0.00015646006795577705\r\nStep 1599, loss: 0.00015321675164159387\r\nStep 1600, loss: 0.0001496415789006278\r\nStep 1601, loss: 0.00015116833674255759\r\nStep 1602, loss: 0.0001454823650419712\r\nStep 1603, loss: 0.00014223992184270173\r\nStep 1604, loss: 0.00013882968050893396\r\nStep 1605, loss: 0.0001352360559394583\r\nStep 1606, loss: 0.0001324364566244185\r\nStep 1607, loss: 0.00012830628838855773\r\nStep 1608, loss: 0.00012629557750187814\r\nStep 1609, loss: 0.0001238401309819892\r\nStep 1610, loss: 0.00012056080595357344\r\nStep 1611, loss: 0.00011778841144405305\r\nStep 1612, loss: 0.00011558173719095066\r\nStep 1613, loss: 0.0001132459074142389\r\nStep 1614, loss: 0.00010994857439072803\r\nStep 1615, loss: 0.0001077127963071689\r\nStep 1616, loss: 0.00010560922237345949\r\nStep 1617, loss: 0.00011487027222756296\r\nStep 1618, loss: 0.00011082648416049778\r\nStep 1619, loss: 0.00010826878133229911\r\nStep 1620, loss: 0.00010572386236162856\r\nStep 1621, loss: 0.00010289352212566882\r\nStep 1622, loss: 9.274018520954996e-05\r\nStep 1623, loss: 0.00019175952184014022\r\nStep 1624, loss: 9.3601927801501e-05\r\nStep 1625, loss: 0.00012946643983013928\r\nStep 1626, loss: 0.00011218390864087269\r\nStep 1627, loss: 0.000378959346562624\r\nStep 1628, loss: 0.00010157456563320011\r\nStep 1629, loss: 0.0001113311227527447\r\nStep 1630, loss: 0.00033629988320171833\r\nStep 1631, loss: 0.00016094795137178153\r\nStep 1632, loss: 0.00014515889051835984\r\nStep 1633, loss: 0.00015763285045977682\r\nStep 1634, loss: 0.0011429694714024663\r\nStep 1635, loss: 0.0020220009610056877\r\nStep 1636, loss: 0.00029799347976222634\r\nStep 1637, loss: 0.0006942475447431207\r\nStep 1638, loss: 0.0008506157901138067\r\nStep 1639, loss: 0.0005394454346969724\r\nStep 1640, loss: 0.00034678771044127643\r\nStep 1641, loss: 0.0003798353427555412\r\nStep 1642, loss: 0.00040584205999039114\r\nStep 1643, loss: 0.00032842170912772417\r\nStep 1644, loss: 0.0003238962381146848\r\nStep 1645, loss: 0.00026891398010775447\r\nStep 1646, loss: 0.0002760769857559353\r\nStep 1647, loss: 0.0002507461467757821\r\nStep 1648, loss: 0.00023650455113966018\r\nStep 1649, loss: 0.00021088837820570916\r\nStep 1650, loss: 0.0001936201297212392\r\nStep 1651, loss: 0.00019127852283418179\r\nStep 1652, loss: 0.0001907973928609863\r\nStep 1653, loss: 0.00017962259880732745\r\nStep 1654, loss: 0.00017721143376547843\r\nStep 1655, loss: 0.00017217607819475234\r\nStep 1656, loss: 0.00016549977590329945\r\nStep 1657, loss: 0.0014732279814779758\r\nStep 1658, loss: 0.0005903867422603071\r\nStep 1659, loss: 0.00029847476980648935\r\nStep 1660, loss: 0.004364475607872009\r\nStep 1661, loss: 0.00041676705586723983\r\nStep 1662, loss: 0.00046972540440037847\r\nStep 1663, loss: 0.0009841566206887364\r\nStep 1664, loss: 0.0009663865785114467\r\nStep 1665, loss: 0.0003906720085069537\r\nStep 1666, loss: 0.00042595472768880427\r\nStep 1667, loss: 0.0005860522505827248\r\nStep 1668, loss: 0.0004837346205022186\r\nStep 1669, loss: 0.0004049835551995784\r\nStep 1670, loss: 0.0012871386716142297\r\nStep 1671, loss: 0.0003016513946931809\r\nStep 1672, loss: 0.0003775096556637436\r\nStep 1673, loss: 0.0003674230247270316\r\nStep 1674, loss: 0.0003585092490538955\r\nStep 1675, loss: 0.00032491475576534867\r\nStep 1676, loss: 0.04084179550409317\r\nStep 1677, loss: 0.00030460135894827545\r\nStep 1678, loss: 0.0008444288396276534\r\nStep 1679, loss: 0.002587264869362116\r\nStep 1680, loss: 0.005281489808112383\r\nStep 
1681, loss: 0.0007777874707244337\r\nStep 1682, loss: 0.0006997594027779996\r\nStep 1683, loss: 0.0015176527667790651\r\nStep 1684, loss: 0.0021449453197419643\r\nStep 1685, loss: 0.001186689012683928\r\nStep 1686, loss: 0.0006361114792525768\r\nStep 1687, loss: 0.0006859198329038918\r\nStep 1688, loss: 0.0005468273884616792\r\nStep 1689, loss: 0.0005811153096146882\r\nStep 1690, loss: 0.0006670289440080523\r\nStep 1691, loss: 0.0007580773672088981\r\nStep 1692, loss: 0.0007085850229486823\r\nStep 1693, loss: 0.0004723363963421434\r\nStep 1694, loss: 0.0005034524365328252\r\nStep 1695, loss: 0.00041674607200548053\r\nStep 1696, loss: 0.000385816500056535\r\nStep 1697, loss: 0.0004621460393536836\r\nStep 1698, loss: 0.00037429752410389483\r\nStep 1699, loss: 0.0003661539521999657\r\nStep 1700, loss: 0.00034116252209059894\r\nStep 1701, loss: 0.00031980566564016044\r\nStep 1702, loss: 0.008859065361320972\r\nStep 1703, loss: 0.00028149111312814057\r\nStep 1704, loss: 0.00042427852167747915\r\nStep 1705, loss: 0.0007370931562036276\r\nStep 1706, loss: 0.0016622130060568452\r\nStep 1707, loss: 0.0005855791387148201\r\nStep 1708, loss: 0.0008524138247594237\r\nStep 1709, loss: 0.0008044200367294252\r\nStep 1710, loss: 0.00045676372246816754\r\nStep 1711, loss: 0.0003886836639139801\r\nStep 1712, loss: 0.00038457170012407005\r\nStep 1713, loss: 0.00044164955033920705\r\nStep 1714, loss: 0.00042585181654430926\r\nStep 1715, loss: 0.001246585976332426\r\nStep 1716, loss: 0.00031007634242996573\r\nStep 1717, loss: 0.000295721780275926\r\nStep 1718, loss: 0.0019518197514116764\r\nStep 1719, loss: 0.0004085372784174979\r\nStep 1720, loss: 0.0003566690720617771\r\nStep 1721, loss: 0.0004505540709942579\r\nStep 1722, loss: 0.0005194288096390665\r\nStep 1723, loss: 0.0004914953606203198\r\nStep 1724, loss: 0.0005005744169466197\r\nStep 1725, loss: 0.0003879471041727811\r\nStep 1726, loss: 0.0003252979659009725\r\nStep 1727, loss: 0.00029842089861631393\r\nStep 1728, loss: 0.0002823796821758151\r\nStep 1729, loss: 0.0002701342455111444\r\nStep 1730, loss: 0.0002793569292407483\r\nStep 1731, loss: 0.0002311547432327643\r\nStep 1732, loss: 0.00021569032105617225\r\nStep 1733, loss: 0.00020981660054530948\r\nStep 1734, loss: 0.00021010312775615603\r\nStep 1735, loss: 0.0002044059510808438\r\nStep 1736, loss: 0.0001920505310408771\r\nStep 1737, loss: 0.00017011923773679882\r\nStep 1738, loss: 0.00016570743173360825\r\nStep 1739, loss: 0.00015623962099198252\r\nStep 1740, loss: 0.00015155761502683163\r\nStep 1741, loss: 0.00014992005890235305\r\nStep 1742, loss: 0.00014379584172274917\r\nStep 1743, loss: 0.00013622416008729488\r\nStep 1744, loss: 0.0001285522012040019\r\nStep 1745, loss: 0.00012360309483483434\r\nStep 1746, loss: 0.00012090497330063954\r\nStep 1747, loss: 0.0001146651993622072\r\nStep 1748, loss: 0.00011187394557055086\r\nStep 1749, loss: 0.0001104003022192046\r\nStep 1750, loss: 0.00010683026630431414\r\nStep 1751, loss: 0.00010380821913713589\r\nStep 1752, loss: 0.00010065464448416606\r\nStep 1753, loss: 9.645056707086042e-05\r\nStep 1754, loss: 9.380659321323037e-05\r\nStep 1755, loss: 9.227505506714806e-05\r\nStep 1756, loss: 9.043751924764365e-05\r\nStep 1757, loss: 0.00025635314523242414\r\nStep 1758, loss: 8.624040492577478e-05\r\nStep 1759, loss: 9.255902841687202e-05\r\nStep 1760, loss: 0.00014023501717019826\r\nStep 1761, loss: 0.00010751043737400323\r\nStep 1762, loss: 0.00010440443293191493\r\nStep 1763, loss: 0.0001136326536652632\r\nStep 1764, loss: 
0.00011045676365029067\r\nStep 1765, loss: 0.00010688337351894006\r\nStep 1766, loss: 0.00010412508709123358\r\nStep 1767, loss: 0.00010319238936062902\r\nStep 1768, loss: 9.030133514897898e-05\r\nStep 1769, loss: 9.424714517081156e-05\r\nStep 1770, loss: 8.413282921537757e-05\r\nStep 1771, loss: 7.926568650873378e-05\r\nStep 1772, loss: 8.008025906747207e-05\r\nStep 1773, loss: 7.807734073139727e-05\r\nStep 1774, loss: 7.535231270594522e-05\r\nStep 1775, loss: 7.252966315718368e-05\r\nStep 1776, loss: 7.241780986078084e-05\r\nStep 1777, loss: 6.866782496217638e-05\r\nStep 1778, loss: 7.567424472654238e-05\r\nStep 1779, loss: 6.626322283409536e-05\r\nStep 1780, loss: 0.0007308334461413324\r\nStep 1781, loss: 7.60710536269471e-05\r\nStep 1782, loss: 0.0016192292096093297\r\nStep 1783, loss: 0.0008724972722120583\r\nStep 1784, loss: 0.0006303352420218289\r\nStep 1785, loss: 0.000818794418592006\r\n",,terminal_output +4002,10267433,"TERMINAL",0,0,"2771037",,terminal_output +4003,10268520,"TERMINAL",0,0,"388148",,terminal_output +4004,10269493,"TERMINAL",0,0,"499259",,terminal_output +4005,10270569,"TERMINAL",0,0,"510103640",,terminal_output +4006,10271567,"TERMINAL",0,0,"611471",,terminal_output +4007,10271921,"TERMINAL",0,0,"Step 1786, loss: 0.0007203338318504393\r\nStep 1787, loss: 0.0003414771636016667\r\nStep 1788, loss: 0.00042853294871747494\r\nStep 1789, loss: 0.0002658325538504869\r\nStep 1790, loss: 0.00010574234329396859\r\nStep 1791, loss: 0.000203836738364771\r\nStep 1792, loss: 0.00037335901288315654\r\nStep 1793, loss: 0.0002927565947175026\r\nStep 1794, loss: 0.0002014136698562652\r\nStep 1795, loss: 8.901695400709286e-05\r\nStep 1796, loss: 7.849753455957398e-05\r\nStep 1797, loss: 0.01302554365247488\r\nStep 1798, loss: 0.004861913155764341\r\nStep 1799, loss: 0.0027485613245517015\r\nStep 1800, loss: 0.0019026355585083365\r\nStep 1801, loss: 0.0005459141102619469\r\nStep 1802, loss: 0.0006543350755237043\r\nStep 1803, loss: 0.0008653225959278643\r\nStep 1804, loss: 0.0009890402434393764\r\nStep 1805, loss: 0.0009212440345436335\r\nStep 1806, loss: 0.0006730767199769616\r\nStep 1807, loss: 0.0005892388289794326\r\nStep 1808, loss: 0.0005004793056286871\r\nStep 1809, loss: 0.00048518343828618526\r\nStep 1810, loss: 0.00042347770067863166\r\nStep 1811, loss: 0.00038247619522735476\r\nStep 1812, loss: 0.00035851550637744367\r\nStep 1813, loss: 0.0003579328185878694\r\nStep 1814, loss: 0.00035221411962993443\r\nStep 1815, loss: 0.00034111543209291995\r\nStep 1816, loss: 0.0003216705226805061\r\nStep 1817, loss: 0.00028340803692117333\r\nStep 1818, loss: 0.0002604613546282053\r\nStep 1819, loss: 0.00024311040760949254\r\nStep 1820, loss: 0.0002322262298548594\r\nStep 1821, loss: 0.00022081406495999545\r\nStep 1822, loss: 0.00020609877537935972\r\nStep 1823, loss: 0.00019314850214868784\r\nStep 1824, loss: 0.00018351271864958107\r\nStep 1825, loss: 0.00017453968757763505\r\nStep 1826, loss: 0.00016390006931032985\r\nStep 1827, loss: 0.00952807255089283\r\nStep 1828, loss: 0.0001740443694870919\r\nStep 1829, loss: 0.00032953923800960183\r\nStep 1830, loss: 0.0148354796692729\r\nStep 1831, loss: 0.000450077117420733\r\nStep 1832, loss: 0.0006596375606022775\r\nStep 1833, loss: 0.001211301307193935\r\nStep 1834, loss: 0.0010914618615061045\r\nStep 1835, loss: 0.0007719734567217529\r\nStep 1836, loss: 0.0006363675347529352\r\nStep 1837, loss: 0.0006988191162236035\r\nStep 1838, loss: 0.000737444672267884\r\nStep 1839, loss: 0.0006524271448142827\r\nStep 1840, loss: 
0.0005457506631501019\r\nStep 1841, loss: 0.00046644083340652287\r\nStep 1842, loss: 0.00043371785432100296\r\nStep 1843, loss: 0.0004296557162888348\r\nStep 1844, loss: 0.00044017989421263337\r\nStep 1845, loss: 0.005420754197984934\r\nStep 1846, loss: 0.00035268234205432236\r\nStep 1847, loss: 0.00034382654121145606\r\nStep 1848, loss: 0.0004074188764207065\r\nStep 1849, loss: 0.0004667323373723775\r\nStep 1850, loss: 0.00041519742808304727\r\nStep 1851, loss: 0.0003772078489419073\r\nStep 1852, loss: 0.0003622380900196731\r\nStep 1853, loss: 0.00034009673981927335\r\nStep 1854, loss: 0.00030688292463310063\r\nStep 1855, loss: 0.00028265282162465155\r\nStep 1856, loss: 0.0002679611789062619\r\nStep 1857, loss: 0.00028184326947666705\r\nStep 1858, loss: 0.000292362121399492\r\nStep 1859, loss: 0.0002743806107901037\r\nStep 1860, loss: 0.0002758146438281983\r\nStep 1861, loss: 0.00023113153292797506\r\nStep 1862, loss: 0.00020297466835472733\r\nStep 1863, loss: 0.00018711367738433182\r\nStep 1864, loss: 0.0001806846121326089\r\nStep 1865, loss: 0.00017744940123520792\r\nStep 1866, loss: 0.00017012908938340843\r\nStep 1867, loss: 0.00016310712089762092\r\nStep 1868, loss: 0.00015066220657899976\r\nStep 1869, loss: 0.00013112025044392794\r\nStep 1870, loss: 0.00048591653467155993\r\nStep 1871, loss: 0.00012450887879822403\r\nStep 1872, loss: 0.0001377446169499308\r\nStep 1873, loss: 0.0006957835284993052\r\nStep 1874, loss: 0.00017067919543478638\r\nStep 1875, loss: 0.0001556051429361105\r\nStep 1876, loss: 0.00014553953951690346\r\nStep 1877, loss: 0.00029118225211277604\r\nStep 1878, loss: 0.00013828909141011536\r\nStep 1879, loss: 0.0001371654070680961\r\nStep 1880, loss: 0.00015728756261523813\r\nStep 1881, loss: 0.00016743196465540677\r\nStep 1882, loss: 0.0001671783538768068\r\nStep 1883, loss: 0.0001549188600620255\r\nStep 1884, loss: 0.00014518391981255263\r\nStep 1885, loss: 0.0001248159387614578\r\nStep 1886, loss: 0.0001222652499563992\r\nStep 1887, loss: 0.00012652442092075944\r\nStep 1888, loss: 0.00040020691812969744\r\nStep 1889, loss: 0.00021743751130998135\r\nStep 1890, loss: 0.00011601227015489712\r\nStep 1891, loss: 0.0004231837228871882\r\nStep 1892, loss: 0.00012417256948538125\r\nStep 1893, loss: 0.00013366018538363278\r\nStep 1894, loss: 0.00013217597734183073\r\nStep 1895, loss: 0.0001241399149876088\r\nStep 1896, loss: 0.0001355499989585951\r\nStep 1897, loss: 0.00012752918701153249\r\nStep 1898, loss: 0.00012717914069071412\r\nStep 1899, loss: 0.00010648812167346478\r\nStep 1900, loss: 0.00010725203173933551\r\nStep 1901, loss: 0.0001042333897203207\r\nStep 1902, loss: 9.625815437175333e-05\r\nStep 1903, loss: 8.86445413925685e-05\r\nStep 1904, loss: 8.542158320778981e-05\r\nStep 1905, loss: 8.209415682358667e-05\r\nStep 1906, loss: 8.094128861557692e-05\r\nStep 1907, loss: 7.950380677357316e-05\r\nStep 1908, loss: 7.597684452775866e-05\r\nStep 1909, loss: 7.145483687054366e-05\r\nStep 1910, loss: 6.752056651748717e-05\r\nStep 1911, loss: 6.240719085326418e-05\r\nStep 1912, loss: 6.119974568719044e-05\r\nStep 1913, loss: 6.123904313426465e-05\r\nStep 1914, loss: 6.1071987147443e-05\r\nStep 1915, loss: 5.891403270652518e-05\r\nStep 1916, loss: 5.511419294634834e-05\r\nStep 1917, loss: 5.132200749358162e-05\r\nStep 1918, loss: 4.952716699335724e-05\r\nStep 1919, loss: 5.7689539971761405e-05\r\nStep 1920, loss: 4.936254845233634e-05\r\nStep 1921, loss: 5.5743475968483835e-05\r\nStep 1922, loss: 5.231528484728187e-05\r\nStep 1923, loss: 4.9111087719211355e-05\r\nStep 
1924, loss: 4.781917959917337e-05\r\nStep 1925, loss: 4.164421261521056e-05\r\nStep 1926, loss: 4.091913433512673e-05\r\nStep 1927, loss: 4.5085136662237346e-05\r\nStep 1928, loss: 3.8623871660092846e-05\r\nStep 1929, loss: 3.749392635654658e-05\r\nStep 1930, loss: 3.723151166923344e-05\r\nStep 1931, loss: 3.659958383650519e-05\r\nStep 1932, loss: 3.5393932193983346e-05\r\nStep 1933, loss: 3.403089795028791e-05\r\nStep 1934, loss: 3.3103060559369624e-05\r\nStep 1935, loss: 3.243198443669826e-05\r\nStep 1936, loss: 3.2027561246650293e-05\r\nStep 1937, loss: 3.103543713223189e-05\r\nStep 1938, loss: 3.021361590072047e-05\r\nStep 1939, loss: 2.960916935990099e-05\r\nStep 1940, loss: 2.8755055609508418e-05\r\nStep 1941, loss: 2.8026341169606894e-05\r\nStep 1942, loss: 2.7375206627766602e-05\r\nStep 1943, loss: 2.675420546438545e-05\r\nStep 1944, loss: 2.6116611479665153e-05\r\nStep 1945, loss: 2.5524195734760724e-05\r\nStep 1946, loss: 2.4748807845753618e-05\r\nStep 1947, loss: 2.4048693376244046e-05\r\nStep 1948, loss: 2.361177212151233e-05\r\nStep 1949, loss: 2.2920587070984766e-05\r\nStep 1950, loss: 2.2401831301976927e-05\r\nStep 1951, loss: 2.178992872359231e-05\r\nStep 1952, loss: 2.128517189703416e-05\r\nStep 1953, loss: 2.0786224922630936e-05\r\nStep 1954, loss: 2.3853044694988057e-05\r\nStep 1955, loss: 2.3010486984276213e-05\r\nStep 1956, loss: 2.229421806987375e-05\r\nStep 1957, loss: 1.872936809377279e-05\r\nStep 1958, loss: 1.830547807912808e-05\r\nStep 1959, loss: 1.780184538802132e-05\r\nStep 1960, loss: 1.7285241483477876e-05\r\nStep 1961, loss: 1.6891546692932025e-05\r\nStep 1962, loss: 1.6485169908264652e-05\r\nStep 1963, loss: 1.5944551705615595e-05\r\nStep 1964, loss: 1.543211874377448e-05\r\nStep 1965, loss: 1.5059839824971277e-05\r\nStep 1966, loss: 1.4610649486712646e-05\r\nStep 1967, loss: 1.4137550351733807e-05\r\nStep 1968, loss: 1.3707924154005013e-05\r\nStep 1969, loss: 1.5533738405792974e-05\r\nStep 1970, loss: 1.4974933037592564e-05\r\nStep 1971, loss: 1.4384773749043234e-05\r\nStep 1972, loss: 1.2212805813760497e-05\r\nStep 1973, loss: 1.1860368431371171e-05\r\nStep 1974, loss: 1.1453137631178834e-05\r\nStep 1975, loss: 1.118085583584616e-05\r\nStep 1976, loss: 1.0779272997751832e-05\r\nStep 1977, loss: 1.0430113434267696e-05\r\nStep 1978, loss: 1.0078837476612534e-05\r\nStep 1979, loss: 9.839408448897302e-06\r\nStep 1980, loss: 9.43161649047397e-06\r\nStep 1981, loss: 9.174747901852243e-06\r\nStep 1982, loss: 8.849094228935428e-06\r\nStep 1983, loss: 8.52009452501079e-06\r\nStep 1984, loss: 9.638392839406151e-06\r\nStep 1985, loss: 9.279841833631508e-06\r\nStep 1986, loss: 8.946913112595212e-06\r\nStep 1987, loss: 8.562350558349863e-06\r\nStep 1988, loss: 8.275579602923244e-06\r\nStep 1989, loss: 7.1036101871868595e-06\r\nStep 1990, loss: 6.972860319365282e-06\r\nStep 1991, loss: 6.746221515641082e-06\r\nStep 1992, loss: 6.5669737523421645e-06\r\nStep 1993, loss: 6.453133210015949e-06\r\n",,terminal_output +4008,10272596,"TERMINAL",0,0,"722582",,terminal_output +4009,10273642,"TERMINAL",0,0,"833693",,terminal_output +4010,10274767,"TERMINAL",0,0,"9447404",,terminal_output +4011,10275695,"TERMINAL",0,0,"3055815",,terminal_output +4012,10276814,"TERMINAL",0,0,"166926",,terminal_output +4013,10277841,"TERMINAL",0,0,"2772037",,terminal_output +4014,10278662,"TERMINAL",0,0,"Step 1994, loss: 6.815570941398619e-06\r\nStep 1995, loss: 0.00033188986708410084\r\nStep 1996, loss: 0.0002318446640856564\r\nStep 1997, loss: 0.00020016648340970278\r\nStep 1998, loss: 
3.369120167917572e-05\r\nStep 1999, loss: 0.0007630927138961852\r\nSaved checkpoint at step 2000\r\nStep 2000, loss: 0.00033600282040424645\r\nStep 2001, loss: 1.1033375812985469e-05\r\nStep 2002, loss: 0.0006485992926172912\r\nStep 2003, loss: 4.4964501284994185e-05\r\nStep 2004, loss: 0.00022675326908938587\r\nStep 2005, loss: 3.100198591710068e-05\r\nStep 2006, loss: 0.0008403555257245898\r\nStep 2007, loss: 5.2426235924940556e-05\r\nStep 2008, loss: 1.0979565558955073e-05\r\nStep 2009, loss: 3.353993088239804e-05\r\nStep 2010, loss: 0.00039789831498637795\r\nStep 2011, loss: 9.191498247673735e-05\r\nStep 2012, loss: 8.861137030180544e-05\r\nStep 2013, loss: 0.0001925536198541522\r\nStep 2014, loss: 5.062912168796174e-05\r\nStep 2015, loss: 9.638300252845511e-05\r\nStep 2016, loss: 0.00011211445234948769\r\nStep 2017, loss: 3.181058127665892e-05\r\nStep 2018, loss: 4.472413274925202e-05\r\nStep 2019, loss: 0.00011854140757350251\r\nStep 2020, loss: 5.597689232672565e-05\r\nStep 2021, loss: 3.123122951365076e-05\r\nStep 2022, loss: 2.243256130896043e-05\r\nStep 2023, loss: 2.3379772756015882e-05\r\nStep 2024, loss: 4.968348002876155e-05\r\nStep 2025, loss: 2.6038453142973594e-05\r\nStep 2026, loss: 2.5025199647643603e-05\r\nStep 2027, loss: 2.6888248612522148e-05\r\nStep 2028, loss: 2.832493555615656e-05\r\nStep 2029, loss: 2.9879596695536748e-05\r\nStep 2030, loss: 2.587863855296746e-05\r\nStep 2031, loss: 1.9556930055841804e-05\r\nStep 2032, loss: 0.0003707846044562757\r\nStep 2033, loss: 1.7406717233825475e-05\r\nStep 2034, loss: 0.0001373954873997718\r\nStep 2035, loss: 0.00022464938228949904\r\nStep 2036, loss: 5.750904529122636e-05\r\nStep 2037, loss: 5.480028266902082e-05\r\nStep 2038, loss: 0.000413339352235198\r\nStep 2039, loss: 2.5131532311206684e-05\r\nStep 2040, loss: 3.076547000091523e-05\r\nStep 2041, loss: 8.1315403804183e-05\r\nStep 2042, loss: 6.884059985168278e-05\r\nStep 2043, loss: 5.8316793001722544e-05\r\nStep 2044, loss: 4.1934505134122446e-05\r\nStep 2045, loss: 3.89007618650794e-05\r\nStep 2046, loss: 3.868622297886759e-05\r\nStep 2047, loss: 3.868206113111228e-05\r\nStep 2048, loss: 0.07465647161006927\r\nStep 2049, loss: 3.983620263170451e-05\r\nStep 2050, loss: 0.00039434339851140976\r\nStep 2051, loss: 0.0025993785820901394\r\nStep 2052, loss: 0.001983902184292674\r\nStep 2053, loss: 0.000881441927049309\r\nStep 2054, loss: 0.0007194728823378682\r\nStep 2055, loss: 0.0008939902181737125\r\nStep 2056, loss: 0.0459725558757782\r\nStep 2057, loss: 0.0005501153063960373\r\nStep 2058, loss: 0.0005495024379342794\r\nStep 2059, loss: 0.0006796409143134952\r\nStep 2060, loss: 0.0009661728399805725\r\nStep 2061, loss: 0.0013015009462833405\r\nStep 2062, loss: 0.0010585288982838392\r\nStep 2063, loss: 0.000623492815066129\r\nStep 2064, loss: 0.0004911684663966298\r\nStep 2065, loss: 0.0003673075116239488\r\nStep 2066, loss: 0.0003387352335266769\r\nStep 2067, loss: 0.00031875306740403175\r\nStep 2068, loss: 0.0003456038539297879\r\nStep 2069, loss: 0.00033702849759720266\r\nStep 2070, loss: 0.00034279056126251817\r\nStep 2071, loss: 0.0003236284537706524\r\nStep 2072, loss: 0.00027718223282136023\r\nStep 2073, loss: 0.00025065720546990633\r\nStep 2074, loss: 0.00021951577218715101\r\nStep 2075, loss: 0.00020016051712445915\r\nStep 2076, loss: 0.00018834821821656078\r\nStep 2077, loss: 0.00017337205645162612\r\nStep 2078, loss: 0.0003845830506179482\r\nStep 2079, loss: 0.000241181900491938\r\nStep 2080, loss: 0.00019478437025099993\r\nStep 2081, loss: 
0.0001684241578914225\r\nStep 2082, loss: 0.0001535636547487229\r\nStep 2083, loss: 0.00014742609346285462\r\nStep 2084, loss: 0.00014232636021915823\r\nStep 2085, loss: 0.0001343895710306242\r\nStep 2086, loss: 0.00011609583452809602\r\nStep 2087, loss: 0.00010385534551460296\r\nStep 2088, loss: 9.579382458468899e-05\r\nStep 2089, loss: 8.90634473762475e-05\r\nStep 2090, loss: 8.301648631459102e-05\r\nStep 2091, loss: 7.754243415547535e-05\r\nStep 2092, loss: 7.350493979174644e-05\r\nStep 2093, loss: 6.845121970400214e-05\r\nStep 2094, loss: 5.1311108109075576e-05\r\nStep 2095, loss: 4.875662853009999e-05\r\nStep 2096, loss: 4.6302426198963076e-05\r\nStep 2097, loss: 4.392592018120922e-05\r\nStep 2098, loss: 0.02925635129213333\r\nStep 2099, loss: 9.148073149845004e-05\r\nStep 2100, loss: 0.0008776201866567135\r\nStep 2101, loss: 0.0009404895245097578\r\nStep 2102, loss: 0.000478039204608649\r\nStep 2103, loss: 0.00034225062699988484\r\nStep 2104, loss: 0.0003542286576703191\r\nStep 2105, loss: 0.0005376739427447319\r\nStep 2106, loss: 0.0005565929459407926\r\nStep 2107, loss: 0.00035514243063516915\r\nStep 2108, loss: 0.00029167739558033645\r\nStep 2109, loss: 0.000316314835799858\r\nStep 2110, loss: 0.0002851929166354239\r\nStep 2111, loss: 0.0002659932943060994\r\nStep 2112, loss: 0.00023612103541381657\r\nStep 2113, loss: 0.00021418623509816825\r\nStep 2114, loss: 0.00019937845354434103\r\nStep 2115, loss: 0.00018160598119720817\r\nStep 2116, loss: 0.00017348093388136476\r\nStep 2117, loss: 0.00015971669927239418\r\nStep 2118, loss: 0.0002805996045935899\r\nStep 2119, loss: 0.00013293771189637482\r\nStep 2120, loss: 0.0001656745298532769\r\nStep 2121, loss: 0.0001241114514414221\r\nStep 2122, loss: 0.001267690910026431\r\nStep 2123, loss: 0.00012107082147849724\r\nStep 2124, loss: 0.00028619630029425025\r\nStep 2125, loss: 0.0004030099662486464\r\nStep 2126, loss: 0.00018395233200863004\r\nStep 2127, loss: 0.00015983619960024953\r\nStep 2128, loss: 0.00011902747064596042\r\nStep 2129, loss: 0.00013491646677721292\r\nStep 2130, loss: 0.00015500682638958097\r\nStep 2131, loss: 0.0001416121085640043\r\nStep 2132, loss: 0.0001264302700292319\r\nStep 2133, loss: 0.00010428942914586514\r\nStep 2134, loss: 8.097451063804328e-05\r\nStep 2135, loss: 6.833441875642166e-05\r\nStep 2136, loss: 6.374120130203664e-05\r\nStep 2137, loss: 6.63499886286445e-05\r\nStep 2138, loss: 0.00010089429997606203\r\nStep 2139, loss: 6.778645911253989e-05\r\nStep 2140, loss: 6.347055750666186e-05\r\nStep 2141, loss: 5.698365930584259e-05\r\nStep 2142, loss: 5.0830487452913076e-05\r\nStep 2143, loss: 4.676398020819761e-05\r\nStep 2144, loss: 4.335688572609797e-05\r\nStep 2145, loss: 4.152739711571485e-05\r\nStep 2146, loss: 4.062319567310624e-05\r\nStep 2147, loss: 3.883260797010735e-05\r\nStep 2148, loss: 3.71068854292389e-05\r\nStep 2149, loss: 3.474035474937409e-05\r\nStep 2150, loss: 3.247856511734426e-05\r\nStep 2151, loss: 3.063415351789445e-05\r\nStep 2152, loss: 2.8956055757589638e-05\r\nStep 2153, loss: 2.7769057851401158e-05\r\nStep 2154, loss: 2.663856139406562e-05\r\nStep 2155, loss: 3.3788299333537e-05\r\nStep 2156, loss: 3.1008650694275275e-05\r\nStep 2157, loss: 2.298674917255994e-05\r\nStep 2158, loss: 2.2008629457559437e-05\r\nStep 2159, loss: 2.111234425683506e-05\r\nStep 2160, loss: 2.4439272237941623e-05\r\nStep 2161, loss: 1.904796590679325e-05\r\nStep 2162, loss: 1.817787779145874e-05\r\nStep 2163, loss: 1.743980283208657e-05\r\nStep 2164, loss: 1.6854022760526277e-05\r\nStep 2165, loss: 
1.6353609680663794e-05\r\nStep 2166, loss: 1.5902825907687657e-05\r\nStep 2167, loss: 1.5510438970522955e-05\r\nStep 2168, loss: 1.8267186533194035e-05\r\nStep 2169, loss: 1.4489968634734396e-05\r\nStep 2170, loss: 1.3973281056678388e-05\r\nStep 2171, loss: 1.353187963104574e-05\r\nStep 2172, loss: 1.3171358659747057e-05\r\nStep 2173, loss: 1.271985729545122e-05\r\nStep 2174, loss: 1.2403537766658701e-05\r\nStep 2175, loss: 1.203724150400376e-05\r\nStep 2176, loss: 1.170350606116699e-05\r\nStep 2177, loss: 1.1339076991134789e-05\r\nStep 2178, loss: 1.3073964510113001e-05\r\nStep 2179, loss: 1.2666899237956386e-05\r\nStep 2180, loss: 1.2297206012590323e-05\r\nStep 2181, loss: 1.1931847438972909e-05\r\nStep 2182, loss: 1.1513893696246669e-05\r\nStep 2183, loss: 1.1228325092815794e-05\r\nStep 2184, loss: 1.0851207662199158e-05\r\nStep 2185, loss: 1.0543938515183982e-05\r\nStep 2186, loss: 1.0220576768915635e-05\r\nStep 2187, loss: 9.952867912943475e-06\r\nStep 2188, loss: 9.671124644228257e-06\r\nStep 2189, loss: 9.43304712563986e-06\r\nStep 2190, loss: 8.272521881735884e-06\r\nStep 2191, loss: 8.992667062557302e-06\r\nStep 2192, loss: 7.924952114990447e-06\r\nStep 2193, loss: 7.779538464092184e-06\r\nStep 2194, loss: 7.6064461609348655e-06\r\nStep 2195, loss: 7.441247817041585e-06\r\nStep 2196, loss: 7.277420081663877e-06\r\nStep 2197, loss: 7.1335944085149094e-06\r\nStep 2198, loss: 7.0141754804353695e-06\r\nStep 2199, loss: 6.8532222030626144e-06\r\nStep 2200, loss: 6.686924734822242e-06\r\n",,terminal_output +4015,10278789,"TERMINAL",0,0,"388148",,terminal_output +4016,10279824,"TERMINAL",0,0,"499259",,terminal_output +4017,10281035,"TERMINAL",0,0,"520203650",,terminal_output +4018,10281937,"TERMINAL",0,0,"611471",,terminal_output +4019,10283058,"TERMINAL",0,0,"722582",,terminal_output +4020,10283242,"TERMINAL",0,0,"Step 2201, loss: 6.551214028149843e-06\r\nStep 2202, loss: 6.452583420468727e-06\r\nStep 2203, loss: 6.3314328144770116e-06\r\nStep 2204, loss: 6.185719939821865e-06\r\nStep 2205, loss: 6.061417479941156e-06\r\nStep 2206, loss: 5.930347015237203e-06\r\nStep 2207, loss: 5.814159976580413e-06\r\nStep 2208, loss: 5.701681857317453e-06\r\nStep 2209, loss: 5.5868481467769016e-06\r\nStep 2210, loss: 5.480249001266202e-06\r\nStep 2211, loss: 5.354831046133768e-06\r\nStep 2212, loss: 5.25366522197146e-06\r\nStep 2213, loss: 5.1456263463478535e-06\r\nStep 2214, loss: 5.058357146481285e-06\r\nStep 2215, loss: 4.952616109221708e-06\r\nStep 2216, loss: 4.841199825023068e-06\r\nStep 2217, loss: 4.782778432854684e-06\r\nStep 2218, loss: 4.673450348491315e-06\r\nStep 2219, loss: 5.304842488840222e-06\r\nStep 2220, loss: 5.1922525017289445e-06\r\nStep 2221, loss: 5.086771125206724e-06\r\nStep 2222, loss: 4.985186478734249e-06\r\nStep 2223, loss: 4.245216587150935e-06\r\nStep 2224, loss: 4.166200142208254e-06\r\nStep 2225, loss: 4.086090939381393e-06\r\nStep 2226, loss: 4.019312200398417e-06\r\nStep 2227, loss: 3.9249493966053706e-06\r\nStep 2228, loss: 3.857230240100762e-06\r\nStep 2229, loss: 3.776983703573933e-06\r\nStep 2230, loss: 3.685682713694405e-06\r\nStep 2231, loss: 3.6043018099007895e-06\r\nStep 2232, loss: 3.529586138029117e-06\r\nStep 2233, loss: 3.4407876228215173e-06\r\nStep 2234, loss: 3.373683739482658e-06\r\nStep 2235, loss: 3.296222757853684e-06\r\nStep 2236, loss: 3.2170366921491222e-06\r\nStep 2237, loss: 3.6533770071400795e-06\r\nStep 2238, loss: 3.5683683563547675e-06\r\nStep 2239, loss: 3.476970277915825e-06\r\nStep 2240, loss: 2.96073017125309e-06\r\nStep 2241, loss: 
3.304085794297862e-06\r\nStep 2242, loss: 3.212144747521961e-06\r\nStep 2243, loss: 2.7886392217624234e-06\r\nStep 2244, loss: 3.0476505799015285e-06\r\nStep 2245, loss: 2.669203695404576e-06\r\nStep 2246, loss: 2.6043276193377096e-06\r\nStep 2247, loss: 2.541330104577355e-06\r\nStep 2248, loss: 2.4773523819021648e-06\r\nStep 2249, loss: 2.4279358967760345e-06\r\nStep 2250, loss: 2.37091467170103e-06\r\nStep 2251, loss: 2.30492105401936e-06\r\nStep 2252, loss: 2.246547410322819e-06\r\nStep 2253, loss: 2.186254732805537e-06\r\nStep 2254, loss: 2.3900029191281646e-06\r\nStep 2255, loss: 2.0822385522478726e-06\r\nStep 2256, loss: 2.027630898737698e-06\r\nStep 2257, loss: 2.2296135284705088e-06\r\nStep 2258, loss: 2.1638707039528526e-06\r\nStep 2259, loss: 2.1035941699665273e-06\r\nStep 2260, loss: 2.0359564132377272e-06\r\nStep 2261, loss: 1.976270823433879e-06\r\nStep 2262, loss: 1.9143424196954584e-06\r\nStep 2263, loss: 1.731244765323936e-06\r\nStep 2264, loss: 1.6812856529213605e-06\r\nStep 2265, loss: 1.6481303646287415e-06\r\nStep 2266, loss: 1.599637357685424e-06\r\nStep 2267, loss: 1.5635017689419328e-06\r\nStep 2268, loss: 1.5126277048693737e-06\r\nStep 2269, loss: 1.4845903706373065e-06\r\nStep 2270, loss: 1.4398957546291058e-06\r\nStep 2271, loss: 1.4134932371234754e-06\r\nStep 2272, loss: 1.3711401152249891e-06\r\nStep 2273, loss: 1.3914823284721933e-06\r\nStep 2274, loss: 2.949817371700192e-06\r\nStep 2275, loss: 0.00030805071583017707\r\nStep 2276, loss: 2.7617334126261994e-06\r\nStep 2277, loss: 0.0015170428669080138\r\nStep 2278, loss: 0.0012899474240839481\r\nStep 2279, loss: 0.00017788411059882492\r\nStep 2280, loss: 0.0009553642012178898\r\nStep 2281, loss: 0.0005454737110994756\r\nStep 2282, loss: 0.0011643052566796541\r\nStep 2283, loss: 0.0008906463044695556\r\nStep 2284, loss: 2.382997263339348e-05\r\nStep 2285, loss: 2.7973786927759647e-05\r\nStep 2286, loss: 0.00036691754939965904\r\nStep 2287, loss: 7.382613694062456e-05\r\nStep 2288, loss: 0.00021363163250498474\r\nStep 2289, loss: 0.00013800036686006933\r\nStep 2290, loss: 0.00017286352522205561\r\nStep 2291, loss: 6.879320426378399e-05\r\nStep 2292, loss: 2.265040711790789e-05\r\nStep 2293, loss: 5.2198021876392886e-05\r\nStep 2294, loss: 1.028020960802678e-05\r\nStep 2295, loss: 1.7734275388647802e-05\r\nStep 2296, loss: 3.514582203933969e-05\r\nStep 2297, loss: 1.7323880456387997e-05\r\nStep 2298, loss: 2.1287391064106487e-05\r\nStep 2299, loss: 4.092944800504483e-05\r\nStep 2300, loss: 3.935549102607183e-05\r\nStep 2301, loss: 2.5484638172201812e-05\r\nStep 2302, loss: 1.70225011970615e-05\r\nStep 2303, loss: 1.2707265341305174e-05\r\nStep 2304, loss: 7.7203985711094e-06\r\nStep 2305, loss: 4.8230904212687165e-06\r\nStep 2306, loss: 4.7130802158790175e-06\r\nStep 2307, loss: 5.762167347711511e-06\r\nStep 2308, loss: 7.785931302350946e-06\r\nStep 2309, loss: 7.3806945692922454e-06\r\nStep 2310, loss: 6.8036360971746035e-06\r\nStep 2311, loss: 5.2753148338524625e-06\r\nStep 2312, loss: 5.505002718564356e-06\r\nStep 2313, loss: 0.0014204037142917514\r\nStep 2314, loss: 5.48956377315335e-05\r\nStep 2315, loss: 7.570972229586914e-05\r\nStep 2316, loss: 7.547765562776476e-05\r\nStep 2317, loss: 0.0001380450266879052\r\nStep 2318, loss: 4.620453910320066e-05\r\nStep 2319, loss: 3.338808528496884e-05\r\nStep 2320, loss: 0.009882408194243908\r\nStep 2321, loss: 2.25787480303552e-05\r\nStep 2322, loss: 6.42610975774005e-05\r\nStep 2323, loss: 0.00041826837696135044\r\nStep 2324, loss: 0.0005430146702565253\r\nStep 2325, 
loss: 0.0002663245832081884\r\nStep 2326, loss: 0.0002088415203616023\r\nStep 2327, loss: 0.00028934780857525766\r\nStep 2328, loss: 0.00021838500106241554\r\nStep 2329, loss: 0.00018352674669586122\r\nStep 2330, loss: 0.00015713983157183975\r\nStep 2331, loss: 0.00015045449254103005\r\nStep 2332, loss: 0.00012715780758298934\r\nStep 2333, loss: 9.403859439771622e-05\r\nStep 2334, loss: 7.692422514082864e-05\r\nStep 2335, loss: 6.596828461624682e-05\r\nStep 2336, loss: 6.219751230673864e-05\r\nStep 2337, loss: 6.708114233333617e-05\r\nStep 2338, loss: 0.001376108848489821\r\nStep 2339, loss: 0.0021599254105240107\r\nStep 2340, loss: 5.884430356672965e-05\r\nStep 2341, loss: 9.59870740189217e-05\r\nStep 2342, loss: 0.00020488606241997331\r\nStep 2343, loss: 0.0002561272995080799\r\nStep 2344, loss: 0.00019672085181809962\r\nStep 2345, loss: 0.00021368956367950886\r\nStep 2346, loss: 0.00014208127686288208\r\nStep 2347, loss: 0.00013469626719597727\r\nStep 2348, loss: 0.0001296493865083903\r\nStep 2349, loss: 0.00012029838399030268\r\nStep 2350, loss: 0.0012273101601749659\r\nStep 2351, loss: 0.0002721543423831463\r\nStep 2352, loss: 6.0739275795640424e-05\r\nStep 2353, loss: 0.00027276130276732147\r\nStep 2354, loss: 0.000141299344250001\r\nStep 2355, loss: 0.0002749025297816843\r\nStep 2356, loss: 0.00011361647193552926\r\nStep 2357, loss: 7.614983042003587e-05\r\nStep 2358, loss: 0.0001986009010579437\r\nStep 2359, loss: 9.597180178388953e-05\r\nStep 2360, loss: 7.650197221664712e-05\r\nStep 2361, loss: 8.053772035054862e-05\r\nStep 2362, loss: 8.47463306854479e-05\r\nStep 2363, loss: 8.345442620338872e-05\r\nStep 2364, loss: 8.508165046805516e-05\r\nStep 2365, loss: 0.0001343023031949997\r\nStep 2366, loss: 6.13669035374187e-05\r\nStep 2367, loss: 7.11283428245224e-05\r\nStep 2368, loss: 0.00039364423719234765\r\nStep 2369, loss: 4.3969146645395085e-05\r\nStep 2370, loss: 3.7202535168034956e-05\r\nStep 2371, loss: 7.8613382356707e-05\r\nStep 2372, loss: 5.8509180234977975e-05\r\nStep 2373, loss: 3.638459020294249e-05\r\nStep 2374, loss: 3.287343861302361e-05\r\nStep 2375, loss: 3.1454623240279034e-05\r\nStep 2376, loss: 3.139618274872191e-05\r\nStep 2377, loss: 3.2029565772973e-05\r\nStep 2378, loss: 3.123439819319174e-05\r\nStep 2379, loss: 3.173763252561912e-05\r\nStep 2380, loss: 2.8664027922786772e-05\r\nStep 2381, loss: 2.631019742693752e-05\r\nStep 2382, loss: 2.3488148144679144e-05\r\nStep 2383, loss: 2.150981526938267e-05\r\nStep 2384, loss: 2.023432716669049e-05\r\nStep 2385, loss: 1.910424543893896e-05\r\nStep 2386, loss: 1.7961445337277837e-05\r\nStep 2387, loss: 1.6407397197326645e-05\r\nStep 2388, loss: 1.5169879588938784e-05\r\nStep 2389, loss: 0.0007583594997413456\r\nStep 2390, loss: 1.6278559996862896e-05\r\nStep 2391, loss: 0.0007279012352228165\r\nStep 2392, loss: 0.0002788159472402185\r\nStep 2393, loss: 0.00017248799849767238\r\nStep 2394, loss: 7.05387064954266e-05\r\nStep 2395, loss: 0.00010311156074749306\r\nStep 2396, loss: 9.70140245044604e-05\r\nStep 2397, loss: 0.0007914254674687982\r\nStep 2398, loss: 8.754697773838416e-05\r\nStep 2399, loss: 9.740792302181944e-05\r\nStep 2400, loss: 6.36484837741591e-05\r\nStep 2401, loss: 8.062090637395158e-05\r\nStep 2402, loss: 8.753714064368978e-05\r\nStep 2403, loss: 7.557450589956716e-05\r\nStep 2404, loss: 5.89211231272202e-05\r\nStep 2405, loss: 4.763620745507069e-05\r\nStep 2406, loss: 3.8606871385127306e-05\r\nStep 2407, loss: 3.784091677516699e-05\r\nStep 2408, loss: 4.0928305679699406e-05\r\n",,terminal_output 
+4021,10284088,"TERMINAL",0,0,"833693",,terminal_output +4022,10284987,"TERMINAL",0,0,"9447504",,terminal_output +4023,10286134,"TERMINAL",0,0,"4055815",,terminal_output +4024,10287054,"TERMINAL",0,0,"166926",,terminal_output +4025,10288080,"TERMINAL",0,0,"2773037",,terminal_output +4026,10288232,"TERMINAL",0,0,"Step 2409, loss: 4.328340583015233e-05\r\nStep 2410, loss: 4.3114305299241096e-05\r\nStep 2411, loss: 4.249442645232193e-05\r\nStep 2412, loss: 4.042480941279791e-05\r\nStep 2413, loss: 0.0033747402485460043\r\nStep 2414, loss: 2.6480896849534474e-05\r\nStep 2415, loss: 5.630232044495642e-05\r\nStep 2416, loss: 0.00017707287042867392\r\nStep 2417, loss: 0.00011795728642027825\r\nStep 2418, loss: 0.00012211297871544957\r\nStep 2419, loss: 0.00017118177493102849\r\nStep 2420, loss: 0.00032193990773521364\r\nStep 2421, loss: 0.00013889062392991036\r\nStep 2422, loss: 9.998117457143962e-05\r\nStep 2423, loss: 0.00010507111437618732\r\nStep 2424, loss: 0.00011864968109875917\r\nStep 2425, loss: 0.000164433557074517\r\nStep 2426, loss: 6.565827061422169e-05\r\nStep 2427, loss: 5.931419582339004e-05\r\nStep 2428, loss: 5.976544343866408e-05\r\nStep 2429, loss: 5.808575951959938e-05\r\nStep 2430, loss: 5.942732605035417e-05\r\nStep 2431, loss: 5.756377868237905e-05\r\nStep 2432, loss: 5.734130900236778e-05\r\nStep 2433, loss: 4.9350659537594765e-05\r\nStep 2434, loss: 3.8984308048384264e-05\r\nStep 2435, loss: 3.3108473871834576e-05\r\nStep 2436, loss: 3.0405173674807884e-05\r\nStep 2437, loss: 2.8721951821353287e-05\r\nStep 2438, loss: 2.6848882043850608e-05\r\nStep 2439, loss: 2.4942577510955743e-05\r\nStep 2440, loss: 0.01278288196772337\r\nStep 2441, loss: 4.133499169256538e-05\r\nStep 2442, loss: 0.00017702477634884417\r\nStep 2443, loss: 0.0006868094787932932\r\nStep 2444, loss: 0.00043513422133401036\r\nStep 2445, loss: 0.0002921717241406441\r\nStep 2446, loss: 0.000227106997044757\r\nStep 2447, loss: 0.0001963358372449875\r\nStep 2448, loss: 0.00014708228991366923\r\nStep 2449, loss: 0.00015621152124367654\r\nStep 2450, loss: 0.00018651099526323378\r\nStep 2451, loss: 0.00018444859597366303\r\nStep 2452, loss: 0.00015948977670632303\r\nStep 2453, loss: 0.00014117886894382536\r\nStep 2454, loss: 0.00013373889669310302\r\nStep 2455, loss: 0.00010646519513102248\r\nStep 2456, loss: 9.230842988472432e-05\r\nStep 2457, loss: 0.0002719734620768577\r\nStep 2458, loss: 9.19146987143904e-05\r\nStep 2459, loss: 0.0001031774008879438\r\nStep 2460, loss: 0.0001321066083619371\r\nStep 2461, loss: 0.00012992873962502927\r\nStep 2462, loss: 0.00025014099082909524\r\nStep 2463, loss: 6.668457353953272e-05\r\nStep 2464, loss: 6.258492066990584e-05\r\nStep 2465, loss: 7.779624138493091e-05\r\nStep 2466, loss: 9.412915824213997e-05\r\nStep 2467, loss: 0.00010026618838310242\r\nStep 2468, loss: 7.355787965934724e-05\r\nStep 2469, loss: 5.4217816796153784e-05\r\nStep 2470, loss: 4.445576632861048e-05\r\nStep 2471, loss: 4.2852017941186205e-05\r\nStep 2472, loss: 4.4771986722480506e-05\r\nStep 2473, loss: 4.606024594977498e-05\r\nStep 2474, loss: 4.394154530018568e-05\r\nStep 2475, loss: 4.115107731195167e-05\r\nStep 2476, loss: 3.5391796700423583e-05\r\nStep 2477, loss: 2.9662622182513587e-05\r\nStep 2478, loss: 2.564127316873055e-05\r\nStep 2479, loss: 2.3483937184209935e-05\r\nStep 2480, loss: 2.203825715696439e-05\r\nStep 2481, loss: 2.136731745849829e-05\r\nStep 2482, loss: 0.0002891985059250146\r\nStep 2483, loss: 2.2779258870286867e-05\r\nStep 2484, loss: 0.0010294381063431501\r\nStep 2485, loss: 
2.719748044910375e-05\r\nStep 2486, loss: 2.923351712524891e-05\r\nStep 2487, loss: 3.206983092240989e-05\r\nStep 2488, loss: 3.955411011702381e-05\r\nStep 2489, loss: 0.00044439142220653594\r\nStep 2490, loss: 3.368382385815494e-05\r\nStep 2491, loss: 3.0282426450867206e-05\r\nStep 2492, loss: 0.00020961632253602147\r\nStep 2493, loss: 5.943259384366684e-05\r\nStep 2494, loss: 5.926719313720241e-05\r\nStep 2495, loss: 6.241614755708724e-05\r\nStep 2496, loss: 6.61007798044011e-05\r\nStep 2497, loss: 8.91184390638955e-05\r\nStep 2498, loss: 4.540902591543272e-05\r\nStep 2499, loss: 4.639582766685635e-05\r\nStep 2500, loss: 0.03287198394536972\r\nStep 2501, loss: 0.012492084875702858\r\nStep 2502, loss: 6.791159103158861e-05\r\nStep 2503, loss: 0.000687217980157584\r\nStep 2504, loss: 0.002911621704697609\r\nStep 2505, loss: 0.0006024953327141702\r\nStep 2506, loss: 0.0002628327638376504\r\nStep 2507, loss: 0.00017223523173015565\r\nStep 2508, loss: 0.0002381637750659138\r\nStep 2509, loss: 0.0003497262659948319\r\nStep 2510, loss: 0.0005461963010020554\r\nStep 2511, loss: 0.0005300137563608587\r\nStep 2512, loss: 0.00039981771260499954\r\nStep 2513, loss: 0.0002920620609074831\r\nStep 2514, loss: 0.00022508374240715057\r\nStep 2515, loss: 0.00022508439724333584\r\nStep 2516, loss: 0.00021153043780941516\r\nStep 2517, loss: 0.0001809807145036757\r\nStep 2518, loss: 0.00017967211897484958\r\nStep 2519, loss: 0.00017305748770013452\r\nStep 2520, loss: 0.00015722925309091806\r\nStep 2521, loss: 0.0001367805089103058\r\nStep 2522, loss: 0.00012670960859395564\r\nStep 2523, loss: 0.0003782501444220543\r\nStep 2524, loss: 0.00010614524217089638\r\nStep 2525, loss: 0.00011794913734775037\r\nStep 2526, loss: 0.00010482822108315304\r\nStep 2527, loss: 0.00010300837311660871\r\nStep 2528, loss: 0.0001027986581902951\r\nStep 2529, loss: 9.856134420260787e-05\r\nStep 2530, loss: 8.972708019427955e-05\r\nStep 2531, loss: 8.06830357760191e-05\r\nStep 2532, loss: 7.278360135387629e-05\r\nStep 2533, loss: 6.828655023127794e-05\r\nStep 2534, loss: 6.383943400578573e-05\r\nStep 2535, loss: 0.0010965286055579782\r\nStep 2536, loss: 5.267731103231199e-05\r\nStep 2537, loss: 7.15861824573949e-05\r\nStep 2538, loss: 0.00016843801131471992\r\nStep 2539, loss: 0.00015088293002918363\r\nStep 2540, loss: 0.0001240246056113392\r\nStep 2541, loss: 0.0001075972177204676\r\nStep 2542, loss: 8.542069554096088e-05\r\nStep 2543, loss: 7.447702955687419e-05\r\nStep 2544, loss: 0.004494591150432825\r\nStep 2545, loss: 6.62441088934429e-05\r\nStep 2546, loss: 0.0001575907226651907\r\nStep 2547, loss: 0.00042015837971121073\r\nStep 2548, loss: 0.0003808621841017157\r\nStep 2549, loss: 0.00019508654077071697\r\nStep 2550, loss: 0.00020158129336778075\r\nStep 2551, loss: 0.00015009086928330362\r\nStep 2552, loss: 0.00015405012527480721\r\nStep 2553, loss: 0.0001745326298987493\r\nStep 2554, loss: 0.00015348057786468416\r\nStep 2555, loss: 0.00012336077634245157\r\nStep 2556, loss: 0.00010929904237855226\r\nStep 2557, loss: 0.00010415682481834665\r\nStep 2558, loss: 9.724449046188965e-05\r\nStep 2559, loss: 8.809218707028776e-05\r\nStep 2560, loss: 7.975708285812289e-05\r\nStep 2561, loss: 7.29604362277314e-05\r\nStep 2562, loss: 7.200649997685105e-05\r\nStep 2563, loss: 6.860825669718906e-05\r\nStep 2564, loss: 6.56149786664173e-05\r\nStep 2565, loss: 5.9676283854059875e-05\r\nStep 2566, loss: 5.473420969792642e-05\r\nStep 2567, loss: 4.877306128037162e-05\r\nStep 2568, loss: 0.025559859350323677\r\nStep 2569, loss: 
5.8583158534020185e-05\r\nStep 2570, loss: 0.0022553508169949055\r\nStep 2571, loss: 0.00016578947543166578\r\nStep 2572, loss: 0.00022966500546317548\r\nStep 2573, loss: 0.0004381463513709605\r\nStep 2574, loss: 0.0006316245999187231\r\nStep 2575, loss: 0.0006225917604751885\r\nStep 2576, loss: 0.00045004006824456155\r\nStep 2577, loss: 0.0003649100835900754\r\nStep 2578, loss: 0.00024119530280586332\r\nStep 2579, loss: 0.00019662457634694874\r\nStep 2580, loss: 0.00019745326426345855\r\nStep 2581, loss: 0.00015318735677283257\r\nStep 2582, loss: 0.0009568528621457517\r\nStep 2583, loss: 0.00022934636217541993\r\nStep 2584, loss: 0.00016461477207485586\r\nStep 2585, loss: 0.00020997154933866113\r\nStep 2586, loss: 0.00021078213467262685\r\nStep 2587, loss: 0.00018657659529708326\r\nStep 2588, loss: 0.00015203618386294693\r\nStep 2589, loss: 0.000133883862872608\r\nStep 2590, loss: 0.00011902785627171397\r\nStep 2591, loss: 0.00010530425061006099\r\nStep 2592, loss: 9.659273200668395e-05\r\nStep 2593, loss: 8.561278809793293e-05\r\nStep 2594, loss: 7.950969302328303e-05\r\nStep 2595, loss: 7.020904013188556e-05\r\nStep 2596, loss: 6.504003977170214e-05\r\nStep 2597, loss: 6.21134095126763e-05\r\nStep 2598, loss: 5.756460814154707e-05\r\nStep 2599, loss: 0.0006811968632973731\r\nStep 2600, loss: 5.54189391550608e-05\r\nStep 2601, loss: 0.0005773691227659583\r\nStep 2602, loss: 5.245561624178663e-05\r\nStep 2603, loss: 5.0404607463860884e-05\r\nStep 2604, loss: 6.756641960237175e-05\r\nStep 2605, loss: 0.0002514887601137161\r\nStep 2606, loss: 7.092943997122347e-05\r\nStep 2607, loss: 7.41387193556875e-05\r\nStep 2608, loss: 7.259510311996564e-05\r\nStep 2609, loss: 0.0001255524839507416\r\nStep 2610, loss: 6.030704025761224e-05\r\nStep 2611, loss: 4.7180710680549964e-05\r\nStep 2612, loss: 4.0452552639180794e-05\r\nStep 2613, loss: 3.838204065687023e-05\r\nStep 2614, loss: 0.0005980787682347\r\nStep 2615, loss: 7.922811346361414e-05\r\nStep 2616, loss: 0.0002937965327873826\r\n",,terminal_output +4027,10289114,"TERMINAL",0,0,"388148",,terminal_output +4028,10290147,"TERMINAL",0,0,"499259",,terminal_output +4029,10291180,"TERMINAL",0,0,"53030368:00",,terminal_output +4030,10292215,"TERMINAL",0,0,"611471",,terminal_output +4031,10293238,"TERMINAL",0,0,"Step 2617, loss: 8.304816583404317e-05\r\nStep 2618, loss: 0.00022953619190957397\r\nStep 2619, loss: 0.0002927608438767493\r\nStep 2620, loss: 5.125152529217303e-05\r\nStep 2621, loss: 7.04190315445885e-05\r\nStep 2622, loss: 0.0002683234924916178\r\nStep 2623, loss: 8.388792048208416e-05\r\nStep 2624, loss: 9.525653149466962e-05\r\nStep 2625, loss: 7.625605940120295e-05\r\nStep 2626, loss: 5.8315941714681685e-05\r\nStep 2627, loss: 6.121551268734038e-05\r\nStep 2628, loss: 7.708894554525614e-05\r\nStep 2629, loss: 5.589010106632486e-05\r\nStep 2630, loss: 0.0009120278991758823\r\nStep 2631, loss: 3.6337176425149664e-05\r\nStep 2632, loss: 5.275452713249251e-05\r\nStep 2633, loss: 0.00012029904610244557\r\nStep 2634, loss: 0.00020755395235028118\r\nStep 2635, loss: 0.00013189301535021514\r\nStep 2636, loss: 9.063864126801491e-05\r\nStep 2637, loss: 6.255211337702349e-05\r\nStep 2638, loss: 5.817527198814787e-05\r\nStep 2639, loss: 6.298342486843467e-05\r\nStep 2640, loss: 7.219349936349317e-05\r\nStep 2641, loss: 7.027840183582157e-05\r\nStep 2642, loss: 6.915484118508175e-05\r\nStep 2643, loss: 5.101982969790697e-05\r\nStep 2644, loss: 4.3384763557696715e-05\r\nStep 2645, loss: 3.668785939225927e-05\r\nStep 2646, loss: 
3.468073555268347e-05\r\nStep 2647, loss: 3.558699245331809e-05\r\nStep 2648, loss: 3.6020635889144614e-05\r\nStep 2649, loss: 3.448689676588401e-05\r\nStep 2650, loss: 0.00030443791183643043\r\nStep 2651, loss: 3.1506348022958264e-05\r\nStep 2652, loss: 0.0021762291435152292\r\nStep 2653, loss: 5.354262248147279e-05\r\nStep 2654, loss: 0.000324198161251843\r\nStep 2655, loss: 0.00027354643680155277\r\nStep 2656, loss: 0.00015827345487195998\r\nStep 2657, loss: 0.0002229279198218137\r\nStep 2658, loss: 0.0001292240049224347\r\nStep 2659, loss: 0.00012469767534639686\r\nStep 2660, loss: 0.00012367364251986146\r\nStep 2661, loss: 9.910621156450361e-05\r\nStep 2662, loss: 7.865941006457433e-05\r\nStep 2663, loss: 7.348777580773458e-05\r\nStep 2664, loss: 8.016034553293139e-05\r\nStep 2665, loss: 7.914096931926906e-05\r\nStep 2666, loss: 7.807887595845386e-05\r\nStep 2667, loss: 7.329652726184577e-05\r\nStep 2668, loss: 6.468377978308126e-05\r\nStep 2669, loss: 5.2506846259348094e-05\r\nStep 2670, loss: 4.402705963002518e-05\r\nStep 2671, loss: 3.6922210711054504e-05\r\nStep 2672, loss: 3.2561056286795065e-05\r\nStep 2673, loss: 2.9895158149884082e-05\r\nStep 2674, loss: 2.8957696486031637e-05\r\nStep 2675, loss: 2.8275906515773386e-05\r\nStep 2676, loss: 2.767112891888246e-05\r\nStep 2677, loss: 2.6162582798860967e-05\r\nStep 2678, loss: 0.0003573803114704788\r\nStep 2679, loss: 2.094890623993706e-05\r\nStep 2680, loss: 4.065433677169494e-05\r\nStep 2681, loss: 0.00022743796580471098\r\nStep 2682, loss: 3.81951431336347e-05\r\nStep 2683, loss: 6.0255977587075904e-05\r\nStep 2684, loss: 4.1911265725502744e-05\r\nStep 2685, loss: 3.441763328737579e-05\r\nStep 2686, loss: 3.533033304847777e-05\r\nStep 2687, loss: 0.0021018236875534058\r\nStep 2688, loss: 0.0014620352303609252\r\nStep 2689, loss: 5.585213875747286e-05\r\nStep 2690, loss: 0.0001264932070625946\r\nStep 2691, loss: 0.000360051664756611\r\nStep 2692, loss: 0.00036915033706463873\r\nStep 2693, loss: 0.00017108018801081926\r\nStep 2694, loss: 0.0001399189786752686\r\nStep 2695, loss: 0.000130765387439169\r\nStep 2696, loss: 0.00012163147039245814\r\nStep 2697, loss: 0.00011865475244121626\r\nStep 2698, loss: 0.00011206416820641607\r\nStep 2699, loss: 0.00011462598195066676\r\nStep 2700, loss: 0.00010746161569841206\r\nStep 2701, loss: 9.551533730700612e-05\r\nStep 2702, loss: 7.957307389006019e-05\r\nStep 2703, loss: 6.865546311018988e-05\r\nStep 2704, loss: 6.165636295918375e-05\r\nStep 2705, loss: 5.724812945118174e-05\r\nStep 2706, loss: 5.282794518279843e-05\r\nStep 2707, loss: 4.797790461452678e-05\r\nStep 2708, loss: 4.387964872876182e-05\r\nStep 2709, loss: 4.006751987617463e-05\r\nStep 2710, loss: 3.725366332218982e-05\r\nStep 2711, loss: 3.448194911470637e-05\r\nStep 2712, loss: 3.197331170667894e-05\r\nStep 2713, loss: 2.8493543140939437e-05\r\nStep 2714, loss: 2.557365587563254e-05\r\nStep 2715, loss: 2.362842315051239e-05\r\nStep 2716, loss: 2.1795065549667925e-05\r\nStep 2717, loss: 2.031570693361573e-05\r\nStep 2718, loss: 1.935255750140641e-05\r\nStep 2719, loss: 1.8502463717595674e-05\r\nStep 2720, loss: 1.7488380763097666e-05\r\nStep 2721, loss: 1.6892778148758225e-05\r\nStep 2722, loss: 1.640476330067031e-05\r\nStep 2723, loss: 1.55625311890617e-05\r\nStep 2724, loss: 1.4790458408242557e-05\r\nStep 2725, loss: 1.4025899872649461e-05\r\nStep 2726, loss: 1.3138523172528949e-05\r\nStep 2727, loss: 1.2364546819298994e-05\r\nStep 2728, loss: 1.169574898085557e-05\r\nStep 2729, loss: 1.1080575859523378e-05\r\nStep 2730, 
loss: 1.0526640835450962e-05\r\nStep 2731, loss: 1.011956192087382e-05\r\nStep 2732, loss: 9.614604095986579e-06\r\nStep 2733, loss: 9.126602890319191e-06\r\nStep 2734, loss: 8.778973096923437e-06\r\nStep 2735, loss: 8.424256520811468e-06\r\nStep 2736, loss: 7.991342499735765e-06\r\nStep 2737, loss: 7.62556783229229e-06\r\nStep 2738, loss: 7.257261586346431e-06\r\nStep 2739, loss: 7.012999503785977e-06\r\nStep 2740, loss: 6.680449587292969e-06\r\nStep 2741, loss: 6.428824235626962e-06\r\nStep 2742, loss: 6.18282410869142e-06\r\nStep 2743, loss: 0.03710496053099632\r\nStep 2744, loss: 7.892800567788072e-06\r\nStep 2745, loss: 7.360374729614705e-05\r\nStep 2746, loss: 0.0003144413058180362\r\nStep 2747, loss: 0.0009884260362014174\r\nStep 2748, loss: 0.0010122691746801138\r\nStep 2749, loss: 0.0008351559517905116\r\nStep 2750, loss: 0.0008559206034988165\r\nStep 2751, loss: 0.0005514048971235752\r\nStep 2752, loss: 0.0003256686613894999\r\nStep 2753, loss: 0.00027300752117298543\r\nStep 2754, loss: 0.00024430762277916074\r\nStep 2755, loss: 0.00019929514382965863\r\nStep 2756, loss: 0.00018416137027088553\r\nStep 2757, loss: 0.0001692899822955951\r\nStep 2758, loss: 0.0001574142079334706\r\nStep 2759, loss: 0.0001473631855333224\r\nStep 2760, loss: 0.00013617746299132705\r\nStep 2761, loss: 0.00011747609823942184\r\nStep 2762, loss: 9.890590445138514e-05\r\nStep 2763, loss: 8.883047121344134e-05\r\nStep 2764, loss: 8.215940033551306e-05\r\nStep 2765, loss: 7.479913620045409e-05\r\nStep 2766, loss: 7.414903666358441e-05\r\nStep 2767, loss: 6.492538523161784e-05\r\nStep 2768, loss: 6.080680032027885e-05\r\nStep 2769, loss: 5.7609457144280896e-05\r\nStep 2770, loss: 5.629260704154149e-05\r\nStep 2771, loss: 5.412050450104289e-05\r\nStep 2772, loss: 4.636442463379353e-05\r\nStep 2773, loss: 4.2462688725208864e-05\r\nStep 2774, loss: 3.859094431390986e-05\r\nStep 2775, loss: 3.4634587791515514e-05\r\nStep 2776, loss: 3.276187271694653e-05\r\nStep 2777, loss: 3.0492588848574087e-05\r\nStep 2778, loss: 2.7805082936538383e-05\r\nStep 2779, loss: 2.5706185624585487e-05\r\nStep 2780, loss: 2.4315062546520494e-05\r\nStep 2781, loss: 2.286870039824862e-05\r\nStep 2782, loss: 2.164395118597895e-05\r\nStep 2783, loss: 2.0618363123503514e-05\r\nStep 2784, loss: 1.9426137441769242e-05\r\nStep 2785, loss: 1.852703462645877e-05\r\nStep 2786, loss: 1.7824870155891404e-05\r\nStep 2787, loss: 1.706882903818041e-05\r\nStep 2788, loss: 1.6230183973675594e-05\r\nStep 2789, loss: 1.528510074422229e-05\r\nStep 2790, loss: 1.4594878848583903e-05\r\nStep 2791, loss: 1.3817949366057292e-05\r\nStep 2792, loss: 1.2973793673154432e-05\r\nStep 2793, loss: 1.2321901522227563e-05\r\nStep 2794, loss: 1.1669585546769667e-05\r\nStep 2795, loss: 1.1052545232814737e-05\r\nStep 2796, loss: 1.057852841768181e-05\r\nStep 2797, loss: 1.0109660252055619e-05\r\nStep 2798, loss: 9.705957381811459e-06\r\nStep 2799, loss: 9.257643796445336e-06\r\nStep 2800, loss: 8.965705092123244e-06\r\nStep 2801, loss: 8.587818228988908e-06\r\nStep 2802, loss: 8.257577064796351e-06\r\nStep 2803, loss: 7.95879077486461e-06\r\nStep 2804, loss: 7.638749593752436e-06\r\nStep 2805, loss: 7.266599368449533e-06\r\nStep 2806, loss: 7.004303370194975e-06\r\nStep 2807, loss: 6.780392595828744e-06\r\nStep 2808, loss: 6.512540949188406e-06\r\nStep 2809, loss: 6.2931862885307055e-06\r\nStep 2810, loss: 6.089250746299513e-06\r\nStep 2811, loss: 5.886835879209684e-06\r\nStep 2812, loss: 5.729973963752855e-06\r\nStep 2813, loss: 5.581182904279558e-06\r\nStep 2814, loss: 
5.413187864178326e-06\r\nStep 2815, loss: 5.256963504507439e-06\r\nStep 2816, loss: 5.121301455801586e-06\r\nStep 2817, loss: 4.976765012543183e-06\r\nStep 2818, loss: 4.826841177418828e-06\r\nStep 2819, loss: 4.69808674097294e-06\r\nStep 2820, loss: 4.559719855024014e-06\r\nStep 2821, loss: 4.426746272656601e-06\r\nStep 2822, loss: 4.328254817664856e-06\r\nStep 2823, loss: 4.214116870571161e-06\r\nStep 2824, loss: 4.106207143195206e-06\r\n",,terminal_output +4032,10293290,"TERMINAL",0,0,"722582",,terminal_output +4033,10294362,"TERMINAL",0,0,"833693",,terminal_output +4034,10295354,"TERMINAL",0,0,"95584:015",,terminal_output +4035,10296373,"TERMINAL",0,0,"5166926",,terminal_output +4036,10297391,"TERMINAL",0,0,"2774037",,terminal_output +4037,10298424,"TERMINAL",0,0,"388148",,terminal_output +4038,10299456,"TERMINAL",0,0,"499259",,terminal_output +4039,10300095,"TERMINAL",0,0,"Step 2825, loss: 4.0133031689038035e-06\r\nStep 2826, loss: 3.898663180734729e-06\r\nStep 2827, loss: 3.807888788287528e-06\r\nStep 2828, loss: 3.717057779795141e-06\r\nStep 2829, loss: 3.62902824235789e-06\r\nStep 2830, loss: 3.55320321432373e-06\r\nStep 2831, loss: 3.4722274904197548e-06\r\nStep 2832, loss: 3.3932685710169608e-06\r\nStep 2833, loss: 3.3027044992195442e-06\r\nStep 2834, loss: 3.2428008580609458e-06\r\nStep 2835, loss: 3.156083494104678e-06\r\nStep 2836, loss: 3.0911915018805303e-06\r\nStep 2837, loss: 3.0169214824127266e-06\r\nStep 2838, loss: 2.9586210530396784e-06\r\nStep 2839, loss: 2.8873068913526367e-06\r\nStep 2840, loss: 2.8293225113884546e-06\r\nStep 2841, loss: 2.7635796868707985e-06\r\nStep 2842, loss: 2.714916263357736e-06\r\nStep 2843, loss: 2.662325414348743e-06\r\nStep 2844, loss: 2.597894308564719e-06\r\nStep 2845, loss: 2.550737690398819e-06\r\nStep 2846, loss: 2.480581315467134e-06\r\nStep 2847, loss: 2.4339672108908417e-06\r\nStep 2848, loss: 2.3860486635385314e-06\r\nStep 2849, loss: 2.3365271317743463e-06\r\nStep 2850, loss: 2.271577841383987e-06\r\nStep 2851, loss: 2.2326569251163164e-06\r\nStep 2852, loss: 2.1815155832882738e-06\r\nStep 2853, loss: 2.1405051029432798e-06\r\nStep 2854, loss: 2.092319618895999e-06\r\nStep 2855, loss: 2.0378172393975547e-06\r\nStep 2856, loss: 1.9991716726508457e-06\r\nStep 2857, loss: 1.9625831555458717e-06\r\nStep 2858, loss: 1.911482286232058e-06\r\nStep 2859, loss: 1.8748853563010925e-06\r\nStep 2860, loss: 1.8346444221606362e-06\r\nStep 2861, loss: 1.7984282294492004e-06\r\nStep 2862, loss: 1.7497972066848888e-06\r\nStep 2863, loss: 1.7200679849338485e-06\r\nStep 2864, loss: 1.6751217799537699e-06\r\nStep 2865, loss: 1.6443234471807955e-06\r\nStep 2866, loss: 1.6051270677053253e-06\r\nStep 2867, loss: 1.5689514611949562e-06\r\nStep 2868, loss: 1.5334236422859249e-06\r\nStep 2869, loss: 1.5007627780505572e-06\r\nStep 2870, loss: 1.4692924423798104e-06\r\nStep 2871, loss: 1.4314482541522011e-06\r\nStep 2872, loss: 1.4008606967763626e-06\r\nStep 2873, loss: 1.3739980886384728e-06\r\nStep 2874, loss: 1.3427218163997168e-06\r\nStep 2875, loss: 1.3105307061778149e-06\r\nStep 2876, loss: 1.2761528296323377e-06\r\nStep 2877, loss: 1.2495414694058127e-06\r\nStep 2878, loss: 1.2230919992362033e-06\r\nStep 2879, loss: 1.198205382024753e-06\r\nStep 2880, loss: 1.1678607734211255e-06\r\nStep 2881, loss: 1.1467318472568877e-06\r\nStep 2882, loss: 1.1186949677721714e-06\r\nStep 2883, loss: 1.0889574468819774e-06\r\nStep 2884, loss: 1.067448124558723e-06\r\nStep 2885, loss: 1.041727500705747e-06\r\nStep 2886, loss: 1.019359615384019e-06\r\nStep 2887, loss: 
9.935337175193126e-07\r\nStep 2888, loss: 9.737331083670142e-07\r\nStep 2889, loss: 9.506606488685065e-07\r\nStep 2890, loss: 9.251747883354255e-07\r\nStep 2891, loss: 9.071072781807743e-07\r\nStep 2892, loss: 8.842534384712053e-07\r\nStep 2893, loss: 8.642259103908145e-07\r\nStep 2894, loss: 8.462636174044746e-07\r\nStep 2895, loss: 8.267059001809685e-07\r\nStep 2896, loss: 8.074882771325065e-07\r\nStep 2897, loss: 7.894207669778552e-07\r\nStep 2898, loss: 7.714664889135747e-07\r\nStep 2899, loss: 7.516495088566444e-07\r\nStep 2900, loss: 7.361896336988139e-07\r\nStep 2901, loss: 7.221793794087716e-07\r\nStep 2902, loss: 7.040550258352596e-07\r\nStep 2903, loss: 6.867891215733835e-07\r\nStep 2904, loss: 6.721066938553122e-07\r\nStep 2905, loss: 6.556020935022389e-07\r\nStep 2906, loss: 6.395833338501689e-07\r\nStep 2907, loss: 6.254839490793529e-07\r\nStep 2908, loss: 6.126559810581966e-07\r\nStep 2909, loss: 5.99471661644202e-07\r\nStep 2910, loss: 5.858419740434329e-07\r\nStep 2911, loss: 5.729735335080477e-07\r\nStep 2912, loss: 5.614251108454482e-07\r\nStep 2913, loss: 8.909437383408658e-07\r\nStep 2914, loss: 5.669807023878093e-07\r\nStep 2915, loss: 5.568252845478128e-07\r\nStep 2916, loss: 5.502006956703553e-07\r\nStep 2917, loss: 5.392192861108924e-07\r\nStep 2918, loss: 5.257190878182882e-07\r\nStep 2919, loss: 5.190379397390643e-07\r\nStep 2920, loss: 4.976417926627619e-07\r\nStep 2921, loss: 4.935764081892557e-07\r\nStep 2922, loss: 4.7596230956514773e-07\r\nStep 2923, loss: 4.6611458515144477e-07\r\nStep 2924, loss: 4.6233228090386547e-07\r\nStep 2925, loss: 4.77162643619522e-07\r\nStep 2926, loss: 1.029787995321385e-06\r\nStep 2927, loss: 0.00039130295044742525\r\nStep 2928, loss: 3.7320610317692626e-06\r\nStep 2929, loss: 0.0015502150636166334\r\nStep 2930, loss: 0.0010195225477218628\r\nStep 2931, loss: 1.5441454479514505e-06\r\nStep 2932, loss: 0.00013638538075610995\r\nStep 2933, loss: 0.0004984148545190692\r\nStep 2934, loss: 0.0004150966997258365\r\nStep 2935, loss: 0.00022049616381991655\r\nStep 2936, loss: 0.00012908680946566164\r\nStep 2937, loss: 3.99959935748484e-06\r\nStep 2938, loss: 4.335967969382182e-05\r\nStep 2939, loss: 0.000609349284786731\r\nStep 2940, loss: 0.00011031197936972603\r\nStep 2941, loss: 1.2594030067702988e-06\r\nStep 2942, loss: 5.618531304207863e-06\r\nStep 2943, loss: 9.687529382063076e-05\r\nStep 2944, loss: 1.056741712091025e-05\r\nStep 2945, loss: 2.8045807994203642e-05\r\nStep 2946, loss: 7.33402994228527e-05\r\nStep 2947, loss: 8.580885332776234e-05\r\nStep 2948, loss: 3.710495730047114e-05\r\nStep 2949, loss: 7.063687007757835e-06\r\nStep 2950, loss: 9.346540537080728e-06\r\nStep 2951, loss: 0.0001742478198138997\r\nStep 2952, loss: 5.792963565909304e-05\r\nStep 2953, loss: 1.2201924619148485e-05\r\nStep 2954, loss: 1.2750198038702365e-05\r\nStep 2955, loss: 0.00012379956024233252\r\nStep 2956, loss: 4.029423507745378e-06\r\nStep 2957, loss: 1.4796385585214011e-05\r\nStep 2958, loss: 6.784246215829626e-05\r\nStep 2959, loss: 8.967829489847645e-05\r\nStep 2960, loss: 7.244991138577461e-06\r\nStep 2961, loss: 0.00020199877326376736\r\nStep 2962, loss: 7.367973921645898e-06\r\nStep 2963, loss: 2.341864092159085e-05\r\nStep 2964, loss: 2.418658141323249e-06\r\nStep 2965, loss: 1.0320368346583564e-05\r\nStep 2966, loss: 0.00010574045882094651\r\nStep 2967, loss: 1.1084883226430975e-05\r\nStep 2968, loss: 0.00019855040591210127\r\nStep 2969, loss: 2.206874887633603e-05\r\nStep 2970, loss: 1.8581240510684438e-06\r\nStep 2971, loss: 
3.820511665253434e-06\r\nStep 2972, loss: 0.00048758299089968204\r\nStep 2973, loss: 6.296580977505073e-05\r\nStep 2974, loss: 2.0064015188836493e-05\r\nStep 2975, loss: 1.5055526091600768e-05\r\nStep 2976, loss: 1.418901206307055e-06\r\nStep 2977, loss: 0.030889369547367096\r\nStep 2978, loss: 7.853230272303335e-06\r\nStep 2979, loss: 1.2715000593743753e-05\r\nStep 2980, loss: 0.0002530204365029931\r\nStep 2981, loss: 0.0007688392652198672\r\nStep 2982, loss: 0.0007092332816682756\r\nStep 2983, loss: 0.0008533368818461895\r\nStep 2984, loss: 0.0006905403570272028\r\nStep 2985, loss: 0.0006209856364876032\r\nStep 2986, loss: 0.0006337825325317681\r\nStep 2987, loss: 0.00032305490458384156\r\nStep 2988, loss: 0.00022614373301621526\r\nStep 2989, loss: 0.00015601520135533065\r\nStep 2990, loss: 0.00011249825183767825\r\nStep 2991, loss: 9.422597213415429e-05\r\nStep 2992, loss: 0.00021528553043026477\r\nStep 2993, loss: 9.799692634260282e-05\r\nStep 2994, loss: 0.0001145435671787709\r\nStep 2995, loss: 0.0001183047061203979\r\nStep 2996, loss: 9.859073907136917e-05\r\nStep 2997, loss: 8.1447753473185e-05\r\nStep 2998, loss: 6.902289169374853e-05\r\nStep 2999, loss: 4.698677366832271e-05\r\nSaved checkpoint at step 3000\r\nStep 3000, loss: 6.982428749324754e-05\r\nStep 3001, loss: 2.958815821330063e-05\r\nStep 3002, loss: 2.9103388442308642e-05\r\nStep 3003, loss: 3.1285300792660564e-05\r\nStep 3004, loss: 3.5267130442662165e-05\r\nStep 3005, loss: 3.476312485872768e-05\r\nStep 3006, loss: 3.5724737244891e-05\r\nStep 3007, loss: 3.577975803636946e-05\r\nStep 3008, loss: 3.0388317100005224e-05\r\nStep 3009, loss: 2.4187989765778184e-05\r\nStep 3010, loss: 1.9230159523431212e-05\r\nStep 3011, loss: 1.605191209819168e-05\r\nStep 3012, loss: 1.3645618309965357e-05\r\nStep 3013, loss: 1.2199164302728605e-05\r\nStep 3014, loss: 1.1152837942063343e-05\r\nStep 3015, loss: 1.1253465345362201e-05\r\nStep 3016, loss: 1.0454601579112932e-05\r\nStep 3017, loss: 9.630515705794096e-06\r\nStep 3018, loss: 9.239261999027804e-06\r\nStep 3019, loss: 8.840330337989144e-06\r\nStep 3020, loss: 8.483164492645301e-06\r\nStep 3021, loss: 8.23454320197925e-06\r\nStep 3022, loss: 7.709704732405953e-06\r\nStep 3023, loss: 7.32281614546082e-06\r\nStep 3024, loss: 6.805076736782212e-06\r\nStep 3025, loss: 6.382167157426011e-06\r\nStep 3026, loss: 5.978566150588449e-06\r\nStep 3027, loss: 5.595070888375631e-06\r\nStep 3028, loss: 5.187408987694653e-06\r\nStep 3029, loss: 4.8992351366905496e-06\r\nStep 3030, loss: 4.654935310099972e-06\r\nStep 3031, loss: 4.38323922935524e-06\r\n",,terminal_output +4040,10300577,"TERMINAL",0,0,"540403610",,terminal_output +4041,10301525,"TERMINAL",0,0,"611471",,terminal_output +4042,10302554,"TERMINAL",0,0,"722582",,terminal_output +4043,10303646,"TERMINAL",0,0,"833693",,terminal_output +4044,10304623,"TERMINAL",0,0,"9447104",,terminal_output +4045,10304818,"TERMINAL",0,0,"Step 3032, loss: 4.187952981737908e-06\r\nStep 3033, loss: 3.958320121455472e-06\r\nStep 3034, loss: 3.7654567677236628e-06\r\nStep 3035, loss: 3.543933871696936e-06\r\nStep 3036, loss: 3.3644892027950846e-06\r\nStep 3037, loss: 3.254403281971463e-06\r\nStep 3038, loss: 3.0808887458988465e-06\r\nStep 3039, loss: 2.9389614155661548e-06\r\nStep 3040, loss: 2.83904637399246e-06\r\nStep 3041, loss: 2.719048325161566e-06\r\nStep 3042, loss: 2.595177647890523e-06\r\nStep 3043, loss: 2.495481112418929e-06\r\nStep 3044, loss: 2.3846887415857054e-06\r\nStep 3045, loss: 2.3013349164102692e-06\r\nStep 3046, loss: 
2.2238812107389094e-06\r\nStep 3047, loss: 2.122905016221921e-06\r\nStep 3048, loss: 2.055665390798822e-06\r\nStep 3049, loss: 1.9817837255686754e-06\r\nStep 3050, loss: 1.9340284325153334e-06\r\nStep 3051, loss: 1.8461462332197698e-06\r\nStep 3052, loss: 1.7788657942219288e-06\r\nStep 3053, loss: 1.7129289062722819e-06\r\nStep 3054, loss: 1.6634155599604128e-06\r\nStep 3055, loss: 1.616420945538266e-06\r\nStep 3056, loss: 1.556865527163609e-06\r\nStep 3057, loss: 1.508194600319257e-06\r\nStep 3058, loss: 1.4619528201365029e-06\r\nStep 3059, loss: 1.4258340570449946e-06\r\nStep 3060, loss: 1.3828801002091495e-06\r\nStep 3061, loss: 1.3502760793926427e-06\r\nStep 3062, loss: 1.3122865993864252e-06\r\nStep 3063, loss: 1.277204319194425e-06\r\nStep 3064, loss: 1.2415956689437735e-06\r\nStep 3065, loss: 1.217988938151393e-06\r\nStep 3066, loss: 1.1816108553830418e-06\r\nStep 3067, loss: 1.1490877795949928e-06\r\nStep 3068, loss: 1.1158276720379945e-06\r\nStep 3069, loss: 1.0973955113513512e-06\r\nStep 3070, loss: 1.0717723171183025e-06\r\nStep 3071, loss: 1.0473393103893613e-06\r\nStep 3072, loss: 1.025489837047644e-06\r\nStep 3073, loss: 9.995748087021639e-07\r\nStep 3074, loss: 9.798146720640943e-07\r\nStep 3075, loss: 9.631399962017895e-07\r\nStep 3076, loss: 9.393063464813167e-07\r\nStep 3077, loss: 9.210201028508891e-07\r\nStep 3078, loss: 8.998669613902166e-07\r\nStep 3079, loss: 8.831275977172481e-07\r\nStep 3080, loss: 8.628085765849391e-07\r\nStep 3081, loss: 8.472434274153784e-07\r\nStep 3082, loss: 8.287061064038426e-07\r\nStep 3083, loss: 8.108409019769169e-07\r\nStep 3084, loss: 7.953891554279835e-07\r\nStep 3085, loss: 7.78868297857116e-07\r\nStep 3086, loss: 7.680487783545686e-07\r\nStep 3087, loss: 7.542247431047144e-07\r\nStep 3088, loss: 7.37671598471934e-07\r\nStep 3089, loss: 7.238961075017869e-07\r\nStep 3090, loss: 7.103554935383727e-07\r\nStep 3091, loss: 6.993093393248273e-07\r\nStep 3092, loss: 6.876475708850194e-07\r\nStep 3093, loss: 6.742365030731889e-07\r\nStep 3094, loss: 6.616758128075162e-07\r\nStep 3095, loss: 6.479246508206415e-07\r\nStep 3096, loss: 6.359146027534734e-07\r\nStep 3097, loss: 6.27063116098725e-07\r\nStep 3098, loss: 6.164865453683888e-07\r\nStep 3099, loss: 6.06363471433724e-07\r\nStep 3100, loss: 5.960784505987249e-07\r\nStep 3101, loss: 5.857933729203069e-07\r\nStep 3102, loss: 5.778002218903566e-07\r\nStep 3103, loss: 5.678634238392988e-07\r\nStep 3104, loss: 5.553512778533332e-07\r\nStep 3105, loss: 5.484029657054634e-07\r\nStep 3106, loss: 5.407499088505574e-07\r\nStep 3107, loss: 5.318092348716164e-07\r\nStep 3108, loss: 5.266585958452197e-07\r\nStep 3109, loss: 5.17685521117528e-07\r\nStep 3110, loss: 5.094979655950738e-07\r\nStep 3111, loss: 5.007759114050714e-07\r\nStep 3112, loss: 4.924668814965116e-07\r\nStep 3113, loss: 4.846032766181452e-07\r\nStep 3114, loss: 4.768774601870973e-07\r\nStep 3115, loss: 4.7037437411745486e-07\r\nStep 3116, loss: 4.6338541892509966e-07\r\nStep 3117, loss: 4.548577408058918e-07\r\nStep 3118, loss: 4.4993387859904033e-07\r\nStep 3119, loss: 4.4249139818930416e-07\r\nStep 3120, loss: 4.3622318912639457e-07\r\nStep 3121, loss: 4.3060285292995104e-07\r\nStep 3122, loss: 4.2281214973627357e-07\r\nStep 3123, loss: 4.1639819414740487e-07\r\nStep 3124, loss: 4.1138528672490793e-07\r\nStep 3125, loss: 4.0570827763986017e-07\r\nStep 3126, loss: 3.980471205977665e-07\r\nStep 3127, loss: 3.922810094536544e-07\r\nStep 3128, loss: 3.8812652292108396e-07\r\nStep 3129, loss: 3.817449396592565e-07\r\nStep 3130, loss: 
3.7866752222726063e-07\r\nStep 3131, loss: 3.7204299019322207e-07\r\nStep 3132, loss: 3.663416521249019e-07\r\nStep 3133, loss: 3.6098859368394187e-07\r\nStep 3134, loss: 3.572795037598553e-07\r\nStep 3135, loss: 3.510356236802181e-07\r\nStep 3136, loss: 3.469944545031467e-07\r\nStep 3137, loss: 3.4191674558314844e-07\r\nStep 3138, loss: 3.3683900824144075e-07\r\nStep 3139, loss: 3.33413396447213e-07\r\nStep 3140, loss: 3.285867364866135e-07\r\nStep 3141, loss: 3.241973729473102e-07\r\nStep 3142, loss: 3.2078790468403895e-07\r\nStep 3143, loss: 3.1589644322593813e-07\r\nStep 3144, loss: 3.112479305400484e-07\r\nStep 3145, loss: 3.0646984328086546e-07\r\nStep 3146, loss: 3.023153283265856e-07\r\nStep 3147, loss: 2.990921359469212e-07\r\nStep 3148, loss: 2.9526157163672906e-07\r\nStep 3149, loss: 2.9071833296256955e-07\r\nStep 3150, loss: 2.88134941683893e-07\r\nStep 3151, loss: 2.8449872502278595e-07\r\nStep 3152, loss: 2.8176145860925317e-07\r\nStep 3153, loss: 2.766108195828565e-07\r\nStep 3154, loss: 2.7260207957624516e-07\r\nStep 3155, loss: 2.694355885068944e-07\r\nStep 3156, loss: 2.6630149818629434e-07\r\nStep 3157, loss: 2.6252760676470643e-07\r\nStep 3158, loss: 2.596202648419421e-07\r\nStep 3159, loss: 2.560569498655241e-07\r\nStep 3160, loss: 2.538865544465807e-07\r\nStep 3161, loss: 2.505499878679984e-07\r\nStep 3162, loss: 2.4714864821362426e-07\r\nStep 3163, loss: 2.433018835290568e-07\r\nStep 3164, loss: 2.422409863811481e-07\r\nStep 3165, loss: 2.384913955211232e-07\r\nStep 3166, loss: 2.356893418209438e-07\r\nStep 3167, loss: 2.3454744280115847e-07\r\nStep 3168, loss: 2.3083838129878131e-07\r\nStep 3169, loss: 2.280849003000185e-07\r\nStep 3170, loss: 2.2661099308152188e-07\r\nStep 3171, loss: 2.2311245118089573e-07\r\nStep 3172, loss: 2.2052094550417678e-07\r\nStep 3173, loss: 2.1822908990998258e-07\r\nStep 3174, loss: 2.1406647476851504e-07\r\nStep 3175, loss: 2.1243059222797456e-07\r\nStep 3176, loss: 2.1064083455257787e-07\r\nStep 3177, loss: 2.081627030747768e-07\r\nStep 3178, loss: 2.0595992111793748e-07\r\nStep 3179, loss: 2.042349507291874e-07\r\nStep 3180, loss: 2.020726554974317e-07\r\nStep 3181, loss: 2.0157865776582184e-07\r\nStep 3182, loss: 1.9921390048693866e-07\r\nStep 3183, loss: 1.951241870301601e-07\r\nStep 3184, loss: 1.9270274265181797e-07\r\nStep 3185, loss: 1.9053236144372931e-07\r\nStep 3186, loss: 1.8930948897377675e-07\r\nStep 3187, loss: 1.880866307146789e-07\r\nStep 3188, loss: 1.8522787570418586e-07\r\nStep 3189, loss: 1.8280643132584373e-07\r\nStep 3190, loss: 1.8130822354578413e-07\r\nStep 3191, loss: 1.806279499305674e-07\r\nStep 3192, loss: 1.7789876949336758e-07\r\nStep 3193, loss: 1.761333123795339e-07\r\nStep 3194, loss: 1.7461078982705658e-07\r\nStep 3195, loss: 1.7389002948675625e-07\r\nStep 3196, loss: 1.7309638167262165e-07\r\nStep 3197, loss: 1.700189642406258e-07\r\nStep 3198, loss: 1.691119422275733e-07\r\nStep 3199, loss: 1.6769470789768093e-07\r\nStep 3200, loss: 3.2473116107212263e-07\r\nStep 3201, loss: 1.8185082240051997e-07\r\nStep 3202, loss: 2.846605013928638e-07\r\nStep 3203, loss: 2.919410917456844e-07\r\nStep 3204, loss: 1.704805754343397e-07\r\nStep 3205, loss: 1.829684066478876e-07\r\nStep 3206, loss: 1.7790686968055525e-07\r\nStep 3207, loss: 1.758174619226338e-07\r\nStep 3208, loss: 1.8268495693973819e-07\r\nStep 3209, loss: 1.6937109137415973e-07\r\nStep 3210, loss: 1.6433385496839037e-07\r\nStep 3211, loss: 1.6590495022228424e-07\r\nStep 3212, loss: 1.5996879199065006e-07\r\nStep 3213, loss: 
0.0025328188203275204\r\nStep 3214, loss: 6.5935905695369e-06\r\nStep 3215, loss: 0.0004176129004918039\r\nStep 3216, loss: 8.637208520667627e-05\r\nStep 3217, loss: 0.00033826171420514584\r\nStep 3218, loss: 0.00019497290486469865\r\nStep 3219, loss: 9.662260708864778e-05\r\nStep 3220, loss: 0.00038564481656067073\r\nStep 3221, loss: 0.002856404287740588\r\nStep 3222, loss: 0.00026945152785629034\r\nStep 3223, loss: 0.00017282537010032684\r\nStep 3224, loss: 2.6830219212570228e-05\r\nStep 3225, loss: 0.0003053518012166023\r\nStep 3226, loss: 0.00026185050955973566\r\nStep 3227, loss: 0.0002183696924475953\r\nStep 3228, loss: 0.0001190238690469414\r\nStep 3229, loss: 0.00012506746861618012\r\nStep 3230, loss: 0.0035795376170426607\r\nStep 3231, loss: 0.0002018886589212343\r\nStep 3232, loss: 8.580621215514839e-05\r\nStep 3233, loss: 0.00020771623530890793\r\nStep 3234, loss: 0.00025144132087007165\r\nStep 3235, loss: 0.00033671848359517753\r\nStep 3236, loss: 0.001296407775953412\r\nStep 3237, loss: 0.00028417364228516817\r\nStep 3238, loss: 0.00021203768847044557\r\n",,terminal_output +4046,10305655,"TERMINAL",0,0,"2:0055815",,terminal_output +4047,10306688,"TERMINAL",0,0,"166926",,terminal_output +4048,10307744,"TERMINAL",0,0,"2775037",,terminal_output +4049,10308766,"TERMINAL",0,0,"388148",,terminal_output +4050,10309891,"TERMINAL",0,0,"499259",,terminal_output +4051,10309905,"TERMINAL",0,0,"Step 3239, loss: 0.00015551441174466163\r\nStep 3240, loss: 0.0002921951818279922\r\nStep 3241, loss: 0.00025753109366633\r\nStep 3242, loss: 0.00033399969106540084\r\nStep 3243, loss: 0.00018944940529763699\r\nStep 3244, loss: 9.183691145153716e-05\r\nStep 3245, loss: 8.68857532623224e-05\r\nStep 3246, loss: 8.134431845974177e-05\r\nStep 3247, loss: 7.044282392598689e-05\r\nStep 3248, loss: 6.884510366944596e-05\r\nStep 3249, loss: 0.00020054139895364642\r\nStep 3250, loss: 4.725956387119368e-05\r\nStep 3251, loss: 4.53985485364683e-05\r\nStep 3252, loss: 4.52955428045243e-05\r\nStep 3253, loss: 4.73245709144976e-05\r\nStep 3254, loss: 4.844008435611613e-05\r\nStep 3255, loss: 3.431899313000031e-05\r\nStep 3256, loss: 3.652046143542975e-05\r\nStep 3257, loss: 2.8482516427175142e-05\r\nStep 3258, loss: 2.847792347893119e-05\r\nStep 3259, loss: 2.5725415980559774e-05\r\nStep 3260, loss: 2.270149707328528e-05\r\nStep 3261, loss: 1.9351728042238392e-05\r\nStep 3262, loss: 1.7350992493447848e-05\r\nStep 3263, loss: 1.7524329450679943e-05\r\nStep 3264, loss: 2.2988850105321035e-05\r\nStep 3265, loss: 1.269152471650159e-05\r\nStep 3266, loss: 1.1699097740347497e-05\r\nStep 3267, loss: 1.1241526408412028e-05\r\nStep 3268, loss: 1.10581695480505e-05\r\nStep 3269, loss: 1.1366906619514339e-05\r\nStep 3270, loss: 1.076707849279046e-05\r\nStep 3271, loss: 1.0111545634572394e-05\r\nStep 3272, loss: 9.52520622377051e-06\r\nStep 3273, loss: 8.607765266788192e-06\r\nStep 3274, loss: 7.86686268838821e-06\r\nStep 3275, loss: 6.83088956066058e-06\r\nStep 3276, loss: 6.08212394581642e-06\r\nStep 3277, loss: 5.44138902114355e-06\r\nStep 3278, loss: 4.895857273368165e-06\r\nStep 3279, loss: 4.554419774649432e-06\r\nStep 3280, loss: 4.132320555072511e-06\r\nStep 3281, loss: 3.855183877021773e-06\r\nStep 3282, loss: 3.6368746805237606e-06\r\nStep 3283, loss: 3.472939624771243e-06\r\nStep 3284, loss: 3.380197767910431e-06\r\nStep 3285, loss: 3.1996930829336634e-06\r\nStep 3286, loss: 3.1270394629245857e-06\r\nStep 3287, loss: 2.924487489508465e-06\r\nStep 3288, loss: 2.811106924127671e-06\r\nStep 3289, loss: 
2.6549216727289604e-06\r\nStep 3290, loss: 2.5246108634746633e-06\r\nStep 3291, loss: 2.4103496798488777e-06\r\nStep 3292, loss: 2.3096079075912712e-06\r\nStep 3293, loss: 2.20183824239939e-06\r\nStep 3294, loss: 2.083798790408764e-06\r\nStep 3295, loss: 1.9739559320441913e-06\r\nStep 3296, loss: 1.87401576567936e-06\r\nStep 3297, loss: 1.7942794556802255e-06\r\nStep 3298, loss: 1.727209109958494e-06\r\nStep 3299, loss: 1.6499598132213578e-06\r\nStep 3300, loss: 1.5661023553548148e-06\r\nStep 3301, loss: 1.5093248748598853e-06\r\nStep 3302, loss: 1.4649948525402579e-06\r\nStep 3303, loss: 1.4178461924529984e-06\r\nStep 3304, loss: 1.3788767319056205e-06\r\nStep 3305, loss: 1.3262539368952275e-06\r\nStep 3306, loss: 1.2794616850442253e-06\r\nStep 3307, loss: 0.0005756939644925296\r\nStep 3308, loss: 1.7263707832171349e-06\r\nStep 3309, loss: 5.770773441327037e-06\r\nStep 3310, loss: 0.00012478826101869345\r\nStep 3311, loss: 4.0641574742039666e-05\r\nStep 3312, loss: 2.626897367008496e-05\r\nStep 3313, loss: 2.345125176361762e-05\r\nStep 3314, loss: 2.2186390197020955e-05\r\nStep 3315, loss: 2.853609657904599e-05\r\nStep 3316, loss: 2.701141420402564e-05\r\nStep 3317, loss: 2.46115741902031e-05\r\nStep 3318, loss: 2.2867292500450276e-05\r\nStep 3319, loss: 1.9036111552850343e-05\r\nStep 3320, loss: 1.7298640159424394e-05\r\nStep 3321, loss: 1.6014009815989994e-05\r\nStep 3322, loss: 1.4473805094894487e-05\r\nStep 3323, loss: 1.2946634342370089e-05\r\nStep 3324, loss: 1.1401632946217433e-05\r\nStep 3325, loss: 1.0544472388573922e-05\r\nStep 3326, loss: 9.929981388268061e-06\r\nStep 3327, loss: 9.076264177565463e-06\r\nStep 3328, loss: 8.547692232241388e-06\r\nStep 3329, loss: 7.619400548719568e-06\r\nStep 3330, loss: 7.064918463584036e-06\r\nStep 3331, loss: 6.432733698602533e-06\r\nStep 3332, loss: 5.889086423849221e-06\r\nStep 3333, loss: 5.571344445343129e-06\r\nStep 3334, loss: 5.205319666856667e-06\r\nStep 3335, loss: 4.907042239210568e-06\r\nStep 3336, loss: 4.40434359916253e-06\r\nStep 3337, loss: 4.116074705962092e-06\r\nStep 3338, loss: 3.8586017581110355e-06\r\nStep 3339, loss: 3.5841289900417905e-06\r\nStep 3340, loss: 3.354400405441993e-06\r\nStep 3341, loss: 3.0897131182427984e-06\r\nStep 3342, loss: 2.806248858178151e-06\r\nStep 3343, loss: 2.579374950073543e-06\r\nStep 3344, loss: 2.4137743821484037e-06\r\nStep 3345, loss: 2.266694536956493e-06\r\nStep 3346, loss: 2.1317132450349163e-06\r\nStep 3347, loss: 2.0567147203109926e-06\r\nStep 3348, loss: 1.964419197975076e-06\r\nStep 3349, loss: 1.8701065300774644e-06\r\nStep 3350, loss: 1.8158876855522976e-06\r\nStep 3351, loss: 1.7213731098308926e-06\r\nStep 3352, loss: 1.649767341405095e-06\r\nStep 3353, loss: 1.5713918628534884e-06\r\nStep 3354, loss: 1.5027095514597022e-06\r\nStep 3355, loss: 1.4259696854423964e-06\r\nStep 3356, loss: 1.3437228290058556e-06\r\nStep 3357, loss: 1.2826448028135928e-06\r\nStep 3358, loss: 1.2092493761883816e-06\r\nStep 3359, loss: 1.1558566939129378e-06\r\nStep 3360, loss: 1.115648160521232e-06\r\nStep 3361, loss: 1.0594211516945506e-06\r\nStep 3362, loss: 1.0288008525094483e-06\r\nStep 3363, loss: 9.837738161877496e-07\r\nStep 3364, loss: 9.401314855495002e-07\r\nStep 3365, loss: 9.019637445817352e-07\r\nStep 3366, loss: 8.632290473542525e-07\r\nStep 3367, loss: 8.363504662156629e-07\r\nStep 3368, loss: 8.076253834587988e-07\r\nStep 3369, loss: 7.740817409285228e-07\r\nStep 3370, loss: 7.512603588111233e-07\r\nStep 3371, loss: 7.229807010844524e-07\r\nStep 3372, loss: 
6.985558229644084e-07\r\nStep 3373, loss: 6.768601679141284e-07\r\nStep 3374, loss: 6.540063282045594e-07\r\nStep 3375, loss: 6.343433938127419e-07\r\nStep 3376, loss: 6.141296466921631e-07\r\nStep 3377, loss: 5.962158979855303e-07\r\nStep 3378, loss: 5.77225080178323e-07\r\nStep 3379, loss: 5.59157456336834e-07\r\nStep 3380, loss: 5.431469389804988e-07\r\nStep 3381, loss: 5.262616582513147e-07\r\nStep 3382, loss: 5.103158287056431e-07\r\nStep 3383, loss: 4.963297897120356e-07\r\nStep 3384, loss: 4.823032782041992e-07\r\nStep 3385, loss: 4.7041473294484604e-07\r\nStep 3386, loss: 4.5750587673865084e-07\r\nStep 3387, loss: 4.4876762217427313e-07\r\nStep 3388, loss: 4.375026776415325e-07\r\nStep 3389, loss: 4.263916082436481e-07\r\nStep 3390, loss: 4.136203699545149e-07\r\nStep 3391, loss: 4.04137097120838e-07\r\nStep 3392, loss: 3.9331754919658124e-07\r\nStep 3393, loss: 3.834536528302124e-07\r\nStep 3394, loss: 3.755900479518459e-07\r\nStep 3395, loss: 7.34341654151649e-07\r\nStep 3396, loss: 3.58243198661512e-07\r\nStep 3397, loss: 3.5331123626747285e-07\r\nStep 3398, loss: 3.510112946969457e-07\r\nStep 3399, loss: 3.4522088299127063e-07\r\nStep 3400, loss: 3.3800515097937023e-07\r\nStep 3401, loss: 3.2997959920066933e-07\r\nStep 3402, loss: 3.210955981103325e-07\r\nStep 3403, loss: 3.119200471246586e-07\r\nStep 3404, loss: 3.0729580657862243e-07\r\nStep 3405, loss: 3.000234016781178e-07\r\nStep 3406, loss: 2.955206639398966e-07\r\nStep 3407, loss: 2.885236654037726e-07\r\nStep 3408, loss: 2.810001831221598e-07\r\nStep 3409, loss: 2.74221775953265e-07\r\nStep 3410, loss: 2.689982636638888e-07\r\nStep 3411, loss: 2.644064522883127e-07\r\nStep 3412, loss: 2.584135927463649e-07\r\nStep 3413, loss: 2.531415077555721e-07\r\nStep 3414, loss: 2.480394698523014e-07\r\nStep 3415, loss: 2.447110034609068e-07\r\nStep 3416, loss: 2.3918786951071525e-07\r\nStep 3417, loss: 2.3422350636792544e-07\r\nStep 3418, loss: 2.3074926502886228e-07\r\nStep 3419, loss: 2.2551768097400782e-07\r\nStep 3420, loss: 2.2337160032748216e-07\r\nStep 3421, loss: 2.1907942482357612e-07\r\nStep 3422, loss: 2.1412316186797398e-07\r\nStep 3423, loss: 2.1099715752370685e-07\r\nStep 3424, loss: 2.067940698680104e-07\r\nStep 3425, loss: 2.0193499494780554e-07\r\nStep 3426, loss: 1.9977269971604983e-07\r\nStep 3427, loss: 1.9592593503148237e-07\r\nStep 3428, loss: 1.938689280223116e-07\r\nStep 3429, loss: 1.9028941267151822e-07\r\nStep 3430, loss: 1.8577856053525466e-07\r\nStep 3431, loss: 1.834786047538728e-07\r\nStep 3432, loss: 1.8151878578009928e-07\r\nStep 3433, loss: 1.778501825810963e-07\r\nStep 3434, loss: 1.7598753743186535e-07\r\nStep 3435, loss: 1.7332314428131212e-07\r\nStep 3436, loss: 1.706587511307589e-07\r\nStep 3437, loss: 1.6846406936110725e-07\r\nStep 3438, loss: 1.6541905267786206e-07\r\nStep 3439, loss: 1.6337013164502423e-07\r\nStep 3440, loss: 1.6052757700890652e-07\r\nStep 3441, loss: 1.584138829002768e-07\r\nStep 3442, loss: 1.5595195179685106e-07\r\nStep 3443, loss: 1.5520689089498774e-07\r\nStep 3444, loss: 1.5342523340677872e-07\r\nStep 3445, loss: 1.5093090155460231e-07\r\nStep 3446, loss: 1.481774347666942e-07\r\n",,terminal_output +4052,10310919,"TERMINAL",0,0,"550503620",,terminal_output +4053,10311853,"TERMINAL",0,0,"611471",,terminal_output +4054,10312895,"TERMINAL",0,0,"722582",,terminal_output +4055,10313931,"TERMINAL",0,0,"833693",,terminal_output +4056,10314914,"TERMINAL",0,0,"Step 3447, loss: 1.46565838576862e-07\r\nStep 3448, loss: 1.4549684124176565e-07\r\nStep 3449, loss: 
1.4389334523912112e-07\r\nStep 3450, loss: 1.4103459022862808e-07\r\nStep 3451, loss: 1.4031382988832775e-07\r\nStep 3452, loss: 1.3737408721681277e-07\r\nStep 3453, loss: 1.3687198929801525e-07\r\nStep 3454, loss: 1.3530087983326666e-07\r\nStep 3455, loss: 1.3502554452315962e-07\r\nStep 3456, loss: 1.3274987509248604e-07\r\nStep 3457, loss: 1.311463790898415e-07\r\nStep 3458, loss: 1.2976154550869978e-07\r\nStep 3459, loss: 1.2894359713300219e-07\r\nStep 3460, loss: 2.3630458656498377e-07\r\nStep 3461, loss: 1.2703236507149995e-07\r\nStep 3462, loss: 1.283443111788074e-07\r\nStep 3463, loss: 1.2599575427429954e-07\r\nStep 3464, loss: 1.2627920398244896e-07\r\nStep 3465, loss: 1.2440035845884267e-07\r\nStep 3466, loss: 1.2180886699297844e-07\r\nStep 3467, loss: 1.2133915561207687e-07\r\nStep 3468, loss: 1.2070746890913142e-07\r\nStep 3469, loss: 1.1941982336338697e-07\r\nStep 3470, loss: 1.1907967945035125e-07\r\nStep 3471, loss: 1.1808357669451652e-07\r\nStep 3472, loss: 1.150790538417823e-07\r\nStep 3473, loss: 1.1481180450800821e-07\r\nStep 3474, loss: 1.1378330100342282e-07\r\nStep 3475, loss: 1.1211501771413168e-07\r\nStep 3476, loss: 1.111756020577559e-07\r\nStep 3477, loss: 1.1056821591637345e-07\r\nStep 3478, loss: 1.1069779048966666e-07\r\nStep 3479, loss: 1.0995272958780333e-07\r\nStep 3480, loss: 1.0846261488950404e-07\r\nStep 3481, loss: 1.811461913803214e-07\r\nStep 3482, loss: 1.0565245389670963e-07\r\nStep 3483, loss: 1.0703728747785135e-07\r\nStep 3484, loss: 1.0759607960153517e-07\r\nStep 3485, loss: 1.064218082547086e-07\r\nStep 3486, loss: 1.05603859879011e-07\r\nStep 3487, loss: 1.0419473284173364e-07\r\nStep 3488, loss: 1.0345777212705798e-07\r\nStep 3489, loss: 1.036197403436745e-07\r\nStep 3490, loss: 1.0382220239080198e-07\r\nStep 3491, loss: 1.0136027128737624e-07\r\nStep 3492, loss: 1.0063950384164855e-07\r\nStep 3493, loss: 1.0083387280701572e-07\r\nStep 3494, loss: 1.0006451844901676e-07\r\nStep 3495, loss: 9.910080223107798e-08\r\nStep 3496, loss: 9.928706390383013e-08\r\nStep 3497, loss: 9.973248182859606e-08\r\nStep 3498, loss: 9.70114015785839e-08\r\nStep 3499, loss: 9.606387862959309e-08\r\nStep 3500, loss: 9.764308117610199e-08\r\nStep 3501, loss: 9.667126477097554e-08\r\nStep 3502, loss: 9.513255605497761e-08\r\nStep 3503, loss: 9.510826259884197e-08\r\nStep 3504, loss: 9.453327010078283e-08\r\nStep 3505, loss: 9.575613546530803e-08\r\nStep 3506, loss: 9.456566374410613e-08\r\nStep 3507, loss: 1.6174232086996199e-07\r\nStep 3508, loss: 1.535061784352365e-07\r\nStep 3509, loss: 1.4562641581505886e-07\r\nStep 3510, loss: 9.352906005233308e-08\r\nStep 3511, loss: 9.38125097604825e-08\r\nStep 3512, loss: 9.42417273108731e-08\r\nStep 3513, loss: 9.323751726242335e-08\r\nStep 3514, loss: 9.374772247383589e-08\r\nStep 3515, loss: 9.26220380392806e-08\r\nStep 3516, loss: 9.359384733897969e-08\r\nStep 3517, loss: 9.14720530431623e-08\r\nStep 3518, loss: 9.218472030170233e-08\r\nStep 3519, loss: 9.070269868516334e-08\r\nStep 3520, loss: 9.017630020480283e-08\r\nStep 3521, loss: 9.003052525713429e-08\r\nStep 3522, loss: 9.058932448624546e-08\r\nStep 3523, loss: 9.105093567995937e-08\r\nStep 3524, loss: 8.764148162754282e-08\r\nStep 3525, loss: 8.734183865044542e-08\r\nStep 3526, loss: 8.764148162754282e-08\r\nStep 3527, loss: 8.90020217525489e-08\r\nStep 3528, loss: 8.842703635991711e-08\r\nStep 3529, loss: 8.832175524275954e-08\r\nStep 3530, loss: 8.952842733833677e-08\r\nStep 3531, loss: 8.86294984070446e-08\r\nStep 3532, loss: 8.727705136379882e-08\r\nStep 3533, 
loss: 8.717177024664124e-08\r\nStep 3534, loss: 8.755240088476057e-08\r\nStep 3535, loss: 8.794922479182787e-08\r\nStep 3536, loss: 8.704219567334803e-08\r\nStep 3537, loss: 8.66858655967917e-08\r\nStep 3538, loss: 8.654819083631082e-08\r\nStep 3539, loss: 8.594890488211604e-08\r\nStep 3540, loss: 8.607038637364894e-08\r\nStep 3541, loss: 8.581123012163516e-08\r\nStep 3542, loss: 8.643481663739294e-08\r\nStep 3543, loss: 8.598129852543934e-08\r\nStep 3544, loss: 8.575454302217622e-08\r\nStep 3545, loss: 1.3998179326790705e-07\r\nStep 3546, loss: 1.317213644824733e-07\r\nStep 3547, loss: 8.35193674220136e-08\r\nStep 3548, loss: 8.584362376495847e-08\r\nStep 3549, loss: 8.547110041945416e-08\r\nStep 3550, loss: 8.442639654049344e-08\r\nStep 3551, loss: 8.351126723482594e-08\r\nStep 3552, loss: 8.517955052411708e-08\r\nStep 3553, loss: 8.63214353330477e-08\r\nStep 3554, loss: 8.488800773420735e-08\r\nStep 3555, loss: 8.382711058629866e-08\r\nStep 3556, loss: 8.463695877480859e-08\r\nStep 3557, loss: 8.385950422962196e-08\r\nStep 3558, loss: 8.342218649204369e-08\r\nStep 3559, loss: 1.252749939339992e-07\r\nStep 3560, loss: 8.15676415300004e-08\r\nStep 3561, loss: 8.091976155810698e-08\r\nStep 3562, loss: 8.279860708171327e-08\r\nStep 3563, loss: 8.325211808823951e-08\r\nStep 3564, loss: 8.067681278589589e-08\r\nStep 3565, loss: 0.015854597091674805\r\nStep 3566, loss: 1.549720423099643e-07\r\nStep 3567, loss: 1.2771326964866603e-06\r\nStep 3568, loss: 0.00013217916421126574\r\nStep 3569, loss: 0.00048112834338098764\r\nStep 3570, loss: 0.0028882150072604418\r\nStep 3571, loss: 0.00024834671057760715\r\nStep 3572, loss: 0.00010426241351524368\r\nStep 3573, loss: 0.0003515340213198215\r\nStep 3574, loss: 0.0002618427388370037\r\nStep 3575, loss: 0.0002275122096762061\r\nStep 3576, loss: 0.00015864911256358027\r\nStep 3577, loss: 0.00015135572175495327\r\nStep 3578, loss: 0.00029172864742577076\r\nStep 3579, loss: 0.00011735106818377972\r\nStep 3580, loss: 7.409012323478237e-05\r\nStep 3581, loss: 0.005400794092565775\r\nStep 3582, loss: 7.045378879411146e-05\r\nStep 3583, loss: 0.00017833075253292918\r\nStep 3584, loss: 0.00041168928146362305\r\nStep 3585, loss: 0.00033190855174325407\r\nStep 3586, loss: 0.00016155561024788767\r\nStep 3587, loss: 0.00010335352271795273\r\nStep 3588, loss: 8.511530177202076e-05\r\nStep 3589, loss: 8.528539910912514e-05\r\nStep 3590, loss: 7.610732427565381e-05\r\nStep 3591, loss: 7.311688386835158e-05\r\nStep 3592, loss: 6.352640048135072e-05\r\nStep 3593, loss: 6.743672565789893e-05\r\nStep 3594, loss: 6.188311090227216e-05\r\nStep 3595, loss: 5.547222099266946e-05\r\nStep 3596, loss: 4.830146281165071e-05\r\nStep 3597, loss: 4.4903317757416517e-05\r\nStep 3598, loss: 4.0110742702381685e-05\r\nStep 3599, loss: 3.894757537636906e-05\r\nStep 3600, loss: 0.014196319505572319\r\nStep 3601, loss: 3.981509871664457e-05\r\nStep 3602, loss: 0.00012683098611887544\r\nStep 3603, loss: 0.00028643457335419953\r\nStep 3604, loss: 0.000406160659622401\r\nStep 3605, loss: 0.0002939089317806065\r\nStep 3606, loss: 0.0002446369035169482\r\nStep 3607, loss: 0.0002002581168198958\r\nStep 3608, loss: 0.00011854592594318092\r\nStep 3609, loss: 0.0001083512106561102\r\nStep 3610, loss: 0.0001430272968718782\r\nStep 3611, loss: 0.00014793405716773123\r\nStep 3612, loss: 0.006842465605586767\r\nStep 3613, loss: 0.0017056126380339265\r\nStep 3614, loss: 0.00044030725257471204\r\nStep 3615, loss: 0.0005646643112413585\r\nStep 3616, loss: 0.0005122103611938655\r\nStep 3617, loss: 
0.0005121712456457317\r\nStep 3618, loss: 0.0002975754614453763\r\nStep 3619, loss: 0.00023967029119376093\r\nStep 3620, loss: 0.00027931705699302256\r\nStep 3621, loss: 0.0002339391503483057\r\nStep 3622, loss: 0.00018063423340208828\r\nStep 3623, loss: 0.00013177683285903186\r\nStep 3624, loss: 0.00010407459922134876\r\nStep 3625, loss: 8.689631067682058e-05\r\nStep 3626, loss: 9.741759276948869e-05\r\nStep 3627, loss: 0.00011409988655941561\r\nStep 3628, loss: 0.0016406693030148745\r\nStep 3629, loss: 8.564796735299751e-05\r\nStep 3630, loss: 0.000109308042738121\r\nStep 3631, loss: 0.0001511356676928699\r\nStep 3632, loss: 0.00012391357449814677\r\nStep 3633, loss: 0.00011989099584752694\r\nStep 3634, loss: 0.00015313360199797899\r\nStep 3635, loss: 7.636532245669514e-05\r\nStep 3636, loss: 9.075639536604285e-05\r\nStep 3637, loss: 0.03343438357114792\r\nStep 3638, loss: 5.986110045341775e-05\r\nStep 3639, loss: 0.00016303229494951665\r\nStep 3640, loss: 0.003971777390688658\r\nStep 3641, loss: 0.001456464291550219\r\nStep 3642, loss: 0.0004895091988146305\r\nStep 3643, loss: 0.00043200698564760387\r\nStep 3644, loss: 0.0005123473238199949\r\nStep 3645, loss: 0.0007698065019212663\r\nStep 3646, loss: 0.0006581329507753253\r\nStep 3647, loss: 0.0001952226593857631\r\nStep 3648, loss: 0.0001935894979396835\r\nStep 3649, loss: 0.00018172820273321122\r\nStep 3650, loss: 0.00019092693401034921\r\nStep 3651, loss: 0.00016161234816536307\r\nStep 3652, loss: 0.0001524301915196702\r\nStep 3653, loss: 0.026378385722637177\r\nStep 3654, loss: 0.0001233355578733608\r\n",,terminal_output +4057,10315009,"TERMINAL",0,0,"9447204",,terminal_output +4058,10316056,"TERMINAL",0,0,"1055815",,terminal_output +4059,10317027,"TERMINAL",0,0,"166926",,terminal_output +4060,10318082,"TERMINAL",0,0,"2779:0037",,terminal_output +4061,10319109,"TERMINAL",0,0,"388148",,terminal_output +4062,10319493,"TERMINAL",0,0,"Step 3655, loss: 0.0002929070615209639\r\nStep 3656, loss: 0.0007520039798691869\r\nStep 3657, loss: 0.0005930282059125602\r\nStep 3658, loss: 0.0003378494002390653\r\nStep 3659, loss: 0.0002587161725386977\r\nStep 3660, loss: 0.00021191795531194657\r\nStep 3661, loss: 0.00023794551088940352\r\nStep 3662, loss: 0.00021680493955500424\r\nStep 3663, loss: 0.00022027258819434792\r\nStep 3664, loss: 0.00017661253514233977\r\nStep 3665, loss: 0.0001565763377584517\r\nStep 3666, loss: 0.0002687006490305066\r\nStep 3667, loss: 0.0001455625897506252\r\nStep 3668, loss: 9.401202260050923e-05\r\nStep 3669, loss: 0.00010148710134672001\r\nStep 3670, loss: 8.507167513016611e-05\r\nStep 3671, loss: 8.809103019302711e-05\r\nStep 3672, loss: 8.136056567309424e-05\r\nStep 3673, loss: 7.037909381324425e-05\r\nStep 3674, loss: 5.879218224436045e-05\r\nStep 3675, loss: 5.432271791505627e-05\r\nStep 3676, loss: 5.041481927037239e-05\r\nStep 3677, loss: 4.322678432799876e-05\r\nStep 3678, loss: 3.960738467867486e-05\r\nStep 3679, loss: 3.333838321850635e-05\r\nStep 3680, loss: 3.27448797179386e-05\r\nStep 3681, loss: 2.9540755349444225e-05\r\nStep 3682, loss: 2.66887218458578e-05\r\nStep 3683, loss: 2.5845893105724826e-05\r\nStep 3684, loss: 2.4555092750233598e-05\r\nStep 3685, loss: 2.4862452846718952e-05\r\nStep 3686, loss: 2.2632952095591463e-05\r\nStep 3687, loss: 2.1240795831545256e-05\r\nStep 3688, loss: 1.8671769794309512e-05\r\nStep 3689, loss: 2.5206876671290956e-05\r\nStep 3690, loss: 2.3669792426517233e-05\r\nStep 3691, loss: 1.9474457076285034e-05\r\nStep 3692, loss: 1.790840906323865e-05\r\nStep 3693, loss: 
0.0014928941382095218\r\nStep 3694, loss: 2.177415808546357e-05\r\nStep 3695, loss: 5.57318635401316e-05\r\nStep 3696, loss: 0.000246061768848449\r\nStep 3697, loss: 0.00013718599802814424\r\nStep 3698, loss: 0.0001464526285417378\r\nStep 3699, loss: 0.02211037464439869\r\nStep 3700, loss: 6.028135248925537e-05\r\nStep 3701, loss: 0.00012332208279985934\r\nStep 3702, loss: 0.0006839027046225965\r\nStep 3703, loss: 0.0011276121949777007\r\nStep 3704, loss: 0.000520856527145952\r\nStep 3705, loss: 0.00026884666294790804\r\nStep 3706, loss: 0.00026458859792910516\r\nStep 3707, loss: 0.0003567444218788296\r\nStep 3708, loss: 0.00029845040990039706\r\nStep 3709, loss: 0.00018536130664870143\r\nStep 3710, loss: 0.00018699200882110745\r\nStep 3711, loss: 0.00022548493870999664\r\nStep 3712, loss: 0.00015552480181213468\r\nStep 3713, loss: 0.00012402645370457321\r\nStep 3714, loss: 0.00011521777923917398\r\nStep 3715, loss: 8.44197056721896e-05\r\nStep 3716, loss: 7.187839946709573e-05\r\nStep 3717, loss: 0.003585505299270153\r\nStep 3718, loss: 4.7343190090032294e-05\r\nStep 3719, loss: 6.0114798543509096e-05\r\nStep 3720, loss: 0.00010637941886670887\r\nStep 3721, loss: 0.0003469492949079722\r\nStep 3722, loss: 0.00027011820930056274\r\nStep 3723, loss: 0.0020717375446110964\r\nStep 3724, loss: 0.0003869594947900623\r\nStep 3725, loss: 0.0001290969375986606\r\nStep 3726, loss: 0.00010412462870590389\r\nStep 3727, loss: 0.0001275991089642048\r\nStep 3728, loss: 0.00019888352835550904\r\nStep 3729, loss: 0.000258347688941285\r\nStep 3730, loss: 0.00030477921245619655\r\nStep 3731, loss: 0.00017921872495207936\r\nStep 3732, loss: 0.00011897942022187635\r\nStep 3733, loss: 9.488015348324552e-05\r\nStep 3734, loss: 0.0015721403760835528\r\nStep 3735, loss: 5.129890632815659e-05\r\nStep 3736, loss: 5.471840995596722e-05\r\nStep 3737, loss: 7.618522795382887e-05\r\nStep 3738, loss: 9.01392922969535e-05\r\nStep 3739, loss: 7.720920257270336e-05\r\nStep 3740, loss: 8.221448661061004e-05\r\nStep 3741, loss: 0.0006751401233486831\r\nStep 3742, loss: 0.0001521258964203298\r\nStep 3743, loss: 8.017149229999632e-05\r\nStep 3744, loss: 7.51614716136828e-05\r\nStep 3745, loss: 7.372527034021914e-05\r\nStep 3746, loss: 8.31124561955221e-05\r\nStep 3747, loss: 7.68564423196949e-05\r\nStep 3748, loss: 7.506252586608753e-05\r\nStep 3749, loss: 0.00010063884838018566\r\nStep 3750, loss: 4.7454293962800875e-05\r\nStep 3751, loss: 4.238816472934559e-05\r\nStep 3752, loss: 4.234986045048572e-05\r\nStep 3753, loss: 4.486336911213584e-05\r\nStep 3754, loss: 4.5393320760922506e-05\r\nStep 3755, loss: 3.905880294041708e-05\r\nStep 3756, loss: 3.364501753821969e-05\r\nStep 3757, loss: 2.919819235103205e-05\r\nStep 3758, loss: 2.7696381948771887e-05\r\nStep 3759, loss: 2.3043956389301457e-05\r\nStep 3760, loss: 2.1062725863885134e-05\r\nStep 3761, loss: 2.1846648451173678e-05\r\nStep 3762, loss: 2.1386227672337554e-05\r\nStep 3763, loss: 1.9717097529792227e-05\r\nStep 3764, loss: 1.655118649068754e-05\r\nStep 3765, loss: 1.4264052879298106e-05\r\nStep 3766, loss: 1.2768980013788678e-05\r\nStep 3767, loss: 1.1169438039360102e-05\r\nStep 3768, loss: 9.42976384976646e-06\r\nStep 3769, loss: 8.411005183006637e-06\r\nStep 3770, loss: 7.729046956228558e-06\r\nStep 3771, loss: 6.974730240472127e-06\r\nStep 3772, loss: 6.343694167298963e-06\r\nStep 3773, loss: 5.678776688000653e-06\r\nStep 3774, loss: 5.417388365458464e-06\r\nStep 3775, loss: 5.260906164039625e-06\r\nStep 3776, loss: 5.083746600575978e-06\r\nStep 3777, loss: 
4.827044904232025e-06\r\nStep 3778, loss: 4.598448413162259e-06\r\nStep 3779, loss: 4.287147021386772e-06\r\nStep 3780, loss: 4.071576768183149e-06\r\nStep 3781, loss: 3.886665126628941e-06\r\nStep 3782, loss: 3.675549805848277e-06\r\nStep 3783, loss: 3.473779997875681e-06\r\nStep 3784, loss: 3.336433564982144e-06\r\nStep 3785, loss: 3.111525302301743e-06\r\nStep 3786, loss: 2.9171546884754207e-06\r\nStep 3787, loss: 2.755801915554912e-06\r\nStep 3788, loss: 2.6002262529800646e-06\r\nStep 3789, loss: 2.488009386070189e-06\r\nStep 3790, loss: 2.3449078980775084e-06\r\nStep 3791, loss: 0.0014667039504274726\r\nStep 3792, loss: 0.00044905918184667826\r\nStep 3793, loss: 0.00011267032823525369\r\nStep 3794, loss: 0.0002461258554831147\r\nStep 3795, loss: 2.2097463443060406e-05\r\nStep 3796, loss: 2.270601544296369e-05\r\nStep 3797, loss: 0.00035395464510656893\r\nStep 3798, loss: 5.131131183588877e-05\r\nStep 3799, loss: 4.990251181880012e-05\r\nStep 3800, loss: 4.565127892419696e-05\r\nStep 3801, loss: 4.3636195186991245e-05\r\nStep 3802, loss: 4.562930189422332e-05\r\nStep 3803, loss: 3.994203143520281e-05\r\nStep 3804, loss: 0.005646974314004183\r\nStep 3805, loss: 3.1346517062047496e-05\r\nStep 3806, loss: 7.176629878813401e-05\r\nStep 3807, loss: 0.00025422460748814046\r\nStep 3808, loss: 0.000250773475272581\r\nStep 3809, loss: 0.00011146740143885836\r\nStep 3810, loss: 8.7289372459054e-05\r\nStep 3811, loss: 9.661127114668489e-05\r\nStep 3812, loss: 8.893955236999318e-05\r\nStep 3813, loss: 7.112843741197139e-05\r\nStep 3814, loss: 7.829774403944612e-05\r\nStep 3815, loss: 9.112220141105354e-05\r\nStep 3816, loss: 9.032140951603651e-05\r\nStep 3817, loss: 8.179787982953712e-05\r\nStep 3818, loss: 6.577229942195117e-05\r\nStep 3819, loss: 5.087182580609806e-05\r\nStep 3820, loss: 4.165166319580749e-05\r\nStep 3821, loss: 3.909920633304864e-05\r\nStep 3822, loss: 3.8096906791906804e-05\r\nStep 3823, loss: 3.4479504392948e-05\r\nStep 3824, loss: 3.229959838790819e-05\r\nStep 3825, loss: 2.9795011869282462e-05\r\nStep 3826, loss: 2.843065703928005e-05\r\nStep 3827, loss: 2.440653770463541e-05\r\nStep 3828, loss: 2.1096906493767165e-05\r\nStep 3829, loss: 1.9554483515094034e-05\r\nStep 3830, loss: 1.7429438230465166e-05\r\nStep 3831, loss: 1.5702296877861954e-05\r\nStep 3832, loss: 1.492522278567776e-05\r\nStep 3833, loss: 1.3788260730507318e-05\r\nStep 3834, loss: 1.2481923477025703e-05\r\nStep 3835, loss: 1.3506083632819355e-05\r\nStep 3836, loss: 1.2372483979561366e-05\r\nStep 3837, loss: 1.1096301022917032e-05\r\nStep 3838, loss: 9.80295408226084e-06\r\nStep 3839, loss: 8.924375833885279e-06\r\nStep 3840, loss: 8.031859579205047e-06\r\nStep 3841, loss: 7.342657227127347e-06\r\nStep 3842, loss: 6.802085408708081e-06\r\nStep 3843, loss: 6.407014097931096e-06\r\nStep 3844, loss: 6.070742529118434e-06\r\nStep 3845, loss: 5.75590320295305e-06\r\nStep 3846, loss: 5.408473043644335e-06\r\nStep 3847, loss: 5.030687134421896e-06\r\nStep 3848, loss: 4.76242621516576e-06\r\nStep 3849, loss: 4.48412356490735e-06\r\nStep 3850, loss: 4.208691279927734e-06\r\nStep 3851, loss: 3.912799456884386e-06\r\nStep 3852, loss: 3.639209580796887e-06\r\nStep 3853, loss: 3.411888201299007e-06\r\nStep 3854, loss: 3.2180848847929155e-06\r\nStep 3855, loss: 3.0306619009934366e-06\r\nStep 3856, loss: 2.9015525342401816e-06\r\nStep 3857, loss: 2.7479541131469887e-06\r\nStep 3858, loss: 2.649730049597565e-06\r\nStep 3859, loss: 2.520806447137147e-06\r\nStep 3860, loss: 2.4182741071854252e-06\r\nStep 3861, loss: 
2.2920464743947377e-06\r\nStep 3862, loss: 2.201014012825908e-06\r\n",,terminal_output +4063,10320119,"TERMINAL",0,0,"499259",,terminal_output +4064,10321155,"TERMINAL",0,0,"58:008:003630",,terminal_output +4065,10322183,"TERMINAL",0,0,"611471",,terminal_output +4066,10323223,"TERMINAL",0,0,"722582",,terminal_output +4067,10324258,"TERMINAL",0,0,"833693",,terminal_output +4068,10325353,"TERMINAL",0,0,"9447304",,terminal_output +4069,10325943,"TERMINAL",0,0,"Step 3863, loss: 2.11649216907972e-06\r\nStep 3864, loss: 2.0165184650977608e-06\r\nStep 3865, loss: 1.947561486304039e-06\r\nStep 3866, loss: 1.8597511370899156e-06\r\nStep 3867, loss: 1.7840642385635874e-06\r\nStep 3868, loss: 1.6991693883028347e-06\r\nStep 3869, loss: 1.6284382127196295e-06\r\nStep 3870, loss: 1.5565651665383484e-06\r\nStep 3871, loss: 1.5129880921449512e-06\r\nStep 3872, loss: 1.4568420283467276e-06\r\nStep 3873, loss: 1.4022103869137936e-06\r\nStep 3874, loss: 1.3576369610746042e-06\r\nStep 3875, loss: 1.3077104767944547e-06\r\nStep 3876, loss: 1.2632746120289085e-06\r\nStep 3877, loss: 1.2163200153736398e-06\r\nStep 3878, loss: 1.174483713839436e-06\r\nStep 3879, loss: 1.1428675179558923e-06\r\nStep 3880, loss: 1.1052424042645725e-06\r\nStep 3881, loss: 1.06537402189133e-06\r\nStep 3882, loss: 1.031101419357583e-06\r\nStep 3883, loss: 9.987239764086553e-07\r\nStep 3884, loss: 9.779596439329907e-07\r\nStep 3885, loss: 9.43897646266123e-07\r\nStep 3886, loss: 9.194080803354154e-07\r\nStep 3887, loss: 8.851275197230279e-07\r\nStep 3888, loss: 8.590910738348612e-07\r\nStep 3889, loss: 8.379055884688569e-07\r\nStep 3890, loss: 8.125332442432409e-07\r\nStep 3891, loss: 7.881650390118011e-07\r\nStep 3892, loss: 7.666312740184367e-07\r\nStep 3893, loss: 7.464581699423434e-07\r\nStep 3894, loss: 7.263578254423919e-07\r\nStep 3895, loss: 7.070349852256186e-07\r\nStep 3896, loss: 6.895747333146574e-07\r\nStep 3897, loss: 6.660649205514346e-07\r\nStep 3898, loss: 6.514554229397618e-07\r\nStep 3899, loss: 6.34205719052261e-07\r\nStep 3900, loss: 6.167049946270708e-07\r\nStep 3901, loss: 5.976413035568839e-07\r\nStep 3902, loss: 5.851372861798154e-07\r\nStep 3903, loss: 5.686569579665957e-07\r\nStep 3904, loss: 5.52856931790302e-07\r\nStep 3905, loss: 5.395025937104947e-07\r\nStep 3906, loss: 5.265855520519835e-07\r\nStep 3907, loss: 5.106153935230395e-07\r\nStep 3908, loss: 4.972691840521293e-07\r\nStep 3909, loss: 4.855345423493418e-07\r\nStep 3910, loss: 4.7242315304174554e-07\r\nStep 3911, loss: 4.6123918195917213e-07\r\nStep 3912, loss: 4.506950688210054e-07\r\nStep 3913, loss: 4.38385399093022e-07\r\nStep 3914, loss: 4.2961477220160305e-07\r\nStep 3915, loss: 4.177424841600441e-07\r\nStep 3916, loss: 4.0723878669268743e-07\r\nStep 3917, loss: 3.9947238406057295e-07\r\nStep 3918, loss: 3.857859951494902e-07\r\nStep 3919, loss: 3.754361728169897e-07\r\nStep 3920, loss: 3.686901663968456e-07\r\nStep 3921, loss: 3.5900444572689594e-07\r\nStep 3922, loss: 3.504119945318962e-07\r\nStep 3923, loss: 3.4283183936167916e-07\r\nStep 3924, loss: 3.318665449114633e-07\r\nStep 3925, loss: 3.2520961212867405e-07\r\nStep 3926, loss: 3.1673860689807043e-07\r\nStep 3927, loss: 3.0912607940081216e-07\r\nStep 3928, loss: 3.0149732310746913e-07\r\nStep 3929, loss: 2.9603896223306947e-07\r\nStep 3930, loss: 2.8802963925045333e-07\r\nStep 3931, loss: 2.8145370833954075e-07\r\nStep 3932, loss: 2.7454572659735277e-07\r\nStep 3933, loss: 2.6728949364951404e-07\r\nStep 3934, loss: 2.6175015932494716e-07\r\nStep 3935, loss: 
2.5527140223857714e-07\r\nStep 3936, loss: 2.4965916622932127e-07\r\nStep 3937, loss: 2.4315610858138825e-07\r\nStep 3938, loss: 2.3756007294650772e-07\r\nStep 3939, loss: 2.3254712289144663e-07\r\nStep 3940, loss: 2.2718595005244424e-07\r\nStep 3941, loss: 2.2044802960863308e-07\r\nStep 3942, loss: 2.1682805595446553e-07\r\nStep 3943, loss: 2.1223623036803474e-07\r\nStep 3944, loss: 2.0649441978548566e-07\r\nStep 3945, loss: 2.027772296742114e-07\r\nStep 3946, loss: 1.983635655733451e-07\r\nStep 3947, loss: 1.9370695270026772e-07\r\nStep 3948, loss: 1.9022462538487162e-07\r\nStep 3949, loss: 1.860134233311328e-07\r\nStep 3950, loss: 1.824177076059641e-07\r\nStep 3951, loss: 1.776963074462401e-07\r\nStep 3952, loss: 1.742544668559276e-07\r\nStep 3953, loss: 1.7075592495530145e-07\r\nStep 3954, loss: 1.670873359671532e-07\r\nStep 3955, loss: 1.6367788191473664e-07\r\nStep 3956, loss: 1.6056807794484484e-07\r\nStep 3957, loss: 1.5668891251152672e-07\r\nStep 3958, loss: 1.5364389582828153e-07\r\nStep 3959, loss: 1.5088231464233104e-07\r\nStep 3960, loss: 1.483798968138217e-07\r\nStep 3961, loss: 1.4560212946435058e-07\r\nStep 3962, loss: 1.4327787312140572e-07\r\nStep 3963, loss: 1.4027334316324414e-07\r\nStep 3964, loss: 1.3781951224700606e-07\r\nStep 3965, loss: 1.3491215611338703e-07\r\nStep 3966, loss: 1.3252311248379556e-07\r\nStep 3967, loss: 1.2989922026918066e-07\r\nStep 3968, loss: 1.28433399027017e-07\r\nStep 3969, loss: 1.2639257818136684e-07\r\nStep 3970, loss: 1.2310461272591056e-07\r\nStep 3971, loss: 1.2222997725075402e-07\r\nStep 3972, loss: 1.204078330374614e-07\r\nStep 3973, loss: 1.1848850078877149e-07\r\nStep 3974, loss: 1.166015621834049e-07\r\nStep 3975, loss: 1.1448786807477518e-07\r\nStep 3976, loss: 1.139209757639037e-07\r\nStep 3977, loss: 1.1098933327957639e-07\r\nStep 3978, loss: 1.0933725036466058e-07\r\nStep 3979, loss: 1.0853550236333831e-07\r\nStep 3980, loss: 1.072154560688432e-07\r\nStep 3981, loss: 1.0566054697846994e-07\r\nStep 3982, loss: 1.0377360837310334e-07\r\nStep 3983, loss: 1.0190286303668472e-07\r\nStep 3984, loss: 1.0128738381354196e-07\r\nStep 3985, loss: 9.961910052425083e-08\r\nStep 3986, loss: 9.825046021205708e-08\r\nStep 3987, loss: 9.732723782462926e-08\r\nStep 3988, loss: 9.574804238354773e-08\r\nStep 3989, loss: 9.492199382066246e-08\r\nStep 3990, loss: 9.400687162042232e-08\r\nStep 3991, loss: 9.320512361910005e-08\r\nStep 3992, loss: 9.149634649929794e-08\r\nStep 3993, loss: 9.06136179423811e-08\r\nStep 3994, loss: 9.03139749652837e-08\r\nStep 3995, loss: 9.023298730426177e-08\r\nStep 3996, loss: 8.888864755363102e-08\r\nStep 3997, loss: 8.779535676239902e-08\r\nStep 3998, loss: 8.718797062101658e-08\r\nStep 3999, loss: 8.553588770610077e-08\r\nSaved checkpoint at step 4000\r\nStep 4000, loss: 8.466125223094423e-08\r\nStep 4001, loss: 8.389999806013293e-08\r\nStep 4002, loss: 8.389189787294526e-08\r\nStep 4003, loss: 8.300916931602842e-08\r\nStep 4004, loss: 8.267713269560772e-08\r\nStep 4005, loss: 8.215882729700752e-08\r\nStep 4006, loss: 8.152714769948943e-08\r\nStep 4007, loss: 8.119511107906874e-08\r\nStep 4008, loss: 8.08063873591891e-08\r\nStep 4009, loss: 8.141377350057155e-08\r\nStep 4010, loss: 7.95592214331009e-08\r\nStep 4011, loss: 7.917049771322127e-08\r\nStep 4012, loss: 7.98588644101983e-08\r\nStep 4013, loss: 7.946204050313099e-08\r\nStep 4014, loss: 7.824727532579345e-08\r\nStep 4015, loss: 7.737263985063692e-08\r\nStep 4016, loss: 7.795573253588373e-08\r\nStep 4017, loss: 7.751031461111779e-08\r\nStep 4018, loss: 
7.751031461111779e-08\r\nStep 4019, loss: 7.624695541608162e-08\r\nStep 4020, loss: 7.585013150901432e-08\r\nStep 4021, loss: 7.59958993512555e-08\r\nStep 4022, loss: 7.677335389644213e-08\r\nStep 4023, loss: 7.635223653323919e-08\r\nStep 4024, loss: 7.563956927469917e-08\r\nStep 4025, loss: 7.504838350769205e-08\r\nStep 4026, loss: 7.516176481203729e-08\r\nStep 4027, loss: 7.470824670008369e-08\r\nStep 4028, loss: 7.535612667197711e-08\r\nStep 4029, loss: 7.341249386172422e-08\r\nStep 4030, loss: 7.425473569355745e-08\r\nStep 4031, loss: 7.442480409736163e-08\r\nStep 4032, loss: 7.39793861725957e-08\r\nStep 4033, loss: 7.286180192522806e-08\r\nStep 4034, loss: 7.279701463858146e-08\r\nStep 4035, loss: 7.36797431954983e-08\r\nStep 4036, loss: 7.342869423609955e-08\r\nStep 4037, loss: 7.254595857375534e-08\r\nStep 4038, loss: 7.289419556855137e-08\r\nStep 4039, loss: 7.397128598540803e-08\r\nStep 4040, loss: 7.180090477731937e-08\r\nStep 4041, loss: 7.145266778252335e-08\r\nStep 4042, loss: 7.106394406264371e-08\r\nStep 4043, loss: 7.108823751877935e-08\r\nStep 4044, loss: 7.154174852530559e-08\r\nStep 4045, loss: 7.223012232770998e-08\r\nStep 4046, loss: 7.25216651176197e-08\r\nStep 4047, loss: 7.142027413920005e-08\r\nStep 4048, loss: 7.061852613787778e-08\r\nStep 4049, loss: 7.07804943544943e-08\r\nStep 4050, loss: 7.057803230736681e-08\r\nStep 4051, loss: 7.040797100898999e-08\r\nStep 4052, loss: 7.095056275829847e-08\r\nStep 4053, loss: 7.061042595069011e-08\r\nStep 4054, loss: 7.072380725503535e-08\r\nStep 4055, loss: 7.033508353515572e-08\r\nStep 4056, loss: 6.927418638724703e-08\r\nStep 4057, loss: 7.035937699129136e-08\r\nStep 4058, loss: 7.147696123865899e-08\r\nStep 4059, loss: 7.09100689277875e-08\r\nStep 4060, loss: 7.032698334796805e-08\r\nStep 4061, loss: 7.068331342452439e-08\r\nStep 4062, loss: 6.97195972065856e-08\r\nStep 4063, loss: 7.016501513135154e-08\r\nStep 4064, loss: 7.030268989183242e-08\r\nStep 4065, loss: 6.916080508290179e-08\r\nStep 4066, loss: 6.969530375044997e-08\r\nStep 4067, loss: 6.959812282048006e-08\r\nStep 4068, loss: 6.903933069679624e-08\r\nStep 4069, loss: 6.996255308422406e-08\r\nStep 4070, loss: 6.933087348670597e-08\r\n",,terminal_output +4070,10326328,"TERMINAL",0,0,"2066926",,terminal_output +4071,10327402,"TERMINAL",0,0,"2771037",,terminal_output +4072,10328424,"TERMINAL",0,0,"388148",,terminal_output +4073,10329424,"TERMINAL",0,0,"499259",,terminal_output +4074,10330480,"TERMINAL",0,0,"510103640",,terminal_output +4075,10330986,"TERMINAL",0,0,"Step 4071, loss: 6.916890527008945e-08\r\nStep 4072, loss: 6.963861665099103e-08\r\nStep 4073, loss: 6.960622300766772e-08\r\nStep 4074, loss: 6.901503013523325e-08\r\nStep 4075, loss: 6.988156542320212e-08\r\nStep 4076, loss: 7.018120840029951e-08\r\nStep 4077, loss: 6.928227946900734e-08\r\nStep 4078, loss: 6.84643381987371e-08\r\nStep 4079, loss: 6.847243838592476e-08\r\nStep 4080, loss: 6.945234787281152e-08\r\nStep 4081, loss: 6.881256808810576e-08\r\nStep 4082, loss: 6.809180774780543e-08\r\nStep 4083, loss: 6.797843354888755e-08\r\nStep 4084, loss: 7.002734037087066e-08\r\nStep 4085, loss: 6.912841143957849e-08\r\nStep 4086, loss: 6.835905708157952e-08\r\nStep 4087, loss: 6.903933069679624e-08\r\nStep 4088, loss: 6.958192244610473e-08\r\nStep 4089, loss: 6.943615460386354e-08\r\nStep 4090, loss: 6.758970272358056e-08\r\nStep 4091, loss: 6.808370756061777e-08\r\nStep 4092, loss: 6.868299351481255e-08\r\nStep 4093, loss: 6.970340393763763e-08\r\nStep 4094, loss: 6.865059987148925e-08\r\nStep 4095, 
loss: 6.895024284858664e-08\r\nStep 4096, loss: 6.861820622816595e-08\r\nStep 4097, loss: 6.814040176550407e-08\r\nStep 4098, loss: 6.900693705347294e-08\r\nStep 4099, loss: 6.890975612350303e-08\r\nStep 4100, loss: 6.984917177987882e-08\r\nStep 4101, loss: 6.881256808810576e-08\r\nStep 4102, loss: 6.778406458352038e-08\r\nStep 4103, loss: 6.782455841403134e-08\r\nStep 4104, loss: 6.8561519128707e-08\r\nStep 4105, loss: 6.890975612350303e-08\r\nStep 4106, loss: 6.984917177987882e-08\r\nStep 4107, loss: 6.85291254853837e-08\r\nStep 4108, loss: 6.884496883685642e-08\r\nStep 4109, loss: 6.945234787281152e-08\r\nStep 4110, loss: 6.862630641535361e-08\r\nStep 4111, loss: 6.845623801154943e-08\r\nStep 4112, loss: 6.866680024586458e-08\r\nStep 4113, loss: 6.748442160642298e-08\r\nStep 4114, loss: 6.777597150176007e-08\r\nStep 4115, loss: 6.81646952216397e-08\r\nStep 4116, loss: 6.85291254853837e-08\r\nStep 4117, loss: 6.967101029431433e-08\r\nStep 4118, loss: 6.775977112738474e-08\r\nStep 4119, loss: 6.867490043305224e-08\r\nStep 4120, loss: 1.726425864490011e-07\r\nStep 4121, loss: 7.181709804626735e-08\r\nStep 4122, loss: 7.16308363735152e-08\r\nStep 4123, loss: 6.905552396574421e-08\r\nStep 4124, loss: 7.646561073215707e-08\r\nStep 4125, loss: 7.725116546453137e-08\r\nStep 4126, loss: 6.933087348670597e-08\r\nStep 4127, loss: 6.65530990318075e-08\r\nStep 4128, loss: 6.771118421511346e-08\r\nStep 4129, loss: 6.699041676938577e-08\r\nStep 4130, loss: 7.095866294548614e-08\r\nStep 4131, loss: 7.260265277864164e-08\r\nStep 4132, loss: 6.873968771969885e-08\r\nStep 4133, loss: 6.587283252201814e-08\r\nStep 4134, loss: 6.467426061362858e-08\r\nStep 4135, loss: 6.595381307761272e-08\r\nStep 4136, loss: 6.61805756863032e-08\r\nStep 4137, loss: 6.78002649578957e-08\r\nStep 4138, loss: 6.736294722031744e-08\r\nStep 4139, loss: 6.512777162015482e-08\r\nStep 4140, loss: 6.488482284794372e-08\r\nStep 4141, loss: 6.710379807373101e-08\r\nStep 4142, loss: 6.80675142916698e-08\r\nStep 4143, loss: 6.929847984338267e-08\r\nStep 4144, loss: 6.924988582568403e-08\r\nStep 4145, loss: 6.586473233483048e-08\r\nStep 4146, loss: 6.45365858531477e-08\r\nStep 4147, loss: 6.54193215154919e-08\r\nStep 4148, loss: 6.93227732995183e-08\r\nStep 4149, loss: 6.940376096054024e-08\r\nStep 4150, loss: 1.2998827969568083e-07\r\nStep 4151, loss: 1.1590506687753077e-07\r\nStep 4152, loss: 1.074745910045749e-07\r\nStep 4153, loss: 1.025669220666714e-07\r\nStep 4154, loss: 5.992046681058127e-08\r\nStep 4155, loss: 6.156444953830942e-08\r\nStep 4156, loss: 6.41964561509667e-08\r\nStep 4157, loss: 6.617247549911554e-08\r\nStep 4158, loss: 6.531404039833433e-08\r\nStep 4159, loss: 6.352428272293764e-08\r\nStep 4160, loss: 6.281971565158528e-08\r\nStep 4161, loss: 6.536263441603296e-08\r\nStep 4162, loss: 6.670697416666371e-08\r\nStep 4163, loss: 6.610768821246893e-08\r\nStep 4164, loss: 6.424504306323797e-08\r\nStep 4165, loss: 6.341900160578007e-08\r\nStep 4166, loss: 6.397779372946388e-08\r\nStep 4167, loss: 6.489292303513139e-08\r\nStep 4168, loss: 6.550840225827415e-08\r\nStep 4169, loss: 6.573515776153727e-08\r\nStep 4170, loss: 6.608339475633329e-08\r\nStep 4171, loss: 6.60590941947703e-08\r\nStep 4172, loss: 6.792983953118892e-08\r\nStep 4173, loss: 6.808370756061777e-08\r\nStep 4174, loss: 6.631015025959641e-08\r\nStep 4175, loss: 6.530594021114666e-08\r\nStep 4176, loss: 6.689323583941587e-08\r\nStep 4177, loss: 6.84643381987371e-08\r\nStep 4178, loss: 6.711999134267899e-08\r\nStep 4179, loss: 6.60590941947703e-08\r\nStep 4180, 
loss: 6.662598650564178e-08\r\nStep 4181, loss: 6.604290092582232e-08\r\nStep 4182, loss: 6.605100111300999e-08\r\nStep 4183, loss: 6.556508935773309e-08\r\nStep 4184, loss: 6.512777162015482e-08\r\nStep 4185, loss: 6.653690576285953e-08\r\nStep 4186, loss: 6.792173934400125e-08\r\nStep 4187, loss: 6.681225528382129e-08\r\nStep 4188, loss: 6.729005974648317e-08\r\nStep 4189, loss: 6.644782502007729e-08\r\nStep 4190, loss: 6.570276411821396e-08\r\nStep 4191, loss: 6.64559181018376e-08\r\nStep 4192, loss: 6.66097932366938e-08\r\nStep 4193, loss: 6.643162464570196e-08\r\nStep 4194, loss: 6.671507435385138e-08\r\nStep 4195, loss: 6.625345605471011e-08\r\nStep 4196, loss: 6.785695205735465e-08\r\nStep 4197, loss: 6.900693705347294e-08\r\nStep 4198, loss: 6.698232368762547e-08\r\nStep 4199, loss: 6.524925311168772e-08\r\nStep 4200, loss: 6.790554607505328e-08\r\nStep 4201, loss: 6.668268071052807e-08\r\nStep 4202, loss: 6.646401828902526e-08\r\nStep 4203, loss: 6.61805756863032e-08\r\nStep 4204, loss: 6.803512064834649e-08\r\nStep 4205, loss: 6.809180774780543e-08\r\nStep 4206, loss: 6.708759769935568e-08\r\nStep 4207, loss: 6.737914759469277e-08\r\nStep 4208, loss: 1.1963845736318035e-07\r\nStep 4209, loss: 1.0872174982523575e-07\r\nStep 4210, loss: 5.9896180459873e-08\r\nStep 4211, loss: 4.049629933433607e-05\r\nStep 4212, loss: 9.768355369033088e-08\r\nStep 4213, loss: 2.7050407879869454e-05\r\nStep 4214, loss: 4.583871941576945e-06\r\nStep 4215, loss: 3.0995774977782276e-06\r\nStep 4216, loss: 5.415675332187675e-06\r\nStep 4217, loss: 5.477834747580346e-06\r\nStep 4218, loss: 8.109075679385569e-06\r\nStep 4219, loss: 1.1124558113806415e-05\r\nStep 4220, loss: 9.627207873563748e-06\r\nStep 4221, loss: 8.558013178117108e-06\r\nStep 4222, loss: 6.839214620413259e-06\r\nStep 4223, loss: 7.1634458436165005e-06\r\nStep 4224, loss: 6.269403002079343e-06\r\nStep 4225, loss: 5.540203346754424e-06\r\nStep 4226, loss: 4.50483321401407e-06\r\nStep 4227, loss: 4.1237226469093e-06\r\nStep 4228, loss: 3.5364696486794855e-06\r\nStep 4229, loss: 3.3181117942149285e-06\r\nStep 4230, loss: 3.0113292268652003e-06\r\nStep 4231, loss: 2.905976316469605e-06\r\nStep 4232, loss: 2.605573854452814e-06\r\nStep 4233, loss: 2.33122727877344e-06\r\nStep 4234, loss: 0.047100961208343506\r\nStep 4235, loss: 2.758997879936942e-06\r\nStep 4236, loss: 2.2918478862266056e-05\r\nStep 4237, loss: 0.00046628524432890117\r\nStep 4238, loss: 0.0005209019291214645\r\nStep 4239, loss: 0.0003383417788427323\r\nStep 4240, loss: 0.00018404994625598192\r\nStep 4241, loss: 0.00011410813021939248\r\nStep 4242, loss: 8.455266652163118e-05\r\nStep 4243, loss: 0.00010883043432841077\r\nStep 4244, loss: 0.00011731583799701184\r\nStep 4245, loss: 9.075064008357003e-05\r\nStep 4246, loss: 0.00010360891610616818\r\nStep 4247, loss: 0.00011376683687558398\r\nStep 4248, loss: 0.00013259256957098842\r\nStep 4249, loss: 0.00013394148845691234\r\nStep 4250, loss: 0.020278198644518852\r\nStep 4251, loss: 0.00013073785521555692\r\nStep 4252, loss: 0.00024782397667877376\r\nStep 4253, loss: 0.0003542987396940589\r\nStep 4254, loss: 0.0003588955441955477\r\nStep 4255, loss: 0.00028226315043866634\r\nStep 4256, loss: 0.000244722468778491\r\nStep 4257, loss: 0.00016035801672842354\r\nStep 4258, loss: 0.00011242716573178768\r\nStep 4259, loss: 9.783274435903877e-05\r\nStep 4260, loss: 9.217280603479594e-05\r\nStep 4261, loss: 0.00010086457768920809\r\nStep 4262, loss: 0.0001135150232585147\r\nStep 4263, loss: 9.174041042570025e-05\r\nStep 4264, loss: 
7.751680095680058e-05\r\nStep 4265, loss: 8.27564435894601e-05\r\nStep 4266, loss: 7.161592657212168e-05\r\nStep 4267, loss: 6.544200732605532e-05\r\nStep 4268, loss: 6.276641215663403e-05\r\nStep 4269, loss: 6.0175905673531815e-05\r\nStep 4270, loss: 5.477550803334452e-05\r\nStep 4271, loss: 4.856936720898375e-05\r\nStep 4272, loss: 4.666407039621845e-05\r\nStep 4273, loss: 4.015930608147755e-05\r\nStep 4274, loss: 3.6522807931760326e-05\r\nStep 4275, loss: 3.253727845731191e-05\r\nStep 4276, loss: 2.9846733013982885e-05\r\nStep 4277, loss: 2.7141957616549917e-05\r\nStep 4278, loss: 2.40577501244843e-05\r\nStep 4279, loss: 2.251378282380756e-05\r\nStep 4280, loss: 1.95950942725176e-05\r\n",,terminal_output +4076,10331494,"TERMINAL",0,0,"611471",,terminal_output +4077,10332510,"TERMINAL",0,0,"722582",,terminal_output +4078,10333647,"TERMINAL",0,0,"833693",,terminal_output +4079,10334670,"TERMINAL",0,0,"9447404",,terminal_output +4080,10335543,"TERMINAL",0,0,"Step 4281, loss: 1.8465445464244112e-05\r\nStep 4282, loss: 0.0005742618814110756\r\nStep 4283, loss: 1.8534274204284884e-05\r\nStep 4284, loss: 2.7757092539104633e-05\r\nStep 4285, loss: 5.6548244174337015e-05\r\nStep 4286, loss: 0.00015276002523023635\r\nStep 4287, loss: 9.404469165019691e-05\r\nStep 4288, loss: 5.2659892389783636e-05\r\nStep 4289, loss: 3.574692163965665e-05\r\nStep 4290, loss: 0.0013246716698631644\r\nStep 4291, loss: 2.0825100364163518e-05\r\nStep 4292, loss: 4.863175854552537e-05\r\nStep 4293, loss: 0.00015643087681382895\r\nStep 4294, loss: 0.00018169335089623928\r\nStep 4295, loss: 0.00012272234016563743\r\nStep 4296, loss: 6.859159475425258e-05\r\nStep 4297, loss: 4.804712443728931e-05\r\nStep 4298, loss: 0.007220720872282982\r\nStep 4299, loss: 0.0007790169911459088\r\nStep 4300, loss: 0.0002244586357846856\r\nStep 4301, loss: 0.00025984825333580375\r\nStep 4302, loss: 0.00035090153687633574\r\nStep 4303, loss: 0.0003541707119438797\r\nStep 4304, loss: 0.0004049093113280833\r\nStep 4305, loss: 0.0002682208432815969\r\nStep 4306, loss: 0.00016853181296028197\r\nStep 4307, loss: 7.796018326189369e-05\r\nStep 4308, loss: 7.896264287410304e-05\r\nStep 4309, loss: 7.829949754523113e-05\r\nStep 4310, loss: 8.841502130962908e-05\r\nStep 4311, loss: 7.515768083976582e-05\r\nStep 4312, loss: 6.1019349232083187e-05\r\nStep 4313, loss: 5.522380524780601e-05\r\nStep 4314, loss: 5.022632103646174e-05\r\nStep 4315, loss: 4.441117562237196e-05\r\nStep 4316, loss: 4.119734148844145e-05\r\nStep 4317, loss: 3.89204069506377e-05\r\nStep 4318, loss: 3.882198507199064e-05\r\nStep 4319, loss: 3.627457408583723e-05\r\nStep 4320, loss: 3.1526586099062115e-05\r\nStep 4321, loss: 2.8554561140481383e-05\r\nStep 4322, loss: 2.5210430976585485e-05\r\nStep 4323, loss: 2.27396212721942e-05\r\nStep 4324, loss: 2.061944906017743e-05\r\nStep 4325, loss: 1.8914030079031363e-05\r\nStep 4326, loss: 0.00020025695266667753\r\nStep 4327, loss: 2.9116703444742598e-05\r\nStep 4328, loss: 3.7621026422129944e-05\r\nStep 4329, loss: 0.0006432009977288544\r\nStep 4330, loss: 3.096922591794282e-05\r\nStep 4331, loss: 2.624666376505047e-05\r\nStep 4332, loss: 2.4962850147858262e-05\r\nStep 4333, loss: 2.4018043404794298e-05\r\nStep 4334, loss: 2.9503044061129913e-05\r\nStep 4335, loss: 0.0001751408854033798\r\nStep 4336, loss: 1.9279632397228852e-05\r\nStep 4337, loss: 1.7230131561518647e-05\r\nStep 4338, loss: 1.6156098354258575e-05\r\nStep 4339, loss: 1.814326060411986e-05\r\nStep 4340, loss: 2.084587686113082e-05\r\nStep 4341, loss: 
7.781366730341688e-05\r\nStep 4342, loss: 1.7311926058027893e-05\r\nStep 4343, loss: 1.4482569895335473e-05\r\nStep 4344, loss: 1.2344235074124299e-05\r\nStep 4345, loss: 1.148552109953016e-05\r\nStep 4346, loss: 1.0466669664310757e-05\r\nStep 4347, loss: 9.301415047957562e-06\r\nStep 4348, loss: 9.151871381618548e-06\r\nStep 4349, loss: 8.697514203959145e-06\r\nStep 4350, loss: 8.813345630187541e-06\r\nStep 4351, loss: 8.615265869593713e-06\r\nStep 4352, loss: 8.147691005433444e-06\r\nStep 4353, loss: 7.742728485027328e-06\r\nStep 4354, loss: 7.138146884244634e-06\r\nStep 4355, loss: 6.8696717789862305e-06\r\nStep 4356, loss: 5.9643657550623175e-06\r\nStep 4357, loss: 5.384608357417164e-06\r\nStep 4358, loss: 4.804288892046316e-06\r\nStep 4359, loss: 4.497162080951966e-06\r\nStep 4360, loss: 4.104582330910489e-06\r\nStep 4361, loss: 3.939226644433802e-06\r\nStep 4362, loss: 3.775605819100747e-06\r\nStep 4363, loss: 3.500781076581916e-06\r\nStep 4364, loss: 3.3157073175971163e-06\r\nStep 4365, loss: 3.068559180974262e-06\r\nStep 4366, loss: 2.9209534204710508e-06\r\nStep 4367, loss: 2.75216643785825e-06\r\nStep 4368, loss: 2.569329581092461e-06\r\nStep 4369, loss: 2.44483908318216e-06\r\nStep 4370, loss: 2.283797130075982e-06\r\nStep 4371, loss: 2.165487330785254e-06\r\nStep 4372, loss: 2.03845615942555e-06\r\nStep 4373, loss: 1.928661731653847e-06\r\nStep 4374, loss: 1.8220887341158232e-06\r\nStep 4375, loss: 1.7328703734165174e-06\r\nStep 4376, loss: 1.6489398149133194e-06\r\nStep 4377, loss: 1.5620856856912724e-06\r\nStep 4378, loss: 1.5158196902120835e-06\r\nStep 4379, loss: 1.4649051536252955e-06\r\nStep 4380, loss: 1.4010336144565372e-06\r\nStep 4381, loss: 1.338352490165562e-06\r\nStep 4382, loss: 1.2784328191628447e-06\r\nStep 4383, loss: 1.2370825288599008e-06\r\nStep 4384, loss: 1.187253360512841e-06\r\nStep 4385, loss: 1.1306458418403054e-06\r\nStep 4386, loss: 1.0821607929756283e-06\r\nStep 4387, loss: 1.0326067467758548e-06\r\nStep 4388, loss: 9.927060773406993e-07\r\nStep 4389, loss: 9.508210609965317e-07\r\nStep 4390, loss: 9.111227541325206e-07\r\nStep 4391, loss: 8.829564990264771e-07\r\nStep 4392, loss: 8.505304549544235e-07\r\nStep 4393, loss: 8.130186301968934e-07\r\nStep 4394, loss: 7.859779884711315e-07\r\nStep 4395, loss: 7.6354541533874e-07\r\nStep 4396, loss: 7.415744107674982e-07\r\nStep 4397, loss: 7.171415177253948e-07\r\nStep 4398, loss: 6.953728757252975e-07\r\nStep 4399, loss: 6.745599421265069e-07\r\nStep 4400, loss: 6.509611125693482e-07\r\nStep 4401, loss: 6.315006544355128e-07\r\nStep 4402, loss: 6.11043958542723e-07\r\nStep 4403, loss: 5.943044811829168e-07\r\nStep 4404, loss: 5.737993546972575e-07\r\nStep 4405, loss: 5.558693487728306e-07\r\nStep 4406, loss: 5.409439722825482e-07\r\nStep 4407, loss: 5.250305434856273e-07\r\nStep 4408, loss: 5.04225681652315e-07\r\nStep 4409, loss: 4.906283379568777e-07\r\nStep 4410, loss: 4.766990855387121e-07\r\nStep 4411, loss: 4.6333661885000765e-07\r\nStep 4412, loss: 4.5275194793248374e-07\r\nStep 4413, loss: 4.367575456853956e-07\r\nStep 4414, loss: 4.246342086844379e-07\r\nStep 4415, loss: 4.116037928270089e-07\r\nStep 4416, loss: 4.008814471490041e-07\r\nStep 4417, loss: 3.8998902596176777e-07\r\nStep 4418, loss: 3.7916950645922043e-07\r\nStep 4419, loss: 3.6880351217405405e-07\r\nStep 4420, loss: 3.58761440111266e-07\r\nStep 4421, loss: 3.4873556842285325e-07\r\nStep 4422, loss: 3.407423889711936e-07\r\nStep 4423, loss: 3.2980136666083126e-07\r\nStep 4424, loss: 3.1949207368597854e-07\r\nStep 4425, loss: 
3.1157179591900785e-07\r\nStep 4426, loss: 3.0210469503799686e-07\r\nStep 4427, loss: 2.9505093834814033e-07\r\nStep 4428, loss: 2.862398389424925e-07\r\nStep 4429, loss: 2.798825278205186e-07\r\nStep 4430, loss: 2.7168690053258615e-07\r\nStep 4431, loss: 2.6513527018323657e-07\r\nStep 4432, loss: 2.5754701482583187e-07\r\nStep 4433, loss: 2.5036368356268213e-07\r\nStep 4434, loss: 2.438687545236462e-07\r\nStep 4435, loss: 2.3777063518082286e-07\r\nStep 4436, loss: 2.305468029817348e-07\r\nStep 4437, loss: 2.251856301427324e-07\r\nStep 4438, loss: 2.1843150932454591e-07\r\nStep 4439, loss: 2.1413933382063988e-07\r\nStep 4440, loss: 2.091911710522254e-07\r\nStep 4441, loss: 2.0273670031656366e-07\r\nStep 4442, loss: 1.972621817003528e-07\r\nStep 4443, loss: 1.93050979646614e-07\r\nStep 4444, loss: 1.8758451858502667e-07\r\nStep 4445, loss: 1.8432083948027866e-07\r\nStep 4446, loss: 1.795265518467204e-07\r\nStep 4447, loss: 1.7521007578125136e-07\r\nStep 4448, loss: 1.704400887092561e-07\r\nStep 4449, loss: 1.655162265024046e-07\r\nStep 4450, loss: 1.6138601210968773e-07\r\nStep 4451, loss: 1.5787938423272863e-07\r\nStep 4452, loss: 1.5414599374707905e-07\r\nStep 4453, loss: 1.4705167927786533e-07\r\nStep 4454, loss: 1.4242749557524803e-07\r\nStep 4455, loss: 1.3642657847867667e-07\r\nStep 4456, loss: 1.409860033163568e-07\r\nStep 4457, loss: 1.2905697133192007e-07\r\nStep 4458, loss: 1.2489437040130724e-07\r\nStep 4459, loss: 1.2263490134500898e-07\r\nStep 4460, loss: 1.1877193628606619e-07\r\nStep 4461, loss: 1.167554373182611e-07\r\nStep 4462, loss: 1.1294915935877725e-07\r\nStep 4463, loss: 1.1132947008718475e-07\r\nStep 4464, loss: 1.0814677864345867e-07\r\nStep 4465, loss: 1.064218082547086e-07\r\nStep 4466, loss: 1.0268841776905901e-07\r\nStep 4467, loss: 1.0112541559692545e-07\r\nStep 4468, loss: 1.1411533051841616e-07\r\nStep 4469, loss: 9.676844570094545e-08\r\nStep 4470, loss: 9.41202458193402e-08\r\nStep 4471, loss: 9.264633149541623e-08\r\nStep 4472, loss: 1.0617885237707014e-07\r\nStep 4473, loss: 1.0507746139865048e-07\r\nStep 4474, loss: 1.0310953513226195e-07\r\nStep 4475, loss: 8.534961892792126e-08\r\nStep 4476, loss: 8.39080982473206e-08\r\nStep 4477, loss: 8.200495926757867e-08\r\nStep 4478, loss: 8.151904751230177e-08\r\nStep 4479, loss: 8.056343148155065e-08\r\nStep 4480, loss: 9.389349031607708e-08\r\nStep 4481, loss: 7.787474487486179e-08\r\nStep 4482, loss: 7.613357411173638e-08\r\nStep 4483, loss: 7.501598986436875e-08\r\nStep 4484, loss: 7.357446207834073e-08\r\nStep 4485, loss: 7.273222735193485e-08\r\nStep 4486, loss: 7.227061615822095e-08\r\nStep 4487, loss: 7.120161882312459e-08\r\nStep 4488, loss: 7.056183903841884e-08\r\n",,terminal_output +4081,10335637,"TERMINAL",0,0,"3055815",,terminal_output +4082,10336649,"TERMINAL",0,0,"166926",,terminal_output +4083,10337684,"TERMINAL",0,0,"2772037",,terminal_output +4084,10338716,"TERMINAL",0,0,"388148",,terminal_output +4085,10339749,"TERMINAL",0,0,"499259",,terminal_output +4086,10340217,"TERMINAL",0,0,"Step 4489, loss: 7.101535004494508e-08\r\nStep 4490, loss: 6.969530375044997e-08\r\nStep 4491, loss: 6.88287684624811e-08\r\nStep 4492, loss: 6.826187615160961e-08\r\nStep 4493, loss: 8.532532547178562e-08\r\nStep 4494, loss: 6.65126123067239e-08\r\nStep 4495, loss: 8.319543098878057e-08\r\nStep 4496, loss: 8.127609874009067e-08\r\nStep 4497, loss: 6.274682817775101e-08\r\nStep 4498, loss: 6.288450293823189e-08\r\nStep 4499, loss: 6.260105323008247e-08\r\nStep 4500, loss: 6.226901660966178e-08\r\nStep 4501, loss: 
6.232571081454807e-08\r\nStep 4502, loss: 6.223662296633847e-08\r\nStep 4503, loss: 6.213944914179592e-08\r\nStep 4504, loss: 6.090038340289539e-08\r\nStep 4505, loss: 6.183169887208351e-08\r\nStep 4506, loss: 6.086798975957208e-08\r\nStep 4507, loss: 6.039017819148285e-08\r\nStep 4508, loss: 6.091657667184336e-08\r\nStep 4509, loss: 6.118382600561745e-08\r\nStep 4510, loss: 6.078700209855015e-08\r\nStep 4511, loss: 6.026061072361699e-08\r\nStep 4512, loss: 6.026871091080466e-08\r\nStep 4513, loss: 8.134088602673728e-08\r\nStep 4514, loss: 7.742123386833555e-08\r\nStep 4515, loss: 7.508077715101535e-08\r\nStep 4516, loss: 5.6446236129659155e-08\r\nStep 4517, loss: 5.601701857926855e-08\r\nStep 4518, loss: 5.776628597686795e-08\r\nStep 4519, loss: 5.8114519418950294e-08\r\nStep 4520, loss: 5.696453797554568e-08\r\nStep 4521, loss: 7.581773786569102e-08\r\nStep 4522, loss: 5.6899750688899076e-08\r\nStep 4523, loss: 5.725608076545541e-08\r\nStep 4524, loss: 5.8041631945116023e-08\r\nStep 4525, loss: 5.962892402067155e-08\r\nStep 4526, loss: 5.9458859169581046e-08\r\nStep 4527, loss: 5.7717691959169315e-08\r\nStep 4528, loss: 5.7110309370500545e-08\r\nStep 4529, loss: 5.766910149418436e-08\r\nStep 4530, loss: 5.9515549821753666e-08\r\nStep 4531, loss: 5.985568662936203e-08\r\nStep 4532, loss: 5.847894968269429e-08\r\nStep 4533, loss: 5.7620511029199406e-08\r\nStep 4534, loss: 5.730467123044036e-08\r\nStep 4535, loss: 5.859232743432585e-08\r\nStep 4536, loss: 5.838986538719837e-08\r\nStep 4537, loss: 5.884337994643829e-08\r\nStep 4538, loss: 5.8244093992243506e-08\r\nStep 4539, loss: 5.79201540062968e-08\r\nStep 4540, loss: 5.840606220886002e-08\r\nStep 4541, loss: 5.8738098829280716e-08\r\nStep 4542, loss: 5.937787861398647e-08\r\nStep 4543, loss: 5.892436050203287e-08\r\nStep 4544, loss: 5.891626386755888e-08\r\nStep 4545, loss: 5.830078109170245e-08\r\nStep 4546, loss: 5.908633227136306e-08\r\nStep 4547, loss: 5.8738098829280716e-08\r\nStep 4548, loss: 5.911062928021238e-08\r\nStep 4549, loss: 5.827648763556681e-08\r\nStep 4550, loss: 5.779867962019125e-08\r\nStep 4551, loss: 5.884337994643829e-08\r\nStep 4552, loss: 6.013103615032378e-08\r\nStep 4553, loss: 6.005814157106215e-08\r\nStep 4554, loss: 5.9021544984716456e-08\r\nStep 4555, loss: 5.9191613388520636e-08\r\nStep 4556, loss: 8.677494633957394e-08\r\nStep 4557, loss: 5.495612143135986e-08\r\nStep 4558, loss: 5.496421806583385e-08\r\nStep 4559, loss: 5.780677625466524e-08\r\nStep 4560, loss: 5.966132476942221e-08\r\nStep 4561, loss: 5.8227897170581855e-08\r\nStep 4562, loss: 5.732896823928968e-08\r\nStep 4563, loss: 5.809832259728864e-08\r\nStep 4564, loss: 6.07546155606542e-08\r\nStep 4565, loss: 8.790063077412924e-08\r\nStep 4566, loss: 8.033667597828753e-08\r\nStep 4567, loss: 5.378184297910593e-08\r\nStep 4568, loss: 5.172483596993516e-08\r\nStep 4569, loss: 5.886766984986025e-08\r\nStep 4570, loss: 5.9232103666317926e-08\r\nStep 4571, loss: 5.6786372937267515e-08\r\nStep 4572, loss: 5.8454652673844976e-08\r\nStep 4573, loss: 5.8397965574386035e-08\r\nStep 4574, loss: 5.948315617843036e-08\r\nStep 4575, loss: 8.317923061440524e-08\r\nStep 4576, loss: 7.782615085716316e-08\r\nStep 4577, loss: 5.433253846831576e-08\r\nStep 4578, loss: 5.239700584525053e-08\r\nStep 4579, loss: 5.713460637934986e-08\r\nStep 4580, loss: 6.070602154295557e-08\r\nStep 4581, loss: 5.992047391600863e-08\r\nStep 4582, loss: 7.84011433552223e-08\r\nStep 4583, loss: 7.461916595730145e-08\r\nStep 4584, loss: 5.5199073756284633e-08\r\nStep 4585, loss: 
5.643813594247149e-08\r\nStep 4586, loss: 5.690784732337306e-08\r\nStep 4587, loss: 7.778566413207955e-08\r\nStep 4588, loss: 7.295898285519797e-08\r\nStep 4589, loss: 5.369276223632369e-08\r\nStep 4590, loss: 5.53772387945628e-08\r\nStep 4591, loss: 5.979089223728806e-08\r\nStep 4592, loss: 6.13943882399326e-08\r\nStep 4593, loss: 5.690784732337306e-08\r\nStep 4594, loss: 5.602511521374254e-08\r\nStep 4595, loss: 5.573357242383281e-08\r\nStep 4596, loss: 5.793635082795845e-08\r\nStep 4597, loss: 6.15482633747888e-08\r\nStep 4598, loss: 6.027680399256496e-08\r\nStep 4599, loss: 5.772579214635698e-08\r\nStep 4600, loss: 5.7563823929740465e-08\r\nStep 4601, loss: 5.8551833603814885e-08\r\nStep 4602, loss: 6.030919763588827e-08\r\nStep 4603, loss: 6.134580132766132e-08\r\nStep 4604, loss: 5.922400703184394e-08\r\nStep 4605, loss: 5.843035921770934e-08\r\nStep 4606, loss: 5.901344835024247e-08\r\nStep 4607, loss: 6.016342979364708e-08\r\nStep 4608, loss: 6.077080172417482e-08\r\nStep 4609, loss: 6.00257550331662e-08\r\nStep 4610, loss: 5.9175413014145306e-08\r\nStep 4611, loss: 5.8721902007619065e-08\r\nStep 4612, loss: 5.962083093891124e-08\r\nStep 4613, loss: 6.030110455412796e-08\r\nStep 4614, loss: 6.219613624125486e-08\r\nStep 4615, loss: 6.198557400693971e-08\r\nStep 4616, loss: 5.958843729558794e-08\r\nStep 4617, loss: 5.913491918363434e-08\r\nStep 4618, loss: 5.9556043652264634e-08\r\nStep 4619, loss: 8.713128352155763e-08\r\nStep 4620, loss: 7.838495008627433e-08\r\nStep 4621, loss: 7.406846691537794e-08\r\nStep 4622, loss: 7.205195373671813e-08\r\nStep 4623, loss: 5.274524284004656e-08\r\nStep 4624, loss: 5.6373348655824884e-08\r\nStep 4625, loss: 5.769339850303368e-08\r\nStep 4626, loss: 5.880288611592732e-08\r\nStep 4627, loss: 6.043877220918148e-08\r\nStep 4628, loss: 5.99609677465196e-08\r\nStep 4629, loss: 6.010673558876078e-08\r\nStep 4630, loss: 6.00257550331662e-08\r\nStep 4631, loss: 5.79525476496201e-08\r\nStep 4632, loss: 5.8171206518409235e-08\r\nStep 4633, loss: 5.887577358976159e-08\r\nStep 4634, loss: 6.136200170203665e-08\r\nStep 4635, loss: 6.084369630343645e-08\r\nStep 4636, loss: 6.050355949582809e-08\r\nStep 4637, loss: 6.011483577594845e-08\r\nStep 4638, loss: 5.952365000894133e-08\r\nStep 4639, loss: 6.152396991865317e-08\r\nStep 4640, loss: 6.137009478379696e-08\r\nStep 4641, loss: 6.226902371508913e-08\r\nStep 4642, loss: 6.004194830211418e-08\r\nStep 4643, loss: 5.987998008549766e-08\r\nStep 4644, loss: 6.107855199388723e-08\r\nStep 4645, loss: 6.2795429300877e-08\r\nStep 4646, loss: 6.089229032113508e-08\r\nStep 4647, loss: 5.93049911401522e-08\r\nStep 4648, loss: 5.905393862803976e-08\r\nStep 4649, loss: 6.169403832245735e-08\r\nStep 4650, loss: 6.277922182107432e-08\r\nStep 4651, loss: 8.703410259158773e-08\r\nStep 4652, loss: 7.894374220995815e-08\r\nStep 4653, loss: 5.357128429750446e-08\r\nStep 4654, loss: 5.442971939828567e-08\r\nStep 4655, loss: 5.941027225730977e-08\r\nStep 4656, loss: 6.058454715685002e-08\r\nStep 4657, loss: 5.975040551220445e-08\r\nStep 4658, loss: 5.924830048797958e-08\r\nStep 4659, loss: 6.175072542191629e-08\r\nStep 4660, loss: 6.305457134203607e-08\r\nStep 4661, loss: 6.045497258355681e-08\r\nStep 4662, loss: 6.007434194543748e-08\r\nStep 4663, loss: 6.016342268821973e-08\r\nStep 4664, loss: 5.975040551220445e-08\r\nStep 4665, loss: 6.122432694155577e-08\r\nStep 4666, loss: 6.079510939116517e-08\r\nStep 4667, loss: 6.038208510972254e-08\r\nStep 4668, loss: 6.290070331260722e-08\r\nStep 4669, loss: 6.114333928053384e-08\r\nStep 
4670, loss: 6.081130266011314e-08\r\nStep 4671, loss: 8.630523495867237e-08\r\nStep 4672, loss: 8.013421393116005e-08\r\nStep 4673, loss: 5.373325251412098e-08\r\nStep 4674, loss: 5.5158579925773665e-08\r\nStep 4675, loss: 6.220422932301517e-08\r\nStep 4676, loss: 6.065743463068429e-08\r\nStep 4677, loss: 5.948315617843036e-08\r\nStep 4678, loss: 5.9021544984716456e-08\r\nStep 4679, loss: 8.543870677613086e-08\r\nStep 4680, loss: 5.77824827985296e-08\r\nStep 4681, loss: 7.925957845600351e-08\r\nStep 4682, loss: 7.454627848346718e-08\r\nStep 4683, loss: 5.3376922437564644e-08\r\nStep 4684, loss: 5.645433276413314e-08\r\nStep 4685, loss: 6.17912121469999e-08\r\nStep 4686, loss: 8.097645576299328e-08\r\nStep 4687, loss: 5.437303229882673e-08\r\nStep 4688, loss: 5.38223368096169e-08\r\nStep 4689, loss: 5.757192056421445e-08\r\nStep 4690, loss: 6.493341686564236e-08\r\nStep 4691, loss: 6.511157835120684e-08\r\nStep 4692, loss: 5.935358160513715e-08\r\nStep 4693, loss: 5.844655603937099e-08\r\nStep 4694, loss: 5.995286755933193e-08\r\nStep 4695, loss: 6.183171308293822e-08\r\nStep 4696, loss: 6.277922892650167e-08\r\nStep 4697, loss: 6.045497258355681e-08\r\n",,terminal_output +4087,10340780,"TERMINAL",0,0,"520203650",,terminal_output +4088,10341838,"TERMINAL",0,0,"611471",,terminal_output +4089,10342864,"TERMINAL",0,0,"722582",,terminal_output +4090,10343888,"TERMINAL",0,0,"833693",,terminal_output +4091,10344913,"TERMINAL",0,0,"9447504",,terminal_output +4092,10345531,"TERMINAL",0,0,"Step 4698, loss: 5.89729545197315e-08\r\nStep 4699, loss: 6.125672058487908e-08\r\nStep 4700, loss: 6.196128765623143e-08\r\nStep 4701, loss: 6.247957884397692e-08\r\nStep 4702, loss: 8.502568249468823e-08\r\nStep 4703, loss: 7.741313368114788e-08\r\nStep 4704, loss: 5.3830433444090886e-08\r\nStep 4705, loss: 5.591173746211098e-08\r\nStep 4706, loss: 6.209086222952465e-08\r\nStep 4707, loss: 6.281162256982498e-08\r\nStep 4708, loss: 6.114333928053384e-08\r\nStep 4709, loss: 5.950745318727968e-08\r\nStep 4710, loss: 6.171833177859298e-08\r\nStep 4711, loss: 6.119192619280511e-08\r\nStep 4712, loss: 6.009054231981281e-08\r\nStep 4713, loss: 6.034969146639924e-08\r\nStep 4714, loss: 6.048736622688011e-08\r\nStep 4715, loss: 6.323273993302791e-08\r\nStep 4716, loss: 6.215564951617125e-08\r\nStep 4717, loss: 6.004194830211418e-08\r\nStep 4718, loss: 6.017962306259506e-08\r\nStep 4719, loss: 6.257676687937419e-08\r\nStep 4720, loss: 8.533342565897328e-08\r\nStep 4721, loss: 5.506949918299142e-08\r\nStep 4722, loss: 5.491562760084889e-08\r\nStep 4723, loss: 6.236620464505904e-08\r\nStep 4724, loss: 6.539502805935626e-08\r\nStep 4725, loss: 6.204226821182601e-08\r\nStep 4726, loss: 5.9062038815227424e-08\r\nStep 4727, loss: 5.9556043652264634e-08\r\nStep 4728, loss: 6.3864419530546e-08\r\nStep 4729, loss: 8.82407675817376e-08\r\nStep 4730, loss: 5.7774382611341935e-08\r\nStep 4731, loss: 7.741313368114788e-08\r\nStep 4732, loss: 5.579835971047942e-08\r\nStep 4733, loss: 5.941836889178376e-08\r\nStep 4734, loss: 6.034159127921157e-08\r\nStep 4735, loss: 6.099757143829265e-08\r\nStep 4736, loss: 6.100566452005296e-08\r\nStep 4737, loss: 6.126482077206674e-08\r\nStep 4738, loss: 6.166164467913404e-08\r\nStep 4739, loss: 6.257676687937419e-08\r\nStep 4740, loss: 6.243909211889331e-08\r\nStep 4741, loss: 5.9499353000092015e-08\r\nStep 4742, loss: 6.245528538784129e-08\r\nStep 4743, loss: 6.247148576221662e-08\r\nStep 4744, loss: 6.145918263200656e-08\r\nStep 4745, loss: 6.207466185514932e-08\r\nStep 4746, loss: 
6.110284545002287e-08\r\nStep 4747, loss: 8.556828134942407e-08\r\nStep 4748, loss: 5.4551197337104895e-08\r\nStep 4749, loss: 7.892754183558282e-08\r\nStep 4750, loss: 5.7377558704274634e-08\r\nStep 4751, loss: 5.978279915552775e-08\r\nStep 4752, loss: 8.418344066285499e-08\r\nStep 4753, loss: 7.566386273083481e-08\r\nStep 4754, loss: 7.341249386172422e-08\r\nStep 4755, loss: 7.16308363735152e-08\r\nStep 4756, loss: 5.4000501847895066e-08\r\nStep 4757, loss: 7.55666818008649e-08\r\nStep 4758, loss: 5.629236454751663e-08\r\nStep 4759, loss: 5.758002075140212e-08\r\nStep 4760, loss: 5.93049911401522e-08\r\nStep 4761, loss: 5.996906793370727e-08\r\nStep 4762, loss: 6.025250343100197e-08\r\nStep 4763, loss: 6.104615835056393e-08\r\nStep 4764, loss: 5.899725152858082e-08\r\nStep 4765, loss: 6.018772324978272e-08\r\nStep 4766, loss: 6.188840018239716e-08\r\nStep 4767, loss: 6.407498176486115e-08\r\nStep 4768, loss: 6.297359078644149e-08\r\nStep 4769, loss: 6.179121925242725e-08\r\nStep 4770, loss: 6.176691869086426e-08\r\nStep 4771, loss: 6.051975987020342e-08\r\nStep 4772, loss: 5.948315617843036e-08\r\nStep 4773, loss: 6.26415541660208e-08\r\nStep 4774, loss: 6.209895531128495e-08\r\nStep 4775, loss: 6.339470814964443e-08\r\nStep 4776, loss: 6.121622675436811e-08\r\nStep 4777, loss: 5.99285741031963e-08\r\nStep 4778, loss: 6.091658377727072e-08\r\nStep 4779, loss: 6.394540719156794e-08\r\nStep 4780, loss: 6.30707717164114e-08\r\nStep 4781, loss: 6.169403832245735e-08\r\nStep 4782, loss: 6.242289884994534e-08\r\nStep 4783, loss: 8.581933030882283e-08\r\nStep 4784, loss: 5.7053618718327925e-08\r\nStep 4785, loss: 5.6753975741230533e-08\r\nStep 4786, loss: 6.039828548409787e-08\r\nStep 4787, loss: 6.347569581066637e-08\r\nStep 4788, loss: 6.175882560910395e-08\r\nStep 4789, loss: 5.916731637967132e-08\r\nStep 4790, loss: 8.393239170345623e-08\r\nStep 4791, loss: 5.770959532469533e-08\r\nStep 4792, loss: 8.031238252215189e-08\r\nStep 4793, loss: 7.680574753976543e-08\r\nStep 4794, loss: 7.296708304238564e-08\r\nStep 4795, loss: 7.113683153647798e-08\r\nStep 4796, loss: 7.097485621443411e-08\r\nStep 4797, loss: 5.5061398995803756e-08\r\nStep 4798, loss: 5.6162789974223415e-08\r\nStep 4799, loss: 5.7733892333544645e-08\r\nStep 4800, loss: 6.037398492253487e-08\r\nStep 4801, loss: 6.160494336882039e-08\r\nStep 4802, loss: 6.307886479817171e-08\r\nStep 4803, loss: 6.291690368698255e-08\r\nStep 4804, loss: 6.190459345134514e-08\r\nStep 4805, loss: 6.211514858023293e-08\r\nStep 4806, loss: 6.034969146639924e-08\r\nStep 4807, loss: 6.103805816337626e-08\r\nStep 4808, loss: 6.2430999037133e-08\r\nStep 4809, loss: 5.911062928021238e-08\r\nStep 4810, loss: 6.107855199388723e-08\r\nStep 4811, loss: 6.342710889839509e-08\r\nStep 4812, loss: 6.213135606003561e-08\r\nStep 4813, loss: 6.217184278511922e-08\r\nStep 4814, loss: 6.211514858023293e-08\r\nStep 4815, loss: 6.324894030740325e-08\r\nStep 4816, loss: 6.187219980802183e-08\r\nStep 4817, loss: 5.971801186888115e-08\r\nStep 4818, loss: 6.210705549847262e-08\r\nStep 4819, loss: 6.329752721967452e-08\r\nStep 4820, loss: 6.286830966928392e-08\r\nStep 4821, loss: 6.26739478093441e-08\r\nStep 4822, loss: 6.102186489442829e-08\r\nStep 4823, loss: 6.170213140421765e-08\r\nStep 4824, loss: 8.463695877480859e-08\r\nStep 4825, loss: 7.792333889256042e-08\r\nStep 4826, loss: 7.511317079433866e-08\r\nStep 4827, loss: 5.41867670733609e-08\r\nStep 4828, loss: 5.8568030425476536e-08\r\nStep 4829, loss: 6.084369630343645e-08\r\nStep 4830, loss: 
6.000146157703057e-08\r\nStep 4831, loss: 6.030919763588827e-08\r\nStep 4832, loss: 6.138629515817229e-08\r\nStep 4833, loss: 6.14834760881422e-08\r\nStep 4834, loss: 6.18802999952095e-08\r\nStep 4835, loss: 5.982329298603872e-08\r\nStep 4836, loss: 6.221233661563019e-08\r\nStep 4837, loss: 6.161305066143541e-08\r\nStep 4838, loss: 6.152396991865317e-08\r\nStep 4839, loss: 6.307887190359907e-08\r\nStep 4840, loss: 6.335422142456082e-08\r\nStep 4841, loss: 6.288450293823189e-08\r\nStep 4842, loss: 6.135390151484899e-08\r\nStep 4843, loss: 8.567355536115429e-08\r\nStep 4844, loss: 7.739693330677255e-08\r\nStep 4845, loss: 7.264313950372525e-08\r\nStep 4846, loss: 7.251356493043204e-08\r\nStep 4847, loss: 7.218962849719901e-08\r\nStep 4848, loss: 7.226251597103328e-08\r\nStep 4849, loss: 7.021360204362281e-08\r\nStep 4850, loss: 5.298819516497133e-08\r\nStep 4851, loss: 5.6373348655824884e-08\r\nStep 4852, loss: 5.974230532501679e-08\r\nStep 4853, loss: 6.145918263200656e-08\r\nStep 4854, loss: 5.935358160513715e-08\r\nStep 4855, loss: 6.209895531128495e-08\r\nStep 4856, loss: 6.26739478093441e-08\r\nStep 4857, loss: 6.128101404101471e-08\r\nStep 4858, loss: 6.064123425630896e-08\r\nStep 4859, loss: 6.137819497098462e-08\r\nStep 4860, loss: 6.060074753122535e-08\r\nStep 4861, loss: 6.289260312541955e-08\r\nStep 4862, loss: 6.166973776089435e-08\r\nStep 4863, loss: 6.18802999952095e-08\r\nStep 4864, loss: 6.233380389630838e-08\r\nStep 4865, loss: 6.325704049459091e-08\r\nStep 4866, loss: 6.205036839901368e-08\r\nStep 4867, loss: 6.131340768433802e-08\r\nStep 4868, loss: 6.232571791997543e-08\r\nStep 4869, loss: 6.07222219173309e-08\r\nStep 4870, loss: 6.149157627532986e-08\r\nStep 4871, loss: 6.230141735841244e-08\r\nStep 4872, loss: 6.257676687937419e-08\r\nStep 4873, loss: 6.27306420142304e-08\r\nStep 4874, loss: 6.251197959272758e-08\r\nStep 4875, loss: 6.248768613659195e-08\r\nStep 4876, loss: 6.145918263200656e-08\r\nStep 4877, loss: 6.222853699000552e-08\r\nStep 4878, loss: 6.186410672626153e-08\r\nStep 4879, loss: 6.181551270856289e-08\r\nStep 4880, loss: 6.119193329823247e-08\r\nStep 4881, loss: 6.350808945398967e-08\r\nStep 4882, loss: 8.619186075975449e-08\r\nStep 4883, loss: 7.753460806725343e-08\r\nStep 4884, loss: 7.398748635978336e-08\r\nStep 4885, loss: 5.334452879424134e-08\r\nStep 4886, loss: 7.917859790040893e-08\r\nStep 4887, loss: 7.274842062088283e-08\r\nStep 4888, loss: 5.373325251412098e-08\r\nStep 4889, loss: 7.3282919288431e-08\r\nStep 4890, loss: 7.538852031530041e-08\r\nStep 4891, loss: 5.656771406847838e-08\r\nStep 4892, loss: 5.629236454751663e-08\r\nStep 4893, loss: 5.763670785086106e-08\r\nStep 4894, loss: 5.996906793370727e-08\r\nStep 4895, loss: 6.337851488069646e-08\r\nStep 4896, loss: 6.22204296973905e-08\r\nStep 4897, loss: 6.056835388790205e-08\r\nStep 4898, loss: 6.188840018239716e-08\r\nStep 4899, loss: 6.120812656718044e-08\r\nStep 4900, loss: 6.122432694155577e-08\r\nStep 4901, loss: 6.182361289575056e-08\r\nStep 4902, loss: 6.176691869086426e-08\r\nStep 4903, loss: 6.307887190359907e-08\r\nStep 4904, loss: 6.318415302075664e-08\r\nStep 4905, loss: 6.255247342323855e-08\r\nStep 4906, loss: 6.165354449194638e-08\r\nStep 4907, loss: 6.120812656718044e-08\r\n",,terminal_output +4093,10345950,"TERMINAL",0,0,"4055815",,terminal_output +4094,10347015,"TERMINAL",0,0,"166926",,terminal_output +4095,10348007,"TERMINAL",0,0,"2773037",,terminal_output +4096,10349043,"TERMINAL",0,0,"388148",,terminal_output +4097,10350070,"TERMINAL",0,0,"499259",,terminal_output 
+4098,10350144,"TERMINAL",0,0,"Step 4908, loss: 6.17588185036766e-08\r\nStep 4909, loss: 6.117573292385714e-08\r\nStep 4910, loss: 6.147537590095453e-08\r\nStep 4911, loss: 6.341090852401976e-08\r\nStep 4912, loss: 6.448799894087642e-08\r\nStep 4913, loss: 6.198558111236707e-08\r\nStep 4914, loss: 6.160495047424774e-08\r\nStep 4915, loss: 6.127291385382705e-08\r\nStep 4916, loss: 6.304647826027576e-08\r\nStep 4917, loss: 6.213944914179592e-08\r\nStep 4918, loss: 6.126482077206674e-08\r\nStep 4919, loss: 6.27387350959907e-08\r\nStep 4920, loss: 6.19126936385328e-08\r\nStep 4921, loss: 6.344330216734306e-08\r\nStep 4922, loss: 6.33137204886225e-08\r\nStep 4923, loss: 6.155636356197647e-08\r\nStep 4924, loss: 8.721226407715221e-08\r\nStep 4925, loss: 5.685925685838811e-08\r\nStep 4926, loss: 7.713778416018613e-08\r\nStep 4927, loss: 5.5668785137186205e-08\r\nStep 4928, loss: 5.9896180459873e-08\r\nStep 4929, loss: 6.133770114047365e-08\r\nStep 4930, loss: 8.230460224467606e-08\r\nStep 4931, loss: 7.767228282773431e-08\r\nStep 4932, loss: 7.461106577011378e-08\r\nStep 4933, loss: 6.93227732995183e-08\r\nStep 4934, loss: 5.20406757686942e-08\r\nStep 4935, loss: 7.708919724791485e-08\r\nStep 4936, loss: 7.386601197367781e-08\r\nStep 4937, loss: 5.3336428607053676e-08\r\nStep 4938, loss: 5.553111037670533e-08\r\nStep 4939, loss: 6.125672058487908e-08\r\nStep 4940, loss: 6.367005767060618e-08\r\nStep 4941, loss: 6.07222219173309e-08\r\nStep 4942, loss: 5.98394862549867e-08\r\nStep 4943, loss: 6.161305066143541e-08\r\nStep 4944, loss: 6.328942703248686e-08\r\nStep 4945, loss: 6.183171308293822e-08\r\nStep 4946, loss: 6.054405332633905e-08\r\nStep 4947, loss: 6.010673558876078e-08\r\nStep 4948, loss: 6.298979116081682e-08\r\nStep 4949, loss: 6.323273993302791e-08\r\nStep 4950, loss: 6.178311906523959e-08\r\nStep 4951, loss: 6.286021658752361e-08\r\nStep 4952, loss: 8.036096943442317e-08\r\nStep 4953, loss: 5.657581070295237e-08\r\nStep 4954, loss: 5.826029081390516e-08\r\nStep 4955, loss: 6.171023159140532e-08\r\nStep 4956, loss: 6.306267863465109e-08\r\nStep 4957, loss: 6.150776954427783e-08\r\nStep 4958, loss: 5.993667429038396e-08\r\nStep 4959, loss: 6.115953965490917e-08\r\nStep 4960, loss: 8.240988336183364e-08\r\nStep 4961, loss: 5.656771406847838e-08\r\nStep 4962, loss: 5.668109182010994e-08\r\nStep 4963, loss: 6.472286173675457e-08\r\nStep 4964, loss: 6.495771742720535e-08\r\nStep 4965, loss: 6.066553481787196e-08\r\nStep 4966, loss: 6.081940284730081e-08\r\nStep 4967, loss: 8.036096943442317e-08\r\nStep 4968, loss: 5.8074025588439326e-08\r\nStep 4969, loss: 7.56557696490745e-08\r\nStep 4970, loss: 5.519097356909697e-08\r\nStep 4971, loss: 7.72430652773437e-08\r\nStep 4972, loss: 7.424663550636978e-08\r\nStep 4973, loss: 5.3676565414662036e-08\r\nStep 4974, loss: 5.719939366599647e-08\r\nStep 4975, loss: 6.189650036958483e-08\r\nStep 4976, loss: 7.84335369985456e-08\r\nStep 4977, loss: 7.333151330612964e-08\r\nStep 4978, loss: 5.518287693462298e-08\r\nStep 4979, loss: 5.895675769806985e-08\r\nStep 4980, loss: 6.281162256982498e-08\r\nStep 4981, loss: 6.103805816337626e-08\r\nStep 4982, loss: 5.964512439504688e-08\r\nStep 4983, loss: 6.085989667781178e-08\r\nStep 4984, loss: 6.38968131738693e-08\r\nStep 4985, loss: 6.251197959272758e-08\r\nStep 4986, loss: 5.8778592659791684e-08\r\nStep 4987, loss: 6.074651537346654e-08\r\nStep 4988, loss: 6.405068830872551e-08\r\nStep 4989, loss: 6.422884268886264e-08\r\nStep 4990, loss: 6.109475236826256e-08\r\nStep 4991, loss: 6.247957884397692e-08\r\nStep 
4992, loss: 6.11190458243982e-08\r\nStep 4993, loss: 6.125672058487908e-08\r\nStep 4994, loss: 6.124862039769141e-08\r\nStep 4995, loss: 6.208276204233698e-08\r\nStep 4996, loss: 6.37996322438994e-08\r\nStep 4997, loss: 6.3499989266802e-08\r\nStep 4998, loss: 6.068172808681993e-08\r\nStep 4999, loss: 6.180741252137523e-08\r\nSaved checkpoint at step 5000\r\n",,terminal_output +4099,10351105,"TERMINAL",0,0,"53030369:00",,terminal_output +4100,10352090,"TERMINAL",0,0,"wandb: \r\nwandb: 🚀 View run dynamics-causal-overfit-no-noise3373280 at: https://wandb.ai/instant-uv/jafar/runs/gqc4dswd\r\nwandb: Find logs at: ../../../../../hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/wandb/run-20250724_164926-gqc4dswd/logs\r\n",,terminal_output +4101,10352136,"TERMINAL",0,0,"611471",,terminal_output +4102,10353166,"TERMINAL",0,0,"722582",,terminal_output +4103,10354200,"TERMINAL",0,0,"833693",,terminal_output +4104,10354933,"TERMINAL",0,0,"]0;tum_cte0515@hkn0901:~/Projects/jafar[?2004h(jafar) [tum_cte0515@hkn0901 jafar]$ ",,terminal_output +4105,10355254,"TERMINAL",0,0,"94475:004",,terminal_output +4106,10356271,"TERMINAL",0,0,"5055815",,terminal_output +4107,10357304,"TERMINAL",0,0,"166926",,terminal_output +4108,10358431,"TERMINAL",0,0,"2884148",,terminal_output +4109,10359379,"TERMINAL",0,0,"499259",,terminal_output +4110,10360401,"TERMINAL",0,0,"540403610",,terminal_output +4111,10361433,"TERMINAL",0,0,"611471",,terminal_output +4112,10362524,"TERMINAL",0,0,"722582",,terminal_output +4113,10363550,"TERMINAL",0,0,"833693",,terminal_output +4114,10364535,"TERMINAL",0,0,"9447104",,terminal_output +4115,10365569,"TERMINAL",0,0,"3:0055815",,terminal_output +4116,10366620,"TERMINAL",0,0,"166926",,terminal_output +4117,10367646,"TERMINAL",0,0,"2775037",,terminal_output +4118,10368774,"TERMINAL",0,0,"388148",,terminal_output +4119,10369701,"TERMINAL",0,0,"499259",,terminal_output +4120,10370819,"TERMINAL",0,0,"550503620",,terminal_output +4121,10371774,"TERMINAL",0,0,"611471",,terminal_output +4122,10372867,"TERMINAL",0,0,"722582",,terminal_output +4123,10373844,"TERMINAL",0,0,"833693",,terminal_output +4124,10374909,"TERMINAL",0,0,"9447204",,terminal_output +4125,10375943,"TERMINAL",0,0,"1055815",,terminal_output +4126,10376971,"TERMINAL",0,0,"166926",,terminal_output +4127,10378089,"TERMINAL",0,0,"27740:0037",,terminal_output +4128,10379022,"TERMINAL",0,0,"388148",,terminal_output +4129,10380051,"TERMINAL",0,0,"499259",,terminal_output +4130,10381162,"TERMINAL",0,0,"59:009:003630",,terminal_output +4131,10382189,"TERMINAL",0,0,"611471",,terminal_output +4132,10383155,"TERMINAL",0,0,"722582",,terminal_output +4133,10384185,"TERMINAL",0,0,"833693",,terminal_output +4134,10385218,"TERMINAL",0,0,"9447304",,terminal_output +4135,10386251,"TERMINAL",0,0,"2055815",,terminal_output +4136,10387286,"TERMINAL",0,0,"166926",,terminal_output +4137,10388330,"TERMINAL",0,0,"2881148",,terminal_output +4138,10389351,"TERMINAL",0,0,"499259",,terminal_output +4139,10390482,"TERMINAL",0,0,"510103640",,terminal_output +4140,10391504,"TERMINAL",0,0,"611471",,terminal_output +4141,10392476,"TERMINAL",0,0,"722582",,terminal_output +4142,10393552,"TERMINAL",0,0,"833693",,terminal_output +4143,10394576,"TERMINAL",0,0,"9447404",,terminal_output +4144,10396943,"TERMINAL",0,0,"3066926",,terminal_output +4145,10398060,"TERMINAL",0,0,"2772037",,terminal_output +4146,10399013,"TERMINAL",0,0,"388148",,terminal_output +4147,10400044,"TERMINAL",0,0,"499259",,terminal_output 
+4148,10401130,"TERMINAL",0,0,"520203650",,terminal_output +4149,10402157,"TERMINAL",0,0,"611471",,terminal_output +4150,10403192,"TERMINAL",0,0,"722582",,terminal_output +4151,10404179,"TERMINAL",0,0,"833693",,terminal_output +4152,10405218,"TERMINAL",0,0,"9447504",,terminal_output +4153,10406253,"TERMINAL",0,0,"4055815",,terminal_output +4154,10407288,"TERMINAL",0,0,"166926",,terminal_output +4155,10408325,"TERMINAL",0,0,"2883148",,terminal_output +4156,10409379,"TERMINAL",0,0,"499259",,terminal_output +4157,10410060,"TERMINAL",0,0,"sh slurm/dev/mihir/horeka/overfit_sample/causal/dynamics_overfit_sample.sbatch",,terminal_output +4158,10410448,"TERMINAL",0,0,"530303640:00",,terminal_output +4159,10411444,"TERMINAL",0,0,"\r",,terminal_output +4160,10411444,"TERMINAL",0,0,"611471",,terminal_output +4161,10412179,"TERMINAL",0,0,"watch",,terminal_focus +4162,10412457,"TERMINAL",0,0,"722582",,terminal_output +4163,10412912,"TERMINAL",0,0,"[?1049l\r[?1l>]0;tum_cte0515@hkn1991:/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/causal/overfit-seed69-1/interactive/3373280]633;D;0",,terminal_output +4164,10415983,"TERMINAL",0,0,"cd ..",,terminal_command +4165,10415999,"TERMINAL",0,0,"]633;E;2025-07-24 16:53:50 cd ..;63badae8-90b1-4579-970f-d00997b22bed]633;C]0;tum_cte0515@hkn1991:/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/causal/overfit-seed69-1/interactive]633;D;0",,terminal_output +4166,10416295,"TERMINAL",0,0,"ls",,terminal_command +4167,10416310,"TERMINAL",0,0,"]633;E;2025-07-24 16:53:51 ls;63badae8-90b1-4579-970f-d00997b22bed]633;C3367418 3373280\r\n]0;tum_cte0515@hkn1991:/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/causal/overfit-seed69-1/interactive]633;D;0",,terminal_output +4168,10417484,"TERMINAL",0,0,"cd ..",,terminal_command +4169,10418108,"TERMINAL",0,0,"ls",,terminal_command +4170,10418134,"TERMINAL",0,0,"]633;E;2025-07-24 16:53:52 ls;63badae8-90b1-4579-970f-d00997b22bed]633;Cinteractive\r\n]0;tum_cte0515@hkn1991:/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/causal/overfit-seed69-1]633;D;0",,terminal_output +4171,10419444,"TERMINAL",0,0,"cd ..",,terminal_command +4172,10419661,"TERMINAL",0,0,"ls",,terminal_command +4173,10419675,"TERMINAL",0,0,"]633;E;2025-07-24 16:53:54 ls;63badae8-90b1-4579-970f-d00997b22bed]633;Coverfit overfit-seed69-1 overfit-seed69-1-no-noise\r\n]0;tum_cte0515@hkn1991:/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/causal]633;D;0",,terminal_output +4174,10422757,"TERMINAL",0,0,"cd overfit-seed69-1-no-noise/",,terminal_command +4175,10422777,"TERMINAL",0,0,"]633;E;2025-07-24 16:53:57 cd overfit-seed69-1-no-noise/;63badae8-90b1-4579-970f-d00997b22bed]633;C]0;tum_cte0515@hkn1991:/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/causal/overfit-seed69-1-no-noise]633;D;0",,terminal_output +4176,10424909,"TERMINAL",0,0,"cd interactive/3373280/",,terminal_command +4177,10425235,"TERMINAL",0,0,"ls",,terminal_command +4178,10425254,"TERMINAL",0,0,"]633;E;2025-07-24 16:53:59 ls;63badae8-90b1-4579-970f-d00997b22bed]633;C003000 004000 005000\r\n]0;tum_cte0515@hkn1991:/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/causal/overfit-seed69-1-no-noise/interactive/3373280]633;D;0",,terminal_output +4179,10426250,"TERMINAL",0,0,"pwd",,terminal_command +4180,10428198,"TERMINAL",0,0,"srun",,terminal_focus +4181,10429137,"TERMINAL",0,0,"sh slurm/dev/mihir/horeka/overfit_sample/causal/dynamics_overfit_sample.sbatch",,terminal_output 
+4182,10430440,"TERMINAL",0,0,"yolo-runs/sampling.sh /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/causal/overfit-seed69-1/interactive/3373280",,terminal_output +4183,10431703,"TERMINAL",0,0,"\r\n\r",,terminal_output +4184,10433269,"TERMINAL",0,0,"/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/causal/overfit-seed69-1-no-noise/interactive/3373280",,terminal_output +4185,10433645,"TERMINAL",0,0,"/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/causal/overfit-seed69-1-no-noise/interactive/3373280\r\n[?2004l\r\r\n# Log the sbatch script\r\ncat $0\r\n\r\nmodule unload mpi/openmpi/5.0\r\nmodule unload devel/cuda/12.4\r\n# source .venv/bin/activate\r\n\r\narray_records_dir=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_new/open_ai_minecraft_arrayrecords_chunked\r\n\r\njob_name=$SLURM_JOB_NAME\r\nslurm_job_id=$SLURM_JOB_ID\r\n\r\nCHECKPOINT_DIR=$ws_dir/checkpoints/$job_name/$slurm_job_id\r\nmkdir -p $CHECKPOINT_DIR\r\n\r\ntokenizer_ckpt_dir=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/big-runs/tokenizer-lr-scaling/train_tokenizer_lr_sweep_1e-4\r\ndynamics_ckpt_dir=$1\r\necho $dynamics_ckpt_dir\r\n\r\nenv | grep SLURM\r\n\r\nsrun python sample.py \\r\n --checkpoint $dynamics_ckpt_dir \\r\n --dyna_dim=128 \\r\n --dyna_num_blocks=2 \\r\n --dyna_num_heads=4 \\r\n --seq_len=4 \\r\n --start_frame=0 \\r\n --data_dir $array_records_dir\r\n\r\n# srun python sample.py \\r\n # --checkpoint $dynamics_ckpt_dir \\r\n # --start_frame=0 \\r\n # --batch_size=12 \\r\n # --seq_len=2 \\r\n # --data_dir $array_records_dir\r\n",,terminal_output +4186,10433785,"TERMINAL",0,0,"/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/causal/overfit-seed69-1-no-noise/interactive/3373280\r\nSLURM_STEP_NUM_TASKS=1\r\nSLURM_JOB_USER=tum_cte0515\r\nSLURM_TASKS_PER_NODE=1\r\nSLURM_JOB_UID=999226\r\nSLURM_TASK_PID=436020\r\nSLURM_JOB_GPUS=0\r\nSLURM_LOCALID=0\r\nSLURM_SUBMIT_DIR=/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar\r\nSLURMD_NODENAME=hkn0901\r\nSLURM_JOB_START_TIME=1753366425\r\nSLURM_STEP_NODELIST=hkn0901\r\nSLURM_CLUSTER_NAME=hk\r\nSLURM_JOB_END_TIME=1753370025\r\nSLURM_PMI2_SRUN_PORT=34243\r\nSLURM_CPUS_ON_NODE=6\r\nSLURM_JOB_CPUS_PER_NODE=6\r\nSLURM_GPUS_ON_NODE=1\r\nSLURM_GTIDS=0\r\nSLURM_JOB_PARTITION=dev_accelerated-h100\r\nSLURM_TRES_PER_TASK=cpu=5\r\nSLURM_OOM_KILL_STEP=0\r\nSLURM_JOB_NUM_NODES=1\r\nSLURM_STEPID=4294967290\r\nSLURM_JOBID=3373280\r\nSLURM_PTY_PORT=42147\r\nSLURM_JOB_QOS=normal\r\nSLURM_LAUNCH_NODE_IPADDR=10.0.7.199\r\nSLURM_PTY_WIN_ROW=48\r\nSLURM_PMI2_PROC_MAPPING=(vector,(0,1,1))\r\nSLURMD_DEBUG=2\r\nSLURM_PROCID=0\r\nSLURM_CPUS_PER_TASK=5\r\nSLURM_NTASKS=1\r\nSLURM_TOPOLOGY_ADDR=hkibb.hkibbi4.hkibbi4e1.hkn0901\r\nSLURM_TOPOLOGY_ADDR_PATTERN=switch.switch.switch.node\r\nSLURM_SRUN_COMM_HOST=10.0.7.199\r\nSLURM_SCRIPT_CONTEXT=prolog_task\r\nSLURM_PTY_WIN_COL=194\r\nSLURM_NODELIST=hkn0901\r\nSLURM_SRUN_COMM_PORT=46593\r\nSLURM_STEP_ID=4294967290\r\nSLURM_JOB_ACCOUNT=hk-project-p0023960\r\nSLURM_PRIO_PROCESS=0\r\nSLURM_NPROCS=1\r\nSLURM_NNODES=1\r\nSLURM_SUBMIT_HOST=hkn1991.localdomain\r\nSLURM_JOB_ID=3373280\r\nSLURM_NODEID=0\r\nSLURM_STEP_NUM_NODES=1\r\nSLURM_STEP_TASKS_PER_NODE=1\r\nSLURM_MPI_TYPE=pmi2\r\nSLURM_PMI2_STEP_NODES=hkn0901\r\nSLURM_CONF=/etc/slurm/slurm.conf\r\nSLURM_JOB_NAME=interactive\r\nSLURM_NTASKS_PER_NODE=1\r\nSLURM_STEP_LAUNCHER_PORT=46593\r\nSLURM_JOB_GID=502226\r\nSLURM_JOB_NODELIST=hkn0901\r\n",,terminal_output 
+4187,10434021,"TERMINAL",0,0,"GpuFreq=control_disabled\r\n",,terminal_output +4188,10440719,"TERMINAL",0,0,"2025-07-24 16:54:15.418322: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +4189,10449378,"TERMINAL",0,0,"2025-07-24 16:54:24.057227: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +4190,10456017,"TERMINAL",0,0,"2025-07-24 16:54:30.641604: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +4191,10466767,"TERMINAL",0,0,"2025-07-24 16:54:41.409961: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +4192,10469181,"TERMINAL",0,0,"WARNING:absl:Missing metrics for step 4000\r\nERROR:absl:File /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/causal/overfit-seed69-1-no-noise/interactive/3373280/004000/metrics/metrics not found.\r\nWARNING:absl:Missing metrics for step 5000\r\nERROR:absl:File /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/causal/overfit-seed69-1-no-noise/interactive/3373280/005000/metrics/metrics not found.\r\nWARNING:absl:Missing metrics for step 3000\r\nERROR:absl:File /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/causal/overfit-seed69-1-no-noise/interactive/3373280/003000/metrics/metrics not found.\r\n",,terminal_output +4193,10475499,"TERMINAL",0,0,"2025-07-24 16:54:50.171292: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +4194,10481209,"TERMINAL",0,0,"2025-07-24 16:54:55.832924: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. 
Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output
+4195,10522155,"TERMINAL",0,0,"autoreg sampling...\r\nSampling token 0 from frame 1\r\n[... Sampling tokens 1-256 from frame 1, one line per token ...]\r\nSampling token 257 from frame 1\r\n",,terminal_output
+4196,10557465,"TERMINAL",0,0,"Sampling token 258 from frame 1\r\n[... Sampling tokens 259-512 from frame 1, one line per token ...]\r\nSampling token 513 from frame 1\r\n",,terminal_output
+4197,10593033,"TERMINAL",0,0,"Sampling token 514 from frame 1\r\n[... Sampling tokens 515-601 from frame 1, one line per token ...]\r\nSampling token 602 from frame
1\r\nSampling token 603 from frame 1\r\nSampling token 604 from frame 1\r\nSampling token 605 from frame 1\r\nSampling token 606 from frame 1\r\nSampling token 607 from frame 1\r\nSampling token 608 from frame 1\r\nSampling token 609 from frame 1\r\nSampling token 610 from frame 1\r\nSampling token 611 from frame 1\r\nSampling token 612 from frame 1\r\nSampling token 613 from frame 1\r\nSampling token 614 from frame 1\r\nSampling token 615 from frame 1\r\nSampling token 616 from frame 1\r\nSampling token 617 from frame 1\r\nSampling token 618 from frame 1\r\nSampling token 619 from frame 1\r\nSampling token 620 from frame 1\r\nSampling token 621 from frame 1\r\nSampling token 622 from frame 1\r\nSampling token 623 from frame 1\r\nSampling token 624 from frame 1\r\nSampling token 625 from frame 1\r\nSampling token 626 from frame 1\r\nSampling token 627 from frame 1\r\nSampling token 628 from frame 1\r\nSampling token 629 from frame 1\r\nSampling token 630 from frame 1\r\nSampling token 631 from frame 1\r\nSampling token 632 from frame 1\r\nSampling token 633 from frame 1\r\nSampling token 634 from frame 1\r\nSampling token 635 from frame 1\r\nSampling token 636 from frame 1\r\nSampling token 637 from frame 1\r\nSampling token 638 from frame 1\r\nSampling token 639 from frame 1\r\nSampling token 640 from frame 1\r\nSampling token 641 from frame 1\r\nSampling token 642 from frame 1\r\nSampling token 643 from frame 1\r\nSampling token 644 from frame 1\r\nSampling token 645 from frame 1\r\nSampling token 646 from frame 1\r\nSampling token 647 from frame 1\r\nSampling token 648 from frame 1\r\nSampling token 649 from frame 1\r\nSampling token 650 from frame 1\r\nSampling token 651 from frame 1\r\nSampling token 652 from frame 1\r\nSampling token 653 from frame 1\r\nSampling token 654 from frame 1\r\nSampling token 655 from frame 1\r\nSampling token 656 from frame 1\r\nSampling token 657 from frame 1\r\nSampling token 658 from frame 1\r\nSampling token 659 from frame 1\r\nSampling token 660 from frame 1\r\nSampling token 661 from frame 1\r\nSampling token 662 from frame 1\r\nSampling token 663 from frame 1\r\nSampling token 664 from frame 1\r\nSampling token 665 from frame 1\r\nSampling token 666 from frame 1\r\nSampling token 667 from frame 1\r\nSampling token 668 from frame 1\r\nSampling token 669 from frame 1\r\nSampling token 670 from frame 1\r\nSampling token 671 from frame 1\r\nSampling token 672 from frame 1\r\nSampling token 673 from frame 1\r\nSampling token 674 from frame 1\r\nSampling token 675 from frame 1\r\nSampling token 676 from frame 1\r\nSampling token 677 from frame 1\r\nSampling token 678 from frame 1\r\nSampling token 679 from frame 1\r\nSampling token 680 from frame 1\r\nSampling token 681 from frame 1\r\nSampling token 682 from frame 1\r\nSampling token 683 from frame 1\r\nSampling token 684 from frame 1\r\nSampling token 685 from frame 1\r\nSampling token 686 from frame 1\r\nSampling token 687 from frame 1\r\nSampling token 688 from frame 1\r\nSampling token 689 from frame 1\r\nSampling token 690 from frame 1\r\nSampling token 691 from frame 1\r\nSampling token 692 from frame 1\r\nSampling token 693 from frame 1\r\nSampling token 694 from frame 1\r\nSampling token 695 from frame 1\r\nSampling token 696 from frame 1\r\nSampling token 697 from frame 1\r\nSampling token 698 from frame 1\r\nSampling token 699 from frame 1\r\nSampling token 700 from frame 1\r\nSampling token 701 from frame 1\r\nSampling token 702 from frame 1\r\nSampling token 703 from frame 1\r\nSampling token 
704 from frame 1\r\nSampling token 705 from frame 1\r\nSampling token 706 from frame 1\r\nSampling token 707 from frame 1\r\nSampling token 708 from frame 1\r\nSampling token 709 from frame 1\r\nSampling token 710 from frame 1\r\nSampling token 711 from frame 1\r\nSampling token 712 from frame 1\r\nSampling token 713 from frame 1\r\nSampling token 714 from frame 1\r\nSampling token 715 from frame 1\r\nSampling token 716 from frame 1\r\nSampling token 717 from frame 1\r\nSampling token 718 from frame 1\r\nSampling token 719 from frame 1\r\nSampling token 720 from frame 1\r\nSampling token 721 from frame 1\r\nSampling token 722 from frame 1\r\nSampling token 723 from frame 1\r\nSampling token 724 from frame 1\r\nSampling token 725 from frame 1\r\nSampling token 726 from frame 1\r\nSampling token 727 from frame 1\r\nSampling token 728 from frame 1\r\nSampling token 729 from frame 1\r\nSampling token 730 from frame 1\r\nSampling token 731 from frame 1\r\nSampling token 732 from frame 1\r\nSampling token 733 from frame 1\r\nSampling token 734 from frame 1\r\nSampling token 735 from frame 1\r\nSampling token 736 from frame 1\r\nSampling token 737 from frame 1\r\nSampling token 738 from frame 1\r\nSampling token 739 from frame 1\r\nSampling token 740 from frame 1\r\nSampling token 741 from frame 1\r\nSampling token 742 from frame 1\r\nSampling token 743 from frame 1\r\nSampling token 744 from frame 1\r\nSampling token 745 from frame 1\r\nSampling token 746 from frame 1\r\nSampling token 747 from frame 1\r\nSampling token 748 from frame 1\r\nSampling token 749 from frame 1\r\nSampling token 750 from frame 1\r\nSampling token 751 from frame 1\r\nSampling token 752 from frame 1\r\nSampling token 753 from frame 1\r\nSampling token 754 from frame 1\r\nSampling token 755 from frame 1\r\nSampling token 756 from frame 1\r\nSampling token 757 from frame 1\r\nSampling token 758 from frame 1\r\nSampling token 759 from frame 1\r\nSampling token 760 from frame 1\r\nSampling token 761 from frame 1\r\nSampling token 762 from frame 1\r\nSampling token 763 from frame 1\r\nSampling token 764 from frame 1\r\nSampling token 765 from frame 1\r\nSampling token 766 from frame 1\r\nSampling token 767 from frame 1\r\nSampling token 768 from frame 1\r\nSampling token 769 from frame 1\r\n",,terminal_output +4198,10622284,"TERMINAL",0,0,"bash",,terminal_focus +4199,10625827,"TERMINAL",0,0,"idling",,terminal_command +4200,10625894,"TERMINAL",0,0,"]633;E;2025-07-24 16:57:20 idling;63badae8-90b1-4579-970f-d00997b22bed]633;C[?1049h(B[?7hEvery 1.0s: sinfo_t_idlehkn1991.localdomain: Thu Jul 24 16:57:20 2025Partition dev_cpuonly: 12 nodes idle\rPartition cpuonly:\t 5 nodes idle\rPartition dev_accelerated:\t 1 nodes idle\rPartition accelerated:\t 2 nodes idle\rPartition dev_accelerated-h100 :\t 0 nodes idle\rPartition accelerated-h100:\t 0 nodes idle\rPartition large:\t 7 nodes idle",,terminal_output +4201,10627029,"TERMINAL",0,0,"1\t ",,terminal_output +4202,10627338,"TERMINAL",0,0,"Sampling token 770 from frame 1\r\nSampling token 771 from frame 1\r\nSampling token 772 from frame 1\r\nSampling token 773 from frame 1\r\nSampling token 774 from frame 1\r\nSampling token 775 from frame 1\r\nSampling token 776 from frame 1\r\nSampling token 777 from frame 1\r\nSampling token 778 from frame 1\r\nSampling token 779 from frame 1\r\nSampling token 780 from frame 1\r\nSampling token 781 from frame 1\r\nSampling token 782 from frame 1\r\nSampling token 783 from frame 1\r\nSampling token 784 from frame 1\r\nSampling token 785 from frame 
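The runs of `Sampling token N from frame M` lines elided above come from a per-token autoregressive sampling loop: the counter walks token indices 0-919 inside each latent frame before advancing to the next frame, so a generated frame evidently consists of 920 discrete tokens. Below is a minimal Python sketch of a loop with this logging shape; `sample_token`, `TOKENS_PER_FRAME`, `NUM_FRAMES`, and the vocabulary size are illustrative assumptions, not the project's actual sample.py.

# Hypothetical reconstruction of the loop behind the "Sampling token N
# from frame M" log lines; all names and sizes here are assumptions.
import numpy as np

TOKENS_PER_FRAME = 920   # the log counts token indices 0-919 per frame
NUM_FRAMES = 4           # frames 1-3 are visible in this session

def sample_token(tokens: np.ndarray, frame: int, idx: int) -> int:
    # Placeholder for the real model forward pass plus categorical sampling.
    return int(np.random.randint(0, 1024))

tokens = np.zeros((NUM_FRAMES, TOKENS_PER_FRAME), dtype=np.int32)
for frame in range(1, NUM_FRAMES):       # frame 0 taken as the conditioning frame
    for idx in range(TOKENS_PER_FRAME):
        print(f"Sampling token {idx} from frame {frame}")
        tokens[frame, idx] = sample_token(tokens, frame, idx)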
+4203,10627968,"TERMINAL",0,0,"2\t ",,terminal_output
+4204,10629011,"TERMINAL",0,0,"3\t ",,terminal_output
+4205,10630099,"TERMINAL",0,0,"4\t ",,terminal_output
+4206,10631128,"TERMINAL",0,0,"5\t ",,terminal_output
+4207,10632152,"TERMINAL",0,0,"6\t ",,terminal_output
+4208,10633282,"TERMINAL",0,0,"7\t ",,terminal_output
+4209,10634298,"TERMINAL",0,0,"8\t ",,terminal_output
+4210,10635243,"TERMINAL",0,0,"9\t ",,terminal_output
+4211,10636298,"TERMINAL",0,0,"30\t ",,terminal_output
+4212,10637218,"TERMINAL",0,0,"[?1049l\r[?1l>]0;tum_cte0515@hkn1991:/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/causal/overfit-seed69-1-no-noise/interactive/3373280]633;D;0",,terminal_output
+4213,10642271,"TERMINAL",0,0,"bash",,terminal_focus
+4214,10649886,"TERMINAL",0,0,"checkout ne^Carch-sampling",,terminal_command
+4215,10649904,"TERMINAL",0,0,"^C[?2004l\r[?2004h[?2004l\r\r\n]633;E;;406cfb31-2341-454a-afa8-cae7781806b2]633;C]0;tum_cte0515@hkn1991:~/Projects/jafar]633;D",,terminal_output
+4216,10651727,"TERMINAL",0,0,"srun",,terminal_focus
+4217,10660460,"TERMINAL",0,0,"[... Sampling token 109-364 from frame 2 elided, one log line per token ...]",,terminal_output
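The `idling` command above wraps a `sinfo_t_idle` helper in `watch` (the `1\t` through `30\t` rows are the watch refresh ticks, and the `[?1049l` sequence in row 4212 is the alternate screen closing when it exits). Below is a rough Python equivalent of that per-partition idle summary, assuming the helper simply aggregates `sinfo` output; the `-h`/`-t`/`-o` flags are standard Slurm, but the grouping and print format are my reconstruction, not the cluster's actual script.

# Hypothetical stand-in for sinfo_t_idle: count idle nodes per partition.
import subprocess
from collections import Counter

out = subprocess.run(
    ["sinfo", "-h", "-t", "idle", "-o", "%P %D"],
    capture_output=True, text=True, check=True,
).stdout

idle = Counter()
for line in out.splitlines():
    partition, nodes = line.split()
    idle[partition.rstrip("*")] += int(nodes)  # sinfo stars the default partition

for partition, count in sorted(idle.items()):
    print(f"Partition {partition}:\t {count} nodes idle")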
+4218,10664384,"TERMINAL",0,0,"bash",,terminal_focus
+4219,10672358,"TERMINAL",0,0,"bash",,terminal_focus
+4220,10673198,"TERMINAL",0,0,"pwd",,terminal_command
+4221,10678074,"TERMINAL",0,0,"bash",,terminal_focus
+4222,10693898,"TERMINAL",0,0,"[... Sampling token 365-620 from frame 2 elided, one log line per token ...]",,terminal_output
+4223,10695940,"TERMINAL",0,0,"# --wrap=""sh slurm/jobs/mihir/horeka/yolo-runs/sampling.sh /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/causal/overfit-seed69-1-no-noise/interactive/3373280""",,terminal_command
+4224,10695961,"TERMINAL",0,0,"\r\n\r\r\n[?2004l\r]633;E;;406cfb31-2341-454a-afa8-cae7781806b2]633;C]0;tum_cte0515@hkn1991:~/Projects/jafar]633;D",,terminal_output
+4225,10727519,"TERMINAL",0,0,"[... Sampling token 621-876 from frame 2 elided, one log line per token ...]",,terminal_output
+4226,10750604,"TERMINAL",0,0,"# sbatch --time=02:00:00 --partition=accelerated --nodes=1 --ntasks-per-node=1 --gres=gpu:1 --cpus-per-task=5 --wrap=""sh slurm/jobs/mihir/horeka/yolo-runs/sampling.sh /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/causal/overfit-seed69-1-no-noise/interactive/3373280""",,terminal_command
+4227,10750639,"TERMINAL",0,0,"\r\n\r\r\n[?2004l\r]633;E;;406cfb31-2341-454a-afa8-cae7781806b2]633;C]0;tum_cte0515@hkn1991:~/Projects/jafar]633;D",,terminal_output
+4228,10756307,"TERMINAL",0,0,"srun",,terminal_focus
+4229,10757955,"TERMINAL",0,0,"bash",,terminal_focus
+4230,10760154,"TERMINAL",0,0,"srun",,terminal_focus
+4231,10761596,"TERMINAL",0,0,"[... Sampling token 877 from frame 2 through Sampling token 215 from frame 3 elided, one log line per token ...]",,terminal_output
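Row 4226 records the actual submission: `sbatch --wrap` requesting one GPU on the `accelerated` partition for two hours, with the checkpoint directory passed as the argument to `sampling.sh` (row 4223 is an earlier fragment of the same command line left as a comment). Below is a small Python wrapper that performs the same submission via subprocess; the flag values are copied verbatim from the log, while `submit_sampling_job` itself is a hypothetical helper, not part of the repo.

# Hypothetical wrapper around the sbatch invocation in row 4226;
# flag values are taken verbatim from the recorded command.
import subprocess

def submit_sampling_job(ckpt_dir: str) -> str:
    cmd = [
        "sbatch",
        "--time=02:00:00",
        "--partition=accelerated",
        "--nodes=1",
        "--ntasks-per-node=1",
        "--gres=gpu:1",
        "--cpus-per-task=5",
        f"--wrap=sh slurm/jobs/mihir/horeka/yolo-runs/sampling.sh {ckpt_dir}",
    ]
    # sbatch prints e.g. "Submitted batch job <id>" on success.
    return subprocess.run(cmd, capture_output=True, text=True, check=True).stdout.strip()

print(submit_sampling_job(
    "/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/"
    "checkpoints/causal/overfit-seed69-1-no-noise/interactive/3373280"
))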
+4232,10761768,"TERMINAL",0,0,"bash",,terminal_focus
+4233,10762877,"TERMINAL",0,0,"srun",,terminal_focus
+4234,10764403,"TERMINAL",0,0,"bash",,terminal_focus
+4235,10770509,"TERMINAL",0,0,"runner-2",,terminal_command
+4236,10774350,"TERMINAL",0,0,"sync-runner-2",,terminal_command
+4237,10774416,"TERMINAL",0,0,"]633;E;2025-07-24 16:59:49 sync-runner-2;406cfb31-2341-454a-afa8-cae7781806b2]633;Csending incremental file list\r\n",,terminal_output
+4238,10778706,"TERMINAL",0,0,"./\r\ngeneration_1753367947.9147565.gif\r\ngeneration_1753368434.8660712.gif\r\ngenie.py\r\nsample.py\r\ntrain_dynamics.py\r\n",,terminal_output
+4239,10779714,"TERMINAL",0,0,"models/\r\nmodels/dynamics.py\r\nmodels/lam.py\r\nmodels/tokenizer.py\r\nslurm/dev/mihir/horeka/overfit_sample/causal/dynamics_overfit_sample.sbatch\r\nslurm/dev/mihir/horeka/yolo-runs/sampling.sh\r\nslurm/dev/mihir/horeka/yolo-runs/train_dynamics_new_arch_speedrun.sbatch\r\nutils/\r\nutils/nn.py\r\n",,terminal_output
+4240,10779835,"TERMINAL",0,0,"\r\nsent 250,270 bytes received 393 bytes 45,575.09 bytes/sec\r\ntotal size is 185,291,268 speedup is 739.20\r\n]0;tum_cte0515@hkn1991:~/Projects/jafar_jobs_2]633;D;0",,terminal_output
+4241,10791213,"slurm/dev/mihir/horeka/yolo-runs/sampling.sh",0,0,"",shellscript,tab
+4242,10794154,"slurm/dev/mihir/horeka/yolo-runs/sampling.sh",742,1,"",shellscript,content
3\r\n[... 202 repeated "Sampling token <n> from frame 3" progress lines (tokens 260-461) elided ...]\r\nSampling token 462 from frame
3\r\nSampling token 463 from frame 3\r\nSampling token 464 from frame 3\r\nSampling token 465 from frame 3\r\nSampling token 466 from frame 3\r\nSampling token 467 from frame 3\r\nSampling token 468 from frame 3\r\nSampling token 469 from frame 3\r\nSampling token 470 from frame 3\r\nSampling token 471 from frame 3\r\n",,terminal_output +4244,10794571,"slurm/dev/mihir/horeka/yolo-runs/sampling.sh",742,0,"1",shellscript,content +4245,10794572,"slurm/dev/mihir/horeka/yolo-runs/sampling.sh",743,0,"",shellscript,selection_keyboard +4246,10794661,"slurm/dev/mihir/horeka/yolo-runs/sampling.sh",743,0,"6",shellscript,content +4247,10794661,"slurm/dev/mihir/horeka/yolo-runs/sampling.sh",744,0,"",shellscript,selection_keyboard +4248,10795194,"slurm/dev/mihir/horeka/yolo-runs/sampling.sh",743,0,"",shellscript,selection_command +4249,10799219,"TERMINAL",0,0,"bash",,terminal_focus +4250,10801869,"TERMINAL",0,0,"bash",,terminal_focus +4251,10804719,"TERMINAL",0,0,"bash",,terminal_focus +4252,10807435,"TERMINAL",0,0,"sync-runner-2",,terminal_command +4253,10807508,"TERMINAL",0,0,"]633;E;2025-07-24 17:00:22 sync-runner-2;63badae8-90b1-4579-970f-d00997b22bed]633;Csending incremental file list\r\n",,terminal_output +4254,10807580,"TERMINAL",0,0,"slurm/dev/mihir/horeka/yolo-runs/sampling.sh\r\n\r\nsent 25,794 bytes received 167 bytes 51,922.00 bytes/sec\r\ntotal size is 185,291,269 speedup is 7,137.29\r\n]0;tum_cte0515@hkn1991:/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/causal/overfit-seed69-1-no-noise/interactive/3373280]633;D;0",,terminal_output +4255,10809377,"TERMINAL",0,0,"bash",,terminal_focus +4256,10820347,"train_dynamics.py",0,0,"",python,tab +4257,10823552,"train_dynamics.py",12128,0,"",python,selection_mouse +4258,10824143,"train_dynamics.py",12171,0,"",python,selection_mouse +4259,10825101,"train_dynamics.py",12153,0,"",python,selection_command +4260,10826292,"train_dynamics.py",12153,0,"#",python,content +4261,10826293,"train_dynamics.py",12154,0,"",python,selection_keyboard +4262,10826359,"train_dynamics.py",12154,0," ",python,content +4263,10826360,"train_dynamics.py",12155,0,"",python,selection_keyboard +4264,10826820,"train_dynamics.py",12154,0,"",python,selection_command +4265,10826940,"train_dynamics.py",12111,0,"",python,selection_command +4266,10827080,"train_dynamics.py",12038,0,"",python,selection_command +4267,10827313,"train_dynamics.py",12037,0,"",python,selection_command +4268,10827666,"train_dynamics.py",12037,0," ",python,content +4269,10827668,"train_dynamics.py",12038,0,"",python,selection_keyboard +4270,10827989,"TERMINAL",0,0,"Sampling token 472 from frame 3\r\nSampling token 473 from frame 3\r\nSampling token 474 from frame 3\r\nSampling token 475 from frame 3\r\nSampling token 476 from frame 3\r\nSampling token 477 from frame 3\r\nSampling token 478 from frame 3\r\nSampling token 479 from frame 3\r\nSampling token 480 from frame 3\r\nSampling token 481 from frame 3\r\nSampling token 482 from frame 3\r\nSampling token 483 from frame 3\r\nSampling token 484 from frame 3\r\nSampling token 485 from frame 3\r\nSampling token 486 from frame 3\r\nSampling token 487 from frame 3\r\nSampling token 488 from frame 3\r\nSampling token 489 from frame 3\r\nSampling token 490 from frame 3\r\nSampling token 491 from frame 3\r\nSampling token 492 from frame 3\r\nSampling token 493 from frame 3\r\nSampling token 494 from frame 3\r\nSampling token 495 from frame 3\r\nSampling token 496 from frame 3\r\nSampling token 497 from frame 3\r\nSampling token 498 from frame 
3\r\n[... 202 repeated "Sampling token <n> from frame 3" progress lines (tokens 499-700) elided ...]\r\nSampling token 701 from frame
3\r\nSampling token 702 from frame 3\r\nSampling token 703 from frame 3\r\nSampling token 704 from frame 3\r\nSampling token 705 from frame 3\r\nSampling token 706 from frame 3\r\nSampling token 707 from frame 3\r\nSampling token 708 from frame 3\r\nSampling token 709 from frame 3\r\nSampling token 710 from frame 3\r\nSampling token 711 from frame 3\r\nSampling token 712 from frame 3\r\nSampling token 713 from frame 3\r\nSampling token 714 from frame 3\r\nSampling token 715 from frame 3\r\nSampling token 716 from frame 3\r\nSampling token 717 from frame 3\r\nSampling token 718 from frame 3\r\nSampling token 719 from frame 3\r\nSampling token 720 from frame 3\r\nSampling token 721 from frame 3\r\nSampling token 722 from frame 3\r\nSampling token 723 from frame 3\r\nSampling token 724 from frame 3\r\nSampling token 725 from frame 3\r\nSampling token 726 from frame 3\r\nSampling token 727 from frame 3\r\n",,terminal_output +4271,10828093,"train_dynamics.py",12037,1,"",python,content +4272,10828450,"train_dynamics.py",12037,0,"ä",python,content +4273,10828452,"train_dynamics.py",12038,0,"",python,selection_keyboard +4274,10828605,"train_dynamics.py",12038,0," ",python,content +4275,10828606,"train_dynamics.py",12039,0,"",python,selection_keyboard +4276,10829129,"train_dynamics.py",12038,1,"",python,content +4277,10829244,"train_dynamics.py",12037,1,"",python,content +4278,10829793,"train_dynamics.py",12037,0,"#",python,content +4279,10829794,"train_dynamics.py",12038,0,"",python,selection_keyboard +4280,10829902,"train_dynamics.py",12038,0," ",python,content +4281,10829903,"train_dynamics.py",12039,0,"",python,selection_keyboard +4282,10830108,"train_dynamics.py",12038,0,"",python,selection_command +4283,10830274,"train_dynamics.py",12005,0,"",python,selection_command +4284,10830452,"train_dynamics.py",11936,0,"",python,selection_command +4285,10830595,"train_dynamics.py",11877,0,"",python,selection_command +4286,10830745,"train_dynamics.py",11834,0,"",python,selection_command +4287,10830904,"train_dynamics.py",11798,0,"",python,selection_command +4288,10831898,"train_dynamics.py",11797,0,"",python,selection_command +4289,10832755,"train_dynamics.py",11797,1,"",python,content +4290,10832882,"train_dynamics.py",11797,1,"",python,content +4291,10833045,"train_dynamics.py",11796,0,"",python,selection_command +4292,10834316,"train_dynamics.py",11830,0,"",python,selection_command +4293,10834424,"train_dynamics.py",11873,0,"",python,selection_command +4294,10834933,"train_dynamics.py",11932,0,"",python,selection_command +4295,10834942,"train_dynamics.py",12001,0,"",python,selection_command +4296,10835003,"train_dynamics.py",12034,0,"",python,selection_command +4297,10835004,"train_dynamics.py",12109,0,"",python,selection_command +4298,10835147,"train_dynamics.py",12152,0,"",python,selection_command +4299,10835312,"train_dynamics.py",12235,0,"",python,selection_command +4300,10835557,"train_dynamics.py",12236,0,"",python,selection_command +4301,10836179,"train_dynamics.py",12236,0,"#",python,content +4302,10836180,"train_dynamics.py",12237,0,"",python,selection_keyboard +4303,10836220,"train_dynamics.py",12237,0," ",python,content +4304,10836221,"train_dynamics.py",12238,0,"",python,selection_keyboard +4305,10836547,"train_dynamics.py",12237,0,"",python,selection_command +4306,10858467,"TERMINAL",0,0,"Sampling token 728 from frame 3\r\nSampling token 729 from frame 3\r\nSampling token 730 from frame 3\r\nSampling token 731 from frame 3\r\nSampling token 732 from frame 3\r\nSampling token 733 from 
frame 3\r\n[... 186 repeated "Sampling token <n> from frame 3" progress lines (tokens 734-919) elided ...]\r\nautoreg sampling done. 
calculating ssim and saving video\r\nSSIM: 0.7038034200668335\r\n",,terminal_output +4307,10860814,"TERMINAL",0,0,"]0;tum_cte0515@hkn0901:~/Projects/jafar[?2004h(jafar) [tum_cte0515@hkn0901 jafar]$ ",,terminal_output +4308,10884032,"TERMINAL",0,0,"srun",,terminal_focus +4309,10957080,"TERMINAL",0,0,"bash",,terminal_focus +4310,11088585,"TERMINAL",0,0,"bash",,terminal_focus +4311,11089885,"TERMINAL",0,0,"bash",,terminal_focus +4312,11092267,"slurm/dev/mihir/horeka/yolo-runs/sampling.sh",0,0,"",shellscript,tab +4313,11093658,"TERMINAL",0,0,"bash",,terminal_focus +4314,11095809,"TERMINAL",0,0,"sync-runner-2",,terminal_command +4315,11095874,"TERMINAL",0,0,"]633;E;2025-07-24 17:05:10 sync-runner-2;63badae8-90b1-4579-970f-d00997b22bed]633;Csending incremental file list\r\n",,terminal_output +4316,11098072,"TERMINAL",0,0,"bash",,terminal_focus +4317,11101045,"TERMINAL",0,0,"./\r\ngeneration_1753369273.0620549.gif\r\ntrain_dynamics.py\r\n",,terminal_output +4318,11101673,"TERMINAL",0,0,"\r\nsent 147,767 bytes received 187 bytes 22,762.15 bytes/sec\r\ntotal size is 185,399,477 speedup is 1,253.09\r\n]0;tum_cte0515@hkn1991:/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/causal/overfit-seed69-1-no-noise/interactive/3373280]633;D;0",,terminal_output +4319,11108100,"TERMINAL",0,0,"bash",,terminal_focus +4320,11109152,"TERMINAL",0,0,"cd ..",,terminal_command +4321,11109167,"TERMINAL",0,0,"]633;E;2025-07-24 17:05:23 cd ..;63badae8-90b1-4579-970f-d00997b22bed]633;C]0;tum_cte0515@hkn1991:/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/causal/overfit-seed69-1-no-noise/interactive]633;D;0",,terminal_output +4322,11109415,"TERMINAL",0,0,"ls",,terminal_command +4323,11110493,"TERMINAL",0,0,"cd ..",,terminal_command +4324,11110711,"TERMINAL",0,0,"ls",,terminal_command +4325,11110748,"TERMINAL",0,0,"]633;E;2025-07-24 17:05:25 ls;63badae8-90b1-4579-970f-d00997b22bed]633;Cinteractive\r\n]0;tum_cte0515@hkn1991:/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/causal/overfit-seed69-1-no-noise]633;D;0",,terminal_output +4326,11112172,"TERMINAL",0,0,"cd ..",,terminal_command +4327,11112489,"TERMINAL",0,0,"ls",,terminal_command +4328,11115149,"TERMINAL",0,0,"cd overfit-seed69-1",,terminal_command +4329,11115160,"TERMINAL",0,0,"]633;E;2025-07-24 17:05:29 cd overfit-seed69-1;63badae8-90b1-4579-970f-d00997b22bed]633;C]0;tum_cte0515@hkn1991:/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/causal/overfit-seed69-1]633;D;0",,terminal_output +4330,11121615,"TERMINAL",0,0,"cd interactive/3373280/",,terminal_command +4331,11121631,"TERMINAL",0,0,"]633;E;2025-07-24 17:05:36 cd interactive/3373280/;63badae8-90b1-4579-970f-d00997b22bed]633;C]0;tum_cte0515@hkn1991:/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/causal/overfit-seed69-1/interactive/3373280]633;D;0",,terminal_output +4332,11122928,"TERMINAL",0,0,"pwd",,terminal_command +4333,11125053,"TERMINAL",0,0,"ls",,terminal_command +4334,11125068,"TERMINAL",0,0,"]633;E;2025-07-24 17:05:39 ls;63badae8-90b1-4579-970f-d00997b22bed]633;C003000 004000 005000\r\n]0;tum_cte0515@hkn1991:/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/causal/overfit-seed69-1/interactive/3373280]633;D;0",,terminal_output +4335,11126822,"TERMINAL",0,0,"pwd",,terminal_command +4336,11127408,"TERMINAL",0,0,"bash",,terminal_focus +4337,11128105,"TERMINAL",0,0,"sbatch --time=02:00:00 --partition=accelerated --nodes=1 --ntasks-per-node=1 --gres=gpu:1 --cpus-per-task=5 --wrap=""sh 
slurm/jobs/mihir/horeka/yolo-runs/sampling.sh /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/causal/overfit-seed69-1-no-noise/interactive/3373280""",,terminal_command +4338,11128126,"TERMINAL",0,0,"]633;E;2025-07-24 17:05:42 sbatch --time=02:00:00 --partition=accelerated --nodes=1 --ntasks-per-node=1 --gres=gpu:1 --cpus-per-task=5 --wrap=""sh slurm/jobs/mihir/horeka/yolo-runs/sampling.sh /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/causal/overfit-seed69-1-no-noise/interactive/3373280"";406cfb31-2341-454a-afa8-cae7781806b2]633;CSubmitted batch job 3373400\r\n]0;tum_cte0515@hkn1991:~/Projects/jafar_jobs_2]633;D;0",,terminal_output +4339,11132625,"TERMINAL",0,0,"bash",,terminal_focus +4340,11135069,"TERMINAL",0,0,"bash",,terminal_focus +4341,11147068,"TERMINAL",0,0,"sbatch --time=02:00:00 --partition=accelerated --nodes=1 --ntasks-per-node=1 --gres=gpu:1 --cpus-per-task=5 --wrap=""sh slurm/jobs/mihir/horeka/yolo-runs/sampling.sh /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/causal/overfit-seed69-1/interactive/3373280""",,terminal_command +4342,11147093,"TERMINAL",0,0,"]633;E;2025-07-24 17:06:01 sbatch --time=02:00:00 --partition=accelerated --nodes=1 --ntasks-per-node=1 --gres=gpu:1 --cpus-per-task=5 --wrap=""sh slurm/jobs/mihir/horeka/yolo-runs/sampling.sh /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/causal/overfit-seed69-1/interactive/3373280"";406cfb31-2341-454a-afa8-cae7781806b2]633;CSubmitted batch job 3373404\r\n]0;tum_cte0515@hkn1991:~/Projects/jafar_jobs_2]633;D;0",,terminal_output +4343,11251108,"TERMINAL",0,0,"srun",,terminal_focus +4344,11252829,"TERMINAL",0,0,"qu",,terminal_output +4345,11253021,"TERMINAL",0,0,"eu",,terminal_output +4346,11253143,"TERMINAL",0,0,"e",,terminal_output +4347,11253247,"TERMINAL",0,0,"\r\n[?2004l\r[?1049h(B[?7hEvery 1.0s: squeue --mehkn0901.localdomain: Thu Jul 24 17:07:47 2025JOBID PARTITION NAME USER ST\tTIME NODES NODELIST(REASON)3371237 accelerat train_dy tum_cte0 R 16:33:32\t 8 hkn[0618,0625-0626,0628-0631,0634]3371238 accelerat train_dy tum_cte0 R 16:33:32\t 2 hkn[0706,0710]3372631 accelerat train_dy tum_cte0 R 2:54:35\t 2 hkn[0515,0622]3372629 accelerat train_dy tum_cte0 R 3:39:58\t 8 hkn[0410,0429,0520,0607,0610,0810,0814,0817]3373400 accelerat wrap tum_cte0 R\t0:11\t 1 hkn07233373404 accelerat wrap tum_cte0 R\t0:11\t 1 hkn07233373280 dev_accel interact tum_cte0 R54:02\t 1 hkn0901",,terminal_output +4348,11254369,"TERMINAL",0,0,"83369223",,terminal_output +4349,11255360,"TERMINAL",0,0,"944740:00334",,terminal_output +4350,11256359,"TERMINAL",0,0,"505581445",,terminal_output +4351,11257409,"TERMINAL",0,0,"177403667",,terminal_output +4352,11258335,"TERMINAL",0,0,"38814778",,terminal_output +4353,11259379,"TERMINAL",0,0,"49925889",,terminal_output +4354,11260385,"TERMINAL",0,0,"54040369910",,terminal_output +4355,11261405,"TERMINAL",0,0,"6114720201",,terminal_output +4356,11262530,"TERMINAL",0,0,"72258112",,terminal_output +4357,11263456,"TERMINAL",0,0,"83369223",,terminal_output +4358,11264481,"TERMINAL",0,0,"944710334",,terminal_output +4359,11265617,"TERMINAL",0,0,"8:005581445",,terminal_output +4360,11266557,"TERMINAL",0,0,"16692556",,terminal_output +4361,11266732,"TERMINAL",0,0,"bash",,terminal_focus +4362,11267547,"TERMINAL",0,0,"277503667",,terminal_output +4363,11268672,"TERMINAL",0,0,"38814778",,terminal_output +4364,11269695,"TERMINAL",0,0,"49925889",,terminal_output +4365,11270661,"TERMINAL",0,0,"55050369920",,terminal_output 
+4366,11270891,"TERMINAL",0,0,"bash",,terminal_focus +4367,11271651,"TERMINAL",0,0,"6114730301",,terminal_output +4368,11272659,"TERMINAL",0,0,"72258112",,terminal_output +4369,11273712,"TERMINAL",0,0,"83369223",,terminal_output +4370,11274720,"TERMINAL",0,0,"944720334",,terminal_output +4371,11274790,"TERMINAL",0,0,"ls *.gif",,terminal_command +4372,11275728,"TERMINAL",0,0,"105581445",,terminal_output +4373,11276750,"TERMINAL",0,0,"16692556",,terminal_output +4374,11277835,"TERMINAL",0,0,"2775:003667",,terminal_output +4375,11278798,"TERMINAL",0,0,"38814778",,terminal_output +4376,11279822,"TERMINAL",0,0,"49925889",,terminal_output +4377,11280850,"TERMINAL",0,0,"54:004:00369930",,terminal_output +4378,11281893,"TERMINAL",0,0,"6114740401",,terminal_output +4379,11282403,"TERMINAL",0,0,"mv *.gif gifs/",,terminal_command +4380,11282417,"TERMINAL",0,0,"]633;E;2025-07-24 17:08:17 mv *.gif gifs/;406cfb31-2341-454a-afa8-cae7781806b2]633;C]0;tum_cte0515@hkn1991:~/Projects/jafar_jobs_2]633;D;0",,terminal_output +4381,11282915,"TERMINAL",0,0,"72258112",,terminal_output +4382,11283332,"TERMINAL",0,0,"ls",,terminal_command +4383,11283381,"TERMINAL",0,0,"]633;E;2025-07-24 17:08:18 ls;406cfb31-2341-454a-afa8-cae7781806b2]633;C",,terminal_output +4384,11283438,"TERMINAL",0,0,"debug frame.png gifs overfit_dir read_tf_record.py scripts_cremers slurm-3359334.out tests utils\r\ndiff.diff frames input_pipeline overfit_dir.zip requirements-franz.txt scripts_horeka slurm-3359338.out train_dynamics.py wandb\r\ndiff.log generate_dataset.py LICENSE __pycache__ requirements.txt slurm slurm-3373400.out train_lam.py weekend-job-requeuer.sh\r\nframe-knoms.png genie.py models README.md sample.py slurm-3359333.out slurm-3373404.out train_tokenizer.py weekend-job-starter.sh\r\n]0;tum_cte0515@hkn1991:~/Projects/jafar_jobs_2]633;D;0",,terminal_output +4385,11283943,"TERMINAL",0,0,"83369223",,terminal_output +4386,11284954,"TERMINAL",0,0,"944730334",,terminal_output +4387,11285980,"TERMINAL",0,0,"205581445",,terminal_output +4388,11286991,"TERMINAL",0,0,"16692556",,terminal_output +4389,11288012,"TERMINAL",0,0,"277103667",,terminal_output +4390,11289049,"TERMINAL",0,0,"38814778",,terminal_output +4391,11290059,"TERMINAL",0,0,"49925889",,terminal_output +4392,11290660,"TERMINAL",0,0,"cd ..",,terminal_command +4393,11290677,"TERMINAL",0,0,"]633;E;2025-07-24 17:08:25 cd ..;406cfb31-2341-454a-afa8-cae7781806b2]633;C]0;tum_cte0515@hkn1991:~/Projects]633;D;0",,terminal_output +4394,11291092,"TERMINAL",0,0,"51010369940",,terminal_output +4395,11292110,"TERMINAL",0,0,"6114750501",,terminal_output +4396,11292601,"TERMINAL",0,0,"cd jafar",,terminal_command +4397,11292969,"TERMINAL",0,0,"ls",,terminal_command +4398,11293028,"TERMINAL",0,0,"]633;E;2025-07-24 17:08:27 ls;406cfb31-2341-454a-afa8-cae7781806b2]633;C",,terminal_output +4399,11293087,"TERMINAL",0,0,"data frame.png generation_1753368434.8660712.gif LICENSE __pycache__ sample.py train_dynamics.py weekend-job-requeuer.sh\r\ndebug frames generation_1753369273.0620549.gif logs README.md scripts_cremers train_lam.py weekend-job-starter.sh\r\ndiff.diff generate_dataset.py genie.py models read_tf_record.py scripts_horeka train_tokenizer.py\r\ndiff.log generation_1753196800.0453017.gif gifs overfit_dir requirements-franz.txt slurm utils\r\nframe-knoms.png generation_1753367947.9147565.gif input_pipeline overfit_dir.zip requirements.txt tests wandb\r\n]0;tum_cte0515@hkn1991:~/Projects/jafar]633;D;0",,terminal_output +4400,11293139,"TERMINAL",0,0,"72258112",,terminal_output 
[... rows +4401 through +4460 elided: fragmentary one-second screen-refresh diffs (changing TIME fields) from the running `watch squeue --me` session, with no other recoverable content ...]
+4461,11355590,"TERMINAL",0,0,"305581445",,terminal_output +4462,11356610,"TERMINAL",0,0,"16692556",,terminal_output +4463,11357637,"TERMINAL",0,0,"277203667",,terminal_output +4464,11358655,"TERMINAL",0,0,"38814778",,terminal_output +4465,11359682,"TERMINAL",0,0,"49925889",,terminal_output +4466,11360731,"TERMINAL",0,0,"52020369950",,terminal_output +4467,11362471,"TERMINAL",0,0,"622582:012:012",,terminal_output +4468,11363394,"TERMINAL",0,0,"83369223",,terminal_output +4469,11363836,"train_dynamics.py",0,0,"",python,tab +4470,11364434,"TERMINAL",0,0,"944750334",,terminal_output +4471,11365543,"TERMINAL",0,0,"405581445",,terminal_output +4472,11366081,"train_dynamics.py",11799,0,"",python,selection_mouse +4473,11366209,"train_dynamics.py",11797,3,"for",python,selection_mouse +4474,11366414,"train_dynamics.py",11797,4,"for ",python,selection_mouse +4475,11366432,"train_dynamics.py",11797,10,"for videos",python,selection_mouse +4476,11366466,"TERMINAL",0,0,"16692556",,terminal_output +4477,11366528,"train_dynamics.py",11797,13,"for videos in",python,selection_mouse +4478,11366530,"train_dynamics.py",11797,50,"for videos in dataloader:\n # for i in range",python,selection_mouse +4479,11366561,"train_dynamics.py",11797,51,"for videos in dataloader:\n # for i in range(",python,selection_mouse +4480,11366610,"train_dynamics.py",11797,57,"for videos in dataloader:\n # for i in range(videos",python,selection_mouse +4481,11366763,"train_dynamics.py",11797,58,"for videos in dataloader:\n # for i in range(videos.",python,selection_mouse +4482,11366826,"train_dynamics.py",11797,63,"for videos in dataloader:\n # for i in range(videos.shape",python,selection_mouse +4483,11367233,"train_dynamics.py",11857,0,"",python,selection_mouse +4484,11367597,"TERMINAL",0,0,"277303667",,terminal_output +4485,11367635,"train_dynamics.py",11786,0,"",python,selection_mouse +4486,11367787,"train_dynamics.py",11778,9,"num_steps",python,selection_mouse +4487,11367970,"train_dynamics.py",11778,43,"num_steps:\n for videos in dataloader",python,selection_mouse +4488,11368500,"train_dynamics.py",11812,0,"",python,selection_mouse +4489,11368501,"train_dynamics.py",11811,10,"dataloader",python,selection_mouse +4490,11368566,"TERMINAL",0,0,"38814778",,terminal_output +4491,11368658,"train_dynamics.py",11789,34," for videos in dataloader:\n",python,selection_mouse +4492,11369563,"TERMINAL",0,0,"49925889",,terminal_output +4493,11370629,"TERMINAL",0,0,"5303036996:00",,terminal_output +4494,11371631,"TERMINAL",0,0,"6114710101",,terminal_output +4495,11372610,"TERMINAL",0,0,"72258112",,terminal_output +4496,11373638,"TERMINAL",0,0,"83369223",,terminal_output +4497,11374711,"TERMINAL",0,0,"94472:00334",,terminal_output +4498,11375684,"TERMINAL",0,0,"505581445",,terminal_output +4499,11376708,"TERMINAL",0,0,"16692556",,terminal_output +4500,11377092,"models/lam.py",0,0,"",python,tab +4501,11377731,"TERMINAL",0,0,"277403667",,terminal_output +4502,11378810,"TERMINAL",0,0,"38814778",,terminal_output +4503,11379776,"TERMINAL",0,0,"49925889",,terminal_output +4504,11380795,"TERMINAL",0,0,"54040369910",,terminal_output +4505,11381834,"TERMINAL",0,0,"6114720201",,terminal_output +4506,11382861,"TERMINAL",0,0,"72258112",,terminal_output +4507,11383987,"TERMINAL",0,0,"83369223",,terminal_output +4508,11384997,"TERMINAL",0,0,"944710334",,terminal_output +4509,11385937,"TERMINAL",0,0,"10:005581445",,terminal_output +4510,11387049,"TERMINAL",0,0,"16692556",,terminal_output +4511,11388070,"TERMINAL",0,0,"277503667",,terminal_output 
+4512,11389122,"TERMINAL",0,0,"38814778",,terminal_output +4513,11390059,"TERMINAL",0,0,"49925889",,terminal_output +4514,11391143,"TERMINAL",0,0,"55050369920",,terminal_output +4515,11392171,"TERMINAL",0,0,"6114730301",,terminal_output +4516,11393100,"TERMINAL",0,0,"72258112",,terminal_output +4517,11394213,"TERMINAL",0,0,"83369223",,terminal_output +4518,11395239,"TERMINAL",0,0,"944720334",,terminal_output +4519,11396262,"TERMINAL",0,0,"105581445",,terminal_output +4520,11397191,"TERMINAL",0,0,"16692556",,terminal_output +4521,11398217,"TERMINAL",0,0,"2777:003667",,terminal_output +4522,11399243,"TERMINAL",0,0,"38814778",,terminal_output +4523,11400359,"TERMINAL",0,0,"49925889",,terminal_output +4524,11401363,"TERMINAL",0,0,"56:006:00369930",,terminal_output +4525,11402408,"TERMINAL",0,0,"6225841412",,terminal_output +4526,11403339,"TERMINAL",0,0,"83369223",,terminal_output +4527,11404362,"TERMINAL",0,0,"944730334",,terminal_output +4528,11405482,"TERMINAL",0,0,"205581445",,terminal_output +4529,11406411,"TERMINAL",0,0,"16692556",,terminal_output +4530,11407536,"TERMINAL",0,0,"277103667",,terminal_output +4531,11408467,"TERMINAL",0,0,"38814778",,terminal_output +4532,11409480,"TERMINAL",0,0,"49925889",,terminal_output +4533,11410603,"TERMINAL",0,0,"51010369940",,terminal_output +4534,11411625,"TERMINAL",0,0,"6114750501",,terminal_output +4535,11412556,"TERMINAL",0,0,"72258112",,terminal_output +4536,11413600,"TERMINAL",0,0,"83369223",,terminal_output +4537,11414612,"TERMINAL",0,0,"944740334",,terminal_output +4538,11415636,"TERMINAL",0,0,"305581445",,terminal_output +4539,11416854,"TERMINAL",0,0,"16692556",,terminal_output +4540,11417828,"TERMINAL",0,0,"277203667",,terminal_output +4541,11418618,"TERMINAL",0,0,"srun",,terminal_focus +4542,11418774,"TERMINAL",0,0,"38814778",,terminal_output +4543,11419741,"TERMINAL",0,0,"49925889",,terminal_output +4544,11420832,"TERMINAL",0,0,"52020369950",,terminal_output +4545,11422274,"TERMINAL",0,0,"611473:003:001",,terminal_output +4546,11422827,"TERMINAL",0,0,"72258112",,terminal_output +4547,11423714,"TERMINAL",0,0,"bash",,terminal_focus +4548,11423890,"TERMINAL",0,0,"83369223",,terminal_output +4549,11424893,"TERMINAL",0,0,"944750334",,terminal_output +4550,11425925,"TERMINAL",0,0,"405581445",,terminal_output +4551,11426965,"TERMINAL",0,0,"16692556",,terminal_output +4552,11428417,"TERMINAL",0,0,"277303667",,terminal_output +4553,11429225,"TERMINAL",0,0,"38814778",,terminal_output +4554,11430474,"TERMINAL",0,0,"49925889",,terminal_output +4555,11431388,"TERMINAL",0,0,"5303036997:00",,terminal_output +4556,11432163,"TERMINAL",0,0,"6114710101",,terminal_output +4557,11433133,"TERMINAL",0,0,"72258112",,terminal_output +4558,11434153,"TERMINAL",0,0,"83369223",,terminal_output +4559,11435106,"TERMINAL",0,0,"94473:00334",,terminal_output +4560,11435148,"TERMINAL",0,0,"smi",,terminal_command +4561,11435182,"TERMINAL",0,0,"]633;E;2025-07-24 17:10:49 smi;406cfb31-2341-454a-afa8-cae7781806b2]633;C[?1049h(B[?7hEvery 1.0s: nvidia-smihkn1991.localdomain: Thu Jul 24 17:10:49 2025sh: line 1: nvidia-smi: command not found",,terminal_output +4562,11436134,"TERMINAL",0,0,"505581445",,terminal_output +4563,11436196,"TERMINAL",0,0,"50\t ",,terminal_output +4564,11436219,"TERMINAL",0,0,"[?1049l\r[?1l>]0;tum_cte0515@hkn1991:~/Projects/jafar]633;D;0",,terminal_output +4565,11437180,"TERMINAL",0,0,"16692556",,terminal_output +4566,11437502,"TERMINAL",0,0,"srun",,terminal_focus +4567,11438184,"TERMINAL",0,0,"277403667",,terminal_output 
+4568,11439390,"TERMINAL",0,0,"38814778",,terminal_output +4569,11440230,"TERMINAL",0,0,"49925889",,terminal_output +4570,11440830,"TERMINAL",0,0,"[?1049l\r[?1l>]0;tum_cte0515@hkn0901:~/Projects/jafar[?2004h(jafar) [tum_cte0515@hkn0901 jafar]$ ",,terminal_output +4571,11441014,"TERMINAL",0,0,"s",,terminal_output +4572,11441141,"TERMINAL",0,0,"m",,terminal_output +4573,11441353,"TERMINAL",0,0,"[?25li[?25h",,terminal_output +4574,11441535,"TERMINAL",0,0,"\r\n[?2004l\r",,terminal_output +4575,11441605,"TERMINAL",0,0,"[?1049h(B[?7hEvery 1.0s: nvidia-smihkn0901.localdomain: Thu Jul 24 17:10:56 2025Thu Jul 24 17:10:56 2025\r+-----------------------------------------------------------------------------------------+\r| NVIDIA-SMI 570.133.20Driver Version: 570.133.20 CUDA Version: 12.8 |\r|-----------------------------------------+------------------------+----------------------+\r| GPU NamePersistence-M | Bus-IdDisp.A | Volatile Uncorr. ECC |\r| Fan Temp PerfPwr:Usage/Cap |Memory-Usage | GPU-Util Compute M. |\r|||MIG M. |\r|=========================================+========================+======================|\r| 0 NVIDIA H100On | 00000000:26:00.0 Off |0 |\r| N/A 41C P068W / 415W |\t 28MiB / 95830MiB |\t 0%\t Default |\r|||Disabled |\r+-----------------------------------------+------------------------+----------------------+\r+-----------------------------------------------------------------------------------------+\r| Processes:|\r| GPU GI CIPID Type Process nameGPU Memory |\r|ID IDUsage\t |\r|=========================================================================================|\r| 0 N/A N/A3199G /usr/libexec/Xorg17MiB |\r+-----------------------------------------------------------------------------------------+",,terminal_output +4576,11442511,"TERMINAL",0,0,"[?1049l\r[?1l>]0;tum_cte0515@hkn0901:~/Projects/jafar[?2004h(jafar) [tum_cte0515@hkn0901 jafar]$ ",,terminal_output +4577,11456605,"slurm/jobs/mihir/horeka/causal_big_runs/train_dynamics_2_nodes.sbatch",0,0,"#!/usr/bin/env bash\n\n#SBATCH --nodes=2\n#SBATCH --ntasks-per-node=4\n#SBATCH --time=48:00:00\n#SBATCH --partition=accelerated\n#SBATCH --cpus-per-task=5\n#SBATCH --gres=gpu:4\n#SBATCH --output=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir/causal/dynamics-cotraining/%x_%j.log\n#SBATCH --error=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir/causal/dynamics-cotraining/%x_%j.log\n#SBATCH --job-name=train_dynamics_causal_2_node\n#SBATCH --requeue\n#SBATCH --signal=b:usr1@300 # 5 min before timeout\n\n# --- signal trap to requeue job before timeout ---\nrequeue_job() {\n echo ""[$(date)] caught sigusr1 (timeout warning), requeueing slurm job $SLURM_JOB_ID...""\n # optional: trigger checkpoint saving here\n # e.g., touch $checkpoint_dir/requeue_trigger\n scontrol requeue $SLURM_JOB_ID\n exit 0\n}\n\ntrap requeue_job sigusr1\n\n# set checkpoint flag based on restart count\nrestart_count=$(scontrol show job $SLURM_JOB_ID | grep -o 'Restarts=[0-9]*' | cut -d'=' -f2)\n\nif [ $restart_count -eq 0 ]; then\n restore_ckpt_flag=""--no-restore-ckpt""\nelse\n restore_ckpt_flag=""--restore-ckpt""\nfi\n\n\n\n# Log the sbatch script\ncat $0\n\nmodule unload mpi/openmpi/5.0\nmodule unload devel/cuda/12.4\nsource 
.venv/bin/activate\n\narray_records_dir=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_new/open_ai_minecraft_arrayrecords_chunked\n\njob_name=$SLURM_JOB_NAME\nslurm_job_id=$SLURM_JOB_ID\n\nCHECKPOINT_DIR=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/causal/$job_name/$slurm_job_id\nmkdir -p $CHECKPOINT_DIR\n\n# tokenizer with the new structure supporting larger ffn_dim\ntokenizer_ckpt_dir=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data/checkpoints/train_tokenizer_lr_sweep_1e-4_larger_ffn/\n\nenv | grep SLURM\n\nsrun python train_dynamics.py \\n --save_ckpt \\n $restore_ckpt_flag \\n --wandb_id $SLURM_JOB_ID \\n --ckpt_dir $CHECKPOINT_DIR \\n --batch_size=96 \\n --init_lr=0 \\n --max_lr=2e-5 \\n --log_image_interval=1000 \\n --log \\n --log_checkpoint_interval=1000 \\n --name=dynamics-causal-2-node-$slurm_job_id \\n --tags dynamics causal 2-node \\n --entity instant-uv \\n --project jafar \\n --dyna_dim=1024 \\n --dyna_num_blocks=16 \\n --dyna_num_heads=16 \\n --dyna_ffn_dim=4096 \\n --tokenizer_checkpoint=$tokenizer_ckpt_dir \\n --data_dir $array_records_dir &\n\nchild_pid=$!\n\nwait $child_pid\n",shellscript,tab +4578,11474379,"TERMINAL",0,0,"[?25ls[?25h",,terminal_output +4579,11474538,"TERMINAL",0,0,"[?25ly[?25h",,terminal_output +4580,11474652,"TERMINAL",0,0,"[?25ln[?25h",,terminal_output +4581,11474924,"TERMINAL",0,0,"[?25lc[?25h",,terminal_output +4582,11475031,"TERMINAL",0,0,"[?25l-[?25h",,terminal_output +4583,11475271,"TERMINAL",0,0,"r",,terminal_output +4584,11475324,"TERMINAL",0,0,"u",,terminal_output +4585,11475509,"TERMINAL",0,0,"n",,terminal_output +4586,11475576,"TERMINAL",0,0,"n",,terminal_output +4587,11475659,"TERMINAL",0,0,"e",,terminal_output +4588,11475727,"TERMINAL",0,0,"r",,terminal_output +4589,11475881,"TERMINAL",0,0,"[?25l-[?25h",,terminal_output +4590,11475949,"TERMINAL",0,0,"[?25l2[?25h",,terminal_output +4591,11476582,"TERMINAL",0,0,"\r\n[?2004l\rsending incremental file list\r\n",,terminal_output +4592,11477061,"TERMINAL",0,0,"./\r\ngeneration_1753196800.0453017.gif\r\ngeneration_1753367947.9147565.gif\r\ngeneration_1753368434.8660712.gif\r\ngeneration_1753369273.0620549.gif\r\n",,terminal_output +4593,11477255,"TERMINAL",0,0,"gifs/\r\n\r\nsent 332,231 bytes received 232 bytes 664,926.00 bytes/sec\r\ntotal size is 185,399,477 speedup is 557.65\r\n]0;tum_cte0515@hkn0901:~/Projects/jafar[?2004h(jafar) [tum_cte0515@hkn0901 jafar]$ ",,terminal_output +4594,11483519,"TERMINAL",0,0,"[?25lr[?25h",,terminal_output +4595,11483578,"TERMINAL",0,0,"[?25lu[?25h",,terminal_output +4596,11483687,"TERMINAL",0,0,"[?25ln[?25h",,terminal_output +4597,11483758,"TERMINAL",0,0,"[?25ln[?25h",,terminal_output +4598,11483875,"TERMINAL",0,0,"[?25le[?25h",,terminal_output +4599,11483929,"TERMINAL",0,0,"[?25lr[?25h",,terminal_output +4600,11484275,"TERMINAL",0,0,"[?25l-[?25h",,terminal_output +4601,11484378,"TERMINAL",0,0,"[?25l2[?25h",,terminal_output +4602,11484741,"TERMINAL",0,0,"\r\n[?2004l\r]0;tum_cte0515@hkn0901:~/Projects/jafar_jobs_2[?2004h(jafar) [tum_cte0515@hkn0901 jafar_jobs_2]$ ",,terminal_output +4603,11487506,"TERMINAL",0,0,"[?25lr[?25h",,terminal_output +4604,11487574,"TERMINAL",0,0,"[?25lm[?25h",,terminal_output +4605,11488043,"TERMINAL",0,0,"[?25l [?25h",,terminal_output +4606,11488695,"TERMINAL",0,0,"[?25l*[?25h",,terminal_output +4607,11488947,"TERMINAL",0,0,"[?25l.[?25h",,terminal_output +4608,11489316,"TERMINAL",0,0,"[?25lg[?25h",,terminal_output +4609,11489384,"TERMINAL",0,0,"[?25li[?25h",,terminal_output 
+4610,11489756,"TERMINAL",0,0,"[?25lf[?25h",,terminal_output +4611,11490578,"TERMINAL",0,0,"[?25l [?25h",,terminal_output +4612,11491701,"TERMINAL",0,0,"\r\n[?2004l\r]0;tum_cte0515@hkn0901:~/Projects/jafar_jobs_2[?2004h(jafar) [tum_cte0515@hkn0901 jafar_jobs_2]$ ",,terminal_output +4613,11492242,"TERMINAL",0,0,"[?25lls[?25h",,terminal_output +4614,11492392,"TERMINAL",0,0,"\r\n[?2004l\rdebug frame.png gifs overfit_dir read_tf_record.py scripts_cremers slurm-3359334.out tests utils\r\ndiff.diff frames input_pipeline overfit_dir.zip requirements-franz.txt scripts_horeka slurm-3359338.out train_dynamics.py wandb\r\ndiff.log generate_dataset.py LICENSE __pycache__ requirements.txt slurm slurm-3373400.out train_lam.py weekend-job-requeuer.sh\r\nframe-knoms.png genie.py models README.md sample.py slurm-3359333.out slurm-3373404.out train_tokenizer.py weekend-job-starter.sh\r\n]0;tum_cte0515@hkn0901:~/Projects/jafar_jobs_2[?2004h(jafar) [tum_cte0515@hkn0901 jafar_jobs_2]$ ",,terminal_output +4615,11501708,"TERMINAL",0,0,"slurm/jobs/mihir/horeka/causal_big_runs/train_dynamics_2_nodes.sbatch",,terminal_output +4616,11502521,"TERMINAL",0,0,"[?25l\rslurm/jobs/mihir/horeka/causal_big_runs/train_dynamics_2_nodes.sbatch[?25h",,terminal_output +4617,11504442,"TERMINAL",0,0,"[?25llsslurm/jobs/mihir/horeka/causal_big_runs/train_dynamics_2_nodes.sbatch\r[?25h",,terminal_output +4618,11504520,"TERMINAL",0,0,"[?25lsbslurm/jobs/mihir/horeka/causal_big_runs/train_dynamics_2_nodes.sbatch\r[?25h",,terminal_output +4619,11504586,"TERMINAL",0,0,"aslurm/jobs/mihir/horeka/causal_big_runs/train_dynamics_2_nodes.sbatch\r",,terminal_output +4620,11504940,"TERMINAL",0,0,"[?25lcslurm/jobs/mihir/horeka/causal_big_runs/train_dynamics_2_nodes.sbatch\r[?25h",,terminal_output +4621,11505279,"TERMINAL",0,0,"[?25lcslurm/jobs/mihir/horeka/causal_big_runs/train_dynamics_2_nodes.sbatch\r[?25h",,terminal_output +4622,11505765,"TERMINAL",0,0,"[?25ltlslurm/jobs/mihir/horeka/causal_big_runs/train_dynamics_2_nodes.sbatch\r[?25h[?25lscslurm/jobs/mihir/horeka/causal_big_runs/train_dynamics_2_nodes.sbatch\r[?25hhslurm/jobs/mihir/horeka/causal_big_runs/train_dynamics_2_nodes.sbatch\r",,terminal_output +4623,11506053,"TERMINAL",0,0,"[?25l slurm/jobs/mihir/horeka/causal_big_runs/train_dynamics_2_nodes.sbatch\r[?25h",,terminal_output +4624,11506150,"TERMINAL",0,0,"\r\n[?2004l\rSubmitted batch job 3373407\r\n]0;tum_cte0515@hkn0901:~/Projects/jafar_jobs_2[?2004h(jafar) [tum_cte0515@hkn0901 jafar_jobs_2]$ ",,terminal_output +4625,11507833,"slurm/jobs/mihir/horeka/causal_big_runs/train_dynamics_8_nodes.sbatch",0,0,"#!/usr/bin/env bash\n\n#SBATCH --nodes=8\n#SBATCH --ntasks-per-node=4\n#SBATCH --time=48:00:00\n#SBATCH --partition=accelerated\n#SBATCH --cpus-per-task=5\n#SBATCH --gres=gpu:4\n#SBATCH --output=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir/causal/dynamics-cotraining/%x_%j.log\n#SBATCH --error=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir/causal/dynamics-cotraining/%x_%j.log\n#SBATCH --job-name=train_dynamics_causal_8_node\n#SBATCH --requeue\n#SBATCH --signal=b:usr1@300 # 5 min before timeout\n\n# --- signal trap to requeue job before timeout ---\nrequeue_job() {\n echo ""[$(date)] caught sigusr1 (timeout warning), requeueing slurm job $SLURM_JOB_ID...""\n # optional: trigger checkpoint saving here\n # e.g., touch $checkpoint_dir/requeue_trigger\n scontrol requeue $SLURM_JOB_ID\n exit 0\n}\n\ntrap requeue_job sigusr1\n\n# set checkpoint flag based on restart 
count\nrestart_count=$(scontrol show job $SLURM_JOB_ID | grep -o 'Restarts=[0-9]*' | cut -d'=' -f2)\n\nif [ $restart_count -eq 0 ]; then\n restore_ckpt_flag=""--no-restore-ckpt""\nelse\n restore_ckpt_flag=""--restore-ckpt""\nfi\n\n\n\n# Log the sbatch script\ncat $0\n\nmodule unload mpi/openmpi/5.0\nmodule unload devel/cuda/12.4\nsource .venv/bin/activate\n\narray_records_dir=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_new/open_ai_minecraft_arrayrecords_chunked\n\njob_name=$SLURM_JOB_NAME\nslurm_job_id=$SLURM_JOB_ID\n\nCHECKPOINT_DIR=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/causal/$job_name/$slurm_job_id\nmkdir -p $CHECKPOINT_DIR\n\n# tokenizer with the new structure supporting larger ffn_dim\ntokenizer_ckpt_dir=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data/checkpoints/train_tokenizer_lr_sweep_1e-4_larger_ffn/\n\nenv | grep SLURM\n\nsrun python train_dynamics.py \\n --save_ckpt \\n $restore_ckpt_flag \\n --wandb_id $SLURM_JOB_ID \\n --ckpt_dir $CHECKPOINT_DIR \\n --batch_size=384 \\n --init_lr=0 \\n --max_lr=8e-5 \\n --log_image_interval=1000 \\n --log \\n --log_checkpoint_interval=1000 \\n --name=dynamics-causal-8-node-$slurm_job_id \\n --tags dynamics causal 8-node \\n --entity instant-uv \\n --project jafar \\n --dyna_dim=1024 \\n --dyna_num_blocks=16 \\n --dyna_num_heads=16 \\n --dyna_ffn_dim=4096 \\n --tokenizer_checkpoint=$tokenizer_ckpt_dir \\n --data_dir $array_records_dir &\n\nchild_pid=$!\n\nwait $child_pid\n",shellscript,tab +4626,11511453,"TERMINAL",0,0,"[?25ls[?25h",,terminal_output +4627,11511507,"TERMINAL",0,0,"[?25lb[?25h",,terminal_output +4628,11511616,"TERMINAL",0,0,"[?25la[?25h",,terminal_output +4629,11511737,"TERMINAL",0,0,"[?25lt[?25h",,terminal_output +4630,11511805,"TERMINAL",0,0,"[?25lc[?25h",,terminal_output +4631,11511918,"TERMINAL",0,0,"[?25lh[?25h",,terminal_output +4632,11512032,"TERMINAL",0,0,"[?25l [?25h",,terminal_output +4633,11512594,"TERMINAL",0,0,"slurm/jobs/mihir/horeka/causal_big_runs/train_dynamics_8_nodes.sbatch",,terminal_output +4634,11513728,"TERMINAL",0,0,"\rslurm/jobs/mihir/horeka/causal_big_runs/train_dynamics_8_nodes.sbatch\r\n[?2004l\rSubmitted batch job 3373408\r\n]0;tum_cte0515@hkn0901:~/Projects/jafar_jobs_2[?2004h(jafar) [tum_cte0515@hkn0901 jafar_jobs_2]$ ",,terminal_output +4635,11514545,"TERMINAL",0,0,"[?25lq[?25h",,terminal_output +4636,11514611,"TERMINAL",0,0,"[?25lu[?25h",,terminal_output +4637,11514724,"TERMINAL",0,0,"[?25le[?25h",,terminal_output +4638,11514773,"TERMINAL",0,0,"[?25lu[?25h",,terminal_output +4639,11514885,"TERMINAL",0,0,"[?25le[?25h",,terminal_output +4640,11515676,"TERMINAL",0,0,"\r\n[?2004l\r[?1049h(B[?7hEvery 1.0s: squeue --mehkn0901.localdomain: Thu Jul 24 17:12:09 2025JOBID PARTITION NAME USER ST\tTIME NODES NODELIST(REASON)3373408 accelerat train_dy tum_cte0 PD\t0:00\t 8 (Priority)3373407 accelerat train_dy tum_cte0 PD\t0:00\t 2 (Priority)3371237 accelerat train_dy tum_cte0 R 16:37:55\t 8 hkn[0618,0625-0626,0628-0631,0634]3371238 accelerat train_dy tum_cte0 R 16:37:55\t 2 hkn[0706,0710]3372631 accelerat train_dy tum_cte0 R 2:58:58\t 2 hkn[0515,0622]3372629 accelerat train_dy tum_cte0 R 3:44:21\t 8 hkn[0410,0429,0520,0607,0610,0810,0814,0817]3373400 accelerat wrap tum_cte0 R\t4:34\t 1 hkn07233373404 accelerat wrap tum_cte0 R\t4:34\t 1 hkn07233373280 dev_accel interact tum_cte0 R58:25\t 1 hkn0901",,terminal_output +4641,11516738,"TERMINAL",0,0,"M113373400 accelerat wrap tum_cte0 CG\t4:34\t 1 hkn0723669256",,terminal_output 
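Comparing this 8-node script with the 2-node variant above: node count, global batch size (96 to 384), and peak learning rate (2e-5 to 8e-5) all scale by the same factor of 4, while the model flags (dyna_dim=1024, 16 blocks, 16 heads, ffn_dim=4096) stay fixed. The numbers are consistent with the linear learning-rate scaling heuristic, although the scripts themselves do not state that rule:

```latex
\eta_{8\text{-node}} = \eta_{2\text{-node}} \cdot \frac{B_{8\text{-node}}}{B_{2\text{-node}}}
                     = 2\times10^{-5} \cdot \frac{384}{96} = 8\times10^{-5}
```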
+4642,11517712,"TERMINAL",0,0,"2779:00367",,terminal_output +4643,11518739,"TERMINAL",0,0,"3881478",,terminal_output +4644,11519765,"TERMINAL",0,0,"M43373404 accelerat wrap tum_cte0 CG\t4:38\t 1 hkn072399259",,terminal_output +4645,11520791,"TERMINAL",0,0,"58:008:003630",,terminal_output +4646,11521808,"TERMINAL",0,0,"611471",,terminal_output +4647,11523039,"TERMINAL",0,0,"722582",,terminal_output +4648,11523843,"TERMINAL",0,0,"833693",,terminal_output +4649,11524389,"TERMINAL",0,0,"[?1049l\r[?1l>]0;tum_cte0515@hkn0901:~/Projects/jafar_jobs_2[?2004h(jafar) [tum_cte0515@hkn0901 jafar_jobs_2]$ ",,terminal_output +4650,11525542,"TERMINAL",0,0,"[?25ll[?25h",,terminal_output +4651,11525612,"TERMINAL",0,0,"[?25ls[?25h",,terminal_output +4652,11525766,"TERMINAL",0,0,"\r\n[?2004l\rdebug frames gifs overfit_dir.zip requirements.txt slurm-3359333.out tests wandb\r\ndiff.diff generate_dataset.py input_pipeline __pycache__ sample.py slurm-3359334.out train_dynamics.py weekend-job-requeuer.sh\r\ndiff.log generation_1753369927.19308.gif LICENSE README.md scripts_cremers slurm-3359338.out train_lam.py weekend-job-starter.sh\r\nframe-knoms.png generation_1753369931.998801.gif models read_tf_record.py scripts_horeka slurm-3373400.out train_tokenizer.py\r\nframe.png genie.py overfit_dir requirements-franz.txt slurm slurm-3373404.out utils\r\n]0;tum_cte0515@hkn0901:~/Projects/jafar_jobs_2[?2004h(jafar) [tum_cte0515@hkn0901 jafar_jobs_2]$ ",,terminal_output +4653,11527441,"TERMINAL",0,0,"[?25lm[?25h",,terminal_output +4654,11527494,"TERMINAL",0,0,"[?25lv[?25h",,terminal_output +4655,11527547,"TERMINAL",0,0,"[?25l [?25h",,terminal_output +4656,11528071,"TERMINAL",0,0,"[?25l-[?25h",,terminal_output +4657,11528129,"TERMINAL",0,0,"ü",,terminal_output +4658,11529415,"TERMINAL",0,0,"[?25l*[?25h",,terminal_output +4659,11529557,"TERMINAL",0,0,"[?25l.[?25h",,terminal_output +4660,11530742,"TERMINAL",0,0,"[?25lg[?25h",,terminal_output +4661,11530855,"TERMINAL",0,0,"[?25li[?25h",,terminal_output +4662,11531536,"TERMINAL",0,0,"f",,terminal_output +4663,11531800,"TERMINAL",0,0,"[?25l [?25h",,terminal_output +4664,11539510,"TERMINAL",0,0,"bash",,terminal_focus +4665,11544186,"TERMINAL",0,0,"mv *.gif gifs",,terminal_command +4666,11544229,"TERMINAL",0,0,"]633;E;2025-07-24 17:12:38 mv *.gif gifs;406cfb31-2341-454a-afa8-cae7781806b2]633;C]0;tum_cte0515@hkn1991:~/Projects/jafar]633;D;0",,terminal_output +4667,11546358,"TERMINAL",0,0,"srun",,terminal_focus +4668,11548988,"TERMINAL",0,0,"[?25l.[?25h",,terminal_output +4669,11549098,"TERMINAL",0,0,"[?25l.[?25h",,terminal_output +4670,11549832,"TERMINAL",0,0,"[?25l/[?25h",,terminal_output +4671,11550107,"TERMINAL",0,0,"[?25lj[?25h",,terminal_output +4672,11550158,"TERMINAL",0,0,"[?25la[?25h",,terminal_output +4673,11550353,"TERMINAL",0,0,"far",,terminal_output +4674,11551462,"TERMINAL",0,0,"[?25l/[?25h",,terminal_output +4675,11552534,"TERMINAL",0,0,"\r\n[?2004l\r]0;tum_cte0515@hkn0901:~/Projects/jafar_jobs_2[?2004h(jafar) [tum_cte0515@hkn0901 jafar_jobs_2]$ ",,terminal_output +4676,11567125,"TERMINAL",0,0,"mv *.gif ../jafar/",,terminal_output +4677,11567505,"TERMINAL",0,0,"ls",,terminal_output +4678,11567922,"TERMINAL",0,0,"mv *.gif ../jafar/",,terminal_output +4679,11568077,"TERMINAL",0,0,"",,terminal_output +4680,11572861,"slurm/dev/mihir/horeka/yolo-runs/sampling.sh",0,0,"",shellscript,tab +4681,11578796,"TERMINAL",0,0,"\r(reverse-i-search)`': ",,terminal_output +4682,11579075,"TERMINAL",0,0,"[?25ls': ls[?25h",,terminal_output +4683,11579147,"TERMINAL",0,0,"[?25lsb': sbatch 
slurm/jobs/mihir/horeka/causal_big_runs/train_dynamics_8_nodes.sbatch[?25h",,terminal_output +4684,11579294,"TERMINAL",0,0,"[?25ls\ra': sbatch slurm/jobs/mihir/horeka/causal_big_runs/train_dynamics_8_nodes.sbatch[?25h",,terminal_output +4685,11579366,"TERMINAL",0,0,"[?25ls\rt': sbatch slurm/jobs/mihir/horeka/causal_big_runs/train_dynamics_8_nodes.sbatch[?25h",,terminal_output +4686,11579672,"TERMINAL",0,0,"[?25ls\rc': sbatch slurm/jobs/mihir/horeka/causal_big_runs/train_dynamics_8_nodes.sbatch[?25h",,terminal_output +4687,11579865,"TERMINAL",0,0,"[?25ls\rh': sbatch slurm/jobs/mihir/horeka/causal_big_runs/train_dynamics_8_nodes.sbatch[?25h",,terminal_output +4688,11580433,"TERMINAL",0,0,"\rsbatch slurm/jobs/mihir/horeka/causal_big_runs/train_dynamics_8_nodes.sbatch\r",,terminal_output +4689,11581036,"TERMINAL",0,0,"sbatch slurm/jobs/mihir/horeka/causal_big_runs/train_dynamics_2_nodes.sbatch",,terminal_output +4690,11581846,"TERMINAL",0,0,"\rsbatch slurm/jobs/mihir/horeka/causal_big_runs/train_dynamics_2_nodes.sbatch\r",,terminal_output +4691,11586085,"TERMINAL",0,0,"sh slurm/dev/mihir/horeka/overfit_sample/causal/dynamics_overfit_sample.sbatch",,terminal_output +4692,11590868,"TERMINAL",0,0,"\rbatch slurm/dev/mihir/horeka/causal_fit_modelsizes/train_dynamics_new_arch_500M.sbatch",,terminal_output +4693,11592009,"TERMINAL",0,0,"\rsbatch slurm/dev/mihir/horeka/causal_fit_modelsizes/train_dynamics_new_arch_500M.sbatch\r",,terminal_output +4694,11592415,"TERMINAL",0,0,"sbatch slurm/dev/mihir/horeka/causal_fit_modelsizes/train_dynamics_new_arch_356M.sbatch",,terminal_output +4695,11592777,"TERMINAL",0,0,"\rsbatch slurm/dev/mihir/horeka/causal_fit_modelsizes/train_dynamics_new_arch_356M.sbatch\r",,terminal_output +4696,11593134,"TERMINAL",0,0,"sbatch slurm/dev/mihir/horeka/causal_fit_modelsizes/train_dynamics_new_arch_255M.sbatch",,terminal_output +4697,11593381,"TERMINAL",0,0,"\rsbatch slurm/dev/mihir/horeka/causal_fit_modelsizes/train_dynamics_new_arch_255M.sbatch\r",,terminal_output +4698,11593575,"TERMINAL",0,0,"sbatch slurm/dev/mihir/horeka/causal_fit_modelsizes/train_dynamics_new_arch_180M.sbatch",,terminal_output +4699,11594622,"TERMINAL",0,0,"bash",,terminal_focus +4700,11598110,"TERMINAL",0,0,"=02:00:00 --partition=accelerated --nodes=1 --ntasks-per-node=1 --gres=gpu:1 --cpus-per-task=5 --wrap=""sh slurm/jobs/mihir/horeka/yolo-runs/sampling.sh /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/cau^Cl/overfit-seed69-1/interactive/3373280""",,terminal_command +4701,11598146,"TERMINAL",0,0,"^C[?2004l\r[?2004h[?2004l\r\r\n]633;E;;406cfb31-2341-454a-afa8-cae7781806b2]633;C]0;tum_cte0515@hkn1991:~/Projects/jafar]633;D",,terminal_output +4702,11612574,"TERMINAL",0,0,"sbatch --time=02:00:00 --partition=accelerated --nodes=1 --ntasks-per-node=1 --gres=gpu:1 --cpus-per-task=5 --wrap=""sh slurm/dev/mihir/horeka/yolo-runs/sampling.sh /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/causal/overfit-seed69-1/interactive/3373280""",,terminal_command +4703,11612604,"TERMINAL",0,0,"]633;E;2025-07-24 17:13:47 sbatch --time=02:00:00 --partition=accelerated --nodes=1 --ntasks-per-node=1 --gres=gpu:1 --cpus-per-task=5 --wrap=""sh slurm/dev/mihir/horeka/yolo-runs/sampling.sh /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/causal/overfit-seed69-1/interactive/3373280"";406cfb31-2341-454a-afa8-cae7781806b2]633;CSubmitted batch job 3373409\r\n]0;tum_cte0515@hkn1991:~/Projects/jafar]633;D;0",,terminal_output +4704,11613354,"TERMINAL",0,0,"salloc: Job 3373280 
has exceeded its time limit and its allocation has been revoked.\nsrun: Job step aborted: Waiting up to 32 seconds for job step to finish.\r\nslurmstepd: error: *** STEP 3373280.interactive ON hkn0901 CANCELLED AT 2025-07-24T17:13:48 DUE TO TIME LIMIT ***\r\n",,terminal_output +4705,11627120,"TERMINAL",0,0,"sbatch --time=02:00:00 --partition=accelerated --nodes=1 --ntasks-per-node=1 --gres=gpu:1 --cpus-per-task=5 --wrap=""sh slurm/dev/mihir/horeka/yolo-runs/sampling.sh /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/causal/overfit-seed69-1-no-noise/interactive/3373280""",,terminal_command +4706,11627168,"TERMINAL",0,0,"]633;E;2025-07-24 17:14:01 sbatch --time=02:00:00 --partition=accelerated --nodes=1 --ntasks-per-node=1 --gres=gpu:1 --cpus-per-task=5 --wrap=""sh slurm/dev/mihir/horeka/yolo-runs/sampling.sh /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/causal/overfit-seed69-1-no-noise/interactive/3373280"";406cfb31-2341-454a-afa8-cae7781806b2]633;CSubmitted batch job 3373410\r\n]0;tum_cte0515@hkn1991:~/Projects/jafar]633;D;0",,terminal_output +4707,11642987,"TERMINAL",0,0,"srun: error: hkn0901: task 0: Killed\r\n]0;tum_cte0515@hkn1991:~/Projects/jafar]633;D;137",,terminal_output +4708,11688107,"TERMINAL",0,0,"queue",,terminal_command +4709,11688179,"TERMINAL",0,0,"]633;E;2025-07-24 17:15:02 queue;406cfb31-2341-454a-afa8-cae7781806b2]633;C[?1049h(B[?7hEvery 1.0s: squeue --mehkn1991.localdomain: Thu Jul 24 17:15:02 2025JOBID PARTITION NAME USER ST\tTIME NODES NODELIST(REASON)3373408 accelerat train_dy tum_cte0 PD\t0:00\t 8 (Priority)3373407 accelerat train_dy tum_cte0 PD\t0:00\t 2 (Priority)3373410 accelerat wrap tum_cte0 PD\t0:00\t 1 (Priority)3373409 accelerat wrap tum_cte0 PD\t0:00\t 1 (Priority)3371237 accelerat train_dy tum_cte0 R 16:40:47\t 8 hkn[0618,0625-0626,0628-0631,0634]3371238 accelerat train_dy tum_cte0 R 16:40:47\t 2 hkn[0706,0710]3372631 accelerat train_dy tum_cte0 R 3:01:50\t 2 hkn[0515,0622]3372629 accelerat train_dy tum_cte0 R 3:47:13\t 8 hkn[0410,0429,0520,0607,0610,0810,0814,0817]",,terminal_output +4710,11689233,"TERMINAL",0,0,"38814",,terminal_output +4711,11690272,"TERMINAL",0,0,"49925",,terminal_output +4712,11691385,"TERMINAL",0,0,"5515147",,terminal_output +4713,11692409,"TERMINAL",0,0,"72258",,terminal_output +4714,11693297,"TERMINAL",0,0,"[?1049l\r[?1l>]0;tum_cte0515@hkn1991:~/Projects/jafar]633;D;0",,terminal_output +4715,11695307,"TERMINAL",0,0,"bash",,terminal_focus +4716,11709669,"TERMINAL",0,0,"runner-2",,terminal_command +4717,11711553,"TERMINAL",0,0,"ls",,terminal_command +4718,11711608,"TERMINAL",0,0,"]633;E;2025-07-24 17:15:26 ls;adbf53fe-397b-40d3-9339-94ea79afad56]633;C",,terminal_output +4719,11711681,"TERMINAL",0,0,"debug frame.png gifs overfit_dir read_tf_record.py scripts_cremers slurm-3359334.out tests utils\r\ndiff.diff frames input_pipeline overfit_dir.zip requirements-franz.txt scripts_horeka slurm-3359338.out train_dynamics.py wandb\r\ndiff.log generate_dataset.py LICENSE __pycache__ requirements.txt slurm slurm-3373400.out train_lam.py weekend-job-requeuer.sh\r\nframe-knoms.png genie.py models README.md sample.py slurm-3359333.out slurm-3373404.out train_tokenizer.py weekend-job-starter.sh\r\n]0;tum_cte0515@hkn1991:~/Projects/jafar_jobs_2]633;D;0",,terminal_output +4720,11720219,"TERMINAL",0,0,"cd ../jafar",,terminal_command +4721,11720689,"TERMINAL",0,0,"ls",,terminal_command +4722,11720708,"TERMINAL",0,0,"]633;E;2025-07-24 17:15:35 ls;adbf53fe-397b-40d3-9339-94ea79afad56]633;Cdata frame-knoms.png 
generation_1753369927.19308.gif input_pipeline overfit_dir read_tf_record.py scripts_cremers train_dynamics.py wandb\r\ndebug frame.png generation_1753369931.998801.gif LICENSE overfit_dir.zip requirements-franz.txt scripts_horeka train_lam.py weekend-job-requeuer.sh\r\ndiff.diff frames genie.py logs __pycache__ requirements.txt slurm train_tokenizer.py weekend-job-starter.sh\r\ndiff.log generate_dataset.py gifs models README.md sample.py tests utils\r\n]0;tum_cte0515@hkn1991:~/Projects/jafar]633;D;0",,terminal_output +4723,11727470,"TERMINAL",0,0,"rm *.gif",,terminal_command +4724,11727488,"TERMINAL",0,0,"]633;E;2025-07-24 17:15:42 rm *.gif;adbf53fe-397b-40d3-9339-94ea79afad56]633;C]0;tum_cte0515@hkn1991:~/Projects/jafar]633;D;0",,terminal_output +4725,11729473,"TERMINAL",0,0,"ls",,terminal_command +4726,11729489,"TERMINAL",0,0,"]633;E;2025-07-24 17:15:44 ls;adbf53fe-397b-40d3-9339-94ea79afad56]633;Cdata frame-knoms.png genie.py logs __pycache__ requirements.txt slurm train_tokenizer.py weekend-job-starter.sh\r\ndebug frame.png gifs models README.md sample.py tests utils\r\ndiff.diff frames input_pipeline overfit_dir read_tf_record.py scripts_cremers train_dynamics.py wandb\r\ndiff.log generate_dataset.py LICENSE overfit_dir.zip requirements-franz.txt scripts_horeka train_lam.py weekend-job-requeuer.sh\r\n]0;tum_cte0515@hkn1991:~/Projects/jafar]633;D;0",,terminal_output +4727,11732713,"genie.py",0,0,"",python,tab +4728,11734705,"sample.py",0,0,"",python,tab +4729,11735485,"TERMINAL",0,0,"bash",,terminal_focus +4730,11737876,"TERMINAL",0,0,"queue",,terminal_command +4731,11737962,"TERMINAL",0,0,"]633;E;2025-07-24 17:15:52 queue;406cfb31-2341-454a-afa8-cae7781806b2]633;C[?1049h(B[?7hEvery 1.0s: squeue --mehkn1991.localdomain: Thu Jul 24 17:15:52 2025JOBID PARTITION NAME USER ST\tTIME NODES NODELIST(REASON)3373408 accelerat train_dy tum_cte0 PD\t0:00\t 8 (Priority)3373407 accelerat train_dy tum_cte0 PD\t0:00\t 2 (Priority)3371237 accelerat train_dy tum_cte0 R 16:41:37\t 8 hkn[0618,0625-0626,0628-0631,0634]3371238 accelerat train_dy tum_cte0 R 16:41:37\t 2 hkn[0706,0710]3372631 accelerat train_dy tum_cte0 R 3:02:40\t 2 hkn[0515,0622]3372629 accelerat train_dy tum_cte0 R 3:48:03\t 8 hkn[0410,0429,0520,0607,0610,0810,0814,0817]3373409 accelerat wrap tum_cte0 R\t0:13\t 1 hkn07233373410 accelerat wrap tum_cte0 R\t0:13\t 1 hkn0723",,terminal_output
[rows 4732-4821: ~90 consecutive watch(1) screen-refresh rows (one-character updates to the squeue timer digits) elided]
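The `queue` and `smi` commands typed in this session are not standard binaries; the captured screen headers ("Every 1.0s: squeue --me" and "Every 1.0s: nvidia-smi") show they wrap watch(1). A plausible reconstruction of these helpers, assumed rather than taken from the recording:

```bash
# Assumed shell helpers; the recording shows only the resulting watch
# headers, not the definitions themselves.
queue() { watch -n 1 squeue --me; }   # live view of this user's Slurm jobs
smi()   { watch -n 1 nvidia-smi; }    # live GPU utilization
```

watch repaints the alternate screen every second (the [?1049h/[?1049l escapes above), which is why these sessions expand into long runs of one-character refresh rows in the recording.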
[rows 4813-4821 and interleaved refresh rows 4829-4834, 4836, 4839, 4843, 4846-4849, 4851-4852: watch(1) squeue timer updates elided]
+4822,11832654,"TERMINAL",0,0,"[?1049l\r[?1l>]0;tum_cte0515@hkn1991:~/Projects/jafar]633;D;0",,terminal_output +4823,11838320,"slurm/dev/mihir/horeka/overfit_sample/causal/dynamics_overfit_sample.sbatch",0,0,"",shellscript,tab +4824,11849515,"slurm/dev/mihir/horeka/yolo-runs/sampling.sh",0,0,"",shellscript,tab +4825,11850160,"TERMINAL",0,0,"bash",,terminal_focus +4826,11852235,"TERMINAL",0,0,"bash",,terminal_focus +4827,12059375,"TERMINAL",0,0,"queue",,terminal_command +4828,12059431,"TERMINAL",0,0,"]633;E;2025-07-24 17:21:14 queue;406cfb31-2341-454a-afa8-cae7781806b2]633;C[?1049h(B[?7hEvery 1.0s: squeue --mehkn1991.localdomain: Thu Jul 24 17:21:14 2025JOBID PARTITION NAME USER ST\tTIME NODES NODELIST(REASON)3373408 accelerat train_dy tum_cte0 PD\t0:00\t 8 (Priority)3373407 accelerat train_dy tum_cte0 PD\t0:00\t 2 (Priority)3371237 accelerat train_dy tum_cte0 R 16:46:59\t 8 hkn[0618,0625-0626,0628-0631,0634]3371238 accelerat train_dy tum_cte0 R 16:46:59\t 2 hkn[0706,0710]3372631 accelerat train_dy tum_cte0 R 3:08:02\t 2 hkn[0515,0622]3372629 accelerat train_dy tum_cte0 R 3:53:25\t 8 hkn[0410,0429,0520,0607,0610,0810,0814,0817]3373409 accelerat wrap tum_cte0 R\t5:35\t 1 hkn07233373410 accelerat wrap tum_cte0 R\t5:35\t 1 hkn0723",,terminal_output +4835,12066283,"TERMINAL",0,0,"bash",,terminal_focus +4837,12067666,"TERMINAL",0,0,"python",,terminal_command +4838,12067733,"TERMINAL",0,0,"]633;E;2025-07-24 17:21:22 python;63badae8-90b1-4579-970f-d00997b22bed]633;CPython 3.9.18 (main, Jun 27 2025, 00:00:00) \r\n[GCC 11.4.1 20231218 (Red Hat 11.4.1-4)] on linux\r\nType ""help"", ""copyright"", ""credits"" or ""license"" for more information.\r\n>>> ",,terminal_output +4840,12068217,"TERMINAL",0,0,"4",,terminal_output +4841,12068418,"TERMINAL",0,0,"[?25l*[?25h",,terminal_output +4842,12068633,"TERMINAL",0,0,"[?25l1[?25h",,terminal_output +4844,12068984,"TERMINAL",0,0,"[?25l6[?25h",,terminal_output +4845,12069113,"TERMINAL",0,0,"\r\n64\r\n>>> ",,terminal_output +4850,12073125,"TERMINAL",0,0,"watch",,terminal_focus +4853,12075189,"TERMINAL",0,0,"python",,terminal_focus
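Jobs 3373409 and 3373410, visible as `wrap` entries in the squeue view above, were submitted earlier with `sbatch --wrap` instead of a script file: Slurm generates a wrapper batch script around the quoted command, which is convenient for one-off single-GPU sampling runs. The pattern from the log, with the long checkpoint argument pulled into a variable for readability:

```bash
# Ad-hoc single-GPU job without a .sbatch file; Slurm wraps the quoted
# command in a generated batch script.
CKPT_DIR=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/causal/overfit-seed69-1/interactive/3373280  # taken from the submission in the log
sbatch --time=02:00:00 --partition=accelerated \
       --nodes=1 --ntasks-per-node=1 --gres=gpu:1 --cpus-per-task=5 \
       --wrap="sh slurm/dev/mihir/horeka/yolo-runs/sampling.sh $CKPT_DIR"
```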
+4854,12076051,"TERMINAL",0,0,"\r\n]0;tum_cte0515@hkn1991:/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/causal/overfit-seed69-1/interactive/3373280]633;D;0",,terminal_output +4874,12096334,"TERMINAL",0,0,"bash",,terminal_focus +4882,12103803,"models/lam.py",0,0,"",python,tab
[rows 4855-4873, 4875-4881, 4883-4912: watch(1) refresh rows interleaved with the three events kept above, elided]
[rows 4913-5034: ~120 watch(1) screen-refresh rows ("squeue --me" timer digits) elided]
[rows 5035-5057, 5061, 5064-5067, 5070, 5072, 5076, 5079, 5081-5093: watch(1) refresh rows elided; the models/lam.py selections below are kept]
+5058,12286484,"models/lam.py",2208,0,"",python,selection_mouse +5059,12286503,"models/lam.py",2207,0,"",python,selection_command +5060,12287326,"models/lam.py",1887,0,"",python,selection_mouse +5062,12287884,"models/lam.py",1880,0,"",python,selection_mouse +5063,12288006,"models/lam.py",1879,8,"__call__",python,selection_mouse +5068,12291875,"models/lam.py",2597,0,"",python,selection_mouse +5069,12291997,"models/lam.py",2592,9,"vq_encode",python,selection_mouse +5071,12293660,"models/lam.py",2964,0,"",python,selection_mouse +5073,12293850,"models/lam.py",2963,10,"action_pad",python,selection_mouse +5074,12294400,"models/lam.py",2975,0,"",python,selection_mouse +5075,12294540,"models/lam.py",2975,7,"patches",python,selection_mouse +5077,12295267,"models/lam.py",2884,0,"",python,selection_mouse +5078,12295401,"models/lam.py",2880,5,"FIXME",python,selection_mouse +5080,12296012,"models/lam.py",2599,0,"",python,selection_mouse
[rows 5094-5106 and 5109-5153: watch(1) refresh rows elided]
+5107,12323873,"models/lam.py",2058,0,"",python,selection_mouse +5108,12324076,"models/lam.py",2054,9,"vq_encode",python,selection_mouse
[rows 5154-5275: ~120 watch(1) screen-refresh rows ("squeue --me" timer digits) elided]
[rows 5276-5292, 5294-5304 and interleaved refresh rows: watch(1) squeue updates elided; the editor events below are kept]
+5293,12516043,"train_dynamics.py",0,0,"",python,tab +5305,12527525,"train_dynamics.py",5708,0,"",python,selection_mouse +5306,12527734,"genie.py",0,0,"",python,tab +5311,12532199,"genie.py",3691,0,"",python,selection_mouse +5312,12532761,"genie.py",3653,0,"",python,selection_mouse +5314,12532863,"genie.py",3647,11,"lam_outputs",python,selection_mouse +5316,12533886,"genie.py",3659,0,"",python,selection_mouse +5317,12534061,"genie.py",3658,2,"[""",python,selection_mouse +5318,12534231,"genie.py",3658,2,"[""",python,selection_mouse +5319,12534257,"genie.py",3658,5,"[""z_q",python,selection_mouse +5320,12534340,"genie.py",3658,6,"[""z_q""",python,selection_mouse +5321,12534377,"genie.py",3658,7,"[""z_q""]",python,selection_mouse +5322,12534793,"genie.py",3665,0,"",python,selection_mouse +5324,12535610,"genie.py",3514,0,"",python,selection_mouse +5325,12535815,"genie.py",3514,9,"vq_encode",python,selection_mouse +5331,12540375,"genie.py",3498,0,"",python,selection_mouse +5332,12540490,"genie.py",3491,11,"lam_outputs",python,selection_mouse +5334,12541694,"genie.py",3612,0,"",python,selection_mouse
+5335,12541868,"genie.py",3608,4,"self",python,selection_mouse +5337,12542395,"genie.py",3574,0,"",python,selection_mouse +5338,12542561,"genie.py",3565,14,"latent_actions",python,selection_mouse +5345,12553426,"genie.py",3520,0,"",python,selection_mouse +5346,12553427,"genie.py",3516,0,"",python,selection_mouse +5347,12553793,"genie.py",3519,0,"",python,selection_mouse +5349,12554380,"genie.py",3523,0,"",python,selection_mouse +5352,12556426,"genie.py",3519,0,"",python,selection_mouse +5357,12560342,"models/lam.py",0,0,"",python,tab +5358,12560343,"models/lam.py",2598,0,"",python,selection_mouse +5359,12560698,"genie.py",0,0,"",python,tab +5363,12563563,"models/lam.py",0,0,"",python,tab
[refresh rows interleaved above and rows 5364-5394: watch(1) squeue updates elided]
+5395,12596773,"TERMINAL",0,0,"1669222",,terminal_output +5396,12597824,"TERMINAL",0,0,"2777:00333",,terminal_output +5397,12598963,"TERMINAL",0,0,"3881444",,terminal_output +5398,12599989,"TERMINAL",0,0,"4992555",,terminal_output +5399,12600959,"TERMINAL",0,0,"56:006:003666",,terminal_output +5400,12602004,"TERMINAL",0,0,"6114777",,terminal_output +5401,12603053,"TERMINAL",0,0,"7225888",,terminal_output +5402,12604218,"TERMINAL",0,0,"8336999",,terminal_output +5403,12605158,"TERMINAL",0,0,"9447304040",,terminal_output +5404,12606293,"TERMINAL",0,0,"20558111",,terminal_output +5405,12607282,"TERMINAL",0,0,"1669222",,terminal_output +5406,12608295,"TERMINAL",0,0,"27710333",,terminal_output +5407,12609377,"TERMINAL",0,0,"3992555",,terminal_output +5408,12610435,"TERMINAL",0,0,"510103666",,terminal_output +5409,12611471,"TERMINAL",0,0,"6114777",,terminal_output +5410,12612486,"TERMINAL",0,0,"7225888",,terminal_output +5411,12613525,"TERMINAL",0,0,"8336999",,terminal_output +5412,12614577,"TERMINAL",0,0,"9447405050",,terminal_output +5413,12615619,"TERMINAL",0,0,"30558111",,terminal_output +5414,12616667,"TERMINAL",0,0,"1669222",,terminal_output +5415,12617747,"TERMINAL",0,0,"27720333",,terminal_output +5416,12618777,"TERMINAL",0,0,"3881444",,terminal_output +5417,12619852,"TERMINAL",0,0,"4992555",,terminal_output +5418,12620864,"TERMINAL",0,0,"520203666",,terminal_output +5419,12621916,"TERMINAL",0,0,"6114777",,terminal_output +5420,12623027,"TERMINAL",0,0,"7225888",,terminal_output +5421,12624052,"TERMINAL",0,0,"8336999",,terminal_output +5422,12625091,"TERMINAL",0,0,"9447505:005:00",,terminal_output +5423,12626530,"TERMINAL",0,0,"40558111",,terminal_output +5424,12627227,"TERMINAL",0,0,"1669222",,terminal_output +5425,12628252,"TERMINAL",0,0,"27730333",,terminal_output +5426,12629275,"TERMINAL",0,0,"3881444",,terminal_output +5427,12630309,"TERMINAL",0,0,"4992555",,terminal_output +5428,12631423,"TERMINAL",0,0,"531314777",,terminal_output +5429,12632447,"TERMINAL",0,0,"7225888",,terminal_output +5430,12633471,"TERMINAL",0,0,"8336999",,terminal_output +5431,12634487,"TERMINAL",0,0,"94473:001010",,terminal_output +5432,12635536,"TERMINAL",0,0,"50558111",,terminal_output +5433,12636652,"TERMINAL",0,0,"1669222",,terminal_output +5434,12637674,"TERMINAL",0,0,"27740333",,terminal_output +5435,12638713,"TERMINAL",0,0,"3881444",,terminal_output +5436,12639754,"TERMINAL",0,0,"4992555",,terminal_output +5437,12640821,"TERMINAL",0,0,"540403666",,terminal_output +5438,12641849,"TERMINAL",0,0,"6114777",,terminal_output +5439,12642994,"TERMINAL",0,0,"7225888",,terminal_output +5440,12644021,"TERMINAL",0,0,"8336999",,terminal_output +5441,12645043,"TERMINAL",0,0,"9447102020",,terminal_output +5442,12646067,"TERMINAL",0,0,"1:00558111",,terminal_output +5443,12647090,"TERMINAL",0,0,"1669222",,terminal_output +5444,12648219,"TERMINAL",0,0,"27750333",,terminal_output +5445,12649248,"TERMINAL",0,0,"3881444",,terminal_output +5446,12650267,"TERMINAL",0,0,"4992555",,terminal_output +5447,12651293,"TERMINAL",0,0,"550503666",,terminal_output +5448,12652308,"TERMINAL",0,0,"6114777",,terminal_output +5449,12653348,"TERMINAL",0,0,"8336999",,terminal_output +5450,12654387,"TERMINAL",0,0,"9447203030",,terminal_output +5451,12655493,"TERMINAL",0,0,"10558111",,terminal_output +5452,12656515,"TERMINAL",0,0,"1669222",,terminal_output +5453,12657512,"TERMINAL",0,0,"2778:00333",,terminal_output +5454,12658550,"TERMINAL",0,0,"3881444",,terminal_output +5455,12659601,"TERMINAL",0,0,"4992555",,terminal_output 
+5456,12660651,"TERMINAL",0,0,"57:007:003666",,terminal_output +5457,12661688,"TERMINAL",0,0,"6114777",,terminal_output +5458,12662739,"TERMINAL",0,0,"7225888",,terminal_output +5459,12663773,"TERMINAL",0,0,"8336999",,terminal_output +5460,12664911,"TERMINAL",0,0,"9447304040",,terminal_output +5461,12665855,"TERMINAL",0,0,"20558111",,terminal_output +5462,12666964,"TERMINAL",0,0,"1669222",,terminal_output +5463,12667982,"TERMINAL",0,0,"27710333",,terminal_output +5464,12668980,"TERMINAL",0,0,"3881444",,terminal_output +5465,12670031,"TERMINAL",0,0,"4992555",,terminal_output +5466,12671263,"TERMINAL",0,0,"510103666",,terminal_output +5467,12672181,"TERMINAL",0,0,"6114777",,terminal_output +5468,12673150,"TERMINAL",0,0,"7225888",,terminal_output +5469,12674193,"TERMINAL",0,0,"8336999",,terminal_output +5470,12675257,"TERMINAL",0,0,"9447405050",,terminal_output +5471,12676286,"TERMINAL",0,0,"30558111",,terminal_output +5472,12677403,"TERMINAL",0,0,"17720333",,terminal_output +5473,12678429,"TERMINAL",0,0,"3881444",,terminal_output +5474,12679428,"TERMINAL",0,0,"4992555",,terminal_output +5475,12680577,"TERMINAL",0,0,"520203666",,terminal_output +5476,12681603,"TERMINAL",0,0,"6114777",,terminal_output +5477,12682561,"TERMINAL",0,0,"7225888",,terminal_output +5478,12683615,"TERMINAL",0,0,"8336999",,terminal_output +5479,12684655,"TERMINAL",0,0,"9447506:006:00",,terminal_output +5480,12685801,"TERMINAL",0,0,"40558111",,terminal_output +5481,12686752,"TERMINAL",0,0,"1669222",,terminal_output +5482,12687795,"TERMINAL",0,0,"27730333",,terminal_output +5483,12688875,"TERMINAL",0,0,"3881444",,terminal_output +5484,12689895,"TERMINAL",0,0,"4992555",,terminal_output +5485,12691228,"TERMINAL",0,0,"530303666",,terminal_output +5486,12692254,"TERMINAL",0,0,"6114777",,terminal_output +5487,12693281,"TERMINAL",0,0,"7225888",,terminal_output +5488,12694301,"TERMINAL",0,0,"8336999",,terminal_output +5489,12695424,"TERMINAL",0,0,"95584:011111",,terminal_output +5490,12696450,"TERMINAL",0,0,"51669222",,terminal_output +5491,12697437,"TERMINAL",0,0,"27740333",,terminal_output +5492,12698500,"TERMINAL",0,0,"3881444",,terminal_output +5493,12699546,"TERMINAL",0,0,"4992555",,terminal_output +5494,12700570,"TERMINAL",0,0,"540403666",,terminal_output +5495,12701632,"TERMINAL",0,0,"6114777",,terminal_output +5496,12702700,"TERMINAL",0,0,"7225888",,terminal_output +5497,12703724,"TERMINAL",0,0,"8336999",,terminal_output +5498,12704855,"TERMINAL",0,0,"9447102020",,terminal_output +5499,12705873,"TERMINAL",0,0,"2:00558111",,terminal_output +5500,12706892,"TERMINAL",0,0,"1669222",,terminal_output +5501,12707932,"TERMINAL",0,0,"27750333",,terminal_output +5502,12709044,"TERMINAL",0,0,"3881444",,terminal_output +5503,12710068,"TERMINAL",0,0,"4992555",,terminal_output +5504,12711093,"TERMINAL",0,0,"550503666",,terminal_output +5505,12712219,"TERMINAL",0,0,"6114777",,terminal_output +5506,12713156,"TERMINAL",0,0,"7225888",,terminal_output +5507,12714269,"TERMINAL",0,0,"8336999",,terminal_output +5508,12715292,"TERMINAL",0,0,"9447203030",,terminal_output +5509,12716318,"TERMINAL",0,0,"10558111",,terminal_output +5510,12717340,"TERMINAL",0,0,"1779:00333",,terminal_output +5511,12718377,"TERMINAL",0,0,"3881444",,terminal_output +5512,12719429,"TERMINAL",0,0,"4992555",,terminal_output +5513,12720519,"TERMINAL",0,0,"58:008:003666",,terminal_output +5514,12721539,"TERMINAL",0,0,"6114777",,terminal_output +5515,12722575,"TERMINAL",0,0,"7225888",,terminal_output +5516,12723617,"TERMINAL",0,0,"8336999",,terminal_output 
+5517,12724714,"TERMINAL",0,0,"9447304040",,terminal_output +5518,12725700,"TERMINAL",0,0,"20558111",,terminal_output +5519,12726762,"TERMINAL",0,0,"1669222",,terminal_output +5520,12727782,"TERMINAL",0,0,"27710333",,terminal_output +5521,12728913,"TERMINAL",0,0,"3881444",,terminal_output +5522,12729935,"TERMINAL",0,0,"4992555",,terminal_output +5523,12730961,"TERMINAL",0,0,"510103666",,terminal_output +5524,12731985,"TERMINAL",0,0,"6114777",,terminal_output +5525,12733110,"TERMINAL",0,0,"7225888",,terminal_output +5526,12734137,"TERMINAL",0,0,"8336999",,terminal_output +5527,12735128,"TERMINAL",0,0,"9447405050",,terminal_output +5528,12736183,"TERMINAL",0,0,"30558111",,terminal_output +5529,12737310,"TERMINAL",0,0,"1669222",,terminal_output +5530,12738331,"TERMINAL",0,0,"27720333",,terminal_output +5531,12739307,"TERMINAL",0,0,"3992555",,terminal_output +5532,12740383,"TERMINAL",0,0,"520203666",,terminal_output +5533,12741519,"TERMINAL",0,0,"6114777",,terminal_output +5534,12742456,"TERMINAL",0,0,"7225888",,terminal_output +5535,12743534,"TERMINAL",0,0,"8336999",,terminal_output +5536,12744555,"TERMINAL",0,0,"9447507:007:00",,terminal_output +5537,12745598,"TERMINAL",0,0,"40558111",,terminal_output +5538,12746637,"TERMINAL",0,0,"1669222",,terminal_output +5539,12747677,"TERMINAL",0,0,"27730333",,terminal_output +5540,12748727,"TERMINAL",0,0,"3881444",,terminal_output +5541,12749806,"TERMINAL",0,0,"4992555",,terminal_output +5542,12750825,"TERMINAL",0,0,"530303666",,terminal_output +5543,12751865,"TERMINAL",0,0,"6114777",,terminal_output +5544,12752976,"TERMINAL",0,0,"7225888",,terminal_output +5545,12753960,"TERMINAL",0,0,"8336999",,terminal_output +5546,12755003,"TERMINAL",0,0,"94475:001010",,terminal_output +5547,12756147,"TERMINAL",0,0,"50558111",,terminal_output +5548,12757172,"TERMINAL",0,0,"1669222",,terminal_output +5549,12758200,"TERMINAL",0,0,"27740333",,terminal_output +5550,12759198,"TERMINAL",0,0,"3881444",,terminal_output +5551,12760247,"TERMINAL",0,0,"4992555",,terminal_output +5552,12761298,"TERMINAL",0,0,"540403666",,terminal_output +5553,12762355,"TERMINAL",0,0,"6225888",,terminal_output +5554,12763410,"TERMINAL",0,0,"8336999",,terminal_output +5555,12764464,"TERMINAL",0,0,"9447102020",,terminal_output +5556,12765507,"TERMINAL",0,0,"3:00558111",,terminal_output +5557,12766607,"TERMINAL",0,0,"1669222",,terminal_output +5558,12767621,"TERMINAL",0,0,"27750333",,terminal_output +5559,12768643,"TERMINAL",0,0,"3881444",,terminal_output +5560,12769775,"TERMINAL",0,0,"4992555",,terminal_output +5561,12770746,"TERMINAL",0,0,"550503666",,terminal_output +5562,12771795,"TERMINAL",0,0,"6114777",,terminal_output +5563,12772948,"TERMINAL",0,0,"7225888",,terminal_output +5564,12773975,"TERMINAL",0,0,"8336999",,terminal_output +5565,12774995,"TERMINAL",0,0,"9447203030",,terminal_output +5566,12775991,"TERMINAL",0,0,"10558111",,terminal_output +5567,12777041,"TERMINAL",0,0,"1669222",,terminal_output +5568,12778169,"TERMINAL",0,0,"27720:00333",,terminal_output +5569,12779144,"TERMINAL",0,0,"3881444",,terminal_output +5570,12780186,"TERMINAL",0,0,"4992555",,terminal_output +5571,12781341,"TERMINAL",0,0,"59:009:003666",,terminal_output +5572,12782364,"TERMINAL",0,0,"6114777",,terminal_output +5573,12783386,"TERMINAL",0,0,"7336999",,terminal_output +5574,12784397,"TERMINAL",0,0,"9447304040",,terminal_output +5575,12785541,"TERMINAL",0,0,"20558111",,terminal_output +5576,12786488,"TERMINAL",0,0,"1669222",,terminal_output +5577,12787585,"TERMINAL",0,0,"27710333",,terminal_output 
+5578,12788615,"TERMINAL",0,0,"3881444",,terminal_output +5579,12789634,"TERMINAL",0,0,"4992555",,terminal_output +5580,12790680,"TERMINAL",0,0,"510103666",,terminal_output +5581,12791726,"TERMINAL",0,0,"6114777",,terminal_output +5582,12792773,"TERMINAL",0,0,"7225888",,terminal_output +5583,12793821,"TERMINAL",0,0,"8336999",,terminal_output +5584,12794962,"TERMINAL",0,0,"9447405050",,terminal_output +5585,12795983,"TERMINAL",0,0,"30558111",,terminal_output +5586,12797006,"TERMINAL",0,0,"1669222",,terminal_output +5587,12798031,"TERMINAL",0,0,"27720333",,terminal_output +5588,12799156,"TERMINAL",0,0,"3881444",,terminal_output +5589,12800120,"TERMINAL",0,0,"4992555",,terminal_output +5590,12801211,"TERMINAL",0,0,"520203666",,terminal_output +5591,12802201,"TERMINAL",0,0,"6114777",,terminal_output +5592,12803253,"TERMINAL",0,0,"7225888",,terminal_output +5593,12804305,"TERMINAL",0,0,"8336999",,terminal_output +5594,12805404,"TERMINAL",0,0,"40558518:018:01",,terminal_output +5595,12806430,"TERMINAL",0,0,"1669222",,terminal_output +5596,12807556,"TERMINAL",0,0,"27730333",,terminal_output +5597,12808518,"TERMINAL",0,0,"3881444",,terminal_output +5598,12809611,"TERMINAL",0,0,"4992555",,terminal_output +5599,12810614,"TERMINAL",0,0,"530303666",,terminal_output +5600,12811668,"TERMINAL",0,0,"6114777",,terminal_output +5601,12812780,"TERMINAL",0,0,"7225888",,terminal_output +5602,12813749,"TERMINAL",0,0,"8336999",,terminal_output +5603,12814798,"TERMINAL",0,0,"94476:001010",,terminal_output +5604,12815954,"TERMINAL",0,0,"50558111",,terminal_output +5605,12816890,"TERMINAL",0,0,"1669222",,terminal_output +5606,12818001,"TERMINAL",0,0,"27740333",,terminal_output +5607,12819027,"TERMINAL",0,0,"3881444",,terminal_output +5608,12820087,"TERMINAL",0,0,"4992555",,terminal_output +5609,12821177,"TERMINAL",0,0,"540403666",,terminal_output +5610,12822199,"TERMINAL",0,0,"6114777",,terminal_output +5611,12823223,"TERMINAL",0,0,"7225888",,terminal_output +5612,12824250,"TERMINAL",0,0,"8336999",,terminal_output +5613,12825373,"TERMINAL",0,0,"9447102020",,terminal_output +5614,12826406,"TERMINAL",0,0,"4:00669222",,terminal_output +5615,12827418,"TERMINAL",0,0,"27750333",,terminal_output +5616,12828453,"TERMINAL",0,0,"3881444",,terminal_output +5617,12829494,"TERMINAL",0,0,"4992555",,terminal_output +5618,12830595,"TERMINAL",0,0,"550503666",,terminal_output +5619,12831622,"TERMINAL",0,0,"6114777",,terminal_output +5620,12832635,"TERMINAL",0,0,"7225888",,terminal_output +5621,12833688,"TERMINAL",0,0,"8336999",,terminal_output +5622,12834728,"TERMINAL",0,0,"9447203030",,terminal_output +5623,12835780,"TERMINAL",0,0,"10558111",,terminal_output +5624,12836844,"TERMINAL",0,0,"1669222",,terminal_output +5625,12837872,"TERMINAL",0,0,"2771:00333",,terminal_output +5626,12838993,"TERMINAL",0,0,"3881444",,terminal_output +5627,12840017,"TERMINAL",0,0,"4992555",,terminal_output +5628,12841041,"TERMINAL",0,0,"57:00:007:00:003666",,terminal_output +5629,12842081,"TERMINAL",0,0,"6114777",,terminal_output +5630,12843197,"TERMINAL",0,0,"7225888",,terminal_output +5631,12844213,"TERMINAL",0,0,"8336999",,terminal_output +5632,12845343,"TERMINAL",0,0,"9447304040",,terminal_output +5633,12846366,"TERMINAL",0,0,"20558111",,terminal_output +5634,12847388,"TERMINAL",0,0,"17710333",,terminal_output +5635,12848412,"TERMINAL",0,0,"3881444",,terminal_output +5636,12849419,"TERMINAL",0,0,"4992555",,terminal_output +5637,12850566,"TERMINAL",0,0,"510103666",,terminal_output +5638,12851590,"TERMINAL",0,0,"6114777",,terminal_output 
+5639,12852611,"TERMINAL",0,0,"7225888",,terminal_output +5640,12853611,"TERMINAL",0,0,"8336999",,terminal_output +5641,12854660,"TERMINAL",0,0,"9447405050",,terminal_output +5642,12855707,"TERMINAL",0,0,"30558111",,terminal_output +5643,12856761,"TERMINAL",0,0,"1669222",,terminal_output +5644,12857808,"TERMINAL",0,0,"27720333",,terminal_output +5645,12858857,"TERMINAL",0,0,"3881444",,terminal_output +5646,12859908,"TERMINAL",0,0,"4992555",,terminal_output +5647,12861010,"TERMINAL",0,0,"520203666",,terminal_output +5648,12862030,"TERMINAL",0,0,"6114777",,terminal_output +5649,12863154,"TERMINAL",0,0,"7225888",,terminal_output +5650,12864125,"TERMINAL",0,0,"8336999",,terminal_output +5651,12865167,"TERMINAL",0,0,"9447509:009:00",,terminal_output +5652,12866229,"TERMINAL",0,0,"40558111",,terminal_output +5653,12867358,"TERMINAL",0,0,"1669222",,terminal_output +5654,12868387,"TERMINAL",0,0,"27730333",,terminal_output +5655,12869368,"TERMINAL",0,0,"3992555",,terminal_output +5656,12870428,"TERMINAL",0,0,"530303666",,terminal_output +5657,12871445,"TERMINAL",0,0,"6114777",,terminal_output +5658,12872491,"TERMINAL",0,0,"7225888",,terminal_output +5659,12873606,"TERMINAL",0,0,"8336999",,terminal_output +5660,12874592,"TERMINAL",0,0,"94477:001010",,terminal_output +5661,12875640,"TERMINAL",0,0,"50558111",,terminal_output +5662,12876703,"TERMINAL",0,0,"1669222",,terminal_output +5663,12877752,"TERMINAL",0,0,"27740333",,terminal_output +5664,12878778,"TERMINAL",0,0,"3881444",,terminal_output +5665,12879851,"TERMINAL",0,0,"4992555",,terminal_output +5666,12880872,"TERMINAL",0,0,"540403666",,terminal_output +5667,12882006,"TERMINAL",0,0,"6114777",,terminal_output +5668,12882959,"TERMINAL",0,0,"7225888",,terminal_output +5669,12884049,"TERMINAL",0,0,"8336999",,terminal_output +5670,12885096,"TERMINAL",0,0,"9447102020",,terminal_output +5671,12886096,"TERMINAL",0,0,"5:00558111",,terminal_output +5672,12887139,"TERMINAL",0,0,"1669222",,terminal_output +5673,12888248,"TERMINAL",0,0,"27750333",,terminal_output +5674,12889271,"TERMINAL",0,0,"3881444",,terminal_output +5675,12890296,"TERMINAL",0,0,"4992555",,terminal_output +5676,12891334,"TERMINAL",0,0,"551514777",,terminal_output +5677,12892447,"TERMINAL",0,0,"7225888",,terminal_output +5678,12893408,"TERMINAL",0,0,"8336999",,terminal_output +5679,12894453,"TERMINAL",0,0,"9447203030",,terminal_output +5680,12895523,"TERMINAL",0,0,"10558111",,terminal_output +5681,12896645,"TERMINAL",0,0,"1669222",,terminal_output +5682,12897603,"TERMINAL",0,0,"2772:00333",,terminal_output +5683,12898696,"TERMINAL",0,0,"3881444",,terminal_output +5684,12899683,"TERMINAL",0,0,"4992555",,terminal_output +5685,12900723,"TERMINAL",0,0,"51:001:003666",,terminal_output +5686,12901867,"TERMINAL",0,0,"6114777",,terminal_output +5687,12902813,"TERMINAL",0,0,"7225888",,terminal_output +5688,12903863,"TERMINAL",0,0,"8336999",,terminal_output +5689,12904981,"TERMINAL",0,0,"9447304040",,terminal_output +5690,12906068,"TERMINAL",0,0,"20558111",,terminal_output +5691,12906997,"TERMINAL",0,0,"1669222",,terminal_output +5692,12908114,"TERMINAL",0,0,"27710333",,terminal_output +5693,12909136,"TERMINAL",0,0,"3881444",,terminal_output +5694,12910128,"TERMINAL",0,0,"4992555",,terminal_output +5695,12911188,"TERMINAL",0,0,"510103666",,terminal_output +5696,12912225,"TERMINAL",0,0,"6114777",,terminal_output +5697,12913274,"TERMINAL",0,0,"7225888",,terminal_output +5698,12914318,"TERMINAL",0,0,"8447405050",,terminal_output +5699,12915381,"TERMINAL",0,0,"30558111",,terminal_output 
+5700,12916511,"TERMINAL",0,0,"1669222",,terminal_output +5701,12917535,"TERMINAL",0,0,"27720333",,terminal_output +5702,12918540,"TERMINAL",0,0,"3881444",,terminal_output +5703,12919565,"TERMINAL",0,0,"4992555",,terminal_output +5704,12920709,"TERMINAL",0,0,"520203666",,terminal_output +5705,12921658,"TERMINAL",0,0,"6114777",,terminal_output +5706,12922748,"TERMINAL",0,0,"7225888",,terminal_output +5707,12923764,"TERMINAL",0,0,"8336999",,terminal_output +5708,12924835,"TERMINAL",0,0,"94475020:0020:00",,terminal_output +5709,12925931,"TERMINAL",0,0,"40558111",,terminal_output +5710,12926967,"TERMINAL",0,0,"1669222",,terminal_output +5711,12927983,"TERMINAL",0,0,"27730333",,terminal_output +5712,12929004,"TERMINAL",0,0,"3881444",,terminal_output +5713,12930096,"TERMINAL",0,0,"4992555",,terminal_output +5714,12931148,"TERMINAL",0,0,"530303666",,terminal_output +5715,12932863,"TERMINAL",0,0,"6225888",,terminal_output +5716,12933892,"TERMINAL",0,0,"8336999",,terminal_output +5717,12935044,"TERMINAL",0,0,"94478:001010",,terminal_output +5718,12936069,"TERMINAL",0,0,"50558111",,terminal_output +5719,12937090,"TERMINAL",0,0,"1669222",,terminal_output +5720,12938132,"TERMINAL",0,0,"27740333",,terminal_output +5721,12939141,"TERMINAL",0,0,"3881444",,terminal_output +5722,12940183,"TERMINAL",0,0,"4992555",,terminal_output +5723,12941230,"TERMINAL",0,0,"540403666",,terminal_output +5724,12942320,"TERMINAL",0,0,"6114777",,terminal_output +5725,12943447,"TERMINAL",0,0,"7336999",,terminal_output +5726,12944400,"TERMINAL",0,0,"9447102020",,terminal_output +5727,12945471,"TERMINAL",0,0,"6:00558111",,terminal_output +5728,12946517,"TERMINAL",0,0,"1669222",,terminal_output +5729,12947656,"TERMINAL",0,0,"27750333",,terminal_output +5730,12948622,"TERMINAL",0,0,"3881444",,terminal_output +5731,12949639,"TERMINAL",0,0,"4992555",,terminal_output +5732,12950718,"TERMINAL",0,0,"550503666",,terminal_output +5733,12951743,"TERMINAL",0,0,"6114777",,terminal_output +5734,12952795,"TERMINAL",0,0,"7225888",,terminal_output +5735,12953886,"TERMINAL",0,0,"8336999",,terminal_output +5736,12954889,"TERMINAL",0,0,"9447203030",,terminal_output +5737,12955939,"TERMINAL",0,0,"10558111",,terminal_output +5738,12957017,"TERMINAL",0,0,"1669222",,terminal_output +5739,12958037,"TERMINAL",0,0,"2773:00333",,terminal_output +5740,12959088,"TERMINAL",0,0,"3881444",,terminal_output +5741,12960174,"TERMINAL",0,0,"4992555",,terminal_output +5742,12961261,"TERMINAL",0,0,"52:002:003666",,terminal_output +5743,12962286,"TERMINAL",0,0,"6114777",,terminal_output +5744,12963307,"TERMINAL",0,0,"7225888",,terminal_output +5745,12964378,"TERMINAL",0,0,"8447304040",,terminal_output +5746,12965383,"TERMINAL",0,0,"20558111",,terminal_output +5747,12966429,"TERMINAL",0,0,"1669222",,terminal_output +5748,12967469,"TERMINAL",0,0,"27710333",,terminal_output +5749,12968519,"TERMINAL",0,0,"3881444",,terminal_output +5750,12969657,"TERMINAL",0,0,"4992555",,terminal_output +5751,12970634,"TERMINAL",0,0,"510103666",,terminal_output +5752,12971704,"TERMINAL",0,0,"6114777",,terminal_output +5753,12972750,"TERMINAL",0,0,"7225888",,terminal_output +5754,12973853,"TERMINAL",0,0,"8336999",,terminal_output +5755,12974821,"TERMINAL",0,0,"9447405050",,terminal_output +5756,12975868,"TERMINAL",0,0,"30558111",,terminal_output +5757,12976920,"TERMINAL",0,0,"1669222",,terminal_output +5758,12977966,"TERMINAL",0,0,"27720333",,terminal_output +5759,12979084,"TERMINAL",0,0,"3881444",,terminal_output +5760,12980145,"TERMINAL",0,0,"4992555",,terminal_output 
+5761,12981130,"TERMINAL",0,0,"520203666",,terminal_output +5762,12982251,"TERMINAL",0,0,"6114777",,terminal_output +5763,12983230,"TERMINAL",0,0,"7225888",,terminal_output +5764,12984300,"TERMINAL",0,0,"8336999",,terminal_output +5765,12985322,"TERMINAL",0,0,"9447501:001:00",,terminal_output +5766,12986339,"TERMINAL",0,0,"40669222",,terminal_output +5767,12987473,"TERMINAL",0,0,"27730333",,terminal_output +5768,12988501,"TERMINAL",0,0,"3881444",,terminal_output +5769,12989479,"TERMINAL",0,0,"4992555",,terminal_output +5770,12990545,"TERMINAL",0,0,"530303666",,terminal_output +5771,12991672,"TERMINAL",0,0,"6114777",,terminal_output +5772,12992697,"TERMINAL",0,0,"7225888",,terminal_output +5773,12993726,"TERMINAL",0,0,"8336999",,terminal_output +5774,12994711,"TERMINAL",0,0,"94479:001010",,terminal_output +5775,12995758,"TERMINAL",0,0,"50558111",,terminal_output +5776,12996810,"TERMINAL",0,0,"1669222",,terminal_output +5777,12997854,"TERMINAL",0,0,"27740333",,terminal_output +5778,12998942,"TERMINAL",0,0,"3881444",,terminal_output +5779,12999968,"TERMINAL",0,0,"4992555",,terminal_output +5780,13000999,"TERMINAL",0,0,"540403666",,terminal_output +5781,13002117,"TERMINAL",0,0,"6114777",,terminal_output +5782,13003141,"TERMINAL",0,0,"7225888",,terminal_output +5783,13004167,"TERMINAL",0,0,"8336999",,terminal_output +5784,13005195,"TERMINAL",0,0,"9447102020",,terminal_output +5785,13006316,"TERMINAL",0,0,"7:00558111",,terminal_output +5786,13007341,"TERMINAL",0,0,"1669222",,terminal_output +5787,13008357,"TERMINAL",0,0,"28851444",,terminal_output +5788,13009400,"TERMINAL",0,0,"4992555",,terminal_output +5789,13010455,"TERMINAL",0,0,"550503666",,terminal_output +5790,13011507,"TERMINAL",0,0,"6114777",,terminal_output +5791,13012552,"TERMINAL",0,0,"7225888",,terminal_output +5792,13013690,"TERMINAL",0,0,"8336999",,terminal_output +5793,13014716,"TERMINAL",0,0,"9447203030",,terminal_output +5794,13015698,"TERMINAL",0,0,"10558111",,terminal_output +5795,13016748,"TERMINAL",0,0,"1669222",,terminal_output +5796,13017795,"TERMINAL",0,0,"2774:00333",,terminal_output +5797,13018840,"TERMINAL",0,0,"3881444",,terminal_output +5798,13019886,"TERMINAL",0,0,"4992555",,terminal_output +5799,13020963,"TERMINAL",0,0,"53:003:003666",,terminal_output +5800,13021982,"TERMINAL",0,0,"6114777",,terminal_output +5801,13023110,"TERMINAL",0,0,"7225888",,terminal_output +5802,13024133,"TERMINAL",0,0,"8336999",,terminal_output +5803,13025168,"TERMINAL",0,0,"9447304040",,terminal_output +5804,13026286,"TERMINAL",0,0,"20558111",,terminal_output +5805,13027244,"TERMINAL",0,0,"1669222",,terminal_output +5806,13028334,"TERMINAL",0,0,"27710333",,terminal_output +5807,13029364,"TERMINAL",0,0,"3992555",,terminal_output +5808,13030483,"TERMINAL",0,0,"510103666",,terminal_output +5809,13031515,"TERMINAL",0,0,"6114777",,terminal_output +5810,13032532,"TERMINAL",0,0,"7225888",,terminal_output +5811,13033558,"TERMINAL",0,0,"8336999",,terminal_output +5812,13034584,"TERMINAL",0,0,"9447405050",,terminal_output +5813,13035707,"TERMINAL",0,0,"30558111",,terminal_output +5814,13036735,"TERMINAL",0,0,"1669222",,terminal_output +5815,13037758,"TERMINAL",0,0,"27720333",,terminal_output +5816,13038794,"TERMINAL",0,0,"3881444",,terminal_output +5817,13039910,"TERMINAL",0,0,"4992555",,terminal_output +5818,13040928,"TERMINAL",0,0,"520203666",,terminal_output +5819,13041947,"TERMINAL",0,0,"6114777",,terminal_output +5820,13042989,"TERMINAL",0,0,"7225888",,terminal_output +5821,13044035,"TERMINAL",0,0,"8336999",,terminal_output 
+5822,13045125,"TERMINAL",0,0,"9447502:002:00",,terminal_output +5823,13046152,"TERMINAL",0,0,"40558111",,terminal_output +5824,13047277,"TERMINAL",0,0,"1669222",,terminal_output +5825,13048286,"TERMINAL",0,0,"27730333",,terminal_output +5826,13049288,"TERMINAL",0,0,"3881444",,terminal_output +5827,13050350,"TERMINAL",0,0,"430303666",,terminal_output +5828,13051374,"TERMINAL",0,0,"6114777",,terminal_output +5829,13052411,"TERMINAL",0,0,"7225888",,terminal_output +5830,13053524,"TERMINAL",0,0,"8336999",,terminal_output +5831,13054506,"TERMINAL",0,0,"944710:001010",,terminal_output +5832,13055576,"TERMINAL",0,0,"50558111",,terminal_output +5833,13056704,"TERMINAL",0,0,"1669222",,terminal_output +5834,13057725,"TERMINAL",0,0,"27740333",,terminal_output +5835,13058750,"TERMINAL",0,0,"3881444",,terminal_output +5836,13059772,"TERMINAL",0,0,"4992555",,terminal_output +5837,13060830,"TERMINAL",0,0,"540403666",,terminal_output +5838,13061842,"TERMINAL",0,0,"6114777",,terminal_output +5839,13062890,"TERMINAL",0,0,"7225888",,terminal_output +5840,13063968,"TERMINAL",0,0,"8336999",,terminal_output +5841,13064988,"TERMINAL",0,0,"9447102020",,terminal_output +5842,13066118,"TERMINAL",0,0,"8:00558111",,terminal_output +5843,13067139,"TERMINAL",0,0,"1669222",,terminal_output +5844,13068169,"TERMINAL",0,0,"27750333",,terminal_output +5845,13069172,"TERMINAL",0,0,"3881444",,terminal_output +5846,13070226,"TERMINAL",0,0,"4992555",,terminal_output +5847,13071271,"TERMINAL",0,0,"550503666",,terminal_output +5848,13072364,"TERMINAL",0,0,"6225888",,terminal_output +5849,13073369,"TERMINAL",0,0,"8336999",,terminal_output +5850,13074421,"TERMINAL",0,0,"9447203030",,terminal_output +5851,13075537,"TERMINAL",0,0,"10558111",,terminal_output +5852,13076564,"TERMINAL",0,0,"1669222",,terminal_output +5853,13077586,"TERMINAL",0,0,"2775:00333",,terminal_output +5854,13078726,"TERMINAL",0,0,"3881444",,terminal_output +5855,13079737,"TERMINAL",0,0,"4992555",,terminal_output +5856,13080798,"TERMINAL",0,0,"54:004:003666",,terminal_output +5857,13081851,"TERMINAL",0,0,"6114777",,terminal_output +5858,13082911,"TERMINAL",0,0,"7225888",,terminal_output +5859,13084039,"TERMINAL",0,0,"8336999",,terminal_output +5860,13084988,"TERMINAL",0,0,"9447304040",,terminal_output +5861,13086050,"TERMINAL",0,0,"20558111",,terminal_output +5862,13087113,"TERMINAL",0,0,"1669222",,terminal_output +5863,13088134,"TERMINAL",0,0,"27710333",,terminal_output +5864,13089261,"TERMINAL",0,0,"3881444",,terminal_output +5865,13090288,"TERMINAL",0,0,"4992555",,terminal_output +5866,13091309,"TERMINAL",0,0,"510103666",,terminal_output +5867,13092439,"TERMINAL",0,0,"6225888",,terminal_output +5868,13093382,"TERMINAL",0,0,"8336999",,terminal_output +5869,13094437,"TERMINAL",0,0,"9447405050",,terminal_output +5870,13095511,"TERMINAL",0,0,"30558111",,terminal_output +5871,13096635,"TERMINAL",0,0,"1669222",,terminal_output +5872,13097582,"TERMINAL",0,0,"27720333",,terminal_output +5873,13098683,"TERMINAL",0,0,"3881444",,terminal_output +5874,13099707,"TERMINAL",0,0,"4992555",,terminal_output +5875,13100735,"TERMINAL",0,0,"520203666",,terminal_output +5876,13101787,"TERMINAL",0,0,"6114777",,terminal_output +5877,13102836,"TERMINAL",0,0,"7225888",,terminal_output +5878,13103887,"TERMINAL",0,0,"8336999",,terminal_output +5879,13104982,"TERMINAL",0,0,"9447503:003:00",,terminal_output +5880,13106158,"TERMINAL",0,0,"40558111",,terminal_output +5881,13107114,"TERMINAL",0,0,"1669222",,terminal_output +5882,13108209,"TERMINAL",0,0,"27730333",,terminal_output 
+5883,13109229,"TERMINAL",0,0,"3881444",,terminal_output +5884,13110238,"TERMINAL",0,0,"4992555",,terminal_output +5885,13111285,"TERMINAL",0,0,"530303666",,terminal_output +5886,13112404,"TERMINAL",0,0,"6225888",,terminal_output +5887,13113378,"TERMINAL",0,0,"8336999",,terminal_output +5888,13114427,"TERMINAL",0,0,"94471:001010",,terminal_output +5889,13115581,"TERMINAL",0,0,"50558111",,terminal_output +5890,13116602,"TERMINAL",0,0,"1669222",,terminal_output +5891,13117625,"TERMINAL",0,0,"27740333",,terminal_output +5892,13118650,"TERMINAL",0,0,"3881444",,terminal_output +5893,13119777,"TERMINAL",0,0,"4992555",,terminal_output +5894,13120726,"TERMINAL",0,0,"540403666",,terminal_output +5895,13121781,"TERMINAL",0,0,"6114777",,terminal_output +5896,13122823,"TERMINAL",0,0,"7225888",,terminal_output +5897,13123873,"TERMINAL",0,0,"8336999",,terminal_output +5898,13124920,"TERMINAL",0,0,"9447102020",,terminal_output +5899,13126022,"TERMINAL",0,0,"9:00558111",,terminal_output +5900,13127000,"TERMINAL",0,0,"1669222",,terminal_output +5901,13128073,"TERMINAL",0,0,"27750333",,terminal_output +5902,13129104,"TERMINAL",0,0,"3881444",,terminal_output +5903,13130160,"TERMINAL",0,0,"4992555",,terminal_output +5904,13131245,"TERMINAL",0,0,"550503666",,terminal_output +5905,13132271,"TERMINAL",0,0,"6114777",,terminal_output +5906,13133400,"TERMINAL",0,0,"7225888",,terminal_output +5907,13134368,"TERMINAL",0,0,"8447203030",,terminal_output +5908,13135443,"TERMINAL",0,0,"10558111",,terminal_output +5909,13136472,"TERMINAL",0,0,"1669222",,terminal_output +5910,13137599,"TERMINAL",0,0,"2776:00333",,terminal_output +5911,13138618,"TERMINAL",0,0,"3881444",,terminal_output +5912,13139587,"TERMINAL",0,0,"4992555",,terminal_output +5913,13140667,"TERMINAL",0,0,"55:005:003666",,terminal_output +5914,13141713,"TERMINAL",0,0,"6114777",,terminal_output +5915,13142726,"TERMINAL",0,0,"7225888",,terminal_output +5916,13143772,"TERMINAL",0,0,"8336999",,terminal_output +5917,13144818,"TERMINAL",0,0,"9447304040",,terminal_output +5918,13145868,"TERMINAL",0,0,"20558111",,terminal_output +5919,13147020,"TERMINAL",0,0,"1669222",,terminal_output +5920,13147968,"TERMINAL",0,0,"27710333",,terminal_output +5921,13149063,"TERMINAL",0,0,"3881444",,terminal_output +5922,13150064,"TERMINAL",0,0,"4992555",,terminal_output +5923,13151212,"TERMINAL",0,0,"510103666",,terminal_output +5924,13152237,"TERMINAL",0,0,"6114777",,terminal_output +5925,13153263,"TERMINAL",0,0,"7225888",,terminal_output +5926,13154261,"TERMINAL",0,0,"8336999",,terminal_output +5927,13155310,"TERMINAL",0,0,"9447405050",,terminal_output +5928,13156440,"TERMINAL",0,0,"30669222",,terminal_output +5929,13157462,"TERMINAL",0,0,"27720333",,terminal_output +5930,13158488,"TERMINAL",0,0,"3881444",,terminal_output +5931,13159497,"TERMINAL",0,0,"4992555",,terminal_output +5932,13160543,"TERMINAL",0,0,"520203666",,terminal_output +5933,13161660,"TERMINAL",0,0,"6114777",,terminal_output +5934,13162645,"TERMINAL",0,0,"7225888",,terminal_output +5935,13163723,"TERMINAL",0,0,"8336999",,terminal_output +5936,13164743,"TERMINAL",0,0,"9447504:004:00",,terminal_output +5937,13165792,"TERMINAL",0,0,"40558111",,terminal_output +5938,13166838,"TERMINAL",0,0,"1669222",,terminal_output +5939,13167905,"TERMINAL",0,0,"27730333",,terminal_output +5940,13168936,"TERMINAL",0,0,"3881444",,terminal_output +5941,13170055,"TERMINAL",0,0,"4992555",,terminal_output +5942,13171082,"TERMINAL",0,0,"530303666",,terminal_output +5943,13172082,"TERMINAL",0,0,"6114777",,terminal_output 
+5944,13173229,"TERMINAL",0,0,"7225888",,terminal_output +5945,13174254,"TERMINAL",0,0,"8336999",,terminal_output +5946,13175218,"TERMINAL",0,0,"94472:001010",,terminal_output +5947,13176303,"TERMINAL",0,0,"50558111",,terminal_output +5948,13177315,"TERMINAL",0,0,"17740333",,terminal_output +5949,13178362,"TERMINAL",0,0,"3881444",,terminal_output +5950,13179415,"TERMINAL",0,0,"4992555",,terminal_output +5951,13180506,"TERMINAL",0,0,"540403666",,terminal_output +5952,13181511,"TERMINAL",0,0,"6114777",,terminal_output +5953,13182651,"TERMINAL",0,0,"7225888",,terminal_output +5954,13183676,"TERMINAL",0,0,"8336999",,terminal_output +5955,13184657,"TERMINAL",0,0,"9447102020",,terminal_output +5956,13185719,"TERMINAL",0,0,"40:00558111",,terminal_output +5957,13186754,"TERMINAL",0,0,"1669222",,terminal_output +5958,13187803,"TERMINAL",0,0,"27750333",,terminal_output +5959,13188857,"TERMINAL",0,0,"3881444",,terminal_output +5960,13189919,"TERMINAL",0,0,"4992555",,terminal_output +5961,13190887,"TERMINAL",0,0,"watch",,terminal_focus +5962,13190995,"TERMINAL",0,0,"550503666",,terminal_output +5963,13191126,"TERMINAL",0,0,"Every 1.0s: squeue --mehkn1991.localdomain: Thu Jul 24 17:40:05 2025JOBID PARTITION NAME USER ST\tTIME NODES NODELIST(REASON)3373408 accelerat train_dy tum_cte0 PD\t0:00\t 8 (Priority)3373407 accelerat train_dy tum_cte0 PD\t0:00\t 2 (Priority)3371237 accelerat train_dy tum_cte0 R 17:05:50\t 8 hkn[0618,0625-0626,0628-0631,0634]3371238 accelerat train_dy tum_cte0 R 17:05:50\t 2 hkn[0706,0710]3372631 accelerat train_dy tum_cte0 R 3:26:53\t 2 hkn[0515,0622]3372629 accelerat train_dy tum_cte0 R 4:12:16\t 8 hkn[0410,0429,0520,0607,0610,0810,0814,0817]3373409 accelerat wrap tum_cte0 R24:26\t 1 hkn07233373410 accelerat wrap tum_cte0 R24:26\t 1 hkn0723",,terminal_output +5964,13192277,"TERMINAL",0,0,"6114777",,terminal_output +5965,13193301,"TERMINAL",0,0,"7225888",,terminal_output +5966,13194366,"TERMINAL",0,0,"8336999",,terminal_output +5967,13195326,"TERMINAL",0,0,"9558213131",,terminal_output +5968,13196408,"TERMINAL",0,0,"11669222",,terminal_output +5969,13197500,"TERMINAL",0,0,"2777:00333",,terminal_output +5970,13198477,"TERMINAL",0,0,"3881444",,terminal_output +5971,13199526,"TERMINAL",0,0,"4992555",,terminal_output +5972,13200571,"TERMINAL",0,0,"56:006:003666",,terminal_output +5973,13201712,"TERMINAL",0,0,"6114777",,terminal_output +5974,13202666,"TERMINAL",0,0,"7225888",,terminal_output +5975,13203748,"TERMINAL",0,0,"8336999",,terminal_output +5976,13204774,"TERMINAL",0,0,"9447304040",,terminal_output +5977,13205799,"TERMINAL",0,0,"20558111",,terminal_output +5978,13206846,"TERMINAL",0,0,"1669222",,terminal_output +5979,13207890,"TERMINAL",0,0,"27710333",,terminal_output +5980,13208967,"TERMINAL",0,0,"3881444",,terminal_output +5981,13210094,"TERMINAL",0,0,"4992555",,terminal_output +5982,13211121,"TERMINAL",0,0,"510103666",,terminal_output +5983,13212087,"TERMINAL",0,0,"6114777",,terminal_output +5984,13213166,"TERMINAL",0,0,"7225888",,terminal_output +5985,13214297,"TERMINAL",0,0,"8336999",,terminal_output +5986,13215238,"TERMINAL",0,0,"9447405050",,terminal_output +5987,13216364,"TERMINAL",0,0,"30558111",,terminal_output +5988,13217371,"TERMINAL",0,0,"17720333",,terminal_output +5989,13218389,"TERMINAL",0,0,"3881444",,terminal_output +5990,13219438,"TERMINAL",0,0,"4992555",,terminal_output +5991,13220539,"TERMINAL",0,0,"520203666",,terminal_output +5992,13221523,"TERMINAL",0,0,"6114777",,terminal_output +5993,13222572,"TERMINAL",0,0,"7225888",,terminal_output 
+5994,13223618,"TERMINAL",0,0,"8336999",,terminal_output +5995,13224675,"TERMINAL",0,0,"9447505:005:00",,terminal_output +5996,13225772,"TERMINAL",0,0,"40558111",,terminal_output +5997,13226783,"TERMINAL",0,0,"1669222",,terminal_output +5998,13227816,"TERMINAL",0,0,"27730333",,terminal_output +5999,13228926,"TERMINAL",0,0,"3881444",,terminal_output +6000,13230104,"TERMINAL",0,0,"4992555",,terminal_output +6001,13230972,"TERMINAL",0,0,"530303666",,terminal_output +6002,13232024,"TERMINAL",0,0,"6114777",,terminal_output +6003,13233136,"TERMINAL",0,0,"7225888",,terminal_output +6004,13234159,"TERMINAL",0,0,"8336999",,terminal_output +6005,13235173,"TERMINAL",0,0,"94473:001010",,terminal_output +6006,13236309,"TERMINAL",0,0,"50558111",,terminal_output +6007,13237265,"TERMINAL",0,0,"1669222",,terminal_output +6008,13238360,"TERMINAL",0,0,"28841444",,terminal_output +6009,13239367,"TERMINAL",0,0,"4992555",,terminal_output +6010,13240507,"TERMINAL",0,0,"540403666",,terminal_output +6011,13241464,"TERMINAL",0,0,"6114777",,terminal_output +6012,13242557,"TERMINAL",0,0,"7225888",,terminal_output +6013,13243581,"TERMINAL",0,0,"8336999",,terminal_output +6014,13244608,"TERMINAL",0,0,"9447102020",,terminal_output +6015,13245657,"TERMINAL",0,0,"1:00558111",,terminal_output +6016,13246754,"TERMINAL",0,0,"1669222",,terminal_output +6017,13247781,"TERMINAL",0,0,"27750333",,terminal_output +6018,13248804,"TERMINAL",0,0,"3881444",,terminal_output +6019,13249846,"TERMINAL",0,0,"4992555",,terminal_output +6020,13250899,"TERMINAL",0,0,"550503666",,terminal_output +6021,13251947,"TERMINAL",0,0,"6114777",,terminal_output +6022,13253002,"TERMINAL",0,0,"7225888",,terminal_output +6023,13254129,"TERMINAL",0,0,"8336999",,terminal_output +6024,13255194,"TERMINAL",0,0,"9447203030",,terminal_output +6025,13256134,"TERMINAL",0,0,"10558111",,terminal_output +6026,13257206,"TERMINAL",0,0,"1669222",,terminal_output +6027,13258327,"TERMINAL",0,0,"2778:00333",,terminal_output +6028,13259370,"TERMINAL",0,0,"3881444",,terminal_output +6029,13260337,"TERMINAL",0,0,"47:007:003666",,terminal_output +6030,13261400,"TERMINAL",0,0,"6114777",,terminal_output +6031,13262522,"TERMINAL",0,0,"7225888",,terminal_output +6032,13263481,"TERMINAL",0,0,"8336999",,terminal_output +6033,13264572,"TERMINAL",0,0,"9447304040",,terminal_output +6034,13265597,"TERMINAL",0,0,"20558111",,terminal_output +6035,13266720,"TERMINAL",0,0,"1669222",,terminal_output +6036,13267746,"TERMINAL",0,0,"27710333",,terminal_output +6037,13268738,"TERMINAL",0,0,"3881444",,terminal_output +6038,13269794,"TERMINAL",0,0,"4992555",,terminal_output +6039,13270830,"TERMINAL",0,0,"510103666",,terminal_output +6040,13271885,"TERMINAL",0,0,"6114777",,terminal_output +6041,13272923,"TERMINAL",0,0,"7225888",,terminal_output +6042,13273993,"TERMINAL",0,0,"8336999",,terminal_output +6043,13275024,"TERMINAL",0,0,"9447405050",,terminal_output +6044,13276073,"TERMINAL",0,0,"30558111",,terminal_output +6045,13277167,"TERMINAL",0,0,"1669222",,terminal_output +6046,13278192,"TERMINAL",0,0,"27720333",,terminal_output +6047,13279318,"TERMINAL",0,0,"3881444",,terminal_output +6048,13280258,"TERMINAL",0,0,"4992555",,terminal_output +6049,13281299,"TERMINAL",0,0,"520203666",,terminal_output +6050,13282390,"TERMINAL",0,0,"6225888",,terminal_output +6051,13283420,"TERMINAL",0,0,"8336999",,terminal_output +6052,13284439,"TERMINAL",0,0,"9447506:006:00",,terminal_output +6053,13285562,"TERMINAL",0,0,"40558111",,terminal_output +6054,13286587,"TERMINAL",0,0,"1669222",,terminal_output 
+6055,13287612,"TERMINAL",0,0,"27730333",,terminal_output +6056,13288630,"TERMINAL",0,0,"3881444",,terminal_output +6057,13289760,"TERMINAL",0,0,"4992555",,terminal_output +6058,13290783,"TERMINAL",0,0,"530303666",,terminal_output +6059,13291788,"TERMINAL",0,0,"6114777",,terminal_output +6060,13292836,"TERMINAL",0,0,"7225888",,terminal_output +6061,13294238,"TERMINAL",0,0,"8336999",,terminal_output +6062,13295394,"TERMINAL",0,0,"94474:001010",,terminal_output +6063,13296425,"TERMINAL",0,0,"50669222",,terminal_output +6064,13297393,"TERMINAL",0,0,"27740333",,terminal_output +6065,13298440,"TERMINAL",0,0,"3881444",,terminal_output +6066,13299480,"TERMINAL",0,0,"4992555",,terminal_output +6067,13300531,"TERMINAL",0,0,"540403666",,terminal_output +6068,13301579,"TERMINAL",0,0,"6114777",,terminal_output +6069,13302662,"TERMINAL",0,0,"7225888",,terminal_output +6070,13303690,"TERMINAL",0,0,"8336999",,terminal_output +6071,13304817,"TERMINAL",0,0,"9447102020",,terminal_output +6072,13305840,"TERMINAL",0,0,"2:00558111",,terminal_output +6073,13306816,"TERMINAL",0,0,"1669222",,terminal_output +6074,13307859,"TERMINAL",0,0,"27750333",,terminal_output +6075,13308912,"TERMINAL",0,0,"3881444",,terminal_output +6076,13309953,"TERMINAL",0,0,"4992555",,terminal_output +6077,13311059,"TERMINAL",0,0,"550503666",,terminal_output +6078,13312087,"TERMINAL",0,0,"6114777",,terminal_output +6079,13313210,"TERMINAL",0,0,"7225888",,terminal_output +6080,13314237,"TERMINAL",0,0,"8336999",,terminal_output +6081,13315200,"TERMINAL",0,0,"9447203030",,terminal_output +6082,13316284,"TERMINAL",0,0,"10558111",,terminal_output +6083,13317413,"TERMINAL",0,0,"1669222",,terminal_output +6084,13318357,"TERMINAL",0,0,"2889:01444",,terminal_output +6085,13319407,"TERMINAL",0,0,"4992555",,terminal_output +6086,13320482,"TERMINAL",0,0,"58:008:003666",,terminal_output +6087,13321488,"TERMINAL",0,0,"6114777",,terminal_output +6088,13322584,"TERMINAL",0,0,"7225888",,terminal_output +6089,13323581,"TERMINAL",0,0,"8336999",,terminal_output +6090,13324639,"TERMINAL",0,0,"9447304040",,terminal_output +6091,13325713,"TERMINAL",0,0,"20558111",,terminal_output +6092,13326832,"TERMINAL",0,0,"1669222",,terminal_output +6093,13327789,"TERMINAL",0,0,"27710333",,terminal_output +6094,13328844,"TERMINAL",0,0,"3881444",,terminal_output +6095,13329892,"TERMINAL",0,0,"4992555",,terminal_output +6096,13330941,"TERMINAL",0,0,"510103666",,terminal_output +6097,13331988,"TERMINAL",0,0,"6114777",,terminal_output +6098,13333039,"TERMINAL",0,0,"7225888",,terminal_output +6099,13334101,"TERMINAL",0,0,"8336999",,terminal_output +6100,13335165,"TERMINAL",0,0,"9447405050",,terminal_output +6101,13336185,"TERMINAL",0,0,"30558111",,terminal_output +6102,13337228,"TERMINAL",0,0,"1669222",,terminal_output +6103,13338304,"TERMINAL",0,0,"27720333",,terminal_output +6104,13339366,"TERMINAL",0,0,"3992555",,terminal_output +6105,13340371,"TERMINAL",0,0,"520203666",,terminal_output +6106,13341479,"TERMINAL",0,0,"6114777",,terminal_output +6107,13342462,"TERMINAL",0,0,"7225888",,terminal_output +6108,13343527,"TERMINAL",0,0,"8336999",,terminal_output +6109,13344559,"TERMINAL",0,0,"9447507:007:00",,terminal_output +6110,13345675,"TERMINAL",0,0,"40558111",,terminal_output +6111,13346648,"TERMINAL",0,0,"1669222",,terminal_output +6112,13347690,"TERMINAL",0,0,"27730333",,terminal_output +6113,13348739,"TERMINAL",0,0,"3881444",,terminal_output +6114,13349789,"TERMINAL",0,0,"4992555",,terminal_output +6115,13350837,"TERMINAL",0,0,"530303666",,terminal_output 
+6116,13351878,"TERMINAL",0,0,"6114777",,terminal_output +6117,13352923,"TERMINAL",0,0,"7225888",,terminal_output +6118,13353981,"TERMINAL",0,0,"8336999",,terminal_output +6119,13355019,"TERMINAL",0,0,"94475:001010",,terminal_output +6120,13356065,"TERMINAL",0,0,"50558111",,terminal_output +6121,13357141,"TERMINAL",0,0,"1669222",,terminal_output +6122,13358154,"TERMINAL",0,0,"27740333",,terminal_output +6123,13359291,"TERMINAL",0,0,"3881444",,terminal_output +6124,13360254,"TERMINAL",0,0,"4992555",,terminal_output +6125,13361295,"TERMINAL",0,0,"540403666",,terminal_output +6126,13362362,"TERMINAL",0,0,"6225888",,terminal_output +6127,13363490,"TERMINAL",0,0,"8336999",,terminal_output +6128,13364437,"TERMINAL",0,0,"9447102020",,terminal_output +6129,13365492,"TERMINAL",0,0,"3:00558111",,terminal_output +6130,13366548,"TERMINAL",0,0,"1669222",,terminal_output +6131,13367587,"TERMINAL",0,0,"27750333",,terminal_output +6132,13368712,"TERMINAL",0,0,"3881444",,terminal_output +6133,13369738,"TERMINAL",0,0,"4992555",,terminal_output +6134,13370776,"TERMINAL",0,0,"550503666",,terminal_output +6135,13371784,"TERMINAL",0,0,"6114777",,terminal_output +6136,13372818,"TERMINAL",0,0,"7225888",,terminal_output +6137,13373863,"TERMINAL",0,0,"8336999",,terminal_output +6138,13374962,"TERMINAL",0,0,"9447203030",,terminal_output +6139,13375960,"TERMINAL",0,0,"10558111",,terminal_output +6140,13377021,"TERMINAL",0,0,"1669222",,terminal_output +6141,13378134,"TERMINAL",0,0,"27730:00333",,terminal_output +6142,13379158,"TERMINAL",0,0,"3881444",,terminal_output +6143,13380183,"TERMINAL",0,0,"4992555",,terminal_output +6144,13381207,"TERMINAL",0,0,"59:009:003666",,terminal_output +6145,13382333,"TERMINAL",0,0,"6114777",,terminal_output +6146,13383356,"TERMINAL",0,0,"7225888",,terminal_output +6147,13384370,"TERMINAL",0,0,"8447304040",,terminal_output +6148,13385379,"TERMINAL",0,0,"20558111",,terminal_output +6149,13386430,"TERMINAL",0,0,"1669222",,terminal_output +6150,13387481,"TERMINAL",0,0,"27710333",,terminal_output +6151,13388524,"TERMINAL",0,0,"3881444",,terminal_output +6152,13389609,"TERMINAL",0,0,"4992555",,terminal_output +6153,13390640,"TERMINAL",0,0,"510103666",,terminal_output +6154,13391753,"TERMINAL",0,0,"6114777",,terminal_output +6155,13392791,"TERMINAL",0,0,"7225888",,terminal_output +6156,13393801,"TERMINAL",0,0,"8336999",,terminal_output +6157,13394847,"TERMINAL",0,0,"9447405050",,terminal_output +6158,13395862,"TERMINAL",0,0,"30558111",,terminal_output +6159,13396901,"TERMINAL",0,0,"1669222",,terminal_output +6160,13397946,"TERMINAL",0,0,"27720333",,terminal_output +6161,13399034,"TERMINAL",0,0,"3881444",,terminal_output +6162,13400036,"TERMINAL",0,0,"4992555",,terminal_output +6163,13401089,"TERMINAL",0,0,"520203666",,terminal_output +6164,13402135,"TERMINAL",0,0,"6114777",,terminal_output +6165,13403228,"TERMINAL",0,0,"7225888",,terminal_output +6166,13404229,"TERMINAL",0,0,"8336999",,terminal_output +6167,13405276,"TERMINAL",0,0,"9447508:008:00",,terminal_output +6168,13406318,"TERMINAL",0,0,"40669222",,terminal_output +6169,13407370,"TERMINAL",0,0,"27730333",,terminal_output +6170,13408414,"TERMINAL",0,0,"3881444",,terminal_output +6171,13409461,"TERMINAL",0,0,"4992555",,terminal_output +6172,13410510,"TERMINAL",0,0,"530303666",,terminal_output +6173,13411552,"TERMINAL",0,0,"6114777",,terminal_output +6174,13412595,"TERMINAL",0,0,"7225888",,terminal_output +6175,13413670,"TERMINAL",0,0,"8336999",,terminal_output +6176,13414833,"TERMINAL",0,0,"972884476:001010",,terminal_output 
+6177,13415856,"TERMINAL",0,0,"50558111",,terminal_output +6178,13416893,"TERMINAL",0,0,"1669222",,terminal_output +6179,13417945,"TERMINAL",0,0,"27740333",,terminal_output +6180,13418987,"TERMINAL",0,0,"3881444",,terminal_output +6181,13420034,"TERMINAL",0,0,"4992555",,terminal_output +6182,13421082,"TERMINAL",0,0,"540403666",,terminal_output +6183,13422132,"TERMINAL",0,0,"6114777",,terminal_output +6184,13423178,"TERMINAL",0,0,"7225888",,terminal_output +6185,13424317,"TERMINAL",0,0,"8336999",,terminal_output +6186,13425341,"TERMINAL",0,0,"9447102020",,terminal_output +6187,13426366,"TERMINAL",0,0,"4:00669222",,terminal_output +6188,13427391,"TERMINAL",0,0,"27750333",,terminal_output +6189,13428431,"TERMINAL",0,0,"3881444",,terminal_output +6190,13429479,"TERMINAL",0,0,"4992555",,terminal_output +6191,13430565,"TERMINAL",0,0,"550503666",,terminal_output +6192,13431573,"TERMINAL",0,0,"6114777",,terminal_output +6193,13432619,"TERMINAL",0,0,"7225888",,terminal_output +6194,13433665,"TERMINAL",0,0,"8336999",,terminal_output +6195,13434711,"TERMINAL",0,0,"9447203030",,terminal_output +6196,13435800,"TERMINAL",0,0,"10558111",,terminal_output +6197,13436816,"TERMINAL",0,0,"1669222",,terminal_output +6198,13437871,"TERMINAL",0,0,"2771:00333",,terminal_output +6199,13438915,"TERMINAL",0,0,"3881444",,terminal_output +6200,13439963,"TERMINAL",0,0,"4992555",,terminal_output +6201,13441016,"TERMINAL",0,0,"510:0010:003666",,terminal_output +6202,13442058,"TERMINAL",0,0,"6114777",,terminal_output +6203,13443166,"TERMINAL",0,0,"7225888",,terminal_output +6204,13444221,"TERMINAL",0,0,"8336999",,terminal_output +6205,13445204,"TERMINAL",0,0,"9447304040",,terminal_output +6206,13446272,"TERMINAL",0,0,"20558111",,terminal_output +6207,13447298,"TERMINAL",0,0,"1669222",,terminal_output +6208,13448346,"TERMINAL",0,0,"28811444",,terminal_output +6209,13449398,"TERMINAL",0,0,"4992555",,terminal_output +6210,13450444,"TERMINAL",0,0,"510103666",,terminal_output +6211,13451490,"TERMINAL",0,0,"6114777",,terminal_output +6212,13452535,"TERMINAL",0,0,"7225888",,terminal_output +6213,13453578,"TERMINAL",0,0,"8336999",,terminal_output +6214,13454730,"TERMINAL",0,0,"9447405050",,terminal_output +6215,13455667,"TERMINAL",0,0,"30558111",,terminal_output +6216,13456775,"TERMINAL",0,0,"1669222",,terminal_output +6217,13457819,"TERMINAL",0,0,"27720333",,terminal_output +6218,13458826,"TERMINAL",0,0,"3881444",,terminal_output +6219,13459841,"TERMINAL",0,0,"4992555",,terminal_output +6220,13460884,"TERMINAL",0,0,"520203666",,terminal_output +6221,13461933,"TERMINAL",0,0,"6114777",,terminal_output +6222,13462975,"TERMINAL",0,0,"7225888",,terminal_output +6223,13464004,"TERMINAL",0,0,"8336999",,terminal_output +6224,13465045,"TERMINAL",0,0,"9447509:009:00",,terminal_output +6225,13466082,"TERMINAL",0,0,"40558111",,terminal_output +6226,13467135,"TERMINAL",0,0,"1669222",,terminal_output +6227,13468250,"TERMINAL",0,0,"27730333",,terminal_output +6228,13469279,"TERMINAL",0,0,"3881444",,terminal_output +6229,13470280,"TERMINAL",0,0,"4992555",,terminal_output +6230,13471422,"TERMINAL",0,0,"531314777",,terminal_output +6231,13472449,"TERMINAL",0,0,"7225888",,terminal_output +6232,13473472,"TERMINAL",0,0,"8336999",,terminal_output +6233,13474467,"TERMINAL",0,0,"94477:001010",,terminal_output +6234,13475508,"TERMINAL",0,0,"50558111",,terminal_output +6235,13476555,"TERMINAL",0,0,"1669222",,terminal_output +6236,13477598,"TERMINAL",0,0,"27740333",,terminal_output +6237,13478693,"TERMINAL",0,0,"3881444",,terminal_output 
+6238,13479698,"TERMINAL",0,0,"4992555",,terminal_output +6239,13480742,"TERMINAL",0,0,"540403666",,terminal_output +6240,13481817,"TERMINAL",0,0,"6114777",,terminal_output +6241,13482840,"TERMINAL",0,0,"7225888",,terminal_output +6242,13483918,"TERMINAL",0,0,"8336999",,terminal_output +6243,13484960,"TERMINAL",0,0,"9447102020",,terminal_output +6244,13485946,"TERMINAL",0,0,"5:00558111",,terminal_output +6245,13486991,"TERMINAL",0,0,"1669222",,terminal_output +6246,13488112,"TERMINAL",0,0,"27750333",,terminal_output +6247,13489091,"TERMINAL",0,0,"3881444",,terminal_output +6248,13490774,"TERMINAL",0,0,"450503666",,terminal_output +6249,13491713,"TERMINAL",0,0,"6114777",,terminal_output +6250,13492774,"TERMINAL",0,0,"7225888",,terminal_output +6251,13493801,"TERMINAL",0,0,"8336999",,terminal_output +6252,13494853,"TERMINAL",0,0,"9447203030",,terminal_output +6253,13495885,"TERMINAL",0,0,"10558111",,terminal_output +6254,13496929,"TERMINAL",0,0,"1669222",,terminal_output +6255,13497971,"TERMINAL",0,0,"2772:00333",,terminal_output +6256,13499060,"TERMINAL",0,0,"3881444",,terminal_output +6257,13500097,"TERMINAL",0,0,"4992555",,terminal_output +6258,13501093,"TERMINAL",0,0,"51:001:003666",,terminal_output +6259,13502142,"TERMINAL",0,0,"6114777",,terminal_output +6260,13503171,"TERMINAL",0,0,"7225888",,terminal_output +6261,13504221,"TERMINAL",0,0,"8336999",,terminal_output +6262,13505299,"TERMINAL",0,0,"9447304040",,terminal_output +6263,13506340,"TERMINAL",0,0,"20669222",,terminal_output +6264,13507378,"TERMINAL",0,0,"27710333",,terminal_output +6265,13508419,"TERMINAL",0,0,"3881444",,terminal_output +6266,13509468,"TERMINAL",0,0,"4992555",,terminal_output +6267,13510508,"TERMINAL",0,0,"510103666",,terminal_output +6268,13511551,"TERMINAL",0,0,"6114777",,terminal_output +6269,13512688,"TERMINAL",0,0,"7225888",,terminal_output +6270,13513629,"TERMINAL",0,0,"8336999",,terminal_output +6271,13514672,"TERMINAL",0,0,"9447405050",,terminal_output +6272,13515712,"TERMINAL",0,0,"30558111",,terminal_output +6273,13516786,"TERMINAL",0,0,"1669222",,terminal_output +6274,13517810,"TERMINAL",0,0,"27720333",,terminal_output +6275,13518934,"TERMINAL",0,0,"3881444",,terminal_output +6276,13519885,"TERMINAL",0,0,"4992555",,terminal_output +6277,13520933,"TERMINAL",0,0,"520203666",,terminal_output +6278,13521977,"TERMINAL",0,0,"6114777",,terminal_output +6279,13523029,"TERMINAL",0,0,"7225888",,terminal_output +6280,13524077,"TERMINAL",0,0,"8336999",,terminal_output +6281,13525169,"TERMINAL",0,0,"94475030:0030:00",,terminal_output +6282,13526165,"TERMINAL",0,0,"40558111",,terminal_output +6283,13527205,"TERMINAL",0,0,"1669222",,terminal_output +6284,13528245,"TERMINAL",0,0,"27730333",,terminal_output +6285,13529286,"TERMINAL",0,0,"3881444",,terminal_output +6286,13530381,"TERMINAL",0,0,"430303666",,terminal_output +6287,13531411,"TERMINAL",0,0,"6114777",,terminal_output +6288,13532462,"TERMINAL",0,0,"7225888",,terminal_output +6289,13533478,"TERMINAL",0,0,"8336999",,terminal_output +6290,13534522,"TERMINAL",0,0,"94478:001010",,terminal_output +6291,13535566,"TERMINAL",0,0,"50558111",,terminal_output +6292,13536615,"TERMINAL",0,0,"1669222",,terminal_output +6293,13537659,"TERMINAL",0,0,"27740333",,terminal_output +6294,13538692,"TERMINAL",0,0,"3881444",,terminal_output +6295,13539738,"TERMINAL",0,0,"4992555",,terminal_output +6296,13540775,"TERMINAL",0,0,"540403666",,terminal_output +6297,13541812,"TERMINAL",0,0,"6114777",,terminal_output +6298,13542856,"TERMINAL",0,0,"7225888",,terminal_output 
+6299,13543896,"TERMINAL",0,0,"8336999",,terminal_output +6300,13544935,"TERMINAL",0,0,"9447102020",,terminal_output +6301,13545979,"TERMINAL",0,0,"6:00558111",,terminal_output +6302,13547019,"TERMINAL",0,0,"1669222",,terminal_output +6303,13548119,"TERMINAL",0,0,"27750333",,terminal_output +6304,13549108,"TERMINAL",0,0,"3881444",,terminal_output +6305,13550150,"TERMINAL",0,0,"4992555",,terminal_output +6306,13551294,"TERMINAL",0,0,"550503666",,terminal_output +6307,13552231,"TERMINAL",0,0,"6114777",,terminal_output +6308,13553283,"TERMINAL",0,0,"7225888",,terminal_output +6309,13554366,"TERMINAL",0,0,"8447203030",,terminal_output +6310,13555391,"TERMINAL",0,0,"10558111",,terminal_output +6311,13556410,"TERMINAL",0,0,"1669222",,terminal_output +6312,13557542,"TERMINAL",0,0,"2773:00333",,terminal_output +6313,13558491,"TERMINAL",0,0,"3881444",,terminal_output +6314,13559588,"TERMINAL",0,0,"4992555",,terminal_output +6315,13560589,"TERMINAL",0,0,"52:002:003666",,terminal_output +6316,13561631,"TERMINAL",0,0,"6114777",,terminal_output +6317,13562762,"TERMINAL",0,0,"7225888",,terminal_output +6318,13563719,"TERMINAL",0,0,"8336999",,terminal_output +6319,13564762,"TERMINAL",0,0,"9447304040",,terminal_output +6320,13565802,"TERMINAL",0,0,"20558111",,terminal_output +6321,13566866,"TERMINAL",0,0,"1669222",,terminal_output +6322,13567884,"TERMINAL",0,0,"27710333",,terminal_output +6323,13568930,"TERMINAL",0,0,"3881444",,terminal_output +6324,13569967,"TERMINAL",0,0,"4992555",,terminal_output +6325,13571011,"TERMINAL",0,0,"510103666",,terminal_output +6326,13572049,"TERMINAL",0,0,"6114777",,terminal_output +6327,13573103,"TERMINAL",0,0,"7225888",,terminal_output +6328,13574238,"TERMINAL",0,0,"8336999",,terminal_output +6329,13575206,"TERMINAL",0,0,"9447405050",,terminal_output +6330,13576284,"TERMINAL",0,0,"30558111",,terminal_output +6331,13577305,"TERMINAL",0,0,"1669222",,terminal_output +6332,13578435,"TERMINAL",0,0,"28821444",,terminal_output +6333,13579380,"TERMINAL",0,0,"4992555",,terminal_output +6334,13580480,"TERMINAL",0,0,"520203666",,terminal_output +6335,13581495,"TERMINAL",0,0,"6114777",,terminal_output +6336,13582522,"TERMINAL",0,0,"7225888",,terminal_output +6337,13583569,"TERMINAL",0,0,"8336999",,terminal_output +6338,13584622,"TERMINAL",0,0,"9447501:001:00",,terminal_output +6339,13585669,"TERMINAL",0,0,"40558111",,terminal_output +6340,13586724,"TERMINAL",0,0,"1669222",,terminal_output +6341,13587863,"TERMINAL",0,0,"27730333",,terminal_output +6342,13588810,"TERMINAL",0,0,"3881444",,terminal_output +6343,13589880,"TERMINAL",0,0,"4992555",,terminal_output +6344,13590929,"TERMINAL",0,0,"530303666",,terminal_output +6345,13591950,"TERMINAL",0,0,"6114777",,terminal_output +6346,13593000,"TERMINAL",0,0,"7225888",,terminal_output +6347,13594045,"TERMINAL",0,0,"8336999",,terminal_output +6348,13595086,"TERMINAL",0,0,"94479:001010",,terminal_output +6349,13596144,"TERMINAL",0,0,"50558111",,terminal_output +6350,13597185,"TERMINAL",0,0,"1669222",,terminal_output +6351,13598235,"TERMINAL",0,0,"27740333",,terminal_output +6352,13599320,"TERMINAL",0,0,"3881444",,terminal_output +6353,13600333,"TERMINAL",0,0,"440403666",,terminal_output +6354,13601385,"TERMINAL",0,0,"6114777",,terminal_output +6355,13602432,"TERMINAL",0,0,"7225888",,terminal_output +6356,13603519,"TERMINAL",0,0,"8336999",,terminal_output +6357,13604544,"TERMINAL",0,0,"9447102020",,terminal_output +6358,13605566,"TERMINAL",0,0,"7:00558111",,terminal_output +6359,13606693,"TERMINAL",0,0,"1669222",,terminal_output 
+6360,13607657,"TERMINAL",0,0,"27750333",,terminal_output +6361,13608709,"TERMINAL",0,0,"3881444",,terminal_output +6362,13609763,"TERMINAL",0,0,"4992555",,terminal_output +6363,13610794,"TERMINAL",0,0,"550503666",,terminal_output +6364,13611869,"TERMINAL",0,0,"6114777",,terminal_output +6365,13612892,"TERMINAL",0,0,"7225888",,terminal_output +6366,13613949,"TERMINAL",0,0,"8336999",,terminal_output +6367,13614990,"TERMINAL",0,0,"9447203030",,terminal_output +6368,13616036,"TERMINAL",0,0,"10558111",,terminal_output +6369,13617097,"TERMINAL",0,0,"1669222",,terminal_output +6370,13618139,"TERMINAL",0,0,"2774:00333",,terminal_output +6371,13619188,"TERMINAL",0,0,"3881444",,terminal_output +6372,13620238,"TERMINAL",0,0,"4992555",,terminal_output +6373,13621289,"TERMINAL",0,0,"53:003:003666",,terminal_output +6374,13622335,"TERMINAL",0,0,"6225888",,terminal_output +6375,13623401,"TERMINAL",0,0,"8336999",,terminal_output +6376,13624436,"TERMINAL",0,0,"9447304040",,terminal_output +6377,13625483,"TERMINAL",0,0,"20558111",,terminal_output +6378,13626524,"TERMINAL",0,0,"1669222",,terminal_output +6379,13627560,"TERMINAL",0,0,"27710333",,terminal_output +6380,13628608,"TERMINAL",0,0,"3881444",,terminal_output +6381,13629657,"TERMINAL",0,0,"4992555",,terminal_output +6382,13630758,"TERMINAL",0,0,"510103666",,terminal_output +6383,13631755,"TERMINAL",0,0,"6114777",,terminal_output +6384,13632855,"TERMINAL",0,0,"7225888",,terminal_output +6385,13633892,"TERMINAL",0,0,"8336999",,terminal_output +6386,13634956,"TERMINAL",0,0,"9447405050",,terminal_output +6387,13635954,"TERMINAL",0,0,"30558111",,terminal_output +6388,13637004,"TERMINAL",0,0,"1669222",,terminal_output +6389,13638054,"TERMINAL",0,0,"27720333",,terminal_output +6390,13639102,"TERMINAL",0,0,"3881444",,terminal_output +6391,13640150,"TERMINAL",0,0,"4992555",,terminal_output +6392,13641306,"TERMINAL",0,0,"520203666",,terminal_output +6393,13642243,"TERMINAL",0,0,"6114777",,terminal_output +6394,13643294,"TERMINAL",0,0,"7225888",,terminal_output +6395,13644370,"TERMINAL",0,0,"8447502:002:00",,terminal_output +6396,13645401,"TERMINAL",0,0,"40558111",,terminal_output +6397,13646527,"TERMINAL",0,0,"1669222",,terminal_output +6398,13647551,"TERMINAL",0,0,"27730333",,terminal_output +6399,13648541,"TERMINAL",0,0,"3881444",,terminal_output +6400,13649591,"TERMINAL",0,0,"4992555",,terminal_output +6401,13650634,"TERMINAL",0,0,"530303666",,terminal_output +6402,13651752,"TERMINAL",0,0,"6114777",,terminal_output +6403,13652729,"TERMINAL",0,0,"7225888",,terminal_output +6404,13653779,"TERMINAL",0,0,"8336999",,terminal_output +6405,13654826,"TERMINAL",0,0,"944720:001010",,terminal_output +6406,13655895,"TERMINAL",0,0,"50558111",,terminal_output +6407,13656947,"TERMINAL",0,0,"1669222",,terminal_output +6408,13657994,"TERMINAL",0,0,"27740333",,terminal_output +6409,13659039,"TERMINAL",0,0,"3881444",,terminal_output +6410,13660146,"TERMINAL",0,0,"4992555",,terminal_output +6411,13661128,"TERMINAL",0,0,"540403666",,terminal_output +6412,13662192,"TERMINAL",0,0,"6114777",,terminal_output +6413,13663223,"TERMINAL",0,0,"7225888",,terminal_output +6414,13664277,"TERMINAL",0,0,"8336999",,terminal_output +6415,13665322,"TERMINAL",0,0,"9558112121",,terminal_output +6416,13666401,"TERMINAL",0,0,"8:01669222",,terminal_output +6417,13667517,"TERMINAL",0,0,"27750333",,terminal_output +6418,13668498,"TERMINAL",0,0,"3881444",,terminal_output +6419,13669565,"TERMINAL",0,0,"4992555",,terminal_output +6420,13670695,"TERMINAL",0,0,"550503666",,terminal_output 
+6421,13671639,"TERMINAL",0,0,"6114777",,terminal_output +6422,13672743,"TERMINAL",0,0,"7225888",,terminal_output +6423,13673766,"TERMINAL",0,0,"8336999",,terminal_output +6424,13674790,"TERMINAL",0,0,"9447203030",,terminal_output +6425,13675823,"TERMINAL",0,0,"10558111",,terminal_output +6426,13676941,"TERMINAL",0,0,"1669222",,terminal_output +6427,13677923,"TERMINAL",0,0,"2775:00333",,terminal_output +6428,13678994,"TERMINAL",0,0,"3881444",,terminal_output +6429,13680113,"TERMINAL",0,0,"4992555",,terminal_output +6430,13681073,"TERMINAL",0,0,"54:004:003666",,terminal_output +6431,13682160,"TERMINAL",0,0,"6114777",,terminal_output +6432,13683186,"TERMINAL",0,0,"7225888",,terminal_output +6433,13684226,"TERMINAL",0,0,"8336999",,terminal_output +6434,13685261,"TERMINAL",0,0,"9447304040",,terminal_output +6435,13686319,"TERMINAL",0,0,"20669222",,terminal_output +6436,13687384,"TERMINAL",0,0,"27710333",,terminal_output +6437,13688518,"TERMINAL",0,0,"3881444",,terminal_output +6438,13689491,"TERMINAL",0,0,"4992555",,terminal_output +6439,13690564,"TERMINAL",0,0,"510103666",,terminal_output +6440,13691589,"TERMINAL",0,0,"6114777",,terminal_output +6441,13692643,"TERMINAL",0,0,"7225888",,terminal_output +6442,13693737,"TERMINAL",0,0,"8336999",,terminal_output +6443,13694758,"TERMINAL",0,0,"9447405050",,terminal_output +6444,13695896,"TERMINAL",0,0,"30558111",,terminal_output +6445,13696856,"TERMINAL",0,0,"1669222",,terminal_output +6446,13697897,"TERMINAL",0,0,"27720333",,terminal_output +6447,13698945,"TERMINAL",0,0,"3881444",,terminal_output +6448,13699990,"TERMINAL",0,0,"4992555",,terminal_output +6449,13701039,"TERMINAL",0,0,"520203666",,terminal_output +6450,13702131,"TERMINAL",0,0,"6114777",,terminal_output +6451,13703153,"TERMINAL",0,0,"7225888",,terminal_output +6452,13704185,"TERMINAL",0,0,"8336999",,terminal_output +6453,13705235,"TERMINAL",0,0,"9447503:003:00",,terminal_output +6454,13706284,"TERMINAL",0,0,"40558111",,terminal_output +6455,13707334,"TERMINAL",0,0,"17730333",,terminal_output +6456,13708487,"TERMINAL",0,0,"3881444",,terminal_output +6457,13709431,"TERMINAL",0,0,"4992555",,terminal_output +6458,13710531,"TERMINAL",0,0,"530303666",,terminal_output +6459,13711552,"TERMINAL",0,0,"6114777",,terminal_output +6460,13712580,"TERMINAL",0,0,"7225888",,terminal_output +6461,13713630,"TERMINAL",0,0,"8336999",,terminal_output +6462,13714672,"TERMINAL",0,0,"94471:001010",,terminal_output +6463,13715751,"TERMINAL",0,0,"50558111",,terminal_output +6464,13716762,"TERMINAL",0,0,"1669222",,terminal_output +6465,13717837,"TERMINAL",0,0,"27740333",,terminal_output +6466,13718859,"TERMINAL",0,0,"3881444",,terminal_output +6467,13719936,"TERMINAL",0,0,"4992555",,terminal_output +6468,13720960,"TERMINAL",0,0,"540403666",,terminal_output +6469,13722004,"TERMINAL",0,0,"6114777",,terminal_output +6470,13723055,"TERMINAL",0,0,"7225888",,terminal_output +6471,13724148,"TERMINAL",0,0,"8336999",,terminal_output +6472,13725143,"TERMINAL",0,0,"9447102020",,terminal_output +6473,13726196,"TERMINAL",0,0,"9:00558111",,terminal_output +6474,13727308,"TERMINAL",0,0,"1669222",,terminal_output +6475,13728368,"TERMINAL",0,0,"27750333",,terminal_output +6476,13729366,"TERMINAL",0,0,"3992555",,terminal_output +6477,13730382,"TERMINAL",0,0,"550503666",,terminal_output +6478,13731520,"TERMINAL",0,0,"6114777",,terminal_output +6479,13732545,"TERMINAL",0,0,"7225888",,terminal_output +6480,13733516,"TERMINAL",0,0,"8336999",,terminal_output +6481,13734566,"TERMINAL",0,0,"9447203030",,terminal_output 
+6482,13735621,"TERMINAL",0,0,"10558111",,terminal_output +6483,13736741,"TERMINAL",0,0,"1669222",,terminal_output +6484,13737770,"TERMINAL",0,0,"2776:00333",,terminal_output +6485,13738789,"TERMINAL",0,0,"3881444",,terminal_output +6486,13739814,"TERMINAL",0,0,"4992555",,terminal_output +6487,13740941,"TERMINAL",0,0,"55:005:003666",,terminal_output +6488,13741962,"TERMINAL",0,0,"6114777",,terminal_output +6489,13742935,"TERMINAL",0,0,"7225888",,terminal_output +6490,13744011,"TERMINAL",0,0,"8336999",,terminal_output +6491,13745019,"TERMINAL",0,0,"9447304040",,terminal_output +6492,13746064,"TERMINAL",0,0,"20558111",,terminal_output +6493,13747188,"TERMINAL",0,0,"1669222",,terminal_output +6494,13748158,"TERMINAL",0,0,"27710333",,terminal_output +6495,13749234,"TERMINAL",0,0,"3881444",,terminal_output +6496,13750239,"TERMINAL",0,0,"4992555",,terminal_output +6497,13751291,"TERMINAL",0,0,"510103666",,terminal_output +6498,13752336,"TERMINAL",0,0,"6225888",,terminal_output +6499,13753432,"TERMINAL",0,0,"8336999",,terminal_output +6500,13754445,"TERMINAL",0,0,"9447405050",,terminal_output +6501,13755585,"TERMINAL",0,0,"30558111",,terminal_output +6502,13756607,"TERMINAL",0,0,"1669222",,terminal_output +6503,13757592,"TERMINAL",0,0,"27720333",,terminal_output +6504,13758635,"TERMINAL",0,0,"3881444",,terminal_output +6505,13759786,"TERMINAL",0,0,"4992555",,terminal_output +6506,13760810,"TERMINAL",0,0,"520203666",,terminal_output +6507,13761833,"TERMINAL",0,0,"6114777",,terminal_output +6508,13762825,"TERMINAL",0,0,"7225888",,terminal_output +6509,13763868,"TERMINAL",0,0,"8336999",,terminal_output +6510,13765007,"TERMINAL",0,0,"9447504:004:00",,terminal_output +6511,13766031,"TERMINAL",0,0,"40558111",,terminal_output +6512,13767067,"TERMINAL",0,0,"1669222",,terminal_output +6513,13768060,"TERMINAL",0,0,"27730333",,terminal_output +6514,13769104,"TERMINAL",0,0,"3881444",,terminal_output +6515,13770154,"TERMINAL",0,0,"4992555",,terminal_output +6516,13771204,"TERMINAL",0,0,"530303666",,terminal_output +6517,13772259,"TERMINAL",0,0,"6114777",,terminal_output +6518,13773304,"TERMINAL",0,0,"7225888",,terminal_output +6519,13774378,"TERMINAL",0,0,"84472:001010",,terminal_output +6520,13775400,"TERMINAL",0,0,"50558111",,terminal_output +6521,13776473,"TERMINAL",0,0,"1669222",,terminal_output +6522,13777483,"TERMINAL",0,0,"27740333",,terminal_output +6523,13778548,"TERMINAL",0,0,"3881444",,terminal_output +6524,13779596,"TERMINAL",0,0,"4992555",,terminal_output +6525,13780671,"TERMINAL",0,0,"540403666",,terminal_output +6526,13781701,"TERMINAL",0,0,"6114777",,terminal_output +6527,13782822,"TERMINAL",0,0,"7225888",,terminal_output +6528,13783788,"TERMINAL",0,0,"8336999",,terminal_output +6529,13784869,"TERMINAL",0,0,"9447102020",,terminal_output +6530,13785894,"TERMINAL",0,0,"50:00558111",,terminal_output +6531,13787039,"TERMINAL",0,0,"1669222",,terminal_output +6532,13787971,"TERMINAL",0,0,"27750333",,terminal_output +6533,13789016,"TERMINAL",0,0,"3881444",,terminal_output +6534,13790067,"TERMINAL",0,0,"4992555",,terminal_output +6535,13791218,"TERMINAL",0,0,"550503666",,terminal_output +6536,13792155,"TERMINAL",0,0,"6114777",,terminal_output +6537,13793198,"TERMINAL",0,0,"7225888",,terminal_output +6538,13794242,"TERMINAL",0,0,"8336999",,terminal_output +6539,13795294,"TERMINAL",0,0,"9447203030",,terminal_output +6540,13796446,"TERMINAL",0,0,"10669222",,terminal_output +6541,13797467,"TERMINAL",0,0,"2777:00333",,terminal_output +6542,13798492,"TERMINAL",0,0,"3881444",,terminal_output 
+6543,13799530,"TERMINAL",0,0,"4992555",,terminal_output +6544,13800638,"TERMINAL",0,0,"56:006:003666",,terminal_output +6545,13801665,"TERMINAL",0,0,"6114777",,terminal_output +6546,13802687,"TERMINAL",0,0,"7225888",,terminal_output +6547,13803712,"TERMINAL",0,0,"8336999",,terminal_output +6548,13804738,"TERMINAL",0,0,"9447304040",,terminal_output +6549,13805861,"TERMINAL",0,0,"20558111",,terminal_output +6550,13806816,"TERMINAL",0,0,"1669222",,terminal_output +6551,13807862,"TERMINAL",0,0,"27710333",,terminal_output +6552,13809014,"TERMINAL",0,0,"3881444",,terminal_output +6553,13809961,"TERMINAL",0,0,"4992555",,terminal_output +6554,13810994,"TERMINAL",0,0,"510103666",,terminal_output +6555,13812032,"TERMINAL",0,0,"6114777",,terminal_output +6556,13813070,"TERMINAL",0,0,"7225888",,terminal_output +6557,13814113,"TERMINAL",0,0,"8336999",,terminal_output +6558,13815189,"TERMINAL",0,0,"9447405050",,terminal_output +6559,13816200,"TERMINAL",0,0,"30558111",,terminal_output +6560,13817332,"TERMINAL",0,0,"1669222",,terminal_output +6561,13818356,"TERMINAL",0,0,"27720333",,terminal_output +6562,13819366,"TERMINAL",0,0,"3992555",,terminal_output +6563,13820404,"TERMINAL",0,0,"520203666",,terminal_output +6564,13821534,"TERMINAL",0,0,"6114777",,terminal_output +6565,13822553,"TERMINAL",0,0,"7225888",,terminal_output +6566,13823585,"TERMINAL",0,0,"8336999",,terminal_output +6567,13824584,"TERMINAL",0,0,"9447505:005:00",,terminal_output +6568,13825734,"TERMINAL",0,0,"40558111",,terminal_output +6569,13826754,"TERMINAL",0,0,"1669222",,terminal_output +6570,13827778,"TERMINAL",0,0,"27730333",,terminal_output +6571,13828906,"TERMINAL",0,0,"3881444",,terminal_output +6572,13829845,"TERMINAL",0,0,"4992555",,terminal_output +6573,13830954,"TERMINAL",0,0,"530303666",,terminal_output +6574,13831954,"TERMINAL",0,0,"6114777",,terminal_output +6575,13832985,"TERMINAL",0,0,"7225888",,terminal_output +6576,13834093,"TERMINAL",0,0,"8336999",,terminal_output +6577,13835092,"TERMINAL",0,0,"94473:001010",,terminal_output +6578,13836135,"TERMINAL",0,0,"50558111",,terminal_output +6579,13837198,"TERMINAL",0,0,"1669222",,terminal_output +6580,13838232,"TERMINAL",0,0,"27740333",,terminal_output +6581,13839275,"TERMINAL",0,0,"3881444",,terminal_output +6582,13840374,"TERMINAL",0,0,"440403666",,terminal_output +6583,13841382,"TERMINAL",0,0,"6114777",,terminal_output +6584,13842429,"TERMINAL",0,0,"7225888",,terminal_output +6585,13843483,"TERMINAL",0,0,"8336999",,terminal_output +6586,13844525,"TERMINAL",0,0,"9447102020",,terminal_output +6587,13845599,"TERMINAL",0,0,"1:00558111",,terminal_output +6588,13846722,"TERMINAL",0,0,"1669222",,terminal_output +6589,13847745,"TERMINAL",0,0,"27750333",,terminal_output +6590,13848770,"TERMINAL",0,0,"3881444",,terminal_output +6591,13849771,"TERMINAL",0,0,"4992555",,terminal_output +6592,13850818,"TERMINAL",0,0,"550503666",,terminal_output +6593,13851967,"TERMINAL",0,0,"6114777",,terminal_output +6594,13852970,"TERMINAL",0,0,"7225888",,terminal_output +6595,13853991,"TERMINAL",0,0,"8336999",,terminal_output +6596,13855076,"TERMINAL",0,0,"9447203030",,terminal_output +6597,13856055,"TERMINAL",0,0,"10558111",,terminal_output +6598,13857100,"TERMINAL",0,0,"1669222",,terminal_output +6599,13858190,"TERMINAL",0,0,"2778:00333",,terminal_output +6600,13859319,"TERMINAL",0,0,"3881444",,terminal_output +6601,13860342,"TERMINAL",0,0,"4992555",,terminal_output +6602,13861365,"TERMINAL",0,0,"57:017:014777",,terminal_output +6603,13862390,"TERMINAL",0,0,"7225888",,terminal_output 
+6604,13863405,"TERMINAL",0,0,"8336999",,terminal_output +6605,13864456,"TERMINAL",0,0,"9447304040",,terminal_output +6606,13865494,"TERMINAL",0,0,"20558111",,terminal_output +6607,13866589,"TERMINAL",0,0,"1669222",,terminal_output +6608,13867612,"TERMINAL",0,0,"27710333",,terminal_output +6609,13868638,"TERMINAL",0,0,"3881444",,terminal_output +6610,13869674,"TERMINAL",0,0,"4992555",,terminal_output +6611,13870791,"TERMINAL",0,0,"510103666",,terminal_output +6612,13871762,"TERMINAL",0,0,"6114777",,terminal_output +6613,13872874,"TERMINAL",0,0,"7225888",,terminal_output +6614,13873873,"TERMINAL",0,0,"8336999",,terminal_output +6615,13874989,"TERMINAL",0,0,"9447405050",,terminal_output +6616,13876009,"TERMINAL",0,0,"30558111",,terminal_output +6617,13877014,"TERMINAL",0,0,"1669222",,terminal_output +6618,13878060,"TERMINAL",0,0,"27720333",,terminal_output +6619,13879105,"TERMINAL",0,0,"3881444",,terminal_output +6620,13880249,"TERMINAL",0,0,"4992555",,terminal_output +6621,13881232,"TERMINAL",0,0,"520203666",,terminal_output +6622,13882257,"TERMINAL",0,0,"6114777",,terminal_output +6623,13883309,"TERMINAL",0,0,"7225888",,terminal_output +6624,13884379,"TERMINAL",0,0,"8447506:006:00",,terminal_output +6625,13885408,"TERMINAL",0,0,"40558111",,terminal_output +6626,13886556,"TERMINAL",0,0,"1669222",,terminal_output +6627,13887498,"TERMINAL",0,0,"27730333",,terminal_output +6628,13888535,"TERMINAL",0,0,"3881444",,terminal_output +6629,13889587,"TERMINAL",0,0,"4992555",,terminal_output +6630,13890636,"TERMINAL",0,0,"530303666",,terminal_output +6631,13891778,"TERMINAL",0,0,"6114777",,terminal_output +6632,13892801,"TERMINAL",0,0,"7225888",,terminal_output +6633,13893831,"TERMINAL",0,0,"8336999",,terminal_output +6634,13894836,"TERMINAL",0,0,"94474:001010",,terminal_output +6635,13895873,"TERMINAL",0,0,"50558111",,terminal_output +6636,13896924,"TERMINAL",0,0,"1669222",,terminal_output +6637,13898000,"TERMINAL",0,0,"27740333",,terminal_output +6638,13899050,"TERMINAL",0,0,"3881444",,terminal_output +6639,13900070,"TERMINAL",0,0,"4992555",,terminal_output +6640,13901108,"TERMINAL",0,0,"540403666",,terminal_output +6641,13902154,"TERMINAL",0,0,"6114777",,terminal_output +6642,13903202,"TERMINAL",0,0,"7225888",,terminal_output +6643,13904252,"TERMINAL",0,0,"8336999",,terminal_output +6644,13905301,"TERMINAL",0,0,"9447102020",,terminal_output +6645,13906354,"TERMINAL",0,0,"2:00669222",,terminal_output +6646,13907403,"TERMINAL",0,0,"27750333",,terminal_output +6647,13908448,"TERMINAL",0,0,"3881444",,terminal_output +6648,13909494,"TERMINAL",0,0,"4992555",,terminal_output +6649,13910542,"TERMINAL",0,0,"550503666",,terminal_output +6650,13911647,"TERMINAL",0,0,"6114777",,terminal_output +6651,13912639,"TERMINAL",0,0,"7225888",,terminal_output +6652,13913683,"TERMINAL",0,0,"8336999",,terminal_output +6653,13914730,"TERMINAL",0,0,"9447203030",,terminal_output +6654,13915790,"TERMINAL",0,0,"10558111",,terminal_output +6655,13916834,"TERMINAL",0,0,"1669222",,terminal_output +6656,13917880,"TERMINAL",0,0,"2779:00333",,terminal_output +6657,13919017,"TERMINAL",0,0,"3881444",,terminal_output +6658,13919974,"TERMINAL",0,0,"4992555",,terminal_output +6659,13921022,"TERMINAL",0,0,"58:008:003666",,terminal_output +6660,13922071,"TERMINAL",0,0,"6114777",,terminal_output +6661,13923119,"TERMINAL",0,0,"7225888",,terminal_output +6662,13924165,"TERMINAL",0,0,"8336999",,terminal_output +6663,13925220,"TERMINAL",0,0,"9447304040",,terminal_output +6664,13926262,"TERMINAL",0,0,"20558111",,terminal_output 
+6665,13927311,"TERMINAL",0,0,"17710333",,terminal_output +6666,13928354,"TERMINAL",0,0,"3881444",,terminal_output +6667,13929407,"TERMINAL",0,0,"4992555",,terminal_output +6668,13930483,"TERMINAL",0,0,"510103666",,terminal_output +6669,13931498,"TERMINAL",0,0,"6114777",,terminal_output +6670,13932546,"TERMINAL",0,0,"7225888",,terminal_output +6671,13933591,"TERMINAL",0,0,"8336999",,terminal_output +6672,13934643,"TERMINAL",0,0,"9447405050",,terminal_output +6673,13935683,"TERMINAL",0,0,"30558111",,terminal_output +6674,13936735,"TERMINAL",0,0,"1669222",,terminal_output +6675,13937862,"TERMINAL",0,0,"27720333",,terminal_output +6676,13938881,"TERMINAL",0,0,"3881444",,terminal_output +6677,13939908,"TERMINAL",0,0,"4992555",,terminal_output +6678,13940920,"TERMINAL",0,0,"520203666",,terminal_output +6679,13941962,"TERMINAL",0,0,"6114777",,terminal_output +6680,13943007,"TERMINAL",0,0,"7225888",,terminal_output +6681,13944057,"TERMINAL",0,0,"8336999",,terminal_output +6682,13945099,"TERMINAL",0,0,"9447507:007:00",,terminal_output +6683,13946152,"TERMINAL",0,0,"40558111",,terminal_output +6684,13947282,"TERMINAL",0,0,"1669222",,terminal_output +6685,13948304,"TERMINAL",0,0,"27730333",,terminal_output +6686,13949304,"TERMINAL",0,0,"3881444",,terminal_output +6687,13950355,"TERMINAL",0,0,"530303666",,terminal_output +6688,13951403,"TERMINAL",0,0,"6114777",,terminal_output +6689,13952450,"TERMINAL",0,0,"7225888",,terminal_output +6690,13953528,"TERMINAL",0,0,"8336999",,terminal_output +6691,13954561,"TERMINAL",0,0,"94475:001010",,terminal_output +6692,13955623,"TERMINAL",0,0,"50558111",,terminal_output +6693,13956712,"TERMINAL",0,0,"1669222",,terminal_output +6694,13957724,"TERMINAL",0,0,"27740333",,terminal_output +6695,13958748,"TERMINAL",0,0,"3881444",,terminal_output +6696,13959799,"TERMINAL",0,0,"4992555",,terminal_output +6697,13960847,"TERMINAL",0,0,"540403666",,terminal_output +6698,13961899,"TERMINAL",0,0,"6114777",,terminal_output +6699,13962949,"TERMINAL",0,0,"7225888",,terminal_output +6700,13964074,"TERMINAL",0,0,"8336999",,terminal_output +6701,13965053,"TERMINAL",0,0,"9447102020",,terminal_output +6702,13966103,"TERMINAL",0,0,"3:00558111",,terminal_output +6703,13967151,"TERMINAL",0,0,"1669222",,terminal_output +6704,13968204,"TERMINAL",0,0,"27750333",,terminal_output +6705,13969250,"TERMINAL",0,0,"3881444",,terminal_output +6706,13970299,"TERMINAL",0,0,"4992555",,terminal_output +6707,13971354,"TERMINAL",0,0,"551514777",,terminal_output +6708,13972405,"TERMINAL",0,0,"7225888",,terminal_output +6709,13973453,"TERMINAL",0,0,"8336999",,terminal_output +6710,13974517,"TERMINAL",0,0,"9447203030",,terminal_output +6711,13975548,"TERMINAL",0,0,"10558111",,terminal_output +6712,13976609,"TERMINAL",0,0,"1669222",,terminal_output +6713,13977690,"TERMINAL",0,0,"27740:00333",,terminal_output +6714,13978689,"TERMINAL",0,0,"3881444",,terminal_output +6715,13979740,"TERMINAL",0,0,"4992555",,terminal_output +6716,13980782,"TERMINAL",0,0,"59:009:003666",,terminal_output +6717,13981891,"TERMINAL",0,0,"6114777",,terminal_output +6718,13982873,"TERMINAL",0,0,"7225888",,terminal_output +6719,13983939,"TERMINAL",0,0,"8336999",,terminal_output +6720,13984971,"TERMINAL",0,0,"9447304040",,terminal_output +6721,13986092,"TERMINAL",0,0,"20558111",,terminal_output +6722,13987063,"TERMINAL",0,0,"1669222",,terminal_output +6723,13988110,"TERMINAL",0,0,"27710333",,terminal_output +6724,13989151,"TERMINAL",0,0,"3881444",,terminal_output +6725,13990202,"TERMINAL",0,0,"4992555",,terminal_output 
+6726,13991248,"TERMINAL",0,0,"510103666",,terminal_output +6727,13992334,"TERMINAL",0,0,"6114777",,terminal_output +6728,13993358,"TERMINAL",0,0,"7336999",,terminal_output +6729,13994388,"TERMINAL",0,0,"9447405050",,terminal_output +6730,13995508,"TERMINAL",0,0,"30558111",,terminal_output +6731,13996483,"TERMINAL",0,0,"1669222",,terminal_output +6732,13997534,"TERMINAL",0,0,"27720333",,terminal_output +6733,13998687,"TERMINAL",0,0,"3881444",,terminal_output +6734,13999711,"TERMINAL",0,0,"4992555",,terminal_output +6735,14000732,"TERMINAL",0,0,"520203666",,terminal_output +6736,14001734,"TERMINAL",0,0,"6114777",,terminal_output +6737,14002882,"TERMINAL",0,0,"7225888",,terminal_output +6738,14003833,"TERMINAL",0,0,"8336999",,terminal_output +6739,14004929,"TERMINAL",0,0,"9447508:008:00",,terminal_output +6740,14005955,"TERMINAL",0,0,"40558111",,terminal_output +6741,14007053,"TERMINAL",0,0,"1669222",,terminal_output +6742,14008105,"TERMINAL",0,0,"27730333",,terminal_output +6743,14009128,"TERMINAL",0,0,"3881444",,terminal_output +6744,14010153,"TERMINAL",0,0,"4992555",,terminal_output +6745,14011183,"TERMINAL",0,0,"530303666",,terminal_output +6746,14012302,"TERMINAL",0,0,"6114777",,terminal_output +6747,14013327,"TERMINAL",0,0,"7225888",,terminal_output +6748,14014366,"TERMINAL",0,0,"84476:001010",,terminal_output +6749,14015482,"TERMINAL",0,0,"50558111",,terminal_output +6750,14016502,"TERMINAL",0,0,"1669222",,terminal_output +6751,14017529,"TERMINAL",0,0,"288727740333",,terminal_output +6752,14018522,"TERMINAL",0,0,"3881444",,terminal_output +6753,14019675,"TERMINAL",0,0,"4992555",,terminal_output +6754,14020702,"TERMINAL",0,0,"540403666",,terminal_output +6755,14021726,"TERMINAL",0,0,"6114777",,terminal_output +6756,14022756,"TERMINAL",0,0,"7225888",,terminal_output +6757,14023873,"TERMINAL",0,0,"8336999",,terminal_output +6758,14024898,"TERMINAL",0,0,"9447102020",,terminal_output +6759,14025921,"TERMINAL",0,0,"4:00558111",,terminal_output +6760,14026946,"TERMINAL",0,0,"1669222",,terminal_output +6761,14027982,"TERMINAL",0,0,"27750333",,terminal_output +6762,14029099,"TERMINAL",0,0,"3881444",,terminal_output +6763,14030126,"TERMINAL",0,0,"4992555",,terminal_output +6764,14031108,"TERMINAL",0,0,"550503666",,terminal_output +6765,14032173,"TERMINAL",0,0,"6114777",,terminal_output +6766,14033219,"TERMINAL",0,0,"7225888",,terminal_output +6767,14034263,"TERMINAL",0,0,"8336999",,terminal_output +6768,14035345,"TERMINAL",0,0,"9447203030",,terminal_output +6769,14036371,"TERMINAL",0,0,"11669222",,terminal_output +6770,14037495,"TERMINAL",0,0,"2771:00333",,terminal_output +6771,14038449,"TERMINAL",0,0,"3881444",,terminal_output +6772,14039493,"TERMINAL",0,0,"4992555",,terminal_output +6773,14040566,"TERMINAL",0,0,"520:0020:003666",,terminal_output +6774,14041691,"TERMINAL",0,0,"6114777",,terminal_output +6775,14042716,"TERMINAL",0,0,"7225888",,terminal_output +6776,14043702,"TERMINAL",0,0,"8336999",,terminal_output +6777,14044749,"TERMINAL",0,0,"9447304040",,terminal_output +6778,14045791,"TERMINAL",0,0,"20558111",,terminal_output +6779,14046830,"TERMINAL",0,0,"1669222",,terminal_output +6780,14047879,"TERMINAL",0,0,"27710333",,terminal_output +6781,14048916,"TERMINAL",0,0,"3881444",,terminal_output +6782,14049956,"TERMINAL",0,0,"4992555",,terminal_output +6783,14051007,"TERMINAL",0,0,"510103666",,terminal_output +6784,14052076,"TERMINAL",0,0,"6114777",,terminal_output +6785,14053162,"TERMINAL",0,0,"7225888",,terminal_output +6786,14054153,"TERMINAL",0,0,"8336999",,terminal_output 
+6787,14055204,"TERMINAL",0,0,"9447405050",,terminal_output +6788,14056250,"TERMINAL",0,0,"30558111",,terminal_output +6789,14057361,"TERMINAL",0,0,"1669222",,terminal_output +6790,14058383,"TERMINAL",0,0,"28821444",,terminal_output +6791,14059386,"TERMINAL",0,0,"4992555",,terminal_output +6792,14060544,"TERMINAL",0,0,"520203666",,terminal_output +6793,14061557,"TERMINAL",0,0,"6114777",,terminal_output +6794,14062518,"TERMINAL",0,0,"7225888",,terminal_output +6795,14063565,"TERMINAL",0,0,"8336999",,terminal_output +6796,14064633,"TERMINAL",0,0,"9447509:009:00",,terminal_output +6797,14065668,"TERMINAL",0,0,"40558111",,terminal_output +6798,14066780,"TERMINAL",0,0,"1669222",,terminal_output +6799,14067758,"TERMINAL",0,0,"27730333",,terminal_output +6800,14068806,"TERMINAL",0,0,"3881444",,terminal_output +6801,14069967,"TERMINAL",0,0,"4992555",,terminal_output +6802,14070903,"TERMINAL",0,0,"530303666",,terminal_output +6803,14072021,"TERMINAL",0,0,"6114777",,terminal_output +6804,14073032,"TERMINAL",0,0,"7225888",,terminal_output +6805,14074073,"TERMINAL",0,0,"8336999",,terminal_output +6806,14075077,"TERMINAL",0,0,"94477:001010",,terminal_output +6807,14076120,"TERMINAL",0,0,"50558111",,terminal_output +6808,14077160,"TERMINAL",0,0,"1669222",,terminal_output +6809,14078200,"TERMINAL",0,0,"27740333",,terminal_output +6810,14079275,"TERMINAL",0,0,"3881444",,terminal_output +6811,14080402,"TERMINAL",0,0,"4992555",,terminal_output +6812,14081344,"TERMINAL",0,0,"541414777",,terminal_output +6813,14082387,"TERMINAL",0,0,"7225888",,terminal_output +6814,14083476,"TERMINAL",0,0,"8336999",,terminal_output +6815,14084465,"TERMINAL",0,0,"9447102020",,terminal_output +6816,14085531,"TERMINAL",0,0,"5:00558111",,terminal_output +6817,14086648,"TERMINAL",0,0,"1669222",,terminal_output +6818,14087673,"TERMINAL",0,0,"27750333",,terminal_output +6819,14088695,"TERMINAL",0,0,"3881444",,terminal_output +6820,14089672,"TERMINAL",0,0,"4992555",,terminal_output +6821,14090721,"TERMINAL",0,0,"550503666",,terminal_output +6822,14091873,"TERMINAL",0,0,"6114777",,terminal_output +6823,14092895,"TERMINAL",0,0,"7225888",,terminal_output +6824,14093918,"TERMINAL",0,0,"8336999",,terminal_output +6825,14094940,"TERMINAL",0,0,"9447203030",,terminal_output +6826,14095931,"TERMINAL",0,0,"10558111",,terminal_output +6827,14096974,"TERMINAL",0,0,"1669222",,terminal_output +6828,14098117,"TERMINAL",0,0,"2772:00333",,terminal_output +6829,14099064,"TERMINAL",0,0,"3881444",,terminal_output +6830,14100162,"TERMINAL",0,0,"4992555",,terminal_output +6831,14101188,"TERMINAL",0,0,"51:001:003666",,terminal_output +6832,14102212,"TERMINAL",0,0,"6114777",,terminal_output +6833,14103256,"TERMINAL",0,0,"7225888",,terminal_output +6834,14104292,"TERMINAL",0,0,"8336999",,terminal_output +6835,14105388,"TERMINAL",0,0,"9558314141",,terminal_output +6836,14106412,"TERMINAL",0,0,"21669222",,terminal_output +6837,14107537,"TERMINAL",0,0,"27710333",,terminal_output +6838,14108562,"TERMINAL",0,0,"3881444",,terminal_output +6839,14109588,"TERMINAL",0,0,"4992555",,terminal_output +6840,14110570,"TERMINAL",0,0,"510103666",,terminal_output +6841,14111632,"TERMINAL",0,0,"6114777",,terminal_output +6842,14112762,"TERMINAL",0,0,"7225888",,terminal_output +6843,14113785,"TERMINAL",0,0,"8336999",,terminal_output +6844,14114807,"TERMINAL",0,0,"9447405050",,terminal_output +6845,14115833,"TERMINAL",0,0,"30558111",,terminal_output +6846,14116959,"TERMINAL",0,0,"1669222",,terminal_output +6847,14117923,"TERMINAL",0,0,"27720333",,terminal_output 
+6848,14118965,"TERMINAL",0,0,"3881444",,terminal_output +6849,14120031,"TERMINAL",0,0,"4992555",,terminal_output +6850,14121057,"TERMINAL",0,0,"520203666",,terminal_output +6851,14122079,"TERMINAL",0,0,"6114777",,terminal_output +6852,14123206,"TERMINAL",0,0,"7225888",,terminal_output +6853,14124148,"TERMINAL",0,0,"8336999",,terminal_output +6854,14125188,"TERMINAL",0,0,"94475040:0040:00",,terminal_output +6855,14126239,"TERMINAL",0,0,"40558111",,terminal_output +6856,14127276,"TERMINAL",0,0,"1669222",,terminal_output +6857,14128316,"TERMINAL",0,0,"28831444",,terminal_output +6858,14129382,"TERMINAL",0,0,"4992555",,terminal_output +6859,14130473,"TERMINAL",0,0,"530303666",,terminal_output +6860,14131497,"TERMINAL",0,0,"6114777",,terminal_output +6861,14132521,"TERMINAL",0,0,"7225888",,terminal_output +6862,14133549,"TERMINAL",0,0,"8336999",,terminal_output +6863,14134560,"TERMINAL",0,0,"94478:001010",,terminal_output +6864,14135638,"TERMINAL",0,0,"50558111",,terminal_output +6865,14136639,"TERMINAL",0,0,"1669222",,terminal_output +6866,14137892,"TERMINAL",0,0,"27740333",,terminal_output +6867,14138888,"TERMINAL",0,0,"3881444",,terminal_output +6868,14139997,"TERMINAL",0,0,"4992555",,terminal_output +6869,14140386,"genie.py",0,0,"",python,tab +6870,14140973,"TERMINAL",0,0,"540403666",,terminal_output +6871,14141463,"genie.py",3696,0,"",python,selection_mouse +6872,14142046,"TERMINAL",0,0,"6114777",,terminal_output +6873,14143070,"TERMINAL",0,0,"7225888",,terminal_output +6874,14144126,"TERMINAL",0,0,"8336999",,terminal_output +6875,14145225,"TERMINAL",0,0,"9447102020",,terminal_output +6876,14146207,"TERMINAL",0,0,"6:00558111",,terminal_output +6877,14147245,"TERMINAL",0,0,"1669222",,terminal_output +6878,14148295,"TERMINAL",0,0,"27750333",,terminal_output +6879,14149147,"genie.py",3652,0,"",python,selection_mouse +6880,14149367,"TERMINAL",0,0,"3992555",,terminal_output +6881,14150161,"genie.py",3666,0,"",python,selection_mouse +6882,14150162,"genie.py",3665,0,"",python,selection_command +6883,14150381,"TERMINAL",0,0,"550503666",,terminal_output +6884,14151468,"TERMINAL",0,0,"6114777",,terminal_output +6885,14151584,"genie.py",3666,0,"",python,selection_command +6886,14152468,"TERMINAL",0,0,"7225888",,terminal_output +6887,14152847,"genie.py",3688,0,"",python,selection_mouse +6888,14152976,"genie.py",3687,3,"jax",python,selection_mouse +6889,14153215,"genie.py",3667,63," lambda: jax.lax.stop_gradient(lam_outputs[""z_q""]),\n",python,selection_mouse +6890,14153525,"TERMINAL",0,0,"8336999",,terminal_output +6891,14154561,"TERMINAL",0,0,"9447203030",,terminal_output +6892,14155667,"TERMINAL",0,0,"10558111",,terminal_output +6893,14156651,"TERMINAL",0,0,"1669222",,terminal_output +6894,14157716,"TERMINAL",0,0,"2773:00333",,terminal_output +6895,14158843,"TERMINAL",0,0,"3881444",,terminal_output +6896,14159864,"TERMINAL",0,0,"4992555",,terminal_output +6897,14160814,"TERMINAL",0,0,"52:002:003666",,terminal_output +6898,14161911,"TERMINAL",0,0,"6114777",,terminal_output +6899,14162001,"genie.py",3688,0,"",python,selection_mouse +6900,14162913,"genie.py",3684,0,"",python,selection_mouse +6901,14162927,"TERMINAL",0,0,"7225888",,terminal_output +6902,14163032,"genie.py",3679,6,"lambda",python,selection_mouse +6903,14163199,"genie.py",3679,11,"lambda: jax",python,selection_mouse +6904,14163213,"genie.py",3679,15,"lambda: jax.lax",python,selection_mouse +6905,14163273,"genie.py",3679,29,"lambda: jax.lax.stop_gradient",python,selection_mouse +6906,14163305,"genie.py",3679,60,"lambda: 
jax.lax.stop_gradient(lam_outputs[""z_q""]),\n )",python,selection_mouse +6907,14163731,"genie.py",3679,50,"lambda: jax.lax.stop_gradient(lam_outputs[""z_q""]),",python,selection_mouse +6908,14163939,"TERMINAL",0,0,"8336999",,terminal_output +6909,14164976,"TERMINAL",0,0,"9447304040",,terminal_output +6910,14166126,"TERMINAL",0,0,"20558111",,terminal_output +6911,14166385,"genie.py",3729,0,"",python,selection_mouse +6912,14167062,"TERMINAL",0,0,"1669222",,terminal_output +6913,14167185,"genie.py",3729,10,"\n )",python,selection_mouse +6914,14167299,"genie.py",3713,16,"outputs[""z_q""]),",python,selection_mouse +6915,14167317,"genie.py",3710,19,"am_outputs[""z_q""]),",python,selection_mouse +6916,14167374,"genie.py",3704,25,"ient(lam_outputs[""z_q""]),",python,selection_mouse +6917,14167376,"genie.py",3658,71,"[""z_q""],\n lambda: jax.lax.stop_gradient(lam_outputs[""z_q""]),",python,selection_mouse +6918,14167377,"genie.py",3649,80,"m_outputs[""z_q""],\n lambda: jax.lax.stop_gradient(lam_outputs[""z_q""]),",python,selection_mouse +6919,14167381,"genie.py",3644,85,"a: lam_outputs[""z_q""],\n lambda: jax.lax.stop_gradient(lam_outputs[""z_q""]),",python,selection_mouse +6920,14167405,"genie.py",3681,48,"mbda: jax.lax.stop_gradient(lam_outputs[""z_q""]),",python,selection_mouse +6921,14167465,"genie.py",3678,51," lambda: jax.lax.stop_gradient(lam_outputs[""z_q""]),",python,selection_mouse +6922,14167467,"genie.py",3677,52," lambda: jax.lax.stop_gradient(lam_outputs[""z_q""]),",python,selection_mouse +6923,14167467,"genie.py",3676,53," lambda: jax.lax.stop_gradient(lam_outputs[""z_q""]),",python,selection_mouse +6924,14167481,"genie.py",3675,54," lambda: jax.lax.stop_gradient(lam_outputs[""z_q""]),",python,selection_mouse +6925,14167649,"genie.py",3676,53," lambda: jax.lax.stop_gradient(lam_outputs[""z_q""]),",python,selection_mouse +6926,14167665,"genie.py",3677,52," lambda: jax.lax.stop_gradient(lam_outputs[""z_q""]),",python,selection_mouse +6927,14167724,"genie.py",3678,51," lambda: jax.lax.stop_gradient(lam_outputs[""z_q""]),",python,selection_mouse +6928,14167867,"genie.py",3677,52," lambda: jax.lax.stop_gradient(lam_outputs[""z_q""]),",python,selection_mouse +6929,14168163,"TERMINAL",0,0,"27710333",,terminal_output +6930,14168177,"genie.py",3677,0,"",python,selection_mouse +6931,14168178,"genie.py",3667,12," ",python,selection_mouse +6932,14168334,"genie.py",3667,63," lambda: jax.lax.stop_gradient(lam_outputs[""z_q""]),\n",python,selection_mouse +6933,14169142,"TERMINAL",0,0,"3881444",,terminal_output +6934,14169201,"genie.py",3677,0,"",python,selection_mouse +6935,14169201,"genie.py",3667,12," ",python,selection_mouse +6936,14169363,"genie.py",3667,63," lambda: jax.lax.stop_gradient(lam_outputs[""z_q""]),\n",python,selection_mouse +6937,14170199,"TERMINAL",0,0,"4992555",,terminal_output +6938,14170515,"genie.py",3677,0,"",python,selection_mouse +6939,14170516,"genie.py",3667,12," ",python,selection_mouse +6940,14171136,"genie.py",3677,0,"",python,selection_mouse +6941,14171248,"TERMINAL",0,0,"510103666",,terminal_output +6942,14171613,"genie.py",3667,12," ",python,selection_mouse +6943,14171805,"genie.py",3667,63," lambda: jax.lax.stop_gradient(lam_outputs[""z_q""]),\n",python,selection_mouse +6944,14172281,"TERMINAL",0,0,"6114777",,terminal_output +6945,14173320,"TERMINAL",0,0,"7336999",,terminal_output +6946,14174375,"TERMINAL",0,0,"9447405050",,terminal_output +6947,14175402,"TERMINAL",0,0,"30558111",,terminal_output +6948,14176449,"TERMINAL",0,0,"1669222",,terminal_output 
+6949,14177585,"TERMINAL",0,0,"27720333",,terminal_output +6950,14178605,"TERMINAL",0,0,"3881444",,terminal_output +6951,14179630,"TERMINAL",0,0,"4992555",,terminal_output +6952,14180655,"TERMINAL",0,0,"520203666",,terminal_output +6953,14181779,"TERMINAL",0,0,"6114777",,terminal_output +6954,14182719,"TERMINAL",0,0,"7225888",,terminal_output +6955,14183766,"TERMINAL",0,0,"8336999",,terminal_output +6956,14184851,"TERMINAL",0,0,"9447501:001:00",,terminal_output +6957,14185875,"TERMINAL",0,0,"40558111",,terminal_output +6958,14186967,"TERMINAL",0,0,"1669222",,terminal_output +6959,14187971,"TERMINAL",0,0,"27730333",,terminal_output +6960,14189050,"TERMINAL",0,0,"3881444",,terminal_output +6961,14190073,"TERMINAL",0,0,"4992555",,terminal_output +6962,14191201,"TERMINAL",0,0,"530303666",,terminal_output +6963,14192223,"TERMINAL",0,0,"6114777",,terminal_output +6964,14193183,"TERMINAL",0,0,"7225888",,terminal_output +6965,14194241,"TERMINAL",0,0,"8336999",,terminal_output +6966,14195301,"TERMINAL",0,0,"94479:001010",,terminal_output +6967,14196342,"TERMINAL",0,0,"50669222",,terminal_output +6968,14197399,"TERMINAL",0,0,"27740333",,terminal_output +6969,14198478,"TERMINAL",0,0,"3881444",,terminal_output +6970,14199495,"TERMINAL",0,0,"4992555",,terminal_output +6971,14200539,"TERMINAL",0,0,"540403666",,terminal_output +6972,14201579,"TERMINAL",0,0,"6114777",,terminal_output +6973,14202672,"TERMINAL",0,0,"7225888",,terminal_output +6974,14203696,"TERMINAL",0,0,"8336999",,terminal_output +6975,14204716,"TERMINAL",0,0,"9447102020",,terminal_output +6976,14205843,"TERMINAL",0,0,"7:00558111",,terminal_output +6977,14206866,"TERMINAL",0,0,"1669222",,terminal_output +6978,14207891,"TERMINAL",0,0,"27750333",,terminal_output +6979,14208916,"TERMINAL",0,0,"M33373409 accelerat wrap tum_cte0 CG41:24\t 1 hkn072388144",,terminal_output +6980,14209944,"TERMINAL",0,0,"499255",,terminal_output +6981,14211066,"TERMINAL",0,0,"55050366",,terminal_output +6982,14212088,"TERMINAL",0,0,"611477",,terminal_output +6983,14213089,"TERMINAL",0,0,"722588",,terminal_output +6984,14214239,"TERMINAL",0,0,"833699",,terminal_output +6985,14215192,"TERMINAL",0,0,"94472030",,terminal_output +6986,14216251,"TERMINAL",0,0,"1055811",,terminal_output +6987,14217305,"TERMINAL",0,0,"166922",,terminal_output +6988,14218356,"TERMINAL",0,0,"2884:0144",,terminal_output +6989,14219400,"TERMINAL",0,0,"\r499255",,terminal_output +6990,14220491,"TERMINAL",0,0,"53:003:00366",,terminal_output +6991,14221498,"TERMINAL",0,0,"611477",,terminal_output +6992,14222637,"TERMINAL",0,0,"722588",,terminal_output +6993,14223662,"TERMINAL",0,0,"833699",,terminal_output +6994,14224684,"TERMINAL",0,0,"94473040",,terminal_output +6995,14225709,"TERMINAL",0,0,"2055811",,terminal_output +6996,14226839,"TERMINAL",0,0,"166922",,terminal_output +6997,14227789,"TERMINAL",0,0,"2771033",,terminal_output +6998,14228887,"TERMINAL",0,0,"388144",,terminal_output +6999,14229915,"TERMINAL",0,0,"499255",,terminal_output +7000,14231039,"TERMINAL",0,0,"51010366",,terminal_output +7001,14232059,"TERMINAL",0,0,"611477",,terminal_output +7002,14233084,"TERMINAL",0,0,"722588",,terminal_output +7003,14234084,"TERMINAL",0,0,"833699",,terminal_output +7004,14235161,"TERMINAL",0,0,"94474050",,terminal_output +7005,14236190,"TERMINAL",0,0,"3055811",,terminal_output +7006,14237239,"TERMINAL",0,0,"166922",,terminal_output +7007,14238290,"TERMINAL",0,0,"2772033",,terminal_output +7008,14239368,"TERMINAL",0,0,"399255",,terminal_output 
+7009,14240393,"TERMINAL",0,0,"52020366",,terminal_output +7010,14241480,"TERMINAL",0,0,"611477",,terminal_output +7011,14242503,"TERMINAL",0,0,"722588",,terminal_output +7012,14243521,"TERMINAL",0,0,"833699",,terminal_output +7013,14244656,"TERMINAL",0,0,"9447502:00",,terminal_output +7014,14245677,"TERMINAL",0,0,"4055811",,terminal_output +7015,14246681,"TERMINAL",0,0,"166922",,terminal_output +7016,14247825,"TERMINAL",0,0,"2773033",,terminal_output +7017,14248765,"TERMINAL",0,0,"388144",,terminal_output +7018,14249876,"TERMINAL",0,0,"499255",,terminal_output +7019,14250900,"TERMINAL",0,0,"53030366",,terminal_output +7020,14251913,"TERMINAL",0,0,"611477",,terminal_output +7021,14252964,"TERMINAL",0,0,"722588",,terminal_output +7022,14254081,"TERMINAL",0,0,"833699",,terminal_output +7023,14255201,"TERMINAL",0,0,"944730:0010",,terminal_output +7024,14256228,"TERMINAL",0,0,"5055811",,terminal_output +7025,14257251,"TERMINAL",0,0,"166922",,terminal_output +7026,14258477,"TERMINAL",0,0,"2884144",,terminal_output +7027,14259496,"TERMINAL",0,0,"499255",,terminal_output +7028,14260626,"TERMINAL",0,0,"54040366",,terminal_output +7029,14260781,"genie.py",3998,0,"",python,selection_mouse +7030,14261591,"TERMINAL",0,0,"611477",,terminal_output +7031,14262122,"genie.py",3943,56," dyna_outputs = self.dynamics(outputs, training)\n",python,selection_mouse +7032,14262298,"genie.py",3998,0,"",python,selection_mouse +7033,14262682,"TERMINAL",0,0,"722588",,terminal_output +7034,14263683,"TERMINAL",0,0,"833699",,terminal_output +7035,14264826,"TERMINAL",0,0,"94471020",,terminal_output +7036,14265849,"TERMINAL",0,0,"8:0055811",,terminal_output +7037,14266138,"models/lam.py",0,0,"",python,tab +7038,14266822,"TERMINAL",0,0,"166922",,terminal_output +7039,14267865,"TERMINAL",0,0,"2775033",,terminal_output +7040,14268462,"models/lam.py",2754,0,"",python,selection_mouse +7041,14268534,"models/lam.py",2749,8,"patchify",python,selection_mouse +7042,14268908,"TERMINAL",0,0,"388144",,terminal_output +7043,14269233,"models/lam.py",2828,0,"",python,selection_mouse +7044,14269370,"models/lam.py",2826,9,"action_in",python,selection_mouse +7045,14269950,"TERMINAL",0,0,"499255",,terminal_output +7046,14269969,"models/lam.py",2819,0,"",python,selection_mouse +7047,14270106,"models/lam.py",2808,12,"broadcast_to",python,selection_mouse +7048,14270940,"models/lam.py",2799,0,"",python,selection_mouse +7049,14271005,"TERMINAL",0,0,"55050366",,terminal_output +7050,14271084,"models/lam.py",2791,10,"action_pad",python,selection_mouse +7051,14271678,"models/lam.py",2993,0,"",python,selection_mouse +7052,14272042,"TERMINAL",0,0,"611477",,terminal_output +7053,14272264,"models/lam.py",2958,0,"",python,selection_mouse +7054,14272415,"models/lam.py",2950,11,"concatenate",python,selection_mouse +7055,14273084,"TERMINAL",0,0,"722588",,terminal_output +7056,14274144,"TERMINAL",0,0,"833699",,terminal_output +7057,14275166,"TERMINAL",0,0,"94472030",,terminal_output +7058,14275369,"models/lam.py",2939,0,"",python,selection_mouse +7059,14275467,"models/lam.py",2929,14,"padded_patches",python,selection_mouse +7060,14275954,"models/lam.py",3040,0,"",python,selection_mouse +7061,14276203,"TERMINAL",0,0,"1055811",,terminal_output +7062,14276778,"models/lam.py",3098,0,"",python,selection_mouse +7063,14277073,"models/lam.py",3097,6,"action",python,selection_mouse +7064,14277255,"TERMINAL",0,0,"166922",,terminal_output +7065,14278213,"models/lam.py",3039,0,"",python,selection_mouse +7066,14278306,"TERMINAL",0,0,"2785:0144",,terminal_output 
+7067,14278352,"models/lam.py",3036,7,"encoder",python,selection_mouse +7068,14279371,"TERMINAL",0,0,"499255",,terminal_output +7069,14280045,"models/lam.py",652,0,"",python,selection_mouse +7070,14280173,"models/lam.py",646,13,"STTransformer",python,selection_mouse +7071,14280491,"TERMINAL",0,0,"54:004:00366",,terminal_output +7072,14281386,"models/lam.py",639,0,"",python,selection_mouse +7073,14281452,"TERMINAL",0,0,"611477",,terminal_output +7074,14281515,"models/lam.py",636,7,"encoder",python,selection_mouse +7075,14282506,"TERMINAL",0,0,"722588",,terminal_output +7076,14283565,"TERMINAL",0,0,"833699",,terminal_output +7077,14284597,"TERMINAL",0,0,"94473040",,terminal_output +7078,14285717,"TERMINAL",0,0,"2055811",,terminal_output +7079,14286689,"TERMINAL",0,0,"166922",,terminal_output +7080,14287743,"TERMINAL",0,0,"2771033",,terminal_output +7081,14288640,"models/lam.py",3087,0,"",python,selection_mouse +7082,14288774,"models/lam.py",3086,3,"Get",python,selection_mouse +7083,14288810,"TERMINAL",0,0,"388144",,terminal_output +7084,14289024,"models/lam.py",3086,10,"Get latent",python,selection_mouse +7085,14289051,"models/lam.py",3086,57,"Get latent action for all future frames\n z = z[:, ",python,selection_mouse +7086,14289109,"models/lam.py",3086,58,"Get latent action for all future frames\n z = z[:, 1",python,selection_mouse +7087,14289111,"models/lam.py",3086,59,"Get latent action for all future frames\n z = z[:, 1:",python,selection_mouse +7088,14289111,"models/lam.py",3086,61,"Get latent action for all future frames\n z = z[:, 1:, ",python,selection_mouse +7089,14289117,"models/lam.py",3086,62,"Get latent action for all future frames\n z = z[:, 1:, 0",python,selection_mouse +7090,14289137,"models/lam.py",3086,63,"Get latent action for all future frames\n z = z[:, 1:, 0]",python,selection_mouse +7091,14289166,"models/lam.py",3086,64,"Get latent action for all future frames\n z = z[:, 1:, 0] ",python,selection_mouse +7092,14289231,"models/lam.py",3086,65,"Get latent action for all future frames\n z = z[:, 1:, 0] ",python,selection_mouse +7093,14289232,"models/lam.py",3086,66,"Get latent action for all future frames\n z = z[:, 1:, 0] #",python,selection_mouse +7094,14289232,"models/lam.py",3086,68,"Get latent action for all future frames\n z = z[:, 1:, 0] # (",python,selection_mouse +7095,14289233,"models/lam.py",3086,70,"Get latent action for all future frames\n z = z[:, 1:, 0] # (B,",python,selection_mouse +7096,14289250,"models/lam.py",3086,71,"Get latent action for all future frames\n z = z[:, 1:, 0] # (B, ",python,selection_mouse +7097,14289317,"models/lam.py",3086,72,"Get latent action for all future frames\n z = z[:, 1:, 0] # (B, T",python,selection_mouse +7098,14289317,"models/lam.py",3086,73,"Get latent action for all future frames\n z = z[:, 1:, 0] # (B, T-",python,selection_mouse +7099,14289373,"models/lam.py",3086,74,"Get latent action for all future frames\n z = z[:, 1:, 0] # (B, T-1",python,selection_mouse +7100,14289378,"models/lam.py",3086,75,"Get latent action for all future frames\n z = z[:, 1:, 0] # (B, T-1,",python,selection_mouse +7101,14289389,"models/lam.py",3086,76,"Get latent action for all future frames\n z = z[:, 1:, 0] # (B, T-1, ",python,selection_mouse +7102,14289403,"models/lam.py",3086,77,"Get latent action for all future frames\n z = z[:, 1:, 0] # (B, T-1, E",python,selection_mouse +7103,14289464,"models/lam.py",3086,78,"Get latent action for all future frames\n z = z[:, 1:, 0] # (B, T-1, E)",python,selection_mouse 
+7104,14289604,"models/lam.py",3086,33,"Get latent action for all future ",python,selection_mouse +7105,14289624,"models/lam.py",3086,39,"Get latent action for all future frames",python,selection_mouse +7106,14289844,"TERMINAL",0,0,"499255",,terminal_output +7107,14290892,"TERMINAL",0,0,"51010366",,terminal_output +7108,14291935,"TERMINAL",0,0,"611477",,terminal_output +7109,14292978,"TERMINAL",0,0,"722588",,terminal_output +7110,14294120,"TERMINAL",0,0,"833699",,terminal_output +7111,14295136,"TERMINAL",0,0,"94474050",,terminal_output +7112,14296127,"TERMINAL",0,0,"3055811",,terminal_output +7113,14297203,"TERMINAL",0,0,"166922",,terminal_output +7114,14298229,"TERMINAL",0,0,"2772033",,terminal_output +7115,14299285,"TERMINAL",0,0,"388144",,terminal_output +7116,14300360,"TERMINAL",0,0,"42020366",,terminal_output +7117,14301485,"TERMINAL",0,0,"M63373410 accelerat wrap tum_cte0 CG42:56\t 1 hkn07231147",,terminal_output +7118,14302419,"TERMINAL",0,0,"72258",,terminal_output +7119,14303469,"TERMINAL",0,0,"83369",,terminal_output +7120,14304557,"TERMINAL",0,0,"944750",,terminal_output +7121,14305584,"TERMINAL",0,0,"405581",,terminal_output +7122,14306708,"TERMINAL",0,0,"16692",,terminal_output +7123,14307670,"TERMINAL",0,0,"277303",,terminal_output +7124,14308757,"TERMINAL",0,0,"38814",,terminal_output +7125,14309754,"TERMINAL",0,0,"49925",,terminal_output +7126,14310809,"TERMINAL",0,0,"5303036",,terminal_output +7127,14311930,"TERMINAL",0,0,"\r61147",,terminal_output +7128,14312896,"TERMINAL",0,0,"72258",,terminal_output +7129,14313934,"TERMINAL",0,0,"83369",,terminal_output +7130,14315190,"TERMINAL",0,0,"94471:00",,terminal_output +7131,14316235,"TERMINAL",0,0,"505581",,terminal_output +7132,14317240,"TERMINAL",0,0,"16692",,terminal_output +7133,14318293,"TERMINAL",0,0,"277403",,terminal_output +7134,14319367,"TERMINAL",0,0,"39925",,terminal_output +7135,14320430,"TERMINAL",0,0,"5404036",,terminal_output +7136,14321456,"TERMINAL",0,0,"61147",,terminal_output +7137,14322479,"TERMINAL",0,0,"72258",,terminal_output +7138,14323518,"TERMINAL",0,0,"83369",,terminal_output +7139,14324574,"TERMINAL",0,0,"944710",,terminal_output +7140,14325652,"TERMINAL",0,0,"9:005581",,terminal_output +7141,14326677,"TERMINAL",0,0,"16692",,terminal_output +7142,14327713,"TERMINAL",0,0,"277503",,terminal_output +7143,14328829,"TERMINAL",0,0,"38814",,terminal_output +7144,14329854,"TERMINAL",0,0,"49925",,terminal_output +7145,14330879,"TERMINAL",0,0,"5505036",,terminal_output +7146,14331918,"TERMINAL",0,0,"61147",,terminal_output +7147,14332928,"TERMINAL",0,0,"72258",,terminal_output +7148,14334047,"TERMINAL",0,0,"83369",,terminal_output +7149,14335072,"TERMINAL",0,0,"944720",,terminal_output +7150,14336097,"TERMINAL",0,0,"105581",,terminal_output +7151,14337097,"TERMINAL",0,0,"16692",,terminal_output +7152,14338145,"TERMINAL",0,0,"2776:003",,terminal_output +7153,14339227,"TERMINAL",0,0,"38814",,terminal_output +7154,14340220,"TERMINAL",0,0,"49925",,terminal_output +7155,14341266,"TERMINAL",0,0,"55:005:0036",,terminal_output +7156,14342320,"TERMINAL",0,0,"62258",,terminal_output +7157,14343366,"TERMINAL",0,0,"83369",,terminal_output +7158,14344404,"TERMINAL",0,0,"944730",,terminal_output +7159,14345517,"TERMINAL",0,0,"205581",,terminal_output +7160,14346550,"TERMINAL",0,0,"16692",,terminal_output +7161,14347547,"TERMINAL",0,0,"277103",,terminal_output +7162,14348595,"TERMINAL",0,0,"38814",,terminal_output +7163,14349717,"TERMINAL",0,0,"49925",,terminal_output +7164,14350741,"TERMINAL",0,0,"5101036",,terminal_output 
+7165,14351765,"TERMINAL",0,0,"61147",,terminal_output +7166,14352788,"TERMINAL",0,0,"72258",,terminal_output +7167,14353916,"TERMINAL",0,0,"83369",,terminal_output +7168,14354938,"TERMINAL",0,0,"944740",,terminal_output +7169,14355964,"TERMINAL",0,0,"305581",,terminal_output +7170,14356987,"TERMINAL",0,0,"16692",,terminal_output +7171,14357998,"TERMINAL",0,0,"277203",,terminal_output +7172,14358372,"models/lam.py",3108,0,"",python,selection_mouse +7173,14358817,"models/lam.py",3103,0,"",python,selection_mouse +7174,14358964,"models/lam.py",3097,6,"action",python,selection_mouse +7175,14359061,"TERMINAL",0,0,"38814",,terminal_output +7176,14359121,"models/lam.py",3076,50," # Get latent action for all future frames\n",python,selection_mouse +7177,14359861,"models/lam.py",3146,0,"",python,selection_mouse +7178,14360101,"TERMINAL",0,0,"49925",,terminal_output +7179,14360396,"models/lam.py",3142,0,"",python,selection_mouse +7180,14360905,"models/lam.py",3143,0,"",python,selection_mouse +7181,14361142,"TERMINAL",0,0,"5202036",,terminal_output +7182,14361937,"models/lam.py",3219,0,"",python,selection_mouse +7183,14362216,"TERMINAL",0,0,"61147",,terminal_output +7184,14362468,"models/lam.py",3141,0,"",python,selection_mouse +7185,14362997,"models/lam.py",3137,0,"",python,selection_mouse +7186,14363164,"models/lam.py",3137,1," ",python,selection_mouse +7187,14363228,"TERMINAL",0,0,"72258",,terminal_output +7188,14363377,"models/lam.py",3137,3," z[",python,selection_mouse +7189,14363397,"models/lam.py",3137,5," z[:,",python,selection_mouse +7190,14363412,"models/lam.py",3137,6," z[:, ",python,selection_mouse +7191,14363429,"models/lam.py",3137,8," z[:, 1:",python,selection_mouse +7192,14363493,"models/lam.py",3137,10," z[:, 1:, ",python,selection_mouse +7193,14363493,"models/lam.py",3137,11," z[:, 1:, 0",python,selection_mouse +7194,14363494,"models/lam.py",3137,12," z[:, 1:, 0]",python,selection_mouse +7195,14363495,"models/lam.py",3137,13," z[:, 1:, 0] ",python,selection_mouse +7196,14363513,"models/lam.py",3137,14," z[:, 1:, 0] ",python,selection_mouse +7197,14363898,"models/lam.py",3151,0,"",python,selection_mouse +7198,14364282,"TERMINAL",0,0,"83369",,terminal_output +7199,14365324,"TERMINAL",0,0,"955851",,terminal_output +7200,14366377,"TERMINAL",0,0,"416692",,terminal_output +7201,14367537,"TERMINAL",0,0,"277303",,terminal_output +7202,14368555,"TERMINAL",0,0,"38814",,terminal_output +7203,14369521,"TERMINAL",0,0,"49925",,terminal_output +7204,14370606,"TERMINAL",0,0,"5303036",,terminal_output +7205,14371631,"TERMINAL",0,0,"61147",,terminal_output +7206,14372759,"TERMINAL",0,0,"72258",,terminal_output +7207,14373781,"TERMINAL",0,0,"83369",,terminal_output +7208,14374805,"TERMINAL",0,0,"94472:00",,terminal_output +7209,14375829,"TERMINAL",0,0,"505581",,terminal_output +7210,14376859,"TERMINAL",0,0,"16692",,terminal_output +7211,14377773,"models/lam.py",3135,0,"",python,selection_mouse +7212,14377908,"TERMINAL",0,0,"277403",,terminal_output +7213,14377933,"models/lam.py",3134,1,"z",python,selection_mouse +7214,14378136,"models/lam.py",3134,2,"z ",python,selection_mouse +7215,14378157,"models/lam.py",3134,3,"z =",python,selection_mouse +7216,14378175,"models/lam.py",3086,49,"Get latent action for all future frames\n z",python,selection_mouse +7217,14378191,"models/lam.py",3090,45,"latent action for all future frames\n z",python,selection_mouse +7218,14378246,"models/lam.py",3035,100,".encoder(padded_patches) # (B, T, N, E)\n # Get latent action for all future frames\n z",python,selection_mouse 
+7219,14378785,"models/lam.py",3033,0,"",python,selection_mouse +7220,14378908,"models/lam.py",3031,4,"self",python,selection_mouse +7221,14378962,"TERMINAL",0,0,"38814",,terminal_output +7222,14379180,"models/lam.py",3030,5," self",python,selection_mouse +7223,14379196,"models/lam.py",3029,6,"= self",python,selection_mouse +7224,14379262,"models/lam.py",3028,7," = self",python,selection_mouse +7225,14379603,"models/lam.py",3028,0,"",python,selection_mouse +7226,14379938,"models/lam.py",3027,0,"",python,selection_mouse +7227,14379995,"TERMINAL",0,0,"49925",,terminal_output +7228,14381054,"TERMINAL",0,0,"5404036",,terminal_output +7229,14382178,"TERMINAL",0,0,"61147",,terminal_output +7230,14383222,"TERMINAL",0,0,"72258",,terminal_output +7231,14384230,"TERMINAL",0,0,"83369",,terminal_output +7232,14385251,"TERMINAL",0,0,"944710",,terminal_output +7233,14385453,"models/lam.py",3042,0,"",python,selection_mouse +7234,14385612,"models/lam.py",3036,7,"encoder",python,selection_mouse +7235,14386166,"models/lam.py",3040,0,"",python,selection_mouse +7236,14386261,"models/lam.py",3036,7,"encoder",python,selection_mouse +7237,14386334,"TERMINAL",0,0,"8:00:005581",,terminal_output +7238,14387011,"models/lam.py",3027,0,"",python,selection_mouse +7239,14387340,"TERMINAL",0,0,"177503",,terminal_output +7240,14388412,"TERMINAL",0,0,"38814",,terminal_output +7241,14388606,"models/lam.py",3134,0,"",python,selection_mouse +7242,14389473,"TERMINAL",0,0,"49925",,terminal_output +7243,14389633,"models/lam.py",3134,1,"z",python,selection_mouse +7244,14389655,"models/lam.py",3134,2,"z ",python,selection_mouse +7245,14389670,"models/lam.py",3134,3,"z =",python,selection_mouse +7246,14389689,"models/lam.py",3134,4,"z = ",python,selection_mouse +7247,14389806,"models/lam.py",3134,5,"z = z",python,selection_mouse +7248,14389807,"models/lam.py",3134,6,"z = z[",python,selection_mouse +7249,14389972,"models/lam.py",3134,7,"z = z[:",python,selection_mouse +7250,14390065,"models/lam.py",3134,8,"z = z[:,",python,selection_mouse +7251,14390097,"models/lam.py",3134,9,"z = z[:, ",python,selection_mouse +7252,14390169,"models/lam.py",3134,10,"z = z[:, 1",python,selection_mouse +7253,14390186,"models/lam.py",3134,11,"z = z[:, 1:",python,selection_mouse +7254,14390250,"models/lam.py",3134,12,"z = z[:, 1:,",python,selection_mouse +7255,14390251,"models/lam.py",3134,13,"z = z[:, 1:, ",python,selection_mouse +7256,14390266,"models/lam.py",3134,14,"z = z[:, 1:, 0",python,selection_mouse +7257,14390322,"models/lam.py",3134,15,"z = z[:, 1:, 0]",python,selection_mouse +7258,14390323,"models/lam.py",3134,16,"z = z[:, 1:, 0] ",python,selection_mouse +7259,14390509,"TERMINAL",0,0,"5505036",,terminal_output +7260,14390783,"models/lam.py",3150,0,"",python,selection_mouse +7261,14390948,"models/lam.py",3149,2," ",python,selection_mouse +7262,14391036,"models/lam.py",3148,3,"] ",python,selection_mouse +7263,14391057,"models/lam.py",3147,4,"0] ",python,selection_mouse +7264,14391072,"models/lam.py",3145,6,", 0] ",python,selection_mouse +7265,14391086,"models/lam.py",3144,7,":, 0] ",python,selection_mouse +7266,14391103,"models/lam.py",3143,8,"1:, 0] ",python,selection_mouse +7267,14391119,"models/lam.py",3142,9," 1:, 0] ",python,selection_mouse +7268,14391172,"models/lam.py",3141,10,", 1:, 0] ",python,selection_mouse +7269,14391172,"models/lam.py",3139,12,"[:, 1:, 0] ",python,selection_mouse +7270,14391238,"models/lam.py",3138,13,"z[:, 1:, 0] ",python,selection_mouse +7271,14391238,"models/lam.py",3137,14," z[:, 1:, 0] 
",python,selection_mouse +7272,14391296,"models/lam.py",3136,15,"= z[:, 1:, 0] ",python,selection_mouse +7273,14391435,"models/lam.py",3135,16," = z[:, 1:, 0] ",python,selection_mouse +7274,14391562,"TERMINAL",0,0,"61147",,terminal_output +7275,14391728,"models/lam.py",3134,17,"z = z[:, 1:, 0] ",python,selection_mouse +7276,14392188,"models/lam.py",3134,0,"",python,selection_mouse +7277,14392603,"TERMINAL",0,0,"72258",,terminal_output +7278,14393748,"TERMINAL",0,0,"83369",,terminal_output +7279,14394765,"TERMINAL",0,0,"944720",,terminal_output +7280,14395743,"TERMINAL",0,0,"105581",,terminal_output +7281,14396785,"TERMINAL",0,0,"16692",,terminal_output +7282,14397839,"TERMINAL",0,0,"2777:003",,terminal_output +7283,14398975,"TERMINAL",0,0,"38814",,terminal_output +7284,14399908,"TERMINAL",0,0,"49925",,terminal_output +7285,14401019,"TERMINAL",0,0,"56:006:0036",,terminal_output +7286,14401996,"TERMINAL",0,0,"61147",,terminal_output +7287,14402156,"models/lam.py",3328,0,"",python,selection_mouse +7288,14403034,"TERMINAL",0,0,"72258",,terminal_output +7289,14403226,"models/lam.py",3386,0,"",python,selection_mouse +7290,14404081,"TERMINAL",0,0,"83369",,terminal_output +7291,14405218,"TERMINAL",0,0,"944730",,terminal_output +7292,14405412,"models/dynamics.py",0,0,"",python,tab +7293,14406239,"train_dynamics.py",0,0,"",python,tab +7294,14406312,"TERMINAL",0,0,"205581",,terminal_output +7295,14407120,"models/dynamics.py",0,0,"",python,tab +7296,14407259,"TERMINAL",0,0,"16692",,terminal_output +7297,14408295,"TERMINAL",0,0,"277103",,terminal_output +7298,14409319,"TERMINAL",0,0,"39925",,terminal_output +7299,14410355,"TERMINAL",0,0,"5101036",,terminal_output +7300,14411414,"TERMINAL",0,0,"61147",,terminal_output +7301,14412488,"TERMINAL",0,0,"72258",,terminal_output +7302,14413488,"TERMINAL",0,0,"83369",,terminal_output +7303,14414640,"TERMINAL",0,0,"944740",,terminal_output +7304,14415662,"TERMINAL",0,0,"305581",,terminal_output +7305,14416643,"TERMINAL",0,0,"16692",,terminal_output +7306,14417710,"TERMINAL",0,0,"277203",,terminal_output +7307,14418837,"TERMINAL",0,0,"38814",,terminal_output +7308,14419772,"TERMINAL",0,0,"49925",,terminal_output +7309,14420884,"TERMINAL",0,0,"5202036",,terminal_output +7310,14421909,"TERMINAL",0,0,"61147",,terminal_output +7311,14422933,"TERMINAL",0,0,"72258",,terminal_output +7312,14423961,"TERMINAL",0,0,"83369",,terminal_output +7313,14425083,"TERMINAL",0,0,"944750",,terminal_output +7314,14426115,"TERMINAL",0,0,"405581",,terminal_output +7315,14427098,"TERMINAL",0,0,"16692",,terminal_output +7316,14428155,"TERMINAL",0,0,"277303",,terminal_output +7317,14429282,"TERMINAL",0,0,"38814",,terminal_output +7318,14430246,"TERMINAL",0,0,"49925",,terminal_output +7319,14431285,"TERMINAL",0,0,"5303036",,terminal_output +7320,14432361,"TERMINAL",0,0,"62258",,terminal_output +7321,14433401,"TERMINAL",0,0,"83369",,terminal_output +7322,14434453,"TERMINAL",0,0,"94473:00",,terminal_output +7323,14435530,"TERMINAL",0,0,"505581",,terminal_output +7324,14436558,"TERMINAL",0,0,"16692",,terminal_output +7325,14437592,"TERMINAL",0,0,"277403",,terminal_output +7326,14438683,"TERMINAL",0,0,"38814",,terminal_output +7327,14439727,"TERMINAL",0,0,"49925",,terminal_output +7328,14440749,"TERMINAL",0,0,"5404036",,terminal_output +7329,14441791,"TERMINAL",0,0,"61147",,terminal_output +7330,14442902,"TERMINAL",0,0,"72258",,terminal_output +7331,14443928,"TERMINAL",0,0,"83369",,terminal_output +7332,14444951,"TERMINAL",0,0,"944710",,terminal_output 
+7333,14445977,"TERMINAL",0,0,"1:005581",,terminal_output +7334,14447100,"TERMINAL",0,0,"16692",,terminal_output +7335,14448123,"TERMINAL",0,0,"277503",,terminal_output +7336,14449152,"TERMINAL",0,0,"38814",,terminal_output +7337,14450212,"TERMINAL",0,0,"49925",,terminal_output +7338,14451300,"TERMINAL",0,0,"5505036",,terminal_output +7339,14452259,"TERMINAL",0,0,"61147",,terminal_output +7340,14453350,"TERMINAL",0,0,"72258",,terminal_output +7341,14454381,"TERMINAL",0,0,"844720",,terminal_output +7342,14455498,"TERMINAL",0,0,"105581",,terminal_output +7343,14456524,"TERMINAL",0,0,"16692",,terminal_output +7344,14457547,"TERMINAL",0,0,"2778:003",,terminal_output +7345,14458672,"TERMINAL",0,0,"38814",,terminal_output +7346,14459641,"TERMINAL",0,0,"49925",,terminal_output +7347,14459919,"models/dynamics.py",4106,0,"",python,selection_mouse +7348,14460307,"models/dynamics.py",4132,0,"",python,selection_mouse +7349,14460694,"TERMINAL",0,0,"57:007:0036",,terminal_output +7350,14461740,"TERMINAL",0,0,"61147",,terminal_output +7351,14462775,"TERMINAL",0,0,"72258",,terminal_output +7352,14463012,"train_dynamics.py",0,0,"",python,tab +7353,14463829,"TERMINAL",0,0,"83369",,terminal_output +7354,14464907,"TERMINAL",0,0,"944730",,terminal_output +7355,14465941,"TERMINAL",0,0,"205581",,terminal_output +7356,14466960,"TERMINAL",0,0,"16692",,terminal_output +7357,14468008,"TERMINAL",0,0,"277103",,terminal_output +7358,14469116,"TERMINAL",0,0,"38814",,terminal_output +7359,14470096,"TERMINAL",0,0,"49925",,terminal_output +7360,14471165,"TERMINAL",0,0,"5101036",,terminal_output +7361,14472290,"TERMINAL",0,0,"61147",,terminal_output +7362,14473317,"TERMINAL",0,0,"72258",,terminal_output +7363,14474273,"TERMINAL",0,0,"83369",,terminal_output +7364,14475317,"TERMINAL",0,0,"955841",,terminal_output +7365,14476362,"TERMINAL",0,0,"316692",,terminal_output +7366,14477386,"train_dynamics.py",3185,0,"",python,selection_mouse +7367,14477419,"TERMINAL",0,0,"277203",,terminal_output +7368,14477521,"train_dynamics.py",3182,7,"ce_loss",python,selection_mouse +7369,14478442,"TERMINAL",0,0,"38814",,terminal_output +7370,14478619,"train_dynamics.py",3186,0,"",python,selection_mouse +7371,14478620,"train_dynamics.py",3182,7,"ce_loss",python,selection_mouse +7372,14478862,"train_dynamics.py",3182,10,"ce_loss = ",python,selection_mouse +7373,14478876,"train_dynamics.py",3182,15,"ce_loss = optax",python,selection_mouse +7374,14478901,"train_dynamics.py",3182,57,"ce_loss = optax.softmax_cross_entropy_with_integer_labels",python,selection_mouse +7375,14479205,"train_dynamics.py",3182,58,"ce_loss = optax.softmax_cross_entropy_with_integer_labels(",python,selection_mouse +7376,14479225,"train_dynamics.py",3182,64,"ce_loss = optax.softmax_cross_entropy_with_integer_labels(logits",python,selection_mouse +7377,14479296,"train_dynamics.py",3182,65,"ce_loss = optax.softmax_cross_entropy_with_integer_labels(logits,",python,selection_mouse +7378,14479296,"train_dynamics.py",3182,73,"ce_loss = optax.softmax_cross_entropy_with_integer_labels(logits, targets",python,selection_mouse +7379,14479371,"train_dynamics.py",3182,74,"ce_loss = optax.softmax_cross_entropy_with_integer_labels(logits, targets)",python,selection_mouse +7380,14479484,"TERMINAL",0,0,"49925",,terminal_output +7381,14480585,"TERMINAL",0,0,"5202036",,terminal_output +7382,14481597,"TERMINAL",0,0,"61147",,terminal_output +7383,14482239,"train_dynamics.py",3256,0,"",python,selection_mouse +7384,14482254,"train_dynamics.py",3255,0,"",python,selection_command 
+7385,14482634,"TERMINAL",0,0,"72258",,terminal_output +7386,14483675,"TERMINAL",0,0,"83369",,terminal_output +7387,14484786,"TERMINAL",0,0,"944750",,terminal_output +7388,14485733,"TERMINAL",0,0,"405581",,terminal_output +7389,14486780,"TERMINAL",0,0,"16692",,terminal_output +7390,14486864,"train_dynamics.py",3006,0,"",python,selection_mouse +7391,14487448,"train_dynamics.py",2998,0,"",python,selection_mouse +7392,14487589,"train_dynamics.py",2991,12,"video_tokens",python,selection_mouse +7393,14487821,"TERMINAL",0,0,"277303",,terminal_output +7394,14488880,"TERMINAL",0,0,"38814",,terminal_output +7395,14490007,"TERMINAL",0,0,"49925",,terminal_output +7396,14491030,"TERMINAL",0,0,"5303036",,terminal_output +7397,14492057,"TERMINAL",0,0,"61147",,terminal_output +7398,14493082,"TERMINAL",0,0,"72258",,terminal_output +7399,14494102,"TERMINAL",0,0,"83369",,terminal_output +7400,14495228,"TERMINAL",0,0,"94474:00",,terminal_output +7401,14496251,"TERMINAL",0,0,"505581",,terminal_output +7402,14497292,"TERMINAL",0,0,"16692",,terminal_output +7403,14498267,"TERMINAL",0,0,"277403",,terminal_output +7404,14499631,"TERMINAL",0,0,"39925",,terminal_output +7405,14500763,"TERMINAL",0,0,"5404036",,terminal_output +7406,14501703,"TERMINAL",0,0,"61147",,terminal_output +7407,14502754,"TERMINAL",0,0,"72258",,terminal_output +7408,14503828,"TERMINAL",0,0,"83369",,terminal_output +7409,14504954,"TERMINAL",0,0,"944710",,terminal_output +7410,14505986,"TERMINAL",0,0,"2:005581",,terminal_output +7411,14507004,"TERMINAL",0,0,"16692",,terminal_output +7412,14508028,"TERMINAL",0,0,"277503",,terminal_output +7413,14509050,"TERMINAL",0,0,"38814",,terminal_output +7414,14510097,"TERMINAL",0,0,"49925",,terminal_output +7415,14511154,"TERMINAL",0,0,"5505036",,terminal_output +7416,14512192,"TERMINAL",0,0,"61147",,terminal_output +7417,14513251,"TERMINAL",0,0,"72258",,terminal_output +7418,14514378,"TERMINAL",0,0,"83369",,terminal_output +7419,14515338,"TERMINAL",0,0,"955821",,terminal_output +7420,14516381,"TERMINAL",0,0,"116692",,terminal_output +7421,14517432,"TERMINAL",0,0,"2779:003",,terminal_output +7422,14518484,"TERMINAL",0,0,"38814",,terminal_output +7423,14519527,"TERMINAL",0,0,"49925",,terminal_output +7424,14520627,"TERMINAL",0,0,"58:008:0036",,terminal_output +7425,14521614,"TERMINAL",0,0,"61147",,terminal_output +7426,14522675,"TERMINAL",0,0,"72258",,terminal_output +7427,14523698,"TERMINAL",0,0,"83369",,terminal_output +7428,14524822,"TERMINAL",0,0,"944730",,terminal_output +7429,14525784,"TERMINAL",0,0,"205581",,terminal_output +7430,14526879,"TERMINAL",0,0,"16692",,terminal_output +7431,14527900,"TERMINAL",0,0,"277103",,terminal_output +7432,14528919,"TERMINAL",0,0,"38814",,terminal_output +7433,14529946,"TERMINAL",0,0,"49925",,terminal_output +7434,14531069,"TERMINAL",0,0,"5101036",,terminal_output +7435,14532093,"TERMINAL",0,0,"61147",,terminal_output +7436,14533118,"TERMINAL",0,0,"72258",,terminal_output +7437,14534144,"TERMINAL",0,0,"83369",,terminal_output +7438,14535192,"TERMINAL",0,0,"944740",,terminal_output +7439,14536309,"TERMINAL",0,0,"305581",,terminal_output +7440,14537315,"TERMINAL",0,0,"16692",,terminal_output +7441,14538303,"TERMINAL",0,0,"277203",,terminal_output +7442,14539334,"TERMINAL",0,0,"39925",,terminal_output +7443,14540380,"TERMINAL",0,0,"5202036",,terminal_output +7444,14541414,"TERMINAL",0,0,"61147",,terminal_output +7445,14542537,"TERMINAL",0,0,"72258",,terminal_output +7446,14543562,"TERMINAL",0,0,"83369",,terminal_output 
+7447,14544552,"TERMINAL",0,0,"944750",,terminal_output +7448,14545611,"TERMINAL",0,0,"405581",,terminal_output +7449,14546653,"TERMINAL",0,0,"16692",,terminal_output +7450,14547760,"TERMINAL",0,0,"277303",,terminal_output +7451,14548752,"TERMINAL",0,0,"38814",,terminal_output +7452,14549791,"TERMINAL",0,0,"49925",,terminal_output +7453,14550842,"TERMINAL",0,0,"5303036",,terminal_output +7454,14551880,"TERMINAL",0,0,"61147",,terminal_output +7455,14552933,"TERMINAL",0,0,"72258",,terminal_output +7456,14554010,"TERMINAL",0,0,"83369",,terminal_output +7457,14555019,"TERMINAL",0,0,"94475:00",,terminal_output +7458,14556062,"TERMINAL",0,0,"505581",,terminal_output +7459,14557183,"TERMINAL",0,0,"16692",,terminal_output +7460,14558143,"TERMINAL",0,0,"277403",,terminal_output +7461,14559188,"TERMINAL",0,0,"38814",,terminal_output +7462,14560228,"TERMINAL",0,0,"49925",,terminal_output +7463,14561279,"TERMINAL",0,0,"5404036",,terminal_output +7464,14562336,"TERMINAL",0,0,"62258",,terminal_output +7465,14563363,"TERMINAL",0,0,"83369",,terminal_output +7466,14564406,"TERMINAL",0,0,"944710",,terminal_output +7467,14565465,"TERMINAL",0,0,"3:005581",,terminal_output +7468,14566600,"TERMINAL",0,0,"16692",,terminal_output +7469,14567550,"TERMINAL",0,0,"277503",,terminal_output +7470,14568587,"TERMINAL",0,0,"38814",,terminal_output +7471,14569109,"train_dynamics.py",2983,0,"",python,selection_mouse +7472,14569281,"train_dynamics.py",2982,7,"outputs",python,selection_mouse +7473,14569625,"TERMINAL",0,0,"49925",,terminal_output +7474,14569807,"train_dynamics.py",2999,0,"",python,selection_mouse +7475,14569972,"train_dynamics.py",2991,12,"video_tokens",python,selection_mouse +7476,14570396,"train_dynamics.py",2985,0,"",python,selection_mouse +7477,14570545,"train_dynamics.py",2982,7,"outputs",python,selection_mouse +7478,14570692,"TERMINAL",0,0,"5505036",,terminal_output +7479,14571706,"TERMINAL",0,0,"61147",,terminal_output +7480,14572631,"train_dynamics.py",2977,0,"",python,selection_mouse +7481,14572757,"TERMINAL",0,0,"72258",,terminal_output +7482,14572851,"train_dynamics.py",2972,7,"targets",python,selection_mouse +7483,14573870,"TERMINAL",0,0,"83369",,terminal_output +7484,14574837,"TERMINAL",0,0,"944720",,terminal_output +7485,14575918,"TERMINAL",0,0,"105581",,terminal_output +7486,14576946,"TERMINAL",0,0,"16692",,terminal_output +7487,14577967,"TERMINAL",0,0,"27750:003",,terminal_output +7488,14579009,"TERMINAL",0,0,"38814",,terminal_output +7489,14580120,"TERMINAL",0,0,"49925",,terminal_output +7490,14581150,"TERMINAL",0,0,"59:009:0036",,terminal_output +7491,14582155,"TERMINAL",0,0,"61147",,terminal_output +7492,14583241,"TERMINAL",0,0,"72258",,terminal_output +7493,14584332,"TERMINAL",0,0,"83369",,terminal_output +7494,14585291,"TERMINAL",0,0,"944730",,terminal_output +7495,14586376,"TERMINAL",0,0,"206692",,terminal_output +7496,14587377,"TERMINAL",0,0,"277103",,terminal_output +7497,14588424,"TERMINAL",0,0,"38814",,terminal_output +7498,14589483,"TERMINAL",0,0,"49925",,terminal_output +7499,14590563,"TERMINAL",0,0,"5101036",,terminal_output +7500,14591598,"TERMINAL",0,0,"61147",,terminal_output +7501,14592714,"TERMINAL",0,0,"72258",,terminal_output +7502,14593739,"TERMINAL",0,0,"83369",,terminal_output +7503,14594685,"TERMINAL",0,0,"944740",,terminal_output +7504,14595724,"TERMINAL",0,0,"305581",,terminal_output +7505,14596763,"TERMINAL",0,0,"16692",,terminal_output +7506,14597835,"TERMINAL",0,0,"277203",,terminal_output +7507,14598962,"TERMINAL",0,0,"38814",,terminal_output 
+7508,14599985,"TERMINAL",0,0,"49925",,terminal_output +7509,14600943,"TERMINAL",0,0,"5202036",,terminal_output +7510,14601984,"TERMINAL",0,0,"61147",,terminal_output +7511,14603033,"TERMINAL",0,0,"72258",,terminal_output +7512,14604091,"TERMINAL",0,0,"83369",,terminal_output +7513,14605134,"TERMINAL",0,0,"944750",,terminal_output +7514,14606187,"TERMINAL",0,0,"405581",,terminal_output +7515,14607257,"TERMINAL",0,0,"16692",,terminal_output +7516,14608267,"TERMINAL",0,0,"277303",,terminal_output +7517,14609361,"TERMINAL",0,0,"39925",,terminal_output +7518,14610356,"TERMINAL",0,0,"5303036",,terminal_output +7519,14611391,"TERMINAL",0,0,"61147",,terminal_output +7520,14612435,"TERMINAL",0,0,"72258",,terminal_output +7521,14613730,"TERMINAL",0,0,"83369",,terminal_output +7522,14614631,"TERMINAL",0,0,"94476:00",,terminal_output +7523,14615667,"TERMINAL",0,0,"505581",,terminal_output +7524,14616784,"TERMINAL",0,0,"16692",,terminal_output +7525,14617804,"TERMINAL",0,0,"277403",,terminal_output +7526,14618827,"TERMINAL",0,0,"38814",,terminal_output +7527,14620055,"TERMINAL",0,0,"49925",,terminal_output +7528,14621063,"TERMINAL",0,0,"5404036",,terminal_output +7529,14622205,"TERMINAL",0,0,"61147",,terminal_output +7530,14623162,"TERMINAL",0,0,"72258",,terminal_output +7531,14624254,"TERMINAL",0,0,"83369",,terminal_output +7532,14625254,"TERMINAL",0,0,"944710",,terminal_output +7533,14626301,"TERMINAL",0,0,"4:005581",,terminal_output +7534,14627429,"TERMINAL",0,0,"177503",,terminal_output +7535,14628387,"TERMINAL",0,0,"38814",,terminal_output +7536,14629429,"TERMINAL",0,0,"49925",,terminal_output +7537,14630470,"TERMINAL",0,0,"5505036",,terminal_output +7538,14631512,"TERMINAL",0,0,"61147",,terminal_output +7539,14632555,"TERMINAL",0,0,"72258",,terminal_output +7540,14633676,"TERMINAL",0,0,"83369",,terminal_output +7541,14634638,"TERMINAL",0,0,"944720",,terminal_output +7542,14635722,"TERMINAL",0,0,"105581",,terminal_output +7543,14636723,"TERMINAL",0,0,"16692",,terminal_output +7544,14637767,"TERMINAL",0,0,"2771:003",,terminal_output +7545,14638904,"TERMINAL",0,0,"38814",,terminal_output +7546,14639921,"TERMINAL",0,0,"49925",,terminal_output +7547,14640945,"TERMINAL",0,0,"530:0030:0036",,terminal_output +7548,14641968,"TERMINAL",0,0,"61147",,terminal_output +7549,14642983,"TERMINAL",0,0,"72258",,terminal_output +7550,14644119,"TERMINAL",0,0,"83369",,terminal_output +7551,14645097,"TERMINAL",0,0,"944730",,terminal_output +7552,14646168,"TERMINAL",0,0,"205581",,terminal_output +7553,14647199,"TERMINAL",0,0,"16692",,terminal_output +7554,14648318,"TERMINAL",0,0,"277103",,terminal_output +7555,14649353,"TERMINAL",0,0,"38814",,terminal_output +7556,14650336,"TERMINAL",0,0,"4101036",,terminal_output +7557,14651404,"TERMINAL",0,0,"61147",,terminal_output +7558,14652425,"TERMINAL",0,0,"72258",,terminal_output +7559,14653476,"TERMINAL",0,0,"83369",,terminal_output +7560,14654523,"TERMINAL",0,0,"944740",,terminal_output +7561,14655590,"TERMINAL",0,0,"305581",,terminal_output +7562,14656617,"TERMINAL",0,0,"16692",,terminal_output +7563,14657738,"TERMINAL",0,0,"277203",,terminal_output +7564,14658762,"TERMINAL",0,0,"38814",,terminal_output +7565,14659789,"TERMINAL",0,0,"49925",,terminal_output +7566,14660814,"TERMINAL",0,0,"5202036",,terminal_output +7567,14661937,"TERMINAL",0,0,"61147",,terminal_output +7568,14662895,"TERMINAL",0,0,"72258",,terminal_output +7569,14663987,"TERMINAL",0,0,"83369",,terminal_output +7570,14665009,"TERMINAL",0,0,"944750",,terminal_output 
+7571,14666034,"TERMINAL",0,0,"405581",,terminal_output +7572,14667043,"TERMINAL",0,0,"16692",,terminal_output +7573,14668184,"TERMINAL",0,0,"277303",,terminal_output +7574,14669136,"TERMINAL",0,0,"38814",,terminal_output +7575,14670206,"TERMINAL",0,0,"49925",,terminal_output +7576,14671229,"TERMINAL",0,0,"5303036",,terminal_output +7577,14672276,"TERMINAL",0,0,"61147",,terminal_output +7578,14673405,"TERMINAL",0,0,"73369",,terminal_output +7579,14674382,"TERMINAL",0,0,"94477:00",,terminal_output +7580,14675407,"TERMINAL",0,0,"505581",,terminal_output +7581,14676484,"TERMINAL",0,0,"16692",,terminal_output +7582,14677500,"TERMINAL",0,0,"277403",,terminal_output +7583,14678574,"TERMINAL",0,0,"38814",,terminal_output +7584,14679655,"TERMINAL",0,0,"49925",,terminal_output +7585,14680676,"TERMINAL",0,0,"5404036",,terminal_output +7586,14681695,"TERMINAL",0,0,"61147",,terminal_output +7587,14682840,"TERMINAL",0,0,"72258",,terminal_output +7588,14683829,"TERMINAL",0,0,"83369",,terminal_output +7589,14684816,"TERMINAL",0,0,"944710",,terminal_output +7590,14685904,"TERMINAL",0,0,"5:005581",,terminal_output +7591,14686905,"TERMINAL",0,0,"16692",,terminal_output +7592,14687989,"TERMINAL",0,0,"277503",,terminal_output +7593,14688996,"TERMINAL",0,0,"38814",,terminal_output +7594,14690043,"TERMINAL",0,0,"49925",,terminal_output +7595,14691100,"TERMINAL",0,0,"5505036",,terminal_output +7596,14692141,"TERMINAL",0,0,"61147",,terminal_output +7597,14693203,"TERMINAL",0,0,"72258",,terminal_output +7598,14694265,"TERMINAL",0,0,"83369",,terminal_output +7599,14695320,"TERMINAL",0,0,"944720",,terminal_output +7600,14696344,"TERMINAL",0,0,"106692",,terminal_output +7601,14697387,"TERMINAL",0,0,"2772:003",,terminal_output +7602,14698498,"TERMINAL",0,0,"38814",,terminal_output +7603,14699457,"TERMINAL",0,0,"49925",,terminal_output +7604,14700504,"TERMINAL",0,0,"51:001:0036",,terminal_output +7605,14701544,"TERMINAL",0,0,"61147",,terminal_output +7606,14702584,"TERMINAL",0,0,"72258",,terminal_output +7607,14703626,"TERMINAL",0,0,"83369",,terminal_output +7608,14704742,"TERMINAL",0,0,"944730",,terminal_output +7609,14705769,"TERMINAL",0,0,"205581",,terminal_output +7610,14706754,"TERMINAL",0,0,"16692",,terminal_output +7611,14707817,"TERMINAL",0,0,"277103",,terminal_output +7612,14708842,"TERMINAL",0,0,"38814",,terminal_output +7613,14709883,"TERMINAL",0,0,"49925",,terminal_output +7614,14710990,"TERMINAL",0,0,"5101036",,terminal_output +7615,14712137,"TERMINAL",0,0,"61147",,terminal_output +7616,14713246,"TERMINAL",0,0,"72258",,terminal_output +7617,14714234,"TERMINAL",0,0,"83369",,terminal_output +7618,14715274,"TERMINAL",0,0,"944740",,terminal_output +7619,14716422,"TERMINAL",0,0,"306692",,terminal_output +7620,14717446,"TERMINAL",0,0,"277203",,terminal_output +7621,14718467,"TERMINAL",0,0,"38814",,terminal_output +7622,14719450,"TERMINAL",0,0,"49925",,terminal_output +7623,14720509,"TERMINAL",0,0,"5202036",,terminal_output +7624,14721547,"TERMINAL",0,0,"61147",,terminal_output +7625,14722581,"TERMINAL",0,0,"72258",,terminal_output +7626,14723642,"TERMINAL",0,0,"83369",,terminal_output +7627,14724715,"TERMINAL",0,0,"944750",,terminal_output +7628,14725743,"TERMINAL",0,0,"405581",,terminal_output +7629,14726858,"TERMINAL",0,0,"16692",,terminal_output +7630,14727837,"TERMINAL",0,0,"277303",,terminal_output +7631,14728909,"TERMINAL",0,0,"38814",,terminal_output +7632,14729937,"TERMINAL",0,0,"49925",,terminal_output +7633,14731087,"TERMINAL",0,0,"5303036",,terminal_output 
+7634,14732003,"TERMINAL",0,0,"61147",,terminal_output +7635,14733064,"TERMINAL",0,0,"72258",,terminal_output +7636,14734144,"TERMINAL",0,0,"83369",,terminal_output +7637,14735166,"TERMINAL",0,0,"94478:00",,terminal_output +7638,14736316,"TERMINAL",0,0,"505581",,terminal_output +7639,14737311,"TERMINAL",0,0,"16692",,terminal_output +7640,14738337,"TERMINAL",0,0,"277403",,terminal_output +7641,14739392,"TERMINAL",0,0,"38814",,terminal_output +7642,14740473,"TERMINAL",0,0,"4404036",,terminal_output +7643,14741492,"TERMINAL",0,0,"61147",,terminal_output +7644,14742540,"TERMINAL",0,0,"72258",,terminal_output +7645,14743587,"TERMINAL",0,0,"83369",,terminal_output +7646,14744682,"TERMINAL",0,0,"944710",,terminal_output +7647,14745717,"TERMINAL",0,0,"6:005581",,terminal_output +7648,14746731,"TERMINAL",0,0,"16692",,terminal_output +7649,14747857,"TERMINAL",0,0,"277503",,terminal_output +7650,14748880,"TERMINAL",0,0,"38814",,terminal_output +7651,14749905,"TERMINAL",0,0,"49925",,terminal_output +7652,14750923,"TERMINAL",0,0,"5505036",,terminal_output +7653,14751937,"TERMINAL",0,0,"61147",,terminal_output +7654,14752964,"TERMINAL",0,0,"72258",,terminal_output +7655,14754018,"TERMINAL",0,0,"83369",,terminal_output +7656,14755139,"TERMINAL",0,0,"944720",,terminal_output +7657,14756094,"TERMINAL",0,0,"105581",,terminal_output +7658,14757138,"TERMINAL",0,0,"16692",,terminal_output +7659,14758194,"TERMINAL",0,0,"2773:003",,terminal_output +7660,14759373,"TERMINAL",0,0,"38814",,terminal_output +7661,14760290,"TERMINAL",0,0,"49925",,terminal_output +7662,14761373,"TERMINAL",0,0,"52:002:0036",,terminal_output +7663,14762363,"TERMINAL",0,0,"72258",,terminal_output +7664,14763432,"TERMINAL",0,0,"83369",,terminal_output +7665,14764446,"TERMINAL",0,0,"944730",,terminal_output +7666,14765487,"TERMINAL",0,0,"205581",,terminal_output +7667,14766798,"TERMINAL",0,0,"16692",,terminal_output +7668,14767821,"TERMINAL",0,0,"277103",,terminal_output +7669,14768846,"TERMINAL",0,0,"38814",,terminal_output +7670,14769868,"TERMINAL",0,0,"49925",,terminal_output +7671,14770948,"TERMINAL",0,0,"5101036",,terminal_output +7672,14772019,"TERMINAL",0,0,"61147",,terminal_output +7673,14772982,"TERMINAL",0,0,"72258",,terminal_output +7674,14774069,"TERMINAL",0,0,"83369",,terminal_output +7675,14775091,"TERMINAL",0,0,"944740",,terminal_output +7676,14776108,"TERMINAL",0,0,"305581",,terminal_output +7677,14777243,"TERMINAL",0,0,"16692",,terminal_output +7678,14778195,"TERMINAL",0,0,"277203",,terminal_output +7679,14779229,"TERMINAL",0,0,"38814",,terminal_output +7680,14780273,"TERMINAL",0,0,"49925",,terminal_output +7681,14781338,"TERMINAL",0,0,"5212147",,terminal_output +7682,14782463,"TERMINAL",0,0,"72258",,terminal_output +7683,14783436,"TERMINAL",0,0,"83369",,terminal_output +7684,14784444,"TERMINAL",0,0,"944750",,terminal_output +7685,14785538,"TERMINAL",0,0,"405581",,terminal_output +7686,14786530,"TERMINAL",0,0,"16692",,terminal_output +7687,14787574,"TERMINAL",0,0,"277303",,terminal_output +7688,14788624,"TERMINAL",0,0,"38814",,terminal_output +7689,14789734,"TERMINAL",0,0,"49925",,terminal_output +7690,14790757,"TERMINAL",0,0,"5303036",,terminal_output +7691,14791885,"TERMINAL",0,0,"61147",,terminal_output +7692,14792821,"TERMINAL",0,0,"72258",,terminal_output +7693,14793865,"TERMINAL",0,0,"83369",,terminal_output +7694,14794906,"TERMINAL",0,0,"94479:00",,terminal_output +7695,14795950,"TERMINAL",0,0,"505581",,terminal_output +7696,14797006,"TERMINAL",0,0,"16692",,terminal_output 
+7697,14798132,"TERMINAL",0,0,"277403",,terminal_output +7698,14799155,"TERMINAL",0,0,"38814",,terminal_output +7699,14800141,"TERMINAL",0,0,"49925",,terminal_output +7700,14801200,"TERMINAL",0,0,"5404036",,terminal_output +7701,14802331,"TERMINAL",0,0,"61147",,terminal_output +7702,14803353,"TERMINAL",0,0,"72258",,terminal_output +7703,14804369,"TERMINAL",0,0,"844710",,terminal_output +7704,14805408,"TERMINAL",0,0,"7:005581",,terminal_output +7705,14806527,"TERMINAL",0,0,"16692",,terminal_output +7706,14807481,"TERMINAL",0,0,"277503",,terminal_output +7707,14808527,"TERMINAL",0,0,"38814",,terminal_output +7708,14809599,"TERMINAL",0,0,"49925",,terminal_output +7709,14810615,"TERMINAL",0,0,"5505036",,terminal_output +7710,14811662,"TERMINAL",0,0,"61147",,terminal_output +7711,14812774,"TERMINAL",0,0,"72258",,terminal_output +7712,14813754,"TERMINAL",0,0,"83369",,terminal_output +7713,14814822,"TERMINAL",0,0,"944720",,terminal_output +7714,14815841,"TERMINAL",0,0,"105581",,terminal_output +7715,14817022,"TERMINAL",0,0,"16692",,terminal_output +7716,14817933,"TERMINAL",0,0,"2774:003",,terminal_output +7717,14819021,"TERMINAL",0,0,"38814",,terminal_output +7718,14820034,"TERMINAL",0,0,"49925",,terminal_output +7719,14821171,"TERMINAL",0,0,"53:003:0036",,terminal_output +7720,14822134,"TERMINAL",0,0,"61147",,terminal_output +7721,14823221,"TERMINAL",0,0,"72258",,terminal_output +7722,14824368,"TERMINAL",0,0,"83369",,terminal_output +7723,14825321,"TERMINAL",0,0,"944730",,terminal_output +7724,14826394,"TERMINAL",0,0,"206692",,terminal_output +7725,14827447,"TERMINAL",0,0,"277103",,terminal_output +7726,14828455,"TERMINAL",0,0,"38814",,terminal_output +7727,14829470,"TERMINAL",0,0,"49925",,terminal_output +7728,14830521,"TERMINAL",0,0,"5101036",,terminal_output +7729,14831565,"TERMINAL",0,0,"61147",,terminal_output +7730,14832603,"TERMINAL",0,0,"72258",,terminal_output +7731,14833652,"TERMINAL",0,0,"83369",,terminal_output +7732,14834790,"TERMINAL",0,0,"944740",,terminal_output +7733,14835739,"TERMINAL",0,0,"305581",,terminal_output +7734,14836841,"TERMINAL",0,0,"16692",,terminal_output +7735,14837827,"TERMINAL",0,0,"277203",,terminal_output +7736,14838885,"TERMINAL",0,0,"38814",,terminal_output +7737,14839916,"TERMINAL",0,0,"49925",,terminal_output +7738,14840986,"TERMINAL",0,0,"5202036",,terminal_output +7739,14842061,"TERMINAL",0,0,"61147",,terminal_output +7740,14843085,"TERMINAL",0,0,"72258",,terminal_output +7741,14844082,"TERMINAL",0,0,"83369",,terminal_output +7742,14845125,"TERMINAL",0,0,"944750",,terminal_output +7743,14846260,"TERMINAL",0,0,"405581",,terminal_output +7744,14847217,"TERMINAL",0,0,"16692",,terminal_output +7745,14848310,"TERMINAL",0,0,"277303",,terminal_output +7746,14849365,"TERMINAL",0,0,"39925",,terminal_output +7747,14850458,"TERMINAL",0,0,"5303036",,terminal_output +7748,14851409,"TERMINAL",0,0,"61147",,terminal_output +7749,14852476,"TERMINAL",0,0,"72258",,terminal_output +7750,14853497,"TERMINAL",0,0,"83369",,terminal_output +7751,14854568,"TERMINAL",0,0,"944740:00",,terminal_output +7752,14855610,"TERMINAL",0,0,"505581",,terminal_output +7753,14856660,"TERMINAL",0,0,"16692",,terminal_output +7754,14857707,"TERMINAL",0,0,"277403",,terminal_output +7755,14858859,"TERMINAL",0,0,"38814",,terminal_output +7756,14859796,"TERMINAL",0,0,"49925",,terminal_output +7757,14861006,"TERMINAL",0,0,"5404036",,terminal_output +7758,14862013,"TERMINAL",0,0,"61147",,terminal_output +7759,14863055,"TERMINAL",0,0,"72258",,terminal_output 
+7760,14864179,"TERMINAL",0,0,"83369",,terminal_output +7761,14865159,"TERMINAL",0,0,"944710",,terminal_output +7762,14866226,"TERMINAL",0,0,"8:005581",,terminal_output +7763,14867354,"TERMINAL",0,0,"16692",,terminal_output +7764,14868378,"TERMINAL",0,0,"277503",,terminal_output +7765,14869380,"TERMINAL",0,0,"39925",,terminal_output +7766,14870423,"TERMINAL",0,0,"5505036",,terminal_output +7767,14871550,"TERMINAL",0,0,"61147",,terminal_output +7768,14872577,"TERMINAL",0,0,"72258",,terminal_output +7769,14873537,"TERMINAL",0,0,"83369",,terminal_output +7770,14874581,"TERMINAL",0,0,"944720",,terminal_output +7771,14875622,"TERMINAL",0,0,"105581",,terminal_output +7772,14876658,"TERMINAL",0,0,"16692",,terminal_output +7773,14877697,"TERMINAL",0,0,"2775:003",,terminal_output +7774,14878736,"TERMINAL",0,0,"38814",,terminal_output +7775,14879776,"TERMINAL",0,0,"49925",,terminal_output +7776,14880829,"TERMINAL",0,0,"54:004:0036",,terminal_output +7777,14881894,"TERMINAL",0,0,"61147",,terminal_output +7778,14882911,"TERMINAL",0,0,"72258",,terminal_output +7779,14883949,"TERMINAL",0,0,"83369",,terminal_output +7780,14885068,"TERMINAL",0,0,"944730",,terminal_output +7781,14886094,"TERMINAL",0,0,"205581",,terminal_output +7782,14887119,"TERMINAL",0,0,"16692",,terminal_output +7783,14888141,"TERMINAL",0,0,"277103",,terminal_output +7784,14889268,"TERMINAL",0,0,"38814",,terminal_output +7785,14890292,"TERMINAL",0,0,"49925",,terminal_output +7786,14891314,"TERMINAL",0,0,"5101036",,terminal_output +7787,14892339,"TERMINAL",0,0,"61147",,terminal_output +7788,14893371,"TERMINAL",0,0,"73369",,terminal_output +7789,14894377,"TERMINAL",0,0,"944740",,terminal_output +7790,14895513,"TERMINAL",0,0,"305581",,terminal_output +7791,14896541,"TERMINAL",0,0,"16692",,terminal_output +7792,14897517,"TERMINAL",0,0,"277203",,terminal_output +7793,14898553,"TERMINAL",0,0,"38814",,terminal_output +7794,14899598,"TERMINAL",0,0,"49925",,terminal_output +7795,14900650,"TERMINAL",0,0,"5202036",,terminal_output +7796,14901760,"TERMINAL",0,0,"61147",,terminal_output +7797,14902738,"TERMINAL",0,0,"72258",,terminal_output +7798,14903777,"TERMINAL",0,0,"83369",,terminal_output +7799,14904833,"TERMINAL",0,0,"944750",,terminal_output +7800,14905959,"TERMINAL",0,0,"405581",,terminal_output +7801,14906960,"TERMINAL",0,0,"16692",,terminal_output +7802,14907961,"TERMINAL",0,0,"277303",,terminal_output +7803,14909031,"TERMINAL",0,0,"38814",,terminal_output +7804,14910055,"TERMINAL",0,0,"49925",,terminal_output +7805,14911180,"TERMINAL",0,0,"5303036",,terminal_output +7806,14912207,"TERMINAL",0,0,"61147",,terminal_output +7807,14913230,"TERMINAL",0,0,"72258",,terminal_output +7808,14914216,"TERMINAL",0,0,"83369",,terminal_output +7809,14915254,"TERMINAL",0,0,"94471:00",,terminal_output +7810,14916404,"TERMINAL",0,0,"505581",,terminal_output +7811,14917338,"TERMINAL",0,0,"177403",,terminal_output +7812,14918455,"TERMINAL",0,0,"38814",,terminal_output +7813,14919424,"TERMINAL",0,0,"49925",,terminal_output +7814,14920499,"TERMINAL",0,0,"5404036",,terminal_output +7815,14921503,"TERMINAL",0,0,"61147",,terminal_output +7816,14922557,"TERMINAL",0,0,"72258",,terminal_output +7817,14923604,"TERMINAL",0,0,"83369",,terminal_output +7818,14924698,"TERMINAL",0,0,"944710",,terminal_output +7819,14925726,"TERMINAL",0,0,"9:005581",,terminal_output +7820,14926746,"TERMINAL",0,0,"16692",,terminal_output +7821,14927781,"TERMINAL",0,0,"277503",,terminal_output +7822,14928897,"TERMINAL",0,0,"38814",,terminal_output 
+7823,14929924,"TERMINAL",0,0,"49925",,terminal_output +7824,14930918,"TERMINAL",0,0,"5505036",,terminal_output +7825,14931969,"TERMINAL",0,0,"61147",,terminal_output +7826,14933007,"TERMINAL",0,0,"72258",,terminal_output +7827,14934059,"TERMINAL",0,0,"83369",,terminal_output +7828,14935144,"TERMINAL",0,0,"944720",,terminal_output +7829,14936151,"TERMINAL",0,0,"105581",,terminal_output +7830,14937294,"TERMINAL",0,0,"16692",,terminal_output +7831,14938319,"TERMINAL",0,0,"2776:003",,terminal_output +7832,14939368,"TERMINAL",0,0,"38814",,terminal_output +7833,14940323,"TERMINAL",0,0,"45:005:0036",,terminal_output +7834,14941390,"TERMINAL",0,0,"61147",,terminal_output +7835,14942416,"TERMINAL",0,0,"72258",,terminal_output +7836,14943541,"TERMINAL",0,0,"83369",,terminal_output +7837,14944515,"TERMINAL",0,0,"944730",,terminal_output +7838,14945594,"TERMINAL",0,0,"205581",,terminal_output +7839,14946617,"TERMINAL",0,0,"16692",,terminal_output +7840,14947740,"TERMINAL",0,0,"277103",,terminal_output +7841,14948716,"TERMINAL",0,0,"38814",,terminal_output +7842,14949788,"TERMINAL",0,0,"49925",,terminal_output +7843,14950915,"TERMINAL",0,0,"5101036",,terminal_output +7844,14951936,"TERMINAL",0,0,"61147",,terminal_output +7845,14952962,"TERMINAL",0,0,"72258",,terminal_output +7846,14953990,"TERMINAL",0,0,"83369",,terminal_output +7847,14955045,"TERMINAL",0,0,"944740",,terminal_output +7848,14956084,"TERMINAL",0,0,"305581",,terminal_output +7849,14957168,"TERMINAL",0,0,"16692",,terminal_output +7850,14958288,"TERMINAL",0,0,"277203",,terminal_output +7851,14959307,"TERMINAL",0,0,"38814",,terminal_output +7852,14960273,"TERMINAL",0,0,"49925",,terminal_output +7853,14961368,"TERMINAL",0,0,"5212147",,terminal_output +7854,14962367,"TERMINAL",0,0,"72258",,terminal_output +7855,14963417,"TERMINAL",0,0,"83369",,terminal_output +7856,14964461,"TERMINAL",0,0,"944750",,terminal_output +7857,14965560,"TERMINAL",0,0,"405581",,terminal_output +7858,14966585,"TERMINAL",0,0,"16692",,terminal_output +7859,14967622,"TERMINAL",0,0,"277303",,terminal_output +7860,14968679,"TERMINAL",0,0,"38814",,terminal_output +7861,14969754,"TERMINAL",0,0,"49925",,terminal_output +7862,14970781,"TERMINAL",0,0,"5303036",,terminal_output +7863,14971906,"TERMINAL",0,0,"61147",,terminal_output +7864,14972931,"TERMINAL",0,0,"72258",,terminal_output +7865,14973953,"TERMINAL",0,0,"83369",,terminal_output +7866,14974964,"TERMINAL",0,0,"94472:00",,terminal_output +7867,14976104,"TERMINAL",0,0,"505581",,terminal_output +7868,14977055,"TERMINAL",0,0,"16692",,terminal_output +7869,14978103,"TERMINAL",0,0,"277403",,terminal_output +7870,14979154,"TERMINAL",0,0,"38814",,terminal_output +7871,14980240,"TERMINAL",0,0,"49925",,terminal_output +7872,14981390,"TERMINAL",0,0,"5414147",,terminal_output +7873,14982440,"TERMINAL",0,0,"72258",,terminal_output +7874,14983490,"TERMINAL",0,0,"83369",,terminal_output +7875,14984550,"TERMINAL",0,0,"944710",,terminal_output +7876,14985602,"TERMINAL",0,0,"10:005581",,terminal_output +7877,14986633,"TERMINAL",0,0,"16692",,terminal_output +7878,14987682,"TERMINAL",0,0,"277503",,terminal_output +7879,14988727,"TERMINAL",0,0,"38814",,terminal_output +7880,14989827,"TERMINAL",0,0,"49925",,terminal_output +7881,14990823,"TERMINAL",0,0,"5505036",,terminal_output +7882,14991977,"TERMINAL",0,0,"61147",,terminal_output +7883,14992939,"TERMINAL",0,0,"72258",,terminal_output +7884,14994024,"TERMINAL",0,0,"83369",,terminal_output +7885,14995047,"TERMINAL",0,0,"944720",,terminal_output 
+7886,14996072,"TERMINAL",0,0,"105581",,terminal_output +7887,14997111,"TERMINAL",0,0,"16692",,terminal_output +7888,14998223,"TERMINAL",0,0,"2777:003",,terminal_output +7889,14999251,"TERMINAL",0,0,"38814",,terminal_output +7890,15000263,"TERMINAL",0,0,"49925",,terminal_output +7891,15001397,"TERMINAL",0,0,"56:016:0147",,terminal_output +7892,15002423,"TERMINAL",0,0,"72258",,terminal_output +7893,15003413,"TERMINAL",0,0,"83369",,terminal_output +7894,15004466,"TERMINAL",0,0,"944730",,terminal_output +7895,15005525,"TERMINAL",0,0,"205581",,terminal_output +7896,15006620,"TERMINAL",0,0,"16692",,terminal_output +7897,15007608,"TERMINAL",0,0,"277103",,terminal_output +7898,15008653,"TERMINAL",0,0,"38814",,terminal_output +7899,15009804,"TERMINAL",0,0,"49925",,terminal_output +7900,15010749,"TERMINAL",0,0,"5101036",,terminal_output +7901,15011842,"TERMINAL",0,0,"61147",,terminal_output +7902,15012865,"TERMINAL",0,0,"72258",,terminal_output +7903,15013890,"TERMINAL",0,0,"83369",,terminal_output +7904,15015018,"TERMINAL",0,0,"944740",,terminal_output +7905,15015991,"TERMINAL",0,0,"305581",,terminal_output +7906,15017065,"TERMINAL",0,0,"16692",,terminal_output +7907,15018090,"TERMINAL",0,0,"277203",,terminal_output +7908,15019214,"TERMINAL",0,0,"38814",,terminal_output +7909,15020237,"TERMINAL",0,0,"49925",,terminal_output +7910,15021263,"TERMINAL",0,0,"5202036",,terminal_output +7911,15022286,"TERMINAL",0,0,"61147",,terminal_output +7912,15023326,"TERMINAL",0,0,"73369",,terminal_output +7913,15024373,"TERMINAL",0,0,"944750",,terminal_output +7914,15025458,"TERMINAL",0,0,"405581",,terminal_output +7915,15026463,"TERMINAL",0,0,"16692",,terminal_output +7916,15027620,"TERMINAL",0,0,"277303",,terminal_output +7917,15028626,"TERMINAL",0,0,"38814[?1049l\r[?1l>]0;tum_cte0515@hkn1991:~/Projects/jafar]633;D;0",,terminal_output +7918,15406792,"TERMINAL",0,0,"mv ../jafar_jobs_2/que^C",,terminal_command +7919,15406814,"TERMINAL",0,0,"^C[?2004l\r[?2004h[?2004l\r\r\n]633;E;;406cfb31-2341-454a-afa8-cae7781806b2]633;C]0;tum_cte0515@hkn1991:~/Projects/jafar]633;D",,terminal_output +7920,15408471,"TERMINAL",0,0,"queue",,terminal_command +7921,15408544,"TERMINAL",0,0,"]633;E;2025-07-24 18:17:03 queue;406cfb31-2341-454a-afa8-cae7781806b2]633;C[?1049h(B[?7hEvery 1.0s: squeue --mehkn1991.localdomain: Thu Jul 24 18:17:03 2025JOBID PARTITION NAME USER ST\tTIME NODES NODELIST(REASON)3373408 accelerat train_dy tum_cte0 PD\t0:00\t 8 (Priority)3373407 accelerat train_dy tum_cte0 PD\t0:00\t 2 (Priority)3371237 accelerat train_dy tum_cte0 R 17:42:48\t 8 hkn[0618,0625-0626,0628-0631,0634]3371238 accelerat train_dy tum_cte0 R 17:42:48\t 2 hkn[0706,0710]3372631 accelerat train_dy tum_cte0 R 4:03:51\t 2 hkn[0515,0622]3372629 accelerat train_dy tum_cte0 R 4:49:14\t 8 hkn[0410,0429,0520,0607,0610,0810,0814,0817]",,terminal_output +7922,15409672,"TERMINAL",0,0,"49925",,terminal_output +7923,15410402,"TERMINAL",0,0,"[?1049l\r[?1l>]0;tum_cte0515@hkn1991:~/Projects/jafar]633;D;0",,terminal_output diff --git a/927a8af5474e5654810c00ce2e09fd2de87d3e5722f33fa1090d867db114e403/crowd-code-f5cc1012-d9cc-4040-b516-e1a241d907881753603147797-2025_07_27-09.59.46.87/source.csv b/927a8af5474e5654810c00ce2e09fd2de87d3e5722f33fa1090d867db114e403/crowd-code-f5cc1012-d9cc-4040-b516-e1a241d907881753603147797-2025_07_27-09.59.46.87/source.csv new file mode 100644 index 0000000000000000000000000000000000000000..cf27a17b9e405de3ab6b721bfab9493da0d21f5b --- /dev/null +++ 
b/927a8af5474e5654810c00ce2e09fd2de87d3e5722f33fa1090d867db114e403/crowd-code-f5cc1012-d9cc-4040-b516-e1a241d907881753603147797-2025_07_27-09.59.46.87/source.csv @@ -0,0 +1,6428 @@ +Sequence,Time,File,RangeOffset,RangeLength,Text,Language,Type +2,891,"extension-output-pdoom-org.crowd-code-#1-crowd-code",0,0,"9:59:46 AM [info] Activating crowd-code\n9:59:46 AM [info] Recording started\n9:59:46 AM [info] Initializing git provider using file system watchers...\n9:59:46 AM [info] Git repository found\n9:59:46 AM [info] Git provider initialized successfully\n",Log,tab +3,1313,"extension-output-pdoom-org.crowd-code-#1-crowd-code",245,0,"9:59:47 AM [info] Initial git state: [object Object]\n",Log,content +4,3907,"TERMINAL",0,0,"bash",,terminal_focus +5,5790,"TERMINAL",0,0,"queue",,terminal_command +6,5882,"TERMINAL",0,0,"]633;E;2025-07-27 09:59:51 queue;406cfb31-2341-454a-afa8-cae7781806b2]633;C[?1049h(B[?7hEvery 1.0s: squeue --mehkn1993.localdomain: Sun Jul 27 09:59:51 2025JOBID PARTITION NAME USER ST\tTIME NODES NODELIST(REASON)3373407 accelerat train_dy tum_cte0 R 1-23:15:23\t 2 hkn[0501,0506]3373408 accelerat train_dy tum_cte0 R 1-23:15:23\t 8 hkn[0406,0409,0411,0413,0421,0423-0424,0525]3371238 accelerat train_dy tum_cte0 R 1-00:14:22\t 2 hkn[0714,0716]",,terminal_output +7,7012,"TERMINAL",0,0,"2443",,terminal_output +8,7987,"TERMINAL",0,0,"3554",,terminal_output +9,9026,"TERMINAL",0,0,"4665",,terminal_output +10,10050,"TERMINAL",0,0,"6887",,terminal_output +11,11152,"TERMINAL",0,0,"7998",,terminal_output +12,11231,"TERMINAL",0,0,"[?1049l\r[?1l>]0;tum_cte0515@hkn1993:~/Projects/jafar]633;D;0",,terminal_output +13,31149,"TERMINAL",0,0,"queue",,terminal_command +14,31175,"TERMINAL",0,0,"]633;E;2025-07-27 10:00:17 queue;406cfb31-2341-454a-afa8-cae7781806b2]633;C",,terminal_output +15,31235,"TERMINAL",0,0,"[?1049h(B[?7hEvery 1.0s: squeue --mehkn1993.localdomain: Sun Jul 27 10:00:17 2025JOBID PARTITION NAME USER ST\tTIME NODES NODELIST(REASON)3373407 accelerat train_dy tum_cte0 R 1-23:15:49\t 2 hkn[0501,0506]3373408 accelerat train_dy tum_cte0 R 1-23:15:49\t 8 hkn[0406,0409,0411,0413,0421,0423-0424,0525]3371238 accelerat train_dy tum_cte0 R 1-00:14:48\t 2 hkn[0714,0716]",,terminal_output +16,32322,"TERMINAL",0,0,"850509",,terminal_output +17,33352,"TERMINAL",0,0,"91150",,terminal_output +18,34415,"TERMINAL",0,0,"20221",,terminal_output +19,35457,"TERMINAL",0,0,"1332",,terminal_output +20,36504,"TERMINAL",0,0,"2443",,terminal_output +21,37520,"TERMINAL",0,0,"3554",,terminal_output +22,38569,"TERMINAL",0,0,"4665",,terminal_output +23,39624,"TERMINAL",0,0,"5776",,terminal_output +24,40692,"TERMINAL",0,0,"6887",,terminal_output +25,41715,"TERMINAL",0,0,"7998",,terminal_output +26,42797,"TERMINAL",0,0,"86:006:009",,terminal_output +27,43821,"TERMINAL",0,0,"9115:00",,terminal_output +28,44882,"TERMINAL",0,0,"30221",,terminal_output +29,45919,"TERMINAL",0,0,"1332",,terminal_output +30,46964,"TERMINAL",0,0,"2443",,terminal_output +31,48021,"TERMINAL",0,0,"3554",,terminal_output +32,49049,"TERMINAL",0,0,"4776",,terminal_output +33,50142,"TERMINAL",0,0,"6887",,terminal_output +34,51172,"TERMINAL",0,0,"7998",,terminal_output +35,52197,"TERMINAL",0,0,"810109",,terminal_output +36,53253,"TERMINAL",0,0,"91110",,terminal_output +37,54266,"TERMINAL",0,0,"40221",,terminal_output +38,55330,"TERMINAL",0,0,"1332",,terminal_output +39,56381,"TERMINAL",0,0,"2443",,terminal_output +40,57429,"TERMINAL",0,0,"3554",,terminal_output +41,58441,"TERMINAL",0,0,"4665",,terminal_output 
+42,59513,"TERMINAL",0,0,"5776",,terminal_output +43,60536,"TERMINAL",0,0,"6887",,terminal_output +44,61588,"TERMINAL",0,0,"7998",,terminal_output +45,62645,"TERMINAL",0,0,"820209",,terminal_output +46,63669,"TERMINAL",0,0,"91120",,terminal_output +47,64740,"TERMINAL",0,0,"50221",,terminal_output +48,65758,"TERMINAL",0,0,"1332",,terminal_output +49,66813,"TERMINAL",0,0,"2443",,terminal_output +50,67849,"TERMINAL",0,0,"3554",,terminal_output +51,68929,"TERMINAL",0,0,"4665",,terminal_output +52,69949,"TERMINAL",0,0,"5776",,terminal_output +53,71011,"TERMINAL",0,0,"6887",,terminal_output +54,72055,"TERMINAL",0,0,"730309",,terminal_output +55,73064,"TERMINAL",0,0,"91130",,terminal_output +56,74151,"TERMINAL",0,0,"1:00221",,terminal_output +57,75185,"TERMINAL",0,0,"1332",,terminal_output +58,76238,"TERMINAL",0,0,"2443",,terminal_output +59,77250,"TERMINAL",0,0,"3554",,terminal_output +60,78297,"TERMINAL",0,0,"4665",,terminal_output +61,79335,"TERMINAL",0,0,"5776",,terminal_output +62,80378,"TERMINAL",0,0,"6887",,terminal_output +63,81419,"TERMINAL",0,0,"7998",,terminal_output +64,82484,"TERMINAL",0,0,"840409",,terminal_output +65,83505,"TERMINAL",0,0,"91140",,terminal_output +66,84547,"TERMINAL",0,0,"10221",,terminal_output +67,85590,"TERMINAL",0,0,"1332",,terminal_output +68,86637,"TERMINAL",0,0,"2443",,terminal_output +69,87689,"TERMINAL",0,0,"3554",,terminal_output +70,88829,"TERMINAL",0,0,"4665",,terminal_output +71,89856,"TERMINAL",0,0,"5776",,terminal_output +72,90980,"TERMINAL",0,0,"6887",,terminal_output +73,91901,"TERMINAL",0,0,"7998",,terminal_output +74,92906,"TERMINAL",0,0,"850509",,terminal_output +75,93953,"TERMINAL",0,0,"91150",,terminal_output +76,94984,"TERMINAL",0,0,"20221",,terminal_output +77,96057,"TERMINAL",0,0,"1443",,terminal_output +78,97160,"TERMINAL",0,0,"3554",,terminal_output +79,98158,"TERMINAL",0,0,"4665",,terminal_output +80,99214,"TERMINAL",0,0,"5776",,terminal_output +81,100223,"TERMINAL",0,0,"6887",,terminal_output +82,101272,"TERMINAL",0,0,"7998",,terminal_output +83,102314,"TERMINAL",0,0,"87:007:009",,terminal_output +84,103349,"TERMINAL",0,0,"9116:00",,terminal_output +85,104394,"TERMINAL",0,0,"30221",,terminal_output +86,105439,"TERMINAL",0,0,"1332",,terminal_output +87,106505,"TERMINAL",0,0,"2443",,terminal_output +88,107569,"TERMINAL",0,0,"3554",,terminal_output +89,108695,"TERMINAL",0,0,"4665",,terminal_output +90,109721,"TERMINAL",0,0,"5776",,terminal_output +91,110728,"TERMINAL",0,0,"6887",,terminal_output +92,111722,"TERMINAL",0,0,"7998",,terminal_output +93,112793,"TERMINAL",0,0,"810109",,terminal_output +94,113811,"TERMINAL",0,0,"91110",,terminal_output +95,114845,"TERMINAL",0,0,"40221",,terminal_output +96,115885,"TERMINAL",0,0,"1332",,terminal_output +97,116991,"TERMINAL",0,0,"2443",,terminal_output +98,117968,"TERMINAL",0,0,"3554",,terminal_output +99,119044,"TERMINAL",0,0,"4776",,terminal_output +100,120062,"TERMINAL",0,0,"6887",,terminal_output +101,121093,"TERMINAL",0,0,"7998",,terminal_output +102,122137,"TERMINAL",0,0,"820209",,terminal_output +103,123237,"TERMINAL",0,0,"91120",,terminal_output +104,124225,"TERMINAL",0,0,"50221",,terminal_output +105,125266,"TERMINAL",0,0,"1332",,terminal_output +106,126318,"TERMINAL",0,0,"2443",,terminal_output +107,127436,"TERMINAL",0,0,"3554",,terminal_output +108,128421,"TERMINAL",0,0,"4665",,terminal_output +109,129484,"TERMINAL",0,0,"5776",,terminal_output +110,130574,"TERMINAL",0,0,"6887",,terminal_output +111,131634,"TERMINAL",0,0,"7998",,terminal_output 
+112,132657,"TERMINAL",0,0,"830309",,terminal_output +113,133891,"TERMINAL",0,0,"91130",,terminal_output +114,135016,"TERMINAL",0,0,"2:00221",,terminal_output +115,135941,"TERMINAL",0,0,"1332",,terminal_output +116,136957,"TERMINAL",0,0,"2443",,terminal_output +117,137981,"TERMINAL",0,0,"3554",,terminal_output +118,139005,"TERMINAL",0,0,"4665",,terminal_output +119,140030,"TERMINAL",0,0,"5776",,terminal_output +120,141055,"TERMINAL",0,0,"6887",,terminal_output +121,142182,"TERMINAL",0,0,"740409",,terminal_output +122,143149,"TERMINAL",0,0,"91140",,terminal_output +123,144173,"TERMINAL",0,0,"10221",,terminal_output +124,145166,"TERMINAL",0,0,"1332",,terminal_output +125,146488,"TERMINAL",0,0,"2443",,terminal_output +126,147249,"TERMINAL",0,0,"3554",,terminal_output +127,148297,"TERMINAL",0,0,"4665",,terminal_output +128,149344,"TERMINAL",0,0,"5776",,terminal_output +129,150511,"TERMINAL",0,0,"6887",,terminal_output +130,151445,"TERMINAL",0,0,"7998",,terminal_output +131,152627,"TERMINAL",0,0,"850509",,terminal_output +132,153505,"TERMINAL",0,0,"91150",,terminal_output +133,154573,"TERMINAL",0,0,"20221",,terminal_output +134,155606,"TERMINAL",0,0,"1332",,terminal_output +135,156928,"TERMINAL",0,0,"2443",,terminal_output +136,157760,"TERMINAL",0,0,"3554",,terminal_output +137,158743,"TERMINAL",0,0,"4665",,terminal_output +138,159900,"TERMINAL",0,0,"5776",,terminal_output +139,160920,"TERMINAL",0,0,"6887",,terminal_output +140,161993,"TERMINAL",0,0,"7998",,terminal_output +141,162969,"TERMINAL",0,0,"88:008:009",,terminal_output +142,164097,"TERMINAL",0,0,"9117:00",,terminal_output +143,165006,"TERMINAL",0,0,"30221",,terminal_output +144,166200,"TERMINAL",0,0,"1443",,terminal_output +145,167169,"TERMINAL",0,0,"3554",,terminal_output +146,168195,"TERMINAL",0,0,"4665",,terminal_output +147,169180,"TERMINAL",0,0,"5776",,terminal_output +148,170357,"TERMINAL",0,0,"6887",,terminal_output +149,171371,"TERMINAL",0,0,"7998",,terminal_output +150,172317,"TERMINAL",0,0,"810109",,terminal_output +151,173413,"TERMINAL",0,0,"91110",,terminal_output +152,174439,"TERMINAL",0,0,"40221",,terminal_output +153,175575,"TERMINAL",0,0,"1332",,terminal_output +154,176587,"TERMINAL",0,0,"2443",,terminal_output +155,177592,"TERMINAL",0,0,"3554",,terminal_output +156,178603,"TERMINAL",0,0,"4665",,terminal_output +157,179663,"TERMINAL",0,0,"5776",,terminal_output +158,180788,"TERMINAL",0,0,"6887",,terminal_output +159,181811,"TERMINAL",0,0,"7998",,terminal_output +160,182845,"TERMINAL",0,0,"820209",,terminal_output +161,183904,"TERMINAL",0,0,"91120",,terminal_output +162,185362,"TERMINAL",0,0,"50221",,terminal_output +163,186083,"TERMINAL",0,0,"1332",,terminal_output +164,186985,"TERMINAL",0,0,"2443",,terminal_output +165,187993,"TERMINAL",0,0,"3554",,terminal_output +166,189038,"TERMINAL",0,0,"4776",,terminal_output +167,190079,"TERMINAL",0,0,"6887",,terminal_output +168,191155,"TERMINAL",0,0,"7998",,terminal_output +169,192359,"TERMINAL",0,0,"830309",,terminal_output +170,193206,"TERMINAL",0,0,"91130",,terminal_output +171,194250,"TERMINAL",0,0,"3:00221",,terminal_output +172,195331,"TERMINAL",0,0,"1332",,terminal_output +173,196344,"TERMINAL",0,0,"2443",,terminal_output +174,197389,"TERMINAL",0,0,"3554",,terminal_output +175,198434,"TERMINAL",0,0,"4665",,terminal_output +176,199525,"TERMINAL",0,0,"5776",,terminal_output +177,200557,"TERMINAL",0,0,"6887",,terminal_output +178,201679,"TERMINAL",0,0,"7998",,terminal_output +179,202701,"TERMINAL",0,0,"840409",,terminal_output 
+180,203727,"TERMINAL",0,0,"91140",,terminal_output +181,204719,"TERMINAL",0,0,"10221",,terminal_output +182,205978,"TERMINAL",0,0,"1332",,terminal_output +183,207001,"TERMINAL",0,0,"2443",,terminal_output +184,207868,"TERMINAL",0,0,"3554",,terminal_output +185,208909,"TERMINAL",0,0,"4665",,terminal_output +186,209972,"TERMINAL",0,0,"5776",,terminal_output +187,211205,"TERMINAL",0,0,"6887",,terminal_output +188,212249,"TERMINAL",0,0,"750509",,terminal_output +189,213247,"TERMINAL",0,0,"91150",,terminal_output +190,214145,"TERMINAL",0,0,"20221",,terminal_output +191,215212,"TERMINAL",0,0,"1332",,terminal_output +192,216245,"TERMINAL",0,0,"2443",,terminal_output +193,217319,"TERMINAL",0,0,"3554",,terminal_output +194,218331,"TERMINAL",0,0,"4665",,terminal_output +195,219379,"TERMINAL",0,0,"5776",,terminal_output +196,220485,"TERMINAL",0,0,"6887",,terminal_output +197,221543,"TERMINAL",0,0,"7998",,terminal_output +198,222567,"TERMINAL",0,0,"89:009:009",,terminal_output +199,223564,"TERMINAL",0,0,"9118:00",,terminal_output +200,224615,"TERMINAL",0,0,"30221",,terminal_output +201,225745,"TERMINAL",0,0,"1332",,terminal_output +202,226764,"TERMINAL",0,0,"2443",,terminal_output +203,227795,"TERMINAL",0,0,"3554",,terminal_output +204,228933,"TERMINAL",0,0,"4665",,terminal_output +205,229861,"TERMINAL",0,0,"5776",,terminal_output +206,230963,"TERMINAL",0,0,"6887",,terminal_output +207,231934,"TERMINAL",0,0,"7998",,terminal_output +208,233114,"TERMINAL",0,0,"810109",,terminal_output +209,234043,"TERMINAL",0,0,"92211",,terminal_output +210,235068,"TERMINAL",0,0,"41332",,terminal_output +211,236263,"TERMINAL",0,0,"2443",,terminal_output +212,237200,"TERMINAL",0,0,"3554",,terminal_output +213,238256,"TERMINAL",0,0,"4665",,terminal_output +214,239271,"TERMINAL",0,0,"5776",,terminal_output +215,240311,"TERMINAL",0,0,"6887",,terminal_output +216,241360,"TERMINAL",0,0,"7998",,terminal_output +217,242432,"TERMINAL",0,0,"820209",,terminal_output +218,243560,"TERMINAL",0,0,"91120",,terminal_output +219,244517,"TERMINAL",0,0,"50221",,terminal_output +220,245562,"TERMINAL",0,0,"1332",,terminal_output +221,246631,"TERMINAL",0,0,"2443",,terminal_output +222,247759,"TERMINAL",0,0,"3554",,terminal_output +223,248700,"TERMINAL",0,0,"4665",,terminal_output +224,249803,"TERMINAL",0,0,"5776",,terminal_output +225,250930,"TERMINAL",0,0,"6887",,terminal_output +226,251954,"TERMINAL",0,0,"7998",,terminal_output +227,252892,"TERMINAL",0,0,"830309",,terminal_output +228,253938,"TERMINAL",0,0,"91130",,terminal_output +229,254988,"TERMINAL",0,0,"4:00221",,terminal_output +230,256031,"TERMINAL",0,0,"1443",,terminal_output +231,257093,"TERMINAL",0,0,"3554",,terminal_output +232,258312,"TERMINAL",0,0,"4665",,terminal_output +233,259359,"TERMINAL",0,0,"5776",,terminal_output +234,260405,"TERMINAL",0,0,"6887",,terminal_output +235,261452,"TERMINAL",0,0,"7998",,terminal_output +236,262501,"TERMINAL",0,0,"840409",,terminal_output +237,263552,"TERMINAL",0,0,"91140",,terminal_output +238,264673,"TERMINAL",0,0,"10221",,terminal_output +239,265676,"TERMINAL",0,0,"1332",,terminal_output +240,266906,"TERMINAL",0,0,"2443",,terminal_output +241,267742,"TERMINAL",0,0,"3554",,terminal_output +242,268956,"TERMINAL",0,0,"4665",,terminal_output +243,270118,"TERMINAL",0,0,"5776",,terminal_output +244,270889,"TERMINAL",0,0,"6887",,terminal_output +245,271937,"TERMINAL",0,0,"7998",,terminal_output +246,272991,"TERMINAL",0,0,"850509",,terminal_output +247,274046,"TERMINAL",0,0,"92251",,terminal_output 
+248,275097,"TERMINAL",0,0,"21332",,terminal_output +249,276156,"TERMINAL",0,0,"2443",,terminal_output +250,277176,"TERMINAL",0,0,"3554",,terminal_output +251,278225,"TERMINAL",0,0,"4665",,terminal_output +252,279276,"TERMINAL",0,0,"5776",,terminal_output +253,280326,"TERMINAL",0,0,"6887",,terminal_output +254,281370,"TERMINAL",0,0,"7998",,terminal_output +255,282573,"TERMINAL",0,0,"820:0020:009",,terminal_output +256,283497,"TERMINAL",0,0,"9119:00",,terminal_output +257,284516,"TERMINAL",0,0,"30221",,terminal_output +258,285561,"TERMINAL",0,0,"1332",,terminal_output +259,286777,"TERMINAL",0,0,"2443",,terminal_output +260,287701,"TERMINAL",0,0,"3554",,terminal_output +261,288705,"TERMINAL",0,0,"4665",,terminal_output +262,289789,"TERMINAL",0,0,"5776",,terminal_output +263,290867,"TERMINAL",0,0,"6887",,terminal_output +264,291843,"TERMINAL",0,0,"7998",,terminal_output +265,292915,"TERMINAL",0,0,"810109",,terminal_output +266,293937,"TERMINAL",0,0,"91110",,terminal_output +267,295065,"TERMINAL",0,0,"40221",,terminal_output +268,296194,"TERMINAL",0,0,"1443",,terminal_output +269,297117,"TERMINAL",0,0,"3554",,terminal_output +270,298142,"TERMINAL",0,0,"4665",,terminal_output +271,299213,"TERMINAL",0,0,"5776",,terminal_output +272,300218,"TERMINAL",0,0,"6887",,terminal_output +273,301264,"TERMINAL",0,0,"7998",,terminal_output +274,302313,"TERMINAL",0,0,"820209",,terminal_output +275,303361,"TERMINAL",0,0,"91120",,terminal_output +276,304487,"TERMINAL",0,0,"50221",,terminal_output +277,305464,"TERMINAL",0,0,"1332",,terminal_output +278,306638,"TERMINAL",0,0,"2443",,terminal_output +279,307766,"TERMINAL",0,0,"3554",,terminal_output +280,308614,"TERMINAL",0,0,"4665",,terminal_output +281,309811,"TERMINAL",0,0,"5776",,terminal_output +282,310709,"TERMINAL",0,0,"6887",,terminal_output +283,311756,"TERMINAL",0,0,"7998",,terminal_output +284,312882,"TERMINAL",0,0,"830309",,terminal_output +285,313909,"TERMINAL",0,0,"91130",,terminal_output +286,315034,"TERMINAL",0,0,"5:00221",,terminal_output +287,316057,"TERMINAL",0,0,"1332",,terminal_output +288,317084,"TERMINAL",0,0,"2443",,terminal_output +289,318210,"TERMINAL",0,0,"3665",,terminal_output +290,319127,"TERMINAL",0,0,"5776",,terminal_output +291,320186,"TERMINAL",0,0,"6887",,terminal_output +292,321189,"TERMINAL",0,0,"7998",,terminal_output +293,322248,"TERMINAL",0,0,"840409",,terminal_output +294,323281,"TERMINAL",0,0,"91140",,terminal_output +295,324322,"TERMINAL",0,0,"10221",,terminal_output +296,325479,"TERMINAL",0,0,"1332",,terminal_output +297,326505,"TERMINAL",0,0,"2443",,terminal_output +298,327473,"TERMINAL",0,0,"3554",,terminal_output +299,328522,"TERMINAL",0,0,"4665",,terminal_output +300,329573,"TERMINAL",0,0,"5776",,terminal_output +301,330617,"TERMINAL",0,0,"6887",,terminal_output +302,331828,"TERMINAL",0,0,"7998",,terminal_output +303,332717,"TERMINAL",0,0,"850509",,terminal_output +304,333877,"TERMINAL",0,0,"91150",,terminal_output +305,334811,"TERMINAL",0,0,"20221",,terminal_output +306,336026,"TERMINAL",0,0,"1332",,terminal_output +307,337093,"TERMINAL",0,0,"2443",,terminal_output +308,338075,"TERMINAL",0,0,"3554",,terminal_output +309,339099,"TERMINAL",0,0,"4665",,terminal_output +310,340257,"TERMINAL",0,0,"5887",,terminal_output +311,341177,"TERMINAL",0,0,"7998",,terminal_output +312,342174,"TERMINAL",0,0,"81:001:009",,terminal_output +313,343297,"TERMINAL",0,0,"91120:00",,terminal_output +314,344237,"TERMINAL",0,0,"30221",,terminal_output +315,345285,"TERMINAL",0,0,"1332",,terminal_output 
+316,346374,"TERMINAL",0,0,"2443",,terminal_output +317,347404,"TERMINAL",0,0,"3554",,terminal_output +318,348638,"TERMINAL",0,0,"4665",,terminal_output +319,349647,"TERMINAL",0,0,"5776",,terminal_output +320,350567,"TERMINAL",0,0,"6887",,terminal_output +321,351590,"TERMINAL",0,0,"7998",,terminal_output +322,352719,"TERMINAL",0,0,"810109",,terminal_output +323,353741,"TERMINAL",0,0,"91110",,terminal_output +324,354764,"TERMINAL",0,0,"40221",,terminal_output +325,355788,"TERMINAL",0,0,"1332",,terminal_output +326,356916,"TERMINAL",0,0,"2443",,terminal_output +327,357870,"TERMINAL",0,0,"3554",,terminal_output +328,358921,"TERMINAL",0,0,"4665",,terminal_output +329,359987,"TERMINAL",0,0,"5776",,terminal_output +330,361113,"TERMINAL",0,0,"6998",,terminal_output +331,362065,"TERMINAL",0,0,"820209",,terminal_output +332,363269,"TERMINAL",0,0,"91120",,terminal_output +333,364197,"TERMINAL",0,0,"50221",,terminal_output +334,365313,"TERMINAL",0,0,"1332",,terminal_output +335,366256,"TERMINAL",0,0,"2443",,terminal_output +336,367362,"TERMINAL",0,0,"3554",,terminal_output +337,368349,"TERMINAL",0,0,"4665",,terminal_output +338,369389,"TERMINAL",0,0,"5776",,terminal_output +339,370467,"TERMINAL",0,0,"6887",,terminal_output +340,371559,"TERMINAL",0,0,"7998",,terminal_output +341,372582,"TERMINAL",0,0,"830309",,terminal_output +342,373619,"TERMINAL",0,0,"91130",,terminal_output +343,374734,"TERMINAL",0,0,"6:00221",,terminal_output +344,375758,"TERMINAL",0,0,"1332",,terminal_output +345,376783,"TERMINAL",0,0,"2443",,terminal_output +346,377805,"TERMINAL",0,0,"3554",,terminal_output +347,379033,"TERMINAL",0,0,"4665",,terminal_output +348,379948,"TERMINAL",0,0,"5776",,terminal_output +349,380984,"TERMINAL",0,0,"6887",,terminal_output +350,381992,"TERMINAL",0,0,"7998",,terminal_output +351,383263,"TERMINAL",0,0,"8414140",,terminal_output +352,384078,"TERMINAL",0,0,"10221",,terminal_output +353,385118,"TERMINAL",0,0,"1332",,terminal_output +354,386185,"TERMINAL",0,0,"2443",,terminal_output +355,387331,"TERMINAL",0,0,"3554",,terminal_output +356,388357,"TERMINAL",0,0,"4665",,terminal_output +357,389301,"TERMINAL",0,0,"5776",,terminal_output +358,390349,"TERMINAL",0,0,"6887",,terminal_output +359,391399,"TERMINAL",0,0,"7998",,terminal_output +360,392443,"TERMINAL",0,0,"850509",,terminal_output +361,393491,"TERMINAL",0,0,"91150",,terminal_output +362,394707,"TERMINAL",0,0,"20221",,terminal_output +363,395727,"TERMINAL",0,0,"1332",,terminal_output +364,396750,"TERMINAL",0,0,"2443",,terminal_output +365,397671,"TERMINAL",0,0,"3554",,terminal_output +366,398799,"TERMINAL",0,0,"4665",,terminal_output +367,399823,"TERMINAL",0,0,"5776",,terminal_output +368,400845,"TERMINAL",0,0,"6887",,terminal_output +369,401869,"TERMINAL",0,0,"7998",,terminal_output +370,403099,"TERMINAL",0,0,"82:002:009",,terminal_output +371,403933,"TERMINAL",0,0,"9111:00",,terminal_output +372,405041,"TERMINAL",0,0,"30221",,terminal_output +373,406201,"TERMINAL",0,0,"1443",,terminal_output +374,407075,"TERMINAL",0,0,"3554",,terminal_output +375,408250,"TERMINAL",0,0,"4665",,terminal_output +376,409214,"TERMINAL",0,0,"5776",,terminal_output +377,410269,"TERMINAL",0,0,"6887",,terminal_output +378,411289,"TERMINAL",0,0,"7998",,terminal_output +379,412320,"TERMINAL",0,0,"810109",,terminal_output +380,413365,"TERMINAL",0,0,"91110",,terminal_output +381,414409,"TERMINAL",0,0,"40221",,terminal_output +382,415487,"TERMINAL",0,0,"1332",,terminal_output +383,416487,"TERMINAL",0,0,"2443",,terminal_output +384,417538,"TERMINAL",0,0,"3554",,terminal_output 
+385,418665,"TERMINAL",0,0,"4665",,terminal_output +386,419633,"TERMINAL",0,0,"5776",,terminal_output +387,420711,"TERMINAL",0,0,"6887",,terminal_output +388,421736,"TERMINAL",0,0,"7998",,terminal_output +389,422769,"TERMINAL",0,0,"820209",,terminal_output +390,423888,"TERMINAL",0,0,"91120",,terminal_output +391,424909,"TERMINAL",0,0,"50221",,terminal_output +392,425935,"TERMINAL",0,0,"1332",,terminal_output +393,426956,"TERMINAL",0,0,"2443",,terminal_output +394,427995,"TERMINAL",0,0,"3554",,terminal_output +395,429109,"TERMINAL",0,0,"4776",,terminal_output +396,430120,"TERMINAL",0,0,"6887",,terminal_output +397,431154,"TERMINAL",0,0,"7998",,terminal_output +398,432159,"TERMINAL",0,0,"830309",,terminal_output +399,433309,"TERMINAL",0,0,"91130",,terminal_output +400,434247,"TERMINAL",0,0,"7:00221",,terminal_output +401,435358,"TERMINAL",0,0,"1332",,terminal_output +402,436342,"TERMINAL",0,0,"2443",,terminal_output +403,437366,"TERMINAL",0,0,"3554",,terminal_output +404,438408,"TERMINAL",0,0,"4665",,terminal_output +405,439559,"TERMINAL",0,0,"5776",,terminal_output +406,440677,"TERMINAL",0,0,"6887",,terminal_output +407,441538,"TERMINAL",0,0,"7998",,terminal_output +408,442626,"TERMINAL",0,0,"840409",,terminal_output +409,443642,"TERMINAL",0,0,"91140",,terminal_output +410,444677,"TERMINAL",0,0,"10221",,terminal_output +411,445808,"TERMINAL",0,0,"1332",,terminal_output +412,446743,"TERMINAL",0,0,"2443",,terminal_output +413,447793,"TERMINAL",0,0,"3554",,terminal_output +414,448871,"TERMINAL",0,0,"4665",,terminal_output +415,449879,"TERMINAL",0,0,"5776",,terminal_output +416,450927,"TERMINAL",0,0,"6887",,terminal_output +417,451975,"TERMINAL",0,0,"7998",,terminal_output +418,453020,"TERMINAL",0,0,"8515150",,terminal_output +419,454265,"TERMINAL",0,0,"20221",,terminal_output +420,455125,"TERMINAL",0,0,"1332",,terminal_output +421,456159,"TERMINAL",0,0,"2443",,terminal_output +422,457267,"TERMINAL",0,0,"3554",,terminal_output +423,458397,"TERMINAL",0,0,"4665",,terminal_output +424,459293,"TERMINAL",0,0,"5776",,terminal_output +425,460336,"TERMINAL",0,0,"6887",,terminal_output +426,461377,"TERMINAL",0,0,"7998",,terminal_output +427,462417,"TERMINAL",0,0,"83:003:009",,terminal_output +428,463457,"TERMINAL",0,0,"9112:00",,terminal_output +429,464540,"TERMINAL",0,0,"30221",,terminal_output +430,465665,"TERMINAL",0,0,"1332",,terminal_output +431,466791,"TERMINAL",0,0,"2443",,terminal_output +432,467645,"TERMINAL",0,0,"3554",,terminal_output +433,468839,"TERMINAL",0,0,"4665",,terminal_output +434,469767,"TERMINAL",0,0,"5776",,terminal_output +435,470990,"TERMINAL",0,0,"6887",,terminal_output +436,471865,"TERMINAL",0,0,"7998",,terminal_output +437,472877,"TERMINAL",0,0,"810109",,terminal_output +438,473928,"TERMINAL",0,0,"91110",,terminal_output +439,474985,"TERMINAL",0,0,"40221",,terminal_output +440,476058,"TERMINAL",0,0,"1443",,terminal_output +441,477138,"TERMINAL",0,0,"3554",,terminal_output +442,478125,"TERMINAL",0,0,"4665",,terminal_output +443,479226,"TERMINAL",0,0,"5776",,terminal_output +444,480413,"TERMINAL",0,0,"6887",,terminal_output +445,481439,"TERMINAL",0,0,"7998",,terminal_output +446,482565,"TERMINAL",0,0,"820209",,terminal_output +447,483383,"TERMINAL",0,0,"91120",,terminal_output +448,484395,"TERMINAL",0,0,"50221",,terminal_output +449,485441,"TERMINAL",0,0,"1332",,terminal_output +450,486495,"TERMINAL",0,0,"2443",,terminal_output +451,487565,"TERMINAL",0,0,"3554",,terminal_output +452,488567,"TERMINAL",0,0,"4665",,terminal_output +453,489669,"TERMINAL",0,0,"5776",,terminal_output 
+454,490647,"TERMINAL",0,0,"6887",,terminal_output +455,491737,"TERMINAL",0,0,"7998",,terminal_output +456,492724,"TERMINAL",0,0,"830309",,terminal_output +457,493879,"TERMINAL",0,0,"91130",,terminal_output +458,494893,"TERMINAL",0,0,"8:00221",,terminal_output +459,495915,"TERMINAL",0,0,"1332",,terminal_output +460,496940,"TERMINAL",0,0,"2443",,terminal_output +461,497913,"TERMINAL",0,0,"3554",,terminal_output +462,498952,"TERMINAL",0,0,"4665",,terminal_output +463,499993,"TERMINAL",0,0,"5776",,terminal_output +464,501083,"TERMINAL",0,0,"6998",,terminal_output +465,502163,"TERMINAL",0,0,"840409",,terminal_output +466,503187,"TERMINAL",0,0,"91140",,terminal_output +467,504200,"TERMINAL",0,0,"10221",,terminal_output +468,505234,"TERMINAL",0,0,"1332",,terminal_output +469,506259,"TERMINAL",0,0,"2443",,terminal_output +470,507387,"TERMINAL",0,0,"3554",,terminal_output +471,508325,"TERMINAL",0,0,"4665",,terminal_output +472,509363,"TERMINAL",0,0,"5776",,terminal_output +473,510411,"TERMINAL",0,0,"6887",,terminal_output +474,511463,"TERMINAL",0,0,"7998",,terminal_output +475,512495,"TERMINAL",0,0,"850509",,terminal_output +476,513735,"TERMINAL",0,0,"91150",,terminal_output +477,514655,"TERMINAL",0,0,"20221",,terminal_output +478,515679,"TERMINAL",0,0,"1332",,terminal_output +479,516807,"TERMINAL",0,0,"2443",,terminal_output +480,517730,"TERMINAL",0,0,"3554",,terminal_output +481,518863,"TERMINAL",0,0,"4665",,terminal_output +482,519788,"TERMINAL",0,0,"5776",,terminal_output +483,520834,"TERMINAL",0,0,"6887",,terminal_output +484,521925,"TERMINAL",0,0,"7998",,terminal_output +485,522953,"TERMINAL",0,0,"84:004:009",,terminal_output +486,523956,"TERMINAL",0,0,"9113:00",,terminal_output +487,525101,"TERMINAL",0,0,"30221",,terminal_output +488,526126,"TERMINAL",0,0,"1443",,terminal_output +489,527094,"TERMINAL",0,0,"3554",,terminal_output +490,528312,"TERMINAL",0,0,"4665",,terminal_output +491,529206,"TERMINAL",0,0,"5776",,terminal_output +492,530222,"TERMINAL",0,0,"6887",,terminal_output +493,531376,"TERMINAL",0,0,"7998",,terminal_output +494,532301,"TERMINAL",0,0,"810109",,terminal_output +495,533348,"TERMINAL",0,0,"91110",,terminal_output +496,534399,"TERMINAL",0,0,"40221",,terminal_output +497,535433,"TERMINAL",0,0,"1332",,terminal_output +498,536489,"TERMINAL",0,0,"2443",,terminal_output +499,537642,"TERMINAL",0,0,"3554",,terminal_output +500,538700,"TERMINAL",0,0,"4665",,terminal_output +501,539605,"TERMINAL",0,0,"5776",,terminal_output +502,540666,"TERMINAL",0,0,"6887",,terminal_output +503,541895,"TERMINAL",0,0,"7998",,terminal_output +504,542745,"TERMINAL",0,0,"820209",,terminal_output +505,543789,"TERMINAL",0,0,"91120",,terminal_output +506,544864,"TERMINAL",0,0,"50221",,terminal_output +507,546009,"TERMINAL",0,0,"1332",,terminal_output +508,546954,"TERMINAL",0,0,"2443",,terminal_output +509,548140,"TERMINAL",0,0,"3554",,terminal_output +510,549213,"TERMINAL",0,0,"4776",,terminal_output +511,550298,"TERMINAL",0,0,"6887",,terminal_output +512,551121,"TERMINAL",0,0,"7998",,terminal_output +513,552545,"TERMINAL",0,0,"830309",,terminal_output +514,553291,"TERMINAL",0,0,"91130",,terminal_output +515,554301,"TERMINAL",0,0,"9:00221",,terminal_output +516,555413,"TERMINAL",0,0,"1332",,terminal_output +517,556437,"TERMINAL",0,0,"2443",,terminal_output +518,557461,"TERMINAL",0,0,"3554",,terminal_output +519,558483,"TERMINAL",0,0,"4665",,terminal_output +520,559445,"TERMINAL",0,0,"5776",,terminal_output +521,560634,"TERMINAL",0,0,"6887",,terminal_output 
+522,561664,"TERMINAL",0,0,"7998",,terminal_output
+523,562559,"TERMINAL",0,0,"840409",,terminal_output
+524,563610,"TERMINAL",0,0,"91140",,terminal_output
+525,564641,"TERMINAL",0,0,"10221",,terminal_output
+526,565769,"TERMINAL",0,0,"1332",,terminal_output
+527,566885,"TERMINAL",0,0,"2443",,terminal_output
+528,567758,"TERMINAL",0,0,"3554",,terminal_output
+529,568793,"TERMINAL",0,0,"4665",,terminal_output
+530,569849,"TERMINAL",0,0,"5776",,terminal_output
+531,571166,"TERMINAL",0,0,"6887",,terminal_output
+532,571939,"TERMINAL",0,0,"7998",,terminal_output
+533,572959,"TERMINAL",0,0,"850509",,terminal_output
+534,574153,"TERMINAL",0,0,"91150",,terminal_output
+535,575049,"TERMINAL",0,0,"20332",,terminal_output
+536,576200,"TERMINAL",0,0,"2443",,terminal_output
+537,577118,"TERMINAL",0,0,"3554",,terminal_output
+538,578179,"TERMINAL",0,0,"4665",,terminal_output
+539,579200,"TERMINAL",0,0,"5776",,terminal_output
+540,580329,"TERMINAL",0,0,"6887",,terminal_output
+541,581319,"TERMINAL",0,0,"7998",,terminal_output
+542,582449,"TERMINAL",0,0,"85:005:009",,terminal_output
+543,583471,"TERMINAL",0,0,"9114:00",,terminal_output
+544,584456,"TERMINAL",0,0,"30221",,terminal_output
+545,585491,"TERMINAL",0,0,"1332",,terminal_output
+546,586542,"TERMINAL",0,0,"2443",,terminal_output
+547,587668,"TERMINAL",0,0,"3554",,terminal_output
+548,588793,"TERMINAL",0,0,"4665",,terminal_output
+549,589670,"TERMINAL",0,0,"5776",,terminal_output
+550,590716,"TERMINAL",0,0,"6887",,terminal_output
+551,591870,"TERMINAL",0,0,"7998",,terminal_output
+552,592804,"TERMINAL",0,0,"810109",,terminal_output
+553,594016,"TERMINAL",0,0,"91110",,terminal_output
+554,595040,"TERMINAL",0,0,"40221",,terminal_output
+555,596146,"TERMINAL",0,0,"1332",,terminal_output
+556,597089,"TERMINAL",0,0,"2443",,terminal_output
+557,598016,"TERMINAL",0,0,"3665",,terminal_output
+558,599221,"TERMINAL",0,0,"5776",,terminal_output
+559,600301,"TERMINAL",0,0,"6887",,terminal_output
+560,601154,"TERMINAL",0,0,"7998",,terminal_output
+561,602413,"TERMINAL",0,0,"820209",,terminal_output
+562,603437,"TERMINAL",0,0,"91120",,terminal_output
+563,604300,"TERMINAL",0,0,"50221",,terminal_output
+564,605387,"TERMINAL",0,0,"1332",,terminal_output
+565,606414,"TERMINAL",0,0,"2443",,terminal_output
+566,607419,"TERMINAL",0,0,"3554",,terminal_output
+567,608464,"TERMINAL",0,0,"4665",,terminal_output
+568,609582,"TERMINAL",0,0,"5776",,terminal_output
+569,610571,"TERMINAL",0,0,"6887",,terminal_output
+570,611734,"TERMINAL",0,0,"7998",,terminal_output
+571,612756,"TERMINAL",0,0,"830309",,terminal_output
+572,613713,"TERMINAL",0,0,"91130",,terminal_output
+573,614757,"TERMINAL",0,0,"10:00221",,terminal_output
+574,615806,"TERMINAL",0,0,"1332",,terminal_output
+575,616901,"TERMINAL",0,0,"2443",,terminal_output
+576,617980,"TERMINAL",0,0,"3554",,terminal_output
+577,619106,"TERMINAL",0,0,"4665",,terminal_output
+578,620129,"TERMINAL",0,0,"5887",,terminal_output
+579,621165,"TERMINAL",0,0,"7998",,terminal_output
+580,622123,"TERMINAL",0,0,"840409",,terminal_output
+581,623202,"TERMINAL",0,0,"91140",,terminal_output
+582,624205,"TERMINAL",0,0,"10221",,terminal_output
+583,625246,"TERMINAL",0,0,"1332",,terminal_output
+584,626384,"TERMINAL",0,0,"2443",,terminal_output
+585,627398,"TERMINAL",0,0,"3554",,terminal_output
+586,628432,"TERMINAL",0,0,"4665",,terminal_output
+587,629427,"TERMINAL",0,0,"5776",,terminal_output
+588,630477,"TERMINAL",0,0,"6887",,terminal_output
+589,631517,"TERMINAL",0,0,"7998",,terminal_output
+590,632626,"TERMINAL",0,0,"850509",,terminal_output
+591,633658,"TERMINAL",0,0,"91150",,terminal_output
+592,634676,"TERMINAL",0,0,"20221",,terminal_output
+593,635697,"TERMINAL",0,0,"1332",,terminal_output
+594,636822,"TERMINAL",0,0,"2443",,terminal_output
+595,637949,"TERMINAL",0,0,"3554",,terminal_output
+596,638972,"TERMINAL",0,0,"4665",,terminal_output
+597,639895,"TERMINAL",0,0,"5776",,terminal_output
+598,641020,"TERMINAL",0,0,"6887",,terminal_output
+599,641945,"TERMINAL",0,0,"7998",,terminal_output
+600,643174,"TERMINAL",0,0,"86:006:009",,terminal_output
+601,644094,"TERMINAL",0,0,"9225:01",,terminal_output
+602,645123,"TERMINAL",0,0,"31332",,terminal_output
+603,646242,"TERMINAL",0,0,"2443",,terminal_output
+604,647145,"TERMINAL",0,0,"3554",,terminal_output
+605,648318,"TERMINAL",0,0,"4665",,terminal_output
+606,649245,"TERMINAL",0,0,"5776",,terminal_output
+607,650339,"TERMINAL",0,0,"6887",,terminal_output
+608,651366,"TERMINAL",0,0,"7998",,terminal_output
+609,652471,"TERMINAL",0,0,"810109",,terminal_output
+610,653414,"TERMINAL",0,0,"91110",,terminal_output
+611,654454,"TERMINAL",0,0,"40221",,terminal_output
+612,655495,"TERMINAL",0,0,"1332",,terminal_output
+613,656538,"TERMINAL",0,0,"2443",,terminal_output
+614,657618,"TERMINAL",0,0,"3554",,terminal_output
+615,658635,"TERMINAL",0,0,"4665",,terminal_output
+616,659679,"TERMINAL",0,0,"5776",,terminal_output
+617,660887,"TERMINAL",0,0,"6887",,terminal_output
+618,661910,"TERMINAL",0,0,"7998",,terminal_output
+619,662934,"TERMINAL",0,0,"820209",,terminal_output
+620,663959,"TERMINAL",0,0,"91120",,terminal_output
+621,665086,"TERMINAL",0,0,"50221",,terminal_output
+622,666107,"TERMINAL",0,0,"1332",,terminal_output
+623,667034,"TERMINAL",0,0,"2443",,terminal_output
+624,668105,"TERMINAL",0,0,"3665",,terminal_output
+625,669117,"TERMINAL",0,0,"5776",,terminal_output
+626,670215,"TERMINAL",0,0,"6887",,terminal_output
+627,671229,"TERMINAL",0,0,"7998",,terminal_output
+628,672662,"TERMINAL",0,0,"830309",,terminal_output
+629,673314,"TERMINAL",0,0,"91130",,terminal_output
+630,674350,"TERMINAL",0,0,"1:00221",,terminal_output
+631,675532,"TERMINAL",0,0,"1332",,terminal_output
+632,676453,"TERMINAL",0,0,"2443",,terminal_output
+633,677456,"TERMINAL",0,0,"3554",,terminal_output
+634,678502,"TERMINAL",0,0,"4665",,terminal_output
+635,679633,"TERMINAL",0,0,"5776",,terminal_output
+636,680592,"TERMINAL",0,0,"6887",,terminal_output
+637,681673,"TERMINAL",0,0,"7998",,terminal_output
+638,682799,"TERMINAL",0,0,"840409",,terminal_output
+639,683940,"TERMINAL",0,0,"91140",,terminal_output
+640,684870,"TERMINAL",0,0,"10221",,terminal_output
+641,685974,"TERMINAL",0,0,"1332",,terminal_output
+642,686998,"TERMINAL",0,0,"2443",,terminal_output
+643,688020,"TERMINAL",0,0,"3554",,terminal_output
+644,688974,"TERMINAL",0,0,"4665",,terminal_output
+645,690212,"TERMINAL",0,0,"5887",,terminal_output
+646,691094,"TERMINAL",0,0,"7998",,terminal_output
+647,692220,"TERMINAL",0,0,"850509",,terminal_output
+648,693243,"TERMINAL",0,0,"91150",,terminal_output
+649,694199,"TERMINAL",0,0,"20221",,terminal_output
+650,695325,"TERMINAL",0,0,"1332",,terminal_output
+651,696417,"TERMINAL",0,0,"2443",,terminal_output
+652,697443,"TERMINAL",0,0,"3554",,terminal_output
+653,698464,"TERMINAL",0,0,"4665",,terminal_output
+654,699418,"TERMINAL",0,0,"5776",,terminal_output
+655,700481,"TERMINAL",0,0,"6887",,terminal_output
+656,701498,"TERMINAL",0,0,"7998",,terminal_output
+657,702531,"TERMINAL",0,0,"87:007:009",,terminal_output
+658,703688,"TERMINAL",0,0,"9116:00",,terminal_output
+659,704610,"TERMINAL",0,0,"30221",,terminal_output
+660,705647,"TERMINAL",0,0,"1332",,terminal_output
+661,706862,"TERMINAL",0,0,"2443",,terminal_output
+662,707743,"TERMINAL",0,0,"3554",,terminal_output
+663,708807,"TERMINAL",0,0,"4665",,terminal_output
+664,710037,"TERMINAL",0,0,"5776",,terminal_output
+665,710944,"TERMINAL",0,0,"6887",,terminal_output
+666,711953,"TERMINAL",0,0,"7998",,terminal_output
+667,713109,"TERMINAL",0,0,"810109",,terminal_output
+668,714049,"TERMINAL",0,0,"91110",,terminal_output
+669,715157,"TERMINAL",0,0,"40332",,terminal_output
+670,716098,"TERMINAL",0,0,"2443",,terminal_output
+671,717133,"TERMINAL",0,0,"3554",,terminal_output
+672,718173,"TERMINAL",0,0,"4665",,terminal_output
+673,719224,"TERMINAL",0,0,"5776",,terminal_output
+674,720248,"TERMINAL",0,0,"6887",,terminal_output
+675,721284,"TERMINAL",0,0,"7998",,terminal_output
+676,722357,"TERMINAL",0,0,"820209",,terminal_output
+677,723370,"TERMINAL",0,0,"91120",,terminal_output
+678,724408,"TERMINAL",0,0,"50221",,terminal_output
+679,725442,"TERMINAL",0,0,"1332",,terminal_output
+680,726480,"TERMINAL",0,0,"2443",,terminal_output
+681,727520,"TERMINAL",0,0,"3554",,terminal_output
+682,728559,"TERMINAL",0,0,"4665",,terminal_output
+683,729599,"TERMINAL",0,0,"5776",,terminal_output
+684,730735,"TERMINAL",0,0,"6887",,terminal_output
+685,731680,"TERMINAL",0,0,"7998",,terminal_output
+686,732720,"TERMINAL",0,0,"830309",,terminal_output
+687,733794,"TERMINAL",0,0,"91130",,terminal_output
+688,734820,"TERMINAL",0,0,"2:00221",,terminal_output
+689,736046,"TERMINAL",0,0,"1332",,terminal_output
+690,736968,"TERMINAL",0,0,"2443",,terminal_output
+691,738096,"TERMINAL",0,0,"3554",,terminal_output
+692,738969,"TERMINAL",0,0,"4665",,terminal_output
+693,740142,"TERMINAL",0,0,"5887",,terminal_output
+694,741164,"TERMINAL",0,0,"7998",,terminal_output
+695,742135,"TERMINAL",0,0,"840409",,terminal_output
+696,743278,"TERMINAL",0,0,"91140",,terminal_output
+697,744214,"TERMINAL",0,0,"10221",,terminal_output
+698,745264,"TERMINAL",0,0,"1332",,terminal_output
+699,746286,"TERMINAL",0,0,"2443",,terminal_output
+700,747341,"TERMINAL",0,0,"3554",,terminal_output
+701,748543,"TERMINAL",0,0,"4665",,terminal_output
+702,749370,"TERMINAL",0,0,"5776",,terminal_output
+703,750421,"TERMINAL",0,0,"6887",,terminal_output
+704,751457,"TERMINAL",0,0,"7998",,terminal_output
+705,752545,"TERMINAL",0,0,"850509",,terminal_output
+706,753535,"TERMINAL",0,0,"91150",,terminal_output
+707,754686,"TERMINAL",0,0,"20221",,terminal_output
+708,755615,"TERMINAL",0,0,"1332",,terminal_output
+709,756834,"TERMINAL",0,0,"2443",,terminal_output
+710,757691,"TERMINAL",0,0,"3554",,terminal_output
+711,758883,"TERMINAL",0,0,"4665",,terminal_output
+712,759803,"TERMINAL",0,0,"5776",,terminal_output
+713,760829,"TERMINAL",0,0,"6887",,terminal_output
+714,762056,"TERMINAL",0,0,"7998",,terminal_output
+715,762893,"TERMINAL",0,0,"88:008:009",,terminal_output
+716,763965,"TERMINAL",0,0,"9117:00",,terminal_output
+717,764977,"TERMINAL",0,0,"30221",,terminal_output
+718,766153,"TERMINAL",0,0,"1443",,terminal_output
+719,767079,"TERMINAL",0,0,"3554",,terminal_output
+720,768303,"TERMINAL",0,0,"4665",,terminal_output
+721,769149,"TERMINAL",0,0,"5776",,terminal_output
+722,770191,"TERMINAL",0,0,"6887",,terminal_output
+723,771408,"TERMINAL",0,0,"7998",,terminal_output
+724,772282,"TERMINAL",0,0,"810109",,terminal_output
+725,773357,"TERMINAL",0,0,"91110",,terminal_output
+726,774351,"TERMINAL",0,0,"40221",,terminal_output
+727,775394,"TERMINAL",0,0,"1332",,terminal_output
+728,776427,"TERMINAL",0,0,"2443",,terminal_output
+729,777521,"TERMINAL",0,0,"3554",,terminal_output
+730,778501,"TERMINAL",0,0,"4665",,terminal_output
+731,779565,"TERMINAL",0,0,"5776",,terminal_output
+732,780596,"TERMINAL",0,0,"6887",,terminal_output
+733,781620,"TERMINAL",0,0,"7998",,terminal_output
+734,782743,"TERMINAL",0,0,"820209",,terminal_output
+735,783771,"TERMINAL",0,0,"91120",,terminal_output
+736,784792,"TERMINAL",0,0,"50221",,terminal_output
+737,785919,"TERMINAL",0,0,"1332",,terminal_output
+738,786821,"TERMINAL",0,0,"2443",,terminal_output
+739,787971,"TERMINAL",0,0,"3554",,terminal_output
+740,788989,"TERMINAL",0,0,"4665",,terminal_output
+741,790011,"TERMINAL",0,0,"5776",,terminal_output
+742,791037,"TERMINAL",0,0,"6998",,terminal_output
+743,792068,"TERMINAL",0,0,"830309",,terminal_output
+744,793117,"TERMINAL",0,0,"91130",,terminal_output
+745,794173,"TERMINAL",0,0,"3:00221",,terminal_output
+746,795370,"TERMINAL",0,0,"1332",,terminal_output
+747,796399,"TERMINAL",0,0,"2443",,terminal_output
+748,797385,"TERMINAL",0,0,"3554",,terminal_output
+749,798428,"TERMINAL",0,0,"4665",,terminal_output
+750,799437,"TERMINAL",0,0,"5776",,terminal_output
+751,800430,"TERMINAL",0,0,"6887",,terminal_output
+752,801519,"TERMINAL",0,0,"7998",,terminal_output
+753,802531,"TERMINAL",0,0,"840409",,terminal_output
+754,803585,"TERMINAL",0,0,"91140",,terminal_output
+755,804658,"TERMINAL",0,0,"10221",,terminal_output
+756,805696,"TERMINAL",0,0,"1332",,terminal_output
+757,806735,"TERMINAL",0,0,"2443",,terminal_output
+758,807777,"TERMINAL",0,0,"3554",,terminal_output
+759,808857,"TERMINAL",0,0,"4665",,terminal_output
+760,809877,"TERMINAL",0,0,"5776",,terminal_output
+761,811107,"TERMINAL",0,0,"6887",,terminal_output
+762,811970,"TERMINAL",0,0,"7998",,terminal_output
+763,813157,"TERMINAL",0,0,"850509",,terminal_output
+764,814077,"TERMINAL",0,0,"92251",,terminal_output
+765,815101,"TERMINAL",0,0,"21332",,terminal_output
+766,816335,"TERMINAL",0,0,"2443",,terminal_output
+767,817560,"TERMINAL",0,0,"3554",,terminal_output
+768,818380,"TERMINAL",0,0,"4665",,terminal_output
+769,819278,"TERMINAL",0,0,"5776",,terminal_output
+770,820458,"TERMINAL",0,0,"6887",,terminal_output
+771,821453,"TERMINAL",0,0,"7998",,terminal_output
+772,822479,"TERMINAL",0,0,"89:009:009",,terminal_output
+773,823457,"TERMINAL",0,0,"9118:00",,terminal_output
+774,824500,"TERMINAL",0,0,"30221",,terminal_output
+775,825549,"TERMINAL",0,0,"1332",,terminal_output
+776,826592,"TERMINAL",0,0,"2443",,terminal_output
+777,827800,"TERMINAL",0,0,"3554",,terminal_output
+778,828679,"TERMINAL",0,0,"4665",,terminal_output
+779,829743,"TERMINAL",0,0,"5776",,terminal_output
+780,830772,"TERMINAL",0,0,"6887",,terminal_output
+781,831823,"TERMINAL",0,0,"7998",,terminal_output
+782,832871,"TERMINAL",0,0,"810109",,terminal_output
+783,833944,"TERMINAL",0,0,"91110",,terminal_output
+784,834953,"TERMINAL",0,0,"40221",,terminal_output
+785,836193,"TERMINAL",0,0,"1332",,terminal_output
+786,837116,"TERMINAL",0,0,"2554",,terminal_output
+787,838087,"TERMINAL",0,0,"4665",,terminal_output
+788,839166,"TERMINAL",0,0,"5776",,terminal_output
+789,840188,"TERMINAL",0,0,"6887",,terminal_output
+790,841315,"TERMINAL",0,0,"7998",,terminal_output
+791,842388,"TERMINAL",0,0,"820209",,terminal_output
+792,843465,"TERMINAL",0,0,"91120",,terminal_output
+793,844366,"TERMINAL",0,0,"50221",,terminal_output
+794,845431,"TERMINAL",0,0,"1332",,terminal_output
+795,846437,"TERMINAL",0,0,"2443",,terminal_output
+796,847565,"TERMINAL",0,0,"3554",,terminal_output
+797,848586,"TERMINAL",0,0,"4665",,terminal_output
+798,849615,"TERMINAL",0,0,"5776",,terminal_output
+799,850627,"TERMINAL",0,0,"6887",,terminal_output
+800,851692,"TERMINAL",0,0,"7998",,terminal_output
+801,852731,"TERMINAL",0,0,"830309",,terminal_output
+802,853807,"TERMINAL",0,0,"91130",,terminal_output
+803,854832,"TERMINAL",0,0,"4:00221",,terminal_output
+804,856061,"TERMINAL",0,0,"1332",,terminal_output
+805,857086,"TERMINAL",0,0,"2443",,terminal_output
+806,857935,"TERMINAL",0,0,"3554",,terminal_output
+807,859134,"TERMINAL",0,0,"4665",,terminal_output
+808,860157,"TERMINAL",0,0,"5887",,terminal_output
+809,861181,"TERMINAL",0,0,"7998",,terminal_output
+810,862121,"TERMINAL",0,0,"840409",,terminal_output
+811,863288,"TERMINAL",0,0,"91140",,terminal_output
+812,864211,"TERMINAL",0,0,"10221",,terminal_output
+813,865255,"TERMINAL",0,0,"1332",,terminal_output
+814,866435,"TERMINAL",0,0,"2443",,terminal_output
+815,867341,"TERMINAL",0,0,"3554",,terminal_output
+816,868457,"TERMINAL",0,0,"4665",,terminal_output
+817,869494,"TERMINAL",0,0,"5776",,terminal_output
+818,870602,"TERMINAL",0,0,"6887",,terminal_output
+819,871510,"TERMINAL",0,0,"7998",,terminal_output
+820,872581,"TERMINAL",0,0,"850509",,terminal_output
+821,873605,"TERMINAL",0,0,"91150",,terminal_output
+822,874698,"TERMINAL",0,0,"20221",,terminal_output
+823,875684,"TERMINAL",0,0,"1332",,terminal_output
+824,876849,"TERMINAL",0,0,"2443",,terminal_output
+825,877769,"TERMINAL",0,0,"3554",,terminal_output
+826,878813,"TERMINAL",0,0,"4665",,terminal_output
+827,879919,"TERMINAL",0,0,"5776",,terminal_output
+828,881155,"TERMINAL",0,0,"6887",,terminal_output
+829,881968,"TERMINAL",0,0,"7998",,terminal_output
+830,883197,"TERMINAL",0,0,"830:0030:009",,terminal_output
+831,884119,"TERMINAL",0,0,"9229:01",,terminal_output
+832,885141,"TERMINAL",0,0,"31332",,terminal_output
+833,886270,"TERMINAL",0,0,"2443",,terminal_output
+834,887154,"TERMINAL",0,0,"3554",,terminal_output
+835,888318,"TERMINAL",0,0,"4665",,terminal_output
+836,889254,"TERMINAL",0,0,"5776",,terminal_output
+837,890364,"TERMINAL",0,0,"6887",,terminal_output
+838,891420,"TERMINAL",0,0,"7998",,terminal_output
+839,892412,"TERMINAL",0,0,"810109",,terminal_output
+840,893437,"TERMINAL",0,0,"91110",,terminal_output
+841,894462,"TERMINAL",0,0,"40221",,terminal_output
+842,895488,"TERMINAL",0,0,"1332",,terminal_output
+843,896519,"TERMINAL",0,0,"2443",,terminal_output
+844,897565,"TERMINAL",0,0,"3554",,terminal_output
+845,898605,"TERMINAL",0,0,"4665",,terminal_output
+846,899634,"TERMINAL",0,0,"5776",,terminal_output
+847,900674,"TERMINAL",0,0,"6887",,terminal_output
+848,901715,"TERMINAL",0,0,"7998",,terminal_output
+849,902858,"TERMINAL",0,0,"820209",,terminal_output
+850,903983,"TERMINAL",0,0,"91120",,terminal_output
+851,904827,"TERMINAL",0,0,"50221",,terminal_output
+852,905873,"TERMINAL",0,0,"1332",,terminal_output
+853,907056,"TERMINAL",0,0,"2443",,terminal_output
+854,907978,"TERMINAL",0,0,"3554",,terminal_output
+855,909049,"TERMINAL",0,0,"4665",,terminal_output
+856,910235,"TERMINAL",0,0,"5887",,terminal_output
+857,911253,"TERMINAL",0,0,"7998",,terminal_output
+858,912176,"TERMINAL",0,0,"830309",,terminal_output
+859,913181,"TERMINAL",0,0,"91130",,terminal_output
+860,914221,"TERMINAL",0,0,"5:00221",,terminal_output
+861,915453,"TERMINAL",0,0,"1332",,terminal_output
+862,916517,"TERMINAL",0,0,"2443",,terminal_output
+863,917361,"TERMINAL",0,0,"3554",,terminal_output
+864,918423,"TERMINAL",0,0,"4665",,terminal_output
+865,919453,"TERMINAL",0,0,"5776",,terminal_output
+866,920580,"TERMINAL",0,0,"6887",,terminal_output
+867,921603,"TERMINAL",0,0,"7998",,terminal_output
+868,922588,"TERMINAL",0,0,"840409",,terminal_output
+869,923623,"TERMINAL",0,0,"91140",,terminal_output
+870,924661,"TERMINAL",0,0,"10221",,terminal_output
+871,925796,"TERMINAL",0,0,"1332",,terminal_output
+872,926740,"TERMINAL",0,0,"2443",,terminal_output
+873,927800,"TERMINAL",0,0,"3554",,terminal_output
+874,928877,"TERMINAL",0,0,"4665",,terminal_output
+875,929893,"TERMINAL",0,0,"5776",,terminal_output
+876,931056,"TERMINAL",0,0,"6887",,terminal_output
+877,931983,"TERMINAL",0,0,"7998",,terminal_output
+878,933069,"TERMINAL",0,0,"850509",,terminal_output
+879,934237,"TERMINAL",0,0,"92251",,terminal_output
+880,935219,"TERMINAL",0,0,"21332",,terminal_output
+881,936133,"TERMINAL",0,0,"2443",,terminal_output
+882,937161,"TERMINAL",0,0,"3554",,terminal_output
+883,938328,"TERMINAL",0,0,"4665",,terminal_output
+884,939268,"TERMINAL",0,0,"5776",,terminal_output
+885,940340,"TERMINAL",0,0,"6887",,terminal_output
+886,941380,"TERMINAL",0,0,"7998",,terminal_output
+887,942488,"TERMINAL",0,0,"81:001:009",,terminal_output
+888,943524,"TERMINAL",0,0,"91130:00",,terminal_output
+889,944444,"TERMINAL",0,0,"30221",,terminal_output
+890,945490,"TERMINAL",0,0,"1332",,terminal_output
+891,946585,"TERMINAL",0,0,"2443",,terminal_output
+892,947609,"TERMINAL",0,0,"3554",,terminal_output
+893,948639,"TERMINAL",0,0,"4665",,terminal_output
+894,949758,"TERMINAL",0,0,"5776",,terminal_output
+895,950694,"TERMINAL",0,0,"6887",,terminal_output
+896,951734,"TERMINAL",0,0,"7998",,terminal_output
+897,952934,"TERMINAL",0,0,"810109",,terminal_output
+898,953821,"TERMINAL",0,0,"91110",,terminal_output
+899,954865,"TERMINAL",0,0,"40221",,terminal_output
+900,955906,"TERMINAL",0,0,"1332",,terminal_output
+901,957029,"TERMINAL",0,0,"2443",,terminal_output
+902,958005,"TERMINAL",0,0,"3554",,terminal_output
+903,959033,"TERMINAL",0,0,"4776",,terminal_output
+904,960079,"TERMINAL",0,0,"6887",,terminal_output
+905,961118,"TERMINAL",0,0,"7998",,terminal_output
+906,962151,"TERMINAL",0,0,"820209",,terminal_output
+907,963309,"TERMINAL",0,0,"91120",,terminal_output
+908,964281,"TERMINAL",0,0,"50221",,terminal_output
+909,965274,"TERMINAL",0,0,"1332",,terminal_output
+910,966481,"TERMINAL",0,0,"2443",,terminal_output
+911,967484,"TERMINAL",0,0,"3554",,terminal_output
+912,968518,"TERMINAL",0,0,"4665",,terminal_output
+913,969541,"TERMINAL",0,0,"5776",,terminal_output
+914,970573,"TERMINAL",0,0,"6887",,terminal_output
+915,971529,"TERMINAL",0,0,"7998",,terminal_output
+916,972573,"TERMINAL",0,0,"830309",,terminal_output
+917,973614,"TERMINAL",0,0,"91130",,terminal_output
+918,974746,"TERMINAL",0,0,"6:00221",,terminal_output
+919,975816,"TERMINAL",0,0,"1332",,terminal_output
+920,976793,"TERMINAL",0,0,"2443",,terminal_output
+921,977795,"TERMINAL",0,0,"3554",,terminal_output
+922,978838,"TERMINAL",0,0,"4665",,terminal_output
+923,979971,"TERMINAL",0,0,"5776",,terminal_output
+924,981619,"TERMINAL",0,0,"6998",,terminal_output
+925,982666,"TERMINAL",0,0,"840409",,terminal_output
+926,983751,"TERMINAL",0,0,"91140",,terminal_output
+927,984763,"TERMINAL",0,0,"10221",,terminal_output
+928,985907,"TERMINAL",0,0,"1332",,terminal_output
+929,986930,"TERMINAL",0,0,"2443",,terminal_output
+930,987953,"TERMINAL",0,0,"3554",,terminal_output
+931,989079,"TERMINAL",0,0,"4665",,terminal_output
+932,990103,"TERMINAL",0,0,"5776",,terminal_output
+933,991127,"TERMINAL",0,0,"6998",,terminal_output
+934,992050,"TERMINAL",0,0,"850509",,terminal_output
+935,993175,"TERMINAL",0,0,"91150",,terminal_output
+936,994226,"TERMINAL",0,0,"20221",,terminal_output
+937,995327,"TERMINAL",0,0,"1332",,terminal_output
+938,996234,"TERMINAL",0,0,"2443",,terminal_output
+939,997271,"TERMINAL",0,0,"3554",,terminal_output
+940,998530,"TERMINAL",0,0,"4665",,terminal_output
+941,999368,"TERMINAL",0,0,"5776",,terminal_output
+942,1000492,"TERMINAL",0,0,"6887",,terminal_output
+943,1001676,"TERMINAL",0,0,"7998",,terminal_output
+944,1002519,"TERMINAL",0,0,"82:002:009",,terminal_output
+945,1003623,"TERMINAL",0,0,"9111:00",,terminal_output
+946,1004609,"TERMINAL",0,0,"30221",,terminal_output
+947,1005670,"TERMINAL",0,0,"1332",,terminal_output
+948,1006705,"TERMINAL",0,0,"2443",,terminal_output
+949,1007749,"TERMINAL",0,0,"3554",,terminal_output
+950,1008849,"TERMINAL",0,0,"4665",,terminal_output
+951,1009868,"TERMINAL",0,0,"5776",,terminal_output
+952,1011096,"TERMINAL",0,0,"6887",,terminal_output
+953,1011962,"TERMINAL",0,0,"7998",,terminal_output
+954,1013041,"TERMINAL",0,0,"8111110",,terminal_output
+955,1014244,"TERMINAL",0,0,"40221",,terminal_output
+956,1015238,"TERMINAL",0,0,"1332",,terminal_output
+957,1016161,"TERMINAL",0,0,"2443",,terminal_output
+958,1017343,"TERMINAL",0,0,"3554",,terminal_output
+959,1018263,"TERMINAL",0,0,"4665",,terminal_output
+960,1019287,"TERMINAL",0,0,"5776",,terminal_output
+961,1020349,"TERMINAL",0,0,"6887",,terminal_output
+962,1021398,"TERMINAL",0,0,"7998",,terminal_output
+963,1022423,"TERMINAL",0,0,"820209",,terminal_output
+964,1023554,"TERMINAL",0,0,"91120",,terminal_output
+965,1024513,"TERMINAL",0,0,"50221",,terminal_output
+966,1025537,"TERMINAL",0,0,"1332",,terminal_output
+967,1026664,"TERMINAL",0,0,"2443",,terminal_output
+968,1027618,"TERMINAL",0,0,"3554",,terminal_output
+969,1028684,"TERMINAL",0,0,"4665",,terminal_output
+970,1029733,"TERMINAL",0,0,"5776",,terminal_output
+971,1030747,"TERMINAL",0,0,"6887",,terminal_output
+972,1031986,"TERMINAL",0,0,"7998",,terminal_output
+973,1033010,"TERMINAL",0,0,"830309",,terminal_output
+974,1033943,"TERMINAL",0,0,"91130",,terminal_output
+975,1035059,"TERMINAL",0,0,"7:00221",,terminal_output
+976,1036086,"TERMINAL",0,0,"1332",,terminal_output
+977,1037055,"TERMINAL",0,0,"2443",,terminal_output
+978,1038131,"TERMINAL",0,0,"3665",,terminal_output
+979,1039209,"TERMINAL",0,0,"5776",,terminal_output
+980,1040178,"TERMINAL",0,0,"6887",,terminal_output
+981,1041203,"TERMINAL",0,0,"7998",,terminal_output
+982,1042229,"TERMINAL",0,0,"840409",,terminal_output
+983,1043251,"TERMINAL",0,0,"91140",,terminal_output
+984,1044261,"TERMINAL",0,0,"10221",,terminal_output
+985,1045398,"TERMINAL",0,0,"1332",,terminal_output
+986,1046630,"TERMINAL",0,0,"2443",,terminal_output
+987,1047505,"TERMINAL",0,0,"3554",,terminal_output
+988,1048501,"TERMINAL",0,0,"4665",,terminal_output
+989,1049495,"TERMINAL",0,0,"5776",,terminal_output
+990,1050534,"TERMINAL",0,0,"6887",,terminal_output
+991,1051550,"TERMINAL",0,0,"7998",,terminal_output
+992,1052619,"TERMINAL",0,0,"850509",,terminal_output
+993,1053631,"TERMINAL",0,0,"91150",,terminal_output
+994,1054718,"TERMINAL",0,0,"20221",,terminal_output
+995,1055744,"TERMINAL",0,0,"1332",,terminal_output
+996,1056766,"TERMINAL",0,0,"2443",,terminal_output
+997,1058000,"TERMINAL",0,0,"3554",,terminal_output
+998,1058849,"TERMINAL",0,0,"4665",,terminal_output
+999,1059889,"TERMINAL",0,0,"5776",,terminal_output
+1000,1061012,"TERMINAL",0,0,"6887",,terminal_output
+1001,1061991,"TERMINAL",0,0,"7998",,terminal_output
+1002,1063115,"TERMINAL",0,0,"83:013:012:00",,terminal_output
+1003,1064141,"TERMINAL",0,0,"30221",,terminal_output
+1004,1065111,"TERMINAL",0,0,"1332",,terminal_output
+1005,1066290,"TERMINAL",0,0,"2443",,terminal_output
+1006,1067215,"TERMINAL",0,0,"3554",,terminal_output
+1007,1068338,"TERMINAL",0,0,"4665",,terminal_output
+1008,1069319,"TERMINAL",0,0,"5776",,terminal_output
+1009,1070387,"TERMINAL",0,0,"6887",,terminal_output
+1010,1071412,"TERMINAL",0,0,"7998",,terminal_output
+1011,1072536,"TERMINAL",0,0,"810109",,terminal_output
+1012,1073561,"TERMINAL",0,0,"91110",,terminal_output
+1013,1074587,"TERMINAL",0,0,"40221",,terminal_output
+1014,1075609,"TERMINAL",0,0,"1332",,terminal_output
+1015,1076634,"TERMINAL",0,0,"2443",,terminal_output
+1016,1077680,"TERMINAL",0,0,"3554",,terminal_output
+1017,1078707,"TERMINAL",0,0,"4665",,terminal_output
+1018,1079752,"TERMINAL",0,0,"5776",,terminal_output
+1019,1080782,"TERMINAL",0,0,"6887",,terminal_output
+1020,1081838,"TERMINAL",0,0,"7998",,terminal_output
+1021,1082980,"TERMINAL",0,0,"820209",,terminal_output
+1022,1084024,"TERMINAL",0,0,"91120",,terminal_output
+1023,1085028,"TERMINAL",0,0,"50221",,terminal_output
+1024,1085984,"TERMINAL",0,0,"1332",,terminal_output
+1025,1087022,"TERMINAL",0,0,"2554",,terminal_output
+1026,1088101,"TERMINAL",0,0,"4665",,terminal_output
+1027,1089171,"TERMINAL",0,0,"5776",,terminal_output
+1028,1090195,"TERMINAL",0,0,"6887",,terminal_output
+1029,1091381,"TERMINAL",0,0,"7998",,terminal_output
+1030,1092300,"TERMINAL",0,0,"830309",,terminal_output
+1031,1093324,"TERMINAL",0,0,"91130",,terminal_output
+1032,1094334,"TERMINAL",0,0,"8:00221",,terminal_output
+1033,1095400,"TERMINAL",0,0,"1332",,terminal_output
+1034,1096528,"TERMINAL",0,0,"2443",,terminal_output
+1035,1097829,"TERMINAL",0,0,"3554",,terminal_output
+1036,1098548,"TERMINAL",0,0,"4665",,terminal_output
+1037,1099672,"TERMINAL",0,0,"5776",,terminal_output
+1038,1100698,"TERMINAL",0,0,"6887",,terminal_output
+1039,1101853,"TERMINAL",0,0,"7998",,terminal_output
+1040,1103066,"TERMINAL",0,0,"840409",,terminal_output
+1041,1103970,"TERMINAL",0,0,"91140",,terminal_output
+1042,1104965,"TERMINAL",0,0,"10221",,terminal_output
+1043,1106227,"TERMINAL",0,0,"1443",,terminal_output
+1044,1107053,"TERMINAL",0,0,"3554",,terminal_output
+1045,1108175,"TERMINAL",0,0,"4665",,terminal_output
+1046,1109252,"TERMINAL",0,0,"5776",,terminal_output
+1047,1110169,"TERMINAL",0,0,"6887",,terminal_output
+1048,1111346,"TERMINAL",0,0,"7998",,terminal_output
+1049,1112370,"TERMINAL",0,0,"850509",,terminal_output
+1050,1113527,"TERMINAL",0,0,"91150",,terminal_output
+1051,1114346,"TERMINAL",0,0,"20221",,terminal_output
+1052,1115576,"TERMINAL",0,0,"1332",,terminal_output
+1053,1116569,"TERMINAL",0,0,"2443",,terminal_output
+1054,1117528,"TERMINAL",0,0,"3554",,terminal_output
+1055,1118545,"TERMINAL",0,0,"4665",,terminal_output
+1056,1119642,"TERMINAL",0,0,"5776",,terminal_output
+1057,1120592,"TERMINAL",0,0,"6887",,terminal_output
+1058,1121693,"TERMINAL",0,0,"7998",,terminal_output
+1059,1122715,"TERMINAL",0,0,"84:004:009",,terminal_output
+1060,1123741,"TERMINAL",0,0,"9113:00",,terminal_output
+1061,1124965,"TERMINAL",0,0,"30221",,terminal_output
+1062,1125813,"TERMINAL",0,0,"1332",,terminal_output
+1063,1126912,"TERMINAL",0,0,"2443",,terminal_output
+1064,1127937,"TERMINAL",0,0,"3554",,terminal_output
+1065,1129061,"TERMINAL",0,0,"4665",,terminal_output
+1066,1130087,"TERMINAL",0,0,"5776",,terminal_output
+1067,1131111,"TERMINAL",0,0,"6998",,terminal_output
+1068,1132077,"TERMINAL",0,0,"810109",,terminal_output
+1069,1133157,"TERMINAL",0,0,"91110",,terminal_output
+1070,1134182,"TERMINAL",0,0,"40221",,terminal_output
+1071,1135314,"TERMINAL",0,0,"1332",,terminal_output
+1072,1136435,"TERMINAL",0,0,"2443",,terminal_output
+1073,1137356,"TERMINAL",0,0,"3554",,terminal_output
+1074,1138365,"TERMINAL",0,0,"4665",,terminal_output
+1075,1139536,"TERMINAL",0,0,"5776",,terminal_output
+1076,1140553,"TERMINAL",0,0,"6887",,terminal_output
+1077,1141605,"TERMINAL",0,0,"7998",,terminal_output
+1078,1142580,"TERMINAL",0,0,"820209",,terminal_output
+1079,1143614,"TERMINAL",0,0,"91120",,terminal_output
+1080,1144628,"TERMINAL",0,0,"50221",,terminal_output
+1081,1145760,"TERMINAL",0,0,"1332",,terminal_output
+1082,1146703,"TERMINAL",0,0,"2443",,terminal_output
+1083,1147767,"TERMINAL",0,0,"3554",,terminal_output
+1084,1148825,"TERMINAL",0,0,"4665",,terminal_output
+1085,1150054,"TERMINAL",0,0,"5776",,terminal_output
+1086,1151078,"TERMINAL",0,0,"6887",,terminal_output
+1087,1152000,"TERMINAL",0,0,"7998",,terminal_output
+1088,1153126,"TERMINAL",0,0,"830309",,terminal_output
+1089,1154151,"TERMINAL",0,0,"91130",,terminal_output
+1090,1155175,"TERMINAL",0,0,"9:00332",,terminal_output
+1091,1156200,"TERMINAL",0,0,"2443",,terminal_output
+1092,1157148,"TERMINAL",0,0,"3554",,terminal_output
+1093,1158246,"TERMINAL",0,0,"4665",,terminal_output
+1094,1159223,"TERMINAL",0,0,"5776",,terminal_output
+1095,1160294,"TERMINAL",0,0,"6887",,terminal_output
+1096,1161317,"TERMINAL",0,0,"7998",,terminal_output
+1097,1162350,"TERMINAL",0,0,"840409",,terminal_output
+1098,1163386,"TERMINAL",0,0,"91140",,terminal_output
+1099,1164491,"TERMINAL",0,0,"10221",,terminal_output
+1100,1165617,"TERMINAL",0,0,"1332",,terminal_output
+1101,1166570,"TERMINAL",0,0,"2443",,terminal_output
+1102,1167650,"TERMINAL",0,0,"3554",,terminal_output
+1103,1168693,"TERMINAL",0,0,"4665",,terminal_output
+1104,1169655,"TERMINAL",0,0,"5776",,terminal_output
+1105,1170693,"TERMINAL",0,0,"6887",,terminal_output
+1106,1171783,"TERMINAL",0,0,"7998",,terminal_output
+1107,1173447,"TERMINAL",0,0,"8515150",,terminal_output
+1108,1174451,"TERMINAL",0,0,"20221",,terminal_output
+1109,1175475,"TERMINAL",0,0,"1332",,terminal_output
+1110,1176898,"TERMINAL",0,0,"2443",,terminal_output
+1111,1177600,"TERMINAL",0,0,"3554",,terminal_output
+1112,1178599,"TERMINAL",0,0,"4665",,terminal_output
+1113,1179647,"TERMINAL",0,0,"5776",,terminal_output
+1114,1180692,"TERMINAL",0,0,"6887",,terminal_output
+1115,1181747,"TERMINAL",0,0,"7998",,terminal_output
+1116,1182784,"TERMINAL",0,0,"85:005:009",,terminal_output
+1117,1183847,"TERMINAL",0,0,"9114:00",,terminal_output
+1118,1184877,"TERMINAL",0,0,"30221",,terminal_output
+1119,1185923,"TERMINAL",0,0,"1332",,terminal_output
+1120,1187168,"TERMINAL",0,0,"2443",,terminal_output
+1121,1188148,"TERMINAL",0,0,"3665",,terminal_output
+1122,1189089,"TERMINAL",0,0,"5776",,terminal_output
+1123,1190196,"TERMINAL",0,0,"6887",,terminal_output
+1124,1191200,"TERMINAL",0,0,"7998",,terminal_output
+1125,1192227,"TERMINAL",0,0,"810109",,terminal_output
+1126,1193267,"TERMINAL",0,0,"91110",,terminal_output
+1127,1194291,"TERMINAL",0,0,"40221",,terminal_output
+1128,1195387,"TERMINAL",0,0,"1332",,terminal_output
+1129,1196577,"TERMINAL",0,0,"2443",,terminal_output
+1130,1197567,"TERMINAL",0,0,"3554",,terminal_output
+1131,1198480,"TERMINAL",0,0,"4665",,terminal_output
+1132,1199615,"TERMINAL",0,0,"5776",,terminal_output
+1133,1200744,"TERMINAL",0,0,"6887",,terminal_output
+1134,1201664,"TERMINAL",0,0,"7998",,terminal_output
+1135,1202669,"TERMINAL",0,0,"820209",,terminal_output
+1136,1203716,"TERMINAL",0,0,"91120",,terminal_output
+1137,1204749,"TERMINAL",0,0,"50221",,terminal_output
+1138,1205799,"TERMINAL",0,0,"1332",,terminal_output
+1139,1206834,"TERMINAL",0,0,"2443",,terminal_output
+1140,1207885,"TERMINAL",0,0,"3554",,terminal_output
+1141,1209037,"TERMINAL",0,0,"4665",,terminal_output
+1142,1210060,"TERMINAL",0,0,"5776",,terminal_output
+1143,1211083,"TERMINAL",0,0,"6887",,terminal_output
+1144,1212153,"TERMINAL",0,0,"730309",,terminal_output
+1145,1213237,"TERMINAL",0,0,"91130",,terminal_output
+1146,1214198,"TERMINAL",0,0,"20:00221",,terminal_output
+1147,1215182,"TERMINAL",0,0,"1332",,terminal_output
+1148,1216307,"TERMINAL",0,0,"2443",,terminal_output
+1149,1217333,"TERMINAL",0,0,"3554",,terminal_output
+1150,1218458,"TERMINAL",0,0,"4665",,terminal_output
+1151,1219330,"TERMINAL",0,0,"5776",,terminal_output
+1152,1220405,"TERMINAL",0,0,"6887",,terminal_output
+1153,1221630,"TERMINAL",0,0,"7998",,terminal_output
+1154,1222586,"TERMINAL",0,0,"840409",,terminal_output
+1155,1223679,"TERMINAL",0,0,"91140",,terminal_output
+1156,1224605,"TERMINAL",0,0,"10221",,terminal_output
+1157,1225617,"TERMINAL",0,0,"1332",,terminal_output
+1158,1226753,"TERMINAL",0,0,"2443",,terminal_output
+1159,1227694,"TERMINAL",0,0,"3554",,terminal_output
+1160,1228741,"TERMINAL",0,0,"4665",,terminal_output
+1161,1229779,"TERMINAL",0,0,"5776",,terminal_output
+1162,1230847,"TERMINAL",0,0,"6887",,terminal_output
+1163,1231874,"TERMINAL",0,0,"7998",,terminal_output
+1164,1232942,"TERMINAL",0,0,"850509",,terminal_output
+1165,1234026,"TERMINAL",0,0,"91150",,terminal_output
+1166,1235148,"TERMINAL",0,0,"20221",,terminal_output
+1167,1236172,"TERMINAL",0,0,"1443",,terminal_output
+1168,1237093,"TERMINAL",0,0,"3554",,terminal_output
+1169,1238323,"TERMINAL",0,0,"4665",,terminal_output
+1170,1239230,"TERMINAL",0,0,"5776",,terminal_output
+1171,1240236,"TERMINAL",0,0,"6887",,terminal_output
+1172,1241395,"TERMINAL",0,0,"7998",,terminal_output
+1173,1242418,"TERMINAL",0,0,"86:006:009",,terminal_output
+1174,1243352,"TERMINAL",0,0,"9115:00",,terminal_output
+1175,1244603,"TERMINAL",0,0,"30221",,terminal_output
+1176,1245595,"TERMINAL",0,0,"1332",,terminal_output
+1177,1246516,"TERMINAL",0,0,"2443",,terminal_output
+1178,1247641,"TERMINAL",0,0,"3554",,terminal_output
+1179,1248632,"TERMINAL",0,0,"4665",,terminal_output
+1180,1249638,"TERMINAL",0,0,"5776",,terminal_output
+1181,1250677,"TERMINAL",0,0,"6887",,terminal_output
+1182,1251719,"TERMINAL",0,0,"7998",,terminal_output
+1183,1252769,"TERMINAL",0,0,"810109",,terminal_output
+1184,1253812,"TERMINAL",0,0,"91110",,terminal_output
+1185,1254913,"TERMINAL",0,0,"40221",,terminal_output
+1186,1255896,"TERMINAL",0,0,"1332",,terminal_output
+1187,1257110,"TERMINAL",0,0,"2443",,terminal_output
+1188,1258087,"TERMINAL",0,0,"3554",,terminal_output
+1189,1259027,"TERMINAL",0,0,"4776",,terminal_output
+1190,1260138,"TERMINAL",0,0,"6887",,terminal_output
+1191,1261158,"TERMINAL",0,0,"7998",,terminal_output
+1192,1262182,"TERMINAL",0,0,"820209",,terminal_output
+1193,1263309,"TERMINAL",0,0,"91120",,terminal_output
+1194,1264277,"TERMINAL",0,0,"50221",,terminal_output
+1195,1265357,"TERMINAL",0,0,"1332",,terminal_output
+1196,1266346,"TERMINAL",0,0,"2443",,terminal_output
+1197,1267405,"TERMINAL",0,0,"3554",,terminal_output
+1198,1268443,"TERMINAL",0,0,"4665",,terminal_output
+1199,1269658,"TERMINAL",0,0,"5776",,terminal_output
+1200,1270605,"TERMINAL",0,0,"6887",,terminal_output
+1201,1271643,"TERMINAL",0,0,"7998",,terminal_output
+1202,1272626,"TERMINAL",0,0,"830309",,terminal_output
+1203,1273755,"TERMINAL",0,0,"91130",,terminal_output
+1204,1274781,"TERMINAL",0,0,"1:00221",,terminal_output
+1205,1275748,"TERMINAL",0,0,"1332",,terminal_output
+1206,1276791,"TERMINAL",0,0,"2443",,terminal_output
+1207,1277837,"TERMINAL",0,0,"3554",,terminal_output
+1208,1278876,"TERMINAL",0,0,"4665",,terminal_output
+1209,1279922,"TERMINAL",0,0,"5776",,terminal_output
+1210,1280971,"TERMINAL",0,0,"6887",,terminal_output
+1211,1282299,"TERMINAL",0,0,"7998",,terminal_output
+1212,1283188,"TERMINAL",0,0,"8414140",,terminal_output
+1213,1284082,"TERMINAL",0,0,"10221",,terminal_output
+1214,1285137,"TERMINAL",0,0,"1332",,terminal_output
+1215,1286247,"TERMINAL",0,0,"2443",,terminal_output
+1216,1287269,"TERMINAL",0,0,"3554",,terminal_output
+1217,1288297,"TERMINAL",0,0,"4665",,terminal_output
+1218,1289295,"TERMINAL",0,0,"5776",,terminal_output
+1219,1290504,"TERMINAL",0,0,"6887",,terminal_output
+1220,1291563,"TERMINAL",0,0,"7998",,terminal_output
+1221,1292493,"TERMINAL",0,0,"850509",,terminal_output
+1222,1293661,"TERMINAL",0,0,"91150",,terminal_output
+1223,1294656,"TERMINAL",0,0,"20221",,terminal_output
+1224,1295771,"TERMINAL",0,0,"1332",,terminal_output
+1225,1296694,"TERMINAL",0,0,"2443",,terminal_output
+1226,1297820,"TERMINAL",0,0,"3554",,terminal_output
+1227,1298716,"TERMINAL",0,0,"4665",,terminal_output
+1228,1299765,"TERMINAL",0,0,"5776",,terminal_output
+1229,1300801,"TERMINAL",0,0,"6887",,terminal_output
+1230,1301869,"TERMINAL",0,0,"7998",,terminal_output
+1231,1302923,"TERMINAL",0,0,"87:007:009",,terminal_output
+1232,1304211,"TERMINAL",0,0,"9116:00",,terminal_output
+1233,1305025,"TERMINAL",0,0,"30332",,terminal_output
+1234,1306066,"TERMINAL",0,0,"2443",,terminal_output
+1235,1307137,"TERMINAL",0,0,"3554",,terminal_output
+1236,1308161,"TERMINAL",0,0,"4665",,terminal_output
+1237,1309223,"TERMINAL",0,0,"5776",,terminal_output
+1238,1310311,"TERMINAL",0,0,"6887",,terminal_output
+1239,1311335,"TERMINAL",0,0,"7998",,terminal_output
+1240,1312388,"TERMINAL",0,0,"810109",,terminal_output
+1241,1313371,"TERMINAL",0,0,"91110",,terminal_output
+1242,1314645,"TERMINAL",0,0,"40221",,terminal_output
+1243,1315559,"TERMINAL",0,0,"1332",,terminal_output
+1244,1316557,"TERMINAL",0,0,"2443",,terminal_output
+1245,1317634,"TERMINAL",0,0,"3554",,terminal_output
+1246,1318643,"TERMINAL",0,0,"4665",,terminal_output
+1247,1319667,"TERMINAL",0,0,"5776",,terminal_output
+1248,1320655,"TERMINAL",0,0,"6887",,terminal_output
+1249,1321709,"TERMINAL",0,0,"7998",,terminal_output
+1250,1322758,"TERMINAL",0,0,"820209",,terminal_output
+1251,1323799,"TERMINAL",0,0,"91120",,terminal_output
+1252,1324846,"TERMINAL",0,0,"50221",,terminal_output
+1253,1325893,"TERMINAL",0,0,"1332",,terminal_output
+1254,1326934,"TERMINAL",0,0,"2443",,terminal_output
+1255,1328026,"TERMINAL",0,0,"3554",,terminal_output
+1256,1329058,"TERMINAL",0,0,"4665",,terminal_output
+1257,1330183,"TERMINAL",0,0,"5887",,terminal_output
+1258,1331250,"TERMINAL",0,0,"7998",,terminal_output
+1259,1332328,"TERMINAL",0,0,"830309",,terminal_output
+1260,1333275,"TERMINAL",0,0,"91130",,terminal_output
+1261,1334237,"TERMINAL",0,0,"2:00221",,terminal_output
+1262,1335297,"TERMINAL",0,0,"1332",,terminal_output
+1263,1336325,"TERMINAL",0,0,"2443",,terminal_output
+1264,1337549,"TERMINAL",0,0,"3554",,terminal_output
+1265,1338573,"TERMINAL",0,0,"4665",,terminal_output
+1266,1339453,"TERMINAL",0,0,"5776",,terminal_output
+1267,1340522,"TERMINAL",0,0,"6887",,terminal_output
+1268,1341543,"TERMINAL",0,0,"7998",,terminal_output
+1269,1342670,"TERMINAL",0,0,"840409",,terminal_output
+1270,1343694,"TERMINAL",0,0,"91140",,terminal_output
+1271,1344721,"TERMINAL",0,0,"10221",,terminal_output
+1272,1345693,"TERMINAL",0,0,"1332",,terminal_output
+1273,1346770,"TERMINAL",0,0,"2443",,terminal_output
+1274,1347781,"TERMINAL",0,0,"3554",,terminal_output
+1275,1348818,"TERMINAL",0,0,"4665",,terminal_output
+1276,1350052,"TERMINAL",0,0,"5776",,terminal_output
+1277,1351068,"TERMINAL",0,0,"6887",,terminal_output
+1278,1351970,"TERMINAL",0,0,"7998",,terminal_output
+1279,1352994,"TERMINAL",0,0,"850509",,terminal_output
+1280,1354023,"TERMINAL",0,0,"92251",,terminal_output
+1281,1355051,"TERMINAL",0,0,"21332",,terminal_output
+1282,1356192,"TERMINAL",0,0,"2443",,terminal_output
+1283,1357133,"TERMINAL",0,0,"3554",,terminal_output
+1284,1358250,"TERMINAL",0,0,"4665",,terminal_output
+1285,1359219,"TERMINAL",0,0,"5776",,terminal_output
+1286,1360257,"TERMINAL",0,0,"6887",,terminal_output
+1287,1361299,"TERMINAL",0,0,"7998",,terminal_output
+1288,1362435,"TERMINAL",0,0,"88:008:009",,terminal_output
+1289,1363565,"TERMINAL",0,0,"9117:00",,terminal_output
+1290,1364481,"TERMINAL",0,0,"30221",,terminal_output
+1291,1365608,"TERMINAL",0,0,"1332",,terminal_output
+1292,1366556,"TERMINAL",0,0,"2443",,terminal_output
+1293,1367656,"TERMINAL",0,0,"3554",,terminal_output
+1294,1368677,"TERMINAL",0,0,"4665",,terminal_output
+1295,1369743,"TERMINAL",0,0,"5776",,terminal_output
+1296,1370756,"TERMINAL",0,0,"6887",,terminal_output
+1297,1371709,"TERMINAL",0,0,"7998",,terminal_output
+1298,1372757,"TERMINAL",0,0,"810109",,terminal_output
+1299,1373800,"TERMINAL",0,0,"91110",,terminal_output
+1300,1374840,"TERMINAL",0,0,"40221",,terminal_output
+1301,1375875,"TERMINAL",0,0,"1332",,terminal_output
+1302,1376918,"TERMINAL",0,0,"2443",,terminal_output
+1303,1378101,"TERMINAL",0,0,"3554",,terminal_output
+1304,1379125,"TERMINAL",0,0,"4665",,terminal_output
+1305,1380149,"TERMINAL",0,0,"6887",,terminal_output
+1306,1381271,"TERMINAL",0,0,"7998",,terminal_output
+1307,1382198,"TERMINAL",0,0,"820209",,terminal_output
+1308,1383223,"TERMINAL",0,0,"91120",,terminal_output
+1309,1384253,"TERMINAL",0,0,"50221",,terminal_output
+1310,1385371,"TERMINAL",0,0,"1332",,terminal_output
+1311,1386396,"TERMINAL",0,0,"2443",,terminal_output
+1312,1387424,"TERMINAL",0,0,"3554",,terminal_output
+1313,1388449,"TERMINAL",0,0,"4665",,terminal_output
+1314,1389467,"TERMINAL",0,0,"5776",,terminal_output
+1315,1390498,"TERMINAL",0,0,"6887",,terminal_output
+1316,1391722,"TERMINAL",0,0,"7998",,terminal_output
+1317,1392747,"TERMINAL",0,0,"830309",,terminal_output
+1318,1393768,"TERMINAL",0,0,"91130",,terminal_output
+1319,1394681,"TERMINAL",0,0,"3:00221",,terminal_output
+1320,1395692,"TERMINAL",0,0,"1332",,terminal_output
+1321,1396741,"TERMINAL",0,0,"2443",,terminal_output
+1322,1397798,"TERMINAL",0,0,"3554",,terminal_output
+1323,1398824,"TERMINAL",0,0,"4665",,terminal_output
+1324,1399959,"TERMINAL",0,0,"5776",,terminal_output
+1325,1400899,"TERMINAL",0,0,"6887",,terminal_output
+1326,1401971,"TERMINAL",0,0,"7998",,terminal_output
+1327,1402972,"TERMINAL",0,0,"840409",,terminal_output
+1328,1404013,"TERMINAL",0,0,"92241",,terminal_output
+1329,1405055,"TERMINAL",0,0,"11332",,terminal_output
+1330,1406158,"TERMINAL",0,0,"2443",,terminal_output
+1331,1407284,"TERMINAL",0,0,"3554",,terminal_output
+1332,1408310,"TERMINAL",0,0,"4665",,terminal_output
+1333,1409228,"TERMINAL",0,0,"5776",,terminal_output
+1334,1410357,"TERMINAL",0,0,"6887",,terminal_output
+1335,1411483,"TERMINAL",0,0,"7998",,terminal_output
+1336,1412508,"TERMINAL",0,0,"850509",,terminal_output
+1337,1413680,"TERMINAL",0,0,"91150",,terminal_output
+1338,1414463,"TERMINAL",0,0,"20221",,terminal_output
+1339,1415727,"TERMINAL",0,0,"1332",,terminal_output
+1340,1416606,"TERMINAL",0,0,"2443",,terminal_output
+1341,1417630,"TERMINAL",0,0,"3554",,terminal_output
+1342,1418890,"TERMINAL",0,0,"4665",,terminal_output
+1343,1419688,"TERMINAL",0,0,"5776",,terminal_output
+1344,1420737,"TERMINAL",0,0,"6887",,terminal_output
+1345,1421770,"TERMINAL",0,0,"7998",,terminal_output
+1346,1422852,"TERMINAL",0,0,"89:009:009",,terminal_output
+1347,1423874,"TERMINAL",0,0,"9118:00",,terminal_output
+1348,1424899,"TERMINAL",0,0,"30221",,terminal_output
+1349,1426023,"TERMINAL",0,0,"1332",,terminal_output
+1350,1426990,"TERMINAL",0,0,"2443",,terminal_output
+1351,1428178,"TERMINAL",0,0,"3665",,terminal_output
+1352,1429199,"TERMINAL",0,0,"5776",,terminal_output
+1353,1430121,"TERMINAL",0,0,"6887",,terminal_output
+1354,1431246,"TERMINAL",0,0,"7998",,terminal_output
+1355,1432271,"TERMINAL",0,0,"810109",,terminal_output
+1356,1433295,"TERMINAL",0,0,"91110",,terminal_output
+1357,1434276,"TERMINAL",0,0,"40221",,terminal_output
+1358,1435345,"TERMINAL",0,0,"1332",,terminal_output
+1359,1436377,"TERMINAL",0,0,"2443",,terminal_output
+1360,1437494,"TERMINAL",0,0,"3554",,terminal_output
+1361,1438460,"TERMINAL",0,0,"4665",,terminal_output
+1362,1439697,"TERMINAL",0,0,"5776",,terminal_output
+1363,1440770,"TERMINAL",0,0,"6887",,terminal_output
+1364,1441693,"TERMINAL",0,0,"7998",,terminal_output
+1365,1442761,"TERMINAL",0,0,"820209",,terminal_output
+1366,1443847,"TERMINAL",0,0,"91120",,terminal_output
+1367,1444773,"TERMINAL",0,0,"50221",,terminal_output
+1368,1445814,"TERMINAL",0,0,"1332",,terminal_output
+1369,1446860,"TERMINAL",0,0,"2443",,terminal_output
+1370,1447918,"TERMINAL",0,0,"3554",,terminal_output
+1371,1448962,"TERMINAL",0,0,"4665",,terminal_output
+1372,1449995,"TERMINAL",0,0,"5776",,terminal_output
+1373,1451111,"TERMINAL",0,0,"6998",,terminal_output
+1374,1452112,"TERMINAL",0,0,"830309",,terminal_output
+1375,1453137,"TERMINAL",0,0,"91130",,terminal_output
+1376,1454199,"TERMINAL",0,0,"4:00221",,terminal_output
+1377,1455401,"TERMINAL",0,0,"1332",,terminal_output
+1378,1456341,"TERMINAL",0,0,"2443",,terminal_output
+1379,1457461,"TERMINAL",0,0,"3554",,terminal_output
+1380,1458485,"TERMINAL",0,0,"4665",,terminal_output
+1381,1459509,"TERMINAL",0,0,"5776",,terminal_output
+1382,1460534,"TERMINAL",0,0,"6887",,terminal_output
+1383,1461695,"TERMINAL",0,0,"7998",,terminal_output
+1384,1462542,"TERMINAL",0,0,"840409",,terminal_output
+1385,1463600,"TERMINAL",0,0,"91140",,terminal_output
+1386,1464633,"TERMINAL",0,0,"10221",,terminal_output
+1387,1465859,"TERMINAL",0,0,"1332",,terminal_output
+1388,1466882,"TERMINAL",0,0,"2443",,terminal_output
+1389,1467781,"TERMINAL",0,0,"3554",,terminal_output
+1390,1468827,"TERMINAL",0,0,"4665",,terminal_output
+1391,1469876,"TERMINAL",0,0,"5776",,terminal_output
+1392,1470930,"TERMINAL",0,0,"6887",,terminal_output
+1393,1471977,"TERMINAL",0,0,"7998",,terminal_output
+1394,1473026,"TERMINAL",0,0,"8515150",,terminal_output
+1395,1474204,"TERMINAL",0,0,"20221",,terminal_output
+1396,1475175,"TERMINAL",0,0,"1332",,terminal_output
+1397,1476200,"TERMINAL",0,0,"2443",,terminal_output
+1398,1477259,"TERMINAL",0,0,"3554",,terminal_output
+1399,1478388,"TERMINAL",0,0,"4665",,terminal_output
+1400,1479288,"TERMINAL",0,0,"5776",,terminal_output
+1401,1480328,"TERMINAL",0,0,"6887",,terminal_output
+1402,1481525,"TERMINAL",0,0,"7998",,terminal_output
+1403,1482552,"TERMINAL",0,0,"840:0040:009",,terminal_output
+1404,1483470,"TERMINAL",0,0,"9119:00",,terminal_output
+1405,1484736,"TERMINAL",0,0,"30221",,terminal_output
+1406,1485727,"TERMINAL",0,0,"1332",,terminal_output
+1407,1486752,"TERMINAL",0,0,"2443",,terminal_output
+1408,1487704,"TERMINAL",0,0,"3554",,terminal_output
+1409,1488723,"TERMINAL",0,0,"4665",,terminal_output
+1410,1489947,"TERMINAL",0,0,"5776",,terminal_output
+1411,1490757,"TERMINAL",0,0,"6887",,terminal_output
+1412,1491802,"TERMINAL",0,0,"7998",,terminal_output
+1413,1492840,"TERMINAL",0,0,"810109",,terminal_output
+1414,1493882,"TERMINAL",0,0,"91110",,terminal_output
+1415,1494921,"TERMINAL",0,0,"40221",,terminal_output
+1416,1495961,"TERMINAL",0,0,"1332",,terminal_output
+1417,1497017,"TERMINAL",0,0,"2443",,terminal_output
+1418,1498115,"TERMINAL",0,0,"3665",,terminal_output
+1419,1499084,"TERMINAL",0,0,"5776",,terminal_output
+1420,1500163,"TERMINAL",0,0,"6887",,terminal_output
+1421,1501186,"TERMINAL",0,0,"7998",,terminal_output
+1422,1502239,"TERMINAL",0,0,"820209",,terminal_output
+1423,1503256,"TERMINAL",0,0,"91120",,terminal_output
+1424,1504300,"TERMINAL",0,0,"50221",,terminal_output
+1425,1505487,"TERMINAL",0,0,"1332",,terminal_output
+1426,1506512,"TERMINAL",0,0,"2443",,terminal_output
+1427,1507433,"TERMINAL",0,0,"3554",,terminal_output
+1428,1508662,"TERMINAL",0,0,"4665",,terminal_output
+1429,1509716,"TERMINAL",0,0,"5776",,terminal_output
+1430,1510742,"TERMINAL",0,0,"6887",,terminal_output
+1431,1511734,"TERMINAL",0,0,"7998",,terminal_output
+1432,1512761,"TERMINAL",0,0,"830309",,terminal_output
+1433,1513883,"TERMINAL",0,0,"91130",,terminal_output
+1434,1514753,"TERMINAL",0,0,"5:00221",,terminal_output
+1435,1516036,"TERMINAL",0,0,"1332",,terminal_output
+1436,1516841,"TERMINAL",0,0,"2443",,terminal_output
+1437,1517883,"TERMINAL",0,0,"3554",,terminal_output
+1438,1519106,"TERMINAL",0,0,"4665",,terminal_output
+1439,1519978,"TERMINAL",0,0,"5776",,terminal_output
+1440,1521024,"TERMINAL",0,0,"6998",,terminal_output
+1441,1522055,"TERMINAL",0,0,"840409",,terminal_output
+1442,1523108,"TERMINAL",0,0,"91140",,terminal_output
+1443,1524210,"TERMINAL",0,0,"10221",,terminal_output
+1444,1525250,"TERMINAL",0,0,"1332",,terminal_output
+1445,1526322,"TERMINAL",0,0,"2443",,terminal_output
+1446,1527530,"TERMINAL",0,0,"3554",,terminal_output
+1447,1528372,"TERMINAL",0,0,"4665",,terminal_output
+1448,1529407,"TERMINAL",0,0,"5776",,terminal_output
+1449,1530575,"TERMINAL",0,0,"6887",,terminal_output
+1450,1531600,"TERMINAL",0,0,"7998",,terminal_output
+1451,1532523,"TERMINAL",0,0,"850509",,terminal_output
+1452,1533536,"TERMINAL",0,0,"91150",,terminal_output
+1453,1534588,"TERMINAL",0,0,"20221",,terminal_output
+1454,1535725,"TERMINAL",0,0,"1332",,terminal_output
+1455,1536857,"TERMINAL",0,0,"2443",,terminal_output
+1456,1537745,"TERMINAL",0,0,"3554",,terminal_output
+1457,1538769,"TERMINAL",0,0,"4665",,terminal_output
+1458,1539817,"TERMINAL",0,0,"5776",,terminal_output
+1459,1540865,"TERMINAL",0,0,"6887",,terminal_output
+1460,1541959,"TERMINAL",0,0,"7998",,terminal_output
+1461,1542930,"TERMINAL",0,0,"81:001:009",,terminal_output
+1462,1543990,"TERMINAL",0,0,"91140:00",,terminal_output
+1463,1545107,"TERMINAL",0,0,"30332",,terminal_output
+1464,1546087,"TERMINAL",0,0,"2443",,terminal_output
+1465,1547189,"TERMINAL",0,0,"3554",,terminal_output
+1466,1548190,"TERMINAL",0,0,"4665",,terminal_output
+1467,1549207,"TERMINAL",0,0,"5776",,terminal_output
+1468,1550340,"TERMINAL",0,0,"6887",,terminal_output
+1469,1551480,"TERMINAL",0,0,"7998",,terminal_output
+1470,1552392,"TERMINAL",0,0,"810109",,terminal_output
+1471,1553411,"TERMINAL",0,0,"91110",,terminal_output
+1472,1554460,"TERMINAL",0,0,"40221",,terminal_output
+1473,1555459,"TERMINAL",0,0,"1332",,terminal_output
+1474,1556688,"TERMINAL",0,0,"2443",,terminal_output
+1475,1557623,"TERMINAL",0,0,"3554",,terminal_output
+1476,1558742,"TERMINAL",0,0,"4665",,terminal_output
+1477,1559611,"TERMINAL",0,0,"5776",,terminal_output
+1478,1560681,"TERMINAL",0,0,"6887",,terminal_output
+1479,1561807,"TERMINAL",0,0,"7998",,terminal_output
+1480,1562763,"TERMINAL",0,0,"820209",,terminal_output
+1481,1563979,"TERMINAL",0,0,"91120",,terminal_output
+1482,1564810,"TERMINAL",0,0,"50221",,terminal_output
+1483,1565852,"TERMINAL",0,0,"1332",,terminal_output
+1484,1566933,"TERMINAL",0,0,"2443",,terminal_output
+1485,1567955,"TERMINAL",0,0,"3554",,terminal_output
+1486,1568989,"TERMINAL",0,0,"4665",,terminal_output
+1487,1570030,"TERMINAL",0,0,"5887",,terminal_output
+1488,1571117,"TERMINAL",0,0,"7998",,terminal_output
+1489,1572151,"TERMINAL",0,0,"830309",,terminal_output
+1490,1573203,"TERMINAL",0,0,"91130",,terminal_output
+1491,1574213,"TERMINAL",0,0,"6:00221",,terminal_output
+1492,1575428,"TERMINAL",0,0,"1332",,terminal_output
+1493,1576349,"TERMINAL",0,0,"2443",,terminal_output
+1494,1577374,"TERMINAL",0,0,"3554",,terminal_output
+1495,1578396,"TERMINAL",0,0,"4665",,terminal_output
+1496,1579436,"TERMINAL",0,0,"5776",,terminal_output
+1497,1580548,"TERMINAL",0,0,"6887",,terminal_output
+1498,1581578,"TERMINAL",0,0,"7998",,terminal_output
+1499,1582798,"TERMINAL",0,0,"840409",,terminal_output
+1500,1583924,"TERMINAL",0,0,"91140",,terminal_output
+1501,1584849,"TERMINAL",0,0,"10221",,terminal_output
+1502,1585818,"TERMINAL",0,0,"1332",,terminal_output
+1503,1587003,"TERMINAL",0,0,"2443",,terminal_output
+1504,1587912,"TERMINAL",0,0,"3554",,terminal_output
+1505,1588968,"TERMINAL",0,0,"4665",,terminal_output
+1506,1590017,"TERMINAL",0,0,"5887",,terminal_output
+1507,1591066,"TERMINAL",0,0,"7998",,terminal_output
+1508,1592242,"TERMINAL",0,0,"850509",,terminal_output
+1509,1593250,"TERMINAL",0,0,"91150",,terminal_output
+1510,1594203,"TERMINAL",0,0,"20221",,terminal_output
+1511,1595292,"TERMINAL",0,0,"1332",,terminal_output
+1512,1596318,"TERMINAL",0,0,"2443",,terminal_output
+1513,1597545,"TERMINAL",0,0,"3554",,terminal_output
+1514,1598467,"TERMINAL",0,0,"4665",,terminal_output
+1515,1599491,"TERMINAL",0,0,"5776",,terminal_output
+1516,1600480,"TERMINAL",0,0,"6887",,terminal_output
+1517,1601523,"TERMINAL",0,0,"7998",,terminal_output
+1518,1602768,"TERMINAL",0,0,"82:002:009",,terminal_output
+1519,1603790,"TERMINAL",0,0,"9111:00",,terminal_output
+1520,1604638,"TERMINAL",0,0,"30221",,terminal_output
+1521,1605870,"TERMINAL",0,0,"1332",,terminal_output
+1522,1607482,"TERMINAL",0,0,"2443",,terminal_output
+1523,1608094,"TERMINAL",0,0,"3554",,terminal_output
+1524,1608818,"TERMINAL",0,0,"4665",,terminal_output
+1525,1609850,"TERMINAL",0,0,"5776",,terminal_output
+1526,1610901,"TERMINAL",0,0,"6887",,terminal_output
+1527,1611937,"TERMINAL",0,0,"7998",,terminal_output
+1528,1612983,"TERMINAL",0,0,"810109",,terminal_output
+1529,1614033,"TERMINAL",0,0,"92211",,terminal_output
+1530,1615080,"TERMINAL",0,0,"41332",,terminal_output
+1531,1616127,"TERMINAL",0,0,"2443",,terminal_output
+1532,1617308,"TERMINAL",0,0,"3554",,terminal_output
+1533,1618335,"TERMINAL",0,0,"4665",,terminal_output
+1534,1619254,"TERMINAL",0,0,"5776",,terminal_output
+1535,1620481,"TERMINAL",0,0,"6887",,terminal_output
+1536,1621506,"TERMINAL",0,0,"7998",,terminal_output
+1537,1622385,"TERMINAL",0,0,"820209",,terminal_output
+1538,1623555,"TERMINAL",0,0,"91120",,terminal_output
+1539,1624821,"TERMINAL",0,0,"50221",,terminal_output
+1540,1625605,"TERMINAL",0,0,"1332",,terminal_output
+1541,1626942,"TERMINAL",0,0,"2443",,terminal_output
+1542,1627958,"TERMINAL",0,0,"3554",,terminal_output
+1543,1628780,"TERMINAL",0,0,"4665",,terminal_output
+1544,1629803,"TERMINAL",0,0,"5776",,terminal_output
+1545,1630833,"TERMINAL",0,0,"6887",,terminal_output
+1546,1631952,"TERMINAL",0,0,"7998",,terminal_output
+1547,1632978,"TERMINAL",0,0,"830309",,terminal_output
+1548,1633898,"TERMINAL",0,0,"91130",,terminal_output
+1549,1634879,"TERMINAL",0,0,"7:00221",,terminal_output
+1550,1635926,"TERMINAL",0,0,"1332",,terminal_output
+1551,1636993,"TERMINAL",0,0,"2443",,terminal_output
+1552,1638024,"TERMINAL",0,0,"3665",,terminal_output
+1553,1639077,"TERMINAL",0,0,"5776",,terminal_output
+1554,1640246,"TERMINAL",0,0,"6887",,terminal_output
+1555,1641271,"TERMINAL",0,0,"7998",,terminal_output
+1556,1642294,"TERMINAL",0,0,"840409",,terminal_output
+1557,1643419,"TERMINAL",0,0,"91140",,terminal_output
+1558,1644305,"TERMINAL",0,0,"10221",,terminal_output
+1559,1645367,"TERMINAL",0,0,"1332",,terminal_output
+1560,1646410,"TERMINAL",0,0,"2443",,terminal_output
+1561,1647602,"TERMINAL",0,0,"3554",,terminal_output
+1562,1648644,"TERMINAL",0,0,"4665",,terminal_output
+1563,1649667,"TERMINAL",0,0,"5776",,terminal_output
+1564,1651002,"TERMINAL",0,0,"6887",,terminal_output
+1565,1651625,"TERMINAL",0,0,"7998",,terminal_output
+1566,1652841,"TERMINAL",0,0,"850509",,terminal_output
+1567,1653865,"TERMINAL",0,0,"91150",,terminal_output
+1568,1654889,"TERMINAL",0,0,"20221",,terminal_output
+1569,1655916,"TERMINAL",0,0,"1332",,terminal_output
+1570,1656845,"TERMINAL",0,0,"2443",,terminal_output
+1571,1657914,"TERMINAL",0,0,"3554",,terminal_output
+1572,1658968,"TERMINAL",0,0,"4665",,terminal_output
+1573,1660111,"TERMINAL",0,0,"5776",,terminal_output
+1574,1661237,"TERMINAL",0,0,"6998",,terminal_output
+1575,1662262,"TERMINAL",0,0,"83:003:009",,terminal_output
+1576,1663284,"TERMINAL",0,0,"9112:00",,terminal_output
+1577,1664201,"TERMINAL",0,0,"30221",,terminal_output
+1578,1665241,"TERMINAL",0,0,"1332",,terminal_output
+1579,1666295,"TERMINAL",0,0,"2443",,terminal_output
+1580,1667345,"TERMINAL",0,0,"3554",,terminal_output
+1581,1668384,"TERMINAL",0,0,"4665",,terminal_output
+1582,1669428,"TERMINAL",0,0,"5776",,terminal_output
+1583,1670472,"TERMINAL",0,0,"6887",,terminal_output
+1584,1671525,"TERMINAL",0,0,"7998",,terminal_output
+1585,1672612,"TERMINAL",0,0,"810109",,terminal_output
+1586,1673608,"TERMINAL",0,0,"91110",,terminal_output
+1587,1674651,"TERMINAL",0,0,"40221",,terminal_output
+1588,1675806,"TERMINAL",0,0,"1332",,terminal_output
+1589,1676796,"TERMINAL",0,0,"2443",,terminal_output
+1590,1677807,"TERMINAL",0,0,"3554",,terminal_output
+1591,1678829,"TERMINAL",0,0,"4665",,terminal_output
+1592,1679978,"TERMINAL",0,0,"5776",,terminal_output
+1593,1680919,"TERMINAL",0,0,"6887",,terminal_output
+1594,1681965,"TERMINAL",0,0,"7998",,terminal_output
+1595,1683011,"TERMINAL",0,0,"8212120",,terminal_output
+1596,1684074,"TERMINAL",0,0,"50221",,terminal_output
+1597,1685114,"TERMINAL",0,0,"1332",,terminal_output
+1598,1686328,"TERMINAL",0,0,"2443",,terminal_output
+1599,1687203,"TERMINAL",0,0,"3554",,terminal_output
+1600,1688290,"TERMINAL",0,0,"4665",,terminal_output
+1601,1689300,"TERMINAL",0,0,"5776",,terminal_output
+1602,1690422,"TERMINAL",0,0,"6887",,terminal_output
+1603,1691381,"TERMINAL",0,0,"7998",,terminal_output
+1604,1692432,"TERMINAL",0,0,"830309",,terminal_output
+1605,1693495,"TERMINAL",0,0,"91130",,terminal_output
+1606,1694628,"TERMINAL",0,0,"8:00221",,terminal_output
+1607,1695867,"TERMINAL",0,0,"1332",,terminal_output
+1608,1696671,"TERMINAL",0,0,"2443",,terminal_output
+1609,1697691,"TERMINAL",0,0,"3554",,terminal_output
+1610,1698723,"TERMINAL",0,0,"4665",,terminal_output
+1611,1699744,"TERMINAL",0,0,"5776",,terminal_output
+1612,1700826,"TERMINAL",0,0,"6887",,terminal_output
+1613,1701995,"TERMINAL",0,0,"7998",,terminal_output
+1614,1702881,"TERMINAL",0,0,"840409",,terminal_output
+1615,1703936,"TERMINAL",0,0,"91140",,terminal_output
+1616,1704966,"TERMINAL",0,0,"10221",,terminal_output
+1617,1706014,"TERMINAL",0,0,"1443",,terminal_output
+1618,1707218,"TERMINAL",0,0,"3554",,terminal_output
+1619,1708239,"TERMINAL",0,0,"4665",,terminal_output
+1620,1709209,"TERMINAL",0,0,"5776",,terminal_output
+1621,1710396,"TERMINAL",0,0,"6887",,terminal_output
+1622,1711413,"TERMINAL",0,0,"7998",,terminal_output
+1623,1712548,"TERMINAL",0,0,"850509",,terminal_output
+1624,1713362,"TERMINAL",0,0,"91150",,terminal_output
+1625,1714393,"TERMINAL",0,0,"20221",,terminal_output
+1626,1715616,"TERMINAL",0,0,"1332",,terminal_output
+1627,1716688,"TERMINAL",0,0,"2443",,terminal_output
+1628,1717558,"TERMINAL",0,0,"3554",,terminal_output
+1629,1718687,"TERMINAL",0,0,"4665",,terminal_output
+1630,1719708,"TERMINAL",0,0,"5776",,terminal_output
+1631,1720734,"TERMINAL",0,0,"6887",,terminal_output
+1632,1721760,"TERMINAL",0,0,"7998",,terminal_output
+1633,1722812,"TERMINAL",0,0,"84:004:009",,terminal_output
+1634,1724012,"TERMINAL",0,0,"9113:00",,terminal_output
+1635,1724844,"TERMINAL",0,0,"30221",,terminal_output
+1636,1725884,"TERMINAL",0,0,"1332",,terminal_output
+1637,1726930,"TERMINAL",0,0,"2443",,terminal_output
+1638,1728014,"TERMINAL",0,0,"3554",,terminal_output
+1639,1729029,"TERMINAL",0,0,"4776",,terminal_output
+1640,1730063,"TERMINAL",0,0,"6887",,terminal_output
+1641,1731280,"TERMINAL",0,0,"7998",,terminal_output
+1642,1732209,"TERMINAL",0,0,"810109",,terminal_output
+1643,1733205,"TERMINAL",0,0,"91110",,terminal_output
+1644,1734253,"TERMINAL",0,0,"40221",,terminal_output
+1645,1735377,"TERMINAL",0,0,"1332",,terminal_output
+1646,1736398,"TERMINAL",0,0,"2443",,terminal_output
+1647,1737423,"TERMINAL",0,0,"3554",,terminal_output
+1648,1738446,"TERMINAL",0,0,"4665",,terminal_output
+1649,1739495,"TERMINAL",0,0,"5776",,terminal_output
+1650,1740702,"TERMINAL",0,0,"6887",,terminal_output
+1651,1741725,"TERMINAL",0,0,"7998",,terminal_output
+1652,1742663,"TERMINAL",0,0,"820209",,terminal_output
+1653,1743774,"TERMINAL",0,0,"91120",,terminal_output
+1654,1744816,"TERMINAL",0,0,"50221",,terminal_output
+1655,1745799,"TERMINAL",0,0,"1332",,terminal_output
+1656,1746865,"TERMINAL",0,0,"2443",,terminal_output
+1657,1747875,"TERMINAL",0,0,"3554",,terminal_output +1658,1748947,"TERMINAL",0,0,"4665",,terminal_output +1659,1749978,"TERMINAL",0,0,"5776",,terminal_output +1660,1751043,"TERMINAL",0,0,"6998",,terminal_output +1661,1752077,"TERMINAL",0,0,"830309",,terminal_output +1662,1753128,"TERMINAL",0,0,"91130",,terminal_output +1663,1754203,"TERMINAL",0,0,"9:00221",,terminal_output +1664,1755240,"TERMINAL",0,0,"1332",,terminal_output +1665,1756468,"TERMINAL",0,0,"2443",,terminal_output +1666,1757392,"TERMINAL",0,0,"3554",,terminal_output +1667,1758415,"TERMINAL",0,0,"4665",,terminal_output +1668,1759543,"TERMINAL",0,0,"5776",,terminal_output +1669,1760566,"TERMINAL",0,0,"6887",,terminal_output +1670,1761589,"TERMINAL",0,0,"7998",,terminal_output +1671,1762613,"TERMINAL",0,0,"840409",,terminal_output +1672,1763593,"TERMINAL",0,0,"91140",,terminal_output +1673,1764639,"TERMINAL",0,0,"10221",,terminal_output +1674,1765892,"TERMINAL",0,0,"1332",,terminal_output +1675,1767021,"TERMINAL",0,0,"2443",,terminal_output +1676,1767841,"TERMINAL",0,0,"3554",,terminal_output +1677,1768859,"TERMINAL",0,0,"4665",,terminal_output +1678,1769904,"TERMINAL",0,0,"5776",,terminal_output +1679,1770965,"TERMINAL",0,0,"6887",,terminal_output +1680,1772012,"TERMINAL",0,0,"7998",,terminal_output +1681,1773169,"TERMINAL",0,0,"9515150",,terminal_output +1682,1774093,"TERMINAL",0,0,"20221",,terminal_output +1683,1775125,"TERMINAL",0,0,"1332",,terminal_output +1684,1776172,"TERMINAL",0,0,"2443",,terminal_output +1685,1777220,"TERMINAL",0,0,"3554",,terminal_output +1686,1778407,"TERMINAL",0,0,"4665",,terminal_output +1687,1779356,"TERMINAL",0,0,"5776",,terminal_output +1688,1780366,"TERMINAL",0,0,"6887",,terminal_output +1689,1781460,"TERMINAL",0,0,"7998",,terminal_output +1690,1782584,"TERMINAL",0,0,"85:005:009",,terminal_output +1691,1783615,"TERMINAL",0,0,"9114:00",,terminal_output +1692,1784548,"TERMINAL",0,0,"30221",,terminal_output +1693,1785601,"TERMINAL",0,0,"1332",,terminal_output +1694,1786680,"TERMINAL",0,0,"2443",,terminal_output +1695,1787678,"TERMINAL",0,0,"3554",,terminal_output +1696,1788867,"TERMINAL",0,0,"4665",,terminal_output +1697,1789852,"TERMINAL",0,0,"5776",,terminal_output +1698,1790849,"TERMINAL",0,0,"6887",,terminal_output +1699,1791898,"TERMINAL",0,0,"7998",,terminal_output +1700,1792925,"TERMINAL",0,0,"810109",,terminal_output +1701,1793962,"TERMINAL",0,0,"91110",,terminal_output +1702,1795010,"TERMINAL",0,0,"40332",,terminal_output +1703,1796099,"TERMINAL",0,0,"2443",,terminal_output +1704,1797098,"TERMINAL",0,0,"3554",,terminal_output +1705,1798151,"TERMINAL",0,0,"4665",,terminal_output +1706,1799218,"TERMINAL",0,0,"5776",,terminal_output +1707,1800299,"TERMINAL",0,0,"6887",,terminal_output +1708,1801325,"TERMINAL",0,0,"7998",,terminal_output +1709,1802449,"TERMINAL",0,0,"820209",,terminal_output +1710,1803575,"TERMINAL",0,0,"91120",,terminal_output +1711,1804415,"TERMINAL",0,0,"50221",,terminal_output +1712,1805467,"TERMINAL",0,0,"1332",,terminal_output +1713,1806547,"TERMINAL",0,0,"2443",,terminal_output +1714,1807561,"TERMINAL",0,0,"3554",,terminal_output +1715,1808704,"TERMINAL",0,0,"4665",,terminal_output +1716,1809721,"TERMINAL",0,0,"5776",,terminal_output +1717,1811053,"TERMINAL",0,0,"6887",,terminal_output +1718,1811873,"TERMINAL",0,0,"7998",,terminal_output +1719,1813145,"TERMINAL",0,0,"830309",,terminal_output +1720,1814023,"TERMINAL",0,0,"91130",,terminal_output +1721,1814946,"TERMINAL",0,0,"30:00221",,terminal_output +1722,1815987,"TERMINAL",0,0,"1332",,terminal_output 
+1723,1817045,"TERMINAL",0,0,"2554",,terminal_output +1724,1818077,"TERMINAL",0,0,"4665",,terminal_output +1725,1819136,"TERMINAL",0,0,"5776",,terminal_output +1726,1820369,"TERMINAL",0,0,"6887",,terminal_output +1727,1821291,"TERMINAL",0,0,"7998",,terminal_output +1728,1822423,"TERMINAL",0,0,"840409",,terminal_output +1729,1823340,"TERMINAL",0,0,"91140",,terminal_output +1730,1824347,"TERMINAL",0,0,"10221",,terminal_output +1731,1825491,"TERMINAL",0,0,"1332",,terminal_output +1732,1826444,"TERMINAL",0,0,"2443",,terminal_output +1733,1827484,"TERMINAL",0,0,"3554",,terminal_output +1734,1828530,"TERMINAL",0,0,"4665",,terminal_output +1735,1829585,"TERMINAL",0,0,"5776",,terminal_output +1736,1830918,"TERMINAL",0,0,"6887",,terminal_output +1737,1831744,"TERMINAL",0,0,"7998",,terminal_output +1738,1832966,"TERMINAL",0,0,"850509",,terminal_output +1739,1833886,"TERMINAL",0,0,"91150",,terminal_output +1740,1834907,"TERMINAL",0,0,"20221",,terminal_output +1741,1835933,"TERMINAL",0,0,"1332",,terminal_output +1742,1836893,"TERMINAL",0,0,"2443",,terminal_output +1743,1837937,"TERMINAL",0,0,"3554",,terminal_output +1744,1838998,"TERMINAL",0,0,"4665",,terminal_output +1745,1840041,"TERMINAL",0,0,"5887",,terminal_output +1746,1841108,"TERMINAL",0,0,"7998",,terminal_output +1747,1842181,"TERMINAL",0,0,"86:006:009",,terminal_output +1748,1843210,"TERMINAL",0,0,"9115:00",,terminal_output +1749,1844227,"TERMINAL",0,0,"30221",,terminal_output +1750,1845355,"TERMINAL",0,0,"1332",,terminal_output +1751,1846480,"TERMINAL",0,0,"2443",,terminal_output +1752,1847403,"TERMINAL",0,0,"3554",,terminal_output +1753,1848429,"TERMINAL",0,0,"4665",,terminal_output +1754,1849465,"TERMINAL",0,0,"5776",,terminal_output +1755,1850606,"TERMINAL",0,0,"6887",,terminal_output +1756,1851576,"TERMINAL",0,0,"7998",,terminal_output +1757,1852585,"TERMINAL",0,0,"810109",,terminal_output +1758,1853651,"TERMINAL",0,0,"91110",,terminal_output +1759,1854785,"TERMINAL",0,0,"40221",,terminal_output +1760,1855905,"TERMINAL",0,0,"1332",,terminal_output +1761,1856858,"TERMINAL",0,0,"2443",,terminal_output +1762,1857816,"TERMINAL",0,0,"3554",,terminal_output +1763,1858918,"TERMINAL",0,0,"4665",,terminal_output +1764,1859906,"TERMINAL",0,0,"5776",,terminal_output +1765,1860967,"TERMINAL",0,0,"6887",,terminal_output +1766,1862006,"TERMINAL",0,0,"7998",,terminal_output +1767,1863071,"TERMINAL",0,0,"9212120",,terminal_output +1768,1864203,"TERMINAL",0,0,"50221",,terminal_output +1769,1865221,"TERMINAL",0,0,"1332",,terminal_output +1770,1866245,"TERMINAL",0,0,"2443",,terminal_output +1771,1867270,"TERMINAL",0,0,"3554",,terminal_output +1772,1868265,"TERMINAL",0,0,"4665",,terminal_output +1773,1869317,"TERMINAL",0,0,"5776",,terminal_output +1774,1870443,"TERMINAL",0,0,"6887",,terminal_output +1775,1871387,"TERMINAL",0,0,"7998",,terminal_output +1776,1872492,"TERMINAL",0,0,"830309",,terminal_output +1777,1873517,"TERMINAL",0,0,"91130",,terminal_output +1778,1874541,"TERMINAL",0,0,"1:00221",,terminal_output +1779,1875667,"TERMINAL",0,0,"1332",,terminal_output +1780,1876841,"TERMINAL",0,0,"2443",,terminal_output +1781,1877717,"TERMINAL",0,0,"3554",,terminal_output +1782,1878737,"TERMINAL",0,0,"4665",,terminal_output +1783,1879798,"TERMINAL",0,0,"5776",,terminal_output +1784,1880838,"TERMINAL",0,0,"6887",,terminal_output +1785,1882013,"TERMINAL",0,0,"7998",,terminal_output +1786,1882939,"TERMINAL",0,0,"840409",,terminal_output +1787,1883951,"TERMINAL",0,0,"91140",,terminal_output +1788,1884978,"TERMINAL",0,0,"10221",,terminal_output 
+1789,1886027,"TERMINAL",0,0,"1443",,terminal_output +1790,1887079,"TERMINAL",0,0,"3554",,terminal_output +1791,1888125,"TERMINAL",0,0,"4665",,terminal_output +1792,1889215,"TERMINAL",0,0,"5776",,terminal_output +1793,1890320,"TERMINAL",0,0,"6887",,terminal_output +1794,1891336,"TERMINAL",0,0,"7998",,terminal_output +1795,1892357,"TERMINAL",0,0,"850509",,terminal_output +1796,1893382,"TERMINAL",0,0,"91150",,terminal_output +1797,1894509,"TERMINAL",0,0,"20221",,terminal_output +1798,1895738,"TERMINAL",0,0,"1332",,terminal_output +1799,1896660,"TERMINAL",0,0,"2443",,terminal_output +1800,1897691,"TERMINAL",0,0,"3554",,terminal_output +1801,1898708,"TERMINAL",0,0,"4665",,terminal_output +1802,1899628,"TERMINAL",0,0,"5776",,terminal_output +1803,1900753,"TERMINAL",0,0,"6887",,terminal_output +1804,1901899,"TERMINAL",0,0,"7998",,terminal_output +1805,1902802,"TERMINAL",0,0,"87:007:009",,terminal_output +1806,1903827,"TERMINAL",0,0,"9116:00",,terminal_output +1807,1904989,"TERMINAL",0,0,"30221",,terminal_output +1808,1905984,"TERMINAL",0,0,"1332",,terminal_output +1809,1906947,"TERMINAL",0,0,"2443",,terminal_output +1810,1908028,"TERMINAL",0,0,"3554",,terminal_output +1811,1909018,"TERMINAL",0,0,"4776",,terminal_output +1812,1910076,"TERMINAL",0,0,"6887",,terminal_output +1813,1911097,"TERMINAL",0,0,"7998",,terminal_output +1814,1912142,"TERMINAL",0,0,"810109",,terminal_output +1815,1913182,"TERMINAL",0,0,"91110",,terminal_output +1816,1914248,"TERMINAL",0,0,"40221",,terminal_output +1817,1915277,"TERMINAL",0,0,"1332",,terminal_output +1818,1916422,"TERMINAL",0,0,"2443",,terminal_output +1819,1917444,"TERMINAL",0,0,"3554",,terminal_output +1820,1918417,"TERMINAL",0,0,"4665",,terminal_output +1821,1919493,"TERMINAL",0,0,"5776",,terminal_output +1822,1920724,"TERMINAL",0,0,"6887",,terminal_output +1823,1921562,"TERMINAL",0,0,"7998",,terminal_output +1824,1922773,"TERMINAL",0,0,"820209",,terminal_output +1825,1923767,"TERMINAL",0,0,"91120",,terminal_output +1826,1924821,"TERMINAL",0,0,"50221",,terminal_output +1827,1925946,"TERMINAL",0,0,"1332",,terminal_output +1828,1926900,"TERMINAL",0,0,"2443",,terminal_output +1829,1927894,"TERMINAL",0,0,"3554",,terminal_output +1830,1928913,"TERMINAL",0,0,"4665",,terminal_output +1831,1929962,"TERMINAL",0,0,"5776",,terminal_output +1832,1931015,"TERMINAL",0,0,"6887",,terminal_output +1833,1932034,"TERMINAL",0,0,"730309",,terminal_output +1834,1933085,"TERMINAL",0,0,"91130",,terminal_output +1835,1934151,"TERMINAL",0,0,"2:00221",,terminal_output +1836,1935179,"TERMINAL",0,0,"1332",,terminal_output +1837,1936291,"TERMINAL",0,0,"2443",,terminal_output +1838,1937318,"TERMINAL",0,0,"3554",,terminal_output +1839,1938325,"TERMINAL",0,0,"4665",,terminal_output +1840,1939371,"TERMINAL",0,0,"5776",,terminal_output +1841,1940408,"TERMINAL",0,0,"6887",,terminal_output +1842,1941467,"TERMINAL",0,0,"7998",,terminal_output +1843,1942645,"TERMINAL",0,0,"840409",,terminal_output +1844,1943771,"TERMINAL",0,0,"91140",,terminal_output +1845,1944608,"TERMINAL",0,0,"10221",,terminal_output +1846,1945814,"TERMINAL",0,0,"1332",,terminal_output +1847,1946732,"TERMINAL",0,0,"2443",,terminal_output +1848,1947754,"TERMINAL",0,0,"3554",,terminal_output +1849,1949121,"TERMINAL",0,0,"4665",,terminal_output +1850,1949905,"TERMINAL",0,0,"5776",,terminal_output +1851,1951057,"TERMINAL",0,0,"6887",,terminal_output +1852,1951955,"TERMINAL",0,0,"7998",,terminal_output +1853,1952984,"TERMINAL",0,0,"850509",,terminal_output +1854,1954008,"TERMINAL",0,0,"91150",,terminal_output 
+1855,1955036,"TERMINAL",0,0,"20332",,terminal_output +1856,1956157,"TERMINAL",0,0,"2443",,terminal_output +1857,1957132,"TERMINAL",0,0,"3554",,terminal_output +1858,1958178,"TERMINAL",0,0,"4665",,terminal_output +1859,1959222,"TERMINAL",0,0,"5776",,terminal_output +1860,1960274,"TERMINAL",0,0,"6887",,terminal_output +1861,1961377,"TERMINAL",0,0,"7998",,terminal_output +1862,1962501,"TERMINAL",0,0,"88:008:009",,terminal_output +1863,1963430,"TERMINAL",0,0,"9117:00",,terminal_output +1864,1964533,"TERMINAL",0,0,"30221",,terminal_output +1865,1965496,"TERMINAL",0,0,"1332",,terminal_output +1866,1966699,"TERMINAL",0,0,"2443",,terminal_output +1867,1967724,"TERMINAL",0,0,"3554",,terminal_output +1868,1968849,"TERMINAL",0,0,"4665",,terminal_output +1869,1969903,"TERMINAL",0,0,"5776",,terminal_output +1870,1970926,"TERMINAL",0,0,"6887",,terminal_output +1871,1971782,"TERMINAL",0,0,"7998",,terminal_output +1872,1972826,"TERMINAL",0,0,"810109",,terminal_output +1873,1974075,"TERMINAL",0,0,"91110",,terminal_output +1874,1975100,"TERMINAL",0,0,"40221",,terminal_output +1875,1976028,"TERMINAL",0,0,"1332",,terminal_output +1876,1977045,"TERMINAL",0,0,"2554",,terminal_output +1877,1978070,"TERMINAL",0,0,"4665",,terminal_output +1878,1979093,"TERMINAL",0,0,"5776",,terminal_output +1879,1980143,"TERMINAL",0,0,"6887",,terminal_output +1880,1981345,"TERMINAL",0,0,"7998",,terminal_output +1881,1982243,"TERMINAL",0,0,"820209",,terminal_output +1882,1983391,"TERMINAL",0,0,"91120",,terminal_output +1883,1984334,"TERMINAL",0,0,"50221",,terminal_output +1884,1985438,"TERMINAL",0,0,"1332",,terminal_output +1885,1986503,"TERMINAL",0,0,"2443",,terminal_output +1886,1987475,"TERMINAL",0,0,"3554",,terminal_output +1887,1988513,"TERMINAL",0,0,"4665",,terminal_output +1888,1989639,"TERMINAL",0,0,"5776",,terminal_output +1889,1990665,"TERMINAL",0,0,"6887",,terminal_output +1890,1991642,"TERMINAL",0,0,"7998",,terminal_output +1891,1992682,"TERMINAL",0,0,"830309",,terminal_output +1892,1993837,"TERMINAL",0,0,"91130",,terminal_output +1893,1994770,"TERMINAL",0,0,"3:00221",,terminal_output +1894,1996113,"TERMINAL",0,0,"1332",,terminal_output +1895,1996910,"TERMINAL",0,0,"2443",,terminal_output +1896,1997937,"TERMINAL",0,0,"3554",,terminal_output +1897,1999005,"TERMINAL",0,0,"4665",,terminal_output +1898,2000086,"TERMINAL",0,0,"5776",,terminal_output +1899,2001021,"TERMINAL",0,0,"6998",,terminal_output +1900,2002069,"TERMINAL",0,0,"840409",,terminal_output +1901,2003101,"TERMINAL",0,0,"91140",,terminal_output +1902,2004139,"TERMINAL",0,0,"10221",,terminal_output +1903,2005174,"TERMINAL",0,0,"1332",,terminal_output +1904,2006535,"TERMINAL",0,0,"2443",,terminal_output +1905,2007356,"TERMINAL",0,0,"3554",,terminal_output +1906,2008377,"TERMINAL",0,0,"4665",,terminal_output +1907,2009403,"TERMINAL",0,0,"5776",,terminal_output +1908,2010443,"TERMINAL",0,0,"6887",,terminal_output +1909,2011657,"TERMINAL",0,0,"7998",,terminal_output +1910,2012493,"TERMINAL",0,0,"850509",,terminal_output +1911,2013705,"TERMINAL",0,0,"91150",,terminal_output +1912,2014657,"TERMINAL",0,0,"20221",,terminal_output +1913,2015652,"TERMINAL",0,0,"1332",,terminal_output +1914,2016776,"TERMINAL",0,0,"2443",,terminal_output +1915,2017800,"TERMINAL",0,0,"3554",,terminal_output +1916,2018824,"TERMINAL",0,0,"4665",,terminal_output +1917,2019790,"TERMINAL",0,0,"5776",,terminal_output +1918,2020873,"TERMINAL",0,0,"6887",,terminal_output +1919,2021869,"TERMINAL",0,0,"7998",,terminal_output +1920,2023022,"TERMINAL",0,0,"89:009:009",,terminal_output 
+1921,2024175,"TERMINAL",0,0,"9118:00",,terminal_output +1922,2025015,"TERMINAL",0,0,"30332",,terminal_output +1923,2026047,"TERMINAL",0,0,"2443",,terminal_output +1924,2027142,"TERMINAL",0,0,"3554",,terminal_output +1925,2028144,"TERMINAL",0,0,"4665",,terminal_output +1926,2029214,"TERMINAL",0,0,"5776",,terminal_output +1927,2030290,"TERMINAL",0,0,"6887",,terminal_output +1928,2031420,"TERMINAL",0,0,"7998",,terminal_output +1929,2032295,"TERMINAL",0,0,"810109",,terminal_output +1930,2033365,"TERMINAL",0,0,"91110",,terminal_output +1931,2034492,"TERMINAL",0,0,"40221",,terminal_output +1932,2035615,"TERMINAL",0,0,"1332",,terminal_output +1933,2036459,"TERMINAL",0,0,"2443",,terminal_output +1934,2037499,"TERMINAL",0,0,"3554",,terminal_output +1935,2038588,"TERMINAL",0,0,"4665",,terminal_output +1936,2039615,"TERMINAL",0,0,"5776",,terminal_output +1937,2040739,"TERMINAL",0,0,"6887",,terminal_output +1938,2041762,"TERMINAL",0,0,"7998",,terminal_output +1939,2042785,"TERMINAL",0,0,"820209",,terminal_output +1940,2043957,"TERMINAL",0,0,"91120",,terminal_output +1941,2044973,"TERMINAL",0,0,"50221",,terminal_output +1942,2045861,"TERMINAL",0,0,"1332",,terminal_output +1943,2047176,"TERMINAL",0,0,"2443",,terminal_output +1944,2048009,"TERMINAL",0,0,"3554",,terminal_output +1945,2049137,"TERMINAL",0,0,"4665",,terminal_output +1946,2050072,"TERMINAL",0,0,"5887",,terminal_output +1947,2051084,"TERMINAL",0,0,"7998",,terminal_output +1948,2052110,"TERMINAL",0,0,"830309",,terminal_output +1949,2053143,"TERMINAL",0,0,"91130",,terminal_output +1950,2054223,"TERMINAL",0,0,"4:00221",,terminal_output +1951,2055280,"TERMINAL",0,0,"1332",,terminal_output +1952,2056266,"TERMINAL",0,0,"2443",,terminal_output +1953,2057431,"TERMINAL",0,0,"3554",,terminal_output +1954,2058378,"TERMINAL",0,0,"4665",,terminal_output +1955,2059509,"TERMINAL",0,0,"5776",,terminal_output +1956,2060501,"TERMINAL",0,0,"6887",,terminal_output +1957,2061551,"TERMINAL",0,0,"7998",,terminal_output +1958,2062537,"TERMINAL",0,0,"840409",,terminal_output +1959,2063584,"TERMINAL",0,0,"91140",,terminal_output +1960,2064632,"TERMINAL",0,0,"10221",,terminal_output +1961,2065754,"TERMINAL",0,0,"1332",,terminal_output +1962,2066798,"TERMINAL",0,0,"2443",,terminal_output +1963,2067751,"TERMINAL",0,0,"3554",,terminal_output +1964,2068904,"TERMINAL",0,0,"4665",,terminal_output +1965,2069955,"TERMINAL",0,0,"5776",,terminal_output +1966,2070881,"TERMINAL",0,0,"6887",,terminal_output +1967,2071978,"TERMINAL",0,0,"7998",,terminal_output +1968,2072959,"TERMINAL",0,0,"850509",,terminal_output +1969,2074019,"TERMINAL",0,0,"92251",,terminal_output +1970,2075050,"TERMINAL",0,0,"21332",,terminal_output +1971,2076093,"TERMINAL",0,0,"2443",,terminal_output +1972,2077141,"TERMINAL",0,0,"3554",,terminal_output +1973,2078178,"TERMINAL",0,0,"4665",,terminal_output +1974,2079230,"TERMINAL",0,0,"5776",,terminal_output +1975,2080398,"TERMINAL",0,0,"6887",,terminal_output +1976,2081441,"TERMINAL",0,0,"7998",,terminal_output +1977,2082415,"TERMINAL",0,0,"850:0050:009",,terminal_output +1978,2083446,"TERMINAL",0,0,"9119:00",,terminal_output +1979,2084441,"TERMINAL",0,0,"30221",,terminal_output +1980,2085648,"TERMINAL",0,0,"1332",,terminal_output +1981,2086721,"TERMINAL",0,0,"2443",,terminal_output +1982,2087746,"TERMINAL",0,0,"3554",,terminal_output +1983,2088662,"TERMINAL",0,0,"4665",,terminal_output +1984,2089786,"TERMINAL",0,0,"5776",,terminal_output +1985,2090711,"TERMINAL",0,0,"6887",,terminal_output +1986,2091838,"TERMINAL",0,0,"7998",,terminal_output 
+1987,2092775,"TERMINAL",0,0,"810109",,terminal_output +1988,2093844,"TERMINAL",0,0,"91110",,terminal_output +1989,2095119,"TERMINAL",0,0,"40221",,terminal_output +1990,2095968,"TERMINAL",0,0,"1332",,terminal_output +1991,2097083,"TERMINAL",0,0,"2443",,terminal_output +1992,2098001,"TERMINAL",0,0,"3554",,terminal_output +1993,2099099,"TERMINAL",0,0,"4776",,terminal_output +1994,2100101,"TERMINAL",0,0,"6887",,terminal_output +1995,2101175,"TERMINAL",0,0,"7998",,terminal_output +1996,2102211,"TERMINAL",0,0,"820209",,terminal_output +1997,2103304,"TERMINAL",0,0,"91120",,terminal_output +1998,2104307,"TERMINAL",0,0,"50221",,terminal_output +1999,2105455,"TERMINAL",0,0,"1332",,terminal_output +2000,2106684,"TERMINAL",0,0,"2443",,terminal_output +2001,2107503,"TERMINAL",0,0,"3554",,terminal_output +2002,2108628,"TERMINAL",0,0,"4665",,terminal_output +2003,2109669,"TERMINAL",0,0,"5776",,terminal_output +2004,2110677,"TERMINAL",0,0,"6887",,terminal_output +2005,2111648,"TERMINAL",0,0,"7998",,terminal_output +2006,2112710,"TERMINAL",0,0,"830309",,terminal_output +2007,2113747,"TERMINAL",0,0,"91130",,terminal_output +2008,2114774,"TERMINAL",0,0,"5:00221",,terminal_output +2009,2115898,"TERMINAL",0,0,"1332",,terminal_output +2010,2116919,"TERMINAL",0,0,"2443",,terminal_output +2011,2117984,"TERMINAL",0,0,"3554",,terminal_output +2012,2119213,"TERMINAL",0,0,"4665",,terminal_output +2013,2120104,"TERMINAL",0,0,"5887",,terminal_output +2014,2121066,"TERMINAL",0,0,"7998",,terminal_output +2015,2122109,"TERMINAL",0,0,"840409",,terminal_output +2016,2123163,"TERMINAL",0,0,"91140",,terminal_output +2017,2124239,"TERMINAL",0,0,"10221",,terminal_output +2018,2125263,"TERMINAL",0,0,"1332",,terminal_output +2019,2126301,"TERMINAL",0,0,"2443",,terminal_output +2020,2127367,"TERMINAL",0,0,"3554",,terminal_output +2021,2128493,"TERMINAL",0,0,"4665",,terminal_output +2022,2129528,"TERMINAL",0,0,"5776",,terminal_output +2023,2130523,"TERMINAL",0,0,"6887",,terminal_output +2024,2131562,"TERMINAL",0,0,"7998",,terminal_output +2025,2132611,"TERMINAL",0,0,"850509",,terminal_output +2026,2133626,"TERMINAL",0,0,"91150",,terminal_output +2027,2134738,"TERMINAL",0,0,"20221",,terminal_output +2028,2135732,"TERMINAL",0,0,"1332",,terminal_output +2029,2136931,"TERMINAL",0,0,"2443",,terminal_output +2030,2137845,"TERMINAL",0,0,"3554",,terminal_output +2031,2139151,"TERMINAL",0,0,"4665",,terminal_output +2032,2139927,"TERMINAL",0,0,"5776",,terminal_output +2033,2141088,"TERMINAL",0,0,"6887",,terminal_output +2034,2142019,"TERMINAL",0,0,"71:001:009",,terminal_output +2035,2143239,"TERMINAL",0,0,"91150:00",,terminal_output +2036,2144117,"TERMINAL",0,0,"30221",,terminal_output +2037,2145185,"TERMINAL",0,0,"1332",,terminal_output +2038,2146413,"TERMINAL",0,0,"2443",,terminal_output +2039,2147477,"TERMINAL",0,0,"3554",,terminal_output +2040,2148462,"TERMINAL",0,0,"4665",,terminal_output +2041,2149349,"TERMINAL",0,0,"5776",,terminal_output +2042,2150509,"TERMINAL",0,0,"6887",,terminal_output +2043,2151533,"TERMINAL",0,0,"7998",,terminal_output +2044,2152502,"TERMINAL",0,0,"810109",,terminal_output +2045,2153544,"TERMINAL",0,0,"91110",,terminal_output +2046,2154724,"TERMINAL",0,0,"40221",,terminal_output +2047,2155833,"TERMINAL",0,0,"1332",,terminal_output +2048,2156742,"TERMINAL",0,0,"2443",,terminal_output +2049,2157775,"TERMINAL",0,0,"3554",,terminal_output +2050,2158905,"TERMINAL",0,0,"4665",,terminal_output +2051,2159836,"TERMINAL",0,0,"5776",,terminal_output +2052,2160886,"TERMINAL",0,0,"6887",,terminal_output 
+2053,2162014,"TERMINAL",0,0,"7998",,terminal_output +2054,2162987,"TERMINAL",0,0,"820209",,terminal_output +2055,2164035,"TERMINAL",0,0,"92221",,terminal_output +2056,2165091,"TERMINAL",0,0,"51332",,terminal_output +2057,2166127,"TERMINAL",0,0,"2443",,terminal_output +2058,2167199,"TERMINAL",0,0,"3554",,terminal_output +2059,2168380,"TERMINAL",0,0,"4665",,terminal_output +2060,2169276,"TERMINAL",0,0,"5776",,terminal_output +2061,2170483,"TERMINAL",0,0,"6887",,terminal_output +2062,2171502,"TERMINAL",0,0,"7998",,terminal_output +2063,2172446,"TERMINAL",0,0,"830309",,terminal_output +2064,2173549,"TERMINAL",0,0,"91130",,terminal_output +2065,2174575,"TERMINAL",0,0,"6:00221",,terminal_output +2066,2175699,"TERMINAL",0,0,"1332",,terminal_output +2067,2176625,"TERMINAL",0,0,"2443",,terminal_output +2068,2177749,"TERMINAL",0,0,"3554",,terminal_output +2069,2179118,"TERMINAL",0,0,"4665",,terminal_output +2070,2179900,"TERMINAL",0,0,"5776",,terminal_output +2071,2180923,"TERMINAL",0,0,"6887",,terminal_output +2072,2181871,"TERMINAL",0,0,"7998",,terminal_output +2073,2183002,"TERMINAL",0,0,"840409",,terminal_output +2074,2183977,"TERMINAL",0,0,"91140",,terminal_output +2075,2185122,"TERMINAL",0,0,"10332",,terminal_output +2076,2186055,"TERMINAL",0,0,"2443",,terminal_output +2077,2187169,"TERMINAL",0,0,"3554",,terminal_output +2078,2188192,"TERMINAL",0,0,"4665",,terminal_output +2079,2189225,"TERMINAL",0,0,"5776",,terminal_output +2080,2190278,"TERMINAL",0,0,"6887",,terminal_output +2081,2191315,"TERMINAL",0,0,"7998",,terminal_output +2082,2192439,"TERMINAL",0,0,"850509",,terminal_output +2083,2193561,"TERMINAL",0,0,"91150",,terminal_output +2084,2194673,"TERMINAL",0,0,"20221",,terminal_output +2085,2195667,"TERMINAL",0,0,"1332",,terminal_output +2086,2196588,"TERMINAL",0,0,"2443",,terminal_output +2087,2197701,"TERMINAL",0,0,"3554",,terminal_output +2088,2198725,"TERMINAL",0,0,"4665",,terminal_output +2089,2199684,"TERMINAL",0,0,"5776",,terminal_output +2090,2200791,"TERMINAL",0,0,"6887",,terminal_output +2091,2201832,"TERMINAL",0,0,"7998",,terminal_output +2092,2202834,"TERMINAL",0,0,"82:002:009",,terminal_output +2093,2203999,"TERMINAL",0,0,"9111:00",,terminal_output +2094,2205023,"TERMINAL",0,0,"30221",,terminal_output +2095,2206011,"TERMINAL",0,0,"1332",,terminal_output +2096,2207139,"TERMINAL",0,0,"2554",,terminal_output +2097,2208081,"TERMINAL",0,0,"4665",,terminal_output +2098,2209190,"TERMINAL",0,0,"5776",,terminal_output +2099,2210164,"TERMINAL",0,0,"6887",,terminal_output +2100,2211231,"TERMINAL",0,0,"7998",,terminal_output +2101,2212267,"TERMINAL",0,0,"810109",,terminal_output +2102,2213383,"TERMINAL",0,0,"91110",,terminal_output +2103,2214409,"TERMINAL",0,0,"40221",,terminal_output +2104,2215430,"TERMINAL",0,0,"1332",,terminal_output +2105,2216570,"TERMINAL",0,0,"2443",,terminal_output +2106,2217587,"TERMINAL",0,0,"3554",,terminal_output +2107,2218599,"TERMINAL",0,0,"4665",,terminal_output +2108,2219636,"TERMINAL",0,0,"5776",,terminal_output +2109,2220817,"TERMINAL",0,0,"6887",,terminal_output +2110,2221811,"TERMINAL",0,0,"7998",,terminal_output +2111,2222849,"TERMINAL",0,0,"820209",,terminal_output +2112,2223828,"TERMINAL",0,0,"91120",,terminal_output +2113,2224921,"TERMINAL",0,0,"50221",,terminal_output +2114,2226022,"TERMINAL",0,0,"1332",,terminal_output +2115,2227036,"TERMINAL",0,0,"2443",,terminal_output +2116,2228026,"TERMINAL",0,0,"3554",,terminal_output +2117,2229158,"TERMINAL",0,0,"4776",,terminal_output +2118,2230099,"TERMINAL",0,0,"6887",,terminal_output 
+2119,2231182,"TERMINAL",0,0,"7998",,terminal_output +2120,2233630,"TERMINAL",0,0,"8313130",,terminal_output +2121,2234673,"TERMINAL",0,0,"7:00221",,terminal_output +2122,2235734,"TERMINAL",0,0,"1332",,terminal_output +2123,2236832,"TERMINAL",0,0,"2443",,terminal_output +2124,2238063,"TERMINAL",0,0,"3554",,terminal_output +2125,2238871,"TERMINAL",0,0,"4665",,terminal_output +2126,2239934,"TERMINAL",0,0,"5776",,terminal_output +2127,2241031,"TERMINAL",0,0,"6887",,terminal_output +2128,2242159,"TERMINAL",0,0,"7998",,terminal_output +2129,2243091,"TERMINAL",0,0,"8414140",,terminal_output +2130,2244085,"TERMINAL",0,0,"10221",,terminal_output +2131,2245132,"TERMINAL",0,0,"1332",,terminal_output +2132,2246177,"TERMINAL",0,0,"2443",,terminal_output +2133,2247222,"TERMINAL",0,0,"3554",,terminal_output +2134,2248265,"TERMINAL",0,0,"4665",,terminal_output +2135,2249325,"TERMINAL",0,0,"5776",,terminal_output +2136,2250460,"TERMINAL",0,0,"6887",,terminal_output +2137,2251478,"TERMINAL",0,0,"7998",,terminal_output +2138,2252603,"TERMINAL",0,0,"850509",,terminal_output +2139,2253527,"TERMINAL",0,0,"91150",,terminal_output +2140,2254652,"TERMINAL",0,0,"20221",,terminal_output +2141,2255677,"TERMINAL",0,0,"1332",,terminal_output +2142,2256702,"TERMINAL",0,0,"2443",,terminal_output +2143,2257698,"TERMINAL",0,0,"3554",,terminal_output +2144,2258753,"TERMINAL",0,0,"4665",,terminal_output +2145,2260050,"TERMINAL",0,0,"5776",,terminal_output +2146,2260904,"TERMINAL",0,0,"6887",,terminal_output +2147,2261926,"TERMINAL",0,0,"7998",,terminal_output +2148,2262932,"TERMINAL",0,0,"83:003:009",,terminal_output +2149,2264099,"TERMINAL",0,0,"9112:00",,terminal_output +2150,2265063,"TERMINAL",0,0,"30332",,terminal_output +2151,2266310,"TERMINAL",0,0,"2443",,terminal_output +2152,2267160,"TERMINAL",0,0,"3554",,terminal_output +2153,2268373,"TERMINAL",0,0,"4665",,terminal_output +2154,2269221,"TERMINAL",0,0,"5776",,terminal_output +2155,2270263,"TERMINAL",0,0,"6887",,terminal_output +2156,2271346,"TERMINAL",0,0,"7998",,terminal_output +2157,2272360,"TERMINAL",0,0,"810109",,terminal_output +2158,2273531,"TERMINAL",0,0,"91110",,terminal_output +2159,2274452,"TERMINAL",0,0,"40221",,terminal_output +2160,2275500,"TERMINAL",0,0,"1332",,terminal_output +2161,2276545,"TERMINAL",0,0,"2443",,terminal_output +2162,2277599,"TERMINAL",0,0,"3554",,terminal_output +2163,2278666,"TERMINAL",0,0,"4665",,terminal_output +2164,2279705,"TERMINAL",0,0,"5776",,terminal_output +2165,2280756,"TERMINAL",0,0,"6887",,terminal_output +2166,2281795,"TERMINAL",0,0,"7998",,terminal_output +2167,2282914,"TERMINAL",0,0,"820209",,terminal_output +2168,2283938,"TERMINAL",0,0,"91120",,terminal_output +2169,2285251,"TERMINAL",0,0,"50221",,terminal_output +2170,2286029,"TERMINAL",0,0,"1332",,terminal_output +2171,2287120,"TERMINAL",0,0,"2554",,terminal_output +2172,2288091,"TERMINAL",0,0,"4665",,terminal_output +2173,2289206,"TERMINAL",0,0,"5776",,terminal_output +2174,2290190,"TERMINAL",0,0,"6887",,terminal_output +2175,2291238,"TERMINAL",0,0,"7998",,terminal_output +2176,2292271,"TERMINAL",0,0,"830309",,terminal_output +2177,2293357,"TERMINAL",0,0,"91130",,terminal_output +2178,2294381,"TERMINAL",0,0,"8:00221",,terminal_output +2179,2295524,"TERMINAL",0,0,"1332",,terminal_output +2180,2296534,"TERMINAL",0,0,"2443",,terminal_output +2181,2297501,"TERMINAL",0,0,"3554",,terminal_output +2182,2298583,"TERMINAL",0,0,"4665",,terminal_output +2183,2299709,"TERMINAL",0,0,"5776",,terminal_output +2184,2300629,"TERMINAL",0,0,"6887",,terminal_output 
+2185,2301885,"TERMINAL",0,0,"7998",,terminal_output +2186,2302788,"TERMINAL",0,0,"840409",,terminal_output +2187,2304058,"TERMINAL",0,0,"92241",,terminal_output +2188,2305081,"TERMINAL",0,0,"11332",,terminal_output +2189,2306110,"TERMINAL",0,0,"2443",,terminal_output +2190,2307147,"TERMINAL",0,0,"3554",,terminal_output +2191,2308185,"TERMINAL",0,0,"4665",,terminal_output +2192,2309233,"TERMINAL",0,0,"5776",,terminal_output +2193,2310286,"TERMINAL",0,0,"6887",,terminal_output +2194,2311335,"TERMINAL",0,0,"7998",,terminal_output +2195,2312399,"TERMINAL",0,0,"850509",,terminal_output +2196,2313532,"TERMINAL",0,0,"91150",,terminal_output +2197,2314555,"TERMINAL",0,0,"20221",,terminal_output +2198,2315578,"TERMINAL",0,0,"1332",,terminal_output +2199,2316808,"TERMINAL",0,0,"2443",,terminal_output +2200,2317730,"TERMINAL",0,0,"3554",,terminal_output +2201,2318756,"TERMINAL",0,0,"4665",,terminal_output +2202,2319806,"TERMINAL",0,0,"5776",,terminal_output +2203,2320809,"TERMINAL",0,0,"6887",,terminal_output +2204,2321852,"TERMINAL",0,0,"7998",,terminal_output +2205,2323059,"TERMINAL",0,0,"84:004:009",,terminal_output +2206,2323923,"TERMINAL",0,0,"9113:00",,terminal_output +2207,2325132,"TERMINAL",0,0,"30221",,terminal_output +2208,2326131,"TERMINAL",0,0,"1443",,terminal_output +2209,2327076,"TERMINAL",0,0,"3554",,terminal_output +2210,2328119,"TERMINAL",0,0,"4665",,terminal_output +2211,2329202,"TERMINAL",0,0,"5776",,terminal_output +2212,2330232,"TERMINAL",0,0,"6887",,terminal_output +2213,2331303,"TERMINAL",0,0,"7998",,terminal_output +2214,2332318,"TERMINAL",0,0,"810109",,terminal_output +2215,2333355,"TERMINAL",0,0,"91110",,terminal_output +2216,2334415,"TERMINAL",0,0,"40221",,terminal_output +2217,2335505,"TERMINAL",0,0,"1332",,terminal_output +2218,2336545,"TERMINAL",0,0,"2443",,terminal_output +2219,2337642,"TERMINAL",0,0,"3554",,terminal_output +2220,2338592,"TERMINAL",0,0,"4665",,terminal_output +2221,2339639,"TERMINAL",0,0,"5776",,terminal_output +2222,2340683,"TERMINAL",0,0,"6887",,terminal_output +2223,2341779,"TERMINAL",0,0,"7998",,terminal_output +2224,2342922,"TERMINAL",0,0,"820209",,terminal_output +2225,2343946,"TERMINAL",0,0,"91120",,terminal_output +2226,2344882,"TERMINAL",0,0,"50221",,terminal_output +2227,2345904,"TERMINAL",0,0,"1332",,terminal_output +2228,2346938,"TERMINAL",0,0,"2443",,terminal_output +2229,2348095,"TERMINAL",0,0,"3554",,terminal_output +2230,2349079,"TERMINAL",0,0,"4776",,terminal_output +2231,2350194,"TERMINAL",0,0,"6887",,terminal_output +2232,2351130,"TERMINAL",0,0,"7998",,terminal_output +2233,2352181,"TERMINAL",0,0,"830309",,terminal_output +2234,2353223,"TERMINAL",0,0,"91130",,terminal_output +2235,2354285,"TERMINAL",0,0,"9:00221",,terminal_output +2236,2355330,"TERMINAL",0,0,"1332",,terminal_output +2237,2356451,"TERMINAL",0,0,"2443",,terminal_output +2238,2357433,"TERMINAL",0,0,"3554",,terminal_output +2239,2358499,"TERMINAL",0,0,"4665",,terminal_output +2240,2359512,"TERMINAL",0,0,"5776",,terminal_output +2241,2360770,"TERMINAL",0,0,"6887",,terminal_output +2242,2361761,"TERMINAL",0,0,"7CG 0:00CG 0:008",,terminal_output +2243,2362651,"TERMINAL",0,0,"89",,terminal_output +2244,2363708,"TERMINAL",0,0,"940",,terminal_output +2245,2364738,"TERMINAL",0,0,"101",,terminal_output +2246,2365864,"TERMINAL",0,0,"12",,terminal_output +2247,2366809,"TERMINAL",0,0,"23",,terminal_output +2248,2367905,"TERMINAL",0,0,"34",,terminal_output +2249,2368905,"TERMINAL",0,0,"45",,terminal_output +2250,2370089,"TERMINAL",0,0,"56",,terminal_output 
+2251,2371081,"TERMINAL",0,0,"67",,terminal_output +2252,2372077,"TERMINAL",0,0,"79",,terminal_output +2253,2373133,"TERMINAL",0,0,"950",,terminal_output +2254,2374186,"TERMINAL",0,0,"201",,terminal_output +2255,2375287,"TERMINAL",0,0,"12",,terminal_output +2256,2376212,"TERMINAL",0,0,"23",,terminal_output +2257,2377252,"TERMINAL",0,0,"34",,terminal_output +2258,2378295,"TERMINAL",0,0,"45",,terminal_output +2259,2379331,"TERMINAL",0,0,"56",,terminal_output +2260,2380384,"TERMINAL",0,0,"67",,terminal_output +2261,2381569,"TERMINAL",0,0,"78",,terminal_output +2262,2382489,"TERMINAL",0,0,"89",,terminal_output +2263,2383578,"TERMINAL",0,0,"94:00",,terminal_output +2264,2384596,"TERMINAL",0,0,"301",,terminal_output +2265,2385624,"TERMINAL",0,0,"12",,terminal_output +2266,2386687,"TERMINAL",0,0,"23",,terminal_output +2267,2387773,"TERMINAL",0,0,"34",,terminal_output +2268,2388803,"TERMINAL",0,0,"45",,terminal_output +2269,2389819,"TERMINAL",0,0,"56",,terminal_output +2270,2390845,"TERMINAL",0,0,"67",,terminal_output +2271,2391879,"TERMINAL",0,0,"78",,terminal_output +2272,2392929,"TERMINAL",0,0,"89",,terminal_output +2273,2394226,"TERMINAL",0,0,"910",,terminal_output +2274,2395073,"TERMINAL",0,0,"401",,terminal_output +2275,2396096,"TERMINAL",0,0,"13",,terminal_output +2276,2397098,"TERMINAL",0,0,"34",,terminal_output +2277,2398153,"TERMINAL",0,0,"45",,terminal_output +2278,2399197,"TERMINAL",0,0,"56",,terminal_output +2279,2400245,"TERMINAL",0,0,"67",,terminal_output +2280,2401393,"TERMINAL",0,0,"78",,terminal_output +2281,2402418,"TERMINAL",0,0,"89",,terminal_output +2282,2403452,"TERMINAL",0,0,"920",,terminal_output +2283,2404463,"TERMINAL",0,0,"501",,terminal_output +2284,2405595,"TERMINAL",0,0,"12",,terminal_output +2285,2406699,"TERMINAL",0,0,"23",,terminal_output +2286,2407592,"TERMINAL",0,0,"3PD(BeginTime)PD(BeginTime)4",,terminal_output +2287,2408764,"TERMINAL",0,0,"45",,terminal_output +2288,2409689,"TERMINAL",0,0,"56",,terminal_output +2289,2410721,"TERMINAL",0,0,"67",,terminal_output +2290,2411787,"TERMINAL",0,0,"78",,terminal_output +2291,2412818,"TERMINAL",0,0,"89",,terminal_output +2292,2413887,"TERMINAL",0,0,"930",,terminal_output +2293,2415010,"TERMINAL",0,0,"40:001",,terminal_output +2294,2416034,"TERMINAL",0,0,"12",,terminal_output +2295,2417106,"TERMINAL",0,0,"23",,terminal_output +2296,2418088,"TERMINAL",0,0,"35",,terminal_output +2297,2419104,"TERMINAL",0,0,"56",,terminal_output +2298,2420137,"TERMINAL",0,0,"67",,terminal_output +2299,2421176,"TERMINAL",0,0,"78",,terminal_output +2300,2422215,"TERMINAL",0,0,"89",,terminal_output +2301,2423256,"TERMINAL",0,0,"940",,terminal_output +2302,2424343,"TERMINAL",0,0,"101",,terminal_output +2303,2425459,"TERMINAL",0,0,"12",,terminal_output +2304,2426530,"TERMINAL",0,0,"23",,terminal_output +2305,2427454,"TERMINAL",0,0,"34",,terminal_output +2306,2428666,"TERMINAL",0,0,"45",,terminal_output +2307,2429691,"TERMINAL",0,0,"56",,terminal_output +2308,2430593,"TERMINAL",0,0,"67",,terminal_output +2309,2431702,"TERMINAL",0,0,"78",,terminal_output +2310,2432693,"TERMINAL",0,0,"89",,terminal_output +2311,2433855,"TERMINAL",0,0,"950",,terminal_output +2312,2434874,"TERMINAL",0,0,"201",,terminal_output +2313,2436007,"TERMINAL",0,0,"12",,terminal_output +2314,2436927,"TERMINAL",0,0,"23",,terminal_output +2315,2437951,"TERMINAL",0,0,"34",,terminal_output +2316,2438973,"TERMINAL",0,0,"45",,terminal_output +2317,2440131,"TERMINAL",0,0,"56",,terminal_output +2318,2441037,"TERMINAL",0,0,"68",,terminal_output 
+2319,2442148,"TERMINAL",0,0,"89",,terminal_output +2320,2443129,"TERMINAL",0,0,"95:00",,terminal_output +2321,2444207,"TERMINAL",0,0,"301",,terminal_output +2322,2445227,"TERMINAL",0,0,"12",,terminal_output +2323,2446280,"TERMINAL",0,0,"23",,terminal_output +2324,2447337,"TERMINAL",0,0,"34",,terminal_output +2325,2448412,"TERMINAL",0,0,"45",,terminal_output +2326,2449623,"TERMINAL",0,0,"56",,terminal_output +2327,2450645,"TERMINAL",0,0,"67",,terminal_output +2328,2451596,"TERMINAL",0,0,"78",,terminal_output +2329,2452695,"TERMINAL",0,0,"89",,terminal_output +2330,2453614,"TERMINAL",0,0,"910",,terminal_output +2331,2454849,"TERMINAL",0,0,"401",,terminal_output +2332,2455782,"TERMINAL",0,0,"12",,terminal_output +2333,2456755,"TERMINAL",0,0,"23",,terminal_output +2334,2457803,"TERMINAL",0,0,"34",,terminal_output +2335,2459042,"TERMINAL",0,0,"45",,terminal_output +2336,2460106,"TERMINAL",0,0,"56",,terminal_output +2337,2461129,"TERMINAL",0,0,"67",,terminal_output +2338,2461996,"TERMINAL",0,0,"78",,terminal_output +2339,2463171,"TERMINAL",0,0,"820",,terminal_output +2340,2464166,"TERMINAL",0,0,"501",,terminal_output +2341,2465228,"TERMINAL",0,0,"12",,terminal_output +2342,2466213,"TERMINAL",0,0,"23",,terminal_output +2343,2467234,"TERMINAL",0,0,"34",,terminal_output +2344,2468319,"TERMINAL",0,0,"45",,terminal_output +2345,2469330,"TERMINAL",0,0,"56",,terminal_output +2346,2470379,"TERMINAL",0,0,"67",,terminal_output +2347,2471485,"TERMINAL",0,0,"78",,terminal_output +2348,2472561,"TERMINAL",0,0,"89",,terminal_output +2349,2473721,"TERMINAL",0,0,"930",,terminal_output +2350,2474579,"TERMINAL",0,0,"1:001",,terminal_output +2351,2475635,"TERMINAL",0,0,"12",,terminal_output +2352,2476661,"TERMINAL",0,0,"23",,terminal_output +2353,2477911,"TERMINAL",0,0,"34",,terminal_output +2354,2478755,"TERMINAL",0,0,"45",,terminal_output +2355,2479831,"TERMINAL",0,0,"56",,terminal_output +2356,2480866,"TERMINAL",0,0,"67",,terminal_output +2357,2481980,"TERMINAL",0,0,"78",,terminal_output +2358,2483005,"TERMINAL",0,0,"89",,terminal_output +2359,2484027,"TERMINAL",0,0,"940",,terminal_output +2360,2485257,"TERMINAL",0,0,"102",,terminal_output +2361,2486314,"TERMINAL",0,0,"23",,terminal_output +2362,2487159,"TERMINAL",0,0,"34",,terminal_output +2363,2488330,"TERMINAL",0,0,"45",,terminal_output +2364,2489245,"TERMINAL",0,0,"56",,terminal_output +2365,2490331,"TERMINAL",0,0,"67",,terminal_output +2366,2491361,"TERMINAL",0,0,"78",,terminal_output +2367,2492550,"TERMINAL",0,0,"89",,terminal_output +2368,2493557,"TERMINAL",0,0,"950",,terminal_output +2369,2494580,"TERMINAL",0,0,"201",,terminal_output +2370,2495702,"TERMINAL",0,0,"12",,terminal_output +2371,2496645,"TERMINAL",0,0,"23",,terminal_output +2372,2497649,"TERMINAL",0,0,"34",,terminal_output +2373,2498707,"TERMINAL",0,0,"45",,terminal_output +2374,2499802,"TERMINAL",0,0,"56",,terminal_output +2375,2500789,"TERMINAL",0,0,"67",,terminal_output +2376,2501847,"TERMINAL",0,0,"78",,terminal_output +2377,2502870,"TERMINAL",0,0,"89",,terminal_output +2378,2504227,"TERMINAL",0,0,"96:00",,terminal_output +2379,2505123,"TERMINAL",0,0,"301",,terminal_output +2380,2506046,"TERMINAL",0,0,"12",,terminal_output +2381,2507208,"TERMINAL",0,0,"24",,terminal_output +2382,2508195,"TERMINAL",0,0,"45",,terminal_output +2383,2509227,"TERMINAL",0,0,"56",,terminal_output +2384,2510169,"TERMINAL",0,0,"67",,terminal_output +2385,2511217,"TERMINAL",0,0,"78",,terminal_output +2386,2512265,"TERMINAL",0,0,"89",,terminal_output +2387,2513304,"TERMINAL",0,0,"910",,terminal_output 
+2388,2514374,"TERMINAL",0,0,"401",,terminal_output +2389,2515388,"TERMINAL",0,0,"12",,terminal_output +2390,2516487,"TERMINAL",0,0,"23",,terminal_output +2391,2517641,"TERMINAL",0,0,"34",,terminal_output +2392,2518506,"TERMINAL",0,0,"45",,terminal_output +2393,2519562,"TERMINAL",0,0,"56",,terminal_output +2394,2520803,"TERMINAL",0,0,"67",,terminal_output +2395,2521813,"TERMINAL",0,0,"78",,terminal_output +2396,2522744,"TERMINAL",0,0,"89",,terminal_output +2397,2523762,"TERMINAL",0,0,"920",,terminal_output +2398,2524988,"TERMINAL",0,0,"501",,terminal_output +2399,2525912,"TERMINAL",0,0,"12",,terminal_output +2400,2526885,"TERMINAL",0,0,"23",,terminal_output +2401,2527962,"TERMINAL",0,0,"34",,terminal_output +2402,2529128,"TERMINAL",0,0,"488725",,terminal_output +2403,2530008,"TERMINAL",0,0,"56",,terminal_output +2404,2531056,"TERMINAL",0,0,"78",,terminal_output +2405,2532153,"TERMINAL",0,0,"89",,terminal_output +2406,2533610,"TERMINAL",0,0,"930",,terminal_output +2407,2534228,"TERMINAL",0,0,"2:001",,terminal_output +2408,2535239,"TERMINAL",0,0,"12",,terminal_output +2409,2536302,"TERMINAL",0,0,"23",,terminal_output +2410,2537482,"TERMINAL",0,0,"34",,terminal_output +2411,2538549,"TERMINAL",0,0,"4None)None)5",,terminal_output +2412,2539632,"TERMINAL",0,0,"56",,terminal_output +2413,2540663,"TERMINAL",0,0,"67",,terminal_output +2414,2541713,"TERMINAL",0,0,"78",,terminal_output +2415,2542705,"TERMINAL",0,0,"89",,terminal_output +2416,2543765,"TERMINAL",0,0,"940",,terminal_output +2417,2544756,"TERMINAL",0,0,"101",,terminal_output +2418,2545793,"TERMINAL",0,0,"12",,terminal_output +2419,2546907,"TERMINAL",0,0,"23",,terminal_output +2420,2547928,"TERMINAL",0,0,"34",,terminal_output +2421,2548955,"TERMINAL",0,0,"45",,terminal_output +2422,2550082,"TERMINAL",0,0,"56",,terminal_output +2423,2551025,"TERMINAL",0,0,"68",,terminal_output +2424,2552184,"TERMINAL",0,0,"89",,terminal_output +2425,2553134,"TERMINAL",0,0,"950",,terminal_output +2426,2554221,"TERMINAL",0,0,"201",,terminal_output +2427,2555302,"TERMINAL",0,0,"12",,terminal_output +2428,2556268,"TERMINAL",0,0,"23",,terminal_output +2429,2557355,"TERMINAL",0,0,"34",,terminal_output +2430,2558350,"TERMINAL",0,0,"45",,terminal_output +2431,2559400,"TERMINAL",0,0,"56",,terminal_output +2432,2560444,"TERMINAL",0,0,"67",,terminal_output +2433,2561585,"TERMINAL",0,0,"78",,terminal_output +2434,2562792,"TERMINAL",0,0,"89",,terminal_output +2435,2563641,"TERMINAL",0,0,"97:00",,terminal_output +2436,2564629,"TERMINAL",0,0,"301",,terminal_output +2437,2565749,"TERMINAL",0,0,"12",,terminal_output +2438,2566768,"TERMINAL",0,0,"23",,terminal_output +2439,2567903,"TERMINAL",0,0,"34",,terminal_output +2440,2568815,"TERMINAL",0,0,"45",,terminal_output +2441,2569857,"TERMINAL",0,0,"56",,terminal_output +2442,2570967,"TERMINAL",0,0,"67",,terminal_output +2443,2571998,"TERMINAL",0,0,"78",,terminal_output +2444,2572981,"TERMINAL",0,0,"89",,terminal_output +2445,2574073,"TERMINAL",0,0,"9Priority)Priority)11",,terminal_output +2446,2575269,"TERMINAL",0,0,"412",,terminal_output +2447,2576237,"TERMINAL",0,0,"23",,terminal_output +2448,2577164,"TERMINAL",0,0,"34",,terminal_output +2449,2578237,"TERMINAL",0,0,"45",,terminal_output +2450,2579265,"TERMINAL",0,0,"56",,terminal_output +2451,2580310,"TERMINAL",0,0,"67",,terminal_output +2452,2581357,"TERMINAL",0,0,"78",,terminal_output +2453,2582390,"TERMINAL",0,0,"89",,terminal_output +2454,2583444,"TERMINAL",0,0,"920",,terminal_output +2455,2584485,"TERMINAL",0,0,"501",,terminal_output 
+2456,2585534,"TERMINAL",0,0,"12",,terminal_output +2457,2586633,"TERMINAL",0,0,"23",,terminal_output +2458,2587615,"TERMINAL",0,0,"34",,terminal_output +2459,2588664,"TERMINAL",0,0,"45",,terminal_output +2460,2589810,"TERMINAL",0,0,"56",,terminal_output +2461,2590833,"TERMINAL",0,0,"67",,terminal_output +2462,2591810,"TERMINAL",0,0,"78",,terminal_output +2463,2592880,"TERMINAL",0,0,"89",,terminal_output +2464,2593941,"TERMINAL",0,0,"930",,terminal_output +2465,2595169,"TERMINAL",0,0,"3:001",,terminal_output +2466,2596161,"TERMINAL",0,0,"12",,terminal_output +2467,2597074,"TERMINAL",0,0,"24",,terminal_output +2468,2598142,"TERMINAL",0,0,"45",,terminal_output +2469,2599242,"TERMINAL",0,0,"56",,terminal_output +2470,2600375,"TERMINAL",0,0,"67",,terminal_output +2471,2601230,"TERMINAL",0,0,"78",,terminal_output +2472,2602265,"TERMINAL",0,0,"89",,terminal_output +2473,2603310,"TERMINAL",0,0,"940",,terminal_output +2474,2604358,"TERMINAL",0,0,"101",,terminal_output +2475,2605476,"TERMINAL",0,0,"12",,terminal_output +2476,2606450,"TERMINAL",0,0,"23",,terminal_output +2477,2607526,"TERMINAL",0,0,"34",,terminal_output +2478,2608553,"TERMINAL",0,0,"45",,terminal_output +2479,2609590,"TERMINAL",0,0,"56",,terminal_output +2480,2610637,"TERMINAL",0,0,"67",,terminal_output +2481,2611944,"TERMINAL",0,0,"78",,terminal_output +2482,2612718,"TERMINAL",0,0,"89",,terminal_output +2483,2613872,"TERMINAL",0,0,"950",,terminal_output +2484,2614898,"TERMINAL",0,0,"201",,terminal_output +2485,2615921,"TERMINAL",0,0,"12",,terminal_output +2486,2616948,"TERMINAL",0,0,"23",,terminal_output +2487,2618071,"TERMINAL",0,0,"34",,terminal_output +2488,2619106,"TERMINAL",0,0,"45",,terminal_output +2489,2620280,"TERMINAL",0,0,"57",,terminal_output +2490,2621086,"TERMINAL",0,0,"78",,terminal_output +2491,2622175,"TERMINAL",0,0,"89",,terminal_output +2492,2623298,"TERMINAL",0,0,"98:00",,terminal_output +2493,2624249,"TERMINAL",0,0,"301",,terminal_output +2494,2625267,"TERMINAL",0,0,"12",,terminal_output +2495,2626318,"TERMINAL",0,0,"23",,terminal_output +2496,2627538,"TERMINAL",0,0,"34",,terminal_output +2497,2628403,"TERMINAL",0,0,"45",,terminal_output +2498,2629645,"TERMINAL",0,0,"56",,terminal_output +2499,2630574,"TERMINAL",0,0,"67",,terminal_output +2500,2631623,"TERMINAL",0,0,"78",,terminal_output +2501,2632732,"TERMINAL",0,0,"89",,terminal_output +2502,2633662,"TERMINAL",0,0,"910",,terminal_output +2503,2634873,"TERMINAL",0,0,"401",,terminal_output +2504,2635785,"TERMINAL",0,0,"12",,terminal_output +2505,2636799,"TERMINAL",0,0,"23",,terminal_output +2506,2637875,"TERMINAL",0,0,"34",,terminal_output +2507,2638906,"TERMINAL",0,0,"45",,terminal_output +2508,2639985,"TERMINAL",0,0,"56",,terminal_output +2509,2641014,"TERMINAL",0,0,"67",,terminal_output +2510,2642180,"TERMINAL",0,0,"79",,terminal_output +2511,2643253,"TERMINAL",0,0,"920",,terminal_output +2512,2644223,"TERMINAL",0,0,"501",,terminal_output +2513,2645209,"TERMINAL",0,0,"12",,terminal_output +2514,2646219,"TERMINAL",0,0,"23",,terminal_output +2515,2647243,"TERMINAL",0,0,"34",,terminal_output +2516,2648591,"TERMINAL",0,0,"45",,terminal_output +2517,2649342,"TERMINAL",0,0,"56",,terminal_output +2518,2650438,"TERMINAL",0,0,"67",,terminal_output +2519,2651435,"TERMINAL",0,0,"78",,terminal_output +2520,2652581,"TERMINAL",0,0,"89",,terminal_output +2521,2653603,"TERMINAL",0,0,"930",,terminal_output +2522,2654630,"TERMINAL",0,0,"4:001",,terminal_output +2523,2655654,"TERMINAL",0,0,"12",,terminal_output +2524,2656676,"TERMINAL",0,0,"23",,terminal_output 
+2525,2657906,"TERMINAL",0,0,"34",,terminal_output +2526,2658957,"TERMINAL",0,0,"45",,terminal_output +2527,2659959,"TERMINAL",0,0,"56",,terminal_output +2528,2660924,"TERMINAL",0,0,"67",,terminal_output +2529,2661881,"TERMINAL",0,0,"78",,terminal_output +2530,2662924,"TERMINAL",0,0,"89",,terminal_output +2531,2664002,"TERMINAL",0,0,"940",,terminal_output +2532,2665033,"TERMINAL",0,0,"102",,terminal_output +2533,2666096,"TERMINAL",0,0,"23",,terminal_output +2534,2667125,"TERMINAL",0,0,"34",,terminal_output +2535,2668170,"TERMINAL",0,0,"45",,terminal_output +2536,2669222,"TERMINAL",0,0,"56",,terminal_output +2537,2670271,"TERMINAL",0,0,"67",,terminal_output +2538,2671318,"TERMINAL",0,0,"78",,terminal_output +2539,2672354,"TERMINAL",0,0,"89",,terminal_output +2540,2673408,"TERMINAL",0,0,"950",,terminal_output +2541,2674467,"TERMINAL",0,0,"201",,terminal_output +2542,2675621,"TERMINAL",0,0,"12",,terminal_output +2543,2676549,"TERMINAL",0,0,"23",,terminal_output +2544,2677788,"TERMINAL",0,0,"34",,terminal_output +2545,2678650,"TERMINAL",0,0,"45",,terminal_output +2546,2679719,"TERMINAL",0,0,"56",,terminal_output +2547,2680844,"TERMINAL",0,0,"67",,terminal_output +2548,2682004,"TERMINAL",0,0,"78",,terminal_output +2549,2682922,"TERMINAL",0,0,"89",,terminal_output +2550,2683947,"TERMINAL",0,0,"99:00",,terminal_output +2551,2684965,"TERMINAL",0,0,"301",,terminal_output +2552,2686074,"TERMINAL",0,0,"12",,terminal_output +2553,2686992,"TERMINAL",0,0,"23",,terminal_output +2554,2688223,"TERMINAL",0,0,"34",,terminal_output +2555,2689151,"TERMINAL",0,0,"56",,terminal_output +2556,2690205,"TERMINAL",0,0,"67",,terminal_output +2557,2691197,"TERMINAL",0,0,"78",,terminal_output +2558,2692212,"TERMINAL",0,0,"89",,terminal_output +2559,2693286,"TERMINAL",0,0,"910",,terminal_output +2560,2694273,"TERMINAL",0,0,"401",,terminal_output +2561,2695335,"TERMINAL",0,0,"12",,terminal_output +2562,2696427,"TERMINAL",0,0,"23",,terminal_output +2563,2697442,"TERMINAL",0,0,"34",,terminal_output +2564,2698566,"TERMINAL",0,0,"45",,terminal_output +2565,2699609,"TERMINAL",0,0,"56",,terminal_output +2566,2700610,"TERMINAL",0,0,"67",,terminal_output +2567,2701632,"TERMINAL",0,0,"78",,terminal_output +2568,2702877,"TERMINAL",0,0,"89",,terminal_output +2569,2703784,"TERMINAL",0,0,"920",,terminal_output +2570,2704814,"TERMINAL",0,0,"501",,terminal_output +2571,2705839,"TERMINAL",0,0,"12",,terminal_output +2572,2706846,"TERMINAL",0,0,"23",,terminal_output +2573,2707878,"TERMINAL",0,0,"34",,terminal_output +2574,2708927,"TERMINAL",0,0,"45",,terminal_output +2575,2709974,"TERMINAL",0,0,"56",,terminal_output +2576,2711050,"TERMINAL",0,0,"68",,terminal_output +2577,2712118,"TERMINAL",0,0,"89",,terminal_output +2578,2713115,"TERMINAL",0,0,"930",,terminal_output +2579,2714233,"TERMINAL",0,0,"5:001",,terminal_output +2580,2715215,"TERMINAL",0,0,"12",,terminal_output +2581,2716281,"TERMINAL",0,0,"23",,terminal_output +2582,2717298,"TERMINAL",0,0,"34",,terminal_output +2583,2718406,"TERMINAL",0,0,"45",,terminal_output +2584,2719380,"TERMINAL",0,0,"56",,terminal_output +2585,2720418,"TERMINAL",0,0,"67",,terminal_output +2586,2721486,"TERMINAL",0,0,"78",,terminal_output +2587,2722522,"TERMINAL",0,0,"89",,terminal_output +2588,2723554,"TERMINAL",0,0,"940",,terminal_output +2589,2724606,"TERMINAL",0,0,"101",,terminal_output +2590,2725737,"TERMINAL",0,0,"12",,terminal_output +2591,2726742,"TERMINAL",0,0,"23",,terminal_output +2592,2727753,"TERMINAL",0,0,"34",,terminal_output +2593,2728973,"TERMINAL",0,0,"45",,terminal_output 
+2594,2729897,"TERMINAL",0,0,"56",,terminal_output +2595,2730921,"TERMINAL",0,0,"67",,terminal_output +2596,2731916,"TERMINAL",0,0,"78",,terminal_output +2597,2732963,"TERMINAL",0,0,"89",,terminal_output +2598,2734096,"TERMINAL",0,0,"950",,terminal_output +2599,2735119,"TERMINAL",0,0,"202",,terminal_output +2600,2736191,"TERMINAL",0,0,"23",,terminal_output +2601,2737166,"TERMINAL",0,0,"34",,terminal_output +2602,2738260,"TERMINAL",0,0,"45",,terminal_output +2603,2739218,"TERMINAL",0,0,"56",,terminal_output +2604,2740270,"TERMINAL",0,0,"67",,terminal_output +2605,2741304,"TERMINAL",0,0,"78",,terminal_output +2606,2742338,"TERMINAL",0,0,"89",,terminal_output +2607,2743381,"TERMINAL",0,0,"91:00:00",,terminal_output +2608,2744469,"TERMINAL",0,0,"301",,terminal_output +2609,2745562,"TERMINAL",0,0,"12",,terminal_output +2610,2746499,"TERMINAL",0,0,"23",,terminal_output +2611,2747542,"TERMINAL",0,0,"34",,terminal_output +2612,2748943,"TERMINAL",0,0,"45",,terminal_output +2613,2749759,"TERMINAL",0,0,"56",,terminal_output +2614,2750753,"TERMINAL",0,0,"67",,terminal_output +2615,2751714,"TERMINAL",0,0,"78",,terminal_output +2616,2752756,"TERMINAL",0,0,"89",,terminal_output +2617,2753785,"TERMINAL",0,0,"910",,terminal_output +2618,2754930,"TERMINAL",0,0,"401",,terminal_output +2619,2755873,"TERMINAL",0,0,"12",,terminal_output +2620,2757031,"TERMINAL",0,0,"23",,terminal_output +2621,2758091,"TERMINAL",0,0,"34",,terminal_output +2622,2759085,"TERMINAL",0,0,"45",,terminal_output +2623,2760100,"TERMINAL",0,0,"57",,terminal_output +2624,2761125,"TERMINAL",0,0,"78",,terminal_output +2625,2762153,"TERMINAL",0,0,"89",,terminal_output +2626,2763384,"TERMINAL",0,0,"920",,terminal_output +2627,2764301,"TERMINAL",0,0,"501",,terminal_output +2628,2765335,"TERMINAL",0,0,"12",,terminal_output +2629,2766327,"TERMINAL",0,0,"23",,terminal_output +2630,2767408,"TERMINAL",0,0,"34",,terminal_output +2631,2768408,"TERMINAL",0,0,"45",,terminal_output +2632,2769451,"TERMINAL",0,0,"56",,terminal_output +2633,2770754,"TERMINAL",0,0,"67",,terminal_output +2634,2771553,"TERMINAL",0,0,"78",,terminal_output +2635,2772613,"TERMINAL",0,0,"89",,terminal_output +2636,2773721,"TERMINAL",0,0,"930",,terminal_output +2637,2774685,"TERMINAL",0,0,"6:001",,terminal_output +2638,2775731,"TERMINAL",0,0,"12",,terminal_output +2639,2776898,"TERMINAL",0,0,"23",,terminal_output +2640,2777922,"TERMINAL",0,0,"34",,terminal_output +2641,2778952,"TERMINAL",0,0,"45",,terminal_output +2642,2779968,"TERMINAL",0,0,"56",,terminal_output +2643,2780997,"TERMINAL",0,0,"67",,terminal_output +2644,2782235,"TERMINAL",0,0,"79",,terminal_output +2645,2783292,"TERMINAL",0,0,"940",,terminal_output +2646,2784239,"TERMINAL",0,0,"101",,terminal_output +2647,2785192,"TERMINAL",0,0,"12",,terminal_output +2648,2786357,"TERMINAL",0,0,"23",,terminal_output +2649,2787245,"TERMINAL",0,0,"34",,terminal_output +2650,2788359,"TERMINAL",0,0,"45",,terminal_output +2651,2789344,"TERMINAL",0,0,"56",,terminal_output +2652,2790410,"TERMINAL",0,0,"67",,terminal_output +2653,2791436,"TERMINAL",0,0,"78",,terminal_output +2654,2792595,"TERMINAL",0,0,"89",,terminal_output +2655,2793592,"TERMINAL",0,0,"950",,terminal_output +2656,2794617,"TERMINAL",0,0,"201",,terminal_output +2657,2795744,"TERMINAL",0,0,"12",,terminal_output +2658,2796764,"TERMINAL",0,0,"23",,terminal_output +2659,2797689,"TERMINAL",0,0,"34",,terminal_output +2660,2798813,"TERMINAL",0,0,"45",,terminal_output +2661,2799870,"TERMINAL",0,0,"56",,terminal_output +2662,2800959,"TERMINAL",0,0,"67",,terminal_output 
+2663,2801915,"TERMINAL",0,0,"78",,terminal_output +2664,2803146,"TERMINAL",0,0,"89",,terminal_output +2665,2804139,"TERMINAL",0,0,"91:00",,terminal_output +2666,2805159,"TERMINAL",0,0,"301",,terminal_output +2667,2806055,"TERMINAL",0,0,"23",,terminal_output +2668,2807145,"TERMINAL",0,0,"34",,terminal_output +2669,2808378,"TERMINAL",0,0,"45",,terminal_output +2670,2809273,"TERMINAL",0,0,"56",,terminal_output +2671,2810385,"TERMINAL",0,0,"67",,terminal_output +2672,2811407,"TERMINAL",0,0,"78",,terminal_output +2673,2812378,"TERMINAL",0,0,"89",,terminal_output +2674,2813405,"TERMINAL",0,0,"910",,terminal_output +2675,2814476,"TERMINAL",0,0,"401",,terminal_output +2676,2815500,"TERMINAL",0,0,"12",,terminal_output +2677,2816563,"TERMINAL",0,0,"23",,terminal_output +2678,2817649,"TERMINAL",0,0,"34",,terminal_output +2679,2818639,"TERMINAL",0,0,"45",,terminal_output +2680,2819702,"TERMINAL",0,0,"56",,terminal_output +2681,2820725,"TERMINAL",0,0,"67",,terminal_output +2682,2821979,"TERMINAL",0,0,"78",,terminal_output +2683,2822873,"TERMINAL",0,0,"89",,terminal_output +2684,2823926,"TERMINAL",0,0,"920",,terminal_output +2685,2825024,"TERMINAL",0,0,"501",,terminal_output +2686,2826151,"TERMINAL",0,0,"12",,terminal_output +2687,2827175,"TERMINAL",0,0,"23",,terminal_output +2688,2828202,"TERMINAL",0,0,"35",,terminal_output +2689,2829089,"TERMINAL",0,0,"56",,terminal_output +2690,2830383,"TERMINAL",0,0,"67",,terminal_output +2691,2831187,"TERMINAL",0,0,"78",,terminal_output +2692,2832265,"TERMINAL",0,0,"89",,terminal_output +2693,2833323,"TERMINAL",0,0,"930",,terminal_output +2694,2834327,"TERMINAL",0,0,"7:001",,terminal_output +2695,2835370,"TERMINAL",0,0,"12",,terminal_output +2696,2836421,"TERMINAL",0,0,"23",,terminal_output +2697,2837522,"TERMINAL",0,0,"34",,terminal_output +2698,2838516,"TERMINAL",0,0,"45",,terminal_output +2699,2839700,"TERMINAL",0,0,"56",,terminal_output +2700,2840693,"TERMINAL",0,0,"67",,terminal_output +2701,2841721,"TERMINAL",0,0,"78",,terminal_output +2702,2842703,"TERMINAL",0,0,"89",,terminal_output +2703,2843869,"TERMINAL",0,0,"940",,terminal_output +2704,2844797,"TERMINAL",0,0,"101",,terminal_output +2705,2846018,"TERMINAL",0,0,"12",,terminal_output +2706,2846922,"TERMINAL",0,0,"23",,terminal_output +2707,2847999,"TERMINAL",0,0,"34",,terminal_output +2708,2848985,"TERMINAL",0,0,"45",,terminal_output +2709,2850121,"TERMINAL",0,0,"57",,terminal_output +2710,2851139,"TERMINAL",0,0,"78",,terminal_output +2711,2852161,"TERMINAL",0,0,"89",,terminal_output +2712,2853181,"TERMINAL",0,0,"950",,terminal_output +2713,2854220,"TERMINAL",0,0,"201",,terminal_output +2714,2855282,"TERMINAL",0,0,"12",,terminal_output +2715,2856463,"TERMINAL",0,0,"23",,terminal_output +2716,2857338,"TERMINAL",0,0,"34",,terminal_output +2717,2858384,"TERMINAL",0,0,"45",,terminal_output +2718,2859432,"TERMINAL",0,0,"56",,terminal_output +2719,2860474,"TERMINAL",0,0,"67",,terminal_output +2720,2861519,"TERMINAL",0,0,"78",,terminal_output +2721,2862609,"TERMINAL",0,0,"89",,terminal_output +2722,2863606,"TERMINAL",0,0,"92:00",,terminal_output +2723,2864655,"TERMINAL",0,0,"301",,terminal_output +2724,2866096,"TERMINAL",0,0,"12",,terminal_output +2725,2866910,"TERMINAL",0,0,"23",,terminal_output +2726,2867834,"TERMINAL",0,0,"34",,terminal_output +2727,2868852,"TERMINAL",0,0,"45",,terminal_output +2728,2869876,"TERMINAL",0,0,"56",,terminal_output +2729,2871108,"TERMINAL",0,0,"67",,terminal_output +2730,2871936,"TERMINAL",0,0,"78",,terminal_output +2731,2873192,"TERMINAL",0,0,"89",,terminal_output 
+2732,2874306,"TERMINAL",0,0,"911",,terminal_output +2733,2875214,"TERMINAL",0,0,"412",,terminal_output +2734,2876122,"TERMINAL",0,0,"23",,terminal_output +2735,2877152,"TERMINAL",0,0,"34",,terminal_output +2736,2878196,"TERMINAL",0,0,"45",,terminal_output +2737,2879236,"TERMINAL",0,0,"56",,terminal_output +2738,2880305,"TERMINAL",0,0,"67",,terminal_output +2739,2881311,"TERMINAL",0,0,"78",,terminal_output +2740,2882475,"TERMINAL",0,0,"89",,terminal_output +2741,2883404,"TERMINAL",0,0,"920",,terminal_output +2742,2884444,"TERMINAL",0,0,"501",,terminal_output +2743,2885485,"TERMINAL",0,0,"12",,terminal_output +2744,2886534,"TERMINAL",0,0,"23",,terminal_output +2745,2887593,"TERMINAL",0,0,"34",,terminal_output +2746,2888628,"TERMINAL",0,0,"45",,terminal_output +2747,2889741,"TERMINAL",0,0,"56",,terminal_output +2748,2890763,"TERMINAL",0,0,"67",,terminal_output +2749,2891827,"TERMINAL",0,0,"78",,terminal_output +2750,2892823,"TERMINAL",0,0,"89",,terminal_output +2751,2893945,"TERMINAL",0,0,"930",,terminal_output +2752,2894963,"TERMINAL",0,0,"8:001",,terminal_output +2753,2896093,"TERMINAL",0,0,"12",,terminal_output +2754,2897215,"TERMINAL",0,0,"23",,terminal_output +2755,2898243,"TERMINAL",0,0,"35",,terminal_output +2756,2899164,"TERMINAL",0,0,"56",,terminal_output +2757,2900185,"TERMINAL",0,0,"67",,terminal_output +2758,2901216,"TERMINAL",0,0,"78",,terminal_output +2759,2902244,"TERMINAL",0,0,"89",,terminal_output +2760,2903502,"TERMINAL",0,0,"940",,terminal_output +2761,2904360,"TERMINAL",0,0,"101",,terminal_output +2762,2905406,"TERMINAL",0,0,"12",,terminal_output +2763,2906442,"TERMINAL",0,0,"23",,terminal_output +2764,2907488,"TERMINAL",0,0,"34",,terminal_output +2765,2908530,"TERMINAL",0,0,"45",,terminal_output +2766,2909708,"TERMINAL",0,0,"56",,terminal_output +2767,2910750,"TERMINAL",0,0,"67",,terminal_output +2768,2911961,"TERMINAL",0,0,"78",,terminal_output +2769,2912881,"TERMINAL",0,0,"89",,terminal_output +2770,2913803,"TERMINAL",0,0,"950",,terminal_output +2771,2914847,"TERMINAL",0,0,"201",,terminal_output +2772,2915860,"TERMINAL",0,0,"12",,terminal_output +2773,2916891,"TERMINAL",0,0,"23",,terminal_output +2774,2917930,"TERMINAL",0,0,"34",,terminal_output +2775,2918986,"TERMINAL",0,0,"45",,terminal_output +2776,2920154,"TERMINAL",0,0,"57",,terminal_output +2777,2921083,"TERMINAL",0,0,"78",,terminal_output +2778,2922124,"TERMINAL",0,0,"89",,terminal_output +2779,2923158,"TERMINAL",0,0,"93:00",,terminal_output +2780,2924207,"TERMINAL",0,0,"301",,terminal_output +2781,2925307,"TERMINAL",0,0,"12",,terminal_output +2782,2926393,"TERMINAL",0,0,"23",,terminal_output +2783,2927365,"TERMINAL",0,0,"34",,terminal_output +2784,2928394,"TERMINAL",0,0,"45",,terminal_output +2785,2929430,"TERMINAL",0,0,"56",,terminal_output +2786,2930483,"TERMINAL",0,0,"67",,terminal_output +2787,2931553,"TERMINAL",0,0,"78",,terminal_output +2788,2932598,"TERMINAL",0,0,"89",,terminal_output +2789,2933799,"TERMINAL",0,0,"910",,terminal_output +2790,2934665,"TERMINAL",0,0,"401",,terminal_output +2791,2935822,"TERMINAL",0,0,"12",,terminal_output +2792,2936757,"TERMINAL",0,0,"23",,terminal_output +2793,2937807,"TERMINAL",0,0,"34",,terminal_output +2794,2938884,"TERMINAL",0,0,"45",,terminal_output +2795,2940020,"TERMINAL",0,0,"56",,terminal_output +2796,2940933,"TERMINAL",0,0,"67",,terminal_output +2797,2942104,"TERMINAL",0,0,"78",,terminal_output +2798,2943031,"TERMINAL",0,0,"820",,terminal_output +2799,2944218,"TERMINAL",0,0,"501",,terminal_output +2800,2945287,"TERMINAL",0,0,"12",,terminal_output 
+2801,2946266,"TERMINAL",0,0,"23",,terminal_output
+2802,2947215,"TERMINAL",0,0,"34",,terminal_output
+2803,2948450,"TERMINAL",0,0,"45",,terminal_output
+2804,2949387,"TERMINAL",0,0,"56",,terminal_output
+2805,2950473,"TERMINAL",0,0,"67",,terminal_output
+2806,2951401,"TERMINAL",0,0,"78",,terminal_output
+2807,2952558,"TERMINAL",0,0,"89",,terminal_output
+2808,2953501,"TERMINAL",0,0,"930",,terminal_output
+2809,2954546,"TERMINAL",0,0,"9:001",,terminal_output
+2810,2955638,"TERMINAL",0,0,"12",,terminal_output
+2811,2956713,"TERMINAL",0,0,"23",,terminal_output
+2812,2957738,"TERMINAL",0,0,"34",,terminal_output
+2813,2958791,"TERMINAL",0,0,"45",,terminal_output
+2814,2959752,"TERMINAL",0,0,"56",,terminal_output
+2815,2960817,"TERMINAL",0,0,"67",,terminal_output
+2816,2961886,"TERMINAL",0,0,"78",,terminal_output
+2817,2962893,"TERMINAL",0,0,"89",,terminal_output
+2818,2963975,"TERMINAL",0,0,"940",,terminal_output
+2819,2964986,"TERMINAL",0,0,"101",,terminal_output
+2820,2966036,"TERMINAL",0,0,"13",,terminal_output
+2821,2967102,"TERMINAL",0,0,"34",,terminal_output
+2822,2968125,"TERMINAL",0,0,"45",,terminal_output
+2823,2969211,"TERMINAL",0,0,"56",,terminal_output
+2824,2970335,"TERMINAL",0,0,"67",,terminal_output
+2825,2971334,"TERMINAL",0,0,"78",,terminal_output
+2826,2972368,"TERMINAL",0,0,"89",,terminal_output
+2827,2973410,"TERMINAL",0,0,"950",,terminal_output
+2828,2974415,"TERMINAL",0,0,"201",,terminal_output
+2829,2975474,"TERMINAL",0,0,"12",,terminal_output
+2830,2976522,"TERMINAL",0,0,"23",,terminal_output
+2831,2977606,"TERMINAL",0,0,"34",,terminal_output
+2832,2978605,"TERMINAL",0,0,"45",,terminal_output
+2833,2979891,"TERMINAL",0,0,"56",,terminal_output
+2834,2980776,"TERMINAL",0,0,"67",,terminal_output
+2835,2981789,"TERMINAL",0,0,"78",,terminal_output
+2836,2982823,"TERMINAL",0,0,"89",,terminal_output
+2837,2983856,"TERMINAL",0,0,"94:00",,terminal_output
+2838,2984924,"TERMINAL",0,0,"301",,terminal_output
+2839,2986121,"TERMINAL",0,0,"12",,terminal_output
+2840,2987026,"TERMINAL",0,0,"23",,terminal_output
+2841,2988081,"TERMINAL",0,0,"35",,terminal_output
+2842,2989209,"TERMINAL",0,0,"56",,terminal_output
+2843,2990507,"TERMINAL",0,0,"67",,terminal_output
+2844,2991182,"TERMINAL",0,0,"78",,terminal_output
+2845,2992249,"TERMINAL",0,0,"89",,terminal_output
+2846,2993277,"TERMINAL",0,0,"910",,terminal_output
+2847,2994361,"TERMINAL",0,0,"401",,terminal_output
+2848,2995374,"TERMINAL",0,0,"12",,terminal_output
+2849,2996459,"TERMINAL",0,0,"23",,terminal_output
+2850,2997470,"TERMINAL",0,0,"34",,terminal_output
+2851,2998521,"TERMINAL",0,0,"45",,terminal_output
+2852,2999566,"TERMINAL",0,0,"56",,terminal_output
+2853,3000745,"TERMINAL",0,0,"67",,terminal_output
+2854,3002180,"TERMINAL",0,0,"78",,terminal_output
+2855,3002797,"TERMINAL",0,0,"89",,terminal_output
+2856,3003816,"TERMINAL",0,0,"920",,terminal_output
+2857,3004840,"TERMINAL",0,0,"501",,terminal_output
+2858,3005866,"TERMINAL",0,0,"12",,terminal_output
+2859,3006994,"TERMINAL",0,0,"23",,terminal_output
+2860,3008116,"TERMINAL",0,0,"34",,terminal_output
+2861,3009144,"TERMINAL",0,0,"45",,terminal_output
+2862,3010165,"TERMINAL",0,0,"56",,terminal_output
+2863,3011085,"TERMINAL",0,0,"68",,terminal_output
+2864,3012152,"TERMINAL",0,0,"89",,terminal_output
+2865,3013152,"TERMINAL",0,0,"930",,terminal_output
+2866,3014241,"TERMINAL",0,0,"50:001",,terminal_output
+2867,3015419,"TERMINAL",0,0,"12",,terminal_output
+2868,3016358,"TERMINAL",0,0,"23",,terminal_output
+2869,3017434,"TERMINAL",0,0,"34",,terminal_output
+2870,3018461,"TERMINAL",0,0,"45",,terminal_output
+2871,3019427,"TERMINAL",0,0,"56",,terminal_output
+2872,3020509,"TERMINAL",0,0,"67",,terminal_output
+2873,3021568,"TERMINAL",0,0,"78",,terminal_output
+2874,3022662,"TERMINAL",0,0,"89",,terminal_output
+2875,3023682,"TERMINAL",0,0,"940",,terminal_output
+2876,3024650,"TERMINAL",0,0,"101",,terminal_output
+2877,3025695,"TERMINAL",0,0,"12",,terminal_output
+2878,3026958,"TERMINAL",0,0,"23",,terminal_output
+2879,3027793,"TERMINAL",0,0,"34",,terminal_output
+2880,3029006,"TERMINAL",0,0,"45",,terminal_output
+2881,3030031,"TERMINAL",0,0,"56",,terminal_output
+2882,3031064,"TERMINAL",0,0,"67",,terminal_output
+2883,3032181,"TERMINAL",0,0,"78",,terminal_output
+2884,3033041,"TERMINAL",0,0,"850",,terminal_output
+2885,3034231,"TERMINAL",0,0,"201",,terminal_output
+2886,3035114,"TERMINAL",0,0,"12",,terminal_output
+2887,3036176,"TERMINAL",0,0,"23",,terminal_output
+2888,3037401,"TERMINAL",0,0,"34",,terminal_output
+2889,3038428,"TERMINAL",0,0,"45",,terminal_output
+2890,3039327,"TERMINAL",0,0,"56",,terminal_output
+2891,3040531,"TERMINAL",0,0,"67",,terminal_output
+2892,3041397,"TERMINAL",0,0,"78",,terminal_output
+2893,3042451,"TERMINAL",0,0,"89",,terminal_output
+2894,3043551,"TERMINAL",0,0,"95:00",,terminal_output
+2895,3044537,"TERMINAL",0,0,"301",,terminal_output
+2896,3045698,"TERMINAL",0,0,"12",,terminal_output
+2897,3046633,"TERMINAL",0,0,"23",,terminal_output
+2898,3047678,"TERMINAL",0,0,"34",,terminal_output
+2899,3048800,"TERMINAL",0,0,"45",,terminal_output
+2900,3049795,"TERMINAL",0,0,"56",,terminal_output
+2901,3050900,"TERMINAL",0,0,"67",,terminal_output
+2902,3051935,"TERMINAL",0,0,"78",,terminal_output
+2903,3052935,"TERMINAL",0,0,"89",,terminal_output
+2904,3053990,"TERMINAL",0,0,"910",,terminal_output
+2905,3055222,"TERMINAL",0,0,"402",,terminal_output
+2906,3056143,"TERMINAL",0,0,"23",,terminal_output
+2907,3057118,"TERMINAL",0,0,"34",,terminal_output
+2908,3058190,"TERMINAL",0,0,"45",,terminal_output
+2909,3059221,"TERMINAL",0,0,"56",,terminal_output
+2910,3060547,"TERMINAL",0,0,"67",,terminal_output
+2911,3061472,"TERMINAL",0,0,"78",,terminal_output
+2912,3062491,"TERMINAL",0,0,"89",,terminal_output
+2913,3063415,"TERMINAL",0,0,"920",,terminal_output
+2914,3064441,"TERMINAL",0,0,"501",,terminal_output
+2915,3065468,"TERMINAL",0,0,"12",,terminal_output
+2916,3066516,"TERMINAL",0,0,"23",,terminal_output
+2917,3067566,"TERMINAL",0,0,"34",,terminal_output
+2918,3068619,"TERMINAL",0,0,"45",,terminal_output
+2919,3069653,"TERMINAL",0,0,"56",,terminal_output
+2920,3070889,"TERMINAL",0,0,"67",,terminal_output
+2921,3071811,"TERMINAL",0,0,"78",,terminal_output
+2922,3072863,"TERMINAL",0,0,"89",,terminal_output
+2923,3073853,"TERMINAL",0,0,"930",,terminal_output
+2924,3074913,"TERMINAL",0,0,"1:001",,terminal_output
+2925,3076008,"TERMINAL",0,0,"12",,terminal_output
+2926,3077074,"TERMINAL",0,0,"23",,terminal_output
+2927,3078057,"TERMINAL",0,0,"35",,terminal_output
+2928,3079078,"TERMINAL",0,0,"56",,terminal_output
+2929,3080115,"TERMINAL",0,0,"67",,terminal_output
+2930,3081235,"TERMINAL",0,0,"78",,terminal_output
+2931,3082255,"TERMINAL",0,0,"89",,terminal_output
+2932,3083387,"TERMINAL",0,0,"940",,terminal_output
+2933,3084366,"TERMINAL",0,0,"101",,terminal_output
+2934,3085429,"TERMINAL",0,0,"12",,terminal_output
+2935,3086457,"TERMINAL",0,0,"23",,terminal_output
+2936,3087449,"TERMINAL",0,0,"34",,terminal_output
+2937,3088493,"TERMINAL",0,0,"45",,terminal_output
+2938,3089541,"TERMINAL",0,0,"56",,terminal_output
+2939,3090650,"TERMINAL",0,0,"67",,terminal_output
+2940,3091638,"TERMINAL",0,0,"78",,terminal_output
+2941,3092671,"TERMINAL",0,0,"89",,terminal_output
+2942,3093723,"TERMINAL",0,0,"950",,terminal_output
+2943,3094849,"TERMINAL",0,0,"201",,terminal_output
+2944,3095873,"TERMINAL",0,0,"12",,terminal_output
+2945,3097001,"TERMINAL",0,0,"23",,terminal_output
+2946,3098026,"TERMINAL",0,0,"34",,terminal_output
+2947,3098995,"TERMINAL",0,0,"45",,terminal_output
+2948,3099988,"TERMINAL",0,0,"56",,terminal_output
+2949,3101041,"TERMINAL",0,0,"68",,terminal_output
+2950,3102166,"TERMINAL",0,0,"89",,terminal_output
+2951,3103249,"TERMINAL",0,0,"96:00",,terminal_output
+2952,3104246,"TERMINAL",0,0,"301",,terminal_output
+2953,3105201,"TERMINAL",0,0,"12",,terminal_output
+2954,3106320,"TERMINAL",0,0,"23",,terminal_output
+2955,3107371,"TERMINAL",0,0,"34",,terminal_output
+2956,3108369,"TERMINAL",0,0,"45",,terminal_output
+2957,3109413,"TERMINAL",0,0,"56",,terminal_output
+2958,3110430,"TERMINAL",0,0,"67",,terminal_output
+2959,3111486,"TERMINAL",0,0,"78",,terminal_output
+2960,3112541,"TERMINAL",0,0,"89",,terminal_output
+2961,3113589,"TERMINAL",0,0,"910",,terminal_output
+2962,3114715,"TERMINAL",0,0,"401",,terminal_output
+2963,3115683,"TERMINAL",0,0,"12",,terminal_output
+2964,3116734,"TERMINAL",0,0,"23",,terminal_output
+2965,3117775,"TERMINAL",0,0,"34",,terminal_output
+2966,3118819,"TERMINAL",0,0,"45",,terminal_output
+2967,3119937,"TERMINAL",0,0,"56",,terminal_output
+2968,3121065,"TERMINAL",0,0,"67",,terminal_output
+2969,3122088,"TERMINAL",0,0,"78",,terminal_output
+2970,3123221,"TERMINAL",0,0,"89",,terminal_output
+2971,3124086,"TERMINAL",0,0,"921",,terminal_output
+2972,3125260,"TERMINAL",0,0,"512",,terminal_output
+2973,3126184,"TERMINAL",0,0,"23",,terminal_output
+2974,3127184,"TERMINAL",0,0,"34",,terminal_output
+2975,3128470,"TERMINAL",0,0,"45",,terminal_output
+2976,3129282,"TERMINAL",0,0,"56",,terminal_output
+2977,3130516,"TERMINAL",0,0,"67",,terminal_output
+2978,3131513,"TERMINAL",0,0,"78",,terminal_output
+2979,3132435,"TERMINAL",0,0,"89",,terminal_output
+2980,3133490,"TERMINAL",0,0,"930",,terminal_output
+2981,3134530,"TERMINAL",0,0,"2:001",,terminal_output
+2982,3135574,"TERMINAL",0,0,"12",,terminal_output
+2983,3136619,"TERMINAL",0,0,"23",,terminal_output
+2984,3137657,"TERMINAL",0,0,"34",,terminal_output
+2985,3138705,"TERMINAL",0,0,"45",,terminal_output
+2986,3139805,"TERMINAL",0,0,"56",,terminal_output
+2987,3141034,"TERMINAL",0,0,"67",,terminal_output
+2988,3141851,"TERMINAL",0,0,"78",,terminal_output
+2989,3143079,"TERMINAL",0,0,"89",,terminal_output
+2990,3143949,"TERMINAL",0,0,"940",,terminal_output
+2991,3145129,"TERMINAL",0,0,"101",,terminal_output
+2992,3146050,"TERMINAL",0,0,"13",,terminal_output
+2993,3147122,"TERMINAL",0,0,"34",,terminal_output
+2994,3148218,"TERMINAL",0,0,"45",,terminal_output
+2995,3149246,"TERMINAL",0,0,"56",,terminal_output
+2996,3150352,"TERMINAL",0,0,"67",,terminal_output
+2997,3151478,"TERMINAL",0,0,"78",,terminal_output
+2998,3152310,"TERMINAL",0,0,"89",,terminal_output
+2999,3153423,"TERMINAL",0,0,"950",,terminal_output
+3000,3154416,"TERMINAL",0,0,"201",,terminal_output
+3001,3155473,"TERMINAL",0,0,"12",,terminal_output
+3002,3156473,"TERMINAL",0,0,"23",,terminal_output
+3003,3157513,"TERMINAL",0,0,"34",,terminal_output
+3004,3158558,"TERMINAL",0,0,"45",,terminal_output
+3005,3159599,"TERMINAL",0,0,"56",,terminal_output
+3006,3160635,"TERMINAL",0,0,"67",,terminal_output
+3007,3161716,"TERMINAL",0,0,"78",,terminal_output
+3008,3162746,"TERMINAL",0,0,"89",,terminal_output
+3009,3163765,"TERMINAL",0,0,"97:00",,terminal_output
+3010,3164996,"TERMINAL",0,0,"301",,terminal_output
+3011,3165848,"TERMINAL",0,0,"12",,terminal_output
+3012,3166896,"TERMINAL",0,0,"23",,terminal_output
+3013,3167934,"TERMINAL",0,0,"34",,terminal_output
+3014,3169213,"TERMINAL",0,0,"45",,terminal_output
+3015,3170017,"TERMINAL",0,0,"57",,terminal_output
+3016,3171241,"TERMINAL",0,0,"78",,terminal_output
+3017,3172116,"TERMINAL",0,0,"89",,terminal_output
+3018,3173297,"TERMINAL",0,0,"910",,terminal_output
+3019,3174267,"TERMINAL",0,0,"401",,terminal_output
+3020,3175337,"TERMINAL",0,0,"12",,terminal_output
+3021,3176398,"TERMINAL",0,0,"23",,terminal_output
+3022,3177421,"TERMINAL",0,0,"34",,terminal_output
+3023,3178346,"TERMINAL",0,0,"45",,terminal_output
+3024,3179415,"TERMINAL",0,0,"56",,terminal_output
+3025,3180458,"TERMINAL",0,0,"67",,terminal_output
+3026,3181481,"TERMINAL",0,0,"78",,terminal_output
+3027,3182506,"TERMINAL",0,0,"89",,terminal_output
+3028,3183837,"TERMINAL",0,0,"920",,terminal_output
+3029,3184599,"TERMINAL",0,0,"501",,terminal_output
+3030,3185614,"TERMINAL",0,0,"12",,terminal_output
+3031,3186654,"TERMINAL",0,0,"23",,terminal_output
+3032,3187830,"TERMINAL",0,0,"34",,terminal_output
+3033,3188962,"TERMINAL",0,0,"45",,terminal_output
+3034,3189885,"TERMINAL",0,0,"56",,terminal_output
+3035,3190800,"TERMINAL",0,0,"67",,terminal_output
+3036,3192037,"TERMINAL",0,0,"78",,terminal_output
+3037,3192950,"TERMINAL",0,0,"89",,terminal_output
+3038,3193890,"TERMINAL",0,0,"930",,terminal_output
+3039,3194998,"TERMINAL",0,0,"3:001",,terminal_output
+3040,3196127,"TERMINAL",0,0,"12",,terminal_output
+3041,3197153,"TERMINAL",0,0,"24",,terminal_output
+3042,3198172,"TERMINAL",0,0,"45",,terminal_output
+3043,3199217,"TERMINAL",0,0,"56",,terminal_output
+3044,3200221,"TERMINAL",0,0,"67",,terminal_output
+3045,3201246,"TERMINAL",0,0,"78",,terminal_output
+3046,3202245,"TERMINAL",0,0,"89",,terminal_output
+3047,3203424,"TERMINAL",0,0,"940",,terminal_output
+3048,3204325,"TERMINAL",0,0,"101",,terminal_output
+3049,3205445,"TERMINAL",0,0,"12",,terminal_output
+3050,3206447,"TERMINAL",0,0,"23",,terminal_output
+3051,3207451,"TERMINAL",0,0,"34",,terminal_output
+3052,3208620,"TERMINAL",0,0,"45",,terminal_output
+3053,3209537,"TERMINAL",0,0,"56",,terminal_output
+3054,3210576,"TERMINAL",0,0,"67",,terminal_output
+3055,3211620,"TERMINAL",0,0,"78",,terminal_output
+3056,3212664,"TERMINAL",0,0,"89",,terminal_output
+3057,3213840,"TERMINAL",0,0,"950",,terminal_output
+3058,3214868,"TERMINAL",0,0,"201",,terminal_output
+3059,3215787,"TERMINAL",0,0,"12",,terminal_output
+3060,3217014,"TERMINAL",0,0,"23",,terminal_output
+3061,3218036,"TERMINAL",0,0,"34",,terminal_output
+3062,3218970,"TERMINAL",0,0,"45",,terminal_output
+3063,3219983,"TERMINAL",0,0,"56",,terminal_output
+3064,3221010,"TERMINAL",0,0,"67",,terminal_output
+3065,3222034,"TERMINAL",0,0,"79",,terminal_output
+3066,3223084,"TERMINAL",0,0,"98:00",,terminal_output
+3067,3224150,"TERMINAL",0,0,"301",,terminal_output
+3068,3225157,"TERMINAL",0,0,"12",,terminal_output
+3069,3226332,"TERMINAL",0,0,"23",,terminal_output
+3070,3227357,"TERMINAL",0,0,"34",,terminal_output
+3071,3228420,"TERMINAL",0,0,"45",,terminal_output
+3072,3229318,"TERMINAL",0,0,"56",,terminal_output
+3073,3230365,"TERMINAL",0,0,"67",,terminal_output
+3074,3231553,"TERMINAL",0,0,"78",,terminal_output
+3075,3232456,"TERMINAL",0,0,"89",,terminal_output
+3076,3233501,"TERMINAL",0,0,"910",,terminal_output
+3077,3234633,"TERMINAL",0,0,"401",,terminal_output
+3078,3235594,"TERMINAL",0,0,"12",,terminal_output
+3079,3236634,"TERMINAL",0,0,"23",,terminal_output
+3080,3237673,"TERMINAL",0,0,"34",,terminal_output
+3081,3238720,"TERMINAL",0,0,"45",,terminal_output
+3082,3239757,"TERMINAL",0,0,"56",,terminal_output
+3083,3240874,"TERMINAL",0,0,"67",,terminal_output
+3084,3241902,"TERMINAL",0,0,"78",,terminal_output
+3085,3242880,"TERMINAL",0,0,"89",,terminal_output
+3086,3243946,"TERMINAL",0,0,"920",,terminal_output
+3087,3245179,"TERMINAL",0,0,"501",,terminal_output
+3088,3246199,"TERMINAL",0,0,"13",,terminal_output
+3089,3247121,"TERMINAL",0,0,"34",,terminal_output
+3090,3248145,"TERMINAL",0,0,"45",,terminal_output
+3091,3249167,"TERMINAL",0,0,"56",,terminal_output
+3092,3250293,"TERMINAL",0,0,"67",,terminal_output
+3093,3251525,"TERMINAL",0,0,"78",,terminal_output
+3094,3252260,"TERMINAL",0,0,"89",,terminal_output
+3095,3253297,"TERMINAL",0,0,"930",,terminal_output
+3096,3254335,"TERMINAL",0,0,"4:001",,terminal_output
+3097,3255443,"TERMINAL",0,0,"12",,terminal_output
+3098,3256443,"TERMINAL",0,0,"23",,terminal_output
+3099,3257469,"TERMINAL",0,0,"34",,terminal_output
+3100,3258494,"TERMINAL",0,0,"45",,terminal_output
+3101,3259537,"TERMINAL",0,0,"56",,terminal_output
+3102,3260572,"TERMINAL",0,0,"67",,terminal_output
+3103,3261607,"TERMINAL",0,0,"78",,terminal_output
+3104,3262644,"TERMINAL",0,0,"89",,terminal_output
+3105,3263681,"TERMINAL",0,0,"940",,terminal_output
+3106,3264893,"TERMINAL",0,0,"101",,terminal_output
+3107,3265756,"TERMINAL",0,0,"12",,terminal_output
+3108,3266892,"TERMINAL",0,0,"23",,terminal_output
+3109,3267907,"TERMINAL",0,0,"34",,terminal_output
+3110,3268872,"TERMINAL",0,0,"45",,terminal_output
+3111,3270058,"TERMINAL",0,0,"56",,terminal_output
+3112,3270986,"TERMINAL",0,0,"67",,terminal_output
+3113,3271991,"TERMINAL",0,0,"78",,terminal_output
+3114,3273336,"TERMINAL",0,0,"850",,terminal_output
+3115,3274257,"TERMINAL",0,0,"201",,terminal_output
+3116,3275282,"TERMINAL",0,0,"12",,terminal_output
+3117,3276202,"TERMINAL",0,0,"23",,terminal_output
+3118,3277210,"TERMINAL",0,0,"34",,terminal_output
+3119,3278364,"TERMINAL",0,0,"45",,terminal_output
+3120,3279287,"TERMINAL",0,0,"56",,terminal_output
+3121,3280438,"TERMINAL",0,0,"67",,terminal_output
+3122,3281636,"TERMINAL",0,0,"78",,terminal_output
+3123,3282551,"TERMINAL",0,0,"89",,terminal_output
+3124,3283475,"TERMINAL",0,0,"99:00",,terminal_output
+3125,3284511,"TERMINAL",0,0,"301",,terminal_output
+3126,3285629,"TERMINAL",0,0,"12",,terminal_output
+3127,3286648,"TERMINAL",0,0,"23",,terminal_output
+3128,3287609,"TERMINAL",0,0,"34",,terminal_output
+3129,3288627,"TERMINAL",0,0,"45",,terminal_output
+3130,3289664,"TERMINAL",0,0,"56",,terminal_output
+3131,3290708,"TERMINAL",0,0,"67",,terminal_output
+3132,3291737,"TERMINAL",0,0,"78",,terminal_output
+3133,3292793,"TERMINAL",0,0,"89",,terminal_output
+3134,3293813,"TERMINAL",0,0,"910",,terminal_output
+3135,3294942,"TERMINAL",0,0,"401",,terminal_output
+3136,3295967,"TERMINAL",0,0,"12",,terminal_output
+3137,3296931,"TERMINAL",0,0,"23",,terminal_output
+3138,3297965,"TERMINAL",0,0,"34",,terminal_output
+3139,3299002,"TERMINAL",0,0,"45",,terminal_output
+3140,3300064,"TERMINAL",0,0,"57",,terminal_output
+3141,3301290,"TERMINAL",0,0,"78",,terminal_output
+3142,3302213,"TERMINAL",0,0,"89",,terminal_output
+3143,3303338,"TERMINAL",0,0,"920",,terminal_output
+3144,3304268,"TERMINAL",0,0,"501",,terminal_output
+3145,3305519,"TERMINAL",0,0,"12",,terminal_output
+3146,3306310,"TERMINAL",0,0,"23",,terminal_output
+3147,3307641,"TERMINAL",0,0,"34",,terminal_output
+3148,3308366,"TERMINAL",0,0,"45",,terminal_output
+3149,3309776,"TERMINAL",0,0,"56",,terminal_output
+3150,3310915,"TERMINAL",0,0,"67",,terminal_output
+3151,3311861,"TERMINAL",0,0,"78",,terminal_output
+3152,3313058,"TERMINAL",0,0,"89",,terminal_output
+3153,3313940,"TERMINAL",0,0,"930",,terminal_output
+3154,3315018,"TERMINAL",0,0,"5:001",,terminal_output
+3155,3316226,"TERMINAL",0,0,"13",,terminal_output
+3156,3317267,"TERMINAL",0,0,"34",,terminal_output
+3157,3318186,"TERMINAL",0,0,"45",,terminal_output
+3158,3319218,"TERMINAL",0,0,"56",,terminal_output
+3159,3320489,"TERMINAL",0,0,"67",,terminal_output
+3160,3321467,"TERMINAL",0,0,"78",,terminal_output
+3161,3322491,"TERMINAL",0,0,"89",,terminal_output
+3162,3323615,"TERMINAL",0,0,"940",,terminal_output
+3163,3324400,"TERMINAL",0,0,"101",,terminal_output
+3164,3325486,"TERMINAL",0,0,"12",,terminal_output
+3165,3326584,"TERMINAL",0,0,"23",,terminal_output
+3166,3327529,"TERMINAL",0,0,"34",,terminal_output
+3167,3328579,"TERMINAL",0,0,"45",,terminal_output
+3168,3329616,"TERMINAL",0,0,"56",,terminal_output
+3169,3330676,"TERMINAL",0,0,"67",,terminal_output
+3170,3331811,"TERMINAL",0,0,"78",,terminal_output
+3171,3332788,"TERMINAL",0,0,"89",,terminal_output
+3172,3333779,"TERMINAL",0,0,"950",,terminal_output
+3173,3334832,"TERMINAL",0,0,"201",,terminal_output
+3174,3335908,"TERMINAL",0,0,"12",,terminal_output
+3175,3336927,"TERMINAL",0,0,"23",,terminal_output
+3176,3337977,"TERMINAL",0,0,"34",,terminal_output
+3177,3339075,"TERMINAL",0,0,"45",,terminal_output
+3178,3340044,"TERMINAL",0,0,"57",,terminal_output
+3179,3341077,"TERMINAL",0,0,"78",,terminal_output
+3180,3342255,"TERMINAL",0,0,"89",,terminal_output
+3181,3343275,"TERMINAL",0,0,"910:00",,terminal_output
+3182,3344231,"TERMINAL",0,0,"301",,terminal_output
+3183,3345465,"TERMINAL",0,0,"12",,terminal_output
+3184,3346348,"TERMINAL",0,0,"23",,terminal_output
+3185,3347316,"TERMINAL",0,0,"34",,terminal_output
+3186,3348420,"TERMINAL",0,0,"45",,terminal_output
+3187,3349404,"TERMINAL",0,0,"56",,terminal_output
+3188,3350690,"TERMINAL",0,0,"67",,terminal_output
+3189,3351486,"TERMINAL",0,0,"78",,terminal_output
+3190,3352526,"TERMINAL",0,0,"89",,terminal_output
+3191,3353563,"TERMINAL",0,0,"910",,terminal_output
+3192,3354603,"TERMINAL",0,0,"401",,terminal_output
+3193,3355643,"TERMINAL",0,0,"12",,terminal_output
+3194,3356903,"TERMINAL",0,0,"23",,terminal_output
+3195,3357745,"TERMINAL",0,0,"34",,terminal_output
+3196,3358977,"TERMINAL",0,0,"45",,terminal_output
+3197,3359966,"TERMINAL",0,0,"56",,terminal_output
+3198,3360889,"TERMINAL",0,0,"67",,terminal_output
+3199,3362015,"TERMINAL",0,0,"78",,terminal_output
+3200,3363037,"TERMINAL",0,0,"89",,terminal_output
+3201,3363961,"TERMINAL",0,0,"920",,terminal_output
+3202,3365014,"TERMINAL",0,0,"502",,terminal_output
+3203,3366109,"TERMINAL",0,0,"23",,terminal_output
+3204,3367127,"TERMINAL",0,0,"34",,terminal_output
+3205,3368157,"TERMINAL",0,0,"45",,terminal_output
+3206,3369186,"TERMINAL",0,0,"56",,terminal_output
+3207,3370310,"TERMINAL",0,0,"67",,terminal_output
+3208,3371333,"TERMINAL",0,0,"78",,terminal_output
+3209,3372314,"TERMINAL",0,0,"89",,terminal_output
+3210,3373515,"TERMINAL",0,0,"930",,terminal_output
+3211,3374410,"TERMINAL",0,0,"6:001",,terminal_output
+3212,3375466,"TERMINAL",0,0,"12",,terminal_output
+3213,3376560,"TERMINAL",0,0,"23",,terminal_output
+3214,3377682,"TERMINAL",0,0,"34",,terminal_output
+3215,3378586,"TERMINAL",0,0,"45",,terminal_output
+3216,3379630,"TERMINAL",0,0,"56",,terminal_output
+3217,3380676,"TERMINAL",0,0,"67",,terminal_output
+3218,3381781,"TERMINAL",0,0,"78",,terminal_output
+3219,3382765,"TERMINAL",0,0,"89",,terminal_output
+3220,3383822,"TERMINAL",0,0,"940",,terminal_output
+3221,3384851,"TERMINAL",0,0,"101",,terminal_output
+3222,3386078,"TERMINAL",0,0,"12",,terminal_output
+3223,3386923,"TERMINAL",0,0,"23",,terminal_output
+3224,3388126,"TERMINAL",0,0,"34",,terminal_output
+3225,3389001,"TERMINAL",0,0,"45",,terminal_output
+3226,3390073,"TERMINAL",0,0,"57",,terminal_output
+3227,3391077,"TERMINAL",0,0,"78",,terminal_output
+3228,3392127,"TERMINAL",0,0,"89",,terminal_output
+3229,3393169,"TERMINAL",0,0,"950",,terminal_output
+3230,3394221,"TERMINAL",0,0,"201",,terminal_output
+3231,3395293,"TERMINAL",0,0,"12",,terminal_output
+3232,3396319,"TERMINAL",0,0,"23",,terminal_output
+3233,3397365,"TERMINAL",0,0,"34",,terminal_output
+3234,3398420,"TERMINAL",0,0,"45",,terminal_output
+3235,3399499,"TERMINAL",0,0,"56",,terminal_output
+3236,3400516,"TERMINAL",0,0,"67",,terminal_output
+3237,3401549,"TERMINAL",0,0,"78",,terminal_output
+3238,3402597,"TERMINAL",0,0,"89",,terminal_output
+3239,3403639,"TERMINAL",0,0,"91:00",,terminal_output
+3240,3404687,"TERMINAL",0,0,"301",,terminal_output
+3241,3405730,"TERMINAL",0,0,"12",,terminal_output
+3242,3406967,"TERMINAL",0,0,"23",,terminal_output
+3243,3407890,"TERMINAL",0,0,"34",,terminal_output
+3244,3409016,"TERMINAL",0,0,"45",,terminal_output
+3245,3409943,"TERMINAL",0,0,"56",,terminal_output
+3246,3411065,"TERMINAL",0,0,"67",,terminal_output
+3247,3412142,"TERMINAL",0,0,"78",,terminal_output
+3248,3413058,"TERMINAL",0,0,"910",,terminal_output
+3249,3414239,"TERMINAL",0,0,"401",,terminal_output
+3250,3415184,"TERMINAL",0,0,"12",,terminal_output
+3251,3416177,"TERMINAL",0,0,"23",,terminal_output
+3252,3417251,"TERMINAL",0,0,"34",,terminal_output
+3253,3418265,"TERMINAL",0,0,"45",,terminal_output
+3254,3419332,"TERMINAL",0,0,"56",,terminal_output
+3255,3420515,"TERMINAL",0,0,"67",,terminal_output
+3256,3421412,"TERMINAL",0,0,"78",,terminal_output
+3257,3422840,"TERMINAL",0,0,"89",,terminal_output
+3258,3423558,"TERMINAL",0,0,"920",,terminal_output
+3259,3424585,"TERMINAL",0,0,"501",,terminal_output
+3260,3425569,"TERMINAL",0,0,"12",,terminal_output
+3261,3426608,"TERMINAL",0,0,"23",,terminal_output
+3262,3427657,"TERMINAL",0,0,"34",,terminal_output
+3263,3428883,"TERMINAL",0,0,"45",,terminal_output
+3264,3429809,"TERMINAL",0,0,"56",,terminal_output
+3265,3430798,"TERMINAL",0,0,"67",,terminal_output
+3266,3431843,"TERMINAL",0,0,"78",,terminal_output
+3267,3432976,"TERMINAL",0,0,"89",,terminal_output
+3268,3433938,"TERMINAL",0,0,"930",,terminal_output
+3269,3435025,"TERMINAL",0,0,"7:001",,terminal_output
+3270,3436049,"TERMINAL",0,0,"13",,terminal_output
+3271,3438152,"TERMINAL",0,0,"35",,terminal_output
+3272,3439260,"TERMINAL",0,0,"56",,terminal_output
+3273,3440353,"TERMINAL",0,0,"67",,terminal_output
+3274,3441373,"TERMINAL",0,0,"78",,terminal_output
+3275,3442339,"TERMINAL",0,0,"89",,terminal_output
+3276,3443423,"TERMINAL",0,0,"940",,terminal_output
+3277,3444437,"TERMINAL",0,0,"101",,terminal_output
+3278,3445574,"TERMINAL",0,0,"12",,terminal_output
+3279,3446523,"TERMINAL",0,0,"23",,terminal_output
+3280,3447619,"TERMINAL",0,0,"34",,terminal_output
+3281,3448645,"TERMINAL",0,0,"45",,terminal_output
+3282,3449642,"TERMINAL",0,0,"56",,terminal_output
+3283,3450683,"TERMINAL",0,0,"67",,terminal_output
+3284,3451716,"TERMINAL",0,0,"78",,terminal_output
+3285,3452842,"TERMINAL",0,0,"89",,terminal_output
+3286,3453803,"TERMINAL",0,0,"950",,terminal_output
+3287,3454844,"TERMINAL",0,0,"201",,terminal_output
+3288,3455878,"TERMINAL",0,0,"12",,terminal_output
+3289,3456939,"TERMINAL",0,0,"23",,terminal_output
+3290,3458066,"TERMINAL",0,0,"34",,terminal_output
+3291,3459014,"TERMINAL",0,0,"46",,terminal_output
+3292,3460113,"TERMINAL",0,0,"67",,terminal_output
+3293,3461096,"TERMINAL",0,0,"78",,terminal_output
+3294,3462148,"TERMINAL",0,0,"89",,terminal_output
+3295,3463194,"TERMINAL",0,0,"92:00",,terminal_output
+3296,3464244,"TERMINAL",0,0,"301",,terminal_output
+3297,3465335,"TERMINAL",0,0,"12",,terminal_output
+3298,3466313,"TERMINAL",0,0,"23",,terminal_output
+3299,3467369,"TERMINAL",0,0,"34",,terminal_output
+3300,3468648,"TERMINAL",0,0,"45",,terminal_output
+3301,3469571,"TERMINAL",0,0,"56",,terminal_output
+3302,3470513,"TERMINAL",0,0,"67",,terminal_output
+3303,3471589,"TERMINAL",0,0,"78",,terminal_output
+3304,3472610,"TERMINAL",0,0,"89",,terminal_output
+3305,3473631,"TERMINAL",0,0,"910",,terminal_output
+3306,3474659,"TERMINAL",0,0,"401",,terminal_output
+3307,3475693,"TERMINAL",0,0,"12",,terminal_output
+3308,3476735,"TERMINAL",0,0,"23",,terminal_output
+3309,3477775,"TERMINAL",0,0,"34",,terminal_output
+3310,3478853,"TERMINAL",0,0,"45",,terminal_output
+3311,3479977,"TERMINAL",0,0,"56",,terminal_output
+3312,3480924,"TERMINAL",0,0,"67",,terminal_output
+3313,3481956,"TERMINAL",0,0,"78",,terminal_output
+3314,3483054,"TERMINAL",0,0,"89",,terminal_output
+3315,3484075,"TERMINAL",0,0,"921",,terminal_output
+3316,3485104,"TERMINAL",0,0,"512",,terminal_output
+3317,3486140,"TERMINAL",0,0,"23",,terminal_output
+3318,3487263,"TERMINAL",0,0,"34",,terminal_output
+3319,3488233,"TERMINAL",0,0,"45",,terminal_output
+3320,3489272,"TERMINAL",0,0,"56",,terminal_output
+3321,3490441,"TERMINAL",0,0,"67",,terminal_output
+3322,3491361,"TERMINAL",0,0,"78",,terminal_output
+3323,3492473,"TERMINAL",0,0,"89",,terminal_output
+3324,3493703,"TERMINAL",0,0,"930",,terminal_output
+3325,3494627,"TERMINAL",0,0,"8:001",,terminal_output
+3326,3495650,"TERMINAL",0,0,"12",,terminal_output
+3327,3496595,"TERMINAL",0,0,"23",,terminal_output
+3328,3497641,"TERMINAL",0,0,"34",,terminal_output
+3329,3498683,"TERMINAL",0,0,"45",,terminal_output
+3330,3499722,"TERMINAL",0,0,"56",,terminal_output
+3331,3500869,"TERMINAL",0,0,"67",,terminal_output
+3332,3501802,"TERMINAL",0,0,"78",,terminal_output
+3333,3502864,"TERMINAL",0,0,"89",,terminal_output
+3334,3503942,"TERMINAL",0,0,"940",,terminal_output
+3335,3504965,"TERMINAL",0,0,"101",,terminal_output
+3336,3505998,"TERMINAL",0,0,"12",,terminal_output
+3337,3507054,"TERMINAL",0,0,"24",,terminal_output
+3338,3508141,"TERMINAL",0,0,"45",,terminal_output
+3339,3509178,"TERMINAL",0,0,"56",,terminal_output
+3340,3510188,"TERMINAL",0,0,"67",,terminal_output
+3341,3511320,"TERMINAL",0,0,"78",,terminal_output
+3342,3512503,"TERMINAL",0,0,"89",,terminal_output
+3343,3513312,"TERMINAL",0,0,"950",,terminal_output
+3344,3514359,"TERMINAL",0,0,"201",,terminal_output
+3345,3515412,"TERMINAL",0,0,"12",,terminal_output
+3346,3516572,"TERMINAL",0,0,"23",,terminal_output
+3347,3517561,"TERMINAL",0,0,"34",,terminal_output
+3348,3518599,"TERMINAL",0,0,"45",,terminal_output
+3349,3519715,"TERMINAL",0,0,"56",,terminal_output
+3350,3520633,"TERMINAL",0,0,"67",,terminal_output
+3351,3521677,"TERMINAL",0,0,"78",,terminal_output
+3352,3522729,"TERMINAL",0,0,"89",,terminal_output
+3353,3523767,"TERMINAL",0,0,"93:00",,terminal_output
+3354,3524818,"TERMINAL",0,0,"301",,terminal_output
+3355,3525872,"TERMINAL",0,0,"12",,terminal_output
+3356,3526913,"TERMINAL",0,0,"23",,terminal_output
+3357,3528110,"TERMINAL",0,0,"34",,terminal_output
+3358,3529157,"TERMINAL",0,0,"45",,terminal_output
+3359,3530260,"TERMINAL",0,0,"57",,terminal_output
+3360,3531194,"TERMINAL",0,0,"78",,terminal_output
+3361,3532151,"TERMINAL",0,0,"89",,terminal_output
+3362,3533332,"TERMINAL",0,0,"910",,terminal_output
+3363,3534272,"TERMINAL",0,0,"401",,terminal_output
+3364,3535482,"TERMINAL",0,0,"12",,terminal_output
+3365,3536345,"TERMINAL",0,0,"23",,terminal_output
+3366,3537469,"TERMINAL",0,0,"34",,terminal_output
+3367,3538453,"TERMINAL",0,0,"45",,terminal_output
+3368,3539476,"TERMINAL",0,0,"56",,terminal_output
+3369,3540738,"TERMINAL",0,0,"67",,terminal_output
+3370,3541728,"TERMINAL",0,0,"78",,terminal_output
+3371,3542617,"TERMINAL",0,0,"89",,terminal_output
+3372,3543658,"TERMINAL",0,0,"920",,terminal_output
+3373,3544706,"TERMINAL",0,0,"501",,terminal_output
+3374,3545750,"TERMINAL",0,0,"12",,terminal_output
+3375,3546793,"TERMINAL",0,0,"23",,terminal_output
+3376,3547835,"TERMINAL",0,0,"34",,terminal_output
+3377,3548895,"TERMINAL",0,0,"45",,terminal_output
+3378,3550124,"TERMINAL",0,0,"56",,terminal_output
+3379,3551044,"TERMINAL",0,0,"67",,terminal_output
+3380,3552069,"TERMINAL",0,0,"79",,terminal_output
+3381,3553094,"TERMINAL",0,0,"930",,terminal_output
+3382,3554118,"TERMINAL",0,0,"9:001",,terminal_output
+3383,3555242,"TERMINAL",0,0,"12",,terminal_output
+3384,3556215,"TERMINAL",0,0,"23",,terminal_output
+3385,3557264,"TERMINAL",0,0,"34",,terminal_output
+3386,3558418,"TERMINAL",0,0,"45",,terminal_output
+3387,3559370,"TERMINAL",0,0,"56",,terminal_output
+3388,3560470,"TERMINAL",0,0,"67",,terminal_output
+3389,3561635,"TERMINAL",0,0,"78",,terminal_output
+3390,3562597,"TERMINAL",0,0,"89",,terminal_output
+3391,3563568,"TERMINAL",0,0,"940",,terminal_output
+3392,3564622,"TERMINAL",0,0,"101",,terminal_output
+3393,3565690,"TERMINAL",0,0,"12",,terminal_output
+3394,3566713,"TERMINAL",0,0,"23",,terminal_output
+3395,3567759,"TERMINAL",0,0,"34",,terminal_output
+3396,3568762,"TERMINAL",0,0,"45",,terminal_output
+3397,3569886,"TERMINAL",0,0,"56",,terminal_output
+3398,3570856,"TERMINAL",0,0,"67",,terminal_output
+3399,3571899,"TERMINAL",0,0,"78",,terminal_output
+3400,3572941,"TERMINAL",0,0,"89",,terminal_output
+3401,3574085,"TERMINAL",0,0,"950",,terminal_output
+3402,3575108,"TERMINAL",0,0,"202",,terminal_output
+3403,3576137,"TERMINAL",0,0,"23",,terminal_output
+3404,3577300,"TERMINAL",0,0,"34",,terminal_output
+3405,3578288,"TERMINAL",0,0,"45",,terminal_output
+3406,3579260,"TERMINAL",0,0,"56",,terminal_output
+3407,3580443,"TERMINAL",0,0,"67",,terminal_output
+3408,3581312,"TERMINAL",0,0,"78",,terminal_output
+3409,3582369,"TERMINAL",0,0,"89",,terminal_output
+3410,3583507,"TERMINAL",0,0,"94:00",,terminal_output
+3411,3584459,"TERMINAL",0,0,"301",,terminal_output
+3412,3585584,"TERMINAL",0,0,"12",,terminal_output
+3413,3586684,"TERMINAL",0,0,"23",,terminal_output
+3414,3587604,"TERMINAL",0,0,"34",,terminal_output
+3415,3588854,"TERMINAL",0,0,"45",,terminal_output
+3416,3589687,"TERMINAL",0,0,"56",,terminal_output
+3417,3590735,"TERMINAL",0,0,"67",,terminal_output
+3418,3591777,"TERMINAL",0,0,"78",,terminal_output
+3419,3592824,"TERMINAL",0,0,"89",,terminal_output
+3420,3594051,"TERMINAL",0,0,"910",,terminal_output
+3421,3594976,"TERMINAL",0,0,"401",,terminal_output
+3422,3596103,"TERMINAL",0,0,"12",,terminal_output
+3423,3596994,"TERMINAL",0,0,"23",,terminal_output
+3424,3598038,"TERMINAL",0,0,"35",,terminal_output
+3425,3599172,"TERMINAL",0,0,"56",,terminal_output
+3426,3600134,"TERMINAL",0,0,"67",,terminal_output
+3427,3601323,"TERMINAL",0,0,"78",,terminal_output
+3428,3602223,"TERMINAL",0,0,"89",,terminal_output
+3429,3603373,"TERMINAL",0,0,"920",,terminal_output
+3430,3604342,"TERMINAL",0,0,"501",,terminal_output
+3431,3605367,"TERMINAL",0,0,"12",,terminal_output
+3432,3606549,"TERMINAL",0,0,"23",,terminal_output
+3433,3607469,"TERMINAL",0,0,"34",,terminal_output
+3434,3608595,"TERMINAL",0,0,"45",,terminal_output
+3435,3609723,"TERMINAL",0,0,"56",,terminal_output
+3436,3610743,"TERMINAL",0,0,"67",,terminal_output
+3437,3611666,"TERMINAL",0,0,"78",,terminal_output
+3438,3612696,"TERMINAL",0,0,"89",,terminal_output
+3439,3613734,"TERMINAL",0,0,"930",,terminal_output
+3440,3614802,"TERMINAL",0,0,"1:00:001",,terminal_output
+3441,3615845,"TERMINAL",0,0,"12",,terminal_output
+3442,3617096,"TERMINAL",0,0,"23",,terminal_output
+3443,3617934,"TERMINAL",0,0,"34",,terminal_output
+3444,3618977,"TERMINAL",0,0,"45",,terminal_output
+3445,3620066,"TERMINAL",0,0,"57",,terminal_output
+3446,3621110,"TERMINAL",0,0,"78",,terminal_output
+3447,3622156,"TERMINAL",0,0,"89",,terminal_output
+3448,3623154,"TERMINAL",0,0,"940",,terminal_output
+3449,3624211,"TERMINAL",0,0,"101",,terminal_output
+3450,3625255,"TERMINAL",0,0,"12",,terminal_output
+3451,3626415,"TERMINAL",0,0,"23",,terminal_output
+3452,3627333,"TERMINAL",0,0,"34",,terminal_output
+3453,3628461,"TERMINAL",0,0,"45",,terminal_output
+3454,3629429,"TERMINAL",0,0,"56",,terminal_output
+3455,3630515,"TERMINAL",0,0,"67",,terminal_output
+3456,3631533,"TERMINAL",0,0,"78",,terminal_output
+3457,3632762,"TERMINAL",0,0,"89",,terminal_output
+3458,3633784,"TERMINAL",0,0,"950",,terminal_output
+3459,3634707,"TERMINAL",0,0,"201",,terminal_output
+3460,3635731,"TERMINAL",0,0,"12",,terminal_output
+3461,3636866,"TERMINAL",0,0,"23",,terminal_output
+3462,3637793,"TERMINAL",0,0,"34",,terminal_output
+3463,3638853,"TERMINAL",0,0,"45",,terminal_output
+3464,3639884,"TERMINAL",0,0,"56",,terminal_output
+3465,3640930,"TERMINAL",0,0,"67",,terminal_output
+3466,3642182,"TERMINAL",0,0,"78",,terminal_output
+3467,3643022,"TERMINAL",0,0,"85:00",,terminal_output
+3468,3644279,"TERMINAL",0,0,"301",,terminal_output
+3469,3645159,"TERMINAL",0,0,"12",,terminal_output
+3470,3646183,"TERMINAL",0,0,"23",,terminal_output
+3471,3647242,"TERMINAL",0,0,"34",,terminal_output
+3472,3648442,"TERMINAL",0,0,"45",,terminal_output
+3473,3649275,"TERMINAL",0,0,"56",,terminal_output
+3474,3650359,"TERMINAL",0,0,"67",,terminal_output
+3475,3651367,"TERMINAL",0,0,"78",,terminal_output
+3476,3652425,"TERMINAL",0,0,"89",,terminal_output
+3477,3653654,"TERMINAL",0,0,"910",,terminal_output
+3478,3654510,"TERMINAL",0,0,"401",,terminal_output
+3479,3655596,"TERMINAL",0,0,"12",,terminal_output
+3480,3656824,"TERMINAL",0,0,"23",,terminal_output
+3481,3657686,"TERMINAL",0,0,"34",,terminal_output
+3482,3658675,"TERMINAL",0,0,"45",,terminal_output
+3483,3659746,"TERMINAL",0,0,"56",,terminal_output
+3484,3660824,"TERMINAL",0,0,"67",,terminal_output
+3485,3661808,"TERMINAL",0,0,"78",,terminal_output
+3486,3662851,"TERMINAL",0,0,"89",,terminal_output
+3487,3663995,"TERMINAL",0,0,"920",,terminal_output
+3488,3664943,"TERMINAL",0,0,"501",,terminal_output
+3489,3666041,"TERMINAL",0,0,"12",,terminal_output
+3490,3667066,"TERMINAL",0,0,"24",,terminal_output
+3491,3668080,"TERMINAL",0,0,"45",,terminal_output
+3492,3669286,"TERMINAL",0,0,"56",,terminal_output
+3493,3670244,"TERMINAL",0,0,"67",,terminal_output
+3494,3671369,"TERMINAL",0,0,"78",,terminal_output
+3495,3672262,"TERMINAL",0,0,"89",,terminal_output
+3496,3673757,"TERMINAL",0,0,"930",,terminal_output
+3497,3674356,"TERMINAL",0,0,"1:001",,terminal_output
+3498,3675566,"TERMINAL",0,0,"12",,terminal_output
+3499,3676489,"TERMINAL",0,0,"23",,terminal_output
+3500,3677613,"TERMINAL",0,0,"34",,terminal_output
+3501,3678640,"TERMINAL",0,0,"45",,terminal_output
+3502,3679764,"TERMINAL",0,0,"56",,terminal_output
+3503,3680699,"TERMINAL",0,0,"67",,terminal_output
+3504,3681681,"TERMINAL",0,0,"78",,terminal_output
+3505,3682735,"TERMINAL",0,0,"89",,terminal_output
+3506,3683863,"TERMINAL",0,0,"940",,terminal_output
+3507,3684833,"TERMINAL",0,0,"101",,terminal_output
+3508,3685908,"TERMINAL",0,0,"12",,terminal_output
+3509,3687039,"TERMINAL",0,0,"23",,terminal_output
+3510,3688059,"TERMINAL",0,0,"34",,terminal_output
+3511,3688999,"TERMINAL",0,0,"45",,terminal_output
+3512,3690052,"TERMINAL",0,0,"57",,terminal_output
+3513,3691236,"TERMINAL",0,0,"78",,terminal_output
+3514,3692151,"TERMINAL",0,0,"89",,terminal_output
+3515,3693285,"TERMINAL",0,0,"950",,terminal_output
+3516,3694292,"TERMINAL",0,0,"201",,terminal_output
+3517,3695432,"TERMINAL",0,0,"12",,terminal_output
+3518,3696353,"TERMINAL",0,0,"23",,terminal_output
+3519,3697347,"TERMINAL",0,0,"34",,terminal_output
+3520,3698402,"TERMINAL",0,0,"45",,terminal_output
+3521,3699631,"TERMINAL",0,0,"56",,terminal_output
+3522,3700691,"TERMINAL",0,0,"67",,terminal_output
+3523,3701517,"TERMINAL",0,0,"78",,terminal_output
+3524,3702599,"TERMINAL",0,0,"89",,terminal_output
+3525,3703616,"TERMINAL",0,0,"96:00",,terminal_output
+3526,3704684,"TERMINAL",0,0,"301",,terminal_output
+3527,3705776,"TERMINAL",0,0,"12",,terminal_output
+3528,3706801,"TERMINAL",0,0,"23",,terminal_output
+3529,3707778,"TERMINAL",0,0,"34",,terminal_output
+3530,3708833,"TERMINAL",0,0,"45",,terminal_output
+3531,3709873,"TERMINAL",0,0,"56",,terminal_output
+3532,3710921,"TERMINAL",0,0,"67",,terminal_output
+3533,3712021,"TERMINAL",0,0,"78",,terminal_output
+3534,3713044,"TERMINAL",0,0,"89",,terminal_output
+3535,3714068,"TERMINAL",0,0,"911",,terminal_output
+3536,3715092,"TERMINAL",0,0,"412",,terminal_output
+3537,3716542,"TERMINAL",0,0,"23",,terminal_output
+3538,3717189,"TERMINAL",0,0,"34",,terminal_output
+3539,3718268,"TERMINAL",0,0,"45",,terminal_output
+3540,3719463,"TERMINAL",0,0,"56",,terminal_output
+3541,3720295,"TERMINAL",0,0,"67",,terminal_output
+3542,3721340,"TERMINAL",0,0,"78",,terminal_output
+3543,3722388,"TERMINAL",0,0,"89",,terminal_output
+3544,3723434,"TERMINAL",0,0,"920",,terminal_output
+3545,3724473,"TERMINAL",0,0,"501",,terminal_output
+3546,3725524,"TERMINAL",0,0,"12",,terminal_output
+3547,3726766,"TERMINAL",0,0,"23",,terminal_output
+3548,3727719,"TERMINAL",0,0,"34",,terminal_output
+3549,3728720,"TERMINAL",0,0,"45",,terminal_output
+3550,3729839,"TERMINAL",0,0,"56",,terminal_output
+3551,3730865,"TERMINAL",0,0,"67",,terminal_output
+3552,3731801,"TERMINAL",0,0,"78",,terminal_output
+3553,3732855,"TERMINAL",0,0,"89",,terminal_output
+3554,3733892,"TERMINAL",0,0,"930",,terminal_output
+3555,3734960,"TERMINAL",0,0,"2:001",,terminal_output
+3556,3736189,"TERMINAL",0,0,"12",,terminal_output
+3557,3737142,"TERMINAL",0,0,"24",,terminal_output
+3558,3738235,"TERMINAL",0,0,"45",,terminal_output
+3559,3739258,"TERMINAL",0,0,"56",,terminal_output
+3560,3740291,"TERMINAL",0,0,"67",,terminal_output
+3561,3741307,"TERMINAL",0,0,"78",,terminal_output
+3562,3742259,"TERMINAL",0,0,"89",,terminal_output
+3563,3743354,"TERMINAL",0,0,"940",,terminal_output
+3564,3744347,"TERMINAL",0,0,"101",,terminal_output
+3565,3745382,"TERMINAL",0,0,"12",,terminal_output
+3566,3746422,"TERMINAL",0,0,"23",,terminal_output
+3567,3748396,"TERMINAL",0,0,"35",,terminal_output
+3568,3749453,"TERMINAL",0,0,"56",,terminal_output
+3569,3750492,"TERMINAL",0,0,"67",,terminal_output
+3570,3751537,"TERMINAL",0,0,"78",,terminal_output
+3571,3752587,"TERMINAL",0,0,"89",,terminal_output
+3572,3753637,"TERMINAL",0,0,"950",,terminal_output
+3573,3754676,"TERMINAL",0,0,"201",,terminal_output
+3574,3755755,"TERMINAL",0,0,"12",,terminal_output
+3575,3756889,"TERMINAL",0,0,"23",,terminal_output
+3576,3757805,"TERMINAL",0,0,"34",,terminal_output
+3577,3758853,"TERMINAL",0,0,"45",,terminal_output
+3578,3759895,"TERMINAL",0,0,"56",,terminal_output
+3579,3760940,"TERMINAL",0,0,"67",,terminal_output
+3580,3761992,"TERMINAL",0,0,"78",,terminal_output
+3581,3763120,"TERMINAL",0,0,"87:00",,terminal_output
+3582,3764078,"TERMINAL",0,0,"301",,terminal_output
+3583,3765277,"TERMINAL",0,0,"12",,terminal_output
+3584,3766293,"TERMINAL",0,0,"23",,terminal_output
+3585,3767214,"TERMINAL",0,0,"34",,terminal_output
+3586,3768449,"TERMINAL",0,0,"45",,terminal_output
+3587,3769298,"TERMINAL",0,0,"56",,terminal_output
+3588,3770492,"TERMINAL",0,0,"67",,terminal_output
+3589,3771517,"TERMINAL",0,0,"78",,terminal_output
+3590,3772444,"TERMINAL",0,0,"89",,terminal_output
+3591,3773666,"TERMINAL",0,0,"910",,terminal_output
+3592,3774588,"TERMINAL",0,0,"401",,terminal_output
+3593,3775614,"TERMINAL",0,0,"12",,terminal_output
+3594,3776636,"TERMINAL",0,0,"23",,terminal_output
+3595,3777665,"TERMINAL",0,0,"34",,terminal_output
+3596,3778729,"TERMINAL",0,0,"45",,terminal_output
+3597,3779721,"TERMINAL",0,0,"56",,terminal_output
+3598,3780760,"TERMINAL",0,0,"67",,terminal_output
+3599,3781860,"TERMINAL",0,0,"78",,terminal_output
+3600,3782850,"TERMINAL",0,0,"89",,terminal_output
+3601,3783895,"TERMINAL",0,0,"920",,terminal_output
+3602,3784943,"TERMINAL",0,0,"501",,terminal_output
+3603,3785979,"TERMINAL",0,0,"12",,terminal_output
+3604,3787027,"TERMINAL",0,0,"24",,terminal_output
+3605,3788646,"TERMINAL",0,0,"45",,terminal_output
+3606,3789127,"TERMINAL",0,0,"56",,terminal_output
+3607,3790188,"TERMINAL",0,0,"67",,terminal_output
+3608,3791380,"TERMINAL",0,0,"78",,terminal_output
+3609,3792258,"TERMINAL",0,0,"89",,terminal_output
+3610,3793296,"TERMINAL",0,0,"930",,terminal_output
+3611,3794348,"TERMINAL",0,0,"3:001",,terminal_output
+3612,3795476,"TERMINAL",0,0,"12",,terminal_output
+3613,3796540,"TERMINAL",0,0,"23",,terminal_output
+3614,3797629,"TERMINAL",0,0,"34",,terminal_output
+3615,3798651,"TERMINAL",0,0,"45",,terminal_output
+3616,3799573,"TERMINAL",0,0,"56",,terminal_output
+3617,3800722,"TERMINAL",0,0,"67",,terminal_output
+3618,3801758,"TERMINAL",0,0,"78",,terminal_output
+3619,3802749,"TERMINAL",0,0,"89",,terminal_output
+3620,3803772,"TERMINAL",0,0,"940",,terminal_output
+3621,3804794,"TERMINAL",0,0,"101",,terminal_output
+3622,3806127,"TERMINAL",0,0,"12",,terminal_output
+3623,3807154,"TERMINAL",0,0,"23",,terminal_output
+3624,3807851,"TERMINAL",0,0,"34",,terminal_output
+3625,3808891,"TERMINAL",0,0,"45",,terminal_output
+3626,3809927,"TERMINAL",0,0,"56",,terminal_output
+3627,3810966,"TERMINAL",0,0,"67",,terminal_output
+3628,3812011,"TERMINAL",0,0,"78",,terminal_output
+3629,3813046,"TERMINAL",0,0,"850",,terminal_output
+3630,3814114,"TERMINAL",0,0,"201",,terminal_output
+3631,3815188,"TERMINAL",0,0,"12",,terminal_output
+3632,3816266,"TERMINAL",0,0,"23",,terminal_output
+3633,3817219,"TERMINAL",0,0,"34",,terminal_output
+3634,3818317,"TERMINAL",0,0,"45",,terminal_output
+3635,3819300,"TERMINAL",0,0,"56",,terminal_output
+3636,3820464,"TERMINAL",0,0,"67",,terminal_output
+3637,3821388,"TERMINAL",0,0,"78",,terminal_output
+3638,3822741,"TERMINAL",0,0,"89",,terminal_output
+3639,3823533,"TERMINAL",0,0,"98:00",,terminal_output
+3640,3824557,"TERMINAL",0,0,"301",,terminal_output
+3641,3825582,"TERMINAL",0,0,"12",,terminal_output
+3642,3826607,"TERMINAL",0,0,"23",,terminal_output
+3643,3827630,"TERMINAL",0,0,"34",,terminal_output
+3644,3828643,"TERMINAL",0,0,"45",,terminal_output
+3645,3829681,"TERMINAL",0,0,"56",,terminal_output
+3646,3830766,"TERMINAL",0,0,"67",,terminal_output
+3647,3831893,"TERMINAL",0,0,"78",,terminal_output
+3648,3832855,"TERMINAL",0,0,"89",,terminal_output
+3649,3833841,"TERMINAL",0,0,"910",,terminal_output
+3650,3834892,"TERMINAL",0,0,"401",,terminal_output
+3651,3835922,"TERMINAL",0,0,"12",,terminal_output
+3652,3836958,"TERMINAL",0,0,"23",,terminal_output
+3653,3838178,"TERMINAL",0,0,"34",,terminal_output
+3654,3839042,"TERMINAL",0,0,"46",,terminal_output
+3655,3840080,"TERMINAL",0,0,"67",,terminal_output
+3656,3841113,"TERMINAL",0,0,"78",,terminal_output
+3657,3842158,"TERMINAL",0,0,"89",,terminal_output
+3658,3843406,"TERMINAL",0,0,"920",,terminal_output
+3659,3844273,"TERMINAL",0,0,"501",,terminal_output
+3660,3845281,"TERMINAL",0,0,"12",,terminal_output
+3661,3846322,"TERMINAL",0,0,"23",,terminal_output
+3662,3847396,"TERMINAL",0,0,"34",,terminal_output
+3663,3848419,"TERMINAL",0,0,"45",,terminal_output
+3664,3849432,"TERMINAL",0,0,"56",,terminal_output
+3665,3850470,"TERMINAL",0,0,"67",,terminal_output
+3666,3851506,"TERMINAL",0,0,"78",,terminal_output
+3667,3852923,"TERMINAL",0,0,"89",,terminal_output
+3668,3853579,"TERMINAL",0,0,"930",,terminal_output
+3669,3854633,"TERMINAL",0,0,"4:001",,terminal_output
+3670,3855831,"TERMINAL",0,0,"12",,terminal_output
+3671,3856866,"TERMINAL",0,0,"23",,terminal_output
+3672,3857950,"TERMINAL",0,0,"34",,terminal_output
+3673,3858823,"TERMINAL",0,0,"45",,terminal_output
+3674,3859997,"TERMINAL",0,0,"56",,terminal_output
+3675,3860913,"TERMINAL",0,0,"67",,terminal_output
+3676,3861973,"TERMINAL",0,0,"78",,terminal_output
+3677,3862959,"TERMINAL",0,0,"89",,terminal_output
+3678,3863995,"TERMINAL",0,0,"940",,terminal_output
+3679,3865033,"TERMINAL",0,0,"102",,terminal_output
+3680,3866137,"TERMINAL",0,0,"23",,terminal_output
+3681,3867109,"TERMINAL",0,0,"34",,terminal_output
+3682,3868145,"TERMINAL",0,0,"45",,terminal_output
+3683,3869288,"TERMINAL",0,0,"56",,terminal_output
+3684,3870333,"TERMINAL",0,0,"67",,terminal_output
+3685,3871332,"TERMINAL",0,0,"78",,terminal_output
+3686,3872368,"TERMINAL",0,0,"89",,terminal_output
+3687,3873407,"TERMINAL",0,0,"950",,terminal_output
+3688,3874447,"TERMINAL",0,0,"201",,terminal_output
+3689,3875490,"TERMINAL",0,0,"12",,terminal_output
+3690,3876533,"TERMINAL",0,0,"23",,terminal_output
+3691,3877575,"TERMINAL",0,0,"34",,terminal_output
+3692,3878612,"TERMINAL",0,0,"45",,terminal_output
+3693,3879656,"TERMINAL",0,0,"56",,terminal_output
+3694,3880694,"TERMINAL",0,0,"67",,terminal_output
+3695,3881751,"TERMINAL",0,0,"78",,terminal_output
+3696,3882827,"TERMINAL",0,0,"89",,terminal_output
+3697,3883847,"TERMINAL",0,0,"99:00",,terminal_output
+3698,3884856,"TERMINAL",0,0,"301",,terminal_output
+3699,3885884,"TERMINAL",0,0,"12",,terminal_output
+3700,3886920,"TERMINAL",0,0,"23",,terminal_output
+3701,3887965,"TERMINAL",0,0,"34",,terminal_output
+3702,3889002,"TERMINAL",0,0,"45",,terminal_output
+3703,3890128,"TERMINAL",0,0,"57",,terminal_output
+3704,3891085,"TERMINAL",0,0,"78",,terminal_output
+3705,3892190,"TERMINAL",0,0,"89",,terminal_output
+3706,3893186,"TERMINAL",0,0,"910",,terminal_output
+3707,3894253,"TERMINAL",0,0,"401",,terminal_output
+3708,3895321,"TERMINAL",0,0,"12",,terminal_output
+3709,3896345,"TERMINAL",0,0,"23",,terminal_output
+3710,3897367,"TERMINAL",0,0,"34",,terminal_output
+3711,3898390,"TERMINAL",0,0,"45",,terminal_output
+3712,3899402,"TERMINAL",0,0,"56",,terminal_output
+3713,3900542,"TERMINAL",0,0,"67",,terminal_output
+3714,3901549,"TERMINAL",0,0,"78",,terminal_output
+3715,3902693,"TERMINAL",0,0,"89",,terminal_output
+3716,3903714,"TERMINAL",0,0,"920",,terminal_output
+3717,3904741,"TERMINAL",0,0,"501",,terminal_output
+3718,3905659,"TERMINAL",0,0,"12",,terminal_output
+3719,3906932,"TERMINAL",0,0,"23",,terminal_output
+3720,3907914,"TERMINAL",0,0,"34",,terminal_output
+3721,3908770,"TERMINAL",0,0,"45",,terminal_output
+3722,3909859,"TERMINAL",0,0,"56",,terminal_output
+3723,3910881,"TERMINAL",0,0,"67",,terminal_output
+3724,3911905,"TERMINAL",0,0,"78",,terminal_output
+3725,3912934,"TERMINAL",0,0,"89",,terminal_output
+3726,3913976,"TERMINAL",0,0,"930",,terminal_output
+3727,3915015,"TERMINAL",0,0,"5:002",,terminal_output
+3728,3916054,"TERMINAL",0,0,"23",,terminal_output
+3729,3917129,"TERMINAL",0,0,"34",,terminal_output
+3730,3918358,"TERMINAL",0,0,"45",,terminal_output
+3731,3919298,"TERMINAL",0,0,"56",,terminal_output
+3732,3920243,"TERMINAL",0,0,"67",,terminal_output
+3733,3921327,"TERMINAL",0,0,"78",,terminal_output
+3734,3922350,"TERMINAL",0,0,"89",,terminal_output
+3735,3923585,"TERMINAL",0,0,"940",,terminal_output
+3736,3924431,"TERMINAL",0,0,"101",,terminal_output
+3737,3925526,"TERMINAL",0,0,"12",,terminal_output
+3738,3926654,"TERMINAL",0,0,"23",,terminal_output
+3739,3927554,"TERMINAL",0,0,"34",,terminal_output
+3740,3928698,"TERMINAL",0,0,"45",,terminal_output
+3741,3929661,"TERMINAL",0,0,"56",,terminal_output
+3742,3930887,"TERMINAL",0,0,"67",,terminal_output
+3743,3931834,"TERMINAL",0,0,"78",,terminal_output
+3744,3932798,"TERMINAL",0,0,"89",,terminal_output
+3745,3933839,"TERMINAL",0,0,"950",,terminal_output
+3746,3934949,"TERMINAL",0,0,"201",,terminal_output
+3747,3935978,"TERMINAL",0,0,"12",,terminal_output
+3748,3937004,"TERMINAL",0,0,"23",,terminal_output
+3749,3938032,"TERMINAL",0,0,"35",,terminal_output
+3750,3939143,"TERMINAL",0,0,"56",,terminal_output
+3751,3940115,"TERMINAL",0,0,"67",,terminal_output
+3752,3941296,"TERMINAL",0,0,"78",,terminal_output
+3753,3942220,"TERMINAL",0,0,"89",,terminal_output
+3754,3943251,"TERMINAL",0,0,"920:00",,terminal_output
+3755,3944295,"TERMINAL",0,0,"301",,terminal_output
+3756,3945393,"TERMINAL",0,0,"12",,terminal_output
+3757,3946621,"TERMINAL",0,0,"23",,terminal_output
+3758,3947440,"TERMINAL",0,0,"34",,terminal_output
+3759,3948667,"TERMINAL",0,0,"45",,terminal_output
+3760,3949564,"TERMINAL",0,0,"56",,terminal_output
+3761,3950613,"TERMINAL",0,0,"67",,terminal_output
+3762,3951619,"TERMINAL",0,0,"78",,terminal_output
+3763,3952668,"TERMINAL",0,0,"89",,terminal_output
+3764,3953700,"TERMINAL",0,0,"910",,terminal_output
+3765,3954741,"TERMINAL",0,0,"401",,terminal_output
+3766,3955971,"TERMINAL",0,0,"12",,terminal_output
+3767,3956862,"TERMINAL",0,0,"23",,terminal_output
+3768,3957895,"TERMINAL",0,0,"34",,terminal_output
+3769,3958932,"TERMINAL",0,0,"45",,terminal_output
+3770,3960137,"TERMINAL",0,0,"56",,terminal_output
+3771,3961028,"TERMINAL",0,0,"68",,terminal_output
+3772,3962079,"TERMINAL",0,0,"89",,terminal_output
+3773,3963123,"TERMINAL",0,0,"920",,terminal_output
+3774,3964233,"TERMINAL",0,0,"501",,terminal_output
+3775,3965220,"TERMINAL",0,0,"12",,terminal_output
+3776,3966261,"TERMINAL",0,0,"23",,terminal_output
+3777,3967410,"TERMINAL",0,0,"34",,terminal_output
+3778,3968433,"TERMINAL",0,0,"45",,terminal_output
+3779,3969430,"TERMINAL",0,0,"56",,terminal_output
+3780,3970684,"TERMINAL",0,0,"67",,terminal_output
+3781,3971706,"TERMINAL",0,0,"78",,terminal_output
+3782,3972568,"TERMINAL",0,0,"89",,terminal_output
+3783,3973655,"TERMINAL",0,0,"930",,terminal_output
+3784,3974651,"TERMINAL",0,0,"6:001",,terminal_output
+3785,3975840,"TERMINAL",0,0,"12",,terminal_output
+3786,3976747,"TERMINAL",0,0,"23",,terminal_output
+3787,3977789,"TERMINAL",0,0,"34",,terminal_output
+3788,3979051,"TERMINAL",0,0,"45",,terminal_output
+3789,3980209,"TERMINAL",0,0,"56",,terminal_output
+3790,3980931,"TERMINAL",0,0,"67",,terminal_output
+3791,3981995,"TERMINAL",0,0,"78",,terminal_output
+3792,3983025,"TERMINAL",0,0,"840",,terminal_output
+3793,3984073,"TERMINAL",0,0,"101",,terminal_output
+3794,3985130,"TERMINAL",0,0,"12",,terminal_output
+3795,3986252,"TERMINAL",0,0,"23",,terminal_output
+3796,3987278,"TERMINAL",0,0,"34",,terminal_output
+3797,3988319,"TERMINAL",0,0,"45",,terminal_output
+3798,3989731,"TERMINAL",0,0,"572886",,terminal_output
+3799,3990755,"TERMINAL",0,0,"67",,terminal_output
+3800,3991644,"TERMINAL",0,0,"78",,terminal_output
+3801,3992816,"TERMINAL",0,0,"89",,terminal_output
+3802,3993828,"TERMINAL",0,0,"950",,terminal_output
+3803,3994885,"TERMINAL",0,0,"201",,terminal_output
+3804,3995841,"TERMINAL",0,0,"12",,terminal_output
+3805,3997006,"TERMINAL",0,0,"23",,terminal_output
+3806,3997924,"TERMINAL",0,0,"34",,terminal_output
+3807,3999056,"TERMINAL",0,0,"45",,terminal_output
+3808,3999997,"TERMINAL",0,0,"56",,terminal_output
+3809,4001045,"TERMINAL",0,0,"68",,terminal_output
+3810,4002095,"TERMINAL",0,0,"89",,terminal_output
+3811,4003137,"TERMINAL",0,0,"91:00",,terminal_output
+3812,4004181,"TERMINAL",0,0,"301",,terminal_output
+3813,4005227,"TERMINAL",0,0,"12",,terminal_output
+3814,4006325,"TERMINAL",0,0,"23",,terminal_output
+3815,4007342,"TERMINAL",0,0,"34",,terminal_output
+3816,4008573,"TERMINAL",0,0,"45",,terminal_output
+3817,4009415,"TERMINAL",0,0,"56",,terminal_output
+3818,4010518,"TERMINAL",0,0,"67",,terminal_output
+3819,4011545,"TERMINAL",0,0,"78",,terminal_output
+3820,4012575,"TERMINAL",0,0,"89",,terminal_output
+3821,4013609,"TERMINAL",0,0,"910",,terminal_output
+3822,4014719,"TERMINAL",0,0,"401",,terminal_output
+3823,4015708,"TERMINAL",0,0,"12",,terminal_output
+3824,4016765,"TERMINAL",0,0,"23",,terminal_output
+3825,4017759,"TERMINAL",0,0,"34",,terminal_output
+3826,4018838,"TERMINAL",0,0,"45",,terminal_output
+3827,4019942,"TERMINAL",0,0,"56",,terminal_output
+3828,4020894,"TERMINAL",0,0,"67",,terminal_output
+3829,4021920,"TERMINAL",0,0,"78",,terminal_output
+3830,4022964,"TERMINAL",0,0,"89",,terminal_output
+3831,4024011,"TERMINAL",0,0,"920",,terminal_output
+3832,4025268,"TERMINAL",0,0,"512",,terminal_output
+3833,4026290,"TERMINAL",0,0,"23",,terminal_output
+3834,4027209,"TERMINAL",0,0,"34",,terminal_output
+3835,4028234,"TERMINAL",0,0,"45",,terminal_output
+3836,4029293,"TERMINAL",0,0,"56",,terminal_output
+3837,4030386,"TERMINAL",0,0,"67",,terminal_output
+3838,4031518,"TERMINAL",0,0,"78",,terminal_output
+3839,4032449,"TERMINAL",0,0,"89",,terminal_output
+3840,4033560,"TERMINAL",0,0,"930",,terminal_output
+3841,4034458,"TERMINAL",0,0,"7:001",,terminal_output
+3842,4035609,"TERMINAL",0,0,"12",,terminal_output
+3843,4036549,"TERMINAL",0,0,"23",,terminal_output
+3844,4037584,"TERMINAL",0,0,"34",,terminal_output
+3845,4038635,"TERMINAL",0,0,"45",,terminal_output
+3846,4039704,"TERMINAL",0,0,"56",,terminal_output
+3847,4040696,"TERMINAL",0,0,"67",,terminal_output
+3848,4041755,"TERMINAL",0,0,"78",,terminal_output
+3849,4042981,"TERMINAL",0,0,"89",,terminal_output
+3850,4043820,"TERMINAL",0,0,"940",,terminal_output
+3851,4044873,"TERMINAL",0,0,"101",,terminal_output
+3852,4045954,"TERMINAL",0,0,"12",,terminal_output
+3853,4046952,"TERMINAL",0,0,"23",,terminal_output
+3854,4047995,"TERMINAL",0,0,"34",,terminal_output
+3855,4049042,"TERMINAL",0,0,"46",,terminal_output
+3856,4050153,"TERMINAL",0,0,"67",,terminal_output
+3857,4051197,"TERMINAL",0,0,"78",,terminal_output
+3858,4052168,"TERMINAL",0,0,"89",,terminal_output
+3859,4053224,"TERMINAL",0,0,"950",,terminal_output
+3860,4054300,"TERMINAL",0,0,"201",,terminal_output
+3861,4055376,"TERMINAL",0,0,"12",,terminal_output
+3862,4056498,"TERMINAL",0,0,"23",,terminal_output
+3863,4057362,"TERMINAL",0,0,"34",,terminal_output
+3864,4058415,"TERMINAL",0,0,"45",,terminal_output
+3865,4059449,"TERMINAL",0,0,"56",,terminal_output
+3866,4060599,"TERMINAL",0,0,"67",,terminal_output
+3867,4061622,"TERMINAL",0,0,"78",,terminal_output
+3868,4062603,"TERMINAL",0,0,"89",,terminal_output
+3869,4063609,"TERMINAL",0,0,"92:00",,terminal_output
+3870,4064642,"TERMINAL",0,0,"301",,terminal_output
+3871,4065684,"TERMINAL",0,0,"12",,terminal_output
+3872,4066736,"TERMINAL",0,0,"23",,terminal_output
+3873,4067769,"TERMINAL",0,0,"34",,terminal_output
+3874,4068916,"TERMINAL",0,0,"45",,terminal_output
+3875,4069862,"TERMINAL",0,0,"56",,terminal_output
+3876,4070881,"TERMINAL",0,0,"67",,terminal_output
+3877,4071919,"TERMINAL",0,0,"78",,terminal_output
+3878,4072948,"TERMINAL",0,0,"89",,terminal_output
+3879,4073993,"TERMINAL",0,0,"910",,terminal_output
+3880,4075060,"TERMINAL",0,0,"402",,terminal_output
+3881,4076059,"TERMINAL",0,0,"23",,terminal_output
+3882,4077287,"TERMINAL",0,0,"34",,terminal_output
+3883,4078135,"TERMINAL",0,0,"45",,terminal_output
+3884,4079260,"TERMINAL",0,0,"56",,terminal_output
+3885,4080230,"TERMINAL",0,0,"67",,terminal_output
+3886,4081256,"TERMINAL",0,0,"78",,terminal_output
+3887,4082292,"TERMINAL",0,0,"89",,terminal_output
+3888,4083343,"TERMINAL",0,0,"920",,terminal_output
+3889,4084400,"TERMINAL",0,0,"501",,terminal_output
+3890,4085477,"TERMINAL",0,0,"12",,terminal_output
+3891,4086472,"TERMINAL",0,0,"23",,terminal_output
+3892,4087511,"TERMINAL",0,0,"34",,terminal_output
+3893,4088548,"TERMINAL",0,0,"45",,terminal_output
+3894,4089674,"TERMINAL",0,0,"56",,terminal_output
+3895,4090700,"TERMINAL",0,0,"67",,terminal_output
+3896,4091723,"TERMINAL",0,0,"78",,terminal_output
+3897,4092700,"TERMINAL",0,0,"89",,terminal_output
+3898,4093874,"TERMINAL",0,0,"930",,terminal_output
+3899,4094780,"TERMINAL",0,0,"8:001",,terminal_output
+3900,4095820,"TERMINAL",0,0,"12",,terminal_output
+3901,4096920,"TERMINAL",0,0,"23",,terminal_output
+3902,4097895,"TERMINAL",0,0,"34",,terminal_output
+3903,4098995,"TERMINAL",0,0,"45",,terminal_output
+3904,4099977,"TERMINAL",0,0,"56",,terminal_output
+3905,4101013,"TERMINAL",0,0,"68",,terminal_output
+3906,4102050,"TERMINAL",0,0,"89",,terminal_output
+3907,4103093,"TERMINAL",0,0,"940",,terminal_output
+3908,4104129,"TERMINAL",0,0,"101",,terminal_output
+3909,4105164,"TERMINAL",0,0,"12",,terminal_output
+3910,4106203,"TERMINAL",0,0,"23",,terminal_output
+3911,4107247,"TERMINAL",0,0,"34",,terminal_output
+3912,4108362,"TERMINAL",0,0,"45",,terminal_output
+3913,4109359,"TERMINAL",0,0,"56",,terminal_output
+3914,4110371,"TERMINAL",0,0,"67",,terminal_output
+3915,4111407,"TERMINAL",0,0,"78",,terminal_output
+3916,4112446,"TERMINAL",0,0,"89",,terminal_output
+3917,4113537,"TERMINAL",0,0,"950",,terminal_output
+3918,4114764,"TERMINAL",0,0,"201",,terminal_output
+3919,4115584,"TERMINAL",0,0,"12",,terminal_output
+3920,4116607,"TERMINAL",0,0,"23",,terminal_output
+3921,4117721,"TERMINAL",0,0,"34",,terminal_output
+3922,4118760,"TERMINAL",0,0,"45",,terminal_output
+3923,4119784,"TERMINAL",0,0,"56",,terminal_output
+3924,4120959,"TERMINAL",0,0,"67",,terminal_output
+3925,4121810,"TERMINAL",0,0,"78",,terminal_output
+3926,4122878,"TERMINAL",0,0,"89",,terminal_output
+3927,4123884,"TERMINAL",0,0,"93:00",,terminal_output
+3928,4124928,"TERMINAL",0,0,"301",,terminal_output
+3929,4125978,"TERMINAL",0,0,"12",,terminal_output
+3930,4127006,"TERMINAL",0,0,"23",,terminal_output
+3931,4128044,"TERMINAL",0,0,"35",,terminal_output
+3932,4129099,"TERMINAL",0,0,"56",,terminal_output
+3933,4130432,"TERMINAL",0,0,"67",,terminal_output
+3934,4131154,"TERMINAL",0,0,"78",,terminal_output
+3935,4132188,"TERMINAL",0,0,"89",,terminal_output
+3936,4133230,"TERMINAL",0,0,"910",,terminal_output
+3937,4134274,"TERMINAL",0,0,"401",,terminal_output
+3938,4135309,"TERMINAL",0,0,"12",,terminal_output
+3939,4136370,"TERMINAL",0,0,"23",,terminal_output
+3940,4137598,"TERMINAL",0,0,"34",,terminal_output
+3941,4138425,"TERMINAL",0,0,"45",,terminal_output
+3942,4139648,"TERMINAL",0,0,"56",,terminal_output
+3943,4140577,"TERMINAL",0,0,"67",,terminal_output
+3944,4141593,"TERMINAL",0,0,"78",,terminal_output
+3945,4142616,"TERMINAL",0,0,"89",,terminal_output
+3946,4143745,"TERMINAL",0,0,"920",,terminal_output
+3947,4144696,"TERMINAL",0,0,"501",,terminal_output
+3948,4145741,"TERMINAL",0,0,"12",,terminal_output
+3949,4146779,"TERMINAL",0,0,"23",,terminal_output
+3950,4147885,"TERMINAL",0,0,"34",,terminal_output
+3951,4148899,"TERMINAL",0,0,"45",,terminal_output
+3952,4149924,"TERMINAL",0,0,"56",,terminal_output
+3953,4150943,"TERMINAL",0,0,"67",,terminal_output
+3954,4151978,"TERMINAL",0,0,"78",,terminal_output
+3955,4153019,"TERMINAL",0,0,"830",,terminal_output
+3956,4154058,"TERMINAL",0,0,"9:001",,terminal_output
+3957,4155095,"TERMINAL",0,0,"12",,terminal_output
+3958,4156142,"TERMINAL",0,0,"23",,terminal_output
+3959,4157170,"TERMINAL",0,0,"34",,terminal_output
+3960,4158283,"TERMINAL",0,0,"45",,terminal_output
+3961,4159303,"TERMINAL",0,0,"56",,terminal_output
+3962,4160288,"TERMINAL",0,0,"67",,terminal_output
+3963,4161459,"TERMINAL",0,0,"78",,terminal_output
+3964,4162379,"TERMINAL",0,0,"89",,terminal_output
+3965,4163416,"TERMINAL",0,0,"940",,terminal_output
+3966,4164436,"TERMINAL",0,0,"101",,terminal_output
+3967,4165558,"TERMINAL",0,0,"12",,terminal_output
+3968,4166577,"TERMINAL",0,0,"23",,terminal_output
+3969,4167601,"TERMINAL",0,0,"34",,terminal_output
+3970,4168729,"TERMINAL",0,0,"45",,terminal_output
+3971,4169676,"TERMINAL",0,0,"56",,terminal_output
+3972,4170808,"TERMINAL",0,0,"67",,terminal_output
+3973,4171800,"TERMINAL",0,0,"78",,terminal_output
+3974,4172795,"TERMINAL",0,0,"89",,terminal_output +3975,4173834,"TERMINAL",0,0,"950",,terminal_output +3976,4174911,"TERMINAL",0,0,"201",,terminal_output +3977,4176002,"TERMINAL",0,0,"12",,terminal_output +3978,4176948,"TERMINAL",0,0,"23",,terminal_output +3979,4178049,"TERMINAL",0,0,"34",,terminal_output +3980,4179040,"TERMINAL",0,0,"46",,terminal_output +3981,4180094,"TERMINAL",0,0,"67",,terminal_output +3982,4181138,"TERMINAL",0,0,"78",,terminal_output +3983,4182179,"TERMINAL",0,0,"89",,terminal_output +3984,4183316,"TERMINAL",0,0,"94:00",,terminal_output +3985,4184313,"TERMINAL",0,0,"301",,terminal_output +3986,4185318,"TERMINAL",0,0,"12",,terminal_output +3987,4186341,"TERMINAL",0,0,"23",,terminal_output +3988,4187493,"TERMINAL",0,0,"34",,terminal_output +3989,4188508,"TERMINAL",0,0,"45",,terminal_output +3990,4189460,"TERMINAL",0,0,"56",,terminal_output +3991,4190576,"TERMINAL",0,0,"67",,terminal_output +3992,4191735,"TERMINAL",0,0,"78",,terminal_output +3993,4192606,"TERMINAL",0,0,"89",,terminal_output +3994,4193714,"TERMINAL",0,0,"910",,terminal_output +3995,4194738,"TERMINAL",0,0,"401",,terminal_output +3996,4195765,"TERMINAL",0,0,"12",,terminal_output +3997,4196787,"TERMINAL",0,0,"23",,terminal_output +3998,4197792,"TERMINAL",0,0,"34",,terminal_output +3999,4198838,"TERMINAL",0,0,"45",,terminal_output +4000,4199959,"TERMINAL",0,0,"56",,terminal_output +4001,4200988,"TERMINAL",0,0,"67",,terminal_output +4002,4202009,"TERMINAL",0,0,"78",,terminal_output +4003,4203035,"TERMINAL",0,0,"89",,terminal_output +4004,4204031,"TERMINAL",0,0,"921",,terminal_output +4005,4205072,"TERMINAL",0,0,"512",,terminal_output +4006,4206126,"TERMINAL",0,0,"23",,terminal_output +4007,4207235,"TERMINAL",0,0,"34",,terminal_output +4008,4208224,"TERMINAL",0,0,"45",,terminal_output +4009,4209300,"TERMINAL",0,0,"56",,terminal_output +4010,4210510,"TERMINAL",0,0,"67",,terminal_output +4011,4211366,"TERMINAL",0,0,"78",,terminal_output +4012,4212454,"TERMINAL",0,0,"89",,terminal_output +4013,4213476,"TERMINAL",0,0,"930",,terminal_output +4014,4214708,"TERMINAL",0,0,"10:001",,terminal_output +4015,4215703,"TERMINAL",0,0,"12",,terminal_output +4016,4216656,"TERMINAL",0,0,"23",,terminal_output +4017,4217909,"TERMINAL",0,0,"34",,terminal_output +4018,4218802,"TERMINAL",0,0,"45",,terminal_output +4019,4219921,"TERMINAL",0,0,"56",,terminal_output +4020,4220857,"TERMINAL",0,0,"67",,terminal_output +4021,4221915,"TERMINAL",0,0,"78",,terminal_output +4022,4222932,"TERMINAL",0,0,"89",,terminal_output +4023,4223985,"TERMINAL",0,0,"940",,terminal_output +4024,4224953,"TERMINAL",0,0,"101",,terminal_output +4025,4226075,"TERMINAL",0,0,"12",,terminal_output +4026,4227110,"TERMINAL",0,0,"24",,terminal_output +4027,4228041,"TERMINAL",0,0,"45",,terminal_output +4028,4229089,"TERMINAL",0,0,"56",,terminal_output +4029,4230126,"TERMINAL",0,0,"67",,terminal_output +4030,4231200,"TERMINAL",0,0,"78",,terminal_output +4031,4232217,"TERMINAL",0,0,"89",,terminal_output +4032,4233349,"TERMINAL",0,0,"950",,terminal_output +4033,4234298,"TERMINAL",0,0,"201",,terminal_output +4034,4235401,"TERMINAL",0,0,"12",,terminal_output +4035,4236372,"TERMINAL",0,0,"23",,terminal_output +4036,4237445,"TERMINAL",0,0,"34",,terminal_output +4037,4238571,"TERMINAL",0,0,"45",,terminal_output +4038,4239589,"TERMINAL",0,0,"56",,terminal_output +4039,4240615,"TERMINAL",0,0,"67",,terminal_output +4040,4241638,"TERMINAL",0,0,"78",,terminal_output +4041,4242661,"TERMINAL",0,0,"89",,terminal_output +4042,4243686,"TERMINAL",0,0,"95:00",,terminal_output 
+4043,4244710,"TERMINAL",0,0,"301",,terminal_output +4044,4245976,"TERMINAL",0,0,"12",,terminal_output +4045,4246862,"TERMINAL",0,0,"23",,terminal_output +4046,4247988,"TERMINAL",0,0,"34",,terminal_output +4047,4249013,"TERMINAL",0,0,"45",,terminal_output +4048,4249966,"TERMINAL",0,0,"56",,terminal_output +4049,4251061,"TERMINAL",0,0,"67",,terminal_output +4050,4251994,"TERMINAL",0,0,"78",,terminal_output +4051,4253042,"TERMINAL",0,0,"810",,terminal_output +4052,4254085,"TERMINAL",0,0,"401",,terminal_output +4053,4255135,"TERMINAL",0,0,"12",,terminal_output +4054,4256387,"TERMINAL",0,0,"23",,terminal_output +4055,4257229,"TERMINAL",0,0,"34",,terminal_output +4056,4258337,"TERMINAL",0,0,"45",,terminal_output +4057,4259364,"TERMINAL",0,0,"56",,terminal_output +4058,4260382,"TERMINAL",0,0,"67",,terminal_output +4059,4261424,"TERMINAL",0,0,"78",,terminal_output +4060,4262527,"TERMINAL",0,0,"89",,terminal_output +4061,4263518,"TERMINAL",0,0,"920",,terminal_output +4062,4264575,"TERMINAL",0,0,"501",,terminal_output +4063,4265805,"TERMINAL",0,0,"12",,terminal_output +4064,4266673,"TERMINAL",0,0,"23",,terminal_output +4065,4267749,"TERMINAL",0,0,"34",,terminal_output +4066,4268774,"TERMINAL",0,0,"45",,terminal_output +4067,4269932,"TERMINAL",0,0,"56",,terminal_output +4068,4270959,"TERMINAL",0,0,"67",,terminal_output +4069,4271942,"TERMINAL",0,0,"78",,terminal_output +4070,4272925,"TERMINAL",0,0,"89",,terminal_output +4071,4273980,"TERMINAL",0,0,"930",,terminal_output +4072,4275126,"TERMINAL",0,0,"1:002",,terminal_output +4073,4276062,"TERMINAL",0,0,"23",,terminal_output +4074,4277123,"TERMINAL",0,0,"34",,terminal_output +4075,4278147,"TERMINAL",0,0,"45",,terminal_output +4076,4279196,"TERMINAL",0,0,"56",,terminal_output +4077,4280450,"TERMINAL",0,0,"67",,terminal_output +4078,4281283,"TERMINAL",0,0,"78",,terminal_output +4079,4282327,"TERMINAL",0,0,"89",,terminal_output +4080,4283419,"TERMINAL",0,0,"940",,terminal_output +4081,4284398,"TERMINAL",0,0,"101",,terminal_output +4082,4285446,"TERMINAL",0,0,"12",,terminal_output +4083,4286487,"TERMINAL",0,0,"23",,terminal_output +4084,4287581,"TERMINAL",0,0,"34",,terminal_output +4085,4288639,"TERMINAL",0,0,"45",,terminal_output +4086,4289601,"TERMINAL",0,0,"56",,terminal_output +4087,4290689,"TERMINAL",0,0,"67",,terminal_output +4088,4291719,"TERMINAL",0,0,"78",,terminal_output +4089,4292746,"TERMINAL",0,0,"89",,terminal_output +4090,4293752,"TERMINAL",0,0,"950",,terminal_output +4091,4294994,"TERMINAL",0,0,"201",,terminal_output +4092,4295845,"TERMINAL",0,0,"12",,terminal_output +4093,4296881,"TERMINAL",0,0,"23",,terminal_output +4094,4297968,"TERMINAL",0,0,"34",,terminal_output +4095,4298971,"TERMINAL",0,0,"45",,terminal_output +4096,4300110,"TERMINAL",0,0,"56",,terminal_output +4097,4301143,"TERMINAL",0,0,"78",,terminal_output +4098,4302098,"TERMINAL",0,0,"89",,terminal_output +4099,4303283,"TERMINAL",0,0,"96:00",,terminal_output +4100,4304264,"TERMINAL",0,0,"301",,terminal_output +4101,4305211,"TERMINAL",0,0,"12",,terminal_output +4102,4306241,"TERMINAL",0,0,"23",,terminal_output +4103,4307284,"TERMINAL",0,0,"34",,terminal_output +4104,4308403,"TERMINAL",0,0,"45",,terminal_output +4105,4309374,"TERMINAL",0,0,"56",,terminal_output +4106,4310409,"TERMINAL",0,0,"67",,terminal_output +4107,4311781,"TERMINAL",0,0,"78",,terminal_output +4108,4312837,"TERMINAL",0,0,"89",,terminal_output +4109,4313965,"TERMINAL",0,0,"910",,terminal_output +4110,4315060,"TERMINAL",0,0,"401",,terminal_output +4111,4315931,"TERMINAL",0,0,"12",,terminal_output 
+4112,4316987,"TERMINAL",0,0,"23",,terminal_output +4113,4318021,"TERMINAL",0,0,"35",,terminal_output +4114,4319069,"TERMINAL",0,0,"56",,terminal_output +4115,4320101,"TERMINAL",0,0,"67",,terminal_output +4116,4321148,"TERMINAL",0,0,"78",,terminal_output +4117,4322203,"TERMINAL",0,0,"89",,terminal_output +4118,4323230,"TERMINAL",0,0,"920",,terminal_output +4119,4324275,"TERMINAL",0,0,"501",,terminal_output +4120,4325315,"TERMINAL",0,0,"12",,terminal_output +4121,4326364,"TERMINAL",0,0,"23",,terminal_output +4122,4327407,"TERMINAL",0,0,"34",,terminal_output +4123,4328573,"TERMINAL",0,0,"45",,terminal_output +4124,4329597,"TERMINAL",0,0,"56",,terminal_output +4125,4330624,"TERMINAL",0,0,"67",,terminal_output +4126,4331562,"TERMINAL",0,0,"78",,terminal_output +4127,4332669,"TERMINAL",0,0,"89",,terminal_output +4128,4333798,"TERMINAL",0,0,"930",,terminal_output +4129,4334694,"TERMINAL",0,0,"2:001",,terminal_output +4130,4335846,"TERMINAL",0,0,"12",,terminal_output +4131,4336871,"TERMINAL",0,0,"23",,terminal_output +4132,4338003,"TERMINAL",0,0,"34",,terminal_output +4133,4339020,"TERMINAL",0,0,"45",,terminal_output +4134,4340044,"TERMINAL",0,0,"56",,terminal_output +4135,4341172,"TERMINAL",0,0,"67",,terminal_output +4136,4341993,"TERMINAL",0,0,"78",,terminal_output +4137,4343039,"TERMINAL",0,0,"840",,terminal_output +4138,4344087,"TERMINAL",0,0,"101",,terminal_output +4139,4345130,"TERMINAL",0,0,"12",,terminal_output +4140,4346172,"TERMINAL",0,0,"23",,terminal_output +4141,4347216,"TERMINAL",0,0,"34",,terminal_output +4142,4348443,"TERMINAL",0,0,"45",,terminal_output +4143,4349299,"TERMINAL",0,0,"56",,terminal_output +4144,4350397,"TERMINAL",0,0,"67",,terminal_output +4145,4351383,"TERMINAL",0,0,"78",,terminal_output +4146,4352537,"TERMINAL",0,0,"89",,terminal_output +4147,4353461,"TERMINAL",0,0,"950",,terminal_output +4148,4354497,"TERMINAL",0,0,"201",,terminal_output +4149,4355538,"TERMINAL",0,0,"12",,terminal_output +4150,4356632,"TERMINAL",0,0,"23",,terminal_output +4151,4357690,"TERMINAL",0,0,"34",,terminal_output +4152,4358655,"TERMINAL",0,0,"45",,terminal_output +4153,4359697,"TERMINAL",0,0,"56",,terminal_output +4154,4360741,"TERMINAL",0,0,"67",,terminal_output +4155,4362095,"TERMINAL",0,0,"78",,terminal_output +4156,4362808,"TERMINAL",0,0,"89",,terminal_output +4157,4363857,"TERMINAL",0,0,"97:00",,terminal_output +4158,4364972,"TERMINAL",0,0,"301",,terminal_output +4159,4365984,"TERMINAL",0,0,"12",,terminal_output +4160,4366980,"TERMINAL",0,0,"23",,terminal_output +4161,4368112,"TERMINAL",0,0,"34",,terminal_output +4162,4369126,"TERMINAL",0,0,"46",,terminal_output +4163,4370086,"TERMINAL",0,0,"67",,terminal_output +4164,4371178,"TERMINAL",0,0,"78",,terminal_output +4165,4372175,"TERMINAL",0,0,"89",,terminal_output +4166,4373217,"TERMINAL",0,0,"910",,terminal_output +4167,4374255,"TERMINAL",0,0,"401",,terminal_output +4168,4375293,"TERMINAL",0,0,"12",,terminal_output +4169,4376500,"TERMINAL",0,0,"23",,terminal_output +4170,4377371,"TERMINAL",0,0,"34",,terminal_output +4171,4378414,"TERMINAL",0,0,"45",,terminal_output +4172,4379450,"TERMINAL",0,0,"56",,terminal_output +4173,4380493,"TERMINAL",0,0,"67",,terminal_output +4174,4381719,"TERMINAL",0,0,"78",,terminal_output +4175,4382749,"TERMINAL",0,0,"89",,terminal_output +4176,4383770,"TERMINAL",0,0,"920",,terminal_output +4177,4384695,"TERMINAL",0,0,"501",,terminal_output +4178,4385722,"TERMINAL",0,0,"12",,terminal_output +4179,4386975,"TERMINAL",0,0,"23",,terminal_output +4180,4387868,"TERMINAL",0,0,"34",,terminal_output 
+4181,4388843,"TERMINAL",0,0,"45",,terminal_output +4182,4390016,"TERMINAL",0,0,"56",,terminal_output +4183,4391004,"TERMINAL",0,0,"67",,terminal_output +4184,4392065,"TERMINAL",0,0,"78",,terminal_output +4185,4393100,"TERMINAL",0,0,"830",,terminal_output +4186,4394113,"TERMINAL",0,0,"3:001",,terminal_output +4187,4395117,"TERMINAL",0,0,"12",,terminal_output +4188,4396152,"TERMINAL",0,0,"23",,terminal_output +4189,4397208,"TERMINAL",0,0,"34",,terminal_output +4190,4398420,"TERMINAL",0,0,"45",,terminal_output +4191,4399285,"TERMINAL",0,0,"56",,terminal_output +4192,4400323,"TERMINAL",0,0,"67",,terminal_output +4193,4401382,"TERMINAL",0,0,"78",,terminal_output +4194,4402613,"TERMINAL",0,0,"89",,terminal_output +4195,4403639,"TERMINAL",0,0,"940",,terminal_output +4196,4404515,"TERMINAL",0,0,"101",,terminal_output +4197,4405561,"TERMINAL",0,0,"12",,terminal_output +4198,4406612,"TERMINAL",0,0,"23",,terminal_output +4199,4407652,"TERMINAL",0,0,"34",,terminal_output +4200,4408758,"TERMINAL",0,0,"45",,terminal_output +4201,4409781,"TERMINAL",0,0,"56",,terminal_output +4202,4410803,"TERMINAL",0,0,"67",,terminal_output +4203,4411832,"TERMINAL",0,0,"78",,terminal_output +4204,4412886,"TERMINAL",0,0,"89",,terminal_output +4205,4413928,"TERMINAL",0,0,"950",,terminal_output +4206,4415002,"TERMINAL",0,0,"201",,terminal_output +4207,4416028,"TERMINAL",0,0,"13",,terminal_output +4208,4417049,"TERMINAL",0,0,"34",,terminal_output +4209,4418087,"TERMINAL",0,0,"45",,terminal_output +4210,4419130,"TERMINAL",0,0,"56",,terminal_output +4211,4420159,"TERMINAL",0,0,"67",,terminal_output +4212,4421197,"TERMINAL",0,0,"78",,terminal_output +4213,4422241,"TERMINAL",0,0,"89",,terminal_output +4214,4423294,"TERMINAL",0,0,"98:00",,terminal_output +4215,4424376,"TERMINAL",0,0,"301",,terminal_output +4216,4425376,"TERMINAL",0,0,"12",,terminal_output +4217,4426471,"TERMINAL",0,0,"23",,terminal_output +4218,4427599,"TERMINAL",0,0,"34",,terminal_output +4219,4428524,"TERMINAL",0,0,"45",,terminal_output +4220,4429545,"TERMINAL",0,0,"56",,terminal_output +4221,4430772,"TERMINAL",0,0,"67",,terminal_output +4222,4431658,"TERMINAL",0,0,"78",,terminal_output +4223,4432682,"TERMINAL",0,0,"89",,terminal_output +4224,4433843,"TERMINAL",0,0,"910",,terminal_output +4225,4435005,"TERMINAL",0,0,"401",,terminal_output +4226,4436024,"TERMINAL",0,0,"12",,terminal_output +4227,4436867,"TERMINAL",0,0,"23",,terminal_output +4228,4437919,"TERMINAL",0,0,"34",,terminal_output +4229,4438965,"TERMINAL",0,0,"45",,terminal_output +4230,4440011,"TERMINAL",0,0,"56",,terminal_output +4231,4441223,"TERMINAL",0,0,"78",,terminal_output +4232,4442102,"TERMINAL",0,0,"89",,terminal_output +4233,4443166,"TERMINAL",0,0,"920",,terminal_output +4234,4444238,"TERMINAL",0,0,"501",,terminal_output +4235,4445237,"TERMINAL",0,0,"12",,terminal_output +4236,4446318,"TERMINAL",0,0,"23",,terminal_output +4237,4447331,"TERMINAL",0,0,"34",,terminal_output +4238,4448489,"TERMINAL",0,0,"45",,terminal_output +4239,4449458,"TERMINAL",0,0,"56",,terminal_output +4240,4450501,"TERMINAL",0,0,"67",,terminal_output +4241,4451765,"TERMINAL",0,0,"78",,terminal_output +4242,4452788,"TERMINAL",0,0,"89",,terminal_output +4243,4453714,"TERMINAL",0,0,"930",,terminal_output +4244,4454687,"TERMINAL",0,0,"4:001",,terminal_output +4245,4455748,"TERMINAL",0,0,"12",,terminal_output +4246,4456790,"TERMINAL",0,0,"23",,terminal_output +4247,4457907,"TERMINAL",0,0,"34",,terminal_output +4248,4458877,"TERMINAL",0,0,"45",,terminal_output +4249,4460059,"TERMINAL",0,0,"56",,terminal_output 
+4250,4461092,"TERMINAL",0,0,"67",,terminal_output +4251,4462031,"TERMINAL",0,0,"79",,terminal_output +4252,4463067,"TERMINAL",0,0,"940",,terminal_output +4253,4464145,"TERMINAL",0,0,"101",,terminal_output +4254,4465178,"TERMINAL",0,0,"12",,terminal_output +4255,4466207,"TERMINAL",0,0,"23",,terminal_output +4256,4467252,"TERMINAL",0,0,"34",,terminal_output +4257,4468298,"TERMINAL",0,0,"45",,terminal_output +4258,4469362,"TERMINAL",0,0,"56",,terminal_output +4259,4470390,"TERMINAL",0,0,"67",,terminal_output +4260,4471461,"TERMINAL",0,0,"78",,terminal_output +4261,4472505,"TERMINAL",0,0,"89",,terminal_output +4262,4473590,"TERMINAL",0,0,"950",,terminal_output +4263,4474602,"TERMINAL",0,0,"201",,terminal_output +4264,4475718,"TERMINAL",0,0,"12",,terminal_output +4265,4476684,"TERMINAL",0,0,"23",,terminal_output +4266,4477876,"TERMINAL",0,0,"34",,terminal_output +4267,4478762,"TERMINAL",0,0,"45",,terminal_output +4268,4479927,"TERMINAL",0,0,"56",,terminal_output +4269,4480952,"TERMINAL",0,0,"67",,terminal_output +4270,4482080,"TERMINAL",0,0,"78",,terminal_output +4271,4483043,"TERMINAL",0,0,"89",,terminal_output +4272,4484057,"TERMINAL",0,0,"99:00",,terminal_output +4273,4485033,"TERMINAL",0,0,"302",,terminal_output +4274,4486115,"TERMINAL",0,0,"23",,terminal_output +4275,4487225,"TERMINAL",0,0,"34",,terminal_output +4276,4488176,"TERMINAL",0,0,"45",,terminal_output +4277,4489250,"TERMINAL",0,0,"56",,terminal_output +4278,4490258,"TERMINAL",0,0,"67",,terminal_output +4279,4491443,"TERMINAL",0,0,"78",,terminal_output +4280,4492367,"TERMINAL",0,0,"89",,terminal_output +4281,4493443,"TERMINAL",0,0,"910",,terminal_output +4282,4494570,"TERMINAL",0,0,"401",,terminal_output +4283,4495568,"TERMINAL",0,0,"12",,terminal_output +4284,4496727,"TERMINAL",0,0,"23",,terminal_output +4285,4497669,"TERMINAL",0,0,"34",,terminal_output +4286,4498766,"TERMINAL",0,0,"45",,terminal_output +4287,4499790,"TERMINAL",0,0,"56",,terminal_output +4288,4500814,"TERMINAL",0,0,"67",,terminal_output +4289,4501859,"TERMINAL",0,0,"78",,terminal_output +4290,4502965,"TERMINAL",0,0,"89",,terminal_output +4291,4504032,"TERMINAL",0,0,"920",,terminal_output +4292,4505116,"TERMINAL",0,0,"501",,terminal_output +4293,4506347,"TERMINAL",0,0,"13",,terminal_output +4294,4507434,"TERMINAL",0,0,"34",,terminal_output +4295,4508154,"TERMINAL",0,0,"45",,terminal_output +4296,4509198,"TERMINAL",0,0,"56",,terminal_output +4297,4510257,"TERMINAL",0,0,"67",,terminal_output +4298,4511325,"TERMINAL",0,0,"78",,terminal_output +4299,4512386,"TERMINAL",0,0,"89",,terminal_output +4300,4513410,"TERMINAL",0,0,"930",,terminal_output +4301,4514432,"TERMINAL",0,0,"5:001",,terminal_output +4302,4515560,"TERMINAL",0,0,"12",,terminal_output +4303,4516687,"TERMINAL",0,0,"23",,terminal_output +4304,4517713,"TERMINAL",0,0,"34",,terminal_output +4305,4518734,"TERMINAL",0,0,"45",,terminal_output +4306,4519869,"TERMINAL",0,0,"56",,terminal_output +4307,4520706,"TERMINAL",0,0,"67",,terminal_output +4308,4521754,"TERMINAL",0,0,"78",,terminal_output +4309,4522831,"TERMINAL",0,0,"89",,terminal_output +4310,4523855,"TERMINAL",0,0,"940",,terminal_output +4311,4524886,"TERMINAL",0,0,"101",,terminal_output +4312,4526135,"TERMINAL",0,0,"12",,terminal_output +4313,4527027,"TERMINAL",0,0,"23",,terminal_output +4314,4528028,"TERMINAL",0,0,"35",,terminal_output +4315,4529079,"TERMINAL",0,0,"56",,terminal_output +4316,4530136,"TERMINAL",0,0,"67",,terminal_output +4317,4531177,"TERMINAL",0,0,"78",,terminal_output +4318,4532233,"TERMINAL",0,0,"89",,terminal_output 
+4319,4533277,"TERMINAL",0,0,"950",,terminal_output +4320,4534310,"TERMINAL",0,0,"201",,terminal_output +4321,4535358,"TERMINAL",0,0,"12",,terminal_output +4322,4536451,"TERMINAL",0,0,"23",,terminal_output +4323,4537452,"TERMINAL",0,0,"34",,terminal_output +4324,4538701,"TERMINAL",0,0,"45",,terminal_output +4325,4539543,"TERMINAL",0,0,"56",,terminal_output +4326,4540611,"TERMINAL",0,0,"67",,terminal_output +4327,4541627,"TERMINAL",0,0,"78",,terminal_output +4328,4542696,"TERMINAL",0,0,"89",,terminal_output +4329,4543926,"TERMINAL",0,0,"930:00",,terminal_output +4330,4544949,"TERMINAL",0,0,"301",,terminal_output +4331,4545803,"TERMINAL",0,0,"12",,terminal_output +4332,4546869,"TERMINAL",0,0,"23",,terminal_output +4333,4548022,"TERMINAL",0,0,"34",,terminal_output +4334,4548951,"TERMINAL",0,0,"45",,terminal_output +4335,4549990,"TERMINAL",0,0,"56",,terminal_output +4336,4551071,"TERMINAL",0,0,"68",,terminal_output +4337,4552089,"TERMINAL",0,0,"89",,terminal_output +4338,4553128,"TERMINAL",0,0,"910",,terminal_output +4339,4554233,"TERMINAL",0,0,"401",,terminal_output +4340,4555220,"TERMINAL",0,0,"12",,terminal_output +4341,4556262,"TERMINAL",0,0,"23",,terminal_output +4342,4557309,"TERMINAL",0,0,"34",,terminal_output +4343,4558363,"TERMINAL",0,0,"45",,terminal_output +4344,4559397,"TERMINAL",0,0,"56",,terminal_output +4345,4560520,"TERMINAL",0,0,"67",,terminal_output +4346,4561539,"TERMINAL",0,0,"78",,terminal_output +4347,4562561,"TERMINAL",0,0,"89",,terminal_output +4348,4563737,"TERMINAL",0,0,"920",,terminal_output +4349,4564917,"TERMINAL",0,0,"501",,terminal_output +4350,4565738,"TERMINAL",0,0,"12",,terminal_output +4351,4566761,"TERMINAL",0,0,"23",,terminal_output +4352,4567759,"TERMINAL",0,0,"34",,terminal_output +4353,4568912,"TERMINAL",0,0,"45",,terminal_output +4354,4569936,"TERMINAL",0,0,"56",,terminal_output +4355,4570874,"TERMINAL",0,0,"67",,terminal_output +4356,4572329,"TERMINAL",0,0,"78",,terminal_output +4357,4572954,"TERMINAL",0,0,"89",,terminal_output +4358,4573990,"TERMINAL",0,0,"930",,terminal_output +4359,4575092,"TERMINAL",0,0,"6:002",,terminal_output +4360,4576187,"TERMINAL",0,0,"23",,terminal_output +4361,4577219,"TERMINAL",0,0,"34",,terminal_output +4362,4578231,"TERMINAL",0,0,"45",,terminal_output +4363,4579226,"TERMINAL",0,0,"56",,terminal_output +4364,4580274,"TERMINAL",0,0,"67",,terminal_output +4365,4581410,"TERMINAL",0,0,"78",,terminal_output +4366,4582535,"TERMINAL",0,0,"89",,terminal_output +4367,4583556,"TERMINAL",0,0,"940",,terminal_output +4368,4584458,"TERMINAL",0,0,"101",,terminal_output +4369,4585602,"TERMINAL",0,0,"12",,terminal_output +4370,4586628,"TERMINAL",0,0,"23",,terminal_output +4371,4587626,"TERMINAL",0,0,"34",,terminal_output +4372,4588681,"TERMINAL",0,0,"45",,terminal_output +4373,4589699,"TERMINAL",0,0,"56",,terminal_output +4374,4590770,"TERMINAL",0,0,"67",,terminal_output +4375,4591849,"TERMINAL",0,0,"78",,terminal_output +4376,4592873,"TERMINAL",0,0,"89",,terminal_output +4377,4593900,"TERMINAL",0,0,"950",,terminal_output +4378,4594987,"TERMINAL",0,0,"201",,terminal_output +4379,4596075,"TERMINAL",0,0,"12",,terminal_output +4380,4597079,"TERMINAL",0,0,"24",,terminal_output +4381,4598092,"TERMINAL",0,0,"45",,terminal_output +4382,4599119,"TERMINAL",0,0,"56",,terminal_output +4383,4600156,"TERMINAL",0,0,"67",,terminal_output +4384,4601206,"TERMINAL",0,0,"78",,terminal_output +4385,4602243,"TERMINAL",0,0,"89",,terminal_output +4386,4603284,"TERMINAL",0,0,"91:00",,terminal_output +4387,4604335,"TERMINAL",0,0,"301",,terminal_output 
+4388,4605371,"TERMINAL",0,0,"12",,terminal_output +4389,4606493,"TERMINAL",0,0,"23",,terminal_output +4390,4607519,"TERMINAL",0,0,"34",,terminal_output +4391,4608643,"TERMINAL",0,0,"45",,terminal_output +4392,4609667,"TERMINAL",0,0,"56",,terminal_output +4393,4610793,"TERMINAL",0,0,"67",,terminal_output +4394,4611643,"TERMINAL",0,0,"78",,terminal_output +4395,4612685,"TERMINAL",0,0,"89",,terminal_output +4396,4613765,"TERMINAL",0,0,"910",,terminal_output +4397,4614992,"TERMINAL",0,0,"401",,terminal_output +4398,4616021,"TERMINAL",0,0,"12",,terminal_output +4399,4616938,"TERMINAL",0,0,"23",,terminal_output +4400,4617905,"TERMINAL",0,0,"34",,terminal_output +4401,4618985,"TERMINAL",0,0,"45",,terminal_output +4402,4620112,"TERMINAL",0,0,"56",,terminal_output +4403,4621015,"TERMINAL",0,0,"68",,terminal_output +4404,4622084,"TERMINAL",0,0,"89",,terminal_output +4405,4623190,"TERMINAL",0,0,"920",,terminal_output +4406,4624212,"TERMINAL",0,0,"501",,terminal_output +4407,4625201,"TERMINAL",0,0,"12",,terminal_output +4408,4626259,"TERMINAL",0,0,"23",,terminal_output +4409,4627303,"TERMINAL",0,0,"34",,terminal_output +4410,4628343,"TERMINAL",0,0,"45",,terminal_output +4411,4629398,"TERMINAL",0,0,"56",,terminal_output +4412,4630428,"TERMINAL",0,0,"67",,terminal_output +4413,4631584,"TERMINAL",0,0,"78",,terminal_output +4414,4632706,"TERMINAL",0,0,"89",,terminal_output +4415,4633729,"TERMINAL",0,0,"930",,terminal_output +4416,4634615,"TERMINAL",0,0,"7:001",,terminal_output +4417,4635794,"TERMINAL",0,0,"12",,terminal_output +4418,4636706,"TERMINAL",0,0,"23",,terminal_output +4419,4637747,"TERMINAL",0,0,"34",,terminal_output +4420,4638954,"TERMINAL",0,0,"45",,terminal_output +4421,4639861,"TERMINAL",0,0,"56",,terminal_output +4422,4641002,"TERMINAL",0,0,"67",,terminal_output +4423,4642127,"TERMINAL",0,0,"78",,terminal_output +4424,4642985,"TERMINAL",0,0,"89",,terminal_output +4425,4644031,"TERMINAL",0,0,"941",,terminal_output +4426,4645107,"TERMINAL",0,0,"112",,terminal_output +4427,4646231,"TERMINAL",0,0,"23",,terminal_output +4428,4647168,"TERMINAL",0,0,"34",,terminal_output +4429,4648228,"TERMINAL",0,0,"45",,terminal_output +4430,4649247,"TERMINAL",0,0,"56",,terminal_output +4431,4650319,"TERMINAL",0,0,"67",,terminal_output +4432,4651448,"TERMINAL",0,0,"78",,terminal_output +4433,4652372,"TERMINAL",0,0,"89",,terminal_output +4434,4653595,"TERMINAL",0,0,"950",,terminal_output +4435,4654455,"TERMINAL",0,0,"201",,terminal_output +4436,4655491,"TERMINAL",0,0,"12",,terminal_output +4437,4656567,"TERMINAL",0,0,"23",,terminal_output +4438,4657589,"TERMINAL",0,0,"34",,terminal_output +4439,4658628,"TERMINAL",0,0,"45",,terminal_output +4440,4659669,"TERMINAL",0,0,"56",,terminal_output +4441,4660763,"TERMINAL",0,0,"67",,terminal_output +4442,4661893,"TERMINAL",0,0,"78",,terminal_output +4443,4662815,"TERMINAL",0,0,"89",,terminal_output +4444,4663884,"TERMINAL",0,0,"92:00",,terminal_output +4445,4664962,"TERMINAL",0,0,"301",,terminal_output +4446,4665990,"TERMINAL",0,0,"12",,terminal_output +4447,4667113,"TERMINAL",0,0,"23",,terminal_output +4448,4668136,"TERMINAL",0,0,"35",,terminal_output +4449,4669165,"TERMINAL",0,0,"56",,terminal_output +4450,4670286,"TERMINAL",0,0,"67",,terminal_output +4451,4671214,"TERMINAL",0,0,"78",,terminal_output +4452,4672238,"TERMINAL",0,0,"89",,terminal_output +4453,4673289,"TERMINAL",0,0,"910",,terminal_output +4454,4674338,"TERMINAL",0,0,"401",,terminal_output +4455,4675509,"TERMINAL",0,0,"12",,terminal_output +4456,4676419,"TERMINAL",0,0,"23",,terminal_output 
+4457,4677586,"TERMINAL",0,0,"34",,terminal_output +4458,4678518,"TERMINAL",0,0,"45",,terminal_output +4459,4679608,"TERMINAL",0,0,"56",,terminal_output +4460,4680605,"TERMINAL",0,0,"67",,terminal_output +4461,4681756,"TERMINAL",0,0,"78",,terminal_output +4462,4682782,"TERMINAL",0,0,"89",,terminal_output +4463,4683735,"TERMINAL",0,0,"920",,terminal_output +4464,4684931,"TERMINAL",0,0,"501",,terminal_output +4465,4685854,"TERMINAL",0,0,"12",,terminal_output +4466,4686895,"TERMINAL",0,0,"23",,terminal_output +4467,4687930,"TERMINAL",0,0,"34",,terminal_output +4468,4688971,"TERMINAL",0,0,"45",,terminal_output +4469,4690024,"TERMINAL",0,0,"57",,terminal_output +4470,4691074,"TERMINAL",0,0,"78",,terminal_output +4471,4692201,"TERMINAL",0,0,"89",,terminal_output +4472,4693327,"TERMINAL",0,0,"930",,terminal_output +4473,4694239,"TERMINAL",0,0,"8:001",,terminal_output +4474,4695255,"TERMINAL",0,0,"12",,terminal_output +4475,4696285,"TERMINAL",0,0,"23",,terminal_output +4476,4697353,"TERMINAL",0,0,"34",,terminal_output +4477,4698448,"TERMINAL",0,0,"45",,terminal_output +4478,4699435,"TERMINAL",0,0,"56",,terminal_output +4479,4700487,"TERMINAL",0,0,"67",,terminal_output +4480,4701624,"TERMINAL",0,0,"78",,terminal_output +4481,4702750,"TERMINAL",0,0,"89",,terminal_output +4482,4703619,"TERMINAL",0,0,"940",,terminal_output +4483,4704674,"TERMINAL",0,0,"101",,terminal_output +4484,4705822,"TERMINAL",0,0,"12",,terminal_output +4485,4706949,"TERMINAL",0,0,"23",,terminal_output +4486,4707814,"TERMINAL",0,0,"34",,terminal_output +4487,4708857,"TERMINAL",0,0,"45",,terminal_output +4488,4709920,"TERMINAL",0,0,"56",,terminal_output +4489,4710948,"TERMINAL",0,0,"67",,terminal_output +4490,4711966,"TERMINAL",0,0,"78",,terminal_output +4491,4713006,"TERMINAL",0,0,"89",,terminal_output +4492,4714144,"TERMINAL",0,0,"2051",,terminal_output +4493,4715091,"TERMINAL",0,0,"12",,terminal_output +4494,4716185,"TERMINAL",0,0,"23",,terminal_output +4495,4717290,"TERMINAL",0,0,"34",,terminal_output +4496,4718218,"TERMINAL",0,0,"45",,terminal_output +4497,4719263,"TERMINAL",0,0,"56",,terminal_output +4498,4720299,"TERMINAL",0,0,"67",,terminal_output +4499,4721354,"TERMINAL",0,0,"78",,terminal_output +4500,4722397,"TERMINAL",0,0,"89",,terminal_output +4501,4723443,"TERMINAL",0,0,"93:00",,terminal_output +4502,4724662,"TERMINAL",0,0,"301",,terminal_output +4503,4725583,"TERMINAL",0,0,"12",,terminal_output +4504,4726608,"TERMINAL",0,0,"23",,terminal_output +4505,4727735,"TERMINAL",0,0,"34",,terminal_output +4506,4728773,"TERMINAL",0,0,"45",,terminal_output +4507,4729784,"TERMINAL",0,0,"56",,terminal_output +4508,4730912,"TERMINAL",0,0,"67",,terminal_output +4509,4731868,"TERMINAL",0,0,"78",,terminal_output +4510,4732833,"TERMINAL",0,0,"89",,terminal_output +4511,4734085,"TERMINAL",0,0,"910",,terminal_output +4512,4735006,"TERMINAL",0,0,"401",,terminal_output +4513,4736029,"TERMINAL",0,0,"12",,terminal_output +4514,4737054,"TERMINAL",0,0,"24",,terminal_output +4515,4738090,"TERMINAL",0,0,"45",,terminal_output +4516,4739207,"TERMINAL",0,0,"56",,terminal_output +4517,4740236,"TERMINAL",0,0,"67",,terminal_output +4518,4741254,"TERMINAL",0,0,"78",,terminal_output +4519,4742266,"TERMINAL",0,0,"89",,terminal_output +4520,4743312,"TERMINAL",0,0,"920",,terminal_output +4521,4744386,"TERMINAL",0,0,"501",,terminal_output +4522,4745397,"TERMINAL",0,0,"12",,terminal_output +4523,4746575,"TERMINAL",0,0,"23",,terminal_output +4524,4747601,"TERMINAL",0,0,"34",,terminal_output +4525,4748565,"TERMINAL",0,0,"45",,terminal_output 
+4526,4749586,"TERMINAL",0,0,"56",,terminal_output +4527,4750777,"TERMINAL",0,0,"67",,terminal_output +4528,4751698,"TERMINAL",0,0,"78",,terminal_output +4529,4752925,"TERMINAL",0,0,"89",,terminal_output +4530,4753951,"TERMINAL",0,0,"930",,terminal_output +4531,4754975,"TERMINAL",0,0,"9:001",,terminal_output +4532,4755896,"TERMINAL",0,0,"12",,terminal_output +4533,4757025,"TERMINAL",0,0,"23",,terminal_output +4534,4757946,"TERMINAL",0,0,"34",,terminal_output +4535,4759080,"TERMINAL",0,0,"45",,terminal_output +4536,4760095,"TERMINAL",0,0,"57",,terminal_output +4537,4761099,"TERMINAL",0,0,"78",,terminal_output +4538,4762279,"TERMINAL",0,0,"89",,terminal_output +4539,4763176,"TERMINAL",0,0,"940",,terminal_output +4540,4764228,"TERMINAL",0,0,"101",,terminal_output +4541,4765265,"TERMINAL",0,0,"12",,terminal_output +4542,4766307,"TERMINAL",0,0,"23",,terminal_output +4543,4767365,"TERMINAL",0,0,"34",,terminal_output +4544,4768591,"TERMINAL",0,0,"45",,terminal_output +4545,4769432,"TERMINAL",0,0,"56",,terminal_output +4546,4770544,"TERMINAL",0,0,"67",,terminal_output +4547,4771561,"TERMINAL",0,0,"78",,terminal_output +4548,4772595,"TERMINAL",0,0,"89",,terminal_output +4549,4773714,"TERMINAL",0,0,"950",,terminal_output +4550,4774738,"TERMINAL",0,0,"201",,terminal_output +4551,4775769,"TERMINAL",0,0,"12",,terminal_output +4552,4776890,"TERMINAL",0,0,"23",,terminal_output +4553,4777812,"TERMINAL",0,0,"34",,terminal_output +4554,4778834,"TERMINAL",0,0,"45",,terminal_output +4555,4779897,"TERMINAL",0,0,"56",,terminal_output +4556,4781087,"TERMINAL",0,0,"67",,terminal_output +4557,4782009,"TERMINAL",0,0,"78",,terminal_output +4558,4782981,"TERMINAL",0,0,"89",,terminal_output +4559,4784200,"TERMINAL",0,0,"94:01",,terminal_output +4560,4785184,"TERMINAL",0,0,"312",,terminal_output +4561,4786309,"TERMINAL",0,0,"23",,terminal_output +4562,4787339,"TERMINAL",0,0,"34",,terminal_output +4563,4788263,"TERMINAL",0,0,"45",,terminal_output +4564,4789245,"TERMINAL",0,0,"56",,terminal_output +4565,4790304,"TERMINAL",0,0,"67",,terminal_output +4566,4791329,"TERMINAL",0,0,"78",,terminal_output +4567,4792451,"TERMINAL",0,0,"89",,terminal_output +4568,4793478,"TERMINAL",0,0,"910",,terminal_output +4569,4794452,"TERMINAL",0,0,"401",,terminal_output +4570,4795496,"TERMINAL",0,0,"12",,terminal_output +4571,4796548,"TERMINAL",0,0,"23",,terminal_output +4572,4797624,"TERMINAL",0,0,"34",,terminal_output +4573,4798704,"TERMINAL",0,0,"45",,terminal_output +4574,4799652,"TERMINAL",0,0,"56",,terminal_output +4575,4800850,"TERMINAL",0,0,"67",,terminal_output +4576,4801732,"TERMINAL",0,0,"78",,terminal_output +4577,4802897,"TERMINAL",0,0,"89",,terminal_output +4578,4803827,"TERMINAL",0,0,"920",,terminal_output +4579,4804948,"TERMINAL",0,0,"501",,terminal_output +4580,4805970,"TERMINAL",0,0,"12",,terminal_output +4581,4806949,"TERMINAL",0,0,"23",,terminal_output +4582,4808016,"TERMINAL",0,0,"34",,terminal_output +4583,4809041,"TERMINAL",0,0,"46",,terminal_output +4584,4810169,"TERMINAL",0,0,"67",,terminal_output +4585,4811113,"TERMINAL",0,0,"78",,terminal_output +4586,4812222,"TERMINAL",0,0,"89",,terminal_output +4587,4813241,"TERMINAL",0,0,"930",,terminal_output +4588,4814229,"TERMINAL",0,0,"20:001",,terminal_output +4589,4815284,"TERMINAL",0,0,"12",,terminal_output +4590,4816325,"TERMINAL",0,0,"23",,terminal_output +4591,4817360,"TERMINAL",0,0,"34",,terminal_output +4592,4818430,"TERMINAL",0,0,"45",,terminal_output +4593,4819452,"TERMINAL",0,0,"56",,terminal_output +4594,4820509,"TERMINAL",0,0,"67",,terminal_output 
+4595,4821647,"TERMINAL",0,0,"78",,terminal_output +4596,4822719,"TERMINAL",0,0,"89",,terminal_output +4597,4823675,"TERMINAL",0,0,"940",,terminal_output +4598,4824656,"TERMINAL",0,0,"101",,terminal_output +4599,4825704,"TERMINAL",0,0,"12",,terminal_output +4600,4826738,"TERMINAL",0,0,"23",,terminal_output +4601,4827886,"TERMINAL",0,0,"34",,terminal_output +4602,4828916,"TERMINAL",0,0,"45",,terminal_output +4603,4829858,"TERMINAL",0,0,"56",,terminal_output +4604,4830897,"TERMINAL",0,0,"67",,terminal_output +4605,4831934,"TERMINAL",0,0,"78",,terminal_output +4606,4833002,"TERMINAL",0,0,"89",,terminal_output +4607,4834031,"TERMINAL",0,0,"950",,terminal_output +4608,4835113,"TERMINAL",0,0,"212",,terminal_output +4609,4836280,"TERMINAL",0,0,"23",,terminal_output +4610,4837204,"TERMINAL",0,0,"34",,terminal_output +4611,4838327,"TERMINAL",0,0,"45",,terminal_output +4612,4839209,"TERMINAL",0,0,"56",,terminal_output +4613,4840241,"TERMINAL",0,0,"67",,terminal_output +4614,4841283,"TERMINAL",0,0,"78",,terminal_output +4615,4842319,"TERMINAL",0,0,"89",,terminal_output +4616,4843365,"TERMINAL",0,0,"95:00",,terminal_output +4617,4844402,"TERMINAL",0,0,"301",,terminal_output +4618,4845444,"TERMINAL",0,0,"12",,terminal_output +4619,4846481,"TERMINAL",0,0,"23",,terminal_output +4620,4847649,"TERMINAL",0,0,"34",,terminal_output +4621,4848672,"TERMINAL",0,0,"45",,terminal_output +4622,4849697,"TERMINAL",0,0,"56",,terminal_output +4623,4850733,"TERMINAL",0,0,"67",,terminal_output +4624,4851678,"TERMINAL",0,0,"78",,terminal_output +4625,4852710,"TERMINAL",0,0,"89",,terminal_output +4626,4853791,"TERMINAL",0,0,"910",,terminal_output +4627,4854831,"TERMINAL",0,0,"401",,terminal_output +4628,4855838,"TERMINAL",0,0,"12",,terminal_output +4629,4856864,"TERMINAL",0,0,"23",,terminal_output +4630,4857993,"TERMINAL",0,0,"34",,terminal_output +4631,4859114,"TERMINAL",0,0,"45",,terminal_output +4632,4860146,"TERMINAL",0,0,"56",,terminal_output +4633,4861061,"TERMINAL",0,0,"68",,terminal_output +4634,4862085,"TERMINAL",0,0,"89",,terminal_output +4635,4863323,"TERMINAL",0,0,"920",,terminal_output +4636,4864218,"TERMINAL",0,0,"501",,terminal_output +4637,4865219,"TERMINAL",0,0,"12",,terminal_output +4638,4866295,"TERMINAL",0,0,"23",,terminal_output +4639,4867292,"TERMINAL",0,0,"34",,terminal_output +4640,4868334,"TERMINAL",0,0,"45",,terminal_output +4641,4869383,"TERMINAL",0,0,"56",,terminal_output +4642,4870417,"TERMINAL",0,0,"67",,terminal_output +4643,4871607,"TERMINAL",0,0,"78",,terminal_output +4644,4872505,"TERMINAL",0,0,"89",,terminal_output +4645,4873525,"TERMINAL",0,0,"930",,terminal_output +4646,4874681,"TERMINAL",0,0,"1:001",,terminal_output +4647,4875614,"TERMINAL",0,0,"12",,terminal_output +4648,4876736,"TERMINAL",0,0,"23",,terminal_output +4649,4877865,"TERMINAL",0,0,"34",,terminal_output +4650,4878745,"TERMINAL",0,0,"45",,terminal_output +4651,4879800,"TERMINAL",0,0,"56",,terminal_output +4652,4880836,"TERMINAL",0,0,"67",,terminal_output +4653,4881893,"TERMINAL",0,0,"78",,terminal_output +4654,4882928,"TERMINAL",0,0,"89",,terminal_output +4655,4883977,"TERMINAL",0,0,"940",,terminal_output +4656,4885026,"TERMINAL",0,0,"102",,terminal_output +4657,4886067,"TERMINAL",0,0,"23",,terminal_output +4658,4887308,"TERMINAL",0,0,"34",,terminal_output +4659,4888155,"TERMINAL",0,0,"45",,terminal_output +4660,4889228,"TERMINAL",0,0,"56",,terminal_output +4661,4890245,"TERMINAL",0,0,"67",,terminal_output +4662,4891286,"TERMINAL",0,0,"78",,terminal_output +4663,4892339,"TERMINAL",0,0,"89",,terminal_output 
+4664,4893523,"TERMINAL",0,0,"950",,terminal_output +4665,4894449,"TERMINAL",0,0,"201",,terminal_output +4666,4895524,"TERMINAL",0,0,"12",,terminal_output +4667,4896513,"TERMINAL",0,0,"23",,terminal_output +4668,4897564,"TERMINAL",0,0,"34",,terminal_output +4669,4898607,"TERMINAL",0,0,"45",,terminal_output +4670,4899658,"TERMINAL",0,0,"56",,terminal_output +4671,4900807,"TERMINAL",0,0,"67",,terminal_output +4672,4901748,"TERMINAL",0,0,"78",,terminal_output +4673,4902933,"TERMINAL",0,0,"89",,terminal_output +4674,4903971,"TERMINAL",0,0,"96:00",,terminal_output +4675,4904883,"TERMINAL",0,0,"301",,terminal_output +4676,4905921,"TERMINAL",0,0,"12",,terminal_output +4677,4906959,"TERMINAL",0,0,"23",,terminal_output +4678,4908151,"TERMINAL",0,0,"35",,terminal_output +4679,4909219,"TERMINAL",0,0,"56",,terminal_output +4680,4910313,"TERMINAL",0,0,"67",,terminal_output +4681,4911285,"TERMINAL",0,0,"78",,terminal_output +4682,4912331,"TERMINAL",0,0,"89",,terminal_output +4683,4913385,"TERMINAL",0,0,"910",,terminal_output +4684,4914432,"TERMINAL",0,0,"401",,terminal_output +4685,4915469,"TERMINAL",0,0,"12",,terminal_output +4686,4916642,"TERMINAL",0,0,"23",,terminal_output +4687,4917618,"TERMINAL",0,0,"34",,terminal_output +4688,4918602,"TERMINAL",0,0,"45",,terminal_output +4689,4919648,"TERMINAL",0,0,"56",,terminal_output +4690,4920864,"TERMINAL",0,0,"67",,terminal_output +4691,4921786,"TERMINAL",0,0,"78",,terminal_output +4692,4922776,"TERMINAL",0,0,"89",,terminal_output +4693,4923934,"TERMINAL",0,0,"920",,terminal_output +4694,4924875,"TERMINAL",0,0,"501",,terminal_output +4695,4925955,"TERMINAL",0,0,"12",,terminal_output +4696,4927009,"TERMINAL",0,0,"23",,terminal_output +4697,4928033,"TERMINAL",0,0,"35",,terminal_output +4698,4929050,"TERMINAL",0,0,"56",,terminal_output +4699,4930096,"TERMINAL",0,0,"67",,terminal_output +4700,4931136,"TERMINAL",0,0,"78",,terminal_output +4701,4932410,"TERMINAL",0,0,"89",,terminal_output +4702,4933358,"TERMINAL",0,0,"930",,terminal_output +4703,4934284,"TERMINAL",0,0,"2:001",,terminal_output +4704,4935330,"TERMINAL",0,0,"12",,terminal_output +4705,4936378,"TERMINAL",0,0,"23",,terminal_output +4706,4937454,"TERMINAL",0,0,"34",,terminal_output +4707,4938478,"TERMINAL",0,0,"45",,terminal_output +4708,4939601,"TERMINAL",0,0,"56",,terminal_output +4709,4940630,"TERMINAL",0,0,"67",,terminal_output +4710,4941597,"TERMINAL",0,0,"78",,terminal_output +4711,4942643,"TERMINAL",0,0,"89",,terminal_output +4712,4944284,"TERMINAL",0,0,"941",,terminal_output +4713,4945332,"TERMINAL",0,0,"112",,terminal_output +4714,4946384,"TERMINAL",0,0,"23",,terminal_output +4715,4947416,"TERMINAL",0,0,"34",,terminal_output +4716,4948512,"TERMINAL",0,0,"45",,terminal_output +4717,4949535,"TERMINAL",0,0,"56",,terminal_output +4718,4950661,"TERMINAL",0,0,"67",,terminal_output +4719,4951787,"TERMINAL",0,0,"78",,terminal_output +4720,4952813,"TERMINAL",0,0,"89",,terminal_output +4721,4953938,"TERMINAL",0,0,"950",,terminal_output +4722,4954962,"TERMINAL",0,0,"201",,terminal_output +4723,4956089,"TERMINAL",0,0,"12",,terminal_output +4724,4957011,"TERMINAL",0,0,"23",,terminal_output +4725,4958035,"TERMINAL",0,0,"34",,terminal_output +4726,4959179,"TERMINAL",0,0,"46",,terminal_output +4727,4960286,"TERMINAL",0,0,"67",,terminal_output +4728,4961320,"TERMINAL",0,0,"78",,terminal_output +4729,4962234,"TERMINAL",0,0,"89",,terminal_output +4730,4963247,"TERMINAL",0,0,"97:00",,terminal_output +4731,4964272,"TERMINAL",0,0,"301",,terminal_output +4732,4965311,"TERMINAL",0,0,"12",,terminal_output 
+4733,4966434,"TERMINAL",0,0,"23",,terminal_output +4734,4967398,"TERMINAL",0,0,"34",,terminal_output +4735,4968442,"TERMINAL",0,0,"45",,terminal_output +4736,4969609,"TERMINAL",0,0,"56",,terminal_output +4737,4970537,"TERMINAL",0,0,"67",,terminal_output +4738,4971581,"TERMINAL",0,0,"78",,terminal_output +4739,4972768,"TERMINAL",0,0,"89",,terminal_output +4740,4973703,"TERMINAL",0,0,"910",,terminal_output +4741,4974930,"TERMINAL",0,0,"401",,terminal_output +4742,4975764,"TERMINAL",0,0,"12",,terminal_output +4743,4976814,"TERMINAL",0,0,"23",,terminal_output +4744,4977856,"TERMINAL",0,0,"34",,terminal_output +4745,4978932,"TERMINAL",0,0,"45",,terminal_output +4746,4980053,"TERMINAL",0,0,"56",,terminal_output +4747,4980981,"TERMINAL",0,0,"67",,terminal_output +4748,4982239,"TERMINAL",0,0,"79",,terminal_output +4749,4983067,"TERMINAL",0,0,"920",,terminal_output +4750,4984249,"TERMINAL",0,0,"501",,terminal_output +4751,4985282,"TERMINAL",0,0,"12",,terminal_output +4752,4986267,"TERMINAL",0,0,"23",,terminal_output +4753,4987255,"TERMINAL",0,0,"34",,terminal_output +4754,4988350,"TERMINAL",0,0,"45",,terminal_output +4755,4989345,"TERMINAL",0,0,"56",,terminal_output +4756,4990388,"TERMINAL",0,0,"67",,terminal_output +4757,4991437,"TERMINAL",0,0,"78",,terminal_output +4758,4992545,"TERMINAL",0,0,"89",,terminal_output +4759,4993531,"TERMINAL",0,0,"930",,terminal_output +4760,4994583,"TERMINAL",0,0,"3:001",,terminal_output +4761,4995823,"TERMINAL",0,0,"12",,terminal_output +4762,4996753,"TERMINAL",0,0,"23",,terminal_output +4763,4997716,"TERMINAL",0,0,"34",,terminal_output +4764,4998768,"TERMINAL",0,0,"45",,terminal_output +4765,4999819,"TERMINAL",0,0,"56",,terminal_output +4766,5000855,"TERMINAL",0,0,"67",,terminal_output +4767,5001969,"TERMINAL",0,0,"78",,terminal_output +4768,5002950,"TERMINAL",0,0,"89",,terminal_output +4769,5003993,"TERMINAL",0,0,"940",,terminal_output +4770,5005139,"TERMINAL",0,0,"102",,terminal_output +4771,5006091,"TERMINAL",0,0,"23",,terminal_output +4772,5007287,"TERMINAL",0,0,"34",,terminal_output +4773,5008189,"TERMINAL",0,0,"45",,terminal_output +4774,5009243,"TERMINAL",0,0,"56",,terminal_output +4775,5010285,"TERMINAL",0,0,"67",,terminal_output +4776,5011388,"TERMINAL",0,0,"78",,terminal_output +4777,5012377,"TERMINAL",0,0,"89",,terminal_output +4778,5013433,"TERMINAL",0,0,"950",,terminal_output +4779,5014462,"TERMINAL",0,0,"201",,terminal_output +4780,5015684,"TERMINAL",0,0,"12",,terminal_output +4781,5016554,"TERMINAL",0,0,"23",,terminal_output +4782,5017630,"TERMINAL",0,0,"34",,terminal_output +4783,5018655,"TERMINAL",0,0,"45",,terminal_output +4784,5019783,"TERMINAL",0,0,"56",,terminal_output +4785,5020806,"TERMINAL",0,0,"67",,terminal_output +4786,5021839,"TERMINAL",0,0,"78",,terminal_output +4787,5022852,"TERMINAL",0,0,"89",,terminal_output +4788,5023888,"TERMINAL",0,0,"98:00",,terminal_output +4789,5024936,"TERMINAL",0,0,"301",,terminal_output +4790,5026035,"TERMINAL",0,0,"12",,terminal_output +4791,5027031,"TERMINAL",0,0,"24",,terminal_output +4792,5028187,"TERMINAL",0,0,"45",,terminal_output +4793,5029206,"TERMINAL",0,0,"56",,terminal_output +4794,5030205,"TERMINAL",0,0,"67",,terminal_output +4795,5031366,"TERMINAL",0,0,"78",,terminal_output +4796,5032282,"TERMINAL",0,0,"89",,terminal_output +4797,5033299,"TERMINAL",0,0,"910",,terminal_output +4798,5034326,"TERMINAL",0,0,"401",,terminal_output +4799,5035453,"TERMINAL",0,0,"12",,terminal_output +4800,5036477,"TERMINAL",0,0,"23",,terminal_output +4801,5037595,"TERMINAL",0,0,"34",,terminal_output 
+4802,5038467,"TERMINAL",0,0,"45",,terminal_output +4803,5039547,"TERMINAL",0,0,"56",,terminal_output +4804,5040544,"TERMINAL",0,0,"67",,terminal_output +4805,5041584,"TERMINAL",0,0,"78",,terminal_output +4806,5042719,"TERMINAL",0,0,"89",,terminal_output +4807,5043658,"TERMINAL",0,0,"920",,terminal_output +4808,5044870,"TERMINAL",0,0,"501",,terminal_output +4809,5045792,"TERMINAL",0,0,"12",,terminal_output +4810,5046796,"TERMINAL",0,0,"23",,terminal_output +4811,5047826,"TERMINAL",0,0,"34",,terminal_output +4812,5049066,"TERMINAL",0,0,"45",,terminal_output +4813,5049906,"TERMINAL",0,0,"56",,terminal_output +4814,5051116,"TERMINAL",0,0,"67",,terminal_output +4815,5052041,"TERMINAL",0,0,"78",,terminal_output +4816,5053061,"TERMINAL",0,0,"830",,terminal_output +4817,5054189,"TERMINAL",0,0,"4:001",,terminal_output +4818,5055417,"TERMINAL",0,0,"12",,terminal_output +4819,5056134,"TERMINAL",0,0,"23",,terminal_output +4820,5057163,"TERMINAL",0,0,"34",,terminal_output +4821,5058289,"TERMINAL",0,0,"45",,terminal_output +4822,5059279,"TERMINAL",0,0,"56",,terminal_output +4823,5060334,"TERMINAL",0,0,"67",,terminal_output +4824,5061359,"TERMINAL",0,0,"78",,terminal_output +4825,5062367,"TERMINAL",0,0,"89",,terminal_output +4826,5063406,"TERMINAL",0,0,"940",,terminal_output +4827,5064445,"TERMINAL",0,0,"101",,terminal_output +4828,5065672,"TERMINAL",0,0,"12",,terminal_output +4829,5066523,"TERMINAL",0,0,"23",,terminal_output +4830,5067603,"TERMINAL",0,0,"34",,terminal_output +4831,5068626,"TERMINAL",0,0,"45",,terminal_output +4832,5069655,"TERMINAL",0,0,"56",,terminal_output +4833,5070790,"TERMINAL",0,0,"67",,terminal_output +4834,5071800,"TERMINAL",0,0,"78",,terminal_output +4835,5072752,"TERMINAL",0,0,"89",,terminal_output +4836,5073852,"TERMINAL",0,0,"950",,terminal_output +4837,5074896,"TERMINAL",0,0,"201",,terminal_output +4838,5075908,"TERMINAL",0,0,"12",,terminal_output +4839,5077023,"TERMINAL",0,0,"23",,terminal_output +4840,5078046,"TERMINAL",0,0,"34",,terminal_output +4841,5079070,"TERMINAL",0,0,"45",,terminal_output +4842,5080116,"TERMINAL",0,0,"57",,terminal_output +4843,5081065,"TERMINAL",0,0,"78",,terminal_output +4844,5082106,"TERMINAL",0,0,"89",,terminal_output +4845,5083166,"TERMINAL",0,0,"99:00",,terminal_output +4846,5084192,"TERMINAL",0,0,"301",,terminal_output +4847,5085216,"TERMINAL",0,0,"12",,terminal_output +4848,5086446,"TERMINAL",0,0,"23",,terminal_output +4849,5087287,"TERMINAL",0,0,"34",,terminal_output +4850,5088405,"TERMINAL",0,0,"45",,terminal_output +4851,5089397,"TERMINAL",0,0,"56",,terminal_output +4852,5090398,"TERMINAL",0,0,"67",,terminal_output +4853,5091435,"TERMINAL",0,0,"78",,terminal_output +4854,5093100,"TERMINAL",0,0,"89",,terminal_output +4855,5094171,"TERMINAL",0,0,"910",,terminal_output +4856,5094762,"TERMINAL",0,0,"401",,terminal_output +4857,5095624,"TERMINAL",0,0,"12",,terminal_output +4858,5096679,"TERMINAL",0,0,"23",,terminal_output +4859,5097720,"TERMINAL",0,0,"34",,terminal_output +4860,5098739,"TERMINAL",0,0,"45",,terminal_output +4861,5099857,"TERMINAL",0,0,"56",,terminal_output +4862,5100882,"TERMINAL",0,0,"67",,terminal_output +4863,5101906,"TERMINAL",0,0,"78",,terminal_output +4864,5102883,"TERMINAL",0,0,"89",,terminal_output +4865,5103953,"TERMINAL",0,0,"920",,terminal_output +4866,5105086,"TERMINAL",0,0,"501",,terminal_output +4867,5106209,"TERMINAL",0,0,"12",,terminal_output +4868,5107233,"TERMINAL",0,0,"24",,terminal_output +4869,5108295,"TERMINAL",0,0,"45",,terminal_output +4870,5109207,"TERMINAL",0,0,"56",,terminal_output 
+4871,5110200,"TERMINAL",0,0,"67",,terminal_output +4872,5111226,"TERMINAL",0,0,"78",,terminal_output +4873,5112353,"TERMINAL",0,0,"89",,terminal_output +4874,5113478,"TERMINAL",0,0,"930",,terminal_output +4875,5114312,"TERMINAL",0,0,"5:001",,terminal_output +4876,5115433,"TERMINAL",0,0,"12",,terminal_output +4877,5116383,"TERMINAL",0,0,"23",,terminal_output +4878,5117421,"TERMINAL",0,0,"34",,terminal_output +4879,5118518,"TERMINAL",0,0,"45",,terminal_output +4880,5119497,"TERMINAL",0,0,"56",,terminal_output +4881,5120530,"TERMINAL",0,0,"67",,terminal_output +4882,5121568,"TERMINAL",0,0,"78",,terminal_output +4883,5122799,"TERMINAL",0,0,"89",,terminal_output +4884,5123821,"TERMINAL",0,0,"940",,terminal_output +4885,5124710,"TERMINAL",0,0,"101",,terminal_output +4886,5125867,"TERMINAL",0,0,"12",,terminal_output +4887,5126765,"TERMINAL",0,0,"23",,terminal_output +4888,5127805,"TERMINAL",0,0,"34",,terminal_output +4889,5128840,"TERMINAL",0,0,"45",,terminal_output +4890,5129968,"TERMINAL",0,0,"56",,terminal_output +4891,5131192,"TERMINAL",0,0,"67",,terminal_output +4892,5132119,"TERMINAL",0,0,"78",,terminal_output +4893,5133141,"TERMINAL",0,0,"89",,terminal_output +4894,5134059,"TERMINAL",0,0,"951",,terminal_output +4895,5135190,"TERMINAL",0,0,"212",,terminal_output +4896,5136214,"TERMINAL",0,0,"23",,terminal_output +4897,5137237,"TERMINAL",0,0,"34",,terminal_output +4898,5138302,"TERMINAL",0,0,"45",,terminal_output +4899,5139233,"TERMINAL",0,0,"56",,terminal_output +4900,5140334,"TERMINAL",0,0,"67",,terminal_output +4901,5141338,"TERMINAL",0,0,"78",,terminal_output +4902,5142391,"TERMINAL",0,0,"89",,terminal_output +4903,5143420,"TERMINAL",0,0,"940:00",,terminal_output +4904,5144441,"TERMINAL",0,0,"301",,terminal_output +4905,5145491,"TERMINAL",0,0,"12",,terminal_output +4906,5146519,"TERMINAL",0,0,"23",,terminal_output +4907,5147712,"TERMINAL",0,0,"34",,terminal_output +4908,5148604,"TERMINAL",0,0,"45",,terminal_output +4909,5149839,"TERMINAL",0,0,"56",,terminal_output +4910,5150685,"TERMINAL",0,0,"67",,terminal_output +4911,5151780,"TERMINAL",0,0,"78",,terminal_output +4912,5152758,"TERMINAL",0,0,"89",,terminal_output +4913,5153983,"TERMINAL",0,0,"910",,terminal_output +4914,5154951,"TERMINAL",0,0,"401",,terminal_output +4915,5155973,"TERMINAL",0,0,"12",,terminal_output +4916,5157011,"TERMINAL",0,0,"23",,terminal_output +4917,5157975,"TERMINAL",0,0,"34",,terminal_output +4918,5159074,"TERMINAL",0,0,"46",,terminal_output +4919,5160070,"TERMINAL",0,0,"67",,terminal_output +4920,5161210,"TERMINAL",0,0,"78",,terminal_output +4921,5162229,"TERMINAL",0,0,"89",,terminal_output +4922,5163348,"TERMINAL",0,0,"920",,terminal_output +4923,5164392,"TERMINAL",0,0,"501",,terminal_output +4924,5165433,"TERMINAL",0,0,"12",,terminal_output +4925,5166451,"TERMINAL",0,0,"23",,terminal_output +4926,5167551,"TERMINAL",0,0,"34",,terminal_output +4927,5168542,"TERMINAL",0,0,"45",,terminal_output +4928,5169702,"TERMINAL",0,0,"56",,terminal_output +4929,5170826,"TERMINAL",0,0,"67",,terminal_output +4930,5171849,"TERMINAL",0,0,"78",,terminal_output +4931,5172711,"TERMINAL",0,0,"89",,terminal_output +4932,5173796,"TERMINAL",0,0,"930",,terminal_output +4933,5174817,"TERMINAL",0,0,"6:001",,terminal_output +4934,5175828,"TERMINAL",0,0,"12",,terminal_output +4935,5176976,"TERMINAL",0,0,"23",,terminal_output +4936,5177998,"TERMINAL",0,0,"34",,terminal_output +4937,5179114,"TERMINAL",0,0,"45",,terminal_output +4938,5179983,"TERMINAL",0,0,"56",,terminal_output +4939,5181110,"TERMINAL",0,0,"68",,terminal_output 
+4940,5182090,"TERMINAL",0,0,"89",,terminal_output +4941,5183109,"TERMINAL",0,0,"940",,terminal_output +4942,5184138,"TERMINAL",0,0,"101",,terminal_output +4943,5185178,"TERMINAL",0,0,"12",,terminal_output +4944,5186319,"TERMINAL",0,0,"23",,terminal_output +4945,5187256,"TERMINAL",0,0,"34",,terminal_output +4946,5188292,"TERMINAL",0,0,"45",,terminal_output +4947,5189333,"TERMINAL",0,0,"56",,terminal_output +4948,5190486,"TERMINAL",0,0,"67",,terminal_output +4949,5191416,"TERMINAL",0,0,"78",,terminal_output +4950,5192453,"TERMINAL",0,0,"89",,terminal_output +4951,5193490,"TERMINAL",0,0,"950",,terminal_output +4952,5194526,"TERMINAL",0,0,"201",,terminal_output +4953,5195706,"TERMINAL",0,0,"12",,terminal_output +4954,5196621,"TERMINAL",0,0,"23",,terminal_output +4955,5197758,"TERMINAL",0,0,"34",,terminal_output +4956,5198886,"TERMINAL",0,0,"45",,terminal_output +4957,5199910,"TERMINAL",0,0,"56",,terminal_output +4958,5200931,"TERMINAL",0,0,"67",,terminal_output +4959,5201824,"TERMINAL",0,0,"78",,terminal_output +4960,5202975,"TERMINAL",0,0,"89",,terminal_output +4961,5203861,"TERMINAL",0,0,"91:00",,terminal_output +4962,5204904,"TERMINAL",0,0,"301",,terminal_output +4963,5205947,"TERMINAL",0,0,"12",,terminal_output +4964,5206977,"TERMINAL",0,0,"23",,terminal_output +4965,5208017,"TERMINAL",0,0,"35",,terminal_output +4966,5209119,"TERMINAL",0,0,"56",,terminal_output +4967,5210141,"TERMINAL",0,0,"67",,terminal_output +4968,5211168,"TERMINAL",0,0,"78",,terminal_output +4969,5212191,"TERMINAL",0,0,"89",,terminal_output +4970,5213351,"TERMINAL",0,0,"910",,terminal_output +4971,5214276,"TERMINAL",0,0,"401",,terminal_output +4972,5215325,"TERMINAL",0,0,"12",,terminal_output +4973,5216344,"TERMINAL",0,0,"23",,terminal_output +4974,5217381,"TERMINAL",0,0,"34",,terminal_output +4975,5218417,"TERMINAL",0,0,"45",,terminal_output +4976,5219479,"TERMINAL",0,0,"56",,terminal_output +4977,5220524,"TERMINAL",0,0,"67",,terminal_output +4978,5221616,"TERMINAL",0,0,"78",,terminal_output +4979,5222636,"TERMINAL",0,0,"89",,terminal_output +4980,5223662,"TERMINAL",0,0,"920",,terminal_output +4981,5224787,"TERMINAL",0,0,"501",,terminal_output +4982,5225914,"TERMINAL",0,0,"12",,terminal_output +4983,5226810,"TERMINAL",0,0,"23",,terminal_output +4984,5227857,"TERMINAL",0,0,"34",,terminal_output +4985,5228884,"TERMINAL",0,0,"45",,terminal_output +4986,5229907,"TERMINAL",0,0,"56",,terminal_output +4987,5231033,"TERMINAL",0,0,"67",,terminal_output +4988,5232062,"TERMINAL",0,0,"78",,terminal_output +4989,5233430,"TERMINAL",0,0,"830",,terminal_output +4990,5234204,"TERMINAL",0,0,"7:001",,terminal_output +4991,5235138,"TERMINAL",0,0,"12",,terminal_output +4992,5236131,"TERMINAL",0,0,"23",,terminal_output +4993,5237182,"TERMINAL",0,0,"34",,terminal_output +4994,5238353,"TERMINAL",0,0,"45",,terminal_output +4995,5239278,"TERMINAL",0,0,"56",,terminal_output +4996,5240457,"TERMINAL",0,0,"67",,terminal_output +4997,5241481,"TERMINAL",0,0,"78",,terminal_output +4998,5242414,"TERMINAL",0,0,"89",,terminal_output +4999,5243438,"TERMINAL",0,0,"940",,terminal_output +5000,5244482,"TERMINAL",0,0,"101",,terminal_output +5001,5245521,"TERMINAL",0,0,"12",,terminal_output +5002,5246564,"TERMINAL",0,0,"23",,terminal_output +5003,5247596,"TERMINAL",0,0,"34",,terminal_output +5004,5248644,"TERMINAL",0,0,"45",,terminal_output +5005,5249694,"TERMINAL",0,0,"56",,terminal_output +5006,5250803,"TERMINAL",0,0,"67",,terminal_output +5007,5251929,"TERMINAL",0,0,"78",,terminal_output +5008,5252850,"TERMINAL",0,0,"89",,terminal_output 
+5009,5254077,"TERMINAL",0,0,"950",,terminal_output +5010,5255107,"TERMINAL",0,0,"201",,terminal_output +5011,5256024,"TERMINAL",0,0,"12",,terminal_output +5012,5257149,"TERMINAL",0,0,"23",,terminal_output +5013,5258172,"TERMINAL",0,0,"45",,terminal_output +5014,5259215,"TERMINAL",0,0,"56",,terminal_output +5015,5260244,"TERMINAL",0,0,"67",,terminal_output +5016,5261244,"TERMINAL",0,0,"78",,terminal_output +5017,5262268,"TERMINAL",0,0,"89",,terminal_output +5018,5263296,"TERMINAL",0,0,"92:00",,terminal_output +5019,5264538,"TERMINAL",0,0,"301",,terminal_output +5020,5265549,"TERMINAL",0,0,"12",,terminal_output +5021,5266429,"TERMINAL",0,0,"23",,terminal_output +5022,5267478,"TERMINAL",0,0,"34",,terminal_output +5023,5268518,"TERMINAL",0,0,"45",,terminal_output +5024,5269567,"TERMINAL",0,0,"56",,terminal_output +5025,5270665,"TERMINAL",0,0,"67",,terminal_output +5026,5271648,"TERMINAL",0,0,"78",,terminal_output +5027,5272717,"TERMINAL",0,0,"89",,terminal_output +5028,5273739,"TERMINAL",0,0,"910",,terminal_output +5029,5274775,"TERMINAL",0,0,"401",,terminal_output +5030,5275993,"TERMINAL",0,0,"12",,terminal_output +5031,5276849,"TERMINAL",0,0,"23",,terminal_output +5032,5278039,"TERMINAL",0,0,"34",,terminal_output +5033,5278945,"TERMINAL",0,0,"45",,terminal_output +5034,5279985,"TERMINAL",0,0,"56",,terminal_output +5035,5281219,"TERMINAL",0,0,"67",,terminal_output +5036,5282068,"TERMINAL",0,0,"89",,terminal_output +5037,5283095,"TERMINAL",0,0,"920",,terminal_output +5038,5284182,"TERMINAL",0,0,"501",,terminal_output +5039,5285309,"TERMINAL",0,0,"12",,terminal_output +5040,5286369,"TERMINAL",0,0,"23",,terminal_output +5041,5287255,"TERMINAL",0,0,"34",,terminal_output +5042,5288290,"TERMINAL",0,0,"45",,terminal_output +5043,5289363,"TERMINAL",0,0,"56",,terminal_output +5044,5290359,"TERMINAL",0,0,"67",,terminal_output +5045,5291405,"TERMINAL",0,0,"78",,terminal_output +5046,5292443,"TERMINAL",0,0,"89",,terminal_output +5047,5293484,"TERMINAL",0,0,"930",,terminal_output +5048,5294630,"TERMINAL",0,0,"8:001",,terminal_output +5049,5295555,"TERMINAL",0,0,"12",,terminal_output +5050,5296607,"TERMINAL",0,0,"23",,terminal_output +5051,5297645,"TERMINAL",0,0,"34",,terminal_output +5052,5298684,"TERMINAL",0,0,"45",,terminal_output +5053,5299725,"TERMINAL",0,0,"56",,terminal_output +5054,5300971,"TERMINAL",0,0,"67",,terminal_output +5055,5301814,"TERMINAL",0,0,"78",,terminal_output +5056,5302860,"TERMINAL",0,0,"89",,terminal_output +5057,5303916,"TERMINAL",0,0,"940",,terminal_output +5058,5304929,"TERMINAL",0,0,"101",,terminal_output +5059,5306709,"TERMINAL",0,0,"12",,terminal_output +5060,5307002,"TERMINAL",0,0,"23",,terminal_output +5061,5308148,"TERMINAL",0,0,"35",,terminal_output +5062,5309082,"TERMINAL",0,0,"56",,terminal_output +5063,5310120,"TERMINAL",0,0,"67",,terminal_output +5064,5311161,"TERMINAL",0,0,"78",,terminal_output +5065,5313400,"TERMINAL",0,0,"850",,terminal_output +5066,5314421,"TERMINAL",0,0,"201",,terminal_output +5067,5315464,"TERMINAL",0,0,"12",,terminal_output +5068,5316495,"TERMINAL",0,0,"23",,terminal_output +5069,5317534,"TERMINAL",0,0,"34",,terminal_output +5070,5318595,"TERMINAL",0,0,"45",,terminal_output +5071,5319663,"TERMINAL",0,0,"56",,terminal_output +5072,5320736,"TERMINAL",0,0,"67",,terminal_output +5073,5321699,"TERMINAL",0,0,"78",,terminal_output +5074,5322768,"TERMINAL",0,0,"89",,terminal_output +5075,5323808,"TERMINAL",0,0,"93:00",,terminal_output +5076,5324805,"TERMINAL",0,0,"301",,terminal_output +5077,5325959,"TERMINAL",0,0,"12",,terminal_output 
+5078,5327087,"TERMINAL",0,0,"23",,terminal_output +5079,5327932,"TERMINAL",0,0,"34",,terminal_output +5080,5328984,"TERMINAL",0,0,"45",,terminal_output +5081,5330058,"TERMINAL",0,0,"57",,terminal_output +5082,5331084,"TERMINAL",0,0,"78",,terminal_output +5083,5332209,"TERMINAL",0,0,"89",,terminal_output +5084,5333158,"TERMINAL",0,0,"910",,terminal_output +5085,5334199,"TERMINAL",0,0,"401",,terminal_output +5086,5335241,"TERMINAL",0,0,"12",,terminal_output +5087,5336281,"TERMINAL",0,0,"23",,terminal_output +5088,5337343,"TERMINAL",0,0,"34",,terminal_output +5089,5338376,"TERMINAL",0,0,"45",,terminal_output +5090,5339427,"TERMINAL",0,0,"56",,terminal_output +5091,5340478,"TERMINAL",0,0,"67",,terminal_output +5092,5341533,"TERMINAL",0,0,"78",,terminal_output +5093,5342754,"TERMINAL",0,0,"89",,terminal_output +5094,5343591,"TERMINAL",0,0,"920",,terminal_output +5095,5344837,"TERMINAL",0,0,"501",,terminal_output +5096,5345675,"TERMINAL",0,0,"12",,terminal_output +5097,5346718,"TERMINAL",0,0,"23",,terminal_output +5098,5347772,"TERMINAL",0,0,"34",,terminal_output +5099,5348816,"TERMINAL",0,0,"45",,terminal_output +5100,5349856,"TERMINAL",0,0,"56",,terminal_output +5101,5350897,"TERMINAL",0,0,"67",,terminal_output +5102,5351984,"TERMINAL",0,0,"78",,terminal_output +5103,5353098,"TERMINAL",0,0,"89",,terminal_output +5104,5354126,"TERMINAL",0,0,"931",,terminal_output +5105,5355073,"TERMINAL",0,0,"9:012",,terminal_output +5106,5356127,"TERMINAL",0,0,"23",,terminal_output +5107,5357164,"TERMINAL",0,0,"34",,terminal_output +5108,5358222,"TERMINAL",0,0,"45",,terminal_output +5109,5359249,"TERMINAL",0,0,"56",,terminal_output +5110,5360280,"TERMINAL",0,0,"67",,terminal_output +5111,5361399,"TERMINAL",0,0,"78",,terminal_output +5112,5362363,"TERMINAL",0,0,"89",,terminal_output +5113,5363427,"TERMINAL",0,0,"940",,terminal_output +5114,5364459,"TERMINAL",0,0,"101",,terminal_output +5115,5365508,"TERMINAL",0,0,"12",,terminal_output +5116,5366548,"TERMINAL",0,0,"23",,terminal_output +5117,5367844,"TERMINAL",0,0,"34",,terminal_output +5118,5368670,"TERMINAL",0,0,"45",,terminal_output +5119,5369800,"TERMINAL",0,0,"56",,terminal_output +5120,5370720,"TERMINAL",0,0,"67",,terminal_output +5121,5371905,"TERMINAL",0,0,"78",,terminal_output +5122,5372864,"TERMINAL",0,0,"89",,terminal_output +5123,5373985,"TERMINAL",0,0,"950",,terminal_output +5124,5375014,"TERMINAL",0,0,"201",,terminal_output +5125,5376114,"TERMINAL",0,0,"12",,terminal_output +5126,5376992,"TERMINAL",0,0,"23",,terminal_output +5127,5378036,"TERMINAL",0,0,"35",,terminal_output +5128,5379084,"TERMINAL",0,0,"56",,terminal_output +5129,5380166,"TERMINAL",0,0,"67",,terminal_output +5130,5381391,"TERMINAL",0,0,"78",,terminal_output +5131,5382281,"TERMINAL",0,0,"89",,terminal_output +5132,5383269,"TERMINAL",0,0,"94:00",,terminal_output +5133,5384318,"TERMINAL",0,0,"301",,terminal_output +5134,5385384,"TERMINAL",0,0,"12",,terminal_output +5135,5386401,"TERMINAL",0,0,"23",,terminal_output +5136,5387434,"TERMINAL",0,0,"34",,terminal_output +5137,5387875,"TERMINAL",0,0,"[?1049l\r[?1l>]0;tum_cte0515@hkn1993:~/Projects/jafar]633;D;0",,terminal_output +5138,5389411,"TERMINAL",0,0,"bash",,terminal_focus +5139,5393152,"TERMINAL",0,0,"cd $ws_dir",,terminal_command +5140,5394908,"TERMINAL",0,0,"cd checkpoints/",,terminal_command +5141,5395212,"TERMINAL",0,0,"ls",,terminal_command +5142,5395252,"TERMINAL",0,0,"]633;E;2025-07-27 11:29:41 ls;f02cc5a4-63b1-4d2c-a043-ded0cd6b1f6f]633;C",,terminal_output +5143,5395434,"TERMINAL",0,0,"0000 3292213 3292337 3296575 3297727 
3300233 3307618 3313565 interactive train_dyn_causal_255M\r\n3290283 3292221 3292338 3297569 3299016 3300290 3307619 3313570 lam train_dyn_causal_356M\r\n3290284 3292258 3292339 3297575 3299062 3300658 3309662 3313571 lam-1-action train_dyn_causal_500M\r\n3290295 3292328 3294600 3297576 3299063 3300663 3309663 3313572 lam_ckpt_dir train_dyn_new_arch-bugfixed-spatial-shift\r\n3290296 3292329 3294601 3297577 3299065 3300672 3309699 3316022 lam_main_test train_dyn_new_arch-bugfixed-temporal-shift\r\n3290366 3292330 3294602 3297578 3299066 3301025 3310436 big-runs maskgit-maskprob-fix train_dyn_yolorun_new_arch\r\n3290367 3292331 3294603 3297582 3299068 3301026 3310437 causal tokenizer train_lam_minecraft_overfit_sample\r\n3290391 3292332 3296502 3297586 3299069 3301027 3311671 checkpoints_alfred tokenizer_ckpt_dir train_tokenizer_batch_size_scaling_16_node\r\n3290392 3292333 3296540 3297606 3299258 3301029 3311672 coinrun train_dynamics_lr_schedule_const train_tokenizer_minecraft_overfit_sample\r\n3290439 3292334 3296571 3297671 3299259 3301030 3313562 debug train_dynamics_lr_schedule_cos wrap\r\n3290440 3292335 3296573 3297693 3299272 3301031 3313563 dyn train_dynamics_lr_schedule_wsd\r\n3291405 3292336 3296574 3297706 3299579 3306801 3313564 dynamics_ckpt_dir train_dyn_causal_180M\r\n]0;tum_cte0515@hkn1993:/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints]633;D;0",,terminal_output +5144,5399027,"TERMINAL",0,0,"cd maskgit-maskprob-fix/",,terminal_command +5145,5399384,"TERMINAL",0,0,"ls",,terminal_command +5146,5399436,"TERMINAL",0,0,"]633;E;2025-07-27 11:29:45 ls;f02cc5a4-63b1-4d2c-a043-ded0cd6b1f6f]633;C",,terminal_output +5147,5399511,"TERMINAL",0,0,"interactive train_dynamics_maskprob_fix_2_node_80M train_dynamics_maskprob_fix_8_node_80M\r\ntrain_dynamics_maskprob_fix_2_node train_dynamics_maskprob_fix_8_node\r\n]0;tum_cte0515@hkn1993:/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/maskgit-maskprob-fix]633;D;0",,terminal_output +5148,5405428,"TERMINAL",0,0,"cd train_dynamics_maskprob_fix_8_node",,terminal_command +5149,5405686,"TERMINAL",0,0,"ls",,terminal_command +5150,5405717,"TERMINAL",0,0,"]633;E;2025-07-27 11:29:51 ls;f02cc5a4-63b1-4d2c-a043-ded0cd6b1f6f]633;C3370788 3371237\r\n]0;tum_cte0515@hkn1993:/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/maskgit-maskprob-fix/train_dynamics_maskprob_fix_8_node]633;D;0",,terminal_output +5151,5411712,"TERMINAL",0,0,"cd 3371237/",,terminal_command +5152,5412444,"TERMINAL",0,0,"ls",,terminal_command +5153,5412482,"TERMINAL",0,0,"]633;E;2025-07-27 11:29:58 ls;f02cc5a4-63b1-4d2c-a043-ded0cd6b1f6f]633;C020000 034000 035000 036000\r\n]0;tum_cte0515@hkn1993:/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/maskgit-maskprob-fix/train_dynamics_maskprob_fix_8_node/3371237]633;D;0",,terminal_output +5154,5414124,"TERMINAL",0,0,"pwd",,terminal_command +5155,5414151,"TERMINAL",0,0,"]633;E;2025-07-27 11:30:00 pwd;f02cc5a4-63b1-4d2c-a043-ded0cd6b1f6f]633;C/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/maskgit-maskprob-fix/train_dynamics_maskprob_fix_8_node/3371237\r\n]0;tum_cte0515@hkn1993:/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/maskgit-maskprob-fix/train_dynamics_maskprob_fix_8_node/3371237]633;D;0",,terminal_output +5156,5418397,"slurm/jobs/mihir/horeka/mask_prob_fix/train_dynamics_8_nodes.sbatch",0,0,"#!/usr/bin/env bash\n\n#SBATCH --nodes=8\n#SBATCH --ntasks-per-node=4\n#SBATCH --time=48:00:00\n#SBATCH 
--partition=accelerated\n#SBATCH --cpus-per-task=5\n#SBATCH --gres=gpu:4\n#SBATCH --output=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir/maskgit-maskprob-fix/dynamics-cotraining/%x_%j.log\n#SBATCH --error=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir/maskgit-maskprob-fix/dynamics-cotraining/%x_%j.log\n#SBATCH --job-name=train_dynamics_maskprob_fix_8_node\n#SBATCH --requeue\n#SBATCH --signal=b:usr1@300 # 5 min before timeout\n\n# --- signal trap to requeue job before timeout ---\nrequeue_job() {\n echo ""[$(date)] caught sigusr1 (timeout warning), requeueing slurm job $SLURM_JOB_ID...""\n # optional: trigger checkpoint saving here\n # e.g., touch $checkpoint_dir/requeue_trigger\n scontrol requeue $SLURM_JOB_ID\n exit 0\n}\n\ntrap requeue_job sigusr1\n\n# set checkpoint flag based on restart count\nrestart_count=$(scontrol show job $SLURM_JOB_ID | grep -o 'Restarts=[0-9]*' | cut -d'=' -f2)\n\nif [ $restart_count -eq 0 ]; then\n restore_ckpt_flag=""--no-restore-ckpt""\nelse\n restore_ckpt_flag=""--restore-ckpt""\nfi\n\n\n\n# Log the sbatch script\ncat $0\n\nmodule unload mpi/openmpi/5.0\nmodule unload devel/cuda/12.4\nsource .venv/bin/activate\n\narray_records_dir=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_new/open_ai_minecraft_arrayrecords_chunked\n\njob_name=$SLURM_JOB_NAME\nslurm_job_id=$SLURM_JOB_ID\n\nCHECKPOINT_DIR=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/maskgit-maskprob-fix/$job_name/$slurm_job_id\nmkdir -p $CHECKPOINT_DIR\n\n# tokenizer with the new structure supporting larger ffn_dim\ntokenizer_ckpt_dir=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data/checkpoints/train_tokenizer_lr_sweep_1e-4_larger_ffn/\n\nenv | grep SLURM\n\nsrun python train_dynamics.py \\n --save_ckpt \\n $restore_ckpt_flag \\n --wandb_id $SLURM_JOB_ID \\n --ckpt_dir $CHECKPOINT_DIR \\n --batch_size=384 \\n --init_lr=0 \\n --max_lr=8e-5 \\n --log_image_interval=1000 \\n --log \\n --log_checkpoint_interval=1000 \\n --name=dynamics-maskprob-fix-8-node-$slurm_job_id \\n --tags dynamics maskprob-fix 8-node \\n --entity instant-uv \\n --project jafar \\n --dyna_dim=1024 \\n --dyna_num_blocks=16 \\n --dyna_num_heads=16 \\n --dyna_ffn_dim=4096 \\n --tokenizer_checkpoint=$tokenizer_ckpt_dir \\n --data_dir $array_records_dir &\n\nchild_pid=$!\n\nwait $child_pid\n",shellscript,tab +5157,5425050,"slurm/jobs/mihir/horeka/mask_prob_fix/train_dynamics_8_nodes.sbatch",1709,0,"",shellscript,selection_mouse +5158,5425915,"slurm/jobs/mihir/horeka/mask_prob_fix/train_dynamics_8_nodes.sbatch",1521,0,"",shellscript,selection_mouse +5159,5427433,"slurm/jobs/mihir/horeka/mask_prob_fix/train_dynamics_8_nodes.sbatch",1593,0,"\n",shellscript,content +5160,5427705,"slurm/jobs/mihir/horeka/mask_prob_fix/train_dynamics_8_nodes.sbatch",1594,0,"C",shellscript,content +5161,5427706,"slurm/jobs/mihir/horeka/mask_prob_fix/train_dynamics_8_nodes.sbatch",1595,0,"",shellscript,selection_keyboard +5162,5427775,"slurm/jobs/mihir/horeka/mask_prob_fix/train_dynamics_8_nodes.sbatch",1595,0,"H",shellscript,content +5163,5427776,"slurm/jobs/mihir/horeka/mask_prob_fix/train_dynamics_8_nodes.sbatch",1596,0,"",shellscript,selection_keyboard +5164,5427938,"slurm/jobs/mihir/horeka/mask_prob_fix/train_dynamics_8_nodes.sbatch",1596,0,"E",shellscript,content +5165,5427939,"slurm/jobs/mihir/horeka/mask_prob_fix/train_dynamics_8_nodes.sbatch",1597,0,"",shellscript,selection_keyboard 
+5166,5428265,"slurm/jobs/mihir/horeka/mask_prob_fix/train_dynamics_8_nodes.sbatch",1594,3,"CHECKPOINT_DIR",shellscript,content +5167,5428724,"slurm/jobs/mihir/horeka/mask_prob_fix/train_dynamics_8_nodes.sbatch",1608,0,"=",shellscript,content +5168,5428725,"slurm/jobs/mihir/horeka/mask_prob_fix/train_dynamics_8_nodes.sbatch",1609,0,"",shellscript,selection_keyboard +5169,5429126,"slurm/jobs/mihir/horeka/mask_prob_fix/train_dynamics_8_nodes.sbatch",1609,0,"/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/maskgit-maskprob-fix/train_dynamics_maskprob_fix_8_node/3371237",shellscript,content +5170,5433179,"slurm/jobs/mihir/horeka/mask_prob_fix/train_dynamics_8_nodes.sbatch",1500,0,"",shellscript,selection_mouse +5171,5434411,"slurm/jobs/mihir/horeka/mask_prob_fix/train_dynamics_8_nodes.sbatch",1499,0,"",shellscript,selection_command +5172,5435295,"slurm/jobs/mihir/horeka/mask_prob_fix/train_dynamics_8_nodes.sbatch",1466,0,"",shellscript,selection_command +5173,5435959,"slurm/jobs/mihir/horeka/mask_prob_fix/train_dynamics_8_nodes.sbatch",1466,0,"#",shellscript,content +5174,5435961,"slurm/jobs/mihir/horeka/mask_prob_fix/train_dynamics_8_nodes.sbatch",1467,0,"",shellscript,selection_keyboard +5175,5436040,"slurm/jobs/mihir/horeka/mask_prob_fix/train_dynamics_8_nodes.sbatch",1467,0," ",shellscript,content +5176,5436041,"slurm/jobs/mihir/horeka/mask_prob_fix/train_dynamics_8_nodes.sbatch",1468,0,"",shellscript,selection_keyboard +5177,5436554,"slurm/jobs/mihir/horeka/mask_prob_fix/train_dynamics_8_nodes.sbatch",1467,0,"",shellscript,selection_command +5178,5446894,"slurm/jobs/mihir/horeka/mask_prob_fix/train_dynamics_8_nodes.sbatch",1148,0,"",shellscript,selection_mouse +5179,5447530,"slurm/jobs/mihir/horeka/mask_prob_fix/train_dynamics_8_nodes.sbatch",1059,0,"",shellscript,selection_mouse +5180,5448070,"slurm/jobs/mihir/horeka/mask_prob_fix/train_dynamics_8_nodes.sbatch",1092,0,"",shellscript,selection_mouse +5181,5448234,"slurm/jobs/mihir/horeka/mask_prob_fix/train_dynamics_8_nodes.sbatch",1083,17,"restore_ckpt_flag",shellscript,selection_mouse +5182,5449306,"slurm/jobs/mihir/horeka/mask_prob_fix/train_dynamics_8_nodes.sbatch",1125,0,"",shellscript,selection_mouse +5183,5449311,"slurm/jobs/mihir/horeka/mask_prob_fix/train_dynamics_8_nodes.sbatch",1124,0,"",shellscript,selection_command +5184,5456985,"slurm/jobs/mihir/horeka/mask_prob_fix/train_dynamics_8_nodes.sbatch",1150,0,"",shellscript,selection_mouse +5185,5457144,"slurm/jobs/mihir/horeka/mask_prob_fix/train_dynamics_8_nodes.sbatch",1147,4,"=""--",shellscript,selection_mouse +5186,5457654,"slurm/jobs/mihir/horeka/mask_prob_fix/train_dynamics_8_nodes.sbatch",1156,0,"",shellscript,selection_mouse +5187,5457804,"slurm/jobs/mihir/horeka/mask_prob_fix/train_dynamics_8_nodes.sbatch",1151,7,"restore",shellscript,selection_mouse +5188,5458317,"slurm/jobs/mihir/horeka/mask_prob_fix/train_dynamics_8_nodes.sbatch",1160,0,"",shellscript,selection_mouse +5189,5458464,"slurm/jobs/mihir/horeka/mask_prob_fix/train_dynamics_8_nodes.sbatch",1159,4,"ckpt",shellscript,selection_mouse +5190,5458652,"slurm/jobs/mihir/horeka/mask_prob_fix/train_dynamics_8_nodes.sbatch",1158,5,"-ckpt",shellscript,selection_mouse +5191,5458686,"slurm/jobs/mihir/horeka/mask_prob_fix/train_dynamics_8_nodes.sbatch",1151,12,"restore-ckpt",shellscript,selection_mouse +5192,5461165,"train_dynamics.py",0,0,"from dataclasses import dataclass, field\nimport os\n\nimport einops\nfrom flax.training.train_state import TrainState\nfrom jax.sharding import Mesh, PartitionSpec, 
NamedSharding\nfrom jax.experimental.mesh_utils import create_device_mesh\nimport optax\nimport orbax.checkpoint as ocp\nimport numpy as np\nimport dm_pix as pix\nimport jax\nimport jax.numpy as jnp\nimport tyro\nimport wandb\nimport grain\n\nfrom genie import Genie, restore_genie_components\nfrom utils.dataloader import get_dataloader\nfrom utils.lr_utils import get_lr_schedule\nfrom utils.parameter_utils import count_parameters_by_component\n\n\n\n@dataclass\nclass Args:\n # Experiment\n num_steps: int = 200_000\n seed: int = 0\n seq_len: int = 16\n image_channels: int = 3\n image_height: int = 90\n image_width: int = 160\n data_dir: str = """"\n save_ckpt: bool = False\n restore_ckpt: bool = False\n # Optimization\n batch_size: int = 36\n init_lr: float = 0.0\n max_lr: float = 3e-5\n decay_end: float = 0.0\n wsd_decay_steps: int = (\n 10000 # NOTE: wsd_decay_steps will only be used when using a wsd-schedule\n )\n warmup_steps: int = 5000\n lr_schedule: str = ""wsd"" # supported options: wsd, cos\n # Tokenizer\n tokenizer_dim: int = 512\n tokenizer_ffn_dim: int = 2048\n latent_patch_dim: int = 32\n num_patch_latents: int = 1024\n patch_size: int = 4\n tokenizer_num_blocks: int = 4\n tokenizer_num_heads: int = 8\n tokenizer_checkpoint: str = """"\n # LAM\n lam_dim: int = 512\n lam_ffn_dim: int = 2048\n latent_action_dim: int = 32\n num_latent_actions: int = 6\n lam_patch_size: int = 16\n lam_num_blocks: int = 4\n lam_num_heads: int = 8\n lam_checkpoint: str = """"\n # Dynamics\n dyna_dim: int = 512\n dyna_ffn_dim: int = 2048\n dyna_num_blocks: int = 6\n dyna_num_heads: int = 8\n dropout: float = 0.0\n mask_limit: float = 0.5\n use_maskgit: bool = False\n param_dtype: jnp.dtype = jnp.float32\n dtype: jnp.dtype = jnp.bfloat16\n use_flash_attention: bool = True\n # Logging\n log: bool = False\n entity: str = """"\n project: str = """"\n name: str = ""train_dynamics""\n tags: list[str] = field(default_factory=lambda: [""dynamics""])\n log_interval: int = 5\n log_image_interval: int = 250\n ckpt_dir: str = """"\n log_checkpoint_interval: int = 25000\n log_checkpoint_keep_period: int = 20000\n log_gradients: bool = False\n wandb_id: str = """"\n\n\nargs = tyro.cli(Args)\n\n\ndef dynamics_loss_fn(params, state, inputs):\n """"""Compute masked dynamics loss""""""\n inputs[""videos""] = inputs[""videos""].astype(args.dtype) / 255.0\n outputs = state.apply_fn(\n params,\n inputs,\n training=True,\n rngs={""params"": inputs[""rng""], ""dropout"": inputs[""dropout_rng""]},\n )\n mask = outputs[""mask""]\n outputs[""token_logits""] = outputs[""token_logits""].astype(jnp.float32)\n outputs[""recon""] = outputs[""recon""].astype(jnp.float32)\n logits = outputs[""token_logits""]\n targets = outputs[""video_tokens""]\n\n # if not args.use_maskgit:\n # logits = outputs[""token_logits""][:, :, :-1]\n # targets = outputs[""video_tokens""][:, :, 1:]\n # mask = outputs[""mask""][:, :, 1:]\n\n ce_loss = optax.softmax_cross_entropy_with_integer_labels(logits, targets)\n ce_loss = (mask * ce_loss).sum() / mask.sum()\n acc = logits.argmax(-1) == targets\n acc = (mask * acc).sum() / mask.sum()\n select_probs = jax.nn.softmax(logits)\n gt = inputs[""videos""].clip(0, 1).reshape(-1, *inputs[""videos""].shape[2:])\n recon = outputs[""recon""].clip(0, 1).reshape(-1, *outputs[""recon""].shape[2:])\n psnr = pix.psnr(gt, recon).mean() # type: ignore\n ssim = pix.ssim(gt, recon).mean() # type: ignore\n psnr = pix.psnr(gt, recon).mean() # type: ignore\n ssim = pix.ssim(gt, recon).mean() # type: ignore\n _, index_counts_lam 
= jnp.unique_counts(\n jnp.ravel(outputs[""lam_indices""]), size=args.num_latent_actions, fill_value=0\n )\n _, index_counts_tokenizer = jnp.unique_counts(\n jnp.ravel(outputs[""video_tokens""]), size=args.num_patch_latents, fill_value=0\n )\n codebook_usage_lam = (index_counts_lam != 0).mean()\n codebook_usage_tokenizer = (index_counts_tokenizer != 0).mean()\n metrics = dict(\n cross_entropy_loss=ce_loss,\n masked_token_accuracy=acc,\n select_logit=logits.max(-1).mean(),\n select_p=select_probs.max(-1).mean(),\n entropy=jax.scipy.special.entr(select_probs).sum(-1).mean(),\n psnr=psnr,\n ssim=ssim,\n codebook_usage_lam=codebook_usage_lam,\n codebook_usage_tokenizer=codebook_usage_tokenizer,\n )\n return ce_loss, (outputs[""recon""], metrics)\n\n\n@jax.jit\ndef train_step(state, inputs):\n """"""Update state and compute metrics""""""\n grad_fn = jax.value_and_grad(dynamics_loss_fn, has_aux=True, allow_int=True)\n (loss, (recon, metrics)), grads = grad_fn(state.params, state, inputs)\n state = state.apply_gradients(grads=grads)\n if args.log_gradients:\n metrics[""gradients_std/""] = jax.tree.map(\n lambda x: x.std(), grads[""params""][""dynamics""]\n )\n return state, loss, recon, metrics\n\n\nif __name__ == ""__main__"":\n jax.distributed.initialize()\n num_devices = jax.device_count()\n if num_devices == 0:\n raise ValueError(""No JAX devices found."")\n print(f""Running on {num_devices} devices."")\n\n if args.batch_size % num_devices != 0:\n raise ValueError(\n f""Global batch size {args.batch_size} must be divisible by ""\n f""number of devices {num_devices}.""\n )\n\n per_device_batch_size_for_init = args.batch_size // num_devices\n\n rng = jax.random.PRNGKey(args.seed)\n\n # --- Initialize model ---\n genie = Genie(\n # Tokenizer\n in_dim=args.image_channels,\n tokenizer_dim=args.tokenizer_dim,\n tokenizer_ffn_dim=args.tokenizer_ffn_dim,\n latent_patch_dim=args.latent_patch_dim,\n num_patch_latents=args.num_patch_latents,\n patch_size=args.patch_size,\n tokenizer_num_blocks=args.tokenizer_num_blocks,\n tokenizer_num_heads=args.tokenizer_num_heads,\n # LAM\n lam_dim=args.lam_dim,\n lam_ffn_dim=args.lam_ffn_dim,\n latent_action_dim=args.latent_action_dim,\n num_latent_actions=args.num_latent_actions,\n lam_patch_size=args.lam_patch_size,\n lam_num_blocks=args.lam_num_blocks,\n lam_num_heads=args.lam_num_heads,\n lam_co_train=not args.lam_checkpoint,\n # Dynamics\n dyna_dim=args.dyna_dim,\n dyna_ffn_dim=args.dyna_ffn_dim,\n dyna_num_blocks=args.dyna_num_blocks,\n dyna_num_heads=args.dyna_num_heads,\n dropout=args.dropout,\n mask_limit=args.mask_limit,\n use_maskgit=args.use_maskgit,\n param_dtype=args.param_dtype,\n dtype=args.dtype,\n use_flash_attention=args.use_flash_attention,\n )\n rng, _rng = jax.random.split(rng)\n image_shape = (args.image_height, args.image_width, args.image_channels)\n dummy_inputs = dict(\n videos=jnp.zeros(\n (per_device_batch_size_for_init, args.seq_len, *image_shape),\n dtype=args.dtype,\n ),\n action=jnp.zeros(\n (per_device_batch_size_for_init, args.seq_len), dtype=args.dtype\n ),\n mask_rng=_rng,\n )\n rng, _rng = jax.random.split(rng)\n init_params = genie.init(_rng, dummy_inputs)\n\n param_counts = count_parameters_by_component(init_params)\n\n if args.log and jax.process_index() == 0:\n wandb_init_kwargs = {\n ""entity"": args.entity,\n ""project"": args.project,\n ""name"": args.name,\n ""tags"": args.tags,\n ""group"": ""debug"",\n ""config"": args,\n }\n\n if args.wandb_id:\n wandb_init_kwargs.update(\n {\n ""id"": args.wandb_id,\n ""resume"": 
""allow"",\n }\n )\n wandb.init(**wandb_init_kwargs)\n\n wandb.config.update({""model_param_count"": param_counts})\n\n print(""Parameter counts:"")\n print(param_counts)\n\n # --- Initialize optimizer ---\n lr_schedule = get_lr_schedule(\n args.lr_schedule,\n args.init_lr,\n args.max_lr,\n args.decay_end,\n args.num_steps,\n args.warmup_steps,\n args.wsd_decay_steps,\n )\n tx = optax.adamw(\n learning_rate=lr_schedule,\n b1=0.9,\n b2=0.9,\n weight_decay=1e-4,\n mu_dtype=args.dtype,\n )\n train_state = TrainState.create(apply_fn=genie.apply, params=init_params, tx=tx)\n\n device_mesh_arr = create_device_mesh((num_devices,))\n mesh = Mesh(devices=device_mesh_arr, axis_names=(""data"",))\n\n replicated_sharding = NamedSharding(mesh, PartitionSpec())\n videos_sharding = NamedSharding(mesh, PartitionSpec(""data"", None, None, None, None))\n train_state = jax.device_put(train_state, replicated_sharding)\n\n # --- Initialize checkpoint manager ---\n step = 0\n handler_registry = ocp.handlers.DefaultCheckpointHandlerRegistry()\n handler_registry.add(\n ""model_state"", ocp.args.StandardSave, ocp.handlers.StandardCheckpointHandler\n )\n handler_registry.add(\n ""model_state"", ocp.args.StandardRestore, ocp.handlers.StandardCheckpointHandler\n )\n handler_registry.add(""dataloader_state"", grain.checkpoint.CheckpointSave, grain.checkpoint.CheckpointHandler) # type: ignore\n handler_registry.add(""dataloader_state"", grain.checkpoint.CheckpointRestore, grain.checkpoint.CheckpointHandler) # type: ignore\n\n checkpoint_options = ocp.CheckpointManagerOptions(\n save_interval_steps=args.log_checkpoint_interval,\n max_to_keep=3,\n keep_period=args.log_checkpoint_keep_period,\n step_format_fixed_length=6,\n cleanup_tmp_directories=True,\n )\n\n checkpoint_manager = ocp.CheckpointManager(\n args.ckpt_dir,\n options=checkpoint_options,\n handler_registry=handler_registry,\n )\n\n # --- Create DataLoaderIterator from dataloader ---\n array_record_files = [\n os.path.join(args.data_dir, x)\n for x in os.listdir(args.data_dir)\n if x.endswith("".array_record"")\n ]\n grain_dataloader = get_dataloader(\n array_record_files,\n args.seq_len,\n # NOTE: We deliberately pass the global batch size\n # The dataloader shards the dataset across all processes\n args.batch_size,\n *image_shape,\n num_workers=8,\n prefetch_buffer_size=1,\n seed=args.seed,\n )\n initial_state = grain_dataloader._create_initial_state()\n grain_iterator = grain.DataLoaderIterator(grain_dataloader, initial_state)\n\n # --- Restore checkpoint ---\n if args.restore_ckpt:\n # Restore full dynamics model\n abstract_train_state = jax.tree_util.tree_map(\n ocp.utils.to_shape_dtype_struct, train_state\n )\n restored = checkpoint_manager.restore(\n checkpoint_manager.latest_step(),\n args=ocp.args.Composite(\n model_state=ocp.args.StandardRestore(abstract_train_state),\n dataloader_state=grain.checkpoint.CheckpointRestore(grain_iterator),\n ),\n )\n train_state = restored[""model_state""]\n grain_iterator = restored[""dataloader_state""]\n step = checkpoint_manager.latest_step() or 0\n print(f""Restored dataloader and model state from step {step}"")\n else:\n # Restore from pre-trained tokenizer (and LAM)\n train_state = restore_genie_components(\n train_state, replicated_sharding, dummy_inputs, rng, args\n )\n\n # --- TRAIN LOOP ---\n dataloader = (jax.make_array_from_process_local_data(videos_sharding, elem) for elem in grain_iterator) # type: ignore\n while step < args.num_steps:\n for videos in dataloader:\n # for i in range(videos.shape[0]):\n # 
video_i = videos[i:i+1] # shape (1, T, H, W, C)\n # np.save(f""overfit_dir/oai_sample_seed69_{i}.npy"", video_i)\n # jax.debug.breakpoint()\n # videos = np.load(""overfit_dir/oai_sample_seed69_1.npy"") # *255.\n # videos = videos.astype(np.uint8)\n # videos = jax.make_array_from_process_local_data(videos_sharding, videos)\n # while True:\n # --- Train step ---\n rng, _rng, _rng_dropout, _rng_mask = jax.random.split(rng, 4)\n\n inputs = dict(\n videos=videos,\n rng=_rng,\n dropout_rng=_rng_dropout,\n mask_rng=_rng_mask,\n )\n train_state, loss, recon, metrics = train_step(train_state, inputs)\n metrics[""lr""] = lr_schedule(step)\n print(f""Step {step}, loss: {loss}"")\n step += 1\n\n # --- Logging ---\n if args.log:\n if step % args.log_interval == 0 and jax.process_index() == 0:\n wandb.log(\n {\n ""loss"": loss,\n ""step"": step,\n **metrics,\n }\n )\n if step % args.log_image_interval == 0:\n gt_seq = inputs[""videos""][0].astype(jnp.float32) / 255.0\n recon_seq = recon[0].clip(0, 1)\n comparison_seq = jnp.concatenate((gt_seq, recon_seq), axis=1)\n comparison_seq = einops.rearrange(\n comparison_seq * 255, ""t h w c -> h (t w) c""\n )\n if jax.process_index() == 0:\n log_images = dict(\n image=wandb.Image(np.asarray(gt_seq[args.seq_len - 1])),\n recon=wandb.Image(np.asarray(recon_seq[args.seq_len - 1])),\n true_vs_recon=wandb.Image(\n np.asarray(comparison_seq.astype(np.uint8))\n ),\n )\n wandb.log(log_images)\n # --- Checkpointing ---\n if args.save_ckpt and step % args.log_checkpoint_interval == 0:\n checkpoint_manager.save(\n step,\n args=ocp.args.Composite(\n model_state=ocp.args.StandardSave(train_state),\n dataloader_state=grain.checkpoint.CheckpointSave(\n grain_iterator\n ),\n ),\n )\n print(f""Saved checkpoint at step {step}"")\n if step >= args.num_steps:\n break\n\n checkpoint_manager.close()\n",python,tab +5193,5464425,"train_dynamics.py",887,0,"",python,selection_mouse +5194,5464459,"train_dynamics.py",886,0,"",python,selection_command +5195,5465445,"train_dynamics.py",849,0,"",python,selection_mouse +5196,5465555,"train_dynamics.py",842,12,"restore_ckpt",python,selection_mouse +5197,5471327,"slurm/dev/mihir/horeka/overfit_sample/causal/dynamics_overfit_sample.sbatch",0,0,"#!/usr/bin/env bash\n\n#SBATCH --nodes=1\n#SBATCH --ntasks-per-node=1\n#SBATCH --time=01:00:00\n#SBATCH --partition=accelerated\n#SBATCH --cpus-per-task=5\n#SBATCH --gres=gpu:1\n#SBATCH --output=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir/yoloruns/%x_%j.log\n#SBATCH --error=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir/yoloruns/%x_%j.log\n#SBATCH --job-name=train_dynamics_overfit_sample_causal\n\n# Log the sbatch script\ncat $0\n\nmodule unload mpi/openmpi/5.0\nmodule unload devel/cuda/12.4\n# source .venv/bin/activate\n\narray_records_dir=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_new/open_ai_minecraft_arrayrecords_chunked\n\njob_name=$SLURM_JOB_NAME\nslurm_job_id=$SLURM_JOB_ID\n\nCHECKPOINT_DIR=$ws_dir/checkpoints/causal/overfit-seed69-1-no-noise/$job_name/$slurm_job_id\nmkdir -p $CHECKPOINT_DIR\n\ntokenizer_ckpt_dir=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data/checkpoints/train_tokenizer_lr_sweep_1e-4_larger_ffn/\n\n\nenv | grep SLURM\n\nsrun python train_dynamics.py \\n --save_ckpt \\n --seed=69 \\n --num_steps=5000 \\n --warmup_steps=0 \\n --wsd_decay_steps=0 \\n --ckpt_dir $CHECKPOINT_DIR \\n --batch_size=1 \\n --init_lr=1e-4 \\n --max_lr=1e-4 \\n --log_image_interval=100 \\n --log \\n 
--log_checkpoint_interval=1000 \\n --name=dynamics-causal-overfit-no-noise$slurm_job_id \\n --tags dynamics causal overfit \\n --entity instant-uv \\n --project jafar \\n --tokenizer_checkpoint=$tokenizer_ckpt_dir \\n --data_dir $array_records_dir \\n --dyna_dim=128 \\n --dyna_num_blocks=2 \\n --dyna_num_heads=4\n ",shellscript,tab +5198,5472790,"slurm/jobs/mihir/horeka/mask_prob_fix/train_dynamics_8_nodes.sbatch",0,0,"",shellscript,tab +5199,5474947,"slurm/jobs/mihir/horeka/mask_prob_fix/train_dynamics_8_nodes.sbatch",2032,0,"",shellscript,selection_mouse +5200,5474953,"slurm/jobs/mihir/horeka/mask_prob_fix/train_dynamics_8_nodes.sbatch",2031,0,"",shellscript,selection_command +5201,5475874,"slurm/jobs/mihir/horeka/mask_prob_fix/train_dynamics_8_nodes.sbatch",2032,0,"\n ",shellscript,content +5202,5476852,"slurm/jobs/mihir/horeka/mask_prob_fix/train_dynamics_8_nodes.sbatch",2037,0,"restore_ckpt",shellscript,content +5203,5478268,"slurm/jobs/mihir/horeka/mask_prob_fix/train_dynamics_8_nodes.sbatch",2037,0,"-",shellscript,content +5204,5478270,"slurm/jobs/mihir/horeka/mask_prob_fix/train_dynamics_8_nodes.sbatch",2038,0,"",shellscript,selection_keyboard +5205,5478424,"slurm/jobs/mihir/horeka/mask_prob_fix/train_dynamics_8_nodes.sbatch",2038,0,"-",shellscript,content +5206,5478425,"slurm/jobs/mihir/horeka/mask_prob_fix/train_dynamics_8_nodes.sbatch",2039,0,"",shellscript,selection_keyboard +5207,5479400,"slurm/jobs/mihir/horeka/mask_prob_fix/train_dynamics_8_nodes.sbatch",2051,0," ",shellscript,content +5208,5479400,"slurm/jobs/mihir/horeka/mask_prob_fix/train_dynamics_8_nodes.sbatch",2052,0,"",shellscript,selection_keyboard +5209,5479786,"slurm/jobs/mihir/horeka/mask_prob_fix/train_dynamics_8_nodes.sbatch",2052,0,"\",shellscript,content +5210,5479787,"slurm/jobs/mihir/horeka/mask_prob_fix/train_dynamics_8_nodes.sbatch",2053,0,"",shellscript,selection_keyboard +5211,5480097,"slurm/jobs/mihir/horeka/mask_prob_fix/train_dynamics_8_nodes.sbatch",2052,0,"",shellscript,selection_command +5212,5480366,"slurm/jobs/mihir/horeka/mask_prob_fix/train_dynamics_8_nodes.sbatch",2073,0,"",shellscript,selection_command +5213,5480696,"slurm/jobs/mihir/horeka/mask_prob_fix/train_dynamics_8_nodes.sbatch",2058,0,"",shellscript,selection_command +5214,5481812,"slurm/jobs/mihir/horeka/mask_prob_fix/train_dynamics_8_nodes.sbatch",2057,0,"",shellscript,selection_command +5215,5482811,"slurm/jobs/mihir/horeka/mask_prob_fix/train_dynamics_8_nodes.sbatch",2054,25,"",shellscript,content +5216,5482825,"slurm/jobs/mihir/horeka/mask_prob_fix/train_dynamics_8_nodes.sbatch",2058,0,"",shellscript,selection_command +5217,5488274,"slurm/jobs/mihir/horeka/mask_prob_fix/train_dynamics_8_nodes.sbatch",1168,0,"",shellscript,selection_mouse +5218,5488597,"slurm/jobs/mihir/horeka/mask_prob_fix/train_dynamics_8_nodes.sbatch",1167,1,"\n",shellscript,selection_mouse +5219,5488633,"slurm/jobs/mihir/horeka/mask_prob_fix/train_dynamics_8_nodes.sbatch",1130,38,"restore_ckpt_flag=""--restore-ckpt""\nfi\n",shellscript,selection_mouse +5220,5488662,"slurm/jobs/mihir/horeka/mask_prob_fix/train_dynamics_8_nodes.sbatch",1125,43,"\n restore_ckpt_flag=""--restore-ckpt""\nfi\n",shellscript,selection_mouse +5221,5488697,"slurm/jobs/mihir/horeka/mask_prob_fix/train_dynamics_8_nodes.sbatch",1083,85,"restore_ckpt_flag=""--no-restore-ckpt""\nelse\n restore_ckpt_flag=""--restore-ckpt""\nfi\n",shellscript,selection_mouse +5222,5488737,"slurm/jobs/mihir/horeka/mask_prob_fix/train_dynamics_8_nodes.sbatch",1082,86," 
restore_ckpt_flag=""--no-restore-ckpt""\nelse\n restore_ckpt_flag=""--restore-ckpt""\nfi\n",shellscript,selection_mouse +5223,5489480,"slurm/jobs/mihir/horeka/mask_prob_fix/train_dynamics_8_nodes.sbatch",1081,0,"",shellscript,selection_mouse +5224,5489630,"slurm/jobs/mihir/horeka/mask_prob_fix/train_dynamics_8_nodes.sbatch",1079,4," ",shellscript,selection_mouse +5225,5489887,"slurm/jobs/mihir/horeka/mask_prob_fix/train_dynamics_8_nodes.sbatch",1079,46," restore_ckpt_flag=""--no-restore-ckpt""\nelse",shellscript,selection_mouse +5226,5489952,"slurm/jobs/mihir/horeka/mask_prob_fix/train_dynamics_8_nodes.sbatch",1079,68," restore_ckpt_flag=""--no-restore-ckpt""\nelse\n restore_ckpt_flag",shellscript,selection_mouse +5227,5490062,"slurm/jobs/mihir/horeka/mask_prob_fix/train_dynamics_8_nodes.sbatch",1079,88," restore_ckpt_flag=""--no-restore-ckpt""\nelse\n restore_ckpt_flag=""--restore-ckpt""\nfi",shellscript,selection_mouse +5228,5490262,"slurm/jobs/mihir/horeka/mask_prob_fix/train_dynamics_8_nodes.sbatch",1167,0,"",shellscript,selection_mouse +5229,5490270,"slurm/jobs/mihir/horeka/mask_prob_fix/train_dynamics_8_nodes.sbatch",1166,0,"",shellscript,selection_command +5230,5490477,"slurm/jobs/mihir/horeka/mask_prob_fix/train_dynamics_8_nodes.sbatch",1165,2,"fi",shellscript,selection_mouse +5231,5490483,"slurm/jobs/mihir/horeka/mask_prob_fix/train_dynamics_8_nodes.sbatch",1166,1,"i",shellscript,selection_command +5232,5490637,"slurm/jobs/mihir/horeka/mask_prob_fix/train_dynamics_8_nodes.sbatch",1130,36,"restore_ckpt_flag=""--restore-ckpt""\nf",shellscript,selection_mouse +5233,5490681,"slurm/jobs/mihir/horeka/mask_prob_fix/train_dynamics_8_nodes.sbatch",1125,41,"\n restore_ckpt_flag=""--restore-ckpt""\nf",shellscript,selection_mouse +5234,5490756,"slurm/jobs/mihir/horeka/mask_prob_fix/train_dynamics_8_nodes.sbatch",1082,84," restore_ckpt_flag=""--no-restore-ckpt""\nelse\n restore_ckpt_flag=""--restore-ckpt""\nf",shellscript,selection_mouse +5235,5490834,"slurm/jobs/mihir/horeka/mask_prob_fix/train_dynamics_8_nodes.sbatch",1081,85," restore_ckpt_flag=""--no-restore-ckpt""\nelse\n restore_ckpt_flag=""--restore-ckpt""\nf",shellscript,selection_mouse +5236,5490931,"slurm/jobs/mihir/horeka/mask_prob_fix/train_dynamics_8_nodes.sbatch",1047,119," [ $restart_count -eq 0 ]; then\n restore_ckpt_flag=""--no-restore-ckpt""\nelse\n restore_ckpt_flag=""--restore-ckpt""\nf",shellscript,selection_mouse +5237,5490952,"slurm/jobs/mihir/horeka/mask_prob_fix/train_dynamics_8_nodes.sbatch",1045,121,"if [ $restart_count -eq 0 ]; then\n restore_ckpt_flag=""--no-restore-ckpt""\nelse\n restore_ckpt_flag=""--restore-ckpt""\nf",shellscript,selection_mouse +5238,5491432,"slurm/jobs/mihir/horeka/mask_prob_fix/train_dynamics_8_nodes.sbatch",1045,0,"",shellscript,selection_mouse +5239,5491433,"slurm/jobs/mihir/horeka/mask_prob_fix/train_dynamics_8_nodes.sbatch",1045,2,"if",shellscript,selection_mouse +5240,5491622,"slurm/jobs/mihir/horeka/mask_prob_fix/train_dynamics_8_nodes.sbatch",1045,35,"if [ $restart_count -eq 0 ]; then\n ",shellscript,selection_mouse +5241,5491647,"slurm/jobs/mihir/horeka/mask_prob_fix/train_dynamics_8_nodes.sbatch",1045,36,"if [ $restart_count -eq 0 ]; then\n ",shellscript,selection_mouse +5242,5491692,"slurm/jobs/mihir/horeka/mask_prob_fix/train_dynamics_8_nodes.sbatch",1045,80,"if [ $restart_count -eq 0 ]; then\n restore_ckpt_flag=""--no-restore-ckpt""\nelse",shellscript,selection_mouse +5243,5491724,"slurm/jobs/mihir/horeka/mask_prob_fix/train_dynamics_8_nodes.sbatch",1045,84,"if [ $restart_count 
-eq 0 ]; then\n restore_ckpt_flag=""--no-restore-ckpt""\nelse\n ",shellscript,selection_mouse +5244,5491760,"slurm/jobs/mihir/horeka/mask_prob_fix/train_dynamics_8_nodes.sbatch",1045,85,"if [ $restart_count -eq 0 ]; then\n restore_ckpt_flag=""--no-restore-ckpt""\nelse\n ",shellscript,selection_mouse +5245,5491792,"slurm/jobs/mihir/horeka/mask_prob_fix/train_dynamics_8_nodes.sbatch",1045,122,"if [ $restart_count -eq 0 ]; then\n restore_ckpt_flag=""--no-restore-ckpt""\nelse\n restore_ckpt_flag=""--restore-ckpt""\nfi",shellscript,selection_mouse +5246,5491828,"slurm/jobs/mihir/horeka/mask_prob_fix/train_dynamics_8_nodes.sbatch",1045,123,"if [ $restart_count -eq 0 ]; then\n restore_ckpt_flag=""--no-restore-ckpt""\nelse\n restore_ckpt_flag=""--restore-ckpt""\nfi\n",shellscript,selection_mouse +5247,5492408,"slurm/jobs/mihir/horeka/mask_prob_fix/train_dynamics_8_nodes.sbatch",1168,0,"",shellscript,selection_mouse +5248,5492615,"slurm/jobs/mihir/horeka/mask_prob_fix/train_dynamics_8_nodes.sbatch",1167,1,"\n",shellscript,selection_mouse +5249,5492640,"slurm/jobs/mihir/horeka/mask_prob_fix/train_dynamics_8_nodes.sbatch",1132,36,"store_ckpt_flag=""--restore-ckpt""\nfi\n",shellscript,selection_mouse +5250,5492672,"slurm/jobs/mihir/horeka/mask_prob_fix/train_dynamics_8_nodes.sbatch",1131,37,"estore_ckpt_flag=""--restore-ckpt""\nfi\n",shellscript,selection_mouse +5251,5492707,"slurm/jobs/mihir/horeka/mask_prob_fix/train_dynamics_8_nodes.sbatch",1125,43,"\n restore_ckpt_flag=""--restore-ckpt""\nfi\n",shellscript,selection_mouse +5252,5492808,"slurm/jobs/mihir/horeka/mask_prob_fix/train_dynamics_8_nodes.sbatch",1083,85,"restore_ckpt_flag=""--no-restore-ckpt""\nelse\n restore_ckpt_flag=""--restore-ckpt""\nfi\n",shellscript,selection_mouse +5253,5492885,"slurm/jobs/mihir/horeka/mask_prob_fix/train_dynamics_8_nodes.sbatch",1082,86," restore_ckpt_flag=""--no-restore-ckpt""\nelse\n restore_ckpt_flag=""--restore-ckpt""\nfi\n",shellscript,selection_mouse +5254,5492962,"slurm/jobs/mihir/horeka/mask_prob_fix/train_dynamics_8_nodes.sbatch",1081,87," restore_ckpt_flag=""--no-restore-ckpt""\nelse\n restore_ckpt_flag=""--restore-ckpt""\nfi\n",shellscript,selection_mouse +5255,5492963,"slurm/jobs/mihir/horeka/mask_prob_fix/train_dynamics_8_nodes.sbatch",1047,121," [ $restart_count -eq 0 ]; then\n restore_ckpt_flag=""--no-restore-ckpt""\nelse\n restore_ckpt_flag=""--restore-ckpt""\nfi\n",shellscript,selection_mouse +5256,5493400,"slurm/jobs/mihir/horeka/mask_prob_fix/train_dynamics_8_nodes.sbatch",1047,0,"",shellscript,selection_mouse +5257,5493401,"slurm/jobs/mihir/horeka/mask_prob_fix/train_dynamics_8_nodes.sbatch",1045,2,"if",shellscript,selection_mouse +5258,5493556,"slurm/jobs/mihir/horeka/mask_prob_fix/train_dynamics_8_nodes.sbatch",1045,34,"if [ $restart_count -eq 0 ]; then\n",shellscript,selection_mouse +5259,5493732,"slurm/jobs/mihir/horeka/mask_prob_fix/train_dynamics_8_nodes.sbatch",1045,76,"if [ $restart_count -eq 0 ]; then\n restore_ckpt_flag=""--no-restore-ckpt""\n",shellscript,selection_mouse +5260,5493758,"slurm/jobs/mihir/horeka/mask_prob_fix/train_dynamics_8_nodes.sbatch",1045,81,"if [ $restart_count -eq 0 ]; then\n restore_ckpt_flag=""--no-restore-ckpt""\nelse\n",shellscript,selection_mouse +5261,5493842,"slurm/jobs/mihir/horeka/mask_prob_fix/train_dynamics_8_nodes.sbatch",1045,120,"if [ $restart_count -eq 0 ]; then\n restore_ckpt_flag=""--no-restore-ckpt""\nelse\n restore_ckpt_flag=""--restore-ckpt""\n",shellscript,selection_mouse 
+5262,5493920,"slurm/jobs/mihir/horeka/mask_prob_fix/train_dynamics_8_nodes.sbatch",1045,123,"if [ $restart_count -eq 0 ]; then\n restore_ckpt_flag=""--no-restore-ckpt""\nelse\n restore_ckpt_flag=""--restore-ckpt""\nfi\n",shellscript,selection_mouse +5263,5494200,"slurm/jobs/mihir/horeka/mask_prob_fix/train_dynamics_8_nodes.sbatch",1167,0,"",shellscript,selection_mouse +5264,5494201,"slurm/jobs/mihir/horeka/mask_prob_fix/train_dynamics_8_nodes.sbatch",1166,0,"",shellscript,selection_command +5265,5507346,"slurm/jobs/mihir/horeka/mask_prob_fix/train_dynamics_8_nodes.sbatch",2076,0,"",shellscript,selection_mouse +5266,5507492,"slurm/jobs/mihir/horeka/mask_prob_fix/train_dynamics_8_nodes.sbatch",2070,12,"SLURM_JOB_ID",shellscript,selection_mouse +5267,5520842,"TERMINAL",0,0,"bash",,terminal_focus +5268,5522654,"TERMINAL",0,0,"queue",,terminal_command +5269,5522731,"TERMINAL",0,0,"]633;E;2025-07-27 11:31:48 queue;adbf53fe-397b-40d3-9339-94ea79afad56]633;C[?1049h(B[?7hEvery 1.0s: squeue --mehkn1993.localdomain: Sun Jul 27 11:31:48 2025JOBID PARTITION NAME USER ST\tTIME NODES NODELIST(REASON)3373407 accelerat train_dy tum_cte0 PD\t0:00\t 2 (Priority)3373408 accelerat train_dy tum_cte0 PD\t0:00\t 8 (Priority)3371238 accelerat train_dy tum_cte0 R 1-01:46:19\t 2 hkn[0714,0716]",,terminal_output +5270,5523774,"TERMINAL",0,0,"920",,terminal_output +5271,5524798,"TERMINAL",0,0,"501",,terminal_output +5272,5525857,"TERMINAL",0,0,"12",,terminal_output +5273,5526839,"TERMINAL",0,0,"[?1049l\r[?1l>]0;tum_cte0515@hkn1993:~/Projects/jafar]633;D;0",,terminal_output +5274,5528013,"TERMINAL",0,0,"fqueue",,terminal_command +5275,5528130,"TERMINAL",0,0,"]633;E;2025-07-27 11:31:54 fqueue;adbf53fe-397b-40d3-9339-94ea79afad56]633;C[?1049h(B[?7hEvery 1.0s: squeue -o ""%.10i %.16P %.30j %.8u %.8T %.10M %.9l %.6D %R""hkn1993.localdomain: Sun Jul 27 11:31:54 2025JOBIDPARTITIONNAME USER STATE\t TIME TIME_LIMI NODES NODELIST(REASON)3373407\taccelerated train_dynamics_causal_2_node tum_cte0 PENDING\t 0:00 2-00:00:00\t2 (Priority)3373408\taccelerated train_dynamics_causal_8_node tum_cte0 PENDING\t 0:00 2-00:00:00\t8 (Priority)3371238\taccelerated train_dynamics_maskprob_fix_2_ tum_cte0 RUNNING 1-01:46:25 2-00:00:00\t2 hkn[0714,0716]",,terminal_output +5276,5529194,"TERMINAL",0,0,"56",,terminal_output +5277,5530151,"TERMINAL",0,0,"67",,terminal_output +5278,5531273,"TERMINAL",0,0,"78",,terminal_output +5279,5532213,"TERMINAL",0,0,"89",,terminal_output +5280,5533321,"TERMINAL",0,0,"930",,terminal_output +5281,5534305,"TERMINAL",0,0,"2:001",,terminal_output +5282,5535314,"TERMINAL",0,0,"12",,terminal_output +5283,5536338,"TERMINAL",0,0,"23",,terminal_output +5284,5537416,"TERMINAL",0,0,"34",,terminal_output +5285,5538402,"TERMINAL",0,0,"45",,terminal_output +5286,5539437,"TERMINAL",0,0,"56",,terminal_output +5287,5540489,"TERMINAL",0,0,"67",,terminal_output +5288,5541623,"TERMINAL",0,0,"78",,terminal_output +5289,5542562,"TERMINAL",0,0,"89",,terminal_output +5290,5542878,"slurm/jobs/mihir/horeka/mask_prob_fix/train_dynamics_8_nodes.sbatch",0,0,"",shellscript,tab +5291,5543607,"TERMINAL",0,0,"940",,terminal_output +5292,5544619,"TERMINAL",0,0,"101",,terminal_output +5293,5545831,"TERMINAL",0,0,"12",,terminal_output +5294,5546098,"slurm/jobs/mihir/horeka/mask_prob_fix/train_dynamics_8_nodes.sbatch",2070,12,"",shellscript,content +5295,5546692,"slurm/jobs/mihir/horeka/mask_prob_fix/train_dynamics_8_nodes.sbatch",2069,1,"",shellscript,content +5296,5546696,"TERMINAL",0,0,"23",,terminal_output 
+5297,5547058,"slurm/jobs/mihir/horeka/mask_prob_fix/train_dynamics_8_nodes.sbatch",2069,0,"3371237",shellscript,content +5298,5547687,"TERMINAL",0,0,"34",,terminal_output +5299,5548717,"TERMINAL",0,0,"45",,terminal_output +5300,5548846,"slurm/jobs/mihir/horeka/mask_prob_fix/train_dynamics_8_nodes.sbatch",2068,1,"",shellscript,content +5301,5549629,"slurm/jobs/mihir/horeka/mask_prob_fix/train_dynamics_8_nodes.sbatch",2068,0,"=",shellscript,content +5302,5549629,"slurm/jobs/mihir/horeka/mask_prob_fix/train_dynamics_8_nodes.sbatch",2069,0,"",shellscript,selection_keyboard +5303,5549753,"TERMINAL",0,0,"56",,terminal_output +5304,5550797,"TERMINAL",0,0,"67",,terminal_output +5305,5551816,"TERMINAL",0,0,"78",,terminal_output +5306,5552850,"TERMINAL",0,0,"89",,terminal_output +5307,5554012,"TERMINAL",0,0,"950",,terminal_output +5308,5554913,"TERMINAL",0,0,"201",,terminal_output +5309,5556013,"TERMINAL",0,0,"12",,terminal_output +5310,5557080,"TERMINAL",0,0,"23",,terminal_output +5311,5558103,"TERMINAL",0,0,"35",,terminal_output +5312,5559135,"TERMINAL",0,0,"56",,terminal_output +5313,5560151,"TERMINAL",0,0,"67",,terminal_output +5314,5561182,"TERMINAL",0,0,"78",,terminal_output +5315,5562301,"TERMINAL",0,0,"89",,terminal_output +5316,5563225,"TERMINAL",0,0,"97:00",,terminal_output +5317,5564217,"TERMINAL",0,0,"301",,terminal_output +5318,5565373,"TERMINAL",0,0,"12",,terminal_output +5319,5566277,"TERMINAL",0,0,"23",,terminal_output +5320,5567472,"TERMINAL",0,0,"34",,terminal_output +5321,5568581,"TERMINAL",0,0,"45",,terminal_output +5322,5569378,"TERMINAL",0,0,"56",,terminal_output +5323,5570497,"TERMINAL",0,0,"67",,terminal_output +5324,5571591,"TERMINAL",0,0,"78",,terminal_output +5325,5572647,"TERMINAL",0,0,"89",,terminal_output +5326,5573668,"TERMINAL",0,0,"910",,terminal_output +5327,5574593,"TERMINAL",0,0,"401",,terminal_output +5328,5575592,"TERMINAL",0,0,"12",,terminal_output +5329,5576743,"TERMINAL",0,0,"23",,terminal_output +5330,5577640,"TERMINAL",0,0,"34",,terminal_output +5331,5578079,"slurm/jobs/mihir/horeka/mask_prob_fix/train_dynamics_8_nodes.sbatch",2294,0,"",shellscript,selection_mouse +5332,5578227,"slurm/jobs/mihir/horeka/mask_prob_fix/train_dynamics_8_nodes.sbatch",2294,1,"$",shellscript,selection_mouse +5333,5578239,"slurm/jobs/mihir/horeka/mask_prob_fix/train_dynamics_8_nodes.sbatch",2294,2,"$s",shellscript,selection_mouse +5334,5578255,"slurm/jobs/mihir/horeka/mask_prob_fix/train_dynamics_8_nodes.sbatch",2294,3,"$sl",shellscript,selection_mouse +5335,5578277,"slurm/jobs/mihir/horeka/mask_prob_fix/train_dynamics_8_nodes.sbatch",2294,4,"$slu",shellscript,selection_mouse +5336,5578311,"slurm/jobs/mihir/horeka/mask_prob_fix/train_dynamics_8_nodes.sbatch",2294,5,"$slur",shellscript,selection_mouse +5337,5578344,"slurm/jobs/mihir/horeka/mask_prob_fix/train_dynamics_8_nodes.sbatch",2294,6,"$slurm",shellscript,selection_mouse +5338,5578426,"slurm/jobs/mihir/horeka/mask_prob_fix/train_dynamics_8_nodes.sbatch",2294,7,"$slurm_",shellscript,selection_mouse +5339,5578433,"slurm/jobs/mihir/horeka/mask_prob_fix/train_dynamics_8_nodes.sbatch",2294,8,"$slurm_j",shellscript,selection_mouse +5340,5578460,"slurm/jobs/mihir/horeka/mask_prob_fix/train_dynamics_8_nodes.sbatch",2294,9,"$slurm_jo",shellscript,selection_mouse +5341,5578504,"slurm/jobs/mihir/horeka/mask_prob_fix/train_dynamics_8_nodes.sbatch",2294,10,"$slurm_job",shellscript,selection_mouse +5342,5578578,"slurm/jobs/mihir/horeka/mask_prob_fix/train_dynamics_8_nodes.sbatch",2294,11,"$slurm_job_",shellscript,selection_mouse 
+5343,5578592,"slurm/jobs/mihir/horeka/mask_prob_fix/train_dynamics_8_nodes.sbatch",2294,12,"$slurm_job_i",shellscript,selection_mouse +5344,5578629,"slurm/jobs/mihir/horeka/mask_prob_fix/train_dynamics_8_nodes.sbatch",2294,13,"$slurm_job_id",shellscript,selection_mouse +5345,5578720,"TERMINAL",0,0,"45",,terminal_output +5346,5579514,"slurm/jobs/mihir/horeka/mask_prob_fix/train_dynamics_8_nodes.sbatch",2294,13,"",shellscript,content +5347,5579729,"TERMINAL",0,0,"56",,terminal_output +5348,5579893,"slurm/jobs/mihir/horeka/mask_prob_fix/train_dynamics_8_nodes.sbatch",2294,0,"3371237",shellscript,content +5349,5580737,"TERMINAL",0,0,"67",,terminal_output +5350,5581760,"TERMINAL",0,0,"78",,terminal_output +5351,5582802,"TERMINAL",0,0,"89",,terminal_output +5352,5584012,"TERMINAL",0,0,"920",,terminal_output +5353,5584940,"TERMINAL",0,0,"501",,terminal_output +5354,5585956,"TERMINAL",0,0,"12",,terminal_output +5355,5586941,"TERMINAL",0,0,"23",,terminal_output +5356,5587984,"TERMINAL",0,0,"34",,terminal_output +5357,5589029,"TERMINAL",0,0,"45",,terminal_output +5358,5590162,"TERMINAL",0,0,"57",,terminal_output +5359,5591180,"TERMINAL",0,0,"78",,terminal_output +5360,5592206,"TERMINAL",0,0,"89",,terminal_output +5361,5593183,"TERMINAL",0,0,"930",,terminal_output +5362,5594193,"TERMINAL",0,0,"3:001",,terminal_output +5363,5595238,"TERMINAL",0,0,"12",,terminal_output +5364,5596399,"TERMINAL",0,0,"23",,terminal_output +5365,5596518,"TERMINAL",0,0,"bash",,terminal_focus +5366,5597279,"TERMINAL",0,0,"34",,terminal_output +5367,5598119,"TERMINAL",0,0,"watch",,terminal_focus +5368,5598308,"TERMINAL",0,0,"45",,terminal_output +5369,5599352,"TERMINAL",0,0,"56",,terminal_output +5370,5600040,"TERMINAL",0,0,"[?1049l\r[?1l>]0;tum_cte0515@hkn1993:~/Projects/jafar]633;D;0",,terminal_output +5371,5602427,"TERMINAL",0,0,"runner-2",,terminal_command +5372,5603296,"TERMINAL",0,0,"l",,terminal_command +5373,5604035,"TERMINAL",0,0,"lw",,terminal_command +5374,5604081,"TERMINAL",0,0,"]633;E;2025-07-27 11:33:10 lw;adbf53fe-397b-40d3-9339-94ea79afad56]633;Cbash: lw: command not found...\r\n",,terminal_output +5375,5605284,"TERMINAL",0,0,"]0;tum_cte0515@hkn1993:~/Projects/jafar_jobs_2]633;D;127]633;P;Cwd=/home/hk-project-p0023960/tum_cte0515/Projects/jafar_jobs_2",,terminal_output +5376,5606015,"TERMINAL",0,0,"ls",,terminal_command +5377,5606058,"TERMINAL",0,0,"]633;E;2025-07-27 11:33:12 ls;adbf53fe-397b-40d3-9339-94ea79afad56]633;C",,terminal_output +5378,5606183,"TERMINAL",0,0,"debug generate_dataset.py overfit_dir requirements.txt slurm-3359334.out train_lam.py\r\ndiff.diff genie.py overfit_dir.zip sample.py slurm-3359338.out train_tokenizer.py\r\ndiff.log gifs __pycache__ scripts_cremers slurm-3373400.out utils\r\nframe-knoms.png input_pipeline README.md scripts_horeka slurm-3373404.out wandb\r\nframe.png LICENSE read_tf_record.py slurm tests weekend-job-requeuer.sh\r\nframes models requirements-franz.txt slurm-3359333.out train_dynamics.py weekend-job-starter.sh\r\n]0;tum_cte0515@hkn1993:~/Projects/jafar_jobs_2]633;D;0",,terminal_output +5379,5615891,"TERMINAL",0,0,"cat utils/nn.py",,terminal_command +5380,5615931,"TERMINAL",0,0,"]633;E;2025-07-27 11:33:21 cat utils/nn.py ;adbf53fe-397b-40d3-9339-94ea79afad56]633;Cimport math\r\nfrom typing import Tuple\r\n\r\nfrom flax import linen as nn\r\nimport jax\r\nimport jax.numpy as jnp\r\nimport einops\r\n\r\n\r\nclass PositionalEncoding(nn.Module):\r\n 
""""""https://uvadlc-notebooks.readthedocs.io/en/latest/tutorial_notebooks/JAX/tutorial6/Transformers_and_MHAttention.html""""""\r\n\r\n d_model: int # Hidden dimensionality of the input.\r\n max_len: int = 5000 # Maximum length of a sequence to expect.\r\n\r\n def setup(self):\r\n # Create matrix of [SeqLen, HiddenDim] representing the positional encoding for max_len inputs\r\n self.pe = jnp.zeros((self.max_len, self.d_model))\r\n position = jnp.arange(0, self.max_len, dtype=jnp.float32)[:, None]\r\n div_term = jnp.exp(\r\n jnp.arange(0, self.d_model, 2) * (-math.log(10000.0) / self.d_model)\r\n )\r\n self.pe = self.pe.at[:, 0::2].set(jnp.sin(position * div_term))\r\n self.pe = self.pe.at[:, 1::2].set(jnp.cos(position * div_term))\r\n\r\n def __call__(self, x):\r\n x = x + self.pe[: x.shape[2]]\r\n return x\r\n\r\n\r\n# class STBlock2(nn.Module):\r\n# dim: int\r\n# num_heads: int\r\n# dropout: float\r\n# param_dtype: jnp.dtype\r\n# dtype: jnp.dtype\r\n\r\n# @nn.remat\r\n# @nn.compact\r\n# def __call__(self, x: jax.Array) -> jax.Array:\r\n# # --- Spatial attention ---\r\n# z = PositionalEncoding(self.dim)(x)\r\n# z = nn.LayerNorm(\r\n# param_dtype=self.param_dtype,\r\n# dtype=self.dtype,\r\n# )(z)\r\n# causal_mask = jnp.tri(z.shape[-2])\r\n# z = nn.MultiHeadAttention(\r\n# num_heads=self.num_heads,\r\n# qkv_features=self.dim,\r\n# dropout_rate=self.dropout,\r\n# param_dtype=self.param_dtype,\r\n# dtype=self.dtype,\r\n# )(z, mask=causal_mask)\r\n# x = x + z\r\n\r\n# # --- Temporal attention ---\r\n# x = x.swapaxes(1, 2)\r\n# z = PositionalEncoding(self.dim)(x)\r\n# z = nn.LayerNorm(\r\n# param_dtype=self.param_dtype,\r\n# dtype=self.dtype,\r\n# )(z)\r\n# causal_mask = jnp.tri(z.shape[-2])\r\n# z = nn.MultiHeadAttention(\r\n# num_heads=self.num_heads,\r\n# qkv_features=self.dim,\r\n# dropout_rate=self.dropout,\r\n# param_dtype=self.param_dtype,\r\n# dtype=self.dtype,\r\n# )(z, mask=causal_mask)\r\n# x = x + z\r\n# x = x.swapaxes(1, 2)\r\n\r\n# # --- Feedforward ---\r\n# z = nn.LayerNorm(\r\n# param_dtype=self.param_dtype,\r\n# dtype=self.dtype,\r\n# )(x)\r\n# # FIXME (f.srambical): Here, the attention hidden dimension is the same as the FFN's. 
Usually, FFN hidden dimension is 4x model_dim\r\n# z = nn.Dense(\r\n# self.dim,\r\n# param_dtype=self.param_dtype,\r\n# dtype=self.dtype,\r\n# )(z)\r\n# z = nn.gelu(z)\r\n# x = x + z\r\n\r\n# return x\r\n\r\n# class CausalTransformer(nn.Module):\r\n# model_dim: int\r\n# out_dim: int\r\n# num_blocks: int\r\n# num_heads: int\r\n# dropout: float\r\n# param_dtype: jnp.dtype\r\n# dtype: jnp.dtype\r\n\r\n# @nn.compact\r\n# def __call__(self, x: jax.Array) -> jax.Array:\r\n# # Input projection and normalization\r\n# x = nn.Sequential(\r\n# [\r\n# nn.LayerNorm(\r\n# param_dtype=self.param_dtype,\r\n# dtype=self.dtype,\r\n# ),\r\n# nn.Dense(self.model_dim,\r\n# param_dtype=self.param_dtype,\r\n# dtype=self.dtype,\r\n# ),\r\n# nn.LayerNorm(\r\n# param_dtype=self.param_dtype,\r\n# dtype=self.dtype,\r\n# ),\r\n# ]\r\n# )(x)\r\n# # Causal transformer blocks\r\n# for _ in range(self.num_blocks):\r\n# x = STBlock2(\r\n# dim=self.model_dim,\r\n# num_heads=self.num_heads,\r\n# dropout=self.dropout,\r\n# param_dtype=self.param_dtype,\r\n# dtype=self.dtype,\r\n# )(x)\r\n\r\n# # Output projection\r\n# x = nn.Dense(\r\n# self.out_dim,\r\n# param_dtype=self.param_dtype,\r\n# dtype=self.dtype,\r\n# )(x)\r\n# return x # (B, T, E)\r\n\r\n\r\nclass STBlock(nn.Module):\r\n dim: int\r\n ffn_dim: int\r\n num_heads: int\r\n dropout: float\r\n param_dtype: jnp.dtype\r\n dtype: jnp.dtype\r\n use_flash_attention: bool\r\n spatial_bert: bool = True\r\n\r\n @nn.remat\r\n @nn.compact\r\n def __call__(self, x: jax.Array) -> jax.Array:\r\n # --- Spatial attention ---\r\n z = PositionalEncoding(self.dim)(x)\r\n z = nn.LayerNorm(\r\n param_dtype=self.param_dtype,\r\n dtype=self.dtype,\r\n )(z)\r\n spatial_causal_mask = None if self.spatial_bert else jnp.tri(z.shape[-2])\r\n z = nn.MultiHeadAttention(\r\n num_heads=self.num_heads,\r\n qkv_features=self.dim,\r\n dropout_rate=self.dropout,\r\n param_dtype=self.param_dtype,\r\n dtype=self.dtype,\r\n attention_fn=_create_flash_attention_fn(\r\n self.use_flash_attention, is_causal=not self.spatial_bert\r\n ),\r\n # decode=True\r\n )(z, mask=spatial_causal_mask)\r\n x = x + z\r\n\r\n # --- Temporal attention ---\r\n x = x.swapaxes(1, 2)\r\n z = PositionalEncoding(self.dim)(x)\r\n z = nn.LayerNorm(\r\n param_dtype=self.param_dtype,\r\n dtype=self.dtype,\r\n )(z)\r\n causal_mask = jnp.tri(z.shape[-2])\r\n z = nn.MultiHeadAttention(\r\n num_heads=self.num_heads,\r\n qkv_features=self.dim,\r\n dropout_rate=self.dropout,\r\n param_dtype=self.param_dtype,\r\n dtype=self.dtype,\r\n attention_fn=_create_flash_attention_fn(\r\n self.use_flash_attention, is_causal=True\r\n ),\r\n # decode=True\r\n # FIXME (f.srambical): check whether we should still pass the mask if we set is_causal=True\r\n )(z, mask=causal_mask)\r\n x = x + z\r\n x = x.swapaxes(1, 2)\r\n\r\n # --- Feedforward ---\r\n z = nn.LayerNorm(\r\n param_dtype=self.param_dtype,\r\n dtype=self.dtype,\r\n )(x)\r\n z = nn.Dense(\r\n self.ffn_dim,\r\n param_dtype=self.param_dtype,\r\n dtype=self.dtype,\r\n )(z)\r\n z = nn.gelu(z)\r\n z = nn.Dense(\r\n self.dim,\r\n param_dtype=self.param_dtype,\r\n dtype=self.dtype,\r\n )(z)\r\n x = x + z\r\n\r\n return x\r\n\r\n\r\nclass STTransformer(nn.Module):\r\n model_dim: int\r\n ffn_dim: int\r\n out_dim: int\r\n num_blocks: int\r\n num_heads: int\r\n dropout: float\r\n param_dtype: jnp.dtype\r\n dtype: jnp.dtype\r\n use_flash_attention: bool\r\n spatial_bert: bool = True\r\n\r\n @nn.compact\r\n def __call__(self, x: jax.Array) -> jax.Array:\r\n x = nn.Sequential(\r\n [\r\n nn.LayerNorm(\r\n 
param_dtype=self.param_dtype,\r\n dtype=self.dtype,\r\n ),\r\n nn.Dense(\r\n self.model_dim,\r\n param_dtype=self.param_dtype,\r\n dtype=self.dtype,\r\n ),\r\n nn.LayerNorm(\r\n param_dtype=self.param_dtype,\r\n dtype=self.dtype,\r\n ),\r\n ]\r\n )(x)\r\n for _ in range(self.num_blocks):\r\n x = STBlock(\r\n dim=self.model_dim,\r\n ffn_dim=self.ffn_dim,\r\n num_heads=self.num_heads,\r\n dropout=self.dropout,\r\n param_dtype=self.param_dtype,\r\n dtype=self.dtype,\r\n spatial_bert=self.spatial_bert,\r\n use_flash_attention=self.use_flash_attention,\r\n )(x)\r\n x = nn.Dense(\r\n self.out_dim,\r\n param_dtype=self.param_dtype,\r\n dtype=self.dtype,\r\n )(x)\r\n return x # (B, T, E)\r\n\r\n\r\ndef normalize(x):\r\n return x / (jnp.linalg.norm(x, ord=2, axis=-1, keepdims=True) + 1e-8)\r\n\r\n\r\nclass VectorQuantizer(nn.Module):\r\n latent_dim: int\r\n num_latents: int\r\n dropout: float\r\n\r\n def setup(self):\r\n self.codebook = normalize(\r\n self.param(\r\n ""codebook"",\r\n nn.initializers.lecun_uniform(),\r\n (self.num_latents, self.latent_dim),\r\n )\r\n )\r\n self.drop = nn.Dropout(self.dropout, deterministic=False)\r\n\r\n def __call__(\r\n self, x: jax.Array, training: bool\r\n ) -> Tuple[jax.Array, jax.Array, jax.Array, jax.Array]:\r\n # --- Compute distances ---\r\n x = normalize(x)\r\n codebook = normalize(self.codebook)\r\n distance = -jnp.matmul(x, codebook.T)\r\n if training:\r\n dropout_key = self.make_rng(""dropout"")\r\n distance = self.drop(distance, rng=dropout_key)\r\n\r\n # --- Get indices and embeddings ---\r\n indices = jnp.argmin(distance, axis=-1)\r\n z = self.codebook[indices]\r\n\r\n # --- Straight through estimator ---\r\n z_q = x + jax.lax.stop_gradient(z - x)\r\n return z_q, z, x, indices\r\n\r\n def get_codes(self, indices: jax.Array):\r\n return self.codebook[indices]\r\n\r\n\r\ndef _create_flash_attention_fn(use_flash_attention: bool, is_causal: bool):\r\n """"""\r\n Create an attention function that uses flash attention if enabled.\r\n\r\n Flax MultiHeadAttention provides tensors with shape (batch..., length, num_heads, head_dim)\r\n jax.nn.dot_product_attention expects (batch, length, num_heads, head_dim).\r\n\r\n We need to reshape to ensure compatibility. cuDNN's flash attention additionally\r\n requires a sequence length that is a multiple of 4. We pad the sequence length to the nearest\r\n multiple of 4 and mask accordingly.\r\n """"""\r\n\r\n def attention_fn(query, key, value, bias=None, mask=None, **kwargs):\r\n implementation = ""cudnn"" if use_flash_attention else None\r\n\r\n def _rearrange(x):\r\n return einops.rearrange(x, ""... l h d -> (...) 
l h d"")\r\n\r\n def _pad(x):\r\n return jnp.pad(x, ((0, 0), (0, pad_size), (0, 0), (0, 0)))\r\n\r\n def _fuse_masks(mask: jax.Array, attention_mask: jax.Array) -> jax.Array:\r\n mask_bool = mask.astype(jnp.bool_)\r\n expanded_mask = jnp.pad(\r\n mask_bool, ((0, pad_size), (0, pad_size)), constant_values=False\r\n )\r\n return jnp.logical_and(attention_mask, expanded_mask)\r\n\r\n original_shape = query.shape\r\n original_seq_len = query.shape[-3]\r\n\r\n # Pad to nearest multiple of 4\r\n target_seq_len = ((original_seq_len + 3) // 4) * 4\r\n pad_size = target_seq_len - original_seq_len\r\n\r\n query_4d = _pad(_rearrange(query))\r\n key_4d = _pad(_rearrange(key))\r\n value_4d = _pad(_rearrange(value))\r\n\r\n attention_mask = jnp.ones((target_seq_len, target_seq_len), dtype=jnp.bool_)\r\n attention_mask = attention_mask.at[original_seq_len:, :].set(False)\r\n attention_mask = attention_mask.at[:, original_seq_len:].set(False)\r\n\r\n mask_4d = (\r\n _fuse_masks(mask, attention_mask) if mask is not None else attention_mask\r\n )\r\n mask_4d = mask_4d[jnp.newaxis, jnp.newaxis, :, :] # (1, 1, seq_len, seq_len)\r\n\r\n bias_4d = _pad(_rearrange(bias)) if bias is not None else None\r\n\r\n output_4d = jax.nn.dot_product_attention(\r\n query=query_4d,\r\n key=key_4d,\r\n value=value_4d,\r\n bias=bias_4d,\r\n mask=mask_4d,\r\n implementation=implementation,\r\n is_causal=is_causal,\r\n **kwargs\r\n )\r\n return output_4d[..., :original_seq_len, :, :].reshape(original_shape)\r\n\r\n return attention_fn\r\n]0;tum_cte0515@hkn1993:~/Projects/jafar_jobs_2]633;D;0",,terminal_output +5381,5625664,"TERMINAL",0,0,"runner",,terminal_command +5382,5628056,"TERMINAL",0,0,"cat utils/nn.py",,terminal_command +5383,5628089,"TERMINAL",0,0,"]633;E;2025-07-27 11:33:34 cat utils/nn.py ;adbf53fe-397b-40d3-9339-94ea79afad56]633;Cimport math\r\nfrom typing import Tuple\r\n\r\nfrom flax import linen as nn\r\nimport jax\r\nimport jax.numpy as jnp\r\nimport einops\r\n\r\n\r\nclass PositionalEncoding(nn.Module):\r\n """"""https://uvadlc-notebooks.readthedocs.io/en/latest/tutorial_notebooks/JAX/tutorial6/Transformers_and_MHAttention.html""""""\r\n\r\n d_model: int # Hidden dimensionality of the input.\r\n max_len: int = 5000 # Maximum length of a sequence to expect.\r\n\r\n def setup(self):\r\n # Create matrix of [SeqLen, HiddenDim] representing the positional encoding for max_len inputs\r\n self.pe = jnp.zeros((self.max_len, self.d_model))\r\n position = jnp.arange(0, self.max_len, dtype=jnp.float32)[:, None]\r\n div_term = jnp.exp(\r\n jnp.arange(0, self.d_model, 2) * (-math.log(10000.0) / self.d_model)\r\n )\r\n self.pe = self.pe.at[:, 0::2].set(jnp.sin(position * div_term))\r\n self.pe = self.pe.at[:, 1::2].set(jnp.cos(position * div_term))\r\n\r\n def __call__(self, x):\r\n x = x + self.pe[: x.shape[2]]\r\n return x\r\n\r\n\r\nclass STBlock(nn.Module):\r\n dim: int\r\n ffn_dim: int\r\n num_heads: int\r\n dropout: float\r\n param_dtype: jnp.dtype\r\n dtype: jnp.dtype\r\n use_flash_attention: bool\r\n\r\n @nn.remat\r\n @nn.compact\r\n def __call__(self, x: jax.Array) -> jax.Array:\r\n # --- Spatial attention ---\r\n z = PositionalEncoding(self.dim)(x)\r\n z = nn.LayerNorm(\r\n param_dtype=self.param_dtype,\r\n dtype=self.dtype,\r\n )(z)\r\n z = nn.MultiHeadAttention(\r\n num_heads=self.num_heads,\r\n qkv_features=self.dim,\r\n dropout_rate=self.dropout,\r\n param_dtype=self.param_dtype,\r\n dtype=self.dtype,\r\n attention_fn=_create_flash_attention_fn(\r\n self.use_flash_attention, is_causal=False\r\n ),\r\n )(z)\r\n x 
= x + z\r\n\r\n # --- Temporal attention ---\r\n x = x.swapaxes(1, 2)\r\n z = PositionalEncoding(self.dim)(x)\r\n z = nn.LayerNorm(\r\n param_dtype=self.param_dtype,\r\n dtype=self.dtype,\r\n )(z)\r\n causal_mask = jnp.tri(z.shape[-2])\r\n z = nn.MultiHeadAttention(\r\n num_heads=self.num_heads,\r\n qkv_features=self.dim,\r\n dropout_rate=self.dropout,\r\n param_dtype=self.param_dtype,\r\n dtype=self.dtype,\r\n attention_fn=_create_flash_attention_fn(\r\n self.use_flash_attention, is_causal=True\r\n ),\r\n # FIXME (f.srambical): check whether we should still pass the mask if we set is_causal=True\r\n )(z, mask=causal_mask)\r\n x = x + z\r\n x = x.swapaxes(1, 2)\r\n\r\n # --- Feedforward ---\r\n z = nn.LayerNorm(\r\n param_dtype=self.param_dtype,\r\n dtype=self.dtype,\r\n )(x)\r\n z = nn.Dense(\r\n self.ffn_dim,\r\n param_dtype=self.param_dtype,\r\n dtype=self.dtype,\r\n )(z)\r\n z = nn.gelu(z)\r\n z = nn.Dense(\r\n self.dim,\r\n param_dtype=self.param_dtype,\r\n dtype=self.dtype,\r\n )(z)\r\n x = x + z\r\n\r\n return x\r\n\r\n\r\nclass STTransformer(nn.Module):\r\n model_dim: int\r\n ffn_dim: int\r\n out_dim: int\r\n num_blocks: int\r\n num_heads: int\r\n dropout: float\r\n param_dtype: jnp.dtype\r\n dtype: jnp.dtype\r\n use_flash_attention: bool\r\n\r\n @nn.compact\r\n def __call__(self, x: jax.Array) -> jax.Array:\r\n x = nn.Sequential(\r\n [\r\n nn.LayerNorm(\r\n param_dtype=self.param_dtype,\r\n dtype=self.dtype,\r\n ),\r\n nn.Dense(\r\n self.model_dim,\r\n param_dtype=self.param_dtype,\r\n dtype=self.dtype,\r\n ),\r\n nn.LayerNorm(\r\n param_dtype=self.param_dtype,\r\n dtype=self.dtype,\r\n ),\r\n ]\r\n )(x)\r\n for _ in range(self.num_blocks):\r\n x = STBlock(\r\n dim=self.model_dim,\r\n ffn_dim=self.ffn_dim,\r\n num_heads=self.num_heads,\r\n dropout=self.dropout,\r\n param_dtype=self.param_dtype,\r\n dtype=self.dtype,\r\n use_flash_attention=self.use_flash_attention,\r\n )(x)\r\n x = nn.Dense(\r\n self.out_dim,\r\n param_dtype=self.param_dtype,\r\n dtype=self.dtype,\r\n )(x)\r\n return x # (B, T, E)\r\n\r\n\r\ndef normalize(x):\r\n return x / (jnp.linalg.norm(x, ord=2, axis=-1, keepdims=True) + 1e-8)\r\n\r\n\r\nclass VectorQuantizer(nn.Module):\r\n latent_dim: int\r\n num_latents: int\r\n dropout: float\r\n\r\n def setup(self):\r\n self.codebook = normalize(\r\n self.param(\r\n ""codebook"",\r\n nn.initializers.lecun_uniform(),\r\n (self.num_latents, self.latent_dim),\r\n )\r\n )\r\n self.drop = nn.Dropout(self.dropout, deterministic=False)\r\n\r\n def __call__(\r\n self, x: jax.Array, training: bool\r\n ) -> Tuple[jax.Array, jax.Array, jax.Array, jax.Array]:\r\n # --- Compute distances ---\r\n x = normalize(x)\r\n codebook = normalize(self.codebook)\r\n distance = -jnp.matmul(x, codebook.T)\r\n if training:\r\n dropout_key = self.make_rng(""dropout"")\r\n distance = self.drop(distance, rng=dropout_key)\r\n\r\n # --- Get indices and embeddings ---\r\n indices = jnp.argmin(distance, axis=-1)\r\n z = self.codebook[indices]\r\n\r\n # --- Straight through estimator ---\r\n z_q = x + jax.lax.stop_gradient(z - x)\r\n return z_q, z, x, indices\r\n\r\n def get_codes(self, indices: jax.Array):\r\n return self.codebook[indices]\r\n\r\n\r\ndef _create_flash_attention_fn(use_flash_attention: bool, is_causal: bool):\r\n """"""\r\n Create an attention function that uses flash attention if enabled.\r\n\r\n Flax MultiHeadAttention provides tensors with shape (batch..., length, num_heads, head_dim)\r\n jax.nn.dot_product_attention expects (batch, length, num_heads, head_dim).\r\n\r\n We need to 
reshape to ensure compatibility. cuDNN's flash attention additionally\r\n requires a sequence length that is a multiple of 4. We pad the sequence length to the nearest\r\n multiple of 4 and mask accordingly.\r\n """"""\r\n\r\n def attention_fn(query, key, value, bias=None, mask=None, **kwargs):\r\n implementation = ""cudnn"" if use_flash_attention else None\r\n\r\n def _rearrange(x):\r\n return einops.rearrange(x, ""... l h d -> (...) l h d"")\r\n\r\n def _pad(x):\r\n return jnp.pad(x, ((0, 0), (0, pad_size), (0, 0), (0, 0)))\r\n\r\n def _fuse_masks(mask: jax.Array, attention_mask: jax.Array) -> jax.Array:\r\n mask_bool = mask.astype(jnp.bool_)\r\n expanded_mask = jnp.pad(\r\n mask_bool, ((0, pad_size), (0, pad_size)), constant_values=False\r\n )\r\n return jnp.logical_and(attention_mask, expanded_mask)\r\n\r\n original_shape = query.shape\r\n original_seq_len = query.shape[-3]\r\n\r\n # Pad to nearest multiple of 4\r\n target_seq_len = ((original_seq_len + 3) // 4) * 4\r\n pad_size = target_seq_len - original_seq_len\r\n\r\n query_4d = _pad(_rearrange(query))\r\n key_4d = _pad(_rearrange(key))\r\n value_4d = _pad(_rearrange(value))\r\n\r\n attention_mask = jnp.ones((target_seq_len, target_seq_len), dtype=jnp.bool_)\r\n attention_mask = attention_mask.at[original_seq_len:, :].set(False)\r\n attention_mask = attention_mask.at[:, original_seq_len:].set(False)\r\n\r\n mask_4d = (\r\n _fuse_masks(mask, attention_mask) if mask is not None else attention_mask\r\n )\r\n mask_4d = mask_4d[jnp.newaxis, jnp.newaxis, :, :] # (1, 1, seq_len, seq_len)\r\n\r\n bias_4d = _pad(_rearrange(bias)) if bias is not None else None\r\n\r\n output_4d = jax.nn.dot_product_attention(\r\n query=query_4d,\r\n key=key_4d,\r\n value=value_4d,\r\n bias=bias_4d,\r\n mask=mask_4d,\r\n implementation=implementation,\r\n is_causal=is_causal,\r\n **kwargs\r\n )\r\n return output_4d[..., :original_seq_len, :, :].reshape(original_shape)\r\n\r\n return attention_fn\r\n]0;tum_cte0515@hkn1993:~/Projects/jafar_jobs]633;D;0",,terminal_output +5384,5634988,"slurm/jobs/mihir/horeka/mask_prob_fix/train_dynamics_8_nodes.sbatch",0,0,"",shellscript,tab +5385,5649502,"TERMINAL",0,0,"cd ../jafar",,terminal_command +5386,5649533,"TERMINAL",0,0,"]633;E;2025-07-27 11:33:55 cd ../jafar;adbf53fe-397b-40d3-9339-94ea79afad56]633;C]0;tum_cte0515@hkn1993:~/Projects/jafar]633;D;0",,terminal_output +5387,5665948,"TERMINAL",0,0,"cp slurm/jobs/mihir/horeka/mask_prob_fix/train_dynamics_8_nodes.sbatch ../jafar_jobs/slurm/jobs/mihir/horeka/mask_prob_fix/train_dynamics_8_nodes.sbatch",,terminal_command +5388,5665981,"TERMINAL",0,0,"]633;E;2025-07-27 11:34:11 cp slurm/jobs/mihir/horeka/mask_prob_fix/train_dynamics_8_nodes.sbatch ../jafar_jobs/slurm/jobs/mihir/horeka/mask_prob_fix/train_dynamics_8_nodes.sbatch;adbf53fe-397b-40d3-9339-94ea79afad56]633;C]0;tum_cte0515@hkn1993:~/Projects/jafar]633;D;0]633;P;Cwd=/home/hk-project-p0023960/tum_cte0515/Projects/jafar",,terminal_output +5389,5673050,"TERMINAL",0,0,"source .venv/bin/activate",,terminal_command +5390,5673080,"TERMINAL",0,0,"]633;E;2025-07-27 11:34:19 source .venv/bin/activate;adbf53fe-397b-40d3-9339-94ea79afad56]633;C]0;tum_cte0515@hkn1993:~/Projects/jafar]633;D;0",,terminal_output +5391,5675306,"TERMINAL",0,0,"runner",,terminal_command +5392,5678898,"TERMINAL",0,0,"sbatch slurm/jobs/mihir/horeka/mask_prob_fix/train_dynamics_8_nodes.sbatch",,terminal_command +5393,5678914,"TERMINAL",0,0,"]633;E;2025-07-27 11:34:24 sbatch 
slurm/jobs/mihir/horeka/mask_prob_fix/train_dynamics_8_nodes.sbatch;adbf53fe-397b-40d3-9339-94ea79afad56]633;CSubmitted batch job 3377693\r\n]0;tum_cte0515@hkn1993:~/Projects/jafar_jobs]633;D;0",,terminal_output
+5394,5680525,"TERMINAL",0,0,"queue",,terminal_command
+5395,5680635,"TERMINAL",0,0,"]633;E;2025-07-27 11:34:26 queue;adbf53fe-397b-40d3-9339-94ea79afad56]633;C[?1049h(B[?7hEvery 1.0s: squeue --mehkn1993.localdomain: Sun Jul 27 11:34:26 2025JOBID PARTITION NAME USER ST\tTIME NODES NODELIST(REASON)3373407 accelerat train_dy tum_cte0 PD\t0:00\t 2 (Priority)3373408 accelerat train_dy tum_cte0 PD\t0:00\t 8 (Priority)3377693 accelerat train_dy tum_cte0 PD\t0:00\t 8 (Priority)3371238 accelerat train_dy tum_cte0 R 1-01:48:57\t 2 hkn[0714,0716]",,terminal_output
[rows 5396-6428 elided: ~1,030 terminal_output events of cursor-addressed digit updates from the live "watch squeue" display refreshing once per second for roughly 18 minutes; only the clock and TIME-column digits change, with the newly submitted job 3377693 (and jobs 3373407, 3373408) still pending with reason Priority while job 3371238 keeps running on hkn[0714,0716]]
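
The docstring of `_create_flash_attention_fn` in the `cat utils/nn.py` output above explains the key constraint: cuDNN flash attention requires a sequence length that is a multiple of 4, so the wrapper zero-pads the sequence axis and masks out the padded positions. The following minimal sketch (not part of the recording; toy shapes and variable names are assumptions) checks the invariant this relies on, using the same public `jax.nn.dot_product_attention` API as the recorded wrapper: attention over the padded, masked inputs must agree with unpadded attention on the original positions.

```python
import jax
import jax.numpy as jnp

B, L, H, D = 2, 5, 4, 8                     # L = 5 is not a multiple of 4
target = ((L + 3) // 4) * 4                 # padded length -> 8
pad = target - L

kq, kk, kv = jax.random.split(jax.random.PRNGKey(0), 3)
q = jax.random.normal(kq, (B, L, H, D))     # (batch, seq_len, num_heads, head_dim)
k = jax.random.normal(kk, (B, L, H, D))
v = jax.random.normal(kv, (B, L, H, D))

def pad_seq(x):
    # zero-pad the sequence axis, as _pad does in the recorded code
    return jnp.pad(x, ((0, 0), (0, pad), (0, 0), (0, 0)))

# keep only the original L x L block, as the recorded attention_mask does
mask = jnp.ones((target, target), dtype=jnp.bool_)
mask = mask.at[L:, :].set(False)
mask = mask.at[:, L:].set(False)
mask = mask[jnp.newaxis, jnp.newaxis]       # (1, 1, S, S), broadcast over batch/heads

out_ref = jax.nn.dot_product_attention(q, k, v)
out_pad = jax.nn.dot_product_attention(pad_seq(q), pad_seq(k), pad_seq(v), mask=mask)

# padded rows are discarded; the kept rows match the unpadded result
assert jnp.allclose(out_ref, out_pad[:, :L], atol=1e-5)
```

Because the masked (padded) keys receive zero softmax weight, slicing the output back to the first L positions recovers the original result, which is why the recorded wrapper can safely `reshape(original_shape)` after truncating.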
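The `VectorQuantizer.__call__` in the same dump uses `x + jax.lax.stop_gradient(z - x)`, the straight-through estimator: the forward pass returns the nearest code `z`, while gradients flow to the encoder output `x` as if quantization were the identity. A hypothetical mini-example (not in the recording; it omits the normalization step the recorded class applies) illustrating the trick:

```python
import jax
import jax.numpy as jnp

def quantize_ste(x, codebook):
    # nearest code under the negative-dot-product "distance" used in nn.py
    idx = jnp.argmin(-x @ codebook.T, axis=-1)
    z = codebook[idx]
    # forward pass returns z; backward pass treats quantization as identity
    return x + jax.lax.stop_gradient(z - x)

codebook = jnp.eye(2)                       # toy 2-code codebook (assumption)
x = jnp.array([0.3, 0.9])
g = jax.grad(lambda v: quantize_ste(v, codebook).sum())(x)
print(g)                                    # [1. 1.]: gradient passes straight through
```

Even though `argmin` and the codebook lookup are non-differentiable, the gradient with respect to `x` is the identity, which is what lets the encoder train through the quantization bottleneck.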